diff --git a/lib/ruby_llm/active_record/chat_methods.rb b/lib/ruby_llm/active_record/chat_methods.rb
index 41930548c..88b8d0194 100644
--- a/lib/ruby_llm/active_record/chat_methods.rb
+++ b/lib/ruby_llm/active_record/chat_methods.rb
@@ -79,7 +79,8 @@ def to_llm
       model_record = model_association
       @chat ||= (context || RubyLLM).chat(
         model: model_record.model_id,
-        provider: model_record.provider.to_sym
+        provider: model_record.provider.to_sym,
+        assume_model_exists: assume_model_exists || false
       )
 
       @chat.reset_messages!
diff --git a/spec/ruby_llm/active_record/acts_as_model_spec.rb b/spec/ruby_llm/active_record/acts_as_model_spec.rb
index 56fb01570..4ef603965 100644
--- a/spec/ruby_llm/active_record/acts_as_model_spec.rb
+++ b/spec/ruby_llm/active_record/acts_as_model_spec.rb
@@ -255,7 +255,8 @@ def messages
       # Mock the chat creation to verify parameters
       expect(RubyLLM).to receive(:chat).with( # rubocop:disable RSpec/MessageSpies,RSpec/StubbedMock
         model: 'test-gpt',
-        provider: :openai
+        provider: :openai,
+        assume_model_exists: false
       ).and_return(
         instance_double(RubyLLM::Chat, reset_messages!: nil, add_message: nil,
                         instance_variable_get: {}, on_new_message: nil, on_end_message: nil,
@@ -272,7 +273,8 @@ def messages
 
       expect(RubyLLM).to receive(:chat).with( # rubocop:disable RSpec/MessageSpies,RSpec/StubbedMock
         model: 'test-claude',
-        provider: :anthropic
+        provider: :anthropic,
+        assume_model_exists: false
       ).and_return(
         instance_double(RubyLLM::Chat, reset_messages!: nil, add_message: nil,
                         instance_variable_get: {}, on_new_message: nil, on_end_message: nil,
diff --git a/spec/ruby_llm/active_record/acts_as_spec.rb b/spec/ruby_llm/active_record/acts_as_spec.rb
index a36c00b49..8612ce0c8 100644
--- a/spec/ruby_llm/active_record/acts_as_spec.rb
+++ b/spec/ruby_llm/active_record/acts_as_spec.rb
@@ -500,6 +500,14 @@ class ToolCall < ActiveRecord::Base # rubocop:disable RSpec/LeakyConstantDeclara
 
         expect(llm_tool_call.name).to eq('calculator')
         expect(llm_tool_call.arguments).to eq({ 'expression' => '2 + 2' })
       end
+
+      it 'correctly preserves custom model' do
+        custom_model = 'my-custom-model'
+        bot_chat = Assistants::BotChat.create!(model: custom_model, assume_model_exists: true, provider: 'openrouter')
+        bot_chat.save!
+        llm_chat = bot_chat.to_llm
+        expect(llm_chat.model.id).to eq(custom_model)
+      end
     end
   end