From c61f9c937c4f10059b8bf62cb58c989eb4932c74 Mon Sep 17 00:00:00 2001
From: Karol Bajko
Date: Fri, 11 Oct 2024 00:04:22 +0200
Subject: [PATCH] Removed LLM from adapter of the assistant

---
 lib/langchain/assistants/assistant.rb          |  2 +-
 lib/langchain/assistants/llm/adapter.rb        | 10 +++++-----
 lib/langchain/assistants/llm/adapters/_base.rb |  8 --------
 3 files changed, 6 insertions(+), 14 deletions(-)

diff --git a/lib/langchain/assistants/assistant.rb b/lib/langchain/assistants/assistant.rb
index e1dec7a33..7b1d76425 100644
--- a/lib/langchain/assistants/assistant.rb
+++ b/lib/langchain/assistants/assistant.rb
@@ -343,7 +343,7 @@ def chat_with_llm
         tools: @tools,
         tool_choice: tool_choice
       )
-      @llm_adapter.chat(**params, &@block)
+      @llm.chat(**params, &@block)
     end
 
     # Run the tools automatically
diff --git a/lib/langchain/assistants/llm/adapter.rb b/lib/langchain/assistants/llm/adapter.rb
index 8a961a205..99007f9b5 100644
--- a/lib/langchain/assistants/llm/adapter.rb
+++ b/lib/langchain/assistants/llm/adapter.rb
@@ -8,15 +8,15 @@ class Adapter
         def self.build(llm)
           case llm
           when Langchain::LLM::Anthropic
-            LLM::Adapters::Anthropic.new(llm)
+            LLM::Adapters::Anthropic.new
           when Langchain::LLM::GoogleGemini, Langchain::LLM::GoogleVertexAI
-            LLM::Adapters::GoogleGemini.new(llm)
+            LLM::Adapters::GoogleGemini.new
           when Langchain::LLM::MistralAI
-            LLM::Adapters::MistralAI.new(llm)
+            LLM::Adapters::MistralAI.new
           when Langchain::LLM::Ollama
-            LLM::Adapters::Ollama.new(llm)
+            LLM::Adapters::Ollama.new
           when Langchain::LLM::OpenAI
-            LLM::Adapters::OpenAI.new(llm)
+            LLM::Adapters::OpenAI.new
           else
             raise ArgumentError, "Unsupported LLM type: #{llm.class}"
           end
diff --git a/lib/langchain/assistants/llm/adapters/_base.rb b/lib/langchain/assistants/llm/adapters/_base.rb
index b3278ad3b..4cdcad18c 100644
--- a/lib/langchain/assistants/llm/adapters/_base.rb
+++ b/lib/langchain/assistants/llm/adapters/_base.rb
@@ -3,10 +3,6 @@ class Assistant
     module LLM
       module Adapters
         class Base
-          def initialize(llm)
-            @llm = llm
-          end
-
           def build_chat_params(tools:, instructions:, messages:, tool_choice:)
             raise NotImplementedError, "Subclasses must implement build_chat_params"
           end
@@ -26,10 +22,6 @@ def tool_role
           def support_system_message?
             raise NotImplementedError, "Subclasses must implement set_instructions"
           end
-
-          def chat(*args, **kwargs, &block)
-            @llm.chat(*args, **kwargs, &block)
-          end
         end
       end
     end
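
After this patch an adapter no longer wraps the LLM: Adapter.build returns a stateless object whose only job is to shape provider-specific chat parameters, while the Assistant keeps the single @llm reference and calls @llm.chat directly. Below is a minimal Ruby sketch of the resulting flow; Adapter.build, build_chat_params, and the direct llm.chat call come from the files touched above, whereas the API key, example messages, tool values, and response handling are assumptions for illustration only, not part of the patch.

    require "langchain"

    # Any provider supported by the factory works; OpenAI is assumed here.
    llm = Langchain::LLM::OpenAI.new(api_key: ENV["OPENAI_API_KEY"])

    # The factory no longer passes the LLM into the adapter instance.
    adapter = Langchain::Assistant::LLM::Adapter.build(llm)

    # The adapter only builds provider-specific chat parameters...
    params = adapter.build_chat_params(
      tools: [],                                         # assumed: no tools for this sketch
      instructions: "You are a helpful assistant.",      # assumed example instructions
      messages: [{role: "user", content: "Hello!"}],     # assumed example message
      tool_choice: nil
    )

    # ...while the object that owns the LLM makes the call itself, mirroring
    # `@llm.chat(**params, &@block)` in Assistant#chat_with_llm after this change.
    response = llm.chat(**params)
    puts response.chat_completion

Keeping the LLM out of the adapter leaves it as a pure parameter-building strategy and avoids holding a second reference to the same LLM object inside the Assistant.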