Skip to content

Commit

Permalink
Merge branch 'main' into add_execute_tools_one
Browse files Browse the repository at this point in the history
  • Loading branch information
mattlindsey authored Nov 12, 2024
2 parents 0210fc4 + f4024d1 commit 1fc012a
Show file tree
Hide file tree
Showing 5 changed files with 49 additions and 37 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
- [BUGFIX] [https://github.com/patterns-ai-core/langchainrb/pull/837] Fix bug when tool functions with no input variables are used with Langchain::LLM::Anthropic
- [BUGFIX] [https://github.com/patterns-ai-core/langchainrb/pull/836] Fix bug when assistant.instructions = nil did not remove the system message
- [FEATURE] [https://github.com/patterns-ai-core/langchainrb/pull/838] Allow setting safety_settings: [] in default_options for Langchain::LLM::GoogleGemini and Langchain::LLM::GoogleVertexAI constructors
- [BUGFIX] [https://github.com/patterns-ai-core/langchainrb/pull/871] Allow passing an options hash to Ollama

## [0.18.0] - 2024-10-12
- [BREAKING] Remove `Langchain::Assistant#clear_thread!` method
Expand Down
6 changes: 4 additions & 2 deletions lib/langchain/llm/ollama.rb
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,8 @@ class Ollama < Base
temperature: 0.0,
completion_model: "llama3.1",
embedding_model: "llama3.1",
chat_model: "llama3.1"
chat_model: "llama3.1",
options: {}
}.freeze

EMBEDDING_SIZES = {
Expand Down Expand Up @@ -45,7 +46,8 @@ def initialize(url: "http://localhost:11434", api_key: nil, default_options: {})
temperature: {default: @defaults[:temperature]},
template: {},
stream: {default: false},
response_format: {default: @defaults[:response_format]}
response_format: {default: @defaults[:response_format]},
options: {default: @defaults[:options]}
)
chat_parameters.remap(response_format: :format)
end
Expand Down
5 changes: 4 additions & 1 deletion lib/langchain/llm/parameters/chat.rb
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,10 @@ class Chat < SimpleDelegator
parallel_tool_calls: {},

# Additional optional parameters
logit_bias: {}
logit_bias: {},

      # Additional LLM options. Ollama only.
options: {}
}

def initialize(parameters: {})
Expand Down

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

7 changes: 4 additions & 3 deletions spec/langchain/llm/ollama_spec.rb
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@
end

context "when default_options are passed" do
let(:default_options) { {response_format: "json"} }
let(:default_options) { {response_format: "json", options: {num_ctx: 8_192}} }
let(:messages) { [{role: "user", content: "Return data from the following sentence: John is a 30 year old software engineering living in SF."}] }
let(:response) { subject.chat(messages: messages) { |resp| streamed_responses << resp } }
let(:streamed_responses) { [] }
Expand All @@ -32,11 +32,12 @@

it "sets the defaults options" do
expect(subject.defaults[:response_format]).to eq("json")
expect(subject.defaults[:options]).to eq(num_ctx: 8_192)
end

it "get passed to consecutive chat() call", vcr: {cassette_name: "Langchain_LLM_Ollama_chat_returns_a_chat_completion_format_json"} do
expect(client).to receive(:post).with("api/chat", hash_including(format: "json")).and_call_original
expect(JSON.parse(response.chat_completion)).to eq({"Name" => "John", "Age" => 30, "Profession" => "Software Engineering", "Location" => "SF"})
expect(client).to receive(:post).with("api/chat", hash_including(format: "json", options: {num_ctx: 8_192})).and_call_original
expect(JSON.parse(response.chat_completion)).to eq({"name" => "John", "age" => 30, "profession" => "software engineer", "location" => "SF"})
end
end
end
Expand Down

0 comments on commit 1fc012a

Please sign in to comment.