Add exception for not supported streaming tools scenario #149

Merged
7 changes: 7 additions & 0 deletions src/OllamaApiClient.cs
@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Runtime.CompilerServices;
using System.Text;
@@ -182,6 +183,12 @@ public Task<EmbedResponse> EmbedAsync(EmbedRequest request, CancellationToken ca
{
	if (string.IsNullOrEmpty(request.Model))
		request.Model = SelectedModel;

	if (request.Stream && (request.Tools?.Any() ?? false))
		throw new NotSupportedException("""
			Currently, Ollama does not support function calls in streaming mode.
			See Ollama docs at https://github.com/ollama/ollama/blob/main/docs/api.md#parameters-1 to see whether support has since been added.
			""");

	using var requestMessage = new HttpRequestMessage(HttpMethod.Post, "api/chat")
	{
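From the caller's side, this guard means a tool-enabled `ChatRequest` now has to opt out of streaming before it reaches `api/chat`. A minimal caller-side sketch, assuming the public surface shown in this diff (the endpoint URL and the printed response shape are assumptions, not part of the PR):

```csharp
using System;
using System.Threading;
using OllamaSharp;
using OllamaSharp.Models.Chat;

// Assumed local Ollama endpoint; not part of this PR.
var client = new OllamaApiClient(new Uri("http://localhost:11434"));

var request = new ChatRequest
{
	Model = "llama3.1:latest",
	Stream = false, // required as long as Ollama lacks streaming tool calls
	Messages = [new(ChatRole.User, "How is the weather in LA?")],
	Tools = [/* tool definitions as in the tests below */],
};

// Safe: Stream is false, so the new guard does not fire.
await foreach (var response in client.ChatAsync(request, CancellationToken.None))
	Console.WriteLine(response?.Message.Content);
```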
70 changes: 66 additions & 4 deletions test/OllamaApiClientTests.cs
@@ -1,3 +1,4 @@
using System.IO;
using System.Net;
using System.Text;
using System.Text.Json;
@@ -390,8 +391,8 @@ public async Task Receives_Response_Message_With_ToolsCalls()
	  "eval_count": 28,
	  "eval_duration": 4602334000
	}
	""".ReplaceLineEndings(""); // the JSON stream reader reads by line, so we need to make this one single line

	await using var stream = new MemoryStream();

	await using var writer = new StreamWriter(stream, leaveOpen: true);
@@ -407,7 +408,8 @@ public async Task Receives_Response_Message_With_ToolsCalls()

	var chat = new ChatRequest
	{
		Model = "llama3.1:latest",
		Stream = false,
		Messages = [
			new(ChatRole.User, "How is the weather in LA?"),
		],
@@ -466,6 +468,66 @@ public async Task Receives_Response_Message_With_ToolsCalls()

	toolsFunction.Arguments.ElementAt(2).Key.Should().Be("number");
	toolsFunction.Arguments.ElementAt(2).Value.ToString().Should().Be("42");
}

[Test, NonParallelizable]
public async Task Response_Streaming_Message_With_ToolsCalls_Throws_Not_Supported()
{
	_response = new HttpResponseMessage
	{
		StatusCode = HttpStatusCode.OK,
		Content = new StringContent(string.Empty)
	};

	var request = new ChatRequest
	{
		Model = "llama3.1:latest",
		Messages = [
			new(ChatRole.User, "How is the weather in LA?"),
		],
		Tools = [
			new Tool
			{
				Function = new Function
				{
					Description = "Get the current weather for a location",
					Name = "get_current_weather",
					Parameters = new Parameters
					{
						Properties = new Dictionary<string, Properties>
						{
							["location"] = new()
							{
								Type = "string",
								Description = "The location to get the weather for, e.g. San Francisco, CA"
							},
							["format"] = new()
							{
								Type = "string",
								Description = "The format to return the weather in, e.g. 'celsius' or 'fahrenheit'",
								Enum = ["celsius", "fahrenheit"]
							},
							["number"] = new()
							{
								Type = "integer",
								Description = "The number of the day to get the weather for, e.g. 42"
							}
						},
						Required = ["location", "format"],
					}
				},
				Type = "function"
			}
		]
	};

	var act = async () =>
	{
		var enumerator = _client.ChatAsync(request, CancellationToken.None).GetAsyncEnumerator();
		await enumerator.MoveNextAsync();
	};

	await act.Should().ThrowAsync<NotSupportedException>();
}
}
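The `act` lambda enumerates the result explicitly because `ChatAsync` returns an `IAsyncEnumerable`: if it is implemented as an async iterator, its body, including the new guard, only runs on the first `MoveNextAsync` call, not when the method is invoked. A self-contained sketch of that deferred-throw behavior (all names here are illustrative, not from this PR):

```csharp
using System;
using System.Collections.Generic;
using System.Threading.Tasks;

class DeferredThrowDemo
{
	// Async iterators defer their body until enumeration begins,
	// so the throw below does not happen when the method is called.
	static async IAsyncEnumerable<string> StreamAsync(bool toolsWithStreaming)
	{
		if (toolsWithStreaming)
			throw new NotSupportedException("streaming + tools is not supported");

		await Task.CompletedTask;
		yield return "chunk";
	}

	static async Task Main()
	{
		var stream = StreamAsync(toolsWithStreaming: true); // no exception yet

		try
		{
			await foreach (var _ in stream) { } // first MoveNextAsync throws here
		}
		catch (NotSupportedException ex)
		{
			Console.WriteLine($"Caught: {ex.Message}");
		}
	}
}
```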

@@ -528,7 +590,7 @@ public async Task Throws_Known_Exception_For_Models_That_Dont_Support_Tools()
		Content = new StringContent("{ error: llama2 does not support tools }")
	};

	var act = () => _client.ChatAsync(new ChatRequest() { Stream = false }, CancellationToken.None).StreamToEndAsync();
	await act.Should().ThrowAsync<ModelDoesNotSupportToolsException>();
}
