diff --git a/dotnet/AutoGen.sln b/dotnet/AutoGen.sln index db0b2cbb54c6..78d18527b629 100644 --- a/dotnet/AutoGen.sln +++ b/dotnet/AutoGen.sln @@ -64,7 +64,7 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.Gemini.Sample", "sa EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.AotCompatibility.Tests", "test\AutoGen.AotCompatibility.Tests\AutoGen.AotCompatibility.Tests.csproj", "{6B82F26D-5040-4453-B21B-C8D1F913CE4C}" EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.OpenAI.V1.Sample", "sample\AutoGen.OpenAI.Sample\AutoGen.OpenAI.V1.Sample.csproj", "{0E635268-351C-4A6B-A28D-593D868C2CA4}" +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.OpenAI.Sample", "sample\AutoGen.OpenAI.Sample\AutoGen.OpenAI.Sample.csproj", "{0E635268-351C-4A6B-A28D-593D868C2CA4}" EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.WebAPI.Sample", "sample\AutoGen.WebAPI.Sample\AutoGen.WebAPI.Sample.csproj", "{12079C18-A519-403F-BBFD-200A36A0C083}" EndProject @@ -74,6 +74,10 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.AzureAIInference.Te EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.Tests.Share", "test\AutoGen.Test.Share\AutoGen.Tests.Share.csproj", "{143725E2-206C-4D37-93E4-9EDF699826B2}" EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.OpenAI", "src\AutoGen.OpenAI\AutoGen.OpenAI.csproj", "{3AF1CBEC-2877-41E9-92AE-3A391B2AA9E8}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.OpenAI.Tests", "test\AutoGen.OpenAI.Tests\AutoGen.OpenAI.Tests.csproj", "{42A8251C-E7B3-47BB-A82E-459952EBE132}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -212,6 +216,14 @@ Global {143725E2-206C-4D37-93E4-9EDF699826B2}.Debug|Any CPU.Build.0 = Debug|Any CPU {143725E2-206C-4D37-93E4-9EDF699826B2}.Release|Any CPU.ActiveCfg = Release|Any CPU 
{143725E2-206C-4D37-93E4-9EDF699826B2}.Release|Any CPU.Build.0 = Release|Any CPU + {3AF1CBEC-2877-41E9-92AE-3A391B2AA9E8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {3AF1CBEC-2877-41E9-92AE-3A391B2AA9E8}.Debug|Any CPU.Build.0 = Debug|Any CPU + {3AF1CBEC-2877-41E9-92AE-3A391B2AA9E8}.Release|Any CPU.ActiveCfg = Release|Any CPU + {3AF1CBEC-2877-41E9-92AE-3A391B2AA9E8}.Release|Any CPU.Build.0 = Release|Any CPU + {42A8251C-E7B3-47BB-A82E-459952EBE132}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {42A8251C-E7B3-47BB-A82E-459952EBE132}.Debug|Any CPU.Build.0 = Debug|Any CPU + {42A8251C-E7B3-47BB-A82E-459952EBE132}.Release|Any CPU.ActiveCfg = Release|Any CPU + {42A8251C-E7B3-47BB-A82E-459952EBE132}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE @@ -250,6 +262,8 @@ Global {5C45981D-1319-4C25-935C-83D411CB28DF} = {18BF8DD7-0585-48BF-8F97-AD333080CE06} {5970868F-831E-418F-89A9-4EC599563E16} = {F823671B-3ECA-4AE6-86DA-25E920D3FE64} {143725E2-206C-4D37-93E4-9EDF699826B2} = {F823671B-3ECA-4AE6-86DA-25E920D3FE64} + {3AF1CBEC-2877-41E9-92AE-3A391B2AA9E8} = {18BF8DD7-0585-48BF-8F97-AD333080CE06} + {42A8251C-E7B3-47BB-A82E-459952EBE132} = {F823671B-3ECA-4AE6-86DA-25E920D3FE64} EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution SolutionGuid = {93384647-528D-46C8-922C-8DB36A382F0B} diff --git a/dotnet/eng/Version.props b/dotnet/eng/Version.props index d90e8bc76c80..36cfd917c2c0 100644 --- a/dotnet/eng/Version.props +++ b/dotnet/eng/Version.props @@ -2,8 +2,9 @@ 1.0.0-beta.17 - 1.15.1 - 1.15.1-alpha + 2.0.0-beta.3 + 1.18.1-rc + 1.18.1-alpha 5.0.0 4.3.0 6.0.0 @@ -16,6 +17,7 @@ 3.0.0 4.3.0.2 1.0.0-beta.1 + 2.0.0-beta.10 7.4.4 \ No newline at end of file diff --git a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/CreateAnAgent.cs b/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/CreateAnAgent.cs index 45be312cbd5e..f68053224663 100644 --- 
a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/CreateAnAgent.cs +++ b/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/CreateAnAgent.cs @@ -3,8 +3,10 @@ using AutoGen; using AutoGen.Core; -using AutoGen.OpenAI.V1; +using AutoGen.OpenAI; +using AutoGen.OpenAI.Extension; using FluentAssertions; +using OpenAI; public partial class AssistantCodeSnippet { @@ -32,23 +34,18 @@ public void CodeSnippet2() { #region code_snippet_2 // get OpenAI Key and create config - var apiKey = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY"); - string endPoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT"); // change to your endpoint + var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY"); + var model = "gpt-4o-mini"; - var llmConfig = new AzureOpenAIConfig( - endpoint: endPoint, - deploymentName: "gpt-3.5-turbo-16k", // change to your deployment name - apiKey: apiKey); + var openAIClient = new OpenAIClient(apiKey); // create assistant agent - var assistantAgent = new AssistantAgent( + var assistantAgent = new OpenAIChatAgent( name: "assistant", systemMessage: "You are an assistant that help user to do some tasks.", - llmConfig: new ConversableAgentConfig - { - Temperature = 0, - ConfigList = new[] { llmConfig }, - }); + chatClient: openAIClient.GetChatClient(model)) + .RegisterMessageConnector() + .RegisterPrintMessage(); #endregion code_snippet_2 } @@ -71,27 +68,21 @@ public async Task CodeSnippet4() // get OpenAI Key and create config var apiKey = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY"); string endPoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT"); // change to your endpoint - - var llmConfig = new AzureOpenAIConfig( - endpoint: endPoint, - deploymentName: "gpt-3.5-turbo-16k", // change to your deployment name - apiKey: apiKey); + var model = "gpt-4o-mini"; + var openAIClient = new OpenAIClient(new System.ClientModel.ApiKeyCredential(apiKey), new OpenAIClientOptions + { + Endpoint = new Uri(endPoint), + }); 
#region code_snippet_4 - var assistantAgent = new AssistantAgent( + var assistantAgent = new OpenAIChatAgent( + chatClient: openAIClient.GetChatClient(model), name: "assistant", systemMessage: "You are an assistant that convert user input to upper case.", - llmConfig: new ConversableAgentConfig - { - Temperature = 0, - ConfigList = new[] - { - llmConfig - }, - FunctionContracts = new[] - { - this.UpperCaseFunctionContract, // The FunctionDefinition object for the UpperCase function - }, - }); + functions: [ + this.UpperCaseFunctionContract.ToChatTool(), // The FunctionDefinition object for the UpperCase function + ]) + .RegisterMessageConnector() + .RegisterPrintMessage(); var response = await assistantAgent.SendAsync("hello"); response.Should().BeOfType(); @@ -106,31 +97,24 @@ public async Task CodeSnippet5() // get OpenAI Key and create config var apiKey = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY"); string endPoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT"); // change to your endpoint - - var llmConfig = new AzureOpenAIConfig( - endpoint: endPoint, - deploymentName: "gpt-3.5-turbo-16k", // change to your deployment name - apiKey: apiKey); + var model = "gpt-4o-mini"; + var openAIClient = new OpenAIClient(new System.ClientModel.ApiKeyCredential(apiKey), new OpenAIClientOptions + { + Endpoint = new Uri(endPoint), + }); #region code_snippet_5 - var assistantAgent = new AssistantAgent( - name: "assistant", - systemMessage: "You are an assistant that convert user input to upper case.", - llmConfig: new ConversableAgentConfig - { - Temperature = 0, - ConfigList = new[] - { - llmConfig - }, - FunctionContracts = new[] - { - this.UpperCaseFunctionContract, // The FunctionDefinition object for the UpperCase function - }, - }, - functionMap: new Dictionary>> + var functionCallMiddleware = new FunctionCallMiddleware( + functions: [this.UpperCaseFunctionContract], + functionMap: new Dictionary>>() { - { this.UpperCaseFunctionContract.Name, 
this.UpperCaseWrapper }, // The wrapper function for the UpperCase function + { this.UpperCaseFunctionContract.Name, this.UpperCase }, }); + var assistantAgent = new OpenAIChatAgent( + name: "assistant", + systemMessage: "You are an assistant that convert user input to upper case.", + chatClient: openAIClient.GetChatClient(model)) + .RegisterMessageConnector() + .RegisterStreamingMiddleware(functionCallMiddleware); var response = await assistantAgent.SendAsync("hello"); response.Should().BeOfType(); diff --git a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/FunctionCallCodeSnippet.cs b/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/FunctionCallCodeSnippet.cs index 567476ba21cd..854a385dc341 100644 --- a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/FunctionCallCodeSnippet.cs +++ b/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/FunctionCallCodeSnippet.cs @@ -3,7 +3,6 @@ using AutoGen; using AutoGen.Core; -using AutoGen.OpenAI.V1; using FluentAssertions; public partial class FunctionCallCodeSnippet diff --git a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/GetStartCodeSnippet.cs b/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/GetStartCodeSnippet.cs index c5cdb35f49bc..c5ff7b770338 100644 --- a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/GetStartCodeSnippet.cs +++ b/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/GetStartCodeSnippet.cs @@ -4,7 +4,9 @@ #region snippet_GetStartCodeSnippet using AutoGen; using AutoGen.Core; -using AutoGen.OpenAI.V1; +using AutoGen.OpenAI; +using AutoGen.OpenAI.Extension; +using OpenAI; #endregion snippet_GetStartCodeSnippet public class GetStartCodeSnippet @@ -13,16 +15,14 @@ public async Task CodeSnippet1() { #region code_snippet_1 var openAIKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? 
throw new Exception("Please set OPENAI_API_KEY environment variable."); - var gpt35Config = new OpenAIConfig(openAIKey, "gpt-3.5-turbo"); + var openAIClient = new OpenAIClient(openAIKey); + var model = "gpt-4o-mini"; - var assistantAgent = new AssistantAgent( + var assistantAgent = new OpenAIChatAgent( name: "assistant", systemMessage: "You are an assistant that help user to do some tasks.", - llmConfig: new ConversableAgentConfig - { - Temperature = 0, - ConfigList = [gpt35Config], - }) + chatClient: openAIClient.GetChatClient(model)) + .RegisterMessageConnector() .RegisterPrintMessage(); // register a hook to print message nicely to console // set human input mode to ALWAYS so that user always provide input diff --git a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/MiddlewareAgentCodeSnippet.cs b/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/MiddlewareAgentCodeSnippet.cs index 9ad252c1ebeb..1b5a9a903207 100644 --- a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/MiddlewareAgentCodeSnippet.cs +++ b/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/MiddlewareAgentCodeSnippet.cs @@ -3,7 +3,7 @@ using System.Text.Json; using AutoGen.Core; -using AutoGen.OpenAI.V1; +using AutoGen.OpenAI; using FluentAssertions; namespace AutoGen.BasicSample.CodeSnippet; diff --git a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/OpenAICodeSnippet.cs b/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/OpenAICodeSnippet.cs index b7b5104e9905..60520078e72e 100644 --- a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/OpenAICodeSnippet.cs +++ b/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/OpenAICodeSnippet.cs @@ -3,11 +3,12 @@ #region using_statement using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; +using AutoGen.OpenAI; +using AutoGen.OpenAI.Extension; #endregion using_statement using FluentAssertions; +using OpenAI; +using OpenAI.Chat; namespace AutoGen.BasicSample.CodeSnippet; #region weather_function @@ -32,31 +33,30 @@ public async 
Task CreateOpenAIChatAgentAsync() { #region create_openai_chat_agent var openAIKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new Exception("Please set OPENAI_API_KEY environment variable."); - var modelId = "gpt-3.5-turbo"; + var modelId = "gpt-4o-mini"; var openAIClient = new OpenAIClient(openAIKey); // create an open ai chat agent var openAIChatAgent = new OpenAIChatAgent( - openAIClient: openAIClient, + chatClient: openAIClient.GetChatClient(modelId), name: "assistant", - modelName: modelId, systemMessage: "You are an assistant that help user to do some tasks."); // OpenAIChatAgent supports the following message types: // - IMessage where ChatRequestMessage is from Azure.AI.OpenAI - var helloMessage = new ChatRequestUserMessage("Hello"); + var helloMessage = new UserChatMessage("Hello"); // Use MessageEnvelope.Create to create an IMessage var chatMessageContent = MessageEnvelope.Create(helloMessage); var reply = await openAIChatAgent.SendAsync(chatMessageContent); - // The type of reply is MessageEnvelope where ChatResponseMessage is from Azure.AI.OpenAI - reply.Should().BeOfType>(); + // The type of reply is MessageEnvelope where ChatResponseMessage is from Azure.AI.OpenAI + reply.Should().BeOfType>(); // You can un-envelop the reply to get the ChatResponseMessage - ChatResponseMessage response = reply.As>().Content; - response.Role.Should().Be(ChatRole.Assistant); + ChatCompletion response = reply.As>().Content; + response.Role.Should().Be(ChatMessageRole.Assistant); #endregion create_openai_chat_agent #region create_openai_chat_agent_streaming @@ -64,8 +64,8 @@ public async Task CreateOpenAIChatAgentAsync() await foreach (var streamingMessage in streamingReply) { - streamingMessage.Should().BeOfType>(); - streamingMessage.As>().Content.Role.Should().Be(ChatRole.Assistant); + streamingMessage.Should().BeOfType>(); + streamingMessage.As>().Content.Role.Should().Be(ChatMessageRole.Assistant); } #endregion create_openai_chat_agent_streaming 
@@ -77,7 +77,7 @@ public async Task CreateOpenAIChatAgentAsync() // now the agentWithConnector supports more message types var messages = new IMessage[] { - MessageEnvelope.Create(new ChatRequestUserMessage("Hello")), + MessageEnvelope.Create(new UserChatMessage("Hello")), new TextMessage(Role.Assistant, "Hello", from: "user"), new MultiModalMessage(Role.Assistant, [ @@ -106,9 +106,8 @@ public async Task OpenAIChatAgentGetWeatherFunctionCallAsync() // create an open ai chat agent var openAIChatAgent = new OpenAIChatAgent( - openAIClient: openAIClient, + chatClient: openAIClient.GetChatClient(modelId), name: "assistant", - modelName: modelId, systemMessage: "You are an assistant that help user to do some tasks.") .RegisterMessageConnector(); diff --git a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/PrintMessageMiddlewareCodeSnippet.cs b/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/PrintMessageMiddlewareCodeSnippet.cs index be0329b7fd5a..0ac7f71a3cae 100644 --- a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/PrintMessageMiddlewareCodeSnippet.cs +++ b/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/PrintMessageMiddlewareCodeSnippet.cs @@ -2,10 +2,8 @@ // PrintMessageMiddlewareCodeSnippet.cs using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using Azure; -using Azure.AI.OpenAI; +using AutoGen.OpenAI; +using AutoGen.OpenAI.Extension; namespace AutoGen.BasicSample.CodeSnippet; @@ -15,8 +13,8 @@ public async Task PrintMessageMiddlewareAsync() { var config = LLMConfiguration.GetAzureOpenAIGPT3_5_Turbo(); var endpoint = new Uri(config.Endpoint); - var openaiClient = new OpenAIClient(endpoint, new AzureKeyCredential(config.ApiKey)); - var agent = new OpenAIChatAgent(openaiClient, "assistant", config.DeploymentName) + var gpt4o = LLMConfiguration.GetOpenAIGPT4o_mini(); + var agent = new OpenAIChatAgent(gpt4o, "assistant", config.DeploymentName) .RegisterMessageConnector(); #region PrintMessageMiddleware @@ -31,10 +29,10 @@ public async Task 
PrintMessageStreamingMiddlewareAsync() { var config = LLMConfiguration.GetAzureOpenAIGPT3_5_Turbo(); var endpoint = new Uri(config.Endpoint); - var openaiClient = new OpenAIClient(endpoint, new AzureKeyCredential(config.ApiKey)); + var gpt4o = LLMConfiguration.GetOpenAIGPT4o_mini(); #region print_message_streaming - var streamingAgent = new OpenAIChatAgent(openaiClient, "assistant", config.DeploymentName) + var streamingAgent = new OpenAIChatAgent(gpt4o, "assistant") .RegisterMessageConnector() .RegisterPrintMessage(); diff --git a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/TypeSafeFunctionCallCodeSnippet.cs b/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/TypeSafeFunctionCallCodeSnippet.cs index cf3e25eeee40..667705835eb3 100644 --- a/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/TypeSafeFunctionCallCodeSnippet.cs +++ b/dotnet/sample/AutoGen.BasicSamples/CodeSnippet/TypeSafeFunctionCallCodeSnippet.cs @@ -2,8 +2,7 @@ // TypeSafeFunctionCallCodeSnippet.cs using System.Text.Json; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; +using AutoGen.OpenAI.Extension; #region weather_report_using_statement using AutoGen.Core; #endregion weather_report_using_statement @@ -32,7 +31,7 @@ public async Task Consume() var functionInstance = new TypeSafeFunctionCall(); // Get the generated function definition - FunctionDefinition functionDefiniton = functionInstance.WeatherReportFunctionContract.ToOpenAIFunctionDefinition(); + var functionDefiniton = functionInstance.WeatherReportFunctionContract.ToChatTool(); // Get the generated function wrapper Func> functionWrapper = functionInstance.WeatherReportWrapper; @@ -69,32 +68,31 @@ public async Task UpperCase(string input) #region code_snippet_1 // file: FunctionDefinition.generated.cs - public FunctionDefinition UpperCaseFunction + public FunctionContract WeatherReportFunctionContract { - get => new FunctionDefinition + get => new FunctionContract { - Name = @"UpperCase", - Description = "convert input to upper case", 
- Parameters = BinaryData.FromObjectAsJson(new + ClassName = @"TypeSafeFunctionCall", + Name = @"WeatherReport", + Description = @"Get weather report", + ReturnType = typeof(Task), + Parameters = new global::AutoGen.Core.FunctionParameterContract[] { - Type = "object", - Properties = new - { - input = new + new FunctionParameterContract { - Type = @"string", - Description = @"input", + Name = @"city", + Description = @"city", + ParameterType = typeof(string), + IsRequired = true, }, - }, - Required = new[] - { - "input", + new FunctionParameterContract + { + Name = @"date", + Description = @"date", + ParameterType = typeof(string), + IsRequired = true, }, }, - new JsonSerializerOptions - { - PropertyNamingPolicy = JsonNamingPolicy.CamelCase, - }) }; } #endregion code_snippet_1 diff --git a/dotnet/sample/AutoGen.BasicSamples/Example01_AssistantAgent.cs b/dotnet/sample/AutoGen.BasicSamples/Example01_AssistantAgent.cs index 3ee363bfc062..40c88102588a 100644 --- a/dotnet/sample/AutoGen.BasicSamples/Example01_AssistantAgent.cs +++ b/dotnet/sample/AutoGen.BasicSamples/Example01_AssistantAgent.cs @@ -4,6 +4,8 @@ using AutoGen; using AutoGen.BasicSample; using AutoGen.Core; +using AutoGen.OpenAI; +using AutoGen.OpenAI.Extension; using FluentAssertions; /// @@ -13,18 +15,12 @@ public static class Example01_AssistantAgent { public static async Task RunAsync() { - var gpt35 = LLMConfiguration.GetAzureOpenAIGPT3_5_Turbo(); - var config = new ConversableAgentConfig - { - Temperature = 0, - ConfigList = [gpt35], - }; - - // create assistant agent - var assistantAgent = new AssistantAgent( + var gpt4oMini = LLMConfiguration.GetOpenAIGPT4o_mini(); + var assistantAgent = new OpenAIChatAgent( + chatClient: gpt4oMini, name: "assistant", - systemMessage: "You convert what user said to all uppercase.", - llmConfig: config) + systemMessage: "You convert what user said to all uppercase.") + .RegisterMessageConnector() .RegisterPrintMessage(); // talk to the assistant agent diff --git 
a/dotnet/sample/AutoGen.BasicSamples/Example02_TwoAgent_MathChat.cs b/dotnet/sample/AutoGen.BasicSamples/Example02_TwoAgent_MathChat.cs index c2957f32da76..b2dd9726b4b9 100644 --- a/dotnet/sample/AutoGen.BasicSamples/Example02_TwoAgent_MathChat.cs +++ b/dotnet/sample/AutoGen.BasicSamples/Example02_TwoAgent_MathChat.cs @@ -1,30 +1,28 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Example02_TwoAgent_MathChat.cs -using AutoGen; using AutoGen.BasicSample; using AutoGen.Core; +using AutoGen.OpenAI; +using AutoGen.OpenAI.Extension; using FluentAssertions; public static class Example02_TwoAgent_MathChat { public static async Task RunAsync() { #region code_snippet_1 - // get gpt-3.5-turbo config - var gpt35 = LLMConfiguration.GetAzureOpenAIGPT3_5_Turbo(); + var gpt4oMini = LLMConfiguration.GetOpenAIGPT4o_mini(); + // create teacher agent // teacher agent will create math questions - var teacher = new AssistantAgent( + var teacher = new OpenAIChatAgent( + chatClient: gpt4oMini, name: "teacher", systemMessage: @"You are a teacher that create pre-school math question for student and check answer. If the answer is correct, you stop the conversation by saying [COMPLETE]. 
- If the answer is wrong, you ask student to fix it.", - llmConfig: new ConversableAgentConfig - { - Temperature = 0, - ConfigList = [gpt35], - }) + If the answer is wrong, you ask student to fix it.") + .RegisterMessageConnector() .RegisterMiddleware(async (msgs, option, agent, _) => { var reply = await agent.GenerateReplyAsync(msgs, option); @@ -39,14 +37,11 @@ public static async Task RunAsync() // create student agent // student agent will answer the math questions - var student = new AssistantAgent( + var student = new OpenAIChatAgent( + chatClient: gpt4oMini, name: "student", - systemMessage: "You are a student that answer question from teacher", - llmConfig: new ConversableAgentConfig - { - Temperature = 0, - ConfigList = [gpt35], - }) + systemMessage: "You are a student that answer question from teacher") + .RegisterMessageConnector() .RegisterPrintMessage(); // start the conversation diff --git a/dotnet/sample/AutoGen.BasicSamples/Example03_Agent_FunctionCall.cs b/dotnet/sample/AutoGen.BasicSamples/Example03_Agent_FunctionCall.cs index 0ef8eaa48ae6..94b67a94b141 100644 --- a/dotnet/sample/AutoGen.BasicSamples/Example03_Agent_FunctionCall.cs +++ b/dotnet/sample/AutoGen.BasicSamples/Example03_Agent_FunctionCall.cs @@ -1,9 +1,10 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Example03_Agent_FunctionCall.cs -using AutoGen; using AutoGen.BasicSample; using AutoGen.Core; +using AutoGen.OpenAI; +using AutoGen.OpenAI.Extension; using FluentAssertions; /// @@ -45,33 +46,30 @@ public async Task CalculateTax(int price, float taxRate) public static async Task RunAsync() { var instance = new Example03_Agent_FunctionCall(); - var gpt35 = LLMConfiguration.GetAzureOpenAIGPT3_5_Turbo(); + var gpt4o = LLMConfiguration.GetOpenAIGPT4o_mini(); // AutoGen makes use of AutoGen.SourceGenerator to automatically generate FunctionDefinition and FunctionCallWrapper for you. 
// The FunctionDefinition will be created based on function signature and XML documentation. // The return type of type-safe function needs to be Task. And to get the best performance, please try only use primitive types and arrays of primitive types as parameters. - var config = new ConversableAgentConfig - { - Temperature = 0, - ConfigList = [gpt35], - FunctionContracts = new[] - { + var toolCallMiddleware = new FunctionCallMiddleware( + functions: [ instance.ConcatStringFunctionContract, instance.UpperCaseFunctionContract, instance.CalculateTaxFunctionContract, - }, - }; - - var agent = new AssistantAgent( - name: "agent", - systemMessage: "You are a helpful AI assistant", - llmConfig: config, + ], functionMap: new Dictionary>> { - { nameof(ConcatString), instance.ConcatStringWrapper }, - { nameof(UpperCase), instance.UpperCaseWrapper }, - { nameof(CalculateTax), instance.CalculateTaxWrapper }, - }) + { nameof(instance.ConcatString), instance.ConcatStringWrapper }, + { nameof(instance.UpperCase), instance.UpperCaseWrapper }, + { nameof(instance.CalculateTax), instance.CalculateTaxWrapper }, + }); + + var agent = new OpenAIChatAgent( + chatClient: gpt4o, + name: "agent", + systemMessage: "You are a helpful AI assistant") + .RegisterMessageConnector() + .RegisterStreamingMiddleware(toolCallMiddleware) .RegisterPrintMessage(); // talk to the assistant agent diff --git a/dotnet/sample/AutoGen.BasicSamples/Example04_Dynamic_GroupChat_Coding_Task.cs b/dotnet/sample/AutoGen.BasicSamples/Example04_Dynamic_GroupChat_Coding_Task.cs index 32f06136a964..f90816d890e1 100644 --- a/dotnet/sample/AutoGen.BasicSamples/Example04_Dynamic_GroupChat_Coding_Task.cs +++ b/dotnet/sample/AutoGen.BasicSamples/Example04_Dynamic_GroupChat_Coding_Task.cs @@ -1,12 +1,12 @@ // Copyright (c) Microsoft Corporation. All rights reserved. 
// Example04_Dynamic_GroupChat_Coding_Task.cs -using AutoGen; using AutoGen.BasicSample; using AutoGen.Core; using AutoGen.DotnetInteractive; using AutoGen.DotnetInteractive.Extension; -using AutoGen.OpenAI.V1; +using AutoGen.OpenAI; +using AutoGen.OpenAI.Extension; using FluentAssertions; public partial class Example04_Dynamic_GroupChat_Coding_Task @@ -20,20 +20,21 @@ public static async Task RunAsync() .AddPythonKernel("python3") .Build(); - var gptConfig = LLMConfiguration.GetAzureOpenAIGPT3_5_Turbo(); + var gpt4o = LLMConfiguration.GetOpenAIGPT4o_mini(); - var groupAdmin = new GPTAgent( + var groupAdmin = new OpenAIChatAgent( + chatClient: gpt4o, name: "groupAdmin", - systemMessage: "You are the admin of the group chat", - temperature: 0f, - config: gptConfig) + systemMessage: "You are the admin of the group chat") + .RegisterMessageConnector() .RegisterPrintMessage(); - var userProxy = new UserProxyAgent(name: "user", defaultReply: GroupChatExtension.TERMINATE, humanInputMode: HumanInputMode.NEVER) + var userProxy = new DefaultReplyAgent(name: "user", defaultReply: GroupChatExtension.TERMINATE) .RegisterPrintMessage(); // Create admin agent - var admin = new AssistantAgent( + var admin = new OpenAIChatAgent( + chatClient: gpt4o, name: "admin", systemMessage: """ You are a manager who takes coding problem from user and resolve problem by splitting them into small tasks and assign each task to the most appropriate agent. @@ -69,12 +70,8 @@ You are a manager who takes coding problem from user and resolve problem by spli ``` Your reply must contain one of [task|ask|summary] to indicate the type of your message. - """, - llmConfig: new ConversableAgentConfig - { - Temperature = 0, - ConfigList = [gptConfig], - }) + """) + .RegisterMessageConnector() .RegisterPrintMessage(); // create coder agent @@ -82,8 +79,9 @@ Your reply must contain one of [task|ask|summary] to indicate the type of your m // The dotnet coder write dotnet code to resolve the task. 
// The code reviewer review the code block from coder's reply. // The nuget agent install nuget packages if there's any. - var coderAgent = new GPTAgent( + var coderAgent = new OpenAIChatAgent( name: "coder", + chatClient: gpt4o, systemMessage: @"You act as python coder, you write python code to resolve task. Once you finish writing code, ask runner to run the code for you. Here're some rules to follow on writing dotnet code: @@ -100,9 +98,8 @@ Your reply must contain one of [task|ask|summary] to indicate the type of your m Here's some externel information - The link to mlnet repo is: https://github.com/dotnet/machinelearning. you don't need a token to use github pr api. Make sure to include a User-Agent header, otherwise github will reject it. -", - config: gptConfig, - temperature: 0.4f) +") + .RegisterMessageConnector() .RegisterPrintMessage(); // code reviewer agent will review if code block from coder's reply satisfy the following conditions: @@ -110,7 +107,8 @@ Your reply must contain one of [task|ask|summary] to indicate the type of your m // - The code block is csharp code block // - The code block is top level statement // - The code block is not using declaration - var codeReviewAgent = new GPTAgent( + var codeReviewAgent = new OpenAIChatAgent( + chatClient: gpt4o, name: "reviewer", systemMessage: """ You are a code reviewer who reviews code from coder. 
You need to check if the code satisfy the following conditions: @@ -133,9 +131,8 @@ Your reply must contain one of [task|ask|summary] to indicate the type of your m result: REJECTED ``` - """, - config: gptConfig, - temperature: 0f) + """) + .RegisterMessageConnector() .RegisterPrintMessage(); // create runner agent diff --git a/dotnet/sample/AutoGen.BasicSamples/Example05_Dalle_And_GPT4V.cs b/dotnet/sample/AutoGen.BasicSamples/Example05_Dalle_And_GPT4V.cs index 863f477630de..e8dd86474e7a 100644 --- a/dotnet/sample/AutoGen.BasicSamples/Example05_Dalle_And_GPT4V.cs +++ b/dotnet/sample/AutoGen.BasicSamples/Example05_Dalle_And_GPT4V.cs @@ -2,11 +2,11 @@ // Example05_Dalle_And_GPT4V.cs using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; +using AutoGen.OpenAI; +using AutoGen.OpenAI.Extension; using FluentAssertions; -using autogen = AutoGen.LLMConfigAPI; +using OpenAI; +using OpenAI.Images; public partial class Example05_Dalle_And_GPT4V { @@ -30,16 +30,12 @@ public async Task GenerateImage(string prompt) // and return url. var option = new ImageGenerationOptions { - Size = ImageSize.Size1024x1024, - Style = ImageGenerationStyle.Vivid, - ImageCount = 1, - Prompt = prompt, - Quality = ImageGenerationQuality.Standard, - DeploymentName = "dall-e-3", + Size = GeneratedImageSize.W1024xH1024, + Style = GeneratedImageStyle.Vivid, }; - var imageResponse = await openAIClient.GetImageGenerationsAsync(option); - var imageUrl = imageResponse.Value.Data.First().Url.OriginalString; + var imageResponse = await openAIClient.GetImageClient("dall-e-3").GenerateImageAsync(prompt, option); + var imageUrl = imageResponse.Value.ImageUri.OriginalString; return $@"// ignore this line [IMAGE_GENERATION] The image is generated from prompt {prompt} @@ -57,8 +53,6 @@ public static async Task RunAsync() // get OpenAI Key and create config var openAIKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? 
throw new Exception("Please set OPENAI_API_KEY environment variable."); - var gpt35Config = autogen.GetOpenAIConfigList(openAIKey, new[] { "gpt-3.5-turbo" }); - var gpt4vConfig = autogen.GetOpenAIConfigList(openAIKey, new[] { "gpt-4-vision-preview" }); var openAIClient = new OpenAIClient(openAIKey); var instance = new Example05_Dalle_And_GPT4V(openAIClient); var imagePath = Path.Combine("resource", "images", "background.png"); @@ -74,8 +68,7 @@ public static async Task RunAsync() { nameof(GenerateImage), instance.GenerateImageWrapper }, }); var dalleAgent = new OpenAIChatAgent( - openAIClient: openAIClient, - modelName: "gpt-3.5-turbo", + chatClient: openAIClient.GetChatClient("gpt-4o-mini"), name: "dalle", systemMessage: "You are a DALL-E agent that generate image from prompt, when conversation is terminated, return the most recent image url") .RegisterMessageConnector() @@ -110,9 +103,8 @@ public static async Task RunAsync() .RegisterPrintMessage(); var gpt4VAgent = new OpenAIChatAgent( - openAIClient: openAIClient, - name: "gpt4v", - modelName: "gpt-4-vision-preview", + chatClient: openAIClient.GetChatClient("gpt-4o-mini"), + name: "gpt-4o-mini", systemMessage: @"You are a critism that provide feedback to DALL-E agent. Carefully check the image generated by DALL-E agent and provide feedback. If the image satisfies the condition, then say [APPROVE]. diff --git a/dotnet/sample/AutoGen.BasicSamples/Example06_UserProxyAgent.cs b/dotnet/sample/AutoGen.BasicSamples/Example06_UserProxyAgent.cs index 9e1cf42b48f5..e1349cb32a99 100644 --- a/dotnet/sample/AutoGen.BasicSamples/Example06_UserProxyAgent.cs +++ b/dotnet/sample/AutoGen.BasicSamples/Example06_UserProxyAgent.cs @@ -1,7 +1,8 @@ // Copyright (c) Microsoft Corporation. All rights reserved. 
// Example06_UserProxyAgent.cs using AutoGen.Core; -using AutoGen.OpenAI.V1; +using AutoGen.OpenAI; +using AutoGen.OpenAI.Extension; namespace AutoGen.BasicSample; @@ -9,12 +10,13 @@ public static class Example06_UserProxyAgent { public static async Task RunAsync() { - var gpt35 = LLMConfiguration.GetOpenAIGPT3_5_Turbo(); + var gpt4o = LLMConfiguration.GetOpenAIGPT4o_mini(); - var assistantAgent = new GPTAgent( + var assistantAgent = new OpenAIChatAgent( + chatClient: gpt4o, name: "assistant", - systemMessage: "You are an assistant that help user to do some tasks.", - config: gpt35) + systemMessage: "You are an assistant that help user to do some tasks.") + .RegisterMessageConnector() .RegisterPrintMessage(); // set human input mode to ALWAYS so that user always provide input diff --git a/dotnet/sample/AutoGen.BasicSamples/Example07_Dynamic_GroupChat_Calculate_Fibonacci.cs b/dotnet/sample/AutoGen.BasicSamples/Example07_Dynamic_GroupChat_Calculate_Fibonacci.cs index f4fd98c3d03d..1f1315586a28 100644 --- a/dotnet/sample/AutoGen.BasicSamples/Example07_Dynamic_GroupChat_Calculate_Fibonacci.cs +++ b/dotnet/sample/AutoGen.BasicSamples/Example07_Dynamic_GroupChat_Calculate_Fibonacci.cs @@ -7,10 +7,10 @@ using AutoGen.Core; using AutoGen.DotnetInteractive; using AutoGen.DotnetInteractive.Extension; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; +using AutoGen.OpenAI; +using AutoGen.OpenAI.Extension; using Microsoft.DotNet.Interactive; +using OpenAI.Chat; public partial class Example07_Dynamic_GroupChat_Calculate_Fibonacci { @@ -50,11 +50,10 @@ public async Task ReviewCodeBlock( #endregion reviewer_function #region create_coder - public static async Task CreateCoderAgentAsync(OpenAIClient client, string deployModel) + public static async Task CreateCoderAgentAsync(ChatClient client) { var coder = new OpenAIChatAgent( - openAIClient: client, - modelName: deployModel, + chatClient: client, name: "coder", systemMessage: @"You act as 
dotnet coder, you write dotnet code to resolve task. Once you finish writing code, ask runner to run the code for you. @@ -122,11 +121,10 @@ public static async Task CreateRunnerAgentAsync(Kernel kernel) #endregion create_runner #region create_admin - public static async Task CreateAdminAsync(OpenAIClient client, string deployModel) + public static async Task CreateAdminAsync(ChatClient client) { var admin = new OpenAIChatAgent( - openAIClient: client, - modelName: deployModel, + chatClient: client, name: "admin", temperature: 0) .RegisterMessageConnector() @@ -137,9 +135,8 @@ public static async Task CreateAdminAsync(OpenAIClient client, string de #endregion create_admin #region create_reviewer - public static async Task CreateReviewerAgentAsync(OpenAIClient openAIClient, string deployModel) + public static async Task CreateReviewerAgentAsync(ChatClient chatClient) { - var gpt3Config = LLMConfiguration.GetAzureOpenAIGPT3_5_Turbo(); var functions = new Example07_Dynamic_GroupChat_Calculate_Fibonacci(); var functionCallMiddleware = new FunctionCallMiddleware( functions: [functions.ReviewCodeBlockFunctionContract], @@ -148,10 +145,9 @@ public static async Task CreateReviewerAgentAsync(OpenAIClient openAICli { nameof(functions.ReviewCodeBlock), functions.ReviewCodeBlockWrapper }, }); var reviewer = new OpenAIChatAgent( - openAIClient: openAIClient, + chatClient: chatClient, name: "code_reviewer", - systemMessage: @"You review code block from coder", - modelName: deployModel) + systemMessage: @"You review code block from coder") .RegisterMessageConnector() .RegisterStreamingMiddleware(functionCallMiddleware) .RegisterMiddleware(async (msgs, option, innerAgent, ct) => @@ -237,14 +233,13 @@ public static async Task RunWorkflowAsync() .CreateDefaultInProcessKernelBuilder() .Build(); - var config = LLMConfiguration.GetAzureOpenAIGPT3_5_Turbo(); - var openaiClient = new OpenAIClient(new Uri(config.Endpoint), new Azure.AzureKeyCredential(config.ApiKey)); + var gpt4o = 
LLMConfiguration.GetOpenAIGPT4o_mini(); #region create_workflow - var reviewer = await CreateReviewerAgentAsync(openaiClient, config.DeploymentName); - var coder = await CreateCoderAgentAsync(openaiClient, config.DeploymentName); + var reviewer = await CreateReviewerAgentAsync(gpt4o); + var coder = await CreateCoderAgentAsync(gpt4o); var runner = await CreateRunnerAgentAsync(kernel); - var admin = await CreateAdminAsync(openaiClient, config.DeploymentName); + var admin = await CreateAdminAsync(gpt4o); var admin2CoderTransition = Transition.Create(admin, coder); var coder2ReviewerTransition = Transition.Create(coder, reviewer); @@ -343,17 +338,16 @@ public static async Task RunAsync() Directory.CreateDirectory(workDir); } - var config = LLMConfiguration.GetAzureOpenAIGPT3_5_Turbo(); - var openaiClient = new OpenAIClient(new Uri(config.Endpoint), new Azure.AzureKeyCredential(config.ApiKey)); + var gpt4o = LLMConfiguration.GetOpenAIGPT4o_mini(); var kernel = DotnetInteractiveKernelBuilder .CreateDefaultInProcessKernelBuilder() .Build(); #region create_group_chat - var reviewer = await CreateReviewerAgentAsync(openaiClient, config.DeploymentName); - var coder = await CreateCoderAgentAsync(openaiClient, config.DeploymentName); + var reviewer = await CreateReviewerAgentAsync(gpt4o); + var coder = await CreateCoderAgentAsync(gpt4o); var runner = await CreateRunnerAgentAsync(kernel); - var admin = await CreateAdminAsync(openaiClient, config.DeploymentName); + var admin = await CreateAdminAsync(gpt4o); var groupChat = new GroupChat( admin: admin, members: diff --git a/dotnet/sample/AutoGen.BasicSamples/Example08_LMStudio.cs b/dotnet/sample/AutoGen.BasicSamples/Example08_LMStudio.cs index cce330117622..e58454fdb5f8 100644 --- a/dotnet/sample/AutoGen.BasicSamples/Example08_LMStudio.cs +++ b/dotnet/sample/AutoGen.BasicSamples/Example08_LMStudio.cs @@ -3,7 +3,9 @@ #region lmstudio_using_statements using AutoGen.Core; -using AutoGen.LMStudio; +using AutoGen.OpenAI; +using 
AutoGen.OpenAI.Extension; +using OpenAI; #endregion lmstudio_using_statements namespace AutoGen.BasicSample; @@ -13,8 +15,16 @@ public class Example08_LMStudio public static async Task RunAsync() { #region lmstudio_example_1 - var config = new LMStudioConfig("localhost", 1234); - var lmAgent = new LMStudioAgent("asssistant", config: config) + var endpoint = "http://localhost:1234"; + var openaiClient = new OpenAIClient("api-key", new OpenAIClientOptions + { + Endpoint = new Uri(endpoint), + }); + + var lmAgent = new OpenAIChatAgent( + chatClient: openaiClient.GetChatClient(""), + name: "assistant") + .RegisterMessageConnector() .RegisterPrintMessage(); await lmAgent.SendAsync("Can you write a piece of C# code to calculate 100th of fibonacci?"); diff --git a/dotnet/sample/AutoGen.BasicSamples/Example09_LMStudio_FunctionCall.cs b/dotnet/sample/AutoGen.BasicSamples/Example09_LMStudio_FunctionCall.cs deleted file mode 100644 index afa7d43b975b..000000000000 --- a/dotnet/sample/AutoGen.BasicSamples/Example09_LMStudio_FunctionCall.cs +++ /dev/null @@ -1,137 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Example09_LMStudio_FunctionCall.cs - -using System.Text.Json; -using System.Text.Json.Serialization; -using AutoGen.Core; -using AutoGen.LMStudio; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; - -namespace AutoGen.BasicSample; - -public class LLaMAFunctionCall -{ - [JsonPropertyName("name")] - public string Name { get; set; } - - [JsonPropertyName("arguments")] - public JsonElement Arguments { get; set; } -} - -public partial class Example09_LMStudio_FunctionCall -{ - /// - /// Get weather from location. - /// - /// location - /// date. type is string - [Function] - public async Task GetWeather(string location, string date) - { - return $"[Function] The weather on {date} in {location} is sunny."; - } - - - /// - /// Search query on Google and return the results. 
- /// - /// search query - [Function] - public async Task GoogleSearch(string query) - { - return $"[Function] Here are the search results for {query}."; - } - - private static object SerializeFunctionDefinition(FunctionDefinition functionDefinition) - { - return new - { - type = "function", - function = new - { - name = functionDefinition.Name, - description = functionDefinition.Description, - parameters = functionDefinition.Parameters.ToObjectFromJson(), - } - }; - } - - public static async Task RunAsync() - { - #region lmstudio_function_call_example - // This example has been verified to work with Trelis-Llama-2-7b-chat-hf-function-calling-v3 - var instance = new Example09_LMStudio_FunctionCall(); - var config = new LMStudioConfig("localhost", 1234); - var systemMessage = @$"You are a helpful AI assistant."; - - // Because the LM studio server doesn't support openai function call yet - // To simulate the function call, we can put the function call details in the system message - // And ask agent to response in function call object format using few-shot example - object[] functionList = - [ - SerializeFunctionDefinition(instance.GetWeatherFunctionContract.ToOpenAIFunctionDefinition()), - SerializeFunctionDefinition(instance.GetWeatherFunctionContract.ToOpenAIFunctionDefinition()) - ]; - var functionListString = JsonSerializer.Serialize(functionList, new JsonSerializerOptions { WriteIndented = true }); - var lmAgent = new LMStudioAgent( - name: "assistant", - systemMessage: @$" -You are a helpful AI assistant -You have access to the following functions. 
Use them if required: - -{functionListString}", - config: config) - .RegisterMiddleware(async (msgs, option, innerAgent, ct) => - { - // inject few-shot example to the message - var exampleGetWeather = new TextMessage(Role.User, "Get weather in London"); - var exampleAnswer = new TextMessage(Role.Assistant, "{\n \"name\": \"GetWeather\",\n \"arguments\": {\n \"city\": \"London\"\n }\n}", from: innerAgent.Name); - - msgs = new[] { exampleGetWeather, exampleAnswer }.Concat(msgs).ToArray(); - var reply = await innerAgent.GenerateReplyAsync(msgs, option, ct); - - // if reply is a function call, invoke function - var content = reply.GetContent(); - try - { - if (JsonSerializer.Deserialize(content) is { } functionCall) - { - var arguments = JsonSerializer.Serialize(functionCall.Arguments); - // invoke function wrapper - if (functionCall.Name == instance.GetWeatherFunctionContract.Name) - { - var result = await instance.GetWeatherWrapper(arguments); - return new TextMessage(Role.Assistant, result); - } - else if (functionCall.Name == instance.GetWeatherFunctionContract.Name) - { - var result = await instance.GoogleSearchWrapper(arguments); - return new TextMessage(Role.Assistant, result); - } - else - { - throw new Exception($"Unknown function call: {functionCall.Name}"); - } - } - } - catch (JsonException) - { - // ignore - } - - return reply; - }) - .RegisterPrintMessage(); - - var userProxyAgent = new UserProxyAgent( - name: "user", - humanInputMode: HumanInputMode.ALWAYS); - - await userProxyAgent.SendAsync( - receiver: lmAgent, - "Search the names of the five largest stocks in the US by market cap ") - .ToArrayAsync(); - #endregion lmstudio_function_call_example - } -} diff --git a/dotnet/sample/AutoGen.BasicSamples/Example10_SemanticKernel.cs b/dotnet/sample/AutoGen.BasicSamples/Example10_SemanticKernel.cs index 61c341204ec2..da7e54852f34 100644 --- a/dotnet/sample/AutoGen.BasicSamples/Example10_SemanticKernel.cs +++ 
b/dotnet/sample/AutoGen.BasicSamples/Example10_SemanticKernel.cs @@ -39,7 +39,7 @@ public class Example10_SemanticKernel public static async Task RunAsync() { var openAIKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new Exception("Please set OPENAI_API_KEY environment variable."); - var modelId = "gpt-3.5-turbo"; + var modelId = "gpt-4o-mini"; var builder = Kernel.CreateBuilder() .AddOpenAIChatCompletion(modelId: modelId, apiKey: openAIKey); var kernel = builder.Build(); diff --git a/dotnet/sample/AutoGen.BasicSamples/Example11_Sequential_GroupChat_Example.cs b/dotnet/sample/AutoGen.BasicSamples/Example11_Sequential_GroupChat_Example.cs index 6cb6b76ac88b..32aaa8c187b4 100644 --- a/dotnet/sample/AutoGen.BasicSamples/Example11_Sequential_GroupChat_Example.cs +++ b/dotnet/sample/AutoGen.BasicSamples/Example11_Sequential_GroupChat_Example.cs @@ -3,11 +3,10 @@ #region using_statement using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; +using AutoGen.OpenAI; +using AutoGen.OpenAI.Extension; using AutoGen.SemanticKernel; using AutoGen.SemanticKernel.Extension; -using Azure.AI.OpenAI; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Plugins.Web; using Microsoft.SemanticKernel.Plugins.Web.Bing; @@ -52,15 +51,10 @@ You put the original search result between ```bing and ``` public static async Task CreateSummarizerAgentAsync() { #region CreateSummarizerAgent - var config = LLMConfiguration.GetAzureOpenAIGPT3_5_Turbo(); - var apiKey = config.ApiKey; - var endPoint = new Uri(config.Endpoint); - - var openAIClient = new OpenAIClient(endPoint, new Azure.AzureKeyCredential(apiKey)); + var gpt4o = LLMConfiguration.GetOpenAIGPT4o_mini(); var openAIClientAgent = new OpenAIChatAgent( - openAIClient: openAIClient, + chatClient: gpt4o, name: "summarizer", - modelName: config.DeploymentName, systemMessage: "You summarize search result from bing in a short and concise manner"); return openAIClientAgent diff --git 
a/dotnet/sample/AutoGen.BasicSamples/Example12_TwoAgent_Fill_Application.cs b/dotnet/sample/AutoGen.BasicSamples/Example12_TwoAgent_Fill_Application.cs index 7aec3beee6b6..69c2121cd80b 100644 --- a/dotnet/sample/AutoGen.BasicSamples/Example12_TwoAgent_Fill_Application.cs +++ b/dotnet/sample/AutoGen.BasicSamples/Example12_TwoAgent_Fill_Application.cs @@ -3,9 +3,8 @@ using System.Text; using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; +using AutoGen.OpenAI; +using AutoGen.OpenAI.Extension; namespace AutoGen.BasicSample; @@ -69,11 +68,7 @@ public async Task SaveProgress( public static async Task CreateSaveProgressAgent() { - var gpt3Config = LLMConfiguration.GetAzureOpenAIGPT3_5_Turbo(); - var endPoint = gpt3Config.Endpoint ?? throw new Exception("Please set AZURE_OPENAI_ENDPOINT environment variable."); - var apiKey = gpt3Config.ApiKey ?? throw new Exception("Please set AZURE_OPENAI_API_KEY environment variable."); - var openaiClient = new OpenAIClient(new Uri(endPoint), new Azure.AzureKeyCredential(apiKey)); - + var gpt4o = LLMConfiguration.GetOpenAIGPT4o_mini(); var instance = new TwoAgent_Fill_Application(); var functionCallConnector = new FunctionCallMiddleware( functions: [instance.SaveProgressFunctionContract], @@ -83,9 +78,8 @@ public static async Task CreateSaveProgressAgent() }); var chatAgent = new OpenAIChatAgent( - openAIClient: openaiClient, + chatClient: gpt4o, name: "application", - modelName: gpt3Config.DeploymentName, systemMessage: """You are a helpful application form assistant who saves progress while user fills application.""") .RegisterMessageConnector() .RegisterMiddleware(functionCallConnector) @@ -109,15 +103,10 @@ Save progress according to the most recent information provided by user. public static async Task CreateAssistantAgent() { - var gpt3Config = LLMConfiguration.GetAzureOpenAIGPT3_5_Turbo(); - var endPoint = gpt3Config.Endpoint ?? 
throw new Exception("Please set AZURE_OPENAI_ENDPOINT environment variable."); - var apiKey = gpt3Config.ApiKey ?? throw new Exception("Please set AZURE_OPENAI_API_KEY environment variable."); - var openaiClient = new OpenAIClient(new Uri(endPoint), new Azure.AzureKeyCredential(apiKey)); - + var gpt4o = LLMConfiguration.GetOpenAIGPT4o_mini(); var chatAgent = new OpenAIChatAgent( - openAIClient: openaiClient, + chatClient: gpt4o, name: "assistant", - modelName: gpt3Config.DeploymentName, systemMessage: """You create polite prompt to ask user provide missing information""") .RegisterMessageConnector() .RegisterPrintMessage(); @@ -127,15 +116,10 @@ public static async Task CreateAssistantAgent() public static async Task CreateUserAgent() { - var gpt3Config = LLMConfiguration.GetAzureOpenAIGPT3_5_Turbo(); - var endPoint = gpt3Config.Endpoint ?? throw new Exception("Please set AZURE_OPENAI_ENDPOINT environment variable."); - var apiKey = gpt3Config.ApiKey ?? throw new Exception("Please set AZURE_OPENAI_API_KEY environment variable."); - var openaiClient = new OpenAIClient(new Uri(endPoint), new Azure.AzureKeyCredential(apiKey)); - + var gpt4o = LLMConfiguration.GetOpenAIGPT4o_mini(); var chatAgent = new OpenAIChatAgent( - openAIClient: openaiClient, + chatClient: gpt4o, name: "user", - modelName: gpt3Config.DeploymentName, systemMessage: """ You are a user who is filling an application form. Simply provide the information as requested and answer the questions, don't do anything else. 
diff --git a/dotnet/sample/AutoGen.BasicSamples/Example15_GPT4V_BinaryDataImageMessage.cs b/dotnet/sample/AutoGen.BasicSamples/Example15_GPT4V_BinaryDataImageMessage.cs index 5d4f1474232d..4a4b10ae3d75 100644 --- a/dotnet/sample/AutoGen.BasicSamples/Example15_GPT4V_BinaryDataImageMessage.cs +++ b/dotnet/sample/AutoGen.BasicSamples/Example15_GPT4V_BinaryDataImageMessage.cs @@ -2,7 +2,8 @@ // Example15_GPT4V_BinaryDataImageMessage.cs using AutoGen.Core; -using AutoGen.OpenAI.V1; +using AutoGen.OpenAI; +using AutoGen.OpenAI.Extension; namespace AutoGen.BasicSample; @@ -27,14 +28,14 @@ public static class Example15_GPT4V_BinaryDataImageMessage public static async Task RunAsync() { - var openAIKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new Exception("Please set OPENAI_API_KEY environment variable."); - var openAiConfig = new OpenAIConfig(openAIKey, "gpt-4o"); + var gpt4o = LLMConfiguration.GetOpenAIGPT4o_mini(); - var visionAgent = new GPTAgent( + var visionAgent = new OpenAIChatAgent( + chatClient: gpt4o, name: "gpt", systemMessage: "You are a helpful AI assistant", - config: openAiConfig, temperature: 0) + .RegisterMessageConnector() .RegisterPrintMessage(); List messages = diff --git a/dotnet/sample/AutoGen.BasicSamples/Example17_ReActAgent.cs b/dotnet/sample/AutoGen.BasicSamples/Example17_ReActAgent.cs index 5f50b2344170..170736bf22e4 100644 --- a/dotnet/sample/AutoGen.BasicSamples/Example17_ReActAgent.cs +++ b/dotnet/sample/AutoGen.BasicSamples/Example17_ReActAgent.cs @@ -2,16 +2,16 @@ // Example17_ReActAgent.cs using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; +using AutoGen.OpenAI; +using AutoGen.OpenAI.Extension; +using OpenAI; +using OpenAI.Chat; namespace AutoGen.BasicSample; public class OpenAIReActAgent : IAgent { - private readonly OpenAIClient _client; - private readonly string modelName = "gpt-3.5-turbo"; + private readonly ChatClient _client; private readonly 
FunctionContract[] tools; private readonly Dictionary>> toolExecutors = new(); private readonly IAgent reasoner; @@ -39,16 +39,15 @@ public class OpenAIReActAgent : IAgent Begin! Question: {input}"; - public OpenAIReActAgent(OpenAIClient client, string modelName, string name, FunctionContract[] tools, Dictionary>> toolExecutors) + public OpenAIReActAgent(ChatClient client, string name, FunctionContract[] tools, Dictionary>> toolExecutors) { _client = client; this.Name = name; - this.modelName = modelName; this.tools = tools; this.toolExecutors = toolExecutors; this.reasoner = CreateReasoner(); this.actor = CreateActor(); - this.helper = new OpenAIChatAgent(client, "helper", modelName) + this.helper = new OpenAIChatAgent(client, "helper") .RegisterMessageConnector(); } @@ -106,8 +105,7 @@ private string CreateReActPrompt(string input) private IAgent CreateReasoner() { return new OpenAIChatAgent( - openAIClient: _client, - modelName: modelName, + chatClient: _client, name: "reasoner") .RegisterMessageConnector() .RegisterPrintMessage(); @@ -117,8 +115,7 @@ private IAgent CreateActor() { var functionCallMiddleware = new FunctionCallMiddleware(tools, toolExecutors); return new OpenAIChatAgent( - openAIClient: _client, - modelName: modelName, + chatClient: _client, name: "actor") .RegisterMessageConnector() .RegisterMiddleware(functionCallMiddleware) @@ -166,9 +163,9 @@ public static async Task RunAsync() var modelName = "gpt-4-turbo"; var tools = new Tools(); var openAIClient = new OpenAIClient(openAIKey); + var gpt4o = LLMConfiguration.GetOpenAIGPT4o_mini(); var reactAgent = new OpenAIReActAgent( - client: openAIClient, - modelName: modelName, + client: openAIClient.GetChatClient(modelName), name: "react-agent", tools: [tools.GetLocalizationFunctionContract, tools.GetDateTodayFunctionContract, tools.WeatherReportFunctionContract], toolExecutors: new Dictionary>> diff --git a/dotnet/sample/AutoGen.BasicSamples/GettingStart/Agent_Middleware.cs 
b/dotnet/sample/AutoGen.BasicSamples/GettingStart/Agent_Middleware.cs index 73c0332c7856..cf97af134675 100644 --- a/dotnet/sample/AutoGen.BasicSamples/GettingStart/Agent_Middleware.cs +++ b/dotnet/sample/AutoGen.BasicSamples/GettingStart/Agent_Middleware.cs @@ -3,11 +3,11 @@ #region Using using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; +using AutoGen.OpenAI; +using AutoGen.OpenAI.Extension; #endregion Using using FluentAssertions; +using OpenAI.Chat; namespace AutoGen.BasicSample; @@ -16,20 +16,17 @@ public class Agent_Middleware public static async Task RunTokenCountAsync() { #region Create_Agent - var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new InvalidOperationException("Please set the environment variable OPENAI_API_KEY"); - var model = "gpt-3.5-turbo"; - var openaiClient = new OpenAIClient(apiKey); + var gpt4o = LLMConfiguration.GetOpenAIGPT4o_mini(); var openaiMessageConnector = new OpenAIChatRequestMessageConnector(); var totalTokenCount = 0; var agent = new OpenAIChatAgent( - openAIClient: openaiClient, + chatClient: gpt4o, name: "agent", - modelName: model, systemMessage: "You are a helpful AI assistant") .RegisterMiddleware(async (messages, option, innerAgent, ct) => { var reply = await innerAgent.GenerateReplyAsync(messages, option, ct); - if (reply is MessageEnvelope chatCompletions) + if (reply is MessageEnvelope chatCompletions) { var tokenCount = chatCompletions.Content.Usage.TotalTokens; totalTokenCount += tokenCount; @@ -53,21 +50,17 @@ public static async Task RunTokenCountAsync() public static async Task RunRagTaskAsync() { #region Create_Agent - var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? 
throw new InvalidOperationException("Please set the environment variable OPENAI_API_KEY"); - var model = "gpt-3.5-turbo"; - var openaiClient = new OpenAIClient(apiKey); - var openaiMessageConnector = new OpenAIChatRequestMessageConnector(); + var gpt4o = LLMConfiguration.GetOpenAIGPT4o_mini(); var agent = new OpenAIChatAgent( - openAIClient: openaiClient, + chatClient: gpt4o, name: "agent", - modelName: model, systemMessage: "You are a helpful AI assistant") .RegisterMessageConnector() .RegisterMiddleware(async (messages, option, innerAgent, ct) => { var today = DateTime.UtcNow; var todayMessage = new TextMessage(Role.System, $"Today is {today:yyyy-MM-dd}"); - messages = messages.Concat(new[] { todayMessage }); + messages = messages.Concat([todayMessage]); return await innerAgent.GenerateReplyAsync(messages, option, ct); }) .RegisterPrintMessage(); diff --git a/dotnet/sample/AutoGen.BasicSamples/GettingStart/Chat_With_Agent.cs b/dotnet/sample/AutoGen.BasicSamples/GettingStart/Chat_With_Agent.cs index 1b92572821b5..b2cc228496db 100644 --- a/dotnet/sample/AutoGen.BasicSamples/GettingStart/Chat_With_Agent.cs +++ b/dotnet/sample/AutoGen.BasicSamples/GettingStart/Chat_With_Agent.cs @@ -3,9 +3,8 @@ #region Using using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; +using AutoGen.OpenAI; +using AutoGen.OpenAI.Extension; #endregion Using using FluentAssertions; @@ -17,13 +16,10 @@ public class Chat_With_Agent public static async Task RunAsync() { #region Create_Agent - var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? 
throw new Exception("Please set OPENAI_API_KEY environment variable."); - var model = "gpt-3.5-turbo"; - var openaiClient = new OpenAIClient(apiKey); + var gpt4o = LLMConfiguration.GetOpenAIGPT4o_mini(); var agent = new OpenAIChatAgent( - openAIClient: openaiClient, + chatClient: gpt4o, name: "agent", - modelName: model, systemMessage: "You are a helpful AI assistant") .RegisterMessageConnector(); // convert OpenAI message to AutoGen message #endregion Create_Agent diff --git a/dotnet/sample/AutoGen.BasicSamples/GettingStart/Dynamic_Group_Chat.cs b/dotnet/sample/AutoGen.BasicSamples/GettingStart/Dynamic_Group_Chat.cs index 865924ca7d06..dadc295e308d 100644 --- a/dotnet/sample/AutoGen.BasicSamples/GettingStart/Dynamic_Group_Chat.cs +++ b/dotnet/sample/AutoGen.BasicSamples/GettingStart/Dynamic_Group_Chat.cs @@ -2,12 +2,12 @@ // Dynamic_Group_Chat.cs using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; +using AutoGen.OpenAI; +using AutoGen.OpenAI.Extension; using AutoGen.SemanticKernel; using AutoGen.SemanticKernel.Extension; -using Azure.AI.OpenAI; using Microsoft.SemanticKernel; +using OpenAI; namespace AutoGen.BasicSample; @@ -16,14 +16,13 @@ public class Dynamic_Group_Chat public static async Task RunAsync() { var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? 
throw new Exception("Please set OPENAI_API_KEY environment variable."); - var model = "gpt-3.5-turbo"; + var model = "gpt-4o-mini"; #region Create_Coder var openaiClient = new OpenAIClient(apiKey); var coder = new OpenAIChatAgent( - openAIClient: openaiClient, + chatClient: openaiClient.GetChatClient(model), name: "coder", - modelName: model, systemMessage: "You are a C# coder, when writing csharp code, please put the code between ```csharp and ```") .RegisterMessageConnector() // convert OpenAI message to AutoGen message .RegisterPrintMessage(); // print the message content @@ -49,9 +48,8 @@ public static async Task RunAsync() #region Create_Group var admin = new OpenAIChatAgent( - openAIClient: openaiClient, - name: "admin", - modelName: model) + chatClient: openaiClient.GetChatClient(model), + name: "admin") .RegisterMessageConnector(); // convert OpenAI message to AutoGen message var group = new GroupChat( diff --git a/dotnet/sample/AutoGen.BasicSamples/GettingStart/FSM_Group_Chat.cs b/dotnet/sample/AutoGen.BasicSamples/GettingStart/FSM_Group_Chat.cs index 28b8f5d5fbdc..093d0c77ce64 100644 --- a/dotnet/sample/AutoGen.BasicSamples/GettingStart/FSM_Group_Chat.cs +++ b/dotnet/sample/AutoGen.BasicSamples/GettingStart/FSM_Group_Chat.cs @@ -4,9 +4,10 @@ using System.Text; #region Using using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; +using AutoGen.OpenAI; +using AutoGen.OpenAI.Extension; +using OpenAI; +using OpenAI.Chat; #endregion Using namespace AutoGen.BasicSample; @@ -74,7 +75,7 @@ public async Task SaveProgress( public class FSM_Group_Chat { - public static async Task CreateSaveProgressAgent(OpenAIClient client, string model) + public static async Task CreateSaveProgressAgent(ChatClient client) { #region Create_Save_Progress_Agent var tool = new FillFormTool(); @@ -86,9 +87,8 @@ public static async Task CreateSaveProgressAgent(OpenAIClient client, st }); var chatAgent = new OpenAIChatAgent( - 
openAIClient: client, + chatClient: client, name: "application", - modelName: model, systemMessage: """You are a helpful application form assistant who saves progress while user fills application.""") .RegisterMessageConnector() .RegisterMiddleware(functionCallMiddleware) @@ -111,13 +111,12 @@ Save progress according to the most recent information provided by user. return chatAgent; } - public static async Task CreateAssistantAgent(OpenAIClient openaiClient, string model) + public static async Task CreateAssistantAgent(ChatClient chatClient) { #region Create_Assistant_Agent var chatAgent = new OpenAIChatAgent( - openAIClient: openaiClient, + chatClient: chatClient, name: "assistant", - modelName: model, systemMessage: """You create polite prompt to ask user provide missing information""") .RegisterMessageConnector() .RegisterPrintMessage(); @@ -125,13 +124,12 @@ public static async Task CreateAssistantAgent(OpenAIClient openaiClient, return chatAgent; } - public static async Task CreateUserAgent(OpenAIClient openaiClient, string model) + public static async Task CreateUserAgent(ChatClient chatClient) { #region Create_User_Agent var chatAgent = new OpenAIChatAgent( - openAIClient: openaiClient, + chatClient: chatClient, name: "user", - modelName: model, systemMessage: """ You are a user who is filling an application form. Simply provide the information as requested and answer the questions, don't do anything else. @@ -151,11 +149,12 @@ public static async Task CreateUserAgent(OpenAIClient openaiClient, stri public static async Task RunAsync() { var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? 
throw new Exception("Please set OPENAI_API_KEY environment variable."); - var model = "gpt-3.5-turbo"; + var model = "gpt-4o-mini"; var openaiClient = new OpenAIClient(apiKey); - var applicationAgent = await CreateSaveProgressAgent(openaiClient, model); - var assistantAgent = await CreateAssistantAgent(openaiClient, model); - var userAgent = await CreateUserAgent(openaiClient, model); + var chatClient = openaiClient.GetChatClient(model); + var applicationAgent = await CreateSaveProgressAgent(chatClient); + var assistantAgent = await CreateAssistantAgent(chatClient); + var userAgent = await CreateUserAgent(chatClient); #region Create_Graph var userToApplicationTransition = Transition.Create(userAgent, applicationAgent); diff --git a/dotnet/sample/AutoGen.BasicSamples/GettingStart/Image_Chat_With_Agent.cs b/dotnet/sample/AutoGen.BasicSamples/GettingStart/Image_Chat_With_Agent.cs index 5e2aff061ae5..e993b3d51f1c 100644 --- a/dotnet/sample/AutoGen.BasicSamples/GettingStart/Image_Chat_With_Agent.cs +++ b/dotnet/sample/AutoGen.BasicSamples/GettingStart/Image_Chat_With_Agent.cs @@ -3,9 +3,8 @@ #region Using using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; +using AutoGen.OpenAI; +using AutoGen.OpenAI.Extension; #endregion Using using FluentAssertions; @@ -16,14 +15,10 @@ public class Image_Chat_With_Agent public static async Task RunAsync() { #region Create_Agent - var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? 
throw new Exception("Please set OPENAI_API_KEY environment variable."); - var model = "gpt-4o"; // The model needs to support multimodal inputs - var openaiClient = new OpenAIClient(apiKey); - + var gpt4o = LLMConfiguration.GetOpenAIGPT4o_mini(); var agent = new OpenAIChatAgent( - openAIClient: openaiClient, + chatClient: gpt4o, name: "agent", - modelName: model, systemMessage: "You are a helpful AI assistant") .RegisterMessageConnector() // convert OpenAI message to AutoGen message .RegisterPrintMessage(); diff --git a/dotnet/sample/AutoGen.BasicSamples/GettingStart/Streaming_Tool_Call.cs b/dotnet/sample/AutoGen.BasicSamples/GettingStart/Streaming_Tool_Call.cs index d358dab60fe1..d5cb196f94f7 100644 --- a/dotnet/sample/AutoGen.BasicSamples/GettingStart/Streaming_Tool_Call.cs +++ b/dotnet/sample/AutoGen.BasicSamples/GettingStart/Streaming_Tool_Call.cs @@ -2,10 +2,10 @@ // Streaming_Tool_Call.cs using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; +using AutoGen.OpenAI; +using AutoGen.OpenAI.Extension; using FluentAssertions; +using OpenAI; namespace AutoGen.BasicSample.GettingStart; @@ -28,12 +28,11 @@ public static async Task RunAsync() #region Create_Agent var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? 
throw new Exception("Please set OPENAI_API_KEY environment variable."); - var model = "gpt-4o"; + var model = "gpt-4o-mini"; var openaiClient = new OpenAIClient(apiKey); var agent = new OpenAIChatAgent( - openAIClient: openaiClient, + chatClient: openaiClient.GetChatClient(model), name: "agent", - modelName: model, systemMessage: "You are a helpful AI assistant") .RegisterMessageConnector() .RegisterStreamingMiddleware(autoInvokeMiddleware) diff --git a/dotnet/sample/AutoGen.BasicSamples/GettingStart/Use_Tools_With_Agent.cs b/dotnet/sample/AutoGen.BasicSamples/GettingStart/Use_Tools_With_Agent.cs index f1a7b5585daa..21a5df4c2ecd 100644 --- a/dotnet/sample/AutoGen.BasicSamples/GettingStart/Use_Tools_With_Agent.cs +++ b/dotnet/sample/AutoGen.BasicSamples/GettingStart/Use_Tools_With_Agent.cs @@ -3,11 +3,11 @@ #region Using using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; +using AutoGen.OpenAI; +using AutoGen.OpenAI.Extension; #endregion Using using FluentAssertions; +using OpenAI; namespace AutoGen.BasicSample; @@ -50,12 +50,11 @@ public static async Task RunAsync() #region Create_Agent var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? 
throw new Exception("Please set OPENAI_API_KEY environment variable."); - var model = "gpt-3.5-turbo"; + var model = "gpt-4o-mini"; var openaiClient = new OpenAIClient(apiKey); var agent = new OpenAIChatAgent( - openAIClient: openaiClient, + chatClient: openaiClient.GetChatClient(model), name: "agent", - modelName: model, systemMessage: "You are a helpful AI assistant") .RegisterMessageConnector(); // convert OpenAI message to AutoGen message #endregion Create_Agent diff --git a/dotnet/sample/AutoGen.BasicSamples/LLMConfiguration.cs b/dotnet/sample/AutoGen.BasicSamples/LLMConfiguration.cs index bab6685126a7..26d9668792ef 100644 --- a/dotnet/sample/AutoGen.BasicSamples/LLMConfiguration.cs +++ b/dotnet/sample/AutoGen.BasicSamples/LLMConfiguration.cs @@ -1,25 +1,19 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // LLMConfiguration.cs -using AutoGen.OpenAI.V1; +using OpenAI; +using OpenAI.Chat; namespace AutoGen.BasicSample; internal static class LLMConfiguration { - public static OpenAIConfig GetOpenAIGPT3_5_Turbo() + public static ChatClient GetOpenAIGPT4o_mini() { var openAIKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new Exception("Please set OPENAI_API_KEY environment variable."); - var modelId = "gpt-3.5-turbo"; - return new OpenAIConfig(openAIKey, modelId); - } - - public static OpenAIConfig GetOpenAIGPT4() - { - var openAIKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new Exception("Please set OPENAI_API_KEY environment variable."); - var modelId = "gpt-4"; + var modelId = "gpt-4o-mini"; - return new OpenAIConfig(openAIKey, modelId); + return new OpenAIClient(openAIKey).GetChatClient(modelId); } public static AzureOpenAIConfig GetAzureOpenAIGPT3_5_Turbo(string? deployName = null) @@ -29,12 +23,4 @@ public static AzureOpenAIConfig GetAzureOpenAIGPT3_5_Turbo(string? deployName = deployName = deployName ?? Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOY_NAME") ?? 
throw new Exception("Please set AZURE_OPENAI_DEPLOY_NAME environment variable."); return new AzureOpenAIConfig(endpoint, deployName, azureOpenAIKey); } - - public static AzureOpenAIConfig GetAzureOpenAIGPT4(string deployName = "gpt-4") - { - var azureOpenAIKey = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY") ?? throw new Exception("Please set AZURE_OPENAI_API_KEY environment variable."); - var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new Exception("Please set AZURE_OPENAI_ENDPOINT environment variable."); - - return new AzureOpenAIConfig(endpoint, deployName, azureOpenAIKey); - } } diff --git a/dotnet/sample/AutoGen.BasicSamples/Program.cs b/dotnet/sample/AutoGen.BasicSamples/Program.cs index 51ea95900126..8817a3df36e1 100644 --- a/dotnet/sample/AutoGen.BasicSamples/Program.cs +++ b/dotnet/sample/AutoGen.BasicSamples/Program.cs @@ -17,7 +17,6 @@ allSamples.Add(new Tuple>("User Proxy Agent", async () => { await Example06_UserProxyAgent.RunAsync(); })); allSamples.Add(new Tuple>("Dynamic Group Chat - Calculate Fibonacci", async () => { await Example07_Dynamic_GroupChat_Calculate_Fibonacci.RunAsync(); })); allSamples.Add(new Tuple>("LM Studio", async () => { await Example08_LMStudio.RunAsync(); })); -allSamples.Add(new Tuple>("LM Studio - Function Call", async () => { await Example09_LMStudio_FunctionCall.RunAsync(); })); allSamples.Add(new Tuple>("Semantic Kernel", async () => { await Example10_SemanticKernel.RunAsync(); })); allSamples.Add(new Tuple>("Sequential Group Chat", async () => { await Sequential_GroupChat_Example.RunAsync(); })); allSamples.Add(new Tuple>("Two Agent - Fill Application", async () => { await TwoAgent_Fill_Application.RunAsync(); })); @@ -37,10 +36,14 @@ Console.WriteLine("\n\nEnter your selection:"); -try +while (true) { - int val = Convert.ToInt32(Console.ReadLine()); - + var input = Console.ReadLine(); + if (input == "exit") + { + break; + } + int val = Convert.ToInt32(input); if 
(!map.ContainsKey(val)) { Console.WriteLine("Invalid choice"); @@ -51,9 +54,6 @@ await map[val].Item2.Invoke(); } } -catch -{ - Console.WriteLine("Error encountered, please check your entry and run again"); -} + diff --git a/dotnet/sample/AutoGen.OpenAI.Sample/AutoGen.OpenAI.V1.Sample.csproj b/dotnet/sample/AutoGen.OpenAI.Sample/AutoGen.OpenAI.Sample.csproj similarity index 84% rename from dotnet/sample/AutoGen.OpenAI.Sample/AutoGen.OpenAI.V1.Sample.csproj rename to dotnet/sample/AutoGen.OpenAI.Sample/AutoGen.OpenAI.Sample.csproj index 49c0e21c9ece..fcbbb834fc63 100644 --- a/dotnet/sample/AutoGen.OpenAI.Sample/AutoGen.OpenAI.V1.Sample.csproj +++ b/dotnet/sample/AutoGen.OpenAI.Sample/AutoGen.OpenAI.Sample.csproj @@ -14,8 +14,9 @@ - + + diff --git a/dotnet/sample/AutoGen.OpenAI.Sample/Connect_To_Azure_OpenAI.cs b/dotnet/sample/AutoGen.OpenAI.Sample/Connect_To_Azure_OpenAI.cs new file mode 100644 index 000000000000..dafe2e314859 --- /dev/null +++ b/dotnet/sample/AutoGen.OpenAI.Sample/Connect_To_Azure_OpenAI.cs @@ -0,0 +1,39 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Connect_To_Azure_OpenAI.cs + +#region using_statement +using AutoGen.Core; +using AutoGen.OpenAI.Extension; +using Azure; +using Azure.AI.OpenAI; +#endregion using_statement + +namespace AutoGen.OpenAI.Sample; + +public class Connect_To_Azure_OpenAI +{ + public static async Task RunAsync() + { + #region create_agent + var apiKey = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY") ?? throw new InvalidOperationException("Please set environment variable AZURE_OPENAI_API_KEY"); + var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("Please set environment variable AZURE_OPENAI_ENDPOINT"); + var model = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOY_NAME") ?? "gpt-4o-mini"; + + // Use AzureOpenAIClient to connect to openai model deployed on azure. 
+ // The AzureOpenAIClient comes from Azure.AI.OpenAI package + var openAIClient = new AzureOpenAIClient(new Uri(endpoint), new AzureKeyCredential(apiKey)); + + var agent = new OpenAIChatAgent( + chatClient: openAIClient.GetChatClient(model), + name: "assistant", + systemMessage: "You are a helpful assistant designed to output JSON.", + seed: 0) + .RegisterMessageConnector() + .RegisterPrintMessage(); + #endregion create_agent + + #region send_message + await agent.SendAsync("Can you write a piece of C# code to calculate 100th of fibonacci?"); + #endregion send_message + } +} diff --git a/dotnet/sample/AutoGen.OpenAI.Sample/Connect_To_Ollama.cs b/dotnet/sample/AutoGen.OpenAI.Sample/Connect_To_Ollama.cs index bfc4446b2ee9..2bb10e978412 100644 --- a/dotnet/sample/AutoGen.OpenAI.Sample/Connect_To_Ollama.cs +++ b/dotnet/sample/AutoGen.OpenAI.Sample/Connect_To_Ollama.cs @@ -3,53 +3,28 @@ #region using_statement using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; -using Azure.Core.Pipeline; +using AutoGen.OpenAI.Extension; +using OpenAI; #endregion using_statement namespace AutoGen.OpenAI.Sample; -#region CustomHttpClientHandler -public sealed class CustomHttpClientHandler : HttpClientHandler -{ - private string _modelServiceUrl; - - public CustomHttpClientHandler(string modelServiceUrl) - { - _modelServiceUrl = modelServiceUrl; - } - - protected override Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) - { - request.RequestUri = new Uri($"{_modelServiceUrl}{request.RequestUri.PathAndQuery}"); - - return base.SendAsync(request, cancellationToken); - } -} -#endregion CustomHttpClientHandler - public class Connect_To_Ollama { public static async Task RunAsync() { #region create_agent - using var client = new HttpClient(new CustomHttpClientHandler("http://localhost:11434")); - var option = new OpenAIClientOptions(OpenAIClientOptions.ServiceVersion.V2024_04_01_Preview) - { - Transport = new 
HttpClientTransport(client), - }; - // api-key is not required for local server // so you can use any string here - var openAIClient = new OpenAIClient("api-key", option); + var openAIClient = new OpenAIClient("api-key", new OpenAIClientOptions + { + Endpoint = new Uri("http://localhost:11434/v1/"), // remember to add /v1/ at the end to connect to Ollama openai server + }); var model = "llama3"; var agent = new OpenAIChatAgent( - openAIClient: openAIClient, + chatClient: openAIClient.GetChatClient(model), name: "assistant", - modelName: model, systemMessage: "You are a helpful assistant designed to output JSON.", seed: 0) .RegisterMessageConnector() diff --git a/dotnet/sample/AutoGen.OpenAI.Sample/Program.cs b/dotnet/sample/AutoGen.OpenAI.Sample/Program.cs index 5a38a3ff03b9..c71f152d0370 100644 --- a/dotnet/sample/AutoGen.OpenAI.Sample/Program.cs +++ b/dotnet/sample/AutoGen.OpenAI.Sample/Program.cs @@ -3,4 +3,4 @@ using AutoGen.OpenAI.Sample; -Tool_Call_With_Ollama_And_LiteLLM.RunAsync().Wait(); +Structural_Output.RunAsync().Wait(); diff --git a/dotnet/sample/AutoGen.OpenAI.Sample/Structural_Output.cs b/dotnet/sample/AutoGen.OpenAI.Sample/Structural_Output.cs new file mode 100644 index 000000000000..e562d7223a69 --- /dev/null +++ b/dotnet/sample/AutoGen.OpenAI.Sample/Structural_Output.cs @@ -0,0 +1,90 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Structural_Output.cs + +using System.Text.Json; +using System.Text.Json.Serialization; +using AutoGen.Core; +using AutoGen.OpenAI.Extension; +using FluentAssertions; +using Json.Schema; +using Json.Schema.Generation; +using OpenAI; +using OpenAI.Chat; + +namespace AutoGen.OpenAI.Sample; + +internal class Structural_Output +{ + public static async Task RunAsync() + { + #region create_agent + var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? 
throw new Exception("Please set OPENAI_API_KEY environment variable."); + var model = "gpt-4o-mini"; + + var schemaBuilder = new JsonSchemaBuilder().FromType(); + var schema = schemaBuilder.Build(); + + var personSchemaFormat = ChatResponseFormat.CreateJsonSchemaFormat( + name: "Person", + jsonSchema: BinaryData.FromObjectAsJson(schema), + description: "Person schema"); + + var openAIClient = new OpenAIClient(apiKey); + var openAIClientAgent = new OpenAIChatAgent( + chatClient: openAIClient.GetChatClient(model), + name: "assistant", + systemMessage: "You are a helpful assistant", + responseFormat: personSchemaFormat) // structural output by passing schema to response format + .RegisterMessageConnector() + .RegisterPrintMessage(); + #endregion create_agent + + #region chat_with_agent + var reply = await openAIClientAgent.SendAsync("My name is John, I am 25 years old, and I live in Seattle. I like to play soccer and read books."); + + var person = JsonSerializer.Deserialize(reply.GetContent()); + Console.WriteLine($"Name: {person.Name}"); + Console.WriteLine($"Age: {person.Age}"); + + if (!string.IsNullOrEmpty(person.Address)) + { + Console.WriteLine($"Address: {person.Address}"); + } + + Console.WriteLine("Done."); + #endregion chat_with_agent + + person.Name.Should().Be("John"); + person.Age.Should().Be(25); + person.Address.Should().BeNullOrEmpty(); + person.City.Should().Be("Seattle"); + person.Hobbies.Count.Should().Be(2); + } +} + +#region person_class +public class Person +{ + [JsonPropertyName("name")] + [Description("Name of the person")] + [Required] + public string Name { get; set; } + + [JsonPropertyName("age")] + [Description("Age of the person")] + [Required] + public int Age { get; set; } + + [JsonPropertyName("city")] + [Description("City of the person")] + public string? City { get; set; } + + [JsonPropertyName("address")] + [Description("Address of the person")] + public string? 
Address { get; set; } + + [JsonPropertyName("hobbies")] + [Description("Hobbies of the person")] + public List? Hobbies { get; set; } +} +#endregion person_class diff --git a/dotnet/sample/AutoGen.OpenAI.Sample/Tool_Call_With_Ollama_And_LiteLLM.cs b/dotnet/sample/AutoGen.OpenAI.Sample/Tool_Call_With_Ollama_And_LiteLLM.cs index d9a963388d70..ed43c628a672 100644 --- a/dotnet/sample/AutoGen.OpenAI.Sample/Tool_Call_With_Ollama_And_LiteLLM.cs +++ b/dotnet/sample/AutoGen.OpenAI.Sample/Tool_Call_With_Ollama_And_LiteLLM.cs @@ -2,10 +2,8 @@ // Tool_Call_With_Ollama_And_LiteLLM.cs using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; -using Azure.Core.Pipeline; +using AutoGen.OpenAI.Extension; +using OpenAI; namespace AutoGen.OpenAI.Sample; @@ -44,20 +42,17 @@ public static async Task RunAsync() #endregion Create_tools #region Create_Agent var liteLLMUrl = "http://localhost:4000"; - using var httpClient = new HttpClient(new CustomHttpClientHandler(liteLLMUrl)); - var option = new OpenAIClientOptions(OpenAIClientOptions.ServiceVersion.V2024_04_01_Preview) - { - Transport = new HttpClientTransport(httpClient), - }; // api-key is not required for local server // so you can use any string here - var openAIClient = new OpenAIClient("api-key", option); + var openAIClient = new OpenAIClient("api-key", new OpenAIClientOptions + { + Endpoint = new Uri("http://localhost:4000"), + }); var agent = new OpenAIChatAgent( - openAIClient: openAIClient, + chatClient: openAIClient.GetChatClient("dolphincoder:latest"), name: "assistant", - modelName: "dolphincoder:latest", systemMessage: "You are a helpful AI assistant") .RegisterMessageConnector() .RegisterMiddleware(functionMiddleware) diff --git a/dotnet/sample/AutoGen.OpenAI.Sample/Use_Json_Mode.cs b/dotnet/sample/AutoGen.OpenAI.Sample/Use_Json_Mode.cs index 0def9c4ca8e2..392796d819fa 100644 --- a/dotnet/sample/AutoGen.OpenAI.Sample/Use_Json_Mode.cs +++ 
b/dotnet/sample/AutoGen.OpenAI.Sample/Use_Json_Mode.cs @@ -4,10 +4,11 @@ using System.Text.Json; using System.Text.Json.Serialization; using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; +using AutoGen.OpenAI; +using AutoGen.OpenAI.Extension; using FluentAssertions; +using OpenAI; +using OpenAI.Chat; namespace AutoGen.BasicSample; @@ -17,16 +18,15 @@ public static async Task RunAsync() { #region create_agent var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new Exception("Please set OPENAI_API_KEY environment variable."); - var model = "gpt-3.5-turbo"; + var model = "gpt-4o-mini"; var openAIClient = new OpenAIClient(apiKey); var openAIClientAgent = new OpenAIChatAgent( - openAIClient: openAIClient, + chatClient: openAIClient.GetChatClient(model), name: "assistant", - modelName: model, systemMessage: "You are a helpful assistant designed to output JSON.", seed: 0, // explicitly set a seed to enable deterministic output - responseFormat: ChatCompletionsResponseFormat.JsonObject) // set response format to JSON object to enable JSON mode + responseFormat: ChatResponseFormat.JsonObject) // set response format to JSON object to enable JSON mode .RegisterMessageConnector() .RegisterPrintMessage(); #endregion create_agent diff --git a/dotnet/sample/AutoGen.SemanticKernel.Sample/AutoGen.SemanticKernel.Sample.csproj b/dotnet/sample/AutoGen.SemanticKernel.Sample/AutoGen.SemanticKernel.Sample.csproj index df1064e18c44..45514431368f 100644 --- a/dotnet/sample/AutoGen.SemanticKernel.Sample/AutoGen.SemanticKernel.Sample.csproj +++ b/dotnet/sample/AutoGen.SemanticKernel.Sample/AutoGen.SemanticKernel.Sample.csproj @@ -9,8 +9,9 @@ + + - diff --git a/dotnet/sample/AutoGen.SemanticKernel.Sample/Use_Kernel_Functions_With_Other_Agent.cs b/dotnet/sample/AutoGen.SemanticKernel.Sample/Use_Kernel_Functions_With_Other_Agent.cs index 688f3b017a68..700bdfe75c7b 100644 --- 
a/dotnet/sample/AutoGen.SemanticKernel.Sample/Use_Kernel_Functions_With_Other_Agent.cs +++ b/dotnet/sample/AutoGen.SemanticKernel.Sample/Use_Kernel_Functions_With_Other_Agent.cs @@ -3,10 +3,10 @@ #region Using using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; +using AutoGen.OpenAI; +using AutoGen.OpenAI.Extension; using Microsoft.SemanticKernel; +using OpenAI; #endregion Using namespace AutoGen.SemanticKernel.Sample; @@ -17,7 +17,7 @@ public static async Task RunAsync() { #region Create_plugin var openAIKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new Exception("Please set OPENAI_API_KEY environment variable."); - var modelId = "gpt-3.5-turbo"; + var modelId = "gpt-4o-mini"; var kernelBuilder = Kernel.CreateBuilder(); var kernel = kernelBuilder.Build(); var getWeatherFunction = KernelFunctionFactory.CreateFromMethod( @@ -33,9 +33,8 @@ public static async Task RunAsync() var openAIClient = new OpenAIClient(openAIKey); var openAIAgent = new OpenAIChatAgent( - openAIClient: openAIClient, - name: "assistant", - modelName: modelId) + chatClient: openAIClient.GetChatClient(modelId), + name: "assistant") .RegisterMessageConnector() // register message connector so it support AutoGen built-in message types like TextMessage. 
.RegisterMiddleware(kernelPluginMiddleware) // register the middleware to handle the plugin functions .RegisterPrintMessage(); // pretty print the message to the console diff --git a/dotnet/src/AutoGen.LMStudio/LMStudioAgent.cs b/dotnet/src/AutoGen.LMStudio/LMStudioAgent.cs index 52a5d9bfdd0f..c4808b443c79 100644 --- a/dotnet/src/AutoGen.LMStudio/LMStudioAgent.cs +++ b/dotnet/src/AutoGen.LMStudio/LMStudioAgent.cs @@ -18,6 +18,7 @@ namespace AutoGen.LMStudio; /// /// [!code-csharp[LMStudioAgent](../../sample/AutoGen.BasicSamples/Example08_LMStudio.cs?name=lmstudio_example_1)] /// +[Obsolete("Use OpenAIChatAgent to connect to LM Studio")] public class LMStudioAgent : IAgent { private readonly GPTAgent innerAgent; diff --git a/dotnet/src/AutoGen.OpenAI.V1/Agent/GPTAgent.cs b/dotnet/src/AutoGen.OpenAI.V1/Agent/GPTAgent.cs index 8d6458299f2d..a32af5c38f15 100644 --- a/dotnet/src/AutoGen.OpenAI.V1/Agent/GPTAgent.cs +++ b/dotnet/src/AutoGen.OpenAI.V1/Agent/GPTAgent.cs @@ -27,6 +27,7 @@ namespace AutoGen.OpenAI.V1; /// - /// - where TMessage1 is and TMessage2 is /// +[Obsolete("Use OpenAIChatAgent instead")] public class GPTAgent : IStreamingAgent { private readonly OpenAIClient openAIClient; diff --git a/dotnet/src/AutoGen.OpenAI.V1/AutoGen.OpenAI.V1.csproj b/dotnet/src/AutoGen.OpenAI.V1/AutoGen.OpenAI.V1.csproj index e3a2f41c8f7a..21951cb32fbd 100644 --- a/dotnet/src/AutoGen.OpenAI.V1/AutoGen.OpenAI.V1.csproj +++ b/dotnet/src/AutoGen.OpenAI.V1/AutoGen.OpenAI.V1.csproj @@ -8,9 +8,11 @@ - AutoGen.OpenAI + AutoGen.OpenAI.V1 OpenAI Intergration for AutoGen. + This package connects to openai using Azure.AI.OpenAI v1 package. It is reserved to keep compatibility with the projects which stick to that v1 package. + To use the latest version of OpenAI SDK, please use AutoGen.OpenAI package. 
diff --git a/dotnet/src/AutoGen.OpenAI/Agent/OpenAIChatAgent.cs b/dotnet/src/AutoGen.OpenAI/Agent/OpenAIChatAgent.cs new file mode 100644 index 000000000000..1ae1e45db155 --- /dev/null +++ b/dotnet/src/AutoGen.OpenAI/Agent/OpenAIChatAgent.cs @@ -0,0 +1,210 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// OpenAIChatAgent.cs + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using AutoGen.OpenAI.Extension; +using global::OpenAI; +using global::OpenAI.Chat; + +namespace AutoGen.OpenAI; + +/// +/// OpenAI client agent. This agent is a thin wrapper around to provide a simple interface for chat completions. +/// supports the following message types: +/// +/// +/// where T is : chat message. +/// +/// +/// returns the following message types: +/// +/// +/// where T is : chat response message. +/// where T is : streaming chat completions update. +/// +/// +/// +public class OpenAIChatAgent : IStreamingAgent +{ + private readonly ChatClient chatClient; + private readonly ChatCompletionOptions options; + private readonly string systemMessage; + + /// + /// Create a new instance of . + /// + /// openai client + /// agent name + /// system message + /// temperature + /// max tokens to generated + /// response format, set it to to enable json mode. + /// seed to use, set it to enable deterministic output + /// functions + public OpenAIChatAgent( + ChatClient chatClient, + string name, + string systemMessage = "You are a helpful AI assistant", + float temperature = 0.7f, + int maxTokens = 1024, + int? seed = null, + ChatResponseFormat? responseFormat = null, + IEnumerable? functions = null) + : this( + chatClient: chatClient, + name: name, + options: CreateChatCompletionOptions(temperature, maxTokens, seed, responseFormat, functions), + systemMessage: systemMessage) + { + } + + /// + /// Create a new instance of . 
+ /// + /// openai chat client + /// agent name + /// system message + /// chat completion option. The option can't contain messages + public OpenAIChatAgent( + ChatClient chatClient, + string name, + ChatCompletionOptions options, + string systemMessage = "You are a helpful AI assistant") + { + this.chatClient = chatClient; + this.Name = name; + this.options = options; + this.systemMessage = systemMessage; + } + + public string Name { get; } + + public async Task GenerateReplyAsync( + IEnumerable messages, + GenerateReplyOptions? options = null, + CancellationToken cancellationToken = default) + { + var chatHistory = this.CreateChatMessages(messages); + var settings = this.CreateChatCompletionsOptions(options); + var reply = await this.chatClient.CompleteChatAsync(chatHistory, settings, cancellationToken); + return new MessageEnvelope(reply.Value, from: this.Name); + } + + public async IAsyncEnumerable GenerateStreamingReplyAsync( + IEnumerable messages, + GenerateReplyOptions? options = null, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + var chatHistory = this.CreateChatMessages(messages); + var settings = this.CreateChatCompletionsOptions(options); + var response = this.chatClient.CompleteChatStreamingAsync(chatHistory, settings, cancellationToken); + await foreach (var update in response.WithCancellation(cancellationToken)) + { + if (update.ContentUpdate.Count > 1) + { + throw new InvalidOperationException("Only one choice is supported in streaming response"); + } + + yield return new MessageEnvelope(update, from: this.Name); + } + } + + private IEnumerable CreateChatMessages(IEnumerable messages) + { + var oaiMessages = messages.Select(m => m switch + { + IMessage chatMessage => chatMessage.Content, + _ => throw new ArgumentException("Invalid message type") + }); + + // add system message if there's no system message in messages + if (!oaiMessages.Any(m => m is SystemChatMessage)) + { + oaiMessages = new[] { new 
SystemChatMessage(systemMessage) }.Concat(oaiMessages); + } + + return oaiMessages; + } + + private ChatCompletionOptions CreateChatCompletionsOptions(GenerateReplyOptions? options) + { + var option = new ChatCompletionOptions() + { + Seed = this.options.Seed, + Temperature = options?.Temperature ?? this.options.Temperature, + MaxTokens = options?.MaxToken ?? this.options.MaxTokens, + ResponseFormat = this.options.ResponseFormat, + FrequencyPenalty = this.options.FrequencyPenalty, + FunctionChoice = this.options.FunctionChoice, + IncludeLogProbabilities = this.options.IncludeLogProbabilities, + ParallelToolCallsEnabled = this.options.ParallelToolCallsEnabled, + PresencePenalty = this.options.PresencePenalty, + ToolChoice = this.options.ToolChoice, + TopLogProbabilityCount = this.options.TopLogProbabilityCount, + TopP = this.options.TopP, + EndUserId = this.options.EndUserId, + }; + + // add tools from this.options to option + foreach (var tool in this.options.Tools) + { + option.Tools.Add(tool); + } + + // add stop sequences from this.options to option + foreach (var seq in this.options.StopSequences) + { + option.StopSequences.Add(seq); + } + + var openAIFunctionDefinitions = options?.Functions?.Select(f => f.ToChatTool()).ToList(); + if (openAIFunctionDefinitions is { Count: > 0 }) + { + foreach (var f in openAIFunctionDefinitions) + { + option.Tools.Add(f); + } + } + + if (options?.StopSequence is var sequence && sequence is { Length: > 0 }) + { + foreach (var seq in sequence) + { + option.StopSequences.Add(seq); + } + } + + return option; + } + + private static ChatCompletionOptions CreateChatCompletionOptions( + float temperature = 0.7f, + int maxTokens = 1024, + int? seed = null, + ChatResponseFormat? responseFormat = null, + IEnumerable? 
functions = null) + { + var options = new ChatCompletionOptions + { + Temperature = temperature, + MaxTokens = maxTokens, + Seed = seed, + ResponseFormat = responseFormat, + }; + + if (functions is not null) + { + foreach (var f in functions) + { + options.Tools.Add(f); + } + } + + return options; + } +} diff --git a/dotnet/src/AutoGen.OpenAI/AutoGen.OpenAI.csproj b/dotnet/src/AutoGen.OpenAI/AutoGen.OpenAI.csproj new file mode 100644 index 000000000000..f93fdd4bc5e2 --- /dev/null +++ b/dotnet/src/AutoGen.OpenAI/AutoGen.OpenAI.csproj @@ -0,0 +1,26 @@ + + + $(PackageTargetFrameworks) + AutoGen.OpenAI + + + + + + + AutoGen.OpenAI + + OpenAI Integration for AutoGen. + If your project still depends on Azure.AI.OpenAI v1, please use AutoGen.OpenAI.V1 package instead. + + + + + + + + + + + + diff --git a/dotnet/src/AutoGen.OpenAI/Extension/FunctionContractExtension.cs b/dotnet/src/AutoGen.OpenAI/Extension/FunctionContractExtension.cs new file mode 100644 index 000000000000..dd1c1125aec0 --- /dev/null +++ b/dotnet/src/AutoGen.OpenAI/Extension/FunctionContractExtension.cs @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// FunctionContractExtension.cs + +using System; +using System.Collections.Generic; +using Json.Schema; +using Json.Schema.Generation; +using OpenAI.Chat; + +namespace AutoGen.OpenAI.Extension; + +public static class FunctionContractExtension +{ + /// + /// Convert a to a that can be used in gpt function call. + /// + /// function contract + /// + public static ChatTool ToChatTool(this FunctionContract functionContract) + { + var requiredParameterNames = new List(); + var propertiesSchemas = new Dictionary(); + var propertySchemaBuilder = new JsonSchemaBuilder().Type(SchemaValueType.Object); + foreach (var param in functionContract.Parameters ??
[]) + { + if (param.Name is null) + { + throw new InvalidOperationException("Parameter name cannot be null"); + } + + var schemaBuilder = new JsonSchemaBuilder().FromType(param.ParameterType ?? throw new ArgumentNullException(nameof(param.ParameterType))); + if (param.Description != null) + { + schemaBuilder = schemaBuilder.Description(param.Description); + } + + if (param.IsRequired) + { + requiredParameterNames.Add(param.Name); + } + + var schema = schemaBuilder.Build(); + propertiesSchemas[param.Name] = schema; + + } + propertySchemaBuilder = propertySchemaBuilder.Properties(propertiesSchemas); + propertySchemaBuilder = propertySchemaBuilder.Required(requiredParameterNames); + + var option = new System.Text.Json.JsonSerializerOptions() + { + PropertyNamingPolicy = System.Text.Json.JsonNamingPolicy.CamelCase + }; + + var functionDefinition = ChatTool.CreateFunctionTool( + functionContract.Name ?? throw new ArgumentNullException(nameof(functionContract.Name)), + functionContract.Description, + BinaryData.FromObjectAsJson(propertySchemaBuilder.Build(), option)); + + return functionDefinition; + } + + /// + /// Convert a to a that can be used in gpt function call. + /// + /// function contract + /// + [Obsolete("Use ToChatTool instead")] + public static ChatTool ToOpenAIFunctionDefinition(this FunctionContract functionContract) + { + return functionContract.ToChatTool(); + } +} diff --git a/dotnet/src/AutoGen.OpenAI/Extension/OpenAIAgentExtension.cs b/dotnet/src/AutoGen.OpenAI/Extension/OpenAIAgentExtension.cs new file mode 100644 index 000000000000..1e8ae58954ea --- /dev/null +++ b/dotnet/src/AutoGen.OpenAI/Extension/OpenAIAgentExtension.cs @@ -0,0 +1,37 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// OpenAIAgentExtension.cs + +namespace AutoGen.OpenAI.Extension; + +public static class OpenAIAgentExtension +{ + /// + /// Register an to the + /// + /// the connector to use. If null, a new instance of will be created.
+ public static MiddlewareStreamingAgent RegisterMessageConnector( + this OpenAIChatAgent agent, OpenAIChatRequestMessageConnector? connector = null) + { + if (connector == null) + { + connector = new OpenAIChatRequestMessageConnector(); + } + + return agent.RegisterStreamingMiddleware(connector); + } + + /// + /// Register an to the where T is + /// + /// the connector to use. If null, a new instance of will be created. + public static MiddlewareStreamingAgent RegisterMessageConnector( + this MiddlewareStreamingAgent agent, OpenAIChatRequestMessageConnector? connector = null) + { + if (connector == null) + { + connector = new OpenAIChatRequestMessageConnector(); + } + + return agent.RegisterStreamingMiddleware(connector); + } +} diff --git a/dotnet/src/AutoGen.OpenAI/GlobalUsing.cs b/dotnet/src/AutoGen.OpenAI/GlobalUsing.cs new file mode 100644 index 000000000000..d66bf001ed5e --- /dev/null +++ b/dotnet/src/AutoGen.OpenAI/GlobalUsing.cs @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// GlobalUsing.cs + +global using AutoGen.Core; diff --git a/dotnet/src/AutoGen.OpenAI/Middleware/OpenAIChatRequestMessageConnector.cs b/dotnet/src/AutoGen.OpenAI/Middleware/OpenAIChatRequestMessageConnector.cs new file mode 100644 index 000000000000..2297d123bf86 --- /dev/null +++ b/dotnet/src/AutoGen.OpenAI/Middleware/OpenAIChatRequestMessageConnector.cs @@ -0,0 +1,358 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// OpenAIChatRequestMessageConnector.cs + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using OpenAI.Chat; + +namespace AutoGen.OpenAI; + +/// +/// This middleware converts the incoming to where T is before sending to agent. And converts the output to after receiving from agent. 
+/// Supported are +/// - +/// - +/// - +/// - +/// - +/// - where T is +/// - where TMessage1 is and TMessage2 is +/// +public class OpenAIChatRequestMessageConnector : IMiddleware, IStreamingMiddleware +{ + private bool strictMode = false; + + /// + /// Create a new instance of . + /// + /// If true, will throw an + /// When the message type is not supported. If false, it will ignore the unsupported message type. + public OpenAIChatRequestMessageConnector(bool strictMode = false) + { + this.strictMode = strictMode; + } + + public string? Name => nameof(OpenAIChatRequestMessageConnector); + + public async Task InvokeAsync(MiddlewareContext context, IAgent agent, CancellationToken cancellationToken = default) + { + var chatMessages = ProcessIncomingMessages(agent, context.Messages); + + var reply = await agent.GenerateReplyAsync(chatMessages, context.Options, cancellationToken); + + return PostProcessMessage(reply); + } + + public async IAsyncEnumerable InvokeAsync( + MiddlewareContext context, + IStreamingAgent agent, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + var chatMessages = ProcessIncomingMessages(agent, context.Messages); + var streamingReply = agent.GenerateStreamingReplyAsync(chatMessages, context.Options, cancellationToken); + var chunks = new List(); + + // only streaming the text content + await foreach (var reply in streamingReply) + { + if (reply is IMessage update) + { + if (update.Content.ContentUpdate.Count == 1 && update.Content.ContentUpdate[0].Kind == ChatMessageContentPartKind.Text) + { + yield return new TextMessageUpdate(Role.Assistant, update.Content.ContentUpdate[0].Text, from: update.From); + } + + chunks.Add(update.Content); + } + else + { + if (this.strictMode) + { + throw new InvalidOperationException($"Invalid streaming message type {reply.GetType().Name}"); + } + else + { + yield return reply; + } + } + } + + // process the tool call + var streamingChatToolCallUpdates = chunks.Where(c => 
c.ToolCallUpdates.Count > 0) + .SelectMany(c => c.ToolCallUpdates) + .ToList(); + + // collect all text parts + var textParts = chunks.SelectMany(c => c.ContentUpdate) + .Where(c => c.Kind == ChatMessageContentPartKind.Text) + .Select(c => c.Text) + .ToList(); + + // combine the tool call and function call into one ToolCallMessages + var text = string.Join(string.Empty, textParts); + var toolCalls = new List(); + var currentToolName = string.Empty; + var currentToolArguments = string.Empty; + var currentToolId = string.Empty; + int? currentIndex = null; + foreach (var toolCall in streamingChatToolCallUpdates) + { + if (currentIndex is null) + { + currentIndex = toolCall.Index; + } + + if (toolCall.Index == currentIndex) + { + currentToolName += toolCall.FunctionName; + currentToolArguments += toolCall.FunctionArgumentsUpdate; + currentToolId += toolCall.Id; + + yield return new ToolCallMessageUpdate(currentToolName, currentToolArguments, from: agent.Name); + } + else + { + toolCalls.Add(new ToolCall(currentToolName, currentToolArguments) { ToolCallId = currentToolId }); + currentToolName = toolCall.FunctionName; + currentToolArguments = toolCall.FunctionArgumentsUpdate; + currentToolId = toolCall.Id; + currentIndex = toolCall.Index; + + yield return new ToolCallMessageUpdate(currentToolName, currentToolArguments, from: agent.Name); + } + } + + if (string.IsNullOrEmpty(currentToolName) is false) + { + toolCalls.Add(new ToolCall(currentToolName, currentToolArguments) { ToolCallId = currentToolId }); + } + + if (toolCalls.Any()) + { + yield return new ToolCallMessage(toolCalls, from: agent.Name) + { + Content = text, + }; + } + } + + public IMessage PostProcessMessage(IMessage message) + { + return message switch + { + IMessage m => PostProcessChatCompletions(m), + _ when strictMode is false => message, + _ => throw new InvalidOperationException($"Invalid return message type {message.GetType().Name}"), + }; + } + + private IMessage PostProcessChatCompletions(IMessage 
message) + { + // throw exception if prompt filter results is not null + if (message.Content.FinishReason == ChatFinishReason.ContentFilter) + { + throw new InvalidOperationException("The content is filtered because its potential risk. Please try another input."); + } + + // throw exception if there is more than one choice + if (message.Content.Content.Count > 1) + { + throw new InvalidOperationException("The content has more than one choice. Please try another input."); + } + + return PostProcessChatResponseMessage(message.Content, message.From); + } + + private IMessage PostProcessChatResponseMessage(ChatCompletion chatCompletion, string? from) + { + // throw exception if prompt filter results is not null + if (chatCompletion.FinishReason == ChatFinishReason.ContentFilter) + { + throw new InvalidOperationException("The content is filtered because its potential risk. Please try another input."); + } + + // throw exception if there is more than one choice + if (chatCompletion.Content.Count > 1) + { + throw new InvalidOperationException("The content has more than one choice. Please try another input."); + } + var textContent = chatCompletion.Content.FirstOrDefault(); + + // if tool calls is not empty, return ToolCallMessage + if (chatCompletion.ToolCalls is { Count: > 0 }) + { + var toolCalls = chatCompletion.ToolCalls.Select(tc => new ToolCall(tc.FunctionName, tc.FunctionArguments) { ToolCallId = tc.Id }); + return new ToolCallMessage(toolCalls, from) + { + Content = textContent?.Kind switch + { + _ when textContent?.Kind == ChatMessageContentPartKind.Text => textContent.Text, + _ => null, + }, + }; + } + + // else, process function call. + // This is deprecated and will be removed in the future.
+ if (chatCompletion.FunctionCall is ChatFunctionCall fc) + { + return new ToolCallMessage(fc.FunctionName, fc.FunctionArguments, from) + { + Content = textContent?.Kind switch + { + _ when textContent?.Kind == ChatMessageContentPartKind.Text => textContent.Text, + _ => null, + }, + }; + } + + // if the content is text, return TextMessage + if (textContent?.Kind == ChatMessageContentPartKind.Text) + { + return new TextMessage(Role.Assistant, textContent.Text, from); + } + + throw new InvalidOperationException("Invalid ChatResponseMessage"); + } + + public IEnumerable ProcessIncomingMessages(IAgent agent, IEnumerable messages) + { + return messages.SelectMany(m => + { + if (m is IMessage crm) + { + return [crm]; + } + else + { + var chatRequestMessages = m switch + { + TextMessage textMessage => ProcessTextMessage(agent, textMessage), + ImageMessage imageMessage when (imageMessage.From is null || imageMessage.From != agent.Name) => ProcessImageMessage(agent, imageMessage), + MultiModalMessage multiModalMessage when (multiModalMessage.From is null || multiModalMessage.From != agent.Name) => ProcessMultiModalMessage(agent, multiModalMessage), + ToolCallMessage toolCallMessage when (toolCallMessage.From is null || toolCallMessage.From == agent.Name) => ProcessToolCallMessage(agent, toolCallMessage), + ToolCallResultMessage toolCallResultMessage => ProcessToolCallResultMessage(toolCallResultMessage), + AggregateMessage aggregateMessage => ProcessFunctionCallMiddlewareMessage(agent, aggregateMessage), + _ when strictMode is false => [], + _ => throw new InvalidOperationException($"Invalid message type: {m.GetType().Name}"), + }; + + if (chatRequestMessages.Any()) + { + return chatRequestMessages.Select(cm => MessageEnvelope.Create(cm, m.From)); + } + else + { + return [m]; + } + } + }); + } + + private IEnumerable ProcessTextMessage(IAgent agent, TextMessage message) + { + if (message.Role == Role.System) + { + return [new SystemChatMessage(message.Content) { 
ParticipantName = message.From }]; + } + + if (agent.Name == message.From) + { + return [new AssistantChatMessage(message.Content) { ParticipantName = agent.Name }]; + } + else + { + return message.From switch + { + null when message.Role == Role.User => [new UserChatMessage(message.Content)], + null when message.Role == Role.Assistant => [new AssistantChatMessage(message.Content)], + null => throw new InvalidOperationException("Invalid Role"), + _ => [new UserChatMessage(message.Content) { ParticipantName = message.From }] + }; + } + } + + private IEnumerable ProcessImageMessage(IAgent agent, ImageMessage message) + { + if (agent.Name == message.From) + { + // image message from assistant is not supported + throw new ArgumentException("ImageMessage is not supported when message.From is the same with agent"); + } + + var imageContentItem = this.CreateChatMessageImageContentItemFromImageMessage(message); + return [new UserChatMessage([imageContentItem]) { ParticipantName = message.From }]; + } + + private IEnumerable ProcessMultiModalMessage(IAgent agent, MultiModalMessage message) + { + if (agent.Name == message.From) + { + // image message from assistant is not supported + throw new ArgumentException("MultiModalMessage is not supported when message.From is the same with agent"); + } + + IEnumerable items = message.Content.Select(ci => ci switch + { + TextMessage text => ChatMessageContentPart.CreateTextMessageContentPart(text.Content), + ImageMessage image => this.CreateChatMessageImageContentItemFromImageMessage(image), + _ => throw new NotImplementedException(), + }); + + return [new UserChatMessage(items) { ParticipantName = message.From }]; + } + + private ChatMessageContentPart CreateChatMessageImageContentItemFromImageMessage(ImageMessage message) + { + return message.Data is null && message.Url is not null + ? 
ChatMessageContentPart.CreateImageMessageContentPart(new Uri(message.Url)) + : ChatMessageContentPart.CreateImageMessageContentPart(message.Data, message.Data?.MediaType); + } + + private IEnumerable ProcessToolCallMessage(IAgent agent, ToolCallMessage message) + { + if (message.From is not null && message.From != agent.Name) + { + throw new ArgumentException("ToolCallMessage is not supported when message.From is not the same with agent"); + } + + var toolCallParts = message.ToolCalls.Select((tc, i) => ChatToolCall.CreateFunctionToolCall(tc.ToolCallId ?? $"{tc.FunctionName}_{i}", tc.FunctionName, tc.FunctionArguments)); + var textContent = message.GetContent() ?? null; + var chatRequestMessage = new AssistantChatMessage(toolCallParts, textContent) { ParticipantName = message.From }; + + return [chatRequestMessage]; + } + + private IEnumerable ProcessToolCallResultMessage(ToolCallResultMessage message) + { + return message.ToolCalls + .Where(tc => tc.Result is not null) + .Select((tc, i) => new ToolChatMessage(tc.ToolCallId ?? 
$"{tc.FunctionName}_{i}", tc.Result)); + } + + + private IEnumerable ProcessFunctionCallMiddlewareMessage(IAgent agent, AggregateMessage aggregateMessage) + { + if (aggregateMessage.From is not null && aggregateMessage.From != agent.Name) + { + // convert as user message + var resultMessage = aggregateMessage.Message2; + + return resultMessage.ToolCalls.Select(tc => new UserChatMessage(tc.Result) { ParticipantName = aggregateMessage.From }); + } + else + { + var toolCallMessage1 = aggregateMessage.Message1; + var toolCallResultMessage = aggregateMessage.Message2; + + var assistantMessage = this.ProcessToolCallMessage(agent, toolCallMessage1); + var toolCallResults = this.ProcessToolCallResultMessage(toolCallResultMessage); + + return assistantMessage.Concat(toolCallResults); + } + } +} diff --git a/dotnet/src/AutoGen.SemanticKernel/AutoGen.SemanticKernel.csproj b/dotnet/src/AutoGen.SemanticKernel/AutoGen.SemanticKernel.csproj index 8769c3ac4879..b89626c01a06 100644 --- a/dotnet/src/AutoGen.SemanticKernel/AutoGen.SemanticKernel.csproj +++ b/dotnet/src/AutoGen.SemanticKernel/AutoGen.SemanticKernel.csproj @@ -19,6 +19,7 @@ + diff --git a/dotnet/src/AutoGen.SemanticKernel/SemanticKernelAgent.cs b/dotnet/src/AutoGen.SemanticKernel/SemanticKernelAgent.cs index d12c54c1b3b2..e10f5b043f24 100644 --- a/dotnet/src/AutoGen.SemanticKernel/SemanticKernelAgent.cs +++ b/dotnet/src/AutoGen.SemanticKernel/SemanticKernelAgent.cs @@ -106,7 +106,6 @@ private PromptExecutionSettings BuildOption(GenerateReplyOptions? options) MaxTokens = options?.MaxToken ?? 
1024, StopSequences = options?.StopSequence, ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions, - ResultsPerPrompt = 1, }; } diff --git a/dotnet/src/AutoGen.SemanticKernel/SemanticKernelChatCompletionAgent.cs b/dotnet/src/AutoGen.SemanticKernel/SemanticKernelChatCompletionAgent.cs index 82d83a9e8556..1354996430bb 100644 --- a/dotnet/src/AutoGen.SemanticKernel/SemanticKernelChatCompletionAgent.cs +++ b/dotnet/src/AutoGen.SemanticKernel/SemanticKernelChatCompletionAgent.cs @@ -27,7 +27,7 @@ public async Task GenerateReplyAsync(IEnumerable messages, G CancellationToken cancellationToken = default) { ChatMessageContent[] reply = await _chatCompletionAgent - .InvokeAsync(BuildChatHistory(messages), cancellationToken) + .InvokeAsync(BuildChatHistory(messages), cancellationToken: cancellationToken) .ToArrayAsync(cancellationToken: cancellationToken); return reply.Length > 1 diff --git a/dotnet/src/AutoGen.WebAPI/OpenAI/Service/OpenAIChatCompletionService.cs b/dotnet/src/AutoGen.WebAPI/OpenAI/Service/OpenAIChatCompletionService.cs index 27481da006a2..80d49050ee48 100644 --- a/dotnet/src/AutoGen.WebAPI/OpenAI/Service/OpenAIChatCompletionService.cs +++ b/dotnet/src/AutoGen.WebAPI/OpenAI/Service/OpenAIChatCompletionService.cs @@ -7,7 +7,6 @@ using System.Threading.Tasks; using AutoGen.Core; using AutoGen.WebAPI.OpenAI.DTO; - namespace AutoGen.Server; internal class OpenAIChatCompletionService @@ -44,7 +43,7 @@ public async Task GetChatCompletionAsync(OpenAIChatComplet { Message = message, Index = 0, - FinishReason = "completed", + FinishReason = "stop", }; openAIChatCompletion.Choices = [choice]; diff --git a/dotnet/src/AutoGen/API/LLMConfigAPI.cs b/dotnet/src/AutoGen/API/LLMConfigAPI.cs index fcc744351731..28b5ad44312f 100644 --- a/dotnet/src/AutoGen/API/LLMConfigAPI.cs +++ b/dotnet/src/AutoGen/API/LLMConfigAPI.cs @@ -4,7 +4,6 @@ using System; using System.Collections.Generic; using System.Linq; -using AutoGen.OpenAI.V1; namespace AutoGen { diff --git 
a/dotnet/src/AutoGen/Agent/ConversableAgent.cs b/dotnet/src/AutoGen/Agent/ConversableAgent.cs index b60d2eba099d..da61c812f464 100644 --- a/dotnet/src/AutoGen/Agent/ConversableAgent.cs +++ b/dotnet/src/AutoGen/Agent/ConversableAgent.cs @@ -6,9 +6,8 @@ using System.Linq; using System.Threading; using System.Threading.Tasks; -using AutoGen.LMStudio; -using AutoGen.OpenAI.V1; - +using AutoGen.OpenAI; +using AutoGen.OpenAI.Extension; namespace AutoGen; public enum HumanInputMode @@ -87,13 +86,21 @@ public ConversableAgent( { IAgent nextAgent = llmConfig switch { - AzureOpenAIConfig azureConfig => new GPTAgent(this.Name!, this.systemMessage, azureConfig, temperature: config.Temperature ?? 0), - OpenAIConfig openAIConfig => new GPTAgent(this.Name!, this.systemMessage, openAIConfig, temperature: config.Temperature ?? 0), - LMStudioConfig lmStudioConfig => new LMStudioAgent( - name: this.Name, - config: lmStudioConfig, - systemMessage: this.systemMessage, - temperature: config.Temperature ?? 0), + AzureOpenAIConfig azureConfig => new OpenAIChatAgent( + chatClient: azureConfig.CreateChatClient(), + name: this.Name!, + systemMessage: this.systemMessage) + .RegisterMessageConnector(), + OpenAIConfig openAIConfig => new OpenAIChatAgent( + chatClient: openAIConfig.CreateChatClient(), + name: this.Name!, + systemMessage: this.systemMessage) + .RegisterMessageConnector(), + LMStudioConfig lmStudioConfig => new OpenAIChatAgent( + chatClient: lmStudioConfig.CreateChatClient(), + name: this.Name!, + systemMessage: this.systemMessage) + .RegisterMessageConnector(), _ => throw new ArgumentException($"Unsupported config type {llmConfig.GetType()}"), }; diff --git a/dotnet/src/AutoGen/AutoGen.csproj b/dotnet/src/AutoGen/AutoGen.csproj index 4c3b2a5ab81e..fe4431a35731 100644 --- a/dotnet/src/AutoGen/AutoGen.csproj +++ b/dotnet/src/AutoGen/AutoGen.csproj @@ -17,7 +17,6 @@ - @@ -27,7 +26,8 @@ - + + diff --git a/dotnet/src/AutoGen/AzureOpenAIConfig.cs 
b/dotnet/src/AutoGen/AzureOpenAIConfig.cs new file mode 100644 index 000000000000..6112a3815d59 --- /dev/null +++ b/dotnet/src/AutoGen/AzureOpenAIConfig.cs @@ -0,0 +1,30 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// AzureOpenAIConfig.cs + +using Azure.AI.OpenAI; +using OpenAI.Chat; + +namespace AutoGen; + +public class AzureOpenAIConfig : ILLMConfig +{ + public AzureOpenAIConfig(string endpoint, string deploymentName, string apiKey) + { + this.Endpoint = endpoint; + this.DeploymentName = deploymentName; + this.ApiKey = apiKey; + } + + public string Endpoint { get; } + + public string DeploymentName { get; } + + public string ApiKey { get; } + + internal ChatClient CreateChatClient() + { + var client = new AzureOpenAIClient(new System.Uri(this.Endpoint), this.ApiKey); + + return client.GetChatClient(DeploymentName); + } +} diff --git a/dotnet/src/AutoGen/LMStudioConfig.cs b/dotnet/src/AutoGen/LMStudioConfig.cs new file mode 100644 index 000000000000..5fd9edc70802 --- /dev/null +++ b/dotnet/src/AutoGen/LMStudioConfig.cs @@ -0,0 +1,45 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. 
+// LMStudioConfig.cs +using System; +using OpenAI; +using OpenAI.Chat; + +namespace AutoGen; + +/// +/// Add support for consuming openai-like API from LM Studio +/// +public class LMStudioConfig : ILLMConfig +{ + public LMStudioConfig(string host, int port) + { + this.Host = host; + this.Port = port; + this.Uri = new Uri($"http://{host}:{port}"); + } + + public LMStudioConfig(Uri uri) + { + this.Uri = uri; + this.Host = uri.Host; + this.Port = uri.Port; + } + + public string Host { get; } + + public int Port { get; } + + public Uri Uri { get; } + + internal ChatClient CreateChatClient() + { + var client = new OpenAIClient("api-key", new OpenAIClientOptions + { + Endpoint = this.Uri, + }); + + // model name doesn't matter for LM Studio + + return client.GetChatClient("model-name"); + } +} diff --git a/dotnet/src/AutoGen/OpenAIConfig.cs b/dotnet/src/AutoGen/OpenAIConfig.cs new file mode 100644 index 000000000000..ea50fa085f11 --- /dev/null +++ b/dotnet/src/AutoGen/OpenAIConfig.cs @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. 
+// OpenAIConfig.cs + +using OpenAI; +using OpenAI.Chat; + +namespace AutoGen; + +public class OpenAIConfig : ILLMConfig +{ + public OpenAIConfig(string apiKey, string modelId) + { + this.ApiKey = apiKey; + this.ModelId = modelId; + } + + public string ApiKey { get; } + + public string ModelId { get; } + + internal ChatClient CreateChatClient() + { + var client = new OpenAIClient(this.ApiKey); + + return client.GetChatClient(this.ModelId); + } +} diff --git a/dotnet/test/AutoGen.OpenAI.Tests/ApprovalTests/OpenAIMessageTests.BasicMessageTest.approved.txt b/dotnet/test/AutoGen.OpenAI.Tests/ApprovalTests/OpenAIMessageTests.BasicMessageTest.approved.txt new file mode 100644 index 000000000000..3574e593d8d6 --- /dev/null +++ b/dotnet/test/AutoGen.OpenAI.Tests/ApprovalTests/OpenAIMessageTests.BasicMessageTest.approved.txt @@ -0,0 +1,232 @@ +[ + { + "OriginalMessage": "TextMessage(system, You are a helpful AI assistant, )", + "ConvertedMessages": [ + { + "Name": null, + "Role": "system", + "Content": [ + { + "Kind": {}, + "Text": "You are a helpful AI assistant", + "Refusal": null, + "ImageUri": null, + "ImageBytes": null, + "ImageBytesMediaType": null, + "ImageDetail": null + } + ] + } + ] + }, + { + "OriginalMessage": "TextMessage(user, Hello, user)", + "ConvertedMessages": [ + { + "Role": "user", + "Content": [ + { + "Kind": {}, + "Text": "Hello", + "Refusal": null, + "ImageUri": null, + "ImageBytes": null, + "ImageBytesMediaType": null, + "ImageDetail": null + } + ], + "Name": "user", + "MultiModaItem": [ + { + "Type": "Text", + "Text": "Hello" + } + ] + } + ] + }, + { + "OriginalMessage": "TextMessage(assistant, How can I help you?, assistant)", + "ConvertedMessages": [ + { + "Role": "assistant", + "Content": [ + { + "Kind": {}, + "Text": "How can I help you?", + "Refusal": null, + "ImageUri": null, + "ImageBytes": null, + "ImageBytesMediaType": null, + "ImageDetail": null + } + ], + "Name": "assistant", + "TooCall": [], + "FunctionCallName": null, + 
"FunctionCallArguments": null + } + ] + }, + { + "OriginalMessage": "ImageMessage(user, https://example.com/image.png, user)", + "ConvertedMessages": [ + { + "Role": "user", + "Content": [ + { + "Kind": {}, + "Text": null, + "Refusal": null, + "ImageUri": "https://example.com/image.png", + "ImageBytes": null, + "ImageBytesMediaType": null, + "ImageDetail": null + } + ], + "Name": "user", + "MultiModaItem": [ + { + "Type": "Image", + "ImageUrl": "https://example.com/image.png" + } + ] + } + ] + }, + { + "OriginalMessage": "MultiModalMessage(assistant, user)\n\tTextMessage(user, Hello, user)\n\tImageMessage(user, https://example.com/image.png, user)", + "ConvertedMessages": [ + { + "Role": "user", + "Content": [ + { + "Kind": {}, + "Text": "Hello", + "Refusal": null, + "ImageUri": null, + "ImageBytes": null, + "ImageBytesMediaType": null, + "ImageDetail": null + }, + { + "Kind": {}, + "Text": null, + "Refusal": null, + "ImageUri": "https://example.com/image.png", + "ImageBytes": null, + "ImageBytesMediaType": null, + "ImageDetail": null + } + ], + "Name": "user", + "MultiModaItem": [ + { + "Type": "Text", + "Text": "Hello" + }, + { + "Type": "Image", + "ImageUrl": "https://example.com/image.png" + } + ] + } + ] + }, + { + "OriginalMessage": "ToolCallMessage(assistant)\n\tToolCall(test, test, )", + "ConvertedMessages": [ + { + "Role": "assistant", + "Content": [], + "Name": "assistant", + "TooCall": [ + { + "Type": "Function", + "Name": "test", + "Arguments": "test", + "Id": "test" + } + ], + "FunctionCallName": null, + "FunctionCallArguments": null + } + ] + }, + { + "OriginalMessage": "ToolCallResultMessage(user)\n\tToolCall(test, test, result)", + "ConvertedMessages": [ + { + "Role": "tool", + "Content": "result", + "ToolCallId": "test" + } + ] + }, + { + "OriginalMessage": "ToolCallResultMessage(user)\n\tToolCall(result, test, test)\n\tToolCall(result, test, test)", + "ConvertedMessages": [ + { + "Role": "tool", + "Content": "test", + "ToolCallId": "result_0" + }, 
+ { + "Role": "tool", + "Content": "test", + "ToolCallId": "result_1" + } + ] + }, + { + "OriginalMessage": "ToolCallMessage(assistant)\n\tToolCall(test, test, )\n\tToolCall(test, test, )", + "ConvertedMessages": [ + { + "Role": "assistant", + "Content": [], + "Name": "assistant", + "TooCall": [ + { + "Type": "Function", + "Name": "test", + "Arguments": "test", + "Id": "test_0" + }, + { + "Type": "Function", + "Name": "test", + "Arguments": "test", + "Id": "test_1" + } + ], + "FunctionCallName": null, + "FunctionCallArguments": null + } + ] + }, + { + "OriginalMessage": "AggregateMessage(assistant)\n\tToolCallMessage(assistant)\n\tToolCall(test, test, )\n\tToolCallResultMessage(assistant)\n\tToolCall(test, test, result)", + "ConvertedMessages": [ + { + "Role": "assistant", + "Content": [], + "Name": "assistant", + "TooCall": [ + { + "Type": "Function", + "Name": "test", + "Arguments": "test", + "Id": "test" + } + ], + "FunctionCallName": null, + "FunctionCallArguments": null + }, + { + "Role": "tool", + "Content": "result", + "ToolCallId": "test" + } + ] + } +] \ No newline at end of file diff --git a/dotnet/test/AutoGen.OpenAI.Tests/AutoGen.OpenAI.Tests.csproj b/dotnet/test/AutoGen.OpenAI.Tests/AutoGen.OpenAI.Tests.csproj new file mode 100644 index 000000000000..a6495fc4487c --- /dev/null +++ b/dotnet/test/AutoGen.OpenAI.Tests/AutoGen.OpenAI.Tests.csproj @@ -0,0 +1,19 @@ + + + + $(TestTargetFrameworks) + false + True + True + + + + + + + + + + + + diff --git a/dotnet/test/AutoGen.OpenAI.Tests/GlobalUsing.cs b/dotnet/test/AutoGen.OpenAI.Tests/GlobalUsing.cs new file mode 100644 index 000000000000..d66bf001ed5e --- /dev/null +++ b/dotnet/test/AutoGen.OpenAI.Tests/GlobalUsing.cs @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. 
+// GlobalUsing.cs + +global using AutoGen.Core; diff --git a/dotnet/test/AutoGen.OpenAI.Tests/MathClassTest.cs b/dotnet/test/AutoGen.OpenAI.Tests/MathClassTest.cs new file mode 100644 index 000000000000..be1c38ad0a3c --- /dev/null +++ b/dotnet/test/AutoGen.OpenAI.Tests/MathClassTest.cs @@ -0,0 +1,219 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// MathClassTest.cs + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using AutoGen.OpenAI.Extension; +using AutoGen.Tests; +using Azure.AI.OpenAI; +using FluentAssertions; +using OpenAI; +using Xunit.Abstractions; + +namespace AutoGen.OpenAI.Tests +{ + public partial class MathClassTest + { + private readonly ITestOutputHelper _output; + + // as of 2024-05-20, aoai return 500 error when round > 1 + // I'm pretty sure that round > 5 was supported before + // So this is probably some weird regression on aoai side + // I'll keep this test case here for now, plus setting round to 1 + // so the test can still pass. + // In the future, we should rewind this test case to round > 1 (previously was 5) + private int round = 1; + public MathClassTest(ITestOutputHelper output) + { + _output = output; + } + + private Task Print(IEnumerable messages, GenerateReplyOptions?
option, IAgent agent, CancellationToken ct) + { + try + { + var reply = agent.GenerateReplyAsync(messages, option, ct).Result; + + _output.WriteLine(reply.FormatMessage()); + return Task.FromResult(reply); + } + catch (Exception) + { + _output.WriteLine("Request failed"); + _output.WriteLine($"agent name: {agent.Name}"); + foreach (var message in messages) + { + _output.WriteLine(message.FormatMessage()); + } + + throw; + } + + } + + [FunctionAttribute] + public async Task CreateMathQuestion(string question, int question_index) + { + return $@"[MATH_QUESTION] +Question {question_index}: +{question} + +Student, please answer"; + } + + [FunctionAttribute] + public async Task AnswerQuestion(string answer) + { + return $@"[MATH_ANSWER] +The answer is {answer} +teacher please check answer"; + } + + [FunctionAttribute] + public async Task AnswerIsCorrect(string message) + { + return $@"[ANSWER_IS_CORRECT] +{message} +please update progress"; + } + + [FunctionAttribute] + public async Task UpdateProgress(int correctAnswerCount) + { + if (correctAnswerCount >= this.round) + { + return $@"[UPDATE_PROGRESS] +{GroupChatExtension.TERMINATE}"; + } + else + { + return $@"[UPDATE_PROGRESS] +the number of resolved question is {correctAnswerCount} +teacher, please create the next math question"; + } + } + + + [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] + public async Task OpenAIAgentMathChatTestAsync() + { + var key = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY") ?? throw new ArgumentException("AZURE_OPENAI_API_KEY is not set"); + var endPoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new ArgumentException("AZURE_OPENAI_ENDPOINT is not set"); + var deployName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOY_NAME") ?? 
throw new ArgumentException("AZURE_OPENAI_DEPLOY_NAME is not set"); + var openaiClient = new AzureOpenAIClient(new Uri(endPoint), new Azure.AzureKeyCredential(key)); + var teacher = await CreateTeacherAgentAsync(openaiClient, deployName); + var student = await CreateStudentAssistantAgentAsync(openaiClient, deployName); + + var adminFunctionMiddleware = new FunctionCallMiddleware( + functions: [this.UpdateProgressFunctionContract], + functionMap: new Dictionary>> + { + { this.UpdateProgressFunctionContract.Name, this.UpdateProgressWrapper }, + }); + var admin = new OpenAIChatAgent( + chatClient: openaiClient.GetChatClient(deployName), + name: "Admin", + systemMessage: $@"You are admin. You update progress after each question is answered.") + .RegisterMessageConnector() + .RegisterStreamingMiddleware(adminFunctionMiddleware) + .RegisterMiddleware(Print); + + var groupAdmin = new OpenAIChatAgent( + chatClient: openaiClient.GetChatClient(deployName), + name: "GroupAdmin", + systemMessage: "You are group admin. You manage the group chat.") + .RegisterMessageConnector() + .RegisterMiddleware(Print); + await RunMathChatAsync(teacher, student, admin, groupAdmin); + } + + private async Task CreateTeacherAgentAsync(OpenAIClient client, string model) + { + var functionCallMiddleware = new FunctionCallMiddleware( + functions: [this.CreateMathQuestionFunctionContract, this.AnswerIsCorrectFunctionContract], + functionMap: new Dictionary>> + { + { this.CreateMathQuestionFunctionContract.Name!, this.CreateMathQuestionWrapper }, + { this.AnswerIsCorrectFunctionContract.Name!, this.AnswerIsCorrectWrapper }, + }); + + var teacher = new OpenAIChatAgent( + chatClient: client.GetChatClient(model), + name: "Teacher", + systemMessage: @"You are a preschool math teacher. +You create math question and ask student to answer it. +Then you check if the answer is correct. 
+If the answer is wrong, you ask student to fix it") + .RegisterMessageConnector() + .RegisterStreamingMiddleware(functionCallMiddleware) + .RegisterMiddleware(Print); + + return teacher; + } + + private async Task CreateStudentAssistantAgentAsync(OpenAIClient client, string model) + { + var functionCallMiddleware = new FunctionCallMiddleware( + functions: [this.AnswerQuestionFunctionContract], + functionMap: new Dictionary>> + { + { this.AnswerQuestionFunctionContract.Name!, this.AnswerQuestionWrapper }, + }); + var student = new OpenAIChatAgent( + chatClient: client.GetChatClient(model), + name: "Student", + systemMessage: @"You are a student. You answer math question from teacher.") + .RegisterMessageConnector() + .RegisterStreamingMiddleware(functionCallMiddleware) + .RegisterMiddleware(Print); + + return student; + } + + private async Task RunMathChatAsync(IAgent teacher, IAgent student, IAgent admin, IAgent groupAdmin) + { + var teacher2Student = Transition.Create(teacher, student); + var student2Teacher = Transition.Create(student, teacher); + var teacher2Admin = Transition.Create(teacher, admin); + var admin2Teacher = Transition.Create(admin, teacher); + var workflow = new Graph( + [ + teacher2Student, + student2Teacher, + teacher2Admin, + admin2Teacher, + ]); + var group = new GroupChat( + workflow: workflow, + members: [ + admin, + teacher, + student, + ], + admin: groupAdmin); + + var groupChatManager = new GroupChatManager(group); + var chatHistory = await admin.InitiateChatAsync(groupChatManager, "teacher, create question", maxRound: 50); + + chatHistory.Where(msg => msg.From == teacher.Name && msg.GetContent()?.Contains("[MATH_QUESTION]") is true) + .Count() + .Should().BeGreaterThanOrEqualTo(this.round); + + chatHistory.Where(msg => msg.From == student.Name && msg.GetContent()?.Contains("[MATH_ANSWER]") is true) + .Count() + .Should().BeGreaterThanOrEqualTo(this.round); + + chatHistory.Where(msg => msg.From == teacher.Name && 
msg.GetContent()?.Contains("[ANSWER_IS_CORRECT]") is true) + .Count() + .Should().BeGreaterThanOrEqualTo(this.round); + + // check if there's terminate chat message from admin + chatHistory.Where(msg => msg.From == admin.Name && msg.IsGroupChatTerminateMessage()) + .Count() + .Should().Be(1); + } + } +} diff --git a/dotnet/test/AutoGen.OpenAI.Tests/OpenAIChatAgentTest.cs b/dotnet/test/AutoGen.OpenAI.Tests/OpenAIChatAgentTest.cs new file mode 100644 index 000000000000..bcbfee6e208a --- /dev/null +++ b/dotnet/test/AutoGen.OpenAI.Tests/OpenAIChatAgentTest.cs @@ -0,0 +1,256 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// OpenAIChatAgentTest.cs + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using AutoGen.OpenAI.Extension; +using AutoGen.Tests; +using Azure.AI.OpenAI; +using FluentAssertions; +using OpenAI; +using OpenAI.Chat; + +namespace AutoGen.OpenAI.Tests; + +public partial class OpenAIChatAgentTest +{ + /// + /// Get the weather for a location. + /// + /// location + /// + [Function] + public async Task GetWeatherAsync(string location) + { + return $"The weather in {location} is sunny."; + } + + [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] + public async Task BasicConversationTestAsync() + { + var deployName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOY_NAME") ?? 
throw new Exception("Please set AZURE_OPENAI_DEPLOY_NAME environment variable."); + var openaiClient = CreateOpenAIClientFromAzureOpenAI(); + var openAIChatAgent = new OpenAIChatAgent( + chatClient: openaiClient.GetChatClient(deployName), + name: "assistant"); + + // By default, OpenAIChatClient supports the following message types + // - IMessage + var chatMessageContent = MessageEnvelope.Create(new UserChatMessage("Hello")); + var reply = await openAIChatAgent.SendAsync(chatMessageContent); + + reply.Should().BeOfType>(); + reply.As>().From.Should().Be("assistant"); + reply.As>().Content.Role.Should().Be(ChatMessageRole.Assistant); + reply.As>().Content.Usage.TotalTokens.Should().BeGreaterThan(0); + + // test streaming + var streamingReply = openAIChatAgent.GenerateStreamingReplyAsync(new[] { chatMessageContent }); + + await foreach (var streamingMessage in streamingReply) + { + streamingMessage.Should().BeOfType>(); + streamingMessage.As>().From.Should().Be("assistant"); + } + } + + [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] + public async Task OpenAIChatMessageContentConnectorTestAsync() + { + var deployName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOY_NAME") ?? 
throw new Exception("Please set AZURE_OPENAI_DEPLOY_NAME environment variable."); + var openaiClient = CreateOpenAIClientFromAzureOpenAI(); + var openAIChatAgent = new OpenAIChatAgent( + chatClient: openaiClient.GetChatClient(deployName), + name: "assistant"); + + MiddlewareStreamingAgent assistant = openAIChatAgent + .RegisterMessageConnector(); + + var messages = new IMessage[] + { + MessageEnvelope.Create(new UserChatMessage("Hello")), + new TextMessage(Role.Assistant, "Hello", from: "user"), + new MultiModalMessage(Role.Assistant, + [ + new TextMessage(Role.Assistant, "Hello", from: "user"), + ], + from: "user"), + }; + + foreach (var message in messages) + { + var reply = await assistant.SendAsync(message); + + reply.Should().BeOfType(); + reply.As().From.Should().Be("assistant"); + } + + // test streaming + foreach (var message in messages) + { + var reply = assistant.GenerateStreamingReplyAsync([message]); + + await foreach (var streamingMessage in reply) + { + streamingMessage.Should().BeOfType(); + streamingMessage.As().From.Should().Be("assistant"); + } + } + } + + [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] + public async Task OpenAIChatAgentToolCallTestAsync() + { + var deployName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOY_NAME") ?? 
throw new Exception("Please set AZURE_OPENAI_DEPLOY_NAME environment variable."); + var openaiClient = CreateOpenAIClientFromAzureOpenAI(); + var openAIChatAgent = new OpenAIChatAgent( + chatClient: openaiClient.GetChatClient(deployName), + name: "assistant"); + + var functionCallMiddleware = new FunctionCallMiddleware( + functions: [this.GetWeatherAsyncFunctionContract]); + MiddlewareStreamingAgent assistant = openAIChatAgent + .RegisterMessageConnector(); + + assistant.StreamingMiddlewares.Count().Should().Be(1); + var functionCallAgent = assistant + .RegisterStreamingMiddleware(functionCallMiddleware); + + var question = "What's the weather in Seattle"; + var messages = new IMessage[] + { + MessageEnvelope.Create(new UserChatMessage(question)), + new TextMessage(Role.Assistant, question, from: "user"), + new MultiModalMessage(Role.Assistant, + [ + new TextMessage(Role.Assistant, question, from: "user"), + ], + from: "user"), + }; + + foreach (var message in messages) + { + var reply = await functionCallAgent.SendAsync(message); + + reply.Should().BeOfType(); + reply.As().From.Should().Be("assistant"); + reply.As().ToolCalls.Count().Should().Be(1); + reply.As().ToolCalls.First().FunctionName.Should().Be(this.GetWeatherAsyncFunctionContract.Name); + } + + // test streaming + foreach (var message in messages) + { + var reply = functionCallAgent.GenerateStreamingReplyAsync([message]); + ToolCallMessage? 
toolCallMessage = null; + await foreach (var streamingMessage in reply) + { + if (streamingMessage is ToolCallMessage finalMessage) + { + toolCallMessage = finalMessage; + break; + } + + streamingMessage.Should().BeOfType(); + streamingMessage.As().From.Should().Be("assistant"); + } + + toolCallMessage.Should().NotBeNull(); + toolCallMessage!.From.Should().Be("assistant"); + toolCallMessage.ToolCalls.Count().Should().Be(1); + toolCallMessage.ToolCalls.First().FunctionName.Should().Be(this.GetWeatherAsyncFunctionContract.Name); + } + } + + [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] + public async Task OpenAIChatAgentToolCallInvokingTestAsync() + { + var deployName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOY_NAME") ?? throw new Exception("Please set AZURE_OPENAI_DEPLOY_NAME environment variable."); + var openaiClient = CreateOpenAIClientFromAzureOpenAI(); + var openAIChatAgent = new OpenAIChatAgent( + chatClient: openaiClient.GetChatClient(deployName), + name: "assistant"); + + var functionCallMiddleware = new FunctionCallMiddleware( + functions: [this.GetWeatherAsyncFunctionContract], + functionMap: new Dictionary>> { { this.GetWeatherAsyncFunctionContract.Name!, this.GetWeatherAsyncWrapper } }); + MiddlewareStreamingAgent assistant = openAIChatAgent + .RegisterMessageConnector(); + + var functionCallAgent = assistant + .RegisterStreamingMiddleware(functionCallMiddleware); + + var question = "What's the weather in Seattle"; + var messages = new IMessage[] + { + MessageEnvelope.Create(new UserChatMessage(question)), + new TextMessage(Role.Assistant, question, from: "user"), + new MultiModalMessage(Role.Assistant, + [ + new TextMessage(Role.Assistant, question, from: "user"), + ], + from: "user"), + }; + + foreach (var message in messages) + { + var reply = await functionCallAgent.SendAsync(message); + + reply.Should().BeOfType(); + reply.From.Should().Be("assistant"); + 
reply.GetToolCalls()!.Count().Should().Be(1); + reply.GetToolCalls()!.First().FunctionName.Should().Be(this.GetWeatherAsyncFunctionContract.Name); + reply.GetContent()!.ToLower().Should().Contain("seattle"); + } + + // test streaming + foreach (var message in messages) + { + var reply = functionCallAgent.GenerateStreamingReplyAsync([message]); + await foreach (var streamingMessage in reply) + { + if (streamingMessage is not IMessage) + { + streamingMessage.Should().BeOfType(); + streamingMessage.As().From.Should().Be("assistant"); + } + else + { + streamingMessage.Should().BeOfType(); + streamingMessage.As().GetContent()!.ToLower().Should().Contain("seattle"); + } + } + } + } + + [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] + public async Task ItCreateOpenAIChatAgentWithChatCompletionOptionAsync() + { + var deployName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOY_NAME") ?? throw new Exception("Please set AZURE_OPENAI_DEPLOY_NAME environment variable."); + var openaiClient = CreateOpenAIClientFromAzureOpenAI(); + var options = new ChatCompletionOptions() + { + Temperature = 0.7f, + MaxTokens = 1, + }; + + var openAIChatAgent = new OpenAIChatAgent( + chatClient: openaiClient.GetChatClient(deployName), + name: "assistant", + options: options) + .RegisterMessageConnector(); + + var respond = await openAIChatAgent.SendAsync("hello"); + respond.GetContent()?.Should().NotBeNullOrEmpty(); + } + + + private OpenAIClient CreateOpenAIClientFromAzureOpenAI() + { + var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new Exception("Please set AZURE_OPENAI_ENDPOINT environment variable."); + var key = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY") ?? 
throw new Exception("Please set AZURE_OPENAI_API_KEY environment variable."); + return new AzureOpenAIClient(new Uri(endpoint), new Azure.AzureKeyCredential(key)); + } +} diff --git a/dotnet/test/AutoGen.OpenAI.Tests/OpenAIMessageTests.cs b/dotnet/test/AutoGen.OpenAI.Tests/OpenAIMessageTests.cs new file mode 100644 index 000000000000..a05f440a17b7 --- /dev/null +++ b/dotnet/test/AutoGen.OpenAI.Tests/OpenAIMessageTests.cs @@ -0,0 +1,692 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// OpenAIMessageTests.cs + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; +using System.Text.Json; +using System.Threading.Tasks; +using ApprovalTests; +using ApprovalTests.Namers; +using ApprovalTests.Reporters; +using AutoGen.Tests; +using FluentAssertions; +using OpenAI.Chat; +using Xunit; + +namespace AutoGen.OpenAI.Tests; + +public class OpenAIMessageTests +{ + private readonly JsonSerializerOptions jsonSerializerOptions = new JsonSerializerOptions + { + WriteIndented = true, + IgnoreReadOnlyProperties = false, + }; + + [Fact] + [UseReporter(typeof(DiffReporter))] + [UseApprovalSubdirectory("ApprovalTests")] + public void BasicMessageTest() + { + IMessage[] messages = [ + new TextMessage(Role.System, "You are a helpful AI assistant"), + new TextMessage(Role.User, "Hello", "user"), + new TextMessage(Role.Assistant, "How can I help you?", from: "assistant"), + new ImageMessage(Role.User, "https://example.com/image.png", "user"), + new MultiModalMessage(Role.Assistant, + [ + new TextMessage(Role.User, "Hello", "user"), + new ImageMessage(Role.User, "https://example.com/image.png", "user"), + ], "user"), + new ToolCallMessage("test", "test", "assistant"), + new ToolCallResultMessage("result", "test", "test", "user"), + new ToolCallResultMessage( + [ + new ToolCall("result", "test", "test"), + new ToolCall("result", "test", "test"), + ], "user"), + new ToolCallMessage( + [ + new ToolCall("test", "test"), + new 
ToolCall("test", "test"), + ], "assistant"), + new AggregateMessage( + message1: new ToolCallMessage("test", "test", "assistant"), + message2: new ToolCallResultMessage("result", "test", "test", "assistant"), "assistant"), + ]; + var openaiMessageConnectorMiddleware = new OpenAIChatRequestMessageConnector(); + var agent = new EchoAgent("assistant"); + + var oaiMessages = messages.Select(m => (m, openaiMessageConnectorMiddleware.ProcessIncomingMessages(agent, [m]))); + VerifyOAIMessages(oaiMessages); + } + + [Fact] + public async Task ItProcessUserTextMessageAsync() + { + var middleware = new OpenAIChatRequestMessageConnector(); + var agent = new EchoAgent("assistant") + .RegisterMiddleware(async (msgs, _, innerAgent, _) => + { + var innerMessage = msgs.Last(); + innerMessage!.Should().BeOfType>(); + var chatRequestMessage = (UserChatMessage)((MessageEnvelope)innerMessage!).Content; + chatRequestMessage.Content.First().Text.Should().Be("Hello"); + chatRequestMessage.ParticipantName.Should().Be("user"); + return await innerAgent.GenerateReplyAsync(msgs); + }) + .RegisterMiddleware(middleware); + + // user message + IMessage message = new TextMessage(Role.User, "Hello", "user"); + await agent.GenerateReplyAsync([message]); + } + + [Fact] + public async Task ItShortcutChatRequestMessageAsync() + { + var middleware = new OpenAIChatRequestMessageConnector(); + var agent = new EchoAgent("assistant") + .RegisterMiddleware(async (msgs, _, innerAgent, _) => + { + var innerMessage = msgs.Last(); + innerMessage!.Should().BeOfType>(); + + var chatRequestMessage = (UserChatMessage)((MessageEnvelope)innerMessage!).Content; + chatRequestMessage.Content.First().Text.Should().Be("hello"); + return await innerAgent.GenerateReplyAsync(msgs); + }) + .RegisterMiddleware(middleware); + + // user message + var userMessage = new UserChatMessage("hello"); + var chatRequestMessage = MessageEnvelope.Create(userMessage); + await agent.GenerateReplyAsync([chatRequestMessage]); + } + + [Fact] + 
public async Task ItShortcutMessageWhenStrictModelIsFalseAsync() + { + var middleware = new OpenAIChatRequestMessageConnector(); + var agent = new EchoAgent("assistant") + .RegisterMiddleware(async (msgs, _, innerAgent, _) => + { + var innerMessage = msgs.Last(); + innerMessage!.Should().BeOfType>(); + + var chatRequestMessage = ((MessageEnvelope)innerMessage!).Content; + chatRequestMessage.Should().Be("hello"); + return await innerAgent.GenerateReplyAsync(msgs); + }) + .RegisterMiddleware(middleware); + + // user message + var userMessage = "hello"; + var chatRequestMessage = MessageEnvelope.Create(userMessage); + await agent.GenerateReplyAsync([chatRequestMessage]); + } + + [Fact] + public async Task ItThrowExceptionWhenStrictModeIsTrueAsync() + { + var middleware = new OpenAIChatRequestMessageConnector(true); + var agent = new EchoAgent("assistant") + .RegisterMiddleware(middleware); + + // user message + var userMessage = "hello"; + var chatRequestMessage = MessageEnvelope.Create(userMessage); + Func action = async () => await agent.GenerateReplyAsync([chatRequestMessage]); + + await action.Should().ThrowAsync().WithMessage("Invalid message type: MessageEnvelope`1"); + } + + [Fact] + public async Task ItProcessAssistantTextMessageAsync() + { + var middleware = new OpenAIChatRequestMessageConnector(); + var agent = new EchoAgent("assistant") + .RegisterMiddleware(async (msgs, _, innerAgent, _) => + { + var innerMessage = msgs.Last(); + innerMessage!.Should().BeOfType>(); + var chatRequestMessage = (AssistantChatMessage)((MessageEnvelope)innerMessage!).Content; + chatRequestMessage.Content.First().Text.Should().Be("How can I help you?"); + chatRequestMessage.ParticipantName.Should().Be("assistant"); + return await innerAgent.GenerateReplyAsync(msgs); + }) + .RegisterMiddleware(middleware); + + // assistant message + IMessage message = new TextMessage(Role.Assistant, "How can I help you?", "assistant"); + await agent.GenerateReplyAsync([message]); + } + + [Fact] + 
public async Task ItProcessSystemTextMessageAsync() + { + var middleware = new OpenAIChatRequestMessageConnector(); + var agent = new EchoAgent("assistant") + .RegisterMiddleware(async (msgs, _, innerAgent, _) => + { + var innerMessage = msgs.Last(); + innerMessage!.Should().BeOfType>(); + var chatRequestMessage = (SystemChatMessage)((MessageEnvelope)innerMessage!).Content; + chatRequestMessage.Content.First().Text.Should().Be("You are a helpful AI assistant"); + return await innerAgent.GenerateReplyAsync(msgs); + }) + .RegisterMiddleware(middleware); + + // system message + IMessage message = new TextMessage(Role.System, "You are a helpful AI assistant"); + await agent.GenerateReplyAsync([message]); + } + + [Fact] + public async Task ItProcessImageMessageAsync() + { + var middleware = new OpenAIChatRequestMessageConnector(); + var agent = new EchoAgent("assistant") + .RegisterMiddleware(async (msgs, _, innerAgent, _) => + { + var innerMessage = msgs.Last(); + innerMessage!.Should().BeOfType>(); + var chatRequestMessage = (UserChatMessage)((MessageEnvelope)innerMessage!).Content; + chatRequestMessage.ParticipantName.Should().Be("user"); + chatRequestMessage.Content.Count().Should().Be(1); + chatRequestMessage.Content.First().Kind.Should().Be(ChatMessageContentPartKind.Image); + return await innerAgent.GenerateReplyAsync(msgs); + }) + .RegisterMiddleware(middleware); + + // user message + IMessage message = new ImageMessage(Role.User, "https://example.com/image.png", "user"); + await agent.GenerateReplyAsync([message]); + } + + [Fact] + public async Task ItThrowExceptionWhenProcessingImageMessageFromSelfAndStrictModeIsTrueAsync() + { + var middleware = new OpenAIChatRequestMessageConnector(true); + var agent = new EchoAgent("assistant") + .RegisterMiddleware(middleware); + + var imageMessage = new ImageMessage(Role.Assistant, "https://example.com/image.png", "assistant"); + Func action = async () => await agent.GenerateReplyAsync([imageMessage]); + + await 
action.Should().ThrowAsync().WithMessage("Invalid message type: ImageMessage"); + } + + [Fact] + public async Task ItProcessMultiModalMessageAsync() + { + var middleware = new OpenAIChatRequestMessageConnector(); + var agent = new EchoAgent("assistant") + .RegisterMiddleware(async (msgs, _, innerAgent, _) => + { + var innerMessage = msgs.Last(); + innerMessage!.Should().BeOfType>(); + var chatRequestMessage = (UserChatMessage)((MessageEnvelope)innerMessage!).Content; + chatRequestMessage.ParticipantName.Should().Be("user"); + chatRequestMessage.Content.Count().Should().Be(2); + chatRequestMessage.Content.First().Kind.Should().Be(ChatMessageContentPartKind.Text); + chatRequestMessage.Content.Last().Kind.Should().Be(ChatMessageContentPartKind.Image); + return await innerAgent.GenerateReplyAsync(msgs); + }) + .RegisterMiddleware(middleware); + + // user message + IMessage message = new MultiModalMessage( + Role.User, + [ + new TextMessage(Role.User, "Hello", "user"), + new ImageMessage(Role.User, "https://example.com/image.png", "user"), + ], "user"); + await agent.GenerateReplyAsync([message]); + } + + [Fact] + public async Task ItThrowExceptionWhenProcessingMultiModalMessageFromSelfAndStrictModeIsTrueAsync() + { + var middleware = new OpenAIChatRequestMessageConnector(true); + var agent = new EchoAgent("assistant") + .RegisterMiddleware(middleware); + + var multiModalMessage = new MultiModalMessage( + Role.Assistant, + [ + new TextMessage(Role.User, "Hello", "assistant"), + new ImageMessage(Role.User, "https://example.com/image.png", "assistant"), + ], "assistant"); + + Func action = async () => await agent.GenerateReplyAsync([multiModalMessage]); + + await action.Should().ThrowAsync().WithMessage("Invalid message type: MultiModalMessage"); + } + + [Fact] + public async Task ItProcessToolCallMessageAsync() + { + var middleware = new OpenAIChatRequestMessageConnector(); + var agent = new EchoAgent("assistant") + .RegisterMiddleware(async (msgs, _, innerAgent, _) => + 
{ + var innerMessage = msgs.Last(); + innerMessage!.Should().BeOfType>(); + var chatRequestMessage = (AssistantChatMessage)((MessageEnvelope)innerMessage!).Content; + chatRequestMessage.ParticipantName.Should().Be("assistant"); + chatRequestMessage.ToolCalls.Count().Should().Be(1); + chatRequestMessage.Content.First().Text.Should().Be("textContent"); + chatRequestMessage.ToolCalls.First().Should().BeOfType(); + var functionToolCall = (ChatToolCall)chatRequestMessage.ToolCalls.First(); + functionToolCall.FunctionName.Should().Be("test"); + functionToolCall.Id.Should().Be("test"); + functionToolCall.FunctionArguments.Should().Be("test"); + return await innerAgent.GenerateReplyAsync(msgs); + }) + .RegisterMiddleware(middleware); + + // user message + IMessage message = new ToolCallMessage("test", "test", "assistant") + { + Content = "textContent", + }; + await agent.GenerateReplyAsync([message]); + } + + [Fact] + public async Task ItProcessParallelToolCallMessageAsync() + { + var middleware = new OpenAIChatRequestMessageConnector(); + var agent = new EchoAgent("assistant") + .RegisterMiddleware(async (msgs, _, innerAgent, _) => + { + var innerMessage = msgs.Last(); + innerMessage!.Should().BeOfType>(); + var chatRequestMessage = (AssistantChatMessage)((MessageEnvelope)innerMessage!).Content; + chatRequestMessage.Content.Should().BeNullOrEmpty(); + chatRequestMessage.ParticipantName.Should().Be("assistant"); + chatRequestMessage.ToolCalls.Count().Should().Be(2); + for (int i = 0; i < chatRequestMessage.ToolCalls.Count(); i++) + { + chatRequestMessage.ToolCalls.ElementAt(i).Should().BeOfType(); + var functionToolCall = (ChatToolCall)chatRequestMessage.ToolCalls.ElementAt(i); + functionToolCall.FunctionName.Should().Be("test"); + functionToolCall.Id.Should().Be($"test_{i}"); + functionToolCall.FunctionArguments.Should().Be("test"); + } + return await innerAgent.GenerateReplyAsync(msgs); + }) + .RegisterMiddleware(middleware); + + // user message + var toolCalls = new[] + 
{ + new ToolCall("test", "test"), + new ToolCall("test", "test"), + }; + IMessage message = new ToolCallMessage(toolCalls, "assistant"); + await agent.GenerateReplyAsync([message]); + } + + [Fact] + public async Task ItThrowExceptionWhenProcessingToolCallMessageFromUserAndStrictModeIsTrueAsync() + { + var middleware = new OpenAIChatRequestMessageConnector(strictMode: true); + var agent = new EchoAgent("assistant") + .RegisterMiddleware(middleware); + + var toolCallMessage = new ToolCallMessage("test", "test", "user"); + Func action = async () => await agent.GenerateReplyAsync([toolCallMessage]); + await action.Should().ThrowAsync().WithMessage("Invalid message type: ToolCallMessage"); + } + + [Fact] + public async Task ItProcessToolCallResultMessageAsync() + { + var middleware = new OpenAIChatRequestMessageConnector(); + var agent = new EchoAgent("assistant") + .RegisterMiddleware(async (msgs, _, innerAgent, _) => + { + var innerMessage = msgs.Last(); + innerMessage!.Should().BeOfType>(); + var chatRequestMessage = (ToolChatMessage)((MessageEnvelope)innerMessage!).Content; + chatRequestMessage.Content.First().Text.Should().Be("result"); + chatRequestMessage.ToolCallId.Should().Be("test"); + + return await innerAgent.GenerateReplyAsync(msgs); + }) + .RegisterMiddleware(middleware); + + // user message + IMessage message = new ToolCallResultMessage("result", "test", "test", "user"); + await agent.GenerateReplyAsync([message]); + } + + [Fact] + public async Task ItProcessParallelToolCallResultMessageAsync() + { + var middleware = new OpenAIChatRequestMessageConnector(); + var agent = new EchoAgent("assistant") + .RegisterMiddleware(async (msgs, _, innerAgent, _) => + { + msgs.Count().Should().Be(2); + + for (int i = 0; i < msgs.Count(); i++) + { + var innerMessage = msgs.ElementAt(i); + innerMessage!.Should().BeOfType>(); + var chatRequestMessage = (ToolChatMessage)((MessageEnvelope)innerMessage!).Content; + 
chatRequestMessage.Content.First().Text.Should().Be("result"); + chatRequestMessage.ToolCallId.Should().Be($"test_{i}"); + } + return await innerAgent.GenerateReplyAsync(msgs); + }) + .RegisterMiddleware(middleware); + + // user message + var toolCalls = new[] + { + new ToolCall("test", "test", "result"), + new ToolCall("test", "test", "result"), + }; + IMessage message = new ToolCallResultMessage(toolCalls, "user"); + await agent.GenerateReplyAsync([message]); + } + + [Fact] + public async Task ItProcessFunctionCallMiddlewareMessageFromUserAsync() + { + var middleware = new OpenAIChatRequestMessageConnector(); + var agent = new EchoAgent("assistant") + .RegisterMiddleware(async (msgs, _, innerAgent, _) => + { + msgs.Count().Should().Be(1); + var innerMessage = msgs.Last(); + innerMessage!.Should().BeOfType>(); + var chatRequestMessage = (UserChatMessage)((MessageEnvelope)innerMessage!).Content; + chatRequestMessage.Content.First().Text.Should().Be("result"); + chatRequestMessage.ParticipantName.Should().Be("user"); + return await innerAgent.GenerateReplyAsync(msgs); + }) + .RegisterMiddleware(middleware); + + // user message + var toolCallMessage = new ToolCallMessage("test", "test", "user"); + var toolCallResultMessage = new ToolCallResultMessage("result", "test", "test", "user"); + var aggregateMessage = new AggregateMessage(toolCallMessage, toolCallResultMessage, "user"); + await agent.GenerateReplyAsync([aggregateMessage]); + } + + [Fact] + public async Task ItProcessFunctionCallMiddlewareMessageFromAssistantAsync() + { + var middleware = new OpenAIChatRequestMessageConnector(); + var agent = new EchoAgent("assistant") + .RegisterMiddleware(async (msgs, _, innerAgent, _) => + { + msgs.Count().Should().Be(2); + var innerMessage = msgs.Last(); + innerMessage!.Should().BeOfType>(); + var chatRequestMessage = (ToolChatMessage)((MessageEnvelope)innerMessage!).Content; + chatRequestMessage.Content.First().Text.Should().Be("result"); + 
chatRequestMessage.ToolCallId.Should().Be("test"); + + var toolCallMessage = msgs.First(); + toolCallMessage!.Should().BeOfType>(); + var toolCallRequestMessage = (AssistantChatMessage)((MessageEnvelope)toolCallMessage!).Content; + toolCallRequestMessage.Content.Should().BeNullOrEmpty(); + toolCallRequestMessage.ToolCalls.Count().Should().Be(1); + toolCallRequestMessage.ToolCalls.First().Should().BeOfType(); + var functionToolCall = (ChatToolCall)toolCallRequestMessage.ToolCalls.First(); + functionToolCall.FunctionName.Should().Be("test"); + functionToolCall.Id.Should().Be("test"); + functionToolCall.FunctionArguments.Should().Be("test"); + return await innerAgent.GenerateReplyAsync(msgs); + }) + .RegisterMiddleware(middleware); + + // user message + var toolCallMessage = new ToolCallMessage("test", "test", "assistant"); + var toolCallResultMessage = new ToolCallResultMessage("result", "test", "test", "assistant"); + var aggregateMessage = new ToolCallAggregateMessage(toolCallMessage, toolCallResultMessage, "assistant"); + await agent.GenerateReplyAsync([aggregateMessage]); + } + + [Fact] + public async Task ItProcessParallelFunctionCallMiddlewareMessageFromAssistantAsync() + { + var middleware = new OpenAIChatRequestMessageConnector(); + var agent = new EchoAgent("assistant") + .RegisterMiddleware(async (msgs, _, innerAgent, _) => + { + msgs.Count().Should().Be(3); + var toolCallMessage = msgs.First(); + toolCallMessage!.Should().BeOfType>(); + var toolCallRequestMessage = (AssistantChatMessage)((MessageEnvelope)toolCallMessage!).Content; + toolCallRequestMessage.Content.Should().BeNullOrEmpty(); + toolCallRequestMessage.ToolCalls.Count().Should().Be(2); + + for (int i = 0; i < toolCallRequestMessage.ToolCalls.Count(); i++) + { + toolCallRequestMessage.ToolCalls.ElementAt(i).Should().BeOfType(); + var functionToolCall = (ChatToolCall)toolCallRequestMessage.ToolCalls.ElementAt(i); + functionToolCall.FunctionName.Should().Be("test"); + 
functionToolCall.Id.Should().Be($"test_{i}"); + functionToolCall.FunctionArguments.Should().Be("test"); + } + + for (int i = 1; i < msgs.Count(); i++) + { + var toolCallResultMessage = msgs.ElementAt(i); + toolCallResultMessage!.Should().BeOfType>(); + var toolCallResultRequestMessage = (ToolChatMessage)((MessageEnvelope)toolCallResultMessage!).Content; + toolCallResultRequestMessage.Content.First().Text.Should().Be("result"); + toolCallResultRequestMessage.ToolCallId.Should().Be($"test_{i - 1}"); + } + + return await innerAgent.GenerateReplyAsync(msgs); + }) + .RegisterMiddleware(middleware); + + // user message + var toolCalls = new[] + { + new ToolCall("test", "test", "result"), + new ToolCall("test", "test", "result"), + }; + var toolCallMessage = new ToolCallMessage(toolCalls, "assistant"); + var toolCallResultMessage = new ToolCallResultMessage(toolCalls, "assistant"); + var aggregateMessage = new AggregateMessage(toolCallMessage, toolCallResultMessage, "assistant"); + await agent.GenerateReplyAsync([aggregateMessage]); + } + + [Fact] + public async Task ItReturnOriginalMessageWhenStrictModeIsFalseAsync() + { + var middleware = new OpenAIChatRequestMessageConnector(); + var agent = new EchoAgent("assistant") + .RegisterMiddleware(middleware); + + // text message + var textMessage = "hello"; + var messageToSend = MessageEnvelope.Create(textMessage); + + var message = await agent.GenerateReplyAsync([messageToSend]); + message.Should().BeOfType>(); + } + + [Fact] + public async Task ItThrowInvalidOperationExceptionWhenStrictModeIsTrueAsync() + { + var middleware = new OpenAIChatRequestMessageConnector(true); + var agent = new EchoAgent("assistant") + .RegisterMiddleware(middleware); + + // text message + var textMessage = new UserChatMessage("hello"); + var messageToSend = MessageEnvelope.Create(textMessage); + Func action = async () => await agent.GenerateReplyAsync([messageToSend]); + + await action.Should().ThrowAsync().WithMessage("Invalid return message 
type MessageEnvelope`1"); + } + + [Fact] + public void ToOpenAIChatRequestMessageShortCircuitTest() + { + var agent = new EchoAgent("assistant"); + var middleware = new OpenAIChatRequestMessageConnector(); +#pragma warning disable CS0618 // Type or member is obsolete + ChatMessage[] messages = + [ + new UserChatMessage("Hello"), + new AssistantChatMessage("How can I help you?"), + new SystemChatMessage("You are a helpful AI assistant"), + new FunctionChatMessage("functionName", "result"), + new ToolChatMessage("test", "test"), + ]; +#pragma warning restore CS0618 // Type or member is obsolete + + foreach (var oaiMessage in messages) + { + IMessage message = new MessageEnvelope(oaiMessage); + var oaiMessages = middleware.ProcessIncomingMessages(agent, [message]); + oaiMessages.Count().Should().Be(1); + //oaiMessages.First().Should().BeOfType>(); + if (oaiMessages.First() is IMessage chatRequestMessage) + { + chatRequestMessage.Content.Should().Be(oaiMessage); + } + else + { + // fail the test + Assert.True(false); + } + } + } + private void VerifyOAIMessages(IEnumerable<(IMessage, IEnumerable)> messages) + { + var jsonObjects = messages.Select(pair => + { + var (originalMessage, ms) = pair; + var objs = new List(); + foreach (var m in ms) + { + object? 
obj = null; + var chatRequestMessage = (m as IMessage)?.Content; + if (chatRequestMessage is UserChatMessage userMessage) + { + obj = new + { + Role = "user", + Content = userMessage.Content, + Name = userMessage.ParticipantName, + MultiModaItem = userMessage.Content?.Select(item => + { + return item switch + { + _ when item.Kind == ChatMessageContentPartKind.Image => new + { + Type = "Image", + ImageUrl = GetImageUrlFromContent(item), + } as object, + _ when item.Kind == ChatMessageContentPartKind.Text => new + { + Type = "Text", + Text = item.Text, + } as object, + _ => throw new System.NotImplementedException(), + }; + }), + }; + } + + if (chatRequestMessage is AssistantChatMessage assistantMessage) + { + obj = new + { + Role = "assistant", + Content = assistantMessage.Content, + Name = assistantMessage.ParticipantName, + TooCall = assistantMessage.ToolCalls.Select(tc => + { + return tc switch + { + ChatToolCall functionToolCall => new + { + Type = "Function", + Name = functionToolCall.FunctionName, + Arguments = functionToolCall.FunctionArguments, + Id = functionToolCall.Id, + } as object, + _ => throw new System.NotImplementedException(), + }; + }), + FunctionCallName = assistantMessage.FunctionCall?.FunctionName, + FunctionCallArguments = assistantMessage.FunctionCall?.FunctionArguments, + }; + } + + if (chatRequestMessage is SystemChatMessage systemMessage) + { + obj = new + { + Name = systemMessage.ParticipantName, + Role = "system", + Content = systemMessage.Content, + }; + } + +#pragma warning disable CS0618 // Type or member is obsolete + if (chatRequestMessage is FunctionChatMessage functionMessage) + { + obj = new + { + Role = "function", + Content = functionMessage.Content, + Name = functionMessage.FunctionName, + }; + } +#pragma warning restore CS0618 // Type or member is obsolete + + if (chatRequestMessage is ToolChatMessage toolCallMessage) + { + obj = new + { + Role = "tool", + Content = toolCallMessage.Content.First().Text, + ToolCallId = 
toolCallMessage.ToolCallId, + }; + } + + objs.Add(obj ?? throw new System.NotImplementedException()); + } + + return new + { + OriginalMessage = originalMessage.ToString(), + ConvertedMessages = objs, + }; + }); + + var json = JsonSerializer.Serialize(jsonObjects, this.jsonSerializerOptions); + Approvals.Verify(json); + } + + private object? GetImageUrlFromContent(ChatMessageContentPart content) + { + return content.ImageUri; + } + + private static T CreateInstance(params object[] args) + { + var type = typeof(T); + var instance = type.Assembly.CreateInstance( + type.FullName!, false, + BindingFlags.Instance | BindingFlags.NonPublic, + null, args, null, null); + return (T)instance!; + } +} diff --git a/dotnet/test/AutoGen.OpenAI.V1.Tests/AutoGen.OpenAI.V1.Tests.csproj b/dotnet/test/AutoGen.OpenAI.V1.Tests/AutoGen.OpenAI.V1.Tests.csproj index 74d7d7b0a16f..0be8c5200336 100644 --- a/dotnet/test/AutoGen.OpenAI.V1.Tests/AutoGen.OpenAI.V1.Tests.csproj +++ b/dotnet/test/AutoGen.OpenAI.V1.Tests/AutoGen.OpenAI.V1.Tests.csproj @@ -8,10 +8,9 @@ - + - - + diff --git a/dotnet/test/AutoGen.OpenAI.V1.Tests/GPTAgentTest.cs b/dotnet/test/AutoGen.OpenAI.V1.Tests/GPTAgentTest.cs new file mode 100644 index 000000000000..b8944d45d762 --- /dev/null +++ b/dotnet/test/AutoGen.OpenAI.V1.Tests/GPTAgentTest.cs @@ -0,0 +1,270 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// GPTAgentTest.cs + +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Threading.Tasks; +using AutoGen.OpenAI.V1.Extension; +using AutoGen.Tests; +using Azure.AI.OpenAI; +using FluentAssertions; +using Xunit.Abstractions; + +namespace AutoGen.OpenAI.V1.Tests; + +public partial class GPTAgentTest +{ + private ITestOutputHelper _output; + public GPTAgentTest(ITestOutputHelper output) + { + _output = output; + } + + private ILLMConfig CreateAzureOpenAIGPT35TurboConfig() + { + var key = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY") ?? 
throw new ArgumentException("AZURE_OPENAI_API_KEY is not set"); + var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new ArgumentException("AZURE_OPENAI_ENDPOINT is not set"); + var deployName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOY_NAME") ?? throw new ArgumentException("AZURE_OPENAI_DEPLOY_NAME is not set"); + return new AzureOpenAIConfig(endpoint, deployName, key); + } + + private ILLMConfig CreateOpenAIGPT4VisionConfig() + { + var key = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new ArgumentException("OPENAI_API_KEY is not set"); + return new OpenAIConfig(key, "gpt-4o-mini"); + } + + [Obsolete] + [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] + public async Task GPTAgentTestAsync() + { + var config = this.CreateAzureOpenAIGPT35TurboConfig(); + + var agent = new GPTAgent("gpt", "You are a helpful AI assistant", config); + + await UpperCaseTestAsync(agent); + await UpperCaseStreamingTestAsync(agent); + } + + [Obsolete] + [ApiKeyFact("OPENAI_API_KEY", "AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT")] + public async Task GPTAgentVisionTestAsync() + { + var visionConfig = this.CreateOpenAIGPT4VisionConfig(); + var visionAgent = new GPTAgent( + name: "gpt", + systemMessage: "You are a helpful AI assistant", + config: visionConfig, + temperature: 0); + + var gpt3Config = this.CreateAzureOpenAIGPT35TurboConfig(); + var gpt3Agent = new GPTAgent( + name: "gpt3", + systemMessage: "You are a helpful AI assistant, return highest label from conversation", + config: gpt3Config, + temperature: 0, + functions: new[] { this.GetHighestLabelFunctionContract.ToOpenAIFunctionDefinition() }, + functionMap: new Dictionary>> + { + { nameof(GetHighestLabel), this.GetHighestLabelWrapper }, + }); + + var imageUri = new Uri(@"https://microsoft.github.io/autogen/assets/images/level2algebra-659ba95286432d9945fc89e84d606797.png"); + var oaiMessage = new ChatRequestUserMessage( + new 
ChatMessageTextContentItem("which label has the highest inference cost"), + new ChatMessageImageContentItem(imageUri)); + var multiModalMessage = new MultiModalMessage(Role.User, + [ + new TextMessage(Role.User, "which label has the highest inference cost", from: "user"), + new ImageMessage(Role.User, imageUri, from: "user"), + ], + from: "user"); + + var imageMessage = new ImageMessage(Role.User, imageUri, from: "user"); + + string imagePath = Path.Combine("testData", "images", "square.png"); + ImageMessage imageMessageData; + using (var fs = new FileStream(imagePath, FileMode.Open, FileAccess.Read)) + { + var ms = new MemoryStream(); + await fs.CopyToAsync(ms); + ms.Seek(0, SeekOrigin.Begin); + var imageData = await BinaryData.FromStreamAsync(ms, "image/png"); + imageMessageData = new ImageMessage(Role.Assistant, imageData, from: "user"); + } + + IMessage[] messages = [ + MessageEnvelope.Create(oaiMessage), + multiModalMessage, + imageMessage, + imageMessageData + ]; + + foreach (var message in messages) + { + var response = await visionAgent.SendAsync(message); + response.From.Should().Be(visionAgent.Name); + + var labelResponse = await gpt3Agent.SendAsync(response); + labelResponse.From.Should().Be(gpt3Agent.Name); + labelResponse.GetToolCalls()!.First().FunctionName.Should().Be(nameof(GetHighestLabel)); + } + } + + [Obsolete] + [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] + public async Task GPTFunctionCallAgentTestAsync() + { + var config = this.CreateAzureOpenAIGPT35TurboConfig(); + var agentWithFunction = new GPTAgent("gpt", "You are a helpful AI assistant", config, 0, functions: new[] { this.EchoAsyncFunctionContract.ToOpenAIFunctionDefinition() }); + + await EchoFunctionCallTestAsync(agentWithFunction); + } + + [Obsolete] + [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] + public async Task GPTAgentFunctionCallSelfExecutionTestAsync() + { + var config = 
this.CreateAzureOpenAIGPT35TurboConfig(); + var agent = new GPTAgent( + name: "gpt", + systemMessage: "You are a helpful AI assistant", + config: config, + temperature: 0, + functions: new[] { this.EchoAsyncFunctionContract.ToOpenAIFunctionDefinition() }, + functionMap: new Dictionary>> + { + { nameof(EchoAsync), this.EchoAsyncWrapper }, + }); + + await EchoFunctionCallExecutionStreamingTestAsync(agent); + await EchoFunctionCallExecutionTestAsync(agent); + } + + /// + /// echo when asked. + /// + /// message to echo + [FunctionAttribute] + public async Task EchoAsync(string message) + { + return $"[ECHO] {message}"; + } + + /// + /// return the label name with hightest inference cost + /// + /// + /// + [FunctionAttribute] + public async Task GetHighestLabel(string labelName, string color) + { + return $"[HIGHEST_LABEL] {labelName} {color}"; + } + + private async Task EchoFunctionCallTestAsync(IAgent agent) + { + //var message = new TextMessage(Role.System, "You are a helpful AI assistant that call echo function"); + var helloWorld = new TextMessage(Role.User, "echo Hello world"); + + var reply = await agent.SendAsync(chatHistory: new[] { helloWorld }); + + reply.From.Should().Be(agent.Name); + reply.GetToolCalls()!.First().FunctionName.Should().Be(nameof(EchoAsync)); + } + + private async Task EchoFunctionCallExecutionTestAsync(IAgent agent) + { + //var message = new TextMessage(Role.System, "You are a helpful AI assistant that echo whatever user says"); + var helloWorld = new TextMessage(Role.User, "echo Hello world"); + + var reply = await agent.SendAsync(chatHistory: new[] { helloWorld }); + + reply.GetContent().Should().Be("[ECHO] Hello world"); + reply.From.Should().Be(agent.Name); + reply.Should().BeOfType(); + } + + private async Task EchoFunctionCallExecutionStreamingTestAsync(IStreamingAgent agent) + { + //var message = new TextMessage(Role.System, "You are a helpful AI assistant that echo whatever user says"); + var helloWorld = new 
TextMessage(Role.User, "echo Hello world"); + var option = new GenerateReplyOptions + { + Temperature = 0, + }; + var replyStream = agent.GenerateStreamingReplyAsync(messages: new[] { helloWorld }, option); + var answer = "[ECHO] Hello world"; + IMessage? finalReply = default; + await foreach (var reply in replyStream) + { + reply.From.Should().Be(agent.Name); + finalReply = reply; + } + + if (finalReply is ToolCallAggregateMessage aggregateMessage) + { + var toolCallResultMessage = aggregateMessage.Message2; + toolCallResultMessage.ToolCalls.First().Result.Should().Be(answer); + toolCallResultMessage.From.Should().Be(agent.Name); + toolCallResultMessage.ToolCalls.First().FunctionName.Should().Be(nameof(EchoAsync)); + } + else + { + throw new Exception("unexpected message type"); + } + } + + private async Task UpperCaseTestAsync(IAgent agent) + { + var message = new TextMessage(Role.User, "Please convert abcde to upper case."); + + var reply = await agent.SendAsync(chatHistory: new[] { message }); + + reply.GetContent().Should().Contain("ABCDE"); + reply.From.Should().Be(agent.Name); + } + + private async Task UpperCaseStreamingTestAsync(IStreamingAgent agent) + { + var message = new TextMessage(Role.User, "Please convert 'hello world' to upper case"); + var option = new GenerateReplyOptions + { + Temperature = 0, + }; + var replyStream = agent.GenerateStreamingReplyAsync(messages: new[] { message }, option); + var answer = "HELLO WORLD"; + TextMessage? 
finalReply = default; + await foreach (var reply in replyStream) + { + if (reply is TextMessageUpdate update) + { + update.From.Should().Be(agent.Name); + + if (finalReply is null) + { + finalReply = new TextMessage(update); + } + else + { + finalReply.Update(update); + } + + continue; + } + else if (reply is TextMessage textMessage) + { + finalReply = textMessage; + continue; + } + + throw new Exception("unexpected message type"); + } + + finalReply!.Content.Should().Contain(answer); + finalReply!.Role.Should().Be(Role.Assistant); + finalReply!.From.Should().Be(agent.Name); + } +} diff --git a/dotnet/test/AutoGen.SemanticKernel.Tests/AutoGen.SemanticKernel.Tests.csproj b/dotnet/test/AutoGen.SemanticKernel.Tests/AutoGen.SemanticKernel.Tests.csproj index 7f42b67da715..6ff942ea3ba2 100644 --- a/dotnet/test/AutoGen.SemanticKernel.Tests/AutoGen.SemanticKernel.Tests.csproj +++ b/dotnet/test/AutoGen.SemanticKernel.Tests/AutoGen.SemanticKernel.Tests.csproj @@ -10,10 +10,10 @@ + - - + diff --git a/dotnet/test/AutoGen.SemanticKernel.Tests/KernelFunctionMiddlewareTests.cs b/dotnet/test/AutoGen.SemanticKernel.Tests/KernelFunctionMiddlewareTests.cs index 3732cd0197bd..0dc2ea215ddf 100644 --- a/dotnet/test/AutoGen.SemanticKernel.Tests/KernelFunctionMiddlewareTests.cs +++ b/dotnet/test/AutoGen.SemanticKernel.Tests/KernelFunctionMiddlewareTests.cs @@ -2,9 +2,10 @@ // KernelFunctionMiddlewareTests.cs using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; +using AutoGen.OpenAI; +using AutoGen.OpenAI.Extension; using AutoGen.Tests; +using Azure; using Azure.AI.OpenAI; using FluentAssertions; using Microsoft.SemanticKernel; @@ -19,13 +20,15 @@ public async Task ItRegisterKernelFunctionMiddlewareFromTestPluginTests() var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new Exception("Please set AZURE_OPENAI_ENDPOINT environment variable."); var key = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY") ?? 
throw new Exception("Please set AZURE_OPENAI_API_KEY environment variable."); var deployName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOY_NAME") ?? throw new Exception("Please set AZURE_OPENAI_DEPLOY_NAME environment variable."); - var openaiClient = new OpenAIClient(new Uri(endpoint), new Azure.AzureKeyCredential(key)); + var openaiClient = new AzureOpenAIClient( + endpoint: new Uri(endpoint), + credential: new AzureKeyCredential(key)); var kernel = new Kernel(); var plugin = kernel.ImportPluginFromType(); var kernelFunctionMiddleware = new KernelPluginMiddleware(kernel, plugin); - var agent = new OpenAIChatAgent(openaiClient, "assistant", modelName: deployName) + var agent = new OpenAIChatAgent(openaiClient.GetChatClient(deployName), "assistant") .RegisterMessageConnector() .RegisterMiddleware(kernelFunctionMiddleware); @@ -63,7 +66,9 @@ public async Task ItRegisterKernelFunctionMiddlewareFromMethodTests() var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new Exception("Please set AZURE_OPENAI_ENDPOINT environment variable."); var key = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY") ?? throw new Exception("Please set AZURE_OPENAI_API_KEY environment variable."); var deployName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOY_NAME") ?? 
throw new Exception("Please set AZURE_OPENAI_DEPLOY_NAME environment variable."); - var openaiClient = new OpenAIClient(new Uri(endpoint), new Azure.AzureKeyCredential(key)); + var openaiClient = new AzureOpenAIClient( + endpoint: new Uri(endpoint), + credential: new AzureKeyCredential(key)); var kernel = new Kernel(); var getWeatherMethod = kernel.CreateFunctionFromMethod((string location) => $"The weather in {location} is sunny.", functionName: "GetWeather", description: "Get the weather for a location."); @@ -71,7 +76,7 @@ public async Task ItRegisterKernelFunctionMiddlewareFromMethodTests() var plugin = kernel.ImportPluginFromFunctions("plugin", [getWeatherMethod, createPersonObjectMethod]); var kernelFunctionMiddleware = new KernelPluginMiddleware(kernel, plugin); - var agent = new OpenAIChatAgent(openaiClient, "assistant", modelName: deployName) + var agent = new OpenAIChatAgent(chatClient: openaiClient.GetChatClient(deployName), "assistant") .RegisterMessageConnector() .RegisterMiddleware(kernelFunctionMiddleware); diff --git a/dotnet/test/AutoGen.SemanticKernel.Tests/SemanticKernelAgentTest.cs b/dotnet/test/AutoGen.SemanticKernel.Tests/SemanticKernelAgentTest.cs index d9dde62f1bd0..dc1b655a7a4f 100644 --- a/dotnet/test/AutoGen.SemanticKernel.Tests/SemanticKernelAgentTest.cs +++ b/dotnet/test/AutoGen.SemanticKernel.Tests/SemanticKernelAgentTest.cs @@ -34,8 +34,11 @@ public async Task BasicConversationTestAsync() var builder = Kernel.CreateBuilder() .AddAzureOpenAIChatCompletion(deploymentName, endpoint, key); + var kernel = builder.Build(); + kernel.GetRequiredService(); + var skAgent = new SemanticKernelAgent(kernel, "assistant"); var chatMessageContent = MessageEnvelope.Create(new ChatMessageContent(AuthorRole.Assistant, "Hello")); @@ -223,11 +226,10 @@ public async Task SkChatCompletionAgentPluginTestAsync() Kernel = kernel, Name = "assistant", Instructions = "You are a helpful AI assistant", - ExecutionSettings = - new OpenAIPromptExecutionSettings() - { 
- ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions - } + Arguments = new KernelArguments(new OpenAIPromptExecutionSettings() + { + ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions + }) }; var skAgent = new SemanticKernelChatCompletionAgent(agent).RegisterMiddleware( diff --git a/dotnet/test/AutoGen.SourceGenerator.Tests/FunctionExample.test.cs b/dotnet/test/AutoGen.SourceGenerator.Tests/FunctionExample.test.cs index 066aff8156a9..8b477446d9f9 100644 --- a/dotnet/test/AutoGen.SourceGenerator.Tests/FunctionExample.test.cs +++ b/dotnet/test/AutoGen.SourceGenerator.Tests/FunctionExample.test.cs @@ -5,9 +5,9 @@ using ApprovalTests; using ApprovalTests.Namers; using ApprovalTests.Reporters; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; +using AutoGen.OpenAI.Extension; using FluentAssertions; +using OpenAI.Chat; using Xunit; namespace AutoGen.SourceGenerator.Tests @@ -30,7 +30,7 @@ public void Add_Test() }; this.VerifyFunction(functionExamples.AddWrapper, args, 3); - this.VerifyFunctionDefinition(functionExamples.AddFunctionContract.ToOpenAIFunctionDefinition()); + this.VerifyFunctionDefinition(functionExamples.AddFunctionContract.ToChatTool()); } [Fact] @@ -42,7 +42,7 @@ public void Sum_Test() }; this.VerifyFunction(functionExamples.SumWrapper, args, 6.0); - this.VerifyFunctionDefinition(functionExamples.SumFunctionContract.ToOpenAIFunctionDefinition()); + this.VerifyFunctionDefinition(functionExamples.SumFunctionContract.ToChatTool()); } [Fact] @@ -58,7 +58,7 @@ public async Task DictionaryToString_Test() }; await this.VerifyAsyncFunction(functionExamples.DictionaryToStringAsyncWrapper, args, JsonSerializer.Serialize(args.xargs, jsonSerializerOptions)); - this.VerifyFunctionDefinition(functionExamples.DictionaryToStringAsyncFunctionContract.ToOpenAIFunctionDefinition()); + this.VerifyFunctionDefinition(functionExamples.DictionaryToStringAsyncFunctionContract.ToChatTool()); } [Fact] @@ -97,18 +97,18 @@ public void Query_Test() }; 
this.VerifyFunction(functionExamples.QueryWrapper, args, new[] { "hello", "hello", "hello" }); - this.VerifyFunctionDefinition(functionExamples.QueryFunctionContract.ToOpenAIFunctionDefinition()); + this.VerifyFunctionDefinition(functionExamples.QueryFunctionContract.ToChatTool()); } [UseReporter(typeof(DiffReporter))] [UseApprovalSubdirectory("ApprovalTests")] - private void VerifyFunctionDefinition(FunctionDefinition function) + private void VerifyFunctionDefinition(ChatTool function) { var func = new { - name = function.Name, - description = function.Description.Replace(Environment.NewLine, ","), - parameters = function.Parameters.ToObjectFromJson(options: jsonSerializerOptions), + name = function.FunctionName, + description = function.FunctionDescription.Replace(Environment.NewLine, ","), + parameters = function.FunctionParameters.ToObjectFromJson(options: jsonSerializerOptions), }; Approvals.Verify(JsonSerializer.Serialize(func, jsonSerializerOptions)); diff --git a/dotnet/test/AutoGen.Tests/BasicSampleTest.cs b/dotnet/test/AutoGen.Tests/BasicSampleTest.cs index 89925b7d3b39..317fdc36e019 100644 --- a/dotnet/test/AutoGen.Tests/BasicSampleTest.cs +++ b/dotnet/test/AutoGen.Tests/BasicSampleTest.cs @@ -31,7 +31,7 @@ public async Task TwoAgentMathClassTestAsync() await Example02_TwoAgent_MathChat.RunAsync(); } - [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] + [ApiKeyFact("OPENAI_API_KEY")] public async Task AgentFunctionCallTestAsync() { await Example03_Agent_FunctionCall.RunAsync(); diff --git a/dotnet/test/AutoGen.Tests/MiddlewareTest.cs b/dotnet/test/AutoGen.Tests/MiddlewareTest.cs index 6c1c89a33c1c..6398a24f5c57 100644 --- a/dotnet/test/AutoGen.Tests/MiddlewareTest.cs +++ b/dotnet/test/AutoGen.Tests/MiddlewareTest.cs @@ -6,7 +6,6 @@ using System.Linq; using System.Text.Json; using System.Threading.Tasks; -using Azure.AI.OpenAI; using FluentAssertions; using Xunit; @@ -73,7 +72,7 @@ public async Task 
FunctionCallMiddlewareTestAsync() var agent = new EchoAgent("echo"); var args = new EchoSchema { message = "hello" }; var argsJson = JsonSerializer.Serialize(args) ?? throw new InvalidOperationException("Failed to serialize args"); - var functionCall = new FunctionCall("echo", argsJson); + var functionCall = new ToolCall("echo", argsJson); var functionCallAgent = agent.RegisterMiddleware(async (messages, options, agent, ct) => { if (options?.Functions is null) @@ -81,7 +80,7 @@ public async Task FunctionCallMiddlewareTestAsync() return await agent.GenerateReplyAsync(messages, options, ct); } - return new ToolCallMessage(functionCall.Name, functionCall.Arguments, from: agent.Name); + return new ToolCallMessage(functionCall.FunctionName, functionCall.FunctionArguments, from: agent.Name); }); // test 1 @@ -90,7 +89,7 @@ public async Task FunctionCallMiddlewareTestAsync() functionMap: new Dictionary>> { { "echo", EchoWrapper } }); var testAgent = agent.RegisterMiddleware(mw); - var functionCallMessage = new ToolCallMessage(functionCall.Name, functionCall.Arguments, from: "user"); + var functionCallMessage = new ToolCallMessage(functionCall.FunctionName, functionCall.FunctionArguments, from: "user"); var reply = await testAgent.SendAsync(functionCallMessage); reply.Should().BeOfType(); reply.GetContent()!.Should().Be("[FUNC] hello"); diff --git a/dotnet/test/AutoGen.Tests/Orchestrator/RolePlayOrchestratorTests.cs b/dotnet/test/AutoGen.Tests/Orchestrator/RolePlayOrchestratorTests.cs index a91524257b66..d4d602d84914 100644 --- a/dotnet/test/AutoGen.Tests/Orchestrator/RolePlayOrchestratorTests.cs +++ b/dotnet/test/AutoGen.Tests/Orchestrator/RolePlayOrchestratorTests.cs @@ -15,12 +15,13 @@ using AutoGen.Gemini; using AutoGen.Mistral; using AutoGen.Mistral.Extension; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; +using AutoGen.OpenAI; +using AutoGen.OpenAI.Extension; using Azure.AI.Inference; using Azure.AI.OpenAI; using FluentAssertions; using Moq; +using 
OpenAI; using Xunit; namespace AutoGen.Tests; @@ -221,11 +222,10 @@ public async Task GPT_3_5_CoderReviewerRunnerTestAsync() var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new Exception("Please set AZURE_OPENAI_ENDPOINT environment variable."); var key = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY") ?? throw new Exception("Please set AZURE_OPENAI_API_KEY environment variable."); var deployName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOY_NAME") ?? throw new Exception("Please set AZURE_OPENAI_DEPLOY_NAME environment variable."); - var openaiClient = new OpenAIClient(new Uri(endpoint), new Azure.AzureKeyCredential(key)); + var openaiClient = new AzureOpenAIClient(new Uri(endpoint), new System.ClientModel.ApiKeyCredential(key)); var openAIChatAgent = new OpenAIChatAgent( - openAIClient: openaiClient, - name: "assistant", - modelName: deployName) + chatClient: openaiClient.GetChatClient(deployName), + name: "assistant") .RegisterMessageConnector(); await CoderReviewerRunnerTestAsync(openAIChatAgent); @@ -234,13 +234,12 @@ public async Task GPT_3_5_CoderReviewerRunnerTestAsync() [ApiKeyFact("OPENAI_API_KEY")] public async Task GPT_4o_CoderReviewerRunnerTestAsync() { - var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY"); + var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? 
throw new InvalidOperationException("OPENAI_API_KEY is not set"); var model = "gpt-4o"; var openaiClient = new OpenAIClient(apiKey); var openAIChatAgent = new OpenAIChatAgent( - openAIClient: openaiClient, - name: "assistant", - modelName: model) + chatClient: openaiClient.GetChatClient(model), + name: "assistant") .RegisterMessageConnector(); await CoderReviewerRunnerTestAsync(openAIChatAgent); @@ -249,13 +248,12 @@ public async Task GPT_4o_CoderReviewerRunnerTestAsync() [ApiKeyFact("OPENAI_API_KEY")] public async Task GPT_4o_mini_CoderReviewerRunnerTestAsync() { - var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY"); + var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new InvalidOperationException("OPENAI_API_KEY is not set"); var model = "gpt-4o-mini"; var openaiClient = new OpenAIClient(apiKey); var openAIChatAgent = new OpenAIChatAgent( - openAIClient: openaiClient, - name: "assistant", - modelName: model) + chatClient: openaiClient.GetChatClient(model), + name: "assistant") .RegisterMessageConnector(); await CoderReviewerRunnerTestAsync(openAIChatAgent); diff --git a/dotnet/test/AutoGen.Tests/SingleAgentTest.cs b/dotnet/test/AutoGen.Tests/SingleAgentTest.cs index b0633d429b2c..fb28f48e12db 100644 --- a/dotnet/test/AutoGen.Tests/SingleAgentTest.cs +++ b/dotnet/test/AutoGen.Tests/SingleAgentTest.cs @@ -3,13 +3,8 @@ using System; using System.Collections.Generic; -using System.IO; using System.Linq; using System.Threading.Tasks; -using AutoGen.LMStudio; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; using FluentAssertions; using Xunit; using Xunit.Abstractions; @@ -38,90 +33,6 @@ private ILLMConfig CreateOpenAIGPT4VisionConfig() return new OpenAIConfig(key, "gpt-4-vision-preview"); } - [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] - public async Task GPTAgentTestAsync() - { - var config = this.CreateAzureOpenAIGPT35TurboConfig(); - - var agent 
= new GPTAgent("gpt", "You are a helpful AI assistant", config); - - await UpperCaseTestAsync(agent); - await UpperCaseStreamingTestAsync(agent); - } - - [ApiKeyFact("OPENAI_API_KEY", "AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT")] - public async Task GPTAgentVisionTestAsync() - { - var visionConfig = this.CreateOpenAIGPT4VisionConfig(); - var visionAgent = new GPTAgent( - name: "gpt", - systemMessage: "You are a helpful AI assistant", - config: visionConfig, - temperature: 0); - - var gpt3Config = this.CreateAzureOpenAIGPT35TurboConfig(); - var gpt3Agent = new GPTAgent( - name: "gpt3", - systemMessage: "You are a helpful AI assistant, return highest label from conversation", - config: gpt3Config, - temperature: 0, - functions: new[] { this.GetHighestLabelFunctionContract.ToOpenAIFunctionDefinition() }, - functionMap: new Dictionary>> - { - { nameof(GetHighestLabel), this.GetHighestLabelWrapper }, - }); - - var imageUri = new Uri(@"https://microsoft.github.io/autogen/assets/images/level2algebra-659ba95286432d9945fc89e84d606797.png"); - var oaiMessage = new ChatRequestUserMessage( - new ChatMessageTextContentItem("which label has the highest inference cost"), - new ChatMessageImageContentItem(imageUri)); - var multiModalMessage = new MultiModalMessage(Role.User, - [ - new TextMessage(Role.User, "which label has the highest inference cost", from: "user"), - new ImageMessage(Role.User, imageUri, from: "user"), - ], - from: "user"); - - var imageMessage = new ImageMessage(Role.User, imageUri, from: "user"); - - string imagePath = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "ApprovalTests", "square.png"); - ImageMessage imageMessageData; - using (var fs = new FileStream(imagePath, FileMode.Open, FileAccess.Read)) - { - var ms = new MemoryStream(); - await fs.CopyToAsync(ms); - ms.Seek(0, SeekOrigin.Begin); - var imageData = await BinaryData.FromStreamAsync(ms, "image/png"); - imageMessageData = new ImageMessage(Role.Assistant, imageData, from: "user"); - } - - 
IMessage[] messages = [ - MessageEnvelope.Create(oaiMessage), - multiModalMessage, - imageMessage, - imageMessageData - ]; - - foreach (var message in messages) - { - var response = await visionAgent.SendAsync(message); - response.From.Should().Be(visionAgent.Name); - - var labelResponse = await gpt3Agent.SendAsync(response); - labelResponse.From.Should().Be(gpt3Agent.Name); - labelResponse.GetToolCalls()!.First().FunctionName.Should().Be(nameof(GetHighestLabel)); - } - } - - [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] - public async Task GPTFunctionCallAgentTestAsync() - { - var config = this.CreateAzureOpenAIGPT35TurboConfig(); - var agentWithFunction = new GPTAgent("gpt", "You are a helpful AI assistant", config, 0, functions: new[] { this.EchoAsyncFunctionContract.ToOpenAIFunctionDefinition() }); - - await EchoFunctionCallTestAsync(agentWithFunction); - } - [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] public async Task AssistantAgentFunctionCallTestAsync() { @@ -147,43 +58,6 @@ public async Task AssistantAgentFunctionCallTestAsync() await EchoFunctionCallTestAsync(assistantAgent); } - [Fact] - public async Task ItCreateAssistantAgentFromLMStudioConfigAsync() - { - var host = "http://localhost"; - var port = 8080; - var lmStudioConfig = new LMStudioConfig(host, port); - - var assistantAgent = new AssistantAgent( - name: "assistant", - llmConfig: new ConversableAgentConfig() - { - ConfigList = [lmStudioConfig], - }); - - assistantAgent.Name.Should().Be("assistant"); - assistantAgent.InnerAgent.Should().BeOfType(); - } - - [ApiKeyFact("LMStudio_ENDPOINT")] - public async Task ItTestAssistantAgentFromLMStudioConfigAsync() - { - var Uri = Environment.GetEnvironmentVariable("LMStudio_ENDPOINT") ?? 
throw new ArgumentException("LMStudio_ENDPOINT is not set"); - var lmStudioConfig = new LMStudioConfig(new Uri(Uri)); - - var assistantAgent = new AssistantAgent( - name: "assistant", - llmConfig: new ConversableAgentConfig() - { - ConfigList = [lmStudioConfig], - }); - - assistantAgent.Name.Should().Be("assistant"); - assistantAgent.InnerAgent.Should().BeOfType(); - await this.UpperCaseTestAsync(assistantAgent); - } - - [Fact] public async Task AssistantAgentDefaultReplyTestAsync() { @@ -225,25 +99,6 @@ public async Task AssistantAgentFunctionCallSelfExecutionTestAsync() await EchoFunctionCallExecutionTestAsync(assistantAgent); } - [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_DEPLOY_NAME")] - public async Task GPTAgentFunctionCallSelfExecutionTestAsync() - { - var config = this.CreateAzureOpenAIGPT35TurboConfig(); - var agent = new GPTAgent( - name: "gpt", - systemMessage: "You are a helpful AI assistant", - config: config, - temperature: 0, - functions: new[] { this.EchoAsyncFunctionContract.ToOpenAIFunctionDefinition() }, - functionMap: new Dictionary>> - { - { nameof(EchoAsync), this.EchoAsyncWrapper }, - }); - - await EchoFunctionCallExecutionStreamingTestAsync(agent); - await EchoFunctionCallExecutionTestAsync(agent); - } - /// /// echo when asked. 
/// diff --git a/dotnet/test/AutoGen.Tests/TwoAgentTest.cs b/dotnet/test/AutoGen.Tests/TwoAgentTest.cs index b5d7bceed4dd..335f4aaa57c6 100644 --- a/dotnet/test/AutoGen.Tests/TwoAgentTest.cs +++ b/dotnet/test/AutoGen.Tests/TwoAgentTest.cs @@ -5,7 +5,6 @@ using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; -using AutoGen.OpenAI.V1; using FluentAssertions; using Xunit.Abstractions; diff --git a/dotnet/test/AutoGen.WebAPI.Tests/OpenAIChatCompletionMiddlewareTests.cs b/dotnet/test/AutoGen.WebAPI.Tests/OpenAIChatCompletionMiddlewareTests.cs index 964f1fc94edb..c56bbf983504 100644 --- a/dotnet/test/AutoGen.WebAPI.Tests/OpenAIChatCompletionMiddlewareTests.cs +++ b/dotnet/test/AutoGen.WebAPI.Tests/OpenAIChatCompletionMiddlewareTests.cs @@ -1,16 +1,16 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // OpenAIChatCompletionMiddlewareTests.cs +using System.ClientModel.Primitives; using AutoGen.Core; -using AutoGen.OpenAI.V1; -using AutoGen.OpenAI.V1.Extension; -using Azure.AI.OpenAI; -using Azure.Core.Pipeline; +using AutoGen.OpenAI; +using AutoGen.OpenAI.Extension; using FluentAssertions; using Microsoft.AspNetCore.Hosting; using Microsoft.AspNetCore.TestHost; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Hosting; +using OpenAI; namespace AutoGen.WebAPI.Tests; @@ -24,7 +24,7 @@ public async Task ItReturnTextMessageWhenSendTextMessage() using var host = await hostBuilder.StartAsync(); var client = host.GetTestClient(); var openaiClient = CreateOpenAIClient(client); - var openAIAgent = new OpenAIChatAgent(openaiClient, "test", "test") + var openAIAgent = new OpenAIChatAgent(openaiClient.GetChatClient("test"), "test") .RegisterMessageConnector(); var response = await openAIAgent.SendAsync("Hey"); @@ -42,7 +42,7 @@ public async Task ItReturnTextMessageWhenSendTextMessageUseStreaming() using var host = await hostBuilder.StartAsync(); var client = host.GetTestClient(); var openaiClient = 
CreateOpenAIClient(client); - var openAIAgent = new OpenAIChatAgent(openaiClient, "test", "test") + var openAIAgent = new OpenAIChatAgent(openaiClient.GetChatClient("test"), "test") .RegisterMessageConnector(); var message = new TextMessage(Role.User, "ABCDEFGHIJKLMN"); @@ -73,10 +73,9 @@ private IHostBuilder CreateHostBuilder(IAgent agent) private OpenAIClient CreateOpenAIClient(HttpClient client) { - var clientOption = new OpenAIClientOptions(OpenAIClientOptions.ServiceVersion.V2024_02_15_Preview) + return new OpenAIClient("api-key", new OpenAIClientOptions { - Transport = new HttpClientTransport(client), - }; - return new OpenAIClient("api-key", clientOption); + Transport = new HttpClientPipelineTransport(client), + }); } }