From 0df285011f9d7c06be0a20b433f1f1b979a73f47 Mon Sep 17 00:00:00 2001
From: Daniel Chalef <131175+danielchalef@users.noreply.github.com>
Date: Tue, 7 Nov 2023 12:37:39 -0800
Subject: [PATCH] add gpt-3.5-turbo-1106 and gpt-4-1106-preview support (#268)

---
 config.yaml            |  4 ++--
 pkg/llms/llm_base.go   | 24 ++++++++++++++----------
 pkg/testutils/utils.go |  2 +-
 3 files changed, 17 insertions(+), 13 deletions(-)

diff --git a/config.yaml b/config.yaml
index 3fe3e92e..255b554b 100644
--- a/config.yaml
+++ b/config.yaml
@@ -1,8 +1,8 @@
 llm:
   # openai or anthropic
   service: "openai"
-  # OpenAI: gpt-3.5-turbo, gpt-4, gpt-3.5-turbo-16k, gpt-4-32k; Anthropic: claude-instant-1 or claude-2
-  model: "gpt-3.5-turbo"
+  # OpenAI: gpt-3.5-turbo, gpt-4, gpt-3.5-turbo-1106, gpt-3.5-turbo-16k, gpt-4-32k; Anthropic: claude-instant-1 or claude-2
+  model: "gpt-3.5-turbo-1106"
   ## OpenAI-specific settings
   # Only used for Azure OpenAI API
   azure_openai_endpoint:
diff --git a/pkg/llms/llm_base.go b/pkg/llms/llm_base.go
index 5be35ba9..398beaa6 100644
--- a/pkg/llms/llm_base.go
+++ b/pkg/llms/llm_base.go
@@ -94,10 +94,12 @@ func NewLLMError(message string, originalError error) *LLMError {
 }
 
 var ValidOpenAILLMs = map[string]bool{
-	"gpt-3.5-turbo":     true,
-	"gpt-4":             true,
-	"gpt-3.5-turbo-16k": true,
-	"gpt-4-32k":         true,
+	"gpt-3.5-turbo":      true,
+	"gpt-4":              true,
+	"gpt-3.5-turbo-16k":  true,
+	"gpt-3.5-turbo-1106": true,
+	"gpt-4-32k":          true,
+	"gpt-4-1106-preview": true,
 }
 
 var ValidAnthropicLLMs = map[string]bool{
@@ -108,12 +110,14 @@ var ValidAnthropicLLMs = map[string]bool{
 var ValidLLMMap = internal.MergeMaps(ValidOpenAILLMs, ValidAnthropicLLMs)
 
 var MaxLLMTokensMap = map[string]int{
-	"gpt-3.5-turbo":     4096,
-	"gpt-3.5-turbo-16k": 16_384,
-	"gpt-4":             8192,
-	"gpt-4-32k":         32_768,
-	"claude-instant-1":  100_000,
-	"claude-2":          100_000,
+	"gpt-3.5-turbo":      4096,
+	"gpt-3.5-turbo-16k":  16_385,
+	"gpt-3.5-turbo-1106": 16_385,
+	"gpt-4":              8192,
+	"gpt-4-32k":          32_768,
+	"gpt-4-1106-preview": 128_000,
+	"claude-instant-1":   100_000,
+	"claude-2":           100_000,
 }
 
 func GetLLMModelName(cfg *config.Config) (string, error) {
diff --git a/pkg/testutils/utils.go b/pkg/testutils/utils.go
index cbc75e76..c9a4f3f5 100644
--- a/pkg/testutils/utils.go
+++ b/pkg/testutils/utils.go
@@ -26,7 +26,7 @@ func testConfigDefaults() (*config.Config, error) {
 	testConfig := &config.Config{
 		LLM: config.LLM{
 			Service: "openai",
-			Model:   "gpt-3.5-turbo",
+			Model:   "gpt-3.5-turbo-1106",
 		},
 		NLP: config.NLP{
 			ServerURL: "http://localhost:5557",
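
Note (not part of the patch): the standalone Go sketch below illustrates how model-validation and token-limit maps like the updated ValidOpenAILLMs and MaxLLMTokensMap in pkg/llms/llm_base.go are typically consumed. The lowercase map copies, the describeModel helper, and the 4096-token fallback are hypothetical illustrations; only the model names and limits mirror the values added above.

package main

import "fmt"

// Standalone illustration: local copies mirroring a subset of the entries
// added to ValidOpenAILLMs and MaxLLMTokensMap in this patch.
var validOpenAILLMs = map[string]bool{
	"gpt-3.5-turbo":      true,
	"gpt-3.5-turbo-1106": true,
	"gpt-4-1106-preview": true,
}

var maxLLMTokens = map[string]int{
	"gpt-3.5-turbo":      4096,
	"gpt-3.5-turbo-1106": 16_385,
	"gpt-4-1106-preview": 128_000,
}

// describeModel (hypothetical helper) reports whether a configured model name
// is recognized and, if so, returns its context-window budget, falling back to
// a conservative default when no limit is recorded.
func describeModel(model string) (int, error) {
	if !validOpenAILLMs[model] {
		return 0, fmt.Errorf("invalid LLM model %q", model)
	}
	limit, ok := maxLLMTokens[model]
	if !ok {
		limit = 4096 // hypothetical fallback; the real package may behave differently
	}
	return limit, nil
}

func main() {
	for _, m := range []string{"gpt-3.5-turbo-1106", "gpt-4-1106-preview", "not-a-model"} {
		if limit, err := describeModel(m); err != nil {
			fmt.Println(err)
		} else {
			fmt.Printf("%s: up to %d tokens\n", m, limit)
		}
	}
}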