From 8aac431525d03bbc6608382a237d9a096a150e8c Mon Sep 17 00:00:00 2001
From: Elad Ben-Israel
Date: Tue, 19 Mar 2024 17:27:08 +0200
Subject: [PATCH] chore(openai): a few refactors (#134)

* Add openai lib
* change git clone in the readme to the public url
* add description to package.json
* update mklib.sh to include a description in package.json
* fix PR comment about peerDependencies
* tweaks
* remove author from template
* Update openai.test.w
* fix test
* another fix to test
* more cleanups
* Update package.json
* Update openai.test.w
* Rename openai/example.main.w to openai/examples/example.main.w
* fix bring statement in examples
* remove unused files

---------

Co-authored-by: Shai Ber
---
 openai/api.w                         |  9 ------
 openai/{ => examples}/example.main.w |  4 +--
 openai/openai.extern.d.ts            | 10 ++-----
 openai/openai.js                     | 25 ++--------------
 openai/openai.test.w                 | 10 +++----
 openai/openai.w                      | 45 +++++++++++++++++++++-------
 openai/package.json                  |  2 +-
 openai/utils.w                       |  5 ----
 8 files changed, 47 insertions(+), 63 deletions(-)
 delete mode 100644 openai/api.w
 rename openai/{ => examples}/example.main.w (76%)
 delete mode 100644 openai/utils.w

diff --git a/openai/api.w b/openai/api.w
deleted file mode 100644
index 9ea0c07e..00000000
--- a/openai/api.w
+++ /dev/null
@@ -1,9 +0,0 @@
-pub struct CompletionParams {
-  model: str;
-  max_tokens: num;
-}
-
-// TODO: need to recreate the openai interface with higher fidelity
-pub interface IOpenAI {
-  inflight createCompletion(prompt: str, params: CompletionParams?): str;
-}
diff --git a/openai/example.main.w b/openai/examples/example.main.w
similarity index 76%
rename from openai/example.main.w
rename to openai/examples/example.main.w
index ea969c12..640540ce 100644
--- a/openai/example.main.w
+++ b/openai/examples/example.main.w
@@ -1,11 +1,11 @@
 bring expect;
-bring "./openai.w" as openai;
+bring "../openai.w" as openai;
 bring cloud;
 
 let oai = new openai.OpenAI(apiKey: "my-openai-key");
 
 new cloud.Function(inflight () => {
-  let answer = oai.createCompletion("tell me a short joke", model: "gpt-3.5-turbo", max_tokens: 2048);
+  let answer = oai.createCompletion("tell me a short joke", model: "gpt-3.5-turbo", maxTokens: 2048);
   log(answer);
 }) as "tell me a joke";
 
diff --git a/openai/openai.extern.d.ts b/openai/openai.extern.d.ts
index ddb1e323..d3495fa1 100644
--- a/openai/openai.extern.d.ts
+++ b/openai/openai.extern.d.ts
@@ -1,10 +1,6 @@
 export default interface extern {
-  createNewInflightClient: (apiKey: string, org?: (string) | undefined) => Promise<IOpenAI$Inflight>,
+  createNewInflightClient: (apiKey: string, org?: (string) | undefined) => Promise<IClient$Inflight>,
 }
-export interface CompletionParams {
-  readonly max_tokens: number;
-  readonly model: string;
-}
-export interface IOpenAI$Inflight {
-  readonly createCompletion: (prompt: string, params?: (CompletionParams) | undefined) => Promise<string>;
+export interface IClient$Inflight {
+  readonly createCompletion: (params: Readonly<Json>) => Promise<Readonly<Json>>;
 }
\ No newline at end of file
diff --git a/openai/openai.js b/openai/openai.js
index 0f6c0298..052dfeb2 100644
--- a/openai/openai.js
+++ b/openai/openai.js
@@ -9,30 +9,9 @@ exports.createNewInflightClient = (apiKey, org) => {
 
   let client = new openai.OpenAI(config);
 
-  // TODO: this is a hack for now, we should model the openai api in the api.w file with more fidelity
-  // and then we can just return the client itself, like we do in redis
   return {
-    createCompletion: async (prompt, params = {}) => {
-      if (!prompt) {
-        throw new Error("Prompt is required");
-      };
-
-      if (typeof prompt !== "string") {
-        throw new Error("Prompt must be a string");
-      }
-
-      if (!params.model) {
-        params.model = "gpt-3.5-turbo";
-      };
-
-      if (!params.max_tokens) {
-        params.max_tokens = 2048;
-      }
-
-      params.messages = [ { role: 'user', content: prompt } ];
-
-      const response = await client.chat.completions.create(params);
-      return response.choices[0]?.message?.content;
+    createCompletion: async params => {
+      return await client.chat.completions.create(params);
     }
   };
 };
diff --git a/openai/openai.test.w b/openai/openai.test.w
index 5ab63fe7..c22cee72 100644
--- a/openai/openai.test.w
+++ b/openai/openai.test.w
@@ -3,17 +3,17 @@
 bring "./openai.w" as openai;
 bring cloud;
 
-let key = "my-openai-key";
-let oai = new openai.OpenAI(apiKey: key);
+let oai = new openai.OpenAI(apiKey: "dummy-key");
 
 test "basic completion" {
-  let answer = oai.createCompletion("tell me a short joke", model :"gpt-3.5-turbo", max_tokens: 1024);
+  let answer = oai.createCompletion("tell me a short joke", maxTokens: 1024);
 
   // in tests, the response is just an echo of the request
   expect.equal(answer, Json.stringify({ mock: {
-    prompt:"tell me a short joke",
-    params:{"model":"gpt-3.5-turbo","max_tokens":1024}
+    "max_tokens":1024,
+    "model":"gpt-3.5-turbo",
+    "messages":[{"role":"user","content":"tell me a short joke"}]
   } }));
 }
 
 
diff --git a/openai/openai.w b/openai/openai.w
index be6a4737..b2ad590a 100644
--- a/openai/openai.w
+++ b/openai/openai.w
@@ -1,7 +1,10 @@
-bring util;
-bring "./api.w" as api;
-bring "./utils.w" as utils;
 bring cloud;
+bring util;
+
+pub struct CompletionParams {
+  model: str?;
+  maxTokens: num?;
+}
 
 pub struct OpenAIProps {
   apiKey: str?;
@@ -10,13 +13,25 @@ pub struct OpenAIProps {
   orgSecret: cloud.Secret?;
 }
 
-inflight class Sim impl api.IOpenAI {
-  pub createCompletion(prompt: str, params: api.CompletionParams?): str {
-    return Json.stringify({ mock: { prompt: prompt, params: params } });
+interface IClient {
+  inflight createCompletion(params: Json): Json;
+}
+
+inflight class Sim impl IClient {
+  pub createCompletion(req: Json): Json {
+    return {
+      choices: [
+        {
+          message: {
+            content: Json.stringify({ mock: req })
+          }
+        }
+      ]
+    };
   }
 }
 
-pub class OpenAI impl api.IOpenAI {
+pub class OpenAI {
   apiKey: cloud.Secret?;
   org: cloud.Secret?;
   keyOverride: str?;
@@ -24,7 +39,7 @@ pub class OpenAI impl api.IOpenAI {
 
   mock: bool;
 
-  inflight openai: api.IOpenAI;
+  inflight openai: IClient;
 
   new(props: OpenAIProps?) {
     this.apiKey = props?.apiKeySecret;
@@ -50,11 +65,19 @@ pub class OpenAI impl api.IOpenAI {
     if this.mock {
       this.openai = new Sim();
     } else {
-      this.openai = utils.createNewInflightClient(apiKey, org);
+      this.openai = OpenAI.createNewInflightClient(apiKey, org);
     }
   }
 
-  pub inflight createCompletion(prompt: str, params: api.CompletionParams?): str {
-    return this.openai.createCompletion(prompt, params);
+  pub inflight createCompletion(prompt: str, params: CompletionParams?): str {
+    let resp = this.openai.createCompletion({
+      max_tokens: params?.maxTokens ?? 2048,
+      model: params?.model ?? "gpt-3.5-turbo",
+      messages: [ { role: "user", content: prompt } ]
+    });
+
+    return resp.get("choices").getAt(0).get("message").get("content").asStr();
   }
+
+  extern "./openai.js" pub static inflight createNewInflightClient(apiKey: str, org: str?): IClient;
 }
diff --git a/openai/package.json b/openai/package.json
index 3319e2da..6ef7ca04 100644
--- a/openai/package.json
+++ b/openai/package.json
@@ -1,7 +1,7 @@
 {
   "name": "@winglibs/openai",
   "description": "OpenAI library for Wing",
-  "version": "0.0.1",
+  "version": "0.0.2",
   "repository": {
     "type": "git",
     "url": "https://github.com/winglang/winglibs.git",
diff --git a/openai/utils.w b/openai/utils.w
deleted file mode 100644
index 1f080534..00000000
--- a/openai/utils.w
+++ /dev/null
@@ -1,5 +0,0 @@
-bring "./api.w" as api;
-
-pub class Util {
-  extern "./openai.js" pub static inflight createNewInflightClient(apiKey: str, org: str?): api.IOpenAI;
-}
\ No newline at end of file