
Merge pull request #1371 from samchon/feat/llm
Adapt changed structure of `ILlmFunction<Parameters>`.
samchon authored Nov 22, 2024
2 parents bfd1543 + 8bdde5c commit e5b50e0
Showing 2,077 changed files with 771,009 additions and 13,395 deletions.
6 changes: 3 additions & 3 deletions README.md
@@ -23,9 +23,9 @@ export namespace json {

// LLM FUNCTION CALLING APPLICATION
export namespace llm {
-   // LLM function calling application from a class or interface type
-   export function application<App>(): ILlmApplication;
-   export function schema<T>(): ILlmSchema; // LLM type schema
+   // application from a class or interface type
+   export function application<App, Model>(): ILlmApplication;
+   export function schema<T, Model>(): ILlmSchema; // LLM type schema
}

// PROTOCOL BUFFER
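For context on the README change above: callers now pass the target LLM schema model as a second generic argument instead of relying on an implicit default. A minimal usage sketch under stated assumptions — the `ShoppingService` interface and its `search` method are illustrative and not part of this PR, and the type import from `@samchon/openapi` mirrors what the diff itself uses:

```typescript
import typia from "typia";
import { ILlmApplication } from "@samchon/openapi";

// Hypothetical service type; any class or interface type works here.
interface ShoppingService {
  /** Search sale items by keyword. */
  search(props: { keyword: string; limit: number }): string[];
}

// Before this PR: typia.llm.application<ShoppingService>()
// After this PR: the LLM schema model must be given explicitly.
const app: ILlmApplication<"chatgpt"> =
  typia.llm.application<ShoppingService, "chatgpt">();
```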
2 changes: 1 addition & 1 deletion benchmark/package.json
@@ -72,6 +72,6 @@
"suppress-warnings": "^1.0.2",
"tstl": "^3.0.0",
"uuid": "^9.0.1",
"typia": "../typia-7.0.0-dev.20241115.tgz"
"typia": "../typia-7.0.0-dev.20241122.tgz"
}
}
73 changes: 37 additions & 36 deletions package.json
@@ -1,6 +1,6 @@
{
"name": "typia",
"version": "7.0.0-dev.20241115",
"version": "7.0.0-dev.20241122",
"description": "Superfast runtime validators with only one line",
"main": "lib/index.js",
"typings": "lib/index.d.ts",
@@ -35,48 +35,14 @@
"type": "git",
"url": "https://github.com/samchon/typia"
},
"keywords": [
"fast",
"json",
"stringify",
"typescript",
"transform",
"ajv",
"io-ts",
"zod",
"schema",
"json-schema",
"generator",
"assert",
"clone",
"is",
"validate",
"equal",
"runtime",
"type",
"typebox",
"checker",
"validator",
"safe",
"parse",
"prune",
"random",
"protobuf",
"llm",
"llm-function-calling",
"openai",
"chatgpt",
"llama",
"gemini"
],
"author": "Jeongho Nam",
"license": "MIT",
"bugs": {
"url": "https://github.com/samchon/typia/issues"
},
"homepage": "https://typia.io",
"dependencies": {
"@samchon/openapi": "2.0.0-dev.20241112",
"@samchon/openapi": "2.0.0-dev.20241122-2",
"commander": "^10.0.0",
"comment-json": "^4.2.3",
"inquirer": "^8.2.5",
@@ -115,5 +81,40 @@
"lib",
"src"
],
"keywords": [
"fast",
"json",
"stringify",
"typescript",
"transform",
"ajv",
"io-ts",
"zod",
"schema",
"json-schema",
"generator",
"assert",
"clone",
"is",
"validate",
"equal",
"runtime",
"type",
"typebox",
"checker",
"validator",
"safe",
"parse",
"prune",
"random",
"protobuf",
"llm",
"llm-function-calling",
"openai",
"chatgpt",
"llama",
"gemini",
"claude"
],
"private": true
}
6 changes: 3 additions & 3 deletions packages/typescript-json/README.md
@@ -26,9 +26,9 @@ export namespace json {

// LLM FUNCTION CALLING APPLICATION
export namespace llm {
-   // LLM function calling application from a class or interface type
-   export function application<App>(): ILlmApplication;
-   export function schema<T>(): ILlmSchema; // LLM type schema
+   // application from a class or interface type
+   export function application<App, Model>(): ILlmApplication;
+   export function schema<T, Model>(): ILlmSchema; // LLM type schema
}

// PROTOCOL BUFFER
51 changes: 26 additions & 25 deletions packages/typescript-json/package.json
@@ -1,6 +1,6 @@
{
"name": "typescript-json",
"version": "7.0.0-dev.20241115",
"version": "7.0.0-dev.20241122",
"description": "Superfast runtime validators with only one line",
"main": "lib/index.js",
"typings": "lib/index.d.ts",
@@ -31,6 +31,29 @@
"type": "git",
"url": "https://github.com/samchon/typia"
},
"author": "Jeongho Nam",
"license": "MIT",
"bugs": {
"url": "https://github.com/samchon/typia/issues"
},
"homepage": "https://typia.io",
"dependencies": {
"typia": "7.0.0-dev.20241122"
},
"peerDependencies": {
"typescript": ">=4.8.0 <5.7.0"
},
"stackblitz": {
"startCommand": "npm install && npm run test"
},
"sideEffects": false,
"files": [
"LICENSE",
"README.md",
"package.json",
"lib",
"src"
],
"keywords": [
"fast",
"json",
@@ -63,29 +86,7 @@
"openai",
"chatgpt",
"llama",
"gemini"
],
"author": "Jeongho Nam",
"license": "MIT",
"bugs": {
"url": "https://github.com/samchon/typia/issues"
},
"homepage": "https://typia.io",
"dependencies": {
"typia": "7.0.0-dev.20241115"
},
"peerDependencies": {
"typescript": ">=4.8.0 <5.7.0"
},
"stackblitz": {
"startCommand": "npm install && npm run test"
},
"sideEffects": false,
"files": [
"LICENSE",
"README.md",
"package.json",
"lib",
"src"
"gemini",
"claude"
]
}
31 changes: 25 additions & 6 deletions src/internal/_llmApplicationFinalize.ts
@@ -3,17 +3,36 @@ import { HttpLlmConverter } from "@samchon/openapi/lib/converters/HttpLlmConvert

export const _llmApplicationFinalize = <Model extends ILlmApplication.Model>(
app: ILlmApplication<Model>,
- options?: ILlmApplication.IOptions<Model>,
+ options?: Partial<ILlmApplication.IOptions<Model>>,
): void => {
- app.options = {
-   separate: options?.separate ?? null,
-   recursive: app.model === "chatgpt" ? undefined : (3 as any),
- };
+ app.options = (
+   isChatGptOptions(app, options)
+     ? ({
+         separate: options?.separate ?? null,
+         reference: options?.reference ?? false,
+         constraint: options?.constraint ?? false,
+       } satisfies ILlmApplication.IOptions<"chatgpt">)
+     : ({
+         separate: (options?.separate ??
+           null) as ILlmApplication.ICommonOptions<
+           Exclude<Model, "chatgpt">
+         >["separate"],
+         recursive:
+           (options as ILlmApplication.IOptions<"3.0"> | undefined)
+             ?.recursive ?? 3,
+       } satisfies ILlmApplication.ICommonOptions<Exclude<Model, "chatgpt">>)
+ ) as ILlmApplication.IOptions<Model>;
if (app.options.separate === null) return;
for (const func of app.functions)
func.separated = HttpLlmConverter.separateParameters({
model: app.model,
parameters: func.parameters,
-     predicate: app.options.separate,
+     predicate: app.options
+       .separate as ILlmApplication.IOptions<Model>["separate"] as any,
});
};

+ const isChatGptOptions = <Model extends ILlmApplication.Model>(
+   app: ILlmApplication<Model>,
+   _options: unknown,
+ ): _options is ILlmApplication.IOptions<"chatgpt"> => app.model === "chatgpt";
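For reference, the defaulting logic added above yields model-specific option shapes when the caller passes nothing. A minimal sketch of those defaults — the field sets come from the `satisfies` clauses in this diff, and the exact type names assume the `@samchon/openapi` dev build referenced by this PR:

```typescript
import { ILlmApplication } from "@samchon/openapi";

// "chatgpt" gets reference/constraint flags instead of a recursion depth.
const chatgptDefaults: ILlmApplication.IOptions<"chatgpt"> = {
  separate: null,
  reference: false,
  constraint: false,
};

// Every other model keeps the common options, with a recursive depth of 3.
const commonDefaults: ILlmApplication.ICommonOptions<"3.0"> = {
  separate: null,
  recursive: 3,
};
```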
12 changes: 6 additions & 6 deletions src/llm.ts
@@ -1,4 +1,4 @@
- import { ILlmApplication } from "@samchon/openapi";
+ import { IChatGptSchema, ILlmApplication } from "@samchon/openapi";

/**
* > You must configure the generic argument `App`.
@@ -82,7 +82,7 @@ export function application(
*/
export function application<
App extends object,
Model extends ILlmApplication.Model = "3.1",
Model extends ILlmApplication.Model,
>(
options?: Partial<Omit<ILlmApplication.IOptions<Model>, "recursive">>,
): ILlmApplication<Model>;
@@ -164,14 +164,14 @@ export function schema(): never;
*
* @template T Target type
* @template Model LLM schema model
+ * @param $defs Definitions of named schemas if the model is `chatgpt`
* @returns LLM schema
* @reference https://platform.openai.com/docs/guides/function-calling
* @author Jeongho Nam - https://github.com/samchon
*/
- export function schema<
-   T,
-   Model extends ILlmApplication.Model = "3.1",
- >(): ILlmApplication.ModelSchema[Model];
+ export function schema<T, Model extends ILlmApplication.Model>(
+   ...$defs: Model extends "chatgpt" ? [Record<string, IChatGptSchema>] : []
+ ): ILlmApplication.ModelSchema[Model];

/**
* @internal
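A hedged sketch of the new `$defs` rest parameter in `typia.llm.schema()`: per the updated overload above, only the `"chatgpt"` model accepts (and requires) a `Record<string, IChatGptSchema>` into which definitions of named schemas are collected, while other models take no argument. The `IMember` type below is an illustrative assumption, not part of this PR:

```typescript
import typia from "typia";
import { IChatGptSchema, ILlmApplication } from "@samchon/openapi";

// Illustrative target type.
interface IMember {
  id: string;
  name: string;
  friends: IMember[]; // recursive reference; named schema definitions for chatgpt go into $defs
}

// The "chatgpt" overload takes the $defs record as its only argument.
const $defs: Record<string, IChatGptSchema> = {};
const schema: ILlmApplication.ModelSchema["chatgpt"] =
  typia.llm.schema<IMember, "chatgpt">($defs);

// Other models take no argument at all, e.g.:
// typia.llm.schema<IMember, "3.1">();
```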
