refactor: ♻️ update pricing logic and cleanup code
pelikhan committed Dec 11, 2024
1 parent e68807e commit 680d579
Showing 6 changed files with 561 additions and 466 deletions.
106 changes: 106 additions & 0 deletions docs/public/schemas/llms.json
@@ -0,0 +1,106 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"title": "GenAIScript LLM Provider Configuration",
"type": "object",
"description": "Schema for LLMS configuration file",
"properties": {
"providers": {
"type": "array",
"description": "List of LLM providers",
"items": {
"type": "object",
"description": "Details of a single LLM provider",
"properties": {
"id": {
"type": "string",
"enum": [
"openai",
"github",
"azure",
"azure_serverless",
"azure_serverless_models",
"anthropic",
"googe",
"huggingface",
"transformers",
"ollama",
"mistal",
"lmstudio",
"jan",
"alibaba",
"llamafile",
"litellm"
],
"description": "Identifier for the LLM provider"
},
"detail": {
"type": "string",
"description": "Description of the LLM provider"
},
"logprobs": {
"type": "boolean",
"description": "Indicates if log probabilities are supported"
},
"topLogprobs": {
"type": "boolean",
"description": "Indicates if top log probabilities are supported"
},
"seed": {
"type": "boolean",
"description": "Indicates if seeding is supported"
},
"tools": {
"type": "boolean",
"description": "Indicates if tools are supported"
}
},
"required": ["id", "detail"]
}
}
},
"pricings": {
"type": "object",
"patternProperties": {
"^[a-zA-Z0-9:_-]+$": {
"type": "object",
"properties": {
"price_per_million_input_tokens": {
"type": "number"
},
"price_per_million_output_tokens": {
"type": "number"
},
"input_cache_token_rebate": {
"type": "number"
},
"tiers": {
"type": "array",
"items": {
"type": "object",
"properties": {
"context_size": {
"type": "integer"
},
"price_per_million_input_tokens": {
"type": "number"
},
"price_per_million_output_tokens": {
"type": "number"
}
},
"required": [
"price_per_million_input_tokens",
"price_per_million_output_tokens"
]
}
}
},
"required": [
"price_per_million_input_tokens",
"price_per_million_output_tokens"
]
}
}
},
"required": ["providers", "pricings"]
}
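
For reference, a document conforming to this schema might look like the sketch below (written here as a TypeScript constant; the provider flags, the model name, and all dollar amounts are illustrative placeholders, not the project's actual configuration):

const exampleLlmsConfig = {
    // Each provider entry uses one of the "id" enum values defined above.
    providers: [
        {
            id: "openai",
            detail: "OpenAI (or compatible)",
            logprobs: true,
            topLogprobs: true,
            seed: true,
            tools: true,
        },
    ],
    // Pricing records are keyed by identifiers matching ^[a-zA-Z0-9:_-]+$;
    // the key and prices below are hypothetical.
    pricings: {
        "openai:gpt-4o": {
            price_per_million_input_tokens: 2.5,
            price_per_million_output_tokens: 10,
            input_cache_token_rebate: 0.5,
        },
    },
}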
8 changes: 7 additions & 1 deletion packages/cli/src/run.ts
@@ -143,7 +143,10 @@ export async function runScriptWithExitCode(
host.path.basename(scriptId).replace(GENAI_ANYTS_REGEX, ""),
`${new Date().toISOString().replace(/[:.]/g, "-")}.trace.md`
)
const res = await runScriptInternal(scriptId, files, { ...options, outTrace })
const res = await runScriptInternal(scriptId, files, {
...options,
outTrace,
})
exitCode = res.exitCode
if (
exitCode === SUCCESS_ERROR_CODE ||
@@ -398,6 +401,7 @@ export async function runScriptInternal(
stats,
})
} catch (err) {
stats.log()
if (isCancelError(err))
return fail("user cancelled", USER_CANCELLED_ERROR_CODE)
logError(err)
@@ -406,6 +410,8 @@

await aggregateResults(scriptId, outTrace, stats, result)
await traceAgentMemory(trace)
stats.log()

if (outAnnotations && result.annotations?.length) {
if (isJSONLFilename(outAnnotations))
await appendJSONL(outAnnotations, result.annotations)
82 changes: 12 additions & 70 deletions packages/core/src/constants.ts
@@ -1,3 +1,4 @@
import CONFIGURATION_DATA from "./llms.json"
export const CHANGE = "change"
export const TRACE_CHUNK = "traceChunk"
export const TRACE_DETAILS = "traceDetails"
@@ -226,76 +227,17 @@ export const MODEL_PROVIDERS = Object.freeze<
*/
tools?: boolean
}[]
>([
{
id: MODEL_PROVIDER_OPENAI,
detail: "OpenAI (or compatible)",
},
{
id: MODEL_PROVIDER_GITHUB,
detail: "GitHub Models",
},
{
id: MODEL_PROVIDER_AZURE_OPENAI,
detail: "Azure OpenAI deployment",
},
{
id: MODEL_PROVIDER_AZURE_SERVERLESS_OPENAI,
detail: "Azure AI OpenAI (serverless deployments)",
},
{
id: MODEL_PROVIDER_AZURE_SERVERLESS_MODELS,
detail: "Azure AI Models (serverless deployments, not OpenAI)",
},
{
id: MODEL_PROVIDER_ANTHROPIC,
detail: "Anthropic models",
},
{
id: MODEL_PROVIDER_GOOGLE,
detail: "Google AI",
seed: false,
tools: false,
},
{
id: MODEL_PROVIDER_HUGGINGFACE,
detail: "Hugging Face models",
},
{
id: MODEL_PROVIDER_MISTRAL,
detail: "Mistral AI",
},
{
id: MODEL_PROVIDER_TRANSFORMERS,
detail: "Hugging Face Transformers",
},
{
id: MODEL_PROVIDER_OLLAMA,
detail: "Ollama local model",
logit_bias: false,
},
{
id: MODEL_PROVIDER_LMSTUDIO,
detail: "LM Studio local server",
},
{
id: MODEL_PROVIDER_LMSTUDIO,
detail: "Jan local server",
},
{
id: MODEL_PROVIDER_ALIBABA,
detail: "Alibaba models",
tools: false,
},
{
id: MODEL_PROVIDER_LLAMAFILE,
detail: "llamafile.ai local model",
},
{
id: MODEL_PROVIDER_LITELLM,
detail: "LiteLLM proxy",
},
])
>(CONFIGURATION_DATA.providers)
export const MODEL_PRICINGS = Object.freeze<
Record<
string,
{
price_per_million_input_tokens: number
price_per_million_output_tokens: number
input_cache_token_rebate?: number
}
>
>(CONFIGURATION_DATA.pricings)

export const NEW_SCRIPT_TEMPLATE = `$\`Write a short poem in code.\`
`
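
The MODEL_PRICINGS table above is presumably what the updated pricing logic reads from. As a rough sketch of how such a record can be turned into a cost estimate (the helper name, the usage shape, and the interpretation of input_cache_token_rebate as a fractional multiplier are assumptions, not the repository's actual code):

interface ModelPricing {
    price_per_million_input_tokens: number
    price_per_million_output_tokens: number
    // Assumed meaning: fraction of the input price charged for cached input tokens.
    input_cache_token_rebate?: number
}

function estimateCost(
    pricing: ModelPricing,
    usage: { inputTokens: number; cachedInputTokens?: number; outputTokens: number }
): number {
    const cached = usage.cachedInputTokens ?? 0
    const rebate = pricing.input_cache_token_rebate ?? 1 // 1 = no cache discount
    const inputCost =
        ((usage.inputTokens - cached) * pricing.price_per_million_input_tokens +
            cached * pricing.price_per_million_input_tokens * rebate) /
        1e6
    const outputCost =
        (usage.outputTokens * pricing.price_per_million_output_tokens) / 1e6
    return inputCost + outputCost
}

With the placeholder prices from the earlier example ($2.50 per million input tokens, $10 per million output tokens, no cached tokens), a run using 100,000 input tokens and 20,000 output tokens would come to roughly $0.25 + $0.20 = $0.45.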
