feat: ✨ add support for listing model information in CLI

pelikhan committed Dec 20, 2024
1 parent c62171d commit 0d394b9
Showing 6 changed files with 71 additions and 129 deletions.
86 changes: 11 additions & 75 deletions docs/src/content/docs/reference/cli/commands.md
@@ -1,78 +1,3 @@
{
"large": {
"model": "openai:gpt-4o",
"source": "default",
"candidates": [
"openai:gpt-4o",
"azure_serverless:gpt-4o",
"anthropic:claude-2.1",
"google:gemini-1.5-pro-latest",
"github:gpt-4o",
"client:gpt-4o"
]
},
"small": {
"model": "openai:gpt-4o-mini",
"source": "default",
"candidates": [
"openai:gpt-4o-mini",
"azure_serverless:gpt-4o-mini",
"anthropic:claude-instant-1.2",
"google:gemini-1.5-flash-latest",
"github:gpt-4o-mini",
"client:gpt-4o-mini"
]
},
"vision": {
"model": "openai:gpt-4o",
"source": "default",
"candidates": [
"openai:gpt-4o",
"azure_serverless:gpt-4o",
"anthropic:claude-2.1",
"google:gemini-1.5-flash-latest",
"github:gpt-4o"
]
},
"embeddings": {
"model": "openai:text-embedding-3-small",
"source": "default",
"candidates": [
"openai:text-embedding-3-small",
"github:text-embedding-3-small"
]
},
"reasoning": {
"model": "openai:o1",
"source": "default",
"candidates": [
"openai:o1",
"azure_serverless:o1-preview",
"github:o1-preview"
]
},
"reasoning_small": {
"model": "openai:o1-mini",
"source": "default",
"candidates": [
"openai:o1-mini",
"azure_serverless:o1-mini",
"github:o1-mini"
]
},
"agent": {
"model": "large",
"source": "default"
},
"long": {
"model": "large",
"source": "default"
},
"memory": {
"model": "large",
"source": "default"
}
}
---
title: Commands
description: List of all CLI commands
@@ -327,6 +252,17 @@ Options:
-h, --help display help for command
```

## `models`

```
Usage: genaiscript models [options]
List model information
Options:
-h, --help display help for command
```

Check warning on line 264 in docs/src/content/docs/reference/cli/commands.md (GitHub Actions / build): The section header for the `models` command is missing a colon at the end. It should be `## models:` instead of `## models`.
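
For illustration, running the new command prints the resolved model aliases as YAML. The sample below is not part of the generated help; it is based on the default aliases shown at the top of this diff, and actual entries depend on the configured providers:

```
$ genaiscript models
large:
  model: openai:gpt-4o
  source: default
  candidates:
    - openai:gpt-4o
    - azure_serverless:gpt-4o
    - anthropic:claude-2.1
    - google:gemini-1.5-pro-latest
    - github:gpt-4o
    - client:gpt-4o
small:
  model: openai:gpt-4o-mini
  source: default
  candidates:
    - openai:gpt-4o-mini
    - azure_serverless:gpt-4o-mini
    - anthropic:claude-instant-1.2
    - google:gemini-1.5-flash-latest
    - github:gpt-4o-mini
    - client:gpt-4o-mini
# vision, embeddings, reasoning, reasoning_small, agent, long, and memory follow the same shape
```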

## `cache`

```
9 changes: 7 additions & 2 deletions packages/cli/src/cli.ts
@@ -24,7 +24,7 @@ import {
} from "./parse" // Parsing functions
import { compileScript, createScript, fixScripts, listScripts } from "./scripts" // Script utilities
import { codeQuery } from "./codequery" // Code parsing and query execution
import { envInfo, modelInfo, systemInfo } from "./info" // Information utilities
import { envInfo, modelsInfo, scriptModelInfo, systemInfo } from "./info" // Information utilities
import { scriptTestList, scriptTestsView, scriptsTest } from "./test" // Test functions
import { cacheClear } from "./cache" // Cache management
import "node:console" // Importing console for side effects
@@ -320,7 +320,11 @@ export async function cli() {
.description("List model connection information for scripts")
.argument("[script]", "Script id or file")
.option("-t, --token", "show token")
.action(modelInfo) // Action to show model information
.action(scriptModelInfo) // Action to show model information

const models = program.command("models")
.description("List model information")
.action(modelsInfo)

// Define 'cache' command for cache management
const cache = program.command("cache").description("Cache management")
@@ -446,5 +450,6 @@ export async function cli() {
.option("-e, --error", "show errors")
.option("-m, --models", "show models if possible")
.action(envInfo) // Action to show environment information

program.parse() // Parse command-line arguments
}
9 changes: 8 additions & 1 deletion packages/cli/src/info.ts
@@ -117,7 +117,10 @@ async function resolveScriptsConnectionInfo(
* @param script - The specific script ID or filename to filter by (optional).
* @param options - Configuration options, including whether to show tokens.
*/
export async function modelInfo(script: string, options?: { token?: boolean }) {
export async function scriptModelInfo(
script: string,
options?: { token?: boolean }
) {
const prj = await buildProject()
const templates = prj.scripts.filter(
(t) =>
@@ -128,3 +131,7 @@ export async function modelInfo(script: string, options?: { token?: boolean }) {
const info = await resolveScriptsConnectionInfo(templates, options)
console.log(YAMLStringify(info))
}

export async function modelsInfo() {
console.log(YAML.stringify(runtimeHost.modelAliases))
}
41 changes: 2 additions & 39 deletions packages/cli/src/nodehost.ts
@@ -62,7 +62,7 @@ import { resolveGlobalConfiguration } from "../../core/src/config"
import { HostConfiguration } from "../../core/src/hostconfiguration"
import { resolveLanguageModel } from "../../core/src/lm"
import { CancellationOptions } from "../../core/src/cancellation"
import LLMS from "../../../packages/core/src/llms.json"
import { defaultModelConfigurations } from "../../core/src/llms"

class NodeServerManager implements ServerManager {
async start(): Promise<void> {
@@ -73,43 +73,6 @@ class NodeServerManager implements ServerManager {
}
}

function readModelAliases(): ModelConfigurations {
const aliases = [
LARGE_MODEL_ID,
SMALL_MODEL_ID,
VISION_MODEL_ID,
"embeddings",
"reasoning",
"reasoning_small",
]
const res = {
...(Object.fromEntries(
aliases.map((alias) => [alias, readModelAlias(alias)])
) as ModelConfigurations),
...Object.fromEntries(
Object.entries(LLMS.aliases).map((kv) => [
kv[0],
{
model: kv[1],
source: "default",
} satisfies ModelConfiguration,
])
),
}
return res

function readModelAlias(alias: string) {
const candidates = Object.values(LLMS.providers)
.map(({ aliases }) => (aliases as Record<string, string>)?.[alias])
.filter((c) => !!c)
return deleteEmptyValues({
model: candidates[0],
source: "default",
candidates,
})
}
}

export class NodeHost implements RuntimeHost {
private pulledModels: string[] = []
readonly dotEnvPath: string
@@ -124,7 +87,7 @@ export class NodeHost implements RuntimeHost {
"default" | "cli" | "env" | "config",
Omit<ModelConfigurations, "large" | "small" | "vision" | "embeddings">
> = {
default: readModelAliases(),
default: defaultModelConfigurations(),
cli: {},
env: {},
config: {},
41 changes: 41 additions & 0 deletions packages/core/src/llms.ts
@@ -0,0 +1,41 @@
import { LARGE_MODEL_ID, SMALL_MODEL_ID, VISION_MODEL_ID } from "./constants"
import { ModelConfiguration, ModelConfigurations } from "./host"
import LLMS from "./llms.json"
import { deleteEmptyValues } from "./util"

export function defaultModelConfigurations(): ModelConfigurations {
const aliases = [
LARGE_MODEL_ID,
SMALL_MODEL_ID,
VISION_MODEL_ID,
"embeddings",
"reasoning",
"reasoning_small",
]
const res = {
...(Object.fromEntries(
aliases.map((alias) => [alias, readModelAlias(alias)])
) as ModelConfigurations),
...Object.fromEntries(
Object.entries(LLMS.aliases).map((kv) => [
kv[0],
{
model: kv[1],
source: "default",
} satisfies ModelConfiguration,
])
),
}
return structuredClone(res)

function readModelAlias(alias: string) {
const candidates = Object.values(LLMS.providers)
.map(({ aliases }) => (aliases as Record<string, string>)?.[alias])
.filter((c) => !!c)
return deleteEmptyValues({
model: candidates[0],
source: "default",
candidates,
})
}
}
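
For orientation, `defaultModelConfigurations` relies on just two parts of `llms.json`: a top-level `aliases` map and per-provider `aliases` maps whose values become the candidate list. A minimal sketch of the shape this code assumes, inferred from how the fields are read here; the example values in the comments are illustrative, not the actual file contents:

```ts
// Inferred shape of llms.json as consumed by defaultModelConfigurations(); illustrative only.
type LLMSData = {
    // Top-level alias -> model (or another alias), surfaced as { model, source: "default" },
    // e.g. { "agent": "large", "long": "large", "memory": "large" }
    aliases: Record<string, string>
    // Provider entries; each may map alias ids ("large", "small", "vision", "embeddings", ...)
    // to a model reference that is used verbatim as a candidate.
    // (Object.values is used above, so a keyed object of providers would work equally well.)
    providers: { aliases?: Record<string, string> }[]
}
```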
14 changes: 2 additions & 12 deletions packages/core/src/testhost.ts
@@ -19,12 +19,6 @@ import {
ResponseStatus,
} from "./host"
import { TraceOptions } from "./trace"
import {
DEFAULT_EMBEDDINGS_MODEL,
DEFAULT_LARGE_MODEL,
DEFAULT_SMALL_MODEL,
DEFAULT_VISION_MODEL,
} from "./constants"
import {
dirname,
extname,
@@ -38,6 +32,7 @@ import {
import { LanguageModel } from "./chat"
import { NotSupportedError } from "./error"
import { Project } from "./server/messages"
import { defaultModelConfigurations } from "./llms"

// Function to create a frozen object representing Node.js path methods
// This object provides utility methods for path manipulations
@@ -68,12 +63,7 @@ export class TestHost implements RuntimeHost {
azureToken: AzureTokenResolver = undefined

// Default options for language models
readonly modelAliases: ModelConfigurations = {
large: { model: DEFAULT_LARGE_MODEL, source: "default" },
small: { model: DEFAULT_SMALL_MODEL, source: "default" },
vision: { model: DEFAULT_VISION_MODEL, source: "default" },
embeddings: { model: DEFAULT_EMBEDDINGS_MODEL, source: "default" },
}
readonly modelAliases: ModelConfigurations = defaultModelConfigurations()

// Static method to set this class as the runtime host
static install() {
