diff --git a/.github/workflows/anthropic.yml b/.github/workflows/anthropic.yml
index c3abd8369..ccd972dfa 100644
--- a/.github/workflows/anthropic.yml
+++ b/.github/workflows/anthropic.yml
@@ -1,6 +1,9 @@
 name: anthropic tests
 on:
     workflow_dispatch:
+    release:
+        types:
+            - published
     pull_request:
         paths:
             - yarn.lock
diff --git a/.github/workflows/azure.yml b/.github/workflows/azure.yml
new file mode 100644
index 000000000..ce4f02e3b
--- /dev/null
+++ b/.github/workflows/azure.yml
@@ -0,0 +1,38 @@
+name: azure openai tests
+on:
+    workflow_dispatch:
+    release:
+        types:
+            - published
+    pull_request:
+        paths:
+            - yarn.lock
+            - ".github/workflows/azure.yml"
+            - "packages/core/**/*"
+            - "packages/cli/**/*"
+            - "packages/samples/**/*"
+concurrency:
+    group: ${{ github.workflow }}-${{ github.ref }}-azure
+    cancel-in-progress: true
+jobs:
+    tests:
+        runs-on: ubuntu-latest
+        steps:
+            - uses: actions/checkout@v4
+              with:
+                  submodules: "recursive"
+                  fetch-depth: 0
+            - uses: actions/setup-node@v4
+              with:
+                  node-version: "20"
+                  cache: yarn
+            - run: yarn install --frozen-lockfile
+            - name: typecheck
+              run: yarn typecheck
+            - name: compile
+              run: yarn compile
+            - name: poem
+              run: yarn run:script poem --model azure:gpt-4o -tlp 5 --out-trace $GITHUB_STEP_SUMMARY
+              env:
+                  AZURE_OPENAI_API_ENDPOINT: ${{ secrets.AZURE_OPENAI_API_ENDPOINT }}
+                  AZURE_OPENAI_API_KEY: ${{ secrets.AZURE_OPENAI_API_KEY }}
diff --git a/.github/workflows/google.yml b/.github/workflows/google.yml
index 93f7b9e2c..6f61fd4a7 100644
--- a/.github/workflows/google.yml
+++ b/.github/workflows/google.yml
@@ -1,10 +1,13 @@
 name: google tests
 on:
     workflow_dispatch:
+    release:
+        types:
+            - published
     pull_request:
         paths:
             - yarn.lock
-            - ".github/workflows/anthropic.yml"
+            - ".github/workflows/google.yml"
             - "packages/core/**/*"
             - "packages/cli/**/*"
             - "packages/samples/**/*"
diff --git a/.github/workflows/ollama.yml b/.github/workflows/ollama.yml
index 08d7e24ac..bb7587634 100644
--- a/.github/workflows/ollama.yml
+++ b/.github/workflows/ollama.yml
@@ -1,6 +1,9 @@
 name: ollama smoke tests
 on:
     workflow_dispatch:
+    release:
+        types:
+            - published
     pull_request:
         paths:
             - yarn.lock
diff --git a/docs/package.json b/docs/package.json
index 2935051cf..c759f09fb 100644
--- a/docs/package.json
+++ b/docs/package.json
@@ -2,7 +2,7 @@
"name": "docs",
"type": "module",
"private": true,
- "version": "1.86.2",
+ "version": "1.86.4",
"license": "MIT",
"scripts": {
"install:force": "rm yarn.lock && yarn install",
diff --git a/docs/src/content/docs/getting-started/configuration.mdx b/docs/src/content/docs/getting-started/configuration.mdx
index b85325e6e..1123382b1 100644
--- a/docs/src/content/docs/getting-started/configuration.mdx
+++ b/docs/src/content/docs/getting-started/configuration.mdx
@@ -481,7 +481,7 @@ The rest of the steps are the same: Find the deployment name and use it in your
## Azure AI Serverless Deployments
-You can deploy "serverless" models through [Azure AI Studio](https://ai.azure.com/) and pay as you go per token.
+You can deploy "serverless" models through [Azure AI Foundry](https://ai.azure.com/) and pay as you go per token.
You can browse the [Azure AI model catalog](https://ai.azure.com/explore/models)
and use the [serverless API](https://learn.microsoft.com/en-us/azure/ai-studio/how-to/deploy-models-serverless-availability) filter to see the available models.
@@ -493,7 +493,7 @@ They are configured slightly differently.
### Azure AI OpenAI
-The `azure_serverless` provider supports OpenAI models deployed through the Azure AI Studio serverless deployments.
+The `azure_serverless` provider supports OpenAI models deployed through the Azure AI Foundry serverless deployments.
It supports both Entra ID and key-based authentication.
```js "azure_serverless:"
@@ -593,7 +593,7 @@ AZURE_SERVERLESS_OPENAI_API_KEY=...
### Azure AI Models
-The `azure_serverless_models` provider supports non-OpenAI models deployed through the Azure AI Studio serverless deployments.
+The `azure_serverless_models` provider supports non-OpenAI models deployed through the Azure AI Foundry serverless deployments.
```js "azure_serverless_models:"
script({ model: "azure_serverless_models:deployment-id" })
diff --git a/package.json b/package.json
index f563bd11d..324e29727 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
     "name": "genaiscript-workspace",
-    "version": "1.86.2",
+    "version": "1.86.4",
     "license": "MIT",
     "private": true,
     "workspaces": {
diff --git a/packages/cli/package.json b/packages/cli/package.json
index a060fe5b3..7235185f2 100644
--- a/packages/cli/package.json
+++ b/packages/cli/package.json
@@ -1,6 +1,6 @@
 {
     "name": "genaiscript",
-    "version": "1.86.2",
+    "version": "1.86.4",
     "main": "built/genaiscript.cjs",
     "type": "commonjs",
     "bin": {
diff --git a/packages/cli/src/azuretoken.ts b/packages/cli/src/azuretoken.ts
index 78013ba65..2395a93e3 100644
--- a/packages/cli/src/azuretoken.ts
+++ b/packages/cli/src/azuretoken.ts
@@ -87,7 +87,7 @@ export async function createAzureToken(
     // Log the expiration time of the token
     logVerbose(
-        `azure: ${credentialsType || ""} token (${scopes.join(",")}) expires at ${new Date(res.expiresOnTimestamp).toLocaleString()}`
+        `azure: ${credentialsType || ""} token (${scopes.join(",")}) expires on ${new Date(res.expiresOnTimestamp).toUTCString()}`
     )
     return res
@@ -108,12 +108,6 @@ class AzureTokenResolverImpl implements AzureTokenResolver {
         return this._error
     }
 
-    clear() {
-        this._token = undefined
-        this._error = undefined
-        this._resolver = undefined
-    }
-
     async token(
         credentialsType: AzureCredentialsType,
         options?: CancellationOptions
@@ -121,31 +115,33 @@ class AzureTokenResolverImpl implements AzureTokenResolver {
         // cached
         const { cancellationToken } = options || {}
-        if (isAzureTokenExpired(this._token)) this._token = undefined
+        if (isAzureTokenExpired(this._token)) {
+            logVerbose(`azure: ${this.name} token expired`)
+            this._token = undefined
+            this._error = undefined
+        }
         if (this._token || this._error)
             return { token: this._token, error: this._error }
         if (!this._resolver) {
             const scope = await runtimeHost.readSecret(this.envName)
             const scopes = scope ? scope.split(",") : this.scopes
-            const resolver = (this._resolver = createAzureToken(
+            this._resolver = createAzureToken(
                 scopes,
                 credentialsType,
                 cancellationToken
             )
                 .then((res) => {
-                    if (this._resolver !== resolver) return undefined
                     this._token = res
                     this._error = undefined
                     this._resolver = undefined
                     return { token: this._token, error: this._error }
                 })
                 .catch((err) => {
-                    if (this._resolver !== resolver) return undefined
                     this._resolver = undefined
                     this._token = undefined
                     this._error = serializeError(err)
                     return { token: this._token, error: this._error }
-                }))
+                })
         }
         return this._resolver
     }
diff --git a/packages/cli/src/nodehost.ts b/packages/cli/src/nodehost.ts
index 427e72550..b09c736e7 100644
--- a/packages/cli/src/nodehost.ts
+++ b/packages/cli/src/nodehost.ts
@@ -196,9 +196,6 @@ export class NodeHost implements RuntimeHost {
     }
 
     async readConfig(): Promise {
-        this.azureToken.clear()
-        this.azureServerlessToken.clear()
-
         const config = await resolveGlobalConfiguration(this.dotEnvPath)
         const { envFile, modelAliases } = config
         if (modelAliases)
diff --git a/packages/core/package.json b/packages/core/package.json
index 297fa25ae..c1a87a4f4 100644
--- a/packages/core/package.json
+++ b/packages/core/package.json
@@ -1,6 +1,6 @@
 {
     "name": "genaiscript-core-internal",
-    "version": "1.86.2",
+    "version": "1.86.4",
     "main": "src/index.ts",
     "license": "MIT",
     "private": true,
diff --git a/packages/core/src/host.ts b/packages/core/src/host.ts
index 58064117c..023f6c159 100644
--- a/packages/core/src/host.ts
+++ b/packages/core/src/host.ts
@@ -103,7 +103,6 @@ export function isAzureTokenExpired(token: AuthenticationToken) {
 }
 
 export interface AzureTokenResolver {
-    clear(): void
     token(
         credentialsType: AzureCredentialsType,
         options?: CancellationOptions
diff --git a/packages/core/src/openai.ts b/packages/core/src/openai.ts
index a1a720b62..16592161f 100644
--- a/packages/core/src/openai.ts
+++ b/packages/core/src/openai.ts
@@ -44,8 +44,7 @@ import { INITryParse } from "./ini"
 import { serializeChunkChoiceToLogProbs } from "./logprob"
 
 export function getConfigHeaders(cfg: LanguageModelConfiguration) {
-    const { provider } = parseModelIdentifier(cfg.model)
-    let { token, type, base } = cfg
+    let { token, type, base, provider } = cfg
     if (type === "azure_serverless_models") {
         const keys = INITryParse(token)
         if (keys && Object.keys(keys).length > 1) token = keys[cfg.model]
diff --git a/packages/sample/genaisrc/pr-describe.genai.mjs b/packages/sample/genaisrc/pr-describe.genai.mjs
index 85f551cb1..5fffb1bd4 100644
--- a/packages/sample/genaisrc/pr-describe.genai.mjs
+++ b/packages/sample/genaisrc/pr-describe.genai.mjs
@@ -1,7 +1,13 @@
 script({
     temperature: 1,
     title: "pr-describe",
-    system: ["system", "system.fs_find_files", "system.fs_read_file"],
+    system: [
+        "system",
+        "system.output_markdown",
+        "system.assistant",
+        "system.fs_find_files",
+        "system.fs_read_file",
+    ],
     parameters: {
         defaultBranch: {
             type: "string",
diff --git a/packages/sample/package.json b/packages/sample/package.json
index 2129c198c..12b79dd03 100644
--- a/packages/sample/package.json
+++ b/packages/sample/package.json
@@ -1,6 +1,6 @@
 {
     "name": "genaiscript-sample",
-    "version": "1.86.2",
+    "version": "1.86.4",
     "license": "MIT",
     "private": true,
     "scripts": {
diff --git a/packages/vscode/package.json b/packages/vscode/package.json
index 5f4e9f407..2ad8c3707 100644
--- a/packages/vscode/package.json
+++ b/packages/vscode/package.json
@@ -7,7 +7,7 @@
     },
     "displayName": "GenAIScript Insiders",
     "description": "Generative AI Scripting.",
-    "version": "1.86.2",
+    "version": "1.86.4",
     "icon": "icon.png",
     "engines": {
         "vscode": "^1.95.0"
diff --git a/slides/package.json b/slides/package.json
index bc388a799..7d33aefe4 100644
--- a/slides/package.json
+++ b/slides/package.json
@@ -1,6 +1,6 @@
 {
     "name": "genaiscript-slides",
-    "version": "1.86.2",
+    "version": "1.86.4",
     "type": "module",
     "private": true,
     "npm": {