refactoring assembly of messages #794

Merged
11 commits merged on Oct 24, 2024
1 change: 1 addition & 0 deletions .github/workflows/genai-pr-commit-review.yml
@@ -30,6 +30,7 @@ jobs:
run: git fetch origin && git pull origin main:main
- name: genaiscript pr-review-commit
run: node packages/cli/built/genaiscript.cjs run pr-review-commit --out ./temp/genai/pr-review-commit -prr --out-trace $GITHUB_STEP_SUMMARY
continue-on-error: true
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_COMMIT_SHA: ${{ github.event.pull_request.head.sha}}
1 change: 1 addition & 0 deletions .github/workflows/genai-pr-docs-commit-review.yml
@@ -25,6 +25,7 @@ jobs:
run: git fetch origin && git pull origin main:main
- name: genaiscript pr-review-commit
run: node packages/cli/built/genaiscript.cjs run pr-docs-review-commit --out ./temp/genai/pr-docs-review-commit -prr --out-trace $GITHUB_STEP_SUMMARY
continue-on-error: true
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_COMMIT_SHA: ${{ github.event.pull_request.head.sha}}
2 changes: 1 addition & 1 deletion .github/workflows/ollama.yml
@@ -31,6 +31,6 @@ jobs:
- name: download ollama docker
run: docker run -d -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama
- name: run summarize-ollama-phi3
run: yarn test:summarize --model ollama:phi3 --out ./temp/summarize-ollama-phi3
run: yarn test:summarize --model ollama:phi3.5 --out ./temp/summarize-ollama-phi3
# - name: run vector-search
# run: yarn run:script vector-search --model ollama:phi3 --out ./temp/rag
32 changes: 16 additions & 16 deletions .github/workflows/openai.yml
@@ -1,22 +1,22 @@
name: openai smoke tests
on:
workflow_dispatch:
pull_request:
paths:
- yarn.lock
- ".github/workflows/github-models.yml"
- "packages/core/**/*"
- "packages/cli/**/*"
- "packages/samples/**/*"
push:
branches:
- main
paths:
- yarn.lock
- ".github/workflows/github-models.yml"
- "packages/core/**/*"
- "packages/cli/**/*"
- "packages/samples/**/*"
# pull_request:
# paths:
# - yarn.lock
# - ".github/workflows/github-models.yml"
# - "packages/core/**/*"
# - "packages/cli/**/*"
# - "packages/samples/**/*"
# push:
# branches:
# - main
# paths:
# - yarn.lock
# - ".github/workflows/github-models.yml"
# - "packages/core/**/*"
# - "packages/cli/**/*"
# - "packages/samples/**/*"
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-github-models
cancel-in-progress: true
8 changes: 3 additions & 5 deletions .github/workflows/playwright.yml
@@ -35,9 +35,7 @@ jobs:
run: yarn typecheck
- name: compile
run: yarn compile
- name: download ollama docker
run: docker run -d -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama
- name: run browse-text
run: yarn run:script browse-text --out ./temp/browse-text
env:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
OPENAI_API_TYPE: ${{ secrets.OPENAI_API_TYPE }}
OPENAI_API_BASE: ${{ secrets.OPENAI_API_BASE }}
run: yarn run:script browse-text --out ./temp/browse-text --model ollama:phi3.5
8 changes: 8 additions & 0 deletions docs/src/content/docs/reference/scripts/system.mdx
@@ -2978,7 +2978,15 @@ JSON schemas can also be applied to YAML or TOML files.
...
\`\`\`


## Code section with Schema

When you generate a JSON, YAML, or CSV code section according to a named schema,
you MUST add the schema identifier in the code fence header.
`

fence("...", { language: "json", schema: "<schema-identifier>" })

`````
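For illustration only, a concrete call following the pattern documented above might look like the sketch below; the `CITY_SCHEMA` identifier and the `cities` value are hypothetical and not part of this change.

```ts
// Hedged sketch: emit a JSON code section whose fence header names a
// previously declared schema (here the hypothetical CITY_SCHEMA), so the
// generated section can later be validated against that schema.
const cities = [{ name: "Paris", population: 2161000 }]
fence(JSON.stringify(cities, null, 2), {
    language: "json",
    schema: "CITY_SCHEMA",
})
```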


6 changes: 3 additions & 3 deletions docs/yarn.lock
@@ -5003,9 +5003,9 @@ yocto-queue@^1.1.1:
integrity sha512-b4JR1PFR10y1mKjhHY9LaGo6tmrgjit7hxVIeAmyMw3jegXR4dhYqLaQF5zMXZxY7tLpMyJeLjr1C4rLmkVe8g==

zod-to-json-schema@^3.23.3:
version "3.23.4"
resolved "https://registry.yarnpkg.com/zod-to-json-schema/-/zod-to-json-schema-3.23.4.tgz#f6fa99a15412ac2c79721ad1b9d901ecfb11575b"
integrity sha512-LjSQ9WT8qkvb4OHlkC3AlizvC+pwKIxpBohK4FLzCm7RyJES8uLpHP6BWljCtBjBXJ9Xa1R9IGZcICuSf8xl2Q==
version "3.23.5"
resolved "https://registry.yarnpkg.com/zod-to-json-schema/-/zod-to-json-schema-3.23.5.tgz#ec23def47dcafe3a4d640eba6a346b34f9a693a5"
integrity sha512-5wlSS0bXfF/BrL4jPAbz9da5hDlDptdEppYfe+x4eIJ7jioqKG9uUxOwPzqof09u/XeVdrgFu29lZi+8XNDJtA==

zod-to-ts@^1.2.0:
version "1.2.0"
2 changes: 1 addition & 1 deletion package.json
@@ -83,7 +83,7 @@
"devDependencies": {
"@inquirer/prompts": "^7.0.0",
"glob": "^11.0.0",
"npm-check-updates": "^17.1.4",
"npm-check-updates": "^17.1.5",
"prettier": "^3.3.3",
"zx": "^8.1.9"
}
4 changes: 2 additions & 2 deletions packages/cli/package.json
@@ -76,15 +76,15 @@
"commander": "^12.1.0",
"diff": "^7.0.0",
"dotenv": "^16.4.5",
"es-toolkit": "^1.25.2",
"es-toolkit": "^1.26.0",
"esbuild": "^0.24.0",
"execa": "^9.4.1",
"fs-extra": "^11.2.0",
"glob": "^11.0.0",
"memorystream": "^0.3.1",
"node-sarif-builder": "^3.2.0",
"octokit": "^4.0.2",
"openai": "^4.68.3",
"openai": "^4.68.4",
"pretty-bytes": "^6.1.1",
"replace-ext": "^2.0.0",
"ws": "^8.18.0",
2 changes: 1 addition & 1 deletion packages/cli/src/playwright.ts
@@ -40,7 +40,7 @@ export class BrowserManager {
undefined,
"npx",
[
"--yes,",
"--yes",
`playwright@${PLAYWRIGHT_VERSION}`,
"install",
"--with-deps",
4 changes: 2 additions & 2 deletions packages/core/package.json
@@ -41,7 +41,7 @@
"csv-stringify": "^6.5.1",
"diff": "^7.0.0",
"dotenv": "^16.4.5",
"es-toolkit": "^1.25.2",
"es-toolkit": "^1.26.0",
"esbuild": "^0.24.0",
"fast-xml-parser": "^4.5.0",
"fetch-retry": "^6.0.0",
@@ -63,7 +63,7 @@
"minimatch": "^10.0.1",
"minisearch": "^7.1.0",
"mustache": "^4.2.0",
"openai": "^4.68.3",
"openai": "^4.68.4",
"p-limit": "^6.1.0",
"parse-diff": "^0.11.1",
"prettier": "^3.3.3",
75 changes: 66 additions & 9 deletions packages/core/src/chat.ts
@@ -24,6 +24,7 @@ import {
MAX_DATA_REPAIRS,
MAX_TOOL_CALLS,
MAX_TOOL_CONTENT_TOKENS,
SYSTEM_FENCE,
} from "./constants"
import { parseAnnotations } from "./annotations"
import { errorMessage, isCancelError, serializeError } from "./error"
@@ -32,10 +33,12 @@ import { createChatTurnGenerationContext } from "./runpromptcontext"
import { dedent } from "./indent"
import { traceLanguageModelConnection } from "./models"
import {
ChatCompletionAssistantMessageParam,
ChatCompletionContentPartImage,
ChatCompletionMessageParam,
ChatCompletionResponse,
ChatCompletionsOptions,
ChatCompletionSystemMessageParam,
ChatCompletionTool,
ChatCompletionToolCall,
ChatCompletionUserMessageParam,
@@ -564,17 +567,26 @@ async function processChatMessage(
const node = ctx.node
checkCancelled(cancellationToken)
// expand template
const { errors, userPrompt } = await renderPromptNode(
options.model,
node,
{
const { errors, messages: participantMessages } =
await renderPromptNode(options.model, node, {
flexTokens: options.flexTokens,
trace,
}
)
if (userPrompt?.trim().length) {
trace.detailsFenced(`πŸ’¬ message`, userPrompt, "markdown")
messages.push({ role: "user", content: userPrompt })
})
if (participantMessages?.length) {
if (
participantMessages.some(
({ role }) => role === "system"
)
)
throw new Error(
"system messages not supported for chat participants"
)
renderMessagesToMarkdown(participantMessages)
trace.details(
`πŸ’¬ messages (${participantMessages.length})`,
renderMessagesToMarkdown(participantMessages)
)
messages.push(...participantMessages)
needsNewTurn = true
} else trace.item("no message")
if (errors?.length) {
@@ -785,3 +797,48 @@ export function tracePromptResult(trace: MarkdownTrace, resp: RunPromptResult) {
"\n\n" + HTMLEscape(prettifyMarkdown(text)) + "\n\n"
)
}

export function appendUserMessage(
messages: ChatCompletionMessageParam[],
content: string
) {
if (!content) return
const last = messages.at(-1) as ChatCompletionUserMessageParam
if (last?.role === "user") last.content += content + "\n"
else
messages.push({
role: "user",
content,
} as ChatCompletionUserMessageParam)
}

export function appendAssistantMessage(
messages: ChatCompletionMessageParam[],
content: string
) {
if (!content) return
const last = messages.at(-1) as ChatCompletionAssistantMessageParam
if (last?.role === "assistant") last.content += content
else
messages.push({
role: "assistant",
content,
} satisfies ChatCompletionAssistantMessageParam)
}

export function appendSystemMessage(
messages: ChatCompletionMessageParam[],
content: string
) {
if (!content) return
let last = messages[0] as ChatCompletionSystemMessageParam
// ensure the first message is a system message; start it empty so the
// content below is only appended once
if (last?.role !== "system") {
last = {
role: "system",
content: "",
} as ChatCompletionSystemMessageParam
messages.unshift(last)
}
if (last.content) last.content += SYSTEM_FENCE
last.content += content
}
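For context, a minimal usage sketch of these helpers; the import paths and message contents are assumptions for illustration only, not part of the diff:

```ts
import {
    appendAssistantMessage,
    appendSystemMessage,
    appendUserMessage,
} from "./chat"
import type { ChatCompletionMessageParam } from "./chattypes"

const messages: ChatCompletionMessageParam[] = []
appendSystemMessage(messages, "You are a terse assistant.")
// a second system fragment is merged into the existing system message,
// separated by SYSTEM_FENCE
appendSystemMessage(messages, "Answer in one sentence.")
appendUserMessage(messages, "Summarize the repository.")
appendAssistantMessage(messages, "It is a CLI for scripting LLM prompts.")
// messages now holds one system, one user, and one assistant entry
```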