diff --git a/packages/core/src/chat.ts b/packages/core/src/chat.ts
index c645be5843..47ee709250 100644
--- a/packages/core/src/chat.ts
+++ b/packages/core/src/chat.ts
@@ -32,6 +32,7 @@ import { createChatTurnGenerationContext } from "./runpromptcontext"
 import { dedent } from "./indent"
 import { traceLanguageModelConnection } from "./models"
 import {
+    ChatCompletionAssistantMessageParam,
     ChatCompletionContentPartImage,
     ChatCompletionMessageParam,
     ChatCompletionResponse,
@@ -578,7 +579,11 @@ async function processChatMessage(
                 throw new Error(
                     "system messages not supported for chat participants"
                 )
-            trace.detailsFenced(`💬 message`, userPrompt, "markdown")
+            renderMessagesToMarkdown(participantMessages)
+            trace.details(
+                `💬 messages (${participantMessages.length})`,
+                renderMessagesToMarkdown(participantMessages)
+            )
             messages.push(...participantMessages)
             needsNewTurn = true
         } else trace.item("no message")
@@ -790,3 +795,29 @@ export function tracePromptResult(trace: MarkdownTrace, resp: RunPromptResult) {
         "\n\n" + HTMLEscape(prettifyMarkdown(text)) + "\n\n"
     )
 }
+
+export function appendUserMessage(
+    messages: ChatCompletionMessageParam[],
+    content: string
+) {
+    const last = messages.at(-1) as ChatCompletionUserMessageParam
+    if (last?.role === "user") last.content += content + "\n"
+    else
+        messages.push({
+            role: "user",
+            content,
+        } as ChatCompletionUserMessageParam)
+}
+
+export function appendAssistantMessage(
+    messages: ChatCompletionMessageParam[],
+    content: string
+) {
+    const last = messages.at(-1) as ChatCompletionAssistantMessageParam
+    if (last?.role === "assistant") last.content += content
+    else
+        messages.push({
+            role: "assistant",
+            content,
+        } satisfies ChatCompletionAssistantMessageParam)
+}
diff --git a/packages/core/src/expander.ts b/packages/core/src/expander.ts
index e156198deb..902b76627d 100644
--- a/packages/core/src/expander.ts
+++ b/packages/core/src/expander.ts
@@ -1,7 +1,7 @@
 import { Project, PromptScript } from "./ast"
 import { assert, normalizeFloat, normalizeInt } from "./util"
 import { MarkdownTrace } from "./trace"
-import { errorMessage, isCancelError } from "./error"
+import { errorMessage, isCancelError, NotSupportedError } from "./error"
 import {
     JS_REGEX,
     MAX_TOOL_CALLS,
@@ -293,10 +293,18 @@ export async function expandTemplate(
             if (sysr.fileOutputs) fileOutputs.push(...sysr.fileOutputs)
             if (sysr.logs?.length)
                 trace.details("📝 console.log", sysr.logs)
-            if (sysr.text) {
-                systemMessage.content +=
-                    SYSTEM_FENCE + "\n" + sysr.text + "\n"
-                trace.fence(sysr.text, "markdown")
+            for (const smsg of sysr.messages) {
+                if (
+                    smsg.role === "user" &&
+                    typeof smsg.content === "string"
+                ) {
+                    systemMessage.content +=
+                        SYSTEM_FENCE + "\n" + smsg.content + "\n"
+                    trace.fence(smsg.content, "markdown")
+                } else
+                    throw new NotSupportedError(
+                        "only string user messages supported in system"
+                    )
             }
             if (sysr.aici) {
                 trace.fence(sysr.aici, "yaml")
@@ -328,20 +336,14 @@
         const schemaTs = JSONSchemaStringifyToTypeScript(responseSchema, {
             typeName,
         })
-        messages.unshift({
-            role: "system",
-            content: `You are a service that translates user requests
+        systemMessage.content += `You are a service that translates user requests
 into JSON objects of type "${typeName}"
 according to the following TypeScript definitions:
 \`\`\`ts
 ${schemaTs}
-\`\`\``,
-        })
+\`\`\``
     } else if (responseType === "json_object") {
-        messages.unshift({
-            role: "system",
-            content: `Answer using JSON.`,
-        })
+        systemMessage.content += SYSTEM_FENCE + "Answer using JSON.\n"
     } else if (responseType === "json_schema") {
         if (!responseSchema)
             throw new Error(`responseSchema is required for json_schema`)
@@ -349,29 +351,7 @@ ${schemaTs}
             toStrictJSONSchema(responseSchema)
     }
     if (systemMessage.content) messages.unshift(systemMessage)
-
-    if (prompt.assistantText) {
-        trace.detailsFenced("🤖 assistant", prompt.assistantText, "markdown")
-        const assistantMessage: ChatCompletionAssistantMessageParam = {
-            role: "assistant",
-            content: prompt.assistantText,
-        }
-        messages.push(assistantMessage)
-    }
-    if (prompt.systemText) {
-        trace.detailsFenced("👾 system", prompt.systemText, "markdown")
-        const systemMessage: ChatCompletionSystemMessageParam = {
-            role: "system",
-            content: prompt.systemText,
-        }
-        // insert system messages after the last system role message in messages
-        // assume system messages are at the start
-        let li = -1
-        for (let li = 0; li < messages.length; li++)
-            if (messages[li].role === "system") break
-        messages.splice(li, 0, systemMessage)
-    }
-
+    messages.push(...prompt.messages)
     trace.endDetails()
 
     return {
diff --git a/packages/core/src/promptdom.ts b/packages/core/src/promptdom.ts
index 0f55027196..2678aa9889 100644
--- a/packages/core/src/promptdom.ts
+++ b/packages/core/src/promptdom.ts
@@ -16,12 +16,11 @@ import {
     TEMPLATE_ARG_FILE_MAX_TOKENS,
 } from "./constants"
 import { parseModelIdentifier } from "./models"
-import { toChatCompletionUserMessage } from "./chat"
+import { appendAssistantMessage, appendUserMessage } from "./chat"
 import { errorMessage } from "./error"
 import { tidyData } from "./tidy"
 import { dedent } from "./indent"
 import {
-    ChatCompletionAssistantMessageParam,
     ChatCompletionMessageParam,
     ChatCompletionSystemMessageParam,
     ChatCompletionUserMessageParam,
@@ -950,24 +949,9 @@ export async function renderPromptNode(
                 content,
             } as ChatCompletionSystemMessageParam)
     }
-    const appendUser = (content: string) => {
-        const last = messages.at(-1) as ChatCompletionUserMessageParam
-        if (last?.role === "user") last.content += content + "\n"
-        else
-            messages.push({
-                role: "user",
-                content,
-            } as ChatCompletionUserMessageParam)
-    }
-    const appendAssistant = (content: string) => {
-        const last = messages.at(-1) as ChatCompletionAssistantMessageParam
-        if (last?.role === "assistant") last.content += content
-        else
-            messages.push({
-                role: "assistant",
-                content,
-            } satisfies ChatCompletionAssistantMessageParam)
-    }
+    const appendUser = (content: string) => appendUserMessage(messages, content)
+    const appendAssistant = (content: string) =>
+        appendAssistantMessage(messages, content)
 
     const images: PromptImage[] = []
     const errors: unknown[] = []
diff --git a/packages/core/src/runpromptcontext.ts b/packages/core/src/runpromptcontext.ts
index 19b69ae0cb..1c8c235311 100644
--- a/packages/core/src/runpromptcontext.ts
+++ b/packages/core/src/runpromptcontext.ts
@@ -13,7 +13,6 @@ import {
     createSchemaNode,
     createStringTemplateNode,
     createTextNode,
-    createSystemNode,
     renderPromptNode,
     createOutputProcessor,
     createFileMerge,
@@ -676,10 +675,18 @@ export function createChatGenerationContext(
                         fileOutputs.push(...sysr.fileOutputs)
                     if (sysr.logs?.length)
                         runTrace.details("📝 console.log", sysr.logs)
-                    if (sysr.text) {
-                        systemMessage.content +=
-                            SYSTEM_FENCE + "\n" + sysr.text + "\n"
-                        runTrace.fence(sysr.text, "markdown")
+                    for (const smsg of sysr.messages) {
+                        if (
+                            smsg.role === "user" &&
+                            typeof smsg.content === "string"
+                        ) {
+                            systemMessage.content +=
+                                SYSTEM_FENCE + "\n" + smsg.content + "\n"
+                            runTrace.fence(smsg.content, "markdown")
+                        } else
+                            throw new NotSupportedError(
+                                "only string user messages supported in system"
+                            )
                     }
                     if (sysr.aici) {
                         runTrace.fence(sysr.aici, "yaml")
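
Not part of the diff above — a minimal usage sketch of the two helpers this change exports from `packages/core/src/chat.ts`. It assumes only that `./chat` resolves as it does inside that package; the message array's element type is taken from the helper's own signature so no extra type import is needed. The helpers coalesce consecutive fragments of the same role into the last message rather than pushing a new entry each time (the user variant also appends a newline when it merges):

```ts
import { appendUserMessage, appendAssistantMessage } from "./chat"

// Start with an empty transcript; the element type comes from the helper signature.
const messages: Parameters<typeof appendUserMessage>[0] = []

appendUserMessage(messages, "Summarize the attached file.\n")
appendUserMessage(messages, "Keep it under three bullet points.")
// messages[0] is a single user message:
// "Summarize the attached file.\nKeep it under three bullet points.\n"

appendAssistantMessage(messages, "- first point")
appendAssistantMessage(messages, "\n- second point")
// messages[1] is a single assistant message: "- first point\n- second point"

console.log(messages.length) // 2
```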