Skip to content

Commit

Permalink
serializable errors (#306)
Browse files Browse the repository at this point in the history
* serialize errors

* more cleanup

* trace as yaml

* more cleanup

* show trace when error

* merciful with cancel
  • Loading branch information
pelikhan authored Mar 28, 2024
1 parent fef0e2c commit 6f5b7cb
Show file tree
Hide file tree
Showing 19 changed files with 102 additions and 84 deletions.
7 changes: 4 additions & 3 deletions packages/cli/src/llamaindexretreival.ts
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ import {
fileExists,
installImport,
lookupMime,
serializeError,
} from "genaiscript-core"
import type { BaseReader, NodeWithScore, Metadata } from "llamaindex"
import type { GenericFileSystem } from "@llamaindex/env"
Expand All @@ -26,7 +27,7 @@ class BlobFileSystem implements GenericFileSystem {
constructor(
readonly filename: string,
readonly blob: Blob
) {}
) { }
writeFile(path: string, content: string): Promise<void> {
throw new Error("Method not implemented.")
}
Expand Down Expand Up @@ -66,7 +67,7 @@ export class LlamaIndexRetreivalService implements RetreivalService {
private module: PromiseType<ReturnType<typeof tryImportLlamaIndex>>
private READERS: Record<string, BaseReader>

constructor(readonly host: Host) {}
constructor(readonly host: Host) { }

async init(trace?: MarkdownTrace) {
if (this.module) return
Expand Down Expand Up @@ -198,7 +199,7 @@ export class LlamaIndexRetreivalService implements RetreivalService {
if (!reader)
return {
ok: false,
error: `no reader for content type '${type}'`,
error: serializeError(new Error(`no reader for content type '${type}'`)),
}
const fs = new BlobFileSystem(filenameOrUrl, blob)
const documents = (await reader.loadData(filenameOrUrl, fs)).map(
Expand Down
3 changes: 2 additions & 1 deletion packages/cli/src/server.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ import {
outline,
CORE_VERSION,
ServerResponse,
serializeError,
} from "genaiscript-core"

export async function startServer(options: { port: string }) {
Expand Down Expand Up @@ -76,7 +77,7 @@ export async function startServer(options: { port: string }) {
}
response.ok = true
} catch (e) {
response = { ok: false, error: e.message }
response = { ok: false, error: serializeError(e) }
} finally {
if (response.error) logError(response.error)
ws.send(JSON.stringify({ id, response }))
Expand Down
1 change: 1 addition & 0 deletions packages/core/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@
"openai": "^4.29.0",
"prettier": "^3.2.5",
"pretty-bytes": "^6.1.1",
"serialize-error": "^11.0.3",
"toml": "^3.0.0",
"typescript": "^5.3.3",
"yaml": "^2.4.1"
Expand Down
3 changes: 1 addition & 2 deletions packages/core/src/aici.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,14 +2,13 @@ import {
ChatCompletionHandler,
ChatCompletionResponse,
LanguageModel,
RequestError,
} from "./chat"
import { PromptNode, visitNode } from "./promptdom"
import wrapFetch from "fetch-retry"
import { fromHex, logError, logVerbose, utf8Decode } from "./util"
import { AICI_CONTROLLER, TOOL_ID } from "./constants"
import { host } from "./host"
import { NotSupportedError } from "./error"
import { NotSupportedError, RequestError } from "./error"
import { ChatCompletionContentPartText } from "openai/resources"

function renderAICINode(node: AICINode) {
Expand Down
16 changes: 0 additions & 16 deletions packages/core/src/chat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -79,22 +79,6 @@ export interface ChatCompletionsOptions {
maxDelay?: number
}

/**
 * Error raised when an OpenAI HTTP request fails.
 * Carries the HTTP status, raw/parsed response body, and a retry hint
 * so callers can decide whether to retry or surface the failure.
 */
export class RequestError extends Error {
    constructor(
        public readonly status: number,
        public readonly statusText: string,
        public readonly body: any,
        public readonly bodyText: string,
        readonly retryAfter: number
    ) {
        // Prefer the server-provided error message; otherwise fall
        // back to the HTTP status line.
        const detail = body?.message
            ? body?.message
            : `${statusText} (${status})`
        super(`OpenAI error: ${detail}`)
    }
}

export function toChatCompletionUserMessage(
expanded: string,
images?: PromptImage[]
Expand Down
2 changes: 1 addition & 1 deletion packages/core/src/docx.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ export async function DOCXTryParse(
const results = await extractRawText({ path })
return results.value
} catch (error) {
logError(error.message)
logError(error)
trace?.error(`reading docx`, error)
return undefined
}
Expand Down
63 changes: 43 additions & 20 deletions packages/core/src/error.ts
Original file line number Diff line number Diff line change
@@ -1,22 +1,52 @@
import { RequestError } from "./chat"
import { serializeError as rawSerializeError, ErrorObject as RawErrorObject } from 'serialize-error';

export function createCancelError(msg: string) {
const e = new Error(msg)
;(e as any).__cancel = true
return e
export type ErrorObject = RawErrorObject;

/**
 * Normalize an arbitrary thrown value into a plain, JSON-friendly
 * ErrorObject so it can cross serialization boundaries (e.g. the
 * websocket server responses that embed `error`).
 */
export function serializeError(
    e: unknown | string | Error | ErrorObject
): ErrorObject {
    if (e instanceof Error) {
        // Real Error instances go through serialize-error; depth is
        // capped to keep the payload small and toJSON is bypassed.
        return rawSerializeError(e, { maxDepth: 3, useToJSON: false })
    }
    // Already-serialized error objects pass through untouched.
    if (e instanceof Object) return e as ErrorObject
    if (typeof e === "string") return { message: e }
    if (e === undefined || e === null) return {}
    // Remaining primitives (number, boolean, ...) are stringified.
    return { message: e.toString?.() }
}

/**
 * Thrown when a script or user deliberately cancels an operation.
 * Recognized downstream by `name === "CancelError"` (see isCancelError).
 */
export class CancelError extends Error {
    constructor(message: string) {
        super(message)
        // The name is the discriminator used by isCancelError.
        this.name = "CancelError"
    }
}

/**
 * Thrown when a requested feature or operation is not supported
 * (e.g. an unimplemented language-model capability).
 */
export class NotSupportedError extends Error {
    constructor(message: string) {
        super(message)
        // Distinguishes this error type after serialization.
        this.name = "NotSupportedError"
    }
}

export function throwError(e: string | Error, cancel?: boolean) {
if (typeof e === "string") e = new Error(e)
if (cancel)
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(e as any).__cancel = true
throw e
/**
 * Error raised when an LLM HTTP request fails.
 * Carries the HTTP status, raw/parsed response body, and a retry hint
 * so callers can decide whether to retry or surface the failure.
 */
export class RequestError extends Error {
    constructor(
        public readonly status: number,
        public readonly statusText: string,
        public readonly body: any,
        public readonly bodyText: string,
        readonly retryAfter: number
    ) {
        // Prefer the server-provided error message; otherwise fall
        // back to the HTTP status line.
        const detail = body?.message
            ? body?.message
            : `${statusText} (${status})`
        super(`LLM error: ${detail}`)
    }
}

export function isCancelError(e: Error) {
export function isCancelError(e: Error | ErrorObject) {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
return !!(e as any)?.__cancel || e.name === "AbortError"
return e?.name === "CancelError" || e?.name === "AbortError"
}

export function isTokenError(e: Error) {
Expand All @@ -30,10 +60,3 @@ export function isRequestError(e: Error, statusCode?: number, code?: string) {
(code === undefined || code === e.body?.code)
)
}

/**
 * Thrown when a requested feature or operation is not supported.
 *
 * NOTE(review): `options` is accepted but never forwarded to
 * `super(message, options)`, so any `cause` supplied by callers is
 * silently dropped — TODO confirm whether forwarding is intended
 * (requires an ES2022+ Error constructor).
 */
export class NotSupportedError extends Error {
    constructor(message: string, options?: ErrorOptions) {
        super(message)
        // Distinguishes this error type after serialization.
        this.name = "NotSupportedError"
    }
}
3 changes: 2 additions & 1 deletion packages/core/src/host.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import { CancellationToken } from "./cancellation"
import { ErrorObject } from "./error"
import { Progress } from "./progress"
import { MarkdownTrace } from "./trace"

Expand Down Expand Up @@ -65,7 +66,7 @@ export interface RetreivalClientOptions {

export interface ResponseStatus {
ok: boolean
error?: string
error?: ErrorObject
status?: number
}

Expand Down
2 changes: 1 addition & 1 deletion packages/core/src/oai_token.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { RequestError } from "./chat"
import { AZURE_OPENAI_API_VERSION } from "./constants"
import { RequestError } from "./error"
import { OAIToken, host } from "./host"
import { fromBase64, logInfo, logWarn, utf8Decode } from "./util"

Expand Down
7 changes: 3 additions & 4 deletions packages/core/src/openai.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
import { initToken } from "./oai_token"
import { logError, logVerbose } from "./util"
import { host } from "./host"
import {
Expand All @@ -16,9 +15,9 @@ import {
ChatCompletionResponse,
ChatCompletionToolCall,
LanguageModel,
RequestError,
getChatCompletionCache,
} from "./chat"
import { RequestError } from "./error"

const OpenAIChatCompletion: ChatCompletionHandler = async (
req,
Expand Down Expand Up @@ -137,11 +136,11 @@ const OpenAIChatCompletion: ChatCompletionHandler = async (
let body: string
try {
body = await r.text()
} catch (e) {}
} catch (e) { }
let bodyJSON: { error: unknown }
try {
bodyJSON = body ? JSON.parse(body) : undefined
} catch (e) {}
} catch (e) { }
throw new RequestError(
r.status,
r.statusText,
Expand Down
2 changes: 1 addition & 1 deletion packages/core/src/pdf.ts
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ export async function PDFTryParse(
}
return pages
} catch (error) {
logError(error.message)
logError(error)
return undefined
}
}
Expand Down
8 changes: 4 additions & 4 deletions packages/core/src/promptcontext.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@ import { OAIToken, host } from "./host"
import { MarkdownTrace } from "./trace"
import { YAMLParse, YAMLStringify } from "./yaml"
import { createParsers } from "./parsers"
import { throwError } from "./error"
import { upsert, search } from "./retreival"
import { outline } from "./highlights"
import { readText } from "./fs"
Expand All @@ -29,6 +28,7 @@ import {
} from "./runpromptcontext"
import { CSVParse, CSVToMarkdown } from "./csv"
import { INIParse, INIStringify, INITryParse } from "./ini"
import { CancelError } from "./error"

function stringLikeToFileName(f: string | LinkedFile) {
return typeof f === "string" ? f : f?.filename
Expand Down Expand Up @@ -192,8 +192,8 @@ export function createPromptContext(

const ctx = Object.freeze<PromptContext & RunPromptContextNode>({
...createRunPromptContext(options, env, trace),
script: () => {},
system: () => {},
script: () => { },
system: () => { },
env,
path,
fs,
Expand All @@ -215,7 +215,7 @@ export function createPromptContext(
appendPromptChild(createFileMergeNode(fn))
},
cancel: (reason?: string) => {
throwError(reason || "user cancelled", true)
throw new CancelError(reason || "user cancelled")
},
defData: (name, data, defOptions) => {
appendPromptChild(createDefDataNode(name, data, env, defOptions))
Expand Down
13 changes: 3 additions & 10 deletions packages/core/src/promptrunner.ts
Original file line number Diff line number Diff line change
@@ -1,9 +1,6 @@
import {
ChatCompletionMessageParam,
ChatCompletionResponse,
ChatCompletionTool,
RequestError,
toChatCompletionUserMessage,
} from "./chat"
import { Fragment, PromptTemplate } from "./ast"
import { commentAttributes, stringToPos } from "./parser"
Expand All @@ -24,7 +21,6 @@ import { applyChangeLog, parseChangeLogs } from "./changelog"
import { parseAnnotations } from "./annotations"
import { validateFencesWithSchema } from "./schema"
import { CORE_VERSION } from "./version"
import { createCancelError } from "./error"
import { fileExists, readText } from "./fs"
import { estimateChatTokens } from "./tokens"
import { CSVToMarkdown } from "./csv"
Expand All @@ -34,6 +30,7 @@ import { FragmentTransformResponse, expandTemplate } from "./expander"
import { resolveLanguageModel } from "./models"
import { MAX_DATA_REPAIRS } from "./constants"
import { initToken } from "./oai_token"
import { CancelError, RequestError } from "./error"

async function fragmentVars(
trace: MarkdownTrace,
Expand Down Expand Up @@ -188,7 +185,7 @@ export async function runTemplate(
if (!success) {
const text = success === null ? "Script cancelled" : "Script failed"
return <FragmentTransformResponse>{
error: success === null ? createCancelError(text) : new Error(text),
error: success === null ? new CancelError(text) : new Error(text),
prompt: messages,
vars,
trace: trace.content,
Expand Down Expand Up @@ -305,11 +302,8 @@ export async function runTemplate(
status()
}
} catch (error: unknown) {
trace.error(`llm error`, error)
if (error instanceof TypeError) {
trace.heading(3, `Request error`)
trace.item(error.message)
if (error.cause) trace.fence(error.cause)
if (error.stack) trace.fence(error.stack)
resp = {
text: "Unexpected error",
}
Expand All @@ -330,7 +324,6 @@ export async function runTemplate(
resp = { text: "Request cancelled" }
error = undefined
} else {
trace.heading(3, `Fetch error`)
trace.error(`fetch error`, error)
resp = { text: "Unexpected error" }
}
Expand Down
4 changes: 2 additions & 2 deletions packages/core/src/tokens.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ export function estimateTokens(model: string, text: string) {
try {
return encode(text).length
} catch (e) {
logError(e.message)
logError(e)
return text.length >> 2
}
}
Expand Down Expand Up @@ -46,7 +46,7 @@ export function estimateChatTokens(
const chatTokens = encodeChat(chat, model as any)
return chatTokens.length | 0
} catch (e) {
logError(e.message)
logError(e)
return JSON.stringify(messages).length >> 2
}
}
Loading

0 comments on commit 6f5b7cb

Please sign in to comment.