diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5fca5405..62d8f116 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -113,7 +113,7 @@ jobs: - name: Check README.md is in sync run: | - if ! diff -q README.md langgraph/README.md >/dev/null; then + if ! diff -q README.md libs/langgraph/README.md >/dev/null; then echo "README.md is out of sync with langgraph/README.md" diff -C 3 README.md langgraph/README.md exit 1 diff --git a/LICENSE b/LICENSE index 8cd8f501..e7530f5e 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ The MIT License -Copyright (c) 2023 LangChain +Copyright (c) 2024 LangChain Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/docs/Makefile b/docs/Makefile index d95a5d9d..5a2870c6 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -6,11 +6,11 @@ build-docs: serve-docs: python _scripts/copy_notebooks.py - python -m mkdocs serve -f mkdocs.yml -w ../langgraph --dirty -o + python -m mkdocs serve -f mkdocs.yml -w ../libs --dirty -o serve-clean-docs: clean python _scripts/copy_notebooks.py - python -m mkdocs serve -c -f mkdocs.yml --strict -w ../langgraph + python -m mkdocs serve -c -f mkdocs.yml --strict -w ../libs clean: find ./docs -name "*.ipynb" -type f -delete diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index 7c68905d..b712f302 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -57,7 +57,8 @@ plugins: include_source: True include_requirejs: true - typedoc: - source: "../langgraph/*.d.ts" + # One level of globbing is intentional + source: "../libs/*/*.d.ts" output_dir: "./reference" tsconfig: "../tsconfig.json" options: "typedoc.json" diff --git a/environment_tests/docker-compose.yml b/environment_tests/docker-compose.yml index a70efa12..544de21b 100644 --- a/environment_tests/docker-compose.yml +++ b/environment_tests/docker-compose.yml @@ -9,7 +9,8 @@ services: - ../.yarn:/root/.yarn - ../environment_tests/test-exports-esbuild:/package - ../environment_tests/scripts:/scripts - - ../langgraph:/langgraph + - ../libs/langgraph:/langgraph + - ../libs/checkpoint:/checkpoint command: bash /scripts/docker-ci-entrypoint.sh test-exports-esm: image: node:18 @@ -23,7 +24,8 @@ services: - ../.yarn:/root/.yarn - ../environment_tests/test-exports-esm:/package - ../environment_tests/scripts:/scripts - - ../langgraph:/langgraph + - ../libs/langgraph:/langgraph + - ../libs/checkpoint:/checkpoint command: bash /scripts/docker-ci-entrypoint.sh test-exports-tsc: image: node:18 @@ -34,7 +36,8 @@ services: - ../.yarn:/root/.yarn - ../environment_tests/test-exports-tsc:/package - ../environment_tests/scripts:/scripts - - ../langgraph:/langgraph + - ../libs/langgraph:/langgraph + - ../libs/checkpoint:/checkpoint command: bash /scripts/docker-ci-entrypoint.sh test-exports-cjs: image: node:18 @@ -45,7 +48,8 @@ services: - ../.yarn:/root/.yarn - ../environment_tests/test-exports-cjs:/package - ../environment_tests/scripts:/scripts - - ../langgraph:/langgraph + - ../libs/langgraph:/langgraph + - ../libs/checkpoint:/checkpoint command: bash /scripts/docker-ci-entrypoint.sh test-exports-cf: image: node:18 @@ -56,7 +60,8 @@ services: - ../.yarn:/root/.yarn - ../environment_tests/test-exports-cf:/package - ../environment_tests/scripts:/scripts - - ../langgraph:/langgraph + - ../libs/langgraph:/langgraph + - ../libs/checkpoint:/checkpoint command: bash /scripts/docker-ci-entrypoint.sh test-exports-vercel: image: node:18 @@ -70,7 +75,8 @@ 
services: - ../.yarn:/root/.yarn - ../environment_tests/test-exports-vercel:/package - ../environment_tests/scripts:/scripts - - ../langgraph:/langgraph + - ../libs/langgraph:/langgraph + - ../libs/checkpoint:/checkpoint command: bash /scripts/docker-ci-entrypoint.sh test-exports-vite: image: node:18 @@ -81,7 +87,8 @@ services: - ../.yarn:/root/.yarn - ../environment_tests/test-exports-vite:/package - ../environment_tests/scripts:/scripts - - ../langgraph:/langgraph + - ../libs/langgraph:/langgraph + - ../libs/checkpoint:/checkpoint command: bash /scripts/docker-ci-entrypoint.sh success: image: alpine:3.14 diff --git a/environment_tests/scripts/docker-ci-entrypoint.sh b/environment_tests/scripts/docker-ci-entrypoint.sh index b9507544..2d10bfc5 100644 --- a/environment_tests/scripts/docker-ci-entrypoint.sh +++ b/environment_tests/scripts/docker-ci-entrypoint.sh @@ -11,8 +11,10 @@ shopt -s extglob cp -r ../package/!(node_modules|dist|dist-cjs|dist-esm|build|.next|.turbo) . mkdir -p ./libs/langgraph/ +mkdir -p ./libs/checkpoint/ cp -r ../langgraph ./libs/ +cp -r ../checkpoint ./libs/ # copy cache mkdir -p ./.yarn diff --git a/environment_tests/test-exports-cf/package.json b/environment_tests/test-exports-cf/package.json index 429bf98e..be42f6f4 100644 --- a/environment_tests/test-exports-cf/package.json +++ b/environment_tests/test-exports-cf/package.json @@ -9,6 +9,7 @@ }, "dependencies": { "@langchain/langgraph": "workspace:*", + "@langchain/langgraph-checkpoint": "workspace:*", "wrangler": "3.19.0", "vitest": "0.34.3", "typescript": "^5.0.3" diff --git a/environment_tests/test-exports-cjs/package.json b/environment_tests/test-exports-cjs/package.json index 3c9a8f34..5e469ff4 100644 --- a/environment_tests/test-exports-cjs/package.json +++ b/environment_tests/test-exports-cjs/package.json @@ -22,6 +22,7 @@ "license": "MIT", "dependencies": { "@langchain/langgraph": "workspace:*", + "@langchain/langgraph-checkpoint": "workspace:*", "@tsconfig/recommended": "^1.0.2", "@types/node": "^18.15.11", "typescript": "^5.0.0" diff --git a/environment_tests/test-exports-esbuild/package.json b/environment_tests/test-exports-esbuild/package.json index fc2f2dde..12903c2d 100644 --- a/environment_tests/test-exports-esbuild/package.json +++ b/environment_tests/test-exports-esbuild/package.json @@ -20,6 +20,7 @@ "license": "MIT", "dependencies": { "@langchain/langgraph": "workspace:*", + "@langchain/langgraph-checkpoint": "workspace:*", "@tsconfig/recommended": "^1.0.2", "@types/node": "^18.15.11", "esbuild": "^0.17.18", diff --git a/environment_tests/test-exports-esm/package.json b/environment_tests/test-exports-esm/package.json index ba9b4cc3..ecadfbeb 100644 --- a/environment_tests/test-exports-esm/package.json +++ b/environment_tests/test-exports-esm/package.json @@ -23,6 +23,7 @@ "license": "MIT", "dependencies": { "@langchain/langgraph": "workspace:*", + "@langchain/langgraph-checkpoint": "workspace:*", "@tsconfig/recommended": "^1.0.2", "@types/node": "^18.15.11", "tsx": "^4.15.2", diff --git a/environment_tests/test-exports-tsc/package.json b/environment_tests/test-exports-tsc/package.json index 87119b4e..1d678b75 100644 --- a/environment_tests/test-exports-tsc/package.json +++ b/environment_tests/test-exports-tsc/package.json @@ -16,6 +16,7 @@ "license": "MIT", "dependencies": { "@langchain/langgraph": "workspace:*", + "@langchain/langgraph-checkpoint": "workspace:*", "@types/node": "^18.15.11", "typescript": "latest" } diff --git a/environment_tests/test-exports-vercel/package.json 
b/environment_tests/test-exports-vercel/package.json index 2e4a5927..9629b551 100644 --- a/environment_tests/test-exports-vercel/package.json +++ b/environment_tests/test-exports-vercel/package.json @@ -13,6 +13,7 @@ }, "dependencies": { "@langchain/langgraph": "workspace:*", + "@langchain/langgraph-checkpoint": "workspace:*", "@types/node": "18.15.11", "@types/react": "18.0.33", "@types/react-dom": "18.0.11", diff --git a/environment_tests/test-exports-vite/package.json b/environment_tests/test-exports-vite/package.json index 9869a2af..ff1b7849 100644 --- a/environment_tests/test-exports-vite/package.json +++ b/environment_tests/test-exports-vite/package.json @@ -14,6 +14,7 @@ }, "dependencies": { "@langchain/langgraph": "workspace:*", + "@langchain/langgraph-checkpoint": "workspace:*", "typescript": "^5.0.0", "vite": "^4.2.0" } diff --git a/langgraph/src/checkpoint/id.ts b/langgraph/src/checkpoint/id.ts deleted file mode 100644 index 0870538f..00000000 --- a/langgraph/src/checkpoint/id.ts +++ /dev/null @@ -1,5 +0,0 @@ -import { v6 } from "uuid"; - -export function uuid6(clockseq: number): string { - return v6({ clockseq }); -} diff --git a/langgraph/src/checkpoint/index.ts b/langgraph/src/checkpoint/index.ts deleted file mode 100644 index 066d5ccd..00000000 --- a/langgraph/src/checkpoint/index.ts +++ /dev/null @@ -1,8 +0,0 @@ -export { MemorySaver } from "./memory.js"; -export { - type Checkpoint, - type CheckpointMetadata, - copyCheckpoint, - emptyCheckpoint, - BaseCheckpointSaver, -} from "./base.js"; diff --git a/langgraph/src/checkpoint/memory.ts b/langgraph/src/checkpoint/memory.ts deleted file mode 100644 index b87a3eb4..00000000 --- a/langgraph/src/checkpoint/memory.ts +++ /dev/null @@ -1,104 +0,0 @@ -import { RunnableConfig } from "@langchain/core/runnables"; -import { - BaseCheckpointSaver, - Checkpoint, - CheckpointMetadata, - CheckpointTuple, -} from "./base.js"; -import { SerializerProtocol } from "../serde/base.js"; - -export class MemorySaver extends BaseCheckpointSaver { - storage: Record>; - - constructor(serde?: SerializerProtocol) { - super(serde); - this.storage = {}; - } - - async getTuple(config: RunnableConfig): Promise { - const thread_id = config.configurable?.thread_id; - const checkpoint_id = config.configurable?.checkpoint_id; - const checkpoints = this.storage[thread_id]; - - if (checkpoint_id) { - const checkpoint = checkpoints[checkpoint_id]; - if (checkpoint) { - return { - config, - checkpoint: (await this.serde.parse(checkpoint[0])) as Checkpoint, - metadata: (await this.serde.parse( - checkpoint[1] - )) as CheckpointMetadata, - }; - } - } else { - if (checkpoints) { - const maxThreadTs = Object.keys(checkpoints).sort((a, b) => - b.localeCompare(a) - )[0]; - const checkpoint = checkpoints[maxThreadTs]; - return { - config: { configurable: { thread_id, checkpoint_id: maxThreadTs } }, - checkpoint: (await this.serde.parse(checkpoint[0])) as Checkpoint, - metadata: (await this.serde.parse( - checkpoint[1] - )) as CheckpointMetadata, - }; - } - } - - return undefined; - } - - async *list( - config: RunnableConfig, - limit?: number, - before?: RunnableConfig - ): AsyncGenerator { - const thread_id = config.configurable?.thread_id; - const checkpoints = this.storage[thread_id] ?? {}; - - // sort in desc order - for (const [checkpoint_id, checkpoint] of Object.entries(checkpoints) - .filter((c) => - before ? 
c[0] < before.configurable?.checkpoint_id : true - ) - .sort((a, b) => b[0].localeCompare(a[0])) - .slice(0, limit)) { - yield { - config: { configurable: { thread_id, checkpoint_id } }, - checkpoint: (await this.serde.parse(checkpoint[0])) as Checkpoint, - metadata: (await this.serde.parse(checkpoint[1])) as CheckpointMetadata, - }; - } - } - - async put( - config: RunnableConfig, - checkpoint: Checkpoint, - metadata: CheckpointMetadata - ): Promise { - const thread_id = config.configurable?.thread_id; - - if (this.storage[thread_id]) { - this.storage[thread_id][checkpoint.id] = [ - this.serde.stringify(checkpoint), - this.serde.stringify(metadata), - ]; - } else { - this.storage[thread_id] = { - [checkpoint.id]: [ - this.serde.stringify(checkpoint), - this.serde.stringify(metadata), - ], - }; - } - - return { - configurable: { - thread_id, - checkpoint_id: checkpoint.id, - }, - }; - } -} diff --git a/langgraph/src/checkpoint/sqlite.ts b/langgraph/src/checkpoint/sqlite.ts deleted file mode 100644 index 8def2737..00000000 --- a/langgraph/src/checkpoint/sqlite.ts +++ /dev/null @@ -1,210 +0,0 @@ -import Database, { Database as DatabaseType } from "better-sqlite3"; -import { RunnableConfig } from "@langchain/core/runnables"; -import { - BaseCheckpointSaver, - Checkpoint, - CheckpointMetadata, - CheckpointTuple, -} from "./base.js"; -import { SerializerProtocol } from "../serde/base.js"; - -// snake_case is used to match Python implementation -interface Row { - checkpoint: string; - metadata: string; - parent_id?: string; - thread_id: string; - checkpoint_id: string; -} - -export class SqliteSaver extends BaseCheckpointSaver { - db: DatabaseType; - - protected isSetup: boolean; - - constructor(db: DatabaseType, serde?: SerializerProtocol) { - super(serde); - this.db = db; - this.isSetup = false; - } - - static fromConnString(connStringOrLocalPath: string): SqliteSaver { - return new SqliteSaver(new Database(connStringOrLocalPath)); - } - - private setup(): void { - if (this.isSetup) { - return; - } - - try { - this.db.pragma("journal_mode=WAL"); - this.db.exec(` -CREATE TABLE IF NOT EXISTS checkpoints ( - thread_id TEXT NOT NULL, - checkpoint_id TEXT NOT NULL, - parent_id TEXT, - checkpoint BLOB, - metadata BLOB, - PRIMARY KEY (thread_id, checkpoint_id) -);`); - } catch (error) { - console.log("Error creating checkpoints table", error); - throw error; - } - - this.isSetup = true; - } - - async getTuple(config: RunnableConfig): Promise { - this.setup(); - const thread_id = config.configurable?.thread_id; - const checkpoint_id = config.configurable?.checkpoint_id; - - if (checkpoint_id) { - try { - const row: Row = this.db - .prepare( - `SELECT checkpoint, parent_id, metadata FROM checkpoints WHERE thread_id = ? AND checkpoint_id = ?` - ) - .get(thread_id, checkpoint_id) as Row; - - if (row) { - return { - config, - checkpoint: (await this.serde.parse(row.checkpoint)) as Checkpoint, - metadata: (await this.serde.parse( - row.metadata - )) as CheckpointMetadata, - parentConfig: row.parent_id - ? { - configurable: { - thread_id, - checkpoint_id: row.parent_id, - }, - } - : undefined, - }; - } - } catch (error) { - console.log("Error retrieving checkpoint", error); - throw error; - } - } else { - const row: Row = this.db - .prepare( - `SELECT thread_id, checkpoint_id, parent_id, checkpoint, metadata FROM checkpoints WHERE thread_id = ? 
ORDER BY checkpoint_id DESC LIMIT 1` - ) - .get(thread_id) as Row; - - if (row) { - return { - config: { - configurable: { - thread_id: row.thread_id, - checkpoint_id: row.checkpoint_id, - }, - }, - checkpoint: (await this.serde.parse(row.checkpoint)) as Checkpoint, - metadata: (await this.serde.parse( - row.metadata - )) as CheckpointMetadata, - parentConfig: row.parent_id - ? { - configurable: { - thread_id: row.thread_id, - checkpoint_id: row.parent_id, - }, - } - : undefined, - }; - } - } - - return undefined; - } - - async *list( - config: RunnableConfig, - limit?: number, - before?: RunnableConfig - ): AsyncGenerator { - this.setup(); - const thread_id = config.configurable?.thread_id; - let sql = `SELECT thread_id, checkpoint_id, parent_id, checkpoint, metadata FROM checkpoints WHERE thread_id = ? ${ - before ? "AND checkpoint_id < ?" : "" - } ORDER BY checkpoint_id DESC`; - if (limit) { - sql += ` LIMIT ${limit}`; - } - const args = [thread_id, before?.configurable?.checkpoint_id].filter( - Boolean - ); - - try { - const rows: Row[] = this.db.prepare(sql).all(...args) as Row[]; - - if (rows) { - for (const row of rows) { - yield { - config: { - configurable: { - thread_id: row.thread_id, - checkpoint_id: row.checkpoint_id, - }, - }, - checkpoint: (await this.serde.parse(row.checkpoint)) as Checkpoint, - metadata: (await this.serde.parse( - row.metadata - )) as CheckpointMetadata, - parentConfig: row.parent_id - ? { - configurable: { - thread_id: row.thread_id, - checkpoint_id: row.parent_id, - }, - } - : undefined, - }; - } - } - } catch (error) { - console.log("Error listing checkpoints", error); - throw error; - } - } - - async put( - config: RunnableConfig, - checkpoint: Checkpoint, - metadata: CheckpointMetadata - ): Promise { - this.setup(); - - try { - const row = [ - config.configurable?.thread_id, - checkpoint.id, - config.configurable?.checkpoint_id, - this.serde.stringify(checkpoint), - this.serde.stringify(metadata), - ]; - - this.db - .prepare( - `INSERT OR REPLACE INTO checkpoints (thread_id, checkpoint_id, parent_id, checkpoint, metadata) VALUES (?, ?, ?, ?, ?)` - ) - .run(...row); - } catch (error) { - console.log("Error saving checkpoint", error); - throw error; - } - - return { - configurable: { - thread_id: config.configurable?.thread_id, - checkpoint_id: checkpoint.id, - }, - }; - } -} diff --git a/langgraph/src/pregel/algo.ts b/langgraph/src/pregel/algo.ts deleted file mode 100644 index bf722faa..00000000 --- a/langgraph/src/pregel/algo.ts +++ /dev/null @@ -1,435 +0,0 @@ -/* eslint-disable no-param-reassign */ -import { mergeConfigs, patchConfig } from "@langchain/core/runnables"; -import { - BaseChannel, - createCheckpoint, - emptyChannels, -} from "../channels/base.js"; -import { - Checkpoint, - ReadonlyCheckpoint, - copyCheckpoint, - getChannelVersion, - getVersionSeen, -} from "../checkpoint/base.js"; -import { PregelNode } from "./read.js"; -import { readChannel, readChannels } from "./io.js"; -import { - _isSend, - _isSendInterface, - CONFIG_KEY_READ, - CONFIG_KEY_SEND, - INTERRUPT, - Send, - TAG_HIDDEN, - TASKS, -} from "../constants.js"; -import { All, PregelExecutableTask, PregelTaskDescription } from "./types.js"; -import { EmptyChannelError, InvalidUpdateError } from "../errors.js"; - -/** - * Construct a type with a set of properties K of type T - */ -export type StrRecord = { - [P in K]: T; -}; - -export async function executeTasks( - tasks: Array<() => Promise>, - stepTimeout?: number, - signal?: AbortSignal -): Promise { - if (stepTimeout && 
signal) { - if ("any" in AbortSignal) { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - signal = (AbortSignal as any).any([ - signal, - AbortSignal.timeout(stepTimeout), - ]); - } - } else if (stepTimeout) { - signal = AbortSignal.timeout(stepTimeout); - } - - // Abort if signal is aborted - signal?.throwIfAborted(); - - // Start all tasks - const started = tasks.map((task) => task()); - - // Wait for all tasks to settle - // If any tasks fail, or signal is aborted, the promise will reject - await Promise.all( - signal - ? [ - ...started, - new Promise((_resolve, reject) => { - signal?.addEventListener("abort", () => reject(new Error("Abort"))); - }), - ] - : started - ); -} - -export function _shouldInterrupt( - checkpoint: ReadonlyCheckpoint, - interruptNodes: All | Array, - snapshotChannels: Array, - tasks: Array> -): boolean { - const anySnapshotChannelUpdated = snapshotChannels.some( - (chan) => - getChannelVersion(checkpoint, chan as string) > - getVersionSeen(checkpoint, INTERRUPT, chan as string) - ); - const anyTaskNodeInInterruptNodes = tasks.some((task) => - interruptNodes === "*" - ? !task.config?.tags?.includes(TAG_HIDDEN) - : interruptNodes.includes(task.name) - ); - return anySnapshotChannelUpdated && anyTaskNodeInInterruptNodes; -} - -export function _localRead>( - checkpoint: ReadonlyCheckpoint, - channels: Cc, - writes: Array<[keyof Cc, unknown]>, - select: Array | keyof Cc, - fresh: boolean = false -): Record | unknown { - if (fresh) { - const newCheckpoint = createCheckpoint(checkpoint, channels, -1); - // create a new copy of channels - const newChannels = emptyChannels(channels, newCheckpoint); - // Note: _applyWrites contains side effects - _applyWrites(copyCheckpoint(newCheckpoint), newChannels, writes); - return readChannels(newChannels, select); - } else { - return readChannels(channels, select); - } -} - -export function _localWrite( - // eslint-disable-next-line @typescript-eslint/no-explicit-any - commit: (writes: [string, any][]) => void, - processes: Record, - channels: Record, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - writes: [string, any][] -) { - for (const [chan, value] of writes) { - if (chan === TASKS) { - if (!_isSend(value)) { - throw new InvalidUpdateError( - `Invalid packet type, expected SendProtocol, got ${JSON.stringify( - value - )}` - ); - } - if (!(value.node in processes)) { - throw new InvalidUpdateError( - `Invalid node name ${value.node} in packet` - ); - } - } else if (!(chan in channels)) { - console.warn(`Skipping write for channel '${chan}' which has no readers`); - } - } - commit(writes); -} - -export function _applyWrites>( - checkpoint: Checkpoint, - channels: Cc, - pendingWrites: Array<[keyof Cc, unknown]> -): void { - if (checkpoint.pending_sends) { - checkpoint.pending_sends = []; - } - const pendingWritesByChannel = {} as Record>; - // Group writes by channel - for (const [chan, val] of pendingWrites) { - if (chan === TASKS) { - checkpoint.pending_sends.push({ - node: (val as Send).node, - args: (val as Send).args, - }); - } else { - if (chan in pendingWritesByChannel) { - pendingWritesByChannel[chan].push(val); - } else { - pendingWritesByChannel[chan] = [val]; - } - } - } - - // find the highest version of all channels - let maxVersion = 0; - if (Object.keys(checkpoint.channel_versions).length > 0) { - maxVersion = Math.max(...Object.values(checkpoint.channel_versions)); - } - - const updatedChannels: Set = new Set(); - // Apply writes to channels - for (const [chan, vals] of 
Object.entries(pendingWritesByChannel)) { - if (chan in channels) { - // side effect: update channels - try { - channels[chan].update(vals); - // eslint-disable-next-line @typescript-eslint/no-explicit-any - } catch (e: any) { - if (e.name === InvalidUpdateError.unminifiable_name) { - throw new InvalidUpdateError( - `Invalid update for channel ${chan}. Values: ${vals}\n\nError: ${e.message}` - ); - } - } - - // side effect: update checkpoint channel versions - checkpoint.channel_versions[chan] = maxVersion + 1; - - updatedChannels.add(chan); - } else { - console.warn(`Skipping write for channel ${chan} which has no readers`); - } - } - - // Channels that weren't updated in this step are notified of a new step - for (const chan in channels) { - if (!updatedChannels.has(chan)) { - // side effect: update channels - channels[chan].update([]); - } - } -} - -export function _prepareNextTasks< - Nn extends StrRecord, - Cc extends StrRecord ->( - checkpoint: ReadonlyCheckpoint, - processes: Nn, - channels: Cc, - forExecution: false, - extra: { step: number } -): [Checkpoint, Array]; - -export function _prepareNextTasks< - Nn extends StrRecord, - Cc extends StrRecord ->( - checkpoint: ReadonlyCheckpoint, - processes: Nn, - channels: Cc, - forExecution: true, - extra: { step: number } -): [Checkpoint, Array>]; - -export function _prepareNextTasks< - Nn extends StrRecord, - Cc extends StrRecord ->( - checkpoint: ReadonlyCheckpoint, - processes: Nn, - channels: Cc, - forExecution: boolean, - extra: { step: number } -): [ - Checkpoint, - PregelTaskDescription[] | PregelExecutableTask[] -] { - const newCheckpoint = copyCheckpoint(checkpoint); - const tasks: Array> = []; - const taskDescriptions: Array = []; - - for (const packet of checkpoint.pending_sends) { - if (!_isSendInterface(packet)) { - console.warn( - `Ignoring invalid packet ${JSON.stringify(packet)} in pending sends.` - ); - continue; - } - if (!(packet.node in processes)) { - console.warn( - `Ignoring unknown node name ${packet.node} in pending sends.` - ); - continue; - } - if (forExecution) { - const proc = processes[packet.node]; - const node = proc.getNode(); - if (node !== undefined) { - const triggers = [TASKS]; - const metadata = { - langgraph_step: extra.step, - langgraph_node: packet.node, - langgraph_triggers: triggers, - langgraph_task_idx: tasks.length, - }; - const writes: [keyof Cc, unknown][] = []; - tasks.push({ - name: packet.node, - input: packet.args, - proc: node, - writes, - triggers, - config: patchConfig( - mergeConfigs(proc.config, processes[packet.node].config, { - metadata, - }), - { - runName: packet.node, - // callbacks: - configurable: { - [CONFIG_KEY_SEND]: _localWrite.bind( - undefined, - (items: [keyof Cc, unknown][]) => writes.push(...items), - processes, - channels - ), - [CONFIG_KEY_READ]: _localRead.bind( - undefined, - checkpoint, - channels, - writes as Array<[string, unknown]> - ), - }, - } - ), - }); - } - } else { - taskDescriptions.push({ - name: packet.node, - input: packet.args, - }); - } - } - - // Check if any processes should be run in next step - // If so, prepare the values to be passed to them - for (const [name, proc] of Object.entries(processes)) { - const hasUpdatedChannels = proc.triggers - .filter((chan) => { - try { - readChannel(channels, chan, false); - return true; - } catch (e) { - return false; - } - }) - .some( - (chan) => - getChannelVersion(newCheckpoint, chan) > - getVersionSeen(newCheckpoint, name, chan) - ); - // If any of the channels read by this process were updated - if 
(hasUpdatedChannels) { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - let val: any; - - // If all trigger channels subscribed by this process are not empty - // then invoke the process with the values of all non-empty channels - if (Array.isArray(proc.channels)) { - let emptyChannels = 0; - for (const chan of proc.channels) { - try { - val = readChannel(channels, chan, false); - break; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - } catch (e: any) { - if (e.name === EmptyChannelError.unminifiable_name) { - emptyChannels += 1; - continue; - } else { - throw e; - } - } - } - - if (emptyChannels === proc.channels.length) { - continue; - } - } else if (typeof proc.channels === "object") { - val = {}; - try { - for (const [k, chan] of Object.entries(proc.channels)) { - val[k] = readChannel(channels, chan, !proc.triggers.includes(chan)); - } - // eslint-disable-next-line @typescript-eslint/no-explicit-any - } catch (e: any) { - if (e.name === EmptyChannelError.unminifiable_name) { - continue; - } else { - throw e; - } - } - } else { - throw new Error( - `Invalid channels type, expected list or dict, got ${proc.channels}` - ); - } - - // If the process has a mapper, apply it to the value - if (proc.mapper !== undefined) { - val = proc.mapper(val); - } - - if (forExecution) { - // Update seen versions - if (!newCheckpoint.versions_seen[name]) { - newCheckpoint.versions_seen[name] = {}; - } - proc.triggers.forEach((chan: string) => { - const version = newCheckpoint.channel_versions[chan]; - if (version !== undefined) { - // side effect: updates newCheckpoint - newCheckpoint.versions_seen[name][chan] = version; - } - }); - - const node = proc.getNode(); - if (node !== undefined) { - const metadata = { - langgraph_step: extra.step, - langgraph_node: name, - langgraph_triggers: proc.triggers, - langgraph_task_idx: tasks.length, - }; - const writes: [keyof Cc, unknown][] = []; - tasks.push({ - name, - input: val, - proc: node, - writes, - triggers: proc.triggers, - config: patchConfig(mergeConfigs(proc.config, { metadata }), { - runName: name, - configurable: { - [CONFIG_KEY_SEND]: _localWrite.bind( - undefined, - (items: [keyof Cc, unknown][]) => writes.push(...items), - processes, - channels - ), - [CONFIG_KEY_READ]: _localRead.bind( - undefined, - checkpoint, - channels, - writes as Array<[string, unknown]> - ), - }, - }), - }); - } - } else { - taskDescriptions.push({ - name, - input: val, - }); - } - } - } - - return [newCheckpoint, forExecution ? tasks : taskDescriptions]; -} diff --git a/langgraph/src/pregel/debug.ts b/langgraph/src/pregel/debug.ts deleted file mode 100644 index 42c8ed23..00000000 --- a/langgraph/src/pregel/debug.ts +++ /dev/null @@ -1,77 +0,0 @@ -import { BaseChannel } from "../channels/base.js"; -import { EmptyChannelError } from "../errors.js"; -import { PregelExecutableTask } from "./types.js"; - -type ConsoleColors = { - start: string; - end: string; -}; - -type ConsoleColorMap = { - [key: string]: ConsoleColors; -}; - -const COLORS_MAP: ConsoleColorMap = { - blue: { - start: "\x1b[34m", - end: "\x1b[0m", - }, -}; - -/** - * Wrap some text in a color for printing to the console. 
- */ -const wrap = (color: ConsoleColors, text: string): string => - `${color.start}${text}${color.end}`; - -export function printStepStart( - step: number, - nextTasks: readonly PregelExecutableTask[] -): void { - const nTasks = nextTasks.length; - console.log( - `${wrap(COLORS_MAP.blue, "[langgraph/step]")}`, - `Starting step ${step} with ${nTasks} task${ - nTasks === 1 ? "" : "s" - }. Next tasks:\n`, - `\n${nextTasks - .map( - (task) => `${String(task.name)}(${JSON.stringify(task.input, null, 2)})` - ) - .join("\n")}` - ); -} - -export function printCheckpoint( - step: number, - channels: Record> -) { - console.log( - `${wrap(COLORS_MAP.blue, "[langgraph/checkpoint]")}`, - `Finishing step ${step}. Channel values:\n`, - `\n${JSON.stringify( - Object.fromEntries(_readChannels(channels)), - null, - 2 - )}` - ); -} - -function* _readChannels( - channels: Record> - // eslint-disable-next-line @typescript-eslint/no-explicit-any -): IterableIterator<[string, any]> { - for (const [name, channel] of Object.entries(channels)) { - try { - yield [name, channel.get()]; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - } catch (error: any) { - if (error.name === EmptyChannelError.unminifiable_name) { - // Skip the channel if it's empty - continue; - } else { - throw error; // Re-throw the error if it's not an EmptyChannelError - } - } - } -} diff --git a/langgraph/src/pregel/index.ts b/langgraph/src/pregel/index.ts deleted file mode 100644 index 8208cdfb..00000000 --- a/langgraph/src/pregel/index.ts +++ /dev/null @@ -1,840 +0,0 @@ -/* eslint-disable no-param-reassign */ -import { - Runnable, - RunnableConfig, - RunnableFunc, - RunnableLike, - RunnableSequence, - _coerceToRunnable, - ensureConfig, - mergeConfigs, - patchConfig, -} from "@langchain/core/runnables"; -import { CallbackManagerForChainRun } from "@langchain/core/callbacks/manager"; -import { IterableReadableStream } from "@langchain/core/utils/stream"; -import { - BaseChannel, - createCheckpoint, - emptyChannels, -} from "../channels/base.js"; -import { - BaseCheckpointSaver, - copyCheckpoint, - emptyCheckpoint, -} from "../checkpoint/base.js"; -import { PregelNode } from "./read.js"; -import { validateGraph, validateKeys } from "./validate.js"; -import { - mapInput, - mapOutputUpdates, - mapOutputValues, - mapDebugTasks, - readChannels, - single, - mapDebugTaskResults, -} from "./io.js"; -import { ChannelWrite, ChannelWriteEntry, PASSTHROUGH } from "./write.js"; -import { - CONFIG_KEY_READ, - CONFIG_KEY_SEND, - INTERRUPT, - TASKS, -} from "../constants.js"; -import { All, PregelExecutableTask, StateSnapshot } from "./types.js"; -import { - GraphRecursionError, - GraphValueError, - InvalidUpdateError, -} from "../errors.js"; -import { - executeTasks, - _prepareNextTasks, - _shouldInterrupt, - _localRead, - _applyWrites, -} from "./algo.js"; - -const DEFAULT_LOOP_LIMIT = 25; - -type WriteValue = Runnable | RunnableFunc | unknown; - -function isString(value: unknown): value is string { - return typeof value === "string"; -} - -function* prefixGenerator( - generator: Generator, - prefix: string | undefined -) { - if (!prefix) yield* generator; - for (const value of generator) yield [prefix, value]; -} - -export class Channel { - static subscribeTo( - channels: string, - options?: { - key?: string; - tags?: string[]; - } - ): PregelNode; - - static subscribeTo( - channels: string[], - options?: { - tags?: string[]; - } - ): PregelNode; - - static subscribeTo( - channels: string | string[], - options?: { - key?: string; - 
tags?: string[]; - } - ): PregelNode { - const { key, tags } = options ?? {}; - if (Array.isArray(channels) && key !== undefined) { - throw new Error( - "Can't specify a key when subscribing to multiple channels" - ); - } - - let channelMappingOrArray: string[] | Record; - - if (isString(channels)) { - if (key) { - channelMappingOrArray = { [key]: channels }; - } else { - channelMappingOrArray = [channels]; - } - } else { - channelMappingOrArray = Object.fromEntries( - channels.map((chan) => [chan, chan]) - ); - } - - const triggers: string[] = Array.isArray(channels) ? channels : [channels]; - - return new PregelNode({ - channels: channelMappingOrArray, - triggers, - tags, - }); - } - - static writeTo( - channels: string[], - kwargs?: Record - ): ChannelWrite { - const channelWriteEntries: Array = []; - - for (const channel of channels) { - channelWriteEntries.push({ - channel, - value: PASSTHROUGH, - skipNone: false, - }); - } - - for (const [key, value] of Object.entries(kwargs ?? {})) { - if (Runnable.isRunnable(value) || typeof value === "function") { - channelWriteEntries.push({ - channel: key, - value: PASSTHROUGH, - skipNone: true, - mapper: _coerceToRunnable(value as RunnableLike), - }); - } else { - channelWriteEntries.push({ - channel: key, - value, - skipNone: false, - }); - } - } - - return new ChannelWrite(channelWriteEntries); - } -} - -export type StreamMode = "values" | "updates" | "debug"; - -/** - * Construct a type with a set of properties K of type T - */ -type StrRecord = { - [P in K]: T; -}; - -export interface PregelInterface< - Nn extends StrRecord, - Cc extends StrRecord -> { - nodes: Nn; - - channels: Cc; - - inputs: keyof Cc | Array; - - outputs: keyof Cc | Array; - /** - * @default true - */ - autoValidate?: boolean; - /** - * @default "values" - */ - streamMode?: StreamMode | StreamMode[]; - - streamChannels?: keyof Cc | Array; - /** - * @default [] - */ - interruptAfter?: Array | All; - /** - * @default [] - */ - interruptBefore?: Array | All; - /** - * @default undefined - */ - stepTimeout?: number; - /** - * @default false - */ - debug?: boolean; - - checkpointer?: BaseCheckpointSaver; -} - -export interface PregelOptions< - Nn extends StrRecord, - Cc extends StrRecord -> extends RunnableConfig { - streamMode?: StreamMode | StreamMode[]; - inputKeys?: keyof Cc | Array; - outputKeys?: keyof Cc | Array; - interruptBefore?: All | Array; - interruptAfter?: All | Array; - debug?: boolean; -} - -// eslint-disable-next-line @typescript-eslint/no-explicit-any -export type PregelInputType = any; - -// eslint-disable-next-line @typescript-eslint/no-explicit-any -export type PregelOutputType = any; - -export class Pregel< - Nn extends StrRecord, - Cc extends StrRecord - > - extends Runnable> - implements PregelInterface -{ - static lc_name() { - return "LangGraph"; - } - - // Because Pregel extends `Runnable`. 
- lc_namespace = ["langgraph", "pregel"]; - - nodes: Nn; - - channels: Cc; - - inputs: keyof Cc | Array; - - outputs: keyof Cc | Array; - - autoValidate: boolean = true; - - streamMode: StreamMode[] = ["values"]; - - streamChannels?: keyof Cc | Array; - - interruptAfter?: Array | All; - - interruptBefore?: Array | All; - - stepTimeout?: number; - - debug: boolean = false; - - checkpointer?: BaseCheckpointSaver; - - constructor(fields: PregelInterface) { - super(fields); - - let { streamMode } = fields; - if (streamMode != null && !Array.isArray(streamMode)) { - streamMode = [streamMode]; - } - - this.nodes = fields.nodes; - this.channels = fields.channels; - this.autoValidate = fields.autoValidate ?? this.autoValidate; - this.streamMode = streamMode ?? this.streamMode; - this.outputs = fields.outputs; - this.streamChannels = fields.streamChannels ?? this.streamChannels; - this.interruptAfter = fields.interruptAfter; - this.interruptBefore = fields.interruptBefore; - this.inputs = fields.inputs; - this.stepTimeout = fields.stepTimeout ?? this.stepTimeout; - this.debug = fields.debug ?? this.debug; - this.checkpointer = fields.checkpointer; - - // Bind the method to the instance - this._transform = this._transform.bind(this); - - if (this.autoValidate) { - this.validate(); - } - } - - validate(): this { - validateGraph({ - nodes: this.nodes, - channels: this.channels, - outputChannels: this.outputs, - inputChannels: this.inputs, - streamChannels: this.streamChannels, - interruptAfterNodes: this.interruptAfter, - interruptBeforeNodes: this.interruptBefore, - }); - - return this; - } - - get streamChannelsList(): Array { - if (Array.isArray(this.streamChannels)) { - return this.streamChannels; - } else if (this.streamChannels) { - return [this.streamChannels]; - } else { - return Object.keys(this.channels); - } - } - - get streamChannelsAsIs(): keyof Cc | Array { - if (this.streamChannels) { - return this.streamChannels; - } else { - return Object.keys(this.channels); - } - } - - async getState(config: RunnableConfig): Promise { - if (!this.checkpointer) { - throw new GraphValueError("No checkpointer set"); - } - - const saved = await this.checkpointer.getTuple(config); - const checkpoint = saved ? saved.checkpoint : emptyCheckpoint(); - const channels = emptyChannels(this.channels, checkpoint); - // eslint-disable-next-line @typescript-eslint/no-unused-vars - const [_, nextTasks] = _prepareNextTasks( - checkpoint, - this.nodes, - channels, - false, - { step: -1 } - ); - return { - values: readChannels(channels, this.streamChannelsAsIs), - next: nextTasks.map((task) => task.name), - metadata: saved?.metadata, - config: saved ? 
saved.config : config, - createdAt: saved?.checkpoint.ts, - parentConfig: saved?.parentConfig, - }; - } - - async *getStateHistory( - config: RunnableConfig, - limit?: number, - before?: RunnableConfig - ): AsyncIterableIterator { - if (!this.checkpointer) { - throw new GraphValueError("No checkpointer set"); - } - for await (const saved of this.checkpointer.list(config, limit, before)) { - const channels = emptyChannels(this.channels, saved.checkpoint); - // eslint-disable-next-line @typescript-eslint/no-unused-vars - const [_, nextTasks] = _prepareNextTasks( - saved.checkpoint, - this.nodes, - channels, - false, - { step: -1 } - ); - yield { - values: readChannels(channels, this.streamChannelsAsIs), - next: nextTasks.map((task) => task.name), - metadata: saved.metadata, - config: saved.config, - createdAt: saved.checkpoint.ts, - parentConfig: saved.parentConfig, - }; - } - } - - async updateState( - config: RunnableConfig, - values: Record | unknown, - asNode?: keyof Nn - ): Promise { - if (!this.checkpointer) { - throw new GraphValueError("No checkpointer set"); - } - - // Get the latest checkpoint - const saved = await this.checkpointer.getTuple(config); - const checkpoint = saved - ? copyCheckpoint(saved.checkpoint) - : emptyCheckpoint(); - // Find last that updated the state, if not provided - const maxSeens = Object.entries(checkpoint.versions_seen).reduce( - (acc, [node, versions]) => { - const maxSeen = Math.max(...Object.values(versions)); - if (maxSeen) { - if (!acc[maxSeen]) { - acc[maxSeen] = []; - } - acc[maxSeen].push(node); - } - return acc; - }, - {} as Record - ); - if (!asNode && !Object.keys(maxSeens).length) { - if (!Array.isArray(this.inputs) && this.inputs in this.nodes) { - asNode = this.inputs as keyof Nn; - } - } else if (!asNode) { - const maxSeen = Math.max(...Object.keys(maxSeens).map(Number)); - const nodes = maxSeens[maxSeen]; - if (nodes.length === 1) { - asNode = nodes[0] as keyof Nn; - } - } - if (!asNode) { - throw new InvalidUpdateError("Ambiguous update, specify as_node"); - } - // update channels - const channels = emptyChannels(this.channels, checkpoint); - // create task to run all writers of the chosen node - const writers = this.nodes[asNode].getWriters(); - if (!writers.length) { - throw new InvalidUpdateError( - `No writers found for node ${asNode as string}` - ); - } - const task: PregelExecutableTask = { - name: asNode, - input: values, - proc: - // eslint-disable-next-line @typescript-eslint/no-explicit-any - writers.length > 1 ? RunnableSequence.from(writers as any) : writers[0], - writes: [], - triggers: [INTERRUPT], - config: undefined, - }; - // execute task - await task.proc.invoke( - task.input, - patchConfig(config, { - runName: `${this.name}UpdateState`, - configurable: { - [CONFIG_KEY_SEND]: (items: [keyof Cc, unknown][]) => - task.writes.push(...items), - [CONFIG_KEY_READ]: _localRead.bind( - undefined, - checkpoint, - channels, - task.writes as Array<[string, unknown]> - ), - }, - }) - ); - // apply to checkpoint and save - _applyWrites(checkpoint, channels, task.writes); - const step = (saved?.metadata?.step ?? -2) + 1; - return await this.checkpointer.put( - saved?.config ?? 
config, - createCheckpoint(checkpoint, channels, step), - { - source: "update", - step, - writes: { [asNode]: values }, - } - ); - } - - _defaults(config: PregelOptions): [ - boolean, // debug - StreamMode[], // stream mode - keyof Cc | Array, // input keys - keyof Cc | Array, // output keys - RunnableConfig, // config without pregel keys - All | Array, // interrupt before - All | Array // interrupt after, - ] { - const { - debug, - streamMode, - inputKeys, - outputKeys, - interruptAfter, - interruptBefore, - ...rest - } = config; - const defaultDebug = debug !== undefined ? debug : this.debug; - - let defaultOutputKeys = outputKeys; - if (defaultOutputKeys === undefined) { - defaultOutputKeys = this.streamChannelsAsIs; - } else { - validateKeys(defaultOutputKeys, this.channels); - } - - let defaultInputKeys = inputKeys; - if (defaultInputKeys === undefined) { - defaultInputKeys = this.inputs; - } else { - validateKeys(defaultInputKeys, this.channels); - } - - const defaultInterruptBefore = - interruptBefore ?? this.interruptBefore ?? []; - - const defaultInterruptAfter = interruptAfter ?? this.interruptAfter ?? []; - - let defaultStreamMode: StreamMode[]; - if (streamMode !== undefined) { - defaultStreamMode = Array.isArray(streamMode) ? streamMode : [streamMode]; - } else { - defaultStreamMode = this.streamMode; - } - - if ( - config.configurable !== undefined && - config.configurable[CONFIG_KEY_READ] !== undefined - ) { - defaultStreamMode = ["values"]; - } - - return [ - defaultDebug, - defaultStreamMode, - defaultInputKeys, - defaultOutputKeys, - rest, - defaultInterruptBefore, - defaultInterruptAfter, - ]; - } - - async *_transform( - input: AsyncGenerator, - runManager?: CallbackManagerForChainRun, - config: PregelOptions = {} - ): AsyncGenerator { - const bg: Promise[] = []; - try { - if (config.recursionLimit && config.recursionLimit < 1) { - throw new GraphValueError( - `Recursion limit must be greater than 0, got ${config.recursionLimit}` - ); - } - if (this.checkpointer && !config.configurable) { - throw new GraphValueError( - `Checkpointer requires one or more of the following 'configurable' keys: thread_id, checkpoint_id` - ); - } - // assign defaults - const [ - debug, - streamMode, - inputKeys, - outputKeys, - restConfig, - interruptBefore, - interruptAfter, - ] = this._defaults(config); - // copy nodes to ignore mutations during execution - const processes = { ...this.nodes }; - // get checkpoint, or create an empty one - const saved = this.checkpointer - ? await this.checkpointer.getTuple(config) - : null; - let checkpoint = saved ? saved.checkpoint : emptyCheckpoint(); - let checkpointConfig = saved ? saved.config : config; - let start = (saved?.metadata?.step ?? 
-2) + 1; - // create channels from checkpoint - const channels = emptyChannels(this.channels, checkpoint); - // map inputs to channel updates - const inputPendingWrites: Array<[keyof Cc, unknown]> = []; - for await (const c of input) { - for (const value of mapInput(inputKeys, c)) { - inputPendingWrites.push(value); - } - } - if (inputPendingWrites.length) { - // discard any unfinished tasks from previous checkpoint - const discarded = _prepareNextTasks( - checkpoint, - processes, - channels, - true, - { step: -1 } - ); - checkpoint = discarded[0]; // eslint-disable-line prefer-destructuring - // apply input writes - _applyWrites(checkpoint, channels, inputPendingWrites); - // save input checkpoint - if (this.checkpointer) { - checkpoint = createCheckpoint(checkpoint, channels, start); - bg.push( - this.checkpointer.put(checkpointConfig, checkpoint, { - source: "input", - step: start, - writes: Object.fromEntries(inputPendingWrites), - }) - ); - checkpointConfig = { - configurable: { - ...checkpointConfig.configurable, - checkpoint_id: checkpoint.id, - }, - }; - } - // increment start to 0 - start += 1; - } else { - checkpoint = copyCheckpoint(checkpoint); - for (const k of this.streamChannelsList) { - const version = checkpoint.channel_versions[k as string] ?? 0; - if (!checkpoint.versions_seen[INTERRUPT]) { - checkpoint.versions_seen[INTERRUPT] = {}; - } - checkpoint.versions_seen[INTERRUPT][k as string] = version; - } - } - - // Similarly to Bulk Synchronous Parallel / Pregel model - // computation proceeds in steps, while there are channel updates - // channel updates from step N are only visible in step N+1 - // channels are guaranteed to be immutable for the duration of the step, - // with channel updates applied only at the transition between steps - const stop = start + (config.recursionLimit ?? DEFAULT_LOOP_LIMIT); - for (let step = start; step < stop + 1; step += 1) { - const [nextCheckpoint, nextTasks] = _prepareNextTasks( - checkpoint, - processes, - channels, - true, - { step } - ); - - // if no more tasks, we're done - if (nextTasks.length === 0 && step === start) { - throw new GraphValueError(`No tasks to run in graph.`); - } else if (nextTasks.length === 0) { - break; - } else if (step === stop) { - throw new GraphRecursionError( - `Recursion limit of ${config.recursionLimit} reached without hitting a stop condition. You can increase the limit by setting the "recursionLimit" config key.` - ); - } - - // before execution, check if we should interrupt - if ( - _shouldInterrupt( - checkpoint, - interruptBefore, - this.streamChannelsList, - nextTasks - ) - ) { - break; - } else { - checkpoint = nextCheckpoint; - } - - // produce debug stream mode event - if (streamMode.includes("debug")) { - yield* prefixGenerator( - mapDebugTasks(step, nextTasks), - streamMode.length > 1 ? 
"debug" : undefined - ); - } - - if (debug) { - console.log(nextTasks); - } - - const tasksWithConfig = nextTasks.map( - // eslint-disable-next-line no-loop-func - (task, i) => - [ - task.proc, - task.input, - patchConfig( - mergeConfigs(restConfig, processes[task.name].config, { - metadata: { - langgraph_step: step, - langgraph_node: task.name, - langgraph_triggers: [TASKS], - langgraph_task_idx: i, - }, - }), - { - callbacks: runManager?.getChild(`graph:step:${step}`), - runName: task.name as string, - configurable: { - ...config.configurable, - [CONFIG_KEY_SEND]: (items: [keyof Cc, unknown][]) => - task.writes.push(...items), - [CONFIG_KEY_READ]: _localRead.bind( - undefined, - checkpoint, - channels, - task.writes as Array<[string, unknown]> - ), - }, - } - ), - ] as const - ); - - // execute tasks, and wait for one to fail or all to finish. - // each task is independent from all other concurrent tasks - const tasks = tasksWithConfig.map( - ([proc, input, updatedConfig]) => - () => - proc.invoke(input, updatedConfig) - ); - - await executeTasks(tasks, this.stepTimeout, config.signal); - - // combine pending writes from all tasks - const pendingWrites: Array<[keyof Cc, unknown]> = []; - for (const task of nextTasks) { - pendingWrites.push(...task.writes); - } - - // apply writes to channels - _applyWrites(checkpoint, channels, pendingWrites); - - if (streamMode.includes("updates")) { - // TODO: Refactor - for await (const task of nextTasks) { - yield* prefixGenerator( - mapOutputUpdates(outputKeys, [task]), - streamMode.length > 1 ? "updates" : undefined - ); - } - } - - // yield current value and checkpoint view - if (streamMode.includes("values")) { - yield* prefixGenerator( - mapOutputValues(outputKeys, pendingWrites, channels), - streamMode.length > 1 ? "values" : undefined - ); - } - - if (streamMode.includes("debug")) { - yield* prefixGenerator( - mapDebugTaskResults(step, nextTasks, this.streamChannelsList), - streamMode.length > 1 ? "debug" : undefined - ); - } - - // save end of step checkpoint - if (this.checkpointer) { - checkpoint = createCheckpoint(checkpoint, channels, step); - bg.push( - this.checkpointer.put(checkpointConfig, checkpoint, { - source: "loop", - step, - writes: single( - this.streamMode.includes("values") - ? 
mapOutputValues(outputKeys, pendingWrites, channels) - : mapOutputUpdates(outputKeys, nextTasks) - ), - }) - ); - checkpointConfig = { - configurable: { - ...checkpointConfig.configurable, - checkpoint_id: checkpoint.id, - }, - }; - } - - if ( - _shouldInterrupt( - checkpoint, - interruptAfter, - this.streamChannelsList, - nextTasks - ) - ) { - break; - } - } - } finally { - await Promise.all(bg); - } - } - - async invoke( - input: PregelInputType, - options?: PregelOptions - ): Promise { - const config = ensureConfig(options); - if (!config?.outputKeys) { - config.outputKeys = this.outputs; - } - if (!config?.streamMode) { - config.streamMode = "values"; - } - - let latest: PregelOutputType | undefined; - for await (const chunk of await this.stream(input, config)) { - latest = chunk; - } - if (!latest) { - return undefined as PregelOutputType; - } - return latest; - } - - async stream( - input: PregelInputType, - config?: PregelOptions - ): Promise> { - const inputIterator: AsyncGenerator = (async function* () { - yield input; - })(); - return IterableReadableStream.fromAsyncGenerator( - this.transform(inputIterator, config) - ); - } - - async *transform( - generator: AsyncGenerator, - config?: PregelOptions - ): AsyncGenerator { - for await (const chunk of this._transformStreamWithConfig( - generator, - this._transform, - config - )) { - yield chunk; - } - } -} diff --git a/langgraph/src/pregel/types.ts b/langgraph/src/pregel/types.ts deleted file mode 100644 index fd21896b..00000000 --- a/langgraph/src/pregel/types.ts +++ /dev/null @@ -1,50 +0,0 @@ -import { Runnable, RunnableConfig } from "@langchain/core/runnables"; -import { CheckpointMetadata } from "../checkpoint/base.js"; - -export interface PregelTaskDescription { - readonly name: string; - readonly input: unknown; -} - -export interface PregelExecutableTask< - N extends PropertyKey, - C extends PropertyKey -> { - readonly name: N; - readonly input: unknown; - readonly proc: Runnable; - readonly writes: Array<[C, unknown]>; - readonly config: RunnableConfig | undefined; - readonly triggers: Array; -} - -export interface StateSnapshot { - /** - * Current values of channels - */ - // eslint-disable-next-line @typescript-eslint/no-explicit-any - readonly values: Record | any; - /** - * Nodes to execute in the next step, if any - */ - readonly next: Array; - /** - * Config used to fetch this snapshot - */ - readonly config: RunnableConfig; - /** - * Metadata about the checkpoint - */ - readonly metadata?: CheckpointMetadata; - /** - * Time when the snapshot was created - */ - readonly createdAt?: string; - /** - * Config used to fetch the parent snapshot, if any - * @default undefined - */ - readonly parentConfig?: RunnableConfig | undefined; -} - -export type All = "*"; diff --git a/langgraph/tsconfig.json b/langgraph/tsconfig.json deleted file mode 100644 index 08186e4b..00000000 --- a/langgraph/tsconfig.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "extends": "../tsconfig.json", - "include": [ - "src/*.ts", - "src/**/*.ts" - ], - "exclude": [ - "node_modules", - "dist", - "docs", - "scripts" - ], - "compilerOptions": { - "outDir": "dist", - } -} \ No newline at end of file diff --git a/langgraph/.env.example b/libs/checkpoint-sqlite/.env.example similarity index 100% rename from langgraph/.env.example rename to libs/checkpoint-sqlite/.env.example diff --git a/langgraph/.eslintrc.cjs b/libs/checkpoint-sqlite/.eslintrc.cjs similarity index 100% rename from langgraph/.eslintrc.cjs rename to libs/checkpoint-sqlite/.eslintrc.cjs diff --git 
a/libs/checkpoint-sqlite/.gitignore b/libs/checkpoint-sqlite/.gitignore new file mode 100644 index 00000000..c10034e2 --- /dev/null +++ b/libs/checkpoint-sqlite/.gitignore @@ -0,0 +1,7 @@ +index.cjs +index.js +index.d.ts +index.d.cts +node_modules +dist +.yarn diff --git a/langgraph/.prettierrc b/libs/checkpoint-sqlite/.prettierrc similarity index 100% rename from langgraph/.prettierrc rename to libs/checkpoint-sqlite/.prettierrc diff --git a/.release-it.json b/libs/checkpoint-sqlite/.release-it.json similarity index 100% rename from .release-it.json rename to libs/checkpoint-sqlite/.release-it.json diff --git a/langgraph/LICENSE b/libs/checkpoint-sqlite/LICENSE similarity index 97% rename from langgraph/LICENSE rename to libs/checkpoint-sqlite/LICENSE index 8cd8f501..e7530f5e 100644 --- a/langgraph/LICENSE +++ b/libs/checkpoint-sqlite/LICENSE @@ -1,6 +1,6 @@ The MIT License -Copyright (c) 2023 LangChain +Copyright (c) 2024 LangChain Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/libs/checkpoint-sqlite/README.md b/libs/checkpoint-sqlite/README.md new file mode 100644 index 00000000..05245b48 --- /dev/null +++ b/libs/checkpoint-sqlite/README.md @@ -0,0 +1,60 @@ +# @langchain/langgraph-checkpoint-sqlite + +Implementation of a [LangGraph.js](https://github.com/langchain-ai/langgraphjs) CheckpointSaver that uses a SQLite DB. + +## Usage + +```ts +import { SqliteSaver } from "@langchain/langgraph-checkpoint-sqlite"; + +const writeConfig = { + configurable: { + thread_id: "1", + checkpoint_ns: "" + } +}; +const readConfig = { + configurable: { + thread_id: "1" + } +}; + +const checkpointer = SqliteSaver.fromConnString(":memory:"); +const checkpoint = { + v: 1, + ts: "2024-07-31T20:14:19.804150+00:00", + id: "1ef4f797-8335-6428-8001-8a1503f9b875", + channel_values: { + my_key: "meow", + node: "node" + }, + channel_versions: { + __start__: 2, + my_key: 3, + start:node: 3, + node: 3 + }, + versions_seen: { + __input__: {}, + __start__: { + __start__: 1 + }, + node: { + start:node: 2 + } + }, + pending_sends: [], + current_tasks: {} +} + +// store checkpoint +await checkpointer.put(writeConfig, checkpoint, {}, {}) + +// load checkpoint +await checkpointer.get(readConfig) + +// list checkpoints +for await (const checkpoint of checkpointer.list(readConfig)) { + console.log(checkpoint); +} +``` diff --git a/langgraph/jest.config.cjs b/libs/checkpoint-sqlite/jest.config.cjs similarity index 100% rename from langgraph/jest.config.cjs rename to libs/checkpoint-sqlite/jest.config.cjs diff --git a/langgraph/jest.env.cjs b/libs/checkpoint-sqlite/jest.env.cjs similarity index 100% rename from langgraph/jest.env.cjs rename to libs/checkpoint-sqlite/jest.env.cjs diff --git a/libs/checkpoint-sqlite/langchain.config.js b/libs/checkpoint-sqlite/langchain.config.js new file mode 100644 index 00000000..fe70c345 --- /dev/null +++ b/libs/checkpoint-sqlite/langchain.config.js @@ -0,0 +1,21 @@ +import { resolve, dirname } from "node:path"; +import { fileURLToPath } from "node:url"; + +/** + * @param {string} relativePath + * @returns {string} + */ +function abs(relativePath) { + return resolve(dirname(fileURLToPath(import.meta.url)), relativePath); +} + +export const config = { + internals: [/node\:/, /@langchain\/core\//, /async_hooks/], + entrypoints: { + index: "index" + }, + tsConfigPath: resolve("./tsconfig.json"), + cjsSource: "./dist-cjs", + cjsDestination: "./dist", + abs, +}; diff --git 
a/libs/checkpoint-sqlite/package.json b/libs/checkpoint-sqlite/package.json new file mode 100644 index 00000000..a15a0faf --- /dev/null +++ b/libs/checkpoint-sqlite/package.json @@ -0,0 +1,96 @@ +{ + "name": "@langchain/langgraph-checkpoint-sqlite", + "version": "0.0.0", + "description": "LangGraph", + "type": "module", + "engines": { + "node": ">=18" + }, + "main": "./index.js", + "types": "./index.d.ts", + "repository": { + "type": "git", + "url": "git@github.com:langchain-ai/langgraphjs.git" + }, + "scripts": { + "build": "yarn turbo:command build:internal --filter=@langchain/langgraph-checkpoint-sqlite", + "build:internal": "yarn lc_build_v2 --create-entrypoints --pre --tree-shaking", + "lint:eslint": "NODE_OPTIONS=--max-old-space-size=4096 eslint --cache --ext .ts,.js src/", + "lint:dpdm": "dpdm --exit-code circular:1 --no-warning --no-tree src/*.ts src/**/*.ts", + "lint": "yarn lint:eslint && yarn lint:dpdm", + "lint:fix": "yarn lint:eslint --fix && yarn lint:dpdm", + "prepack": "yarn build", + "test": "NODE_OPTIONS=--experimental-vm-modules jest --testPathIgnorePatterns=\\.int\\.test.ts --testTimeout 30000 --maxWorkers=50%", + "test:watch": "NODE_OPTIONS=--experimental-vm-modules jest --watch --testPathIgnorePatterns=\\.int\\.test.ts", + "test:single": "NODE_OPTIONS=--experimental-vm-modules yarn run jest --config jest.config.cjs --testTimeout 100000", + "test:int": "NODE_OPTIONS=--experimental-vm-modules jest --testPathPattern=\\.int\\.test.ts --testTimeout 100000 --maxWorkers=50%", + "format": "prettier --config .prettierrc --write \"src\"", + "format:check": "prettier --config .prettierrc --check \"src\"" + }, + "author": "LangChain", + "license": "MIT", + "dependencies": { + "@langchain/langgraph-checkpoint": "~0.0.0" + }, + "peerDependencies": { + "@langchain/core": ">=0.2.20 <0.3.0", + "better-sqlite3": "^9.5.0" + }, + "peerDependenciesMeta": { + "better-sqlite3": { + "optional": true + } + }, + "devDependencies": { + "@jest/globals": "^29.5.0", + "@langchain/scripts": "^0.0.22", + "@swc/core": "^1.3.90", + "@swc/jest": "^0.2.29", + "@tsconfig/recommended": "^1.0.3", + "@types/better-sqlite3": "^7.6.9", + "@types/uuid": "^10", + "@typescript-eslint/eslint-plugin": "^6.12.0", + "@typescript-eslint/parser": "^6.12.0", + "better-sqlite3": "^9.5.0", + "dotenv": "^16.3.1", + "dpdm": "^3.12.0", + "eslint": "^8.33.0", + "eslint-config-airbnb-base": "^15.0.0", + "eslint-config-prettier": "^8.6.0", + "eslint-plugin-import": "^2.29.1", + "eslint-plugin-jest": "^28.8.0", + "eslint-plugin-no-instanceof": "^1.0.1", + "eslint-plugin-prettier": "^4.2.1", + "jest": "^29.5.0", + "jest-environment-node": "^29.6.4", + "prettier": "^2.8.3", + "release-it": "^17.6.0", + "rollup": "^4.5.2", + "ts-jest": "^29.1.0", + "tsx": "^4.7.0", + "typescript": "^4.9.5 || ^5.4.5" + }, + "publishConfig": { + "access": "public", + "registry": "https://registry.npmjs.org/" + }, + "exports": { + ".": { + "types": { + "import": "./index.d.ts", + "require": "./index.d.cts", + "default": "./index.d.ts" + }, + "import": "./index.js", + "require": "./index.cjs" + }, + "./package.json": "./package.json" + }, + "files": [ + "dist/", + "index.cjs", + "index.js", + "index.d.ts", + "index.d.cts" + ] +} diff --git a/libs/checkpoint-sqlite/src/index.ts b/libs/checkpoint-sqlite/src/index.ts new file mode 100644 index 00000000..41912219 --- /dev/null +++ b/libs/checkpoint-sqlite/src/index.ts @@ -0,0 +1,270 @@ +import Database, { Database as DatabaseType } from "better-sqlite3"; +import type { RunnableConfig } from 
"@langchain/core/runnables"; +import { + BaseCheckpointSaver, + type Checkpoint, + type CheckpointListOptions, + type CheckpointTuple, + type SerializerProtocol, + type PendingWrite, + type CheckpointMetadata, +} from "@langchain/langgraph-checkpoint"; + +interface CheckpointRow { + checkpoint: string; + metadata: string; + parent_checkpoint_id?: string; + thread_id: string; + checkpoint_id: string; + checkpoint_ns?: string; +} + +interface WritesRow { + thread_id: string; + checkpoint_ns: string; + checkpoint_id: string; + task_id: string; + idx: number; + channel: string; + type?: string; + value?: string; +} + +export class SqliteSaver extends BaseCheckpointSaver { + db: DatabaseType; + + protected isSetup: boolean; + + constructor(db: DatabaseType, serde?: SerializerProtocol) { + super(serde); + this.db = db; + this.isSetup = false; + } + + static fromConnString(connStringOrLocalPath: string): SqliteSaver { + return new SqliteSaver(new Database(connStringOrLocalPath)); + } + + protected setup(): void { + if (this.isSetup) { + return; + } + + this.db.pragma("journal_mode=WAL"); + this.db.exec(` +CREATE TABLE IF NOT EXISTS checkpoints ( + thread_id TEXT NOT NULL, + checkpoint_ns TEXT NOT NULL DEFAULT '', + checkpoint_id TEXT NOT NULL, + parent_checkpoint_id TEXT, + type TEXT, + checkpoint BLOB, + metadata BLOB, + PRIMARY KEY (thread_id, checkpoint_ns, checkpoint_id) +);`); + this.db.exec(` +CREATE TABLE IF NOT EXISTS writes ( + thread_id TEXT NOT NULL, + checkpoint_ns TEXT NOT NULL DEFAULT '', + checkpoint_id TEXT NOT NULL, + task_id TEXT NOT NULL, + idx INTEGER NOT NULL, + channel TEXT NOT NULL, + type TEXT, + value BLOB, + PRIMARY KEY (thread_id, checkpoint_ns, checkpoint_id, task_id, idx) +);`); + + this.isSetup = true; + } + + async getTuple(config: RunnableConfig): Promise { + this.setup(); + const { + thread_id, + checkpoint_ns = "", + checkpoint_id, + } = config.configurable ?? {}; + let row: CheckpointRow; + if (checkpoint_id) { + row = this.db + .prepare( + `SELECT thread_id, checkpoint_id, parent_checkpoint_id, type, checkpoint, metadata FROM checkpoints WHERE thread_id = ? AND checkpoint_ns = ? AND checkpoint_id = ?` + ) + .get(thread_id, checkpoint_ns, checkpoint_id) as CheckpointRow; + } else { + row = this.db + .prepare( + `SELECT thread_id, checkpoint_id, parent_checkpoint_id, type, checkpoint, metadata FROM checkpoints WHERE thread_id = ? AND checkpoint_ns = ? ORDER BY checkpoint_id DESC LIMIT 1` + ) + .get(thread_id, checkpoint_ns) as CheckpointRow; + } + if (row === undefined) { + return undefined; + } + let finalConfig = config; + if (!checkpoint_id) { + finalConfig = { + configurable: { + thread_id: row.thread_id, + checkpoint_ns, + checkpoint_id: row.checkpoint_id, + }, + }; + } + if ( + finalConfig.configurable?.thread_id === undefined || + finalConfig.configurable?.checkpoint_id === undefined + ) { + throw new Error("Missing thread_id or checkpoint_id"); + } + // find any pending writes + const pendingWritesRows = this.db + .prepare( + `SELECT task_id, channel, type, value FROM writes WHERE thread_id = ? AND checkpoint_ns = ? AND checkpoint_id = ?` + ) + .all( + finalConfig.configurable.thread_id.toString(), + checkpoint_ns, + finalConfig.configurable.checkpoint_id.toString() + ) as WritesRow[]; + const pendingWrites = await Promise.all( + pendingWritesRows.map(async (row) => { + return [ + row.task_id, + row.channel, + await this.serde.parse(row.value ?? 
""), + ] as [string, string, unknown]; + }) + ); + return { + config: finalConfig, + checkpoint: (await this.serde.parse(row.checkpoint)) as Checkpoint, + metadata: (await this.serde.parse(row.metadata)) as CheckpointMetadata, + parentConfig: row.parent_checkpoint_id + ? { + configurable: { + thread_id: row.thread_id, + checkpoint_ns, + checkpoint_id: row.parent_checkpoint_id, + }, + } + : undefined, + pendingWrites, + }; + } + + async *list( + config: RunnableConfig, + options?: CheckpointListOptions + ): AsyncGenerator { + const { limit, before } = options ?? {}; + this.setup(); + const thread_id = config.configurable?.thread_id; + let sql = `SELECT thread_id, checkpoint_ns, checkpoint_id, parent_checkpoint_id, type, checkpoint, metadata FROM checkpoints WHERE thread_id = ? ${ + before ? "AND checkpoint_id < ?" : "" + } ORDER BY checkpoint_id DESC`; + if (limit) { + sql += ` LIMIT ${limit}`; + } + const args = [thread_id, before?.configurable?.checkpoint_id].filter( + Boolean + ); + + const rows: CheckpointRow[] = this.db + .prepare(sql) + .all(...args) as CheckpointRow[]; + + if (rows) { + for (const row of rows) { + yield { + config: { + configurable: { + thread_id: row.thread_id, + checkpoint_ns: row.checkpoint_ns, + checkpoint_id: row.checkpoint_id, + }, + }, + checkpoint: (await this.serde.parse(row.checkpoint)) as Checkpoint, + metadata: (await this.serde.parse( + row.metadata + )) as CheckpointMetadata, + parentConfig: row.parent_checkpoint_id + ? { + configurable: { + thread_id: row.thread_id, + checkpoint_ns: row.checkpoint_ns, + checkpoint_id: row.parent_checkpoint_id, + }, + } + : undefined, + }; + } + } + } + + async put( + config: RunnableConfig, + checkpoint: Checkpoint, + metadata: CheckpointMetadata + ): Promise { + this.setup(); + + const row = [ + config.configurable?.thread_id?.toString(), + config.configurable?.checkpoint_ns, + checkpoint.id, + config.configurable?.checkpoint_id, + "Checkpoint", + this.serde.stringify(checkpoint), + this.serde.stringify(metadata), + ]; + + this.db + .prepare( + `INSERT OR REPLACE INTO checkpoints (thread_id, checkpoint_ns, checkpoint_id, parent_checkpoint_id, type, checkpoint, metadata) VALUES (?, ?, ?, ?, ?, ?, ?)` + ) + .run(...row); + + return { + configurable: { + thread_id: config.configurable?.thread_id, + checkpoint_ns: config.configurable?.checkpoint_ns, + checkpoint_id: checkpoint.id, + }, + }; + } + + async putWrites( + config: RunnableConfig, + writes: PendingWrite[], + taskId: string + ): Promise { + this.setup(); + + const stmt = this.db.prepare(` + INSERT OR REPLACE INTO writes + (thread_id, checkpoint_ns, checkpoint_id, task_id, idx, channel, type, value) + VALUES (?, ?, ?, ?, ?, ?, ?, ?) 
+ `); + + const transaction = this.db.transaction((rows) => { + for (const row of rows) { + stmt.run(...row); + } + }); + + const rows = writes.map((write, idx) => [ + config.configurable?.thread_id, + config.configurable?.checkpoint_ns, + config.configurable?.checkpoint_id, + taskId, + idx, + write[0], + "Checkpoint", + this.serde.stringify(write[1]), + ]); + + transaction(rows); + } +} diff --git a/libs/checkpoint-sqlite/src/tests/checkpoints.test.ts b/libs/checkpoint-sqlite/src/tests/checkpoints.test.ts new file mode 100644 index 00000000..01a9cc6a --- /dev/null +++ b/libs/checkpoint-sqlite/src/tests/checkpoints.test.ts @@ -0,0 +1,120 @@ +import { describe, it, expect } from "@jest/globals"; +import { + Checkpoint, + CheckpointTuple, + uuid6, +} from "@langchain/langgraph-checkpoint"; +import { SqliteSaver } from "../index.js"; + +const checkpoint1: Checkpoint = { + v: 1, + id: uuid6(-1), + ts: "2024-04-19T17:19:07.952Z", + channel_values: { + someKey1: "someValue1", + }, + channel_versions: { + someKey2: 1, + }, + versions_seen: { + someKey3: { + someKey4: 1, + }, + }, + pending_sends: [], +}; +const checkpoint2: Checkpoint = { + v: 1, + id: uuid6(1), + ts: "2024-04-20T17:19:07.952Z", + channel_values: { + someKey1: "someValue2", + }, + channel_versions: { + someKey2: 2, + }, + versions_seen: { + someKey3: { + someKey4: 2, + }, + }, + pending_sends: [], +}; + +describe("SqliteSaver", () => { + it("should save and retrieve checkpoints correctly", async () => { + const sqliteSaver = SqliteSaver.fromConnString(":memory:"); + + // get undefined checkpoint + const undefinedCheckpoint = await sqliteSaver.getTuple({ + configurable: { thread_id: "1" }, + }); + expect(undefinedCheckpoint).toBeUndefined(); + + // save first checkpoint + const runnableConfig = await sqliteSaver.put( + { configurable: { thread_id: "1" } }, + checkpoint1, + { source: "update", step: -1, writes: null } + ); + expect(runnableConfig).toEqual({ + configurable: { + thread_id: "1", + checkpoint_id: checkpoint1.id, + }, + }); + + // get first checkpoint tuple + const firstCheckpointTuple = await sqliteSaver.getTuple({ + configurable: { thread_id: "1" }, + }); + expect(firstCheckpointTuple?.config).toEqual({ + configurable: { + thread_id: "1", + checkpoint_ns: "", + checkpoint_id: checkpoint1.id, + }, + }); + expect(firstCheckpointTuple?.checkpoint).toEqual(checkpoint1); + expect(firstCheckpointTuple?.parentConfig).toBeUndefined(); + + // save second checkpoint + await sqliteSaver.put( + { + configurable: { + thread_id: "1", + checkpoint_id: "2024-04-18T17:19:07.952Z", + }, + }, + checkpoint2, + { source: "update", step: -1, writes: null } + ); + + // verify that parentTs is set and retrieved correctly for second checkpoint + const secondCheckpointTuple = await sqliteSaver.getTuple({ + configurable: { thread_id: "1" }, + }); + expect(secondCheckpointTuple?.parentConfig).toEqual({ + configurable: { + thread_id: "1", + checkpoint_ns: "", + checkpoint_id: "2024-04-18T17:19:07.952Z", + }, + }); + + // list checkpoints + const checkpointTupleGenerator = await sqliteSaver.list({ + configurable: { thread_id: "1" }, + }); + const checkpointTuples: CheckpointTuple[] = []; + for await (const checkpoint of checkpointTupleGenerator) { + checkpointTuples.push(checkpoint); + } + expect(checkpointTuples.length).toBe(2); + + const checkpointTuple1 = checkpointTuples[0]; + const checkpointTuple2 = checkpointTuples[1]; + expect(checkpointTuple1.checkpoint.ts).toBe("2024-04-20T17:19:07.952Z"); + 
expect(checkpointTuple2.checkpoint.ts).toBe("2024-04-19T17:19:07.952Z"); + }); +}); diff --git a/langgraph/tsconfig.cjs.json b/libs/checkpoint-sqlite/tsconfig.cjs.json similarity index 100% rename from langgraph/tsconfig.cjs.json rename to libs/checkpoint-sqlite/tsconfig.cjs.json diff --git a/libs/checkpoint-sqlite/tsconfig.json b/libs/checkpoint-sqlite/tsconfig.json new file mode 100644 index 00000000..bc85d83b --- /dev/null +++ b/libs/checkpoint-sqlite/tsconfig.json @@ -0,0 +1,23 @@ +{ + "extends": "@tsconfig/recommended", + "compilerOptions": { + "outDir": "../dist", + "rootDir": "./src", + "target": "ES2021", + "lib": ["ES2021", "ES2022.Object", "DOM"], + "module": "ES2020", + "moduleResolution": "nodenext", + "esModuleInterop": true, + "declaration": true, + "noImplicitReturns": true, + "noFallthroughCasesInSwitch": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "useDefineForClassFields": true, + "strictPropertyInitialization": false, + "allowJs": true, + "strict": true + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "dist", "docs"] +} diff --git a/langgraph/turbo.json b/libs/checkpoint-sqlite/turbo.json similarity index 100% rename from langgraph/turbo.json rename to libs/checkpoint-sqlite/turbo.json diff --git a/libs/checkpoint/.env.example b/libs/checkpoint/.env.example new file mode 100644 index 00000000..aea660a4 --- /dev/null +++ b/libs/checkpoint/.env.example @@ -0,0 +1,6 @@ +# ------------------LangSmith tracing------------------ +LANGCHAIN_TRACING_V2=true +LANGCHAIN_ENDPOINT="https://api.smith.langchain.com" +LANGCHAIN_API_KEY= +LANGCHAIN_PROJECT= +# ----------------------------------------------------- \ No newline at end of file diff --git a/libs/checkpoint/.eslintrc.cjs b/libs/checkpoint/.eslintrc.cjs new file mode 100644 index 00000000..02711dad --- /dev/null +++ b/libs/checkpoint/.eslintrc.cjs @@ -0,0 +1,69 @@ +module.exports = { + extends: [ + "airbnb-base", + "eslint:recommended", + "prettier", + "plugin:@typescript-eslint/recommended", + ], + parserOptions: { + ecmaVersion: 12, + parser: "@typescript-eslint/parser", + project: "./tsconfig.json", + sourceType: "module", + }, + plugins: ["@typescript-eslint", "no-instanceof", "eslint-plugin-jest"], + ignorePatterns: [ + ".eslintrc.cjs", + "scripts", + "node_modules", + "dist", + "dist-cjs", + "*.js", + "*.cjs", + "*.d.ts", + ], + rules: { + "no-process-env": 2, + "no-instanceof/no-instanceof": 2, + "@typescript-eslint/explicit-module-boundary-types": 0, + "@typescript-eslint/no-empty-function": 0, + "@typescript-eslint/no-shadow": 0, + "@typescript-eslint/no-empty-interface": 0, + "@typescript-eslint/no-use-before-define": ["error", "nofunc"], + "@typescript-eslint/no-unused-vars": ["warn", { args: "none" }], + "@typescript-eslint/no-floating-promises": "error", + "@typescript-eslint/no-misused-promises": "error", + "arrow-body-style": 0, + camelcase: 0, + "class-methods-use-this": 0, + "import/extensions": [2, "ignorePackages"], + "import/no-extraneous-dependencies": [ + "error", + { devDependencies: ["**/*.test.ts"] }, + ], + "import/no-unresolved": 0, + "import/prefer-default-export": 0, + 'jest/no-focused-tests': 'error', + "keyword-spacing": "error", + "max-classes-per-file": 0, + "max-len": 0, + "no-await-in-loop": 0, + "no-bitwise": 0, + "no-console": 0, + "no-empty-function": 0, + "no-restricted-syntax": 0, + "no-shadow": 0, + "no-continue": 0, + "no-void": 0, + "no-underscore-dangle": 0, + "no-use-before-define": 0, + "no-useless-constructor": 0, + "no-return-await": 0, + 
"consistent-return": 0, + "no-else-return": 0, + "func-names": 0, + "no-lonely-if": 0, + "prefer-rest-params": 0, + "new-cap": ["error", { properties: false, capIsNew: false }], + }, +}; diff --git a/libs/checkpoint/.gitignore b/libs/checkpoint/.gitignore new file mode 100644 index 00000000..c10034e2 --- /dev/null +++ b/libs/checkpoint/.gitignore @@ -0,0 +1,7 @@ +index.cjs +index.js +index.d.ts +index.d.cts +node_modules +dist +.yarn diff --git a/libs/checkpoint/.prettierrc b/libs/checkpoint/.prettierrc new file mode 100644 index 00000000..ba08ff04 --- /dev/null +++ b/libs/checkpoint/.prettierrc @@ -0,0 +1,19 @@ +{ + "$schema": "https://json.schemastore.org/prettierrc", + "printWidth": 80, + "tabWidth": 2, + "useTabs": false, + "semi": true, + "singleQuote": false, + "quoteProps": "as-needed", + "jsxSingleQuote": false, + "trailingComma": "es5", + "bracketSpacing": true, + "arrowParens": "always", + "requirePragma": false, + "insertPragma": false, + "proseWrap": "preserve", + "htmlWhitespaceSensitivity": "css", + "vueIndentScriptAndStyle": false, + "endOfLine": "lf" +} diff --git a/langgraph/.release-it.json b/libs/checkpoint/.release-it.json similarity index 100% rename from langgraph/.release-it.json rename to libs/checkpoint/.release-it.json diff --git a/libs/checkpoint/LICENSE b/libs/checkpoint/LICENSE new file mode 100644 index 00000000..e7530f5e --- /dev/null +++ b/libs/checkpoint/LICENSE @@ -0,0 +1,21 @@ +The MIT License + +Copyright (c) 2024 LangChain + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. \ No newline at end of file diff --git a/libs/checkpoint/README.md b/libs/checkpoint/README.md new file mode 100644 index 00000000..246a555e --- /dev/null +++ b/libs/checkpoint/README.md @@ -0,0 +1,97 @@ +# @langchain/langgraph-checkpoint + +This library defines the base interface for [LangGraph.js](https://github.com/langchain-ai/langgraphjs) checkpointers. Checkpointers provide persistence layer for LangGraph. They allow you to interact with and manage the graph's state. When you use a graph with a checkpointer, the checkpointer saves a _checkpoint_ of the graph state at every superstep, enabling several powerful capabilities like human-in-the-loop, "memory" between interactions and more. + +## Key concepts + +### Checkpoint + +Checkpoint is a snapshot of the graph state at a given point in time. Checkpoint tuple refers to an object containing checkpoint and the associated config, metadata and pending writes. 
+ +### Thread + +Threads enable the checkpointing of multiple different runs, making them essential for multi-tenant chat applications and other scenarios where maintaining separate states is necessary. A thread is a unique ID assigned to a series of checkpoints saved by a checkpointer. When using a checkpointer, you must specify a `thread_id` and optionally a `checkpoint_id` when running the graph. + +- `thread_id` is simply the ID of a thread. This is always required. +- `checkpoint_id` can optionally be passed. This identifier refers to a specific checkpoint within a thread. It can be used to kick off a run of a graph from some point partway through a thread. + +You must pass these when invoking the graph as part of the `configurable` portion of the config, e.g. + +```ts +{ configurable: { thread_id: "1" } } // valid config +{ configurable: { thread_id: "1", checkpoint_id: "0c62ca34-ac19-445d-bbb0-5b4984975b2a" } } // also valid config +``` + +### Serde + +`@langchain/langgraph-checkpoint` also defines a protocol for serialization/deserialization (serde) and provides a default implementation that handles a range of types. + +### Pending writes + +When a graph node fails mid-execution at a given superstep, LangGraph stores the pending checkpoint writes from any other nodes that completed successfully at that superstep, so that when graph execution resumes from that superstep the successful nodes are not re-run. + +## Interface + +Each checkpointer should conform to the `BaseCheckpointSaver` interface and must implement the following methods (a minimal skeleton is sketched after this list): + +- `.put` - Store a checkpoint with its configuration and metadata. +- `.putWrites` - Store intermediate writes linked to a checkpoint (i.e. pending writes). +- `.getTuple` - Fetch a checkpoint tuple for a given configuration (`thread_id`, and optionally `checkpoint_ns` and `checkpoint_id`). +- `.list` - List checkpoints that match a given configuration and filter criteria. 
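+
+The sketch below is a non-functional skeleton intended only to illustrate those signatures; the class name `MyCheckpointSaver` is a placeholder and the method bodies are left unimplemented (see `MemorySaver` in this package, or `SqliteSaver` in `@langchain/langgraph-checkpoint-sqlite`, for complete implementations):
+
+```ts
+import type { RunnableConfig } from "@langchain/core/runnables";
+import {
+  BaseCheckpointSaver,
+  type ChannelVersions,
+  type Checkpoint,
+  type CheckpointListOptions,
+  type CheckpointMetadata,
+  type CheckpointTuple,
+  type PendingWrite,
+} from "@langchain/langgraph-checkpoint";
+
+class MyCheckpointSaver extends BaseCheckpointSaver {
+  // Fetch a single checkpoint tuple for a thread (optionally a specific checkpoint_id).
+  async getTuple(config: RunnableConfig): Promise<CheckpointTuple | undefined> {
+    throw new Error("Not implemented");
+  }
+
+  // Stream checkpoint tuples that match the given config and options.
+  async *list(
+    config: RunnableConfig,
+    options?: CheckpointListOptions
+  ): AsyncGenerator<CheckpointTuple> {
+    throw new Error("Not implemented");
+  }
+
+  // Store a checkpoint and return the config pointing at it.
+  async put(
+    config: RunnableConfig,
+    checkpoint: Checkpoint,
+    metadata: CheckpointMetadata,
+    newVersions: ChannelVersions
+  ): Promise<RunnableConfig> {
+    throw new Error("Not implemented");
+  }
+
+  // Store intermediate (pending) writes linked to a checkpoint.
+  async putWrites(
+    config: RunnableConfig,
+    writes: PendingWrite[],
+    taskId: string
+  ): Promise<void> {
+    throw new Error("Not implemented");
+  }
+}
+```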
+ +## Usage + +```ts +import { MemorySaver } from "@langchain/langgraph-checkpoint"; + +const writeConfig = { + configurable: { + thread_id: "1", + checkpoint_ns: "" + } +}; +const readConfig = { + configurable: { + thread_id: "1" + } +}; + +const checkpointer = new MemorySaver(); +const checkpoint = { + v: 1, + ts: "2024-07-31T20:14:19.804150+00:00", + id: "1ef4f797-8335-6428-8001-8a1503f9b875", + channel_values: { + my_key: "meow", + node: "node" + }, + channel_versions: { + __start__: 2, + my_key: 3, + start:node: 3, + node: 3 + }, + versions_seen: { + __input__: {}, + __start__: { + __start__: 1 + }, + node: { + start:node: 2 + } + }, + pending_sends: [], + current_tasks: {} +} + +// store checkpoint +await checkpointer.put(writeConfig, checkpoint, {}, {}) + +// load checkpoint +await checkpointer.get(readConfig) + +// list checkpoints +for await (const checkpoint of checkpointer.list(readConfig)) { + console.log(checkpoint); +} +``` diff --git a/libs/checkpoint/jest.config.cjs b/libs/checkpoint/jest.config.cjs new file mode 100644 index 00000000..385d19f6 --- /dev/null +++ b/libs/checkpoint/jest.config.cjs @@ -0,0 +1,20 @@ +/** @type {import('ts-jest').JestConfigWithTsJest} */ +module.exports = { + preset: "ts-jest/presets/default-esm", + testEnvironment: "./jest.env.cjs", + modulePathIgnorePatterns: ["dist/"], + moduleNameMapper: { + "^(\\.{1,2}/.*)\\.js$": "$1", + }, + transform: { + "^.+\\.tsx?$": ["@swc/jest"], + }, + transformIgnorePatterns: [ + "/node_modules/", + "\\.pnp\\.[^\\/]+$", + "./scripts/jest-setup-after-env.js", + ], + setupFiles: ["dotenv/config"], + testTimeout: 20_000, + passWithNoTests: true, +}; diff --git a/libs/checkpoint/jest.env.cjs b/libs/checkpoint/jest.env.cjs new file mode 100644 index 00000000..2ccedccb --- /dev/null +++ b/libs/checkpoint/jest.env.cjs @@ -0,0 +1,12 @@ +const { TestEnvironment } = require("jest-environment-node"); + +class AdjustedTestEnvironmentToSupportFloat32Array extends TestEnvironment { + constructor(config, context) { + // Make `instanceof Float32Array` return true in tests + // to avoid https://github.com/xenova/transformers.js/issues/57 and https://github.com/jestjs/jest/issues/2549 + super(config, context); + this.global.Float32Array = Float32Array; + } +} + +module.exports = AdjustedTestEnvironmentToSupportFloat32Array; diff --git a/libs/checkpoint/langchain.config.js b/libs/checkpoint/langchain.config.js new file mode 100644 index 00000000..fe70c345 --- /dev/null +++ b/libs/checkpoint/langchain.config.js @@ -0,0 +1,21 @@ +import { resolve, dirname } from "node:path"; +import { fileURLToPath } from "node:url"; + +/** + * @param {string} relativePath + * @returns {string} + */ +function abs(relativePath) { + return resolve(dirname(fileURLToPath(import.meta.url)), relativePath); +} + +export const config = { + internals: [/node\:/, /@langchain\/core\//, /async_hooks/], + entrypoints: { + index: "index" + }, + tsConfigPath: resolve("./tsconfig.json"), + cjsSource: "./dist-cjs", + cjsDestination: "./dist", + abs, +}; diff --git a/libs/checkpoint/package.json b/libs/checkpoint/package.json new file mode 100644 index 00000000..e04adb64 --- /dev/null +++ b/libs/checkpoint/package.json @@ -0,0 +1,88 @@ +{ + "name": "@langchain/langgraph-checkpoint", + "version": "0.0.0", + "description": "Library with base interfaces for LangGraph checkpoint savers.", + "type": "module", + "engines": { + "node": ">=18" + }, + "main": "./index.js", + "types": "./index.d.ts", + "repository": { + "type": "git", + "url": 
"git@github.com:langchain-ai/langgraphjs.git" + }, + "scripts": { + "build": "yarn turbo:command build:internal --filter=@langchain/langgraph-checkpoint", + "build:internal": "yarn lc_build_v2 --create-entrypoints --pre --tree-shaking", + "lint:eslint": "NODE_OPTIONS=--max-old-space-size=4096 eslint --cache --ext .ts,.js src/", + "lint:dpdm": "dpdm --exit-code circular:1 --no-warning --no-tree src/*.ts src/**/*.ts", + "lint": "yarn lint:eslint && yarn lint:dpdm", + "lint:fix": "yarn lint:eslint --fix && yarn lint:dpdm", + "prepack": "yarn build", + "test": "NODE_OPTIONS=--experimental-vm-modules jest --testPathIgnorePatterns=\\.int\\.test.ts --testTimeout 30000 --maxWorkers=50%", + "test:watch": "NODE_OPTIONS=--experimental-vm-modules jest --watch --testPathIgnorePatterns=\\.int\\.test.ts", + "test:single": "NODE_OPTIONS=--experimental-vm-modules yarn run jest --config jest.config.cjs --testTimeout 100000", + "test:int": "NODE_OPTIONS=--experimental-vm-modules jest --testPathPattern=\\.int\\.test.ts --testTimeout 100000 --maxWorkers=50%", + "format": "prettier --config .prettierrc --write \"src\"", + "format:check": "prettier --config .prettierrc --check \"src\"" + }, + "author": "LangChain", + "license": "MIT", + "dependencies": { + "uuid": "^10.0.0" + }, + "peerDependencies": { + "@langchain/core": ">=0.2.20 <0.3.0" + }, + "devDependencies": { + "@jest/globals": "^29.5.0", + "@langchain/scripts": "^0.0.22", + "@swc/core": "^1.3.90", + "@swc/jest": "^0.2.29", + "@tsconfig/recommended": "^1.0.3", + "@types/uuid": "^10", + "@typescript-eslint/eslint-plugin": "^6.12.0", + "@typescript-eslint/parser": "^6.12.0", + "dotenv": "^16.3.1", + "dpdm": "^3.12.0", + "eslint": "^8.33.0", + "eslint-config-airbnb-base": "^15.0.0", + "eslint-config-prettier": "^8.6.0", + "eslint-plugin-import": "^2.29.1", + "eslint-plugin-jest": "^28.8.0", + "eslint-plugin-no-instanceof": "^1.0.1", + "eslint-plugin-prettier": "^4.2.1", + "jest": "^29.5.0", + "jest-environment-node": "^29.6.4", + "prettier": "^2.8.3", + "release-it": "^17.6.0", + "rollup": "^4.5.2", + "ts-jest": "^29.1.0", + "tsx": "^4.7.0", + "typescript": "^4.9.5 || ^5.4.5" + }, + "publishConfig": { + "access": "public", + "registry": "https://registry.npmjs.org/" + }, + "exports": { + ".": { + "types": { + "import": "./index.d.ts", + "require": "./index.d.cts", + "default": "./index.d.ts" + }, + "import": "./index.js", + "require": "./index.cjs" + }, + "./package.json": "./package.json" + }, + "files": [ + "dist/", + "index.cjs", + "index.js", + "index.d.ts", + "index.d.cts" + ] +} diff --git a/langgraph/src/checkpoint/base.ts b/libs/checkpoint/src/base.ts similarity index 66% rename from langgraph/src/checkpoint/base.ts rename to libs/checkpoint/src/base.ts index 6cfdeb5b..7c128caf 100644 --- a/langgraph/src/checkpoint/base.ts +++ b/libs/checkpoint/src/base.ts @@ -1,27 +1,14 @@ -import { RunnableConfig } from "@langchain/core/runnables"; -import { DefaultSerializer, SerializerProtocol } from "../serde/base.js"; +import type { RunnableConfig } from "@langchain/core/runnables"; +import { DefaultSerializer, SerializerProtocol } from "./serde/base.js"; import { uuid6 } from "./id.js"; -import { SendInterface } from "../constants.js"; +import type { + PendingWrite, + CheckpointPendingWrite, + CheckpointMetadata, +} from "./types.js"; +import type { ChannelProtocol, SendProtocol } from "./serde/types.js"; -export interface CheckpointMetadata { - source: "input" | "loop" | "update"; - /** - * The source of the checkpoint. 
- * - "input": The checkpoint was created from an input to invoke/stream/batch. - * - "loop": The checkpoint was created from inside the pregel loop. - * - "update": The checkpoint was created from a manual state update. */ - step: number; - /** - * The step number of the checkpoint. - * -1 for the first "input" checkpoint. - * 0 for the first "loop" checkpoint. - * ... for the nth checkpoint afterwards. */ - writes: Record | null; - /** - * The writes that were made between the previous checkpoint and this one. - * Mapping from node name to writes emitted by that node. - */ -} +export type ChannelVersions = Record; export interface Checkpoint< N extends string = string, @@ -55,7 +42,7 @@ export interface Checkpoint< * List of packets sent to nodes but not yet processed. * Cleared by the next checkpoint. */ - pending_sends: SendInterface[]; + pending_sends: SendProtocol[]; } export interface ReadonlyCheckpoint extends Readonly { @@ -66,21 +53,6 @@ export interface ReadonlyCheckpoint extends Readonly { >; } -export function getChannelVersion( - checkpoint: ReadonlyCheckpoint, - channel: string -): number { - return checkpoint.channel_versions[channel] ?? 0; -} - -export function getVersionSeen( - checkpoint: ReadonlyCheckpoint, - node: string, - channel: string -): number { - return checkpoint.versions_seen[node]?.[channel] ?? 0; -} - export function deepCopy(obj: T): T { if (typeof obj !== "object" || obj === null) { return obj; @@ -128,9 +100,17 @@ export interface CheckpointTuple { checkpoint: Checkpoint; metadata?: CheckpointMetadata; parentConfig?: RunnableConfig; + pendingWrites?: CheckpointPendingWrite[]; } -export abstract class BaseCheckpointSaver { +export type CheckpointListOptions = { + limit?: number; + before?: RunnableConfig; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + filter?: Record; +}; + +export abstract class BaseCheckpointSaver { serde: SerializerProtocol = DefaultSerializer; constructor(serde?: SerializerProtocol) { @@ -148,13 +128,32 @@ export abstract class BaseCheckpointSaver { abstract list( config: RunnableConfig, - limit?: number, - before?: RunnableConfig + options?: CheckpointListOptions ): AsyncGenerator; abstract put( config: RunnableConfig, checkpoint: Checkpoint, - metadata: CheckpointMetadata + metadata: CheckpointMetadata, + newVersions: ChannelVersions ): Promise; + + /** + * Store intermediate writes linked to a checkpoint. + */ + abstract putWrites( + config: RunnableConfig, + writes: PendingWrite[], + taskId: string + ): Promise; + + /** + * Generate the next version ID for a channel. + * + * Default is to use integer versions, incrementing by 1. If you override, you can use str/int/float versions, + * as long as they are monotonically increasing. + */ + getNextVersion(current: V | undefined, _channel: ChannelProtocol) { + return current !== undefined ? (current as number) + 1 : 1; + } } diff --git a/libs/checkpoint/src/id.ts b/libs/checkpoint/src/id.ts new file mode 100644 index 00000000..ec558c28 --- /dev/null +++ b/libs/checkpoint/src/id.ts @@ -0,0 +1,17 @@ +import { v5, v6 } from "uuid"; + +export function uuid6(clockseq: number): string { + return v6({ clockseq }); +} + +// Skip UUID validation check, since UUID6s +// generated with negative clockseq are not +// technically compliant, but still work. +// See: https://github.com/uuidjs/uuid/issues/511 +export function uuid5(name: string, namespace: string): string { + const namespaceBytes = namespace + .replace(/-/g, "") + .match(/.{2}/g)! 
+ .map((byte) => parseInt(byte, 16)); + return v5(name, new Uint8Array(namespaceBytes)); +} diff --git a/libs/checkpoint/src/index.ts b/libs/checkpoint/src/index.ts new file mode 100644 index 00000000..332ee38b --- /dev/null +++ b/libs/checkpoint/src/index.ts @@ -0,0 +1,6 @@ +export { MemorySaver } from "./memory.js"; +export * from "./base.js"; +export * from "./id.js"; +export * from "./types.js"; +export * from "./serde/base.js"; +export * from "./serde/types.js"; diff --git a/libs/checkpoint/src/memory.ts b/libs/checkpoint/src/memory.ts new file mode 100644 index 00000000..90825f5c --- /dev/null +++ b/libs/checkpoint/src/memory.ts @@ -0,0 +1,263 @@ +import type { RunnableConfig } from "@langchain/core/runnables"; +import { + BaseCheckpointSaver, + Checkpoint, + CheckpointListOptions, + CheckpointTuple, +} from "./base.js"; +import { SerializerProtocol } from "./serde/base.js"; +import { + CheckpointMetadata, + CheckpointPendingWrite, + PendingWrite, +} from "./types.js"; + +function _generateKey( + threadId: string, + checkpointNamespace: string, + checkpointId: string +) { + return JSON.stringify([threadId, checkpointNamespace, checkpointId]); +} + +export class MemorySaver extends BaseCheckpointSaver { + // thread ID -> checkpoint namespace -> checkpoint ID -> checkpoint mapping + storage: Record< + string, + Record> + > = {}; + + writes: Record = {}; + + constructor(serde?: SerializerProtocol) { + super(serde); + } + + async getTuple(config: RunnableConfig): Promise { + const thread_id = config.configurable?.thread_id; + const checkpoint_ns = config.configurable?.checkpoint_ns ?? ""; + let checkpoint_id = config.configurable?.checkpoint_id; + + if (checkpoint_id) { + const saved = this.storage[thread_id]?.[checkpoint_ns]?.[checkpoint_id]; + if (saved !== undefined) { + const [checkpoint, metadata, parentCheckpointId] = saved; + const writes = + this.writes[_generateKey(thread_id, checkpoint_ns, checkpoint_id)] ?? + []; + const pendingWrites: CheckpointPendingWrite[] = await Promise.all( + writes.map(async ([taskId, channel, value]) => { + return [taskId, channel, await this.serde.parse(value as string)]; + }) + ); + const parentConfig = + parentCheckpointId !== undefined + ? { + configurable: { + thread_id, + checkpoint_ns, + checkpoint_id, + }, + } + : undefined; + return { + config, + checkpoint: (await this.serde.parse(checkpoint)) as Checkpoint, + metadata: (await this.serde.parse(metadata)) as CheckpointMetadata, + pendingWrites, + parentConfig, + }; + } + } else { + const checkpoints = this.storage[thread_id]?.[checkpoint_ns]; + if (checkpoints !== undefined) { + // eslint-disable-next-line prefer-destructuring + checkpoint_id = Object.keys(checkpoints).sort((a, b) => + b.localeCompare(a) + )[0]; + const saved = checkpoints[checkpoint_id]; + const [checkpoint, metadata, parentCheckpointId] = saved; + const writes = + this.writes[_generateKey(thread_id, checkpoint_ns, checkpoint_id)] ?? + []; + const pendingWrites: CheckpointPendingWrite[] = await Promise.all( + writes.map(async ([taskId, channel, value]) => { + return [taskId, channel, await this.serde.parse(value as string)]; + }) + ); + const parentConfig = + parentCheckpointId !== undefined + ? 
{ + configurable: { + thread_id, + checkpoint_ns, + checkpoint_id: parentCheckpointId, + }, + } + : undefined; + return { + config: { + configurable: { + thread_id, + checkpoint_id, + checkpoint_ns, + }, + }, + checkpoint: (await this.serde.parse(checkpoint)) as Checkpoint, + metadata: (await this.serde.parse(metadata)) as CheckpointMetadata, + pendingWrites, + parentConfig, + }; + } + } + + return undefined; + } + + async *list( + config: RunnableConfig, + options?: CheckpointListOptions + ): AsyncGenerator { + // eslint-disable-next-line prefer-const + let { before, limit } = options ?? {}; + const threadIds = config.configurable?.thread_id + ? [config.configurable?.thread_id] + : Object.keys(this.storage); + const checkpointNamespace = config.configurable?.checkpoint_ns ?? ""; + + for (const threadId of threadIds) { + const checkpoints = this.storage[threadId]?.[checkpointNamespace] ?? {}; + const sortedCheckpoints = Object.entries(checkpoints).sort((a, b) => + b[0].localeCompare(a[0]) + ); + + for (const [ + checkpointId, + [checkpoint, metadataStr, parentCheckpointId], + ] of sortedCheckpoints) { + // Filter by checkpoint ID + if ( + before && + before.configurable?.checkpoint_id && + checkpointId >= before.configurable.checkpoint_id + ) { + continue; + } + + // Parse metadata + const metadata = (await this.serde.parse( + metadataStr + )) as CheckpointMetadata; + + // Limit search results + if (limit !== undefined) { + if (limit <= 0) break; + // eslint-disable-next-line no-param-reassign + limit -= 1; + } + + const writes = + this.writes[ + _generateKey(threadId, checkpointNamespace, checkpointId) + ] ?? []; + const pendingWrites: CheckpointPendingWrite[] = await Promise.all( + writes.map(async ([taskId, channel, value]) => { + return [taskId, channel, await this.serde.parse(value as string)]; + }) + ); + + yield { + config: { + configurable: { + thread_id: threadId, + checkpoint_ns: checkpointNamespace, + checkpoint_id: checkpointId, + }, + }, + checkpoint: (await this.serde.parse(checkpoint)) as Checkpoint, + metadata, + pendingWrites, + parentConfig: parentCheckpointId + ? { + configurable: { + thread_id: threadId, + checkpoint_ns: checkpointNamespace, + checkpoint_id: parentCheckpointId, + }, + } + : undefined, + }; + } + } + } + + async put( + config: RunnableConfig, + checkpoint: Checkpoint, + metadata: CheckpointMetadata + ): Promise { + const threadId = config.configurable?.thread_id; + const checkpointNamespace = config.configurable?.checkpoint_ns; + if (threadId === undefined) { + throw new Error( + `Failed to put checkpoint. The passed RunnableConfig is missing a required "thread_id" field in its "configurable" property.` + ); + } + if (checkpointNamespace === undefined) { + throw new Error( + `Failed to put checkpoint. 
The passed RunnableConfig is missing a required "checkpoint_ns" field in its "configurable" property.` + ); + } + + if (!this.storage[threadId]) { + this.storage[threadId] = {}; + } + if (!this.storage[threadId][checkpointNamespace]) { + this.storage[threadId][checkpointNamespace] = {}; + } + + this.storage[threadId][checkpointNamespace][checkpoint.id] = [ + this.serde.stringify(checkpoint), + this.serde.stringify(metadata), + config.configurable?.checkpoint_id, // parent + ]; + + return { + configurable: { + thread_id: threadId, + checkpoint_ns: checkpointNamespace, + checkpoint_id: checkpoint.id, + }, + }; + } + + async putWrites( + config: RunnableConfig, + writes: PendingWrite[], + taskId: string + ): Promise { + const threadId = config.configurable?.thread_id; + const checkpointNamespace = config.configurable?.checkpoint_ns; + const checkpointId = config.configurable?.checkpoint_id; + if (threadId === undefined) { + throw new Error( + `Failed to put writes. The passed RunnableConfig is missing a required "thread_id" field in its "configurable" property` + ); + } + if (checkpointId === undefined) { + throw new Error( + `Failed to put writes. The passed RunnableConfig is missing a required "checkpoint_id" field in its "configurable" property.` + ); + } + const key = _generateKey(threadId, checkpointNamespace, checkpointId); + if (this.writes[key] === undefined) { + this.writes[key] = []; + } + const pendingWrites: CheckpointPendingWrite[] = writes.map( + ([channel, value]) => { + return [taskId, channel, this.serde.stringify(value)]; + } + ); + this.writes[key].push(...pendingWrites); + } +} diff --git a/langgraph/src/serde/base.ts b/libs/checkpoint/src/serde/base.ts similarity index 100% rename from langgraph/src/serde/base.ts rename to libs/checkpoint/src/serde/base.ts diff --git a/libs/checkpoint/src/serde/types.ts b/libs/checkpoint/src/serde/types.ts new file mode 100644 index 00000000..d4195d00 --- /dev/null +++ b/libs/checkpoint/src/serde/types.ts @@ -0,0 +1,58 @@ +// Mirrors BaseChannel in "@langchain/langgraph" +export interface ChannelProtocol< + ValueType = unknown, + UpdateType = unknown, + CheckpointType = unknown +> { + ValueType: ValueType; + + UpdateType: UpdateType; + + /** + * The name of the channel. + */ + lc_graph_name: string; + + /** + * Return a new identical channel, optionally initialized from a checkpoint. + * Can be thought of as a "restoration" from a checkpoint which is a "snapshot" of the channel's state. + * + * @param {CheckpointType | undefined} checkpoint + * @param {CheckpointType | undefined} initialValue + * @returns {this} + */ + fromCheckpoint(checkpoint?: CheckpointType): this; + + /** + * Update the channel's value with the given sequence of updates. + * The order of the updates in the sequence is arbitrary. + * + * @throws {InvalidUpdateError} if the sequence of updates is invalid. + * @param {Array} values + * @returns {void} + */ + update(values: UpdateType[]): void; + + /** + * Return the current value of the channel. + * + * @throws {EmptyChannelError} if the channel is empty (never updated yet). + * @returns {ValueType} + */ + get(): ValueType; + + /** + * Return a string representation of the channel's current state. + * + * @throws {EmptyChannelError} if the channel is empty (never updated yet), or doesn't support checkpoints. 
+ * @returns {CheckpointType | undefined} + */ + checkpoint(): CheckpointType | undefined; +} + +// Mirrors SendInterface in "@langchain/langgraph" +export interface SendProtocol { + node: string; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + args: any; +} diff --git a/langgraph/src/tests/checkpoints.test.ts b/libs/checkpoint/src/tests/checkpoints.test.ts similarity index 55% rename from langgraph/src/tests/checkpoints.test.ts rename to libs/checkpoint/src/tests/checkpoints.test.ts index 4ed3e833..13cb0f8e 100644 --- a/langgraph/src/tests/checkpoints.test.ts +++ b/libs/checkpoint/src/tests/checkpoints.test.ts @@ -1,8 +1,7 @@ import { describe, it, expect } from "@jest/globals"; -import { Checkpoint, CheckpointTuple, deepCopy } from "../checkpoint/base.js"; -import { MemorySaver } from "../checkpoint/memory.js"; -import { SqliteSaver } from "../checkpoint/sqlite.js"; -import { uuid6 } from "../checkpoint/id.js"; +import { Checkpoint, CheckpointTuple, deepCopy } from "../base.js"; +import { MemorySaver } from "../memory.js"; +import { uuid6 } from "../id.js"; const checkpoint1: Checkpoint = { v: 1, @@ -85,115 +84,45 @@ describe("MemorySaver", () => { // save checkpoint const runnableConfig = await memorySaver.put( - { configurable: { thread_id: "1" } }, + { configurable: { thread_id: "1", checkpoint_ns: "" } }, checkpoint1, { source: "update", step: -1, writes: null } ); expect(runnableConfig).toEqual({ configurable: { thread_id: "1", + checkpoint_ns: "", checkpoint_id: checkpoint1.id, }, }); // get checkpoint tuple const checkpointTuple = await memorySaver.getTuple({ - configurable: { thread_id: "1" }, + configurable: { thread_id: "1", checkpoint_ns: "" }, }); expect(checkpointTuple?.config).toEqual({ configurable: { thread_id: "1", + checkpoint_ns: "", checkpoint_id: checkpoint1.id, }, }); expect(checkpointTuple?.checkpoint).toEqual(checkpoint1); // save another checkpoint - await memorySaver.put({ configurable: { thread_id: "1" } }, checkpoint2, { - source: "update", - step: -1, - writes: null, - }); - - // list checkpoints - const checkpointTupleGenerator = await memorySaver.list({ - configurable: { thread_id: "1" }, - }); - const checkpointTuples: CheckpointTuple[] = []; - for await (const checkpoint of checkpointTupleGenerator) { - checkpointTuples.push(checkpoint); - } - expect(checkpointTuples.length).toBe(2); - - const checkpointTuple1 = checkpointTuples[0]; - const checkpointTuple2 = checkpointTuples[1]; - expect(checkpointTuple1.checkpoint.ts).toBe("2024-04-20T17:19:07.952Z"); - expect(checkpointTuple2.checkpoint.ts).toBe("2024-04-19T17:19:07.952Z"); - }); -}); - -describe("SqliteSaver", () => { - it("should save and retrieve checkpoints correctly", async () => { - const sqliteSaver = SqliteSaver.fromConnString(":memory:"); - - // get undefined checkpoint - const undefinedCheckpoint = await sqliteSaver.getTuple({ - configurable: { thread_id: "1" }, - }); - expect(undefinedCheckpoint).toBeUndefined(); - - // save first checkpoint - const runnableConfig = await sqliteSaver.put( - { configurable: { thread_id: "1" } }, - checkpoint1, - { source: "update", step: -1, writes: null } - ); - expect(runnableConfig).toEqual({ - configurable: { - thread_id: "1", - checkpoint_id: checkpoint1.id, - }, - }); - - // get first checkpoint tuple - const firstCheckpointTuple = await sqliteSaver.getTuple({ - configurable: { thread_id: "1" }, - }); - expect(firstCheckpointTuple?.config).toEqual({ - configurable: { - thread_id: "1", - checkpoint_id: checkpoint1.id, - }, - }); - 
expect(firstCheckpointTuple?.checkpoint).toEqual(checkpoint1); - expect(firstCheckpointTuple?.parentConfig).toBeUndefined(); - - // save second checkpoint - await sqliteSaver.put( - { - configurable: { - thread_id: "1", - checkpoint_id: "2024-04-18T17:19:07.952Z", - }, - }, + await memorySaver.put( + { configurable: { thread_id: "1", checkpoint_ns: "" } }, checkpoint2, - { source: "update", step: -1, writes: null } + { + source: "update", + step: -1, + writes: null, + } ); - // verify that parentTs is set and retrieved correctly for second checkpoint - const secondCheckpointTuple = await sqliteSaver.getTuple({ - configurable: { thread_id: "1" }, - }); - expect(secondCheckpointTuple?.parentConfig).toEqual({ - configurable: { - thread_id: "1", - checkpoint_id: "2024-04-18T17:19:07.952Z", - }, - }); - // list checkpoints - const checkpointTupleGenerator = await sqliteSaver.list({ - configurable: { thread_id: "1" }, + const checkpointTupleGenerator = await memorySaver.list({ + configurable: { thread_id: "1", checkpoint_ns: "" }, }); const checkpointTuples: CheckpointTuple[] = []; for await (const checkpoint of checkpointTupleGenerator) { diff --git a/libs/checkpoint/src/types.ts b/libs/checkpoint/src/types.ts new file mode 100644 index 00000000..4e9e3289 --- /dev/null +++ b/libs/checkpoint/src/types.ts @@ -0,0 +1,32 @@ +export type All = "*"; + +export type PendingWriteValue = unknown; + +export type PendingWrite = [Channel, PendingWriteValue]; + +export type CheckpointPendingWrite = [ + TaskId, + ...PendingWrite +]; + +export interface CheckpointMetadata { + /** + * The source of the checkpoint. + * - "input": The checkpoint was created from an input to invoke/stream/batch. + * - "loop": The checkpoint was created from inside the pregel loop. + * - "update": The checkpoint was created from a manual state update. + */ + source: "input" | "loop" | "update"; + /** + * The step number of the checkpoint. + * -1 for the first "input" checkpoint. + * 0 for the first "loop" checkpoint. + * ... for the nth checkpoint afterwards. + */ + step: number; + /** + * The writes that were made between the previous checkpoint and this one. + * Mapping from node name to writes emitted by that node. 
+ */ + writes: Record | null; +} diff --git a/libs/checkpoint/tsconfig.cjs.json b/libs/checkpoint/tsconfig.cjs.json new file mode 100644 index 00000000..3b7026ea --- /dev/null +++ b/libs/checkpoint/tsconfig.cjs.json @@ -0,0 +1,8 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "module": "commonjs", + "declaration": false + }, + "exclude": ["node_modules", "dist", "docs", "**/tests"] +} diff --git a/libs/checkpoint/tsconfig.json b/libs/checkpoint/tsconfig.json new file mode 100644 index 00000000..bc85d83b --- /dev/null +++ b/libs/checkpoint/tsconfig.json @@ -0,0 +1,23 @@ +{ + "extends": "@tsconfig/recommended", + "compilerOptions": { + "outDir": "../dist", + "rootDir": "./src", + "target": "ES2021", + "lib": ["ES2021", "ES2022.Object", "DOM"], + "module": "ES2020", + "moduleResolution": "nodenext", + "esModuleInterop": true, + "declaration": true, + "noImplicitReturns": true, + "noFallthroughCasesInSwitch": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "useDefineForClassFields": true, + "strictPropertyInitialization": false, + "allowJs": true, + "strict": true + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "dist", "docs"] +} diff --git a/libs/checkpoint/turbo.json b/libs/checkpoint/turbo.json new file mode 100644 index 00000000..d1bb60a7 --- /dev/null +++ b/libs/checkpoint/turbo.json @@ -0,0 +1,11 @@ +{ + "extends": ["//"], + "tasks": { + "build": { + "outputs": ["**/dist/**"] + }, + "build:internal": { + "dependsOn": ["^build:internal"] + } + } +} diff --git a/libs/langgraph/.env.example b/libs/langgraph/.env.example new file mode 100644 index 00000000..aea660a4 --- /dev/null +++ b/libs/langgraph/.env.example @@ -0,0 +1,6 @@ +# ------------------LangSmith tracing------------------ +LANGCHAIN_TRACING_V2=true +LANGCHAIN_ENDPOINT="https://api.smith.langchain.com" +LANGCHAIN_API_KEY= +LANGCHAIN_PROJECT= +# ----------------------------------------------------- \ No newline at end of file diff --git a/libs/langgraph/.eslintrc.cjs b/libs/langgraph/.eslintrc.cjs new file mode 100644 index 00000000..02711dad --- /dev/null +++ b/libs/langgraph/.eslintrc.cjs @@ -0,0 +1,69 @@ +module.exports = { + extends: [ + "airbnb-base", + "eslint:recommended", + "prettier", + "plugin:@typescript-eslint/recommended", + ], + parserOptions: { + ecmaVersion: 12, + parser: "@typescript-eslint/parser", + project: "./tsconfig.json", + sourceType: "module", + }, + plugins: ["@typescript-eslint", "no-instanceof", "eslint-plugin-jest"], + ignorePatterns: [ + ".eslintrc.cjs", + "scripts", + "node_modules", + "dist", + "dist-cjs", + "*.js", + "*.cjs", + "*.d.ts", + ], + rules: { + "no-process-env": 2, + "no-instanceof/no-instanceof": 2, + "@typescript-eslint/explicit-module-boundary-types": 0, + "@typescript-eslint/no-empty-function": 0, + "@typescript-eslint/no-shadow": 0, + "@typescript-eslint/no-empty-interface": 0, + "@typescript-eslint/no-use-before-define": ["error", "nofunc"], + "@typescript-eslint/no-unused-vars": ["warn", { args: "none" }], + "@typescript-eslint/no-floating-promises": "error", + "@typescript-eslint/no-misused-promises": "error", + "arrow-body-style": 0, + camelcase: 0, + "class-methods-use-this": 0, + "import/extensions": [2, "ignorePackages"], + "import/no-extraneous-dependencies": [ + "error", + { devDependencies: ["**/*.test.ts"] }, + ], + "import/no-unresolved": 0, + "import/prefer-default-export": 0, + 'jest/no-focused-tests': 'error', + "keyword-spacing": "error", + "max-classes-per-file": 0, + "max-len": 0, + "no-await-in-loop": 0, + 
"no-bitwise": 0, + "no-console": 0, + "no-empty-function": 0, + "no-restricted-syntax": 0, + "no-shadow": 0, + "no-continue": 0, + "no-void": 0, + "no-underscore-dangle": 0, + "no-use-before-define": 0, + "no-useless-constructor": 0, + "no-return-await": 0, + "consistent-return": 0, + "no-else-return": 0, + "func-names": 0, + "no-lonely-if": 0, + "prefer-rest-params": 0, + "new-cap": ["error", { properties: false, capIsNew: false }], + }, +}; diff --git a/langgraph/.gitignore b/libs/langgraph/.gitignore similarity index 100% rename from langgraph/.gitignore rename to libs/langgraph/.gitignore diff --git a/libs/langgraph/.prettierrc b/libs/langgraph/.prettierrc new file mode 100644 index 00000000..ba08ff04 --- /dev/null +++ b/libs/langgraph/.prettierrc @@ -0,0 +1,19 @@ +{ + "$schema": "https://json.schemastore.org/prettierrc", + "printWidth": 80, + "tabWidth": 2, + "useTabs": false, + "semi": true, + "singleQuote": false, + "quoteProps": "as-needed", + "jsxSingleQuote": false, + "trailingComma": "es5", + "bracketSpacing": true, + "arrowParens": "always", + "requirePragma": false, + "insertPragma": false, + "proseWrap": "preserve", + "htmlWhitespaceSensitivity": "css", + "vueIndentScriptAndStyle": false, + "endOfLine": "lf" +} diff --git a/libs/langgraph/.release-it.json b/libs/langgraph/.release-it.json new file mode 100644 index 00000000..a1236e8d --- /dev/null +++ b/libs/langgraph/.release-it.json @@ -0,0 +1,13 @@ +{ + "github": { + "release": true, + "autoGenerate": true, + "tokenRef": "GITHUB_TOKEN_RELEASE" + }, + "npm": { + "publish": true, + "versionArgs": [ + "--workspaces-update=false" + ] + } +} diff --git a/libs/langgraph/LICENSE b/libs/langgraph/LICENSE new file mode 100644 index 00000000..e7530f5e --- /dev/null +++ b/libs/langgraph/LICENSE @@ -0,0 +1,21 @@ +The MIT License + +Copyright (c) 2024 LangChain + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
\ No newline at end of file diff --git a/langgraph/README.md b/libs/langgraph/README.md similarity index 100% rename from langgraph/README.md rename to libs/langgraph/README.md diff --git a/libs/langgraph/jest.config.cjs b/libs/langgraph/jest.config.cjs new file mode 100644 index 00000000..385d19f6 --- /dev/null +++ b/libs/langgraph/jest.config.cjs @@ -0,0 +1,20 @@ +/** @type {import('ts-jest').JestConfigWithTsJest} */ +module.exports = { + preset: "ts-jest/presets/default-esm", + testEnvironment: "./jest.env.cjs", + modulePathIgnorePatterns: ["dist/"], + moduleNameMapper: { + "^(\\.{1,2}/.*)\\.js$": "$1", + }, + transform: { + "^.+\\.tsx?$": ["@swc/jest"], + }, + transformIgnorePatterns: [ + "/node_modules/", + "\\.pnp\\.[^\\/]+$", + "./scripts/jest-setup-after-env.js", + ], + setupFiles: ["dotenv/config"], + testTimeout: 20_000, + passWithNoTests: true, +}; diff --git a/libs/langgraph/jest.env.cjs b/libs/langgraph/jest.env.cjs new file mode 100644 index 00000000..2ccedccb --- /dev/null +++ b/libs/langgraph/jest.env.cjs @@ -0,0 +1,12 @@ +const { TestEnvironment } = require("jest-environment-node"); + +class AdjustedTestEnvironmentToSupportFloat32Array extends TestEnvironment { + constructor(config, context) { + // Make `instanceof Float32Array` return true in tests + // to avoid https://github.com/xenova/transformers.js/issues/57 and https://github.com/jestjs/jest/issues/2549 + super(config, context); + this.global.Float32Array = Float32Array; + } +} + +module.exports = AdjustedTestEnvironmentToSupportFloat32Array; diff --git a/langgraph/langchain.config.js b/libs/langgraph/langchain.config.js similarity index 100% rename from langgraph/langchain.config.js rename to libs/langgraph/langchain.config.js diff --git a/langgraph/package.json b/libs/langgraph/package.json similarity index 95% rename from langgraph/package.json rename to libs/langgraph/package.json index ac7273de..e1fbce24 100644 --- a/langgraph/package.json +++ b/libs/langgraph/package.json @@ -1,6 +1,6 @@ { "name": "@langchain/langgraph", - "version": "0.0.34", + "version": "0.1.0-rc.1", "description": "LangGraph", "type": "module", "engines": { @@ -31,9 +31,20 @@ "license": "MIT", "dependencies": { "@langchain/core": ">=0.2.20 <0.3.0", + "@langchain/langgraph-checkpoint": "0.0.0", + "@langchain/langgraph-checkpoint-sqlite": "0.0.0", + "double-ended-queue": "^2.1.0-0", "uuid": "^10.0.0", "zod": "^3.23.8" }, + "peerDependencies": { + "better-sqlite3": "^9.5.0" + }, + "peerDependenciesMeta": { + "better-sqlite3": { + "optional": true + } + }, "devDependencies": { "@jest/globals": "^29.5.0", "@langchain/anthropic": "^0.2.12", @@ -43,12 +54,11 @@ "@swc/core": "^1.3.90", "@swc/jest": "^0.2.29", "@tsconfig/recommended": "^1.0.3", - "@types/better-sqlite3": "^7.6.9", + "@types/double-ended-queue": "^2", "@types/uuid": "^10", "@typescript-eslint/eslint-plugin": "^6.12.0", "@typescript-eslint/parser": "^6.12.0", "@xenova/transformers": "^2.17.2", - "better-sqlite3": "^9.5.0", "cheerio": "1.0.0-rc.12", "dotenv": "^16.3.1", "dpdm": "^3.12.0", @@ -70,14 +80,6 @@ "typescript": "^4.9.5 || ^5.4.5", "zod-to-json-schema": "^3.22.4" }, - "peerDependencies": { - "better-sqlite3": "^9.5.0" - }, - "peerDependenciesMeta": { - "better-sqlite3": { - "optional": true - } - }, "publishConfig": { "access": "public", "registry": "https://registry.npmjs.org/" diff --git a/langgraph/scripts/jest-setup-after-env.js b/libs/langgraph/scripts/jest-setup-after-env.js similarity index 100% rename from langgraph/scripts/jest-setup-after-env.js rename to 
libs/langgraph/scripts/jest-setup-after-env.js diff --git a/langgraph/src/channels/any_value.ts b/libs/langgraph/src/channels/any_value.ts similarity index 90% rename from langgraph/src/channels/any_value.ts rename to libs/langgraph/src/channels/any_value.ts index 4905612c..9e9e4086 100644 --- a/langgraph/src/channels/any_value.ts +++ b/libs/langgraph/src/channels/any_value.ts @@ -25,14 +25,16 @@ export class AnyValue extends BaseChannel { return empty as this; } - update(values: Value[]): void { + update(values: Value[]): boolean { if (values.length === 0) { + const updated = this.value !== undefined; this.value = undefined; - return; + return updated; } // eslint-disable-next-line prefer-destructuring this.value = values[values.length - 1]; + return false; } get(): Value { diff --git a/langgraph/src/channels/base.ts b/libs/langgraph/src/channels/base.ts similarity index 64% rename from langgraph/src/channels/base.ts rename to libs/langgraph/src/channels/base.ts index 2822bd4c..61ac5598 100644 --- a/langgraph/src/channels/base.ts +++ b/libs/langgraph/src/channels/base.ts @@ -1,6 +1,9 @@ -import { ReadonlyCheckpoint, deepCopy } from "../checkpoint/base.js"; -import { uuid6 } from "../checkpoint/id.js"; -import { Checkpoint } from "../checkpoint/index.js"; +import { + ReadonlyCheckpoint, + deepCopy, + uuid6, + Checkpoint, +} from "@langchain/langgraph-checkpoint"; import { EmptyChannelError } from "../errors.js"; export abstract class BaseChannel< @@ -30,12 +33,17 @@ export abstract class BaseChannel< /** * Update the channel's value with the given sequence of updates. * The order of the updates in the sequence is arbitrary. + * This method is called by Pregel for all channels at the end of each step. + * If there are no updates, it is called with an empty sequence. + * + * Raises InvalidUpdateError if the sequence of updates is invalid. + * Returns True if the channel was updated, False otherwise. * * @throws {InvalidUpdateError} if the sequence of updates is invalid. * @param {Array} values * @returns {void} */ - abstract update(values: UpdateType[]): void; + abstract update(values: UpdateType[]): boolean; /** * Return the current value of the channel. @@ -52,6 +60,15 @@ export abstract class BaseChannel< * @returns {CheckpointType | undefined} */ abstract checkpoint(): CheckpointType | undefined; + + /** + * Mark the current value of the channel as consumed. By default, no-op. + * This is called by Pregel before the start of the next step, for all + * channels that triggered a node. If the channel was updated, return true. 
+ */ + consume(): boolean { + return false; + } } export function emptyChannels>( @@ -70,20 +87,25 @@ export function emptyChannels>( export function createCheckpoint( checkpoint: ReadonlyCheckpoint, - channels: Record>, + channels: Record> | undefined, step: number ): Checkpoint { // eslint-disable-next-line @typescript-eslint/no-explicit-any - const values: Record = {}; - for (const k of Object.keys(channels)) { - try { - values[k] = channels[k].checkpoint(); - // eslint-disable-next-line @typescript-eslint/no-explicit-any - } catch (error: any) { - if (error.name === EmptyChannelError.unminifiable_name) { - // no-op - } else { - throw error; // Rethrow unexpected errors + let values: Record; + if (channels === undefined) { + values = checkpoint.channel_values; + } else { + values = {}; + for (const k of Object.keys(channels)) { + try { + values[k] = channels[k].checkpoint(); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + } catch (error: any) { + if (error.name === EmptyChannelError.unminifiable_name) { + // no-op + } else { + throw error; // Rethrow unexpected errors + } } } } diff --git a/langgraph/src/channels/binop.ts b/libs/langgraph/src/channels/binop.ts similarity index 94% rename from langgraph/src/channels/binop.ts rename to libs/langgraph/src/channels/binop.ts index 64cce61a..daf95c20 100644 --- a/langgraph/src/channels/binop.ts +++ b/libs/langgraph/src/channels/binop.ts @@ -43,9 +43,9 @@ export class BinaryOperatorAggregate< return empty as this; } - public update(values: UpdateType[]): void { + public update(values: UpdateType[]): boolean { let newValues = values; - if (!newValues.length) return; + if (!newValues.length) return false; if (this.value === undefined) { [this.value as UpdateType] = newValues; @@ -57,6 +57,7 @@ export class BinaryOperatorAggregate< this.value = this.operator(this.value, value); } } + return true; } public get(): ValueType { diff --git a/langgraph/src/channels/dynamic_barrier_value.ts b/libs/langgraph/src/channels/dynamic_barrier_value.ts similarity index 61% rename from langgraph/src/channels/dynamic_barrier_value.ts rename to libs/langgraph/src/channels/dynamic_barrier_value.ts index 122d3ed0..38c54221 100644 --- a/langgraph/src/channels/dynamic_barrier_value.ts +++ b/libs/langgraph/src/channels/dynamic_barrier_value.ts @@ -6,6 +6,12 @@ export interface WaitForNames { __names: Value[]; } +function isWaitForNames( + v: WaitForNames | Value +): v is WaitForNames { + return (v as WaitForNames).__names !== undefined; +} + /** A channel that switches between two states @@ -41,39 +47,47 @@ export class DynamicBarrierValue extends BaseChannel< return empty as this; } - update(values: (Value | WaitForNames)[]): void { - // switch to priming state after reading it once - if (this.names && areSetsEqual(this.names, this.seen)) { - this.seen = new Set(); - this.names = undefined; - } - - const newNames = values.filter( - (v) => - typeof v === "object" && - !!v && - "__names" in v && - Object.keys(v).join(",") === "__names" && - Array.isArray(v.__names) - ) as WaitForNames[]; - - if (newNames.length > 1) { - throw new InvalidUpdateError( - `Expected at most one WaitForNames object, got ${newNames.length}` - ); - } else if (newNames.length === 1) { - this.names = new Set(newNames[0].__names); - } else if (this.names) { + update(values: (Value | WaitForNames)[]): boolean { + const waitForNames = values.filter(isWaitForNames); + if (waitForNames.length > 0) { + if (waitForNames.length > 1) { + throw new InvalidUpdateError( + "Received 
multiple WaitForNames updates in the same step." + ); + } + this.names = new Set(waitForNames[0].__names); + return true; + } else if (this.names !== undefined) { + let updated = false; for (const value of values) { - if (this.names.has(value as Value)) { - this.seen.add(value as Value); + if (isWaitForNames(value)) { + throw new Error( + "Assertion Error: Received unexpected WaitForNames instance." + ); + } + if (this.names.has(value)) { + if (!this.seen.has(value)) { + this.seen.add(value); + updated = true; + } } else { throw new InvalidUpdateError( - `Value ${value} not in names ${this.names}` + `Value ${value} not in ${[...this.names]}` ); } } + return updated; + } + return false; + } + + consume(): boolean { + if (this.seen && this.names && areSetsEqual(this.seen, this.names)) { + this.seen = new Set(); + this.names = undefined; + return true; } + return false; } // If we have not yet seen all the node names we want to wait for, diff --git a/langgraph/src/channels/ephemeral_value.ts b/libs/langgraph/src/channels/ephemeral_value.ts similarity index 91% rename from langgraph/src/channels/ephemeral_value.ts rename to libs/langgraph/src/channels/ephemeral_value.ts index a70e84b6..75e8eeee 100644 --- a/langgraph/src/channels/ephemeral_value.ts +++ b/libs/langgraph/src/channels/ephemeral_value.ts @@ -24,11 +24,12 @@ export class EphemeralValue extends BaseChannel { return empty as this; } - update(values: Value[]): void { + update(values: Value[]): boolean { if (values.length === 0) { + const updated = this.value !== undefined; // If there are no updates for this specific channel at the end of the step, wipe it. this.value = undefined; - return; + return updated; } if (values.length !== 1 && this.guard) { throw new InvalidUpdateError( @@ -38,6 +39,7 @@ export class EphemeralValue extends BaseChannel { // eslint-disable-next-line prefer-destructuring this.value = values[values.length - 1]; + return true; } get(): Value { diff --git a/langgraph/src/channels/index.ts b/libs/langgraph/src/channels/index.ts similarity index 100% rename from langgraph/src/channels/index.ts rename to libs/langgraph/src/channels/index.ts diff --git a/langgraph/src/channels/last_value.ts b/libs/langgraph/src/channels/last_value.ts similarity index 86% rename from langgraph/src/channels/last_value.ts rename to libs/langgraph/src/channels/last_value.ts index 63fffeaf..9d76b488 100644 --- a/langgraph/src/channels/last_value.ts +++ b/libs/langgraph/src/channels/last_value.ts @@ -22,16 +22,19 @@ export class LastValue extends BaseChannel { return empty as this; } - update(values: Value[]): void { + update(values: Value[]): boolean { if (values.length === 0) { - return; + return false; } if (values.length !== 1) { - throw new InvalidUpdateError(); + throw new InvalidUpdateError( + "LastValue can only receive one value per step." 
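[Editor's note] A rough usage sketch, not taken from this PR: the barrier reset that update() used to perform now lives in consume(), which Pregel calls before the next step for channels that triggered a node. The constructor signature and import path below are assumed from the surrounding code.

import { NamedBarrierValue } from "./named_barrier_value.js";

// Wait for both "a" and "b" before the barrier opens.
const barrier = new NamedBarrierValue<string>(new Set(["a", "b"]));

barrier.update(["a"]); // true  - "a" newly seen
barrier.update(["a"]); // false - nothing changed this step
barrier.update(["b"]); // true  - all names seen, get() no longer throws
barrier.consume();     // true  - seen set cleared for the next round
barrier.update(["a"]); // true  - collecting again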
+ ); } // eslint-disable-next-line prefer-destructuring this.value = values[values.length - 1]; + return true; } get(): Value { diff --git a/langgraph/src/channels/named_barrier_value.ts b/libs/langgraph/src/channels/named_barrier_value.ts similarity index 77% rename from langgraph/src/channels/named_barrier_value.ts rename to libs/langgraph/src/channels/named_barrier_value.ts index b28aab6f..a4934658 100644 --- a/langgraph/src/channels/named_barrier_value.ts +++ b/libs/langgraph/src/channels/named_barrier_value.ts @@ -1,4 +1,4 @@ -import { EmptyChannelError } from "../errors.js"; +import { EmptyChannelError, InvalidUpdateError } from "../errors.js"; import { BaseChannel } from "./index.js"; export const areSetsEqual = (a: Set, b: Set) => @@ -35,22 +35,23 @@ export class NamedBarrierValue extends BaseChannel< return empty as this; } - update(values: Value[]): void { - // We have seen all nodes, so we can reset the seen set in preparation for the next round of updates. - if (areSetsEqual(this.names, this.seen)) { - this.seen = new Set(); - } + update(values: Value[]): boolean { + let updated = false; for (const nodeName of values) { if (this.names.has(nodeName)) { - this.seen.add(nodeName); + if (!this.seen.has(nodeName)) { + this.seen.add(nodeName); + updated = true; + } } else { - throw new Error( + throw new InvalidUpdateError( `Value ${JSON.stringify(nodeName)} not in names ${JSON.stringify( this.names )}` ); } } + return updated; } // If we have not yet seen all the node names we want to wait for, @@ -65,4 +66,12 @@ export class NamedBarrierValue extends BaseChannel< checkpoint(): Value[] { return [...this.seen]; } + + consume(): boolean { + if (this.seen && this.names && areSetsEqual(this.seen, this.names)) { + this.seen = new Set(); + return true; + } + return false; + } } diff --git a/langgraph/src/channels/topic.ts b/libs/langgraph/src/channels/topic.ts similarity index 74% rename from langgraph/src/channels/topic.ts rename to libs/langgraph/src/channels/topic.ts index afbd5a5b..1472c71e 100644 --- a/langgraph/src/channels/topic.ts +++ b/libs/langgraph/src/channels/topic.ts @@ -1,15 +1,8 @@ +import { EmptyChannelError } from "../errors.js"; import { BaseChannel } from "./base.js"; -function* flatten( - values: Array -): IterableIterator { - for (const value of values) { - if (Array.isArray(value)) { - yield* value; - } else { - yield value; - } - } +function arraysEqual(a: T[], b: T[]): boolean { + return a.length === b.length && a.every((val, index) => val === b[index]); } export class Topic extends BaseChannel< @@ -50,12 +43,13 @@ export class Topic extends BaseChannel< return empty as this; } - public update(values: Array): void { + public update(values: Array): boolean { + const current = [...this.values]; if (!this.accumulate) { this.values = []; } - const flatValues = flatten(values); - if (flatValues) { + const flatValues = values.flat() as Value[]; + if (flatValues.length > 0) { if (this.unique) { for (const value of flatValues) { if (!this.seen.has(value)) { @@ -67,9 +61,13 @@ export class Topic extends BaseChannel< this.values.push(...flatValues); } } + return !arraysEqual(this.values, current); } public get(): Array { + if (this.values.length === 0) { + throw new EmptyChannelError(); + } return this.values; } diff --git a/libs/langgraph/src/checkpoint/sqlite.ts b/libs/langgraph/src/checkpoint/sqlite.ts new file mode 100644 index 00000000..40a722cb --- /dev/null +++ b/libs/langgraph/src/checkpoint/sqlite.ts @@ -0,0 +1 @@ +export * from 
"@langchain/langgraph-checkpoint-sqlite"; diff --git a/langgraph/src/constants.ts b/libs/langgraph/src/constants.ts similarity index 86% rename from langgraph/src/constants.ts rename to libs/langgraph/src/constants.ts index 95e8dec3..f560a6c4 100644 --- a/langgraph/src/constants.ts +++ b/libs/langgraph/src/constants.ts @@ -1,6 +1,9 @@ +export const INPUT = "__input__"; +export const ERROR = "__error__"; export const CONFIG_KEY_SEND = "__pregel_send"; export const CONFIG_KEY_READ = "__pregel_read"; - +export const CONFIG_KEY_CHECKPOINTER = "__pregel_checkpointer"; +export const CONFIG_KEY_RESUMING = "__pregel_resuming"; export const INTERRUPT = "__interrupt__"; export const TAG_HIDDEN = "langsmith:hidden"; @@ -8,6 +11,19 @@ export const TAG_HIDDEN = "langsmith:hidden"; export const TASKS = "__pregel_tasks"; export const TASK_NAMESPACE = "6ba7b831-9dad-11d1-80b4-00c04fd430c8"; +export const RESERVED = [ + INTERRUPT, + ERROR, + TASKS, + CONFIG_KEY_SEND, + CONFIG_KEY_READ, + CONFIG_KEY_CHECKPOINTER, + CONFIG_KEY_RESUMING, + INPUT, +]; + +export const CHECKPOINT_NAMESPACE_SEPARATOR = "|"; + export interface SendInterface { node: string; // eslint-disable-next-line @typescript-eslint/no-explicit-any diff --git a/langgraph/src/errors.ts b/libs/langgraph/src/errors.ts similarity index 67% rename from langgraph/src/errors.ts rename to libs/langgraph/src/errors.ts index 4f492c51..2d75ef12 100644 --- a/langgraph/src/errors.ts +++ b/libs/langgraph/src/errors.ts @@ -20,6 +20,28 @@ export class GraphValueError extends Error { } } +export class GraphInterrupt extends Error { + constructor(message?: string) { + super(message); + this.name = "GraphInterrupt"; + } + + static get unminifiable_name() { + return "GraphInterrupt"; + } +} + +export class EmptyInputError extends Error { + constructor(message?: string) { + super(message); + this.name = "EmptyInputError"; + } + + static get unminifiable_name() { + return "EmptyInputError"; + } +} + export class EmptyChannelError extends Error { constructor(message?: string) { super(message); diff --git a/langgraph/src/graph/annotation.ts b/libs/langgraph/src/graph/annotation.ts similarity index 99% rename from langgraph/src/graph/annotation.ts rename to libs/langgraph/src/graph/annotation.ts index 24c6c5b4..2ae507de 100644 --- a/langgraph/src/graph/annotation.ts +++ b/libs/langgraph/src/graph/annotation.ts @@ -62,6 +62,7 @@ export class AnnotationRoot { } } +// TODO: Add docstring export function Annotation(): LastValue; export function Annotation( diff --git a/langgraph/src/graph/graph.ts b/libs/langgraph/src/graph/graph.ts similarity index 95% rename from langgraph/src/graph/graph.ts rename to libs/langgraph/src/graph/graph.ts index dc7bb6c0..e9884801 100644 --- a/langgraph/src/graph/graph.ts +++ b/libs/langgraph/src/graph/graph.ts @@ -9,15 +9,21 @@ import { Node as RunnableGraphNode, Graph as RunnableGraph, } from "@langchain/core/runnables/graph"; +import { BaseCheckpointSaver } from "@langchain/langgraph-checkpoint"; import { z } from "zod"; import { PregelNode } from "../pregel/read.js"; -import { Channel, Pregel, PregelInterface } from "../pregel/index.js"; -import { BaseCheckpointSaver } from "../checkpoint/base.js"; +import { Channel, Pregel } from "../pregel/index.js"; +import type { PregelParams } from "../pregel/types.js"; import { BaseChannel } from "../channels/base.js"; import { EphemeralValue } from "../channels/ephemeral_value.js"; import { All } from "../pregel/types.js"; import { ChannelWrite, PASSTHROUGH } from "../pregel/write.js"; -import { 
_isSend, Send, TAG_HIDDEN } from "../constants.js"; +import { + _isSend, + CHECKPOINT_NAMESPACE_SEPARATOR, + Send, + TAG_HIDDEN, +} from "../constants.js"; import { RunnableCallable } from "../utils.js"; import { InvalidUpdateError } from "../errors.js"; @@ -131,6 +137,11 @@ export class Graph< key: K, action: RunnableLike ): Graph { + if (key.includes(CHECKPOINT_NAMESPACE_SEPARATOR)) { + throw new Error( + `"${CHECKPOINT_NAMESPACE_SEPARATOR}" is a reserved character and is not allowed in node names.` + ); + } this.warnIfCompiled( `Adding a node to a graph that has already been compiled. This will not be reflected in the compiled graph.` ); @@ -165,7 +176,9 @@ export class Graph< !this.supportMultipleEdges && Array.from(this.edges).some(([start]) => start === startKey) ) { - throw new Error(`Already found path for ${startKey}`); + throw new Error( + `Already found path for ${startKey}. For multiple edges, use StateGraph with an annotated state key.` + ); } this.edges.add([startKey, endKey]); @@ -256,8 +269,8 @@ export class Graph< [START]: new EphemeralValue(), [END]: new EphemeralValue(), } as Record, - inputs: START, - outputs: END, + inputChannels: START, + outputChannels: END, streamChannels: [] as N[], streamMode: "values", }); @@ -362,7 +375,7 @@ export class CompiledGraph< constructor({ builder, ...rest - }: { builder: Graph } & PregelInterface< + }: { builder: Graph } & PregelParams< Record>, Record >) { diff --git a/langgraph/src/graph/index.ts b/libs/langgraph/src/graph/index.ts similarity index 100% rename from langgraph/src/graph/index.ts rename to libs/langgraph/src/graph/index.ts diff --git a/langgraph/src/graph/message.ts b/libs/langgraph/src/graph/message.ts similarity index 100% rename from langgraph/src/graph/message.ts rename to libs/langgraph/src/graph/message.ts diff --git a/langgraph/src/graph/messages_annotation.ts b/libs/langgraph/src/graph/messages_annotation.ts similarity index 100% rename from langgraph/src/graph/messages_annotation.ts rename to libs/langgraph/src/graph/messages_annotation.ts diff --git a/langgraph/src/graph/state.ts b/libs/langgraph/src/graph/state.ts similarity index 96% rename from langgraph/src/graph/state.ts rename to libs/langgraph/src/graph/state.ts index 34952497..6a055b34 100644 --- a/langgraph/src/graph/state.ts +++ b/libs/langgraph/src/graph/state.ts @@ -4,6 +4,7 @@ import { RunnableConfig, RunnableLike, } from "@langchain/core/runnables"; +import { BaseCheckpointSaver } from "@langchain/langgraph-checkpoint"; import { BaseChannel } from "../channels/base.js"; import { END, CompiledGraph, Graph, START, Branch } from "./graph.js"; import { @@ -12,7 +13,6 @@ import { PASSTHROUGH, SKIP_WRITE, } from "../pregel/write.js"; -import { BaseCheckpointSaver } from "../checkpoint/base.js"; import { ChannelRead, PregelNode } from "../pregel/read.js"; import { NamedBarrierValue } from "../channels/named_barrier_value.js"; import { EphemeralValue } from "../channels/ephemeral_value.js"; @@ -120,14 +120,14 @@ export class StateGraph< throw new Error("END cannot be a start node"); } if (!Object.keys(this.nodes).some((node) => node === start)) { - throw new Error(`Need to add_node ${start} first`); + throw new Error(`Need to addNode ${start} first`); } } if (endKey === END) { throw new Error("END cannot be an end node"); } if (!Object.keys(this.nodes).some((node) => node === endKey)) { - throw new Error(`Need to add_node ${endKey} first`); + throw new Error(`Need to addNode ${endKey} first`); } this.waitingEdges.add([startKey, endKey]); @@ -152,7 
+152,7 @@ export class StateGraph< // prepare output channels const stateKeys = Object.keys(this.channels); - const outputs = + const outputChannels = stateKeys.length === 1 && stateKeys[0] === ROOT ? stateKeys[0] : stateKeys; @@ -169,9 +169,9 @@ export class StateGraph< ...this.channels, [START]: new EphemeralValue(), } as Record, - inputs: START, - outputs, - streamChannels: outputs, + inputChannels: START, + outputChannels, + streamChannels: outputChannels, streamMode: "updates", }); @@ -349,7 +349,7 @@ export class CompiledStateGraph< return new ChannelWrite(writes, [TAG_HIDDEN]); }, // reader - (config) => ChannelRead.doRead(config, this.outputs, true) + (config) => ChannelRead.doRead(config, this.outputChannels, true) ) ); diff --git a/langgraph/src/index.ts b/libs/langgraph/src/index.ts similarity index 100% rename from langgraph/src/index.ts rename to libs/langgraph/src/index.ts diff --git a/langgraph/src/prebuilt/agent_executor.ts b/libs/langgraph/src/prebuilt/agent_executor.ts similarity index 100% rename from langgraph/src/prebuilt/agent_executor.ts rename to libs/langgraph/src/prebuilt/agent_executor.ts diff --git a/langgraph/src/prebuilt/chat_agent_executor.ts b/libs/langgraph/src/prebuilt/chat_agent_executor.ts similarity index 100% rename from langgraph/src/prebuilt/chat_agent_executor.ts rename to libs/langgraph/src/prebuilt/chat_agent_executor.ts diff --git a/langgraph/src/prebuilt/index.ts b/libs/langgraph/src/prebuilt/index.ts similarity index 100% rename from langgraph/src/prebuilt/index.ts rename to libs/langgraph/src/prebuilt/index.ts diff --git a/langgraph/src/prebuilt/react_agent_executor.ts b/libs/langgraph/src/prebuilt/react_agent_executor.ts similarity index 98% rename from langgraph/src/prebuilt/react_agent_executor.ts rename to libs/langgraph/src/prebuilt/react_agent_executor.ts index c28e3ce3..743b1b40 100644 --- a/langgraph/src/prebuilt/react_agent_executor.ts +++ b/libs/langgraph/src/prebuilt/react_agent_executor.ts @@ -19,7 +19,7 @@ import { BaseLanguageModelInput, } from "@langchain/core/language_models/base"; import { ChatPromptTemplate } from "@langchain/core/prompts"; -import { BaseCheckpointSaver } from "../checkpoint/base.js"; +import { BaseCheckpointSaver } from "@langchain/langgraph-checkpoint"; import { END, START, StateGraph } from "../graph/index.js"; import { MessagesAnnotation } from "../graph/messages_annotation.js"; import { CompiledStateGraph, StateGraphArgs } from "../graph/state.js"; diff --git a/langgraph/src/prebuilt/tool_executor.ts b/libs/langgraph/src/prebuilt/tool_executor.ts similarity index 100% rename from langgraph/src/prebuilt/tool_executor.ts rename to libs/langgraph/src/prebuilt/tool_executor.ts diff --git a/langgraph/src/prebuilt/tool_node.ts b/libs/langgraph/src/prebuilt/tool_node.ts similarity index 100% rename from langgraph/src/prebuilt/tool_node.ts rename to libs/langgraph/src/prebuilt/tool_node.ts diff --git a/libs/langgraph/src/pregel/algo.ts b/libs/langgraph/src/pregel/algo.ts new file mode 100644 index 00000000..dee52efb --- /dev/null +++ b/libs/langgraph/src/pregel/algo.ts @@ -0,0 +1,588 @@ +/* eslint-disable no-param-reassign */ +import { + mergeConfigs, + patchConfig, + RunnableConfig, +} from "@langchain/core/runnables"; +import { CallbackManagerForChainRun } from "@langchain/core/callbacks/manager"; +import { + BaseCheckpointSaver, + Checkpoint, + ReadonlyCheckpoint, + copyCheckpoint, + type PendingWrite, + type PendingWriteValue, + uuid5, +} from "@langchain/langgraph-checkpoint"; +import { + BaseChannel, + 
createCheckpoint, + emptyChannels, +} from "../channels/base.js"; +import { PregelNode } from "./read.js"; +import { readChannel, readChannels } from "./io.js"; +import { + _isSend, + _isSendInterface, + CHECKPOINT_NAMESPACE_SEPARATOR, + CONFIG_KEY_CHECKPOINTER, + CONFIG_KEY_READ, + CONFIG_KEY_RESUMING, + CONFIG_KEY_SEND, + INTERRUPT, + RESERVED, + Send, + TAG_HIDDEN, + TASKS, +} from "../constants.js"; +import { All, PregelExecutableTask, PregelTaskDescription } from "./types.js"; +import { EmptyChannelError, InvalidUpdateError } from "../errors.js"; +import { _getIdMetadata, getNullChannelVersion } from "./utils.js"; + +/** + * Construct a type with a set of properties K of type T + */ +export type StrRecord = { + [P in K]: T; +}; + +export type WritesProtocol = { + name: string; + writes: PendingWrite[]; + triggers: string[]; +}; + +export const increment = (current?: number) => { + return current !== undefined ? current + 1 : 1; +}; + +export async function* executeTasks( + tasks: Record< + string, + () => Promise<{ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + task: PregelExecutableTask; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + result: any; + error: Error; + }> + >, + stepTimeout?: number, + signal?: AbortSignal + // eslint-disable-next-line @typescript-eslint/no-explicit-any +): AsyncGenerator> { + if (stepTimeout && signal) { + if ("any" in AbortSignal) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + signal = (AbortSignal as any).any([ + signal, + AbortSignal.timeout(stepTimeout), + ]); + } + } else if (stepTimeout) { + signal = AbortSignal.timeout(stepTimeout); + } + + // Abort if signal is aborted + signal?.throwIfAborted(); + + // Start all tasks + const executingTasks = Object.fromEntries( + Object.entries(tasks).map(([taskId, task]) => { + return [taskId, task()]; + }) + ); + let listener: () => void; + const signalPromise = new Promise((_resolve, reject) => { + listener = () => reject(new Error("Abort")); + signal?.addEventListener("abort", listener); + }).finally(() => signal?.removeEventListener("abort", listener)); + + while (Object.keys(executingTasks).length > 0) { + const { task, error } = await Promise.race([ + ...Object.values(executingTasks), + signalPromise, + ]); + if (error !== undefined) { + // TODO: don't stop others if exception is interrupt + throw error; + } + yield task; + delete executingTasks[task.id]; + } +} + +export function shouldInterrupt( + checkpoint: Checkpoint, + interruptNodes: All | N[], + tasks: PregelExecutableTask[] +): boolean { + const versionValues = Object.values(checkpoint.channel_versions); + const versionType = + versionValues.length > 0 ? typeof versionValues[0] : undefined; + let nullVersion: number | string; + if (versionType === "number") { + nullVersion = 0; + } else if (versionType === "string") { + nullVersion = ""; + } + const seen = checkpoint.versions_seen[INTERRUPT] ?? {}; + + const anyChannelUpdated = Object.entries(checkpoint.channel_versions).some( + ([chan, version]) => { + return version > (seen[chan] ?? nullVersion); + } + ); + + const anyTriggeredNodeInInterruptNodes = tasks.some((task) => + interruptNodes === "*" + ? 
!task.config?.tags?.includes(TAG_HIDDEN) + : interruptNodes.includes(task.name) + ); + + return anyChannelUpdated && anyTriggeredNodeInInterruptNodes; +} + +export function _localRead>( + checkpoint: ReadonlyCheckpoint, + channels: Cc, + task: WritesProtocol, + select: Array | keyof Cc, + fresh: boolean = false +): Record | unknown { + if (fresh) { + const newCheckpoint = createCheckpoint(checkpoint, channels, -1); + // create a new copy of channels + const newChannels = emptyChannels(channels, newCheckpoint); + // Note: _applyWrites contains side effects + _applyWrites(copyCheckpoint(newCheckpoint), newChannels, [task]); + return readChannels(newChannels, select); + } else { + return readChannels(channels, select); + } +} + +export function _localWrite( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + commit: (writes: [string, any][]) => void, + processes: Record, + channels: Record, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + writes: [string, any][] +) { + for (const [chan, value] of writes) { + if (chan === TASKS) { + if (!_isSend(value)) { + throw new InvalidUpdateError( + `Invalid packet type, expected SendProtocol, got ${JSON.stringify( + value + )}` + ); + } + if (!(value.node in processes)) { + throw new InvalidUpdateError( + `Invalid node name ${value.node} in packet` + ); + } + } else if (!(chan in channels)) { + console.warn(`Skipping write for channel '${chan}' which has no readers`); + } + } + commit(writes); +} + +export function _applyWrites>( + checkpoint: Checkpoint, + channels: Cc, + tasks: WritesProtocol[], + // eslint-disable-next-line @typescript-eslint/no-explicit-any + getNextVersion?: (version: any, channel: BaseChannel) => any +): void { + // Update seen versions + for (const task of tasks) { + if (checkpoint.versions_seen[task.name] === undefined) { + checkpoint.versions_seen[task.name] = {}; + } + for (const chan of task.triggers) { + if (chan in checkpoint.channel_versions) { + checkpoint.versions_seen[task.name][chan] = + checkpoint.channel_versions[chan]; + } + } + } + + // Find the highest version of all channels + let maxVersion: number | undefined; + if (Object.keys(checkpoint.channel_versions).length > 0) { + maxVersion = Math.max(...Object.values(checkpoint.channel_versions)); + } + + // Consume all channels that were read + const channelsToConsume = new Set( + tasks + .flatMap((task) => task.triggers) + .filter((chan) => !RESERVED.includes(chan)) + ); + + for (const chan of channelsToConsume) { + if (channels[chan].consume()) { + if (getNextVersion !== undefined) { + checkpoint.channel_versions[chan] = getNextVersion( + maxVersion, + channels[chan] + ); + } + } + } + + // Clear pending sends + if (checkpoint.pending_sends) { + checkpoint.pending_sends = []; + } + + // Group writes by channel + const pendingWriteValuesByChannel = {} as Record< + keyof Cc, + PendingWriteValue[] + >; + for (const task of tasks) { + for (const [chan, val] of task.writes) { + if (chan === TASKS) { + checkpoint.pending_sends.push({ + node: (val as Send).node, + args: (val as Send).args, + }); + } else { + if (chan in pendingWriteValuesByChannel) { + pendingWriteValuesByChannel[chan].push(val); + } else { + pendingWriteValuesByChannel[chan] = [val]; + } + } + } + } + + // find the highest version of all channels + maxVersion = undefined; + if (Object.keys(checkpoint.channel_versions).length > 0) { + maxVersion = Math.max(...Object.values(checkpoint.channel_versions)); + } + + const updatedChannels: Set = new Set(); + // Apply writes to 
channels + for (const [chan, vals] of Object.entries(pendingWriteValuesByChannel)) { + if (chan in channels) { + let updated; + try { + updated = channels[chan].update(vals); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + } catch (e: any) { + if (e.name === InvalidUpdateError.unminifiable_name) { + throw new InvalidUpdateError( + `Invalid update for channel ${chan} with values ${JSON.stringify( + vals + )}` + ); + } else { + throw e; + } + } + if (updated && getNextVersion !== undefined) { + checkpoint.channel_versions[chan] = getNextVersion( + maxVersion, + channels[chan] + ); + } + updatedChannels.add(chan); + } + } + + // Channels that weren't updated in this step are notified of a new step + for (const chan of Object.keys(channels)) { + if (!updatedChannels.has(chan)) { + const updated = channels[chan].update([]); + if (updated && getNextVersion !== undefined) { + checkpoint.channel_versions[chan] = getNextVersion( + maxVersion, + channels[chan] + ); + } + } + } +} + +export type NextTaskExtraFields = { + step: number; + isResuming?: boolean; + checkpointer?: BaseCheckpointSaver; + manager?: CallbackManagerForChainRun; +}; + +export function _prepareNextTasks< + Nn extends StrRecord, + Cc extends StrRecord +>( + checkpoint: ReadonlyCheckpoint, + processes: Nn, + channels: Cc, + config: RunnableConfig, + forExecution: false, + extra: NextTaskExtraFields +): PregelTaskDescription[]; + +export function _prepareNextTasks< + Nn extends StrRecord, + Cc extends StrRecord +>( + checkpoint: ReadonlyCheckpoint, + processes: Nn, + channels: Cc, + config: RunnableConfig, + forExecution: true, + extra: NextTaskExtraFields +): PregelExecutableTask[]; + +export function _prepareNextTasks< + Nn extends StrRecord, + Cc extends StrRecord +>( + checkpoint: ReadonlyCheckpoint, + processes: Nn, + channels: Cc, + config: RunnableConfig, + forExecution: boolean, + extra: NextTaskExtraFields +): PregelTaskDescription[] | PregelExecutableTask[] { + const parentNamespace = config.configurable?.checkpoint_ns ?? ""; + const tasks: Array> = []; + const taskDescriptions: Array = []; + const { step, isResuming = false, checkpointer, manager } = extra; + + for (const packet of checkpoint.pending_sends) { + if (!_isSendInterface(packet)) { + console.warn( + `Ignoring invalid packet ${JSON.stringify(packet)} in pending sends.` + ); + continue; + } + if (!(packet.node in processes)) { + console.warn( + `Ignoring unknown node name ${packet.node} in pending sends.` + ); + continue; + } + const triggers = [TASKS]; + const metadata = _getIdMetadata({ + langgraph_step: step, + langgraph_node: packet.node, + langgraph_triggers: triggers, + langgraph_task_idx: forExecution ? tasks.length : taskDescriptions.length, + }); + const checkpointNamespace = + parentNamespace === "" + ? 
packet.node + : `${parentNamespace}${CHECKPOINT_NAMESPACE_SEPARATOR}${packet.node}`; + const taskId = uuid5( + JSON.stringify([checkpointNamespace, metadata]), + checkpoint.id + ); + + if (forExecution) { + const proc = processes[packet.node]; + const node = proc.getNode(); + if (node !== undefined) { + const writes: [keyof Cc, unknown][] = []; + tasks.push({ + name: packet.node, + input: packet.args, + proc: node, + writes, + triggers, + config: patchConfig( + mergeConfigs(config, processes[packet.node].config, { + metadata, + }), + { + runName: packet.node, + callbacks: manager?.getChild(`graph:step:${step}`), + configurable: { + [CONFIG_KEY_SEND]: _localWrite.bind( + undefined, + (items: [keyof Cc, unknown][]) => writes.push(...items), + processes, + channels + ), + [CONFIG_KEY_READ]: _localRead.bind( + undefined, + checkpoint, + channels, + { + name: packet.node, + writes: writes as Array<[string, unknown]>, + triggers, + } + ), + }, + } + ), + id: taskId, + }); + } + } else { + taskDescriptions.push({ id: taskId, name: packet.node }); + } + } + + // Check if any processes should be run in next step + // If so, prepare the values to be passed to them + const nullVersion = getNullChannelVersion(checkpoint.channel_versions); + if (nullVersion === undefined) { + return forExecution ? tasks : taskDescriptions; + } + for (const [name, proc] of Object.entries(processes)) { + const seen = checkpoint.versions_seen[name] ?? {}; + const triggers = proc.triggers + .filter((chan) => { + const result = readChannel(channels, chan, false, true); + const isEmptyChannelError = + // eslint-disable-next-line no-instanceof/no-instanceof + result instanceof Error && + result.name === EmptyChannelError.unminifiable_name; + return ( + !isEmptyChannelError && + (checkpoint.channel_versions[chan] ?? nullVersion) > + (seen[chan] ?? nullVersion) + ); + }) + .sort(); + // If any of the channels read by this process were updated + if (triggers.length > 0) { + const val = _procInput(proc, channels, forExecution); + if (val === undefined) { + continue; + } + + const metadata = _getIdMetadata({ + langgraph_step: step, + langgraph_node: name, + langgraph_triggers: triggers, + langgraph_task_idx: forExecution + ? tasks.length + : taskDescriptions.length, + }); + + const checkpointNamespace = + parentNamespace === "" + ? name + : `${parentNamespace}${CHECKPOINT_NAMESPACE_SEPARATOR}${name}`; + + const taskId = uuid5( + JSON.stringify([checkpointNamespace, metadata]), + checkpoint.id + ); + + if (forExecution) { + const node = proc.getNode(); + if (node !== undefined) { + const writes: [keyof Cc, unknown][] = []; + tasks.push({ + name, + input: val, + proc: node, + writes, + triggers, + config: patchConfig( + mergeConfigs(config, proc.config, { metadata }), + { + runName: name, + callbacks: manager?.getChild(`graph:step:${step}`), + configurable: { + [CONFIG_KEY_SEND]: _localWrite.bind( + undefined, + (items: [keyof Cc, unknown][]) => writes.push(...items), + processes, + channels + ), + [CONFIG_KEY_READ]: _localRead.bind( + undefined, + checkpoint, + channels, + { + name, + writes: writes as Array<[string, unknown]>, + triggers, + } + ), + [CONFIG_KEY_CHECKPOINTER]: checkpointer, + [CONFIG_KEY_RESUMING]: isResuming, + checkpoint_id: checkpoint.id, + checkpoint_ns: checkpointNamespace, + }, + } + ), + id: taskId, + }); + } + } else { + taskDescriptions.push({ id: taskId, name }); + } + } + } + return forExecution ? 
tasks : taskDescriptions; +} + +function _procInput( + proc: PregelNode, + channels: StrRecord, + forExecution: boolean +) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let val: any; + // If all trigger channels subscribed by this process are not empty + // then invoke the process with the values of all non-empty channels + if (Array.isArray(proc.channels)) { + let successfulRead = false; + for (const chan of proc.channels) { + try { + val = readChannel(channels, chan, false); + successfulRead = true; + break; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + } catch (e: any) { + if (e.name === EmptyChannelError.unminifiable_name) { + continue; + } else { + throw e; + } + } + } + if (!successfulRead) { + return; + } + } else if (typeof proc.channels === "object") { + val = {}; + try { + for (const [k, chan] of Object.entries(proc.channels)) { + val[k] = readChannel(channels, chan, !proc.triggers.includes(chan)); + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + } catch (e: any) { + if (e.name === EmptyChannelError.unminifiable_name) { + return; + } else { + throw e; + } + } + } else { + throw new Error( + `Invalid channels type, expected list or dict, got ${proc.channels}` + ); + } + + // If the process has a mapper, apply it to the value + if (forExecution && proc.mapper !== undefined) { + val = proc.mapper(val); + } + + return val; +} diff --git a/libs/langgraph/src/pregel/debug.ts b/libs/langgraph/src/pregel/debug.ts new file mode 100644 index 00000000..ca2a9d7d --- /dev/null +++ b/libs/langgraph/src/pregel/debug.ts @@ -0,0 +1,292 @@ +import { RunnableConfig } from "@langchain/core/runnables"; +import { + CheckpointMetadata, + CheckpointPendingWrite, + PendingWrite, + uuid5, +} from "@langchain/langgraph-checkpoint"; +import { BaseChannel } from "../channels/base.js"; +import { ERROR, TAG_HIDDEN, TASK_NAMESPACE } from "../constants.js"; +import { EmptyChannelError } from "../errors.js"; +import { PregelExecutableTask, PregelTaskDescription } from "./types.js"; +import { readChannels } from "./io.js"; +import { _getIdMetadata } from "./utils.js"; + +type ConsoleColors = { + start: string; + end: string; +}; + +type ConsoleColorMap = { + [key: string]: ConsoleColors; +}; + +const COLORS_MAP: ConsoleColorMap = { + blue: { + start: "\x1b[34m", + end: "\x1b[0m", + }, + green: { + start: "\x1b[32m", + end: "\x1b[0m", + }, + yellow: { + start: "\x1b[33;1m", + end: "\x1b[0m", + }, +}; + +/** + * Wrap some text in a color for printing to the console. + */ +const wrap = (color: ConsoleColors, text: string): string => + `${color.start}${text}${color.end}`; + +export function printCheckpoint( + step: number, + channels: Record> +) { + console.log( + [ + `${wrap(COLORS_MAP.blue, "[langgraph/checkpoint]")}`, + `Finishing step ${step}. 
Channel values:\n`, + `\n${JSON.stringify( + Object.fromEntries(_readChannels(channels)), + null, + 2 + )}`, + ].join("") + ); +} + +function* _readChannels( + channels: Record> + // eslint-disable-next-line @typescript-eslint/no-explicit-any +): IterableIterator<[string, any]> { + for (const [name, channel] of Object.entries(channels)) { + try { + yield [name, channel.get()]; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + } catch (error: any) { + if (error.name === EmptyChannelError.unminifiable_name) { + // Skip the channel if it's empty + continue; + } else { + throw error; // Re-throw the error if it's not an EmptyChannelError + } + } + } +} + +export function* mapDebugTasks( + step: number, + tasks: readonly PregelExecutableTask[] +) { + const ts = new Date().toISOString(); + for (const { name, input, config, triggers } of tasks) { + if (config?.tags?.includes(TAG_HIDDEN)) continue; + + const metadata = { ...config?.metadata }; + const idMetadata = _getIdMetadata({ + langgraph_step: metadata.langgraph_step, + langgraph_node: metadata.langgraph_node, + langgraph_triggers: metadata.langgraph_triggers, + langgraph_task_idx: metadata.langgraph_task_idx, + }); + + yield { + type: "task", + timestamp: ts, + step, + payload: { + id: uuid5(JSON.stringify([name, step, idMetadata]), TASK_NAMESPACE), + name, + input, + triggers, + }, + }; + } +} + +export function* mapDebugTaskResults< + N extends PropertyKey, + C extends PropertyKey +>( + step: number, + tasks: readonly PregelExecutableTask[], + streamChannelsList: Array +) { + const ts = new Date().toISOString(); + for (const { name, writes, config } of tasks) { + if (config?.tags?.includes(TAG_HIDDEN)) continue; + + const metadata = { ...config?.metadata }; + const idMetadata = _getIdMetadata(metadata); + + yield { + type: "task_result", + timestamp: ts, + step, + payload: { + id: uuid5(JSON.stringify([name, step, idMetadata]), TASK_NAMESPACE), + name, + result: writes.filter(([channel]) => + streamChannelsList.includes(channel) + ), + }, + }; + } +} + +export function* mapDebugCheckpoint< + N extends PropertyKey, + C extends PropertyKey +>( + step: number, + config: RunnableConfig, + channels: Record, + streamChannels: string | string[], + metadata: CheckpointMetadata, + tasks: readonly PregelExecutableTask[], + pendingWrites: CheckpointPendingWrite[] +) { + function formatConfig(config: RunnableConfig) { + // https://stackoverflow.com/a/78298178 + type CamelToSnake< + T extends string, + A extends string = "" + > = T extends `${infer F}${infer R}` + ? CamelToSnake< + R, + `${A}${F extends Lowercase ? 
F : `_${Lowercase}`}` + > + : A; + + // make sure the config is consistent with Python + const pyConfig: Partial< + Record, unknown> + > = {}; + + if (config.callbacks != null) pyConfig.callbacks = config.callbacks; + if (config.configurable != null) + pyConfig.configurable = config.configurable; + if (config.maxConcurrency != null) + pyConfig.max_concurrency = config.maxConcurrency; + + if (config.metadata != null) pyConfig.metadata = config.metadata; + if (config.recursionLimit != null) + pyConfig.recursion_limit = config.recursionLimit; + if (config.runId != null) pyConfig.run_id = config.runId; + if (config.runName != null) pyConfig.run_name = config.runName; + if (config.tags != null) pyConfig.tags = config.tags; + + return pyConfig; + } + + function getCurrentUTC() { + const now = new Date(); + return new Date(now.getTime() - now.getTimezoneOffset() * 60 * 1000); + } + + const ts = getCurrentUTC().toISOString(); + yield { + type: "checkpoint", + timestamp: ts, + step, + payload: { + config: formatConfig(config), + values: readChannels(channels, streamChannels), + metadata, + next: tasks.map((task) => task.name), + tasks: tasksWithWrites(tasks, pendingWrites), + }, + }; +} + +export function tasksWithWrites( + tasks: PregelTaskDescription[] | readonly PregelExecutableTask[], + pendingWrites: CheckpointPendingWrite[] +): PregelTaskDescription[] { + return tasks.map((task): PregelTaskDescription => { + const error = pendingWrites.find( + ([id, n]) => id === task.id && n === ERROR + )?.[2]; + + if (error) return { id: task.id, name: task.name as string, error }; + return { id: task.id, name: task.name as string }; + }); +} + +export function printStepCheckpoint( + step: number, + channels: Record>, + whitelist: string[] +): void { + console.log( + [ + `${wrap(COLORS_MAP.blue, `[${step}:checkpoint]`)}`, + `\x1b[1m State at the end of step ${step}:\x1b[0m\n`, + JSON.stringify(readChannels(channels, whitelist), null, 2), + ].join("") + ); +} + +export function printStepTasks( + step: number, + nextTasks: readonly PregelExecutableTask[] +): void { + const nTasks = nextTasks.length; + console.log( + [ + `${wrap(COLORS_MAP.blue, `[${step}:tasks]`)}`, + `\x1b[1m Starting step ${step} with ${nTasks} task${ + nTasks === 1 ? "" : "s" + }:\x1b[0m\n`, + nextTasks + .map( + (task) => + `- ${wrap(COLORS_MAP.green, String(task.name))} -> ${JSON.stringify( + task.input, + null, + 2 + )}` + ) + .join("\n"), + ].join("") + ); +} + +export function printStepWrites( + step: number, + writes: PendingWrite[], + whitelist: string[] +): void { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const byChannel: Record = {}; + + for (const [channel, value] of writes) { + if (whitelist.includes(channel)) { + if (!byChannel[channel]) { + byChannel[channel] = []; + } + byChannel[channel].push(value); + } + } + + console.log( + [ + `${wrap(COLORS_MAP.blue, `[${step}:writes]`)}`, + `\x1b[1m Finished step ${step} with writes to ${ + Object.keys(byChannel).length + } channel${Object.keys(byChannel).length !== 1 ? 
"s" : ""}:\x1b[0m\n`, + Object.entries(byChannel) + .map( + ([name, vals]) => + `- ${wrap(COLORS_MAP.yellow, name)} -> ${vals + .map((v) => JSON.stringify(v)) + .join(", ")}` + ) + .join("\n"), + ].join("") + ); +} diff --git a/libs/langgraph/src/pregel/index.ts b/libs/langgraph/src/pregel/index.ts new file mode 100644 index 00000000..81b54a9c --- /dev/null +++ b/libs/langgraph/src/pregel/index.ts @@ -0,0 +1,826 @@ +/* eslint-disable no-param-reassign */ +import { + Runnable, + RunnableConfig, + RunnableFunc, + RunnableLike, + RunnableSequence, + _coerceToRunnable, + ensureConfig, + getCallbackManagerForConfig, + patchConfig, +} from "@langchain/core/runnables"; +import { IterableReadableStream } from "@langchain/core/utils/stream"; +import { + BaseCheckpointSaver, + CheckpointListOptions, + copyCheckpoint, + emptyCheckpoint, + uuid5, +} from "@langchain/langgraph-checkpoint"; +import { + BaseChannel, + createCheckpoint, + emptyChannels, +} from "../channels/base.js"; +import { PregelNode } from "./read.js"; +import { validateGraph, validateKeys } from "./validate.js"; +import { mapOutputUpdates, readChannels } from "./io.js"; +import { + mapDebugTaskResults, + printStepCheckpoint, + printStepTasks, + printStepWrites, + tasksWithWrites, +} from "./debug.js"; +import { ChannelWrite, ChannelWriteEntry, PASSTHROUGH } from "./write.js"; +import { + CONFIG_KEY_CHECKPOINTER, + CONFIG_KEY_READ, + CONFIG_KEY_SEND, + ERROR, + INTERRUPT, +} from "../constants.js"; +import { + All, + PregelExecutableTask, + PregelInterface, + PregelParams, + StateSnapshot, + StreamMode, +} from "./types.js"; +import { + GraphRecursionError, + GraphValueError, + InvalidUpdateError, +} from "../errors.js"; +import { + executeTasks, + _prepareNextTasks, + _localRead, + _applyWrites, + StrRecord, +} from "./algo.js"; +import { prefixGenerator } from "../utils.js"; +import { _coerceToDict, getNewChannelVersions } from "./utils.js"; +import { PregelLoop } from "./loop.js"; + +type WriteValue = Runnable | RunnableFunc | unknown; + +function isString(value: unknown): value is string { + return typeof value === "string"; +} + +export class Channel { + static subscribeTo( + channels: string, + options?: { + key?: string; + tags?: string[]; + } + ): PregelNode; + + static subscribeTo( + channels: string[], + options?: { + tags?: string[]; + } + ): PregelNode; + + static subscribeTo( + channels: string | string[], + options?: { + key?: string; + tags?: string[]; + } + ): PregelNode { + const { key, tags } = options ?? {}; + if (Array.isArray(channels) && key !== undefined) { + throw new Error( + "Can't specify a key when subscribing to multiple channels" + ); + } + + let channelMappingOrArray: string[] | Record; + + if (isString(channels)) { + if (key) { + channelMappingOrArray = { [key]: channels }; + } else { + channelMappingOrArray = [channels]; + } + } else { + channelMappingOrArray = Object.fromEntries( + channels.map((chan) => [chan, chan]) + ); + } + + const triggers: string[] = Array.isArray(channels) ? channels : [channels]; + + return new PregelNode({ + channels: channelMappingOrArray, + triggers, + tags, + }); + } + + static writeTo( + channels: string[], + kwargs?: Record + ): ChannelWrite { + const channelWriteEntries: Array = []; + + for (const channel of channels) { + channelWriteEntries.push({ + channel, + value: PASSTHROUGH, + skipNone: false, + }); + } + + for (const [key, value] of Object.entries(kwargs ?? 
{})) { + if (Runnable.isRunnable(value) || typeof value === "function") { + channelWriteEntries.push({ + channel: key, + value: PASSTHROUGH, + skipNone: true, + mapper: _coerceToRunnable(value as RunnableLike), + }); + } else { + channelWriteEntries.push({ + channel: key, + value, + skipNone: false, + }); + } + } + + return new ChannelWrite(channelWriteEntries); + } +} + +/** + * Config for executing the graph. + */ +export interface PregelOptions< + Nn extends StrRecord, + Cc extends StrRecord +> extends RunnableConfig { + /** The stream mode for the graph run. Default is ["values"]. */ + streamMode?: StreamMode | StreamMode[]; + inputKeys?: keyof Cc | Array; + /** The output keys to retrieve from the graph run. */ + outputKeys?: keyof Cc | Array; + /** The nodes to interrupt the graph run before. */ + interruptBefore?: All | Array; + /** The nodes to interrupt the graph run after. */ + interruptAfter?: All | Array; + /** Enable debug mode for the graph run. */ + debug?: boolean; +} + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export type PregelInputType = any; + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export type PregelOutputType = any; + +export class Pregel< + Nn extends StrRecord, + Cc extends StrRecord + > + extends Runnable> + implements PregelInterface +{ + static lc_name() { + return "LangGraph"; + } + + // Because Pregel extends `Runnable`. + lc_namespace = ["langgraph", "pregel"]; + + nodes: Nn; + + channels: Cc; + + inputChannels: keyof Cc | Array; + + outputChannels: keyof Cc | Array; + + autoValidate: boolean = true; + + streamMode: StreamMode[] = ["values"]; + + streamChannels?: keyof Cc | Array; + + interruptAfter?: Array | All; + + interruptBefore?: Array | All; + + stepTimeout?: number; + + debug: boolean = false; + + checkpointer?: BaseCheckpointSaver; + + constructor(fields: PregelParams) { + super(fields); + + let { streamMode } = fields; + if (streamMode != null && !Array.isArray(streamMode)) { + streamMode = [streamMode]; + } + + this.nodes = fields.nodes; + this.channels = fields.channels; + this.autoValidate = fields.autoValidate ?? this.autoValidate; + this.streamMode = streamMode ?? this.streamMode; + this.inputChannels = fields.inputChannels; + this.outputChannels = fields.outputChannels; + this.streamChannels = fields.streamChannels ?? this.streamChannels; + this.interruptAfter = fields.interruptAfter; + this.interruptBefore = fields.interruptBefore; + this.stepTimeout = fields.stepTimeout ?? this.stepTimeout; + this.debug = fields.debug ?? this.debug; + this.checkpointer = fields.checkpointer; + + if (this.autoValidate) { + this.validate(); + } + } + + validate(): this { + validateGraph({ + nodes: this.nodes, + channels: this.channels, + outputChannels: this.outputChannels, + inputChannels: this.inputChannels, + streamChannels: this.streamChannels, + interruptAfterNodes: this.interruptAfter, + interruptBeforeNodes: this.interruptBefore, + }); + + return this; + } + + get streamChannelsList(): Array { + if (Array.isArray(this.streamChannels)) { + return this.streamChannels; + } else if (this.streamChannels) { + return [this.streamChannels]; + } else { + return Object.keys(this.channels); + } + } + + get streamChannelsAsIs(): keyof Cc | Array { + if (this.streamChannels) { + return this.streamChannels; + } else { + return Object.keys(this.channels); + } + } + + /** + * Get the current state of the graph. 
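[Editor's note] A minimal end-to-end sketch of the renamed inputChannels / outputChannels parameters, mirroring the wiring style used in the repo's tests; this is not part of the diff, the node and channel names are placeholders, and the ability to pipe a plain function onto a PregelNode is assumed.

import { Pregel, Channel } from "./index.js";
import { LastValue } from "../channels/last_value.js";

// One node: read "input", add one, write the result to "output".
const addOne = Channel.subscribeTo("input")
  .pipe((x: number) => x + 1)
  .pipe(Channel.writeTo(["output"]));

const app = new Pregel({
  nodes: { addOne },
  channels: {
    input: new LastValue<number>(),
    output: new LastValue<number>(),
  },
  inputChannels: "input",   // formerly `inputs`
  outputChannels: "output", // formerly `outputs`
});

// await app.invoke(2); // -> 3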
+ */ + async getState(config: RunnableConfig): Promise { + if (!this.checkpointer) { + throw new GraphValueError("No checkpointer set"); + } + + const saved = await this.checkpointer.getTuple(config); + const checkpoint = saved ? saved.checkpoint : emptyCheckpoint(); + const channels = emptyChannels(this.channels, checkpoint); + const nextTasks = _prepareNextTasks( + checkpoint, + this.nodes, + channels, + saved !== undefined ? saved.config : config, + false, + { step: saved ? (saved.metadata?.step ?? -1) + 1 : -1 } + ); + return { + values: readChannels(channels, this.streamChannelsAsIs), + next: nextTasks.map((task) => task.name), + tasks: tasksWithWrites(nextTasks, saved?.pendingWrites ?? []), + metadata: saved?.metadata, + config: saved ? saved.config : config, + createdAt: saved?.checkpoint.ts, + parentConfig: saved?.parentConfig, + }; + } + + /** + * Get the history of the state of the graph. + */ + async *getStateHistory( + config: RunnableConfig, + options?: CheckpointListOptions + ): AsyncIterableIterator { + if (!this.checkpointer) { + throw new GraphValueError("No checkpointer set"); + } + for await (const saved of this.checkpointer.list(config, options)) { + const channels = emptyChannels(this.channels, saved.checkpoint); + const nextTasks = _prepareNextTasks( + saved.checkpoint, + this.nodes, + channels, + saved.config, + false, + { step: -1 } + ); + yield { + values: readChannels(channels, this.streamChannelsAsIs), + next: nextTasks.map((task) => task.name), + tasks: tasksWithWrites(nextTasks, saved.pendingWrites ?? []), + metadata: saved.metadata, + config: saved.config, + createdAt: saved.checkpoint.ts, + parentConfig: saved.parentConfig, + }; + } + } + + /** + * Update the state of the graph with the given values, as if they came from + * node `as_node`. If `as_node` is not provided, it will be set to the last node + * that updated the state, if not ambiguous. + */ + async updateState( + config: RunnableConfig, + values: Record | unknown, + asNode?: keyof Nn + ): Promise { + if (!this.checkpointer) { + throw new GraphValueError("No checkpointer set"); + } + + // Get the latest checkpoint + const saved = await this.checkpointer.getTuple(config); + const checkpoint = saved + ? copyCheckpoint(saved.checkpoint) + : emptyCheckpoint(); + const checkpointPreviousVersions = saved?.checkpoint.channel_versions ?? {}; + const step = saved?.metadata?.step ?? 
-1; + + // merge configurable fields with previous checkpoint config + const checkpointConfig = { + ...config, + configurable: { + ...config.configurable, + // TODO: add proper support for updating nested subgraph state + checkpoint_ns: "", + ...saved?.config.configurable, + }, + }; + + // Find last node that updated the state, if not provided + if (values == null && asNode === undefined) { + return await this.checkpointer.put( + checkpointConfig, + createCheckpoint(checkpoint, undefined, step), + { + source: "update", + step, + writes: {}, + }, + {} + ); + } + + const nonNullVersion = Object.values(checkpoint.versions_seen) + .map((seenVersions) => { + return Object.values(seenVersions); + }) + .flat() + .find((v) => !!v); + if (asNode === undefined && nonNullVersion === undefined) { + if ( + typeof this.inputChannels === "string" && + this.nodes[this.inputChannels] !== undefined + ) { + asNode = this.inputChannels; + } + } else if (asNode === undefined) { + // TODO: Double check + const lastSeenByNode = Object.entries(checkpoint.versions_seen) + .map(([n, seen]) => { + return Object.values(seen).map((v) => { + return [v, n] as const; + }); + }) + .flat() + .sort(([aNumber], [bNumber]) => { + return aNumber - bNumber; + }); + // if two nodes updated the state at the same time, it's ambiguous + if (lastSeenByNode) { + if (lastSeenByNode.length === 1) { + // eslint-disable-next-line prefer-destructuring + asNode = lastSeenByNode[0][1]; + } else if ( + lastSeenByNode[lastSeenByNode.length - 1][0] !== + lastSeenByNode[lastSeenByNode.length - 2][0] + ) { + // eslint-disable-next-line prefer-destructuring + asNode = lastSeenByNode[lastSeenByNode.length - 1][1]; + } + } + } + if (asNode === undefined) { + throw new InvalidUpdateError(`Ambiguous update, specify "asNode"`); + } + if (this.nodes[asNode] === undefined) { + throw new InvalidUpdateError( + `Node "${asNode.toString()}" does not exist` + ); + } + // update channels + const channels = emptyChannels(this.channels, checkpoint); + // run all writers of the chosen node + const writers = this.nodes[asNode].getWriters(); + if (!writers.length) { + throw new InvalidUpdateError( + `No writers found for node "${asNode.toString()}"` + ); + } + const task: PregelExecutableTask = { + name: asNode, + input: values, + proc: + // eslint-disable-next-line @typescript-eslint/no-explicit-any + writers.length > 1 ? RunnableSequence.from(writers as any) : writers[0], + writes: [], + triggers: [INTERRUPT], + config: undefined, + id: uuid5(INTERRUPT, checkpoint.id), + }; + + // execute task + await task.proc.invoke( + task.input, + patchConfig(config, { + runName: `${this.name}UpdateState`, + configurable: { + [CONFIG_KEY_SEND]: (items: [keyof Cc, unknown][]) => + task.writes.push(...items), + [CONFIG_KEY_READ]: _localRead.bind( + undefined, + checkpoint, + channels, + // TODO: Why does keyof StrRecord allow number and symbol? + task as PregelExecutableTask + ), + }, + }) + ); + + // apply to checkpoint and save + // TODO: Why does keyof StrRecord allow number and symbol? 
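[Editor's note] To show how these checkpointer-backed methods are meant to be driven by a caller, here is a hedged consumer-side sketch; the graph wiring, channel names, and the assumption that MemorySaver is exported from @langchain/langgraph-checkpoint are illustrative, not taken from this PR.

import { Annotation, StateGraph, START, END } from "@langchain/langgraph";
import { MemorySaver } from "@langchain/langgraph-checkpoint";

const State = Annotation.Root({
  count: Annotation<number>({ reducer: (a, b) => a + b, default: () => 0 }),
});

const graph = new StateGraph(State)
  .addNode("inc", () => ({ count: 1 }))
  .addEdge(START, "inc")
  .addEdge("inc", END)
  .compile({ checkpointer: new MemorySaver() });

const config = { configurable: { thread_id: "thread-1" } };
await graph.invoke({ count: 0 }, config);

// Read the latest checkpoint for this thread ...
const snapshot = await graph.getState(config);
console.log(snapshot.values, snapshot.next);

// ... and patch the state as if node "inc" had written the update itself.
await graph.updateState(config, { count: 10 }, "inc");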
+ _applyWrites( + checkpoint, + channels, + [task as PregelExecutableTask], + this.checkpointer.getNextVersion.bind(this.checkpointer) + ); + + const newVersions = getNewChannelVersions( + checkpointPreviousVersions, + checkpoint.channel_versions + ); + return await this.checkpointer.put( + checkpointConfig, + createCheckpoint(checkpoint, channels, step + 1), + { + source: "update", + step: step + 1, + writes: { [asNode]: values }, + }, + newVersions + ); + } + + _defaults(config: PregelOptions): [ + boolean, // debug + StreamMode[], // stream mode + string | string[], // input keys + string | string[], // output keys + RunnableConfig, // config without pregel keys + All | string[], // interrupt before + All | string[], // interrupt after + BaseCheckpointSaver | undefined + ] { + const { + debug, + streamMode, + inputKeys, + outputKeys, + interruptAfter, + interruptBefore, + ...rest + } = config; + const defaultDebug = debug !== undefined ? debug : this.debug; + + let defaultOutputKeys = outputKeys; + if (defaultOutputKeys === undefined) { + defaultOutputKeys = this.streamChannelsAsIs; + } else { + validateKeys(defaultOutputKeys, this.channels); + } + + let defaultInputKeys = inputKeys; + if (defaultInputKeys === undefined) { + defaultInputKeys = this.inputChannels; + } else { + validateKeys(defaultInputKeys, this.channels); + } + + const defaultInterruptBefore = + interruptBefore ?? this.interruptBefore ?? []; + + const defaultInterruptAfter = interruptAfter ?? this.interruptAfter ?? []; + + let defaultStreamMode: StreamMode[]; + if (streamMode !== undefined) { + defaultStreamMode = Array.isArray(streamMode) ? streamMode : [streamMode]; + } else { + defaultStreamMode = this.streamMode; + } + + let defaultCheckpointer: BaseCheckpointSaver | undefined; + if ( + config.configurable !== undefined && + config.configurable[CONFIG_KEY_READ] !== undefined + ) { + defaultStreamMode = ["values"]; + } + if ( + config !== undefined && + config.configurable?.[CONFIG_KEY_CHECKPOINTER] !== undefined && + (defaultInterruptAfter.length > 0 || defaultInterruptBefore.length > 0) + ) { + defaultCheckpointer = config.configurable[CONFIG_KEY_CHECKPOINTER]; + } else { + defaultCheckpointer = this.checkpointer; + } + + return [ + defaultDebug, + defaultStreamMode, + defaultInputKeys as string | string[], + defaultOutputKeys as string | string[], + rest, + defaultInterruptBefore as All | string[], + defaultInterruptAfter as All | string[], + defaultCheckpointer, + ]; + } + + /** + * Stream graph steps for a single input. + * @param input The input to the graph. + * @param options The configuration to use for the run. + * @param options.streamMode The mode to stream output. Defaults to value set on initialization. + * Options are "values", "updates", and "debug". Default is "values". + * values: Emit the current values of the state for each step. + * updates: Emit only the updates to the state for each step. + * Output is a dict with the node name as key and the updated values as value. + * debug: Emit debug events for each step. + * @param options.outputKeys The keys to stream. Defaults to all non-context channels. + * @param options.interruptBefore Nodes to interrupt before. + * @param options.interruptAfter Nodes to interrupt after. + * @param options.debug Whether to print debug information during execution. 
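[Editor's note] A short consumer-side sketch of the stream modes documented here (hedged; the graph construction is illustrative rather than code from this PR). When several modes are requested at once, each emitted chunk is a [mode, payload] tuple, matching the branching in _streamIterator below.

import { Annotation, StateGraph, START, END } from "@langchain/langgraph";

const State = Annotation.Root({ text: Annotation<string> });

const app = new StateGraph(State)
  .addNode("shout", (s) => ({ text: s.text.toUpperCase() }))
  .addEdge(START, "shout")
  .addEdge("shout", END)
  .compile();

const stream = await app.stream(
  { text: "hello" },
  { streamMode: ["updates", "debug"] }
);
for await (const [mode, payload] of stream) {
  // mode is "updates" or "debug"; payload is the per-step chunk.
  console.log(mode, payload);
}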
+ */ + override async stream( + input: PregelInputType, + options?: Partial> + ): Promise> { + return super.stream(input, options); + } + + override async *_streamIterator( + input: PregelInputType, + options?: Partial> + ): AsyncGenerator { + const inputConfig = ensureConfig(options); + if ( + inputConfig.recursionLimit === undefined || + inputConfig.recursionLimit < 1 + ) { + throw new Error(`Passed "recursionLimit" must be at least 1.`); + } + if ( + this.checkpointer !== undefined && + inputConfig.configurable === undefined + ) { + throw new Error( + `Checkpointer requires one or more of the following "configurable" keys: "thread_id", "checkpoint_ns", "checkpoint_id"` + ); + } + const callbackManager = await getCallbackManagerForConfig(inputConfig); + const runManager = await callbackManager?.handleChainStart( + this.toJSON(), + _coerceToDict(input, "input"), + inputConfig.runId, + undefined, + undefined, + undefined, + inputConfig?.runName ?? this.getName() + ); + delete inputConfig.runId; + // assign defaults + const [ + debug, + streamMode, + , + outputKeys, + config, + interruptBefore, + interruptAfter, + checkpointer, + ] = this._defaults(inputConfig); + let loop; + let backgroundError; + const onBackgroundError = (e: Error) => { + backgroundError = e; + }; + try { + loop = await PregelLoop.initialize({ + input, + config, + checkpointer, + graph: this, + onBackgroundError, + }); + while ( + backgroundError === undefined && + (await loop.tick({ + outputKeys, + interruptAfter, + interruptBefore, + manager: runManager, + })) + ) { + if (debug) { + printStepCheckpoint( + loop.checkpointMetadata.step, + loop.channels, + this.streamChannelsList as string[] + ); + } + while (loop.stream.length > 0) { + const nextItem = loop.stream.shift(); + if (nextItem === undefined) { + throw new Error("Data structure error."); + } + if (streamMode.includes(nextItem[0])) { + if (streamMode.length === 1) { + yield nextItem[1]; + } else { + yield nextItem; + } + } + } + if (debug) { + printStepTasks(loop.step, loop.tasks); + } + // execute tasks, and wait for one to fail or all to finish. + // each task is independent from all other concurrent tasks + // yield updates/debug output as each task finishes + const tasks = Object.fromEntries( + loop.tasks + .filter((task) => task.writes.length === 0) + .map((pregelTask) => { + return [ + pregelTask.id, + async () => { + let error; + let result; + try { + result = await pregelTask.proc.invoke( + pregelTask.input, + pregelTask.config + ); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + } catch (e: any) { + error = e; + error.pregelTaskId = pregelTask.id; + } + return { + task: pregelTask, + result, + error, + }; + }, + ]; + }) + ); + + try { + for await (const task of executeTasks( + tasks, + this.stepTimeout, + config.signal + )) { + loop.putWrites(task.id, task.writes); + if (streamMode.includes("updates")) { + yield* prefixGenerator( + mapOutputUpdates(outputKeys, [task]), + streamMode.length > 1 ? "updates" : undefined + ); + } + if (streamMode.includes("debug")) { + yield* prefixGenerator( + mapDebugTaskResults(loop.step, [task], this.streamChannelsList), + streamMode.length > 1 ? 
"debug" : undefined + ); + } + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + } catch (e: any) { + if (e.pregelTaskId) { + loop.putWrites(e.pregelTaskId, [[ERROR, { message: e.message }]]); + } + throw e; + } + + if (debug) { + printStepWrites( + loop.step, + loop.tasks.map((task) => task.writes).flat(), + this.streamChannelsList as string[] + ); + } + } + if (backgroundError !== undefined) { + throw backgroundError; + } + while (loop.stream.length > 0) { + const nextItem = loop.stream.shift(); + if (nextItem === undefined) { + throw new Error("Data structure error."); + } + if (streamMode.includes(nextItem[0])) { + if (streamMode.length === 1) { + yield nextItem[1]; + } else { + yield nextItem; + } + } + } + if (loop.status === "out_of_steps") { + throw new GraphRecursionError( + [ + `Recursion limit of ${config.recursionLimit} reached`, + "without hitting a stop condition. You can increase the", + `limit by setting the "recursionLimit" config key.`, + ].join(" ") + ); + } + await runManager?.handleChainEnd(readChannels(loop.channels, outputKeys)); + } catch (e) { + await runManager?.handleChainError(e); + throw e; + } finally { + await loop?.backgroundTasksPromise; + } + } + + /** + * Run the graph with a single input and config. + * @param input The input to the graph. + * @param options The configuration to use for the run. + * @param options.streamMode The mode to stream output. Defaults to value set on initialization. + * Options are "values", "updates", and "debug". Default is "values". + * values: Emit the current values of the state for each step. + * updates: Emit only the updates to the state for each step. + * Output is a dict with the node name as key and the updated values as value. + * debug: Emit debug events for each step. + * @param options.outputKeys The keys to stream. Defaults to all non-context channels. + * @param options.interruptBefore Nodes to interrupt before. + * @param options.interruptAfter Nodes to interrupt after. + * @param options.debug Whether to print debug information during execution. + */ + override async invoke( + input: PregelInputType, + options?: Partial> + ): Promise { + const streamMode = options?.streamMode ?? "values"; + const config = { + ...ensureConfig(options), + outputKeys: options?.outputKeys ?? 
this.outputChannels, + streamMode, + }; + const chunks = []; + const stream = await this.stream(input, config); + for await (const chunk of stream) { + chunks.push(chunk); + } + if (streamMode === "values") { + return chunks[chunks.length - 1]; + } + return chunks; + } +} diff --git a/langgraph/src/pregel/io.ts b/libs/langgraph/src/pregel/io.ts similarity index 75% rename from langgraph/src/pregel/io.ts rename to libs/langgraph/src/pregel/io.ts index d8723ce1..e695126d 100644 --- a/langgraph/src/pregel/io.ts +++ b/libs/langgraph/src/pregel/io.ts @@ -1,13 +1,13 @@ -import { v5 as uuidv5 } from "uuid"; -import { BaseChannel } from "../channels/base.js"; -import { PregelExecutableTask } from "./types.js"; -import { TAG_HIDDEN, TASK_NAMESPACE } from "../constants.js"; +import type { PendingWrite } from "@langchain/langgraph-checkpoint"; +import type { BaseChannel } from "../channels/base.js"; +import type { PregelExecutableTask } from "./types.js"; +import { TAG_HIDDEN } from "../constants.js"; import { EmptyChannelError } from "../errors.js"; export function readChannel( channels: Record, chan: C, - catch_: boolean = true, + catchErrors: boolean = true, returnException: boolean = false ): unknown | null { try { @@ -17,7 +17,7 @@ export function readChannel( if (e.name === EmptyChannelError.unminifiable_name) { if (returnException) { return e; - } else if (catch_) { + } else if (catchErrors) { return null; } } @@ -80,67 +80,12 @@ export function* mapInput( } } -export function* mapDebugTasks( - step: number, - tasks: readonly PregelExecutableTask[] -) { - const ts = new Date().toISOString(); - for (const { name, input, config, triggers } of tasks) { - if (config?.tags?.includes(TAG_HIDDEN)) continue; - - const metadata = { ...config?.metadata }; - delete metadata.checkpoint_id; - - yield { - type: "task", - timestamp: ts, - step, - payload: { - id: uuidv5(JSON.stringify([name, step, metadata]), TASK_NAMESPACE), - name, - input, - triggers, - }, - }; - } -} - -export function* mapDebugTaskResults< - N extends PropertyKey, - C extends PropertyKey ->( - step: number, - tasks: readonly PregelExecutableTask[], - streamChannelsList: Array -) { - const ts = new Date().toISOString(); - for (const { name, writes, config } of tasks) { - if (config?.tags?.includes(TAG_HIDDEN)) continue; - - const metadata = { ...config?.metadata }; - delete metadata.checkpoint_id; - - yield { - type: "task_result", - timestamp: ts, - step, - payload: { - id: uuidv5(JSON.stringify([name, step, metadata]), TASK_NAMESPACE), - name, - result: writes.filter(([channel]) => - streamChannelsList.includes(channel) - ), - }, - }; - } -} - /** * Map pending writes (a sequence of tuples (channel, value)) to output chunk. 
*/ export function* mapOutputValues( outputChannels: C | Array, - pendingWrites: readonly [C, unknown][], + pendingWrites: readonly PendingWrite[], channels: Record // eslint-disable-next-line @typescript-eslint/no-explicit-any ): Generator, any> { diff --git a/libs/langgraph/src/pregel/loop.ts b/libs/langgraph/src/pregel/loop.ts new file mode 100644 index 00000000..ea9795eb --- /dev/null +++ b/libs/langgraph/src/pregel/loop.ts @@ -0,0 +1,519 @@ +import Deque from "double-ended-queue"; +import type { RunnableConfig } from "@langchain/core/runnables"; +import type { CallbackManagerForChainRun } from "@langchain/core/callbacks/manager"; +import { + BaseCheckpointSaver, + Checkpoint, + CheckpointTuple, + copyCheckpoint, + emptyCheckpoint, + PendingWrite, + CheckpointPendingWrite, + CheckpointMetadata, + All, +} from "@langchain/langgraph-checkpoint"; +import { + BaseChannel, + createCheckpoint, + emptyChannels, +} from "../channels/base.js"; +import { PregelExecutableTask, PregelInterface, StreamMode } from "./types.js"; +import { + CONFIG_KEY_READ, + CONFIG_KEY_RESUMING, + ERROR, + INPUT, + INTERRUPT, +} from "../constants.js"; +import { + _applyWrites, + _prepareNextTasks, + increment, + shouldInterrupt, + WritesProtocol, +} from "./algo.js"; +import { gatherIterator, prefixGenerator } from "../utils.js"; +import { mapInput, mapOutputUpdates, mapOutputValues } from "./io.js"; +import { EmptyInputError, GraphInterrupt } from "../errors.js"; +import { getNewChannelVersions } from "./utils.js"; +import { mapDebugTasks, mapDebugCheckpoint } from "./debug.js"; + +const INPUT_DONE = Symbol.for("INPUT_DONE"); +const INPUT_RESUMING = Symbol.for("INPUT_RESUMING"); +const DEFAULT_LOOP_LIMIT = 25; + +export type PregelLoopInitializeParams = { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + input?: any; + config: RunnableConfig; + checkpointer?: BaseCheckpointSaver; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + graph: PregelInterface; + onBackgroundError: (e: Error) => void; +}; + +type PregelLoopParams = { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + input?: any; + config: RunnableConfig; + checkpointer?: BaseCheckpointSaver; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + graph: PregelInterface; + checkpoint: Checkpoint; + checkpointMetadata: CheckpointMetadata; + checkpointPreviousVersions: Record; + checkpointPendingWrites: CheckpointPendingWrite[]; + checkpointConfig: RunnableConfig; + channels: Record; + step: number; + stop: number; + onBackgroundError: (e: Error) => void; +}; + +export class PregelLoop { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + protected input?: any; + + config: RunnableConfig; + + protected checkpointer?: BaseCheckpointSaver; + + protected checkpointerGetNextVersion: ( + current: number | undefined, + channel: BaseChannel + ) => number; + + // TODO: Fix typing + // eslint-disable-next-line @typescript-eslint/no-explicit-any + protected graph: PregelInterface; + + channels: Record; + + protected checkpoint: Checkpoint; + + protected checkpointConfig: RunnableConfig; + + checkpointMetadata: CheckpointMetadata; + + protected checkpointPendingWrites: CheckpointPendingWrite[] = []; + + protected checkpointPreviousVersions: Record; + + step: number; + + protected stop: number; + + status: + | "pending" + | "done" + | "interrupt_before" + | "interrupt_after" + | "out_of_steps" = "pending"; + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + tasks: 
PregelExecutableTask[] = []; + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + stream: Deque<[StreamMode, any]> = new Deque(); + + protected isNested: boolean; + + protected _putCheckpointPromise: Promise = Promise.resolve(); + + onBackgroundError: (e: Error) => void; + + get backgroundTasksPromise() { + return this._putCheckpointPromise; + } + + constructor(params: PregelLoopParams) { + this.input = params.input; + this.config = params.config; + this.checkpointer = params.checkpointer; + // TODO: if managed values no longer needs graph we can replace with + // managed_specs, channel_specs + if (this.checkpointer !== undefined) { + this.checkpointerGetNextVersion = this.checkpointer.getNextVersion.bind( + this.checkpointer + ); + } else { + this.checkpointerGetNextVersion = increment; + } + this.graph = params.graph; + this.checkpoint = params.checkpoint; + this.checkpointConfig = params.checkpointConfig; + this.checkpointMetadata = params.checkpointMetadata; + this.checkpointPreviousVersions = params.checkpointPreviousVersions; + this.channels = params.channels; + this.checkpointPendingWrites = params.checkpointPendingWrites; + this.step = params.step; + this.stop = params.stop; + this.isNested = CONFIG_KEY_READ in (this.config.configurable ?? {}); + this.onBackgroundError = params.onBackgroundError; + } + + static async initialize(params: PregelLoopInitializeParams) { + const saved: CheckpointTuple = (await params.checkpointer?.getTuple( + params.config + )) ?? { + config: params.config, + checkpoint: emptyCheckpoint(), + metadata: { + source: "input", + step: -2, + writes: null, + }, + pendingWrites: [], + }; + const checkpointConfig = { + ...params.config, + ...saved.config, + configurable: { + ...params.config.configurable, + ...saved.config.configurable, + }, + }; + const checkpoint = copyCheckpoint(saved.checkpoint); + const checkpointMetadata = { ...saved.metadata } as CheckpointMetadata; + const checkpointPendingWrites = saved.pendingWrites ?? []; + + const channels = emptyChannels(params.graph.channels, checkpoint); + + const step = (checkpointMetadata.step ?? 0) + 1; + const stop = + step + (params.config.recursionLimit ?? DEFAULT_LOOP_LIMIT) + 1; + const checkpointPreviousVersions = { ...checkpoint.channel_versions }; + return new PregelLoop({ + input: params.input, + config: params.config, + checkpointer: params.checkpointer, + graph: params.graph, + checkpoint, + checkpointMetadata, + checkpointConfig, + channels, + step, + stop, + checkpointPreviousVersions, + checkpointPendingWrites, + onBackgroundError: params.onBackgroundError, + }); + } + + protected async _checkpointerPutAfterPrevious(input: { + config: RunnableConfig; + checkpoint: Checkpoint; + metadata: CheckpointMetadata; + newVersions: Record; + }) { + try { + await this._putCheckpointPromise; + } finally { + this._putCheckpointPromise = + this.checkpointer + ?.put( + input.config, + input.checkpoint, + input.metadata, + input.newVersions + ) + .catch(this.onBackgroundError) ?? Promise.resolve(); + } + } + + /** + * Put writes for a task, to be read by the next tick. 
+ * @param taskId + * @param writes + */ + putWrites(taskId: string, writes: PendingWrite[]) { + const pendingWrites: CheckpointPendingWrite[] = writes.map( + ([key, value]) => { + return [taskId, key, value]; + } + ); + this.checkpointPendingWrites.push(...pendingWrites); + if (this.checkpointer !== undefined) { + void this.checkpointer + .putWrites( + { + ...this.checkpointConfig, + configurable: { + ...this.checkpointConfig.configurable, + checkpoint_ns: this.config.configurable?.checkpoint_ns ?? "", + checkpoint_id: this.checkpoint.id, + }, + }, + writes, + taskId + ) + .catch(this.onBackgroundError); + } + } + + /** + * Execute a single iteration of the Pregel loop. + * Returns true if more iterations are needed. + * @param params + */ + async tick(params: { + outputKeys: string | string[]; + interruptAfter: string[] | All; + interruptBefore: string[] | All; + manager?: CallbackManagerForChainRun; + }): Promise { + const { + outputKeys = [], + interruptAfter = [], + interruptBefore = [], + manager, + } = params; + if (this.status !== "pending") { + throw new Error( + `Cannot tick when status is no longer "pending". Current status: "${this.status}"` + ); + } + if (![INPUT_DONE, INPUT_RESUMING].includes(this.input)) { + await this._first(); + } else if (this.tasks.every((task) => task.writes.length > 0)) { + const writes = this.tasks.flatMap((t) => t.writes); + // All tasks have finished + _applyWrites( + this.checkpoint, + this.channels, + this.tasks, + this.checkpointerGetNextVersion + ); + // produce values output + const valuesOutput = await gatherIterator( + prefixGenerator( + mapOutputValues(outputKeys, writes, this.channels), + "values" + ) + ); + this.stream.push(...valuesOutput); + // clear pending writes + this.checkpointPendingWrites = []; + const updatesOnly = + this.graph.streamMode?.length === 1 && + this.graph.streamMode?.includes("updates"); + const metadataWrites = updatesOnly + ? 
mapOutputUpdates(outputKeys, this.tasks).next().value + : mapOutputValues(outputKeys, writes, this.channels).next().value; + await this._putCheckpoint({ + source: "loop", + writes: metadataWrites, + }); + // after execution, check if we should interrupt + if (shouldInterrupt(this.checkpoint, interruptAfter, this.tasks)) { + this.status = "interrupt_after"; + if (this.isNested) { + throw new GraphInterrupt(); + } else { + return false; + } + } + } else { + return false; + } + if (this.step > this.stop) { + this.status = "out_of_steps"; + return false; + } + + const nextTasks = _prepareNextTasks( + this.checkpoint, + this.graph.nodes, + this.channels, + this.config, + true, + { + step: this.step, + checkpointer: this.checkpointer, + isResuming: this.input === INPUT_RESUMING, + manager, + } + ); + this.tasks = nextTasks; + + // Produce debug output + if (this.checkpointer) { + this.stream.push( + ...(await gatherIterator( + prefixGenerator( + mapDebugCheckpoint( + this.step - 1, // printing checkpoint for previous step + this.checkpointConfig, + this.channels, + this.graph.streamChannelsAsIs as string[], + this.checkpointMetadata, + this.tasks, + this.checkpointPendingWrites + ), + "debug" + ) + )) + ); + } + + if (this.tasks.length === 0) { + this.status = "done"; + return false; + } + // if there are pending writes from a previous loop, apply them + if (this.checkpointPendingWrites.length > 0) { + for (const [tid, k, v] of this.checkpointPendingWrites) { + // TODO: Do the same for INTERRUPT + if (k === ERROR) { + continue; + } + const task = this.tasks.find((t) => t.id === tid); + if (task) { + task.writes.push([k, v]); + } + } + } + // if all tasks have finished, re-tick + if (this.tasks.every((task) => task.writes.length > 0)) { + return this.tick({ + outputKeys, + interruptAfter, + interruptBefore, + manager, + }); + } + + // Before execution, check if we should interrupt + if (shouldInterrupt(this.checkpoint, interruptBefore, this.tasks)) { + this.status = "interrupt_before"; + if (this.isNested) { + throw new GraphInterrupt(); + } else { + return false; + } + } + // Produce debug output + const debugOutput = await gatherIterator( + prefixGenerator(mapDebugTasks(this.step, this.tasks), "debug") + ); + this.stream.push(...debugOutput); + + return true; + } + + /** + * Resuming from previous checkpoint requires + * - finding a previous checkpoint + * - receiving None input (outer graph) or RESUMING flag (subgraph) + */ + protected async _first() { + const isResuming = + (Object.keys(this.checkpoint.channel_versions).length !== 0 && + this.config.configurable?.[CONFIG_KEY_RESUMING] !== undefined) || + this.input === null; + if (isResuming) { + for (const channelName of Object.keys(this.channels)) { + if (this.checkpoint.channel_versions[channelName] !== undefined) { + const version = this.checkpoint.channel_versions[channelName]; + this.checkpoint.versions_seen[INTERRUPT] = { + ...this.checkpoint.versions_seen[INTERRUPT], + [channelName]: version, + }; + } + } + // map inputs to channel updates + } else { + const inputWrites = await gatherIterator( + mapInput(this.graph.inputChannels, this.input) + ); + if (inputWrites.length === 0) { + throw new EmptyInputError( + `Received no input writes for ${JSON.stringify( + this.graph.inputChannels, + null, + 2 + )}` + ); + } + const discardTasks = _prepareNextTasks( + this.checkpoint, + this.graph.nodes, + this.channels, + this.config, + true, + { step: this.step } + ); + _applyWrites( + this.checkpoint, + this.channels, + (discardTasks as 
WritesProtocol[]).concat([ + { + name: INPUT, + writes: inputWrites as PendingWrite[], + triggers: [], + }, + ]), + this.checkpointerGetNextVersion + ); + // save input checkpoint + await this._putCheckpoint({ source: "input", writes: this.input }); + } + // done with input + this.input = isResuming ? INPUT_RESUMING : INPUT_DONE; + } + + protected async _putCheckpoint( + inputMetadata: Omit<CheckpointMetadata, "step"> + ) { + // Assign step + const metadata = { + ...inputMetadata, + step: this.step, + }; + // Bail if no checkpointer + if (this.checkpointer !== undefined) { + // create new checkpoint + this.checkpointMetadata = metadata; + // child graphs keep at most one checkpoint per parent checkpoint + // this is achieved by writing child checkpoints as progress is made + // (so that error recovery / resuming from interrupt don't lose work) + // but doing so always with an id equal to that of the parent checkpoint + this.checkpoint = createCheckpoint( + this.checkpoint, + this.channels, + this.step + // id: this.isNested ? this.config.configurable?.checkpoint_id : undefined, + ); + this.checkpointConfig = { + ...this.checkpointConfig, + configurable: { + ...this.checkpointConfig.configurable, + checkpoint_ns: this.config.configurable?.checkpoint_ns ?? "", + }, + }; + const channelVersions = { ...this.checkpoint.channel_versions }; + const newVersions = getNewChannelVersions( + this.checkpointPreviousVersions, + channelVersions + ); + this.checkpointPreviousVersions = channelVersions; + // save it, without blocking + // if there's a previous checkpoint save in progress, wait for it + // ensuring checkpointers receive checkpoints in order + void this._checkpointerPutAfterPrevious({ + config: { ...this.checkpointConfig }, + checkpoint: copyCheckpoint(this.checkpoint), + metadata: { ...this.checkpointMetadata }, + newVersions, + }); + this.checkpointConfig = { + ...this.checkpointConfig, + configurable: { + ...this.checkpointConfig.configurable, + checkpoint_id: this.checkpoint.id, + }, + }; + } + this.step += 1; + } +} diff --git a/langgraph/src/pregel/read.ts b/libs/langgraph/src/pregel/read.ts similarity index 100% rename from langgraph/src/pregel/read.ts rename to libs/langgraph/src/pregel/read.ts diff --git a/libs/langgraph/src/pregel/types.ts b/libs/langgraph/src/pregel/types.ts new file mode 100644 index 00000000..23ed0ffb --- /dev/null +++ b/libs/langgraph/src/pregel/types.ts @@ -0,0 +1,126 @@ +import type { Runnable, RunnableConfig } from "@langchain/core/runnables"; +import type { + PendingWrite, + CheckpointMetadata, + BaseCheckpointSaver, +} from "@langchain/langgraph-checkpoint"; +import type { BaseChannel } from "../channels/base.js"; +import type { PregelNode } from "./read.js"; + +export type StreamMode = "values" | "updates" | "debug"; + +/** + * Construct a type with a set of properties K of type T + */ +type StrRecord<K extends string, T> = { + [P in K]: T; +}; + +export interface PregelInterface< + Nn extends StrRecord<string, PregelNode>, + Cc extends StrRecord<string, BaseChannel> +> { + nodes: Nn; + + channels: Cc; + + /** + * @default true + */ + autoValidate?: boolean; + + /** + * @default "values" + */ + streamMode?: StreamMode | StreamMode[]; + + inputChannels: keyof Cc | Array<keyof Cc>; + + outputChannels: keyof Cc | Array<keyof Cc>; + + /** + * @default [] + */ + interruptAfter?: Array<keyof Nn> | All; + + /** + * @default [] + */ + interruptBefore?: Array<keyof Nn> | All; + + streamChannels?: keyof Cc | Array<keyof Cc>; + + get streamChannelsAsIs(): keyof Cc | Array<keyof Cc>; + + /** + * @default undefined + */ + stepTimeout?: number; + + /** + * @default false + */ + debug?: boolean; + + checkpointer?:
BaseCheckpointSaver; +} + +export type PregelParams< + Nn extends StrRecord<string, PregelNode>, + Cc extends StrRecord<string, BaseChannel> +> = Omit<PregelInterface<Nn, Cc>, "streamChannelsAsIs">; + +export interface PregelTaskDescription { + readonly id: string; + readonly name: string; + readonly error?: unknown; +} + +export interface PregelExecutableTask< + N extends PropertyKey, + C extends PropertyKey +> { + readonly name: N; + readonly input: unknown; + readonly proc: Runnable; + readonly writes: PendingWrite[]; + readonly config: RunnableConfig | undefined; + readonly triggers: Array<string>; + readonly retry_policy?: string; + readonly id: string; +} + +export interface StateSnapshot { + /** + * Current values of channels + */ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + readonly values: Record<string, any> | any; + /** + * Nodes to execute in the next step, if any + */ + readonly next: Array<string>; + /** + * Config used to fetch this snapshot + */ + readonly config: RunnableConfig; + /** + * Metadata about the checkpoint + */ + readonly metadata?: CheckpointMetadata; + /** + * Time when the snapshot was created + */ + readonly createdAt?: string; + /** + * Config used to fetch the parent snapshot, if any + * @default undefined + */ + readonly parentConfig?: RunnableConfig | undefined; + /** + * Tasks to execute in this step. If already attempted, may contain an error. + */ + readonly tasks: PregelTaskDescription[]; +} + +export type All = "*"; diff --git a/libs/langgraph/src/pregel/utils.ts b/libs/langgraph/src/pregel/utils.ts new file mode 100644 index 00000000..677e723c --- /dev/null +++ b/libs/langgraph/src/pregel/utils.ts @@ -0,0 +1,52 @@ +import type { ChannelVersions } from "@langchain/langgraph-checkpoint"; + +export function getNullChannelVersion(currentVersions: ChannelVersions) { + const versionValues = Object.values(currentVersions); + const versionType = + versionValues.length > 0 ? typeof versionValues[0] : undefined; + let nullVersion: number | string | undefined; + if (versionType === "number") { + nullVersion = 0; + } else if (versionType === "string") { + nullVersion = ""; + } + return nullVersion; +} + +export function getNewChannelVersions( + previousVersions: ChannelVersions, + currentVersions: ChannelVersions +): ChannelVersions { + // Get new channel versions + if (Object.keys(previousVersions).length > 0) { + const nullVersion = getNullChannelVersion(currentVersions); + return Object.fromEntries( + Object.entries(currentVersions).filter( + ([k, v]) => v > (previousVersions[k] ?? nullVersion) + ) + ); + } else { + return currentVersions; + } +} + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export function _coerceToDict(value: any, defaultKey: string) { + return value && + !Array.isArray(value) && + // eslint-disable-next-line no-instanceof/no-instanceof + !(value instanceof Date) && + typeof value === "object" + ?
value + : { [defaultKey]: value }; +} + +// Order matters +export function _getIdMetadata(metadata: Record) { + return { + langgraph_step: metadata.langgraph_step, + langgraph_node: metadata.langgraph_node, + langgraph_triggers: metadata.langgraph_triggers, + langgraph_task_idx: metadata.langgraph_task_idx, + }; +} diff --git a/langgraph/src/pregel/validate.ts b/libs/langgraph/src/pregel/validate.ts similarity index 100% rename from langgraph/src/pregel/validate.ts rename to libs/langgraph/src/pregel/validate.ts diff --git a/langgraph/src/pregel/write.ts b/libs/langgraph/src/pregel/write.ts similarity index 96% rename from langgraph/src/pregel/write.ts rename to libs/langgraph/src/pregel/write.ts index 5e90d9ae..9c276443 100644 --- a/langgraph/src/pregel/write.ts +++ b/libs/langgraph/src/pregel/write.ts @@ -53,8 +53,9 @@ export class ChannelWrite< .join(",")}>`; super({ ...{ writes, name, tags }, - func: async (input: RunInput, config?: RunnableConfig) => - this._write(input, config ?? {}), + func: async (input: RunInput, config?: RunnableConfig) => { + return this._write(input, config ?? {}); + }, }); this.writes = writes; diff --git a/langgraph/src/setup/async_local_storage.ts b/libs/langgraph/src/setup/async_local_storage.ts similarity index 100% rename from langgraph/src/setup/async_local_storage.ts rename to libs/langgraph/src/setup/async_local_storage.ts diff --git a/langgraph/src/tests/channels.test.ts b/libs/langgraph/src/tests/channels.test.ts similarity index 98% rename from langgraph/src/tests/channels.test.ts rename to libs/langgraph/src/tests/channels.test.ts index 8000de20..f3382885 100644 --- a/langgraph/src/tests/channels.test.ts +++ b/libs/langgraph/src/tests/channels.test.ts @@ -48,7 +48,7 @@ describe("Topic", () => { expect(channel.get()).toEqual(["c", "d", "d"]); channel.update([]); - expect(channel.get()).toEqual([]); + expect(() => channel.get()).toThrow(EmptyChannelError); channel.update(["e"]); expect(channel.get()).toEqual(["e"]); @@ -72,7 +72,7 @@ describe("Topic with unique: true", () => { expect(channel.get()).toEqual(["c", "d"]); channel.update([]); - expect(channel.get()).toEqual([]); + expect(() => channel.get()).toThrow(EmptyChannelError); channel.update(["e"]); expect(channel.get()).toEqual(["e"]); diff --git a/langgraph/src/tests/chatbot.int.test.ts b/libs/langgraph/src/tests/chatbot.int.test.ts similarity index 100% rename from langgraph/src/tests/chatbot.int.test.ts rename to libs/langgraph/src/tests/chatbot.int.test.ts diff --git a/langgraph/src/tests/data/mermaid.png b/libs/langgraph/src/tests/data/mermaid.png similarity index 100% rename from langgraph/src/tests/data/mermaid.png rename to libs/langgraph/src/tests/data/mermaid.png diff --git a/langgraph/src/tests/diagrams.test.ts b/libs/langgraph/src/tests/diagrams.test.ts similarity index 100% rename from langgraph/src/tests/diagrams.test.ts rename to libs/langgraph/src/tests/diagrams.test.ts diff --git a/langgraph/src/tests/graph.test.ts b/libs/langgraph/src/tests/graph.test.ts similarity index 100% rename from langgraph/src/tests/graph.test.ts rename to libs/langgraph/src/tests/graph.test.ts diff --git a/langgraph/src/tests/prebuilt.int.test.ts b/libs/langgraph/src/tests/prebuilt.int.test.ts similarity index 100% rename from langgraph/src/tests/prebuilt.int.test.ts rename to libs/langgraph/src/tests/prebuilt.int.test.ts diff --git a/langgraph/src/tests/prebuilt.test.ts b/libs/langgraph/src/tests/prebuilt.test.ts similarity index 100% rename from langgraph/src/tests/prebuilt.test.ts rename to 
libs/langgraph/src/tests/prebuilt.test.ts diff --git a/langgraph/src/tests/pregel.io.test.ts b/libs/langgraph/src/tests/pregel.io.test.ts similarity index 98% rename from langgraph/src/tests/pregel.io.test.ts rename to libs/langgraph/src/tests/pregel.io.test.ts index 7d7efc75..b7a38dfe 100644 --- a/langgraph/src/tests/pregel.io.test.ts +++ b/libs/langgraph/src/tests/pregel.io.test.ts @@ -1,5 +1,6 @@ import { describe, expect, it } from "@jest/globals"; import { RunnablePassthrough } from "@langchain/core/runnables"; +import { uuid6 } from "@langchain/langgraph-checkpoint"; import { mapInput, mapOutputUpdates, @@ -307,6 +308,7 @@ describe("mapOutputUpdates", () => { "someOutputChannelName" >[] = [ { + id: uuid6(-1), name: "task1", input: null, proc: new RunnablePassthrough(), @@ -315,6 +317,7 @@ describe("mapOutputUpdates", () => { config: undefined, }, { + id: uuid6(-1), name: "task2", input: null, proc: new RunnablePassthrough(), @@ -359,6 +362,7 @@ describe("mapOutputUpdates", () => { | "someOutputChannelName4" >[] = [ { + id: uuid6(-1), name: "task1", input: null, proc: new RunnablePassthrough(), @@ -370,6 +374,7 @@ describe("mapOutputUpdates", () => { config: undefined, }, { + id: uuid6(-1), name: "task2", input: null, proc: new RunnablePassthrough(), diff --git a/langgraph/src/tests/pregel.read.test.ts b/libs/langgraph/src/tests/pregel.read.test.ts similarity index 100% rename from langgraph/src/tests/pregel.read.test.ts rename to libs/langgraph/src/tests/pregel.read.test.ts diff --git a/langgraph/src/tests/pregel.test.ts b/libs/langgraph/src/tests/pregel.test.ts similarity index 69% rename from langgraph/src/tests/pregel.test.ts rename to libs/langgraph/src/tests/pregel.test.ts index 82d5cb40..c7904256 100644 --- a/langgraph/src/tests/pregel.test.ts +++ b/libs/langgraph/src/tests/pregel.test.ts @@ -1,6 +1,8 @@ /* eslint-disable no-process-env */ /* eslint-disable no-promise-executor-return */ -import { it, expect, jest, beforeAll, describe } from "@jest/globals"; +/* eslint-disable no-instanceof/no-instanceof */ +/* eslint-disable @typescript-eslint/no-explicit-any */ +import { it, expect, jest, describe } from "@jest/globals"; import { RunnableConfig, RunnableLambda, @@ -20,10 +22,19 @@ import { } from "@langchain/core/messages"; import { ToolCall } from "@langchain/core/messages/tool"; import { - gatherIterator, + Checkpoint, + CheckpointTuple, + MemorySaver, + uuid5, + uuid6, +} from "@langchain/langgraph-checkpoint"; +import { SqliteSaver } from "@langchain/langgraph-checkpoint-sqlite"; +import { + createAnyStringSame, FakeChatModel, MemorySaverAssertImmutable, } from "./utils.js"; +import { gatherIterator } from "../utils.js"; import { LastValue } from "../channels/last_value.js"; import { Annotation, @@ -36,32 +47,20 @@ import { import { Topic } from "../channels/topic.js"; import { PregelNode } from "../pregel/read.js"; import { BaseChannel } from "../channels/base.js"; -import { MemorySaver } from "../checkpoint/memory.js"; import { BinaryOperatorAggregate } from "../channels/binop.js"; import { Channel, Pregel, PregelOptions } from "../pregel/index.js"; import { _applyWrites, _localRead, _prepareNextTasks, - _shouldInterrupt, + increment, + shouldInterrupt, } from "../pregel/algo.js"; import { ToolExecutor, createAgentExecutor } from "../prebuilt/index.js"; import { MessageGraph, messagesStateReducer } from "../graph/message.js"; import { PASSTHROUGH } from "../pregel/write.js"; -import { Checkpoint } from "../checkpoint/base.js"; import { GraphRecursionError, 
InvalidUpdateError } from "../errors.js"; -import { SqliteSaver } from "../checkpoint/sqlite.js"; -import { uuid6 } from "../checkpoint/id.js"; -import { Send, TASKS } from "../constants.js"; - -// Tracing slows down the tests -beforeAll(() => { - process.env.LANGCHAIN_TRACING_V2 = "false"; - process.env.LANGCHAIN_ENDPOINT = ""; - process.env.LANGCHAIN_ENDPOINT = ""; - process.env.LANGCHAIN_API_KEY = ""; - process.env.LANGCHAIN_PROJECT = ""; -}); +import { ERROR, INTERRUPT, Send, TASKS } from "../constants.js"; describe("Channel", () => { describe("writeTo", () => { @@ -107,6 +106,76 @@ describe("Channel", () => { }); describe("Pregel", () => { + describe("checkpoint error handling", () => { + it("should catch checkpoint errors", async () => { + class FaultyGetCheckpointer extends MemorySaver { + async getTuple(): Promise { + throw new Error("Faulty get_tuple"); + } + } + + class FaultyPutCheckpointer extends MemorySaver { + async put(): Promise { + throw new Error("Faulty put"); + } + } + + class FaultyPutWritesCheckpointer extends MemorySaver { + async putWrites(): Promise { + throw new Error("Faulty put_writes"); + } + } + + class FaultyVersionCheckpointer extends MemorySaver { + getNextVersion(): number { + throw new Error("Faulty get_next_version"); + } + } + + const logic = () => ({ foo: "" }); + + const State = Annotation.Root({ + foo: Annotation({ + reducer: (_, b) => b, + }), + }); + const builder = new StateGraph(State) + .addNode("agent", logic) + .addEdge("__start__", "agent") + .addEdge("agent", "__end__"); + let graph = builder.compile({ + checkpointer: new FaultyGetCheckpointer(), + }); + await expect(async () => { + await graph.invoke({}, { configurable: { thread_id: "1" } }); + }).rejects.toThrowError("Faulty get_tuple"); + graph = builder.compile({ + checkpointer: new FaultyPutCheckpointer(), + }); + await expect(async () => { + await graph.invoke({}, { configurable: { thread_id: "1" } }); + }).rejects.toThrowError("Faulty put"); + graph = builder.compile({ + checkpointer: new FaultyVersionCheckpointer(), + }); + await expect(async () => { + await graph.invoke({}, { configurable: { thread_id: "1" } }); + }).rejects.toThrowError("Faulty get_next_version"); + const graph2 = new StateGraph(State) + .addNode("agent", logic) + .addEdge("__start__", "agent") + .addEdge("agent", "__end__") + .addNode("parallel", logic) + .addEdge("__start__", "parallel") + .addEdge("parallel", "__end__") + .compile({ + checkpointer: new FaultyPutWritesCheckpointer(), + }); + await expect(async () => { + await graph2.invoke({}, { configurable: { thread_id: "1" } }); + }).rejects.toThrowError("Faulty put_writes"); + }); + }); describe("streamChannelsList", () => { it("should return the expected list of stream channels", () => { // set up test @@ -120,8 +189,8 @@ describe("Pregel", () => { input: new LastValue(), output: new LastValue(), }, - inputs: "input", - outputs: "output", + inputChannels: "input", + outputChannels: "output", streamChannels: "output", }); const pregel2 = new Pregel({ @@ -130,8 +199,8 @@ describe("Pregel", () => { input: new LastValue(), output: new LastValue(), }, - inputs: "input", - outputs: "output", + inputChannels: "input", + outputChannels: "output", streamChannels: ["input", "output"], }); const pregel3 = new Pregel({ @@ -140,8 +209,8 @@ describe("Pregel", () => { input: new LastValue(), output: new LastValue(), }, - inputs: "input", - outputs: "output", + inputChannels: "input", + outputChannels: "output", }); // call method / assertions @@ -186,17 +255,18 @@ 
describe("Pregel", () => { tags: ["hello"], }; + const checkpointer = new MemorySaver(); // create Pregel class const pregel = new Pregel({ nodes, debug: false, - inputs: "outputKey", - outputs: "outputKey", + inputChannels: "outputKey", + outputChannels: "outputKey", interruptBefore: ["one"], interruptAfter: ["one"], streamMode: "values", channels, - checkpointer: new MemorySaver(), + checkpointer, }); // call method / assertions @@ -208,6 +278,7 @@ describe("Pregel", () => { {}, ["one"], // interrupt before ["one"], // interrupt after + checkpointer, ]; const expectedDefaults2 = [ @@ -218,6 +289,7 @@ describe("Pregel", () => { { tags: ["hello"] }, "*", // interrupt before ["one"], // interrupt after + checkpointer, ]; expect(pregel._defaults(config1)).toEqual(expectedDefaults1); @@ -226,7 +298,7 @@ describe("Pregel", () => { }); }); -describe("_shouldInterrupt", () => { +describe("shouldInterrupt", () => { it("should return true if any snapshot channel has been updated since last interrupt and any channel written to is in interrupt nodes list", () => { // set up test const checkpoint: Checkpoint = { @@ -240,7 +312,7 @@ describe("_shouldInterrupt", () => { channel1: 2, // current channel version is greater than last version seen }, versions_seen: { - __interrupt__: { + [INTERRUPT]: { channel1: 1, }, }, @@ -248,11 +320,10 @@ describe("_shouldInterrupt", () => { }; const interruptNodes = ["node1"]; - const snapshotChannels = ["channel1"]; // call method / assertions expect( - _shouldInterrupt(checkpoint, interruptNodes, snapshotChannels, [ + shouldInterrupt(checkpoint, interruptNodes, [ { name: "node1", input: undefined, @@ -260,6 +331,7 @@ describe("_shouldInterrupt", () => { writes: [], triggers: [], config: undefined, + id: uuid5(JSON.stringify(["", {}]), checkpoint.id), }, ]) ).toBe(true); @@ -282,11 +354,10 @@ describe("_shouldInterrupt", () => { }; const interruptNodes = ["node1"]; - const snapshotChannels = ["channel1"]; // call method / assertions expect( - _shouldInterrupt(checkpoint, interruptNodes, snapshotChannels, [ + shouldInterrupt(checkpoint, interruptNodes, [ { name: "node1", input: undefined, @@ -294,6 +365,7 @@ describe("_shouldInterrupt", () => { writes: [], triggers: [], config: undefined, + id: uuid5(JSON.stringify(["", {}]), checkpoint.id), }, ]) ).toBe(true); @@ -320,11 +392,10 @@ describe("_shouldInterrupt", () => { }; const interruptNodes = ["node1"]; - const snapshotChannels = ["channel1"]; // call method / assertions expect( - _shouldInterrupt(checkpoint, interruptNodes, snapshotChannels, [ + shouldInterrupt(checkpoint, interruptNodes, [ { name: "node1", input: undefined, @@ -332,6 +403,7 @@ describe("_shouldInterrupt", () => { writes: [], triggers: [], config: undefined, + id: uuid5(JSON.stringify(["", {}]), checkpoint.id), }, ]) ).toBe(false); @@ -358,11 +430,10 @@ describe("_shouldInterrupt", () => { }; const interruptNodes = ["node1"]; - const snapshotChannels = ["channel1"]; // call method / assertions expect( - _shouldInterrupt(checkpoint, interruptNodes, snapshotChannels, [ + shouldInterrupt(checkpoint, interruptNodes, [ { name: "node2", // node2 is not in interrupt nodes input: undefined, @@ -370,6 +441,7 @@ describe("_shouldInterrupt", () => { writes: [], triggers: [], config: undefined, + id: uuid5(JSON.stringify(["", {}]), checkpoint.id), }, ]) ).toBe(false); @@ -403,9 +475,23 @@ describe("_localRead", () => { const writes: Array<[string, any]> = []; // call method / assertions - expect(_localRead(checkpoint, channels, writes, "channel1", 
false)).toBe(1); expect( - _localRead(checkpoint, channels, writes, ["channel1", "channel2"], false) + _localRead( + checkpoint, + channels, + { name: "test", writes, triggers: [] }, + "channel1", + false + ) + ).toBe(1); + expect( + _localRead( + checkpoint, + channels, + { name: "test", writes, triggers: [] }, + ["channel1", "channel2"], + false + ) ).toEqual({ channel1: 1, channel2: 2 }); }); @@ -438,11 +524,23 @@ describe("_localRead", () => { ]; // call method / assertions - expect(_localRead(checkpoint, channels, writes, "channel1", true)).toBe( - 100 - ); expect( - _localRead(checkpoint, channels, writes, ["channel1", "channel2"], true) + _localRead( + checkpoint, + channels, + { name: "test", writes, triggers: [] }, + "channel1", + true + ) + ).toBe(100); + expect( + _localRead( + checkpoint, + channels, + { name: "test", writes, triggers: [] }, + ["channel1", "channel2"], + true + ) ).toEqual({ channel1: 100, channel2: 200 }); }); }); @@ -488,7 +586,12 @@ describe("_applyWrites", () => { expect(channels.channel2.get()).toBe("channel2value"); expect(checkpoint.channel_versions.channel1).toBe(2); - _applyWrites(checkpoint, channels, pendingWrites); // contains side effects + _applyWrites( + checkpoint, + channels, + [{ name: "foo", writes: pendingWrites, triggers: [] }], + increment + ); // contains side effects expect(channels.channel1.get()).toBe("channel1valueUpdated!"); expect(channels.channel2.get()).toBe("channel2value"); @@ -530,7 +633,9 @@ describe("_applyWrites", () => { // call method / assertions expect(() => { - _applyWrites(checkpoint, channels, pendingWrites); // contains side effects + _applyWrites(checkpoint, channels, [ + { name: "foo", writes: pendingWrites, triggers: [] }, + ]); // contains side effects }).toThrow(InvalidUpdateError); }); }); @@ -540,7 +645,7 @@ describe("_prepareNextTasks", () => { // set up test const checkpoint: Checkpoint = { v: 1, - id: "123", + id: "1ee95cd6-c0f1-5f94-8a67-5c223c8bb55a", ts: "2024-04-19T17:19:07.952Z", channel_values: { channel1: 1, @@ -584,21 +689,28 @@ describe("_prepareNextTasks", () => { }; // call method / assertions - const [newCheckpoint, taskDescriptions] = _prepareNextTasks( + const taskDescriptions = _prepareNextTasks( checkpoint, processes, channels, + { configurable: { thread_id: "foo" } }, false, { step: -1 } ); expect(taskDescriptions.length).toBe(2); - expect(taskDescriptions[0]).toEqual({ name: "node1", input: 1 }); - expect(taskDescriptions[1]).toEqual({ name: "node2", input: 100 }); + expect(taskDescriptions[0]).toEqual({ + id: expect.any(String), + name: "node1", + }); + expect(taskDescriptions[1]).toEqual({ + id: expect.any(String), + name: "node2", + }); // the returned checkpoint is a copy of the passed checkpoint without versionsSeen updated - expect(newCheckpoint.versions_seen.node1.channel1).toBe(1); - expect(newCheckpoint.versions_seen.node2.channel2).toBe(5); + expect(checkpoint.versions_seen.node1.channel1).toBe(1); + expect(checkpoint.versions_seen.node2.channel2).toBe(5); }); it("should return an array of PregelExecutableTasks", () => { @@ -695,10 +807,11 @@ describe("_prepareNextTasks", () => { }; // call method / assertions - const [newCheckpoint, tasks] = _prepareNextTasks( + const tasks = _prepareNextTasks( checkpoint, processes, channels, + { configurable: { thread_id: "foo" } }, true, { step: -1 } ); @@ -713,16 +826,17 @@ describe("_prepareNextTasks", () => { config: { tags: [], configurable: expect.any(Object), - metadata: { + metadata: expect.objectContaining({ langgraph_node: "node1", 
langgraph_step: -1, langgraph_task_idx: 0, langgraph_triggers: [TASKS], - }, + }), recursionLimit: 25, runId: undefined, runName: "node1", }, + id: expect.any(String), }); expect(tasks[1]).toEqual({ name: "node1", @@ -733,41 +847,44 @@ describe("_prepareNextTasks", () => { config: { tags: [], configurable: expect.any(Object), - metadata: { + metadata: expect.objectContaining({ langgraph_node: "node1", langgraph_step: -1, langgraph_task_idx: 1, langgraph_triggers: ["channel1"], - }, + }), recursionLimit: 25, runId: undefined, runName: "node1", }, + id: expect.any(String), }); expect(tasks[2]).toEqual({ name: "node2", input: 100, proc: new RunnablePassthrough(), writes: [], - triggers: ["channel1", "channel2"], + triggers: ["channel1"], config: { tags: [], configurable: expect.any(Object), - metadata: { + metadata: expect.objectContaining({ langgraph_node: "node2", langgraph_step: -1, langgraph_task_idx: 2, - langgraph_triggers: ["channel1", "channel2"], - }, + langgraph_triggers: ["channel1"], + }), recursionLimit: 25, runId: undefined, runName: "node2", }, + id: expect.any(String), }); - expect(newCheckpoint.versions_seen.node1.channel1).toBe(2); - expect(newCheckpoint.versions_seen.node2.channel1).toBe(2); - expect(newCheckpoint.versions_seen.node2.channel2).toBe(5); + // Should not update versions seen, that occurs when applying writes + expect(checkpoint.versions_seen.node1.channel1).toBe(1); + expect(checkpoint.versions_seen.node2.channel1).not.toBeDefined(); + expect(checkpoint.versions_seen.node2.channel2).toBe(5); }); }); @@ -785,8 +902,8 @@ it("can invoke pregel with a single process", async () => { input: new LastValue(), output: new LastValue(), }, - inputs: "input", - outputs: "output", + inputChannels: "input", + outputChannels: "output", }); expect(await app.invoke(2)).toBe(3); @@ -822,8 +939,8 @@ it("should process input and produce output with implicit channels", async () => input: new LastValue(), output: new LastValue(), }, - inputs: "input", - outputs: "output", + inputChannels: "input", + outputChannels: "output", }); expect(await app.invoke(2)).toBe(3); @@ -851,8 +968,8 @@ it("should process input and write kwargs correctly", async () => { fixed: new LastValue(), outputPlusOne: new LastValue(), }, - outputs: ["output", "fixed", "outputPlusOne"], - inputs: "input", + outputChannels: ["output", "fixed", "outputPlusOne"], + inputChannels: "input", }); expect(await app.invoke(2)).toEqual({ @@ -862,6 +979,21 @@ it("should process input and write kwargs correctly", async () => { }); }); +// TODO: Check undefined too +const FALSEY_VALUES = [null, 0, "", [], {}, new Set()]; +it.each(FALSEY_VALUES)( + "should process falsey value: %p", + async (falsyValue) => { + const graph = new Graph() + .addNode("return_falsy_const", () => falsyValue) + .addEdge(START, "return_falsy_const") + .addEdge("return_falsy_const", END) + .compile(); + + expect(await graph.invoke(1)).toBe(falsyValue); + } +); + it("should invoke single process in out objects", async () => { const addOne = jest.fn((x: number): number => x + 1); const chain = Channel.subscribeTo("input") @@ -876,8 +1008,8 @@ it("should invoke single process in out objects", async () => { input: new LastValue(), output: new LastValue(), }, - inputs: "input", - outputs: ["output"], + inputChannels: "input", + outputChannels: ["output"], }); expect(await app.invoke(2)).toEqual({ output: 3 }); @@ -895,8 +1027,8 @@ it("should process input and output as objects", async () => { input: new LastValue(), output: new LastValue(), }, - inputs: 
["input"], - outputs: ["output"], + inputChannels: ["input"], + outputChannels: ["output"], }); expect(await app.invoke({ input: 2 })).toEqual({ output: 3 }); @@ -919,8 +1051,8 @@ it("should invoke two processes and get correct output", async () => { output: new LastValue(), input: new LastValue(), }, - inputs: "input", - outputs: "output", + inputChannels: "input", + outputChannels: "output", streamChannels: ["inbox", "output"], }); @@ -959,9 +1091,9 @@ it("should process two processes with object input and output", async () => { input: new LastValue(), output: new LastValue(), }, - inputs: ["input", "inbox"], + inputChannels: ["input", "inbox"], streamChannels: ["output", "inbox"], - outputs: "output", + outputChannels: "output", }); expect( @@ -982,19 +1114,22 @@ it("should process two processes with object input and output", async () => { expect(await gatherIterator(app.stream({ input: 2, inbox: 12 }))).toEqual([ { inbox: [3], output: 13 }, - { inbox: [], output: 4 }, + { output: 4 }, ]); const debug = await gatherIterator( app.stream({ input: 2, inbox: 12 }, { streamMode: "debug" }) ); + + const anyStringSame = createAnyStringSame(); + expect(debug).toEqual([ { type: "task", timestamp: expect.any(String), step: 0, payload: { - id: "1726020d-12ca-56e2-a3d3-5b5752b526cf", + id: anyStringSame("task1"), name: "one", input: 2, triggers: ["input"], @@ -1005,7 +1140,7 @@ it("should process two processes with object input and output", async () => { timestamp: expect.any(String), step: 0, payload: { - id: "ad0a1023-e379-52e7-be4c-5a2c1433aba0", + id: anyStringSame("task2"), name: "two", input: [12], triggers: ["inbox"], @@ -1016,7 +1151,7 @@ it("should process two processes with object input and output", async () => { timestamp: expect.any(String), step: 0, payload: { - id: "1726020d-12ca-56e2-a3d3-5b5752b526cf", + id: anyStringSame("task1"), name: "one", result: [["inbox", 3]], }, @@ -1026,7 +1161,7 @@ it("should process two processes with object input and output", async () => { timestamp: expect.any(String), step: 0, payload: { - id: "ad0a1023-e379-52e7-be4c-5a2c1433aba0", + id: anyStringSame("task2"), name: "two", result: [["output", 13]], }, @@ -1036,7 +1171,7 @@ it("should process two processes with object input and output", async () => { timestamp: expect.any(String), step: 1, payload: { - id: "92ce7404-7c07-5383-b528-6933ac523e6a", + id: anyStringSame("task3"), name: "two", input: [3], triggers: ["inbox"], @@ -1047,7 +1182,7 @@ it("should process two processes with object input and output", async () => { timestamp: expect.any(String), step: 1, payload: { - id: "92ce7404-7c07-5383-b528-6933ac523e6a", + id: anyStringSame("task3"), name: "two", result: [["output", 4]], }, @@ -1077,8 +1212,8 @@ it("should process batch with two processes and delays", async () => { output: new LastValue(), input: new LastValue(), }, - inputs: "input", - outputs: "output", + inputChannels: "input", + outputChannels: "output", }); expect(await app.batch([3, 2, 1, 3, 5])).toEqual([5, 4, 3, 5, 7]); @@ -1110,6 +1245,237 @@ it("should process batch with two processes and delays with graph", async () => expect(await graph.batch([3, 2, 1, 3, 5])).toEqual([5, 4, 3, 5, 7]); }); +it("should invoke two processes with input/output and interrupt", async () => { + const checkpointer = new MemorySaverAssertImmutable(); + const addOne = jest.fn((x: number) => { + return x + 1; + }); + const one = Channel.subscribeTo("input") + .pipe(addOne) + .pipe(Channel.writeTo(["inbox"])); + const two = Channel.subscribeTo("inbox") + 
.pipe(addOne) + .pipe(Channel.writeTo(["output"])); + + const app = new Pregel({ + nodes: { one, two }, + channels: { + inbox: new LastValue(), + output: new LastValue(), + input: new LastValue(), + }, + inputChannels: "input", + outputChannels: "output", + checkpointer, + interruptAfter: ["one"], + }); + + const thread1 = { configurable: { thread_id: "1" } }; + const thread2 = { configurable: { thread_id: "2" } }; + + // start execution, stop at inbox + expect(await app.invoke(2, thread1)).toBeUndefined(); + + // inbox == 3 + let checkpoint = await checkpointer.get(thread1); + expect(checkpoint?.channel_values.inbox).toBe(3); + + // resume execution, finish + expect(await app.invoke(null, thread1)).toBe(4); + + // start execution again, stop at inbox + expect(await app.invoke(20, thread1)).toBeUndefined(); + + // inbox == 21 + checkpoint = await checkpointer.get(thread1); + expect(checkpoint).not.toBeUndefined(); + expect(checkpoint?.channel_values.inbox).toBe(21); + + // send a new value in, interrupting the previous execution + expect(await app.invoke(3, thread1)).toBeUndefined(); + expect(await app.invoke(null, thread1)).toBe(5); + + // start execution again, stopping at inbox + expect(await app.invoke(20, thread2)).toBeUndefined(); + + // inbox == 21 + let snapshot = await app.getState(thread2); + expect(snapshot.values.inbox).toBe(21); + expect(snapshot.next).toEqual(["two"]); + + // update the state, resume + await app.updateState(thread2, 25, "one"); + expect(await app.invoke(null, thread2)).toBe(26); + + // no pending tasks + snapshot = await app.getState(thread2); + expect(snapshot.next).toEqual([]); + + // list history + const history = await gatherIterator(app.getStateHistory(thread1)); + expect(history).toEqual([ + expect.objectContaining({ + values: { inbox: 4, output: 5, input: 3 }, + tasks: [], + next: [], + config: { + configurable: { + thread_id: "1", + checkpoint_ns: "", + checkpoint_id: expect.any(String), + }, + }, + metadata: { source: "loop", step: 6, writes: 5 }, + createdAt: expect.any(String), + parentConfig: history[1].config, + }), + expect.objectContaining({ + values: { inbox: 4, output: 4, input: 3 }, + tasks: [{ id: expect.any(String), name: "two" }], + next: ["two"], + config: { + configurable: { + thread_id: "1", + checkpoint_ns: "", + checkpoint_id: expect.any(String), + }, + }, + metadata: { source: "loop", step: 5 }, + createdAt: expect.any(String), + parentConfig: history[2].config, + }), + expect.objectContaining({ + values: { inbox: 21, output: 4, input: 3 }, + tasks: [{ id: expect.any(String), name: "one" }], + next: ["one"], + config: { + configurable: { + thread_id: "1", + checkpoint_ns: "", + checkpoint_id: expect.any(String), + }, + }, + metadata: { source: "input", step: 4, writes: 3 }, + createdAt: expect.any(String), + parentConfig: history[3].config, + }), + expect.objectContaining({ + values: { inbox: 21, output: 4, input: 20 }, + tasks: [{ id: expect.any(String), name: "two" }], + next: ["two"], + config: { + configurable: { + thread_id: "1", + checkpoint_ns: "", + checkpoint_id: expect.any(String), + }, + }, + metadata: { source: "loop", step: 3 }, + createdAt: expect.any(String), + parentConfig: history[4].config, + }), + expect.objectContaining({ + values: { inbox: 3, output: 4, input: 20 }, + tasks: [{ id: expect.any(String), name: "one" }], + next: ["one"], + config: { + configurable: { + thread_id: "1", + checkpoint_ns: "", + checkpoint_id: expect.any(String), + }, + }, + metadata: { source: "input", step: 2, writes: 20 }, + 
createdAt: expect.any(String), + parentConfig: history[5].config, + }), + expect.objectContaining({ + values: { inbox: 3, output: 4, input: 2 }, + tasks: [], + next: [], + config: { + configurable: { + thread_id: "1", + checkpoint_ns: "", + checkpoint_id: expect.any(String), + }, + }, + metadata: { source: "loop", step: 1, writes: 4 }, + createdAt: expect.any(String), + parentConfig: history[6].config, + }), + expect.objectContaining({ + values: { inbox: 3, input: 2 }, + tasks: [{ id: expect.any(String), name: "two" }], + next: ["two"], + config: { + configurable: { + thread_id: "1", + checkpoint_ns: "", + checkpoint_id: expect.any(String), + }, + }, + metadata: { source: "loop", step: 0 }, + createdAt: expect.any(String), + parentConfig: history[7].config, + }), + expect.objectContaining({ + values: { input: 2 }, + tasks: [{ id: expect.any(String), name: "one" }], + next: ["one"], + config: { + configurable: { + thread_id: "1", + checkpoint_ns: "", + checkpoint_id: expect.any(String), + }, + }, + metadata: { source: "input", step: -1, writes: 2 }, + createdAt: expect.any(String), + parentConfig: undefined, + }), + ]); + + // forking from any previous checkpoint w/out forking should do nothing + expect( + await gatherIterator( + app.stream(null, { ...history[0].config, streamMode: "updates" }) + ) + ).toEqual([]); + expect( + await gatherIterator( + app.stream(null, { ...history[1].config, streamMode: "updates" }) + ) + ).toEqual([]); + expect( + await gatherIterator( + app.stream(null, { ...history[2].config, streamMode: "updates" }) + ) + ).toEqual([]); + + // forking and re-running from any prev checkpoint should re-run nodes + let forkConfig = await app.updateState(history[0].config, null); + expect( + await gatherIterator( + app.stream(null, { ...forkConfig, streamMode: "updates" }) + ) + ).toEqual([]); + + forkConfig = await app.updateState(history[1].config, null); + expect( + await gatherIterator( + app.stream(null, { ...forkConfig, streamMode: "updates" }) + ) + ).toEqual([{ two: { output: 5 } }]); + + forkConfig = await app.updateState(history[2].config, null); + expect( + await gatherIterator( + app.stream(null, { ...forkConfig, streamMode: "updates" }) + ) + ).toEqual([{ one: { inbox: 4 } }]); +}); + it("should batch many processes with input and output", async () => { const testSize = 100; const addOne = jest.fn((x: number) => x + 1); @@ -1138,8 +1504,8 @@ it("should batch many processes with input and output", async () => { const app = new Pregel({ nodes, channels, - inputs: "input", - outputs: "output", + inputChannels: "input", + outputChannels: "output", }); for (let i = 0; i < 3; i += 1) { @@ -1171,13 +1537,37 @@ it("should raise InvalidUpdateError when the same LastValue channel is updated t output: new LastValue(), input: new LastValue(), }, - inputs: "input", - outputs: "output", + inputChannels: "input", + outputChannels: "output", }); await expect(app.invoke(2)).rejects.toThrow(InvalidUpdateError); }); +it("should fail to process two processes in an invalid way", async () => { + const addOne = jest.fn((x: number): number => x + 1); + + const one = Channel.subscribeTo("input") + .pipe(addOne) + .pipe(Channel.writeTo(["output"])); + const two = Channel.subscribeTo("input") + .pipe(addOne) + .pipe(Channel.writeTo(["output"])); + + const app = new Pregel({ + nodes: { one, two }, + channels: { + output: new LastValue(), + input: new LastValue(), + }, + inputChannels: "input", + outputChannels: "output", + }); + + // LastValue channels can only be updated once per 
iteration + await expect(app.invoke(2)).rejects.toThrow(InvalidUpdateError); +}); + it("should process two inputs to two outputs validly", async () => { const addOne = jest.fn((x: number): number => x + 1); @@ -1195,14 +1585,123 @@ it("should process two inputs to two outputs validly", async () => { input: new LastValue(), output2: new LastValue(), }, - inputs: "input", - outputs: "output", + inputChannels: "input", + outputChannels: "output", }); // An Inbox channel accumulates updates into a sequence expect(await app.invoke(2)).toEqual([3, 3]); }); +it("pending writes resume", async () => { + const checkpointer = new MemorySaverAssertImmutable(); + const StateAnnotation = Annotation.Root({ + value: Annotation({ reducer: (a, b) => a + b }), + }); + class AwhileMaker extends RunnableLambda { + calls: number = 0; + + sleep: number; + + rtn: Record | Error; + + constructor(sleep: number, rtn: Record | Error) { + super({ + func: async () => { + this.calls += 1; + await new Promise((resolve) => setTimeout(resolve, this.sleep)); + if (this.rtn instanceof Error) { + throw this.rtn; + } + return this.rtn; + }, + }); + this.sleep = sleep; + this.rtn = rtn; + } + + reset() { + this.calls = 0; + } + } + + const one = new AwhileMaker(0.2, { value: 2 }); + const two = new AwhileMaker(0.6, new Error("I'm not good")); + const builder = new StateGraph(StateAnnotation) + .addNode("one", one) + .addNode("two", two) + .addEdge("__start__", "one") + .addEdge("__start__", "two") + .addEdge("one", "__end__") + // TODO: Add retry policy + .addEdge("two", "__end__"); + const graph = builder.compile({ checkpointer }); + const thread1 = { configurable: { thread_id: "1" } }; + await expect(graph.invoke({ value: 1 }, thread1)).rejects.toThrow( + "I'm not good" + ); + expect(one.calls).toEqual(1); + expect(two.calls).toEqual(1); + + const state = await graph.getState(thread1); + expect(state).toBeDefined(); + expect(state.values).toEqual({ value: 1 }); + expect(state.next).toEqual(["one", "two"]); + expect(state.tasks).toEqual([ + { id: expect.any(String), name: "one" }, + { + id: expect.any(String), + name: "two", + error: expect.objectContaining({ + message: "I'm not good", + }), + }, + ]); + expect(state.metadata).toEqual({ source: "loop", step: 0 }); + + // should contain pending write of "one" and should contain error from "two" + const checkpoint = await checkpointer.getTuple(thread1); + expect(checkpoint).toBeDefined(); + const expectedWrites = [ + [expect.any(String), "one", "one"], + [expect.any(String), "value", 2], + [ + expect.any(String), + ERROR, + expect.objectContaining({ + message: "I'm not good", + }), + ], + ]; + expect(checkpoint?.pendingWrites).toEqual( + expect.arrayContaining(expectedWrites) + ); + + // both non-error pending writes come from same task + const nonErrorWrites = checkpoint!.pendingWrites!.filter( + (w) => w[1] !== ERROR + ); + expect(nonErrorWrites[0][0]).toEqual(nonErrorWrites[1][0]); + const errorWrites = checkpoint!.pendingWrites!.filter((w) => w[1] === ERROR); + expect(errorWrites[0][0]).not.toEqual(nonErrorWrites[0][0]); + + // resume execution + await expect(graph.invoke(null, thread1)).rejects.toThrow("I'm not good"); + // node "one" succeeded previously, so shouldn't be called again + expect(one.calls).toEqual(1); + // node "two" should have been called once again + expect(two.calls).toEqual(2); + + // confirm no new checkpoints saved + const state2 = await graph.getState(thread1); + expect(state2.metadata).toEqual(state.metadata); + + // resume execution, without 
exception + two.rtn = { value: 3 }; + // both the pending write and the new write were applied, 1 + 2 + 3 = 6 + expect(await graph.invoke(null, thread1)).toEqual({ value: 6 }); +}); + it("should allow a conditional edge after a send", async () => { const State = { items: Annotation({ @@ -1252,8 +1751,8 @@ it("should handle checkpoints correctly", async () => { input: new LastValue(), output: new LastValue(), }, - inputs: "input", - outputs: "output", + inputChannels: "input", + outputChannels: "output", checkpointer: memory, }); @@ -1321,11 +1820,13 @@ it("should process two inputs joined into one topic and produce two outputs", as output: new LastValue(), input: new LastValue(), }, - inputs: "input", - outputs: "output", + inputChannels: "input", + outputChannels: "output", }); // Invoke app and check results + // We get a single array result as chain_four waits for all publishers to finish + // before operating on all elements published to topic_two as an array for (let i = 0; i < 100; i += 1) { expect(await app.invoke(2)).toEqual([13, 13]); } @@ -1355,8 +1856,8 @@ it("should invoke join then call other app", async () => { output: new LastValue(), input: new LastValue(), }, - inputs: "input", - outputs: "output", + inputChannels: "input", + outputChannels: "output", }); const one = Channel.subscribeTo("input") @@ -1384,8 +1885,8 @@ it("should invoke join then call other app", async () => { output: new LastValue(), input: new LastValue(), }, - inputs: "input", - outputs: "output", + inputChannels: "input", + outputChannels: "output", }); // Run the test 10 times sequentially @@ -1425,16 +1926,13 @@ it("should handle two processes with one input and two outputs", async () => { output: new LastValue(), between: new LastValue(), }, - inputs: "input", - outputs: "output", + inputChannels: "input", + outputChannels: "output", streamChannels: ["output", "between"], }); const results = await app.stream(2); - const streamResults = []; - for await (const chunk of results) { - streamResults.push(chunk); - } + const streamResults = await gatherIterator(results); expect(streamResults).toEqual([ { between: 3, output: 3 }, @@ -1456,8 +1954,8 @@ it("should finish executing without output", async () => { between: new LastValue(), output: new LastValue(), }, - inputs: "input", - outputs: "output", + inputChannels: "input", + outputChannels: "output", }); // It finishes executing (once no more messages being published) @@ -1508,15 +2006,15 @@ describe("StateGraph", () => { type Step = [AgentAction | AgentFinish, string]; - type AgentState = { - input: string; - agentOutcome?: AgentAction | AgentFinish; - steps: Step[]; - }; + const AgentAnnotation = Annotation.Root({ + input: Annotation, + agentOutcome: Annotation, + steps: Annotation({ + reducer: (x: Step[], y: Step[]) => x.concat(y), + }), + }); - const executeTools = async ( - data: AgentState - ): Promise> => { + const executeTools = async (data: typeof AgentAnnotation.State) => { const newData = data; const { agentOutcome } = newData; delete newData.agentOutcome; @@ -1529,11 +2027,13 @@ describe("StateGraph", () => { ?.invoke(agentOutcome.toolInput)) ?? 
"failed"; return { - steps: [[agentOutcome, observation]], + steps: [[agentOutcome, observation]] as Step[], }; }; - const shouldContinue = async (data: AgentState): Promise => { + const shouldContinue = async ( + data: typeof AgentAnnotation.State + ): Promise => { if (data.agentOutcome && "returnValues" in data.agentOutcome) { return "exit"; } @@ -1571,7 +2071,7 @@ describe("StateGraph", () => { }; }; - const agent = async (state: AgentState) => { + const agent = async (state: typeof AgentAnnotation.State) => { const chain = prompt.pipe(llm).pipe(agentParser); const result = await chain.invoke({ input: state.input }); return { @@ -1579,27 +2079,35 @@ describe("StateGraph", () => { }; }; - const graph = new StateGraph({ - channels: { - input: null, - agentOutcome: null, - steps: { - value: (x: Step[], y: Step[]) => x.concat(y), - default: () => [], - }, - }, - }) + const graph = new StateGraph(AgentAnnotation) .addNode("agent", agent) + .addNode("passthrough", () => { + return {}; + }) .addNode("tools", executeTools) .addEdge(START, "agent") - .addConditionalEdges("agent", shouldContinue, { + .addEdge("agent", "passthrough") + .addConditionalEdges("passthrough", shouldContinue, { continue: "tools", exit: END, }) .addEdge("tools", "agent") .compile(); - const result = await graph.invoke({ input: "what is the weather in sf?" }); + let callbackOutputs; + const result = await graph.invoke( + { input: "what is the weather in sf?" }, + { + callbacks: [ + { + handleChainEnd(outputs) { + callbackOutputs = outputs; + }, + }, + ], + } + ); + await new Promise((resolve) => setTimeout(resolve, 100)); expect(result).toEqual({ input: "what is the weather in sf?", agentOutcome: { @@ -1627,6 +2135,7 @@ describe("StateGraph", () => { ], ], }); + expect(result).toEqual(callbackOutputs); }); it("can stream", async () => { @@ -1660,7 +2169,7 @@ describe("StateGraph", () => { }; }; - const agent = async (state: AgentState) => { + const agent = async (state: typeof AgentAnnotation.State) => { const chain = prompt.pipe(llm).pipe(agentParser); const result = await chain.invoke({ input: state.input }); return { @@ -1668,16 +2177,7 @@ describe("StateGraph", () => { }; }; - const app = new StateGraph({ - channels: { - input: null, - agentOutcome: null, - steps: { - value: (x: Step[], y: Step[]) => x.concat(y), - default: () => [], - }, - }, - }) + const app = new StateGraph(AgentAnnotation) .addNode("agent", agent) .addNode("tools", executeTools) .addEdge(START, "agent") @@ -1689,10 +2189,7 @@ describe("StateGraph", () => { .compile(); const stream = await app.stream({ input: "what is the weather in sf?" 
}); - const streamItems = []; - for await (const item of stream) { - streamItems.push(item); - } + const streamItems = await gatherIterator(stream); expect(streamItems.length).toBe(5); expect(streamItems[0]).toEqual({ agent: { @@ -2128,10 +2625,7 @@ describe("StateGraph", () => { const stream = await builder.compile().stream({ messages: [inputMessage], }); - let chunks = []; - for await (const chunk of stream) { - chunks.push(chunk); - } + let chunks = await gatherIterator(stream); const nodeOrder = ["agent", "tools", "agent", "tools", "tools", "agent"]; expect(nodeOrder.length).toEqual(chunks.length); expect(chunks).toEqual( @@ -2148,15 +2642,14 @@ describe("StateGraph", () => { interruptAfter: ["agent"], }); const config = { configurable: { thread_id: "1" } }; - chunks = []; - for await (const chunk of await appWithInterrupt.stream( - { - messages: [inputMessage], - }, - config - )) { - chunks.push(chunk); - } + chunks = await gatherIterator( + appWithInterrupt.stream( + { + messages: [inputMessage], + }, + config + ) + ); expect(chunks).toEqual([ { agent: { @@ -2169,6 +2662,7 @@ describe("StateGraph", () => { values: { messages: expectedOutputMessages.slice(0, 2), }, + tasks: [{ id: expect.any(String), name: "tools" }], next: ["tools"], metadata: { source: "loop", @@ -2182,8 +2676,11 @@ describe("StateGraph", () => { config: (await appWithInterrupt.checkpointer?.getTuple(config))?.config, createdAt: (await appWithInterrupt.checkpointer?.getTuple(config)) ?.checkpoint.ts, - // TODO: Populate, see Python test - parentConfig: undefined, + parentConfig: ( + await gatherIterator( + appWithInterrupt.checkpointer!.list(config, { limit: 2 }) + ) + ).slice(-1)[0].config, }); // modify ai message @@ -2215,6 +2712,7 @@ describe("StateGraph", () => { ], }, next: ["tools"], + tasks: [{ id: expect.any(String), name: "tools" }], metadata: { source: "update", step: 2, @@ -2239,14 +2737,14 @@ describe("StateGraph", () => { config: (await appWithInterrupt.checkpointer?.getTuple(config))?.config, createdAt: (await appWithInterrupt.checkpointer?.getTuple(config)) ?.checkpoint.ts, - // TODO: Populate, see Python test - parentConfig: undefined, + parentConfig: ( + await gatherIterator( + appWithInterrupt.checkpointer!.list(config, { limit: 2 }) + ) + ).slice(-1)[0].config, }); - chunks = []; - for await (const chunk of await appWithInterrupt.stream(null, config)) { - chunks.push(chunk); - } + chunks = await gatherIterator(appWithInterrupt.stream(null, config)); expect(chunks).toEqual([ { tools: { @@ -2291,6 +2789,10 @@ describe("StateGraph", () => { ], }, next: ["tools", "tools"], + tasks: [ + { id: expect.any(String), name: "tools" }, + { id: expect.any(String), name: "tools" }, + ], metadata: { source: "loop", step: 4, @@ -2303,8 +2805,11 @@ describe("StateGraph", () => { createdAt: (await appWithInterrupt.checkpointer?.getTuple(config)) ?.checkpoint.ts, config: (await appWithInterrupt.checkpointer?.getTuple(config))?.config, - // TODO: Populate, see Python test - parentConfig: undefined, + parentConfig: ( + await gatherIterator( + appWithInterrupt.checkpointer!.list(config, { limit: 2 }) + ) + ).slice(-1)[0].config, }); // replaces message even if object identity is different, as long as id is the same @@ -2345,6 +2850,7 @@ describe("StateGraph", () => { ], }, next: [], + tasks: [], metadata: { source: "update", step: 5, @@ -2361,8 +2867,11 @@ describe("StateGraph", () => { createdAt: (await appWithInterrupt.checkpointer?.getTuple(config)) ?.checkpoint.ts, config: (await 
appWithInterrupt.checkpointer?.getTuple(config))?.config, - // TODO: Populate, see Python test - parentConfig: undefined, + parentConfig: ( + await gatherIterator( + appWithInterrupt.checkpointer!.list(config, { limit: 2 }) + ) + ).slice(-1)[0].config, }); }); @@ -2715,10 +3224,7 @@ describe("MessageGraph", () => { const stream = await app.stream([ new HumanMessage("what is the weather in sf?"), ]); - const streamItems = []; - for await (const item of stream) { - streamItems.push(item); - } + const streamItems = await gatherIterator(stream); const lastItem = streamItems[streamItems.length - 1]; expect(Object.keys(lastItem)).toEqual(["agent"]); @@ -2728,6 +3234,270 @@ describe("MessageGraph", () => { }); }); +it("checkpoint events", async () => { + const builder = new StateGraph({ + my_key: Annotation({ reducer: (a, b) => a + b }), + market: Annotation, + }) + .addNode("prepare", () => ({ my_key: " prepared" })) + .addNode("tool_two_slow", () => ({ my_key: " slow" })) + .addNode("tool_two_fast", () => ({ my_key: " fast" })) + .addNode("finish", () => ({ my_key: " finished" })) + .addEdge(START, "prepare") + .addEdge("finish", END) + .addEdge("tool_two_fast", "finish") + .addEdge("tool_two_slow", "finish") + .addConditionalEdges({ + source: "prepare", + path: function condition(s) { + return s.market === "DE" ? "tool_two_slow" : "tool_two_fast"; + }, + pathMap: ["tool_two_slow", "tool_two_fast"], + }); + + let graph = builder.compile(); + + expect(await graph.invoke({ my_key: "value", market: "DE" })).toEqual({ + my_key: "value prepared slow finished", + market: "DE", + }); + + expect(await graph.invoke({ my_key: "value", market: "US" })).toEqual({ + my_key: "value prepared fast finished", + market: "US", + }); + + const checkpointer = SqliteSaver.fromConnString(":memory:"); + graph = builder.compile({ checkpointer }); + + const config = { configurable: { thread_id: "10" } }; + const actual = await gatherIterator( + graph.stream( + { my_key: "value", market: "DE" }, + { ...config, streamMode: "debug" } + ) + ); + const anyStringSame = createAnyStringSame(); + + expect(actual).toEqual([ + { + type: "checkpoint", + timestamp: expect.any(String), + step: -1, + payload: { + config: { + tags: [], + metadata: { thread_id: "10" }, + recursion_limit: 25, + configurable: { + thread_id: "10", + checkpoint_ns: "", + checkpoint_id: expect.any(String), + }, + }, + values: {}, + metadata: { + source: "input", + step: -1, + writes: { my_key: "value", market: "DE" }, + }, + next: ["__start__"], + tasks: [{ id: expect.any(String), name: "__start__" }], + }, + }, + { + type: "checkpoint", + timestamp: expect.any(String), + step: 0, + payload: { + config: { + tags: [], + metadata: { thread_id: "10" }, + recursion_limit: 25, + configurable: { + thread_id: "10", + checkpoint_ns: "", + checkpoint_id: expect.any(String), + }, + }, + values: { + my_key: "value", + market: "DE", + }, + metadata: { + source: "loop", + step: 0, + writes: undefined, + }, + next: ["prepare"], + tasks: [{ id: expect.any(String), name: "prepare" }], + }, + }, + { + type: "task", + timestamp: expect.any(String), + step: 1, + payload: { + id: anyStringSame("task1"), + name: "prepare", + input: { my_key: "value", market: "DE" }, + triggers: ["start:prepare"], + }, + }, + { + type: "task_result", + timestamp: expect.any(String), + step: 1, + payload: { + id: anyStringSame("task1"), + name: "prepare", + result: [["my_key", " prepared"]], + }, + }, + { + type: "checkpoint", + timestamp: expect.any(String), + step: 1, + payload: { + config: { + 
tags: [], + metadata: { thread_id: "10" }, + recursion_limit: 25, + configurable: { + thread_id: "10", + checkpoint_ns: "", + checkpoint_id: expect.any(String), + }, + }, + values: { + my_key: "value prepared", + market: "DE", + }, + metadata: { + source: "loop", + step: 1, + writes: { prepare: { my_key: " prepared" } }, + }, + next: ["tool_two_slow"], + tasks: [{ id: expect.any(String), name: "tool_two_slow" }], + }, + }, + { + type: "task", + timestamp: expect.any(String), + step: 2, + payload: { + id: anyStringSame("task2"), + name: "tool_two_slow", + input: { my_key: "value prepared", market: "DE" }, + triggers: ["branch:prepare:condition:tool_two_slow"], + }, + }, + { + type: "task_result", + timestamp: expect.any(String), + step: 2, + payload: { + id: anyStringSame("task2"), + name: "tool_two_slow", + result: [["my_key", " slow"]], + }, + }, + { + type: "checkpoint", + timestamp: expect.any(String), + step: 2, + payload: { + config: { + tags: [], + metadata: { thread_id: "10" }, + recursion_limit: 25, + configurable: { + thread_id: "10", + checkpoint_ns: "", + checkpoint_id: expect.any(String), + }, + }, + values: { + my_key: "value prepared slow", + market: "DE", + }, + metadata: { + source: "loop", + step: 2, + writes: { tool_two_slow: { my_key: " slow" } }, + }, + next: ["finish"], + tasks: [{ id: expect.any(String), name: "finish" }], + }, + }, + { + type: "task", + timestamp: expect.any(String), + step: 3, + payload: { + id: anyStringSame("task3"), + name: "finish", + input: { my_key: "value prepared slow", market: "DE" }, + triggers: ["tool_two_slow"], + }, + }, + { + type: "task_result", + timestamp: expect.any(String), + step: 3, + payload: { + id: anyStringSame("task3"), + name: "finish", + result: [["my_key", " finished"]], + }, + }, + { + type: "checkpoint", + timestamp: expect.any(String), + step: 3, + payload: { + config: { + tags: [], + metadata: { thread_id: "10" }, + recursion_limit: 25, + configurable: { + thread_id: "10", + checkpoint_ns: "", + checkpoint_id: expect.any(String), + }, + }, + values: { + my_key: "value prepared slow finished", + market: "DE", + }, + metadata: { + source: "loop", + step: 3, + writes: { finish: { my_key: " finished" } }, + }, + next: [], + tasks: [], + }, + }, + ]); + + // check if the checkpoints actually match + const checkpoints = await gatherIterator(checkpointer.list(config)); + expect( + checkpoints.reverse().map((i) => { + return { metadata: i.metadata, config: i.config }; + }) + ).toEqual( + actual + .filter((i) => i.type === "checkpoint") + .map((i) => ({ + metadata: i.payload.metadata, + config: { configurable: i.payload.config.configurable }, + })) + ); +}); + it("StateGraph start branch then end", async () => { type State = { my_key: string; @@ -2798,30 +3568,49 @@ it("StateGraph start branch then end", async () => { const thread1 = { configurable: { thread_id: "1" } }; expect( await toolTwoWithCheckpointer.invoke( - { my_key: "value", market: "DE" }, + { my_key: "value ⛰️", market: "DE" }, thread1 ) - ).toEqual({ my_key: "value", market: "DE" }); + ).toEqual({ my_key: "value ⛰️", market: "DE" }); + expect( + ( + await gatherIterator(toolTwoWithCheckpointer.checkpointer!.list(thread1)) + ).map((c) => c.metadata) + ).toEqual([ + { + source: "loop", + step: 0, + }, + { + source: "input", + step: -1, + writes: { my_key: "value ⛰️", market: "DE" }, + }, + ]); expect(await toolTwoWithCheckpointer.getState(thread1)).toEqual({ - values: { my_key: "value", market: "DE" }, + values: { my_key: "value ⛰️", market: "DE" }, next: 
["tool_two_slow"], + tasks: [{ id: expect.any(String), name: "tool_two_slow" }], config: (await toolTwoWithCheckpointer.checkpointer!.getTuple(thread1))! .config, createdAt: (await toolTwoWithCheckpointer.checkpointer!.getTuple(thread1))! .checkpoint.ts, - metadata: { source: "loop", step: 0, writes: null }, + metadata: { source: "loop", step: 0 }, parentConfig: ( - await last(toolTwoWithCheckpointer.checkpointer!.list(thread1, 2)) + await last( + toolTwoWithCheckpointer.checkpointer!.list(thread1, { limit: 2 }) + ) ).config, }); expect(await toolTwoWithCheckpointer.invoke(null, thread1)).toEqual({ - my_key: "value slow", + my_key: "value ⛰️ slow", market: "DE", }); expect(await toolTwoWithCheckpointer.getState(thread1)).toEqual({ - values: { my_key: "value slow", market: "DE" }, + values: { my_key: "value ⛰️ slow", market: "DE" }, next: [], + tasks: [], config: (await toolTwoWithCheckpointer.checkpointer!.getTuple(thread1))! .config, createdAt: (await toolTwoWithCheckpointer.checkpointer!.getTuple(thread1))! @@ -2832,7 +3621,130 @@ it("StateGraph start branch then end", async () => { writes: { tool_two_slow: { my_key: " slow" } }, }, parentConfig: ( - await last(toolTwoWithCheckpointer.checkpointer!.list(thread1, 2)) + await last( + toolTwoWithCheckpointer.checkpointer!.list(thread1, { limit: 2 }) + ) + ).config, + }); + const thread2 = { configurable: { thread_id: "2" } }; + // stop when about to enter node + expect( + await toolTwoWithCheckpointer.invoke( + { my_key: "value", market: "US" }, + thread2 + ) + ).toEqual({ + my_key: "value", + market: "US", + }); + expect(await toolTwoWithCheckpointer.getState(thread2)).toEqual({ + values: { my_key: "value", market: "US" }, + next: ["tool_two_fast"], + tasks: [{ id: expect.any(String), name: "tool_two_fast" }], + config: (await toolTwoWithCheckpointer.checkpointer!.getTuple(thread2))! + .config, + createdAt: (await toolTwoWithCheckpointer.checkpointer!.getTuple(thread2))! + .checkpoint.ts, + metadata: { source: "loop", step: 0 }, + parentConfig: ( + await last( + toolTwoWithCheckpointer.checkpointer!.list(thread2, { limit: 2 }) + ) + ).config, + }); + // resume, for same result as above + expect(await toolTwoWithCheckpointer.invoke(null, thread2)).toEqual({ + my_key: "value fast", + market: "US", + }); + expect(await toolTwoWithCheckpointer.getState(thread2)).toEqual({ + values: { my_key: "value fast", market: "US" }, + next: [], + tasks: [], + config: (await toolTwoWithCheckpointer.checkpointer!.getTuple(thread2))! + .config, + createdAt: (await toolTwoWithCheckpointer.checkpointer!.getTuple(thread2))! + .checkpoint.ts, + metadata: { + source: "loop", + step: 1, + writes: { tool_two_fast: { my_key: " fast" } }, + }, + parentConfig: ( + await last( + toolTwoWithCheckpointer.checkpointer!.list(thread2, { limit: 2 }) + ) + ).config, + }); + const thread3 = { configurable: { thread_id: "3" } }; + // stop when about to enter node + expect( + await toolTwoWithCheckpointer.invoke( + { my_key: "value", market: "US" }, + thread3 + ) + ).toEqual({ + my_key: "value", + market: "US", + }); + expect(await toolTwoWithCheckpointer.getState(thread3)).toEqual({ + values: { my_key: "value", market: "US" }, + next: ["tool_two_fast"], + tasks: [{ id: expect.any(String), name: "tool_two_fast" }], + config: (await toolTwoWithCheckpointer.checkpointer!.getTuple(thread3))! + .config, + createdAt: (await toolTwoWithCheckpointer.checkpointer!.getTuple(thread3))! 
+      .checkpoint.ts,
+    metadata: { source: "loop", step: 0 },
+    parentConfig: (
+      await last(
+        toolTwoWithCheckpointer.checkpointer!.list(thread3, { limit: 2 })
+      )
+    ).config,
+  });
+  // update state
+  await toolTwoWithCheckpointer.updateState(thread3, { my_key: "key" }); // appends to my_key
+  expect(await toolTwoWithCheckpointer.getState(thread3)).toEqual({
+    values: { my_key: "valuekey", market: "US" },
+    next: ["tool_two_fast"],
+    tasks: [{ id: expect.any(String), name: "tool_two_fast" }],
+    config: (await toolTwoWithCheckpointer.checkpointer!.getTuple(thread3))!
+      .config,
+    createdAt: (await toolTwoWithCheckpointer.checkpointer!.getTuple(thread3))!
+      .checkpoint.ts,
+    metadata: {
+      source: "update",
+      step: 1,
+      writes: { [START]: { my_key: "key" } },
+    },
+    parentConfig: (
+      await last(
+        toolTwoWithCheckpointer.checkpointer!.list(thread3, { limit: 2 })
+      )
+    ).config,
+  });
+  // resume, for same result as above
+  expect(await toolTwoWithCheckpointer.invoke(null, thread3)).toEqual({
+    my_key: "valuekey fast",
+    market: "US",
+  });
+  expect(await toolTwoWithCheckpointer.getState(thread3)).toEqual({
+    values: { my_key: "valuekey fast", market: "US" },
+    next: [],
+    tasks: [],
+    config: (await toolTwoWithCheckpointer.checkpointer!.getTuple(thread3))!
+      .config,
+    createdAt: (await toolTwoWithCheckpointer.checkpointer!.getTuple(thread3))!
+      .checkpoint.ts,
+    metadata: {
+      source: "loop",
+      step: 2,
+      writes: { tool_two_fast: { my_key: " fast" } },
+    },
+    parentConfig: (
+      await last(
+        toolTwoWithCheckpointer.checkpointer!.list(thread3, { limit: 2 })
+      )
     ).config,
+  });
+});
diff --git a/langgraph/src/tests/pregel.validate.test.ts b/libs/langgraph/src/tests/pregel.validate.test.ts
similarity index 100%
rename from langgraph/src/tests/pregel.validate.test.ts
rename to libs/langgraph/src/tests/pregel.validate.test.ts
diff --git a/langgraph/src/tests/pregel.write.test.ts b/libs/langgraph/src/tests/pregel.write.test.ts
similarity index 100%
rename from langgraph/src/tests/pregel.write.test.ts
rename to libs/langgraph/src/tests/pregel.write.test.ts
diff --git a/langgraph/src/tests/tracing.int.test.ts b/libs/langgraph/src/tests/tracing.int.test.ts
similarity index 100%
rename from langgraph/src/tests/tracing.int.test.ts
rename to libs/langgraph/src/tests/tracing.int.test.ts
diff --git a/langgraph/src/tests/tracing.test.ts b/libs/langgraph/src/tests/tracing.test.ts
similarity index 81%
rename from langgraph/src/tests/tracing.test.ts
rename to libs/langgraph/src/tests/tracing.test.ts
index 3376eeca..e4ca271c 100644
--- a/langgraph/src/tests/tracing.test.ts
+++ b/libs/langgraph/src/tests/tracing.test.ts
@@ -3,8 +3,9 @@ import { AIMessage, BaseMessage, HumanMessage } from "@langchain/core/messages";
 import { FakeToolCallingChatModel } from "./utils.js";
 // Import from main `@langchain/langgraph` endpoint to turn on automatic config passing
 import { END, START, StateGraph } from "../index.js";
+import { gatherIterator } from "../utils.js";
 
-it("should pass config through if importing from the primary entrypoint", async () => {
+it("stream events for a multi-node graph", async () => {
   const stateGraph = new StateGraph<{
     messages: BaseMessage[];
   }>({
@@ -30,10 +31,7 @@ it("should pass config through if importing from the primary entrypoint", async
     .compile();
 
   const eventStream = graph.streamEvents({ messages: [] }, { version: "v2" });
-  const events = [];
-  for await (const event of eventStream) {
-    events.push(event);
-  }
+  const events = await gatherIterator(eventStream);
 
   expect(events).toEqual([
     {
event: "on_chain_start", @@ -57,12 +55,12 @@ it("should pass config through if importing from the primary entrypoint", async name: "__start__", tags: ["graph:step:0", "langsmith:hidden"], run_id: expect.any(String), - metadata: { + metadata: expect.objectContaining({ langgraph_node: "__start__", langgraph_step: 0, langgraph_task_idx: 0, - langgraph_triggers: ["__pregel_tasks"], - }, + langgraph_triggers: ["__start__"], + }), }, { event: "on_chain_end", @@ -75,12 +73,12 @@ it("should pass config through if importing from the primary entrypoint", async run_id: expect.any(String), name: "__start__", tags: ["graph:step:0", "langsmith:hidden"], - metadata: { + metadata: expect.objectContaining({ langgraph_node: "__start__", langgraph_step: 0, langgraph_task_idx: 0, - langgraph_triggers: ["__pregel_tasks"], - }, + langgraph_triggers: ["__start__"], + }), }, { event: "on_chain_start", @@ -92,12 +90,12 @@ it("should pass config through if importing from the primary entrypoint", async name: "testnode", tags: ["graph:step:1"], run_id: expect.any(String), - metadata: { + metadata: expect.objectContaining({ langgraph_node: "testnode", langgraph_step: 1, langgraph_task_idx: 0, - langgraph_triggers: ["__pregel_tasks"], - }, + langgraph_triggers: ["start:testnode"], + }), }, { event: "on_chain_start", @@ -109,12 +107,12 @@ it("should pass config through if importing from the primary entrypoint", async name: "RunnableLambda", tags: ["seq:step:1"], run_id: expect.any(String), - metadata: { + metadata: expect.objectContaining({ langgraph_node: "testnode", langgraph_step: 1, langgraph_task_idx: 0, - langgraph_triggers: ["__pregel_tasks"], - }, + langgraph_triggers: ["start:testnode"], + }), }, { event: "on_chat_model_start", @@ -126,14 +124,14 @@ it("should pass config through if importing from the primary entrypoint", async name: "model_call", tags: [], run_id: expect.any(String), - metadata: { + metadata: expect.objectContaining({ langgraph_node: "testnode", langgraph_step: 1, langgraph_task_idx: 0, - langgraph_triggers: ["__pregel_tasks"], + langgraph_triggers: ["start:testnode"], ls_model_type: "chat", ls_stop: undefined, - }, + }), }, { event: "on_chat_model_end", @@ -146,15 +144,14 @@ it("should pass config through if importing from the primary entrypoint", async run_id: expect.any(String), name: "model_call", tags: [], - metadata: { + metadata: expect.objectContaining({ langgraph_node: "testnode", langgraph_step: 1, langgraph_task_idx: 0, - langgraph_triggers: ["__pregel_tasks"], - + langgraph_triggers: ["start:testnode"], ls_model_type: "chat", ls_stop: undefined, - }, + }), }, { event: "on_chain_end", @@ -169,12 +166,12 @@ it("should pass config through if importing from the primary entrypoint", async run_id: expect.any(String), name: "RunnableLambda", tags: ["seq:step:1"], - metadata: { + metadata: expect.objectContaining({ langgraph_node: "testnode", langgraph_step: 1, langgraph_task_idx: 0, - langgraph_triggers: ["__pregel_tasks"], - }, + langgraph_triggers: ["start:testnode"], + }), }, { event: "on_chain_start", @@ -186,12 +183,12 @@ it("should pass config through if importing from the primary entrypoint", async name: "ChannelWrite", tags: ["seq:step:2", "langsmith:hidden"], run_id: expect.any(String), - metadata: { + metadata: expect.objectContaining({ langgraph_node: "testnode", langgraph_step: 1, langgraph_task_idx: 0, - langgraph_triggers: ["__pregel_tasks"], - }, + langgraph_triggers: ["start:testnode"], + }), }, { event: "on_chain_end", @@ -204,12 +201,12 @@ it("should pass config through 
if importing from the primary entrypoint", async run_id: expect.any(String), name: "ChannelWrite", tags: ["seq:step:2", "langsmith:hidden"], - metadata: { + metadata: expect.objectContaining({ langgraph_node: "testnode", langgraph_step: 1, langgraph_task_idx: 0, - langgraph_triggers: ["__pregel_tasks"], - }, + langgraph_triggers: ["start:testnode"], + }), }, { event: "on_chain_start", @@ -221,12 +218,12 @@ it("should pass config through if importing from the primary entrypoint", async name: "func", tags: ["seq:step:3"], run_id: expect.any(String), - metadata: { + metadata: expect.objectContaining({ langgraph_node: "testnode", langgraph_step: 1, langgraph_task_idx: 0, - langgraph_triggers: ["__pregel_tasks"], - }, + langgraph_triggers: ["start:testnode"], + }), }, { event: "on_chat_model_start", @@ -238,14 +235,14 @@ it("should pass config through if importing from the primary entrypoint", async name: "conditional_edge_call", tags: [], run_id: expect.any(String), - metadata: { + metadata: expect.objectContaining({ langgraph_node: "testnode", langgraph_step: 1, langgraph_task_idx: 0, - langgraph_triggers: ["__pregel_tasks"], + langgraph_triggers: ["start:testnode"], ls_model_type: "chat", ls_stop: undefined, - }, + }), }, { event: "on_chat_model_end", @@ -258,14 +255,14 @@ it("should pass config through if importing from the primary entrypoint", async run_id: expect.any(String), name: "conditional_edge_call", tags: [], - metadata: { + metadata: expect.objectContaining({ langgraph_node: "testnode", langgraph_step: 1, langgraph_task_idx: 0, - langgraph_triggers: ["__pregel_tasks"], + langgraph_triggers: ["start:testnode"], ls_model_type: "chat", ls_stop: undefined, - }, + }), }, { event: "on_chain_end", @@ -280,12 +277,12 @@ it("should pass config through if importing from the primary entrypoint", async run_id: expect.any(String), name: "func", tags: ["seq:step:3"], - metadata: { + metadata: expect.objectContaining({ langgraph_node: "testnode", langgraph_step: 1, langgraph_task_idx: 0, - langgraph_triggers: ["__pregel_tasks"], - }, + langgraph_triggers: ["start:testnode"], + }), }, { event: "on_chain_end", @@ -298,12 +295,12 @@ it("should pass config through if importing from the primary entrypoint", async run_id: expect.any(String), name: "testnode", tags: ["graph:step:1"], - metadata: { + metadata: expect.objectContaining({ langgraph_node: "testnode", langgraph_step: 1, langgraph_task_idx: 0, - langgraph_triggers: ["__pregel_tasks"], - }, + langgraph_triggers: ["start:testnode"], + }), }, { event: "on_chain_stream", @@ -323,9 +320,7 @@ it("should pass config through if importing from the primary entrypoint", async event: "on_chain_end", data: { output: { - testnode: { - messages: [new AIMessage("hey!")], - }, + messages: [new AIMessage("hey!")], }, }, run_id: expect.any(String), diff --git a/langgraph/src/tests/utils.ts b/libs/langgraph/src/tests/utils.ts similarity index 79% rename from langgraph/src/tests/utils.ts rename to libs/langgraph/src/tests/utils.ts index fa2724cd..69310c37 100644 --- a/langgraph/src/tests/utils.ts +++ b/libs/langgraph/src/tests/utils.ts @@ -9,9 +9,12 @@ import { BaseMessage, AIMessage } from "@langchain/core/messages"; import { ChatResult } from "@langchain/core/outputs"; import { RunnableConfig } from "@langchain/core/runnables"; import { Tool } from "@langchain/core/tools"; +import { + MemorySaver, + Checkpoint, + CheckpointMetadata, +} from "@langchain/langgraph-checkpoint"; import { z } from "zod"; -import { MemorySaver } from "../checkpoint/memory.js"; 
-import { Checkpoint, CheckpointMetadata } from "../checkpoint/base.js";
 
 export interface FakeChatModelArgs extends BaseChatModelParams {
   responses: BaseMessage[];
@@ -183,11 +186,51 @@ export class FakeSearchTool extends Tool {
   }
 }
 
-// https://github.com/tc39/proposal-array-from-async
-export async function gatherIterator<T>(
-  i: AsyncIterable<T> | Promise<AsyncIterable<T>>
-): Promise<Array<T>> {
-  const out: T[] = [];
-  for await (const item of await i) out.push(item);
-  return out;
+class AnyStringSame {
+  $$typeof = Symbol.for("jest.asymmetricMatcher");
+
+  private lastValue: string | undefined = undefined;
+
+  private key: string;
+
+  constructor(key: string) {
+    this.key = key;
+  }
+
+  asymmetricMatch(other: unknown) {
+    // eslint-disable-next-line no-instanceof/no-instanceof
+    if (!(typeof other === "string" || other instanceof String)) {
+      return false;
+    }
+
+    if (this.lastValue != null && this.lastValue !== other) {
+      return false;
+    }
+
+    this.lastValue = other as string;
+    return true;
+  }
+
+  toString() {
+    return "AnyStringSame";
+  }
+
+  getExpectedType() {
+    return "string";
+  }
+
+  toAsymmetricMatcher() {
+    if (this.lastValue != null)
+      return `AnyStringSame<${this.key}, ${this.lastValue}>`;
+    return `AnyStringSame<${this.key}>`;
+  }
 }
+
+export const createAnyStringSame = () => {
+  const memory = new Map();
+
+  return (key: string) => {
+    if (!memory.has(key)) memory.set(key, new AnyStringSame(key));
+    return memory.get(key);
+  };
+};
diff --git a/langgraph/src/utils.ts b/libs/langgraph/src/utils.ts
similarity index 67%
rename from langgraph/src/utils.ts
rename to libs/langgraph/src/utils.ts
index 9641cc6e..fc632aca 100644
--- a/langgraph/src/utils.ts
+++ b/libs/langgraph/src/utils.ts
@@ -89,3 +89,49 @@ export class RunnableCallable extends Runnable {
     return returnValue;
   }
 }
+
+export function prefixGenerator<T, Prefix extends string>(
+  generator: Generator<T>,
+  prefix: Prefix
+): Generator<[Prefix, T]>;
+export function prefixGenerator<T>(
+  generator: Generator<T>,
+  prefix?: undefined
+): Generator<T>;
+export function prefixGenerator<
+  T,
+  Prefix extends string | undefined = undefined
+>(
+  generator: Generator<T>,
+  prefix?: Prefix | undefined
+): Generator<Prefix extends string ? [Prefix, T] : T>;
+export function* prefixGenerator<
+  T,
+  Prefix extends string | undefined = undefined
+>(
+  generator: Generator<T>,
+  prefix?: Prefix | undefined
+): Generator<Prefix extends string ? [Prefix, T] : T> {
+  if (prefix === undefined) {
+    yield* generator as Generator<Prefix extends string ? [Prefix, T] : T>;
+  } else {
+    for (const value of generator) {
+      yield [prefix, value] as Prefix extends string ? [Prefix, T] : T;
+    }
+  }
+}
+
+// https://github.com/tc39/proposal-array-from-async
+export async function gatherIterator<T>(
+  i:
+    | AsyncIterable<T>
+    | Promise<AsyncIterable<T>>
+    | Iterable<T>
+    | Promise<Iterable<T>>
+): Promise<Array<T>> {
+  const out: T[] = [];
+  for await (const item of await i) {
+    out.push(item);
+  }
+  return out;
+}
diff --git a/langgraph/src/web.ts b/libs/langgraph/src/web.ts
similarity index 78%
rename from langgraph/src/web.ts
rename to libs/langgraph/src/web.ts
index 1716eb8d..8eb3bba1 100644
--- a/langgraph/src/web.ts
+++ b/libs/langgraph/src/web.ts
@@ -12,20 +12,20 @@ export {
   type UpdateType,
   type CompiledGraph,
 } from "./graph/index.js";
-export { MemorySaver } from "./checkpoint/memory.js";
-export {
-  type Checkpoint,
-  type CheckpointMetadata,
-  type CheckpointTuple,
-  copyCheckpoint,
-  emptyCheckpoint,
-  BaseCheckpointSaver,
-} from "./checkpoint/base.js";
 export {
   GraphRecursionError,
   GraphValueError,
   InvalidUpdateError,
   EmptyChannelError,
 } from "./errors.js";
-export { type SerializerProtocol } from "./serde/base.js";
 export { Send } from "./constants.js";
+
+export {
+  MemorySaver,
+  type Checkpoint,
+  type CheckpointMetadata,
+  type CheckpointTuple,
+  copyCheckpoint,
+  emptyCheckpoint,
+  BaseCheckpointSaver,
+} from "@langchain/langgraph-checkpoint";
diff --git a/libs/langgraph/tsconfig.cjs.json b/libs/langgraph/tsconfig.cjs.json
new file mode 100644
index 00000000..3b7026ea
--- /dev/null
+++ b/libs/langgraph/tsconfig.cjs.json
@@ -0,0 +1,8 @@
+{
+  "extends": "./tsconfig.json",
+  "compilerOptions": {
+    "module": "commonjs",
+    "declaration": false
+  },
+  "exclude": ["node_modules", "dist", "docs", "**/tests"]
+}
diff --git a/libs/langgraph/tsconfig.json b/libs/langgraph/tsconfig.json
new file mode 100644
index 00000000..bc85d83b
--- /dev/null
+++ b/libs/langgraph/tsconfig.json
@@ -0,0 +1,23 @@
+{
+  "extends": "@tsconfig/recommended",
+  "compilerOptions": {
+    "outDir": "../dist",
+    "rootDir": "./src",
+    "target": "ES2021",
+    "lib": ["ES2021", "ES2022.Object", "DOM"],
+    "module": "ES2020",
+    "moduleResolution": "nodenext",
+    "esModuleInterop": true,
+    "declaration": true,
+    "noImplicitReturns": true,
+    "noFallthroughCasesInSwitch": true,
+    "noUnusedLocals": true,
+    "noUnusedParameters": true,
+    "useDefineForClassFields": true,
+    "strictPropertyInitialization": false,
+    "allowJs": true,
+    "strict": true
+  },
+  "include": ["src/**/*"],
+  "exclude": ["node_modules", "dist", "docs"]
+}
diff --git a/libs/langgraph/turbo.json b/libs/langgraph/turbo.json
new file mode 100644
index 00000000..d1bb60a7
--- /dev/null
+++ b/libs/langgraph/turbo.json
@@ -0,0 +1,11 @@
+{
+  "extends": ["//"],
+  "tasks": {
+    "build": {
+      "outputs": ["**/dist/**"]
+    },
+    "build:internal": {
+      "dependsOn": ["^build:internal"]
+    }
+  }
+}
diff --git a/package.json b/package.json
index dde47760..99a3ca72 100644
--- a/package.json
+++ b/package.json
@@ -14,7 +14,7 @@
     "url": "git@github.com:langchain-ai/langgraphjs.git"
   },
   "workspaces": [
-    "langgraph",
+    "libs/*",
     "examples"
   ],
   "scripts": {
@@ -28,7 +28,7 @@
     "test:exports:docker": "docker compose -f environment_tests/docker-compose.yml up --force-recreate",
     "format": "turbo run format",
     "format:check": "turbo run format:check",
-    "release": "node scripts/release_workspace.js --workspace"
+    "release": "node scripts/release_workspace.cjs --workspace"
   },
   "author": "LangChain",
   "license": "MIT",
diff --git a/scripts/release_workspace.js b/scripts/release_workspace.cjs
similarity index 91%
rename from scripts/release_workspace.js
rename to scripts/release_workspace.cjs
index 42e76f16..c1893a02 100644
--- a/scripts/release_workspace.js
+++ b/scripts/release_workspace.cjs
@@ -1,9 +1,10 @@
-import { execSync, spawn } from 'child_process';
-import { Command } from 'commander';
-import fs from 'fs';
-import path from 'path';
-import readline from 'readline';
-import semver from 'semver';
+const { execSync } = require("child_process");
+const { Command } = require("commander");
+const fs = require("fs");
+const path = require("path");
+const { spawn } = require("child_process");
+const readline = require("readline");
+const semver = require('semver')
 
 const PRIMARY_PROJECTS = ["@langchain/langgraph"];
 const RELEASE_BRANCH = "release";
@@ -26,32 +27,28 @@ function getWorkspaceVersion(workspaceDirectory) {
  * Each object in the return value contains the relative path to the workspace
  * directory, along with the full package.json file contents.
  *
- * @returns {Promise<Array<{ dir: string, packageJSON: Record<string, any>}>>}
+ * @returns {Array<{ dir: string, packageJSON: Record<string, any>}>}
  */
-async function getAllWorkspaces() {
-  const possibleWorkspaceDirectories = ["./langgraph"];
-  const allWorkspacesPromise = possibleWorkspaceDirectories.flatMap(async (workspaceDirectory) => {
+function getAllWorkspaces() {
+  const possibleWorkspaceDirectories = ["./libs/*"];
+  const allWorkspaces = possibleWorkspaceDirectories.flatMap((workspaceDirectory) => {
     if (workspaceDirectory.endsWith("*")) {
       // List all folders inside directory, require, and return the package.json.
       const allDirs = fs.readdirSync(path.join(process.cwd(), workspaceDirectory.replace("*", "")));
-      const filePath = path.join(process.cwd(), `${workspaceDirectory.replace("*", "")}${dir}`, "package.json");
-      const packageJSON = JSON.parse(fs.readFileSync(filePath, "utf-8"));
       const subDirs = allDirs.map((dir) => {
         return {
           dir: `${workspaceDirectory.replace("*", "")}${dir}`,
-          packageJSON: packageJSON
+          packageJSON: require(path.join(process.cwd(), `${workspaceDirectory.replace("*", "")}${dir}`, "package.json"))
         }
       });
       return subDirs;
     }
-    const filePath = path.join(process.cwd(), workspaceDirectory, "package.json")
-    const packageJSON = JSON.parse(fs.readFileSync(filePath, "utf-8"));
+    const packageJSON = require(path.join(process.cwd(), workspaceDirectory, "package.json"));
     return {
       dir: workspaceDirectory,
      packageJSON,
    };
  });
-  const allWorkspaces = await Promise.all(allWorkspacesPromise);
   return allWorkspaces;
 }
 
@@ -269,7 +266,7 @@ async function main() {
   }
 
   // Find the workspace package.json's.
-  const allWorkspaces = await getAllWorkspaces();
+  const allWorkspaces = getAllWorkspaces();
   const matchingWorkspace = allWorkspaces.find(({ packageJSON }) => packageJSON.name === options.workspace);
 
   if (!matchingWorkspace) {
@@ -281,15 +278,16 @@ async function main() {
 
   // Run build, lint, tests
   console.log("Running build, lint, and tests.");
-  execSync(`yarn turbo:command run --filter ${options.workspace} build lint test --concurrency=1`);
+  execSync(`yarn turbo:command run --filter ${options.workspace} build lint test --concurrency 1`);
   console.log("Successfully ran build, lint, and tests.");
 
   // Only run export tests for primary projects.
   if (PRIMARY_PROJECTS.includes(options.workspace.trim())) {
     // Run export tests.
     // LangChain must be built before running export tests.
- console.log("Building '@langchain/langchain' and running export tests."); + console.log("Building '@langchain/langgraph' and running export tests."); execSync(`yarn run turbo:command build --filter=@langchain/langgraph`); + execSync(`yarn run test:exports:docker`); console.log("Successfully built @langchain/langgraph, and tested exports."); } else { console.log("Skipping export tests for non primary project."); diff --git a/tsconfig.json b/tsconfig.json index ed96543d..dc8bbecb 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -17,9 +17,13 @@ "strict": true, "outDir": "dist" }, + "include": [ + "libs/*/src/**/*", + "libs/langgraph/*.d.ts", + ], "exclude": [ "node_modules", - "dist", + "**/dist/", "environment_tests", "examples/web_browsing/labeling.ts", "langgraph/dist/checkpoint/sqlite.js" diff --git a/yarn.lock b/yarn.lock index 670653ad..45fdc23d 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1872,27 +1872,105 @@ __metadata: languageName: node linkType: hard -"@langchain/langgraph@workspace:*, @langchain/langgraph@workspace:langgraph": +"@langchain/langgraph-checkpoint-sqlite@0.0.0, @langchain/langgraph-checkpoint-sqlite@workspace:libs/checkpoint-sqlite": version: 0.0.0-use.local - resolution: "@langchain/langgraph@workspace:langgraph" + resolution: "@langchain/langgraph-checkpoint-sqlite@workspace:libs/checkpoint-sqlite" + dependencies: + "@jest/globals": ^29.5.0 + "@langchain/langgraph-checkpoint": ~0.0.0 + "@langchain/scripts": ^0.0.22 + "@swc/core": ^1.3.90 + "@swc/jest": ^0.2.29 + "@tsconfig/recommended": ^1.0.3 + "@types/better-sqlite3": ^7.6.9 + "@types/uuid": ^10 + "@typescript-eslint/eslint-plugin": ^6.12.0 + "@typescript-eslint/parser": ^6.12.0 + better-sqlite3: ^9.5.0 + dotenv: ^16.3.1 + dpdm: ^3.12.0 + eslint: ^8.33.0 + eslint-config-airbnb-base: ^15.0.0 + eslint-config-prettier: ^8.6.0 + eslint-plugin-import: ^2.29.1 + eslint-plugin-jest: ^28.8.0 + eslint-plugin-no-instanceof: ^1.0.1 + eslint-plugin-prettier: ^4.2.1 + jest: ^29.5.0 + jest-environment-node: ^29.6.4 + prettier: ^2.8.3 + release-it: ^17.6.0 + rollup: ^4.5.2 + ts-jest: ^29.1.0 + tsx: ^4.7.0 + typescript: ^4.9.5 || ^5.4.5 + peerDependencies: + "@langchain/core": ">=0.2.20 <0.3.0" + better-sqlite3: ^9.5.0 + peerDependenciesMeta: + better-sqlite3: + optional: true + languageName: unknown + linkType: soft + +"@langchain/langgraph-checkpoint@0.0.0, @langchain/langgraph-checkpoint@workspace:libs/checkpoint, @langchain/langgraph-checkpoint@~0.0.0": + version: 0.0.0-use.local + resolution: "@langchain/langgraph-checkpoint@workspace:libs/checkpoint" + dependencies: + "@jest/globals": ^29.5.0 + "@langchain/scripts": ^0.0.22 + "@swc/core": ^1.3.90 + "@swc/jest": ^0.2.29 + "@tsconfig/recommended": ^1.0.3 + "@types/uuid": ^10 + "@typescript-eslint/eslint-plugin": ^6.12.0 + "@typescript-eslint/parser": ^6.12.0 + dotenv: ^16.3.1 + dpdm: ^3.12.0 + eslint: ^8.33.0 + eslint-config-airbnb-base: ^15.0.0 + eslint-config-prettier: ^8.6.0 + eslint-plugin-import: ^2.29.1 + eslint-plugin-jest: ^28.8.0 + eslint-plugin-no-instanceof: ^1.0.1 + eslint-plugin-prettier: ^4.2.1 + jest: ^29.5.0 + jest-environment-node: ^29.6.4 + prettier: ^2.8.3 + release-it: ^17.6.0 + rollup: ^4.5.2 + ts-jest: ^29.1.0 + tsx: ^4.7.0 + typescript: ^4.9.5 || ^5.4.5 + uuid: ^10.0.0 + peerDependencies: + "@langchain/core": ">=0.2.20 <0.3.0" + languageName: unknown + linkType: soft + +"@langchain/langgraph@workspace:*, @langchain/langgraph@workspace:libs/langgraph": + version: 0.0.0-use.local + resolution: "@langchain/langgraph@workspace:libs/langgraph" dependencies: 
"@jest/globals": ^29.5.0 "@langchain/anthropic": ^0.2.12 "@langchain/community": ^0.2.25 "@langchain/core": ">=0.2.20 <0.3.0" + "@langchain/langgraph-checkpoint": 0.0.0 + "@langchain/langgraph-checkpoint-sqlite": 0.0.0 "@langchain/openai": ^0.2.4 "@langchain/scripts": ^0.0.23 "@swc/core": ^1.3.90 "@swc/jest": ^0.2.29 "@tsconfig/recommended": ^1.0.3 - "@types/better-sqlite3": ^7.6.9 + "@types/double-ended-queue": ^2 "@types/uuid": ^10 "@typescript-eslint/eslint-plugin": ^6.12.0 "@typescript-eslint/parser": ^6.12.0 "@xenova/transformers": ^2.17.2 - better-sqlite3: ^9.5.0 cheerio: 1.0.0-rc.12 dotenv: ^16.3.1 + double-ended-queue: ^2.1.0-0 dpdm: ^3.12.0 eslint: ^8.33.0 eslint-config-airbnb-base: ^15.0.0 @@ -1960,6 +2038,28 @@ __metadata: languageName: node linkType: hard +"@langchain/scripts@npm:^0.0.22": + version: 0.0.22 + resolution: "@langchain/scripts@npm:0.0.22" + dependencies: + "@rollup/wasm-node": ^4.19.0 + axios: ^1.6.7 + commander: ^11.1.0 + glob: ^10.3.10 + lodash: ^4.17.21 + readline: ^1.3.0 + rimraf: ^5.0.1 + rollup: ^4.5.2 + ts-morph: ^21.0.1 + typescript: ^5.4.5 + bin: + lc-build: bin/build.js + lc_build_v2: bin/build_v2.js + notebook_validate: bin/validate_notebook.js + checksum: d44cd14e83b895959cb8692a8308eb1f740556de359de3f16a7722445c5adaafc0606b9f078743030b17bb633efccb344c8f156d8e57e3221fcc73377427a021 + languageName: node + linkType: hard + "@langchain/scripts@npm:^0.0.23": version: 0.0.23 resolution: "@langchain/scripts@npm:0.0.23" @@ -3018,6 +3118,13 @@ __metadata: languageName: node linkType: hard +"@types/double-ended-queue@npm:^2": + version: 2.1.7 + resolution: "@types/double-ended-queue@npm:2.1.7" + checksum: 6a4a6e339c90048013cf762f07b284492e7e0fe3caf9d8e1d716ca6335d29a23993c21110f869b667a54d605280c072f84030eb18f37f47c1b36884030721d36 + languageName: node + linkType: hard + "@types/estree@npm:*, @types/estree@npm:1.0.5": version: 1.0.5 resolution: "@types/estree@npm:1.0.5" @@ -3868,13 +3975,13 @@ __metadata: linkType: hard "better-sqlite3@npm:^9.5.0": - version: 9.5.0 - resolution: "better-sqlite3@npm:9.5.0" + version: 9.6.0 + resolution: "better-sqlite3@npm:9.6.0" dependencies: bindings: ^1.5.0 node-gyp: latest prebuild-install: ^7.1.1 - checksum: cfa56519755d6dd29ef8361c872c6f29392fcfcc44a435099eb61729aae23c4f18057972592a0b9ff0fcedc98e43db4ee7a1a2de21ab2868f8dcec4f8272ad9d + checksum: be3a1d2a3f6f9b5141be6607a38c0a51fa5849495b071955e507bc0c2a2fb08430852c1bf03796eec1a53344b25645807db48dcb51c71b0662b74c5a70420bb0 languageName: node linkType: hard @@ -5182,6 +5289,13 @@ __metadata: languageName: node linkType: hard +"double-ended-queue@npm:^2.1.0-0": + version: 2.1.0-0 + resolution: "double-ended-queue@npm:2.1.0-0" + checksum: 3030cf9dcf6f8e7d8cb6ae5b7304890445d7c32233a614e400ba7b378086ad76f5822d0e501afd5ffe0af1de4bcb842fa23d4c79174d54f6566399435fafc271 + languageName: node + linkType: hard + "dpdm@npm:^3.12.0": version: 3.14.0 resolution: "dpdm@npm:3.14.0"