diff --git a/.changeset/ai-sdk-chat-transport.md b/.changeset/ai-sdk-chat-transport.md new file mode 100644 index 0000000000..f5cdb9187d --- /dev/null +++ b/.changeset/ai-sdk-chat-transport.md @@ -0,0 +1,42 @@ +--- +"@trigger.dev/sdk": minor +--- + +Add AI SDK chat transport integration via two new subpath exports: + +**`@trigger.dev/sdk/chat`** (frontend, browser-safe): +- `TriggerChatTransport` — custom `ChatTransport` for the AI SDK's `useChat` hook that runs chat completions as durable Trigger.dev tasks +- `createChatTransport()` — factory function + +```tsx +import { useChat } from "@ai-sdk/react"; +import { TriggerChatTransport } from "@trigger.dev/sdk/chat"; + +const { messages, sendMessage } = useChat({ + transport: new TriggerChatTransport({ + task: "my-chat-task", + accessToken, + }), +}); +``` + +**`@trigger.dev/sdk/ai`** (backend, extends existing `ai.tool`/`ai.currentToolOptions`): +- `chatTask()` — pre-typed task wrapper with auto-pipe support +- `pipeChat()` — pipe a `StreamTextResult` or stream to the frontend +- `CHAT_STREAM_KEY` — the default stream key constant +- `ChatTaskPayload` type + +```ts +import { chatTask } from "@trigger.dev/sdk/ai"; +import { streamText, convertToModelMessages } from "ai"; + +export const myChatTask = chatTask({ + id: "my-chat-task", + run: async ({ messages }) => { + return streamText({ + model: openai("gpt-4o"), + messages: convertToModelMessages(messages), + }); + }, +}); +``` diff --git a/docs/docs.json b/docs/docs.json index 5c2bddede0..4694355e5d 100644 --- a/docs/docs.json +++ b/docs/docs.json @@ -74,6 +74,7 @@ "tags", "runs/metadata", "tasks/streams", + "guides/ai-chat", "run-usage", "context", "runs/priority", diff --git a/docs/guides/ai-chat.mdx b/docs/guides/ai-chat.mdx new file mode 100644 index 0000000000..e549226b14 --- /dev/null +++ b/docs/guides/ai-chat.mdx @@ -0,0 +1,268 @@ +--- +title: "AI Chat with useChat" +sidebarTitle: "AI Chat (useChat)" +description: "Run AI SDK chat completions as durable Trigger.dev tasks with built-in realtime streaming." +--- + +## Overview + +The `@trigger.dev/sdk` provides a custom [ChatTransport](https://sdk.vercel.ai/docs/ai-sdk-ui/transport) for the Vercel AI SDK's `useChat` hook. This lets you run chat completions as **durable Trigger.dev tasks** instead of fragile API routes — with automatic retries, observability, and realtime streaming built in. + +**How it works:** +1. The frontend sends messages via `useChat` → `TriggerChatTransport` +2. The transport triggers a Trigger.dev task with the conversation as payload +3. The task streams `UIMessageChunk` events back via Trigger.dev's realtime streams +4. The AI SDK's `useChat` processes the stream natively — text, tool calls, reasoning, etc. + +No custom API routes needed. Your chat backend is a Trigger.dev task. + + + Requires `@trigger.dev/sdk` version **4.4.0 or later** and the `ai` package **v5.0.0 or later**. + + +## Quick start + +### 1. Define a chat task + +Use `chatTask` from `@trigger.dev/sdk/ai` to define a task that handles chat messages. The payload is automatically typed as `ChatTaskPayload`. + +If you return a `StreamTextResult` from `run`, it's **automatically piped** to the frontend. 
+ +```ts trigger/chat.ts +import { chatTask } from "@trigger.dev/sdk/ai"; +import { streamText, convertToModelMessages } from "ai"; +import { openai } from "@ai-sdk/openai"; + +export const myChat = chatTask({ + id: "my-chat", + run: async ({ messages }) => { + // messages is UIMessage[] from the frontend + return streamText({ + model: openai("gpt-4o"), + messages: convertToModelMessages(messages), + }); + // Returning a StreamTextResult auto-pipes it to the frontend + }, +}); +``` + +### 2. Generate an access token + +On your server (e.g. a Next.js API route or server action), create a trigger public token: + +```ts app/actions.ts +"use server"; + +import { auth } from "@trigger.dev/sdk"; + +export async function getChatToken() { + return await auth.createTriggerPublicToken("my-chat"); +} +``` + +### 3. Use in the frontend + +Import `TriggerChatTransport` from `@trigger.dev/sdk/chat` (browser-safe — no server dependencies). + +```tsx app/components/chat.tsx +"use client"; + +import { useChat } from "@ai-sdk/react"; +import { TriggerChatTransport } from "@trigger.dev/sdk/chat"; + +export function Chat({ accessToken }: { accessToken: string }) { + const { messages, sendMessage, status, error } = useChat({ + transport: new TriggerChatTransport({ + task: "my-chat", + accessToken, + }), + }); + + return ( +
+    <div>
+      {messages.map((m) => (
+        <div key={m.id}>
+          {m.role}:
+          {m.parts.map((part, i) =>
+            part.type === "text" ? <span key={i}>{part.text}</span> : null
+          )}
+        </div>
+      ))}
+
+      <form
+        onSubmit={(e) => {
+          e.preventDefault();
+          const input = e.currentTarget.querySelector("input");
+          if (input?.value) {
+            sendMessage({ text: input.value });
+            input.value = "";
+          }
+        }}
+      >
+        <input placeholder="Say something..." />
+        <button type="submit" disabled={status !== "ready"}>Send</button>
+      </form>
+    </div>
+ ); +} +``` + +## Backend patterns + +### Simple: return a StreamTextResult + +The easiest approach — return the `streamText` result from `run` and it's automatically piped to the frontend: + +```ts +import { chatTask } from "@trigger.dev/sdk/ai"; +import { streamText, convertToModelMessages } from "ai"; +import { openai } from "@ai-sdk/openai"; + +export const simpleChat = chatTask({ + id: "simple-chat", + run: async ({ messages }) => { + return streamText({ + model: openai("gpt-4o"), + system: "You are a helpful assistant.", + messages: convertToModelMessages(messages), + }); + }, +}); +``` + +### Complex: use pipeChat() from anywhere + +For complex agent flows where `streamText` is called deep inside your code, use `pipeChat()`. It works from **anywhere inside a task** — even nested function calls. + +```ts trigger/agent-chat.ts +import { chatTask, pipeChat } from "@trigger.dev/sdk/ai"; +import { streamText, convertToModelMessages } from "ai"; +import { openai } from "@ai-sdk/openai"; + +export const agentChat = chatTask({ + id: "agent-chat", + run: async ({ messages }) => { + // Don't return anything — pipeChat is called inside + await runAgentLoop(convertToModelMessages(messages)); + }, +}); + +// This could be deep inside your agent library +async function runAgentLoop(messages: CoreMessage[]) { + // ... agent logic, tool calls, etc. + + const result = streamText({ + model: openai("gpt-4o"), + messages, + }); + + // Pipe from anywhere — no need to return it + await pipeChat(result); +} +``` + +### Manual: use task() with pipeChat() + +If you need full control over task options, use the standard `task()` with `ChatTaskPayload` and `pipeChat()`: + +```ts +import { task } from "@trigger.dev/sdk"; +import { pipeChat, type ChatTaskPayload } from "@trigger.dev/sdk/ai"; +import { streamText, convertToModelMessages } from "ai"; +import { openai } from "@ai-sdk/openai"; + +export const manualChat = task({ + id: "manual-chat", + retry: { maxAttempts: 3 }, + queue: { concurrencyLimit: 10 }, + run: async (payload: ChatTaskPayload) => { + const result = streamText({ + model: openai("gpt-4o"), + messages: convertToModelMessages(payload.messages), + }); + + await pipeChat(result); + }, +}); +``` + +## Frontend options + +### TriggerChatTransport options + +```ts +new TriggerChatTransport({ + // Required + task: "my-chat", // Task ID to trigger + accessToken: token, // Trigger public token or secret key + + // Optional + baseURL: "https://...", // Custom API URL (self-hosted) + streamKey: "chat", // Custom stream key (default: "chat") + headers: { ... }, // Extra headers for API requests + streamTimeoutSeconds: 120, // Stream timeout (default: 120s) +}); +``` + +### Dynamic access tokens + +For token refresh patterns, pass a function: + +```ts +new TriggerChatTransport({ + task: "my-chat", + accessToken: () => getLatestToken(), // Called on each sendMessage +}); +``` + +### Passing extra data + +Use the `body` option on `sendMessage` to pass additional data to the task: + +```ts +sendMessage({ + text: "Hello", +}, { + body: { + systemPrompt: "You are a pirate.", + temperature: 0.9, + }, +}); +``` + +The `body` fields are merged into the `ChatTaskPayload` and available in your task's `run` function. 
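+
+Continuing the example above, the task can read those merged fields straight off the payload. This is a minimal sketch — the inline type assertion, the `"prompted-chat"` task ID, and the fallback system prompt are illustrative assumptions, since the extra fields are not part of `ChatTaskPayload` itself:
+
+```ts
+import { chatTask } from "@trigger.dev/sdk/ai";
+import { streamText, convertToModelMessages } from "ai";
+import { openai } from "@ai-sdk/openai";
+
+export const promptedChat = chatTask({
+  id: "prompted-chat",
+  run: async (payload) => {
+    // Hypothetical extra fields sent via the `body` option on sendMessage
+    const { systemPrompt, temperature } = payload as typeof payload & {
+      systemPrompt?: string;
+      temperature?: number;
+    };
+
+    return streamText({
+      model: openai("gpt-4o"),
+      system: systemPrompt ?? "You are a helpful assistant.",
+      temperature,
+      messages: convertToModelMessages(payload.messages),
+    });
+  },
+});
+```
+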
+ +## ChatTaskPayload + +The payload sent to the task has this shape: + +| Field | Type | Description | +|-------|------|-------------| +| `messages` | `UIMessage[]` | The conversation history | +| `chatId` | `string` | Unique chat session ID | +| `trigger` | `"submit-message" \| "regenerate-message"` | What triggered the request | +| `messageId` | `string \| undefined` | Message ID to regenerate (if applicable) | +| `metadata` | `unknown` | Custom metadata from the frontend | + +Plus any extra fields from the `body` option. + +## Self-hosting + +If you're self-hosting Trigger.dev, pass the `baseURL` option: + +```ts +new TriggerChatTransport({ + task: "my-chat", + accessToken, + baseURL: "https://your-trigger-instance.com", +}); +``` + +## Related + +- [Realtime Streams](/tasks/streams) — How streams work under the hood +- [Using the Vercel AI SDK](/guides/examples/vercel-ai-sdk) — Basic AI SDK usage with Trigger.dev +- [Realtime React Hooks](/realtime/react-hooks/overview) — Lower-level realtime hooks +- [Authentication](/realtime/auth) — Public access tokens and trigger tokens diff --git a/packages/trigger-sdk/package.json b/packages/trigger-sdk/package.json index 9ee58f6598..81ab56cbe4 100644 --- a/packages/trigger-sdk/package.json +++ b/packages/trigger-sdk/package.json @@ -24,7 +24,8 @@ "./package.json": "./package.json", ".": "./src/v3/index.ts", "./v3": "./src/v3/index.ts", - "./ai": "./src/v3/ai.ts" + "./ai": "./src/v3/ai.ts", + "./chat": "./src/v3/chat.ts" }, "sourceDialects": [ "@triggerdotdev/source" @@ -37,6 +38,9 @@ ], "ai": [ "dist/commonjs/v3/ai.d.ts" + ], + "chat": [ + "dist/commonjs/v3/chat.d.ts" ] } }, @@ -123,6 +127,17 @@ "types": "./dist/commonjs/v3/ai.d.ts", "default": "./dist/commonjs/v3/ai.js" } + }, + "./chat": { + "import": { + "@triggerdotdev/source": "./src/v3/chat.ts", + "types": "./dist/esm/v3/chat.d.ts", + "default": "./dist/esm/v3/chat.js" + }, + "require": { + "types": "./dist/commonjs/v3/chat.d.ts", + "default": "./dist/commonjs/v3/chat.js" + } } }, "main": "./dist/commonjs/v3/index.js", diff --git a/packages/trigger-sdk/src/v3/ai.ts b/packages/trigger-sdk/src/v3/ai.ts index 59afa2fe21..9e79df22b8 100644 --- a/packages/trigger-sdk/src/v3/ai.ts +++ b/packages/trigger-sdk/src/v3/ai.ts @@ -3,11 +3,16 @@ import { isSchemaZodEsque, Task, type inferSchemaIn, + type PipeStreamOptions, + type TaskOptions, type TaskSchema, type TaskWithSchema, } from "@trigger.dev/core/v3"; +import type { UIMessage } from "ai"; import { dynamicTool, jsonSchema, JSONSchema7, Schema, Tool, ToolCallOptions, zodSchema } from "ai"; import { metadata } from "./metadata.js"; +import { streams } from "./streams.js"; +import { createTask } from "./shared.js"; const METADATA_KEY = "tool.execute.options"; @@ -116,3 +121,240 @@ export const ai = { tool: toolFromTask, currentToolOptions: getToolOptionsFromMetadata, }; + +// --------------------------------------------------------------------------- +// Chat transport helpers — backend side +// --------------------------------------------------------------------------- + +/** + * The default stream key used for chat transport communication. + * Both `TriggerChatTransport` (frontend) and `pipeChat`/`chatTask` (backend) + * use this key by default. + */ +export const CHAT_STREAM_KEY = "chat"; + +/** + * The payload shape that the chat transport sends to the triggered task. + * + * When using `chatTask()`, the payload is automatically typed — you don't need + * to import this type. 
Use this type only if you're using `task()` directly + * with `pipeChat()`. + */ +export type ChatTaskPayload = { + /** The conversation messages */ + messages: TMessage[]; + + /** The unique identifier for the chat session */ + chatId: string; + + /** + * The trigger type: + * - `"submit-message"`: A new user message + * - `"regenerate-message"`: Regenerate the last assistant response + */ + trigger: "submit-message" | "regenerate-message"; + + /** The ID of the message to regenerate (only for `"regenerate-message"`) */ + messageId?: string; + + /** Custom metadata from the frontend */ + metadata?: unknown; +}; + +/** + * Options for `pipeChat`. + */ +export type PipeChatOptions = { + /** + * Override the stream key. Must match the `streamKey` on `TriggerChatTransport`. + * @default "chat" + */ + streamKey?: string; + + /** An AbortSignal to cancel the stream. */ + signal?: AbortSignal; + + /** + * The target run ID to pipe to. + * @default "self" (current run) + */ + target?: string; +}; + +/** + * An object with a `toUIMessageStream()` method (e.g. `StreamTextResult` from `streamText()`). + */ +type UIMessageStreamable = { + toUIMessageStream: (...args: any[]) => AsyncIterable | ReadableStream; +}; + +function isUIMessageStreamable(value: unknown): value is UIMessageStreamable { + return ( + typeof value === "object" && + value !== null && + "toUIMessageStream" in value && + typeof (value as any).toUIMessageStream === "function" + ); +} + +function isAsyncIterable(value: unknown): value is AsyncIterable { + return typeof value === "object" && value !== null && Symbol.asyncIterator in value; +} + +function isReadableStream(value: unknown): value is ReadableStream { + return typeof value === "object" && value !== null && typeof (value as any).getReader === "function"; +} + +/** + * Pipes a chat stream to the realtime stream, making it available to the + * `TriggerChatTransport` on the frontend. + * + * Accepts: + * - A `StreamTextResult` from `streamText()` (has `.toUIMessageStream()`) + * - An `AsyncIterable` of `UIMessageChunk`s + * - A `ReadableStream` of `UIMessageChunk`s + * + * Must be called from inside a Trigger.dev task's `run` function. + * + * @example + * ```ts + * import { task } from "@trigger.dev/sdk"; + * import { pipeChat, type ChatTaskPayload } from "@trigger.dev/sdk/ai"; + * import { streamText, convertToModelMessages } from "ai"; + * + * export const myChatTask = task({ + * id: "my-chat-task", + * run: async (payload: ChatTaskPayload) => { + * const result = streamText({ + * model: openai("gpt-4o"), + * messages: convertToModelMessages(payload.messages), + * }); + * + * await pipeChat(result); + * }, + * }); + * ``` + * + * @example + * ```ts + * // Works from anywhere inside a task — even deep in your agent code + * async function runAgentLoop(messages: CoreMessage[]) { + * const result = streamText({ model, messages }); + * await pipeChat(result); + * } + * ``` + */ +export async function pipeChat( + source: UIMessageStreamable | AsyncIterable | ReadableStream, + options?: PipeChatOptions +): Promise { + const streamKey = options?.streamKey ?? 
CHAT_STREAM_KEY; + + let stream: AsyncIterable | ReadableStream; + + if (isUIMessageStreamable(source)) { + stream = source.toUIMessageStream(); + } else if (isAsyncIterable(source) || isReadableStream(source)) { + stream = source; + } else { + throw new Error( + "pipeChat: source must be a StreamTextResult (with .toUIMessageStream()), " + + "an AsyncIterable, or a ReadableStream" + ); + } + + const pipeOptions: PipeStreamOptions = {}; + if (options?.signal) { + pipeOptions.signal = options.signal; + } + if (options?.target) { + pipeOptions.target = options.target; + } + + const { waitUntilComplete } = streams.pipe(streamKey, stream, pipeOptions); + await waitUntilComplete(); +} + +/** + * Options for defining a chat task. + * + * Extends the standard `TaskOptions` but pre-types the payload as `ChatTaskPayload` + * and overrides `run` to accept `ChatTaskPayload` directly. + * + * **Auto-piping:** If the `run` function returns a value with `.toUIMessageStream()` + * (like a `StreamTextResult`), the stream is automatically piped to the frontend. + * For complex flows, use `pipeChat()` manually from anywhere in your code. + */ +export type ChatTaskOptions = Omit< + TaskOptions, + "run" +> & { + /** + * The run function for the chat task. + * + * Receives a `ChatTaskPayload` with the conversation messages, chat session ID, + * and trigger type. + * + * **Auto-piping:** If this function returns a value with `.toUIMessageStream()`, + * the stream is automatically piped to the frontend. + */ + run: (payload: ChatTaskPayload) => Promise; +}; + +/** + * Creates a Trigger.dev task pre-configured for AI SDK chat. + * + * - **Pre-types the payload** as `ChatTaskPayload` — no manual typing needed + * - **Auto-pipes the stream** if `run` returns a `StreamTextResult` + * - For complex flows, use `pipeChat()` from anywhere inside your task code + * + * @example + * ```ts + * import { chatTask } from "@trigger.dev/sdk/ai"; + * import { streamText, convertToModelMessages } from "ai"; + * import { openai } from "@ai-sdk/openai"; + * + * // Simple: return streamText result — auto-piped to the frontend + * export const myChatTask = chatTask({ + * id: "my-chat-task", + * run: async ({ messages }) => { + * return streamText({ + * model: openai("gpt-4o"), + * messages: convertToModelMessages(messages), + * }); + * }, + * }); + * ``` + * + * @example + * ```ts + * import { chatTask, pipeChat } from "@trigger.dev/sdk/ai"; + * + * // Complex: pipeChat() from deep in your agent code + * export const myAgentTask = chatTask({ + * id: "my-agent-task", + * run: async ({ messages }) => { + * await runComplexAgentLoop(messages); + * }, + * }); + * ``` + */ +export function chatTask( + options: ChatTaskOptions +): Task { + const { run: userRun, ...restOptions } = options; + + return createTask({ + ...restOptions, + run: async (payload: ChatTaskPayload) => { + const result = await userRun(payload); + + // Auto-pipe if the run function returned a StreamTextResult or similar + if (isUIMessageStreamable(result)) { + await pipeChat(result); + } + + return result; + }, + }); +} diff --git a/packages/trigger-sdk/src/v3/chat.test.ts b/packages/trigger-sdk/src/v3/chat.test.ts new file mode 100644 index 0000000000..ae89f28a8a --- /dev/null +++ b/packages/trigger-sdk/src/v3/chat.test.ts @@ -0,0 +1,920 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; +import type { UIMessage, UIMessageChunk } from "ai"; +import { TriggerChatTransport, createChatTransport } from "./chat.js"; + +// Helper: encode text as SSE 
format +function sseEncode(chunks: UIMessageChunk[]): string { + return chunks.map((chunk, i) => `id: ${i}\ndata: ${JSON.stringify(chunk)}\n\n`).join(""); +} + +// Helper: create a ReadableStream from SSE text +function createSSEStream(sseText: string): ReadableStream { + const encoder = new TextEncoder(); + return new ReadableStream({ + start(controller) { + controller.enqueue(encoder.encode(sseText)); + controller.close(); + }, + }); +} + +// Helper: create test UIMessages with unique IDs +let messageIdCounter = 0; + +function createUserMessage(text: string): UIMessage { + return { + id: `msg-user-${++messageIdCounter}`, + role: "user", + parts: [{ type: "text", text }], + }; +} + +function createAssistantMessage(text: string): UIMessage { + return { + id: `msg-assistant-${++messageIdCounter}`, + role: "assistant", + parts: [{ type: "text", text }], + }; +} + +// Sample UIMessageChunks as the AI SDK would produce +const sampleChunks: UIMessageChunk[] = [ + { type: "text-start", id: "part-1" }, + { type: "text-delta", id: "part-1", delta: "Hello" }, + { type: "text-delta", id: "part-1", delta: " world" }, + { type: "text-delta", id: "part-1", delta: "!" }, + { type: "text-end", id: "part-1" }, +]; + +describe("TriggerChatTransport", () => { + let originalFetch: typeof global.fetch; + + beforeEach(() => { + originalFetch = global.fetch; + }); + + afterEach(() => { + global.fetch = originalFetch; + vi.restoreAllMocks(); + }); + + describe("constructor", () => { + it("should create transport with required options", () => { + const transport = new TriggerChatTransport({ + task: "my-chat-task", + accessToken: "test-token", + }); + + expect(transport).toBeInstanceOf(TriggerChatTransport); + }); + + it("should accept optional configuration", () => { + const transport = new TriggerChatTransport({ + task: "my-chat-task", + accessToken: "test-token", + baseURL: "https://custom.trigger.dev", + streamKey: "custom-stream", + headers: { "X-Custom": "value" }, + }); + + expect(transport).toBeInstanceOf(TriggerChatTransport); + }); + + it("should accept a function for accessToken", () => { + let tokenCallCount = 0; + const transport = new TriggerChatTransport({ + task: "my-chat-task", + accessToken: () => { + tokenCallCount++; + return `dynamic-token-${tokenCallCount}`; + }, + }); + + expect(transport).toBeInstanceOf(TriggerChatTransport); + }); + }); + + describe("sendMessages", () => { + it("should trigger the task and return a ReadableStream of UIMessageChunks", async () => { + const triggerRunId = "run_abc123"; + const publicToken = "pub_token_xyz"; + + // Mock fetch to handle both the trigger request and the SSE stream request + global.fetch = vi.fn().mockImplementation(async (url: string | URL, init?: RequestInit) => { + const urlStr = typeof url === "string" ? 
url : url.toString(); + + // Handle the task trigger request + if (urlStr.includes("/api/v1/tasks/") && urlStr.includes("/trigger")) { + return new Response( + JSON.stringify({ id: triggerRunId }), + { + status: 200, + headers: { + "content-type": "application/json", + "x-trigger-jwt": publicToken, + }, + } + ); + } + + // Handle the SSE stream request + if (urlStr.includes("/realtime/v1/streams/")) { + const sseText = sseEncode(sampleChunks); + return new Response(createSSEStream(sseText), { + status: 200, + headers: { + "content-type": "text/event-stream", + "X-Stream-Version": "v1", + }, + }); + } + + throw new Error(`Unexpected fetch URL: ${urlStr}`); + }); + + const transport = new TriggerChatTransport({ + task: "my-chat-task", + accessToken: "test-token", + baseURL: "https://api.test.trigger.dev", + }); + + const messages: UIMessage[] = [createUserMessage("Hello!")]; + + const stream = await transport.sendMessages({ + trigger: "submit-message", + chatId: "chat-1", + messageId: undefined, + messages, + abortSignal: undefined, + }); + + expect(stream).toBeInstanceOf(ReadableStream); + + // Read all chunks from the stream + const reader = stream.getReader(); + const receivedChunks: UIMessageChunk[] = []; + + while (true) { + const { done, value } = await reader.read(); + if (done) break; + receivedChunks.push(value); + } + + expect(receivedChunks).toHaveLength(sampleChunks.length); + expect(receivedChunks[0]).toEqual({ type: "text-start", id: "part-1" }); + expect(receivedChunks[1]).toEqual({ type: "text-delta", id: "part-1", delta: "Hello" }); + expect(receivedChunks[4]).toEqual({ type: "text-end", id: "part-1" }); + }); + + it("should send the correct payload to the trigger API", async () => { + const fetchSpy = vi.fn().mockImplementation(async (url: string | URL, init?: RequestInit) => { + const urlStr = typeof url === "string" ? url : url.toString(); + + if (urlStr.includes("/api/v1/tasks/") && urlStr.includes("/trigger")) { + return new Response( + JSON.stringify({ id: "run_test" }), + { + status: 200, + headers: { + "content-type": "application/json", + "x-trigger-jwt": "pub_token", + }, + } + ); + } + + if (urlStr.includes("/realtime/v1/streams/")) { + return new Response(createSSEStream(""), { + status: 200, + headers: { + "content-type": "text/event-stream", + "X-Stream-Version": "v1", + }, + }); + } + + throw new Error(`Unexpected fetch URL: ${urlStr}`); + }); + + global.fetch = fetchSpy; + + const transport = new TriggerChatTransport({ + task: "my-chat-task", + accessToken: "test-token", + baseURL: "https://api.test.trigger.dev", + }); + + const messages: UIMessage[] = [createUserMessage("Hello!")]; + + await transport.sendMessages({ + trigger: "submit-message", + chatId: "chat-123", + messageId: undefined, + messages, + abortSignal: undefined, + metadata: { custom: "data" }, + }); + + // Verify the trigger fetch call + const triggerCall = fetchSpy.mock.calls.find((call: any[]) => + (typeof call[0] === "string" ? call[0] : call[0].toString()).includes("/trigger") + ); + + expect(triggerCall).toBeDefined(); + const triggerUrl = typeof triggerCall![0] === "string" ? 
triggerCall![0] : triggerCall![0].toString(); + expect(triggerUrl).toContain("/api/v1/tasks/my-chat-task/trigger"); + + const triggerBody = JSON.parse(triggerCall![1]?.body as string); + const payload = JSON.parse(triggerBody.payload); + expect(payload.messages).toEqual(messages); + expect(payload.chatId).toBe("chat-123"); + expect(payload.trigger).toBe("submit-message"); + expect(payload.metadata).toEqual({ custom: "data" }); + }); + + it("should use the correct stream URL with custom streamKey", async () => { + const fetchSpy = vi.fn().mockImplementation(async (url: string | URL) => { + const urlStr = typeof url === "string" ? url : url.toString(); + + if (urlStr.includes("/trigger")) { + return new Response( + JSON.stringify({ id: "run_custom" }), + { + status: 200, + headers: { + "content-type": "application/json", + "x-trigger-jwt": "token", + }, + } + ); + } + + if (urlStr.includes("/realtime/v1/streams/")) { + return new Response(createSSEStream(""), { + status: 200, + headers: { + "content-type": "text/event-stream", + "X-Stream-Version": "v1", + }, + }); + } + + throw new Error(`Unexpected fetch URL: ${urlStr}`); + }); + + global.fetch = fetchSpy; + + const transport = new TriggerChatTransport({ + task: "my-task", + accessToken: "token", + baseURL: "https://api.test.trigger.dev", + streamKey: "my-custom-stream", + }); + + await transport.sendMessages({ + trigger: "submit-message", + chatId: "chat-1", + messageId: undefined, + messages: [createUserMessage("test")], + abortSignal: undefined, + }); + + // Verify the stream URL uses the custom stream key + const streamCall = fetchSpy.mock.calls.find((call: any[]) => + (typeof call[0] === "string" ? call[0] : call[0].toString()).includes("/realtime/v1/streams/") + ); + + expect(streamCall).toBeDefined(); + const streamUrl = typeof streamCall![0] === "string" ? streamCall![0] : streamCall![0].toString(); + expect(streamUrl).toContain("/realtime/v1/streams/run_custom/my-custom-stream"); + }); + + it("should include extra headers in stream requests", async () => { + const fetchSpy = vi.fn().mockImplementation(async (url: string | URL) => { + const urlStr = typeof url === "string" ? url : url.toString(); + + if (urlStr.includes("/trigger")) { + return new Response( + JSON.stringify({ id: "run_hdrs" }), + { + status: 200, + headers: { + "content-type": "application/json", + "x-trigger-jwt": "token", + }, + } + ); + } + + if (urlStr.includes("/realtime/v1/streams/")) { + return new Response(createSSEStream(""), { + status: 200, + headers: { + "content-type": "text/event-stream", + "X-Stream-Version": "v1", + }, + }); + } + + throw new Error(`Unexpected fetch URL: ${urlStr}`); + }); + + global.fetch = fetchSpy; + + const transport = new TriggerChatTransport({ + task: "my-task", + accessToken: "token", + baseURL: "https://api.test.trigger.dev", + headers: { "X-Custom-Header": "custom-value" }, + }); + + await transport.sendMessages({ + trigger: "submit-message", + chatId: "chat-1", + messageId: undefined, + messages: [createUserMessage("test")], + abortSignal: undefined, + }); + + // Verify the stream request includes custom headers + const streamCall = fetchSpy.mock.calls.find((call: any[]) => + (typeof call[0] === "string" ? 
call[0] : call[0].toString()).includes("/realtime/v1/streams/") + ); + + expect(streamCall).toBeDefined(); + const requestHeaders = streamCall![1]?.headers as Record; + expect(requestHeaders["X-Custom-Header"]).toBe("custom-value"); + }); + }); + + describe("reconnectToStream", () => { + it("should return null when no session exists for chatId", async () => { + const transport = new TriggerChatTransport({ + task: "my-task", + accessToken: "token", + }); + + const result = await transport.reconnectToStream({ + chatId: "nonexistent-chat", + }); + + expect(result).toBeNull(); + }); + + it("should reconnect to an existing session", async () => { + const triggerRunId = "run_reconnect"; + const publicToken = "pub_reconnect_token"; + + global.fetch = vi.fn().mockImplementation(async (url: string | URL) => { + const urlStr = typeof url === "string" ? url : url.toString(); + + if (urlStr.includes("/trigger")) { + return new Response( + JSON.stringify({ id: triggerRunId }), + { + status: 200, + headers: { + "content-type": "application/json", + "x-trigger-jwt": publicToken, + }, + } + ); + } + + if (urlStr.includes("/realtime/v1/streams/")) { + const chunks: UIMessageChunk[] = [ + { type: "text-start", id: "part-1" }, + { type: "text-delta", id: "part-1", delta: "Reconnected!" }, + { type: "text-end", id: "part-1" }, + ]; + return new Response(createSSEStream(sseEncode(chunks)), { + status: 200, + headers: { + "content-type": "text/event-stream", + "X-Stream-Version": "v1", + }, + }); + } + + throw new Error(`Unexpected fetch URL: ${urlStr}`); + }); + + const transport = new TriggerChatTransport({ + task: "my-task", + accessToken: "token", + baseURL: "https://api.test.trigger.dev", + }); + + // First, send messages to establish a session + await transport.sendMessages({ + trigger: "submit-message", + chatId: "chat-reconnect", + messageId: undefined, + messages: [createUserMessage("Hello")], + abortSignal: undefined, + }); + + // Now reconnect + const stream = await transport.reconnectToStream({ + chatId: "chat-reconnect", + }); + + expect(stream).toBeInstanceOf(ReadableStream); + + // Read the stream + const reader = stream!.getReader(); + const receivedChunks: UIMessageChunk[] = []; + + while (true) { + const { done, value } = await reader.read(); + if (done) break; + receivedChunks.push(value); + } + + expect(receivedChunks.length).toBeGreaterThan(0); + }); + }); + + describe("createChatTransport", () => { + it("should create a TriggerChatTransport instance", () => { + const transport = createChatTransport({ + task: "my-task", + accessToken: "token", + }); + + expect(transport).toBeInstanceOf(TriggerChatTransport); + }); + + it("should pass options through to the transport", () => { + const transport = createChatTransport({ + task: "custom-task", + accessToken: "custom-token", + baseURL: "https://custom.example.com", + streamKey: "custom-key", + headers: { "X-Test": "value" }, + }); + + expect(transport).toBeInstanceOf(TriggerChatTransport); + }); + }); + + describe("publicAccessToken from trigger response", () => { + it("should use publicAccessToken from response body when x-trigger-jwt header is absent", async () => { + const fetchSpy = vi.fn().mockImplementation(async (url: string | URL, init?: RequestInit) => { + const urlStr = typeof url === "string" ? url : url.toString(); + + if (urlStr.includes("/trigger")) { + // Return without x-trigger-jwt header — the ApiClient will attempt + // to generate a JWT from the access token. 
In this test the token + // generation will add a publicAccessToken to the result. + return new Response( + JSON.stringify({ id: "run_pat" }), + { + status: 200, + headers: { + "content-type": "application/json", + // Include x-trigger-jwt to simulate the server returning a public token + "x-trigger-jwt": "server-generated-public-token", + }, + } + ); + } + + if (urlStr.includes("/realtime/v1/streams/")) { + // Verify the Authorization header uses the server-generated token + const authHeader = (init?.headers as Record)?.["Authorization"]; + expect(authHeader).toBe("Bearer server-generated-public-token"); + + const chunks: UIMessageChunk[] = [ + { type: "text-start", id: "p1" }, + { type: "text-end", id: "p1" }, + ]; + return new Response(createSSEStream(sseEncode(chunks)), { + status: 200, + headers: { + "content-type": "text/event-stream", + "X-Stream-Version": "v1", + }, + }); + } + + throw new Error(`Unexpected fetch URL: ${urlStr}`); + }); + + global.fetch = fetchSpy; + + const transport = new TriggerChatTransport({ + task: "my-task", + accessToken: "caller-token", + baseURL: "https://api.test.trigger.dev", + }); + + const stream = await transport.sendMessages({ + trigger: "submit-message", + chatId: "chat-pat", + messageId: undefined, + messages: [createUserMessage("test")], + abortSignal: undefined, + }); + + // Consume the stream + const reader = stream.getReader(); + while (true) { + const { done } = await reader.read(); + if (done) break; + } + + // Verify the stream subscription used the public token, not the caller token + const streamCall = fetchSpy.mock.calls.find((call: any[]) => + (typeof call[0] === "string" ? call[0] : call[0].toString()).includes("/realtime/v1/streams/") + ); + expect(streamCall).toBeDefined(); + const streamHeaders = streamCall![1]?.headers as Record; + expect(streamHeaders["Authorization"]).toBe("Bearer server-generated-public-token"); + }); + }); + + describe("error handling", () => { + it("should propagate trigger API errors", async () => { + global.fetch = vi.fn().mockImplementation(async (url: string | URL) => { + const urlStr = typeof url === "string" ? url : url.toString(); + + if (urlStr.includes("/trigger")) { + return new Response( + JSON.stringify({ error: "Task not found" }), + { + status: 404, + headers: { "content-type": "application/json" }, + } + ); + } + + throw new Error(`Unexpected fetch URL: ${urlStr}`); + }); + + const transport = new TriggerChatTransport({ + task: "nonexistent-task", + accessToken: "token", + baseURL: "https://api.test.trigger.dev", + }); + + await expect( + transport.sendMessages({ + trigger: "submit-message", + chatId: "chat-error", + messageId: undefined, + messages: [createUserMessage("test")], + abortSignal: undefined, + }) + ).rejects.toThrow(); + }); + }); + + describe("abort signal", () => { + it("should close the stream gracefully when aborted", async () => { + let streamResolve: (() => void) | undefined; + const streamWait = new Promise((resolve) => { + streamResolve = resolve; + }); + + global.fetch = vi.fn().mockImplementation(async (url: string | URL) => { + const urlStr = typeof url === "string" ? 
url : url.toString(); + + if (urlStr.includes("/trigger")) { + return new Response( + JSON.stringify({ id: "run_abort" }), + { + status: 200, + headers: { + "content-type": "application/json", + "x-trigger-jwt": "token", + }, + } + ); + } + + if (urlStr.includes("/realtime/v1/streams/")) { + // Create a slow stream that waits before sending data + const stream = new ReadableStream({ + async start(controller) { + const encoder = new TextEncoder(); + controller.enqueue( + encoder.encode(`id: 0\ndata: ${JSON.stringify({ type: "text-start", id: "p1" })}\n\n`) + ); + // Wait for the test to signal it's done + await streamWait; + controller.close(); + }, + }); + + return new Response(stream, { + status: 200, + headers: { + "content-type": "text/event-stream", + "X-Stream-Version": "v1", + }, + }); + } + + throw new Error(`Unexpected fetch URL: ${urlStr}`); + }); + + const abortController = new AbortController(); + + const transport = new TriggerChatTransport({ + task: "my-task", + accessToken: "token", + baseURL: "https://api.test.trigger.dev", + }); + + const stream = await transport.sendMessages({ + trigger: "submit-message", + chatId: "chat-abort", + messageId: undefined, + messages: [createUserMessage("test")], + abortSignal: abortController.signal, + }); + + // Read the first chunk + const reader = stream.getReader(); + const first = await reader.read(); + expect(first.done).toBe(false); + + // Abort and clean up + abortController.abort(); + streamResolve?.(); + + // The stream should close — reading should return done + const next = await reader.read(); + expect(next.done).toBe(true); + }); + }); + + describe("multiple sessions", () => { + it("should track multiple chat sessions independently", async () => { + let callCount = 0; + + global.fetch = vi.fn().mockImplementation(async (url: string | URL) => { + const urlStr = typeof url === "string" ? 
url : url.toString(); + + if (urlStr.includes("/trigger")) { + callCount++; + return new Response( + JSON.stringify({ id: `run_multi_${callCount}` }), + { + status: 200, + headers: { + "content-type": "application/json", + "x-trigger-jwt": `token_${callCount}`, + }, + } + ); + } + + if (urlStr.includes("/realtime/v1/streams/")) { + return new Response(createSSEStream(""), { + status: 200, + headers: { + "content-type": "text/event-stream", + "X-Stream-Version": "v1", + }, + }); + } + + throw new Error(`Unexpected fetch URL: ${urlStr}`); + }); + + const transport = new TriggerChatTransport({ + task: "my-task", + accessToken: "token", + baseURL: "https://api.test.trigger.dev", + }); + + // Start two independent chat sessions + await transport.sendMessages({ + trigger: "submit-message", + chatId: "session-a", + messageId: undefined, + messages: [createUserMessage("Hello A")], + abortSignal: undefined, + }); + + await transport.sendMessages({ + trigger: "submit-message", + chatId: "session-b", + messageId: undefined, + messages: [createUserMessage("Hello B")], + abortSignal: undefined, + }); + + // Both sessions should be independently reconnectable + const streamA = await transport.reconnectToStream({ chatId: "session-a" }); + const streamB = await transport.reconnectToStream({ chatId: "session-b" }); + const streamC = await transport.reconnectToStream({ chatId: "nonexistent" }); + + expect(streamA).toBeInstanceOf(ReadableStream); + expect(streamB).toBeInstanceOf(ReadableStream); + expect(streamC).toBeNull(); + }); + }); + + describe("dynamic accessToken", () => { + it("should call the accessToken function for each sendMessages call", async () => { + let tokenCallCount = 0; + + global.fetch = vi.fn().mockImplementation(async (url: string | URL) => { + const urlStr = typeof url === "string" ? 
url : url.toString(); + + if (urlStr.includes("/trigger")) { + return new Response( + JSON.stringify({ id: `run_dyn_${tokenCallCount}` }), + { + status: 200, + headers: { + "content-type": "application/json", + "x-trigger-jwt": "stream-token", + }, + } + ); + } + + if (urlStr.includes("/realtime/v1/streams/")) { + const chunks: UIMessageChunk[] = [ + { type: "text-start", id: "p1" }, + { type: "text-end", id: "p1" }, + ]; + return new Response(createSSEStream(sseEncode(chunks)), { + status: 200, + headers: { + "content-type": "text/event-stream", + "X-Stream-Version": "v1", + }, + }); + } + + throw new Error(`Unexpected fetch URL: ${urlStr}`); + }); + + const transport = new TriggerChatTransport({ + task: "my-task", + accessToken: () => { + tokenCallCount++; + return `dynamic-token-${tokenCallCount}`; + }, + baseURL: "https://api.test.trigger.dev", + }); + + // First call — the token function should be invoked + await transport.sendMessages({ + trigger: "submit-message", + chatId: "chat-dyn-1", + messageId: undefined, + messages: [createUserMessage("first")], + abortSignal: undefined, + }); + + const firstCount = tokenCallCount; + expect(firstCount).toBeGreaterThanOrEqual(1); + + // Second call — the token function should be invoked again + await transport.sendMessages({ + trigger: "submit-message", + chatId: "chat-dyn-2", + messageId: undefined, + messages: [createUserMessage("second")], + abortSignal: undefined, + }); + + // Token function was called at least once more + expect(tokenCallCount).toBeGreaterThan(firstCount); + }); + }); + + describe("body merging", () => { + it("should merge ChatRequestOptions.body into the task payload", async () => { + const fetchSpy = vi.fn().mockImplementation(async (url: string | URL) => { + const urlStr = typeof url === "string" ? url : url.toString(); + + if (urlStr.includes("/trigger")) { + return new Response( + JSON.stringify({ id: "run_body" }), + { + status: 200, + headers: { + "content-type": "application/json", + "x-trigger-jwt": "token", + }, + } + ); + } + + if (urlStr.includes("/realtime/v1/streams/")) { + return new Response(createSSEStream(""), { + status: 200, + headers: { + "content-type": "text/event-stream", + "X-Stream-Version": "v1", + }, + }); + } + + throw new Error(`Unexpected fetch URL: ${urlStr}`); + }); + + global.fetch = fetchSpy; + + const transport = new TriggerChatTransport({ + task: "my-task", + accessToken: "token", + baseURL: "https://api.test.trigger.dev", + }); + + await transport.sendMessages({ + trigger: "submit-message", + chatId: "chat-body", + messageId: undefined, + messages: [createUserMessage("test")], + abortSignal: undefined, + body: { systemPrompt: "You are helpful", temperature: 0.7 }, + }); + + const triggerCall = fetchSpy.mock.calls.find((call: any[]) => + (typeof call[0] === "string" ? call[0] : call[0].toString()).includes("/trigger") + ); + + const triggerBody = JSON.parse(triggerCall![1]?.body as string); + const payload = JSON.parse(triggerBody.payload); + + // body properties should be merged into the payload + expect(payload.systemPrompt).toBe("You are helpful"); + expect(payload.temperature).toBe(0.7); + // Standard fields should still be present + expect(payload.chatId).toBe("chat-body"); + expect(payload.trigger).toBe("submit-message"); + }); + }); + + describe("message types", () => { + it("should handle regenerate-message trigger", async () => { + const fetchSpy = vi.fn().mockImplementation(async (url: string | URL) => { + const urlStr = typeof url === "string" ? 
url : url.toString(); + + if (urlStr.includes("/trigger")) { + return new Response( + JSON.stringify({ id: "run_regen" }), + { + status: 200, + headers: { + "content-type": "application/json", + "x-trigger-jwt": "token", + }, + } + ); + } + + if (urlStr.includes("/realtime/v1/streams/")) { + return new Response(createSSEStream(""), { + status: 200, + headers: { + "content-type": "text/event-stream", + "X-Stream-Version": "v1", + }, + }); + } + + throw new Error(`Unexpected fetch URL: ${urlStr}`); + }); + + global.fetch = fetchSpy; + + const transport = new TriggerChatTransport({ + task: "my-task", + accessToken: "token", + baseURL: "https://api.test.trigger.dev", + }); + + const messages: UIMessage[] = [ + createUserMessage("Hello!"), + createAssistantMessage("Hi there!"), + ]; + + await transport.sendMessages({ + trigger: "regenerate-message", + chatId: "chat-regen", + messageId: "msg-to-regen", + messages, + abortSignal: undefined, + }); + + // Verify the payload includes the regenerate trigger type and messageId + const triggerCall = fetchSpy.mock.calls.find((call: any[]) => + (typeof call[0] === "string" ? call[0] : call[0].toString()).includes("/trigger") + ); + + const triggerBody = JSON.parse(triggerCall![1]?.body as string); + const payload = JSON.parse(triggerBody.payload); + expect(payload.trigger).toBe("regenerate-message"); + expect(payload.messageId).toBe("msg-to-regen"); + }); + }); +}); diff --git a/packages/trigger-sdk/src/v3/chat.ts b/packages/trigger-sdk/src/v3/chat.ts new file mode 100644 index 0000000000..77378bded0 --- /dev/null +++ b/packages/trigger-sdk/src/v3/chat.ts @@ -0,0 +1,297 @@ +/** + * @module @trigger.dev/sdk/chat + * + * Browser-safe module for AI SDK chat transport integration. + * Use this on the frontend with the AI SDK's `useChat` hook. + * + * For backend helpers (`chatTask`, `pipeChat`), use `@trigger.dev/sdk/ai` instead. + * + * @example + * ```tsx + * import { useChat } from "@ai-sdk/react"; + * import { TriggerChatTransport } from "@trigger.dev/sdk/chat"; + * + * function Chat({ accessToken }: { accessToken: string }) { + * const { messages, sendMessage, status } = useChat({ + * transport: new TriggerChatTransport({ + * task: "my-chat-task", + * accessToken, + * }), + * }); + * } + * ``` + */ + +import type { ChatTransport, UIMessage, UIMessageChunk, ChatRequestOptions } from "ai"; +import { ApiClient, SSEStreamSubscription } from "@trigger.dev/core/v3"; + +const DEFAULT_STREAM_KEY = "chat"; +const DEFAULT_BASE_URL = "https://api.trigger.dev"; +const DEFAULT_STREAM_TIMEOUT_SECONDS = 120; + +/** + * Options for creating a TriggerChatTransport. + */ +export type TriggerChatTransportOptions = { + /** + * The Trigger.dev task ID to trigger for chat completions. + * This task should be defined using `chatTask()` from `@trigger.dev/sdk/ai`, + * or a regular `task()` that uses `pipeChat()`. + */ + task: string; + + /** + * An access token for authenticating with the Trigger.dev API. + * + * This must be a token with permission to trigger the task. You can use: + * - A **trigger public token** created via `auth.createTriggerPublicToken(taskId)` (recommended for frontend use) + * - A **secret API key** (for server-side use only — never expose in the browser) + * + * Can also be a function that returns a token string, useful for dynamic token refresh. + */ + accessToken: string | (() => string); + + /** + * Base URL for the Trigger.dev API. 
+ * @default "https://api.trigger.dev" + */ + baseURL?: string; + + /** + * The stream key where the task pipes UIMessageChunk data. + * When using `chatTask()` or `pipeChat()`, this is handled automatically. + * Only set this if you're using a custom stream key. + * + * @default "chat" + */ + streamKey?: string; + + /** + * Additional headers to include in API requests to Trigger.dev. + */ + headers?: Record; + + /** + * The number of seconds to wait for the realtime stream to produce data + * before timing out. + * + * @default 120 + */ + streamTimeoutSeconds?: number; +}; + +/** + * Internal state for tracking active chat sessions. + * @internal + */ +type ChatSessionState = { + runId: string; + publicAccessToken: string; +}; + +/** + * A custom AI SDK `ChatTransport` that runs chat completions as durable Trigger.dev tasks. + * + * When `sendMessages` is called, the transport: + * 1. Triggers a Trigger.dev task with the chat messages as payload + * 2. Subscribes to the task's realtime stream to receive `UIMessageChunk` data + * 3. Returns a `ReadableStream` that the AI SDK processes natively + * + * @example + * ```tsx + * import { useChat } from "@ai-sdk/react"; + * import { TriggerChatTransport } from "@trigger.dev/sdk/chat"; + * + * function Chat({ accessToken }: { accessToken: string }) { + * const { messages, sendMessage, status } = useChat({ + * transport: new TriggerChatTransport({ + * task: "my-chat-task", + * accessToken, + * }), + * }); + * + * // ... render messages + * } + * ``` + * + * On the backend, define the task using `chatTask` from `@trigger.dev/sdk/ai`: + * + * @example + * ```ts + * import { chatTask } from "@trigger.dev/sdk/ai"; + * import { streamText, convertToModelMessages } from "ai"; + * + * export const myChatTask = chatTask({ + * id: "my-chat-task", + * run: async ({ messages }) => { + * return streamText({ + * model: openai("gpt-4o"), + * messages: convertToModelMessages(messages), + * }); + * }, + * }); + * ``` + */ +export class TriggerChatTransport implements ChatTransport { + private readonly taskId: string; + private readonly resolveAccessToken: () => string; + private readonly baseURL: string; + private readonly streamKey: string; + private readonly extraHeaders: Record; + private readonly streamTimeoutSeconds: number; + + private sessions: Map = new Map(); + + constructor(options: TriggerChatTransportOptions) { + this.taskId = options.task; + this.resolveAccessToken = + typeof options.accessToken === "function" + ? options.accessToken + : () => options.accessToken as string; + this.baseURL = options.baseURL ?? DEFAULT_BASE_URL; + this.streamKey = options.streamKey ?? DEFAULT_STREAM_KEY; + this.extraHeaders = options.headers ?? {}; + this.streamTimeoutSeconds = options.streamTimeoutSeconds ?? DEFAULT_STREAM_TIMEOUT_SECONDS; + } + + sendMessages = async ( + options: { + trigger: "submit-message" | "regenerate-message"; + chatId: string; + messageId: string | undefined; + messages: UIMessage[]; + abortSignal: AbortSignal | undefined; + } & ChatRequestOptions + ): Promise> => { + const { trigger, chatId, messageId, messages, abortSignal, body, metadata } = options; + + const payload = { + messages, + chatId, + trigger, + messageId, + metadata, + ...(body ?? 
{}), + }; + + const currentToken = this.resolveAccessToken(); + const apiClient = new ApiClient(this.baseURL, currentToken); + + const triggerResponse = await apiClient.triggerTask(this.taskId, { + payload: JSON.stringify(payload), + options: { + payloadType: "application/json", + }, + }); + + const runId = triggerResponse.id; + const publicAccessToken = + "publicAccessToken" in triggerResponse + ? (triggerResponse as { publicAccessToken?: string }).publicAccessToken + : undefined; + + this.sessions.set(chatId, { + runId, + publicAccessToken: publicAccessToken ?? currentToken, + }); + + return this.subscribeToStream(runId, publicAccessToken ?? currentToken, abortSignal); + }; + + reconnectToStream = async ( + options: { + chatId: string; + } & ChatRequestOptions + ): Promise | null> => { + const session = this.sessions.get(options.chatId); + if (!session) { + return null; + } + + return this.subscribeToStream(session.runId, session.publicAccessToken, undefined); + }; + + private subscribeToStream( + runId: string, + accessToken: string, + abortSignal: AbortSignal | undefined + ): ReadableStream { + const headers: Record = { + Authorization: `Bearer ${accessToken}`, + ...this.extraHeaders, + }; + + const subscription = new SSEStreamSubscription( + `${this.baseURL}/realtime/v1/streams/${runId}/${this.streamKey}`, + { + headers, + signal: abortSignal, + timeoutInSeconds: this.streamTimeoutSeconds, + } + ); + + return new ReadableStream({ + start: async (controller) => { + try { + const sseStream = await subscription.subscribe(); + const reader = sseStream.getReader(); + + try { + while (true) { + const { done, value } = await reader.read(); + + if (done) { + controller.close(); + return; + } + + if (abortSignal?.aborted) { + reader.cancel(); + reader.releaseLock(); + controller.close(); + return; + } + + // Guard against heartbeat or malformed SSE events + if (value.chunk != null && typeof value.chunk === "object") { + controller.enqueue(value.chunk as UIMessageChunk); + } + } + } catch (readError) { + reader.releaseLock(); + throw readError; + } + } catch (error) { + if (error instanceof Error && error.name === "AbortError") { + controller.close(); + return; + } + + controller.error(error); + } + }, + }); + } +} + +/** + * Creates a new `TriggerChatTransport` instance. 
+ * + * @example + * ```tsx + * import { useChat } from "@ai-sdk/react"; + * import { createChatTransport } from "@trigger.dev/sdk/chat"; + * + * const transport = createChatTransport({ + * task: "my-chat-task", + * accessToken: publicAccessToken, + * }); + * + * function Chat() { + * const { messages, sendMessage } = useChat({ transport }); + * } + * ``` + */ +export function createChatTransport(options: TriggerChatTransportOptions): TriggerChatTransport { + return new TriggerChatTransport(options); +} diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index f504496dd1..14cea433e7 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -2099,6 +2099,55 @@ importers: specifier: 3.25.76 version: 3.25.76 + references/ai-chat: + dependencies: + '@ai-sdk/openai': + specifier: ^3.0.0 + version: 3.0.27(zod@3.25.76) + '@ai-sdk/react': + specifier: ^3.0.0 + version: 3.0.84(react@19.1.0)(zod@3.25.76) + '@trigger.dev/sdk': + specifier: workspace:* + version: link:../../packages/trigger-sdk + ai: + specifier: ^6.0.0 + version: 6.0.3(zod@3.25.76) + next: + specifier: 15.3.3 + version: 15.3.3(@opentelemetry/api@1.9.0)(@playwright/test@1.37.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0) + react: + specifier: ^19.0.0 + version: 19.1.0 + react-dom: + specifier: ^19.0.0 + version: 19.1.0(react@19.1.0) + devDependencies: + '@tailwindcss/postcss': + specifier: ^4 + version: 4.0.17 + '@trigger.dev/build': + specifier: workspace:* + version: link:../../packages/build + '@types/node': + specifier: 20.14.14 + version: 20.14.14 + '@types/react': + specifier: ^19 + version: 19.0.12 + '@types/react-dom': + specifier: ^19 + version: 19.0.4(@types/react@19.0.12) + tailwindcss: + specifier: ^4 + version: 4.0.17 + trigger.dev: + specifier: workspace:* + version: link:../../packages/cli-v3 + typescript: + specifier: 5.5.4 + version: 5.5.4 + references/bun-catalog: dependencies: '@trigger.dev/sdk': @@ -2858,6 +2907,12 @@ packages: peerDependencies: zod: ^3.25.76 || ^4.1.8 + '@ai-sdk/gateway@3.0.42': + resolution: {integrity: sha512-Il9lZWPUQMX59H5yJvA08gxfL2Py8oHwvAYRnK0Mt91S+JgPcyk/yEmXNDZG9ghJrwSawtK5Yocy8OnzsTOGsw==} + engines: {node: '>=18'} + peerDependencies: + zod: ^3.25.76 || ^4.1.8 + '@ai-sdk/openai@1.0.1': resolution: {integrity: sha512-snZge8457afWlosVNUn+BG60MrxAPOOm3zmIMxJZih8tneNSiRbTVCbSzAtq/9vsnOHDe5RR83PRl85juOYEnA==} engines: {node: '>=18'} @@ -2888,6 +2943,12 @@ packages: peerDependencies: zod: ^3.25.76 || ^4.1.8 + '@ai-sdk/openai@3.0.27': + resolution: {integrity: sha512-pLMxWOypwroXiK9dxNpn60/HGhWWWDEOJ3lo9vZLoxvpJNtKnLKojwVIvlW3yEjlD7ll1+jUO2uzsABNTaP5Yg==} + engines: {node: '>=18'} + peerDependencies: + zod: ^3.25.76 || ^4.1.8 + '@ai-sdk/provider-utils@1.0.22': resolution: {integrity: sha512-YHK2rpj++wnLVc9vPGzGFP3Pjeld2MwhKinetA0zKXOoHAT/Jit5O8kZsxcSlJPu9wvcGT1UGZEjZrtO7PfFOQ==} engines: {node: '>=18'} @@ -2936,6 +2997,12 @@ packages: peerDependencies: zod: ^3.25.76 || ^4.1.8 + '@ai-sdk/provider-utils@4.0.14': + resolution: {integrity: sha512-7bzKd9lgiDeXM7O4U4nQ8iTxguAOkg8LZGD9AfDVZYjO5cKYRwBPwVjboFcVrxncRHu0tYxZtXZtiLKpG4pEng==} + engines: {node: '>=18'} + peerDependencies: + zod: ^3.25.76 || ^4.1.8 + '@ai-sdk/provider@0.0.26': resolution: {integrity: sha512-dQkfBDs2lTYpKM8389oopPdQgIU007GQyCbuPPrV+K6MtSII3HBfE0stUIMXUb44L+LK1t6GXPP7wjSzjO6uKg==} engines: {node: '>=18'} @@ -2960,6 +3027,10 @@ packages: resolution: {integrity: sha512-m9ka3ptkPQbaHHZHqDXDF9C9B5/Mav0KTdky1k2HZ3/nrW2t1AgObxIVPyGDWQNS9FXT/FS6PIoSjpcP/No8rQ==} engines: {node: '>=18'} + '@ai-sdk/provider@3.0.8': + resolution: {integrity: 
sha512-oGMAgGoQdBXbZqNG0Ze56CHjDZ1IDYOwGYxYjO5KLSlz5HiNQ9udIXsPZ61VWaHGZ5XW/jyjmr6t2xz2jGVwbQ==} + engines: {node: '>=18'} + '@ai-sdk/react@1.0.0': resolution: {integrity: sha512-BDrZqQA07Btg64JCuhFvBgYV+tt2B8cXINzEqWknGoxqcwgdE8wSLG2gkXoLzyC2Rnj7oj0HHpOhLUxDCmoKZg==} engines: {node: '>=18'} @@ -2992,6 +3063,12 @@ packages: zod: optional: true + '@ai-sdk/react@3.0.84': + resolution: {integrity: sha512-caX8dsXGHDctQsFGgq05sdaw9YD2C8Y9SfnOk0b0LPPi4J7/V54tq22MPTGVO9zS3LmsfFQf0GDM4WFZNC5XZA==} + engines: {node: '>=18'} + peerDependencies: + react: ^18 || ~19.0.1 || ~19.1.2 || ^19.2.1 + '@ai-sdk/ui-utils@1.0.0': resolution: {integrity: sha512-oXBDIM/0niWeTWyw77RVl505dNxBUDLLple7bTsqo2d3i1UKwGlzBUX8XqZsh7GbY7I6V05nlG0Y8iGlWxv1Aw==} engines: {node: '>=18'} @@ -5911,6 +5988,9 @@ packages: '@next/env@15.2.4': resolution: {integrity: sha512-+SFtMgoiYP3WoSswuNmxJOCwi06TdWE733D+WPjpXIe4LXGULwEaofiiAy6kbS0+XjM5xF5n3lKuBwN2SnqD9g==} + '@next/env@15.3.3': + resolution: {integrity: sha512-OdiMrzCl2Xi0VTjiQQUK0Xh7bJHnOuET2s+3V+Y40WJBAXrJeGA3f+I8MZJ/YQ3mVGi5XGR1L66oFlgqXhQ4Vw==} + '@next/env@15.4.8': resolution: {integrity: sha512-LydLa2MDI1NMrOFSkO54mTc8iIHSttj6R6dthITky9ylXV2gCGi0bHQjVCtLGRshdRPjyh2kXbxJukDtBWQZtQ==} @@ -5935,6 +6015,12 @@ packages: cpu: [arm64] os: [darwin] + '@next/swc-darwin-arm64@15.3.3': + resolution: {integrity: sha512-WRJERLuH+O3oYB4yZNVahSVFmtxRNjNF1I1c34tYMoJb0Pve+7/RaLAJJizyYiFhjYNGHRAE1Ri2Fd23zgDqhg==} + engines: {node: '>= 10'} + cpu: [arm64] + os: [darwin] + '@next/swc-darwin-arm64@15.4.8': resolution: {integrity: sha512-Pf6zXp7yyQEn7sqMxur6+kYcywx5up1J849psyET7/8pG2gQTVMjU3NzgIt8SeEP5to3If/SaWmaA6H6ysBr1A==} engines: {node: '>= 10'} @@ -5965,6 +6051,12 @@ packages: cpu: [x64] os: [darwin] + '@next/swc-darwin-x64@15.3.3': + resolution: {integrity: sha512-XHdzH/yBc55lu78k/XwtuFR/ZXUTcflpRXcsu0nKmF45U96jt1tsOZhVrn5YH+paw66zOANpOnFQ9i6/j+UYvw==} + engines: {node: '>= 10'} + cpu: [x64] + os: [darwin] + '@next/swc-darwin-x64@15.4.8': resolution: {integrity: sha512-xla6AOfz68a6kq3gRQccWEvFC/VRGJmA/QuSLENSO7CZX5WIEkSz7r1FdXUjtGCQ1c2M+ndUAH7opdfLK1PQbw==} engines: {node: '>= 10'} @@ -5998,6 +6090,13 @@ packages: os: [linux] libc: [glibc] + '@next/swc-linux-arm64-gnu@15.3.3': + resolution: {integrity: sha512-VZ3sYL2LXB8znNGcjhocikEkag/8xiLgnvQts41tq6i+wql63SMS1Q6N8RVXHw5pEUjiof+II3HkDd7GFcgkzw==} + engines: {node: '>= 10'} + cpu: [arm64] + os: [linux] + libc: [glibc] + '@next/swc-linux-arm64-gnu@15.4.8': resolution: {integrity: sha512-y3fmp+1Px/SJD+5ntve5QLZnGLycsxsVPkTzAc3zUiXYSOlTPqT8ynfmt6tt4fSo1tAhDPmryXpYKEAcoAPDJw==} engines: {node: '>= 10'} @@ -6033,6 +6132,13 @@ packages: os: [linux] libc: [musl] + '@next/swc-linux-arm64-musl@15.3.3': + resolution: {integrity: sha512-h6Y1fLU4RWAp1HPNJWDYBQ+e3G7sLckyBXhmH9ajn8l/RSMnhbuPBV/fXmy3muMcVwoJdHL+UtzRzs0nXOf9SA==} + engines: {node: '>= 10'} + cpu: [arm64] + os: [linux] + libc: [musl] + '@next/swc-linux-arm64-musl@15.4.8': resolution: {integrity: sha512-DX/L8VHzrr1CfwaVjBQr3GWCqNNFgyWJbeQ10Lx/phzbQo3JNAxUok1DZ8JHRGcL6PgMRgj6HylnLNndxn4Z6A==} engines: {node: '>= 10'} @@ -6068,6 +6174,13 @@ packages: os: [linux] libc: [glibc] + '@next/swc-linux-x64-gnu@15.3.3': + resolution: {integrity: sha512-jJ8HRiF3N8Zw6hGlytCj5BiHyG/K+fnTKVDEKvUCyiQ/0r5tgwO7OgaRiOjjRoIx2vwLR+Rz8hQoPrnmFbJdfw==} + engines: {node: '>= 10'} + cpu: [x64] + os: [linux] + libc: [glibc] + '@next/swc-linux-x64-gnu@15.4.8': resolution: {integrity: sha512-9fLAAXKAL3xEIFdKdzG5rUSvSiZTLLTCc6JKq1z04DR4zY7DbAPcRvNm3K1inVhTiQCs19ZRAgUerHiVKMZZIA==} engines: {node: '>= 10'} @@ 
-6103,6 +6216,13 @@ packages: os: [linux] libc: [musl] + '@next/swc-linux-x64-musl@15.3.3': + resolution: {integrity: sha512-HrUcTr4N+RgiiGn3jjeT6Oo208UT/7BuTr7K0mdKRBtTbT4v9zJqCDKO97DUqqoBK1qyzP1RwvrWTvU6EPh/Cw==} + engines: {node: '>= 10'} + cpu: [x64] + os: [linux] + libc: [musl] + '@next/swc-linux-x64-musl@15.4.8': resolution: {integrity: sha512-s45V7nfb5g7dbS7JK6XZDcapicVrMMvX2uYgOHP16QuKH/JA285oy6HcxlKqwUNaFY/UC6EvQ8QZUOo19cBKSA==} engines: {node: '>= 10'} @@ -6135,6 +6255,12 @@ packages: cpu: [arm64] os: [win32] + '@next/swc-win32-arm64-msvc@15.3.3': + resolution: {integrity: sha512-SxorONgi6K7ZUysMtRF3mIeHC5aA3IQLmKFQzU0OuhuUYwpOBc1ypaLJLP5Bf3M9k53KUUUj4vTPwzGvl/NwlQ==} + engines: {node: '>= 10'} + cpu: [arm64] + os: [win32] + '@next/swc-win32-arm64-msvc@15.4.8': resolution: {integrity: sha512-KjgeQyOAq7t/HzAJcWPGA8X+4WY03uSCZ2Ekk98S9OgCFsb6lfBE3dbUzUuEQAN2THbwYgFfxX2yFTCMm8Kehw==} engines: {node: '>= 10'} @@ -6177,6 +6303,12 @@ packages: cpu: [x64] os: [win32] + '@next/swc-win32-x64-msvc@15.3.3': + resolution: {integrity: sha512-4QZG6F8enl9/S2+yIiOiju0iCTFd93d8VC1q9LZS4p/Xuk81W2QDjCFeoogmrWWkAD59z8ZxepBQap2dKS5ruw==} + engines: {node: '>= 10'} + cpu: [x64] + os: [win32] + '@next/swc-win32-x64-msvc@15.4.8': resolution: {integrity: sha512-Exsmf/+42fWVnLMaZHzshukTBxZrSwuuLKFvqhGHJ+mC1AokqieLY/XzAl3jc/CqhXLqLY3RRjkKJ9YnLPcRWg==} engines: {node: '>= 10'} @@ -11117,6 +11249,10 @@ packages: resolution: {integrity: sha512-fnYhv671l+eTTp48gB4zEsTW/YtRgRPnkI2nT7x6qw5rkI1Lq2hTmQIpHPgyThI0znLK+vX2n9XxKdXZ7BUbbw==} engines: {node: '>= 20'} + '@vercel/oidc@3.1.0': + resolution: {integrity: sha512-Fw28YZpRnA3cAHHDlkt7xQHiJ0fcL+NRcIqsocZQUSmbzeIKRpwttJjik5ZGanXP+vlA4SbTg+AbA3bP363l+w==} + engines: {node: '>= 20'} + '@vercel/otel@1.13.0': resolution: {integrity: sha512-esRkt470Y2jRK1B1g7S1vkt4Csu44gp83Zpu8rIyPoqy2BKgk4z7ik1uSMswzi45UogLHFl6yR5TauDurBQi4Q==} engines: {node: '>=18'} @@ -11464,6 +11600,12 @@ packages: peerDependencies: zod: ^3.25.76 || ^4.1.8 + ai@6.0.82: + resolution: {integrity: sha512-WLml1ab2IXtREgkxrq2Pl6lFO6NKgC17MqTzmK5mO1UO6tMAJiVjkednw9p0j4+/LaUIZQoRiIT8wA37LswZ9Q==} + engines: {node: '>=18'} + peerDependencies: + zod: ^3.25.76 || ^4.1.8 + ajv-formats@2.1.1: resolution: {integrity: sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==} peerDependencies: @@ -14242,11 +14384,12 @@ packages: glob@7.2.3: resolution: {integrity: sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==} - deprecated: Glob versions prior to v9 are no longer supported + deprecated: Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me glob@9.3.5: resolution: {integrity: sha512-e1LleDykUz2Iu+MTYdkSsuWX8lvAjAcs0Xef0lNIu0S2wOAzuTxCJtcd9S3cijlwYF18EsU3rzb8jPVobxDh9Q==} engines: {node: '>=16 || 14 >=14.17'} + deprecated: Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. 
Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me globals@11.12.0: resolution: {integrity: sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==} @@ -16252,6 +16395,28 @@ packages: sass: optional: true + next@15.3.3: + resolution: {integrity: sha512-JqNj29hHNmCLtNvd090SyRbXJiivQ+58XjCcrC50Crb5g5u2zi7Y2YivbsEfzk6AtVI80akdOQbaMZwWB1Hthw==} + engines: {node: ^18.18.0 || ^19.8.0 || >= 20.0.0} + deprecated: This version has a security vulnerability. Please upgrade to a patched version. See https://nextjs.org/blog/CVE-2025-66478 for more details. + hasBin: true + peerDependencies: + '@opentelemetry/api': ^1.1.0 + '@playwright/test': ^1.41.2 + babel-plugin-react-compiler: '*' + react: ^18.2.0 || 19.0.0-rc-de68d2f4-20241204 || ^19.0.0 + react-dom: ^18.2.0 || 19.0.0-rc-de68d2f4-20241204 || ^19.0.0 + sass: ^1.3.0 + peerDependenciesMeta: + '@opentelemetry/api': + optional: true + '@playwright/test': + optional: true + babel-plugin-react-compiler: + optional: true + sass: + optional: true + next@15.4.8: resolution: {integrity: sha512-jwOXTz/bo0Pvlf20FSb6VXVeWRssA2vbvq9SdrOPEg9x8E1B27C2rQtvriAn600o9hH61kjrVRexEffv3JybuA==} engines: {node: ^18.18.0 || ^19.8.0 || >= 20.0.0} @@ -17202,10 +17367,6 @@ packages: resolution: {integrity: sha512-Aweb9unOEpQ3ezu4Q00DPvvM2ZTUitJdNKeP/+uQgr1IBIqu574IaZoURId7BKtWMREwzKa9OgzPzezWGPWFQw==} engines: {node: ^10 || ^12 || >=14} - postcss@8.5.3: - resolution: {integrity: sha512-dle9A3yYxlBSrt8Fu+IpjGT8SY8hN0mlaA6GY8t0P5PjIOZemULz/E2Bnm/2dcUOena75OTNkHI76uZBNUUq3A==} - engines: {node: ^10 || ^12 || >=14} - postcss@8.5.4: resolution: {integrity: sha512-QSa9EBe+uwlGTFmHsPKokv3B/oEMQZxfqW0QqNCyhpa6mB1afzulwn8hihglqAb2pOw+BJgNlmXQ8la2VeHB7w==} engines: {node: ^10 || ^12 || >=14} @@ -20255,6 +20416,13 @@ snapshots: '@vercel/oidc': 3.0.5 zod: 3.25.76 + '@ai-sdk/gateway@3.0.42(zod@3.25.76)': + dependencies: + '@ai-sdk/provider': 3.0.8 + '@ai-sdk/provider-utils': 4.0.14(zod@3.25.76) + '@vercel/oidc': 3.1.0 + zod: 3.25.76 + '@ai-sdk/openai@1.0.1(zod@3.25.76)': dependencies: '@ai-sdk/provider': 1.0.0 @@ -20285,6 +20453,12 @@ snapshots: '@ai-sdk/provider-utils': 3.0.12(zod@3.25.76) zod: 3.25.76 + '@ai-sdk/openai@3.0.27(zod@3.25.76)': + dependencies: + '@ai-sdk/provider': 3.0.8 + '@ai-sdk/provider-utils': 4.0.14(zod@3.25.76) + zod: 3.25.76 + '@ai-sdk/provider-utils@1.0.22(zod@3.25.76)': dependencies: '@ai-sdk/provider': 0.0.26 @@ -20339,6 +20513,13 @@ snapshots: eventsource-parser: 3.0.6 zod: 3.25.76 + '@ai-sdk/provider-utils@4.0.14(zod@3.25.76)': + dependencies: + '@ai-sdk/provider': 3.0.8 + '@standard-schema/spec': 1.1.0 + eventsource-parser: 3.0.6 + zod: 3.25.76 + '@ai-sdk/provider@0.0.26': dependencies: json-schema: 0.4.0 @@ -20363,6 +20544,10 @@ snapshots: dependencies: json-schema: 0.4.0 + '@ai-sdk/provider@3.0.8': + dependencies: + json-schema: 0.4.0 + '@ai-sdk/react@1.0.0(react@18.3.1)(zod@3.25.76)': dependencies: '@ai-sdk/provider-utils': 2.0.0(zod@3.25.76) @@ -20393,6 +20578,16 @@ snapshots: optionalDependencies: zod: 3.25.76 + '@ai-sdk/react@3.0.84(react@19.1.0)(zod@3.25.76)': + dependencies: + '@ai-sdk/provider-utils': 4.0.14(zod@3.25.76) + ai: 6.0.82(zod@3.25.76) + react: 19.1.0 + swr: 2.2.5(react@19.1.0) + throttleit: 2.1.0 + transitivePeerDependencies: + - zod + '@ai-sdk/ui-utils@1.0.0(zod@3.25.76)': dependencies: '@ai-sdk/provider': 1.0.0 @@ -20439,7 +20634,7 @@ snapshots: commander: 10.0.1 marked: 9.1.6 marked-terminal: 7.1.0(marked@9.1.6) - semver: 7.6.3 + semver: 7.7.3 
'@arethetypeswrong/core@0.15.1': dependencies: @@ -22384,7 +22579,7 @@ snapshots: '@babel/traverse': 7.24.7 '@babel/types': 7.24.0 convert-source-map: 1.9.0 - debug: 4.4.1(supports-color@10.0.0) + debug: 4.4.3(supports-color@10.0.0) gensync: 1.0.0-beta.2 json5: 2.2.3 semver: 6.3.1 @@ -22672,7 +22867,7 @@ snapshots: '@babel/helper-split-export-declaration': 7.24.7 '@babel/parser': 7.27.5 '@babel/types': 7.27.3 - debug: 4.4.1(supports-color@10.0.0) + debug: 4.4.3(supports-color@10.0.0) globals: 11.12.0 transitivePeerDependencies: - supports-color @@ -23121,7 +23316,7 @@ snapshots: '@epic-web/test-server@0.1.0(bufferutil@4.0.9)': dependencies: '@hono/node-server': 1.12.2(hono@4.5.11) - '@hono/node-ws': 1.0.4(@hono/node-server@1.12.2(hono@4.5.11))(bufferutil@4.0.9) + '@hono/node-ws': 1.0.4(@hono/node-server@1.12.2(hono@4.11.8))(bufferutil@4.0.9) '@open-draft/deferred-promise': 2.2.0 '@types/ws': 8.5.12 hono: 4.5.11 @@ -23653,7 +23848,7 @@ snapshots: '@eslint/eslintrc@1.4.1': dependencies: ajv: 6.12.6 - debug: 4.4.1(supports-color@10.0.0) + debug: 4.4.3(supports-color@10.0.0) espree: 9.6.0 globals: 13.19.0 ignore: 5.2.4 @@ -23876,7 +24071,7 @@ snapshots: dependencies: hono: 4.11.8 - '@hono/node-ws@1.0.4(@hono/node-server@1.12.2(hono@4.5.11))(bufferutil@4.0.9)': + '@hono/node-ws@1.0.4(@hono/node-server@1.12.2(hono@4.11.8))(bufferutil@4.0.9)': dependencies: '@hono/node-server': 1.12.2(hono@4.5.11) ws: 8.18.3(bufferutil@4.0.9) @@ -23887,7 +24082,7 @@ snapshots: '@humanwhocodes/config-array@0.11.8': dependencies: '@humanwhocodes/object-schema': 1.2.1 - debug: 4.4.1(supports-color@10.0.0) + debug: 4.4.3(supports-color@10.0.0) minimatch: 3.1.2 transitivePeerDependencies: - supports-color @@ -23903,7 +24098,7 @@ snapshots: '@antfu/install-pkg': 1.1.0 '@antfu/utils': 9.3.0 '@iconify/types': 2.0.0 - debug: 4.4.1(supports-color@10.0.0) + debug: 4.4.3(supports-color@10.0.0) globals: 15.15.0 kolorist: 1.8.0 local-pkg: 1.1.2 @@ -24399,6 +24594,8 @@ snapshots: '@next/env@15.2.4': {} + '@next/env@15.3.3': {} + '@next/env@15.4.8': {} '@next/env@15.5.6': {} @@ -24412,6 +24609,9 @@ snapshots: '@next/swc-darwin-arm64@15.2.4': optional: true + '@next/swc-darwin-arm64@15.3.3': + optional: true + '@next/swc-darwin-arm64@15.4.8': optional: true @@ -24427,6 +24627,9 @@ snapshots: '@next/swc-darwin-x64@15.2.4': optional: true + '@next/swc-darwin-x64@15.3.3': + optional: true + '@next/swc-darwin-x64@15.4.8': optional: true @@ -24442,6 +24645,9 @@ snapshots: '@next/swc-linux-arm64-gnu@15.2.4': optional: true + '@next/swc-linux-arm64-gnu@15.3.3': + optional: true + '@next/swc-linux-arm64-gnu@15.4.8': optional: true @@ -24457,6 +24663,9 @@ snapshots: '@next/swc-linux-arm64-musl@15.2.4': optional: true + '@next/swc-linux-arm64-musl@15.3.3': + optional: true + '@next/swc-linux-arm64-musl@15.4.8': optional: true @@ -24472,6 +24681,9 @@ snapshots: '@next/swc-linux-x64-gnu@15.2.4': optional: true + '@next/swc-linux-x64-gnu@15.3.3': + optional: true + '@next/swc-linux-x64-gnu@15.4.8': optional: true @@ -24487,6 +24699,9 @@ snapshots: '@next/swc-linux-x64-musl@15.2.4': optional: true + '@next/swc-linux-x64-musl@15.3.3': + optional: true + '@next/swc-linux-x64-musl@15.4.8': optional: true @@ -24502,6 +24717,9 @@ snapshots: '@next/swc-win32-arm64-msvc@15.2.4': optional: true + '@next/swc-win32-arm64-msvc@15.3.3': + optional: true + '@next/swc-win32-arm64-msvc@15.4.8': optional: true @@ -24523,6 +24741,9 @@ snapshots: '@next/swc-win32-x64-msvc@15.2.4': optional: true + '@next/swc-win32-x64-msvc@15.3.3': + optional: true + 
'@next/swc-win32-x64-msvc@15.4.8': optional: true @@ -30568,7 +30789,7 @@ snapshots: '@tailwindcss/node': 4.0.17 '@tailwindcss/oxide': 4.0.17 lightningcss: 1.29.2 - postcss: 8.5.3 + postcss: 8.5.6 tailwindcss: 4.0.17 '@tailwindcss/typography@0.5.9(tailwindcss@3.4.1)': @@ -31111,7 +31332,7 @@ snapshots: '@types/react@19.0.12': dependencies: - csstype: 3.1.3 + csstype: 3.2.0 '@types/readable-stream@4.0.14': dependencies: @@ -31450,6 +31671,8 @@ snapshots: '@vercel/oidc@3.0.5': {} + '@vercel/oidc@3.1.0': {} + '@vercel/otel@1.13.0(@opentelemetry/api-logs@0.203.0)(@opentelemetry/api@1.9.0)(@opentelemetry/instrumentation@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/resources@2.2.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-logs@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-metrics@2.0.1(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))': dependencies: '@opentelemetry/api': 1.9.0 @@ -31810,7 +32033,7 @@ snapshots: agent-base@6.0.2: dependencies: - debug: 4.4.1(supports-color@10.0.0) + debug: 4.4.3(supports-color@10.0.0) transitivePeerDependencies: - supports-color @@ -31893,6 +32116,14 @@ snapshots: '@opentelemetry/api': 1.9.0 zod: 3.25.76 + ai@6.0.82(zod@3.25.76): + dependencies: + '@ai-sdk/gateway': 3.0.42(zod@3.25.76) + '@ai-sdk/provider': 3.0.8 + '@ai-sdk/provider-utils': 4.0.14(zod@3.25.76) + '@opentelemetry/api': 1.9.0 + zod: 3.25.76 + ajv-formats@2.1.1(ajv@8.17.1): optionalDependencies: ajv: 8.17.1 @@ -32306,7 +32537,7 @@ snapshots: dependencies: bytes: 3.1.2 content-type: 1.0.5 - debug: 4.4.1(supports-color@10.0.0) + debug: 4.4.3(supports-color@10.0.0) http-errors: 2.0.0 iconv-lite: 0.6.3 on-finished: 2.4.1 @@ -32320,7 +32551,7 @@ snapshots: dependencies: bytes: 3.1.2 content-type: 1.0.5 - debug: 4.4.3 + debug: 4.4.3(supports-color@10.0.0) http-errors: 2.0.0 iconv-lite: 0.7.2 on-finished: 2.4.1 @@ -33318,9 +33549,11 @@ snapshots: optionalDependencies: supports-color: 10.0.0 - debug@4.4.3: + debug@4.4.3(supports-color@10.0.0): dependencies: ms: 2.1.3 + optionalDependencies: + supports-color: 10.0.0 decamelize-keys@1.1.1: dependencies: @@ -33464,7 +33697,7 @@ snapshots: docker-modem@5.0.6: dependencies: - debug: 4.4.1(supports-color@10.0.0) + debug: 4.4.3(supports-color@10.0.0) readable-stream: 3.6.0 split-ca: 1.0.1 ssh2: 1.16.0 @@ -33674,7 +33907,7 @@ snapshots: enhanced-resolve@5.15.0: dependencies: graceful-fs: 4.2.11 - tapable: 2.2.1 + tapable: 2.3.0 enhanced-resolve@5.18.3: dependencies: @@ -34161,12 +34394,12 @@ snapshots: eslint-import-resolver-typescript@3.5.5(@typescript-eslint/parser@5.59.6(eslint@8.31.0)(typescript@5.5.4))(eslint-import-resolver-node@0.3.7)(eslint-plugin-import@2.29.1)(eslint@8.31.0): dependencies: - debug: 4.4.1(supports-color@10.0.0) + debug: 4.4.3(supports-color@10.0.0) enhanced-resolve: 5.15.0 eslint: 8.31.0 eslint-module-utils: 2.7.4(@typescript-eslint/parser@5.59.6(eslint@8.31.0)(typescript@5.5.4))(eslint-import-resolver-node@0.3.7)(eslint-import-resolver-typescript@3.5.5)(eslint@8.31.0) eslint-plugin-import: 2.29.1(@typescript-eslint/parser@5.59.6(eslint@8.31.0)(typescript@5.5.4))(eslint-import-resolver-typescript@3.5.5)(eslint@8.31.0) - get-tsconfig: 4.7.2 + get-tsconfig: 4.7.6 globby: 13.2.2 is-core-module: 2.14.0 is-glob: 4.0.3 @@ -34643,7 +34876,7 @@ snapshots: content-type: 1.0.5 cookie: 0.7.1 cookie-signature: 1.2.2 - debug: 4.4.1(supports-color@10.0.0) + debug: 4.4.3(supports-color@10.0.0) depd: 2.0.0 encodeurl: 2.0.0 escape-html: 1.0.3 @@ -34856,7 +35089,7 @@ snapshots: 
finalhandler@2.1.0(supports-color@10.0.0): dependencies: - debug: 4.4.1(supports-color@10.0.0) + debug: 4.4.3(supports-color@10.0.0) encodeurl: 2.0.0 escape-html: 1.0.3 on-finished: 2.4.1 @@ -35572,7 +35805,7 @@ snapshots: https-proxy-agent@5.0.1: dependencies: agent-base: 6.0.2 - debug: 4.4.1(supports-color@10.0.0) + debug: 4.4.3(supports-color@10.0.0) transitivePeerDependencies: - supports-color @@ -37240,7 +37473,7 @@ snapshots: micromark@3.1.0: dependencies: '@types/debug': 4.1.12 - debug: 4.4.1(supports-color@10.0.0) + debug: 4.4.3(supports-color@10.0.0) decode-named-character-reference: 1.0.2 micromark-core-commonmark: 1.0.6 micromark-factory-space: 1.0.0 @@ -37262,7 +37495,7 @@ snapshots: micromark@4.0.2: dependencies: '@types/debug': 4.1.12 - debug: 4.4.1(supports-color@10.0.0) + debug: 4.4.3(supports-color@10.0.0) decode-named-character-reference: 1.0.2 devlop: 1.1.0 micromark-core-commonmark: 2.0.3 @@ -37621,6 +37854,33 @@ snapshots: - '@babel/core' - babel-plugin-macros + next@15.3.3(@opentelemetry/api@1.9.0)(@playwright/test@1.37.0)(react-dom@19.1.0(react@19.1.0))(react@19.1.0): + dependencies: + '@next/env': 15.3.3 + '@swc/counter': 0.1.3 + '@swc/helpers': 0.5.15 + busboy: 1.6.0 + caniuse-lite: 1.0.30001754 + postcss: 8.4.31 + react: 19.1.0 + react-dom: 19.1.0(react@19.1.0) + styled-jsx: 5.1.6(react@19.1.0) + optionalDependencies: + '@next/swc-darwin-arm64': 15.3.3 + '@next/swc-darwin-x64': 15.3.3 + '@next/swc-linux-arm64-gnu': 15.3.3 + '@next/swc-linux-arm64-musl': 15.3.3 + '@next/swc-linux-x64-gnu': 15.3.3 + '@next/swc-linux-x64-musl': 15.3.3 + '@next/swc-win32-arm64-msvc': 15.3.3 + '@next/swc-win32-x64-msvc': 15.3.3 + '@opentelemetry/api': 1.9.0 + '@playwright/test': 1.37.0 + sharp: 0.34.5 + transitivePeerDependencies: + - '@babel/core' + - babel-plugin-macros + next@15.4.8(@opentelemetry/api@1.9.0)(@playwright/test@1.37.0)(react-dom@19.0.0(react@19.0.0))(react@19.0.0): dependencies: '@next/env': 15.4.8 @@ -38669,12 +38929,6 @@ snapshots: picocolors: 1.1.1 source-map-js: 1.2.0 - postcss@8.5.3: - dependencies: - nanoid: 3.3.8 - picocolors: 1.1.1 - source-map-js: 1.2.1 - postcss@8.5.4: dependencies: nanoid: 3.3.11 @@ -39770,7 +40024,7 @@ snapshots: remix-auth-oauth2@1.11.0(@remix-run/server-runtime@2.1.0(typescript@5.5.4))(remix-auth@3.6.0(@remix-run/react@2.1.0(react-dom@18.2.0(react@18.2.0))(react@18.2.0)(typescript@5.5.4))(@remix-run/server-runtime@2.1.0(typescript@5.5.4))): dependencies: '@remix-run/server-runtime': 2.1.0(typescript@5.5.4) - debug: 4.4.1(supports-color@10.0.0) + debug: 4.4.3(supports-color@10.0.0) remix-auth: 3.6.0(@remix-run/react@2.1.0(react-dom@18.2.0(react@18.2.0))(react@18.2.0)(typescript@5.5.4))(@remix-run/server-runtime@2.1.0(typescript@5.5.4)) transitivePeerDependencies: - supports-color @@ -39965,7 +40219,7 @@ snapshots: router@2.2.0: dependencies: - debug: 4.4.1(supports-color@10.0.0) + debug: 4.4.3(supports-color@10.0.0) depd: 2.0.0 is-promise: 4.0.0 parseurl: 1.3.3 @@ -40119,7 +40373,7 @@ snapshots: send@1.1.0(supports-color@10.0.0): dependencies: - debug: 4.4.1(supports-color@10.0.0) + debug: 4.4.3(supports-color@10.0.0) destroy: 1.2.0 encodeurl: 2.0.0 escape-html: 1.0.3 @@ -40136,7 +40390,7 @@ snapshots: send@1.2.1: dependencies: - debug: 4.4.3 + debug: 4.4.3(supports-color@10.0.0) encodeurl: 2.0.0 escape-html: 1.0.3 etag: 1.8.1 @@ -40900,6 +41154,12 @@ snapshots: react: 19.0.0 use-sync-external-store: 1.2.2(react@19.0.0) + swr@2.2.5(react@19.1.0): + dependencies: + client-only: 0.0.1 + react: 19.1.0 + use-sync-external-store: 
1.2.2(react@19.1.0) + sync-content@2.0.1: dependencies: glob: 11.0.0 @@ -41842,6 +42102,10 @@ snapshots: dependencies: react: 19.0.0 + use-sync-external-store@1.2.2(react@19.1.0): + dependencies: + react: 19.1.0 + util-deprecate@1.0.2: {} util@0.12.5: @@ -41954,7 +42218,7 @@ snapshots: vite-node@0.28.5(@types/node@20.14.14)(lightningcss@1.29.2)(terser@5.44.1): dependencies: cac: 6.7.14 - debug: 4.4.1(supports-color@10.0.0) + debug: 4.4.3(supports-color@10.0.0) mlly: 1.7.4 pathe: 1.1.2 picocolors: 1.1.1 @@ -41974,7 +42238,7 @@ snapshots: vite-node@3.1.4(@types/node@20.14.14)(lightningcss@1.29.2)(terser@5.44.1): dependencies: cac: 6.7.14 - debug: 4.4.1(supports-color@10.0.0) + debug: 4.4.3(supports-color@10.0.0) es-module-lexer: 1.7.0 pathe: 2.0.3 vite: 5.4.21(@types/node@20.14.14)(lightningcss@1.29.2)(terser@5.44.1) diff --git a/references/ai-chat/next.config.ts b/references/ai-chat/next.config.ts new file mode 100644 index 0000000000..cb651cdc00 --- /dev/null +++ b/references/ai-chat/next.config.ts @@ -0,0 +1,5 @@ +import type { NextConfig } from "next"; + +const nextConfig: NextConfig = {}; + +export default nextConfig; diff --git a/references/ai-chat/package.json b/references/ai-chat/package.json new file mode 100644 index 0000000000..b373eb364d --- /dev/null +++ b/references/ai-chat/package.json @@ -0,0 +1,30 @@ +{ + "name": "references-ai-chat", + "version": "0.1.0", + "private": true, + "scripts": { + "dev": "next dev --turbopack", + "build": "next build", + "start": "next start", + "dev:trigger": "trigger dev" + }, + "dependencies": { + "@ai-sdk/openai": "^3.0.0", + "@ai-sdk/react": "^3.0.0", + "@trigger.dev/sdk": "workspace:*", + "ai": "^6.0.0", + "next": "15.3.3", + "react": "^19.0.0", + "react-dom": "^19.0.0" + }, + "devDependencies": { + "@tailwindcss/postcss": "^4", + "@trigger.dev/build": "workspace:*", + "@types/node": "^22", + "@types/react": "^19", + "@types/react-dom": "^19", + "tailwindcss": "^4", + "trigger.dev": "workspace:*", + "typescript": "^5" + } +} diff --git a/references/ai-chat/postcss.config.mjs b/references/ai-chat/postcss.config.mjs new file mode 100644 index 0000000000..79bcf135dc --- /dev/null +++ b/references/ai-chat/postcss.config.mjs @@ -0,0 +1,8 @@ +/** @type {import('postcss-load-config').Config} */ +const config = { + plugins: { + "@tailwindcss/postcss": {}, + }, +}; + +export default config; diff --git a/references/ai-chat/src/app/globals.css b/references/ai-chat/src/app/globals.css new file mode 100644 index 0000000000..f1d8c73cdc --- /dev/null +++ b/references/ai-chat/src/app/globals.css @@ -0,0 +1 @@ +@import "tailwindcss"; diff --git a/references/ai-chat/src/app/layout.tsx b/references/ai-chat/src/app/layout.tsx new file mode 100644 index 0000000000..f507028583 --- /dev/null +++ b/references/ai-chat/src/app/layout.tsx @@ -0,0 +1,15 @@ +import type { Metadata } from "next"; +import "./globals.css"; + +export const metadata: Metadata = { + title: "AI Chat — Trigger.dev", + description: "AI SDK useChat powered by Trigger.dev durable tasks", +}; + +export default function RootLayout({ children }: { children: React.ReactNode }) { + return ( + + {children} + + ); +} diff --git a/references/ai-chat/src/app/page.tsx b/references/ai-chat/src/app/page.tsx new file mode 100644 index 0000000000..16f01282c8 --- /dev/null +++ b/references/ai-chat/src/app/page.tsx @@ -0,0 +1,17 @@ +import { auth } from "@trigger.dev/sdk"; +import { Chat } from "@/components/chat"; + +export default async function Home() { + const accessToken = await 
auth.createTriggerPublicToken("ai-chat");
+
+  return (
+    <main>
+      <div>
+        <h1>
+          AI Chat — powered by Trigger.dev
+        </h1>
+        <Chat accessToken={accessToken} />
+      </div>
+    </main>
+ ); +} diff --git a/references/ai-chat/src/components/chat.tsx b/references/ai-chat/src/components/chat.tsx new file mode 100644 index 0000000000..34c68d8ba7 --- /dev/null +++ b/references/ai-chat/src/components/chat.tsx @@ -0,0 +1,91 @@ +"use client"; + +import { useChat } from "@ai-sdk/react"; +import { TriggerChatTransport } from "@trigger.dev/sdk/chat"; +import { useState } from "react"; + +export function Chat({ accessToken }: { accessToken: string }) { + const [input, setInput] = useState(""); + + const { messages, sendMessage, status, error } = useChat({ + transport: new TriggerChatTransport({ + task: "ai-chat", + accessToken, + baseURL: process.env.NEXT_PUBLIC_TRIGGER_API_URL, + }), + }); + + function handleSubmit(e: React.FormEvent) { + e.preventDefault(); + if (!input.trim() || status === "streaming") return; + + sendMessage({ text: input }); + setInput(""); + } + + return ( +
+    <div>
+      {/* Messages */}
+      <div>
+        {messages.length === 0 && (
+          <div>
+            Send a message to start chatting.
+          </div>
+        )}
+
+        {messages.map((message) => (
+          <div key={message.id}>
+            <div>
+              {message.parts.map((part, i) => {
+                if (part.type === "text") {
+                  return <span key={i}>{part.text}</span>;
+                }
+                return null;
+              })}
+            </div>
+          </div>
+        ))}
+
+        {status === "streaming" && (
+          <div>
+            <div>
+              Thinking…
+            </div>
+          </div>
+        )}
+      </div>
+
+      {/* Error */}
+      {error && (
+        <div>
+          {error.message}
+        </div>
+      )}
+
+      {/* Input */}
+      <form onSubmit={handleSubmit}>
+        <input
+          value={input}
+          onChange={(e) => setInput(e.target.value)}
+          placeholder="Type a message…"
+          className="flex-1 rounded-lg border border-gray-300 px-3 py-2 text-sm outline-none focus:border-blue-500 focus:ring-1 focus:ring-blue-500"
+        />
+        <button type="submit">Send</button>
+      </form>
+    </div>
+ ); +} diff --git a/references/ai-chat/src/trigger/chat.ts b/references/ai-chat/src/trigger/chat.ts new file mode 100644 index 0000000000..8c77bbeebc --- /dev/null +++ b/references/ai-chat/src/trigger/chat.ts @@ -0,0 +1,14 @@ +import { chatTask } from "@trigger.dev/sdk/ai"; +import { streamText, convertToModelMessages } from "ai"; +import { openai } from "@ai-sdk/openai"; + +export const chat = chatTask({ + id: "ai-chat", + run: async ({ messages }) => { + return streamText({ + model: openai("gpt-4o-mini"), + system: "You are a helpful assistant. Be concise and friendly.", + messages: await convertToModelMessages(messages), + }); + }, +}); diff --git a/references/ai-chat/trigger.config.ts b/references/ai-chat/trigger.config.ts new file mode 100644 index 0000000000..4412bfc932 --- /dev/null +++ b/references/ai-chat/trigger.config.ts @@ -0,0 +1,7 @@ +import { defineConfig } from "@trigger.dev/sdk"; + +export default defineConfig({ + project: process.env.TRIGGER_PROJECT_REF!, + dirs: ["./src/trigger"], + maxDuration: 300, +}); diff --git a/references/ai-chat/tsconfig.json b/references/ai-chat/tsconfig.json new file mode 100644 index 0000000000..c1334095f8 --- /dev/null +++ b/references/ai-chat/tsconfig.json @@ -0,0 +1,27 @@ +{ + "compilerOptions": { + "target": "ES2017", + "lib": ["dom", "dom.iterable", "esnext"], + "allowJs": true, + "skipLibCheck": true, + "strict": true, + "noEmit": true, + "esModuleInterop": true, + "module": "esnext", + "moduleResolution": "bundler", + "resolveJsonModule": true, + "isolatedModules": true, + "jsx": "preserve", + "incremental": true, + "plugins": [ + { + "name": "next" + } + ], + "paths": { + "@/*": ["./src/*"] + } + }, + "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"], + "exclude": ["node_modules"] +}