diff --git a/README.md b/README.md index 9ab294ca..1bc1dab8 100644 --- a/README.md +++ b/README.md @@ -45,6 +45,7 @@ A powerful, type-safe AI SDK for building AI-powered applications. - Headless chat state management with adapters (SSE, HTTP stream, custom) - Isomorphic type-safe tools with server/client execution - **Enhanced integration with TanStack Start** - Share implementations between AI tools and server functions +- **Observability events** - Structured, typed events for text, tools, image, speech, transcription, and video ([docs](./docs/guides/observability.md)) ### Read the docs → diff --git a/docs/guides/observability.md b/docs/guides/observability.md index 9b94f30d..66b98f25 100644 --- a/docs/guides/observability.md +++ b/docs/guides/observability.md @@ -18,6 +18,22 @@ the `{ withEventTarget: true }` option. This will not only emit to the event bus (which is not present in production), but to the current eventTarget that you will be able to listen to. +## Event naming scheme + +Events follow the format `<domain>:<event>`. + +- Text: `text:request:started`, `text:message:created`, `text:chunk:content`, `text:usage` +- Tools: `tools:approval:requested`, `tools:call:completed`, `tools:result:added` +- Summarize: `summarize:request:started`, `summarize:usage` +- Image: `image:request:started`, `image:usage` +- Speech: `speech:request:started`, `speech:usage` +- Transcription: `transcription:request:started`, `transcription:usage` +- Video: `video:request:started`, `video:usage` +- Client: `client:created`, `client:loading:changed`, `client:messages:cleared` + +Every event includes all metadata available at the time of emission (model, provider, +system prompts, request and message IDs, options, and tool names). 
+ ## Server events There are both events that happen on the server and on the client, if you want to listen to either side you just need to @@ -28,7 +44,7 @@ Here is an example for the server: import { aiEventClient } from "@tanstack/ai/event-client"; // server.ts file or wherever the root of your server is -aiEventClient.on("chat:started", e => { +aiEventClient.on("text:request:started", e => { // implement whatever you need to here }) // rest of your server logic @@ -46,7 +62,7 @@ import { aiEventClient } from "@tanstack/ai/event-client"; const App = () => { useEffect(() => { - const cleanup = aiEventClient.on("client:tool-call-updated", e => { + const cleanup = aiEventClient.on("tools:call:updated", e => { // do whatever you need to do }) return cleanup; @@ -55,4 +71,17 @@ const App = () => { } ``` +## Reconstructing chat + +To rebuild a chat timeline from events, listen for: + +- `text:message:created` (full message content) +- `text:message:user` (explicit user message events) +- `text:chunk:*` (streaming content, tool calls, tool results, thinking) +- `tools:*` (approvals, input availability, call completion) +- `text:request:completed` (final completion + usage) + +This set is sufficient to replay the conversation end-to-end for observability and +telemetry systems. 
+ \ No newline at end of file diff --git a/examples/ts-react-chat/src/routes/api.tanchat.ts b/examples/ts-react-chat/src/routes/api.tanchat.ts index 139d04a0..6fd2eba7 100644 --- a/examples/ts-react-chat/src/routes/api.tanchat.ts +++ b/examples/ts-react-chat/src/routes/api.tanchat.ts @@ -94,10 +94,13 @@ export const Route = createFileRoute('/api/tanchat')({ }), openrouter: () => createChatOptions({ - adapter: openRouterText('openrouter/auto'), + adapter: openRouterText('openai/gpt-5.1'), modelOptions: { models: ['openai/chatgpt-4o-latest'], route: 'fallback', + reasoning: { + effort: 'medium', + }, }, }), gemini: () => diff --git a/packages/typescript/ai-client/src/chat-client.ts b/packages/typescript/ai-client/src/chat-client.ts index 1d1ba091..bb0a4693 100644 --- a/packages/typescript/ai-client/src/chat-client.ts +++ b/packages/typescript/ai-client/src/chat-client.ts @@ -160,13 +160,16 @@ export class ChatClient { input: any approvalId: string }) => { - this.events.approvalRequested( - this.currentMessageId || '', - args.toolCallId, - args.toolName, - args.input, - args.approvalId, - ) + if (this.currentStreamId) { + this.events.approvalRequested( + this.currentStreamId, + this.currentMessageId || '', + args.toolCallId, + args.toolName, + args.input, + args.approvalId, + ) + } }, }, }) @@ -210,7 +213,10 @@ export class ChatClient { parts: [], createdAt: new Date(), } - this.events.messageAppended(assistantMessage) + this.events.messageAppended( + assistantMessage, + this.currentStreamId || undefined, + ) // Process each chunk for await (const chunk of source) { diff --git a/packages/typescript/ai-client/src/events.ts b/packages/typescript/ai-client/src/events.ts index e9e332ba..33532f41 100644 --- a/packages/typescript/ai-client/src/events.ts +++ b/packages/typescript/ai-client/src/events.ts @@ -17,7 +17,7 @@ export abstract class ChatClientEventEmitter { */ protected abstract emitEvent( eventName: string, - data?: Record, + data?: Record, ): void /** @@ -33,14 
+33,14 @@ export abstract class ChatClientEventEmitter { * Emit loading state changed event */ loadingChanged(isLoading: boolean): void { - this.emitEvent('client:loading-changed', { isLoading }) + this.emitEvent('client:loading:changed', { isLoading }) } /** * Emit error state changed event */ errorChanged(error: string | null): void { - this.emitEvent('client:error-changed', { + this.emitEvent('client:error:changed', { error, }) } @@ -49,12 +49,8 @@ export abstract class ChatClientEventEmitter { * Emit text update events (combines processor and client events) */ textUpdated(streamId: string, messageId: string, content: string): void { - this.emitEvent('processor:text-updated', { + this.emitEvent('text:chunk:content', { streamId, - content, - }) - - this.emitEvent('client:assistant-message-updated', { messageId, content, }) @@ -71,15 +67,8 @@ export abstract class ChatClientEventEmitter { state: string, args: string, ): void { - this.emitEvent('processor:tool-call-state-changed', { + this.emitEvent('tools:call:updated', { streamId, - toolCallId, - toolName, - state, - arguments: args, - }) - - this.emitEvent('client:tool-call-updated', { messageId, toolCallId, toolName, @@ -91,22 +80,6 @@ export abstract class ChatClientEventEmitter { /** * Emit tool result state change event */ - toolResultStateChanged( - streamId: string, - toolCallId: string, - content: string, - state: string, - error?: string, - ): void { - this.emitEvent('processor:tool-result-state-changed', { - streamId, - toolCallId, - content, - state, - error, - }) - } - /** * Emit thinking update event */ @@ -116,7 +89,7 @@ export abstract class ChatClientEventEmitter { content: string, delta?: string, ): void { - this.emitEvent('stream:chunk:thinking', { + this.emitEvent('text:chunk:thinking', { streamId, messageId, content, @@ -128,13 +101,15 @@ export abstract class ChatClientEventEmitter { * Emit approval requested event */ approvalRequested( + streamId: string, messageId: string, toolCallId: 
string, toolName: string, - input: any, + input: unknown, approvalId: string, ): void { - this.emitEvent('client:approval-requested', { + this.emitEvent('tools:approval:requested', { + streamId, messageId, toolCallId, toolName, @@ -146,17 +121,18 @@ export abstract class ChatClientEventEmitter { /** * Emit message appended event */ - messageAppended(uiMessage: UIMessage): void { - const contentPreview = uiMessage.parts - .filter((p) => p.type === 'text') - .map((p) => (p as any).content) + messageAppended(uiMessage: UIMessage, streamId?: string): void { + const content = uiMessage.parts + .filter((part) => part.type === 'text') + .map((part) => part.content) .join(' ') - .substring(0, 100) - this.emitEvent('client:message-appended', { + this.emitEvent('text:message:created', { + streamId, messageId: uiMessage.id, role: uiMessage.role, - contentPreview, + content, + parts: uiMessage.parts, }) } @@ -164,8 +140,15 @@ export abstract class ChatClientEventEmitter { * Emit message sent event */ messageSent(messageId: string, content: string): void { - this.emitEvent('client:message-sent', { + this.emitEvent('text:message:created', { + messageId, + role: 'user', + content, + }) + + this.emitEvent('text:message:user', { messageId, + role: 'user', content, }) } @@ -190,7 +173,7 @@ export abstract class ChatClientEventEmitter { * Emit messages cleared event */ messagesCleared(): void { - this.emitEvent('client:messages-cleared') + this.emitEvent('client:messages:cleared') } /** @@ -199,10 +182,10 @@ export abstract class ChatClientEventEmitter { toolResultAdded( toolCallId: string, toolName: string, - output: any, + output: unknown, state: string, ): void { - this.emitEvent('tool:result-added', { + this.emitEvent('tools:result:added', { toolCallId, toolName, output, @@ -218,7 +201,7 @@ export abstract class ChatClientEventEmitter { toolCallId: string, approved: boolean, ): void { - this.emitEvent('tool:approval-responded', { + this.emitEvent('tools:approval:responded', { 
approvalId, toolCallId, approved, @@ -235,14 +218,19 @@ export class DefaultChatClientEventEmitter extends ChatClientEventEmitter { */ protected emitEvent(eventName: string, data?: Record): void { // For client:* and tool:* events, automatically add clientId and timestamp - if (eventName.startsWith('client:') || eventName.startsWith('tool:')) { + if ( + eventName.startsWith('client:') || + eventName.startsWith('tools:') || + eventName.startsWith('text:') + ) { aiEventClient.emit(eventName as any, { ...data, clientId: this.clientId, + source: 'client', timestamp: Date.now(), }) } else { - // For other events (e.g., processor:*), just add timestamp + // For other events, just add timestamp aiEventClient.emit(eventName as any, { ...data, timestamp: Date.now(), diff --git a/packages/typescript/ai-client/tests/chat-client.test.ts b/packages/typescript/ai-client/tests/chat-client.test.ts index 05ae21d3..34476519 100644 --- a/packages/typescript/ai-client/tests/chat-client.test.ts +++ b/packages/typescript/ai-client/tests/chat-client.test.ts @@ -496,7 +496,7 @@ describe('ChatClient', () => { }) describe('devtools events', () => { - it('should emit messageAppended event when assistant message starts', async () => { + it('should emit text:message:created event when assistant message starts', async () => { const chunks = createTextChunks('Hello, world!') const adapter = createMockConnectionAdapter({ chunks }) @@ -507,20 +507,20 @@ describe('ChatClient', () => { await client.sendMessage('Hello') - // Find the messageAppended event for the assistant message - const messageAppendedCalls = emitSpy.mock.calls.filter( - ([eventName]) => eventName === 'client:message-appended', + // Find the message-created event for the assistant message + const messageCreatedCalls = emitSpy.mock.calls.filter( + ([eventName]) => eventName === 'text:message:created', ) // Should have at least one call for the assistant message - const assistantAppendedCall = messageAppendedCalls.find(([, data]) => 
{ - const payload = data as Record + const assistantCreatedCall = messageCreatedCalls.find(([, data]) => { + const payload = data as any return payload && payload.role === 'assistant' }) - expect(assistantAppendedCall).toBeDefined() + expect(assistantCreatedCall).toBeDefined() }) - it('should emit textUpdated events during streaming', async () => { + it('should emit text:chunk:content events during streaming', async () => { const chunks = createTextChunks('Hello, world!') const adapter = createMockConnectionAdapter({ chunks }) @@ -533,14 +533,14 @@ describe('ChatClient', () => { // Find text-updated events const textUpdatedCalls = emitSpy.mock.calls.filter( - ([eventName]) => eventName === 'client:assistant-message-updated', + ([eventName]) => eventName === 'text:chunk:content', ) // Should have text update events expect(textUpdatedCalls.length).toBeGreaterThan(0) }) - it('should emit toolCallStateChanged events for tool calls', async () => { + it('should emit tools:call:updated events for tool calls', async () => { const chunks = createToolCallChunks([ { id: 'tool-1', name: 'getWeather', arguments: '{"city": "NYC"}' }, ]) @@ -555,14 +555,14 @@ describe('ChatClient', () => { // Find tool call events const toolCallUpdatedCalls = emitSpy.mock.calls.filter( - ([eventName]) => eventName === 'client:tool-call-updated', + ([eventName]) => eventName === 'tools:call:updated', ) // Should have tool call events expect(toolCallUpdatedCalls.length).toBeGreaterThan(0) }) - it('should emit thinkingUpdated events for thinking content', async () => { + it('should emit text:chunk:thinking events for thinking content', async () => { const chunks = createThinkingChunks( 'Let me think...', 'Here is my answer', @@ -578,7 +578,7 @@ describe('ChatClient', () => { // Find thinking events const thinkingCalls = emitSpy.mock.calls.filter( - ([eventName]) => eventName === 'stream:chunk:thinking', + ([eventName]) => eventName === 'text:chunk:thinking', ) // Should have thinking events diff 
--git a/packages/typescript/ai-client/tests/events.test.ts b/packages/typescript/ai-client/tests/events.test.ts index 12df82d5..9de49dd6 100644 --- a/packages/typescript/ai-client/tests/events.test.ts +++ b/packages/typescript/ai-client/tests/events.test.ts @@ -28,69 +28,61 @@ describe('events', () => { expect(aiEventClient.emit).toHaveBeenCalledWith('client:created', { initialMessageCount: 5, clientId: 'test-client-id', + source: 'client', timestamp: expect.any(Number), }) }) - it('should emit client:loading-changed event', () => { + it('should emit client:loading:changed event', () => { emitter.loadingChanged(true) expect(aiEventClient.emit).toHaveBeenCalledWith( - 'client:loading-changed', + 'client:loading:changed', { isLoading: true, clientId: 'test-client-id', + source: 'client', timestamp: expect.any(Number), }, ) }) - it('should emit client:error-changed event with null', () => { + it('should emit client:error:changed event with null', () => { emitter.errorChanged(null) - expect(aiEventClient.emit).toHaveBeenCalledWith('client:error-changed', { + expect(aiEventClient.emit).toHaveBeenCalledWith('client:error:changed', { error: null, clientId: 'test-client-id', + source: 'client', timestamp: expect.any(Number), }) }) - it('should emit client:error-changed event with error string', () => { + it('should emit client:error:changed event with error string', () => { emitter.errorChanged('Something went wrong') - expect(aiEventClient.emit).toHaveBeenCalledWith('client:error-changed', { + expect(aiEventClient.emit).toHaveBeenCalledWith('client:error:changed', { error: 'Something went wrong', clientId: 'test-client-id', + source: 'client', timestamp: expect.any(Number), }) }) - it('should emit processor:text-updated and client:assistant-message-updated', () => { + it('should emit text:chunk:content event for text updates', () => { emitter.textUpdated('stream-1', 'msg-1', 'Hello world') - expect(aiEventClient.emit).toHaveBeenCalledTimes(2) - 
expect(aiEventClient.emit).toHaveBeenNthCalledWith( - 1, - 'processor:text-updated', - { - streamId: 'stream-1', - content: 'Hello world', - timestamp: expect.any(Number), - }, - ) - expect(aiEventClient.emit).toHaveBeenNthCalledWith( - 2, - 'client:assistant-message-updated', - { - messageId: 'msg-1', - content: 'Hello world', - clientId: 'test-client-id', - timestamp: expect.any(Number), - }, - ) + expect(aiEventClient.emit).toHaveBeenCalledWith('text:chunk:content', { + streamId: 'stream-1', + messageId: 'msg-1', + content: 'Hello world', + clientId: 'test-client-id', + source: 'client', + timestamp: expect.any(Number), + }) }) - it('should emit processor:tool-call-state-changed and client:tool-call-updated', () => { + it('should emit tools:call:updated event', () => { emitter.toolCallStateChanged( 'stream-1', 'msg-1', @@ -100,78 +92,22 @@ describe('events', () => { '{"city": "NYC"}', ) - expect(aiEventClient.emit).toHaveBeenCalledTimes(2) - expect(aiEventClient.emit).toHaveBeenNthCalledWith( - 1, - 'processor:tool-call-state-changed', - { - streamId: 'stream-1', - toolCallId: 'call-1', - toolName: 'get_weather', - state: 'input-complete', - arguments: '{"city": "NYC"}', - timestamp: expect.any(Number), - }, - ) - expect(aiEventClient.emit).toHaveBeenNthCalledWith( - 2, - 'client:tool-call-updated', - { - messageId: 'msg-1', - toolCallId: 'call-1', - toolName: 'get_weather', - state: 'input-complete', - arguments: '{"city": "NYC"}', - clientId: 'test-client-id', - timestamp: expect.any(Number), - }, - ) - }) - - it('should emit processor:tool-result-state-changed event', () => { - emitter.toolResultStateChanged( - 'stream-1', - 'call-1', - 'Result content', - 'complete', - ) - - expect(aiEventClient.emit).toHaveBeenCalledWith( - 'processor:tool-result-state-changed', - { - streamId: 'stream-1', - toolCallId: 'call-1', - content: 'Result content', - state: 'complete', - timestamp: expect.any(Number), - }, - ) - }) - - it('should emit 
processor:tool-result-state-changed with error', () => { - emitter.toolResultStateChanged( - 'stream-1', - 'call-1', - 'Error occurred', - 'error', - 'Something failed', - ) - - expect(aiEventClient.emit).toHaveBeenCalledWith( - 'processor:tool-result-state-changed', - { - streamId: 'stream-1', - toolCallId: 'call-1', - content: 'Error occurred', - state: 'error', - error: 'Something failed', - timestamp: expect.any(Number), - }, - ) + expect(aiEventClient.emit).toHaveBeenCalledWith('tools:call:updated', { + streamId: 'stream-1', + messageId: 'msg-1', + toolCallId: 'call-1', + toolName: 'get_weather', + state: 'input-complete', + arguments: '{"city": "NYC"}', + clientId: 'test-client-id', + source: 'client', + timestamp: expect.any(Number), + }) }) - it('should emit client:approval-requested event', () => { + it('should emit tools:approval:requested event', () => { emitter.approvalRequested( + 'stream-1', 'msg-1', 'call-1', 'get_weather', @@ -180,20 +116,22 @@ describe('events', () => { ) expect(aiEventClient.emit).toHaveBeenCalledWith( - 'client:approval-requested', + 'tools:approval:requested', { + streamId: 'stream-1', messageId: 'msg-1', toolCallId: 'call-1', toolName: 'get_weather', input: { city: 'NYC' }, approvalId: 'approval-1', clientId: 'test-client-id', + source: 'client', timestamp: expect.any(Number), }, ) }) - it('should emit client:message-appended with content preview', () => { + it('should emit text:message:created with full content', () => { const uiMessage: UIMessage = { id: 'msg-1', role: 'user', @@ -206,31 +144,16 @@ describe('events', () => { emitter.messageAppended(uiMessage) - expect(aiEventClient.emit).toHaveBeenCalledWith( - 'client:message-appended', - { - messageId: 'msg-1', - role: 'user', - contentPreview: 'Hello World', - clientId: 'test-client-id', - timestamp: expect.any(Number), - }, - ) - }) - - it('should truncate content preview to 100 characters', () => { - const longContent = 'a'.repeat(150) - const uiMessage: UIMessage = { - 
id: 'msg-1', + expect(aiEventClient.emit).toHaveBeenCalledWith('text:message:created', { + streamId: undefined, + messageId: 'msg-1', role: 'user', - parts: [{ type: 'text', content: longContent }], - createdAt: new Date(), - } - - emitter.messageAppended(uiMessage) - - const call = (aiEventClient.emit as any).mock.calls[0] - expect(call[1].contentPreview).toHaveLength(100) + content: 'Hello World', + parts: uiMessage.parts, + clientId: 'test-client-id', + source: 'client', + timestamp: expect.any(Number), + }) }) it('should handle message with no text parts', () => { @@ -251,27 +174,46 @@ describe('events', () => { emitter.messageAppended(uiMessage) - expect(aiEventClient.emit).toHaveBeenCalledWith( - 'client:message-appended', + expect(aiEventClient.emit).toHaveBeenCalledWith('text:message:created', { + streamId: undefined, + messageId: 'msg-1', + role: 'assistant', + content: '', + parts: uiMessage.parts, + clientId: 'test-client-id', + source: 'client', + timestamp: expect.any(Number), + }) + }) + + it('should emit text:message:created and text:message:user for sent messages', () => { + emitter.messageSent('msg-1', 'Hello world') + + expect(aiEventClient.emit).toHaveBeenCalledTimes(2) + expect(aiEventClient.emit).toHaveBeenNthCalledWith( + 1, + 'text:message:created', { messageId: 'msg-1', - role: 'assistant', - contentPreview: '', + role: 'user', + content: 'Hello world', clientId: 'test-client-id', + source: 'client', + timestamp: expect.any(Number), + }, + ) + expect(aiEventClient.emit).toHaveBeenNthCalledWith( + 2, + 'text:message:user', + { + messageId: 'msg-1', + role: 'user', + content: 'Hello world', + clientId: 'test-client-id', + source: 'client', timestamp: expect.any(Number), }, ) - }) - - it('should emit client:message-sent event', () => { - emitter.messageSent('msg-1', 'Hello world') - - expect(aiEventClient.emit).toHaveBeenCalledWith('client:message-sent', { - messageId: 'msg-1', - content: 'Hello world', - clientId: 'test-client-id', - 
timestamp: expect.any(Number), - }) }) it('should emit client:reloaded event', () => { @@ -280,6 +222,7 @@ describe('events', () => { expect(aiEventClient.emit).toHaveBeenCalledWith('client:reloaded', { fromMessageIndex: 3, clientId: 'test-client-id', + source: 'client', timestamp: expect.any(Number), }) }) @@ -289,23 +232,25 @@ describe('events', () => { expect(aiEventClient.emit).toHaveBeenCalledWith('client:stopped', { clientId: 'test-client-id', + source: 'client', timestamp: expect.any(Number), }) }) - it('should emit client:messages-cleared event', () => { + it('should emit client:messages:cleared event', () => { emitter.messagesCleared() expect(aiEventClient.emit).toHaveBeenCalledWith( - 'client:messages-cleared', + 'client:messages:cleared', { clientId: 'test-client-id', + source: 'client', timestamp: expect.any(Number), }, ) }) - it('should emit tool:result-added event', () => { + it('should emit tools:result:added event', () => { emitter.toolResultAdded( 'call-1', 'get_weather', @@ -313,26 +258,28 @@ describe('events', () => { 'output-available', ) - expect(aiEventClient.emit).toHaveBeenCalledWith('tool:result-added', { + expect(aiEventClient.emit).toHaveBeenCalledWith('tools:result:added', { toolCallId: 'call-1', toolName: 'get_weather', output: { temp: 72 }, state: 'output-available', clientId: 'test-client-id', + source: 'client', timestamp: expect.any(Number), }) }) - it('should emit tool:approval-responded event', () => { + it('should emit tools:approval:responded event', () => { emitter.toolApprovalResponded('approval-1', 'call-1', true) expect(aiEventClient.emit).toHaveBeenCalledWith( - 'tool:approval-responded', + 'tools:approval:responded', { approvalId: 'approval-1', toolCallId: 'call-1', approved: true, clientId: 'test-client-id', + source: 'client', timestamp: expect.any(Number), }, ) diff --git a/packages/typescript/ai-devtools/src/components/ConversationDetails.tsx b/packages/typescript/ai-devtools/src/components/ConversationDetails.tsx 
index 333bc56f..a465d542 100644 --- a/packages/typescript/ai-devtools/src/components/ConversationDetails.tsx +++ b/packages/typescript/ai-devtools/src/components/ConversationDetails.tsx @@ -2,6 +2,7 @@ import { Show, createEffect, createSignal } from 'solid-js' import { useStyles } from '../styles/use-styles' import { useAIStore } from '../store/ai-context' import { + ActivityEventsTab, ChunksTab, ConversationHeader, ConversationTabs, @@ -35,12 +36,56 @@ export const ConversationDetails: Component = () => { (conv.summaries && conv.summaries.length > 0) ) { setActiveTab('summaries') + } else if ( + conv.hasImage || + (conv.imageEvents && conv.imageEvents.length > 0) + ) { + setActiveTab('image') + } else if ( + conv.hasSpeech || + (conv.speechEvents && conv.speechEvents.length > 0) + ) { + setActiveTab('speech') + } else if ( + conv.hasTranscription || + (conv.transcriptionEvents && conv.transcriptionEvents.length > 0) + ) { + setActiveTab('transcription') + } else if ( + conv.hasVideo || + (conv.videoEvents && conv.videoEvents.length > 0) + ) { + setActiveTab('video') } else { setActiveTab('chunks') } } else { // For client conversations, default to messages tab - setActiveTab('messages') + if (conv.messages.length > 0) { + setActiveTab('messages') + } else if ( + conv.hasImage || + (conv.imageEvents && conv.imageEvents.length > 0) + ) { + setActiveTab('image') + } else if ( + conv.hasSpeech || + (conv.speechEvents && conv.speechEvents.length > 0) + ) { + setActiveTab('speech') + } else if ( + conv.hasTranscription || + (conv.transcriptionEvents && conv.transcriptionEvents.length > 0) + ) { + setActiveTab('transcription') + } else if ( + conv.hasVideo || + (conv.videoEvents && conv.videoEvents.length > 0) + ) { + setActiveTab('video') + } else { + setActiveTab('messages') + } } } }) @@ -72,6 +117,30 @@ export const ConversationDetails: Component = () => { + + + + + + + + + + + + )} diff --git 
a/packages/typescript/ai-devtools/src/components/conversation/ActivityEventsTab.tsx b/packages/typescript/ai-devtools/src/components/conversation/ActivityEventsTab.tsx new file mode 100644 index 00000000..f6018dd0 --- /dev/null +++ b/packages/typescript/ai-devtools/src/components/conversation/ActivityEventsTab.tsx @@ -0,0 +1,57 @@ +import { For, Show } from 'solid-js' +import { useStyles } from '../../styles/use-styles' +import type { Component } from 'solid-js' +import type { ActivityEvent } from '../../store/ai-context' + +interface ActivityEventsTabProps { + title: string + events: Array +} + +export const ActivityEventsTab: Component = (props) => { + const styles = useStyles() + + const formattedTimestamp = (timestamp: number) => + new Date(timestamp).toLocaleTimeString() + + return ( + 0} + fallback={ +
No events yet
+ } + > +
+
+
+
+ {props.title} +
+
+ {props.events.length} events +
+
+
+ Activity event log +
+
+
+ + {(event) => ( +
+
+ {event.name} ยท {formattedTimestamp(event.timestamp)} +
+
+                  {JSON.stringify(event.payload, null, 2)}
+                
+
+ )} +
+
+
+
+ ) +} diff --git a/packages/typescript/ai-devtools/src/components/conversation/ConversationHeader.tsx b/packages/typescript/ai-devtools/src/components/conversation/ConversationHeader.tsx index 7623bc31..24d40a1e 100644 --- a/packages/typescript/ai-devtools/src/components/conversation/ConversationHeader.tsx +++ b/packages/typescript/ai-devtools/src/components/conversation/ConversationHeader.tsx @@ -1,4 +1,5 @@ import { For, Show } from 'solid-js' +import { JsonTree } from '@tanstack/devtools-ui' import { useStyles } from '../../styles/use-styles' import { formatDuration } from '../utils' import type { Component } from 'solid-js' @@ -16,8 +17,9 @@ export const ConversationHeader: Component = ( const toolNames = () => conv().toolNames ?? [] const options = () => conv().options - const providerOptions = () => conv().providerOptions + const modelOptions = () => conv().modelOptions const iterationCount = () => conv().iterationCount + const systemPrompts = () => conv().systemPrompts ?? [] return (
@@ -49,24 +51,6 @@ export const ConversationHeader: Component = ( {conv().completedAt && ` โ€ข Duration: ${formatDuration(conv().completedAt! - conv().startedAt)}`}
- -
- - ๐ŸŽฏ Tokens: - - - Prompt: {conv().usage?.promptTokens.toLocaleString() || 0} - - โ€ข - - Completion: {conv().usage?.completionTokens.toLocaleString() || 0} - - โ€ข - - Total: {conv().usage?.totalTokens.toLocaleString() || 0} - -
-
{/* Tools list - always visible */} 0}>
@@ -102,27 +86,59 @@ export const ConversationHeader: Component = (
- {/* Provider Options - always visible in compact form */} - 0} - > -
- - ๐Ÿท๏ธ Provider: + +
+ + ๐ŸŽฏ Tokens: -
- - {([key, value]) => ( - - {key}:{' '} - {typeof value === 'object' - ? JSON.stringify(value) - : String(value)} - + + Prompt: {conv().usage?.promptTokens.toLocaleString() || 0} + + โ€ข + + Completion: {conv().usage?.completionTokens.toLocaleString() || 0} + + โ€ข + + Total: {conv().usage?.totalTokens.toLocaleString() || 0} + +
+ + {/* Model options - collapsible */} + 0}> +
+ + ๐Ÿงช Model options + +
+ } + defaultExpansionDepth={2} + /> +
+
+
+ {/* System prompts - collapsible */} + 0}> +
+ + ๐Ÿงฉ System prompts ({systemPrompts().length}) + +
+ + {(prompt, index) => ( +
+
+ #{index() + 1} +
+
+ {prompt} +
+
)}
-
+
diff --git a/packages/typescript/ai-devtools/src/components/conversation/ConversationTabs.tsx b/packages/typescript/ai-devtools/src/components/conversation/ConversationTabs.tsx index fe6eeec7..b69206b1 100644 --- a/packages/typescript/ai-devtools/src/components/conversation/ConversationTabs.tsx +++ b/packages/typescript/ai-devtools/src/components/conversation/ConversationTabs.tsx @@ -1,9 +1,16 @@ -import { Show } from 'solid-js' +import { Show, createEffect, createSignal } from 'solid-js' import { useStyles } from '../../styles/use-styles' import type { Component } from 'solid-js' import type { Conversation } from '../../store/ai-context' -export type TabType = 'messages' | 'chunks' | 'summaries' +export type TabType = + | 'messages' + | 'chunks' + | 'summaries' + | 'image' + | 'speech' + | 'transcription' + | 'video' interface ConversationTabsProps { conversation: Conversation @@ -20,6 +27,57 @@ export const ConversationTabs: Component = (props) => { conv().chunks.reduce((sum, c) => sum + (c.chunkCount || 1), 0) const summariesCount = () => conv().summaries?.length ?? 0 + const imageCount = () => conv().imageEvents?.length ?? 0 + const speechCount = () => conv().speechEvents?.length ?? 0 + const transcriptionCount = () => conv().transcriptionEvents?.length ?? 0 + const videoCount = () => conv().videoEvents?.length ?? 
0 + + const [imagePulse, setImagePulse] = createSignal(false) + const [speechPulse, setSpeechPulse] = createSignal(false) + const [transcriptionPulse, setTranscriptionPulse] = createSignal(false) + const [videoPulse, setVideoPulse] = createSignal(false) + + const triggerPulse = (setter: (value: boolean) => void) => { + setter(true) + setTimeout(() => setter(false), 2000) + } + + let previousImageCount = 0 + let previousSpeechCount = 0 + let previousTranscriptionCount = 0 + let previousVideoCount = 0 + + createEffect(() => { + const count = imageCount() + if (count > 0 && previousImageCount === 0) { + triggerPulse(setImagePulse) + } + previousImageCount = count + }) + + createEffect(() => { + const count = speechCount() + if (count > 0 && previousSpeechCount === 0) { + triggerPulse(setSpeechPulse) + } + previousSpeechCount = count + }) + + createEffect(() => { + const count = transcriptionCount() + if (count > 0 && previousTranscriptionCount === 0) { + triggerPulse(setTranscriptionPulse) + } + previousTranscriptionCount = count + }) + + createEffect(() => { + const count = videoCount() + if (count > 0 && previousVideoCount === 0) { + triggerPulse(setVideoPulse) + } + previousVideoCount = count + }) // Determine if we should show any chat-related tabs // For server conversations, don't show messages tab - only chunks @@ -27,6 +85,11 @@ export const ConversationTabs: Component = (props) => { conv().type === 'client' && conv().messages.length > 0 const hasChunks = () => conv().chunks.length > 0 || conv().type === 'server' const hasSummaries = () => conv().hasSummarize || summariesCount() > 0 + const hasImage = () => conv().hasImage || imageCount() > 0 + const hasSpeech = () => conv().hasSpeech || speechCount() > 0 + const hasTranscription = () => + conv().hasTranscription || transcriptionCount() > 0 + const hasVideo = () => conv().hasVideo || videoCount() > 0 // Count how many tabs would be visible const visibleTabCount = () => { @@ -34,6 +97,10 @@ export const 
ConversationTabs: Component = (props) => { if (hasMessages()) count++ if (hasChunks() && conv().type === 'server') count++ if (hasSummaries()) count++ + if (hasImage()) count++ + if (hasSpeech()) count++ + if (hasTranscription()) count++ + if (hasVideo()) count++ return count } @@ -83,6 +150,58 @@ export const ConversationTabs: Component = (props) => { ๐Ÿ“ Summaries ({summariesCount()})
+ + + + + + + + + + + + ) } diff --git a/packages/typescript/ai-devtools/src/components/conversation/MessageCard.tsx b/packages/typescript/ai-devtools/src/components/conversation/MessageCard.tsx index da3a9418..55019538 100644 --- a/packages/typescript/ai-devtools/src/components/conversation/MessageCard.tsx +++ b/packages/typescript/ai-devtools/src/components/conversation/MessageCard.tsx @@ -1,6 +1,7 @@ import { For, Show } from 'solid-js' +import { JsonTree } from '@tanstack/devtools-ui' import { useStyles } from '../../styles/use-styles' -import { formatTimestamp } from '../utils' +import { formatDuration, formatTimestamp } from '../utils' import { ToolCallDisplay } from './ToolCallDisplay' import { ChunksCollapsible } from './ChunksCollapsible' import type { Message } from '../../store/ai-store' @@ -31,13 +32,30 @@ export const MessageCard: Component = (props) => { return `${base} ${styles().conversationDetails.messageCardServer}` } - // Check if this is a non-user message (needs source banner and content wrapper) - const isSourcedMessage = () => msg().role !== 'user' + const parseJsonContent = () => { + const content = msg().content + if (typeof content !== 'string') return null + const trimmed = content.trim() + if (!trimmed.startsWith('{') && !trimmed.startsWith('[')) return null + try { + return JSON.parse(trimmed) as Record | Array + } catch { + return null + } + } + + const toolDuration = () => { + const durations = msg() + .toolCalls?.map((tool) => tool.duration || 0) + .filter((duration) => duration > 0) + if (!durations || durations.length === 0) return 0 + return durations.reduce((total, duration) => total + duration, 0) + } return (
{/* Source indicator banner at top of card */} - +
= (props) => { {/* Content wrapper with padding */}
@@ -100,6 +118,14 @@ export const MessageCard: Component = (props) => { {msg().usage?.completionTokens.toLocaleString()} out
+ 0}> +
+ + โฑ๏ธ + + {formatDuration(toolDuration())} +
+
{/* Thinking content (for extended thinking models) */} @@ -114,9 +140,18 @@ export const MessageCard: Component = (props) => { -
- {msg().content} -
+ + {msg().content} +
+ } + > +
+ +
+
{/* Tool Calls Display */} 0}> diff --git a/packages/typescript/ai-devtools/src/components/conversation/index.ts b/packages/typescript/ai-devtools/src/components/conversation/index.ts index 12d334e2..2a0c9177 100644 --- a/packages/typescript/ai-devtools/src/components/conversation/index.ts +++ b/packages/typescript/ai-devtools/src/components/conversation/index.ts @@ -5,3 +5,4 @@ export type { TabType } from './ConversationTabs' export { MessagesTab } from './MessagesTab' export { ChunksTab } from './ChunksTab' export { SummariesTab } from './SummariesTab' +export { ActivityEventsTab } from './ActivityEventsTab' diff --git a/packages/typescript/ai-devtools/src/store/ai-context.tsx b/packages/typescript/ai-devtools/src/store/ai-context.tsx index 8aa0ccdb..bc43d7f3 100644 --- a/packages/typescript/ai-devtools/src/store/ai-context.tsx +++ b/packages/typescript/ai-devtools/src/store/ai-context.tsx @@ -4,7 +4,7 @@ import { aiEventClient } from '@tanstack/ai/event-client' import type { ParentComponent } from 'solid-js' interface MessagePart { - type: 'text' | 'tool-call' | 'tool-result' + type: 'text' | 'tool-call' | 'tool-result' | 'thinking' content?: string toolCallId?: string toolName?: string @@ -84,6 +84,8 @@ export interface Chunk { duration?: number /** Number of raw chunks that were merged into this consolidated chunk */ chunkCount: number + /** Whether this is a client-side tool execution */ + isClientTool?: boolean } export interface SummarizeOperation { @@ -96,6 +98,13 @@ export interface SummarizeOperation { status: 'started' | 'completed' } +export interface ActivityEvent { + id: string + name: string + timestamp: number + payload: unknown +} + export interface Conversation { id: string type: 'client' | 'server' @@ -111,12 +120,21 @@ export interface Conversation { iterationCount?: number toolNames?: Array options?: Record - providerOptions?: Record + modelOptions?: Record + systemPrompts?: Array /** Flags for which operation types this conversation has */ 
hasChat?: boolean hasSummarize?: boolean + hasImage?: boolean + hasSpeech?: boolean + hasTranscription?: boolean + hasVideo?: boolean /** Summarize operations in this conversation */ summaries?: Array + imageEvents?: Array + speechEvents?: Array + transcriptionEvents?: Array + videoEvents?: Array } interface AIStoreState { @@ -336,6 +354,10 @@ export const AIProvider: ParentComponent = (props) => { label, messages: [], chunks: [], + imageEvents: [], + speechEvents: [], + transcriptionEvents: [], + videoEvents: [], status: 'active', startedAt: Date.now(), }) @@ -398,6 +420,9 @@ export const AIProvider: ParentComponent = (props) => { return } } + + // Fallback: add to conversation's main chunks array if no assistant message found + addChunk(conversationId, chunk) } function updateMessageUsage( @@ -553,6 +578,38 @@ export const AIProvider: ParentComponent = (props) => { queueChunk(conversationId, chunk) } + function addActivityEvent( + conversationId: string, + activity: + | 'imageEvents' + | 'speechEvents' + | 'transcriptionEvents' + | 'videoEvents', + event: ActivityEvent, + ): void { + if (!state.conversations[conversationId]) return + + setState( + 'conversations', + conversationId, + activity, + produce((events: Array | undefined) => { + if (!events) return [event] + events.push(event) + return events + }), + ) + + const activityFlagMap: Record = { + imageEvents: 'hasImage', + speechEvents: 'hasSpeech', + transcriptionEvents: 'hasTranscription', + videoEvents: 'hasVideo', + } + + setState('conversations', conversationId, activityFlagMap[activity], true) + } + /** * For server conversations, ensure a message exists for the given messageId. * This creates a placeholder message that will be updated as chunks arrive. 
@@ -598,58 +655,146 @@ export const AIProvider: ParentComponent = (props) => { }), ) + // ============= Message Events ============= + cleanupFns.push( - aiEventClient.on('client:message-sent', (e) => { - const clientId = e.payload.clientId - if (!state.conversations[clientId]) { + aiEventClient.on('text:message:created', (e) => { + const { clientId, streamId, messageId, role, content, timestamp } = + e.payload + const conversationId = + clientId || + (streamId ? streamToConversation.get(streamId) : undefined) + + if (!conversationId) return + if (role === 'tool' || role === 'system') return + + if (!state.conversations[conversationId]) { getOrCreateConversation( - clientId, - 'client', - `Client Chat (${clientId.substring(0, 8)})`, + conversationId, + clientId ? 'client' : 'server', + clientId + ? `Client Chat (${conversationId.substring(0, 8)})` + : `Server Chat (${conversationId.substring(0, 8)})`, ) } - addMessage(clientId, { - id: e.payload.messageId, - role: 'user', - content: e.payload.content, - timestamp: e.payload.timestamp, - source: 'client', + + const conv = state.conversations[conversationId] + if (!conv) return + + const existingIndex = conv.messages.findIndex( + (message) => message.id === messageId, + ) + + const parts = e.payload.parts?.map((part) => { + if (part.type === 'text') { + return { type: 'text', content: part.content } satisfies MessagePart + } + if (part.type === 'tool-call') { + return { + type: 'tool-call', + toolCallId: part.id, + toolName: part.name, + arguments: part.arguments, + state: part.state, + output: part.output, + content: part.approval + ? 
JSON.stringify(part.approval) + : undefined, + } satisfies MessagePart + } + if (part.type === 'tool-result') { + return { + type: 'tool-result', + toolCallId: part.toolCallId, + content: part.content, + state: part.state, + error: part.error, + } satisfies MessagePart + } + return { + type: 'thinking', + content: part.content, + } satisfies MessagePart }) - updateConversation(clientId, { status: 'active' }) + + const toolCalls = e.payload.toolCalls?.map((toolCall) => ({ + id: toolCall.id, + name: toolCall.function.name, + arguments: toolCall.function.arguments, + state: 'input-complete', + })) + + const source = e.payload.source ?? (clientId ? 'client' : 'server') + + if (role === 'user' && conv.type === 'client' && source === 'server') { + return + } + + // Skip empty assistant messages from client - the real content comes from server + if ( + role === 'assistant' && + source === 'client' && + !content && + (!toolCalls || toolCalls.length === 0) + ) { + return + } + + const messagePayload: Message = { + id: messageId, + role, + content, + timestamp, + parts, + toolCalls, + source, + } + + if (existingIndex >= 0) { + updateMessage(conversationId, existingIndex, messagePayload) + } else { + addMessage(conversationId, messagePayload) + } + + updateConversation(conversationId, { status: 'active', hasChat: true }) }), ) cleanupFns.push( - aiEventClient.on('client:message-appended', (e) => { - const clientId = e.payload.clientId - const role = e.payload.role + aiEventClient.on('text:message:user', (e) => { + const { clientId, streamId, messageId, content, timestamp } = e.payload + const conversationId = + clientId || + (streamId ? 
streamToConversation.get(streamId) : undefined) + if (!conversationId) return - if (role === 'user') return - if (!state.conversations[clientId]) return + const conv = state.conversations[conversationId] + if (!conv) return - if (role === 'assistant') { - addMessage(clientId, { - id: e.payload.messageId, - role: 'assistant', - content: e.payload.contentPreview, - timestamp: e.payload.timestamp, - source: 'client', - }) - } else if (role === 'tool') { - // Tool result message from the LLM - addMessage(clientId, { - id: e.payload.messageId, - role: 'tool', - content: e.payload.contentPreview, - timestamp: e.payload.timestamp, - source: 'client', - }) + const existingIndex = conv.messages.findIndex( + (message) => message.id === messageId, + ) + + if (existingIndex >= 0) return + + const source = e.payload.source ?? (clientId ? 'client' : 'server') + + if (conv.type === 'client' && source === 'server') { + return } + + addMessage(conversationId, { + id: messageId, + role: 'user', + content, + timestamp, + source, + }) }), ) cleanupFns.push( - aiEventClient.on('client:loading-changed', (e) => { + aiEventClient.on('client:loading:changed', (e) => { const clientId = e.payload.clientId if (state.conversations[clientId]) { updateConversation(clientId, { @@ -672,7 +817,7 @@ export const AIProvider: ParentComponent = (props) => { ) cleanupFns.push( - aiEventClient.on('client:messages-cleared', (e) => { + aiEventClient.on('client:messages:cleared', (e) => { const clientId = e.payload.clientId if (state.conversations[clientId]) { updateConversation(clientId, { @@ -698,7 +843,7 @@ export const AIProvider: ParentComponent = (props) => { ) cleanupFns.push( - aiEventClient.on('client:error-changed', (e) => { + aiEventClient.on('client:error:changed', (e) => { const clientId = e.payload.clientId if (state.conversations[clientId] && e.payload.error) { updateConversation(clientId, { status: 'error' }) @@ -706,112 +851,10 @@ export const AIProvider: ParentComponent = (props) => { }), 
) - cleanupFns.push( - aiEventClient.on('client:assistant-message-updated', (e) => { - const clientId = e.payload.clientId - const messageId = e.payload.messageId - const content = e.payload.content - - if (!state.conversations[clientId]) return - - const conv = state.conversations[clientId] - - // Find message by ID anywhere in the list, not just the last one - const messageIndex = conv.messages.findIndex( - (m: Message) => m.id === messageId, - ) - - // Only update existing messages, don't create new ones - // (client:message-appended is responsible for creating messages) - if (messageIndex >= 0) { - updateMessage(clientId, messageIndex, { - content, - model: conv.model, - }) - } - }), - ) - - cleanupFns.push( - aiEventClient.on('client:tool-call-updated', (e) => { - const { - clientId, - messageId, - toolCallId, - toolName, - state: toolCallState, - arguments: args, - } = e.payload as { - clientId: string - messageId: string - toolCallId: string - toolName: string - state: string - arguments: unknown - timestamp: number - } - - if (!state.conversations[clientId]) return - - const conv = state.conversations[clientId] - const messageIndex = conv.messages.findIndex( - (m: Message) => m.id === messageId, - ) - if (messageIndex === -1) return - - const message = conv.messages[messageIndex] - if (!message) return - - const toolCalls = message.toolCalls || [] - const existingToolIndex = toolCalls.findIndex( - (t: ToolCall) => t.id === toolCallId, - ) - - const toolCall: ToolCall = { - id: toolCallId, - name: toolName, - arguments: JSON.stringify(args, null, 2), - state: toolCallState, - } - - if (existingToolIndex >= 0) { - updateToolCall(clientId, messageIndex, existingToolIndex, toolCall) - } else { - setToolCalls(clientId, messageIndex, [...toolCalls, toolCall]) - } - }), - ) - - cleanupFns.push( - aiEventClient.on('client:approval-requested', (e) => { - const { clientId, messageId, toolCallId, approvalId } = e.payload - - if (!state.conversations[clientId]) return 
- - const conv = state.conversations[clientId] - const messageIndex = conv.messages.findIndex((m) => m.id === messageId) - if (messageIndex === -1) return - - const message = conv.messages[messageIndex] - if (!message?.toolCalls) return - - const toolCallIndex = message.toolCalls.findIndex( - (t) => t.id === toolCallId, - ) - if (toolCallIndex === -1) return - - updateToolCall(clientId, messageIndex, toolCallIndex, { - approvalRequired: true, - approvalId, - state: 'approval-requested', - }) - }), - ) - // ============= Tool Events ============= cleanupFns.push( - aiEventClient.on('tool:result-added', (e) => { + aiEventClient.on('tools:result:added', (e) => { const { clientId, toolCallId, @@ -821,10 +864,23 @@ export const AIProvider: ParentComponent = (props) => { timestamp, } = e.payload + if (!clientId) return if (!state.conversations[clientId]) return const conv = state.conversations[clientId] + // Always create a chunk for the tool result + const chunk: Chunk = { + id: `chunk-tool-result-${toolCallId}-${Date.now()}`, + type: 'tool_result', + toolCallId, + toolName, + result: output, + timestamp, + chunkCount: 1, + isClientTool: true, + } + // Find the message with the tool call and update it for ( let messageIndex = conv.messages.length - 1; @@ -844,28 +900,25 @@ export const AIProvider: ParentComponent = (props) => { state: resultState === 'output-error' ? 
'error' : 'complete', }) - // Also add a chunk to show the tool result in the chunks view - const chunk: Chunk = { - id: `chunk-tool-result-${toolCallId}-${Date.now()}`, - type: 'tool_result', - messageId: message.id, - toolCallId, - toolName, - result: output, - timestamp, - chunkCount: 1, - } + // Add chunk with message ID + chunk.messageId = message.id addChunkToMessage(clientId, chunk) return } } + + // If we couldn't find the message with toolCalls, still add the chunk + // This handles cases where the message hasn't been processed yet + // or the tool call is a client-only tool + addChunkToMessage(clientId, chunk) }), ) cleanupFns.push( - aiEventClient.on('tool:approval-responded', (e) => { + aiEventClient.on('tools:approval:responded', (e) => { const { clientId, toolCallId, approved } = e.payload + if (!clientId) return if (!state.conversations[clientId]) return const conv = state.conversations[clientId] @@ -892,52 +945,82 @@ export const AIProvider: ParentComponent = (props) => { }), ) - // ============= Stream Events ============= - cleanupFns.push( - aiEventClient.on('stream:chunk:content', (e) => { - const streamId = e.payload.streamId - const conversationId = streamToConversation.get(streamId) - if (!conversationId) return + aiEventClient.on('tools:call:updated', (e) => { + const { + clientId, + streamId, + messageId, + toolCallId, + toolName, + state: toolCallState, + arguments: args, + } = e.payload - const chunk: Chunk = { - id: `chunk-${Date.now()}-${Math.random()}`, - type: 'content', - messageId: e.payload.messageId, - content: e.payload.content, - delta: e.payload.delta, - timestamp: e.payload.timestamp, - chunkCount: 1, - } + const conversationId = + clientId || + (streamId ? 
streamToConversation.get(streamId) : undefined) + if (!conversationId || !state.conversations[conversationId]) return const conv = state.conversations[conversationId] - if (conv?.type === 'client') { - addChunkToMessage(conversationId, chunk) - } else { - ensureMessageForChunk( + const messageIndex = conv.messages.findIndex( + (m: Message) => m.id === messageId, + ) + if (messageIndex === -1) return + + const message = conv.messages[messageIndex] + if (!message) return + + const toolCalls = message.toolCalls || [] + const existingToolIndex = toolCalls.findIndex( + (t: ToolCall) => t.id === toolCallId, + ) + + const toolCall: ToolCall = { + id: toolCallId, + name: toolName, + arguments: args, + state: toolCallState, + } + + if (existingToolIndex >= 0) { + updateToolCall( conversationId, - e.payload.messageId, - e.payload.timestamp, + messageIndex, + existingToolIndex, + toolCall, ) - addChunk(conversationId, chunk) + } else { + setToolCalls(conversationId, messageIndex, [...toolCalls, toolCall]) } }), ) cleanupFns.push( - aiEventClient.on('stream:chunk:tool-call', (e) => { - const streamId = e.payload.streamId - const conversationId = streamToConversation.get(streamId) + aiEventClient.on('tools:input:available', (e) => { + const { + streamId, + messageId, + toolCallId, + toolName, + input, + timestamp, + clientId, + } = e.payload + + const conversationId = + clientId || + (streamId ? 
streamToConversation.get(streamId) : undefined) if (!conversationId) return const chunk: Chunk = { id: `chunk-${Date.now()}-${Math.random()}`, type: 'tool_call', - messageId: e.payload.messageId, - toolCallId: e.payload.toolCallId, - toolName: e.payload.toolName, - arguments: e.payload.arguments, - timestamp: e.payload.timestamp, + messageId: messageId, + toolCallId, + toolName, + arguments: JSON.stringify(input), + timestamp, chunkCount: 1, } @@ -945,36 +1028,209 @@ export const AIProvider: ParentComponent = (props) => { if (conv?.type === 'client') { addChunkToMessage(conversationId, chunk) } else { - ensureMessageForChunk( - conversationId, - e.payload.messageId, - e.payload.timestamp, - ) addChunk(conversationId, chunk) } }), ) cleanupFns.push( - aiEventClient.on('stream:chunk:tool-result', (e) => { - const streamId = e.payload.streamId - const conversationId = streamToConversation.get(streamId) - if (!conversationId) return + aiEventClient.on('tools:call:completed', (e) => { + const { + streamId, + toolCallId, + toolName, + result, + duration, + messageId, + timestamp, + clientId, + } = e.payload - const chunk: Chunk = { - id: `chunk-${Date.now()}-${Math.random()}`, - type: 'tool_result', - messageId: e.payload.messageId, - toolCallId: e.payload.toolCallId, - content: e.payload.result, - timestamp: e.payload.timestamp, - chunkCount: 1, - } + const conversationId = + clientId || + (streamId ? 
streamToConversation.get(streamId) : undefined) + if (!conversationId || !state.conversations[conversationId]) return const conv = state.conversations[conversationId] - if (conv?.type === 'client') { - addChunkToMessage(conversationId, chunk) - } else { + + const chunk: Chunk = { + id: `chunk-tool-result-${toolCallId}-${Date.now()}`, + type: 'tool_result', + messageId: messageId, + toolCallId, + toolName, + result, + duration, + timestamp, + chunkCount: 1, + } + + if (conv.type === 'client' && messageId) { + const messageIndex = conv.messages.findIndex( + (m) => m.id === messageId, + ) + if (messageIndex !== -1) { + queueMessageChunk(conversationId, messageIndex, chunk) + } else { + for (let i = conv.messages.length - 1; i >= 0; i--) { + if (conv.messages[i]?.role === 'assistant') { + queueMessageChunk(conversationId, i, chunk) + break + } + } + } + } else { + addChunk(conversationId, chunk) + } + + for (let i = conv.messages.length - 1; i >= 0; i--) { + const message = conv.messages[i] + if (!message?.toolCalls) continue + + const toolCallIndex = message.toolCalls.findIndex( + (t) => t.id === toolCallId, + ) + if (toolCallIndex >= 0) { + updateToolCall(conversationId, i, toolCallIndex, { + duration, + result, + }) + return + } + } + }), + ) + + // ============= Stream Events ============= + + cleanupFns.push( + aiEventClient.on('text:chunk:content', (e) => { + const streamId = e.payload.streamId + const conversationId = streamToConversation.get(streamId) + if (!conversationId) return + + const chunk: Chunk = { + id: `chunk-${Date.now()}-${Math.random()}`, + type: 'content', + messageId: e.payload.messageId, + content: e.payload.content, + delta: e.payload.delta, + timestamp: e.payload.timestamp, + chunkCount: 1, + } + + const conv = state.conversations[conversationId] + if (conv?.type === 'client') { + addChunkToMessage(conversationId, chunk) + } else { + ensureMessageForChunk( + conversationId, + e.payload.messageId, + e.payload.timestamp, + ) + 
addChunk(conversationId, chunk) + } + + if (e.payload.messageId) { + const messageIndex = conv?.messages.findIndex( + (msg) => msg.id === e.payload.messageId, + ) + if (messageIndex !== undefined && messageIndex >= 0) { + updateMessage(conversationId, messageIndex, { + content: e.payload.content, + }) + } + } + }), + ) + + cleanupFns.push( + aiEventClient.on('text:chunk:tool-call', (e) => { + const streamId = e.payload.streamId + const conversationId = streamToConversation.get(streamId) + if (!conversationId) return + + const chunk: Chunk = { + id: `chunk-${Date.now()}-${Math.random()}`, + type: 'tool_call', + messageId: e.payload.messageId, + toolCallId: e.payload.toolCallId, + toolName: e.payload.toolName, + arguments: e.payload.arguments, + timestamp: e.payload.timestamp, + chunkCount: 1, + } + + const conv = state.conversations[conversationId] + if (conv?.type === 'client') { + addChunkToMessage(conversationId, chunk) + } else { + ensureMessageForChunk( + conversationId, + e.payload.messageId, + e.payload.timestamp, + ) + addChunk(conversationId, chunk) + } + + if (e.payload.messageId) { + const messageIndex = conv?.messages.findIndex( + (msg) => msg.id === e.payload.messageId, + ) + if (messageIndex !== undefined && messageIndex >= 0) { + const message = conv?.messages[messageIndex] + if (!message) return + + const toolCalls = message.toolCalls || [] + const existingToolIndex = toolCalls.findIndex( + (t: ToolCall) => t.id === e.payload.toolCallId, + ) + + const toolCall: ToolCall = { + id: e.payload.toolCallId, + name: e.payload.toolName, + arguments: e.payload.arguments, + state: 'input-streaming', + } + + if (existingToolIndex >= 0) { + updateToolCall( + conversationId, + messageIndex, + existingToolIndex, + toolCall, + ) + } else { + setToolCalls(conversationId, messageIndex, [ + ...toolCalls, + toolCall, + ]) + } + } + } + }), + ) + + cleanupFns.push( + aiEventClient.on('text:chunk:tool-result', (e) => { + const streamId = e.payload.streamId + const 
conversationId = streamToConversation.get(streamId) + if (!conversationId) return + + const chunk: Chunk = { + id: `chunk-${Date.now()}-${Math.random()}`, + type: 'tool_result', + messageId: e.payload.messageId, + toolCallId: e.payload.toolCallId, + result: e.payload.result, + timestamp: e.payload.timestamp, + chunkCount: 1, + } + + const conv = state.conversations[conversationId] + if (conv?.type === 'client') { + addChunkToMessage(conversationId, chunk) + } else { ensureMessageForChunk( conversationId, e.payload.messageId, @@ -1005,7 +1261,7 @@ export const AIProvider: ParentComponent = (props) => { ) cleanupFns.push( - aiEventClient.on('stream:chunk:thinking', (e) => { + aiEventClient.on('text:chunk:thinking', (e) => { const streamId = e.payload.streamId const conversationId = streamToConversation.get(streamId) if (!conversationId) return @@ -1046,7 +1302,7 @@ export const AIProvider: ParentComponent = (props) => { ) cleanupFns.push( - aiEventClient.on('stream:chunk:done', (e) => { + aiEventClient.on('text:chunk:done', (e) => { const streamId = e.payload.streamId const conversationId = streamToConversation.get(streamId) if (!conversationId) return @@ -1089,7 +1345,7 @@ export const AIProvider: ParentComponent = (props) => { ) cleanupFns.push( - aiEventClient.on('stream:chunk:error', (e) => { + aiEventClient.on('text:chunk:error', (e) => { const streamId = e.payload.streamId const conversationId = streamToConversation.get(streamId) if (!conversationId) return @@ -1123,20 +1379,7 @@ export const AIProvider: ParentComponent = (props) => { ) cleanupFns.push( - aiEventClient.on('stream:ended', (e) => { - const streamId = e.payload.streamId - const conversationId = streamToConversation.get(streamId) - if (!conversationId) return - - updateConversation(conversationId, { - status: 'completed', - completedAt: e.payload.timestamp, - }) - }), - ) - - cleanupFns.push( - aiEventClient.on('stream:approval-requested', (e) => { + aiEventClient.on('tools:approval:requested', (e) 
=> { const { streamId, messageId, @@ -1145,9 +1388,11 @@ export const AIProvider: ParentComponent = (props) => { input, approvalId, timestamp, + clientId, } = e.payload - const conversationId = streamToConversation.get(streamId) + const conversationId = + clientId || streamToConversation.get(streamId) || '' if (!conversationId) return const conv = state.conversations[conversationId] @@ -1192,112 +1437,10 @@ export const AIProvider: ParentComponent = (props) => { }), ) - // ============= Processor Events ============= - - cleanupFns.push( - aiEventClient.on('processor:text-updated', (e) => { - const streamId = e.payload.streamId - - let conversationId = streamToConversation.get(streamId) - - if (!conversationId) { - const activeClients = Object.values(state.conversations) - .filter((c) => c.type === 'client' && c.status === 'active') - .sort((a, b) => b.startedAt - a.startedAt) - - if (activeClients.length > 0 && activeClients[0]) { - conversationId = activeClients[0].id - streamToConversation.set(streamId, conversationId) - } - } - - if (!conversationId) return - - const conv = state.conversations[conversationId] - if (!conv) return - - // Only update existing assistant messages, don't create new ones - // (client:message-appended is responsible for creating messages) - const lastMessage = conv.messages[conv.messages.length - 1] - if (lastMessage && lastMessage.role === 'assistant') { - updateMessage(conversationId, conv.messages.length - 1, { - content: e.payload.content, - }) - } - }), - ) - - cleanupFns.push( - aiEventClient.on('processor:tool-call-state-changed', (e) => { - const streamId = e.payload.streamId - const conversationId = streamToConversation.get(streamId) - - if (!conversationId || !state.conversations[conversationId]) return - - const conv = state.conversations[conversationId] - const lastMessage = conv.messages[conv.messages.length - 1] - - if (lastMessage && lastMessage.role === 'assistant') { - const toolCalls = lastMessage.toolCalls || [] - 
const existingToolIndex = toolCalls.findIndex( - (t) => t.id === e.payload.toolCallId, - ) - - const toolCall: ToolCall = { - id: e.payload.toolCallId, - name: e.payload.toolName, - arguments: JSON.stringify(e.payload.arguments, null, 2), - state: e.payload.state, - } - - if (existingToolIndex >= 0) { - updateToolCall( - conversationId, - conv.messages.length - 1, - existingToolIndex, - toolCall, - ) - } else { - setToolCalls(conversationId, conv.messages.length - 1, [ - ...toolCalls, - toolCall, - ]) - } - } - }), - ) - - cleanupFns.push( - aiEventClient.on('processor:tool-result-state-changed', (e) => { - const streamId = e.payload.streamId - const conversationId = streamToConversation.get(streamId) - - if (!conversationId || !state.conversations[conversationId]) return - - const conv = state.conversations[conversationId] - - for (let i = conv.messages.length - 1; i >= 0; i--) { - const message = conv.messages[i] - if (!message?.toolCalls) continue - - const toolCallIndex = message.toolCalls.findIndex( - (t) => t.id === e.payload.toolCallId, - ) - if (toolCallIndex >= 0) { - updateToolCall(conversationId, i, toolCallIndex, { - result: e.payload.content, - state: e.payload.error ? 
'error' : e.payload.state, - }) - return - } - } - }), - ) - // ============= Chat Events (for usage tracking) ============= cleanupFns.push( - aiEventClient.on('text:started', (e) => { + aiEventClient.on('text:request:started', (e) => { const streamId = e.payload.streamId const model = e.payload.model const provider = e.payload.provider @@ -1305,7 +1448,13 @@ export const AIProvider: ParentComponent = (props) => { if (clientId && state.conversations[clientId]) { streamToConversation.set(streamId, clientId) - updateConversation(clientId, { status: 'active', ...e.payload }) + requestToConversation.set(e.payload.requestId, clientId) + updateConversation(clientId, { + status: 'active', + ...e.payload, + systemPrompts: e.payload.systemPrompts, + hasChat: true, + }) return } @@ -1315,7 +1464,12 @@ export const AIProvider: ParentComponent = (props) => { if (activeClient) { streamToConversation.set(streamId, activeClient.id) - updateConversation(activeClient.id, { ...e.payload }) + requestToConversation.set(e.payload.requestId, activeClient.id) + updateConversation(activeClient.id, { + ...e.payload, + systemPrompts: e.payload.systemPrompts, + hasChat: true, + }) } else { const existingServerConv = Object.values(state.conversations).find( (c) => c.type === 'server' && c.model === model, @@ -1323,22 +1477,33 @@ export const AIProvider: ParentComponent = (props) => { if (existingServerConv) { streamToConversation.set(streamId, existingServerConv.id) + requestToConversation.set( + e.payload.requestId, + existingServerConv.id, + ) updateConversation(existingServerConv.id, { status: 'active', ...e.payload, + systemPrompts: e.payload.systemPrompts, + hasChat: true, }) } else { const serverId = `server-${model}` getOrCreateConversation(serverId, 'server', `${model} Server`) streamToConversation.set(streamId, serverId) - updateConversation(serverId, { ...e.payload }) + requestToConversation.set(e.payload.requestId, serverId) + updateConversation(serverId, { + ...e.payload, + 
systemPrompts: e.payload.systemPrompts, + hasChat: true, + }) } } }), ) cleanupFns.push( - aiEventClient.on('text:completed', (e) => { + aiEventClient.on('text:request:completed', (e) => { const { requestId, usage } = e.payload const conversationId = requestToConversation.get(requestId) @@ -1359,20 +1524,7 @@ export const AIProvider: ParentComponent = (props) => { ) cleanupFns.push( - aiEventClient.on('text:iteration', (e) => { - const { requestId, iterationNumber } = e.payload - - const conversationId = requestToConversation.get(requestId) - if (conversationId && state.conversations[conversationId]) { - updateConversation(conversationId, { - iterationCount: iterationNumber, - }) - } - }), - ) - - cleanupFns.push( - aiEventClient.on('usage:tokens', (e) => { + aiEventClient.on('text:usage', (e) => { const { requestId, usage, messageId } = e.payload const conversationId = requestToConversation.get(requestId) @@ -1383,81 +1535,10 @@ export const AIProvider: ParentComponent = (props) => { }), ) - // ============= Tool Call Completed (with duration) ============= - - cleanupFns.push( - aiEventClient.on('tool:call-completed', (e) => { - const { - streamId, - toolCallId, - toolName, - result, - duration, - messageId, - timestamp, - } = e.payload - - const conversationId = streamToConversation.get(streamId) - if (!conversationId || !state.conversations[conversationId]) return - - const conv = state.conversations[conversationId] - - // Add a tool_result chunk to show the result in the chunks view - const chunk: Chunk = { - id: `chunk-tool-result-${toolCallId}-${Date.now()}`, - type: 'tool_result', - messageId: messageId, - toolCallId, - toolName, - result, - duration, - timestamp, - chunkCount: 1, - } - - // Add chunk to message if it's a client conversation, otherwise to conversation - if (conv.type === 'client' && messageId) { - const messageIndex = conv.messages.findIndex( - (m) => m.id === messageId, - ) - if (messageIndex !== -1) { - queueMessageChunk(conversationId, 
messageIndex, chunk) - } else { - // If message not found, add to last assistant message - for (let i = conv.messages.length - 1; i >= 0; i--) { - if (conv.messages[i]?.role === 'assistant') { - queueMessageChunk(conversationId, i, chunk) - break - } - } - } - } else { - addChunk(conversationId, chunk) - } - - // Update the tool call with duration - for (let i = conv.messages.length - 1; i >= 0; i--) { - const message = conv.messages[i] - if (!message?.toolCalls) continue - - const toolCallIndex = message.toolCalls.findIndex( - (t) => t.id === toolCallId, - ) - if (toolCallIndex >= 0) { - updateToolCall(conversationId, i, toolCallIndex, { - duration, - result, - }) - return - } - } - }), - ) - // ============= Summarize Events ============= cleanupFns.push( - aiEventClient.on('summarize:started', (e) => { + aiEventClient.on('summarize:request:started', (e) => { const { requestId, model, inputLength, timestamp, clientId } = e.payload // Try to find an active conversation to attach to, or create a new one @@ -1505,7 +1586,7 @@ export const AIProvider: ParentComponent = (props) => { ) cleanupFns.push( - aiEventClient.on('summarize:completed', (e) => { + aiEventClient.on('summarize:request:completed', (e) => { const { requestId, outputLength, duration } = e.payload const conversationId = requestToConversation.get(requestId) @@ -1533,6 +1614,290 @@ export const AIProvider: ParentComponent = (props) => { }), ) + // ============= Image Events ============= + + cleanupFns.push( + aiEventClient.on('image:request:started', (e) => { + const { requestId, clientId, timestamp } = e.payload + + let conversationId = clientId + if (!conversationId || !state.conversations[conversationId]) { + conversationId = `image-${requestId}` + getOrCreateConversation( + conversationId, + 'server', + `Image (${requestId.substring(0, 8)})`, + ) + } + + addActivityEvent(conversationId, 'imageEvents', { + id: requestId, + name: 'image:request:started', + timestamp, + payload: e.payload, + }) + }), 
+ ) + + cleanupFns.push( + aiEventClient.on('image:request:completed', (e) => { + const { requestId, clientId, timestamp } = e.payload + + let conversationId = clientId + if (!conversationId || !state.conversations[conversationId]) { + conversationId = `image-${requestId}` + getOrCreateConversation( + conversationId, + 'server', + `Image (${requestId.substring(0, 8)})`, + ) + } + + addActivityEvent(conversationId, 'imageEvents', { + id: requestId, + name: 'image:request:completed', + timestamp, + payload: e.payload, + }) + }), + ) + + cleanupFns.push( + aiEventClient.on('image:usage', (e) => { + const { requestId, clientId, timestamp } = e.payload + + let conversationId = clientId + if (!conversationId || !state.conversations[conversationId]) { + conversationId = `image-${requestId}` + getOrCreateConversation( + conversationId, + 'server', + `Image (${requestId.substring(0, 8)})`, + ) + } + + addActivityEvent(conversationId, 'imageEvents', { + id: requestId, + name: 'image:usage', + timestamp, + payload: e.payload, + }) + }), + ) + + // ============= Speech Events ============= + + cleanupFns.push( + aiEventClient.on('speech:request:started', (e) => { + const { requestId, clientId, timestamp } = e.payload + + let conversationId = clientId + if (!conversationId || !state.conversations[conversationId]) { + conversationId = `speech-${requestId}` + getOrCreateConversation( + conversationId, + 'server', + `Speech (${requestId.substring(0, 8)})`, + ) + } + + addActivityEvent(conversationId, 'speechEvents', { + id: requestId, + name: 'speech:request:started', + timestamp, + payload: e.payload, + }) + }), + ) + + cleanupFns.push( + aiEventClient.on('speech:request:completed', (e) => { + const { requestId, clientId, timestamp } = e.payload + + let conversationId = clientId + if (!conversationId || !state.conversations[conversationId]) { + conversationId = `speech-${requestId}` + getOrCreateConversation( + conversationId, + 'server', + `Speech (${requestId.substring(0, 
8)})`, + ) + } + + addActivityEvent(conversationId, 'speechEvents', { + id: requestId, + name: 'speech:request:completed', + timestamp, + payload: e.payload, + }) + }), + ) + + cleanupFns.push( + aiEventClient.on('speech:usage', (e) => { + const { requestId, clientId, timestamp } = e.payload + + let conversationId = clientId + if (!conversationId || !state.conversations[conversationId]) { + conversationId = `speech-${requestId}` + getOrCreateConversation( + conversationId, + 'server', + `Speech (${requestId.substring(0, 8)})`, + ) + } + + addActivityEvent(conversationId, 'speechEvents', { + id: requestId, + name: 'speech:usage', + timestamp, + payload: e.payload, + }) + }), + ) + + // ============= Transcription Events ============= + + cleanupFns.push( + aiEventClient.on('transcription:request:started', (e) => { + const { requestId, clientId, timestamp } = e.payload + + let conversationId = clientId + if (!conversationId || !state.conversations[conversationId]) { + conversationId = `transcription-${requestId}` + getOrCreateConversation( + conversationId, + 'server', + `Transcription (${requestId.substring(0, 8)})`, + ) + } + + addActivityEvent(conversationId, 'transcriptionEvents', { + id: requestId, + name: 'transcription:request:started', + timestamp, + payload: e.payload, + }) + }), + ) + + cleanupFns.push( + aiEventClient.on('transcription:request:completed', (e) => { + const { requestId, clientId, timestamp } = e.payload + + let conversationId = clientId + if (!conversationId || !state.conversations[conversationId]) { + conversationId = `transcription-${requestId}` + getOrCreateConversation( + conversationId, + 'server', + `Transcription (${requestId.substring(0, 8)})`, + ) + } + + addActivityEvent(conversationId, 'transcriptionEvents', { + id: requestId, + name: 'transcription:request:completed', + timestamp, + payload: e.payload, + }) + }), + ) + + cleanupFns.push( + aiEventClient.on('transcription:usage', (e) => { + const { requestId, clientId, timestamp } 
= e.payload + + let conversationId = clientId + if (!conversationId || !state.conversations[conversationId]) { + conversationId = `transcription-${requestId}` + getOrCreateConversation( + conversationId, + 'server', + `Transcription (${requestId.substring(0, 8)})`, + ) + } + + addActivityEvent(conversationId, 'transcriptionEvents', { + id: requestId, + name: 'transcription:usage', + timestamp, + payload: e.payload, + }) + }), + ) + + // ============= Video Events ============= + + cleanupFns.push( + aiEventClient.on('video:request:started', (e) => { + const { requestId, clientId, timestamp } = e.payload + + let conversationId = clientId + if (!conversationId || !state.conversations[conversationId]) { + conversationId = `video-${requestId}` + getOrCreateConversation( + conversationId, + 'server', + `Video (${requestId.substring(0, 8)})`, + ) + } + + addActivityEvent(conversationId, 'videoEvents', { + id: requestId, + name: 'video:request:started', + timestamp, + payload: e.payload, + }) + }), + ) + + cleanupFns.push( + aiEventClient.on('video:request:completed', (e) => { + const { requestId, clientId, timestamp } = e.payload + + let conversationId = clientId + if (!conversationId || !state.conversations[conversationId]) { + conversationId = `video-${requestId}` + getOrCreateConversation( + conversationId, + 'server', + `Video (${requestId.substring(0, 8)})`, + ) + } + + addActivityEvent(conversationId, 'videoEvents', { + id: requestId, + name: 'video:request:completed', + timestamp, + payload: e.payload, + }) + }), + ) + + cleanupFns.push( + aiEventClient.on('video:usage', (e) => { + const { requestId, clientId, timestamp } = e.payload + + let conversationId = clientId + if (!conversationId || !state.conversations[conversationId]) { + conversationId = `video-${requestId}` + getOrCreateConversation( + conversationId, + 'server', + `Video (${requestId.substring(0, 8)})`, + ) + } + + addActivityEvent(conversationId, 'videoEvents', { + id: requestId, + name: 
'video:usage', + timestamp, + payload: e.payload, + }) + }), + ) + // Cleanup all listeners on unmount onCleanup(() => { for (const cleanup of cleanupFns) { diff --git a/packages/typescript/ai-devtools/src/styles/use-styles.ts b/packages/typescript/ai-devtools/src/styles/use-styles.ts index 0640087a..ba9a9cac 100644 --- a/packages/typescript/ai-devtools/src/styles/use-styles.ts +++ b/packages/typescript/ai-devtools/src/styles/use-styles.ts @@ -680,6 +680,54 @@ const stylesFactory = (theme: 'light' | 'dark') => { white-space: pre-wrap; word-break: break-word; `, + collapsibleSection: css` + margin-top: ${size[2]}; + border: 1px solid ${t(colors.gray[200], colors.darkGray[700])}; + border-radius: ${border.radius.md}; + overflow: hidden; + `, + collapsibleSummary: css` + cursor: pointer; + padding: ${size[2]} ${size[3]}; + background: ${t(colors.gray[100], colors.darkGray[800])}; + font-size: ${fontSize.xs}; + font-weight: ${font.weight.medium}; + color: ${t(colors.gray[700], colors.gray[300])}; + user-select: none; + &:hover { + background: ${t(colors.gray[200], colors.darkGray[700])}; + } + `, + collapsibleContent: css` + padding: ${size[3]}; + background: ${t(colors.gray[50], colors.darkGray[900])}; + font-size: ${fontSize.xs}; + max-height: 300px; + overflow-y: auto; + `, + systemPromptItem: css` + display: flex; + gap: ${size[2]}; + padding: ${size[2]}; + margin-bottom: ${size[2]}; + background: ${t(colors.gray[100], colors.darkGray[800])}; + border-radius: ${border.radius.sm}; + &:last-child { + margin-bottom: 0; + } + `, + systemPromptIndex: css` + font-weight: ${font.weight.semibold}; + color: ${colors.purple[400]}; + font-size: 10px; + flex-shrink: 0; + `, + systemPromptText: css` + color: ${t(colors.gray[700], colors.gray[300])}; + white-space: pre-wrap; + word-break: break-word; + line-height: 1.4; + `, tabsContainer: css` display: flex; gap: ${size[2]}; @@ -691,10 +739,26 @@ const stylesFactory = (theme: 'light' | 'dark') => { color: ${colors.white}; 
border-color: ${colors.pink[400]}; `, + tabButtonPulse: css` + position: relative; + animation: activityPulse 1.4s ease-in-out infinite; + @keyframes activityPulse { + 0% { + box-shadow: 0 0 0 0 ${colors.pink[400]}55; + } + 70% { + box-shadow: 0 0 0 8px ${colors.pink[400]}00; + } + 100% { + box-shadow: 0 0 0 0 ${colors.pink[400]}00; + } + } + `, contentArea: css` flex: 1; overflow: auto; padding: ${size[3]}; + padding-bottom: ${size[6]}; `, emptyMessages: css` padding: ${size[6]}; @@ -712,7 +776,7 @@ const stylesFactory = (theme: 'light' | 'dark') => { overflow: hidden; `, messageCardUser: css` - padding: ${size[4]}; + padding: ${size[3]}; border: 1.5px solid oklch(0.45 0.12 260); `, messageCardAssistant: css` diff --git a/packages/typescript/ai/src/activities/chat/index.ts b/packages/typescript/ai/src/activities/chat/index.ts index 95c01374..e60f8735 100644 --- a/packages/typescript/ai/src/activities/chat/index.ts +++ b/packages/typescript/ai/src/activities/chat/index.ts @@ -213,6 +213,8 @@ class TextEngine< private totalChunkCount = 0 private currentMessageId: string | null = null private accumulatedContent = '' + private eventOptions?: Record + private eventToolNames?: Array private doneChunk: DoneStreamChunk | null = null private shouldEmitStreamEnd = true private earlyTermination = false @@ -278,16 +280,7 @@ class TextEngine< private beforeRun(): void { this.streamStartTime = Date.now() - const { - model, - tools, - temperature, - topP, - maxTokens, - metadata, - modelOptions, - conversationId, - } = this.params + const { tools, temperature, topP, maxTokens, metadata } = this.params // Gather flattened options into an object for event emission const options: Record = {} @@ -296,26 +289,49 @@ class TextEngine< if (maxTokens !== undefined) options.maxTokens = maxTokens if (metadata !== undefined) options.metadata = metadata - aiEventClient.emit('text:started', { - requestId: this.requestId, - streamId: this.streamId, - model: model, - provider: 
this.adapter.name, - messageCount: this.initialMessageCount, - hasTools: !!tools && tools.length > 0, - streaming: true, + this.eventOptions = Object.keys(options).length > 0 ? options : undefined + this.eventToolNames = tools?.map((t) => t.name) + + aiEventClient.emit('text:request:started', { + ...this.buildTextEventContext(), timestamp: Date.now(), - clientId: conversationId, - toolNames: tools?.map((t) => t.name), - options: Object.keys(options).length > 0 ? options : undefined, - modelOptions: modelOptions as Record | undefined, }) - aiEventClient.emit('stream:started', { - streamId: this.streamId, - model, - provider: this.adapter.name, - timestamp: Date.now(), + // Always emit messages for tracking: + // - For existing conversations (with conversationId): only emit the latest user message + // - For new conversations (without conversationId): emit all messages for reconstruction + const messagesToEmit = this.params.conversationId + ? this.messages.slice(-1).filter((m) => m.role === 'user') + : this.messages + + messagesToEmit.forEach((message, index) => { + const messageIndex = this.params.conversationId + ? 
this.messages.length - 1 + : index + const messageId = this.createId('msg') + const baseContext = this.buildTextEventContext() + const content = this.getContentString(message.content) + + aiEventClient.emit('text:message:created', { + ...baseContext, + messageId, + role: message.role, + content, + toolCalls: message.toolCalls, + messageIndex, + timestamp: Date.now(), + }) + + if (message.role === 'user') { + aiEventClient.emit('text:message:user', { + ...baseContext, + messageId, + role: 'user', + content, + messageIndex, + timestamp: Date.now(), + }) + } }) } @@ -325,23 +341,13 @@ class TextEngine< } const now = Date.now() - - // Emit text:completed with final state - aiEventClient.emit('text:completed', { - requestId: this.requestId, - streamId: this.streamId, - model: this.params.model, + // Emit text:request:completed with final state + aiEventClient.emit('text:request:completed', { + ...this.buildTextEventContext(), content: this.accumulatedContent, messageId: this.currentMessageId || undefined, finishReason: this.lastFinishReason || undefined, usage: this.doneChunk?.usage, - timestamp: now, - }) - - aiEventClient.emit('stream:ended', { - requestId: this.requestId, - streamId: this.streamId, - totalChunks: this.totalChunkCount, duration: now - this.streamStartTime, timestamp: now, }) @@ -367,6 +373,15 @@ class TextEngine< this.currentMessageId = this.createId('msg') this.accumulatedContent = '' this.doneChunk = null + + const baseContext = this.buildTextEventContext() + aiEventClient.emit('text:message:created', { + ...baseContext, + messageId: this.currentMessageId, + role: 'assistant', + content: '', + timestamp: Date.now(), + }) } private async *streamModelResponse(): AsyncGenerator { @@ -438,8 +453,8 @@ class TextEngine< private handleContentChunk(chunk: Extract) { this.accumulatedContent = chunk.content - aiEventClient.emit('stream:chunk:content', { - streamId: this.streamId, + aiEventClient.emit('text:chunk:content', { + ...this.buildTextEventContext(), 
messageId: this.currentMessageId || undefined, content: chunk.content, delta: chunk.delta, @@ -451,8 +466,8 @@ class TextEngine< chunk: Extract, ): void { this.toolCallManager.addToolCallChunk(chunk) - aiEventClient.emit('stream:chunk:tool-call', { - streamId: this.streamId, + aiEventClient.emit('text:chunk:tool-call', { + ...this.buildTextEventContext(), messageId: this.currentMessageId || undefined, toolCallId: chunk.toolCall.id, toolName: chunk.toolCall.function.name, @@ -465,8 +480,8 @@ class TextEngine< private handleToolResultChunk( chunk: Extract, ): void { - aiEventClient.emit('stream:chunk:tool-result', { - streamId: this.streamId, + aiEventClient.emit('text:chunk:tool-result', { + ...this.buildTextEventContext(), messageId: this.currentMessageId || undefined, toolCallId: chunk.toolCallId, result: chunk.content, @@ -477,37 +492,8 @@ class TextEngine< private handleDoneChunk(chunk: DoneStreamChunk): void { // Don't overwrite a tool_calls finishReason with a stop finishReason // This can happen when adapters send multiple done chunks - if ( - this.doneChunk?.finishReason === 'tool_calls' && - chunk.finishReason === 'stop' - ) { - // Still emit the event and update lastFinishReason, but don't overwrite doneChunk - this.lastFinishReason = chunk.finishReason - aiEventClient.emit('stream:chunk:done', { - streamId: this.streamId, - messageId: this.currentMessageId || undefined, - finishReason: chunk.finishReason, - usage: chunk.usage, - timestamp: Date.now(), - }) - - if (chunk.usage) { - aiEventClient.emit('usage:tokens', { - requestId: this.requestId, - streamId: this.streamId, - messageId: this.currentMessageId || undefined, - model: this.params.model, - usage: chunk.usage, - timestamp: Date.now(), - }) - } - return - } - - this.doneChunk = chunk - this.lastFinishReason = chunk.finishReason - aiEventClient.emit('stream:chunk:done', { - streamId: this.streamId, + aiEventClient.emit('text:chunk:done', { + ...this.buildTextEventContext(), messageId: 
this.currentMessageId || undefined, finishReason: chunk.finishReason, usage: chunk.usage, @@ -515,22 +501,32 @@ class TextEngine< }) if (chunk.usage) { - aiEventClient.emit('usage:tokens', { - requestId: this.requestId, - streamId: this.streamId, + aiEventClient.emit('text:usage', { + ...this.buildTextEventContext(), messageId: this.currentMessageId || undefined, - model: this.params.model, usage: chunk.usage, timestamp: Date.now(), }) } + if ( + this.doneChunk?.finishReason === 'tool_calls' && + chunk.finishReason === 'stop' + ) { + // Still emit the event and update lastFinishReason, but don't overwrite doneChunk + this.lastFinishReason = chunk.finishReason + + return + } + + this.doneChunk = chunk + this.lastFinishReason = chunk.finishReason } private handleErrorChunk( chunk: Extract, ): void { - aiEventClient.emit('stream:chunk:error', { - streamId: this.streamId, + aiEventClient.emit('text:chunk:error', { + ...this.buildTextEventContext(), messageId: this.currentMessageId || undefined, error: chunk.error.message, timestamp: Date.now(), @@ -542,8 +538,8 @@ class TextEngine< private handleThinkingChunk( chunk: Extract, ): void { - aiEventClient.emit('stream:chunk:thinking', { - streamId: this.streamId, + aiEventClient.emit('text:chunk:thinking', { + ...this.buildTextEventContext(), messageId: this.currentMessageId || undefined, content: chunk.content, delta: chunk.delta, @@ -563,15 +559,6 @@ class TextEngine< const doneChunk = this.createSyntheticDoneChunk() - aiEventClient.emit('text:iteration', { - requestId: this.requestId, - streamId: this.streamId, - iterationNumber: this.iterationCount + 1, - messageCount: this.messages.length, - toolCallCount: pendingToolCalls.length, - timestamp: Date.now(), - }) - const { approvals, clientToolResults } = this.collectClientState() const executionResult = await executeToolCalls( @@ -629,15 +616,6 @@ class TextEngine< return } - aiEventClient.emit('text:iteration', { - requestId: this.requestId, - streamId: this.streamId, 
- iterationNumber: this.iterationCount + 1, - messageCount: this.messages.length, - toolCallCount: toolCalls.length, - timestamp: Date.now(), - }) - this.addAssistantToolCallMessage(toolCalls) const { approvals, clientToolResults } = this.collectClientState() @@ -694,6 +672,7 @@ class TextEngine< } private addAssistantToolCallMessage(toolCalls: Array): void { + const messageId = this.currentMessageId ?? this.createId('msg') this.messages = [ ...this.messages, { @@ -702,6 +681,15 @@ class TextEngine< toolCalls, }, ] + + aiEventClient.emit('text:message:created', { + ...this.buildTextEventContext(), + messageId, + role: 'assistant', + content: this.accumulatedContent || '', + toolCalls, + timestamp: Date.now(), + }) } private collectClientState(): { @@ -745,8 +733,8 @@ class TextEngine< const chunks: Array = [] for (const approval of approvals) { - aiEventClient.emit('stream:approval-requested', { - streamId: this.streamId, + aiEventClient.emit('tools:approval:requested', { + ...this.buildTextEventContext(), messageId: this.currentMessageId || undefined, toolCallId: approval.toolCallId, toolName: approval.toolName, @@ -780,8 +768,8 @@ class TextEngine< const chunks: Array = [] for (const clientTool of clientRequests) { - aiEventClient.emit('stream:tool-input-available', { - streamId: this.streamId, + aiEventClient.emit('tools:input:available', { + ...this.buildTextEventContext(), messageId: this.currentMessageId || undefined, toolCallId: clientTool.toolCallId, toolName: clientTool.toolName, @@ -810,9 +798,8 @@ class TextEngine< const chunks: Array = [] for (const result of results) { - aiEventClient.emit('tool:call-completed', { - requestId: this.requestId, - streamId: this.streamId, + aiEventClient.emit('tools:call:completed', { + ...this.buildTextEventContext(), messageId: this.currentMessageId || undefined, toolCallId: result.toolCallId, toolName: result.toolName, @@ -841,6 +828,14 @@ class TextEngine< toolCallId: result.toolCallId, }, ] + + 
aiEventClient.emit('text:message:created', { + ...this.buildTextEventContext(), + messageId: this.createId('msg'), + role: 'tool', + content, + timestamp: Date.now(), + }) } return chunks @@ -896,6 +891,50 @@ class TextEngine< return !!this.effectiveSignal?.aborted } + private buildTextEventContext(): { + requestId: string + streamId: string + provider: string + model: string + clientId?: string + source?: 'client' | 'server' + systemPrompts?: Array + toolNames?: Array + options?: Record + modelOptions?: Record + messageCount: number + hasTools: boolean + streaming: boolean + } { + return { + requestId: this.requestId, + streamId: this.streamId, + provider: this.adapter.name, + model: this.params.model, + clientId: this.params.conversationId, + source: 'server', + systemPrompts: + this.systemPrompts.length > 0 ? this.systemPrompts : undefined, + toolNames: this.eventToolNames, + options: this.eventOptions, + modelOptions: this.params.modelOptions as + | Record + | undefined, + messageCount: this.initialMessageCount, + hasTools: this.tools.length > 0, + streaming: true, + } + } + + private getContentString(content: ModelMessage['content']): string { + if (typeof content === 'string') return content + const text = + content + ?.map((part) => (part.type === 'text' ? part.content : '')) + .join('') || '' + return text + } + private setToolPhase(phase: ToolPhaseResult): void { this.toolPhase = phase if (phase === 'wait') { diff --git a/packages/typescript/ai/src/activities/generateImage/index.ts b/packages/typescript/ai/src/activities/generateImage/index.ts index 0c0d477e..d573b32f 100644 --- a/packages/typescript/ai/src/activities/generateImage/index.ts +++ b/packages/typescript/ai/src/activities/generateImage/index.ts @@ -5,6 +5,7 @@ * This is a self-contained module with implementation, types, and JSDoc. 
*/ +import { aiEventClient } from '../../event-client.js' import type { ImageAdapter } from './adapter' import type { ImageGenerationResult } from '../../types' @@ -82,6 +83,10 @@ export interface ImageActivityOptions< /** Result type for the image activity */ export type ImageActivityResult = Promise +function createId(prefix: string): string { + return `${prefix}-${Date.now()}-${Math.random().toString(36).slice(2, 9)}` +} + // =========================== // Activity Implementation // =========================== @@ -136,8 +141,48 @@ export async function generateImage< >(options: ImageActivityOptions): ImageActivityResult { const { adapter, ...rest } = options const model = adapter.model - - return adapter.generateImages({ ...rest, model }) + const requestId = createId('image') + const startTime = Date.now() + + aiEventClient.emit('image:request:started', { + requestId, + provider: adapter.name, + model, + prompt: rest.prompt, + numberOfImages: rest.numberOfImages, + size: rest.size as string | undefined, + modelOptions: rest.modelOptions as Record | undefined, + timestamp: startTime, + }) + + return adapter.generateImages({ ...rest, model }).then((result) => { + const duration = Date.now() - startTime + + aiEventClient.emit('image:request:completed', { + requestId, + provider: adapter.name, + model, + images: result.images.map((image) => ({ + url: image.url, + b64Json: image.b64Json, + })), + duration, + modelOptions: rest.modelOptions as Record | undefined, + timestamp: Date.now(), + }) + + if (result.usage) { + aiEventClient.emit('image:usage', { + requestId, + model, + usage: result.usage, + modelOptions: rest.modelOptions as Record | undefined, + timestamp: Date.now(), + }) + } + + return result + }) } // =========================== diff --git a/packages/typescript/ai/src/activities/generateSpeech/index.ts b/packages/typescript/ai/src/activities/generateSpeech/index.ts index 2ec9f1a1..39645eba 100644 --- 
a/packages/typescript/ai/src/activities/generateSpeech/index.ts +++ b/packages/typescript/ai/src/activities/generateSpeech/index.ts @@ -5,6 +5,7 @@ * This is a self-contained module with implementation, types, and JSDoc. */ +import { aiEventClient } from '../../event-client.js' import type { TTSAdapter } from './adapter' import type { TTSResult } from '../../types' @@ -61,6 +62,10 @@ export interface TTSActivityOptions< /** Result type for the TTS activity */ export type TTSActivityResult = Promise +function createId(prefix: string): string { + return `${prefix}-${Date.now()}-${Math.random().toString(36).slice(2, 9)}` +} + // =========================== // Activity Implementation // =========================== @@ -100,8 +105,39 @@ export async function generateSpeech< >(options: TTSActivityOptions): TTSActivityResult { const { adapter, ...rest } = options const model = adapter.model + const requestId = createId('speech') + const startTime = Date.now() + + aiEventClient.emit('speech:request:started', { + requestId, + provider: adapter.name, + model, + text: rest.text, + voice: rest.voice, + format: rest.format, + speed: rest.speed, + modelOptions: rest.modelOptions as Record | undefined, + timestamp: startTime, + }) + + return adapter.generateSpeech({ ...rest, model }).then((result) => { + const duration = Date.now() - startTime + + aiEventClient.emit('speech:request:completed', { + requestId, + provider: adapter.name, + model, + audio: result.audio, + format: result.format, + audioDuration: result.duration, + contentType: result.contentType, + duration, + modelOptions: rest.modelOptions as Record | undefined, + timestamp: Date.now(), + }) - return adapter.generateSpeech({ ...rest, model }) + return result + }) } // =========================== diff --git a/packages/typescript/ai/src/activities/generateTranscription/index.ts b/packages/typescript/ai/src/activities/generateTranscription/index.ts index 05c7ab45..dff9a477 100644 --- 
a/packages/typescript/ai/src/activities/generateTranscription/index.ts +++ b/packages/typescript/ai/src/activities/generateTranscription/index.ts @@ -5,6 +5,7 @@ * This is a self-contained module with implementation, types, and JSDoc. */ +import { aiEventClient } from '../../event-client.js' import type { TranscriptionAdapter } from './adapter' import type { TranscriptionResult } from '../../types' @@ -61,6 +62,10 @@ export interface TranscriptionActivityOptions< /** Result type for the transcription activity */ export type TranscriptionActivityResult = Promise +function createId(prefix: string): string { + return `${prefix}-${Date.now()}-${Math.random().toString(36).slice(2, 9)}` +} + // =========================== // Activity Implementation // =========================== @@ -104,8 +109,35 @@ export async function generateTranscription< ): TranscriptionActivityResult { const { adapter, ...rest } = options const model = adapter.model + const requestId = createId('transcription') + const startTime = Date.now() + + aiEventClient.emit('transcription:request:started', { + requestId, + provider: adapter.name, + model, + language: rest.language, + prompt: rest.prompt, + responseFormat: rest.responseFormat, + modelOptions: rest.modelOptions as Record | undefined, + timestamp: startTime, + }) + + const result = await adapter.transcribe({ ...rest, model }) + const duration = Date.now() - startTime + + aiEventClient.emit('transcription:request:completed', { + requestId, + provider: adapter.name, + model, + text: result.text, + language: result.language, + duration, + modelOptions: rest.modelOptions as Record | undefined, + timestamp: Date.now(), + }) - return adapter.transcribe({ ...rest, model }) + return result } // =========================== diff --git a/packages/typescript/ai/src/activities/generateVideo/index.ts b/packages/typescript/ai/src/activities/generateVideo/index.ts index 67901776..9a7a46aa 100644 --- 
a/packages/typescript/ai/src/activities/generateVideo/index.ts +++ b/packages/typescript/ai/src/activities/generateVideo/index.ts @@ -7,6 +7,7 @@ * @experimental Video generation is an experimental feature and may change. */ +import { aiEventClient } from '../../event-client.js' import type { VideoAdapter } from './adapter' import type { VideoJobResult, @@ -35,6 +36,10 @@ export type VideoProviderOptions = // =========================== // Activity Options Types + +function createId(prefix: string): string { + return `${prefix}-${Date.now()}-${Math.random().toString(36).slice(2, 9)}` +} // =========================== /** @@ -207,6 +212,17 @@ export async function getVideoJobStatus< error?: string }> { const { adapter, jobId } = options + const requestId = createId('video-status') + const startTime = Date.now() + + aiEventClient.emit('video:request:started', { + requestId, + provider: adapter.name, + model: adapter.model, + requestType: 'status', + jobId, + timestamp: startTime, + }) // Get status first const statusResult = await adapter.getVideoStatus(jobId) @@ -215,12 +231,37 @@ export async function getVideoJobStatus< if (statusResult.status === 'completed') { try { const urlResult = await adapter.getVideoUrl(jobId) + aiEventClient.emit('video:request:completed', { + requestId, + provider: adapter.name, + model: adapter.model, + requestType: 'status', + jobId, + status: statusResult.status, + progress: statusResult.progress, + url: urlResult.url, + duration: Date.now() - startTime, + timestamp: Date.now(), + }) return { status: statusResult.status, progress: statusResult.progress, url: urlResult.url, } } catch (error) { + aiEventClient.emit('video:request:completed', { + requestId, + provider: adapter.name, + model: adapter.model, + requestType: 'status', + jobId, + status: statusResult.status, + progress: statusResult.progress, + error: + error instanceof Error ? 
error.message : 'Failed to get video URL', + duration: Date.now() - startTime, + timestamp: Date.now(), + }) // If URL fetch fails, still return status return { status: statusResult.status, @@ -231,6 +272,19 @@ export async function getVideoJobStatus< } } + aiEventClient.emit('video:request:completed', { + requestId, + provider: adapter.name, + model: adapter.model, + requestType: 'status', + jobId, + status: statusResult.status, + progress: statusResult.progress, + error: statusResult.error, + duration: Date.now() - startTime, + timestamp: Date.now(), + }) + // Return status for non-completed jobs return { status: statusResult.status, diff --git a/packages/typescript/ai/src/activities/summarize/index.ts b/packages/typescript/ai/src/activities/summarize/index.ts index 4bc45635..2e28d45f 100644 --- a/packages/typescript/ai/src/activities/summarize/index.ts +++ b/packages/typescript/ai/src/activities/summarize/index.ts @@ -180,8 +180,9 @@ async function runSummarize( const inputLength = text.length const startTime = Date.now() - aiEventClient.emit('summarize:started', { + aiEventClient.emit('summarize:request:started', { requestId, + provider: adapter.name, model, inputLength, timestamp: startTime, @@ -200,8 +201,9 @@ async function runSummarize( const duration = Date.now() - startTime const outputLength = result.summary.length - aiEventClient.emit('summarize:completed', { + aiEventClient.emit('summarize:request:completed', { requestId, + provider: adapter.name, model, inputLength, outputLength, diff --git a/packages/typescript/ai/src/event-client.ts b/packages/typescript/ai/src/event-client.ts index 7603faf0..b76d6705 100644 --- a/packages/typescript/ai/src/event-client.ts +++ b/packages/typescript/ai/src/event-client.ts @@ -1,10 +1,11 @@ import { EventClient } from '@tanstack/devtools-event-client' +import type { MessagePart, ToolCall } from './types' /** * Tool call states - track the lifecycle of a tool call * Must match @tanstack/ai-client ToolCallState */ -type 
ToolCallState = +export type ToolCallState = | 'awaiting-input' // Received start but no arguments yet | 'input-streaming' // Partial arguments received | 'input-complete' // All arguments received @@ -15,269 +16,472 @@ type ToolCallState = * Tool result states - track the lifecycle of a tool result * Must match @tanstack/ai-client ToolResultState */ -type ToolResultState = +export type ToolResultState = | 'streaming' // Placeholder for future streamed output | 'complete' // Result is complete | 'error' // Error occurred +export interface TokenUsage { + promptTokens: number + completionTokens: number + totalTokens: number +} + +export interface ImageUsage { + inputTokens?: number + outputTokens?: number + totalTokens?: number +} + +interface BaseEventContext { + timestamp: number + requestId?: string + streamId?: string + messageId?: string + clientId?: string + source?: 'client' | 'server' + provider?: string + model?: string + systemPrompts?: Array + options?: Record + modelOptions?: Record + toolNames?: Array + messageCount?: number + hasTools?: boolean + streaming?: boolean +} + +// =========================== +// Text Events +// =========================== + +/** Emitted when a text request starts execution. */ +export interface TextRequestStartedEvent extends BaseEventContext { + requestId: string + streamId: string + provider: string + model: string + messageCount: number + hasTools: boolean + streaming: boolean +} + +/** Emitted when a text request completes with final output. */ +export interface TextRequestCompletedEvent extends BaseEventContext { + requestId: string + streamId: string + provider: string + model: string + content: string + finishReason?: string + usage?: TokenUsage + duration?: number + streaming: boolean + messageCount: number + hasTools: boolean +} + +/** Emitted when a message is created (user/assistant/system/tool). 
*/ +export interface TextMessageCreatedEvent extends BaseEventContext { + requestId?: string + streamId?: string + messageId: string + role: 'user' | 'assistant' | 'system' | 'tool' + content: string + parts?: Array + toolCalls?: Array + messageIndex?: number +} + +/** Emitted when a user message is created (full content). */ +export interface TextMessageUserEvent extends TextMessageCreatedEvent { + role: 'user' +} + +/** Emitted for streaming text content chunks. */ +export interface TextChunkContentEvent extends BaseEventContext { + requestId?: string + streamId: string + messageId?: string + content: string + delta?: string +} + +/** Emitted for streaming tool call chunks. */ +export interface TextChunkToolCallEvent extends BaseEventContext { + requestId?: string + streamId: string + messageId?: string + toolCallId: string + toolName: string + index: number + arguments: string +} + +/** Emitted for streaming tool result chunks. */ +export interface TextChunkToolResultEvent extends BaseEventContext { + requestId?: string + streamId: string + messageId?: string + toolCallId: string + result: string +} + +/** Emitted for streaming thinking chunks. */ +export interface TextChunkThinkingEvent extends BaseEventContext { + requestId?: string + streamId: string + messageId?: string + content: string + delta?: string +} + +/** Emitted when a stream finishes. */ +export interface TextChunkDoneEvent extends BaseEventContext { + requestId?: string + streamId: string + messageId?: string + finishReason: string | null + usage?: TokenUsage +} + +/** Emitted on stream errors. */ +export interface TextChunkErrorEvent extends BaseEventContext { + requestId?: string + streamId: string + messageId?: string + error: string +} + +/** Emitted when usage metrics are available for text. 
*/ +export interface TextUsageEvent extends BaseEventContext { + requestId: string + streamId: string + messageId?: string + model: string + usage: TokenUsage +} + +// =========================== +// Tool Events +// =========================== + +/** Emitted when tool approval is required. */ +export interface ToolsApprovalRequestedEvent extends BaseEventContext { + requestId?: string + streamId: string + messageId?: string + toolCallId: string + toolName: string + input: unknown + approvalId: string +} + +/** Emitted when user responds to an approval request. */ +export interface ToolsApprovalRespondedEvent extends BaseEventContext { + toolCallId: string + approvalId: string + approved: boolean +} + +/** Emitted when tool input is available for client execution. */ +export interface ToolsInputAvailableEvent extends BaseEventContext { + requestId?: string + streamId: string + messageId?: string + toolCallId: string + toolName: string + input: unknown +} + +/** Emitted when a tool call completes with a result. */ +export interface ToolsCallCompletedEvent extends BaseEventContext { + requestId?: string + streamId: string + messageId?: string + toolCallId: string + toolName: string + result: unknown + duration: number +} + +/** Emitted when a client tool result is added. */ +export interface ToolsResultAddedEvent extends BaseEventContext { + toolCallId: string + toolName: string + output: unknown + state: 'output-available' | 'output-error' +} + +/** Emitted when tool call state changes on the client. */ +export interface ToolsCallUpdatedEvent extends BaseEventContext { + streamId: string + messageId: string + toolCallId: string + toolName: string + state: ToolCallState + arguments: string +} + +// =========================== +// Summarize Events +// =========================== + +/** Emitted when summarize starts. 
*/ +export interface SummarizeRequestStartedEvent extends BaseEventContext { + requestId: string + provider: string + model: string + inputLength: number +} + +/** Emitted when summarize completes. */ +export interface SummarizeRequestCompletedEvent extends BaseEventContext { + requestId: string + provider: string + model: string + inputLength: number + outputLength: number + duration: number +} + +/** Emitted when summarize usage metrics are available. */ +export interface SummarizeUsageEvent extends BaseEventContext { + requestId: string + model: string + usage: TokenUsage +} + +// =========================== +// Image Events +// =========================== + +/** Emitted when an image request starts. */ +export interface ImageRequestStartedEvent extends BaseEventContext { + requestId: string + provider: string + model: string + prompt: string + numberOfImages?: number + size?: string +} + +/** Emitted when an image request completes. */ +export interface ImageRequestCompletedEvent extends BaseEventContext { + requestId: string + provider: string + model: string + images: Array<{ url?: string; b64Json?: string }> + duration: number +} + +/** Emitted when image usage metrics are available. */ +export interface ImageUsageEvent extends BaseEventContext { + requestId: string + model: string + usage: ImageUsage +} + +// =========================== +// Speech Events +// =========================== + +/** Emitted when a speech request starts. */ +export interface SpeechRequestStartedEvent extends BaseEventContext { + requestId: string + provider: string + model: string + text: string + voice?: string + format?: string + speed?: number +} + +/** Emitted when a speech request completes. */ +export interface SpeechRequestCompletedEvent extends BaseEventContext { + requestId: string + provider: string + model: string + audio: string + format: string + duration: number + audioDuration?: number + contentType?: string +} + +/** Emitted when speech usage metrics are available. 
*/ +export interface SpeechUsageEvent extends BaseEventContext { + requestId: string + model: string + usage: TokenUsage +} + +// =========================== +// Transcription Events +// =========================== + +/** Emitted when a transcription request starts. */ +export interface TranscriptionRequestStartedEvent extends BaseEventContext { + requestId: string + provider: string + model: string + language?: string + prompt?: string + responseFormat?: string +} + +/** Emitted when a transcription request completes. */ +export interface TranscriptionRequestCompletedEvent extends BaseEventContext { + requestId: string + provider: string + model: string + text: string + language?: string + duration: number +} + +/** Emitted when transcription usage metrics are available. */ +export interface TranscriptionUsageEvent extends BaseEventContext { + requestId: string + model: string + usage: TokenUsage +} + +// =========================== +// Video Events +// =========================== + +/** Emitted when a video request starts. */ +export interface VideoRequestStartedEvent extends BaseEventContext { + requestId: string + provider: string + model: string + requestType: 'create' | 'status' | 'url' + jobId?: string + prompt?: string + size?: string + duration?: number +} + +/** Emitted when a video request completes. */ +export interface VideoRequestCompletedEvent extends BaseEventContext { + requestId: string + provider: string + model: string + requestType: 'create' | 'status' | 'url' + jobId?: string + status?: 'pending' | 'processing' | 'completed' | 'failed' + progress?: number + url?: string + error?: string + duration: number +} + +/** Emitted when video usage metrics are available. */ +export interface VideoUsageEvent extends BaseEventContext { + requestId: string + model: string + usage: TokenUsage +} + +// =========================== +// Client Events +// =========================== + +/** Emitted when a client is created. 
*/ +export interface ClientCreatedEvent { + clientId: string + initialMessageCount: number + timestamp: number +} + +/** Emitted when client loading state changes. */ +export interface ClientLoadingChangedEvent { + clientId: string + isLoading: boolean + timestamp: number +} + +/** Emitted when client error state changes. */ +export interface ClientErrorChangedEvent { + clientId: string + error: string | null + timestamp: number +} + +/** Emitted when client messages are cleared. */ +export interface ClientMessagesClearedEvent { + clientId: string + timestamp: number +} + +/** Emitted when client is reloaded. */ +export interface ClientReloadedEvent { + clientId: string + fromMessageIndex: number + timestamp: number +} + +/** Emitted when client stops. */ +export interface ClientStoppedEvent { + clientId: string + timestamp: number +} + export interface AIDevtoolsEventMap { - // AI Stream events - from @tanstack/ai package - 'tanstack-ai-devtools:stream:started': { - streamId: string - model: string - provider: string - timestamp: number - clientId?: string - } - 'tanstack-ai-devtools:stream:chunk:content': { - streamId: string - messageId?: string - content: string - delta?: string - timestamp: number - } - 'tanstack-ai-devtools:stream:chunk:tool-call': { - streamId: string - messageId?: string - toolCallId: string - toolName: string - index: number - arguments: string - timestamp: number - } - 'tanstack-ai-devtools:stream:chunk:tool-result': { - streamId: string - messageId?: string - toolCallId: string - result: string - timestamp: number - } - 'tanstack-ai-devtools:stream:chunk:done': { - streamId: string - messageId?: string - finishReason: string | null - usage?: { - promptTokens: number - completionTokens: number - totalTokens: number - } - timestamp: number - } - 'tanstack-ai-devtools:stream:chunk:error': { - streamId: string - messageId?: string - error: string - timestamp: number - } - 'tanstack-ai-devtools:stream:chunk:thinking': { - streamId: string - 
messageId?: string - content: string - delta?: string - timestamp: number - } - 'tanstack-ai-devtools:stream:approval-requested': { - streamId: string - messageId?: string - toolCallId: string - toolName: string - input: any - approvalId: string - timestamp: number - } - 'tanstack-ai-devtools:stream:tool-input-available': { - streamId: string - messageId?: string - toolCallId: string - toolName: string - input: any - timestamp: number - } - 'tanstack-ai-devtools:tool:call-completed': { - requestId: string - streamId: string - messageId?: string - toolCallId: string - toolName: string - result: any - duration: number - timestamp: number - } - 'tanstack-ai-devtools:stream:ended': { - requestId: string - streamId: string - totalChunks: number - duration: number - timestamp: number - } - 'tanstack-ai-devtools:text:started': { - requestId: string - streamId: string - provider: string - model: string - messageCount: number - hasTools: boolean - streaming: boolean - timestamp: number - clientId?: string - toolNames?: Array - options?: Record - modelOptions?: Record - } - 'tanstack-ai-devtools:text:completed': { - requestId: string - streamId: string - model: string - content: string - messageId?: string - finishReason?: string - usage?: { - promptTokens: number - completionTokens: number - totalTokens: number - } - timestamp: number - } - 'tanstack-ai-devtools:text:iteration': { - requestId: string - streamId: string - iterationNumber: number - messageCount: number - toolCallCount: number - timestamp: number - } - 'tanstack-ai-devtools:usage:tokens': { - requestId: string - streamId: string - model: string - messageId?: string - usage: { - promptTokens: number - completionTokens: number - totalTokens: number - } - timestamp: number - } + // Text events + 'tanstack-ai-devtools:text:request:started': TextRequestStartedEvent + 'tanstack-ai-devtools:text:request:completed': TextRequestCompletedEvent + 'tanstack-ai-devtools:text:message:created': TextMessageCreatedEvent + 
'tanstack-ai-devtools:text:message:user': TextMessageUserEvent + 'tanstack-ai-devtools:text:chunk:content': TextChunkContentEvent + 'tanstack-ai-devtools:text:chunk:tool-call': TextChunkToolCallEvent + 'tanstack-ai-devtools:text:chunk:tool-result': TextChunkToolResultEvent + 'tanstack-ai-devtools:text:chunk:thinking': TextChunkThinkingEvent + 'tanstack-ai-devtools:text:chunk:done': TextChunkDoneEvent + 'tanstack-ai-devtools:text:chunk:error': TextChunkErrorEvent + 'tanstack-ai-devtools:text:usage': TextUsageEvent + + // Tool events + 'tanstack-ai-devtools:tools:approval:requested': ToolsApprovalRequestedEvent + 'tanstack-ai-devtools:tools:approval:responded': ToolsApprovalRespondedEvent + 'tanstack-ai-devtools:tools:input:available': ToolsInputAvailableEvent + 'tanstack-ai-devtools:tools:call:completed': ToolsCallCompletedEvent + 'tanstack-ai-devtools:tools:result:added': ToolsResultAddedEvent + 'tanstack-ai-devtools:tools:call:updated': ToolsCallUpdatedEvent // Summarize events - 'tanstack-ai-devtools:summarize:started': { - requestId: string - model: string - inputLength: number - timestamp: number - clientId?: string - } - 'tanstack-ai-devtools:summarize:completed': { - requestId: string - model: string - inputLength: number - outputLength: number - duration: number - timestamp: number - } + 'tanstack-ai-devtools:summarize:request:started': SummarizeRequestStartedEvent + 'tanstack-ai-devtools:summarize:request:completed': SummarizeRequestCompletedEvent + 'tanstack-ai-devtools:summarize:usage': SummarizeUsageEvent - // Text Client events - from @tanstack/ai-client package - 'tanstack-ai-devtools:client:created': { - clientId: string - initialMessageCount: number - timestamp: number - } - 'tanstack-ai-devtools:client:message-appended': { - clientId: string - messageId: string - role: 'user' | 'assistant' | 'system' | 'tool' - contentPreview: string - timestamp: number - } - 'tanstack-ai-devtools:client:message-sent': { - clientId: string - messageId: string - 
content: string - timestamp: number - } - 'tanstack-ai-devtools:client:loading-changed': { - clientId: string - isLoading: boolean - timestamp: number - } - 'tanstack-ai-devtools:client:error-changed': { - clientId: string - error: string | null - timestamp: number - } - 'tanstack-ai-devtools:client:messages-cleared': { - clientId: string - timestamp: number - } - 'tanstack-ai-devtools:client:reloaded': { - clientId: string - fromMessageIndex: number - timestamp: number - } - 'tanstack-ai-devtools:client:stopped': { - clientId: string - timestamp: number - } - 'tanstack-ai-devtools:tool:result-added': { - clientId: string - toolCallId: string - toolName: string - output: any - state: 'output-available' | 'output-error' - timestamp: number - } - 'tanstack-ai-devtools:tool:approval-responded': { - clientId: string - approvalId: string - toolCallId: string - approved: boolean - timestamp: number - } - 'tanstack-ai-devtools:processor:text-updated': { - streamId: string - content: string - timestamp: number - } - 'tanstack-ai-devtools:processor:tool-call-state-changed': { - streamId: string - toolCallId: string - toolName: string - state: ToolCallState - arguments: any - timestamp: number - } - 'tanstack-ai-devtools:processor:tool-result-state-changed': { - streamId: string - toolCallId: string - content: any - state: ToolResultState - error?: string - timestamp: number - } - 'tanstack-ai-devtools:client:assistant-message-updated': { - clientId: string - messageId: string - content: string - timestamp: number - } - 'tanstack-ai-devtools:client:tool-call-updated': { - clientId: string - messageId: string - toolCallId: string - toolName: string - state: ToolCallState - arguments: any - timestamp: number - } - 'tanstack-ai-devtools:client:approval-requested': { - clientId: string - messageId: string - toolCallId: string - toolName: string - input: any - approvalId: string - timestamp: number - } + // Image events + 'tanstack-ai-devtools:image:request:started': 
ImageRequestStartedEvent + 'tanstack-ai-devtools:image:request:completed': ImageRequestCompletedEvent + 'tanstack-ai-devtools:image:usage': ImageUsageEvent + + // Speech events + 'tanstack-ai-devtools:speech:request:started': SpeechRequestStartedEvent + 'tanstack-ai-devtools:speech:request:completed': SpeechRequestCompletedEvent + 'tanstack-ai-devtools:speech:usage': SpeechUsageEvent + + // Transcription events + 'tanstack-ai-devtools:transcription:request:started': TranscriptionRequestStartedEvent + 'tanstack-ai-devtools:transcription:request:completed': TranscriptionRequestCompletedEvent + 'tanstack-ai-devtools:transcription:usage': TranscriptionUsageEvent + + // Video events + 'tanstack-ai-devtools:video:request:started': VideoRequestStartedEvent + 'tanstack-ai-devtools:video:request:completed': VideoRequestCompletedEvent + 'tanstack-ai-devtools:video:usage': VideoUsageEvent + + // Client events + 'tanstack-ai-devtools:client:created': ClientCreatedEvent + 'tanstack-ai-devtools:client:loading:changed': ClientLoadingChangedEvent + 'tanstack-ai-devtools:client:error:changed': ClientErrorChangedEvent + 'tanstack-ai-devtools:client:messages:cleared': ClientMessagesClearedEvent + 'tanstack-ai-devtools:client:reloaded': ClientReloadedEvent + 'tanstack-ai-devtools:client:stopped': ClientStoppedEvent } class AiEventClient extends EventClient { diff --git a/packages/typescript/ai/src/index.ts b/packages/typescript/ai/src/index.ts index 0476457d..d65008cd 100644 --- a/packages/typescript/ai/src/index.ts +++ b/packages/typescript/ai/src/index.ts @@ -73,8 +73,8 @@ export { // All types export * from './types' -// Event client -export { aiEventClient } from './event-client' +// Event client + event types +export * from './event-client' // Message converters export { diff --git a/packages/typescript/ai/tests/ai-text.test.ts b/packages/typescript/ai/tests/ai-text.test.ts index 3eee78e8..8ff65fe7 100644 --- a/packages/typescript/ai/tests/ai-text.test.ts +++ 
b/packages/typescript/ai/tests/ai-text.test.ts @@ -127,11 +127,13 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { collectChunks(stream2), ]) - const event1 = capturedEvents.find((e) => e.type === 'text:started') + const event1 = capturedEvents.find( + (e) => e.type === 'text:request:started', + ) const event2 = capturedEvents .slice() .reverse() - .find((e) => e.type === 'text:started') + .find((e) => e.type === 'text:request:started') expect(event1).toBeDefined() expect(event2).toBeDefined() @@ -140,7 +142,7 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { expect(chunks2.length).toBeGreaterThan(0) }) - it('should emit chat:started event with correct data', async () => { + it('should emit text:request:started event with correct data', async () => { const adapter = new MockAdapter() await collectChunks( @@ -160,7 +162,9 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { }), ) - const event = capturedEvents.find((e) => e.type === 'text:started') + const event = capturedEvents.find( + (e) => e.type === 'text:request:started', + ) expect(event).toBeDefined() expect(event?.data.model).toBe('test-model') expect(event?.data.messageCount).toBe(2) @@ -168,7 +172,7 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { expect(event?.data.streaming).toBe(true) }) - it('should emit stream:started event with correct data', async () => { + it('should emit text:request:started event with correct data', async () => { const adapter = new MockAdapter() await collectChunks( @@ -178,7 +182,9 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { }), ) - const event = capturedEvents.find((e) => e.type === 'stream:started') + const event = capturedEvents.find( + (e) => e.type === 'text:request:started', + ) expect(event).toBeDefined() expect(event?.data.model).toBe('test-model') expect(event?.data.provider).toBe('mock') @@ -261,15 +267,15 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { 
expect(chunks[1]?.type).toBe('done') // Check events - expect(capturedEvents.some((e) => e.type === 'text:started')).toBe(true) - expect(capturedEvents.some((e) => e.type === 'stream:started')).toBe(true) expect( - capturedEvents.some((e) => e.type === 'stream:chunk:content'), + capturedEvents.some((e) => e.type === 'text:request:started'), ).toBe(true) - expect(capturedEvents.some((e) => e.type === 'stream:chunk:done')).toBe( + expect(capturedEvents.some((e) => e.type === 'text:chunk:content')).toBe( + true, + ) + expect(capturedEvents.some((e) => e.type === 'text:chunk:done')).toBe( true, ) - expect(capturedEvents.some((e) => e.type === 'stream:ended')).toBe(true) }) it('should accumulate content across multiple chunks', async () => { @@ -330,7 +336,7 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { // Check content events const contentEvents = capturedEvents.filter( - (e) => e.type === 'stream:chunk:content', + (e) => e.type === 'text:chunk:content', ) expect(contentEvents).toHaveLength(3) }) @@ -453,12 +459,11 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { // Check events expect( - capturedEvents.some((e) => e.type === 'stream:chunk:tool-call'), + capturedEvents.some((e) => e.type === 'text:chunk:tool-call'), + ).toBe(true) + expect( + capturedEvents.some((e) => e.type === 'tools:call:completed'), ).toBe(true) - expect(capturedEvents.some((e) => e.type === 'text:iteration')).toBe(true) - expect(capturedEvents.some((e) => e.type === 'tool:call-completed')).toBe( - true, - ) }) it('should handle streaming tool call arguments (incremental JSON)', async () => { @@ -646,12 +651,11 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { const toolResultChunks = chunks.filter((c) => c.type === 'tool_result') expect(toolResultChunks).toHaveLength(2) - // Check iteration event - const iterationEvents = capturedEvents.filter( - (e) => e.type === 'text:iteration', + // Check tool completion events + const toolCompletionEvents 
= capturedEvents.filter( + (e) => e.type === 'tools:call:completed', ) - expect(iterationEvents.length).toBeGreaterThan(0) - expect(iterationEvents[0]?.data.toolCallCount).toBe(2) + expect(toolCompletionEvents.length).toBeGreaterThan(0) }) it('should handle tool calls with accumulated content', async () => { @@ -944,9 +948,9 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { const result = JSON.parse(resultChunk.content) expect(result.result).toBe('success') - // Check tool:call-completed event + // Check tools:call:completed event const completedEvents = capturedEvents.filter( - (e) => e.type === 'tool:call-completed', + (e) => e.type === 'tools:call:completed', ) expect(completedEvents.length).toBeGreaterThan(0) }) @@ -1215,7 +1219,7 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { // Should emit approval-requested event expect( - capturedEvents.some((e) => e.type === 'stream:approval-requested'), + capturedEvents.some((e) => e.type === 'tools:approval:requested'), ).toBe(true) }) @@ -1277,7 +1281,7 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { // Should emit tool-input-available event expect( - capturedEvents.some((e) => e.type === 'stream:tool-input-available'), + capturedEvents.some((e) => e.type === 'tools:input:available'), ).toBe(true) }) @@ -1916,15 +1920,9 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { expect((chunks[1] as any).error.message).toBe('API error occurred') // Should emit error event - expect(capturedEvents.some((e) => e.type === 'stream:chunk:error')).toBe( + expect(capturedEvents.some((e) => e.type === 'text:chunk:error')).toBe( true, ) - - // Should NOT emit stream:ended after error - const endedEvents = capturedEvents.filter( - (e) => e.type === 'stream:ended', - ) - expect(endedEvents).toHaveLength(0) }) }) @@ -2053,20 +2051,15 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { const eventTypes = capturedEvents.map((e) => e.type) // Check event order and 
presence - expect(eventTypes.includes('text:started')).toBe(true) - expect(eventTypes.includes('stream:started')).toBe(true) - expect(eventTypes.includes('stream:chunk:content')).toBe(true) - expect(eventTypes.includes('stream:chunk:done')).toBe(true) - expect(eventTypes.includes('stream:ended')).toBe(true) - - // chat:started should come before stream:started - const chatStartedIndex = eventTypes.indexOf('text:started') - const streamStartedIndex = eventTypes.indexOf('stream:started') - expect(chatStartedIndex).toBeLessThan(streamStartedIndex) - - // stream:ended should come last - const streamEndedIndex = eventTypes.indexOf('stream:ended') - expect(streamEndedIndex).toBe(eventTypes.length - 1) + expect(eventTypes.includes('text:request:started')).toBe(true) + expect(eventTypes.includes('text:chunk:content')).toBe(true) + expect(eventTypes.includes('text:chunk:done')).toBe(true) + expect(eventTypes.includes('text:request:completed')).toBe(true) + + // request:started should come before first content chunk + const requestStartedIndex = eventTypes.indexOf('text:request:started') + const contentIndex = eventTypes.indexOf('text:chunk:content') + expect(requestStartedIndex).toBeLessThan(contentIndex) }) it('should emit iteration events for tool calls', async () => { @@ -2133,15 +2126,14 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { }), ) - // Should emit chat:iteration event - const iterationEvents = capturedEvents.filter( - (e) => e.type === 'text:iteration', + // Should emit tools:call:completed event + const toolCompletionEvents = capturedEvents.filter( + (e) => e.type === 'tools:call:completed', ) - expect(iterationEvents.length).toBeGreaterThan(0) - expect(iterationEvents[0]?.data.iterationNumber).toBe(1) + expect(toolCompletionEvents.length).toBeGreaterThan(0) }) - it('should emit stream:ended event after successful completion', async () => { + it('should emit text:request:completed event after successful completion', async () => { const 
adapter = new MockAdapter() await collectChunks( @@ -2151,10 +2143,11 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { }), ) - const endedEvent = capturedEvents.find((e) => e.type === 'stream:ended') - expect(endedEvent).toBeDefined() - expect(endedEvent?.data.totalChunks).toBeGreaterThan(0) - expect(endedEvent?.data.duration).toBeGreaterThanOrEqual(0) + const completedEvent = capturedEvents.find( + (e) => e.type === 'text:request:completed', + ) + expect(completedEvent).toBeDefined() + expect(completedEvent?.data.duration).toBeGreaterThanOrEqual(0) }) it('should track total chunk count across iterations', async () => { @@ -2230,10 +2223,10 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { }), ) - const endedEvent = capturedEvents.find((e) => e.type === 'stream:ended') - expect(endedEvent).toBeDefined() - // Should count: 3 chunks from iteration 1 + 2 chunks from iteration 2 + 1 tool_result = 6 - expect(endedEvent?.data.totalChunks).toBeGreaterThanOrEqual(4) + const completedEvent = capturedEvents.find( + (e) => e.type === 'text:request:completed', + ) + expect(completedEvent).toBeDefined() }) }) @@ -2363,7 +2356,7 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { }) describe('Tool Result Chunk Events from Adapter', () => { - it('should emit stream:chunk:tool-result event when adapter sends tool_result chunk', async () => { + it('should emit text:chunk:tool-result event when adapter sends tool_result chunk', async () => { class ToolResultChunkAdapter extends MockAdapter { async *chatStream(options: TextOptions): AsyncIterable { this.trackStreamCall(options) @@ -2406,7 +2399,7 @@ describe('chat() - Comprehensive Logic Path Coverage', () => { // Should emit tool-result event for the tool_result chunk from adapter const toolResultEvents = capturedEvents.filter( - (e) => e.type === 'stream:chunk:tool-result', + (e) => e.type === 'text:chunk:tool-result', ) expect(toolResultEvents.length).toBeGreaterThan(0) 
expect(toolResultEvents[0]?.data.toolCallId).toBe('call-previous') diff --git a/testing/panel/src/lib/recording.ts b/testing/panel/src/lib/recording.ts index bb1a7e42..6948d27d 100644 --- a/testing/panel/src/lib/recording.ts +++ b/testing/panel/src/lib/recording.ts @@ -1,11 +1,19 @@ import * as fs from 'node:fs/promises' import * as path from 'node:path' -import { aiEventClient } from '@tanstack/ai/event-client' -import type { StreamChunk, ToolCall } from '@tanstack/ai' +import { aiEventClient as baseAiEventClient } from '@tanstack/ai/event-client' +import type { AIDevtoolsEventMap } from '../../../../packages/typescript/ai/src/event-client' +import type { StreamChunk } from '@tanstack/ai' /** * Recording data structure matching the old format */ +export interface RecordedToolCall { + id: string + name: string + arguments: string + result?: unknown +} + export interface ChunkRecording { version: '1.0' timestamp: number @@ -18,7 +26,7 @@ export interface ChunkRecording { }> result?: { content: string - toolCalls: Array + toolCalls: Array finishReason: string | null } } @@ -57,7 +65,7 @@ export function createEventRecording( index: number }> accumulatedContent: string - toolCalls: Map + toolCalls: Map finishReason: string | null traceId?: string } @@ -71,11 +79,17 @@ export function createEventRecording( // Helper to reconstruct StreamChunk from events const createContentChunk = ( content: string, - delta?: string, + delta: string | undefined, + model: string, + timestamp: number, + id: string, ): StreamChunk => ({ type: 'content', content, - delta, + delta: delta ?? 
'', + model, + timestamp, + id, }) const createToolCallChunk = ( @@ -83,6 +97,9 @@ export function createEventRecording( toolName: string, index: number, arguments_: string, + model: string, + timestamp: number, + id: string, ): StreamChunk => ({ type: 'tool_call', toolCall: { @@ -94,17 +111,40 @@ export function createEventRecording( }, }, index, + model, + timestamp, + id, }) const createToolResultChunk = ( toolCallId: string, result: string, + model: string, + timestamp: number, + id: string, ): StreamChunk => ({ type: 'tool_result', toolCallId, content: result, + model, + timestamp, + id, }) + type FinishReason = 'stop' | 'length' | 'content_filter' | 'tool_calls' | null + + const normalizeFinishReason = (finishReason: string | null): FinishReason => { + if ( + finishReason === 'stop' || + finishReason === 'length' || + finishReason === 'content_filter' || + finishReason === 'tool_calls' + ) { + return finishReason + } + return 'stop' + } + const createDoneChunk = ( finishReason: string | null, usage?: { @@ -112,71 +152,89 @@ export function createEventRecording( completionTokens: number totalTokens: number }, + model?: string, + timestamp?: number, + id?: string, ): StreamChunk => ({ type: 'done', - finishReason: finishReason as any, + finishReason: normalizeFinishReason(finishReason), usage, + model: model ?? 'unknown', + timestamp: timestamp ?? Date.now(), + id: id ?? `done-${Date.now()}`, }) - const createErrorChunk = (error: string): StreamChunk => ({ + const createErrorChunk = ( + error: string, + model: string, + timestamp: number, + id: string, + ): StreamChunk => ({ type: 'error', error: { message: error, }, + model, + timestamp, + id, }) const createThinkingChunk = ( content: string, - delta?: string, + delta: string | undefined, + model: string, + timestamp: number, + id: string, ): StreamChunk => ({ type: 'thinking', content, - delta, + delta: delta ?? 
'', + model, + timestamp, + id, }) - // Subscribe to stream:started to initialize recording + type DevtoolsEventHandler = + (event: { payload: AIDevtoolsEventMap[TEventName] }) => void + + type DevtoolsEventClient = { + on: ( + eventName: TEventName, + handler: DevtoolsEventHandler, + options?: { withEventTarget?: boolean }, + ) => () => void + } + + const aiEventClient = baseAiEventClient as DevtoolsEventClient + + // Subscribe to text:request:started to initialize recording const unsubscribeStarted = aiEventClient.on( - 'stream:started', + 'text:request:started', (event) => { - const { streamId, model, provider } = event.payload - // If traceId is provided, we'll track this streamId when we see chat:started - // For now, track all streams (we'll filter later) + const { streamId, model, provider, requestId, options, modelOptions } = + event.payload + activeStreams.set(streamId, { streamId, - requestId: '', // Will be set from chat:started + requestId, model, provider, chunks: [], accumulatedContent: '', - toolCalls: new Map(), + toolCalls: new Map(), finishReason: null, traceId: undefined, }) - }, - { withEventTarget: false }, - ) - // Subscribe to chat:started to get requestId and check if we should record - const unsubscribeChatStarted = aiEventClient.on( - 'chat:started', - (event) => { - const { streamId, requestId, providerOptions } = event.payload - const stream = activeStreams.get(streamId) - if (stream) { - stream.requestId = requestId - // Check if providerOptions contain traceId matching our filter - // If traceId is provided, only record streams that match - if ( - traceId && - providerOptions && - (providerOptions as any).traceId === traceId - ) { - stream.traceId = traceId - recordingStreamId = streamId - } else if (!traceId) { - // If no traceId filter, record all streams - recordingStreamId = streamId - } + const optionsTraceId = options?.traceId + const modelOptionsTraceId = modelOptions?.traceId + + const eventTraceId = optionsTraceId || 
modelOptionsTraceId + + if (traceId && eventTraceId === traceId) { + recordingStreamId = streamId + } else if (!traceId) { + recordingStreamId = streamId } }, { withEventTarget: false }, @@ -190,15 +248,23 @@ export function createEventRecording( // Subscribe to content chunks const unsubscribeContent = aiEventClient.on( - 'stream:chunk:content', + 'text:chunk:content', (event) => { - const { streamId, content, delta, timestamp } = event.payload + const { streamId, content, delta, timestamp, model } = event.payload if (!shouldRecord(streamId)) return const stream = activeStreams.get(streamId) if (stream) { stream.accumulatedContent = content + const resolvedModel = model ?? 'unknown' + const chunkId = `chunk-${chunkIndex}` stream.chunks.push({ - chunk: createContentChunk(content, delta), + chunk: createContentChunk( + content, + delta, + resolvedModel, + timestamp, + chunkId, + ), timestamp, index: chunkIndex++, }) @@ -209,7 +275,7 @@ export function createEventRecording( // Subscribe to tool call chunks const unsubscribeToolCall = aiEventClient.on( - 'stream:chunk:tool-call', + 'text:chunk:tool-call', (event) => { const { streamId, @@ -218,12 +284,23 @@ export function createEventRecording( index, arguments: args, timestamp, + model, } = event.payload if (!shouldRecord(streamId)) return const stream = activeStreams.get(streamId) if (stream) { + const resolvedModel = model ?? 
'unknown' + const chunkId = `chunk-${chunkIndex}` stream.chunks.push({ - chunk: createToolCallChunk(toolCallId, toolName, index, args), + chunk: createToolCallChunk( + toolCallId, + toolName, + index, + args, + resolvedModel, + timestamp, + chunkId, + ), timestamp, index: chunkIndex++, }) @@ -237,7 +314,7 @@ export function createEventRecording( name: toolName, arguments: args, result: undefined, - } as ToolCall) + }) } } }, @@ -246,14 +323,22 @@ export function createEventRecording( // Subscribe to tool result chunks const unsubscribeToolResult = aiEventClient.on( - 'stream:chunk:tool-result', + 'text:chunk:tool-result', (event) => { - const { streamId, toolCallId, result, timestamp } = event.payload + const { streamId, toolCallId, result, timestamp, model } = event.payload if (!shouldRecord(streamId)) return const stream = activeStreams.get(streamId) if (stream) { + const resolvedModel = model ?? 'unknown' + const chunkId = `chunk-${chunkIndex}` stream.chunks.push({ - chunk: createToolResultChunk(toolCallId, result), + chunk: createToolResultChunk( + toolCallId, + result, + resolvedModel, + timestamp, + chunkId, + ), timestamp, index: chunkIndex++, }) @@ -264,15 +349,23 @@ export function createEventRecording( // Subscribe to done chunks const unsubscribeDone = aiEventClient.on( - 'stream:chunk:done', + 'text:chunk:done', (event) => { - const { streamId, finishReason, usage, timestamp } = event.payload + const { streamId, finishReason, usage, timestamp, model } = event.payload if (!shouldRecord(streamId)) return const stream = activeStreams.get(streamId) if (stream) { stream.finishReason = finishReason || null + const resolvedModel = model ?? 
'unknown' + const chunkId = `chunk-${chunkIndex}` stream.chunks.push({ - chunk: createDoneChunk(finishReason, usage), + chunk: createDoneChunk( + finishReason, + usage, + resolvedModel, + timestamp, + chunkId, + ), timestamp, index: chunkIndex++, }) @@ -283,14 +376,16 @@ export function createEventRecording( // Subscribe to error chunks const unsubscribeError = aiEventClient.on( - 'stream:chunk:error', + 'text:chunk:error', (event) => { - const { streamId, error, timestamp } = event.payload + const { streamId, error, timestamp, model } = event.payload if (!shouldRecord(streamId)) return const stream = activeStreams.get(streamId) if (stream) { + const resolvedModel = model ?? 'unknown' + const chunkId = `chunk-${chunkIndex}` stream.chunks.push({ - chunk: createErrorChunk(error), + chunk: createErrorChunk(error, resolvedModel, timestamp, chunkId), timestamp, index: chunkIndex++, }) @@ -301,14 +396,22 @@ export function createEventRecording( // Subscribe to thinking chunks const unsubscribeThinking = aiEventClient.on( - 'stream:chunk:thinking', + 'text:chunk:thinking', (event) => { - const { streamId, content, delta, timestamp } = event.payload + const { streamId, content, delta, timestamp, model } = event.payload if (!shouldRecord(streamId)) return const stream = activeStreams.get(streamId) if (stream) { + const resolvedModel = model ?? 
'unknown' + const chunkId = `chunk-${chunkIndex}` stream.chunks.push({ - chunk: createThinkingChunk(content, delta), + chunk: createThinkingChunk( + content, + delta, + resolvedModel, + timestamp, + chunkId, + ), timestamp, index: chunkIndex++, }) @@ -317,10 +420,10 @@ export function createEventRecording( { withEventTarget: false }, ) - // Subscribe to chat:completed to get final tool calls + // Subscribe to text:request:completed to get final tool calls const unsubscribeChatCompleted = aiEventClient.on( - 'chat:completed', - async (event) => { + 'text:request:completed', + (event) => { const { streamId, content, finishReason } = event.payload if (!shouldRecord(streamId)) return const stream = activeStreams.get(streamId) @@ -332,9 +435,9 @@ export function createEventRecording( { withEventTarget: false }, ) - // Subscribe to tool:call-completed to update tool call results + // Subscribe to tools:call:completed to update tool call results const unsubscribeToolCompleted = aiEventClient.on( - 'tool:call-completed', + 'tools:call:completed', (event) => { const { streamId, toolCallId, toolName, result } = event.payload if (!shouldRecord(streamId)) return @@ -351,16 +454,16 @@ export function createEventRecording( name: toolName, arguments: '', result, - } as ToolCall) + }) } } }, { withEventTarget: false }, ) - // Subscribe to stream:ended to save recording + // Subscribe to text:request:completed to save recording const unsubscribeStreamEnded = aiEventClient.on( - 'stream:ended', + 'text:request:completed', async (event) => { const { streamId } = event.payload if (!shouldRecord(streamId)) return @@ -410,7 +513,6 @@ export function createEventRecording( return { stop: () => { unsubscribeStarted() - unsubscribeChatStarted() unsubscribeContent() unsubscribeToolCall() unsubscribeToolResult()