From de3a07f98798abc304cf03e80c999e25d21c89f2 Mon Sep 17 00:00:00 2001
From: Ammar
Date: Mon, 24 Nov 2025 19:46:45 -0600
Subject: [PATCH] =?UTF-8?q?=F0=9F=A4=96=20fix:=20stabilize=20streaming=20b?=
 =?UTF-8?q?arrier=20token=20count=20in=20stories?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

The interval callbacks that maintain streaming state were adding 1 token
every 2 seconds, causing flaky token counts (72 vs 73) depending on test
timing. Setting tokens: 0 keeps streaming state active without affecting
the displayed count.

_Generated with mux_
---
 src/browser/App.stories.tsx | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/src/browser/App.stories.tsx b/src/browser/App.stories.tsx
index aa3e5f5725..b26916b98f 100644
--- a/src/browser/App.stories.tsx
+++ b/src/browser/App.stories.tsx
@@ -996,13 +996,14 @@ export const ActiveWorkspaceWithChat: Story = {
       }, 100);

       // Keep sending reasoning deltas to maintain streaming state
+      // tokens: 0 to avoid flaky token counts in visual tests
       const intervalId = setInterval(() => {
         callback({
           type: "reasoning-delta",
           workspaceId: workspaceId,
           messageId: "msg-12",
           delta: ".",
-          tokens: 1,
+          tokens: 0,
           timestamp: NOW,
         });
       }, 2000);
@@ -1108,13 +1109,14 @@ export const ActiveWorkspaceWithChat: Story = {
       }, 100);

       // Keep sending deltas to maintain streaming state
+      // tokens: 0 to avoid flaky token counts in visual tests
       const intervalId = setInterval(() => {
         callback({
           type: "stream-delta",
           workspaceId: streamingWorkspaceId,
           messageId: "stream-msg-2",
           delta: ".",
-          tokens: 1,
+          tokens: 0,
           timestamp: NOW,
         });
       }, 2000);