4 changes: 2 additions & 2 deletions src/browser/App.tsx
@@ -13,7 +13,7 @@ import { matchesKeybind, KEYBINDS } from "./utils/ui/keybinds";
import { useResumeManager } from "./hooks/useResumeManager";
import { useUnreadTracking } from "./hooks/useUnreadTracking";
import { useAutoCompactContinue } from "./hooks/useAutoCompactContinue";
import { useWorkspaceStoreRaw, useWorkspaceRecency } from "./stores/WorkspaceStore";
import { useWorkspaceStoreRaw, useWorkspaceRecency, canInterrupt } from "./stores/WorkspaceStore";
import { ChatInput } from "./components/ChatInput/index";
import type { ChatInputAPI } from "./components/ChatInput/types";

@@ -415,7 +415,7 @@ function AppInner() {
const allStates = workspaceStore.getAllStates();
const streamingModels = new Map<string, string>();
for (const [workspaceId, state] of allStates) {
if (state.canInterrupt && state.currentModel) {
if (canInterrupt(state.interruptType) && state.currentModel) {
streamingModels.set(workspaceId, state.currentModel);
}
}
41 changes: 22 additions & 19 deletions src/browser/components/AIView.tsx
@@ -20,7 +20,11 @@ import { formatKeybind, KEYBINDS } from "@/browser/utils/ui/keybinds";
import { useAutoScroll } from "@/browser/hooks/useAutoScroll";
import { usePersistedState } from "@/browser/hooks/usePersistedState";
import { useThinking } from "@/browser/contexts/ThinkingContext";
import { useWorkspaceState, useWorkspaceAggregator } from "@/browser/stores/WorkspaceStore";
import {
useWorkspaceState,
useWorkspaceAggregator,
canInterrupt,
} from "@/browser/stores/WorkspaceStore";
import { WorkspaceHeader } from "./WorkspaceHeader";
import { getModelName } from "@/common/utils/ai/models";
import type { DisplayedMessage } from "@/common/types/message";
@@ -227,15 +231,15 @@ const AIViewInner: React.FC<AIViewProps> = ({
// Track if last message was interrupted or errored (for RetryBarrier)
// Uses same logic as useResumeManager for DRY
const showRetryBarrier = workspaceState
? !workspaceState.canInterrupt &&
? !canInterrupt(workspaceState.interruptType) &&
hasInterruptedStream(workspaceState.messages, workspaceState.pendingStreamStartTime)
: false;

// Handle keyboard shortcuts (using optional refs that are safe even if not initialized)
useAIViewKeybinds({
workspaceId,
currentModel: workspaceState?.currentModel ?? null,
canInterrupt: workspaceState?.canInterrupt ?? false,
canInterrupt: workspaceState ? canInterrupt(workspaceState.interruptType) : false,
showRetryBarrier,
currentWorkspaceThinking,
setThinkingLevel,
@@ -284,8 +288,7 @@ const AIViewInner: React.FC<AIViewProps> = ({
);
}

// Extract state from workspace state
const { messages, canInterrupt, isCompacting, loading, currentModel } = workspaceState;
const { messages, interruptType, isCompacting, loading, currentModel } = workspaceState;

// Get active stream message ID for token counting
const activeStreamMessageId = aggregator.getActiveStreamMessageId();
@@ -297,6 +300,14 @@ const AIViewInner: React.FC<AIViewProps> = ({
// Merge consecutive identical stream errors
const mergedMessages = mergeConsecutiveStreamErrors(messages);

const model = currentModel ? getModelName(currentModel) : "";
const interrupting = interruptType === "hard";

const prefix = interrupting ? "⏸️ Interrupting " : "";
const action = interrupting ? "" : isCompacting ? "compacting..." : "streaming...";

const statusText = `${prefix}${model} ${action}`.trim();

// When editing, find the cutoff point
const editCutoffHistoryId = editingMessage
? mergedMessages.find(
@@ -369,8 +380,8 @@ const AIViewInner: React.FC<AIViewProps> = ({
onTouchMove={markUserInteraction}
onScroll={handleScroll}
role="log"
aria-live={canInterrupt ? "polite" : "off"}
aria-busy={canInterrupt}
aria-live={canInterrupt(interruptType) ? "polite" : "off"}
aria-busy={canInterrupt(interruptType)}
aria-label="Conversation transcript"
tabIndex={0}
className="h-full overflow-y-auto p-[15px] leading-[1.5] break-words whitespace-pre-wrap"
@@ -429,21 +440,13 @@ const AIViewInner: React.FC<AIViewProps> = ({
</>
)}
<PinnedTodoList workspaceId={workspaceId} />
{canInterrupt && (
{canInterrupt(interruptType) && (
<StreamingBarrier
statusText={
isCompacting
? currentModel
? `${getModelName(currentModel)} compacting...`
: "compacting..."
: currentModel
? `${getModelName(currentModel)} streaming...`
: "streaming..."
}
statusText={statusText}
cancelText={
isCompacting
? `${formatKeybind(vimEnabled ? KEYBINDS.INTERRUPT_STREAM_VIM : KEYBINDS.INTERRUPT_STREAM_NORMAL)} cancel | ${formatKeybind(KEYBINDS.ACCEPT_EARLY_COMPACTION)} accept early`
: `hit ${formatKeybind(vimEnabled ? KEYBINDS.INTERRUPT_STREAM_VIM : KEYBINDS.INTERRUPT_STREAM_NORMAL)} to cancel`
: `hit ${formatKeybind(vimEnabled ? KEYBINDS.INTERRUPT_STREAM_VIM : KEYBINDS.INTERRUPT_STREAM_NORMAL)} to ${interruptType === "hard" ? "force " : ""}cancel`
}
tokenCount={
activeStreamMessageId
Expand Down Expand Up @@ -480,7 +483,7 @@ const AIViewInner: React.FC<AIViewProps> = ({
editingMessage={editingMessage}
onCancelEdit={handleCancelEdit}
onEditLastUserMessage={handleEditLastUserMessage}
canInterrupt={canInterrupt}
canInterrupt={canInterrupt(interruptType)}
onReady={handleChatInputReady}
/>
</div>
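Reviewer note: the new statusText logic above is compact enough to table-test. A standalone sketch for reference (buildStatusText is a hypothetical mirror of the inline computation; the model name is a made-up input):

// Hypothetical standalone mirror of the inline statusText computation in AIView.tsx.
type InterruptType = "soft" | "hard" | "none";

function buildStatusText(model: string, interruptType: InterruptType, isCompacting: boolean): string {
  const interrupting = interruptType === "hard";
  const prefix = interrupting ? "⏸️ Interrupting " : "";
  const action = interrupting ? "" : isCompacting ? "compacting..." : "streaming...";
  return `${prefix}${model} ${action}`.trim();
}

buildStatusText("some-model", "soft", false); // "some-model streaming..."
buildStatusText("some-model", "soft", true);  // "some-model compacting..."
buildStatusText("some-model", "hard", true);  // "⏸️ Interrupting some-model"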
8 changes: 4 additions & 4 deletions src/browser/components/WorkspaceStatusDot.tsx
@@ -1,5 +1,5 @@
import { cn } from "@/common/lib/utils";
import { useWorkspaceSidebarState } from "@/browser/stores/WorkspaceStore";
import { canInterrupt, useWorkspaceSidebarState } from "@/browser/stores/WorkspaceStore";
import { getStatusTooltip } from "@/browser/utils/ui/statusTooltip";
import { memo, useMemo } from "react";
import { Tooltip, TooltipWrapper } from "./Tooltip";
@@ -11,10 +11,10 @@ export const WorkspaceStatusDot = memo<{
size?: number;
}>(
({ workspaceId, lastReadTimestamp, onClick, size = 8 }) => {
const { canInterrupt, currentModel, agentStatus, recencyTimestamp } =
const { interruptType, currentModel, agentStatus, recencyTimestamp } =
useWorkspaceSidebarState(workspaceId);

const streaming = canInterrupt;
const streaming = canInterrupt(interruptType);

// Compute unread status if lastReadTimestamp provided (sidebar only)
const unread = useMemo(() => {
@@ -35,7 +35,7 @@ export const WorkspaceStatusDot = memo<{
[streaming, currentModel, agentStatus, unread, recencyTimestamp]
);

const bgColor = canInterrupt ? "bg-blue-400" : unread ? "bg-gray-300" : "bg-muted-dark";
const bgColor = streaming ? "bg-blue-400" : unread ? "bg-gray-300" : "bg-muted-dark";
const cursor = onClick && !streaming ? "cursor-pointer" : "cursor-default";

return (
8 changes: 6 additions & 2 deletions src/browser/hooks/useResumeManager.ts
@@ -1,5 +1,9 @@
import { useEffect, useRef } from "react";
import { useWorkspaceStoreRaw, type WorkspaceState } from "@/browser/stores/WorkspaceStore";
import {
canInterrupt,
useWorkspaceStoreRaw,
type WorkspaceState,
} from "@/browser/stores/WorkspaceStore";
import { CUSTOM_EVENTS, type CustomEventType } from "@/common/constants/events";
import { getAutoRetryKey, getRetryStateKey } from "@/common/constants/storage";
import { getSendOptionsFromStorage } from "@/browser/utils/messages/sendOptions";
@@ -100,7 +104,7 @@ export function useResumeManager() {
}

// 1. Must have interrupted stream that's eligible for auto-retry (not currently streaming)
if (state.canInterrupt) return false; // Currently streaming
if (canInterrupt(state.interruptType)) return false; // Currently streaming

if (!isEligibleForAutoRetry(state.messages, state.pendingStreamStartTime)) {
return false;
6 changes: 3 additions & 3 deletions src/browser/stores/WorkspaceStore.test.ts
@@ -253,7 +253,7 @@ describe("WorkspaceStore", () => {

expect(state).toMatchObject({
messages: [],
canInterrupt: false,
interruptType: "none",
isCompacting: false,
loading: true, // loading because not caught up
muxMessages: [],
@@ -273,7 +273,7 @@
// Object.is() comparison and skip re-renders for primitive values.
// TODO: Optimize aggregator caching in Phase 2
expect(state1).toEqual(state2);
expect(state1.canInterrupt).toBe(state2.canInterrupt);
expect(state1.interruptType).toBe(state2.interruptType);
expect(state1.loading).toBe(state2.loading);
});
});
@@ -428,7 +428,7 @@ describe("WorkspaceStore", () => {

const state2 = store.getWorkspaceState("test-workspace");
expect(state1).not.toBe(state2); // Cache should be invalidated
expect(state2.canInterrupt).toBe(true); // Stream started, so can interrupt
expect(state2.interruptType).not.toBe("none"); // Stream started, so can interrupt
});

it("invalidates getAllStates() cache when workspace changes", async () => {
21 changes: 16 additions & 5 deletions src/browser/stores/WorkspaceStore.ts
@@ -32,7 +32,7 @@ import { createFreshRetryState } from "@/browser/utils/messages/retryState";
export interface WorkspaceState {
name: string; // User-facing workspace name (e.g., "feature-branch")
messages: DisplayedMessage[];
canInterrupt: boolean;
interruptType: InterruptType; // Whether an interrupt is soft/hard or not possible
isCompacting: boolean;
loading: boolean;
muxMessages: MuxMessage[];
@@ -43,12 +43,18 @@
pendingStreamStartTime: number | null;
}

export type InterruptType = "soft" | "hard" | "none";

export function canInterrupt(interruptible: InterruptType): boolean {
return interruptible === "soft" || interruptible === "hard";
}

/**
* Subset of WorkspaceState needed for sidebar display.
* Subscribing to only these fields prevents re-renders when messages update.
*/
export interface WorkspaceSidebarState {
canInterrupt: boolean;
interruptType: InterruptType;
currentModel: string | null;
recencyTimestamp: number | null;
agentStatus: { emoji: string; message: string; url?: string } | undefined;
@@ -302,10 +308,15 @@
const messages = aggregator.getAllMessages();
const metadata = this.workspaceMetadata.get(workspaceId);

const hasHardInterrupt = activeStreams.some((c) => c.softInterruptPending);
const hasSoftInterrupt = activeStreams.length > 0;

const interruptible = hasHardInterrupt ? "hard" : hasSoftInterrupt ? "soft" : "none";

return {
name: metadata?.name ?? workspaceId, // Fall back to ID if metadata missing
messages: aggregator.getDisplayedMessages(),
canInterrupt: activeStreams.length > 0,
interruptType: interruptible,
isCompacting: aggregator.isCompacting(),
loading: !hasMessages && !isCaughtUp,
muxMessages: messages,
@@ -333,7 +344,7 @@
// Return cached if values match
if (
cached &&
cached.canInterrupt === fullState.canInterrupt &&
cached.interruptType === fullState.interruptType &&
cached.currentModel === fullState.currentModel &&
cached.recencyTimestamp === fullState.recencyTimestamp &&
cached.agentStatus === fullState.agentStatus
@@ -343,7 +354,7 @@

// Create and cache new state
const newState: WorkspaceSidebarState = {
canInterrupt: fullState.canInterrupt,
interruptType: fullState.interruptType,
currentModel: fullState.currentModel,
recencyTimestamp: fullState.recencyTimestamp,
agentStatus: fullState.agentStatus,
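Reviewer note: a self-contained sketch of the new state derivation, for reference. deriveInterruptType is a hypothetical helper that mirrors the inline hasHardInterrupt / hasSoftInterrupt logic in getWorkspaceState() above; canInterrupt is copied from the diff:

// Sketch only: mirrors the derivation in getWorkspaceState().
type InterruptType = "soft" | "hard" | "none";

function canInterrupt(interruptType: InterruptType): boolean {
  return interruptType === "soft" || interruptType === "hard";
}

function deriveInterruptType(activeStreams: Array<{ softInterruptPending: boolean }>): InterruptType {
  if (activeStreams.some((s) => s.softInterruptPending)) return "hard"; // a soft interrupt is already pending
  return activeStreams.length > 0 ? "soft" : "none"; // any active stream is interruptible
}

deriveInterruptType([]);                                // "none" -> canInterrupt: false
deriveInterruptType([{ softInterruptPending: false }]); // "soft" -> canInterrupt: true
deriveInterruptType([{ softInterruptPending: true }]);  // "hard" -> canInterrupt: true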
33 changes: 27 additions & 6 deletions src/browser/utils/messages/StreamingMessageAggregator.ts
@@ -37,6 +37,7 @@ interface StreamingContext {
startTime: number;
isComplete: boolean;
isCompacting: boolean;
softInterruptPending: boolean;
model: string;
}

@@ -292,6 +293,15 @@ export class StreamingMessageAggregator {
return false;
}

getSoftInterruptPending(): boolean {
for (const context of this.activeStreams.values()) {
if (context.softInterruptPending) {
return true;
}
}
return false;
}

getCurrentModel(): string | undefined {
// If there's an active stream, return its model
for (const context of this.activeStreams.values()) {
@@ -357,6 +367,7 @@ export class StreamingMessageAggregator {
startTime: Date.now(),
isComplete: false,
isCompacting,
softInterruptPending: false,
model: data.model,
};

@@ -379,12 +390,22 @@
const message = this.messages.get(data.messageId);
if (!message) return;

// Append each delta as a new part (merging happens at display time)
message.parts.push({
type: "text",
text: data.delta,
timestamp: data.timestamp,
});
// Handle soft interrupt signal from backend
if (data.softInterruptPending !== undefined) {
const context = this.activeStreams.get(data.messageId);
if (context) {
context.softInterruptPending = data.softInterruptPending;
}
}

// Skip appending if this is an empty delta (e.g., just signaling interrupt)
if (data.delta) {
message.parts.push({
type: "text",
text: data.delta,
timestamp: data.timestamp,
});
}

// Track delta for token counting and TPS calculation
this.trackDelta(data.messageId, data.tokens, data.timestamp, "text");
1 change: 1 addition & 0 deletions src/common/types/stream.ts
@@ -27,6 +27,7 @@ export interface StreamDeltaEvent {
delta: string;
tokens: number; // Token count for this delta
timestamp: number; // When delta was received (Date.now())
softInterruptPending?: boolean; // Set to true when soft interrupt is triggered
}

export interface StreamEndEvent {
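Reviewer note: the sender of these events is not part of this diff. A sketch of how a soft interrupt might be signaled, assuming the remaining StreamDeltaEvent fields (e.g. messageId) match how the aggregator reads them above:

// Sketch only: an empty delta that carries the soft-interrupt flag; the aggregator
// records the flag on the active stream and skips appending a text part.
interface StreamDeltaEvent {
  messageId: string; // assumed field, hidden above the hunk but used by the aggregator
  delta: string;
  tokens: number; // token count for this delta
  timestamp: number; // when the delta was produced (Date.now())
  softInterruptPending?: boolean; // set when a soft interrupt has been requested
}

function makeSoftInterruptSignal(messageId: string): StreamDeltaEvent {
  return {
    messageId,
    delta: "", // nothing to append; this event only signals the interrupt
    tokens: 0,
    timestamp: Date.now(),
    softInterruptPending: true,
  };
}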