Skip to content
Merged
Show file tree
Hide file tree
Changes from 15 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions bun.lock
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
{
"lockfileVersion": 1,
"configVersion": 0,
"workspaces": {
"": {
"name": "@coder/cmux",
Expand Down
30 changes: 28 additions & 2 deletions mobile/src/utils/slashCommandHelpers.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -55,11 +55,37 @@ describe("buildMobileCompactionPayload", () => {
expect(payload.metadata.parsed).toEqual({
model: "anthropic:claude-opus-4-1",
maxOutputTokens: 800,
continueMessage: parsed.continueMessage,
resumeModel: baseOptions.model,
continueMessage: {
text: parsed.continueMessage,
imageParts: [],
model: baseOptions.model,
},
});
expect(payload.sendOptions.model).toBe("anthropic:claude-opus-4-1");
expect(payload.sendOptions.mode).toBe("compact");
expect(payload.sendOptions.maxOutputTokens).toBe(800);
});

it("omits continueMessage when no text provided", () => {
  // Base send options from which the payload builder derives its defaults.
  const sendOptions: SendMessageOptions = {
    model: "anthropic:claude-sonnet-4-5",
    mode: "plan",
    thinkingLevel: "default",
  };

  // A parsed /compact command carrying no continuation text and no model override.
  const parsedCommand = {
    type: "compact" as const,
    maxOutputTokens: 1000,
    continueMessage: undefined,
    model: undefined,
  };

  const payload = buildMobileCompactionPayload(parsedCommand, sendOptions);

  // Narrow the metadata union before inspecting compaction-specific fields.
  if (payload.metadata.type !== "compaction-request") {
    throw new Error("Expected compaction metadata");
  }

  // No continuation text was supplied, so the payload must omit continueMessage entirely.
  expect(payload.metadata.parsed.continueMessage).toBeUndefined();
});
});
9 changes: 7 additions & 2 deletions mobile/src/utils/slashCommandHelpers.ts
Original file line number Diff line number Diff line change
Expand Up @@ -56,8 +56,13 @@ export function buildMobileCompactionPayload(
parsed: {
model: parsed.model,
maxOutputTokens: parsed.maxOutputTokens,
continueMessage: parsed.continueMessage,
resumeModel: baseOptions.model,
continueMessage: parsed.continueMessage
? {
text: parsed.continueMessage,
imageParts: [],
model: baseOptions.model,
}
: undefined,
},
};

Expand Down
35 changes: 34 additions & 1 deletion src/browser/components/AIView.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -23,14 +23,22 @@ import { formatKeybind, KEYBINDS } from "@/browser/utils/ui/keybinds";
import { useAutoScroll } from "@/browser/hooks/useAutoScroll";
import { usePersistedState } from "@/browser/hooks/usePersistedState";
import { useThinking } from "@/browser/contexts/ThinkingContext";
import { useWorkspaceState, useWorkspaceAggregator } from "@/browser/stores/WorkspaceStore";
import {
useWorkspaceState,
useWorkspaceAggregator,
useWorkspaceUsage,
} from "@/browser/stores/WorkspaceStore";
import { WorkspaceHeader } from "./WorkspaceHeader";
import { getModelName } from "@/common/utils/ai/models";
import type { DisplayedMessage } from "@/common/types/message";
import type { RuntimeConfig } from "@/common/types/runtime";
import { useAIViewKeybinds } from "@/browser/hooks/useAIViewKeybinds";
import { evictModelFromLRU } from "@/browser/hooks/useModelLRU";
import { QueuedMessage } from "./Messages/QueuedMessage";
import { CompactionWarning } from "./CompactionWarning";
import { shouldAutoCompact } from "@/browser/utils/compaction/autoCompactionCheck";
import { useProviderOptions } from "@/browser/hooks/useProviderOptions";
import { useSendMessageOptions } from "@/browser/hooks/useSendMessageOptions";

interface AIViewProps {
workspaceId: string;
Expand Down Expand Up @@ -74,6 +82,9 @@ const AIViewInner: React.FC<AIViewProps> = ({

const workspaceState = useWorkspaceState(workspaceId);
const aggregator = useWorkspaceAggregator(workspaceId);
const workspaceUsage = useWorkspaceUsage(workspaceId);
const { options } = useProviderOptions();
const use1M = options.anthropic?.use1MContext ?? false;
const handledModelErrorsRef = useRef<Set<string>>(new Set());

useEffect(() => {
Expand Down Expand Up @@ -130,6 +141,9 @@ const AIViewInner: React.FC<AIViewProps> = ({
markUserInteraction,
} = useAutoScroll();

// Use send options for auto-compaction check
const pendingSendOptions = useSendMessageOptions(workspaceId);

// ChatInput API for focus management
const chatInputAPI = useRef<ChatInputAPI | null>(null);
const handleChatInputReady = useCallback((api: ChatInputAPI) => {
Expand Down Expand Up @@ -318,6 +332,18 @@ const AIViewInner: React.FC<AIViewProps> = ({
// Get active stream message ID for token counting
const activeStreamMessageId = aggregator.getActiveStreamMessageId();

// Use pending send model for auto-compaction check, not the last stream's model.
// This ensures the threshold is based on the model the user will actually send with,
// preventing context-length errors when switching from a large-context to smaller model.
const pendingModel = pendingSendOptions.model;

const autoCompactionCheck = pendingModel
? shouldAutoCompact(workspaceUsage, pendingModel, use1M)
: { shouldShowWarning: false, usagePercentage: 0, thresholdPercentage: 70 };

// Show warning when: shouldShowWarning flag is true AND not currently compacting
const shouldShowCompactionWarning = !isCompacting && autoCompactionCheck.shouldShowWarning;

// Note: We intentionally do NOT reset autoRetry when streams start.
// If user pressed the interrupt key, autoRetry stays false until they manually retry.
// This makes state transitions explicit and predictable.
Expand Down Expand Up @@ -503,6 +529,12 @@ const AIViewInner: React.FC<AIViewProps> = ({
</button>
)}
</div>
{shouldShowCompactionWarning && (
<CompactionWarning
usagePercentage={autoCompactionCheck.usagePercentage}
thresholdPercentage={autoCompactionCheck.thresholdPercentage}
/>
)}
<ChatInput
variant="workspace"
workspaceId={workspaceId}
Expand All @@ -516,6 +548,7 @@ const AIViewInner: React.FC<AIViewProps> = ({
onEditLastUserMessage={() => void handleEditLastUserMessage()}
canInterrupt={canInterrupt}
onReady={handleChatInputReady}
autoCompactionCheck={autoCompactionCheck}
/>
</div>

Expand Down
125 changes: 96 additions & 29 deletions src/browser/components/ChatInput/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ import {
} from "@/common/constants/storage";
import {
prepareCompactionMessage,
executeCompaction,
processSlashCommand,
type SlashCommandContext,
} from "@/browser/utils/chatCommands";
Expand Down Expand Up @@ -478,12 +479,39 @@ export const ChatInput: React.FC<ChatInputProps> = (props) => {
const isSlashCommand = normalizedCommandInput.startsWith("/");
const parsed = isSlashCommand ? parseCommand(normalizedCommandInput) : null;

// Prepare image parts early so slash commands can access them
const imageParts = imageAttachments.map((img, index) => {
// Validate before sending to help with debugging
if (!img.url || typeof img.url !== "string") {
console.error(
`Image attachment [${index}] has invalid url:`,
typeof img.url,
img.url?.slice(0, 50)
);
}
if (!img.url?.startsWith("data:")) {
console.error(`Image attachment [${index}] url is not a data URL:`, img.url?.slice(0, 100));
}
if (!img.mediaType || typeof img.mediaType !== "string") {
console.error(
`Image attachment [${index}] has invalid mediaType:`,
typeof img.mediaType,
img.mediaType
);
}
return {
url: img.url,
mediaType: img.mediaType,
};
});

if (parsed) {
const context: SlashCommandContext = {
variant,
workspaceId: variant === "workspace" ? props.workspaceId : undefined,
sendMessageOptions,
setInput,
setImageAttachments,
setIsSending,
setToast,
setVimEnabled,
Expand All @@ -493,6 +521,7 @@ export const ChatInput: React.FC<ChatInputProps> = (props) => {
onTruncateHistory: variant === "workspace" ? props.onTruncateHistory : undefined,
onCancelEdit: variant === "workspace" ? props.onCancelEdit : undefined,
editMessageId: editingMessage?.id,
imageParts: imageParts.length > 0 ? imageParts : undefined,
resetInputHeight: () => {
if (inputRef.current) {
inputRef.current.style.height = "36px";
Expand Down Expand Up @@ -540,36 +569,70 @@ export const ChatInput: React.FC<ChatInputProps> = (props) => {
// Save current state for restoration on error
const previousImageAttachments = [...imageAttachments];

try {
// Prepare image parts if any
const imageParts = imageAttachments.map((img, index) => {
// Validate before sending to help with debugging
if (!img.url || typeof img.url !== "string") {
console.error(
`Image attachment [${index}] has invalid url:`,
typeof img.url,
img.url?.slice(0, 50)
);
}
if (!img.url?.startsWith("data:")) {
console.error(
`Image attachment [${index}] url is not a data URL:`,
img.url?.slice(0, 100)
);
}
if (!img.mediaType || typeof img.mediaType !== "string") {
console.error(
`Image attachment [${index}] has invalid mediaType:`,
typeof img.mediaType,
img.mediaType
);
// Auto-compaction check (workspace variant only)
// Check if we should auto-compact before sending this message
// Result is computed in parent (AIView) and passed down to avoid duplicate calculation
const shouldAutoCompact =
props.autoCompactionCheck &&
props.autoCompactionCheck.usagePercentage >=
props.autoCompactionCheck.thresholdPercentage &&
!isCompacting; // Skip if already compacting to prevent double-compaction queue
if (variant === "workspace" && !editingMessage && shouldAutoCompact) {
// Clear input immediately for responsive UX
setInput("");
setImageAttachments([]);
setIsSending(true);

try {
const result = await executeCompaction({
workspaceId: props.workspaceId,
continueMessage: {
text: messageText,
imageParts,
model: sendMessageOptions.model,
},
sendMessageOptions,
});

if (!result.success) {
// Restore on error
setInput(messageText);
setImageAttachments(previousImageAttachments);
setToast({
id: Date.now().toString(),
type: "error",
title: "Auto-Compaction Failed",
message: result.error ?? "Failed to start auto-compaction",
});
} else {
setToast({
id: Date.now().toString(),
type: "success",
message: `Context threshold reached - auto-compacting...`,
});
}
return {
url: img.url,
mediaType: img.mediaType,
};
});
} catch (error) {
// Restore on unexpected error
setInput(messageText);
setImageAttachments(previousImageAttachments);
setToast({
id: Date.now().toString(),
type: "error",
title: "Auto-Compaction Failed",
message:
error instanceof Error ? error.message : "Unexpected error during auto-compaction",
});
} finally {
setIsSending(false);
}

return; // Skip normal send
}

// Regular message - send directly via API
setIsSending(true);

try {
// When editing a /compact command, regenerate the actual summarization request
let actualMessageText = messageText;
let muxMetadata: MuxFrontendMetadata | undefined;
Expand All @@ -585,7 +648,11 @@ export const ChatInput: React.FC<ChatInputProps> = (props) => {
} = prepareCompactionMessage({
workspaceId: props.workspaceId,
maxOutputTokens: parsedEditingCommand.maxOutputTokens,
continueMessage: parsedEditingCommand.continueMessage,
continueMessage: {
text: parsedEditingCommand.continueMessage ?? "",
imageParts,
model: sendMessageOptions.model,
},
model: parsedEditingCommand.model,
sendMessageOptions,
});
Expand Down
2 changes: 2 additions & 0 deletions src/browser/components/ChatInput/types.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import type { ImagePart } from "@/common/types/ipc";
import type { FrontendWorkspaceMetadata } from "@/common/types/workspace";
import type { AutoCompactionCheckResult } from "@/browser/utils/compaction/autoCompactionCheck";

export interface ChatInputAPI {
focus: () => void;
Expand All @@ -23,6 +24,7 @@ export interface ChatInputWorkspaceVariant {
canInterrupt?: boolean;
disabled?: boolean;
onReady?: (api: ChatInputAPI) => void;
autoCompactionCheck?: AutoCompactionCheckResult; // Computed in parent (AIView) to avoid duplicate calculation
}

// Creation variant: simplified for first message / workspace creation
Expand Down
40 changes: 40 additions & 0 deletions src/browser/components/CompactionWarning.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
import React from "react";

/**
* Warning banner shown when context usage is approaching the compaction threshold.
*
* Displays progressive warnings:
 * - Below threshold: "Context left until Auto-Compact: X%" (where X = threshold - current, rounded)
 * - At/above threshold: "⚠️ Context limit reached. Next message will trigger Auto-Compaction."
*
* Displayed above ChatInput when:
* - Token usage >= (threshold - 10%) of model's context window
* - Not currently compacting (user can still send messages)
*
* @param usagePercentage - Current token usage as percentage (0-100)
* @param thresholdPercentage - Auto-compaction trigger threshold (0-100, default 70)
*/
export const CompactionWarning: React.FC<{
  usagePercentage: number;
  thresholdPercentage: number;
}> = ({ usagePercentage, thresholdPercentage }) => {
  // Once usage meets or exceeds the threshold, the very next send will
  // kick off auto-compaction — show the prominent notice and stop.
  if (usagePercentage >= thresholdPercentage) {
    return (
      <div className="text-plan-mode bg-plan-mode/10 mx-4 my-4 rounded-sm px-4 py-3 text-center text-xs font-medium">
        ⚠️ Context limit reached. Next message will trigger Auto-Compaction.
      </div>
    );
  }

  // Still below the threshold: render a subtle, right-aligned countdown of
  // the percentage points left before auto-compaction triggers.
  const pointsRemaining = thresholdPercentage - usagePercentage;
  return (
    <div className="text-muted mx-4 mt-2 mb-1 text-right text-[10px]">
      Context left until Auto-Compact: {Math.round(pointsRemaining)}%
    </div>
  );
};
10 changes: 9 additions & 1 deletion src/browser/hooks/useResumeManager.ts
Original file line number Diff line number Diff line change
Expand Up @@ -171,7 +171,15 @@ export function useResumeManager() {
if (lastUserMsg?.compactionRequest) {
// Apply compaction overrides using shared function (same as ChatInput)
// This ensures custom model/tokens are preserved across resume
options = applyCompactionOverrides(options, lastUserMsg.compactionRequest.parsed);
options = applyCompactionOverrides(options, {
model: lastUserMsg.compactionRequest.parsed.model,
maxOutputTokens: lastUserMsg.compactionRequest.parsed.maxOutputTokens,
continueMessage: {
text: lastUserMsg.compactionRequest.parsed.continueMessage?.text ?? "",
imageParts: lastUserMsg.compactionRequest.parsed.continueMessage?.imageParts,
model: lastUserMsg.compactionRequest.parsed.continueMessage?.model ?? options.model,
},
});
}
}

Expand Down
Loading
Loading