Commit ffb6e94

fix: properly implement space-to-send with hook callback
- Add onSend callback to useVoiceInput options
- Add stopListeningAndSend method that sets a flag before stopping
- When transcription completes, if flag was set, call onSend
- Use setTimeout(0) to let React flush state update before sending
- Simplifies ChatInput code by moving logic into the hook
1 parent 5914ed1 commit ffb6e94
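In short, the hook latches the send intent in a ref before stopping the recorder, consumes and clears that flag when transcription finishes, and defers the actual send by one tick so React can commit the transcript into the input state first. A minimal sketch of that pattern (hypothetical helper name and simplified surface; the real hook also manages the MediaRecorder, errors, and the transcription call):

import { useCallback, useRef } from "react";

// Sketch only: captures the flag-and-defer pattern described above.
function useSendAfterTranscribe(
  onTranscript: (text: string) => void,
  onSend?: () => void
) {
  // Latched by stopListeningAndSend(), consumed once when transcription completes
  const sendAfterTranscribeRef = useRef(false);

  const finishTranscription = useCallback(
    (text: string) => {
      const shouldSend = sendAfterTranscribeRef.current;
      sendAfterTranscribeRef.current = false;
      onTranscript(text);
      if (shouldSend) {
        // Defer one tick so the transcript state update flushes before sending
        setTimeout(() => onSend?.(), 0);
      }
    },
    [onTranscript, onSend]
  );

  const stopListeningAndSend = useCallback(() => {
    sendAfterTranscribeRef.current = true;
    // ...stop the recorder here; finishTranscription runs once the text arrives
  }, []);

  return { finishTranscription, stopListeningAndSend };
}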

2 files changed: +26 -14 lines changed

src/browser/components/ChatInput/index.tsx

Lines changed: 3 additions & 12 deletions
@@ -192,6 +192,7 @@ export const ChatInput: React.FC<ChatInputProps> = (props) => {
   const voiceInput = useVoiceInput({
     onTranscript: handleVoiceTranscript,
     onError: handleVoiceError,
+    onSend: () => void handleSend(),
     openAIKeySet,
   });

@@ -986,20 +987,10 @@ export const ChatInput: React.FC<ChatInputProps> = (props) => {
   ref={(el) => el?.focus()}
   onClick={voiceInput.isListening ? voiceInput.toggleListening : undefined}
   onKeyDown={(e) => {
-    // Space stops recording and sends immediately
+    // Space stops recording and sends immediately after transcription
     if (e.key === " " && voiceInput.isListening) {
       e.preventDefault();
-      voiceInput.stopListening();
-      // Small delay to let transcription complete, then send
-      // The transcript callback will update input, then we send
-      const checkAndSend = () => {
-        if (!voiceInput.isTranscribing) {
-          void handleSend();
-        } else {
-          setTimeout(checkAndSend, 100);
-        }
-      };
-      setTimeout(checkAndSend, 100);
+      voiceInput.stopListeningAndSend();
     }
   }}
   disabled={voiceInput.isTranscribing}
src/browser/hooks/useVoiceInput.ts

Lines changed: 23 additions & 2 deletions
@@ -29,6 +29,8 @@ export interface UseVoiceInputOptions {
   onTranscript: (text: string, isFinal: boolean) => void;
   /** Called when an error occurs */
   onError?: (error: string) => void;
+  /** Called to send the message (used by stopListeningAndSend) */
+  onSend?: () => void;
   /** Whether OpenAI API key is configured */
   openAIKeySet: boolean;
 }

@@ -46,33 +48,42 @@ export interface UseVoiceInputResult {
   startListening: () => void;
   /** Stop recording and transcribe */
   stopListening: () => void;
+  /** Stop recording, transcribe, and send when done */
+  stopListeningAndSend: () => void;
   /** Toggle recording state */
   toggleListening: () => void;
 }

 export function useVoiceInput(options: UseVoiceInputOptions): UseVoiceInputResult {
-  const { onTranscript, onError, openAIKeySet } = options;
+  const { onTranscript, onError, onSend, openAIKeySet } = options;

   const [isListening, setIsListening] = useState(false);
   const [isTranscribing, setIsTranscribing] = useState(false);

   const mediaRecorderRef = useRef<MediaRecorder | null>(null);
   const audioChunksRef = useRef<Blob[]>([]);
   const streamRef = useRef<MediaStream | null>(null);
+  // Flag to auto-send after transcription completes
+  const sendAfterTranscribeRef = useRef(false);

   const isSupported = isMediaRecorderSupported();
   const isMobile = isMobileDevice();

   // Store callbacks in refs to avoid recreating on every render
   const onTranscriptRef = useRef(onTranscript);
   const onErrorRef = useRef(onError);
+  const onSendRef = useRef(onSend);
   useEffect(() => {
     onTranscriptRef.current = onTranscript;
     onErrorRef.current = onError;
-  }, [onTranscript, onError]);
+    onSendRef.current = onSend;
+  }, [onTranscript, onError, onSend]);

   const transcribeAudio = useCallback(async (audioBlob: Blob) => {
     setIsTranscribing(true);
+    const shouldSendAfter = sendAfterTranscribeRef.current;
+    sendAfterTranscribeRef.current = false;
+
     try {
       // Convert blob to base64
       const arrayBuffer = await audioBlob.arrayBuffer();

@@ -86,6 +97,10 @@ export function useVoiceInput(options: UseVoiceInputOptions): UseVoiceInputResult {
       if (result.success) {
         if (result.data.trim()) {
           onTranscriptRef.current(result.data, true);
+          // Auto-send after transcript is set (use setTimeout to let React update state)
+          if (shouldSendAfter) {
+            setTimeout(() => onSendRef.current?.(), 0);
+          }
         }
       } else {
         onErrorRef.current?.(result.error);

@@ -161,6 +176,11 @@ export function useVoiceInput(options: UseVoiceInputOptions): UseVoiceInputResult {
     setIsListening(false);
   }, []);

+  const stopListeningAndSend = useCallback(() => {
+    sendAfterTranscribeRef.current = true;
+    stopListening();
+  }, [stopListening]);
+
   const toggleListening = useCallback(() => {
     if (isListening) {
       stopListening();

@@ -188,6 +208,7 @@ export function useVoiceInput(options: UseVoiceInputOptions): UseVoiceInputResult {
     shouldShowUI: isSupported && !isMobile && openAIKeySet,
     startListening: () => void startListening(),
     stopListening,
+    stopListeningAndSend,
     toggleListening,
   };
 }
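Taken together, a consumer wires the new surface roughly as the ChatInput diff above does. A hypothetical standalone sketch (component name, relative import path, handleSend, and the openAIKeySet value are assumed):

import React from "react";
import { useVoiceInput } from "../hooks/useVoiceInput"; // path assumed for a component under src/browser/components

const VoiceSendButtonSketch: React.FC<{ handleSend: () => Promise<void> }> = ({ handleSend }) => {
  const voiceInput = useVoiceInput({
    onTranscript: (text) => { /* append the transcript to the draft input */ },
    onSend: () => void handleSend(), // invoked by the hook after a space-to-send
    openAIKeySet: true, // assumed configured
  });

  if (!voiceInput.shouldShowUI) return null;

  return (
    <button
      disabled={voiceInput.isTranscribing}
      onClick={voiceInput.toggleListening}
      onKeyDown={(e) => {
        if (e.key === " " && voiceInput.isListening) {
          e.preventDefault();
          voiceInput.stopListeningAndSend(); // stop, transcribe, then auto-send
        }
      }}
    >
      {voiceInput.isListening ? "Stop" : "Record"}
    </button>
  );
};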
