Commit e13adab
improvement(wand): upgrade wand to use SSE (#1100)
* improvement(wand): upgrade wand to use SSE
* fix(ocr-azure): added OCR_AZURE_API_KEY envvar (#1102)
* make wand identical to chat panel
1 parent 44bc12b commit e13adab
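
In practice (as the diffs below show), each streamed chunk is now sent as a standard Server-Sent Events message rather than a bare newline-delimited JSON line, and the terminal message carries only a done flag. Illustratively, the wire format changes from

{"chunk":"Hel","done":false}
{"chunk":"lo","done":false}
{"chunk":"","done":true}

to

data: {"chunk":"Hel"}

data: {"chunk":"lo"}

data: {"done":true}

and the response headers switch from text/plain to text/event-stream, matching what the chat panel already consumes.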

2 files changed (+43, -29 lines)

apps/sim/app/api/wand-generate/route.ts

Lines changed: 19 additions & 7 deletions
@@ -95,12 +95,19 @@ export async function POST(req: NextRequest) {
     {
       stream,
       historyLength: history.length,
+      endpoint: useWandAzure ? azureEndpoint : 'api.openai.com',
+      model: useWandAzure ? wandModelName : 'gpt-4o',
+      apiVersion: useWandAzure ? azureApiVersion : 'N/A',
     }
   )
 
   // For streaming responses
   if (stream) {
     try {
+      logger.debug(
+        `[${requestId}] Starting streaming request to ${useWandAzure ? 'Azure OpenAI' : 'OpenAI'}`
+      )
+
       const streamCompletion = await client.chat.completions.create({
         model: useWandAzure ? wandModelName : 'gpt-4o',
         messages: messages,
@@ -109,6 +116,8 @@ export async function POST(req: NextRequest) {
         stream: true,
       })
 
+      logger.debug(`[${requestId}] Stream connection established successfully`)
+
       return new Response(
         new ReadableStream({
           async start(controller) {
@@ -118,31 +127,34 @@ export async function POST(req: NextRequest) {
              for await (const chunk of streamCompletion) {
                const content = chunk.choices[0]?.delta?.content || ''
                if (content) {
-                  // Use the same format as codegen API for consistency
+                  // Use SSE format identical to chat streaming
                  controller.enqueue(
-                    encoder.encode(`${JSON.stringify({ chunk: content, done: false })}\n`)
+                    encoder.encode(`data: ${JSON.stringify({ chunk: content })}\n\n`)
                  )
                }
              }
 
-              // Send completion signal
-              controller.enqueue(encoder.encode(`${JSON.stringify({ chunk: '', done: true })}\n`))
+              // Send completion signal in SSE format
+              controller.enqueue(encoder.encode(`data: ${JSON.stringify({ done: true })}\n\n`))
              controller.close()
              logger.info(`[${requestId}] Wand generation streaming completed`)
            } catch (streamError: any) {
              logger.error(`[${requestId}] Streaming error`, { error: streamError.message })
              controller.enqueue(
-                encoder.encode(`${JSON.stringify({ error: 'Streaming failed', done: true })}\n`)
+                encoder.encode(
+                  `data: ${JSON.stringify({ error: 'Streaming failed', done: true })}\n\n`
+                )
              )
              controller.close()
            }
          },
        }),
        {
          headers: {
-            'Content-Type': 'text/plain',
-            'Cache-Control': 'no-cache, no-transform',
+            'Content-Type': 'text/event-stream',
+            'Cache-Control': 'no-cache',
            Connection: 'keep-alive',
+            'X-Accel-Buffering': 'no',
          },
        }
      )
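
For reference, here is a minimal, self-contained sketch of the SSE response pattern the route adopts above. It is an illustration, not the repository's code: toSseResponse and its completion parameter are made-up names, and the OpenAI chunk shape is reduced to the one field the diff reads.

// Hypothetical helper (not in the repo): wraps an OpenAI-style stream in an SSE Response.
type DeltaChunk = { choices: Array<{ delta?: { content?: string } }> }

export function toSseResponse(completion: AsyncIterable<DeltaChunk>): Response {
  const encoder = new TextEncoder()

  const stream = new ReadableStream({
    async start(controller) {
      try {
        for await (const chunk of completion) {
          const content = chunk.choices[0]?.delta?.content || ''
          if (content) {
            // One SSE event per content delta: "data: <json>" terminated by a blank line
            controller.enqueue(encoder.encode(`data: ${JSON.stringify({ chunk: content })}\n\n`))
          }
        }
        // Terminal event so the client knows the stream finished cleanly
        controller.enqueue(encoder.encode(`data: ${JSON.stringify({ done: true })}\n\n`))
      } catch {
        // Surface failures to the client in the same SSE envelope
        controller.enqueue(
          encoder.encode(`data: ${JSON.stringify({ error: 'Streaming failed', done: true })}\n\n`)
        )
      } finally {
        controller.close()
      }
    },
  })

  return new Response(stream, {
    headers: {
      'Content-Type': 'text/event-stream',
      'Cache-Control': 'no-cache',
      Connection: 'keep-alive',
      // Ask buffering proxies (e.g. nginx) to flush events immediately
      'X-Accel-Buffering': 'no',
    },
  })
}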

apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-wand.ts

Lines changed: 24 additions & 22 deletions
@@ -198,35 +198,37 @@ export function useWand({
         const { done, value } = await reader.read()
         if (done) break
 
-        // Process incoming chunks
-        const text = decoder.decode(value)
-        const lines = text.split('\n').filter((line) => line.trim() !== '')
+        // Process incoming chunks using SSE format (identical to Chat panel)
+        const chunk = decoder.decode(value)
+        const lines = chunk.split('\n\n')
 
         for (const line of lines) {
-          try {
-            const data = JSON.parse(line)
+          if (line.startsWith('data: ')) {
+            try {
+              const data = JSON.parse(line.substring(6))
 
-            // Check if there's an error
-            if (data.error) {
-              throw new Error(data.error)
-            }
+              // Check if there's an error
+              if (data.error) {
+                throw new Error(data.error)
+              }
 
-            // Process chunk
-            if (data.chunk && !data.done) {
-              accumulatedContent += data.chunk
-              // Stream each chunk to the UI immediately
-              if (onStreamChunk) {
-                onStreamChunk(data.chunk)
+              // Process chunk
+              if (data.chunk) {
+                accumulatedContent += data.chunk
+                // Stream each chunk to the UI immediately
+                if (onStreamChunk) {
+                  onStreamChunk(data.chunk)
+                }
              }
-            }
 
-            // Check if streaming is complete
-            if (data.done) {
-              break
+              // Check if streaming is complete
+              if (data.done) {
+                break
+              }
+            } catch (parseError) {
+              // Continue processing other lines
+              logger.debug('Failed to parse SSE line', { line, parseError })
            }
-          } catch (parseError) {
-            // Continue processing other lines
-            logger.debug('Failed to parse streaming line', { line, parseError })
          }
        }
      }
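
And a matching sketch of the client side of the protocol, mirroring the parsing the hook does above. consumeWandStream is a made-up name; like the hook, it assumes each read() delivers whole "data: ..." events.

// Hypothetical helper (not in the repo): reads the SSE stream produced by the wand route.
async function consumeWandStream(
  response: Response,
  onStreamChunk?: (chunk: string) => void
): Promise<string> {
  if (!response.body) throw new Error('Response has no body to stream')

  const reader = response.body.getReader()
  const decoder = new TextDecoder()
  let accumulated = ''

  while (true) {
    const { done, value } = await reader.read()
    if (done) break

    // SSE events are separated by a blank line ("\n\n")
    for (const event of decoder.decode(value).split('\n\n')) {
      if (!event.startsWith('data: ')) continue

      let data: { chunk?: string; done?: boolean; error?: string }
      try {
        data = JSON.parse(event.substring(6))
      } catch {
        continue // skip partial or malformed events, keep reading
      }

      if (data.error) throw new Error(data.error)
      if (data.chunk) {
        accumulated += data.chunk
        onStreamChunk?.(data.chunk) // stream each delta to the UI immediately
      }
      if (data.done) return accumulated
    }
  }
  return accumulated
}

As with the hook, an event split across two read() calls would be dropped by this simple splitter; a stricter parser would buffer the undelivered tail of each read before splitting.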
