Skip to content

Commit cc6cd51

Browse files
committed
Special-case grok-code-fast to not add stop sequences
1 parent dd8b571 commit cc6cd51

File tree

2 files changed

+79
-75
lines changed

2 files changed

+79
-75
lines changed

packages/internal/src/openrouter-ai-sdk/chat/index.ts

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -120,7 +120,9 @@ export class OpenRouterChatLanguageModel implements LanguageModelV2 {
120120
presence_penalty: presencePenalty,
121121
seed,
122122

123-
stop: stopSequences,
123+
...(this.modelId === 'x-ai/grok-code-fast-1'
124+
? {}
125+
: { stop: stopSequences }),
124126
response_format: responseFormat,
125127
top_k: topK,
126128

packages/internal/src/openrouter-ai-sdk/completion/index.ts

Lines changed: 76 additions & 74 deletions
Original file line numberDiff line numberDiff line change
@@ -3,63 +3,63 @@ import type {
33
LanguageModelV2CallOptions,
44
LanguageModelV2StreamPart,
55
LanguageModelV2Usage,
6-
} from '@ai-sdk/provider';
7-
import type { ParseResult } from '@ai-sdk/provider-utils';
8-
import type { FinishReason } from 'ai';
9-
import type { z } from 'zod/v4';
10-
import type { OpenRouterUsageAccounting } from '../types';
6+
} from '@ai-sdk/provider'
7+
import type { ParseResult } from '@ai-sdk/provider-utils'
8+
import type { FinishReason } from 'ai'
9+
import type { z } from 'zod/v4'
10+
import type { OpenRouterUsageAccounting } from '../types'
1111
import type {
1212
OpenRouterCompletionModelId,
1313
OpenRouterCompletionSettings,
14-
} from '../types/openrouter-completion-settings';
14+
} from '../types/openrouter-completion-settings'
1515

16-
import { UnsupportedFunctionalityError } from '@ai-sdk/provider';
16+
import { UnsupportedFunctionalityError } from '@ai-sdk/provider'
1717
import {
1818
combineHeaders,
1919
createEventSourceResponseHandler,
2020
createJsonResponseHandler,
2121
generateId,
2222
postJsonToApi,
23-
} from '@ai-sdk/provider-utils';
24-
import { openrouterFailedResponseHandler } from '../schemas/error-response';
25-
import { mapOpenRouterFinishReason } from '../utils/map-finish-reason';
26-
import { convertToOpenRouterCompletionPrompt } from './convert-to-openrouter-completion-prompt';
27-
import { OpenRouterCompletionChunkSchema } from './schemas';
23+
} from '@ai-sdk/provider-utils'
24+
import { openrouterFailedResponseHandler } from '../schemas/error-response'
25+
import { mapOpenRouterFinishReason } from '../utils/map-finish-reason'
26+
import { convertToOpenRouterCompletionPrompt } from './convert-to-openrouter-completion-prompt'
27+
import { OpenRouterCompletionChunkSchema } from './schemas'
2828

2929
type OpenRouterCompletionConfig = {
30-
provider: string;
31-
compatibility: 'strict' | 'compatible';
32-
headers: () => Record<string, string | undefined>;
33-
url: (options: { modelId: string; path: string }) => string;
34-
fetch?: typeof fetch;
35-
extraBody?: Record<string, unknown>;
36-
};
30+
provider: string
31+
compatibility: 'strict' | 'compatible'
32+
headers: () => Record<string, string | undefined>
33+
url: (options: { modelId: string; path: string }) => string
34+
fetch?: typeof fetch
35+
extraBody?: Record<string, unknown>
36+
}
3737

3838
export class OpenRouterCompletionLanguageModel implements LanguageModelV2 {
39-
readonly specificationVersion = 'v2' as const;
40-
readonly provider = 'openrouter';
41-
readonly modelId: OpenRouterCompletionModelId;
39+
readonly specificationVersion = 'v2' as const
40+
readonly provider = 'openrouter'
41+
readonly modelId: OpenRouterCompletionModelId
4242
readonly supportedUrls: Record<string, RegExp[]> = {
4343
'image/*': [
4444
/^data:image\/[a-zA-Z]+;base64,/,
4545
/^https?:\/\/.+\.(jpg|jpeg|png|gif|webp)$/i,
4646
],
4747
'text/*': [/^data:text\//, /^https?:\/\/.+$/],
4848
'application/*': [/^data:application\//, /^https?:\/\/.+$/],
49-
};
50-
readonly defaultObjectGenerationMode = undefined;
51-
readonly settings: OpenRouterCompletionSettings;
49+
}
50+
readonly defaultObjectGenerationMode = undefined
51+
readonly settings: OpenRouterCompletionSettings
5252

53-
private readonly config: OpenRouterCompletionConfig;
53+
private readonly config: OpenRouterCompletionConfig
5454

5555
constructor(
5656
modelId: OpenRouterCompletionModelId,
5757
settings: OpenRouterCompletionSettings,
5858
config: OpenRouterCompletionConfig,
5959
) {
60-
this.modelId = modelId;
61-
this.settings = settings;
62-
this.config = config;
60+
this.modelId = modelId
61+
this.settings = settings
62+
this.config = config
6363
}
6464

6565
private getArgs({
@@ -79,18 +79,18 @@ export class OpenRouterCompletionLanguageModel implements LanguageModelV2 {
7979
const { prompt: completionPrompt } = convertToOpenRouterCompletionPrompt({
8080
prompt,
8181
inputFormat: 'prompt',
82-
});
82+
})
8383

8484
if (tools?.length) {
8585
throw new UnsupportedFunctionalityError({
8686
functionality: 'tools',
87-
});
87+
})
8888
}
8989

9090
if (toolChoice) {
9191
throw new UnsupportedFunctionalityError({
9292
functionality: 'toolChoice',
93-
});
93+
})
9494
}
9595

9696
return {
@@ -119,7 +119,9 @@ export class OpenRouterCompletionLanguageModel implements LanguageModelV2 {
119119
presence_penalty: presencePenalty,
120120
seed,
121121

122-
stop: stopSequences,
122+
...(this.modelId === 'x-ai/grok-code-fast-1'
123+
? {}
124+
: { stop: stopSequences }),
123125
response_format: responseFormat,
124126
top_k: topK,
125127

@@ -133,19 +135,19 @@ export class OpenRouterCompletionLanguageModel implements LanguageModelV2 {
133135
// extra body:
134136
...this.config.extraBody,
135137
...this.settings.extraBody,
136-
};
138+
}
137139
}
138140

139141
async doGenerate(
140142
options: LanguageModelV2CallOptions,
141143
): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {
142-
const providerOptions = options.providerOptions || {};
143-
const openrouterOptions = providerOptions.openrouter || {};
144+
const providerOptions = options.providerOptions || {}
145+
const openrouterOptions = providerOptions.openrouter || {}
144146

145147
const args = {
146148
...this.getArgs(options),
147149
...openrouterOptions,
148-
};
150+
}
149151

150152
const { value: response, responseHeaders } = await postJsonToApi({
151153
url: this.config.url({
@@ -160,16 +162,16 @@ export class OpenRouterCompletionLanguageModel implements LanguageModelV2 {
160162
),
161163
abortSignal: options.abortSignal,
162164
fetch: this.config.fetch,
163-
});
165+
})
164166

165167
if ('error' in response) {
166-
throw new Error(`${response.error.message}`);
168+
throw new Error(`${response.error.message}`)
167169
}
168170

169-
const choice = response.choices[0];
171+
const choice = response.choices[0]
170172

171173
if (!choice) {
172-
throw new Error('No choice in OpenRouter completion response');
174+
throw new Error('No choice in OpenRouter completion response')
173175
}
174176

175177
return {
@@ -195,19 +197,19 @@ export class OpenRouterCompletionLanguageModel implements LanguageModelV2 {
195197
response: {
196198
headers: responseHeaders,
197199
},
198-
};
200+
}
199201
}
200202

201203
async doStream(
202204
options: LanguageModelV2CallOptions,
203205
): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {
204-
const providerOptions = options.providerOptions || {};
205-
const openrouterOptions = providerOptions.openrouter || {};
206+
const providerOptions = options.providerOptions || {}
207+
const openrouterOptions = providerOptions.openrouter || {}
206208

207209
const args = {
208210
...this.getArgs(options),
209211
...openrouterOptions,
210-
};
212+
}
211213

212214
const { value: response, responseHeaders } = await postJsonToApi({
213215
url: this.config.url({
@@ -231,18 +233,18 @@ export class OpenRouterCompletionLanguageModel implements LanguageModelV2 {
231233
),
232234
abortSignal: options.abortSignal,
233235
fetch: this.config.fetch,
234-
});
236+
})
235237

236-
let finishReason: FinishReason = 'other';
238+
let finishReason: FinishReason = 'other'
237239
const usage: LanguageModelV2Usage = {
238240
inputTokens: Number.NaN,
239241
outputTokens: Number.NaN,
240242
totalTokens: Number.NaN,
241243
reasoningTokens: Number.NaN,
242244
cachedInputTokens: Number.NaN,
243-
};
245+
}
244246

245-
const openrouterUsage: Partial<OpenRouterUsageAccounting> = {};
247+
const openrouterUsage: Partial<OpenRouterUsageAccounting> = {}
246248
return {
247249
stream: response.pipeThrough(
248250
new TransformStream<
@@ -252,66 +254,66 @@ export class OpenRouterCompletionLanguageModel implements LanguageModelV2 {
252254
transform(chunk, controller) {
253255
// handle failed chunk parsing / validation:
254256
if (!chunk.success) {
255-
finishReason = 'error';
256-
controller.enqueue({ type: 'error', error: chunk.error });
257-
return;
257+
finishReason = 'error'
258+
controller.enqueue({ type: 'error', error: chunk.error })
259+
return
258260
}
259261

260-
const value = chunk.value;
262+
const value = chunk.value
261263

262264
// handle error chunks:
263265
if ('error' in value) {
264-
finishReason = 'error';
265-
controller.enqueue({ type: 'error', error: value.error });
266-
return;
266+
finishReason = 'error'
267+
controller.enqueue({ type: 'error', error: value.error })
268+
return
267269
}
268270

269271
if (value.usage != null) {
270-
usage.inputTokens = value.usage.prompt_tokens;
271-
usage.outputTokens = value.usage.completion_tokens;
272+
usage.inputTokens = value.usage.prompt_tokens
273+
usage.outputTokens = value.usage.completion_tokens
272274
usage.totalTokens =
273-
value.usage.prompt_tokens + value.usage.completion_tokens;
275+
value.usage.prompt_tokens + value.usage.completion_tokens
274276

275277
// Collect OpenRouter specific usage information
276-
openrouterUsage.promptTokens = value.usage.prompt_tokens;
278+
openrouterUsage.promptTokens = value.usage.prompt_tokens
277279

278280
if (value.usage.prompt_tokens_details) {
279281
const cachedInputTokens =
280-
value.usage.prompt_tokens_details.cached_tokens ?? 0;
282+
value.usage.prompt_tokens_details.cached_tokens ?? 0
281283

282-
usage.cachedInputTokens = cachedInputTokens;
284+
usage.cachedInputTokens = cachedInputTokens
283285
openrouterUsage.promptTokensDetails = {
284286
cachedTokens: cachedInputTokens,
285-
};
287+
}
286288
}
287289

288-
openrouterUsage.completionTokens = value.usage.completion_tokens;
290+
openrouterUsage.completionTokens = value.usage.completion_tokens
289291
if (value.usage.completion_tokens_details) {
290292
const reasoningTokens =
291-
value.usage.completion_tokens_details.reasoning_tokens ?? 0;
293+
value.usage.completion_tokens_details.reasoning_tokens ?? 0
292294

293-
usage.reasoningTokens = reasoningTokens;
295+
usage.reasoningTokens = reasoningTokens
294296
openrouterUsage.completionTokensDetails = {
295297
reasoningTokens,
296-
};
298+
}
297299
}
298300

299-
openrouterUsage.cost = value.usage.cost;
300-
openrouterUsage.totalTokens = value.usage.total_tokens;
301+
openrouterUsage.cost = value.usage.cost
302+
openrouterUsage.totalTokens = value.usage.total_tokens
301303
}
302304

303-
const choice = value.choices[0];
305+
const choice = value.choices[0]
304306

305307
if (choice?.finish_reason != null) {
306-
finishReason = mapOpenRouterFinishReason(choice.finish_reason);
308+
finishReason = mapOpenRouterFinishReason(choice.finish_reason)
307309
}
308310

309311
if (choice?.text != null) {
310312
controller.enqueue({
311313
type: 'text-delta',
312314
delta: choice.text,
313315
id: generateId(),
314-
});
316+
})
315317
}
316318
},
317319

@@ -325,13 +327,13 @@ export class OpenRouterCompletionLanguageModel implements LanguageModelV2 {
325327
usage: openrouterUsage,
326328
},
327329
},
328-
});
330+
})
329331
},
330332
}),
331333
),
332334
response: {
333335
headers: responseHeaders,
334336
},
335-
};
337+
}
336338
}
337339
}

0 commit comments

Comments (0)