1 change: 1 addition & 0 deletions package.json
@@ -54,6 +54,7 @@
     "@ai-sdk/anthropic": "^2.0.45",
     "@ai-sdk/google": "^2.0.39",
     "@ai-sdk/openai": "^2.0.71",
+    "@ai-sdk/provider": "^2.0.0",
     "@anthropic-ai/sdk": "^0.68.0",
     "@axe-core/puppeteer": "^4.10.2",
     "@genkit-ai/compat-oai": "1.23.0",
3 changes: 3 additions & 0 deletions pnpm-lock.yaml

Some generated files are not rendered by default.

36 changes: 36 additions & 0 deletions runner/codegen/ai-sdk-claude-thinking-patch.ts
@@ -0,0 +1,36 @@
import type {LanguageModelV2Middleware} from '@ai-sdk/provider';

/**
 * Middleware for Anthropic AI SDK models that enables thinking mode together
 * with structured responses.
 *
 * It is necessary because `generateObject()` enforces tool usage for
 * Anthropic models by default, which does not work with extended thinking.
 * This workaround makes the tool optional instead:
 * https://github.com/vercel/ai/issues/9351.
 */
export const anthropicThinkingWithStructuredResponseMiddleware: LanguageModelV2Middleware = {
  transformParams: ({params}) => {
    if (params.responseFormat?.type === 'json' && params.responseFormat.schema) {
      // Offer the response schema as an optional `json` tool rather than
      // letting the provider enforce tool usage (see the doc comment above).
      params.tools = [
        {
          type: 'function',
          description: 'Respond with a JSON object for the structured output/answer.',
          inputSchema: params.responseFormat.schema,
          name: 'json',
        },
      ];
      params.toolChoice = {type: 'auto'};

      // Ask the model explicitly to call the tool, since it is no longer forced.
      params.prompt.push({
        role: 'user',
        content: [
          {
            type: 'text',
            text: 'Use the `json` tool to provide the structured output/answer. No other text is needed.',
          },
        ],
      });
    }
    return Promise.resolve(params);
  },
};
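
Not part of the diff itself, but for context: a minimal usage sketch of how this middleware would be combined with `wrapLanguageModel()` and `generateObject()`. The model name, schema, prompt, and token budget below are illustrative assumptions, not values from this PR.

import {generateObject, wrapLanguageModel} from 'ai';
import {anthropic, AnthropicProviderOptions} from '@ai-sdk/anthropic';
import z from 'zod';
import {anthropicThinkingWithStructuredResponseMiddleware} from './ai-sdk-claude-thinking-patch.js';

// Wrap the base model so structured output is requested through an optional
// `json` tool rather than an enforced one, leaving extended thinking usable.
const model = wrapLanguageModel({
  model: anthropic('claude-sonnet-4-5'),
  middleware: anthropicThinkingWithStructuredResponseMiddleware,
});

// Illustrative call: schema, prompt, and thinking budget are placeholder values.
const {object} = await generateObject({
  model,
  schema: z.object({summary: z.string()}),
  prompt: 'Summarize the change in one sentence.',
  providerOptions: {
    anthropic: {
      sendReasoning: true,
      thinking: {type: 'enabled', budgetTokens: 16_000},
    } satisfies AnthropicProviderOptions,
  },
});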
27 changes: 11 additions & 16 deletions runner/codegen/ai-sdk-runner.ts
@@ -16,13 +16,15 @@ import {
   ModelMessage,
   SystemModelMessage,
   TextPart,
+  wrapLanguageModel,
 } from 'ai';
 import {google, GoogleGenerativeAIProviderOptions} from '@ai-sdk/google';
 import {anthropic, AnthropicProviderOptions} from '@ai-sdk/anthropic';
 import {openai, OpenAIResponsesProviderOptions} from '@ai-sdk/openai';
 import z from 'zod';
 import {callWithTimeout} from '../utils/timeout.js';
 import {combineAbortSignals} from '../utils/abort-signal.js';
+import {anthropicThinkingWithStructuredResponseMiddleware} from './ai-sdk-claude-thinking-patch.js';
 
 const SUPPORTED_MODELS = [
   'claude-opus-4.1-no-thinking',
@@ -159,26 +161,19 @@ export class AiSDKRunner implements LlmRunner {
     const modelName = request.model as (typeof SUPPORTED_MODELS)[number];
     switch (modelName) {
       case 'claude-opus-4.1-no-thinking':
-      case 'claude-opus-4.1-with-thinking-16k': {
-        const thinkingEnabled = modelName.includes('-with-thinking');
-        return {
-          model: anthropic('claude-opus-4-1'),
-          providerOptions: {
-            anthropic: {
-              sendReasoning: thinkingEnabled,
-              thinking: {
-                type: thinkingEnabled ? 'enabled' : 'disabled',
-                budgetTokens: thinkingEnabled ? claude16kThinkingTokenBudget : undefined,
-              },
-            } satisfies AnthropicProviderOptions,
-          },
-        };
-      }
+      case 'claude-opus-4.1-with-thinking-16k':
       case 'claude-sonnet-4.5-no-thinking':
       case 'claude-sonnet-4.5-with-thinking-16k': {
         const thinkingEnabled = modelName.includes('-with-thinking');
+        const isOpus4_1Model = modelName.includes('opus-4.1');
+        const model = anthropic(isOpus4_1Model ? 'claude-opus-4-1' : 'claude-sonnet-4-5');
         return {
-          model: anthropic('claude-sonnet-4-5'),
+          model: thinkingEnabled
+            ? wrapLanguageModel({
+                model,
+                middleware: anthropicThinkingWithStructuredResponseMiddleware,
+              })
+            : model,
           providerOptions: {
             anthropic: {
               sendReasoning: thinkingEnabled,