
Commit 7c6c3fa

sdk: implement promptAiSdk
1 parent 726b1cc commit 7c6c3fa

5 files changed: +105 -22 lines

common/src/types/contracts/llm.ts

Lines changed: 1 addition & 0 deletions
@@ -44,6 +44,7 @@ export type PromptAiSdkStreamFn = (
 
 export type PromptAiSdkFn = (
   params: {
+    apiKey: string
     messages: Message[]
     clientSessionId: string
     fingerprintId: string
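
For orientation, a rough sketch of the params shape this contract change implies, pieced together from this hunk and the call sites updated later in the commit; the field list is partial and the message type is a placeholder, not the project's real Message:

// Hypothetical sketch only — not the full PromptAiSdkFn contract.
type MessageLike = { role: 'user' | 'assistant' | 'system'; content: string } // stand-in for the real Message type

type PromptAiSdkParamsSketch = {
  apiKey: string // new required field added by this commit
  messages: MessageLike[]
  clientSessionId: string
  fingerprintId: string
  // ...further fields not shown in this hunk (model, userId, userInputId, logger, etc.)
}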

evals/git-evals/gen-evals.ts

Lines changed: 1 addition & 0 deletions
@@ -174,6 +174,7 @@ File Changes:\n${fileContext}`
     sessionConnections: {},
     logger: console,
     trackEvent: () => {},
+    apiKey: 'unused-api-key',
   })
 
   // Extract spec from <spec></spec> tags

scripts/get-changelog.ts

Lines changed: 1 addition & 0 deletions
@@ -179,6 +179,7 @@ Start your response with a heading using ### (three hashes) and organize the con
     sessionConnections: {},
     logger: console,
     trackEvent: () => {},
+    apiKey: 'unused-api-key'
   })
 
   // Clean up the AI response

sdk/src/impl/agent-runtime.ts

Lines changed: 2 additions & 3 deletions
@@ -8,7 +8,7 @@ import {
   getUserInfoFromApiKey,
   startAgentRun,
 } from './database'
-import { promptAiSdkStream } from './llm'
+import { promptAiSdk, promptAiSdkStream } from './llm'
 
 import type {
   AgentRuntimeDeps,
@@ -21,7 +21,6 @@ export function getAgentRuntimeImpl(params: {
   apiKey: string
 }): Omit<
   AgentRuntimeDeps & AgentRuntimeScopedDeps,
-  | 'promptAiSdk'
   | 'promptAiSdkStructured'
   | 'handleStepsLogChunk'
   | 'requestToolCall'
@@ -49,7 +48,7 @@ export function getAgentRuntimeImpl(params: {
 
     // LLM
     promptAiSdkStream,
-    // promptAiSdk: PromptAiSdkFn,
+    promptAiSdk,
     // promptAiSdkStructured: PromptAiSdkStructuredFn,
 
     // Mutable State
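
A minimal, self-contained illustration of the type-level effect of this change, using made-up dependency types rather than the repo's real AgentRuntimeDeps: once the returned object actually supplies promptAiSdk, it can no longer be listed in the Omit<...> union.

// Toy stand-ins for AgentRuntimeDeps & AgentRuntimeScopedDeps.
type DepsSketch = {
  promptAiSdkStream: () => AsyncGenerator<string>
  promptAiSdk: () => Promise<string>
  promptAiSdkStructured: () => Promise<unknown>
}

// Before: both prompt helpers were omitted from what the impl returns.
type ImplBefore = Omit<DepsSketch, 'promptAiSdk' | 'promptAiSdkStructured'>

// After: only the structured variant remains omitted, so promptAiSdk is now required.
type ImplAfter = Omit<DepsSketch, 'promptAiSdkStructured'>

const implAfter: ImplAfter = {
  promptAiSdkStream: async function* () {
    yield 'chunk'
  },
  promptAiSdk: async () => 'full response text', // must be provided now
}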

sdk/src/impl/llm.ts

Lines changed: 100 additions & 19 deletions
@@ -8,12 +8,15 @@ import { buildArray } from '@codebuff/common/util/array'
 import { getErrorObject } from '@codebuff/common/util/error'
 import { convertCbToModelMessages } from '@codebuff/common/util/messages'
 import { StopSequenceHandler } from '@codebuff/common/util/stop-sequence'
-import { streamText, APICallError } from 'ai'
+import { streamText, APICallError, generateText } from 'ai'
 
 import { WEBSITE_URL } from '../constants'
 
 import type { LanguageModelV2 } from '@ai-sdk/provider'
-import type { PromptAiSdkStreamFn } from '@codebuff/common/types/contracts/llm'
+import type {
+  PromptAiSdkFn,
+  PromptAiSdkStreamFn,
+} from '@codebuff/common/types/contracts/llm'
 import type { ParamsOf } from '@codebuff/common/types/function-params'
 import type {
   OpenRouterProviderOptions,
@@ -206,21 +209,99 @@ export async function* promptAiSdkStream(
   return messageId
 }
 
-for await (const chunk of promptAiSdkStream({
-  apiKey: '12345',
-  messages: [{ role: 'user', content: 'Hello' }],
-  clientSessionId: 'test-session',
-  fingerprintId: 'test-fingerprint',
-  model: 'openai/gpt-5',
-  userId: 'test-user-id',
-  userInputId: '64a2e61f-1fab-4701-8651-7ff7a473e97a',
-  sendAction: () => {},
-  logger: console,
-  trackEvent: () => {},
-  liveUserInputRecord: {
-    'test-user-id': ['64a2e61f-1fab-4701-8651-7ff7a473e97a'],
-  },
-  sessionConnections: { 'test-session': true },
-})) {
-  console.dir({ asdf: chunk }, { depth: null })
+export async function promptAiSdk(
+  params: ParamsOf<PromptAiSdkFn>,
+): ReturnType<PromptAiSdkFn> {
+  const { logger } = params
+
+  if (!checkLiveUserInput(params)) {
+    logger.info(
+      {
+        userId: params.userId,
+        userInputId: params.userInputId,
+        liveUserInputId: getLiveUserInputIds(params),
+      },
+      'Skipping prompt due to canceled user input',
+    )
+    return ''
+  }
+
+  const startTime = Date.now()
+  let aiSDKModel = getAiSdkModel(params)
+
+  const response = await generateText({
+    ...params,
+    prompt: undefined,
+    model: aiSDKModel,
+    messages: convertCbToModelMessages(params),
+    providerOptions: {
+      codebuff: {
+        codebuff_metadata: {
+          run_id: params.userInputId,
+          client_id: params.clientSessionId,
+        },
+      },
+    },
+  })
+  const content = response.text
+
+  const messageId = response.response.id
+  const providerMetadata = response.providerMetadata ?? {}
+  const usage = response.usage
+  let inputTokens = usage.inputTokens || 0
+  const outputTokens = usage.outputTokens || 0
+  let cacheReadInputTokens: number = 0
+  let cacheCreationInputTokens: number = 0
+  let costOverrideDollars: number | undefined
+  if (providerMetadata.anthropic) {
+    cacheReadInputTokens =
+      typeof providerMetadata.anthropic.cacheReadInputTokens === 'number'
+        ? providerMetadata.anthropic.cacheReadInputTokens
+        : 0
+    cacheCreationInputTokens =
+      typeof providerMetadata.anthropic.cacheCreationInputTokens === 'number'
+        ? providerMetadata.anthropic.cacheCreationInputTokens
+        : 0
+  }
+  if (providerMetadata.openrouter) {
+    if (providerMetadata.openrouter.usage) {
+      const openrouterUsage = providerMetadata.openrouter
+        .usage as OpenRouterUsageAccounting
+      cacheReadInputTokens =
+        openrouterUsage.promptTokensDetails?.cachedTokens ?? 0
+      inputTokens = openrouterUsage.promptTokens - cacheReadInputTokens
+
+      costOverrideDollars =
+        (openrouterUsage.cost ?? 0) +
+        (openrouterUsage.costDetails?.upstreamInferenceCost ?? 0)
+    }
+  }
+
+  // Call the cost callback if provided
+  if (params.onCostCalculated && costOverrideDollars) {
+    const creditsUsed = costOverrideDollars * (1 + PROFIT_MARGIN)
+    await params.onCostCalculated(creditsUsed)
+  }
+
+  return content
 }
+
+console.log(
+  await promptAiSdk({
+    apiKey: '12345',
+    messages: [{ role: 'user', content: 'Hello' }],
+    clientSessionId: 'test-session',
+    fingerprintId: 'test-fingerprint',
+    model: 'openai/gpt-5',
+    userId: 'test-user-id',
+    userInputId: '64a2e61f-1fab-4701-8651-7ff7a473e97a',
+    sendAction: () => {},
+    logger: console,
+    trackEvent: () => {},
+    liveUserInputRecord: {
+      'test-user-id': ['64a2e61f-1fab-4701-8651-7ff7a473e97a'],
+    },
+    sessionConnections: { 'test-session': true },
+  }),
+  'asdf',
+)
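
The OpenRouter branch in promptAiSdk splits cached prompt tokens out of the billed input tokens and adds any upstream inference cost to the reported cost. A standalone sketch of just that arithmetic, with the usage type narrowed to the fields the diff reads (the real OpenRouterUsageAccounting has more):

// Narrowed stand-in for OpenRouterUsageAccounting; only the fields used above.
type OpenRouterUsageSketch = {
  promptTokens: number
  promptTokensDetails?: { cachedTokens?: number }
  cost?: number
  costDetails?: { upstreamInferenceCost?: number }
}

// Mirrors the accounting in promptAiSdk: cached tokens are split out of the
// prompt tokens, and the dollar cost is OpenRouter's cost plus any upstream cost.
function splitOpenRouterUsage(usage: OpenRouterUsageSketch) {
  const cacheReadInputTokens = usage.promptTokensDetails?.cachedTokens ?? 0
  const inputTokens = usage.promptTokens - cacheReadInputTokens
  const costDollars =
    (usage.cost ?? 0) + (usage.costDetails?.upstreamInferenceCost ?? 0)
  return { inputTokens, cacheReadInputTokens, costDollars }
}

// Example: 1,200 prompt tokens of which 800 were cache reads.
console.log(
  splitOpenRouterUsage({
    promptTokens: 1200,
    promptTokensDetails: { cachedTokens: 800 },
    cost: 0.012,
    costDetails: { upstreamInferenceCost: 0.003 },
  }),
) // -> { inputTokens: 400, cacheReadInputTokens: 800, costDollars: ≈0.015 }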

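And a hedged usage sketch of the new function, modeled on the smoke-test call at the bottom of the diff; the import path assumes a caller inside sdk/src/impl, and onCostCalculated is awaited only when OpenRouter reports a cost, with the markup by PROFIT_MARGIN already applied:

// Sketch, not a verified example: assumes the same call shape as the
// smoke-test call in this commit and a caller colocated with sdk/src/impl/llm.
import { promptAiSdk } from './llm'

async function runOnce() {
  let creditsUsed = 0

  const text = await promptAiSdk({
    apiKey: 'your-api-key', // placeholder
    messages: [{ role: 'user', content: 'Hello' }],
    clientSessionId: 'session-1',
    fingerprintId: 'fingerprint-1',
    model: 'openai/gpt-5',
    userId: 'user-1',
    userInputId: 'input-1',
    sendAction: () => {},
    logger: console,
    trackEvent: () => {},
    // userInputId must appear here, or promptAiSdk returns '' (see checkLiveUserInput).
    liveUserInputRecord: { 'user-1': ['input-1'] },
    sessionConnections: { 'session-1': true },
    // Receives credits (cost marked up by PROFIT_MARGIN) when OpenRouter reports a cost.
    onCostCalculated: async (credits: number) => {
      creditsUsed += credits
    },
  })

  console.log({ text, creditsUsed })
}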