4 changes: 2 additions & 2 deletions src/lib/api-config.ts
@@ -7,11 +7,11 @@ export const standardHeaders = () => ({
accept: "application/json",
})

const COPILOT_VERSION = "0.26.7"
const COPILOT_VERSION = "0.33.5"
const EDITOR_PLUGIN_VERSION = `copilot-chat/${COPILOT_VERSION}`
const USER_AGENT = `GitHubCopilotChat/${COPILOT_VERSION}`

const API_VERSION = "2025-04-01"
const API_VERSION = "2025-10-01"

export const copilotBaseUrl = (state: State) =>
state.accountType === "individual" ?
3 changes: 3 additions & 0 deletions src/lib/tokenizer.ts
@@ -73,6 +73,9 @@ const calculateMessageTokens = (
const tokensPerName = 1
let tokens = tokensPerMessage
for (const [key, value] of Object.entries(message)) {
if (key === "reasoning_opaque") {
continue
}
if (typeof value === "string") {
tokens += encoder.encode(value).length
}
2 changes: 2 additions & 0 deletions src/routes/messages/anthropic-types.ts
@@ -56,6 +56,7 @@ export interface AnthropicToolUseBlock {
export interface AnthropicThinkingBlock {
type: "thinking"
thinking: string
signature: string
}

export type AnthropicUserContentBlock =
@@ -196,6 +197,7 @@ export interface AnthropicStreamState {
messageStartSent: boolean
contentBlockIndex: number
contentBlockOpen: boolean
thinkingBlockOpen: boolean
toolCalls: {
[openAIToolIndex: number]: {
id: string
1 change: 1 addition & 0 deletions src/routes/messages/handler.ts
@@ -60,6 +60,7 @@ export async function handleCompletion(c: Context) {
contentBlockIndex: 0,
contentBlockOpen: false,
toolCalls: {},
thinkingBlockOpen: false,
}

for await (const rawEvent of response) {
135 changes: 98 additions & 37 deletions src/routes/messages/non-stream-translation.ts
@@ -1,3 +1,6 @@
import type { Model } from "~/services/copilot/get-models"

import { state } from "~/lib/state"
import {
type ChatCompletionResponse,
type ChatCompletionsPayload,
@@ -29,11 +32,15 @@ import { mapOpenAIStopReasonToAnthropic } from "./utils"
export function translateToOpenAI(
payload: AnthropicMessagesPayload,
): ChatCompletionsPayload {
const modelId = translateModelName(payload.model)
const model = state.models?.data.find((m) => m.id === modelId)
const thinkingBudget = getThinkingBudget(payload, model)
return {
model: translateModelName(payload.model),
model: modelId,
messages: translateAnthropicMessagesToOpenAI(
payload.messages,
payload.system,
modelId,
),
max_tokens: payload.max_tokens,
stop: payload.stop_sequences,
@@ -43,14 +50,36 @@
user: payload.metadata?.user_id,
tools: translateAnthropicToolsToOpenAI(payload.tools),
tool_choice: translateAnthropicToolChoiceToOpenAI(payload.tool_choice),
thinking_budget: thinkingBudget,
}
}

function getThinkingBudget(
payload: AnthropicMessagesPayload,
model: Model | undefined,
): number | undefined {
const thinking = payload.thinking
if (model && thinking) {
const maxThinkingBudget = Math.min(
model.capabilities.supports.max_thinking_budget ?? 0,
(model.capabilities.limits.max_output_tokens ?? 0) - 1,
)
if (maxThinkingBudget > 0 && thinking.budget_tokens !== undefined) {
const budgetTokens = Math.min(thinking.budget_tokens, maxThinkingBudget)
return Math.max(
budgetTokens,
model.capabilities.supports.min_thinking_budget ?? 1024,
)
}
}
return undefined
}

function translateModelName(model: string): string {
// Subagent requests use a specific model number which Copilot doesn't support
if (model.startsWith("claude-sonnet-4-")) {
return model.replace(/^claude-sonnet-4-.*/, "claude-sonnet-4")
} else if (model.startsWith("claude-opus-")) {
} else if (model.startsWith("claude-opus-4-")) {
return model.replace(/^claude-opus-4-.*/, "claude-opus-4")
Comment on lines +82 to 83
Copilot AI Dec 10, 2025

The change from model.startsWith("claude-opus-") to model.startsWith("claude-opus-4-") makes the pattern more restrictive: models like "claude-opus-3" or "claude-opus-5" would no longer match this condition. If the intent is to handle only claude-opus-4 variants, the change is correct; otherwise, verify it doesn't break compatibility with other claude-opus versions.

Suggested change
} else if (model.startsWith("claude-opus-4-")) {
return model.replace(/^claude-opus-4-.*/, "claude-opus-4")
} else if (model.startsWith("claude-opus-")) {
return model.replace(/^claude-opus-.*/, "claude-opus")

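For reference, a minimal standalone sketch of the branch this comment discusses (the helper name normalizeOpus is hypothetical; the real function in the diff is translateModelName):

// Mirrors the claude-opus branch as changed in this PR: only dated
// claude-opus-4 IDs collapse to the bare name; other IDs pass through.
const normalizeOpus = (model: string): string => {
  if (model.startsWith("claude-opus-4-")) {
    return model.replace(/^claude-opus-4-.*/, "claude-opus-4")
  }
  return model
}

normalizeOpus("claude-opus-4-20250514") // "claude-opus-4"
normalizeOpus("claude-opus-3-20240229") // unchanged: does not match the "claude-opus-4-" prefix
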
}
return model
@@ -59,13 +88,14 @@ function translateModelName(model: string): string {
function translateAnthropicMessagesToOpenAI(
anthropicMessages: Array<AnthropicMessage>,
system: string | Array<AnthropicTextBlock> | undefined,
modelId: string,
): Array<Message> {
const systemMessages = handleSystemPrompt(system)

const otherMessages = anthropicMessages.flatMap((message) =>
message.role === "user" ?
handleUserMessage(message)
: handleAssistantMessage(message),
: handleAssistantMessage(message, modelId),
)

return [...systemMessages, ...otherMessages]
@@ -125,6 +155,7 @@ function handleUserMessage(message: AnthropicUserMessage): Array<Message> {

function handleAssistantMessage(
message: AnthropicAssistantMessage,
modelId: string,
): Array<Message> {
if (!Array.isArray(message.content)) {
return [
@@ -139,25 +170,40 @@
(block): block is AnthropicToolUseBlock => block.type === "tool_use",
)

const textBlocks = message.content.filter(
(block): block is AnthropicTextBlock => block.type === "text",
)

const thinkingBlocks = message.content.filter(
let thinkingBlocks = message.content.filter(
(block): block is AnthropicThinkingBlock => block.type === "thinking",
)

// Combine text and thinking blocks, as OpenAI doesn't have separate thinking blocks
const allTextContent = [
...textBlocks.map((b) => b.text),
...thinkingBlocks.map((b) => b.thinking),
].join("\n\n")
if (modelId.startsWith("claude")) {
thinkingBlocks = thinkingBlocks.filter(
(b) =>
b.thinking
&& b.thinking.length > 0
&& b.signature
&& b.signature.length > 0
// gpt signature has @ in it, so filter those out for claude models
&& !b.signature.includes("@"),
)
}

const thinkingContents = thinkingBlocks
.filter((b) => b.thinking && b.thinking.length > 0)
.map((b) => b.thinking)

const allThinkingContent =
thinkingContents.length > 0 ? thinkingContents.join("\n\n") : undefined

const signature = thinkingBlocks.find(
(b) => b.signature && b.signature.length > 0,
)?.signature

return toolUseBlocks.length > 0 ?
[
{
role: "assistant",
content: allTextContent || null,
content: mapContent(message.content),
reasoning_text: allThinkingContent,
reasoning_opaque: signature,
tool_calls: toolUseBlocks.map((toolUse) => ({
id: toolUse.id,
type: "function",
@@ -172,6 +218,8 @@ function handleAssistantMessage(
{
role: "assistant",
content: mapContent(message.content),
reasoning_text: allThinkingContent,
reasoning_opaque: signature,
},
]
}
@@ -191,11 +239,8 @@ function mapContent(
const hasImage = content.some((block) => block.type === "image")
if (!hasImage) {
return content
.filter(
(block): block is AnthropicTextBlock | AnthropicThinkingBlock =>
block.type === "text" || block.type === "thinking",
)
.map((block) => (block.type === "text" ? block.text : block.thinking))
.filter((block): block is AnthropicTextBlock => block.type === "text")
.map((block) => block.text)
.join("\n\n")
}

@@ -204,12 +249,6 @@
switch (block.type) {
case "text": {
contentParts.push({ type: "text", text: block.text })

break
}
case "thinking": {
contentParts.push({ type: "text", text: block.thinking })

break
}
case "image": {
@@ -219,7 +258,6 @@
url: `data:${block.source.media_type};base64,${block.source.data}`,
},
})

break
}
// No default
@@ -282,34 +320,32 @@ export function translateToAnthropic(
response: ChatCompletionResponse,
): AnthropicResponse {
// Merge content from all choices
const allTextBlocks: Array<AnthropicTextBlock> = []
const allToolUseBlocks: Array<AnthropicToolUseBlock> = []
let stopReason: "stop" | "length" | "tool_calls" | "content_filter" | null =
null // default
stopReason = response.choices[0]?.finish_reason ?? stopReason
const assistantContentBlocks: Array<AnthropicAssistantContentBlock> = []
let stopReason = response.choices[0]?.finish_reason ?? null

// Process all choices to extract text and tool use blocks
for (const choice of response.choices) {
const textBlocks = getAnthropicTextBlocks(choice.message.content)
const thinkBlocks = getAnthropicThinkBlocks(
choice.message.reasoning_text,
choice.message.reasoning_opaque,
)
const toolUseBlocks = getAnthropicToolUseBlocks(choice.message.tool_calls)

allTextBlocks.push(...textBlocks)
allToolUseBlocks.push(...toolUseBlocks)
assistantContentBlocks.push(...thinkBlocks, ...textBlocks, ...toolUseBlocks)

// Use the finish_reason from the first choice, or prioritize tool_calls
if (choice.finish_reason === "tool_calls" || stopReason === "stop") {
stopReason = choice.finish_reason
}
}

// Note: GitHub Copilot doesn't generate thinking blocks, so we don't include them in responses

return {
id: response.id,
type: "message",
role: "assistant",
model: response.model,
content: [...allTextBlocks, ...allToolUseBlocks],
content: assistantContentBlocks,
stop_reason: mapOpenAIStopReasonToAnthropic(stopReason),
stop_sequence: null,
usage: {
@@ -329,7 +365,7 @@
function getAnthropicTextBlocks(
messageContent: Message["content"],
): Array<AnthropicTextBlock> {
if (typeof messageContent === "string") {
if (typeof messageContent === "string" && messageContent.length > 0) {
return [{ type: "text", text: messageContent }]
}

@@ -342,6 +378,31 @@
return []
}

function getAnthropicThinkBlocks(
reasoningText: string | null | undefined,
reasoningOpaque: string | null | undefined,
): Array<AnthropicThinkingBlock> {
if (reasoningText && reasoningText.length > 0) {
return [
{
type: "thinking",
thinking: reasoningText,
signature: reasoningOpaque || "",
},
]
}
if (reasoningOpaque && reasoningOpaque.length > 0) {
return [
{
type: "thinking",
thinking: "",
signature: reasoningOpaque,
},
]
}
return []
}

function getAnthropicToolUseBlocks(
toolCalls: Array<ToolCall> | undefined,
): Array<AnthropicToolUseBlock> {