12 changes: 9 additions & 3 deletions packages/opencode/src/provider/provider.ts
@@ -24,7 +24,7 @@ import { createVertexAnthropic } from "@ai-sdk/google-vertex/anthropic"
import { createOpenAI } from "@ai-sdk/openai"
import { createOpenAICompatible } from "@ai-sdk/openai-compatible"
import { createOpenRouter, type LanguageModelV2 } from "@openrouter/ai-sdk-provider"
import { createOpenaiCompatible as createGitHubCopilotOpenAICompatible } from "./sdk/openai-compatible/src"
import { createCopilot } from "./sdk/copilot"
import { createXai } from "@ai-sdk/xai"
import { createMistral } from "@ai-sdk/mistral"
import { createGroq } from "@ai-sdk/groq"
@@ -74,8 +74,8 @@ export namespace Provider {
"@ai-sdk/perplexity": createPerplexity,
"@ai-sdk/vercel": createVercel,
"@gitlab/gitlab-ai-provider": createGitLab,
// @ts-ignore (TODO: kill this code so we dont have to maintain it)
"@ai-sdk/github-copilot": createGitHubCopilotOpenAICompatible,
// @ts-ignore
"@ai-sdk/github-copilot": createCopilot,
}

type CustomModelLoader = (sdk: any, modelID: string, options?: Record<string, any>) => Promise<any>
@@ -976,6 +976,12 @@ export namespace Provider {
...options["headers"],
...model.headers,
}
if (model.providerID.startsWith("github-copilot") && model.id.toLowerCase().includes("claude")) {
options["headers"] = {
...options["headers"],
"anthropic-beta": "interleaved-thinking-2025-05-14",
}
}

const key = Bun.hash.xxHash32(JSON.stringify({ providerID: model.providerID, npm: model.api.npm, options }))
const existing = s.sdk.get(key)
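The net effect of the provider.ts change: on top of the per-model headers, requests to Copilot-hosted Claude models opt into Anthropic's interleaved-thinking beta. A minimal sketch of that merge, with the surrounding option types reduced to plain records (the helper name and signature are illustrative, not part of the PR):

```ts
type SdkOptions = { headers?: Record<string, string> }

// Hypothetical helper mirroring the inline logic in provider.ts.
function withCopilotClaudeHeaders(
  options: SdkOptions,
  providerID: string,
  modelID: string,
  modelHeaders?: Record<string, string>,
): SdkOptions {
  let headers: Record<string, string> = { ...options.headers, ...modelHeaders }
  if (providerID.startsWith("github-copilot") && modelID.toLowerCase().includes("claude")) {
    // Opt Copilot-hosted Claude models into interleaved thinking.
    headers = { ...headers, "anthropic-beta": "interleaved-thinking-2025-05-14" }
  }
  return { ...options, headers }
}
```

Because the header lands in `options` before the xxHash32 cache key is computed, Claude and non-Claude Copilot models end up with distinct cached SDK instances.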
145 changes: 145 additions & 0 deletions packages/opencode/src/provider/sdk/copilot/index.ts
@@ -0,0 +1,145 @@
import { OpenAICompatibleChatLanguageModel } from "@ai-sdk/openai-compatible"
import type { LanguageModelV2, LanguageModelV2StreamPart, SharedV2ProviderMetadata } from "@ai-sdk/provider"
import { type FetchFunction, withoutTrailingSlash, withUserAgentSuffix } from "@ai-sdk/provider-utils"
import { OpenAIResponsesLanguageModel } from "../openai-compatible/src/responses/openai-responses-language-model"
import { ProviderTransform } from "../../transform"

type RawChunk = {
choices?: Array<{
message?: { reasoning_opaque?: string }
delta?: { reasoning_opaque?: string }
}>
}

const extractor = {
async extractMetadata({ parsedBody }: { parsedBody: unknown }): Promise<SharedV2ProviderMetadata | undefined> {
const body = parsedBody as RawChunk
const opaque = body?.choices?.[0]?.message?.reasoning_opaque
if (!opaque) return undefined
return { openaiCompatible: { reasoning_opaque: opaque } }
},
createStreamExtractor: () => ({ processChunk() {}, buildMetadata: () => undefined }),
}

function wrapStream(stream: ReadableStream<LanguageModelV2StreamPart>) {
const state = { opaque: undefined as string | undefined }
return stream.pipeThrough(
new TransformStream<LanguageModelV2StreamPart, LanguageModelV2StreamPart>({
transform(chunk, controller) {
if (chunk.type === "raw") {
const raw = chunk.rawValue as RawChunk
state.opaque ??= raw?.choices?.[0]?.delta?.reasoning_opaque
}
if (chunk.type === "reasoning-end" && state.opaque) {
controller.enqueue({
...chunk,
providerMetadata: { ...chunk.providerMetadata, openaiCompatible: { reasoning_opaque: state.opaque } },
})
return
}
controller.enqueue(chunk)
},
}),
)
}

function createFetchAdapter(base?: FetchFunction, modelId?: string): FetchFunction {
const fetcher = base ?? globalThis.fetch
const isGemini = modelId?.toLowerCase().includes("gemini")

return (async (url, init) => {
// catch MCP tools not sanitized in transform.ts
if (isGemini && init?.body && url.toString().includes("/chat/completions")) {
const body = JSON.parse(init.body as string)
if (body.tools) {
body.tools = body.tools.map((t: any) => ({
...t,
function: { ...t.function, parameters: ProviderTransform.sanitizeGeminiSchema(t.function.parameters) },
}))
init = { ...init, body: JSON.stringify(body) }
}
}

const response = await fetcher(url, init)
if (!url.toString().includes("/chat/completions")) return response

const contentType = response.headers.get("content-type") ?? ""

if (contentType.includes("text/event-stream")) {
return new Response(
response.body!.pipeThrough(
new TransformStream({
transform(chunk, controller) {
const text = new TextDecoder().decode(chunk)
controller.enqueue(new TextEncoder().encode(text.replace(/"reasoning_text":/g, '"reasoning_content":')))
},
}),
),
{ status: response.status, headers: response.headers },
)
}

const text = await response.text()
return new Response(text.replace(/"reasoning_text":/g, '"reasoning_content":'), {
status: response.status,
headers: response.headers,
})
}) as FetchFunction
}

export function createCopilot(
options: {
apiKey?: string
baseURL?: string
name?: string
headers?: Record<string, string>
fetch?: FetchFunction
} = {},
) {
const baseURL = withoutTrailingSlash(options.baseURL ?? "https://api.openai.com/v1")
const headers = {
...(options.apiKey && { Authorization: `Bearer ${options.apiKey}` }),
...options.headers,
}
const getHeaders = () => withUserAgentSuffix(headers, "opencode/copilot")

const createChatModel = (id: string): LanguageModelV2 => {
const copilotFetch = createFetchAdapter(options.fetch, id)
const model = new OpenAICompatibleChatLanguageModel(id, {
provider: "openai.chat",
headers: getHeaders,
url: ({ path }) => `${baseURL}${path}`,
fetch: copilotFetch,
metadataExtractor: extractor,
})

return {
specificationVersion: model.specificationVersion,
modelId: model.modelId,
provider: model.provider,
get supportedUrls() {
return model.supportedUrls
},
doGenerate: model.doGenerate.bind(model),
async doStream(opts) {
const result = await model.doStream({ ...opts, includeRawChunks: true })
return { ...result, stream: wrapStream(result.stream) }
},
}
}

const createResponsesModel = (id: string): LanguageModelV2 => {
return new OpenAIResponsesLanguageModel(id, {
provider: `${options.name ?? "copilot"}.responses`,
headers: getHeaders,
url: ({ path }) => `${baseURL}${path}`,
fetch: options.fetch,
})
}

return Object.assign((id: string) => createChatModel(id), {
languageModel: createChatModel,
chat: createChatModel,
responses: createResponsesModel,
})
}
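The new `createCopilot` factory keeps the shape the registry in provider.ts expects: calling it returns a function that maps a model id to a Chat Completions model, with `.responses` available for models served through the Responses API. A rough usage sketch (endpoint, token handling, and model ids here are placeholders, not how opencode actually wires Copilot auth):

```ts
import { createCopilot } from "./sdk/copilot"

const copilot = createCopilot({
  baseURL: "https://api.githubcopilot.com", // placeholder endpoint; defaults to https://api.openai.com/v1
  apiKey: process.env["COPILOT_TOKEN"],     // placeholder auth
})

// Chat Completions path: the fetch adapter rewrites reasoning_text to
// reasoning_content and surfaces reasoning_opaque as provider metadata.
const claude = copilot("claude-sonnet-4") // same as copilot.chat(...) / copilot.languageModel(...)

// Responses API path.
const codex = copilot.responses("gpt-5.1-codex-max")
```

For Gemini model ids the same fetch adapter also re-sanitizes tool schemas on the way out, catching MCP tools that were not already cleaned up in transform.ts.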
116 changes: 81 additions & 35 deletions packages/opencode/src/provider/transform.ts
@@ -66,6 +66,28 @@ export namespace ProviderTransform {
.filter((msg): msg is ModelMessage => msg !== undefined && msg.content !== "")
}

// extract copilot's reasoning_opaque while preserving reasoning text
if (model.providerID.startsWith("github-copilot")) {
msgs = msgs.map((msg) => {
if (msg.role !== "assistant" || !Array.isArray(msg.content)) return msg

const opaque = msg.content
.filter((part: any) => part.type === "reasoning")
.map((part: any) => part.providerOptions?.openaiCompatible?.reasoning_opaque)
.find(Boolean)

if (!opaque) return msg

return {
...msg,
providerOptions: {
...msg.providerOptions,
openaiCompatible: { ...(msg.providerOptions as any)?.openaiCompatible, reasoning_opaque: opaque },
},
}
})
}

if (model.api.id.includes("claude")) {
return msgs.map((msg) => {
if ((msg.role === "assistant" || msg.role === "tool") && Array.isArray(msg.content)) {
@@ -353,6 +375,14 @@
return Object.fromEntries(OPENAI_EFFORTS.map((effort) => [effort, { reasoningEffort: effort }]))

case "@ai-sdk/github-copilot":
// Claude models on Copilot use thinking_budget (token count) instead of reasoningEffort
if (model.id.includes("claude")) {
return {
high: { thinking_budget: Math.min(16_000, Math.floor(model.limit.output / 2 - 1)) },
max: { thinking_budget: Math.min(31_999, model.limit.output - 1) },
}
}
// Non-Claude models use OpenAI-style reasoningEffort
const copilotEfforts = iife(() => {
if (id.includes("5.1-codex-max") || id.includes("5.2")) return [...WIDELY_SUPPORTED_EFFORTS, "xhigh"]
return WIDELY_SUPPORTED_EFFORTS
@@ -666,6 +696,13 @@
}
}

if (npm === "@ai-sdk/github-copilot") {
const budget = typeof options?.["thinking_budget"] === "number" ? options["thinking_budget"] : 0
if (budget > 0) {
return Math.max(standardLimit, budget + 1)
}
}

return standardLimit
}

@@ -688,49 +725,58 @@
}
*/

// Convert integer enums to string enums for Google/Gemini
if (model.providerID === "google" || model.api.id.includes("gemini")) {
const sanitizeGemini = (obj: any): any => {
if (obj === null || typeof obj !== "object") {
return obj
}
const isGemini = model.providerID === "google" || model.id.toLowerCase().includes("gemini")
if (isGemini) {
schema = sanitizeGeminiSchema(schema)
}

if (Array.isArray(obj)) {
return obj.map(sanitizeGemini)
}
return schema
}

const result: any = {}
for (const [key, value] of Object.entries(obj)) {
if (key === "enum" && Array.isArray(value)) {
// Convert all enum values to strings
result[key] = value.map((v) => String(v))
// If we have integer type with enum, change type to string
if (result.type === "integer" || result.type === "number") {
result.type = "string"
}
} else if (typeof value === "object" && value !== null) {
result[key] = sanitizeGemini(value)
} else {
result[key] = value
}
export function sanitizeGeminiSchema(obj: any): any {
if (obj === null || typeof obj !== "object") return obj
if (Array.isArray(obj)) return obj.map(sanitizeGeminiSchema)

const result: any = {}
for (const [key, value] of Object.entries(obj)) {
if (key === "type" && Array.isArray(value)) {
// gemini will 400 on union types with null
const types = value as string[]
const hasNull = types.includes("null")
const nonNullTypes = types.filter((t) => t !== "null")

if (hasNull && nonNullTypes.length === 1) {
result.type = nonNullTypes[0]
result.nullable = true
} else if (nonNullTypes.length === 1) {
result.type = nonNullTypes[0]
} else {
result.type = value
}

// Filter required array to only include fields that exist in properties
if (result.type === "object" && result.properties && Array.isArray(result.required)) {
result.required = result.required.filter((field: any) => field in result.properties)
} else if (key === "enum" && Array.isArray(value)) {
// Convert all enum values to strings
result[key] = value.map((v) => String(v))
// If we have integer type with enum, change type to string
if (result.type === "integer" || result.type === "number") {
result.type = "string"
}

if (result.type === "array" && result.items == null) {
result.items = {}
}

return result
} else if (typeof value === "object" && value !== null) {
result[key] = sanitizeGeminiSchema(value)
} else {
result[key] = value
}
}

schema = sanitizeGemini(schema)
// Filter required array to only include fields that exist in properties
if (result.type === "object" && result.properties && Array.isArray(result.required)) {
result.required = result.required.filter((field: any) => field in result.properties)
}

return schema
if (result.type === "array" && result.items == null) {
result.items = {}
}

return result
}

export function error(providerID: string, error: APICallError) {
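Two of the transform.ts hunks work together: the effort table converts `high`/`max` into a Claude-style `thinking_budget`, and the output-limit hunk then guarantees the response window is at least one token larger than that budget. A small worked sketch of the arithmetic (the numbers are invented for illustration):

```ts
const outputLimit = 16_384 // illustrative model.limit.output

// Effort -> thinking_budget, as in the @ai-sdk/github-copilot effort table.
const budgets = {
  high: Math.min(16_000, Math.floor(outputLimit / 2 - 1)), // 8_191
  max: Math.min(31_999, outputLimit - 1),                  // 16_383
}

// Output-token limit: leave room for the thinking budget plus at least
// one visible output token (mirrors the thinking_budget branch above).
function copilotOutputLimit(standardLimit: number, thinkingBudget: number): number {
  return thinkingBudget > 0 ? Math.max(standardLimit, thinkingBudget + 1) : standardLimit
}

copilotOutputLimit(8_192, budgets.high) // 8_192 — standard limit already covers the budget
copilotOutputLimit(4_096, budgets.max)  // 16_384 — budget + 1 wins
```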
2 changes: 1 addition & 1 deletion packages/opencode/test/provider/transform.test.ts
@@ -959,7 +959,7 @@ describe("ProviderTransform.message - providerOptions key remapping", () => {
expect(result[0].providerOptions?.openai).toBeUndefined()
})

test("openai with github-copilot npm remaps providerID to 'openai'", () => {
test("github-copilot npm remaps providerID to 'openai' key", () => {
const model = createModel("github-copilot", "@ai-sdk/github-copilot")
const msgs = [
{
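Since `sanitizeGeminiSchema` is now exported from `ProviderTransform`, its null-union, enum, and `required` handling could be asserted directly. A hedged sketch of what such a test might look like, in the describe/test/expect style already used in transform.test.ts (the assertions restate behavior visible in the diff; this test is not part of the PR):

```ts
import { describe, expect, test } from "bun:test"
// Import path is a guess based on the repo layout.
import { ProviderTransform } from "../../src/provider/transform"

describe("ProviderTransform.sanitizeGeminiSchema", () => {
  test("rewrites null unions, integer enums, and stale required fields", () => {
    const schema = {
      type: "object",
      properties: {
        count: { type: "integer", enum: [1, 2, 3] },
        label: { type: ["string", "null"] },
        tags: { type: "array" },
      },
      required: ["count", "label", "missing"],
    }

    const result = ProviderTransform.sanitizeGeminiSchema(schema)

    expect(result.properties.count).toEqual({ type: "string", enum: ["1", "2", "3"] }) // enum coerced to strings
    expect(result.properties.label).toEqual({ type: "string", nullable: true })        // null union flattened
    expect(result.properties.tags).toEqual({ type: "array", items: {} })               // empty items backfilled
    expect(result.required).toEqual(["count", "label"])                                // "missing" dropped
  })
})
```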