From f8bd4ff7059693d773eeab3a94606f48bda90eb1 Mon Sep 17 00:00:00 2001
From: Aiden Cline
Date: Tue, 9 Dec 2025 01:25:36 -0600
Subject: [PATCH] core: refactor providerOptions function to accept
 Provider.Model for cleaner API

---
 packages/opencode/src/provider/transform.ts | 6 +++---
 packages/opencode/src/session/compaction.ts | 3 +--
 packages/opencode/src/session/prompt.ts     | 4 ++--
 packages/opencode/src/session/summary.ts    | 4 ++--
 4 files changed, 8 insertions(+), 9 deletions(-)

diff --git a/packages/opencode/src/provider/transform.ts b/packages/opencode/src/provider/transform.ts
index 17fbf18f5..fb432860a 100644
--- a/packages/opencode/src/provider/transform.ts
+++ b/packages/opencode/src/provider/transform.ts
@@ -273,8 +273,8 @@ export namespace ProviderTransform {
     return options
   }
 
-  export function providerOptions(npm: string | undefined, providerID: string, options: { [x: string]: any }) {
-    switch (npm) {
+  export function providerOptions(model: Provider.Model, options: { [x: string]: any }) {
+    switch (model.api.npm) {
       case "@ai-sdk/openai":
       case "@ai-sdk/azure":
         return {
@@ -302,7 +302,7 @@ export namespace ProviderTransform {
         }
       default:
         return {
-          [providerID]: options,
+          [model.providerID]: options,
         }
     }
   }
diff --git a/packages/opencode/src/session/compaction.ts b/packages/opencode/src/session/compaction.ts
index de75eda6e..0dbdf8979 100644
--- a/packages/opencode/src/session/compaction.ts
+++ b/packages/opencode/src/session/compaction.ts
@@ -140,8 +140,7 @@ export namespace SessionCompaction {
       // set to 0, we handle loop
       maxRetries: 0,
       providerOptions: ProviderTransform.providerOptions(
-        model.api.npm,
-        model.providerID,
+        model,
         pipe({}, mergeDeep(ProviderTransform.options(model, input.sessionID)), mergeDeep(model.options)),
       ),
       headers: model.headers,
diff --git a/packages/opencode/src/session/prompt.ts b/packages/opencode/src/session/prompt.ts
index d5010bc47..3164bec50 100644
--- a/packages/opencode/src/session/prompt.ts
+++ b/packages/opencode/src/session/prompt.ts
@@ -562,7 +562,7 @@ export namespace SessionPrompt {
             OUTPUT_TOKEN_MAX,
           ),
           abortSignal: abort,
-          providerOptions: ProviderTransform.providerOptions(model.api.npm, model.providerID, params.options),
+          providerOptions: ProviderTransform.providerOptions(model, params.options),
           stopWhen: stepCountIs(1),
           temperature: params.temperature,
           topP: params.topP,
@@ -1458,7 +1458,7 @@ export namespace SessionPrompt {
       await generateText({
         // use higher # for reasoning models since reasoning tokens eat up a lot of the budget
         maxOutputTokens: small.capabilities.reasoning ? 3000 : 20,
-        providerOptions: ProviderTransform.providerOptions(small.api.npm, small.providerID, options),
+        providerOptions: ProviderTransform.providerOptions(small, options),
         messages: [
           ...SystemPrompt.title(small.providerID).map(
             (x): ModelMessage => ({
diff --git a/packages/opencode/src/session/summary.ts b/packages/opencode/src/session/summary.ts
index ba0a1a00c..d1004aa0e 100644
--- a/packages/opencode/src/session/summary.ts
+++ b/packages/opencode/src/session/summary.ts
@@ -91,7 +91,7 @@ export namespace SessionSummary {
     if (textPart && !userMsg.summary?.title) {
       const result = await generateText({
         maxOutputTokens: small.capabilities.reasoning ? 1500 : 20,
-        providerOptions: ProviderTransform.providerOptions(small.api.npm, small.providerID, options),
+        providerOptions: ProviderTransform.providerOptions(small, options),
         messages: [
           ...SystemPrompt.title(small.providerID).map(
             (x): ModelMessage => ({
@@ -138,7 +138,7 @@ export namespace SessionSummary {
       const result = await generateText({
         model: language,
         maxOutputTokens: 100,
-        providerOptions: ProviderTransform.providerOptions(small.api.npm, small.providerID, options),
+        providerOptions: ProviderTransform.providerOptions(small, options),
         messages: [
           ...SystemPrompt.summarize(small.providerID).map(
             (x): ModelMessage => ({
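
Note (not part of the commit): a minimal TypeScript sketch of what this refactor means for callers. The ModelLike type below is an assumption that only approximates the parts of Provider.Model the patch actually reads (api.npm, providerID, options); the provider-specific branches elided by the hunk context are not reproduced, only the default branch shown in the diff.

// Sketch only, not the real Provider.Model type from opencode.
type ModelLike = {
  providerID: string
  api: { npm?: string }
  options: Record<string, any>
}

// Shape of the refactored function, per the transform.ts hunk above.
// Package-specific cases ("@ai-sdk/openai", "@ai-sdk/azure", ...) are elided;
// only the default branch from the diff is reproduced here.
function providerOptions(model: ModelLike, options: { [x: string]: any }) {
  switch (model.api.npm) {
    default:
      // Fall back to keying the options by provider id, as in the patch.
      return { [model.providerID]: options }
  }
}

// Call sites before:  providerOptions(model.api.npm, model.providerID, options)
// Call sites after:   providerOptions(model, options)
const opts = providerOptions({ providerID: "myprovider", api: {}, options: {} }, { reasoning: true })
// opts -> { myprovider: { reasoning: true } }

Passing the whole model keeps the npm package name and provider id from drifting apart at call sites, which is the point of the cleaner API in the subject line.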