From b8204c0bb7f6bcc77c6cd570f0020ac4ed044503 Mon Sep 17 00:00:00 2001
From: Shantur Rathore
Date: Wed, 17 Dec 2025 16:20:10 +0000
Subject: [PATCH] fix: config option setCacheKey not being respected (#5686)

---
 packages/opencode/src/session/llm.ts | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/packages/opencode/src/session/llm.ts b/packages/opencode/src/session/llm.ts
index ce7b60f0a..7aba178e1 100644
--- a/packages/opencode/src/session/llm.ts
+++ b/packages/opencode/src/session/llm.ts
@@ -73,6 +73,8 @@ export namespace LLM {
       system.push(header, rest.join("\n"))
     }
 
+    const provider = await Provider.getProvider(input.model.providerID)
+
     const params = await Plugin.trigger(
       "chat.params",
       {
@@ -90,7 +92,7 @@ export namespace LLM {
         topK: ProviderTransform.topK(input.model),
         options: pipe(
           {},
-          mergeDeep(ProviderTransform.options(input.model, input.sessionID)),
+          mergeDeep(ProviderTransform.options(input.model, input.sessionID, provider.options)),
           input.small ? mergeDeep(ProviderTransform.smallOptions(input.model)) : mergeDeep({}),
           mergeDeep(input.model.options),
           mergeDeep(input.agent.options),