Mirror of https://github.com/sst/opencode.git, synced 2025-12-23 10:11:41 +00:00
Merge 242f2f209f into 83397ebde2
Commit b478ae5518
1 changed file with 23 additions and 0 deletions
@@ -815,6 +815,29 @@ export namespace Provider {
           opts.signal = combined
         }

+        // Reasoning models require max_completion_tokens instead of max_tokens
+        // when using openai-compatible or azure provider
+        const isOpenAICompatible =
+          model.api.npm === "@ai-sdk/openai-compatible" || model.api.npm === "@ai-sdk/azure"
+        if (isOpenAICompatible && model.capabilities.reasoning) {
+          if (opts.body && typeof opts.body === "string") {
+            try {
+              const body = JSON.parse(opts.body)
+              if (body.max_tokens !== undefined) {
+                body.max_completion_tokens = body.max_tokens
+                delete body.max_tokens
+                opts.body = JSON.stringify(body)
+              }
+            } catch (e) {
+              log.warn("Failed to transform max_tokens parameter for reasoning model", {
+                providerID: model.providerID,
+                modelID: model.id,
+                error: e,
+              })
+            }
+          }
+        }
+
         return fetchFn(input, {
           ...opts,
           // @ts-ignore see here: https://github.com/oven-sh/bun/issues/16682
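For illustration only, here is a self-contained sketch of the same rewrite outside the Provider fetch wrapper. The helper name rewriteMaxTokensForReasoning and the trimmed-down ModelInfo shape are assumptions made for this example; in the commit the logic runs inline inside the custom fetch shown above, using the provider's own model and log objects.

// Standalone sketch (not the committed code): rename max_tokens to
// max_completion_tokens for reasoning models on openai-compatible/azure providers.
type ModelInfo = {
  api: { npm: string }                 // e.g. "@ai-sdk/openai-compatible"
  capabilities: { reasoning: boolean } // true for reasoning models
}

function rewriteMaxTokensForReasoning(body: string, model: ModelInfo): string {
  const isOpenAICompatible =
    model.api.npm === "@ai-sdk/openai-compatible" || model.api.npm === "@ai-sdk/azure"
  if (!isOpenAICompatible || !model.capabilities.reasoning) return body
  try {
    const parsed = JSON.parse(body)
    if (parsed.max_tokens !== undefined) {
      parsed.max_completion_tokens = parsed.max_tokens
      delete parsed.max_tokens
      return JSON.stringify(parsed)
    }
  } catch {
    // The committed code logs a warning here; the sketch just passes the body through.
  }
  return body
}

// Usage: a request body bound for a hypothetical Azure reasoning deployment.
const rewritten = rewriteMaxTokensForReasoning(
  JSON.stringify({ model: "o3-mini", max_tokens: 1024 }),
  { api: { npm: "@ai-sdk/azure" }, capabilities: { reasoning: true } },
)
console.log(rewritten) // {"model":"o3-mini","max_completion_tokens":1024}

Rewriting the body only when max_tokens is present, and leaving an unparseable body untouched rather than throwing, keeps the fetch wrapper from breaking requests whose bodies are not plain JSON strings.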