add openai prompt cache key

Dax Raad 2025-08-12 17:37:15 -04:00
parent d1876e3031
commit 5785ded6e2
3 changed files with 14 additions and 27 deletions

@@ -28,7 +28,7 @@ await Promise.all([
   fs.mkdir(Global.Path.bin, { recursive: true }),
 ])
-const CACHE_VERSION = "7"
+const CACHE_VERSION = "8"
 const version = await Bun.file(path.join(Global.Path.cache, "version"))
   .text()
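
Bumping CACHE_VERSION from "7" to "8" discards anything cached under the old provider options. A minimal sketch of the invalidation pattern this bump relies on; the helper below and its exact behavior are an assumption for illustration, not the project's actual code:

import * as fs from "node:fs/promises"
import path from "node:path"

const CACHE_VERSION = "8"

// Hypothetical sketch: wipe the cache directory whenever the stored
// version no longer matches the compiled-in CACHE_VERSION.
async function ensureCacheVersion(cacheDir: string): Promise<void> {
  const versionFile = path.join(cacheDir, "version")
  const stored = await fs.readFile(versionFile, "utf8").catch(() => "")
  if (stored.trim() !== CACHE_VERSION) {
    await fs.rm(cacheDir, { recursive: true, force: true })
    await fs.mkdir(cacheDir, { recursive: true })
    await fs.writeFile(versionFile, CACHE_VERSION)
  }
}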

@@ -83,32 +83,19 @@ export namespace ProviderTransform {
     return undefined
   }
-  export function options(providerID: string, modelID: string): Record<string, any> | undefined {
+  export function options(providerID: string, modelID: string, sessionID: string): Record<string, any> | undefined {
+    const result: Record<string, any> = {}
+    if (providerID === "openai") {
+      result["promptCacheKey"] = sessionID
+    }
     if (modelID.includes("gpt-5")) {
-      if (providerID === "azure") {
-        return {
-          reasoningEffort: "minimal",
-        }
-      }
-      return {
-        reasoningEffort: "minimal",
-        textVerbosity: "low",
-        // reasoningSummary: "auto",
-        // include: ["reasoning.encrypted_content"],
+      result["reasoningEffort"] = "minimal"
+      if (providerID !== "azure") {
+        result["textVerbosity"] = "low"
       }
     }
-    // if (modelID.includes("claude")) {
-    //   return {
-    //     thinking: {
-    //       type: "enabled",
-    //       budgetTokens: 32000,
-    //     },
-    //   }
-    // }
-    // if (_providerID === "bedrock") {
-    //   return {
-    //     reasoningConfig: { type: "enabled", budgetTokens: 32000 },
-    //   }
-    // }
+    return result
   }
 }
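
Reassembled from the hunk above, the rewritten helper reads as follows; the comments are added here for context and are not part of the commit:

export function options(providerID: string, modelID: string, sessionID: string): Record<string, any> | undefined {
  const result: Record<string, any> = {}
  // Reuse the session ID as OpenAI's prompt cache key so every request in a
  // session presents the same key.
  if (providerID === "openai") {
    result["promptCacheKey"] = sessionID
  }
  if (modelID.includes("gpt-5")) {
    result["reasoningEffort"] = "minimal"
    // Mirrors the old Azure-specific branch: Azure keeps reasoningEffort but
    // is not sent textVerbosity.
    if (providerID !== "azure") {
      result["textVerbosity"] = "low"
    }
  }
  return result
}

The two Session call sites below spread this into the provider options for the active provider, so the cache key travels alongside the rest of the per-model settings.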

@@ -674,7 +674,7 @@ export namespace Session {
         providerOptions: {
           [input.providerID]: {
             ...small.info.options,
-            ...ProviderTransform.options(input.providerID, small.info.id),
+            ...ProviderTransform.options(input.providerID, small.info.id, input.sessionID),
           },
         },
         messages: [

@@ -868,7 +868,7 @@ export namespace Session {
           : undefined,
         topP: agent.topP ?? ProviderTransform.topP(input.providerID, input.modelID),
         options: {
-          ...ProviderTransform.options(input.providerID, input.modelID),
+          ...ProviderTransform.options(input.providerID, input.modelID, input.sessionID),
           ...model.info.options,
           ...agent.options,
         },
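
Both call sites pass input.sessionID through, so every turn of a session presents the same cache key. A rough sketch of where the value ends up, assuming the AI SDK's OpenAI provider forwards promptCacheKey to OpenAI's prompt_cache_key request field; the model choice and session value below are illustrative:

import { streamText } from "ai"
import { openai } from "@ai-sdk/openai"

const sessionID = "ses_0123456789" // hypothetical session ID

const result = streamText({
  model: openai("gpt-5"),
  providerOptions: {
    openai: {
      // Stable per-session key: repeated requests should route to the same
      // cached prompt prefix on OpenAI's side.
      promptCacheKey: sessionID,
      reasoningEffort: "minimal",
      textVerbosity: "low",
    },
  },
  prompt: "Summarize the previous discussion.",
})

Using the session ID rather than anything prompt-derived keeps the key stable as the conversation grows turn by turn, which is presumably the point of wiring it to sessionID here.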