diff --git a/packages/opencode/src/provider/sdk/openai-compatible/src/chat/convert-to-openai-compatible-chat-messages.ts b/packages/opencode/src/provider/sdk/openai-compatible/src/chat/convert-to-openai-compatible-chat-messages.ts
index ea78a365a..30062794e 100644
--- a/packages/opencode/src/provider/sdk/openai-compatible/src/chat/convert-to-openai-compatible-chat-messages.ts
+++ b/packages/opencode/src/provider/sdk/openai-compatible/src/chat/convert-to-openai-compatible-chat-messages.ts
@@ -20,7 +20,6 @@ export function convertToOpenAICompatibleChatMessages(
   const messages: OpenAICompatibleChatPrompt = [];
   for (const { role, content, ...message } of prompt) {
     const metadata = getOpenAIMetadata({ ...message });
-    logger.error(`provider options: ${JSON.stringify(message.providerOptions)}`);
     switch (role) {
       case 'system': {
         messages.push({ role: 'system', content, ...metadata });
@@ -84,7 +83,6 @@ export function convertToOpenAICompatibleChatMessages(
           function: { name: string; arguments: string };
         }> = [];
         for (const part of content) {
-          logger.error(`provider options (part): ${JSON.stringify(part.providerOptions)}`);
           const partMetadata = getOpenAIMetadata(part);
           switch (part.type) {
             case 'text': {
diff --git a/packages/opencode/src/provider/sdk/openai-compatible/src/chat/openai-compatible-chat-language-model.ts b/packages/opencode/src/provider/sdk/openai-compatible/src/chat/openai-compatible-chat-language-model.ts
index 4889017fc..f334616dd 100644
--- a/packages/opencode/src/provider/sdk/openai-compatible/src/chat/openai-compatible-chat-language-model.ts
+++ b/packages/opencode/src/provider/sdk/openai-compatible/src/chat/openai-compatible-chat-language-model.ts
@@ -353,10 +353,6 @@ export class OpenAICompatibleChatLanguageModel implements LanguageModelV2 {
     const providerOptionsName = this.providerOptionsName
     let isActiveReasoning = false
     let isActiveText = false
-    const log = Log.create()
-
-    let thoughtSigReasoningText: string = ""
-    let thoughtSigReasoningOpaque: string = ""

     return {
       stream: response.pipeThrough(
@@ -367,7 +363,6 @@
           // TODO we lost type safety on Chunk, most likely due to the error schema. MUST FIX
           transform(chunk, controller) {
-            log.info(chunk)
             // Emit raw chunk if requested (before anything else)
             if (options.includeRawChunks) {
               controller.enqueue({ type: "raw", rawValue: chunk.rawValue })
             }
@@ -455,12 +450,6 @@
                 id: "reasoning-0",
                 delta: reasoningContent,
               })
-
-              thoughtSigReasoningText += reasoningContent
-            }
-
-            if (delta.reasoning_opaque) {
-              thoughtSigReasoningOpaque = delta.reasoning_opaque
             }

             if (delta.content) {
@@ -610,10 +599,6 @@
             }

             const providerMetadata: SharedV2ProviderMetadata = {
-              [providerOptionsName]: {
-                reasoningOpaque: thoughtSigReasoningOpaque,
-                reasoningText: thoughtSigReasoningText,
-              },
               ...metadataExtractor?.buildMetadata(),
             }
             const log = Log.create()
@@ -720,8 +705,6 @@ const createOpenAICompatibleChatChunkSchema =