fix: remove debug statements for thought sigs

Aadish Verma 2025-12-10 14:35:04 -08:00 committed by StarLight842
parent 81857f7483
commit db519e8e6c
2 changed files with 0 additions and 19 deletions
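For orientation before the hunks: the first file loses two ad-hoc logger.error dumps of providerOptions, and the second loses the thought-signature plumbing (the thoughtSigReasoningText / thoughtSigReasoningOpaque accumulators, the extra providerMetadata entries, and the reasoning_opaque schema field). A condensed sketch of that plumbing as it stood before the commit, with simplified types; the makeSignatureCollector name is illustrative, not from the repo:

// Condensed, illustrative sketch of the removed thought-signature plumbing:
// accumulate reasoning text plus the latest opaque signature from streamed
// deltas, then surface both as provider metadata when the stream finishes.
type ReasoningDelta = {
  reasoning?: string | null
  reasoning_text?: string | null
  reasoning_opaque?: string | null
}

function makeSignatureCollector(providerOptionsName: string) {
  let reasoningText = ""
  let reasoningOpaque = ""
  return {
    onDelta(delta: ReasoningDelta) {
      const content = delta.reasoning ?? delta.reasoning_text
      if (content) reasoningText += content
      // Only the most recent signature is kept; earlier ones are overwritten.
      if (delta.reasoning_opaque) reasoningOpaque = delta.reasoning_opaque
    },
    onFinish() {
      // Merged into the finish event's providerMetadata under the provider's key.
      return { [providerOptionsName]: { reasoningOpaque, reasoningText } }
    },
  }
}

After this commit, the finish event's providerMetadata carries only what metadataExtractor?.buildMetadata() returns.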

View file

@@ -20,7 +20,6 @@ export function convertToOpenAICompatibleChatMessages(
const messages: OpenAICompatibleChatPrompt = [];
for (const { role, content, ...message } of prompt) {
const metadata = getOpenAIMetadata({ ...message });
logger.error(`provider options: ${JSON.stringify(message.providerOptions)}`);
switch (role) {
case 'system': {
messages.push({ role: 'system', content, ...metadata });
@@ -84,7 +83,6 @@ export function convertToOpenAICompatibleChatMessages(
function: { name: string; arguments: string };
}> = [];
for (const part of content) {
logger.error(`provider options (part): ${JSON.stringify(part.providerOptions)}`);
const partMetadata = getOpenAIMetadata(part);
switch (part.type) {
case 'text': {

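Both deletions above were debug dumps of providerOptions taken while walking prompt messages and their content parts. The surviving code still forwards provider-specific options through getOpenAIMetadata and spreads the result onto each outgoing message; a minimal sketch of that passthrough, assuming it simply reads the provider-namespaced entry off providerOptions (the getMetadata helper and the "openaiCompatible" key are illustrative assumptions, not the repo's exact implementation):

// Illustrative passthrough: lift provider-scoped options off a message or
// content part so they can be spread onto the outgoing chat message.
type WithProviderOptions = {
  providerOptions?: Record<string, Record<string, unknown>>
}

function getMetadata(item: WithProviderOptions, providerKey = "openaiCompatible") {
  return item.providerOptions?.[providerKey] ?? {}
}

// Mirroring the conversion loop:
// messages.push({ role: "system", content, ...getMetadata(message) })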
View file

@@ -353,10 +353,6 @@ export class OpenAICompatibleChatLanguageModel implements LanguageModelV2 {
const providerOptionsName = this.providerOptionsName
let isActiveReasoning = false
let isActiveText = false
const log = Log.create()
let thoughtSigReasoningText: string = ""
let thoughtSigReasoningOpaque: string = ""
return {
stream: response.pipeThrough(
@@ -367,7 +363,6 @@ export class OpenAICompatibleChatLanguageModel implements LanguageModelV2 {
// TODO we lost type safety on Chunk, most likely due to the error schema. MUST FIX
transform(chunk, controller) {
log.info(chunk)
// Emit raw chunk if requested (before anything else)
if (options.includeRawChunks) {
controller.enqueue({ type: "raw", rawValue: chunk.rawValue })
@@ -455,12 +450,6 @@ export class OpenAICompatibleChatLanguageModel implements LanguageModelV2 {
id: "reasoning-0",
delta: reasoningContent,
})
thoughtSigReasoningText += reasoningContent
}
if (delta.reasoning_opaque) {
thoughtSigReasoningOpaque = delta.reasoning_opaque
}
if (delta.content) {
@@ -610,10 +599,6 @@ export class OpenAICompatibleChatLanguageModel implements LanguageModelV2 {
}
const providerMetadata: SharedV2ProviderMetadata = {
[providerOptionsName]: {
reasoningOpaque: thoughtSigReasoningOpaque,
reasoningText: thoughtSigReasoningText,
},
...metadataExtractor?.buildMetadata(),
}
const log = Log.create()
@@ -720,8 +705,6 @@ const createOpenAICompatibleChatChunkSchema = <ERROR_SCHEMA extends z.core.$ZodT
reasoning: z.string().nullish(),
// Copilot sets `reasoning_text`
reasoning_text: z.string().nullish(),
// For Gemini 3 Pro's thought signatures
reasoning_opaque: z.string().nullish(),
tool_calls: z
.array(
z.object({
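The final hunk drops reasoning_opaque from the streamed chunk schema, leaving reasoning and reasoning_text as the optional, nullable reasoning fields. A small self-contained sketch of a delta schema in that style, shown here with the removed signature field for comparison (only the three reasoning field names come from the diff; the rest is illustrative):

import { z } from "zod"

// Each delta field may be absent or null, hence .nullish().
const deltaSchema = z.object({
  content: z.string().nullish(),
  reasoning: z.string().nullish(),
  // Copilot sets reasoning_text instead of reasoning
  reasoning_text: z.string().nullish(),
  // Opaque thought signature (e.g. Gemini 3 Pro behind an OpenAI-compatible
  // endpoint); this is the field the commit removes.
  reasoning_opaque: z.string().nullish(),
})

// A chunk carrying only a signature still parses; the text fields stay undefined.
const delta = deltaSchema.parse({ reasoning_opaque: "opaque-signature" })
console.log(delta.reasoning_opaque) // "opaque-signature"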