Mirror of https://github.com/sst/opencode.git, synced 2025-07-07 16:14:59 +00:00
feat: openai responses api & codex-mini-latest
parent 4a444e9c9b
commit 014400465b

3 changed files with 179 additions and 112 deletions
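This commit moves the OpenAI provider off the Chat Completions endpoint and onto the Responses API, and registers codex-mini-latest as a selectable model. For orientation, a minimal sketch of the new call shape follows. The params, unions, and the OutputText() accessor are taken straight from the hunks below; the client construction, the user-role constant, and the literal model string are illustrative assumptions, not part of the commit.

    package main

    import (
        "context"
        "fmt"

        "github.com/openai/openai-go"
        "github.com/openai/openai-go/option"
        "github.com/openai/openai-go/responses"
        "github.com/openai/openai-go/shared"
    )

    func main() {
        // Assumed client setup; opencode wires this through newOpenAIClient instead.
        client := openai.NewClient(option.WithAPIKey("sk-..."))

        // One user turn, expressed as a Responses input item
        // (the same shape convertMessages builds in the diff below).
        input := responses.ResponseInputParam{{
            OfMessage: &responses.EasyInputMessageParam{
                Content: responses.EasyInputMessageContentUnionParam{OfString: openai.String("Say hello")},
                Role:    responses.EasyInputMessageRoleUser,
            },
        }}

        resp, err := client.Responses.New(context.Background(), responses.ResponseNewParams{
            Model: shared.ResponsesModel("codex-mini-latest"),
            Input: responses.ResponseNewParamsInputUnion{OfInputItemList: input},
        })
        if err != nil {
            panic(err)
        }
        fmt.Println(resp.OutputText()) // aggregated output text, as used in send() below
    }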
@@ -522,15 +522,6 @@ func (a *agent) processEvent(ctx context.Context, sessionID string, assistantMsg
 		assistantMsg.AddToolCall(*event.ToolCall)
 		_, err := a.messages.Update(ctx, *assistantMsg)
 		return err
-	// TODO: see how to handle this
-	// case provider.EventToolUseDelta:
-	// 	tm := time.Unix(assistantMsg.UpdatedAt, 0)
-	// 	assistantMsg.AppendToolCallInput(event.ToolCall.ID, event.ToolCall.Input)
-	// 	if time.Since(tm) > 1000*time.Millisecond {
-	// 		err := a.messages.Update(ctx, *assistantMsg)
-	// 		assistantMsg.UpdatedAt = time.Now().Unix()
-	// 		return err
-	// 	}
 	case provider.EventToolUseStop:
 		assistantMsg.FinishToolCall(event.ToolCall.ID)
 		_, err := a.messages.Update(ctx, *assistantMsg)
@@ -3,6 +3,7 @@ package models

 const (
 	ProviderOpenAI ModelProvider = "openai"

+	CodexMini ModelID = "codex-mini"
 	GPT41 ModelID = "gpt-4.1"
 	GPT41Mini ModelID = "gpt-4.1-mini"
 	GPT41Nano ModelID = "gpt-4.1-nano"
@@ -18,6 +19,20 @@ const (
 )

 var OpenAIModels = map[ModelID]Model{
+	CodexMini: {
+		ID: CodexMini,
+		Name: "Codex Mini",
+		Provider: ProviderOpenAI,
+		APIModel: "codex-mini-latest",
+		CostPer1MIn: 1.50,
+		CostPer1MInCached: 0.375,
+		CostPer1MOutCached: 0.0,
+		CostPer1MOut: 6.00,
+		ContextWindow: 200_000,
+		DefaultMaxTokens: 100_000,
+		CanReason: true,
+		SupportsAttachments: true,
+	},
 	GPT41: {
 		ID: GPT41,
 		Name: "GPT 4.1",
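The Cost fields read as USD per million tokens; that is an inference from the field names, not something the diff states. A back-of-envelope check of what one hypothetical codex-mini request would cost under this entry:

    package main

    import "fmt"

    func main() {
        // Rates copied from the CodexMini entry above; semantics assumed to be USD per 1M tokens.
        const (
            costPer1MIn       = 1.50  // uncached input tokens
            costPer1MInCached = 0.375 // cached input tokens
            costPer1MOut      = 6.00  // output tokens
        )

        // Hypothetical request: 12,000 input tokens (4,000 served from cache), 2,500 output tokens.
        in, cached, out := 8_000.0, 4_000.0, 2_500.0

        cost := in/1e6*costPer1MIn + cached/1e6*costPer1MInCached + out/1e6*costPer1MOut
        fmt.Printf("$%.6f\n", cost) // prints $0.028500
    }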
@@ -10,6 +10,7 @@ import (

 	"github.com/openai/openai-go"
 	"github.com/openai/openai-go/option"
+	"github.com/openai/openai-go/responses"
 	"github.com/openai/openai-go/shared"
 	"github.com/sst/opencode/internal/config"
 	"github.com/sst/opencode/internal/llm/models"
@@ -66,76 +67,96 @@ func newOpenAIClient(opts providerClientOptions) OpenAIClient {
 	}
 }

-func (o *openaiClient) convertMessages(messages []message.Message) (openaiMessages []openai.ChatCompletionMessageParamUnion) {
-	// Add system message first
-	openaiMessages = append(openaiMessages, openai.SystemMessage(o.providerOptions.systemMessage))
+func (o *openaiClient) convertMessages(messages []message.Message) responses.ResponseInputParam {
+	inputItems := responses.ResponseInputParam{}
+
+	inputItems = append(inputItems, responses.ResponseInputItemUnionParam{
+		OfMessage: &responses.EasyInputMessageParam{
+			Content: responses.EasyInputMessageContentUnionParam{OfString: openai.String(o.providerOptions.systemMessage)},
+			Role: responses.EasyInputMessageRoleSystem,
+		},
+	})

 	for _, msg := range messages {
 		switch msg.Role {
 		case message.User:
-			var content []openai.ChatCompletionContentPartUnionParam
-			textBlock := openai.ChatCompletionContentPartTextParam{Text: msg.Content().String()}
-			content = append(content, openai.ChatCompletionContentPartUnionParam{OfText: &textBlock})
-			for _, binaryContent := range msg.BinaryContent() {
-				imageURL := openai.ChatCompletionContentPartImageImageURLParam{URL: binaryContent.String(models.ProviderOpenAI)}
-				imageBlock := openai.ChatCompletionContentPartImageParam{ImageURL: imageURL}
-
-				content = append(content, openai.ChatCompletionContentPartUnionParam{OfImageURL: &imageBlock})
-			}
-
-			openaiMessages = append(openaiMessages, openai.UserMessage(content))
+			inputItemContentList := responses.ResponseInputMessageContentListParam{
+				responses.ResponseInputContentUnionParam{
+					OfInputText: &responses.ResponseInputTextParam{
+						Text: msg.Content().String(),
+					},
+				},
+			}
+
+			for _, binaryContent := range msg.BinaryContent() {
+				inputItemContentList = append(inputItemContentList, responses.ResponseInputContentUnionParam{
+					OfInputImage: &responses.ResponseInputImageParam{
+						ImageURL: openai.String(binaryContent.String(models.ProviderOpenAI)),
+					},
+				})
+			}
+
+			userMsg := responses.ResponseInputItemUnionParam{
+				OfInputMessage: &responses.ResponseInputItemMessageParam{
+					Content: inputItemContentList,
+					Role: string(responses.ResponseInputMessageItemRoleUser),
+				},
+			}
+			inputItems = append(inputItems, userMsg)

 		case message.Assistant:
-			assistantMsg := openai.ChatCompletionAssistantMessageParam{
-				Role: "assistant",
-			}
-
 			if msg.Content().String() != "" {
-				assistantMsg.Content = openai.ChatCompletionAssistantMessageParamContentUnion{
-					OfString: openai.String(msg.Content().String()),
-				}
+				assistantMsg := responses.ResponseInputItemUnionParam{
+					OfOutputMessage: &responses.ResponseOutputMessageParam{
+						Content: []responses.ResponseOutputMessageContentUnionParam{{
+							OfOutputText: &responses.ResponseOutputTextParam{
+								Text: msg.Content().String(),
+							},
+						}},
+					},
+				}
+				inputItems = append(inputItems, assistantMsg)
 			}

 			if len(msg.ToolCalls()) > 0 {
-				assistantMsg.ToolCalls = make([]openai.ChatCompletionMessageToolCallParam, len(msg.ToolCalls()))
-				for i, call := range msg.ToolCalls() {
-					assistantMsg.ToolCalls[i] = openai.ChatCompletionMessageToolCallParam{
-						ID: call.ID,
-						Type: "function",
-						Function: openai.ChatCompletionMessageToolCallFunctionParam{
-							Name: call.Name,
-							Arguments: call.Input,
-						},
-					}
-				}
+				for _, call := range msg.ToolCalls() {
+					toolMsg := responses.ResponseInputItemUnionParam{
+						OfFunctionCall: &responses.ResponseFunctionToolCallParam{
+							CallID: call.ID,
+							Name: call.Name,
+							Arguments: call.Input,
+						},
+					}
+					inputItems = append(inputItems, toolMsg)
+				}
 			}

-			openaiMessages = append(openaiMessages, openai.ChatCompletionMessageParamUnion{
-				OfAssistant: &assistantMsg,
-			})
-
 		case message.Tool:
 			for _, result := range msg.ToolResults() {
-				openaiMessages = append(openaiMessages,
-					openai.ToolMessage(result.Content, result.ToolCallID),
-				)
+				toolMsg := responses.ResponseInputItemUnionParam{
+					OfFunctionCallOutput: &responses.ResponseInputItemFunctionCallOutputParam{
+						Output: result.Content,
+						CallID: result.ToolCallID,
+					},
+				}
+				inputItems = append(inputItems, toolMsg)
 			}
 		}
 	}

-	return
+	return inputItems
 }

-func (o *openaiClient) convertTools(tools []tools.BaseTool) []openai.ChatCompletionToolParam {
-	openaiTools := make([]openai.ChatCompletionToolParam, len(tools))
+func (o *openaiClient) convertTools(tools []tools.BaseTool) []responses.ToolUnionParam {
+	outputTools := make([]responses.ToolUnionParam, len(tools))

 	for i, tool := range tools {
 		info := tool.Info()
-		openaiTools[i] = openai.ChatCompletionToolParam{
-			Function: openai.FunctionDefinitionParam{
+		outputTools[i] = responses.ToolUnionParam{
+			OfFunction: &responses.FunctionToolParam{
 				Name: info.Name,
 				Description: openai.String(info.Description),
-				Parameters: openai.FunctionParameters{
+				Parameters: map[string]any{
 					"type": "object",
 					"properties": info.Parameters,
 					"required": info.Required,
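Note the convertTools change in the hunk above: tool schemas move from openai.FunctionDefinitionParam to responses.FunctionToolParam, with the JSON schema carried as a plain map[string]any. A sketch of one converted tool; the `glob` tool name and its schema are invented for illustration:

    package main

    import (
        "github.com/openai/openai-go"
        "github.com/openai/openai-go/responses"
    )

    // globTool mirrors the shape convertTools produces, for a made-up "glob" tool.
    func globTool() responses.ToolUnionParam {
        return responses.ToolUnionParam{
            OfFunction: &responses.FunctionToolParam{
                Name:        "glob",
                Description: openai.String("Find files matching a pattern"),
                Parameters: map[string]any{
                    "type": "object",
                    "properties": map[string]any{
                        "pattern": map[string]any{"type": "string"},
                    },
                    "required": []string{"pattern"},
                },
            },
        }
    }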
@@ -144,7 +165,7 @@ func (o *openaiClient) convertTools(tools []tools.BaseTool) []openai.ChatComplet
 		}
 	}

-	return openaiTools
+	return outputTools
 }

 func (o *openaiClient) finishReason(reason string) message.FinishReason {
@@ -160,27 +181,26 @@ func (o *openaiClient) finishReason(reason string) message.FinishReason {
 	}
 }

-func (o *openaiClient) preparedParams(messages []openai.ChatCompletionMessageParamUnion, tools []openai.ChatCompletionToolParam) openai.ChatCompletionNewParams {
-	params := openai.ChatCompletionNewParams{
-		Model: openai.ChatModel(o.providerOptions.model.APIModel),
-		Messages: messages,
+func (o *openaiClient) preparedParams(input responses.ResponseInputParam, tools []responses.ToolUnionParam) responses.ResponseNewParams {
+	params := responses.ResponseNewParams{
+		Model: shared.ResponsesModel(o.providerOptions.model.APIModel),
+		Input: responses.ResponseNewParamsInputUnion{OfInputItemList: input},
 		Tools: tools,
 	}

+	params.MaxOutputTokens = openai.Int(o.providerOptions.maxTokens)
+
 	if o.providerOptions.model.CanReason == true {
-		params.MaxCompletionTokens = openai.Int(o.providerOptions.maxTokens)
 		switch o.options.reasoningEffort {
 		case "low":
-			params.ReasoningEffort = shared.ReasoningEffortLow
+			params.Reasoning.Effort = shared.ReasoningEffortLow
 		case "medium":
-			params.ReasoningEffort = shared.ReasoningEffortMedium
+			params.Reasoning.Effort = shared.ReasoningEffortMedium
 		case "high":
-			params.ReasoningEffort = shared.ReasoningEffortHigh
+			params.Reasoning.Effort = shared.ReasoningEffortHigh
 		default:
-			params.ReasoningEffort = shared.ReasoningEffortMedium
+			params.Reasoning.Effort = shared.ReasoningEffortMedium
 		}
-	} else {
-		params.MaxTokens = openai.Int(o.providerOptions.maxTokens)
 	}

 	if o.providerOptions.model.Provider == models.ProviderOpenRouter {
@@ -204,7 +224,7 @@ func (o *openaiClient) send(ctx context.Context, messages []message.Message, too
 	attempts := 0
 	for {
 		attempts++
-		openaiResponse, err := o.client.Chat.Completions.New(
+		openaiResponse, err := o.client.Responses.New(
 			ctx,
 			params,
 		)
@@ -228,12 +248,12 @@ func (o *openaiClient) send(ctx context.Context, messages []message.Message, too
 		}

 		content := ""
-		if openaiResponse.Choices[0].Message.Content != "" {
-			content = openaiResponse.Choices[0].Message.Content
+		if openaiResponse.OutputText() != "" {
+			content = openaiResponse.OutputText()
 		}

 		toolCalls := o.toolCalls(*openaiResponse)
-		finishReason := o.finishReason(string(openaiResponse.Choices[0].FinishReason))
+		finishReason := o.finishReason("stop")

 		if len(toolCalls) > 0 {
 			finishReason = message.FinishReasonToolUse
@@ -249,10 +269,9 @@ func (o *openaiClient) send(ctx context.Context, messages []message.Message, too
 }

 func (o *openaiClient) stream(ctx context.Context, messages []message.Message, tools []tools.BaseTool) <-chan ProviderEvent {
+	eventChan := make(chan ProviderEvent)
+
 	params := o.preparedParams(o.convertMessages(messages), o.convertTools(tools))
-	params.StreamOptions = openai.ChatCompletionStreamOptionsParam{
-		IncludeUsage: openai.Bool(true),
-	}

 	cfg := config.Get()
 	if cfg.Debug {
@@ -261,55 +280,96 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t
 	}

 	attempts := 0
-	eventChan := make(chan ProviderEvent)

 	go func() {
 		for {
 			attempts++
-			openaiStream := o.client.Chat.Completions.NewStreaming(
-				ctx,
-				params,
-			)
-
-			acc := openai.ChatCompletionAccumulator{}
-			currentContent := ""
-			toolCalls := make([]message.ToolCall, 0)
-
-			for openaiStream.Next() {
-				chunk := openaiStream.Current()
-				acc.AddChunk(chunk)
-
-				for _, choice := range chunk.Choices {
-					if choice.Delta.Content != "" {
-						eventChan <- ProviderEvent{
-							Type: EventContentDelta,
-							Content: choice.Delta.Content,
-						}
-						currentContent += choice.Delta.Content
-					}
-				}
-			}
+			stream := o.client.Responses.NewStreaming(ctx, params)
+
+			outputText := ""
+			currentToolCallID := ""
+			for stream.Next() {
+				event := stream.Current()
+
+				switch event := event.AsAny().(type) {
+				case responses.ResponseCompletedEvent:
+					toolCalls := o.toolCalls(event.Response)
+					finishReason := o.finishReason("stop")
+
+					if len(toolCalls) > 0 {
+						finishReason = message.FinishReasonToolUse
+					}
+
+					eventChan <- ProviderEvent{
+						Type: EventComplete,
+						Response: &ProviderResponse{
+							Content: outputText,
+							ToolCalls: toolCalls,
+							Usage: o.usage(event.Response),
+							FinishReason: finishReason,
+						},
+					}
+					close(eventChan)
+					return
+
+				case responses.ResponseTextDeltaEvent:
+					outputText += event.Delta
+					eventChan <- ProviderEvent{
+						Type: EventContentDelta,
+						Content: event.Delta,
+					}
+
+				case responses.ResponseTextDoneEvent:
+					eventChan <- ProviderEvent{
+						Type: EventContentStop,
+						Content: outputText,
+					}
+					close(eventChan)
+					return
+
+				case responses.ResponseOutputItemAddedEvent:
+					if event.Item.Type == "function_call" {
+						currentToolCallID = event.Item.ID
+						eventChan <- ProviderEvent{
+							Type: EventToolUseStart,
+							ToolCall: &message.ToolCall{
+								ID: event.Item.ID,
+								Name: event.Item.Name,
+								Finished: false,
+							},
+						}
+					}
+
+				case responses.ResponseFunctionCallArgumentsDeltaEvent:
+					if event.ItemID == currentToolCallID {
+						eventChan <- ProviderEvent{
+							Type: EventToolUseDelta,
+							ToolCall: &message.ToolCall{
+								ID: currentToolCallID,
+								Finished: false,
+								Input: event.Delta,
+							},
+						}
+					}
+
+				case responses.ResponseFunctionCallArgumentsDoneEvent:
+					if event.ItemID == currentToolCallID {
+						eventChan <- ProviderEvent{
+							Type: EventToolUseStop,
+							ToolCall: &message.ToolCall{
+								ID: currentToolCallID,
+								Input: event.Arguments,
+							},
+						}
+						currentToolCallID = ""
+					}
+
+					// TODO: handle other events
+				}
+			}

-			err := openaiStream.Err()
+			err := stream.Err()
 			if err == nil || errors.Is(err, io.EOF) {
-				// Stream completed successfully
-				finishReason := o.finishReason(string(acc.ChatCompletion.Choices[0].FinishReason))
-				if len(acc.ChatCompletion.Choices[0].Message.ToolCalls) > 0 {
-					toolCalls = append(toolCalls, o.toolCalls(acc.ChatCompletion)...)
-				}
-				if len(toolCalls) > 0 {
-					finishReason = message.FinishReasonToolUse
-				}
-
-				eventChan <- ProviderEvent{
-					Type: EventComplete,
-					Response: &ProviderResponse{
-						Content: currentContent,
-						ToolCalls: toolCalls,
-						Usage: o.usage(acc.ChatCompletion),
-						FinishReason: finishReason,
-					},
-				}
 				close(eventChan)
 				return
 			}
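The rewritten stream loop above translates Responses SSE events into opencode's internal ProviderEvent stream. A hedged sketch of what a consumer of that channel could look like; it assumes package-internal access to the unexported client, and uses only the event types and fields that appear in this diff:

    // drain prints events from the provider stream. A sketch, not opencode's
    // actual agent loop; it assumes it lives inside the provider package.
    func drain(ctx context.Context, o *openaiClient, msgs []message.Message, ts []tools.BaseTool) {
        for event := range o.stream(ctx, msgs, ts) {
            switch event.Type {
            case EventContentDelta:
                fmt.Print(event.Content) // incremental assistant text
            case EventToolUseStart:
                fmt.Printf("\n[tool %s: %s]\n", event.ToolCall.ID, event.ToolCall.Name)
            case EventToolUseStop:
                fmt.Printf("[tool %s done]\n", event.ToolCall.ID)
            case EventComplete:
                fmt.Printf("\nfinish=%v output_tokens=%d\n",
                    event.Response.FinishReason, event.Response.Usage.OutputTokens)
            }
        }
    }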
@@ -373,15 +433,16 @@ func (o *openaiClient) shouldRetry(attempts int, err error) (bool, int64, error)
 	return true, int64(retryMs), nil
 }

-func (o *openaiClient) toolCalls(completion openai.ChatCompletion) []message.ToolCall {
+func (o *openaiClient) toolCalls(response responses.Response) []message.ToolCall {
 	var toolCalls []message.ToolCall

-	if len(completion.Choices) > 0 && len(completion.Choices[0].Message.ToolCalls) > 0 {
-		for _, call := range completion.Choices[0].Message.ToolCalls {
+	for _, output := range response.Output {
+		if output.Type == "function_call" {
+			call := output.AsFunctionCall()
 			toolCall := message.ToolCall{
 				ID: call.ID,
-				Name: call.Function.Name,
-				Input: call.Function.Arguments,
+				Name: call.Name,
+				Input: call.Arguments,
 				Type: "function",
 				Finished: true,
 			}
@@ -392,13 +453,13 @@ func (o *openaiClient) toolCalls(completion openai.ChatCompletion) []message.Too
 	return toolCalls
 }

-func (o *openaiClient) usage(completion openai.ChatCompletion) TokenUsage {
-	cachedTokens := completion.Usage.PromptTokensDetails.CachedTokens
-	inputTokens := completion.Usage.PromptTokens - cachedTokens
+func (o *openaiClient) usage(response responses.Response) TokenUsage {
+	cachedTokens := response.Usage.InputTokensDetails.CachedTokens
+	inputTokens := response.Usage.InputTokens - cachedTokens

 	return TokenUsage{
 		InputTokens: inputTokens,
-		OutputTokens: completion.Usage.CompletionTokens,
+		OutputTokens: response.Usage.OutputTokens,
 		CacheCreationTokens: 0, // OpenAI doesn't provide this directly
 		CacheReadTokens: cachedTokens,
 	}
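The usage() mapping above subtracts cached tokens from the raw input count, so cache reads are reported under CacheReadTokens rather than double-counted as billed input. Worked numbers, using the same hypothetical request as the pricing example earlier:

    // Hypothetical Responses usage: 12,000 input tokens, 4,000 of them cached,
    // 2,500 output tokens. The provider would then report:
    cachedTokens := int64(4_000)
    inputTokens := int64(12_000) - cachedTokens // 8,000 billed as uncached input
    // TokenUsage{InputTokens: 8000, OutputTokens: 2500, CacheReadTokens: 4000}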