fix: include cached tokens in tui

adamdottv 2025-06-16 12:59:38 -05:00
parent c8eb1b24c3
commit c7bb7ce4de
GPG key ID: 9CB48779AF150E75
3 changed files with 26 additions and 2 deletions

@@ -102,7 +102,11 @@ func (m statusComponent) View() string {
 			cost += message.Metadata.Assistant.Cost
 			usage := message.Metadata.Assistant.Tokens
 			if usage.Output > 0 {
-				tokens = (usage.Input + usage.Output + usage.Reasoning)
+				tokens = (usage.Input +
+					usage.Cache.Write +
+					usage.Cache.Read +
+					usage.Output +
+					usage.Reasoning)
 			}
 		}
 	}

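For reference, a minimal standalone sketch of the new total, assuming a usage shape like the one read above; the tokenUsage type, the totalTokens helper, and the sample numbers are illustrative, not the TUI's actual code:

package main

import "fmt"

// tokenUsage mirrors the fields referenced in the hunk above; the type
// itself is a sketch, not the real definition from the TUI.
type tokenUsage struct {
	Input     float32
	Output    float32
	Reasoning float32
	Cache     struct {
		Read  float32
		Write float32
	}
}

// totalTokens reproduces the new sum: cache writes and reads are now
// counted alongside input, output, and reasoning tokens.
func totalTokens(u tokenUsage) float32 {
	return u.Input + u.Cache.Write + u.Cache.Read + u.Output + u.Reasoning
}

func main() {
	u := tokenUsage{Input: 1200, Output: 300, Reasoning: 50}
	u.Cache.Read = 8000
	u.Cache.Write = 400
	fmt.Println(totalTokens(u)) // 9950 with cached tokens vs. 1550 before this change
}
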
@@ -782,12 +782,28 @@
         },
         "reasoning": {
           "type": "number"
+        },
+        "cache": {
+          "type": "object",
+          "properties": {
+            "read": {
+              "type": "number"
+            },
+            "write": {
+              "type": "number"
+            }
+          },
+          "required": [
+            "read",
+            "write"
+          ]
         }
       },
       "required": [
         "input",
         "output",
-        "reasoning"
+        "reasoning",
+        "cache"
       ]
     }
   },

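A tokens payload that satisfies the extended schema now has to carry a cache object with read and write counts next to input, output, and reasoning. A hedged sketch of a conforming payload decoded in Go (the numbers and the presence check are illustrative; a real JSON Schema validator would enforce the full required lists):

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Example payload shaped like the extended schema: "cache" with
	// "read" and "write" now sits alongside input/output/reasoning.
	payload := []byte(`{
		"input": 1200,
		"output": 300,
		"reasoning": 50,
		"cache": {"read": 8000, "write": 400}
	}`)

	var tokens map[string]any
	if err := json.Unmarshal(payload, &tokens); err != nil {
		panic(err)
	}

	// Rough presence check for the newly required key.
	for _, key := range []string{"input", "output", "reasoning", "cache"} {
		if _, ok := tokens[key]; !ok {
			fmt.Println("missing required field:", key)
		}
	}
	fmt.Println("cache:", tokens["cache"])
}
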
@@ -126,6 +126,10 @@ type MessageInfo struct {
 	Summary *bool    `json:"summary,omitempty"`
 	System  []string `json:"system"`
 	Tokens  struct {
+		Cache struct {
+			Read  float32 `json:"read"`
+			Write float32 `json:"write"`
+		} `json:"cache"`
 		Input     float32 `json:"input"`
 		Output    float32 `json:"output"`
 		Reasoning float32 `json:"reasoning"`
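
To see the new Cache field in use, a small sketch that decodes a metadata payload into the same nested struct shape and folds the cached tokens into the displayed total; only the Tokens portion of MessageInfo is copied here, and the raw JSON is made up:

package main

import (
	"encoding/json"
	"fmt"
)

// messageTokens copies the Tokens struct from the hunk above so the
// example compiles on its own; the enclosing MessageInfo is omitted.
type messageTokens struct {
	Cache struct {
		Read  float32 `json:"read"`
		Write float32 `json:"write"`
	} `json:"cache"`
	Input     float32 `json:"input"`
	Output    float32 `json:"output"`
	Reasoning float32 `json:"reasoning"`
}

func main() {
	raw := []byte(`{"input":1200,"output":300,"reasoning":50,"cache":{"read":8000,"write":400}}`)

	var t messageTokens
	if err := json.Unmarshal(raw, &t); err != nil {
		panic(err)
	}

	// The status bar now includes cache reads and writes in the total.
	total := t.Input + t.Cache.Write + t.Cache.Read + t.Output + t.Reasoning
	fmt.Printf("cache read=%.0f write=%.0f total=%.0f\n", t.Cache.Read, t.Cache.Write, total)
}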