Merge branch 'dev' into opentui

This commit is contained in:
Dax Raad 2025-10-15 19:49:10 -04:00
commit bade3cb474
10 changed files with 179 additions and 70 deletions

View file

@ -76,7 +76,7 @@ Take a look at the git history to see what kind of PRs we end up merging.
To run OpenCode locally you need:
- Bun
- Bun 1.3 or higher
- Golang 1.24.x
And run:

View file

@ -108,3 +108,4 @@
| 2025-10-11 | 488,427 (+4,053) | 414,699 (+8,684) | 903,126 (+12,737) |
| 2025-10-12 | 492,125 (+3,698) | 418,745 (+4,046) | 910,870 (+7,744) |
| 2025-10-14 | 505,130 (+13,005) | 429,286 (+10,541) | 934,416 (+23,546) |
| 2025-10-15 | 512,717 (+7,587) | 439,290 (+10,004) | 952,007 (+17,591) |

View file

@ -22,7 +22,7 @@ const getModelsInfo = query(async (workspaceID: string) => {
return withActor(async () => {
return {
all: Object.entries(ZenModel.list())
.filter(([id, _model]) => !["claude-3-5-haiku", "qwen3-max"].includes(id))
.filter(([id, _model]) => !["claude-3-5-haiku"].includes(id))
.filter(([id, _model]) => !id.startsWith("an-"))
.sort(([_idA, modelA], [_idB, modelB]) => modelA.name.localeCompare(modelB.name))
.map(([id, model]) => ({ id, name: model.name })),

View file

@ -120,12 +120,17 @@ export namespace Provider {
break
}
case "ap": {
const modelRequiresPrefix = ["claude", "nova-lite", "nova-micro", "nova-pro"].some((m) =>
modelID.includes(m),
)
if (modelRequiresPrefix) {
regionPrefix = "apac"
modelID = `${regionPrefix}.${modelID}`
const isAustraliaRegion = ["ap-southeast-2", "ap-southeast-4"].includes(region)
if (isAustraliaRegion && modelID.startsWith("anthropic.claude-sonnet-4-5")) {
modelID = `au.${modelID}`
} else {
const modelRequiresPrefix = ["claude", "nova-lite", "nova-micro", "nova-pro"].some((m) =>
modelID.includes(m),
)
if (modelRequiresPrefix) {
regionPrefix = "apac"
modelID = `${regionPrefix}.${modelID}`
}
}
break
}
@ -265,17 +270,17 @@ export namespace Provider {
cost:
!model.cost && !existing?.cost
? {
input: 0,
output: 0,
cache_read: 0,
cache_write: 0,
}
input: 0,
output: 0,
cache_read: 0,
cache_write: 0,
}
: {
cache_read: 0,
cache_write: 0,
...existing?.cost,
...model.cost,
},
cache_read: 0,
cache_write: 0,
...existing?.cost,
...model.cost,
},
options: {
...existing?.options,
...model.options,

View file

@ -33,6 +33,8 @@ import { lazy } from "../util/lazy"
import { Todo } from "../session/todo"
import { InstanceBootstrap } from "../project/bootstrap"
import { MCP } from "../mcp"
import { Storage } from "../storage/storage"
import type { ContentfulStatusCode } from "hono/utils/http-status"
const ERRORS = {
400: {
@ -42,17 +44,33 @@ const ERRORS = {
schema: resolver(
z
.object({
data: z.record(z.string(), z.any()),
data: z.any().nullable(),
errors: z.array(z.record(z.string(), z.any())),
success: z.literal(false),
})
.meta({
ref: "Error",
ref: "BadRequestError",
}),
),
},
},
},
404: {
description: "Not found",
content: {
"application/json": {
schema: resolver(
Storage.NotFoundError.Schema
)
},
},
},
} as const
// Build a response-spec object containing only the shared error definitions
// for the requested HTTP status codes (spread into each route's `responses`).
function errors(...codes: number[]) {
  const selected = codes.map((code) => [code, ERRORS[code as keyof typeof ERRORS]])
  return Object.fromEntries(selected)
}
export namespace Server {
const log = Log.create({ service: "server" })
@ -68,13 +86,18 @@ export namespace Server {
error: err,
})
if (err instanceof NamedError) {
return c.json(err.toObject(), {
status: 400,
})
let status: ContentfulStatusCode
if (err instanceof Storage.NotFoundError)
status = 404
else if (err instanceof Provider.ModelNotFoundError)
status = 400
else
status = 500
return c.json(err.toObject(), { status })
}
const message = err instanceof Error && err.stack ? err.stack : err.toString()
return c.json(new NamedError.Unknown({ message }).toObject(), {
status: 400,
status: 500,
})
})
.use(async (c, next) => {
@ -153,7 +176,7 @@ export namespace Server {
},
},
},
...ERRORS,
...errors(400),
},
}),
validator("json", Config.Info),
@ -177,7 +200,7 @@ export namespace Server {
},
},
},
...ERRORS,
...errors(400),
},
}),
async (c) => {
@ -210,7 +233,7 @@ export namespace Server {
},
},
},
...ERRORS,
...errors(400),
},
}),
validator(
@ -305,6 +328,7 @@ export namespace Server {
},
},
},
...errors(400, 404),
},
}),
validator(
@ -333,6 +357,7 @@ export namespace Server {
},
},
},
...errors(400, 404),
},
}),
validator(
@ -361,6 +386,7 @@ export namespace Server {
},
},
},
...errors(400, 404),
},
}),
validator(
@ -381,7 +407,7 @@ export namespace Server {
description: "Create a new session",
operationId: "session.create",
responses: {
...ERRORS,
...errors(400),
200: {
description: "Successfully created session",
content: {
@ -413,6 +439,7 @@ export namespace Server {
},
},
},
...errors(400, 404),
},
}),
validator(
@ -440,6 +467,7 @@ export namespace Server {
},
},
},
...errors(400, 404),
},
}),
validator(
@ -481,6 +509,7 @@ export namespace Server {
},
},
},
...errors(400, 404),
},
}),
validator(
@ -541,6 +570,7 @@ export namespace Server {
},
},
},
...errors(400, 404),
},
}),
validator(
@ -567,6 +597,7 @@ export namespace Server {
},
},
},
...errors(400, 404),
},
}),
validator(
@ -596,6 +627,7 @@ export namespace Server {
},
},
},
...errors(400, 404),
},
}),
validator(
@ -625,6 +657,7 @@ export namespace Server {
},
},
},
...errors(400, 404),
},
}),
validator(
@ -661,6 +694,7 @@ export namespace Server {
},
},
},
...errors(400, 404),
},
}),
validator(
@ -693,6 +727,7 @@ export namespace Server {
},
},
},
...errors(400, 404),
},
}),
validator(
@ -727,6 +762,7 @@ export namespace Server {
},
},
},
...errors(400, 404),
},
}),
validator(
@ -762,6 +798,7 @@ export namespace Server {
},
},
},
...errors(400, 404),
},
}),
validator(
@ -792,6 +829,7 @@ export namespace Server {
},
},
},
...errors(400, 404),
},
}),
validator(
@ -822,6 +860,7 @@ export namespace Server {
},
},
},
...errors(400, 404),
},
}),
validator(
@ -852,6 +891,7 @@ export namespace Server {
},
},
},
...errors(400, 404),
},
}),
validator(
@ -879,6 +919,7 @@ export namespace Server {
},
},
},
...errors(400, 404),
},
}),
validator(
@ -1132,6 +1173,7 @@ export namespace Server {
},
},
},
...errors(400),
},
}),
validator(
@ -1243,6 +1285,7 @@ export namespace Server {
},
},
},
...errors(400),
},
}),
validator(
@ -1375,6 +1418,7 @@ export namespace Server {
},
},
},
...errors(400),
},
}),
validator(
@ -1426,7 +1470,7 @@ export namespace Server {
},
},
},
...ERRORS,
...errors(400),
},
}),
validator(

View file

@ -1,4 +1,4 @@
import { generateText, type ModelMessage } from "ai"
import { streamText, type ModelMessage } from "ai"
import { Session } from "."
import { Identifier } from "../id/id"
import { Instance } from "../project/instance"
@ -123,7 +123,19 @@ export namespace SessionCompaction {
created: Date.now(),
},
})) as MessageV2.Assistant
const generated = await generateText({
const part = (await Session.updatePart({
type: "text",
sessionID: input.sessionID,
messageID: msg.id,
id: Identifier.ascending("part"),
text: "",
time: {
start: Date.now(),
},
})) as MessageV2.TextPart
let summaryText = ""
const stream = streamText({
maxRetries: 10,
model: model.language,
providerOptions: {
@ -148,23 +160,35 @@ export namespace SessionCompaction {
},
],
})
const usage = Session.getUsage({ model: model.info, usage: generated.usage, metadata: generated.providerMetadata })
msg.cost += usage.cost
msg.tokens = usage.tokens
msg.summary = true
msg.time.completed = Date.now()
await Session.updateMessage(msg)
const part = await Session.updatePart({
type: "text",
sessionID: input.sessionID,
messageID: msg.id,
id: Identifier.ascending("part"),
text: generated.text,
time: {
start: Date.now(),
end: Date.now(),
},
})
for await (const value of stream.fullStream) {
switch (value.type) {
case "text-delta":
summaryText += value.text
await Session.updatePart({
...part,
text: summaryText,
})
break
case "text-end":
part.text = summaryText
await Session.updatePart({
...part,
})
break
case "finish": {
const usage = Session.getUsage({ model: model.info, usage: value.totalUsage, metadata: undefined })
msg.cost += usage.cost
msg.tokens = usage.tokens
msg.summary = true
msg.time.completed = Date.now()
await Session.updateMessage(msg)
part.time!.end = Date.now()
await Session.updatePart(part)
break
}
}
}
Bus.publish(Event.Compacted, {
sessionID: input.sessionID,

View file

@ -518,6 +518,8 @@ export namespace MessageV2 {
}
if (msg.info.role === "assistant") {
const hasEmptyTextPart = msg.parts.some((part) => part.type === "text" && part.text.trim() === "")
if (hasEmptyTextPart) continue
result.push({
id: msg.info.id,
role: "assistant",

View file

@ -29,6 +29,8 @@ export namespace Todo {
}
// Read the todo list for a session; resolves to [] when the list is
// missing (storage NotFoundError) or the stored value is falsy.
export async function get(sessionID: string) {
  // NOTE: `Storage.read(...) ?? []` would be wrong here — the call returns a
  // Promise, which is never nullish, so the fallback must be applied to the
  // resolved value and to rejections instead.
  return Storage.read<Info[]>(["todo", sessionID])
    .then((x) => x || [])
    .catch(() => [])
}
}

View file

@ -5,12 +5,21 @@ import { Global } from "../global"
import { lazy } from "../util/lazy"
import { Lock } from "../util/lock"
import { $ } from "bun"
import { NamedError } from "@/util/error"
import z from "zod"
export namespace Storage {
const log = Log.create({ service: "storage" })
type Migration = (dir: string) => Promise<void>
// Error raised when a storage operation targets a key whose backing JSON
// file does not exist (mapped from ENOENT in withErrorHandling); the HTTP
// server maps this error type to a 404 response.
export const NotFoundError = NamedError.create(
"NotFoundError",
z.object({
message: z.string(),
}),
)
const MIGRATIONS: Migration[] = [
async (dir) => {
const project = path.resolve(dir, "../project")
@ -131,31 +140,51 @@ export namespace Storage {
// Delete the JSON file backing `key`. Best-effort: a failed unlink (e.g.
// file already gone) is swallowed by the inner catch, so withErrorHandling
// only surfaces unexpected errors.
export async function remove(key: string[]) {
  const dir = await state().then((x) => x.dir)
  const target = path.join(dir, ...key) + ".json"
  // Single wrapped unlink — the unwrapped duplicate call before the
  // withErrorHandling block was leftover pre-refactor code.
  return withErrorHandling(async () => {
    await fs.unlink(target).catch(() => {})
  })
}
// Read and parse the JSON file backing `key` under a read lock.
// Throws Storage.NotFoundError (via withErrorHandling) when the file is
// missing; the caller is responsible for choosing a fallback.
export async function read<T>(key: string[]) {
  const dir = await state().then((x) => x.dir)
  const target = path.join(dir, ...key) + ".json"
  // The read must happen inside withErrorHandling so ENOENT is translated
  // to NotFoundError — an early unwrapped return would bypass that mapping.
  return withErrorHandling(async () => {
    using _ = await Lock.read(target)
    return Bun.file(target).json() as Promise<T>
  })
}
// Read-modify-write the JSON file backing `key` under the global "storage"
// write lock. `fn` mutates the parsed draft in place; the mutated content is
// persisted and returned. Missing files surface as NotFoundError.
export async function update<T>(key: string[], fn: (draft: T) => void) {
  const dir = await state().then((x) => x.dir)
  const target = path.join(dir, ...key) + ".json"
  // The entire read-mutate-write cycle runs exactly once, inside the lock
  // and the error-translating wrapper — not duplicated outside it.
  return withErrorHandling(async () => {
    using _ = await Lock.write("storage")
    const content = await Bun.file(target).json()
    fn(content)
    await Bun.write(target, JSON.stringify(content, null, 2))
    return content as T
  })
}
// Serialize `content` as pretty-printed JSON to the file backing `key`,
// under the global "storage" write lock.
export async function write<T>(key: string[], content: T) {
  const dir = await state().then((x) => x.dir)
  const target = path.join(dir, ...key) + ".json"
  // Write once, inside the lock and error wrapper — the unwrapped duplicate
  // write before the withErrorHandling block was leftover pre-refactor code.
  return withErrorHandling(async () => {
    using _ = await Lock.write("storage")
    await Bun.write(target, JSON.stringify(content, null, 2))
  })
}
// Run `body`, translating an ENOENT filesystem error into the storage
// layer's NotFoundError; every other rejection is re-thrown untouched.
async function withErrorHandling<T>(body: () => Promise<T>) {
  return body().catch((e) => {
    if (e instanceof Error) {
      const errno = e as NodeJS.ErrnoException
      if (errno.code === "ENOENT") {
        throw new NotFoundError({ message: `Resource not found: ${errno.path}` })
      }
    }
    throw e
  })
}
const glob = new Bun.Glob("**/*")

View file

@ -62,17 +62,18 @@ You are charged per request and you can add credits to your account.
You can also access our models through the following API endpoints.
| Model | Model ID | Endpoint | AI SDK Package |
| ---------------- | ---------------- | --------------------------------------------- | --------------------------- |
| GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| Claude Sonnet 4.5 | claude-sonnet-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` |
| Claude Sonnet 4 | claude-sonnet-4 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` |
| Claude Haiku 3.5 | claude-3-5-haiku | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` |
| Claude Opus 4.1 | claude-opus-4-1 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` |
| Qwen3 Coder 480B | qwen3-coder | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
| Grok Code Fast 1 | grok-code | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
| Kimi K2 | kimi-k2 | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
| Model | Model ID | Endpoint | AI SDK Package |
| ----------------- | ----------------- | --------------------------------------------- | --------------------------- |
| GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| Claude Sonnet 4.5 | claude-sonnet-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` |
| Claude Sonnet 4 | claude-sonnet-4 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` |
| Claude Haiku 4.5 | claude-haiku-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` |
| Claude Haiku 3.5 | claude-3-5-haiku | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` |
| Claude Opus 4.1 | claude-opus-4-1 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` |
| Qwen3 Coder 480B | qwen3-coder | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
| Grok Code Fast 1 | grok-code | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
| Kimi K2 | kimi-k2 | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
The [model id](/docs/config/#models) in your OpenCode config
uses the format `opencode/<model-id>`. For example, for GPT 5 Codex, you would
@ -94,6 +95,7 @@ We support a pay-as-you-go model. Below are the prices **per 1M tokens**.
| Claude Sonnet 4.5 (> 200K tokens) | $6.00 | $22.50 | $0.60 | $7.50 |
| Claude Sonnet 4 (≤ 200K tokens) | $3.00 | $15.00 | $0.30 | $3.75 |
| Claude Sonnet 4 (> 200K tokens) | $6.00 | $22.50 | $0.60 | $7.50 |
| Claude Haiku 4.5 | $1.00 | $5.00 | $0.10 | $1.25 |
| Claude Haiku 3.5 | $0.80 | $4.00 | $0.08 | $1.00 |
| Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 |
| GPT 5 | $1.25 | $10.00 | $0.125 | - |