Merge branch 'dev' into opentui

Dax Raad 2025-10-28 14:30:28 -04:00
commit c877e911f0
129 changed files with 5172 additions and 1465 deletions

View file

@ -118,3 +118,7 @@
| 2025-10-22 | 557,949 (+9,228) | 491,395 (+11,692) | 1,049,344 (+20,920) |
| 2025-10-23 | 564,716 (+6,767) | 498,736 (+7,341) | 1,063,452 (+14,108) |
| 2025-10-24 | 572,692 (+7,976) | 506,905 (+8,169) | 1,079,597 (+16,145) |
| 2025-10-25 | 578,927 (+6,235) | 516,129 (+9,224) | 1,095,056 (+15,459) |
| 2025-10-26 | 584,409 (+5,482) | 521,179 (+5,050) | 1,105,588 (+10,532) |
| 2025-10-27 | 589,999 (+5,590) | 526,001 (+4,822) | 1,116,000 (+10,412) |
| 2025-10-28 | 595,776 (+5,777) | 532,438 (+6,437) | 1,128,214 (+12,214) |

121 bun.lock
View file

@ -39,7 +39,7 @@
},
"packages/console/core": {
"name": "@opencode-ai/console-core",
"version": "0.15.16",
"version": "0.15.20",
"dependencies": {
"@aws-sdk/client-sts": "3.782.0",
"@jsx-email/render": "1.1.1",
@ -66,7 +66,7 @@
},
"packages/console/function": {
"name": "@opencode-ai/console-function",
"version": "0.15.16",
"version": "0.15.20",
"dependencies": {
"@ai-sdk/anthropic": "2.0.0",
"@ai-sdk/openai": "2.0.2",
@ -90,7 +90,7 @@
},
"packages/console/mail": {
"name": "@opencode-ai/console-mail",
"version": "0.15.16",
"version": "0.15.20",
"dependencies": {
"@jsx-email/all": "2.2.3",
"@jsx-email/cli": "1.4.3",
@ -111,12 +111,12 @@
},
"packages/desktop": {
"name": "@opencode-ai/desktop",
"version": "0.15.16",
"version": "0.15.20",
"dependencies": {
"@kobalte/core": "catalog:",
"@opencode-ai/sdk": "workspace:*",
"@opencode-ai/ui": "workspace:*",
"@pierre/precision-diffs": "catalog:",
"@pierre/precision-diffs": "0.3.5",
"@shikijs/transformers": "3.9.2",
"@solid-primitives/active-element": "2.1.3",
"@solid-primitives/event-bus": "1.1.2",
@ -143,6 +143,7 @@
"@types/luxon": "3.7.1",
"@types/node": "catalog:",
"@typescript/native-preview": "catalog:",
"opencode": "workspace:*",
"typescript": "catalog:",
"vite": "catalog:",
"vite-plugin-icons-spritesheet": "3.0.1",
@ -151,7 +152,7 @@
},
"packages/function": {
"name": "@opencode-ai/function",
"version": "0.15.16",
"version": "0.15.20",
"dependencies": {
"@octokit/auth-app": "8.0.1",
"@octokit/rest": "22.0.0",
@ -167,7 +168,7 @@
},
"packages/opencode": {
"name": "opencode",
"version": "0.15.16",
"version": "0.15.20",
"bin": {
"opencode": "./bin/opencode",
},
@ -243,7 +244,7 @@
},
"packages/plugin": {
"name": "@opencode-ai/plugin",
"version": "0.15.16",
"version": "0.15.20",
"dependencies": {
"@opencode-ai/sdk": "workspace:*",
"zod": "catalog:",
@ -263,7 +264,7 @@
},
"packages/sdk/js": {
"name": "@opencode-ai/sdk",
"version": "0.15.16",
"version": "0.15.20",
"devDependencies": {
"@hey-api/openapi-ts": "0.81.0",
"@tsconfig/node22": "catalog:",
@ -274,7 +275,7 @@
},
"packages/slack": {
"name": "@opencode-ai/slack",
"version": "0.15.16",
"version": "0.15.20",
"dependencies": {
"@opencode-ai/sdk": "workspace:*",
"@slack/bolt": "^3.17.1",
@ -287,10 +288,10 @@
},
"packages/ui": {
"name": "@opencode-ai/ui",
"version": "0.15.16",
"version": "0.15.20",
"dependencies": {
"@kobalte/core": "catalog:",
"@pierre/precision-diffs": "0.0.2-alpha.1-1",
"@pierre/precision-diffs": "catalog:",
"@solidjs/meta": "catalog:",
"fuzzysort": "catalog:",
"luxon": "catalog:",
@ -310,7 +311,7 @@
},
"packages/web": {
"name": "@opencode-ai/web",
"version": "0.15.16",
"version": "0.15.20",
"dependencies": {
"@astrojs/cloudflare": "12.6.3",
"@astrojs/markdown-remark": "6.3.1",
@ -357,7 +358,7 @@
"@hono/zod-validator": "0.4.2",
"@kobalte/core": "0.13.11",
"@openauthjs/openauth": "0.0.0-20250322224806",
"@pierre/precision-diffs": "0.0.2-alpha.1-1",
"@pierre/precision-diffs": "0.3.2",
"@solidjs/meta": "0.29.4",
"@tailwindcss/vite": "4.1.11",
"@tsconfig/bun": "1.0.9",
@ -426,7 +427,7 @@
"@astrojs/markdown-remark": ["@astrojs/markdown-remark@6.3.1", "", { "dependencies": { "@astrojs/internal-helpers": "0.6.1", "@astrojs/prism": "3.2.0", "github-slugger": "^2.0.0", "hast-util-from-html": "^2.0.3", "hast-util-to-text": "^4.0.2", "import-meta-resolve": "^4.1.0", "js-yaml": "^4.1.0", "mdast-util-definitions": "^6.0.0", "rehype-raw": "^7.0.0", "rehype-stringify": "^10.0.1", "remark-gfm": "^4.0.1", "remark-parse": "^11.0.0", "remark-rehype": "^11.1.1", "remark-smartypants": "^3.0.2", "shiki": "^3.0.0", "smol-toml": "^1.3.1", "unified": "^11.0.5", "unist-util-remove-position": "^5.0.0", "unist-util-visit": "^5.0.0", "unist-util-visit-parents": "^6.0.1", "vfile": "^6.0.3" } }, "sha512-c5F5gGrkczUaTVgmMW9g1YMJGzOtRvjjhw6IfGuxarM6ct09MpwysP10US729dy07gg8y+ofVifezvP3BNsWZg=="],
"@astrojs/mdx": ["@astrojs/mdx@4.3.8", "", { "dependencies": { "@astrojs/markdown-remark": "6.3.8", "@mdx-js/mdx": "^3.1.1", "acorn": "^8.15.0", "es-module-lexer": "^1.7.0", "estree-util-visit": "^2.0.0", "hast-util-to-html": "^9.0.5", "picocolors": "^1.1.1", "rehype-raw": "^7.0.0", "remark-gfm": "^4.0.1", "remark-smartypants": "^3.0.2", "source-map": "^0.7.6", "unist-util-visit": "^5.0.0", "vfile": "^6.0.3" }, "peerDependencies": { "astro": "^5.0.0" } }, "sha512-PXT0n2FfZAWEmQi4u4AZ0OPDDrDIF+aXPZGT5HCf52dex5EV3htMByeJUqYIoXdmazAFTASub0vRZLWBqJhJ9w=="],
"@astrojs/mdx": ["@astrojs/mdx@4.3.9", "", { "dependencies": { "@astrojs/markdown-remark": "6.3.8", "@mdx-js/mdx": "^3.1.1", "acorn": "^8.15.0", "es-module-lexer": "^1.7.0", "estree-util-visit": "^2.0.0", "hast-util-to-html": "^9.0.5", "picocolors": "^1.1.1", "rehype-raw": "^7.0.0", "remark-gfm": "^4.0.1", "remark-smartypants": "^3.0.2", "source-map": "^0.7.6", "unist-util-visit": "^5.0.0", "vfile": "^6.0.3" }, "peerDependencies": { "astro": "^5.0.0" } }, "sha512-80LHiM4z3FxAjATHNgFpa8nlTNSprAWB4UUKnr/QG56Pwk7uRnJWrXlok4wSCi/3fg8kTZ98A408Q91M+iqJdw=="],
"@astrojs/prism": ["@astrojs/prism@3.2.0", "", { "dependencies": { "prismjs": "^1.29.0" } }, "sha512-GilTHKGCW6HMq7y3BUv9Ac7GMe/MO9gi9GW62GzKtth0SwukCu/qp2wLiGpEujhY+VVhaG9v7kv/5vFzvf4NYw=="],
@ -958,6 +959,8 @@
"@opentui/core-darwin-arm64": ["@opentui/core-darwin-arm64@0.1.31", "", { "os": "darwin", "cpu": "arm64" }, "sha512-irsQW6XUAwJ5YkWH3OHrAD3LX7MN36RWkNQbUh2/pYCRUa4+bdsh6esFv7eXnDt/fUKAQ+tNtw/6jCo7I3TXMw=="],
"@opentui/core-darwin-x64": ["@opentui/core-darwin-x64@0.1.31", "", { "os": "darwin", "cpu": "x64" }, "sha512-MDxfSloyrl/AzTIgUvEQm61MHSG753f8UzKdg+gZTzUHb7kWwpPfYrzFAVwN9AnURVUMKvTzoFBZ61UxOSIarw=="],
"@opentui/core-linux-arm64": ["@opentui/core-linux-arm64@0.1.31", "", { "os": "linux", "cpu": "arm64" }, "sha512-x+/F3lIsn7aHTqugO5hvdHjwILs/p92P+lAGCK9iBkEX20gTk9dOc6IUpC8iy0eNUJyCjYAilkWtAVIbS+S47Q=="],
"@opentui/core-linux-x64": ["@opentui/core-linux-x64@0.1.31", "", { "os": "linux", "cpu": "x64" }, "sha512-sjDrN4KIT305dycX5A50jNPCcf7nVLKGkJwY7g4x+eWuOItbRCfChr3CyniABDbUlJkPiB8/tvbM/7tID7mjqQ=="],
@ -1024,7 +1027,7 @@
"@petamoriken/float16": ["@petamoriken/float16@3.9.3", "", {}, "sha512-8awtpHXCx/bNpFt4mt2xdkgtgVvKqty8VbjHI/WWWQuEw+KLzFot3f4+LkQY9YmOtq7A5GdOnqoIC8Pdygjk2g=="],
"@pierre/precision-diffs": ["@pierre/precision-diffs@0.0.2-alpha.1-1", "", { "dependencies": { "@shikijs/core": "3.13.0", "@shikijs/transformers": "3.13.0", "diff": "8.0.2", "fast-deep-equal": "3.1.3", "hast-util-to-html": "9.0.5", "shiki": "3.13.0" } }, "sha512-T43cwB7gMnbM+tp9p73NptUm4uUOfmrP5ihMOAHWQPpzBa/oeTjqZlmEmSQLpT8WKKnWG0lbKZPtlw7l0gW0Vw=="],
"@pierre/precision-diffs": ["@pierre/precision-diffs@0.3.5", "", { "dependencies": { "@shikijs/core": "3.13.0", "@shikijs/transformers": "3.13.0", "diff": "8.0.2", "fast-deep-equal": "3.1.3", "hast-util-to-html": "9.0.5", "shiki": "3.13.0" }, "peerDependencies": { "react": "^18.3.1 || ^19.0.0", "react-dom": "^18.3.1 || ^19.0.0" } }, "sha512-qbotIS8CahO/7guljDzU3RVpDfg6WViWe0EB0/SZQi3xHD+nzxxlC+pGoyIFSn+47GG0EKxTnvkfaYANm19FCA=="],
"@pkgjs/parseargs": ["@pkgjs/parseargs@0.11.0", "", {}, "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg=="],
@ -1386,7 +1389,7 @@
"@types/estree-jsx": ["@types/estree-jsx@1.0.5", "", { "dependencies": { "@types/estree": "*" } }, "sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg=="],
"@types/express": ["@types/express@4.17.24", "", { "dependencies": { "@types/body-parser": "*", "@types/express-serve-static-core": "^4.17.33", "@types/qs": "*", "@types/serve-static": "*" } }, "sha512-Mbrt4SRlXSTWryOnHAh2d4UQ/E7n9lZyGSi6KgX+4hkuL9soYbLOVXVhnk/ODp12YsGc95f4pOvqywJ6kngUwg=="],
"@types/express": ["@types/express@4.17.25", "", { "dependencies": { "@types/body-parser": "*", "@types/express-serve-static-core": "^4.17.33", "@types/qs": "*", "@types/serve-static": "^1" } }, "sha512-dVd04UKsfpINUnK0yBoYHDF3xu7xVH4BuDotC/xGuycx4CgbP48X/KF/586bcObxT0HENHXEU8Nqtu6NR+eKhw=="],
"@types/express-serve-static-core": ["@types/express-serve-static-core@4.19.7", "", { "dependencies": { "@types/node": "*", "@types/qs": "*", "@types/range-parser": "*", "@types/send": "*" } }, "sha512-FvPtiIf1LfhzsaIXhv/PHan/2FeQBbtBDtfX2QfvPxdUelMDEckK08SM6nqo1MIZY3RUlfA+HV8+hFUSio78qg=="],
@ -1438,7 +1441,7 @@
"@types/scheduler": ["@types/scheduler@0.26.0", "", {}, "sha512-WFHp9YUJQ6CKshqoC37iOlHnQSmxNc795UhB26CyBBttrN9svdIrUjl/NjnNmfcwtncN0h/0PPAFWv9ovP8mLA=="],
"@types/send": ["@types/send@0.17.6", "", { "dependencies": { "@types/mime": "^1", "@types/node": "*" } }, "sha512-Uqt8rPBE8SY0RK8JB1EzVOIZ32uqy8HwdxCnoCOsYrvnswqmFZ/k+9Ikidlk/ImhsdvBsloHbAlewb2IEBV/Og=="],
"@types/send": ["@types/send@1.2.1", "", { "dependencies": { "@types/node": "*" } }, "sha512-arsCikDvlU99zl1g69TcAB3mzZPpxgw0UQnaHeC1Nwb015xp8bknZv5rIfri9xTOcMuaVgvabfIRA7PSZVuZIQ=="],
"@types/serve-static": ["@types/serve-static@1.15.10", "", { "dependencies": { "@types/http-errors": "*", "@types/node": "*", "@types/send": "<1" } }, "sha512-tRs1dB+g8Itk72rlSI2ZrW6vZg0YrLI81iQSTkMmOqnqCaNr/8Ek4VwWcN5vZgCYWbg/JJSGBlUaYGAOP73qBw=="],
@ -1580,7 +1583,7 @@
"aws4fetch": ["aws4fetch@1.0.20", "", {}, "sha512-/djoAN709iY65ETD6LKCtyyEI04XIBP5xVvfmNxsEP0uJB5tyaGBztSryRr4HqMStr9R06PisQE7m9zDTXKu6g=="],
"axios": ["axios@1.12.2", "", { "dependencies": { "follow-redirects": "^1.15.6", "form-data": "^4.0.4", "proxy-from-env": "^1.1.0" } }, "sha512-vMJzPewAlRyOgxV2dU0Cuz2O8zzzx9VYtbJOaBgXFeLc4IV/Eg50n4LowmehOOR61S8ZMpc2K5Sa7g6A4jfkUw=="],
"axios": ["axios@1.13.0", "", { "dependencies": { "follow-redirects": "^1.15.6", "form-data": "^4.0.4", "proxy-from-env": "^1.1.0" } }, "sha512-zt40Pz4zcRXra9CVV31KeyofwiNvAbJ5B6YPz9pMJ+yOSLikvPT4Yi5LjfgjRa9CawVYBaD1JQzIVcIvBejKeA=="],
"axobject-query": ["axobject-query@4.1.0", "", {}, "sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ=="],
@ -1588,11 +1591,11 @@
"babel-dead-code-elimination": ["babel-dead-code-elimination@1.0.10", "", { "dependencies": { "@babel/core": "^7.23.7", "@babel/parser": "^7.23.6", "@babel/traverse": "^7.23.7", "@babel/types": "^7.23.6" } }, "sha512-DV5bdJZTzZ0zn0DC24v3jD7Mnidh6xhKa4GfKCbq3sfW8kaWhDdZjP3i81geA8T33tdYqWKw4D3fVv0CwEgKVA=="],
"babel-plugin-jsx-dom-expressions": ["babel-plugin-jsx-dom-expressions@0.40.1", "", { "dependencies": { "@babel/helper-module-imports": "7.18.6", "@babel/plugin-syntax-jsx": "^7.18.6", "@babel/types": "^7.20.7", "html-entities": "2.3.3", "parse5": "^7.1.2", "validate-html-nesting": "^1.2.1" }, "peerDependencies": { "@babel/core": "^7.20.12" } }, "sha512-b4iHuirqK7RgaMzB2Lsl7MqrlDgQtVRSSazyrmx7wB3T759ggGjod5Rkok5MfHjQXhR7tRPmdwoeGPqBnW2KfA=="],
"babel-plugin-jsx-dom-expressions": ["babel-plugin-jsx-dom-expressions@0.40.3", "", { "dependencies": { "@babel/helper-module-imports": "7.18.6", "@babel/plugin-syntax-jsx": "^7.18.6", "@babel/types": "^7.20.7", "html-entities": "2.3.3", "parse5": "^7.1.2" }, "peerDependencies": { "@babel/core": "^7.20.12" } }, "sha512-5HOwwt0BYiv/zxl7j8Pf2bGL6rDXfV6nUhLs8ygBX+EFJXzBPHM/euj9j/6deMZ6wa52Wb2PBaAV5U/jKwIY1w=="],
"babel-plugin-module-resolver": ["babel-plugin-module-resolver@5.0.2", "", { "dependencies": { "find-babel-config": "^2.1.1", "glob": "^9.3.3", "pkg-up": "^3.1.0", "reselect": "^4.1.7", "resolve": "^1.22.8" } }, "sha512-9KtaCazHee2xc0ibfqsDeamwDps6FZNo5S0Q81dUqEuFzVwPhcT4J5jOqIVvgCA3Q/wO9hKYxN/Ds3tIsp5ygg=="],
"babel-preset-solid": ["babel-preset-solid@1.9.9", "", { "dependencies": { "babel-plugin-jsx-dom-expressions": "^0.40.1" }, "peerDependencies": { "@babel/core": "^7.0.0", "solid-js": "^1.9.8" }, "optionalPeers": ["solid-js"] }, "sha512-pCnxWrciluXCeli/dj5PIEHgbNzim3evtTn12snjqqg8QZWJNMjH1AWIp4iG/tbVjqQ72aBEymMSagvmgxubXw=="],
"babel-preset-solid": ["babel-preset-solid@1.9.10", "", { "dependencies": { "babel-plugin-jsx-dom-expressions": "^0.40.3" }, "peerDependencies": { "@babel/core": "^7.0.0", "solid-js": "^1.9.10" }, "optionalPeers": ["solid-js"] }, "sha512-HCelrgua/Y+kqO8RyL04JBWS/cVdrtUv/h45GntgQY+cJl4eBcKkCDV3TdMjtKx1nXwRaR9QXslM/Npm1dxdZQ=="],
"bail": ["bail@2.0.2", "", {}, "sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw=="],
@ -1910,7 +1913,7 @@
"ee-first": ["ee-first@1.1.1", "", {}, "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow=="],
"electron-to-chromium": ["electron-to-chromium@1.5.240", "", {}, "sha512-OBwbZjWgrCOH+g6uJsA2/7Twpas2OlepS9uvByJjR2datRDuKGYeD+nP8lBBks2qnB7bGJNHDUx7c/YLaT3QMQ=="],
"electron-to-chromium": ["electron-to-chromium@1.5.241", "", {}, "sha512-ILMvKX/ZV5WIJzzdtuHg8xquk2y0BOGlFOxBVwTpbiXqWIH0hamG45ddU4R3PQ0gYu+xgo0vdHXHli9sHIGb4w=="],
"emoji-regex": ["emoji-regex@10.6.0", "", {}, "sha512-toUI84YS5YmxW219erniWD0CIVOo46xGKColeNQRgOzDorgBi1v4D71/OFzgD9GO2UGKIv1C3Sp8DAn0+j5w7A=="],
@ -2694,7 +2697,7 @@
"neotraverse": ["neotraverse@0.6.18", "", {}, "sha512-Z4SmBUweYa09+o6pG+eASabEpP6QkQ70yHj351pQoEXIs8uHbaU2DWVmzBANKgflPa47A50PtB2+NgRpQvr7vA=="],
"nitropack": ["nitropack@2.12.8", "", { "dependencies": { "@cloudflare/kv-asset-handler": "^0.4.0", "@rollup/plugin-alias": "^5.1.1", "@rollup/plugin-commonjs": "^28.0.8", "@rollup/plugin-inject": "^5.0.5", "@rollup/plugin-json": "^6.1.0", "@rollup/plugin-node-resolve": "^16.0.3", "@rollup/plugin-replace": "^6.0.2", "@rollup/plugin-terser": "^0.4.4", "@vercel/nft": "^0.30.3", "archiver": "^7.0.1", "c12": "^3.3.1", "chokidar": "^4.0.3", "citty": "^0.1.6", "compatx": "^0.2.0", "confbox": "^0.2.2", "consola": "^3.4.2", "cookie-es": "^2.0.0", "croner": "^9.1.0", "crossws": "^0.3.5", "db0": "^0.3.4", "defu": "^6.1.4", "destr": "^2.0.5", "dot-prop": "^10.1.0", "esbuild": "^0.25.11", "escape-string-regexp": "^5.0.0", "etag": "^1.8.1", "exsolve": "^1.0.7", "globby": "^15.0.0", "gzip-size": "^7.0.0", "h3": "^1.15.4", "hookable": "^5.5.3", "httpxy": "^0.1.7", "ioredis": "^5.8.2", "jiti": "^2.6.1", "klona": "^2.0.6", "knitwork": "^1.2.0", "listhen": "^1.9.0", "magic-string": "^0.30.19", "magicast": "^0.3.5", "mime": "^4.1.0", "mlly": "^1.8.0", "node-fetch-native": "^1.6.7", "node-mock-http": "^1.0.3", "ofetch": "^1.4.1", "ohash": "^2.0.11", "pathe": "^2.0.3", "perfect-debounce": "^2.0.0", "pkg-types": "^2.3.0", "pretty-bytes": "^7.1.0", "radix3": "^1.1.2", "rollup": "^4.52.5", "rollup-plugin-visualizer": "^6.0.5", "scule": "^1.3.0", "semver": "^7.7.3", "serve-placeholder": "^2.0.2", "serve-static": "^2.2.0", "source-map": "^0.7.6", "std-env": "^3.10.0", "ufo": "^1.6.1", "ultrahtml": "^1.6.0", "uncrypto": "^0.1.3", "unctx": "^2.4.1", "unenv": "2.0.0-rc.21", "unimport": "^5.5.0", "unplugin-utils": "^0.3.1", "unstorage": "^1.17.1", "untyped": "^2.0.0", "unwasm": "^0.3.11", "youch": "4.1.0-beta.11", "youch-core": "^0.3.3" }, "peerDependencies": { "xml2js": "^0.6.2" }, "optionalPeers": ["xml2js"], "bin": { "nitro": "dist/cli/index.mjs", "nitropack": "dist/cli/index.mjs" } }, "sha512-k4KT/6CMiX+aAI2LWEdVhvI4PPPWt6NTz70TcxrGUgvMpt8Pv4/iG0KTwBJ58KdwFp59p3Mlp8QyGVmIVP6GvQ=="],
"nitropack": ["nitropack@2.12.9", "", { "dependencies": { "@cloudflare/kv-asset-handler": "^0.4.0", "@rollup/plugin-alias": "^5.1.1", "@rollup/plugin-commonjs": "^28.0.9", "@rollup/plugin-inject": "^5.0.5", "@rollup/plugin-json": "^6.1.0", "@rollup/plugin-node-resolve": "^16.0.3", "@rollup/plugin-replace": "^6.0.2", "@rollup/plugin-terser": "^0.4.4", "@vercel/nft": "^0.30.3", "archiver": "^7.0.1", "c12": "^3.3.1", "chokidar": "^4.0.3", "citty": "^0.1.6", "compatx": "^0.2.0", "confbox": "^0.2.2", "consola": "^3.4.2", "cookie-es": "^2.0.0", "croner": "^9.1.0", "crossws": "^0.3.5", "db0": "^0.3.4", "defu": "^6.1.4", "destr": "^2.0.5", "dot-prop": "^10.1.0", "esbuild": "^0.25.11", "escape-string-regexp": "^5.0.0", "etag": "^1.8.1", "exsolve": "^1.0.7", "globby": "^15.0.0", "gzip-size": "^7.0.0", "h3": "^1.15.4", "hookable": "^5.5.3", "httpxy": "^0.1.7", "ioredis": "^5.8.2", "jiti": "^2.6.1", "klona": "^2.0.6", "knitwork": "^1.2.0", "listhen": "^1.9.0", "magic-string": "^0.30.21", "magicast": "^0.5.0", "mime": "^4.1.0", "mlly": "^1.8.0", "node-fetch-native": "^1.6.7", "node-mock-http": "^1.0.3", "ofetch": "^1.5.0", "ohash": "^2.0.11", "pathe": "^2.0.3", "perfect-debounce": "^2.0.0", "pkg-types": "^2.3.0", "pretty-bytes": "^7.1.0", "radix3": "^1.1.2", "rollup": "^4.52.5", "rollup-plugin-visualizer": "^6.0.5", "scule": "^1.3.0", "semver": "^7.7.3", "serve-placeholder": "^2.0.2", "serve-static": "^2.2.0", "source-map": "^0.7.6", "std-env": "^3.10.0", "ufo": "^1.6.1", "ultrahtml": "^1.6.0", "uncrypto": "^0.1.3", "unctx": "^2.4.1", "unenv": "^2.0.0-rc.23", "unimport": "^5.5.0", "unplugin-utils": "^0.3.1", "unstorage": "^1.17.1", "untyped": "^2.0.0", "unwasm": "^0.3.11", "youch": "^4.1.0-beta.11", "youch-core": "^0.3.3" }, "peerDependencies": { "xml2js": "^0.6.2" }, "optionalPeers": ["xml2js"], "bin": { "nitro": "dist/cli/index.mjs", "nitropack": "dist/cli/index.mjs" } }, "sha512-t6qqNBn2UDGMWogQuORjbL2UPevB8PvIPsPHmqvWpeGOlPr4P8Oc5oA8t3wFwGmaolM2M/s2SwT23nx9yARmOg=="],
"nlcst-to-string": ["nlcst-to-string@4.0.0", "", { "dependencies": { "@types/nlcst": "^2.0.0" } }, "sha512-YKLBCcUYKAg0FNlOBT6aI91qFmSiFKiluk655WzPF+DDMA02qIyy8uiRqI8QXtcFpEvll12LpL5MXqEmAZ+dcA=="],
@ -2740,7 +2743,7 @@
"object.assign": ["object.assign@4.1.7", "", { "dependencies": { "call-bind": "^1.0.8", "call-bound": "^1.0.3", "define-properties": "^1.2.1", "es-object-atoms": "^1.0.0", "has-symbols": "^1.1.0", "object-keys": "^1.1.1" } }, "sha512-nK28WOo+QIjBkDduTINE4JkF/UJJKyf2EJxvJKfblDpyg0Q+pkOHNTL0Qwy6NP6FhE/EnzV73BxxqcJaXY9anw=="],
"ofetch": ["ofetch@1.4.1", "", { "dependencies": { "destr": "^2.0.3", "node-fetch-native": "^1.6.4", "ufo": "^1.5.4" } }, "sha512-QZj2DfGplQAr2oj9KzceK9Hwz6Whxazmn85yYeVuS3u9XTMOGMRx0kO95MQ+vLsj/S/NwBDMMLU5hpxvI6Tklw=="],
"ofetch": ["ofetch@1.5.0", "", { "dependencies": { "destr": "^2.0.5", "node-fetch-native": "^1.6.7", "ufo": "^1.6.1" } }, "sha512-A7llJ7eZyziA5xq9//3ZurA8OhFqtS99K5/V1sLBJ5j137CM/OAjlbA/TEJXBuOWwOfLqih+oH5U3ran4za1FQ=="],
"ohash": ["ohash@2.0.11", "", {}, "sha512-RdR9FQrFwNBNXAr4GixM8YaRZRJ5PUWbKYbE5eOsrwAjJW0q2REGcf79oYPsLyskQCZG1PLN+S/K1V00joZAoQ=="],
@ -3448,8 +3451,6 @@
"uuid": ["uuid@8.0.0", "", { "bin": { "uuid": "dist/bin/uuid" } }, "sha512-jOXGuXZAWdsTH7eZLtyXMqUb9EcWMGZNbL9YcGBJl4MH4nrxHmZJhEHvyLFrkxo+28uLb/NYRcStH48fnD0Vzw=="],
"validate-html-nesting": ["validate-html-nesting@1.2.3", "", {}, "sha512-kdkWdCl6eCeLlRShJKbjVOU2kFKxMF8Ghu50n+crEoyx+VKm3FxAxF9z4DCy6+bbTOqNW0+jcIYRnjoIRzigRw=="],
"vary": ["vary@1.1.2", "", {}, "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg=="],
"vfile": ["vfile@6.0.3", "", { "dependencies": { "@types/unist": "^3.0.0", "vfile-message": "^4.0.0" } }, "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q=="],
@ -3506,7 +3507,7 @@
"workerd": ["workerd@1.20251011.0", "", { "optionalDependencies": { "@cloudflare/workerd-darwin-64": "1.20251011.0", "@cloudflare/workerd-darwin-arm64": "1.20251011.0", "@cloudflare/workerd-linux-64": "1.20251011.0", "@cloudflare/workerd-linux-arm64": "1.20251011.0", "@cloudflare/workerd-windows-64": "1.20251011.0" }, "bin": { "workerd": "bin/workerd" } }, "sha512-Dq35TLPEJAw7BuYQMkN3p9rge34zWMU2Gnd4DSJFeVqld4+DAO2aPG7+We2dNIAyM97S8Y9BmHulbQ00E0HC7Q=="],
"wrangler": ["wrangler@4.45.0", "", { "dependencies": { "@cloudflare/kv-asset-handler": "0.4.0", "@cloudflare/unenv-preset": "2.7.8", "blake3-wasm": "2.1.5", "esbuild": "0.25.4", "miniflare": "4.20251011.1", "path-to-regexp": "6.3.0", "unenv": "2.0.0-rc.21", "workerd": "1.20251011.0" }, "optionalDependencies": { "fsevents": "~2.3.2" }, "peerDependencies": { "@cloudflare/workers-types": "^4.20251011.0" }, "optionalPeers": ["@cloudflare/workers-types"], "bin": { "wrangler": "bin/wrangler.js", "wrangler2": "bin/wrangler.js" } }, "sha512-2qM6bHw8l7r89Z9Y5A7Wn4L9U+dFoLjYgEUVpqy7CcmXpppL3QIYqU6rU5lre7/SRzBuPu/H93Vwfh538gZ3iw=="],
"wrangler": ["wrangler@4.45.1", "", { "dependencies": { "@cloudflare/kv-asset-handler": "0.4.0", "@cloudflare/unenv-preset": "2.7.8", "blake3-wasm": "2.1.5", "esbuild": "0.25.4", "miniflare": "4.20251011.1", "path-to-regexp": "6.3.0", "unenv": "2.0.0-rc.21", "workerd": "1.20251011.0" }, "optionalDependencies": { "fsevents": "~2.3.2" }, "peerDependencies": { "@cloudflare/workers-types": "^4.20251011.0" }, "optionalPeers": ["@cloudflare/workers-types"], "bin": { "wrangler": "bin/wrangler.js", "wrangler2": "bin/wrangler.js" } }, "sha512-SmmbDl6NUkv6mHT8/Scb09lvxXy0Y2hD98oZHswCysrYbs4JW5LP1eTuroE23Z2jK75D7TEzv2MXmwcDIytxhg=="],
"wrap-ansi": ["wrap-ansi@9.0.2", "", { "dependencies": { "ansi-styles": "^6.2.1", "string-width": "^7.0.0", "strip-ansi": "^7.1.0" } }, "sha512-42AtmgqjV+X1VpdOfyTGOYRi0/zsoLqtXQckTmqTeybT+BDIbM/Guxo7x3pE2vtpr1ok6xRqM9OpBe+Jyoqyww=="],
@ -3704,6 +3705,8 @@
"@openauthjs/openauth/jose": ["jose@5.9.6", "", {}, "sha512-AMlnetc9+CV9asI19zHmrgS/WYsWUwCn2R7RzlbJWD7F9eWYUTGyBmU9o6PxngtLGOiDGPRu+Uc4fhKzbpteZQ=="],
"@opencode-ai/ui/@pierre/precision-diffs": ["@pierre/precision-diffs@0.3.2", "", { "dependencies": { "@shikijs/core": "3.13.0", "@shikijs/transformers": "3.13.0", "diff": "8.0.2", "fast-deep-equal": "3.1.3", "hast-util-to-html": "9.0.5", "shiki": "3.13.0" }, "peerDependencies": { "react": "^18.3.1 || ^19.0.0", "react-dom": "^18.3.1 || ^19.0.0" } }, "sha512-HE+wFB0TV+wmjur/J+qI5PsRQl5RN6tCEFTusW0S5FDfZJUIpkxJCacqUxyEI0DriXMKhgGQ+oCQShfaFELdrQ=="],
"@opencode-ai/web/@shikijs/transformers": ["@shikijs/transformers@3.4.2", "", { "dependencies": { "@shikijs/core": "3.4.2", "@shikijs/types": "3.4.2" } }, "sha512-I5baLVi/ynLEOZoWSAMlACHNnG+yw5HDmse0oe+GW6U1u+ULdEB3UHiVWaHoJSSONV7tlcVxuaMy74sREDkSvg=="],
"@opencode-ai/web/@types/luxon": ["@types/luxon@3.6.2", "", {}, "sha512-R/BdP7OxEMc44l2Ex5lSXHoIXTB2JLNa3y2QISIbr58U/YcsffyQrYW//hZSdrfxrjRZj3GcUoxMPGdO8gSYuw=="],
@ -3714,6 +3717,8 @@
"@opentui/solid/@babel/core": ["@babel/core@7.28.0", "", { "dependencies": { "@ampproject/remapping": "^2.2.0", "@babel/code-frame": "^7.27.1", "@babel/generator": "^7.28.0", "@babel/helper-compilation-targets": "^7.27.2", "@babel/helper-module-transforms": "^7.27.3", "@babel/helpers": "^7.27.6", "@babel/parser": "^7.28.0", "@babel/template": "^7.27.2", "@babel/traverse": "^7.28.0", "@babel/types": "^7.28.0", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", "json5": "^2.2.3", "semver": "^6.3.1" } }, "sha512-UlLAnTPrFdNGoFtbSXwcGFQBtQZJCNjaN6hQNP3UPvuNXT1i82N26KL3dZeIpNalWywr9IuQuncaAfUaS1g6sQ=="],
"@opentui/solid/babel-preset-solid": ["babel-preset-solid@1.9.9", "", { "dependencies": { "babel-plugin-jsx-dom-expressions": "^0.40.1" }, "peerDependencies": { "@babel/core": "^7.0.0", "solid-js": "^1.9.8" }, "optionalPeers": ["solid-js"] }, "sha512-pCnxWrciluXCeli/dj5PIEHgbNzim3evtTn12snjqqg8QZWJNMjH1AWIp4iG/tbVjqQ72aBEymMSagvmgxubXw=="],
"@oslojs/jwt/@oslojs/encoding": ["@oslojs/encoding@0.4.1", "", {}, "sha512-hkjo6MuIK/kQR5CrGNdAPZhS01ZCXuWDRJ187zh6qqF2+yMHZpD9fAYpX8q2bOO6Ryhl3XpCT6kUX76N8hhm4Q=="],
"@parcel/watcher/detect-libc": ["detect-libc@1.0.3", "", { "bin": { "detect-libc": "./bin/detect-libc.js" } }, "sha512-pGjwhsmsp4kL2RTz08wcOlGN83otlqHeD/Z5T8GXZB+/YcpQ/dgo+lbU8ZsGxV0HIvqqxo9l7mqYwyYMD9bKDg=="],
@ -3774,6 +3779,8 @@
"@tanstack/server-functions-plugin/@babel/code-frame": ["@babel/code-frame@7.26.2", "", { "dependencies": { "@babel/helper-validator-identifier": "^7.25.9", "js-tokens": "^4.0.0", "picocolors": "^1.0.0" } }, "sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ=="],
"@types/serve-static/@types/send": ["@types/send@0.17.6", "", { "dependencies": { "@types/mime": "^1", "@types/node": "*" } }, "sha512-Uqt8rPBE8SY0RK8JB1EzVOIZ32uqy8HwdxCnoCOsYrvnswqmFZ/k+9Ikidlk/ImhsdvBsloHbAlewb2IEBV/Og=="],
"@vercel/nft/estree-walker": ["estree-walker@2.0.2", "", {}, "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w=="],
"@vercel/nft/glob": ["glob@10.4.5", "", { "dependencies": { "foreground-child": "^3.1.0", "jackspeak": "^3.1.2", "minimatch": "^9.0.4", "minipass": "^7.1.2", "package-json-from-dist": "^1.0.0", "path-scurry": "^1.11.1" }, "bin": { "glob": "dist/esm/bin.mjs" } }, "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg=="],
@ -3856,8 +3863,6 @@
"editorconfig/minimatch": ["minimatch@9.0.1", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-0jWhJpD/MdhPXwPuiRkCbfYfSKp2qnn2eOc279qI7f+osl/l+prKSrvhg157zSYvx/1nmgn2NqdT6k2Z7zSH9w=="],
"editorconfig/semver": ["semver@7.7.3", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q=="],
"es-get-iterator/isarray": ["isarray@2.0.5", "", {}, "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw=="],
"esbuild-plugin-copy/chokidar": ["chokidar@3.6.0", "", { "dependencies": { "anymatch": "~3.1.2", "braces": "~3.0.2", "glob-parent": "~5.1.2", "is-binary-path": "~2.1.0", "is-glob": "~4.0.1", "normalize-path": "~3.0.0", "readdirp": "~3.6.0" }, "optionalDependencies": { "fsevents": "~2.3.2" } }, "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw=="],
@ -3884,8 +3889,6 @@
"gaxios/uuid": ["uuid@9.0.1", "", { "bin": { "uuid": "dist/bin/uuid" } }, "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA=="],
"gel/semver": ["semver@7.7.3", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q=="],
"giget/pathe": ["pathe@2.0.3", "", {}, "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w=="],
"giget/tar": ["tar@6.2.1", "", { "dependencies": { "chownr": "^2.0.0", "fs-minipass": "^2.0.0", "minipass": "^5.0.0", "minizlib": "^2.1.1", "mkdirp": "^1.0.3", "yallist": "^4.0.0" } }, "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A=="],
@ -3946,15 +3949,15 @@
"nitropack/jiti": ["jiti@2.6.1", "", { "bin": { "jiti": "lib/jiti-cli.mjs" } }, "sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ=="],
"nitropack/magicast": ["magicast@0.5.0", "", { "dependencies": { "@babel/parser": "^7.28.5", "@babel/types": "^7.28.5", "source-map-js": "^1.2.1" } }, "sha512-D0cxqnb8DpO66P4LkD9ME6a4AhRK6A+xprXksD5vtsJN6G4zbzdI10vDaWCIyj3eLwjNZrQxUYB20FDhKrMEKQ=="],
"nitropack/pathe": ["pathe@2.0.3", "", {}, "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w=="],
"nitropack/semver": ["semver@7.7.3", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q=="],
"nitropack/serve-static": ["serve-static@2.2.0", "", { "dependencies": { "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "parseurl": "^1.3.3", "send": "^1.2.0" } }, "sha512-61g9pCh0Vnh7IutZjtLGGpTA355+OPn2TyDv/6ivP2h/AdAVX9azsoxmg2/M6nZeQZNYBEwIcsne1mJd9oQItQ=="],
"nitropack/unenv": ["unenv@2.0.0-rc.21", "", { "dependencies": { "defu": "^6.1.4", "exsolve": "^1.0.7", "ohash": "^2.0.11", "pathe": "^2.0.3", "ufo": "^1.6.1" } }, "sha512-Wj7/AMtE9MRnAXa6Su3Lk0LNCfqDYgfwVjwRFVum9U7wsto1imuHqk4kTm7Jni+5A0Hn7dttL6O/zjvUvoo+8A=="],
"node-abi/semver": ["semver@7.7.3", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q=="],
"nitropack/unenv": ["unenv@2.0.0-rc.23", "", { "dependencies": { "pathe": "^2.0.3" } }, "sha512-NeOb/HbW2OwOzYaV21MewVQYfzlSwG0kVUB74RyV0gEIP44M5DsYTK9e7jDcekB/3YU+pfNWniZj+r4M/aejyQ=="],
"npm-run-path/path-key": ["path-key@4.0.0", "", {}, "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ=="],
@ -3962,6 +3965,8 @@
"nypm/pkg-types": ["pkg-types@1.3.1", "", { "dependencies": { "confbox": "^0.1.8", "mlly": "^1.7.4", "pathe": "^2.0.1" } }, "sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ=="],
"opencode/@pierre/precision-diffs": ["@pierre/precision-diffs@0.3.2", "", { "dependencies": { "@shikijs/core": "3.13.0", "@shikijs/transformers": "3.13.0", "diff": "8.0.2", "fast-deep-equal": "3.1.3", "hast-util-to-html": "9.0.5", "shiki": "3.13.0" }, "peerDependencies": { "react": "^18.3.1 || ^19.0.0", "react-dom": "^18.3.1 || ^19.0.0" } }, "sha512-HE+wFB0TV+wmjur/J+qI5PsRQl5RN6tCEFTusW0S5FDfZJUIpkxJCacqUxyEI0DriXMKhgGQ+oCQShfaFELdrQ=="],
"opencontrol/@modelcontextprotocol/sdk": ["@modelcontextprotocol/sdk@1.6.1", "", { "dependencies": { "content-type": "^1.0.5", "cors": "^2.8.5", "eventsource": "^3.0.2", "express": "^5.0.1", "express-rate-limit": "^7.5.0", "pkce-challenge": "^4.1.0", "raw-body": "^3.0.0", "zod": "^3.23.8", "zod-to-json-schema": "^3.24.1" } }, "sha512-oxzMzYCkZHMntzuyerehK3fV6A2Kwh5BD6CGEJSVDU2QNEhfLOptf2X7esQgaHZXHZY0oHmMsOtIDLP71UJXgA=="],
"opencontrol/@tsconfig/bun": ["@tsconfig/bun@1.0.7", "", {}, "sha512-udGrGJBNQdXGVulehc1aWT73wkR9wdaGBtB6yL70RJsqwW/yJhIg6ZbRlPOfIUiFNrnBuYLBi9CSmMKfDC7dvA=="],
@ -4280,7 +4285,7 @@
"@modelcontextprotocol/sdk/express/content-disposition": ["content-disposition@1.0.0", "", { "dependencies": { "safe-buffer": "5.2.1" } }, "sha512-Au9nRL8VNUut/XSzbQA38+M78dzP4D+eqg3gfJHMIHHYa3bg067xj1KxMUWj+VULbiZMowKngFFbKczUrNJ1mg=="],
"@modelcontextprotocol/sdk/express/cookie": ["cookie@0.7.1", "", {}, "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w=="],
"@modelcontextprotocol/sdk/express/cookie": ["cookie@0.7.2", "", {}, "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w=="],
"@modelcontextprotocol/sdk/express/cookie-signature": ["cookie-signature@1.2.2", "", {}, "sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg=="],
@ -4294,6 +4299,8 @@
"@modelcontextprotocol/sdk/express/serve-static": ["serve-static@2.2.0", "", { "dependencies": { "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "parseurl": "^1.3.3", "send": "^1.2.0" } }, "sha512-61g9pCh0Vnh7IutZjtLGGpTA355+OPn2TyDv/6ivP2h/AdAVX9azsoxmg2/M6nZeQZNYBEwIcsne1mJd9oQItQ=="],
"@modelcontextprotocol/sdk/express/statuses": ["statuses@2.0.2", "", {}, "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw=="],
"@modelcontextprotocol/sdk/express/type-is": ["type-is@2.0.1", "", { "dependencies": { "content-type": "^1.0.5", "media-typer": "^1.1.0", "mime-types": "^3.0.0" } }, "sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw=="],
"@octokit/auth-oauth-app/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@26.0.0", "", {}, "sha512-7AtcfKtpo77j7Ts73b4OWhOZHTKo/gGY8bB3bNBQz4H+GRSWqx2yvj8TXRsbdTE0eRmYmXOEY66jM7mJ7LzfsA=="],
@ -4318,6 +4325,10 @@
"@octokit/request/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@26.0.0", "", {}, "sha512-7AtcfKtpo77j7Ts73b4OWhOZHTKo/gGY8bB3bNBQz4H+GRSWqx2yvj8TXRsbdTE0eRmYmXOEY66jM7mJ7LzfsA=="],
"@opencode-ai/ui/@pierre/precision-diffs/@shikijs/transformers": ["@shikijs/transformers@3.13.0", "", { "dependencies": { "@shikijs/core": "3.13.0", "@shikijs/types": "3.13.0" } }, "sha512-833lcuVzcRiG+fXvgslWsM2f4gHpjEgui1ipIknSizRuTgMkNZupiXE5/TVJ6eSYfhNBFhBZKkReKWO2GgYmqA=="],
"@opencode-ai/ui/@pierre/precision-diffs/shiki": ["shiki@3.13.0", "", { "dependencies": { "@shikijs/core": "3.13.0", "@shikijs/engine-javascript": "3.13.0", "@shikijs/engine-oniguruma": "3.13.0", "@shikijs/langs": "3.13.0", "@shikijs/themes": "3.13.0", "@shikijs/types": "3.13.0", "@shikijs/vscode-textmate": "^10.0.2", "@types/hast": "^3.0.4" } }, "sha512-aZW4l8Og16CokuCLf8CF8kq+KK2yOygapU5m3+hoGw0Mdosc6fPitjM+ujYarppj5ZIKGyPDPP1vqmQhr+5/0g=="],
"@opencode-ai/web/@shikijs/transformers/@shikijs/core": ["@shikijs/core@3.4.2", "", { "dependencies": { "@shikijs/types": "3.4.2", "@shikijs/vscode-textmate": "^10.0.2", "@types/hast": "^3.0.4", "hast-util-to-html": "^9.0.5" } }, "sha512-AG8vnSi1W2pbgR2B911EfGqtLE9c4hQBYkv/x7Z+Kt0VxhgQKcW7UNDVYsu9YxwV6u+OJrvdJrMq6DNWoBjihQ=="],
"@opencode-ai/web/@shikijs/transformers/@shikijs/types": ["@shikijs/types@3.4.2", "", { "dependencies": { "@shikijs/vscode-textmate": "^10.0.2", "@types/hast": "^3.0.4" } }, "sha512-zHC1l7L+eQlDXLnxvM9R91Efh2V4+rN3oMVS2swCBssbj2U/FBwybD1eeLaq8yl/iwT+zih8iUbTBCgGZOYlVg=="],
@ -4386,8 +4397,6 @@
"archiver-utils/glob/path-scurry": ["path-scurry@1.11.1", "", { "dependencies": { "lru-cache": "^10.2.0", "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" } }, "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA=="],
"astro/sharp/semver": ["semver@7.7.3", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q=="],
"astro/shiki/@shikijs/engine-javascript": ["@shikijs/engine-javascript@3.13.0", "", { "dependencies": { "@shikijs/types": "3.13.0", "@shikijs/vscode-textmate": "^10.0.2", "oniguruma-to-es": "^4.3.3" } }, "sha512-Ty7xv32XCp8u0eQt8rItpMs6rU9Ki6LJ1dQOW3V/56PKDcpvfHPnYFbsx5FFUP2Yim34m/UkazidamMNVR4vKg=="],
"astro/shiki/@shikijs/engine-oniguruma": ["@shikijs/engine-oniguruma@3.13.0", "", { "dependencies": { "@shikijs/types": "3.13.0", "@shikijs/vscode-textmate": "^10.0.2" } }, "sha512-O42rBGr4UDSlhT2ZFMxqM7QzIU+IcpoTMzb3W7AlziI1ZF7R8eS2M0yt5Ry35nnnTX/LTLXFPUjRFCIW+Operg=="],
@ -4406,8 +4415,6 @@
"babel-plugin-module-resolver/glob/path-scurry": ["path-scurry@1.11.1", "", { "dependencies": { "lru-cache": "^10.2.0", "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" } }, "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA=="],
"bl/buffer/ieee754": ["ieee754@1.2.1", "", {}, "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA=="],
"body-parser/debug/ms": ["ms@2.0.0", "", {}, "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="],
"c12/pkg-types/pathe": ["pathe@2.0.3", "", {}, "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w=="],
@ -4494,8 +4501,6 @@
"listhen/h3/cookie-es": ["cookie-es@1.2.2", "", {}, "sha512-+W7VmiVINB+ywl1HGXJXmrqkOhpKrIiVZV6tQuV54ZyQC7MMuBt81Vc336GMLoHBq5hV/F9eXgt5Mnx0Rha5Fg=="],
"miniflare/sharp/semver": ["semver@7.7.3", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q=="],
"mlly/pkg-types/confbox": ["confbox@0.1.8", "", {}, "sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w=="],
"nitropack/c12/dotenv": ["dotenv@17.2.3", "", {}, "sha512-JVUnt+DUIzu87TABbhPmNfVdBDt18BLOWjMUFJMSi/Qqg7NTYtabbvSNJGOJ7afbRuv9D/lngizHtP7QyLQ+9w=="],
@ -4510,6 +4515,10 @@
"nypm/pkg-types/confbox": ["confbox@0.1.8", "", {}, "sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w=="],
"opencode/@pierre/precision-diffs/@shikijs/transformers": ["@shikijs/transformers@3.13.0", "", { "dependencies": { "@shikijs/core": "3.13.0", "@shikijs/types": "3.13.0" } }, "sha512-833lcuVzcRiG+fXvgslWsM2f4gHpjEgui1ipIknSizRuTgMkNZupiXE5/TVJ6eSYfhNBFhBZKkReKWO2GgYmqA=="],
"opencode/@pierre/precision-diffs/shiki": ["shiki@3.13.0", "", { "dependencies": { "@shikijs/core": "3.13.0", "@shikijs/engine-javascript": "3.13.0", "@shikijs/engine-oniguruma": "3.13.0", "@shikijs/langs": "3.13.0", "@shikijs/themes": "3.13.0", "@shikijs/types": "3.13.0", "@shikijs/vscode-textmate": "^10.0.2", "@types/hast": "^3.0.4" } }, "sha512-aZW4l8Og16CokuCLf8CF8kq+KK2yOygapU5m3+hoGw0Mdosc6fPitjM+ujYarppj5ZIKGyPDPP1vqmQhr+5/0g=="],
"opencontrol/@modelcontextprotocol/sdk/express": ["express@5.1.0", "", { "dependencies": { "accepts": "^2.0.0", "body-parser": "^2.2.0", "content-disposition": "^1.0.0", "content-type": "^1.0.5", "cookie": "^0.7.1", "cookie-signature": "^1.2.1", "debug": "^4.4.0", "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "etag": "^1.8.1", "finalhandler": "^2.1.0", "fresh": "^2.0.0", "http-errors": "^2.0.0", "merge-descriptors": "^2.0.0", "mime-types": "^3.0.0", "on-finished": "^2.4.1", "once": "^1.4.0", "parseurl": "^1.3.3", "proxy-addr": "^2.0.7", "qs": "^6.14.0", "range-parser": "^1.2.1", "router": "^2.2.0", "send": "^1.1.0", "serve-static": "^2.2.0", "statuses": "^2.0.1", "type-is": "^2.0.1", "vary": "^1.1.2" } }, "sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA=="],
"opencontrol/@modelcontextprotocol/sdk/pkce-challenge": ["pkce-challenge@4.1.0", "", {}, "sha512-ZBmhE1C9LcPoH9XZSdwiPtbPHZROwAnMy+kIFQVrnMCxY4Cudlz3gBOpzilgc0jOgRaiT3sIWfpMomW2ar2orQ=="],
@ -4696,6 +4705,18 @@
"@modelcontextprotocol/sdk/express/type-is/media-typer": ["media-typer@1.1.0", "", {}, "sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw=="],
"@opencode-ai/ui/@pierre/precision-diffs/@shikijs/transformers/@shikijs/types": ["@shikijs/types@3.13.0", "", { "dependencies": { "@shikijs/vscode-textmate": "^10.0.2", "@types/hast": "^3.0.4" } }, "sha512-oM9P+NCFri/mmQ8LoFGVfVyemm5Hi27330zuOBp0annwJdKH1kOLndw3zCtAVDehPLg9fKqoEx3Ht/wNZxolfw=="],
"@opencode-ai/ui/@pierre/precision-diffs/shiki/@shikijs/engine-javascript": ["@shikijs/engine-javascript@3.13.0", "", { "dependencies": { "@shikijs/types": "3.13.0", "@shikijs/vscode-textmate": "^10.0.2", "oniguruma-to-es": "^4.3.3" } }, "sha512-Ty7xv32XCp8u0eQt8rItpMs6rU9Ki6LJ1dQOW3V/56PKDcpvfHPnYFbsx5FFUP2Yim34m/UkazidamMNVR4vKg=="],
"@opencode-ai/ui/@pierre/precision-diffs/shiki/@shikijs/engine-oniguruma": ["@shikijs/engine-oniguruma@3.13.0", "", { "dependencies": { "@shikijs/types": "3.13.0", "@shikijs/vscode-textmate": "^10.0.2" } }, "sha512-O42rBGr4UDSlhT2ZFMxqM7QzIU+IcpoTMzb3W7AlziI1ZF7R8eS2M0yt5Ry35nnnTX/LTLXFPUjRFCIW+Operg=="],
"@opencode-ai/ui/@pierre/precision-diffs/shiki/@shikijs/langs": ["@shikijs/langs@3.13.0", "", { "dependencies": { "@shikijs/types": "3.13.0" } }, "sha512-672c3WAETDYHwrRP0yLy3W1QYB89Hbpj+pO4KhxK6FzIrDI2FoEXNiNCut6BQmEApYLfuYfpgOZaqbY+E9b8wQ=="],
"@opencode-ai/ui/@pierre/precision-diffs/shiki/@shikijs/themes": ["@shikijs/themes@3.13.0", "", { "dependencies": { "@shikijs/types": "3.13.0" } }, "sha512-Vxw1Nm1/Od8jyA7QuAenaV78BG2nSr3/gCGdBkLpfLscddCkzkL36Q5b67SrLLfvAJTOUzW39x4FHVCFriPVgg=="],
"@opencode-ai/ui/@pierre/precision-diffs/shiki/@shikijs/types": ["@shikijs/types@3.13.0", "", { "dependencies": { "@shikijs/vscode-textmate": "^10.0.2", "@types/hast": "^3.0.4" } }, "sha512-oM9P+NCFri/mmQ8LoFGVfVyemm5Hi27330zuOBp0annwJdKH1kOLndw3zCtAVDehPLg9fKqoEx3Ht/wNZxolfw=="],
"@solidjs/start/shiki/@shikijs/engine-javascript/oniguruma-to-es": ["oniguruma-to-es@2.3.0", "", { "dependencies": { "emoji-regex-xs": "^1.0.0", "regex": "^5.1.1", "regex-recursion": "^5.1.1" } }, "sha512-bwALDxriqfKGfUufKGGepCzu9x7nJQuoRoAFp4AnwehhC2crqrDIAP/uN2qdlsAvSMpeRC3+Yzhqc7hLmle5+g=="],
"@vercel/nft/glob/path-scurry/lru-cache": ["lru-cache@10.4.3", "", {}, "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="],
@ -4720,13 +4741,27 @@
"nitropack/serve-static/send/fresh": ["fresh@2.0.0", "", {}, "sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A=="],
"nitropack/serve-static/send/statuses": ["statuses@2.0.2", "", {}, "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw=="],
"opencode/@pierre/precision-diffs/@shikijs/transformers/@shikijs/types": ["@shikijs/types@3.13.0", "", { "dependencies": { "@shikijs/vscode-textmate": "^10.0.2", "@types/hast": "^3.0.4" } }, "sha512-oM9P+NCFri/mmQ8LoFGVfVyemm5Hi27330zuOBp0annwJdKH1kOLndw3zCtAVDehPLg9fKqoEx3Ht/wNZxolfw=="],
"opencode/@pierre/precision-diffs/shiki/@shikijs/engine-javascript": ["@shikijs/engine-javascript@3.13.0", "", { "dependencies": { "@shikijs/types": "3.13.0", "@shikijs/vscode-textmate": "^10.0.2", "oniguruma-to-es": "^4.3.3" } }, "sha512-Ty7xv32XCp8u0eQt8rItpMs6rU9Ki6LJ1dQOW3V/56PKDcpvfHPnYFbsx5FFUP2Yim34m/UkazidamMNVR4vKg=="],
"opencode/@pierre/precision-diffs/shiki/@shikijs/engine-oniguruma": ["@shikijs/engine-oniguruma@3.13.0", "", { "dependencies": { "@shikijs/types": "3.13.0", "@shikijs/vscode-textmate": "^10.0.2" } }, "sha512-O42rBGr4UDSlhT2ZFMxqM7QzIU+IcpoTMzb3W7AlziI1ZF7R8eS2M0yt5Ry35nnnTX/LTLXFPUjRFCIW+Operg=="],
"opencode/@pierre/precision-diffs/shiki/@shikijs/langs": ["@shikijs/langs@3.13.0", "", { "dependencies": { "@shikijs/types": "3.13.0" } }, "sha512-672c3WAETDYHwrRP0yLy3W1QYB89Hbpj+pO4KhxK6FzIrDI2FoEXNiNCut6BQmEApYLfuYfpgOZaqbY+E9b8wQ=="],
"opencode/@pierre/precision-diffs/shiki/@shikijs/themes": ["@shikijs/themes@3.13.0", "", { "dependencies": { "@shikijs/types": "3.13.0" } }, "sha512-Vxw1Nm1/Od8jyA7QuAenaV78BG2nSr3/gCGdBkLpfLscddCkzkL36Q5b67SrLLfvAJTOUzW39x4FHVCFriPVgg=="],
"opencode/@pierre/precision-diffs/shiki/@shikijs/types": ["@shikijs/types@3.13.0", "", { "dependencies": { "@shikijs/vscode-textmate": "^10.0.2", "@types/hast": "^3.0.4" } }, "sha512-oM9P+NCFri/mmQ8LoFGVfVyemm5Hi27330zuOBp0annwJdKH1kOLndw3zCtAVDehPLg9fKqoEx3Ht/wNZxolfw=="],
"opencontrol/@modelcontextprotocol/sdk/express/accepts": ["accepts@2.0.0", "", { "dependencies": { "mime-types": "^3.0.0", "negotiator": "^1.0.0" } }, "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng=="],
"opencontrol/@modelcontextprotocol/sdk/express/body-parser": ["body-parser@2.2.0", "", { "dependencies": { "bytes": "^3.1.2", "content-type": "^1.0.5", "debug": "^4.4.0", "http-errors": "^2.0.0", "iconv-lite": "^0.6.3", "on-finished": "^2.4.1", "qs": "^6.14.0", "raw-body": "^3.0.0", "type-is": "^2.0.0" } }, "sha512-02qvAaxv8tp7fBa/mw1ga98OGm+eCbqzJOKoRt70sLmfEEi+jyBYVTDGfCL/k06/4EMk/z01gCe7HoCH/f2LTg=="],
"opencontrol/@modelcontextprotocol/sdk/express/content-disposition": ["content-disposition@1.0.0", "", { "dependencies": { "safe-buffer": "5.2.1" } }, "sha512-Au9nRL8VNUut/XSzbQA38+M78dzP4D+eqg3gfJHMIHHYa3bg067xj1KxMUWj+VULbiZMowKngFFbKczUrNJ1mg=="],
"opencontrol/@modelcontextprotocol/sdk/express/cookie": ["cookie@0.7.1", "", {}, "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w=="],
"opencontrol/@modelcontextprotocol/sdk/express/cookie": ["cookie@0.7.2", "", {}, "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w=="],
"opencontrol/@modelcontextprotocol/sdk/express/cookie-signature": ["cookie-signature@1.2.2", "", {}, "sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg=="],
@ -4740,6 +4775,8 @@
"opencontrol/@modelcontextprotocol/sdk/express/serve-static": ["serve-static@2.2.0", "", { "dependencies": { "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "parseurl": "^1.3.3", "send": "^1.2.0" } }, "sha512-61g9pCh0Vnh7IutZjtLGGpTA355+OPn2TyDv/6ivP2h/AdAVX9azsoxmg2/M6nZeQZNYBEwIcsne1mJd9oQItQ=="],
"opencontrol/@modelcontextprotocol/sdk/express/statuses": ["statuses@2.0.2", "", {}, "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw=="],
"opencontrol/@modelcontextprotocol/sdk/express/type-is": ["type-is@2.0.1", "", { "dependencies": { "content-type": "^1.0.5", "media-typer": "^1.1.0", "mime-types": "^3.0.0" } }, "sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw=="],
"pkg-up/find-up/locate-path/p-locate": ["p-locate@3.0.0", "", { "dependencies": { "p-limit": "^2.0.0" } }, "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ=="],

View file

@ -104,7 +104,7 @@ To test locally:
- `MODEL`: The model used by opencode. Same as the `MODEL` defined in the GitHub workflow.
- `ANTHROPIC_API_KEY`: Your model provider API key. Same as the keys defined in the GitHub workflow.
- `GITHUB_RUN_ID`: Dummy value to emulate GitHub action environment.
- `MOCK_TOKEN`: A GitHub persontal access token. This token is used to verify you have `admin` or `write` access to the test repo. Generate a token [here](https://github.com/settings/personal-access-tokens).
- `MOCK_TOKEN`: A GitHub personal access token. This token is used to verify you have `admin` or `write` access to the test repo. Generate a token [here](https://github.com/settings/personal-access-tokens).
- `MOCK_EVENT`: Mock GitHub event payload (see templates below).
- `/path/to/opencode`: Path to your cloned opencode repo. `bun /path/to/opencode/github/index.ts` runs your local version of `opencode`.
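Taken together, the variables above can simply be exported in the shell before running `bun /path/to/opencode/github/index.ts`. As a purely illustrative alternative, a minimal Bun script could wire them up like this (`Bun.spawn` and the pass-through values are assumptions for the sketch, not part of the repo):

```ts
// Illustrative sketch only: equivalent to exporting the variables above in the
// shell and then running `bun /path/to/opencode/github/index.ts` directly.
const proc = Bun.spawn(["bun", "/path/to/opencode/github/index.ts"], {
  env: {
    ...process.env,
    MODEL: process.env.MODEL!,                   // same model as in the GitHub workflow
    ANTHROPIC_API_KEY: process.env.ANTHROPIC_API_KEY!,
    GITHUB_RUN_ID: "1",                          // dummy value
    MOCK_TOKEN: process.env.MOCK_TOKEN!,         // GitHub personal access token
    MOCK_EVENT: process.env.MOCK_EVENT!,         // mock payload JSON (see templates below)
  },
  stdout: "inherit",
  stderr: "inherit",
})
await proc.exited
```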
@ -118,7 +118,7 @@ Replace:
- `"owner":"sst"` with repo owner
- `"repo":"hello-world"` with repo name
- `"actor":"fwang"` with the GitHub username of commentor
- `"actor":"fwang"` with the GitHub username of commenter
- `"number":4` with the GitHub issue id
- `"body":"hey opencode, summarize thread"` with comment body

View file

@ -28,7 +28,7 @@
"@tsconfig/bun": "1.0.9",
"@cloudflare/workers-types": "4.20251008.0",
"@openauthjs/openauth": "0.0.0-20250322224806",
"@pierre/precision-diffs": "0.0.2-alpha.1-1",
"@pierre/precision-diffs": "0.3.2",
"@solidjs/meta": "0.29.4",
"@tailwindcss/vite": "4.1.11",
"diff": "8.0.2",
@ -56,8 +56,8 @@
"turbo": "2.5.6"
},
"dependencies": {
"@opencode-ai/sdk": "workspace:*",
"@opencode-ai/script": "workspace:*"
"@opencode-ai/script": "workspace:*",
"@opencode-ai/sdk": "workspace:*"
},
"repository": {
"type": "git",

View file

@ -7,7 +7,7 @@
"dev:remote": "VITE_AUTH_URL=https://auth.dev.opencode.ai bun sst shell --stage=dev bun dev",
"build": "vinxi build && ../../opencode/script/schema.ts ./.output/public/config.json",
"start": "vinxi start",
"version": "0.15.16"
"version": "0.15.20"
},
"dependencies": {
"@ibm/plex": "6.4.1",

View file

@ -0,0 +1,5 @@
export class AuthError extends Error {}
export class CreditsError extends Error {}
export class MonthlyLimitError extends Error {}
export class UserLimitError extends Error {}
export class ModelError extends Error {}

View file

@ -0,0 +1 @@
export type Format = "anthropic" | "openai" | "oa-compat"

View file

@ -1,67 +1,41 @@
import { z } from "zod"
import type { APIEvent } from "@solidjs/start/server"
import path from "node:path"
import { and, Database, eq, isNull, lt, or, sql } from "@opencode-ai/console-core/drizzle/index.js"
import { KeyTable } from "@opencode-ai/console-core/schema/key.sql.js"
import { BillingTable, UsageTable } from "@opencode-ai/console-core/schema/billing.sql.js"
import { centsToMicroCents } from "@opencode-ai/console-core/util/price.js"
import { Identifier } from "@opencode-ai/console-core/identifier.js"
import { Resource } from "@opencode-ai/console-resource"
import { Billing } from "../../../../core/src/billing"
import { Billing } from "@opencode-ai/console-core/billing.js"
import { Actor } from "@opencode-ai/console-core/actor.js"
import { WorkspaceTable } from "@opencode-ai/console-core/schema/workspace.sql.js"
import { ZenData } from "@opencode-ai/console-core/model.js"
import { UserTable } from "@opencode-ai/console-core/schema/user.sql.js"
import { ModelTable } from "@opencode-ai/console-core/schema/model.sql.js"
import { ProviderTable } from "@opencode-ai/console-core/schema/provider.sql.js"
import { logger } from "./logger"
import { AuthError, CreditsError, MonthlyLimitError, UserLimitError, ModelError } from "./error"
import { createBodyConverter, createStreamPartConverter, createResponseConverter } from "./provider/provider"
import { Format } from "./format"
import { anthropicHelper } from "./provider/anthropic"
import { openaiHelper } from "./provider/openai"
import { oaCompatHelper } from "./provider/openai-compatible"
type ZenData = Awaited<ReturnType<typeof ZenData.list>>
type Model = ZenData["models"][string]
export async function handler(
input: APIEvent,
opts: {
modifyBody?: (body: any) => any
setAuthHeader: (headers: Headers, apiKey: string) => void
format: Format
parseApiKey: (headers: Headers) => string | undefined
onStreamPart: (chunk: string) => void
getStreamUsage: () => any
normalizeUsage: (body: any) => {
inputTokens: number
outputTokens: number
reasoningTokens?: number
cacheReadTokens?: number
cacheWrite5mTokens?: number
cacheWrite1hTokens?: number
}
},
) {
class AuthError extends Error {}
class CreditsError extends Error {}
class MonthlyLimitError extends Error {}
class UserLimitError extends Error {}
class ModelError extends Error {}
type ZenData = Awaited<ReturnType<typeof ZenData.list>>
type Model = ZenData["models"][string]
const FREE_WORKSPACES = [
"wrk_01K46JDFR0E75SG2Q8K172KF3Y", // frank
"wrk_01K6W1A3VE0KMNVSCQT43BG2SX", // opencode bench
]
const logger = {
metric: (values: Record<string, any>) => {
console.log(`_metric:${JSON.stringify(values)}`)
},
log: console.log,
debug: (message: string) => {
if (Resource.App.stage === "production") return
console.debug(message)
},
}
try {
const url = new URL(input.request.url)
const body = await input.request.json()
logger.debug(JSON.stringify(body))
logger.metric({
is_tream: !!body.stream,
session: input.request.headers.get("x-opencode-session"),
@ -78,22 +52,28 @@ export async function handler(
// Request to model provider
const startTimestamp = Date.now()
const res = await fetch(path.posix.join(providerInfo.api, url.pathname.replace(/^\/zen\/v1/, "") + url.search), {
const reqUrl = providerInfo.modifyUrl(providerInfo.api)
const reqBody = JSON.stringify(
providerInfo.modifyBody({
...createBodyConverter(opts.format, providerInfo.format)(body),
model: providerInfo.model,
}),
)
logger.debug("REQUEST URL: " + reqUrl)
logger.debug("REQUEST: " + reqBody)
const res = await fetch(reqUrl, {
method: "POST",
headers: (() => {
const headers = input.request.headers
headers.delete("host")
headers.delete("content-length")
opts.setAuthHeader(headers, providerInfo.apiKey)
providerInfo.modifyHeaders(headers, body, providerInfo.apiKey)
Object.entries(providerInfo.headerMappings ?? {}).forEach(([k, v]) => {
headers.set(k, headers.get(v)!)
})
return headers
})(),
body: JSON.stringify({
...(opts.modifyBody?.(body) ?? body),
model: providerInfo.model,
}),
body: reqBody,
})
// Scrub response headers
@ -104,14 +84,19 @@ export async function handler(
resHeaders.set(k, v)
}
}
logger.debug("STATUS: " + res.status + " " + res.statusText)
if (res.status === 400 || res.status === 503) {
logger.debug("RESPONSE: " + (await res.text()))
}
// Handle non-streaming response
if (!body.stream) {
const responseConverter = createResponseConverter(providerInfo.format, opts.format)
const json = await res.json()
const body = JSON.stringify(json)
const body = JSON.stringify(responseConverter(json))
logger.metric({ response_length: body.length })
logger.debug(body)
await trackUsage(authInfo, modelInfo, providerInfo.id, json.usage)
logger.debug("RESPONSE: " + body)
await trackUsage(authInfo, modelInfo, providerInfo, json.usage)
await reload(authInfo)
return new Response(body, {
status: res.status,
@ -121,10 +106,13 @@ export async function handler(
}
// Handle streaming response
const streamConverter = createStreamPartConverter(providerInfo.format, opts.format)
const usageParser = providerInfo.createUsageParser()
const stream = new ReadableStream({
start(c) {
const reader = res.body?.getReader()
const decoder = new TextDecoder()
const encoder = new TextEncoder()
let buffer = ""
let responseLength = 0
@ -136,9 +124,9 @@ export async function handler(
response_length: responseLength,
"timestamp.last_byte": Date.now(),
})
const usage = opts.getStreamUsage()
const usage = usageParser.retrieve()
if (usage) {
await trackUsage(authInfo, modelInfo, providerInfo.id, usage)
await trackUsage(authInfo, modelInfo, providerInfo, usage)
await reload(authInfo)
}
c.close()
@ -158,12 +146,21 @@ export async function handler(
const parts = buffer.split("\n\n")
buffer = parts.pop() ?? ""
for (const part of parts) {
logger.debug(part)
opts.onStreamPart(part.trim())
for (let part of parts) {
logger.debug("PART: " + part)
part = part.trim()
usageParser.parse(part)
if (providerInfo.format !== opts.format) {
part = streamConverter(part)
c.enqueue(encoder.encode(part + "\n\n"))
}
}
c.enqueue(value)
if (providerInfo.format === opts.format) {
c.enqueue(value)
}
return pump()
}) || Promise.resolve()
@ -235,7 +232,11 @@ export async function handler(
throw new ModelError(`Provider ${provider.id} not supported`)
}
return { ...provider, ...zenData.providers[provider.id] }
return {
...provider,
...zenData.providers[provider.id],
...(provider.id === "anthropic" ? anthropicHelper : provider.id === "openai" ? openaiHelper : oaCompatHelper),
}
}
async function authenticate(
@ -356,11 +357,11 @@ export async function handler(
async function trackUsage(
authInfo: Awaited<ReturnType<typeof authenticate>>,
modelInfo: ReturnType<typeof validateModel>,
providerId: string,
providerInfo: Awaited<ReturnType<typeof selectProvider>>,
usage: any,
) {
const { inputTokens, outputTokens, reasoningTokens, cacheReadTokens, cacheWrite5mTokens, cacheWrite1hTokens } =
opts.normalizeUsage(usage)
providerInfo.normalizeUsage(usage)
const modelCost =
modelInfo.cost200K &&
@ -421,7 +422,7 @@ export async function handler(
workspaceID: authInfo.workspaceID,
id: Identifier.create("usage"),
model: modelInfo.id,
provider: providerId,
provider: providerInfo.id,
inputTokens,
outputTokens,
reasoningTokens,
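Zooming out from the hunks above: the handler now delegates all provider-specific behavior to a helper object merged into `providerInfo`. The sketch below lists only the helper fields these hunks actually exercise; the real `ProviderHelper` type lives in `./provider/provider.ts`, which is not part of this excerpt, so treat this as an approximation rather than the definitive interface:

```ts
// Approximate helper surface, inferred from how the handler above uses
// providerInfo and from the anthropicHelper added further down in this commit.
import type { Format } from "./format"

type UsageParser = {
  parse: (chunk: string) => void   // fed every SSE part while streaming
  retrieve: () => any              // provider-specific usage payload, if any
}

type ProviderHelperSketch = {
  format: Format                                              // wire format the provider speaks
  modifyUrl: (providerApi: string) => string                  // e.g. append "/messages"
  modifyHeaders: (headers: Headers, body: Record<string, any>, apiKey: string) => void
  modifyBody: (body: Record<string, any>) => Record<string, any>
  createUsageParser: () => UsageParser
  normalizeUsage: (usage: any) => {
    inputTokens: number
    outputTokens: number
    reasoningTokens?: number
    cacheReadTokens?: number
    cacheWrite5mTokens?: number
    cacheWrite1hTokens?: number
  }
}
```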

View file

@ -0,0 +1,12 @@
import { Resource } from "@opencode-ai/console-resource"
export const logger = {
metric: (values: Record<string, any>) => {
console.log(`_metric:${JSON.stringify(values)}`)
},
log: console.log,
debug: (message: string) => {
if (Resource.App.stage === "production") return
console.debug(message)
},
}

View file

@ -0,0 +1,621 @@
import { ProviderHelper, CommonRequest, CommonResponse, CommonChunk } from "./provider"
type Usage = {
cache_creation?: {
ephemeral_5m_input_tokens?: number
ephemeral_1h_input_tokens?: number
}
cache_creation_input_tokens?: number
cache_read_input_tokens?: number
input_tokens?: number
output_tokens?: number
server_tool_use?: {
web_search_requests?: number
}
}
export const anthropicHelper = {
format: "anthropic",
modifyUrl: (providerApi: string) => providerApi + "/messages",
modifyHeaders: (headers: Headers, body: Record<string, any>, apiKey: string) => {
headers.set("x-api-key", apiKey)
headers.set("anthropic-version", headers.get("anthropic-version") ?? "2023-06-01")
if (body.model.startsWith("claude-sonnet-")) {
headers.set("anthropic-beta", "context-1m-2025-08-07")
}
},
modifyBody: (body: Record<string, any>) => {
return {
...body,
service_tier: "standard_only",
}
},
createUsageParser: () => {
let usage: Usage
return {
parse: (chunk: string) => {
const data = chunk.split("\n")[1]
if (!data.startsWith("data: ")) return
let json
try {
json = JSON.parse(data.slice(6))
} catch (e) {
return
}
const usageUpdate = json.usage ?? json.message?.usage
if (!usageUpdate) return
usage = {
...usage,
...usageUpdate,
cache_creation: {
...usage?.cache_creation,
...usageUpdate.cache_creation,
},
server_tool_use: {
...usage?.server_tool_use,
...usageUpdate.server_tool_use,
},
}
},
retrieve: () => usage,
}
},
normalizeUsage: (usage: Usage) => ({
inputTokens: usage.input_tokens ?? 0,
outputTokens: usage.output_tokens ?? 0,
reasoningTokens: undefined,
cacheReadTokens: usage.cache_read_input_tokens ?? undefined,
cacheWrite5mTokens: usage.cache_creation?.ephemeral_5m_input_tokens ?? undefined,
cacheWrite1hTokens: usage.cache_creation?.ephemeral_1h_input_tokens ?? undefined,
}),
} satisfies ProviderHelper
export function fromAnthropicRequest(body: any): CommonRequest {
if (!body || typeof body !== "object") return body
const msgs: any[] = []
const sys = Array.isArray(body.system) ? body.system : undefined
if (sys && sys.length > 0) {
for (const s of sys) {
if (!s) continue
if ((s as any).type !== "text") continue
if (typeof (s as any).text !== "string") continue
if ((s as any).text.length === 0) continue
msgs.push({ role: "system", content: (s as any).text })
}
}
const toImg = (src: any) => {
if (!src || typeof src !== "object") return undefined
if ((src as any).type === "url" && typeof (src as any).url === "string")
return { type: "image_url", image_url: { url: (src as any).url } }
if (
(src as any).type === "base64" &&
typeof (src as any).media_type === "string" &&
typeof (src as any).data === "string"
)
return { type: "image_url", image_url: { url: `data:${(src as any).media_type};base64,${(src as any).data}` } }
return undefined
}
const inMsgs = Array.isArray(body.messages) ? body.messages : []
for (const m of inMsgs) {
if (!m || !(m as any).role) continue
if ((m as any).role === "user") {
const partsIn = Array.isArray((m as any).content) ? (m as any).content : []
const partsOut: any[] = []
for (const p of partsIn) {
if (!p || !(p as any).type) continue
if ((p as any).type === "text" && typeof (p as any).text === "string")
partsOut.push({ type: "text", text: (p as any).text })
if ((p as any).type === "image") {
const ip = toImg((p as any).source)
if (ip) partsOut.push(ip)
}
if ((p as any).type === "tool_result") {
const id = (p as any).tool_use_id
const content =
typeof (p as any).content === "string" ? (p as any).content : JSON.stringify((p as any).content)
msgs.push({ role: "tool", tool_call_id: id, content })
}
}
if (partsOut.length > 0) {
if (partsOut.length === 1 && partsOut[0].type === "text") msgs.push({ role: "user", content: partsOut[0].text })
else msgs.push({ role: "user", content: partsOut })
}
continue
}
if ((m as any).role === "assistant") {
const partsIn = Array.isArray((m as any).content) ? (m as any).content : []
const texts: string[] = []
const tcs: any[] = []
for (const p of partsIn) {
if (!p || !(p as any).type) continue
if ((p as any).type === "text" && typeof (p as any).text === "string") texts.push((p as any).text)
if ((p as any).type === "tool_use") {
const name = (p as any).name
const id = (p as any).id
const inp = (p as any).input
const input = (() => {
if (typeof inp === "string") return inp
try {
return JSON.stringify(inp ?? {})
} catch {
return String(inp ?? "")
}
})()
tcs.push({ id, type: "function", function: { name, arguments: input } })
}
}
const out: any = { role: "assistant", content: texts.join("") }
if (tcs.length > 0) out.tool_calls = tcs
msgs.push(out)
continue
}
}
const tools = Array.isArray(body.tools)
? body.tools
.filter((t: any) => t && typeof t === "object" && "input_schema" in t)
.map((t: any) => ({
type: "function",
function: { name: (t as any).name, description: (t as any).description, parameters: (t as any).input_schema },
}))
: undefined
const tcin = body.tool_choice
const tc = (() => {
if (!tcin) return undefined
if ((tcin as any).type === "auto") return "auto"
if ((tcin as any).type === "any") return "required"
if ((tcin as any).type === "tool" && typeof (tcin as any).name === "string")
return { type: "function" as const, function: { name: (tcin as any).name } }
return undefined
})()
const stop = (() => {
const v = body.stop_sequences
if (!v) return undefined
if (Array.isArray(v)) return v.length === 1 ? v[0] : v
if (typeof v === "string") return v
return undefined
})()
return {
max_tokens: body.max_tokens,
temperature: body.temperature,
top_p: body.top_p,
stop,
messages: msgs,
stream: !!body.stream,
tools,
tool_choice: tc,
}
}
export function toAnthropicRequest(body: CommonRequest) {
if (!body || typeof body !== "object") return body
const sysIn = Array.isArray(body.messages) ? body.messages.filter((m: any) => m && m.role === "system") : []
let ccCount = 0
const cc = () => {
ccCount++
return ccCount <= 4 ? { cache_control: { type: "ephemeral" } } : {}
}
const system = sysIn
.filter((m: any) => typeof m.content === "string" && m.content.length > 0)
.map((m: any) => ({ type: "text", text: m.content, ...cc() }))
const msgsIn = Array.isArray(body.messages) ? body.messages : []
const msgsOut: any[] = []
const toSrc = (p: any) => {
if (!p || typeof p !== "object") return undefined
if ((p as any).type === "image_url" && (p as any).image_url) {
const u = (p as any).image_url.url ?? (p as any).image_url
if (typeof u === "string" && u.startsWith("data:")) {
const m = u.match(/^data:([^;]+);base64,(.*)$/)
if (m) return { type: "base64", media_type: m[1], data: m[2] }
}
if (typeof u === "string") return { type: "url", url: u }
}
return undefined
}
for (const m of msgsIn) {
if (!m || !(m as any).role) continue
if ((m as any).role === "user") {
if (typeof (m as any).content === "string") {
msgsOut.push({
role: "user",
content: [{ type: "text", text: (m as any).content, ...cc() }],
})
} else if (Array.isArray((m as any).content)) {
const parts: any[] = []
for (const p of (m as any).content) {
if (!p || !(p as any).type) continue
if ((p as any).type === "text" && typeof (p as any).text === "string")
parts.push({ type: "text", text: (p as any).text, ...cc() })
if ((p as any).type === "image_url") {
const s = toSrc(p)
if (s) parts.push({ type: "image", source: s, ...cc() })
}
}
if (parts.length > 0) msgsOut.push({ role: "user", content: parts })
}
continue
}
if ((m as any).role === "assistant") {
const out: any = { role: "assistant", content: [] as any[] }
if (typeof (m as any).content === "string" && (m as any).content.length > 0) {
;(out.content as any[]).push({ type: "text", text: (m as any).content, ...cc() })
}
if (Array.isArray((m as any).tool_calls)) {
for (const tc of (m as any).tool_calls) {
if ((tc as any).type === "function" && (tc as any).function) {
let input: any
const a = (tc as any).function.arguments
if (typeof a === "string") {
try {
input = JSON.parse(a)
} catch {
input = a
}
} else input = a
const id = (tc as any).id || `toolu_${Math.random().toString(36).slice(2)}`
;(out.content as any[]).push({
type: "tool_use",
id,
name: (tc as any).function.name,
input,
...cc(),
})
}
}
}
if ((out.content as any[]).length > 0) msgsOut.push(out)
continue
}
if ((m as any).role === "tool") {
msgsOut.push({
role: "user",
content: [
{
type: "tool_result",
tool_use_id: (m as any).tool_call_id,
content: (m as any).content,
...cc(),
},
],
})
continue
}
}
const tools = Array.isArray(body.tools)
? body.tools
.filter((t: any) => t && typeof t === "object" && (t as any).type === "function")
.map((t: any) => ({
name: (t as any).function.name,
description: (t as any).function.description,
input_schema: (t as any).function.parameters,
...cc(),
}))
: undefined
const tcIn = body.tool_choice
const tool_choice = (() => {
if (!tcIn) return undefined
if (tcIn === "auto") return { type: "auto" }
if (tcIn === "required") return { type: "any" }
if ((tcIn as any).type === "function" && (tcIn as any).function?.name)
return { type: "tool", name: (tcIn as any).function.name }
return undefined
})()
const stop_sequences = (() => {
const v = body.stop
if (!v) return undefined
if (Array.isArray(v)) return v
if (typeof v === "string") return [v]
return undefined
})()
return {
max_tokens: body.max_tokens ?? 32_000,
temperature: body.temperature,
top_p: body.top_p,
system: system.length > 0 ? system : undefined,
messages: msgsOut,
stream: !!body.stream,
tools,
tool_choice,
stop_sequences,
}
}
export function fromAnthropicResponse(resp: any): CommonResponse {
if (!resp || typeof resp !== "object") return resp
if (Array.isArray((resp as any).choices)) return resp
const isAnthropic = typeof (resp as any).type === "string" && (resp as any).type === "message"
if (!isAnthropic) return resp
const idIn = (resp as any).id
const id =
typeof idIn === "string" ? idIn.replace(/^msg_/, "chatcmpl_") : `chatcmpl_${Math.random().toString(36).slice(2)}`
const model = (resp as any).model
const blocks: any[] = Array.isArray((resp as any).content) ? (resp as any).content : []
const text = blocks
.filter((b) => b && b.type === "text" && typeof (b as any).text === "string")
.map((b: any) => b.text)
.join("")
const tcs = blocks
.filter((b) => b && b.type === "tool_use")
.map((b: any) => {
const name = (b as any).name
const args = (() => {
const inp = (b as any).input
if (typeof inp === "string") return inp
try {
return JSON.stringify(inp ?? {})
} catch {
return String(inp ?? "")
}
})()
const tid =
typeof (b as any).id === "string" && (b as any).id.length > 0
? (b as any).id
: `toolu_${Math.random().toString(36).slice(2)}`
return { id: tid, type: "function" as const, function: { name, arguments: args } }
})
const finish = (r: string | null) => {
if (r === "end_turn") return "stop"
if (r === "tool_use") return "tool_calls"
if (r === "max_tokens") return "length"
if (r === "content_filter") return "content_filter"
return null
}
const u = (resp as any).usage
const usage = (() => {
if (!u) return undefined as any
const pt = typeof (u as any).input_tokens === "number" ? (u as any).input_tokens : undefined
const ct = typeof (u as any).output_tokens === "number" ? (u as any).output_tokens : undefined
const total = pt != null && ct != null ? pt + ct : undefined
const cached =
typeof (u as any).cache_read_input_tokens === "number" ? (u as any).cache_read_input_tokens : undefined
const details = cached != null ? { cached_tokens: cached } : undefined
return {
prompt_tokens: pt,
completion_tokens: ct,
total_tokens: total,
...(details ? { prompt_tokens_details: details } : {}),
}
})()
return {
id,
object: "chat.completion",
created: Math.floor(Date.now() / 1000),
model,
choices: [
{
index: 0,
message: {
role: "assistant",
...(text && text.length > 0 ? { content: text } : {}),
...(tcs.length > 0 ? { tool_calls: tcs } : {}),
},
finish_reason: finish((resp as any).stop_reason ?? null),
},
],
...(usage ? { usage } : {}),
}
}
export function toAnthropicResponse(resp: CommonResponse) {
if (!resp || typeof resp !== "object") return resp
if (!Array.isArray((resp as any).choices)) return resp
const choice = (resp as any).choices[0]
if (!choice) return resp
const message = choice.message
if (!message) return resp
const content: any[] = []
if (typeof message.content === "string" && message.content.length > 0)
content.push({ type: "text", text: message.content })
if (Array.isArray(message.tool_calls)) {
for (const tc of message.tool_calls) {
if ((tc as any).type === "function" && (tc as any).function) {
let input: any
try {
input = JSON.parse((tc as any).function.arguments)
} catch {
input = (tc as any).function.arguments
}
content.push({ type: "tool_use", id: (tc as any).id, name: (tc as any).function.name, input })
}
}
}
const stop_reason = (() => {
const r = choice.finish_reason
if (r === "stop") return "end_turn"
if (r === "tool_calls") return "tool_use"
if (r === "length") return "max_tokens"
if (r === "content_filter") return "content_filter"
return null
})()
const usage = (() => {
const u = (resp as any).usage
if (!u) return undefined
return {
input_tokens: u.prompt_tokens,
output_tokens: u.completion_tokens,
cache_read_input_tokens: u.prompt_tokens_details?.cached_tokens,
}
})()
return {
id: (resp as any).id,
type: "message",
role: "assistant",
content: content.length > 0 ? content : [{ type: "text", text: "" }],
model: (resp as any).model,
stop_reason,
usage,
}
}
export function fromAnthropicChunk(chunk: string): CommonChunk | string {
// Anthropic sends two lines per part: "event: <type>\n" + "data: <json>"
const lines = chunk.split("\n")
const dataLine = lines.find((l) => l.startsWith("data: "))
if (!dataLine) return chunk
let json
try {
json = JSON.parse(dataLine.slice(6))
} catch {
return chunk
}
const out: CommonChunk = {
id: json.id ?? json.message?.id ?? "",
object: "chat.completion.chunk",
created: Math.floor(Date.now() / 1000),
model: json.model ?? json.message?.model ?? "",
choices: [],
}
if (json.type === "content_block_start") {
const cb = json.content_block
if (cb?.type === "text") {
out.choices.push({ index: json.index ?? 0, delta: { role: "assistant", content: "" }, finish_reason: null })
} else if (cb?.type === "tool_use") {
out.choices.push({
index: json.index ?? 0,
delta: {
tool_calls: [
{ index: json.index ?? 0, id: cb.id, type: "function", function: { name: cb.name, arguments: "" } },
],
},
finish_reason: null,
})
}
}
if (json.type === "content_block_delta") {
const d = json.delta
if (d?.type === "text_delta") {
out.choices.push({ index: json.index ?? 0, delta: { content: d.text }, finish_reason: null })
} else if (d?.type === "input_json_delta") {
out.choices.push({
index: json.index ?? 0,
delta: { tool_calls: [{ index: json.index ?? 0, function: { arguments: d.partial_json } }] },
finish_reason: null,
})
}
}
if (json.type === "message_delta") {
const d = json.delta
const finish_reason = (() => {
const r = d?.stop_reason
if (r === "end_turn") return "stop"
if (r === "tool_use") return "tool_calls"
if (r === "max_tokens") return "length"
if (r === "content_filter") return "content_filter"
return null
})()
out.choices.push({ index: 0, delta: {}, finish_reason })
}
if (json.usage) {
const u = json.usage
out.usage = {
prompt_tokens: u.input_tokens,
completion_tokens: u.output_tokens,
total_tokens: (u.input_tokens || 0) + (u.output_tokens || 0),
...(u.cache_read_input_tokens ? { prompt_tokens_details: { cached_tokens: u.cache_read_input_tokens } } : {}),
}
}
return out
}
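// Conversion sketch: a text delta event lifted into the common chunk shape (payload is illustrative).
const exampleTextDelta = fromAnthropicChunk(
  'event: content_block_delta\ndata: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"Hi"}}',
)
// For this input the result is an object: exampleTextDelta.choices[0].delta.content === "Hi"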
export function toAnthropicChunk(chunk: CommonChunk): string {
if (!chunk.choices || !Array.isArray(chunk.choices) || chunk.choices.length === 0) {
return JSON.stringify({})
}
const choice = chunk.choices[0]
const delta = choice.delta
if (!delta) return JSON.stringify({})
const result: any = {}
if (delta.content) {
result.type = "content_block_delta"
result.index = 0
result.delta = { type: "text_delta", text: delta.content }
}
if (delta.tool_calls) {
for (const tc of delta.tool_calls) {
if (tc.function?.name) {
result.type = "content_block_start"
result.index = tc.index ?? 0
result.content_block = { type: "tool_use", id: tc.id, name: tc.function.name, input: {} }
} else if (tc.function?.arguments) {
result.type = "content_block_delta"
result.index = tc.index ?? 0
result.delta = { type: "input_json_delta", partial_json: tc.function.arguments }
}
}
}
if (choice.finish_reason) {
const stop_reason = (() => {
const r = choice.finish_reason
if (r === "stop") return "end_turn"
if (r === "tool_calls") return "tool_use"
if (r === "length") return "max_tokens"
if (r === "content_filter") return "content_filter"
return null
})()
result.type = "message_delta"
result.delta = { stop_reason, stop_sequence: null }
}
if (chunk.usage) {
const u = chunk.usage
result.usage = {
input_tokens: u.prompt_tokens,
output_tokens: u.completion_tokens,
cache_read_input_tokens: u.prompt_tokens_details?.cached_tokens,
}
}
return JSON.stringify(result)
}
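// Conversion sketch: an OpenAI-style chat request mapped onto Anthropic's Messages shape
// (prompt text is a placeholder).
const exampleAnthropicBody = toAnthropicRequest({
  max_tokens: 256,
  messages: [
    { role: "system", content: "You are terse." },
    { role: "user", content: "Hello" },
  ],
  stream: false,
})
// exampleAnthropicBody.system -> [{ type: "text", text: "You are terse.", cache_control: { type: "ephemeral" } }]
// exampleAnthropicBody.messages -> [{ role: "user", content: [{ type: "text", text: "Hello", cache_control: ... }] }]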

View file

@@ -0,0 +1,541 @@
import { ProviderHelper, CommonRequest, CommonResponse, CommonChunk } from "./provider"
type Usage = {
prompt_tokens?: number
completion_tokens?: number
total_tokens?: number
// used by moonshot
cached_tokens?: number
// used by xai
prompt_tokens_details?: {
text_tokens?: number
audio_tokens?: number
image_tokens?: number
cached_tokens?: number
}
completion_tokens_details?: {
reasoning_tokens?: number
audio_tokens?: number
accepted_prediction_tokens?: number
rejected_prediction_tokens?: number
}
}
export const oaCompatHelper = {
format: "oa-compat",
modifyUrl: (providerApi: string) => providerApi + "/chat/completions",
modifyHeaders: (headers: Headers, body: Record<string, any>, apiKey: string) => {
headers.set("authorization", `Bearer ${apiKey}`)
},
modifyBody: (body: Record<string, any>) => {
return {
...body,
...(body.stream ? { stream_options: { include_usage: true } } : {}),
}
},
createUsageParser: () => {
let usage: Usage
return {
parse: (chunk: string) => {
if (!chunk.startsWith("data: ")) return
let json
try {
json = JSON.parse(chunk.slice(6)) as { usage?: Usage }
} catch (e) {
return
}
if (!json.usage) return
usage = json.usage
},
retrieve: () => usage,
}
},
normalizeUsage: (usage: Usage) => {
const inputTokens = usage.prompt_tokens ?? 0
const outputTokens = usage.completion_tokens ?? 0
const reasoningTokens = usage.completion_tokens_details?.reasoning_tokens ?? undefined
const cacheReadTokens = usage.cached_tokens ?? usage.prompt_tokens_details?.cached_tokens ?? undefined
return {
inputTokens: inputTokens - (cacheReadTokens ?? 0),
outputTokens,
reasoningTokens,
cacheReadTokens,
cacheWrite5mTokens: undefined,
cacheWrite1hTokens: undefined,
}
},
} satisfies ProviderHelper
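// Usage sketch: OpenAI-compatible providers report usage on a trailing chunk once
// stream_options.include_usage is set (added by modifyBody above); the payload is illustrative.
const exampleParser = oaCompatHelper.createUsageParser()
exampleParser.parse(
  'data: {"choices":[],"usage":{"prompt_tokens":100,"completion_tokens":20,"prompt_tokens_details":{"cached_tokens":80}}}',
)
const exampleUsage = oaCompatHelper.normalizeUsage(exampleParser.retrieve())
// exampleUsage.inputTokens === 20 (cached reads are subtracted), exampleUsage.cacheReadTokens === 80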
export function fromOaCompatibleRequest(body: any): CommonRequest {
if (!body || typeof body !== "object") return body
const msgsIn = Array.isArray(body.messages) ? body.messages : []
const msgsOut: any[] = []
for (const m of msgsIn) {
if (!m || !m.role) continue
if (m.role === "system") {
if (typeof m.content === "string" && m.content.length > 0) msgsOut.push({ role: "system", content: m.content })
continue
}
if (m.role === "user") {
if (typeof m.content === "string") {
msgsOut.push({ role: "user", content: m.content })
} else if (Array.isArray(m.content)) {
const parts: any[] = []
for (const p of m.content) {
if (!p || !p.type) continue
if (p.type === "text" && typeof p.text === "string") parts.push({ type: "text", text: p.text })
if (p.type === "image_url") parts.push({ type: "image_url", image_url: p.image_url })
}
if (parts.length === 1 && parts[0].type === "text") msgsOut.push({ role: "user", content: parts[0].text })
else if (parts.length > 0) msgsOut.push({ role: "user", content: parts })
}
continue
}
if (m.role === "assistant") {
const out: any = { role: "assistant" }
if (typeof m.content === "string") out.content = m.content
if (Array.isArray(m.tool_calls)) out.tool_calls = m.tool_calls
msgsOut.push(out)
continue
}
if (m.role === "tool") {
msgsOut.push({ role: "tool", tool_call_id: m.tool_call_id, content: m.content })
continue
}
}
return {
max_tokens: body.max_tokens,
temperature: body.temperature,
top_p: body.top_p,
stop: body.stop,
messages: msgsOut,
stream: !!body.stream,
tools: Array.isArray(body.tools) ? body.tools : undefined,
tool_choice: body.tool_choice,
}
}
export function toOaCompatibleRequest(body: CommonRequest) {
if (!body || typeof body !== "object") return body
const msgsIn = Array.isArray(body.messages) ? body.messages : []
const msgsOut: any[] = []
const toImg = (p: any) => {
if (!p || typeof p !== "object") return undefined
if (p.type === "image_url" && p.image_url) return { type: "image_url", image_url: p.image_url }
const s = (p as any).source
if (!s || typeof s !== "object") return undefined
if (s.type === "url" && typeof s.url === "string") return { type: "image_url", image_url: { url: s.url } }
if (s.type === "base64" && typeof s.media_type === "string" && typeof s.data === "string")
return { type: "image_url", image_url: { url: `data:${s.media_type};base64,${s.data}` } }
return undefined
}
for (const m of msgsIn) {
if (!m || !m.role) continue
if (m.role === "system") {
if (typeof m.content === "string" && m.content.length > 0) msgsOut.push({ role: "system", content: m.content })
continue
}
if (m.role === "user") {
if (typeof m.content === "string") {
msgsOut.push({ role: "user", content: m.content })
continue
}
if (Array.isArray(m.content)) {
const parts: any[] = []
for (const p of m.content) {
if (!p || !p.type) continue
if (p.type === "text" && typeof p.text === "string") parts.push({ type: "text", text: p.text })
const ip = toImg(p)
if (ip) parts.push(ip)
}
if (parts.length === 1 && parts[0].type === "text") msgsOut.push({ role: "user", content: parts[0].text })
else if (parts.length > 0) msgsOut.push({ role: "user", content: parts })
}
continue
}
if (m.role === "assistant") {
const out: any = { role: "assistant" }
if (typeof m.content === "string") out.content = m.content
if (Array.isArray(m.tool_calls)) out.tool_calls = m.tool_calls
msgsOut.push(out)
continue
}
if (m.role === "tool") {
msgsOut.push({ role: "tool", tool_call_id: m.tool_call_id, content: m.content })
continue
}
}
const tools = Array.isArray(body.tools)
? body.tools.map((tool: any) => ({
type: "function",
function: {
name: tool.name,
description: tool.description,
parameters: tool.parameters,
},
}))
: undefined
return {
model: body.model,
max_tokens: body.max_tokens,
temperature: body.temperature,
top_p: body.top_p,
stop: body.stop,
messages: msgsOut,
stream: !!body.stream,
tools,
tool_choice: body.tool_choice,
response_format: (body as any).response_format,
}
}
export function fromOaCompatibleResponse(resp: any): CommonResponse {
if (!resp || typeof resp !== "object") return resp
if (!Array.isArray((resp as any).choices)) return resp
const choice = (resp as any).choices[0]
if (!choice) return resp
const message = choice.message
if (!message) return resp
const content: any[] = []
if (typeof message.content === "string" && message.content.length > 0) {
content.push({ type: "text", text: message.content })
}
if (Array.isArray(message.tool_calls)) {
for (const toolCall of message.tool_calls) {
if (toolCall.type === "function" && toolCall.function) {
let input
try {
input = JSON.parse(toolCall.function.arguments)
} catch {
input = toolCall.function.arguments
}
content.push({
type: "tool_use",
id: toolCall.id,
name: toolCall.function.name,
input,
})
}
}
}
const stopReason = (() => {
const reason = choice.finish_reason
if (reason === "stop") return "stop"
if (reason === "tool_calls") return "tool_calls"
if (reason === "length") return "length"
if (reason === "content_filter") return "content_filter"
return null
})()
const usage = (() => {
const u = (resp as any).usage
if (!u) return undefined
return {
prompt_tokens: u.prompt_tokens,
completion_tokens: u.completion_tokens,
total_tokens: u.total_tokens,
...(u.prompt_tokens_details?.cached_tokens
? { prompt_tokens_details: { cached_tokens: u.prompt_tokens_details.cached_tokens } }
: {}),
}
})()
return {
id: (resp as any).id,
object: "chat.completion" as const,
created: Math.floor(Date.now() / 1000),
model: (resp as any).model,
choices: [
{
index: 0,
message: {
role: "assistant" as const,
...(content.length > 0 && content.some((c) => c.type === "text")
? {
content: content
.filter((c) => c.type === "text")
.map((c: any) => c.text)
.join(""),
}
: {}),
...(content.length > 0 && content.some((c) => c.type === "tool_use")
? {
tool_calls: content
.filter((c) => c.type === "tool_use")
.map((c: any) => ({
id: c.id,
type: "function" as const,
function: {
name: c.name,
arguments: typeof c.input === "string" ? c.input : JSON.stringify(c.input),
},
})),
}
: {}),
},
finish_reason: stopReason,
},
],
...(usage ? { usage } : {}),
}
}
export function toOaCompatibleResponse(resp: CommonResponse) {
if (!resp || typeof resp !== "object") return resp
if (Array.isArray((resp as any).choices)) return resp
const isAnthropic = typeof (resp as any).type === "string" && (resp as any).type === "message"
if (!isAnthropic) return resp
const idIn = (resp as any).id
const id =
typeof idIn === "string" ? idIn.replace(/^msg_/, "chatcmpl_") : `chatcmpl_${Math.random().toString(36).slice(2)}`
const model = (resp as any).model
const blocks: any[] = Array.isArray((resp as any).content) ? (resp as any).content : []
const text = blocks
.filter((b) => b && b.type === "text" && typeof b.text === "string")
.map((b) => b.text)
.join("")
const tcs = blocks
.filter((b) => b && b.type === "tool_use")
.map((b) => {
const name = (b as any).name
const args = (() => {
const inp = (b as any).input
if (typeof inp === "string") return inp
try {
return JSON.stringify(inp ?? {})
} catch {
return String(inp ?? "")
}
})()
const tid =
typeof (b as any).id === "string" && (b as any).id.length > 0
? (b as any).id
: `toolu_${Math.random().toString(36).slice(2)}`
return { id: tid, type: "function" as const, function: { name, arguments: args } }
})
const finish = (r: string | null) => {
if (r === "end_turn") return "stop"
if (r === "tool_use") return "tool_calls"
if (r === "max_tokens") return "length"
if (r === "content_filter") return "content_filter"
return null
}
const u = (resp as any).usage
const usage = (() => {
if (!u) return undefined as any
const pt = typeof u.input_tokens === "number" ? u.input_tokens : undefined
const ct = typeof u.output_tokens === "number" ? u.output_tokens : undefined
const total = pt != null && ct != null ? pt + ct : undefined
const cached = typeof u.cache_read_input_tokens === "number" ? u.cache_read_input_tokens : undefined
const details = cached != null ? { cached_tokens: cached } : undefined
return {
prompt_tokens: pt,
completion_tokens: ct,
total_tokens: total,
...(details ? { prompt_tokens_details: details } : {}),
}
})()
return {
id,
object: "chat.completion",
created: Math.floor(Date.now() / 1000),
model,
choices: [
{
index: 0,
message: {
role: "assistant",
...(text && text.length > 0 ? { content: text } : {}),
...(tcs.length > 0 ? { tool_calls: tcs } : {}),
},
finish_reason: finish((resp as any).stop_reason ?? null),
},
],
...(usage ? { usage } : {}),
}
}
export function fromOaCompatibleChunk(chunk: string): CommonChunk | string {
if (!chunk.startsWith("data: ")) return chunk
let json
try {
json = JSON.parse(chunk.slice(6))
} catch {
return chunk
}
if (!json.choices || !Array.isArray(json.choices) || json.choices.length === 0) {
return chunk
}
const choice = json.choices[0]
const delta = choice.delta
if (!delta) return chunk
const result: CommonChunk = {
id: json.id ?? "",
object: "chat.completion.chunk",
created: json.created ?? Math.floor(Date.now() / 1000),
model: json.model ?? "",
choices: [],
}
if (delta.content) {
result.choices.push({
index: choice.index ?? 0,
delta: { content: delta.content },
finish_reason: null,
})
}
if (delta.tool_calls) {
for (const toolCall of delta.tool_calls) {
result.choices.push({
index: choice.index ?? 0,
delta: {
tool_calls: [
{
index: toolCall.index ?? 0,
id: toolCall.id,
type: toolCall.type ?? "function",
function: toolCall.function,
},
],
},
finish_reason: null,
})
}
}
if (choice.finish_reason) {
result.choices.push({
index: choice.index ?? 0,
delta: {},
finish_reason: choice.finish_reason,
})
}
if (json.usage) {
const usage = json.usage
result.usage = {
prompt_tokens: usage.prompt_tokens,
completion_tokens: usage.completion_tokens,
total_tokens: usage.total_tokens,
...(usage.prompt_tokens_details?.cached_tokens
? { prompt_tokens_details: { cached_tokens: usage.prompt_tokens_details.cached_tokens } }
: {}),
}
}
return result
}
export function toOaCompatibleChunk(chunk: CommonChunk): string {
const result: any = {
id: chunk.id,
object: "chat.completion.chunk",
created: chunk.created,
model: chunk.model,
choices: [],
}
if (!chunk.choices || chunk.choices.length === 0) {
return `data: ${JSON.stringify(result)}`
}
const choice = chunk.choices[0]
const delta = choice.delta
if (delta?.role) {
result.choices.push({
index: choice.index,
delta: { role: delta.role },
finish_reason: null,
})
}
if (delta?.content) {
result.choices.push({
index: choice.index,
delta: { content: delta.content },
finish_reason: null,
})
}
if (delta?.tool_calls) {
for (const tc of delta.tool_calls) {
result.choices.push({
index: choice.index,
delta: {
tool_calls: [
{
index: tc.index,
id: tc.id,
type: tc.type,
function: tc.function,
},
],
},
finish_reason: null,
})
}
}
if (choice.finish_reason) {
result.choices.push({
index: choice.index,
delta: {},
finish_reason: choice.finish_reason,
})
}
if (chunk.usage) {
result.usage = {
prompt_tokens: chunk.usage.prompt_tokens,
completion_tokens: chunk.usage.completion_tokens,
total_tokens: chunk.usage.total_tokens,
...(chunk.usage.prompt_tokens_details?.cached_tokens
? {
prompt_tokens_details: { cached_tokens: chunk.usage.prompt_tokens_details.cached_tokens },
}
: {}),
}
}
return `data: ${JSON.stringify(result)}`
}
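// Conversion sketch: one OpenAI-compatible SSE line lifted into the common chunk shape
// (payload is illustrative).
const exampleCommonChunk = fromOaCompatibleChunk(
  'data: {"id":"c1","model":"m","choices":[{"index":0,"delta":{"content":"Hi"}}]}',
)
// For this input the result is an object: exampleCommonChunk.choices[0].delta.content === "Hi"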

View file

@@ -0,0 +1,600 @@
import { ProviderHelper, CommonRequest, CommonResponse, CommonChunk } from "./provider"
type Usage = {
input_tokens?: number
input_tokens_details?: {
cached_tokens?: number
}
output_tokens?: number
output_tokens_details?: {
reasoning_tokens?: number
}
total_tokens?: number
}
export const openaiHelper = {
format: "openai",
modifyUrl: (providerApi: string) => providerApi + "/responses",
modifyHeaders: (headers: Headers, body: Record<string, any>, apiKey: string) => {
headers.set("authorization", `Bearer ${apiKey}`)
},
modifyBody: (body: Record<string, any>) => {
return body
},
createUsageParser: () => {
let usage: Usage
return {
parse: (chunk: string) => {
const [event, data] = chunk.split("\n")
if (event !== "event: response.completed") return
if (!data?.startsWith("data: ")) return
let json
try {
json = JSON.parse(data.slice(6)) as { response?: { usage?: Usage } }
} catch (e) {
return
}
if (!json.response?.usage) return
usage = json.response.usage
},
retrieve: () => usage,
}
},
normalizeUsage: (usage: Usage) => {
const inputTokens = usage.input_tokens ?? 0
const outputTokens = usage.output_tokens ?? 0
const reasoningTokens = usage.output_tokens_details?.reasoning_tokens ?? undefined
const cacheReadTokens = usage.input_tokens_details?.cached_tokens ?? undefined
return {
inputTokens: inputTokens - (cacheReadTokens ?? 0),
outputTokens: outputTokens - (reasoningTokens ?? 0),
reasoningTokens,
cacheReadTokens,
cacheWrite5mTokens: undefined,
cacheWrite1hTokens: undefined,
}
},
} satisfies ProviderHelper
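// Usage sketch: the Responses API reports usage on the final "response.completed" event;
// the payload below is illustrative.
const exampleParser = openaiHelper.createUsageParser()
exampleParser.parse(
  'event: response.completed\ndata: {"response":{"usage":{"input_tokens":50,"output_tokens":12,"output_tokens_details":{"reasoning_tokens":4}}}}',
)
const exampleUsage = openaiHelper.normalizeUsage(exampleParser.retrieve())
// exampleUsage.inputTokens === 50, exampleUsage.outputTokens === 8, exampleUsage.reasoningTokens === 4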
export function fromOpenaiRequest(body: any): CommonRequest {
if (!body || typeof body !== "object") return body
const toImg = (p: any) => {
if (!p || typeof p !== "object") return undefined
if ((p as any).type === "image_url" && (p as any).image_url)
return { type: "image_url", image_url: (p as any).image_url }
if ((p as any).type === "input_image" && (p as any).image_url)
return { type: "image_url", image_url: (p as any).image_url }
const s = (p as any).source
if (!s || typeof s !== "object") return undefined
if ((s as any).type === "url" && typeof (s as any).url === "string")
return { type: "image_url", image_url: { url: (s as any).url } }
if (
(s as any).type === "base64" &&
typeof (s as any).media_type === "string" &&
typeof (s as any).data === "string"
)
return { type: "image_url", image_url: { url: `data:${(s as any).media_type};base64,${(s as any).data}` } }
return undefined
}
const msgs: any[] = []
const inMsgs = Array.isArray(body.input) ? body.input : Array.isArray(body.messages) ? body.messages : []
for (const m of inMsgs) {
if (!m) continue
// Responses API items without role:
if (!(m as any).role && (m as any).type) {
if ((m as any).type === "function_call") {
const name = (m as any).name
const a = (m as any).arguments
const args = typeof a === "string" ? a : JSON.stringify(a ?? {})
msgs.push({
role: "assistant",
tool_calls: [{ id: (m as any).id, type: "function", function: { name, arguments: args } }],
})
}
if ((m as any).type === "function_call_output") {
const id = (m as any).call_id
const out = (m as any).output
const content = typeof out === "string" ? out : JSON.stringify(out)
msgs.push({ role: "tool", tool_call_id: id, content })
}
continue
}
if ((m as any).role === "system" || (m as any).role === "developer") {
const c = (m as any).content
if (typeof c === "string" && c.length > 0) msgs.push({ role: "system", content: c })
if (Array.isArray(c)) {
const t = c.find((p: any) => p && typeof p.text === "string")
if (t && typeof t.text === "string" && t.text.length > 0) msgs.push({ role: "system", content: t.text })
}
continue
}
if ((m as any).role === "user") {
const c = (m as any).content
if (typeof c === "string") {
msgs.push({ role: "user", content: c })
} else if (Array.isArray(c)) {
const parts: any[] = []
for (const p of c) {
if (!p || !(p as any).type) continue
if (((p as any).type === "text" || (p as any).type === "input_text") && typeof (p as any).text === "string")
parts.push({ type: "text", text: (p as any).text })
const ip = toImg(p)
if (ip) parts.push(ip)
if ((p as any).type === "tool_result") {
const id = (p as any).tool_call_id
const content =
typeof (p as any).content === "string" ? (p as any).content : JSON.stringify((p as any).content)
msgs.push({ role: "tool", tool_call_id: id, content })
}
}
if (parts.length === 1 && parts[0].type === "text") msgs.push({ role: "user", content: parts[0].text })
else if (parts.length > 0) msgs.push({ role: "user", content: parts })
}
continue
}
if ((m as any).role === "assistant") {
const c = (m as any).content
const out: any = { role: "assistant" }
if (typeof c === "string" && c.length > 0) out.content = c
if (Array.isArray((m as any).tool_calls)) out.tool_calls = (m as any).tool_calls
msgs.push(out)
continue
}
if ((m as any).role === "tool") {
msgs.push({ role: "tool", tool_call_id: (m as any).tool_call_id, content: (m as any).content })
continue
}
}
const tcIn = body.tool_choice
const tc = (() => {
if (!tcIn) return undefined
if (tcIn === "auto") return "auto"
if (tcIn === "required") return "required"
if ((tcIn as any).type === "function" && (tcIn as any).function?.name)
return { type: "function" as const, function: { name: (tcIn as any).function.name } }
return undefined
})()
const stop = (() => {
const v = body.stop_sequences ?? body.stop
if (!v) return undefined
if (Array.isArray(v)) return v.length === 1 ? v[0] : v
if (typeof v === "string") return v
return undefined
})()
return {
max_tokens: body.max_output_tokens ?? body.max_tokens,
temperature: body.temperature,
top_p: body.top_p,
stop,
messages: msgs,
stream: !!body.stream,
tools: Array.isArray(body.tools) ? body.tools : undefined,
tool_choice: tc,
}
}
export function toOpenaiRequest(body: CommonRequest) {
if (!body || typeof body !== "object") return body
const msgsIn = Array.isArray(body.messages) ? body.messages : []
const input: any[] = []
const toPart = (p: any) => {
if (!p || typeof p !== "object") return undefined
if ((p as any).type === "text" && typeof (p as any).text === "string")
return { type: "input_text", text: (p as any).text }
if ((p as any).type === "image_url" && (p as any).image_url)
return { type: "input_image", image_url: (p as any).image_url }
const s = (p as any).source
if (!s || typeof s !== "object") return undefined
if ((s as any).type === "url" && typeof (s as any).url === "string")
return { type: "input_image", image_url: { url: (s as any).url } }
if (
(s as any).type === "base64" &&
typeof (s as any).media_type === "string" &&
typeof (s as any).data === "string"
)
return { type: "input_image", image_url: { url: `data:${(s as any).media_type};base64,${(s as any).data}` } }
return undefined
}
for (const m of msgsIn) {
if (!m || !(m as any).role) continue
if ((m as any).role === "system") {
const c = (m as any).content
if (typeof c === "string") input.push({ role: "system", content: c })
continue
}
if ((m as any).role === "user") {
const c = (m as any).content
if (typeof c === "string") {
input.push({ role: "user", content: [{ type: "input_text", text: c }] })
} else if (Array.isArray(c)) {
const parts: any[] = []
for (const p of c) {
const op = toPart(p)
if (op) parts.push(op)
}
if (parts.length > 0) input.push({ role: "user", content: parts })
}
continue
}
if ((m as any).role === "assistant") {
const c = (m as any).content
if (typeof c === "string" && c.length > 0) {
input.push({ role: "assistant", content: [{ type: "output_text", text: c }] })
}
if (Array.isArray((m as any).tool_calls)) {
for (const tc of (m as any).tool_calls) {
if ((tc as any).type === "function" && (tc as any).function) {
const name = (tc as any).function.name
const a = (tc as any).function.arguments
const args = typeof a === "string" ? a : JSON.stringify(a)
input.push({ type: "function_call", call_id: (tc as any).id, name, arguments: args })
}
}
}
continue
}
if ((m as any).role === "tool") {
const out = typeof (m as any).content === "string" ? (m as any).content : JSON.stringify((m as any).content)
input.push({ type: "function_call_output", call_id: (m as any).tool_call_id, output: out })
continue
}
}
const stop_sequences = (() => {
const v = body.stop
if (!v) return undefined
if (Array.isArray(v)) return v
if (typeof v === "string") return [v]
return undefined
})()
const tcIn = body.tool_choice
const tool_choice = (() => {
if (!tcIn) return undefined
if (tcIn === "auto") return "auto"
if (tcIn === "required") return "required"
if ((tcIn as any).type === "function" && (tcIn as any).function?.name)
return { type: "function", function: { name: (tcIn as any).function.name } }
return undefined
})()
const tools = (() => {
if (!Array.isArray(body.tools)) return undefined
return body.tools.map((tool: any) => {
if (tool.type === "function") {
return {
type: "function",
name: tool.function?.name,
description: tool.function?.description,
parameters: tool.function?.parameters,
strict: tool.function?.strict,
}
}
return tool
})
})()
return {
model: body.model,
input,
max_output_tokens: body.max_tokens,
top_p: body.top_p,
stop_sequences,
stream: !!body.stream,
tools,
tool_choice,
include: Array.isArray((body as any).include) ? (body as any).include : undefined,
truncation: (body as any).truncation,
metadata: (body as any).metadata,
store: (body as any).store,
user: (body as any).user,
text: { verbosity: "low" },
reasoning: { effort: "medium" },
}
}
export function fromOpenaiResponse(resp: any): CommonResponse {
if (!resp || typeof resp !== "object") return resp
if (Array.isArray((resp as any).choices)) return resp
const r = (resp as any).response ?? resp
if (!r || typeof r !== "object") return resp
const idIn = (r as any).id
const id =
typeof idIn === "string" ? idIn.replace(/^resp_/, "chatcmpl_") : `chatcmpl_${Math.random().toString(36).slice(2)}`
const model = (r as any).model ?? (resp as any).model
const out = Array.isArray((r as any).output) ? (r as any).output : []
const text = out
.filter((o: any) => o && o.type === "message" && Array.isArray((o as any).content))
.flatMap((o: any) => (o as any).content)
.filter((p: any) => p && p.type === "output_text" && typeof p.text === "string")
.map((p: any) => p.text)
.join("")
const tcs = out
.filter((o: any) => o && o.type === "function_call")
.map((o: any) => {
const name = (o as any).name
const a = (o as any).arguments
const args = typeof a === "string" ? a : JSON.stringify(a ?? {})
const tid =
typeof (o as any).id === "string" && (o as any).id.length > 0
? (o as any).id
: `toolu_${Math.random().toString(36).slice(2)}`
return { id: tid, type: "function" as const, function: { name, arguments: args } }
})
const finish = (r: string | null) => {
if (r === "stop") return "stop"
if (r === "tool_call" || r === "tool_calls") return "tool_calls"
if (r === "length" || r === "max_output_tokens") return "length"
if (r === "content_filter") return "content_filter"
return null
}
const u = (r as any).usage ?? (resp as any).usage
const usage = (() => {
if (!u) return undefined as any
const pt = typeof (u as any).input_tokens === "number" ? (u as any).input_tokens : undefined
const ct = typeof (u as any).output_tokens === "number" ? (u as any).output_tokens : undefined
const total = pt != null && ct != null ? pt + ct : undefined
const cached = (u as any).input_tokens_details?.cached_tokens
const details = typeof cached === "number" ? { cached_tokens: cached } : undefined
return {
prompt_tokens: pt,
completion_tokens: ct,
total_tokens: total,
...(details ? { prompt_tokens_details: details } : {}),
}
})()
return {
id,
object: "chat.completion",
created: Math.floor(Date.now() / 1000),
model,
choices: [
{
index: 0,
message: {
role: "assistant",
...(text && text.length > 0 ? { content: text } : {}),
...(tcs.length > 0 ? { tool_calls: tcs } : {}),
},
finish_reason: finish((r as any).stop_reason ?? null),
},
],
...(usage ? { usage } : {}),
}
}
export function toOpenaiResponse(resp: CommonResponse) {
if (!resp || typeof resp !== "object") return resp
if (!Array.isArray((resp as any).choices)) return resp
const choice = (resp as any).choices[0]
if (!choice) return resp
const msg = choice.message
if (!msg) return resp
const outputItems: any[] = []
if (typeof msg.content === "string" && msg.content.length > 0) {
outputItems.push({
id: `msg_${Math.random().toString(36).slice(2)}`,
type: "message",
status: "completed",
role: "assistant",
content: [{ type: "output_text", text: msg.content, annotations: [], logprobs: [] }],
})
}
if (Array.isArray(msg.tool_calls)) {
for (const tc of msg.tool_calls) {
if ((tc as any).type === "function" && (tc as any).function) {
outputItems.push({
id: (tc as any).id,
type: "function_call",
name: (tc as any).function.name,
call_id: (tc as any).id,
arguments: (tc as any).function.arguments,
})
}
}
}
const stop_reason = (() => {
const r = choice.finish_reason
if (r === "stop") return "stop"
if (r === "tool_calls") return "tool_call"
if (r === "length") return "max_output_tokens"
if (r === "content_filter") return "content_filter"
return null
})()
const usage = (() => {
const u = (resp as any).usage
if (!u) return undefined
return {
input_tokens: u.prompt_tokens,
output_tokens: u.completion_tokens,
total_tokens: u.total_tokens,
...(u.prompt_tokens_details?.cached_tokens
? { input_tokens_details: { cached_tokens: u.prompt_tokens_details.cached_tokens } }
: {}),
}
})()
return {
id: (resp as any).id?.replace(/^chatcmpl_/, "resp_") ?? `resp_${Math.random().toString(36).slice(2)}`,
object: "response",
model: (resp as any).model,
output: outputItems,
stop_reason,
usage,
}
}
export function fromOpenaiChunk(chunk: string): CommonChunk | string {
const lines = chunk.split("\n")
const ev = lines[0]
const dl = lines[1]
if (!ev || !dl || !dl.startsWith("data: ")) return chunk
let json: any
try {
json = JSON.parse(dl.slice(6))
} catch {
return chunk
}
const respObj = json.response ?? {}
const out: CommonChunk = {
id: respObj.id ?? json.id ?? "",
object: "chat.completion.chunk",
created: Math.floor(Date.now() / 1000),
model: respObj.model ?? json.model ?? "",
choices: [],
}
const e = ev.replace("event: ", "").trim()
if (e === "response.output_text.delta") {
const d = (json as any).delta ?? (json as any).text ?? (json as any).output_text_delta
if (typeof d === "string" && d.length > 0)
out.choices.push({ index: 0, delta: { content: d }, finish_reason: null })
}
if (e === "response.output_item.added" && (json as any).item?.type === "function_call") {
const name = (json as any).item?.name
const id = (json as any).item?.id
if (typeof name === "string" && name.length > 0) {
out.choices.push({
index: 0,
delta: { tool_calls: [{ index: 0, id, type: "function", function: { name, arguments: "" } }] },
finish_reason: null,
})
}
}
if (e === "response.function_call_arguments.delta") {
const a = (json as any).delta ?? (json as any).arguments_delta
if (typeof a === "string" && a.length > 0) {
out.choices.push({
index: 0,
delta: { tool_calls: [{ index: 0, function: { arguments: a } }] },
finish_reason: null,
})
}
}
if (e === "response.completed") {
const fr = (() => {
const sr = (respObj as any).stop_reason ?? (json as any).stop_reason
if (sr === "stop") return "stop"
if (sr === "tool_call" || sr === "tool_calls") return "tool_calls"
if (sr === "length" || sr === "max_output_tokens") return "length"
if (sr === "content_filter") return "content_filter"
return null
})()
out.choices.push({ index: 0, delta: {}, finish_reason: fr })
const u = (respObj as any).usage ?? (json as any).response?.usage
if (u) {
out.usage = {
prompt_tokens: u.input_tokens,
completion_tokens: u.output_tokens,
total_tokens: (u.input_tokens || 0) + (u.output_tokens || 0),
...(u.input_tokens_details?.cached_tokens
? { prompt_tokens_details: { cached_tokens: u.input_tokens_details.cached_tokens } }
: {}),
}
}
}
return out
}
export function toOpenaiChunk(chunk: CommonChunk): string {
if (!chunk.choices || !Array.isArray(chunk.choices) || chunk.choices.length === 0) {
return ""
}
const choice = chunk.choices[0]
const d = choice.delta
if (!d) return ""
const id = chunk.id
const model = chunk.model
if (d.content) {
const data = { id, type: "response.output_text.delta", delta: d.content, response: { id, model } }
return `event: response.output_text.delta\ndata: ${JSON.stringify(data)}`
}
if (d.tool_calls) {
for (const tc of d.tool_calls) {
if (tc.function?.name) {
const data = {
type: "response.output_item.added",
output_index: 0,
item: { id: tc.id, type: "function_call", name: tc.function.name, call_id: tc.id, arguments: "" },
}
return `event: response.output_item.added\ndata: ${JSON.stringify(data)}`
}
if (tc.function?.arguments) {
const data = {
type: "response.function_call_arguments.delta",
output_index: 0,
delta: tc.function.arguments,
}
return `event: response.function_call_arguments.delta\ndata: ${JSON.stringify(data)}`
}
}
}
if (choice.finish_reason) {
const u = chunk.usage
const usage = u
? {
input_tokens: u.prompt_tokens,
output_tokens: u.completion_tokens,
total_tokens: u.total_tokens,
...(u.prompt_tokens_details?.cached_tokens
? { input_tokens_details: { cached_tokens: u.prompt_tokens_details.cached_tokens } }
: {}),
}
: undefined
const data: any = { id, type: "response.completed", response: { id, model, ...(usage ? { usage } : {}) } }
return `event: response.completed\ndata: ${JSON.stringify(data)}`
}
return ""
}
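// Conversion sketch: a common chat request mapped onto the Responses API input format
// (prompt text is a placeholder; the verbosity/effort defaults come from toOpenaiRequest above).
const exampleResponsesBody = toOpenaiRequest({
  max_tokens: 512,
  messages: [{ role: "user", content: "Summarize this repo" }],
  stream: true,
})
// exampleResponsesBody.input -> [{ role: "user", content: [{ type: "input_text", text: "Summarize this repo" }] }]
// exampleResponsesBody.max_output_tokens === 512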

View file

@@ -0,0 +1,207 @@
import { Format } from "../format"
import {
fromAnthropicChunk,
fromAnthropicRequest,
fromAnthropicResponse,
toAnthropicChunk,
toAnthropicRequest,
toAnthropicResponse,
} from "./anthropic"
import {
fromOpenaiChunk,
fromOpenaiRequest,
fromOpenaiResponse,
toOpenaiChunk,
toOpenaiRequest,
toOpenaiResponse,
} from "./openai"
import {
fromOaCompatibleChunk,
fromOaCompatibleRequest,
fromOaCompatibleResponse,
toOaCompatibleChunk,
toOaCompatibleRequest,
toOaCompatibleResponse,
} from "./openai-compatible"
export type ProviderHelper = {
format: Format
modifyUrl: (providerApi: string) => string
modifyHeaders: (headers: Headers, body: Record<string, any>, apiKey: string) => void
modifyBody: (body: Record<string, any>) => Record<string, any>
createUsageParser: () => {
parse: (chunk: string) => void
retrieve: () => any
}
normalizeUsage: (usage: any) => {
inputTokens: number
outputTokens: number
reasoningTokens?: number
cacheReadTokens?: number
cacheWrite5mTokens?: number
cacheWrite1hTokens?: number
}
}
export interface CommonMessage {
role: "system" | "user" | "assistant" | "tool"
content?: string | Array<CommonContentPart>
tool_call_id?: string
tool_calls?: CommonToolCall[]
}
export interface CommonContentPart {
type: "text" | "image_url"
text?: string
image_url?: { url: string }
}
export interface CommonToolCall {
id: string
type: "function"
function: {
name: string
arguments: string
}
}
export interface CommonTool {
type: "function"
function: {
name: string
description?: string
parameters?: Record<string, any>
}
}
export interface CommonUsage {
input_tokens?: number
output_tokens?: number
total_tokens?: number
prompt_tokens?: number
completion_tokens?: number
cache_read_input_tokens?: number
cache_creation?: {
ephemeral_5m_input_tokens?: number
ephemeral_1h_input_tokens?: number
}
input_tokens_details?: {
cached_tokens?: number
}
output_tokens_details?: {
reasoning_tokens?: number
}
}
export interface CommonRequest {
model?: string
max_tokens?: number
temperature?: number
top_p?: number
stop?: string | string[]
messages: CommonMessage[]
stream?: boolean
tools?: CommonTool[]
tool_choice?: "auto" | "required" | { type: "function"; function: { name: string } }
}
export interface CommonResponse {
id: string
object: "chat.completion"
created: number
model: string
choices: Array<{
index: number
message: {
role: "assistant"
content?: string
tool_calls?: CommonToolCall[]
}
finish_reason: "stop" | "tool_calls" | "length" | "content_filter" | null
}>
usage?: {
prompt_tokens?: number
completion_tokens?: number
total_tokens?: number
prompt_tokens_details?: { cached_tokens?: number }
}
}
export interface CommonChunk {
id: string
object: "chat.completion.chunk"
created: number
model: string
choices: Array<{
index: number
delta: {
role?: "assistant"
content?: string
tool_calls?: Array<{
index: number
id?: string
type?: "function"
function?: {
name?: string
arguments?: string
}
}>
}
finish_reason: "stop" | "tool_calls" | "length" | "content_filter" | null
}>
usage?: {
prompt_tokens?: number
completion_tokens?: number
total_tokens?: number
prompt_tokens_details?: { cached_tokens?: number }
}
}
export function createBodyConverter(from: Format, to: Format) {
return (body: any): any => {
if (from === to) return body
let raw: CommonRequest
if (from === "anthropic") raw = fromAnthropicRequest(body)
else if (from === "openai") raw = fromOpenaiRequest(body)
else raw = fromOaCompatibleRequest(body)
if (to === "anthropic") return toAnthropicRequest(raw)
if (to === "openai") return toOpenaiRequest(raw)
if (to === "oa-compat") return toOaCompatibleRequest(raw)
}
}
export function createStreamPartConverter(from: Format, to: Format) {
return (part: any): any => {
if (from === to) return part
let raw: CommonChunk | string
if (from === "anthropic") raw = fromAnthropicChunk(part)
else if (from === "openai") raw = fromOpenaiChunk(part)
else raw = fromOaCompatibleChunk(part)
// If the chunk could not be parsed into the common shape, pass the raw string through unchanged
if (typeof raw === "string") return raw
if (to === "anthropic") return toAnthropicChunk(raw)
if (to === "openai") return toOpenaiChunk(raw)
if (to === "oa-compat") return toOaCompatibleChunk(raw)
}
}
export function createResponseConverter(from: Format, to: Format) {
return (response: any): any => {
if (from === to) return response
let raw: CommonResponse
if (from === "anthropic") raw = fromAnthropicResponse(response)
else if (from === "openai") raw = fromOpenaiResponse(response)
else raw = fromOaCompatibleResponse(response)
if (to === "anthropic") return toAnthropicResponse(raw)
if (to === "openai") return toOpenaiResponse(raw)
if (to === "oa-compat") return toOaCompatibleResponse(raw)
}
}
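// Wiring sketch: a proxy that accepts Anthropic-format requests and forwards them to an
// OpenAI-compatible upstream; the request body below is a placeholder.
const exampleConvertBody = createBodyConverter("anthropic", "oa-compat")
const exampleUpstreamBody = exampleConvertBody({
  max_tokens: 64,
  messages: [{ role: "user", content: [{ type: "text", text: "ping" }] }],
})
// exampleUpstreamBody.messages -> [{ role: "user", content: "ping" }]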

View file

@@ -1,63 +1,9 @@
import type { APIEvent } from "@solidjs/start/server"
import { handler } from "~/routes/zen/handler"
type Usage = {
prompt_tokens?: number
completion_tokens?: number
total_tokens?: number
// used by moonshot
cached_tokens?: number
// used by xai
prompt_tokens_details?: {
text_tokens?: number
audio_tokens?: number
image_tokens?: number
cached_tokens?: number
}
completion_tokens_details?: {
reasoning_tokens?: number
audio_tokens?: number
accepted_prediction_tokens?: number
rejected_prediction_tokens?: number
}
}
import { handler } from "~/routes/zen/util/handler"
export function POST(input: APIEvent) {
let usage: Usage
return handler(input, {
modifyBody: (body: any) => ({
...body,
...(body.stream ? { stream_options: { include_usage: true } } : {}),
}),
setAuthHeader: (headers: Headers, apiKey: string) => {
headers.set("authorization", `Bearer ${apiKey}`)
},
format: "oa-compat",
parseApiKey: (headers: Headers) => headers.get("authorization")?.split(" ")[1],
onStreamPart: (chunk: string) => {
if (!chunk.startsWith("data: ")) return
let json
try {
json = JSON.parse(chunk.slice(6)) as { usage?: Usage }
} catch (e) {
return
}
if (!json.usage) return
usage = json.usage
},
getStreamUsage: () => usage,
normalizeUsage: (usage: Usage) => {
const inputTokens = usage.prompt_tokens ?? 0
const outputTokens = usage.completion_tokens ?? 0
const reasoningTokens = usage.completion_tokens_details?.reasoning_tokens ?? undefined
const cacheReadTokens = usage.cached_tokens ?? usage.prompt_tokens_details?.cached_tokens ?? undefined
return {
inputTokens: inputTokens - (cacheReadTokens ?? 0),
outputTokens: outputTokens - (reasoningTokens ?? 0),
reasoningTokens,
cacheReadTokens,
}
},
})
}

View file

@@ -1,64 +1,9 @@
import type { APIEvent } from "@solidjs/start/server"
import { handler } from "~/routes/zen/handler"
type Usage = {
cache_creation?: {
ephemeral_5m_input_tokens?: number
ephemeral_1h_input_tokens?: number
}
cache_creation_input_tokens?: number
cache_read_input_tokens?: number
input_tokens?: number
output_tokens?: number
server_tool_use?: {
web_search_requests?: number
}
}
import { handler } from "~/routes/zen/util/handler"
export function POST(input: APIEvent) {
let usage: Usage
return handler(input, {
modifyBody: (body: any) => ({
...body,
service_tier: "standard_only",
}),
setAuthHeader: (headers: Headers, apiKey: string) => headers.set("x-api-key", apiKey),
format: "anthropic",
parseApiKey: (headers: Headers) => headers.get("x-api-key") ?? undefined,
onStreamPart: (chunk: string) => {
const data = chunk.split("\n")[1]
if (!data.startsWith("data: ")) return
let json
try {
json = JSON.parse(data.slice(6))
} catch (e) {
return
}
// ie. { type: "message_start"; message: { usage: Usage } }
// ie. { type: "message_delta"; usage: Usage }
const usageUpdate = json.usage ?? json.message?.usage
if (!usageUpdate) return
usage = {
...usage,
...usageUpdate,
cache_creation: {
...usage?.cache_creation,
...usageUpdate.cache_creation,
},
server_tool_use: {
...usage?.server_tool_use,
...usageUpdate.server_tool_use,
},
}
},
getStreamUsage: () => usage,
normalizeUsage: (usage: Usage) => ({
inputTokens: usage.input_tokens ?? 0,
outputTokens: usage.output_tokens ?? 0,
cacheReadTokens: usage.cache_read_input_tokens ?? undefined,
cacheWrite5mTokens: usage.cache_creation?.ephemeral_5m_input_tokens ?? undefined,
cacheWrite1hTokens: usage.cache_creation?.ephemeral_1h_input_tokens ?? undefined,
}),
})
}

View file

@@ -0,0 +1,60 @@
import type { APIEvent } from "@solidjs/start/server"
import { and, Database, eq, isNull } from "@opencode-ai/console-core/drizzle/index.js"
import { KeyTable } from "@opencode-ai/console-core/schema/key.sql.js"
import { WorkspaceTable } from "@opencode-ai/console-core/schema/workspace.sql.js"
import { ModelTable } from "@opencode-ai/console-core/schema/model.sql.js"
import { ZenData } from "@opencode-ai/console-core/model.js"
export async function OPTIONS(input: APIEvent) {
return new Response(null, {
status: 200,
headers: {
"Access-Control-Allow-Origin": "*",
"Access-Control-Allow-Methods": "GET, POST, OPTIONS",
"Access-Control-Allow-Headers": "Content-Type, Authorization",
},
})
}
export async function GET(input: APIEvent) {
const zenData = ZenData.list()
const disabledModels = await authenticate()
return new Response(
JSON.stringify({
object: "list",
data: Object.entries(zenData.models)
.filter(([id]) => !disabledModels.includes(id))
.map(([id, model]) => ({
id: `opencode/${id}`,
object: "model",
created: Math.floor(Date.now() / 1000),
owned_by: "opencode",
})),
}),
{
headers: {
"Content-Type": "application/json",
},
},
)
async function authenticate() {
const apiKey = input.request.headers.get("authorization")?.split(" ")[1]
if (!apiKey) return []
const disabledModels = await Database.use((tx) =>
tx
.select({
model: ModelTable.model,
})
.from(KeyTable)
.innerJoin(WorkspaceTable, eq(WorkspaceTable.id, KeyTable.workspaceID))
.leftJoin(ModelTable, and(eq(ModelTable.workspaceID, KeyTable.workspaceID), isNull(ModelTable.timeDeleted)))
.where(and(eq(KeyTable.key, apiKey), isNull(KeyTable.timeDeleted)))
.then((rows) => rows.map((row) => row.model)),
)
return disabledModels
}
}
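// Client-side sketch of calling this route; the URL and key below are placeholders, not a real endpoint.
fetch("https://example.com/zen/v1/models", {
  headers: { authorization: "Bearer <api-key>" },
})
  .then((r) => r.json())
  .then((list) => console.log(list.data.map((m: { id: string }) => m.id)))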

View file

@@ -1,52 +1,9 @@
import type { APIEvent } from "@solidjs/start/server"
import { handler } from "~/routes/zen/handler"
type Usage = {
input_tokens?: number
input_tokens_details?: {
cached_tokens?: number
}
output_tokens?: number
output_tokens_details?: {
reasoning_tokens?: number
}
total_tokens?: number
}
import { handler } from "~/routes/zen/util/handler"
export function POST(input: APIEvent) {
let usage: Usage
return handler(input, {
setAuthHeader: (headers: Headers, apiKey: string) => {
headers.set("authorization", `Bearer ${apiKey}`)
},
format: "openai",
parseApiKey: (headers: Headers) => headers.get("authorization")?.split(" ")[1],
onStreamPart: (chunk: string) => {
const [event, data] = chunk.split("\n")
if (event !== "event: response.completed") return
if (!data.startsWith("data: ")) return
let json
try {
json = JSON.parse(data.slice(6)) as { response?: { usage?: Usage } }
} catch (e) {
return
}
if (!json.response?.usage) return
usage = json.response.usage
},
getStreamUsage: () => usage,
normalizeUsage: (usage: Usage) => {
const inputTokens = usage.input_tokens ?? 0
const outputTokens = usage.output_tokens ?? 0
const reasoningTokens = usage.output_tokens_details?.reasoning_tokens ?? undefined
const cacheReadTokens = usage.input_tokens_details?.cached_tokens ?? undefined
return {
inputTokens: inputTokens - (cacheReadTokens ?? 0),
outputTokens: outputTokens - (reasoningTokens ?? 0),
reasoningTokens,
cacheReadTokens,
}
},
})
}

View file

@@ -1,7 +1,7 @@
{
"$schema": "https://json.schemastore.org/package.json",
"name": "@opencode-ai/console-core",
"version": "0.15.16",
"version": "0.15.20",
"private": true,
"type": "module",
"dependencies": {

View file

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/console-function",
"version": "0.15.16",
"version": "0.15.20",
"$schema": "https://json.schemastore.org/package.json",
"private": true,
"type": "module",

View file

@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/console-mail",
"version": "0.15.16",
"version": "0.15.20",
"dependencies": {
"@jsx-email/all": "2.2.3",
"@jsx-email/cli": "1.4.3",

View file

@@ -1,23 +1,23 @@
{
"name": "@opencode-ai/desktop",
"version": "0.15.16",
"version": "0.15.20",
"description": "",
"type": "module",
"scripts": {
"start": "vite",
"dev": "vite",
"build": "vite build",
"serve": "vite preview",
"typecheck": "tsgo --noEmit"
"serve": "vite preview"
},
"license": "MIT",
"devDependencies": {
"opencode": "workspace:*",
"@tailwindcss/vite": "catalog:",
"@tsconfig/bun": "1.0.9",
"@types/luxon": "3.7.1",
"@types/node": "catalog:",
"typescript": "catalog:",
"@typescript/native-preview": "catalog:",
"typescript": "catalog:",
"vite": "catalog:",
"vite-plugin-icons-spritesheet": "3.0.1",
"vite-plugin-solid": "catalog:"
@ -26,7 +26,7 @@
"@kobalte/core": "catalog:",
"@opencode-ai/sdk": "workspace:*",
"@opencode-ai/ui": "workspace:*",
"@pierre/precision-diffs": "catalog:",
"@pierre/precision-diffs": "0.3.5",
"@shikijs/transformers": "3.9.2",
"@solid-primitives/active-element": "2.1.3",
"@solid-primitives/event-bus": "1.1.2",

View file

@ -0,0 +1,362 @@
import type { Part, AssistantMessage, ReasoningPart, TextPart, ToolPart } from "@opencode-ai/sdk"
import type { Tool } from "opencode/tool/tool"
import type { ReadTool } from "opencode/tool/read"
import { children, Component, createMemo, For, Match, Show, Switch, type JSX } from "solid-js"
import { Dynamic } from "solid-js/web"
import { Markdown } from "./markdown"
import { Collapsible, Icon, IconProps } from "@opencode-ai/ui"
import { getDirectory, getFilename } from "@/utils"
import type { ListTool } from "opencode/tool/ls"
import type { GlobTool } from "opencode/tool/glob"
import type { GrepTool } from "opencode/tool/grep"
import type { WebFetchTool } from "opencode/tool/webfetch"
import type { TaskTool } from "opencode/tool/task"
import type { BashTool } from "opencode/tool/bash"
import type { EditTool } from "opencode/tool/edit"
import type { WriteTool } from "opencode/tool/write"
import { DiffChanges } from "./diff-changes"
export function AssistantMessage(props: { message: AssistantMessage; parts: Part[] }) {
return (
<div class="w-full flex flex-col items-start gap-4">
<For each={props.parts}>
{(part) => {
const component = createMemo(() => PART_MAPPING[part.type as keyof typeof PART_MAPPING])
return (
<Show when={component()}>
<Dynamic component={component()} part={part as any} message={props.message} />
</Show>
)
}}
</For>
</div>
)
}
const PART_MAPPING = {
text: TextPart,
tool: ToolPart,
reasoning: ReasoningPart,
}
function ReasoningPart(props: { part: ReasoningPart; message: AssistantMessage }) {
return null
// return (
// <Show when={props.part.text.trim()}>
// <div>{props.part.text}</div>
// </Show>
// )
}
function TextPart(props: { part: TextPart; message: AssistantMessage }) {
return (
<Show when={props.part.text.trim()}>
<Markdown text={props.part.text.trim()} />
</Show>
)
}
function ToolPart(props: { part: ToolPart; message: AssistantMessage }) {
// const sync = useSync()
const component = createMemo(() => {
const render = ToolRegistry.render(props.part.tool) ?? GenericTool
const metadata = props.part.state.status === "pending" ? {} : (props.part.state.metadata ?? {})
const input = props.part.state.status === "completed" ? props.part.state.input : {}
// const permissions = sync.data.permission[props.message.sessionID] ?? []
// const permissionIndex = permissions.findIndex((x) => x.callID === props.part.callID)
// const permission = permissions[permissionIndex]
return (
<>
<Dynamic
component={render}
input={input}
tool={props.part.tool}
metadata={metadata}
// permission={permission?.metadata ?? {}}
output={props.part.state.status === "completed" ? props.part.state.output : undefined}
/>
{/* <Show when={props.part.state.status === "error"}>{props.part.state.error.replace("Error: ", "")}</Show> */}
</>
)
})
return <Show when={component()}>{component()}</Show>
}
type TriggerTitle = {
title: string
subtitle?: string
args?: string[]
action?: JSX.Element
}
const isTriggerTitle = (val: any): val is TriggerTitle => {
return typeof val === "object" && val !== null && "title" in val && !(val instanceof Node)
}
function BasicTool(props: { icon: IconProps["name"]; trigger: TriggerTitle | JSX.Element; children?: JSX.Element }) {
const resolved = children(() => props.children)
return (
<Collapsible>
<Collapsible.Trigger>
<div class="w-full flex items-center self-stretch gap-5 justify-between">
<div class="w-full flex items-center self-stretch gap-5">
<Icon name={props.icon} size="small" />
<Switch>
<Match when={isTriggerTitle(props.trigger)}>
<div class="w-full flex items-center gap-2 justify-between">
<div class="flex items-center gap-2">
<span class="text-12-medium text-text-base capitalize">
{(props.trigger as TriggerTitle).title}
</span>
<Show when={(props.trigger as TriggerTitle).subtitle}>
<span class="text-12-medium text-text-weak">{(props.trigger as TriggerTitle).subtitle}</span>
</Show>
<Show when={(props.trigger as TriggerTitle).args?.length}>
<For each={(props.trigger as TriggerTitle).args}>
{(arg) => <span class="text-12-regular text-text-weaker">{arg}</span>}
</For>
</Show>
</div>
<Show when={(props.trigger as TriggerTitle).action}>{(props.trigger as TriggerTitle).action}</Show>
</div>
</Match>
<Match when={true}>{props.trigger as JSX.Element}</Match>
</Switch>
</div>
<Show when={resolved()}>
<Collapsible.Arrow />
</Show>
</div>
</Collapsible.Trigger>
<Show when={props.children}>
<Collapsible.Content>{props.children}</Collapsible.Content>
</Show>
</Collapsible>
)
}
function GenericTool(props: ToolProps<any>) {
return <BasicTool icon="mcp" trigger={{ title: props.tool }} />
}
type ToolProps<T extends Tool.Info> = {
input: Partial<Tool.InferParameters<T>>
metadata: Partial<Tool.InferMetadata<T>>
// permission: Record<string, any>
tool: string
output?: string
}
const ToolRegistry = (() => {
const state: Record<
string,
{
name: string
render?: Component<ToolProps<any>>
}
> = {}
function register<T extends Tool.Info>(input: { name: string; render?: Component<ToolProps<T>> }) {
state[input.name] = input
return input
}
return {
register,
render(name: string) {
return state[name]?.render
},
}
})()
ToolRegistry.register<typeof ReadTool>({
name: "read",
render(props) {
return (
<BasicTool
icon="glasses"
trigger={{ title: props.tool, subtitle: props.input.filePath ? getFilename(props.input.filePath) : "" }}
/>
)
},
})
ToolRegistry.register<typeof ListTool>({
name: "list",
render(props) {
return (
<BasicTool icon="bullet-list" trigger={{ title: props.tool, subtitle: props.input.path || "/" }}>
<Show when={false && props.output}>
<div class="whitespace-pre">{props.output}</div>
</Show>
</BasicTool>
)
},
})
ToolRegistry.register<typeof GlobTool>({
name: "glob",
render(props) {
return (
<BasicTool
icon="magnifying-glass-menu"
trigger={{
title: props.tool,
subtitle: props.input.path || "/",
args: props.input.pattern ? ["pattern=" + props.input.pattern] : [],
}}
>
<Show when={false && props.output}>
<div class="whitespace-pre">{props.output}</div>
</Show>
</BasicTool>
)
},
})
ToolRegistry.register<typeof GrepTool>({
name: "grep",
render(props) {
const args = []
if (props.input.pattern) args.push("pattern=" + props.input.pattern)
if (props.input.include) args.push("include=" + props.input.include)
return (
<BasicTool
icon="magnifying-glass-menu"
trigger={{
title: props.tool,
subtitle: props.input.path || "/",
args,
}}
>
<Show when={false && props.output}>
<div class="whitespace-pre">{props.output}</div>
</Show>
</BasicTool>
)
},
})
ToolRegistry.register<typeof WebFetchTool>({
name: "webfetch",
render(props) {
return (
<BasicTool
icon="window-cursor"
trigger={{
title: props.tool,
subtitle: props.input.url || "",
args: props.input.format ? ["format=" + props.input.format] : [],
action: (
<div class="size-6 flex items-center justify-center">
<Icon name="square-arrow-top-right" size="small" />
</div>
),
}}
>
<Show when={false && props.output}>
<div class="whitespace-pre">{props.output}</div>
</Show>
</BasicTool>
)
},
})
ToolRegistry.register<typeof TaskTool>({
name: "task",
render(props) {
return (
<BasicTool
icon="task"
trigger={{
title: `${props.input.subagent_type || props.tool} Agent`,
subtitle: props.input.description,
}}
>
<Show when={false && props.output}>
<div class="whitespace-pre">{props.output}</div>
</Show>
</BasicTool>
)
},
})
ToolRegistry.register<typeof BashTool>({
name: "bash",
render(props) {
return (
<BasicTool
icon="console"
trigger={{
title: "Shell",
subtitle: "Ran " + props.input.command,
}}
>
<Show when={false && props.output}>
<div class="whitespace-pre">{props.output}</div>
</Show>
</BasicTool>
)
},
})
ToolRegistry.register<typeof EditTool>({
name: "edit",
render(props) {
return (
<BasicTool
icon="code-lines"
trigger={
<div class="flex items-center justify-between w-full">
<div class="flex items-center gap-5">
<div class="text-12-medium text-text-base capitalize">Edit</div>
<div class="flex">
<Show when={props.input.filePath?.includes("/")}>
<span class="text-text-weak">{getDirectory(props.input.filePath!)}/</span>
</Show>
<span class="text-text-strong">{getFilename(props.input.filePath ?? "")}</span>
</div>
</div>
<div class="flex gap-4 items-center justify-end">{/* <DiffChanges diff={diff} /> */}</div>
</div>
}
>
<Show when={false && props.output}>
<div class="whitespace-pre">{props.output}</div>
</Show>
</BasicTool>
)
},
})
ToolRegistry.register<typeof WriteTool>({
name: "write",
render(props) {
return (
<BasicTool
icon="code-lines"
trigger={
<div class="flex items-center justify-between w-full">
<div class="flex items-center gap-5">
<div class="text-12-medium text-text-base capitalize">Write</div>
<div class="flex">
<Show when={props.input.filePath?.includes("/")}>
<span class="text-text-weak">{getDirectory(props.input.filePath!)}/</span>
</Show>
<span class="text-text-strong">{getFilename(props.input.filePath ?? "")}</span>
</div>
</div>
<div class="flex gap-4 items-center justify-end">{/* <DiffChanges diff={diff} /> */}</div>
</div>
}
>
<Show when={false && props.output}>
<div class="whitespace-pre">{props.output}</div>
</Show>
</BasicTool>
)
},
})
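Adding a renderer for another tool is a single ToolRegistry.register call; a hedged sketch for a hypothetical "patch" tool, mirroring GenericTool above (the tool name and icon choice are assumptions, not part of this commit):

ToolRegistry.register<Tool.Info>({
  name: "patch", // hypothetical tool name, for illustration only
  render(props) {
    // Falls back to the same collapsed row style the other tools use.
    return <BasicTool icon="code-lines" trigger={{ title: props.tool }} />
  },
})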

View file

@ -0,0 +1,20 @@
import { FileDiff } from "@opencode-ai/sdk"
import { createMemo, Show } from "solid-js"
export function DiffChanges(props: { diff: FileDiff | FileDiff[] }) {
const additions = createMemo(() =>
Array.isArray(props.diff) ? props.diff.reduce((acc, diff) => acc + (diff.additions ?? 0), 0) : props.diff.additions,
)
const deletions = createMemo(() =>
Array.isArray(props.diff) ? props.diff.reduce((acc, diff) => acc + (diff.deletions ?? 0), 0) : props.diff.deletions,
)
const total = createMemo(() => additions() + deletions())
return (
<Show when={total() > 0}>
<div class="flex gap-2 justify-end items-center">
<span class="text-12-mono text-right text-text-diff-add-base">{`+${additions()}`}</span>
<span class="text-12-mono text-right text-text-diff-delete-base">{`-${deletions()}`}</span>
</div>
</Show>
)
}
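A minimal usage sketch (sample data, not from this commit): DiffChanges accepts a single FileDiff or an array, sums the counts, and renders nothing when the total is zero:

// <DiffChanges diff={[{ additions: 3, deletions: 1 }, { additions: 5, deletions: 0 }]} />
//   renders "+8" and "-1"
// <DiffChanges diff={{ additions: 0, deletions: 0 }} />
//   renders nothing, since total() is 0
// (FileDiff objects shown partially; other fields omitted for brevity.)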

View file

@ -2,8 +2,9 @@ import {
type FileContents,
FileDiff,
type DiffLineAnnotation,
type HunkData,
DiffFileRendererOptions,
registerCustomTheme,
// registerCustomTheme,
} from "@pierre/precision-diffs"
import { ComponentProps, createEffect, splitProps } from "solid-js"
@ -15,8 +16,7 @@ export type DiffProps<T = {}> = Omit<DiffFileRendererOptions<T>, "themes"> & {
classList?: ComponentProps<"div">["classList"]
}
// @ts-expect-error
registerCustomTheme("opencode", () => import("./theme.json"))
// registerCustomTheme("opencode", () => import("./theme.json"))
// interface ThreadMetadata {
// threadId: string
@ -49,7 +49,7 @@ export function Diff<T>(props: DiffProps<T>) {
// annotations and a container element to hold the diff
createEffect(() => {
const instance = new FileDiff<T>({
theme: "opencode",
theme: "pierre-light",
// Or can also provide a 'themes' prop, which allows the code to adapt
// to your OS light or dark theme
// themes: { dark: 'pierre-night', light: 'pierre-light' },
@ -98,6 +98,23 @@ export function Diff<T>(props: DiffProps<T>) {
// 'simple':
// Just a subtle bar separator between each hunk
hunkSeparators: "line-info",
// hunkSeparators(hunkData: HunkData) {
// const fragment = document.createDocumentFragment()
// const numCol = document.createElement("div")
// numCol.textContent = `${hunkData.lines}`
// numCol.style.position = "sticky"
// numCol.style.left = "0"
// numCol.style.backgroundColor = "var(--pjs-bg)"
// numCol.style.zIndex = "2"
// fragment.appendChild(numCol)
// const contentCol = document.createElement("div")
// contentCol.textContent = "unmodified lines"
// contentCol.style.position = "sticky"
// contentCol.style.width = "var(--pjs-column-content-width)"
// contentCol.style.left = "var(--pjs-column-number-width)"
// fragment.appendChild(contentCol)
// return fragment
// },
// On lines that have both additions and deletions, we can run a
// separate diff check to mark parts of the lines that change.
// 'none':

View file

@ -460,13 +460,6 @@ export const { use: useLocal, provider: LocalProvider } = createSimpleContext({
return sync.data.message[store.active]?.find((m) => m.id === store.activeMessage)
})
const activeAssistantMessages = createMemo(() => {
if (!store.active || !activeMessage()) return []
return sync.data.message[store.active]?.filter(
(m) => m.role === "assistant" && m.parentID == activeMessage()?.id,
)
})
const model = createMemo(() => {
if (!last()) return
const model = sync.data.provider.find((x) => x.id === last().providerID)?.models[last().modelID]
@ -504,7 +497,6 @@ export const { use: useLocal, provider: LocalProvider } = createSimpleContext({
return {
active,
activeMessage,
activeAssistantMessages,
lastUserMessage,
cost,
last,

View file

@ -22,6 +22,10 @@ import { Code } from "@/components/code"
import { useSync } from "@/context/sync"
import { useSDK } from "@/context/sdk"
import { Diff } from "@/components/diff"
import { ProgressCircle } from "@/components/progress-circle"
import { AssistantMessage } from "@/components/assistant-message"
import { type AssistantMessage as AssistantMessageType } from "@opencode-ai/sdk"
import { DiffChanges } from "@/components/diff-changes"
export default function Page() {
const local = useLocal()
@ -92,7 +96,7 @@ export default function Page() {
}
}
if (event.key.length === 1 && event.key !== "Unidentified") {
if (event.key.length === 1 && event.key !== "Unidentified" && !(event.ctrlKey || event.metaKey)) {
inputRef?.focus()
}
}
@ -392,9 +396,6 @@ export default function Page() {
{(session) => {
const diffs = createMemo(() => session.summary?.diffs ?? [])
const filesChanged = createMemo(() => diffs().length)
const additions = createMemo(() => diffs().reduce((acc, diff) => (acc ?? 0) + (diff.additions ?? 0), 0))
const deletions = createMemo(() => diffs().reduce((acc, diff) => (acc ?? 0) + (diff.deletions ?? 0), 0))
return (
<Tooltip placement="right" value={session.title}>
<div>
@ -408,12 +409,7 @@ export default function Page() {
</div>
<div class="flex justify-between items-center self-stretch">
<span class="text-12-regular text-text-weak">{`${filesChanged() || "No"} file${filesChanged() !== 1 ? "s" : ""} changed`}</span>
<Show when={additions() || deletions()}>
<div class="flex gap-2 justify-end items-center">
<span class="text-12-mono text-right text-text-diff-add-base">{`+${additions()}`}</span>
<span class="text-12-mono text-right text-text-diff-delete-base">{`-${deletions()}`}</span>
</div>
</Show>
<DiffChanges diff={diffs()} />
</div>
</div>
</Tooltip>
@ -434,13 +430,12 @@ export default function Page() {
<Tabs onChange={handleTabChange}>
<div class="sticky top-0 shrink-0 flex">
<Tabs.List>
<Tabs.Trigger value="chat" class="flex gap-x-1.5 items-center">
<Tabs.Trigger value="chat" class="flex gap-x-4 items-center">
<div>Chat</div>
<Show when={local.session.active()}>
<div class="flex flex-col h-4 px-2 -mr-2 justify-center items-center rounded-full bg-surface-base text-12-medium text-text-strong">
{local.session.context()}%
</div>
</Show>
<Tooltip value={`${local.session.tokens() ?? 0} Tokens`} class="flex items-center gap-1.5">
<ProgressCircle percentage={local.session.context() ?? 0} />
<div class="text-14-regular text-text-weak text-right">{local.session.context() ?? 0}%</div>
</Tooltip>
</Tabs.Trigger>
{/* <Tabs.Trigger value="review">Review</Tabs.Trigger> */}
<SortableProvider ids={local.file.opened().map((file) => file.path)}>
@ -548,33 +543,114 @@ export default function Page() {
<Show when={local.session.userMessages().length > 1}>
<ul role="list" class="w-60 shrink-0 flex flex-col items-start gap-1">
<For each={local.session.userMessages()}>
{(message) => (
<li
class="group/li flex items-center gap-x-2 py-1 self-stretch cursor-default"
onClick={() => local.session.setActiveMessage(message.id)}
>
<div class="w-[18px] shrink-0">
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 18 12" fill="none">
<g>
<rect x="0" width="2" height="12" rx="1" fill="#CFCECD" />
<rect x="4" width="2" height="12" rx="1" fill="#CFCECD" />
<rect x="8" width="2" height="12" rx="1" fill="#CFCECD" />
<rect x="12" width="2" height="12" rx="1" fill="#CFCECD" />
<rect x="16" width="2" height="12" rx="1" fill="#CFCECD" />
</g>
</svg>
</div>
<div
data-active={local.session.activeMessage()?.id === message.id}
classList={{
"text-14-regular text-text-weak whitespace-nowrap truncate min-w-0": true,
"text-text-weak data-[active=true]:text-text-strong group-hover/li:text-text-base": true,
}}
{(message) => {
const countLines = (text: string) => {
if (!text) return 0
return text.split("\n").length
}
const additions = createMemo(
() =>
message.summary?.diffs.reduce((acc, diff) => acc + (diff.additions ?? 0), 0) ?? 0,
)
const deletions = createMemo(
() =>
message.summary?.diffs.reduce((acc, diff) => acc + (diff.deletions ?? 0), 0) ?? 0,
)
const totalBeforeLines = createMemo(
() =>
message.summary?.diffs.reduce((acc, diff) => acc + countLines(diff.before), 0) ??
0,
)
const blockCounts = createMemo(() => {
const TOTAL_BLOCKS = 5
const adds = additions()
const dels = deletions()
const unchanged = Math.max(0, totalBeforeLines() - dels)
const totalActivity = unchanged + adds + dels
if (totalActivity === 0) {
return { added: 0, deleted: 0, neutral: TOTAL_BLOCKS }
}
const percentAdded = adds / totalActivity
const percentDeleted = dels / totalActivity
const added_raw = percentAdded * TOTAL_BLOCKS
const deleted_raw = percentDeleted * TOTAL_BLOCKS
let added = adds > 0 ? Math.ceil(added_raw) : 0
let deleted = dels > 0 ? Math.ceil(deleted_raw) : 0
let total_allocated = added + deleted
if (total_allocated > TOTAL_BLOCKS) {
if (added_raw < deleted_raw) {
added = Math.floor(added_raw)
} else {
deleted = Math.floor(deleted_raw)
}
total_allocated = added + deleted
if (total_allocated > TOTAL_BLOCKS) {
if (added_raw < deleted_raw) {
deleted = Math.floor(deleted_raw)
} else {
added = Math.floor(added_raw)
}
}
}
const neutral = Math.max(0, TOTAL_BLOCKS - added - deleted)
return { added, deleted, neutral }
})
const ADD_COLOR = "var(--icon-diff-add-base)"
const DELETE_COLOR = "var(--icon-diff-delete-base)"
const NEUTRAL_COLOR = "var(--icon-weak-base)"
const visibleBlocks = createMemo(() => {
const counts = blockCounts()
const blocks = [
...Array(counts.added).fill(ADD_COLOR),
...Array(counts.deleted).fill(DELETE_COLOR),
...Array(counts.neutral).fill(NEUTRAL_COLOR),
]
return blocks.slice(0, 5)
})
return (
<li
class="group/li flex items-center gap-x-2 py-1 self-stretch cursor-default"
onClick={() => local.session.setActiveMessage(message.id)}
>
{message.summary?.title ?? local.session.getMessageText(message)}
</div>
</li>
)}
<div class="w-[18px] shrink-0">
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 18 12" fill="none">
<g>
<For each={visibleBlocks()}>
{(color, i) => (
<rect x={i() * 4} width="2" height="12" rx="1" fill={color} />
)}
</For>
</g>
</svg>
</div>
<div
data-active={local.session.activeMessage()?.id === message.id}
classList={{
"text-14-regular text-text-weak whitespace-nowrap truncate min-w-0": true,
"text-text-weak data-[active=true]:text-text-strong group-hover/li:text-text-base": true,
}}
>
{message.summary?.title ?? local.session.getMessageText(message)}
</div>
</li>
)
}}
</For>
</ul>
</Show>
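To make the block-allocation arithmetic above concrete, a worked example with illustrative counts:

// additions = 30, deletions = 10, totalBeforeLines = 100
// unchanged     = max(0, 100 - 10) = 90
// totalActivity = 90 + 30 + 10     = 130
// added_raw     = 5 * 30/130 ≈ 1.15 → ceil → 2 blocks
// deleted_raw   = 5 * 10/130 ≈ 0.38 → ceil → 1 block
// neutral       = 5 - 2 - 1         = 2 blocks
// so visibleBlocks() yields 2 add-colored, 1 delete-colored and 2 neutral bars.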
@ -585,6 +661,11 @@ export default function Page() {
const title = createMemo(() => message.summary?.title)
const prompt = createMemo(() => local.session.getMessageText(message))
const summary = createMemo(() => message.summary?.body)
const assistantMessages = createMemo(() => {
return sync.data.message[activeSession().id]?.filter(
(m) => m.role === "assistant" && m.parentID == message.id,
) as AssistantMessageType[]
})
return (
<div
@ -633,10 +714,7 @@ export default function Page() {
</div>
</div>
<div class="flex gap-4 items-center justify-end">
<div class="flex gap-2 justify-end items-center">
<span class="text-12-mono text-right text-text-diff-add-base">{`+${diff.additions}`}</span>
<span class="text-12-mono text-right text-text-diff-delete-base">{`-${diff.deletions}`}</span>
</div>
<DiffChanges diff={diff} />
<Icon name="chevron-grabber-vertical" size="small" />
</div>
</div>
@ -661,10 +739,18 @@ export default function Page() {
</Show>
</div>
{/* Response */}
<div data-todo="Response (Timeline)">
<div data-todo="Response" class="w-full">
<div class="flex flex-col items-start gap-1 self-stretch">
<h2 class="text-12-medium text-text-weak">Response</h2>
</div>
<div class="w-full flex flex-col items-start self-stretch gap-8">
<For each={assistantMessages()}>
{(assistantMessage) => {
const parts = createMemo(() => sync.data.part[assistantMessage.id])
return <AssistantMessage message={assistantMessage} parts={parts()} />
}}
</For>
</div>
</div>
</div>
)

View file

@ -1,6 +1,6 @@
{
"name": "@opencode-ai/function",
"version": "0.15.16",
"version": "0.15.20",
"$schema": "https://json.schemastore.org/package.json",
"private": true,
"type": "module",

View file

@ -1,6 +1,6 @@
{
"$schema": "https://json.schemastore.org/package.json",
"version": "0.15.16",
"version": "0.15.20",
"name": "opencode",
"type": "module",
"private": true,

View file

@ -4,8 +4,12 @@ import solidPlugin from "../node_modules/@opentui/solid/scripts/solid-plugin"
import path from "path"
import fs from "fs"
import { $ } from "bun"
import { fileURLToPath } from "url"
const __filename = fileURLToPath(import.meta.url)
const __dirname = path.dirname(__filename)
const dir = path.resolve(__dirname, "..")
const dir = new URL("..", import.meta.url).pathname
process.chdir(dir)
import pkg from "../package.json"

View file

@ -51,13 +51,16 @@ if (!Script.preview) {
const macX64Sha = await $`sha256sum ./dist/opencode-darwin-x64.zip | cut -d' ' -f1`.text().then((x) => x.trim())
const macArm64Sha = await $`sha256sum ./dist/opencode-darwin-arm64.zip | cut -d' ' -f1`.text().then((x) => x.trim())
const [pkgver, _subver = ""] = Script.version.split(/(-.*)/, 2)
// Arch Linux PKGBUILD generation
const binaryPkgbuild = [
"# Maintainer: dax",
"# Maintainer: adam",
"",
"pkgname='opencode-bin'",
`pkgver=${Script.version.split("-")[0]}`,
`pkgver=${pkgver}`,
`_subver=${_subver}`,
"options=('!debug' '!strip')",
"pkgrel=1",
"pkgdesc='The AI coding agent built for the terminal.'",
@ -68,10 +71,10 @@ if (!Script.preview) {
"conflicts=('opencode')",
"depends=('fzf' 'ripgrep')",
"",
`source_aarch64=("\${pkgname}_\${pkgver}_aarch64.zip::https://github.com/sst/opencode/releases/download/v${Script.version}/opencode-linux-arm64.zip")`,
`source_aarch64=("\${pkgname}_\${pkgver}_aarch64.zip::https://github.com/sst/opencode/releases/download/v\${pkgver}\${_subver}/opencode-linux-arm64.zip")`,
`sha256sums_aarch64=('${arm64Sha}')`,
"",
`source_x86_64=("\${pkgname}_\${pkgver}_x86_64.zip::https://github.com/sst/opencode/releases/download/v${Script.version}/opencode-linux-x64.zip")`,
`source_x86_64=("\${pkgname}_\${pkgver}_x86_64.zip::https://github.com/sst/opencode/releases/download/v\${pkgver}\${_subver}/opencode-linux-x64.zip")`,
`sha256sums_x86_64=('${x64Sha}')`,
"",
"package() {",
@ -86,7 +89,8 @@ if (!Script.preview) {
"# Maintainer: adam",
"",
"pkgname='opencode'",
`pkgver=${Script.version.split("-")[0]}`,
`pkgver=${pkgver}`,
`_subver=${_subver}`,
"options=('!debug' '!strip')",
"pkgrel=1",
"pkgdesc='The AI coding agent built for the terminal.'",
@ -98,7 +102,7 @@ if (!Script.preview) {
"depends=('fzf' 'ripgrep')",
"makedepends=('git' 'bun-bin' 'go')",
"",
`source=("opencode-\${pkgver}.tar.gz::https://github.com/sst/opencode/archive/v${Script.version}.tar.gz")`,
`source=("opencode-\${pkgver}.tar.gz::https://github.com/sst/opencode/archive/v\${pkgver}\${_subver}.tar.gz")`,
`sha256sums=('SKIP')`,
"",
"build() {",

View file

@ -1,6 +1,6 @@
#!/usr/bin/env bun
import { z } from "zod/v4"
import { z } from "zod"
import { Config } from "../src/config/config"
const file = process.argv[2]

View file

@ -1,204 +1,704 @@
import type {
Agent,
AgentSideConnection,
AuthenticateRequest,
AuthenticateResponse,
CancelNotification,
InitializeRequest,
InitializeResponse,
LoadSessionRequest,
LoadSessionResponse,
NewSessionRequest,
NewSessionResponse,
PromptRequest,
PromptResponse,
SetSessionModelRequest,
SetSessionModelResponse,
import {
type Agent as ACPAgent,
type AgentSideConnection,
type AuthenticateRequest,
type CancelNotification,
type InitializeRequest,
type LoadSessionRequest,
type NewSessionRequest,
type PermissionOption,
type PlanEntry,
type PromptRequest,
type SetSessionModelRequest,
type SetSessionModeRequest,
type SetSessionModeResponse,
type ToolCallContent,
type ToolKind,
} from "@agentclientprotocol/sdk"
import { Log } from "../util/log"
import { ACPSessionManager } from "./session"
import type { ACPConfig } from "./types"
import { Provider } from "../provider/provider"
import { SessionPrompt } from "../session/prompt"
import { Identifier } from "../id/id"
import { Installation } from "@/installation"
import { SessionLock } from "@/session/lock"
import { Bus } from "@/bus"
import { MessageV2 } from "@/session/message-v2"
import { Storage } from "@/storage/storage"
import { Command } from "@/command"
import { Agent as Agents } from "@/agent/agent"
import { Permission } from "@/permission"
import { Session } from "@/session"
import { Identifier } from "@/id/id"
import { SessionCompaction } from "@/session/compaction"
import type { Config } from "@/config/config"
import { MCP } from "@/mcp"
import { Todo } from "@/session/todo"
import { z } from "zod"
export class OpenCodeAgent implements Agent {
private log = Log.create({ service: "acp-agent" })
private sessionManager = new ACPSessionManager()
private connection: AgentSideConnection
private config: ACPConfig
export namespace ACP {
const log = Log.create({ service: "acp-agent" })
constructor(connection: AgentSideConnection, config: ACPConfig = {}) {
this.connection = connection
this.config = config
}
export class Agent implements ACPAgent {
private sessionManager = new ACPSessionManager()
private connection: AgentSideConnection
private config: ACPConfig
async initialize(params: InitializeRequest): Promise<InitializeResponse> {
this.log.info("initialize", { protocolVersion: params.protocolVersion })
constructor(connection: AgentSideConnection, config: ACPConfig = {}) {
this.connection = connection
this.config = config
this.setupEventSubscriptions()
}
return {
protocolVersion: 1,
agentCapabilities: {
loadSession: false,
},
_meta: {
opencode: {
version: await import("../installation").then((m) => m.Installation.VERSION),
private setupEventSubscriptions() {
const options: PermissionOption[] = [
{ optionId: "once", kind: "allow_once", name: "Allow once" },
{ optionId: "always", kind: "allow_always", name: "Always allow" },
{ optionId: "reject", kind: "reject_once", name: "Reject" },
]
Bus.subscribe(Permission.Event.Updated, async (event) => {
const acpSession = this.sessionManager.get(event.properties.sessionID)
if (!acpSession) return
try {
const permission = event.properties
const res = await this.connection
.requestPermission({
sessionId: acpSession.id,
toolCall: {
toolCallId: permission.callID ?? permission.id,
status: "pending",
title: permission.title,
rawInput: permission.metadata,
kind: toToolKind(permission.type),
locations: toLocations(permission.type, permission.metadata),
},
options,
})
.catch((error) => {
log.error("failed to request permission from ACP", {
error,
permissionID: permission.id,
sessionID: permission.sessionID,
})
Permission.respond({
sessionID: permission.sessionID,
permissionID: permission.id,
response: "reject",
})
return
})
if (!res) return
if (res.outcome.outcome !== "selected") {
Permission.respond({ sessionID: permission.sessionID, permissionID: permission.id, response: "reject" })
return
}
Permission.respond({
sessionID: permission.sessionID,
permissionID: permission.id,
response: res.outcome.optionId as "once" | "always" | "reject",
})
} catch (err) {
if (!(err instanceof Permission.RejectedError)) {
log.error("unexpected error when handling permission", { error: err })
throw err
}
}
})
Bus.subscribe(MessageV2.Event.PartUpdated, async (event) => {
const props = event.properties
const { part } = props
const acpSession = this.sessionManager.get(part.sessionID)
if (!acpSession) return
const message = await Storage.read<MessageV2.Info>(["message", part.sessionID, part.messageID]).catch(
() => undefined,
)
if (!message || message.role !== "assistant") return
if (part.type === "tool") {
switch (part.state.status) {
case "pending":
await this.connection
.sessionUpdate({
sessionId: acpSession.id,
update: {
sessionUpdate: "tool_call",
toolCallId: part.callID,
title: part.tool,
kind: toToolKind(part.tool),
status: "pending",
locations: [],
rawInput: {},
},
})
.catch((err) => {
log.error("failed to send tool pending to ACP", { error: err })
})
break
case "running":
await this.connection
.sessionUpdate({
sessionId: acpSession.id,
update: {
sessionUpdate: "tool_call_update",
toolCallId: part.callID,
status: "in_progress",
locations: toLocations(part.tool, part.state.input),
rawInput: part.state.input,
},
})
.catch((err) => {
log.error("failed to send tool in_progress to ACP", { error: err })
})
break
case "completed":
const kind = toToolKind(part.tool)
const content: ToolCallContent[] = [
{
type: "content",
content: {
type: "text",
text: part.state.output,
},
},
]
if (kind === "edit") {
const input = part.state.input
const filePath = typeof input["filePath"] === "string" ? input["filePath"] : ""
const oldText = typeof input["oldString"] === "string" ? input["oldString"] : ""
const newText =
typeof input["newString"] === "string"
? input["newString"]
: typeof input["content"] === "string"
? input["content"]
: ""
content.push({
type: "diff",
path: filePath,
oldText,
newText,
})
}
if (part.tool === "todowrite") {
const parsedTodos = z.array(Todo.Info).safeParse(JSON.parse(part.state.output))
if (parsedTodos.success) {
await this.connection
.sessionUpdate({
sessionId: acpSession.id,
update: {
sessionUpdate: "plan",
entries: parsedTodos.data.map((todo) => {
const status: PlanEntry["status"] =
todo.status === "cancelled" ? "completed" : (todo.status as PlanEntry["status"])
return {
priority: "medium",
status,
content: todo.content,
}
}),
},
})
.catch((err) => {
log.error("failed to send session update for todo", { error: err })
})
} else {
log.error("failed to parse todo output", { error: parsedTodos.error })
}
}
await this.connection
.sessionUpdate({
sessionId: acpSession.id,
update: {
sessionUpdate: "tool_call_update",
toolCallId: part.callID,
status: "completed",
kind,
content,
title: part.state.title,
rawOutput: {
output: part.state.output,
metadata: part.state.metadata,
},
},
})
.catch((err) => {
log.error("failed to send tool completed to ACP", { error: err })
})
break
case "error":
await this.connection
.sessionUpdate({
sessionId: acpSession.id,
update: {
sessionUpdate: "tool_call_update",
toolCallId: part.callID,
status: "failed",
content: [
{
type: "content",
content: {
type: "text",
text: part.state.error,
},
},
],
rawOutput: {
error: part.state.error,
},
},
})
.catch((err) => {
log.error("failed to send tool error to ACP", { error: err })
})
break
}
} else if (part.type === "text") {
const delta = props.delta
if (delta && part.synthetic !== true) {
await this.connection
.sessionUpdate({
sessionId: acpSession.id,
update: {
sessionUpdate: "agent_message_chunk",
content: {
type: "text",
text: delta,
},
},
})
.catch((err) => {
log.error("failed to send text to ACP", { error: err })
})
}
} else if (part.type === "reasoning") {
const delta = props.delta
if (delta) {
await this.connection
.sessionUpdate({
sessionId: acpSession.id,
update: {
sessionUpdate: "agent_thought_chunk",
content: {
type: "text",
text: delta,
},
},
})
.catch((err) => {
log.error("failed to send reasoning to ACP", { error: err })
})
}
}
})
}
async initialize(params: InitializeRequest) {
log.info("initialize", { protocolVersion: params.protocolVersion })
return {
protocolVersion: 1,
agentCapabilities: {
loadSession: true,
mcpCapabilities: {
http: true,
sse: true,
},
promptCapabilities: {
embeddedContext: true,
image: true,
},
},
},
authMethods: [
{
description: "Run `opencode auth login` in the terminal",
name: "Login with opencode",
id: "opencode-login",
},
],
_meta: {
opencode: {
version: Installation.VERSION,
},
},
}
}
async authenticate(_params: AuthenticateRequest) {
throw new Error("Authentication not implemented")
}
async newSession(params: NewSessionRequest) {
const model = await defaultModel(this.config)
const session = await this.sessionManager.create(params.cwd, params.mcpServers, model)
log.info("creating_session", { mcpServers: params.mcpServers.length })
const load = await this.loadSession({
cwd: params.cwd,
mcpServers: params.mcpServers,
sessionId: session.id,
})
return {
sessionId: session.id,
models: load.models,
modes: load.modes,
_meta: {},
}
}
async loadSession(params: LoadSessionRequest) {
const model = await defaultModel(this.config)
const sessionId = params.sessionId
const providers = await Provider.list()
const entries = Object.entries(providers).sort((a, b) => {
const nameA = a[1].info.name.toLowerCase()
const nameB = b[1].info.name.toLowerCase()
if (nameA < nameB) return -1
if (nameA > nameB) return 1
return 0
})
const availableModels = entries.flatMap(([providerID, provider]) => {
const models = Provider.sort(Object.values(provider.info.models))
return models.map((model) => ({
modelId: `${providerID}/${model.id}`,
name: `${provider.info.name}/${model.name}`,
}))
})
const availableCommands = (await Command.list()).map((command) => ({
name: command.name,
description: command.description ?? "",
}))
const names = new Set(availableCommands.map((c) => c.name))
if (!names.has("init"))
availableCommands.push({
name: "init",
description: "create/update a AGENTS.md",
})
if (!names.has("compact"))
availableCommands.push({
name: "compact",
description: "compact the session",
})
setTimeout(() => {
this.connection.sessionUpdate({
sessionId,
update: {
sessionUpdate: "available_commands_update",
availableCommands,
},
})
}, 0)
const availableModes = (await Agents.list())
.filter((agent) => agent.mode !== "subagent")
.map((agent) => ({
id: agent.name,
name: agent.name,
description: agent.description,
}))
const currentModeId = availableModes.find((m) => m.name === "build")?.id ?? availableModes[0].id
const mcpServers: Record<string, Config.Mcp> = {}
for (const server of params.mcpServers) {
if ("type" in server) {
mcpServers[server.name] = {
url: server.url,
headers: server.headers.reduce<Record<string, string>>((acc, { name, value }) => {
acc[name] = value
return acc
}, {}),
type: "remote",
}
} else {
mcpServers[server.name] = {
type: "local",
command: [server.command, ...server.args],
environment: server.env.reduce<Record<string, string>>((acc, { name, value }) => {
acc[name] = value
return acc
}, {}),
}
}
}
await Promise.all(
Object.entries(mcpServers).map(async ([key, mcp]) => {
await MCP.add(key, mcp)
}),
)
return {
sessionId,
models: {
currentModelId: `${model.providerID}/${model.modelID}`,
availableModels,
},
modes: {
availableModes,
currentModeId,
},
_meta: {},
}
}
async setSessionModel(params: SetSessionModelRequest) {
const session = this.sessionManager.get(params.sessionId)
if (!session) {
throw new Error(`Session not found: ${params.sessionId}`)
}
const parsed = Provider.parseModel(params.modelId)
const model = await Provider.getModel(parsed.providerID, parsed.modelID)
this.sessionManager.setModel(session.id, {
providerID: model.providerID,
modelID: model.modelID,
})
return {
_meta: {},
}
}
async setSessionMode(params: SetSessionModeRequest): Promise<SetSessionModeResponse | void> {
const session = this.sessionManager.get(params.sessionId)
if (!session) {
throw new Error(`Session not found: ${params.sessionId}`)
}
await Agents.get(params.modeId).then((agent) => {
if (!agent) throw new Error(`Agent not found: ${params.modeId}`)
})
this.sessionManager.setMode(params.sessionId, params.modeId)
}
async prompt(params: PromptRequest) {
const sessionID = params.sessionId
const acpSession = this.sessionManager.get(sessionID)
if (!acpSession) {
throw new Error(`Session not found: ${sessionID}`)
}
const current = acpSession.model
const model = current ?? (await defaultModel(this.config))
if (!current) {
this.sessionManager.setModel(acpSession.id, model)
}
const agent = acpSession.modeId ?? "build"
const parts: SessionPrompt.PromptInput["parts"] = []
for (const part of params.prompt) {
switch (part.type) {
case "text":
parts.push({
type: "text" as const,
text: part.text,
})
break
case "image":
if (part.data) {
parts.push({
type: "file",
url: `data:${part.mimeType};base64,${part.data}`,
mime: part.mimeType,
})
} else if (part.uri && part.uri.startsWith("http:")) {
parts.push({
type: "file",
url: part.uri,
mime: part.mimeType,
})
}
break
case "resource_link":
const parsed = parseUri(part.uri)
parts.push(parsed)
break
case "resource":
const resource = part.resource
if ("text" in resource) {
parts.push({
type: "text",
text: resource.text,
})
}
break
default:
break
}
}
log.info("parts", { parts })
const cmd = (() => {
const text = parts
.filter((p) => p.type === "text")
.map((p) => p.text)
.join("")
.trim()
if (!text.startsWith("/")) return
const [name, ...rest] = text.slice(1).split(/\s+/)
return { name, args: rest.join(" ").trim() }
})()
const done = {
stopReason: "end_turn" as const,
_meta: {},
}
if (!cmd) {
await SessionPrompt.prompt({
sessionID,
model: {
providerID: model.providerID,
modelID: model.modelID,
},
parts,
agent,
})
return done
}
const command = await Command.get(cmd.name)
if (command) {
await SessionPrompt.command({
sessionID,
command: command.name,
arguments: cmd.args,
model: model.providerID + "/" + model.modelID,
agent,
})
return done
}
switch (cmd.name) {
case "init":
await Session.initialize({
sessionID,
messageID: Identifier.ascending("message"),
providerID: model.providerID,
modelID: model.modelID,
})
break
case "compact":
await SessionCompaction.run({
sessionID,
providerID: model.providerID,
modelID: model.modelID,
})
break
}
return done
}
async cancel(params: CancelNotification) {
SessionLock.abort(params.sessionId)
}
}
async authenticate(params: AuthenticateRequest): Promise<void | AuthenticateResponse> {
this.log.info("authenticate", { methodId: params.methodId })
throw new Error("Authentication not yet implemented")
}
function toToolKind(toolName: string): ToolKind {
const tool = toolName.toLocaleLowerCase()
switch (tool) {
case "bash":
return "execute"
case "webfetch":
return "fetch"
async newSession(params: NewSessionRequest): Promise<NewSessionResponse> {
this.log.info("newSession", { cwd: params.cwd, mcpServers: params.mcpServers.length })
case "edit":
case "patch":
case "write":
return "edit"
const model = await this.defaultModel()
const session = await this.sessionManager.create(params.cwd, params.mcpServers, model)
const availableModels = await this.availableModels()
case "grep":
case "glob":
case "context7_resolve_library_id":
case "context7_get_library_docs":
return "search"
return {
sessionId: session.id,
models: {
currentModelId: `${model.providerID}/${model.modelID}`,
availableModels,
},
_meta: {},
case "list":
case "read":
return "read"
default:
return "other"
}
}
async loadSession(params: LoadSessionRequest): Promise<LoadSessionResponse> {
this.log.info("loadSession", { sessionId: params.sessionId, cwd: params.cwd })
const defaultModel = await this.defaultModel()
const session = await this.sessionManager.load(params.sessionId, params.cwd, params.mcpServers, defaultModel)
const availableModels = await this.availableModels()
return {
models: {
currentModelId: `${session.model.providerID}/${session.model.modelID}`,
availableModels,
},
_meta: {},
function toLocations(toolName: string, input: Record<string, any>): { path: string }[] {
const tool = toolName.toLocaleLowerCase()
switch (tool) {
case "read":
case "edit":
case "write":
return input["filePath"] ? [{ path: input["filePath"] }] : []
case "glob":
case "grep":
return input["path"] ? [{ path: input["path"] }] : []
case "bash":
return []
case "list":
return input["path"] ? [{ path: input["path"] }] : []
default:
return []
}
}
async setSessionModel(params: SetSessionModelRequest): Promise<SetSessionModelResponse> {
this.log.info("setSessionModel", { sessionId: params.sessionId, modelId: params.modelId })
const session = this.sessionManager.get(params.sessionId)
if (!session) {
throw new Error(`Session not found: ${params.sessionId}`)
}
const parsed = Provider.parseModel(params.modelId)
const model = await Provider.getModel(parsed.providerID, parsed.modelID)
this.sessionManager.setModel(session.id, {
providerID: model.providerID,
modelID: model.modelID,
})
return {
_meta: {},
}
}
private async defaultModel() {
const configured = this.config.defaultModel
async function defaultModel(config: ACPConfig) {
const configured = config.defaultModel
if (configured) return configured
return Provider.defaultModel()
}
private async availableModels() {
const providers = await Provider.list()
const entries = Object.entries(providers).sort((a, b) => {
const nameA = a[1].info.name.toLowerCase()
const nameB = b[1].info.name.toLowerCase()
if (nameA < nameB) return -1
if (nameA > nameB) return 1
return 0
})
return entries.flatMap(([providerID, provider]) => {
const models = Provider.sort(Object.values(provider.info.models))
return models.map((model) => ({
modelId: `${providerID}/${model.id}`,
name: `${provider.info.name}/${model.name}`,
}))
})
}
async prompt(params: PromptRequest): Promise<PromptResponse> {
this.log.info("prompt", {
sessionId: params.sessionId,
promptLength: params.prompt.length,
})
const acpSession = this.sessionManager.get(params.sessionId)
if (!acpSession) {
throw new Error(`Session not found: ${params.sessionId}`)
}
const current = acpSession.model
const model = current ?? (await this.defaultModel())
if (!current) {
this.sessionManager.setModel(acpSession.id, model)
}
const parts = params.prompt.map((content) => {
if (content.type === "text") {
function parseUri(
uri: string,
): { type: "file"; url: string; filename: string; mime: string } | { type: "text"; text: string } {
try {
if (uri.startsWith("file://")) {
const path = uri.slice(7)
const name = path.split("/").pop() || path
return {
type: "text" as const,
text: content.text,
type: "file",
url: uri,
filename: name,
mime: "text/plain",
}
}
if (content.type === "resource") {
const resource = content.resource
let text = ""
if ("text" in resource && typeof resource.text === "string") {
text = resource.text
}
return {
type: "text" as const,
text,
if (uri.startsWith("zed://")) {
const url = new URL(uri)
const path = url.searchParams.get("path")
if (path) {
const name = path.split("/").pop() || path
return {
type: "file",
url: `file://${path}`,
filename: name,
mime: "text/plain",
}
}
}
return {
type: "text" as const,
text: JSON.stringify(content),
type: "text",
text: uri,
}
} catch {
return {
type: "text",
text: uri,
}
})
await SessionPrompt.prompt({
sessionID: acpSession.openCodeSessionId,
messageID: Identifier.ascending("message"),
model: {
providerID: model.providerID,
modelID: model.modelID,
},
parts,
acpConnection: {
connection: this.connection,
sessionId: params.sessionId,
},
})
this.log.debug("prompt response completed")
// Streaming notifications are now handled during prompt execution
// No need to send final text chunk here
return {
stopReason: "end_turn",
_meta: {},
}
}
async cancel(params: CancelNotification): Promise<void> {
this.log.info("cancel", { sessionId: params.sessionId })
}
}
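For reference, how the parseUri helper added above resolves the different URI shapes (paths are illustrative; the zed:// URI format is an assumption):

// parseUri("file:///home/me/notes.md")
//   -> { type: "file", url: "file:///home/me/notes.md", filename: "notes.md", mime: "text/plain" }
// parseUri("zed://worktree/project?path=/home/me/src/index.ts")   // hypothetical zed URI
//   -> { type: "file", url: "file:///home/me/src/index.ts", filename: "index.ts", mime: "text/plain" }
// parseUri("https://example.com/docs")
//   -> { type: "text", text: "https://example.com/docs" }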

View file

@ -1,85 +0,0 @@
import type {
Client,
CreateTerminalRequest,
CreateTerminalResponse,
KillTerminalCommandRequest,
KillTerminalResponse,
ReadTextFileRequest,
ReadTextFileResponse,
ReleaseTerminalRequest,
ReleaseTerminalResponse,
RequestPermissionRequest,
RequestPermissionResponse,
SessionNotification,
TerminalOutputRequest,
TerminalOutputResponse,
WaitForTerminalExitRequest,
WaitForTerminalExitResponse,
WriteTextFileRequest,
WriteTextFileResponse,
} from "@agentclientprotocol/sdk"
import { Log } from "../util/log"
export class ACPClient implements Client {
private log = Log.create({ service: "acp-client" })
async requestPermission(params: RequestPermissionRequest): Promise<RequestPermissionResponse> {
this.log.debug("requestPermission", params)
const firstOption = params.options[0]
if (!firstOption) {
return { outcome: { outcome: "cancelled" } }
}
return {
outcome: {
outcome: "selected",
optionId: firstOption.optionId,
},
}
}
async sessionUpdate(params: SessionNotification): Promise<void> {
this.log.debug("sessionUpdate", { sessionId: params.sessionId })
}
async writeTextFile(params: WriteTextFileRequest): Promise<WriteTextFileResponse> {
this.log.debug("writeTextFile", { path: params.path })
await Bun.write(params.path, params.content)
return { _meta: {} }
}
async readTextFile(params: ReadTextFileRequest): Promise<ReadTextFileResponse> {
this.log.debug("readTextFile", { path: params.path })
const file = Bun.file(params.path)
const exists = await file.exists()
if (!exists) {
throw new Error(`File not found: ${params.path}`)
}
const content = await file.text()
return { content, _meta: {} }
}
async createTerminal(params: CreateTerminalRequest): Promise<CreateTerminalResponse> {
this.log.debug("createTerminal", params)
throw new Error("Terminal support not yet implemented")
}
async terminalOutput(params: TerminalOutputRequest): Promise<TerminalOutputResponse> {
this.log.debug("terminalOutput", params)
throw new Error("Terminal support not yet implemented")
}
async releaseTerminal(params: ReleaseTerminalRequest): Promise<void | ReleaseTerminalResponse> {
this.log.debug("releaseTerminal", params)
throw new Error("Terminal support not yet implemented")
}
async waitForTerminalExit(params: WaitForTerminalExitRequest): Promise<WaitForTerminalExitResponse> {
this.log.debug("waitForTerminalExit", params)
throw new Error("Terminal support not yet implemented")
}
async killTerminal(params: KillTerminalCommandRequest): Promise<void | KillTerminalResponse> {
this.log.debug("killTerminal", params)
throw new Error("Terminal support not yet implemented")
}
}

View file

@ -1,53 +0,0 @@
import { AgentSideConnection, ndJsonStream } from "@agentclientprotocol/sdk"
import { Log } from "../util/log"
import { Instance } from "../project/instance"
import { OpenCodeAgent } from "./agent"
export namespace ACPServer {
const log = Log.create({ service: "acp-server" })
export async function start() {
await Instance.provide({
directory: process.cwd(),
fn: async () => {
log.info("starting ACP server", { cwd: process.cwd() })
const stdout = new WritableStream({
write(chunk) {
process.stdout.write(chunk)
},
})
const stdin = new ReadableStream({
start(controller) {
process.stdin.on("data", (chunk) => {
controller.enqueue(new Uint8Array(chunk))
})
process.stdin.on("end", () => {
controller.close()
})
},
})
const stream = ndJsonStream(stdout, stdin)
new AgentSideConnection((conn) => {
return new OpenCodeAgent(conn)
}, stream)
await new Promise<void>((resolve) => {
process.on("SIGTERM", () => {
log.info("received SIGTERM")
resolve()
})
process.on("SIGINT", () => {
log.info("received SIGINT")
resolve()
})
})
log.info("ACP server stopped")
},
})
}
}

View file

@ -7,20 +7,15 @@ import type { ACPSessionState } from "./types"
export class ACPSessionManager {
private sessions = new Map<string, ACPSessionState>()
async create(
cwd: string,
mcpServers: McpServer[],
model?: ACPSessionState["model"],
): Promise<ACPSessionState> {
const sessionId = `acp_${Identifier.ascending("session")}`
const openCodeSession = await Session.create({ title: `ACP Session ${sessionId}` })
async create(cwd: string, mcpServers: McpServer[], model?: ACPSessionState["model"]): Promise<ACPSessionState> {
const session = await Session.create({ title: `ACP Session ${crypto.randomUUID()}` })
const sessionId = session.id
const resolvedModel = model ?? (await Provider.defaultModel())
const state: ACPSessionState = {
id: sessionId,
cwd,
mcpServers,
openCodeSessionId: openCodeSession.id,
createdAt: new Date(),
model: resolvedModel,
}
@ -29,54 +24,22 @@ export class ACPSessionManager {
return state
}
get(sessionId: string): ACPSessionState | undefined {
get(sessionId: string) {
return this.sessions.get(sessionId)
}
async remove(sessionId: string): Promise<void> {
async remove(sessionId: string) {
const state = this.sessions.get(sessionId)
if (!state) return
await Session.remove(state.openCodeSessionId).catch(() => {})
await Session.remove(sessionId).catch(() => {})
this.sessions.delete(sessionId)
}
has(sessionId: string): boolean {
has(sessionId: string) {
return this.sessions.has(sessionId)
}
async load(
sessionId: string,
cwd: string,
mcpServers: McpServer[],
model?: ACPSessionState["model"],
): Promise<ACPSessionState> {
const existing = this.sessions.get(sessionId)
if (existing) {
if (!existing.model) {
const resolved = model ?? (await Provider.defaultModel())
existing.model = resolved
this.sessions.set(sessionId, existing)
}
return existing
}
const openCodeSession = await Session.create({ title: `ACP Session ${sessionId} (loaded)` })
const resolvedModel = model ?? (await Provider.defaultModel())
const state: ACPSessionState = {
id: sessionId,
cwd,
mcpServers,
openCodeSessionId: openCodeSession.id,
createdAt: new Date(),
model: resolvedModel,
}
this.sessions.set(sessionId, state)
return state
}
getModel(sessionId: string) {
const session = this.sessions.get(sessionId)
if (!session) return
@ -90,4 +53,12 @@ export class ACPSessionManager {
this.sessions.set(sessionId, session)
return session
}
setMode(sessionId: string, modeId: string) {
const session = this.sessions.get(sessionId)
if (!session) return
session.modeId = modeId
this.sessions.set(sessionId, session)
return session
}
}

View file

@ -4,12 +4,12 @@ export interface ACPSessionState {
id: string
cwd: string
mcpServers: McpServer[]
openCodeSessionId: string
createdAt: Date
model: {
providerID: string
modelID: string
}
modeId?: string
}
export interface ACPConfig {

View file

@ -1,5 +1,5 @@
import { Config } from "../config/config"
import z from "zod/v4"
import z from "zod"
import { Provider } from "../provider/provider"
import { generateObject, type ModelMessage } from "ai"
import PROMPT_GENERATE from "./generate.txt"

View file

@ -1,7 +1,7 @@
import path from "path"
import { Global } from "../global"
import fs from "fs/promises"
import z from "zod/v4"
import z from "zod"
export namespace Auth {
export const Oauth = z

View file

@ -1,4 +1,4 @@
import z from "zod/v4"
import z from "zod"
import { Global } from "../global"
import { Log } from "../util/log"
import path from "path"

View file

@ -1,5 +1,5 @@
import z from "zod/v4"
import type { ZodType } from "zod/v4"
import z from "zod"
import type { ZodType } from "zod"
import { Log } from "../util/log"
import { Instance } from "../project/instance"

View file

@ -1,5 +1,17 @@
import { ACPServer } from "../../acp/server"
import { Log } from "@/util/log"
import { bootstrap } from "../bootstrap"
import { cmd } from "./cmd"
import { AgentSideConnection, ndJsonStream } from "@agentclientprotocol/sdk"
import { ACP } from "@/acp/agent"
const log = Log.create({ service: "acp-command" })
process.on("unhandledRejection", (reason, promise) => {
log.error("Unhandled rejection", {
promise,
reason,
})
})
export const AcpCommand = cmd({
command: "acp",
@ -13,6 +25,38 @@ export const AcpCommand = cmd({
},
handler: async (opts) => {
if (opts.cwd) process.chdir(opts["cwd"])
await ACPServer.start()
await bootstrap(process.cwd(), async () => {
const input = new WritableStream<Uint8Array>({
write(chunk) {
return new Promise<void>((resolve, reject) => {
process.stdout.write(chunk, (err) => {
if (err) {
reject(err)
} else {
resolve()
}
})
})
},
})
const output = new ReadableStream<Uint8Array>({
start(controller) {
process.stdin.on("data", (chunk: Buffer) => {
controller.enqueue(new Uint8Array(chunk))
})
process.stdin.on("end", () => controller.close())
process.stdin.on("error", (err) => controller.error(err))
},
})
const stream = ndJsonStream(input, output)
new AgentSideConnection((conn) => {
return new ACP.Agent(conn)
}, stream)
log.info("setup connection")
})
process.stdin.resume()
},
})

View file

@ -1,4 +1,4 @@
import z from "zod/v4"
import z from "zod"
import { EOL } from "os"
import { NamedError } from "../util/error"

View file

@ -1,4 +1,4 @@
import z from "zod/v4"
import z from "zod"
import { Config } from "../config/config"
import { Instance } from "../project/instance"

View file

@ -1,7 +1,7 @@
import { Log } from "../util/log"
import path from "path"
import os from "os"
import z from "zod/v4"
import z from "zod"
import { Filesystem } from "../util/filesystem"
import { ModelsDev } from "../provider/models"
import { mergeDeep, pipe } from "remeda"
@ -61,6 +61,11 @@ export namespace Config {
)),
]
if (Flag.OPENCODE_CONFIG_DIR) {
directories.push(Flag.OPENCODE_CONFIG_DIR)
log.debug("loading config from OPENCODE_CONFIG_DIR", { path: Flag.OPENCODE_CONFIG_DIR })
}
for (const dir of directories) {
await assertValid(dir)
installDependencies(dir)
@ -584,6 +589,7 @@ export namespace Config {
.optional(),
})
.optional(),
chatMaxRetries: z.number().optional().describe("Number of retries for chat completions on failure"),
disable_paste_summary: z.boolean().optional(),
})
.optional(),

View file

@ -1,7 +1,7 @@
import path from "path"
import { Global } from "../global"
import fs from "fs/promises"
import z from "zod/v4"
import z from "zod"
import { NamedError } from "../util/error"
import { lazy } from "../util/lazy"
import { Log } from "../util/log"

View file

@ -1,4 +1,4 @@
import z from "zod/v4"
import z from "zod"
import { Bus } from "../bus"
import { $ } from "bun"
import type { BunFile } from "bun"

View file

@ -2,7 +2,7 @@
import path from "path"
import { Global } from "../global"
import fs from "fs/promises"
import z from "zod/v4"
import z from "zod"
import { NamedError } from "../util/error"
import { lazy } from "../util/lazy"
import { $ } from "bun"
@ -218,7 +218,7 @@ export namespace Ripgrep {
code: "ENOENT",
errno: -2,
path: input.cwd,
});
})
}
const proc = Bun.spawn(args, {

View file

@ -1,4 +1,4 @@
import z from "zod/v4"
import z from "zod"
import { Bus } from "../bus"
import { Flag } from "../flag/flag"
import { Instance } from "../project/instance"

View file

@ -1,6 +1,7 @@
export namespace Flag {
export const OPENCODE_AUTO_SHARE = truthy("OPENCODE_AUTO_SHARE")
export const OPENCODE_CONFIG = process.env["OPENCODE_CONFIG"]
export const OPENCODE_CONFIG_DIR = process.env["OPENCODE_CONFIG_DIR"]
export const OPENCODE_CONFIG_CONTENT = process.env["OPENCODE_CONFIG_CONTENT"]
export const OPENCODE_DISABLE_AUTOUPDATE = truthy("OPENCODE_DISABLE_AUTOUPDATE")
export const OPENCODE_DISABLE_PRUNE = truthy("OPENCODE_DISABLE_PRUNE")
@ -9,6 +10,7 @@ export namespace Flag {
export const OPENCODE_DISABLE_LSP_DOWNLOAD = truthy("OPENCODE_DISABLE_LSP_DOWNLOAD")
export const OPENCODE_ENABLE_EXPERIMENTAL_MODELS = truthy("OPENCODE_ENABLE_EXPERIMENTAL_MODELS")
export const OPENCODE_DISABLE_AUTOCOMPACT = truthy("OPENCODE_DISABLE_AUTOCOMPACT")
export const OPENCODE_FAKE_VCS = process.env["OPENCODE_FAKE_VCS"]
// Experimental
export const OPENCODE_EXPERIMENTAL_WATCHER = truthy("OPENCODE_EXPERIMENTAL_WATCHER")

View file

@ -1,4 +1,4 @@
import z from "zod/v4"
import z from "zod"
import { randomBytes } from "crypto"
export namespace Identifier {

View file

@ -1,5 +1,5 @@
import { spawn } from "bun"
import z from "zod/v4"
import z from "zod"
import { NamedError } from "../util/error"
import { Log } from "../util/log"
import { Bus } from "../bus"

View file

@ -1,6 +1,6 @@
import path from "path"
import { $ } from "bun"
import z from "zod/v4"
import z from "zod"
import { NamedError } from "../util/error"
import { Bus } from "../bus"
import { Log } from "../util/log"

View file

@ -4,7 +4,7 @@ import type { Diagnostic as VSCodeDiagnostic } from "vscode-languageserver-types
import { Log } from "../util/log"
import { LANGUAGE_EXTENSIONS } from "./language"
import { Bus } from "../bus"
import z from "zod/v4"
import z from "zod"
import type { LSPServer } from "./server"
import { NamedError } from "../util/error"
import { withTimeout } from "../util/timeout"

View file

@ -2,7 +2,7 @@ import { Log } from "../util/log"
import { LSPClient } from "./client"
import path from "path"
import { LSPServer } from "./server"
import z from "zod/v4"
import z from "zod"
import { Config } from "../config/config"
import { spawn } from "child_process"
import { Instance } from "../project/instance"

View file

@ -931,9 +931,15 @@ export namespace LSPServer {
await fs.mkdir(installDir, { recursive: true })
if (ext === "zip") {
await $`unzip -o -q ${tempPath} -d ${installDir}`.quiet().nothrow()
const ok = await $`unzip -o -q ${tempPath} -d ${installDir}`.quiet().catch((error) => {
log.error("Failed to extract lua-language-server archive", { error })
})
if (!ok) return
} else {
await $`tar -xzf ${tempPath} -C ${installDir}`.nothrow()
const ok = await $`tar -xzf ${tempPath} -C ${installDir}`.quiet().catch((error) => {
log.error("Failed to extract lua-language-server archive", { error })
})
if (!ok) return
}
await fs.rm(tempPath, { force: true })
@ -947,7 +953,10 @@ export namespace LSPServer {
}
if (platform !== "win32") {
await $`chmod +x ${bin}`.nothrow()
const ok = await $`chmod +x ${bin}`.quiet().catch((error) => {
log.error("Failed to set executable permission for lua-language-server binary", { error })
})
if (!ok) return
}
log.info(`installed lua-language-server`, { bin })
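
The change above swaps `.nothrow()` for a `.catch()` that logs and yields `undefined`, then bails when the result is falsy. A standalone sketch of the same run-or-bail pattern with Bun's `$` (paths are placeholders):

```ts
import { $ } from "bun"

// Bun's $ throws on a non-zero exit; the catch logs and resolves to undefined.
const ok = await $`tar -xzf /tmp/pkg.tgz -C /tmp/install`.quiet().catch((error) => {
  console.error("extract failed", error)
})
if (!ok) process.exit(1) // bail instead of continuing with a broken install
```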

View file

@ -1,4 +1,4 @@
import z from "zod/v4"
import z from "zod"
import { Bus } from "../bus"
import { Log } from "../util/log"
import { Identifier } from "../id/id"

View file

@ -1,9 +1,10 @@
import z from "zod/v4"
import z from "zod"
import { Filesystem } from "../util/filesystem"
import path from "path"
import { $ } from "bun"
import { Storage } from "../storage/storage"
import { Log } from "../util/log"
import { Flag } from "@/flag/flag"
export namespace Project {
const log = Log.create({ service: "project" })
@ -31,6 +32,7 @@ export namespace Project {
const project: Info = {
id: "global",
worktree: "/",
vcs: Info.shape.vcs.parse(Flag.OPENCODE_FAKE_VCS),
time: {
created: Date.now(),
},

View file

@ -1,7 +1,7 @@
import { Global } from "../global"
import { Log } from "../util/log"
import path from "path"
import z from "zod/v4"
import z from "zod"
import { data } from "./models-macro" with { type: "macro" }
import { Installation } from "../installation"

View file

@ -1,4 +1,4 @@
import z from "zod/v4"
import z from "zod"
import path from "path"
import { Config } from "../config/config"
import { mergeDeep, sortBy } from "remeda"
@ -101,7 +101,7 @@ export namespace Provider {
"nova-pro",
"nova-premier",
"claude",
"deepseek"
"deepseek",
].some((m) => modelID.includes(m))
const isGovCloud = region.startsWith("us-gov")
if (modelRequiresPrefix && !isGovCloud) {
@ -422,14 +422,14 @@ export namespace Provider {
const modPath =
provider.id === "google-vertex-anthropic" ? `${installedPath}/dist/anthropic/index.mjs` : installedPath
const mod = await import(modPath)
if (options["timeout"] !== undefined) {
if (options["timeout"] !== undefined && options["timeout"] !== null) {
// Only override fetch if user explicitly sets timeout
options["fetch"] = async (input: any, init?: BunFetchRequestInit) => {
const { signal, ...rest } = init ?? {}
const signals: AbortSignal[] = []
if (signal) signals.push(signal)
signals.push(AbortSignal.timeout(options["timeout"]))
if (options["timeout"] !== false) signals.push(AbortSignal.timeout(options["timeout"]))
const combined = signals.length > 1 ? AbortSignal.any(signals) : signals[0]
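
A self-contained sketch of the signal handling in the hunk above, assuming `timeout` is either a millisecond count or `false` to disable the deadline; names here are illustrative:

```ts
// Combine a caller-provided abort signal with an optional timeout, as the fetch override above does.
async function fetchWithDeadline(input: string, init: RequestInit = {}, timeout: number | false = 30_000) {
  const signals: AbortSignal[] = []
  if (init.signal) signals.push(init.signal)
  if (timeout !== false) signals.push(AbortSignal.timeout(timeout))
  const combined = signals.length > 1 ? AbortSignal.any(signals) : signals[0]
  return fetch(input, { ...init, signal: combined })
}
```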
@ -517,14 +517,11 @@ export namespace Provider {
const provider = await state().then((state) => state.providers[providerID])
if (!provider) return
const priority = [
"claude-haiku-4-5",
"claude-haiku-4.5",
"3-5-haiku",
"3.5-haiku",
"gemini-2.5-flash",
"gpt-5-nano",
]
let priority = ["claude-haiku-4-5", "claude-haiku-4.5", "3-5-haiku", "3.5-haiku", "gemini-2.5-flash", "gpt-5-nano"]
// claude-haiku-4.5 is considered a premium model in github copilot, we shouldn't use premium requests for title gen
if (providerID === "github-copilot") {
priority = priority.filter((m) => m !== "claude-haiku-4.5")
}
for (const item of priority) {
for (const model of Object.keys(provider.info.models)) {
if (model.includes(item)) return getModel(providerID, model)

View file

@ -75,7 +75,7 @@ export namespace ProviderTransform {
export function temperature(_providerID: string, modelID: string) {
if (modelID.toLowerCase().includes("qwen")) return 0.55
if (modelID.toLowerCase().includes("claude")) return 1
if (modelID.toLowerCase().includes("claude")) return undefined
return 0
}
@ -92,7 +92,9 @@ export namespace ProviderTransform {
}
if (modelID.includes("gpt-5") && !modelID.includes("gpt-5-chat")) {
if (!modelID.includes("codex")) result["reasoningEffort"] = "medium"
if (!modelID.includes("codex") && !modelID.includes("gpt-5-pro")) {
result["reasoningEffort"] = "medium"
}
if (providerID !== "azure") {
result["textVerbosity"] = modelID.includes("codex") ? "medium" : "low"

View file

@ -5,7 +5,7 @@ import { Hono } from "hono"
import { cors } from "hono/cors"
import { stream, streamSSE } from "hono/streaming"
import { Session } from "../session"
import z from "zod/v4"
import z from "zod"
import { Provider } from "../provider/provider"
import { mapValues } from "remeda"
import { NamedError } from "../util/error"

View file

@ -7,7 +7,7 @@ import { defer } from "../util/defer"
import { MessageV2 } from "./message-v2"
import { SystemPrompt } from "./system"
import { Bus } from "../bus"
import z from "zod/v4"
import z from "zod"
import type { ModelsDev } from "../provider/models"
import { SessionPrompt } from "./prompt"
import { Flag } from "../flag/flag"
@ -16,6 +16,7 @@ import { Log } from "../util/log"
import { SessionLock } from "./lock"
import { ProviderTransform } from "@/provider/transform"
import { SessionRetry } from "./retry"
import { Config } from "@/config/config"
export namespace SessionCompaction {
const log = Log.create({ service: "session.compaction" })
@ -156,6 +157,7 @@ export namespace SessionCompaction {
error,
})
},
tools: model.info.tool_call ? {} : undefined,
messages: [
...system.map(
(x): ModelMessage => ({
@ -189,7 +191,11 @@ export namespace SessionCompaction {
case "text-delta":
part.text += value.text
if (value.providerMetadata) part.metadata = value.providerMetadata
if (part.text) await Session.updatePart(part)
if (part.text)
await Session.updatePart({
part,
delta: value.text,
})
continue
case "text-end": {
part.text = part.text.trimEnd()
@ -254,12 +260,14 @@ export namespace SessionCompaction {
}
let stream = doStream()
const cfg = await Config.get()
const maxRetries = cfg.experimental?.chatMaxRetries ?? MAX_RETRIES
let result = await process(stream, {
count: 0,
max: MAX_RETRIES,
max: maxRetries,
})
if (result.shouldRetry) {
for (let retry = 1; retry < MAX_RETRIES; retry++) {
for (let retry = 1; retry < maxRetries; retry++) {
const lastRetryPart = result.parts.findLast((p) => p.type === "retry")
if (lastRetryPart) {
@ -296,7 +304,7 @@ export namespace SessionCompaction {
stream = doStream()
result = await process(stream, {
count: retry,
max: MAX_RETRIES,
max: maxRetries,
})
if (!result.shouldRetry) {
break

View file

@ -1,5 +1,5 @@
import { Decimal } from "decimal.js"
import z from "zod/v4"
import z from "zod"
import { type LanguageModelUsage, type ProviderMetadata } from "ai"
import PROMPT_INITIALIZE from "../session/prompt/initialize.txt"
@ -78,6 +78,12 @@ export namespace Session {
export type ShareInfo = z.output<typeof ShareInfo>
export const Event = {
Created: Bus.event(
"session.created",
z.object({
info: Info,
}),
),
Updated: Bus.event(
"session.updated",
z.object({
@ -167,6 +173,9 @@ export namespace Session {
}
log.info("created", result)
await Storage.write(["session", Instance.project.id, result.id], result)
Bus.publish(Event.Created, {
info: result,
})
const cfg = await Config.get()
if (!result.parentID && (Flag.OPENCODE_AUTO_SHARE || cfg.share === "auto"))
share(result.id)
@ -340,10 +349,25 @@ export namespace Session {
},
)
export const updatePart = fn(MessageV2.Part, async (part) => {
const UpdatePartInput = z.union([
MessageV2.Part,
z.object({
part: MessageV2.TextPart,
delta: z.string(),
}),
z.object({
part: MessageV2.ReasoningPart,
delta: z.string(),
}),
])
export const updatePart = fn(UpdatePartInput, async (input) => {
const part = "delta" in input ? input.part : input
const delta = "delta" in input ? input.delta : undefined
await Storage.write(["part", part.messageID, part.id], part)
Bus.publish(MessageV2.Event.PartUpdated, {
part,
delta,
})
return part
})
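
`Session.updatePart` now accepts either a bare part or a `{ part, delta }` pair; a minimal sketch of the streaming shape, with the import alias assumed to mirror the `@/` paths used elsewhere in this diff:

```ts
import { Session } from "@/session" // assumed path alias
import type { MessageV2 } from "@/session/message-v2"

// Persist a streamed text chunk and publish it together with its delta.
async function appendChunk(part: MessageV2.TextPart, chunk: string) {
  part.text += chunk
  await Session.updatePart({ part, delta: chunk })
}
```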

View file

@ -1,4 +1,4 @@
import z from "zod/v4"
import z from "zod"
import { Instance } from "../project/instance"
import { Log } from "../util/log"
import { NamedError } from "../util/error"

View file

@ -1,4 +1,4 @@
import z from "zod/v4"
import z from "zod"
import { Bus } from "../bus"
import { NamedError } from "../util/error"
import { Message } from "./message"
@ -361,6 +361,7 @@ export namespace MessageV2 {
"message.part.updated",
z.object({
part: Part,
delta: z.string().optional(),
}),
),
PartRemoved: Bus.event(
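
On the consuming side, the optional `delta` lets subscribers stream only the fresh text. A small sketch using the Bus API as it appears elsewhere in this diff (relative import paths assume a file under the session directory):

```ts
import { Bus } from "../bus"
import { MessageV2 } from "./message-v2"

// Write streamed chunks as they arrive; non-delta updates are ignored here.
const unsub = Bus.subscribe(MessageV2.Event.PartUpdated, (event) => {
  if (event.properties.delta) process.stdout.write(event.properties.delta)
})
// later: unsub()
```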

View file

@ -1,4 +1,4 @@
import z from "zod/v4"
import z from "zod"
import { NamedError } from "../util/error"
export namespace Message {

View file

@ -1,7 +1,7 @@
import path from "path"
import os from "os"
import fs from "fs/promises"
import z from "zod/v4"
import z from "zod"
import { Identifier } from "../id/id"
import { MessageV2 } from "./message-v2"
import { Log } from "../util/log"
@ -50,6 +50,7 @@ import { Command } from "../command"
import { $, fileURLToPath } from "bun"
import { ConfigMarkdown } from "../config/markdown"
import { SessionSummary } from "./summary"
import { Config } from "@/config/config"
export namespace SessionPrompt {
const log = Log.create({ service: "session.prompt" })
@ -94,18 +95,9 @@ export namespace SessionPrompt {
})
.optional(),
agent: z.string().optional(),
noReply: z.boolean().optional(),
system: z.string().optional(),
tools: z.record(z.string(), z.boolean()).optional(),
/**
* ACP (Agent Client Protocol) connection details for streaming responses.
* When provided, enables real-time streaming and tool execution visibility.
*/
acpConnection: z
.object({
connection: z.any(), // AgentSideConnection - using any to avoid circular deps
sessionId: z.string(), // ACP session ID (different from opencode sessionID)
})
.optional(),
parts: z.array(
z.discriminatedUnion("type", [
MessageV2.TextPart.omit({
@ -152,6 +144,11 @@ export namespace SessionPrompt {
const userMsg = await createUserMessage(input)
await Session.touch(input.sessionID)
// Early return for context-only messages (no AI inference)
if (input.noReply) {
return userMsg
}
if (isBusy(input.sessionID)) {
return new Promise((resolve) => {
const queue = state().queued.get(input.sessionID) ?? []
@ -184,7 +181,6 @@ export namespace SessionPrompt {
agent: agent.name,
system,
abort: abort.signal,
acpConnection: input.acpConnection,
})
const tools = await resolveTools({
@ -335,12 +331,14 @@ export namespace SessionPrompt {
})
let stream = doStream()
const cfg = await Config.get()
const maxRetries = cfg.experimental?.chatMaxRetries ?? MAX_RETRIES
let result = await processor.process(stream, {
count: 0,
max: MAX_RETRIES,
max: maxRetries,
})
if (result.shouldRetry) {
for (let retry = 1; retry < MAX_RETRIES; retry++) {
for (let retry = 1; retry < maxRetries; retry++) {
const lastRetryPart = result.parts.findLast((p) => p.type === "retry")
if (lastRetryPart) {
@ -377,7 +375,7 @@ export namespace SessionPrompt {
stream = doStream()
result = await processor.process(stream, {
count: retry,
max: MAX_RETRIES,
max: maxRetries,
})
if (!result.shouldRetry) {
break
@ -587,7 +585,17 @@ export namespace SessionPrompt {
args,
},
)
const result = await execute(args, opts)
const result = await execute(args, opts).catch((err: unknown) => {
log.error("Error executing tool", { error: err, tool: key })
return {
content: [
{
type: "text",
text: `Failed to execute tool: ${err instanceof Error ? err.message : String(err)}`,
},
],
}
})
await Plugin.trigger(
"tool.execute.after",
@ -889,60 +897,6 @@ export namespace SessionPrompt {
return input.messages
}
/**
* Maps tool names to ACP tool kinds for consistent categorization.
* - read: Tools that read data (read, glob, grep, list, webfetch, docs)
* - edit: Tools that modify state (edit, write, bash)
* - other: All other tools (MCP tools, task, todowrite, etc.)
*/
function determineToolKind(toolName: string): "read" | "edit" | "other" {
const readTools = [
"read",
"glob",
"grep",
"list",
"webfetch",
"context7_resolve_library_id",
"context7_get_library_docs",
]
const editTools = ["edit", "write", "bash"]
if (readTools.includes(toolName.toLowerCase())) return "read"
if (editTools.includes(toolName.toLowerCase())) return "edit"
return "other"
}
/**
* Extracts file/directory locations from tool inputs for ACP notifications.
* Returns array of {path} objects that ACP clients can use for navigation.
*
* Examples:
* - read({filePath: "/foo/bar.ts"}) -> [{path: "/foo/bar.ts"}]
* - glob({pattern: "*.ts", path: "/src"}) -> [{path: "/src"}]
* - bash({command: "ls"}) -> [] (no file references)
*/
function extractLocations(toolName: string, input: Record<string, any>): { path: string }[] {
try {
switch (toolName.toLowerCase()) {
case "read":
case "edit":
case "write":
return input["filePath"] ? [{ path: input["filePath"] }] : []
case "glob":
case "grep":
return input["path"] ? [{ path: input["path"] }] : []
case "bash":
return []
case "list":
return input["path"] ? [{ path: input["path"] }] : []
default:
return []
}
} catch {
return []
}
}
export type Processor = Awaited<ReturnType<typeof createProcessor>>
async function createProcessor(input: {
sessionID: string
@ -951,10 +905,6 @@ export namespace SessionPrompt {
system: string[]
agent: string
abort: AbortSignal
acpConnection?: {
connection: any
sessionId: string
}
}) {
const toolcalls: Record<string, MessageV2.ToolPart> = {}
let snapshot: string | undefined
@ -1052,7 +1002,7 @@ export namespace SessionPrompt {
const part = reasoningMap[value.id]
part.text += value.text
if (value.providerMetadata) part.metadata = value.providerMetadata
if (part.text) await Session.updatePart(part)
if (part.text) await Session.updatePart({ part, delta: value.text })
}
break
@ -1086,26 +1036,6 @@ export namespace SessionPrompt {
},
})
toolcalls[value.id] = part as MessageV2.ToolPart
// Notify ACP client of pending tool call
if (input.acpConnection) {
await input.acpConnection.connection
.sessionUpdate({
sessionId: input.acpConnection.sessionId,
update: {
sessionUpdate: "tool_call",
toolCallId: value.id,
title: value.toolName,
kind: determineToolKind(value.toolName),
status: "pending",
locations: [], // Will be populated when we have input
rawInput: {},
},
})
.catch((err: Error) => {
log.error("failed to send tool pending to ACP", { error: err })
})
}
break
case "tool-input-delta":
@ -1130,24 +1060,6 @@ export namespace SessionPrompt {
metadata: value.providerMetadata,
})
toolcalls[value.toolCallId] = part as MessageV2.ToolPart
// Notify ACP client that tool is running
if (input.acpConnection) {
await input.acpConnection.connection
.sessionUpdate({
sessionId: input.acpConnection.sessionId,
update: {
sessionUpdate: "tool_call_update",
toolCallId: value.toolCallId,
status: "in_progress",
locations: extractLocations(value.toolName, value.input),
rawInput: value.input,
},
})
.catch((err: Error) => {
log.error("failed to send tool in_progress to ACP", { error: err })
})
}
}
break
}
@ -1170,32 +1082,6 @@ export namespace SessionPrompt {
},
})
// Notify ACP client that tool completed
if (input.acpConnection) {
await input.acpConnection.connection
.sessionUpdate({
sessionId: input.acpConnection.sessionId,
update: {
sessionUpdate: "tool_call_update",
toolCallId: value.toolCallId,
status: "completed",
content: [
{
type: "content",
content: {
type: "text",
text: value.output.output,
},
},
],
rawOutput: value.output,
},
})
.catch((err: Error) => {
log.error("failed to send tool completed to ACP", { error: err })
})
}
delete toolcalls[value.toolCallId]
}
break
@ -1218,34 +1104,6 @@ export namespace SessionPrompt {
},
})
// Notify ACP client of tool error
if (input.acpConnection) {
await input.acpConnection.connection
.sessionUpdate({
sessionId: input.acpConnection.sessionId,
update: {
sessionUpdate: "tool_call_update",
toolCallId: value.toolCallId,
status: "failed",
content: [
{
type: "content",
content: {
type: "text",
text: `Error: ${(value.error as any).toString()}`,
},
},
],
rawOutput: {
error: (value.error as any).toString(),
},
},
})
.catch((err: Error) => {
log.error("failed to send tool error to ACP", { error: err })
})
}
if (value.error instanceof Permission.RejectedError) {
blocked = true
}
@ -1324,26 +1182,11 @@ export namespace SessionPrompt {
if (currentText) {
currentText.text += value.text
if (value.providerMetadata) currentText.metadata = value.providerMetadata
if (currentText.text) await Session.updatePart(currentText)
// Send streaming chunk to ACP client
if (input.acpConnection && value.text) {
await input.acpConnection.connection
.sessionUpdate({
sessionId: input.acpConnection.sessionId,
update: {
sessionUpdate: "agent_message_chunk",
content: {
type: "text",
text: value.text,
},
},
})
.catch((err: Error) => {
log.error("failed to send text delta to ACP", { error: err })
// Don't fail the whole request if ACP notification fails
})
}
if (currentText.text)
await Session.updatePart({
part: currentText,
delta: value.text,
})
}
break
@ -1909,9 +1752,15 @@ export namespace SessionPrompt {
.then((result) => {
if (result.text)
return Session.update(input.session.id, (draft) => {
const cleaned = result.text.replace(/<think>[\s\S]*?<\/think>\s*/g, "").split("\n")[0]
const cleaned = result.text
.replace(/<think>[\s\S]*?<\/think>\s*/g, "")
.split("\n")
.map((line) => line.trim())
.find((line) => line.length > 0)
if (!cleaned) return
const title = cleaned.length > 100 ? cleaned.substring(0, 97) + "..." : cleaned
draft.title = title.trim()
draft.title = title
})
})
.catch((error) => {
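
For clarity, the reworked title cleanup strips `<think>` blocks, keeps the first non-empty line, and truncates to 100 characters; a standalone illustration with a made-up model response:

```ts
const raw = "<think>planning the change</think>\n\nAdd experimental chatMaxRetries config\nextra detail"
const cleaned = raw
  .replace(/<think>[\s\S]*?<\/think>\s*/g, "")
  .split("\n")
  .map((line) => line.trim())
  .find((line) => line.length > 0)
// cleaned === "Add experimental chatMaxRetries config"
const title = cleaned && cleaned.length > 100 ? cleaned.substring(0, 97) + "..." : cleaned
```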

View file

@ -1,81 +1,24 @@
You are OpenCode, the best coding agent on the planet.
You are an interactive CLI tool that helps users with software engineering tasks. Use the instructions below and the tools available to you to assist the user.
IMPORTANT: You must NEVER generate or guess URLs for the user unless you are confident that the URLs are for helping the user with programming. You may use URLs provided by the user in their messages or local files.
If the user asks for help or wants to give feedback inform them of the following:
- /help: Get help with using opencode
- To give feedback, users should report the issue at https://github.com/sst/opencode/issues
If the user asks for help or wants to give feedback inform them of the following:
- ctrl+p to list available actions
- To give feedback, users should report the issue at
https://github.com/sst/opencode
When the user directly asks about OpenCode (eg. "can OpenCode do...", "does OpenCode have..."), or asks in second person (eg. "are you able...", "can you do..."), or asks how to use a specific OpenCode feature (eg. implement a hook, write a slash command, or install an MCP server), use the WebFetch tool to gather information to answer the question from OpenCode docs. The list of available docs is available at https://opencode.ai/docs
# Tone and style
You should be concise, direct, and to the point.
You should be concise, direct, and to the point, while providing complete information and matching the level of detail you provide in your response with the level of complexity of the user's query or the work you have completed.
IMPORTANT: You should minimize output tokens as much as possible while maintaining helpfulness, quality, and accuracy. Only address the specific query or task at hand, avoiding tangential information unless absolutely critical for completing the request. If you can answer in 1-3 sentences or a short paragraph, please do.
IMPORTANT: You should NOT answer with unnecessary preamble or postamble (such as explaining your code or summarizing your action), unless the user asks you to.
Do not add additional code explanation summary unless requested by the user. After working on a file, just stop, rather than providing an explanation of what you did.
Answer the user's question directly, without elaboration, explanation, or details. One word answers are best. Avoid introductions, conclusions, and explanations. You MUST avoid text before/after your response, such as "The answer is <answer>.", "Here is the content of the file..." or "Based on the information provided, the answer is..." or "Here is what I will do next...". Here are some examples to demonstrate appropriate verbosity:
<example>
user: 2 + 2
assistant: 4
</example>
<example>
user: what is 2+2?
assistant: 4
</example>
<example>
user: is 11 a prime number?
assistant: Yes
</example>
<example>
user: what command should I run to list files in the current directory?
assistant: ls
</example>
<example>
user: what command should I run to watch files in the current directory?
assistant: [use the ls tool to list the files in the current directory, then read docs/commands in the relevant file to find out how to watch files]
npm run dev
</example>
<example>
user: How many golf balls fit inside a jetta?
assistant: 150000
</example>
<example>
user: what files are in the directory src/?
assistant: [runs ls and sees foo.c, bar.c, baz.c]
user: which file contains the implementation of foo?
assistant: src/foo.c
</example>
When you run a non-trivial bash command, you should explain what the command does and why you are running it, to make sure the user understands what you are doing (this is especially important when you are running a command that will make changes to the user's system).
Remember that your output will be displayed on a command line interface. Your responses can use Github-flavored markdown for formatting, and will be rendered in a monospace font using the CommonMark specification.
Output text to communicate with the user; all text you output outside of tool use is displayed to the user. Only use tools to complete tasks. Never use tools like Bash or code comments as means to communicate with the user during the session.
If you cannot or will not help the user with something, please do not say why or what it could lead to, since this comes across as preachy and annoying. Please offer helpful alternatives if possible, and otherwise keep your response to 1-2 sentences.
Only use emojis if the user explicitly requests it. Avoid using emojis in all communication unless asked.
IMPORTANT: Keep your responses short, since they will be displayed on a command line interface.
# Proactiveness
You are allowed to be proactive, but only when the user asks you to do something. You should strive to strike a balance between:
- Doing the right thing when asked, including taking actions and follow-up actions
- Not surprising the user with actions you take without asking
For example, if the user asks you how to approach something, you should do your best to answer their question first, and not immediately jump into taking actions.
- Only use emojis if the user explicitly requests it. Avoid using emojis in all communication unless asked.
- Your output will be displayed on a command line interface. Your responses should be short and concise. You can use Github-flavored markdown for formatting, and will be rendered in a monospace font using the CommonMark specification.
- Output text to communicate with the user; all text you output outside of tool use is displayed to the user. Only use tools to complete tasks. Never use tools like Bash or code comments as means to communicate with the user during the session.
- NEVER create files unless they're absolutely necessary for achieving your goal. ALWAYS prefer editing an existing file to creating a new one. This includes markdown files.
# Professional objectivity
Prioritize technical accuracy and truthfulness over validating the user's beliefs. Focus on facts and problem-solving, providing direct, objective technical info without any unnecessary superlatives, praise, or emotional validation. It is best for the user if Claude honestly applies the same rigorous standards to all ideas and disagrees when necessary, even if it may not be what the user wants to hear. Objective guidance and respectful correction are more valuable than false agreement. Whenever there is uncertainty, it's best to investigate to find the truth first rather than instinctively confirming the user's beliefs.
# Following conventions
When making changes to files, first understand the file's code conventions. Mimic code style, use existing libraries and utilities, and follow existing patterns.
- NEVER assume that a given library is available, even if it is well known. Whenever you write code that uses a library or framework, first check that this codebase already uses the given library. For example, you might look at neighboring files, or check the package.json (or cargo.toml, and so on depending on the language).
- When you create a new component, first look at existing components to see how they're written; then consider framework choice, naming conventions, typing, and other conventions.
- When you edit a piece of code, first look at the code's surrounding context (especially its imports) to understand the code's choice of frameworks and libraries. Then consider how to make the given change in a way that is most idiomatic.
- Always follow security best practices. Never introduce code that exposes or logs secrets and keys. Never commit secrets or keys to the repository.
# Code style
- IMPORTANT: DO NOT ADD ***ANY*** COMMENTS unless asked
Prioritize technical accuracy and truthfulness over validating the user's beliefs. Focus on facts and problem-solving, providing direct, objective technical info without any unnecessary superlatives, praise, or emotional validation. It is best for the user if OpenCode honestly applies the same rigorous standards to all ideas and disagrees when necessary, even if it may not be what the user wants to hear. Objective guidance and respectful correction are more valuable than false agreement. Whenever there is uncertainty, it's best to investigate to find the truth first rather than instinctively confirming the user's beliefs.
# Task Management
You have access to the TodoWrite tools to help you manage and plan tasks. Use these tools VERY frequently to ensure that you are tracking your tasks and giving the user visibility into your progress.
@ -87,7 +30,7 @@ Examples:
<example>
user: Run the build and fix any type errors
assistant: I'm going to use the TodoWrite tool to write the following items to the todo list:
assistant: I'm going to use the TodoWrite tool to write the following items to the todo list:
- Run the build
- Fix any type errors
@ -107,7 +50,6 @@ In the above example, the assistant completes all the tasks, including the 10 er
<example>
user: Help me write a new feature that allows users to track their usage metrics and export them to various formats
assistant: I'll help you implement a usage metrics tracking and export feature. Let me first use the TodoWrite tool to plan this task.
Adding the following todos to the todo list:
1. Research existing metrics tracking in the codebase
@ -124,23 +66,32 @@ I've found some existing telemetry code. Let me mark the first todo as in_progre
[Assistant continues implementing the feature step by step, marking todos as in_progress and completed as they go]
</example>
# Doing tasks
The user will primarily request you perform software engineering tasks. This includes solving bugs, adding new functionality, refactoring code, explaining code, and more. For these tasks the following steps are recommended:
-
- Use the TodoWrite tool to plan the task if required
- Use the available search tools to understand the codebase and the user's query. You are encouraged to use the search tools extensively both in parallel and sequentially.
- Implement the solution using all tools available to you
- Verify the solution if possible with tests. NEVER assume specific test framework or test script. Check the README or search codebase to determine the testing approach.
- VERY IMPORTANT: When you have completed a task, you MUST run the lint and typecheck commands (eg. npm run lint, npm run typecheck, ruff, etc.) with Bash if they were provided to you to ensure your code is correct. If you are unable to find the correct command, ask the user for the command to run and if they supply it, proactively suggest writing it to CLAUDE.md so that you will know to run it next time.
NEVER commit changes unless the user explicitly asks you to. It is VERY IMPORTANT to only commit when explicitly asked, otherwise the user will feel that you are being too proactive.
- Tool results and user messages may include <system-reminder> tags. <system-reminder> tags contain useful information and reminders. They are NOT part of the user's provided input or the tool result.
- Tool results and user messages may include <system-reminder> tags. <system-reminder> tags contain useful information and reminders. They are automatically added by the system, and bear no direct relation to the specific tool results or user messages in which they appear.
# Tool usage policy
- When doing file search, prefer to use the Task tool in order to reduce context usage.
- You should proactively use the Task tool with specialized agents when the task at hand matches the agent's description.
- When WebFetch returns a message about a redirect to a different host, you should immediately make a new WebFetch request with the redirect URL provided in the response.
- You have the capability to call multiple tools in a single response. When multiple independent pieces of information are requested, batch your tool calls together for optimal performance. When making multiple bash tool calls, you MUST send a single message with multiple tools calls to run the calls in parallel. For example, if you need to run "git status" and "git diff", send a single message with two tool calls to run the calls in parallel.
- You can call multiple tools in a single response. If you intend to call multiple tools and there are no dependencies between them, make all independent tool calls in parallel. Maximize use of parallel tool calls where possible to increase efficiency. However, if some tool calls depend on previous calls to inform dependent values, do NOT call these tools in parallel and instead call them sequentially. For instance, if one operation must complete before another starts, run these operations sequentially instead. Never use placeholders or guess missing parameters in tool calls.
- If the user specifies that they want you to run tools "in parallel", you MUST send a single message with multiple tool use content blocks. For example, if you need to launch multiple agents in parallel, send a single message with multiple Task tool calls.
- Use specialized tools instead of bash commands when possible, as this provides a better user experience. For file operations, use dedicated tools: Read for reading files instead of cat/head/tail, Edit for editing instead of sed/awk, and Write for creating files instead of cat with heredoc or echo redirection. Reserve bash tools exclusively for actual system commands and terminal operations that require shell execution. NEVER use bash echo or other command-line tools to communicate thoughts, explanations, or instructions to the user. Output all communication directly in your response text instead.
- VERY IMPORTANT: When exploring the codebase to gather context or to answer a question that is not a needle query for a specific file/class/function, it is CRITICAL that you use the Task tool instead of running search commands directly.
<example>
user: Where are errors from the client handled?
assistant: [Uses the Task tool to find the files that handle client errors instead of using Glob or Grep directly]
</example>
<example>
user: What is the codebase structure?
assistant: [Uses the Task tool]
</example>
IMPORTANT: Always use the TodoWrite tool to plan and track tasks throughout the conversation.

View file

@ -1,4 +1,4 @@
import z from "zod/v4"
import z from "zod"
import { Identifier } from "../id/id"
import { Snapshot } from "../snapshot"
import { MessageV2 } from "./message-v2"

View file

@ -86,7 +86,7 @@ export namespace SessionSummary {
) {
const result = await generateText({
model: small.language,
maxOutputTokens: 50,
maxOutputTokens: 100,
messages: [
{
role: "user",

View file

@ -1,4 +1,4 @@
import z from "zod/v4"
import z from "zod"
import { Bus } from "../bus"
import { Storage } from "../storage/storage"

View file

@ -3,7 +3,7 @@ import path from "path"
import fs from "fs/promises"
import { Log } from "../util/log"
import { Global } from "../global"
import z from "zod/v4"
import z from "zod"
import { Config } from "../config/config"
import { Instance } from "../project/instance"

View file

@ -1,4 +1,4 @@
import z from "zod/v4"
import z from "zod"
import { spawn } from "child_process"
import { Tool } from "./tool"
import DESCRIPTION from "./bash.txt"

View file

@ -3,11 +3,11 @@
// https://github.com/google-gemini/gemini-cli/blob/main/packages/core/src/utils/editCorrector.ts
// https://github.com/cline/cline/blob/main/evals/diff-edits/diff-apply/diff-06-26-25.ts
import z from "zod/v4"
import z from "zod"
import * as path from "path"
import { Tool } from "./tool"
import { LSP } from "../lsp"
import { createTwoFilesPatch } from "diff"
import { createTwoFilesPatch, diffLines } from "diff"
import { Permission } from "../permission"
import DESCRIPTION from "./edit.txt"
import { File } from "../file"
@ -16,6 +16,7 @@ import { FileTime } from "../file/time"
import { Filesystem } from "../util/filesystem"
import { Instance } from "../project/instance"
import { Agent } from "../agent/agent"
import { Snapshot } from "@/snapshot"
export const EditTool = Tool.define("edit", {
description: DESCRIPTION,
@ -114,10 +115,23 @@ export const EditTool = Tool.define("edit", {
}
}
const filediff: Snapshot.FileDiff = {
file: filePath,
before: contentOld,
after: contentNew,
additions: 0,
deletions: 0,
}
for (const change of diffLines(contentOld, contentNew)) {
if (change.added) filediff.additions += change.count || 0
if (change.removed) filediff.deletions += change.count || 0
}
return {
metadata: {
diagnostics,
diff,
filediff,
},
title: `${path.relative(Instance.worktree, filePath)}`,
output,
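
The new `filediff` metadata counts changed lines with `diffLines` from the `diff` package; a minimal standalone sketch of that counting:

```ts
import { diffLines } from "diff"

// Count added and removed lines between two revisions, as the edit tool above now does.
function countLineChanges(before: string, after: string) {
  let additions = 0
  let deletions = 0
  for (const change of diffLines(before, after)) {
    if (change.added) additions += change.count ?? 0
    if (change.removed) deletions += change.count ?? 0
  }
  return { additions, deletions }
}

countLineChanges("a\nb\n", "a\nc\nd\n") // -> { additions: 2, deletions: 1 }
```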

View file

@ -1,4 +1,4 @@
import z from "zod/v4"
import z from "zod"
import path from "path"
import { Tool } from "./tool"
import DESCRIPTION from "./glob.txt"

View file

@ -1,4 +1,4 @@
import z from "zod/v4"
import z from "zod"
import { Tool } from "./tool"
import { Ripgrep } from "../file/ripgrep"

View file

@ -1,4 +1,4 @@
import z from "zod/v4"
import z from "zod"
import { Tool } from "./tool"
export const InvalidTool = Tool.define("invalid", {

View file

@ -1,4 +1,4 @@
import z from "zod/v4"
import z from "zod"
import { Tool } from "./tool"
import * as path from "path"
import DESCRIPTION from "./ls.txt"

View file

@ -1,4 +1,4 @@
import z from "zod/v4"
import z from "zod"
import { Tool } from "./tool"
import path from "path"
import { LSP } from "../lsp"

View file

@ -1,4 +1,4 @@
import z from "zod/v4"
import z from "zod"
import { Tool } from "./tool"
import path from "path"
import { LSP } from "../lsp"

View file

@ -1,4 +1,4 @@
import z from "zod/v4"
import z from "zod"
import { Tool } from "./tool"
import { EditTool } from "./edit"
import DESCRIPTION from "./multiedit.txt"

View file

@ -1,4 +1,4 @@
import z from "zod/v4"
import z from "zod"
import * as path from "path"
import * as fs from "fs/promises"
import { Tool } from "./tool"
@ -17,7 +17,8 @@ const PatchParams = z.object({
})
export const PatchTool = Tool.define("patch", {
description: "Apply a patch to modify multiple files. Supports adding, updating, and deleting files with context-aware changes.",
description:
"Apply a patch to modify multiple files. Supports adding, updating, and deleting files with context-aware changes.",
parameters: PatchParams,
async execute(params, ctx) {
if (!params.patchText) {
@ -46,12 +47,12 @@ export const PatchTool = Tool.define("patch", {
type: "add" | "update" | "delete" | "move"
movePath?: string
}> = []
let totalDiff = ""
for (const hunk of hunks) {
const filePath = path.resolve(Instance.directory, hunk.path)
if (!Filesystem.contains(Instance.directory, filePath)) {
throw new Error(`File ${filePath} is not in the current working directory`)
}
@ -62,30 +63,30 @@ export const PatchTool = Tool.define("patch", {
const oldContent = ""
const newContent = hunk.contents
const diff = createTwoFilesPatch(filePath, filePath, oldContent, newContent)
fileChanges.push({
filePath,
oldContent,
newContent,
type: "add",
})
totalDiff += diff + "\n"
}
break
case "update":
// Check if file exists for update
const stats = await fs.stat(filePath).catch(() => null)
if (!stats || stats.isDirectory()) {
throw new Error(`File not found or is directory: ${filePath}`)
}
// Read file and update time tracking (like edit tool does)
await FileTime.assert(ctx.sessionID, filePath)
const oldContent = await fs.readFile(filePath, "utf-8")
let newContent = oldContent
// Apply the update chunks to get new content
try {
const fileUpdate = Patch.deriveNewContentsFromChunks(filePath, hunk.chunks)
@ -93,9 +94,9 @@ export const PatchTool = Tool.define("patch", {
} catch (error) {
throw new Error(`Failed to apply update to ${filePath}: ${error}`)
}
const diff = createTwoFilesPatch(filePath, filePath, oldContent, newContent)
fileChanges.push({
filePath,
oldContent,
@ -103,23 +104,23 @@ export const PatchTool = Tool.define("patch", {
type: hunk.move_path ? "move" : "update",
movePath: hunk.move_path ? path.resolve(Instance.directory, hunk.move_path) : undefined,
})
totalDiff += diff + "\n"
break
case "delete":
// Check if file exists for deletion
await FileTime.assert(ctx.sessionID, filePath)
const contentToDelete = await fs.readFile(filePath, "utf-8")
const deleteDiff = createTwoFilesPatch(filePath, filePath, contentToDelete, "")
fileChanges.push({
filePath,
oldContent: contentToDelete,
newContent: "",
type: "delete",
})
totalDiff += deleteDiff + "\n"
break
}
@ -141,7 +142,7 @@ export const PatchTool = Tool.define("patch", {
// Apply the changes
const changedFiles: string[] = []
for (const change of fileChanges) {
switch (change.type) {
case "add":
@ -153,12 +154,12 @@ export const PatchTool = Tool.define("patch", {
await fs.writeFile(change.filePath, change.newContent, "utf-8")
changedFiles.push(change.filePath)
break
case "update":
await fs.writeFile(change.filePath, change.newContent, "utf-8")
changedFiles.push(change.filePath)
break
case "move":
if (change.movePath) {
// Create parent directories for destination
@ -173,13 +174,13 @@ export const PatchTool = Tool.define("patch", {
changedFiles.push(change.movePath)
}
break
case "delete":
await fs.unlink(change.filePath)
changedFiles.push(change.filePath)
break
}
// Update file time tracking
FileTime.read(ctx.sessionID, change.filePath)
if (change.movePath) {
@ -193,7 +194,7 @@ export const PatchTool = Tool.define("patch", {
}
// Generate output summary
const relativePaths = changedFiles.map(filePath => path.relative(Instance.worktree, filePath))
const relativePaths = changedFiles.map((filePath) => path.relative(Instance.worktree, filePath))
const summary = `${fileChanges.length} files changed`
return {
@ -201,7 +202,7 @@ export const PatchTool = Tool.define("patch", {
metadata: {
diff: totalDiff,
},
output: `Patch applied successfully. ${summary}:\n${relativePaths.map(p => ` ${p}`).join("\n")}`,
output: `Patch applied successfully. ${summary}:\n${relativePaths.map((p) => ` ${p}`).join("\n")}`,
}
},
})
})

View file

@ -1,4 +1,4 @@
import z from "zod/v4"
import z from "zod"
import * as fs from "fs"
import * as path from "path"
import { Tool } from "./tool"

View file

@ -16,7 +16,7 @@ import { Instance } from "../project/instance"
import { Config } from "../config/config"
import path from "path"
import { type ToolDefinition } from "@opencode-ai/plugin"
import z from "zod/v4"
import z from "zod"
import { Plugin } from "../plugin"
export namespace ToolRegistry {

View file

@ -1,6 +1,6 @@
import { Tool } from "./tool"
import DESCRIPTION from "./task.txt"
import z from "zod/v4"
import z from "zod"
import { Session } from "../session"
import { Bus } from "../bus"
import { MessageV2 } from "../session/message-v2"

View file

@ -1,4 +1,4 @@
import z from "zod/v4"
import z from "zod"
import { Tool } from "./tool"
import DESCRIPTION_WRITE from "./todowrite.txt"
import { Todo } from "../session/todo"

View file

@ -1,4 +1,4 @@
import z from "zod/v4"
import z from "zod"
import type { MessageV2 } from "../session/message-v2"
export namespace Tool {

View file

@ -1,4 +1,4 @@
import z from "zod/v4"
import z from "zod"
import { Tool } from "./tool"
import TurndownService from "turndown"
import DESCRIPTION from "./webfetch.txt"

View file

@ -1,4 +1,4 @@
import z from "zod/v4"
import z from "zod"
import * as path from "path"
import { Tool } from "./tool"
import { LSP } from "../lsp"

View file

@ -1,4 +1,4 @@
import z from "zod/v4"
import z from "zod"
export abstract class NamedError extends Error {
abstract schema(): z.core.$ZodType

View file

@ -1,7 +1,7 @@
import path from "path"
import fs from "fs/promises"
import { Global } from "../global"
import z from "zod/v4"
import z from "zod"
export namespace Log {
export const Level = z.enum(["DEBUG", "INFO", "WARN", "ERROR"]).meta({ ref: "LogLevel", description: "Log level" })

View file

@ -0,0 +1,71 @@
import { describe, expect, test } from "bun:test"
import path from "path"
import { Session } from "../../src/session"
import { Bus } from "../../src/bus"
import { Log } from "../../src/util/log"
import { Instance } from "../../src/project/instance"
const projectRoot = path.join(__dirname, "../..")
Log.init({ print: false })
describe("session.started event", () => {
test("should emit session.started event when session is created", async () => {
await Instance.provide({
directory: projectRoot,
fn: async () => {
let eventReceived = false
let receivedInfo: Session.Info | undefined
const unsub = Bus.subscribe(Session.Event.Created, (event) => {
eventReceived = true
receivedInfo = event.properties.info as Session.Info
})
const session = await Session.create({})
await new Promise((resolve) => setTimeout(resolve, 100))
unsub()
expect(eventReceived).toBe(true)
expect(receivedInfo).toBeDefined()
expect(receivedInfo?.id).toBe(session.id)
expect(receivedInfo?.projectID).toBe(session.projectID)
expect(receivedInfo?.directory).toBe(session.directory)
expect(receivedInfo?.title).toBe(session.title)
await Session.remove(session.id)
},
})
})
test("session.started event should be emitted before session.updated", async () => {
await Instance.provide({
directory: projectRoot,
fn: async () => {
const events: string[] = []
const unsubStarted = Bus.subscribe(Session.Event.Created, () => {
events.push("started")
})
const unsubUpdated = Bus.subscribe(Session.Event.Updated, () => {
events.push("updated")
})
const session = await Session.create({})
await new Promise((resolve) => setTimeout(resolve, 100))
unsubStarted()
unsubUpdated()
expect(events).toContain("started")
expect(events).toContain("updated")
expect(events.indexOf("started")).toBeLessThan(events.indexOf("updated"))
await Session.remove(session.id)
},
})
})
})

View file

@ -1,7 +1,7 @@
{
"$schema": "https://json.schemastore.org/package.json",
"name": "@opencode-ai/plugin",
"version": "0.15.16",
"version": "0.15.20",
"type": "module",
"scripts": {
"typecheck": "tsgo --noEmit",

View file

@ -1,4 +1,4 @@
import { z } from "zod/v4"
import { z } from "zod"
export type ToolContext = {
sessionID: string

View file

@ -1,3 +1,3 @@
{
".": "0.16.2"
".": "0.18.0"
}

View file

@ -1,4 +1,4 @@
configured_endpoints: 43
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/opencode%2Fopencode-273fc9fea965af661dfed0902d00f10d6ed844f0681ca861a58821c4902eac2f.yml
openapi_spec_hash: c6144f23a1bac75f79be86edd405552b
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/opencode%2Fopencode-92f9d0f8daee2ea7458f8b9f1d7a7f941ff932442ad944bc7576254d5978b6d5.yml
openapi_spec_hash: 5b785c4ff6fb69039915f0e746abdaf9
config_hash: 026ef000d34bf2f930e7b41e77d2d3ff

View file

@ -1,5 +1,21 @@
# Changelog
## 0.18.0 (2025-10-10)
Full Changelog: [v0.17.0...v0.18.0](https://github.com/sst/opencode-sdk-go/compare/v0.17.0...v0.18.0)
### Features
* **api:** api update ([0a7f5e7](https://github.com/sst/opencode-sdk-go/commit/0a7f5e710911506512a132ba39e0593c412beb77))
## 0.17.0 (2025-10-07)
Full Changelog: [v0.16.2...v0.17.0](https://github.com/sst/opencode-sdk-go/compare/v0.16.2...v0.17.0)
### Features
* **api:** api update ([84a3df5](https://github.com/sst/opencode-sdk-go/commit/84a3df50a7ff3d87e5593e4f29dfb5d561f71cc3))
## 0.16.2 (2025-09-26)
Full Changelog: [v0.16.1...v0.16.2](https://github.com/sst/opencode-sdk-go/compare/v0.16.1...v0.16.2)

View file

@ -24,7 +24,7 @@ Or to pin the version:
<!-- x-release-please-start-version -->
```sh
go get -u 'github.com/sst/opencode-sdk-go@v0.16.2'
go get -u 'github.com/sst/opencode-sdk-go@v0.18.0'
```
<!-- x-release-please-end -->

View file

@ -62,7 +62,9 @@ type Model struct {
Temperature bool `json:"temperature,required"`
ToolCall bool `json:"tool_call,required"`
Experimental bool `json:"experimental"`
Modalities ModelModalities `json:"modalities"`
Provider ModelProvider `json:"provider"`
Status ModelStatus `json:"status"`
JSON modelJSON `json:"-"`
}
@ -79,7 +81,9 @@ type modelJSON struct {
Temperature apijson.Field
ToolCall apijson.Field
Experimental apijson.Field
Modalities apijson.Field
Provider apijson.Field
Status apijson.Field
raw string
ExtraFields map[string]apijson.Field
}
@ -140,6 +144,64 @@ func (r modelLimitJSON) RawJSON() string {
return r.raw
}
type ModelModalities struct {
Input []ModelModalitiesInput `json:"input,required"`
Output []ModelModalitiesOutput `json:"output,required"`
JSON modelModalitiesJSON `json:"-"`
}
// modelModalitiesJSON contains the JSON metadata for the struct [ModelModalities]
type modelModalitiesJSON struct {
Input apijson.Field
Output apijson.Field
raw string
ExtraFields map[string]apijson.Field
}
func (r *ModelModalities) UnmarshalJSON(data []byte) (err error) {
return apijson.UnmarshalRoot(data, r)
}
func (r modelModalitiesJSON) RawJSON() string {
return r.raw
}
type ModelModalitiesInput string
const (
ModelModalitiesInputText ModelModalitiesInput = "text"
ModelModalitiesInputAudio ModelModalitiesInput = "audio"
ModelModalitiesInputImage ModelModalitiesInput = "image"
ModelModalitiesInputVideo ModelModalitiesInput = "video"
ModelModalitiesInputPdf ModelModalitiesInput = "pdf"
)
func (r ModelModalitiesInput) IsKnown() bool {
switch r {
case ModelModalitiesInputText, ModelModalitiesInputAudio, ModelModalitiesInputImage, ModelModalitiesInputVideo, ModelModalitiesInputPdf:
return true
}
return false
}
type ModelModalitiesOutput string
const (
ModelModalitiesOutputText ModelModalitiesOutput = "text"
ModelModalitiesOutputAudio ModelModalitiesOutput = "audio"
ModelModalitiesOutputImage ModelModalitiesOutput = "image"
ModelModalitiesOutputVideo ModelModalitiesOutput = "video"
ModelModalitiesOutputPdf ModelModalitiesOutput = "pdf"
)
func (r ModelModalitiesOutput) IsKnown() bool {
switch r {
case ModelModalitiesOutputText, ModelModalitiesOutputAudio, ModelModalitiesOutputImage, ModelModalitiesOutputVideo, ModelModalitiesOutputPdf:
return true
}
return false
}
type ModelProvider struct {
Npm string `json:"npm,required"`
JSON modelProviderJSON `json:"-"`
@ -160,6 +222,21 @@ func (r modelProviderJSON) RawJSON() string {
return r.raw
}
type ModelStatus string
const (
ModelStatusAlpha ModelStatus = "alpha"
ModelStatusBeta ModelStatus = "beta"
)
func (r ModelStatus) IsKnown() bool {
switch r {
case ModelStatusAlpha, ModelStatusBeta:
return true
}
return false
}
type Provider struct {
ID string `json:"id,required"`
Env []string `json:"env,required"`

View file

@ -1567,19 +1567,21 @@ func (r configProviderJSON) RawJSON() string {
}
type ConfigProviderModel struct {
ID string `json:"id"`
Attachment bool `json:"attachment"`
Cost ConfigProviderModelsCost `json:"cost"`
Experimental bool `json:"experimental"`
Limit ConfigProviderModelsLimit `json:"limit"`
Name string `json:"name"`
Options map[string]interface{} `json:"options"`
Provider ConfigProviderModelsProvider `json:"provider"`
Reasoning bool `json:"reasoning"`
ReleaseDate string `json:"release_date"`
Temperature bool `json:"temperature"`
ToolCall bool `json:"tool_call"`
JSON configProviderModelJSON `json:"-"`
ID string `json:"id"`
Attachment bool `json:"attachment"`
Cost ConfigProviderModelsCost `json:"cost"`
Experimental bool `json:"experimental"`
Limit ConfigProviderModelsLimit `json:"limit"`
Modalities ConfigProviderModelsModalities `json:"modalities"`
Name string `json:"name"`
Options map[string]interface{} `json:"options"`
Provider ConfigProviderModelsProvider `json:"provider"`
Reasoning bool `json:"reasoning"`
ReleaseDate string `json:"release_date"`
Status ConfigProviderModelsStatus `json:"status"`
Temperature bool `json:"temperature"`
ToolCall bool `json:"tool_call"`
JSON configProviderModelJSON `json:"-"`
}
// configProviderModelJSON contains the JSON metadata for the struct
@ -1590,11 +1592,13 @@ type configProviderModelJSON struct {
Cost apijson.Field
Experimental apijson.Field
Limit apijson.Field
Modalities apijson.Field
Name apijson.Field
Options apijson.Field
Provider apijson.Field
Reasoning apijson.Field
ReleaseDate apijson.Field
Status apijson.Field
Temperature apijson.Field
ToolCall apijson.Field
raw string
@ -1659,6 +1663,65 @@ func (r configProviderModelsLimitJSON) RawJSON() string {
return r.raw
}
type ConfigProviderModelsModalities struct {
Input []ConfigProviderModelsModalitiesInput `json:"input,required"`
Output []ConfigProviderModelsModalitiesOutput `json:"output,required"`
JSON configProviderModelsModalitiesJSON `json:"-"`
}
// configProviderModelsModalitiesJSON contains the JSON metadata for the struct
// [ConfigProviderModelsModalities]
type configProviderModelsModalitiesJSON struct {
Input apijson.Field
Output apijson.Field
raw string
ExtraFields map[string]apijson.Field
}
func (r *ConfigProviderModelsModalities) UnmarshalJSON(data []byte) (err error) {
return apijson.UnmarshalRoot(data, r)
}
func (r configProviderModelsModalitiesJSON) RawJSON() string {
return r.raw
}
type ConfigProviderModelsModalitiesInput string
const (
ConfigProviderModelsModalitiesInputText ConfigProviderModelsModalitiesInput = "text"
ConfigProviderModelsModalitiesInputAudio ConfigProviderModelsModalitiesInput = "audio"
ConfigProviderModelsModalitiesInputImage ConfigProviderModelsModalitiesInput = "image"
ConfigProviderModelsModalitiesInputVideo ConfigProviderModelsModalitiesInput = "video"
ConfigProviderModelsModalitiesInputPdf ConfigProviderModelsModalitiesInput = "pdf"
)
func (r ConfigProviderModelsModalitiesInput) IsKnown() bool {
switch r {
case ConfigProviderModelsModalitiesInputText, ConfigProviderModelsModalitiesInputAudio, ConfigProviderModelsModalitiesInputImage, ConfigProviderModelsModalitiesInputVideo, ConfigProviderModelsModalitiesInputPdf:
return true
}
return false
}
type ConfigProviderModelsModalitiesOutput string
const (
ConfigProviderModelsModalitiesOutputText ConfigProviderModelsModalitiesOutput = "text"
ConfigProviderModelsModalitiesOutputAudio ConfigProviderModelsModalitiesOutput = "audio"
ConfigProviderModelsModalitiesOutputImage ConfigProviderModelsModalitiesOutput = "image"
ConfigProviderModelsModalitiesOutputVideo ConfigProviderModelsModalitiesOutput = "video"
ConfigProviderModelsModalitiesOutputPdf ConfigProviderModelsModalitiesOutput = "pdf"
)
func (r ConfigProviderModelsModalitiesOutput) IsKnown() bool {
switch r {
case ConfigProviderModelsModalitiesOutputText, ConfigProviderModelsModalitiesOutputAudio, ConfigProviderModelsModalitiesOutputImage, ConfigProviderModelsModalitiesOutputVideo, ConfigProviderModelsModalitiesOutputPdf:
return true
}
return false
}
type ConfigProviderModelsProvider struct {
Npm string `json:"npm,required"`
JSON configProviderModelsProviderJSON `json:"-"`
@ -1680,6 +1743,21 @@ func (r configProviderModelsProviderJSON) RawJSON() string {
return r.raw
}
type ConfigProviderModelsStatus string
const (
ConfigProviderModelsStatusAlpha ConfigProviderModelsStatus = "alpha"
ConfigProviderModelsStatusBeta ConfigProviderModelsStatus = "beta"
)
func (r ConfigProviderModelsStatus) IsKnown() bool {
switch r {
case ConfigProviderModelsStatusAlpha, ConfigProviderModelsStatusBeta:
return true
}
return false
}
type ConfigProviderOptions struct {
APIKey string `json:"apiKey"`
BaseURL string `json:"baseURL"`

Some files were not shown because too many files have changed in this diff.