Compare commits


No commits in common. "dev" and "v0.1.191" have entirely different histories.

209 changed files with 8975 additions and 17756 deletions


@@ -28,5 +28,5 @@ jobs:
git config --local user.email "action@github.com"
git config --local user.name "GitHub Action"
git add STATS.md
git diff --staged --quiet || git commit -m "ignore: update download stats $(date -I)"
git diff --staged --quiet || git commit -m "Update download stats $(date -I)"
git push


@@ -9,7 +9,7 @@
</p>
<p align="center">AI coding agent, built for the terminal.</p>
<p align="center">
<a href="https://opencode.ai/discord"><img alt="Discord" src="https://img.shields.io/discord/1391832426048651334?style=flat-square&label=discord" /></a>
<a href="https://opencode.ai/docs"><img alt="View docs" src="https://img.shields.io/badge/view-docs-blue?style=flat-square" /></a>
<a href="https://www.npmjs.com/package/opencode-ai"><img alt="npm" src="https://img.shields.io/npm/v/opencode-ai?style=flat-square" /></a>
<a href="https://github.com/sst/opencode/actions/workflows/publish.yml"><img alt="Build status" src="https://img.shields.io/github/actions/workflow/status/sst/opencode/publish.yml?style=flat-square&branch=dev" /></a>
</p>
@@ -76,4 +76,4 @@ The other confusingly named repo has no relation to this one. You can [read the
---
**Join our community** [Discord](https://discord.gg/opencode) | [YouTube](https://www.youtube.com/c/sst-dev) | [X.com](https://x.com/SST_dev)
**Join our community** [YouTube](https://www.youtube.com/c/sst-dev) | [X.com](https://x.com/SST_dev)


@@ -1,15 +1,10 @@
# Download Stats
| Date | GitHub Downloads | npm Downloads | Total |
| ---------- | ---------------- | --------------- | ----------------- |
| ---------- | ---------------- | --------------- | --------------- |
| 2025-06-29 | 18,789 (+0) | 39,420 (+0) | 58,209 (+0) |
| 2025-06-30 | 20,127 (+1,338) | 41,059 (+1,639) | 61,186 (+2,977) |
| 2025-07-01 | 22,108 (+1,981) | 43,745 (+2,686) | 65,853 (+4,667) |
| 2025-07-02 | 24,814 (+2,706) | 46,168 (+2,423) | 70,982 (+5,129) |
| 2025-07-03 | 27,834 (+3,020) | 49,955 (+3,787) | 77,789 (+6,807) |
| 2025-07-04 | 30,608 (+2,774) | 54,758 (+4,803) | 85,366 (+7,577) |
| 2025-07-05 | 32,524 (+1,916) | 58,371 (+3,613) | 90,895 (+5,529) |
| 2025-07-06 | 33,766 (+1,242) | 59,694 (+1,323) | 93,460 (+2,565) |
| 2025-07-08 | 38,052 (+4,286) | 64,468 (+4,774) | 102,520 (+9,060) |
| 2025-07-10 | 43,796 (+5,744) | 71,402 (+6,934) | 115,198 (+12,678) |
| 2025-07-11 | 46,982 (+3,186) | 77,462 (+6,060) | 124,444 (+9,246) |

bun.lock

@@ -11,11 +11,6 @@
"packages/function": {
"name": "@opencode/function",
"version": "0.0.1",
"dependencies": {
"@octokit/auth-app": "8.0.1",
"@octokit/rest": "22.0.0",
"jose": "6.0.11",
},
"devDependencies": {
"@cloudflare/workers-types": "4.20250522.0",
"@types/node": "catalog:",
@@ -36,6 +31,7 @@
"@openauthjs/openauth": "0.4.3",
"@standard-schema/spec": "1.0.0",
"ai": "catalog:",
"air": "0.4.14",
"decimal.js": "10.5.0",
"diff": "8.0.2",
"env-paths": "3.0.0",
@@ -83,12 +79,11 @@
"lang-map": "0.4.0",
"luxon": "3.6.1",
"marked": "15.0.12",
"marked-shiki": "1.2.0",
"rehype-autolink-headings": "7.1.0",
"sharp": "0.32.5",
"shiki": "3.4.2",
"solid-js": "1.9.7",
"toolbeam-docs-theme": "0.4.3",
"toolbeam-docs-theme": "0.4.1",
},
"devDependencies": {
"@types/node": "catalog:",
@@ -101,22 +96,30 @@
"sharp",
"esbuild",
],
"patchedDependencies": {
"ai@4.3.16": "patches/ai@4.3.16.patch",
},
"overrides": {
"zod": "3.24.2",
},
"catalog": {
"@types/node": "22.13.9",
"ai": "5.0.0-beta.7",
"ai": "4.3.16",
"typescript": "5.8.2",
"zod": "3.25.49",
"zod": "3.24.2",
},
"packages": {
"@ai-sdk/amazon-bedrock": ["@ai-sdk/amazon-bedrock@2.2.10", "", { "dependencies": { "@ai-sdk/provider": "1.1.3", "@ai-sdk/provider-utils": "2.2.8", "@smithy/eventstream-codec": "^4.0.1", "@smithy/util-utf8": "^4.0.0", "aws4fetch": "^1.0.20" }, "peerDependencies": { "zod": "^3.0.0" } }, "sha512-icLGO7Q0NinnHIPgT+y1QjHVwH4HwV+brWbvM+FfCG2Afpa89PyKa3Ret91kGjZpBgM/xnj1B7K5eM+rRlsXQA=="],
"@ai-sdk/anthropic": ["@ai-sdk/anthropic@1.2.12", "", { "dependencies": { "@ai-sdk/provider": "1.1.3", "@ai-sdk/provider-utils": "2.2.8" }, "peerDependencies": { "zod": "^3.0.0" } }, "sha512-YSzjlko7JvuiyQFmI9RN1tNZdEiZxc+6xld/0tq/VkJaHpEzGAb1yiNxxvmYVcjvfu/PcvCxAAYXmTYQQ63IHQ=="],
"@ai-sdk/gateway": ["@ai-sdk/gateway@1.0.0-beta.3", "", { "dependencies": { "@ai-sdk/provider": "2.0.0-beta.1", "@ai-sdk/provider-utils": "3.0.0-beta.2" }, "peerDependencies": { "zod": "^3.25.49" } }, "sha512-g49gMSkXy94lYvl5LRh438OR/0JCG6ol0jV+iLot7cy5HLltZlGocEuauETBu4b10mDXOd7XIjTEZoQpYFMYLQ=="],
"@ai-sdk/provider": ["@ai-sdk/provider@1.1.3", "", { "dependencies": { "json-schema": "^0.4.0" } }, "sha512-qZMxYJ0qqX/RfnuIaab+zp8UAeJn/ygXXAffR5I4N0n1IrvA6qBsjc8hXLmBiMV2zoXlifkacF7sEFnYnjBcqg=="],
"@ai-sdk/provider": ["@ai-sdk/provider@2.0.0-beta.1", "", { "dependencies": { "json-schema": "^0.4.0" } }, "sha512-Z8SPncMtS3RsoXITmT7NVwrAq6M44dmw0DoUOYJqNNtCu8iMWuxB8Nxsoqpa0uEEy9R1V1ZThJAXTYgjTUxl3w=="],
"@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@2.2.8", "", { "dependencies": { "@ai-sdk/provider": "1.1.3", "nanoid": "^3.3.8", "secure-json-parse": "^2.7.0" }, "peerDependencies": { "zod": "^3.23.8" } }, "sha512-fqhG+4sCVv8x7nFzYnFo19ryhAa3w096Kmc3hWxMQfW/TubPOmt3A6tYZhl4mUfQWWQMsuSkLrtjlWuXBVSGQA=="],
"@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@3.0.0-beta.2", "", { "dependencies": { "@ai-sdk/provider": "2.0.0-beta.1", "@standard-schema/spec": "^1.0.0", "eventsource-parser": "^3.0.3", "zod-to-json-schema": "^3.24.1" }, "peerDependencies": { "zod": "^3.25.49" } }, "sha512-H4K+4weOVgWqrDDeAbQWoA4U5mN4WrQPHQFdH7ynQYcnhj/pzctU9Q6mGlR5ESMWxaXxazxlOblSITlXo9bahA=="],
"@ai-sdk/react": ["@ai-sdk/react@1.2.12", "", { "dependencies": { "@ai-sdk/provider-utils": "2.2.8", "@ai-sdk/ui-utils": "1.2.11", "swr": "^2.2.5", "throttleit": "2.1.0" }, "peerDependencies": { "react": "^18 || ^19 || ^19.0.0-rc", "zod": "^3.23.8" }, "optionalPeers": ["zod"] }, "sha512-jK1IZZ22evPZoQW3vlkZ7wvjYGYF+tRBKXtrcolduIkQ/m/sOAVcVeVDUDvh1T91xCnWCdUGCPZg2avZ90mv3g=="],
"@ai-sdk/ui-utils": ["@ai-sdk/ui-utils@1.2.11", "", { "dependencies": { "@ai-sdk/provider": "1.1.3", "@ai-sdk/provider-utils": "2.2.8", "zod-to-json-schema": "^3.24.1" }, "peerDependencies": { "zod": "^3.23.8" } }, "sha512-3zcwCc8ezzFlwp3ZD15wAPjf2Au4s3vAbKsXQVyhxODHcmu0iyPO2Eua6D/vicq/AUm/BAo60r97O6HU+EI0+w=="],
"@ampproject/remapping": ["@ampproject/remapping@2.3.0", "", { "dependencies": { "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw=="],
@@ -342,42 +345,6 @@
"@modelcontextprotocol/sdk": ["@modelcontextprotocol/sdk@1.6.1", "", { "dependencies": { "content-type": "^1.0.5", "cors": "^2.8.5", "eventsource": "^3.0.2", "express": "^5.0.1", "express-rate-limit": "^7.5.0", "pkce-challenge": "^4.1.0", "raw-body": "^3.0.0", "zod": "^3.23.8", "zod-to-json-schema": "^3.24.1" } }, "sha512-oxzMzYCkZHMntzuyerehK3fV6A2Kwh5BD6CGEJSVDU2QNEhfLOptf2X7esQgaHZXHZY0oHmMsOtIDLP71UJXgA=="],
"@octokit/auth-app": ["@octokit/auth-app@8.0.1", "", { "dependencies": { "@octokit/auth-oauth-app": "^9.0.1", "@octokit/auth-oauth-user": "^6.0.0", "@octokit/request": "^10.0.2", "@octokit/request-error": "^7.0.0", "@octokit/types": "^14.0.0", "toad-cache": "^3.7.0", "universal-github-app-jwt": "^2.2.0", "universal-user-agent": "^7.0.0" } }, "sha512-P2J5pB3pjiGwtJX4WqJVYCtNkcZ+j5T2Wm14aJAEIC3WJOrv12jvBley3G1U/XI8q9o1A7QMG54LiFED2BiFlg=="],
"@octokit/auth-oauth-app": ["@octokit/auth-oauth-app@9.0.1", "", { "dependencies": { "@octokit/auth-oauth-device": "^8.0.1", "@octokit/auth-oauth-user": "^6.0.0", "@octokit/request": "^10.0.2", "@octokit/types": "^14.0.0", "universal-user-agent": "^7.0.0" } }, "sha512-TthWzYxuHKLAbmxdFZwFlmwVyvynpyPmjwc+2/cI3cvbT7mHtsAW9b1LvQaNnAuWL+pFnqtxdmrU8QpF633i1g=="],
"@octokit/auth-oauth-device": ["@octokit/auth-oauth-device@8.0.1", "", { "dependencies": { "@octokit/oauth-methods": "^6.0.0", "@octokit/request": "^10.0.2", "@octokit/types": "^14.0.0", "universal-user-agent": "^7.0.0" } }, "sha512-TOqId/+am5yk9zor0RGibmlqn4V0h8vzjxlw/wYr3qzkQxl8aBPur384D1EyHtqvfz0syeXji4OUvKkHvxk/Gw=="],
"@octokit/auth-oauth-user": ["@octokit/auth-oauth-user@6.0.0", "", { "dependencies": { "@octokit/auth-oauth-device": "^8.0.1", "@octokit/oauth-methods": "^6.0.0", "@octokit/request": "^10.0.2", "@octokit/types": "^14.0.0", "universal-user-agent": "^7.0.0" } }, "sha512-GV9IW134PHsLhtUad21WIeP9mlJ+QNpFd6V9vuPWmaiN25HEJeEQUcS4y5oRuqCm9iWDLtfIs+9K8uczBXKr6A=="],
"@octokit/auth-token": ["@octokit/auth-token@6.0.0", "", {}, "sha512-P4YJBPdPSpWTQ1NU4XYdvHvXJJDxM6YwpS0FZHRgP7YFkdVxsWcpWGy/NVqlAA7PcPCnMacXlRm1y2PFZRWL/w=="],
"@octokit/core": ["@octokit/core@7.0.3", "", { "dependencies": { "@octokit/auth-token": "^6.0.0", "@octokit/graphql": "^9.0.1", "@octokit/request": "^10.0.2", "@octokit/request-error": "^7.0.0", "@octokit/types": "^14.0.0", "before-after-hook": "^4.0.0", "universal-user-agent": "^7.0.0" } }, "sha512-oNXsh2ywth5aowwIa7RKtawnkdH6LgU1ztfP9AIUCQCvzysB+WeU8o2kyyosDPwBZutPpjZDKPQGIzzrfTWweQ=="],
"@octokit/endpoint": ["@octokit/endpoint@11.0.0", "", { "dependencies": { "@octokit/types": "^14.0.0", "universal-user-agent": "^7.0.2" } }, "sha512-hoYicJZaqISMAI3JfaDr1qMNi48OctWuOih1m80bkYow/ayPw6Jj52tqWJ6GEoFTk1gBqfanSoI1iY99Z5+ekQ=="],
"@octokit/graphql": ["@octokit/graphql@9.0.1", "", { "dependencies": { "@octokit/request": "^10.0.2", "@octokit/types": "^14.0.0", "universal-user-agent": "^7.0.0" } }, "sha512-j1nQNU1ZxNFx2ZtKmL4sMrs4egy5h65OMDmSbVyuCzjOcwsHq6EaYjOTGXPQxgfiN8dJ4CriYHk6zF050WEULg=="],
"@octokit/oauth-authorization-url": ["@octokit/oauth-authorization-url@8.0.0", "", {}, "sha512-7QoLPRh/ssEA/HuHBHdVdSgF8xNLz/Bc5m9fZkArJE5bb6NmVkDm3anKxXPmN1zh6b5WKZPRr3697xKT/yM3qQ=="],
"@octokit/oauth-methods": ["@octokit/oauth-methods@6.0.0", "", { "dependencies": { "@octokit/oauth-authorization-url": "^8.0.0", "@octokit/request": "^10.0.2", "@octokit/request-error": "^7.0.0", "@octokit/types": "^14.0.0" } }, "sha512-Q8nFIagNLIZgM2odAraelMcDssapc+lF+y3OlcIPxyAU+knefO8KmozGqfnma1xegRDP4z5M73ABsamn72bOcA=="],
"@octokit/openapi-types": ["@octokit/openapi-types@25.1.0", "", {}, "sha512-idsIggNXUKkk0+BExUn1dQ92sfysJrje03Q0bv0e+KPLrvyqZF8MnBpFz8UNfYDwB3Ie7Z0TByjWfzxt7vseaA=="],
"@octokit/plugin-paginate-rest": ["@octokit/plugin-paginate-rest@13.1.1", "", { "dependencies": { "@octokit/types": "^14.1.0" }, "peerDependencies": { "@octokit/core": ">=6" } }, "sha512-q9iQGlZlxAVNRN2jDNskJW/Cafy7/XE52wjZ5TTvyhyOD904Cvx//DNyoO3J/MXJ0ve3rPoNWKEg5iZrisQSuw=="],
"@octokit/plugin-request-log": ["@octokit/plugin-request-log@6.0.0", "", { "peerDependencies": { "@octokit/core": ">=6" } }, "sha512-UkOzeEN3W91/eBq9sPZNQ7sUBvYCqYbrrD8gTbBuGtHEuycE4/awMXcYvx6sVYo7LypPhmQwwpUe4Yyu4QZN5Q=="],
"@octokit/plugin-rest-endpoint-methods": ["@octokit/plugin-rest-endpoint-methods@16.0.0", "", { "dependencies": { "@octokit/types": "^14.1.0" }, "peerDependencies": { "@octokit/core": ">=6" } }, "sha512-kJVUQk6/dx/gRNLWUnAWKFs1kVPn5O5CYZyssyEoNYaFedqZxsfYs7DwI3d67hGz4qOwaJ1dpm07hOAD1BXx6g=="],
"@octokit/request": ["@octokit/request@10.0.3", "", { "dependencies": { "@octokit/endpoint": "^11.0.0", "@octokit/request-error": "^7.0.0", "@octokit/types": "^14.0.0", "fast-content-type-parse": "^3.0.0", "universal-user-agent": "^7.0.2" } }, "sha512-V6jhKokg35vk098iBqp2FBKunk3kMTXlmq+PtbV9Gl3TfskWlebSofU9uunVKhUN7xl+0+i5vt0TGTG8/p/7HA=="],
"@octokit/request-error": ["@octokit/request-error@7.0.0", "", { "dependencies": { "@octokit/types": "^14.0.0" } }, "sha512-KRA7VTGdVyJlh0cP5Tf94hTiYVVqmt2f3I6mnimmaVz4UG3gQV/k4mDJlJv3X67iX6rmN7gSHCF8ssqeMnmhZg=="],
"@octokit/rest": ["@octokit/rest@22.0.0", "", { "dependencies": { "@octokit/core": "^7.0.2", "@octokit/plugin-paginate-rest": "^13.0.1", "@octokit/plugin-request-log": "^6.0.0", "@octokit/plugin-rest-endpoint-methods": "^16.0.0" } }, "sha512-z6tmTu9BTnw51jYGulxrlernpsQYXpui1RK21vmXn8yF5bp6iX16yfTtJYGK5Mh1qDkvDOmp2n8sRMcQmR8jiA=="],
"@octokit/types": ["@octokit/types@14.1.0", "", { "dependencies": { "@octokit/openapi-types": "^25.1.0" } }, "sha512-1y6DgTy8Jomcpu33N+p5w58l6xyt55Ar2I91RPiIA0xCJBXyUAhXCcmZaDWSANiha7R9a6qJJ2CRomGPZ6f46g=="],
"@openauthjs/openauth": ["@openauthjs/openauth@0.4.3", "", { "dependencies": { "@standard-schema/spec": "1.0.0-beta.3", "aws4fetch": "1.0.20", "jose": "5.9.6" }, "peerDependencies": { "arctic": "^2.2.2", "hono": "^4.0.0" } }, "sha512-RlnjqvHzqcbFVymEwhlUEuac4utA5h4nhSK/i2szZuQmxTIqbGUxZ+nM+avM+VV4Ing+/ZaNLKILoXS3yrkOOw=="],
"@opencode/function": ["@opencode/function@workspace:packages/function"],
@@ -500,6 +467,8 @@
"@types/debug": ["@types/debug@4.1.12", "", { "dependencies": { "@types/ms": "*" } }, "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ=="],
"@types/diff-match-patch": ["@types/diff-match-patch@1.0.36", "", {}, "sha512-xFdR6tkm0MWvBfO8xXCSsinYxHcqkQUlcHeSpMC2ukzOb6lwQAfDmW+Qt0AvlGd8HpsS28qKsB+oPeJn9I39jg=="],
"@types/estree": ["@types/estree@1.0.7", "", {}, "sha512-w28IoSUCJpidD/TGviZwwMJckNESJZXFu7NBZ5YJ4mEUnNraUn9Pm8HSZm/jDF1pDWYKspWE7oVphigUPRakIQ=="],
"@types/estree-jsx": ["@types/estree-jsx@1.0.5", "", { "dependencies": { "@types/estree": "*" } }, "sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg=="],
@@ -546,7 +515,9 @@
"acorn-walk": ["acorn-walk@8.3.2", "", {}, "sha512-cjkyv4OtNCIeqhHrfS81QWXoCBPExR/J62oyEqepVw8WaQeSqpW2uhuLPh1m9eWhDuOo/jUXVTlifvesOWp/4A=="],
"ai": ["ai@5.0.0-beta.7", "", { "dependencies": { "@ai-sdk/gateway": "1.0.0-beta.3", "@ai-sdk/provider": "2.0.0-beta.1", "@ai-sdk/provider-utils": "3.0.0-beta.2", "@opentelemetry/api": "1.9.0" }, "peerDependencies": { "zod": "^3.25.49" } }, "sha512-oC4KzUJCQPMB7v9rCqL/rVk2ogZvI6lYiXfKjzPYHwa1zIgy329qqRLmAd3mKEDTTG6By1r0zasQu7FKmG+4gw=="],
"ai": ["ai@4.3.16", "", { "dependencies": { "@ai-sdk/provider": "1.1.3", "@ai-sdk/provider-utils": "2.2.8", "@ai-sdk/react": "1.2.12", "@ai-sdk/ui-utils": "1.2.11", "@opentelemetry/api": "1.9.0", "jsondiffpatch": "0.6.0" }, "peerDependencies": { "react": "^18 || ^19 || ^19.0.0-rc", "zod": "^3.23.8" }, "optionalPeers": ["react"] }, "sha512-KUDwlThJ5tr2Vw0A1ZkbDKNME3wzWhuVfAOwIvFUzl1TPVDFAXDFTXio3p+jaKneB+dKNCvFFlolYmmgHttG1g=="],
"air": ["air@0.4.14", "", { "dependencies": { "zephyr": "~1.3.5" } }, "sha512-E8bl9LlSGSQqjxxjeGIrpYpf8jVyJplsdK1bTobh61F7ks+3aLeXL4KbGSJIFsiaSSz5ZExLU51DGztmQSlZTQ=="],
"ansi-align": ["ansi-align@3.0.1", "", { "dependencies": { "string-width": "^4.1.0" } }, "sha512-IOfwwBF5iczOjp/WeY4YxyjqAFMQoZufdQWDd19SEExbVLNXqvpzSJ/M7Za4/sCPmQ0+GRquoA7bGcINcxew6w=="],
@@ -616,8 +587,6 @@
"bcp-47-match": ["bcp-47-match@2.0.3", "", {}, "sha512-JtTezzbAibu8G0R9op9zb3vcWZd9JF6M0xOYGPn0fNCd7wOpRB1mU2mH9T8gaBGbAAyIIVgB2G7xG0GP98zMAQ=="],
"before-after-hook": ["before-after-hook@4.0.0", "", {}, "sha512-q6tR3RPqIB1pMiTRMFcZwuG5T8vwp+vUvEG0vuI6B+Rikh5BfPp2fQ82c925FOs+b0lcFQ8CFrL+KbilfZFhOQ=="],
"bl": ["bl@4.1.0", "", { "dependencies": { "buffer": "^5.5.0", "inherits": "^2.0.4", "readable-stream": "^3.4.0" } }, "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w=="],
"blake3-wasm": ["blake3-wasm@2.1.5", "", {}, "sha512-F1+K8EbfOZE49dtoPtmxUQrpXaBIl3ICvasLh+nJta0xkz+9kF/7uet9fLnwKqhDrmj6g+6K3Tw9yQPUg2ka5g=="],
@@ -766,6 +735,8 @@
"diff": ["diff@8.0.2", "", {}, "sha512-sSuxWU5j5SR9QQji/o2qMvqRNYRDOcBTgsJ/DeCf4iSN4gW+gNMXM7wFIP+fdXZxoNiAnHUTGjCr+TSWXdRDKg=="],
"diff-match-patch": ["diff-match-patch@1.0.5", "", {}, "sha512-IayShXAgj/QMXgB0IWmKx+rOPuGMhqm5w6jvFxmVenXKIzRqTAAsbBPT3kWQeGANj3jGgvcvv4yK6SxqYmikgw=="],
"diff3": ["diff3@0.0.3", "", {}, "sha512-iSq8ngPOt0K53A6eVr4d5Kn6GNrM2nQZtC740pzIriHtn4pOQ2lyzEXQMBeVcWERN0ye7fhBsk9PbLLQOnUx/g=="],
"direction": ["direction@2.0.1", "", { "bin": { "direction": "cli.js" } }, "sha512-9S6m9Sukh1cZNknO1CWAr2QAWsbKLafQiyM5gZ7VgXHeuaoUwffKN4q6NC4A/Mf9iiPlOXQEKW/Mv/mh9/3YFA=="],
@@ -834,7 +805,7 @@
"eventsource": ["eventsource@3.0.7", "", { "dependencies": { "eventsource-parser": "^3.0.1" } }, "sha512-CRT1WTyuQoD771GW56XEZFQ/ZoSfWid1alKGDYMmkt2yl8UXrVR4pspqWNEcqKvVIzg6PAltWjxcSSPrboA4iA=="],
"eventsource-parser": ["eventsource-parser@3.0.3", "", {}, "sha512-nVpZkTMM9rF6AQ9gPJpFsNAMt48wIzB5TQgiTLdHiuO8XEDhUgZEhqKlZWXbIzo9VmJ/HvysHqEaVeD5v9TPvA=="],
"eventsource-parser": ["eventsource-parser@3.0.2", "", {}, "sha512-6RxOBZ/cYgd8usLwsEl+EC09Au/9BcmCKYF2/xbml6DNczf7nv0MQb+7BA2F+li6//I+28VNlQR37XfQtcAJuA=="],
"exit-hook": ["exit-hook@2.2.1", "", {}, "sha512-eNTPlAD67BmP31LDINZ3U7HSF8l57TxOY2PmBJ1shpCvpnxBF93mWCE8YHBnXs8qiUZJc9WDcWIeC3a2HIAMfw=="],
@@ -850,8 +821,6 @@
"extend": ["extend@3.0.2", "", {}, "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g=="],
"fast-content-type-parse": ["fast-content-type-parse@3.0.0", "", {}, "sha512-ZvLdcY8P+N8mGQJahJV5G4U88CSvT1rP8ApL6uETe88MBXrBHAkZlSEySdUlyztF7ccb+Znos3TFqaepHxdhBg=="],
"fast-deep-equal": ["fast-deep-equal@3.1.3", "", {}, "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="],
"fast-fifo": ["fast-fifo@1.3.2", "", {}, "sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ=="],
@@ -1038,7 +1007,7 @@
"jmespath": ["jmespath@0.16.0", "", {}, "sha512-9FzQjJ7MATs1tSpnco1K6ayiYE3figslrXA72G2HQ/n76RzvYlofyi5QM+iX4YRs/pu3yzxlVQSST23+dMDknw=="],
"jose": ["jose@6.0.11", "", {}, "sha512-QxG7EaliDARm1O1S8BGakqncGT9s25bKL1WSf6/oa17Tkqwi8D2ZNglqCF+DsYF88/rV66Q/Q2mFAy697E1DUg=="],
"jose": ["jose@5.2.3", "", {}, "sha512-KUXdbctm1uHVL8BYhnyHkgp3zDX5KW8ZhAKVFEfUbU2P8Alpzjb+48hHvjOdQIyPshoblhzsuqOwEEAbtHVirA=="],
"joycon": ["joycon@3.1.1", "", {}, "sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw=="],
@@ -1058,6 +1027,8 @@
"json5": ["json5@2.2.3", "", { "bin": { "json5": "lib/cli.js" } }, "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg=="],
"jsondiffpatch": ["jsondiffpatch@0.6.0", "", { "dependencies": { "@types/diff-match-patch": "^1.0.36", "chalk": "^5.3.0", "diff-match-patch": "^1.0.5" }, "bin": { "jsondiffpatch": "bin/jsondiffpatch.js" } }, "sha512-3QItJOXp2AP1uv7waBkao5nCvhEv+QmJAd38Ybq7wNI74Q+BBmnLn4EDKz6yI9xGAIQoUF87qHt+kc1IVxB4zQ=="],
"kleur": ["kleur@4.1.5", "", {}, "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ=="],
"klona": ["klona@2.0.6", "", {}, "sha512-dhG34DXATL5hSxJbIexCft8FChFXtmskoZYnoPWjXQuebWYCNkVeV3KkGegCK9CP1oswI/vQibS2GY7Em/sJJA=="],
@@ -1084,8 +1055,6 @@
"marked": ["marked@15.0.12", "", { "bin": { "marked": "bin/marked.js" } }, "sha512-8dD6FusOQSrpv9Z1rdNMdlSgQOIP880DHqnohobOmYLElGEqAL/JvxvuxZO16r4HtjTlfPRDC1hbvxC9dPN2nA=="],
"marked-shiki": ["marked-shiki@1.2.0", "", { "peerDependencies": { "marked": ">=7.0.0", "shiki": ">=1.0.0" } }, "sha512-N924hp8veE6Mc91g5/kCNVoTU7TkeJfB2G2XEWb+k1fVA0Bck2T0rVt93d39BlOYH6ohP4Q9BFlPk+UkblhXbg=="],
"math-intrinsics": ["math-intrinsics@1.1.0", "", {}, "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g=="],
"mdast-util-definitions": ["mdast-util-definitions@6.0.0", "", { "dependencies": { "@types/mdast": "^4.0.0", "@types/unist": "^3.0.0", "unist-util-visit": "^5.0.0" } }, "sha512-scTllyX6pnYNZH/AIp/0ePz6s4cZtARxImwoPJ7kS42n+MnVsI4XbnG6d4ibehRIldYMWM2LD7ImQblVhUejVQ=="],
@@ -1374,6 +1343,8 @@
"rc": ["rc@1.2.8", "", { "dependencies": { "deep-extend": "^0.6.0", "ini": "~1.3.0", "minimist": "^1.2.0", "strip-json-comments": "~2.0.1" }, "bin": { "rc": "./cli.js" } }, "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw=="],
"react": ["react@19.1.0", "", {}, "sha512-FS+XFBNvn3GTAWq26joslQgWNoFu08F4kl0J4CgdNKADkdSGXQyTCnKteIAJy96Br6YbpEU1LSzV5dYtjMkMDg=="],
"readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="],
"readdirp": ["readdirp@4.1.2", "", {}, "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg=="],
@@ -1558,6 +1529,8 @@
"supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="],
"swr": ["swr@2.3.3", "", { "dependencies": { "dequal": "^2.0.3", "use-sync-external-store": "^1.4.0" }, "peerDependencies": { "react": "^16.11.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" } }, "sha512-dshNvs3ExOqtZ6kJBaAsabhPdHyeY4P2cKwRCniDVifBMoG/SVI7tfLWqPXriVspf2Rg4tPzXJTnwaihIeFw2A=="],
"tar-fs": ["tar-fs@3.0.9", "", { "dependencies": { "pump": "^3.0.0", "tar-stream": "^3.1.5" }, "optionalDependencies": { "bare-fs": "^4.0.1", "bare-path": "^3.0.0" } }, "sha512-XF4w9Xp+ZQgifKakjZYmFdkLoSWd34VGKcsTCwlNWM7QG3ZbaxnTsaBwnjFZqHRf/rROxaR8rXnbtwdvaDI+lA=="],
"tar-stream": ["tar-stream@3.1.7", "", { "dependencies": { "b4a": "^1.6.4", "fast-fifo": "^1.2.0", "streamx": "^2.15.0" } }, "sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ=="],
@@ -1566,19 +1539,19 @@
"thread-stream": ["thread-stream@0.15.2", "", { "dependencies": { "real-require": "^0.1.0" } }, "sha512-UkEhKIg2pD+fjkHQKyJO3yoIvAP3N6RlNFt2dUhcS1FGvCD1cQa1M/PGknCLFIyZdtJOWQjejp7bdNqmN7zwdA=="],
"throttleit": ["throttleit@2.1.0", "", {}, "sha512-nt6AMGKW1p/70DF/hGBdJB57B8Tspmbp5gfJ8ilhLnt7kkr2ye7hzD6NVG8GGErk2HWF34igrL2CXmNIkzKqKw=="],
"tiny-inflate": ["tiny-inflate@1.0.3", "", {}, "sha512-pkY1fj1cKHb2seWDy0B16HeWyczlJA9/WW3u3c4z/NiWDsO3DOU5D7nhTLE9CF0yXv/QZFY7sEJmj24dK+Rrqw=="],
"tinyexec": ["tinyexec@0.3.2", "", {}, "sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA=="],
"tinyglobby": ["tinyglobby@0.2.14", "", { "dependencies": { "fdir": "^6.4.4", "picomatch": "^4.0.2" } }, "sha512-tX5e7OM1HnYr2+a2C/4V0htOcSQcoSTH9KgJnVvNm5zm/cyEWKJ7j7YutsH9CxMdtOkkLFy2AHrMci9IM8IPZQ=="],
"toad-cache": ["toad-cache@3.7.0", "", {}, "sha512-/m8M+2BJUpoJdgAHoG+baCwBT+tf2VraSfkBgl0Y00qIWt41DJ8R5B8nsEw0I58YwF5IZH6z24/2TobDKnqSWw=="],
"toidentifier": ["toidentifier@1.0.1", "", {}, "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA=="],
"token-types": ["token-types@6.0.0", "", { "dependencies": { "@tokenizer/token": "^0.3.0", "ieee754": "^1.2.1" } }, "sha512-lbDrTLVsHhOMljPscd0yitpozq7Ga2M5Cvez5AjGg8GASBjtt6iERCAJ93yommPmz62fb45oFIXHEZ3u9bfJEA=="],
"toolbeam-docs-theme": ["toolbeam-docs-theme@0.4.3", "", { "peerDependencies": { "@astrojs/starlight": "^0.34.3", "astro": "^5.7.13" } }, "sha512-3um/NsSq4xFeKbKrNGPHIzfTixwnEVvroqA8Q+lecnYHHJ5TtiYTggHDqewOW+I67t0J1IVBwVKUPjxiQfIcog=="],
"toolbeam-docs-theme": ["toolbeam-docs-theme@0.4.1", "", { "peerDependencies": { "@astrojs/starlight": "^0.34.3", "astro": "^5.7.13" } }, "sha512-lTI4dHZaVNQky29m7sb36Oy4tWPwxsCuFxFjF8hgGW0vpV+S6qPvI9SwsJFvdE/OHO5DoI7VMbryV1pxZHkkHQ=="],
"tr46": ["tr46@0.0.3", "", {}, "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw=="],
@@ -1644,10 +1617,6 @@
"unist-util-visit-parents": ["unist-util-visit-parents@6.0.1", "", { "dependencies": { "@types/unist": "^3.0.0", "unist-util-is": "^6.0.0" } }, "sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw=="],
"universal-github-app-jwt": ["universal-github-app-jwt@2.2.2", "", {}, "sha512-dcmbeSrOdTnsjGjUfAlqNDJrhxXizjAz94ija9Qw8YkZ1uu0d+GoZzyH+Jb9tIIqvGsadUfwg+22k5aDqqwzbw=="],
"universal-user-agent": ["universal-user-agent@7.0.3", "", {}, "sha512-TmnEAEAsBJVZM/AADELsK76llnwcf9vMKuPz8JflO1frO8Lchitr0fNaN9d+Ap0BjKtqWqd/J17qeDnXh8CL2A=="],
"unpipe": ["unpipe@1.0.0", "", {}, "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ=="],
"unstorage": ["unstorage@1.16.0", "", { "dependencies": { "anymatch": "^3.1.3", "chokidar": "^4.0.3", "destr": "^2.0.5", "h3": "^1.15.2", "lru-cache": "^10.4.3", "node-fetch-native": "^1.6.6", "ofetch": "^1.4.1", "ufo": "^1.6.1" }, "peerDependencies": { "@azure/app-configuration": "^1.8.0", "@azure/cosmos": "^4.2.0", "@azure/data-tables": "^13.3.0", "@azure/identity": "^4.6.0", "@azure/keyvault-secrets": "^4.9.0", "@azure/storage-blob": "^12.26.0", "@capacitor/preferences": "^6.0.3 || ^7.0.0", "@deno/kv": ">=0.9.0", "@netlify/blobs": "^6.5.0 || ^7.0.0 || ^8.1.0", "@planetscale/database": "^1.19.0", "@upstash/redis": "^1.34.3", "@vercel/blob": ">=0.27.1", "@vercel/kv": "^1.0.1", "aws4fetch": "^1.0.20", "db0": ">=0.2.1", "idb-keyval": "^6.2.1", "ioredis": "^5.4.2", "uploadthing": "^7.4.4" }, "optionalPeers": ["@azure/app-configuration", "@azure/cosmos", "@azure/data-tables", "@azure/identity", "@azure/keyvault-secrets", "@azure/storage-blob", "@capacitor/preferences", "@deno/kv", "@netlify/blobs", "@planetscale/database", "@upstash/redis", "@vercel/blob", "@vercel/kv", "aws4fetch", "db0", "idb-keyval", "ioredis", "uploadthing"] }, "sha512-WQ37/H5A7LcRPWfYOrDa1Ys02xAbpPJq6q5GkO88FBXVSQzHd7+BjEwfRqyaSWCv9MbsJy058GWjjPjcJ16GGA=="],
@@ -1656,6 +1625,8 @@
"url": ["url@0.10.3", "", { "dependencies": { "punycode": "1.3.2", "querystring": "0.2.0" } }, "sha512-hzSUW2q06EqL1gKM/a+obYHLIO6ct2hwPuviqTTOcfFVc61UbfJ2Q32+uGL/HCPxKqrdGB5QUwIe7UqlDgwsOQ=="],
"use-sync-external-store": ["use-sync-external-store@1.5.0", "", { "peerDependencies": { "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" } }, "sha512-Rb46I4cGGVBmjamjphe8L/UnvJD+uPPtTkNvX5mZgqdbavhI4EbgIWJiIHXJ8bc/i9EQGPRh4DwEURJ552Do0A=="],
"util": ["util@0.12.5", "", { "dependencies": { "inherits": "^2.0.3", "is-arguments": "^1.0.4", "is-generator-function": "^1.0.7", "is-typed-array": "^1.1.3", "which-typed-array": "^1.1.2" } }, "sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA=="],
"util-deprecate": ["util-deprecate@1.0.2", "", {}, "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="],
@@ -1732,7 +1703,9 @@
"youch": ["youch@3.3.4", "", { "dependencies": { "cookie": "^0.7.1", "mustache": "^4.2.0", "stacktracey": "^2.1.8" } }, "sha512-UeVBXie8cA35DS6+nBkls68xaBBXCye0CNznrhszZjTbRVnJKQuNsyLKBTTL4ln1o1rh2PKtv35twV7irj5SEg=="],
"zod": ["zod@3.25.49", "", {}, "sha512-JMMPMy9ZBk3XFEdbM3iL1brx4NUSejd6xr3ELrrGEfGb355gjhiAWtG3K5o+AViV/3ZfkIrCzXsZn6SbLwTR8Q=="],
"zephyr": ["zephyr@1.3.6", "", {}, "sha512-oYH52DGZzIbXNrkijskaR8YpVKnXAe8jNgH1KirglVBnTFOn6mK9/0SVCxGn+73l0Hjhr4UYNzYkO07LXSWy6w=="],
"zod": ["zod@3.24.2", "", {}, "sha512-lY7CDW43ECgW9u1TcT3IoXHflywfVqDYze4waEz812jR/bZ8FHDsl7pFQoSZTz5N+2NqRXs8GBwnAwo3ZNxqhQ=="],
"zod-openapi": ["zod-openapi@4.2.4", "", { "peerDependencies": { "zod": "^3.21.4" } }, "sha512-tsrQpbpqFCXqVXUzi3TPwFhuMtLN3oNZobOtYnK6/5VkXsNdnIgyNr4r8no4wmYluaxzN3F7iS+8xCW8BmMQ8g=="],
@@ -1744,22 +1717,12 @@
"zwitch": ["zwitch@2.0.4", "", {}, "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A=="],
"@ai-sdk/amazon-bedrock/@ai-sdk/provider": ["@ai-sdk/provider@1.1.3", "", { "dependencies": { "json-schema": "^0.4.0" } }, "sha512-qZMxYJ0qqX/RfnuIaab+zp8UAeJn/ygXXAffR5I4N0n1IrvA6qBsjc8hXLmBiMV2zoXlifkacF7sEFnYnjBcqg=="],
"@ai-sdk/amazon-bedrock/@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@2.2.8", "", { "dependencies": { "@ai-sdk/provider": "1.1.3", "nanoid": "^3.3.8", "secure-json-parse": "^2.7.0" }, "peerDependencies": { "zod": "^3.23.8" } }, "sha512-fqhG+4sCVv8x7nFzYnFo19ryhAa3w096Kmc3hWxMQfW/TubPOmt3A6tYZhl4mUfQWWQMsuSkLrtjlWuXBVSGQA=="],
"@ai-sdk/amazon-bedrock/aws4fetch": ["aws4fetch@1.0.20", "", {}, "sha512-/djoAN709iY65ETD6LKCtyyEI04XIBP5xVvfmNxsEP0uJB5tyaGBztSryRr4HqMStr9R06PisQE7m9zDTXKu6g=="],
"@ai-sdk/anthropic/@ai-sdk/provider": ["@ai-sdk/provider@1.1.3", "", { "dependencies": { "json-schema": "^0.4.0" } }, "sha512-qZMxYJ0qqX/RfnuIaab+zp8UAeJn/ygXXAffR5I4N0n1IrvA6qBsjc8hXLmBiMV2zoXlifkacF7sEFnYnjBcqg=="],
"@ai-sdk/anthropic/@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@2.2.8", "", { "dependencies": { "@ai-sdk/provider": "1.1.3", "nanoid": "^3.3.8", "secure-json-parse": "^2.7.0" }, "peerDependencies": { "zod": "^3.23.8" } }, "sha512-fqhG+4sCVv8x7nFzYnFo19ryhAa3w096Kmc3hWxMQfW/TubPOmt3A6tYZhl4mUfQWWQMsuSkLrtjlWuXBVSGQA=="],
"@ampproject/remapping/@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.25", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ=="],
"@astrojs/mdx/@astrojs/markdown-remark": ["@astrojs/markdown-remark@6.3.2", "", { "dependencies": { "@astrojs/internal-helpers": "0.6.1", "@astrojs/prism": "3.3.0", "github-slugger": "^2.0.0", "hast-util-from-html": "^2.0.3", "hast-util-to-text": "^4.0.2", "import-meta-resolve": "^4.1.0", "js-yaml": "^4.1.0", "mdast-util-definitions": "^6.0.0", "rehype-raw": "^7.0.0", "rehype-stringify": "^10.0.1", "remark-gfm": "^4.0.1", "remark-parse": "^11.0.0", "remark-rehype": "^11.1.2", "remark-smartypants": "^3.0.2", "shiki": "^3.2.1", "smol-toml": "^1.3.1", "unified": "^11.0.5", "unist-util-remove-position": "^5.0.0", "unist-util-visit": "^5.0.0", "unist-util-visit-parents": "^6.0.1", "vfile": "^6.0.3" } }, "sha512-bO35JbWpVvyKRl7cmSJD822e8YA8ThR/YbUsciWNA7yTcqpIAL2hJDToWP5KcZBWxGT6IOdOkHSXARSNZc4l/Q=="],
"@astrojs/sitemap/zod": ["zod@3.24.2", "", {}, "sha512-lY7CDW43ECgW9u1TcT3IoXHflywfVqDYze4waEz812jR/bZ8FHDsl7pFQoSZTz5N+2NqRXs8GBwnAwo3ZNxqhQ=="],
"@aws-crypto/crc32/tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="],
"@aws-crypto/util/@smithy/util-utf8": ["@smithy/util-utf8@2.3.0", "", { "dependencies": { "@smithy/util-buffer-from": "^2.2.0", "tslib": "^2.6.2" } }, "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A=="],
@@ -1778,8 +1741,6 @@
"@jridgewell/gen-mapping/@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.25", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ=="],
"@modelcontextprotocol/sdk/zod": ["zod@3.24.2", "", {}, "sha512-lY7CDW43ECgW9u1TcT3IoXHflywfVqDYze4waEz812jR/bZ8FHDsl7pFQoSZTz5N+2NqRXs8GBwnAwo3ZNxqhQ=="],
"@openauthjs/openauth/@standard-schema/spec": ["@standard-schema/spec@1.0.0-beta.3", "", {}, "sha512-0ifF3BjA1E8SY9C+nUew8RefNOIq0cDlYALPty4rhUm8Rrl6tCM8hBT4bhGhx7I7iXD0uAgt50lgo8dD73ACMw=="],
"@openauthjs/openauth/aws4fetch": ["aws4fetch@1.0.20", "", {}, "sha512-/djoAN709iY65ETD6LKCtyyEI04XIBP5xVvfmNxsEP0uJB5tyaGBztSryRr4HqMStr9R06PisQE7m9zDTXKu6g=="],
@@ -1816,14 +1777,10 @@
"astro/sharp": ["sharp@0.33.5", "", { "dependencies": { "color": "^4.2.3", "detect-libc": "^2.0.3", "semver": "^7.6.3" }, "optionalDependencies": { "@img/sharp-darwin-arm64": "0.33.5", "@img/sharp-darwin-x64": "0.33.5", "@img/sharp-libvips-darwin-arm64": "1.0.4", "@img/sharp-libvips-darwin-x64": "1.0.4", "@img/sharp-libvips-linux-arm": "1.0.5", "@img/sharp-libvips-linux-arm64": "1.0.4", "@img/sharp-libvips-linux-s390x": "1.0.4", "@img/sharp-libvips-linux-x64": "1.0.4", "@img/sharp-libvips-linuxmusl-arm64": "1.0.4", "@img/sharp-libvips-linuxmusl-x64": "1.0.4", "@img/sharp-linux-arm": "0.33.5", "@img/sharp-linux-arm64": "0.33.5", "@img/sharp-linux-s390x": "0.33.5", "@img/sharp-linux-x64": "0.33.5", "@img/sharp-linuxmusl-arm64": "0.33.5", "@img/sharp-linuxmusl-x64": "0.33.5", "@img/sharp-wasm32": "0.33.5", "@img/sharp-win32-ia32": "0.33.5", "@img/sharp-win32-x64": "0.33.5" } }, "sha512-haPVm1EkS9pgvHrQ/F3Xy+hgcuMV0Wm9vfIBSiwZ05k+xgb0PkBQpGsAA/oWdDobNaZTH5ppvHtzCFbnSEwHVw=="],
"astro/zod": ["zod@3.24.2", "", {}, "sha512-lY7CDW43ECgW9u1TcT3IoXHflywfVqDYze4waEz812jR/bZ8FHDsl7pFQoSZTz5N+2NqRXs8GBwnAwo3ZNxqhQ=="],
"babel-plugin-jsx-dom-expressions/@babel/helper-module-imports": ["@babel/helper-module-imports@7.18.6", "", { "dependencies": { "@babel/types": "^7.18.6" } }, "sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA=="],
"bl/buffer": ["buffer@5.7.1", "", { "dependencies": { "base64-js": "^1.3.1", "ieee754": "^1.1.13" } }, "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ=="],
"eventsource/eventsource-parser": ["eventsource-parser@3.0.2", "", {}, "sha512-6RxOBZ/cYgd8usLwsEl+EC09Au/9BcmCKYF2/xbml6DNczf7nv0MQb+7BA2F+li6//I+28VNlQR37XfQtcAJuA=="],
"express/cookie": ["cookie@0.7.2", "", {}, "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w=="],
"get-source/source-map": ["source-map@0.6.1", "", {}, "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="],
@@ -1836,12 +1793,8 @@
"miniflare/sharp": ["sharp@0.33.5", "", { "dependencies": { "color": "^4.2.3", "detect-libc": "^2.0.3", "semver": "^7.6.3" }, "optionalDependencies": { "@img/sharp-darwin-arm64": "0.33.5", "@img/sharp-darwin-x64": "0.33.5", "@img/sharp-libvips-darwin-arm64": "1.0.4", "@img/sharp-libvips-darwin-x64": "1.0.4", "@img/sharp-libvips-linux-arm": "1.0.5", "@img/sharp-libvips-linux-arm64": "1.0.4", "@img/sharp-libvips-linux-s390x": "1.0.4", "@img/sharp-libvips-linux-x64": "1.0.4", "@img/sharp-libvips-linuxmusl-arm64": "1.0.4", "@img/sharp-libvips-linuxmusl-x64": "1.0.4", "@img/sharp-linux-arm": "0.33.5", "@img/sharp-linux-arm64": "0.33.5", "@img/sharp-linux-s390x": "0.33.5", "@img/sharp-linux-x64": "0.33.5", "@img/sharp-linuxmusl-arm64": "0.33.5", "@img/sharp-linuxmusl-x64": "0.33.5", "@img/sharp-wasm32": "0.33.5", "@img/sharp-win32-ia32": "0.33.5", "@img/sharp-win32-x64": "0.33.5" } }, "sha512-haPVm1EkS9pgvHrQ/F3Xy+hgcuMV0Wm9vfIBSiwZ05k+xgb0PkBQpGsAA/oWdDobNaZTH5ppvHtzCFbnSEwHVw=="],
"miniflare/zod": ["zod@3.22.3", "", {}, "sha512-EjIevzuJRiRPbVH4mGc8nApb/lVLKVpmUhAaR5R5doKGfAnGJ6Gr3CViAVjP+4FWSxCsybeWQdcgCtbX+7oZug=="],
"opencontrol/hono": ["hono@4.7.4", "", {}, "sha512-Pst8FuGqz3L7tFF+u9Pu70eI0xa5S3LPUmrNd5Jm8nTHze9FxLTK9Kaj5g/k4UcwuJSXTP65SyHOPLrffpcAJg=="],
"opencontrol/zod": ["zod@3.24.2", "", {}, "sha512-lY7CDW43ECgW9u1TcT3IoXHflywfVqDYze4waEz812jR/bZ8FHDsl7pFQoSZTz5N+2NqRXs8GBwnAwo3ZNxqhQ=="],
"opencontrol/zod-to-json-schema": ["zod-to-json-schema@3.24.3", "", { "peerDependencies": { "zod": "^3.24.1" } }, "sha512-HIAfWdYIt1sssHfYZFCXp4rU1w2r8hVVXYIlmoa0r0gABLs5di3RCqPU5DDROogVz1pAdYBaz7HK5n9pSUNs3A=="],
"openid-client/jose": ["jose@4.15.9", "", {}, "sha512-1vUQX+IdDMVPj4k8kOxgUqlcK518yluMuGZwqlr44FS1ppZB/5GWh4rZG89erpOBOJjU/OBsnCVFfapsRz6nEA=="],
@@ -1866,8 +1819,6 @@
"sitemap/sax": ["sax@1.4.1", "", {}, "sha512-+aWOz7yVScEGoKNd4PA10LZ8sk0A/z5+nXQG5giUO5rprX9jgYsTdov9qCchZiPIZezbZH+jRut8nPodFAX4Jg=="],
"sst/jose": ["jose@5.2.3", "", {}, "sha512-KUXdbctm1uHVL8BYhnyHkgp3zDX5KW8ZhAKVFEfUbU2P8Alpzjb+48hHvjOdQIyPshoblhzsuqOwEEAbtHVirA=="],
"token-types/ieee754": ["ieee754@1.2.1", "", {}, "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA=="],
"unicode-trie/pako": ["pako@0.2.9", "", {}, "sha512-NUcwaKxUxWrZLpDG+z/xZaCgQITkA/Dv4V/T6bw7VON6l1Xz/VnrBqrYjZQ12TamKHzITTfOEIYUj48y2KXImA=="],


@@ -4,8 +4,6 @@ export const domain = (() => {
return `${$app.stage}.dev.opencode.ai`
})()
const GITHUB_APP_ID = new sst.Secret("GITHUB_APP_ID")
const GITHUB_APP_PRIVATE_KEY = new sst.Secret("GITHUB_APP_PRIVATE_KEY")
const bucket = new sst.cloudflare.Bucket("Bucket")
export const api = new sst.cloudflare.Worker("Api", {
@@ -15,7 +13,7 @@ export const api = new sst.cloudflare.Worker("Api", {
WEB_DOMAIN: domain,
},
url: true,
link: [bucket, GITHUB_APP_ID, GITHUB_APP_PRIVATE_KEY],
link: [bucket],
transform: {
worker: (args) => {
args.logpush = true
@@ -41,8 +39,6 @@ new sst.cloudflare.x.Astro("Web", {
domain,
path: "packages/web",
environment: {
// For astro config
SST_STAGE: $app.stage,
VITE_API_URL: api.url,
},
})


@@ -1,11 +1,5 @@
{
"$schema": "https://opencode.ai/config.json",
"mcp": {
"weather": {
"type": "local",
"command": ["opencode", "x", "@h1deya/mcp-server-weather"]
}
},
"experimental": {
"hook": {
"file_edited": {


@@ -7,7 +7,7 @@
"scripts": {
"dev": "bun run packages/opencode/src/index.ts",
"typecheck": "bun run --filter='*' typecheck",
"stainless": "./scripts/stainless",
"stainless": "bun run ./packages/opencode/src/index.ts serve ",
"postinstall": "./scripts/hooks"
},
"workspaces": {
@@ -17,8 +17,8 @@
"catalog": {
"typescript": "5.8.2",
"@types/node": "22.13.9",
"zod": "3.25.49",
"ai": "5.0.0-beta.7"
"zod": "3.24.2",
"ai": "4.3.16"
}
},
"devDependencies": {
@@ -31,8 +31,10 @@
},
"license": "MIT",
"prettier": {
"semi": false,
"printWidth": 120
"semi": false
},
"overrides": {
"zod": "3.24.2"
},
"trustedDependencies": [
"esbuild",


@@ -8,10 +8,5 @@
"@cloudflare/workers-types": "4.20250522.0",
"typescript": "catalog:",
"@types/node": "catalog:"
},
"dependencies": {
"@octokit/auth-app": "8.0.1",
"@octokit/rest": "22.0.0",
"jose": "6.0.11"
}
}


@@ -1,9 +1,5 @@
import { DurableObject } from "cloudflare:workers"
import { randomUUID } from "node:crypto"
import { jwtVerify, createRemoteJWKSet } from "jose"
import { createAppAuth } from "@octokit/auth-app"
import { Octokit } from "@octokit/rest"
import { Resource } from "sst"
type Env = {
SYNC_SERVER: DurableObjectNamespace<SyncServer>
@@ -42,7 +38,10 @@ export class SyncServer extends DurableObject<Env> {
async publish(key: string, content: any) {
const sessionID = await this.getSessionID()
if (!key.startsWith(`session/info/${sessionID}`) && !key.startsWith(`session/message/${sessionID}/`))
if (
!key.startsWith(`session/info/${sessionID}`) &&
!key.startsWith(`session/message/${sessionID}/`)
)
return new Response("Error: Invalid key", { status: 400 })
// store message
@@ -108,7 +107,7 @@ export class SyncServer extends DurableObject<Env> {
}
export default {
async fetch(request: Request, env: Env, ctx: ExecutionContext): Promise<Response> {
async fetch(request: Request, env: Env, ctx: ExecutionContext) {
const url = new URL(request.url)
const splits = url.pathname.split("/")
const method = splits[1]
@@ -185,7 +184,8 @@ export default {
}
const id = url.searchParams.get("id")
console.log("share_poll", id)
if (!id) return new Response("Error: Share ID is required", { status: 400 })
if (!id)
return new Response("Error: Share ID is required", { status: 400 })
const stub = env.SYNC_SERVER.get(env.SYNC_SERVER.idFromName(id))
return stub.fetch(request)
}
@@ -193,7 +193,8 @@ export default {
if (request.method === "GET" && method === "share_data") {
const id = url.searchParams.get("id")
console.log("share_data", id)
if (!id) return new Response("Error: Share ID is required", { status: 400 })
if (!id)
return new Response("Error: Share ID is required", { status: 400 })
const stub = env.SYNC_SERVER.get(env.SYNC_SERVER.idFromName(id))
const data = await stub.getData()
@@ -222,60 +223,5 @@ export default {
},
)
}
if (request.method === "POST" && method === "exchange_github_app_token") {
const EXPECTED_AUDIENCE = "opencode-github-action"
const GITHUB_ISSUER = "https://token.actions.githubusercontent.com"
const JWKS_URL = `${GITHUB_ISSUER}/.well-known/jwks`
// get Authorization header
const authHeader = request.headers.get("Authorization")
const token = authHeader?.replace(/^Bearer /, "")
if (!token)
return new Response(JSON.stringify({ error: "Authorization header is required" }), {
status: 401,
headers: { "Content-Type": "application/json" },
})
// verify token
const JWKS = createRemoteJWKSet(new URL(JWKS_URL))
let owner, repo
try {
const { payload } = await jwtVerify(token, JWKS, {
issuer: GITHUB_ISSUER,
audience: EXPECTED_AUDIENCE,
})
const sub = payload.sub // e.g. 'repo:my-org/my-repo:ref:refs/heads/main'
const parts = sub.split(":")[1].split("/")
owner = parts[0]
repo = parts[1]
} catch (err) {
console.error("Token verification failed:", err)
return new Response(JSON.stringify({ error: "Invalid or expired token" }), {
status: 403,
headers: { "Content-Type": "application/json" },
})
}
// Create app JWT token
const auth = createAppAuth({
appId: Resource.GITHUB_APP_ID.value,
privateKey: Resource.GITHUB_APP_PRIVATE_KEY.value,
})
const appAuth = await auth({ type: "app" })
// Lookup installation
const octokit = new Octokit({ auth: appAuth.token })
const { data: installation } = await octokit.apps.getRepoInstallation({ owner, repo })
// Get installation token
const installationAuth = await auth({ type: "installation", installationId: installation.id })
return new Response(JSON.stringify({ token: installationAuth.token }), {
headers: { "Content-Type": "application/json" },
})
}
return new Response("Not Found", { status: 404 })
},
}


@@ -6,26 +6,18 @@
import "sst"
declare module "sst" {
export interface Resource {
"GITHUB_APP_ID": {
"type": "sst.sst.Secret"
"value": string
}
"GITHUB_APP_PRIVATE_KEY": {
"type": "sst.sst.Secret"
"value": string
}
"Web": {
"type": "sst.cloudflare.Astro"
"url": string
Web: {
type: "sst.cloudflare.Astro"
url: string
}
}
}
// cloudflare
import * as cloudflare from "@cloudflare/workers-types";
import * as cloudflare from "@cloudflare/workers-types"
declare module "sst" {
export interface Resource {
"Api": cloudflare.Service
"Bucket": cloudflare.R2Bucket
Api: cloudflare.Service
Bucket: cloudflare.R2Bucket
}
}


@@ -0,0 +1,369 @@
{
"type": "object",
"properties": {
"$schema": {
"type": "string",
"description": "JSON schema reference for configuration validation"
},
"theme": {
"type": "string",
"description": "Theme name to use for the interface"
},
"keybinds": {
"type": "object",
"properties": {
"leader": {
"type": "string",
"description": "Leader key for keybind combinations"
},
"help": {
"type": "string",
"description": "Show help dialog"
},
"editor_open": {
"type": "string",
"description": "Open external editor"
},
"session_new": {
"type": "string",
"description": "Create a new session"
},
"session_list": {
"type": "string",
"description": "List all sessions"
},
"session_share": {
"type": "string",
"description": "Share current session"
},
"session_interrupt": {
"type": "string",
"description": "Interrupt current session"
},
"session_compact": {
"type": "string",
"description": "Toggle compact mode for session"
},
"tool_details": {
"type": "string",
"description": "Show tool details"
},
"model_list": {
"type": "string",
"description": "List available models"
},
"theme_list": {
"type": "string",
"description": "List available themes"
},
"project_init": {
"type": "string",
"description": "Initialize project configuration"
},
"input_clear": {
"type": "string",
"description": "Clear input field"
},
"input_paste": {
"type": "string",
"description": "Paste from clipboard"
},
"input_submit": {
"type": "string",
"description": "Submit input"
},
"input_newline": {
"type": "string",
"description": "Insert newline in input"
},
"history_previous": {
"type": "string",
"description": "Navigate to previous history item"
},
"history_next": {
"type": "string",
"description": "Navigate to next history item"
},
"messages_page_up": {
"type": "string",
"description": "Scroll messages up by one page"
},
"messages_page_down": {
"type": "string",
"description": "Scroll messages down by one page"
},
"messages_half_page_up": {
"type": "string",
"description": "Scroll messages up by half page"
},
"messages_half_page_down": {
"type": "string",
"description": "Scroll messages down by half page"
},
"messages_previous": {
"type": "string",
"description": "Navigate to previous message"
},
"messages_next": {
"type": "string",
"description": "Navigate to next message"
},
"messages_first": {
"type": "string",
"description": "Navigate to first message"
},
"messages_last": {
"type": "string",
"description": "Navigate to last message"
},
"app_exit": {
"type": "string",
"description": "Exit the application"
}
},
"additionalProperties": false,
"description": "Custom keybind configurations"
},
"autoshare": {
"type": "boolean",
"description": "Share newly created sessions automatically"
},
"autoupdate": {
"type": "boolean",
"description": "Automatically update to the latest version"
},
"disabled_providers": {
"type": "array",
"items": {
"type": "string"
},
"description": "Disable providers that are loaded automatically"
},
"model": {
"type": "string",
"description": "Model to use in the format of provider/model, eg anthropic/claude-2"
},
"provider": {
"type": "object",
"additionalProperties": {
"type": "object",
"properties": {
"api": {
"type": "string"
},
"name": {
"type": "string"
},
"env": {
"type": "array",
"items": {
"type": "string"
}
},
"id": {
"type": "string"
},
"npm": {
"type": "string"
},
"models": {
"type": "object",
"additionalProperties": {
"type": "object",
"properties": {
"name": {
"type": "string"
},
"attachment": {
"type": "boolean"
},
"reasoning": {
"type": "boolean"
},
"temperature": {
"type": "boolean"
},
"tool_call": {
"type": "boolean"
},
"cost": {
"type": "object",
"properties": {
"input": {
"type": "number"
},
"output": {
"type": "number"
},
"cache_read": {
"type": "number"
},
"cache_write": {
"type": "number"
}
},
"required": ["input", "output"],
"additionalProperties": false
},
"limit": {
"type": "object",
"properties": {
"context": {
"type": "number"
},
"output": {
"type": "number"
}
},
"required": ["context", "output"],
"additionalProperties": false
},
"id": {
"type": "string"
},
"options": {
"type": "object",
"additionalProperties": {}
}
},
"additionalProperties": false
}
},
"options": {
"type": "object",
"additionalProperties": {}
}
},
"required": ["models"],
"additionalProperties": false
},
"description": "Custom provider configurations and model overrides"
},
"mcp": {
"type": "object",
"additionalProperties": {
"anyOf": [
{
"type": "object",
"properties": {
"type": {
"type": "string",
"const": "local",
"description": "Type of MCP server connection"
},
"command": {
"type": "array",
"items": {
"type": "string"
},
"description": "Command and arguments to run the MCP server"
},
"environment": {
"type": "object",
"additionalProperties": {
"type": "string"
},
"description": "Environment variables to set when running the MCP server"
},
"enabled": {
"type": "boolean",
"description": "Enable or disable the MCP server on startup"
}
},
"required": ["type", "command"],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"type": {
"type": "string",
"const": "remote",
"description": "Type of MCP server connection"
},
"url": {
"type": "string",
"description": "URL of the remote MCP server"
},
"enabled": {
"type": "boolean",
"description": "Enable or disable the MCP server on startup"
}
},
"required": ["type", "url"],
"additionalProperties": false
}
]
},
"description": "MCP (Model Context Protocol) server configurations"
},
"instructions": {
"type": "array",
"items": {
"type": "string"
},
"description": "Additional instruction files or patterns to include"
},
"experimental": {
"type": "object",
"properties": {
"hook": {
"type": "object",
"properties": {
"file_edited": {
"type": "object",
"additionalProperties": {
"type": "array",
"items": {
"type": "object",
"properties": {
"command": {
"type": "array",
"items": {
"type": "string"
}
},
"environment": {
"type": "object",
"additionalProperties": {
"type": "string"
}
}
},
"required": ["command"],
"additionalProperties": false
}
}
},
"session_completed": {
"type": "array",
"items": {
"type": "object",
"properties": {
"command": {
"type": "array",
"items": {
"type": "string"
}
},
"environment": {
"type": "object",
"additionalProperties": {
"type": "string"
}
}
},
"required": ["command"],
"additionalProperties": false
}
}
},
"additionalProperties": false
}
},
"additionalProperties": false
}
},
"additionalProperties": false,
"$schema": "http://json-schema.org/draft-07/schema#"
}
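
For reference, a minimal opencode.json that would validate against the schema added above might look like the following. This is a hypothetical sketch: the theme and model values are illustrative placeholders (the provider/model format comes from the schema's own description), and the mcp entry reuses the weather-server example that appears elsewhere in this diff.

{
  "$schema": "https://opencode.ai/config.json",
  "theme": "opencode",
  "model": "anthropic/claude-2",
  "autoupdate": true,
  "mcp": {
    "weather": {
      "type": "local",
      "command": ["opencode", "x", "@h1deya/mcp-server-weather"]
    }
  }
}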


@@ -57,7 +57,8 @@ for (const [os, arch] of targets) {
2,
),
)
if (!dry) await $`cd dist/${name} && bun publish --access public --tag ${npmTag}`
if (!dry)
await $`cd dist/${name} && bun publish --access public --tag ${npmTag}`
optionalDependencies[name] = version
}
@@ -81,7 +82,8 @@ await Bun.file(`./dist/${pkg.name}/package.json`).write(
2,
),
)
if (!dry) await $`cd ./dist/${pkg.name} && bun publish --access public --tag ${npmTag}`
if (!dry)
await $`cd ./dist/${pkg.name} && bun publish --access public --tag ${npmTag}`
if (!snapshot) {
// Github Release
@@ -89,11 +91,15 @@ if (!snapshot) {
await $`cd dist/${key}/bin && zip -r ../../${key}.zip *`
}
const previous = await fetch("https://api.github.com/repos/sst/opencode/releases/latest")
const previous = await fetch(
"https://api.github.com/repos/sst/opencode/releases/latest",
)
.then((res) => res.json())
.then((data) => data.tag_name)
const commits = await fetch(`https://api.github.com/repos/sst/opencode/compare/${previous}...HEAD`)
const commits = await fetch(
`https://api.github.com/repos/sst/opencode/compare/${previous}...HEAD`,
)
.then((res) => res.json())
.then((data) => data.commits || [])
@@ -103,7 +109,6 @@ if (!snapshot) {
const lower = x.toLowerCase()
return (
!lower.includes("ignore:") &&
!lower.includes("chore:") &&
!lower.includes("ci:") &&
!lower.includes("wip:") &&
!lower.includes("docs:") &&
@@ -112,13 +117,26 @@
})
.join("\n")
if (!dry) await $`gh release create v${version} --title "v${version}" --notes ${notes} ./dist/*.zip`
if (!dry)
await $`gh release create v${version} --title "v${version}" --notes ${notes} ./dist/*.zip`
// Calculate SHA values
const arm64Sha = await $`sha256sum ./dist/opencode-linux-arm64.zip | cut -d' ' -f1`.text().then((x) => x.trim())
const x64Sha = await $`sha256sum ./dist/opencode-linux-x64.zip | cut -d' ' -f1`.text().then((x) => x.trim())
const macX64Sha = await $`sha256sum ./dist/opencode-darwin-x64.zip | cut -d' ' -f1`.text().then((x) => x.trim())
const macArm64Sha = await $`sha256sum ./dist/opencode-darwin-arm64.zip | cut -d' ' -f1`.text().then((x) => x.trim())
const arm64Sha =
await $`sha256sum ./dist/opencode-linux-arm64.zip | cut -d' ' -f1`
.text()
.then((x) => x.trim())
const x64Sha =
await $`sha256sum ./dist/opencode-linux-x64.zip | cut -d' ' -f1`
.text()
.then((x) => x.trim())
const macX64Sha =
await $`sha256sum ./dist/opencode-darwin-x64.zip | cut -d' ' -f1`
.text()
.then((x) => x.trim())
const macArm64Sha =
await $`sha256sum ./dist/opencode-darwin-arm64.zip | cut -d' ' -f1`
.text()
.then((x) => x.trim())
// AUR package
const pkgbuild = [
@@ -152,7 +170,9 @@ if (!snapshot) {
for (const pkg of ["opencode", "opencode-bin"]) {
await $`rm -rf ./dist/aur-${pkg}`
await $`git clone ssh://aur@aur.archlinux.org/${pkg}.git ./dist/aur-${pkg}`
await Bun.file(`./dist/aur-${pkg}/PKGBUILD`).write(pkgbuild.replace("${pkg}", pkg))
await Bun.file(`./dist/aur-${pkg}/PKGBUILD`).write(
pkgbuild.replace("${pkg}", pkg),
)
await $`cd ./dist/aur-${pkg} && makepkg --printsrcinfo > .SRCINFO`
await $`cd ./dist/aur-${pkg} && git add PKGBUILD .SRCINFO`
await $`cd ./dist/aur-${pkg} && git commit -m "Update to v${version}"`


@@ -4,32 +4,5 @@ import "zod-openapi/extend"
import { Config } from "../src/config/config"
import { zodToJsonSchema } from "zod-to-json-schema"
const file = process.argv[2]
const result = zodToJsonSchema(Config.Info, {
/**
* We'll use the `default` values of the field as the only value in `examples`.
* This will ensure no docs are needed to be read, as the configuration is
* self-documenting.
*
* See https://json-schema.org/draft/2020-12/draft-bhutton-json-schema-validation-00#rfc.section.9.5
*/
postProcess(jsonSchema) {
const schema = jsonSchema as typeof jsonSchema & {
examples?: unknown[]
}
if (schema && typeof schema === "object" && "type" in schema && schema.type === "string" && schema?.default) {
if (!schema.examples) {
schema.examples = [schema.default]
}
schema.description = [schema.description || "", `default: \`${schema.default}\``]
.filter(Boolean)
.join("\n\n")
.trim()
}
return jsonSchema
},
})
await Bun.write(file, JSON.stringify(result, null, 2))
const result = zodToJsonSchema(Config.Info)
await Bun.write("config.schema.json", JSON.stringify(result, null, 2))


@@ -45,14 +45,23 @@ export namespace App {
}
export const provideExisting = ctx.provide
export async function provide<T>(input: Input, cb: (app: App.Info) => Promise<T>) {
export async function provide<T>(
input: Input,
cb: (app: App.Info) => Promise<T>,
) {
log.info("creating", {
cwd: input.cwd,
})
const git = await Filesystem.findUp(".git", input.cwd).then(([x]) => (x ? path.dirname(x) : undefined))
const git = await Filesystem.findUp(".git", input.cwd).then(([x]) =>
x ? path.dirname(x) : undefined,
)
log.info("git", { git })
const data = path.join(Global.Path.data, "project", git ? directory(git) : "global")
const data = path.join(
Global.Path.data,
"project",
git ? directory(git) : "global",
)
const stateFile = Bun.file(path.join(data, APP_JSON))
const state = (await stateFile.json().catch(() => ({}))) as {
initialized: number


@@ -10,8 +10,14 @@ export namespace AuthAnthropic {
url.searchParams.set("code", "true")
url.searchParams.set("client_id", CLIENT_ID)
url.searchParams.set("response_type", "code")
url.searchParams.set("redirect_uri", "https://console.anthropic.com/oauth/code/callback")
url.searchParams.set("scope", "org:create_api_key user:profile user:inference")
url.searchParams.set(
"redirect_uri",
"https://console.anthropic.com/oauth/code/callback",
)
url.searchParams.set(
"scope",
"org:create_api_key user:profile user:inference",
)
url.searchParams.set("code_challenge", pkce.challenge)
url.searchParams.set("code_challenge_method", "S256")
url.searchParams.set("state", pkce.verifier)
@@ -51,7 +57,9 @@ export namespace AuthAnthropic {
const info = await Auth.get("anthropic")
if (!info || info.type !== "oauth") return
if (info.access && info.expires > Date.now()) return info.access
const response = await fetch("https://console.anthropic.com/v1/oauth/token", {
const response = await fetch(
"https://console.anthropic.com/v1/oauth/token",
{
method: "POST",
headers: {
"Content-Type": "application/json",
@ -61,7 +69,8 @@ export namespace AuthAnthropic {
refresh_token: info.refresh,
client_id: CLIENT_ID,
}),
})
},
)
if (!response.ok) return
const json = await response.json()
await Auth.set("anthropic", {


@@ -4,7 +4,9 @@ import path from "path"
export const AuthCopilot = lazy(async () => {
const file = Bun.file(path.join(Global.Path.state, "plugin", "copilot.ts"))
const response = fetch("https://raw.githubusercontent.com/sst/opencode-github-copilot/refs/heads/main/auth.ts")
const response = fetch(
"https://raw.githubusercontent.com/sst/opencode-github-copilot/refs/heads/main/auth.ts",
)
.then((x) => Bun.write(file, x))
.catch(() => {})


@@ -122,7 +122,10 @@ export namespace AuthGithubCopilot {
return tokenData.token
}
export const DeviceCodeError = NamedError.create("DeviceCodeError", z.object({}))
export const DeviceCodeError = NamedError.create(
"DeviceCodeError",
z.object({}),
)
export const TokenExchangeError = NamedError.create(
"TokenExchangeError",


@@ -8,7 +8,10 @@ import { readableStreamToText } from "bun"
export namespace BunProc {
const log = Log.create({ service: "bun" })
export async function run(cmd: string[], options?: Bun.SpawnOptions.OptionsObject<any, any, any>) {
export async function run(
cmd: string[],
options?: Bun.SpawnOptions.OptionsObject<any, any, any>,
) {
log.info("running", {
cmd: [which(), ...cmd],
...options,
@@ -23,17 +26,9 @@ export namespace BunProc {
BUN_BE_BUN: "1",
},
})
const code = await result.exited
const stdout = result.stdout
? typeof result.stdout === "number"
? result.stdout
: await readableStreamToText(result.stdout)
: undefined
const stderr = result.stderr
? typeof result.stderr === "number"
? result.stderr
: await readableStreamToText(result.stderr)
: undefined
const code = await result.exited;
const stdout = result.stdout ? typeof result.stdout === "number" ? result.stdout : await readableStreamToText(result.stdout) : undefined
const stderr = result.stderr ? typeof result.stderr === "number" ? result.stderr : await readableStreamToText(result.stderr) : undefined
log.info("done", {
code,
stdout,
@@ -60,18 +55,15 @@ export namespace BunProc {
export async function install(pkg: string, version = "latest") {
const mod = path.join(Global.Path.cache, "node_modules", pkg)
const pkgjson = Bun.file(path.join(Global.Path.cache, "package.json"))
const parsed = await pkgjson.json().catch(async () => {
const result = { dependencies: {} }
await Bun.write(pkgjson.name!, JSON.stringify(result, null, 2))
return result
})
const parsed = await pkgjson.json().catch(() => ({
dependencies: {},
}))
if (parsed.dependencies[pkg] === version) return mod
await BunProc.run(
["add", "--exact", "--cwd", Global.Path.cache, "--registry=https://registry.npmjs.org", pkg + "@" + version],
{
parsed.dependencies[pkg] = version
await Bun.write(pkgjson, JSON.stringify(parsed, null, 2))
await BunProc.run(["install", "--registry=https://registry.npmjs.org"], {
cwd: Global.Path.cache,
},
).catch((e) => {
}).catch((e) => {
throw new InstallFailedError(
{ pkg, version },
{
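
A rough sketch of the caching pattern visible in this hunk — a package.json kept in a shared cache directory, rewritten and reinstalled only when the pinned version changes. Paths and the runner command are placeholders, assuming Bun's file and spawn APIs:

```ts
import path from "path"

// Illustrative only: install a package into a shared cache dir, skipping work
// when the requested version is already pinned there.
async function installCached(cacheDir: string, pkg: string, version: string) {
  const pkgjson = Bun.file(path.join(cacheDir, "package.json"))
  const parsed = await pkgjson.json().catch(() => ({ dependencies: {} as Record<string, string> }))
  parsed.dependencies ??= {}
  const mod = path.join(cacheDir, "node_modules", pkg)
  if (parsed.dependencies[pkg] === version) return mod
  parsed.dependencies[pkg] = version
  await Bun.write(pkgjson, JSON.stringify(parsed, null, 2))
  const proc = Bun.spawn(["bun", "install", "--registry=https://registry.npmjs.org"], { cwd: cacheDir })
  if ((await proc.exited) !== 0) throw new Error(`install of ${pkg}@${version} failed`)
  return mod
}
```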

View file

@ -18,7 +18,10 @@ export namespace Bus {
const registry = new Map<string, EventDefinition>()
export function event<Type extends string, Properties extends ZodType>(type: Type, properties: Properties) {
export function event<Type extends string, Properties extends ZodType>(
type: Type,
properties: Properties,
) {
const result = {
type,
properties,
@ -69,7 +72,10 @@ export namespace Bus {
export function subscribe<Definition extends EventDefinition>(
def: Definition,
callback: (event: { type: Definition["type"]; properties: z.infer<Definition["properties"]> }) => void,
callback: (event: {
type: Definition["type"]
properties: z.infer<Definition["properties"]>
}) => void,
) {
return raw(def.type, callback)
}
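
The Bus API in this hunk pairs a string event type with a zod schema so subscribers get typed payloads. A stripped-down sketch of that pattern, assuming only zod and ignoring opencode's app-scoped state:

```ts
import { z, type ZodType } from "zod"

// Minimal typed pub/sub in the spirit of the Bus API above (illustrative only).
type Definition = { type: string; properties: ZodType }
const subscriptions = new Map<string, ((payload: any) => void)[]>()

export function event<Type extends string, Properties extends ZodType>(type: Type, properties: Properties) {
  return { type, properties }
}

export function subscribe<D extends Definition>(
  def: D,
  callback: (event: { type: D["type"]; properties: z.infer<D["properties"]> }) => void,
) {
  const arr = subscriptions.get(def.type) ?? []
  arr.push(callback)
  subscriptions.set(def.type, arr)
  return () => subscriptions.set(def.type, arr.filter((cb) => cb !== callback))
}

export function publish<D extends Definition>(def: D, properties: z.infer<D["properties"]>) {
  for (const cb of subscriptions.get(def.type) ?? []) cb({ type: def.type, properties })
}
```

Usage would look like `const Edited = event("file.edited", z.object({ file: z.string() }))` followed by `subscribe(Edited, (e) => ...)`.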

View file

@ -1,15 +1,20 @@
import { App } from "../app/app"
import { ConfigHooks } from "../config/hooks"
import { FileWatcher } from "../file/watch"
import { Format } from "../format"
import { LSP } from "../lsp"
import { Share } from "../share/share"
export async function bootstrap<T>(input: App.Input, cb: (app: App.Info) => Promise<T>) {
export async function bootstrap<T>(
input: App.Input,
cb: (app: App.Info) => Promise<T>,
) {
return App.provide(input, async (app) => {
Share.init()
Format.init()
ConfigHooks.init()
LSP.init()
FileWatcher.init()
return cb(app)
})

View file

@ -15,7 +15,11 @@ export const AuthCommand = cmd({
command: "auth",
describe: "manage credentials",
builder: (yargs) =>
yargs.command(AuthLoginCommand).command(AuthLogoutCommand).command(AuthListCommand).demandCommand(),
yargs
.command(AuthLoginCommand)
.command(AuthLogoutCommand)
.command(AuthListCommand)
.demandCommand(),
async handler() {},
})
@ -27,7 +31,9 @@ export const AuthListCommand = cmd({
UI.empty()
const authPath = path.join(Global.Path.data, "auth.json")
const homedir = os.homedir()
const displayPath = authPath.startsWith(homedir) ? authPath.replace(homedir, "~") : authPath
const displayPath = authPath.startsWith(homedir)
? authPath.replace(homedir, "~")
: authPath
prompts.intro(`Credentials ${UI.Style.TEXT_DIM}${displayPath}`)
const results = await Auth.all().then((x) => Object.entries(x))
const database = await ModelsDev.get()
@ -108,7 +114,8 @@ export const AuthLoginCommand = cmd({
if (provider === "other") {
provider = await prompts.text({
message: "Enter provider id",
validate: (x) => (x.match(/^[a-z-]+$/) ? undefined : "a-z and hyphens only"),
validate: (x) =>
x.match(/^[a-z-]+$/) ? undefined : "a-z and hyphens only",
})
if (prompts.isCancel(provider)) throw new UI.CancelledError()
provider = provider.replace(/^@ai-sdk\//, "")
@ -179,13 +186,17 @@ export const AuthLoginCommand = cmd({
await new Promise((resolve) => setTimeout(resolve, 10))
const deviceInfo = await copilot.authorize()
prompts.note(`Please visit: ${deviceInfo.verification}\nEnter code: ${deviceInfo.user}`)
prompts.note(
`Please visit: ${deviceInfo.verification}\nEnter code: ${deviceInfo.user}`,
)
const spinner = prompts.spinner()
spinner.start("Waiting for authorization...")
while (true) {
await new Promise((resolve) => setTimeout(resolve, deviceInfo.interval * 1000))
await new Promise((resolve) =>
setTimeout(resolve, deviceInfo.interval * 1000),
)
const response = await copilot.poll(deviceInfo.device)
if (response.status === "pending") continue
if (response.status === "success") {
@ -237,7 +248,12 @@ export const AuthLogoutCommand = cmd({
const providerID = await prompts.select({
message: "Select provider",
options: credentials.map(([key, value]) => ({
label: (database[key]?.name || key) + UI.Style.TEXT_DIM + " (" + value.type + ")",
label:
(database[key]?.name || key) +
UI.Style.TEXT_DIM +
" (" +
value.type +
")",
value: key,
})),
})

View file

@ -31,6 +31,7 @@ const FileStatusCommand = cmd({
export const FileCommand = cmd({
command: "file",
builder: (yargs) => yargs.command(FileReadCommand).command(FileStatusCommand).demandCommand(),
builder: (yargs) =>
yargs.command(FileReadCommand).command(FileStatusCommand).demandCommand(),
async handler() {},
})

View file

@ -3,7 +3,6 @@ import { cmd } from "../cmd"
import { FileCommand } from "./file"
import { LSPCommand } from "./lsp"
import { RipgrepCommand } from "./ripgrep"
import { ScrapCommand } from "./scrap"
import { SnapshotCommand } from "./snapshot"
export const DebugCommand = cmd({
@ -13,13 +12,14 @@ export const DebugCommand = cmd({
.command(LSPCommand)
.command(RipgrepCommand)
.command(FileCommand)
.command(ScrapCommand)
.command(SnapshotCommand)
.command({
command: "wait",
async handler() {
await bootstrap({ cwd: process.cwd() }, async () => {
await new Promise((resolve) => setTimeout(resolve, 1_000 * 60 * 60 * 24))
await new Promise((resolve) =>
setTimeout(resolve, 1_000 * 60 * 60 * 24),
)
})
},
})

View file

@ -6,13 +6,14 @@ import { Log } from "../../../util/log"
export const LSPCommand = cmd({
command: "lsp",
builder: (yargs) =>
yargs.command(DiagnosticsCommand).command(SymbolsCommand).command(DocumentSymbolsCommand).demandCommand(),
yargs.command(DiagnosticsCommand).command(SymbolsCommand).demandCommand(),
async handler() {},
})
const DiagnosticsCommand = cmd({
command: "diagnostics <file>",
builder: (yargs) => yargs.positional("file", { type: "string", demandOption: true }),
builder: (yargs) =>
yargs.positional("file", { type: "string", demandOption: true }),
async handler(args) {
await bootstrap({ cwd: process.cwd() }, async () => {
await LSP.touchFile(args.file, true)
@ -23,24 +24,14 @@ const DiagnosticsCommand = cmd({
export const SymbolsCommand = cmd({
command: "symbols <query>",
builder: (yargs) => yargs.positional("query", { type: "string", demandOption: true }),
builder: (yargs) =>
yargs.positional("query", { type: "string", demandOption: true }),
async handler(args) {
await bootstrap({ cwd: process.cwd() }, async () => {
await LSP.touchFile("./src/index.ts", true)
using _ = Log.Default.time("symbols")
const results = await LSP.workspaceSymbol(args.query)
console.log(JSON.stringify(results, null, 2))
})
},
})
export const DocumentSymbolsCommand = cmd({
command: "document-symbols <uri>",
builder: (yargs) => yargs.positional("uri", { type: "string", demandOption: true }),
async handler(args) {
await bootstrap({ cwd: process.cwd() }, async () => {
using _ = Log.Default.time("document-symbols")
const results = await LSP.documentSymbol(args.uri)
console.log(JSON.stringify(results, null, 2))
})
},
})

View file

@ -5,7 +5,12 @@ import { cmd } from "../cmd"
export const RipgrepCommand = cmd({
command: "rg",
builder: (yargs) => yargs.command(TreeCommand).command(FilesCommand).command(SearchCommand).demandCommand(),
builder: (yargs) =>
yargs
.command(TreeCommand)
.command(FilesCommand)
.command(SearchCommand)
.demandCommand(),
async handler() {},
})
@ -45,7 +50,7 @@ const FilesCommand = cmd({
const files = await Ripgrep.files({
cwd: app.path.cwd,
query: args.query,
glob: args.glob ? [args.glob] : undefined,
glob: args.glob,
limit: args.limit,
})
console.log(files.join("\n"))

View file

@ -1,7 +0,0 @@
import { cmd } from "../cmd"
export const ScrapCommand = cmd({
command: "scrap",
builder: (yargs) => yargs,
async handler() {},
})

View file

@ -4,7 +4,11 @@ import { cmd } from "../cmd"
export const SnapshotCommand = cmd({
command: "snapshot",
builder: (yargs) => yargs.command(SnapshotCreateCommand).command(SnapshotRestoreCommand).demandCommand(),
builder: (yargs) =>
yargs
.command(SnapshotCreateCommand)
.command(SnapshotRestoreCommand)
.demandCommand(),
async handler() {},
})

View file

@ -10,6 +10,9 @@ export const GenerateCommand = {
const dir = "gen"
await fs.rmdir(dir, { recursive: true }).catch(() => {})
await fs.mkdir(dir, { recursive: true })
await Bun.write(path.join(dir, "openapi.json"), JSON.stringify(specs, null, 2))
await Bun.write(
path.join(dir, "openapi.json"),
JSON.stringify(specs, null, 2),
)
},
} satisfies CommandModule

View file

@ -2,13 +2,12 @@ import type { Argv } from "yargs"
import { Bus } from "../../bus"
import { Provider } from "../../provider/provider"
import { Session } from "../../session"
import { Message } from "../../session/message"
import { UI } from "../ui"
import { cmd } from "./cmd"
import { Flag } from "../../flag/flag"
import { Config } from "../../config/config"
import { bootstrap } from "../bootstrap"
import { MessageV2 } from "../../session/message-v2"
import { Mode } from "../../session/mode"
const TOOL: Record<string, [string, string]> = {
todowrite: ["Todo", UI.Style.TEXT_WARNING_BOLD],
@ -53,22 +52,13 @@ export const RunCommand = cmd({
alias: ["m"],
describe: "model to use in the format of provider/model",
})
.option("mode", {
type: "string",
describe: "mode to use",
})
},
handler: async (args) => {
let message = args.message.join(" ")
if (!process.stdin.isTTY) message += "\n" + (await Bun.stdin.text())
const message = args.message.join(" ")
await bootstrap({ cwd: process.cwd() }, async () => {
const session = await (async () => {
if (args.continue) {
const list = Session.list()
const first = await list.next()
await list.return()
const first = await Session.list().next()
if (first.done) return
return first.value
}
@ -88,19 +78,27 @@ export const RunCommand = cmd({
UI.empty()
UI.println(UI.logo())
UI.empty()
const displayMessage = message.length > 300 ? message.slice(0, 300) + "..." : message
UI.println(UI.Style.TEXT_NORMAL_BOLD + "> ", displayMessage)
UI.println(UI.Style.TEXT_NORMAL_BOLD + "> ", message)
UI.empty()
const cfg = await Config.get()
if (cfg.autoshare || Flag.OPENCODE_AUTO_SHARE || args.share) {
await Session.share(session.id)
UI.println(UI.Style.TEXT_INFO_BOLD + "~ https://opencode.ai/s/" + session.id.slice(-8))
UI.println(
UI.Style.TEXT_INFO_BOLD +
"~ https://opencode.ai/s/" +
session.id.slice(-8),
)
}
UI.empty()
const { providerID, modelID } = args.model ? Provider.parseModel(args.model) : await Provider.defaultModel()
UI.println(UI.Style.TEXT_NORMAL_BOLD + "@ ", UI.Style.TEXT_NORMAL + `${providerID}/${modelID}`)
const { providerID, modelID } = args.model
? Provider.parseModel(args.model)
: await Provider.defaultModel()
UI.println(
UI.Style.TEXT_NORMAL_BOLD + "@ ",
UI.Style.TEXT_NORMAL + `${providerID}/${modelID}`,
)
UI.empty()
function printEvent(color: string, type: string, title: string) {
@ -112,13 +110,24 @@ export const RunCommand = cmd({
)
}
Bus.subscribe(MessageV2.Event.PartUpdated, async (evt) => {
Bus.subscribe(Message.Event.PartUpdated, async (evt) => {
if (evt.properties.sessionID !== session.id) return
const part = evt.properties.part
const message = await Session.getMessage(
evt.properties.sessionID,
evt.properties.messageID,
)
if (part.type === "tool" && part.state.status === "completed") {
const [tool, color] = TOOL[part.tool] ?? [part.tool, UI.Style.TEXT_INFO_BOLD]
printEvent(color, tool, part.state.title || "Unknown")
if (
part.type === "tool-invocation" &&
part.toolInvocation.state === "result"
) {
const metadata = message.metadata.tool[part.toolInvocation.toolCallId]
const [tool, color] = TOOL[part.toolInvocation.toolName] ?? [
part.toolInvocation.toolName,
UI.Style.TEXT_INFO_BOLD,
]
printEvent(color, tool, metadata?.title || "Unknown")
}
if (part.type === "text") {
@ -132,31 +141,10 @@ export const RunCommand = cmd({
}
})
let errorMsg: string | undefined
Bus.subscribe(Session.Event.Error, async (evt) => {
const { sessionID, error } = evt.properties
if (sessionID !== session.id || !error) return
let err = String(error.name)
if ("data" in error && error.data && "message" in error.data) {
err = error.data.message
}
errorMsg = errorMsg ? errorMsg + "\n" + err : err
UI.error(err)
})
const mode = args.mode ? await Mode.get(args.mode) : await Mode.list().then((x) => x[0])
const result = await Session.chat({
sessionID: session.id,
...(mode.model
? mode.model
: {
providerID,
modelID,
}),
mode: mode.name,
parts: [
{
type: "text",
@ -168,7 +156,6 @@ export const RunCommand = cmd({
if (isPiped) {
const match = result.parts.findLast((x) => x.type === "text")
if (match) process.stdout.write(match.text)
if (errorMsg) process.stdout.write(errorMsg)
}
UI.empty()
})

View file

@ -38,7 +38,9 @@ export const ServeCommand = cmd({
hostname,
})
console.log(`opencode server listening on http://${server.hostname}:${server.port}`)
console.log(
`opencode server listening on http://${server.hostname}:${server.port}`,
)
await new Promise(() => {})

View file

@ -1,179 +0,0 @@
import { Storage } from "../../storage/storage"
import { MessageV2 } from "../../session/message-v2"
import { cmd } from "./cmd"
import { bootstrap } from "../bootstrap"
interface SessionStats {
totalSessions: number
totalMessages: number
totalCost: number
totalTokens: {
input: number
output: number
reasoning: number
cache: {
read: number
write: number
}
}
toolUsage: Record<string, number>
dateRange: {
earliest: number
latest: number
}
days: number
costPerDay: number
}
export const StatsCommand = cmd({
command: "stats",
describe: "analyze and display statistics from message-v2 format",
handler: async () => {
await bootstrap({ cwd: process.cwd() }, async () => {
const stats: SessionStats = {
totalSessions: 0,
totalMessages: 0,
totalCost: 0,
totalTokens: {
input: 0,
output: 0,
reasoning: 0,
cache: {
read: 0,
write: 0,
},
},
toolUsage: {},
dateRange: {
earliest: Date.now(),
latest: 0,
},
days: 0,
costPerDay: 0,
}
const sessionMap = new Map<string, number>()
try {
for await (const messagePath of Storage.list("session/message")) {
try {
const message = await Storage.readJSON<MessageV2.Info>(messagePath)
if (!message.parts.find((part) => part.type === "step-finish")) continue
stats.totalMessages++
const sessionId = message.sessionID
sessionMap.set(sessionId, (sessionMap.get(sessionId) || 0) + 1)
if (message.time.created < stats.dateRange.earliest) {
stats.dateRange.earliest = message.time.created
}
if (message.time.created > stats.dateRange.latest) {
stats.dateRange.latest = message.time.created
}
if (message.role === "assistant") {
stats.totalCost += message.cost
stats.totalTokens.input += message.tokens.input
stats.totalTokens.output += message.tokens.output
stats.totalTokens.reasoning += message.tokens.reasoning
stats.totalTokens.cache.read += message.tokens.cache.read
stats.totalTokens.cache.write += message.tokens.cache.write
for (const part of message.parts) {
if (part.type === "tool") {
stats.toolUsage[part.tool] = (stats.toolUsage[part.tool] || 0) + 1
}
}
}
} catch (e) {
continue
}
}
} catch (e) {
console.error("Failed to read storage:", e)
return
}
stats.totalSessions = sessionMap.size
if (stats.dateRange.latest > 0) {
const daysDiff = (stats.dateRange.latest - stats.dateRange.earliest) / (1000 * 60 * 60 * 24)
stats.days = Math.max(1, Math.ceil(daysDiff))
stats.costPerDay = stats.totalCost / stats.days
}
displayStats(stats)
})
},
})
function displayStats(stats: SessionStats) {
const width = 56
function renderRow(label: string, value: string): string {
const availableWidth = width - 1
const paddingNeeded = availableWidth - label.length - value.length
const padding = Math.max(0, paddingNeeded)
return `${label}${" ".repeat(padding)}${value}`
}
// Overview section
console.log("┌────────────────────────────────────────────────────────┐")
console.log("│ OVERVIEW │")
console.log("├────────────────────────────────────────────────────────┤")
console.log(renderRow("Sessions", stats.totalSessions.toLocaleString()))
console.log(renderRow("Messages", stats.totalMessages.toLocaleString()))
console.log(renderRow("Days", stats.days.toString()))
console.log("└────────────────────────────────────────────────────────┘")
console.log()
// Cost & Tokens section
console.log("┌────────────────────────────────────────────────────────┐")
console.log("│ COST & TOKENS │")
console.log("├────────────────────────────────────────────────────────┤")
const cost = isNaN(stats.totalCost) ? 0 : stats.totalCost
const costPerDay = isNaN(stats.costPerDay) ? 0 : stats.costPerDay
console.log(renderRow("Total Cost", `$${cost.toFixed(2)}`))
console.log(renderRow("Cost/Day", `$${costPerDay.toFixed(2)}`))
console.log(renderRow("Input", formatNumber(stats.totalTokens.input)))
console.log(renderRow("Output", formatNumber(stats.totalTokens.output)))
console.log(renderRow("Cache Read", formatNumber(stats.totalTokens.cache.read)))
console.log(renderRow("Cache Write", formatNumber(stats.totalTokens.cache.write)))
console.log("└────────────────────────────────────────────────────────┘")
console.log()
// Tool Usage section
if (Object.keys(stats.toolUsage).length > 0) {
const sortedTools = Object.entries(stats.toolUsage)
.sort(([, a], [, b]) => b - a)
.slice(0, 10)
console.log("┌────────────────────────────────────────────────────────┐")
console.log("│ TOOL USAGE │")
console.log("├────────────────────────────────────────────────────────┤")
const maxCount = Math.max(...sortedTools.map(([, count]) => count))
const totalToolUsage = Object.values(stats.toolUsage).reduce((a, b) => a + b, 0)
for (const [tool, count] of sortedTools) {
const barLength = Math.max(1, Math.floor((count / maxCount) * 20))
const bar = "█".repeat(barLength)
const percentage = ((count / totalToolUsage) * 100).toFixed(1)
const content = ` ${tool.padEnd(10)} ${bar.padEnd(20)} ${count.toString().padStart(3)} (${percentage.padStart(4)}%)`
const padding = Math.max(0, width - content.length)
console.log(`${content}${" ".repeat(padding)}`)
}
console.log("└────────────────────────────────────────────────────────┘")
}
console.log()
}
function formatNumber(num: number): string {
if (num >= 1000000) {
return (num / 1000000).toFixed(1) + "M"
} else if (num >= 1000) {
return (num / 1000).toFixed(1) + "K"
}
return num.toString()
}

View file

@ -10,31 +10,14 @@ import { Installation } from "../../installation"
import { Config } from "../../config/config"
import { Bus } from "../../bus"
import { Log } from "../../util/log"
import { FileWatcher } from "../../file/watch"
import { Mode } from "../../session/mode"
export const TuiCommand = cmd({
command: "$0 [project]",
describe: "start opencode tui",
builder: (yargs) =>
yargs
.positional("project", {
yargs.positional("project", {
type: "string",
describe: "path to start opencode in",
})
.option("model", {
type: "string",
alias: ["m"],
describe: "model to use in the format of provider/model",
})
.option("prompt", {
alias: ["p"],
type: "string",
describe: "prompt to use",
})
.option("mode", {
type: "string",
describe: "mode to use",
}),
handler: async (args) => {
while (true) {
@ -46,7 +29,6 @@ export const TuiCommand = cmd({
return
}
const result = await bootstrap({ cwd }, async (app) => {
FileWatcher.init()
const providers = await Provider.list()
if (Object.keys(providers).length === 0) {
return "needs_provider"
@ -58,7 +40,9 @@ export const TuiCommand = cmd({
})
let cmd = ["go", "run", "./main.go"]
let cwd = Bun.fileURLToPath(new URL("../../../../tui/cmd/opencode", import.meta.url))
let cwd = Bun.fileURLToPath(
new URL("../../../../tui/cmd/opencode", import.meta.url),
)
if (Bun.embeddedFiles.length > 0) {
const blob = Bun.embeddedFiles[0] as File
let binaryName = blob.name
@ -78,22 +62,15 @@ export const TuiCommand = cmd({
cmd,
})
const proc = Bun.spawn({
cmd: [
...cmd,
...(args.model ? ["--model", args.model] : []),
...(args.prompt ? ["--prompt", args.prompt] : []),
...(args.mode ? ["--mode", args.mode] : []),
],
cmd: [...cmd, ...process.argv.slice(2)],
cwd,
stdout: "inherit",
stderr: "inherit",
stdin: "inherit",
env: {
...process.env,
CGO_ENABLED: "0",
OPENCODE_SERVER: server.url.toString(),
OPENCODE_APP_INFO: JSON.stringify(app),
OPENCODE_MODES: JSON.stringify(await Mode.list()),
},
onExit: () => {
server.stop()

View file

@ -27,26 +27,22 @@ export const UpgradeCommand = {
const detectedMethod = await Installation.method()
const method = (args.method as Installation.Method) ?? detectedMethod
if (method === "unknown") {
prompts.log.error(`opencode is installed to ${process.execPath} and seems to be managed by a package manager`)
prompts.log.error(
`opencode is installed to ${process.execPath} and seems to be managed by a package manager`,
)
prompts.outro("Done")
return
}
prompts.log.info("Using method: " + method)
const target = args.target ?? (await Installation.latest())
if (Installation.VERSION === target) {
prompts.log.warn(`opencode upgrade skipped: ${target} is already installed`)
prompts.outro("Done")
return
}
prompts.log.info(`From ${Installation.VERSION} → ${target}`)
const spinner = prompts.spinner()
spinner.start("Upgrading...")
const err = await Installation.upgrade(method, target).catch((err) => err)
if (err) {
spinner.stop("Upgrade failed")
if (err instanceof Installation.UpgradeFailedError) prompts.log.error(err.data.stderr)
if (err instanceof Installation.UpgradeFailedError)
prompts.log.error(err.data.stderr)
else if (err instanceof Error) prompts.log.error(err.message)
prompts.outro("Done")
return

View file

@ -5,11 +5,14 @@ import { UI } from "./ui"
export function FormatError(input: unknown) {
if (MCP.Failed.isInstance(input))
return `MCP server "${input.data.name}" failed. Note, opencode does not support MCP authentication yet.`
if (Config.JsonError.isInstance(input)) return `Config file at ${input.data.path} is not valid JSON`
if (Config.JsonError.isInstance(input))
return `Config file at ${input.data.path} is not valid JSON`
if (Config.InvalidError.isInstance(input))
return [
`Config file at ${input.data.path} is invalid`,
...(input.data.issues?.map((issue) => "↳ " + issue.message + " " + issue.path.join(".")) ?? []),
...(input.data.issues?.map(
(issue) => "↳ " + issue.message + " " + issue.path.join("."),
) ?? []),
].join("\n")
if (UI.CancelledError.isInstance(input)) return ""

View file

@ -4,7 +4,7 @@ import { z } from "zod"
import { App } from "../app/app"
import { Filesystem } from "../util/filesystem"
import { ModelsDev } from "../provider/models"
import { mergeDeep, pipe } from "remeda"
import { mergeDeep } from "remeda"
import { Global } from "../global"
import fs from "fs/promises"
import { lazy } from "../util/lazy"
@ -29,12 +29,18 @@ export namespace Config {
export const McpLocal = z
.object({
type: z.literal("local").describe("Type of MCP server connection"),
command: z.string().array().describe("Command and arguments to run the MCP server"),
command: z
.string()
.array()
.describe("Command and arguments to run the MCP server"),
environment: z
.record(z.string(), z.string())
.optional()
.describe("Environment variables to set when running the MCP server"),
enabled: z.boolean().optional().describe("Enable or disable the MCP server on startup"),
enabled: z
.boolean()
.optional()
.describe("Enable or disable the MCP server on startup"),
})
.strict()
.openapi({
@ -45,7 +51,10 @@ export namespace Config {
.object({
type: z.literal("remote").describe("Type of MCP server connection"),
url: z.string().describe("URL of the remote MCP server"),
enabled: z.boolean().optional().describe("Enable or disable the MCP server on startup"),
enabled: z
.boolean()
.optional()
.describe("Enable or disable the MCP server on startup"),
})
.strict()
.openapi({
@ -55,80 +64,105 @@ export namespace Config {
export const Mcp = z.discriminatedUnion("type", [McpLocal, McpRemote])
export type Mcp = z.infer<typeof Mcp>
export const Mode = z
.object({
model: z.string().optional(),
prompt: z.string().optional(),
tools: z.record(z.string(), z.boolean()).optional(),
})
.openapi({
ref: "ModeConfig",
})
export type Mode = z.infer<typeof Mode>
export const Keybinds = z
.object({
leader: z.string().optional().default("ctrl+x").describe("Leader key for keybind combinations"),
app_help: z.string().optional().default("<leader>h").describe("Show help dialog"),
switch_mode: z.string().optional().default("tab").describe("Switch mode"),
editor_open: z.string().optional().default("<leader>e").describe("Open external editor"),
session_new: z.string().optional().default("<leader>n").describe("Create a new session"),
session_list: z.string().optional().default("<leader>l").describe("List all sessions"),
session_share: z.string().optional().default("<leader>s").describe("Share current session"),
session_unshare: z.string().optional().default("<leader>u").describe("Unshare current session"),
session_interrupt: z.string().optional().default("esc").describe("Interrupt current session"),
session_compact: z.string().optional().default("<leader>c").describe("Compact the session"),
tool_details: z.string().optional().default("<leader>d").describe("Toggle tool details"),
model_list: z.string().optional().default("<leader>m").describe("List available models"),
theme_list: z.string().optional().default("<leader>t").describe("List available themes"),
file_list: z.string().optional().default("<leader>f").describe("List files"),
file_close: z.string().optional().default("esc").describe("Close file"),
file_search: z.string().optional().default("<leader>/").describe("Search file"),
file_diff_toggle: z.string().optional().default("<leader>v").describe("Split/unified diff"),
project_init: z.string().optional().default("<leader>i").describe("Create/update AGENTS.md"),
input_clear: z.string().optional().default("ctrl+c").describe("Clear input field"),
input_paste: z.string().optional().default("ctrl+v").describe("Paste from clipboard"),
input_submit: z.string().optional().default("enter").describe("Submit input"),
input_newline: z.string().optional().default("shift+enter,ctrl+j").describe("Insert newline in input"),
messages_page_up: z.string().optional().default("pgup").describe("Scroll messages up by one page"),
messages_page_down: z.string().optional().default("pgdown").describe("Scroll messages down by one page"),
messages_half_page_up: z.string().optional().default("ctrl+alt+u").describe("Scroll messages up by half page"),
leader: z
.string()
.optional()
.describe("Leader key for keybind combinations"),
help: z.string().optional().describe("Show help dialog"),
editor_open: z.string().optional().describe("Open external editor"),
session_new: z.string().optional().describe("Create a new session"),
session_list: z.string().optional().describe("List all sessions"),
session_share: z.string().optional().describe("Share current session"),
session_interrupt: z
.string()
.optional()
.describe("Interrupt current session"),
session_compact: z
.string()
.optional()
.describe("Toggle compact mode for session"),
tool_details: z.string().optional().describe("Show tool details"),
model_list: z.string().optional().describe("List available models"),
theme_list: z.string().optional().describe("List available themes"),
project_init: z
.string()
.optional()
.describe("Initialize project configuration"),
input_clear: z.string().optional().describe("Clear input field"),
input_paste: z.string().optional().describe("Paste from clipboard"),
input_submit: z.string().optional().describe("Submit input"),
input_newline: z.string().optional().describe("Insert newline in input"),
history_previous: z
.string()
.optional()
.describe("Navigate to previous history item"),
history_next: z
.string()
.optional()
.describe("Navigate to next history item"),
messages_page_up: z
.string()
.optional()
.describe("Scroll messages up by one page"),
messages_page_down: z
.string()
.optional()
.describe("Scroll messages down by one page"),
messages_half_page_up: z
.string()
.optional()
.describe("Scroll messages up by half page"),
messages_half_page_down: z
.string()
.optional()
.default("ctrl+alt+d")
.describe("Scroll messages down by half page"),
messages_previous: z.string().optional().default("ctrl+up").describe("Navigate to previous message"),
messages_next: z.string().optional().default("ctrl+down").describe("Navigate to next message"),
messages_first: z.string().optional().default("ctrl+g").describe("Navigate to first message"),
messages_last: z.string().optional().default("ctrl+alt+g").describe("Navigate to last message"),
messages_layout_toggle: z.string().optional().default("<leader>p").describe("Toggle layout"),
messages_copy: z.string().optional().default("<leader>y").describe("Copy message"),
messages_revert: z.string().optional().default("<leader>r").describe("Revert message"),
app_exit: z.string().optional().default("ctrl+c,<leader>q").describe("Exit the application"),
messages_previous: z
.string()
.optional()
.describe("Navigate to previous message"),
messages_next: z.string().optional().describe("Navigate to next message"),
messages_first: z
.string()
.optional()
.describe("Navigate to first message"),
messages_last: z.string().optional().describe("Navigate to last message"),
app_exit: z.string().optional().describe("Exit the application"),
})
.strict()
.openapi({
ref: "KeybindsConfig",
})
export const Info = z
.object({
$schema: z.string().optional().describe("JSON schema reference for configuration validation"),
theme: z.string().optional().describe("Theme name to use for the interface"),
$schema: z
.string()
.optional()
.describe("JSON schema reference for configuration validation"),
theme: z
.string()
.optional()
.describe("Theme name to use for the interface"),
keybinds: Keybinds.optional().describe("Custom keybind configurations"),
autoshare: z.boolean().optional().describe("Share newly created sessions automatically"),
autoupdate: z.boolean().optional().describe("Automatically update to the latest version"),
disabled_providers: z.array(z.string()).optional().describe("Disable providers that are loaded automatically"),
model: z.string().describe("Model to use in the format of provider/model, eg anthropic/claude-2").optional(),
mode: z
.object({
build: Mode.optional(),
plan: Mode.optional(),
})
.catchall(Mode)
autoshare: z
.boolean()
.optional()
.describe("Share newly created sessions automatically"),
autoupdate: z
.boolean()
.optional()
.describe("Automatically update to the latest version"),
disabled_providers: z
.array(z.string())
.optional()
.describe("Disable providers that are loaded automatically"),
model: z
.string()
.describe(
"Model to use in the format of provider/model, eg anthropic/claude-2",
)
.optional(),
log_level: Log.Level.optional().describe("Minimum log level to write to log files"),
provider: z
.record(
ModelsDev.Provider.partial().extend({
@ -138,8 +172,14 @@ export namespace Config {
)
.optional()
.describe("Custom provider configurations and model overrides"),
mcp: z.record(z.string(), Mcp).optional().describe("MCP (Model Context Protocol) server configurations"),
instructions: z.array(z.string()).optional().describe("Additional instruction files or patterns to include"),
mcp: z
.record(z.string(), Mcp)
.optional()
.describe("MCP (Model Context Protocol) server configurations"),
instructions: z
.array(z.string())
.optional()
.describe("Additional instruction files or patterns to include"),
experimental: z
.object({
hook: z
@ -175,11 +215,7 @@ export namespace Config {
export type Info = z.output<typeof Info>
export const global = lazy(async () => {
let result = pipe(
{},
mergeDeep(await load(path.join(Global.Path.config, "config.json"))),
mergeDeep(await load(path.join(Global.Path.config, "opencode.json"))),
)
let result = await load(path.join(Global.Path.config, "config.json"))
await import(path.join(Global.Path.config, "config"), {
with: {
@ -191,7 +227,10 @@ export namespace Config {
if (provider && model) result.model = `${provider}/${model}`
result["$schema"] = "https://opencode.ai/config.json"
result = mergeDeep(result, rest)
await Bun.write(path.join(Global.Path.config, "config.json"), JSON.stringify(result, null, 2))
await Bun.write(
path.join(Global.Path.config, "config.json"),
JSON.stringify(result, null, 2),
)
await fs.unlink(path.join(Global.Path.config, "config"))
})
.catch(() => {})
@ -199,47 +238,19 @@ export namespace Config {
return result
})
async function load(configPath: string) {
let text = await Bun.file(configPath)
.text()
async function load(path: string) {
const data = await Bun.file(path)
.json()
.catch((err) => {
if (err.code === "ENOENT") return
throw new JsonError({ path: configPath }, { cause: err })
if (err.code === "ENOENT") return {}
throw new JsonError({ path }, { cause: err })
})
if (!text) return {}
text = text.replace(/\{env:([^}]+)\}/g, (_, varName) => {
return process.env[varName] || ""
})
const fileMatches = text.match(/"?\{file:([^}]+)\}"?/g)
if (fileMatches) {
const configDir = path.dirname(configPath)
for (const match of fileMatches) {
const filePath = match.replace(/^"?\{file:/, "").replace(/\}"?$/, "")
const resolvedPath = path.isAbsolute(filePath) ? filePath : path.resolve(configDir, filePath)
const fileContent = await Bun.file(resolvedPath).text()
text = text.replace(match, JSON.stringify(fileContent))
}
}
let data: any
try {
data = JSON.parse(text)
} catch (err) {
throw new JsonError({ path: configPath }, { cause: err as Error })
}
const parsed = Info.safeParse(data)
if (parsed.success) {
if (!parsed.data.$schema) {
parsed.data.$schema = "https://opencode.ai/config.json"
await Bun.write(configPath, JSON.stringify(parsed.data, null, 2))
}
return parsed.data
}
throw new InvalidError({ path: configPath, issues: parsed.error.issues })
if (parsed.success) return parsed.data
throw new InvalidError({ path, issues: parsed.error.issues })
}
export const JsonError = NamedError.create(
"ConfigJsonError",
z.object({
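
The dev-side loader in this hunk expands `{env:NAME}` and `{file:path}` placeholders in the raw text before parsing JSON. A hedged sketch of just that substitution step, without the schema validation or `$schema` rewrite:

```ts
import path from "path"

// Illustrative config preprocessor: expand {env:VAR} and {file:relative/path}
// placeholders in the raw config text before JSON.parse. Simplified sketch only.
async function loadConfig(configPath: string) {
  let text = await Bun.file(configPath).text().catch(() => "")
  if (!text) return {}
  text = text.replace(/\{env:([^}]+)\}/g, (_, name) => process.env[name] ?? "")
  for (const match of text.match(/"?\{file:([^}]+)\}"?/g) ?? []) {
    const rel = match.replace(/^"?\{file:/, "").replace(/\}"?$/, "")
    const resolved = path.isAbsolute(rel) ? rel : path.resolve(path.dirname(configPath), rel)
    // Inline the referenced file's contents as a JSON string literal.
    text = text.replace(match, JSON.stringify(await Bun.file(resolved).text()))
  }
  return JSON.parse(text)
}
```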

View file

@ -22,7 +22,9 @@ export namespace ConfigHooks {
command: item.command,
})
Bun.spawn({
cmd: item.command.map((x) => x.replace("$FILE", payload.properties.file)),
cmd: item.command.map((x) =>
x.replace("$FILE", payload.properties.file),
),
env: item.environment,
cwd: app.path.cwd,
stdout: "ignore",

View file

@ -45,7 +45,10 @@ export namespace Fzf {
log.info("found", { filepath })
return { filepath }
}
filepath = path.join(Global.Path.bin, "fzf" + (process.platform === "win32" ? ".exe" : ""))
filepath = path.join(
Global.Path.bin,
"fzf" + (process.platform === "win32" ? ".exe" : ""),
)
const file = Bun.file(filepath)
if (!(await file.exists())) {
@ -53,15 +56,18 @@ export namespace Fzf {
const arch = archMap[process.arch as keyof typeof archMap] ?? "amd64"
const config = PLATFORM[process.platform as keyof typeof PLATFORM]
if (!config) throw new UnsupportedPlatformError({ platform: process.platform })
if (!config)
throw new UnsupportedPlatformError({ platform: process.platform })
const version = VERSION
const platformName = process.platform === "win32" ? "windows" : process.platform
const platformName =
process.platform === "win32" ? "windows" : process.platform
const filename = `fzf-${version}-${platformName}_${arch}.${config.extension}`
const url = `https://github.com/junegunn/fzf/releases/download/v${version}/${filename}`
const response = await fetch(url)
if (!response.ok) throw new DownloadFailedError({ url, status: response.status })
if (!response.ok)
throw new DownloadFailedError({ url, status: response.status })
const buffer = await response.arrayBuffer()
const archivePath = path.join(Global.Path.bin, filename)
@ -80,11 +86,14 @@ export namespace Fzf {
})
}
if (config.extension === "zip") {
const proc = Bun.spawn(["unzip", "-j", archivePath, "fzf.exe", "-d", Global.Path.bin], {
const proc = Bun.spawn(
["unzip", "-j", archivePath, "fzf.exe", "-d", Global.Path.bin],
{
cwd: Global.Path.bin,
stderr: "pipe",
stdout: "ignore",
})
},
)
await proc.exited
if (proc.exitCode !== 0)
throw new ExtractionFailedError({
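
The pattern in this file — prefer a binary already on PATH, otherwise download a platform-specific release into a private bin directory — is a common one. A compressed sketch of it, assuming Bun.which and Bun.spawn, with the archive handling reduced to tarballs only:

```ts
import path from "path"

// Illustrative "bring your own binary" helper: prefer a system install, otherwise
// fetch a release tarball into binDir and extract it. Simplified; no zip handling.
async function ensureBinary(name: string, binDir: string, url: string) {
  const system = Bun.which(name)
  if (system) return system
  const filepath = path.join(binDir, name + (process.platform === "win32" ? ".exe" : ""))
  if (await Bun.file(filepath).exists()) return filepath
  const response = await fetch(url)
  if (!response.ok) throw new Error(`download failed: ${url} (${response.status})`)
  const archive = path.join(binDir, path.basename(url))
  await Bun.write(archive, await response.arrayBuffer())
  const proc = Bun.spawn(["tar", "-xzf", archive, "-C", binDir], { stderr: "pipe" })
  if ((await proc.exited) !== 0) throw new Error(`extraction failed: ${archive}`)
  return filepath
}
```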

View file

@ -11,19 +11,6 @@ import { Log } from "../util/log"
export namespace File {
const log = Log.create({ service: "file" })
export const Info = z
.object({
path: z.string(),
added: z.number().int(),
removed: z.number().int(),
status: z.enum(["added", "deleted", "modified"]),
})
.openapi({
ref: "File",
})
export type Info = z.infer<typeof Info>
export const Event = {
Edited: Bus.event(
"file.edited",
@ -37,16 +24,20 @@ export namespace File {
const app = App.info()
if (!app.git) return []
const diffOutput = await $`git diff --numstat HEAD`.cwd(app.path.cwd).quiet().nothrow().text()
const diffOutput = await $`git diff --numstat HEAD`
.cwd(app.path.cwd)
.quiet()
.nothrow()
.text()
const changedFiles: Info[] = []
const changedFiles = []
if (diffOutput.trim()) {
const lines = diffOutput.trim().split("\n")
for (const line of lines) {
const [added, removed, filepath] = line.split("\t")
changedFiles.push({
path: filepath,
file: filepath,
added: added === "-" ? 0 : parseInt(added, 10),
removed: removed === "-" ? 0 : parseInt(removed, 10),
status: "modified",
@ -54,16 +45,22 @@ export namespace File {
}
}
const untrackedOutput = await $`git ls-files --others --exclude-standard`.cwd(app.path.cwd).quiet().nothrow().text()
const untrackedOutput = await $`git ls-files --others --exclude-standard`
.cwd(app.path.cwd)
.quiet()
.nothrow()
.text()
if (untrackedOutput.trim()) {
const untrackedFiles = untrackedOutput.trim().split("\n")
for (const filepath of untrackedFiles) {
try {
const content = await Bun.file(path.join(app.path.root, filepath)).text()
const content = await Bun.file(
path.join(app.path.root, filepath),
).text()
const lines = content.split("\n").length
changedFiles.push({
path: filepath,
file: filepath,
added: lines,
removed: 0,
status: "added",
@ -75,13 +72,17 @@ export namespace File {
}
// Get deleted files
const deletedOutput = await $`git diff --name-only --diff-filter=D HEAD`.cwd(app.path.cwd).quiet().nothrow().text()
const deletedOutput = await $`git diff --name-only --diff-filter=D HEAD`
.cwd(app.path.cwd)
.quiet()
.nothrow()
.text()
if (deletedOutput.trim()) {
const deletedFiles = deletedOutput.trim().split("\n")
for (const filepath of deletedFiles) {
changedFiles.push({
path: filepath,
file: filepath,
added: 0,
removed: 0, // Could get original line count but would require another git command
status: "deleted",
@ -91,7 +92,7 @@ export namespace File {
return changedFiles.map((x) => ({
...x,
path: path.relative(app.path.cwd, path.join(app.path.root, x.path)),
file: path.relative(app.path.cwd, path.join(app.path.root, x.file)),
}))
}
@ -111,7 +112,11 @@ export namespace File {
filepath: rel,
})
if (diff !== "unmodified") {
const original = await $`git show HEAD:${rel}`.cwd(app.path.root).quiet().nothrow().text()
const original = await $`git show HEAD:${rel}`
.cwd(app.path.root)
.quiet()
.nothrow()
.text()
const patch = createPatch(file, original, content, "old", "new", {
context: Infinity,
})
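
A minimal sketch of the `git diff --numstat` parsing shown above, assuming Bun's `$` shell and leaving out the untracked and deleted-file handling:

```ts
import { $ } from "bun"

// Illustrative: turn `git diff --numstat HEAD` output into per-file added/removed counts.
// "-" columns (binary files) are treated as 0. Sketch only.
async function numstat(cwd: string) {
  const out = await $`git diff --numstat HEAD`.cwd(cwd).quiet().nothrow().text()
  return out
    .trim()
    .split("\n")
    .filter(Boolean)
    .map((line) => {
      const [added, removed, file] = line.split("\t")
      return {
        file,
        added: added === "-" ? 0 : parseInt(added, 10),
        removed: removed === "-" ? 0 : parseInt(removed, 10),
      }
    })
}
```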

View file

@ -34,8 +34,7 @@ export namespace Ripgrep {
export const Match = z.object({
type: z.literal("match"),
data: z
.object({
data: z.object({
path: z.object({
text: z.string(),
}),
@ -53,8 +52,7 @@ export namespace Ripgrep {
end: z.number(),
}),
),
})
.openapi({ ref: "Match" }),
}),
})
const End = z.object({
@ -124,11 +122,15 @@ export namespace Ripgrep {
const state = lazy(async () => {
let filepath = Bun.which("rg")
if (filepath) return { filepath }
filepath = path.join(Global.Path.bin, "rg" + (process.platform === "win32" ? ".exe" : ""))
filepath = path.join(
Global.Path.bin,
"rg" + (process.platform === "win32" ? ".exe" : ""),
)
const file = Bun.file(filepath)
if (!(await file.exists())) {
const platformKey = `${process.arch}-${process.platform}` as keyof typeof PLATFORM
const platformKey =
`${process.arch}-${process.platform}` as keyof typeof PLATFORM
const config = PLATFORM[platformKey]
if (!config) throw new UnsupportedPlatformError({ platform: platformKey })
@ -137,7 +139,8 @@ export namespace Ripgrep {
const url = `https://github.com/BurntSushi/ripgrep/releases/download/${version}/${filename}`
const response = await fetch(url)
if (!response.ok) throw new DownloadFailedError({ url, status: response.status })
if (!response.ok)
throw new DownloadFailedError({ url, status: response.status })
const buffer = await response.arrayBuffer()
const archivePath = path.join(Global.Path.bin, filename)
@ -161,11 +164,14 @@ export namespace Ripgrep {
})
}
if (config.extension === "zip") {
const proc = Bun.spawn(["unzip", "-j", archivePath, "*/rg.exe", "-d", Global.Path.bin], {
const proc = Bun.spawn(
["unzip", "-j", archivePath, "*/rg.exe", "-d", Global.Path.bin],
{
cwd: Global.Path.bin,
stderr: "pipe",
stdout: "ignore",
})
},
)
await proc.exited
if (proc.exitCode !== 0)
throw new ExtractionFailedError({
@ -187,16 +193,17 @@ export namespace Ripgrep {
return filepath
}
export async function files(input: { cwd: string; query?: string; glob?: string[]; limit?: number }) {
const commands = [`${$.escape(await filepath())} --files --follow --hidden --glob='!.git/*'`]
if (input.glob) {
for (const g of input.glob) {
commands[0] += ` --glob='${g}'`
}
}
if (input.query) commands.push(`${await Fzf.filepath()} --filter=${input.query}`)
export async function files(input: {
cwd: string
query?: string
glob?: string
limit?: number
}) {
const commands = [
`${await filepath()} --files --hidden --glob='!.git/*' ${input.glob ? `--glob='${input.glob}'` : ``}`,
]
if (input.query)
commands.push(`${await Fzf.filepath()} --filter=${input.query}`)
if (input.limit) commands.push(`head -n ${input.limit}`)
const joined = commands.join(" | ")
const result = await $`${{ raw: joined }}`.cwd(input.cwd).nothrow().text()
@ -303,8 +310,18 @@ export namespace Ripgrep {
return lines.join("\n")
}
export async function search(input: { cwd: string; pattern: string; glob?: string[]; limit?: number }) {
const args = [`${await filepath()}`, "--json", "--hidden", "--glob='!.git/*'"]
export async function search(input: {
cwd: string
pattern: string
glob?: string[]
limit?: number
}) {
const args = [
`${await filepath()}`,
"--json",
"--hidden",
"--glob='!.git/*'",
]
if (input.glob) {
for (const g of input.glob) {
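
The `files()` helper above composes a shell pipeline; a simplified sketch of that composition (ripgrep for the file walk, fzf as a fuzzy filter, head as the limiter), assuming both binaries are resolvable on PATH:

```ts
import { $ } from "bun"

// Illustrative pipeline builder: list files with ripgrep, optionally fuzzy-filter
// with fzf and cap the result count. The query is interpolated unescaped here,
// which is fine for a sketch but not for untrusted input.
async function listFiles(cwd: string, query?: string, limit?: number) {
  const commands = [`rg --files --hidden --glob='!.git/*'`]
  if (query) commands.push(`fzf --filter=${query}`)
  if (limit) commands.push(`head -n ${limit}`)
  const joined = commands.join(" | ")
  const result = await $`${{ raw: joined }}`.cwd(cwd).nothrow().text()
  return result.split("\n").filter(Boolean)
}
```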

View file

@ -1,8 +1,6 @@
import { App } from "../app/app"
import { Log } from "../util/log"
export namespace FileTime {
const log = Log.create({ service: "file.time" })
export const state = App.state("tool.filetimes", () => {
const read: {
[sessionID: string]: {
@ -15,7 +13,6 @@ export namespace FileTime {
})
export function read(sessionID: string, file: string) {
log.info("read", { sessionID, file })
const { read } = state()
read[sessionID] = read[sessionID] || {}
read[sessionID][file] = new Date()
@ -27,7 +24,10 @@ export namespace FileTime {
export async function assert(sessionID: string, filepath: string) {
const time = get(sessionID, filepath)
if (!time) throw new Error(`You must read the file ${filepath} before overwriting it. Use the Read tool first`)
if (!time)
throw new Error(
`You must read the file ${filepath} before overwriting it. Use the Read tool first`,
)
const stats = await Bun.file(filepath).stat()
if (stats.mtime.getTime() > time.getTime()) {
throw new Error(
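
The assert above enforces a read-before-write rule by comparing the recorded read time against the file's mtime on disk. A self-contained sketch of that guard, using an in-memory map keyed by session and path and assuming `Bun.file().stat()`:

```ts
// Illustrative read-before-write guard: refuse to overwrite a file that was never
// read, or that changed on disk after it was last read. Sketch only.
const reads = new Map<string, Date>()

export function markRead(sessionID: string, file: string) {
  reads.set(`${sessionID}:${file}`, new Date())
}

export async function assertFresh(sessionID: string, file: string) {
  const readAt = reads.get(`${sessionID}:${file}`)
  if (!readAt) throw new Error(`You must read ${file} before overwriting it`)
  const stats = await Bun.file(file).stat()
  if (stats.mtime.getTime() > readAt.getTime())
    throw new Error(`${file} changed on disk since it was last read`)
}
```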

View file

@ -21,9 +21,11 @@ export namespace FileWatcher {
"file.watcher",
() => {
const app = App.use()
if (!app.info.git) return {}
try {
const watcher = fs.watch(app.info.path.cwd, { recursive: true }, (event, file) => {
const watcher = fs.watch(
app.info.path.cwd,
{ recursive: true },
(event, file) => {
log.info("change", { file, event })
if (!file) return
// for some reason async local storage is lost here
@ -34,7 +36,8 @@ export namespace FileWatcher {
event,
})
})
})
},
)
return { watcher }
} catch {
return {}
@ -46,7 +49,7 @@ export namespace FileWatcher {
)
export function init() {
if (Flag.OPENCODE_DISABLE_WATCHER || true) return
if (Flag.OPENCODE_DISABLE_WATCHER) return
state()
}
}
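
A stripped-down version of the watcher above — `fs.watch` with the recursive option, forwarding each change to a callback — assuming recursive watching is available on the platform:

```ts
import fs from "fs"

// Illustrative recursive watcher: forward every change event to a callback.
// fs.watch's recursive option is not supported on every platform; sketch only.
function watch(dir: string, onChange: (file: string, event: string) => void) {
  try {
    const watcher = fs.watch(dir, { recursive: true }, (event, file) => {
      if (!file) return
      onChange(file, event)
    })
    return () => watcher.close()
  } catch {
    return () => {}
  }
}
```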

View file

@ -1,7 +1,5 @@
import { App } from "../app/app"
import { BunProc } from "../bun"
import { Filesystem } from "../util/filesystem"
import path from "path"
export interface Info {
name: string
@ -31,7 +29,7 @@ export const mix: Info = {
export const prettier: Info = {
name: "prettier",
command: [BunProc.which(), "x", "prettier", "--write", "$FILE"],
command: [BunProc.which(), "run", "prettier", "--write", "$FILE"],
environment: {
BUN_BE_BUN: "1",
},
@ -64,12 +62,23 @@ export const prettier: Info = {
".gql",
],
async enabled() {
const app = App.info()
const nms = await Filesystem.findUp("node_modules", app.path.cwd, app.path.root)
for (const item of nms) {
if (await Bun.file(path.join(item, ".bin", "prettier")).exists()) return true
}
// this is more complicated because we only want to use prettier if it's
// being used with the current project
try {
const proc = Bun.spawn({
cmd: [BunProc.which(), "run", "prettier", "--version"],
cwd: App.info().path.cwd,
env: {
BUN_BE_BUN: "1",
},
stdout: "ignore",
stderr: "ignore",
})
const exit = await proc.exited
return exit === 0
} catch {
return false
}
},
}
@ -85,7 +94,21 @@ export const zig: Info = {
export const clang: Info = {
name: "clang-format",
command: ["clang-format", "-i", "$FILE"],
extensions: [".c", ".cc", ".cpp", ".cxx", ".c++", ".h", ".hh", ".hpp", ".hxx", ".h++", ".ino", ".C", ".H"],
extensions: [
".c",
".cc",
".cpp",
".cxx",
".c++",
".h",
".hh",
".hpp",
".hxx",
".h++",
".ino",
".C",
".H",
],
async enabled() {
return Bun.which("clang-format") !== null
},

View file

@ -23,17 +23,7 @@ export namespace Global {
await Promise.all([
fs.mkdir(Global.Path.data, { recursive: true }),
fs.mkdir(Global.Path.config, { recursive: true }),
fs.mkdir(Global.Path.cache, { recursive: true }),
fs.mkdir(Global.Path.providers, { recursive: true }),
fs.mkdir(Global.Path.state, { recursive: true }),
])
const CACHE_VERSION = "1"
const version = await Bun.file(path.join(Global.Path.cache, "version"))
.text()
.catch(() => "0")
if (version !== CACHE_VERSION) {
await fs.rm(Global.Path.cache, { recursive: true, force: true })
await Bun.file(path.join(Global.Path.cache, "version")).write(CACHE_VERSION)
}
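
The dev side adds a versioned cache directory: when the recorded version differs from the expected one, the whole cache is wiped. A small sketch of that gate, assuming Bun.file and fs/promises:

```ts
import fs from "fs/promises"
import path from "path"

// Illustrative cache-version gate: clear the cache directory whenever the stored
// version marker does not match the expected one. Sketch only.
async function ensureCacheVersion(cacheDir: string, expected: string) {
  const marker = path.join(cacheDir, "version")
  const current = await Bun.file(marker).text().catch(() => "0")
  if (current === expected) return
  await fs.rm(cacheDir, { recursive: true, force: true })
  await fs.mkdir(cacheDir, { recursive: true })
  await Bun.write(marker, expected)
}
```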

View file

@ -26,7 +26,11 @@ export namespace Identifier {
return generateID(prefix, true, given)
}
function generateID(prefix: keyof typeof prefixes, descending: boolean, given?: string): string {
function generateID(
prefix: keyof typeof prefixes,
descending: boolean,
given?: string,
): string {
if (!given) {
return generateNewID(prefix, descending)
}
@ -38,7 +42,8 @@ export namespace Identifier {
}
function randomBase62(length: number): string {
const chars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
const chars =
"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
let result = ""
const bytes = randomBytes(length)
for (let i = 0; i < length; i++) {
@ -47,7 +52,10 @@ export namespace Identifier {
return result
}
function generateNewID(prefix: keyof typeof prefixes, descending: boolean): string {
function generateNewID(
prefix: keyof typeof prefixes,
descending: boolean,
): string {
const currentTimestamp = Date.now()
if (currentTimestamp !== lastTimestamp) {
@ -65,6 +73,11 @@ export namespace Identifier {
timeBytes[i] = Number((now >> BigInt(40 - 8 * i)) & BigInt(0xff))
}
return prefixes[prefix] + "_" + timeBytes.toString("hex") + randomBase62(LENGTH - 12)
return (
prefixes[prefix] +
"_" +
timeBytes.toString("hex") +
randomBase62(LENGTH - 12)
)
}
}
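
A condensed sketch of the ID scheme above — a prefix, a 6-byte millisecond timestamp rendered as 12 hex characters, and a random base62 tail — assuming Node's `crypto.randomBytes`; the per-millisecond monotonic counter handling is omitted:

```ts
import { randomBytes } from "crypto"

// Illustrative time-sortable ID: prefix + 12 hex chars of ms timestamp + random base62 tail.
// The real implementation also mixes in a per-millisecond counter; omitted here.
function randomBase62(length: number): string {
  const chars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
  const bytes = randomBytes(length)
  let result = ""
  for (let i = 0; i < length; i++) result += chars[bytes[i] % 62]
  return result
}

export function generateID(prefix: string, length = 26): string {
  const now = BigInt(Date.now())
  const timeBytes = Buffer.alloc(6)
  for (let i = 0; i < 6; i++) timeBytes[i] = Number((now >> BigInt(40 - 8 * i)) & BigInt(0xff))
  return prefix + "_" + timeBytes.toString("hex") + randomBase62(length - 12)
}
```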

View file

@ -14,7 +14,6 @@ import { FormatError } from "./cli/error"
import { ServeCommand } from "./cli/cmd/serve"
import { TuiCommand } from "./cli/cmd/tui"
import { DebugCommand } from "./cli/cmd/debug"
import { StatsCommand } from "./cli/cmd/stats"
const cancel = new AbortController()
@ -41,24 +40,6 @@ const cli = yargs(hideBin(process.argv))
})
.middleware(async () => {
await Log.init({ print: process.argv.includes("--print-logs") })
try {
const { Config } = await import("./config/config")
const { App } = await import("./app/app")
App.provide({ cwd: process.cwd() }, async () => {
const cfg = await Config.get()
if (cfg.log_level) {
Log.setLevel(cfg.log_level as Log.Level)
} else {
const defaultLevel = Installation.isDev() ? "DEBUG" : "INFO"
Log.setLevel(defaultLevel)
}
})
} catch (e) {
Log.Default.error("failed to load config", { error: e })
}
Log.Default.info("opencode", {
version: Installation.VERSION,
args: process.argv.slice(2),
@ -73,9 +54,11 @@ const cli = yargs(hideBin(process.argv))
.command(UpgradeCommand)
.command(ServeCommand)
.command(ModelsCommand)
.command(StatsCommand)
.fail((msg) => {
if (msg.startsWith("Unknown argument") || msg.startsWith("Not enough non-option arguments")) {
if (
msg.startsWith("Unknown argument") ||
msg.startsWith("Not enough non-option arguments")
) {
cli.showHelp("log")
}
})
@ -114,7 +97,10 @@ try {
Log.Default.error("fatal", data)
const formatted = FormatError(e)
if (formatted) UI.error(formatted)
if (formatted === undefined) UI.error("Unexpected error, check log file at " + Log.file() + " for more details")
if (formatted === undefined)
UI.error(
"Unexpected error, check log file at " + Log.file() + " for more details",
)
process.exitCode = 1
}

View file

@ -135,7 +135,8 @@ export namespace Installation {
})
}
export const VERSION = typeof OPENCODE_VERSION === "string" ? OPENCODE_VERSION : "dev"
export const VERSION =
typeof OPENCODE_VERSION === "string" ? OPENCODE_VERSION : "dev"
export async function latest() {
return fetch("https://api.github.com/repos/sst/opencode/releases/latest")

View file

@ -1,5 +1,9 @@
import path from "path"
import { createMessageConnection, StreamMessageReader, StreamMessageWriter } from "vscode-jsonrpc/node"
import {
createMessageConnection,
StreamMessageReader,
StreamMessageWriter,
} from "vscode-jsonrpc/node"
import type { Diagnostic as VSCodeDiagnostic } from "vscode-languageserver-types"
import { App } from "../app/app"
import { Log } from "../util/log"
@ -34,54 +38,45 @@ export namespace LSPClient {
),
}
export async function create(input: { serverID: string; server: LSPServer.Handle; root: string }) {
export async function create(serverID: string, server: LSPServer.Handle) {
const app = App.info()
const l = log.clone().tag("serverID", input.serverID)
l.info("starting client")
log.info("starting client", { id: serverID })
const connection = createMessageConnection(
new StreamMessageReader(input.server.process.stdout),
new StreamMessageWriter(input.server.process.stdin),
new StreamMessageReader(server.process.stdout),
new StreamMessageWriter(server.process.stdin),
)
const diagnostics = new Map<string, Diagnostic[]>()
connection.onNotification("textDocument/publishDiagnostics", (params) => {
const path = new URL(params.uri).pathname
l.info("textDocument/publishDiagnostics", {
log.info("textDocument/publishDiagnostics", {
path,
})
const exists = diagnostics.has(path)
diagnostics.set(path, params.diagnostics)
if (!exists && input.serverID === "typescript") return
Bus.publish(Event.Diagnostics, { path, serverID: input.serverID })
})
connection.onRequest("window/workDoneProgress/create", (params) => {
l.info("window/workDoneProgress/create", params)
return null
if (!exists && serverID === "typescript") return
Bus.publish(Event.Diagnostics, { path, serverID })
})
connection.onRequest("workspace/configuration", async () => {
return [{}]
})
connection.listen()
l.info("sending initialize")
log.info("sending initialize", { id: serverID })
await withTimeout(
connection.sendRequest("initialize", {
rootUri: "file://" + input.root,
processId: input.server.process.pid,
processId: server.process.pid,
workspaceFolders: [
{
name: "workspace",
uri: "file://" + input.root,
uri: "file://" + app.path.cwd,
},
],
initializationOptions: {
...input.server.initialization,
...server.initialization,
},
capabilities: {
window: {
workDoneProgress: true,
},
workspace: {
configuration: true,
},
@ -98,9 +93,9 @@ export namespace LSPClient {
}),
5_000,
).catch((err) => {
l.error("initialize error", { error: err })
log.error("initialize error", { error: err })
throw new InitializeError(
{ serverID: input.serverID },
{ serverID },
{
cause: err,
},
@ -108,22 +103,26 @@ export namespace LSPClient {
})
await connection.sendNotification("initialized", {})
log.info("initialized", {
serverID,
})
const files: {
[path: string]: number
} = {}
const result = {
root: input.root,
get serverID() {
return input.serverID
return serverID
},
get connection() {
return connection
},
notify: {
async open(input: { path: string }) {
input.path = path.isAbsolute(input.path) ? input.path : path.resolve(app.path.cwd, input.path)
input.path = path.isAbsolute(input.path)
? input.path
: path.resolve(app.path.cwd, input.path)
const file = Bun.file(input.path)
const text = await file.text()
const version = files[input.path]
@ -155,13 +154,18 @@ export namespace LSPClient {
return diagnostics
},
async waitForDiagnostics(input: { path: string }) {
input.path = path.isAbsolute(input.path) ? input.path : path.resolve(app.path.cwd, input.path)
input.path = path.isAbsolute(input.path)
? input.path
: path.resolve(app.path.cwd, input.path)
log.info("waiting for diagnostics", input)
let unsub: () => void
return await withTimeout(
new Promise<void>((resolve) => {
unsub = Bus.subscribe(Event.Diagnostics, (event) => {
if (event.properties.path === input.path && event.properties.serverID === result.serverID) {
if (
event.properties.path === input.path &&
event.properties.serverID === result.serverID
) {
log.info("got diagnostics", input)
unsub?.()
resolve()
@ -176,16 +180,13 @@ export namespace LSPClient {
})
},
async shutdown() {
l.info("shutting down")
log.info("shutting down", { serverID })
connection.end()
connection.dispose()
input.server.process.kill()
l.info("shutdown")
log.info("shutdown", { serverID })
},
}
l.info("initialized")
return result
}
}
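
The client setup above boils down to a JSON-RPC connection over the language server's stdio plus an `initialize`/`initialized` handshake. A minimal sketch with vscode-jsonrpc, assuming a spawned child process with piped stdio; the command and root are placeholders:

```ts
import { spawn } from "child_process"
import { createMessageConnection, StreamMessageReader, StreamMessageWriter } from "vscode-jsonrpc/node"

// Illustrative LSP handshake: spawn a server, wire JSON-RPC over stdio, initialize.
// No capability negotiation beyond the basics, no diagnostics handling.
async function connect(cmd: string[], root: string) {
  const proc = spawn(cmd[0], cmd.slice(1), { stdio: ["pipe", "pipe", "inherit"] })
  const connection = createMessageConnection(
    new StreamMessageReader(proc.stdout!),
    new StreamMessageWriter(proc.stdin!),
  )
  connection.listen()
  await connection.sendRequest("initialize", {
    processId: proc.pid,
    rootUri: "file://" + root,
    workspaceFolders: [{ name: "workspace", uri: "file://" + root }],
    capabilities: {},
  })
  await connection.sendNotification("initialized", {})
  return { connection, proc }
}
```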

View file

@ -3,13 +3,19 @@ import { Log } from "../util/log"
import { LSPClient } from "./client"
import path from "path"
import { LSPServer } from "./server"
import { Ripgrep } from "../file/ripgrep"
import { z } from "zod"
export namespace LSP {
const log = Log.create({ service: "lsp" })
export const Range = z
export const Symbol = z
.object({
name: z.string(),
kind: z.number(),
location: z.object({
uri: z.string(),
range: z.object({
start: z.object({
line: z.number(),
character: z.number(),
@ -18,50 +24,43 @@ export namespace LSP {
line: z.number(),
character: z.number(),
}),
})
.openapi({
ref: "Range",
})
export type Range = z.infer<typeof Range>
export const Symbol = z
.object({
name: z.string(),
kind: z.number(),
location: z.object({
uri: z.string(),
range: Range,
}),
}),
})
.openapi({
ref: "Symbol",
ref: "LSP.Symbol",
})
export type Symbol = z.infer<typeof Symbol>
export const DocumentSymbol = z
.object({
name: z.string(),
detail: z.string().optional(),
kind: z.number(),
range: Range,
selectionRange: Range,
})
.openapi({
ref: "DocumentSymbol",
})
export type DocumentSymbol = z.infer<typeof DocumentSymbol>
const state = App.state(
"lsp",
async () => {
const clients: LSPClient.Info[] = []
async (app) => {
log.info("initializing")
const clients = new Map<string, LSPClient.Info>()
for (const server of Object.values(LSPServer)) {
for (const extension of server.extensions) {
const [file] = await Ripgrep.files({
cwd: app.path.cwd,
glob: "*" + extension,
})
if (!file) continue
const handle = await server.spawn(App.info())
if (!handle) break
const client = await LSPClient.create(server.id, handle).catch(
(err) => log.error("", { error: err }),
)
if (!client) break
clients.set(server.id, client)
break
}
}
log.info("initialized")
return {
broken: new Set<string>(),
clients,
}
},
async (state) => {
for (const client of state.clients) {
for (const client of state.clients.values()) {
await client.shutdown()
}
},
@ -71,44 +70,16 @@ export namespace LSP {
return state()
}
async function getClients(file: string) {
const s = await state()
const extension = path.parse(file).ext
const result: LSPClient.Info[] = []
for (const server of Object.values(LSPServer)) {
if (!server.extensions.includes(extension)) continue
const root = await server.root(file, App.info())
if (!root) continue
if (s.broken.has(root + server.id)) continue
const match = s.clients.find((x) => x.root === root && x.serverID === server.id)
if (match) {
result.push(match)
continue
}
const handle = await server.spawn(App.info(), root)
if (!handle) continue
const client = await LSPClient.create({
serverID: server.id,
server: handle,
root,
}).catch((err) => {
s.broken.add(root + server.id)
handle.process.kill()
log.error("", { error: err })
})
if (!client) continue
s.clients.push(client)
result.push(client)
}
return result
}
export async function touchFile(input: string, waitForDiagnostics?: boolean) {
const clients = await getClients(input)
const extension = path.parse(input).ext
const matches = Object.values(LSPServer)
.filter((x) => x.extensions.includes(extension))
.map((x) => x.id)
await run(async (client) => {
if (!clients.includes(client)) return
const wait = waitForDiagnostics ? client.waitForDiagnostics({ path: input }) : Promise.resolve()
if (!matches.includes(client.serverID)) return
const wait = waitForDiagnostics
? client.waitForDiagnostics({ path: input })
: Promise.resolve()
await client.notify.open({ path: input })
return wait
})
@ -126,7 +97,11 @@ export namespace LSP {
return results
}
export async function hover(input: { file: string; line: number; character: number }) {
export async function hover(input: {
file: string
line: number
character: number
}) {
return run((client) => {
return client.connection.sendRequest("textDocument/hover", {
textDocument: {
@ -140,74 +115,18 @@ export namespace LSP {
})
}
enum SymbolKind {
File = 1,
Module = 2,
Namespace = 3,
Package = 4,
Class = 5,
Method = 6,
Property = 7,
Field = 8,
Constructor = 9,
Enum = 10,
Interface = 11,
Function = 12,
Variable = 13,
Constant = 14,
String = 15,
Number = 16,
Boolean = 17,
Array = 18,
Object = 19,
Key = 20,
Null = 21,
EnumMember = 22,
Struct = 23,
Event = 24,
Operator = 25,
TypeParameter = 26,
}
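// Restrict workspace symbol results to the symbol kinds most useful for code navigation.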
const kinds = [
SymbolKind.Class,
SymbolKind.Function,
SymbolKind.Method,
SymbolKind.Interface,
SymbolKind.Variable,
SymbolKind.Constant,
SymbolKind.Struct,
SymbolKind.Enum,
]
export async function workspaceSymbol(query: string) {
return run((client) =>
client.connection
.sendRequest("workspace/symbol", {
client.connection.sendRequest("workspace/symbol", {
query,
})
.then((result: any) => result.filter((x: LSP.Symbol) => kinds.includes(x.kind)))
.then((result: any) => result.slice(0, 10))
.catch(() => []),
}),
).then((result) => result.flat() as LSP.Symbol[])
}
export async function documentSymbol(uri: string) {
return run((client) =>
client.connection
.sendRequest("textDocument/documentSymbol", {
textDocument: {
uri,
},
})
.catch(() => []),
)
.then((result) => result.flat() as (LSP.DocumentSymbol | LSP.Symbol)[])
.then((result) => result.filter(Boolean))
}
async function run<T>(input: (client: LSPClient.Info) => Promise<T>): Promise<T[]> {
const clients = await state().then((x) => x.clients)
async function run<T>(
input: (client: LSPClient.Info) => Promise<T>,
): Promise<T[]> {
const clients = await state().then((x) => [...x.clients.values()])
const tasks = clients.map((x) => input(x))
return Promise.all(tasks)
}

View file

@ -94,6 +94,4 @@ export const LANGUAGE_EXTENSIONS: Record<string, string> = {
".yml": "yaml",
".mjs": "javascript",
".cjs": "javascript",
".zig": "zig",
".zon": "zig",
} as const

View file

@ -6,7 +6,6 @@ import { Log } from "../util/log"
import { BunProc } from "../bun"
import { $ } from "bun"
import fs from "fs/promises"
import { Filesystem } from "../util/filesystem"
export namespace LSPServer {
const log = Log.create({ service: "lsp.server" })
@ -16,44 +15,31 @@ export namespace LSPServer {
initialization?: Record<string, any>
}
type RootFunction = (file: string, app: App.Info) => Promise<string | undefined>
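// NearestRoot walks up from the file's directory toward the app root and returns the directory
// containing the first matching marker file, falling back to the app root when none is found.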
const NearestRoot = (patterns: string[]): RootFunction => {
return async (file, app) => {
const files = Filesystem.up({
targets: patterns,
start: path.dirname(file),
stop: app.path.root,
})
const first = await files.next()
await files.return()
if (!first.value) return app.path.root
return path.dirname(first.value)
}
}
export interface Info {
id: string
extensions: string[]
global?: boolean
root: RootFunction
spawn(app: App.Info, root: string): Promise<Handle | undefined>
spawn(app: App.Info): Promise<Handle | undefined>
}
export const Typescript: Info = {
id: "typescript",
root: NearestRoot(["tsconfig.json", "package.json", "jsconfig.json"]),
extensions: [".ts", ".tsx", ".js", ".jsx", ".mjs", ".cjs", ".mts", ".cts"],
async spawn(app, root) {
const tsserver = await Bun.resolve("typescript/lib/tsserver.js", app.path.cwd).catch(() => {})
async spawn(app) {
const tsserver = await Bun.resolve(
"typescript/lib/tsserver.js",
app.path.cwd,
).catch(() => {})
if (!tsserver) return
const proc = spawn(BunProc.which(), ["x", "typescript-language-server", "--stdio"], {
cwd: root,
const proc = spawn(
BunProc.which(),
["x", "typescript-language-server", "--stdio"],
{
env: {
...process.env,
BUN_BE_BUN: "1",
},
})
},
)
return {
process: proc,
initialization: {
@ -67,13 +53,8 @@ export namespace LSPServer {
export const Gopls: Info = {
id: "golang",
root: async (file, app) => {
const work = await NearestRoot(["go.work"])(file, app)
if (work) return work
return NearestRoot(["go.mod", "go.sum"])(file, app)
},
extensions: [".go"],
async spawn(_, root) {
async spawn() {
let bin = Bun.which("gopls", {
PATH: process.env["PATH"] + ":" + Global.Path.bin,
})
@ -92,24 +73,24 @@ export namespace LSPServer {
log.error("Failed to install gopls")
return
}
bin = path.join(Global.Path.bin, "gopls" + (process.platform === "win32" ? ".exe" : ""))
bin = path.join(
Global.Path.bin,
"gopls" + (process.platform === "win32" ? ".exe" : ""),
)
log.info(`installed gopls`, {
bin,
})
}
return {
process: spawn(bin!, {
cwd: root,
}),
process: spawn(bin!),
}
},
}
export const RubyLsp: Info = {
id: "ruby-lsp",
root: NearestRoot(["Gemfile"]),
extensions: [".rb", ".rake", ".gemspec", ".ru"],
async spawn(_, root) {
async spawn() {
let bin = Bun.which("ruby-lsp", {
PATH: process.env["PATH"] + ":" + Global.Path.bin,
})
@ -132,15 +113,16 @@ export namespace LSPServer {
log.error("Failed to install ruby-lsp")
return
}
bin = path.join(Global.Path.bin, "ruby-lsp" + (process.platform === "win32" ? ".exe" : ""))
bin = path.join(
Global.Path.bin,
"ruby-lsp" + (process.platform === "win32" ? ".exe" : ""),
)
log.info(`installed ruby-lsp`, {
bin,
})
}
return {
process: spawn(bin!, ["--stdio"], {
cwd: root,
}),
process: spawn(bin!, ["--stdio"]),
}
},
}
@ -148,15 +130,17 @@ export namespace LSPServer {
export const Pyright: Info = {
id: "pyright",
extensions: [".py", ".pyi"],
root: NearestRoot(["pyproject.toml", "setup.py", "setup.cfg", "requirements.txt", "Pipfile", "pyrightconfig.json"]),
async spawn(_, root) {
const proc = spawn(BunProc.which(), ["x", "pyright-langserver", "--stdio"], {
cwd: root,
async spawn() {
const proc = spawn(
BunProc.which(),
["x", "pyright-langserver", "--stdio"],
{
env: {
...process.env,
BUN_BE_BUN: "1",
},
})
},
)
return {
process: proc,
}
@ -166,8 +150,7 @@ export namespace LSPServer {
export const ElixirLS: Info = {
id: "elixir-ls",
extensions: [".ex", ".exs"],
root: NearestRoot(["mix.exs", "mix.lock"]),
async spawn(_, root) {
async spawn() {
let binary = Bun.which("elixir-ls")
if (!binary) {
const elixirLsPath = path.join(Global.Path.bin, "elixir-ls")
@ -175,7 +158,9 @@ export namespace LSPServer {
Global.Path.bin,
"elixir-ls-master",
"release",
process.platform === "win32" ? "language_server.bat" : "language_server.sh",
process.platform === "win32"
? "language_server.bat"
: "language_server.sh",
)
if (!(await Bun.file(binary).exists())) {
@ -187,7 +172,9 @@ export namespace LSPServer {
log.info("downloading elixir-ls from GitHub releases")
const response = await fetch("https://github.com/elixir-lsp/elixir-ls/archive/refs/heads/master.zip")
const response = await fetch(
"https://github.com/elixir-lsp/elixir-ls/archive/refs/heads/master.zip",
)
if (!response.ok) return
const zipPath = path.join(Global.Path.bin, "elixir-ls.zip")
await Bun.file(zipPath).write(response)
@ -211,114 +198,7 @@ export namespace LSPServer {
}
return {
process: spawn(binary, {
cwd: root,
}),
}
},
}
export const Zls: Info = {
id: "zls",
extensions: [".zig", ".zon"],
root: NearestRoot(["build.zig"]),
async spawn(_, root) {
let bin = Bun.which("zls", {
PATH: process.env["PATH"] + ":" + Global.Path.bin,
})
if (!bin) {
const zig = Bun.which("zig")
if (!zig) {
log.error("Zig is required to use zls. Please install Zig first.")
return
}
log.info("downloading zls from GitHub releases")
const releaseResponse = await fetch("https://api.github.com/repos/zigtools/zls/releases/latest")
if (!releaseResponse.ok) {
log.error("Failed to fetch zls release info")
return
}
const release = await releaseResponse.json()
const platform = process.platform
const arch = process.arch
let assetName = ""
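// Map Node.js platform/arch identifiers to the names used in zls release asset filenames.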
let zlsArch: string = arch
if (arch === "arm64") zlsArch = "aarch64"
else if (arch === "x64") zlsArch = "x86_64"
else if (arch === "ia32") zlsArch = "x86"
let zlsPlatform: string = platform
if (platform === "darwin") zlsPlatform = "macos"
else if (platform === "win32") zlsPlatform = "windows"
const ext = platform === "win32" ? "zip" : "tar.xz"
assetName = `zls-${zlsArch}-${zlsPlatform}.${ext}`
const supportedCombos = [
"zls-x86_64-linux.tar.xz",
"zls-x86_64-macos.tar.xz",
"zls-x86_64-windows.zip",
"zls-aarch64-linux.tar.xz",
"zls-aarch64-macos.tar.xz",
"zls-aarch64-windows.zip",
"zls-x86-linux.tar.xz",
"zls-x86-windows.zip",
]
if (!supportedCombos.includes(assetName)) {
log.error(`Platform ${platform} and architecture ${arch} is not supported by zls`)
return
}
const asset = release.assets.find((a: any) => a.name === assetName)
if (!asset) {
log.error(`Could not find asset ${assetName} in latest zls release`)
return
}
const downloadUrl = asset.browser_download_url
const downloadResponse = await fetch(downloadUrl)
if (!downloadResponse.ok) {
log.error("Failed to download zls")
return
}
const tempPath = path.join(Global.Path.bin, assetName)
await Bun.file(tempPath).write(downloadResponse)
if (ext === "zip") {
await $`unzip -o -q ${tempPath}`.cwd(Global.Path.bin).nothrow()
} else {
await $`tar -xf ${tempPath}`.cwd(Global.Path.bin).nothrow()
}
await fs.rm(tempPath, { force: true })
bin = path.join(Global.Path.bin, "zls" + (platform === "win32" ? ".exe" : ""))
if (!(await Bun.file(bin).exists())) {
log.error("Failed to extract zls binary")
return
}
if (platform !== "win32") {
await $`chmod +x ${bin}`.nothrow()
}
log.info(`installed zls`, { bin })
}
return {
process: spawn(bin, {
cwd: root,
}),
process: spawn(binary),
}
},
}

View file

@ -91,7 +91,8 @@ export namespace Provider {
if (!info || info.type !== "oauth") return
if (!info.access || info.expires < Date.now()) {
const tokens = await copilot.access(info.refresh)
if (!tokens) throw new Error("GitHub Copilot authentication expired")
if (!tokens)
throw new Error("GitHub Copilot authentication expired")
await Auth.set("github-copilot", {
type: "oauth",
...tokens,
@ -99,27 +100,25 @@ export namespace Provider {
info.access = tokens.access
}
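// Inspect the outgoing request body to determine whether the call originates from the agent loop or directly from the user.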
let isAgentCall = false
let isVisionRequest = false
try {
const body = typeof init.body === "string" ? JSON.parse(init.body) : init.body
const body =
typeof init.body === "string"
? JSON.parse(init.body)
: init.body
if (body?.messages) {
isAgentCall = body.messages.some((msg: any) => msg.role && ["tool", "assistant"].includes(msg.role))
isVisionRequest = body.messages.some(
isAgentCall = body.messages.some(
(msg: any) =>
Array.isArray(msg.content) && msg.content.some((part: any) => part.type === "image_url"),
msg.role && ["tool", "assistant"].includes(msg.role),
)
}
} catch {}
const headers: Record<string, string> = {
const headers = {
...init.headers,
...copilot.HEADERS,
Authorization: `Bearer ${info.access}`,
"Openai-Intent": "conversation-edits",
"X-Initiator": isAgentCall ? "agent" : "user",
}
if (isVisionRequest) {
headers["Copilot-Vision-Request"] = "true"
}
delete headers["x-api-key"]
return fetch(input, {
...init,
@ -139,11 +138,14 @@ export namespace Provider {
}
},
"amazon-bedrock": async () => {
if (!process.env["AWS_PROFILE"] && !process.env["AWS_ACCESS_KEY_ID"]) return { autoload: false }
if (!process.env["AWS_PROFILE"] && !process.env["AWS_ACCESS_KEY_ID"])
return { autoload: false }
const region = process.env["AWS_REGION"] ?? "us-east-1"
const { fromNodeProviderChain } = await import(await BunProc.install("@aws-sdk/credential-providers"))
const { fromNodeProviderChain } = await import(
await BunProc.install("@aws-sdk/credential-providers")
)
return {
autoload: true,
options: {
@ -155,7 +157,9 @@ export namespace Provider {
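// Some Bedrock models must be addressed with a region prefix (us., eu., apac.) derived from the configured AWS region.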
switch (regionPrefix) {
case "us": {
const modelRequiresPrefix = ["claude", "deepseek"].some((m) => modelID.includes(m))
const modelRequiresPrefix = ["claude", "deepseek"].some((m) =>
modelID.includes(m),
)
if (modelRequiresPrefix) {
modelID = `${regionPrefix}.${modelID}`
}
@ -170,18 +174,25 @@ export namespace Provider {
"eu-south-1",
"eu-south-2",
].some((r) => region.includes(r))
const modelRequiresPrefix = ["claude", "nova-lite", "nova-micro", "llama3", "pixtral"].some((m) =>
modelID.includes(m),
)
const modelRequiresPrefix = [
"claude",
"nova-lite",
"nova-micro",
"llama3",
"pixtral",
].some((m) => modelID.includes(m))
if (regionRequiresPrefix && modelRequiresPrefix) {
modelID = `${regionPrefix}.${modelID}`
}
break
}
case "ap": {
const modelRequiresPrefix = ["claude", "nova-lite", "nova-micro", "nova-pro"].some((m) =>
modelID.includes(m),
)
const modelRequiresPrefix = [
"claude",
"nova-lite",
"nova-micro",
"nova-pro",
].some((m) => modelID.includes(m))
if (modelRequiresPrefix) {
regionPrefix = "apac"
modelID = `${regionPrefix}.${modelID}`
@ -219,7 +230,10 @@ export namespace Provider {
options: Record<string, any>
}
} = {}
const models = new Map<string, { info: ModelsDev.Model; language: LanguageModel }>()
const models = new Map<
string,
{ info: ModelsDev.Model; language: LanguageModel }
>()
const sdk = new Map<string, SDK>()
log.info("init")
@ -234,7 +248,7 @@ export namespace Provider {
if (!provider) {
const info = database[id]
if (!info) return
if (info.api && !options["baseURL"]) options["baseURL"] = info.api
if (info.api) options["baseURL"] = info.api
providers[id] = {
source,
info,
@ -294,7 +308,9 @@ export namespace Provider {
database[providerID] = parsed
}
const disabled = await Config.get().then((cfg) => new Set(cfg.disabled_providers ?? []))
const disabled = await Config.get().then(
(cfg) => new Set(cfg.disabled_providers ?? []),
)
// load env
for (const [providerID, provider] of Object.entries(database)) {
if (disabled.has(providerID)) continue
@ -321,7 +337,12 @@ export namespace Provider {
if (disabled.has(providerID)) continue
const result = await fn(database[providerID])
if (result && (result.autoload || providers[providerID])) {
mergeProvider(providerID, result.options ?? {}, "custom", result.getModel)
mergeProvider(
providerID,
result.options ?? {},
"custom",
result.getModel,
)
}
}
@ -358,7 +379,7 @@ export namespace Provider {
const existing = s.sdk.get(provider.id)
if (existing) return existing
const pkg = provider.npm ?? provider.id
const mod = await import(await BunProc.install(pkg, "beta"))
const mod = await import(await BunProc.install(pkg, "latest"))
const fn = mod[Object.keys(mod).find((key) => key.startsWith("create"))!]
const loaded = fn(s.providers[provider.id]?.options)
s.sdk.set(provider.id, loaded)
@ -385,7 +406,9 @@ export namespace Provider {
const sdk = await getSDK(provider.info)
try {
const language = provider.getModel ? await provider.getModel(sdk, modelID) : sdk.languageModel(modelID)
const language = provider.getModel
? await provider.getModel(sdk, modelID)
: sdk.languageModel(modelID)
log.info("found", { providerID, modelID })
s.models.set(key, {
info,
@ -412,7 +435,10 @@ export namespace Provider {
export function sort(models: ModelsDev.Model[]) {
return sortBy(
models,
[(model) => priority.findIndex((filter) => model.id.includes(filter)), "desc"],
[
(model) => priority.findIndex((filter) => model.id.includes(filter)),
"desc",
],
[(model) => (model.id.includes("latest") ? 0 : 1), "asc"],
[(model) => model.id, "desc"],
)
@ -423,7 +449,11 @@ export namespace Provider {
if (cfg.model) return parseModel(cfg.model)
const provider = await list()
.then((val) => Object.values(val))
.then((x) => x.find((p) => !cfg.provider || Object.keys(cfg.provider).includes(p.info.id)))
.then((x) =>
x.find(
(p) => !cfg.provider || Object.keys(cfg.provider).includes(p.info.id),
),
)
if (!provider) throw new Error("no providers found")
const [model] = sort(Object.values(provider.info.models))
if (!model) throw new Error("no models found")
@ -506,11 +536,9 @@ export namespace Provider {
if (schema instanceof z.ZodUnion) {
return z.union(
schema.options.map((option: z.ZodTypeAny) => optionalToNullable(option)) as [
z.ZodTypeAny,
z.ZodTypeAny,
...z.ZodTypeAny[],
],
schema.options.map((option: z.ZodTypeAny) =>
optionalToNullable(option),
) as [z.ZodTypeAny, z.ZodTypeAny, ...z.ZodTypeAny[]],
)
}

View file

@ -1,21 +1,22 @@
import type { ModelMessage } from "ai"
import type { LanguageModelV1Prompt } from "ai"
import { unique } from "remeda"
export namespace ProviderTransform {
export function message(msgs: ModelMessage[], providerID: string, modelID: string) {
export function message(
msgs: LanguageModelV1Prompt,
providerID: string,
modelID: string,
) {
if (providerID === "anthropic" || modelID.includes("anthropic")) {
const system = msgs.filter((msg) => msg.role === "system").slice(0, 2)
const final = msgs.filter((msg) => msg.role !== "system").slice(-2)
for (const msg of unique([...system, ...final])) {
msg.providerOptions = {
...msg.providerOptions,
msg.providerMetadata = {
...msg.providerMetadata,
anthropic: {
cacheControl: { type: "ephemeral" },
},
openaiCompatible: {
cache_control: { type: "ephemeral" },
},
}
}
}
@ -24,8 +25,8 @@ export namespace ProviderTransform {
const final = msgs.filter((msg) => msg.role !== "system").slice(-2)
for (const msg of unique([...system, ...final])) {
msg.providerOptions = {
...msg.providerOptions,
msg.providerMetadata = {
...msg.providerMetadata,
bedrock: {
cachePoint: { type: "ephemeral" },
},

View file

@ -6,6 +6,7 @@ import { streamSSE } from "hono/streaming"
import { Session } from "../session"
import { resolver, validator as zValidator } from "hono-openapi/zod"
import { z } from "zod"
import { Message } from "../session/message"
import { Provider } from "../provider/provider"
import { App } from "../app/app"
import { mapValues } from "remeda"
@ -15,8 +16,6 @@ import { Ripgrep } from "../file/ripgrep"
import { Config } from "../config/config"
import { File } from "../file"
import { LSP } from "../lsp"
import { MessageV2 } from "../session/message-v2"
import { Mode } from "../session/mode"
const ERRORS = {
400: {
@ -52,9 +51,12 @@ export namespace Server {
status: 400,
})
}
return c.json(new NamedError.Unknown({ message: err.toString() }).toObject(), {
return c.json(
new NamedError.Unknown({ message: err.toString() }).toObject(),
{
status: 400,
})
},
)
})
.use(async (c, next) => {
log.info("request", {
@ -405,7 +407,7 @@ export namespace Server {
description: "List of messages",
content: {
"application/json": {
schema: resolver(MessageV2.Info.array()),
schema: resolver(Message.Info.array()),
},
},
},
@ -431,7 +433,7 @@ export namespace Server {
description: "Created message",
content: {
"application/json": {
schema: resolver(MessageV2.Assistant),
schema: resolver(Message.Info),
},
},
},
@ -448,8 +450,7 @@ export namespace Server {
z.object({
providerID: z.string(),
modelID: z.string(),
mode: z.string(),
parts: MessageV2.UserPart.array(),
parts: Message.MessagePart.array(),
}),
),
async (c) => {
@ -480,10 +481,15 @@ export namespace Server {
},
}),
async (c) => {
const providers = await Provider.list().then((x) => mapValues(x, (item) => item.info))
const providers = await Provider.list().then((x) =>
mapValues(x, (item) => item.info),
)
return c.json({
providers: Object.values(providers),
default: mapValues(providers, (item) => Provider.sort(Object.values(item.models))[0].id),
default: mapValues(
providers,
(item) => Provider.sort(Object.values(item.models))[0].id,
),
})
},
)
@ -560,7 +566,7 @@ export namespace Server {
description: "Symbols",
content: {
"application/json": {
schema: resolver(LSP.Symbol.array()),
schema: resolver(z.unknown().array()),
},
},
},
@ -623,7 +629,16 @@ export namespace Server {
description: "File status",
content: {
"application/json": {
schema: resolver(File.Info.array()),
schema: resolver(
z
.object({
file: z.string(),
added: z.number().int(),
removed: z.number().int(),
status: z.enum(["added", "deleted", "modified"]),
})
.array(),
),
},
},
},
@ -634,75 +649,6 @@ export namespace Server {
return c.json(content)
},
)
.post(
"/log",
describeRoute({
description: "Write a log entry to the server logs",
responses: {
200: {
description: "Log entry written successfully",
content: {
"application/json": {
schema: resolver(z.boolean()),
},
},
},
},
}),
zValidator(
"json",
z.object({
service: z.string().openapi({ description: "Service name for the log entry" }),
level: z.enum(["debug", "info", "error", "warn"]).openapi({ description: "Log level" }),
message: z.string().openapi({ description: "Log message" }),
extra: z
.record(z.string(), z.any())
.optional()
.openapi({ description: "Additional metadata for the log entry" }),
}),
),
async (c) => {
const { service, level, message, extra } = c.req.valid("json")
const logger = Log.create({ service })
switch (level) {
case "debug":
logger.debug(message, extra)
break
case "info":
logger.info(message, extra)
break
case "error":
logger.error(message, extra)
break
case "warn":
logger.warn(message, extra)
break
}
return c.json(true)
},
)
.get(
"/mode",
describeRoute({
description: "List all modes",
responses: {
200: {
description: "List of modes",
content: {
"application/json": {
schema: resolver(Mode.Info.array()),
},
},
},
},
}),
async (c) => {
const modes = await Mode.list()
return c.json(modes)
},
)
return result
}

File diff suppressed because it is too large

View file

@ -1,426 +0,0 @@
import z from "zod"
import { Bus } from "../bus"
import { Provider } from "../provider/provider"
import { NamedError } from "../util/error"
import { Message } from "./message"
import { convertToModelMessages, type ModelMessage, type UIMessage } from "ai"
export namespace MessageV2 {
export const OutputLengthError = NamedError.create("MessageOutputLengthError", z.object({}))
export const AbortedError = NamedError.create("MessageAbortedError", z.object({}))
export const ToolStatePending = z
.object({
status: z.literal("pending"),
})
.openapi({
ref: "ToolStatePending",
})
export type ToolStatePending = z.infer<typeof ToolStatePending>
export const ToolStateRunning = z
.object({
status: z.literal("running"),
input: z.any(),
title: z.string().optional(),
metadata: z.record(z.any()).optional(),
time: z.object({
start: z.number(),
}),
})
.openapi({
ref: "ToolStateRunning",
})
export type ToolStateRunning = z.infer<typeof ToolStateRunning>
export const ToolStateCompleted = z
.object({
status: z.literal("completed"),
input: z.record(z.any()),
output: z.string(),
title: z.string(),
metadata: z.record(z.any()),
time: z.object({
start: z.number(),
end: z.number(),
}),
})
.openapi({
ref: "ToolStateCompleted",
})
export type ToolStateCompleted = z.infer<typeof ToolStateCompleted>
export const ToolStateError = z
.object({
status: z.literal("error"),
input: z.record(z.any()),
error: z.string(),
time: z.object({
start: z.number(),
end: z.number(),
}),
})
.openapi({
ref: "ToolStateError",
})
export type ToolStateError = z.infer<typeof ToolStateError>
export const ToolState = z
.discriminatedUnion("status", [ToolStatePending, ToolStateRunning, ToolStateCompleted, ToolStateError])
.openapi({
ref: "ToolState",
})
export const TextPart = z
.object({
type: z.literal("text"),
text: z.string(),
synthetic: z.boolean().optional(),
})
.openapi({
ref: "TextPart",
})
export type TextPart = z.infer<typeof TextPart>
export const ToolPart = z
.object({
type: z.literal("tool"),
id: z.string(),
tool: z.string(),
state: ToolState,
})
.openapi({
ref: "ToolPart",
})
export type ToolPart = z.infer<typeof ToolPart>
export const FilePart = z
.object({
type: z.literal("file"),
mime: z.string(),
filename: z.string().optional(),
url: z.string(),
})
.openapi({
ref: "FilePart",
})
export type FilePart = z.infer<typeof FilePart>
export const StepStartPart = z
.object({
type: z.literal("step-start"),
})
.openapi({
ref: "StepStartPart",
})
export type StepStartPart = z.infer<typeof StepStartPart>
export const StepFinishPart = z
.object({
type: z.literal("step-finish"),
cost: z.number(),
tokens: z.object({
input: z.number(),
output: z.number(),
reasoning: z.number(),
cache: z.object({
read: z.number(),
write: z.number(),
}),
}),
})
.openapi({
ref: "StepFinishPart",
})
export type StepFinishPart = z.infer<typeof StepFinishPart>
const Base = z.object({
id: z.string(),
sessionID: z.string(),
})
export const UserPart = z.discriminatedUnion("type", [TextPart, FilePart]).openapi({
ref: "UserMessagePart",
})
export type UserPart = z.infer<typeof UserPart>
export const User = Base.extend({
role: z.literal("user"),
parts: z.array(UserPart),
time: z.object({
created: z.number(),
}),
}).openapi({
ref: "UserMessage",
})
export type User = z.infer<typeof User>
export const AssistantPart = z
.discriminatedUnion("type", [TextPart, ToolPart, StepStartPart, StepFinishPart])
.openapi({
ref: "AssistantMessagePart",
})
export type AssistantPart = z.infer<typeof AssistantPart>
export const Assistant = Base.extend({
role: z.literal("assistant"),
parts: z.array(AssistantPart),
time: z.object({
created: z.number(),
completed: z.number().optional(),
}),
error: z
.discriminatedUnion("name", [
Provider.AuthError.Schema,
NamedError.Unknown.Schema,
OutputLengthError.Schema,
AbortedError.Schema,
])
.optional(),
system: z.string().array(),
modelID: z.string(),
providerID: z.string(),
path: z.object({
cwd: z.string(),
root: z.string(),
}),
summary: z.boolean().optional(),
cost: z.number(),
tokens: z.object({
input: z.number(),
output: z.number(),
reasoning: z.number(),
cache: z.object({
read: z.number(),
write: z.number(),
}),
}),
}).openapi({
ref: "AssistantMessage",
})
export type Assistant = z.infer<typeof Assistant>
export const Info = z.discriminatedUnion("role", [User, Assistant]).openapi({
ref: "Message",
})
export type Info = z.infer<typeof Info>
export const Event = {
Updated: Bus.event(
"message.updated",
z.object({
info: Info,
}),
),
Removed: Bus.event(
"message.removed",
z.object({
sessionID: z.string(),
messageID: z.string(),
}),
),
PartUpdated: Bus.event(
"message.part.updated",
z.object({
part: AssistantPart,
sessionID: z.string(),
messageID: z.string(),
}),
),
}
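// Convert a stored v1 message into the v2 shape, mapping tool invocation states onto the ToolState union.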
export function fromV1(v1: Message.Info) {
if (v1.role === "assistant") {
const result: Assistant = {
id: v1.id,
sessionID: v1.metadata.sessionID,
role: "assistant",
time: {
created: v1.metadata.time.created,
completed: v1.metadata.time.completed,
},
cost: v1.metadata.assistant!.cost,
path: v1.metadata.assistant!.path,
summary: v1.metadata.assistant!.summary,
tokens: v1.metadata.assistant!.tokens,
modelID: v1.metadata.assistant!.modelID,
providerID: v1.metadata.assistant!.providerID,
system: v1.metadata.assistant!.system,
error: v1.metadata.error,
parts: v1.parts.flatMap((part): AssistantPart[] => {
if (part.type === "text") {
return [
{
type: "text",
text: part.text,
},
]
}
if (part.type === "step-start") {
return [
{
type: "step-start",
},
]
}
if (part.type === "tool-invocation") {
return [
{
type: "tool",
id: part.toolInvocation.toolCallId,
tool: part.toolInvocation.toolName,
state: (() => {
if (part.toolInvocation.state === "partial-call") {
return {
status: "pending",
}
}
const { title, time, ...metadata } = v1.metadata.tool[part.toolInvocation.toolCallId] ?? {}
if (part.toolInvocation.state === "call") {
return {
status: "running",
input: part.toolInvocation.args,
time: {
start: time?.start,
},
}
}
if (part.toolInvocation.state === "result") {
return {
status: "completed",
input: part.toolInvocation.args,
output: part.toolInvocation.result,
title,
time,
metadata,
}
}
throw new Error("unknown tool invocation state")
})(),
},
]
}
return []
}),
}
return result
}
if (v1.role === "user") {
const result: User = {
id: v1.id,
sessionID: v1.metadata.sessionID,
role: "user",
time: {
created: v1.metadata.time.created,
},
parts: v1.parts.flatMap((part): UserPart[] => {
if (part.type === "text") {
return [
{
type: "text",
text: part.text,
},
]
}
if (part.type === "file") {
return [
{
type: "file",
mime: part.mediaType,
filename: part.filename,
url: part.url,
},
]
}
return []
}),
}
return result
}
}
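// Rebuild UIMessage parts from the stored messages and let the ai SDK convert them into model messages.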
export function toModelMessage(input: Info[]): ModelMessage[] {
const result: UIMessage[] = []
for (const msg of input) {
if (msg.parts.length === 0) continue
if (msg.role === "user") {
result.push({
id: msg.id,
role: "user",
parts: msg.parts.flatMap((part): UIMessage["parts"] => {
if (part.type === "text")
return [
{
type: "text",
text: part.text,
},
]
if (part.type === "file")
return [
{
type: "file",
url: part.url,
mediaType: part.mime,
filename: part.filename,
},
]
return []
}),
})
}
if (msg.role === "assistant") {
result.push({
id: msg.id,
role: "assistant",
parts: msg.parts.flatMap((part): UIMessage["parts"] => {
if (part.type === "text")
return [
{
type: "text",
text: part.text,
},
]
if (part.type === "step-start")
return [
{
type: "step-start",
},
]
if (part.type === "tool") {
if (part.state.status === "completed")
return [
{
type: ("tool-" + part.tool) as `tool-${string}`,
state: "output-available",
toolCallId: part.id,
input: part.state.input,
output: part.state.output,
},
]
if (part.state.status === "error")
return [
{
type: ("tool-" + part.tool) as `tool-${string}`,
state: "output-error",
toolCallId: part.id,
input: part.state.input,
errorText: part.state.error,
},
]
}
return []
}),
})
}
}
return convertToModelMessages(result)
}
}

View file

@ -1,9 +1,13 @@
import z from "zod"
import { Bus } from "../bus"
import { Provider } from "../provider/provider"
import { NamedError } from "../util/error"
export namespace Message {
export const OutputLengthError = NamedError.create("MessageOutputLengthError", z.object({}))
export const OutputLengthError = NamedError.create(
"MessageOutputLengthError",
z.object({}),
)
export const ToolCall = z
.object({
@ -45,7 +49,9 @@ export namespace Message {
})
export type ToolResult = z.infer<typeof ToolResult>
export const ToolInvocation = z.discriminatedUnion("state", [ToolCall, ToolPartialCall, ToolResult]).openapi({
export const ToolInvocation = z
.discriminatedUnion("state", [ToolCall, ToolPartialCall, ToolResult])
.openapi({
ref: "ToolInvocation",
})
export type ToolInvocation = z.infer<typeof ToolInvocation>
@ -116,7 +122,14 @@ export namespace Message {
export type StepStartPart = z.infer<typeof StepStartPart>
export const MessagePart = z
.discriminatedUnion("type", [TextPart, ReasoningPart, ToolInvocationPart, SourceUrlPart, FilePart, StepStartPart])
.discriminatedUnion("type", [
TextPart,
ReasoningPart,
ToolInvocationPart,
SourceUrlPart,
FilePart,
StepStartPart,
])
.openapi({
ref: "MessagePart",
})
@ -184,4 +197,28 @@ export namespace Message {
ref: "Message",
})
export type Info = z.infer<typeof Info>
export const Event = {
Updated: Bus.event(
"message.updated",
z.object({
info: Info,
}),
),
Removed: Bus.event(
"message.removed",
z.object({
sessionID: z.string(),
messageID: z.string(),
}),
),
PartUpdated: Bus.event(
"message.part.updated",
z.object({
part: MessagePart,
sessionID: z.string(),
messageID: z.string(),
}),
),
}
}

View file

@ -1,70 +0,0 @@
import { mergeDeep } from "remeda"
import { App } from "../app/app"
import { Config } from "../config/config"
import z from "zod"
export namespace Mode {
export const Info = z
.object({
name: z.string(),
model: z
.object({
modelID: z.string(),
providerID: z.string(),
})
.optional(),
prompt: z.string().optional(),
tools: z.record(z.boolean()),
})
.openapi({
ref: "Mode",
})
export type Info = z.infer<typeof Info>
const state = App.state("mode", async () => {
const cfg = await Config.get()
const mode = mergeDeep(
{
build: {},
plan: {
tools: {
write: false,
edit: false,
patch: false,
bash: false,
},
},
},
cfg.mode ?? {},
)
const result: Record<string, Info> = {}
for (const [key, value] of Object.entries(mode)) {
let item = result[key]
if (!item)
item = result[key] = {
name: key,
tools: {},
}
const model = value.model ?? cfg.model
if (model) {
const [providerID, ...rest] = model.split("/")
const modelID = rest.join("/")
item.model = {
modelID,
providerID,
}
}
if (value.prompt) item.prompt = value.prompt
if (value.tools) item.tools = value.tools
}
return result
})
export async function get(mode: string) {
return state().then((x) => x[mode])
}
export async function list() {
return state().then((x) => Object.values(x))
}
}

View file

@ -1,95 +0,0 @@
You are an agent known as opencode - please keep going until the user's query is completely resolved, before ending your turn and yielding back to the user.
Your thinking should be thorough and so it's fine if it's very long. However, avoid unnecessary repetition and verbosity. You should be concise, but thorough.
You MUST iterate and keep going until the problem is solved.
I want you to fully solve this autonomously before coming back to me.
Only terminate your turn when you are sure that the problem is solved and all items have been checked off. Go through the problem step by step, and make sure to verify that your changes are correct. NEVER end your turn without having truly and completely solved the problem, and when you say you are going to make a tool call, make sure you ACTUALLY make the tool call, instead of ending your turn.
Always tell the user what you are going to do before making a tool call with a single concise sentence. This will help them understand what you are doing and why.
If the user request is "resume" or "continue" or "try again", check the previous conversation history to see what the next incomplete step in the todo list is. Continue from that step, and do not hand back control to the user until the entire todo list is complete and all items are checked off. Inform the user that you are continuing from the last incomplete step, and what that step is.
Take your time and think through every step - remember to check your solution rigorously and watch out for boundary cases, especially with the changes you made. Your solution must be perfect. If not, continue working on it. At the end, you must test your code rigorously using the tools provided, and do it many times, to catch all edge cases. If it is not robust, iterate more and make it perfect. Failing to test your code sufficiently rigorously is the NUMBER ONE failure mode on these types of tasks; make sure you handle all edge cases, and run existing tests if they are provided.
You MUST plan extensively before each function call, and reflect extensively on the outcomes of the previous function calls. DO NOT do this entire process by making function calls only, as this can impair your ability to solve the problem and think insightfully.
# Workflow
1. Understand the problem deeply. Carefully read the issue and think critically about what is required.
2. Investigate the codebase. Explore relevant files, search for key functions, and gather context.
3. Develop a clear, step-by-step plan. Break down the fix into manageable, incremental steps. Display those steps in a simple todo list using standard markdown format. Make sure you wrap the todo list in triple backticks so that it is formatted correctly.
4. Implement the fix incrementally. Make small, testable code changes.
5. Debug as needed. Use debugging techniques to isolate and resolve issues.
6. Test frequently. Run tests after each change to verify correctness.
7. Iterate until the root cause is fixed and all tests pass.
8. Reflect and validate comprehensively. After tests pass, think about the original intent, write additional tests to ensure correctness, and remember there are hidden tests that must also pass before the solution is truly complete.
Refer to the detailed sections below for more information on each step.
## 1. Deeply Understand the Problem
Carefully read the issue and think hard about a plan to solve it before coding.
## 2. Codebase Investigation
- Explore relevant files and directories.
- Search for key functions, classes, or variables related to the issue.
- Read and understand relevant code snippets.
- Identify the root cause of the problem.
- Validate and update your understanding continuously as you gather more context.
## 3. Fetch Provided URLs
- If the user provides a URL, use the `functions.fetch_webpage` tool to retrieve the content of the provided URL.
- After fetching, review the content returned by the fetch tool.
- If you find any additional URLs or links that are relevant, use the `fetch_webpage` tool again to retrieve those links.
- Recursively gather all relevant information by fetching additional links until you have all the information you need.
## 4. Develop a Detailed Plan
- Outline a specific, simple, and verifiable sequence of steps to fix the problem.
- Create a todo list in markdown format to track your progress.
- Each time you complete a step, check it off using `[x]` syntax.
- Each time you check off a step, display the updated todo list to the user.
- Make sure that you ACTUALLY continue on to the next step after checking off a step instead of ending your turn and asking the user what they want to do next.
## 5. Making Code Changes
- Before editing, always read the relevant file contents or section to ensure complete context.
- Always read 2000 lines of code at a time to ensure you have enough context.
- If a patch is not applied correctly, attempt to reapply it.
- Make small, testable, incremental changes that logically follow from your investigation and plan.
## 6. Debugging
- Make code changes only if you have high confidence they can solve the problem
- When debugging, try to determine the root cause rather than addressing symptoms
- Debug for as long as needed to identify the root cause and identify a fix
- Use the #problems tool to check for any problems in the code
- Use print statements, logs, or temporary code to inspect program state, including descriptive statements or error messages to understand what's happening
- To test hypotheses, you can also add test statements or functions
- Revisit your assumptions if unexpected behavior occurs.
# Fetch Webpage
Use the `webfetch` tool when the user provides a URL. Follow these steps exactly.
1. Use the `webfetch` tool to retrieve the content of the provided URL.
2. After fetching, review the content returned by the fetch tool.
3. If you find any additional URLs or links that are relevant, use the `webfetch` tool again to retrieve those links.
4. Go back to step 2 and repeat until you have all the information you need.
IMPORTANT: Recursively fetching links is crucial. You are not allowed to skip this step, as it ensures you have all the necessary context to complete the task.
# How to create a Todo List
Use the following format to create a todo list:
```markdown
- [ ] Step 1: Description of the first step
- [ ] Step 2: Description of the second step
- [ ] Step 3: Description of the third step
```
Do not ever use HTML tags or any other formatting for the todo list, as it will not be rendered correctly. Always use the markdown format shown above.
# Creating Files
Each time you are going to create a file, use a single concise sentence to inform the user of what you are creating and why.
# Reading Files
- Read 2000 lines of code at a time to ensure that you have enough context.
- Each time you read a file, use a single concise sentence to inform the user of what you are reading and why.

View file

@ -1,3 +0,0 @@
<system-reminder>
Plan mode is active. The user indicated that they do not want you to execute yet -- you MUST NOT make any edits, run any non-readonly tools (including changing configs or making commits), or otherwise make any changes to the system. This supersedes any other instructions you have received (for example, to make edits).
</system-reminder>

View file

@ -7,16 +7,23 @@ import path from "path"
import os from "os"
import PROMPT_ANTHROPIC from "./prompt/anthropic.txt"
import PROMPT_BEAST from "./prompt/beast.txt"
import PROMPT_ANTHROPIC_SPOOF from "./prompt/anthropic_spoof.txt"
import PROMPT_SUMMARIZE from "./prompt/summarize.txt"
import PROMPT_TITLE from "./prompt/title.txt"
export namespace SystemPrompt {
export function provider(providerID: string, modelID: string) {
if (providerID === "anthropic") return [PROMPT_ANTHROPIC_SPOOF.trim(), PROMPT_ANTHROPIC]
if (modelID.includes("gpt-")) return [PROMPT_BEAST]
return [PROMPT_ANTHROPIC]
export function provider(providerID: string) {
const result = []
switch (providerID) {
case "anthropic":
result.push(PROMPT_ANTHROPIC_SPOOF.trim())
result.push(PROMPT_ANTHROPIC)
break
default:
result.push(PROMPT_ANTHROPIC)
break
}
return result
}
export async function environment() {

View file

@ -53,7 +53,9 @@ export namespace Share {
export const URL =
process.env["OPENCODE_API"] ??
(Installation.isSnapshot() || Installation.isDev() ? "https://api.dev.opencode.ai" : "https://api.opencode.ai")
(Installation.isSnapshot() || Installation.isDev()
? "https://api.dev.opencode.ai"
: "https://api.opencode.ai")
export async function create(sessionID: string) {
return fetch(`${URL}/share_create`, {

View file

@ -55,7 +55,9 @@ export namespace Snapshot {
log.info("restore", { commit })
const app = App.info()
const git = gitdir(sessionID)
await $`git --git-dir=${git} checkout ${commit} --force`.quiet().cwd(app.path.root)
await $`git --git-dir=${git} checkout ${commit} --force`
.quiet()
.cwd(app.path.root)
}
function gitdir(sessionID: string) {

View file

@ -4,80 +4,44 @@ import { Bus } from "../bus"
import path from "path"
import z from "zod"
import fs from "fs/promises"
import { MessageV2 } from "../session/message-v2"
export namespace Storage {
const log = Log.create({ service: "storage" })
export const Event = {
Write: Bus.event("storage.write", z.object({ key: z.string(), content: z.any() })),
Write: Bus.event(
"storage.write",
z.object({ key: z.string(), content: z.any() }),
),
}
type Migration = (dir: string) => Promise<void>
const MIGRATIONS: Migration[] = [
async (dir: string) => {
try {
const files = new Bun.Glob("session/message/*/*.json").scanSync({
cwd: dir,
absolute: true,
})
for (const file of files) {
const content = await Bun.file(file).json()
if (!content.metadata) continue
log.info("migrating to v2 message", { file })
try {
const result = MessageV2.fromV1(content)
await Bun.write(file, JSON.stringify(result, null, 2))
} catch (e) {
await fs.rename(file, file.replace("storage", "broken"))
}
}
} catch {}
},
]
const state = App.state("storage", async () => {
const state = App.state("storage", () => {
const app = App.info()
const dir = path.normalize(path.join(app.path.data, "storage"))
await fs.mkdir(dir, { recursive: true })
const migration = await Bun.file(path.join(dir, "migration"))
.json()
.then((x) => parseInt(x))
.catch(() => 0)
for (let index = migration; index < MIGRATIONS.length; index++) {
log.info("running migration", { index })
const migration = MIGRATIONS[index]
await migration(dir)
await Bun.write(path.join(dir, "migration"), (index + 1).toString())
}
const dir = path.join(app.path.data, "storage")
log.info("init", { path: dir })
return {
dir,
}
})
export async function remove(key: string) {
const dir = await state().then((x) => x.dir)
const target = path.join(dir, key + ".json")
const target = path.join(state().dir, key + ".json")
await fs.unlink(target).catch(() => {})
}
export async function removeDir(key: string) {
const dir = await state().then((x) => x.dir)
const target = path.join(dir, key)
const target = path.join(state().dir, key)
await fs.rm(target, { recursive: true, force: true }).catch(() => {})
}
export async function readJSON<T>(key: string) {
const dir = await state().then((x) => x.dir)
return Bun.file(path.join(dir, key + ".json")).json() as Promise<T>
return Bun.file(path.join(state().dir, key + ".json")).json() as Promise<T>
}
export async function writeJSON<T>(key: string, content: T) {
const dir = await state().then((x) => x.dir)
const target = path.join(dir, key + ".json")
const target = path.join(state().dir, key + ".json")
const tmp = target + Date.now() + ".tmp"
await Bun.write(tmp, JSON.stringify(content, null, 2))
await Bun.write(tmp, JSON.stringify(content))
await fs.rename(tmp, target).catch(() => {})
await fs.unlink(tmp).catch(() => {})
Bus.publish(Event.Write, { key, content })
@ -85,10 +49,9 @@ export namespace Storage {
const glob = new Bun.Glob("**/*")
export async function* list(prefix: string) {
const dir = await state().then((x) => x.dir)
try {
for await (const item of glob.scan({
cwd: path.join(dir, prefix),
cwd: path.join(state().dir, prefix),
onlyFiles: true,
})) {
const result = path.join(prefix, item.slice(0, -5))

View file

@ -4,6 +4,25 @@ import DESCRIPTION from "./bash.txt"
import { App } from "../app/app"
const MAX_OUTPUT_LENGTH = 30000
const BANNED_COMMANDS = [
"alias",
"curl",
"curlie",
"wget",
"axel",
"aria2c",
"nc",
"telnet",
"lynx",
"w3m",
"links",
"httpie",
"xh",
"http-prompt",
"chrome",
"firefox",
"safari",
]
const DEFAULT_TIMEOUT = 1 * 60 * 1000
const MAX_TIMEOUT = 10 * 60 * 1000
@ -12,7 +31,12 @@ export const BashTool = Tool.define({
description: DESCRIPTION,
parameters: z.object({
command: z.string().describe("The command to execute"),
timeout: z.number().min(0).max(MAX_TIMEOUT).describe("Optional timeout in milliseconds").optional(),
timeout: z
.number()
.min(0)
.max(MAX_TIMEOUT)
.describe("Optional timeout in milliseconds")
.optional(),
description: z
.string()
.describe(
@ -21,6 +45,8 @@ export const BashTool = Tool.define({
}),
async execute(params, ctx) {
const timeout = Math.min(params.timeout ?? DEFAULT_TIMEOUT, MAX_TIMEOUT)
if (BANNED_COMMANDS.some((item) => params.command.startsWith(item)))
throw new Error(`Command '${params.command}' is not allowed`)
const process = Bun.spawn({
cmd: ["bash", "-c", params.command],
@ -36,14 +62,21 @@ export const BashTool = Tool.define({
const stderr = await new Response(process.stderr).text()
return {
title: params.command,
metadata: {
stderr,
stdout,
exit: process.exitCode,
description: params.description,
title: params.command,
},
output: [`<stdout>`, stdout ?? "", `</stdout>`, `<stderr>`, stderr ?? "", `</stderr>`].join("\n"),
output: [
`<stdout>`,
stdout ?? "",
`</stdout>`,
`<stderr>`,
stderr ?? "",
`</stderr>`,
].join("\n"),
}
},
})

View file

@ -20,8 +20,15 @@ export const EditTool = Tool.define({
parameters: z.object({
filePath: z.string().describe("The absolute path to the file to modify"),
oldString: z.string().describe("The text to replace"),
newString: z.string().describe("The text to replace it with (must be different from old_string)"),
replaceAll: z.boolean().optional().describe("Replace all occurrences of old_string (default false)"),
newString: z
.string()
.describe(
"The text to replace it with (must be different from old_string)",
),
replaceAll: z
.boolean()
.optional()
.describe("Replace all occurrences of old_string (default false)"),
}),
async execute(params, ctx) {
if (!params.filePath) {
@ -33,7 +40,9 @@ export const EditTool = Tool.define({
}
const app = App.info()
const filepath = path.isAbsolute(params.filePath) ? params.filePath : path.join(app.path.cwd, params.filePath)
const filepath = path.isAbsolute(params.filePath)
? params.filePath
: path.join(app.path.cwd, params.filePath)
await Permission.ask({
id: "edit",
@ -61,11 +70,17 @@ export const EditTool = Tool.define({
const file = Bun.file(filepath)
const stats = await file.stat().catch(() => {})
if (!stats) throw new Error(`File ${filepath} not found`)
if (stats.isDirectory()) throw new Error(`Path is a directory, not a file: ${filepath}`)
if (stats.isDirectory())
throw new Error(`Path is a directory, not a file: ${filepath}`)
await FileTime.assert(ctx.sessionID, filepath)
contentOld = await file.text()
contentNew = replace(contentOld, params.oldString, params.newString, params.replaceAll)
contentNew = replace(
contentOld,
params.oldString,
params.newString,
params.replaceAll,
)
await file.write(contentNew)
await Bus.publish(File.Event.Edited, {
file: filepath,
@ -73,7 +88,9 @@ export const EditTool = Tool.define({
contentNew = await file.text()
})()
const diff = trimDiff(createTwoFilesPatch(filepath, filepath, contentOld, contentNew))
const diff = trimDiff(
createTwoFilesPatch(filepath, filepath, contentOld, contentNew),
)
FileTime.read(ctx.sessionID, filepath)
@ -93,14 +110,17 @@ export const EditTool = Tool.define({
metadata: {
diagnostics,
diff,
},
title: `${path.relative(app.path.root, filepath)}`,
},
output,
}
},
})
export type Replacer = (content: string, find: string) => Generator<string, void, unknown>
export type Replacer = (
content: string,
find: string,
) => Generator<string, void, unknown>
export const SimpleReplacer: Replacer = function* (_content, find) {
yield find
@ -188,7 +208,10 @@ export const BlockAnchorReplacer: Replacer = function* (content, find) {
}
}
export const WhitespaceNormalizedReplacer: Replacer = function* (content, find) {
export const WhitespaceNormalizedReplacer: Replacer = function* (
content,
find,
) {
const normalizeWhitespace = (text: string) => text.replace(/\s+/g, " ").trim()
const normalizedFind = normalizeWhitespace(find)
@ -206,7 +229,9 @@ export const WhitespaceNormalizedReplacer: Replacer = function* (content, find)
// Find the actual substring in the original line that matches
const words = find.trim().split(/\s+/)
if (words.length > 0) {
const pattern = words.map((word) => word.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")).join("\\s+")
const pattern = words
.map((word) => word.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"))
.join("\\s+")
try {
const regex = new RegExp(pattern)
const match = line.match(regex)
@ -245,7 +270,9 @@ export const IndentationFlexibleReplacer: Replacer = function* (content, find) {
}),
)
return lines.map((line) => (line.trim().length === 0 ? line : line.slice(minIndent))).join("\n")
return lines
.map((line) => (line.trim().length === 0 ? line : line.slice(minIndent)))
.join("\n")
}
const normalizedFind = removeIndentation(find)
@ -396,7 +423,10 @@ export const ContextAwareReplacer: Replacer = function* (content, find) {
}
}
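// Accept the candidate block when it has no non-empty lines or when at least half of them match the search text.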
if (totalNonEmptyLines === 0 || matchingLines / totalNonEmptyLines >= 0.5) {
if (
totalNonEmptyLines === 0 ||
matchingLines / totalNonEmptyLines >= 0.5
) {
yield block
break // Only match the first occurrence
}
@ -443,7 +473,12 @@ function trimDiff(diff: string): string {
return trimmedLines.join("\n")
}
export function replace(content: string, oldString: string, newString: string, replaceAll = false): string {
export function replace(
content: string,
oldString: string,
newString: string,
replaceAll = false,
): string {
if (oldString === newString) {
throw new Error("oldString and newString must be different")
}
@ -467,7 +502,11 @@ export function replace(content: string, oldString: string, newString: string, r
}
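// Skip replacers whose match is ambiguous: the match must appear exactly once in the content.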
const lastIndex = content.lastIndexOf(search)
if (index !== lastIndex) continue
return content.substring(0, index) + newString + content.substring(index + search.length)
return (
content.substring(0, index) +
newString +
content.substring(index + search.length)
)
}
}
throw new Error("oldString not found in content or was found multiple times")

View file

@ -20,14 +20,16 @@ export const GlobTool = Tool.define({
async execute(params) {
const app = App.info()
let search = params.path ?? app.path.cwd
search = path.isAbsolute(search) ? search : path.resolve(app.path.cwd, search)
search = path.isAbsolute(search)
? search
: path.resolve(app.path.cwd, search)
const limit = 100
const files = []
let truncated = false
for (const file of await Ripgrep.files({
cwd: search,
glob: [params.pattern],
glob: params.pattern,
})) {
if (files.length >= limit) {
truncated = true
@ -51,15 +53,17 @@ export const GlobTool = Tool.define({
output.push(...files.map((f) => f.path))
if (truncated) {
output.push("")
output.push("(Results are truncated. Consider using a more specific path or pattern.)")
output.push(
"(Results are truncated. Consider using a more specific path or pattern.)",
)
}
}
return {
title: path.relative(app.path.root, search),
metadata: {
count: files.length,
truncated,
title: path.relative(app.path.root, search),
},
output: output.join("\n"),
}

View file

@ -9,9 +9,21 @@ export const GrepTool = Tool.define({
id: "grep",
description: DESCRIPTION,
parameters: z.object({
pattern: z.string().describe("The regex pattern to search for in file contents"),
path: z.string().optional().describe("The directory to search in. Defaults to the current working directory."),
include: z.string().optional().describe('File pattern to include in the search (e.g. "*.js", "*.{ts,tsx}")'),
pattern: z
.string()
.describe("The regex pattern to search for in file contents"),
path: z
.string()
.optional()
.describe(
"The directory to search in. Defaults to the current working directory.",
),
include: z
.string()
.optional()
.describe(
'File pattern to include in the search (e.g. "*.js", "*.{ts,tsx}")',
),
}),
async execute(params) {
if (!params.pattern) {
@ -39,8 +51,7 @@ export const GrepTool = Tool.define({
if (exitCode === 1) {
return {
title: params.pattern,
metadata: { matches: 0, truncated: false },
metadata: { matches: 0, truncated: false, title: params.pattern },
output: "No files found",
}
}
@ -82,8 +93,7 @@ export const GrepTool = Tool.define({
if (finalMatches.length === 0) {
return {
title: params.pattern,
metadata: { matches: 0, truncated: false },
metadata: { matches: 0, truncated: false, title: params.pattern },
output: "No files found",
}
}
@ -104,14 +114,16 @@ export const GrepTool = Tool.define({
if (truncated) {
outputLines.push("")
outputLines.push("(Results are truncated. Consider using a more specific path or pattern.)")
outputLines.push(
"(Results are truncated. Consider using a more specific path or pattern.)",
)
}
return {
title: params.pattern,
metadata: {
matches: finalMatches.length,
truncated,
title: params.pattern,
},
output: outputLines.join("\n"),
}

View file

@ -16,8 +16,6 @@ export const IGNORE_PATTERNS = [
"obj/",
".idea/",
".vscode/",
".zig-cache/",
"zig-out",
]
const LIMIT = 100
@ -26,8 +24,16 @@ export const ListTool = Tool.define({
id: "list",
description: DESCRIPTION,
parameters: z.object({
path: z.string().describe("The absolute path to the directory to list (must be absolute, not relative)").optional(),
ignore: z.array(z.string()).describe("List of glob patterns to ignore").optional(),
path: z
.string()
.describe(
"The absolute path to the directory to list (must be absolute, not relative)",
)
.optional(),
ignore: z
.array(z.string())
.describe("List of glob patterns to ignore")
.optional(),
}),
async execute(params) {
const app = App.info()
@ -38,7 +44,8 @@ export const ListTool = Tool.define({
for await (const file of glob.scan({ cwd: searchPath, dot: true })) {
if (IGNORE_PATTERNS.some((p) => file.includes(p))) continue
if (params.ignore?.some((pattern) => new Bun.Glob(pattern).match(file))) continue
if (params.ignore?.some((pattern) => new Bun.Glob(pattern).match(file)))
continue
files.push(file)
if (files.length >= LIMIT) break
}
@ -92,10 +99,10 @@ export const ListTool = Tool.define({
const output = `${searchPath}/\n` + renderDir(".", 0)
return {
title: path.relative(app.path.root, searchPath),
metadata: {
count: files.length,
truncated: files.length >= LIMIT,
title: path.relative(app.path.root, searchPath),
},
output,
}

View file

@ -13,16 +13,20 @@ export const LspDiagnosticTool = Tool.define({
}),
execute: async (args) => {
const app = App.info()
const normalized = path.isAbsolute(args.path) ? args.path : path.join(app.path.cwd, args.path)
const normalized = path.isAbsolute(args.path)
? args.path
: path.join(app.path.cwd, args.path)
await LSP.touchFile(normalized, true)
const diagnostics = await LSP.diagnostics()
const file = diagnostics[normalized]
return {
title: path.relative(app.path.root, normalized),
metadata: {
diagnostics,
title: path.relative(app.path.root, normalized),
},
output: file?.length ? file.map(LSP.Diagnostic.pretty).join("\n") : "No errors found",
output: file?.length
? file.map(LSP.Diagnostic.pretty).join("\n")
: "No errors found",
}
},
})

View file

@ -15,7 +15,9 @@ export const LspHoverTool = Tool.define({
}),
execute: async (args) => {
const app = App.info()
const file = path.isAbsolute(args.file) ? args.file : path.join(app.path.cwd, args.file)
const file = path.isAbsolute(args.file)
? args.file
: path.join(app.path.cwd, args.file)
await LSP.touchFile(file, true)
const result = await LSP.hover({
...args,
@ -23,9 +25,14 @@ export const LspHoverTool = Tool.define({
})
return {
title: path.relative(app.path.root, file) + ":" + args.line + ":" + args.character,
metadata: {
result,
title:
path.relative(app.path.root, file) +
":" +
args.line +
":" +
args.character,
},
output: JSON.stringify(result, null, 2),
}

View file

@ -10,7 +10,9 @@ export const MultiEditTool = Tool.define({
description: DESCRIPTION,
parameters: z.object({
filePath: z.string().describe("The absolute path to the file to modify"),
edits: z.array(EditTool.parameters).describe("Array of edit operations to perform sequentially on the file"),
edits: z
.array(EditTool.parameters)
.describe("Array of edit operations to perform sequentially on the file"),
}),
async execute(params, ctx) {
const results = []
@ -28,9 +30,9 @@ export const MultiEditTool = Tool.define({
}
const app = App.info()
return {
title: path.relative(app.path.root, params.filePath),
metadata: {
results: results.map((r) => r.metadata),
title: path.relative(app.path.root, params.filePath),
},
output: results.at(-1)!.output,
}

View file

@ -6,7 +6,9 @@ import { FileTime } from "../file/time"
import DESCRIPTION from "./patch.txt"
const PatchParams = z.object({
patchText: z.string().describe("The full patch text that describes all changes to be made"),
patchText: z
.string()
.describe("The full patch text that describes all changes to be made"),
})
interface Change {
@ -40,7 +42,10 @@ function identifyFilesNeeded(patchText: string): string[] {
const files: string[] = []
const lines = patchText.split("\n")
for (const line of lines) {
if (line.startsWith("*** Update File:") || line.startsWith("*** Delete File:")) {
if (
line.startsWith("*** Update File:") ||
line.startsWith("*** Delete File:")
) {
const filePath = line.split(":", 2)[1]?.trim()
if (filePath) files.push(filePath)
}
@ -60,7 +65,10 @@ function identifyFilesAdded(patchText: string): string[] {
return files
}
function textToPatch(patchText: string, _currentFiles: Record<string, string>): [PatchOperation[], number] {
function textToPatch(
patchText: string,
_currentFiles: Record<string, string>,
): [PatchOperation[], number] {
const operations: PatchOperation[] = []
const lines = patchText.split("\n")
let i = 0
@ -85,7 +93,11 @@ function textToPatch(patchText: string, _currentFiles: Record<string, string>):
const changes: PatchChange[] = []
i++
while (i < lines.length && !lines[i].startsWith("@@") && !lines[i].startsWith("***")) {
while (
i < lines.length &&
!lines[i].startsWith("@@") &&
!lines[i].startsWith("***")
) {
const changeLine = lines[i]
if (changeLine.startsWith(" ")) {
changes.push({ type: "keep", content: changeLine.substring(1) })
@ -139,7 +151,10 @@ function textToPatch(patchText: string, _currentFiles: Record<string, string>):
return [operations, fuzz]
}
function patchToCommit(operations: PatchOperation[], currentFiles: Record<string, string>): Commit {
function patchToCommit(
operations: PatchOperation[],
currentFiles: Record<string, string>,
): Commit {
const changes: Record<string, Change> = {}
for (const op of operations) {
@ -158,7 +173,9 @@ function patchToCommit(operations: PatchOperation[], currentFiles: Record<string
const lines = originalContent.split("\n")
for (const hunk of op.hunks) {
const contextIndex = lines.findIndex((line) => line.includes(hunk.contextLine))
const contextIndex = lines.findIndex((line) =>
line.includes(hunk.contextLine),
)
if (contextIndex === -1) {
throw new Error(`Context line not found: ${hunk.contextLine}`)
}
@ -187,7 +204,11 @@ function patchToCommit(operations: PatchOperation[], currentFiles: Record<string
return { changes }
}
function generateDiff(oldContent: string, newContent: string, filePath: string): [string, number, number] {
function generateDiff(
oldContent: string,
newContent: string,
filePath: string,
): [string, number, number] {
// Mock implementation - would need actual diff generation
const lines1 = oldContent.split("\n")
const lines2 = newContent.split("\n")
@ -275,7 +296,9 @@ export const PatchTool = Tool.define({
// Process the patch
const [patch, fuzz] = textToPatch(params.patchText, currentFiles)
if (fuzz > 3) {
throw new Error(`patch contains fuzzy matches (fuzz level: ${fuzz}). Please make your context lines more precise`)
throw new Error(
`patch contains fuzzy matches (fuzz level: ${fuzz}). Please make your context lines more precise`,
)
}
// Convert patch to commit
@ -320,7 +343,11 @@ export const PatchTool = Tool.define({
const newContent = change.new_content || ""
// Calculate diff statistics
const [, additions, removals] = generateDiff(oldContent, newContent, filePath)
const [, additions, removals] = generateDiff(
oldContent,
newContent,
filePath,
)
totalAdditions += additions
totalRemovals += removals
@ -331,11 +358,11 @@ export const PatchTool = Tool.define({
const output = result
return {
title: `${filesToRead.length} files`,
metadata: {
changed: changedFiles,
additions: totalAdditions,
removals: totalRemovals,
title: `${filesToRead.length} files`,
},
output,
}

View file

@ -16,8 +16,14 @@ export const ReadTool = Tool.define({
description: DESCRIPTION,
parameters: z.object({
filePath: z.string().describe("The path to the file to read"),
offset: z.number().describe("The line number to start reading from (0-based)").optional(),
limit: z.number().describe("The number of lines to read (defaults to 2000)").optional(),
offset: z
.number()
.describe("The line number to start reading from (0-based)")
.optional(),
limit: z
.number()
.describe("The number of lines to read (defaults to 2000)")
.optional(),
}),
async execute(params, ctx) {
let filePath = params.filePath
@ -34,13 +40,16 @@ export const ReadTool = Tool.define({
const suggestions = dirEntries
.filter(
(entry) =>
entry.toLowerCase().includes(base.toLowerCase()) || base.toLowerCase().includes(entry.toLowerCase()),
entry.toLowerCase().includes(base.toLowerCase()) ||
base.toLowerCase().includes(entry.toLowerCase()),
)
.map((entry) => path.join(dir, entry))
.slice(0, 3)
if (suggestions.length > 0) {
throw new Error(`File not found: ${filePath}\n\nDid you mean one of these?\n${suggestions.join("\n")}`)
throw new Error(
`File not found: ${filePath}\n\nDid you mean one of these?\n${suggestions.join("\n")}`,
)
}
throw new Error(`File not found: ${filePath}`)
@ -48,14 +57,21 @@ export const ReadTool = Tool.define({
const stats = await file.stat()
if (stats.size > MAX_READ_SIZE)
throw new Error(`File is too large (${stats.size} bytes). Maximum size is ${MAX_READ_SIZE} bytes`)
throw new Error(
`File is too large (${stats.size} bytes). Maximum size is ${MAX_READ_SIZE} bytes`,
)
const limit = params.limit ?? DEFAULT_READ_LIMIT
const offset = params.offset || 0
const isImage = isImageFile(filePath)
if (isImage) throw new Error(`This is an image file of type: ${isImage}\nUse a different tool to process images`)
if (isImage)
throw new Error(
`This is an image file of type: ${isImage}\nUse a different tool to process images`,
)
const lines = await file.text().then((text) => text.split("\n"))
const raw = lines.slice(offset, offset + limit).map((line) => {
return line.length > MAX_LINE_LENGTH ? line.substring(0, MAX_LINE_LENGTH) + "..." : line
return line.length > MAX_LINE_LENGTH
? line.substring(0, MAX_LINE_LENGTH) + "..."
: line
})
const content = raw.map((line, index) => {
return `${(index + offset + 1).toString().padStart(5, "0")}| ${line}`
@ -66,19 +82,21 @@ export const ReadTool = Tool.define({
output += content.join("\n")
if (lines.length > offset + content.length) {
output += `\n\n(File has more lines. Use 'offset' parameter to read beyond line ${offset + content.length})`
output += `\n\n(File has more lines. Use 'offset' parameter to read beyond line ${
offset + content.length
})`
}
output += "\n</file>"
// just warms the lsp client
LSP.touchFile(filePath, false)
await LSP.touchFile(filePath, false)
FileTime.read(ctx.sessionID, filePath)
return {
title: path.relative(App.info().path.root, filePath),
output,
metadata: {
preview,
title: path.relative(App.info().path.root, filePath),
},
}
},

View file

@ -2,7 +2,7 @@ Reads a file from the local filesystem. You can access any file directly by usin
Assume this tool is able to read all files on the machine. If the User provides a path to a file, assume that path is valid. It is okay to read a file that does not exist; an error will be returned.
Usage:
- The filePath parameter must be an absolute path, not a relative path
- The file_path parameter must be an absolute path, not a relative path
- By default, it reads up to 2000 lines starting from the beginning of the file
- You can optionally specify a line offset and limit (especially handy for long files), but it's recommended to read the whole file by not providing these parameters
- Any lines longer than 2000 characters will be truncated

View file

@ -3,36 +3,41 @@ import DESCRIPTION from "./task.txt"
import { z } from "zod"
import { Session } from "../session"
import { Bus } from "../bus"
import { MessageV2 } from "../session/message-v2"
import { Message } from "../session/message"
export const TaskTool = Tool.define({
id: "task",
description: DESCRIPTION,
parameters: z.object({
description: z.string().describe("A short (3-5 words) description of the task"),
description: z
.string()
.describe("A short (3-5 words) description of the task"),
prompt: z.string().describe("The task for the agent to perform"),
}),
async execute(params, ctx) {
const session = await Session.create(ctx.sessionID)
const msg = (await Session.getMessage(ctx.sessionID, ctx.messageID)) as MessageV2.Assistant
const msg = await Session.getMessage(ctx.sessionID, ctx.messageID)
const metadata = msg.metadata.assistant!
function summary(input: MessageV2.Info) {
function summary(input: Message.Info) {
const result = []
for (const part of input.parts) {
if (part.type === "tool" && part.state.status === "completed") {
result.push(part)
if (part.type === "tool-invocation") {
result.push({
toolInvocation: part.toolInvocation,
metadata: input.metadata.tool[part.toolInvocation.toolCallId],
})
}
}
return result
}
const unsub = Bus.subscribe(MessageV2.Event.Updated, async (evt) => {
if (evt.properties.info.sessionID !== session.id) return
const unsub = Bus.subscribe(Message.Event.Updated, async (evt) => {
if (evt.properties.info.metadata.sessionID !== session.id) return
ctx.metadata({
title: params.description,
metadata: {
summary: summary(evt.properties.info),
},
})
})
@ -41,8 +46,8 @@ export const TaskTool = Tool.define({
})
const result = await Session.chat({
sessionID: session.id,
modelID: msg.modelID,
providerID: msg.providerID,
modelID: metadata.modelID,
providerID: metadata.providerID,
parts: [
{
type: "text",
@ -52,8 +57,8 @@ export const TaskTool = Tool.define({
})
unsub()
return {
title: params.description,
metadata: {
title: params.description,
summary: summary(result),
},
output: result.parts.findLast((x) => x.type === "text")!.text,

View file

@ -5,8 +5,12 @@ import { App } from "../app/app"
const TodoInfo = z.object({
content: z.string().min(1).describe("Brief description of the task"),
status: z.enum(["pending", "in_progress", "completed", "cancelled"]).describe("Current status of the task"),
priority: z.enum(["high", "medium", "low"]).describe("Priority level of the task"),
status: z
.enum(["pending", "in_progress", "completed"])
.describe("Current status of the task"),
priority: z
.enum(["high", "medium", "low"])
.describe("Priority level of the task"),
id: z.string().describe("Unique identifier for the todo item"),
})
type TodoInfo = z.infer<typeof TodoInfo>
@ -28,9 +32,9 @@ export const TodoWriteTool = Tool.define({
const todos = state()
todos[opts.sessionID] = params.todos
return {
title: `${params.todos.filter((x) => x.status !== "completed").length} todos`,
output: JSON.stringify(params.todos, null, 2),
metadata: {
title: `${params.todos.filter((x) => x.status !== "completed").length} todos`,
todos: params.todos,
},
}
@ -44,9 +48,9 @@ export const TodoReadTool = Tool.define({
async execute(_params, opts) {
const todos = state()[opts.sessionID] ?? []
return {
title: `${todos.filter((x) => x.status !== "completed").length} todos`,
metadata: {
todos,
title: `${todos.filter((x) => x.status !== "completed").length} todos`,
},
output: JSON.stringify(todos, null, 2),
}

View file

@ -2,15 +2,19 @@ import type { StandardSchemaV1 } from "@standard-schema/spec"
export namespace Tool {
interface Metadata {
title: string
[key: string]: any
}
export type Context<M extends Metadata = Metadata> = {
sessionID: string
messageID: string
abort: AbortSignal
metadata(input: { title?: string; metadata?: M }): void
metadata(meta: M): void
}
export interface Info<Parameters extends StandardSchemaV1 = StandardSchemaV1, M extends Metadata = Metadata> {
export interface Info<
Parameters extends StandardSchemaV1 = StandardSchemaV1,
M extends Metadata = Metadata,
> {
id: string
description: string
parameters: Parameters
@ -18,15 +22,15 @@ export namespace Tool {
args: StandardSchemaV1.InferOutput<Parameters>,
ctx: Context,
): Promise<{
title: string
metadata: M
output: string
}>
}
export function define<Parameters extends StandardSchemaV1, Result extends Metadata>(
input: Info<Parameters, Result>,
): Info<Parameters, Result> {
export function define<
Parameters extends StandardSchemaV1,
Result extends Metadata,
>(input: Info<Parameters, Result>): Info<Parameters, Result> {
return input
}
}
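As a reading aid for the interface change above: on the added side of this hunk, `title` moves inside the tool's metadata and `Context.metadata` receives the metadata object directly. Below is a minimal sketch of a tool written against that shape; the tool itself, the `./tool` import path, and the extra `length` key are illustrative assumptions, not part of the diff.

```ts
import { z } from "zod"
import { Tool } from "./tool" // assumed module path

// Illustrative "echo" tool matching the Info/Metadata shape shown above.
export const EchoTool = Tool.define({
  id: "echo",
  description: "Echo the provided text back to the caller",
  parameters: z.object({
    text: z.string().describe("Text to echo"),
  }),
  async execute(params) {
    return {
      metadata: {
        title: "echo", // title now lives inside metadata
        length: params.text.length, // extra keys allowed by [key: string]: any
      },
      output: params.text,
    }
  },
})
```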

View file

@ -14,7 +14,9 @@ export const WebFetchTool = Tool.define({
url: z.string().describe("The URL to fetch content from"),
format: z
.enum(["text", "markdown", "html"])
.describe("The format to return the content in (text, markdown, or html)"),
.describe(
"The format to return the content in (text, markdown, or html)",
),
timeout: z
.number()
.min(0)
@ -24,11 +26,17 @@ export const WebFetchTool = Tool.define({
}),
async execute(params, ctx) {
// Validate URL
if (!params.url.startsWith("http://") && !params.url.startsWith("https://")) {
if (
!params.url.startsWith("http://") &&
!params.url.startsWith("https://")
) {
throw new Error("URL must start with http:// or https://")
}
const timeout = Math.min((params.timeout ?? DEFAULT_TIMEOUT / 1000) * 1000, MAX_TIMEOUT)
const timeout = Math.min(
(params.timeout ?? DEFAULT_TIMEOUT / 1000) * 1000,
MAX_TIMEOUT,
)
const controller = new AbortController()
const timeoutId = setTimeout(() => controller.abort(), timeout)
@ -38,7 +46,8 @@ export const WebFetchTool = Tool.define({
headers: {
"User-Agent":
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
Accept: "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8",
Accept:
"text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8",
"Accept-Language": "en-US,en;q=0.9",
},
})
@ -70,14 +79,16 @@ export const WebFetchTool = Tool.define({
const text = await extractTextFromHTML(content)
return {
output: text,
metadata: {
title,
metadata: {},
},
}
}
return {
output: content,
metadata: {
title,
metadata: {},
},
}
case "markdown":
@ -85,28 +96,32 @@ export const WebFetchTool = Tool.define({
const markdown = convertHTMLToMarkdown(content)
return {
output: markdown,
metadata: {
title,
metadata: {},
},
}
}
return {
output: "```\n" + content + "\n```",
metadata: {
title,
metadata: {},
},
}
case "html":
return {
output: content,
metadata: {
title,
metadata: {},
},
}
default:
return {
output: content,
metadata: {
title,
metadata: {},
},
}
}
},
@ -128,7 +143,16 @@ async function extractTextFromHTML(html: string) {
.on("*", {
element(element) {
// Reset skip flag when entering other elements
if (!["script", "style", "noscript", "iframe", "object", "embed"].includes(element.tagName)) {
if (
![
"script",
"style",
"noscript",
"iframe",
"object",
"embed",
].includes(element.tagName)
) {
skipContent = false
}
},

View file

@ -13,12 +13,18 @@ export const WriteTool = Tool.define({
id: "write",
description: DESCRIPTION,
parameters: z.object({
filePath: z.string().describe("The absolute path to the file to write (must be absolute, not relative)"),
filePath: z
.string()
.describe(
"The absolute path to the file to write (must be absolute, not relative)",
),
content: z.string().describe("The content to write to the file"),
}),
async execute(params, ctx) {
const app = App.info()
const filepath = path.isAbsolute(params.filePath) ? params.filePath : path.join(app.path.cwd, params.filePath)
const filepath = path.isAbsolute(params.filePath)
? params.filePath
: path.join(app.path.cwd, params.filePath)
const file = Bun.file(filepath)
const exists = await file.exists()
@ -27,7 +33,9 @@ export const WriteTool = Tool.define({
await Permission.ask({
id: "write",
sessionID: ctx.sessionID,
title: exists ? "Overwrite this file: " + filepath : "Create new file: " + filepath,
title: exists
? "Overwrite this file: " + filepath
: "Create new file: " + filepath,
metadata: {
filePath: filepath,
content: params.content,
@ -54,11 +62,11 @@ export const WriteTool = Tool.define({
}
return {
title: path.relative(app.path.root, filepath),
metadata: {
diagnostics,
filepath,
exists: exists,
title: path.relative(app.path.root, filepath),
},
output,
}

View file

@ -7,7 +7,10 @@ export abstract class NamedError extends Error {
abstract schema(): ZodSchema
abstract toObject(): { name: string; data: any }
static create<Name extends string, Data extends ZodSchema>(name: Name, data: Data) {
static create<Name extends string, Data extends ZodSchema>(
name: Name,
data: Data,
) {
const schema = z
.object({
name: z.literal(name),

View file

@ -1,17 +1,7 @@
import { exists } from "fs/promises"
import { dirname, join, relative } from "path"
import { dirname, join } from "path"
export namespace Filesystem {
export function overlaps(a: string, b: string) {
const relA = relative(a, b)
const relB = relative(b, a)
return !relA || !relA.startsWith("..") || !relB || !relB.startsWith("..")
}
export function contains(parent: string, child: string) {
return relative(parent, child).startsWith("..")
}
export async function findUp(target: string, start: string, stop?: string) {
let current = start
const result = []
@ -26,21 +16,6 @@ export namespace Filesystem {
return result
}
export async function* up(options: { targets: string[]; start: string; stop?: string }) {
const { targets, start, stop } = options
let current = start
while (true) {
for (const target of targets) {
const search = join(current, target)
if (await exists(search)) yield search
}
if (stop === current) break
const parent = dirname(current)
if (parent === current) break
current = parent
}
}
export async function globUp(pattern: string, start: string, stop?: string) {
let current = start
const result = []

View file

@ -1,59 +1,15 @@
import path from "path"
import fs from "fs/promises"
import { Global } from "../global"
import z from "zod"
export namespace Log {
export const Level = z.enum(["DEBUG", "INFO", "WARN", "ERROR"]).openapi({ ref: "LogLevel", description: "Log level" })
export type Level = z.infer<typeof Level>
const levelPriority: Record<Level, number> = {
DEBUG: 0,
INFO: 1,
WARN: 2,
ERROR: 3,
}
let currentLevel: Level = "INFO"
export function setLevel(level: Level) {
currentLevel = level
}
export function getLevel(): Level {
return currentLevel
}
function shouldLog(level: Level): boolean {
return levelPriority[level] >= levelPriority[currentLevel]
}
export type Logger = {
debug(message?: any, extra?: Record<string, any>): void
info(message?: any, extra?: Record<string, any>): void
error(message?: any, extra?: Record<string, any>): void
warn(message?: any, extra?: Record<string, any>): void
tag(key: string, value: string): Logger
clone(): Logger
time(
message: string,
extra?: Record<string, any>,
): {
stop(): void
[Symbol.dispose](): void
}
}
const loggers = new Map<string, Logger>()
export const Default = create({ service: "default" })
export interface Options {
print: boolean
level?: Level
}
let logpath = ""
export function file() {
return logpath
}
@ -63,7 +19,10 @@ export namespace Log {
await fs.mkdir(dir, { recursive: true })
cleanup(dir)
if (options.print) return
logpath = path.join(dir, new Date().toISOString().split(".")[0].replace(/:/g, "") + ".log")
logpath = path.join(
dir,
new Date().toISOString().split(".")[0].replace(/:/g, "") + ".log",
)
const logfile = Bun.file(logpath)
await fs.truncate(logpath).catch(() => {})
const writer = logfile.writer()
@ -84,21 +43,15 @@ export namespace Log {
const filesToDelete = files.slice(0, -10)
await Promise.all(filesToDelete.map((file) => fs.unlink(file).catch(() => {})))
await Promise.all(
filesToDelete.map((file) => fs.unlink(file).catch(() => {})),
)
}
let last = Date.now()
export function create(tags?: Record<string, any>) {
tags = tags || {}
const service = tags["service"]
if (service && typeof service === "string") {
const cached = loggers.get(service)
if (cached) {
return cached
}
}
function build(message: any, extra?: Record<string, any>) {
const prefix = Object.entries({
...tags,
@ -110,28 +63,21 @@ export namespace Log {
const next = new Date()
const diff = next.getTime() - last
last = next.getTime()
return [next.toISOString().split(".")[0], "+" + diff + "ms", prefix, message].filter(Boolean).join(" ") + "\n"
return (
[next.toISOString().split(".")[0], "+" + diff + "ms", prefix, message]
.filter(Boolean)
.join(" ") + "\n"
)
}
const result: Logger = {
debug(message?: any, extra?: Record<string, any>) {
if (shouldLog("DEBUG")) {
process.stderr.write("DEBUG " + build(message, extra))
}
},
const result = {
info(message?: any, extra?: Record<string, any>) {
if (shouldLog("INFO")) {
process.stderr.write("INFO " + build(message, extra))
}
},
error(message?: any, extra?: Record<string, any>) {
if (shouldLog("ERROR")) {
process.stderr.write("ERROR " + build(message, extra))
}
},
warn(message?: any, extra?: Record<string, any>) {
if (shouldLog("WARN")) {
process.stderr.write("WARN " + build(message, extra))
}
},
tag(key: string, value: string) {
if (tags) tags[key] = value
@ -159,10 +105,6 @@ export namespace Log {
},
}
if (service && typeof service === "string") {
loggers.set(service, result)
}
return result
}
}

View file

@ -17,7 +17,12 @@ const testCases: TestCase[] = [
replace: 'console.log("universe");',
},
{
content: ["if (condition) {", " doSomething();", " doSomethingElse();", "}"].join("\n"),
content: [
"if (condition) {",
" doSomething();",
" doSomethingElse();",
"}",
].join("\n"),
find: [" doSomething();", " doSomethingElse();"].join("\n"),
replace: [" doNewThing();", " doAnotherThing();"].join("\n"),
},
@ -48,8 +53,15 @@ const testCases: TestCase[] = [
" return result;",
"}",
].join("\n"),
find: ["function calculate(a, b) {", " // different middle content", " return result;", "}"].join("\n"),
replace: ["function calculate(a, b) {", " return a * b * 2;", "}"].join("\n"),
find: [
"function calculate(a, b) {",
" // different middle content",
" return result;",
"}",
].join("\n"),
replace: ["function calculate(a, b) {", " return a * b * 2;", "}"].join(
"\n",
),
},
{
content: [
@ -64,7 +76,13 @@ const testCases: TestCase[] = [
"}",
].join("\n"),
find: ["class MyClass {", " // different implementation", "}"].join("\n"),
replace: ["class MyClass {", " constructor() {", " this.value = 42;", " }", "}"].join("\n"),
replace: [
"class MyClass {",
" constructor() {",
" this.value = 42;",
" }",
"}",
].join("\n"),
},
// WhitespaceNormalizedReplacer cases
@ -86,21 +104,48 @@ const testCases: TestCase[] = [
// IndentationFlexibleReplacer cases
{
content: [" function nested() {", ' console.log("deeply nested");', " return true;", " }"].join(
"\n",
),
find: ["function nested() {", ' console.log("deeply nested");', " return true;", "}"].join("\n"),
replace: ["function nested() {", ' console.log("updated");', " return false;", "}"].join("\n"),
content: [
" function nested() {",
' console.log("deeply nested");',
" return true;",
" }",
].join("\n"),
find: [
"function nested() {",
' console.log("deeply nested");',
" return true;",
"}",
].join("\n"),
replace: [
"function nested() {",
' console.log("updated");',
" return false;",
"}",
].join("\n"),
},
{
content: [" if (true) {", ' console.log("level 1");', ' console.log("level 2");', " }"].join("\n"),
find: ["if (true) {", 'console.log("level 1");', ' console.log("level 2");', "}"].join("\n"),
content: [
" if (true) {",
' console.log("level 1");',
' console.log("level 2");',
" }",
].join("\n"),
find: [
"if (true) {",
'console.log("level 1");',
' console.log("level 2");',
"}",
].join("\n"),
replace: ["if (true) {", 'console.log("updated");', "}"].join("\n"),
},
// replaceAll option cases
{
content: ['console.log("test");', 'console.log("test");', 'console.log("test");'].join("\n"),
content: [
'console.log("test");',
'console.log("test");',
'console.log("test");',
].join("\n"),
find: 'console.log("test");',
replace: 'console.log("updated");',
all: true,
@ -168,7 +213,9 @@ const testCases: TestCase[] = [
// MultiOccurrenceReplacer cases (with replaceAll)
{
content: ["debug('start');", "debug('middle');", "debug('end');"].join("\n"),
content: ["debug('start');", "debug('middle');", "debug('end');"].join(
"\n",
),
find: "debug",
replace: "log",
all: true,
@ -192,7 +239,9 @@ const testCases: TestCase[] = [
replace: "const value = 24;",
},
{
content: ["", " if (condition) {", " doSomething();", " }", ""].join("\n"),
content: ["", " if (condition) {", " doSomething();", " }", ""].join(
"\n",
),
find: ["if (condition) {", " doSomething();", "}"].join("\n"),
replace: ["if (condition) {", " doNothing();", "}"].join("\n"),
},
@ -213,7 +262,9 @@ const testCases: TestCase[] = [
" return result;",
"}",
].join("\n"),
replace: ["function calculate(a, b) {", " return (a + b) * 2;", "}"].join("\n"),
replace: ["function calculate(a, b) {", " return (a + b) * 2;", "}"].join(
"\n",
),
},
{
content: [
@ -227,8 +278,15 @@ const testCases: TestCase[] = [
" }",
"}",
].join("\n"),
find: ["class TestClass {", " // different implementation", " // with multiple lines", "}"].join("\n"),
replace: ["class TestClass {", " getValue() { return 42; }", "}"].join("\n"),
find: [
"class TestClass {",
" // different implementation",
" // with multiple lines",
"}",
].join("\n"),
replace: ["class TestClass {", " getValue() { return 42; }", "}"].join(
"\n",
),
},
// Combined edge cases for new replacers
@ -238,7 +296,9 @@ const testCases: TestCase[] = [
replace: 'console.log("updated");',
},
{
content: [" ", "function test() {", " return 'value';", "}", " "].join("\n"),
content: [" ", "function test() {", " return 'value';", "}", " "].join(
"\n",
),
find: ["function test() {", "return 'value';", "}"].join("\n"),
replace: ["function test() {", "return 'new value';", "}"].join("\n"),
},
@ -286,7 +346,13 @@ const testCases: TestCase[] = [
// ContextAwareReplacer - test with trailing newline in find string
{
content: ["class Test {", " method1() {", " return 1;", " }", "}"].join("\n"),
content: [
"class Test {",
" method1() {",
" return 1;",
" }",
"}",
].join("\n"),
find: [
"class Test {",
" // different content",
@ -335,7 +401,12 @@ describe("EditTool Replacers", () => {
replace(testCase.content, testCase.find, testCase.replace, testCase.all)
}).toThrow()
} else {
const result = replace(testCase.content, testCase.find, testCase.replace, testCase.all)
const result = replace(
testCase.content,
testCase.find,
testCase.replace,
testCase.all,
)
expect(result).toContain(testCase.replace)
}
})

View file

@ -42,7 +42,10 @@ describe("tool.glob", () => {
describe("tool.ls", () => {
test("basic", async () => {
const result = await App.provide({ cwd: process.cwd() }, async () => {
return await ListTool.execute({ path: "./example", ignore: [".git"] }, ctx)
return await ListTool.execute(
{ path: "./example", ignore: [".git"] },
ctx,
)
})
expect(result.output).toMatchSnapshot()
})

View file

@ -5,18 +5,14 @@ import (
"encoding/json"
"log/slog"
"os"
"os/signal"
"path/filepath"
"strings"
"syscall"
tea "github.com/charmbracelet/bubbletea/v2"
flag "github.com/spf13/pflag"
"github.com/sst/opencode-sdk-go"
"github.com/sst/opencode-sdk-go/option"
"github.com/sst/opencode/internal/app"
"github.com/sst/opencode/internal/clipboard"
"github.com/sst/opencode/internal/tui"
"github.com/sst/opencode/internal/util"
)
var Version = "dev"
@ -27,11 +23,6 @@ func main() {
version = "v" + Version
}
var model *string = flag.String("model", "", "model to begin with")
var prompt *string = flag.String("prompt", "", "prompt to begin with")
var mode *string = flag.String("mode", "", "mode to begin with")
flag.Parse()
url := os.Getenv("OPENCODE_SERVER")
appInfoStr := os.Getenv("OPENCODE_APP_INFO")
@ -42,36 +33,39 @@ func main() {
os.Exit(1)
}
modesStr := os.Getenv("OPENCODE_MODES")
var modes []opencode.Mode
err = json.Unmarshal([]byte(modesStr), &modes)
logfile := filepath.Join(appInfo.Path.Data, "log", "tui.log")
if _, err := os.Stat(filepath.Dir(logfile)); os.IsNotExist(err) {
err := os.MkdirAll(filepath.Dir(logfile), 0755)
if err != nil {
slog.Error("Failed to unmarshal modes", "error", err)
slog.Error("Failed to create log directory", "error", err)
os.Exit(1)
}
}
file, err := os.Create(logfile)
if err != nil {
slog.Error("Failed to create log file", "error", err)
os.Exit(1)
}
defer file.Close()
logger := slog.New(slog.NewTextHandler(file, &slog.HandlerOptions{Level: slog.LevelDebug}))
slog.SetDefault(logger)
slog.Debug("TUI launched", "app", appInfo)
httpClient := opencode.NewClient(
option.WithBaseURL(url),
)
apiHandler := util.NewAPILogHandler(httpClient, "tui", slog.LevelDebug)
logger := slog.New(apiHandler)
slog.SetDefault(logger)
slog.Debug("TUI launched", "app", appInfoStr, "modes", modesStr)
go func() {
err = clipboard.Init()
if err != nil {
slog.Error("Failed to initialize clipboard", "error", err)
slog.Error("Failed to create client", "error", err)
os.Exit(1)
}
}()
// Create main context for the application
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
app_, err := app.New(ctx, version, appInfo, modes, httpClient, model, prompt, mode)
app_, err := app.New(ctx, version, appInfo, httpClient)
if err != nil {
panic(err)
}
@ -79,14 +73,10 @@ func main() {
program := tea.NewProgram(
tui.NewModel(app_),
tea.WithAltScreen(),
// tea.WithKeyboardEnhancements(),
tea.WithKeyboardEnhancements(),
tea.WithMouseCellMotion(),
)
// Set up signal handling for graceful shutdown
sigChan := make(chan os.Signal, 1)
signal.Notify(sigChan, syscall.SIGTERM, syscall.SIGINT)
go func() {
stream := httpClient.Event.ListStreaming(ctx)
for stream.Next() {
@ -99,13 +89,6 @@ func main() {
}
}()
// Handle signals in a separate goroutine
go func() {
sig := <-sigChan
slog.Info("Received signal, shutting down gracefully", "signal", sig)
program.Quit()
}()
// Run the TUI
result, err := program.Run()
if err != nil {

View file

@ -6,26 +6,21 @@ require (
github.com/BurntSushi/toml v1.5.0
github.com/alecthomas/chroma/v2 v2.18.0
github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1
github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4
github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.3
github.com/charmbracelet/glamour v0.10.0
github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3
github.com/charmbracelet/x/ansi v0.9.3
github.com/charmbracelet/x/input v0.3.7
github.com/google/uuid v1.6.0
github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.1
github.com/charmbracelet/x/ansi v0.8.0
github.com/lithammer/fuzzysearch v1.1.8
github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6
github.com/muesli/reflow v0.3.0
github.com/muesli/termenv v0.16.0
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3
github.com/sst/opencode-sdk-go v0.1.0-alpha.8
golang.org/x/image v0.28.0
github.com/tidwall/gjson v1.14.4
rsc.io/qr v0.2.0
)
replace (
github.com/charmbracelet/x/input => ./input
github.com/sst/opencode-sdk-go => ./sdk
)
replace github.com/sst/opencode-sdk-go => ./sdk
require golang.org/x/exp v0.0.0-20250305212735-054e65f0b394 // indirect
@ -34,6 +29,7 @@ require (
github.com/atombender/go-jsonschema v0.20.0 // indirect
github.com/charmbracelet/lipgloss v1.1.1-0.20250404203927-76690c660834 // indirect
github.com/charmbracelet/x/exp/slice v0.0.0-20250327172914-2fdc97757edf // indirect
github.com/charmbracelet/x/input v0.3.5-0.20250424101541-abb4d9a9b197 // indirect
github.com/charmbracelet/x/windows v0.2.1 // indirect
github.com/dprotaso/go-yit v0.0.0-20220510233725-9ba8df137936 // indirect
github.com/fsnotify/fsnotify v1.8.0 // indirect
@ -53,23 +49,23 @@ require (
github.com/sosodev/duration v1.3.1 // indirect
github.com/speakeasy-api/openapi-overlay v0.9.0 // indirect
github.com/spf13/cobra v1.9.1 // indirect
github.com/tidwall/gjson v1.14.4 // indirect
github.com/tidwall/match v1.1.1 // indirect
github.com/tidwall/pretty v1.2.1 // indirect
github.com/tidwall/sjson v1.2.5 // indirect
github.com/vmware-labs/yaml-jsonpath v0.3.2 // indirect
golang.org/x/mod v0.25.0 // indirect
golang.org/x/tools v0.34.0 // indirect
golang.org/x/mod v0.24.0 // indirect
golang.org/x/tools v0.31.0 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
)
require (
github.com/atotto/clipboard v0.1.4 // indirect
github.com/atotto/clipboard v0.1.4
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
github.com/aymerick/douceur v0.2.0 // indirect
github.com/charmbracelet/colorprofile v0.3.1 // indirect
github.com/charmbracelet/x/cellbuf v0.0.14-0.20250505150409-97991a1f17d1 // indirect
github.com/charmbracelet/x/cellbuf v0.0.14-0.20250501183327-ad3bc78c6a81 // indirect
github.com/charmbracelet/x/term v0.2.1 // indirect
github.com/disintegration/imaging v1.6.2
github.com/dlclark/regexp2 v1.11.5 // indirect
github.com/google/go-cmp v0.7.0 // indirect
github.com/gorilla/css v1.0.1 // indirect
@ -81,15 +77,16 @@ require (
github.com/muesli/cancelreader v0.2.2 // indirect
github.com/rivo/uniseg v0.4.7
github.com/rogpeppe/go-internal v1.14.1 // indirect
github.com/spf13/pflag v1.0.6
github.com/spf13/pflag v1.0.6 // indirect
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
github.com/yuin/goldmark v1.7.8 // indirect
github.com/yuin/goldmark-emoji v1.0.5 // indirect
golang.org/x/net v0.41.0 // indirect
golang.org/x/sync v0.15.0 // indirect
golang.org/x/sys v0.33.0 // indirect
golang.org/x/term v0.32.0 // indirect
golang.org/x/text v0.26.0
golang.org/x/image v0.26.0
golang.org/x/net v0.39.0 // indirect
golang.org/x/sync v0.13.0 // indirect
golang.org/x/sys v0.32.0 // indirect
golang.org/x/term v0.31.0 // indirect
golang.org/x/text v0.24.0
gopkg.in/yaml.v3 v3.0.1 // indirect
)

View file

@ -22,24 +22,26 @@ github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuP
github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4=
github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1 h1:swACzss0FjnyPz1enfX56GKkLiuKg5FlyVmOLIlU2kE=
github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1/go.mod h1:6HamsBKWqEC/FVHuQMHgQL+knPyvHH55HwJDHl/adMw=
github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4 h1:UgUuKKvBwgqm2ZEL+sKv/OLeavrUb4gfHgdxe6oIOno=
github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4/go.mod h1:0wWFRpsgF7vHsCukVZ5LAhZkiR4j875H6KEM2/tFQmA=
github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.3 h1:5A2e3myxXMpCES+kjEWgGsaf9VgZXjZbLi5iMTH7j40=
github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.3/go.mod h1:ZFDg5oPjyRYrPAa3iFrtP1DO8xy+LUQxd9JFHEcuwJY=
github.com/charmbracelet/colorprofile v0.3.1 h1:k8dTHMd7fgw4bnFd7jXTLZrSU/CQrKnL3m+AxCzDz40=
github.com/charmbracelet/colorprofile v0.3.1/go.mod h1:/GkGusxNs8VB/RSOh3fu0TJmQ4ICMMPApIIVn0KszZ0=
github.com/charmbracelet/glamour v0.10.0 h1:MtZvfwsYCx8jEPFJm3rIBFIMZUfUJ765oX8V6kXldcY=
github.com/charmbracelet/glamour v0.10.0/go.mod h1:f+uf+I/ChNmqo087elLnVdCiVgjSKWuXa/l6NU2ndYk=
github.com/charmbracelet/lipgloss v1.1.1-0.20250404203927-76690c660834 h1:ZR7e0ro+SZZiIZD7msJyA+NjkCNNavuiPBLgerbOziE=
github.com/charmbracelet/lipgloss v1.1.1-0.20250404203927-76690c660834/go.mod h1:aKC/t2arECF6rNOnaKaVU6y4t4ZeHQzqfxedE/VkVhA=
github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3 h1:W6DpZX6zSkZr0iFq6JVh1vItLoxfYtNlaxOJtWp8Kis=
github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3/go.mod h1:65HTtKURcv/ict9ZQhr6zT84JqIjMcJbyrZYHHKNfKA=
github.com/charmbracelet/x/ansi v0.9.3 h1:BXt5DHS/MKF+LjuK4huWrC6NCvHtexww7dMayh6GXd0=
github.com/charmbracelet/x/ansi v0.9.3/go.mod h1:3RQDQ6lDnROptfpWuUVIUG64bD2g2BgntdxH0Ya5TeE=
github.com/charmbracelet/x/cellbuf v0.0.14-0.20250505150409-97991a1f17d1 h1:MTSs/nsZNfZPbYk/r9hluK2BtwoqvEYruAujNVwgDv0=
github.com/charmbracelet/x/cellbuf v0.0.14-0.20250505150409-97991a1f17d1/go.mod h1:xBlh2Yi3DL3zy/2n15kITpg0YZardf/aa/hgUaIM6Rk=
github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.1 h1:D9AJJuYTN5pvz6mpIGO1ijLKpfTYSHOtKGgwoTQ4Gog=
github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.1/go.mod h1:tRlx/Hu0lo/j9viunCN2H+Ze6JrmdjQlXUQvvArgaOc=
github.com/charmbracelet/x/ansi v0.8.0 h1:9GTq3xq9caJW8ZrBTe0LIe2fvfLR/bYXKTx2llXn7xE=
github.com/charmbracelet/x/ansi v0.8.0/go.mod h1:wdYl/ONOLHLIVmQaxbIYEC/cRKOQyjTkowiI4blgS9Q=
github.com/charmbracelet/x/cellbuf v0.0.14-0.20250501183327-ad3bc78c6a81 h1:iGrflaL5jQW6crML+pZx/ulWAVZQR3CQoRGvFsr2Tyg=
github.com/charmbracelet/x/cellbuf v0.0.14-0.20250501183327-ad3bc78c6a81/go.mod h1:poPFOXFTsJsnLbkV3H2KxAAXT7pdjxxLujLocWjkyzM=
github.com/charmbracelet/x/exp/golden v0.0.0-20250207160936-21c02780d27a h1:FsHEJ52OC4VuTzU8t+n5frMjLvpYWEznSr/u8tnkCYw=
github.com/charmbracelet/x/exp/golden v0.0.0-20250207160936-21c02780d27a/go.mod h1:wDlXFlCrmJ8J+swcL/MnGUuYnqgQdW9rhSD61oNMb6U=
github.com/charmbracelet/x/exp/slice v0.0.0-20250327172914-2fdc97757edf h1:rLG0Yb6MQSDKdB52aGX55JT1oi0P0Kuaj7wi1bLUpnI=
github.com/charmbracelet/x/exp/slice v0.0.0-20250327172914-2fdc97757edf/go.mod h1:B3UgsnsBZS/eX42BlaNiJkD1pPOUa+oF1IYC6Yd2CEU=
github.com/charmbracelet/x/input v0.3.5-0.20250424101541-abb4d9a9b197 h1:fsWj8NF5njyMVzELc7++HsvRDvgz3VcgGAUgWBDWWWM=
github.com/charmbracelet/x/input v0.3.5-0.20250424101541-abb4d9a9b197/go.mod h1:xseGeVftoP9rVI+/8WKYrJFH6ior6iERGvklwwHz5+s=
github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ=
github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg=
github.com/charmbracelet/x/windows v0.2.1 h1:3x7vnbpQrjpuq/4L+I4gNsG5htYoCiA5oe9hLjAij5I=
@ -52,6 +54,8 @@ github.com/davecgh/go-spew v0.0.0-20161028175848-04cdfd42973b/go.mod h1:J7Y8YcW2
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/disintegration/imaging v1.6.2 h1:w1LecBlG2Lnp8B3jk5zSuNqd7b4DXhcjwek1ei82L+c=
github.com/disintegration/imaging v1.6.2/go.mod h1:44/5580QXChDfwIclfc/PCwrr44amcmDAg8hxG0Ewe4=
github.com/dlclark/regexp2 v1.11.5 h1:Q/sSnsKerHeCkc/jSTNq1oCm7KiVgUMZRDUoRu0JQZQ=
github.com/dlclark/regexp2 v1.11.5/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
github.com/dprotaso/go-yit v0.0.0-20191028211022-135eb7262960/go.mod h1:9HQzr9D/0PGwMEbC3d5AB7oi67+h4TsQqItC1GVYG58=
@ -88,8 +92,6 @@ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/gorilla/css v1.0.1 h1:ntNaBIghp6JmvWnxbZKANoLyuXTPZ4cAMlo6RyhlbO8=
github.com/gorilla/css v1.0.1/go.mod h1:BvnYkspnSzMmwRK+b8/xgNPLiIuNZr6vbZBTPQ2A3b0=
github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=
@ -214,13 +216,14 @@ golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPh
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/exp v0.0.0-20250305212735-054e65f0b394 h1:nDVHiLt8aIbd/VzvPWN6kSOPE7+F/fNFDSXLVYkE/Iw=
golang.org/x/exp v0.0.0-20250305212735-054e65f0b394/go.mod h1:sIifuuw/Yco/y6yb6+bDNfyeQ/MdPUy/hKEMYQV17cM=
golang.org/x/image v0.28.0 h1:gdem5JW1OLS4FbkWgLO+7ZeFzYtL3xClb97GaUzYMFE=
golang.org/x/image v0.28.0/go.mod h1:GUJYXtnGKEUgggyzh+Vxt+AviiCcyiwpsl8iQ8MvwGY=
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.26.0 h1:4XjIFEZWQmCZi6Wv8BoxsDhRU3RVnLX04dToTDAEPlY=
golang.org/x/image v0.26.0/go.mod h1:lcxbMFAovzpnJxzXS3nyL83K27tmqtKzIJpctK8YO5c=
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.25.0 h1:n7a+ZbQKQA/Ysbyb0/6IbB1H/X41mKgbhfv7AfG/44w=
golang.org/x/mod v0.25.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww=
golang.org/x/mod v0.24.0 h1:ZfthKaKaT4NrhGVZHO1/WDTwGES4De8KtWO0SIbNJMU=
golang.org/x/mod v0.24.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww=
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
@ -231,15 +234,15 @@ golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT
golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.41.0 h1:vBTly1HeNPEn3wtREYfy4GZ/NECgw2Cnl+nK6Nz3uvw=
golang.org/x/net v0.41.0/go.mod h1:B/K4NNqkfmg07DQYrbwvSluqCJOOXwUjeb/5lOisjbA=
golang.org/x/net v0.39.0 h1:ZCu7HMWDxpXpaiKdhzIfaltL9Lp31x/3fCP11bc6/fY=
golang.org/x/net v0.39.0/go.mod h1:X7NRbYVEA+ewNkCNyJ513WmMdQ3BineSwVtN2zD/d+E=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8=
golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/sync v0.13.0 h1:AauUjRAJ9OSnvULf/ARrrVywoJDy0YS2AwQ98I37610=
golang.org/x/sync v0.13.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@ -258,28 +261,28 @@ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw=
golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/sys v0.32.0 h1:s77OFDvIQeibCmezSnk/q6iAfkdiQaJi4VzroCFrN20=
golang.org/x/sys v0.32.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.32.0 h1:DR4lr0TjUs3epypdhTOkMmuF5CDFJ/8pOnbzMZPQ7bg=
golang.org/x/term v0.32.0/go.mod h1:uZG1FhGx848Sqfsq4/DlJr3xGGsYMu/L5GW4abiaEPQ=
golang.org/x/term v0.31.0 h1:erwDkOK1Msy6offm1mOgvspSkslFnIGsFnxOKoufg3o=
golang.org/x/term v0.31.0/go.mod h1:R4BeIy7D95HzImkxGkTW1UQTtP54tio2RyHz7PwK0aw=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M=
golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA=
golang.org/x/text v0.24.0 h1:dd5Bzh4yt5KYA8f9CJHCP4FB4D51c2c6JvN37xJJkJ0=
golang.org/x/text v0.24.0/go.mod h1:L8rBsPeo2pSS+xqN0d5u2ikmjtmoJbDBT1b7nHvFCdU=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/tools v0.34.0 h1:qIpSLOxeCYGg9TrcJokLBG4KFA6d795g0xkBkiESGlo=
golang.org/x/tools v0.34.0/go.mod h1:pAP9OwEaY1CAW3HOmg3hLZC5Z0CCmzjAF2UQMSqNARg=
golang.org/x/tools v0.31.0 h1:0EedkvKDbh+qistFTd0Bcwe/YLh4vHwWEkiI0toFIBU=
golang.org/x/tools v0.31.0/go.mod h1:naFTU+Cev749tSJRXJlna0T3WxKvb1kWEx15xA4SdmQ=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=

View file

@ -1,14 +0,0 @@
//go:build !windows
// +build !windows
package input
import (
"io"
"github.com/muesli/cancelreader"
)
func newCancelreader(r io.Reader, _ int) (cancelreader.CancelReader, error) {
return cancelreader.NewReader(r) //nolint:wrapcheck
}

View file

@ -1,143 +0,0 @@
//go:build windows
// +build windows
package input
import (
"fmt"
"io"
"os"
"sync"
xwindows "github.com/charmbracelet/x/windows"
"github.com/muesli/cancelreader"
"golang.org/x/sys/windows"
)
type conInputReader struct {
cancelMixin
conin windows.Handle
originalMode uint32
}
var _ cancelreader.CancelReader = &conInputReader{}
func newCancelreader(r io.Reader, flags int) (cancelreader.CancelReader, error) {
fallback := func(io.Reader) (cancelreader.CancelReader, error) {
return cancelreader.NewReader(r)
}
var dummy uint32
if f, ok := r.(cancelreader.File); !ok || f.Fd() != os.Stdin.Fd() ||
// If data was piped to the standard input, it does not emit events
// anymore. We can detect this if the console mode cannot be set anymore,
// in this case, we fallback to the default cancelreader implementation.
windows.GetConsoleMode(windows.Handle(f.Fd()), &dummy) != nil {
return fallback(r)
}
conin, err := windows.GetStdHandle(windows.STD_INPUT_HANDLE)
if err != nil {
return fallback(r)
}
// Discard any pending input events.
if err := xwindows.FlushConsoleInputBuffer(conin); err != nil {
return fallback(r)
}
modes := []uint32{
windows.ENABLE_WINDOW_INPUT,
windows.ENABLE_EXTENDED_FLAGS,
}
// Enabling mouse mode implicitly blocks console text selection. Thus, we
// need to enable it only if the mouse mode is requested.
// In order to toggle mouse mode, the caller must recreate the reader with
// the appropriate flag toggled.
if flags&FlagMouseMode != 0 {
modes = append(modes, windows.ENABLE_MOUSE_INPUT)
}
originalMode, err := prepareConsole(conin, modes...)
if err != nil {
return nil, fmt.Errorf("failed to prepare console input: %w", err)
}
return &conInputReader{
conin: conin,
originalMode: originalMode,
}, nil
}
// Cancel implements cancelreader.CancelReader.
func (r *conInputReader) Cancel() bool {
r.setCanceled()
return windows.CancelIoEx(r.conin, nil) == nil || windows.CancelIo(r.conin) == nil
}
// Close implements cancelreader.CancelReader.
func (r *conInputReader) Close() error {
if r.originalMode != 0 {
err := windows.SetConsoleMode(r.conin, r.originalMode)
if err != nil {
return fmt.Errorf("reset console mode: %w", err)
}
}
return nil
}
// Read implements cancelreader.CancelReader.
func (r *conInputReader) Read(data []byte) (int, error) {
if r.isCanceled() {
return 0, cancelreader.ErrCanceled
}
var n uint32
if err := windows.ReadFile(r.conin, data, &n, nil); err != nil {
return int(n), fmt.Errorf("read console input: %w", err)
}
return int(n), nil
}
func prepareConsole(input windows.Handle, modes ...uint32) (originalMode uint32, err error) {
err = windows.GetConsoleMode(input, &originalMode)
if err != nil {
return 0, fmt.Errorf("get console mode: %w", err)
}
var newMode uint32
for _, mode := range modes {
newMode |= mode
}
err = windows.SetConsoleMode(input, newMode)
if err != nil {
return 0, fmt.Errorf("set console mode: %w", err)
}
return originalMode, nil
}
// cancelMixin represents a goroutine-safe cancelation status.
type cancelMixin struct {
unsafeCanceled bool
lock sync.Mutex
}
func (c *cancelMixin) setCanceled() {
c.lock.Lock()
defer c.lock.Unlock()
c.unsafeCanceled = true
}
func (c *cancelMixin) isCanceled() bool {
c.lock.Lock()
defer c.lock.Unlock()
return c.unsafeCanceled
}

View file

@ -1,25 +0,0 @@
package input
import "github.com/charmbracelet/x/ansi"
// ClipboardSelection represents a clipboard selection. The most common
// clipboard selections are "system" and "primary".
type ClipboardSelection = byte
// Clipboard selections.
const (
SystemClipboard ClipboardSelection = ansi.SystemClipboard
PrimaryClipboard ClipboardSelection = ansi.PrimaryClipboard
)
// ClipboardEvent is a clipboard read message event. This message is emitted when
// a terminal receives an OSC52 clipboard read message event.
type ClipboardEvent struct {
Content string
Selection ClipboardSelection
}
// String returns the string representation of the clipboard message.
func (e ClipboardEvent) String() string {
return e.Content
}

View file

@ -1,136 +0,0 @@
package input
import (
"fmt"
"image/color"
"math"
)
// ForegroundColorEvent represents a foreground color event. This event is
// emitted when the terminal requests the terminal foreground color using
// [ansi.RequestForegroundColor].
type ForegroundColorEvent struct{ color.Color }
// String returns the hex representation of the color.
func (e ForegroundColorEvent) String() string {
return colorToHex(e.Color)
}
// IsDark returns whether the color is dark.
func (e ForegroundColorEvent) IsDark() bool {
return isDarkColor(e.Color)
}
// BackgroundColorEvent represents a background color event. This event is
// emitted when the terminal requests the terminal background color using
// [ansi.RequestBackgroundColor].
type BackgroundColorEvent struct{ color.Color }
// String returns the hex representation of the color.
func (e BackgroundColorEvent) String() string {
return colorToHex(e)
}
// IsDark returns whether the color is dark.
func (e BackgroundColorEvent) IsDark() bool {
return isDarkColor(e.Color)
}
// CursorColorEvent represents a cursor color change event. This event is
// emitted when the program requests the terminal cursor color using
// [ansi.RequestCursorColor].
type CursorColorEvent struct{ color.Color }
// String returns the hex representation of the color.
func (e CursorColorEvent) String() string {
return colorToHex(e)
}
// IsDark returns whether the color is dark.
func (e CursorColorEvent) IsDark() bool {
return isDarkColor(e)
}
type shiftable interface {
~uint | ~uint16 | ~uint32 | ~uint64
}
func shift[T shiftable](x T) T {
if x > 0xff {
x >>= 8
}
return x
}
func colorToHex(c color.Color) string {
if c == nil {
return ""
}
r, g, b, _ := c.RGBA()
return fmt.Sprintf("#%02x%02x%02x", shift(r), shift(g), shift(b))
}
func getMaxMin(a, b, c float64) (ma, mi float64) {
if a > b {
ma = a
mi = b
} else {
ma = b
mi = a
}
if c > ma {
ma = c
} else if c < mi {
mi = c
}
return ma, mi
}
func round(x float64) float64 {
return math.Round(x*1000) / 1000
}
// rgbToHSL converts an RGB triple to an HSL triple.
func rgbToHSL(r, g, b uint8) (h, s, l float64) {
// convert uint32 pre-multiplied value to uint8
// The r,g,b values are divided by 255 to change the range from 0..255 to 0..1:
Rnot := float64(r) / 255
Gnot := float64(g) / 255
Bnot := float64(b) / 255
Cmax, Cmin := getMaxMin(Rnot, Gnot, Bnot)
Δ := Cmax - Cmin
// Lightness calculation:
l = (Cmax + Cmin) / 2
// Hue and Saturation Calculation:
if Δ == 0 {
h = 0
s = 0
} else {
switch Cmax {
case Rnot:
h = 60 * (math.Mod((Gnot-Bnot)/Δ, 6))
case Gnot:
h = 60 * (((Bnot - Rnot) / Δ) + 2)
case Bnot:
h = 60 * (((Rnot - Gnot) / Δ) + 4)
}
if h < 0 {
h += 360
}
s = Δ / (1 - math.Abs((2*l)-1))
}
return h, round(s), round(l)
}
// isDarkColor returns whether the given color is dark.
func isDarkColor(c color.Color) bool {
if c == nil {
return true
}
r, g, b, _ := c.RGBA()
_, _, l := rgbToHSL(uint8(r>>8), uint8(g>>8), uint8(b>>8)) //nolint:gosec
return l < 0.5
}

View file

@ -1,7 +0,0 @@
package input
import "image"
// CursorPositionEvent represents a cursor position event. Where X is the
// zero-based column and Y is the zero-based row.
type CursorPositionEvent image.Point

View file

@ -1,18 +0,0 @@
package input
import "github.com/charmbracelet/x/ansi"
// PrimaryDeviceAttributesEvent is an event that represents the terminal
// primary device attributes.
type PrimaryDeviceAttributesEvent []int
func parsePrimaryDevAttrs(params ansi.Params) Event {
// Primary Device Attributes
da1 := make(PrimaryDeviceAttributesEvent, len(params))
for i, p := range params {
if !p.HasMore() {
da1[i] = p.Param(0)
}
}
return da1
}

View file

@ -1,6 +0,0 @@
// Package input provides a set of utilities for handling input events in a
// terminal environment. It includes support for reading input events, parsing
// escape sequences, and handling clipboard events.
// The package is designed to work with various terminal types and supports
// customization through flags and options.
package input

View file

@ -1,196 +0,0 @@
//nolint:unused,revive,nolintlint
package input
import (
"bytes"
"io"
"unicode/utf8"
"github.com/muesli/cancelreader"
)
// Logger is a simple logger interface.
type Logger interface {
Printf(format string, v ...any)
}
// win32InputState is a state machine for parsing key events from the Windows
// Console API into escape sequences and utf8 runes, and keeps track of the last
// control key state to determine modifier key changes. It also keeps track of
// the last mouse button state and window size changes to determine which mouse
// buttons were released and to prevent multiple size events from firing.
type win32InputState struct {
ansiBuf [256]byte
ansiIdx int
utf16Buf [2]rune
utf16Half bool
lastCks uint32 // the last control key state for the previous event
lastMouseBtns uint32 // the last mouse button state for the previous event
lastWinsizeX, lastWinsizeY int16 // the last window size for the previous event to prevent multiple size events from firing
}
// Reader represents an input event reader. It reads input events and parses
// escape sequences from the terminal input buffer and translates them into
// human-readable events.
type Reader struct {
rd cancelreader.CancelReader
table map[string]Key // table is a lookup table for key sequences.
term string // term is the terminal name $TERM.
// paste is the bracketed paste mode buffer.
// When nil, bracketed paste mode is disabled.
paste []byte
buf [256]byte // do we need a larger buffer?
// partialSeq holds incomplete escape sequences that need more data
partialSeq []byte
// keyState keeps track of the current Windows Console API key events state.
// It is used to decode ANSI escape sequences and utf16 sequences.
keyState win32InputState
parser Parser
logger Logger
}
// NewReader returns a new input event reader. The reader reads input events
// from the terminal and parses escape sequences into human-readable events. It
// supports reading Terminfo databases. See [Parser] for more information.
//
// Example:
//
// r, _ := input.NewReader(os.Stdin, os.Getenv("TERM"), 0)
// defer r.Close()
// events, _ := r.ReadEvents()
// for _, ev := range events {
// log.Printf("%v", ev)
// }
func NewReader(r io.Reader, termType string, flags int) (*Reader, error) {
d := new(Reader)
cr, err := newCancelreader(r, flags)
if err != nil {
return nil, err
}
d.rd = cr
d.table = buildKeysTable(flags, termType)
d.term = termType
d.parser.flags = flags
return d, nil
}
// SetLogger sets a logger for the reader.
func (d *Reader) SetLogger(l Logger) {
d.logger = l
}
// Read implements [io.Reader].
func (d *Reader) Read(p []byte) (int, error) {
return d.rd.Read(p) //nolint:wrapcheck
}
// Cancel cancels the underlying reader.
func (d *Reader) Cancel() bool {
return d.rd.Cancel()
}
// Close closes the underlying reader.
func (d *Reader) Close() error {
return d.rd.Close() //nolint:wrapcheck
}
func (d *Reader) readEvents() ([]Event, error) {
nb, err := d.rd.Read(d.buf[:])
if err != nil {
return nil, err //nolint:wrapcheck
}
var events []Event
// Combine any partial sequence from previous read with new data
var buf []byte
if len(d.partialSeq) > 0 {
buf = make([]byte, len(d.partialSeq)+nb)
copy(buf, d.partialSeq)
copy(buf[len(d.partialSeq):], d.buf[:nb])
d.partialSeq = nil // clear the partial sequence
} else {
buf = d.buf[:nb]
}
// Lookup table first
if bytes.HasPrefix(buf, []byte{'\x1b'}) {
if k, ok := d.table[string(buf)]; ok {
if d.logger != nil {
d.logger.Printf("input: %q", buf)
}
events = append(events, KeyPressEvent(k))
return events, nil
}
}
var i int
for i < len(buf) {
nb, ev := d.parser.parseSequence(buf[i:])
if d.logger != nil && nb > 0 {
d.logger.Printf("input: %q", buf[i:i+nb])
}
// Handle incomplete sequences - when parseSequence returns (0, nil)
// it means we need more data to complete the sequence
if nb == 0 && ev == nil {
// Store the remaining data for the next read
remaining := len(buf) - i
if remaining > 0 {
d.partialSeq = make([]byte, remaining)
copy(d.partialSeq, buf[i:])
}
break
}
// Handle bracketed-paste
if d.paste != nil {
if _, ok := ev.(PasteEndEvent); !ok {
d.paste = append(d.paste, buf[i])
i++
continue
}
}
switch ev.(type) {
// case UnknownEvent:
// // If the sequence is not recognized by the parser, try looking it up.
// if k, ok := d.table[string(buf[i:i+nb])]; ok {
// ev = KeyPressEvent(k)
// }
case PasteStartEvent:
d.paste = []byte{}
case PasteEndEvent:
// Decode the captured data into runes.
var paste []rune
for len(d.paste) > 0 {
r, w := utf8.DecodeRune(d.paste)
if r != utf8.RuneError {
paste = append(paste, r)
}
d.paste = d.paste[w:]
}
d.paste = nil // reset the buffer
events = append(events, PasteEvent(paste))
case nil:
i++
continue
}
if mevs, ok := ev.(MultiEvent); ok {
events = append(events, []Event(mevs)...)
} else {
events = append(events, ev)
}
i += nb
}
return events, nil
}
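Below is a minimal usage sketch for the Reader removed in this file, written as if it lived in the same input package. It relies only on the API visible in this diff (NewReader, ReadEvents, Close, and the KeyPressEvent and PasteEvent types); the exampleReadLoop name and the logging are illustrative additions, not part of the original code.

package input

import (
	"log"
	"os"
)

// exampleReadLoop is a hypothetical sketch showing how a caller might drive
// the Reader; it is not part of the original package.
func exampleReadLoop() {
	r, err := NewReader(os.Stdin, os.Getenv("TERM"), 0)
	if err != nil {
		log.Fatal(err)
	}
	defer r.Close()
	for {
		// ReadEvents blocks until at least one event has been parsed.
		events, err := r.ReadEvents()
		if err != nil {
			return
		}
		for _, ev := range events {
			switch e := ev.(type) {
			case KeyPressEvent:
				log.Printf("key press: %v", e)
			case PasteEvent:
				log.Printf("paste: %v", e)
			default:
				log.Printf("event: %v", ev)
			}
		}
	}
}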

View file

@ -1,17 +0,0 @@
//go:build !windows
// +build !windows
package input
// ReadEvents reads input events from the terminal.
//
// It reads the events available in the input buffer and returns them.
func (d *Reader) ReadEvents() ([]Event, error) {
return d.readEvents()
}
// parseWin32InputKeyEvent parses a Win32 input key event. The real
// implementation is only available on Windows; this stub returns nil on other
// platforms.
func (p *Parser) parseWin32InputKeyEvent(*win32InputState, uint16, uint16, rune, bool, uint32, uint16) Event {
return nil
}

View file

@ -1,25 +0,0 @@
package input
import (
"io"
"strings"
"testing"
)
func BenchmarkDriver(b *testing.B) {
input := "\x1b\x1b[Ztest\x00\x1b]10;1234/1234/1234\x07\x1b[27;2;27~"
rdr := strings.NewReader(input)
drv, err := NewReader(rdr, "dumb", 0)
if err != nil {
b.Fatalf("could not create driver: %v", err)
}
b.ReportAllocs()
b.ResetTimer()
for i := 0; i < b.N; i++ {
rdr.Reset(input)
if _, err := drv.ReadEvents(); err != nil && err != io.EOF {
b.Errorf("error reading input: %v", err)
}
}
}

View file

@ -1,620 +0,0 @@
//go:build windows
// +build windows
package input
import (
"errors"
"fmt"
"strings"
"time"
"unicode"
"unicode/utf16"
"unicode/utf8"
"github.com/charmbracelet/x/ansi"
xwindows "github.com/charmbracelet/x/windows"
"github.com/muesli/cancelreader"
"golang.org/x/sys/windows"
)
// ReadEvents reads input events from the terminal.
//
// It reads the events available in the input buffer and returns them.
func (d *Reader) ReadEvents() ([]Event, error) {
events, err := d.handleConInput()
if errors.Is(err, errNotConInputReader) {
return d.readEvents()
}
return events, err
}
var errNotConInputReader = fmt.Errorf("handleConInput: not a conInputReader")
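// handleConInput reads events through the Windows Console API when the
// underlying reader is a conInputReader. It polls until at least one input
// record is available, reads the pending records, and parses each one into an
// Event.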
func (d *Reader) handleConInput() ([]Event, error) {
cc, ok := d.rd.(*conInputReader)
if !ok {
return nil, errNotConInputReader
}
var (
events []xwindows.InputRecord
err error
)
for {
// Peek up to 256 events; this allows for escape sequences that are
// reported as a series of key events.
events, err = peekNConsoleInputs(cc.conin, 256)
if cc.isCanceled() {
return nil, cancelreader.ErrCanceled
}
if err != nil {
return nil, fmt.Errorf("peek coninput events: %w", err)
}
if len(events) > 0 {
break
}
// Sleep for a bit to avoid busy waiting.
time.Sleep(10 * time.Millisecond)
}
events, err = readNConsoleInputs(cc.conin, uint32(len(events)))
if cc.isCanceled() {
return nil, cancelreader.ErrCanceled
}
if err != nil {
return nil, fmt.Errorf("read coninput events: %w", err)
}
var evs []Event
for _, event := range events {
if e := d.parser.parseConInputEvent(event, &d.keyState); e != nil {
if multi, ok := e.(MultiEvent); ok {
evs = append(evs, multi...)
} else {
evs = append(evs, e)
}
}
}
return evs, nil
}
func (p *Parser) parseConInputEvent(event xwindows.InputRecord, keyState *win32InputState) Event {
switch event.EventType {
case xwindows.KEY_EVENT:
kevent := event.KeyEvent()
return p.parseWin32InputKeyEvent(keyState, kevent.VirtualKeyCode, kevent.VirtualScanCode,
kevent.Char, kevent.KeyDown, kevent.ControlKeyState, kevent.RepeatCount)
case xwindows.WINDOW_BUFFER_SIZE_EVENT:
wevent := event.WindowBufferSizeEvent()
if wevent.Size.X != keyState.lastWinsizeX || wevent.Size.Y != keyState.lastWinsizeY {
keyState.lastWinsizeX, keyState.lastWinsizeY = wevent.Size.X, wevent.Size.Y
return WindowSizeEvent{
Width: int(wevent.Size.X),
Height: int(wevent.Size.Y),
}
}
case xwindows.MOUSE_EVENT:
mevent := event.MouseEvent()
ev := mouseEvent(keyState.lastMouseBtns, mevent)
keyState.lastMouseBtns = mevent.ButtonState
return ev
case xwindows.FOCUS_EVENT:
fevent := event.FocusEvent()
if fevent.SetFocus {
return FocusEvent{}
}
return BlurEvent{}
case xwindows.MENU_EVENT:
// ignore
}
return nil
}
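// mouseEventButton determines which mouse button changed between the previous
// button state p and the current state s, and whether that change was a
// release.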
func mouseEventButton(p, s uint32) (MouseButton, bool) {
var isRelease bool
button := MouseNone
btn := p ^ s
if btn&s == 0 {
isRelease = true
}
if btn == 0 {
switch {
case s&xwindows.FROM_LEFT_1ST_BUTTON_PRESSED > 0:
button = MouseLeft
case s&xwindows.FROM_LEFT_2ND_BUTTON_PRESSED > 0:
button = MouseMiddle
case s&xwindows.RIGHTMOST_BUTTON_PRESSED > 0:
button = MouseRight
case s&xwindows.FROM_LEFT_3RD_BUTTON_PRESSED > 0:
button = MouseBackward
case s&xwindows.FROM_LEFT_4TH_BUTTON_PRESSED > 0:
button = MouseForward
}
return button, isRelease
}
switch btn {
case xwindows.FROM_LEFT_1ST_BUTTON_PRESSED: // left button
button = MouseLeft
case xwindows.RIGHTMOST_BUTTON_PRESSED: // right button
button = MouseRight
case xwindows.FROM_LEFT_2ND_BUTTON_PRESSED: // middle button
button = MouseMiddle
case xwindows.FROM_LEFT_3RD_BUTTON_PRESSED: // unknown (possibly mouse backward)
button = MouseBackward
case xwindows.FROM_LEFT_4TH_BUTTON_PRESSED: // unknown (possibly mouse forward)
button = MouseForward
}
return button, isRelease
}
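// mouseEvent translates a Windows Console mouse record into a mouse event,
// using the previous button state p to distinguish presses from releases and
// to classify wheel and motion events.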
func mouseEvent(p uint32, e xwindows.MouseEventRecord) (ev Event) {
var mod KeyMod
var isRelease bool
if e.ControlKeyState&(xwindows.LEFT_ALT_PRESSED|xwindows.RIGHT_ALT_PRESSED) != 0 {
mod |= ModAlt
}
if e.ControlKeyState&(xwindows.LEFT_CTRL_PRESSED|xwindows.RIGHT_CTRL_PRESSED) != 0 {
mod |= ModCtrl
}
if e.ControlKeyState&(xwindows.SHIFT_PRESSED) != 0 {
mod |= ModShift
}
m := Mouse{
X: int(e.MousePositon.X),
Y: int(e.MousePositon.Y),
Mod: mod,
}
wheelDirection := int16(highWord(e.ButtonState)) //nolint:gosec
switch e.EventFlags {
case 0, xwindows.DOUBLE_CLICK:
m.Button, isRelease = mouseEventButton(p, e.ButtonState)
case xwindows.MOUSE_WHEELED:
if wheelDirection > 0 {
m.Button = MouseWheelUp
} else {
m.Button = MouseWheelDown
}
case xwindows.MOUSE_HWHEELED:
if wheelDirection > 0 {
m.Button = MouseWheelRight
} else {
m.Button = MouseWheelLeft
}
case xwindows.MOUSE_MOVED:
m.Button, _ = mouseEventButton(p, e.ButtonState)
return MouseMotionEvent(m)
}
if isWheel(m.Button) {
return MouseWheelEvent(m)
} else if isRelease {
return MouseReleaseEvent(m)
}
return MouseClickEvent(m)
}
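// highWord returns the high 16 bits of data. The Console API packs the mouse
// wheel delta into the high word of the button state.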
func highWord(data uint32) uint16 {
return uint16((data & 0xFFFF0000) >> 16) //nolint:gosec
}
func readNConsoleInputs(console windows.Handle, maxEvents uint32) ([]xwindows.InputRecord, error) {
if maxEvents == 0 {
return nil, fmt.Errorf("maxEvents cannot be zero")
}
records := make([]xwindows.InputRecord, maxEvents)
n, err := readConsoleInput(console, records)
return records[:n], err
}
func readConsoleInput(console windows.Handle, inputRecords []xwindows.InputRecord) (uint32, error) {
if len(inputRecords) == 0 {
return 0, fmt.Errorf("size of input record buffer cannot be zero")
}
var read uint32
err := xwindows.ReadConsoleInput(console, &inputRecords[0], uint32(len(inputRecords)), &read) //nolint:gosec
return read, err //nolint:wrapcheck
}
func peekConsoleInput(console windows.Handle, inputRecords []xwindows.InputRecord) (uint32, error) {
if len(inputRecords) == 0 {
return 0, fmt.Errorf("size of input record buffer cannot be zero")
}
var read uint32
err := xwindows.PeekConsoleInput(console, &inputRecords[0], uint32(len(inputRecords)), &read) //nolint:gosec
return read, err //nolint:wrapcheck
}
func peekNConsoleInputs(console windows.Handle, maxEvents uint32) ([]xwindows.InputRecord, error) {
if maxEvents == 0 {
return nil, fmt.Errorf("maxEvents cannot be zero")
}
records := make([]xwindows.InputRecord, maxEvents)
n, err := peekConsoleInput(console, records)
return records[:n], err
}
// parseWin32InputKeyEvent parses a single key event from either the Windows
// Console API or win32-input-mode events. When state is nil, it means this is
// an event from win32-input-mode. Otherwise, it's a key event from the Windows
// Console API and needs a state to decode ANSI escape sequences and utf16
// runes.
func (p *Parser) parseWin32InputKeyEvent(state *win32InputState, vkc uint16, _ uint16, r rune, keyDown bool, cks uint32, repeatCount uint16) (event Event) {
defer func() {
// Respect the repeat count.
if repeatCount > 1 {
var multi MultiEvent
for i := 0; i < int(repeatCount); i++ {
multi = append(multi, event)
}
event = multi
}
}()
if state != nil {
defer func() {
state.lastCks = cks
}()
}
var utf8Buf [utf8.UTFMax]byte
var key Key
if state != nil && state.utf16Half {
state.utf16Half = false
state.utf16Buf[1] = r
codepoint := utf16.DecodeRune(state.utf16Buf[0], state.utf16Buf[1])
rw := utf8.EncodeRune(utf8Buf[:], codepoint)
r, _ = utf8.DecodeRune(utf8Buf[:rw])
key.Code = r
key.Text = string(r)
key.Mod = translateControlKeyState(cks)
key = ensureKeyCase(key, cks)
if keyDown {
return KeyPressEvent(key)
}
return KeyReleaseEvent(key)
}
var baseCode rune
switch {
case vkc == 0:
// Zero means this event is either an escape code or a unicode
// codepoint.
if state != nil && state.ansiIdx == 0 && r != ansi.ESC {
// This is a unicode codepoint.
baseCode = r
break
}
if state != nil {
// Collect ANSI escape code.
state.ansiBuf[state.ansiIdx] = byte(r)
state.ansiIdx++
if state.ansiIdx <= 2 {
// We haven't received enough bytes to determine if this is an
// ANSI escape code.
return nil
}
if r == ansi.ESC {
// We're expecting a closing String Terminator [ansi.ST].
return nil
}
n, event := p.parseSequence(state.ansiBuf[:state.ansiIdx])
if n == 0 {
return nil
}
if _, ok := event.(UnknownEvent); ok {
return nil
}
state.ansiIdx = 0
return event
}
case vkc == xwindows.VK_BACK:
baseCode = KeyBackspace
case vkc == xwindows.VK_TAB:
baseCode = KeyTab
case vkc == xwindows.VK_RETURN:
baseCode = KeyEnter
case vkc == xwindows.VK_SHIFT:
//nolint:nestif
if cks&xwindows.SHIFT_PRESSED != 0 {
if cks&xwindows.ENHANCED_KEY != 0 {
baseCode = KeyRightShift
} else {
baseCode = KeyLeftShift
}
} else if state != nil {
if state.lastCks&xwindows.SHIFT_PRESSED != 0 {
if state.lastCks&xwindows.ENHANCED_KEY != 0 {
baseCode = KeyRightShift
} else {
baseCode = KeyLeftShift
}
}
}
case vkc == xwindows.VK_CONTROL:
if cks&xwindows.LEFT_CTRL_PRESSED != 0 {
baseCode = KeyLeftCtrl
} else if cks&xwindows.RIGHT_CTRL_PRESSED != 0 {
baseCode = KeyRightCtrl
} else if state != nil {
if state.lastCks&xwindows.LEFT_CTRL_PRESSED != 0 {
baseCode = KeyLeftCtrl
} else if state.lastCks&xwindows.RIGHT_CTRL_PRESSED != 0 {
baseCode = KeyRightCtrl
}
}
case vkc == xwindows.VK_MENU:
if cks&xwindows.LEFT_ALT_PRESSED != 0 {
baseCode = KeyLeftAlt
} else if cks&xwindows.RIGHT_ALT_PRESSED != 0 {
baseCode = KeyRightAlt
} else if state != nil {
if state.lastCks&xwindows.LEFT_ALT_PRESSED != 0 {
baseCode = KeyLeftAlt
} else if state.lastCks&xwindows.RIGHT_ALT_PRESSED != 0 {
baseCode = KeyRightAlt
}
}
case vkc == xwindows.VK_PAUSE:
baseCode = KeyPause
case vkc == xwindows.VK_CAPITAL:
baseCode = KeyCapsLock
case vkc == xwindows.VK_ESCAPE:
baseCode = KeyEscape
case vkc == xwindows.VK_SPACE:
baseCode = KeySpace
case vkc == xwindows.VK_PRIOR:
baseCode = KeyPgUp
case vkc == xwindows.VK_NEXT:
baseCode = KeyPgDown
case vkc == xwindows.VK_END:
baseCode = KeyEnd
case vkc == xwindows.VK_HOME:
baseCode = KeyHome
case vkc == xwindows.VK_LEFT:
baseCode = KeyLeft
case vkc == xwindows.VK_UP:
baseCode = KeyUp
case vkc == xwindows.VK_RIGHT:
baseCode = KeyRight
case vkc == xwindows.VK_DOWN:
baseCode = KeyDown
case vkc == xwindows.VK_SELECT:
baseCode = KeySelect
case vkc == xwindows.VK_SNAPSHOT:
baseCode = KeyPrintScreen
case vkc == xwindows.VK_INSERT:
baseCode = KeyInsert
case vkc == xwindows.VK_DELETE:
baseCode = KeyDelete
case vkc >= '0' && vkc <= '9':
baseCode = rune(vkc)
case vkc >= 'A' && vkc <= 'Z':
// Convert to lowercase.
baseCode = rune(vkc) + 32
case vkc == xwindows.VK_LWIN:
baseCode = KeyLeftSuper
case vkc == xwindows.VK_RWIN:
baseCode = KeyRightSuper
case vkc == xwindows.VK_APPS:
baseCode = KeyMenu
case vkc >= xwindows.VK_NUMPAD0 && vkc <= xwindows.VK_NUMPAD9:
baseCode = rune(vkc-xwindows.VK_NUMPAD0) + KeyKp0
case vkc == xwindows.VK_MULTIPLY:
baseCode = KeyKpMultiply
case vkc == xwindows.VK_ADD:
baseCode = KeyKpPlus
case vkc == xwindows.VK_SEPARATOR:
baseCode = KeyKpComma
case vkc == xwindows.VK_SUBTRACT:
baseCode = KeyKpMinus
case vkc == xwindows.VK_DECIMAL:
baseCode = KeyKpDecimal
case vkc == xwindows.VK_DIVIDE:
baseCode = KeyKpDivide
case vkc >= xwindows.VK_F1 && vkc <= xwindows.VK_F24:
baseCode = rune(vkc-xwindows.VK_F1) + KeyF1
case vkc == xwindows.VK_NUMLOCK:
baseCode = KeyNumLock
case vkc == xwindows.VK_SCROLL:
baseCode = KeyScrollLock
case vkc == xwindows.VK_LSHIFT:
baseCode = KeyLeftShift
case vkc == xwindows.VK_RSHIFT:
baseCode = KeyRightShift
case vkc == xwindows.VK_LCONTROL:
baseCode = KeyLeftCtrl
case vkc == xwindows.VK_RCONTROL:
baseCode = KeyRightCtrl
case vkc == xwindows.VK_LMENU:
baseCode = KeyLeftAlt
case vkc == xwindows.VK_RMENU:
baseCode = KeyRightAlt
case vkc == xwindows.VK_VOLUME_MUTE:
baseCode = KeyMute
case vkc == xwindows.VK_VOLUME_DOWN:
baseCode = KeyLowerVol
case vkc == xwindows.VK_VOLUME_UP:
baseCode = KeyRaiseVol
case vkc == xwindows.VK_MEDIA_NEXT_TRACK:
baseCode = KeyMediaNext
case vkc == xwindows.VK_MEDIA_PREV_TRACK:
baseCode = KeyMediaPrev
case vkc == xwindows.VK_MEDIA_STOP:
baseCode = KeyMediaStop
case vkc == xwindows.VK_MEDIA_PLAY_PAUSE:
baseCode = KeyMediaPlayPause
case vkc == xwindows.VK_OEM_1:
baseCode = ';'
case vkc == xwindows.VK_OEM_PLUS:
baseCode = '+'
case vkc == xwindows.VK_OEM_COMMA:
baseCode = ','
case vkc == xwindows.VK_OEM_MINUS:
baseCode = '-'
case vkc == xwindows.VK_OEM_PERIOD:
baseCode = '.'
case vkc == xwindows.VK_OEM_2:
baseCode = '/'
case vkc == xwindows.VK_OEM_3:
baseCode = '`'
case vkc == xwindows.VK_OEM_4:
baseCode = '['
case vkc == xwindows.VK_OEM_5:
baseCode = '\\'
case vkc == xwindows.VK_OEM_6:
baseCode = ']'
case vkc == xwindows.VK_OEM_7:
baseCode = '\''
}
if utf16.IsSurrogate(r) {
if state != nil {
state.utf16Buf[0] = r
state.utf16Half = true
}
return nil
}
// AltGr is left ctrl + right alt. On non-US keyboards, this is used to type
// special characters and produce printable events.
// XXX: Should this be a KeyMod?
altGr := cks&(xwindows.LEFT_CTRL_PRESSED|xwindows.RIGHT_ALT_PRESSED) == xwindows.LEFT_CTRL_PRESSED|xwindows.RIGHT_ALT_PRESSED
var text string
keyCode := baseCode
if !unicode.IsControl(r) {
rw := utf8.EncodeRune(utf8Buf[:], r)
keyCode, _ = utf8.DecodeRune(utf8Buf[:rw])
if unicode.IsPrint(keyCode) && (cks == 0 ||
cks == xwindows.SHIFT_PRESSED ||
cks == xwindows.CAPSLOCK_ON ||
altGr) {
// If no modifier is held, or only shift, caps lock, or AltGr is
// active, the key event is printable, i.e. [text] is not empty.
text = string(keyCode)
}
}
key.Code = keyCode
key.Text = text
key.Mod = translateControlKeyState(cks)
key.BaseCode = baseCode
key = ensureKeyCase(key, cks)
if keyDown {
return KeyPressEvent(key)
}
return KeyReleaseEvent(key)
}
// ensureKeyCase ensures that the key's text is in the correct case based on the
// control key state.
func ensureKeyCase(key Key, cks uint32) Key {
if len(key.Text) == 0 {
return key
}
hasShift := cks&xwindows.SHIFT_PRESSED != 0
hasCaps := cks&xwindows.CAPSLOCK_ON != 0
if hasShift || hasCaps {
if unicode.IsLower(key.Code) {
key.ShiftedCode = unicode.ToUpper(key.Code)
key.Text = string(key.ShiftedCode)
}
} else {
if unicode.IsUpper(key.Code) {
key.ShiftedCode = unicode.ToLower(key.Code)
key.Text = string(key.ShiftedCode)
}
}
return key
}
// translateControlKeyState translates the control key state from the Windows
// Console API into a Mod bitmask.
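// For example, a control key state with both SHIFT_PRESSED and
// LEFT_CTRL_PRESSED set translates to ModCtrl | ModShift.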
func translateControlKeyState(cks uint32) (m KeyMod) {
if cks&xwindows.LEFT_CTRL_PRESSED != 0 || cks&xwindows.RIGHT_CTRL_PRESSED != 0 {
m |= ModCtrl
}
if cks&xwindows.LEFT_ALT_PRESSED != 0 || cks&xwindows.RIGHT_ALT_PRESSED != 0 {
m |= ModAlt
}
if cks&xwindows.SHIFT_PRESSED != 0 {
m |= ModShift
}
if cks&xwindows.CAPSLOCK_ON != 0 {
m |= ModCapsLock
}
if cks&xwindows.NUMLOCK_ON != 0 {
m |= ModNumLock
}
if cks&xwindows.SCROLLLOCK_ON != 0 {
m |= ModScrollLock
}
return
}
//nolint:unused
func keyEventString(vkc, sc uint16, r rune, keyDown bool, cks uint32, repeatCount uint16) string {
var s strings.Builder
s.WriteString("vkc: ")
s.WriteString(fmt.Sprintf("%d, 0x%02x", vkc, vkc))
s.WriteString(", sc: ")
s.WriteString(fmt.Sprintf("%d, 0x%02x", sc, sc))
s.WriteString(", r: ")
s.WriteString(fmt.Sprintf("%q", r))
s.WriteString(", down: ")
s.WriteString(fmt.Sprintf("%v", keyDown))
s.WriteString(", cks: [")
if cks&xwindows.LEFT_ALT_PRESSED != 0 {
s.WriteString("left alt, ")
}
if cks&xwindows.RIGHT_ALT_PRESSED != 0 {
s.WriteString("right alt, ")
}
if cks&xwindows.LEFT_CTRL_PRESSED != 0 {
s.WriteString("left ctrl, ")
}
if cks&xwindows.RIGHT_CTRL_PRESSED != 0 {
s.WriteString("right ctrl, ")
}
if cks&xwindows.SHIFT_PRESSED != 0 {
s.WriteString("shift, ")
}
if cks&xwindows.CAPSLOCK_ON != 0 {
s.WriteString("caps lock, ")
}
if cks&xwindows.NUMLOCK_ON != 0 {
s.WriteString("num lock, ")
}
if cks&xwindows.SCROLLLOCK_ON != 0 {
s.WriteString("scroll lock, ")
}
if cks&xwindows.ENHANCED_KEY != 0 {
s.WriteString("enhanced key, ")
}
s.WriteString("], repeat count: ")
s.WriteString(fmt.Sprintf("%d", repeatCount))
return s.String()
}

View file

@ -1,271 +0,0 @@
package input
import (
"encoding/binary"
"image/color"
"reflect"
"testing"
"unicode/utf16"
"github.com/charmbracelet/x/ansi"
xwindows "github.com/charmbracelet/x/windows"
"golang.org/x/sys/windows"
)
func TestWindowsInputEvents(t *testing.T) {
cases := []struct {
name string
events []xwindows.InputRecord
expected []Event
sequence bool // indicates that the input events form an ANSI escape sequence or a utf16 pair
}{
{
name: "single key event",
events: []xwindows.InputRecord{
encodeKeyEvent(xwindows.KeyEventRecord{
KeyDown: true,
Char: 'a',
VirtualKeyCode: 'A',
}),
},
expected: []Event{KeyPressEvent{Code: 'a', BaseCode: 'a', Text: "a"}},
},
{
name: "single key event with control key",
events: []xwindows.InputRecord{
encodeKeyEvent(xwindows.KeyEventRecord{
KeyDown: true,
Char: 'a',
VirtualKeyCode: 'A',
ControlKeyState: xwindows.LEFT_CTRL_PRESSED,
}),
},
expected: []Event{KeyPressEvent{Code: 'a', BaseCode: 'a', Mod: ModCtrl}},
},
{
name: "escape alt key event",
events: []xwindows.InputRecord{
encodeKeyEvent(xwindows.KeyEventRecord{
KeyDown: true,
Char: ansi.ESC,
VirtualKeyCode: ansi.ESC,
ControlKeyState: xwindows.LEFT_ALT_PRESSED,
}),
},
expected: []Event{KeyPressEvent{Code: ansi.ESC, BaseCode: ansi.ESC, Mod: ModAlt}},
},
{
name: "single shifted key event",
events: []xwindows.InputRecord{
encodeKeyEvent(xwindows.KeyEventRecord{
KeyDown: true,
Char: 'A',
VirtualKeyCode: 'A',
ControlKeyState: xwindows.SHIFT_PRESSED,
}),
},
expected: []Event{KeyPressEvent{Code: 'A', BaseCode: 'a', Text: "A", Mod: ModShift}},
},
{
name: "utf16 rune",
events: encodeUtf16Rune('😊'), // smiley emoji '😊'
expected: []Event{
KeyPressEvent{Code: '😊', Text: "😊"},
},
sequence: true,
},
{
name: "background color response",
events: encodeSequence("\x1b]11;rgb:ff/ff/ff\x07"),
expected: []Event{BackgroundColorEvent{Color: color.RGBA{R: 0xff, G: 0xff, B: 0xff, A: 0xff}}},
sequence: true,
},
{
name: "st terminated background color response",
events: encodeSequence("\x1b]11;rgb:ffff/ffff/ffff\x1b\\"),
expected: []Event{BackgroundColorEvent{Color: color.RGBA{R: 0xff, G: 0xff, B: 0xff, A: 0xff}}},
sequence: true,
},
{
name: "simple mouse event",
events: []xwindows.InputRecord{
encodeMouseEvent(xwindows.MouseEventRecord{
MousePositon: windows.Coord{X: 10, Y: 20},
ButtonState: xwindows.FROM_LEFT_1ST_BUTTON_PRESSED,
EventFlags: 0,
}),
encodeMouseEvent(xwindows.MouseEventRecord{
MousePositon: windows.Coord{X: 10, Y: 20},
EventFlags: 0,
}),
},
expected: []Event{
MouseClickEvent{Button: MouseLeft, X: 10, Y: 20},
MouseReleaseEvent{Button: MouseLeft, X: 10, Y: 20},
},
},
{
name: "focus event",
events: []xwindows.InputRecord{
encodeFocusEvent(xwindows.FocusEventRecord{
SetFocus: true,
}),
encodeFocusEvent(xwindows.FocusEventRecord{
SetFocus: false,
}),
},
expected: []Event{
FocusEvent{},
BlurEvent{},
},
},
{
name: "window size event",
events: []xwindows.InputRecord{
encodeWindowBufferSizeEvent(xwindows.WindowBufferSizeRecord{
Size: windows.Coord{X: 10, Y: 20},
}),
},
expected: []Event{
WindowSizeEvent{Width: 10, Height: 20},
},
},
}
// p is the parser to parse the input events
var p Parser
// keep track of the state of the driver to handle ANSI sequences and utf16
var state win32InputState
for _, tc := range cases {
t.Run(tc.name, func(t *testing.T) {
if tc.sequence {
var got Event
for _, ev := range tc.events {
if ev.EventType != xwindows.KEY_EVENT {
t.Fatalf("expected key event, got %v", ev.EventType)
}
key := ev.KeyEvent()
got = p.parseWin32InputKeyEvent(&state, key.VirtualKeyCode, key.VirtualScanCode, key.Char, key.KeyDown, key.ControlKeyState, key.RepeatCount)
}
if len(tc.expected) != 1 {
t.Fatalf("expected 1 event, got %d", len(tc.expected))
}
if !reflect.DeepEqual(got, tc.expected[0]) {
t.Errorf("expected %v, got %v", tc.expected[0], got)
}
} else {
if len(tc.events) != len(tc.expected) {
t.Fatalf("expected %d events, got %d", len(tc.expected), len(tc.events))
}
for j, ev := range tc.events {
got := p.parseConInputEvent(ev, &state)
if !reflect.DeepEqual(got, tc.expected[j]) {
t.Errorf("expected %#v, got %#v", tc.expected[j], got)
}
}
}
})
}
}
func boolToUint32(b bool) uint32 {
if b {
return 1
}
return 0
}
func encodeMenuEvent(menu xwindows.MenuEventRecord) xwindows.InputRecord {
var bts [16]byte
binary.LittleEndian.PutUint32(bts[0:4], menu.CommandID)
return xwindows.InputRecord{
EventType: xwindows.MENU_EVENT,
Event: bts,
}
}
func encodeWindowBufferSizeEvent(size xwindows.WindowBufferSizeRecord) xwindows.InputRecord {
var bts [16]byte
binary.LittleEndian.PutUint16(bts[0:2], uint16(size.Size.X))
binary.LittleEndian.PutUint16(bts[2:4], uint16(size.Size.Y))
return xwindows.InputRecord{
EventType: xwindows.WINDOW_BUFFER_SIZE_EVENT,
Event: bts,
}
}
func encodeFocusEvent(focus xwindows.FocusEventRecord) xwindows.InputRecord {
var bts [16]byte
if focus.SetFocus {
bts[0] = 1
}
return xwindows.InputRecord{
EventType: xwindows.FOCUS_EVENT,
Event: bts,
}
}
func encodeMouseEvent(mouse xwindows.MouseEventRecord) xwindows.InputRecord {
var bts [16]byte
binary.LittleEndian.PutUint16(bts[0:2], uint16(mouse.MousePositon.X))
binary.LittleEndian.PutUint16(bts[2:4], uint16(mouse.MousePositon.Y))
binary.LittleEndian.PutUint32(bts[4:8], mouse.ButtonState)
binary.LittleEndian.PutUint32(bts[8:12], mouse.ControlKeyState)
binary.LittleEndian.PutUint32(bts[12:16], mouse.EventFlags)
return xwindows.InputRecord{
EventType: xwindows.MOUSE_EVENT,
Event: bts,
}
}
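// encodeKeyEvent packs a KeyEventRecord into the 16-byte raw event payload of
// an InputRecord, mirroring the layout of the Windows KEY_EVENT_RECORD.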
func encodeKeyEvent(key xwindows.KeyEventRecord) xwindows.InputRecord {
var bts [16]byte
binary.LittleEndian.PutUint32(bts[0:4], boolToUint32(key.KeyDown))
binary.LittleEndian.PutUint16(bts[4:6], key.RepeatCount)
binary.LittleEndian.PutUint16(bts[6:8], key.VirtualKeyCode)
binary.LittleEndian.PutUint16(bts[8:10], key.VirtualScanCode)
binary.LittleEndian.PutUint16(bts[10:12], uint16(key.Char))
binary.LittleEndian.PutUint32(bts[12:16], key.ControlKeyState)
return xwindows.InputRecord{
EventType: xwindows.KEY_EVENT,
Event: bts,
}
}
// encodeSequence encodes a string of ANSI escape sequences into a slice of
// Windows input key records.
func encodeSequence(s string) (evs []xwindows.InputRecord) {
var state byte
for len(s) > 0 {
seq, _, n, newState := ansi.DecodeSequence(s, state, nil)
for i := 0; i < n; i++ {
evs = append(evs, encodeKeyEvent(xwindows.KeyEventRecord{
KeyDown: true,
Char: rune(seq[i]),
}))
}
state = newState
s = s[n:]
}
return
}
func encodeUtf16Rune(r rune) []xwindows.InputRecord {
r1, r2 := utf16.EncodeRune(r)
return encodeUtf16Pair(r1, r2)
}
func encodeUtf16Pair(r1, r2 rune) []xwindows.InputRecord {
return []xwindows.InputRecord{
encodeKeyEvent(xwindows.KeyEventRecord{
KeyDown: true,
Char: r1,
}),
encodeKeyEvent(xwindows.KeyEventRecord{
KeyDown: true,
Char: r2,
}),
}
}

View file

@ -1,9 +0,0 @@
package input
// FocusEvent represents a terminal focus event.
// This occurs when the terminal gains focus.
type FocusEvent struct{}
// BlurEvent represents a terminal blur event.
// This occurs when the terminal loses focus.
type BlurEvent struct{}

Some files were not shown because too many files have changed in this diff.