Compare commits

..

172 commits

Author SHA1 Message Date
Dax Raad
aadca5013a fix share page timestamps
2025-07-11 21:49:20 -04:00
Dax Raad
5c3d490e59 share page hide step-finish events 2025-07-11 21:45:56 -04:00
Dax Raad
1254f48135 fix issue preventing things from working when node_modules or package.json present in ~/ 2025-07-11 21:09:39 -04:00
Dax Raad
1729c310d9 switch global config to ~/.config/opencode/opencode.json 2025-07-11 20:51:23 -04:00
Dax Raad
0130190bbd docs: add model docs 2025-07-11 20:33:06 -04:00
Aiden Cline
97a31ddffc
tweak: plan interactions should match web (TUI) (#895) 2025-07-11 18:03:22 -04:00
zWing
3249420ad1
fix: avoid overwriting the provider.option.baseURL (#880) 2025-07-11 18:01:28 -04:00
Dax Raad
4bb8536d34 introduce cache version concept for auto cleanup when breaking cache changes happen 2025-07-11 17:50:49 -04:00
Jay
c73d4a137e
docs: Update troubleshooting.mdx 2025-07-11 17:50:25 -04:00
Dax Raad
57ac8f2741 wip: stats 2025-07-11 17:37:41 -04:00
Jay V
2f1acee5a1 docs: share page add time footer back 2025-07-11 14:24:20 -04:00
Jay V
9ca54020ac docs: share page mobile bugs 2025-07-11 14:24:20 -04:00
Jay V
f7d44b178b docs: share fix mobile diffs 2025-07-11 14:24:20 -04:00
Sergii Kozak
b4950a157c
fix(session): add fallback for undefined output token limit (#860)
Co-authored-by: opencode <noreply@opencode.ai>
2025-07-11 10:55:13 -04:00
alexz
dfbef066c7
fix: ENAMETOOLONG: name too long when adding custom mode (#881) 2025-07-11 10:54:52 -04:00
GitHub Action
26fd76fbee ignore: update download stats 2025-07-11 2025-07-11 12:04:08 +00:00
adamdotdevin
04769d8a26
fix(tui): help commands bg color 2025-07-11 06:03:21 -05:00
adamdotdevin
34b576d9b5
fix(tui): don't include /mode trigger 2025-07-11 06:01:51 -05:00
adamdotdevin
22b244f847
fix(tui): actually fix mouse ansi codes leaking 2025-07-11 06:00:20 -05:00
Aiden Cline
7e1fc275e7
fix: avoid worker exception, graceful 404 (#869)
2025-07-11 04:55:56 -05:00
Frank
3b9b391320 wip: github actions
2025-07-11 06:55:13 +08:00
Frank
766bfd025c wip: github actions 2025-07-11 05:23:24 +08:00
Jay V
c7f30e1065 docs: share page fix terminal part 2025-07-10 17:21:21 -04:00
Frank
1c4fd7f28f Api: add endpoint for getting github app token 2025-07-11 05:01:27 +08:00
adamdotdevin
85805d2c38
fix(tui): handle SIGTERM, closes #319 2025-07-10 15:59:03 -05:00
Timo Clasen
982cb3e71a
fix(tui): center help dialog (#853) 2025-07-10 15:56:19 -05:00
adamdotdevin
294d0e7ee3
fix(tui): mouse wheel ansi codes leaking into editor 2025-07-10 15:49:58 -05:00
Jay V
8be1ca836c docs: fix diag styles 2025-07-10 16:38:51 -04:00
Jay V
2e5f96fa41 docs: share page attachment 2025-07-10 16:38:51 -04:00
Dax Raad
c056b0add9 add step finish part 2025-07-10 16:25:38 -04:00
Dax Raad
b00bb3c083 run: properly close session.list 2025-07-10 16:13:01 -04:00
Dax Raad
d9befd3aa6 disable filewatcher, fixes file descriptor leak 2025-07-10 15:58:45 -04:00
Dax Raad
49de703ba1 config: escape file: string content 2025-07-10 15:38:58 -04:00
Dax Raad
22988894c8 ci: slow down stats 2025-07-10 15:31:06 -04:00
adamdotdevin
34b1754f25
docs: clipboard requirements on linux 2025-07-10 13:12:37 -05:00
adamdotdevin
54fe3504ba
feat(tui): accent editor border on leader key 2025-07-10 12:57:22 -05:00
Jay V
d2c862e32d docs: edit local models 2025-07-10 13:49:24 -04:00
Jay V
afc53afb35 docs: edit mode 2025-07-10 13:29:37 -04:00
Gabriel Garrett
b56e49c5dc
Adds real example in docs of how to configure custom provider (#840)
2025-07-10 13:29:30 -04:00
Aiden Cline
8b2a909e1f
fix: encode & decode file paths (#843) 2025-07-10 11:19:54 -05:00
Jay V
e9c954d45e docs: add modes to sidebar 2025-07-10 12:07:44 -04:00
Jay V
6f449d13af docs: add modes to sidebar 2025-07-10 12:07:18 -04:00
Dax Raad
6e375bef0d docs: modes 2025-07-10 11:53:28 -04:00
Dax Raad
67106a6967 docs: add config variable docs 2025-07-10 11:48:55 -04:00
Dax Raad
b5d690620d support env and file pointers in config 2025-07-10 11:45:31 -04:00
Dax Raad
9db3ce1d0b opencode run respects mode 2025-07-10 11:28:28 -04:00
Dax Raad
1cc55b68ef wip: scrap 2025-07-10 11:25:37 -04:00
Dax Raad
469f667774 set max output token limit to 32_000 2025-07-10 11:25:37 -04:00
adamdottv
6603d9a9f0
feat: --mode flag passed to tui 2025-07-10 10:19:25 -05:00
adamdottv
5dc1920a4c
feat: mode flag in cli run command 2025-07-10 10:13:15 -05:00
adamdottv
d3e5f3f3a8
feat(tui): add token and cost info to session header 2025-07-10 10:06:51 -05:00
adamdottv
ce4cb820f7
feat(tui): modes 2025-07-10 10:06:51 -05:00
Dax Raad
ba5be6b625 make LSP lazy again 2025-07-10 09:37:40 -04:00
adamdottv
f95c3f4177
fix(tui): fouc in textarea on app load 2025-07-10 08:20:17 -05:00
adamdottv
d2b1307bff
fix(tui): textarea cursor sync issues with attachments 2025-07-10 07:49:36 -05:00
adamdottv
b40ba32adc
fix(tui): textarea issues 2025-07-10 07:38:57 -05:00
GitHub Action
ce0cebb7d7 ignore: update download stats 2025-07-10 2025-07-10 12:04:15 +00:00
Dax Raad
f478f89a68 temporary grok 4 patch 2025-07-10 07:57:55 -04:00
Dax Raad
85d95f0f2b disable lsp on non-git folders 2025-07-10 07:39:02 -04:00
Dax Raad
1515efc77c fix session is busy error 2025-07-10 07:27:03 -04:00
Josh Medeski
6d393759e1
feat(tui): substitute cwd home path on status bar (#808) 2025-07-10 06:12:19 -05:00
Adi Yeroslav
a1701678cd
feat(tui): /editor - change the auto-send behavior to put content in input box instead (#827) 2025-07-10 05:57:52 -05:00
Timo Clasen
c411a26d6f
feat(tui): hide cost if using subscription model (#828) 2025-07-10 05:56:36 -05:00
adamdottv
85dbfeb314
feat(tui): @symbol attachments 2025-07-10 05:53:00 -05:00
Dax Raad
085c0e4e2b respect go.work when spawning LSP
2025-07-09 22:54:47 -04:00
Dax Raad
8404a97c3e better detection of prettier formatter 2025-07-09 22:37:31 -04:00
Dax Raad
0ee3b1ede2 do not wait for LSP to be fully ready 2025-07-09 21:59:38 -04:00
Dax Raad
a826936702 modes concept 2025-07-09 21:59:38 -04:00
Jay V
fd4a5d5a63 docs: share doc edit
2025-07-09 20:26:31 -04:00
Jay V
69cf1d7b7e docs: share doc 2025-07-09 20:24:09 -04:00
Jay V
8e0a1d1167 docs: edit troubleshooting 2025-07-09 19:55:14 -04:00
Timo Clasen
f22021187d
feat(tui): treat pasted text file paths as file references (#809) 2025-07-09 18:37:39 -05:00
Jay V
febecc348a docs: enterprise doc 2025-07-09 15:46:57 -04:00
Jay V
c5ccfc3e94 docs: share page last part fix 2025-07-09 15:46:57 -04:00
Mike Wallio
1f6efc6b94
Add gpt-4.1 beast prompt (#778)
Co-authored-by: Dax Raad <d@ironbay.co>
2025-07-09 12:11:54 -04:00
Frank Denis
727fe6f942
LSP: fix SimpleRoots to actually search in the root directory (#795) 2025-07-09 10:35:06 -05:00
Dax Raad
a91e79382e ci: remove checked in config.schema.json 2025-07-09 11:30:42 -04:00
Dax Raad
5c626e0a2f ci: generate config schema as part of build
2025-07-09 11:25:58 -04:00
adamdottv
8e9e383219
chore: troubleshooting docs 2025-07-09 10:12:36 -05:00
Dax Raad
f383008cc1 lsp: spawn only a single tsserver in project root 2025-07-09 11:06:44 -04:00
adamdottv
303ade25ed
feat: discord redirect 2025-07-09 10:01:42 -05:00
adamdottv
53f8e7850e
feat: configurable log levels 2025-07-09 10:00:03 -05:00
adamdottv
ca8ce88354
feat(tui): move logging to server logs 2025-07-09 08:16:10 -05:00
adamdottv
37a86439c4
fix(tui): don't panic on missing linux clipboard tool 2025-07-09 06:51:58 -05:00
adamdottv
269b43f4de
fix(tui): markdown wrapping off sometimes 2025-07-09 06:41:53 -05:00
adamdottv
3f25e5bf86
chore: internal clipboard package 2025-07-09 04:55:24 -05:00
Aiden Cline
67765fa47c
tweak: keep completion options open when trigger is still present (#789) 2025-07-09 04:42:31 -05:00
adamdottv
58b1c58bc5
fix(tui): clear command priority
2025-07-08 19:26:50 -05:00
Dax Raad
d80badc50f ci: ignore chore commits 2025-07-08 20:05:33 -04:00
Dax Raad
75279e5ccf wip: symbols endpoint 2025-07-08 20:05:33 -04:00
Yihui Khuu
7893b84614
Add debounce before exit when using non-leader exit command (#759) 2025-07-08 18:53:38 -05:00
Dax Raad
cfc715bd48 wip: remove excess import 2025-07-08 19:51:09 -04:00
adamdottv
39bcba85a9
chore: vendor clipboard into go package 2025-07-08 18:48:40 -05:00
adamdottv
da3df51316
chore: remove clipboard temp 2025-07-08 18:47:59 -05:00
adamdottv
12190e4efc
chore: vendor clipboard into go package 2025-07-08 18:46:42 -05:00
Aiden Cline
d2a9b2f64a
fix: documentation typo (#781) 2025-07-08 18:30:46 -05:00
adamdottv
aacadd8a8a
fix(tui): panic when reading/writing clipboard on linux 2025-07-08 18:29:45 -05:00
Jay V
969154a473 docs: share page image
2025-07-08 19:24:21 -04:00
Jay V
4d6ca3fab1 docs: share page many model case 2025-07-08 19:08:33 -04:00
Dax Raad
00ea5082e7 add typescript lsp timeout if it fails to start 2025-07-08 18:33:12 -04:00
Dax Raad
4a878b88c0 properly load typescript lsp in subpaths 2025-07-08 18:18:45 -04:00
Dax Raad
6de955847c big rework of LSP system 2025-07-08 18:14:49 -04:00
Jay V
3ba5d528b4 docs: share bugs 2025-07-08 18:14:36 -04:00
Jay V
f99e2b3429 docs: share error part 2025-07-08 18:00:08 -04:00
Jay V
7e4e6f6e51 docs: share page bugs 2025-07-08 17:18:38 -04:00
Jay V
0514f3f43b docs: share image model 2025-07-08 17:18:38 -04:00
Timo Clasen
1e07384364
fix: make compact command interruptible (#691)
Co-authored-by: GitHub Action <action@github.com>
2025-07-08 15:37:25 -05:00
strager
4c4739c422
fix(tool): fix ripgrep invocation on Windows (#700)
Co-authored-by: Adam <2363879+adamdotdevin@users.noreply.github.com>
2025-07-08 15:36:26 -05:00
Rami Chowdhury
2d8b90a6ff
feat(storage): ensure storage directory exists and handle paths correctly (#771) 2025-07-08 15:34:11 -05:00
Robb Currall
a2fa7ffa42
fix: support cancelled task state (#775) 2025-07-08 15:33:39 -05:00
Frank Denis
f7d6175283
Add support for the Zig Language Server (ZLS) (#756) 2025-07-08 15:31:11 -05:00
Tommy
9ed187ee52
docs: add terminal requirements (#708) 2025-07-08 15:30:05 -05:00
Gal Schlezinger
14d81e574b
[config json schema] declare default values and examples for in-ide documentation (#754) 2025-07-08 15:29:07 -05:00
adamdottv
6efe8cc8df
fix: env has to be string 2025-07-08 14:59:03 -05:00
adamdottv
daa5fc916a
fix(tui): pasting causes panic on macos 2025-07-08 14:57:17 -05:00
adamdottv
c659496b96
fix(tui): model/provider arg parsing 2025-07-08 14:11:57 -05:00
Timo Clasen
21fbf21cb6
fix(copilot): add vision request header (#773) 2025-07-08 14:01:54 -05:00
adamdottv
f31cbf2744
fix: image reading 2025-07-08 13:02:13 -05:00
Aiden Cline
8322f18e03
fix: display errors when using opencode run ... (#751)
2025-07-08 10:38:11 -05:00
adamdottv
562bdb95e2
fix: include symlinks in ripgrep searches 2025-07-08 10:02:19 -05:00
Dax
a57ce8365d
Update STATS.md 2025-07-08 10:30:02 -04:00
adamdottv
0da83ae67e
feat(tui): command aliases 2025-07-08 08:20:55 -05:00
adamdottv
662d022a48
feat(tui): paste images and pdfs 2025-07-08 08:09:01 -05:00
GitHub Action
9efef03919 ignore: update download stats 2025-07-08 2025-07-08 12:04:27 +00:00
GitHub Action
7a9fb3fa92 ignore: update download stats 2025-07-08 2025-07-08 10:51:06 +00:00
adamdottv
ea96ead346
feat(tui): handle --model and --prompt flags 2025-07-08 05:50:18 -05:00
Dax Raad
6100a77b85 start file watcher only for tui
2025-07-07 21:05:04 -04:00
Dax Raad
c7a59ee2b1 better handling of aborting sessions 2025-07-07 20:59:00 -04:00
Jay V
a272b58fe9 docs: intro
2025-07-07 17:41:46 -04:00
Dax Raad
9948fcf1b6 fix crash when running on new project 2025-07-07 17:39:52 -04:00
Dax Raad
0d50c867ff fix mcp tools corrupting session 2025-07-07 17:05:16 -04:00
Dax Raad
27f7e02f12 run: truncate prompt 2025-07-07 16:41:42 -04:00
Jay V
0f93ecd564 docs: canonical url 2025-07-07 16:37:00 -04:00
Dax Raad
da909d9684 append piped stdin to prompt 2025-07-07 16:33:21 -04:00
Jay V
facd851b11 docs: dynamic domain 2025-07-07 16:31:15 -04:00
Dax Raad
c51de945a5 Add stdin support to run command
Allow piping content to opencode run when no message arguments are provided, enabling standard Unix pipe patterns for better CLI integration.

🤖 Generated with [opencode](https://opencode.ai)

Co-Authored-By: opencode <noreply@opencode.ai>
2025-07-07 16:29:13 -04:00
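A minimal usage sketch of the piping behavior described in this commit and in da909d9684 (the commands below are illustrative, not taken from the repo's docs):

    # no message argument: the piped content is used as the prompt
    git diff | opencode run

    # with a message argument: piped stdin is appended to the prompt
    cat build.log | opencode run "explain this error"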
Jay V
9253a3ca9e docs: debug 2025-07-07 16:26:23 -04:00
Dax Raad
7cfa297a78 wip: model and prompt flags for tui 2025-07-07 16:24:37 -04:00
Jay V
661b74def6 docs: debug info 2025-07-07 16:13:26 -04:00
Dax Raad
b478e5655c fix interrupt 2025-07-07 16:12:47 -04:00
Dax
f884766445
v2 message format and upgrade to ai sdk v5 (#743)
Co-authored-by: GitHub Action <action@github.com>
Co-authored-by: Liang-Shih Lin <liangshihlin@proton.me>
Co-authored-by: Dominik Engelhardt <dominikengelhardt@ymail.com>
Co-authored-by: Jay V <air@live.ca>
Co-authored-by: adamdottv <2363879+adamdottv@users.noreply.github.com>
2025-07-07 15:53:43 -04:00
Jay V
76b2e4539c docs: discord 2025-07-07 14:44:37 -04:00
Dominik Engelhardt
d87922c0eb
Fix Elixir LSP startup (#726)
2025-07-06 23:37:46 -04:00
Liang-Shih Lin
2446483df5
fix: Skip opencode upgrade if same version (#720) 2025-07-06 23:36:59 -04:00
GitHub Action
f4c453155d Update download stats 2025-07-06
2025-07-06 12:03:56 +00:00
Dax Raad
969ad80ed2 fix openrouter caching with anthropic, should be a lot cheaper
2025-07-05 11:39:54 -04:00
GitHub Action
af064b41d7 Update download stats 2025-07-05
2025-07-05 12:03:56 +00:00
Dax Raad
ea6bfef21a use full filepath
2025-07-04 17:58:03 -04:00
Jay V
107363b1d9 docs: fix show more in share page
2025-07-04 17:57:12 -04:00
Dax Raad
85214d7c59 fix input bar not rendering capital letters 2025-07-04 17:21:51 -04:00
Timo Clasen
997cb2d945
fix(tui): optimistic rendering (#692) 2025-07-04 16:06:57 -05:00
Dax Raad
45b139390c make file attachments work good like 2025-07-04 16:21:26 -04:00
Jay V
994368de15 docs: share fix scrolling again 2025-07-04 13:53:25 -04:00
Jay V
143fd8e076 docs: share improve markdown rendering of ai responses 2025-07-04 13:53:25 -04:00
Dax Raad
06dba28bd6 wip: fix media type 2025-07-04 12:50:52 -04:00
adamdottv
b8d276a049
fix(tui): full paths for attachments 2025-07-04 11:42:22 -05:00
Dax Raad
ee01f01271 file attachments 2025-07-04 12:24:01 -04:00
adamdottv
32d5db4f0a
fix(tui): markdown wrapping off sometimes 2025-07-04 11:16:38 -05:00
adamdottv
f6108b7be8
fix(tui): handle pdf and image @ files 2025-07-04 11:13:09 -05:00
adamdottv
94ef341c9d
feat(tui): render attachments 2025-07-04 10:55:02 -05:00
adamdottv
f9abc7c84f
feat(tui): file attachments 2025-07-04 10:55:02 -05:00
adamdottv
891ed6ebc0
fix(tui): slower startup due to file.status 2025-07-04 10:55:01 -05:00
Dax Raad
163e23a68b removed banned command concept 2025-07-04 11:32:12 -04:00
Vladimir
f13b0af491
docs: Fix invalid json in the mcp example config (#645) 2025-07-04 11:24:13 -04:00
Aiden Cline
4a0be45d3d
chore: document instructions configuration option (#670) 2025-07-04 11:22:45 -04:00
Dax Raad
23788674c8 disable snapshots temporarily
2025-07-04 08:45:18 -04:00
GitHub Action
121eb24e73 Update download stats 2025-07-04 2025-07-04 12:26:16 +00:00
Dax Raad
571d60182a improve snapshotting speed further
2025-07-03 21:36:09 -04:00
Jay V
167a9dcaf3 docs: share fix scroll to anchor
2025-07-03 20:30:21 -04:00
Dax Raad
37327259cb ci: ignore 2025-07-03 20:30:02 -04:00
Dax Raad
cdb25656d5 improve snapshot speed 2025-07-03 20:16:25 -04:00
Jay V
25c876caa2 docs: share fix last message not expandable 2025-07-03 19:33:55 -04:00
210 changed files with 17725 additions and 8931 deletions


@@ -28,5 +28,5 @@ jobs:
 git config --local user.email "action@github.com"
 git config --local user.name "GitHub Action"
 git add STATS.md
-git diff --staged --quiet || git commit -m "Update download stats $(date -I)"
+git diff --staged --quiet || git commit -m "ignore: update download stats $(date -I)"
 git push


@@ -9,7 +9,7 @@
 </p>
 <p align="center">AI coding agent, built for the terminal.</p>
 <p align="center">
-<a href="https://opencode.ai/docs"><img alt="View docs" src="https://img.shields.io/badge/view-docs-blue?style=flat-square" /></a>
+<a href="https://opencode.ai/discord"><img alt="Discord" src="https://img.shields.io/discord/1391832426048651334?style=flat-square&label=discord" /></a>
 <a href="https://www.npmjs.com/package/opencode-ai"><img alt="npm" src="https://img.shields.io/npm/v/opencode-ai?style=flat-square" /></a>
 <a href="https://github.com/sst/opencode/actions/workflows/publish.yml"><img alt="Build status" src="https://img.shields.io/github/actions/workflow/status/sst/opencode/publish.yml?style=flat-square&branch=dev" /></a>
 </p>
@@ -76,4 +76,4 @@ The other confusingly named repo has no relation to this one. You can [read the
 ---
-**Join our community** [YouTube](https://www.youtube.com/c/sst-dev) | [X.com](https://x.com/SST_dev)
+**Join our community** [Discord](https://discord.gg/opencode) | [YouTube](https://www.youtube.com/c/sst-dev) | [X.com](https://x.com/SST_dev)


@@ -1,9 +1,15 @@
 # Download Stats
 | Date | GitHub Downloads | npm Downloads | Total |
-| ---------- | ---------------- | --------------- | --------------- |
+| ---------- | ---------------- | --------------- | ----------------- |
 | 2025-06-29 | 18,789 (+0) | 39,420 (+0) | 58,209 (+0) |
 | 2025-06-30 | 20,127 (+1,338) | 41,059 (+1,639) | 61,186 (+2,977) |
 | 2025-07-01 | 22,108 (+1,981) | 43,745 (+2,686) | 65,853 (+4,667) |
 | 2025-07-02 | 24,814 (+2,706) | 46,168 (+2,423) | 70,982 (+5,129) |
 | 2025-07-03 | 27,834 (+3,020) | 49,955 (+3,787) | 77,789 (+6,807) |
+| 2025-07-04 | 30,608 (+2,774) | 54,758 (+4,803) | 85,366 (+7,577) |
+| 2025-07-05 | 32,524 (+1,916) | 58,371 (+3,613) | 90,895 (+5,529) |
+| 2025-07-06 | 33,766 (+1,242) | 59,694 (+1,323) | 93,460 (+2,565) |
+| 2025-07-08 | 38,052 (+4,286) | 64,468 (+4,774) | 102,520 (+9,060) |
+| 2025-07-10 | 43,796 (+5,744) | 71,402 (+6,934) | 115,198 (+12,678) |
+| 2025-07-11 | 46,982 (+3,186) | 77,462 (+6,060) | 124,444 (+9,246) |

bun.lock

@@ -5,12 +5,17 @@
 "name": "opencode",
 "devDependencies": {
 "prettier": "3.5.3",
-"sst": "3.17.6",
+"sst": "3.17.8",
 },
 },
 "packages/function": {
 "name": "@opencode/function",
 "version": "0.0.1",
+"dependencies": {
+"@octokit/auth-app": "8.0.1",
+"@octokit/rest": "22.0.0",
+"jose": "6.0.11",
+},
 "devDependencies": {
 "@cloudflare/workers-types": "4.20250522.0",
 "@types/node": "catalog:",
@@ -78,11 +83,12 @@
 "lang-map": "0.4.0",
 "luxon": "3.6.1",
 "marked": "15.0.12",
+"marked-shiki": "1.2.0",
 "rehype-autolink-headings": "7.1.0",
 "sharp": "0.32.5",
 "shiki": "3.4.2",
 "solid-js": "1.9.7",
-"toolbeam-docs-theme": "0.4.1",
+"toolbeam-docs-theme": "0.4.3",
 },
 "devDependencies": {
 "@types/node": "catalog:",
@@ -95,30 +101,22 @@
 "sharp",
 "esbuild",
 ],
-"patchedDependencies": {
-"ai@4.3.16": "patches/ai@4.3.16.patch",
-},
-"overrides": {
-"zod": "3.24.2",
-},
 "catalog": {
 "@types/node": "22.13.9",
-"ai": "4.3.16",
+"ai": "5.0.0-beta.7",
 "typescript": "5.8.2",
-"zod": "3.24.2",
+"zod": "3.25.49",
 },
 "packages": {
 "@ai-sdk/amazon-bedrock": ["@ai-sdk/amazon-bedrock@2.2.10", "", { "dependencies": { "@ai-sdk/provider": "1.1.3", "@ai-sdk/provider-utils": "2.2.8", "@smithy/eventstream-codec": "^4.0.1", "@smithy/util-utf8": "^4.0.0", "aws4fetch": "^1.0.20" }, "peerDependencies": { "zod": "^3.0.0" } }, "sha512-icLGO7Q0NinnHIPgT+y1QjHVwH4HwV+brWbvM+FfCG2Afpa89PyKa3Ret91kGjZpBgM/xnj1B7K5eM+rRlsXQA=="],
 "@ai-sdk/anthropic": ["@ai-sdk/anthropic@1.2.12", "", { "dependencies": { "@ai-sdk/provider": "1.1.3", "@ai-sdk/provider-utils": "2.2.8" }, "peerDependencies": { "zod": "^3.0.0" } }, "sha512-YSzjlko7JvuiyQFmI9RN1tNZdEiZxc+6xld/0tq/VkJaHpEzGAb1yiNxxvmYVcjvfu/PcvCxAAYXmTYQQ63IHQ=="],
-"@ai-sdk/provider": ["@ai-sdk/provider@1.1.3", "", { "dependencies": { "json-schema": "^0.4.0" } }, "sha512-qZMxYJ0qqX/RfnuIaab+zp8UAeJn/ygXXAffR5I4N0n1IrvA6qBsjc8hXLmBiMV2zoXlifkacF7sEFnYnjBcqg=="],
+"@ai-sdk/gateway": ["@ai-sdk/gateway@1.0.0-beta.3", "", { "dependencies": { "@ai-sdk/provider": "2.0.0-beta.1", "@ai-sdk/provider-utils": "3.0.0-beta.2" }, "peerDependencies": { "zod": "^3.25.49" } }, "sha512-g49gMSkXy94lYvl5LRh438OR/0JCG6ol0jV+iLot7cy5HLltZlGocEuauETBu4b10mDXOd7XIjTEZoQpYFMYLQ=="],
-"@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@2.2.8", "", { "dependencies": { "@ai-sdk/provider": "1.1.3", "nanoid": "^3.3.8", "secure-json-parse": "^2.7.0" }, "peerDependencies": { "zod": "^3.23.8" } }, "sha512-fqhG+4sCVv8x7nFzYnFo19ryhAa3w096Kmc3hWxMQfW/TubPOmt3A6tYZhl4mUfQWWQMsuSkLrtjlWuXBVSGQA=="],
+"@ai-sdk/provider": ["@ai-sdk/provider@2.0.0-beta.1", "", { "dependencies": { "json-schema": "^0.4.0" } }, "sha512-Z8SPncMtS3RsoXITmT7NVwrAq6M44dmw0DoUOYJqNNtCu8iMWuxB8Nxsoqpa0uEEy9R1V1ZThJAXTYgjTUxl3w=="],
-"@ai-sdk/react": ["@ai-sdk/react@1.2.12", "", { "dependencies": { "@ai-sdk/provider-utils": "2.2.8", "@ai-sdk/ui-utils": "1.2.11", "swr": "^2.2.5", "throttleit": "2.1.0" }, "peerDependencies": { "react": "^18 || ^19 || ^19.0.0-rc", "zod": "^3.23.8" }, "optionalPeers": ["zod"] }, "sha512-jK1IZZ22evPZoQW3vlkZ7wvjYGYF+tRBKXtrcolduIkQ/m/sOAVcVeVDUDvh1T91xCnWCdUGCPZg2avZ90mv3g=="],
+"@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@3.0.0-beta.2", "", { "dependencies": { "@ai-sdk/provider": "2.0.0-beta.1", "@standard-schema/spec": "^1.0.0", "eventsource-parser": "^3.0.3", "zod-to-json-schema": "^3.24.1" }, "peerDependencies": { "zod": "^3.25.49" } }, "sha512-H4K+4weOVgWqrDDeAbQWoA4U5mN4WrQPHQFdH7ynQYcnhj/pzctU9Q6mGlR5ESMWxaXxazxlOblSITlXo9bahA=="],
-"@ai-sdk/ui-utils": ["@ai-sdk/ui-utils@1.2.11", "", { "dependencies": { "@ai-sdk/provider": "1.1.3", "@ai-sdk/provider-utils": "2.2.8", "zod-to-json-schema": "^3.24.1" }, "peerDependencies": { "zod": "^3.23.8" } }, "sha512-3zcwCc8ezzFlwp3ZD15wAPjf2Au4s3vAbKsXQVyhxODHcmu0iyPO2Eua6D/vicq/AUm/BAo60r97O6HU+EI0+w=="],
 "@ampproject/remapping": ["@ampproject/remapping@2.3.0", "", { "dependencies": { "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw=="],
@@ -344,6 +342,42 @@
 "@modelcontextprotocol/sdk": ["@modelcontextprotocol/sdk@1.6.1", "", { "dependencies": { "content-type": "^1.0.5", "cors": "^2.8.5", "eventsource": "^3.0.2", "express": "^5.0.1", "express-rate-limit": "^7.5.0", "pkce-challenge": "^4.1.0", "raw-body": "^3.0.0", "zod": "^3.23.8", "zod-to-json-schema": "^3.24.1" } }, "sha512-oxzMzYCkZHMntzuyerehK3fV6A2Kwh5BD6CGEJSVDU2QNEhfLOptf2X7esQgaHZXHZY0oHmMsOtIDLP71UJXgA=="],
+"@octokit/auth-app": ["@octokit/auth-app@8.0.1", "", { "dependencies": { "@octokit/auth-oauth-app": "^9.0.1", "@octokit/auth-oauth-user": "^6.0.0", "@octokit/request": "^10.0.2", "@octokit/request-error": "^7.0.0", "@octokit/types": "^14.0.0", "toad-cache": "^3.7.0", "universal-github-app-jwt": "^2.2.0", "universal-user-agent": "^7.0.0" } }, "sha512-P2J5pB3pjiGwtJX4WqJVYCtNkcZ+j5T2Wm14aJAEIC3WJOrv12jvBley3G1U/XI8q9o1A7QMG54LiFED2BiFlg=="],
+"@octokit/auth-oauth-app": ["@octokit/auth-oauth-app@9.0.1", "", { "dependencies": { "@octokit/auth-oauth-device": "^8.0.1", "@octokit/auth-oauth-user": "^6.0.0", "@octokit/request": "^10.0.2", "@octokit/types": "^14.0.0", "universal-user-agent": "^7.0.0" } }, "sha512-TthWzYxuHKLAbmxdFZwFlmwVyvynpyPmjwc+2/cI3cvbT7mHtsAW9b1LvQaNnAuWL+pFnqtxdmrU8QpF633i1g=="],
+"@octokit/auth-oauth-device": ["@octokit/auth-oauth-device@8.0.1", "", { "dependencies": { "@octokit/oauth-methods": "^6.0.0", "@octokit/request": "^10.0.2", "@octokit/types": "^14.0.0", "universal-user-agent": "^7.0.0" } }, "sha512-TOqId/+am5yk9zor0RGibmlqn4V0h8vzjxlw/wYr3qzkQxl8aBPur384D1EyHtqvfz0syeXji4OUvKkHvxk/Gw=="],
+"@octokit/auth-oauth-user": ["@octokit/auth-oauth-user@6.0.0", "", { "dependencies": { "@octokit/auth-oauth-device": "^8.0.1", "@octokit/oauth-methods": "^6.0.0", "@octokit/request": "^10.0.2", "@octokit/types": "^14.0.0", "universal-user-agent": "^7.0.0" } }, "sha512-GV9IW134PHsLhtUad21WIeP9mlJ+QNpFd6V9vuPWmaiN25HEJeEQUcS4y5oRuqCm9iWDLtfIs+9K8uczBXKr6A=="],
+"@octokit/auth-token": ["@octokit/auth-token@6.0.0", "", {}, "sha512-P4YJBPdPSpWTQ1NU4XYdvHvXJJDxM6YwpS0FZHRgP7YFkdVxsWcpWGy/NVqlAA7PcPCnMacXlRm1y2PFZRWL/w=="],
+"@octokit/core": ["@octokit/core@7.0.3", "", { "dependencies": { "@octokit/auth-token": "^6.0.0", "@octokit/graphql": "^9.0.1", "@octokit/request": "^10.0.2", "@octokit/request-error": "^7.0.0", "@octokit/types": "^14.0.0", "before-after-hook": "^4.0.0", "universal-user-agent": "^7.0.0" } }, "sha512-oNXsh2ywth5aowwIa7RKtawnkdH6LgU1ztfP9AIUCQCvzysB+WeU8o2kyyosDPwBZutPpjZDKPQGIzzrfTWweQ=="],
+"@octokit/endpoint": ["@octokit/endpoint@11.0.0", "", { "dependencies": { "@octokit/types": "^14.0.0", "universal-user-agent": "^7.0.2" } }, "sha512-hoYicJZaqISMAI3JfaDr1qMNi48OctWuOih1m80bkYow/ayPw6Jj52tqWJ6GEoFTk1gBqfanSoI1iY99Z5+ekQ=="],
+"@octokit/graphql": ["@octokit/graphql@9.0.1", "", { "dependencies": { "@octokit/request": "^10.0.2", "@octokit/types": "^14.0.0", "universal-user-agent": "^7.0.0" } }, "sha512-j1nQNU1ZxNFx2ZtKmL4sMrs4egy5h65OMDmSbVyuCzjOcwsHq6EaYjOTGXPQxgfiN8dJ4CriYHk6zF050WEULg=="],
+"@octokit/oauth-authorization-url": ["@octokit/oauth-authorization-url@8.0.0", "", {}, "sha512-7QoLPRh/ssEA/HuHBHdVdSgF8xNLz/Bc5m9fZkArJE5bb6NmVkDm3anKxXPmN1zh6b5WKZPRr3697xKT/yM3qQ=="],
+"@octokit/oauth-methods": ["@octokit/oauth-methods@6.0.0", "", { "dependencies": { "@octokit/oauth-authorization-url": "^8.0.0", "@octokit/request": "^10.0.2", "@octokit/request-error": "^7.0.0", "@octokit/types": "^14.0.0" } }, "sha512-Q8nFIagNLIZgM2odAraelMcDssapc+lF+y3OlcIPxyAU+knefO8KmozGqfnma1xegRDP4z5M73ABsamn72bOcA=="],
+"@octokit/openapi-types": ["@octokit/openapi-types@25.1.0", "", {}, "sha512-idsIggNXUKkk0+BExUn1dQ92sfysJrje03Q0bv0e+KPLrvyqZF8MnBpFz8UNfYDwB3Ie7Z0TByjWfzxt7vseaA=="],
+"@octokit/plugin-paginate-rest": ["@octokit/plugin-paginate-rest@13.1.1", "", { "dependencies": { "@octokit/types": "^14.1.0" }, "peerDependencies": { "@octokit/core": ">=6" } }, "sha512-q9iQGlZlxAVNRN2jDNskJW/Cafy7/XE52wjZ5TTvyhyOD904Cvx//DNyoO3J/MXJ0ve3rPoNWKEg5iZrisQSuw=="],
+"@octokit/plugin-request-log": ["@octokit/plugin-request-log@6.0.0", "", { "peerDependencies": { "@octokit/core": ">=6" } }, "sha512-UkOzeEN3W91/eBq9sPZNQ7sUBvYCqYbrrD8gTbBuGtHEuycE4/awMXcYvx6sVYo7LypPhmQwwpUe4Yyu4QZN5Q=="],
+"@octokit/plugin-rest-endpoint-methods": ["@octokit/plugin-rest-endpoint-methods@16.0.0", "", { "dependencies": { "@octokit/types": "^14.1.0" }, "peerDependencies": { "@octokit/core": ">=6" } }, "sha512-kJVUQk6/dx/gRNLWUnAWKFs1kVPn5O5CYZyssyEoNYaFedqZxsfYs7DwI3d67hGz4qOwaJ1dpm07hOAD1BXx6g=="],
+"@octokit/request": ["@octokit/request@10.0.3", "", { "dependencies": { "@octokit/endpoint": "^11.0.0", "@octokit/request-error": "^7.0.0", "@octokit/types": "^14.0.0", "fast-content-type-parse": "^3.0.0", "universal-user-agent": "^7.0.2" } }, "sha512-V6jhKokg35vk098iBqp2FBKunk3kMTXlmq+PtbV9Gl3TfskWlebSofU9uunVKhUN7xl+0+i5vt0TGTG8/p/7HA=="],
+"@octokit/request-error": ["@octokit/request-error@7.0.0", "", { "dependencies": { "@octokit/types": "^14.0.0" } }, "sha512-KRA7VTGdVyJlh0cP5Tf94hTiYVVqmt2f3I6mnimmaVz4UG3gQV/k4mDJlJv3X67iX6rmN7gSHCF8ssqeMnmhZg=="],
+"@octokit/rest": ["@octokit/rest@22.0.0", "", { "dependencies": { "@octokit/core": "^7.0.2", "@octokit/plugin-paginate-rest": "^13.0.1", "@octokit/plugin-request-log": "^6.0.0", "@octokit/plugin-rest-endpoint-methods": "^16.0.0" } }, "sha512-z6tmTu9BTnw51jYGulxrlernpsQYXpui1RK21vmXn8yF5bp6iX16yfTtJYGK5Mh1qDkvDOmp2n8sRMcQmR8jiA=="],
+"@octokit/types": ["@octokit/types@14.1.0", "", { "dependencies": { "@octokit/openapi-types": "^25.1.0" } }, "sha512-1y6DgTy8Jomcpu33N+p5w58l6xyt55Ar2I91RPiIA0xCJBXyUAhXCcmZaDWSANiha7R9a6qJJ2CRomGPZ6f46g=="],
 "@openauthjs/openauth": ["@openauthjs/openauth@0.4.3", "", { "dependencies": { "@standard-schema/spec": "1.0.0-beta.3", "aws4fetch": "1.0.20", "jose": "5.9.6" }, "peerDependencies": { "arctic": "^2.2.2", "hono": "^4.0.0" } }, "sha512-RlnjqvHzqcbFVymEwhlUEuac4utA5h4nhSK/i2szZuQmxTIqbGUxZ+nM+avM+VV4Ing+/ZaNLKILoXS3yrkOOw=="],
 "@opencode/function": ["@opencode/function@workspace:packages/function"],
@@ -462,12 +496,10 @@
 "@types/babel__traverse": ["@types/babel__traverse@7.20.7", "", { "dependencies": { "@babel/types": "^7.20.7" } }, "sha512-dkO5fhS7+/oos4ciWxyEyjWe48zmG6wbCheo/G2ZnHx4fs3EU6YC6UM8rk56gAjNJ9P3MTH2jo5jb92/K6wbng=="],
-"@types/bun": ["@types/bun@1.2.17", "", { "dependencies": { "bun-types": "1.2.17" } }, "sha512-l/BYs/JYt+cXA/0+wUhulYJB6a6p//GTPiJ7nV+QHa8iiId4HZmnu/3J/SowP5g0rTiERY2kfGKXEK5Ehltx4Q=="],
+"@types/bun": ["@types/bun@1.2.18", "", { "dependencies": { "bun-types": "1.2.18" } }, "sha512-Xf6RaWVheyemaThV0kUfaAUvCNokFr+bH8Jxp+tTZfx7dAPA8z9ePnP9S9+Vspzuxxx9JRAXhnyccRj3GyCMdQ=="],
 "@types/debug": ["@types/debug@4.1.12", "", { "dependencies": { "@types/ms": "*" } }, "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ=="],
-"@types/diff-match-patch": ["@types/diff-match-patch@1.0.36", "", {}, "sha512-xFdR6tkm0MWvBfO8xXCSsinYxHcqkQUlcHeSpMC2ukzOb6lwQAfDmW+Qt0AvlGd8HpsS28qKsB+oPeJn9I39jg=="],
 "@types/estree": ["@types/estree@1.0.7", "", {}, "sha512-w28IoSUCJpidD/TGviZwwMJckNESJZXFu7NBZ5YJ4mEUnNraUn9Pm8HSZm/jDF1pDWYKspWE7oVphigUPRakIQ=="],
 "@types/estree-jsx": ["@types/estree-jsx@1.0.5", "", { "dependencies": { "@types/estree": "*" } }, "sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg=="],
@@ -492,6 +524,8 @@
 "@types/node": ["@types/node@22.13.9", "", { "dependencies": { "undici-types": "~6.20.0" } }, "sha512-acBjXdRJ3A6Pb3tqnw9HZmyR3Fiol3aGxRCK1x3d+6CDAMjl7I649wpSd+yNURCjbOUGu9tqtLKnTGxmK6CyGw=="],
+"@types/react": ["@types/react@19.1.8", "", { "dependencies": { "csstype": "^3.0.2" } }, "sha512-AwAfQ2Wa5bCx9WP8nZL2uMZWod7J7/JSplxbTmBQ5ms6QpqNYm672H0Vu9ZVKVngQ+ii4R/byguVEUZQyeg44g=="],
 "@types/sax": ["@types/sax@1.2.7", "", { "dependencies": { "@types/node": "*" } }, "sha512-rO73L89PJxeYM3s3pPPjiPgVVcymqU490g0YO5n5By0k2Erzj6tay/4lr1CHAAU4JyOWd1rpQ8bCf6cZfHU96A=="],
 "@types/turndown": ["@types/turndown@5.0.5", "", {}, "sha512-TL2IgGgc7B5j78rIccBtlYAnkuv8nUQqhQc+DSYV5j9Be9XOcm/SKOVRuA47xAVI3680Tk9B1d8flK2GWT2+4w=="],
@@ -512,7 +546,7 @@
 "acorn-walk": ["acorn-walk@8.3.2", "", {}, "sha512-cjkyv4OtNCIeqhHrfS81QWXoCBPExR/J62oyEqepVw8WaQeSqpW2uhuLPh1m9eWhDuOo/jUXVTlifvesOWp/4A=="],
-"ai": ["ai@4.3.16", "", { "dependencies": { "@ai-sdk/provider": "1.1.3", "@ai-sdk/provider-utils": "2.2.8", "@ai-sdk/react": "1.2.12", "@ai-sdk/ui-utils": "1.2.11", "@opentelemetry/api": "1.9.0", "jsondiffpatch": "0.6.0" }, "peerDependencies": { "react": "^18 || ^19 || ^19.0.0-rc", "zod": "^3.23.8" }, "optionalPeers": ["react"] }, "sha512-KUDwlThJ5tr2Vw0A1ZkbDKNME3wzWhuVfAOwIvFUzl1TPVDFAXDFTXio3p+jaKneB+dKNCvFFlolYmmgHttG1g=="],
+"ai": ["ai@5.0.0-beta.7", "", { "dependencies": { "@ai-sdk/gateway": "1.0.0-beta.3", "@ai-sdk/provider": "2.0.0-beta.1", "@ai-sdk/provider-utils": "3.0.0-beta.2", "@opentelemetry/api": "1.9.0" }, "peerDependencies": { "zod": "^3.25.49" } }, "sha512-oC4KzUJCQPMB7v9rCqL/rVk2ogZvI6lYiXfKjzPYHwa1zIgy329qqRLmAd3mKEDTTG6By1r0zasQu7FKmG+4gw=="],
 "ansi-align": ["ansi-align@3.0.1", "", { "dependencies": { "string-width": "^4.1.0" } }, "sha512-IOfwwBF5iczOjp/WeY4YxyjqAFMQoZufdQWDd19SEExbVLNXqvpzSJ/M7Za4/sCPmQ0+GRquoA7bGcINcxew6w=="],
@@ -582,6 +616,8 @@
 "bcp-47-match": ["bcp-47-match@2.0.3", "", {}, "sha512-JtTezzbAibu8G0R9op9zb3vcWZd9JF6M0xOYGPn0fNCd7wOpRB1mU2mH9T8gaBGbAAyIIVgB2G7xG0GP98zMAQ=="],
+"before-after-hook": ["before-after-hook@4.0.0", "", {}, "sha512-q6tR3RPqIB1pMiTRMFcZwuG5T8vwp+vUvEG0vuI6B+Rikh5BfPp2fQ82c925FOs+b0lcFQ8CFrL+KbilfZFhOQ=="],
 "bl": ["bl@4.1.0", "", { "dependencies": { "buffer": "^5.5.0", "inherits": "^2.0.4", "readable-stream": "^3.4.0" } }, "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w=="],
 "blake3-wasm": ["blake3-wasm@2.1.5", "", {}, "sha512-F1+K8EbfOZE49dtoPtmxUQrpXaBIl3ICvasLh+nJta0xkz+9kF/7uet9fLnwKqhDrmj6g+6K3Tw9yQPUg2ka5g=="],
@@ -602,7 +638,7 @@
 "buffer": ["buffer@4.9.2", "", { "dependencies": { "base64-js": "^1.0.2", "ieee754": "^1.1.4", "isarray": "^1.0.0" } }, "sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg=="],
-"bun-types": ["bun-types@1.2.17", "", { "dependencies": { "@types/node": "*" } }, "sha512-ElC7ItwT3SCQwYZDYoAH+q6KT4Fxjl8DtZ6qDulUFBmXA8YB4xo+l54J9ZJN+k2pphfn9vk7kfubeSd5QfTVJQ=="],
+"bun-types": ["bun-types@1.2.18", "", { "dependencies": { "@types/node": "*" }, "peerDependencies": { "@types/react": "^19" } }, "sha512-04+Eha5NP7Z0A9YgDAzMk5PHR16ZuLVa83b26kH5+cp1qZW4F6FmAURngE7INf4tKOvCE69vYvDEwoNl1tGiWw=="],
 "bundle-name": ["bundle-name@4.1.0", "", { "dependencies": { "run-applescript": "^7.0.0" } }, "sha512-tjwM5exMg6BGRI+kNmTntNsvdZS1X8BFYS6tnJ2hdH0kVxM6/eVZ2xy+FqStSWvYmtfFMDLIxurorHwDKfDz5Q=="],
@@ -730,8 +766,6 @@
 "diff": ["diff@8.0.2", "", {}, "sha512-sSuxWU5j5SR9QQji/o2qMvqRNYRDOcBTgsJ/DeCf4iSN4gW+gNMXM7wFIP+fdXZxoNiAnHUTGjCr+TSWXdRDKg=="],
-"diff-match-patch": ["diff-match-patch@1.0.5", "", {}, "sha512-IayShXAgj/QMXgB0IWmKx+rOPuGMhqm5w6jvFxmVenXKIzRqTAAsbBPT3kWQeGANj3jGgvcvv4yK6SxqYmikgw=="],
 "diff3": ["diff3@0.0.3", "", {}, "sha512-iSq8ngPOt0K53A6eVr4d5Kn6GNrM2nQZtC740pzIriHtn4pOQ2lyzEXQMBeVcWERN0ye7fhBsk9PbLLQOnUx/g=="],
 "direction": ["direction@2.0.1", "", { "bin": { "direction": "cli.js" } }, "sha512-9S6m9Sukh1cZNknO1CWAr2QAWsbKLafQiyM5gZ7VgXHeuaoUwffKN4q6NC4A/Mf9iiPlOXQEKW/Mv/mh9/3YFA=="],
@@ -800,7 +834,7 @@
 "eventsource": ["eventsource@3.0.7", "", { "dependencies": { "eventsource-parser": "^3.0.1" } }, "sha512-CRT1WTyuQoD771GW56XEZFQ/ZoSfWid1alKGDYMmkt2yl8UXrVR4pspqWNEcqKvVIzg6PAltWjxcSSPrboA4iA=="],
-"eventsource-parser": ["eventsource-parser@3.0.2", "", {}, "sha512-6RxOBZ/cYgd8usLwsEl+EC09Au/9BcmCKYF2/xbml6DNczf7nv0MQb+7BA2F+li6//I+28VNlQR37XfQtcAJuA=="],
+"eventsource-parser": ["eventsource-parser@3.0.3", "", {}, "sha512-nVpZkTMM9rF6AQ9gPJpFsNAMt48wIzB5TQgiTLdHiuO8XEDhUgZEhqKlZWXbIzo9VmJ/HvysHqEaVeD5v9TPvA=="],
 "exit-hook": ["exit-hook@2.2.1", "", {}, "sha512-eNTPlAD67BmP31LDINZ3U7HSF8l57TxOY2PmBJ1shpCvpnxBF93mWCE8YHBnXs8qiUZJc9WDcWIeC3a2HIAMfw=="],
@@ -816,6 +850,8 @@
 "extend": ["extend@3.0.2", "", {}, "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g=="],
+"fast-content-type-parse": ["fast-content-type-parse@3.0.0", "", {}, "sha512-ZvLdcY8P+N8mGQJahJV5G4U88CSvT1rP8ApL6uETe88MBXrBHAkZlSEySdUlyztF7ccb+Znos3TFqaepHxdhBg=="],
 "fast-deep-equal": ["fast-deep-equal@3.1.3", "", {}, "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="],
 "fast-fifo": ["fast-fifo@1.3.2", "", {}, "sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ=="],
@@ -1002,7 +1038,7 @@
 "jmespath": ["jmespath@0.16.0", "", {}, "sha512-9FzQjJ7MATs1tSpnco1K6ayiYE3figslrXA72G2HQ/n76RzvYlofyi5QM+iX4YRs/pu3yzxlVQSST23+dMDknw=="],
-"jose": ["jose@5.2.3", "", {}, "sha512-KUXdbctm1uHVL8BYhnyHkgp3zDX5KW8ZhAKVFEfUbU2P8Alpzjb+48hHvjOdQIyPshoblhzsuqOwEEAbtHVirA=="],
+"jose": ["jose@6.0.11", "", {}, "sha512-QxG7EaliDARm1O1S8BGakqncGT9s25bKL1WSf6/oa17Tkqwi8D2ZNglqCF+DsYF88/rV66Q/Q2mFAy697E1DUg=="],
 "joycon": ["joycon@3.1.1", "", {}, "sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw=="],
@@ -1022,8 +1058,6 @@
 "json5": ["json5@2.2.3", "", { "bin": { "json5": "lib/cli.js" } }, "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg=="],
-"jsondiffpatch": ["jsondiffpatch@0.6.0", "", { "dependencies": { "@types/diff-match-patch": "^1.0.36", "chalk": "^5.3.0", "diff-match-patch": "^1.0.5" }, "bin": { "jsondiffpatch": "bin/jsondiffpatch.js" } }, "sha512-3QItJOXp2AP1uv7waBkao5nCvhEv+QmJAd38Ybq7wNI74Q+BBmnLn4EDKz6yI9xGAIQoUF87qHt+kc1IVxB4zQ=="],
 "kleur": ["kleur@4.1.5", "", {}, "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ=="],
 "klona": ["klona@2.0.6", "", {}, "sha512-dhG34DXATL5hSxJbIexCft8FChFXtmskoZYnoPWjXQuebWYCNkVeV3KkGegCK9CP1oswI/vQibS2GY7Em/sJJA=="],
@@ -1050,6 +1084,8 @@
 "marked": ["marked@15.0.12", "", { "bin": { "marked": "bin/marked.js" } }, "sha512-8dD6FusOQSrpv9Z1rdNMdlSgQOIP880DHqnohobOmYLElGEqAL/JvxvuxZO16r4HtjTlfPRDC1hbvxC9dPN2nA=="],
+"marked-shiki": ["marked-shiki@1.2.0", "", { "peerDependencies": { "marked": ">=7.0.0", "shiki": ">=1.0.0" } }, "sha512-N924hp8veE6Mc91g5/kCNVoTU7TkeJfB2G2XEWb+k1fVA0Bck2T0rVt93d39BlOYH6ohP4Q9BFlPk+UkblhXbg=="],
 "math-intrinsics": ["math-intrinsics@1.1.0", "", {}, "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g=="],
 "mdast-util-definitions": ["mdast-util-definitions@6.0.0", "", { "dependencies": { "@types/mdast": "^4.0.0", "@types/unist": "^3.0.0", "unist-util-visit": "^5.0.0" } }, "sha512-scTllyX6pnYNZH/AIp/0ePz6s4cZtARxImwoPJ7kS42n+MnVsI4XbnG6d4ibehRIldYMWM2LD7ImQblVhUejVQ=="],
@@ -1338,8 +1374,6 @@
 "rc": ["rc@1.2.8", "", { "dependencies": { "deep-extend": "^0.6.0", "ini": "~1.3.0", "minimist": "^1.2.0", "strip-json-comments": "~2.0.1" }, "bin": { "rc": "./cli.js" } }, "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw=="],
-"react": ["react@19.1.0", "", {}, "sha512-FS+XFBNvn3GTAWq26joslQgWNoFu08F4kl0J4CgdNKADkdSGXQyTCnKteIAJy96Br6YbpEU1LSzV5dYtjMkMDg=="],
 "readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="],
 "readdirp": ["readdirp@4.1.2", "", {}, "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg=="],
@@ -1476,23 +1510,23 @@
 "split2": ["split2@3.2.2", "", { "dependencies": { "readable-stream": "^3.0.0" } }, "sha512-9NThjpgZnifTkJpzTZ7Eue85S49QwpNhZTq6GRJwObb6jnLFNGB7Qm73V5HewTROPyxD0C29xqmaI68bQtV+hg=="],
-"sst": ["sst@3.17.6", "", { "dependencies": { "aws-sdk": "2.1692.0", "aws4fetch": "1.0.18", "jose": "5.2.3", "opencontrol": "0.0.6", "openid-client": "5.6.4" }, "optionalDependencies": { "sst-darwin-arm64": "3.17.6", "sst-darwin-x64": "3.17.6", "sst-linux-arm64": "3.17.6", "sst-linux-x64": "3.17.6", "sst-linux-x86": "3.17.6", "sst-win32-arm64": "3.17.6", "sst-win32-x64": "3.17.6", "sst-win32-x86": "3.17.6" }, "bin": { "sst": "bin/sst.mjs" } }, "sha512-p+AcqwfYQUdkxeRjCikQoTMviPCBiGoU7M0vcV6GDVmVis8hzhVw4EFfHTafZC+aWfy1Ke2UQi66vZlEVWuEqA=="],
+"sst": ["sst@3.17.8", "", { "dependencies": { "aws-sdk": "2.1692.0", "aws4fetch": "1.0.18", "jose": "5.2.3", "opencontrol": "0.0.6", "openid-client": "5.6.4" }, "optionalDependencies": { "sst-darwin-arm64": "3.17.8", "sst-darwin-x64": "3.17.8", "sst-linux-arm64": "3.17.8", "sst-linux-x64": "3.17.8", "sst-linux-x86": "3.17.8", "sst-win32-arm64": "3.17.8", "sst-win32-x64": "3.17.8", "sst-win32-x86": "3.17.8" }, "bin": { "sst": "bin/sst.mjs" } }, "sha512-P/a9/ZsjtQRrTBerBMO1ODaVa5HVTmNLrQNJiYvu2Bgd0ov+vefQeHv6oima8HLlPwpDIPS2gxJk8BZrTZMfCA=="],
-"sst-darwin-arm64": ["sst-darwin-arm64@3.17.6", "", { "os": "darwin", "cpu": "arm64" }, "sha512-6tb7KlcPR7PTi3ofQv8dX/n6Jf7pNP9VfrnYL4HBWnWrcYaZeJ5MWobILfIJ/y2jHgoqmg9e5C3266Eds0JQyw=="],
+"sst-darwin-arm64": ["sst-darwin-arm64@3.17.8", "", { "os": "darwin", "cpu": "arm64" }, "sha512-50P6YRMnZVItZUfB0+NzqMww2mmm4vB3zhTVtWUtGoXeiw78g1AEnVlmS28gYXPHM1P987jTvR7EON9u9ig/Dg=="],
-"sst-darwin-x64": ["sst-darwin-x64@3.17.6", "", { "os": "darwin", "cpu": "x64" }, "sha512-lFakq6/EgTuBSjbl8Kry4pfgAPEIyn6o7ZkyRz3hz5331wUaX88yfjs3tL9JQ8Ey6jBUYxwhP/Q1n7fzIG046g=="],
+"sst-darwin-x64": ["sst-darwin-x64@3.17.8", "", { "os": "darwin", "cpu": "x64" }, "sha512-P0pnMHCmpkpcsxkWpilmeoD79LkbkoIcv6H0aeM9ArT/71/JBhvqH+HjMHSJCzni/9uR6er+nH5F+qol0UO6Bw=="],
-"sst-linux-arm64": ["sst-linux-arm64@3.17.6", "", { "os": "linux", "cpu": "arm64" }, "sha512-SdTxXMbTEdiwOqp37w31kXv97vHqSx3oK9h/76lKg7V9k5JxPJ6JMefPLhoKWwK0Zh6AndY2zo2oRoEv4SIaDw=="],
+"sst-linux-arm64": ["sst-linux-arm64@3.17.8", "", { "os": "linux", "cpu": "arm64" }, "sha512-vun54YA/UzprCu9p8BC4rMwFU5Cj9xrHAHYLYUp/yq4H0pfmBIiQM62nsfIKizRThe/TkBFy60EEi9myf6raYA=="],
-"sst-linux-x64": ["sst-linux-x64@3.17.6", "", { "os": "linux", "cpu": "x64" }, "sha512-qneh7uWDiTUYx8X1Y3h2YVw3SJ0ybBBlRrVybIvCM09JqQ8+qq/XjKXGzA/3/EF0Jr7Ug8cARSn9CwxhdQGN7Q=="],
+"sst-linux-x64": ["sst-linux-x64@3.17.8", "", { "os": "linux", "cpu": "x64" }, "sha512-HqByCaLE2gEJbM20P1QRd+GqDMAiieuU53FaZA1F+AGxQi+kR82NWjrPqFcMj4dMYg8w/TWXuV+G5+PwoUmpDw=="],
-"sst-linux-x86": ["sst-linux-x86@3.17.6", "", { "os": "linux", "cpu": "none" }, "sha512-pU3D5OeqnmfxGqN31DxuwWnc1OayxhkErnITHhZ39D0MTiwbIgCapH26FuLW8B08/uxJWG8djUlOboCRhSBvWA=="],
+"sst-linux-x86": ["sst-linux-x86@3.17.8", "", { "os": "linux", "cpu": "none" }, "sha512-bCd6QM3MejfSmdvg8I/k+aUJQIZEQJg023qmN78fv00vwlAtfECvY7tjT9E2m3LDp33pXrcRYbFOQzPu+tWFfA=="],
-"sst-win32-arm64": ["sst-win32-arm64@3.17.6", "", { "os": "win32", "cpu": "arm64" }, "sha512-Rr3RTYWAsH9sM9CbM/sAZCk7dB1OsSAljjJuuHMvdSAYW3RDpXEza0PBJGxnBID2eOrpswEchzMPL2d8LtL7oA=="],
+"sst-win32-arm64": ["sst-win32-arm64@3.17.8", "", { "os": "win32", "cpu": "arm64" }, "sha512-pilx0n8gm4aHJae/vNiqIwZkWF3tdwWzD/ON7hkytw+CVSZ0FXtyFW/yO/+2u3Yw0Kj0lSWPnUqYgm/eHPLwQA=="],
-"sst-win32-x64": ["sst-win32-x64@3.17.6", "", { "os": "win32", "cpu": "x64" }, "sha512-yZ3roxwI0Wve9PFzdrrF1kfzCmIMFCCoa8qKeXY7LxCJ4QQIqHbCOccLK1Wv/MIU/mcZHWXTQVCLHw77uaa0GQ=="],
+"sst-win32-x64": ["sst-win32-x64@3.17.8", "", { "os": "win32", "cpu": "x64" }, "sha512-Jb0FVRyiOtESudF1V8ucW65PuHrx/iOHUamIO0JnbujWNHZBTRPB2QHN1dbewgkueYDaCmyS8lvuIImLwYJnzQ=="],
-"sst-win32-x86": ["sst-win32-x86@3.17.6", "", { "os": "win32", "cpu": "none" }, "sha512-zV7TJWPJN9PmIXr15iXFSs0tbGsa52oBR3+xiKrUj2qj9XsZe7HBFwskRnHyiFq0durZY9kk9ZtoVlpuUuzr1g=="],
+"sst-win32-x86": ["sst-win32-x86@3.17.8", "", { "os": "win32", "cpu": "none" }, "sha512-oVmFa/PoElQmfnGJlB0w6rPXiYuldiagO6AbrLMT/6oAnWerLQ8Uhv9tJWfMh3xtPLImQLTjxDo1v0AIzEv9QA=="],
 "stacktracey": ["stacktracey@2.1.8", "", { "dependencies": { "as-table": "^1.0.36", "get-source": "^2.0.12" } }, "sha512-Kpij9riA+UNg7TnphqjH7/CzctQ/owJGNbFkfEeve4Z4uxT5+JapVLFXcsurIfN34gnTWZNJ/f7NMG0E8JDzTw=="],
@@ -1524,8 +1558,6 @@
 "supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="],
-"swr": ["swr@2.3.3", "", { "dependencies": { "dequal": "^2.0.3", "use-sync-external-store": "^1.4.0" }, "peerDependencies": { "react": "^16.11.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" } }, "sha512-dshNvs3ExOqtZ6kJBaAsabhPdHyeY4P2cKwRCniDVifBMoG/SVI7tfLWqPXriVspf2Rg4tPzXJTnwaihIeFw2A=="],
 "tar-fs": ["tar-fs@3.0.9", "", { "dependencies": { "pump": "^3.0.0", "tar-stream": "^3.1.5" }, "optionalDependencies": { "bare-fs": "^4.0.1", "bare-path": "^3.0.0" } }, "sha512-XF4w9Xp+ZQgifKakjZYmFdkLoSWd34VGKcsTCwlNWM7QG3ZbaxnTsaBwnjFZqHRf/rROxaR8rXnbtwdvaDI+lA=="],
 "tar-stream": ["tar-stream@3.1.7", "", { "dependencies": { "b4a": "^1.6.4", "fast-fifo": "^1.2.0", "streamx": "^2.15.0" } }, "sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ=="],
@@ -1534,19 +1566,19 @@
 "thread-stream": ["thread-stream@0.15.2", "", { "dependencies": { "real-require": "^0.1.0" } }, "sha512-UkEhKIg2pD+fjkHQKyJO3yoIvAP3N6RlNFt2dUhcS1FGvCD1cQa1M/PGknCLFIyZdtJOWQjejp7bdNqmN7zwdA=="],
-"throttleit": ["throttleit@2.1.0", "", {}, "sha512-nt6AMGKW1p/70DF/hGBdJB57B8Tspmbp5gfJ8ilhLnt7kkr2ye7hzD6NVG8GGErk2HWF34igrL2CXmNIkzKqKw=="],
 "tiny-inflate": ["tiny-inflate@1.0.3", "", {}, "sha512-pkY1fj1cKHb2seWDy0B16HeWyczlJA9/WW3u3c4z/NiWDsO3DOU5D7nhTLE9CF0yXv/QZFY7sEJmj24dK+Rrqw=="],
 "tinyexec": ["tinyexec@0.3.2", "", {}, "sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA=="],
 "tinyglobby": ["tinyglobby@0.2.14", "", { "dependencies": { "fdir": "^6.4.4", "picomatch": "^4.0.2" } }, "sha512-tX5e7OM1HnYr2+a2C/4V0htOcSQcoSTH9KgJnVvNm5zm/cyEWKJ7j7YutsH9CxMdtOkkLFy2AHrMci9IM8IPZQ=="],
+"toad-cache": ["toad-cache@3.7.0", "", {}, "sha512-/m8M+2BJUpoJdgAHoG+baCwBT+tf2VraSfkBgl0Y00qIWt41DJ8R5B8nsEw0I58YwF5IZH6z24/2TobDKnqSWw=="],
 "toidentifier": ["toidentifier@1.0.1", "", {}, "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA=="],
 "token-types": ["token-types@6.0.0", "", { "dependencies": { "@tokenizer/token": "^0.3.0", "ieee754": "^1.2.1" } }, "sha512-lbDrTLVsHhOMljPscd0yitpozq7Ga2M5Cvez5AjGg8GASBjtt6iERCAJ93yommPmz62fb45oFIXHEZ3u9bfJEA=="],
-"toolbeam-docs-theme": ["toolbeam-docs-theme@0.4.1", "", { "peerDependencies": { "@astrojs/starlight": "^0.34.3", "astro": "^5.7.13" } }, "sha512-lTI4dHZaVNQky29m7sb36Oy4tWPwxsCuFxFjF8hgGW0vpV+S6qPvI9SwsJFvdE/OHO5DoI7VMbryV1pxZHkkHQ=="],
+"toolbeam-docs-theme": ["toolbeam-docs-theme@0.4.3", "", { "peerDependencies": { "@astrojs/starlight": "^0.34.3", "astro": "^5.7.13" } }, "sha512-3um/NsSq4xFeKbKrNGPHIzfTixwnEVvroqA8Q+lecnYHHJ5TtiYTggHDqewOW+I67t0J1IVBwVKUPjxiQfIcog=="],
 "tr46": ["tr46@0.0.3", "", {}, "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw=="],
@@ -1612,6 +1644,10 @@
 "unist-util-visit-parents": ["unist-util-visit-parents@6.0.1", "", { "dependencies": { "@types/unist": "^3.0.0", "unist-util-is": "^6.0.0" } }, "sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw=="],
+"universal-github-app-jwt": ["universal-github-app-jwt@2.2.2", "", {}, "sha512-dcmbeSrOdTnsjGjUfAlqNDJrhxXizjAz94ija9Qw8YkZ1uu0d+GoZzyH+Jb9tIIqvGsadUfwg+22k5aDqqwzbw=="],
+"universal-user-agent": ["universal-user-agent@7.0.3", "", {}, "sha512-TmnEAEAsBJVZM/AADELsK76llnwcf9vMKuPz8JflO1frO8Lchitr0fNaN9d+Ap0BjKtqWqd/J17qeDnXh8CL2A=="],
 "unpipe": ["unpipe@1.0.0", "", {}, "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ=="],
 "unstorage": ["unstorage@1.16.0", "", { "dependencies": { "anymatch": "^3.1.3", "chokidar": "^4.0.3", "destr": "^2.0.5", "h3": "^1.15.2", "lru-cache": "^10.4.3", "node-fetch-native": "^1.6.6", "ofetch": "^1.4.1", "ufo": "^1.6.1" }, "peerDependencies": { "@azure/app-configuration": "^1.8.0", "@azure/cosmos": "^4.2.0", "@azure/data-tables": "^13.3.0", "@azure/identity": "^4.6.0", "@azure/keyvault-secrets": "^4.9.0", "@azure/storage-blob": "^12.26.0", "@capacitor/preferences": "^6.0.3 || ^7.0.0", "@deno/kv": ">=0.9.0", "@netlify/blobs": "^6.5.0 || ^7.0.0 || ^8.1.0", "@planetscale/database": "^1.19.0", "@upstash/redis": "^1.34.3", "@vercel/blob": ">=0.27.1", "@vercel/kv": "^1.0.1", "aws4fetch": "^1.0.20", "db0": ">=0.2.1", "idb-keyval": "^6.2.1", "ioredis": "^5.4.2", "uploadthing": "^7.4.4" }, "optionalPeers": ["@azure/app-configuration", "@azure/cosmos", "@azure/data-tables", "@azure/identity", "@azure/keyvault-secrets", "@azure/storage-blob", "@capacitor/preferences", "@deno/kv", "@netlify/blobs", "@planetscale/database", "@upstash/redis", "@vercel/blob", "@vercel/kv", "aws4fetch", "db0", "idb-keyval", "ioredis", "uploadthing"] }, "sha512-WQ37/H5A7LcRPWfYOrDa1Ys02xAbpPJq6q5GkO88FBXVSQzHd7+BjEwfRqyaSWCv9MbsJy058GWjjPjcJ16GGA=="],
@ -1620,8 +1656,6 @@
"url": ["url@0.10.3", "", { "dependencies": { "punycode": "1.3.2", "querystring": "0.2.0" } }, "sha512-hzSUW2q06EqL1gKM/a+obYHLIO6ct2hwPuviqTTOcfFVc61UbfJ2Q32+uGL/HCPxKqrdGB5QUwIe7UqlDgwsOQ=="], "url": ["url@0.10.3", "", { "dependencies": { "punycode": "1.3.2", "querystring": "0.2.0" } }, "sha512-hzSUW2q06EqL1gKM/a+obYHLIO6ct2hwPuviqTTOcfFVc61UbfJ2Q32+uGL/HCPxKqrdGB5QUwIe7UqlDgwsOQ=="],
"use-sync-external-store": ["use-sync-external-store@1.5.0", "", { "peerDependencies": { "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" } }, "sha512-Rb46I4cGGVBmjamjphe8L/UnvJD+uPPtTkNvX5mZgqdbavhI4EbgIWJiIHXJ8bc/i9EQGPRh4DwEURJ552Do0A=="],
"util": ["util@0.12.5", "", { "dependencies": { "inherits": "^2.0.3", "is-arguments": "^1.0.4", "is-generator-function": "^1.0.7", "is-typed-array": "^1.1.3", "which-typed-array": "^1.1.2" } }, "sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA=="], "util": ["util@0.12.5", "", { "dependencies": { "inherits": "^2.0.3", "is-arguments": "^1.0.4", "is-generator-function": "^1.0.7", "is-typed-array": "^1.1.3", "which-typed-array": "^1.1.2" } }, "sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA=="],
"util-deprecate": ["util-deprecate@1.0.2", "", {}, "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="], "util-deprecate": ["util-deprecate@1.0.2", "", {}, "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="],
@ -1698,7 +1732,7 @@
"youch": ["youch@3.3.4", "", { "dependencies": { "cookie": "^0.7.1", "mustache": "^4.2.0", "stacktracey": "^2.1.8" } }, "sha512-UeVBXie8cA35DS6+nBkls68xaBBXCye0CNznrhszZjTbRVnJKQuNsyLKBTTL4ln1o1rh2PKtv35twV7irj5SEg=="], "youch": ["youch@3.3.4", "", { "dependencies": { "cookie": "^0.7.1", "mustache": "^4.2.0", "stacktracey": "^2.1.8" } }, "sha512-UeVBXie8cA35DS6+nBkls68xaBBXCye0CNznrhszZjTbRVnJKQuNsyLKBTTL4ln1o1rh2PKtv35twV7irj5SEg=="],
"zod": ["zod@3.24.2", "", {}, "sha512-lY7CDW43ECgW9u1TcT3IoXHflywfVqDYze4waEz812jR/bZ8FHDsl7pFQoSZTz5N+2NqRXs8GBwnAwo3ZNxqhQ=="], "zod": ["zod@3.25.49", "", {}, "sha512-JMMPMy9ZBk3XFEdbM3iL1brx4NUSejd6xr3ELrrGEfGb355gjhiAWtG3K5o+AViV/3ZfkIrCzXsZn6SbLwTR8Q=="],
"zod-openapi": ["zod-openapi@4.2.4", "", { "peerDependencies": { "zod": "^3.21.4" } }, "sha512-tsrQpbpqFCXqVXUzi3TPwFhuMtLN3oNZobOtYnK6/5VkXsNdnIgyNr4r8no4wmYluaxzN3F7iS+8xCW8BmMQ8g=="], "zod-openapi": ["zod-openapi@4.2.4", "", { "peerDependencies": { "zod": "^3.21.4" } }, "sha512-tsrQpbpqFCXqVXUzi3TPwFhuMtLN3oNZobOtYnK6/5VkXsNdnIgyNr4r8no4wmYluaxzN3F7iS+8xCW8BmMQ8g=="],
@ -1710,12 +1744,22 @@
"zwitch": ["zwitch@2.0.4", "", {}, "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A=="], "zwitch": ["zwitch@2.0.4", "", {}, "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A=="],
"@ai-sdk/amazon-bedrock/@ai-sdk/provider": ["@ai-sdk/provider@1.1.3", "", { "dependencies": { "json-schema": "^0.4.0" } }, "sha512-qZMxYJ0qqX/RfnuIaab+zp8UAeJn/ygXXAffR5I4N0n1IrvA6qBsjc8hXLmBiMV2zoXlifkacF7sEFnYnjBcqg=="],
"@ai-sdk/amazon-bedrock/@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@2.2.8", "", { "dependencies": { "@ai-sdk/provider": "1.1.3", "nanoid": "^3.3.8", "secure-json-parse": "^2.7.0" }, "peerDependencies": { "zod": "^3.23.8" } }, "sha512-fqhG+4sCVv8x7nFzYnFo19ryhAa3w096Kmc3hWxMQfW/TubPOmt3A6tYZhl4mUfQWWQMsuSkLrtjlWuXBVSGQA=="],
"@ai-sdk/amazon-bedrock/aws4fetch": ["aws4fetch@1.0.20", "", {}, "sha512-/djoAN709iY65ETD6LKCtyyEI04XIBP5xVvfmNxsEP0uJB5tyaGBztSryRr4HqMStr9R06PisQE7m9zDTXKu6g=="], "@ai-sdk/amazon-bedrock/aws4fetch": ["aws4fetch@1.0.20", "", {}, "sha512-/djoAN709iY65ETD6LKCtyyEI04XIBP5xVvfmNxsEP0uJB5tyaGBztSryRr4HqMStr9R06PisQE7m9zDTXKu6g=="],
"@ai-sdk/anthropic/@ai-sdk/provider": ["@ai-sdk/provider@1.1.3", "", { "dependencies": { "json-schema": "^0.4.0" } }, "sha512-qZMxYJ0qqX/RfnuIaab+zp8UAeJn/ygXXAffR5I4N0n1IrvA6qBsjc8hXLmBiMV2zoXlifkacF7sEFnYnjBcqg=="],
"@ai-sdk/anthropic/@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@2.2.8", "", { "dependencies": { "@ai-sdk/provider": "1.1.3", "nanoid": "^3.3.8", "secure-json-parse": "^2.7.0" }, "peerDependencies": { "zod": "^3.23.8" } }, "sha512-fqhG+4sCVv8x7nFzYnFo19ryhAa3w096Kmc3hWxMQfW/TubPOmt3A6tYZhl4mUfQWWQMsuSkLrtjlWuXBVSGQA=="],
"@ampproject/remapping/@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.25", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ=="], "@ampproject/remapping/@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.25", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ=="],
"@astrojs/mdx/@astrojs/markdown-remark": ["@astrojs/markdown-remark@6.3.2", "", { "dependencies": { "@astrojs/internal-helpers": "0.6.1", "@astrojs/prism": "3.3.0", "github-slugger": "^2.0.0", "hast-util-from-html": "^2.0.3", "hast-util-to-text": "^4.0.2", "import-meta-resolve": "^4.1.0", "js-yaml": "^4.1.0", "mdast-util-definitions": "^6.0.0", "rehype-raw": "^7.0.0", "rehype-stringify": "^10.0.1", "remark-gfm": "^4.0.1", "remark-parse": "^11.0.0", "remark-rehype": "^11.1.2", "remark-smartypants": "^3.0.2", "shiki": "^3.2.1", "smol-toml": "^1.3.1", "unified": "^11.0.5", "unist-util-remove-position": "^5.0.0", "unist-util-visit": "^5.0.0", "unist-util-visit-parents": "^6.0.1", "vfile": "^6.0.3" } }, "sha512-bO35JbWpVvyKRl7cmSJD822e8YA8ThR/YbUsciWNA7yTcqpIAL2hJDToWP5KcZBWxGT6IOdOkHSXARSNZc4l/Q=="], "@astrojs/mdx/@astrojs/markdown-remark": ["@astrojs/markdown-remark@6.3.2", "", { "dependencies": { "@astrojs/internal-helpers": "0.6.1", "@astrojs/prism": "3.3.0", "github-slugger": "^2.0.0", "hast-util-from-html": "^2.0.3", "hast-util-to-text": "^4.0.2", "import-meta-resolve": "^4.1.0", "js-yaml": "^4.1.0", "mdast-util-definitions": "^6.0.0", "rehype-raw": "^7.0.0", "rehype-stringify": "^10.0.1", "remark-gfm": "^4.0.1", "remark-parse": "^11.0.0", "remark-rehype": "^11.1.2", "remark-smartypants": "^3.0.2", "shiki": "^3.2.1", "smol-toml": "^1.3.1", "unified": "^11.0.5", "unist-util-remove-position": "^5.0.0", "unist-util-visit": "^5.0.0", "unist-util-visit-parents": "^6.0.1", "vfile": "^6.0.3" } }, "sha512-bO35JbWpVvyKRl7cmSJD822e8YA8ThR/YbUsciWNA7yTcqpIAL2hJDToWP5KcZBWxGT6IOdOkHSXARSNZc4l/Q=="],
"@astrojs/sitemap/zod": ["zod@3.24.2", "", {}, "sha512-lY7CDW43ECgW9u1TcT3IoXHflywfVqDYze4waEz812jR/bZ8FHDsl7pFQoSZTz5N+2NqRXs8GBwnAwo3ZNxqhQ=="],
"@aws-crypto/crc32/tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], "@aws-crypto/crc32/tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="],
"@aws-crypto/util/@smithy/util-utf8": ["@smithy/util-utf8@2.3.0", "", { "dependencies": { "@smithy/util-buffer-from": "^2.2.0", "tslib": "^2.6.2" } }, "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A=="], "@aws-crypto/util/@smithy/util-utf8": ["@smithy/util-utf8@2.3.0", "", { "dependencies": { "@smithy/util-buffer-from": "^2.2.0", "tslib": "^2.6.2" } }, "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A=="],
@ -1734,6 +1778,8 @@
"@jridgewell/gen-mapping/@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.25", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ=="], "@jridgewell/gen-mapping/@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.25", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ=="],
"@modelcontextprotocol/sdk/zod": ["zod@3.24.2", "", {}, "sha512-lY7CDW43ECgW9u1TcT3IoXHflywfVqDYze4waEz812jR/bZ8FHDsl7pFQoSZTz5N+2NqRXs8GBwnAwo3ZNxqhQ=="],
"@openauthjs/openauth/@standard-schema/spec": ["@standard-schema/spec@1.0.0-beta.3", "", {}, "sha512-0ifF3BjA1E8SY9C+nUew8RefNOIq0cDlYALPty4rhUm8Rrl6tCM8hBT4bhGhx7I7iXD0uAgt50lgo8dD73ACMw=="], "@openauthjs/openauth/@standard-schema/spec": ["@standard-schema/spec@1.0.0-beta.3", "", {}, "sha512-0ifF3BjA1E8SY9C+nUew8RefNOIq0cDlYALPty4rhUm8Rrl6tCM8hBT4bhGhx7I7iXD0uAgt50lgo8dD73ACMw=="],
"@openauthjs/openauth/aws4fetch": ["aws4fetch@1.0.20", "", {}, "sha512-/djoAN709iY65ETD6LKCtyyEI04XIBP5xVvfmNxsEP0uJB5tyaGBztSryRr4HqMStr9R06PisQE7m9zDTXKu6g=="], "@openauthjs/openauth/aws4fetch": ["aws4fetch@1.0.20", "", {}, "sha512-/djoAN709iY65ETD6LKCtyyEI04XIBP5xVvfmNxsEP0uJB5tyaGBztSryRr4HqMStr9R06PisQE7m9zDTXKu6g=="],
@ -1770,10 +1816,14 @@
"astro/sharp": ["sharp@0.33.5", "", { "dependencies": { "color": "^4.2.3", "detect-libc": "^2.0.3", "semver": "^7.6.3" }, "optionalDependencies": { "@img/sharp-darwin-arm64": "0.33.5", "@img/sharp-darwin-x64": "0.33.5", "@img/sharp-libvips-darwin-arm64": "1.0.4", "@img/sharp-libvips-darwin-x64": "1.0.4", "@img/sharp-libvips-linux-arm": "1.0.5", "@img/sharp-libvips-linux-arm64": "1.0.4", "@img/sharp-libvips-linux-s390x": "1.0.4", "@img/sharp-libvips-linux-x64": "1.0.4", "@img/sharp-libvips-linuxmusl-arm64": "1.0.4", "@img/sharp-libvips-linuxmusl-x64": "1.0.4", "@img/sharp-linux-arm": "0.33.5", "@img/sharp-linux-arm64": "0.33.5", "@img/sharp-linux-s390x": "0.33.5", "@img/sharp-linux-x64": "0.33.5", "@img/sharp-linuxmusl-arm64": "0.33.5", "@img/sharp-linuxmusl-x64": "0.33.5", "@img/sharp-wasm32": "0.33.5", "@img/sharp-win32-ia32": "0.33.5", "@img/sharp-win32-x64": "0.33.5" } }, "sha512-haPVm1EkS9pgvHrQ/F3Xy+hgcuMV0Wm9vfIBSiwZ05k+xgb0PkBQpGsAA/oWdDobNaZTH5ppvHtzCFbnSEwHVw=="], "astro/sharp": ["sharp@0.33.5", "", { "dependencies": { "color": "^4.2.3", "detect-libc": "^2.0.3", "semver": "^7.6.3" }, "optionalDependencies": { "@img/sharp-darwin-arm64": "0.33.5", "@img/sharp-darwin-x64": "0.33.5", "@img/sharp-libvips-darwin-arm64": "1.0.4", "@img/sharp-libvips-darwin-x64": "1.0.4", "@img/sharp-libvips-linux-arm": "1.0.5", "@img/sharp-libvips-linux-arm64": "1.0.4", "@img/sharp-libvips-linux-s390x": "1.0.4", "@img/sharp-libvips-linux-x64": "1.0.4", "@img/sharp-libvips-linuxmusl-arm64": "1.0.4", "@img/sharp-libvips-linuxmusl-x64": "1.0.4", "@img/sharp-linux-arm": "0.33.5", "@img/sharp-linux-arm64": "0.33.5", "@img/sharp-linux-s390x": "0.33.5", "@img/sharp-linux-x64": "0.33.5", "@img/sharp-linuxmusl-arm64": "0.33.5", "@img/sharp-linuxmusl-x64": "0.33.5", "@img/sharp-wasm32": "0.33.5", "@img/sharp-win32-ia32": "0.33.5", "@img/sharp-win32-x64": "0.33.5" } }, "sha512-haPVm1EkS9pgvHrQ/F3Xy+hgcuMV0Wm9vfIBSiwZ05k+xgb0PkBQpGsAA/oWdDobNaZTH5ppvHtzCFbnSEwHVw=="],
"astro/zod": ["zod@3.24.2", "", {}, "sha512-lY7CDW43ECgW9u1TcT3IoXHflywfVqDYze4waEz812jR/bZ8FHDsl7pFQoSZTz5N+2NqRXs8GBwnAwo3ZNxqhQ=="],
"babel-plugin-jsx-dom-expressions/@babel/helper-module-imports": ["@babel/helper-module-imports@7.18.6", "", { "dependencies": { "@babel/types": "^7.18.6" } }, "sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA=="], "babel-plugin-jsx-dom-expressions/@babel/helper-module-imports": ["@babel/helper-module-imports@7.18.6", "", { "dependencies": { "@babel/types": "^7.18.6" } }, "sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA=="],
"bl/buffer": ["buffer@5.7.1", "", { "dependencies": { "base64-js": "^1.3.1", "ieee754": "^1.1.13" } }, "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ=="], "bl/buffer": ["buffer@5.7.1", "", { "dependencies": { "base64-js": "^1.3.1", "ieee754": "^1.1.13" } }, "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ=="],
"eventsource/eventsource-parser": ["eventsource-parser@3.0.2", "", {}, "sha512-6RxOBZ/cYgd8usLwsEl+EC09Au/9BcmCKYF2/xbml6DNczf7nv0MQb+7BA2F+li6//I+28VNlQR37XfQtcAJuA=="],
"express/cookie": ["cookie@0.7.2", "", {}, "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w=="], "express/cookie": ["cookie@0.7.2", "", {}, "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w=="],
"get-source/source-map": ["source-map@0.6.1", "", {}, "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="], "get-source/source-map": ["source-map@0.6.1", "", {}, "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="],
@ -1786,8 +1836,12 @@
"miniflare/sharp": ["sharp@0.33.5", "", { "dependencies": { "color": "^4.2.3", "detect-libc": "^2.0.3", "semver": "^7.6.3" }, "optionalDependencies": { "@img/sharp-darwin-arm64": "0.33.5", "@img/sharp-darwin-x64": "0.33.5", "@img/sharp-libvips-darwin-arm64": "1.0.4", "@img/sharp-libvips-darwin-x64": "1.0.4", "@img/sharp-libvips-linux-arm": "1.0.5", "@img/sharp-libvips-linux-arm64": "1.0.4", "@img/sharp-libvips-linux-s390x": "1.0.4", "@img/sharp-libvips-linux-x64": "1.0.4", "@img/sharp-libvips-linuxmusl-arm64": "1.0.4", "@img/sharp-libvips-linuxmusl-x64": "1.0.4", "@img/sharp-linux-arm": "0.33.5", "@img/sharp-linux-arm64": "0.33.5", "@img/sharp-linux-s390x": "0.33.5", "@img/sharp-linux-x64": "0.33.5", "@img/sharp-linuxmusl-arm64": "0.33.5", "@img/sharp-linuxmusl-x64": "0.33.5", "@img/sharp-wasm32": "0.33.5", "@img/sharp-win32-ia32": "0.33.5", "@img/sharp-win32-x64": "0.33.5" } }, "sha512-haPVm1EkS9pgvHrQ/F3Xy+hgcuMV0Wm9vfIBSiwZ05k+xgb0PkBQpGsAA/oWdDobNaZTH5ppvHtzCFbnSEwHVw=="], "miniflare/sharp": ["sharp@0.33.5", "", { "dependencies": { "color": "^4.2.3", "detect-libc": "^2.0.3", "semver": "^7.6.3" }, "optionalDependencies": { "@img/sharp-darwin-arm64": "0.33.5", "@img/sharp-darwin-x64": "0.33.5", "@img/sharp-libvips-darwin-arm64": "1.0.4", "@img/sharp-libvips-darwin-x64": "1.0.4", "@img/sharp-libvips-linux-arm": "1.0.5", "@img/sharp-libvips-linux-arm64": "1.0.4", "@img/sharp-libvips-linux-s390x": "1.0.4", "@img/sharp-libvips-linux-x64": "1.0.4", "@img/sharp-libvips-linuxmusl-arm64": "1.0.4", "@img/sharp-libvips-linuxmusl-x64": "1.0.4", "@img/sharp-linux-arm": "0.33.5", "@img/sharp-linux-arm64": "0.33.5", "@img/sharp-linux-s390x": "0.33.5", "@img/sharp-linux-x64": "0.33.5", "@img/sharp-linuxmusl-arm64": "0.33.5", "@img/sharp-linuxmusl-x64": "0.33.5", "@img/sharp-wasm32": "0.33.5", "@img/sharp-win32-ia32": "0.33.5", "@img/sharp-win32-x64": "0.33.5" } }, "sha512-haPVm1EkS9pgvHrQ/F3Xy+hgcuMV0Wm9vfIBSiwZ05k+xgb0PkBQpGsAA/oWdDobNaZTH5ppvHtzCFbnSEwHVw=="],
"miniflare/zod": ["zod@3.22.3", "", {}, "sha512-EjIevzuJRiRPbVH4mGc8nApb/lVLKVpmUhAaR5R5doKGfAnGJ6Gr3CViAVjP+4FWSxCsybeWQdcgCtbX+7oZug=="],
"opencontrol/hono": ["hono@4.7.4", "", {}, "sha512-Pst8FuGqz3L7tFF+u9Pu70eI0xa5S3LPUmrNd5Jm8nTHze9FxLTK9Kaj5g/k4UcwuJSXTP65SyHOPLrffpcAJg=="], "opencontrol/hono": ["hono@4.7.4", "", {}, "sha512-Pst8FuGqz3L7tFF+u9Pu70eI0xa5S3LPUmrNd5Jm8nTHze9FxLTK9Kaj5g/k4UcwuJSXTP65SyHOPLrffpcAJg=="],
"opencontrol/zod": ["zod@3.24.2", "", {}, "sha512-lY7CDW43ECgW9u1TcT3IoXHflywfVqDYze4waEz812jR/bZ8FHDsl7pFQoSZTz5N+2NqRXs8GBwnAwo3ZNxqhQ=="],
"opencontrol/zod-to-json-schema": ["zod-to-json-schema@3.24.3", "", { "peerDependencies": { "zod": "^3.24.1" } }, "sha512-HIAfWdYIt1sssHfYZFCXp4rU1w2r8hVVXYIlmoa0r0gABLs5di3RCqPU5DDROogVz1pAdYBaz7HK5n9pSUNs3A=="], "opencontrol/zod-to-json-schema": ["zod-to-json-schema@3.24.3", "", { "peerDependencies": { "zod": "^3.24.1" } }, "sha512-HIAfWdYIt1sssHfYZFCXp4rU1w2r8hVVXYIlmoa0r0gABLs5di3RCqPU5DDROogVz1pAdYBaz7HK5n9pSUNs3A=="],
"openid-client/jose": ["jose@4.15.9", "", {}, "sha512-1vUQX+IdDMVPj4k8kOxgUqlcK518yluMuGZwqlr44FS1ppZB/5GWh4rZG89erpOBOJjU/OBsnCVFfapsRz6nEA=="], "openid-client/jose": ["jose@4.15.9", "", {}, "sha512-1vUQX+IdDMVPj4k8kOxgUqlcK518yluMuGZwqlr44FS1ppZB/5GWh4rZG89erpOBOJjU/OBsnCVFfapsRz6nEA=="],
@ -1812,6 +1866,8 @@
"sitemap/sax": ["sax@1.4.1", "", {}, "sha512-+aWOz7yVScEGoKNd4PA10LZ8sk0A/z5+nXQG5giUO5rprX9jgYsTdov9qCchZiPIZezbZH+jRut8nPodFAX4Jg=="], "sitemap/sax": ["sax@1.4.1", "", {}, "sha512-+aWOz7yVScEGoKNd4PA10LZ8sk0A/z5+nXQG5giUO5rprX9jgYsTdov9qCchZiPIZezbZH+jRut8nPodFAX4Jg=="],
"sst/jose": ["jose@5.2.3", "", {}, "sha512-KUXdbctm1uHVL8BYhnyHkgp3zDX5KW8ZhAKVFEfUbU2P8Alpzjb+48hHvjOdQIyPshoblhzsuqOwEEAbtHVirA=="],
"token-types/ieee754": ["ieee754@1.2.1", "", {}, "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA=="], "token-types/ieee754": ["ieee754@1.2.1", "", {}, "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA=="],
"unicode-trie/pako": ["pako@0.2.9", "", {}, "sha512-NUcwaKxUxWrZLpDG+z/xZaCgQITkA/Dv4V/T6bw7VON6l1Xz/VnrBqrYjZQ12TamKHzITTfOEIYUj48y2KXImA=="], "unicode-trie/pako": ["pako@0.2.9", "", {}, "sha512-NUcwaKxUxWrZLpDG+z/xZaCgQITkA/Dv4V/T6bw7VON6l1Xz/VnrBqrYjZQ12TamKHzITTfOEIYUj48y2KXImA=="],

@ -4,6 +4,8 @@ export const domain = (() => {
return `${$app.stage}.dev.opencode.ai`
})()
const GITHUB_APP_ID = new sst.Secret("GITHUB_APP_ID")
const GITHUB_APP_PRIVATE_KEY = new sst.Secret("GITHUB_APP_PRIVATE_KEY")
const bucket = new sst.cloudflare.Bucket("Bucket")
export const api = new sst.cloudflare.Worker("Api", {
@ -13,7 +15,7 @@ export const api = new sst.cloudflare.Worker("Api", {
WEB_DOMAIN: domain,
},
url: true,
link: [bucket],
link: [bucket, GITHUB_APP_ID, GITHUB_APP_PRIVATE_KEY],
transform: {
worker: (args) => {
args.logpush = true
@ -39,6 +41,8 @@ new sst.cloudflare.x.Astro("Web", {
domain,
path: "packages/web",
environment: {
// For astro config
SST_STAGE: $app.stage,
VITE_API_URL: api.url,
},
})

@ -1,5 +1,11 @@
{
"$schema": "https://opencode.ai/config.json",
"mcp": {
"weather": {
"type": "local",
"command": ["opencode", "x", "@h1deya/mcp-server-weather"]
}
},
"experimental": {
"hook": {
"file_edited": {

@ -7,7 +7,7 @@
"scripts": { "scripts": {
"dev": "bun run packages/opencode/src/index.ts", "dev": "bun run packages/opencode/src/index.ts",
"typecheck": "bun run --filter='*' typecheck", "typecheck": "bun run --filter='*' typecheck",
"stainless": "bun run ./packages/opencode/src/index.ts serve ", "stainless": "./scripts/stainless",
"postinstall": "./scripts/hooks" "postinstall": "./scripts/hooks"
}, },
"workspaces": { "workspaces": {
@ -17,13 +17,13 @@
"catalog": { "catalog": {
"typescript": "5.8.2", "typescript": "5.8.2",
"@types/node": "22.13.9", "@types/node": "22.13.9",
"zod": "3.24.2", "zod": "3.25.49",
"ai": "4.3.16" "ai": "5.0.0-beta.7"
} }
}, },
"devDependencies": { "devDependencies": {
"prettier": "3.5.3", "prettier": "3.5.3",
"sst": "3.17.6" "sst": "3.17.8"
}, },
"repository": { "repository": {
"type": "git", "type": "git",
@ -31,10 +31,8 @@
}, },
"license": "MIT", "license": "MIT",
"prettier": { "prettier": {
"semi": false "semi": false,
}, "printWidth": 120
"overrides": {
"zod": "3.24.2"
}, },
"trustedDependencies": [ "trustedDependencies": [
"esbuild", "esbuild",

@ -8,5 +8,10 @@
"@cloudflare/workers-types": "4.20250522.0", "@cloudflare/workers-types": "4.20250522.0",
"typescript": "catalog:", "typescript": "catalog:",
"@types/node": "catalog:" "@types/node": "catalog:"
},
"dependencies": {
"@octokit/auth-app": "8.0.1",
"@octokit/rest": "22.0.0",
"jose": "6.0.11"
} }
} }

@ -1,5 +1,9 @@
import { DurableObject } from "cloudflare:workers"
import { randomUUID } from "node:crypto"
import { jwtVerify, createRemoteJWKSet } from "jose"
import { createAppAuth } from "@octokit/auth-app"
import { Octokit } from "@octokit/rest"
import { Resource } from "sst"
type Env = {
SYNC_SERVER: DurableObjectNamespace<SyncServer>
@ -38,10 +42,7 @@ export class SyncServer extends DurableObject<Env> {
async publish(key: string, content: any) {
const sessionID = await this.getSessionID()
if (
!key.startsWith(`session/info/${sessionID}`) &&
!key.startsWith(`session/message/${sessionID}/`)
)
if (!key.startsWith(`session/info/${sessionID}`) && !key.startsWith(`session/message/${sessionID}/`))
return new Response("Error: Invalid key", { status: 400 })
// store message
@ -107,7 +108,7 @@ export class SyncServer extends DurableObject<Env> {
}
export default {
async fetch(request: Request, env: Env, ctx: ExecutionContext) {
async fetch(request: Request, env: Env, ctx: ExecutionContext): Promise<Response> {
const url = new URL(request.url)
const splits = url.pathname.split("/")
const method = splits[1]
@ -184,8 +185,7 @@ export default {
}
const id = url.searchParams.get("id")
console.log("share_poll", id)
if (!id)
return new Response("Error: Share ID is required", { status: 400 })
if (!id) return new Response("Error: Share ID is required", { status: 400 })
const stub = env.SYNC_SERVER.get(env.SYNC_SERVER.idFromName(id))
return stub.fetch(request)
}
@ -193,8 +193,7 @@ export default {
if (request.method === "GET" && method === "share_data") {
const id = url.searchParams.get("id")
console.log("share_data", id)
if (!id)
return new Response("Error: Share ID is required", { status: 400 })
if (!id) return new Response("Error: Share ID is required", { status: 400 })
const stub = env.SYNC_SERVER.get(env.SYNC_SERVER.idFromName(id))
const data = await stub.getData()
@ -223,5 +222,60 @@ export default {
},
)
}
if (request.method === "POST" && method === "exchange_github_app_token") {
const EXPECTED_AUDIENCE = "opencode-github-action"
const GITHUB_ISSUER = "https://token.actions.githubusercontent.com"
const JWKS_URL = `${GITHUB_ISSUER}/.well-known/jwks`
// get Authorization header
const authHeader = request.headers.get("Authorization")
const token = authHeader?.replace(/^Bearer /, "")
if (!token)
return new Response(JSON.stringify({ error: "Authorization header is required" }), {
status: 401,
headers: { "Content-Type": "application/json" },
})
// verify token
const JWKS = createRemoteJWKSet(new URL(JWKS_URL))
let owner, repo
try {
const { payload } = await jwtVerify(token, JWKS, {
issuer: GITHUB_ISSUER,
audience: EXPECTED_AUDIENCE,
})
const sub = payload.sub // e.g. 'repo:my-org/my-repo:ref:refs/heads/main'
const parts = sub.split(":")[1].split("/")
owner = parts[0]
repo = parts[1]
} catch (err) {
console.error("Token verification failed:", err)
return new Response(JSON.stringify({ error: "Invalid or expired token" }), {
status: 403,
headers: { "Content-Type": "application/json" },
})
}
// Create app JWT token
const auth = createAppAuth({
appId: Resource.GITHUB_APP_ID.value,
privateKey: Resource.GITHUB_APP_PRIVATE_KEY.value,
})
const appAuth = await auth({ type: "app" })
// Lookup installation
const octokit = new Octokit({ auth: appAuth.token })
const { data: installation } = await octokit.apps.getRepoInstallation({ owner, repo })
// Get installation token
const installationAuth = await auth({ type: "installation", installationId: installation.id })
return new Response(JSON.stringify({ token: installationAuth.token }), {
headers: { "Content-Type": "application/json" },
})
}
return new Response("Not Found", { status: 404 })
},
}
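
The new exchange_github_app_token route verifies a GitHub Actions OIDC token against GitHub's JWKS, derives the calling owner/repo from the token's sub claim, and responds with an installation token for the GitHub App. A minimal caller might look like the sketch below; it is an illustration only, assuming a workflow with id-token: write permission, the @actions/core helper for minting the OIDC token, and a placeholder apiUrl for wherever the worker is deployed.

import * as core from "@actions/core"

// Exchange the job's OIDC token for a GitHub App installation token.
// The audience must match EXPECTED_AUDIENCE in the worker above.
async function getInstallationToken(apiUrl: string): Promise<string> {
  const oidcToken = await core.getIDToken("opencode-github-action")
  const res = await fetch(`${apiUrl}/exchange_github_app_token`, {
    method: "POST",
    headers: { Authorization: `Bearer ${oidcToken}` },
  })
  if (!res.ok) throw new Error(`token exchange failed: ${res.status}`)
  const { token } = (await res.json()) as { token: string }
  return token
}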

@ -6,18 +6,26 @@
import "sst" import "sst"
declare module "sst" { declare module "sst" {
export interface Resource { export interface Resource {
Web: { "GITHUB_APP_ID": {
type: "sst.cloudflare.Astro" "type": "sst.sst.Secret"
url: string "value": string
}
"GITHUB_APP_PRIVATE_KEY": {
"type": "sst.sst.Secret"
"value": string
}
"Web": {
"type": "sst.cloudflare.Astro"
"url": string
} }
} }
} }
// cloudflare // cloudflare
import * as cloudflare from "@cloudflare/workers-types" import * as cloudflare from "@cloudflare/workers-types";
declare module "sst" { declare module "sst" {
export interface Resource { export interface Resource {
Api: cloudflare.Service "Api": cloudflare.Service
Bucket: cloudflare.R2Bucket "Bucket": cloudflare.R2Bucket
} }
} }

@ -1,4 +1,3 @@
node_modules
research
dist
gen

@ -1,369 +0,0 @@
{
"type": "object",
"properties": {
"$schema": {
"type": "string",
"description": "JSON schema reference for configuration validation"
},
"theme": {
"type": "string",
"description": "Theme name to use for the interface"
},
"keybinds": {
"type": "object",
"properties": {
"leader": {
"type": "string",
"description": "Leader key for keybind combinations"
},
"help": {
"type": "string",
"description": "Show help dialog"
},
"editor_open": {
"type": "string",
"description": "Open external editor"
},
"session_new": {
"type": "string",
"description": "Create a new session"
},
"session_list": {
"type": "string",
"description": "List all sessions"
},
"session_share": {
"type": "string",
"description": "Share current session"
},
"session_interrupt": {
"type": "string",
"description": "Interrupt current session"
},
"session_compact": {
"type": "string",
"description": "Toggle compact mode for session"
},
"tool_details": {
"type": "string",
"description": "Show tool details"
},
"model_list": {
"type": "string",
"description": "List available models"
},
"theme_list": {
"type": "string",
"description": "List available themes"
},
"project_init": {
"type": "string",
"description": "Initialize project configuration"
},
"input_clear": {
"type": "string",
"description": "Clear input field"
},
"input_paste": {
"type": "string",
"description": "Paste from clipboard"
},
"input_submit": {
"type": "string",
"description": "Submit input"
},
"input_newline": {
"type": "string",
"description": "Insert newline in input"
},
"history_previous": {
"type": "string",
"description": "Navigate to previous history item"
},
"history_next": {
"type": "string",
"description": "Navigate to next history item"
},
"messages_page_up": {
"type": "string",
"description": "Scroll messages up by one page"
},
"messages_page_down": {
"type": "string",
"description": "Scroll messages down by one page"
},
"messages_half_page_up": {
"type": "string",
"description": "Scroll messages up by half page"
},
"messages_half_page_down": {
"type": "string",
"description": "Scroll messages down by half page"
},
"messages_previous": {
"type": "string",
"description": "Navigate to previous message"
},
"messages_next": {
"type": "string",
"description": "Navigate to next message"
},
"messages_first": {
"type": "string",
"description": "Navigate to first message"
},
"messages_last": {
"type": "string",
"description": "Navigate to last message"
},
"app_exit": {
"type": "string",
"description": "Exit the application"
}
},
"additionalProperties": false,
"description": "Custom keybind configurations"
},
"autoshare": {
"type": "boolean",
"description": "Share newly created sessions automatically"
},
"autoupdate": {
"type": "boolean",
"description": "Automatically update to the latest version"
},
"disabled_providers": {
"type": "array",
"items": {
"type": "string"
},
"description": "Disable providers that are loaded automatically"
},
"model": {
"type": "string",
"description": "Model to use in the format of provider/model, eg anthropic/claude-2"
},
"provider": {
"type": "object",
"additionalProperties": {
"type": "object",
"properties": {
"api": {
"type": "string"
},
"name": {
"type": "string"
},
"env": {
"type": "array",
"items": {
"type": "string"
}
},
"id": {
"type": "string"
},
"npm": {
"type": "string"
},
"models": {
"type": "object",
"additionalProperties": {
"type": "object",
"properties": {
"name": {
"type": "string"
},
"attachment": {
"type": "boolean"
},
"reasoning": {
"type": "boolean"
},
"temperature": {
"type": "boolean"
},
"tool_call": {
"type": "boolean"
},
"cost": {
"type": "object",
"properties": {
"input": {
"type": "number"
},
"output": {
"type": "number"
},
"cache_read": {
"type": "number"
},
"cache_write": {
"type": "number"
}
},
"required": ["input", "output"],
"additionalProperties": false
},
"limit": {
"type": "object",
"properties": {
"context": {
"type": "number"
},
"output": {
"type": "number"
}
},
"required": ["context", "output"],
"additionalProperties": false
},
"id": {
"type": "string"
},
"options": {
"type": "object",
"additionalProperties": {}
}
},
"additionalProperties": false
}
},
"options": {
"type": "object",
"additionalProperties": {}
}
},
"required": ["models"],
"additionalProperties": false
},
"description": "Custom provider configurations and model overrides"
},
"mcp": {
"type": "object",
"additionalProperties": {
"anyOf": [
{
"type": "object",
"properties": {
"type": {
"type": "string",
"const": "local",
"description": "Type of MCP server connection"
},
"command": {
"type": "array",
"items": {
"type": "string"
},
"description": "Command and arguments to run the MCP server"
},
"environment": {
"type": "object",
"additionalProperties": {
"type": "string"
},
"description": "Environment variables to set when running the MCP server"
},
"enabled": {
"type": "boolean",
"description": "Enable or disable the MCP server on startup"
}
},
"required": ["type", "command"],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"type": {
"type": "string",
"const": "remote",
"description": "Type of MCP server connection"
},
"url": {
"type": "string",
"description": "URL of the remote MCP server"
},
"enabled": {
"type": "boolean",
"description": "Enable or disable the MCP server on startup"
}
},
"required": ["type", "url"],
"additionalProperties": false
}
]
},
"description": "MCP (Model Context Protocol) server configurations"
},
"instructions": {
"type": "array",
"items": {
"type": "string"
},
"description": "Additional instruction files or patterns to include"
},
"experimental": {
"type": "object",
"properties": {
"hook": {
"type": "object",
"properties": {
"file_edited": {
"type": "object",
"additionalProperties": {
"type": "array",
"items": {
"type": "object",
"properties": {
"command": {
"type": "array",
"items": {
"type": "string"
}
},
"environment": {
"type": "object",
"additionalProperties": {
"type": "string"
}
}
},
"required": ["command"],
"additionalProperties": false
}
}
},
"session_completed": {
"type": "array",
"items": {
"type": "object",
"properties": {
"command": {
"type": "array",
"items": {
"type": "string"
}
},
"environment": {
"type": "object",
"additionalProperties": {
"type": "string"
}
}
},
"required": ["command"],
"additionalProperties": false
}
}
},
"additionalProperties": false
}
},
"additionalProperties": false
}
},
"additionalProperties": false,
"$schema": "http://json-schema.org/draft-07/schema#"
}

@ -57,8 +57,7 @@ for (const [os, arch] of targets) {
2, 2,
), ),
) )
if (!dry) if (!dry) await $`cd dist/${name} && bun publish --access public --tag ${npmTag}`
await $`cd dist/${name} && bun publish --access public --tag ${npmTag}`
optionalDependencies[name] = version optionalDependencies[name] = version
} }
@ -82,8 +81,7 @@ await Bun.file(`./dist/${pkg.name}/package.json`).write(
2, 2,
), ),
) )
if (!dry) if (!dry) await $`cd ./dist/${pkg.name} && bun publish --access public --tag ${npmTag}`
await $`cd ./dist/${pkg.name} && bun publish --access public --tag ${npmTag}`
if (!snapshot) { if (!snapshot) {
// Github Release // Github Release
@ -91,15 +89,11 @@ if (!snapshot) {
await $`cd dist/${key}/bin && zip -r ../../${key}.zip *` await $`cd dist/${key}/bin && zip -r ../../${key}.zip *`
} }
const previous = await fetch( const previous = await fetch("https://api.github.com/repos/sst/opencode/releases/latest")
"https://api.github.com/repos/sst/opencode/releases/latest",
)
.then((res) => res.json()) .then((res) => res.json())
.then((data) => data.tag_name) .then((data) => data.tag_name)
const commits = await fetch( const commits = await fetch(`https://api.github.com/repos/sst/opencode/compare/${previous}...HEAD`)
`https://api.github.com/repos/sst/opencode/compare/${previous}...HEAD`,
)
.then((res) => res.json()) .then((res) => res.json())
.then((data) => data.commits || []) .then((data) => data.commits || [])
@ -109,6 +103,7 @@ if (!snapshot) {
const lower = x.toLowerCase() const lower = x.toLowerCase()
return ( return (
!lower.includes("ignore:") && !lower.includes("ignore:") &&
!lower.includes("chore:") &&
!lower.includes("ci:") && !lower.includes("ci:") &&
!lower.includes("wip:") && !lower.includes("wip:") &&
!lower.includes("docs:") && !lower.includes("docs:") &&
@ -117,26 +112,13 @@ if (!snapshot) {
}) })
.join("\n") .join("\n")
if (!dry) if (!dry) await $`gh release create v${version} --title "v${version}" --notes ${notes} ./dist/*.zip`
await $`gh release create v${version} --title "v${version}" --notes ${notes} ./dist/*.zip`
// Calculate SHA values // Calculate SHA values
const arm64Sha = const arm64Sha = await $`sha256sum ./dist/opencode-linux-arm64.zip | cut -d' ' -f1`.text().then((x) => x.trim())
await $`sha256sum ./dist/opencode-linux-arm64.zip | cut -d' ' -f1` const x64Sha = await $`sha256sum ./dist/opencode-linux-x64.zip | cut -d' ' -f1`.text().then((x) => x.trim())
.text() const macX64Sha = await $`sha256sum ./dist/opencode-darwin-x64.zip | cut -d' ' -f1`.text().then((x) => x.trim())
.then((x) => x.trim()) const macArm64Sha = await $`sha256sum ./dist/opencode-darwin-arm64.zip | cut -d' ' -f1`.text().then((x) => x.trim())
const x64Sha =
await $`sha256sum ./dist/opencode-linux-x64.zip | cut -d' ' -f1`
.text()
.then((x) => x.trim())
const macX64Sha =
await $`sha256sum ./dist/opencode-darwin-x64.zip | cut -d' ' -f1`
.text()
.then((x) => x.trim())
const macArm64Sha =
await $`sha256sum ./dist/opencode-darwin-arm64.zip | cut -d' ' -f1`
.text()
.then((x) => x.trim())
// AUR package // AUR package
const pkgbuild = [ const pkgbuild = [
@ -170,9 +152,7 @@ if (!snapshot) {
for (const pkg of ["opencode", "opencode-bin"]) { for (const pkg of ["opencode", "opencode-bin"]) {
await $`rm -rf ./dist/aur-${pkg}` await $`rm -rf ./dist/aur-${pkg}`
await $`git clone ssh://aur@aur.archlinux.org/${pkg}.git ./dist/aur-${pkg}` await $`git clone ssh://aur@aur.archlinux.org/${pkg}.git ./dist/aur-${pkg}`
await Bun.file(`./dist/aur-${pkg}/PKGBUILD`).write( await Bun.file(`./dist/aur-${pkg}/PKGBUILD`).write(pkgbuild.replace("${pkg}", pkg))
pkgbuild.replace("${pkg}", pkg),
)
await $`cd ./dist/aur-${pkg} && makepkg --printsrcinfo > .SRCINFO` await $`cd ./dist/aur-${pkg} && makepkg --printsrcinfo > .SRCINFO`
await $`cd ./dist/aur-${pkg} && git add PKGBUILD .SRCINFO` await $`cd ./dist/aur-${pkg} && git add PKGBUILD .SRCINFO`
await $`cd ./dist/aur-${pkg} && git commit -m "Update to v${version}"` await $`cd ./dist/aur-${pkg} && git commit -m "Update to v${version}"`

@ -4,5 +4,32 @@ import "zod-openapi/extend"
import { Config } from "../src/config/config" import { Config } from "../src/config/config"
import { zodToJsonSchema } from "zod-to-json-schema" import { zodToJsonSchema } from "zod-to-json-schema"
const result = zodToJsonSchema(Config.Info)
await Bun.write("config.schema.json", JSON.stringify(result, null, 2))
const file = process.argv[2]
const result = zodToJsonSchema(Config.Info, {
/**
* We'll use the `default` values of the field as the only value in `examples`.
* This will ensure no docs are needed to be read, as the configuration is
* self-documenting.
*
* See https://json-schema.org/draft/2020-12/draft-bhutton-json-schema-validation-00#rfc.section.9.5
*/
postProcess(jsonSchema) {
const schema = jsonSchema as typeof jsonSchema & {
examples?: unknown[]
}
if (schema && typeof schema === "object" && "type" in schema && schema.type === "string" && schema?.default) {
if (!schema.examples) {
schema.examples = [schema.default]
}
schema.description = [schema.description || "", `default: \`${schema.default}\``]
.filter(Boolean)
.join("\n\n")
.trim()
}
return jsonSchema
},
})
await Bun.write(file, JSON.stringify(result, null, 2))
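
The postProcess hook above promotes a field's default into examples and appends it to the description so the generated JSON schema is self-documenting. A minimal sketch of the effect on a single string field follows; the field name and default value are illustrative, not taken from the real config.

import { z } from "zod"
import { zodToJsonSchema } from "zod-to-json-schema"

const Example = z.object({
  theme: z.string().describe("Theme name to use for the interface").default("system"),
})

const schema = zodToJsonSchema(Example, {
  // Same idea as the hook above: copy `default` into `examples` and note it in the description.
  postProcess(jsonSchema) {
    const s = jsonSchema as any
    if (s?.type === "string" && s.default) {
      if (!s.examples) s.examples = [s.default]
      s.description = [s.description || "", `default: \`${s.default}\``].filter(Boolean).join("\n\n").trim()
    }
    return jsonSchema
  },
})

// schema.properties.theme now comes out roughly as:
// { "type": "string", "default": "system", "examples": ["system"],
//   "description": "Theme name to use for the interface\n\ndefault: `system`" }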

@ -45,23 +45,14 @@ export namespace App {
} }
export const provideExisting = ctx.provide export const provideExisting = ctx.provide
export async function provide<T>( export async function provide<T>(input: Input, cb: (app: App.Info) => Promise<T>) {
input: Input,
cb: (app: App.Info) => Promise<T>,
) {
log.info("creating", { log.info("creating", {
cwd: input.cwd, cwd: input.cwd,
}) })
const git = await Filesystem.findUp(".git", input.cwd).then(([x]) => const git = await Filesystem.findUp(".git", input.cwd).then(([x]) => (x ? path.dirname(x) : undefined))
x ? path.dirname(x) : undefined,
)
log.info("git", { git }) log.info("git", { git })
const data = path.join( const data = path.join(Global.Path.data, "project", git ? directory(git) : "global")
Global.Path.data,
"project",
git ? directory(git) : "global",
)
const stateFile = Bun.file(path.join(data, APP_JSON)) const stateFile = Bun.file(path.join(data, APP_JSON))
const state = (await stateFile.json().catch(() => ({}))) as { const state = (await stateFile.json().catch(() => ({}))) as {
initialized: number initialized: number

@ -10,14 +10,8 @@ export namespace AuthAnthropic {
url.searchParams.set("code", "true") url.searchParams.set("code", "true")
url.searchParams.set("client_id", CLIENT_ID) url.searchParams.set("client_id", CLIENT_ID)
url.searchParams.set("response_type", "code") url.searchParams.set("response_type", "code")
url.searchParams.set( url.searchParams.set("redirect_uri", "https://console.anthropic.com/oauth/code/callback")
"redirect_uri", url.searchParams.set("scope", "org:create_api_key user:profile user:inference")
"https://console.anthropic.com/oauth/code/callback",
)
url.searchParams.set(
"scope",
"org:create_api_key user:profile user:inference",
)
url.searchParams.set("code_challenge", pkce.challenge) url.searchParams.set("code_challenge", pkce.challenge)
url.searchParams.set("code_challenge_method", "S256") url.searchParams.set("code_challenge_method", "S256")
url.searchParams.set("state", pkce.verifier) url.searchParams.set("state", pkce.verifier)
@ -57,20 +51,17 @@ export namespace AuthAnthropic {
const info = await Auth.get("anthropic") const info = await Auth.get("anthropic")
if (!info || info.type !== "oauth") return if (!info || info.type !== "oauth") return
if (info.access && info.expires > Date.now()) return info.access if (info.access && info.expires > Date.now()) return info.access
const response = await fetch( const response = await fetch("https://console.anthropic.com/v1/oauth/token", {
"https://console.anthropic.com/v1/oauth/token", method: "POST",
{ headers: {
method: "POST", "Content-Type": "application/json",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify({
grant_type: "refresh_token",
refresh_token: info.refresh,
client_id: CLIENT_ID,
}),
}, },
) body: JSON.stringify({
grant_type: "refresh_token",
refresh_token: info.refresh,
client_id: CLIENT_ID,
}),
})
if (!response.ok) return if (!response.ok) return
const json = await response.json() const json = await response.json()
await Auth.set("anthropic", { await Auth.set("anthropic", {

@ -4,9 +4,7 @@ import path from "path"
export const AuthCopilot = lazy(async () => {
const file = Bun.file(path.join(Global.Path.state, "plugin", "copilot.ts"))
const response = fetch(
"https://raw.githubusercontent.com/sst/opencode-github-copilot/refs/heads/main/auth.ts",
)
const response = fetch("https://raw.githubusercontent.com/sst/opencode-github-copilot/refs/heads/main/auth.ts")
.then((x) => Bun.write(file, x))
.catch(() => {})

@ -122,10 +122,7 @@ export namespace AuthGithubCopilot {
return tokenData.token
}
export const DeviceCodeError = NamedError.create(
"DeviceCodeError",
z.object({}),
)
export const DeviceCodeError = NamedError.create("DeviceCodeError", z.object({}))
export const TokenExchangeError = NamedError.create(
"TokenExchangeError",

@ -8,10 +8,7 @@ import { readableStreamToText } from "bun"
export namespace BunProc { export namespace BunProc {
const log = Log.create({ service: "bun" }) const log = Log.create({ service: "bun" })
export async function run( export async function run(cmd: string[], options?: Bun.SpawnOptions.OptionsObject<any, any, any>) {
cmd: string[],
options?: Bun.SpawnOptions.OptionsObject<any, any, any>,
) {
log.info("running", { log.info("running", {
cmd: [which(), ...cmd], cmd: [which(), ...cmd],
...options, ...options,
@ -26,9 +23,17 @@ export namespace BunProc {
BUN_BE_BUN: "1", BUN_BE_BUN: "1",
}, },
}) })
const code = await result.exited; const code = await result.exited
const stdout = result.stdout ? typeof result.stdout === "number" ? result.stdout : await readableStreamToText(result.stdout) : undefined const stdout = result.stdout
const stderr = result.stderr ? typeof result.stderr === "number" ? result.stderr : await readableStreamToText(result.stderr) : undefined ? typeof result.stdout === "number"
? result.stdout
: await readableStreamToText(result.stdout)
: undefined
const stderr = result.stderr
? typeof result.stderr === "number"
? result.stderr
: await readableStreamToText(result.stderr)
: undefined
log.info("done", { log.info("done", {
code, code,
stdout, stdout,
@ -55,15 +60,18 @@ export namespace BunProc {
export async function install(pkg: string, version = "latest") { export async function install(pkg: string, version = "latest") {
const mod = path.join(Global.Path.cache, "node_modules", pkg) const mod = path.join(Global.Path.cache, "node_modules", pkg)
const pkgjson = Bun.file(path.join(Global.Path.cache, "package.json")) const pkgjson = Bun.file(path.join(Global.Path.cache, "package.json"))
const parsed = await pkgjson.json().catch(() => ({ const parsed = await pkgjson.json().catch(async () => {
dependencies: {}, const result = { dependencies: {} }
})) await Bun.write(pkgjson.name!, JSON.stringify(result, null, 2))
return result
})
if (parsed.dependencies[pkg] === version) return mod if (parsed.dependencies[pkg] === version) return mod
parsed.dependencies[pkg] = version await BunProc.run(
await Bun.write(pkgjson, JSON.stringify(parsed, null, 2)) ["add", "--exact", "--cwd", Global.Path.cache, "--registry=https://registry.npmjs.org", pkg + "@" + version],
await BunProc.run(["install", "--registry=https://registry.npmjs.org"], { {
cwd: Global.Path.cache, cwd: Global.Path.cache,
}).catch((e) => { },
).catch((e) => {
throw new InstallFailedError( throw new InstallFailedError(
{ pkg, version }, { pkg, version },
{ {
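
The install path above now seeds an empty package.json in the global cache when none can be parsed and then delegates to bun itself via bun add --exact against the npm registry, rather than hand-editing the cached manifest and running a bare install. A hedged usage sketch follows; the import path is assumed, and the package name is simply the one from the MCP example earlier.

// Illustrative only: resolves (installing if needed) a package in the global cache
// and returns its node_modules path.
import { BunProc } from "./bun"

const mod = await BunProc.install("@h1deya/mcp-server-weather", "latest")
// Under the hood this now runs roughly:
//   bun add --exact --cwd <Global.Path.cache> --registry=https://registry.npmjs.org @h1deya/mcp-server-weather@latest
console.log(mod) // expected: <Global.Path.cache>/node_modules/@h1deya/mcp-server-weather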

@ -18,10 +18,7 @@ export namespace Bus {
const registry = new Map<string, EventDefinition>() const registry = new Map<string, EventDefinition>()
export function event<Type extends string, Properties extends ZodType>( export function event<Type extends string, Properties extends ZodType>(type: Type, properties: Properties) {
type: Type,
properties: Properties,
) {
const result = { const result = {
type, type,
properties, properties,
@ -72,10 +69,7 @@ export namespace Bus {
export function subscribe<Definition extends EventDefinition>( export function subscribe<Definition extends EventDefinition>(
def: Definition, def: Definition,
callback: (event: { callback: (event: { type: Definition["type"]; properties: z.infer<Definition["properties"]> }) => void,
type: Definition["type"]
properties: z.infer<Definition["properties"]>
}) => void,
) { ) {
return raw(def.type, callback) return raw(def.type, callback)
} }

@ -1,20 +1,15 @@
import { App } from "../app/app"
import { ConfigHooks } from "../config/hooks"
import { FileWatcher } from "../file/watch"
import { Format } from "../format"
import { LSP } from "../lsp"
import { Share } from "../share/share"
export async function bootstrap<T>(
input: App.Input,
cb: (app: App.Info) => Promise<T>,
) {
export async function bootstrap<T>(input: App.Input, cb: (app: App.Info) => Promise<T>) {
return App.provide(input, async (app) => {
Share.init()
Format.init()
ConfigHooks.init()
LSP.init()
FileWatcher.init()
return cb(app)
})

@ -15,11 +15,7 @@ export const AuthCommand = cmd({
command: "auth", command: "auth",
describe: "manage credentials", describe: "manage credentials",
builder: (yargs) => builder: (yargs) =>
yargs yargs.command(AuthLoginCommand).command(AuthLogoutCommand).command(AuthListCommand).demandCommand(),
.command(AuthLoginCommand)
.command(AuthLogoutCommand)
.command(AuthListCommand)
.demandCommand(),
async handler() {}, async handler() {},
}) })
@ -31,9 +27,7 @@ export const AuthListCommand = cmd({
UI.empty() UI.empty()
const authPath = path.join(Global.Path.data, "auth.json") const authPath = path.join(Global.Path.data, "auth.json")
const homedir = os.homedir() const homedir = os.homedir()
const displayPath = authPath.startsWith(homedir) const displayPath = authPath.startsWith(homedir) ? authPath.replace(homedir, "~") : authPath
? authPath.replace(homedir, "~")
: authPath
prompts.intro(`Credentials ${UI.Style.TEXT_DIM}${displayPath}`) prompts.intro(`Credentials ${UI.Style.TEXT_DIM}${displayPath}`)
const results = await Auth.all().then((x) => Object.entries(x)) const results = await Auth.all().then((x) => Object.entries(x))
const database = await ModelsDev.get() const database = await ModelsDev.get()
@ -114,8 +108,7 @@ export const AuthLoginCommand = cmd({
if (provider === "other") { if (provider === "other") {
provider = await prompts.text({ provider = await prompts.text({
message: "Enter provider id", message: "Enter provider id",
validate: (x) => validate: (x) => (x.match(/^[a-z-]+$/) ? undefined : "a-z and hyphens only"),
x.match(/^[a-z-]+$/) ? undefined : "a-z and hyphens only",
}) })
if (prompts.isCancel(provider)) throw new UI.CancelledError() if (prompts.isCancel(provider)) throw new UI.CancelledError()
provider = provider.replace(/^@ai-sdk\//, "") provider = provider.replace(/^@ai-sdk\//, "")
@ -186,17 +179,13 @@ export const AuthLoginCommand = cmd({
await new Promise((resolve) => setTimeout(resolve, 10)) await new Promise((resolve) => setTimeout(resolve, 10))
const deviceInfo = await copilot.authorize() const deviceInfo = await copilot.authorize()
prompts.note( prompts.note(`Please visit: ${deviceInfo.verification}\nEnter code: ${deviceInfo.user}`)
`Please visit: ${deviceInfo.verification}\nEnter code: ${deviceInfo.user}`,
)
const spinner = prompts.spinner() const spinner = prompts.spinner()
spinner.start("Waiting for authorization...") spinner.start("Waiting for authorization...")
while (true) { while (true) {
await new Promise((resolve) => await new Promise((resolve) => setTimeout(resolve, deviceInfo.interval * 1000))
setTimeout(resolve, deviceInfo.interval * 1000),
)
const response = await copilot.poll(deviceInfo.device) const response = await copilot.poll(deviceInfo.device)
if (response.status === "pending") continue if (response.status === "pending") continue
if (response.status === "success") { if (response.status === "success") {
@ -248,12 +237,7 @@ export const AuthLogoutCommand = cmd({
const providerID = await prompts.select({ const providerID = await prompts.select({
message: "Select provider", message: "Select provider",
options: credentials.map(([key, value]) => ({ options: credentials.map(([key, value]) => ({
label: label: (database[key]?.name || key) + UI.Style.TEXT_DIM + " (" + value.type + ")",
(database[key]?.name || key) +
UI.Style.TEXT_DIM +
" (" +
value.type +
")",
value: key, value: key,
})), })),
}) })

@ -31,7 +31,6 @@ const FileStatusCommand = cmd({
export const FileCommand = cmd({
command: "file",
builder: (yargs) =>
yargs.command(FileReadCommand).command(FileStatusCommand).demandCommand(),
builder: (yargs) => yargs.command(FileReadCommand).command(FileStatusCommand).demandCommand(),
async handler() {},
})

@ -3,6 +3,7 @@ import { cmd } from "../cmd"
import { FileCommand } from "./file" import { FileCommand } from "./file"
import { LSPCommand } from "./lsp" import { LSPCommand } from "./lsp"
import { RipgrepCommand } from "./ripgrep" import { RipgrepCommand } from "./ripgrep"
import { ScrapCommand } from "./scrap"
import { SnapshotCommand } from "./snapshot" import { SnapshotCommand } from "./snapshot"
export const DebugCommand = cmd({ export const DebugCommand = cmd({
@ -12,14 +13,13 @@ export const DebugCommand = cmd({
.command(LSPCommand) .command(LSPCommand)
.command(RipgrepCommand) .command(RipgrepCommand)
.command(FileCommand) .command(FileCommand)
.command(ScrapCommand)
.command(SnapshotCommand) .command(SnapshotCommand)
.command({ .command({
command: "wait", command: "wait",
async handler() { async handler() {
await bootstrap({ cwd: process.cwd() }, async () => { await bootstrap({ cwd: process.cwd() }, async () => {
await new Promise((resolve) => await new Promise((resolve) => setTimeout(resolve, 1_000 * 60 * 60 * 24))
setTimeout(resolve, 1_000 * 60 * 60 * 24),
)
}) })
}, },
}) })

@ -6,14 +6,13 @@ import { Log } from "../../../util/log"
export const LSPCommand = cmd({ export const LSPCommand = cmd({
command: "lsp", command: "lsp",
builder: (yargs) => builder: (yargs) =>
yargs.command(DiagnosticsCommand).command(SymbolsCommand).demandCommand(), yargs.command(DiagnosticsCommand).command(SymbolsCommand).command(DocumentSymbolsCommand).demandCommand(),
async handler() {}, async handler() {},
}) })
const DiagnosticsCommand = cmd({ const DiagnosticsCommand = cmd({
command: "diagnostics <file>", command: "diagnostics <file>",
builder: (yargs) => builder: (yargs) => yargs.positional("file", { type: "string", demandOption: true }),
yargs.positional("file", { type: "string", demandOption: true }),
async handler(args) { async handler(args) {
await bootstrap({ cwd: process.cwd() }, async () => { await bootstrap({ cwd: process.cwd() }, async () => {
await LSP.touchFile(args.file, true) await LSP.touchFile(args.file, true)
@ -24,14 +23,24 @@ const DiagnosticsCommand = cmd({
export const SymbolsCommand = cmd({ export const SymbolsCommand = cmd({
command: "symbols <query>", command: "symbols <query>",
builder: (yargs) => builder: (yargs) => yargs.positional("query", { type: "string", demandOption: true }),
yargs.positional("query", { type: "string", demandOption: true }),
async handler(args) { async handler(args) {
await bootstrap({ cwd: process.cwd() }, async () => { await bootstrap({ cwd: process.cwd() }, async () => {
await LSP.touchFile("./src/index.ts", true)
using _ = Log.Default.time("symbols") using _ = Log.Default.time("symbols")
const results = await LSP.workspaceSymbol(args.query) const results = await LSP.workspaceSymbol(args.query)
console.log(JSON.stringify(results, null, 2)) console.log(JSON.stringify(results, null, 2))
}) })
}, },
}) })
export const DocumentSymbolsCommand = cmd({
command: "document-symbols <uri>",
builder: (yargs) => yargs.positional("uri", { type: "string", demandOption: true }),
async handler(args) {
await bootstrap({ cwd: process.cwd() }, async () => {
using _ = Log.Default.time("document-symbols")
const results = await LSP.documentSymbol(args.uri)
console.log(JSON.stringify(results, null, 2))
})
},
})

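For reference, the symbol subcommands above are thin wrappers around the LSP module; a minimal sketch of the underlying calls (the file:// URI shape for document-symbols is an assumption based on the parameter name):

  // run inside bootstrap(), as the debug commands above do
  const symbols = await LSP.workspaceSymbol("MyClass")                        // workspace-wide query
  const docSymbols = await LSP.documentSymbol("file:///abs/path/to/file.ts")  // single document
  console.log(JSON.stringify(symbols, null, 2))
  console.log(JSON.stringify(docSymbols, null, 2))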

@ -5,12 +5,7 @@ import { cmd } from "../cmd"
export const RipgrepCommand = cmd({ export const RipgrepCommand = cmd({
command: "rg", command: "rg",
-  builder: (yargs) =>
-    yargs
-      .command(TreeCommand)
-      .command(FilesCommand)
-      .command(SearchCommand)
-      .demandCommand(),
+  builder: (yargs) => yargs.command(TreeCommand).command(FilesCommand).command(SearchCommand).demandCommand(),
async handler() {}, async handler() {},
}) })
@ -50,7 +45,7 @@ const FilesCommand = cmd({
const files = await Ripgrep.files({ const files = await Ripgrep.files({
cwd: app.path.cwd, cwd: app.path.cwd,
query: args.query, query: args.query,
glob: args.glob, glob: args.glob ? [args.glob] : undefined,
limit: args.limit, limit: args.limit,
}) })
console.log(files.join("\n")) console.log(files.join("\n"))


@ -0,0 +1,7 @@
import { cmd } from "../cmd"
export const ScrapCommand = cmd({
command: "scrap",
builder: (yargs) => yargs,
async handler() {},
})


@ -4,11 +4,7 @@ import { cmd } from "../cmd"
export const SnapshotCommand = cmd({ export const SnapshotCommand = cmd({
command: "snapshot", command: "snapshot",
-  builder: (yargs) =>
-    yargs
-      .command(SnapshotCreateCommand)
-      .command(SnapshotRestoreCommand)
-      .demandCommand(),
+  builder: (yargs) => yargs.command(SnapshotCreateCommand).command(SnapshotRestoreCommand).demandCommand(),
async handler() {}, async handler() {},
}) })


@ -10,9 +10,6 @@ export const GenerateCommand = {
const dir = "gen" const dir = "gen"
await fs.rmdir(dir, { recursive: true }).catch(() => {}) await fs.rmdir(dir, { recursive: true }).catch(() => {})
await fs.mkdir(dir, { recursive: true }) await fs.mkdir(dir, { recursive: true })
-    await Bun.write(
-      path.join(dir, "openapi.json"),
-      JSON.stringify(specs, null, 2),
-    )
+    await Bun.write(path.join(dir, "openapi.json"), JSON.stringify(specs, null, 2))
}, },
} satisfies CommandModule } satisfies CommandModule


@ -2,12 +2,13 @@ import type { Argv } from "yargs"
import { Bus } from "../../bus" import { Bus } from "../../bus"
import { Provider } from "../../provider/provider" import { Provider } from "../../provider/provider"
import { Session } from "../../session" import { Session } from "../../session"
import { Message } from "../../session/message"
import { UI } from "../ui" import { UI } from "../ui"
import { cmd } from "./cmd" import { cmd } from "./cmd"
import { Flag } from "../../flag/flag" import { Flag } from "../../flag/flag"
import { Config } from "../../config/config" import { Config } from "../../config/config"
import { bootstrap } from "../bootstrap" import { bootstrap } from "../bootstrap"
import { MessageV2 } from "../../session/message-v2"
import { Mode } from "../../session/mode"
const TOOL: Record<string, [string, string]> = { const TOOL: Record<string, [string, string]> = {
todowrite: ["Todo", UI.Style.TEXT_WARNING_BOLD], todowrite: ["Todo", UI.Style.TEXT_WARNING_BOLD],
@ -52,13 +53,22 @@ export const RunCommand = cmd({
alias: ["m"], alias: ["m"],
describe: "model to use in the format of provider/model", describe: "model to use in the format of provider/model",
}) })
.option("mode", {
type: "string",
describe: "mode to use",
})
}, },
handler: async (args) => { handler: async (args) => {
const message = args.message.join(" ") let message = args.message.join(" ")
if (!process.stdin.isTTY) message += "\n" + (await Bun.stdin.text())
await bootstrap({ cwd: process.cwd() }, async () => { await bootstrap({ cwd: process.cwd() }, async () => {
const session = await (async () => { const session = await (async () => {
if (args.continue) { if (args.continue) {
-            const first = await Session.list().next()
+            const list = Session.list()
+            const first = await list.next()
+            await list.return()
if (first.done) return if (first.done) return
return first.value return first.value
} }
@ -78,27 +88,19 @@ export const RunCommand = cmd({
UI.empty() UI.empty()
UI.println(UI.logo()) UI.println(UI.logo())
UI.empty() UI.empty()
-      UI.println(UI.Style.TEXT_NORMAL_BOLD + "> ", message)
+      const displayMessage = message.length > 300 ? message.slice(0, 300) + "..." : message
+      UI.println(UI.Style.TEXT_NORMAL_BOLD + "> ", displayMessage)
UI.empty() UI.empty()
const cfg = await Config.get() const cfg = await Config.get()
if (cfg.autoshare || Flag.OPENCODE_AUTO_SHARE || args.share) { if (cfg.autoshare || Flag.OPENCODE_AUTO_SHARE || args.share) {
await Session.share(session.id) await Session.share(session.id)
-        UI.println(
-          UI.Style.TEXT_INFO_BOLD +
-            "~ https://opencode.ai/s/" +
-            session.id.slice(-8),
-        )
+        UI.println(UI.Style.TEXT_INFO_BOLD + "~ https://opencode.ai/s/" + session.id.slice(-8))
} }
UI.empty() UI.empty()
-      const { providerID, modelID } = args.model
-        ? Provider.parseModel(args.model)
-        : await Provider.defaultModel()
-      UI.println(
-        UI.Style.TEXT_NORMAL_BOLD + "@ ",
-        UI.Style.TEXT_NORMAL + `${providerID}/${modelID}`,
-      )
+      const { providerID, modelID } = args.model ? Provider.parseModel(args.model) : await Provider.defaultModel()
+      UI.println(UI.Style.TEXT_NORMAL_BOLD + "@ ", UI.Style.TEXT_NORMAL + `${providerID}/${modelID}`)
UI.empty() UI.empty()
function printEvent(color: string, type: string, title: string) { function printEvent(color: string, type: string, title: string) {
@ -110,24 +112,13 @@ export const RunCommand = cmd({
) )
} }
-            Bus.subscribe(Message.Event.PartUpdated, async (evt) => {
+            Bus.subscribe(MessageV2.Event.PartUpdated, async (evt) => {
               if (evt.properties.sessionID !== session.id) return
               const part = evt.properties.part
-              const message = await Session.getMessage(
-                evt.properties.sessionID,
-                evt.properties.messageID,
-              )
-              if (
-                part.type === "tool-invocation" &&
-                part.toolInvocation.state === "result"
-              ) {
-                const metadata = message.metadata.tool[part.toolInvocation.toolCallId]
-                const [tool, color] = TOOL[part.toolInvocation.toolName] ?? [
-                  part.toolInvocation.toolName,
-                  UI.Style.TEXT_INFO_BOLD,
-                ]
-                printEvent(color, tool, metadata?.title || "Unknown")
+              if (part.type === "tool" && part.state.status === "completed") {
+                const [tool, color] = TOOL[part.tool] ?? [part.tool, UI.Style.TEXT_INFO_BOLD]
+                printEvent(color, tool, part.state.title || "Unknown")
} }
if (part.type === "text") { if (part.type === "text") {
@ -141,10 +132,31 @@ export const RunCommand = cmd({
} }
}) })
let errorMsg: string | undefined
Bus.subscribe(Session.Event.Error, async (evt) => {
const { sessionID, error } = evt.properties
if (sessionID !== session.id || !error) return
let err = String(error.name)
if ("data" in error && error.data && "message" in error.data) {
err = error.data.message
}
errorMsg = errorMsg ? errorMsg + "\n" + err : err
UI.error(err)
})
const mode = args.mode ? await Mode.get(args.mode) : await Mode.list().then((x) => x[0])
const result = await Session.chat({ const result = await Session.chat({
sessionID: session.id, sessionID: session.id,
-            providerID,
-            modelID,
+            ...(mode.model
+              ? mode.model
+              : {
+                  providerID,
+                  modelID,
+                }),
+            mode: mode.name,
parts: [ parts: [
{ {
type: "text", type: "text",
@ -156,6 +168,7 @@ export const RunCommand = cmd({
if (isPiped) { if (isPiped) {
const match = result.parts.findLast((x) => x.type === "text") const match = result.parts.findLast((x) => x.type === "text")
if (match) process.stdout.write(match.text) if (match) process.stdout.write(match.text)
if (errorMsg) process.stdout.write(errorMsg)
} }
UI.empty() UI.empty()
}) })

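One detail worth calling out in the --continue path above: Session.list() is an async generator, so after pulling only the first session the command now calls list.return() to close the iterator instead of abandoning it. A minimal sketch of the pattern, using a hypothetical generator:

  async function* listThings() {
    try {
      yield "first"
      yield "second"
    } finally {
      // cleanup (closing handles, etc.) runs when the iterator is closed early
    }
  }

  const it = listThings()
  const first = await it.next() // { value: "first", done: false }
  await it.return(undefined)    // runs the finally block without draining the generator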

@ -38,9 +38,7 @@ export const ServeCommand = cmd({
hostname, hostname,
}) })
-    console.log(
-      `opencode server listening on http://${server.hostname}:${server.port}`,
-    )
+    console.log(`opencode server listening on http://${server.hostname}:${server.port}`)
await new Promise(() => {}) await new Promise(() => {})


@ -0,0 +1,179 @@
import { Storage } from "../../storage/storage"
import { MessageV2 } from "../../session/message-v2"
import { cmd } from "./cmd"
import { bootstrap } from "../bootstrap"
interface SessionStats {
totalSessions: number
totalMessages: number
totalCost: number
totalTokens: {
input: number
output: number
reasoning: number
cache: {
read: number
write: number
}
}
toolUsage: Record<string, number>
dateRange: {
earliest: number
latest: number
}
days: number
costPerDay: number
}
export const StatsCommand = cmd({
command: "stats",
describe: "analyze and display statistics from message-v2 format",
handler: async () => {
await bootstrap({ cwd: process.cwd() }, async () => {
const stats: SessionStats = {
totalSessions: 0,
totalMessages: 0,
totalCost: 0,
totalTokens: {
input: 0,
output: 0,
reasoning: 0,
cache: {
read: 0,
write: 0,
},
},
toolUsage: {},
dateRange: {
earliest: Date.now(),
latest: 0,
},
days: 0,
costPerDay: 0,
}
const sessionMap = new Map<string, number>()
try {
for await (const messagePath of Storage.list("session/message")) {
try {
const message = await Storage.readJSON<MessageV2.Info>(messagePath)
if (!message.parts.find((part) => part.type === "step-finish")) continue
stats.totalMessages++
const sessionId = message.sessionID
sessionMap.set(sessionId, (sessionMap.get(sessionId) || 0) + 1)
if (message.time.created < stats.dateRange.earliest) {
stats.dateRange.earliest = message.time.created
}
if (message.time.created > stats.dateRange.latest) {
stats.dateRange.latest = message.time.created
}
if (message.role === "assistant") {
stats.totalCost += message.cost
stats.totalTokens.input += message.tokens.input
stats.totalTokens.output += message.tokens.output
stats.totalTokens.reasoning += message.tokens.reasoning
stats.totalTokens.cache.read += message.tokens.cache.read
stats.totalTokens.cache.write += message.tokens.cache.write
for (const part of message.parts) {
if (part.type === "tool") {
stats.toolUsage[part.tool] = (stats.toolUsage[part.tool] || 0) + 1
}
}
}
} catch (e) {
continue
}
}
} catch (e) {
console.error("Failed to read storage:", e)
return
}
stats.totalSessions = sessionMap.size
if (stats.dateRange.latest > 0) {
const daysDiff = (stats.dateRange.latest - stats.dateRange.earliest) / (1000 * 60 * 60 * 24)
stats.days = Math.max(1, Math.ceil(daysDiff))
stats.costPerDay = stats.totalCost / stats.days
}
displayStats(stats)
})
},
})
function displayStats(stats: SessionStats) {
const width = 56
function renderRow(label: string, value: string): string {
const availableWidth = width - 1
const paddingNeeded = availableWidth - label.length - value.length
const padding = Math.max(0, paddingNeeded)
return `${label}${" ".repeat(padding)}${value}`
}
// Overview section
console.log("┌────────────────────────────────────────────────────────┐")
console.log("│ OVERVIEW │")
console.log("├────────────────────────────────────────────────────────┤")
console.log(renderRow("Sessions", stats.totalSessions.toLocaleString()))
console.log(renderRow("Messages", stats.totalMessages.toLocaleString()))
console.log(renderRow("Days", stats.days.toString()))
console.log("└────────────────────────────────────────────────────────┘")
console.log()
// Cost & Tokens section
console.log("┌────────────────────────────────────────────────────────┐")
console.log("│ COST & TOKENS │")
console.log("├────────────────────────────────────────────────────────┤")
const cost = isNaN(stats.totalCost) ? 0 : stats.totalCost
const costPerDay = isNaN(stats.costPerDay) ? 0 : stats.costPerDay
console.log(renderRow("Total Cost", `$${cost.toFixed(2)}`))
console.log(renderRow("Cost/Day", `$${costPerDay.toFixed(2)}`))
console.log(renderRow("Input", formatNumber(stats.totalTokens.input)))
console.log(renderRow("Output", formatNumber(stats.totalTokens.output)))
console.log(renderRow("Cache Read", formatNumber(stats.totalTokens.cache.read)))
console.log(renderRow("Cache Write", formatNumber(stats.totalTokens.cache.write)))
console.log("└────────────────────────────────────────────────────────┘")
console.log()
// Tool Usage section
if (Object.keys(stats.toolUsage).length > 0) {
const sortedTools = Object.entries(stats.toolUsage)
.sort(([, a], [, b]) => b - a)
.slice(0, 10)
console.log("┌────────────────────────────────────────────────────────┐")
console.log("│ TOOL USAGE │")
console.log("├────────────────────────────────────────────────────────┤")
const maxCount = Math.max(...sortedTools.map(([, count]) => count))
const totalToolUsage = Object.values(stats.toolUsage).reduce((a, b) => a + b, 0)
for (const [tool, count] of sortedTools) {
const barLength = Math.max(1, Math.floor((count / maxCount) * 20))
const bar = "█".repeat(barLength)
const percentage = ((count / totalToolUsage) * 100).toFixed(1)
const content = ` ${tool.padEnd(10)} ${bar.padEnd(20)} ${count.toString().padStart(3)} (${percentage.padStart(4)}%)`
const padding = Math.max(0, width - content.length)
console.log(`${content}${" ".repeat(padding)}`)
}
console.log("└────────────────────────────────────────────────────────┘")
}
console.log()
}
function formatNumber(num: number): string {
if (num >= 1000000) {
return (num / 1000000).toFixed(1) + "M"
} else if (num >= 1000) {
return (num / 1000).toFixed(1) + "K"
}
return num.toString()
}

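A quick note on the helpers above: renderRow right-aligns the value inside the 56-column box, and formatNumber abbreviates token counts. Expected behavior, for illustration:

  // illustrative only, based on the implementations above
  formatNumber(512)           // "512"
  formatNumber(12_345)        // "12.3K"
  formatNumber(2_500_000)     // "2.5M"
  renderRow("Sessions", "42") // "Sessions" + padding + "42", 55 characters wide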

@ -10,15 +10,32 @@ import { Installation } from "../../installation"
import { Config } from "../../config/config" import { Config } from "../../config/config"
import { Bus } from "../../bus" import { Bus } from "../../bus"
import { Log } from "../../util/log" import { Log } from "../../util/log"
import { FileWatcher } from "../../file/watch"
import { Mode } from "../../session/mode"
export const TuiCommand = cmd({ export const TuiCommand = cmd({
command: "$0 [project]", command: "$0 [project]",
describe: "start opencode tui", describe: "start opencode tui",
builder: (yargs) => builder: (yargs) =>
-    yargs.positional("project", {
-      type: "string",
-      describe: "path to start opencode in",
-    }),
+    yargs
+      .positional("project", {
+        type: "string",
+        describe: "path to start opencode in",
+      })
+      .option("model", {
+        type: "string",
+        alias: ["m"],
+        describe: "model to use in the format of provider/model",
+      })
+      .option("prompt", {
+        alias: ["p"],
+        type: "string",
+        describe: "prompt to use",
+      })
+      .option("mode", {
+        type: "string",
+        describe: "mode to use",
+      }),
handler: async (args) => { handler: async (args) => {
while (true) { while (true) {
const cwd = args.project ? path.resolve(args.project) : process.cwd() const cwd = args.project ? path.resolve(args.project) : process.cwd()
@ -29,6 +46,7 @@ export const TuiCommand = cmd({
return return
} }
const result = await bootstrap({ cwd }, async (app) => { const result = await bootstrap({ cwd }, async (app) => {
FileWatcher.init()
const providers = await Provider.list() const providers = await Provider.list()
if (Object.keys(providers).length === 0) { if (Object.keys(providers).length === 0) {
return "needs_provider" return "needs_provider"
@ -40,9 +58,7 @@ export const TuiCommand = cmd({
}) })
let cmd = ["go", "run", "./main.go"] let cmd = ["go", "run", "./main.go"]
-      let cwd = Bun.fileURLToPath(
-        new URL("../../../../tui/cmd/opencode", import.meta.url),
-      )
+      let cwd = Bun.fileURLToPath(new URL("../../../../tui/cmd/opencode", import.meta.url))
if (Bun.embeddedFiles.length > 0) { if (Bun.embeddedFiles.length > 0) {
const blob = Bun.embeddedFiles[0] as File const blob = Bun.embeddedFiles[0] as File
let binaryName = blob.name let binaryName = blob.name
@ -62,15 +78,22 @@ export const TuiCommand = cmd({
cmd, cmd,
}) })
const proc = Bun.spawn({ const proc = Bun.spawn({
-        cmd: [...cmd, ...process.argv.slice(2)],
+        cmd: [
+          ...cmd,
+          ...(args.model ? ["--model", args.model] : []),
+          ...(args.prompt ? ["--prompt", args.prompt] : []),
+          ...(args.mode ? ["--mode", args.mode] : []),
+        ],
cwd, cwd,
stdout: "inherit", stdout: "inherit",
stderr: "inherit", stderr: "inherit",
stdin: "inherit", stdin: "inherit",
env: { env: {
...process.env, ...process.env,
CGO_ENABLED: "0",
OPENCODE_SERVER: server.url.toString(), OPENCODE_SERVER: server.url.toString(),
OPENCODE_APP_INFO: JSON.stringify(app), OPENCODE_APP_INFO: JSON.stringify(app),
OPENCODE_MODES: JSON.stringify(await Mode.list()),
}, },
onExit: () => { onExit: () => {
server.stop() server.stop()


@ -27,22 +27,26 @@ export const UpgradeCommand = {
const detectedMethod = await Installation.method() const detectedMethod = await Installation.method()
const method = (args.method as Installation.Method) ?? detectedMethod const method = (args.method as Installation.Method) ?? detectedMethod
if (method === "unknown") { if (method === "unknown") {
-      prompts.log.error(
-        `opencode is installed to ${process.execPath} and seems to be managed by a package manager`,
-      )
+      prompts.log.error(`opencode is installed to ${process.execPath} and seems to be managed by a package manager`)
prompts.outro("Done") prompts.outro("Done")
return return
} }
prompts.log.info("Using method: " + method) prompts.log.info("Using method: " + method)
const target = args.target ?? (await Installation.latest()) const target = args.target ?? (await Installation.latest())
if (Installation.VERSION === target) {
prompts.log.warn(`opencode upgrade skipped: ${target} is already installed`)
prompts.outro("Done")
return
}
prompts.log.info(`From ${Installation.VERSION}${target}`) prompts.log.info(`From ${Installation.VERSION}${target}`)
const spinner = prompts.spinner() const spinner = prompts.spinner()
spinner.start("Upgrading...") spinner.start("Upgrading...")
const err = await Installation.upgrade(method, target).catch((err) => err) const err = await Installation.upgrade(method, target).catch((err) => err)
if (err) { if (err) {
spinner.stop("Upgrade failed") spinner.stop("Upgrade failed")
-      if (err instanceof Installation.UpgradeFailedError)
-        prompts.log.error(err.data.stderr)
+      if (err instanceof Installation.UpgradeFailedError) prompts.log.error(err.data.stderr)
else if (err instanceof Error) prompts.log.error(err.message) else if (err instanceof Error) prompts.log.error(err.message)
prompts.outro("Done") prompts.outro("Done")
return return


@ -5,14 +5,11 @@ import { UI } from "./ui"
export function FormatError(input: unknown) { export function FormatError(input: unknown) {
if (MCP.Failed.isInstance(input)) if (MCP.Failed.isInstance(input))
return `MCP server "${input.data.name}" failed. Note, opencode does not support MCP authentication yet.` return `MCP server "${input.data.name}" failed. Note, opencode does not support MCP authentication yet.`
-  if (Config.JsonError.isInstance(input))
-    return `Config file at ${input.data.path} is not valid JSON`
+  if (Config.JsonError.isInstance(input)) return `Config file at ${input.data.path} is not valid JSON`
if (Config.InvalidError.isInstance(input)) if (Config.InvalidError.isInstance(input))
return [ return [
`Config file at ${input.data.path} is invalid`, `Config file at ${input.data.path} is invalid`,
-      ...(input.data.issues?.map(
-        (issue) => "↳ " + issue.message + " " + issue.path.join("."),
-      ) ?? []),
+      ...(input.data.issues?.map((issue) => "↳ " + issue.message + " " + issue.path.join(".")) ?? []),
].join("\n") ].join("\n")
if (UI.CancelledError.isInstance(input)) return "" if (UI.CancelledError.isInstance(input)) return ""


@ -4,7 +4,7 @@ import { z } from "zod"
import { App } from "../app/app" import { App } from "../app/app"
import { Filesystem } from "../util/filesystem" import { Filesystem } from "../util/filesystem"
import { ModelsDev } from "../provider/models" import { ModelsDev } from "../provider/models"
import { mergeDeep } from "remeda" import { mergeDeep, pipe } from "remeda"
import { Global } from "../global" import { Global } from "../global"
import fs from "fs/promises" import fs from "fs/promises"
import { lazy } from "../util/lazy" import { lazy } from "../util/lazy"
@ -29,18 +29,12 @@ export namespace Config {
export const McpLocal = z export const McpLocal = z
.object({ .object({
type: z.literal("local").describe("Type of MCP server connection"), type: z.literal("local").describe("Type of MCP server connection"),
-      command: z
-        .string()
-        .array()
-        .describe("Command and arguments to run the MCP server"),
+      command: z.string().array().describe("Command and arguments to run the MCP server"),
environment: z environment: z
.record(z.string(), z.string()) .record(z.string(), z.string())
.optional() .optional()
.describe("Environment variables to set when running the MCP server"), .describe("Environment variables to set when running the MCP server"),
-      enabled: z
-        .boolean()
-        .optional()
-        .describe("Enable or disable the MCP server on startup"),
+      enabled: z.boolean().optional().describe("Enable or disable the MCP server on startup"),
}) })
.strict() .strict()
.openapi({ .openapi({
@ -51,10 +45,7 @@ export namespace Config {
.object({ .object({
type: z.literal("remote").describe("Type of MCP server connection"), type: z.literal("remote").describe("Type of MCP server connection"),
url: z.string().describe("URL of the remote MCP server"), url: z.string().describe("URL of the remote MCP server"),
-      enabled: z
-        .boolean()
-        .optional()
-        .describe("Enable or disable the MCP server on startup"),
+      enabled: z.boolean().optional().describe("Enable or disable the MCP server on startup"),
}) })
.strict() .strict()
.openapi({ .openapi({
@ -64,105 +55,80 @@ export namespace Config {
export const Mcp = z.discriminatedUnion("type", [McpLocal, McpRemote]) export const Mcp = z.discriminatedUnion("type", [McpLocal, McpRemote])
export type Mcp = z.infer<typeof Mcp> export type Mcp = z.infer<typeof Mcp>
export const Mode = z
.object({
model: z.string().optional(),
prompt: z.string().optional(),
tools: z.record(z.string(), z.boolean()).optional(),
})
.openapi({
ref: "ModeConfig",
})
export type Mode = z.infer<typeof Mode>
export const Keybinds = z export const Keybinds = z
.object({ .object({
-      leader: z
-        .string()
-        .optional()
-        .describe("Leader key for keybind combinations"),
-      help: z.string().optional().describe("Show help dialog"),
-      editor_open: z.string().optional().describe("Open external editor"),
-      session_new: z.string().optional().describe("Create a new session"),
-      session_list: z.string().optional().describe("List all sessions"),
-      session_share: z.string().optional().describe("Share current session"),
-      session_interrupt: z
-        .string()
-        .optional()
-        .describe("Interrupt current session"),
-      session_compact: z
-        .string()
-        .optional()
-        .describe("Toggle compact mode for session"),
-      tool_details: z.string().optional().describe("Show tool details"),
-      model_list: z.string().optional().describe("List available models"),
-      theme_list: z.string().optional().describe("List available themes"),
-      project_init: z
-        .string()
-        .optional()
-        .describe("Initialize project configuration"),
-      input_clear: z.string().optional().describe("Clear input field"),
-      input_paste: z.string().optional().describe("Paste from clipboard"),
-      input_submit: z.string().optional().describe("Submit input"),
-      input_newline: z.string().optional().describe("Insert newline in input"),
-      history_previous: z
-        .string()
-        .optional()
-        .describe("Navigate to previous history item"),
-      history_next: z
-        .string()
-        .optional()
-        .describe("Navigate to next history item"),
-      messages_page_up: z
-        .string()
-        .optional()
-        .describe("Scroll messages up by one page"),
-      messages_page_down: z
-        .string()
-        .optional()
-        .describe("Scroll messages down by one page"),
-      messages_half_page_up: z
-        .string()
-        .optional()
-        .describe("Scroll messages up by half page"),
-      messages_half_page_down: z
-        .string()
-        .optional()
-        .describe("Scroll messages down by half page"),
-      messages_previous: z
-        .string()
-        .optional()
-        .describe("Navigate to previous message"),
-      messages_next: z.string().optional().describe("Navigate to next message"),
-      messages_first: z
-        .string()
-        .optional()
-        .describe("Navigate to first message"),
-      messages_last: z.string().optional().describe("Navigate to last message"),
-      app_exit: z.string().optional().describe("Exit the application"),
+      leader: z.string().optional().default("ctrl+x").describe("Leader key for keybind combinations"),
+      app_help: z.string().optional().default("<leader>h").describe("Show help dialog"),
+      switch_mode: z.string().optional().default("tab").describe("Switch mode"),
+      editor_open: z.string().optional().default("<leader>e").describe("Open external editor"),
+      session_new: z.string().optional().default("<leader>n").describe("Create a new session"),
+      session_list: z.string().optional().default("<leader>l").describe("List all sessions"),
+      session_share: z.string().optional().default("<leader>s").describe("Share current session"),
+      session_unshare: z.string().optional().default("<leader>u").describe("Unshare current session"),
+      session_interrupt: z.string().optional().default("esc").describe("Interrupt current session"),
+      session_compact: z.string().optional().default("<leader>c").describe("Compact the session"),
+      tool_details: z.string().optional().default("<leader>d").describe("Toggle tool details"),
+      model_list: z.string().optional().default("<leader>m").describe("List available models"),
+      theme_list: z.string().optional().default("<leader>t").describe("List available themes"),
+      file_list: z.string().optional().default("<leader>f").describe("List files"),
+      file_close: z.string().optional().default("esc").describe("Close file"),
+      file_search: z.string().optional().default("<leader>/").describe("Search file"),
+      file_diff_toggle: z.string().optional().default("<leader>v").describe("Split/unified diff"),
+      project_init: z.string().optional().default("<leader>i").describe("Create/update AGENTS.md"),
+      input_clear: z.string().optional().default("ctrl+c").describe("Clear input field"),
+      input_paste: z.string().optional().default("ctrl+v").describe("Paste from clipboard"),
+      input_submit: z.string().optional().default("enter").describe("Submit input"),
+      input_newline: z.string().optional().default("shift+enter,ctrl+j").describe("Insert newline in input"),
+      messages_page_up: z.string().optional().default("pgup").describe("Scroll messages up by one page"),
+      messages_page_down: z.string().optional().default("pgdown").describe("Scroll messages down by one page"),
+      messages_half_page_up: z.string().optional().default("ctrl+alt+u").describe("Scroll messages up by half page"),
+      messages_half_page_down: z
+        .string()
+        .optional()
+        .default("ctrl+alt+d")
+        .describe("Scroll messages down by half page"),
+      messages_previous: z.string().optional().default("ctrl+up").describe("Navigate to previous message"),
+      messages_next: z.string().optional().default("ctrl+down").describe("Navigate to next message"),
+      messages_first: z.string().optional().default("ctrl+g").describe("Navigate to first message"),
+      messages_last: z.string().optional().default("ctrl+alt+g").describe("Navigate to last message"),
+      messages_layout_toggle: z.string().optional().default("<leader>p").describe("Toggle layout"),
+      messages_copy: z.string().optional().default("<leader>y").describe("Copy message"),
+      messages_revert: z.string().optional().default("<leader>r").describe("Revert message"),
+      app_exit: z.string().optional().default("ctrl+c,<leader>q").describe("Exit the application"),
}) })
.strict() .strict()
.openapi({ .openapi({
ref: "KeybindsConfig", ref: "KeybindsConfig",
}) })
export const Info = z export const Info = z
.object({ .object({
-      $schema: z
-        .string()
-        .optional()
-        .describe("JSON schema reference for configuration validation"),
-      theme: z
-        .string()
-        .optional()
-        .describe("Theme name to use for the interface"),
+      $schema: z.string().optional().describe("JSON schema reference for configuration validation"),
+      theme: z.string().optional().describe("Theme name to use for the interface"),
keybinds: Keybinds.optional().describe("Custom keybind configurations"), keybinds: Keybinds.optional().describe("Custom keybind configurations"),
-      autoshare: z
-        .boolean()
-        .optional()
-        .describe("Share newly created sessions automatically"),
-      autoupdate: z
-        .boolean()
-        .optional()
-        .describe("Automatically update to the latest version"),
-      disabled_providers: z
-        .array(z.string())
-        .optional()
-        .describe("Disable providers that are loaded automatically"),
-      model: z
-        .string()
-        .describe(
-          "Model to use in the format of provider/model, eg anthropic/claude-2",
-        )
-        .optional(),
+      autoshare: z.boolean().optional().describe("Share newly created sessions automatically"),
+      autoupdate: z.boolean().optional().describe("Automatically update to the latest version"),
+      disabled_providers: z.array(z.string()).optional().describe("Disable providers that are loaded automatically"),
+      model: z.string().describe("Model to use in the format of provider/model, eg anthropic/claude-2").optional(),
+      mode: z
+        .object({
+          build: Mode.optional(),
+          plan: Mode.optional(),
+        })
+        .catchall(Mode)
+        .optional(),
+      log_level: Log.Level.optional().describe("Minimum log level to write to log files"),
provider: z provider: z
.record( .record(
ModelsDev.Provider.partial().extend({ ModelsDev.Provider.partial().extend({
@ -172,14 +138,8 @@ export namespace Config {
) )
.optional() .optional()
.describe("Custom provider configurations and model overrides"), .describe("Custom provider configurations and model overrides"),
-      mcp: z
-        .record(z.string(), Mcp)
-        .optional()
-        .describe("MCP (Model Context Protocol) server configurations"),
-      instructions: z
-        .array(z.string())
-        .optional()
-        .describe("Additional instruction files or patterns to include"),
+      mcp: z.record(z.string(), Mcp).optional().describe("MCP (Model Context Protocol) server configurations"),
+      instructions: z.array(z.string()).optional().describe("Additional instruction files or patterns to include"),
experimental: z experimental: z
.object({ .object({
hook: z hook: z
@ -215,7 +175,11 @@ export namespace Config {
export type Info = z.output<typeof Info> export type Info = z.output<typeof Info>
export const global = lazy(async () => { export const global = lazy(async () => {
-    let result = await load(path.join(Global.Path.config, "config.json"))
+    let result = pipe(
+      {},
+      mergeDeep(await load(path.join(Global.Path.config, "config.json"))),
+      mergeDeep(await load(path.join(Global.Path.config, "opencode.json"))),
+    )
await import(path.join(Global.Path.config, "config"), { await import(path.join(Global.Path.config, "config"), {
with: { with: {
@ -227,10 +191,7 @@ export namespace Config {
if (provider && model) result.model = `${provider}/${model}` if (provider && model) result.model = `${provider}/${model}`
result["$schema"] = "https://opencode.ai/config.json" result["$schema"] = "https://opencode.ai/config.json"
result = mergeDeep(result, rest) result = mergeDeep(result, rest)
-        await Bun.write(
-          path.join(Global.Path.config, "config.json"),
-          JSON.stringify(result, null, 2),
-        )
+        await Bun.write(path.join(Global.Path.config, "config.json"), JSON.stringify(result, null, 2))
await fs.unlink(path.join(Global.Path.config, "config")) await fs.unlink(path.join(Global.Path.config, "config"))
}) })
.catch(() => {}) .catch(() => {})
@ -238,19 +199,47 @@ export namespace Config {
return result return result
}) })
-  async function load(path: string) {
-    const data = await Bun.file(path)
-      .json()
-      .catch((err) => {
-        if (err.code === "ENOENT") return {}
-        throw new JsonError({ path }, { cause: err })
-      })
-    const parsed = Info.safeParse(data)
-    if (parsed.success) return parsed.data
-    throw new InvalidError({ path, issues: parsed.error.issues })
+  async function load(configPath: string) {
+    let text = await Bun.file(configPath)
+      .text()
+      .catch((err) => {
+        if (err.code === "ENOENT") return
+        throw new JsonError({ path: configPath }, { cause: err })
+      })
+
+    if (!text) return {}
+
+    text = text.replace(/\{env:([^}]+)\}/g, (_, varName) => {
+      return process.env[varName] || ""
+    })
+
+    const fileMatches = text.match(/"?\{file:([^}]+)\}"?/g)
+    if (fileMatches) {
+      const configDir = path.dirname(configPath)
+      for (const match of fileMatches) {
+        const filePath = match.replace(/^"?\{file:/, "").replace(/\}"?$/, "")
+        const resolvedPath = path.isAbsolute(filePath) ? filePath : path.resolve(configDir, filePath)
+        const fileContent = await Bun.file(resolvedPath).text()
+        text = text.replace(match, JSON.stringify(fileContent))
+      }
+    }
+
+    let data: any
+    try {
+      data = JSON.parse(text)
+    } catch (err) {
+      throw new JsonError({ path: configPath }, { cause: err as Error })
+    }
+
+    const parsed = Info.safeParse(data)
+    if (parsed.success) {
+      if (!parsed.data.$schema) {
+        parsed.data.$schema = "https://opencode.ai/config.json"
+        await Bun.write(configPath, JSON.stringify(parsed.data, null, 2))
+      }
+      return parsed.data
+    }
+    throw new InvalidError({ path: configPath, issues: parsed.error.issues })
} }
export const JsonError = NamedError.create( export const JsonError = NamedError.create(
"ConfigJsonError", "ConfigJsonError",
z.object({ z.object({

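The rewritten load() above adds two substitution passes before JSON.parse: {env:NAME} expands to that environment variable (empty string if unset), and {file:path} is replaced with the contents of the referenced file, resolved relative to the config file and inlined as a JSON string. A rough sketch of what that means for a config on disk (the specific key and variable names here are only illustrative):

  // illustrative: the two rewrite passes applied by load() above
  let text = `{ "model": "{env:OPENCODE_MODEL}", "instructions": ["{file:./team-rules.md}"] }`
  text = text.replace(/\{env:([^}]+)\}/g, (_, name) => process.env[name] || "")
  // each "{file:...}" token is then replaced with JSON.stringify(<file contents>),
  // resolved relative to the directory containing the config file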

@ -22,9 +22,7 @@ export namespace ConfigHooks {
command: item.command, command: item.command,
}) })
Bun.spawn({ Bun.spawn({
-            cmd: item.command.map((x) =>
-              x.replace("$FILE", payload.properties.file),
-            ),
+            cmd: item.command.map((x) => x.replace("$FILE", payload.properties.file)),
env: item.environment, env: item.environment,
cwd: app.path.cwd, cwd: app.path.cwd,
stdout: "ignore", stdout: "ignore",


@ -45,10 +45,7 @@ export namespace Fzf {
log.info("found", { filepath }) log.info("found", { filepath })
return { filepath } return { filepath }
} }
-    filepath = path.join(
-      Global.Path.bin,
-      "fzf" + (process.platform === "win32" ? ".exe" : ""),
-    )
+    filepath = path.join(Global.Path.bin, "fzf" + (process.platform === "win32" ? ".exe" : ""))
const file = Bun.file(filepath) const file = Bun.file(filepath)
if (!(await file.exists())) { if (!(await file.exists())) {
@ -56,18 +53,15 @@ export namespace Fzf {
const arch = archMap[process.arch as keyof typeof archMap] ?? "amd64" const arch = archMap[process.arch as keyof typeof archMap] ?? "amd64"
const config = PLATFORM[process.platform as keyof typeof PLATFORM] const config = PLATFORM[process.platform as keyof typeof PLATFORM]
-      if (!config)
-        throw new UnsupportedPlatformError({ platform: process.platform })
+      if (!config) throw new UnsupportedPlatformError({ platform: process.platform })
const version = VERSION const version = VERSION
-      const platformName =
-        process.platform === "win32" ? "windows" : process.platform
+      const platformName = process.platform === "win32" ? "windows" : process.platform
const filename = `fzf-${version}-${platformName}_${arch}.${config.extension}` const filename = `fzf-${version}-${platformName}_${arch}.${config.extension}`
const url = `https://github.com/junegunn/fzf/releases/download/v${version}/${filename}` const url = `https://github.com/junegunn/fzf/releases/download/v${version}/${filename}`
const response = await fetch(url) const response = await fetch(url)
-      if (!response.ok)
-        throw new DownloadFailedError({ url, status: response.status })
+      if (!response.ok) throw new DownloadFailedError({ url, status: response.status })
const buffer = await response.arrayBuffer() const buffer = await response.arrayBuffer()
const archivePath = path.join(Global.Path.bin, filename) const archivePath = path.join(Global.Path.bin, filename)
@ -86,14 +80,11 @@ export namespace Fzf {
}) })
} }
if (config.extension === "zip") { if (config.extension === "zip") {
-        const proc = Bun.spawn(
-          ["unzip", "-j", archivePath, "fzf.exe", "-d", Global.Path.bin],
-          {
-            cwd: Global.Path.bin,
-            stderr: "pipe",
-            stdout: "ignore",
-          },
-        )
+        const proc = Bun.spawn(["unzip", "-j", archivePath, "fzf.exe", "-d", Global.Path.bin], {
+          cwd: Global.Path.bin,
+          stderr: "pipe",
+          stdout: "ignore",
+        })
await proc.exited await proc.exited
if (proc.exitCode !== 0) if (proc.exitCode !== 0)
throw new ExtractionFailedError({ throw new ExtractionFailedError({


@ -11,6 +11,19 @@ import { Log } from "../util/log"
export namespace File { export namespace File {
const log = Log.create({ service: "file" }) const log = Log.create({ service: "file" })
export const Info = z
.object({
path: z.string(),
added: z.number().int(),
removed: z.number().int(),
status: z.enum(["added", "deleted", "modified"]),
})
.openapi({
ref: "File",
})
export type Info = z.infer<typeof Info>
export const Event = { export const Event = {
Edited: Bus.event( Edited: Bus.event(
"file.edited", "file.edited",
@ -24,20 +37,16 @@ export namespace File {
const app = App.info() const app = App.info()
if (!app.git) return [] if (!app.git) return []
-    const diffOutput = await $`git diff --numstat HEAD`
-      .cwd(app.path.cwd)
-      .quiet()
-      .nothrow()
-      .text()
-    const changedFiles = []
+    const diffOutput = await $`git diff --numstat HEAD`.cwd(app.path.cwd).quiet().nothrow().text()
+    const changedFiles: Info[] = []
if (diffOutput.trim()) { if (diffOutput.trim()) {
const lines = diffOutput.trim().split("\n") const lines = diffOutput.trim().split("\n")
for (const line of lines) { for (const line of lines) {
const [added, removed, filepath] = line.split("\t") const [added, removed, filepath] = line.split("\t")
changedFiles.push({ changedFiles.push({
file: filepath, path: filepath,
added: added === "-" ? 0 : parseInt(added, 10), added: added === "-" ? 0 : parseInt(added, 10),
removed: removed === "-" ? 0 : parseInt(removed, 10), removed: removed === "-" ? 0 : parseInt(removed, 10),
status: "modified", status: "modified",
@ -45,22 +54,16 @@ export namespace File {
} }
} }
-    const untrackedOutput = await $`git ls-files --others --exclude-standard`
-      .cwd(app.path.cwd)
-      .quiet()
-      .nothrow()
-      .text()
+    const untrackedOutput = await $`git ls-files --others --exclude-standard`.cwd(app.path.cwd).quiet().nothrow().text()
if (untrackedOutput.trim()) { if (untrackedOutput.trim()) {
const untrackedFiles = untrackedOutput.trim().split("\n") const untrackedFiles = untrackedOutput.trim().split("\n")
for (const filepath of untrackedFiles) { for (const filepath of untrackedFiles) {
try { try {
-          const content = await Bun.file(
-            path.join(app.path.root, filepath),
-          ).text()
+          const content = await Bun.file(path.join(app.path.root, filepath)).text()
const lines = content.split("\n").length const lines = content.split("\n").length
changedFiles.push({ changedFiles.push({
file: filepath, path: filepath,
added: lines, added: lines,
removed: 0, removed: 0,
status: "added", status: "added",
@ -72,17 +75,13 @@ export namespace File {
} }
// Get deleted files // Get deleted files
-    const deletedOutput = await $`git diff --name-only --diff-filter=D HEAD`
-      .cwd(app.path.cwd)
-      .quiet()
-      .nothrow()
-      .text()
+    const deletedOutput = await $`git diff --name-only --diff-filter=D HEAD`.cwd(app.path.cwd).quiet().nothrow().text()
if (deletedOutput.trim()) { if (deletedOutput.trim()) {
const deletedFiles = deletedOutput.trim().split("\n") const deletedFiles = deletedOutput.trim().split("\n")
for (const filepath of deletedFiles) { for (const filepath of deletedFiles) {
changedFiles.push({ changedFiles.push({
file: filepath, path: filepath,
added: 0, added: 0,
removed: 0, // Could get original line count but would require another git command removed: 0, // Could get original line count but would require another git command
status: "deleted", status: "deleted",
@ -92,7 +91,7 @@ export namespace File {
return changedFiles.map((x) => ({ return changedFiles.map((x) => ({
...x, ...x,
file: path.relative(app.path.cwd, path.join(app.path.root, x.file)), path: path.relative(app.path.cwd, path.join(app.path.root, x.path)),
})) }))
} }
@ -112,11 +111,7 @@ export namespace File {
filepath: rel, filepath: rel,
}) })
if (diff !== "unmodified") { if (diff !== "unmodified") {
-      const original = await $`git show HEAD:${rel}`
-        .cwd(app.path.root)
-        .quiet()
-        .nothrow()
-        .text()
+      const original = await $`git show HEAD:${rel}`.cwd(app.path.root).quiet().nothrow().text()
const patch = createPatch(file, original, content, "old", "new", { const patch = createPatch(file, original, content, "old", "new", {
context: Infinity, context: Infinity,
}) })

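The entries produced above now use the File.Info shape (path/added/removed/status) instead of the old file key, and that shape is exported as an OpenAPI-referenced schema. A hypothetical consumer, assuming the surrounding function is exposed as File.status():

  // sketch only; the function name is an assumption, the fields come from File.Info above
  const changed: File.Info[] = await File.status()
  for (const f of changed) {
    console.log(`${f.status.padEnd(8)} ${f.path} +${f.added} -${f.removed}`)
  }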

@ -34,25 +34,27 @@ export namespace Ripgrep {
export const Match = z.object({ export const Match = z.object({
type: z.literal("match"), type: z.literal("match"),
-      data: z.object({
-        path: z.object({
-          text: z.string(),
-        }),
-        lines: z.object({
-          text: z.string(),
-        }),
-        line_number: z.number(),
-        absolute_offset: z.number(),
-        submatches: z.array(
-          z.object({
-            match: z.object({
-              text: z.string(),
-            }),
-            start: z.number(),
-            end: z.number(),
-          }),
-        ),
-      }),
+      data: z
+        .object({
+          path: z.object({
+            text: z.string(),
+          }),
+          lines: z.object({
+            text: z.string(),
+          }),
+          line_number: z.number(),
+          absolute_offset: z.number(),
+          submatches: z.array(
+            z.object({
+              match: z.object({
+                text: z.string(),
+              }),
+              start: z.number(),
+              end: z.number(),
+            }),
+          ),
+        })
+        .openapi({ ref: "Match" }),
}) })
const End = z.object({ const End = z.object({
@ -122,15 +124,11 @@ export namespace Ripgrep {
const state = lazy(async () => { const state = lazy(async () => {
let filepath = Bun.which("rg") let filepath = Bun.which("rg")
if (filepath) return { filepath } if (filepath) return { filepath }
-    filepath = path.join(
-      Global.Path.bin,
-      "rg" + (process.platform === "win32" ? ".exe" : ""),
-    )
+    filepath = path.join(Global.Path.bin, "rg" + (process.platform === "win32" ? ".exe" : ""))
const file = Bun.file(filepath) const file = Bun.file(filepath)
if (!(await file.exists())) { if (!(await file.exists())) {
-      const platformKey =
-        `${process.arch}-${process.platform}` as keyof typeof PLATFORM
+      const platformKey = `${process.arch}-${process.platform}` as keyof typeof PLATFORM
const config = PLATFORM[platformKey] const config = PLATFORM[platformKey]
if (!config) throw new UnsupportedPlatformError({ platform: platformKey }) if (!config) throw new UnsupportedPlatformError({ platform: platformKey })
@ -139,8 +137,7 @@ export namespace Ripgrep {
const url = `https://github.com/BurntSushi/ripgrep/releases/download/${version}/${filename}` const url = `https://github.com/BurntSushi/ripgrep/releases/download/${version}/${filename}`
const response = await fetch(url) const response = await fetch(url)
-      if (!response.ok)
-        throw new DownloadFailedError({ url, status: response.status })
+      if (!response.ok) throw new DownloadFailedError({ url, status: response.status })
const buffer = await response.arrayBuffer() const buffer = await response.arrayBuffer()
const archivePath = path.join(Global.Path.bin, filename) const archivePath = path.join(Global.Path.bin, filename)
@ -164,14 +161,11 @@ export namespace Ripgrep {
}) })
} }
if (config.extension === "zip") { if (config.extension === "zip") {
-        const proc = Bun.spawn(
-          ["unzip", "-j", archivePath, "*/rg.exe", "-d", Global.Path.bin],
-          {
-            cwd: Global.Path.bin,
-            stderr: "pipe",
-            stdout: "ignore",
-          },
-        )
+        const proc = Bun.spawn(["unzip", "-j", archivePath, "*/rg.exe", "-d", Global.Path.bin], {
+          cwd: Global.Path.bin,
+          stderr: "pipe",
+          stdout: "ignore",
+        })
await proc.exited await proc.exited
if (proc.exitCode !== 0) if (proc.exitCode !== 0)
throw new ExtractionFailedError({ throw new ExtractionFailedError({
@ -193,17 +187,16 @@ export namespace Ripgrep {
return filepath return filepath
} }
-  export async function files(input: {
-    cwd: string
-    query?: string
-    glob?: string
-    limit?: number
-  }) {
-    const commands = [
-      `${await filepath()} --files --hidden --glob='!.git/*' ${input.glob ? `--glob='${input.glob}'` : ``}`,
-    ]
-    if (input.query)
-      commands.push(`${await Fzf.filepath()} --filter=${input.query}`)
+  export async function files(input: { cwd: string; query?: string; glob?: string[]; limit?: number }) {
+    const commands = [`${$.escape(await filepath())} --files --follow --hidden --glob='!.git/*'`]
+
+    if (input.glob) {
+      for (const g of input.glob) {
+        commands[0] += ` --glob='${g}'`
+      }
+    }
+
+    if (input.query) commands.push(`${await Fzf.filepath()} --filter=${input.query}`)
if (input.limit) commands.push(`head -n ${input.limit}`) if (input.limit) commands.push(`head -n ${input.limit}`)
const joined = commands.join(" | ") const joined = commands.join(" | ")
const result = await $`${{ raw: joined }}`.cwd(input.cwd).nothrow().text() const result = await $`${{ raw: joined }}`.cwd(input.cwd).nothrow().text()
@ -310,18 +303,8 @@ export namespace Ripgrep {
return lines.join("\n") return lines.join("\n")
} }
-  export async function search(input: {
-    cwd: string
-    pattern: string
-    glob?: string[]
-    limit?: number
-  }) {
-    const args = [
-      `${await filepath()}`,
-      "--json",
-      "--hidden",
-      "--glob='!.git/*'",
-    ]
+  export async function search(input: { cwd: string; pattern: string; glob?: string[]; limit?: number }) {
+    const args = [`${await filepath()}`, "--json", "--hidden", "--glob='!.git/*'"]
if (input.glob) { if (input.glob) {
for (const g of input.glob) { for (const g of input.glob) {

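Both helpers above now take glob as a string[] (files previously accepted a single string), so call sites pass an array even for one pattern. Hypothetical calls, matching the new signatures:

  // illustrative call sites; cwd would come from App.info().path in practice
  const tsFiles = await Ripgrep.files({ cwd: app.path.cwd, glob: ["*.ts", "*.tsx"], limit: 50 })
  const todos = await Ripgrep.search({ cwd: app.path.cwd, pattern: "TODO", glob: ["!node_modules/**"], limit: 100 })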

@ -1,6 +1,8 @@
import { App } from "../app/app" import { App } from "../app/app"
import { Log } from "../util/log"
export namespace FileTime { export namespace FileTime {
const log = Log.create({ service: "file.time" })
export const state = App.state("tool.filetimes", () => { export const state = App.state("tool.filetimes", () => {
const read: { const read: {
[sessionID: string]: { [sessionID: string]: {
@ -13,6 +15,7 @@ export namespace FileTime {
}) })
export function read(sessionID: string, file: string) { export function read(sessionID: string, file: string) {
log.info("read", { sessionID, file })
const { read } = state() const { read } = state()
read[sessionID] = read[sessionID] || {} read[sessionID] = read[sessionID] || {}
read[sessionID][file] = new Date() read[sessionID][file] = new Date()
@ -24,10 +27,7 @@ export namespace FileTime {
export async function assert(sessionID: string, filepath: string) { export async function assert(sessionID: string, filepath: string) {
const time = get(sessionID, filepath) const time = get(sessionID, filepath)
-    if (!time)
-      throw new Error(
-        `You must read the file ${filepath} before overwriting it. Use the Read tool first`,
-      )
+    if (!time) throw new Error(`You must read the file ${filepath} before overwriting it. Use the Read tool first`)
const stats = await Bun.file(filepath).stat() const stats = await Bun.file(filepath).stat()
if (stats.mtime.getTime() > time.getTime()) { if (stats.mtime.getTime() > time.getTime()) {
throw new Error( throw new Error(


@ -21,23 +21,20 @@ export namespace FileWatcher {
"file.watcher", "file.watcher",
() => { () => {
const app = App.use() const app = App.use()
if (!app.info.git) return {}
try { try {
-        const watcher = fs.watch(
-          app.info.path.cwd,
-          { recursive: true },
-          (event, file) => {
-            log.info("change", { file, event })
-            if (!file) return
-            // for some reason async local storage is lost here
-            // https://github.com/oven-sh/bun/issues/20754
-            App.provideExisting(app, async () => {
-              Bus.publish(Event.Updated, {
-                file,
-                event,
-              })
-            })
-          },
-        )
+        const watcher = fs.watch(app.info.path.cwd, { recursive: true }, (event, file) => {
+          log.info("change", { file, event })
+          if (!file) return
+          // for some reason async local storage is lost here
+          // https://github.com/oven-sh/bun/issues/20754
+          App.provideExisting(app, async () => {
+            Bus.publish(Event.Updated, {
+              file,
+              event,
+            })
+          })
+        })
return { watcher } return { watcher }
} catch { } catch {
return {} return {}
@ -49,7 +46,7 @@ export namespace FileWatcher {
) )
export function init() { export function init() {
if (Flag.OPENCODE_DISABLE_WATCHER) return if (Flag.OPENCODE_DISABLE_WATCHER || true) return
state() state()
} }
} }


@ -1,5 +1,7 @@
import { App } from "../app/app" import { App } from "../app/app"
import { BunProc } from "../bun" import { BunProc } from "../bun"
import { Filesystem } from "../util/filesystem"
import path from "path"
export interface Info { export interface Info {
name: string name: string
@ -29,7 +31,7 @@ export const mix: Info = {
export const prettier: Info = { export const prettier: Info = {
name: "prettier", name: "prettier",
command: [BunProc.which(), "run", "prettier", "--write", "$FILE"], command: [BunProc.which(), "x", "prettier", "--write", "$FILE"],
environment: { environment: {
BUN_BE_BUN: "1", BUN_BE_BUN: "1",
}, },
@ -62,23 +64,12 @@ export const prettier: Info = {
".gql", ".gql",
], ],
async enabled() { async enabled() {
-    // this is more complicated because we only want to use prettier if it's
-    // being used with the current project
-    try {
-      const proc = Bun.spawn({
-        cmd: [BunProc.which(), "run", "prettier", "--version"],
-        cwd: App.info().path.cwd,
-        env: {
-          BUN_BE_BUN: "1",
-        },
-        stdout: "ignore",
-        stderr: "ignore",
-      })
-      const exit = await proc.exited
-      return exit === 0
-    } catch {
-      return false
-    }
+    const app = App.info()
+    const nms = await Filesystem.findUp("node_modules", app.path.cwd, app.path.root)
+    for (const item of nms) {
+      if (await Bun.file(path.join(item, ".bin", "prettier")).exists()) return true
+    }
+    return false
}, },
} }
@ -94,21 +85,7 @@ export const zig: Info = {
export const clang: Info = { export const clang: Info = {
name: "clang-format", name: "clang-format",
command: ["clang-format", "-i", "$FILE"], command: ["clang-format", "-i", "$FILE"],
-  extensions: [
-    ".c",
-    ".cc",
-    ".cpp",
-    ".cxx",
-    ".c++",
-    ".h",
-    ".hh",
-    ".hpp",
-    ".hxx",
-    ".h++",
-    ".ino",
-    ".C",
-    ".H",
-  ],
+  extensions: [".c", ".cc", ".cpp", ".cxx", ".c++", ".h", ".hh", ".hpp", ".hxx", ".h++", ".ino", ".C", ".H"],
async enabled() { async enabled() {
return Bun.which("clang-format") !== null return Bun.which("clang-format") !== null
}, },

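The new enabled() check above no longer shells out to prettier --version; it looks for a project-local install by walking node_modules directories upward and checking for a .bin/prettier entry. A self-contained sketch of that rule, assuming Filesystem.findUp(name, start, stop) yields matching directories from start up to stop:

  import path from "path"
  import { Filesystem } from "../util/filesystem"

  async function hasLocalPrettier(cwd: string, root: string) {
    for (const dir of await Filesystem.findUp("node_modules", cwd, root)) {
      if (await Bun.file(path.join(dir, ".bin", "prettier")).exists()) return true
    }
    return false
  }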

@ -23,7 +23,17 @@ export namespace Global {
await Promise.all([ await Promise.all([
fs.mkdir(Global.Path.data, { recursive: true }), fs.mkdir(Global.Path.data, { recursive: true }),
fs.mkdir(Global.Path.config, { recursive: true }), fs.mkdir(Global.Path.config, { recursive: true }),
fs.mkdir(Global.Path.cache, { recursive: true }),
fs.mkdir(Global.Path.providers, { recursive: true }), fs.mkdir(Global.Path.providers, { recursive: true }),
fs.mkdir(Global.Path.state, { recursive: true }), fs.mkdir(Global.Path.state, { recursive: true }),
]) ])
const CACHE_VERSION = "1"
const version = await Bun.file(path.join(Global.Path.cache, "version"))
.text()
.catch(() => "0")
if (version !== CACHE_VERSION) {
await fs.rm(Global.Path.cache, { recursive: true, force: true })
await Bun.file(path.join(Global.Path.cache, "version")).write(CACHE_VERSION)
}


@ -26,11 +26,7 @@ export namespace Identifier {
return generateID(prefix, true, given) return generateID(prefix, true, given)
} }
-  function generateID(
-    prefix: keyof typeof prefixes,
-    descending: boolean,
-    given?: string,
-  ): string {
+  function generateID(prefix: keyof typeof prefixes, descending: boolean, given?: string): string {
if (!given) { if (!given) {
return generateNewID(prefix, descending) return generateNewID(prefix, descending)
} }
@ -42,8 +38,7 @@ export namespace Identifier {
} }
function randomBase62(length: number): string { function randomBase62(length: number): string {
-    const chars =
-      "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
+    const chars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
let result = "" let result = ""
const bytes = randomBytes(length) const bytes = randomBytes(length)
for (let i = 0; i < length; i++) { for (let i = 0; i < length; i++) {
@ -52,10 +47,7 @@ export namespace Identifier {
return result return result
} }
-  function generateNewID(
-    prefix: keyof typeof prefixes,
-    descending: boolean,
-  ): string {
+  function generateNewID(prefix: keyof typeof prefixes, descending: boolean): string {
const currentTimestamp = Date.now() const currentTimestamp = Date.now()
if (currentTimestamp !== lastTimestamp) { if (currentTimestamp !== lastTimestamp) {
@ -73,11 +65,6 @@ export namespace Identifier {
timeBytes[i] = Number((now >> BigInt(40 - 8 * i)) & BigInt(0xff)) timeBytes[i] = Number((now >> BigInt(40 - 8 * i)) & BigInt(0xff))
} }
-    return (
-      prefixes[prefix] +
-      "_" +
-      timeBytes.toString("hex") +
-      randomBase62(LENGTH - 12)
-    )
+    return prefixes[prefix] + "_" + timeBytes.toString("hex") + randomBase62(LENGTH - 12)
} }
} }


@ -14,6 +14,7 @@ import { FormatError } from "./cli/error"
import { ServeCommand } from "./cli/cmd/serve" import { ServeCommand } from "./cli/cmd/serve"
import { TuiCommand } from "./cli/cmd/tui" import { TuiCommand } from "./cli/cmd/tui"
import { DebugCommand } from "./cli/cmd/debug" import { DebugCommand } from "./cli/cmd/debug"
import { StatsCommand } from "./cli/cmd/stats"
const cancel = new AbortController() const cancel = new AbortController()
@ -40,6 +41,24 @@ const cli = yargs(hideBin(process.argv))
}) })
.middleware(async () => { .middleware(async () => {
await Log.init({ print: process.argv.includes("--print-logs") }) await Log.init({ print: process.argv.includes("--print-logs") })
try {
const { Config } = await import("./config/config")
const { App } = await import("./app/app")
App.provide({ cwd: process.cwd() }, async () => {
const cfg = await Config.get()
if (cfg.log_level) {
Log.setLevel(cfg.log_level as Log.Level)
} else {
const defaultLevel = Installation.isDev() ? "DEBUG" : "INFO"
Log.setLevel(defaultLevel)
}
})
} catch (e) {
Log.Default.error("failed to load config", { error: e })
}
Log.Default.info("opencode", { Log.Default.info("opencode", {
version: Installation.VERSION, version: Installation.VERSION,
args: process.argv.slice(2), args: process.argv.slice(2),
@ -54,11 +73,9 @@ const cli = yargs(hideBin(process.argv))
.command(UpgradeCommand) .command(UpgradeCommand)
.command(ServeCommand) .command(ServeCommand)
.command(ModelsCommand) .command(ModelsCommand)
.command(StatsCommand)
.fail((msg) => { .fail((msg) => {
-    if (
-      msg.startsWith("Unknown argument") ||
-      msg.startsWith("Not enough non-option arguments")
-    ) {
+    if (msg.startsWith("Unknown argument") || msg.startsWith("Not enough non-option arguments")) {
cli.showHelp("log") cli.showHelp("log")
} }
}) })
@ -97,10 +114,7 @@ try {
Log.Default.error("fatal", data) Log.Default.error("fatal", data)
const formatted = FormatError(e) const formatted = FormatError(e)
if (formatted) UI.error(formatted) if (formatted) UI.error(formatted)
-  if (formatted === undefined)
-    UI.error(
-      "Unexpected error, check log file at " + Log.file() + " for more details",
-    )
+  if (formatted === undefined) UI.error("Unexpected error, check log file at " + Log.file() + " for more details")
process.exitCode = 1 process.exitCode = 1
} }


@ -135,8 +135,7 @@ export namespace Installation {
}) })
} }
-  export const VERSION =
-    typeof OPENCODE_VERSION === "string" ? OPENCODE_VERSION : "dev"
+  export const VERSION = typeof OPENCODE_VERSION === "string" ? OPENCODE_VERSION : "dev"
export async function latest() { export async function latest() {
return fetch("https://api.github.com/repos/sst/opencode/releases/latest") return fetch("https://api.github.com/repos/sst/opencode/releases/latest")


@ -1,9 +1,5 @@
import path from "path" import path from "path"
-import {
-  createMessageConnection,
-  StreamMessageReader,
-  StreamMessageWriter,
-} from "vscode-jsonrpc/node"
+import { createMessageConnection, StreamMessageReader, StreamMessageWriter } from "vscode-jsonrpc/node"
import type { Diagnostic as VSCodeDiagnostic } from "vscode-languageserver-types" import type { Diagnostic as VSCodeDiagnostic } from "vscode-languageserver-types"
import { App } from "../app/app" import { App } from "../app/app"
import { Log } from "../util/log" import { Log } from "../util/log"
@ -38,45 +34,54 @@ export namespace LSPClient {
), ),
} }
-  export async function create(serverID: string, server: LSPServer.Handle) {
+  export async function create(input: { serverID: string; server: LSPServer.Handle; root: string }) {
     const app = App.info()
-    log.info("starting client", { id: serverID })
+    const l = log.clone().tag("serverID", input.serverID)
+    l.info("starting client")
     const connection = createMessageConnection(
-      new StreamMessageReader(server.process.stdout),
-      new StreamMessageWriter(server.process.stdin),
+      new StreamMessageReader(input.server.process.stdout),
+      new StreamMessageWriter(input.server.process.stdin),
) )
const diagnostics = new Map<string, Diagnostic[]>() const diagnostics = new Map<string, Diagnostic[]>()
connection.onNotification("textDocument/publishDiagnostics", (params) => { connection.onNotification("textDocument/publishDiagnostics", (params) => {
const path = new URL(params.uri).pathname const path = new URL(params.uri).pathname
log.info("textDocument/publishDiagnostics", { l.info("textDocument/publishDiagnostics", {
path, path,
}) })
const exists = diagnostics.has(path) const exists = diagnostics.has(path)
diagnostics.set(path, params.diagnostics) diagnostics.set(path, params.diagnostics)
if (!exists && serverID === "typescript") return if (!exists && input.serverID === "typescript") return
Bus.publish(Event.Diagnostics, { path, serverID }) Bus.publish(Event.Diagnostics, { path, serverID: input.serverID })
})
connection.onRequest("window/workDoneProgress/create", (params) => {
l.info("window/workDoneProgress/create", params)
return null
}) })
connection.onRequest("workspace/configuration", async () => { connection.onRequest("workspace/configuration", async () => {
return [{}] return [{}]
}) })
connection.listen() connection.listen()
log.info("sending initialize", { id: serverID }) l.info("sending initialize")
await withTimeout( await withTimeout(
connection.sendRequest("initialize", { connection.sendRequest("initialize", {
processId: server.process.pid, rootUri: "file://" + input.root,
processId: input.server.process.pid,
workspaceFolders: [ workspaceFolders: [
{ {
name: "workspace", name: "workspace",
uri: "file://" + app.path.cwd, uri: "file://" + input.root,
}, },
], ],
initializationOptions: { initializationOptions: {
...server.initialization, ...input.server.initialization,
}, },
capabilities: { capabilities: {
window: {
workDoneProgress: true,
},
workspace: { workspace: {
configuration: true, configuration: true,
}, },
@ -92,28 +97,33 @@ export namespace LSPClient {
}, },
}), }),
5_000, 5_000,
).catch(() => { ).catch((err) => {
throw new InitializeError({ serverID }) l.error("initialize error", { error: err })
throw new InitializeError(
{ serverID: input.serverID },
{
cause: err,
},
)
}) })
await connection.sendNotification("initialized", {}) await connection.sendNotification("initialized", {})
log.info("initialized")
const files: { const files: {
[path: string]: number [path: string]: number
} = {} } = {}
const result = { const result = {
root: input.root,
get serverID() { get serverID() {
return serverID return input.serverID
}, },
get connection() { get connection() {
return connection return connection
}, },
notify: { notify: {
async open(input: { path: string }) { async open(input: { path: string }) {
input.path = path.isAbsolute(input.path) input.path = path.isAbsolute(input.path) ? input.path : path.resolve(app.path.cwd, input.path)
? input.path
: path.resolve(app.path.cwd, input.path)
const file = Bun.file(input.path) const file = Bun.file(input.path)
const text = await file.text() const text = await file.text()
const version = files[input.path] const version = files[input.path]
@ -145,18 +155,13 @@ export namespace LSPClient {
return diagnostics return diagnostics
}, },
async waitForDiagnostics(input: { path: string }) { async waitForDiagnostics(input: { path: string }) {
input.path = path.isAbsolute(input.path) input.path = path.isAbsolute(input.path) ? input.path : path.resolve(app.path.cwd, input.path)
? input.path
: path.resolve(app.path.cwd, input.path)
log.info("waiting for diagnostics", input) log.info("waiting for diagnostics", input)
let unsub: () => void let unsub: () => void
return await withTimeout( return await withTimeout(
new Promise<void>((resolve) => { new Promise<void>((resolve) => {
unsub = Bus.subscribe(Event.Diagnostics, (event) => { unsub = Bus.subscribe(Event.Diagnostics, (event) => {
if ( if (event.properties.path === input.path && event.properties.serverID === result.serverID) {
event.properties.path === input.path &&
event.properties.serverID === result.serverID
) {
log.info("got diagnostics", input) log.info("got diagnostics", input)
unsub?.() unsub?.()
resolve() resolve()
@ -171,14 +176,16 @@ export namespace LSPClient {
}) })
}, },
async shutdown() { async shutdown() {
log.info("shutting down", { serverID }) l.info("shutting down")
connection.end() connection.end()
connection.dispose() connection.dispose()
server.process.kill("SIGTERM") input.server.process.kill()
log.info("shutdown", { serverID }) l.info("shutdown")
}, },
} }
l.info("initialized")
return result return result
} }
} }
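
Taken together, these client changes replace the positional (serverID, server) arguments with a single input object and scope each client to an explicit project root. A minimal sketch of how a caller might drive the new shape; the relative import paths assume the code sits alongside the lsp modules:

```ts
// Sketch only: relative imports assume this lives next to the lsp modules.
import { App } from "../app/app"
import { LSPClient } from "./client"
import { LSPServer } from "./server"

async function openWithTypescript(file: string) {
  const app = App.info()
  // root() is new: it resolves the nearest project root for this file.
  const root = await LSPServer.Typescript.root(file, app)
  if (!root) return
  // spawn() now receives the resolved root as well as the app info.
  const handle = await LSPServer.Typescript.spawn(app, root)
  if (!handle) return
  // create() takes a single input object instead of (serverID, server).
  const client = await LSPClient.create({
    serverID: LSPServer.Typescript.id,
    server: handle,
    root,
  })
  await client.notify.open({ path: file })
  return client
}
```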


@ -3,64 +3,65 @@ import { Log } from "../util/log"
import { LSPClient } from "./client" import { LSPClient } from "./client"
import path from "path" import path from "path"
import { LSPServer } from "./server" import { LSPServer } from "./server"
import { Ripgrep } from "../file/ripgrep"
import { z } from "zod" import { z } from "zod"
export namespace LSP { export namespace LSP {
const log = Log.create({ service: "lsp" }) const log = Log.create({ service: "lsp" })
export const Range = z
.object({
start: z.object({
line: z.number(),
character: z.number(),
}),
end: z.object({
line: z.number(),
character: z.number(),
}),
})
.openapi({
ref: "Range",
})
export type Range = z.infer<typeof Range>
export const Symbol = z export const Symbol = z
.object({ .object({
name: z.string(), name: z.string(),
kind: z.number(), kind: z.number(),
location: z.object({ location: z.object({
uri: z.string(), uri: z.string(),
range: z.object({ range: Range,
start: z.object({
line: z.number(),
character: z.number(),
}),
end: z.object({
line: z.number(),
character: z.number(),
}),
}),
}), }),
}) })
.openapi({ .openapi({
ref: "LSP.Symbol", ref: "Symbol",
}) })
export type Symbol = z.infer<typeof Symbol> export type Symbol = z.infer<typeof Symbol>
export const DocumentSymbol = z
.object({
name: z.string(),
detail: z.string().optional(),
kind: z.number(),
range: Range,
selectionRange: Range,
})
.openapi({
ref: "DocumentSymbol",
})
export type DocumentSymbol = z.infer<typeof DocumentSymbol>
const state = App.state( const state = App.state(
"lsp", "lsp",
async (app) => { async () => {
log.info("initializing") const clients: LSPClient.Info[] = []
const clients = new Map<string, LSPClient.Info>()
for (const server of Object.values(LSPServer)) {
for (const extension of server.extensions) {
const [file] = await Ripgrep.files({
cwd: app.path.cwd,
glob: "*" + extension,
})
if (!file) continue
const handle = await server.spawn(App.info())
if (!handle) break
const client = await LSPClient.create(server.id, handle).catch(
() => {},
)
if (!client) break
clients.set(server.id, client)
break
}
}
log.info("initialized")
return { return {
broken: new Set<string>(),
clients, clients,
} }
}, },
async (state) => { async (state) => {
for (const client of state.clients.values()) { for (const client of state.clients) {
await client.shutdown() await client.shutdown()
} }
}, },
@ -70,16 +71,44 @@ export namespace LSP {
return state() return state()
} }
async function getClients(file: string) {
const s = await state()
const extension = path.parse(file).ext
const result: LSPClient.Info[] = []
for (const server of Object.values(LSPServer)) {
if (!server.extensions.includes(extension)) continue
const root = await server.root(file, App.info())
if (!root) continue
if (s.broken.has(root + server.id)) continue
const match = s.clients.find((x) => x.root === root && x.serverID === server.id)
if (match) {
result.push(match)
continue
}
const handle = await server.spawn(App.info(), root)
if (!handle) continue
const client = await LSPClient.create({
serverID: server.id,
server: handle,
root,
}).catch((err) => {
s.broken.add(root + server.id)
handle.process.kill()
log.error("", { error: err })
})
if (!client) continue
s.clients.push(client)
result.push(client)
}
return result
}
export async function touchFile(input: string, waitForDiagnostics?: boolean) { export async function touchFile(input: string, waitForDiagnostics?: boolean) {
const extension = path.parse(input).ext const clients = await getClients(input)
const matches = Object.values(LSPServer)
.filter((x) => x.extensions.includes(extension))
.map((x) => x.id)
await run(async (client) => { await run(async (client) => {
if (!matches.includes(client.serverID)) return if (!clients.includes(client)) return
const wait = waitForDiagnostics const wait = waitForDiagnostics ? client.waitForDiagnostics({ path: input }) : Promise.resolve()
? client.waitForDiagnostics({ path: input })
: Promise.resolve()
await client.notify.open({ path: input }) await client.notify.open({ path: input })
return wait return wait
}) })
@ -97,11 +126,7 @@ export namespace LSP {
return results return results
} }
export async function hover(input: { export async function hover(input: { file: string; line: number; character: number }) {
file: string
line: number
character: number
}) {
return run((client) => { return run((client) => {
return client.connection.sendRequest("textDocument/hover", { return client.connection.sendRequest("textDocument/hover", {
textDocument: { textDocument: {
@ -115,18 +140,74 @@ export namespace LSP {
}) })
} }
enum SymbolKind {
File = 1,
Module = 2,
Namespace = 3,
Package = 4,
Class = 5,
Method = 6,
Property = 7,
Field = 8,
Constructor = 9,
Enum = 10,
Interface = 11,
Function = 12,
Variable = 13,
Constant = 14,
String = 15,
Number = 16,
Boolean = 17,
Array = 18,
Object = 19,
Key = 20,
Null = 21,
EnumMember = 22,
Struct = 23,
Event = 24,
Operator = 25,
TypeParameter = 26,
}
const kinds = [
SymbolKind.Class,
SymbolKind.Function,
SymbolKind.Method,
SymbolKind.Interface,
SymbolKind.Variable,
SymbolKind.Constant,
SymbolKind.Struct,
SymbolKind.Enum,
]
export async function workspaceSymbol(query: string) { export async function workspaceSymbol(query: string) {
return run((client) => return run((client) =>
client.connection.sendRequest("workspace/symbol", { client.connection
query, .sendRequest("workspace/symbol", {
}), query,
})
.then((result: any) => result.filter((x: LSP.Symbol) => kinds.includes(x.kind)))
.then((result: any) => result.slice(0, 10))
.catch(() => []),
).then((result) => result.flat() as LSP.Symbol[]) ).then((result) => result.flat() as LSP.Symbol[])
} }
async function run<T>( export async function documentSymbol(uri: string) {
input: (client: LSPClient.Info) => Promise<T>, return run((client) =>
): Promise<T[]> { client.connection
const clients = await state().then((x) => [...x.clients.values()]) .sendRequest("textDocument/documentSymbol", {
textDocument: {
uri,
},
})
.catch(() => []),
)
.then((result) => result.flat() as (LSP.DocumentSymbol | LSP.Symbol)[])
.then((result) => result.filter(Boolean))
}
async function run<T>(input: (client: LSPClient.Info) => Promise<T>): Promise<T[]> {
const clients = await state().then((x) => x.clients)
const tasks = clients.map((x) => input(x)) const tasks = clients.map((x) => input(x))
return Promise.all(tasks) return Promise.all(tasks)
} }
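
At the LSP index level, clients are now created lazily per (server, root) pair, cached in a list, and roots that fail to initialize are remembered in a broken set so they are not retried. A rough usage sketch of the public surface shown above (touchFile, workspaceSymbol, and the new documentSymbol); the import path matches the one used by the server module later in this diff:

```ts
// Sketch only: "../lsp" mirrors the import used elsewhere in the codebase.
import { LSP } from "../lsp"

async function inspect(file: string) {
  // touchFile() resolves matching clients via getClients() and opens the file;
  // passing true also waits for the next publishDiagnostics notification.
  await LSP.touchFile(file, true)

  // workspaceSymbol() now filters to classes, functions, methods, etc.
  // and caps the result at 10 entries per client.
  const symbols = await LSP.workspaceSymbol("create")

  // documentSymbol() is new and takes a file URI.
  const outline = await LSP.documentSymbol("file://" + file)
  return { symbols, outline }
}
```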


@ -94,4 +94,6 @@ export const LANGUAGE_EXTENSIONS: Record<string, string> = {
".yml": "yaml", ".yml": "yaml",
".mjs": "javascript", ".mjs": "javascript",
".cjs": "javascript", ".cjs": "javascript",
".zig": "zig",
".zon": "zig",
} as const } as const


@ -6,6 +6,7 @@ import { Log } from "../util/log"
import { BunProc } from "../bun" import { BunProc } from "../bun"
import { $ } from "bun" import { $ } from "bun"
import fs from "fs/promises" import fs from "fs/promises"
import { Filesystem } from "../util/filesystem"
export namespace LSPServer { export namespace LSPServer {
const log = Log.create({ service: "lsp.server" }) const log = Log.create({ service: "lsp.server" })
@ -15,31 +16,44 @@ export namespace LSPServer {
initialization?: Record<string, any> initialization?: Record<string, any>
} }
type RootFunction = (file: string, app: App.Info) => Promise<string | undefined>
const NearestRoot = (patterns: string[]): RootFunction => {
return async (file, app) => {
const files = Filesystem.up({
targets: patterns,
start: path.dirname(file),
stop: app.path.root,
})
const first = await files.next()
await files.return()
if (!first.value) return app.path.root
return path.dirname(first.value)
}
}
export interface Info { export interface Info {
id: string id: string
extensions: string[] extensions: string[]
spawn(app: App.Info): Promise<Handle | undefined> global?: boolean
root: RootFunction
spawn(app: App.Info, root: string): Promise<Handle | undefined>
} }
export const Typescript: Info = { export const Typescript: Info = {
id: "typescript", id: "typescript",
root: NearestRoot(["tsconfig.json", "package.json", "jsconfig.json"]),
extensions: [".ts", ".tsx", ".js", ".jsx", ".mjs", ".cjs", ".mts", ".cts"], extensions: [".ts", ".tsx", ".js", ".jsx", ".mjs", ".cjs", ".mts", ".cts"],
async spawn(app) { async spawn(app, root) {
const tsserver = await Bun.resolve( const tsserver = await Bun.resolve("typescript/lib/tsserver.js", app.path.cwd).catch(() => {})
"typescript/lib/tsserver.js",
app.path.cwd,
).catch(() => {})
if (!tsserver) return if (!tsserver) return
const proc = spawn( const proc = spawn(BunProc.which(), ["x", "typescript-language-server", "--stdio"], {
BunProc.which(), cwd: root,
["x", "typescript-language-server", "--stdio"], env: {
{ ...process.env,
env: { BUN_BE_BUN: "1",
...process.env,
BUN_BE_BUN: "1",
},
}, },
) })
return { return {
process: proc, process: proc,
initialization: { initialization: {
@ -53,8 +67,13 @@ export namespace LSPServer {
export const Gopls: Info = { export const Gopls: Info = {
id: "golang", id: "golang",
root: async (file, app) => {
const work = await NearestRoot(["go.work"])(file, app)
if (work) return work
return NearestRoot(["go.mod", "go.sum"])(file, app)
},
extensions: [".go"], extensions: [".go"],
async spawn() { async spawn(_, root) {
let bin = Bun.which("gopls", { let bin = Bun.which("gopls", {
PATH: process.env["PATH"] + ":" + Global.Path.bin, PATH: process.env["PATH"] + ":" + Global.Path.bin,
}) })
@ -73,24 +92,24 @@ export namespace LSPServer {
log.error("Failed to install gopls") log.error("Failed to install gopls")
return return
} }
bin = path.join( bin = path.join(Global.Path.bin, "gopls" + (process.platform === "win32" ? ".exe" : ""))
Global.Path.bin,
"gopls" + (process.platform === "win32" ? ".exe" : ""),
)
log.info(`installed gopls`, { log.info(`installed gopls`, {
bin, bin,
}) })
} }
return { return {
process: spawn(bin!), process: spawn(bin!, {
cwd: root,
}),
} }
}, },
} }
export const RubyLsp: Info = { export const RubyLsp: Info = {
id: "ruby-lsp", id: "ruby-lsp",
root: NearestRoot(["Gemfile"]),
extensions: [".rb", ".rake", ".gemspec", ".ru"], extensions: [".rb", ".rake", ".gemspec", ".ru"],
async spawn() { async spawn(_, root) {
let bin = Bun.which("ruby-lsp", { let bin = Bun.which("ruby-lsp", {
PATH: process.env["PATH"] + ":" + Global.Path.bin, PATH: process.env["PATH"] + ":" + Global.Path.bin,
}) })
@ -113,16 +132,15 @@ export namespace LSPServer {
log.error("Failed to install ruby-lsp") log.error("Failed to install ruby-lsp")
return return
} }
bin = path.join( bin = path.join(Global.Path.bin, "ruby-lsp" + (process.platform === "win32" ? ".exe" : ""))
Global.Path.bin,
"ruby-lsp" + (process.platform === "win32" ? ".exe" : ""),
)
log.info(`installed ruby-lsp`, { log.info(`installed ruby-lsp`, {
bin, bin,
}) })
} }
return { return {
process: spawn(bin!, ["--stdio"]), process: spawn(bin!, ["--stdio"], {
cwd: root,
}),
} }
}, },
} }
@ -130,17 +148,15 @@ export namespace LSPServer {
export const Pyright: Info = { export const Pyright: Info = {
id: "pyright", id: "pyright",
extensions: [".py", ".pyi"], extensions: [".py", ".pyi"],
async spawn() { root: NearestRoot(["pyproject.toml", "setup.py", "setup.cfg", "requirements.txt", "Pipfile", "pyrightconfig.json"]),
const proc = spawn( async spawn(_, root) {
BunProc.which(), const proc = spawn(BunProc.which(), ["x", "pyright-langserver", "--stdio"], {
["x", "pyright-langserver", "--stdio"], cwd: root,
{ env: {
env: { ...process.env,
...process.env, BUN_BE_BUN: "1",
BUN_BE_BUN: "1",
},
}, },
) })
return { return {
process: proc, process: proc,
} }
@ -150,7 +166,8 @@ export namespace LSPServer {
export const ElixirLS: Info = { export const ElixirLS: Info = {
id: "elixir-ls", id: "elixir-ls",
extensions: [".ex", ".exs"], extensions: [".ex", ".exs"],
async spawn() { root: NearestRoot(["mix.exs", "mix.lock"]),
async spawn(_, root) {
let binary = Bun.which("elixir-ls") let binary = Bun.which("elixir-ls")
if (!binary) { if (!binary) {
const elixirLsPath = path.join(Global.Path.bin, "elixir-ls") const elixirLsPath = path.join(Global.Path.bin, "elixir-ls")
@ -158,9 +175,7 @@ export namespace LSPServer {
Global.Path.bin, Global.Path.bin,
"elixir-ls-master", "elixir-ls-master",
"release", "release",
process.platform === "win32" process.platform === "win32" ? "language_server.bar" : "language_server.sh",
? "language_server.bar"
: "language_server.sh",
) )
if (!(await Bun.file(binary).exists())) { if (!(await Bun.file(binary).exists())) {
@ -172,9 +187,7 @@ export namespace LSPServer {
log.info("downloading elixir-ls from GitHub releases") log.info("downloading elixir-ls from GitHub releases")
const response = await fetch( const response = await fetch("https://github.com/elixir-lsp/elixir-ls/archive/refs/heads/master.zip")
"https://github.com/elixir-lsp/elixir-ls/archive/refs/heads/master.zip",
)
if (!response.ok) return if (!response.ok) return
const zipPath = path.join(Global.Path.bin, "elixir-ls.zip") const zipPath = path.join(Global.Path.bin, "elixir-ls.zip")
await Bun.file(zipPath).write(response) await Bun.file(zipPath).write(response)
@ -198,7 +211,114 @@ export namespace LSPServer {
} }
return { return {
process: spawn(binary), process: spawn(binary, {
cwd: root,
}),
}
},
}
export const Zls: Info = {
id: "zls",
extensions: [".zig", ".zon"],
root: NearestRoot(["build.zig"]),
async spawn(_, root) {
let bin = Bun.which("zls", {
PATH: process.env["PATH"] + ":" + Global.Path.bin,
})
if (!bin) {
const zig = Bun.which("zig")
if (!zig) {
log.error("Zig is required to use zls. Please install Zig first.")
return
}
log.info("downloading zls from GitHub releases")
const releaseResponse = await fetch("https://api.github.com/repos/zigtools/zls/releases/latest")
if (!releaseResponse.ok) {
log.error("Failed to fetch zls release info")
return
}
const release = await releaseResponse.json()
const platform = process.platform
const arch = process.arch
let assetName = ""
let zlsArch: string = arch
if (arch === "arm64") zlsArch = "aarch64"
else if (arch === "x64") zlsArch = "x86_64"
else if (arch === "ia32") zlsArch = "x86"
let zlsPlatform: string = platform
if (platform === "darwin") zlsPlatform = "macos"
else if (platform === "win32") zlsPlatform = "windows"
const ext = platform === "win32" ? "zip" : "tar.xz"
assetName = `zls-${zlsArch}-${zlsPlatform}.${ext}`
const supportedCombos = [
"zls-x86_64-linux.tar.xz",
"zls-x86_64-macos.tar.xz",
"zls-x86_64-windows.zip",
"zls-aarch64-linux.tar.xz",
"zls-aarch64-macos.tar.xz",
"zls-aarch64-windows.zip",
"zls-x86-linux.tar.xz",
"zls-x86-windows.zip",
]
if (!supportedCombos.includes(assetName)) {
log.error(`Platform ${platform} and architecture ${arch} is not supported by zls`)
return
}
const asset = release.assets.find((a: any) => a.name === assetName)
if (!asset) {
log.error(`Could not find asset ${assetName} in latest zls release`)
return
}
const downloadUrl = asset.browser_download_url
const downloadResponse = await fetch(downloadUrl)
if (!downloadResponse.ok) {
log.error("Failed to download zls")
return
}
const tempPath = path.join(Global.Path.bin, assetName)
await Bun.file(tempPath).write(downloadResponse)
if (ext === "zip") {
await $`unzip -o -q ${tempPath}`.cwd(Global.Path.bin).nothrow()
} else {
await $`tar -xf ${tempPath}`.cwd(Global.Path.bin).nothrow()
}
await fs.rm(tempPath, { force: true })
bin = path.join(Global.Path.bin, "zls" + (platform === "win32" ? ".exe" : ""))
if (!(await Bun.file(bin).exists())) {
log.error("Failed to extract zls binary")
return
}
if (platform !== "win32") {
await $`chmod +x ${bin}`.nothrow()
}
log.info(`installed zls`, { bin })
}
return {
process: spawn(bin, {
cwd: root,
}),
} }
}, },
} }
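
Each server definition now carries a root() resolver, typically built from NearestRoot(), which walks up from the edited file toward the app root looking for marker files, and spawn() receives that root so the process starts inside the right project. Purely as an illustration (not part of the diff), a hypothetical Rust entry declared alongside the others in server.ts might look like this:

```ts
// Hypothetical example inside the LSPServer namespace, where Info, NearestRoot,
// spawn, and log are already in scope. Assumes rust-analyzer is on PATH.
export const RustAnalyzer: Info = {
  id: "rust-analyzer",
  extensions: [".rs"],
  // Prefer the nearest Cargo project; NearestRoot falls back to the app root.
  root: NearestRoot(["Cargo.toml", "Cargo.lock"]),
  async spawn(_, root) {
    const bin = Bun.which("rust-analyzer")
    if (!bin) {
      log.error("rust-analyzer not found in PATH")
      return
    }
    return {
      process: spawn(bin, { cwd: root }),
    }
  },
}
```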


@ -91,8 +91,7 @@ export namespace Provider {
if (!info || info.type !== "oauth") return if (!info || info.type !== "oauth") return
if (!info.access || info.expires < Date.now()) { if (!info.access || info.expires < Date.now()) {
const tokens = await copilot.access(info.refresh) const tokens = await copilot.access(info.refresh)
if (!tokens) if (!tokens) throw new Error("GitHub Copilot authentication expired")
throw new Error("GitHub Copilot authentication expired")
await Auth.set("github-copilot", { await Auth.set("github-copilot", {
type: "oauth", type: "oauth",
...tokens, ...tokens,
@ -100,25 +99,27 @@ export namespace Provider {
info.access = tokens.access info.access = tokens.access
} }
let isAgentCall = false let isAgentCall = false
let isVisionRequest = false
try { try {
const body = const body = typeof init.body === "string" ? JSON.parse(init.body) : init.body
typeof init.body === "string"
? JSON.parse(init.body)
: init.body
if (body?.messages) { if (body?.messages) {
isAgentCall = body.messages.some( isAgentCall = body.messages.some((msg: any) => msg.role && ["tool", "assistant"].includes(msg.role))
isVisionRequest = body.messages.some(
(msg: any) => (msg: any) =>
msg.role && ["tool", "assistant"].includes(msg.role), Array.isArray(msg.content) && msg.content.some((part: any) => part.type === "image_url"),
) )
} }
} catch {} } catch {}
const headers = { const headers: Record<string, string> = {
...init.headers, ...init.headers,
...copilot.HEADERS, ...copilot.HEADERS,
Authorization: `Bearer ${info.access}`, Authorization: `Bearer ${info.access}`,
"Openai-Intent": "conversation-edits", "Openai-Intent": "conversation-edits",
"X-Initiator": isAgentCall ? "agent" : "user", "X-Initiator": isAgentCall ? "agent" : "user",
} }
if (isVisionRequest) {
headers["Copilot-Vision-Request"] = "true"
}
delete headers["x-api-key"] delete headers["x-api-key"]
return fetch(input, { return fetch(input, {
...init, ...init,
@ -138,14 +139,11 @@ export namespace Provider {
} }
}, },
"amazon-bedrock": async () => { "amazon-bedrock": async () => {
if (!process.env["AWS_PROFILE"] && !process.env["AWS_ACCESS_KEY_ID"]) if (!process.env["AWS_PROFILE"] && !process.env["AWS_ACCESS_KEY_ID"]) return { autoload: false }
return { autoload: false }
const region = process.env["AWS_REGION"] ?? "us-east-1" const region = process.env["AWS_REGION"] ?? "us-east-1"
const { fromNodeProviderChain } = await import( const { fromNodeProviderChain } = await import(await BunProc.install("@aws-sdk/credential-providers"))
await BunProc.install("@aws-sdk/credential-providers")
)
return { return {
autoload: true, autoload: true,
options: { options: {
@ -157,9 +155,7 @@ export namespace Provider {
switch (regionPrefix) { switch (regionPrefix) {
case "us": { case "us": {
const modelRequiresPrefix = ["claude", "deepseek"].some((m) => const modelRequiresPrefix = ["claude", "deepseek"].some((m) => modelID.includes(m))
modelID.includes(m),
)
if (modelRequiresPrefix) { if (modelRequiresPrefix) {
modelID = `${regionPrefix}.${modelID}` modelID = `${regionPrefix}.${modelID}`
} }
@ -174,25 +170,18 @@ export namespace Provider {
"eu-south-1", "eu-south-1",
"eu-south-2", "eu-south-2",
].some((r) => region.includes(r)) ].some((r) => region.includes(r))
const modelRequiresPrefix = [ const modelRequiresPrefix = ["claude", "nova-lite", "nova-micro", "llama3", "pixtral"].some((m) =>
"claude", modelID.includes(m),
"nova-lite", )
"nova-micro",
"llama3",
"pixtral",
].some((m) => modelID.includes(m))
if (regionRequiresPrefix && modelRequiresPrefix) { if (regionRequiresPrefix && modelRequiresPrefix) {
modelID = `${regionPrefix}.${modelID}` modelID = `${regionPrefix}.${modelID}`
} }
break break
} }
case "ap": { case "ap": {
const modelRequiresPrefix = [ const modelRequiresPrefix = ["claude", "nova-lite", "nova-micro", "nova-pro"].some((m) =>
"claude", modelID.includes(m),
"nova-lite", )
"nova-micro",
"nova-pro",
].some((m) => modelID.includes(m))
if (modelRequiresPrefix) { if (modelRequiresPrefix) {
regionPrefix = "apac" regionPrefix = "apac"
modelID = `${regionPrefix}.${modelID}` modelID = `${regionPrefix}.${modelID}`
@ -230,10 +219,7 @@ export namespace Provider {
options: Record<string, any> options: Record<string, any>
} }
} = {} } = {}
const models = new Map< const models = new Map<string, { info: ModelsDev.Model; language: LanguageModel }>()
string,
{ info: ModelsDev.Model; language: LanguageModel }
>()
const sdk = new Map<string, SDK>() const sdk = new Map<string, SDK>()
log.info("init") log.info("init")
@ -248,7 +234,7 @@ export namespace Provider {
if (!provider) { if (!provider) {
const info = database[id] const info = database[id]
if (!info) return if (!info) return
if (info.api) options["baseURL"] = info.api if (info.api && !options["baseURL"]) options["baseURL"] = info.api
providers[id] = { providers[id] = {
source, source,
info, info,
@ -308,9 +294,7 @@ export namespace Provider {
database[providerID] = parsed database[providerID] = parsed
} }
const disabled = await Config.get().then( const disabled = await Config.get().then((cfg) => new Set(cfg.disabled_providers ?? []))
(cfg) => new Set(cfg.disabled_providers ?? []),
)
// load env // load env
for (const [providerID, provider] of Object.entries(database)) { for (const [providerID, provider] of Object.entries(database)) {
if (disabled.has(providerID)) continue if (disabled.has(providerID)) continue
@ -337,12 +321,7 @@ export namespace Provider {
if (disabled.has(providerID)) continue if (disabled.has(providerID)) continue
const result = await fn(database[providerID]) const result = await fn(database[providerID])
if (result && (result.autoload || providers[providerID])) { if (result && (result.autoload || providers[providerID])) {
mergeProvider( mergeProvider(providerID, result.options ?? {}, "custom", result.getModel)
providerID,
result.options ?? {},
"custom",
result.getModel,
)
} }
} }
@ -379,7 +358,7 @@ export namespace Provider {
const existing = s.sdk.get(provider.id) const existing = s.sdk.get(provider.id)
if (existing) return existing if (existing) return existing
const pkg = provider.npm ?? provider.id const pkg = provider.npm ?? provider.id
const mod = await import(await BunProc.install(pkg, "latest")) const mod = await import(await BunProc.install(pkg, "beta"))
const fn = mod[Object.keys(mod).find((key) => key.startsWith("create"))!] const fn = mod[Object.keys(mod).find((key) => key.startsWith("create"))!]
const loaded = fn(s.providers[provider.id]?.options) const loaded = fn(s.providers[provider.id]?.options)
s.sdk.set(provider.id, loaded) s.sdk.set(provider.id, loaded)
@ -406,9 +385,7 @@ export namespace Provider {
const sdk = await getSDK(provider.info) const sdk = await getSDK(provider.info)
try { try {
const language = provider.getModel const language = provider.getModel ? await provider.getModel(sdk, modelID) : sdk.languageModel(modelID)
? await provider.getModel(sdk, modelID)
: sdk.languageModel(modelID)
log.info("found", { providerID, modelID }) log.info("found", { providerID, modelID })
s.models.set(key, { s.models.set(key, {
info, info,
@ -435,10 +412,7 @@ export namespace Provider {
export function sort(models: ModelsDev.Model[]) { export function sort(models: ModelsDev.Model[]) {
return sortBy( return sortBy(
models, models,
[ [(model) => priority.findIndex((filter) => model.id.includes(filter)), "desc"],
(model) => priority.findIndex((filter) => model.id.includes(filter)),
"desc",
],
[(model) => (model.id.includes("latest") ? 0 : 1), "asc"], [(model) => (model.id.includes("latest") ? 0 : 1), "asc"],
[(model) => model.id, "desc"], [(model) => model.id, "desc"],
) )
@ -449,11 +423,7 @@ export namespace Provider {
if (cfg.model) return parseModel(cfg.model) if (cfg.model) return parseModel(cfg.model)
const provider = await list() const provider = await list()
.then((val) => Object.values(val)) .then((val) => Object.values(val))
.then((x) => .then((x) => x.find((p) => !cfg.provider || Object.keys(cfg.provider).includes(p.info.id)))
x.find(
(p) => !cfg.provider || Object.keys(cfg.provider).includes(p.info.id),
),
)
if (!provider) throw new Error("no providers found") if (!provider) throw new Error("no providers found")
const [model] = sort(Object.values(provider.info.models)) const [model] = sort(Object.values(provider.info.models))
if (!model) throw new Error("no models found") if (!model) throw new Error("no models found")
@ -536,9 +506,11 @@ export namespace Provider {
if (schema instanceof z.ZodUnion) { if (schema instanceof z.ZodUnion) {
return z.union( return z.union(
schema.options.map((option: z.ZodTypeAny) => schema.options.map((option: z.ZodTypeAny) => optionalToNullable(option)) as [
optionalToNullable(option), z.ZodTypeAny,
) as [z.ZodTypeAny, z.ZodTypeAny, ...z.ZodTypeAny[]], z.ZodTypeAny,
...z.ZodTypeAny[],
],
) )
} }
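
Within the GitHub Copilot loader, the custom fetch now inspects the outgoing chat body and sets a Copilot-Vision-Request header whenever a message part is an image. A simplified, standalone restatement of that detection (it omits copilot.HEADERS and the token refresh that the real loader handles):

```ts
// Simplified restatement of the header logic added to the Copilot fetch wrapper.
type ChatMessage = { role?: string; content?: unknown }

function buildCopilotHeaders(messages: ChatMessage[], accessToken: string) {
  const isAgentCall = messages.some((msg) => msg.role && ["tool", "assistant"].includes(msg.role))
  const isVisionRequest = messages.some(
    (msg) => Array.isArray(msg.content) && msg.content.some((part: any) => part.type === "image_url"),
  )
  const headers: Record<string, string> = {
    Authorization: `Bearer ${accessToken}`,
    "Openai-Intent": "conversation-edits",
    "X-Initiator": isAgentCall ? "agent" : "user",
  }
  if (isVisionRequest) headers["Copilot-Vision-Request"] = "true"
  return headers
}
```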


@ -1,22 +1,21 @@
-import type { LanguageModelV1Prompt } from "ai"
+import type { ModelMessage } from "ai"
 import { unique } from "remeda"
 export namespace ProviderTransform {
-  export function message(
-    msgs: LanguageModelV1Prompt,
-    providerID: string,
-    modelID: string,
-  ) {
+  export function message(msgs: ModelMessage[], providerID: string, modelID: string) {
     if (providerID === "anthropic" || modelID.includes("anthropic")) {
       const system = msgs.filter((msg) => msg.role === "system").slice(0, 2)
       const final = msgs.filter((msg) => msg.role !== "system").slice(-2)
       for (const msg of unique([...system, ...final])) {
-        msg.providerMetadata = {
-          ...msg.providerMetadata,
+        msg.providerOptions = {
+          ...msg.providerOptions,
           anthropic: {
             cacheControl: { type: "ephemeral" },
           },
+          openaiCompatible: {
+            cache_control: { type: "ephemeral" },
+          },
         }
       }
     }
@ -25,8 +24,8 @@ export namespace ProviderTransform {
       const final = msgs.filter((msg) => msg.role !== "system").slice(-2)
       for (const msg of unique([...system, ...final])) {
-        msg.providerMetadata = {
-          ...msg.providerMetadata,
+        msg.providerOptions = {
+          ...msg.providerOptions,
           bedrock: {
             cachePoint: { type: "ephemeral" },
           },
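
The transform now operates on the AI SDK v5 ModelMessage type, so cache markers go on providerOptions instead of providerMetadata, and the Anthropic branch also mirrors an openaiCompatible cache_control entry. A small sketch of calling it; the message contents, model id, and import path are illustrative:

```ts
import type { ModelMessage } from "ai"
import { ProviderTransform } from "./transform" // path assumed

const msgs: ModelMessage[] = [
  { role: "system", content: "You are opencode." },
  { role: "user", content: "Summarize the last diff." },
]

// For Anthropic models this marks the first system messages and the last two
// non-system messages with ephemeral cache-control providerOptions.
ProviderTransform.message(msgs, "anthropic", "claude-sonnet-4") // model id illustrative
console.log(msgs[0].providerOptions)
```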


@ -6,7 +6,6 @@ import { streamSSE } from "hono/streaming"
import { Session } from "../session" import { Session } from "../session"
import { resolver, validator as zValidator } from "hono-openapi/zod" import { resolver, validator as zValidator } from "hono-openapi/zod"
import { z } from "zod" import { z } from "zod"
import { Message } from "../session/message"
import { Provider } from "../provider/provider" import { Provider } from "../provider/provider"
import { App } from "../app/app" import { App } from "../app/app"
import { mapValues } from "remeda" import { mapValues } from "remeda"
@ -16,6 +15,8 @@ import { Ripgrep } from "../file/ripgrep"
import { Config } from "../config/config" import { Config } from "../config/config"
import { File } from "../file" import { File } from "../file"
import { LSP } from "../lsp" import { LSP } from "../lsp"
import { MessageV2 } from "../session/message-v2"
import { Mode } from "../session/mode"
const ERRORS = { const ERRORS = {
400: { 400: {
@ -51,12 +52,9 @@ export namespace Server {
status: 400, status: 400,
}) })
} }
-      return c.json(
-        new NamedError.Unknown({ message: err.toString() }).toObject(),
-        {
-          status: 400,
-        },
-      )
+      return c.json(new NamedError.Unknown({ message: err.toString() }).toObject(), {
+        status: 400,
+      })
}) })
.use(async (c, next) => { .use(async (c, next) => {
log.info("request", { log.info("request", {
@ -407,7 +405,7 @@ export namespace Server {
description: "List of messages", description: "List of messages",
content: { content: {
"application/json": { "application/json": {
schema: resolver(Message.Info.array()), schema: resolver(MessageV2.Info.array()),
}, },
}, },
}, },
@ -433,7 +431,7 @@ export namespace Server {
description: "Created message", description: "Created message",
content: { content: {
"application/json": { "application/json": {
schema: resolver(Message.Info), schema: resolver(MessageV2.Assistant),
}, },
}, },
}, },
@ -450,7 +448,8 @@ export namespace Server {
z.object({ z.object({
providerID: z.string(), providerID: z.string(),
modelID: z.string(), modelID: z.string(),
parts: Message.MessagePart.array(), mode: z.string(),
parts: MessageV2.UserPart.array(),
}), }),
), ),
async (c) => { async (c) => {
@ -481,15 +480,10 @@ export namespace Server {
}, },
}), }),
async (c) => { async (c) => {
-        const providers = await Provider.list().then((x) =>
-          mapValues(x, (item) => item.info),
-        )
+        const providers = await Provider.list().then((x) => mapValues(x, (item) => item.info))
         return c.json({
           providers: Object.values(providers),
-          default: mapValues(
-            providers,
-            (item) => Provider.sort(Object.values(item.models))[0].id,
-          ),
+          default: mapValues(providers, (item) => Provider.sort(Object.values(item.models))[0].id),
}) })
}, },
) )
@ -566,7 +560,7 @@ export namespace Server {
description: "Symbols", description: "Symbols",
content: { content: {
"application/json": { "application/json": {
schema: resolver(z.unknown().array()), schema: resolver(LSP.Symbol.array()),
}, },
}, },
}, },
@ -629,16 +623,7 @@ export namespace Server {
description: "File status", description: "File status",
content: { content: {
"application/json": { "application/json": {
-                schema: resolver(
-                  z
-                    .object({
-                      file: z.string(),
-                      added: z.number().int(),
-                      removed: z.number().int(),
-                      status: z.enum(["added", "deleted", "modified"]),
-                    })
-                    .array(),
-                ),
+                schema: resolver(File.Info.array()),
}, },
}, },
}, },
@ -649,6 +634,75 @@ export namespace Server {
return c.json(content) return c.json(content)
}, },
) )
.post(
"/log",
describeRoute({
description: "Write a log entry to the server logs",
responses: {
200: {
description: "Log entry written successfully",
content: {
"application/json": {
schema: resolver(z.boolean()),
},
},
},
},
}),
zValidator(
"json",
z.object({
service: z.string().openapi({ description: "Service name for the log entry" }),
level: z.enum(["debug", "info", "error", "warn"]).openapi({ description: "Log level" }),
message: z.string().openapi({ description: "Log message" }),
extra: z
.record(z.string(), z.any())
.optional()
.openapi({ description: "Additional metadata for the log entry" }),
}),
),
async (c) => {
const { service, level, message, extra } = c.req.valid("json")
const logger = Log.create({ service })
switch (level) {
case "debug":
logger.debug(message, extra)
break
case "info":
logger.info(message, extra)
break
case "error":
logger.error(message, extra)
break
case "warn":
logger.warn(message, extra)
break
}
return c.json(true)
},
)
.get(
"/mode",
describeRoute({
description: "List all modes",
responses: {
200: {
description: "List of modes",
content: {
"application/json": {
schema: resolver(Mode.Info.array()),
},
},
},
},
}),
async (c) => {
const modes = await Mode.list()
return c.json(modes)
},
)
return result return result
} }
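
The new POST /log route lets clients (for example the TUI) write entries into the server's own logger, and GET /mode exposes the configured modes. A rough client-side sketch; the base URL and port are assumptions, not part of the diff:

```ts
// Assumes an opencode server listening locally; adjust the base URL as needed.
const base = "http://localhost:4096"

// Write a log entry through the server's logger.
await fetch(`${base}/log`, {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    service: "tui",
    level: "info",
    message: "keybind triggered",
    extra: { key: "ctrl+x" },
  }),
})

// List the configured modes (build, plan, plus anything from config).
const modes = await fetch(`${base}/mode`).then((r) => r.json())
console.log(modes.map((m: { name: string }) => m.name))
```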

File diff suppressed because it is too large


@ -0,0 +1,426 @@
import z from "zod"
import { Bus } from "../bus"
import { Provider } from "../provider/provider"
import { NamedError } from "../util/error"
import { Message } from "./message"
import { convertToModelMessages, type ModelMessage, type UIMessage } from "ai"
export namespace MessageV2 {
export const OutputLengthError = NamedError.create("MessageOutputLengthError", z.object({}))
export const AbortedError = NamedError.create("MessageAbortedError", z.object({}))
export const ToolStatePending = z
.object({
status: z.literal("pending"),
})
.openapi({
ref: "ToolStatePending",
})
export type ToolStatePending = z.infer<typeof ToolStatePending>
export const ToolStateRunning = z
.object({
status: z.literal("running"),
input: z.any(),
title: z.string().optional(),
metadata: z.record(z.any()).optional(),
time: z.object({
start: z.number(),
}),
})
.openapi({
ref: "ToolStateRunning",
})
export type ToolStateRunning = z.infer<typeof ToolStateRunning>
export const ToolStateCompleted = z
.object({
status: z.literal("completed"),
input: z.record(z.any()),
output: z.string(),
title: z.string(),
metadata: z.record(z.any()),
time: z.object({
start: z.number(),
end: z.number(),
}),
})
.openapi({
ref: "ToolStateCompleted",
})
export type ToolStateCompleted = z.infer<typeof ToolStateCompleted>
export const ToolStateError = z
.object({
status: z.literal("error"),
input: z.record(z.any()),
error: z.string(),
time: z.object({
start: z.number(),
end: z.number(),
}),
})
.openapi({
ref: "ToolStateError",
})
export type ToolStateError = z.infer<typeof ToolStateError>
export const ToolState = z
.discriminatedUnion("status", [ToolStatePending, ToolStateRunning, ToolStateCompleted, ToolStateError])
.openapi({
ref: "ToolState",
})
export const TextPart = z
.object({
type: z.literal("text"),
text: z.string(),
synthetic: z.boolean().optional(),
})
.openapi({
ref: "TextPart",
})
export type TextPart = z.infer<typeof TextPart>
export const ToolPart = z
.object({
type: z.literal("tool"),
id: z.string(),
tool: z.string(),
state: ToolState,
})
.openapi({
ref: "ToolPart",
})
export type ToolPart = z.infer<typeof ToolPart>
export const FilePart = z
.object({
type: z.literal("file"),
mime: z.string(),
filename: z.string().optional(),
url: z.string(),
})
.openapi({
ref: "FilePart",
})
export type FilePart = z.infer<typeof FilePart>
export const StepStartPart = z
.object({
type: z.literal("step-start"),
})
.openapi({
ref: "StepStartPart",
})
export type StepStartPart = z.infer<typeof StepStartPart>
export const StepFinishPart = z
.object({
type: z.literal("step-finish"),
cost: z.number(),
tokens: z.object({
input: z.number(),
output: z.number(),
reasoning: z.number(),
cache: z.object({
read: z.number(),
write: z.number(),
}),
}),
})
.openapi({
ref: "StepFinishPart",
})
export type StepFinishPart = z.infer<typeof StepFinishPart>
const Base = z.object({
id: z.string(),
sessionID: z.string(),
})
export const UserPart = z.discriminatedUnion("type", [TextPart, FilePart]).openapi({
ref: "UserMessagePart",
})
export type UserPart = z.infer<typeof UserPart>
export const User = Base.extend({
role: z.literal("user"),
parts: z.array(UserPart),
time: z.object({
created: z.number(),
}),
}).openapi({
ref: "UserMessage",
})
export type User = z.infer<typeof User>
export const AssistantPart = z
.discriminatedUnion("type", [TextPart, ToolPart, StepStartPart, StepFinishPart])
.openapi({
ref: "AssistantMessagePart",
})
export type AssistantPart = z.infer<typeof AssistantPart>
export const Assistant = Base.extend({
role: z.literal("assistant"),
parts: z.array(AssistantPart),
time: z.object({
created: z.number(),
completed: z.number().optional(),
}),
error: z
.discriminatedUnion("name", [
Provider.AuthError.Schema,
NamedError.Unknown.Schema,
OutputLengthError.Schema,
AbortedError.Schema,
])
.optional(),
system: z.string().array(),
modelID: z.string(),
providerID: z.string(),
path: z.object({
cwd: z.string(),
root: z.string(),
}),
summary: z.boolean().optional(),
cost: z.number(),
tokens: z.object({
input: z.number(),
output: z.number(),
reasoning: z.number(),
cache: z.object({
read: z.number(),
write: z.number(),
}),
}),
}).openapi({
ref: "AssistantMessage",
})
export type Assistant = z.infer<typeof Assistant>
export const Info = z.discriminatedUnion("role", [User, Assistant]).openapi({
ref: "Message",
})
export type Info = z.infer<typeof Info>
export const Event = {
Updated: Bus.event(
"message.updated",
z.object({
info: Info,
}),
),
Removed: Bus.event(
"message.removed",
z.object({
sessionID: z.string(),
messageID: z.string(),
}),
),
PartUpdated: Bus.event(
"message.part.updated",
z.object({
part: AssistantPart,
sessionID: z.string(),
messageID: z.string(),
}),
),
}
export function fromV1(v1: Message.Info) {
if (v1.role === "assistant") {
const result: Assistant = {
id: v1.id,
sessionID: v1.metadata.sessionID,
role: "assistant",
time: {
created: v1.metadata.time.created,
completed: v1.metadata.time.completed,
},
cost: v1.metadata.assistant!.cost,
path: v1.metadata.assistant!.path,
summary: v1.metadata.assistant!.summary,
tokens: v1.metadata.assistant!.tokens,
modelID: v1.metadata.assistant!.modelID,
providerID: v1.metadata.assistant!.providerID,
system: v1.metadata.assistant!.system,
error: v1.metadata.error,
parts: v1.parts.flatMap((part): AssistantPart[] => {
if (part.type === "text") {
return [
{
type: "text",
text: part.text,
},
]
}
if (part.type === "step-start") {
return [
{
type: "step-start",
},
]
}
if (part.type === "tool-invocation") {
return [
{
type: "tool",
id: part.toolInvocation.toolCallId,
tool: part.toolInvocation.toolName,
state: (() => {
if (part.toolInvocation.state === "partial-call") {
return {
status: "pending",
}
}
const { title, time, ...metadata } = v1.metadata.tool[part.toolInvocation.toolCallId] ?? {}
if (part.toolInvocation.state === "call") {
return {
status: "running",
input: part.toolInvocation.args,
time: {
start: time?.start,
},
}
}
if (part.toolInvocation.state === "result") {
return {
status: "completed",
input: part.toolInvocation.args,
output: part.toolInvocation.result,
title,
time,
metadata,
}
}
throw new Error("unknown tool invocation state")
})(),
},
]
}
return []
}),
}
return result
}
if (v1.role === "user") {
const result: User = {
id: v1.id,
sessionID: v1.metadata.sessionID,
role: "user",
time: {
created: v1.metadata.time.created,
},
parts: v1.parts.flatMap((part): UserPart[] => {
if (part.type === "text") {
return [
{
type: "text",
text: part.text,
},
]
}
if (part.type === "file") {
return [
{
type: "file",
mime: part.mediaType,
filename: part.filename,
url: part.url,
},
]
}
return []
}),
}
return result
}
}
export function toModelMessage(input: Info[]): ModelMessage[] {
const result: UIMessage[] = []
for (const msg of input) {
if (msg.parts.length === 0) continue
if (msg.role === "user") {
result.push({
id: msg.id,
role: "user",
parts: msg.parts.flatMap((part): UIMessage["parts"] => {
if (part.type === "text")
return [
{
type: "text",
text: part.text,
},
]
if (part.type === "file")
return [
{
type: "file",
url: part.url,
mediaType: part.mime,
filename: part.filename,
},
]
return []
}),
})
}
if (msg.role === "assistant") {
result.push({
id: msg.id,
role: "assistant",
parts: msg.parts.flatMap((part): UIMessage["parts"] => {
if (part.type === "text")
return [
{
type: "text",
text: part.text,
},
]
if (part.type === "step-start")
return [
{
type: "step-start",
},
]
if (part.type === "tool") {
if (part.state.status === "completed")
return [
{
type: ("tool-" + part.tool) as `tool-${string}`,
state: "output-available",
toolCallId: part.id,
input: part.state.input,
output: part.state.output,
},
]
if (part.state.status === "error")
return [
{
type: ("tool-" + part.tool) as `tool-${string}`,
state: "output-error",
toolCallId: part.id,
input: part.state.input,
errorText: part.state.error,
},
]
}
return []
}),
})
}
}
return convertToModelMessages(result)
}
}
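
MessageV2 stores its own part types (text, tool, file, step-start/finish) and only converts to AI SDK messages at call time via convertToModelMessages. A small sketch of that path using an illustrative user message; the import path is assumed:

```ts
import { MessageV2 } from "./message-v2" // path assumed

const user: MessageV2.User = {
  id: "msg_1",
  sessionID: "ses_1",
  role: "user",
  time: { created: Date.now() },
  parts: [
    { type: "text", text: "What does Snapshot.create do now?" },
    { type: "file", mime: "text/plain", filename: "notes.txt", url: "file:///tmp/notes.txt" },
  ],
}

// toModelMessage() maps the stored parts onto AI SDK UIMessage parts and then
// runs convertToModelMessages() to produce ModelMessage[] for the provider call.
const modelMessages = MessageV2.toModelMessage([user])
console.log(modelMessages.length)
```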


@ -1,13 +1,9 @@
import z from "zod" import z from "zod"
import { Bus } from "../bus"
import { Provider } from "../provider/provider" import { Provider } from "../provider/provider"
import { NamedError } from "../util/error" import { NamedError } from "../util/error"
export namespace Message { export namespace Message {
export const OutputLengthError = NamedError.create( export const OutputLengthError = NamedError.create("MessageOutputLengthError", z.object({}))
"MessageOutputLengthError",
z.object({}),
)
export const ToolCall = z export const ToolCall = z
.object({ .object({
@ -49,11 +45,9 @@ export namespace Message {
}) })
export type ToolResult = z.infer<typeof ToolResult> export type ToolResult = z.infer<typeof ToolResult>
export const ToolInvocation = z export const ToolInvocation = z.discriminatedUnion("state", [ToolCall, ToolPartialCall, ToolResult]).openapi({
.discriminatedUnion("state", [ToolCall, ToolPartialCall, ToolResult]) ref: "ToolInvocation",
.openapi({ })
ref: "ToolInvocation",
})
export type ToolInvocation = z.infer<typeof ToolInvocation> export type ToolInvocation = z.infer<typeof ToolInvocation>
export const TextPart = z export const TextPart = z
@ -122,14 +116,7 @@ export namespace Message {
export type StepStartPart = z.infer<typeof StepStartPart> export type StepStartPart = z.infer<typeof StepStartPart>
export const MessagePart = z export const MessagePart = z
.discriminatedUnion("type", [ .discriminatedUnion("type", [TextPart, ReasoningPart, ToolInvocationPart, SourceUrlPart, FilePart, StepStartPart])
TextPart,
ReasoningPart,
ToolInvocationPart,
SourceUrlPart,
FilePart,
StepStartPart,
])
.openapi({ .openapi({
ref: "MessagePart", ref: "MessagePart",
}) })
@ -197,28 +184,4 @@ export namespace Message {
ref: "Message", ref: "Message",
}) })
export type Info = z.infer<typeof Info> export type Info = z.infer<typeof Info>
export const Event = {
Updated: Bus.event(
"message.updated",
z.object({
info: Info,
}),
),
Removed: Bus.event(
"message.removed",
z.object({
sessionID: z.string(),
messageID: z.string(),
}),
),
PartUpdated: Bus.event(
"message.part.updated",
z.object({
part: MessagePart,
sessionID: z.string(),
messageID: z.string(),
}),
),
}
} }


@ -0,0 +1,70 @@
import { mergeDeep } from "remeda"
import { App } from "../app/app"
import { Config } from "../config/config"
import z from "zod"
export namespace Mode {
export const Info = z
.object({
name: z.string(),
model: z
.object({
modelID: z.string(),
providerID: z.string(),
})
.optional(),
prompt: z.string().optional(),
tools: z.record(z.boolean()),
})
.openapi({
ref: "Mode",
})
export type Info = z.infer<typeof Info>
const state = App.state("mode", async () => {
const cfg = await Config.get()
const mode = mergeDeep(
{
build: {},
plan: {
tools: {
write: false,
edit: false,
patch: false,
bash: false,
},
},
},
cfg.mode ?? {},
)
const result: Record<string, Info> = {}
for (const [key, value] of Object.entries(mode)) {
let item = result[key]
if (!item)
item = result[key] = {
name: key,
tools: {},
}
const model = value.model ?? cfg.model
if (model) {
const [providerID, ...rest] = model.split("/")
const modelID = rest.join("/")
item.model = {
modelID,
providerID,
}
}
if (value.prompt) item.prompt = value.prompt
if (value.tools) item.tools = value.tools
}
return result
})
export async function get(mode: string) {
return state().then((x) => x[mode])
}
export async function list() {
return state().then((x) => Object.values(x))
}
}
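
Mode merges the built-in build and plan entries with the config's mode block, resolving an optional model override and a per-mode tools record. A sketch of honoring those tool switches when assembling a tool set; the helper and its arguments are hypothetical:

```ts
import { Mode } from "./mode" // path assumed

async function pickTools(modeName: string, allTools: Record<string, unknown>) {
  const mode = await Mode.get(modeName)
  if (!mode) throw new Error(`unknown mode: ${modeName}`)

  // In plan mode the defaults disable write/edit/patch/bash; a config entry
  // under "mode" can flip these or add a model/prompt override.
  return Object.fromEntries(
    Object.entries(allTools).filter(([name]) => mode.tools[name] !== false),
  )
}
```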


@ -0,0 +1,95 @@
You are an agent known as opencode - please keep going until the user's query is completely resolved before ending your turn and yielding back to the user.
Your thinking should be thorough and so it's fine if it's very long. However, avoid unnecessary repetition and verbosity. You should be concise, but thorough.
You MUST iterate and keep going until the problem is solved.
I want you to fully solve this autonomously before coming back to me.
Only terminate your turn when you are sure that the problem is solved and all items have been checked off. Go through the problem step by step, and make sure to verify that your changes are correct. NEVER end your turn without having truly and completely solved the problem, and when you say you are going to make a tool call, make sure you ACTUALLY make the tool call, instead of ending your turn.
Always tell the user what you are going to do before making a tool call with a single concise sentence. This will help them understand what you are doing and why.
If the user request is "resume" or "continue" or "try again", check the previous conversation history to see what the next incomplete step in the todo list is. Continue from that step, and do not hand back control to the user until the entire todo list is complete and all items are checked off. Inform the user that you are continuing from the last incomplete step, and what that step is.
Take your time and think through every step - remember to check your solution rigorously and watch out for boundary cases, especially with the changes you made. Your solution must be perfect. If not, continue working on it. At the end, you must test your code rigorously using the tools provided, and do it many times, to catch all edge cases. If it is not robust, iterate more and make it perfect. Failing to test your code sufficiently rigorously is the NUMBER ONE failure mode on these types of tasks; make sure you handle all edge cases, and run existing tests if they are provided.
You MUST plan extensively before each function call, and reflect extensively on the outcomes of the previous function calls. DO NOT do this entire process by making function calls only, as this can impair your ability to solve the problem and think insightfully.
# Workflow
1. Understand the problem deeply. Carefully read the issue and think critically about what is required.
2. Investigate the codebase. Explore relevant files, search for key functions, and gather context.
3. Develop a clear, step-by-step plan. Break down the fix into manageable, incremental steps. Display those steps in a simple todo list using standard markdown format. Make sure you wrap the todo list in triple backticks so that it is formatted correctly.
4. Implement the fix incrementally. Make small, testable code changes.
5. Debug as needed. Use debugging techniques to isolate and resolve issues.
6. Test frequently. Run tests after each change to verify correctness.
7. Iterate until the root cause is fixed and all tests pass.
8. Reflect and validate comprehensively. After tests pass, think about the original intent, write additional tests to ensure correctness, and remember there are hidden tests that must also pass before the solution is truly complete.
Refer to the detailed sections below for more information on each step.
## 1. Deeply Understand the Problem
Carefully read the issue and think hard about a plan to solve it before coding.
## 2. Codebase Investigation
- Explore relevant files and directories.
- Search for key functions, classes, or variables related to the issue.
- Read and understand relevant code snippets.
- Identify the root cause of the problem.
- Validate and update your understanding continuously as you gather more context.
## 3. Fetch Provided URLs
- If the user provides a URL, use the `functions.fetch_webpage` tool to retrieve the content of the provided URL.
- After fetching, review the content returned by the fetch tool.
- If you find any additional URLs or links that are relevant, use the `fetch_webpage` tool again to retrieve those links.
- Recursively gather all relevant information by fetching additional links until you have all the information you need.
## 4. Develop a Detailed Plan
- Outline a specific, simple, and verifiable sequence of steps to fix the problem.
- Create a todo list in markdown format to track your progress.
- Each time you complete a step, check it off using `[x]` syntax.
- Each time you check off a step, display the updated todo list to the user.
- Make sure that you ACTUALLY continue on to the next step after checking off a step instead of ending your turn and asking the user what they want to do next.
## 5. Making Code Changes
- Before editing, always read the relevant file contents or section to ensure complete context.
- Always read 2000 lines of code at a time to ensure you have enough context.
- If a patch is not applied correctly, attempt to reapply it.
- Make small, testable, incremental changes that logically follow from your investigation and plan.
## 6. Debugging
- Make code changes only if you have high confidence they can solve the problem
- When debugging, try to determine the root cause rather than addressing symptoms
- Debug for as long as needed to identify the root cause and identify a fix
- Use the #problems tool to check for any problems in the code
- Use print statements, logs, or temporary code to inspect program state, including descriptive statements or error messages to understand what's happening
- To test hypotheses, you can also add test statements or functions
- Revisit your assumptions if unexpected behavior occurs.
# Fetch Webpage
Use the `webfetch` tool when the user provides a URL. Follow these steps exactly.
1. Use the `webfetch` tool to retrieve the content of the provided URL.
2. After fetching, review the content returned by the fetch tool.
3. If you find any additional URLs or links that are relevant, use the `webfetch` tool again to retrieve those links.
4. Go back to step 2 and repeat until you have all the information you need.
IMPORTANT: Recursively fetching links is crucial. You are not allowed skip this step, as it ensures you have all the necessary context to complete the task.
# How to create a Todo List
Use the following format to create a todo list:
```markdown
- [ ] Step 1: Description of the first step
- [ ] Step 2: Description of the second step
- [ ] Step 3: Description of the third step
```
Do not ever use HTML tags or any other formatting for the todo list, as it will not be rendered correctly. Always use the markdown format shown above.
# Creating Files
Each time you are going to create a file, use a single concise sentence to inform the user of what you are creating and why.
# Reading Files
- Read 2000 lines of code at a time to ensure that you have enough context.
- Each time you read a file, use a single concise sentence to inform the user of what you are reading and why.


@ -0,0 +1,3 @@
<system-reminder>
Plan mode is active. The user indicated that they do not want you to execute yet -- you MUST NOT make any edits, run any non-readonly tools (including changing configs or making commits), or otherwise make any changes to the system. This supersedes any other instructions you have received (for example, to make edits).
</system-reminder>


@ -7,23 +7,16 @@ import path from "path"
 import os from "os"
 import PROMPT_ANTHROPIC from "./prompt/anthropic.txt"
+import PROMPT_BEAST from "./prompt/beast.txt"
 import PROMPT_ANTHROPIC_SPOOF from "./prompt/anthropic_spoof.txt"
 import PROMPT_SUMMARIZE from "./prompt/summarize.txt"
 import PROMPT_TITLE from "./prompt/title.txt"
 export namespace SystemPrompt {
-  export function provider(providerID: string) {
-    const result = []
-    switch (providerID) {
-      case "anthropic":
-        result.push(PROMPT_ANTHROPIC_SPOOF.trim())
-        result.push(PROMPT_ANTHROPIC)
-        break
-      default:
-        result.push(PROMPT_ANTHROPIC)
-        break
-    }
-    return result
+  export function provider(providerID: string, modelID: string) {
+    if (providerID === "anthropic") return [PROMPT_ANTHROPIC_SPOOF.trim(), PROMPT_ANTHROPIC]
+    if (modelID.includes("gpt-")) return [PROMPT_BEAST]
+    return [PROMPT_ANTHROPIC]
   }
   export async function environment() {
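
Prompt selection is now keyed on both provider and model: Anthropic keeps the spoof preamble, GPT-style models get the new beast prompt, and everything else falls back to the Anthropic prompt. A condensed restatement of that branch, with plain string parameters standing in for the imported .txt prompts:

```ts
// Condensed form of the new selection logic; the real code imports the
// prompts from ./prompt/*.txt files.
function pickSystemPrompt(
  providerID: string,
  modelID: string,
  prompts: { anthropic: string; anthropicSpoof: string; beast: string },
) {
  if (providerID === "anthropic") return [prompts.anthropicSpoof.trim(), prompts.anthropic]
  if (modelID.includes("gpt-")) return [prompts.beast]
  return [prompts.anthropic]
}
```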


@ -53,9 +53,7 @@ export namespace Share {
 export const URL =
   process.env["OPENCODE_API"] ??
-  (Installation.isSnapshot() || Installation.isDev()
-    ? "https://api.dev.opencode.ai"
-    : "https://api.opencode.ai")
+  (Installation.isSnapshot() || Installation.isDev() ? "https://api.dev.opencode.ai" : "https://api.opencode.ai")
 export async function create(sessionID: string) {
   return fetch(`${URL}/share_create`, {


@ -1,14 +1,7 @@
 import { App } from "../app/app"
-import {
-  add,
-  commit,
-  init,
-  checkout,
-  statusMatrix,
-  remove,
-} from "isomorphic-git"
+import { $ } from "bun"
 import path from "path"
-import fs from "fs"
+import fs from "fs/promises"
 import { Ripgrep } from "../file/ripgrep"
 import { Log } from "../util/log"
@ -16,66 +9,53 @@ export namespace Snapshot {
const log = Log.create({ service: "snapshot" }) const log = Log.create({ service: "snapshot" })
export async function create(sessionID: string) { export async function create(sessionID: string) {
return
log.info("creating snapshot")
const app = App.info() const app = App.info()
const git = gitdir(sessionID) const git = gitdir(sessionID)
const files = await Ripgrep.files({
cwd: app.path.cwd, // not a git repo, check if too big to snapshot
limit: app.git ? undefined : 1000, if (!app.git) {
}) const files = await Ripgrep.files({
// not a git repo and too big to snapshot cwd: app.path.cwd,
if (!app.git && files.length === 1000) return limit: 1000,
await init({ })
dir: app.path.cwd, log.info("found files", { count: files.length })
gitdir: git, if (files.length > 1000) return
fs,
})
const status = await statusMatrix({
fs,
gitdir: git,
dir: app.path.cwd,
})
await add({
fs,
gitdir: git,
parallel: true,
dir: app.path.cwd,
filepath: files,
})
for (const [file, _head, workdir, stage] of status) {
if (workdir === 0 && stage === 1) {
log.info("remove", { file })
await remove({
fs,
gitdir: git,
dir: app.path.cwd,
filepath: file,
})
}
} }
const result = await commit({
fs, if (await fs.mkdir(git, { recursive: true })) {
gitdir: git, await $`git init`
dir: app.path.cwd, .env({
message: "snapshot", ...process.env,
author: { GIT_DIR: git,
name: "opencode", GIT_WORK_TREE: app.path.root,
email: "mail@opencode.ai", })
}, .quiet()
}) .nothrow()
log.info("commit", { result }) log.info("initialized")
return result }
await $`git --git-dir ${git} add .`.quiet().cwd(app.path.cwd).nothrow()
log.info("added files")
const result =
await $`git --git-dir ${git} commit --allow-empty -m "snapshot" --author="opencode <mail@opencode.ai>"`
.quiet()
.cwd(app.path.cwd)
.nothrow()
log.info("commit")
const match = result.stdout.toString().match(/\[.+ ([a-f0-9]+)\]/)
if (!match) return
return match![1]
} }
   export async function restore(sessionID: string, commit: string) {
     log.info("restore", { commit })
     const app = App.info()
-    await checkout({
-      fs,
-      gitdir: gitdir(sessionID),
-      dir: app.path.cwd,
-      ref: commit,
-      force: true,
-    })
+    const git = gitdir(sessionID)
+    await $`git --git-dir=${git} checkout ${commit} --force`.quiet().cwd(app.path.root)
   }
   function gitdir(sessionID: string) {
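For readers skimming the Snapshot diff above: the module now drives the system `git` binary through Bun's `$` shell instead of isomorphic-git, pointing `GIT_DIR`/`--git-dir` at a per-session directory so snapshot commits never touch the project's own repository. A minimal sketch of that pattern, assuming Bun and a `git` binary on PATH; the `snapshot` helper name below is illustrative, not part of the codebase:

```ts
// Sketch only: snapshot a working tree into a separate git dir, mirroring the
// GIT_DIR / --git-dir approach used in the change above.
import { $ } from "bun"
import fs from "fs/promises"

export async function snapshot(workTree: string, gitDir: string): Promise<string | undefined> {
  // Create (or reuse) the detached metadata directory and initialize it.
  await fs.mkdir(gitDir, { recursive: true })
  await $`git init`.env({ ...process.env, GIT_DIR: gitDir, GIT_WORK_TREE: workTree }).quiet().nothrow()

  // Stage the tree and commit; --allow-empty guarantees a commit even with no changes.
  await $`git --git-dir ${gitDir} add .`.cwd(workTree).quiet().nothrow()
  const result = await $`git --git-dir ${gitDir} commit --allow-empty -m "snapshot"`.cwd(workTree).quiet().nothrow()

  // git prints e.g. "[main 1a2b3c4] snapshot"; extract the short hash as the snapshot id.
  return result.stdout.toString().match(/\[.+ ([a-f0-9]+)\]/)?.[1]
}
```

Restoring is then a forced checkout against the same git dir, which is what the `restore` function above does.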


@ -4,44 +4,80 @@ import { Bus } from "../bus"
import path from "path" import path from "path"
import z from "zod" import z from "zod"
import fs from "fs/promises" import fs from "fs/promises"
import { MessageV2 } from "../session/message-v2"
export namespace Storage { export namespace Storage {
const log = Log.create({ service: "storage" }) const log = Log.create({ service: "storage" })
export const Event = { export const Event = {
Write: Bus.event( Write: Bus.event("storage.write", z.object({ key: z.string(), content: z.any() })),
"storage.write",
z.object({ key: z.string(), content: z.any() }),
),
} }
const state = App.state("storage", () => { type Migration = (dir: string) => Promise<void>
const MIGRATIONS: Migration[] = [
async (dir: string) => {
try {
const files = new Bun.Glob("session/message/*/*.json").scanSync({
cwd: dir,
absolute: true,
})
for (const file of files) {
const content = await Bun.file(file).json()
if (!content.metadata) continue
log.info("migrating to v2 message", { file })
try {
const result = MessageV2.fromV1(content)
await Bun.write(file, JSON.stringify(result, null, 2))
} catch (e) {
await fs.rename(file, file.replace("storage", "broken"))
}
}
} catch {}
},
]
const state = App.state("storage", async () => {
const app = App.info() const app = App.info()
const dir = path.join(app.path.data, "storage") const dir = path.normalize(path.join(app.path.data, "storage"))
log.info("init", { path: dir }) await fs.mkdir(dir, { recursive: true })
const migration = await Bun.file(path.join(dir, "migration"))
.json()
.then((x) => parseInt(x))
.catch(() => 0)
for (let index = migration; index < MIGRATIONS.length; index++) {
log.info("running migration", { index })
const migration = MIGRATIONS[index]
await migration(dir)
await Bun.write(path.join(dir, "migration"), (index + 1).toString())
}
return { return {
dir, dir,
} }
}) })
export async function remove(key: string) { export async function remove(key: string) {
const target = path.join(state().dir, key + ".json") const dir = await state().then((x) => x.dir)
const target = path.join(dir, key + ".json")
await fs.unlink(target).catch(() => {}) await fs.unlink(target).catch(() => {})
} }
export async function removeDir(key: string) { export async function removeDir(key: string) {
const target = path.join(state().dir, key) const dir = await state().then((x) => x.dir)
const target = path.join(dir, key)
await fs.rm(target, { recursive: true, force: true }).catch(() => {}) await fs.rm(target, { recursive: true, force: true }).catch(() => {})
} }
export async function readJSON<T>(key: string) { export async function readJSON<T>(key: string) {
return Bun.file(path.join(state().dir, key + ".json")).json() as Promise<T> const dir = await state().then((x) => x.dir)
return Bun.file(path.join(dir, key + ".json")).json() as Promise<T>
} }
export async function writeJSON<T>(key: string, content: T) { export async function writeJSON<T>(key: string, content: T) {
const target = path.join(state().dir, key + ".json") const dir = await state().then((x) => x.dir)
const target = path.join(dir, key + ".json")
const tmp = target + Date.now() + ".tmp" const tmp = target + Date.now() + ".tmp"
await Bun.write(tmp, JSON.stringify(content)) await Bun.write(tmp, JSON.stringify(content, null, 2))
await fs.rename(tmp, target).catch(() => {}) await fs.rename(tmp, target).catch(() => {})
await fs.unlink(tmp).catch(() => {}) await fs.unlink(tmp).catch(() => {})
Bus.publish(Event.Write, { key, content }) Bus.publish(Event.Write, { key, content })
@ -49,9 +85,10 @@ export namespace Storage {
const glob = new Bun.Glob("**/*") const glob = new Bun.Glob("**/*")
export async function* list(prefix: string) { export async function* list(prefix: string) {
const dir = await state().then((x) => x.dir)
try { try {
for await (const item of glob.scan({ for await (const item of glob.scan({
cwd: path.join(state().dir, prefix), cwd: path.join(dir, prefix),
onlyFiles: true, onlyFiles: true,
})) { })) {
const result = path.join(prefix, item.slice(0, -5)) const result = path.join(prefix, item.slice(0, -5))
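The Storage change above makes the App state async so it can run one-time migrations before any read or write: a `migration` file in the storage directory records how many migrations have completed, and every accessor now awaits `state()` for the resolved directory. A small sketch of that cursor pattern, assuming Bun; the `migrate` helper name is illustrative:

```ts
// Sketch only: run each pending migration exactly once, persisting a numeric
// cursor so completed migrations are skipped on the next startup.
import path from "path"
import fs from "fs/promises"

type Migration = (dir: string) => Promise<void>

export async function migrate(dir: string, migrations: Migration[]) {
  await fs.mkdir(dir, { recursive: true })
  const cursorFile = path.join(dir, "migration")
  // A missing or unreadable cursor file means nothing has run yet.
  const done = await Bun.file(cursorFile)
    .json()
    .then((x) => parseInt(x))
    .catch(() => 0)
  for (let index = done; index < migrations.length; index++) {
    await migrations[index](dir)
    // Persist progress after every step so a crash never re-runs completed work.
    await Bun.write(cursorFile, (index + 1).toString())
  }
}
```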


@ -4,25 +4,6 @@ import DESCRIPTION from "./bash.txt"
import { App } from "../app/app" import { App } from "../app/app"
const MAX_OUTPUT_LENGTH = 30000 const MAX_OUTPUT_LENGTH = 30000
const BANNED_COMMANDS = [
"alias",
"curl",
"curlie",
"wget",
"axel",
"aria2c",
"nc",
"telnet",
"lynx",
"w3m",
"links",
"httpie",
"xh",
"http-prompt",
"chrome",
"firefox",
"safari",
]
const DEFAULT_TIMEOUT = 1 * 60 * 1000 const DEFAULT_TIMEOUT = 1 * 60 * 1000
const MAX_TIMEOUT = 10 * 60 * 1000 const MAX_TIMEOUT = 10 * 60 * 1000
@ -31,12 +12,7 @@ export const BashTool = Tool.define({
description: DESCRIPTION, description: DESCRIPTION,
parameters: z.object({ parameters: z.object({
command: z.string().describe("The command to execute"), command: z.string().describe("The command to execute"),
timeout: z timeout: z.number().min(0).max(MAX_TIMEOUT).describe("Optional timeout in milliseconds").optional(),
.number()
.min(0)
.max(MAX_TIMEOUT)
.describe("Optional timeout in milliseconds")
.optional(),
description: z description: z
.string() .string()
.describe( .describe(
@ -45,8 +21,6 @@ export const BashTool = Tool.define({
}), }),
async execute(params, ctx) { async execute(params, ctx) {
const timeout = Math.min(params.timeout ?? DEFAULT_TIMEOUT, MAX_TIMEOUT) const timeout = Math.min(params.timeout ?? DEFAULT_TIMEOUT, MAX_TIMEOUT)
if (BANNED_COMMANDS.some((item) => params.command.startsWith(item)))
throw new Error(`Command '${params.command}' is not allowed`)
const process = Bun.spawn({ const process = Bun.spawn({
cmd: ["bash", "-c", params.command], cmd: ["bash", "-c", params.command],
@ -62,21 +36,14 @@ export const BashTool = Tool.define({
const stderr = await new Response(process.stderr).text() const stderr = await new Response(process.stderr).text()
return { return {
title: params.command,
metadata: { metadata: {
stderr, stderr,
stdout, stdout,
exit: process.exitCode, exit: process.exitCode,
description: params.description, description: params.description,
title: params.command,
}, },
output: [ output: [`<stdout>`, stdout ?? "", `</stdout>`, `<stderr>`, stderr ?? "", `</stderr>`].join("\n"),
`<stdout>`,
stdout ?? "",
`</stdout>`,
`<stderr>`,
stderr ?? "",
`</stderr>`,
].join("\n"),
} }
}, },
}) })
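Beyond dropping the banned-command list, the surrounding context shows how the bash tool captures output: `Bun.spawn` pipes stdout and stderr as streams, and wrapping each stream in a `Response` collects it as text. A minimal sketch of that pattern, assuming Bun (the demo command is illustrative):

```ts
// Sketch only: run a command and collect both output streams, the same
// Bun.spawn + Response pattern visible in the tool above.
const proc = Bun.spawn({
  cmd: ["bash", "-c", "echo out; echo err >&2"],
  stdout: "pipe",
  stderr: "pipe",
})
const stdout = await new Response(proc.stdout).text()
const stderr = await new Response(proc.stderr).text()
await proc.exited
console.log({ stdout, stderr, exit: proc.exitCode })
```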


@ -20,15 +20,8 @@ export const EditTool = Tool.define({
parameters: z.object({ parameters: z.object({
filePath: z.string().describe("The absolute path to the file to modify"), filePath: z.string().describe("The absolute path to the file to modify"),
oldString: z.string().describe("The text to replace"), oldString: z.string().describe("The text to replace"),
newString: z newString: z.string().describe("The text to replace it with (must be different from old_string)"),
.string() replaceAll: z.boolean().optional().describe("Replace all occurrences of old_string (default false)"),
.describe(
"The text to replace it with (must be different from old_string)",
),
replaceAll: z
.boolean()
.optional()
.describe("Replace all occurrences of old_string (default false)"),
}), }),
async execute(params, ctx) { async execute(params, ctx) {
if (!params.filePath) { if (!params.filePath) {
@ -40,9 +33,7 @@ export const EditTool = Tool.define({
} }
const app = App.info() const app = App.info()
const filepath = path.isAbsolute(params.filePath) const filepath = path.isAbsolute(params.filePath) ? params.filePath : path.join(app.path.cwd, params.filePath)
? params.filePath
: path.join(app.path.cwd, params.filePath)
await Permission.ask({ await Permission.ask({
id: "edit", id: "edit",
@ -70,17 +61,11 @@ export const EditTool = Tool.define({
const file = Bun.file(filepath) const file = Bun.file(filepath)
const stats = await file.stat().catch(() => {}) const stats = await file.stat().catch(() => {})
if (!stats) throw new Error(`File ${filepath} not found`) if (!stats) throw new Error(`File ${filepath} not found`)
if (stats.isDirectory()) if (stats.isDirectory()) throw new Error(`Path is a directory, not a file: ${filepath}`)
throw new Error(`Path is a directory, not a file: ${filepath}`)
await FileTime.assert(ctx.sessionID, filepath) await FileTime.assert(ctx.sessionID, filepath)
contentOld = await file.text() contentOld = await file.text()
contentNew = replace( contentNew = replace(contentOld, params.oldString, params.newString, params.replaceAll)
contentOld,
params.oldString,
params.newString,
params.replaceAll,
)
await file.write(contentNew) await file.write(contentNew)
await Bus.publish(File.Event.Edited, { await Bus.publish(File.Event.Edited, {
file: filepath, file: filepath,
@ -88,9 +73,7 @@ export const EditTool = Tool.define({
contentNew = await file.text() contentNew = await file.text()
})() })()
const diff = trimDiff( const diff = trimDiff(createTwoFilesPatch(filepath, filepath, contentOld, contentNew))
createTwoFilesPatch(filepath, filepath, contentOld, contentNew),
)
FileTime.read(ctx.sessionID, filepath) FileTime.read(ctx.sessionID, filepath)
@ -110,17 +93,14 @@ export const EditTool = Tool.define({
metadata: { metadata: {
diagnostics, diagnostics,
diff, diff,
title: `${path.relative(app.path.root, filepath)}`,
}, },
title: `${path.relative(app.path.root, filepath)}`,
output, output,
} }
}, },
}) })
export type Replacer = ( export type Replacer = (content: string, find: string) => Generator<string, void, unknown>
content: string,
find: string,
) => Generator<string, void, unknown>
export const SimpleReplacer: Replacer = function* (_content, find) { export const SimpleReplacer: Replacer = function* (_content, find) {
yield find yield find
@ -208,10 +188,7 @@ export const BlockAnchorReplacer: Replacer = function* (content, find) {
} }
} }
export const WhitespaceNormalizedReplacer: Replacer = function* ( export const WhitespaceNormalizedReplacer: Replacer = function* (content, find) {
content,
find,
) {
const normalizeWhitespace = (text: string) => text.replace(/\s+/g, " ").trim() const normalizeWhitespace = (text: string) => text.replace(/\s+/g, " ").trim()
const normalizedFind = normalizeWhitespace(find) const normalizedFind = normalizeWhitespace(find)
@ -229,9 +206,7 @@ export const WhitespaceNormalizedReplacer: Replacer = function* (
// Find the actual substring in the original line that matches // Find the actual substring in the original line that matches
const words = find.trim().split(/\s+/) const words = find.trim().split(/\s+/)
if (words.length > 0) { if (words.length > 0) {
const pattern = words const pattern = words.map((word) => word.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")).join("\\s+")
.map((word) => word.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"))
.join("\\s+")
try { try {
const regex = new RegExp(pattern) const regex = new RegExp(pattern)
const match = line.match(regex) const match = line.match(regex)
@ -270,9 +245,7 @@ export const IndentationFlexibleReplacer: Replacer = function* (content, find) {
}), }),
) )
return lines return lines.map((line) => (line.trim().length === 0 ? line : line.slice(minIndent))).join("\n")
.map((line) => (line.trim().length === 0 ? line : line.slice(minIndent)))
.join("\n")
} }
const normalizedFind = removeIndentation(find) const normalizedFind = removeIndentation(find)
@ -423,10 +396,7 @@ export const ContextAwareReplacer: Replacer = function* (content, find) {
} }
} }
if ( if (totalNonEmptyLines === 0 || matchingLines / totalNonEmptyLines >= 0.5) {
totalNonEmptyLines === 0 ||
matchingLines / totalNonEmptyLines >= 0.5
) {
yield block yield block
break // Only match the first occurrence break // Only match the first occurrence
} }
@ -473,12 +443,7 @@ function trimDiff(diff: string): string {
return trimmedLines.join("\n") return trimmedLines.join("\n")
} }
export function replace( export function replace(content: string, oldString: string, newString: string, replaceAll = false): string {
content: string,
oldString: string,
newString: string,
replaceAll = false,
): string {
if (oldString === newString) { if (oldString === newString) {
throw new Error("oldString and newString must be different") throw new Error("oldString and newString must be different")
} }
@ -502,11 +467,7 @@ export function replace(
} }
const lastIndex = content.lastIndexOf(search) const lastIndex = content.lastIndexOf(search)
if (index !== lastIndex) continue if (index !== lastIndex) continue
return ( return content.substring(0, index) + newString + content.substring(index + search.length)
content.substring(0, index) +
newString +
content.substring(index + search.length)
)
} }
} }
throw new Error("oldString not found in content or was found multiple times") throw new Error("oldString not found in content or was found multiple times")


@ -20,16 +20,14 @@ export const GlobTool = Tool.define({
async execute(params) { async execute(params) {
const app = App.info() const app = App.info()
let search = params.path ?? app.path.cwd let search = params.path ?? app.path.cwd
search = path.isAbsolute(search) search = path.isAbsolute(search) ? search : path.resolve(app.path.cwd, search)
? search
: path.resolve(app.path.cwd, search)
const limit = 100 const limit = 100
const files = [] const files = []
let truncated = false let truncated = false
for (const file of await Ripgrep.files({ for (const file of await Ripgrep.files({
cwd: search, cwd: search,
glob: params.pattern, glob: [params.pattern],
})) { })) {
if (files.length >= limit) { if (files.length >= limit) {
truncated = true truncated = true
@ -53,17 +51,15 @@ export const GlobTool = Tool.define({
output.push(...files.map((f) => f.path)) output.push(...files.map((f) => f.path))
if (truncated) { if (truncated) {
output.push("") output.push("")
output.push( output.push("(Results are truncated. Consider using a more specific path or pattern.)")
"(Results are truncated. Consider using a more specific path or pattern.)",
)
} }
} }
return { return {
title: path.relative(app.path.root, search),
metadata: { metadata: {
count: files.length, count: files.length,
truncated, truncated,
title: path.relative(app.path.root, search),
}, },
output: output.join("\n"), output: output.join("\n"),
} }


@ -9,21 +9,9 @@ export const GrepTool = Tool.define({
id: "grep", id: "grep",
description: DESCRIPTION, description: DESCRIPTION,
parameters: z.object({ parameters: z.object({
pattern: z pattern: z.string().describe("The regex pattern to search for in file contents"),
.string() path: z.string().optional().describe("The directory to search in. Defaults to the current working directory."),
.describe("The regex pattern to search for in file contents"), include: z.string().optional().describe('File pattern to include in the search (e.g. "*.js", "*.{ts,tsx}")'),
path: z
.string()
.optional()
.describe(
"The directory to search in. Defaults to the current working directory.",
),
include: z
.string()
.optional()
.describe(
'File pattern to include in the search (e.g. "*.js", "*.{ts,tsx}")',
),
}), }),
async execute(params) { async execute(params) {
if (!params.pattern) { if (!params.pattern) {
@ -51,7 +39,8 @@ export const GrepTool = Tool.define({
if (exitCode === 1) { if (exitCode === 1) {
return { return {
metadata: { matches: 0, truncated: false, title: params.pattern }, title: params.pattern,
metadata: { matches: 0, truncated: false },
output: "No files found", output: "No files found",
} }
} }
@ -93,7 +82,8 @@ export const GrepTool = Tool.define({
if (finalMatches.length === 0) { if (finalMatches.length === 0) {
return { return {
metadata: { matches: 0, truncated: false, title: params.pattern }, title: params.pattern,
metadata: { matches: 0, truncated: false },
output: "No files found", output: "No files found",
} }
} }
@ -114,16 +104,14 @@ export const GrepTool = Tool.define({
if (truncated) { if (truncated) {
outputLines.push("") outputLines.push("")
outputLines.push( outputLines.push("(Results are truncated. Consider using a more specific path or pattern.)")
"(Results are truncated. Consider using a more specific path or pattern.)",
)
} }
return { return {
title: params.pattern,
metadata: { metadata: {
matches: finalMatches.length, matches: finalMatches.length,
truncated, truncated,
title: params.pattern,
}, },
output: outputLines.join("\n"), output: outputLines.join("\n"),
} }


@@ -16,6 +16,8 @@ export const IGNORE_PATTERNS = [
   "obj/",
   ".idea/",
   ".vscode/",
+  ".zig-cache/",
+  "zig-out",
 ]

 const LIMIT = 100
@ -24,16 +26,8 @@ export const ListTool = Tool.define({
id: "list", id: "list",
description: DESCRIPTION, description: DESCRIPTION,
parameters: z.object({ parameters: z.object({
path: z path: z.string().describe("The absolute path to the directory to list (must be absolute, not relative)").optional(),
.string() ignore: z.array(z.string()).describe("List of glob patterns to ignore").optional(),
.describe(
"The absolute path to the directory to list (must be absolute, not relative)",
)
.optional(),
ignore: z
.array(z.string())
.describe("List of glob patterns to ignore")
.optional(),
}), }),
async execute(params) { async execute(params) {
const app = App.info() const app = App.info()
@ -44,8 +38,7 @@ export const ListTool = Tool.define({
for await (const file of glob.scan({ cwd: searchPath, dot: true })) { for await (const file of glob.scan({ cwd: searchPath, dot: true })) {
if (IGNORE_PATTERNS.some((p) => file.includes(p))) continue if (IGNORE_PATTERNS.some((p) => file.includes(p))) continue
if (params.ignore?.some((pattern) => new Bun.Glob(pattern).match(file))) if (params.ignore?.some((pattern) => new Bun.Glob(pattern).match(file))) continue
continue
files.push(file) files.push(file)
if (files.length >= LIMIT) break if (files.length >= LIMIT) break
} }
@ -99,10 +92,10 @@ export const ListTool = Tool.define({
const output = `${searchPath}/\n` + renderDir(".", 0) const output = `${searchPath}/\n` + renderDir(".", 0)
return { return {
title: path.relative(app.path.root, searchPath),
metadata: { metadata: {
count: files.length, count: files.length,
truncated: files.length >= LIMIT, truncated: files.length >= LIMIT,
title: path.relative(app.path.root, searchPath),
}, },
output, output,
} }


@ -13,20 +13,16 @@ export const LspDiagnosticTool = Tool.define({
}), }),
execute: async (args) => { execute: async (args) => {
const app = App.info() const app = App.info()
const normalized = path.isAbsolute(args.path) const normalized = path.isAbsolute(args.path) ? args.path : path.join(app.path.cwd, args.path)
? args.path
: path.join(app.path.cwd, args.path)
await LSP.touchFile(normalized, true) await LSP.touchFile(normalized, true)
const diagnostics = await LSP.diagnostics() const diagnostics = await LSP.diagnostics()
const file = diagnostics[normalized] const file = diagnostics[normalized]
return { return {
title: path.relative(app.path.root, normalized),
metadata: { metadata: {
diagnostics, diagnostics,
title: path.relative(app.path.root, normalized),
}, },
output: file?.length output: file?.length ? file.map(LSP.Diagnostic.pretty).join("\n") : "No errors found",
? file.map(LSP.Diagnostic.pretty).join("\n")
: "No errors found",
} }
}, },
}) })


@ -15,9 +15,7 @@ export const LspHoverTool = Tool.define({
}), }),
execute: async (args) => { execute: async (args) => {
const app = App.info() const app = App.info()
const file = path.isAbsolute(args.file) const file = path.isAbsolute(args.file) ? args.file : path.join(app.path.cwd, args.file)
? args.file
: path.join(app.path.cwd, args.file)
await LSP.touchFile(file, true) await LSP.touchFile(file, true)
const result = await LSP.hover({ const result = await LSP.hover({
...args, ...args,
@ -25,14 +23,9 @@ export const LspHoverTool = Tool.define({
}) })
return { return {
title: path.relative(app.path.root, file) + ":" + args.line + ":" + args.character,
metadata: { metadata: {
result, result,
title:
path.relative(app.path.root, file) +
":" +
args.line +
":" +
args.character,
}, },
output: JSON.stringify(result, null, 2), output: JSON.stringify(result, null, 2),
} }


@ -10,9 +10,7 @@ export const MultiEditTool = Tool.define({
description: DESCRIPTION, description: DESCRIPTION,
parameters: z.object({ parameters: z.object({
filePath: z.string().describe("The absolute path to the file to modify"), filePath: z.string().describe("The absolute path to the file to modify"),
edits: z edits: z.array(EditTool.parameters).describe("Array of edit operations to perform sequentially on the file"),
.array(EditTool.parameters)
.describe("Array of edit operations to perform sequentially on the file"),
}), }),
async execute(params, ctx) { async execute(params, ctx) {
const results = [] const results = []
@ -30,9 +28,9 @@ export const MultiEditTool = Tool.define({
} }
const app = App.info() const app = App.info()
return { return {
title: path.relative(app.path.root, params.filePath),
metadata: { metadata: {
results: results.map((r) => r.metadata), results: results.map((r) => r.metadata),
title: path.relative(app.path.root, params.filePath),
}, },
output: results.at(-1)!.output, output: results.at(-1)!.output,
} }


@ -6,9 +6,7 @@ import { FileTime } from "../file/time"
import DESCRIPTION from "./patch.txt" import DESCRIPTION from "./patch.txt"
const PatchParams = z.object({ const PatchParams = z.object({
patchText: z patchText: z.string().describe("The full patch text that describes all changes to be made"),
.string()
.describe("The full patch text that describes all changes to be made"),
}) })
interface Change { interface Change {
@ -42,10 +40,7 @@ function identifyFilesNeeded(patchText: string): string[] {
const files: string[] = [] const files: string[] = []
const lines = patchText.split("\n") const lines = patchText.split("\n")
for (const line of lines) { for (const line of lines) {
if ( if (line.startsWith("*** Update File:") || line.startsWith("*** Delete File:")) {
line.startsWith("*** Update File:") ||
line.startsWith("*** Delete File:")
) {
const filePath = line.split(":", 2)[1]?.trim() const filePath = line.split(":", 2)[1]?.trim()
if (filePath) files.push(filePath) if (filePath) files.push(filePath)
} }
@ -65,10 +60,7 @@ function identifyFilesAdded(patchText: string): string[] {
return files return files
} }
function textToPatch( function textToPatch(patchText: string, _currentFiles: Record<string, string>): [PatchOperation[], number] {
patchText: string,
_currentFiles: Record<string, string>,
): [PatchOperation[], number] {
const operations: PatchOperation[] = [] const operations: PatchOperation[] = []
const lines = patchText.split("\n") const lines = patchText.split("\n")
let i = 0 let i = 0
@ -93,11 +85,7 @@ function textToPatch(
const changes: PatchChange[] = [] const changes: PatchChange[] = []
i++ i++
while ( while (i < lines.length && !lines[i].startsWith("@@") && !lines[i].startsWith("***")) {
i < lines.length &&
!lines[i].startsWith("@@") &&
!lines[i].startsWith("***")
) {
const changeLine = lines[i] const changeLine = lines[i]
if (changeLine.startsWith(" ")) { if (changeLine.startsWith(" ")) {
changes.push({ type: "keep", content: changeLine.substring(1) }) changes.push({ type: "keep", content: changeLine.substring(1) })
@ -151,10 +139,7 @@ function textToPatch(
return [operations, fuzz] return [operations, fuzz]
} }
function patchToCommit( function patchToCommit(operations: PatchOperation[], currentFiles: Record<string, string>): Commit {
operations: PatchOperation[],
currentFiles: Record<string, string>,
): Commit {
const changes: Record<string, Change> = {} const changes: Record<string, Change> = {}
for (const op of operations) { for (const op of operations) {
@ -173,9 +158,7 @@ function patchToCommit(
const lines = originalContent.split("\n") const lines = originalContent.split("\n")
for (const hunk of op.hunks) { for (const hunk of op.hunks) {
const contextIndex = lines.findIndex((line) => const contextIndex = lines.findIndex((line) => line.includes(hunk.contextLine))
line.includes(hunk.contextLine),
)
if (contextIndex === -1) { if (contextIndex === -1) {
throw new Error(`Context line not found: ${hunk.contextLine}`) throw new Error(`Context line not found: ${hunk.contextLine}`)
} }
@ -204,11 +187,7 @@ function patchToCommit(
return { changes } return { changes }
} }
function generateDiff( function generateDiff(oldContent: string, newContent: string, filePath: string): [string, number, number] {
oldContent: string,
newContent: string,
filePath: string,
): [string, number, number] {
// Mock implementation - would need actual diff generation // Mock implementation - would need actual diff generation
const lines1 = oldContent.split("\n") const lines1 = oldContent.split("\n")
const lines2 = newContent.split("\n") const lines2 = newContent.split("\n")
@ -296,9 +275,7 @@ export const PatchTool = Tool.define({
// Process the patch // Process the patch
const [patch, fuzz] = textToPatch(params.patchText, currentFiles) const [patch, fuzz] = textToPatch(params.patchText, currentFiles)
if (fuzz > 3) { if (fuzz > 3) {
throw new Error( throw new Error(`patch contains fuzzy matches (fuzz level: ${fuzz}). Please make your context lines more precise`)
`patch contains fuzzy matches (fuzz level: ${fuzz}). Please make your context lines more precise`,
)
} }
// Convert patch to commit // Convert patch to commit
@ -343,11 +320,7 @@ export const PatchTool = Tool.define({
const newContent = change.new_content || "" const newContent = change.new_content || ""
// Calculate diff statistics // Calculate diff statistics
const [, additions, removals] = generateDiff( const [, additions, removals] = generateDiff(oldContent, newContent, filePath)
oldContent,
newContent,
filePath,
)
totalAdditions += additions totalAdditions += additions
totalRemovals += removals totalRemovals += removals
@ -358,11 +331,11 @@ export const PatchTool = Tool.define({
const output = result const output = result
return { return {
title: `${filesToRead.length} files`,
metadata: { metadata: {
changed: changedFiles, changed: changedFiles,
additions: totalAdditions, additions: totalAdditions,
removals: totalRemovals, removals: totalRemovals,
title: `${filesToRead.length} files`,
}, },
output, output,
} }


@ -16,14 +16,8 @@ export const ReadTool = Tool.define({
description: DESCRIPTION, description: DESCRIPTION,
parameters: z.object({ parameters: z.object({
filePath: z.string().describe("The path to the file to read"), filePath: z.string().describe("The path to the file to read"),
offset: z offset: z.number().describe("The line number to start reading from (0-based)").optional(),
.number() limit: z.number().describe("The number of lines to read (defaults to 2000)").optional(),
.describe("The line number to start reading from (0-based)")
.optional(),
limit: z
.number()
.describe("The number of lines to read (defaults to 2000)")
.optional(),
}), }),
async execute(params, ctx) { async execute(params, ctx) {
let filePath = params.filePath let filePath = params.filePath
@ -40,16 +34,13 @@ export const ReadTool = Tool.define({
const suggestions = dirEntries const suggestions = dirEntries
.filter( .filter(
(entry) => (entry) =>
entry.toLowerCase().includes(base.toLowerCase()) || entry.toLowerCase().includes(base.toLowerCase()) || base.toLowerCase().includes(entry.toLowerCase()),
base.toLowerCase().includes(entry.toLowerCase()),
) )
.map((entry) => path.join(dir, entry)) .map((entry) => path.join(dir, entry))
.slice(0, 3) .slice(0, 3)
if (suggestions.length > 0) { if (suggestions.length > 0) {
throw new Error( throw new Error(`File not found: ${filePath}\n\nDid you mean one of these?\n${suggestions.join("\n")}`)
`File not found: ${filePath}\n\nDid you mean one of these?\n${suggestions.join("\n")}`,
)
} }
throw new Error(`File not found: ${filePath}`) throw new Error(`File not found: ${filePath}`)
@ -57,21 +48,14 @@ export const ReadTool = Tool.define({
const stats = await file.stat() const stats = await file.stat()
if (stats.size > MAX_READ_SIZE) if (stats.size > MAX_READ_SIZE)
throw new Error( throw new Error(`File is too large (${stats.size} bytes). Maximum size is ${MAX_READ_SIZE} bytes`)
`File is too large (${stats.size} bytes). Maximum size is ${MAX_READ_SIZE} bytes`,
)
const limit = params.limit ?? DEFAULT_READ_LIMIT const limit = params.limit ?? DEFAULT_READ_LIMIT
const offset = params.offset || 0 const offset = params.offset || 0
const isImage = isImageFile(filePath) const isImage = isImageFile(filePath)
if (isImage) if (isImage) throw new Error(`This is an image file of type: ${isImage}\nUse a different tool to process images`)
throw new Error(
`This is an image file of type: ${isImage}\nUse a different tool to process images`,
)
const lines = await file.text().then((text) => text.split("\n")) const lines = await file.text().then((text) => text.split("\n"))
const raw = lines.slice(offset, offset + limit).map((line) => { const raw = lines.slice(offset, offset + limit).map((line) => {
return line.length > MAX_LINE_LENGTH return line.length > MAX_LINE_LENGTH ? line.substring(0, MAX_LINE_LENGTH) + "..." : line
? line.substring(0, MAX_LINE_LENGTH) + "..."
: line
}) })
const content = raw.map((line, index) => { const content = raw.map((line, index) => {
return `${(index + offset + 1).toString().padStart(5, "0")}| ${line}` return `${(index + offset + 1).toString().padStart(5, "0")}| ${line}`
@ -82,21 +66,19 @@ export const ReadTool = Tool.define({
output += content.join("\n") output += content.join("\n")
if (lines.length > offset + content.length) { if (lines.length > offset + content.length) {
output += `\n\n(File has more lines. Use 'offset' parameter to read beyond line ${ output += `\n\n(File has more lines. Use 'offset' parameter to read beyond line ${offset + content.length})`
offset + content.length
})`
} }
output += "\n</file>" output += "\n</file>"
// just warms the lsp client // just warms the lsp client
await LSP.touchFile(filePath, false) LSP.touchFile(filePath, false)
FileTime.read(ctx.sessionID, filePath) FileTime.read(ctx.sessionID, filePath)
return { return {
title: path.relative(App.info().path.root, filePath),
output, output,
metadata: { metadata: {
preview, preview,
title: path.relative(App.info().path.root, filePath),
}, },
} }
}, },


@@ -2,7 +2,7 @@ Reads a file from the local filesystem. You can access any file directly by usin
 Assume this tool is able to read all files on the machine. If the User provides a path to a file assume that path is valid. It is okay to read a file that does not exist; an error will be returned.

 Usage:
-- The file_path parameter must be an absolute path, not a relative path
+- The filePath parameter must be an absolute path, not a relative path
 - By default, it reads up to 2000 lines starting from the beginning of the file
 - You can optionally specify a line offset and limit (especially handy for long files), but it's recommended to read the whole file by not providing these parameters
 - Any lines longer than 2000 characters will be truncated


@ -3,41 +3,36 @@ import DESCRIPTION from "./task.txt"
import { z } from "zod" import { z } from "zod"
import { Session } from "../session" import { Session } from "../session"
import { Bus } from "../bus" import { Bus } from "../bus"
import { Message } from "../session/message" import { MessageV2 } from "../session/message-v2"
export const TaskTool = Tool.define({ export const TaskTool = Tool.define({
id: "task", id: "task",
description: DESCRIPTION, description: DESCRIPTION,
parameters: z.object({ parameters: z.object({
description: z description: z.string().describe("A short (3-5 words) description of the task"),
.string()
.describe("A short (3-5 words) description of the task"),
prompt: z.string().describe("The task for the agent to perform"), prompt: z.string().describe("The task for the agent to perform"),
}), }),
async execute(params, ctx) { async execute(params, ctx) {
const session = await Session.create(ctx.sessionID) const session = await Session.create(ctx.sessionID)
const msg = await Session.getMessage(ctx.sessionID, ctx.messageID) const msg = (await Session.getMessage(ctx.sessionID, ctx.messageID)) as MessageV2.Assistant
const metadata = msg.metadata.assistant!
function summary(input: Message.Info) { function summary(input: MessageV2.Info) {
const result = [] const result = []
for (const part of input.parts) { for (const part of input.parts) {
if (part.type === "tool-invocation") { if (part.type === "tool" && part.state.status === "completed") {
result.push({ result.push(part)
toolInvocation: part.toolInvocation,
metadata: input.metadata.tool[part.toolInvocation.toolCallId],
})
} }
} }
return result return result
} }
const unsub = Bus.subscribe(Message.Event.Updated, async (evt) => { const unsub = Bus.subscribe(MessageV2.Event.Updated, async (evt) => {
if (evt.properties.info.metadata.sessionID !== session.id) return if (evt.properties.info.sessionID !== session.id) return
ctx.metadata({ ctx.metadata({
title: params.description, title: params.description,
summary: summary(evt.properties.info), metadata: {
summary: summary(evt.properties.info),
},
}) })
}) })
@ -46,8 +41,8 @@ export const TaskTool = Tool.define({
}) })
const result = await Session.chat({ const result = await Session.chat({
sessionID: session.id, sessionID: session.id,
modelID: metadata.modelID, modelID: msg.modelID,
providerID: metadata.providerID, providerID: msg.providerID,
parts: [ parts: [
{ {
type: "text", type: "text",
@ -57,8 +52,8 @@ export const TaskTool = Tool.define({
}) })
unsub() unsub()
return { return {
title: params.description,
metadata: { metadata: {
title: params.description,
summary: summary(result), summary: summary(result),
}, },
output: result.parts.findLast((x) => x.type === "text")!.text, output: result.parts.findLast((x) => x.type === "text")!.text,


@ -5,12 +5,8 @@ import { App } from "../app/app"
const TodoInfo = z.object({ const TodoInfo = z.object({
content: z.string().min(1).describe("Brief description of the task"), content: z.string().min(1).describe("Brief description of the task"),
status: z status: z.enum(["pending", "in_progress", "completed", "cancelled"]).describe("Current status of the task"),
.enum(["pending", "in_progress", "completed"]) priority: z.enum(["high", "medium", "low"]).describe("Priority level of the task"),
.describe("Current status of the task"),
priority: z
.enum(["high", "medium", "low"])
.describe("Priority level of the task"),
id: z.string().describe("Unique identifier for the todo item"), id: z.string().describe("Unique identifier for the todo item"),
}) })
type TodoInfo = z.infer<typeof TodoInfo> type TodoInfo = z.infer<typeof TodoInfo>
@ -32,9 +28,9 @@ export const TodoWriteTool = Tool.define({
const todos = state() const todos = state()
todos[opts.sessionID] = params.todos todos[opts.sessionID] = params.todos
return { return {
title: `${params.todos.filter((x) => x.status !== "completed").length} todos`,
output: JSON.stringify(params.todos, null, 2), output: JSON.stringify(params.todos, null, 2),
metadata: { metadata: {
title: `${params.todos.filter((x) => x.status !== "completed").length} todos`,
todos: params.todos, todos: params.todos,
}, },
} }
@ -48,9 +44,9 @@ export const TodoReadTool = Tool.define({
async execute(_params, opts) { async execute(_params, opts) {
const todos = state()[opts.sessionID] ?? [] const todos = state()[opts.sessionID] ?? []
return { return {
title: `${todos.filter((x) => x.status !== "completed").length} todos`,
metadata: { metadata: {
todos, todos,
title: `${todos.filter((x) => x.status !== "completed").length} todos`,
}, },
output: JSON.stringify(todos, null, 2), output: JSON.stringify(todos, null, 2),
} }


@@ -2,19 +2,15 @@ import type { StandardSchemaV1 } from "@standard-schema/spec"
 export namespace Tool {
   interface Metadata {
-    title: string
     [key: string]: any
   }
   export type Context<M extends Metadata = Metadata> = {
     sessionID: string
     messageID: string
     abort: AbortSignal
-    metadata(meta: M): void
+    metadata(input: { title?: string; metadata?: M }): void
   }
-  export interface Info<
-    Parameters extends StandardSchemaV1 = StandardSchemaV1,
-    M extends Metadata = Metadata,
-  > {
+  export interface Info<Parameters extends StandardSchemaV1 = StandardSchemaV1, M extends Metadata = Metadata> {
     id: string
     description: string
     parameters: Parameters
@@ -22,15 +18,15 @@ export namespace Tool {
       args: StandardSchemaV1.InferOutput<Parameters>,
       ctx: Context,
     ): Promise<{
+      title: string
       metadata: M
       output: string
     }>
   }
-  export function define<
-    Parameters extends StandardSchemaV1,
-    Result extends Metadata,
-  >(input: Info<Parameters, Result>): Info<Parameters, Result> {
+  export function define<Parameters extends StandardSchemaV1, Result extends Metadata>(
+    input: Info<Parameters, Result>,
+  ): Info<Parameters, Result> {
     return input
   }
 }
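The net effect of the Tool changes above (and of the matching edits across the individual tool files) is that `title` moves from `metadata` to a top-level field on both the tool result and the `ctx.metadata` callback. A hedged sketch of a tool written against the new contract; the id, schema, and import path are illustrative:

```ts
// Sketch only: a tool returning the new top-level title field.
import z from "zod"
import { Tool } from "./tool" // import path is an assumption

export const EchoTool = Tool.define({
  id: "echo",
  description: "Echo the input back",
  parameters: z.object({ text: z.string().describe("Text to echo") }),
  async execute(params, ctx) {
    // Progress updates also carry title separately from metadata now.
    ctx.metadata({ title: params.text, metadata: { length: params.text.length } })
    return {
      title: params.text,
      metadata: { length: params.text.length },
      output: params.text,
    }
  },
})
```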


@ -14,9 +14,7 @@ export const WebFetchTool = Tool.define({
url: z.string().describe("The URL to fetch content from"), url: z.string().describe("The URL to fetch content from"),
format: z format: z
.enum(["text", "markdown", "html"]) .enum(["text", "markdown", "html"])
.describe( .describe("The format to return the content in (text, markdown, or html)"),
"The format to return the content in (text, markdown, or html)",
),
timeout: z timeout: z
.number() .number()
.min(0) .min(0)
@ -26,17 +24,11 @@ export const WebFetchTool = Tool.define({
}), }),
async execute(params, ctx) { async execute(params, ctx) {
// Validate URL // Validate URL
if ( if (!params.url.startsWith("http://") && !params.url.startsWith("https://")) {
!params.url.startsWith("http://") &&
!params.url.startsWith("https://")
) {
throw new Error("URL must start with http:// or https://") throw new Error("URL must start with http:// or https://")
} }
const timeout = Math.min( const timeout = Math.min((params.timeout ?? DEFAULT_TIMEOUT / 1000) * 1000, MAX_TIMEOUT)
(params.timeout ?? DEFAULT_TIMEOUT / 1000) * 1000,
MAX_TIMEOUT,
)
const controller = new AbortController() const controller = new AbortController()
const timeoutId = setTimeout(() => controller.abort(), timeout) const timeoutId = setTimeout(() => controller.abort(), timeout)
@ -46,8 +38,7 @@ export const WebFetchTool = Tool.define({
headers: { headers: {
"User-Agent": "User-Agent":
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
Accept: Accept: "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8",
"text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8",
"Accept-Language": "en-US,en;q=0.9", "Accept-Language": "en-US,en;q=0.9",
}, },
}) })
@ -79,16 +70,14 @@ export const WebFetchTool = Tool.define({
const text = await extractTextFromHTML(content) const text = await extractTextFromHTML(content)
return { return {
output: text, output: text,
metadata: { title,
title, metadata: {},
},
} }
} }
return { return {
output: content, output: content,
metadata: { title,
title, metadata: {},
},
} }
case "markdown": case "markdown":
@ -96,32 +85,28 @@ export const WebFetchTool = Tool.define({
const markdown = convertHTMLToMarkdown(content) const markdown = convertHTMLToMarkdown(content)
return { return {
output: markdown, output: markdown,
metadata: { title,
title, metadata: {},
},
} }
} }
return { return {
output: "```\n" + content + "\n```", output: "```\n" + content + "\n```",
metadata: { title,
title, metadata: {},
},
} }
case "html": case "html":
return { return {
output: content, output: content,
metadata: { title,
title, metadata: {},
},
} }
default: default:
return { return {
output: content, output: content,
metadata: { title,
title, metadata: {},
},
} }
} }
}, },
@ -143,16 +128,7 @@ async function extractTextFromHTML(html: string) {
.on("*", { .on("*", {
element(element) { element(element) {
// Reset skip flag when entering other elements // Reset skip flag when entering other elements
if ( if (!["script", "style", "noscript", "iframe", "object", "embed"].includes(element.tagName)) {
![
"script",
"style",
"noscript",
"iframe",
"object",
"embed",
].includes(element.tagName)
) {
skipContent = false skipContent = false
} }
}, },


@ -13,18 +13,12 @@ export const WriteTool = Tool.define({
id: "write", id: "write",
description: DESCRIPTION, description: DESCRIPTION,
parameters: z.object({ parameters: z.object({
filePath: z filePath: z.string().describe("The absolute path to the file to write (must be absolute, not relative)"),
.string()
.describe(
"The absolute path to the file to write (must be absolute, not relative)",
),
content: z.string().describe("The content to write to the file"), content: z.string().describe("The content to write to the file"),
}), }),
async execute(params, ctx) { async execute(params, ctx) {
const app = App.info() const app = App.info()
const filepath = path.isAbsolute(params.filePath) const filepath = path.isAbsolute(params.filePath) ? params.filePath : path.join(app.path.cwd, params.filePath)
? params.filePath
: path.join(app.path.cwd, params.filePath)
const file = Bun.file(filepath) const file = Bun.file(filepath)
const exists = await file.exists() const exists = await file.exists()
@ -33,9 +27,7 @@ export const WriteTool = Tool.define({
await Permission.ask({ await Permission.ask({
id: "write", id: "write",
sessionID: ctx.sessionID, sessionID: ctx.sessionID,
title: exists title: exists ? "Overwrite this file: " + filepath : "Create new file: " + filepath,
? "Overwrite this file: " + filepath
: "Create new file: " + filepath,
metadata: { metadata: {
filePath: filepath, filePath: filepath,
content: params.content, content: params.content,
@ -62,11 +54,11 @@ export const WriteTool = Tool.define({
} }
return { return {
title: path.relative(app.path.root, filepath),
metadata: { metadata: {
diagnostics, diagnostics,
filepath, filepath,
exists: exists, exists: exists,
title: path.relative(app.path.root, filepath),
}, },
output, output,
} }


@ -7,10 +7,7 @@ export abstract class NamedError extends Error {
abstract schema(): ZodSchema abstract schema(): ZodSchema
abstract toObject(): { name: string; data: any } abstract toObject(): { name: string; data: any }
static create<Name extends string, Data extends ZodSchema>( static create<Name extends string, Data extends ZodSchema>(name: Name, data: Data) {
name: Name,
data: Data,
) {
const schema = z const schema = z
.object({ .object({
name: z.literal(name), name: z.literal(name),


@@ -1,7 +1,17 @@
 import { exists } from "fs/promises"
-import { dirname, join } from "path"
+import { dirname, join, relative } from "path"

 export namespace Filesystem {
+  export function overlaps(a: string, b: string) {
+    const relA = relative(a, b)
+    const relB = relative(b, a)
+    return !relA || !relA.startsWith("..") || !relB || !relB.startsWith("..")
+  }
+
+  export function contains(parent: string, child: string) {
+    return relative(parent, child).startsWith("..")
+  }
+
   export async function findUp(target: string, start: string, stop?: string) {
     let current = start
     const result = []
@@ -16,6 +26,21 @@ export namespace Filesystem {
     return result
   }

+  export async function* up(options: { targets: string[]; start: string; stop?: string }) {
+    const { targets, start, stop } = options
+    let current = start
+    while (true) {
+      for (const target of targets) {
+        const search = join(current, target)
+        if (await exists(search)) yield search
+      }
+      if (stop === current) break
+      const parent = dirname(current)
+      if (parent === current) break
+      current = parent
+    }
+  }
+
   export async function globUp(pattern: string, start: string, stop?: string) {
     let current = start
     const result = []
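The new helpers above are small path utilities: `overlaps` reports whether either directory contains the other, and `up` walks from a start directory toward the root (or a stop directory), yielding each matching target it finds along the way. A hypothetical usage sketch, with the import path and directories as assumptions:

```ts
// Sketch only: using the Filesystem helpers added above.
import { Filesystem } from "./util/filesystem" // import path is an assumption

// true: one path is an ancestor of the other
console.log(Filesystem.overlaps("/home/user/project", "/home/user/project/src"))

// Walk upward, yielding every package.json or .git encountered until /home/user.
for await (const found of Filesystem.up({
  targets: ["package.json", ".git"],
  start: "/home/user/project/packages/app",
  stop: "/home/user",
})) {
  console.log(found)
}
```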


@ -1,15 +1,59 @@
import path from "path" import path from "path"
import fs from "fs/promises" import fs from "fs/promises"
import { Global } from "../global" import { Global } from "../global"
import z from "zod"
export namespace Log { export namespace Log {
export const Level = z.enum(["DEBUG", "INFO", "WARN", "ERROR"]).openapi({ ref: "LogLevel", description: "Log level" })
export type Level = z.infer<typeof Level>
const levelPriority: Record<Level, number> = {
DEBUG: 0,
INFO: 1,
WARN: 2,
ERROR: 3,
}
let currentLevel: Level = "INFO"
export function setLevel(level: Level) {
currentLevel = level
}
export function getLevel(): Level {
return currentLevel
}
function shouldLog(level: Level): boolean {
return levelPriority[level] >= levelPriority[currentLevel]
}
export type Logger = {
debug(message?: any, extra?: Record<string, any>): void
info(message?: any, extra?: Record<string, any>): void
error(message?: any, extra?: Record<string, any>): void
warn(message?: any, extra?: Record<string, any>): void
tag(key: string, value: string): Logger
clone(): Logger
time(
message: string,
extra?: Record<string, any>,
): {
stop(): void
[Symbol.dispose](): void
}
}
const loggers = new Map<string, Logger>()
export const Default = create({ service: "default" }) export const Default = create({ service: "default" })
export interface Options { export interface Options {
print: boolean print: boolean
level?: Level
} }
let logpath = "" let logpath = ""
export function file() { export function file() {
return logpath return logpath
} }
@ -19,10 +63,7 @@ export namespace Log {
await fs.mkdir(dir, { recursive: true }) await fs.mkdir(dir, { recursive: true })
cleanup(dir) cleanup(dir)
if (options.print) return if (options.print) return
logpath = path.join( logpath = path.join(dir, new Date().toISOString().split(".")[0].replace(/:/g, "") + ".log")
dir,
new Date().toISOString().split(".")[0].replace(/:/g, "") + ".log",
)
const logfile = Bun.file(logpath) const logfile = Bun.file(logpath)
await fs.truncate(logpath).catch(() => {}) await fs.truncate(logpath).catch(() => {})
const writer = logfile.writer() const writer = logfile.writer()
@ -43,15 +84,21 @@ export namespace Log {
const filesToDelete = files.slice(0, -10) const filesToDelete = files.slice(0, -10)
await Promise.all( await Promise.all(filesToDelete.map((file) => fs.unlink(file).catch(() => {})))
filesToDelete.map((file) => fs.unlink(file).catch(() => {})),
)
} }
let last = Date.now() let last = Date.now()
export function create(tags?: Record<string, any>) { export function create(tags?: Record<string, any>) {
tags = tags || {} tags = tags || {}
const service = tags["service"]
if (service && typeof service === "string") {
const cached = loggers.get(service)
if (cached) {
return cached
}
}
function build(message: any, extra?: Record<string, any>) { function build(message: any, extra?: Record<string, any>) {
const prefix = Object.entries({ const prefix = Object.entries({
...tags, ...tags,
@ -63,21 +110,28 @@ export namespace Log {
const next = new Date() const next = new Date()
const diff = next.getTime() - last const diff = next.getTime() - last
last = next.getTime() last = next.getTime()
return ( return [next.toISOString().split(".")[0], "+" + diff + "ms", prefix, message].filter(Boolean).join(" ") + "\n"
[next.toISOString().split(".")[0], "+" + diff + "ms", prefix, message]
.filter(Boolean)
.join(" ") + "\n"
)
} }
const result = { const result: Logger = {
debug(message?: any, extra?: Record<string, any>) {
if (shouldLog("DEBUG")) {
process.stderr.write("DEBUG " + build(message, extra))
}
},
info(message?: any, extra?: Record<string, any>) { info(message?: any, extra?: Record<string, any>) {
process.stderr.write("INFO " + build(message, extra)) if (shouldLog("INFO")) {
process.stderr.write("INFO " + build(message, extra))
}
}, },
error(message?: any, extra?: Record<string, any>) { error(message?: any, extra?: Record<string, any>) {
process.stderr.write("ERROR " + build(message, extra)) if (shouldLog("ERROR")) {
process.stderr.write("ERROR " + build(message, extra))
}
}, },
warn(message?: any, extra?: Record<string, any>) { warn(message?: any, extra?: Record<string, any>) {
process.stderr.write("WARN " + build(message, extra)) if (shouldLog("WARN")) {
process.stderr.write("WARN " + build(message, extra))
}
}, },
tag(key: string, value: string) { tag(key: string, value: string) {
if (tags) tags[key] = value if (tags) tags[key] = value
@ -105,6 +159,10 @@ export namespace Log {
}, },
} }
if (service && typeof service === "string") {
loggers.set(service, result)
}
return result return result
} }
} }
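The Log rewrite above adds level filtering (messages below the current level are dropped) and caches one logger instance per `service` tag. A brief usage sketch; the import path is an assumption:

```ts
// Sketch only: leveled, cached loggers as introduced above.
import { Log } from "./util/log" // import path is an assumption

Log.setLevel("WARN")

const log = Log.create({ service: "example" })
log.debug("dropped: below the current level")
log.warn("written to stderr", { detail: 1 })

// create() with the same service tag returns the cached logger instance.
const again = Log.create({ service: "example" })
console.log(again === log) // true
```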


@ -17,12 +17,7 @@ const testCases: TestCase[] = [
replace: 'console.log("universe");', replace: 'console.log("universe");',
}, },
{ {
content: [ content: ["if (condition) {", " doSomething();", " doSomethingElse();", "}"].join("\n"),
"if (condition) {",
" doSomething();",
" doSomethingElse();",
"}",
].join("\n"),
find: [" doSomething();", " doSomethingElse();"].join("\n"), find: [" doSomething();", " doSomethingElse();"].join("\n"),
replace: [" doNewThing();", " doAnotherThing();"].join("\n"), replace: [" doNewThing();", " doAnotherThing();"].join("\n"),
}, },
@ -53,15 +48,8 @@ const testCases: TestCase[] = [
" return result;", " return result;",
"}", "}",
].join("\n"), ].join("\n"),
find: [ find: ["function calculate(a, b) {", " // different middle content", " return result;", "}"].join("\n"),
"function calculate(a, b) {", replace: ["function calculate(a, b) {", " return a * b * 2;", "}"].join("\n"),
" // different middle content",
" return result;",
"}",
].join("\n"),
replace: ["function calculate(a, b) {", " return a * b * 2;", "}"].join(
"\n",
),
}, },
{ {
content: [ content: [
@ -76,13 +64,7 @@ const testCases: TestCase[] = [
"}", "}",
].join("\n"), ].join("\n"),
find: ["class MyClass {", " // different implementation", "}"].join("\n"), find: ["class MyClass {", " // different implementation", "}"].join("\n"),
replace: [ replace: ["class MyClass {", " constructor() {", " this.value = 42;", " }", "}"].join("\n"),
"class MyClass {",
" constructor() {",
" this.value = 42;",
" }",
"}",
].join("\n"),
}, },
// WhitespaceNormalizedReplacer cases // WhitespaceNormalizedReplacer cases
@ -104,48 +86,21 @@ const testCases: TestCase[] = [
// IndentationFlexibleReplacer cases // IndentationFlexibleReplacer cases
{ {
content: [ content: [" function nested() {", ' console.log("deeply nested");', " return true;", " }"].join(
" function nested() {", "\n",
' console.log("deeply nested");', ),
" return true;", find: ["function nested() {", ' console.log("deeply nested");', " return true;", "}"].join("\n"),
" }", replace: ["function nested() {", ' console.log("updated");', " return false;", "}"].join("\n"),
].join("\n"),
find: [
"function nested() {",
' console.log("deeply nested");',
" return true;",
"}",
].join("\n"),
replace: [
"function nested() {",
' console.log("updated");',
" return false;",
"}",
].join("\n"),
}, },
{ {
content: [ content: [" if (true) {", ' console.log("level 1");', ' console.log("level 2");', " }"].join("\n"),
" if (true) {", find: ["if (true) {", 'console.log("level 1");', ' console.log("level 2");', "}"].join("\n"),
' console.log("level 1");',
' console.log("level 2");',
" }",
].join("\n"),
find: [
"if (true) {",
'console.log("level 1");',
' console.log("level 2");',
"}",
].join("\n"),
replace: ["if (true) {", 'console.log("updated");', "}"].join("\n"), replace: ["if (true) {", 'console.log("updated");', "}"].join("\n"),
}, },
// replaceAll option cases // replaceAll option cases
{ {
content: [ content: ['console.log("test");', 'console.log("test");', 'console.log("test");'].join("\n"),
'console.log("test");',
'console.log("test");',
'console.log("test");',
].join("\n"),
find: 'console.log("test");', find: 'console.log("test");',
replace: 'console.log("updated");', replace: 'console.log("updated");',
all: true, all: true,
@ -213,9 +168,7 @@ const testCases: TestCase[] = [
// MultiOccurrenceReplacer cases (with replaceAll) // MultiOccurrenceReplacer cases (with replaceAll)
{ {
content: ["debug('start');", "debug('middle');", "debug('end');"].join( content: ["debug('start');", "debug('middle');", "debug('end');"].join("\n"),
"\n",
),
find: "debug", find: "debug",
replace: "log", replace: "log",
all: true, all: true,
@ -239,9 +192,7 @@ const testCases: TestCase[] = [
replace: "const value = 24;", replace: "const value = 24;",
}, },
{ {
content: ["", " if (condition) {", " doSomething();", " }", ""].join( content: ["", " if (condition) {", " doSomething();", " }", ""].join("\n"),
"\n",
),
find: ["if (condition) {", " doSomething();", "}"].join("\n"), find: ["if (condition) {", " doSomething();", "}"].join("\n"),
replace: ["if (condition) {", " doNothing();", "}"].join("\n"), replace: ["if (condition) {", " doNothing();", "}"].join("\n"),
}, },
@ -262,9 +213,7 @@ const testCases: TestCase[] = [
" return result;", " return result;",
"}", "}",
].join("\n"), ].join("\n"),
replace: ["function calculate(a, b) {", " return (a + b) * 2;", "}"].join( replace: ["function calculate(a, b) {", " return (a + b) * 2;", "}"].join("\n"),
"\n",
),
}, },
{ {
content: [ content: [
@ -278,15 +227,8 @@ const testCases: TestCase[] = [
" }", " }",
"}", "}",
].join("\n"), ].join("\n"),
find: [ find: ["class TestClass {", " // different implementation", " // with multiple lines", "}"].join("\n"),
"class TestClass {", replace: ["class TestClass {", " getValue() { return 42; }", "}"].join("\n"),
" // different implementation",
" // with multiple lines",
"}",
].join("\n"),
replace: ["class TestClass {", " getValue() { return 42; }", "}"].join(
"\n",
),
}, },
// Combined edge cases for new replacers // Combined edge cases for new replacers
@ -296,9 +238,7 @@ const testCases: TestCase[] = [
replace: 'console.log("updated");', replace: 'console.log("updated");',
}, },
{ {
content: [" ", "function test() {", " return 'value';", "}", " "].join( content: [" ", "function test() {", " return 'value';", "}", " "].join("\n"),
"\n",
),
find: ["function test() {", "return 'value';", "}"].join("\n"), find: ["function test() {", "return 'value';", "}"].join("\n"),
replace: ["function test() {", "return 'new value';", "}"].join("\n"), replace: ["function test() {", "return 'new value';", "}"].join("\n"),
}, },
@ -346,13 +286,7 @@ const testCases: TestCase[] = [
// ContextAwareReplacer - test with trailing newline in find string // ContextAwareReplacer - test with trailing newline in find string
{ {
content: [ content: ["class Test {", " method1() {", " return 1;", " }", "}"].join("\n"),
"class Test {",
" method1() {",
" return 1;",
" }",
"}",
].join("\n"),
find: [ find: [
"class Test {", "class Test {",
" // different content", " // different content",
@ -401,12 +335,7 @@ describe("EditTool Replacers", () => {
replace(testCase.content, testCase.find, testCase.replace, testCase.all) replace(testCase.content, testCase.find, testCase.replace, testCase.all)
}).toThrow() }).toThrow()
} else { } else {
const result = replace( const result = replace(testCase.content, testCase.find, testCase.replace, testCase.all)
testCase.content,
testCase.find,
testCase.replace,
testCase.all,
)
expect(result).toContain(testCase.replace) expect(result).toContain(testCase.replace)
} }
}) })


@ -42,10 +42,7 @@ describe("tool.glob", () => {
describe("tool.ls", () => { describe("tool.ls", () => {
test("basic", async () => { test("basic", async () => {
const result = await App.provide({ cwd: process.cwd() }, async () => { const result = await App.provide({ cwd: process.cwd() }, async () => {
- return await ListTool.execute(
- { path: "./example", ignore: [".git"] },
- ctx,
- )
+ return await ListTool.execute({ path: "./example", ignore: [".git"] }, ctx)
}) })
expect(result.output).toMatchSnapshot() expect(result.output).toMatchSnapshot()
}) })


@ -5,14 +5,18 @@ import (
"encoding/json" "encoding/json"
"log/slog" "log/slog"
"os" "os"
"path/filepath" "os/signal"
"strings" "strings"
"syscall"
tea "github.com/charmbracelet/bubbletea/v2" tea "github.com/charmbracelet/bubbletea/v2"
flag "github.com/spf13/pflag"
"github.com/sst/opencode-sdk-go" "github.com/sst/opencode-sdk-go"
"github.com/sst/opencode-sdk-go/option" "github.com/sst/opencode-sdk-go/option"
"github.com/sst/opencode/internal/app" "github.com/sst/opencode/internal/app"
"github.com/sst/opencode/internal/clipboard"
"github.com/sst/opencode/internal/tui" "github.com/sst/opencode/internal/tui"
"github.com/sst/opencode/internal/util"
) )
var Version = "dev" var Version = "dev"
@ -23,6 +27,11 @@ func main() {
version = "v" + Version version = "v" + Version
} }
var model *string = flag.String("model", "", "model to begin with")
var prompt *string = flag.String("prompt", "", "prompt to begin with")
var mode *string = flag.String("mode", "", "mode to begin with")
flag.Parse()
url := os.Getenv("OPENCODE_SERVER") url := os.Getenv("OPENCODE_SERVER")
appInfoStr := os.Getenv("OPENCODE_APP_INFO") appInfoStr := os.Getenv("OPENCODE_APP_INFO")
@ -33,39 +42,36 @@ func main() {
  os.Exit(1)
  }
- logfile := filepath.Join(appInfo.Path.Data, "log", "tui.log")
- if _, err := os.Stat(filepath.Dir(logfile)); os.IsNotExist(err) {
- err := os.MkdirAll(filepath.Dir(logfile), 0755)
- if err != nil {
- slog.Error("Failed to create log directory", "error", err)
- os.Exit(1)
- }
- }
- file, err := os.Create(logfile)
+ modesStr := os.Getenv("OPENCODE_MODES")
+ var modes []opencode.Mode
+ err = json.Unmarshal([]byte(modesStr), &modes)
  if err != nil {
- slog.Error("Failed to create log file", "error", err)
+ slog.Error("Failed to unmarshal modes", "error", err)
  os.Exit(1)
  }
- defer file.Close()
- logger := slog.New(slog.NewTextHandler(file, &slog.HandlerOptions{Level: slog.LevelDebug}))
- slog.SetDefault(logger)
- slog.Debug("TUI launched", "app", appInfo)
  httpClient := opencode.NewClient(
  option.WithBaseURL(url),
  )
- if err != nil {
- slog.Error("Failed to create client", "error", err)
- os.Exit(1)
- }
+ apiHandler := util.NewAPILogHandler(httpClient, "tui", slog.LevelDebug)
+ logger := slog.New(apiHandler)
+ slog.SetDefault(logger)
+ slog.Debug("TUI launched", "app", appInfoStr, "modes", modesStr)
+ go func() {
+ err = clipboard.Init()
+ if err != nil {
+ slog.Error("Failed to initialize clipboard", "error", err)
+ }
+ }()
  // Create main context for the application
  ctx, cancel := context.WithCancel(context.Background())
  defer cancel()
- app_, err := app.New(ctx, version, appInfo, httpClient)
+ app_, err := app.New(ctx, version, appInfo, modes, httpClient, model, prompt, mode)
  if err != nil {
  panic(err)
  }
@ -73,10 +79,14 @@ func main() {
program := tea.NewProgram( program := tea.NewProgram(
tui.NewModel(app_), tui.NewModel(app_),
tea.WithAltScreen(), tea.WithAltScreen(),
- tea.WithKeyboardEnhancements(),
+ // tea.WithKeyboardEnhancements(),
tea.WithMouseCellMotion(), tea.WithMouseCellMotion(),
) )
// Set up signal handling for graceful shutdown
sigChan := make(chan os.Signal, 1)
signal.Notify(sigChan, syscall.SIGTERM, syscall.SIGINT)
go func() { go func() {
stream := httpClient.Event.ListStreaming(ctx) stream := httpClient.Event.ListStreaming(ctx)
for stream.Next() { for stream.Next() {
@ -89,6 +99,13 @@ func main() {
} }
}() }()
// Handle signals in a separate goroutine
go func() {
sig := <-sigChan
slog.Info("Received signal, shutting down gracefully", "signal", sig)
program.Quit()
}()
// Run the TUI // Run the TUI
result, err := program.Run() result, err := program.Run()
if err != nil { if err != nil {
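The signal handling added above follows the usual Go pattern: register a channel for SIGTERM/SIGINT and trigger a clean shutdown from a goroutine instead of letting the process be killed mid-render. A minimal standalone sketch of the same idea, with a context cancellation standing in for program.Quit() (illustrative only, not opencode code):

package main

import (
	"context"
	"log/slog"
	"os"
	"os/signal"
	"syscall"
)

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	// Register for the same signals the TUI handles.
	sigChan := make(chan os.Signal, 1)
	signal.Notify(sigChan, syscall.SIGTERM, syscall.SIGINT)

	go func() {
		sig := <-sigChan
		slog.Info("received signal, shutting down gracefully", "signal", sig)
		cancel() // in the TUI this is where program.Quit() is called
	}()

	<-ctx.Done() // stand-in for program.Run()
	slog.Info("clean exit")
}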


@ -6,21 +6,26 @@ require (
github.com/BurntSushi/toml v1.5.0 github.com/BurntSushi/toml v1.5.0
github.com/alecthomas/chroma/v2 v2.18.0 github.com/alecthomas/chroma/v2 v2.18.0
github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1 github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1
github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.3 github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4
github.com/charmbracelet/glamour v0.10.0 github.com/charmbracelet/glamour v0.10.0
github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.1 github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3
github.com/charmbracelet/x/ansi v0.8.0 github.com/charmbracelet/x/ansi v0.9.3
github.com/charmbracelet/x/input v0.3.7
github.com/google/uuid v1.6.0
github.com/lithammer/fuzzysearch v1.1.8 github.com/lithammer/fuzzysearch v1.1.8
github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6
github.com/muesli/reflow v0.3.0 github.com/muesli/reflow v0.3.0
github.com/muesli/termenv v0.16.0 github.com/muesli/termenv v0.16.0
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3
github.com/sst/opencode-sdk-go v0.1.0-alpha.8 github.com/sst/opencode-sdk-go v0.1.0-alpha.8
github.com/tidwall/gjson v1.14.4 golang.org/x/image v0.28.0
rsc.io/qr v0.2.0 rsc.io/qr v0.2.0
) )
- replace github.com/sst/opencode-sdk-go => ./sdk
+ replace (
+ github.com/charmbracelet/x/input => ./input
+ github.com/sst/opencode-sdk-go => ./sdk
+ )
require golang.org/x/exp v0.0.0-20250305212735-054e65f0b394 // indirect require golang.org/x/exp v0.0.0-20250305212735-054e65f0b394 // indirect
@ -29,7 +34,6 @@ require (
github.com/atombender/go-jsonschema v0.20.0 // indirect github.com/atombender/go-jsonschema v0.20.0 // indirect
github.com/charmbracelet/lipgloss v1.1.1-0.20250404203927-76690c660834 // indirect github.com/charmbracelet/lipgloss v1.1.1-0.20250404203927-76690c660834 // indirect
github.com/charmbracelet/x/exp/slice v0.0.0-20250327172914-2fdc97757edf // indirect github.com/charmbracelet/x/exp/slice v0.0.0-20250327172914-2fdc97757edf // indirect
github.com/charmbracelet/x/input v0.3.5-0.20250424101541-abb4d9a9b197 // indirect
github.com/charmbracelet/x/windows v0.2.1 // indirect github.com/charmbracelet/x/windows v0.2.1 // indirect
github.com/dprotaso/go-yit v0.0.0-20220510233725-9ba8df137936 // indirect github.com/dprotaso/go-yit v0.0.0-20220510233725-9ba8df137936 // indirect
github.com/fsnotify/fsnotify v1.8.0 // indirect github.com/fsnotify/fsnotify v1.8.0 // indirect
@ -49,23 +53,23 @@ require (
github.com/sosodev/duration v1.3.1 // indirect github.com/sosodev/duration v1.3.1 // indirect
github.com/speakeasy-api/openapi-overlay v0.9.0 // indirect github.com/speakeasy-api/openapi-overlay v0.9.0 // indirect
github.com/spf13/cobra v1.9.1 // indirect github.com/spf13/cobra v1.9.1 // indirect
github.com/tidwall/gjson v1.14.4 // indirect
github.com/tidwall/match v1.1.1 // indirect github.com/tidwall/match v1.1.1 // indirect
github.com/tidwall/pretty v1.2.1 // indirect github.com/tidwall/pretty v1.2.1 // indirect
github.com/tidwall/sjson v1.2.5 // indirect github.com/tidwall/sjson v1.2.5 // indirect
github.com/vmware-labs/yaml-jsonpath v0.3.2 // indirect github.com/vmware-labs/yaml-jsonpath v0.3.2 // indirect
golang.org/x/mod v0.24.0 // indirect golang.org/x/mod v0.25.0 // indirect
golang.org/x/tools v0.31.0 // indirect golang.org/x/tools v0.34.0 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect
) )
require ( require (
github.com/atotto/clipboard v0.1.4 github.com/atotto/clipboard v0.1.4 // indirect
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
github.com/aymerick/douceur v0.2.0 // indirect github.com/aymerick/douceur v0.2.0 // indirect
github.com/charmbracelet/colorprofile v0.3.1 // indirect github.com/charmbracelet/colorprofile v0.3.1 // indirect
github.com/charmbracelet/x/cellbuf v0.0.14-0.20250501183327-ad3bc78c6a81 // indirect github.com/charmbracelet/x/cellbuf v0.0.14-0.20250505150409-97991a1f17d1 // indirect
github.com/charmbracelet/x/term v0.2.1 // indirect github.com/charmbracelet/x/term v0.2.1 // indirect
github.com/disintegration/imaging v1.6.2
github.com/dlclark/regexp2 v1.11.5 // indirect github.com/dlclark/regexp2 v1.11.5 // indirect
github.com/google/go-cmp v0.7.0 // indirect github.com/google/go-cmp v0.7.0 // indirect
github.com/gorilla/css v1.0.1 // indirect github.com/gorilla/css v1.0.1 // indirect
@ -77,16 +81,15 @@ require (
github.com/muesli/cancelreader v0.2.2 // indirect github.com/muesli/cancelreader v0.2.2 // indirect
github.com/rivo/uniseg v0.4.7 github.com/rivo/uniseg v0.4.7
github.com/rogpeppe/go-internal v1.14.1 // indirect github.com/rogpeppe/go-internal v1.14.1 // indirect
github.com/spf13/pflag v1.0.6 // indirect github.com/spf13/pflag v1.0.6
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
github.com/yuin/goldmark v1.7.8 // indirect github.com/yuin/goldmark v1.7.8 // indirect
github.com/yuin/goldmark-emoji v1.0.5 // indirect github.com/yuin/goldmark-emoji v1.0.5 // indirect
golang.org/x/image v0.26.0 golang.org/x/net v0.41.0 // indirect
golang.org/x/net v0.39.0 // indirect golang.org/x/sync v0.15.0 // indirect
golang.org/x/sync v0.13.0 // indirect golang.org/x/sys v0.33.0 // indirect
golang.org/x/sys v0.32.0 // indirect golang.org/x/term v0.32.0 // indirect
golang.org/x/term v0.31.0 // indirect golang.org/x/text v0.26.0
golang.org/x/text v0.24.0
gopkg.in/yaml.v3 v3.0.1 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect
) )


@ -22,26 +22,24 @@ github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuP
github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4= github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4=
github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1 h1:swACzss0FjnyPz1enfX56GKkLiuKg5FlyVmOLIlU2kE= github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1 h1:swACzss0FjnyPz1enfX56GKkLiuKg5FlyVmOLIlU2kE=
github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1/go.mod h1:6HamsBKWqEC/FVHuQMHgQL+knPyvHH55HwJDHl/adMw= github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1/go.mod h1:6HamsBKWqEC/FVHuQMHgQL+knPyvHH55HwJDHl/adMw=
github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.3 h1:5A2e3myxXMpCES+kjEWgGsaf9VgZXjZbLi5iMTH7j40= github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4 h1:UgUuKKvBwgqm2ZEL+sKv/OLeavrUb4gfHgdxe6oIOno=
github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.3/go.mod h1:ZFDg5oPjyRYrPAa3iFrtP1DO8xy+LUQxd9JFHEcuwJY= github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4/go.mod h1:0wWFRpsgF7vHsCukVZ5LAhZkiR4j875H6KEM2/tFQmA=
github.com/charmbracelet/colorprofile v0.3.1 h1:k8dTHMd7fgw4bnFd7jXTLZrSU/CQrKnL3m+AxCzDz40= github.com/charmbracelet/colorprofile v0.3.1 h1:k8dTHMd7fgw4bnFd7jXTLZrSU/CQrKnL3m+AxCzDz40=
github.com/charmbracelet/colorprofile v0.3.1/go.mod h1:/GkGusxNs8VB/RSOh3fu0TJmQ4ICMMPApIIVn0KszZ0= github.com/charmbracelet/colorprofile v0.3.1/go.mod h1:/GkGusxNs8VB/RSOh3fu0TJmQ4ICMMPApIIVn0KszZ0=
github.com/charmbracelet/glamour v0.10.0 h1:MtZvfwsYCx8jEPFJm3rIBFIMZUfUJ765oX8V6kXldcY= github.com/charmbracelet/glamour v0.10.0 h1:MtZvfwsYCx8jEPFJm3rIBFIMZUfUJ765oX8V6kXldcY=
github.com/charmbracelet/glamour v0.10.0/go.mod h1:f+uf+I/ChNmqo087elLnVdCiVgjSKWuXa/l6NU2ndYk= github.com/charmbracelet/glamour v0.10.0/go.mod h1:f+uf+I/ChNmqo087elLnVdCiVgjSKWuXa/l6NU2ndYk=
github.com/charmbracelet/lipgloss v1.1.1-0.20250404203927-76690c660834 h1:ZR7e0ro+SZZiIZD7msJyA+NjkCNNavuiPBLgerbOziE= github.com/charmbracelet/lipgloss v1.1.1-0.20250404203927-76690c660834 h1:ZR7e0ro+SZZiIZD7msJyA+NjkCNNavuiPBLgerbOziE=
github.com/charmbracelet/lipgloss v1.1.1-0.20250404203927-76690c660834/go.mod h1:aKC/t2arECF6rNOnaKaVU6y4t4ZeHQzqfxedE/VkVhA= github.com/charmbracelet/lipgloss v1.1.1-0.20250404203927-76690c660834/go.mod h1:aKC/t2arECF6rNOnaKaVU6y4t4ZeHQzqfxedE/VkVhA=
github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.1 h1:D9AJJuYTN5pvz6mpIGO1ijLKpfTYSHOtKGgwoTQ4Gog= github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3 h1:W6DpZX6zSkZr0iFq6JVh1vItLoxfYtNlaxOJtWp8Kis=
github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.1/go.mod h1:tRlx/Hu0lo/j9viunCN2H+Ze6JrmdjQlXUQvvArgaOc= github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3/go.mod h1:65HTtKURcv/ict9ZQhr6zT84JqIjMcJbyrZYHHKNfKA=
github.com/charmbracelet/x/ansi v0.8.0 h1:9GTq3xq9caJW8ZrBTe0LIe2fvfLR/bYXKTx2llXn7xE= github.com/charmbracelet/x/ansi v0.9.3 h1:BXt5DHS/MKF+LjuK4huWrC6NCvHtexww7dMayh6GXd0=
github.com/charmbracelet/x/ansi v0.8.0/go.mod h1:wdYl/ONOLHLIVmQaxbIYEC/cRKOQyjTkowiI4blgS9Q= github.com/charmbracelet/x/ansi v0.9.3/go.mod h1:3RQDQ6lDnROptfpWuUVIUG64bD2g2BgntdxH0Ya5TeE=
github.com/charmbracelet/x/cellbuf v0.0.14-0.20250501183327-ad3bc78c6a81 h1:iGrflaL5jQW6crML+pZx/ulWAVZQR3CQoRGvFsr2Tyg= github.com/charmbracelet/x/cellbuf v0.0.14-0.20250505150409-97991a1f17d1 h1:MTSs/nsZNfZPbYk/r9hluK2BtwoqvEYruAujNVwgDv0=
github.com/charmbracelet/x/cellbuf v0.0.14-0.20250501183327-ad3bc78c6a81/go.mod h1:poPFOXFTsJsnLbkV3H2KxAAXT7pdjxxLujLocWjkyzM= github.com/charmbracelet/x/cellbuf v0.0.14-0.20250505150409-97991a1f17d1/go.mod h1:xBlh2Yi3DL3zy/2n15kITpg0YZardf/aa/hgUaIM6Rk=
github.com/charmbracelet/x/exp/golden v0.0.0-20250207160936-21c02780d27a h1:FsHEJ52OC4VuTzU8t+n5frMjLvpYWEznSr/u8tnkCYw= github.com/charmbracelet/x/exp/golden v0.0.0-20250207160936-21c02780d27a h1:FsHEJ52OC4VuTzU8t+n5frMjLvpYWEznSr/u8tnkCYw=
github.com/charmbracelet/x/exp/golden v0.0.0-20250207160936-21c02780d27a/go.mod h1:wDlXFlCrmJ8J+swcL/MnGUuYnqgQdW9rhSD61oNMb6U= github.com/charmbracelet/x/exp/golden v0.0.0-20250207160936-21c02780d27a/go.mod h1:wDlXFlCrmJ8J+swcL/MnGUuYnqgQdW9rhSD61oNMb6U=
github.com/charmbracelet/x/exp/slice v0.0.0-20250327172914-2fdc97757edf h1:rLG0Yb6MQSDKdB52aGX55JT1oi0P0Kuaj7wi1bLUpnI= github.com/charmbracelet/x/exp/slice v0.0.0-20250327172914-2fdc97757edf h1:rLG0Yb6MQSDKdB52aGX55JT1oi0P0Kuaj7wi1bLUpnI=
github.com/charmbracelet/x/exp/slice v0.0.0-20250327172914-2fdc97757edf/go.mod h1:B3UgsnsBZS/eX42BlaNiJkD1pPOUa+oF1IYC6Yd2CEU= github.com/charmbracelet/x/exp/slice v0.0.0-20250327172914-2fdc97757edf/go.mod h1:B3UgsnsBZS/eX42BlaNiJkD1pPOUa+oF1IYC6Yd2CEU=
github.com/charmbracelet/x/input v0.3.5-0.20250424101541-abb4d9a9b197 h1:fsWj8NF5njyMVzELc7++HsvRDvgz3VcgGAUgWBDWWWM=
github.com/charmbracelet/x/input v0.3.5-0.20250424101541-abb4d9a9b197/go.mod h1:xseGeVftoP9rVI+/8WKYrJFH6ior6iERGvklwwHz5+s=
github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ= github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ=
github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg= github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg=
github.com/charmbracelet/x/windows v0.2.1 h1:3x7vnbpQrjpuq/4L+I4gNsG5htYoCiA5oe9hLjAij5I= github.com/charmbracelet/x/windows v0.2.1 h1:3x7vnbpQrjpuq/4L+I4gNsG5htYoCiA5oe9hLjAij5I=
@ -54,8 +52,6 @@ github.com/davecgh/go-spew v0.0.0-20161028175848-04cdfd42973b/go.mod h1:J7Y8YcW2
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/disintegration/imaging v1.6.2 h1:w1LecBlG2Lnp8B3jk5zSuNqd7b4DXhcjwek1ei82L+c=
github.com/disintegration/imaging v1.6.2/go.mod h1:44/5580QXChDfwIclfc/PCwrr44amcmDAg8hxG0Ewe4=
github.com/dlclark/regexp2 v1.11.5 h1:Q/sSnsKerHeCkc/jSTNq1oCm7KiVgUMZRDUoRu0JQZQ= github.com/dlclark/regexp2 v1.11.5 h1:Q/sSnsKerHeCkc/jSTNq1oCm7KiVgUMZRDUoRu0JQZQ=
github.com/dlclark/regexp2 v1.11.5/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= github.com/dlclark/regexp2 v1.11.5/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
github.com/dprotaso/go-yit v0.0.0-20191028211022-135eb7262960/go.mod h1:9HQzr9D/0PGwMEbC3d5AB7oi67+h4TsQqItC1GVYG58= github.com/dprotaso/go-yit v0.0.0-20191028211022-135eb7262960/go.mod h1:9HQzr9D/0PGwMEbC3d5AB7oi67+h4TsQqItC1GVYG58=
@ -92,6 +88,8 @@ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/gorilla/css v1.0.1 h1:ntNaBIghp6JmvWnxbZKANoLyuXTPZ4cAMlo6RyhlbO8= github.com/gorilla/css v1.0.1 h1:ntNaBIghp6JmvWnxbZKANoLyuXTPZ4cAMlo6RyhlbO8=
github.com/gorilla/css v1.0.1/go.mod h1:BvnYkspnSzMmwRK+b8/xgNPLiIuNZr6vbZBTPQ2A3b0= github.com/gorilla/css v1.0.1/go.mod h1:BvnYkspnSzMmwRK+b8/xgNPLiIuNZr6vbZBTPQ2A3b0=
github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM= github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=
@ -216,14 +214,13 @@ golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPh
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/exp v0.0.0-20250305212735-054e65f0b394 h1:nDVHiLt8aIbd/VzvPWN6kSOPE7+F/fNFDSXLVYkE/Iw= golang.org/x/exp v0.0.0-20250305212735-054e65f0b394 h1:nDVHiLt8aIbd/VzvPWN6kSOPE7+F/fNFDSXLVYkE/Iw=
golang.org/x/exp v0.0.0-20250305212735-054e65f0b394/go.mod h1:sIifuuw/Yco/y6yb6+bDNfyeQ/MdPUy/hKEMYQV17cM= golang.org/x/exp v0.0.0-20250305212735-054e65f0b394/go.mod h1:sIifuuw/Yco/y6yb6+bDNfyeQ/MdPUy/hKEMYQV17cM=
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/image v0.28.0 h1:gdem5JW1OLS4FbkWgLO+7ZeFzYtL3xClb97GaUzYMFE=
golang.org/x/image v0.26.0 h1:4XjIFEZWQmCZi6Wv8BoxsDhRU3RVnLX04dToTDAEPlY= golang.org/x/image v0.28.0/go.mod h1:GUJYXtnGKEUgggyzh+Vxt+AviiCcyiwpsl8iQ8MvwGY=
golang.org/x/image v0.26.0/go.mod h1:lcxbMFAovzpnJxzXS3nyL83K27tmqtKzIJpctK8YO5c=
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.24.0 h1:ZfthKaKaT4NrhGVZHO1/WDTwGES4De8KtWO0SIbNJMU= golang.org/x/mod v0.25.0 h1:n7a+ZbQKQA/Ysbyb0/6IbB1H/X41mKgbhfv7AfG/44w=
golang.org/x/mod v0.24.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= golang.org/x/mod v0.25.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww=
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
@ -234,15 +231,15 @@ golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT
golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.39.0 h1:ZCu7HMWDxpXpaiKdhzIfaltL9Lp31x/3fCP11bc6/fY= golang.org/x/net v0.41.0 h1:vBTly1HeNPEn3wtREYfy4GZ/NECgw2Cnl+nK6Nz3uvw=
golang.org/x/net v0.39.0/go.mod h1:X7NRbYVEA+ewNkCNyJ513WmMdQ3BineSwVtN2zD/d+E= golang.org/x/net v0.41.0/go.mod h1:B/K4NNqkfmg07DQYrbwvSluqCJOOXwUjeb/5lOisjbA=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.13.0 h1:AauUjRAJ9OSnvULf/ARrrVywoJDy0YS2AwQ98I37610= golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8=
golang.org/x/sync v0.13.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@ -261,28 +258,28 @@ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.32.0 h1:s77OFDvIQeibCmezSnk/q6iAfkdiQaJi4VzroCFrN20= golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw=
golang.org/x/sys v0.32.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.31.0 h1:erwDkOK1Msy6offm1mOgvspSkslFnIGsFnxOKoufg3o= golang.org/x/term v0.32.0 h1:DR4lr0TjUs3epypdhTOkMmuF5CDFJ/8pOnbzMZPQ7bg=
golang.org/x/term v0.31.0/go.mod h1:R4BeIy7D95HzImkxGkTW1UQTtP54tio2RyHz7PwK0aw= golang.org/x/term v0.32.0/go.mod h1:uZG1FhGx848Sqfsq4/DlJr3xGGsYMu/L5GW4abiaEPQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.24.0 h1:dd5Bzh4yt5KYA8f9CJHCP4FB4D51c2c6JvN37xJJkJ0= golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M=
golang.org/x/text v0.24.0/go.mod h1:L8rBsPeo2pSS+xqN0d5u2ikmjtmoJbDBT1b7nHvFCdU= golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/tools v0.31.0 h1:0EedkvKDbh+qistFTd0Bcwe/YLh4vHwWEkiI0toFIBU= golang.org/x/tools v0.34.0 h1:qIpSLOxeCYGg9TrcJokLBG4KFA6d795g0xkBkiESGlo=
golang.org/x/tools v0.31.0/go.mod h1:naFTU+Cev749tSJRXJlna0T3WxKvb1kWEx15xA4SdmQ= golang.org/x/tools v0.34.0/go.mod h1:pAP9OwEaY1CAW3HOmg3hLZC5Z0CCmzjAF2UQMSqNARg=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=


@ -0,0 +1,14 @@
//go:build !windows
// +build !windows
package input
import (
"io"
"github.com/muesli/cancelreader"
)
func newCancelreader(r io.Reader, _ int) (cancelreader.CancelReader, error) {
return cancelreader.NewReader(r) //nolint:wrapcheck
}
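On non-Windows platforms the reader is just muesli/cancelreader wrapped around the terminal input. A small usage sketch (assumed, not part of this change) showing why a cancelable reader matters: a blocking Read on stdin can be aborted from another goroutine.

package main

import (
	"errors"
	"fmt"
	"os"
	"time"

	"github.com/muesli/cancelreader"
)

func main() {
	cr, err := cancelreader.NewReader(os.Stdin)
	if err != nil {
		panic(err)
	}
	defer cr.Close()

	// Cancel the pending Read after two seconds.
	go func() {
		time.Sleep(2 * time.Second)
		cr.Cancel()
	}()

	buf := make([]byte, 256)
	n, err := cr.Read(buf)
	if errors.Is(err, cancelreader.ErrCanceled) {
		fmt.Println("read canceled")
		return
	}
	fmt.Printf("read %d bytes: %q\n", n, buf[:n])
}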


@ -0,0 +1,143 @@
//go:build windows
// +build windows
package input
import (
"fmt"
"io"
"os"
"sync"
xwindows "github.com/charmbracelet/x/windows"
"github.com/muesli/cancelreader"
"golang.org/x/sys/windows"
)
type conInputReader struct {
cancelMixin
conin windows.Handle
originalMode uint32
}
var _ cancelreader.CancelReader = &conInputReader{}
func newCancelreader(r io.Reader, flags int) (cancelreader.CancelReader, error) {
fallback := func(io.Reader) (cancelreader.CancelReader, error) {
return cancelreader.NewReader(r)
}
var dummy uint32
if f, ok := r.(cancelreader.File); !ok || f.Fd() != os.Stdin.Fd() ||
// If data was piped to the standard input, it does not emit events
// anymore. We can detect this if the console mode cannot be set anymore,
// in this case, we fallback to the default cancelreader implementation.
windows.GetConsoleMode(windows.Handle(f.Fd()), &dummy) != nil {
return fallback(r)
}
conin, err := windows.GetStdHandle(windows.STD_INPUT_HANDLE)
if err != nil {
return fallback(r)
}
// Discard any pending input events.
if err := xwindows.FlushConsoleInputBuffer(conin); err != nil {
return fallback(r)
}
modes := []uint32{
windows.ENABLE_WINDOW_INPUT,
windows.ENABLE_EXTENDED_FLAGS,
}
// Enabling mouse mode implicitly blocks console text selection. Thus, we
// need to enable it only if the mouse mode is requested.
// In order to toggle mouse mode, the caller must recreate the reader with
// the appropriate flag toggled.
if flags&FlagMouseMode != 0 {
modes = append(modes, windows.ENABLE_MOUSE_INPUT)
}
originalMode, err := prepareConsole(conin, modes...)
if err != nil {
return nil, fmt.Errorf("failed to prepare console input: %w", err)
}
return &conInputReader{
conin: conin,
originalMode: originalMode,
}, nil
}
// Cancel implements cancelreader.CancelReader.
func (r *conInputReader) Cancel() bool {
r.setCanceled()
return windows.CancelIoEx(r.conin, nil) == nil || windows.CancelIo(r.conin) == nil
}
// Close implements cancelreader.CancelReader.
func (r *conInputReader) Close() error {
if r.originalMode != 0 {
err := windows.SetConsoleMode(r.conin, r.originalMode)
if err != nil {
return fmt.Errorf("reset console mode: %w", err)
}
}
return nil
}
// Read implements cancelreader.CancelReader.
func (r *conInputReader) Read(data []byte) (int, error) {
if r.isCanceled() {
return 0, cancelreader.ErrCanceled
}
var n uint32
if err := windows.ReadFile(r.conin, data, &n, nil); err != nil {
return int(n), fmt.Errorf("read console input: %w", err)
}
return int(n), nil
}
func prepareConsole(input windows.Handle, modes ...uint32) (originalMode uint32, err error) {
err = windows.GetConsoleMode(input, &originalMode)
if err != nil {
return 0, fmt.Errorf("get console mode: %w", err)
}
var newMode uint32
for _, mode := range modes {
newMode |= mode
}
err = windows.SetConsoleMode(input, newMode)
if err != nil {
return 0, fmt.Errorf("set console mode: %w", err)
}
return originalMode, nil
}
// cancelMixin represents a goroutine-safe cancelation status.
type cancelMixin struct {
unsafeCanceled bool
lock sync.Mutex
}
func (c *cancelMixin) setCanceled() {
c.lock.Lock()
defer c.lock.Unlock()
c.unsafeCanceled = true
}
func (c *cancelMixin) isCanceled() bool {
c.lock.Lock()
defer c.lock.Unlock()
return c.unsafeCanceled
}


@ -0,0 +1,25 @@
package input
import "github.com/charmbracelet/x/ansi"
// ClipboardSelection represents a clipboard selection. The most common
// clipboard selections are "system" and "primary".
type ClipboardSelection = byte
// Clipboard selections.
const (
SystemClipboard ClipboardSelection = ansi.SystemClipboard
PrimaryClipboard ClipboardSelection = ansi.PrimaryClipboard
)
// ClipboardEvent is a clipboard read event. It is emitted when the terminal
// responds to an OSC52 clipboard read request.
type ClipboardEvent struct {
Content string
Selection ClipboardSelection
}
// String returns the string representation of the clipboard message.
func (e ClipboardEvent) String() string {
return e.Content
}

packages/tui/input/color.go (new file, 136 lines)

@ -0,0 +1,136 @@
package input
import (
"fmt"
"image/color"
"math"
)
// ForegroundColorEvent represents a foreground color event. This event is
// emitted when the terminal responds to a foreground color request made with
// [ansi.RequestForegroundColor].
type ForegroundColorEvent struct{ color.Color }
// String returns the hex representation of the color.
func (e ForegroundColorEvent) String() string {
return colorToHex(e.Color)
}
// IsDark returns whether the color is dark.
func (e ForegroundColorEvent) IsDark() bool {
return isDarkColor(e.Color)
}
// BackgroundColorEvent represents a background color event. This event is
// emitted when the terminal responds to a background color request made with
// [ansi.RequestBackgroundColor].
type BackgroundColorEvent struct{ color.Color }
// String returns the hex representation of the color.
func (e BackgroundColorEvent) String() string {
return colorToHex(e)
}
// IsDark returns whether the color is dark.
func (e BackgroundColorEvent) IsDark() bool {
return isDarkColor(e.Color)
}
// CursorColorEvent represents a cursor color change event. This event is
// emitted when the program requests the terminal cursor color using
// [ansi.RequestCursorColor].
type CursorColorEvent struct{ color.Color }
// String returns the hex representation of the color.
func (e CursorColorEvent) String() string {
return colorToHex(e)
}
// IsDark returns whether the color is dark.
func (e CursorColorEvent) IsDark() bool {
return isDarkColor(e)
}
type shiftable interface {
~uint | ~uint16 | ~uint32 | ~uint64
}
func shift[T shiftable](x T) T {
if x > 0xff {
x >>= 8
}
return x
}
func colorToHex(c color.Color) string {
if c == nil {
return ""
}
r, g, b, _ := c.RGBA()
return fmt.Sprintf("#%02x%02x%02x", shift(r), shift(g), shift(b))
}
func getMaxMin(a, b, c float64) (ma, mi float64) {
if a > b {
ma = a
mi = b
} else {
ma = b
mi = a
}
if c > ma {
ma = c
} else if c < mi {
mi = c
}
return ma, mi
}
func round(x float64) float64 {
return math.Round(x*1000) / 1000
}
// rgbToHSL converts an RGB triple to an HSL triple.
func rgbToHSL(r, g, b uint8) (h, s, l float64) {
// The r, g, b values are divided by 255 to change the range from 0..255 to 0..1:
Rnot := float64(r) / 255
Gnot := float64(g) / 255
Bnot := float64(b) / 255
Cmax, Cmin := getMaxMin(Rnot, Gnot, Bnot)
Δ := Cmax - Cmin
// Lightness calculation:
l = (Cmax + Cmin) / 2
// Hue and Saturation Calculation:
if Δ == 0 {
h = 0
s = 0
} else {
switch Cmax {
case Rnot:
h = 60 * (math.Mod((Gnot-Bnot)/Δ, 6))
case Gnot:
h = 60 * (((Bnot - Rnot) / Δ) + 2)
case Bnot:
h = 60 * (((Rnot - Gnot) / Δ) + 4)
}
if h < 0 {
h += 360
}
s = Δ / (1 - math.Abs((2*l)-1))
}
return h, round(s), round(l)
}
// isDarkColor returns whether the given color is dark.
func isDarkColor(c color.Color) bool {
if c == nil {
return true
}
r, g, b, _ := c.RGBA()
_, _, l := rgbToHSL(uint8(r>>8), uint8(g>>8), uint8(b>>8)) //nolint:gosec
return l < 0.5
}
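The dark/light detection above converts RGB to HSL and treats a lightness below 0.5 as dark. A standalone sketch of just that check (the package's helpers are unexported, so this reimplements the lightness step using the same formula):

package main

import (
	"fmt"
	"image/color"
)

// lightness returns the HSL lightness of a color: (max + min) / 2 of the
// normalized RGB channels.
func lightness(c color.Color) float64 {
	r, g, b, _ := c.RGBA() // 16-bit components
	rf := float64(r>>8) / 255
	gf := float64(g>>8) / 255
	bf := float64(b>>8) / 255
	maxC, minC := rf, rf
	for _, v := range []float64{gf, bf} {
		if v > maxC {
			maxC = v
		}
		if v < minC {
			minC = v
		}
	}
	return (maxC + minC) / 2
}

func main() {
	bg := color.RGBA{R: 0x1e, G: 0x1e, B: 0x2e, A: 0xff}
	fmt.Printf("lightness=%.3f dark=%v\n", lightness(bg), lightness(bg) < 0.5)
}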


@ -0,0 +1,7 @@
package input
import "image"
// CursorPositionEvent represents a cursor position event. Where X is the
// zero-based column and Y is the zero-based row.
type CursorPositionEvent image.Point

packages/tui/input/da1.go (new file, 18 lines)

@ -0,0 +1,18 @@
package input
import "github.com/charmbracelet/x/ansi"
// PrimaryDeviceAttributesEvent is an event that represents the terminal
// primary device attributes.
type PrimaryDeviceAttributesEvent []int
func parsePrimaryDevAttrs(params ansi.Params) Event {
// Primary Device Attributes
da1 := make(PrimaryDeviceAttributesEvent, len(params))
for i, p := range params {
if !p.HasMore() {
da1[i] = p.Param(0)
}
}
return da1
}


@ -0,0 +1,6 @@
// Package input provides a set of utilities for handling input events in a
// terminal environment. It includes support for reading input events, parsing
// escape sequences, and handling clipboard events.
// The package is designed to work with various terminal types and supports
// customization through flags and options.
package input


@ -0,0 +1,196 @@
//nolint:unused,revive,nolintlint
package input
import (
"bytes"
"io"
"unicode/utf8"
"github.com/muesli/cancelreader"
)
// Logger is a simple logger interface.
type Logger interface {
Printf(format string, v ...any)
}
// win32InputState is a state machine for parsing key events from the Windows
// Console API into escape sequences and utf8 runes, and keeps track of the last
// control key state to determine modifier key changes. It also keeps track of
// the last mouse button state and window size changes to determine which mouse
// buttons were released and to prevent multiple size events from firing.
type win32InputState struct {
ansiBuf [256]byte
ansiIdx int
utf16Buf [2]rune
utf16Half bool
lastCks uint32 // the last control key state for the previous event
lastMouseBtns uint32 // the last mouse button state for the previous event
lastWinsizeX, lastWinsizeY int16 // the last window size for the previous event to prevent multiple size events from firing
}
// Reader represents an input event reader. It reads input events and parses
// escape sequences from the terminal input buffer and translates them into
// human-readable events.
type Reader struct {
rd cancelreader.CancelReader
table map[string]Key // table is a lookup table for key sequences.
term string // term is the terminal name $TERM.
// paste is the bracketed paste mode buffer.
// When nil, bracketed paste mode is disabled.
paste []byte
buf [256]byte // do we need a larger buffer?
// partialSeq holds incomplete escape sequences that need more data
partialSeq []byte
// keyState keeps track of the current Windows Console API key events state.
// It is used to decode ANSI escape sequences and utf16 sequences.
keyState win32InputState
parser Parser
logger Logger
}
// NewReader returns a new input event reader. The reader reads input events
// from the terminal and parses escape sequences into human-readable events. It
// supports reading Terminfo databases. See [Parser] for more information.
//
// Example:
//
// r, _ := input.NewReader(os.Stdin, os.Getenv("TERM"), 0)
// defer r.Close()
// events, _ := r.ReadEvents()
// for _, ev := range events {
// log.Printf("%v", ev)
// }
func NewReader(r io.Reader, termType string, flags int) (*Reader, error) {
d := new(Reader)
cr, err := newCancelreader(r, flags)
if err != nil {
return nil, err
}
d.rd = cr
d.table = buildKeysTable(flags, termType)
d.term = termType
d.parser.flags = flags
return d, nil
}
// SetLogger sets a logger for the reader.
func (d *Reader) SetLogger(l Logger) {
d.logger = l
}
// Read implements [io.Reader].
func (d *Reader) Read(p []byte) (int, error) {
return d.rd.Read(p) //nolint:wrapcheck
}
// Cancel cancels the underlying reader.
func (d *Reader) Cancel() bool {
return d.rd.Cancel()
}
// Close closes the underlying reader.
func (d *Reader) Close() error {
return d.rd.Close() //nolint:wrapcheck
}
func (d *Reader) readEvents() ([]Event, error) {
nb, err := d.rd.Read(d.buf[:])
if err != nil {
return nil, err //nolint:wrapcheck
}
var events []Event
// Combine any partial sequence from previous read with new data
var buf []byte
if len(d.partialSeq) > 0 {
buf = make([]byte, len(d.partialSeq)+nb)
copy(buf, d.partialSeq)
copy(buf[len(d.partialSeq):], d.buf[:nb])
d.partialSeq = nil // clear the partial sequence
} else {
buf = d.buf[:nb]
}
// Lookup table first
if bytes.HasPrefix(buf, []byte{'\x1b'}) {
if k, ok := d.table[string(buf)]; ok {
if d.logger != nil {
d.logger.Printf("input: %q", buf)
}
events = append(events, KeyPressEvent(k))
return events, nil
}
}
var i int
for i < len(buf) {
nb, ev := d.parser.parseSequence(buf[i:])
if d.logger != nil && nb > 0 {
d.logger.Printf("input: %q", buf[i:i+nb])
}
// Handle incomplete sequences - when parseSequence returns (0, nil)
// it means we need more data to complete the sequence
if nb == 0 && ev == nil {
// Store the remaining data for the next read
remaining := len(buf) - i
if remaining > 0 {
d.partialSeq = make([]byte, remaining)
copy(d.partialSeq, buf[i:])
}
break
}
// Handle bracketed-paste
if d.paste != nil {
if _, ok := ev.(PasteEndEvent); !ok {
d.paste = append(d.paste, buf[i])
i++
continue
}
}
switch ev.(type) {
// case UnknownEvent:
// // If the sequence is not recognized by the parser, try looking it up.
// if k, ok := d.table[string(buf[i:i+nb])]; ok {
// ev = KeyPressEvent(k)
// }
case PasteStartEvent:
d.paste = []byte{}
case PasteEndEvent:
// Decode the captured data into runes.
var paste []rune
for len(d.paste) > 0 {
r, w := utf8.DecodeRune(d.paste)
if r != utf8.RuneError {
paste = append(paste, r)
}
d.paste = d.paste[w:]
}
d.paste = nil // reset the buffer
events = append(events, PasteEvent(paste))
case nil:
i++
continue
}
if mevs, ok := ev.(MultiEvent); ok {
events = append(events, []Event(mevs)...)
} else {
events = append(events, ev)
}
i += nb
}
return events, nil
}
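The subtle part of readEvents is the partialSeq buffer: a read can end in the middle of an escape sequence, and the remainder has to be carried over and prepended to the next chunk. A toy standalone sketch of that carry-over pattern (parseOne is a made-up stand-in for parser.parseSequence, not the real parser):

package main

import "fmt"

// parseOne consumes one token from buf, returning how many bytes it used,
// or (0, "") when buf starts an escape sequence that is not complete yet.
func parseOne(buf []byte) (int, string) {
	if len(buf) == 0 {
		return 0, ""
	}
	if buf[0] == 0x1b {
		// toy rule: an "escape sequence" is ESC followed by two bytes
		if len(buf) < 3 {
			return 0, "" // incomplete, need more data
		}
		return 3, fmt.Sprintf("seq(%q)", buf[1:3])
	}
	return 1, fmt.Sprintf("key(%q)", buf[0])
}

// feed prepends any leftover bytes from the previous read, parses what it
// can, and returns the decoded tokens plus the new leftover.
func feed(partial, chunk []byte) (tokens []string, leftover []byte) {
	buf := append(append([]byte{}, partial...), chunk...)
	for i := 0; i < len(buf); {
		n, tok := parseOne(buf[i:])
		if n == 0 {
			leftover = append(leftover, buf[i:]...) // keep for next read
			break
		}
		tokens = append(tokens, tok)
		i += n
	}
	return tokens, leftover
}

func main() {
	toks, rest := feed(nil, []byte("a\x1b[")) // sequence split across reads
	fmt.Printf("%v %q\n", toks, rest)         // [key('a')] "\x1b["
	toks, rest = feed(rest, []byte("Zb"))     // completes ESC [ Z, then 'b'
	fmt.Printf("%v %q\n", toks, rest)         // [seq("[Z") key('b')] ""
}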


@ -0,0 +1,17 @@
//go:build !windows
// +build !windows
package input
// ReadEvents reads input events from the terminal.
//
// It reads the events available in the input buffer and returns them.
func (d *Reader) ReadEvents() ([]Event, error) {
return d.readEvents()
}
// parseWin32InputKeyEvent parses a Win32 input key events. This function is
// only available on Windows.
func (p *Parser) parseWin32InputKeyEvent(*win32InputState, uint16, uint16, rune, bool, uint32, uint16) Event {
return nil
}


@ -0,0 +1,25 @@
package input
import (
"io"
"strings"
"testing"
)
func BenchmarkDriver(b *testing.B) {
input := "\x1b\x1b[Ztest\x00\x1b]10;1234/1234/1234\x07\x1b[27;2;27~"
rdr := strings.NewReader(input)
drv, err := NewReader(rdr, "dumb", 0)
if err != nil {
b.Fatalf("could not create driver: %v", err)
}
b.ReportAllocs()
b.ResetTimer()
for i := 0; i < b.N; i++ {
rdr.Reset(input)
if _, err := drv.ReadEvents(); err != nil && err != io.EOF {
b.Errorf("error reading input: %v", err)
}
}
}


@ -0,0 +1,620 @@
//go:build windows
// +build windows
package input
import (
"errors"
"fmt"
"strings"
"time"
"unicode"
"unicode/utf16"
"unicode/utf8"
"github.com/charmbracelet/x/ansi"
xwindows "github.com/charmbracelet/x/windows"
"github.com/muesli/cancelreader"
"golang.org/x/sys/windows"
)
// ReadEvents reads input events from the terminal.
//
// It reads the events available in the input buffer and returns them.
func (d *Reader) ReadEvents() ([]Event, error) {
events, err := d.handleConInput()
if errors.Is(err, errNotConInputReader) {
return d.readEvents()
}
return events, err
}
var errNotConInputReader = fmt.Errorf("handleConInput: not a conInputReader")
func (d *Reader) handleConInput() ([]Event, error) {
cc, ok := d.rd.(*conInputReader)
if !ok {
return nil, errNotConInputReader
}
var (
events []xwindows.InputRecord
err error
)
for {
// Peek up to 256 events; this allows escape sequences that are reported as
// multiple key events to be read in a single batch.
events, err = peekNConsoleInputs(cc.conin, 256)
if cc.isCanceled() {
return nil, cancelreader.ErrCanceled
}
if err != nil {
return nil, fmt.Errorf("peek coninput events: %w", err)
}
if len(events) > 0 {
break
}
// Sleep for a bit to avoid busy waiting.
time.Sleep(10 * time.Millisecond)
}
events, err = readNConsoleInputs(cc.conin, uint32(len(events)))
if cc.isCanceled() {
return nil, cancelreader.ErrCanceled
}
if err != nil {
return nil, fmt.Errorf("read coninput events: %w", err)
}
var evs []Event
for _, event := range events {
if e := d.parser.parseConInputEvent(event, &d.keyState); e != nil {
if multi, ok := e.(MultiEvent); ok {
evs = append(evs, multi...)
} else {
evs = append(evs, e)
}
}
}
return evs, nil
}
func (p *Parser) parseConInputEvent(event xwindows.InputRecord, keyState *win32InputState) Event {
switch event.EventType {
case xwindows.KEY_EVENT:
kevent := event.KeyEvent()
return p.parseWin32InputKeyEvent(keyState, kevent.VirtualKeyCode, kevent.VirtualScanCode,
kevent.Char, kevent.KeyDown, kevent.ControlKeyState, kevent.RepeatCount)
case xwindows.WINDOW_BUFFER_SIZE_EVENT:
wevent := event.WindowBufferSizeEvent()
if wevent.Size.X != keyState.lastWinsizeX || wevent.Size.Y != keyState.lastWinsizeY {
keyState.lastWinsizeX, keyState.lastWinsizeY = wevent.Size.X, wevent.Size.Y
return WindowSizeEvent{
Width: int(wevent.Size.X),
Height: int(wevent.Size.Y),
}
}
case xwindows.MOUSE_EVENT:
mevent := event.MouseEvent()
ev := mouseEvent(keyState.lastMouseBtns, mevent)
keyState.lastMouseBtns = mevent.ButtonState
return ev
case xwindows.FOCUS_EVENT:
fevent := event.FocusEvent()
if fevent.SetFocus {
return FocusEvent{}
}
return BlurEvent{}
case xwindows.MENU_EVENT:
// ignore
}
return nil
}
func mouseEventButton(p, s uint32) (MouseButton, bool) {
var isRelease bool
button := MouseNone
btn := p ^ s
if btn&s == 0 {
isRelease = true
}
if btn == 0 {
switch {
case s&xwindows.FROM_LEFT_1ST_BUTTON_PRESSED > 0:
button = MouseLeft
case s&xwindows.FROM_LEFT_2ND_BUTTON_PRESSED > 0:
button = MouseMiddle
case s&xwindows.RIGHTMOST_BUTTON_PRESSED > 0:
button = MouseRight
case s&xwindows.FROM_LEFT_3RD_BUTTON_PRESSED > 0:
button = MouseBackward
case s&xwindows.FROM_LEFT_4TH_BUTTON_PRESSED > 0:
button = MouseForward
}
return button, isRelease
}
switch btn {
case xwindows.FROM_LEFT_1ST_BUTTON_PRESSED: // left button
button = MouseLeft
case xwindows.RIGHTMOST_BUTTON_PRESSED: // right button
button = MouseRight
case xwindows.FROM_LEFT_2ND_BUTTON_PRESSED: // middle button
button = MouseMiddle
case xwindows.FROM_LEFT_3RD_BUTTON_PRESSED: // unknown (possibly mouse backward)
button = MouseBackward
case xwindows.FROM_LEFT_4TH_BUTTON_PRESSED: // unknown (possibly mouse forward)
button = MouseForward
}
return button, isRelease
}
func mouseEvent(p uint32, e xwindows.MouseEventRecord) (ev Event) {
var mod KeyMod
var isRelease bool
if e.ControlKeyState&(xwindows.LEFT_ALT_PRESSED|xwindows.RIGHT_ALT_PRESSED) != 0 {
mod |= ModAlt
}
if e.ControlKeyState&(xwindows.LEFT_CTRL_PRESSED|xwindows.RIGHT_CTRL_PRESSED) != 0 {
mod |= ModCtrl
}
if e.ControlKeyState&(xwindows.SHIFT_PRESSED) != 0 {
mod |= ModShift
}
m := Mouse{
X: int(e.MousePositon.X),
Y: int(e.MousePositon.Y),
Mod: mod,
}
wheelDirection := int16(highWord(e.ButtonState)) //nolint:gosec
switch e.EventFlags {
case 0, xwindows.DOUBLE_CLICK:
m.Button, isRelease = mouseEventButton(p, e.ButtonState)
case xwindows.MOUSE_WHEELED:
if wheelDirection > 0 {
m.Button = MouseWheelUp
} else {
m.Button = MouseWheelDown
}
case xwindows.MOUSE_HWHEELED:
if wheelDirection > 0 {
m.Button = MouseWheelRight
} else {
m.Button = MouseWheelLeft
}
case xwindows.MOUSE_MOVED:
m.Button, _ = mouseEventButton(p, e.ButtonState)
return MouseMotionEvent(m)
}
if isWheel(m.Button) {
return MouseWheelEvent(m)
} else if isRelease {
return MouseReleaseEvent(m)
}
return MouseClickEvent(m)
}
func highWord(data uint32) uint16 {
return uint16((data & 0xFFFF0000) >> 16) //nolint:gosec
}
func readNConsoleInputs(console windows.Handle, maxEvents uint32) ([]xwindows.InputRecord, error) {
if maxEvents == 0 {
return nil, fmt.Errorf("maxEvents cannot be zero")
}
records := make([]xwindows.InputRecord, maxEvents)
n, err := readConsoleInput(console, records)
return records[:n], err
}
func readConsoleInput(console windows.Handle, inputRecords []xwindows.InputRecord) (uint32, error) {
if len(inputRecords) == 0 {
return 0, fmt.Errorf("size of input record buffer cannot be zero")
}
var read uint32
err := xwindows.ReadConsoleInput(console, &inputRecords[0], uint32(len(inputRecords)), &read) //nolint:gosec
return read, err //nolint:wrapcheck
}
func peekConsoleInput(console windows.Handle, inputRecords []xwindows.InputRecord) (uint32, error) {
if len(inputRecords) == 0 {
return 0, fmt.Errorf("size of input record buffer cannot be zero")
}
var read uint32
err := xwindows.PeekConsoleInput(console, &inputRecords[0], uint32(len(inputRecords)), &read) //nolint:gosec
return read, err //nolint:wrapcheck
}
func peekNConsoleInputs(console windows.Handle, maxEvents uint32) ([]xwindows.InputRecord, error) {
if maxEvents == 0 {
return nil, fmt.Errorf("maxEvents cannot be zero")
}
records := make([]xwindows.InputRecord, maxEvents)
n, err := peekConsoleInput(console, records)
return records[:n], err
}
// parseWin32InputKeyEvent parses a single key event from either the Windows
// Console API or win32-input-mode events. When state is nil, it means this is
// an event from win32-input-mode. Otherwise, it's a key event from the Windows
// Console API and needs a state to decode ANSI escape sequences and utf16
// runes.
func (p *Parser) parseWin32InputKeyEvent(state *win32InputState, vkc uint16, _ uint16, r rune, keyDown bool, cks uint32, repeatCount uint16) (event Event) {
defer func() {
// Respect the repeat count.
if repeatCount > 1 {
var multi MultiEvent
for i := 0; i < int(repeatCount); i++ {
multi = append(multi, event)
}
event = multi
}
}()
if state != nil {
defer func() {
state.lastCks = cks
}()
}
var utf8Buf [utf8.UTFMax]byte
var key Key
if state != nil && state.utf16Half {
state.utf16Half = false
state.utf16Buf[1] = r
codepoint := utf16.DecodeRune(state.utf16Buf[0], state.utf16Buf[1])
rw := utf8.EncodeRune(utf8Buf[:], codepoint)
r, _ = utf8.DecodeRune(utf8Buf[:rw])
key.Code = r
key.Text = string(r)
key.Mod = translateControlKeyState(cks)
key = ensureKeyCase(key, cks)
if keyDown {
return KeyPressEvent(key)
}
return KeyReleaseEvent(key)
}
var baseCode rune
switch {
case vkc == 0:
// Zero means this event is either an escape code or a unicode
// codepoint.
if state != nil && state.ansiIdx == 0 && r != ansi.ESC {
// This is a unicode codepoint.
baseCode = r
break
}
if state != nil {
// Collect ANSI escape code.
state.ansiBuf[state.ansiIdx] = byte(r)
state.ansiIdx++
if state.ansiIdx <= 2 {
// We haven't received enough bytes to determine if this is an
// ANSI escape code.
return nil
}
if r == ansi.ESC {
// We're expecting a closing String Terminator [ansi.ST].
return nil
}
n, event := p.parseSequence(state.ansiBuf[:state.ansiIdx])
if n == 0 {
return nil
}
if _, ok := event.(UnknownEvent); ok {
return nil
}
state.ansiIdx = 0
return event
}
case vkc == xwindows.VK_BACK:
baseCode = KeyBackspace
case vkc == xwindows.VK_TAB:
baseCode = KeyTab
case vkc == xwindows.VK_RETURN:
baseCode = KeyEnter
case vkc == xwindows.VK_SHIFT:
//nolint:nestif
if cks&xwindows.SHIFT_PRESSED != 0 {
if cks&xwindows.ENHANCED_KEY != 0 {
baseCode = KeyRightShift
} else {
baseCode = KeyLeftShift
}
} else if state != nil {
if state.lastCks&xwindows.SHIFT_PRESSED != 0 {
if state.lastCks&xwindows.ENHANCED_KEY != 0 {
baseCode = KeyRightShift
} else {
baseCode = KeyLeftShift
}
}
}
case vkc == xwindows.VK_CONTROL:
if cks&xwindows.LEFT_CTRL_PRESSED != 0 {
baseCode = KeyLeftCtrl
} else if cks&xwindows.RIGHT_CTRL_PRESSED != 0 {
baseCode = KeyRightCtrl
} else if state != nil {
if state.lastCks&xwindows.LEFT_CTRL_PRESSED != 0 {
baseCode = KeyLeftCtrl
} else if state.lastCks&xwindows.RIGHT_CTRL_PRESSED != 0 {
baseCode = KeyRightCtrl
}
}
case vkc == xwindows.VK_MENU:
if cks&xwindows.LEFT_ALT_PRESSED != 0 {
baseCode = KeyLeftAlt
} else if cks&xwindows.RIGHT_ALT_PRESSED != 0 {
baseCode = KeyRightAlt
} else if state != nil {
if state.lastCks&xwindows.LEFT_ALT_PRESSED != 0 {
baseCode = KeyLeftAlt
} else if state.lastCks&xwindows.RIGHT_ALT_PRESSED != 0 {
baseCode = KeyRightAlt
}
}
case vkc == xwindows.VK_PAUSE:
baseCode = KeyPause
case vkc == xwindows.VK_CAPITAL:
baseCode = KeyCapsLock
case vkc == xwindows.VK_ESCAPE:
baseCode = KeyEscape
case vkc == xwindows.VK_SPACE:
baseCode = KeySpace
case vkc == xwindows.VK_PRIOR:
baseCode = KeyPgUp
case vkc == xwindows.VK_NEXT:
baseCode = KeyPgDown
case vkc == xwindows.VK_END:
baseCode = KeyEnd
case vkc == xwindows.VK_HOME:
baseCode = KeyHome
case vkc == xwindows.VK_LEFT:
baseCode = KeyLeft
case vkc == xwindows.VK_UP:
baseCode = KeyUp
case vkc == xwindows.VK_RIGHT:
baseCode = KeyRight
case vkc == xwindows.VK_DOWN:
baseCode = KeyDown
case vkc == xwindows.VK_SELECT:
baseCode = KeySelect
case vkc == xwindows.VK_SNAPSHOT:
baseCode = KeyPrintScreen
case vkc == xwindows.VK_INSERT:
baseCode = KeyInsert
case vkc == xwindows.VK_DELETE:
baseCode = KeyDelete
case vkc >= '0' && vkc <= '9':
baseCode = rune(vkc)
case vkc >= 'A' && vkc <= 'Z':
// Convert to lowercase.
baseCode = rune(vkc) + 32
case vkc == xwindows.VK_LWIN:
baseCode = KeyLeftSuper
case vkc == xwindows.VK_RWIN:
baseCode = KeyRightSuper
case vkc == xwindows.VK_APPS:
baseCode = KeyMenu
case vkc >= xwindows.VK_NUMPAD0 && vkc <= xwindows.VK_NUMPAD9:
baseCode = rune(vkc-xwindows.VK_NUMPAD0) + KeyKp0
case vkc == xwindows.VK_MULTIPLY:
baseCode = KeyKpMultiply
case vkc == xwindows.VK_ADD:
baseCode = KeyKpPlus
case vkc == xwindows.VK_SEPARATOR:
baseCode = KeyKpComma
case vkc == xwindows.VK_SUBTRACT:
baseCode = KeyKpMinus
case vkc == xwindows.VK_DECIMAL:
baseCode = KeyKpDecimal
case vkc == xwindows.VK_DIVIDE:
baseCode = KeyKpDivide
case vkc >= xwindows.VK_F1 && vkc <= xwindows.VK_F24:
baseCode = rune(vkc-xwindows.VK_F1) + KeyF1
case vkc == xwindows.VK_NUMLOCK:
baseCode = KeyNumLock
case vkc == xwindows.VK_SCROLL:
baseCode = KeyScrollLock
case vkc == xwindows.VK_LSHIFT:
baseCode = KeyLeftShift
case vkc == xwindows.VK_RSHIFT:
baseCode = KeyRightShift
case vkc == xwindows.VK_LCONTROL:
baseCode = KeyLeftCtrl
case vkc == xwindows.VK_RCONTROL:
baseCode = KeyRightCtrl
case vkc == xwindows.VK_LMENU:
baseCode = KeyLeftAlt
case vkc == xwindows.VK_RMENU:
baseCode = KeyRightAlt
case vkc == xwindows.VK_VOLUME_MUTE:
baseCode = KeyMute
case vkc == xwindows.VK_VOLUME_DOWN:
baseCode = KeyLowerVol
case vkc == xwindows.VK_VOLUME_UP:
baseCode = KeyRaiseVol
case vkc == xwindows.VK_MEDIA_NEXT_TRACK:
baseCode = KeyMediaNext
case vkc == xwindows.VK_MEDIA_PREV_TRACK:
baseCode = KeyMediaPrev
case vkc == xwindows.VK_MEDIA_STOP:
baseCode = KeyMediaStop
case vkc == xwindows.VK_MEDIA_PLAY_PAUSE:
baseCode = KeyMediaPlayPause
case vkc == xwindows.VK_OEM_1:
baseCode = ';'
case vkc == xwindows.VK_OEM_PLUS:
baseCode = '+'
case vkc == xwindows.VK_OEM_COMMA:
baseCode = ','
case vkc == xwindows.VK_OEM_MINUS:
baseCode = '-'
case vkc == xwindows.VK_OEM_PERIOD:
baseCode = '.'
case vkc == xwindows.VK_OEM_2:
baseCode = '/'
case vkc == xwindows.VK_OEM_3:
baseCode = '`'
case vkc == xwindows.VK_OEM_4:
baseCode = '['
case vkc == xwindows.VK_OEM_5:
baseCode = '\\'
case vkc == xwindows.VK_OEM_6:
baseCode = ']'
case vkc == xwindows.VK_OEM_7:
baseCode = '\''
}
if utf16.IsSurrogate(r) {
if state != nil {
state.utf16Buf[0] = r
state.utf16Half = true
}
return nil
}
// AltGr is left ctrl + right alt. On non-US keyboards, this is used to type
// special characters and produce printable events.
// XXX: Should this be a KeyMod?
altGr := cks&(xwindows.LEFT_CTRL_PRESSED|xwindows.RIGHT_ALT_PRESSED) == xwindows.LEFT_CTRL_PRESSED|xwindows.RIGHT_ALT_PRESSED
var text string
keyCode := baseCode
if !unicode.IsControl(r) {
rw := utf8.EncodeRune(utf8Buf[:], r)
keyCode, _ = utf8.DecodeRune(utf8Buf[:rw])
if unicode.IsPrint(keyCode) && (cks == 0 ||
cks == xwindows.SHIFT_PRESSED ||
cks == xwindows.CAPSLOCK_ON ||
altGr) {
// If no modifier is held, only shift is pressed, caps lock is on, or AltGr is
// held, the key event is printable, i.e. [text] is not empty.
text = string(keyCode)
}
}
key.Code = keyCode
key.Text = text
key.Mod = translateControlKeyState(cks)
key.BaseCode = baseCode
key = ensureKeyCase(key, cks)
if keyDown {
return KeyPressEvent(key)
}
return KeyReleaseEvent(key)
}
// ensureKeyCase ensures that the key's text is in the correct case based on the
// control key state.
func ensureKeyCase(key Key, cks uint32) Key {
if len(key.Text) == 0 {
return key
}
hasShift := cks&xwindows.SHIFT_PRESSED != 0
hasCaps := cks&xwindows.CAPSLOCK_ON != 0
if hasShift || hasCaps {
if unicode.IsLower(key.Code) {
key.ShiftedCode = unicode.ToUpper(key.Code)
key.Text = string(key.ShiftedCode)
}
} else {
if unicode.IsUpper(key.Code) {
key.ShiftedCode = unicode.ToLower(key.Code)
key.Text = string(key.ShiftedCode)
}
}
return key
}
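A small usage sketch of ensureKeyCase (illustrative only, assuming the package's Key type shown above and an imported fmt): with Shift or Caps Lock set in the control key state, a lowercase code is normalized to uppercase text and ShiftedCode is filled in.

func exampleEnsureKeyCase() {
    k := ensureKeyCase(Key{Code: 'a', Text: "a"}, xwindows.SHIFT_PRESSED)
    fmt.Printf("%c %q\n", k.ShiftedCode, k.Text) // A "A"
}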
// translateControlKeyState translates the control key state from the Windows
// Console API into a Mod bitmask.
func translateControlKeyState(cks uint32) (m KeyMod) {
if cks&xwindows.LEFT_CTRL_PRESSED != 0 || cks&xwindows.RIGHT_CTRL_PRESSED != 0 {
m |= ModCtrl
}
if cks&xwindows.LEFT_ALT_PRESSED != 0 || cks&xwindows.RIGHT_ALT_PRESSED != 0 {
m |= ModAlt
}
if cks&xwindows.SHIFT_PRESSED != 0 {
m |= ModShift
}
if cks&xwindows.CAPSLOCK_ON != 0 {
m |= ModCapsLock
}
if cks&xwindows.NUMLOCK_ON != 0 {
m |= ModNumLock
}
if cks&xwindows.SCROLLLOCK_ON != 0 {
m |= ModScrollLock
}
return
}
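The translation simply ORs the matching Mod bits together, so holding left Ctrl and Shift yields ModCtrl|ModShift. A minimal sketch (illustrative, assuming the package context above and an imported fmt):

func exampleTranslateMods() {
    cks := uint32(xwindows.LEFT_CTRL_PRESSED | xwindows.SHIFT_PRESSED)
    fmt.Println(translateControlKeyState(cks) == ModCtrl|ModShift) // true
}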
//nolint:unused
func keyEventString(vkc, sc uint16, r rune, keyDown bool, cks uint32, repeatCount uint16) string {
var s strings.Builder
s.WriteString("vkc: ")
s.WriteString(fmt.Sprintf("%d, 0x%02x", vkc, vkc))
s.WriteString(", sc: ")
s.WriteString(fmt.Sprintf("%d, 0x%02x", sc, sc))
s.WriteString(", r: ")
s.WriteString(fmt.Sprintf("%q", r))
s.WriteString(", down: ")
s.WriteString(fmt.Sprintf("%v", keyDown))
s.WriteString(", cks: [")
if cks&xwindows.LEFT_ALT_PRESSED != 0 {
s.WriteString("left alt, ")
}
if cks&xwindows.RIGHT_ALT_PRESSED != 0 {
s.WriteString("right alt, ")
}
if cks&xwindows.LEFT_CTRL_PRESSED != 0 {
s.WriteString("left ctrl, ")
}
if cks&xwindows.RIGHT_CTRL_PRESSED != 0 {
s.WriteString("right ctrl, ")
}
if cks&xwindows.SHIFT_PRESSED != 0 {
s.WriteString("shift, ")
}
if cks&xwindows.CAPSLOCK_ON != 0 {
s.WriteString("caps lock, ")
}
if cks&xwindows.NUMLOCK_ON != 0 {
s.WriteString("num lock, ")
}
if cks&xwindows.SCROLLLOCK_ON != 0 {
s.WriteString("scroll lock, ")
}
if cks&xwindows.ENHANCED_KEY != 0 {
s.WriteString("enhanced key, ")
}
s.WriteString("], repeat count: ")
s.WriteString(fmt.Sprintf("%d", repeatCount))
return s.String()
}
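keyEventString is a debug-only helper (hence the nolint directive). A hedged example of how it might be called while diagnosing raw console records, with made-up values and assuming a log import:

func debugKeyEvent() {
    // Hypothetical values, for illustration only.
    log.Println(keyEventString('A', 0x1e, 'a', true, xwindows.LEFT_CTRL_PRESSED|xwindows.NUMLOCK_ON, 1))
}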


@@ -0,0 +1,271 @@
package input
import (
"encoding/binary"
"image/color"
"reflect"
"testing"
"unicode/utf16"
"github.com/charmbracelet/x/ansi"
xwindows "github.com/charmbracelet/x/windows"
"golang.org/x/sys/windows"
)
func TestWindowsInputEvents(t *testing.T) {
cases := []struct {
name string
events []xwindows.InputRecord
expected []Event
sequence bool // indicates that the input events form an ANSI sequence or a UTF-16 surrogate pair
}{
{
name: "single key event",
events: []xwindows.InputRecord{
encodeKeyEvent(xwindows.KeyEventRecord{
KeyDown: true,
Char: 'a',
VirtualKeyCode: 'A',
}),
},
expected: []Event{KeyPressEvent{Code: 'a', BaseCode: 'a', Text: "a"}},
},
{
name: "single key event with control key",
events: []xwindows.InputRecord{
encodeKeyEvent(xwindows.KeyEventRecord{
KeyDown: true,
Char: 'a',
VirtualKeyCode: 'A',
ControlKeyState: xwindows.LEFT_CTRL_PRESSED,
}),
},
expected: []Event{KeyPressEvent{Code: 'a', BaseCode: 'a', Mod: ModCtrl}},
},
{
name: "escape alt key event",
events: []xwindows.InputRecord{
encodeKeyEvent(xwindows.KeyEventRecord{
KeyDown: true,
Char: ansi.ESC,
VirtualKeyCode: ansi.ESC,
ControlKeyState: xwindows.LEFT_ALT_PRESSED,
}),
},
expected: []Event{KeyPressEvent{Code: ansi.ESC, BaseCode: ansi.ESC, Mod: ModAlt}},
},
{
name: "single shifted key event",
events: []xwindows.InputRecord{
encodeKeyEvent(xwindows.KeyEventRecord{
KeyDown: true,
Char: 'A',
VirtualKeyCode: 'A',
ControlKeyState: xwindows.SHIFT_PRESSED,
}),
},
expected: []Event{KeyPressEvent{Code: 'A', BaseCode: 'a', Text: "A", Mod: ModShift}},
},
{
name: "utf16 rune",
events: encodeUtf16Rune('😊'), // smiling face emoji (U+1F60A), delivered as a UTF-16 surrogate pair
expected: []Event{
KeyPressEvent{Code: '😊', Text: "😊"},
},
sequence: true,
},
{
name: "background color response",
events: encodeSequence("\x1b]11;rgb:ff/ff/ff\x07"),
expected: []Event{BackgroundColorEvent{Color: color.RGBA{R: 0xff, G: 0xff, B: 0xff, A: 0xff}}},
sequence: true,
},
{
name: "st terminated background color response",
events: encodeSequence("\x1b]11;rgb:ffff/ffff/ffff\x1b\\"),
expected: []Event{BackgroundColorEvent{Color: color.RGBA{R: 0xff, G: 0xff, B: 0xff, A: 0xff}}},
sequence: true,
},
{
name: "simple mouse event",
events: []xwindows.InputRecord{
encodeMouseEvent(xwindows.MouseEventRecord{
MousePositon: windows.Coord{X: 10, Y: 20},
ButtonState: xwindows.FROM_LEFT_1ST_BUTTON_PRESSED,
EventFlags: 0,
}),
encodeMouseEvent(xwindows.MouseEventRecord{
MousePositon: windows.Coord{X: 10, Y: 20},
EventFlags: 0,
}),
},
expected: []Event{
MouseClickEvent{Button: MouseLeft, X: 10, Y: 20},
MouseReleaseEvent{Button: MouseLeft, X: 10, Y: 20},
},
},
{
name: "focus event",
events: []xwindows.InputRecord{
encodeFocusEvent(xwindows.FocusEventRecord{
SetFocus: true,
}),
encodeFocusEvent(xwindows.FocusEventRecord{
SetFocus: false,
}),
},
expected: []Event{
FocusEvent{},
BlurEvent{},
},
},
{
name: "window size event",
events: []xwindows.InputRecord{
encodeWindowBufferSizeEvent(xwindows.WindowBufferSizeRecord{
Size: windows.Coord{X: 10, Y: 20},
}),
},
expected: []Event{
WindowSizeEvent{Width: 10, Height: 20},
},
},
}
// p is the parser used to decode the input events
var p Parser
// state tracks the driver state needed to reassemble ANSI sequences and UTF-16 surrogate pairs
var state win32InputState
for _, tc := range cases {
t.Run(tc.name, func(t *testing.T) {
if tc.sequence {
var event Event
for _, ev := range tc.events {
if ev.EventType != xwindows.KEY_EVENT {
t.Fatalf("expected key event, got %v", ev.EventType)
}
key := ev.KeyEvent()
event = p.parseWin32InputKeyEvent(&state, key.VirtualKeyCode, key.VirtualScanCode, key.Char, key.KeyDown, key.ControlKeyState, key.RepeatCount)
}
if len(tc.expected) != 1 {
t.Fatalf("sequence test cases must expect exactly one event, got %d", len(tc.expected))
}
if !reflect.DeepEqual(event, tc.expected[0]) {
t.Errorf("expected %v, got %v", tc.expected[0], event)
}
} else {
if len(tc.events) != len(tc.expected) {
t.Fatalf("expected %d events, got %d", len(tc.expected), len(tc.events))
}
for j, ev := range tc.events {
event := p.parseConInputEvent(ev, &state)
if !reflect.DeepEqual(event, tc.expected[j]) {
t.Errorf("expected %#v, got %#v", tc.expected[j], event)
}
}
}
})
}
}
func boolToUint32(b bool) uint32 {
if b {
return 1
}
return 0
}
func encodeMenuEvent(menu xwindows.MenuEventRecord) xwindows.InputRecord {
var bts [16]byte
binary.LittleEndian.PutUint32(bts[0:4], menu.CommandID)
return xwindows.InputRecord{
EventType: xwindows.MENU_EVENT,
Event: bts,
}
}
func encodeWindowBufferSizeEvent(size xwindows.WindowBufferSizeRecord) xwindows.InputRecord {
var bts [16]byte
binary.LittleEndian.PutUint16(bts[0:2], uint16(size.Size.X))
binary.LittleEndian.PutUint16(bts[2:4], uint16(size.Size.Y))
return xwindows.InputRecord{
EventType: xwindows.WINDOW_BUFFER_SIZE_EVENT,
Event: bts,
}
}
func encodeFocusEvent(focus xwindows.FocusEventRecord) xwindows.InputRecord {
var bts [16]byte
if focus.SetFocus {
bts[0] = 1
}
return xwindows.InputRecord{
EventType: xwindows.FOCUS_EVENT,
Event: bts,
}
}
func encodeMouseEvent(mouse xwindows.MouseEventRecord) xwindows.InputRecord {
var bts [16]byte
binary.LittleEndian.PutUint16(bts[0:2], uint16(mouse.MousePositon.X))
binary.LittleEndian.PutUint16(bts[2:4], uint16(mouse.MousePositon.Y))
binary.LittleEndian.PutUint32(bts[4:8], mouse.ButtonState)
binary.LittleEndian.PutUint32(bts[8:12], mouse.ControlKeyState)
binary.LittleEndian.PutUint32(bts[12:16], mouse.EventFlags)
return xwindows.InputRecord{
EventType: xwindows.MOUSE_EVENT,
Event: bts,
}
}
func encodeKeyEvent(key xwindows.KeyEventRecord) xwindows.InputRecord {
var bts [16]byte
binary.LittleEndian.PutUint32(bts[0:4], boolToUint32(key.KeyDown))
binary.LittleEndian.PutUint16(bts[4:6], key.RepeatCount)
binary.LittleEndian.PutUint16(bts[6:8], key.VirtualKeyCode)
binary.LittleEndian.PutUint16(bts[8:10], key.VirtualScanCode)
binary.LittleEndian.PutUint16(bts[10:12], uint16(key.Char))
binary.LittleEndian.PutUint32(bts[12:16], key.ControlKeyState)
return xwindows.InputRecord{
EventType: xwindows.KEY_EVENT,
Event: bts,
}
}
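The encode helpers pack the record fields into the 16-byte Event payload using the same little-endian layout as the Windows console KEY_EVENT_RECORD, so a round trip through the KeyEvent() accessor used in the test above should hand back the original fields. A minimal sketch (illustrative, assuming the test package context and an imported fmt):

func exampleKeyEventRoundTrip() {
    rec := encodeKeyEvent(xwindows.KeyEventRecord{
        KeyDown:        true,
        Char:           'a',
        VirtualKeyCode: 'A',
        RepeatCount:    1,
    })
    key := rec.KeyEvent()
    fmt.Println(key.KeyDown, string(key.Char), key.VirtualKeyCode, key.RepeatCount) // true a 65 1
}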
// encodeSequence encodes a string of ANSI escape sequences into a slice of
// Windows input key records.
func encodeSequence(s string) (evs []xwindows.InputRecord) {
var state byte
for len(s) > 0 {
seq, _, n, newState := ansi.DecodeSequence(s, state, nil)
for i := 0; i < n; i++ {
evs = append(evs, encodeKeyEvent(xwindows.KeyEventRecord{
KeyDown: true,
Char: rune(seq[i]),
}))
}
state = newState
s = s[n:]
}
return
}
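For example (illustrative, not part of the diff), a complete three-byte CSI sequence is split into one synthetic key-down record per byte; this is how the OSC 11 color responses in the test above are fed to the parser:

func exampleEncodeSequence() {
    recs := encodeSequence("\x1b[A") // ESC [ A, a complete CSI sequence
    fmt.Println(len(recs))           // 3: one key record per byte
}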
func encodeUtf16Rune(r rune) []xwindows.InputRecord {
r1, r2 := utf16.EncodeRune(r)
return encodeUtf16Pair(r1, r2)
}
func encodeUtf16Pair(r1, r2 rune) []xwindows.InputRecord {
return []xwindows.InputRecord{
encodeKeyEvent(xwindows.KeyEventRecord{
KeyDown: true,
Char: r1,
}),
encodeKeyEvent(xwindows.KeyEventRecord{
KeyDown: true,
Char: r2,
}),
}
}
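Why two records per emoji: runes outside the Basic Multilingual Plane do not fit in a single UTF-16 code unit, so the console delivers them as a surrogate pair, which is what encodeUtf16Rune reproduces. A standalone sketch using only the standard library:

package main

import (
    "fmt"
    "unicode/utf16"
)

func main() {
    hi, lo := utf16.EncodeRune('😊')
    fmt.Printf("%#x %#x\n", hi, lo)              // 0xd83d 0xde0a
    fmt.Printf("%c\n", utf16.DecodeRune(hi, lo)) // 😊
}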

Some files were not shown because too many files have changed in this diff.