Compare commits

..

284 commits

Author SHA1 Message Date
GitHub Action
6470243095 ignore: update download stats 2025-07-18
2025-07-18 12:04:28 +00:00
GitHub Action
c8321cfbd9 ignore: update download stats 2025-07-18 2025-07-18 12:02:18 +00:00
Yihui Khuu
46c246e01f
fix: \{return} should be replaced with new line on all lines (#1119) 2025-07-18 06:22:36 -05:00
adamdotdevin
9964d8e6c0
fix: model cost overrides 2025-07-18 05:08:35 -05:00
Timo Clasen
df33143396
feat(tui): parse for file attachments when exiting EDITOR (#1117) 2025-07-18 04:47:20 -05:00
Aiden Cline
571aeaaea2
tweak: remove needless resorting (#1116) 2025-07-18 04:42:43 -05:00
Aiden Cline
edfea03917
tweak: fix [object Object] in logging (#1114) 2025-07-18 04:41:23 -05:00
Tom
81c88cc742
fix(tui): ensure viewport scrolls to bottom on new messages (#1110) 2025-07-18 04:41:03 -05:00
Mike Wallio
99b9390d80
Update to a customized beast mode v3 for opencode. (#1109)
2025-07-17 20:10:06 -05:00
Dax Raad
23c30521d8 only enable ruff if it seems to be used 2025-07-17 18:07:06 -04:00
Wendell Misiedjan
e681d610de
feat: support AWS_BEARER_TOKEN_BEDROCK for amazon bedrock provider autoloading (#1094)
2025-07-17 09:12:30 -05:00
Aiden Cline
a1fdeded3e
tweak: allow mcp servers to include headers (#1096) 2025-07-17 09:11:48 -05:00
GitHub Action
2051312d12 ignore: update download stats 2025-07-17 2025-07-17 14:07:13 +00:00
Alexander Drottsgård
20cb7a76af
feat(tui): highlight current session in sessions modal (#1093)
2025-07-17 07:40:15 -05:00
Timo Clasen
a493aec174
feat(tui): remove share commands from help if sharing is disabled (#1087) 2025-07-17 04:28:12 -05:00
Aiden Cline
3ce3ac8e61
fix: message error centering (#1085) 2025-07-17 04:27:40 -05:00
Timo Clasen
91ad64feda
fix(tui): user defined ctrl+z should take precedence over suspending (#1088) 2025-07-17 04:27:02 -05:00
Timo Clasen
60b55f9d92
feat(tui): remove sharing info from session header when sharing is disabled (#1076)
2025-07-16 17:36:48 -05:00
Timo Clasen
3c6c2bf13b
docs(share): add explicit manual share mode (#1074) 2025-07-16 16:08:25 -05:00
Aiden Cline
d4f9375548
fix: type 'reasoning' was provided without its required following item (#1072) 2025-07-16 15:59:40 -05:00
Jay V
28b39f547e docs: edit 2025-07-16 16:59:12 -04:00
Jay V
7520f5efa8 docs: update enterprise doc 2025-07-16 16:44:28 -04:00
Jay V
eb4cdf4b20 docs: config doc
2025-07-16 16:27:44 -04:00
Jay V
9f6fc1c3c5 docs: edits 2025-07-16 16:20:09 -04:00
Mike Wallio
dfede9ae6e
Remove binary file opencode (#1069) 2025-07-16 15:10:40 -05:00
Daniel Saldarriaga López
fc45c0c944
docs: fix keybinds documentation to match actual config schema (#867) 2025-07-16 15:34:52 -04:00
adamdotdevin
9d869f784c
fix(tui): expand edit calls 2025-07-16 14:33:57 -05:00
adamdotdevin
bd244f73af
fix(tui): slightly faster scroll speed 2025-07-16 14:26:46 -05:00
Dax Raad
dd34556e9c only include severity 1 diagnostics from lsp in edit tool output 2025-07-16 15:25:37 -04:00
adamdotdevin
f7dd48e60d
feat(tui): more ways to quit 2025-07-16 14:20:28 -05:00
Dax Raad
93c779cf48 docs: better variable examples 2025-07-16 14:56:24 -04:00
adamdotdevin
360c04c542
docs: copying text 2025-07-16 13:26:26 -05:00
adamdotdevin
529fd57e75
fix: missing dependency 2025-07-16 12:58:29 -05:00
adamdotdevin
faea3777e1
fix: missing dependency 2025-07-16 12:56:11 -05:00
Aiden Cline
a4664e2344
fix: generate title should use same options as model it uses to gen (#1064) 2025-07-16 12:46:52 -05:00
adamdotdevin
cdc1d8a94d
feat(tui): layout config to render full width 2025-07-16 12:43:02 -05:00
Jay V
fdd6d6600f docs: rename workflow 2025-07-16 13:38:00 -04:00
Jay V
9f44cfd595 docs: discord releases 2025-07-16 13:17:04 -04:00
Aiden Cline
70229b150c
Fix: better title generation (needs to change due to small models) (#1059) 2025-07-16 11:47:56 -05:00
John Henry Rudden
050ff943a6
Fix: Add escape sequence for @ symbols to prevent send blocking (#1029) 2025-07-16 11:18:48 -05:00
Tom
88b58fd6a0
fix: Prevent division by zero in context percentage calculation (#1055) 2025-07-16 09:35:20 -05:00
Jeremy Mack
5d67e13df5
fix: grep omitting text after a colon (#1053) 2025-07-16 09:09:05 -05:00
Adi Yeroslav
57d1a60efc
feat(tui): shift+tab to cycle modes backward (#1049) 2025-07-16 07:43:48 -05:00
Nipuna Perera
add81b9739
Enhance private npm registry support (#998) 2025-07-16 08:31:38 -04:00
GitHub Action
81bdb8e269 ignore: update download stats 2025-07-16
2025-07-16 12:04:30 +00:00
adamdotdevin
a563fdd287
fix(tui): diagnostics rendering 2025-07-16 06:55:14 -05:00
adamdotdevin
7c93bf5993
fix(tui): pending tool call width 2025-07-16 06:27:32 -05:00
adamdotdevin
6a5a4247c6
fix(gh): build 2025-07-16 06:13:43 -05:00
adamdotdevin
a39136a2a0
fix(tui): render attachments in user messages in accent color 2025-07-16 06:09:27 -05:00
adamdotdevin
9f5b59f336
chore: messages cleanup 2025-07-16 06:09:27 -05:00
adamdotdevin
01c125b058
fix(tui): faster cache algo 2025-07-16 06:09:27 -05:00
adamdotdevin
d41aa2bc72
chore(tui): simplify messages component, remove navigate, add copy last message 2025-07-16 06:09:26 -05:00
Robin Moser
f45deb37f0
fix: don't sign snapshot commits (#1046) 2025-07-16 04:46:32 -05:00
Matias Insaurralde
e89972a396
perf: move ANSI regex compilations to package level (#1040)
Signed-off-by: Matías Insaurralde <matias@insaurral.de>
2025-07-16 04:20:25 -05:00
Frank
c3c647a21a wip: github actions 2025-07-16 16:20:06 +08:00
Frank
b79167ce66 sync 2025-07-16 16:12:31 +08:00
Frank
7ac0a2bc65 wip: github actions 2025-07-16 16:05:51 +08:00
Frank
cb032cff2b
wip: github actions 2025-07-16 03:57:14 -04:00
Frank
867a69a751
wip: github actions 2025-07-16 03:54:20 -04:00
Frank
20b8efcc50 wip: github actions 2025-07-16 15:36:23 +08:00
Frank
a86d42149f wip: github actions 2025-07-16 14:59:53 +08:00
Frank
82a36acfe3 wip: github action 2025-07-16 14:59:53 +08:00
Dax Raad
0793c3f2a3 clean up export command
2025-07-15 21:50:43 -04:00
Dax Raad
5c860b0d69 fix share page v1 message 2025-07-15 21:35:32 -04:00
Dax Raad
05bb127a8e enable bash tool in plan mode 2025-07-15 21:28:03 -04:00
aron
1bbd84008f
move spoof prompt to support anthropic with custom modes (#1031) 2025-07-15 21:16:27 -04:00
Stephen Murray
fdfd4d69d3
add support for modified gemini-cli system prompt (#1033)
Co-authored-by: Dax Raad <d@ironbay.co>
2025-07-15 21:13:11 -04:00
Jay
7f659cce36
docs: Update README.md 2025-07-15 20:09:26 -04:00
Jay V
48fcaa83be docs: fix config 2025-07-15 19:54:51 -04:00
Jay V
70c16c4c95 docs: adding action to notify discord 2025-07-15 19:49:38 -04:00
Jay V
c1e1ef6eb5 docs: readme 2025-07-15 18:32:04 -04:00
Jay V
bb155db8b2 docs: share tweak copy button 2025-07-15 18:25:25 -04:00
John Henry Rudden
7c91f668d1
docs: share add copy button to messages in web interface (#902)
Co-authored-by: Jay <air@live.ca>
2025-07-15 17:56:33 -04:00
Jay V
1af103d29e docs: share handle non bundled langs 2025-07-15 17:47:22 -04:00
Jay V
8a3e581edc docs: share fix diff bugs 2025-07-15 17:47:22 -04:00
Jay V
749e7838a4 docs: share page task tool 2025-07-15 17:47:22 -04:00
Dax Raad
73b46c2bf9 docs: document base URL
2025-07-15 14:57:50 -04:00
Joe Schmitt
8bd250fb15
feat(tui): add /export command to export conversation to editor (#989)
Co-authored-by: opencode <noreply@opencode.ai>
2025-07-15 13:53:21 -05:00
Dax Raad
b1ab641905 add small model for title generation 2025-07-15 14:00:52 -04:00
adamdotdevin
76e256ed64
fix(tui): wider max width 2025-07-15 12:44:41 -05:00
adamdotdevin
4f955f2127
fix(tui): mouse scroll ansi parsing and perf 2025-07-15 12:03:30 -05:00
Aiden Cline
bbeb579d3a
tweak: (opencode run): adjust tool call rendering, reduce number of "Unknowns" (#1012) 2025-07-15 11:22:57 -05:00
Timo Clasen
f707fb3f8d
feat(tui): add keymap to remove entries from recently used models (#1019) 2025-07-15 11:20:56 -05:00
adamdotdevin
6b98acb7be
chore: update stainless defs 2025-07-15 10:03:11 -05:00
adamdotdevin
2487b18f62
chore: update stainless script to kick off prod build 2025-07-15 08:15:31 -05:00
adamdotdevin
533f64fe26
fix(tui): rework lists and search dialog 2025-07-15 08:07:26 -05:00
Dax Raad
b5c85d3806 fix logic for suprpessing snapshots in big directories 2025-07-15 09:07:04 -04:00
Dax Raad
bcf952bc8a upgrade ai sdk 2025-07-15 09:06:35 -04:00
GitHub Action
a6dc75a44c ignore: update download stats 2025-07-15 2025-07-15 12:04:28 +00:00
Joohoon Cha
416daca9c6
fix(tui): close completion dialog on ctrl+h (#1005)
2025-07-15 06:24:05 -05:00
Aiden Cline
636fe0fb64
Fix: failed to open session (#999) 2025-07-15 05:40:29 -05:00
Frank
95e0957d64 wip: github actions 2025-07-15 17:45:16 +08:00
Dax Raad
2eefdae6a9 ignore: fix types 2025-07-15 00:56:03 -04:00
Dax Raad
d62746ceb7 fix panic 2025-07-15 00:35:02 -04:00
Dax Raad
4b2ce14ff3 bring back task tool 2025-07-15 00:05:54 -04:00
Jase Kraft
294a11752e
fix: --continue pull the latest session id consistently (#918)
Co-authored-by: Dax Raad <d@ironbay.co>
2025-07-14 20:32:00 -04:00
Dax Raad
1cf1d1f634 docs: fix agents.md 2025-07-14 20:23:05 -04:00
Ryan Roden-Corrent
2ce694d41f
Add support for job-control suspend (ctrl+z/SIGSTP). (#944) 2025-07-14 20:13:46 -04:00
CodinCat
d6eff3b3a3
improve error handling and logging for GitHub API failures in upgrade and install script (#972) 2025-07-14 20:13:12 -04:00
Dax Raad
e63a6d45c1 docs: README 2025-07-14 20:10:43 -04:00
Dax Raad
93686519ba docs: README 2025-07-14 20:06:15 -04:00
Mike Wallio
f593792fb5
Standardize parameter description references in Edit and MultiEdit tools (#984) 2025-07-14 20:03:59 -04:00
Dax Raad
2cdb37c32b support anthropic console login flow 2025-07-14 18:07:55 -04:00
Timo Clasen
535d79b64c
docs: fix typo (#982) 2025-07-14 16:40:16 -04:00
Dax Raad
b4e4c3f662 wip: snapshot
2025-07-14 15:29:08 -04:00
adamdotdevin
ba676e7ae0
fix(tui): support readline nav in new search component
2025-07-14 12:20:58 -05:00
adamdotdevin
a1c8e5af45
chore: use new search component in find dialog 2025-07-14 12:15:47 -05:00
adamdotdevin
f1e7e7c138
feat(tui): even better model selector 2025-07-14 12:15:46 -05:00
Dax Raad
80b77caec0 ignore: share page fix 2025-07-14 13:13:33 -04:00
Dorian Karter
86a2ea44b5
feat(tui): add support for readline list nav (ctrl-p/ctrl-n) (#955) 2025-07-14 10:21:09 -05:00
Dax Raad
a2002c88c6 wip: update sdk 2025-07-14 11:18:08 -04:00
opencode-agent[bot]
d8bcf4f4e7
Fix issue: Option to update username shown in conversations. (#975)
Co-authored-by: opencode-agent[bot] <opencode-agent[bot]@users.noreply.github.com>
Co-authored-by: thdxr <thdxr@users.noreply.github.com>
2025-07-14 11:03:04 -04:00
Dax Raad
31e0326f78 fix init command and escape to cancel 2025-07-14 10:48:17 -04:00
adamdotdevin
a53d2ea356
fix(tui): build and bg color 2025-07-14 09:14:02 -05:00
adamdotdevin
229a280652
fix(tui): find dialog bg color 2025-07-14 09:09:55 -05:00
Nicholas Hamilton
8d0350d923
feat: ability to create new session from session dialog (#920) 2025-07-14 09:04:43 -05:00
Almir Sarajčić
4192d7eacc
Fix failing git hooks (#966) 2025-07-14 07:52:29 -05:00
Munawwar Firoz
7b8b4cf8c7
feat: ctrl+left arrow / ctrl+right arrow key support (#969) 2025-07-14 07:16:06 -05:00
Almir Sarajčić
1f4de75348
Explain usage of external references in AGENTS.md (#965) 2025-07-14 07:06:37 -05:00
GitHub Action
457755c690 ignore: update download stats 2025-07-14 2025-07-14 12:04:16 +00:00
Aiden Cline
052a1e7514
fix: file command visual bug (#959) 2025-07-14 07:03:02 -05:00
Daniel Nouri
139d6e2818
Fix clipboard on Wayland systems (#941)
Co-authored-by: Daniel Nouri <daniel@redhotcar>
2025-07-14 06:57:45 -05:00
Dax Raad
06554efdf4 get rid of cli markdown dep
2025-07-13 23:06:31 -04:00
Dax Raad
67e9bda94f ci
2025-07-13 22:58:33 -04:00
Dax Raad
53bb6b4c4f fix missing tokens 2025-07-13 22:56:29 -04:00
Dax Raad
73d54c7068 fix type error 2025-07-13 17:25:13 -04:00
Dax
90d6c4ab41
Part data model (#950) 2025-07-13 17:22:11 -04:00
opencode-agent[bot]
736396fc70
Added sharing config with auto/disabled options (#951)
Co-authored-by: opencode-agent[bot] <opencode-agent[bot]@users.noreply.github.com>
Co-authored-by: thdxr <thdxr@users.noreply.github.com>
2025-07-13 16:43:58 -04:00
Dax Raad
177bfed93e ci: github action 2025-07-13 16:22:58 -04:00
Dax Raad
91f8477ef5 wip: mcp 2025-07-13 16:22:16 -04:00
John Henry Rudden
f04a5e50ee
fix: deduplicate command suggestions (#934) 2025-07-13 14:47:26 -05:00
Aiden Cline
bb28b70700
Fix: title generation (#949) 2025-07-13 14:46:36 -05:00
Frank
7361a02ef3 wip: github actions
2025-07-13 23:59:25 +08:00
GitHub Action
d465f150fc ignore: update download stats 2025-07-13 2025-07-13 12:04:11 +00:00
Dax Raad
17fa8c117b fix packages being reinstalled on every start
2025-07-12 12:41:12 -04:00
Muzammil Khan
9aa0c40a00
feat: add more ignore patterns to the ls tool (#913) 2025-07-12 12:06:58 -04:00
GitHub Action
fd4648da17 ignore: update download stats 2025-07-12 2025-07-12 12:03:59 +00:00
Dax Raad
aadca5013a fix share page timestamps
2025-07-11 21:49:20 -04:00
Dax Raad
5c3d490e59 share page hide step-finish events 2025-07-11 21:45:56 -04:00
Dax Raad
1254f48135 fix issue preventing things from working when node_modules or package.json present in ~/ 2025-07-11 21:09:39 -04:00
Dax Raad
1729c310d9 switch global config to ~/.config/opencode/opencode.json 2025-07-11 20:51:23 -04:00
Dax Raad
0130190bbd docs: add model docs 2025-07-11 20:33:06 -04:00
Aiden Cline
97a31ddffc
tweak: plan interactions should match web (TUI) (#895) 2025-07-11 18:03:22 -04:00
zWing
3249420ad1
fix: avoid overwriting the provider.option.baseURL (#880) 2025-07-11 18:01:28 -04:00
Dax Raad
4bb8536d34 introduce cache version concept for auto cleanup when breaking cache changes happen 2025-07-11 17:50:49 -04:00
Jay
c73d4a137e
docs: Update troubleshooting.mdx 2025-07-11 17:50:25 -04:00
Dax Raad
57ac8f2741 wip: stats 2025-07-11 17:37:41 -04:00
Jay V
2f1acee5a1 docs: share page add time footer back 2025-07-11 14:24:20 -04:00
Jay V
9ca54020ac docs: share page mobile bugs 2025-07-11 14:24:20 -04:00
Jay V
f7d44b178b docs: share fix mobile diffs 2025-07-11 14:24:20 -04:00
Sergii Kozak
b4950a157c
fix(session): add fallback for undefined output token limit (#860)
Co-authored-by: opencode <noreply@opencode.ai>
2025-07-11 10:55:13 -04:00
alexz
dfbef066c7
fix: ENAMETOOLONG: name too long when adding custom mode (#881) 2025-07-11 10:54:52 -04:00
GitHub Action
26fd76fbee ignore: update download stats 2025-07-11 2025-07-11 12:04:08 +00:00
adamdotdevin
04769d8a26
fix(tui): help commands bg color 2025-07-11 06:03:21 -05:00
adamdotdevin
34b576d9b5
fix(tui): don't include /mode trigger 2025-07-11 06:01:51 -05:00
adamdotdevin
22b244f847
fix(tui): actually fix mouse ansi codes leaking 2025-07-11 06:00:20 -05:00
Aiden Cline
7e1fc275e7
fix: avoid worker exception, graceful 404 (#869)
2025-07-11 04:55:56 -05:00
Frank
3b9b391320 wip: github actions
2025-07-11 06:55:13 +08:00
Frank
766bfd025c wip: github actions 2025-07-11 05:23:24 +08:00
Jay V
c7f30e1065 docs: share page fix terminal part 2025-07-10 17:21:21 -04:00
Frank
1c4fd7f28f Api: add endpoint for getting github app token 2025-07-11 05:01:27 +08:00
adamdotdevin
85805d2c38
fix(tui): handle SIGTERM, closes #319 2025-07-10 15:59:03 -05:00
Timo Clasen
982cb3e71a
fix(tui): center help dilaog (#853) 2025-07-10 15:56:19 -05:00
adamdotdevin
294d0e7ee3
fix(tui): mouse wheel ansi codes leaking into editor 2025-07-10 15:49:58 -05:00
Jay V
8be1ca836c docs: fix diag styles 2025-07-10 16:38:51 -04:00
Jay V
2e5f96fa41 docs: share page attachment 2025-07-10 16:38:51 -04:00
Dax Raad
c056b0add9 add step finish part 2025-07-10 16:25:38 -04:00
Dax Raad
b00bb3c083 run: properly close session.list 2025-07-10 16:13:01 -04:00
Dax Raad
d9befd3aa6 disable filewatcher, fixes file descriptor leak 2025-07-10 15:58:45 -04:00
Dax Raad
49de703ba1 config: escape file: string content 2025-07-10 15:38:58 -04:00
Dax Raad
22988894c8 ci: slow down stats 2025-07-10 15:31:06 -04:00
adamdotdevin
34b1754f25
docs: clipboard requirements on linux 2025-07-10 13:12:37 -05:00
adamdotdevin
54fe3504ba
feat(tui): accent editor border on leader key 2025-07-10 12:57:22 -05:00
Jay V
d2c862e32d docs: edit local models 2025-07-10 13:49:24 -04:00
Jay V
afc53afb35 docs: edit mode 2025-07-10 13:29:37 -04:00
Gabriel Garrett
b56e49c5dc
Adds real example in docs of how to configure custom provider (#840)
2025-07-10 13:29:30 -04:00
Aiden Cline
8b2a909e1f
fix: encode & decode file paths (#843) 2025-07-10 11:19:54 -05:00
Jay V
e9c954d45e docs: add modes to sidebar 2025-07-10 12:07:44 -04:00
Jay V
6f449d13af docs: add modes to sidebar 2025-07-10 12:07:18 -04:00
Dax Raad
6e375bef0d docs: modes 2025-07-10 11:53:28 -04:00
Dax Raad
67106a6967 docs: add config variable docs 2025-07-10 11:48:55 -04:00
Dax Raad
b5d690620d support env and file pointers in config 2025-07-10 11:45:31 -04:00
Dax Raad
9db3ce1d0b opencode run respects mode 2025-07-10 11:28:28 -04:00
Dax Raad
1cc55b68ef wip: scrap 2025-07-10 11:25:37 -04:00
Dax Raad
469f667774 set max output token limit to 32_000 2025-07-10 11:25:37 -04:00
adamdottv
6603d9a9f0
feat: --mode flag passed to tui 2025-07-10 10:19:25 -05:00
adamdottv
5dc1920a4c
feat: mode flag in cli run command 2025-07-10 10:13:15 -05:00
adamdottv
d3e5f3f3a8
feat(tui): add token and cost info to session header 2025-07-10 10:06:51 -05:00
adamdottv
ce4cb820f7
feat(tui): modes 2025-07-10 10:06:51 -05:00
Dax Raad
ba5be6b625 make LSP lazy again 2025-07-10 09:37:40 -04:00
adamdottv
f95c3f4177
fix(tui): fouc in textarea on app load 2025-07-10 08:20:17 -05:00
adamdottv
d2b1307bff
fix(tui): textarea cursor sync issues with attachments 2025-07-10 07:49:36 -05:00
adamdottv
b40ba32adc
fix(tui): textarea issues 2025-07-10 07:38:57 -05:00
GitHub Action
ce0cebb7d7 ignore: update download stats 2025-07-10 2025-07-10 12:04:15 +00:00
Dax Raad
f478f89a68 temporary grok 4 patch 2025-07-10 07:57:55 -04:00
Dax Raad
85d95f0f2b disable lsp on non-git folders 2025-07-10 07:39:02 -04:00
Dax Raad
1515efc77c fix session is busy error 2025-07-10 07:27:03 -04:00
Josh Medeski
6d393759e1
feat(tui): subsitute cwd home path on status bar (#808) 2025-07-10 06:12:19 -05:00
Adi Yeroslav
a1701678cd
feat(tui): /editor - change the auto-send behavior to put content in input box instead (#827) 2025-07-10 05:57:52 -05:00
Timo Clasen
c411a26d6f
feat(tui): hide cost if using subscription model (#828) 2025-07-10 05:56:36 -05:00
adamdottv
85dbfeb314
feat(tui): @symbol attachments 2025-07-10 05:53:00 -05:00
Dax Raad
085c0e4e2b respect go.work when spawning LSP
2025-07-09 22:54:47 -04:00
Dax Raad
8404a97c3e better detection of prettier formatter 2025-07-09 22:37:31 -04:00
Dax Raad
0ee3b1ede2 do not wait for LSP to be fully ready 2025-07-09 21:59:38 -04:00
Dax Raad
a826936702 modes concept 2025-07-09 21:59:38 -04:00
Jay V
fd4a5d5a63 docs: share doc edit
2025-07-09 20:26:31 -04:00
Jay V
69cf1d7b7e docs: share doc 2025-07-09 20:24:09 -04:00
Jay V
8e0a1d1167 docs: edit troubleshooting 2025-07-09 19:55:14 -04:00
Timo Clasen
f22021187d
feat(tui): treat pasted text file paths as file references (#809) 2025-07-09 18:37:39 -05:00
Jay V
febecc348a docs: enterprise doc 2025-07-09 15:46:57 -04:00
Jay V
c5ccfc3e94 docs: share page last part fix 2025-07-09 15:46:57 -04:00
Mike Wallio
1f6efc6b94
Add gpt-4.1 beast prompt (#778)
Co-authored-by: Dax Raad <d@ironbay.co>
2025-07-09 12:11:54 -04:00
Frank Denis
727fe6f942
LSP: fix SimpleRoots to actually search in the root directory (#795) 2025-07-09 10:35:06 -05:00
Dax Raad
a91e79382e ci: remove checked in config.schema.json 2025-07-09 11:30:42 -04:00
Dax Raad
5c626e0a2f ci: generate config schema as part of build
Some checks are pending
deploy / deploy (push) Waiting to run
2025-07-09 11:25:58 -04:00
adamdottv
8e9e383219
chore: troubleshooting docs 2025-07-09 10:12:36 -05:00
Dax Raad
f383008cc1 lsp: spawn only a single tsserver in project root 2025-07-09 11:06:44 -04:00
adamdottv
303ade25ed
feat: discord redirect 2025-07-09 10:01:42 -05:00
adamdottv
53f8e7850e
feat: configurable log levels 2025-07-09 10:00:03 -05:00
adamdottv
ca8ce88354
feat(tui): move logging to server logs 2025-07-09 08:16:10 -05:00
adamdottv
37a86439c4
fix(tui): don't panic on missing linux clipboard tool 2025-07-09 06:51:58 -05:00
adamdottv
269b43f4de
fix(tui): markdown wrapping off sometimes 2025-07-09 06:41:53 -05:00
adamdottv
3f25e5bf86
chore: internal clipboard package 2025-07-09 04:55:24 -05:00
Aiden Cline
67765fa47c
tweak: keep completion options open when trigger is still present (#789) 2025-07-09 04:42:31 -05:00
adamdottv
58b1c58bc5
fix(tui): clear command priority
2025-07-08 19:26:50 -05:00
Dax Raad
d80badc50f ci: ignore chore commits 2025-07-08 20:05:33 -04:00
Dax Raad
75279e5ccf wip: symbols endpoint 2025-07-08 20:05:33 -04:00
Yihui Khuu
7893b84614
Add debounce before exit when using non-leader exit command (#759) 2025-07-08 18:53:38 -05:00
Dax Raad
cfc715bd48 wip: remove excess import 2025-07-08 19:51:09 -04:00
adamdottv
39bcba85a9
chore: vendor clipboard into go package 2025-07-08 18:48:40 -05:00
adamdottv
da3df51316
chore: remove clipboard temp 2025-07-08 18:47:59 -05:00
adamdottv
12190e4efc
chore: vendor clipboard into go package 2025-07-08 18:46:42 -05:00
Aiden Cline
d2a9b2f64a
fix: documentation typo (#781) 2025-07-08 18:30:46 -05:00
adamdottv
aacadd8a8a
fix(tui): panic when reading/writing clipboard on linux 2025-07-08 18:29:45 -05:00
Jay V
969154a473 docs: share page image
2025-07-08 19:24:21 -04:00
Jay V
4d6ca3fab1 docs: share page many model case 2025-07-08 19:08:33 -04:00
Dax Raad
00ea5082e7 add typescript lsp timeout if it fails to start 2025-07-08 18:33:12 -04:00
Dax Raad
4a878b88c0 properly load typescript lsp in subpaths 2025-07-08 18:18:45 -04:00
Dax Raad
6de955847c big rework of LSP system 2025-07-08 18:14:49 -04:00
Jay V
3ba5d528b4 docs: share bugs 2025-07-08 18:14:36 -04:00
Jay V
f99e2b3429 docs: share error part 2025-07-08 18:00:08 -04:00
Jay V
7e4e6f6e51 docs: share page bugs 2025-07-08 17:18:38 -04:00
Jay V
0514f3f43b docs: share image model 2025-07-08 17:18:38 -04:00
Timo Clasen
1e07384364
fix: make compact command interruptible (#691)
Co-authored-by: GitHub Action <action@github.com>
2025-07-08 15:37:25 -05:00
strager
4c4739c422
fix(tool): fix ripgrep invocation on Windows (#700)
Co-authored-by: Adam <2363879+adamdotdevin@users.noreply.github.com>
2025-07-08 15:36:26 -05:00
Rami Chowdhury
2d8b90a6ff
feat(storage): ensure storage directory exists and handle paths correctly (#771) 2025-07-08 15:34:11 -05:00
Robb Currall
a2fa7ffa42
fix: support cancelled task state (#775) 2025-07-08 15:33:39 -05:00
Frank Denis
f7d6175283
Add support for the Zig Language Server (ZLS) (#756) 2025-07-08 15:31:11 -05:00
Tommy
9ed187ee52
docs: add terminal requirements (#708) 2025-07-08 15:30:05 -05:00
Gal Schlezinger
14d81e574b
[config json schema] declare default values and examples for in-ide documentation (#754) 2025-07-08 15:29:07 -05:00
adamdottv
6efe8cc8df
fix: env has to be string 2025-07-08 14:59:03 -05:00
adamdottv
daa5fc916a
fix(tui): pasting causes panic on macos 2025-07-08 14:57:17 -05:00
adamdottv
c659496b96
fix(tui): model/provider arg parsing 2025-07-08 14:11:57 -05:00
Timo Clasen
21fbf21cb6
fix(copilot): add vision request header (#773) 2025-07-08 14:01:54 -05:00
adamdottv
f31cbf2744
fix: image reading 2025-07-08 13:02:13 -05:00
Aiden Cline
8322f18e03
fix: display errors when using opencode run ... (#751)
2025-07-08 10:38:11 -05:00
adamdottv
562bdb95e2
fix: include symlinks in ripgrep searches 2025-07-08 10:02:19 -05:00
Dax
a57ce8365d
Update STATS.md 2025-07-08 10:30:02 -04:00
adamdottv
0da83ae67e
feat(tui): command aliases 2025-07-08 08:20:55 -05:00
adamdottv
662d022a48
feat(tui): paste images and pdfs 2025-07-08 08:09:01 -05:00
GitHub Action
9efef03919 ignore: update download stats 2025-07-08 2025-07-08 12:04:27 +00:00
GitHub Action
7a9fb3fa92 ignore: update download stats 2025-07-08 2025-07-08 10:51:06 +00:00
adamdottv
ea96ead346
feat(tui): handle --model and --prompt flags 2025-07-08 05:50:18 -05:00
Dax Raad
6100a77b85 start file watcher only for tui
2025-07-07 21:05:04 -04:00
Dax Raad
c7a59ee2b1 better handling of aborting sessions 2025-07-07 20:59:00 -04:00
Jay V
a272b58fe9 docs: intro
2025-07-07 17:41:46 -04:00
Dax Raad
9948fcf1b6 fix crash when running on new project 2025-07-07 17:39:52 -04:00
Dax Raad
0d50c867ff fix mcp tools corrupting session 2025-07-07 17:05:16 -04:00
Dax Raad
27f7e02f12 run: truncate prompt 2025-07-07 16:41:42 -04:00
Jay V
0f93ecd564 docs: canonical url 2025-07-07 16:37:00 -04:00
Dax Raad
da909d9684 append piped stdin to prompt 2025-07-07 16:33:21 -04:00
Jay V
facd851b11 docs: dynamic domain 2025-07-07 16:31:15 -04:00
Dax Raad
c51de945a5 Add stdin support to run command
Allow piping content to opencode run when no message arguments are provided, enabling standard Unix pipe patterns for better CLI integration.

🤖 Generated with [opencode](https://opencode.ai)

Co-Authored-By: opencode <noreply@opencode.ai>
2025-07-07 16:29:13 -04:00
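In practice this means `opencode run` can take its prompt from a pipe. A rough TypeScript sketch of the idea; the helper name and wiring are hypothetical and not copied from the actual source, only `Bun.stdin` and `process.stdin.isTTY` are standard APIs:

```ts
// Hypothetical sketch: append piped stdin to the prompt when input is not a TTY.
async function prompt(args: string[]): Promise<string> {
  const typed = args.join(" ")
  if (process.stdin.isTTY) return typed // interactive terminal: use arguments only
  const piped = (await Bun.stdin.text()).trim() // e.g. `git diff | opencode run "review this"`
  if (!piped) return typed
  return typed ? `${typed}\n\n${piped}` : piped
}
```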
Jay V
9253a3ca9e docs: debug 2025-07-07 16:26:23 -04:00
Dax Raad
7cfa297a78 wip: model and prompt flags for tui 2025-07-07 16:24:37 -04:00
Jay V
661b74def6 docs: debug info 2025-07-07 16:13:26 -04:00
Dax Raad
b478e5655c fix interrupt 2025-07-07 16:12:47 -04:00
Dax
f884766445
v2 message format and upgrade to ai sdk v5 (#743)
Co-authored-by: GitHub Action <action@github.com>
Co-authored-by: Liang-Shih Lin <liangshihlin@proton.me>
Co-authored-by: Dominik Engelhardt <dominikengelhardt@ymail.com>
Co-authored-by: Jay V <air@live.ca>
Co-authored-by: adamdottv <2363879+adamdottv@users.noreply.github.com>
2025-07-07 15:53:43 -04:00
Jay V
76b2e4539c docs: discord 2025-07-07 14:44:37 -04:00
Dominik Engelhardt
d87922c0eb
Fix Elixir LSP startup (#726)
2025-07-06 23:37:46 -04:00
Liang-Shih Lin
2446483df5
fix: Skip opencode upgrade if same version (#720) 2025-07-06 23:36:59 -04:00
GitHub Action
f4c453155d Update download stats 2025-07-06
2025-07-06 12:03:56 +00:00
Dax Raad
969ad80ed2 fix openrouter caching with anthropic, should be a lot cheaper
2025-07-05 11:39:54 -04:00
GitHub Action
af064b41d7 Update download stats 2025-07-05
2025-07-05 12:03:56 +00:00
252 changed files with 21899 additions and 10613 deletions

14
.github/workflows/notify-discord.yml vendored Normal file

@@ -0,0 +1,14 @@
name: discord
on:
  release:
    types: [published] # fires only when a release is published
jobs:
  notify:
    runs-on: ubuntu-latest
    steps:
      - name: Send nicely-formatted embed to Discord
        uses: SethCohen/github-releases-to-discord@v1
        with:
          webhook_url: ${{ secrets.DISCORD_WEBHOOK }}

24
.github/workflows/opencode.yml vendored Normal file

@@ -0,0 +1,24 @@
name: opencode
on:
  issue_comment:
    types: [created]
jobs:
  opencode:
    if: startsWith(github.event.comment.body, 'hey opencode')
    runs-on: ubuntu-latest
    permissions:
      id-token: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 1
      - name: Run opencode
        uses: sst/opencode/sdks/github@github-v1
        env:
          ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
        with:
          model: anthropic/claude-sonnet-4-20250514


@@ -0,0 +1,29 @@
name: publish-github-action
on:
  workflow_dispatch:
concurrency: ${{ github.workflow }}-${{ github.ref }}
permissions:
  contents: write
jobs:
  publish:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - run: git fetch --force --tags
      - uses: oven-sh/setup-bun@v2
        with:
          bun-version: 1.2.17
      - name: Publish
        run: |
          git config --global user.email "opencode@sst.dev"
          git config --global user.name "opencode"
          ./scripts/publish-github-action.ts


@@ -28,5 +28,5 @@ jobs:
           git config --local user.email "action@github.com"
           git config --local user.name "GitHub Action"
           git add STATS.md
-          git diff --staged --quiet || git commit -m "Update download stats $(date -I)"
+          git diff --staged --quiet || git commit -m "ignore: update download stats $(date -I)"
           git push

15
AGENTS.md Normal file

@ -0,0 +1,15 @@
# TUI Agent Guidelines
## Style
- prefer single word variable/function names
- avoid try catch where possible - prefer to let exceptions bubble up
- avoid else statements where possible
- do not make useless helper functions - inline functionality unless the
function is reusable or composable
- prefer Bun apis
## Workflow
- you can regenerate the golang sdk by calling ./scripts/stainless.ts
- we use bun for everything
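Purely for illustration, a tiny TypeScript snippet in the spirit of these guidelines (single-word names, Bun APIs, early returns instead of else, no try/catch); it is not taken from the repo:

```ts
// Illustrative only: load a JSON file with Bun APIs; errors bubble up to the caller.
export async function load(path: string) {
  const file = Bun.file(path)
  if (!(await file.exists())) return {}
  return file.json()
}
```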


@@ -9,7 +9,7 @@
 </p>
 <p align="center">AI coding agent, built for the terminal.</p>
 <p align="center">
-  <a href="https://opencode.ai/docs"><img alt="View docs" src="https://img.shields.io/badge/view-docs-blue?style=flat-square" /></a>
+  <a href="https://opencode.ai/discord"><img alt="Discord" src="https://img.shields.io/discord/1391832426048651334?style=flat-square&label=discord" /></a>
   <a href="https://www.npmjs.com/package/opencode-ai"><img alt="npm" src="https://img.shields.io/npm/v/opencode-ai?style=flat-square" /></a>
   <a href="https://github.com/sst/opencode/actions/workflows/publish.yml"><img alt="Build status" src="https://img.shields.io/github/actions/workflow/status/sst/opencode/publish.yml?style=flat-square&branch=dev" /></a>
 </p>
@@ -30,7 +30,8 @@ brew install sst/tap/opencode # macOS
 paru -S opencode-bin # Arch Linux
 ```
-> **Note:** Remove versions older than 0.1.x before installing
+> [!TIP]
+> Remove versions older than 0.1.x before installing.
 ### Documentation
@@ -38,10 +39,25 @@ For more info on how to configure opencode [**head over to our docs**](https://o
 ### Contributing
-For any new features we'd appreciate it if you could open an issue first to discuss what you'd like to implement. We're pretty responsive there and it'll save you from working on something that we don't end up using. No need to do this for simpler fixes.
+opencode is an opinionated tool so any fundamental feature needs to go through a
+design process with the core team.
-> **Note**: Please talk to us via github issues before spending time working on
-> a new feature
+> [!IMPORTANT]
+> We do not accept PRs for core features.
+However we still merge a ton of PRs - you can contribute:
+- Bug fixes
+- Improvements to LLM performance
+- Support for new providers
+- Fixes for env specific quirks
+- Missing standard behavior
+- Documentation
+Take a look at the git history to see what kind of PRs we end up merging.
+> [!NOTE]
+> If you do not follow the above guidelines we might close your PR.
 To run opencode locally you need.
@@ -76,4 +92,4 @@ The other confusingly named repo has no relation to this one. You can [read the
 ---
-**Join our community** [YouTube](https://www.youtube.com/c/sst-dev) | [X.com](https://x.com/SST_dev)
+**Join our community** [Discord](https://discord.gg/opencode) | [YouTube](https://www.youtube.com/c/sst-dev) | [X.com](https://x.com/SST_dev)


@@ -1,10 +1,23 @@
 # Download Stats
 | Date       | GitHub Downloads | npm Downloads    | Total             |
-| ---------- | ---------------- | --------------- | --------------- |
+| ---------- | ---------------- | ---------------- | ----------------- |
 | 2025-06-29 | 18,789 (+0)      | 39,420 (+0)      | 58,209 (+0)       |
 | 2025-06-30 | 20,127 (+1,338)  | 41,059 (+1,639)  | 61,186 (+2,977)   |
 | 2025-07-01 | 22,108 (+1,981)  | 43,745 (+2,686)  | 65,853 (+4,667)   |
 | 2025-07-02 | 24,814 (+2,706)  | 46,168 (+2,423)  | 70,982 (+5,129)   |
 | 2025-07-03 | 27,834 (+3,020)  | 49,955 (+3,787)  | 77,789 (+6,807)   |
 | 2025-07-04 | 30,608 (+2,774)  | 54,758 (+4,803)  | 85,366 (+7,577)   |
+| 2025-07-05 | 32,524 (+1,916)  | 58,371 (+3,613)  | 90,895 (+5,529)   |
+| 2025-07-06 | 33,766 (+1,242)  | 59,694 (+1,323)  | 93,460 (+2,565)   |
+| 2025-07-08 | 38,052 (+4,286)  | 64,468 (+4,774)  | 102,520 (+9,060)  |
+| 2025-07-10 | 43,796 (+5,744)  | 71,402 (+6,934)  | 115,198 (+12,678) |
+| 2025-07-11 | 46,982 (+3,186)  | 77,462 (+6,060)  | 124,444 (+9,246)  |
+| 2025-07-12 | 49,302 (+2,320)  | 82,177 (+4,715)  | 131,479 (+7,035)  |
+| 2025-07-13 | 50,803 (+1,501)  | 86,394 (+4,217)  | 137,197 (+5,718)  |
+| 2025-07-14 | 53,283 (+2,480)  | 87,860 (+1,466)  | 141,143 (+3,946)  |
+| 2025-07-15 | 57,590 (+4,307)  | 91,036 (+3,176)  | 148,626 (+7,483)  |
+| 2025-07-16 | 62,313 (+4,723)  | 95,258 (+4,222)  | 157,571 (+8,945)  |
+| 2025-07-17 | 66,684 (+4,371)  | 100,048 (+4,790) | 166,732 (+9,161)  |
+| 2025-07-18 | 70,379 (+3,695)  | 102,587 (+2,539) | 172,966 (+6,234)  |
+| 2025-07-18 | 70,380 (+1)      | 102,587 (+0)     | 172,967 (+1)      |

578
bun.lock

File diff suppressed because it is too large


@@ -4,6 +4,8 @@ export const domain = (() => {
   return `${$app.stage}.dev.opencode.ai`
 })()
+const GITHUB_APP_ID = new sst.Secret("GITHUB_APP_ID")
+const GITHUB_APP_PRIVATE_KEY = new sst.Secret("GITHUB_APP_PRIVATE_KEY")
 const bucket = new sst.cloudflare.Bucket("Bucket")
 export const api = new sst.cloudflare.Worker("Api", {
@@ -13,7 +15,7 @@ export const api = new sst.cloudflare.Worker("Api", {
     WEB_DOMAIN: domain,
   },
   url: true,
-  link: [bucket],
+  link: [bucket, GITHUB_APP_ID, GITHUB_APP_PRIVATE_KEY],
   transform: {
     worker: (args) => {
       args.logpush = true
@@ -39,6 +41,8 @@ new sst.cloudflare.x.Astro("Web", {
   domain,
   path: "packages/web",
   environment: {
+    // For astro config
+    SST_STAGE: $app.stage,
     VITE_API_URL: api.url,
   },
 })


@@ -48,7 +48,7 @@ if [ -z "$requested_version" ]; then
     url="https://github.com/sst/opencode/releases/latest/download/$filename"
     specific_version=$(curl -s https://api.github.com/repos/sst/opencode/releases/latest | awk -F'"' '/"tag_name": "/ {gsub(/^v/, "", $4); print $4}')
-    if [[ $? -ne 0 ]]; then
+    if [[ $? -ne 0 || -z "$specific_version" ]]; then
        echo "${RED}Failed to fetch version information${NC}"
        exit 1
    fi
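For clarity, the same lookup sketched in TypeScript; the GitHub releases endpoint and `tag_name` field are exactly what the script above queries, the function itself is illustrative:

```ts
// Illustrative sketch of the version lookup the install script performs:
// fetch the latest release and strip the leading "v" from tag_name.
// Failing on an empty value mirrors the new `-z "$specific_version"` guard.
async function latest(): Promise<string> {
  const res = await fetch("https://api.github.com/repos/sst/opencode/releases/latest")
  if (!res.ok) throw new Error("Failed to fetch version information")
  const data = (await res.json()) as { tag_name?: string }
  const version = data.tag_name?.replace(/^v/, "") ?? ""
  if (!version) throw new Error("Failed to fetch version information")
  return version
}
```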


@@ -1,19 +1,9 @@
 {
   "$schema": "https://opencode.ai/config.json",
-  "experimental": {
-    "hook": {
-      "file_edited": {
-        ".json": [
-          {
-            "command": ["bun", "run", "prettier", "$FILE"]
-          }
-        ]
-      },
-      "session_completed": [
-        {
-          "command": ["touch", "./node_modules/foo"]
-        }
-      ]
-    }
+  "mcp": {
+    "weather": {
+      "type": "local",
+      "command": ["opencode", "x", "@h1deya/mcp-server-weather"]
+    }
   }
 }


@@ -7,7 +7,7 @@
   "scripts": {
     "dev": "bun run packages/opencode/src/index.ts",
     "typecheck": "bun run --filter='*' typecheck",
-    "stainless": "bun run ./packages/opencode/src/index.ts serve ",
+    "stainless": "./scripts/stainless",
     "postinstall": "./scripts/hooks"
   },
   "workspaces": {
@@ -17,8 +17,8 @@
     "catalog": {
       "typescript": "5.8.2",
       "@types/node": "22.13.9",
-      "zod": "3.24.2",
-      "ai": "4.3.16"
+      "zod": "3.25.49",
+      "ai": "5.0.0-beta.21"
     }
   },
   "devDependencies": {
@@ -31,17 +31,13 @@
   },
   "license": "MIT",
   "prettier": {
-    "semi": false
-  },
-  "overrides": {
-    "zod": "3.24.2"
+    "semi": false,
+    "printWidth": 120
   },
   "trustedDependencies": [
     "esbuild",
     "protobufjs",
     "sharp"
   ],
-  "patchedDependencies": {
-    "ai@4.3.16": "patches/ai@4.3.16.patch"
-  }
+  "patchedDependencies": {}
 }


@@ -8,5 +8,10 @@
     "@cloudflare/workers-types": "4.20250522.0",
     "typescript": "catalog:",
     "@types/node": "catalog:"
+  },
+  "dependencies": {
+    "@octokit/auth-app": "8.0.1",
+    "@octokit/rest": "22.0.0",
+    "jose": "6.0.11"
   }
 }


@ -1,5 +1,9 @@
import { DurableObject } from "cloudflare:workers" import { DurableObject } from "cloudflare:workers"
import { randomUUID } from "node:crypto" import { randomUUID } from "node:crypto"
import { jwtVerify, createRemoteJWKSet } from "jose"
import { createAppAuth } from "@octokit/auth-app"
import { Octokit } from "@octokit/rest"
import { Resource } from "sst"
type Env = { type Env = {
SYNC_SERVER: DurableObjectNamespace<SyncServer> SYNC_SERVER: DurableObjectNamespace<SyncServer>
@ -40,7 +44,8 @@ export class SyncServer extends DurableObject<Env> {
const sessionID = await this.getSessionID() const sessionID = await this.getSessionID()
if ( if (
!key.startsWith(`session/info/${sessionID}`) && !key.startsWith(`session/info/${sessionID}`) &&
!key.startsWith(`session/message/${sessionID}/`) !key.startsWith(`session/message/${sessionID}/`) &&
!key.startsWith(`session/part/${sessionID}/`)
) )
return new Response("Error: Invalid key", { status: 400 }) return new Response("Error: Invalid key", { status: 400 })
@ -70,7 +75,7 @@ export class SyncServer extends DurableObject<Env> {
} }
public async getData() { public async getData() {
const data = await this.ctx.storage.list() const data = (await this.ctx.storage.list()) as Map<string, any>
return Array.from(data.entries()) return Array.from(data.entries())
.filter(([key, _]) => key.startsWith("session/")) .filter(([key, _]) => key.startsWith("session/"))
.map(([key, content]) => ({ key, content })) .map(([key, content]) => ({ key, content }))
@ -107,7 +112,7 @@ export class SyncServer extends DurableObject<Env> {
} }
export default { export default {
async fetch(request: Request, env: Env, ctx: ExecutionContext) { async fetch(request: Request, env: Env, ctx: ExecutionContext): Promise<Response> {
const url = new URL(request.url) const url = new URL(request.url)
const splits = url.pathname.split("/") const splits = url.pathname.split("/")
const method = splits[1] const method = splits[1]
@ -184,8 +189,7 @@ export default {
} }
const id = url.searchParams.get("id") const id = url.searchParams.get("id")
console.log("share_poll", id) console.log("share_poll", id)
if (!id) if (!id) return new Response("Error: Share ID is required", { status: 400 })
return new Response("Error: Share ID is required", { status: 400 })
const stub = env.SYNC_SERVER.get(env.SYNC_SERVER.idFromName(id)) const stub = env.SYNC_SERVER.get(env.SYNC_SERVER.idFromName(id))
return stub.fetch(request) return stub.fetch(request)
} }
@ -193,8 +197,7 @@ export default {
if (request.method === "GET" && method === "share_data") { if (request.method === "GET" && method === "share_data") {
const id = url.searchParams.get("id") const id = url.searchParams.get("id")
console.log("share_data", id) console.log("share_data", id)
if (!id) if (!id) return new Response("Error: Share ID is required", { status: 400 })
return new Response("Error: Share ID is required", { status: 400 })
const stub = env.SYNC_SERVER.get(env.SYNC_SERVER.idFromName(id)) const stub = env.SYNC_SERVER.get(env.SYNC_SERVER.idFromName(id))
const data = await stub.getData() const data = await stub.getData()
@ -208,8 +211,13 @@ export default {
return return
} }
if (type === "message") { if (type === "message") {
const [, messageID] = splits messages[d.content.id] = {
messages[messageID] = d.content parts: [],
...d.content,
}
}
if (type === "part") {
messages[d.content.messageID].parts.push(d.content)
} }
}) })
@ -223,5 +231,95 @@ export default {
}, },
) )
} }
/**
* Used by the GitHub action to get GitHub installation access token given the OIDC token
*/
if (request.method === "POST" && method === "exchange_github_app_token") {
const EXPECTED_AUDIENCE = "opencode-github-action"
const GITHUB_ISSUER = "https://token.actions.githubusercontent.com"
const JWKS_URL = `${GITHUB_ISSUER}/.well-known/jwks`
// get Authorization header
const authHeader = request.headers.get("Authorization")
const token = authHeader?.replace(/^Bearer /, "")
if (!token)
return new Response(JSON.stringify({ error: "Authorization header is required" }), {
status: 401,
headers: { "Content-Type": "application/json" },
})
// verify token
const JWKS = createRemoteJWKSet(new URL(JWKS_URL))
let owner, repo
try {
const { payload } = await jwtVerify(token, JWKS, {
issuer: GITHUB_ISSUER,
audience: EXPECTED_AUDIENCE,
})
const sub = payload.sub // e.g. 'repo:my-org/my-repo:ref:refs/heads/main'
const parts = sub.split(":")[1].split("/")
owner = parts[0]
repo = parts[1]
} catch (err) {
console.error("Token verification failed:", err)
return new Response(JSON.stringify({ error: "Invalid or expired token" }), {
status: 403,
headers: { "Content-Type": "application/json" },
})
}
// Create app JWT token
const auth = createAppAuth({
appId: Resource.GITHUB_APP_ID.value,
privateKey: Resource.GITHUB_APP_PRIVATE_KEY.value,
})
const appAuth = await auth({ type: "app" })
// Lookup installation
const octokit = new Octokit({ auth: appAuth.token })
const { data: installation } = await octokit.apps.getRepoInstallation({ owner, repo })
// Get installation token
const installationAuth = await auth({ type: "installation", installationId: installation.id })
return new Response(JSON.stringify({ token: installationAuth.token }), {
headers: { "Content-Type": "application/json" },
})
}
/**
* Used by the opencode CLI to check if the GitHub app is installed
*/
if (request.method === "GET" && method === "get_github_app_installation") {
const owner = url.searchParams.get("owner")
const repo = url.searchParams.get("repo")
const auth = createAppAuth({
appId: Resource.GITHUB_APP_ID.value,
privateKey: Resource.GITHUB_APP_PRIVATE_KEY.value,
})
const appAuth = await auth({ type: "app" })
// Lookup installation
const octokit = new Octokit({ auth: appAuth.token })
let installation
try {
const ret = await octokit.apps.getRepoInstallation({ owner, repo })
installation = ret.data
} catch (err) {
if (err instanceof Error && err.message.includes("Not Found")) {
// not installed
} else {
throw err
}
}
return new Response(JSON.stringify({ installation }), {
headers: { "Content-Type": "application/json" },
})
}
return new Response("Not Found", { status: 404 })
}, },
} }
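For orientation, a hedged sketch of how the GitHub action side might call the new `exchange_github_app_token` route: the route name, Bearer OIDC auth, audience, and `{ token }` response shape come from the handler above, while the API base URL and the use of `@actions/core` here are assumptions.

```ts
// Hypothetical client for the handler above: a GitHub Actions step exchanges its
// OIDC token (audience "opencode-github-action", enabled by `id-token: write`)
// for an app installation token. The base URL below is an assumed placeholder.
import * as core from "@actions/core"

async function installation(api = "https://api.opencode.ai") {
  const oidc = await core.getIDToken("opencode-github-action") // real @actions/core API
  const res = await fetch(`${api}/exchange_github_app_token`, {
    method: "POST",
    headers: { Authorization: `Bearer ${oidc}` },
  })
  if (!res.ok) throw new Error(`token exchange failed: ${res.status}`)
  const { token } = (await res.json()) as { token: string }
  return token
}
```

The returned installation token can then be used as a regular GitHub App credential; the `get_github_app_installation` route above serves the complementary check of whether the app is installed on a given repo.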


@@ -6,18 +6,26 @@
 import "sst"
 declare module "sst" {
   export interface Resource {
-    Web: {
-      type: "sst.cloudflare.Astro"
-      url: string
+    "GITHUB_APP_ID": {
+      "type": "sst.sst.Secret"
+      "value": string
+    }
+    "GITHUB_APP_PRIVATE_KEY": {
+      "type": "sst.sst.Secret"
+      "value": string
+    }
+    "Web": {
+      "type": "sst.cloudflare.Astro"
+      "url": string
     }
   }
 }
 // cloudflare
-import * as cloudflare from "@cloudflare/workers-types"
+import * as cloudflare from "@cloudflare/workers-types";
 declare module "sst" {
   export interface Resource {
-    Api: cloudflare.Service
-    Bucket: cloudflare.R2Bucket
+    "Api": cloudflare.Service
+    "Bucket": cloudflare.R2Bucket
   }
 }


@ -1,369 +0,0 @@
{
"type": "object",
"properties": {
"$schema": {
"type": "string",
"description": "JSON schema reference for configuration validation"
},
"theme": {
"type": "string",
"description": "Theme name to use for the interface"
},
"keybinds": {
"type": "object",
"properties": {
"leader": {
"type": "string",
"description": "Leader key for keybind combinations"
},
"help": {
"type": "string",
"description": "Show help dialog"
},
"editor_open": {
"type": "string",
"description": "Open external editor"
},
"session_new": {
"type": "string",
"description": "Create a new session"
},
"session_list": {
"type": "string",
"description": "List all sessions"
},
"session_share": {
"type": "string",
"description": "Share current session"
},
"session_interrupt": {
"type": "string",
"description": "Interrupt current session"
},
"session_compact": {
"type": "string",
"description": "Toggle compact mode for session"
},
"tool_details": {
"type": "string",
"description": "Show tool details"
},
"model_list": {
"type": "string",
"description": "List available models"
},
"theme_list": {
"type": "string",
"description": "List available themes"
},
"project_init": {
"type": "string",
"description": "Initialize project configuration"
},
"input_clear": {
"type": "string",
"description": "Clear input field"
},
"input_paste": {
"type": "string",
"description": "Paste from clipboard"
},
"input_submit": {
"type": "string",
"description": "Submit input"
},
"input_newline": {
"type": "string",
"description": "Insert newline in input"
},
"history_previous": {
"type": "string",
"description": "Navigate to previous history item"
},
"history_next": {
"type": "string",
"description": "Navigate to next history item"
},
"messages_page_up": {
"type": "string",
"description": "Scroll messages up by one page"
},
"messages_page_down": {
"type": "string",
"description": "Scroll messages down by one page"
},
"messages_half_page_up": {
"type": "string",
"description": "Scroll messages up by half page"
},
"messages_half_page_down": {
"type": "string",
"description": "Scroll messages down by half page"
},
"messages_previous": {
"type": "string",
"description": "Navigate to previous message"
},
"messages_next": {
"type": "string",
"description": "Navigate to next message"
},
"messages_first": {
"type": "string",
"description": "Navigate to first message"
},
"messages_last": {
"type": "string",
"description": "Navigate to last message"
},
"app_exit": {
"type": "string",
"description": "Exit the application"
}
},
"additionalProperties": false,
"description": "Custom keybind configurations"
},
"autoshare": {
"type": "boolean",
"description": "Share newly created sessions automatically"
},
"autoupdate": {
"type": "boolean",
"description": "Automatically update to the latest version"
},
"disabled_providers": {
"type": "array",
"items": {
"type": "string"
},
"description": "Disable providers that are loaded automatically"
},
"model": {
"type": "string",
"description": "Model to use in the format of provider/model, eg anthropic/claude-2"
},
"provider": {
"type": "object",
"additionalProperties": {
"type": "object",
"properties": {
"api": {
"type": "string"
},
"name": {
"type": "string"
},
"env": {
"type": "array",
"items": {
"type": "string"
}
},
"id": {
"type": "string"
},
"npm": {
"type": "string"
},
"models": {
"type": "object",
"additionalProperties": {
"type": "object",
"properties": {
"name": {
"type": "string"
},
"attachment": {
"type": "boolean"
},
"reasoning": {
"type": "boolean"
},
"temperature": {
"type": "boolean"
},
"tool_call": {
"type": "boolean"
},
"cost": {
"type": "object",
"properties": {
"input": {
"type": "number"
},
"output": {
"type": "number"
},
"cache_read": {
"type": "number"
},
"cache_write": {
"type": "number"
}
},
"required": ["input", "output"],
"additionalProperties": false
},
"limit": {
"type": "object",
"properties": {
"context": {
"type": "number"
},
"output": {
"type": "number"
}
},
"required": ["context", "output"],
"additionalProperties": false
},
"id": {
"type": "string"
},
"options": {
"type": "object",
"additionalProperties": {}
}
},
"additionalProperties": false
}
},
"options": {
"type": "object",
"additionalProperties": {}
}
},
"required": ["models"],
"additionalProperties": false
},
"description": "Custom provider configurations and model overrides"
},
"mcp": {
"type": "object",
"additionalProperties": {
"anyOf": [
{
"type": "object",
"properties": {
"type": {
"type": "string",
"const": "local",
"description": "Type of MCP server connection"
},
"command": {
"type": "array",
"items": {
"type": "string"
},
"description": "Command and arguments to run the MCP server"
},
"environment": {
"type": "object",
"additionalProperties": {
"type": "string"
},
"description": "Environment variables to set when running the MCP server"
},
"enabled": {
"type": "boolean",
"description": "Enable or disable the MCP server on startup"
}
},
"required": ["type", "command"],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"type": {
"type": "string",
"const": "remote",
"description": "Type of MCP server connection"
},
"url": {
"type": "string",
"description": "URL of the remote MCP server"
},
"enabled": {
"type": "boolean",
"description": "Enable or disable the MCP server on startup"
}
},
"required": ["type", "url"],
"additionalProperties": false
}
]
},
"description": "MCP (Model Context Protocol) server configurations"
},
"instructions": {
"type": "array",
"items": {
"type": "string"
},
"description": "Additional instruction files or patterns to include"
},
"experimental": {
"type": "object",
"properties": {
"hook": {
"type": "object",
"properties": {
"file_edited": {
"type": "object",
"additionalProperties": {
"type": "array",
"items": {
"type": "object",
"properties": {
"command": {
"type": "array",
"items": {
"type": "string"
}
},
"environment": {
"type": "object",
"additionalProperties": {
"type": "string"
}
}
},
"required": ["command"],
"additionalProperties": false
}
}
},
"session_completed": {
"type": "array",
"items": {
"type": "object",
"properties": {
"command": {
"type": "array",
"items": {
"type": "string"
}
},
"environment": {
"type": "object",
"additionalProperties": {
"type": "string"
}
}
},
"required": ["command"],
"additionalProperties": false
}
}
},
"additionalProperties": false
}
},
"additionalProperties": false
}
},
"additionalProperties": false,
"$schema": "http://json-schema.org/draft-07/schema#"
}


@@ -17,37 +17,33 @@
   "devDependencies": {
     "@ai-sdk/amazon-bedrock": "2.2.10",
     "@ai-sdk/anthropic": "1.2.12",
-    "@standard-schema/spec": "1.0.0",
     "@tsconfig/bun": "1.0.7",
     "@types/bun": "latest",
     "@types/turndown": "5.0.5",
     "@types/yargs": "17.0.33",
     "typescript": "catalog:",
-    "vscode-languageserver-types": "3.17.5",
     "zod-to-json-schema": "3.24.5"
   },
   "dependencies": {
     "@clack/prompts": "0.11.0",
-    "@flystorage/file-storage": "1.1.0",
-    "@flystorage/local-fs": "1.1.0",
-    "@hono/zod-validator": "0.5.0",
+    "@hono/zod-validator": "0.4.2",
+    "@modelcontextprotocol/sdk": "1.15.1",
     "@openauthjs/openauth": "0.4.3",
+    "@standard-schema/spec": "1.0.0",
     "ai": "catalog:",
     "decimal.js": "10.5.0",
     "diff": "8.0.2",
-    "env-paths": "3.0.0",
     "hono": "4.7.10",
     "hono-openapi": "0.4.8",
     "isomorphic-git": "1.32.1",
     "open": "10.1.2",
     "remeda": "2.22.3",
-    "ts-lsp-client": "1.0.3",
     "turndown": "7.2.0",
     "vscode-jsonrpc": "8.2.1",
-    "vscode-languageclient": "8",
     "xdg-basedir": "5.1.0",
     "yargs": "18.0.0",
     "zod": "catalog:",
-    "zod-openapi": "4.2.4",
-    "zod-validation-error": "3.5.2"
+    "zod-openapi": "4.1.0"
   }
 }


@ -9,7 +9,7 @@ const snapshot = process.argv.includes("--snapshot")
const version = snapshot const version = snapshot
? `0.0.0-${new Date().toISOString().slice(0, 16).replace(/[-:T]/g, "")}` ? `0.0.0-${new Date().toISOString().slice(0, 16).replace(/[-:T]/g, "")}`
: await $`git describe --tags --exact-match HEAD` : await $`git describe --tags --abbrev=0`
.text() .text()
.then((x) => x.substring(1).trim()) .then((x) => x.substring(1).trim())
.catch(() => { .catch(() => {
@ -57,8 +57,7 @@ for (const [os, arch] of targets) {
2, 2,
), ),
) )
if (!dry) if (!dry) await $`cd dist/${name} && bun publish --access public --tag ${npmTag}`
await $`cd dist/${name} && bun publish --access public --tag ${npmTag}`
optionalDependencies[name] = version optionalDependencies[name] = version
} }
@ -82,8 +81,7 @@ await Bun.file(`./dist/${pkg.name}/package.json`).write(
2, 2,
), ),
) )
if (!dry) if (!dry) await $`cd ./dist/${pkg.name} && bun publish --access public --tag ${npmTag}`
await $`cd ./dist/${pkg.name} && bun publish --access public --tag ${npmTag}`
if (!snapshot) { if (!snapshot) {
// Github Release // Github Release
@ -91,15 +89,11 @@ if (!snapshot) {
await $`cd dist/${key}/bin && zip -r ../../${key}.zip *` await $`cd dist/${key}/bin && zip -r ../../${key}.zip *`
} }
const previous = await fetch( const previous = await fetch("https://api.github.com/repos/sst/opencode/releases/latest")
"https://api.github.com/repos/sst/opencode/releases/latest",
)
.then((res) => res.json()) .then((res) => res.json())
.then((data) => data.tag_name) .then((data) => data.tag_name)
const commits = await fetch( const commits = await fetch(`https://api.github.com/repos/sst/opencode/compare/${previous}...HEAD`)
`https://api.github.com/repos/sst/opencode/compare/${previous}...HEAD`,
)
.then((res) => res.json()) .then((res) => res.json())
.then((data) => data.commits || []) .then((data) => data.commits || [])
@ -109,6 +103,7 @@ if (!snapshot) {
const lower = x.toLowerCase() const lower = x.toLowerCase()
return ( return (
!lower.includes("ignore:") && !lower.includes("ignore:") &&
!lower.includes("chore:") &&
!lower.includes("ci:") && !lower.includes("ci:") &&
!lower.includes("wip:") && !lower.includes("wip:") &&
!lower.includes("docs:") && !lower.includes("docs:") &&
@ -117,26 +112,13 @@ if (!snapshot) {
}) })
.join("\n") .join("\n")
if (!dry) if (!dry) await $`gh release create v${version} --title "v${version}" --notes ${notes} ./dist/*.zip`
await $`gh release create v${version} --title "v${version}" --notes ${notes} ./dist/*.zip`
// Calculate SHA values // Calculate SHA values
const arm64Sha = const arm64Sha = await $`sha256sum ./dist/opencode-linux-arm64.zip | cut -d' ' -f1`.text().then((x) => x.trim())
await $`sha256sum ./dist/opencode-linux-arm64.zip | cut -d' ' -f1` const x64Sha = await $`sha256sum ./dist/opencode-linux-x64.zip | cut -d' ' -f1`.text().then((x) => x.trim())
.text() const macX64Sha = await $`sha256sum ./dist/opencode-darwin-x64.zip | cut -d' ' -f1`.text().then((x) => x.trim())
.then((x) => x.trim()) const macArm64Sha = await $`sha256sum ./dist/opencode-darwin-arm64.zip | cut -d' ' -f1`.text().then((x) => x.trim())
const x64Sha =
await $`sha256sum ./dist/opencode-linux-x64.zip | cut -d' ' -f1`
.text()
.then((x) => x.trim())
const macX64Sha =
await $`sha256sum ./dist/opencode-darwin-x64.zip | cut -d' ' -f1`
.text()
.then((x) => x.trim())
const macArm64Sha =
await $`sha256sum ./dist/opencode-darwin-arm64.zip | cut -d' ' -f1`
.text()
.then((x) => x.trim())
// AUR package // AUR package
const pkgbuild = [ const pkgbuild = [
@ -170,9 +152,7 @@ if (!snapshot) {
for (const pkg of ["opencode", "opencode-bin"]) { for (const pkg of ["opencode", "opencode-bin"]) {
await $`rm -rf ./dist/aur-${pkg}` await $`rm -rf ./dist/aur-${pkg}`
await $`git clone ssh://aur@aur.archlinux.org/${pkg}.git ./dist/aur-${pkg}` await $`git clone ssh://aur@aur.archlinux.org/${pkg}.git ./dist/aur-${pkg}`
await Bun.file(`./dist/aur-${pkg}/PKGBUILD`).write( await Bun.file(`./dist/aur-${pkg}/PKGBUILD`).write(pkgbuild.replace("${pkg}", pkg))
pkgbuild.replace("${pkg}", pkg),
)
await $`cd ./dist/aur-${pkg} && makepkg --printsrcinfo > .SRCINFO` await $`cd ./dist/aur-${pkg} && makepkg --printsrcinfo > .SRCINFO`
await $`cd ./dist/aur-${pkg} && git add PKGBUILD .SRCINFO` await $`cd ./dist/aur-${pkg} && git add PKGBUILD .SRCINFO`
await $`cd ./dist/aur-${pkg} && git commit -m "Update to v${version}"` await $`cd ./dist/aur-${pkg} && git commit -m "Update to v${version}"`

@ -4,5 +4,32 @@ import "zod-openapi/extend"
import { Config } from "../src/config/config" import { Config } from "../src/config/config"
import { zodToJsonSchema } from "zod-to-json-schema" import { zodToJsonSchema } from "zod-to-json-schema"
const result = zodToJsonSchema(Config.Info) const file = process.argv[2]
await Bun.write("config.schema.json", JSON.stringify(result, null, 2))
const result = zodToJsonSchema(Config.Info, {
/**
* We'll use the `default` values of the field as the only value in `examples`.
* This will ensure no docs are needed to be read, as the configuration is
* self-documenting.
*
* See https://json-schema.org/draft/2020-12/draft-bhutton-json-schema-validation-00#rfc.section.9.5
*/
postProcess(jsonSchema) {
const schema = jsonSchema as typeof jsonSchema & {
examples?: unknown[]
}
if (schema && typeof schema === "object" && "type" in schema && schema.type === "string" && schema?.default) {
if (!schema.examples) {
schema.examples = [schema.default]
}
schema.description = [schema.description || "", `default: \`${schema.default}\``]
.filter(Boolean)
.join("\n\n")
.trim()
}
return jsonSchema
},
})
await Bun.write(file, JSON.stringify(result, null, 2))
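For context, a minimal sketch (with a hypothetical `theme` field) of what the postProcess hook above does to a string node that carries a default:

  // node emitted by zod-to-json-schema before the hook runs (hypothetical field)
  const before = { type: "string", description: "Theme to use", default: "opencode" }
  // node after the hook: the default becomes the sole example and is appended to the description
  const after = {
    type: "string",
    description: "Theme to use\n\ndefault: `opencode`",
    default: "opencode",
    examples: ["opencode"],
  }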

@ -12,7 +12,6 @@ export namespace App {
export const Info = z export const Info = z
.object({ .object({
user: z.string(),
hostname: z.string(), hostname: z.string(),
git: z.boolean(), git: z.boolean(),
path: z.object({ path: z.object({
@ -45,23 +44,14 @@ export namespace App {
} }
export const provideExisting = ctx.provide export const provideExisting = ctx.provide
export async function provide<T>( export async function provide<T>(input: Input, cb: (app: App.Info) => Promise<T>) {
input: Input,
cb: (app: App.Info) => Promise<T>,
) {
log.info("creating", { log.info("creating", {
cwd: input.cwd, cwd: input.cwd,
}) })
const git = await Filesystem.findUp(".git", input.cwd).then(([x]) => const git = await Filesystem.findUp(".git", input.cwd).then(([x]) => (x ? path.dirname(x) : undefined))
x ? path.dirname(x) : undefined,
)
log.info("git", { git }) log.info("git", { git })
const data = path.join( const data = path.join(Global.Path.data, "project", git ? directory(git) : "global")
Global.Path.data,
"project",
git ? directory(git) : "global",
)
const stateFile = Bun.file(path.join(data, APP_JSON)) const stateFile = Bun.file(path.join(data, APP_JSON))
const state = (await stateFile.json().catch(() => ({}))) as { const state = (await stateFile.json().catch(() => ({}))) as {
initialized: number initialized: number
@ -79,7 +69,6 @@ export namespace App {
const root = git ?? input.cwd const root = git ?? input.cwd
const info: Info = { const info: Info = {
user: os.userInfo().username,
hostname: os.hostname(), hostname: os.hostname(),
time: { time: {
initialized: state.initialized, initialized: state.initialized,

@ -4,20 +4,18 @@ import { Auth } from "./index"
export namespace AuthAnthropic { export namespace AuthAnthropic {
const CLIENT_ID = "9d1c250a-e61b-44d9-88ed-5944d1962f5e" const CLIENT_ID = "9d1c250a-e61b-44d9-88ed-5944d1962f5e"
export async function authorize() { export async function authorize(mode: "max" | "console") {
const pkce = await generatePKCE() const pkce = await generatePKCE()
const url = new URL("https://claude.ai/oauth/authorize", import.meta.url)
const url = new URL(
`https://${mode === "console" ? "console.anthropic.com" : "claude.ai"}/oauth/authorize`,
import.meta.url,
)
url.searchParams.set("code", "true") url.searchParams.set("code", "true")
url.searchParams.set("client_id", CLIENT_ID) url.searchParams.set("client_id", CLIENT_ID)
url.searchParams.set("response_type", "code") url.searchParams.set("response_type", "code")
url.searchParams.set( url.searchParams.set("redirect_uri", "https://console.anthropic.com/oauth/code/callback")
"redirect_uri", url.searchParams.set("scope", "org:create_api_key user:profile user:inference")
"https://console.anthropic.com/oauth/code/callback",
)
url.searchParams.set(
"scope",
"org:create_api_key user:profile user:inference",
)
url.searchParams.set("code_challenge", pkce.challenge) url.searchParams.set("code_challenge", pkce.challenge)
url.searchParams.set("code_challenge_method", "S256") url.searchParams.set("code_challenge_method", "S256")
url.searchParams.set("state", pkce.verifier) url.searchParams.set("state", pkce.verifier)
@ -45,21 +43,18 @@ export namespace AuthAnthropic {
}) })
if (!result.ok) throw new ExchangeFailed() if (!result.ok) throw new ExchangeFailed()
const json = await result.json() const json = await result.json()
await Auth.set("anthropic", { return {
type: "oauth",
refresh: json.refresh_token as string, refresh: json.refresh_token as string,
access: json.access_token as string, access: json.access_token as string,
expires: Date.now() + json.expires_in * 1000, expires: Date.now() + json.expires_in * 1000,
}) }
} }
export async function access() { export async function access() {
const info = await Auth.get("anthropic") const info = await Auth.get("anthropic")
if (!info || info.type !== "oauth") return if (!info || info.type !== "oauth") return
if (info.access && info.expires > Date.now()) return info.access if (info.access && info.expires > Date.now()) return info.access
const response = await fetch( const response = await fetch("https://console.anthropic.com/v1/oauth/token", {
"https://console.anthropic.com/v1/oauth/token",
{
method: "POST", method: "POST",
headers: { headers: {
"Content-Type": "application/json", "Content-Type": "application/json",
@ -69,8 +64,7 @@ export namespace AuthAnthropic {
refresh_token: info.refresh, refresh_token: info.refresh,
client_id: CLIENT_ID, client_id: CLIENT_ID,
}), }),
}, })
)
if (!response.ok) return if (!response.ok) return
const json = await response.json() const json = await response.json()
await Auth.set("anthropic", { await Auth.set("anthropic", {

@ -4,9 +4,7 @@ import path from "path"
export const AuthCopilot = lazy(async () => { export const AuthCopilot = lazy(async () => {
const file = Bun.file(path.join(Global.Path.state, "plugin", "copilot.ts")) const file = Bun.file(path.join(Global.Path.state, "plugin", "copilot.ts"))
const response = fetch( const response = fetch("https://raw.githubusercontent.com/sst/opencode-github-copilot/refs/heads/main/auth.ts")
"https://raw.githubusercontent.com/sst/opencode-github-copilot/refs/heads/main/auth.ts",
)
.then((x) => Bun.write(file, x)) .then((x) => Bun.write(file, x))
.catch(() => {}) .catch(() => {})

@ -122,10 +122,7 @@ export namespace AuthGithubCopilot {
return tokenData.token return tokenData.token
} }
export const DeviceCodeError = NamedError.create( export const DeviceCodeError = NamedError.create("DeviceCodeError", z.object({}))
"DeviceCodeError",
z.object({}),
)
export const TokenExchangeError = NamedError.create( export const TokenExchangeError = NamedError.create(
"TokenExchangeError", "TokenExchangeError",

@ -8,10 +8,7 @@ import { readableStreamToText } from "bun"
export namespace BunProc { export namespace BunProc {
const log = Log.create({ service: "bun" }) const log = Log.create({ service: "bun" })
export async function run( export async function run(cmd: string[], options?: Bun.SpawnOptions.OptionsObject<any, any, any>) {
cmd: string[],
options?: Bun.SpawnOptions.OptionsObject<any, any, any>,
) {
log.info("running", { log.info("running", {
cmd: [which(), ...cmd], cmd: [which(), ...cmd],
...options, ...options,
@ -26,9 +23,17 @@ export namespace BunProc {
BUN_BE_BUN: "1", BUN_BE_BUN: "1",
}, },
}) })
const code = await result.exited; const code = await result.exited
const stdout = result.stdout ? typeof result.stdout === "number" ? result.stdout : await readableStreamToText(result.stdout) : undefined const stdout = result.stdout
const stderr = result.stderr ? typeof result.stderr === "number" ? result.stderr : await readableStreamToText(result.stderr) : undefined ? typeof result.stdout === "number"
? result.stdout
: await readableStreamToText(result.stdout)
: undefined
const stderr = result.stderr
? typeof result.stderr === "number"
? result.stderr
: await readableStreamToText(result.stderr)
: undefined
log.info("done", { log.info("done", {
code, code,
stdout, stdout,
@ -55,13 +60,22 @@ export namespace BunProc {
export async function install(pkg: string, version = "latest") { export async function install(pkg: string, version = "latest") {
const mod = path.join(Global.Path.cache, "node_modules", pkg) const mod = path.join(Global.Path.cache, "node_modules", pkg)
const pkgjson = Bun.file(path.join(Global.Path.cache, "package.json")) const pkgjson = Bun.file(path.join(Global.Path.cache, "package.json"))
const parsed = await pkgjson.json().catch(() => ({ const parsed = await pkgjson.json().catch(async () => {
dependencies: {}, const result = { dependencies: {} }
})) await Bun.write(pkgjson.name!, JSON.stringify(result, null, 2))
return result
})
if (parsed.dependencies[pkg] === version) return mod if (parsed.dependencies[pkg] === version) return mod
parsed.dependencies[pkg] = version
await Bun.write(pkgjson, JSON.stringify(parsed, null, 2)) // Build command arguments
await BunProc.run(["install", "--registry=https://registry.npmjs.org"], { const args = ["add", "--force", "--exact", "--cwd", Global.Path.cache, pkg + "@" + version]
// Let Bun handle registry resolution:
// - If .npmrc files exist, Bun will use them automatically
// - If no .npmrc files exist, Bun will default to https://registry.npmjs.org
log.info("installing package using Bun's default registry resolution", { pkg, version })
await BunProc.run(args, {
cwd: Global.Path.cache, cwd: Global.Path.cache,
}).catch((e) => { }).catch((e) => {
throw new InstallFailedError( throw new InstallFailedError(
@ -71,6 +85,8 @@ export namespace BunProc {
}, },
) )
}) })
parsed.dependencies[pkg] = version
await Bun.write(pkgjson.name!, JSON.stringify(parsed, null, 2))
return mod return mod
} }
} }
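For context, a minimal usage sketch of the revised install path above; the package name and version are hypothetical. The cached package.json short-circuits repeat installs, and the dependency is only recorded after `bun add` succeeds.

  // resolves to <Global.Path.cache>/node_modules/prettier once installed
  const dir = await BunProc.install("prettier", "3.6.2")
  // a second call with the same version returns early from the dependency check
  const again = await BunProc.install("prettier", "3.6.2")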

@ -18,10 +18,7 @@ export namespace Bus {
const registry = new Map<string, EventDefinition>() const registry = new Map<string, EventDefinition>()
export function event<Type extends string, Properties extends ZodType>( export function event<Type extends string, Properties extends ZodType>(type: Type, properties: Properties) {
type: Type,
properties: Properties,
) {
const result = { const result = {
type, type,
properties, properties,
@ -72,10 +69,7 @@ export namespace Bus {
export function subscribe<Definition extends EventDefinition>( export function subscribe<Definition extends EventDefinition>(
def: Definition, def: Definition,
callback: (event: { callback: (event: { type: Definition["type"]; properties: z.infer<Definition["properties"]> }) => void,
type: Definition["type"]
properties: z.infer<Definition["properties"]>
}) => void,
) { ) {
return raw(def.type, callback) return raw(def.type, callback)
} }
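For context, a minimal sketch of the condensed event/subscribe signatures above, assuming zod's `z` is in scope and a hypothetical event definition:

  const FileEdited = Bus.event("file.edited", z.object({ path: z.string() }))
  Bus.subscribe(FileEdited, (evt) => {
    // evt.properties is typed as { path: string }
    console.log(evt.type, evt.properties.path)
  })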

@ -1,20 +1,15 @@
import { App } from "../app/app" import { App } from "../app/app"
import { ConfigHooks } from "../config/hooks" import { ConfigHooks } from "../config/hooks"
import { FileWatcher } from "../file/watch"
import { Format } from "../format" import { Format } from "../format"
import { LSP } from "../lsp" import { LSP } from "../lsp"
import { Share } from "../share/share" import { Share } from "../share/share"
export async function bootstrap<T>( export async function bootstrap<T>(input: App.Input, cb: (app: App.Info) => Promise<T>) {
input: App.Input,
cb: (app: App.Info) => Promise<T>,
) {
return App.provide(input, async (app) => { return App.provide(input, async (app) => {
Share.init() Share.init()
Format.init() Format.init()
ConfigHooks.init() ConfigHooks.init()
LSP.init() LSP.init()
FileWatcher.init()
return cb(app) return cb(app)
}) })

@ -15,11 +15,7 @@ export const AuthCommand = cmd({
command: "auth", command: "auth",
describe: "manage credentials", describe: "manage credentials",
builder: (yargs) => builder: (yargs) =>
yargs yargs.command(AuthLoginCommand).command(AuthLogoutCommand).command(AuthListCommand).demandCommand(),
.command(AuthLoginCommand)
.command(AuthLogoutCommand)
.command(AuthListCommand)
.demandCommand(),
async handler() {}, async handler() {},
}) })
@ -31,9 +27,7 @@ export const AuthListCommand = cmd({
UI.empty() UI.empty()
const authPath = path.join(Global.Path.data, "auth.json") const authPath = path.join(Global.Path.data, "auth.json")
const homedir = os.homedir() const homedir = os.homedir()
const displayPath = authPath.startsWith(homedir) const displayPath = authPath.startsWith(homedir) ? authPath.replace(homedir, "~") : authPath
? authPath.replace(homedir, "~")
: authPath
prompts.intro(`Credentials ${UI.Style.TEXT_DIM}${displayPath}`) prompts.intro(`Credentials ${UI.Style.TEXT_DIM}${displayPath}`)
const results = await Auth.all().then((x) => Object.entries(x)) const results = await Auth.all().then((x) => Object.entries(x))
const database = await ModelsDev.get() const database = await ModelsDev.get()
@ -114,8 +108,7 @@ export const AuthLoginCommand = cmd({
if (provider === "other") { if (provider === "other") {
provider = await prompts.text({ provider = await prompts.text({
message: "Enter provider id", message: "Enter provider id",
validate: (x) => validate: (x) => (x.match(/^[a-z-]+$/) ? undefined : "a-z and hyphens only"),
x.match(/^[a-z-]+$/) ? undefined : "a-z and hyphens only",
}) })
if (prompts.isCancel(provider)) throw new UI.CancelledError() if (prompts.isCancel(provider)) throw new UI.CancelledError()
provider = provider.replace(/^@ai-sdk\//, "") provider = provider.replace(/^@ai-sdk\//, "")
@ -127,7 +120,7 @@ export const AuthLoginCommand = cmd({
if (provider === "amazon-bedrock") { if (provider === "amazon-bedrock") {
prompts.log.info( prompts.log.info(
"Amazon bedrock can be configured with standard AWS environment variables like AWS_PROFILE or AWS_ACCESS_KEY_ID", "Amazon bedrock can be configured with standard AWS environment variables like AWS_BEARER_TOKEN_BEDROCK, AWS_PROFILE or AWS_ACCESS_KEY_ID",
) )
prompts.outro("Done") prompts.outro("Done")
return return
@ -139,20 +132,24 @@ export const AuthLoginCommand = cmd({
options: [ options: [
{ {
label: "Claude Pro/Max", label: "Claude Pro/Max",
value: "oauth", value: "max",
}, },
{ {
label: "API Key", label: "Create API Key",
value: "console",
},
{
label: "Manually enter API Key",
value: "api", value: "api",
}, },
], ],
}) })
if (prompts.isCancel(method)) throw new UI.CancelledError() if (prompts.isCancel(method)) throw new UI.CancelledError()
if (method === "oauth") { if (method === "max") {
// some weird bug where program exits without this // some weird bug where program exits without this
await new Promise((resolve) => setTimeout(resolve, 10)) await new Promise((resolve) => setTimeout(resolve, 10))
const { url, verifier } = await AuthAnthropic.authorize() const { url, verifier } = await AuthAnthropic.authorize("max")
prompts.note("Trying to open browser...") prompts.note("Trying to open browser...")
try { try {
await open(url) await open(url)
@ -169,13 +166,66 @@ export const AuthLoginCommand = cmd({
}) })
if (prompts.isCancel(code)) throw new UI.CancelledError() if (prompts.isCancel(code)) throw new UI.CancelledError()
await AuthAnthropic.exchange(code, verifier) try {
.then(() => { const credentials = await AuthAnthropic.exchange(code, verifier)
await Auth.set("anthropic", {
type: "oauth",
refresh: credentials.refresh,
access: credentials.access,
expires: credentials.expires,
})
prompts.log.success("Login successful") prompts.log.success("Login successful")
}) } catch {
.catch(() => {
prompts.log.error("Invalid code") prompts.log.error("Invalid code")
}
prompts.outro("Done")
return
}
if (method === "console") {
// some weird bug where program exits without this
await new Promise((resolve) => setTimeout(resolve, 10))
const { url, verifier } = await AuthAnthropic.authorize("console")
prompts.note("Trying to open browser...")
try {
await open(url)
} catch (e) {
prompts.log.error(
"Failed to open browser perhaps you are running without a display or X server, please open the following URL in your browser:",
)
}
prompts.log.info(url)
const code = await prompts.text({
message: "Paste the authorization code here: ",
validate: (x) => (x.length > 0 ? undefined : "Required"),
}) })
if (prompts.isCancel(code)) throw new UI.CancelledError()
try {
const credentials = await AuthAnthropic.exchange(code, verifier)
const accessToken = credentials.access
const response = await fetch("https://api.anthropic.com/api/oauth/claude_cli/create_api_key", {
method: "POST",
headers: {
Authorization: `Bearer ${accessToken}`,
"Content-Type": "application/x-www-form-urlencoded",
Accept: "application/json, text/plain, */*",
},
})
if (!response.ok) {
throw new Error("Failed to create API key")
}
const json = await response.json()
await Auth.set("anthropic", {
type: "api",
key: json.raw_key,
})
prompts.log.success("Login successful - API key created and saved")
} catch (error) {
prompts.log.error("Invalid code or failed to create API key")
}
prompts.outro("Done") prompts.outro("Done")
return return
} }
@ -186,17 +236,13 @@ export const AuthLoginCommand = cmd({
await new Promise((resolve) => setTimeout(resolve, 10)) await new Promise((resolve) => setTimeout(resolve, 10))
const deviceInfo = await copilot.authorize() const deviceInfo = await copilot.authorize()
prompts.note( prompts.note(`Please visit: ${deviceInfo.verification}\nEnter code: ${deviceInfo.user}`)
`Please visit: ${deviceInfo.verification}\nEnter code: ${deviceInfo.user}`,
)
const spinner = prompts.spinner() const spinner = prompts.spinner()
spinner.start("Waiting for authorization...") spinner.start("Waiting for authorization...")
while (true) { while (true) {
await new Promise((resolve) => await new Promise((resolve) => setTimeout(resolve, deviceInfo.interval * 1000))
setTimeout(resolve, deviceInfo.interval * 1000),
)
const response = await copilot.poll(deviceInfo.device) const response = await copilot.poll(deviceInfo.device)
if (response.status === "pending") continue if (response.status === "pending") continue
if (response.status === "success") { if (response.status === "success") {
@ -248,12 +294,7 @@ export const AuthLogoutCommand = cmd({
const providerID = await prompts.select({ const providerID = await prompts.select({
message: "Select provider", message: "Select provider",
options: credentials.map(([key, value]) => ({ options: credentials.map(([key, value]) => ({
label: label: (database[key]?.name || key) + UI.Style.TEXT_DIM + " (" + value.type + ")",
(database[key]?.name || key) +
UI.Style.TEXT_DIM +
" (" +
value.type +
")",
value: key, value: key,
})), })),
}) })

@ -31,7 +31,6 @@ const FileStatusCommand = cmd({
export const FileCommand = cmd({ export const FileCommand = cmd({
command: "file", command: "file",
builder: (yargs) => builder: (yargs) => yargs.command(FileReadCommand).command(FileStatusCommand).demandCommand(),
yargs.command(FileReadCommand).command(FileStatusCommand).demandCommand(),
async handler() {}, async handler() {},
}) })

@ -3,6 +3,7 @@ import { cmd } from "../cmd"
import { FileCommand } from "./file" import { FileCommand } from "./file"
import { LSPCommand } from "./lsp" import { LSPCommand } from "./lsp"
import { RipgrepCommand } from "./ripgrep" import { RipgrepCommand } from "./ripgrep"
import { ScrapCommand } from "./scrap"
import { SnapshotCommand } from "./snapshot" import { SnapshotCommand } from "./snapshot"
export const DebugCommand = cmd({ export const DebugCommand = cmd({
@ -12,14 +13,13 @@ export const DebugCommand = cmd({
.command(LSPCommand) .command(LSPCommand)
.command(RipgrepCommand) .command(RipgrepCommand)
.command(FileCommand) .command(FileCommand)
.command(ScrapCommand)
.command(SnapshotCommand) .command(SnapshotCommand)
.command({ .command({
command: "wait", command: "wait",
async handler() { async handler() {
await bootstrap({ cwd: process.cwd() }, async () => { await bootstrap({ cwd: process.cwd() }, async () => {
await new Promise((resolve) => await new Promise((resolve) => setTimeout(resolve, 1_000 * 60 * 60 * 24))
setTimeout(resolve, 1_000 * 60 * 60 * 24),
)
}) })
}, },
}) })

@ -6,14 +6,13 @@ import { Log } from "../../../util/log"
export const LSPCommand = cmd({ export const LSPCommand = cmd({
command: "lsp", command: "lsp",
builder: (yargs) => builder: (yargs) =>
yargs.command(DiagnosticsCommand).command(SymbolsCommand).demandCommand(), yargs.command(DiagnosticsCommand).command(SymbolsCommand).command(DocumentSymbolsCommand).demandCommand(),
async handler() {}, async handler() {},
}) })
const DiagnosticsCommand = cmd({ const DiagnosticsCommand = cmd({
command: "diagnostics <file>", command: "diagnostics <file>",
builder: (yargs) => builder: (yargs) => yargs.positional("file", { type: "string", demandOption: true }),
yargs.positional("file", { type: "string", demandOption: true }),
async handler(args) { async handler(args) {
await bootstrap({ cwd: process.cwd() }, async () => { await bootstrap({ cwd: process.cwd() }, async () => {
await LSP.touchFile(args.file, true) await LSP.touchFile(args.file, true)
@ -24,14 +23,24 @@ const DiagnosticsCommand = cmd({
export const SymbolsCommand = cmd({ export const SymbolsCommand = cmd({
command: "symbols <query>", command: "symbols <query>",
builder: (yargs) => builder: (yargs) => yargs.positional("query", { type: "string", demandOption: true }),
yargs.positional("query", { type: "string", demandOption: true }),
async handler(args) { async handler(args) {
await bootstrap({ cwd: process.cwd() }, async () => { await bootstrap({ cwd: process.cwd() }, async () => {
await LSP.touchFile("./src/index.ts", true)
using _ = Log.Default.time("symbols") using _ = Log.Default.time("symbols")
const results = await LSP.workspaceSymbol(args.query) const results = await LSP.workspaceSymbol(args.query)
console.log(JSON.stringify(results, null, 2)) console.log(JSON.stringify(results, null, 2))
}) })
}, },
}) })
export const DocumentSymbolsCommand = cmd({
command: "document-symbols <uri>",
builder: (yargs) => yargs.positional("uri", { type: "string", demandOption: true }),
async handler(args) {
await bootstrap({ cwd: process.cwd() }, async () => {
using _ = Log.Default.time("document-symbols")
const results = await LSP.documentSymbol(args.uri)
console.log(JSON.stringify(results, null, 2))
})
},
})

@ -5,12 +5,7 @@ import { cmd } from "../cmd"
export const RipgrepCommand = cmd({ export const RipgrepCommand = cmd({
command: "rg", command: "rg",
builder: (yargs) => builder: (yargs) => yargs.command(TreeCommand).command(FilesCommand).command(SearchCommand).demandCommand(),
yargs
.command(TreeCommand)
.command(FilesCommand)
.command(SearchCommand)
.demandCommand(),
async handler() {}, async handler() {},
}) })
@ -50,7 +45,7 @@ const FilesCommand = cmd({
const files = await Ripgrep.files({ const files = await Ripgrep.files({
cwd: app.path.cwd, cwd: app.path.cwd,
query: args.query, query: args.query,
glob: args.glob, glob: args.glob ? [args.glob] : undefined,
limit: args.limit, limit: args.limit,
}) })
console.log(files.join("\n")) console.log(files.join("\n"))

@ -0,0 +1,7 @@
import { cmd } from "../cmd"
export const ScrapCommand = cmd({
command: "scrap",
builder: (yargs) => yargs,
async handler() {},
})

@ -4,15 +4,11 @@ import { cmd } from "../cmd"
export const SnapshotCommand = cmd({ export const SnapshotCommand = cmd({
command: "snapshot", command: "snapshot",
builder: (yargs) => builder: (yargs) => yargs.command(CreateCommand).command(RestoreCommand).command(DiffCommand).demandCommand(),
yargs
.command(SnapshotCreateCommand)
.command(SnapshotRestoreCommand)
.demandCommand(),
async handler() {}, async handler() {},
}) })
export const SnapshotCreateCommand = cmd({ const CreateCommand = cmd({
command: "create", command: "create",
async handler() { async handler() {
await bootstrap({ cwd: process.cwd() }, async () => { await bootstrap({ cwd: process.cwd() }, async () => {
@ -22,7 +18,7 @@ export const SnapshotCreateCommand = cmd({
}, },
}) })
export const SnapshotRestoreCommand = cmd({ const RestoreCommand = cmd({
command: "restore <commit>", command: "restore <commit>",
builder: (yargs) => builder: (yargs) =>
yargs.positional("commit", { yargs.positional("commit", {
@ -37,3 +33,20 @@ export const SnapshotRestoreCommand = cmd({
}) })
}, },
}) })
export const DiffCommand = cmd({
command: "diff <commit>",
describe: "diff",
builder: (yargs) =>
yargs.positional("commit", {
type: "string",
description: "commit",
demandOption: true,
}),
async handler(args) {
await bootstrap({ cwd: process.cwd() }, async () => {
const diff = await Snapshot.diff("test", args.commit)
console.log(diff)
})
},
})

@ -10,9 +10,6 @@ export const GenerateCommand = {
const dir = "gen" const dir = "gen"
await fs.rmdir(dir, { recursive: true }).catch(() => {}) await fs.rmdir(dir, { recursive: true }).catch(() => {})
await fs.mkdir(dir, { recursive: true }) await fs.mkdir(dir, { recursive: true })
await Bun.write( await Bun.write(path.join(dir, "openapi.json"), JSON.stringify(specs, null, 2))
path.join(dir, "openapi.json"),
JSON.stringify(specs, null, 2),
)
}, },
} satisfies CommandModule } satisfies CommandModule

@ -0,0 +1,235 @@
import { $ } from "bun"
import path from "path"
import { exec } from "child_process"
import * as prompts from "@clack/prompts"
import { map, pipe, sortBy, values } from "remeda"
import { UI } from "../ui"
import { cmd } from "./cmd"
import { ModelsDev } from "../../provider/models"
import { App } from "../../app/app"
const WORKFLOW_FILE = ".github/workflows/opencode.yml"
export const InstallGithubCommand = cmd({
command: "install-github",
describe: "install the GitHub agent",
async handler() {
await App.provide({ cwd: process.cwd() }, async () => {
UI.empty()
prompts.intro("Install GitHub agent")
const app = await getAppInfo()
await installGitHubApp()
const providers = await ModelsDev.get()
const provider = await promptProvider()
const model = await promptModel()
//const key = await promptKey()
await addWorkflowFiles()
printNextSteps()
function printNextSteps() {
let step2
if (provider === "amazon-bedrock") {
step2 =
"Configure OIDC in AWS - https://docs.github.com/en/actions/how-tos/security-for-github-actions/security-hardening-your-deployments/configuring-openid-connect-in-amazon-web-services"
} else {
const url = `https://github.com/organizations/${app.owner}/settings/secrets/actions`
const env = providers[provider].env
const envStr =
env.length === 1
? `\`${env[0]}\` secret`
: `\`${[env.slice(0, -1).join("\`, \`"), ...env.slice(-1)].join("\` and \`")}\` secrets`
step2 = `Add ${envStr} for ${providers[provider].name} - ${url}`
}
prompts.outro(
[
"Next steps:",
` 1. Commit "${WORKFLOW_FILE}" file and push`,
` 2. ${step2}`,
" 3. Learn how to use the GitHub agent - https://docs.opencode.ai/docs/github/getting-started",
].join("\n"),
)
}
async function getAppInfo() {
const app = App.info()
if (!app.git) {
prompts.log.error(`Could not find git repository. Please run this command from a git repository.`)
throw new UI.CancelledError()
}
// Get repo info
const info = await $`git remote get-url origin`.quiet().nothrow().text()
// match https or git pattern
// ie. https://github.com/sst/opencode.git
// ie. git@github.com:sst/opencode.git
const parsed = info.match(/git@github\.com:(.*)\.git/) ?? info.match(/github\.com\/(.*)\.git/)
if (!parsed) {
prompts.log.error(`Could not find git repository. Please run this command from a git repository.`)
throw new UI.CancelledError()
}
const [owner, repo] = parsed[1].split("/")
return { owner, repo, root: app.path.root }
}
async function promptProvider() {
const priority: Record<string, number> = {
anthropic: 0,
"github-copilot": 1,
openai: 2,
google: 3,
}
let provider = await prompts.select({
message: "Select provider",
maxItems: 8,
options: [
...pipe(
providers,
values(),
sortBy(
(x) => priority[x.id] ?? 99,
(x) => x.name ?? x.id,
),
map((x) => ({
label: x.name,
value: x.id,
hint: priority[x.id] === 0 ? "recommended" : undefined,
})),
),
{
value: "other",
label: "Other",
},
],
})
if (prompts.isCancel(provider)) throw new UI.CancelledError()
if (provider === "other") {
provider = await prompts.text({
message: "Enter provider id",
validate: (x) => (x.match(/^[a-z-]+$/) ? undefined : "a-z and hyphens only"),
})
if (prompts.isCancel(provider)) throw new UI.CancelledError()
provider = provider.replace(/^@ai-sdk\//, "")
if (prompts.isCancel(provider)) throw new UI.CancelledError()
prompts.log.warn(
`This only stores a credential for ${provider} - you will need configure it in opencode.json, check the docs for examples.`,
)
}
return provider
}
async function promptModel() {
const providerData = providers[provider]!
const model = await prompts.select({
message: "Select model",
maxItems: 8,
options: pipe(
providerData.models,
values(),
sortBy((x) => x.name ?? x.id),
map((x) => ({
label: x.name ?? x.id,
value: x.id,
})),
),
})
if (prompts.isCancel(model)) throw new UI.CancelledError()
return model
}
async function installGitHubApp() {
const s = prompts.spinner()
s.start("Installing GitHub app")
// Get installation
const installation = await getInstallation()
if (installation) return s.stop("GitHub app already installed")
// Open browser
const url = "https://github.com/apps/opencode-agent"
const command =
process.platform === "darwin"
? `open "${url}"`
: process.platform === "win32"
? `start "${url}"`
: `xdg-open "${url}"`
exec(command, (error) => {
if (error) {
prompts.log.warn(`Could not open browser. Please visit: ${url}`)
}
})
// Wait for installation
s.message("Waiting for GitHub app to be installed")
const MAX_RETRIES = 60
let retries = 0
do {
const installation = await getInstallation()
if (installation) break
if (retries > MAX_RETRIES) {
s.stop(
`Failed to detect GitHub app installation. Make sure to install the app for the \`${app.owner}/${app.repo}\` repository.`,
)
throw new UI.CancelledError()
}
retries++
await new Promise((resolve) => setTimeout(resolve, 1000))
} while (true)
s.stop("Installed GitHub app")
async function getInstallation() {
return await fetch(`https://api.opencode.ai/get_github_app_installation?owner=${app.owner}&repo=${app.repo}`)
.then((res) => res.json())
.then((data) => data.installation)
}
}
async function addWorkflowFiles() {
const envStr =
provider === "amazon-bedrock"
? ""
: `\n env:${providers[provider].env.map((e) => `\n ${e}: \${{ secrets.${e} }}`).join("")}`
await Bun.write(
path.join(app.root, WORKFLOW_FILE),
`
name: opencode
on:
issue_comment:
types: [created]
jobs:
opencode:
if: startsWith(github.event.comment.body, 'hey opencode')
runs-on: ubuntu-latest
permissions:
id-token: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Run opencode
uses: sst/opencode/sdks/github@github-v1${envStr}
with:
model: ${provider}/${model}
`.trim(),
)
prompts.log.success(`Added workflow file: "${WORKFLOW_FILE}"`)
}
})
},
})
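For context, the two remote formats accepted by the getAppInfo parsing above, using the example remotes from its own comments:

  const ssh = "git@github.com:sst/opencode.git".match(/git@github\.com:(.*)\.git/)
  const https = "https://github.com/sst/opencode.git".match(/github\.com\/(.*)\.git/)
  // both capture "sst/opencode", which is then split into owner and repo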

@ -0,0 +1,79 @@
import { cmd } from "./cmd"
import { Client } from "@modelcontextprotocol/sdk/client/index.js"
import { StreamableHTTPClientTransport } from "@modelcontextprotocol/sdk/client/streamableHttp.js"
import * as prompts from "@clack/prompts"
import { UI } from "../ui"
export const McpCommand = cmd({
command: "mcp",
builder: (yargs) => yargs.command(McpAddCommand).demandCommand(),
async handler() {},
})
export const McpAddCommand = cmd({
command: "add",
describe: "add an MCP server",
async handler() {
UI.empty()
prompts.intro("Add MCP server")
const name = await prompts.text({
message: "Enter MCP server name",
validate: (x) => (x.length > 0 ? undefined : "Required"),
})
if (prompts.isCancel(name)) throw new UI.CancelledError()
const type = await prompts.select({
message: "Select MCP server type",
options: [
{
label: "Local",
value: "local",
hint: "Run a local command",
},
{
label: "Remote",
value: "remote",
hint: "Connect to a remote URL",
},
],
})
if (prompts.isCancel(type)) throw new UI.CancelledError()
if (type === "local") {
const command = await prompts.text({
message: "Enter command to run",
placeholder: "e.g., opencode x @modelcontextprotocol/server-filesystem",
validate: (x) => (x.length > 0 ? undefined : "Required"),
})
if (prompts.isCancel(command)) throw new UI.CancelledError()
prompts.log.info(`Local MCP server "${name}" configured with command: ${command}`)
prompts.outro("MCP server added successfully")
return
}
if (type === "remote") {
const url = await prompts.text({
message: "Enter MCP server URL",
placeholder: "e.g., https://example.com/mcp",
validate: (x) => {
if (x.length === 0) return "Required"
const isValid = URL.canParse(x)
return isValid ? undefined : "Invalid URL"
},
})
if (prompts.isCancel(url)) throw new UI.CancelledError()
const client = new Client({
name: "opencode",
version: "1.0.0",
})
const transport = new StreamableHTTPClientTransport(new URL(url))
await client.connect(transport)
prompts.log.info(`Remote MCP server "${name}" configured with URL: ${url}`)
}
prompts.outro("MCP server added successfully")
},
})
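For context, minimal sketches of config entries matching the McpLocal and McpRemote shapes this command refers to (values are hypothetical; the `headers` field is added in the Config schema changes further below):

  const local: Config.Mcp = {
    type: "local",
    command: ["opencode", "x", "@modelcontextprotocol/server-filesystem"],
    environment: { API_TOKEN: "..." }, // hypothetical variable
    enabled: true,
  }
  const remote: Config.Mcp = {
    type: "remote",
    url: "https://example.com/mcp",
    headers: { Authorization: "Bearer ..." }, // hypothetical header
    enabled: true,
  }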

@ -2,12 +2,14 @@ import type { Argv } from "yargs"
import { Bus } from "../../bus" import { Bus } from "../../bus"
import { Provider } from "../../provider/provider" import { Provider } from "../../provider/provider"
import { Session } from "../../session" import { Session } from "../../session"
import { Message } from "../../session/message"
import { UI } from "../ui" import { UI } from "../ui"
import { cmd } from "./cmd" import { cmd } from "./cmd"
import { Flag } from "../../flag/flag" import { Flag } from "../../flag/flag"
import { Config } from "../../config/config" import { Config } from "../../config/config"
import { bootstrap } from "../bootstrap" import { bootstrap } from "../bootstrap"
import { MessageV2 } from "../../session/message-v2"
import { Mode } from "../../session/mode"
import { Identifier } from "../../id/id"
const TOOL: Record<string, [string, string]> = { const TOOL: Record<string, [string, string]> = {
todowrite: ["Todo", UI.Style.TEXT_WARNING_BOLD], todowrite: ["Todo", UI.Style.TEXT_WARNING_BOLD],
@ -52,13 +54,22 @@ export const RunCommand = cmd({
alias: ["m"], alias: ["m"],
describe: "model to use in the format of provider/model", describe: "model to use in the format of provider/model",
}) })
.option("mode", {
type: "string",
describe: "mode to use",
})
}, },
handler: async (args) => { handler: async (args) => {
const message = args.message.join(" ") let message = args.message.join(" ")
if (!process.stdin.isTTY) message += "\n" + (await Bun.stdin.text())
await bootstrap({ cwd: process.cwd() }, async () => { await bootstrap({ cwd: process.cwd() }, async () => {
const session = await (async () => { const session = await (async () => {
if (args.continue) { if (args.continue) {
const first = await Session.list().next() const list = Session.list()
const first = await list.next()
await list.return()
if (first.done) return if (first.done) return
return first.value return first.value
} }
@ -73,32 +84,27 @@ export const RunCommand = cmd({
return return
} }
const isPiped = !process.stdout.isTTY
UI.empty() UI.empty()
UI.println(UI.logo()) UI.println(UI.logo())
UI.empty() UI.empty()
UI.println(UI.Style.TEXT_NORMAL_BOLD + "> ", message)
UI.empty()
const cfg = await Config.get() const cfg = await Config.get()
if (cfg.autoshare || Flag.OPENCODE_AUTO_SHARE || args.share) { if (cfg.share === "auto" || Flag.OPENCODE_AUTO_SHARE || args.share) {
try {
await Session.share(session.id) await Session.share(session.id)
UI.println( UI.println(UI.Style.TEXT_INFO_BOLD + "~ https://opencode.ai/s/" + session.id.slice(-8))
UI.Style.TEXT_INFO_BOLD + } catch (error) {
"~ https://opencode.ai/s/" + if (error instanceof Error && error.message.includes("disabled")) {
session.id.slice(-8), UI.println(UI.Style.TEXT_DANGER_BOLD + "! " + error.message)
) } else {
throw error
}
}
} }
UI.empty() UI.empty()
const { providerID, modelID } = args.model const { providerID, modelID } = args.model ? Provider.parseModel(args.model) : await Provider.defaultModel()
? Provider.parseModel(args.model) UI.println(UI.Style.TEXT_NORMAL_BOLD + "@ ", UI.Style.TEXT_NORMAL + `${providerID}/${modelID}`)
: await Provider.defaultModel()
UI.println(
UI.Style.TEXT_NORMAL_BOLD + "@ ",
UI.Style.TEXT_NORMAL + `${providerID}/${modelID}`,
)
UI.empty() UI.empty()
function printEvent(color: string, type: string, title: string) { function printEvent(color: string, type: string, title: string) {
@ -110,52 +116,75 @@ export const RunCommand = cmd({
) )
} }
Bus.subscribe(Message.Event.PartUpdated, async (evt) => { let text = ""
if (evt.properties.sessionID !== session.id) return Bus.subscribe(MessageV2.Event.PartUpdated, async (evt) => {
if (evt.properties.part.sessionID !== session.id) return
if (evt.properties.part.messageID === messageID) return
const part = evt.properties.part const part = evt.properties.part
const message = await Session.getMessage(
evt.properties.sessionID,
evt.properties.messageID,
)
if ( if (part.type === "tool" && part.state.status === "completed") {
part.type === "tool-invocation" && const [tool, color] = TOOL[part.tool] ?? [part.tool, UI.Style.TEXT_INFO_BOLD]
part.toolInvocation.state === "result" const title =
) { part.state.title || Object.keys(part.state.input).length > 0 ? JSON.stringify(part.state.input) : "Unknown"
const metadata = message.metadata.tool[part.toolInvocation.toolCallId] printEvent(color, tool, title)
const [tool, color] = TOOL[part.toolInvocation.toolName] ?? [
part.toolInvocation.toolName,
UI.Style.TEXT_INFO_BOLD,
]
printEvent(color, tool, metadata?.title || "Unknown")
} }
if (part.type === "text") { if (part.type === "text") {
if (part.text.includes("\n")) { text = part.text
if (part.time?.end) {
UI.empty() UI.empty()
UI.println(part.text) UI.println(UI.markdown(text))
UI.empty() UI.empty()
text = ""
return return
} }
printEvent(UI.Style.TEXT_NORMAL_BOLD, "Text", part.text)
} }
}) })
let errorMsg: string | undefined
Bus.subscribe(Session.Event.Error, async (evt) => {
const { sessionID, error } = evt.properties
if (sessionID !== session.id || !error) return
let err = String(error.name)
if ("data" in error && error.data && "message" in error.data) {
err = error.data.message
}
errorMsg = errorMsg ? errorMsg + "\n" + err : err
UI.error(err)
})
const mode = args.mode ? await Mode.get(args.mode) : await Mode.list().then((x) => x[0])
const messageID = Identifier.ascending("message")
const result = await Session.chat({ const result = await Session.chat({
sessionID: session.id, sessionID: session.id,
messageID,
...(mode.model
? mode.model
: {
providerID, providerID,
modelID, modelID,
}),
mode: mode.name,
parts: [ parts: [
{ {
id: Identifier.ascending("part"),
sessionID: session.id,
messageID: messageID,
type: "text", type: "text",
text: message, text: message,
}, },
], ],
}) })
const isPiped = !process.stdout.isTTY
if (isPiped) { if (isPiped) {
const match = result.parts.findLast((x) => x.type === "text") const match = result.parts.findLast((x) => x.type === "text")
if (match) process.stdout.write(match.text) if (match) process.stdout.write(UI.markdown(match.text))
if (errorMsg) process.stdout.write(errorMsg)
} }
UI.empty() UI.empty()
}) })

@ -38,9 +38,7 @@ export const ServeCommand = cmd({
hostname, hostname,
}) })
console.log( console.log(`opencode server listening on http://${server.hostname}:${server.port}`)
`opencode server listening on http://${server.hostname}:${server.port}`,
)
await new Promise(() => {}) await new Promise(() => {})

@ -0,0 +1,98 @@
import { cmd } from "./cmd"
interface SessionStats {
totalSessions: number
totalMessages: number
totalCost: number
totalTokens: {
input: number
output: number
reasoning: number
cache: {
read: number
write: number
}
}
toolUsage: Record<string, number>
dateRange: {
earliest: number
latest: number
}
days: number
costPerDay: number
}
export const StatsCommand = cmd({
command: "stats",
handler: async () => {},
})
export function displayStats(stats: SessionStats) {
const width = 56
function renderRow(label: string, value: string): string {
const availableWidth = width - 1
const paddingNeeded = availableWidth - label.length - value.length
const padding = Math.max(0, paddingNeeded)
return `${label}${" ".repeat(padding)}${value}`
}
// Overview section
console.log("┌────────────────────────────────────────────────────────┐")
console.log("│ OVERVIEW │")
console.log("├────────────────────────────────────────────────────────┤")
console.log(renderRow("Sessions", stats.totalSessions.toLocaleString()))
console.log(renderRow("Messages", stats.totalMessages.toLocaleString()))
console.log(renderRow("Days", stats.days.toString()))
console.log("└────────────────────────────────────────────────────────┘")
console.log()
// Cost & Tokens section
console.log("┌────────────────────────────────────────────────────────┐")
console.log("│ COST & TOKENS │")
console.log("├────────────────────────────────────────────────────────┤")
const cost = isNaN(stats.totalCost) ? 0 : stats.totalCost
const costPerDay = isNaN(stats.costPerDay) ? 0 : stats.costPerDay
console.log(renderRow("Total Cost", `$${cost.toFixed(2)}`))
console.log(renderRow("Cost/Day", `$${costPerDay.toFixed(2)}`))
console.log(renderRow("Input", formatNumber(stats.totalTokens.input)))
console.log(renderRow("Output", formatNumber(stats.totalTokens.output)))
console.log(renderRow("Cache Read", formatNumber(stats.totalTokens.cache.read)))
console.log(renderRow("Cache Write", formatNumber(stats.totalTokens.cache.write)))
console.log("└────────────────────────────────────────────────────────┘")
console.log()
// Tool Usage section
if (Object.keys(stats.toolUsage).length > 0) {
const sortedTools = Object.entries(stats.toolUsage)
.sort(([, a], [, b]) => b - a)
.slice(0, 10)
console.log("┌────────────────────────────────────────────────────────┐")
console.log("│ TOOL USAGE │")
console.log("├────────────────────────────────────────────────────────┤")
const maxCount = Math.max(...sortedTools.map(([, count]) => count))
const totalToolUsage = Object.values(stats.toolUsage).reduce((a, b) => a + b, 0)
for (const [tool, count] of sortedTools) {
const barLength = Math.max(1, Math.floor((count / maxCount) * 20))
const bar = "█".repeat(barLength)
const percentage = ((count / totalToolUsage) * 100).toFixed(1)
const content = ` ${tool.padEnd(10)} ${bar.padEnd(20)} ${count.toString().padStart(3)} (${percentage.padStart(4)}%)`
const padding = Math.max(0, width - content.length)
console.log(`${content}${" ".repeat(padding)}`)
}
console.log("└────────────────────────────────────────────────────────┘")
}
console.log()
}
function formatNumber(num: number): string {
if (num >= 1000000) {
return (num / 1000000).toFixed(1) + "M"
} else if (num >= 1000) {
return (num / 1000).toFixed(1) + "K"
}
return num.toString()
}
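For context, formatNumber above buckets counts into K/M with one decimal, e.g.:

  formatNumber(950)       // "950"
  formatNumber(12_400)    // "12.4K"
  formatNumber(3_200_000) // "3.2M"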

@ -10,14 +10,31 @@ import { Installation } from "../../installation"
import { Config } from "../../config/config" import { Config } from "../../config/config"
import { Bus } from "../../bus" import { Bus } from "../../bus"
import { Log } from "../../util/log" import { Log } from "../../util/log"
import { FileWatcher } from "../../file/watch"
import { Mode } from "../../session/mode"
export const TuiCommand = cmd({ export const TuiCommand = cmd({
command: "$0 [project]", command: "$0 [project]",
describe: "start opencode tui", describe: "start opencode tui",
builder: (yargs) => builder: (yargs) =>
yargs.positional("project", { yargs
.positional("project", {
type: "string", type: "string",
describe: "path to start opencode in", describe: "path to start opencode in",
})
.option("model", {
type: "string",
alias: ["m"],
describe: "model to use in the format of provider/model",
})
.option("prompt", {
alias: ["p"],
type: "string",
describe: "prompt to use",
})
.option("mode", {
type: "string",
describe: "mode to use",
}), }),
handler: async (args) => { handler: async (args) => {
while (true) { while (true) {
@ -29,6 +46,7 @@ export const TuiCommand = cmd({
return return
} }
const result = await bootstrap({ cwd }, async (app) => { const result = await bootstrap({ cwd }, async (app) => {
FileWatcher.init()
const providers = await Provider.list() const providers = await Provider.list()
if (Object.keys(providers).length === 0) { if (Object.keys(providers).length === 0) {
return "needs_provider" return "needs_provider"
@ -40,9 +58,7 @@ export const TuiCommand = cmd({
}) })
let cmd = ["go", "run", "./main.go"] let cmd = ["go", "run", "./main.go"]
let cwd = Bun.fileURLToPath( let cwd = Bun.fileURLToPath(new URL("../../../../tui/cmd/opencode", import.meta.url))
new URL("../../../../tui/cmd/opencode", import.meta.url),
)
if (Bun.embeddedFiles.length > 0) { if (Bun.embeddedFiles.length > 0) {
const blob = Bun.embeddedFiles[0] as File const blob = Bun.embeddedFiles[0] as File
let binaryName = blob.name let binaryName = blob.name
@ -62,15 +78,22 @@ export const TuiCommand = cmd({
cmd, cmd,
}) })
const proc = Bun.spawn({ const proc = Bun.spawn({
cmd: [...cmd, ...process.argv.slice(2)], cmd: [
...cmd,
...(args.model ? ["--model", args.model] : []),
...(args.prompt ? ["--prompt", args.prompt] : []),
...(args.mode ? ["--mode", args.mode] : []),
],
cwd, cwd,
stdout: "inherit", stdout: "inherit",
stderr: "inherit", stderr: "inherit",
stdin: "inherit", stdin: "inherit",
env: { env: {
...process.env, ...process.env,
CGO_ENABLED: "0",
OPENCODE_SERVER: server.url.toString(), OPENCODE_SERVER: server.url.toString(),
OPENCODE_APP_INFO: JSON.stringify(app), OPENCODE_APP_INFO: JSON.stringify(app),
OPENCODE_MODES: JSON.stringify(await Mode.list()),
}, },
onExit: () => { onExit: () => {
server.stop() server.stop()

@ -27,22 +27,26 @@ export const UpgradeCommand = {
const detectedMethod = await Installation.method() const detectedMethod = await Installation.method()
const method = (args.method as Installation.Method) ?? detectedMethod const method = (args.method as Installation.Method) ?? detectedMethod
if (method === "unknown") { if (method === "unknown") {
prompts.log.error( prompts.log.error(`opencode is installed to ${process.execPath} and seems to be managed by a package manager`)
`opencode is installed to ${process.execPath} and seems to be managed by a package manager`,
)
prompts.outro("Done") prompts.outro("Done")
return return
} }
prompts.log.info("Using method: " + method) prompts.log.info("Using method: " + method)
const target = args.target ?? (await Installation.latest()) const target = args.target ?? (await Installation.latest())
if (Installation.VERSION === target) {
prompts.log.warn(`opencode upgrade skipped: ${target} is already installed`)
prompts.outro("Done")
return
}
prompts.log.info(`From ${Installation.VERSION} → ${target}`) prompts.log.info(`From ${Installation.VERSION} → ${target}`)
const spinner = prompts.spinner() const spinner = prompts.spinner()
spinner.start("Upgrading...") spinner.start("Upgrading...")
const err = await Installation.upgrade(method, target).catch((err) => err) const err = await Installation.upgrade(method, target).catch((err) => err)
if (err) { if (err) {
spinner.stop("Upgrade failed") spinner.stop("Upgrade failed")
if (err instanceof Installation.UpgradeFailedError) if (err instanceof Installation.UpgradeFailedError) prompts.log.error(err.data.stderr)
prompts.log.error(err.data.stderr)
else if (err instanceof Error) prompts.log.error(err.message) else if (err instanceof Error) prompts.log.error(err.message)
prompts.outro("Done") prompts.outro("Done")
return return

@ -5,14 +5,11 @@ import { UI } from "./ui"
export function FormatError(input: unknown) { export function FormatError(input: unknown) {
if (MCP.Failed.isInstance(input)) if (MCP.Failed.isInstance(input))
return `MCP server "${input.data.name}" failed. Note, opencode does not support MCP authentication yet.` return `MCP server "${input.data.name}" failed. Note, opencode does not support MCP authentication yet.`
if (Config.JsonError.isInstance(input)) if (Config.JsonError.isInstance(input)) return `Config file at ${input.data.path} is not valid JSON`
return `Config file at ${input.data.path} is not valid JSON`
if (Config.InvalidError.isInstance(input)) if (Config.InvalidError.isInstance(input))
return [ return [
`Config file at ${input.data.path} is invalid`, `Config file at ${input.data.path} is invalid`,
...(input.data.issues?.map( ...(input.data.issues?.map((issue) => "↳ " + issue.message + " " + issue.path.join(".")) ?? []),
(issue) => "↳ " + issue.message + " " + issue.path.join("."),
) ?? []),
].join("\n") ].join("\n")
if (UI.CancelledError.isInstance(input)) return "" if (UI.CancelledError.isInstance(input)) return ""

@ -76,4 +76,8 @@ export namespace UI {
export function error(message: string) { export function error(message: string) {
println(Style.TEXT_DANGER_BOLD + "Error: " + Style.TEXT_NORMAL + message) println(Style.TEXT_DANGER_BOLD + "Error: " + Style.TEXT_NORMAL + message)
} }
export function markdown(text: string): string {
return text
}
} }

@ -4,7 +4,7 @@ import { z } from "zod"
import { App } from "../app/app" import { App } from "../app/app"
import { Filesystem } from "../util/filesystem" import { Filesystem } from "../util/filesystem"
import { ModelsDev } from "../provider/models" import { ModelsDev } from "../provider/models"
import { mergeDeep } from "remeda" import { mergeDeep, pipe } from "remeda"
import { Global } from "../global" import { Global } from "../global"
import fs from "fs/promises" import fs from "fs/promises"
import { lazy } from "../util/lazy" import { lazy } from "../util/lazy"
@ -21,6 +21,20 @@ export namespace Config {
result = mergeDeep(result, await load(resolved)) result = mergeDeep(result, await load(resolved))
} }
} }
// Handle migration from autoshare to share field
if (result.autoshare === true && !result.share) {
result.share = "auto"
}
if (!result.username) {
const os = await import("os")
result.username = os.userInfo().username
}
if (!result.layout) {
result.layout = "auto"
}
log.info("loaded", result) log.info("loaded", result)
return result return result
@ -29,18 +43,12 @@ export namespace Config {
export const McpLocal = z export const McpLocal = z
.object({ .object({
type: z.literal("local").describe("Type of MCP server connection"), type: z.literal("local").describe("Type of MCP server connection"),
command: z command: z.string().array().describe("Command and arguments to run the MCP server"),
.string()
.array()
.describe("Command and arguments to run the MCP server"),
environment: z environment: z
.record(z.string(), z.string()) .record(z.string(), z.string())
.optional() .optional()
.describe("Environment variables to set when running the MCP server"), .describe("Environment variables to set when running the MCP server"),
enabled: z enabled: z.boolean().optional().describe("Enable or disable the MCP server on startup"),
.boolean()
.optional()
.describe("Enable or disable the MCP server on startup"),
}) })
.strict() .strict()
.openapi({ .openapi({
@ -51,10 +59,8 @@ export namespace Config {
.object({ .object({
type: z.literal("remote").describe("Type of MCP server connection"), type: z.literal("remote").describe("Type of MCP server connection"),
url: z.string().describe("URL of the remote MCP server"), url: z.string().describe("URL of the remote MCP server"),
enabled: z enabled: z.boolean().optional().describe("Enable or disable the MCP server on startup"),
.boolean() headers: z.record(z.string(), z.string()).optional().describe("Headers to send with the request"),
.optional()
.describe("Enable or disable the MCP server on startup"),
}) })
.strict() .strict()
.openapi({ .openapi({
@ -64,105 +70,101 @@ export namespace Config {
export const Mcp = z.discriminatedUnion("type", [McpLocal, McpRemote]) export const Mcp = z.discriminatedUnion("type", [McpLocal, McpRemote])
export type Mcp = z.infer<typeof Mcp> export type Mcp = z.infer<typeof Mcp>
export const Mode = z
.object({
model: z.string().optional(),
prompt: z.string().optional(),
tools: z.record(z.string(), z.boolean()).optional(),
})
.openapi({
ref: "ModeConfig",
})
export type Mode = z.infer<typeof Mode>
export const Keybinds = z
.object({
leader: z.string().optional().default("ctrl+x").describe("Leader key for keybind combinations"),
app_help: z.string().optional().default("<leader>h").describe("Show help dialog"),
switch_mode: z.string().optional().default("tab").describe("Next mode"),
switch_mode_reverse: z.string().optional().default("shift+tab").describe("Previous Mode"),
editor_open: z.string().optional().default("<leader>e").describe("Open external editor"),
session_export: z.string().optional().default("<leader>x").describe("Export session to editor"),
session_new: z.string().optional().default("<leader>n").describe("Create a new session"),
session_list: z.string().optional().default("<leader>l").describe("List all sessions"),
session_share: z.string().optional().default("<leader>s").describe("Share current session"),
session_unshare: z.string().optional().default("<leader>u").describe("Unshare current session"),
session_interrupt: z.string().optional().default("esc").describe("Interrupt current session"),
session_compact: z.string().optional().default("<leader>c").describe("Compact the session"),
tool_details: z.string().optional().default("<leader>d").describe("Toggle tool details"),
model_list: z.string().optional().default("<leader>m").describe("List available models"),
theme_list: z.string().optional().default("<leader>t").describe("List available themes"),
file_list: z.string().optional().default("<leader>f").describe("List files"),
file_close: z.string().optional().default("esc").describe("Close file"),
file_search: z.string().optional().default("<leader>/").describe("Search file"),
file_diff_toggle: z.string().optional().default("<leader>v").describe("Split/unified diff"),
project_init: z.string().optional().default("<leader>i").describe("Create/update AGENTS.md"),
input_clear: z.string().optional().default("ctrl+c").describe("Clear input field"),
input_paste: z.string().optional().default("ctrl+v").describe("Paste from clipboard"),
input_submit: z.string().optional().default("enter").describe("Submit input"),
input_newline: z.string().optional().default("shift+enter,ctrl+j").describe("Insert newline in input"),
messages_page_up: z.string().optional().default("pgup").describe("Scroll messages up by one page"),
messages_page_down: z.string().optional().default("pgdown").describe("Scroll messages down by one page"),
messages_half_page_up: z.string().optional().default("ctrl+alt+u").describe("Scroll messages up by half page"),
messages_half_page_down: z
.string()
.optional()
.default("ctrl+alt+d")
.describe("Scroll messages down by half page"),
messages_previous: z.string().optional().default("ctrl+up").describe("Navigate to previous message"),
messages_next: z.string().optional().default("ctrl+down").describe("Navigate to next message"),
messages_first: z.string().optional().default("ctrl+g").describe("Navigate to first message"),
messages_last: z.string().optional().default("ctrl+alt+g").describe("Navigate to last message"),
messages_layout_toggle: z.string().optional().default("<leader>p").describe("Toggle layout"),
messages_copy: z.string().optional().default("<leader>y").describe("Copy message"),
messages_revert: z.string().optional().default("<leader>r").describe("Revert message"),
app_exit: z.string().optional().default("ctrl+c,<leader>q").describe("Exit the application"),
})
.strict()
.openapi({
ref: "KeybindsConfig",
})
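Because every keybind now ships a default, a user config only needs to list the bindings it overrides; unlisted keys fall back to the defaults above. For example:

{
  "keybinds": {
    "leader": "ctrl+x",
    "session_new": "<leader>n",
    "messages_half_page_down": "ctrl+alt+d"
  }
}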
export const Layout = z.enum(["auto", "stretch"]).openapi({
ref: "LayoutConfig",
})
export type Layout = z.infer<typeof Layout>
export const Info = z
.object({
$schema: z.string().optional().describe("JSON schema reference for configuration validation"),
theme: z.string().optional().describe("Theme name to use for the interface"),
keybinds: Keybinds.optional().describe("Custom keybind configurations"),
share: z
.enum(["manual", "auto", "disabled"])
.optional()
.describe(
"Control sharing behavior:'manual' allows manual sharing via commands, 'auto' enables automatic sharing, 'disabled' disables all sharing",
),
autoshare: z
.boolean()
.optional()
.describe("@deprecated Use 'share' field instead. Share newly created sessions automatically"),
autoupdate: z.boolean().optional().describe("Automatically update to the latest version"),
disabled_providers: z.array(z.string()).optional().describe("Disable providers that are loaded automatically"),
model: z.string().describe("Model to use in the format of provider/model, eg anthropic/claude-2").optional(),
username: z
.string()
.optional()
.describe("Custom username to display in conversations instead of system username"),
mode: z
.object({
build: Mode.optional(),
plan: Mode.optional(),
})
.catchall(Mode)
.optional()
.describe("Modes configuration, see https://opencode.ai/docs/modes"),
log_level: Log.Level.optional().describe("Minimum log level to write to log files"),
provider: z
.record(
ModelsDev.Provider.partial().extend({
@ -172,14 +174,9 @@ export namespace Config {
)
.optional()
.describe("Custom provider configurations and model overrides"),
mcp: z.record(z.string(), Mcp).optional().describe("MCP (Model Context Protocol) server configurations"),
instructions: z.array(z.string()).optional().describe("Additional instruction files or patterns to include"),
layout: Layout.optional().describe("Layout to use for the TUI"),
experimental: z
.object({
hook: z
@ -215,7 +212,11 @@ export namespace Config {
export type Info = z.output<typeof Info>
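A minimal top-level config covered by this schema might look like the following; the values are illustrative.

{
  "$schema": "https://opencode.ai/config.json",
  "share": "manual",
  "autoupdate": true,
  "disabled_providers": ["openrouter"],
  "username": "reviewer",
  "layout": "stretch",
  "log_level": "INFO"
}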
export const global = lazy(async () => {
let result = pipe(
{},
mergeDeep(await load(path.join(Global.Path.config, "config.json"))),
mergeDeep(await load(path.join(Global.Path.config, "opencode.json"))),
)
await import(path.join(Global.Path.config, "config"), {
with: {
@ -227,10 +228,7 @@ export namespace Config {
if (provider && model) result.model = `${provider}/${model}`
result["$schema"] = "https://opencode.ai/config.json"
result = mergeDeep(result, rest)
await Bun.write(path.join(Global.Path.config, "config.json"), JSON.stringify(result, null, 2))
await fs.unlink(path.join(Global.Path.config, "config"))
})
.catch(() => {})
@ -238,19 +236,47 @@ export namespace Config {
return result
})
async function load(configPath: string) {
let text = await Bun.file(configPath)
.text()
.catch((err) => {
if (err.code === "ENOENT") return
throw new JsonError({ path: configPath }, { cause: err })
})
if (!text) return {}
text = text.replace(/\{env:([^}]+)\}/g, (_, varName) => {
return process.env[varName] || ""
})
const fileMatches = text.match(/"?\{file:([^}]+)\}"?/g)
if (fileMatches) {
const configDir = path.dirname(configPath)
for (const match of fileMatches) {
const filePath = match.replace(/^"?\{file:/, "").replace(/\}"?$/, "")
const resolvedPath = path.isAbsolute(filePath) ? filePath : path.resolve(configDir, filePath)
const fileContent = await Bun.file(resolvedPath).text()
text = text.replace(match, JSON.stringify(fileContent))
}
}
let data: any
try {
data = JSON.parse(text)
} catch (err) {
throw new JsonError({ path: configPath }, { cause: err as Error })
}
const parsed = Info.safeParse(data)
if (parsed.success) {
if (!parsed.data.$schema) {
parsed.data.$schema = "https://opencode.ai/config.json"
await Bun.write(configPath, JSON.stringify(parsed.data, null, 2))
}
return parsed.data
}
throw new InvalidError({ path: configPath, issues: parsed.error.issues })
}
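With this loader, {env:NAME} placeholders are replaced with the value of that environment variable (empty string if unset) before parsing, and {file:path} placeholders are replaced with the contents of the referenced file, resolved relative to the config file. A hypothetical example; the variable name and path are placeholders:

{
  "model": "{env:OPENCODE_MODEL}",
  "mode": {
    "review": {
      "prompt": "{file:./prompts/review.md}"
    }
  }
}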
export const JsonError = NamedError.create(
"ConfigJsonError",
z.object({

View file

@ -22,9 +22,7 @@ export namespace ConfigHooks {
command: item.command,
})
Bun.spawn({
cmd: item.command.map((x) => x.replace("$FILE", payload.properties.file)),
env: item.environment,
cwd: app.path.cwd,
stdout: "ignore",

View file

@ -45,10 +45,7 @@ export namespace Fzf {
log.info("found", { filepath }) log.info("found", { filepath })
return { filepath } return { filepath }
} }
filepath = path.join( filepath = path.join(Global.Path.bin, "fzf" + (process.platform === "win32" ? ".exe" : ""))
Global.Path.bin,
"fzf" + (process.platform === "win32" ? ".exe" : ""),
)
const file = Bun.file(filepath) const file = Bun.file(filepath)
if (!(await file.exists())) { if (!(await file.exists())) {
@ -56,18 +53,15 @@ export namespace Fzf {
const arch = archMap[process.arch as keyof typeof archMap] ?? "amd64"
const config = PLATFORM[process.platform as keyof typeof PLATFORM]
if (!config) throw new UnsupportedPlatformError({ platform: process.platform })
const version = VERSION
const platformName = process.platform === "win32" ? "windows" : process.platform
const filename = `fzf-${version}-${platformName}_${arch}.${config.extension}`
const url = `https://github.com/junegunn/fzf/releases/download/v${version}/${filename}`
const response = await fetch(url)
if (!response.ok) throw new DownloadFailedError({ url, status: response.status })
const buffer = await response.arrayBuffer()
const archivePath = path.join(Global.Path.bin, filename)
@ -86,14 +80,11 @@ export namespace Fzf {
})
}
if (config.extension === "zip") {
const proc = Bun.spawn(["unzip", "-j", archivePath, "fzf.exe", "-d", Global.Path.bin], {
cwd: Global.Path.bin,
stderr: "pipe",
stdout: "ignore",
})
await proc.exited
if (proc.exitCode !== 0)
throw new ExtractionFailedError({

View file

@ -11,6 +11,19 @@ import { Log } from "../util/log"
export namespace File {
const log = Log.create({ service: "file" })
export const Info = z
.object({
path: z.string(),
added: z.number().int(),
removed: z.number().int(),
status: z.enum(["added", "deleted", "modified"]),
})
.openapi({
ref: "File",
})
export type Info = z.infer<typeof Info>
export const Event = {
Edited: Bus.event(
"file.edited",
@ -24,20 +37,16 @@ export namespace File {
const app = App.info()
if (!app.git) return []
const diffOutput = await $`git diff --numstat HEAD`.cwd(app.path.cwd).quiet().nothrow().text()
const changedFiles: Info[] = []
if (diffOutput.trim()) {
const lines = diffOutput.trim().split("\n")
for (const line of lines) {
const [added, removed, filepath] = line.split("\t")
changedFiles.push({
path: filepath,
added: added === "-" ? 0 : parseInt(added, 10),
removed: removed === "-" ? 0 : parseInt(removed, 10),
status: "modified",
@ -45,22 +54,16 @@ export namespace File {
}
}
const untrackedOutput = await $`git ls-files --others --exclude-standard`.cwd(app.path.cwd).quiet().nothrow().text()
if (untrackedOutput.trim()) {
const untrackedFiles = untrackedOutput.trim().split("\n")
for (const filepath of untrackedFiles) {
try {
const content = await Bun.file(path.join(app.path.root, filepath)).text()
const lines = content.split("\n").length
changedFiles.push({
path: filepath,
added: lines,
removed: 0,
status: "added",
@ -72,17 +75,13 @@ export namespace File {
}
}
// Get deleted files
const deletedOutput = await $`git diff --name-only --diff-filter=D HEAD`.cwd(app.path.cwd).quiet().nothrow().text()
if (deletedOutput.trim()) {
const deletedFiles = deletedOutput.trim().split("\n")
for (const filepath of deletedFiles) {
changedFiles.push({
path: filepath,
added: 0,
removed: 0, // Could get original line count but would require another git command
status: "deleted",
@ -92,7 +91,7 @@ export namespace File {
return changedFiles.map((x) => ({
...x,
path: path.relative(app.path.cwd, path.join(app.path.root, x.path)),
}))
}
@ -112,11 +111,7 @@ export namespace File {
filepath: rel,
})
if (diff !== "unmodified") {
const original = await $`git show HEAD:${rel}`.cwd(app.path.root).quiet().nothrow().text()
const patch = createPatch(file, original, content, "old", "new", {
context: Infinity,
})

View file

@ -34,7 +34,8 @@ export namespace Ripgrep {
export const Match = z.object({
type: z.literal("match"),
data: z
.object({
path: z.object({
text: z.string(),
}),
@ -52,7 +53,8 @@ export namespace Ripgrep {
end: z.number(),
}),
),
})
.openapi({ ref: "Match" }),
})
const End = z.object({
@ -122,15 +124,11 @@ export namespace Ripgrep {
const state = lazy(async () => {
let filepath = Bun.which("rg")
if (filepath) return { filepath }
filepath = path.join(Global.Path.bin, "rg" + (process.platform === "win32" ? ".exe" : ""))
const file = Bun.file(filepath)
if (!(await file.exists())) {
const platformKey = `${process.arch}-${process.platform}` as keyof typeof PLATFORM
const config = PLATFORM[platformKey]
if (!config) throw new UnsupportedPlatformError({ platform: platformKey })
@ -139,8 +137,7 @@ export namespace Ripgrep {
const url = `https://github.com/BurntSushi/ripgrep/releases/download/${version}/${filename}`
const response = await fetch(url)
if (!response.ok) throw new DownloadFailedError({ url, status: response.status })
const buffer = await response.arrayBuffer()
const archivePath = path.join(Global.Path.bin, filename)
@ -164,14 +161,11 @@ export namespace Ripgrep {
})
}
if (config.extension === "zip") {
const proc = Bun.spawn(["unzip", "-j", archivePath, "*/rg.exe", "-d", Global.Path.bin], {
cwd: Global.Path.bin,
stderr: "pipe",
stdout: "ignore",
})
await proc.exited
if (proc.exitCode !== 0)
throw new ExtractionFailedError({
@ -193,17 +187,16 @@ export namespace Ripgrep {
return filepath
}
export async function files(input: { cwd: string; query?: string; glob?: string[]; limit?: number }) {
const commands = [`${$.escape(await filepath())} --files --follow --hidden --glob='!.git/*'`]
if (input.glob) {
for (const g of input.glob) {
commands[0] += ` --glob='${g}'`
}
}
if (input.query) commands.push(`${await Fzf.filepath()} --filter=${input.query}`)
if (input.limit) commands.push(`head -n ${input.limit}`)
const joined = commands.join(" | ")
const result = await $`${{ raw: joined }}`.cwd(input.cwd).nothrow().text()
@ -310,18 +303,8 @@ export namespace Ripgrep {
return lines.join("\n")
}
export async function search(input: { cwd: string; pattern: string; glob?: string[]; limit?: number }) {
const args = [`${await filepath()}`, "--json", "--hidden", "--glob='!.git/*'"]
if (input.glob) {
for (const g of input.glob) {

View file

@ -27,10 +27,7 @@ export namespace FileTime {
export async function assert(sessionID: string, filepath: string) {
const time = get(sessionID, filepath)
if (!time) throw new Error(`You must read the file ${filepath} before overwriting it. Use the Read tool first`)
const stats = await Bun.file(filepath).stat()
if (stats.mtime.getTime() > time.getTime()) {
throw new Error(

View file

@ -21,11 +21,9 @@ export namespace FileWatcher {
"file.watcher", "file.watcher",
() => { () => {
const app = App.use() const app = App.use()
if (!app.info.git) return {}
try {
const watcher = fs.watch(app.info.path.cwd, { recursive: true }, (event, file) => {
log.info("change", { file, event })
if (!file) return
// for some reason async local storage is lost here
@ -36,8 +34,7 @@ export namespace FileWatcher {
event,
})
})
})
return { watcher }
} catch {
return {}
@ -49,7 +46,7 @@ export namespace FileWatcher {
)
export function init() {
if (Flag.OPENCODE_DISABLE_WATCHER || true) return
state()
}
}

View file

@ -1,5 +1,7 @@
import { App } from "../app/app"
import { BunProc } from "../bun"
import { Filesystem } from "../util/filesystem"
import path from "path"
export interface Info {
name: string
@ -29,7 +31,7 @@ export const mix: Info = {
export const prettier: Info = {
name: "prettier",
command: [BunProc.which(), "x", "prettier", "--write", "$FILE"],
environment: {
BUN_BE_BUN: "1",
},
@ -62,23 +64,12 @@ export const prettier: Info = {
".gql", ".gql",
], ],
async enabled() { async enabled() {
// this is more complicated because we only want to use prettier if it's const app = App.info()
// being used with the current project const nms = await Filesystem.findUp("node_modules", app.path.cwd, app.path.root)
try { for (const item of nms) {
const proc = Bun.spawn({ if (await Bun.file(path.join(item, ".bin", "prettier")).exists()) return true
cmd: [BunProc.which(), "run", "prettier", "--version"],
cwd: App.info().path.cwd,
env: {
BUN_BE_BUN: "1",
},
stdout: "ignore",
stderr: "ignore",
})
const exit = await proc.exited
return exit === 0
} catch {
return false
} }
return false
}, },
} }
@ -94,21 +85,7 @@ export const zig: Info = {
export const clang: Info = {
name: "clang-format",
command: ["clang-format", "-i", "$FILE"],
extensions: [".c", ".cc", ".cpp", ".cxx", ".c++", ".h", ".hh", ".hpp", ".hxx", ".h++", ".ino", ".C", ".H"],
async enabled() {
return Bun.which("clang-format") !== null
},
@ -128,7 +105,29 @@ export const ruff: Info = {
command: ["ruff", "format", "$FILE"], command: ["ruff", "format", "$FILE"],
extensions: [".py", ".pyi"], extensions: [".py", ".pyi"],
async enabled() { async enabled() {
return Bun.which("ruff") !== null if (!Bun.which("ruff")) return false
const app = App.info()
const configs = ["pyproject.toml", "ruff.toml", ".ruff.toml"]
for (const config of configs) {
const found = await Filesystem.findUp(config, app.path.cwd, app.path.root)
if (found.length > 0) {
if (config === "pyproject.toml") {
const content = await Bun.file(found[0]).text()
if (content.includes("[tool.ruff]")) return true
} else {
return true
}
}
}
const deps = ["requirements.txt", "pyproject.toml", "Pipfile"]
for (const dep of deps) {
const found = await Filesystem.findUp(dep, app.path.cwd, app.path.root)
if (found.length > 0) {
const content = await Bun.file(found[0]).text()
if (content.includes("ruff")) return true
}
}
return false
},
}

View file

@ -23,7 +23,17 @@ export namespace Global {
await Promise.all([
fs.mkdir(Global.Path.data, { recursive: true }),
fs.mkdir(Global.Path.config, { recursive: true }),
fs.mkdir(Global.Path.cache, { recursive: true }),
fs.mkdir(Global.Path.providers, { recursive: true }),
fs.mkdir(Global.Path.state, { recursive: true }),
])
const CACHE_VERSION = "2"
const version = await Bun.file(path.join(Global.Path.cache, "version"))
.text()
.catch(() => "0")
if (version !== CACHE_VERSION) {
await fs.rm(Global.Path.cache, { recursive: true, force: true })
await Bun.file(path.join(Global.Path.cache, "version")).write(CACHE_VERSION)
}

View file

@ -6,6 +6,7 @@ export namespace Identifier {
session: "ses", session: "ses",
message: "msg", message: "msg",
user: "usr", user: "usr",
part: "prt",
} as const } as const
export function schema(prefix: keyof typeof prefixes) { export function schema(prefix: keyof typeof prefixes) {
@ -26,11 +27,7 @@ export namespace Identifier {
return generateID(prefix, true, given)
}
function generateID(prefix: keyof typeof prefixes, descending: boolean, given?: string): string {
if (!given) {
return generateNewID(prefix, descending)
}
@ -42,8 +39,7 @@ export namespace Identifier {
}
function randomBase62(length: number): string {
const chars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
let result = ""
const bytes = randomBytes(length)
for (let i = 0; i < length; i++) {
@ -52,10 +48,7 @@ export namespace Identifier {
return result
}
function generateNewID(prefix: keyof typeof prefixes, descending: boolean): string {
const currentTimestamp = Date.now()
if (currentTimestamp !== lastTimestamp) {
@ -73,11 +66,6 @@ export namespace Identifier {
timeBytes[i] = Number((now >> BigInt(40 - 8 * i)) & BigInt(0xff))
}
return prefixes[prefix] + "_" + timeBytes.toString("hex") + randomBase62(LENGTH - 12)
}
}

View file

@ -14,6 +14,9 @@ import { FormatError } from "./cli/error"
import { ServeCommand } from "./cli/cmd/serve"
import { TuiCommand } from "./cli/cmd/tui"
import { DebugCommand } from "./cli/cmd/debug"
import { StatsCommand } from "./cli/cmd/stats"
import { McpCommand } from "./cli/cmd/mcp"
import { InstallGithubCommand } from "./cli/cmd/install-github"
const cancel = new AbortController()
@ -40,12 +43,31 @@ const cli = yargs(hideBin(process.argv))
})
.middleware(async () => {
await Log.init({ print: process.argv.includes("--print-logs") })
try {
const { Config } = await import("./config/config")
const { App } = await import("./app/app")
App.provide({ cwd: process.cwd() }, async () => {
const cfg = await Config.get()
if (cfg.log_level) {
Log.setLevel(cfg.log_level as Log.Level)
} else {
const defaultLevel = Installation.isDev() ? "DEBUG" : "INFO"
Log.setLevel(defaultLevel)
}
})
} catch (e) {
Log.Default.error("failed to load config", { error: e })
}
Log.Default.info("opencode", { Log.Default.info("opencode", {
version: Installation.VERSION, version: Installation.VERSION,
args: process.argv.slice(2), args: process.argv.slice(2),
}) })
}) })
.usage("\n" + UI.logo()) .usage("\n" + UI.logo())
.command(McpCommand)
.command(TuiCommand) .command(TuiCommand)
.command(RunCommand) .command(RunCommand)
.command(GenerateCommand) .command(GenerateCommand)
@ -54,11 +76,10 @@ const cli = yargs(hideBin(process.argv))
.command(UpgradeCommand)
.command(ServeCommand)
.command(ModelsCommand)
.command(StatsCommand)
.command(InstallGithubCommand)
.fail((msg) => {
if (msg.startsWith("Unknown argument") || msg.startsWith("Not enough non-option arguments")) {
cli.showHelp("log")
}
})
@ -97,10 +118,7 @@ try {
Log.Default.error("fatal", data) Log.Default.error("fatal", data)
const formatted = FormatError(e) const formatted = FormatError(e)
if (formatted) UI.error(formatted) if (formatted) UI.error(formatted)
if (formatted === undefined) if (formatted === undefined) UI.error("Unexpected error, check log file at " + Log.file() + " for more details")
UI.error(
"Unexpected error, check log file at " + Log.file() + " for more details",
)
process.exitCode = 1 process.exitCode = 1
} }

View file

@ -135,12 +135,17 @@ export namespace Installation {
})
}
export const VERSION = typeof OPENCODE_VERSION === "string" ? OPENCODE_VERSION : "dev"
export async function latest() {
return fetch("https://api.github.com/repos/sst/opencode/releases/latest")
.then((res) => res.json())
.then((data) => {
if (typeof data.tag_name !== "string") {
log.error("GitHub API error", data)
throw new Error("failed to fetch latest version")
}
return data.tag_name.slice(1) as string
})
}
}

View file

@ -1,9 +1,5 @@
import path from "path" import path from "path"
import { import { createMessageConnection, StreamMessageReader, StreamMessageWriter } from "vscode-jsonrpc/node"
createMessageConnection,
StreamMessageReader,
StreamMessageWriter,
} from "vscode-jsonrpc/node"
import type { Diagnostic as VSCodeDiagnostic } from "vscode-languageserver-types" import type { Diagnostic as VSCodeDiagnostic } from "vscode-languageserver-types"
import { App } from "../app/app" import { App } from "../app/app"
import { Log } from "../util/log" import { Log } from "../util/log"
@ -38,45 +34,54 @@ export namespace LSPClient {
),
}
export async function create(input: { serverID: string; server: LSPServer.Handle; root: string }) {
const app = App.info()
const l = log.clone().tag("serverID", input.serverID)
l.info("starting client")
const connection = createMessageConnection(
new StreamMessageReader(input.server.process.stdout),
new StreamMessageWriter(input.server.process.stdin),
)
const diagnostics = new Map<string, Diagnostic[]>()
connection.onNotification("textDocument/publishDiagnostics", (params) => {
const path = new URL(params.uri).pathname
l.info("textDocument/publishDiagnostics", {
path,
})
const exists = diagnostics.has(path)
diagnostics.set(path, params.diagnostics)
if (!exists && input.serverID === "typescript") return
Bus.publish(Event.Diagnostics, { path, serverID: input.serverID })
})
connection.onRequest("window/workDoneProgress/create", (params) => {
l.info("window/workDoneProgress/create", params)
return null
})
connection.onRequest("workspace/configuration", async () => {
return [{}]
})
connection.listen()
l.info("sending initialize")
await withTimeout(
connection.sendRequest("initialize", {
rootUri: "file://" + input.root,
processId: input.server.process.pid,
workspaceFolders: [
{
name: "workspace",
uri: "file://" + input.root,
},
],
initializationOptions: {
...input.server.initialization,
},
capabilities: {
window: {
workDoneProgress: true,
},
workspace: {
configuration: true,
},
@ -93,9 +98,9 @@ export namespace LSPClient {
}),
5_000,
).catch((err) => {
l.error("initialize error", { error: err })
throw new InitializeError(
{ serverID: input.serverID },
{
cause: err,
},
@ -103,26 +108,22 @@ export namespace LSPClient {
})
await connection.sendNotification("initialized", {})
const files: {
[path: string]: number
} = {}
const result = {
root: input.root,
get serverID() {
return input.serverID
},
get connection() {
return connection
},
notify: {
async open(input: { path: string }) {
input.path = path.isAbsolute(input.path) ? input.path : path.resolve(app.path.cwd, input.path)
const file = Bun.file(input.path)
const text = await file.text()
const version = files[input.path]
@ -154,18 +155,13 @@ export namespace LSPClient {
return diagnostics
},
async waitForDiagnostics(input: { path: string }) {
input.path = path.isAbsolute(input.path) ? input.path : path.resolve(app.path.cwd, input.path)
log.info("waiting for diagnostics", input)
let unsub: () => void
return await withTimeout(
new Promise<void>((resolve) => {
unsub = Bus.subscribe(Event.Diagnostics, (event) => {
if (event.properties.path === input.path && event.properties.serverID === result.serverID) {
log.info("got diagnostics", input)
unsub?.()
resolve()
@ -180,13 +176,16 @@ export namespace LSPClient {
})
},
async shutdown() {
l.info("shutting down")
connection.end()
connection.dispose()
input.server.process.kill()
l.info("shutdown")
},
}
l.info("initialized")
return result
}
}

View file

@ -3,19 +3,13 @@ import { Log } from "../util/log"
import { LSPClient } from "./client"
import path from "path"
import { LSPServer } from "./server"
import { Ripgrep } from "../file/ripgrep"
import { z } from "zod" import { z } from "zod"
export namespace LSP { export namespace LSP {
const log = Log.create({ service: "lsp" }) const log = Log.create({ service: "lsp" })
export const Symbol = z export const Range = z
.object({ .object({
name: z.string(),
kind: z.number(),
location: z.object({
uri: z.string(),
range: z.object({
start: z.object({ start: z.object({
line: z.number(), line: z.number(),
character: z.number(), character: z.number(),
@ -24,43 +18,50 @@ export namespace LSP {
line: z.number(),
character: z.number(),
}),
})
.openapi({
ref: "Range",
})
export type Range = z.infer<typeof Range>
export const Symbol = z
.object({
name: z.string(),
kind: z.number(),
location: z.object({
uri: z.string(),
range: Range,
}),
})
.openapi({
ref: "Symbol",
})
export type Symbol = z.infer<typeof Symbol>
export const DocumentSymbol = z
.object({
name: z.string(),
detail: z.string().optional(),
kind: z.number(),
range: Range,
selectionRange: Range,
})
.openapi({
ref: "DocumentSymbol",
})
export type DocumentSymbol = z.infer<typeof DocumentSymbol>
const state = App.state(
"lsp",
async () => {
const clients: LSPClient.Info[] = []
return {
broken: new Set<string>(),
clients,
}
},
async (state) => {
for (const client of state.clients) {
await client.shutdown()
}
},
@ -70,16 +71,44 @@ export namespace LSP {
return state()
}
async function getClients(file: string) {
const s = await state()
const extension = path.parse(file).ext
const result: LSPClient.Info[] = []
for (const server of Object.values(LSPServer)) {
if (!server.extensions.includes(extension)) continue
const root = await server.root(file, App.info())
if (!root) continue
if (s.broken.has(root + server.id)) continue
const match = s.clients.find((x) => x.root === root && x.serverID === server.id)
if (match) {
result.push(match)
continue
}
const handle = await server.spawn(App.info(), root)
if (!handle) continue
const client = await LSPClient.create({
serverID: server.id,
server: handle,
root,
}).catch((err) => {
s.broken.add(root + server.id)
handle.process.kill()
log.error("", { error: err })
})
if (!client) continue
s.clients.push(client)
result.push(client)
}
return result
}
export async function touchFile(input: string, waitForDiagnostics?: boolean) {
const clients = await getClients(input)
await run(async (client) => {
if (!clients.includes(client)) return
const wait = waitForDiagnostics ? client.waitForDiagnostics({ path: input }) : Promise.resolve()
await client.notify.open({ path: input })
return wait
})
@ -97,11 +126,7 @@ export namespace LSP {
return results
}
export async function hover(input: { file: string; line: number; character: number }) {
return run((client) => {
return client.connection.sendRequest("textDocument/hover", {
textDocument: {
@ -115,18 +140,74 @@ export namespace LSP {
})
}
enum SymbolKind {
File = 1,
Module = 2,
Namespace = 3,
Package = 4,
Class = 5,
Method = 6,
Property = 7,
Field = 8,
Constructor = 9,
Enum = 10,
Interface = 11,
Function = 12,
Variable = 13,
Constant = 14,
String = 15,
Number = 16,
Boolean = 17,
Array = 18,
Object = 19,
Key = 20,
Null = 21,
EnumMember = 22,
Struct = 23,
Event = 24,
Operator = 25,
TypeParameter = 26,
}
const kinds = [
SymbolKind.Class,
SymbolKind.Function,
SymbolKind.Method,
SymbolKind.Interface,
SymbolKind.Variable,
SymbolKind.Constant,
SymbolKind.Struct,
SymbolKind.Enum,
]
export async function workspaceSymbol(query: string) {
return run((client) =>
client.connection
.sendRequest("workspace/symbol", {
query,
})
.then((result: any) => result.filter((x: LSP.Symbol) => kinds.includes(x.kind)))
.then((result: any) => result.slice(0, 10))
.catch(() => []),
).then((result) => result.flat() as LSP.Symbol[])
}
export async function documentSymbol(uri: string) {
return run((client) =>
client.connection
.sendRequest("textDocument/documentSymbol", {
textDocument: {
uri,
},
})
.catch(() => []),
)
.then((result) => result.flat() as (LSP.DocumentSymbol | LSP.Symbol)[])
.then((result) => result.filter(Boolean))
}
async function run<T>(input: (client: LSPClient.Info) => Promise<T>): Promise<T[]> {
const clients = await state().then((x) => x.clients)
const tasks = clients.map((x) => input(x))
return Promise.all(tasks)
}

View file

@ -94,4 +94,6 @@ export const LANGUAGE_EXTENSIONS: Record<string, string> = {
".yml": "yaml", ".yml": "yaml",
".mjs": "javascript", ".mjs": "javascript",
".cjs": "javascript", ".cjs": "javascript",
".zig": "zig",
".zon": "zig",
} as const } as const

View file

@ -6,6 +6,7 @@ import { Log } from "../util/log"
import { BunProc } from "../bun"
import { $ } from "bun"
import fs from "fs/promises"
import { Filesystem } from "../util/filesystem"
export namespace LSPServer {
const log = Log.create({ service: "lsp.server" })
@ -15,31 +16,44 @@ export namespace LSPServer {
initialization?: Record<string, any>
}
type RootFunction = (file: string, app: App.Info) => Promise<string | undefined>
const NearestRoot = (patterns: string[]): RootFunction => {
return async (file, app) => {
const files = Filesystem.up({
targets: patterns,
start: path.dirname(file),
stop: app.path.root,
})
const first = await files.next()
await files.return()
if (!first.value) return app.path.root
return path.dirname(first.value)
}
}
export interface Info {
id: string
extensions: string[]
global?: boolean
root: RootFunction
spawn(app: App.Info, root: string): Promise<Handle | undefined>
}
export const Typescript: Info = {
id: "typescript",
root: NearestRoot(["tsconfig.json", "package.json", "jsconfig.json"]),
extensions: [".ts", ".tsx", ".js", ".jsx", ".mjs", ".cjs", ".mts", ".cts"],
async spawn(app, root) {
const tsserver = await Bun.resolve("typescript/lib/tsserver.js", app.path.cwd).catch(() => {})
if (!tsserver) return
const proc = spawn(BunProc.which(), ["x", "typescript-language-server", "--stdio"], {
cwd: root,
env: {
...process.env,
BUN_BE_BUN: "1",
},
})
return {
process: proc,
initialization: {
@ -53,8 +67,13 @@ export namespace LSPServer {
export const Gopls: Info = {
id: "golang",
root: async (file, app) => {
const work = await NearestRoot(["go.work"])(file, app)
if (work) return work
return NearestRoot(["go.mod", "go.sum"])(file, app)
},
extensions: [".go"], extensions: [".go"],
async spawn() { async spawn(_, root) {
let bin = Bun.which("gopls", { let bin = Bun.which("gopls", {
PATH: process.env["PATH"] + ":" + Global.Path.bin, PATH: process.env["PATH"] + ":" + Global.Path.bin,
}) })
@ -73,24 +92,24 @@ export namespace LSPServer {
log.error("Failed to install gopls") log.error("Failed to install gopls")
return return
} }
bin = path.join( bin = path.join(Global.Path.bin, "gopls" + (process.platform === "win32" ? ".exe" : ""))
Global.Path.bin,
"gopls" + (process.platform === "win32" ? ".exe" : ""),
)
log.info(`installed gopls`, { log.info(`installed gopls`, {
bin, bin,
}) })
} }
return { return {
process: spawn(bin!), process: spawn(bin!, {
cwd: root,
}),
} }
}, },
} }
export const RubyLsp: Info = { export const RubyLsp: Info = {
id: "ruby-lsp", id: "ruby-lsp",
root: NearestRoot(["Gemfile"]),
extensions: [".rb", ".rake", ".gemspec", ".ru"], extensions: [".rb", ".rake", ".gemspec", ".ru"],
async spawn() { async spawn(_, root) {
let bin = Bun.which("ruby-lsp", { let bin = Bun.which("ruby-lsp", {
PATH: process.env["PATH"] + ":" + Global.Path.bin, PATH: process.env["PATH"] + ":" + Global.Path.bin,
}) })
@ -113,16 +132,15 @@ export namespace LSPServer {
log.error("Failed to install ruby-lsp") log.error("Failed to install ruby-lsp")
return return
} }
bin = path.join( bin = path.join(Global.Path.bin, "ruby-lsp" + (process.platform === "win32" ? ".exe" : ""))
Global.Path.bin,
"ruby-lsp" + (process.platform === "win32" ? ".exe" : ""),
)
log.info(`installed ruby-lsp`, { log.info(`installed ruby-lsp`, {
bin, bin,
}) })
} }
return { return {
process: spawn(bin!, ["--stdio"]), process: spawn(bin!, ["--stdio"], {
cwd: root,
}),
} }
}, },
} }
@ -130,17 +148,15 @@ export namespace LSPServer {
export const Pyright: Info = {
id: "pyright",
extensions: [".py", ".pyi"],
root: NearestRoot(["pyproject.toml", "setup.py", "setup.cfg", "requirements.txt", "Pipfile", "pyrightconfig.json"]),
async spawn(_, root) {
const proc = spawn(BunProc.which(), ["x", "pyright-langserver", "--stdio"], {
cwd: root,
env: {
...process.env,
BUN_BE_BUN: "1",
},
})
return {
process: proc,
}
@ -150,7 +166,8 @@ export namespace LSPServer {
export const ElixirLS: Info = {
id: "elixir-ls",
extensions: [".ex", ".exs"],
root: NearestRoot(["mix.exs", "mix.lock"]),
async spawn(_, root) {
let binary = Bun.which("elixir-ls")
if (!binary) {
const elixirLsPath = path.join(Global.Path.bin, "elixir-ls")
@ -158,9 +175,7 @@ export namespace LSPServer {
Global.Path.bin,
"elixir-ls-master",
"release",
process.platform === "win32" ? "language_server.bar" : "language_server.sh",
)
if (!(await Bun.file(binary).exists())) {
@ -172,9 +187,7 @@ export namespace LSPServer {
log.info("downloading elixir-ls from GitHub releases") log.info("downloading elixir-ls from GitHub releases")
const response = await fetch( const response = await fetch("https://github.com/elixir-lsp/elixir-ls/archive/refs/heads/master.zip")
"https://github.com/elixir-lsp/elixir-ls/archive/refs/heads/master.zip",
)
if (!response.ok) return if (!response.ok) return
const zipPath = path.join(Global.Path.bin, "elixir-ls.zip") const zipPath = path.join(Global.Path.bin, "elixir-ls.zip")
await Bun.file(zipPath).write(response) await Bun.file(zipPath).write(response)
@ -198,7 +211,114 @@ export namespace LSPServer {
}
return {
process: spawn(binary, {
cwd: root,
}),
}
},
}
export const Zls: Info = {
id: "zls",
extensions: [".zig", ".zon"],
root: NearestRoot(["build.zig"]),
async spawn(_, root) {
let bin = Bun.which("zls", {
PATH: process.env["PATH"] + ":" + Global.Path.bin,
})
if (!bin) {
const zig = Bun.which("zig")
if (!zig) {
log.error("Zig is required to use zls. Please install Zig first.")
return
}
log.info("downloading zls from GitHub releases")
const releaseResponse = await fetch("https://api.github.com/repos/zigtools/zls/releases/latest")
if (!releaseResponse.ok) {
log.error("Failed to fetch zls release info")
return
}
const release = await releaseResponse.json()
const platform = process.platform
const arch = process.arch
let assetName = ""
let zlsArch: string = arch
if (arch === "arm64") zlsArch = "aarch64"
else if (arch === "x64") zlsArch = "x86_64"
else if (arch === "ia32") zlsArch = "x86"
let zlsPlatform: string = platform
if (platform === "darwin") zlsPlatform = "macos"
else if (platform === "win32") zlsPlatform = "windows"
const ext = platform === "win32" ? "zip" : "tar.xz"
assetName = `zls-${zlsArch}-${zlsPlatform}.${ext}`
const supportedCombos = [
"zls-x86_64-linux.tar.xz",
"zls-x86_64-macos.tar.xz",
"zls-x86_64-windows.zip",
"zls-aarch64-linux.tar.xz",
"zls-aarch64-macos.tar.xz",
"zls-aarch64-windows.zip",
"zls-x86-linux.tar.xz",
"zls-x86-windows.zip",
]
if (!supportedCombos.includes(assetName)) {
log.error(`Platform ${platform} and architecture ${arch} is not supported by zls`)
return
}
const asset = release.assets.find((a: any) => a.name === assetName)
if (!asset) {
log.error(`Could not find asset ${assetName} in latest zls release`)
return
}
const downloadUrl = asset.browser_download_url
const downloadResponse = await fetch(downloadUrl)
if (!downloadResponse.ok) {
log.error("Failed to download zls")
return
}
const tempPath = path.join(Global.Path.bin, assetName)
await Bun.file(tempPath).write(downloadResponse)
if (ext === "zip") {
await $`unzip -o -q ${tempPath}`.cwd(Global.Path.bin).nothrow()
} else {
await $`tar -xf ${tempPath}`.cwd(Global.Path.bin).nothrow()
}
await fs.rm(tempPath, { force: true })
bin = path.join(Global.Path.bin, "zls" + (platform === "win32" ? ".exe" : ""))
if (!(await Bun.file(bin).exists())) {
log.error("Failed to extract zls binary")
return
}
if (platform !== "win32") {
await $`chmod +x ${bin}`.nothrow()
}
log.info(`installed zls`, { bin })
}
return {
process: spawn(bin, {
cwd: root,
}),
}
},
}

View file

@ -37,6 +37,7 @@ export namespace MCP {
transport: {
type: "sse",
url: mcp.url,
headers: mcp.headers,
},
}).catch(() => {})
if (!client) {

View file

@ -21,7 +21,7 @@ import { AuthCopilot } from "../auth/copilot"
import { ModelsDev } from "./models"
import { NamedError } from "../util/error"
import { Auth } from "../auth"
import { TaskTool } from "../tool/task"
export namespace Provider {
const log = Log.create({ service: "provider" })
@ -91,8 +91,7 @@ export namespace Provider {
if (!info || info.type !== "oauth") return
if (!info.access || info.expires < Date.now()) {
const tokens = await copilot.access(info.refresh)
if (!tokens) throw new Error("GitHub Copilot authentication expired")
await Auth.set("github-copilot", {
type: "oauth",
...tokens,
@ -100,25 +99,27 @@ export namespace Provider {
info.access = tokens.access
}
let isAgentCall = false
let isVisionRequest = false
try {
const body = typeof init.body === "string" ? JSON.parse(init.body) : init.body
if (body?.messages) {
isAgentCall = body.messages.some((msg: any) => msg.role && ["tool", "assistant"].includes(msg.role))
isVisionRequest = body.messages.some(
(msg: any) =>
Array.isArray(msg.content) && msg.content.some((part: any) => part.type === "image_url"),
)
}
} catch {}
const headers: Record<string, string> = {
...init.headers,
...copilot.HEADERS,
Authorization: `Bearer ${info.access}`,
"Openai-Intent": "conversation-edits",
"X-Initiator": isAgentCall ? "agent" : "user",
}
if (isVisionRequest) {
headers["Copilot-Vision-Request"] = "true"
}
delete headers["x-api-key"]
return fetch(input, {
...init,
@ -138,14 +139,12 @@ export namespace Provider {
}
},
"amazon-bedrock": async () => {
if (!process.env["AWS_PROFILE"] && !process.env["AWS_ACCESS_KEY_ID"] && !process.env["AWS_BEARER_TOKEN_BEDROCK"])
return { autoload: false }
const region = process.env["AWS_REGION"] ?? "us-east-1"
const { fromNodeProviderChain } = await import(await BunProc.install("@aws-sdk/credential-providers"))
return {
autoload: true,
options: {
@ -157,9 +156,7 @@ export namespace Provider {
switch (regionPrefix) { switch (regionPrefix) {
case "us": { case "us": {
const modelRequiresPrefix = ["claude", "deepseek"].some((m) => const modelRequiresPrefix = ["claude", "deepseek"].some((m) => modelID.includes(m))
modelID.includes(m),
)
if (modelRequiresPrefix) { if (modelRequiresPrefix) {
modelID = `${regionPrefix}.${modelID}` modelID = `${regionPrefix}.${modelID}`
} }
@ -174,25 +171,18 @@ export namespace Provider {
"eu-south-1", "eu-south-1",
"eu-south-2", "eu-south-2",
].some((r) => region.includes(r)) ].some((r) => region.includes(r))
const modelRequiresPrefix = [ const modelRequiresPrefix = ["claude", "nova-lite", "nova-micro", "llama3", "pixtral"].some((m) =>
"claude", modelID.includes(m),
"nova-lite", )
"nova-micro",
"llama3",
"pixtral",
].some((m) => modelID.includes(m))
if (regionRequiresPrefix && modelRequiresPrefix) { if (regionRequiresPrefix && modelRequiresPrefix) {
modelID = `${regionPrefix}.${modelID}` modelID = `${regionPrefix}.${modelID}`
} }
break break
} }
case "ap": { case "ap": {
const modelRequiresPrefix = [ const modelRequiresPrefix = ["claude", "nova-lite", "nova-micro", "nova-pro"].some((m) =>
"claude", modelID.includes(m),
"nova-lite", )
"nova-micro",
"nova-pro",
].some((m) => modelID.includes(m))
if (modelRequiresPrefix) { if (modelRequiresPrefix) {
regionPrefix = "apac" regionPrefix = "apac"
modelID = `${regionPrefix}.${modelID}` modelID = `${regionPrefix}.${modelID}`
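
Bedrock autoloading now also triggers on `AWS_BEARER_TOKEN_BEDROCK`, and model IDs get a geo prefix (`us.`, `eu.`, `apac.`) depending on region and model family. A compact, hedged reduction of that prefixing rule follows; the region-prefix derivation and the simplified `eu` branch are assumptions (the real `eu` case also gates on a specific region allow-list shown above).

```ts
// Illustrative reduction of the region-prefix rules shown above.
// Returns the model ID Bedrock would be called with for a given region.
function prefixBedrockModel(modelID: string, region: string): string {
  const regionPrefix = region.split("-")[0] // assumption: "us-east-1" -> "us"
  switch (regionPrefix) {
    case "us":
      return ["claude", "deepseek"].some((m) => modelID.includes(m)) ? `us.${modelID}` : modelID
    case "eu": {
      // simplified: the shipped code additionally checks a list of eu-* regions
      const needsPrefix = ["claude", "nova-lite", "nova-micro", "llama3", "pixtral"].some((m) => modelID.includes(m))
      return needsPrefix ? `eu.${modelID}` : modelID
    }
    case "ap":
      return ["claude", "nova-lite", "nova-micro", "nova-pro"].some((m) => modelID.includes(m))
        ? `apac.${modelID}`
        : modelID
    default:
      return modelID
  }
}

// e.g. prefixBedrockModel("anthropic.claude-sonnet-4", "eu-west-1") -> "eu.anthropic.claude-sonnet-4"
```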
@ -230,10 +220,7 @@ export namespace Provider {
options: Record<string, any> options: Record<string, any>
} }
} = {} } = {}
const models = new Map< const models = new Map<string, { info: ModelsDev.Model; language: LanguageModel }>()
string,
{ info: ModelsDev.Model; language: LanguageModel }
>()
const sdk = new Map<string, SDK>() const sdk = new Map<string, SDK>()
log.info("init") log.info("init")
@ -248,7 +235,7 @@ export namespace Provider {
if (!provider) { if (!provider) {
const info = database[id] const info = database[id]
if (!info) return if (!info) return
if (info.api) options["baseURL"] = info.api if (info.api && !options["baseURL"]) options["baseURL"] = info.api
providers[id] = { providers[id] = {
source, source,
info, info,
@ -285,13 +272,19 @@
             reasoning: model.reasoning ?? existing?.reasoning ?? false,
             temperature: model.temperature ?? existing?.temperature ?? false,
             tool_call: model.tool_call ?? existing?.tool_call ?? true,
-            cost: {
-              ...existing?.cost,
-              ...model.cost,
-              input: 0,
-              output: 0,
-              cache_read: 0,
-              cache_write: 0,
-            },
+            cost:
+              !model.cost && !existing?.cost
+                ? {
+                    input: 0,
+                    output: 0,
+                    cache_read: 0,
+                    cache_write: 0,
+                  }
+                : {
+                    cache_read: 0,
+                    cache_write: 0,
+                    ...existing?.cost,
+                    ...model.cost,
+                  },
             options: {
               ...existing?.options,
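
The cost block above is the "model cost overrides" fix: zeros are now only used when neither the user's model config nor the catalog entry defines a cost, so a configured override is no longer clobbered by trailing zero defaults. A small sketch of the merge as a pure function; the `Cost` type is a hypothetical stand-in for `ModelsDev.Model["cost"]`.

```ts
// Hedged sketch of the cost-merge behavior after the fix above.
type Cost = { input: number; output: number; cache_read?: number; cache_write?: number }

function mergeCost(override?: Partial<Cost>, existing?: Cost): Cost {
  if (!override && !existing) {
    // nothing known about pricing: default everything to zero
    return { input: 0, output: 0, cache_read: 0, cache_write: 0 }
  }
  // otherwise zeros are only fallbacks for the cache fields; catalog data,
  // then user overrides, win (cast mirrors the assumption that one side
  // supplies input/output)
  return { cache_read: 0, cache_write: 0, ...existing, ...override } as Cost
}

// mergeCost({ input: 3, output: 15 }) -> { input: 3, output: 15, cache_read: 0, cache_write: 0 }
```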
@ -308,9 +301,7 @@ export namespace Provider {
database[providerID] = parsed database[providerID] = parsed
} }
const disabled = await Config.get().then( const disabled = await Config.get().then((cfg) => new Set(cfg.disabled_providers ?? []))
(cfg) => new Set(cfg.disabled_providers ?? []),
)
// load env // load env
for (const [providerID, provider] of Object.entries(database)) { for (const [providerID, provider] of Object.entries(database)) {
if (disabled.has(providerID)) continue if (disabled.has(providerID)) continue
@ -337,12 +328,7 @@ export namespace Provider {
if (disabled.has(providerID)) continue if (disabled.has(providerID)) continue
const result = await fn(database[providerID]) const result = await fn(database[providerID])
if (result && (result.autoload || providers[providerID])) { if (result && (result.autoload || providers[providerID])) {
mergeProvider( mergeProvider(providerID, result.options ?? {}, "custom", result.getModel)
providerID,
result.options ?? {},
"custom",
result.getModel,
)
} }
} }
@ -379,7 +365,7 @@ export namespace Provider {
const existing = s.sdk.get(provider.id) const existing = s.sdk.get(provider.id)
if (existing) return existing if (existing) return existing
const pkg = provider.npm ?? provider.id const pkg = provider.npm ?? provider.id
const mod = await import(await BunProc.install(pkg, "latest")) const mod = await import(await BunProc.install(pkg, "beta"))
const fn = mod[Object.keys(mod).find((key) => key.startsWith("create"))!] const fn = mod[Object.keys(mod).find((key) => key.startsWith("create"))!]
const loaded = fn(s.providers[provider.id]?.options) const loaded = fn(s.providers[provider.id]?.options)
s.sdk.set(provider.id, loaded) s.sdk.set(provider.id, loaded)
@ -406,9 +392,7 @@ export namespace Provider {
const sdk = await getSDK(provider.info) const sdk = await getSDK(provider.info)
try { try {
const language = provider.getModel const language = provider.getModel ? await provider.getModel(sdk, modelID) : sdk.languageModel(modelID)
? await provider.getModel(sdk, modelID)
: sdk.languageModel(modelID)
log.info("found", { providerID, modelID }) log.info("found", { providerID, modelID })
s.models.set(key, { s.models.set(key, {
info, info,
@ -431,14 +415,22 @@
     }
   }

+  export async function getSmallModel(providerID: string) {
+    const provider = await state().then((state) => state.providers[providerID])
+    if (!provider) return
+    const priority = ["3-5-haiku", "3.5-haiku", "gemini-2.5-flash"]
+    for (const item of priority) {
+      for (const model of Object.keys(provider.info.models)) {
+        if (model.includes(item)) return getModel(providerID, model)
+      }
+    }
+  }
+
   const priority = ["gemini-2.5-pro-preview", "codex-mini", "claude-sonnet-4"]
   export function sort(models: ModelsDev.Model[]) {
     return sortBy(
       models,
-      [
-        (model) => priority.findIndex((filter) => model.id.includes(filter)),
-        "desc",
-      ],
+      [(model) => priority.findIndex((filter) => model.id.includes(filter)), "desc"],
       [(model) => (model.id.includes("latest") ? 0 : 1), "asc"],
       [(model) => model.id, "desc"],
     )
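
`getSmallModel` scans a provider's model list for a cheap "small" model (Haiku or Gemini Flash variants), while `sort` ranks models by a hard-coded priority list. A hedged usage sketch follows; the caller, the fallback strategy, and the `Provider.getModel` export are assumptions rather than something this diff shows.

```ts
// Hypothetical caller combining the two helpers above, e.g. to pick a model
// for lightweight work such as title generation.
import { Provider } from "../provider/provider"

async function pickSmallOrBestModel(providerID: string) {
  // prefer a cheap model if the provider exposes one...
  const small = await Provider.getSmallModel(providerID)
  if (small) return small
  // ...otherwise fall back to the provider's best-ranked model
  const providers = await Provider.list()
  const info = providers[providerID]?.info
  if (!info) throw new Error(`unknown provider: ${providerID}`)
  const [best] = Provider.sort(Object.values(info.models))
  return Provider.getModel(providerID, best.id)
}
```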
@ -449,11 +441,7 @@ export namespace Provider {
if (cfg.model) return parseModel(cfg.model) if (cfg.model) return parseModel(cfg.model)
const provider = await list() const provider = await list()
.then((val) => Object.values(val)) .then((val) => Object.values(val))
.then((x) => .then((x) => x.find((p) => !cfg.provider || Object.keys(cfg.provider).includes(p.info.id)))
x.find(
(p) => !cfg.provider || Object.keys(cfg.provider).includes(p.info.id),
),
)
if (!provider) throw new Error("no providers found") if (!provider) throw new Error("no providers found")
const [model] = sort(Object.values(provider.info.models)) const [model] = sort(Object.values(provider.info.models))
if (!model) throw new Error("no models found") if (!model) throw new Error("no models found")
@ -486,7 +474,7 @@ export namespace Provider {
WriteTool, WriteTool,
TodoWriteTool, TodoWriteTool,
TodoReadTool, TodoReadTool,
// TaskTool, TaskTool,
] ]
const TOOL_MAPPING: Record<string, Tool.Info[]> = { const TOOL_MAPPING: Record<string, Tool.Info[]> = {
@ -536,9 +524,11 @@ export namespace Provider {
if (schema instanceof z.ZodUnion) { if (schema instanceof z.ZodUnion) {
return z.union( return z.union(
schema.options.map((option: z.ZodTypeAny) => schema.options.map((option: z.ZodTypeAny) => optionalToNullable(option)) as [
optionalToNullable(option), z.ZodTypeAny,
) as [z.ZodTypeAny, z.ZodTypeAny, ...z.ZodTypeAny[]], z.ZodTypeAny,
...z.ZodTypeAny[],
],
) )
} }
@ -559,12 +549,4 @@ export namespace Provider {
providerID: z.string(), providerID: z.string(),
}), }),
) )
export const AuthError = NamedError.create(
"ProviderAuthError",
z.object({
providerID: z.string(),
message: z.string(),
}),
)
} }

View file

@ -1,22 +1,21 @@
-import type { LanguageModelV1Prompt } from "ai"
+import type { ModelMessage } from "ai"
 import { unique } from "remeda"

 export namespace ProviderTransform {
-  export function message(
-    msgs: LanguageModelV1Prompt,
-    providerID: string,
-    modelID: string,
-  ) {
+  export function message(msgs: ModelMessage[], providerID: string, modelID: string) {
     if (providerID === "anthropic" || modelID.includes("anthropic")) {
       const system = msgs.filter((msg) => msg.role === "system").slice(0, 2)
       const final = msgs.filter((msg) => msg.role !== "system").slice(-2)
       for (const msg of unique([...system, ...final])) {
-        msg.providerMetadata = {
-          ...msg.providerMetadata,
+        msg.providerOptions = {
+          ...msg.providerOptions,
           anthropic: {
             cacheControl: { type: "ephemeral" },
           },
+          openaiCompatible: {
+            cache_control: { type: "ephemeral" },
+          },
         }
       }
     }
@ -25,8 +24,8 @@ export namespace ProviderTransform {
       const final = msgs.filter((msg) => msg.role !== "system").slice(-2)
       for (const msg of unique([...system, ...final])) {
-        msg.providerMetadata = {
-          ...msg.providerMetadata,
+        msg.providerOptions = {
+          ...msg.providerOptions,
           bedrock: {
             cachePoint: { type: "ephemeral" },
           },
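
The transform now writes `providerOptions` (the AI SDK v5 name for what used to be `providerMetadata`) and additionally emits an `openaiCompatible.cache_control` entry, so prompt-caching hints reach both Anthropic-native and OpenAI-compatible gateways. A hedged sketch of calling it before a request; the import path, message contents, and model ID are placeholders.

```ts
// Illustrative only: marks the first system message and last user message for
// prompt caching the way the transform above does. ModelMessage is from "ai".
import type { ModelMessage } from "ai"
import { ProviderTransform } from "../provider/transform"

const msgs: ModelMessage[] = [
  { role: "system", content: "You are opencode..." },
  { role: "user", content: "refactor user service" },
]

// mutates msgs in place, attaching providerOptions.anthropic.cacheControl
// (plus the openaiCompatible.cache_control mirror) to the cacheable messages
ProviderTransform.message(msgs, "anthropic", "claude-sonnet-4")
```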

View file

@ -6,7 +6,6 @@ import { streamSSE } from "hono/streaming"
import { Session } from "../session" import { Session } from "../session"
import { resolver, validator as zValidator } from "hono-openapi/zod" import { resolver, validator as zValidator } from "hono-openapi/zod"
import { z } from "zod" import { z } from "zod"
import { Message } from "../session/message"
import { Provider } from "../provider/provider" import { Provider } from "../provider/provider"
import { App } from "../app/app" import { App } from "../app/app"
import { mapValues } from "remeda" import { mapValues } from "remeda"
@ -16,6 +15,8 @@ import { Ripgrep } from "../file/ripgrep"
import { Config } from "../config/config" import { Config } from "../config/config"
import { File } from "../file" import { File } from "../file"
import { LSP } from "../lsp" import { LSP } from "../lsp"
import { MessageV2 } from "../session/message-v2"
import { Mode } from "../session/mode"
const ERRORS = { const ERRORS = {
400: { 400: {
@ -51,12 +52,9 @@ export namespace Server {
status: 400, status: 400,
}) })
} }
return c.json( return c.json(new NamedError.Unknown({ message: err.toString() }).toObject(), {
new NamedError.Unknown({ message: err.toString() }).toObject(),
{
status: 400, status: 400,
}, })
)
}) })
.use(async (c, next) => { .use(async (c, next) => {
log.info("request", { log.info("request", {
@ -271,6 +269,7 @@ export namespace Server {
zValidator( zValidator(
"json", "json",
z.object({ z.object({
messageID: z.string(),
providerID: z.string(), providerID: z.string(),
modelID: z.string(), modelID: z.string(),
}), }),
@ -407,7 +406,14 @@ export namespace Server {
description: "List of messages", description: "List of messages",
content: { content: {
"application/json": { "application/json": {
schema: resolver(Message.Info.array()), schema: resolver(
z
.object({
info: MessageV2.Info,
parts: MessageV2.Part.array(),
})
.array(),
),
}, },
}, },
}, },
@ -433,7 +439,7 @@ export namespace Server {
description: "Created message", description: "Created message",
content: { content: {
"application/json": { "application/json": {
schema: resolver(Message.Info), schema: resolver(MessageV2.Assistant),
}, },
}, },
}, },
@ -448,9 +454,11 @@ export namespace Server {
zValidator( zValidator(
"json", "json",
z.object({ z.object({
messageID: z.string(),
providerID: z.string(), providerID: z.string(),
modelID: z.string(), modelID: z.string(),
parts: Message.MessagePart.array(), mode: z.string(),
parts: z.union([MessageV2.FilePart, MessageV2.TextPart]).array(),
}), }),
), ),
async (c) => { async (c) => {
@ -481,15 +489,10 @@ export namespace Server {
}, },
}), }),
async (c) => { async (c) => {
const providers = await Provider.list().then((x) => const providers = await Provider.list().then((x) => mapValues(x, (item) => item.info))
mapValues(x, (item) => item.info),
)
return c.json({ return c.json({
providers: Object.values(providers), providers: Object.values(providers),
default: mapValues( default: mapValues(providers, (item) => Provider.sort(Object.values(item.models))[0].id),
providers,
(item) => Provider.sort(Object.values(item.models))[0].id,
),
}) })
}, },
) )
@ -566,7 +569,7 @@ export namespace Server {
description: "Symbols", description: "Symbols",
content: { content: {
"application/json": { "application/json": {
schema: resolver(z.unknown().array()), schema: resolver(LSP.Symbol.array()),
}, },
}, },
}, },
@ -629,16 +632,7 @@ export namespace Server {
description: "File status", description: "File status",
content: { content: {
"application/json": { "application/json": {
schema: resolver( schema: resolver(File.Info.array()),
z
.object({
file: z.string(),
added: z.number().int(),
removed: z.number().int(),
status: z.enum(["added", "deleted", "modified"]),
})
.array(),
),
}, },
}, },
}, },
@ -649,6 +643,75 @@ export namespace Server {
return c.json(content) return c.json(content)
}, },
) )
.post(
"/log",
describeRoute({
description: "Write a log entry to the server logs",
responses: {
200: {
description: "Log entry written successfully",
content: {
"application/json": {
schema: resolver(z.boolean()),
},
},
},
},
}),
zValidator(
"json",
z.object({
service: z.string().openapi({ description: "Service name for the log entry" }),
level: z.enum(["debug", "info", "error", "warn"]).openapi({ description: "Log level" }),
message: z.string().openapi({ description: "Log message" }),
extra: z
.record(z.string(), z.any())
.optional()
.openapi({ description: "Additional metadata for the log entry" }),
}),
),
async (c) => {
const { service, level, message, extra } = c.req.valid("json")
const logger = Log.create({ service })
switch (level) {
case "debug":
logger.debug(message, extra)
break
case "info":
logger.info(message, extra)
break
case "error":
logger.error(message, extra)
break
case "warn":
logger.warn(message, extra)
break
}
return c.json(true)
},
)
.get(
"/mode",
describeRoute({
description: "List all modes",
responses: {
200: {
description: "List of modes",
content: {
"application/json": {
schema: resolver(Mode.Info.array()),
},
},
},
},
}),
async (c) => {
const modes = await Mode.list()
return c.json(modes)
},
)
return result return result
} }
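
The server gains a `POST /log` route that forwards structured log entries into the server's own logger and a `GET /mode` route that lists the configured modes. A hedged client-side sketch of both; the base URL and port are placeholders, while the request and response shapes follow the zod schemas in the diff.

```ts
// Hypothetical client for the two new routes above.
const base = "http://127.0.0.1:4096" // placeholder address

// write a log entry into the server logs; the route responds with `true`
await fetch(`${base}/log`, {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    service: "tui",
    level: "info",
    message: "attachment parsed",
    extra: { file: "notes.md" },
  }),
})

// list all modes (e.g. "build" and "plan" by default)
const modes = await fetch(`${base}/mode`).then((r) => r.json())
console.log(modes.map((m: { name: string }) => m.name))
```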

File diff suppressed because it is too large

View file

@ -0,0 +1,460 @@
import z from "zod"
import { Bus } from "../bus"
import { NamedError } from "../util/error"
import { Message } from "./message"
import { convertToModelMessages, type ModelMessage, type UIMessage } from "ai"
import { Identifier } from "../id/id"
export namespace MessageV2 {
export const OutputLengthError = NamedError.create("MessageOutputLengthError", z.object({}))
export const AbortedError = NamedError.create("MessageAbortedError", z.object({}))
export const AuthError = NamedError.create(
"ProviderAuthError",
z.object({
providerID: z.string(),
message: z.string(),
}),
)
export const ToolStatePending = z
.object({
status: z.literal("pending"),
})
.openapi({
ref: "ToolStatePending",
})
export type ToolStatePending = z.infer<typeof ToolStatePending>
export const ToolStateRunning = z
.object({
status: z.literal("running"),
input: z.any(),
title: z.string().optional(),
metadata: z.record(z.any()).optional(),
time: z.object({
start: z.number(),
}),
})
.openapi({
ref: "ToolStateRunning",
})
export type ToolStateRunning = z.infer<typeof ToolStateRunning>
export const ToolStateCompleted = z
.object({
status: z.literal("completed"),
input: z.record(z.any()),
output: z.string(),
title: z.string(),
metadata: z.record(z.any()),
time: z.object({
start: z.number(),
end: z.number(),
}),
})
.openapi({
ref: "ToolStateCompleted",
})
export type ToolStateCompleted = z.infer<typeof ToolStateCompleted>
export const ToolStateError = z
.object({
status: z.literal("error"),
input: z.record(z.any()),
error: z.string(),
time: z.object({
start: z.number(),
end: z.number(),
}),
})
.openapi({
ref: "ToolStateError",
})
export type ToolStateError = z.infer<typeof ToolStateError>
export const ToolState = z
.discriminatedUnion("status", [ToolStatePending, ToolStateRunning, ToolStateCompleted, ToolStateError])
.openapi({
ref: "ToolState",
})
const PartBase = z.object({
id: z.string(),
sessionID: z.string(),
messageID: z.string(),
})
export const SnapshotPart = PartBase.extend({
type: z.literal("snapshot"),
snapshot: z.string(),
}).openapi({
ref: "SnapshotPart",
})
export type SnapshotPart = z.infer<typeof SnapshotPart>
export const TextPart = PartBase.extend({
type: z.literal("text"),
text: z.string(),
synthetic: z.boolean().optional(),
time: z
.object({
start: z.number(),
end: z.number().optional(),
})
.optional(),
}).openapi({
ref: "TextPart",
})
export type TextPart = z.infer<typeof TextPart>
export const ToolPart = PartBase.extend({
type: z.literal("tool"),
callID: z.string(),
tool: z.string(),
state: ToolState,
}).openapi({
ref: "ToolPart",
})
export type ToolPart = z.infer<typeof ToolPart>
export const FilePart = PartBase.extend({
type: z.literal("file"),
mime: z.string(),
filename: z.string().optional(),
url: z.string(),
}).openapi({
ref: "FilePart",
})
export type FilePart = z.infer<typeof FilePart>
export const StepStartPart = PartBase.extend({
type: z.literal("step-start"),
}).openapi({
ref: "StepStartPart",
})
export type StepStartPart = z.infer<typeof StepStartPart>
export const StepFinishPart = PartBase.extend({
type: z.literal("step-finish"),
cost: z.number(),
tokens: z.object({
input: z.number(),
output: z.number(),
reasoning: z.number(),
cache: z.object({
read: z.number(),
write: z.number(),
}),
}),
}).openapi({
ref: "StepFinishPart",
})
export type StepFinishPart = z.infer<typeof StepFinishPart>
const Base = z.object({
id: z.string(),
sessionID: z.string(),
})
export const User = Base.extend({
role: z.literal("user"),
time: z.object({
created: z.number(),
}),
}).openapi({
ref: "UserMessage",
})
export type User = z.infer<typeof User>
export const Part = z
.discriminatedUnion("type", [TextPart, FilePart, ToolPart, StepStartPart, StepFinishPart, SnapshotPart])
.openapi({
ref: "Part",
})
export type Part = z.infer<typeof Part>
export const Assistant = Base.extend({
role: z.literal("assistant"),
time: z.object({
created: z.number(),
completed: z.number().optional(),
}),
error: z
.discriminatedUnion("name", [
AuthError.Schema,
NamedError.Unknown.Schema,
OutputLengthError.Schema,
AbortedError.Schema,
])
.optional(),
system: z.string().array(),
modelID: z.string(),
providerID: z.string(),
path: z.object({
cwd: z.string(),
root: z.string(),
}),
summary: z.boolean().optional(),
cost: z.number(),
tokens: z.object({
input: z.number(),
output: z.number(),
reasoning: z.number(),
cache: z.object({
read: z.number(),
write: z.number(),
}),
}),
}).openapi({
ref: "AssistantMessage",
})
export type Assistant = z.infer<typeof Assistant>
export const Info = z.discriminatedUnion("role", [User, Assistant]).openapi({
ref: "Message",
})
export type Info = z.infer<typeof Info>
export const Event = {
Updated: Bus.event(
"message.updated",
z.object({
info: Info,
}),
),
Removed: Bus.event(
"message.removed",
z.object({
sessionID: z.string(),
messageID: z.string(),
}),
),
PartUpdated: Bus.event(
"message.part.updated",
z.object({
part: Part,
}),
),
}
export function fromV1(v1: Message.Info) {
if (v1.role === "assistant") {
const info: Assistant = {
id: v1.id,
sessionID: v1.metadata.sessionID,
role: "assistant",
time: {
created: v1.metadata.time.created,
completed: v1.metadata.time.completed,
},
cost: v1.metadata.assistant!.cost,
path: v1.metadata.assistant!.path,
summary: v1.metadata.assistant!.summary,
tokens: v1.metadata.assistant!.tokens,
modelID: v1.metadata.assistant!.modelID,
providerID: v1.metadata.assistant!.providerID,
system: v1.metadata.assistant!.system,
error: v1.metadata.error,
}
const parts = v1.parts.flatMap((part): Part[] => {
const base = {
id: Identifier.ascending("part"),
messageID: v1.id,
sessionID: v1.metadata.sessionID,
}
if (part.type === "text") {
return [
{
...base,
type: "text",
text: part.text,
},
]
}
if (part.type === "step-start") {
return [
{
...base,
type: "step-start",
},
]
}
if (part.type === "tool-invocation") {
return [
{
...base,
type: "tool",
callID: part.toolInvocation.toolCallId,
tool: part.toolInvocation.toolName,
state: (() => {
if (part.toolInvocation.state === "partial-call") {
return {
status: "pending",
}
}
const { title, time, ...metadata } = v1.metadata.tool[part.toolInvocation.toolCallId] ?? {}
if (part.toolInvocation.state === "call") {
return {
status: "running",
input: part.toolInvocation.args,
time: {
start: time?.start,
},
}
}
if (part.toolInvocation.state === "result") {
return {
status: "completed",
input: part.toolInvocation.args,
output: part.toolInvocation.result,
title,
time,
metadata,
}
}
throw new Error("unknown tool invocation state")
})(),
},
]
}
return []
})
return {
info,
parts,
}
}
if (v1.role === "user") {
const info: User = {
id: v1.id,
sessionID: v1.metadata.sessionID,
role: "user",
time: {
created: v1.metadata.time.created,
},
}
const parts = v1.parts.flatMap((part): Part[] => {
const base = {
id: Identifier.ascending("part"),
messageID: v1.id,
sessionID: v1.metadata.sessionID,
}
if (part.type === "text") {
return [
{
...base,
type: "text",
text: part.text,
},
]
}
if (part.type === "file") {
return [
{
...base,
type: "file",
mime: part.mediaType,
filename: part.filename,
url: part.url,
},
]
}
return []
})
return { info, parts }
}
throw new Error("unknown message type")
}
export function toModelMessage(
input: {
info: Info
parts: Part[]
}[],
): ModelMessage[] {
const result: UIMessage[] = []
for (const msg of input) {
if (msg.parts.length === 0) continue
if (msg.info.role === "user") {
result.push({
id: msg.info.id,
role: "user",
parts: msg.parts.flatMap((part): UIMessage["parts"] => {
if (part.type === "text")
return [
{
type: "text",
text: part.text,
},
]
if (part.type === "file")
return [
{
type: "file",
url: part.url,
mediaType: part.mime,
filename: part.filename,
},
]
return []
}),
})
}
if (msg.info.role === "assistant") {
result.push({
id: msg.info.id,
role: "assistant",
parts: msg.parts.flatMap((part): UIMessage["parts"] => {
if (part.type === "text")
return [
{
type: "text",
text: part.text,
},
]
if (part.type === "step-start")
return [
{
type: "step-start",
},
]
if (part.type === "tool") {
if (part.state.status === "completed")
return [
{
type: ("tool-" + part.tool) as `tool-${string}`,
state: "output-available",
toolCallId: part.callID,
input: part.state.input,
output: part.state.output,
},
]
if (part.state.status === "error")
return [
{
type: ("tool-" + part.tool) as `tool-${string}`,
state: "output-error",
toolCallId: part.callID,
input: part.state.input,
errorText: part.state.error,
},
]
}
return []
}),
})
}
}
return convertToModelMessages(result)
}
}
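
`MessageV2` stores a message as an `info` record plus typed `parts`, and `toModelMessage` converts that storage shape into the AI SDK's `ModelMessage[]` via `convertToModelMessages`. A hedged sketch of feeding a stored user message through it; the IDs and session values are placeholders, and the import path mirrors the one the server uses.

```ts
// Illustrative: build a minimal user message in the V2 shape and convert it
// for a model call. All IDs and session values are placeholders.
import { MessageV2 } from "../session/message-v2"

const stored = {
  info: {
    id: "msg_001",
    sessionID: "ses_001",
    role: "user" as const,
    time: { created: Date.now() },
  },
  parts: [
    {
      id: "prt_001",
      sessionID: "ses_001",
      messageID: "msg_001",
      type: "text" as const,
      text: "why is app.js failing",
    },
  ],
}

// -> ModelMessage[] suitable for the ai SDK's generate/stream calls
const modelMessages = MessageV2.toModelMessage([stored])
```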

View file

@ -1,12 +1,14 @@
 import z from "zod"
-import { Bus } from "../bus"
-import { Provider } from "../provider/provider"
 import { NamedError } from "../util/error"

 export namespace Message {
-  export const OutputLengthError = NamedError.create(
-    "MessageOutputLengthError",
-    z.object({}),
+  export const OutputLengthError = NamedError.create("MessageOutputLengthError", z.object({}))
+  export const AuthError = NamedError.create(
+    "ProviderAuthError",
+    z.object({
+      providerID: z.string(),
+      message: z.string(),
+    }),
   )

   export const ToolCall = z
@ -49,9 +51,7 @@
   })
   export type ToolResult = z.infer<typeof ToolResult>

-  export const ToolInvocation = z
-    .discriminatedUnion("state", [ToolCall, ToolPartialCall, ToolResult])
-    .openapi({
+  export const ToolInvocation = z.discriminatedUnion("state", [ToolCall, ToolPartialCall, ToolResult]).openapi({
     ref: "ToolInvocation",
   })
   export type ToolInvocation = z.infer<typeof ToolInvocation>
@ -122,14 +122,7 @@
   export type StepStartPart = z.infer<typeof StepStartPart>

   export const MessagePart = z
-    .discriminatedUnion("type", [
-      TextPart,
-      ReasoningPart,
-      ToolInvocationPart,
-      SourceUrlPart,
-      FilePart,
-      StepStartPart,
-    ])
+    .discriminatedUnion("type", [TextPart, ReasoningPart, ToolInvocationPart, SourceUrlPart, FilePart, StepStartPart])
     .openapi({
       ref: "MessagePart",
     })
@ -147,11 +140,7 @@
         completed: z.number().optional(),
       }),
       error: z
-        .discriminatedUnion("name", [
-          Provider.AuthError.Schema,
-          NamedError.Unknown.Schema,
-          OutputLengthError.Schema,
-        ])
+        .discriminatedUnion("name", [AuthError.Schema, NamedError.Unknown.Schema, OutputLengthError.Schema])
         .optional(),
       sessionID: z.string(),
       tool: z.record(
@ -197,28 +186,4 @@
     ref: "Message",
   })
   export type Info = z.infer<typeof Info>
-
-  export const Event = {
-    Updated: Bus.event(
-      "message.updated",
-      z.object({
-        info: Info,
-      }),
-    ),
-    Removed: Bus.event(
-      "message.removed",
-      z.object({
-        sessionID: z.string(),
-        messageID: z.string(),
-      }),
-    ),
-    PartUpdated: Bus.event(
-      "message.part.updated",
-      z.object({
-        part: MessagePart,
-        sessionID: z.string(),
-        messageID: z.string(),
-      }),
-    ),
-  }
 }

View file

@ -0,0 +1,69 @@
import { mergeDeep } from "remeda"
import { App } from "../app/app"
import { Config } from "../config/config"
import z from "zod"
export namespace Mode {
export const Info = z
.object({
name: z.string(),
model: z
.object({
modelID: z.string(),
providerID: z.string(),
})
.optional(),
prompt: z.string().optional(),
tools: z.record(z.boolean()),
})
.openapi({
ref: "Mode",
})
export type Info = z.infer<typeof Info>
const state = App.state("mode", async () => {
const cfg = await Config.get()
const mode = mergeDeep(
{
build: {},
plan: {
tools: {
write: false,
edit: false,
patch: false,
},
},
},
cfg.mode ?? {},
)
const result: Record<string, Info> = {}
for (const [key, value] of Object.entries(mode)) {
let item = result[key]
if (!item)
item = result[key] = {
name: key,
tools: {},
}
const model = value.model ?? cfg.model
if (model) {
const [providerID, ...rest] = model.split("/")
const modelID = rest.join("/")
item.model = {
modelID,
providerID,
}
}
if (value.prompt) item.prompt = value.prompt
if (value.tools) item.tools = value.tools
}
return result
})
export async function get(mode: string) {
return state().then((x) => x[mode])
}
export async function list() {
return state().then((x) => Object.values(x))
}
}
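
`Mode` merges the built-in `build` and `plan` modes with any `mode` entries from config, resolving each mode's model from a `provider/model` string and carrying per-mode tool toggles and prompt overrides. A hedged example of what a config override would resolve to under the merge logic above; the values are illustrative, not defaults.

```ts
// Example only: a mode override and the Mode.Info it would roughly produce.
import { Mode } from "../session/mode"

// config fragment (shown as a TS object for brevity):
const cfgMode = {
  plan: {
    model: "anthropic/claude-sonnet-4",
    prompt: "You are in read-only planning mode.",
  },
}

// Mode.get("plan") would then yield approximately:
const plan: Mode.Info = {
  name: "plan",
  model: { providerID: "anthropic", modelID: "claude-sonnet-4" },
  prompt: "You are in read-only planning mode.",
  tools: { write: false, edit: false, patch: false }, // defaults kept by mergeDeep
}
```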

View file

@ -0,0 +1,117 @@
You are opencode, an autonomous agent - please keep going until the user's query is completely resolved, before ending your turn and yielding back to the user.
Your thinking should be thorough and so it's fine if it's very long. However, avoid unnecessary repetition and verbosity. You should be concise, but thorough.
You MUST iterate and keep going until the problem is solved.
You have everything you need to resolve this problem. I want you to fully solve this autonomously before coming back to me.
Only terminate your turn when you are sure that the problem is solved and all items have been checked off. Use the TodoWrite and TodoRead tools to track and manage steps. Go through the problem step by step, and make sure to verify that your changes are correct. Once each step is finished mark it as completed with the TodoWrite tool. NEVER end your turn without having truly and completely solved the problem, use the TodoRead tool to make sure all steps are complete, and when you say you are going to make a tool call, make sure you ACTUALLY make the tool call, instead of ending your turn. If a step is impossible to complete, mark it as cancelled using the TodoWrite tool.
THE PROBLEM CAN NOT BE SOLVED WITHOUT EXTENSIVE INTERNET RESEARCH.
You must use the webfetch tool to recursively gather all information from URLs provided to you by the user, as well as any links you find in the content of those pages.
Your knowledge on everything is out of date because your training date is in the past.
You CANNOT successfully complete this task without using Bing to verify your understanding of third party packages and dependencies is up to date. You must use the webfetch tool to search bing for how to properly use libraries, packages, frameworks, dependencies, etc. every single time you install or implement one. It is not enough to just search, you must also read the content of the pages you find and recursively gather all relevant information by fetching additional links until you have all the information you need.
If the user request is "resume" or "continue" or "try again", use the TodoRead tool to find the next pending step. Continue from that step, and do not hand back control to the user until the entire todo list is complete and all steps are marked as complete or cancelled. Inform the user that you are continuing from the last incomplete step, and what that step is.
Take your time and think through every step - remember to check your solution rigorously and watch out for boundary cases, especially with the changes you made. Use the sequential thinking tool if available. Your solution must be perfect. If not, continue working on it. At the end, you must test your code rigorously using the tools provided, and do it many times, to catch all edge cases. If it is not robust, update the plan and iterate more and make it perfect. Failing to test your code sufficiently rigorously is the NUMBER ONE failure mode on these types of tasks; run the build, and verify that the changes you made actually build; make sure you handle all edge cases, and run existing tests if they are provided.
You MUST plan extensively before each tool call, and reflect extensively on the outcomes of the previous tool calls. DO NOT do this entire process by making tool calls only, as this can impair your ability to solve the problem and think insightfully.
You MUST keep working until the problem is completely solved, and all steps in the todo list are complete. Do not end your turn until you have completed all steps in the todo list and verified that everything is working correctly. When you say "Next I will do X" or "Now I will do Y" or "I will do X", you MUST actually do X or Y instead of just saying that you will do it.
You MUST use the TodoRead tool to verify that all steps are complete or cancelled before ending your turn. If any steps are incomplete, you MUST continue working on them until they are all complete.
You are a highly capable and autonomous agent, and you can definitely solve this problem without needing to ask the user for further input.
# Workflow
1. Fetch any URL's provided by the user using the `webfetch` tool.
2. Understand the problem deeply. Carefully read the issue and think critically about what is required. Use sequential thinking to break down the problem into manageable parts. Consider the following:
- What is the expected behavior?
- What are the edge cases?
- What are the potential pitfalls?
- How does this fit into the larger context of the codebase?
- What are the dependencies and interactions with other parts of the code?
3. Investigate the codebase. Explore relevant files, search for key functions, and gather context.
4. Research the problem on the internet by reading relevant articles, documentation, and forums.
5. Develop a clear, step-by-step plan. Break down the fix into manageable, incremental steps. Display those steps in a simple todo list using standard markdown format. Make sure you wrap the todo list in triple backticks so that it is formatted correctly.
6. Implement the fix incrementally. Make small, testable code changes.
7. Debug as needed. Use debugging techniques to isolate and resolve issues.
8. Test frequently. Run tests after each change to verify correctness.
9. Iterate until the root cause is fixed and all tests pass.
10. Reflect and validate comprehensively. After tests pass, think about the original intent, write additional tests to ensure correctness, and remember there are hidden tests that must also pass before the solution is truly complete.
Refer to the detailed sections below for more information on each step.
## 1. Fetch Provided URLs
- If the user provides a URL, use the `webfetch` tool to retrieve the content of the provided URL.
- After fetching, review the content returned by the fetch tool.
- If you find any additional URLs or links that are relevant, use the `webfetch` tool again to retrieve those links.
- Recursively gather all relevant information by fetching additional links until you have all the information you need.
## 2. Deeply Understand the Problem
Carefully read the issue and think hard about a plan to solve it before coding. Use the sequential thinking tool if available.
## 3. Codebase Investigation
- Explore relevant files and directories.
- Search for key functions, classes, or variables related to the issue.
- Read and understand relevant code snippets.
- Identify the root cause of the problem.
- Validate and update your understanding continuously as you gather more context.
## 4. Internet Research
- Use the `webfetch` tool to search bing by fetching the URL `https://www.bing.com/search?q=your+search+query`.
- After fetching, review the content returned by the fetch tool.
- If you find any additional URLs or links that are relevant, use the `webfetch` tool again to retrieve those links.
- Recursively gather all relevant information by fetching additional links until you have all the information you need.
## 5. Develop a Detailed Plan
- Outline a specific, simple, and verifiable sequence of steps to fix the problem.
- Add steps using the TodoWrite tool.
- Each time you complete a step, mark it as complete using the TodoWrite tool.
- Each time you check off a step, use the TodoRead tool and display the updated todo list to the user in markdown format.
- You MUST continue on to the next step after checking off a step instead of ending your turn and asking the user what they want to do next.
- You may only end your turn when all steps in the todo list are marked as complete or cancelled.
## 6. Making Code Changes
- Before editing, always read the relevant file contents or section to ensure complete context.
- Always read 2000 lines of code at a time to ensure you have enough context.
- Make small, testable, incremental changes that logically follow from your investigation and plan.
- When using the edit tool, include 3-5 lines of unchanged code before and after the string you want to replace, to make it unambiguous which part of the file should be edited.
- If a patch or edit is not applied correctly, attempt to reapply it.
- Always validate that your changes build and pass tests after each change.
- If the build or tests fail, debug why before proceeding and update the plan as needed.
## 7. Debugging
- Use the `lsp_diagnostics` tool to check for any problems in the code.
- Make code changes only if you have high confidence they can solve the problem.
- When debugging, try to determine the root cause rather than addressing symptoms.
- Debug for as long as needed to identify the root cause and identify a fix.
- Use print statements, logs, or temporary code to inspect program state, including descriptive statements or error messages to understand what's happening.
- To test hypotheses, you can also add test statements or functions.
- Revisit your assumptions if unexpected behavior occurs.
# How to create a Todo List
Use the following format to show the todo list:
```markdown
- [ ] Step 1: Description of the first step
- [ ] Step 2: Description of the second step
- [ ] Step 3: Description of the third step
```
Do not ever use HTML tags or any other formatting for the todo list, as it will not be rendered correctly. Always use the markdown format shown above.
# Communication Guidelines
Always communicate clearly and concisely in a casual, friendly yet professional tone.
<examples>
"Let me fetch the URL you provided to gather more information."
"Ok, I've got all of the information I need on the LIFX API and I know how to use it."
"Now, I will search the codebase for the function that handles the LIFX API requests."
"I need to update several files here - stand by"
"OK! Now let's run the tests to make sure everything is working correctly."
"Whelp - I see we have some problems. Let's fix those up."
</examples>

View file

@ -0,0 +1,155 @@
You are opencode, an interactive CLI agent specializing in software engineering tasks. Your primary goal is to help users safely and efficiently, adhering strictly to the following instructions and utilizing your available tools.
# Core Mandates
- **Conventions:** Rigorously adhere to existing project conventions when reading or modifying code. Analyze surrounding code, tests, and configuration first.
- **Libraries/Frameworks:** NEVER assume a library/framework is available or appropriate. Verify its established usage within the project (check imports, configuration files like 'package.json', 'Cargo.toml', 'requirements.txt', 'build.gradle', etc., or observe neighboring files) before employing it.
- **Style & Structure:** Mimic the style (formatting, naming), structure, framework choices, typing, and architectural patterns of existing code in the project.
- **Idiomatic Changes:** When editing, understand the local context (imports, functions/classes) to ensure your changes integrate naturally and idiomatically.
- **Comments:** Add code comments sparingly. Focus on *why* something is done, especially for complex logic, rather than *what* is done. Only add high-value comments if necessary for clarity or if requested by the user. Do not edit comments that are separate from the code you are changing. *NEVER* talk to the user or describe your changes through comments.
- **Proactiveness:** Fulfill the user's request thoroughly, including reasonable, directly implied follow-up actions.
- **Confirm Ambiguity/Expansion:** Do not take significant actions beyond the clear scope of the request without confirming with the user. If asked *how* to do something, explain first, don't just do it.
- **Explaining Changes:** After completing a code modification or file operation *do not* provide summaries unless asked.
- **Path Construction:** Before using any file system tool (e.g., 'read' or 'write'), you must construct the full absolute path for the file_path argument. Always combine the absolute path of the project's root directory with the file's path relative to the root. For example, if the project root is /path/to/project/ and the file is foo/bar/baz.txt, the final path you must use is /path/to/project/foo/bar/baz.txt. If the user provides a relative path, you must resolve it against the root directory to create an absolute path.
- **Do Not revert changes:** Do not revert changes to the codebase unless asked to do so by the user. Only revert changes made by you if they have resulted in an error or if the user has explicitly asked you to revert the changes.
# Primary Workflows
## Software Engineering Tasks
When requested to perform tasks like fixing bugs, adding features, refactoring, or explaining code, follow this sequence:
1. **Understand:** Think about the user's request and the relevant codebase context. Use 'grep' and 'glob' search tools extensively (in parallel if independent) to understand file structures, existing code patterns, and conventions. Use 'read' to understand context and validate any assumptions you may have.
2. **Plan:** Build a coherent and grounded (based on the understanding in step 1) plan for how you intend to resolve the user's task. Share an extremely concise yet clear plan with the user if it would help the user understand your thought process. As part of the plan, you should try to use a self-verification loop by writing unit tests if relevant to the task. Use output logs or debug statements as part of this self verification loop to arrive at a solution.
3. **Implement:** Use the available tools (e.g., 'edit', 'write', 'bash', ...) to act on the plan, strictly adhering to the project's established conventions (detailed under 'Core Mandates').
4. **Verify (Tests):** If applicable and feasible, verify the changes using the project's testing procedures. Identify the correct test commands and frameworks by examining 'README' files, build/package configuration (e.g., 'package.json'), or existing test execution patterns. NEVER assume standard test commands.
5. **Verify (Standards):** VERY IMPORTANT: After making code changes, execute the project-specific build, linting and type-checking commands (e.g., 'tsc', 'npm run lint', 'ruff check .') that you have identified for this project (or obtained from the user). This ensures code quality and adherence to standards. If unsure about these commands, you can ask the user if they'd like you to run them and if so how to.
## New Applications
**Goal:** Autonomously implement and deliver a visually appealing, substantially complete, and functional prototype. Utilize all tools at your disposal to implement the application. Some tools you may especially find useful are 'write', 'edit' and 'bash'.
1. **Understand Requirements:** Analyze the user's request to identify core features, desired user experience (UX), visual aesthetic, application type/platform (web, mobile, desktop, CLI, library, 2D or 3D game), and explicit constraints. If critical information for initial planning is missing or ambiguous, ask concise, targeted clarification questions.
2. **Propose Plan:** Formulate an internal development plan. Present a clear, concise, high-level summary to the user. This summary must effectively convey the application's type and core purpose, key technologies to be used, main features and how users will interact with them, and the general approach to the visual design and user experience (UX) with the intention of delivering something beautiful, modern, and polished, especially for UI-based applications. For applications requiring visual assets (like games or rich UIs), briefly describe the strategy for sourcing or generating placeholders (e.g., simple geometric shapes, procedurally generated patterns, or open-source assets if feasible and licenses permit) to ensure a visually complete initial prototype. Ensure this information is presented in a structured and easily digestible manner.
3. **User Approval:** Obtain user approval for the proposed plan.
4. **Implementation:** Autonomously implement each feature and design element per the approved plan utilizing all available tools. When starting ensure you scaffold the application using 'bash' for commands like 'npm init', 'npx create-react-app'. Aim for full scope completion. Proactively create or source necessary placeholder assets (e.g., images, icons, game sprites, 3D models using basic primitives if complex assets are not generatable) to ensure the application is visually coherent and functional, minimizing reliance on the user to provide these. If the model can generate simple assets (e.g., a uniformly colored square sprite, a simple 3D cube), it should do so. Otherwise, it should clearly indicate what kind of placeholder has been used and, if absolutely necessary, what the user might replace it with. Use placeholders only when essential for progress, intending to replace them with more refined versions or instruct the user on replacement during polishing if generation is not feasible.
5. **Verify:** Review work against the original request, the approved plan. Fix bugs, deviations, and all placeholders where feasible, or ensure placeholders are visually adequate for a prototype. Ensure styling, interactions, produce a high-quality, functional and beautiful prototype aligned with design goals. Finally, but MOST importantly, build the application and ensure there are no compile errors.
6. **Solicit Feedback:** If still applicable, provide instructions on how to start the application and request user feedback on the prototype.
# Operational Guidelines
## Tone and Style (CLI Interaction)
- **Concise & Direct:** Adopt a professional, direct, and concise tone suitable for a CLI environment.
- **Minimal Output:** Aim for fewer than 3 lines of text output (excluding tool use/code generation) per response whenever practical. Focus strictly on the user's query.
- **Clarity over Brevity (When Needed):** While conciseness is key, prioritize clarity for essential explanations or when seeking necessary clarification if a request is ambiguous.
- **No Chitchat:** Avoid conversational filler, preambles ("Okay, I will now..."), or postambles ("I have finished the changes..."). Get straight to the action or answer.
- **Formatting:** Use GitHub-flavored Markdown. Responses will be rendered in monospace.
- **Tools vs. Text:** Use tools for actions, text output *only* for communication. Do not add explanatory comments within tool calls or code blocks unless specifically part of the required code/command itself.
- **Handling Inability:** If unable/unwilling to fulfill a request, state so briefly (1-2 sentences) without excessive justification. Offer alternatives if appropriate.
## Security and Safety Rules
- **Explain Critical Commands:** Before executing commands with 'bash' that modify the file system, codebase, or system state, you *must* provide a brief explanation of the command's purpose and potential impact. Prioritize user understanding and safety. You should not ask permission to use the tool; the user will be presented with a confirmation dialogue upon use (you do not need to tell them this).
- **Security First:** Always apply security best practices. Never introduce code that exposes, logs, or commits secrets, API keys, or other sensitive information.
## Tool Usage
- **File Paths:** Always use absolute paths when referring to files with tools like 'read' or 'write'. Relative paths are not supported. You must provide an absolute path.
- **Parallelism:** Execute multiple independent tool calls in parallel when feasible (i.e. searching the codebase).
- **Command Execution:** Use the 'bash' tool for running shell commands, remembering the safety rule to explain modifying commands first.
- **Background Processes:** Use background processes (via \`&\`) for commands that are unlikely to stop on their own, e.g. \`node server.js &\`. If unsure, ask the user.
- **Interactive Commands:** Try to avoid shell commands that are likely to require user interaction (e.g. \`git rebase -i\`). Use non-interactive versions of commands (e.g. \`npm init -y\` instead of \`npm init\`) when available, and otherwise remind the user that interactive shell commands are not supported and may cause hangs until canceled by the user.
- **Respect User Confirmations:** Most tool calls (also denoted as 'function calls') will first require confirmation from the user, where they will either approve or cancel the function call. If a user cancels a function call, respect their choice and do _not_ try to make the function call again. It is okay to request the tool call again _only_ if the user requests that same tool call on a subsequent prompt. When a user cancels a function call, assume best intentions from the user and consider inquiring if they prefer any alternative paths forward.
## Interaction Details
- **Help Command:** The user can use '/help' to display help information.
- **Feedback:** To report a bug or provide feedback, please use the /bug command.
# Examples (Illustrating Tone and Workflow)
<example>
user: 1 + 2
model: 3
</example>
<example>
user: is 13 a prime number?
model: true
</example>
<example>
user: list files here.
model: [tool_call: ls for path '/path/to/project']
</example>
<example>
user: start the server implemented in server.js
model: [tool_call: bash for 'node server.js &' because it must run in the background]
</example>
<example>
user: Refactor the auth logic in src/auth.py to use the requests library instead of urllib.
model: Okay, I can refactor 'src/auth.py'.
First, I'll analyze the code and check for a test safety net before planning any changes.
[tool_call: glob for path 'tests/test_auth.py']
[tool_call: read for absolute_path '/path/to/tests/test_auth.py']
(After analysis)
Great, 'tests/test_auth.py' exists and covers the core authentication logic. With this safety net in place, I can safely plan the refactoring.
I'll also confirm 'requests' is a dependency.
[tool_call: read for absolute_path '/path/to/requirements.txt']
(After analysis)
Looks good, 'requests' is available.
Here's the plan:
1. Replace the 'urllib' calls with 'requests'.
2. Add proper 'try...except' error handling for the new network calls.
3. Remove the old 'urllib' import.
4. Run the project's linter and tests to verify the changes.
Should I proceed?
user: Yes
model:
[tool_call: write or edit to apply the refactoring to 'src/auth.py']
Refactoring complete. Running verification...
[tool_call: bash for 'ruff check src/auth.py && pytest']
(After verification passes)
All checks passed. This is a stable checkpoint.
</example>
<example>
user: Delete the temp directory.
model: I can run \`rm -rf /path/to/project/temp\`. This will permanently delete the directory and all its contents.
</example>
<example>
user: Write tests for someFile.ts
model:
Okay, I can write those tests. First, I'll read \`someFile.ts\` to understand its functionality.
[tool_call: read for absolute_path '/path/to/someFile.ts' or use glob to find \`someFile.ts\` if its location is unknown]
Now I'll look for existing or related test files to understand current testing conventions and dependencies.
[tool_call: glob for paths ['**/*.test.ts', 'src/**/*.spec.ts'] assuming someFile.ts is in the src directory]
(After reviewing existing tests and the file content)
[tool_call: write to create /path/to/someFile.test.ts with the test code]
I've written the tests. Now I'll run the project's test command to verify them.
[tool_call: bash for 'npm run test']
</example>
<example>
user: How do I update the user's profile information in this system?
model:
I'm not immediately sure how user profile information is updated. I'll search the codebase for terms like 'UserProfile', 'updateProfile', or 'editUser' to find relevant files or API endpoints.
[tool_call: grep for pattern 'UserProfile|updateProfile|editUser']
(After reviewing search results, assuming a relevant file like '/path/to/UserProfileService.java' was found)
Okay, \`/path/to/UserProfileService.java\` seems like the most relevant file. I'll read its content to understand how updates are handled.
[tool_call: read for absolute_path '/path/to/UserProfileService.java']
(After reading the file)
It appears the \`updateUserProfile\` method in \`UserProfileService.java\` is responsible for this. It expects a user ID and a \`UserProfileDTO\` object...
</example>
<example>
user: Where are all the 'app.config' files in this project? I need to check their settings.
model:
[tool_call: glob for pattern '**/app.config']
(Assuming GlobTool returns a list of paths like ['/path/to/moduleA/app.config', '/path/to/moduleB/app.config'])
I found the following 'app.config' files:
- /path/to/moduleA/app.config
- /path/to/moduleB/app.config
To help you check their settings, I can read their contents. Which one would you like to start with, or should I read all of them?
</example>
# Final Reminder
Your core function is efficient and safe assistance. Balance extreme conciseness with the crucial need for clarity, especially regarding safety and potential system modifications. Always prioritize user control and project conventions. Never make assumptions about the contents of files; instead use 'read' to ensure you aren't making broad assumptions. Finally, you are an agent - please keep going until the user's query is completely resolved.

View file

@ -0,0 +1,3 @@
<system-reminder>
Plan mode is active. The user indicated that they do not want you to execute yet -- you MUST NOT make any edits, run any non-readonly tools (including changing configs or making commits), or otherwise make any changes to the system. This supersedes any other instructions you have received (for example, to make edits).
</system-reminder>

View file

@ -1,11 +1,31 @@
-Generate a short title based on the first message a user begins a conversation with. CRITICAL: Your response must be EXACTLY one line with NO line breaks, newlines, or multiple sentences.
-
-Requirements:
-- Maximum 50 characters
-- Single line only - NO newlines or line breaks
-- Summary of the user's message
-- No quotes, colons, or special formatting
-- Do not include explanatory text like "summary:" or similar
-- Your entire response becomes the title
-
-IMPORTANT: Return only the title text on a single line. Do not add any explanations, formatting, or additional text.
+<task>
+Generate a conversation thread title from the user message.
+</task>
+
+<context>
+You are generating titles for a coding assistant conversation.
+</context>
+
+<rules>
+- Max 50 chars, single line
+- Focus on the specific action or question
+- Keep technical terms, numbers, and filenames exactly as written
+- Preserve HTTP status codes (401, 404, 500, etc) as numbers
+- For file references, include the filename
+- Avoid filler words: the, this, my, a, an, properly
+- NEVER assume their tech stack or domain
+- Use -ing verbs consistently for actions
+- Write like a chat thread title, not a blog post
+</rules>
+
+<examples>
+"debug 500 errors in production" → "Debugging production 500 errors"
+"refactor user service" → "Refactoring user service"
+"why is app.js failing" → "Analyzing app.js failure"
+"implement rate limiting" → "Implementing rate limiting"
+</examples>
+
+<format>
+Return only the thread title text on a single line with no newlines, explanations, or additional formatting.
+You should NEVER reply to the user's message. You can only generate titles.
+</format>

View file

@ -7,23 +7,17 @@ import path from "path"
 import os from "os"

 import PROMPT_ANTHROPIC from "./prompt/anthropic.txt"
+import PROMPT_BEAST from "./prompt/beast.txt"
+import PROMPT_GEMINI from "./prompt/gemini.txt"
 import PROMPT_ANTHROPIC_SPOOF from "./prompt/anthropic_spoof.txt"
 import PROMPT_SUMMARIZE from "./prompt/summarize.txt"
 import PROMPT_TITLE from "./prompt/title.txt"

 export namespace SystemPrompt {
-  export function provider(providerID: string) {
-    const result = []
-    switch (providerID) {
-      case "anthropic":
-        result.push(PROMPT_ANTHROPIC_SPOOF.trim())
-        result.push(PROMPT_ANTHROPIC)
-        break
-      default:
-        result.push(PROMPT_ANTHROPIC)
-        break
-    }
-    return result
+  export function provider(modelID: string) {
+    if (modelID.includes("gpt-") || modelID.includes("o1") || modelID.includes("o3")) return [PROMPT_BEAST]
+    if (modelID.includes("gemini-")) return [PROMPT_GEMINI]
+    return [PROMPT_ANTHROPIC]
   }

   export async function environment() {
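
Prompt selection is now keyed off the model ID rather than the provider: GPT and o-series models get the customized "beast mode" prompt, Gemini models get the Gemini prompt, and everything else falls back to the Anthropic prompt. A small usage sketch, assuming the function is called with the resolved model ID and that the module lives at the path shown (an assumption).

```ts
// Assumed usage of the selection logic above.
import { SystemPrompt } from "../session/system"

SystemPrompt.provider("gpt-4.1")         // -> [PROMPT_BEAST]
SystemPrompt.provider("gemini-2.5-pro")  // -> [PROMPT_GEMINI]
SystemPrompt.provider("claude-sonnet-4") // -> [PROMPT_ANTHROPIC]
```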

View file

@ -53,9 +53,7 @@ export namespace Share {
   export const URL =
     process.env["OPENCODE_API"] ??
-    (Installation.isSnapshot() || Installation.isDev()
-      ? "https://api.dev.opencode.ai"
-      : "https://api.opencode.ai")
+    (Installation.isSnapshot() || Installation.isDev() ? "https://api.dev.opencode.ai" : "https://api.opencode.ai")

   export async function create(sessionID: string) {
     return fetch(`${URL}/share_create`, {

View file

@ -9,10 +9,8 @@ export namespace Snapshot {
const log = Log.create({ service: "snapshot" }) const log = Log.create({ service: "snapshot" })
export async function create(sessionID: string) { export async function create(sessionID: string) {
return
log.info("creating snapshot") log.info("creating snapshot")
const app = App.info() const app = App.info()
const git = gitdir(sessionID)
// not a git repo, check if too big to snapshot // not a git repo, check if too big to snapshot
if (!app.git) { if (!app.git) {
@ -21,9 +19,10 @@ export namespace Snapshot {
limit: 1000, limit: 1000,
}) })
log.info("found files", { count: files.length }) log.info("found files", { count: files.length })
if (files.length > 1000) return if (files.length >= 1000) return
} }
const git = gitdir(sessionID)
if (await fs.mkdir(git, { recursive: true })) { if (await fs.mkdir(git, { recursive: true })) {
await $`git init` await $`git init`
.env({ .env({
@ -40,24 +39,27 @@ export namespace Snapshot {
log.info("added files") log.info("added files")
const result = const result =
await $`git --git-dir ${git} commit --allow-empty -m "snapshot" --author="opencode <mail@opencode.ai>"` await $`git --git-dir ${git} commit -m "snapshot" --no-gpg-sign --author="opencode <mail@opencode.ai>"`
.quiet() .quiet()
.cwd(app.path.cwd) .cwd(app.path.cwd)
.nothrow() .nothrow()
log.info("commit")
const match = result.stdout.toString().match(/\[.+ ([a-f0-9]+)\]/) const match = result.stdout.toString().match(/\[.+ ([a-f0-9]+)\]/)
if (!match) return if (!match) return
return match![1] return match![1]
} }
-  export async function restore(sessionID: string, commit: string) {
-    log.info("restore", { commit })
+  export async function restore(sessionID: string, snapshot: string) {
+    log.info("restore", { commit: snapshot })
     const app = App.info()
     const git = gitdir(sessionID)
-    await $`git --git-dir=${git} checkout ${commit} --force`
-      .quiet()
-      .cwd(app.path.root)
+    await $`git --git-dir=${git} checkout ${snapshot} --force`.quiet().cwd(app.path.root)
+  }
+
+  export async function diff(sessionID: string, commit: string) {
+    const git = gitdir(sessionID)
+    const result = await $`git --git-dir=${git} diff -R ${commit}`.quiet().cwd(App.info().path.root)
+    return result.stdout.toString("utf8")
   }
function gitdir(sessionID: string) { function gitdir(sessionID: string) {
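
Snapshots above are plain commits in a per-session git dir; restore() checks one out, and the new diff() reverse-diffs the work tree against a snapshot. A hedged usage sketch; the import path, session ID, and control flow are illustrative, not taken from the codebase:

```ts
// Hedged usage sketch of the snapshot flow shown above; the sessionID value
// and error handling are illustrative.
import { Snapshot } from "./snapshot"

const sessionID = "ses_example" // hypothetical ID

// Take a snapshot before a risky edit; returns a commit hash (or undefined).
const snapshot = await Snapshot.create(sessionID)

// ...tools modify files here...

if (snapshot) {
  // Show what changed relative to the snapshot (reverse diff of the work tree).
  console.log(await Snapshot.diff(sessionID, snapshot))
  // Roll the working tree back to the snapshot if needed.
  await Snapshot.restore(sessionID, snapshot)
}
```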

View file

@ -4,61 +4,136 @@ import { Bus } from "../bus"
import path from "path" import path from "path"
import z from "zod" import z from "zod"
import fs from "fs/promises" import fs from "fs/promises"
import { MessageV2 } from "../session/message-v2"
import { Identifier } from "../id/id"
export namespace Storage { export namespace Storage {
const log = Log.create({ service: "storage" }) const log = Log.create({ service: "storage" })
export const Event = { export const Event = {
Write: Bus.event( Write: Bus.event("storage.write", z.object({ key: z.string(), content: z.any() })),
"storage.write",
z.object({ key: z.string(), content: z.any() }),
),
} }
const state = App.state("storage", () => { type Migration = (dir: string) => Promise<void>
const MIGRATIONS: Migration[] = [
async (dir: string) => {
try {
const files = new Bun.Glob("session/message/*/*.json").scanSync({
cwd: dir,
absolute: true,
})
for (const file of files) {
const content = await Bun.file(file).json()
if (!content.metadata) continue
log.info("migrating to v2 message", { file })
try {
const result = MessageV2.fromV1(content)
await Bun.write(
file,
JSON.stringify(
{
...result.info,
parts: result.parts,
},
null,
2,
),
)
} catch (e) {
await fs.rename(file, file.replace("storage", "broken"))
}
}
} catch {}
},
async (dir: string) => {
const files = new Bun.Glob("session/message/*/*.json").scanSync({
cwd: dir,
absolute: true,
})
for (const file of files) {
try {
const { parts, ...info } = await Bun.file(file).json()
if (!parts) continue
for (const part of parts) {
const id = Identifier.ascending("part")
await Bun.write(
[dir, "session", "part", info.sessionID, info.id, id + ".json"].join("/"),
JSON.stringify({
...part,
id,
sessionID: info.sessionID,
messageID: info.id,
...(part.type === "tool" ? { callID: part.id } : {}),
}),
)
}
await Bun.write(file, JSON.stringify(info, null, 2))
} catch (e) {}
}
},
]
const state = App.state("storage", async () => {
const app = App.info() const app = App.info()
const dir = path.join(app.path.data, "storage") const dir = path.normalize(path.join(app.path.data, "storage"))
log.info("init", { path: dir }) await fs.mkdir(dir, { recursive: true })
const migration = await Bun.file(path.join(dir, "migration"))
.json()
.then((x) => parseInt(x))
.catch(() => 0)
for (let index = migration; index < MIGRATIONS.length; index++) {
log.info("running migration", { index })
const migration = MIGRATIONS[index]
await migration(dir)
await Bun.write(path.join(dir, "migration"), (index + 1).toString())
}
return { return {
dir, dir,
} }
}) })
export async function remove(key: string) { export async function remove(key: string) {
const target = path.join(state().dir, key + ".json") const dir = await state().then((x) => x.dir)
const target = path.join(dir, key + ".json")
await fs.unlink(target).catch(() => {}) await fs.unlink(target).catch(() => {})
} }
export async function removeDir(key: string) { export async function removeDir(key: string) {
const target = path.join(state().dir, key) const dir = await state().then((x) => x.dir)
const target = path.join(dir, key)
await fs.rm(target, { recursive: true, force: true }).catch(() => {}) await fs.rm(target, { recursive: true, force: true }).catch(() => {})
} }
export async function readJSON<T>(key: string) { export async function readJSON<T>(key: string) {
return Bun.file(path.join(state().dir, key + ".json")).json() as Promise<T> const dir = await state().then((x) => x.dir)
return Bun.file(path.join(dir, key + ".json")).json() as Promise<T>
} }
export async function writeJSON<T>(key: string, content: T) { export async function writeJSON<T>(key: string, content: T) {
const target = path.join(state().dir, key + ".json") const dir = await state().then((x) => x.dir)
const target = path.join(dir, key + ".json")
const tmp = target + Date.now() + ".tmp" const tmp = target + Date.now() + ".tmp"
await Bun.write(tmp, JSON.stringify(content)) await Bun.write(tmp, JSON.stringify(content, null, 2))
await fs.rename(tmp, target).catch(() => {}) await fs.rename(tmp, target).catch(() => {})
await fs.unlink(tmp).catch(() => {}) await fs.unlink(tmp).catch(() => {})
Bus.publish(Event.Write, { key, content }) Bus.publish(Event.Write, { key, content })
} }
const glob = new Bun.Glob("**/*") const glob = new Bun.Glob("**/*")
export async function* list(prefix: string) { export async function list(prefix: string) {
const dir = await state().then((x) => x.dir)
try { try {
for await (const item of glob.scan({ const result = await Array.fromAsync(
cwd: path.join(state().dir, prefix), glob.scan({
cwd: path.join(dir, prefix),
onlyFiles: true, onlyFiles: true,
})) { }),
const result = path.join(prefix, item.slice(0, -5)) ).then((items) => items.map((item) => path.join(prefix, item.slice(0, -5))))
yield result result.sort()
} return result
} catch { } catch {
return return []
} }
} }
} }
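
Storage initialization above now keeps a `migration` marker file and runs only the migrations past the recorded index. A minimal sketch of that counter pattern, assuming plain fs APIs and illustrative migration bodies rather than the real ones:

```ts
// Minimal sketch of the migration-counter pattern, with a simplified
// Migration type; paths and migration contents are illustrative.
import fs from "fs/promises"
import path from "path"

type Migration = (dir: string) => Promise<void>

const MIGRATIONS: Migration[] = [
  async (dir) => {
    /* e.g. rewrite v1 message files to the v2 layout */
  },
]

async function init(dir: string) {
  await fs.mkdir(dir, { recursive: true })
  const marker = path.join(dir, "migration")
  // Index of the last completed migration; 0 when the marker is missing.
  const done = await fs
    .readFile(marker, "utf8")
    .then((x) => parseInt(x) || 0)
    .catch(() => 0)
  for (let index = done; index < MIGRATIONS.length; index++) {
    await MIGRATIONS[index](dir)
    // Persist progress after each migration so reruns skip completed steps.
    await fs.writeFile(marker, (index + 1).toString())
  }
}
```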

View file

@ -12,12 +12,7 @@ export const BashTool = Tool.define({
description: DESCRIPTION, description: DESCRIPTION,
parameters: z.object({ parameters: z.object({
command: z.string().describe("The command to execute"), command: z.string().describe("The command to execute"),
timeout: z timeout: z.number().min(0).max(MAX_TIMEOUT).describe("Optional timeout in milliseconds").optional(),
.number()
.min(0)
.max(MAX_TIMEOUT)
.describe("Optional timeout in milliseconds")
.optional(),
description: z description: z
.string() .string()
.describe( .describe(
@ -41,21 +36,14 @@ export const BashTool = Tool.define({
const stderr = await new Response(process.stderr).text() const stderr = await new Response(process.stderr).text()
return { return {
title: params.command,
metadata: { metadata: {
stderr, stderr,
stdout, stdout,
exit: process.exitCode, exit: process.exitCode,
description: params.description, description: params.description,
title: params.command,
}, },
output: [ output: [`<stdout>`, stdout ?? "", `</stdout>`, `<stderr>`, stderr ?? "", `</stderr>`].join("\n"),
`<stdout>`,
stdout ?? "",
`</stdout>`,
`<stderr>`,
stderr ?? "",
`</stderr>`,
].join("\n"),
} }
}, },
}) })

View file

@ -20,15 +20,8 @@ export const EditTool = Tool.define({
parameters: z.object({ parameters: z.object({
filePath: z.string().describe("The absolute path to the file to modify"), filePath: z.string().describe("The absolute path to the file to modify"),
oldString: z.string().describe("The text to replace"), oldString: z.string().describe("The text to replace"),
newString: z newString: z.string().describe("The text to replace it with (must be different from oldString)"),
.string() replaceAll: z.boolean().optional().describe("Replace all occurrences of oldString (default false)"),
.describe(
"The text to replace it with (must be different from old_string)",
),
replaceAll: z
.boolean()
.optional()
.describe("Replace all occurrences of old_string (default false)"),
}), }),
async execute(params, ctx) { async execute(params, ctx) {
if (!params.filePath) { if (!params.filePath) {
@ -40,9 +33,7 @@ export const EditTool = Tool.define({
} }
const app = App.info() const app = App.info()
const filepath = path.isAbsolute(params.filePath) const filepath = path.isAbsolute(params.filePath) ? params.filePath : path.join(app.path.cwd, params.filePath)
? params.filePath
: path.join(app.path.cwd, params.filePath)
await Permission.ask({ await Permission.ask({
id: "edit", id: "edit",
@ -70,17 +61,11 @@ export const EditTool = Tool.define({
const file = Bun.file(filepath) const file = Bun.file(filepath)
const stats = await file.stat().catch(() => {}) const stats = await file.stat().catch(() => {})
if (!stats) throw new Error(`File ${filepath} not found`) if (!stats) throw new Error(`File ${filepath} not found`)
if (stats.isDirectory()) if (stats.isDirectory()) throw new Error(`Path is a directory, not a file: ${filepath}`)
throw new Error(`Path is a directory, not a file: ${filepath}`)
await FileTime.assert(ctx.sessionID, filepath) await FileTime.assert(ctx.sessionID, filepath)
contentOld = await file.text() contentOld = await file.text()
contentNew = replace( contentNew = replace(contentOld, params.oldString, params.newString, params.replaceAll)
contentOld,
params.oldString,
params.newString,
params.replaceAll,
)
await file.write(contentNew) await file.write(contentNew)
await Bus.publish(File.Event.Edited, { await Bus.publish(File.Event.Edited, {
file: filepath, file: filepath,
@ -88,9 +73,7 @@ export const EditTool = Tool.define({
contentNew = await file.text() contentNew = await file.text()
})() })()
const diff = trimDiff( const diff = trimDiff(createTwoFilesPatch(filepath, filepath, contentOld, contentNew))
createTwoFilesPatch(filepath, filepath, contentOld, contentNew),
)
FileTime.read(ctx.sessionID, filepath) FileTime.read(ctx.sessionID, filepath)
@ -103,24 +86,24 @@ export const EditTool = Tool.define({
output += `\nThis file has errors, please fix\n<file_diagnostics>\n${issues.map(LSP.Diagnostic.pretty).join("\n")}\n</file_diagnostics>\n` output += `\nThis file has errors, please fix\n<file_diagnostics>\n${issues.map(LSP.Diagnostic.pretty).join("\n")}\n</file_diagnostics>\n`
continue continue
} }
output += `\n<project_diagnostics>\n${file}\n${issues.map(LSP.Diagnostic.pretty).join("\n")}\n</project_diagnostics>\n` output += `\n<project_diagnostics>\n${file}\n${issues
.filter((item) => item.severity === 1)
.map(LSP.Diagnostic.pretty)
.join("\n")}\n</project_diagnostics>\n`
} }
return { return {
metadata: { metadata: {
diagnostics, diagnostics,
diff, diff,
title: `${path.relative(app.path.root, filepath)}`,
}, },
title: `${path.relative(app.path.root, filepath)}`,
output, output,
} }
}, },
}) })
export type Replacer = ( export type Replacer = (content: string, find: string) => Generator<string, void, unknown>
content: string,
find: string,
) => Generator<string, void, unknown>
export const SimpleReplacer: Replacer = function* (_content, find) { export const SimpleReplacer: Replacer = function* (_content, find) {
yield find yield find
@ -208,10 +191,7 @@ export const BlockAnchorReplacer: Replacer = function* (content, find) {
} }
} }
export const WhitespaceNormalizedReplacer: Replacer = function* ( export const WhitespaceNormalizedReplacer: Replacer = function* (content, find) {
content,
find,
) {
const normalizeWhitespace = (text: string) => text.replace(/\s+/g, " ").trim() const normalizeWhitespace = (text: string) => text.replace(/\s+/g, " ").trim()
const normalizedFind = normalizeWhitespace(find) const normalizedFind = normalizeWhitespace(find)
@ -229,9 +209,7 @@ export const WhitespaceNormalizedReplacer: Replacer = function* (
// Find the actual substring in the original line that matches // Find the actual substring in the original line that matches
const words = find.trim().split(/\s+/) const words = find.trim().split(/\s+/)
if (words.length > 0) { if (words.length > 0) {
const pattern = words const pattern = words.map((word) => word.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")).join("\\s+")
.map((word) => word.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"))
.join("\\s+")
try { try {
const regex = new RegExp(pattern) const regex = new RegExp(pattern)
const match = line.match(regex) const match = line.match(regex)
@ -270,9 +248,7 @@ export const IndentationFlexibleReplacer: Replacer = function* (content, find) {
}), }),
) )
return lines return lines.map((line) => (line.trim().length === 0 ? line : line.slice(minIndent))).join("\n")
.map((line) => (line.trim().length === 0 ? line : line.slice(minIndent)))
.join("\n")
} }
const normalizedFind = removeIndentation(find) const normalizedFind = removeIndentation(find)
@ -423,10 +399,7 @@ export const ContextAwareReplacer: Replacer = function* (content, find) {
} }
} }
if ( if (totalNonEmptyLines === 0 || matchingLines / totalNonEmptyLines >= 0.5) {
totalNonEmptyLines === 0 ||
matchingLines / totalNonEmptyLines >= 0.5
) {
yield block yield block
break // Only match the first occurrence break // Only match the first occurrence
} }
@ -473,12 +446,7 @@ function trimDiff(diff: string): string {
return trimmedLines.join("\n") return trimmedLines.join("\n")
} }
export function replace( export function replace(content: string, oldString: string, newString: string, replaceAll = false): string {
content: string,
oldString: string,
newString: string,
replaceAll = false,
): string {
if (oldString === newString) { if (oldString === newString) {
throw new Error("oldString and newString must be different") throw new Error("oldString and newString must be different")
} }
@ -502,11 +470,7 @@ export function replace(
} }
const lastIndex = content.lastIndexOf(search) const lastIndex = content.lastIndexOf(search)
if (index !== lastIndex) continue if (index !== lastIndex) continue
return ( return content.substring(0, index) + newString + content.substring(index + search.length)
content.substring(0, index) +
newString +
content.substring(index + search.length)
)
} }
} }
throw new Error("oldString not found in content or was found multiple times") throw new Error("oldString not found in content or was found multiple times")

View file

@ -2,8 +2,8 @@ Performs exact string replacements in files.
 Usage:
 - You must use your `Read` tool at least once in the conversation before editing. This tool will error if you attempt an edit without reading the file.
-- When editing text from Read tool output, ensure you preserve the exact indentation (tabs/spaces) as it appears AFTER the line number prefix. The line number prefix format is: spaces + line number + tab. Everything after that tab is the actual file content to match. Never include any part of the line number prefix in the old_string or new_string.
+- When editing text from Read tool output, ensure you preserve the exact indentation (tabs/spaces) as it appears AFTER the line number prefix. The line number prefix format is: spaces + line number + tab. Everything after that tab is the actual file content to match. Never include any part of the line number prefix in the oldString or newString.
 - ALWAYS prefer editing existing files in the codebase. NEVER write new files unless explicitly required.
 - Only use emojis if the user explicitly requests it. Avoid adding emojis to files unless asked.
-- The edit will FAIL if `old_string` is not unique in the file. Either provide a larger string with more surrounding context to make it unique or use `replace_all` to change every instance of `old_string`.
-- Use `replace_all` for replacing and renaming strings across the file. This parameter is useful if you want to rename a variable for instance.
+- The edit will FAIL if `oldString` is not unique in the file. Either provide a larger string with more surrounding context to make it unique or use `replaceAll` to change every instance of `oldString`.
+- Use `replaceAll` for replacing and renaming strings across the file. This parameter is useful if you want to rename a variable for instance.
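
With the rename, edit-tool arguments use camelCase keys end to end. An illustrative argument object (the path and strings are made up):

```ts
// Illustrative edit-tool arguments after the snake_case -> camelCase rename;
// the file path and contents are hypothetical.
const editArgs = {
  filePath: "/home/user/project/src/config.ts",
  oldString: "const retries = 3",
  newString: "const retries = 5",
  replaceAll: false,
}
```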

View file

@ -20,16 +20,14 @@ export const GlobTool = Tool.define({
async execute(params) { async execute(params) {
const app = App.info() const app = App.info()
let search = params.path ?? app.path.cwd let search = params.path ?? app.path.cwd
search = path.isAbsolute(search) search = path.isAbsolute(search) ? search : path.resolve(app.path.cwd, search)
? search
: path.resolve(app.path.cwd, search)
const limit = 100 const limit = 100
const files = [] const files = []
let truncated = false let truncated = false
for (const file of await Ripgrep.files({ for (const file of await Ripgrep.files({
cwd: search, cwd: search,
glob: params.pattern, glob: [params.pattern],
})) { })) {
if (files.length >= limit) { if (files.length >= limit) {
truncated = true truncated = true
@ -53,17 +51,15 @@ export const GlobTool = Tool.define({
output.push(...files.map((f) => f.path)) output.push(...files.map((f) => f.path))
if (truncated) { if (truncated) {
output.push("") output.push("")
output.push( output.push("(Results are truncated. Consider using a more specific path or pattern.)")
"(Results are truncated. Consider using a more specific path or pattern.)",
)
} }
} }
return { return {
title: path.relative(app.path.root, search),
metadata: { metadata: {
count: files.length, count: files.length,
truncated, truncated,
title: path.relative(app.path.root, search),
}, },
output: output.join("\n"), output: output.join("\n"),
} }

View file

@ -9,21 +9,9 @@ export const GrepTool = Tool.define({
id: "grep", id: "grep",
description: DESCRIPTION, description: DESCRIPTION,
parameters: z.object({ parameters: z.object({
pattern: z pattern: z.string().describe("The regex pattern to search for in file contents"),
.string() path: z.string().optional().describe("The directory to search in. Defaults to the current working directory."),
.describe("The regex pattern to search for in file contents"), include: z.string().optional().describe('File pattern to include in the search (e.g. "*.js", "*.{ts,tsx}")'),
path: z
.string()
.optional()
.describe(
"The directory to search in. Defaults to the current working directory.",
),
include: z
.string()
.optional()
.describe(
'File pattern to include in the search (e.g. "*.js", "*.{ts,tsx}")',
),
}), }),
async execute(params) { async execute(params) {
if (!params.pattern) { if (!params.pattern) {
@ -51,7 +39,8 @@ export const GrepTool = Tool.define({
if (exitCode === 1) { if (exitCode === 1) {
return { return {
metadata: { matches: 0, truncated: false, title: params.pattern }, title: params.pattern,
metadata: { matches: 0, truncated: false },
output: "No files found", output: "No files found",
} }
} }
@ -66,12 +55,11 @@ export const GrepTool = Tool.define({
     for (const line of lines) {
       if (!line) continue
-      const parts = line.split(":", 3)
-      if (parts.length < 3) continue
-      const filePath = parts[0]
-      const lineNum = parseInt(parts[1], 10)
-      const lineText = parts[2]
+      const [filePath, lineNumStr, ...lineTextParts] = line.split(":")
+      if (!filePath || !lineNumStr || lineTextParts.length === 0) continue
+      const lineNum = parseInt(lineNumStr, 10)
+      const lineText = lineTextParts.join(":")

       const file = Bun.file(filePath)
const stats = await file.stat().catch(() => null) const stats = await file.stat().catch(() => null)
@ -93,7 +81,8 @@ export const GrepTool = Tool.define({
if (finalMatches.length === 0) { if (finalMatches.length === 0) {
return { return {
metadata: { matches: 0, truncated: false, title: params.pattern }, title: params.pattern,
metadata: { matches: 0, truncated: false },
output: "No files found", output: "No files found",
} }
} }
@ -114,16 +103,14 @@ export const GrepTool = Tool.define({
if (truncated) { if (truncated) {
outputLines.push("") outputLines.push("")
outputLines.push( outputLines.push("(Results are truncated. Consider using a more specific path or pattern.)")
"(Results are truncated. Consider using a more specific path or pattern.)",
)
} }
return { return {
title: params.pattern,
metadata: { metadata: {
matches: finalMatches.length, matches: finalMatches.length,
truncated, truncated,
title: params.pattern,
}, },
output: outputLines.join("\n"), output: outputLines.join("\n"),
} }
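
The new grep line parsing splits off only the leading path and line number, so colons inside the matched text are preserved. A standalone sketch of that parsing:

```ts
// Standalone sketch of the new ripgrep line parsing: the first two
// colon-separated fields are the path and line number; the rest is the match.
function parseLine(line: string) {
  const [filePath, lineNumStr, ...lineTextParts] = line.split(":")
  if (!filePath || !lineNumStr || lineTextParts.length === 0) return undefined
  return {
    path: filePath,
    lineNum: parseInt(lineNumStr, 10),
    lineText: lineTextParts.join(":"),
  }
}

// Colons inside the matched text survive the split.
console.log(parseLine("src/share.ts:12:const url = https://api.opencode.ai"))
```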

View file

@ -16,6 +16,19 @@ export const IGNORE_PATTERNS = [
"obj/", "obj/",
".idea/", ".idea/",
".vscode/", ".vscode/",
".zig-cache/",
"zig-out",
".coverage",
"coverage/",
"vendor/",
"tmp/",
"temp/",
".cache/",
"cache/",
"logs/",
".venv/",
"venv/",
"env/",
] ]
const LIMIT = 100 const LIMIT = 100
@ -24,16 +37,8 @@ export const ListTool = Tool.define({
id: "list", id: "list",
description: DESCRIPTION, description: DESCRIPTION,
parameters: z.object({ parameters: z.object({
path: z path: z.string().describe("The absolute path to the directory to list (must be absolute, not relative)").optional(),
.string() ignore: z.array(z.string()).describe("List of glob patterns to ignore").optional(),
.describe(
"The absolute path to the directory to list (must be absolute, not relative)",
)
.optional(),
ignore: z
.array(z.string())
.describe("List of glob patterns to ignore")
.optional(),
}), }),
async execute(params) { async execute(params) {
const app = App.info() const app = App.info()
@ -44,8 +49,7 @@ export const ListTool = Tool.define({
for await (const file of glob.scan({ cwd: searchPath, dot: true })) { for await (const file of glob.scan({ cwd: searchPath, dot: true })) {
if (IGNORE_PATTERNS.some((p) => file.includes(p))) continue if (IGNORE_PATTERNS.some((p) => file.includes(p))) continue
if (params.ignore?.some((pattern) => new Bun.Glob(pattern).match(file))) if (params.ignore?.some((pattern) => new Bun.Glob(pattern).match(file))) continue
continue
files.push(file) files.push(file)
if (files.length >= LIMIT) break if (files.length >= LIMIT) break
} }
@ -99,10 +103,10 @@ export const ListTool = Tool.define({
const output = `${searchPath}/\n` + renderDir(".", 0) const output = `${searchPath}/\n` + renderDir(".", 0)
return { return {
title: path.relative(app.path.root, searchPath),
metadata: { metadata: {
count: files.length, count: files.length,
truncated: files.length >= LIMIT, truncated: files.length >= LIMIT,
title: path.relative(app.path.root, searchPath),
}, },
output, output,
} }

View file

@ -13,20 +13,16 @@ export const LspDiagnosticTool = Tool.define({
}), }),
execute: async (args) => { execute: async (args) => {
const app = App.info() const app = App.info()
const normalized = path.isAbsolute(args.path) const normalized = path.isAbsolute(args.path) ? args.path : path.join(app.path.cwd, args.path)
? args.path
: path.join(app.path.cwd, args.path)
await LSP.touchFile(normalized, true) await LSP.touchFile(normalized, true)
const diagnostics = await LSP.diagnostics() const diagnostics = await LSP.diagnostics()
const file = diagnostics[normalized] const file = diagnostics[normalized]
return { return {
title: path.relative(app.path.root, normalized),
metadata: { metadata: {
diagnostics, diagnostics,
title: path.relative(app.path.root, normalized),
}, },
output: file?.length output: file?.length ? file.map(LSP.Diagnostic.pretty).join("\n") : "No errors found",
? file.map(LSP.Diagnostic.pretty).join("\n")
: "No errors found",
} }
}, },
}) })

View file

@ -15,9 +15,7 @@ export const LspHoverTool = Tool.define({
}), }),
execute: async (args) => { execute: async (args) => {
const app = App.info() const app = App.info()
const file = path.isAbsolute(args.file) const file = path.isAbsolute(args.file) ? args.file : path.join(app.path.cwd, args.file)
? args.file
: path.join(app.path.cwd, args.file)
await LSP.touchFile(file, true) await LSP.touchFile(file, true)
const result = await LSP.hover({ const result = await LSP.hover({
...args, ...args,
@ -25,14 +23,9 @@ export const LspHoverTool = Tool.define({
}) })
return { return {
title: path.relative(app.path.root, file) + ":" + args.line + ":" + args.character,
metadata: { metadata: {
result, result,
title:
path.relative(app.path.root, file) +
":" +
args.line +
":" +
args.character,
}, },
output: JSON.stringify(result, null, 2), output: JSON.stringify(result, null, 2),
} }

View file

@ -10,9 +10,7 @@ export const MultiEditTool = Tool.define({
description: DESCRIPTION, description: DESCRIPTION,
parameters: z.object({ parameters: z.object({
filePath: z.string().describe("The absolute path to the file to modify"), filePath: z.string().describe("The absolute path to the file to modify"),
edits: z edits: z.array(EditTool.parameters).describe("Array of edit operations to perform sequentially on the file"),
.array(EditTool.parameters)
.describe("Array of edit operations to perform sequentially on the file"),
}), }),
async execute(params, ctx) { async execute(params, ctx) {
const results = [] const results = []
@ -30,9 +28,9 @@ export const MultiEditTool = Tool.define({
} }
const app = App.info() const app = App.info()
return { return {
title: path.relative(app.path.root, params.filePath),
metadata: { metadata: {
results: results.map((r) => r.metadata), results: results.map((r) => r.metadata),
title: path.relative(app.path.root, params.filePath),
}, },
output: results.at(-1)!.output, output: results.at(-1)!.output,
} }

View file

@ -8,9 +8,9 @@ Before using this tool:
 To make multiple file edits, provide the following:
 1. file_path: The absolute path to the file to modify (must be absolute, not relative)
 2. edits: An array of edit operations to perform, where each edit contains:
-  - old_string: The text to replace (must match the file contents exactly, including all whitespace and indentation)
-  - new_string: The edited text to replace the old_string
-  - replace_all: Replace all occurrences of old_string. This parameter is optional and defaults to false.
+  - oldString: The text to replace (must match the file contents exactly, including all whitespace and indentation)
+  - newString: The edited text to replace the oldString
+  - replaceAll: Replace all occurrences of oldString. This parameter is optional and defaults to false.

 IMPORTANT:
 - All edits are applied in sequence, in the order they are provided
@ -24,8 +24,8 @@ CRITICAL REQUIREMENTS:
 3. Plan your edits carefully to avoid conflicts between sequential operations

 WARNING:
-- The tool will fail if edits.old_string doesn't match the file contents exactly (including whitespace)
-- The tool will fail if edits.old_string and edits.new_string are the same
+- The tool will fail if edits.oldString doesn't match the file contents exactly (including whitespace)
+- The tool will fail if edits.oldString and edits.newString are the same
 - Since edits are applied in sequence, ensure that earlier edits don't affect the text that later edits are trying to find

 When making edits:
@ -33,9 +33,9 @@ When making edits:
 - Do not leave the code in a broken state
 - Always use absolute file paths (starting with /)
 - Only use emojis if the user explicitly requests it. Avoid adding emojis to files unless asked.
-- Use replace_all for replacing and renaming strings across the file. This parameter is useful if you want to rename a variable for instance.
+- Use replaceAll for replacing and renaming strings across the file. This parameter is useful if you want to rename a variable for instance.

 If you want to create a new file, use:
 - A new file path, including dir name if needed
-- First edit: empty old_string and the new file's contents as new_string
+- First edit: empty oldString and the new file's contents as newString
 - Subsequent edits: normal edit operations on the created content
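
Under the renamed keys, a multiedit call looks like the following; the empty-oldString first edit is the documented way to create a new file. Values here are hypothetical:

```ts
// Illustrative multiedit arguments using the camelCase keys; the path and
// edit contents are hypothetical.
const multiEditArgs = {
  filePath: "/home/user/project/src/flags.ts",
  edits: [
    // First edit with an empty oldString creates the file with this content.
    { oldString: "", newString: "export const flags = { beta: false }\n" },
    // Later edits operate on the content created above.
    { oldString: "beta: false", newString: "beta: true", replaceAll: false },
  ],
}
```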

View file

@ -6,9 +6,7 @@ import { FileTime } from "../file/time"
import DESCRIPTION from "./patch.txt" import DESCRIPTION from "./patch.txt"
const PatchParams = z.object({ const PatchParams = z.object({
patchText: z patchText: z.string().describe("The full patch text that describes all changes to be made"),
.string()
.describe("The full patch text that describes all changes to be made"),
}) })
interface Change { interface Change {
@ -42,10 +40,7 @@ function identifyFilesNeeded(patchText: string): string[] {
const files: string[] = [] const files: string[] = []
const lines = patchText.split("\n") const lines = patchText.split("\n")
for (const line of lines) { for (const line of lines) {
if ( if (line.startsWith("*** Update File:") || line.startsWith("*** Delete File:")) {
line.startsWith("*** Update File:") ||
line.startsWith("*** Delete File:")
) {
const filePath = line.split(":", 2)[1]?.trim() const filePath = line.split(":", 2)[1]?.trim()
if (filePath) files.push(filePath) if (filePath) files.push(filePath)
} }
@ -65,10 +60,7 @@ function identifyFilesAdded(patchText: string): string[] {
return files return files
} }
function textToPatch( function textToPatch(patchText: string, _currentFiles: Record<string, string>): [PatchOperation[], number] {
patchText: string,
_currentFiles: Record<string, string>,
): [PatchOperation[], number] {
const operations: PatchOperation[] = [] const operations: PatchOperation[] = []
const lines = patchText.split("\n") const lines = patchText.split("\n")
let i = 0 let i = 0
@ -93,11 +85,7 @@ function textToPatch(
const changes: PatchChange[] = [] const changes: PatchChange[] = []
i++ i++
while ( while (i < lines.length && !lines[i].startsWith("@@") && !lines[i].startsWith("***")) {
i < lines.length &&
!lines[i].startsWith("@@") &&
!lines[i].startsWith("***")
) {
const changeLine = lines[i] const changeLine = lines[i]
if (changeLine.startsWith(" ")) { if (changeLine.startsWith(" ")) {
changes.push({ type: "keep", content: changeLine.substring(1) }) changes.push({ type: "keep", content: changeLine.substring(1) })
@ -151,10 +139,7 @@ function textToPatch(
return [operations, fuzz] return [operations, fuzz]
} }
function patchToCommit( function patchToCommit(operations: PatchOperation[], currentFiles: Record<string, string>): Commit {
operations: PatchOperation[],
currentFiles: Record<string, string>,
): Commit {
const changes: Record<string, Change> = {} const changes: Record<string, Change> = {}
for (const op of operations) { for (const op of operations) {
@ -173,9 +158,7 @@ function patchToCommit(
const lines = originalContent.split("\n") const lines = originalContent.split("\n")
for (const hunk of op.hunks) { for (const hunk of op.hunks) {
const contextIndex = lines.findIndex((line) => const contextIndex = lines.findIndex((line) => line.includes(hunk.contextLine))
line.includes(hunk.contextLine),
)
if (contextIndex === -1) { if (contextIndex === -1) {
throw new Error(`Context line not found: ${hunk.contextLine}`) throw new Error(`Context line not found: ${hunk.contextLine}`)
} }
@ -204,11 +187,7 @@ function patchToCommit(
return { changes } return { changes }
} }
function generateDiff( function generateDiff(oldContent: string, newContent: string, filePath: string): [string, number, number] {
oldContent: string,
newContent: string,
filePath: string,
): [string, number, number] {
// Mock implementation - would need actual diff generation // Mock implementation - would need actual diff generation
const lines1 = oldContent.split("\n") const lines1 = oldContent.split("\n")
const lines2 = newContent.split("\n") const lines2 = newContent.split("\n")
@ -296,9 +275,7 @@ export const PatchTool = Tool.define({
// Process the patch // Process the patch
const [patch, fuzz] = textToPatch(params.patchText, currentFiles) const [patch, fuzz] = textToPatch(params.patchText, currentFiles)
if (fuzz > 3) { if (fuzz > 3) {
throw new Error( throw new Error(`patch contains fuzzy matches (fuzz level: ${fuzz}). Please make your context lines more precise`)
`patch contains fuzzy matches (fuzz level: ${fuzz}). Please make your context lines more precise`,
)
} }
// Convert patch to commit // Convert patch to commit
@ -343,11 +320,7 @@ export const PatchTool = Tool.define({
const newContent = change.new_content || "" const newContent = change.new_content || ""
// Calculate diff statistics // Calculate diff statistics
const [, additions, removals] = generateDiff( const [, additions, removals] = generateDiff(oldContent, newContent, filePath)
oldContent,
newContent,
filePath,
)
totalAdditions += additions totalAdditions += additions
totalRemovals += removals totalRemovals += removals
@ -358,11 +331,11 @@ export const PatchTool = Tool.define({
const output = result const output = result
return { return {
title: `${filesToRead.length} files`,
metadata: { metadata: {
changed: changedFiles, changed: changedFiles,
additions: totalAdditions, additions: totalAdditions,
removals: totalRemovals, removals: totalRemovals,
title: `${filesToRead.length} files`,
}, },
output, output,
} }

View file

@ -7,7 +7,6 @@ import { FileTime } from "../file/time"
import DESCRIPTION from "./read.txt" import DESCRIPTION from "./read.txt"
import { App } from "../app/app" import { App } from "../app/app"
const MAX_READ_SIZE = 250 * 1024
const DEFAULT_READ_LIMIT = 2000 const DEFAULT_READ_LIMIT = 2000
const MAX_LINE_LENGTH = 2000 const MAX_LINE_LENGTH = 2000
@ -16,14 +15,8 @@ export const ReadTool = Tool.define({
description: DESCRIPTION, description: DESCRIPTION,
parameters: z.object({ parameters: z.object({
filePath: z.string().describe("The path to the file to read"), filePath: z.string().describe("The path to the file to read"),
offset: z offset: z.number().describe("The line number to start reading from (0-based)").optional(),
.number() limit: z.number().describe("The number of lines to read (defaults to 2000)").optional(),
.describe("The line number to start reading from (0-based)")
.optional(),
limit: z
.number()
.describe("The number of lines to read (defaults to 2000)")
.optional(),
}), }),
async execute(params, ctx) { async execute(params, ctx) {
let filePath = params.filePath let filePath = params.filePath
@ -40,38 +33,25 @@ export const ReadTool = Tool.define({
const suggestions = dirEntries const suggestions = dirEntries
.filter( .filter(
(entry) => (entry) =>
entry.toLowerCase().includes(base.toLowerCase()) || entry.toLowerCase().includes(base.toLowerCase()) || base.toLowerCase().includes(entry.toLowerCase()),
base.toLowerCase().includes(entry.toLowerCase()),
) )
.map((entry) => path.join(dir, entry)) .map((entry) => path.join(dir, entry))
.slice(0, 3) .slice(0, 3)
if (suggestions.length > 0) { if (suggestions.length > 0) {
throw new Error( throw new Error(`File not found: ${filePath}\n\nDid you mean one of these?\n${suggestions.join("\n")}`)
`File not found: ${filePath}\n\nDid you mean one of these?\n${suggestions.join("\n")}`,
)
} }
throw new Error(`File not found: ${filePath}`) throw new Error(`File not found: ${filePath}`)
} }
const stats = await file.stat()
if (stats.size > MAX_READ_SIZE)
throw new Error(
`File is too large (${stats.size} bytes). Maximum size is ${MAX_READ_SIZE} bytes`,
)
const limit = params.limit ?? DEFAULT_READ_LIMIT const limit = params.limit ?? DEFAULT_READ_LIMIT
const offset = params.offset || 0 const offset = params.offset || 0
const isImage = isImageFile(filePath) const isImage = isImageFile(filePath)
if (isImage) if (isImage) throw new Error(`This is an image file of type: ${isImage}\nUse a different tool to process images`)
throw new Error(
`This is an image file of type: ${isImage}\nUse a different tool to process images`,
)
const lines = await file.text().then((text) => text.split("\n")) const lines = await file.text().then((text) => text.split("\n"))
const raw = lines.slice(offset, offset + limit).map((line) => { const raw = lines.slice(offset, offset + limit).map((line) => {
return line.length > MAX_LINE_LENGTH return line.length > MAX_LINE_LENGTH ? line.substring(0, MAX_LINE_LENGTH) + "..." : line
? line.substring(0, MAX_LINE_LENGTH) + "..."
: line
}) })
const content = raw.map((line, index) => { const content = raw.map((line, index) => {
return `${(index + offset + 1).toString().padStart(5, "0")}| ${line}` return `${(index + offset + 1).toString().padStart(5, "0")}| ${line}`
@ -82,21 +62,19 @@ export const ReadTool = Tool.define({
output += content.join("\n") output += content.join("\n")
if (lines.length > offset + content.length) { if (lines.length > offset + content.length) {
output += `\n\n(File has more lines. Use 'offset' parameter to read beyond line ${ output += `\n\n(File has more lines. Use 'offset' parameter to read beyond line ${offset + content.length})`
offset + content.length
})`
} }
output += "\n</file>" output += "\n</file>"
// just warms the lsp client // just warms the lsp client
await LSP.touchFile(filePath, false) LSP.touchFile(filePath, false)
FileTime.read(ctx.sessionID, filePath) FileTime.read(ctx.sessionID, filePath)
return { return {
title: path.relative(App.info().path.root, filePath),
output, output,
metadata: { metadata: {
preview, preview,
title: path.relative(App.info().path.root, filePath),
}, },
} }
}, },

View file

@ -2,7 +2,7 @@ Reads a file from the local filesystem. You can access any file directly by usin
 Assume this tool is able to read all files on the machine. If the User provides a path to a file assume that path is valid. It is okay to read a file that does not exist; an error will be returned.
 Usage:
-- The file_path parameter must be an absolute path, not a relative path
+- The filePath parameter must be an absolute path, not a relative path
 - By default, it reads up to 2000 lines starting from the beginning of the file
 - You can optionally specify a line offset and limit (especially handy for long files), but it's recommended to read the whole file by not providing these parameters
 - Any lines longer than 2000 characters will be truncated

View file

@ -3,41 +3,33 @@ import DESCRIPTION from "./task.txt"
import { z } from "zod" import { z } from "zod"
import { Session } from "../session" import { Session } from "../session"
import { Bus } from "../bus" import { Bus } from "../bus"
import { Message } from "../session/message" import { MessageV2 } from "../session/message-v2"
import { Identifier } from "../id/id"
export const TaskTool = Tool.define({ export const TaskTool = Tool.define({
id: "task", id: "task",
description: DESCRIPTION, description: DESCRIPTION,
parameters: z.object({ parameters: z.object({
description: z description: z.string().describe("A short (3-5 words) description of the task"),
.string()
.describe("A short (3-5 words) description of the task"),
prompt: z.string().describe("The task for the agent to perform"), prompt: z.string().describe("The task for the agent to perform"),
}), }),
async execute(params, ctx) { async execute(params, ctx) {
const session = await Session.create(ctx.sessionID) const session = await Session.create(ctx.sessionID)
const msg = await Session.getMessage(ctx.sessionID, ctx.messageID) const msg = await Session.getMessage(ctx.sessionID, ctx.messageID)
const metadata = msg.metadata.assistant! if (msg.role !== "assistant") throw new Error("Not an assistant message")
function summary(input: Message.Info) { const messageID = Identifier.ascending("message")
const result = [] const parts: Record<string, MessageV2.ToolPart> = {}
const unsub = Bus.subscribe(MessageV2.Event.PartUpdated, async (evt) => {
for (const part of input.parts) { if (evt.properties.part.sessionID !== session.id) return
if (part.type === "tool-invocation") { if (evt.properties.part.messageID === messageID) return
result.push({ if (evt.properties.part.type !== "tool") return
toolInvocation: part.toolInvocation, parts[evt.properties.part.id] = evt.properties.part
metadata: input.metadata.tool[part.toolInvocation.toolCallId],
})
}
}
return result
}
const unsub = Bus.subscribe(Message.Event.Updated, async (evt) => {
if (evt.properties.info.metadata.sessionID !== session.id) return
ctx.metadata({ ctx.metadata({
title: params.description, title: params.description,
summary: summary(evt.properties.info), metadata: {
summary: Object.values(parts).sort((a, b) => a.id?.localeCompare(b.id)),
},
}) })
}) })
@ -45,11 +37,15 @@ export const TaskTool = Tool.define({
Session.abort(session.id) Session.abort(session.id)
}) })
const result = await Session.chat({ const result = await Session.chat({
messageID,
sessionID: session.id, sessionID: session.id,
modelID: metadata.modelID, modelID: msg.modelID,
providerID: metadata.providerID, providerID: msg.providerID,
parts: [ parts: [
{ {
id: Identifier.ascending("part"),
messageID,
sessionID: session.id,
type: "text", type: "text",
text: params.prompt, text: params.prompt,
}, },
@ -57,9 +53,9 @@ export const TaskTool = Tool.define({
}) })
unsub() unsub()
return { return {
metadata: {
title: params.description, title: params.description,
summary: summary(result), metadata: {
summary: result.parts.filter((x) => x.type === "tool"),
}, },
output: result.parts.findLast((x) => x.type === "text")!.text, output: result.parts.findLast((x) => x.type === "text")!.text,
} }

View file

@ -5,12 +5,8 @@ import { App } from "../app/app"
const TodoInfo = z.object({ const TodoInfo = z.object({
content: z.string().min(1).describe("Brief description of the task"), content: z.string().min(1).describe("Brief description of the task"),
status: z status: z.enum(["pending", "in_progress", "completed", "cancelled"]).describe("Current status of the task"),
.enum(["pending", "in_progress", "completed"]) priority: z.enum(["high", "medium", "low"]).describe("Priority level of the task"),
.describe("Current status of the task"),
priority: z
.enum(["high", "medium", "low"])
.describe("Priority level of the task"),
id: z.string().describe("Unique identifier for the todo item"), id: z.string().describe("Unique identifier for the todo item"),
}) })
type TodoInfo = z.infer<typeof TodoInfo> type TodoInfo = z.infer<typeof TodoInfo>
@ -32,9 +28,9 @@ export const TodoWriteTool = Tool.define({
const todos = state() const todos = state()
todos[opts.sessionID] = params.todos todos[opts.sessionID] = params.todos
return { return {
title: `${params.todos.filter((x) => x.status !== "completed").length} todos`,
output: JSON.stringify(params.todos, null, 2), output: JSON.stringify(params.todos, null, 2),
metadata: { metadata: {
title: `${params.todos.filter((x) => x.status !== "completed").length} todos`,
todos: params.todos, todos: params.todos,
}, },
} }
@ -48,9 +44,9 @@ export const TodoReadTool = Tool.define({
async execute(_params, opts) { async execute(_params, opts) {
const todos = state()[opts.sessionID] ?? [] const todos = state()[opts.sessionID] ?? []
return { return {
title: `${todos.filter((x) => x.status !== "completed").length} todos`,
metadata: { metadata: {
todos, todos,
title: `${todos.filter((x) => x.status !== "completed").length} todos`,
}, },
output: JSON.stringify(todos, null, 2), output: JSON.stringify(todos, null, 2),
} }

View file

@ -2,19 +2,15 @@ import type { StandardSchemaV1 } from "@standard-schema/spec"
 export namespace Tool {
   interface Metadata {
-    title: string
     [key: string]: any
   }
   export type Context<M extends Metadata = Metadata> = {
     sessionID: string
     messageID: string
     abort: AbortSignal
-    metadata(meta: M): void
+    metadata(input: { title?: string; metadata?: M }): void
   }
-  export interface Info<
-    Parameters extends StandardSchemaV1 = StandardSchemaV1,
-    M extends Metadata = Metadata,
-  > {
+  export interface Info<Parameters extends StandardSchemaV1 = StandardSchemaV1, M extends Metadata = Metadata> {
     id: string
     description: string
     parameters: Parameters
@ -22,15 +18,15 @@ export namespace Tool {
       args: StandardSchemaV1.InferOutput<Parameters>,
       ctx: Context,
     ): Promise<{
+      title: string
       metadata: M
       output: string
     }>
   }
-  export function define<
-    Parameters extends StandardSchemaV1,
-    Result extends Metadata,
-  >(input: Info<Parameters, Result>): Info<Parameters, Result> {
+  export function define<Parameters extends StandardSchemaV1, Result extends Metadata>(
+    input: Info<Parameters, Result>,
+  ): Info<Parameters, Result> {
     return input
   }
 }
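
With `title` promoted to the top level of a tool result and ctx.metadata() now taking an optional title, a tool definition under the new contract looks roughly like this. The import path and the echo tool itself are invented for illustration:

```ts
// Hedged sketch of a tool under the new contract: `title` sits at the top
// level of the result instead of inside `metadata`.
import z from "zod"
import { Tool } from "./tool"

export const EchoTool = Tool.define({
  id: "echo",
  description: "Echo the input back",
  parameters: z.object({ text: z.string().describe("Text to echo") }),
  async execute(params, ctx) {
    // Progress updates now pass an optional title alongside metadata.
    ctx.metadata({ title: "echoing", metadata: { length: params.text.length } })
    return {
      title: "echo",
      metadata: { length: params.text.length },
      output: params.text,
    }
  },
})
```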

View file

@ -14,9 +14,7 @@ export const WebFetchTool = Tool.define({
url: z.string().describe("The URL to fetch content from"), url: z.string().describe("The URL to fetch content from"),
format: z format: z
.enum(["text", "markdown", "html"]) .enum(["text", "markdown", "html"])
.describe( .describe("The format to return the content in (text, markdown, or html)"),
"The format to return the content in (text, markdown, or html)",
),
timeout: z timeout: z
.number() .number()
.min(0) .min(0)
@ -26,17 +24,11 @@ export const WebFetchTool = Tool.define({
}), }),
async execute(params, ctx) { async execute(params, ctx) {
// Validate URL // Validate URL
if ( if (!params.url.startsWith("http://") && !params.url.startsWith("https://")) {
!params.url.startsWith("http://") &&
!params.url.startsWith("https://")
) {
throw new Error("URL must start with http:// or https://") throw new Error("URL must start with http:// or https://")
} }
const timeout = Math.min( const timeout = Math.min((params.timeout ?? DEFAULT_TIMEOUT / 1000) * 1000, MAX_TIMEOUT)
(params.timeout ?? DEFAULT_TIMEOUT / 1000) * 1000,
MAX_TIMEOUT,
)
const controller = new AbortController() const controller = new AbortController()
const timeoutId = setTimeout(() => controller.abort(), timeout) const timeoutId = setTimeout(() => controller.abort(), timeout)
@ -46,8 +38,7 @@ export const WebFetchTool = Tool.define({
headers: { headers: {
"User-Agent": "User-Agent":
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
Accept: Accept: "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8",
"text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8",
"Accept-Language": "en-US,en;q=0.9", "Accept-Language": "en-US,en;q=0.9",
}, },
}) })
@ -79,16 +70,14 @@ export const WebFetchTool = Tool.define({
const text = await extractTextFromHTML(content) const text = await extractTextFromHTML(content)
return { return {
output: text, output: text,
metadata: {
title, title,
}, metadata: {},
} }
} }
return { return {
output: content, output: content,
metadata: {
title, title,
}, metadata: {},
} }
case "markdown": case "markdown":
@ -96,32 +85,28 @@ export const WebFetchTool = Tool.define({
const markdown = convertHTMLToMarkdown(content) const markdown = convertHTMLToMarkdown(content)
return { return {
output: markdown, output: markdown,
metadata: {
title, title,
}, metadata: {},
} }
} }
return { return {
output: "```\n" + content + "\n```", output: "```\n" + content + "\n```",
metadata: {
title, title,
}, metadata: {},
} }
case "html": case "html":
return { return {
output: content, output: content,
metadata: {
title, title,
}, metadata: {},
} }
default: default:
return { return {
output: content, output: content,
metadata: {
title, title,
}, metadata: {},
} }
} }
}, },
@ -143,16 +128,7 @@ async function extractTextFromHTML(html: string) {
.on("*", { .on("*", {
element(element) { element(element) {
// Reset skip flag when entering other elements // Reset skip flag when entering other elements
if ( if (!["script", "style", "noscript", "iframe", "object", "embed"].includes(element.tagName)) {
![
"script",
"style",
"noscript",
"iframe",
"object",
"embed",
].includes(element.tagName)
) {
skipContent = false skipContent = false
} }
}, },

View file

@ -13,18 +13,12 @@ export const WriteTool = Tool.define({
id: "write", id: "write",
description: DESCRIPTION, description: DESCRIPTION,
parameters: z.object({ parameters: z.object({
filePath: z filePath: z.string().describe("The absolute path to the file to write (must be absolute, not relative)"),
.string()
.describe(
"The absolute path to the file to write (must be absolute, not relative)",
),
content: z.string().describe("The content to write to the file"), content: z.string().describe("The content to write to the file"),
}), }),
async execute(params, ctx) { async execute(params, ctx) {
const app = App.info() const app = App.info()
const filepath = path.isAbsolute(params.filePath) const filepath = path.isAbsolute(params.filePath) ? params.filePath : path.join(app.path.cwd, params.filePath)
? params.filePath
: path.join(app.path.cwd, params.filePath)
const file = Bun.file(filepath) const file = Bun.file(filepath)
const exists = await file.exists() const exists = await file.exists()
@ -33,9 +27,7 @@ export const WriteTool = Tool.define({
await Permission.ask({ await Permission.ask({
id: "write", id: "write",
sessionID: ctx.sessionID, sessionID: ctx.sessionID,
title: exists title: exists ? "Overwrite this file: " + filepath : "Create new file: " + filepath,
? "Overwrite this file: " + filepath
: "Create new file: " + filepath,
metadata: { metadata: {
filePath: filepath, filePath: filepath,
content: params.content, content: params.content,
@ -62,11 +54,11 @@ export const WriteTool = Tool.define({
} }
return { return {
title: path.relative(app.path.root, filepath),
metadata: { metadata: {
diagnostics, diagnostics,
filepath, filepath,
exists: exists, exists: exists,
title: path.relative(app.path.root, filepath),
}, },
output, output,
} }

View file

@ -7,10 +7,7 @@ export abstract class NamedError extends Error {
abstract schema(): ZodSchema abstract schema(): ZodSchema
abstract toObject(): { name: string; data: any } abstract toObject(): { name: string; data: any }
static create<Name extends string, Data extends ZodSchema>( static create<Name extends string, Data extends ZodSchema>(name: Name, data: Data) {
name: Name,
data: Data,
) {
const schema = z const schema = z
.object({ .object({
name: z.literal(name), name: z.literal(name),

View file

@ -1,7 +1,17 @@
import { exists } from "fs/promises" import { exists } from "fs/promises"
import { dirname, join } from "path" import { dirname, join, relative } from "path"
export namespace Filesystem { export namespace Filesystem {
export function overlaps(a: string, b: string) {
const relA = relative(a, b)
const relB = relative(b, a)
return !relA || !relA.startsWith("..") || !relB || !relB.startsWith("..")
}
export function contains(parent: string, child: string) {
return relative(parent, child).startsWith("..")
}
export async function findUp(target: string, start: string, stop?: string) { export async function findUp(target: string, start: string, stop?: string) {
let current = start let current = start
const result = [] const result = []
@ -16,6 +26,21 @@ export namespace Filesystem {
return result return result
} }
export async function* up(options: { targets: string[]; start: string; stop?: string }) {
const { targets, start, stop } = options
let current = start
while (true) {
for (const target of targets) {
const search = join(current, target)
if (await exists(search)) yield search
}
if (stop === current) break
const parent = dirname(current)
if (parent === current) break
current = parent
}
}
export async function globUp(pattern: string, start: string, stop?: string) { export async function globUp(pattern: string, start: string, stop?: string) {
let current = start let current = start
const result = [] const result = []
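
The new up() generator walks from a start directory toward the filesystem root, yielding any target file found at each level and stopping after the stop directory. A hedged usage sketch with hypothetical paths and file names:

```ts
// Hedged usage sketch of the new Filesystem.up() generator; the directories
// and config file names are hypothetical, and the import path is assumed.
import { Filesystem } from "./util/filesystem"

for await (const found of Filesystem.up({
  targets: ["opencode.json", "package.json"],
  start: "/home/user/project/packages/web",
  stop: "/home/user/project",
})) {
  // Yields matches closest-first, stopping once the stop directory is scanned.
  console.log("found config:", found)
}
```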

View file

@ -1,15 +1,59 @@
import path from "path" import path from "path"
import fs from "fs/promises" import fs from "fs/promises"
import { Global } from "../global" import { Global } from "../global"
import z from "zod"
export namespace Log { export namespace Log {
export const Level = z.enum(["DEBUG", "INFO", "WARN", "ERROR"]).openapi({ ref: "LogLevel", description: "Log level" })
export type Level = z.infer<typeof Level>
const levelPriority: Record<Level, number> = {
DEBUG: 0,
INFO: 1,
WARN: 2,
ERROR: 3,
}
let currentLevel: Level = "INFO"
export function setLevel(level: Level) {
currentLevel = level
}
export function getLevel(): Level {
return currentLevel
}
function shouldLog(level: Level): boolean {
return levelPriority[level] >= levelPriority[currentLevel]
}
export type Logger = {
debug(message?: any, extra?: Record<string, any>): void
info(message?: any, extra?: Record<string, any>): void
error(message?: any, extra?: Record<string, any>): void
warn(message?: any, extra?: Record<string, any>): void
tag(key: string, value: string): Logger
clone(): Logger
time(
message: string,
extra?: Record<string, any>,
): {
stop(): void
[Symbol.dispose](): void
}
}
const loggers = new Map<string, Logger>()
export const Default = create({ service: "default" }) export const Default = create({ service: "default" })
export interface Options { export interface Options {
print: boolean print: boolean
level?: Level
} }
let logpath = "" let logpath = ""
export function file() { export function file() {
return logpath return logpath
} }
@ -19,10 +63,7 @@ export namespace Log {
await fs.mkdir(dir, { recursive: true }) await fs.mkdir(dir, { recursive: true })
cleanup(dir) cleanup(dir)
if (options.print) return if (options.print) return
logpath = path.join( logpath = path.join(dir, new Date().toISOString().split(".")[0].replace(/:/g, "") + ".log")
dir,
new Date().toISOString().split(".")[0].replace(/:/g, "") + ".log",
)
const logfile = Bun.file(logpath) const logfile = Bun.file(logpath)
await fs.truncate(logpath).catch(() => {}) await fs.truncate(logpath).catch(() => {})
const writer = logfile.writer() const writer = logfile.writer()
@@ -43,41 +84,54 @@ export namespace Log {
    const filesToDelete = files.slice(0, -10)
    await Promise.all(filesToDelete.map((file) => fs.unlink(file).catch(() => {})))
  }

  let last = Date.now()

  export function create(tags?: Record<string, any>) {
    tags = tags || {}

    const service = tags["service"]
    if (service && typeof service === "string") {
      const cached = loggers.get(service)
      if (cached) {
        return cached
      }
    }

    function build(message: any, extra?: Record<string, any>) {
      const prefix = Object.entries({
        ...tags,
        ...extra,
      })
        .filter(([_, value]) => value !== undefined && value !== null)
        .map(([key, value]) => `${key}=${typeof value === "object" ? JSON.stringify(value) : value}`)
        .join(" ")
      const next = new Date()
      const diff = next.getTime() - last
      last = next.getTime()
      return [next.toISOString().split(".")[0], "+" + diff + "ms", prefix, message].filter(Boolean).join(" ") + "\n"
    }

    const result: Logger = {
      debug(message?: any, extra?: Record<string, any>) {
        if (shouldLog("DEBUG")) {
          process.stderr.write("DEBUG " + build(message, extra))
        }
      },
      info(message?: any, extra?: Record<string, any>) {
        if (shouldLog("INFO")) {
          process.stderr.write("INFO " + build(message, extra))
        }
      },
      error(message?: any, extra?: Record<string, any>) {
        if (shouldLog("ERROR")) {
          process.stderr.write("ERROR " + build(message, extra))
        }
      },
      warn(message?: any, extra?: Record<string, any>) {
        if (shouldLog("WARN")) {
          process.stderr.write("WARN " + build(message, extra))
        }
      },
      tag(key: string, value: string) {
        if (tags) tags[key] = value
@@ -105,6 +159,10 @@ export namespace Log {
      },
    }

    if (service && typeof service === "string") {
      loggers.set(service, result)
    }

    return result
  }
}
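
The additions above gate every write through `shouldLog`, so the effective `Level` decides which lines reach stderr, and `create()` now returns one cached logger per `service` tag. A short sketch of how a caller might exercise this; the module path and tag values are illustrative:

import { Log } from "./util/log" // assumed import path

Log.setLevel("DEBUG") // default is "INFO"; DEBUG and above now pass shouldLog

const log = Log.create({ service: "session" }) // cached: a later create() with the same service returns this instance
log.debug("restoring", { id: "ses_123" })      // written: DEBUG >= DEBUG
log.info("ready")                              // written: INFO >= DEBUG

Log.setLevel("ERROR")
log.warn("disk almost full")                   // suppressed: WARN < ERROR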

View file

@@ -0,0 +1,53 @@
import { describe, expect, test } from "bun:test"
import fs from "fs/promises"
import path from "path"

describe("BunProc registry configuration", () => {
  test("should not contain hardcoded registry parameters", async () => {
    // Read the bun/index.ts file
    const bunIndexPath = path.join(__dirname, "../src/bun/index.ts")
    const content = await fs.readFile(bunIndexPath, "utf-8")

    // Verify that no hardcoded registry is present
    expect(content).not.toContain("--registry=")
    expect(content).not.toContain("hasNpmRcConfig")
    expect(content).not.toContain("NpmRc")
  })

  test("should use Bun's default registry resolution", async () => {
    // Read the bun/index.ts file
    const bunIndexPath = path.join(__dirname, "../src/bun/index.ts")
    const content = await fs.readFile(bunIndexPath, "utf-8")

    // Verify that it uses Bun's default resolution
    expect(content).toContain("Bun's default registry resolution")
    expect(content).toContain("Bun will use them automatically")
    expect(content).toContain("No need to pass --registry flag")
  })

  test("should have correct command structure without registry", async () => {
    // Read the bun/index.ts file
    const bunIndexPath = path.join(__dirname, "../src/bun/index.ts")
    const content = await fs.readFile(bunIndexPath, "utf-8")

    // Extract the install function
    const installFunctionMatch = content.match(/export async function install[\s\S]*?^ }/m)
    expect(installFunctionMatch).toBeTruthy()

    if (installFunctionMatch) {
      const installFunction = installFunctionMatch[0]

      // Verify expected arguments are present
      expect(installFunction).toContain('"add"')
      expect(installFunction).toContain('"--force"')
      expect(installFunction).toContain('"--exact"')
      expect(installFunction).toContain('"--cwd"')
      expect(installFunction).toContain('Global.Path.cache')
      expect(installFunction).toContain('pkg + "@" + version')

      // Verify no registry argument is added
      expect(installFunction).not.toContain('"--registry"')
      expect(installFunction).not.toContain('args.push("--registry')
    }
  })
})
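
These tests read `src/bun/index.ts` as text and assert on its contents, so they only pin down the install arguments indirectly: `--registry` must never be appended, leaving registry selection to Bun's own `bunfig.toml`/`.npmrc` resolution. A sketch of the argument list those assertions imply, reconstructed from the expectations above rather than copied from the real implementation:

// Hypothetical reconstruction of the argument list the install function is expected to build.
function installArgs(pkg: string, version: string, cacheDir: string): string[] {
  return [
    "add",
    "--force",
    "--exact",
    "--cwd",
    cacheDir,            // stands in for Global.Path.cache in the real module
    pkg + "@" + version, // e.g. "prettier@3.6.2" (example values)
    // intentionally no "--registry": Bun reads bunfig.toml / .npmrc on its own
  ]
}

console.log(installArgs("prettier", "3.6.2", "/tmp/opencode-cache"))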

View file

@@ -17,12 +17,7 @@ const testCases: TestCase[] = [
    replace: 'console.log("universe");',
  },
  {
    content: ["if (condition) {", " doSomething();", " doSomethingElse();", "}"].join("\n"),
    find: [" doSomething();", " doSomethingElse();"].join("\n"),
    replace: [" doNewThing();", " doAnotherThing();"].join("\n"),
  },
@@ -53,15 +48,8 @@ const testCases: TestCase[] = [
      " return result;",
      "}",
    ].join("\n"),
    find: ["function calculate(a, b) {", " // different middle content", " return result;", "}"].join("\n"),
    replace: ["function calculate(a, b) {", " return a * b * 2;", "}"].join("\n"),
  },
  {
    content: [
@@ -76,13 +64,7 @@ const testCases: TestCase[] = [
      "}",
    ].join("\n"),
    find: ["class MyClass {", " // different implementation", "}"].join("\n"),
    replace: ["class MyClass {", " constructor() {", " this.value = 42;", " }", "}"].join("\n"),
  },

  // WhitespaceNormalizedReplacer cases
@@ -104,48 +86,21 @@ const testCases: TestCase[] = [

  // IndentationFlexibleReplacer cases
  {
    content: [" function nested() {", ' console.log("deeply nested");', " return true;", " }"].join(
      "\n",
    ),
    find: ["function nested() {", ' console.log("deeply nested");', " return true;", "}"].join("\n"),
    replace: ["function nested() {", ' console.log("updated");', " return false;", "}"].join("\n"),
  },
  {
    content: [" if (true) {", ' console.log("level 1");', ' console.log("level 2");', " }"].join("\n"),
    find: ["if (true) {", 'console.log("level 1");', ' console.log("level 2");', "}"].join("\n"),
    replace: ["if (true) {", 'console.log("updated");', "}"].join("\n"),
  },

  // replaceAll option cases
  {
    content: ['console.log("test");', 'console.log("test");', 'console.log("test");'].join("\n"),
    find: 'console.log("test");',
    replace: 'console.log("updated");',
    all: true,
@@ -213,9 +168,7 @@ const testCases: TestCase[] = [

  // MultiOccurrenceReplacer cases (with replaceAll)
  {
    content: ["debug('start');", "debug('middle');", "debug('end');"].join("\n"),
    find: "debug",
    replace: "log",
    all: true,
@@ -239,9 +192,7 @@ const testCases: TestCase[] = [
    replace: "const value = 24;",
  },
  {
    content: ["", " if (condition) {", " doSomething();", " }", ""].join("\n"),
    find: ["if (condition) {", " doSomething();", "}"].join("\n"),
    replace: ["if (condition) {", " doNothing();", "}"].join("\n"),
  },
@@ -262,9 +213,7 @@ const testCases: TestCase[] = [
      " return result;",
      "}",
    ].join("\n"),
    replace: ["function calculate(a, b) {", " return (a + b) * 2;", "}"].join("\n"),
  },
  {
    content: [
@@ -278,15 +227,8 @@ const testCases: TestCase[] = [
      " }",
      "}",
    ].join("\n"),
    find: ["class TestClass {", " // different implementation", " // with multiple lines", "}"].join("\n"),
    replace: ["class TestClass {", " getValue() { return 42; }", "}"].join("\n"),
  },

  // Combined edge cases for new replacers
@@ -296,9 +238,7 @@ const testCases: TestCase[] = [
    replace: 'console.log("updated");',
  },
  {
    content: [" ", "function test() {", " return 'value';", "}", " "].join("\n"),
    find: ["function test() {", "return 'value';", "}"].join("\n"),
    replace: ["function test() {", "return 'new value';", "}"].join("\n"),
  },
@@ -346,13 +286,7 @@ const testCases: TestCase[] = [

  // ContextAwareReplacer - test with trailing newline in find string
  {
    content: ["class Test {", " method1() {", " return 1;", " }", "}"].join("\n"),
    find: [
      "class Test {",
      " // different content",
@@ -401,12 +335,7 @@ describe("EditTool Replacers", () => {
          replace(testCase.content, testCase.find, testCase.replace, testCase.all)
        }).toThrow()
      } else {
        const result = replace(testCase.content, testCase.find, testCase.replace, testCase.all)
        expect(result).toContain(testCase.replace)
      }
    })
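
The driver reduces every case to a single `replace(content, find, replace, all)` call and, for non-failing cases, only checks that the replacement text ends up in the output. A standalone sketch of the same call, mirroring the replaceAll case above; the import path is an assumption:

import { replace } from "../src/tool/edit" // assumed path for whatever module the tests import replace from

const content = ['console.log("test");', 'console.log("test");', 'console.log("test");'].join("\n")
const updated = replace(content, 'console.log("test");', 'console.log("updated");', true)

// With all=true the multi-occurrence replacers are expected to rewrite every occurrence.
console.log(updated.includes('console.log("updated");')) // true, matching the test's toContain check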

View file

@@ -42,10 +42,7 @@ describe("tool.glob", () => {
describe("tool.ls", () => {
  test("basic", async () => {
    const result = await App.provide({ cwd: process.cwd() }, async () => {
      return await ListTool.execute({ path: "./example", ignore: [".git"] }, ctx)
    })
    expect(result.output).toMatchSnapshot()
  })

View file

@@ -1 +1,2 @@
opencode-test
cmd/opencode/opencode

Some files were not shown because too many files have changed in this diff.