diff --git a/.github/workflows/notify-discord.yml b/.github/workflows/notify-discord.yml
deleted file mode 100644
index c9032c30..00000000
--- a/.github/workflows/notify-discord.yml
+++ /dev/null
@@ -1,14 +0,0 @@
-name: discord
-
-on:
-  release:
-    types: [published] # fires only when a release is published
-
-jobs:
-  notify:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Send nicely-formatted embed to Discord
-        uses: SethCohen/github-releases-to-discord@v1
-        with:
-          webhook_url: ${{ secrets.DISCORD_WEBHOOK }}
diff --git a/.github/workflows/opencode.yml b/.github/workflows/opencode.yml
deleted file mode 100644
index b2d5dacc..00000000
--- a/.github/workflows/opencode.yml
+++ /dev/null
@@ -1,24 +0,0 @@
-name: opencode
-
-on:
-  issue_comment:
-    types: [created]
-
-jobs:
-  opencode:
-    if: startsWith(github.event.comment.body, 'hey opencode')
-    runs-on: ubuntu-latest
-    permissions:
-      id-token: write
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 1
-
-      - name: Run opencode
-        uses: sst/opencode/sdks/github@github-v1
-        env:
-          ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
-        with:
-          model: anthropic/claude-sonnet-4-20250514
diff --git a/.github/workflows/publish-github-action.yml b/.github/workflows/publish-github-action.yml
deleted file mode 100644
index e4b4ae75..00000000
--- a/.github/workflows/publish-github-action.yml
+++ /dev/null
@@ -1,29 +0,0 @@
-name: publish-github-action
-
-on:
-  workflow_dispatch:
-
-concurrency: ${{ github.workflow }}-${{ github.ref }}
-
-permissions:
-  contents: write
-
-jobs:
-  publish:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
-
-      - run: git fetch --force --tags
-
-      - uses: oven-sh/setup-bun@v2
-        with:
-          bun-version: 1.2.17
-
-      - name: Publish
-        run: |
-          git config --global user.email "opencode@sst.dev"
-          git config --global user.name "opencode"
-          ./scripts/publish-github-action.ts
diff --git a/.github/workflows/stats.yml b/.github/workflows/stats.yml
index 188996aa..fed03b68 100644
--- a/.github/workflows/stats.yml
+++ b/.github/workflows/stats.yml
@@ -28,5 +28,5 @@ jobs:
           git config --local user.email "action@github.com"
           git config --local user.name "GitHub Action"
           git add STATS.md
-          git diff --staged --quiet || git commit -m "ignore: update download stats $(date -I)"
+          git diff --staged --quiet || git commit -m "Update download stats $(date -I)"
           git push
diff --git a/.gitignore b/.gitignore
index 27316da6..a07a7493 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,4 +5,3 @@ node_modules
 .env
 .idea
 .vscode
-openapi.json
diff --git a/AGENTS.md b/AGENTS.md
deleted file mode 100644
index d6aaf1bd..00000000
--- a/AGENTS.md
+++ /dev/null
@@ -1,15 +0,0 @@
-# TUI Agent Guidelines
-
-## Style
-
-- prefer single word variable/function names
-- avoid try catch where possible - prefer to let exceptions bubble up
-- avoid else statements where possible
-- do not make useless helper functions - inline functionality unless the
-  function is reusable or composable
-- prefer Bun apis
-
-## Workflow
-
-- you can regenerate the golang sdk by calling ./scripts/stainless.ts
-- we use bun for everything
diff --git a/README.md b/README.md
index 87afde2d..24841799 100644
--- a/README.md
+++ b/README.md
@@ -9,7 +9,7 @@
 

AI coding agent, built for the terminal.

- Discord + View docs npm Build status

@@ -30,23 +30,7 @@
 brew install sst/tap/opencode # macOS
 paru -S opencode-bin # Arch Linux
 ```
-> [!TIP]
-> Remove versions older than 0.1.x before installing.
-
-#### Installation Directory
-
-The install script respects the following priority order for the installation path:
-
-1. `$OPENCODE_INSTALL_DIR` - Custom installation directory
-2. `$XDG_BIN_DIR` - XDG Base Directory Specification compliant path
-3. `$HOME/bin` - Standard user binary directory (if exists or can be created)
-4. `$HOME/.opencode/bin` - Default fallback
-
-```bash
-# Examples
-OPENCODE_INSTALL_DIR=/usr/local/bin curl -fsSL https://opencode.ai/install | bash
-XDG_BIN_DIR=$HOME/.local/bin curl -fsSL https://opencode.ai/install | bash
-```
+> **Note:** Remove versions older than 0.1.x before installing
 
 ### Documentation
 
@@ -54,25 +38,10 @@ For more info on how to configure opencode [**head over to our docs**](https://o
 
 ### Contributing
 
-opencode is an opinionated tool so any fundamental feature needs to go through a
-design process with the core team.
+For any new features we'd appreciate it if you could open an issue first to discuss what you'd like to implement. We're pretty responsive there and it'll save you from working on something that we don't end up using. No need to do this for simpler fixes.
 
-> [!IMPORTANT]
-> We do not accept PRs for core features.
-
-However we still merge a ton of PRs - you can contribute:
-
-- Bug fixes
-- Improvements to LLM performance
-- Support for new providers
-- Fixes for env specific quirks
-- Missing standard behavior
-- Documentation
-
-Take a look at the git history to see what kind of PRs we end up merging.
-
-> [!NOTE]
-> If you do not follow the above guidelines we might close your PR.
+> **Note**: Please talk to us via github issues before spending time working on
+> a new feature
 
 To run opencode locally you need.
 
@@ -107,4 +76,4 @@ The other confusingly named repo has no relation to this one.
 
 You can [read the
 
 ---
-**Join our community** [Discord](https://discord.gg/opencode) | [YouTube](https://www.youtube.com/c/sst-dev) | [X.com](https://x.com/SST_dev)
+**Join our community** [YouTube](https://www.youtube.com/c/sst-dev) | [X.com](https://x.com/SST_dev)
diff --git a/STATS.md b/STATS.md
index 47e66412..e73da4b4 100644
--- a/STATS.md
+++ b/STATS.md
@@ -1,24 +1,9 @@
 # Download Stats
 
-| Date | GitHub Downloads | npm Downloads | Total |
-| ---------- | ---------------- | ---------------- | ----------------- |
-| 2025-06-29 | 18,789 (+0) | 39,420 (+0) | 58,209 (+0) |
-| 2025-06-30 | 20,127 (+1,338) | 41,059 (+1,639) | 61,186 (+2,977) |
-| 2025-07-01 | 22,108 (+1,981) | 43,745 (+2,686) | 65,853 (+4,667) |
-| 2025-07-02 | 24,814 (+2,706) | 46,168 (+2,423) | 70,982 (+5,129) |
-| 2025-07-03 | 27,834 (+3,020) | 49,955 (+3,787) | 77,789 (+6,807) |
-| 2025-07-04 | 30,608 (+2,774) | 54,758 (+4,803) | 85,366 (+7,577) |
-| 2025-07-05 | 32,524 (+1,916) | 58,371 (+3,613) | 90,895 (+5,529) |
-| 2025-07-06 | 33,766 (+1,242) | 59,694 (+1,323) | 93,460 (+2,565) |
-| 2025-07-08 | 38,052 (+4,286) | 64,468 (+4,774) | 102,520 (+9,060) |
-| 2025-07-10 | 43,796 (+5,744) | 71,402 (+6,934) | 115,198 (+12,678) |
-| 2025-07-11 | 46,982 (+3,186) | 77,462 (+6,060) | 124,444 (+9,246) |
-| 2025-07-12 | 49,302 (+2,320) | 82,177 (+4,715) | 131,479 (+7,035) |
-| 2025-07-13 | 50,803 (+1,501) | 86,394 (+4,217) | 137,197 (+5,718) |
-| 2025-07-14 | 53,283 (+2,480) | 87,860 (+1,466) | 141,143 (+3,946) |
-| 2025-07-15 | 57,590 (+4,307) | 91,036 (+3,176) | 148,626 (+7,483) |
-| 2025-07-16 | 62,313 (+4,723) | 95,258 (+4,222) | 157,571 (+8,945) |
-| 2025-07-17 | 66,684 (+4,371) | 100,048 (+4,790) | 166,732 (+9,161) |
-| 2025-07-18 | 70,379 (+3,695) | 102,587 (+2,539) | 172,966 (+6,234) |
-| 2025-07-18 | 70,380 (+1) | 102,587 (+0) | 172,967 (+1) |
-| 2025-07-19 | 73,497 (+3,117) | 105,904 (+3,317) | 179,401 (+6,434) |
+| Date | GitHub Downloads | npm Downloads | Total |
+| ---------- | ---------------- | --------------- | --------------- |
+| 2025-06-29 | 18,789 (+0) | 39,420 (+0) | 58,209 (+0) |
+| 2025-06-30 | 20,127 (+1,338) | 41,059 (+1,639) | 61,186 (+2,977) |
+| 2025-07-01 | 22,108 (+1,981) | 43,745 (+2,686) | 65,853 (+4,667) |
+| 2025-07-02 | 24,814 (+2,706) | 46,168 (+2,423) | 70,982 (+5,129) |
+| 2025-07-03 | 27,834 (+3,020) | 49,955 (+3,787) | 77,789 (+6,807) |
diff --git a/bun.lock b/bun.lock
index f789eb74..a14065e0 100644
--- a/bun.lock
+++ b/bun.lock
@@ -5,17 +5,12 @@
     "name": "opencode",
     "devDependencies": {
       "prettier": "3.5.3",
-      "sst": "3.17.8",
+      "sst": "3.17.6",
     },
   },
   "packages/function": {
     "name": "@opencode/function",
     "version": "0.0.1",
-    "dependencies": {
-      "@octokit/auth-app": "8.0.1",
-      "@octokit/rest": "22.0.0",
-      "jose": "6.0.11",
-    },
     "devDependencies": {
       "@cloudflare/workers-types": "4.20250522.0",
       "@types/node": "catalog:",
@@ -30,34 +25,38 @@
     },
     "dependencies": {
       "@clack/prompts": "0.11.0",
-      "@hono/zod-validator": "0.4.2",
-      "@modelcontextprotocol/sdk": "1.15.1",
+      "@flystorage/file-storage": "1.1.0",
+      "@flystorage/local-fs": "1.1.0",
+      "@hono/zod-validator": "0.5.0",
       "@openauthjs/openauth": "0.4.3",
+      "@standard-schema/spec": "1.0.0",
       "ai": "catalog:",
       "decimal.js": "10.5.0",
       "diff": "8.0.2",
+      "env-paths": "3.0.0",
       "hono": "4.7.10",
       "hono-openapi": "0.4.8",
       "isomorphic-git": "1.32.1",
       "open": "10.1.2",
       "remeda": "2.22.3",
+      "ts-lsp-client": "1.0.3",
       "turndown": "7.2.0",
       "vscode-jsonrpc": "8.2.1",
+      "vscode-languageclient": "8",
       "xdg-basedir": "5.1.0",
       "yargs": "18.0.0",
       "zod": "catalog:",
-
"zod-openapi": "4.1.0", + "zod-openapi": "4.2.4", + "zod-validation-error": "3.5.2", }, "devDependencies": { "@ai-sdk/amazon-bedrock": "2.2.10", "@ai-sdk/anthropic": "1.2.12", - "@standard-schema/spec": "1.0.0", "@tsconfig/bun": "1.0.7", "@types/bun": "latest", "@types/turndown": "5.0.5", "@types/yargs": "17.0.33", "typescript": "catalog:", - "vscode-languageserver-types": "3.17.5", "zod-to-json-schema": "3.24.5", }, }, @@ -79,13 +78,11 @@ "lang-map": "0.4.0", "luxon": "3.6.1", "marked": "15.0.12", - "marked-shiki": "1.2.0", "rehype-autolink-headings": "7.1.0", - "remeda": "2.26.0", "sharp": "0.32.5", "shiki": "3.4.2", "solid-js": "1.9.7", - "toolbeam-docs-theme": "0.4.3", + "toolbeam-docs-theme": "0.4.1", }, "devDependencies": { "@types/node": "catalog:", @@ -98,30 +95,38 @@ "sharp", "esbuild", ], + "patchedDependencies": { + "ai@4.3.16": "patches/ai@4.3.16.patch", + }, + "overrides": { + "zod": "3.24.2", + }, "catalog": { "@types/node": "22.13.9", - "ai": "5.0.0-beta.21", + "ai": "4.3.16", "typescript": "5.8.2", - "zod": "3.25.49", + "zod": "3.24.2", }, "packages": { "@ai-sdk/amazon-bedrock": ["@ai-sdk/amazon-bedrock@2.2.10", "", { "dependencies": { "@ai-sdk/provider": "1.1.3", "@ai-sdk/provider-utils": "2.2.8", "@smithy/eventstream-codec": "^4.0.1", "@smithy/util-utf8": "^4.0.0", "aws4fetch": "^1.0.20" }, "peerDependencies": { "zod": "^3.0.0" } }, "sha512-icLGO7Q0NinnHIPgT+y1QjHVwH4HwV+brWbvM+FfCG2Afpa89PyKa3Ret91kGjZpBgM/xnj1B7K5eM+rRlsXQA=="], "@ai-sdk/anthropic": ["@ai-sdk/anthropic@1.2.12", "", { "dependencies": { "@ai-sdk/provider": "1.1.3", "@ai-sdk/provider-utils": "2.2.8" }, "peerDependencies": { "zod": "^3.0.0" } }, "sha512-YSzjlko7JvuiyQFmI9RN1tNZdEiZxc+6xld/0tq/VkJaHpEzGAb1yiNxxvmYVcjvfu/PcvCxAAYXmTYQQ63IHQ=="], - "@ai-sdk/gateway": ["@ai-sdk/gateway@1.0.0-beta.8", "", { "dependencies": { "@ai-sdk/provider": "2.0.0-beta.1", "@ai-sdk/provider-utils": "3.0.0-beta.3" }, "peerDependencies": { "zod": "^3.25.49 || ^4" } }, "sha512-D2SqYRT/42JTiRxUuiWtn5cYQFscpb9Z14UNvJx7lnurBUXx57zy7TbLH0h7O+WbCluTQN5G6146JpUZ/SRyzw=="], + "@ai-sdk/provider": ["@ai-sdk/provider@1.1.3", "", { "dependencies": { "json-schema": "^0.4.0" } }, "sha512-qZMxYJ0qqX/RfnuIaab+zp8UAeJn/ygXXAffR5I4N0n1IrvA6qBsjc8hXLmBiMV2zoXlifkacF7sEFnYnjBcqg=="], - "@ai-sdk/provider": ["@ai-sdk/provider@2.0.0-beta.1", "", { "dependencies": { "json-schema": "^0.4.0" } }, "sha512-Z8SPncMtS3RsoXITmT7NVwrAq6M44dmw0DoUOYJqNNtCu8iMWuxB8Nxsoqpa0uEEy9R1V1ZThJAXTYgjTUxl3w=="], + "@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@2.2.8", "", { "dependencies": { "@ai-sdk/provider": "1.1.3", "nanoid": "^3.3.8", "secure-json-parse": "^2.7.0" }, "peerDependencies": { "zod": "^3.23.8" } }, "sha512-fqhG+4sCVv8x7nFzYnFo19ryhAa3w096Kmc3hWxMQfW/TubPOmt3A6tYZhl4mUfQWWQMsuSkLrtjlWuXBVSGQA=="], - "@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@3.0.0-beta.3", "", { "dependencies": { "@ai-sdk/provider": "2.0.0-beta.1", "@standard-schema/spec": "^1.0.0", "eventsource-parser": "^3.0.3", "zod-to-json-schema": "^3.24.1" }, "peerDependencies": { "zod": "^3.25.49 || ^4" } }, "sha512-4gZ392GxjzMF7TnReF2eTKhOSyiSS3ydRVq4I7jxkeV5sdEuMoH3gzfItmlctsqGxlMU1/+zKPwl5yYz9O2dzg=="], + "@ai-sdk/react": ["@ai-sdk/react@1.2.12", "", { "dependencies": { "@ai-sdk/provider-utils": "2.2.8", "@ai-sdk/ui-utils": "1.2.11", "swr": "^2.2.5", "throttleit": "2.1.0" }, "peerDependencies": { "react": "^18 || ^19 || ^19.0.0-rc", "zod": "^3.23.8" }, "optionalPeers": ["zod"] }, "sha512-jK1IZZ22evPZoQW3vlkZ7wvjYGYF+tRBKXtrcolduIkQ/m/sOAVcVeVDUDvh1T91xCnWCdUGCPZg2avZ90mv3g=="], + + 
"@ai-sdk/ui-utils": ["@ai-sdk/ui-utils@1.2.11", "", { "dependencies": { "@ai-sdk/provider": "1.1.3", "@ai-sdk/provider-utils": "2.2.8", "zod-to-json-schema": "^3.24.1" }, "peerDependencies": { "zod": "^3.23.8" } }, "sha512-3zcwCc8ezzFlwp3ZD15wAPjf2Au4s3vAbKsXQVyhxODHcmu0iyPO2Eua6D/vicq/AUm/BAo60r97O6HU+EI0+w=="], "@ampproject/remapping": ["@ampproject/remapping@2.3.0", "", { "dependencies": { "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw=="], "@apidevtools/json-schema-ref-parser": ["@apidevtools/json-schema-ref-parser@11.9.3", "", { "dependencies": { "@jsdevtools/ono": "^7.1.3", "@types/json-schema": "^7.0.15", "js-yaml": "^4.1.0" } }, "sha512-60vepv88RwcJtSHrD6MjIL6Ta3SOYbgfnkHb+ppAVK+o9mXprRtulx7VlRl3lN3bbvysAfCS7WMVfhUYemB0IQ=="], - "@astrojs/cloudflare": ["@astrojs/cloudflare@12.6.0", "", { "dependencies": { "@astrojs/internal-helpers": "0.6.1", "@astrojs/underscore-redirects": "1.0.0", "@cloudflare/workers-types": "^4.20250507.0", "tinyglobby": "^0.2.13", "vite": "^6.3.5", "wrangler": "^4.14.1" }, "peerDependencies": { "astro": "^5.0.0" } }, "sha512-pQ8bokC59GEiXvyXpC4swBNoL7C/EknP+82KFzQwgR/Aeo5N1oPiAoPHgJbpPya/YF4E26WODdCQfBQDvLRfuw=="], + "@astrojs/cloudflare": ["@astrojs/cloudflare@12.5.4", "", { "dependencies": { "@astrojs/internal-helpers": "0.6.1", "@astrojs/underscore-redirects": "0.6.1", "@cloudflare/workers-types": "^4.20250507.0", "tinyglobby": "^0.2.13", "vite": "^6.3.5", "wrangler": "^4.14.1" }, "peerDependencies": { "astro": "^5.0.0" } }, "sha512-WKUeMP2tIbddEu0tlVEPj8o9m/8CJB6who3a3jupuIyR56ltmW924ZOMYtp/C9uxH7KeDJXrMszRj3LHs9U97w=="], - "@astrojs/compiler": ["@astrojs/compiler@2.12.2", "", {}, "sha512-w2zfvhjNCkNMmMMOn5b0J8+OmUaBL1o40ipMvqcG6NRpdC+lKxmTi48DT8Xw0SzJ3AfmeFLB45zXZXtmbsjcgw=="], + "@astrojs/compiler": ["@astrojs/compiler@2.12.0", "", {}, "sha512-7bCjW6tVDpUurQLeKBUN9tZ5kSv5qYrGmcn0sG0IwacL7isR2ZbyyA3AdZ4uxsuUFOS2SlgReTH7wkxO6zpqWA=="], "@astrojs/internal-helpers": ["@astrojs/internal-helpers@0.6.1", "", {}, "sha512-l5Pqf6uZu31aG+3Lv8nl/3s4DbUzdlxTWDof4pEpto6GUJNhhCbelVi9dEyurOVyqaelwmS9oSyOWOENSfgo9A=="], @@ -131,7 +136,7 @@ "@astrojs/prism": ["@astrojs/prism@3.2.0", "", { "dependencies": { "prismjs": "^1.29.0" } }, "sha512-GilTHKGCW6HMq7y3BUv9Ac7GMe/MO9gi9GW62GzKtth0SwukCu/qp2wLiGpEujhY+VVhaG9v7kv/5vFzvf4NYw=="], - "@astrojs/sitemap": ["@astrojs/sitemap@3.4.1", "", { "dependencies": { "sitemap": "^8.0.0", "stream-replace-string": "^2.0.0", "zod": "^3.24.2" } }, "sha512-VjZvr1e4FH6NHyyHXOiQgLiw94LnCVY4v06wN/D0gZKchTMkg71GrAHJz81/huafcmavtLkIv26HnpfDq6/h/Q=="], + "@astrojs/sitemap": ["@astrojs/sitemap@3.4.0", "", { "dependencies": { "sitemap": "^8.0.0", "stream-replace-string": "^2.0.0", "zod": "^3.24.2" } }, "sha512-C5m/xsKvRSILKM3hy47n5wKtTQtJXn8epoYuUmCCstaE9XBt20yInym3Bz2uNbEiNfv11bokoW0MqeXPIvjFIQ=="], "@astrojs/solid-js": ["@astrojs/solid-js@5.1.0", "", { "dependencies": { "vite": "^6.3.5", "vite-plugin-solid": "^2.11.6" }, "peerDependencies": { "solid-devtools": "^0.30.1", "solid-js": "^1.8.5" }, "optionalPeers": ["solid-devtools"] }, "sha512-VmPHOU9k7m6HHCT2Y1mNzifilUnttlowBM36frGcfj5wERJE9Ci0QtWJbzdf6AlcoIirb7xVw+ByupU011Di9w=="], @@ -139,26 +144,24 @@ "@astrojs/telemetry": ["@astrojs/telemetry@3.2.1", "", { "dependencies": { "ci-info": "^4.2.0", "debug": "^4.4.0", "dlv": "^1.1.3", "dset": "^3.1.4", "is-docker": "^3.0.0", "is-wsl": "^3.1.0", "which-pm-runs": "^1.1.0" } }, 
"sha512-SSVM820Jqc6wjsn7qYfV9qfeQvePtVc1nSofhyap7l0/iakUKywj3hfy3UJAOV4sGV4Q/u450RD4AaCaFvNPlg=="], - "@astrojs/underscore-redirects": ["@astrojs/underscore-redirects@1.0.0", "", {}, "sha512-qZxHwVnmb5FXuvRsaIGaqWgnftjCuMY+GSbaVZdBmE4j8AfgPqKPxYp8SUERyJcjpKCEmO4wD6ybuGH8A2kVRQ=="], + "@astrojs/underscore-redirects": ["@astrojs/underscore-redirects@0.6.1", "", {}, "sha512-4bMLrs2KW+8/vHEE5Ffv2HbxCbbgXO+2N6MpoCsMXUlUoi7pgEEx8kbkzMXJ2dZtWF3gvwm9lvgjnFeanC2LGg=="], "@aws-crypto/crc32": ["@aws-crypto/crc32@5.2.0", "", { "dependencies": { "@aws-crypto/util": "^5.2.0", "@aws-sdk/types": "^3.222.0", "tslib": "^2.6.2" } }, "sha512-nLbCWqQNgUiwwtFsen1AdzAtvuLRsQS8rYgMuxCrdKf9kOssamGLuPwyTY9wyYblNr9+1XM8v6zoDTPPSIeANg=="], "@aws-crypto/util": ["@aws-crypto/util@5.2.0", "", { "dependencies": { "@aws-sdk/types": "^3.222.0", "@smithy/util-utf8": "^2.0.0", "tslib": "^2.6.2" } }, "sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ=="], - "@aws-sdk/types": ["@aws-sdk/types@3.840.0", "", { "dependencies": { "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-xliuHaUFZxEx1NSXeLLZ9Dyu6+EJVQKEoD+yM+zqUo3YDZ7medKJWY6fIOKiPX/N7XbLdBYwajb15Q7IL8KkeA=="], + "@aws-sdk/types": ["@aws-sdk/types@3.821.0", "", { "dependencies": { "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-Znroqdai1a90TlxGaJ+FK1lwC0fHpo97Xjsp5UKGR5JODYm7f9+/fF17ebO1KdoBr/Rm0UIFiF5VmI8ts9F1eA=="], "@babel/code-frame": ["@babel/code-frame@7.27.1", "", { "dependencies": { "@babel/helper-validator-identifier": "^7.27.1", "js-tokens": "^4.0.0", "picocolors": "^1.1.1" } }, "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg=="], - "@babel/compat-data": ["@babel/compat-data@7.28.0", "", {}, "sha512-60X7qkglvrap8mn1lh2ebxXdZYtUcpd7gsmy9kLaBJ4i/WdY8PqTSdxyA8qraikqKQK5C1KRBKXqznrVapyNaw=="], + "@babel/compat-data": ["@babel/compat-data@7.27.3", "", {}, "sha512-V42wFfx1ymFte+ecf6iXghnnP8kWTO+ZLXIyZq+1LAXHHvTZdVxicn4yiVYdYMGaCO3tmqub11AorKkv+iodqw=="], - "@babel/core": ["@babel/core@7.28.0", "", { "dependencies": { "@ampproject/remapping": "^2.2.0", "@babel/code-frame": "^7.27.1", "@babel/generator": "^7.28.0", "@babel/helper-compilation-targets": "^7.27.2", "@babel/helper-module-transforms": "^7.27.3", "@babel/helpers": "^7.27.6", "@babel/parser": "^7.28.0", "@babel/template": "^7.27.2", "@babel/traverse": "^7.28.0", "@babel/types": "^7.28.0", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", "json5": "^2.2.3", "semver": "^6.3.1" } }, "sha512-UlLAnTPrFdNGoFtbSXwcGFQBtQZJCNjaN6hQNP3UPvuNXT1i82N26KL3dZeIpNalWywr9IuQuncaAfUaS1g6sQ=="], + "@babel/core": ["@babel/core@7.27.4", "", { "dependencies": { "@ampproject/remapping": "^2.2.0", "@babel/code-frame": "^7.27.1", "@babel/generator": "^7.27.3", "@babel/helper-compilation-targets": "^7.27.2", "@babel/helper-module-transforms": "^7.27.3", "@babel/helpers": "^7.27.4", "@babel/parser": "^7.27.4", "@babel/template": "^7.27.2", "@babel/traverse": "^7.27.4", "@babel/types": "^7.27.3", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", "json5": "^2.2.3", "semver": "^6.3.1" } }, "sha512-bXYxrXFubeYdvB0NhD/NBB3Qi6aZeV20GOWVI47t2dkecCEoneR4NPVcb7abpXDEvejgrUfFtG6vG/zxAKmg+g=="], - "@babel/generator": ["@babel/generator@7.28.0", "", { "dependencies": { "@babel/parser": "^7.28.0", "@babel/types": "^7.28.0", "@jridgewell/gen-mapping": "^0.3.12", "@jridgewell/trace-mapping": "^0.3.28", "jsesc": "^3.0.2" } }, 
"sha512-lJjzvrbEeWrhB4P3QBsH7tey117PjLZnDbLiQEKjQ/fNJTjuq4HSqgFA+UNSwZT8D7dxxbnuSBMsa1lrWzKlQg=="], + "@babel/generator": ["@babel/generator@7.27.3", "", { "dependencies": { "@babel/parser": "^7.27.3", "@babel/types": "^7.27.3", "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.25", "jsesc": "^3.0.2" } }, "sha512-xnlJYj5zepml8NXtjkG0WquFUv8RskFqyFcVgTBp5k+NaA/8uw/K+OSVf8AMGw5e9HKP2ETd5xpK5MLZQD6b4Q=="], "@babel/helper-compilation-targets": ["@babel/helper-compilation-targets@7.27.2", "", { "dependencies": { "@babel/compat-data": "^7.27.2", "@babel/helper-validator-option": "^7.27.1", "browserslist": "^4.24.0", "lru-cache": "^5.1.1", "semver": "^6.3.1" } }, "sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ=="], - "@babel/helper-globals": ["@babel/helper-globals@7.28.0", "", {}, "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw=="], - "@babel/helper-module-imports": ["@babel/helper-module-imports@7.27.1", "", { "dependencies": { "@babel/traverse": "^7.27.1", "@babel/types": "^7.27.1" } }, "sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w=="], "@babel/helper-module-transforms": ["@babel/helper-module-transforms@7.27.3", "", { "dependencies": { "@babel/helper-module-imports": "^7.27.1", "@babel/helper-validator-identifier": "^7.27.1", "@babel/traverse": "^7.27.3" }, "peerDependencies": { "@babel/core": "^7.0.0" } }, "sha512-dSOvYwvyLsWBeIRyOeHXp5vPj5l1I011r52FM1+r1jCERv+aFXYk4whgQccYEGYxK2H3ZAIA8nuPkQ0HaUo3qg=="], @@ -171,19 +174,19 @@ "@babel/helper-validator-option": ["@babel/helper-validator-option@7.27.1", "", {}, "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg=="], - "@babel/helpers": ["@babel/helpers@7.27.6", "", { "dependencies": { "@babel/template": "^7.27.2", "@babel/types": "^7.27.6" } }, "sha512-muE8Tt8M22638HU31A3CgfSUciwz1fhATfoVai05aPXGor//CdWDCbnlY1yvBPo07njuVOCNGCSp/GTt12lIug=="], + "@babel/helpers": ["@babel/helpers@7.27.4", "", { "dependencies": { "@babel/template": "^7.27.2", "@babel/types": "^7.27.3" } }, "sha512-Y+bO6U+I7ZKaM5G5rDUZiYfUvQPUibYmAFe7EnKdnKBbVXDZxvp+MWOH5gYciY0EPk4EScsuFMQBbEfpdRKSCQ=="], - "@babel/parser": ["@babel/parser@7.28.0", "", { "dependencies": { "@babel/types": "^7.28.0" }, "bin": "./bin/babel-parser.js" }, "sha512-jVZGvOxOuNSsuQuLRTh13nU0AogFlw32w/MT+LV6D3sP5WdbW61E77RnkbaO2dUvmPAYrBDJXGn5gGS6tH4j8g=="], + "@babel/parser": ["@babel/parser@7.27.4", "", { "dependencies": { "@babel/types": "^7.27.3" }, "bin": "./bin/babel-parser.js" }, "sha512-BRmLHGwpUqLFR2jzx9orBuX/ABDkj2jLKOXrHDTN2aOKL+jFDDKaRNo9nyYsIl9h/UE/7lMKdDjKQQyxKKDZ7g=="], "@babel/plugin-syntax-jsx": ["@babel/plugin-syntax-jsx@7.27.1", "", { "dependencies": { "@babel/helper-plugin-utils": "^7.27.1" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "sha512-y8YTNIeKoyhGd9O0Jiyzyyqk8gdjnumGTQPsz0xOZOQ2RmkVJeZ1vmmfIvFEKqucBG6axJGBZDE/7iI5suUI/w=="], - "@babel/runtime": ["@babel/runtime@7.27.6", "", {}, "sha512-vbavdySgbTTrmFE+EsiqUTzlOr5bzlnJtUv9PynGCAKvfQqjIXbvFdumPM/GxMDfyuGMJaJAU6TO4zc1Jf1i8Q=="], + "@babel/runtime": ["@babel/runtime@7.27.4", "", {}, "sha512-t3yaEOuGu9NlIZ+hIeGbBjFtZT7j2cb2tg0fuaJKeGotchRjjLfrBA9Kwf8quhpP1EUuxModQg04q/mBwyg8uA=="], "@babel/template": ["@babel/template@7.27.2", "", { "dependencies": { "@babel/code-frame": "^7.27.1", "@babel/parser": "^7.27.2", "@babel/types": "^7.27.1" } }, 
"sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw=="], - "@babel/traverse": ["@babel/traverse@7.28.0", "", { "dependencies": { "@babel/code-frame": "^7.27.1", "@babel/generator": "^7.28.0", "@babel/helper-globals": "^7.28.0", "@babel/parser": "^7.28.0", "@babel/template": "^7.27.2", "@babel/types": "^7.28.0", "debug": "^4.3.1" } }, "sha512-mGe7UK5wWyh0bKRfupsUchrQGqvDbZDbKJw+kcRGSmdHVYrv+ltd0pnpDTVpiTqnaBru9iEvA8pz8W46v0Amwg=="], + "@babel/traverse": ["@babel/traverse@7.27.4", "", { "dependencies": { "@babel/code-frame": "^7.27.1", "@babel/generator": "^7.27.3", "@babel/parser": "^7.27.4", "@babel/template": "^7.27.2", "@babel/types": "^7.27.3", "debug": "^4.3.1", "globals": "^11.1.0" } }, "sha512-oNcu2QbHqts9BtOWJosOVJapWjBDSxGCpFvikNR5TGDYDQf3JwpIoMzIKrvfoti93cLfPJEG4tH9SPVeyCGgdA=="], - "@babel/types": ["@babel/types@7.28.1", "", { "dependencies": { "@babel/helper-string-parser": "^7.27.1", "@babel/helper-validator-identifier": "^7.27.1" } }, "sha512-x0LvFTekgSX+83TI28Y9wYPUfzrnl2aT5+5QLnO6v7mSJYtEEevuDRN0F0uSHRk1G1IWZC43o00Y0xDDrpBGPQ=="], + "@babel/types": ["@babel/types@7.27.3", "", { "dependencies": { "@babel/helper-string-parser": "^7.27.1", "@babel/helper-validator-identifier": "^7.27.1" } }, "sha512-Y1GkI4ktrtvmawoSq+4FCVHNryea6uR+qUQy0AGxLSsjCX0nVmkYQMBLHDkXZuo5hGx7eYdnIaslsdBFm7zbUw=="], "@capsizecss/unpack": ["@capsizecss/unpack@2.4.0", "", { "dependencies": { "blob-to-buffer": "^1.2.8", "cross-fetch": "^3.0.4", "fontkit": "^2.0.2" } }, "sha512-GrSU71meACqcmIUxPYOJvGKF0yryjN/L1aCuE9DViCTJI7bfkjgYDPD1zbNDcINJwSSP6UaBZY9GAbYDO7re0Q=="], @@ -193,17 +196,17 @@ "@cloudflare/kv-asset-handler": ["@cloudflare/kv-asset-handler@0.4.0", "", { "dependencies": { "mime": "^3.0.0" } }, "sha512-+tv3z+SPp+gqTIcImN9o0hqE9xyfQjI1XD9pL6NuKjua9B1y7mNYv0S9cP+QEbA4ppVgGZEmKOvHX5G5Ei1CVA=="], - "@cloudflare/unenv-preset": ["@cloudflare/unenv-preset@2.3.3", "", { "peerDependencies": { "unenv": "2.0.0-rc.17", "workerd": "^1.20250508.0" }, "optionalPeers": ["workerd"] }, "sha512-/M3MEcj3V2WHIRSW1eAQBPRJ6JnGQHc6JKMAPLkDb7pLs3m6X9ES/+K3ceGqxI6TKeF32AWAi7ls0AYzVxCP0A=="], + "@cloudflare/unenv-preset": ["@cloudflare/unenv-preset@2.3.2", "", { "peerDependencies": { "unenv": "2.0.0-rc.17", "workerd": "^1.20250508.0" }, "optionalPeers": ["workerd"] }, "sha512-MtUgNl+QkQyhQvv5bbWP+BpBC1N0me4CHHuP2H4ktmOMKdB/6kkz/lo+zqiA4mEazb4y+1cwyNjVrQ2DWeE4mg=="], - "@cloudflare/workerd-darwin-64": ["@cloudflare/workerd-darwin-64@1.20250709.0", "", { "os": "darwin", "cpu": "x64" }, "sha512-VqwcvnbI8FNCP87ZWNHA3/sAC5U9wMbNnjBG0sHEYzM7B9RPHKYHdVKdBEWhzZXnkQYMK81IHm4CZsK16XxAuQ=="], + "@cloudflare/workerd-darwin-64": ["@cloudflare/workerd-darwin-64@1.20250525.0", "", { "os": "darwin", "cpu": "x64" }, "sha512-L5l+7sSJJT2+riR5rS3Q3PKNNySPjWfRIeaNGMVRi1dPO6QPi4lwuxfRUFNoeUdilZJUVPfSZvTtj9RedsKznQ=="], - "@cloudflare/workerd-darwin-arm64": ["@cloudflare/workerd-darwin-arm64@1.20250709.0", "", { "os": "darwin", "cpu": "arm64" }, "sha512-A54ttSgXMM4huChPTThhkieOjpDxR+srVOO9zjTHVIyoQxA8zVsku4CcY/GQ95RczMV+yCKVVu/tAME7vwBFuA=="], + "@cloudflare/workerd-darwin-arm64": ["@cloudflare/workerd-darwin-arm64@1.20250525.0", "", { "os": "darwin", "cpu": "arm64" }, "sha512-Y3IbIdrF/vJWh/WBvshwcSyUh175VAiLRW7963S1dXChrZ1N5wuKGQm9xY69cIGVtitpMJWWW3jLq7J/Xxwm0Q=="], - "@cloudflare/workerd-linux-64": ["@cloudflare/workerd-linux-64@1.20250709.0", "", { "os": "linux", "cpu": "x64" }, "sha512-no4O3OK+VXINIxv99OHJDpIgML2ZssrSvImwLtULzqm+cl4t1PIfXNRUqj89ujTkmad+L9y4G6dBQMPCLnmlGg=="], + 
"@cloudflare/workerd-linux-64": ["@cloudflare/workerd-linux-64@1.20250525.0", "", { "os": "linux", "cpu": "x64" }, "sha512-KSyQPAby+c6cpENoO0ayCQlY6QIh28l/+QID7VC1SLXfiNHy+hPNsH1vVBTST6CilHVAQSsy9tCZ9O9XECB8yg=="], - "@cloudflare/workerd-linux-arm64": ["@cloudflare/workerd-linux-arm64@1.20250709.0", "", { "os": "linux", "cpu": "arm64" }, "sha512-7cNICk2Qd+m4QGrcmWyAuZJXTHt1ud6isA+dic7Yk42WZmwXhlcUATyvFD9FSQNFcldjuRB4n8JlWEFqZBn+lw=="], + "@cloudflare/workerd-linux-arm64": ["@cloudflare/workerd-linux-arm64@1.20250525.0", "", { "os": "linux", "cpu": "arm64" }, "sha512-Nt0FUxS2kQhJUea4hMCNPaetkrAFDhPnNX/ntwcqVlGgnGt75iaAhupWJbU0GB+gIWlKeuClUUnDZqKbicoKyg=="], - "@cloudflare/workerd-windows-64": ["@cloudflare/workerd-windows-64@1.20250709.0", "", { "os": "win32", "cpu": "x64" }, "sha512-j1AyO8V/62Q23EJplWgzBlRCqo/diXgox58AbDqSqgyzCBAlvUzXQRDBab/FPNG/erRqt7I1zQhahrBhrM0uLA=="], + "@cloudflare/workerd-windows-64": ["@cloudflare/workerd-windows-64@1.20250525.0", "", { "os": "win32", "cpu": "x64" }, "sha512-mwTj+9f3uIa4NEXR1cOa82PjLa6dbrb3J+KCVJFYIaq7e63VxEzOchCXS4tublT2pmOhmFqkgBMXrxozxNkR2Q=="], "@cloudflare/workers-types": ["@cloudflare/workers-types@4.20250522.0", "", {}, "sha512-9RIffHobc35JWeddzBguGgPa4wLDr5x5F94+0/qy7LiV6pTBQ/M5qGEN9VA16IDT3EUpYI0WKh6VpcmeVEtVtw=="], @@ -211,73 +214,79 @@ "@ctrl/tinycolor": ["@ctrl/tinycolor@4.1.0", "", {}, "sha512-WyOx8cJQ+FQus4Mm4uPIZA64gbk3Wxh0so5Lcii0aJifqwoVOlfFtorjLE0Hen4OYyHZMXDWqMmaQemBhgxFRQ=="], - "@emnapi/runtime": ["@emnapi/runtime@1.4.4", "", { "dependencies": { "tslib": "^2.4.0" } }, "sha512-hHyapA4A3gPaDCNfiqyZUStTMqIkKRshqPIuDOXv1hcBnD4U3l8cP0T1HMCfGRxQ6V64TGCcoswChANyOAwbQg=="], + "@emnapi/runtime": ["@emnapi/runtime@1.4.3", "", { "dependencies": { "tslib": "^2.4.0" } }, "sha512-pBPWdu6MLKROBX05wSNKcNb++m5Er+KQ9QkB+WVM+pW2Kx9hoSrVTnu3BdkI5eBLZoKu/J6mW/B6i6bJB2ytXQ=="], - "@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.25.6", "", { "os": "aix", "cpu": "ppc64" }, "sha512-ShbM/3XxwuxjFiuVBHA+d3j5dyac0aEVVq1oluIDf71hUw0aRF59dV/efUsIwFnR6m8JNM2FjZOzmaZ8yG61kw=="], + "@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.25.5", "", { "os": "aix", "cpu": "ppc64" }, "sha512-9o3TMmpmftaCMepOdA5k/yDw8SfInyzWWTjYTFCX3kPSDJMROQTb8jg+h9Cnwnmm1vOzvxN7gIfB5V2ewpjtGA=="], - "@esbuild/android-arm": ["@esbuild/android-arm@0.25.6", "", { "os": "android", "cpu": "arm" }, "sha512-S8ToEOVfg++AU/bHwdksHNnyLyVM+eMVAOf6yRKFitnwnbwwPNqKr3srzFRe7nzV69RQKb5DgchIX5pt3L53xg=="], + "@esbuild/android-arm": ["@esbuild/android-arm@0.25.5", "", { "os": "android", "cpu": "arm" }, "sha512-AdJKSPeEHgi7/ZhuIPtcQKr5RQdo6OO2IL87JkianiMYMPbCtot9fxPbrMiBADOWWm3T2si9stAiVsGbTQFkbA=="], - "@esbuild/android-arm64": ["@esbuild/android-arm64@0.25.6", "", { "os": "android", "cpu": "arm64" }, "sha512-hd5zdUarsK6strW+3Wxi5qWws+rJhCCbMiC9QZyzoxfk5uHRIE8T287giQxzVpEvCwuJ9Qjg6bEjcRJcgfLqoA=="], + "@esbuild/android-arm64": ["@esbuild/android-arm64@0.25.5", "", { "os": "android", "cpu": "arm64" }, "sha512-VGzGhj4lJO+TVGV1v8ntCZWJktV7SGCs3Pn1GRWI1SBFtRALoomm8k5E9Pmwg3HOAal2VDc2F9+PM/rEY6oIDg=="], - "@esbuild/android-x64": ["@esbuild/android-x64@0.25.6", "", { "os": "android", "cpu": "x64" }, "sha512-0Z7KpHSr3VBIO9A/1wcT3NTy7EB4oNC4upJ5ye3R7taCc2GUdeynSLArnon5G8scPwaU866d3H4BCrE5xLW25A=="], + "@esbuild/android-x64": ["@esbuild/android-x64@0.25.5", "", { "os": "android", "cpu": "x64" }, "sha512-D2GyJT1kjvO//drbRT3Hib9XPwQeWd9vZoBJn+bu/lVsOZ13cqNdDeqIF/xQ5/VmWvMduP6AmXvylO/PIc2isw=="], - "@esbuild/darwin-arm64": ["@esbuild/darwin-arm64@0.25.6", "", { "os": "darwin", "cpu": "arm64" }, 
"sha512-FFCssz3XBavjxcFxKsGy2DYK5VSvJqa6y5HXljKzhRZ87LvEi13brPrf/wdyl/BbpbMKJNOr1Sd0jtW4Ge1pAA=="], + "@esbuild/darwin-arm64": ["@esbuild/darwin-arm64@0.25.5", "", { "os": "darwin", "cpu": "arm64" }, "sha512-GtaBgammVvdF7aPIgH2jxMDdivezgFu6iKpmT+48+F8Hhg5J/sfnDieg0aeG/jfSvkYQU2/pceFPDKlqZzwnfQ=="], - "@esbuild/darwin-x64": ["@esbuild/darwin-x64@0.25.6", "", { "os": "darwin", "cpu": "x64" }, "sha512-GfXs5kry/TkGM2vKqK2oyiLFygJRqKVhawu3+DOCk7OxLy/6jYkWXhlHwOoTb0WqGnWGAS7sooxbZowy+pK9Yg=="], + "@esbuild/darwin-x64": ["@esbuild/darwin-x64@0.25.5", "", { "os": "darwin", "cpu": "x64" }, "sha512-1iT4FVL0dJ76/q1wd7XDsXrSW+oLoquptvh4CLR4kITDtqi2e/xwXwdCVH8hVHU43wgJdsq7Gxuzcs6Iq/7bxQ=="], - "@esbuild/freebsd-arm64": ["@esbuild/freebsd-arm64@0.25.6", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-aoLF2c3OvDn2XDTRvn8hN6DRzVVpDlj2B/F66clWd/FHLiHaG3aVZjxQX2DYphA5y/evbdGvC6Us13tvyt4pWg=="], + "@esbuild/freebsd-arm64": ["@esbuild/freebsd-arm64@0.25.5", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-nk4tGP3JThz4La38Uy/gzyXtpkPW8zSAmoUhK9xKKXdBCzKODMc2adkB2+8om9BDYugz+uGV7sLmpTYzvmz6Sw=="], - "@esbuild/freebsd-x64": ["@esbuild/freebsd-x64@0.25.6", "", { "os": "freebsd", "cpu": "x64" }, "sha512-2SkqTjTSo2dYi/jzFbU9Plt1vk0+nNg8YC8rOXXea+iA3hfNJWebKYPs3xnOUf9+ZWhKAaxnQNUf2X9LOpeiMQ=="], + "@esbuild/freebsd-x64": ["@esbuild/freebsd-x64@0.25.5", "", { "os": "freebsd", "cpu": "x64" }, "sha512-PrikaNjiXdR2laW6OIjlbeuCPrPaAl0IwPIaRv+SMV8CiM8i2LqVUHFC1+8eORgWyY7yhQY+2U2fA55mBzReaw=="], - "@esbuild/linux-arm": ["@esbuild/linux-arm@0.25.6", "", { "os": "linux", "cpu": "arm" }, "sha512-SZHQlzvqv4Du5PrKE2faN0qlbsaW/3QQfUUc6yO2EjFcA83xnwm91UbEEVx4ApZ9Z5oG8Bxz4qPE+HFwtVcfyw=="], + "@esbuild/linux-arm": ["@esbuild/linux-arm@0.25.5", "", { "os": "linux", "cpu": "arm" }, "sha512-cPzojwW2okgh7ZlRpcBEtsX7WBuqbLrNXqLU89GxWbNt6uIg78ET82qifUy3W6OVww6ZWobWub5oqZOVtwolfw=="], - "@esbuild/linux-arm64": ["@esbuild/linux-arm64@0.25.6", "", { "os": "linux", "cpu": "arm64" }, "sha512-b967hU0gqKd9Drsh/UuAm21Khpoh6mPBSgz8mKRq4P5mVK8bpA+hQzmm/ZwGVULSNBzKdZPQBRT3+WuVavcWsQ=="], + "@esbuild/linux-arm64": ["@esbuild/linux-arm64@0.25.5", "", { "os": "linux", "cpu": "arm64" }, "sha512-Z9kfb1v6ZlGbWj8EJk9T6czVEjjq2ntSYLY2cw6pAZl4oKtfgQuS4HOq41M/BcoLPzrUbNd+R4BXFyH//nHxVg=="], - "@esbuild/linux-ia32": ["@esbuild/linux-ia32@0.25.6", "", { "os": "linux", "cpu": "ia32" }, "sha512-aHWdQ2AAltRkLPOsKdi3xv0mZ8fUGPdlKEjIEhxCPm5yKEThcUjHpWB1idN74lfXGnZ5SULQSgtr5Qos5B0bPw=="], + "@esbuild/linux-ia32": ["@esbuild/linux-ia32@0.25.5", "", { "os": "linux", "cpu": "ia32" }, "sha512-sQ7l00M8bSv36GLV95BVAdhJ2QsIbCuCjh/uYrWiMQSUuV+LpXwIqhgJDcvMTj+VsQmqAHL2yYaasENvJ7CDKA=="], - "@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.25.6", "", { "os": "linux", "cpu": "none" }, "sha512-VgKCsHdXRSQ7E1+QXGdRPlQ/e08bN6WMQb27/TMfV+vPjjTImuT9PmLXupRlC90S1JeNNW5lzkAEO/McKeJ2yg=="], + "@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.25.5", "", { "os": "linux", "cpu": "none" }, "sha512-0ur7ae16hDUC4OL5iEnDb0tZHDxYmuQyhKhsPBV8f99f6Z9KQM02g33f93rNH5A30agMS46u2HP6qTdEt6Q1kg=="], - "@esbuild/linux-mips64el": ["@esbuild/linux-mips64el@0.25.6", "", { "os": "linux", "cpu": "none" }, "sha512-WViNlpivRKT9/py3kCmkHnn44GkGXVdXfdc4drNmRl15zVQ2+D2uFwdlGh6IuK5AAnGTo2qPB1Djppj+t78rzw=="], + "@esbuild/linux-mips64el": ["@esbuild/linux-mips64el@0.25.5", "", { "os": "linux", "cpu": "none" }, "sha512-kB/66P1OsHO5zLz0i6X0RxlQ+3cu0mkxS3TKFvkb5lin6uwZ/ttOkP3Z8lfR9mJOBk14ZwZ9182SIIWFGNmqmg=="], - "@esbuild/linux-ppc64": ["@esbuild/linux-ppc64@0.25.6", "", { "os": "linux", "cpu": "ppc64" }, 
"sha512-wyYKZ9NTdmAMb5730I38lBqVu6cKl4ZfYXIs31Baf8aoOtB4xSGi3THmDYt4BTFHk7/EcVixkOV2uZfwU3Q2Jw=="], + "@esbuild/linux-ppc64": ["@esbuild/linux-ppc64@0.25.5", "", { "os": "linux", "cpu": "ppc64" }, "sha512-UZCmJ7r9X2fe2D6jBmkLBMQetXPXIsZjQJCjgwpVDz+YMcS6oFR27alkgGv3Oqkv07bxdvw7fyB71/olceJhkQ=="], - "@esbuild/linux-riscv64": ["@esbuild/linux-riscv64@0.25.6", "", { "os": "linux", "cpu": "none" }, "sha512-KZh7bAGGcrinEj4qzilJ4hqTY3Dg2U82c8bv+e1xqNqZCrCyc+TL9AUEn5WGKDzm3CfC5RODE/qc96OcbIe33w=="], + "@esbuild/linux-riscv64": ["@esbuild/linux-riscv64@0.25.5", "", { "os": "linux", "cpu": "none" }, "sha512-kTxwu4mLyeOlsVIFPfQo+fQJAV9mh24xL+y+Bm6ej067sYANjyEw1dNHmvoqxJUCMnkBdKpvOn0Ahql6+4VyeA=="], - "@esbuild/linux-s390x": ["@esbuild/linux-s390x@0.25.6", "", { "os": "linux", "cpu": "s390x" }, "sha512-9N1LsTwAuE9oj6lHMyyAM+ucxGiVnEqUdp4v7IaMmrwb06ZTEVCIs3oPPplVsnjPfyjmxwHxHMF8b6vzUVAUGw=="], + "@esbuild/linux-s390x": ["@esbuild/linux-s390x@0.25.5", "", { "os": "linux", "cpu": "s390x" }, "sha512-K2dSKTKfmdh78uJ3NcWFiqyRrimfdinS5ErLSn3vluHNeHVnBAFWC8a4X5N+7FgVE1EjXS1QDZbpqZBjfrqMTQ=="], - "@esbuild/linux-x64": ["@esbuild/linux-x64@0.25.6", "", { "os": "linux", "cpu": "x64" }, "sha512-A6bJB41b4lKFWRKNrWoP2LHsjVzNiaurf7wyj/XtFNTsnPuxwEBWHLty+ZE0dWBKuSK1fvKgrKaNjBS7qbFKig=="], + "@esbuild/linux-x64": ["@esbuild/linux-x64@0.25.5", "", { "os": "linux", "cpu": "x64" }, "sha512-uhj8N2obKTE6pSZ+aMUbqq+1nXxNjZIIjCjGLfsWvVpy7gKCOL6rsY1MhRh9zLtUtAI7vpgLMK6DxjO8Qm9lJw=="], - "@esbuild/netbsd-arm64": ["@esbuild/netbsd-arm64@0.25.6", "", { "os": "none", "cpu": "arm64" }, "sha512-IjA+DcwoVpjEvyxZddDqBY+uJ2Snc6duLpjmkXm/v4xuS3H+3FkLZlDm9ZsAbF9rsfP3zeA0/ArNDORZgrxR/Q=="], + "@esbuild/netbsd-arm64": ["@esbuild/netbsd-arm64@0.25.5", "", { "os": "none", "cpu": "arm64" }, "sha512-pwHtMP9viAy1oHPvgxtOv+OkduK5ugofNTVDilIzBLpoWAM16r7b/mxBvfpuQDpRQFMfuVr5aLcn4yveGvBZvw=="], - "@esbuild/netbsd-x64": ["@esbuild/netbsd-x64@0.25.6", "", { "os": "none", "cpu": "x64" }, "sha512-dUXuZr5WenIDlMHdMkvDc1FAu4xdWixTCRgP7RQLBOkkGgwuuzaGSYcOpW4jFxzpzL1ejb8yF620UxAqnBrR9g=="], + "@esbuild/netbsd-x64": ["@esbuild/netbsd-x64@0.25.5", "", { "os": "none", "cpu": "x64" }, "sha512-WOb5fKrvVTRMfWFNCroYWWklbnXH0Q5rZppjq0vQIdlsQKuw6mdSihwSo4RV/YdQ5UCKKvBy7/0ZZYLBZKIbwQ=="], - "@esbuild/openbsd-arm64": ["@esbuild/openbsd-arm64@0.25.6", "", { "os": "openbsd", "cpu": "arm64" }, "sha512-l8ZCvXP0tbTJ3iaqdNf3pjaOSd5ex/e6/omLIQCVBLmHTlfXW3zAxQ4fnDmPLOB1x9xrcSi/xtCWFwCZRIaEwg=="], + "@esbuild/openbsd-arm64": ["@esbuild/openbsd-arm64@0.25.5", "", { "os": "openbsd", "cpu": "arm64" }, "sha512-7A208+uQKgTxHd0G0uqZO8UjK2R0DDb4fDmERtARjSHWxqMTye4Erz4zZafx7Di9Cv+lNHYuncAkiGFySoD+Mw=="], - "@esbuild/openbsd-x64": ["@esbuild/openbsd-x64@0.25.6", "", { "os": "openbsd", "cpu": "x64" }, "sha512-hKrmDa0aOFOr71KQ/19JC7az1P0GWtCN1t2ahYAf4O007DHZt/dW8ym5+CUdJhQ/qkZmI1HAF8KkJbEFtCL7gw=="], + "@esbuild/openbsd-x64": ["@esbuild/openbsd-x64@0.25.5", "", { "os": "openbsd", "cpu": "x64" }, "sha512-G4hE405ErTWraiZ8UiSoesH8DaCsMm0Cay4fsFWOOUcz8b8rC6uCvnagr+gnioEjWn0wC+o1/TAHt+It+MpIMg=="], - "@esbuild/openharmony-arm64": ["@esbuild/openharmony-arm64@0.25.6", "", { "os": "none", "cpu": "arm64" }, "sha512-+SqBcAWoB1fYKmpWoQP4pGtx+pUUC//RNYhFdbcSA16617cchuryuhOCRpPsjCblKukAckWsV+aQ3UKT/RMPcA=="], + "@esbuild/sunos-x64": ["@esbuild/sunos-x64@0.25.5", "", { "os": "sunos", "cpu": "x64" }, "sha512-l+azKShMy7FxzY0Rj4RCt5VD/q8mG/e+mDivgspo+yL8zW7qEwctQ6YqKX34DTEleFAvCIUviCFX1SDZRSyMQA=="], - "@esbuild/sunos-x64": ["@esbuild/sunos-x64@0.25.6", "", { "os": "sunos", "cpu": "x64" }, 
"sha512-dyCGxv1/Br7MiSC42qinGL8KkG4kX0pEsdb0+TKhmJZgCUDBGmyo1/ArCjNGiOLiIAgdbWgmWgib4HoCi5t7kA=="], + "@esbuild/win32-arm64": ["@esbuild/win32-arm64@0.25.5", "", { "os": "win32", "cpu": "arm64" }, "sha512-O2S7SNZzdcFG7eFKgvwUEZ2VG9D/sn/eIiz8XRZ1Q/DO5a3s76Xv0mdBzVM5j5R639lXQmPmSo0iRpHqUUrsxw=="], - "@esbuild/win32-arm64": ["@esbuild/win32-arm64@0.25.6", "", { "os": "win32", "cpu": "arm64" }, "sha512-42QOgcZeZOvXfsCBJF5Afw73t4veOId//XD3i+/9gSkhSV6Gk3VPlWncctI+JcOyERv85FUo7RxuxGy+z8A43Q=="], + "@esbuild/win32-ia32": ["@esbuild/win32-ia32@0.25.5", "", { "os": "win32", "cpu": "ia32" }, "sha512-onOJ02pqs9h1iMJ1PQphR+VZv8qBMQ77Klcsqv9CNW2w6yLqoURLcgERAIurY6QE63bbLuqgP9ATqajFLK5AMQ=="], - "@esbuild/win32-ia32": ["@esbuild/win32-ia32@0.25.6", "", { "os": "win32", "cpu": "ia32" }, "sha512-4AWhgXmDuYN7rJI6ORB+uU9DHLq/erBbuMoAuB4VWJTu5KtCgcKYPynF0YI1VkBNuEfjNlLrFr9KZPJzrtLkrQ=="], + "@esbuild/win32-x64": ["@esbuild/win32-x64@0.25.5", "", { "os": "win32", "cpu": "x64" }, "sha512-TXv6YnJ8ZMVdX+SXWVBo/0p8LTcrUYngpWjvm91TMjjBQii7Oz11Lw5lbDV5Y0TzuhSJHwiH4hEtC1I42mMS0g=="], - "@esbuild/win32-x64": ["@esbuild/win32-x64@0.25.6", "", { "os": "win32", "cpu": "x64" }, "sha512-NgJPHHbEpLQgDH2MjQu90pzW/5vvXIZ7KOnPyNBm92A6WgZ/7b6fJyUBjoumLqeOQQGqY2QjQxRo97ah4Sj0cA=="], + "@expressive-code/core": ["@expressive-code/core@0.41.2", "", { "dependencies": { "@ctrl/tinycolor": "^4.0.4", "hast-util-select": "^6.0.2", "hast-util-to-html": "^9.0.1", "hast-util-to-text": "^4.0.1", "hastscript": "^9.0.0", "postcss": "^8.4.38", "postcss-nested": "^6.0.1", "unist-util-visit": "^5.0.0", "unist-util-visit-parents": "^6.0.1" } }, "sha512-AJW5Tp9czbLqKMzwudL9Rv4js9afXBxkSGLmCNPq1iRgAYcx9NkTPJiSNCesjKRWoVC328AdSu6fqrD22zDgDg=="], - "@expressive-code/core": ["@expressive-code/core@0.41.3", "", { "dependencies": { "@ctrl/tinycolor": "^4.0.4", "hast-util-select": "^6.0.2", "hast-util-to-html": "^9.0.1", "hast-util-to-text": "^4.0.1", "hastscript": "^9.0.0", "postcss": "^8.4.38", "postcss-nested": "^6.0.1", "unist-util-visit": "^5.0.0", "unist-util-visit-parents": "^6.0.1" } }, "sha512-9qzohqU7O0+JwMEEgQhnBPOw5DtsQRBXhW++5fvEywsuX44vCGGof1SL5OvPElvNgaWZ4pFZAFSlkNOkGyLwSQ=="], + "@expressive-code/plugin-frames": ["@expressive-code/plugin-frames@0.41.2", "", { "dependencies": { "@expressive-code/core": "^0.41.2" } }, "sha512-pfy0hkJI4nbaONjmksFDcuHmIuyPTFmi1JpABe4q2ajskiJtfBf+WDAL2pg595R9JNoPrrH5+aT9lbkx2noicw=="], - "@expressive-code/plugin-frames": ["@expressive-code/plugin-frames@0.41.3", "", { "dependencies": { "@expressive-code/core": "^0.41.3" } }, "sha512-rFQtmf/3N2CK3Cq/uERweMTYZnBu+CwxBdHuOftEmfA9iBE7gTVvwpbh82P9ZxkPLvc40UMhYt7uNuAZexycRQ=="], + "@expressive-code/plugin-shiki": ["@expressive-code/plugin-shiki@0.41.2", "", { "dependencies": { "@expressive-code/core": "^0.41.2", "shiki": "^3.2.2" } }, "sha512-xD4zwqAkDccXqye+235BH5bN038jYiSMLfUrCOmMlzxPDGWdxJDk5z4uUB/aLfivEF2tXyO2zyaarL3Oqht0fQ=="], - "@expressive-code/plugin-shiki": ["@expressive-code/plugin-shiki@0.41.3", "", { "dependencies": { "@expressive-code/core": "^0.41.3", "shiki": "^3.2.2" } }, "sha512-RlTARoopzhFJIOVHLGvuXJ8DCEme/hjV+ZnRJBIxzxsKVpGPW4Oshqg9xGhWTYdHstTsxO663s0cdBLzZj9TQA=="], - - "@expressive-code/plugin-text-markers": ["@expressive-code/plugin-text-markers@0.41.3", "", { "dependencies": { "@expressive-code/core": "^0.41.3" } }, "sha512-SN8tkIzDpA0HLAscEYD2IVrfLiid6qEdE9QLlGVSxO1KEw7qYvjpbNBQjUjMr5/jvTJ7ys6zysU2vLPHE0sb2g=="], + "@expressive-code/plugin-text-markers": ["@expressive-code/plugin-text-markers@0.41.2", "", { "dependencies": { 
"@expressive-code/core": "^0.41.2" } }, "sha512-JFWBz2qYxxJOJkkWf96LpeolbnOqJY95TvwYc0hXIHf9oSWV0h0SY268w/5N3EtQaD9KktzDE+VIVwb9jdb3nw=="], "@fastify/busboy": ["@fastify/busboy@2.1.1", "", {}, "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA=="], + "@flystorage/dynamic-import": ["@flystorage/dynamic-import@1.0.0", "", {}, "sha512-CIbIUrBdaPFyKnkVBaqzksvzNtsMSXITR/G/6zlil3MBnPFq2LX+X4Mv5p2XOmv/3OulFs/ff2SNb+5dc2Twtg=="], + + "@flystorage/file-storage": ["@flystorage/file-storage@1.1.0", "", {}, "sha512-25Gd5EsXDmhHrK5orpRuVqebQms1Cm9m5ACMZ0sVDX+Sbl1V0G88CbcWt7mEoWRYLvQ1U072htqg6Sav76ZlVA=="], + + "@flystorage/local-fs": ["@flystorage/local-fs@1.1.0", "", { "dependencies": { "@flystorage/dynamic-import": "^1.0.0", "@flystorage/file-storage": "^1.1.0", "file-type": "^20.5.0", "mime-types": "^3.0.1" } }, "sha512-dbErRhqmCv2UF0zPdeH7iVWuVeTWAJHuJD/mXDe2V370/SL7XIvdE3ditBHWC+1SzBKXJ0lkykOenwlum+oqIA=="], + "@fontsource/ibm-plex-mono": ["@fontsource/ibm-plex-mono@5.2.5", "", {}, "sha512-G09N3GfuT9qj3Ax2FDZvKqZttzM3v+cco2l8uXamhKyXLdmlaUDH5o88/C3vtTHj2oT7yRKsvxz9F+BXbWKMYA=="], - "@hono/zod-validator": ["@hono/zod-validator@0.4.2", "", { "peerDependencies": { "hono": ">=3.9.0", "zod": "^3.19.1" } }, "sha512-1rrlBg+EpDPhzOV4hT9pxr5+xDVmKuz6YJl+la7VCwK6ass5ldyKm5fD+umJdV2zhHD6jROoCCv8NbTwyfhT0g=="], + "@hapi/bourne": ["@hapi/bourne@2.1.0", "", {}, "sha512-i1BpaNDVLJdRBEKeJWkVO6tYX6DMFBuwMhSuWqLsY4ufeTKGVuV5rBsUhxPayXqnnWHgXUAmWK16H/ykO5Wj4Q=="], + + "@hono/zod-validator": ["@hono/zod-validator@0.5.0", "", { "peerDependencies": { "hono": ">=3.9.0", "zod": "^3.19.1" } }, "sha512-ds5bW6DCgAnNHP33E3ieSbaZFd5dkV52ZjyaXtGoR06APFrCtzAsKZxTHwOrJNBdXsi0e5wNwo5L4nVEVnJUdg=="], "@img/sharp-darwin-arm64": ["@img/sharp-darwin-arm64@0.33.5", "", { "optionalDependencies": { "@img/sharp-libvips-darwin-arm64": "1.0.4" }, "os": "darwin", "cpu": "arm64" }, "sha512-UT4p+iz/2H4twwAoLCqfA9UH5pI6DggwKEGuaPy7nCVQ8ZsiY5PIcrRvD1DzuY3qYL07NtIQcWnBSY/heikIFQ=="], @@ -317,11 +326,13 @@ "@img/sharp-win32-x64": ["@img/sharp-win32-x64@0.33.5", "", { "os": "win32", "cpu": "x64" }, "sha512-MpY/o8/8kj+EcnxwvrP4aTJSWw/aZ7JIGR4aBeZkZw5B7/Jn+tY9/VNwtcoGmdT7GfggGIU4kygOMSbYnOrAbg=="], - "@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.3.12", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.0", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-OuLGC46TjB5BbN1dH8JULVVZY4WTdkF7tV9Ys6wLL1rubZnCMstOhNHueU5bLCrnRuDhKPDM4g6sw4Bel5Gzqg=="], + "@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.3.8", "", { "dependencies": { "@jridgewell/set-array": "^1.2.1", "@jridgewell/sourcemap-codec": "^1.4.10", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA=="], "@jridgewell/resolve-uri": ["@jridgewell/resolve-uri@3.1.2", "", {}, "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw=="], - "@jridgewell/sourcemap-codec": ["@jridgewell/sourcemap-codec@1.5.4", "", {}, "sha512-VT2+G1VQs/9oz078bLrYbecdZKs912zQlkelYpuf+SXF+QvZDYJlbx/LSx+meSAwdDFnF8FVXW92AVjjkVmgFw=="], + "@jridgewell/set-array": ["@jridgewell/set-array@1.2.1", "", {}, "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A=="], + + "@jridgewell/sourcemap-codec": ["@jridgewell/sourcemap-codec@1.5.0", "", {}, "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ=="], "@jridgewell/trace-mapping": 
["@jridgewell/trace-mapping@0.3.9", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.0.3", "@jridgewell/sourcemap-codec": "^1.4.10" } }, "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ=="], @@ -331,43 +342,7 @@ "@mixmark-io/domino": ["@mixmark-io/domino@2.2.0", "", {}, "sha512-Y28PR25bHXUg88kCV7nivXrP2Nj2RueZ3/l/jdx6J9f8J4nsEGcgX0Qe6lt7Pa+J79+kPiJU3LguR6O/6zrLOw=="], - "@modelcontextprotocol/sdk": ["@modelcontextprotocol/sdk@1.15.1", "", { "dependencies": { "ajv": "^6.12.6", "content-type": "^1.0.5", "cors": "^2.8.5", "cross-spawn": "^7.0.5", "eventsource": "^3.0.2", "eventsource-parser": "^3.0.0", "express": "^5.0.1", "express-rate-limit": "^7.5.0", "pkce-challenge": "^5.0.0", "raw-body": "^3.0.0", "zod": "^3.23.8", "zod-to-json-schema": "^3.24.1" } }, "sha512-W/XlN9c528yYn+9MQkVjxiTPgPxoxt+oczfjHBDsJx0+59+O7B75Zhsp0B16Xbwbz8ANISDajh6+V7nIcPMc5w=="], - - "@octokit/auth-app": ["@octokit/auth-app@8.0.1", "", { "dependencies": { "@octokit/auth-oauth-app": "^9.0.1", "@octokit/auth-oauth-user": "^6.0.0", "@octokit/request": "^10.0.2", "@octokit/request-error": "^7.0.0", "@octokit/types": "^14.0.0", "toad-cache": "^3.7.0", "universal-github-app-jwt": "^2.2.0", "universal-user-agent": "^7.0.0" } }, "sha512-P2J5pB3pjiGwtJX4WqJVYCtNkcZ+j5T2Wm14aJAEIC3WJOrv12jvBley3G1U/XI8q9o1A7QMG54LiFED2BiFlg=="], - - "@octokit/auth-oauth-app": ["@octokit/auth-oauth-app@9.0.1", "", { "dependencies": { "@octokit/auth-oauth-device": "^8.0.1", "@octokit/auth-oauth-user": "^6.0.0", "@octokit/request": "^10.0.2", "@octokit/types": "^14.0.0", "universal-user-agent": "^7.0.0" } }, "sha512-TthWzYxuHKLAbmxdFZwFlmwVyvynpyPmjwc+2/cI3cvbT7mHtsAW9b1LvQaNnAuWL+pFnqtxdmrU8QpF633i1g=="], - - "@octokit/auth-oauth-device": ["@octokit/auth-oauth-device@8.0.1", "", { "dependencies": { "@octokit/oauth-methods": "^6.0.0", "@octokit/request": "^10.0.2", "@octokit/types": "^14.0.0", "universal-user-agent": "^7.0.0" } }, "sha512-TOqId/+am5yk9zor0RGibmlqn4V0h8vzjxlw/wYr3qzkQxl8aBPur384D1EyHtqvfz0syeXji4OUvKkHvxk/Gw=="], - - "@octokit/auth-oauth-user": ["@octokit/auth-oauth-user@6.0.0", "", { "dependencies": { "@octokit/auth-oauth-device": "^8.0.1", "@octokit/oauth-methods": "^6.0.0", "@octokit/request": "^10.0.2", "@octokit/types": "^14.0.0", "universal-user-agent": "^7.0.0" } }, "sha512-GV9IW134PHsLhtUad21WIeP9mlJ+QNpFd6V9vuPWmaiN25HEJeEQUcS4y5oRuqCm9iWDLtfIs+9K8uczBXKr6A=="], - - "@octokit/auth-token": ["@octokit/auth-token@6.0.0", "", {}, "sha512-P4YJBPdPSpWTQ1NU4XYdvHvXJJDxM6YwpS0FZHRgP7YFkdVxsWcpWGy/NVqlAA7PcPCnMacXlRm1y2PFZRWL/w=="], - - "@octokit/core": ["@octokit/core@7.0.3", "", { "dependencies": { "@octokit/auth-token": "^6.0.0", "@octokit/graphql": "^9.0.1", "@octokit/request": "^10.0.2", "@octokit/request-error": "^7.0.0", "@octokit/types": "^14.0.0", "before-after-hook": "^4.0.0", "universal-user-agent": "^7.0.0" } }, "sha512-oNXsh2ywth5aowwIa7RKtawnkdH6LgU1ztfP9AIUCQCvzysB+WeU8o2kyyosDPwBZutPpjZDKPQGIzzrfTWweQ=="], - - "@octokit/endpoint": ["@octokit/endpoint@11.0.0", "", { "dependencies": { "@octokit/types": "^14.0.0", "universal-user-agent": "^7.0.2" } }, "sha512-hoYicJZaqISMAI3JfaDr1qMNi48OctWuOih1m80bkYow/ayPw6Jj52tqWJ6GEoFTk1gBqfanSoI1iY99Z5+ekQ=="], - - "@octokit/graphql": ["@octokit/graphql@9.0.1", "", { "dependencies": { "@octokit/request": "^10.0.2", "@octokit/types": "^14.0.0", "universal-user-agent": "^7.0.0" } }, "sha512-j1nQNU1ZxNFx2ZtKmL4sMrs4egy5h65OMDmSbVyuCzjOcwsHq6EaYjOTGXPQxgfiN8dJ4CriYHk6zF050WEULg=="], - - "@octokit/oauth-authorization-url": 
["@octokit/oauth-authorization-url@8.0.0", "", {}, "sha512-7QoLPRh/ssEA/HuHBHdVdSgF8xNLz/Bc5m9fZkArJE5bb6NmVkDm3anKxXPmN1zh6b5WKZPRr3697xKT/yM3qQ=="], - - "@octokit/oauth-methods": ["@octokit/oauth-methods@6.0.0", "", { "dependencies": { "@octokit/oauth-authorization-url": "^8.0.0", "@octokit/request": "^10.0.2", "@octokit/request-error": "^7.0.0", "@octokit/types": "^14.0.0" } }, "sha512-Q8nFIagNLIZgM2odAraelMcDssapc+lF+y3OlcIPxyAU+knefO8KmozGqfnma1xegRDP4z5M73ABsamn72bOcA=="], - - "@octokit/openapi-types": ["@octokit/openapi-types@25.1.0", "", {}, "sha512-idsIggNXUKkk0+BExUn1dQ92sfysJrje03Q0bv0e+KPLrvyqZF8MnBpFz8UNfYDwB3Ie7Z0TByjWfzxt7vseaA=="], - - "@octokit/plugin-paginate-rest": ["@octokit/plugin-paginate-rest@13.1.1", "", { "dependencies": { "@octokit/types": "^14.1.0" }, "peerDependencies": { "@octokit/core": ">=6" } }, "sha512-q9iQGlZlxAVNRN2jDNskJW/Cafy7/XE52wjZ5TTvyhyOD904Cvx//DNyoO3J/MXJ0ve3rPoNWKEg5iZrisQSuw=="], - - "@octokit/plugin-request-log": ["@octokit/plugin-request-log@6.0.0", "", { "peerDependencies": { "@octokit/core": ">=6" } }, "sha512-UkOzeEN3W91/eBq9sPZNQ7sUBvYCqYbrrD8gTbBuGtHEuycE4/awMXcYvx6sVYo7LypPhmQwwpUe4Yyu4QZN5Q=="], - - "@octokit/plugin-rest-endpoint-methods": ["@octokit/plugin-rest-endpoint-methods@16.0.0", "", { "dependencies": { "@octokit/types": "^14.1.0" }, "peerDependencies": { "@octokit/core": ">=6" } }, "sha512-kJVUQk6/dx/gRNLWUnAWKFs1kVPn5O5CYZyssyEoNYaFedqZxsfYs7DwI3d67hGz4qOwaJ1dpm07hOAD1BXx6g=="], - - "@octokit/request": ["@octokit/request@10.0.3", "", { "dependencies": { "@octokit/endpoint": "^11.0.0", "@octokit/request-error": "^7.0.0", "@octokit/types": "^14.0.0", "fast-content-type-parse": "^3.0.0", "universal-user-agent": "^7.0.2" } }, "sha512-V6jhKokg35vk098iBqp2FBKunk3kMTXlmq+PtbV9Gl3TfskWlebSofU9uunVKhUN7xl+0+i5vt0TGTG8/p/7HA=="], - - "@octokit/request-error": ["@octokit/request-error@7.0.0", "", { "dependencies": { "@octokit/types": "^14.0.0" } }, "sha512-KRA7VTGdVyJlh0cP5Tf94hTiYVVqmt2f3I6mnimmaVz4UG3gQV/k4mDJlJv3X67iX6rmN7gSHCF8ssqeMnmhZg=="], - - "@octokit/rest": ["@octokit/rest@22.0.0", "", { "dependencies": { "@octokit/core": "^7.0.2", "@octokit/plugin-paginate-rest": "^13.0.1", "@octokit/plugin-request-log": "^6.0.0", "@octokit/plugin-rest-endpoint-methods": "^16.0.0" } }, "sha512-z6tmTu9BTnw51jYGulxrlernpsQYXpui1RK21vmXn8yF5bp6iX16yfTtJYGK5Mh1qDkvDOmp2n8sRMcQmR8jiA=="], - - "@octokit/types": ["@octokit/types@14.1.0", "", { "dependencies": { "@octokit/openapi-types": "^25.1.0" } }, "sha512-1y6DgTy8Jomcpu33N+p5w58l6xyt55Ar2I91RPiIA0xCJBXyUAhXCcmZaDWSANiha7R9a6qJJ2CRomGPZ6f46g=="], + "@modelcontextprotocol/sdk": ["@modelcontextprotocol/sdk@1.6.1", "", { "dependencies": { "content-type": "^1.0.5", "cors": "^2.8.5", "eventsource": "^3.0.2", "express": "^5.0.1", "express-rate-limit": "^7.5.0", "pkce-challenge": "^4.1.0", "raw-body": "^3.0.0", "zod": "^3.23.8", "zod-to-json-schema": "^3.24.1" } }, "sha512-oxzMzYCkZHMntzuyerehK3fV6A2Kwh5BD6CGEJSVDU2QNEhfLOptf2X7esQgaHZXHZY0oHmMsOtIDLP71UJXgA=="], "@openauthjs/openauth": ["@openauthjs/openauth@0.4.3", "", { "dependencies": { "@standard-schema/spec": "1.0.0-beta.3", "aws4fetch": "1.0.20", "jose": "5.9.6" }, "peerDependencies": { "arctic": "^2.2.2", "hono": "^4.0.0" } }, "sha512-RlnjqvHzqcbFVymEwhlUEuac4utA5h4nhSK/i2szZuQmxTIqbGUxZ+nM+avM+VV4Ing+/ZaNLKILoXS3yrkOOw=="], @@ -399,53 +374,47 @@ "@pagefind/windows-x64": ["@pagefind/windows-x64@1.3.0", "", { "os": "win32", "cpu": "x64" }, "sha512-BR1bIRWOMqkf8IoU576YDhij1Wd/Zf2kX/kCI0b2qzCKC8wcc2GQJaaRMCpzvCCrmliO4vtJ6RITp/AnoYUUmQ=="], - 
"@poppinss/colors": ["@poppinss/colors@4.1.5", "", { "dependencies": { "kleur": "^4.1.5" } }, "sha512-FvdDqtcRCtz6hThExcFOgW0cWX+xwSMWcRuQe5ZEb2m7cVQOAVZOIMt+/v9RxGiD9/OY16qJBXK4CVKWAPalBw=="], + "@rollup/pluginutils": ["@rollup/pluginutils@5.1.4", "", { "dependencies": { "@types/estree": "^1.0.0", "estree-walker": "^2.0.2", "picomatch": "^4.0.2" }, "peerDependencies": { "rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0" }, "optionalPeers": ["rollup"] }, "sha512-USm05zrsFxYLPdWWq+K3STlWiT/3ELn3RcV5hJMghpeAIhxfsUIg6mt12CBJBInWMV4VneoV7SfGv8xIwo2qNQ=="], - "@poppinss/dumper": ["@poppinss/dumper@0.6.4", "", { "dependencies": { "@poppinss/colors": "^4.1.5", "@sindresorhus/is": "^7.0.2", "supports-color": "^10.0.0" } }, "sha512-iG0TIdqv8xJ3Lt9O8DrPRxw1MRLjNpoqiSGU03P/wNLP/s0ra0udPJ1J2Tx5M0J3H/cVyEgpbn8xUKRY9j59kQ=="], + "@rollup/rollup-android-arm-eabi": ["@rollup/rollup-android-arm-eabi@4.41.1", "", { "os": "android", "cpu": "arm" }, "sha512-NELNvyEWZ6R9QMkiytB4/L4zSEaBC03KIXEghptLGLZWJ6VPrL63ooZQCOnlx36aQPGhzuOMwDerC1Eb2VmrLw=="], - "@poppinss/exception": ["@poppinss/exception@1.2.2", "", {}, "sha512-m7bpKCD4QMlFCjA/nKTs23fuvoVFoA83brRKmObCUNmi/9tVu8Ve3w4YQAnJu4q3Tjf5fr685HYIC/IA2zHRSg=="], + "@rollup/rollup-android-arm64": ["@rollup/rollup-android-arm64@4.41.1", "", { "os": "android", "cpu": "arm64" }, "sha512-DXdQe1BJ6TK47ukAoZLehRHhfKnKg9BjnQYUu9gzhI8Mwa1d2fzxA1aw2JixHVl403bwp1+/o/NhhHtxWJBgEA=="], - "@rollup/pluginutils": ["@rollup/pluginutils@5.2.0", "", { "dependencies": { "@types/estree": "^1.0.0", "estree-walker": "^2.0.2", "picomatch": "^4.0.2" }, "peerDependencies": { "rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0" }, "optionalPeers": ["rollup"] }, "sha512-qWJ2ZTbmumwiLFomfzTyt5Kng4hwPi9rwCYN4SHb6eaRU1KNO4ccxINHr/VhH4GgPlt1XfSTLX2LBTme8ne4Zw=="], + "@rollup/rollup-darwin-arm64": ["@rollup/rollup-darwin-arm64@4.41.1", "", { "os": "darwin", "cpu": "arm64" }, "sha512-5afxvwszzdulsU2w8JKWwY8/sJOLPzf0e1bFuvcW5h9zsEg+RQAojdW0ux2zyYAz7R8HvvzKCjLNJhVq965U7w=="], - "@rollup/rollup-android-arm-eabi": ["@rollup/rollup-android-arm-eabi@4.45.0", "", { "os": "android", "cpu": "arm" }, "sha512-2o/FgACbji4tW1dzXOqAV15Eu7DdgbKsF2QKcxfG4xbh5iwU7yr5RRP5/U+0asQliSYv5M4o7BevlGIoSL0LXg=="], + "@rollup/rollup-darwin-x64": ["@rollup/rollup-darwin-x64@4.41.1", "", { "os": "darwin", "cpu": "x64" }, "sha512-egpJACny8QOdHNNMZKf8xY0Is6gIMz+tuqXlusxquWu3F833DcMwmGM7WlvCO9sB3OsPjdC4U0wHw5FabzCGZg=="], - "@rollup/rollup-android-arm64": ["@rollup/rollup-android-arm64@4.45.0", "", { "os": "android", "cpu": "arm64" }, "sha512-PSZ0SvMOjEAxwZeTx32eI/j5xSYtDCRxGu5k9zvzoY77xUNssZM+WV6HYBLROpY5CkXsbQjvz40fBb7WPwDqtQ=="], + "@rollup/rollup-freebsd-arm64": ["@rollup/rollup-freebsd-arm64@4.41.1", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-DBVMZH5vbjgRk3r0OzgjS38z+atlupJ7xfKIDJdZZL6sM6wjfDNo64aowcLPKIx7LMQi8vybB56uh1Ftck/Atg=="], - "@rollup/rollup-darwin-arm64": ["@rollup/rollup-darwin-arm64@4.45.0", "", { "os": "darwin", "cpu": "arm64" }, "sha512-BA4yPIPssPB2aRAWzmqzQ3y2/KotkLyZukVB7j3psK/U3nVJdceo6qr9pLM2xN6iRP/wKfxEbOb1yrlZH6sYZg=="], + "@rollup/rollup-freebsd-x64": ["@rollup/rollup-freebsd-x64@4.41.1", "", { "os": "freebsd", "cpu": "x64" }, "sha512-3FkydeohozEskBxNWEIbPfOE0aqQgB6ttTkJ159uWOFn42VLyfAiyD9UK5mhu+ItWzft60DycIN1Xdgiy8o/SA=="], - "@rollup/rollup-darwin-x64": ["@rollup/rollup-darwin-x64@4.45.0", "", { "os": "darwin", "cpu": "x64" }, "sha512-Pr2o0lvTwsiG4HCr43Zy9xXrHspyMvsvEw4FwKYqhli4FuLE5FjcZzuQ4cfPe0iUFCvSQG6lACI0xj74FDZKRA=="], + "@rollup/rollup-linux-arm-gnueabihf": ["@rollup/rollup-linux-arm-gnueabihf@4.41.1", 
"", { "os": "linux", "cpu": "arm" }, "sha512-wC53ZNDgt0pqx5xCAgNunkTzFE8GTgdZ9EwYGVcg+jEjJdZGtq9xPjDnFgfFozQI/Xm1mh+D9YlYtl+ueswNEg=="], - "@rollup/rollup-freebsd-arm64": ["@rollup/rollup-freebsd-arm64@4.45.0", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-lYE8LkE5h4a/+6VnnLiL14zWMPnx6wNbDG23GcYFpRW1V9hYWHAw9lBZ6ZUIrOaoK7NliF1sdwYGiVmziUF4vA=="], + "@rollup/rollup-linux-arm-musleabihf": ["@rollup/rollup-linux-arm-musleabihf@4.41.1", "", { "os": "linux", "cpu": "arm" }, "sha512-jwKCca1gbZkZLhLRtsrka5N8sFAaxrGz/7wRJ8Wwvq3jug7toO21vWlViihG85ei7uJTpzbXZRcORotE+xyrLA=="], - "@rollup/rollup-freebsd-x64": ["@rollup/rollup-freebsd-x64@4.45.0", "", { "os": "freebsd", "cpu": "x64" }, "sha512-PVQWZK9sbzpvqC9Q0GlehNNSVHR+4m7+wET+7FgSnKG3ci5nAMgGmr9mGBXzAuE5SvguCKJ6mHL6vq1JaJ/gvw=="], + "@rollup/rollup-linux-arm64-gnu": ["@rollup/rollup-linux-arm64-gnu@4.41.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-g0UBcNknsmmNQ8V2d/zD2P7WWfJKU0F1nu0k5pW4rvdb+BIqMm8ToluW/eeRmxCared5dD76lS04uL4UaNgpNA=="], - "@rollup/rollup-linux-arm-gnueabihf": ["@rollup/rollup-linux-arm-gnueabihf@4.45.0", "", { "os": "linux", "cpu": "arm" }, "sha512-hLrmRl53prCcD+YXTfNvXd776HTxNh8wPAMllusQ+amcQmtgo3V5i/nkhPN6FakW+QVLoUUr2AsbtIRPFU3xIA=="], + "@rollup/rollup-linux-arm64-musl": ["@rollup/rollup-linux-arm64-musl@4.41.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-XZpeGB5TKEZWzIrj7sXr+BEaSgo/ma/kCgrZgL0oo5qdB1JlTzIYQKel/RmhT6vMAvOdM2teYlAaOGJpJ9lahg=="], - "@rollup/rollup-linux-arm-musleabihf": ["@rollup/rollup-linux-arm-musleabihf@4.45.0", "", { "os": "linux", "cpu": "arm" }, "sha512-XBKGSYcrkdiRRjl+8XvrUR3AosXU0NvF7VuqMsm7s5nRy+nt58ZMB19Jdp1RdqewLcaYnpk8zeVs/4MlLZEJxw=="], + "@rollup/rollup-linux-loongarch64-gnu": ["@rollup/rollup-linux-loongarch64-gnu@4.41.1", "", { "os": "linux", "cpu": "none" }, "sha512-bkCfDJ4qzWfFRCNt5RVV4DOw6KEgFTUZi2r2RuYhGWC8WhCA8lCAJhDeAmrM/fdiAH54m0mA0Vk2FGRPyzI+tw=="], - "@rollup/rollup-linux-arm64-gnu": ["@rollup/rollup-linux-arm64-gnu@4.45.0", "", { "os": "linux", "cpu": "arm64" }, "sha512-fRvZZPUiBz7NztBE/2QnCS5AtqLVhXmUOPj9IHlfGEXkapgImf4W9+FSkL8cWqoAjozyUzqFmSc4zh2ooaeF6g=="], + "@rollup/rollup-linux-powerpc64le-gnu": ["@rollup/rollup-linux-powerpc64le-gnu@4.41.1", "", { "os": "linux", "cpu": "ppc64" }, "sha512-3mr3Xm+gvMX+/8EKogIZSIEF0WUu0HL9di+YWlJpO8CQBnoLAEL/roTCxuLncEdgcfJcvA4UMOf+2dnjl4Ut1A=="], - "@rollup/rollup-linux-arm64-musl": ["@rollup/rollup-linux-arm64-musl@4.45.0", "", { "os": "linux", "cpu": "arm64" }, "sha512-Btv2WRZOcUGi8XU80XwIvzTg4U6+l6D0V6sZTrZx214nrwxw5nAi8hysaXj/mctyClWgesyuxbeLylCBNauimg=="], + "@rollup/rollup-linux-riscv64-gnu": ["@rollup/rollup-linux-riscv64-gnu@4.41.1", "", { "os": "linux", "cpu": "none" }, "sha512-3rwCIh6MQ1LGrvKJitQjZFuQnT2wxfU+ivhNBzmxXTXPllewOF7JR1s2vMX/tWtUYFgphygxjqMl76q4aMotGw=="], - "@rollup/rollup-linux-loongarch64-gnu": ["@rollup/rollup-linux-loongarch64-gnu@4.45.0", "", { "os": "linux", "cpu": "none" }, "sha512-Li0emNnwtUZdLwHjQPBxn4VWztcrw/h7mgLyHiEI5Z0MhpeFGlzaiBHpSNVOMB/xucjXTTcO+dhv469Djr16KA=="], + "@rollup/rollup-linux-riscv64-musl": ["@rollup/rollup-linux-riscv64-musl@4.41.1", "", { "os": "linux", "cpu": "none" }, "sha512-LdIUOb3gvfmpkgFZuccNa2uYiqtgZAz3PTzjuM5bH3nvuy9ty6RGc/Q0+HDFrHrizJGVpjnTZ1yS5TNNjFlklw=="], - "@rollup/rollup-linux-powerpc64le-gnu": ["@rollup/rollup-linux-powerpc64le-gnu@4.45.0", "", { "os": "linux", "cpu": "ppc64" }, "sha512-sB8+pfkYx2kvpDCfd63d5ScYT0Fz1LO6jIb2zLZvmK9ob2D8DeVqrmBDE0iDK8KlBVmsTNzrjr3G1xV4eUZhSw=="], + "@rollup/rollup-linux-s390x-gnu": ["@rollup/rollup-linux-s390x-gnu@4.41.1", "", { "os": 
"linux", "cpu": "s390x" }, "sha512-oIE6M8WC9ma6xYqjvPhzZYk6NbobIURvP/lEbh7FWplcMO6gn7MM2yHKA1eC/GvYwzNKK/1LYgqzdkZ8YFxR8g=="], - "@rollup/rollup-linux-riscv64-gnu": ["@rollup/rollup-linux-riscv64-gnu@4.45.0", "", { "os": "linux", "cpu": "none" }, "sha512-5GQ6PFhh7E6jQm70p1aW05G2cap5zMOvO0se5JMecHeAdj5ZhWEHbJ4hiKpfi1nnnEdTauDXxPgXae/mqjow9w=="], + "@rollup/rollup-linux-x64-gnu": ["@rollup/rollup-linux-x64-gnu@4.41.1", "", { "os": "linux", "cpu": "x64" }, "sha512-cWBOvayNvA+SyeQMp79BHPK8ws6sHSsYnK5zDcsC3Hsxr1dgTABKjMnMslPq1DvZIp6uO7kIWhiGwaTdR4Og9A=="], - "@rollup/rollup-linux-riscv64-musl": ["@rollup/rollup-linux-riscv64-musl@4.45.0", "", { "os": "linux", "cpu": "none" }, "sha512-N/euLsBd1rekWcuduakTo/dJw6U6sBP3eUq+RXM9RNfPuWTvG2w/WObDkIvJ2KChy6oxZmOSC08Ak2OJA0UiAA=="], + "@rollup/rollup-linux-x64-musl": ["@rollup/rollup-linux-x64-musl@4.41.1", "", { "os": "linux", "cpu": "x64" }, "sha512-y5CbN44M+pUCdGDlZFzGGBSKCA4A/J2ZH4edTYSSxFg7ce1Xt3GtydbVKWLlzL+INfFIZAEg1ZV6hh9+QQf9YQ=="], - "@rollup/rollup-linux-s390x-gnu": ["@rollup/rollup-linux-s390x-gnu@4.45.0", "", { "os": "linux", "cpu": "s390x" }, "sha512-2l9sA7d7QdikL0xQwNMO3xURBUNEWyHVHfAsHsUdq+E/pgLTUcCE+gih5PCdmyHmfTDeXUWVhqL0WZzg0nua3g=="], + "@rollup/rollup-win32-arm64-msvc": ["@rollup/rollup-win32-arm64-msvc@4.41.1", "", { "os": "win32", "cpu": "arm64" }, "sha512-lZkCxIrjlJlMt1dLO/FbpZbzt6J/A8p4DnqzSa4PWqPEUUUnzXLeki/iyPLfV0BmHItlYgHUqJe+3KiyydmiNQ=="], - "@rollup/rollup-linux-x64-gnu": ["@rollup/rollup-linux-x64-gnu@4.45.0", "", { "os": "linux", "cpu": "x64" }, "sha512-XZdD3fEEQcwG2KrJDdEQu7NrHonPxxaV0/w2HpvINBdcqebz1aL+0vM2WFJq4DeiAVT6F5SUQas65HY5JDqoPw=="], + "@rollup/rollup-win32-ia32-msvc": ["@rollup/rollup-win32-ia32-msvc@4.41.1", "", { "os": "win32", "cpu": "ia32" }, "sha512-+psFT9+pIh2iuGsxFYYa/LhS5MFKmuivRsx9iPJWNSGbh2XVEjk90fmpUEjCnILPEPJnikAU6SFDiEUyOv90Pg=="], - "@rollup/rollup-linux-x64-musl": ["@rollup/rollup-linux-x64-musl@4.45.0", "", { "os": "linux", "cpu": "x64" }, "sha512-7ayfgvtmmWgKWBkCGg5+xTQ0r5V1owVm67zTrsEY1008L5ro7mCyGYORomARt/OquB9KY7LpxVBZes+oSniAAQ=="], - - "@rollup/rollup-win32-arm64-msvc": ["@rollup/rollup-win32-arm64-msvc@4.45.0", "", { "os": "win32", "cpu": "arm64" }, "sha512-B+IJgcBnE2bm93jEW5kHisqvPITs4ddLOROAcOc/diBgrEiQJJ6Qcjby75rFSmH5eMGrqJryUgJDhrfj942apQ=="], - - "@rollup/rollup-win32-ia32-msvc": ["@rollup/rollup-win32-ia32-msvc@4.45.0", "", { "os": "win32", "cpu": "ia32" }, "sha512-+CXwwG66g0/FpWOnP/v1HnrGVSOygK/osUbu3wPRy8ECXjoYKjRAyfxYpDQOfghC5qPJYLPH0oN4MCOjwgdMug=="], - - "@rollup/rollup-win32-x64-msvc": ["@rollup/rollup-win32-x64-msvc@4.45.0", "", { "os": "win32", "cpu": "x64" }, "sha512-SRf1cytG7wqcHVLrBc9VtPK4pU5wxiB/lNIkNmW2ApKXIg+RpqwHfsaEK+e7eH4A1BpI6BX/aBWXxZCIrJg3uA=="], + "@rollup/rollup-win32-x64-msvc": ["@rollup/rollup-win32-x64-msvc@4.41.1", "", { "os": "win32", "cpu": "x64" }, "sha512-Wq2zpapRYLfi4aKxf2Xff0tN+7slj2d4R87WEzqw7ZLsVvO5zwYCIuEGSZYiK41+GlwUo1HiR+GdkLEJnCKTCw=="], "@shikijs/core": ["@shikijs/core@3.4.2", "", { "dependencies": { "@shikijs/types": "3.4.2", "@shikijs/vscode-textmate": "^10.0.2", "@types/hast": "^3.0.4", "hast-util-to-html": "^9.0.5" } }, "sha512-AG8vnSi1W2pbgR2B911EfGqtLE9c4hQBYkv/x7Z+Kt0VxhgQKcW7UNDVYsu9YxwV6u+OJrvdJrMq6DNWoBjihQ=="], @@ -463,8 +432,6 @@ "@shikijs/vscode-textmate": ["@shikijs/vscode-textmate@10.0.2", "", {}, "sha512-83yeghZ2xxin3Nj8z1NMd/NCuca+gsYXswywDy5bHvwlWL8tpTQmzGeUuHd9FC3E/SBEMvzJRwWEOz5gGes9Qg=="], - "@sindresorhus/is": ["@sindresorhus/is@7.0.2", "", {}, 
"sha512-d9xRovfKNz1SKieM0qJdO+PQonjnnIfSNWfHYnBSJ9hkjm0ZPw6HlxscDXYstp3z+7V2GOFHc+J0CYrYTjqCJw=="], - "@smithy/eventstream-codec": ["@smithy/eventstream-codec@4.0.4", "", { "dependencies": { "@aws-crypto/crc32": "5.2.0", "@smithy/types": "^4.3.1", "@smithy/util-hex-encoding": "^4.0.0", "tslib": "^2.6.2" } }, "sha512-7XoWfZqWb/QoR/rAU4VSi0mWnO2vu9/ltS6JZ5ZSZv0eovLVfDfu0/AX4ub33RsJTOth3TiFWSHS5YdztvFnig=="], "@smithy/is-array-buffer": ["@smithy/is-array-buffer@4.0.0", "", { "dependencies": { "tslib": "^2.6.2" } }, "sha512-saYhF8ZZNoJDTvJBEWgeBccCg+yvp1CX+ed12yORU3NilJScfc6gfch2oVb4QgxZrGUx3/ZJlb+c/dJbyupxlw=="], @@ -477,12 +444,14 @@ "@smithy/util-utf8": ["@smithy/util-utf8@4.0.0", "", { "dependencies": { "@smithy/util-buffer-from": "^4.0.0", "tslib": "^2.6.2" } }, "sha512-b+zebfKCfRdgNJDknHCob3O7FpeYQN6ZG6YLExMcasDHsCXlsXCEuiPZeLnJLpwa5dvPetGlnGCiMHuLwGvFow=="], - "@speed-highlight/core": ["@speed-highlight/core@1.2.7", "", {}, "sha512-0dxmVj4gxg3Jg879kvFS/msl4s9F3T9UXC1InxgOf7t5NvcPD97u/WTA5vL/IxWHMn7qSxBozqrnnE2wvl1m8g=="], - "@standard-schema/spec": ["@standard-schema/spec@1.0.0", "", {}, "sha512-m2bOd0f2RT9k8QJx1JN85cZYyH1RqFBdlwtkSlf4tBDYLCiiZnv1fIIwacK6cqwXavOydf0NPToMQgpKq+dVlA=="], "@swc/helpers": ["@swc/helpers@0.5.17", "", { "dependencies": { "tslib": "^2.8.0" } }, "sha512-5IKx/Y13RsYd+sauPb2x+U/xZikHjolzfuDgTAl/Tdf3Q8rslRvC19NKDLgAJQ6wsqADk10ntlv08nPFw/gO/A=="], + "@tokenizer/inflate": ["@tokenizer/inflate@0.2.7", "", { "dependencies": { "debug": "^4.4.0", "fflate": "^0.8.2", "token-types": "^6.0.0" } }, "sha512-MADQgmZT1eKjp06jpI2yozxaU9uVs4GzzgSL+uEq7bVcJ9V1ZXQkeGNql1fsSI0gMy1vhvNTNbUqrx+pZfJVmg=="], + + "@tokenizer/token": ["@tokenizer/token@0.3.0", "", {}, "sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A=="], + "@tsconfig/bun": ["@tsconfig/bun@1.0.7", "", {}, "sha512-udGrGJBNQdXGVulehc1aWT73wkR9wdaGBtB6yL70RJsqwW/yJhIg6ZbRlPOfIUiFNrnBuYLBi9CSmMKfDC7dvA=="], "@types/babel__core": ["@types/babel__core@7.20.5", "", { "dependencies": { "@babel/parser": "^7.20.7", "@babel/types": "^7.20.7", "@types/babel__generator": "*", "@types/babel__template": "*", "@types/babel__traverse": "*" } }, "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA=="], @@ -493,11 +462,13 @@ "@types/babel__traverse": ["@types/babel__traverse@7.20.7", "", { "dependencies": { "@babel/types": "^7.20.7" } }, "sha512-dkO5fhS7+/oos4ciWxyEyjWe48zmG6wbCheo/G2ZnHx4fs3EU6YC6UM8rk56gAjNJ9P3MTH2jo5jb92/K6wbng=="], - "@types/bun": ["@types/bun@1.2.18", "", { "dependencies": { "bun-types": "1.2.18" } }, "sha512-Xf6RaWVheyemaThV0kUfaAUvCNokFr+bH8Jxp+tTZfx7dAPA8z9ePnP9S9+Vspzuxxx9JRAXhnyccRj3GyCMdQ=="], + "@types/bun": ["@types/bun@1.2.17", "", { "dependencies": { "bun-types": "1.2.17" } }, "sha512-l/BYs/JYt+cXA/0+wUhulYJB6a6p//GTPiJ7nV+QHa8iiId4HZmnu/3J/SowP5g0rTiERY2kfGKXEK5Ehltx4Q=="], "@types/debug": ["@types/debug@4.1.12", "", { "dependencies": { "@types/ms": "*" } }, "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ=="], - "@types/estree": ["@types/estree@1.0.8", "", {}, "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w=="], + "@types/diff-match-patch": ["@types/diff-match-patch@1.0.36", "", {}, "sha512-xFdR6tkm0MWvBfO8xXCSsinYxHcqkQUlcHeSpMC2ukzOb6lwQAfDmW+Qt0AvlGd8HpsS28qKsB+oPeJn9I39jg=="], + + "@types/estree": ["@types/estree@1.0.7", "", {}, 
"sha512-w28IoSUCJpidD/TGviZwwMJckNESJZXFu7NBZ5YJ4mEUnNraUn9Pm8HSZm/jDF1pDWYKspWE7oVphigUPRakIQ=="], "@types/estree-jsx": ["@types/estree-jsx@1.0.5", "", { "dependencies": { "@types/estree": "*" } }, "sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg=="], @@ -521,8 +492,6 @@ "@types/node": ["@types/node@22.13.9", "", { "dependencies": { "undici-types": "~6.20.0" } }, "sha512-acBjXdRJ3A6Pb3tqnw9HZmyR3Fiol3aGxRCK1x3d+6CDAMjl7I649wpSd+yNURCjbOUGu9tqtLKnTGxmK6CyGw=="], - "@types/react": ["@types/react@19.1.8", "", { "dependencies": { "csstype": "^3.0.2" } }, "sha512-AwAfQ2Wa5bCx9WP8nZL2uMZWod7J7/JSplxbTmBQ5ms6QpqNYm672H0Vu9ZVKVngQ+ii4R/byguVEUZQyeg44g=="], - "@types/sax": ["@types/sax@1.2.7", "", { "dependencies": { "@types/node": "*" } }, "sha512-rO73L89PJxeYM3s3pPPjiPgVVcymqU490g0YO5n5By0k2Erzj6tay/4lr1CHAAU4JyOWd1rpQ8bCf6cZfHU96A=="], "@types/turndown": ["@types/turndown@5.0.5", "", {}, "sha512-TL2IgGgc7B5j78rIccBtlYAnkuv8nUQqhQc+DSYV5j9Be9XOcm/SKOVRuA47xAVI3680Tk9B1d8flK2GWT2+4w=="], @@ -537,15 +506,13 @@ "accepts": ["accepts@2.0.0", "", { "dependencies": { "mime-types": "^3.0.0", "negotiator": "^1.0.0" } }, "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng=="], - "acorn": ["acorn@8.15.0", "", { "bin": { "acorn": "bin/acorn" } }, "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg=="], + "acorn": ["acorn@8.14.1", "", { "bin": { "acorn": "bin/acorn" } }, "sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg=="], "acorn-jsx": ["acorn-jsx@5.3.2", "", { "peerDependencies": { "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } }, "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ=="], "acorn-walk": ["acorn-walk@8.3.2", "", {}, "sha512-cjkyv4OtNCIeqhHrfS81QWXoCBPExR/J62oyEqepVw8WaQeSqpW2uhuLPh1m9eWhDuOo/jUXVTlifvesOWp/4A=="], - "ai": ["ai@5.0.0-beta.21", "", { "dependencies": { "@ai-sdk/gateway": "1.0.0-beta.8", "@ai-sdk/provider": "2.0.0-beta.1", "@ai-sdk/provider-utils": "3.0.0-beta.3", "@opentelemetry/api": "1.9.0" }, "peerDependencies": { "zod": "^3.25.49 || ^4" }, "bin": { "ai": "dist/bin/ai.min.js" } }, "sha512-ZmgUoEIXb2G2HLtK1U3UB+hSDa3qrVIeAfgXf3SIE9r5Vqj6xHG1pN/7fHIZDSgb1TCaypG0ANVB0O9WmnMfiw=="], - - "ajv": ["ajv@6.12.6", "", { "dependencies": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", "json-schema-traverse": "^0.4.1", "uri-js": "^4.2.2" } }, "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g=="], + "ai": ["ai@4.3.16", "", { "dependencies": { "@ai-sdk/provider": "1.1.3", "@ai-sdk/provider-utils": "2.2.8", "@ai-sdk/react": "1.2.12", "@ai-sdk/ui-utils": "1.2.11", "@opentelemetry/api": "1.9.0", "jsondiffpatch": "0.6.0" }, "peerDependencies": { "react": "^18 || ^19 || ^19.0.0-rc", "zod": "^3.23.8" }, "optionalPeers": ["react"] }, "sha512-KUDwlThJ5tr2Vw0A1ZkbDKNME3wzWhuVfAOwIvFUzl1TPVDFAXDFTXio3p+jaKneB+dKNCvFFlolYmmgHttG1g=="], "ansi-align": ["ansi-align@3.0.1", "", { "dependencies": { "string-width": "^4.1.0" } }, "sha512-IOfwwBF5iczOjp/WeY4YxyjqAFMQoZufdQWDd19SEExbVLNXqvpzSJ/M7Za4/sCPmQ0+GRquoA7bGcINcxew6w=="], @@ -561,18 +528,24 @@ "argparse": ["argparse@2.0.1", "", {}, "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="], + "args": ["args@5.0.3", "", { "dependencies": { "camelcase": "5.0.0", "chalk": "2.4.2", "leven": "2.1.0", "mri": "1.1.4" } }, 
"sha512-h6k/zfFgusnv3i5TU08KQkVKuCPBtL/PWQbWkHUxvJrZ2nAyeaUupneemcrgn1xmqxPQsPIzwkUhOpoqPDRZuA=="], + "aria-query": ["aria-query@5.3.2", "", {}, "sha512-COROpnaoap1E2F000S62r6A60uHZnmlvomhfyT2DlTcrY1OrBKn2UhH7qn5wTC9zMvD0AY7csdPSNwKP+7WiQw=="], "array-iterate": ["array-iterate@2.0.1", "", {}, "sha512-I1jXZMjAgCMmxT4qxXfPXa6SthSoE8h6gkSI9BGGNv8mP8G/v0blc+qFnZu6K42vTOiuME596QaLO0TP3Lk0xg=="], + "as-table": ["as-table@1.0.55", "", { "dependencies": { "printable-characters": "^1.0.42" } }, "sha512-xvsWESUJn0JN421Xb9MQw6AsMHRCUknCe0Wjlxvjud80mU4E6hQf1A6NzQKcYNmYw62MfzEtXc+badstZP3JpQ=="], + "astring": ["astring@1.9.0", "", { "bin": { "astring": "bin/astring" } }, "sha512-LElXdjswlqjWrPpJFg1Fx4wpkOCxj1TDHlSV4PlaRxHGWko024xICaa97ZkMfs6DRKlCguiAI+rbXv5GWwXIkg=="], "astro": ["astro@5.7.13", "", { "dependencies": { "@astrojs/compiler": "^2.11.0", "@astrojs/internal-helpers": "0.6.1", "@astrojs/markdown-remark": "6.3.1", "@astrojs/telemetry": "3.2.1", "@capsizecss/unpack": "^2.4.0", "@oslojs/encoding": "^1.1.0", "@rollup/pluginutils": "^5.1.4", "acorn": "^8.14.1", "aria-query": "^5.3.2", "axobject-query": "^4.1.0", "boxen": "8.0.1", "ci-info": "^4.2.0", "clsx": "^2.1.1", "common-ancestor-path": "^1.0.1", "cookie": "^1.0.2", "cssesc": "^3.0.0", "debug": "^4.4.0", "deterministic-object-hash": "^2.0.2", "devalue": "^5.1.1", "diff": "^5.2.0", "dlv": "^1.1.3", "dset": "^3.1.4", "es-module-lexer": "^1.6.0", "esbuild": "^0.25.0", "estree-walker": "^3.0.3", "flattie": "^1.1.1", "fontace": "~0.3.0", "github-slugger": "^2.0.0", "html-escaper": "3.0.3", "http-cache-semantics": "^4.1.1", "js-yaml": "^4.1.0", "kleur": "^4.1.5", "magic-string": "^0.30.17", "magicast": "^0.3.5", "mrmime": "^2.0.1", "neotraverse": "^0.6.18", "p-limit": "^6.2.0", "p-queue": "^8.1.0", "package-manager-detector": "^1.1.0", "picomatch": "^4.0.2", "prompts": "^2.4.2", "rehype": "^13.0.2", "semver": "^7.7.1", "shiki": "^3.2.1", "tinyexec": "^0.3.2", "tinyglobby": "^0.2.12", "tsconfck": "^3.1.5", "ultrahtml": "^1.6.0", "unifont": "~0.5.0", "unist-util-visit": "^5.0.0", "unstorage": "^1.15.0", "vfile": "^6.0.3", "vite": "^6.3.4", "vitefu": "^1.0.6", "xxhash-wasm": "^1.1.0", "yargs-parser": "^21.1.1", "yocto-spinner": "^0.2.1", "zod": "^3.24.2", "zod-to-json-schema": "^3.24.5", "zod-to-ts": "^1.2.0" }, "optionalDependencies": { "sharp": "^0.33.3" }, "bin": { "astro": "astro.js" } }, "sha512-cRGq2llKOhV3XMcYwQpfBIUcssN6HEK5CRbcMxAfd9OcFhvWE7KUy50zLioAZVVl3AqgUTJoNTlmZfD2eG0G1w=="], - "astro-expressive-code": ["astro-expressive-code@0.41.3", "", { "dependencies": { "rehype-expressive-code": "^0.41.3" }, "peerDependencies": { "astro": "^4.0.0-beta || ^5.0.0-beta || ^3.3.0" } }, "sha512-u+zHMqo/QNLE2eqYRCrK3+XMlKakv33Bzuz+56V1gs8H0y6TZ0hIi3VNbIxeTn51NLn+mJfUV/A0kMNfE4rANw=="], + "astro-expressive-code": ["astro-expressive-code@0.41.2", "", { "dependencies": { "rehype-expressive-code": "^0.41.2" }, "peerDependencies": { "astro": "^4.0.0-beta || ^5.0.0-beta || ^3.3.0" } }, "sha512-HN0jWTnhr7mIV/2e6uu4PPRNNo/k4UEgTLZqbp3MrHU+caCARveG2yZxaZVBmxyiVdYqW5Pd3u3n2zjnshixbw=="], "async-lock": ["async-lock@1.4.1", "", {}, "sha512-Az2ZTpuytrtqENulXwO3GGv1Bztugx6TT37NIo7imr/Qo0gsYiGtSdBa2B6fsXhTpVZDNfu1Qn3pk531e3q+nQ=="], + "atomic-sleep": ["atomic-sleep@1.0.0", "", {}, "sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ=="], + "available-typed-arrays": ["available-typed-arrays@1.0.7", "", { "dependencies": { "possible-typed-array-names": "^1.0.0" } }, 
"sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ=="], "aws-sdk": ["aws-sdk@2.1692.0", "", { "dependencies": { "buffer": "4.9.2", "events": "1.1.1", "ieee754": "1.1.13", "jmespath": "0.16.0", "querystring": "0.2.0", "sax": "1.2.1", "url": "0.10.3", "util": "^0.12.4", "uuid": "8.0.0", "xml2js": "0.6.2" } }, "sha512-x511uiJ/57FIsbgUe5csJ13k3uzu25uWQE+XqfBis/sB0SFoiElJWXRkgEAUh0U6n40eT3ay5Ue4oPkRMu1LYw=="], @@ -589,9 +562,11 @@ "bail": ["bail@2.0.2", "", {}, "sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw=="], - "bare-events": ["bare-events@2.6.0", "", {}, "sha512-EKZ5BTXYExaNqi3I3f9RtEsaI/xBSGjE0XZCZilPzFAV/goswFHuPd9jEZlPIZ/iNZJwDSao9qRiScySz7MbQg=="], + "balanced-match": ["balanced-match@1.0.2", "", {}, "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="], - "bare-fs": ["bare-fs@4.1.6", "", { "dependencies": { "bare-events": "^2.5.4", "bare-path": "^3.0.0", "bare-stream": "^2.6.4" }, "peerDependencies": { "bare-buffer": "*" }, "optionalPeers": ["bare-buffer"] }, "sha512-25RsLF33BqooOEFNdMcEhMpJy8EoR88zSMrnOQOaM3USnOK2VmaJ1uaQEwPA6AQjrv1lXChScosN6CzbwbO9OQ=="], + "bare-events": ["bare-events@2.5.4", "", {}, "sha512-+gFfDkR8pj4/TrWCGUGWmJIkBwuxPS5F+a5yWjOHQt2hHvNZd5YLzadjmDUtFmMM4y429bnKLa8bYBMHcYdnQA=="], + + "bare-fs": ["bare-fs@4.1.5", "", { "dependencies": { "bare-events": "^2.5.4", "bare-path": "^3.0.0", "bare-stream": "^2.6.4" }, "peerDependencies": { "bare-buffer": "*" }, "optionalPeers": ["bare-buffer"] }, "sha512-1zccWBMypln0jEE05LzZt+V/8y8AQsQQqxtklqaIyg5nu6OAYFhZxPXinJTSG+kU5qyNmeLgcn9AW7eHiCHVLA=="], "bare-os": ["bare-os@3.6.1", "", {}, "sha512-uaIjxokhFidJP+bmmvKSgiMzj2sV5GPHaZVAIktcxcpCyBFFWO+YlikVAdhmUo2vYFvFhOXIAlldqV29L8126g=="], @@ -607,8 +582,6 @@ "bcp-47-match": ["bcp-47-match@2.0.3", "", {}, "sha512-JtTezzbAibu8G0R9op9zb3vcWZd9JF6M0xOYGPn0fNCd7wOpRB1mU2mH9T8gaBGbAAyIIVgB2G7xG0GP98zMAQ=="], - "before-after-hook": ["before-after-hook@4.0.0", "", {}, "sha512-q6tR3RPqIB1pMiTRMFcZwuG5T8vwp+vUvEG0vuI6B+Rikh5BfPp2fQ82c925FOs+b0lcFQ8CFrL+KbilfZFhOQ=="], - "bl": ["bl@4.1.0", "", { "dependencies": { "buffer": "^5.5.0", "inherits": "^2.0.4", "readable-stream": "^3.4.0" } }, "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w=="], "blake3-wasm": ["blake3-wasm@2.1.5", "", {}, "sha512-F1+K8EbfOZE49dtoPtmxUQrpXaBIl3ICvasLh+nJta0xkz+9kF/7uet9fLnwKqhDrmj6g+6K3Tw9yQPUg2ka5g=="], @@ -621,13 +594,15 @@ "boxen": ["boxen@8.0.1", "", { "dependencies": { "ansi-align": "^3.0.1", "camelcase": "^8.0.0", "chalk": "^5.3.0", "cli-boxes": "^3.0.0", "string-width": "^7.2.0", "type-fest": "^4.21.0", "widest-line": "^5.0.0", "wrap-ansi": "^9.0.0" } }, "sha512-F3PH5k5juxom4xktynS7MoFY+NUWH5LC4CnH11YB8NPew+HLpmBLCybSAEyb2F+4pRXhuhWqFesoQd6DAyc2hw=="], + "brace-expansion": ["brace-expansion@2.0.1", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA=="], + "brotli": ["brotli@1.3.3", "", { "dependencies": { "base64-js": "^1.1.2" } }, "sha512-oTKjJdShmDuGW94SyyaoQvAjf30dZaHnjJ8uAF+u2/vGJkJbJPJAT1gDiOJP5v1Zb6f9KEyW/1HpuaWIXtGHPg=="], - "browserslist": ["browserslist@4.25.1", "", { "dependencies": { "caniuse-lite": "^1.0.30001726", "electron-to-chromium": "^1.5.173", "node-releases": "^2.0.19", "update-browserslist-db": "^1.1.3" }, "bin": { "browserslist": "cli.js" } }, 
"sha512-KGj0KoOMXLpSNkkEI6Z6mShmQy0bc1I+T7K9N81k4WWMrfz+6fQ6es80B/YLAeRoKvjYE1YSHHOW1qe9xIVzHw=="], + "browserslist": ["browserslist@4.25.0", "", { "dependencies": { "caniuse-lite": "^1.0.30001718", "electron-to-chromium": "^1.5.160", "node-releases": "^2.0.19", "update-browserslist-db": "^1.1.3" }, "bin": { "browserslist": "cli.js" } }, "sha512-PJ8gYKeS5e/whHBh8xrwYK+dAvEj7JXtz6uTucnMRB8OiGTsKccFekoRrjajPBHV8oOY+2tI4uxeceSimKwMFA=="], "buffer": ["buffer@4.9.2", "", { "dependencies": { "base64-js": "^1.0.2", "ieee754": "^1.1.4", "isarray": "^1.0.0" } }, "sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg=="], - "bun-types": ["bun-types@1.2.18", "", { "dependencies": { "@types/node": "*" }, "peerDependencies": { "@types/react": "^19" } }, "sha512-04+Eha5NP7Z0A9YgDAzMk5PHR16ZuLVa83b26kH5+cp1qZW4F6FmAURngE7INf4tKOvCE69vYvDEwoNl1tGiWw=="], + "bun-types": ["bun-types@1.2.17", "", { "dependencies": { "@types/node": "*" } }, "sha512-ElC7ItwT3SCQwYZDYoAH+q6KT4Fxjl8DtZ6qDulUFBmXA8YB4xo+l54J9ZJN+k2pphfn9vk7kfubeSd5QfTVJQ=="], "bundle-name": ["bundle-name@4.1.0", "", { "dependencies": { "run-applescript": "^7.0.0" } }, "sha512-tjwM5exMg6BGRI+kNmTntNsvdZS1X8BFYS6tnJ2hdH0kVxM6/eVZ2xy+FqStSWvYmtfFMDLIxurorHwDKfDz5Q=="], @@ -641,7 +616,7 @@ "camelcase": ["camelcase@8.0.0", "", {}, "sha512-8WB3Jcas3swSvjIeA2yvCJ+Miyz5l1ZmB6HFb9R1317dt9LCQoswg/BGrmAmkWVEszSrrg4RwmO46qIm2OEnSA=="], - "caniuse-lite": ["caniuse-lite@1.0.30001727", "", {}, "sha512-pB68nIHmbN6L/4C6MH1DokyR3bYqFwjaSs/sWDHGj4CTcFtQUQMuJftVwWkXq7mNWOybD3KhUv3oWHoGxgP14Q=="], + "caniuse-lite": ["caniuse-lite@1.0.30001720", "", {}, "sha512-Ec/2yV2nNPwb4DnTANEV99ZWwm3ZWfdlfkQbWSDDt+PsXEVYwlhPH8tdMaPunYTKKmz7AnHi2oNEi1GcmKCD8g=="], "ccount": ["ccount@2.0.1", "", {}, "sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg=="], @@ -659,7 +634,7 @@ "chownr": ["chownr@1.1.4", "", {}, "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg=="], - "ci-info": ["ci-info@4.3.0", "", {}, "sha512-l+2bNRMiQgcfILUi33labAZYIWlH1kWDp+ecNo5iisRKrbm0xcRyCww71/YU0Fkw0mAFpz9bJayXPjey6vkmaQ=="], + "ci-info": ["ci-info@4.2.0", "", {}, "sha512-cYY9mypksY8NRqgDB1XD1RiJL338v/551niynFTGkZOO2LHuB2OmOYxDIe/ttN9AHwrqdum1360G3ald0W9kCg=="], "clean-git-ref": ["clean-git-ref@2.0.1", "", {}, "sha512-bLSptAy2P0s6hU4PzuIMKmMJJSE6gLXGH1cntDu7bWJUksvuM+7ReOK61mozULErYvP6a15rnYl0zFDef+pyPw=="], @@ -703,11 +678,9 @@ "cross-fetch": ["cross-fetch@3.2.0", "", { "dependencies": { "node-fetch": "^2.7.0" } }, "sha512-Q+xVJLoGOeIMXZmbUK4HYk+69cQH6LudR0Vu/pRm2YlU/hDV9CiS0gKUMaWY5f2NeUH9C1nV3bsTlCo0FsTV1Q=="], - "cross-spawn": ["cross-spawn@7.0.6", "", { "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", "which": "^2.0.1" } }, "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA=="], - "crossws": ["crossws@0.3.5", "", { "dependencies": { "uncrypto": "^0.1.3" } }, "sha512-ojKiDvcmByhwa8YYqbQI/hg7MEU0NC03+pSdEq4ZUnZR9xXpwk7E43SMNGkn+JxJGPFtNvQ48+vV2p+P1ml5PA=="], - "css-selector-parser": ["css-selector-parser@3.1.3", "", {}, "sha512-gJMigczVZqYAk0hPVzx/M4Hm1D9QOtqkdQk9005TNzDIUGzo5cnHEDiKUT7jGPximL/oYb+LIitcHFQ4aKupxg=="], + "css-selector-parser": ["css-selector-parser@3.1.2", "", {}, "sha512-WfUcL99xWDs7b3eZPoRszWVfbNo8ErCF15PTvVROjkShGlAfjIkG6hlfj/sl6/rfo5Q9x9ryJ3VqVnAZDA+gcw=="], "css-tree": ["css-tree@3.1.0", "", { "dependencies": { "mdn-data": "2.12.2", "source-map-js": "^1.0.1" } }, 
"sha512-0eW44TGN5SQXU1mWSkKwFstI/22X2bG1nYzZTYMAWjylYURhse752YgbE4Cx46AC+bAvI+/dYTPRk1LqSUnu6w=="], @@ -715,11 +688,15 @@ "csstype": ["csstype@3.1.3", "", {}, "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw=="], + "data-uri-to-buffer": ["data-uri-to-buffer@2.0.2", "", {}, "sha512-ND9qDTLc6diwj+Xe5cdAgVTbLVdXbtxTJRXRhli8Mowuaan+0EJOtdqJ0QCHNSSPyoXGx9HX2/VMnKeC34AChA=="], + + "dateformat": ["dateformat@4.6.3", "", {}, "sha512-2P0p0pFGzHS5EMnhdxQi7aJN+iMheud0UhG4dlE1DLAlvL8JHjJJTX/CSm4JXwV0Ka5nGk3zC5mcb5bUQUxxMA=="], + "debug": ["debug@4.4.1", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ=="], "decimal.js": ["decimal.js@10.5.0", "", {}, "sha512-8vDa8Qxvr/+d94hSh5P3IJwI5t8/c0KsMp+g8bNw9cY2icONa5aPfvKeieW1WlG0WQYwwhJ7mjui2xtiePQSXw=="], - "decode-named-character-reference": ["decode-named-character-reference@1.2.0", "", { "dependencies": { "character-entities": "^2.0.0" } }, "sha512-c6fcElNV6ShtZXmsgNgFFV5tVX2PaV4g+MOAkb8eXHvn6sryJBrZa9r0zV6+dtTyoCKxtDy5tyQ5ZwQuidtd+Q=="], + "decode-named-character-reference": ["decode-named-character-reference@1.1.0", "", { "dependencies": { "character-entities": "^2.0.0" } }, "sha512-Wy+JTSbFThEOXQIR2L6mxJvEs+veIzpmqD7ynWxMXGpnk3smkHQOp6forLdHsKpAMW9iJpaBBIxz285t1n1C3w=="], "decompress-response": ["decompress-response@6.0.0", "", { "dependencies": { "mimic-response": "^3.1.0" } }, "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ=="], @@ -753,6 +730,8 @@ "diff": ["diff@8.0.2", "", {}, "sha512-sSuxWU5j5SR9QQji/o2qMvqRNYRDOcBTgsJ/DeCf4iSN4gW+gNMXM7wFIP+fdXZxoNiAnHUTGjCr+TSWXdRDKg=="], + "diff-match-patch": ["diff-match-patch@1.0.5", "", {}, "sha512-IayShXAgj/QMXgB0IWmKx+rOPuGMhqm5w6jvFxmVenXKIzRqTAAsbBPT3kWQeGANj3jGgvcvv4yK6SxqYmikgw=="], + "diff3": ["diff3@0.0.3", "", {}, "sha512-iSq8ngPOt0K53A6eVr4d5Kn6GNrM2nQZtC740pzIriHtn4pOQ2lyzEXQMBeVcWERN0ye7fhBsk9PbLLQOnUx/g=="], "direction": ["direction@2.0.1", "", { "bin": { "direction": "cli.js" } }, "sha512-9S6m9Sukh1cZNknO1CWAr2QAWsbKLafQiyM5gZ7VgXHeuaoUwffKN4q6NC4A/Mf9iiPlOXQEKW/Mv/mh9/3YFA=="], @@ -763,19 +742,21 @@ "dunder-proto": ["dunder-proto@1.0.1", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.1", "es-errors": "^1.3.0", "gopd": "^1.2.0" } }, "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A=="], + "duplexify": ["duplexify@4.1.3", "", { "dependencies": { "end-of-stream": "^1.4.1", "inherits": "^2.0.3", "readable-stream": "^3.1.1", "stream-shift": "^1.0.2" } }, "sha512-M3BmBhwJRZsSx38lZyhE53Csddgzl5R7xGJNk7CVddZD6CcmwMCH8J+7AprIrQKH7TonKxaCjcv27Qmf+sQ+oA=="], + "ee-first": ["ee-first@1.1.1", "", {}, "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow=="], - "electron-to-chromium": ["electron-to-chromium@1.5.183", "", {}, "sha512-vCrDBYjQCAEefWGjlK3EpoSKfKbT10pR4XXPdn65q7snuNOZnthoVpBfZPykmDapOKfoD+MMIPG8ZjKyyc9oHA=="], + "electron-to-chromium": ["electron-to-chromium@1.5.161", "", {}, "sha512-hwtetwfKNZo/UlwHIVBlKZVdy7o8bIZxxKs0Mv/ROPiQQQmDgdm5a+KvKtBsxM8ZjFzTaCeLoodZ8jiBE3o9rA=="], "emoji-regex": ["emoji-regex@10.4.0", "", {}, "sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw=="], "encodeurl": ["encodeurl@2.0.0", "", {}, "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg=="], - "end-of-stream": ["end-of-stream@1.4.5", "", { "dependencies": { 
"once": "^1.4.0" } }, "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg=="], + "end-of-stream": ["end-of-stream@1.4.4", "", { "dependencies": { "once": "^1.4.0" } }, "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q=="], - "entities": ["entities@6.0.1", "", {}, "sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g=="], + "entities": ["entities@6.0.0", "", {}, "sha512-aKstq2TDOndCn4diEyp9Uq/Flu2i1GlLkc6XIDQSDMuaFE3OPW5OphLCyQ5SpSJZTb4reN+kTcYru5yIfXoRPw=="], - "error-stack-parser-es": ["error-stack-parser-es@1.0.5", "", {}, "sha512-5qucVt2XcuGMcEGgWI7i+yZpmpByQ8J1lHhcL7PwqCwu9FPP3VUXzT4ltHe5i2z9dePwEHcDVOAfSnHsOlCXRA=="], + "env-paths": ["env-paths@3.0.0", "", {}, "sha512-dtJUTepzMW3Lm/NPxRf3wP4642UWhjL2sQxc+ym2YMj1m/H2zDNQOlezafzkHwn6sMstjHTwG6iQQsctDW/b1A=="], "es-define-property": ["es-define-property@1.0.1", "", {}, "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g=="], @@ -789,13 +770,13 @@ "esast-util-from-js": ["esast-util-from-js@2.0.1", "", { "dependencies": { "@types/estree-jsx": "^1.0.0", "acorn": "^8.0.0", "esast-util-from-estree": "^2.0.0", "vfile-message": "^4.0.0" } }, "sha512-8Ja+rNJ0Lt56Pcf3TAmpBZjmx8ZcK5Ts4cAzIOjsjevg9oSXJnl6SUQ2EevU8tv3h6ZLWmoKL5H4fgWvdvfETw=="], - "esbuild": ["esbuild@0.25.6", "", { "optionalDependencies": { "@esbuild/aix-ppc64": "0.25.6", "@esbuild/android-arm": "0.25.6", "@esbuild/android-arm64": "0.25.6", "@esbuild/android-x64": "0.25.6", "@esbuild/darwin-arm64": "0.25.6", "@esbuild/darwin-x64": "0.25.6", "@esbuild/freebsd-arm64": "0.25.6", "@esbuild/freebsd-x64": "0.25.6", "@esbuild/linux-arm": "0.25.6", "@esbuild/linux-arm64": "0.25.6", "@esbuild/linux-ia32": "0.25.6", "@esbuild/linux-loong64": "0.25.6", "@esbuild/linux-mips64el": "0.25.6", "@esbuild/linux-ppc64": "0.25.6", "@esbuild/linux-riscv64": "0.25.6", "@esbuild/linux-s390x": "0.25.6", "@esbuild/linux-x64": "0.25.6", "@esbuild/netbsd-arm64": "0.25.6", "@esbuild/netbsd-x64": "0.25.6", "@esbuild/openbsd-arm64": "0.25.6", "@esbuild/openbsd-x64": "0.25.6", "@esbuild/openharmony-arm64": "0.25.6", "@esbuild/sunos-x64": "0.25.6", "@esbuild/win32-arm64": "0.25.6", "@esbuild/win32-ia32": "0.25.6", "@esbuild/win32-x64": "0.25.6" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-GVuzuUwtdsghE3ocJ9Bs8PNoF13HNQ5TXbEi2AhvVb8xU1Iwt9Fos9FEamfoee+u/TOsn7GUWc04lz46n2bbTg=="], + "esbuild": ["esbuild@0.25.5", "", { "optionalDependencies": { "@esbuild/aix-ppc64": "0.25.5", "@esbuild/android-arm": "0.25.5", "@esbuild/android-arm64": "0.25.5", "@esbuild/android-x64": "0.25.5", "@esbuild/darwin-arm64": "0.25.5", "@esbuild/darwin-x64": "0.25.5", "@esbuild/freebsd-arm64": "0.25.5", "@esbuild/freebsd-x64": "0.25.5", "@esbuild/linux-arm": "0.25.5", "@esbuild/linux-arm64": "0.25.5", "@esbuild/linux-ia32": "0.25.5", "@esbuild/linux-loong64": "0.25.5", "@esbuild/linux-mips64el": "0.25.5", "@esbuild/linux-ppc64": "0.25.5", "@esbuild/linux-riscv64": "0.25.5", "@esbuild/linux-s390x": "0.25.5", "@esbuild/linux-x64": "0.25.5", "@esbuild/netbsd-arm64": "0.25.5", "@esbuild/netbsd-x64": "0.25.5", "@esbuild/openbsd-arm64": "0.25.5", "@esbuild/openbsd-x64": "0.25.5", "@esbuild/sunos-x64": "0.25.5", "@esbuild/win32-arm64": "0.25.5", "@esbuild/win32-ia32": "0.25.5", "@esbuild/win32-x64": "0.25.5" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-P8OtKZRv/5J5hhz0cUAdu/cLuPIKXpQl1R9pZtvmHWQvrAUVd0UNIPT4IB4W3rNOqVO0rlqHmCIbSwxh/c9yUQ=="], "escalade": 
["escalade@3.2.0", "", {}, "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA=="], "escape-html": ["escape-html@1.0.3", "", {}, "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow=="], - "escape-string-regexp": ["escape-string-regexp@5.0.0", "", {}, "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw=="], + "escape-string-regexp": ["escape-string-regexp@1.0.5", "", {}, "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg=="], "estree-util-attach-comments": ["estree-util-attach-comments@3.0.0", "", { "dependencies": { "@types/estree": "^1.0.0" } }, "sha512-cKUwm/HUcTDsYh/9FgnuFqpfquUbwIqwKM26BVCGDPVgvaCl/nDCCjUfiLlx6lsEZ3Z4RFxNbOQ60pkaEwFxGw=="], @@ -819,7 +800,7 @@ "eventsource": ["eventsource@3.0.7", "", { "dependencies": { "eventsource-parser": "^3.0.1" } }, "sha512-CRT1WTyuQoD771GW56XEZFQ/ZoSfWid1alKGDYMmkt2yl8UXrVR4pspqWNEcqKvVIzg6PAltWjxcSSPrboA4iA=="], - "eventsource-parser": ["eventsource-parser@3.0.3", "", {}, "sha512-nVpZkTMM9rF6AQ9gPJpFsNAMt48wIzB5TQgiTLdHiuO8XEDhUgZEhqKlZWXbIzo9VmJ/HvysHqEaVeD5v9TPvA=="], + "eventsource-parser": ["eventsource-parser@3.0.2", "", {}, "sha512-6RxOBZ/cYgd8usLwsEl+EC09Au/9BcmCKYF2/xbml6DNczf7nv0MQb+7BA2F+li6//I+28VNlQR37XfQtcAJuA=="], "exit-hook": ["exit-hook@2.2.1", "", {}, "sha512-eNTPlAD67BmP31LDINZ3U7HSF8l57TxOY2PmBJ1shpCvpnxBF93mWCE8YHBnXs8qiUZJc9WDcWIeC3a2HIAMfw=="], @@ -827,23 +808,27 @@ "express": ["express@5.1.0", "", { "dependencies": { "accepts": "^2.0.0", "body-parser": "^2.2.0", "content-disposition": "^1.0.0", "content-type": "^1.0.5", "cookie": "^0.7.1", "cookie-signature": "^1.2.1", "debug": "^4.4.0", "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "etag": "^1.8.1", "finalhandler": "^2.1.0", "fresh": "^2.0.0", "http-errors": "^2.0.0", "merge-descriptors": "^2.0.0", "mime-types": "^3.0.0", "on-finished": "^2.4.1", "once": "^1.4.0", "parseurl": "^1.3.3", "proxy-addr": "^2.0.7", "qs": "^6.14.0", "range-parser": "^1.2.1", "router": "^2.2.0", "send": "^1.1.0", "serve-static": "^2.2.0", "statuses": "^2.0.1", "type-is": "^2.0.1", "vary": "^1.1.2" } }, "sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA=="], - "express-rate-limit": ["express-rate-limit@7.5.1", "", { "peerDependencies": { "express": ">= 4.11" } }, "sha512-7iN8iPMDzOMHPUYllBEsQdWVB6fPDMPqwjBaFrgr4Jgr/+okjvzAy+UHlYYL/Vs0OsOrMkwS6PJDkFlJwoxUnw=="], + "express-rate-limit": ["express-rate-limit@7.5.0", "", { "peerDependencies": { "express": "^4.11 || 5 || ^5.0.0-beta.1" } }, "sha512-eB5zbQh5h+VenMPM3fh+nw1YExi5nMr6HUCR62ELSP11huvxm/Uir1H1QEyTkk5QX6A58pX6NmaTMceKZ0Eodg=="], - "expressive-code": ["expressive-code@0.41.3", "", { "dependencies": { "@expressive-code/core": "^0.41.3", "@expressive-code/plugin-frames": "^0.41.3", "@expressive-code/plugin-shiki": "^0.41.3", "@expressive-code/plugin-text-markers": "^0.41.3" } }, "sha512-YLnD62jfgBZYrXIPQcJ0a51Afv9h8VlWqEGK9uU2T5nL/5rb8SnA86+7+mgCZe5D34Tff5RNEA5hjNVJYHzrFg=="], + "expressive-code": ["expressive-code@0.41.2", "", { "dependencies": { "@expressive-code/core": "^0.41.2", "@expressive-code/plugin-frames": "^0.41.2", "@expressive-code/plugin-shiki": "^0.41.2", "@expressive-code/plugin-text-markers": "^0.41.2" } }, "sha512-aLZiZaqorRtNExtGpUjK9zFH9aTpWeoTXMyLo4b4IcuXfPqtLPPxhRm/QlPb8QqIcMMXnSiGRHSFpQfX0m7HJw=="], - "exsolve": ["exsolve@1.0.7", "", {}, 
"sha512-VO5fQUzZtI6C+vx4w/4BWJpg3s/5l+6pRQEHzFRM8WFi4XffSP1Z+4qi7GbjWbvRQEbdIco5mIMq+zX4rPuLrw=="], + "exsolve": ["exsolve@1.0.5", "", {}, "sha512-pz5dvkYYKQ1AHVrgOzBKWeP4u4FRb3a6DNK2ucr0OoNwYIU4QWsJ+NM36LLzORT+z845MzKHHhpXiUF5nvQoJg=="], "extend": ["extend@3.0.2", "", {}, "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g=="], - "fast-content-type-parse": ["fast-content-type-parse@3.0.0", "", {}, "sha512-ZvLdcY8P+N8mGQJahJV5G4U88CSvT1rP8ApL6uETe88MBXrBHAkZlSEySdUlyztF7ccb+Znos3TFqaepHxdhBg=="], - "fast-deep-equal": ["fast-deep-equal@3.1.3", "", {}, "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="], "fast-fifo": ["fast-fifo@1.3.2", "", {}, "sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ=="], - "fast-json-stable-stringify": ["fast-json-stable-stringify@2.1.0", "", {}, "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw=="], + "fast-redact": ["fast-redact@3.5.0", "", {}, "sha512-dwsoQlS7h9hMeYUq1W++23NDcBLV4KqONnITDV9DjfS3q1SgDGVrBdvvTLUotWtPSD7asWDV9/CmsZPy8Hf70A=="], - "fdir": ["fdir@6.4.6", "", { "peerDependencies": { "picomatch": "^3 || ^4" }, "optionalPeers": ["picomatch"] }, "sha512-hiFoqpyZcfNm1yc4u8oWCf9A2c4D3QjCrks3zmoVKVxpQRzmPNar1hUJcBG2RQHvEVGDN+Jm81ZheVLAQMK6+w=="], + "fast-safe-stringify": ["fast-safe-stringify@2.1.1", "", {}, "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA=="], + + "fdir": ["fdir@6.4.5", "", { "peerDependencies": { "picomatch": "^3 || ^4" }, "optionalPeers": ["picomatch"] }, "sha512-4BG7puHpVsIYxZUbiUE3RqGloLaSSwzYie5jvasC4LWuBWzZawynvYouhjbQKw2JuIGYdm0DzIxl8iVidKlUEw=="], + + "fflate": ["fflate@0.8.2", "", {}, "sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A=="], + + "file-type": ["file-type@20.5.0", "", { "dependencies": { "@tokenizer/inflate": "^0.2.6", "strtok3": "^10.2.0", "token-types": "^6.0.0", "uint8array-extras": "^1.4.0" } }, "sha512-BfHZtG/l9iMm4Ecianu7P8HRD2tBHLtjXinm4X62XBOYzi7CYA7jyqfJzOvXHqzVrVPYqBo2/GvbARMaaJkKVg=="], "finalhandler": ["finalhandler@2.1.0", "", { "dependencies": { "debug": "^4.4.0", "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "on-finished": "^2.4.1", "parseurl": "^1.3.3", "statuses": "^2.0.1" } }, "sha512-/t88Ty3d5JWQbWYgaOGCCYfXRwV1+be02WqYYlL6h0lEiUAMPM8o8qKGO01YIkOHzka2up08wvgYD0mDiI+q3Q=="], @@ -875,16 +860,22 @@ "get-proto": ["get-proto@1.0.1", "", { "dependencies": { "dunder-proto": "^1.0.1", "es-object-atoms": "^1.0.0" } }, "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g=="], + "get-source": ["get-source@2.0.12", "", { "dependencies": { "data-uri-to-buffer": "^2.0.0", "source-map": "^0.6.1" } }, "sha512-X5+4+iD+HoSeEED+uwrQ07BOQr0kEDFMVqqpBuI+RaZBpBpHCuXxo70bjar6f0b0u/DQJsJ7ssurpP0V60Az+w=="], + "github-from-package": ["github-from-package@0.0.0", "", {}, "sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw=="], "github-slugger": ["github-slugger@2.0.0", "", {}, "sha512-IaOQ9puYtjrkq7Y0Ygl9KDZnrf/aiUJYUpVf89y8kyaxbRG7Y1SrX/jaumrv81vc61+kiMempujsM3Yw7w5qcw=="], "glob-to-regexp": ["glob-to-regexp@0.4.1", "", {}, "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw=="], + "globals": ["globals@11.12.0", "", {}, "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA=="], + "gopd": 
["gopd@1.2.0", "", {}, "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg=="], "h3": ["h3@1.15.3", "", { "dependencies": { "cookie-es": "^1.2.2", "crossws": "^0.3.4", "defu": "^6.1.4", "destr": "^2.0.5", "iron-webcrypto": "^1.2.1", "node-mock-http": "^1.0.0", "radix3": "^1.1.2", "ufo": "^1.6.1", "uncrypto": "^0.1.3" } }, "sha512-z6GknHqyX0h9aQaTx22VZDf6QyZn+0Nh+Ym8O/u0SGSkyF5cuTJYKlc8MkzW3Nzf9LE1ivcpmYC3FUGpywhuUQ=="], + "has-flag": ["has-flag@4.0.0", "", {}, "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ=="], + "has-property-descriptors": ["has-property-descriptors@1.0.2", "", { "dependencies": { "es-define-property": "^1.0.0" } }, "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg=="], "has-symbols": ["has-symbols@1.1.0", "", {}, "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ=="], @@ -1007,13 +998,13 @@ "isarray": ["isarray@1.0.0", "", {}, "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ=="], - "isexe": ["isexe@2.0.0", "", {}, "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="], - "isomorphic-git": ["isomorphic-git@1.32.1", "", { "dependencies": { "async-lock": "^1.4.1", "clean-git-ref": "^2.0.1", "crc-32": "^1.2.0", "diff3": "0.0.3", "ignore": "^5.1.4", "minimisted": "^2.0.0", "pako": "^1.0.10", "path-browserify": "^1.0.1", "pify": "^4.0.1", "readable-stream": "^3.4.0", "sha.js": "^2.4.9", "simple-get": "^4.0.1" }, "bin": { "isogit": "cli.cjs" } }, "sha512-NZCS7qpLkCZ1M/IrujYBD31sM6pd/fMVArK4fz4I7h6m0rUW2AsYU7S7zXeABuHL6HIfW6l53b4UQ/K441CQjg=="], "jmespath": ["jmespath@0.16.0", "", {}, "sha512-9FzQjJ7MATs1tSpnco1K6ayiYE3figslrXA72G2HQ/n76RzvYlofyi5QM+iX4YRs/pu3yzxlVQSST23+dMDknw=="], - "jose": ["jose@6.0.11", "", {}, "sha512-QxG7EaliDARm1O1S8BGakqncGT9s25bKL1WSf6/oa17Tkqwi8D2ZNglqCF+DsYF88/rV66Q/Q2mFAy697E1DUg=="], + "jose": ["jose@5.2.3", "", {}, "sha512-KUXdbctm1uHVL8BYhnyHkgp3zDX5KW8ZhAKVFEfUbU2P8Alpzjb+48hHvjOdQIyPshoblhzsuqOwEEAbtHVirA=="], + + "joycon": ["joycon@3.1.1", "", {}, "sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw=="], "js-base64": ["js-base64@3.7.7", "", {}, "sha512-7rCnleh0z2CkXhH67J8K1Ytz0b2Y+yxTPL+/KOJoa20hfnVQ/3/T6W/KflYI4bRHRagNeXeU2bkNGI3v1oS/lw=="], @@ -1023,14 +1014,16 @@ "jsesc": ["jsesc@3.1.0", "", { "bin": { "jsesc": "bin/jsesc" } }, "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA=="], - "json-schema": ["json-schema@0.4.0", "", {}, "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA=="], + "json-rpc-2.0": ["json-rpc-2.0@1.7.0", "", {}, "sha512-asnLgC1qD5ytP+fvBP8uL0rvj+l8P6iYICbzZ8dVxCpESffVjzA7KkYkbKCIbavs7cllwH1ZUaNtJwphdeRqpg=="], - "json-schema-traverse": ["json-schema-traverse@0.4.1", "", {}, "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg=="], + "json-schema": ["json-schema@0.4.0", "", {}, "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA=="], "json-schema-walker": ["json-schema-walker@2.0.0", "", { "dependencies": { "@apidevtools/json-schema-ref-parser": "^11.1.0", "clone": "^2.1.2" } }, "sha512-nXN2cMky0Iw7Af28w061hmxaPDaML5/bQD9nwm1lOoIKEGjHcRGxqWe4MfrkYThYAPjSUhmsp4bJNoLAyVn9Xw=="], "json5": ["json5@2.2.3", "", { "bin": { "json5": "lib/cli.js" } }, 
"sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg=="], + "jsondiffpatch": ["jsondiffpatch@0.6.0", "", { "dependencies": { "@types/diff-match-patch": "^1.0.36", "chalk": "^5.3.0", "diff-match-patch": "^1.0.5" }, "bin": { "jsondiffpatch": "bin/jsondiffpatch.js" } }, "sha512-3QItJOXp2AP1uv7waBkao5nCvhEv+QmJAd38Ybq7wNI74Q+BBmnLn4EDKz6yI9xGAIQoUF87qHt+kc1IVxB4zQ=="], + "kleur": ["kleur@4.1.5", "", {}, "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ=="], "klona": ["klona@2.0.6", "", {}, "sha512-dhG34DXATL5hSxJbIexCft8FChFXtmskoZYnoPWjXQuebWYCNkVeV3KkGegCK9CP1oswI/vQibS2GY7Em/sJJA=="], @@ -1039,6 +1032,8 @@ "language-map": ["language-map@1.5.0", "", {}, "sha512-n7gFZpe+DwEAX9cXVTw43i3wiudWDDtSn28RmdnS/HCPr284dQI/SztsamWanRr75oSlKSaGbV2nmWCTzGCoVg=="], + "leven": ["leven@2.1.0", "", {}, "sha512-nvVPLpIHUxCUoRLrFqTgSxXJ614d8AgQoWl7zPe/2VadE8+1dpU3LBhowRuBAcuwruWtOdD8oYC9jDNJjXDPyA=="], + "longest-streak": ["longest-streak@3.1.0", "", {}, "sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g=="], "lru-cache": ["lru-cache@6.0.0", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA=="], @@ -1055,8 +1050,6 @@ "marked": ["marked@15.0.12", "", { "bin": { "marked": "bin/marked.js" } }, "sha512-8dD6FusOQSrpv9Z1rdNMdlSgQOIP880DHqnohobOmYLElGEqAL/JvxvuxZO16r4HtjTlfPRDC1hbvxC9dPN2nA=="], - "marked-shiki": ["marked-shiki@1.2.0", "", { "peerDependencies": { "marked": ">=7.0.0", "shiki": ">=1.0.0" } }, "sha512-N924hp8veE6Mc91g5/kCNVoTU7TkeJfB2G2XEWb+k1fVA0Bck2T0rVt93d39BlOYH6ohP4Q9BFlPk+UkblhXbg=="], - "math-intrinsics": ["math-intrinsics@1.1.0", "", {}, "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g=="], "mdast-util-definitions": ["mdast-util-definitions@6.0.0", "", { "dependencies": { "@types/mdast": "^4.0.0", "@types/unist": "^3.0.0", "unist-util-visit": "^5.0.0" } }, "sha512-scTllyX6pnYNZH/AIp/0ePz6s4cZtARxImwoPJ7kS42n+MnVsI4XbnG6d4ibehRIldYMWM2LD7ImQblVhUejVQ=="], @@ -1183,7 +1176,9 @@ "mimic-response": ["mimic-response@3.1.0", "", {}, "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ=="], - "miniflare": ["miniflare@4.20250709.0", "", { "dependencies": { "@cspotcode/source-map-support": "0.8.1", "acorn": "8.14.0", "acorn-walk": "8.3.2", "exit-hook": "2.2.1", "glob-to-regexp": "0.4.1", "sharp": "^0.33.5", "stoppable": "1.1.0", "undici": "^5.28.5", "workerd": "1.20250709.0", "ws": "8.18.0", "youch": "4.1.0-beta.10", "zod": "3.22.3" }, "bin": { "miniflare": "bootstrap.js" } }, "sha512-dRGXi6Do9ArQZt7205QGWZ1tD6k6xQNY/mAZBAtiaQYvKxFuNyiHYlFnSN8Co4AFCVOozo/U52sVAaHvlcmnew=="], + "miniflare": ["miniflare@4.20250525.1", "", { "dependencies": { "@cspotcode/source-map-support": "0.8.1", "acorn": "8.14.0", "acorn-walk": "8.3.2", "exit-hook": "2.2.1", "glob-to-regexp": "0.4.1", "sharp": "^0.33.5", "stoppable": "1.1.0", "undici": "^5.28.5", "workerd": "1.20250525.0", "ws": "8.18.0", "youch": "3.3.4", "zod": "3.22.3" }, "bin": { "miniflare": "bootstrap.js" } }, "sha512-4PJlT5WA+hfclFU5Q7xnpG1G1VGYTXaf/3iu6iKQ8IsbSi9QvPTA2bSZ5goCFxmJXDjV4cxttVxB0Wl1CLuQ0w=="], + + "minimatch": ["minimatch@5.1.6", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g=="], "minimist": ["minimist@1.2.8", "", {}, 
"sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA=="], @@ -1191,10 +1186,14 @@ "mkdirp-classic": ["mkdirp-classic@0.5.3", "", {}, "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A=="], + "mri": ["mri@1.1.4", "", {}, "sha512-6y7IjGPm8AzlvoUrwAaw1tLnUBudaS3752vcd8JtrpGGQn+rXIe63LFVHm/YMwtqAuh+LJPCFdlLYPWM1nYn6w=="], + "mrmime": ["mrmime@2.0.1", "", {}, "sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ=="], "ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="], + "mustache": ["mustache@4.2.0", "", { "bin": { "mustache": "bin/mustache" } }, "sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ=="], + "nanoid": ["nanoid@3.3.11", "", { "bin": { "nanoid": "bin/nanoid.cjs" } }, "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w=="], "napi-build-utils": ["napi-build-utils@2.0.0", "", {}, "sha512-GEbrYkbfF7MoNaoh2iGG84Mnf/WZfB0GdGEsM8wz7Expx/LlWf5U8t9nvJKXSp3qr5IsEbK04cBGhol/KwOsWA=="], @@ -1213,7 +1212,7 @@ "node-fetch-native": ["node-fetch-native@1.6.6", "", {}, "sha512-8Mc2HhqPdlIfedsuZoc3yioPuzp6b+L5jRCRY1QzuWZh2EGJVQrGppC6V6cF0bLdbW0+O2YpqCA25aF/1lvipQ=="], - "node-mock-http": ["node-mock-http@1.0.1", "", {}, "sha512-0gJJgENizp4ghds/Ywu2FCmcRsgBTmRQzYPZm61wy+Em2sBarSka0OhQS5huLBg6od1zkNpnWMCZloQDFVvOMQ=="], + "node-mock-http": ["node-mock-http@1.0.0", "", {}, "sha512-0uGYQ1WQL1M5kKvGRXWQ3uZCHtLTO8hln3oBjIusM75WoesZ909uQJs/Hb946i2SS+Gsrhkaa6iAO17jRIv6DQ=="], "node-releases": ["node-releases@2.0.19", "", {}, "sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw=="], @@ -1233,6 +1232,8 @@ "oidc-token-hash": ["oidc-token-hash@5.1.0", "", {}, "sha512-y0W+X7Ppo7oZX6eovsRkuzcSM40Bicg2JEJkDJ4irIt1wsYAP5MLSNv+QAogO8xivMffw/9OvV3um1pxXgt1uA=="], + "on-exit-leak-free": ["on-exit-leak-free@0.2.0", "", {}, "sha512-dqaz3u44QbRXQooZLTUKU41ZrzYrcvLISVgbrzbyCMxpmSLJvZ3ZamIJIZ29P6OhZIkNIQKosdeM6t1LYbA9hg=="], + "on-finished": ["on-finished@2.4.1", "", { "dependencies": { "ee-first": "1.1.1" } }, "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg=="], "once": ["once@1.4.0", "", { "dependencies": { "wrappy": "1" } }, "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w=="], @@ -1273,23 +1274,31 @@ "path-browserify": ["path-browserify@1.0.1", "", {}, "sha512-b7uo2UCUOYZcnF/3ID0lulOJi/bafxa1xPe7ZPsammBSpjSWQkjNxlt635YGS2MiR9GjvuXCtz2emr3jbsz98g=="], - "path-key": ["path-key@3.1.1", "", {}, "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q=="], - "path-to-regexp": ["path-to-regexp@6.3.0", "", {}, "sha512-Yhpw4T9C6hPpgPeA28us07OJeqZ5EzQTkbfwuhsUg0c237RomFoETJgmp2sa3F/41gfLE6G5cqcYwznmeEeOlQ=="], "pathe": ["pathe@2.0.3", "", {}, "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w=="], + "peek-readable": ["peek-readable@7.0.0", "", {}, "sha512-nri2TO5JE3/mRryik9LlHFT53cgHfRK0Lt0BAZQXku/AW3E6XLt2GaY8siWi7dvW/m1z0ecn+J+bpDa9ZN3IsQ=="], + "picocolors": ["picocolors@1.1.1", "", {}, "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="], "picomatch": ["picomatch@4.0.2", "", {}, "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg=="], "pify": 
["pify@4.0.1", "", {}, "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g=="], - "pkce-challenge": ["pkce-challenge@5.0.0", "", {}, "sha512-ueGLflrrnvwB3xuo/uGob5pd5FN7l0MsLf0Z87o/UQmRtwjvfylfc9MurIxRAWywCYTgrvpXBcqjV4OfCYGCIQ=="], + "pino": ["pino@7.11.0", "", { "dependencies": { "atomic-sleep": "^1.0.0", "fast-redact": "^3.0.0", "on-exit-leak-free": "^0.2.0", "pino-abstract-transport": "v0.5.0", "pino-std-serializers": "^4.0.0", "process-warning": "^1.0.0", "quick-format-unescaped": "^4.0.3", "real-require": "^0.1.0", "safe-stable-stringify": "^2.1.0", "sonic-boom": "^2.2.1", "thread-stream": "^0.15.1" }, "bin": { "pino": "bin.js" } }, "sha512-dMACeu63HtRLmCG8VKdy4cShCPKaYDR4youZqoSWLxl5Gu99HUw8bw75thbPv9Nip+H+QYX8o3ZJbTdVZZ2TVg=="], + + "pino-abstract-transport": ["pino-abstract-transport@0.5.0", "", { "dependencies": { "duplexify": "^4.1.2", "split2": "^4.0.0" } }, "sha512-+KAgmVeqXYbTtU2FScx1XS3kNyfZ5TrXY07V96QnUSFqo2gAqlvmaxH67Lj7SWazqsMabf+58ctdTcBgnOLUOQ=="], + + "pino-pretty": ["pino-pretty@5.1.3", "", { "dependencies": { "@hapi/bourne": "^2.0.0", "args": "^5.0.1", "chalk": "^4.0.0", "dateformat": "^4.5.1", "fast-safe-stringify": "^2.0.7", "jmespath": "^0.15.0", "joycon": "^3.0.0", "pump": "^3.0.0", "readable-stream": "^3.6.0", "rfdc": "^1.3.0", "split2": "^3.1.1", "strip-json-comments": "^3.1.1" }, "bin": { "pino-pretty": "bin.js" } }, "sha512-Zj+0TVdYKkAAIx9EUCL5e4TttwgsaFvJh2ceIMQeFCY8ak9tseEZQGSgpvyjEj1/iIVGIh5tdhkGEQWSMILKHA=="], + + "pino-std-serializers": ["pino-std-serializers@4.0.0", "", {}, "sha512-cK0pekc1Kjy5w9V2/n+8MkZwusa6EyyxfeQCB799CQRhRt/CqYKiWs5adeu8Shve2ZNffvfC/7J64A2PJo1W/Q=="], + + "pkce-challenge": ["pkce-challenge@4.1.0", "", {}, "sha512-ZBmhE1C9LcPoH9XZSdwiPtbPHZROwAnMy+kIFQVrnMCxY4Cudlz3gBOpzilgc0jOgRaiT3sIWfpMomW2ar2orQ=="], "possible-typed-array-names": ["possible-typed-array-names@1.1.0", "", {}, "sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg=="], - "postcss": ["postcss@8.5.6", "", { "dependencies": { "nanoid": "^3.3.11", "picocolors": "^1.1.1", "source-map-js": "^1.2.1" } }, "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg=="], + "postcss": ["postcss@8.5.4", "", { "dependencies": { "nanoid": "^3.3.11", "picocolors": "^1.1.1", "source-map-js": "^1.2.1" } }, "sha512-QSa9EBe+uwlGTFmHsPKokv3B/oEMQZxfqW0QqNCyhpa6mB1afzulwn8hihglqAb2pOw+BJgNlmXQ8la2VeHB7w=="], "postcss-nested": ["postcss-nested@6.2.0", "", { "dependencies": { "postcss-selector-parser": "^6.1.1" }, "peerDependencies": { "postcss": "^8.2.14" } }, "sha512-HQbt28KulC5AJzG+cZtj9kvKB93CFCdLvog1WFLf1D+xmMvPGlBstkpTEZfK5+AN9hfJocyBFCNiqyS48bpgzQ=="], @@ -1299,15 +1308,19 @@ "prettier": ["prettier@3.5.3", "", { "bin": { "prettier": "bin/prettier.cjs" } }, "sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw=="], + "printable-characters": ["printable-characters@1.0.42", "", {}, "sha512-dKp+C4iXWK4vVYZmYSd0KBH5F/h1HoZRsbJ82AVKRO3PEo8L4lBS/vLwhVtpwwuYcoIsVY+1JYKR268yn480uQ=="], + "prismjs": ["prismjs@1.30.0", "", {}, "sha512-DEvV2ZF2r2/63V+tK8hQvrR2ZGn10srHbXviTlcv7Kpzw8jWiNTqbVgjO3IY8RxrrOUF8VPMQQFysYYYv0YZxw=="], + "process-warning": ["process-warning@1.0.0", "", {}, "sha512-du4wfLyj4yCZq1VupnVSZmRsPJsNuxoDQFdCFHLaYiEbFBD7QE0a+I4D7hOxrVnh78QE/YipFAj9lXHiXocV+Q=="], + "prompts": ["prompts@2.4.2", "", { "dependencies": { "kleur": "^3.0.3", "sisteransi": "^1.0.5" } }, 
"sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q=="], "property-information": ["property-information@7.1.0", "", {}, "sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ=="], "proxy-addr": ["proxy-addr@2.0.7", "", { "dependencies": { "forwarded": "0.2.0", "ipaddr.js": "1.9.1" } }, "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg=="], - "pump": ["pump@3.0.3", "", { "dependencies": { "end-of-stream": "^1.1.0", "once": "^1.3.1" } }, "sha512-todwxLMY7/heScKmntwQG8CXVkWUOdYxIvY2s0VWAAMh/nd8SoYiRaKjlr7+iCs984f2P8zvrfWcDDYVb73NfA=="], + "pump": ["pump@3.0.2", "", { "dependencies": { "end-of-stream": "^1.1.0", "once": "^1.3.1" } }, "sha512-tUPXtzlGM8FE3P0ZL6DVs/3P58k9nk8/jZeQCurTJylQA8qFYzHFfhBJkuqyE0FifOsQ0uKWekiZ5g8wtr28cw=="], "punycode": ["punycode@1.3.2", "", {}, "sha512-RofWgt/7fL5wP1Y7fxE7/EmTLzQVnB0ycyibJ0OOHIlJqTNzglYFxVwETOcIoJqJmpDXJ9xImDv+Fq34F/d4Dw=="], @@ -1315,6 +1328,8 @@ "querystring": ["querystring@0.2.0", "", {}, "sha512-X/xY82scca2tau62i9mDyU9K+I+djTMUsvwf7xnUX5GLvVzgJybOJf4Y6o9Zx3oJK/LSXg5tTZBjwzqVPaPO2g=="], + "quick-format-unescaped": ["quick-format-unescaped@4.0.4", "", {}, "sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg=="], + "radix3": ["radix3@1.1.2", "", {}, "sha512-b484I/7b8rDEdSDKckSSBA8knMpcdsXudlE/LNL639wFoHKwLbEkQFZHWEYwDC0wa0FKUcCY+GAF73Z7wxNVFA=="], "range-parser": ["range-parser@1.2.1", "", {}, "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg=="], @@ -1323,10 +1338,14 @@ "rc": ["rc@1.2.8", "", { "dependencies": { "deep-extend": "^0.6.0", "ini": "~1.3.0", "minimist": "^1.2.0", "strip-json-comments": "~2.0.1" }, "bin": { "rc": "./cli.js" } }, "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw=="], + "react": ["react@19.1.0", "", {}, "sha512-FS+XFBNvn3GTAWq26joslQgWNoFu08F4kl0J4CgdNKADkdSGXQyTCnKteIAJy96Br6YbpEU1LSzV5dYtjMkMDg=="], + "readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="], "readdirp": ["readdirp@4.1.2", "", {}, "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg=="], + "real-require": ["real-require@0.1.0", "", {}, "sha512-r/H9MzAWtrv8aSVjPCMFpDMl5q66GqtmmRkRjpHTsp4zBAa+snZyiQNlMONiUmEJcsnaw0wCauJ2GWODr/aFkg=="], + "recma-build-jsx": ["recma-build-jsx@1.0.0", "", { "dependencies": { "@types/estree": "^1.0.0", "estree-util-build-jsx": "^3.0.0", "vfile": "^6.0.0" } }, "sha512-8GtdyqaBcDfva+GUKDr3nev3VpKAhup1+RvkMvUxURHpW7QyIvk9F5wz7Vzo06CEMSilw6uArgRqhpiUcWp8ew=="], "recma-jsx": ["recma-jsx@1.0.0", "", { "dependencies": { "acorn-jsx": "^5.0.0", "estree-util-to-js": "^2.0.0", "recma-parse": "^1.0.0", "recma-stringify": "^1.0.0", "unified": "^11.0.0" } }, "sha512-5vwkv65qWwYxg+Atz95acp8DMu1JDSqdGkA2Of1j6rCreyFUE/gp15fC8MnGEuG1W68UKjM6x6+YTWIh7hZM/Q=="], @@ -1345,7 +1364,7 @@ "rehype-autolink-headings": ["rehype-autolink-headings@7.1.0", "", { "dependencies": { "@types/hast": "^3.0.0", "@ungap/structured-clone": "^1.0.0", "hast-util-heading-rank": "^3.0.0", "hast-util-is-element": "^3.0.0", "unified": "^11.0.0", "unist-util-visit": "^5.0.0" } }, "sha512-rItO/pSdvnvsP4QRB1pmPiNHUskikqtPojZKJPPPAVx9Hj8i8TwMBhofrrAYRhYOOBZH9tgmG5lPqDLuIWPWmw=="], - 
"rehype-expressive-code": ["rehype-expressive-code@0.41.3", "", { "dependencies": { "expressive-code": "^0.41.3" } }, "sha512-8d9Py4c/V6I/Od2VIXFAdpiO2kc0SV2qTJsRAaqSIcM9aruW4ASLNe2kOEo1inXAAkIhpFzAHTc358HKbvpNUg=="], + "rehype-expressive-code": ["rehype-expressive-code@0.41.2", "", { "dependencies": { "expressive-code": "^0.41.2" } }, "sha512-vHYfWO9WxAw6kHHctddOt+P4266BtyT1mrOIuxJD+1ELuvuJAa5uBIhYt0OVMyOhlvf57hzWOXJkHnMhpaHyxw=="], "rehype-format": ["rehype-format@5.0.1", "", { "dependencies": { "@types/hast": "^3.0.0", "hast-util-format": "^1.0.0" } }, "sha512-zvmVru9uB0josBVpr946OR8ui7nJEdzZobwLOOqHb/OOD88W0Vk2SqLwoVOj0fM6IPCCO6TaV9CvQvJMWwukFQ=="], @@ -1371,7 +1390,7 @@ "remark-stringify": ["remark-stringify@11.0.0", "", { "dependencies": { "@types/mdast": "^4.0.0", "mdast-util-to-markdown": "^2.0.0", "unified": "^11.0.0" } }, "sha512-1OSmLd3awB/t8qdoEOMazZkNsfVTeY4fTsgzcQFdXNq8ToTN4ZGwrMnlda4K6smTFKD+GRV6O48i6Z4iKgPPpw=="], - "remeda": ["remeda@2.26.0", "", { "dependencies": { "type-fest": "^4.41.0" } }, "sha512-lmNNwtaC6Co4m0WTTNoZ/JlpjEqAjPZO0+czC9YVRQUpkbS4x8Hmh+Mn9HPfJfiXqUQ5IXXgSXSOB2pBKAytdA=="], + "remeda": ["remeda@2.22.3", "", { "dependencies": { "type-fest": "^4.40.1" } }, "sha512-Ka6965m9Zu9OLsysWxVf3jdJKmp6+PKzDv7HWHinEevf0JOJ9y02YpjiC/sKxRpCqGhVyvm1U+0YIj+E6DMgKw=="], "restructure": ["restructure@3.0.2", "", {}, "sha512-gSfoiOEA0VPE6Tukkrr7I0RBdE0s7H1eFCDBk05l1KIQT1UIKNc5JZy6jdyW6eYH3aR3g5b3PuL77rq0hvwtAw=="], @@ -1383,7 +1402,9 @@ "retext-stringify": ["retext-stringify@4.0.0", "", { "dependencies": { "@types/nlcst": "^2.0.0", "nlcst-to-string": "^4.0.0", "unified": "^11.0.0" } }, "sha512-rtfN/0o8kL1e+78+uxPTqu1Klt0yPzKuQ2BfWwwfgIUSayyzxpM1PJzkKt4V8803uB9qSy32MvI7Xep9khTpiA=="], - "rollup": ["rollup@4.45.0", "", { "dependencies": { "@types/estree": "1.0.8" }, "optionalDependencies": { "@rollup/rollup-android-arm-eabi": "4.45.0", "@rollup/rollup-android-arm64": "4.45.0", "@rollup/rollup-darwin-arm64": "4.45.0", "@rollup/rollup-darwin-x64": "4.45.0", "@rollup/rollup-freebsd-arm64": "4.45.0", "@rollup/rollup-freebsd-x64": "4.45.0", "@rollup/rollup-linux-arm-gnueabihf": "4.45.0", "@rollup/rollup-linux-arm-musleabihf": "4.45.0", "@rollup/rollup-linux-arm64-gnu": "4.45.0", "@rollup/rollup-linux-arm64-musl": "4.45.0", "@rollup/rollup-linux-loongarch64-gnu": "4.45.0", "@rollup/rollup-linux-powerpc64le-gnu": "4.45.0", "@rollup/rollup-linux-riscv64-gnu": "4.45.0", "@rollup/rollup-linux-riscv64-musl": "4.45.0", "@rollup/rollup-linux-s390x-gnu": "4.45.0", "@rollup/rollup-linux-x64-gnu": "4.45.0", "@rollup/rollup-linux-x64-musl": "4.45.0", "@rollup/rollup-win32-arm64-msvc": "4.45.0", "@rollup/rollup-win32-ia32-msvc": "4.45.0", "@rollup/rollup-win32-x64-msvc": "4.45.0", "fsevents": "~2.3.2" }, "bin": { "rollup": "dist/bin/rollup" } }, "sha512-WLjEcJRIo7i3WDDgOIJqVI2d+lAC3EwvOGy+Xfq6hs+GQuAA4Di/H72xmXkOhrIWFg2PFYSKZYfH0f4vfKXN4A=="], + "rfdc": ["rfdc@1.4.1", "", {}, "sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA=="], + + "rollup": ["rollup@4.41.1", "", { "dependencies": { "@types/estree": "1.0.7" }, "optionalDependencies": { "@rollup/rollup-android-arm-eabi": "4.41.1", "@rollup/rollup-android-arm64": "4.41.1", "@rollup/rollup-darwin-arm64": "4.41.1", "@rollup/rollup-darwin-x64": "4.41.1", "@rollup/rollup-freebsd-arm64": "4.41.1", "@rollup/rollup-freebsd-x64": "4.41.1", "@rollup/rollup-linux-arm-gnueabihf": "4.41.1", "@rollup/rollup-linux-arm-musleabihf": "4.41.1", "@rollup/rollup-linux-arm64-gnu": "4.41.1", 
"@rollup/rollup-linux-arm64-musl": "4.41.1", "@rollup/rollup-linux-loongarch64-gnu": "4.41.1", "@rollup/rollup-linux-powerpc64le-gnu": "4.41.1", "@rollup/rollup-linux-riscv64-gnu": "4.41.1", "@rollup/rollup-linux-riscv64-musl": "4.41.1", "@rollup/rollup-linux-s390x-gnu": "4.41.1", "@rollup/rollup-linux-x64-gnu": "4.41.1", "@rollup/rollup-linux-x64-musl": "4.41.1", "@rollup/rollup-win32-arm64-msvc": "4.41.1", "@rollup/rollup-win32-ia32-msvc": "4.41.1", "@rollup/rollup-win32-x64-msvc": "4.41.1", "fsevents": "~2.3.2" }, "bin": { "rollup": "dist/bin/rollup" } }, "sha512-cPmwD3FnFv8rKMBc1MxWCwVQFxwf1JEmSX3iQXrRVVG15zerAIXRjMFVWnd5Q5QvgKF7Aj+5ykXFhUl+QGnyOw=="], "router": ["router@2.2.0", "", { "dependencies": { "debug": "^4.4.0", "depd": "^2.0.0", "is-promise": "^4.0.0", "parseurl": "^1.3.3", "path-to-regexp": "^8.0.0" } }, "sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ=="], @@ -1393,6 +1414,8 @@ "safe-regex-test": ["safe-regex-test@1.1.0", "", { "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", "is-regex": "^1.2.1" } }, "sha512-x/+Cz4YrimQxQccJf5mKEbIa1NzeCRNI5Ecl/ekmlYaampdNLPalVyIcCZNNH3MvmqBugV5TMYZXv0ljslUlaw=="], + "safe-stable-stringify": ["safe-stable-stringify@2.5.0", "", {}, "sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA=="], + "safer-buffer": ["safer-buffer@2.1.2", "", {}, "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="], "sax": ["sax@1.2.1", "", {}, "sha512-8I2a3LovHTOpm7NV5yOyO8IHqgVsfK4+UuySrXU8YXkSRX7k6hCV9b3HrkKCr3nMpgj+0bmocaJJWpvp1oc7ZA=="], @@ -1413,14 +1436,10 @@ "setprototypeof": ["setprototypeof@1.2.0", "", {}, "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw=="], - "sha.js": ["sha.js@2.4.12", "", { "dependencies": { "inherits": "^2.0.4", "safe-buffer": "^5.2.1", "to-buffer": "^1.2.0" }, "bin": { "sha.js": "bin.js" } }, "sha512-8LzC5+bvI45BjpfXU8V5fdU2mfeKiQe1D1gIMn7XUlF3OTUrpdJpPPH4EMAnF0DsHHdSZqCdSss5qCmJKuiO3w=="], + "sha.js": ["sha.js@2.4.11", "", { "dependencies": { "inherits": "^2.0.1", "safe-buffer": "^5.0.1" }, "bin": { "sha.js": "./bin.js" } }, "sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ=="], "sharp": ["sharp@0.32.5", "", { "dependencies": { "color": "^4.2.3", "detect-libc": "^2.0.2", "node-addon-api": "^6.1.0", "prebuild-install": "^7.1.1", "semver": "^7.5.4", "simple-get": "^4.0.1", "tar-fs": "^3.0.4", "tunnel-agent": "^0.6.0" } }, "sha512-0dap3iysgDkNaPOaOL4X/0akdu0ma62GcdC2NBQ+93eqpePdDdr2/LM0sFdDSMmN7yS+odyZtPsb7tx/cYBKnQ=="], - "shebang-command": ["shebang-command@2.0.0", "", { "dependencies": { "shebang-regex": "^3.0.0" } }, "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA=="], - - "shebang-regex": ["shebang-regex@3.0.0", "", {}, "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A=="], - "shiki": ["shiki@3.4.2", "", { "dependencies": { "@shikijs/core": "3.4.2", "@shikijs/engine-javascript": "3.4.2", "@shikijs/engine-oniguruma": "3.4.2", "@shikijs/langs": "3.4.2", "@shikijs/themes": "3.4.2", "@shikijs/types": "3.4.2", "@shikijs/vscode-textmate": "^10.0.2", "@types/hast": "^3.0.4" } }, "sha512-wuxzZzQG8kvZndD7nustrNFIKYJ1jJoWIPaBpVe2+KHSvtzMi4SBjOxrigs8qeqce/l3U0cwiC+VAkLKSunHQQ=="], "side-channel": ["side-channel@1.1.0", "", { "dependencies": { "es-errors": "^1.3.0", "object-inspect": "^1.13.3", "side-channel-list": 
"^1.0.0", "side-channel-map": "^1.0.1", "side-channel-weakmap": "^1.0.2" } }, "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw=="], @@ -1441,43 +1460,51 @@ "sitemap": ["sitemap@8.0.0", "", { "dependencies": { "@types/node": "^17.0.5", "@types/sax": "^1.2.1", "arg": "^5.0.0", "sax": "^1.2.4" }, "bin": { "sitemap": "dist/cli.js" } }, "sha512-+AbdxhM9kJsHtruUF39bwS/B0Fytw6Fr1o4ZAIAEqA6cke2xcoO2GleBw9Zw7nRzILVEgz7zBM5GiTJjie1G9A=="], - "smol-toml": ["smol-toml@1.4.1", "", {}, "sha512-CxdwHXyYTONGHThDbq5XdwbFsuY4wlClRGejfE2NtwUtiHYsP1QtNsHb/hnj31jKYSchztJsaA8pSQoVzkfCFg=="], + "smol-toml": ["smol-toml@1.3.4", "", {}, "sha512-UOPtVuYkzYGee0Bd2Szz8d2G3RfMfJ2t3qVdZUAozZyAk+a0Sxa+QKix0YCwjL/A1RR0ar44nCxaoN9FxdJGwA=="], "solid-js": ["solid-js@1.9.7", "", { "dependencies": { "csstype": "^3.1.0", "seroval": "~1.3.0", "seroval-plugins": "~1.3.0" } }, "sha512-/saTKi8iWEM233n5OSi1YHCCuh66ZIQ7aK2hsToPe4tqGm7qAejU1SwNuTPivbWAYq7SjuHVVYxxuZQNRbICiw=="], "solid-refresh": ["solid-refresh@0.6.3", "", { "dependencies": { "@babel/generator": "^7.23.6", "@babel/helper-module-imports": "^7.22.15", "@babel/types": "^7.23.6" }, "peerDependencies": { "solid-js": "^1.3" } }, "sha512-F3aPsX6hVw9ttm5LYlth8Q15x6MlI/J3Dn+o3EQyRTtTxidepSTwAYdozt01/YA+7ObcciagGEyXIopGZzQtbA=="], + "sonic-boom": ["sonic-boom@2.8.0", "", { "dependencies": { "atomic-sleep": "^1.0.0" } }, "sha512-kuonw1YOYYNOve5iHdSahXPOK49GqwA+LZhI6Wz/l0rP57iKyXXIHaRagOBHAPmGwJC6od2Z9zgvZ5loSgMlVg=="], + "source-map": ["source-map@0.7.4", "", {}, "sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA=="], "source-map-js": ["source-map-js@1.2.1", "", {}, "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA=="], "space-separated-tokens": ["space-separated-tokens@2.0.2", "", {}, "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q=="], - "sst": ["sst@3.17.8", "", { "dependencies": { "aws-sdk": "2.1692.0", "aws4fetch": "1.0.18", "jose": "5.2.3", "opencontrol": "0.0.6", "openid-client": "5.6.4" }, "optionalDependencies": { "sst-darwin-arm64": "3.17.8", "sst-darwin-x64": "3.17.8", "sst-linux-arm64": "3.17.8", "sst-linux-x64": "3.17.8", "sst-linux-x86": "3.17.8", "sst-win32-arm64": "3.17.8", "sst-win32-x64": "3.17.8", "sst-win32-x86": "3.17.8" }, "bin": { "sst": "bin/sst.mjs" } }, "sha512-P/a9/ZsjtQRrTBerBMO1ODaVa5HVTmNLrQNJiYvu2Bgd0ov+vefQeHv6oima8HLlPwpDIPS2gxJk8BZrTZMfCA=="], + "split2": ["split2@3.2.2", "", { "dependencies": { "readable-stream": "^3.0.0" } }, "sha512-9NThjpgZnifTkJpzTZ7Eue85S49QwpNhZTq6GRJwObb6jnLFNGB7Qm73V5HewTROPyxD0C29xqmaI68bQtV+hg=="], - "sst-darwin-arm64": ["sst-darwin-arm64@3.17.8", "", { "os": "darwin", "cpu": "arm64" }, "sha512-50P6YRMnZVItZUfB0+NzqMww2mmm4vB3zhTVtWUtGoXeiw78g1AEnVlmS28gYXPHM1P987jTvR7EON9u9ig/Dg=="], + "sst": ["sst@3.17.6", "", { "dependencies": { "aws-sdk": "2.1692.0", "aws4fetch": "1.0.18", "jose": "5.2.3", "opencontrol": "0.0.6", "openid-client": "5.6.4" }, "optionalDependencies": { "sst-darwin-arm64": "3.17.6", "sst-darwin-x64": "3.17.6", "sst-linux-arm64": "3.17.6", "sst-linux-x64": "3.17.6", "sst-linux-x86": "3.17.6", "sst-win32-arm64": "3.17.6", "sst-win32-x64": "3.17.6", "sst-win32-x86": "3.17.6" }, "bin": { "sst": "bin/sst.mjs" } }, "sha512-p+AcqwfYQUdkxeRjCikQoTMviPCBiGoU7M0vcV6GDVmVis8hzhVw4EFfHTafZC+aWfy1Ke2UQi66vZlEVWuEqA=="], - "sst-darwin-x64": ["sst-darwin-x64@3.17.8", "", { "os": "darwin", "cpu": "x64" }, 
"sha512-P0pnMHCmpkpcsxkWpilmeoD79LkbkoIcv6H0aeM9ArT/71/JBhvqH+HjMHSJCzni/9uR6er+nH5F+qol0UO6Bw=="], + "sst-darwin-arm64": ["sst-darwin-arm64@3.17.6", "", { "os": "darwin", "cpu": "arm64" }, "sha512-6tb7KlcPR7PTi3ofQv8dX/n6Jf7pNP9VfrnYL4HBWnWrcYaZeJ5MWobILfIJ/y2jHgoqmg9e5C3266Eds0JQyw=="], - "sst-linux-arm64": ["sst-linux-arm64@3.17.8", "", { "os": "linux", "cpu": "arm64" }, "sha512-vun54YA/UzprCu9p8BC4rMwFU5Cj9xrHAHYLYUp/yq4H0pfmBIiQM62nsfIKizRThe/TkBFy60EEi9myf6raYA=="], + "sst-darwin-x64": ["sst-darwin-x64@3.17.6", "", { "os": "darwin", "cpu": "x64" }, "sha512-lFakq6/EgTuBSjbl8Kry4pfgAPEIyn6o7ZkyRz3hz5331wUaX88yfjs3tL9JQ8Ey6jBUYxwhP/Q1n7fzIG046g=="], - "sst-linux-x64": ["sst-linux-x64@3.17.8", "", { "os": "linux", "cpu": "x64" }, "sha512-HqByCaLE2gEJbM20P1QRd+GqDMAiieuU53FaZA1F+AGxQi+kR82NWjrPqFcMj4dMYg8w/TWXuV+G5+PwoUmpDw=="], + "sst-linux-arm64": ["sst-linux-arm64@3.17.6", "", { "os": "linux", "cpu": "arm64" }, "sha512-SdTxXMbTEdiwOqp37w31kXv97vHqSx3oK9h/76lKg7V9k5JxPJ6JMefPLhoKWwK0Zh6AndY2zo2oRoEv4SIaDw=="], - "sst-linux-x86": ["sst-linux-x86@3.17.8", "", { "os": "linux", "cpu": "none" }, "sha512-bCd6QM3MejfSmdvg8I/k+aUJQIZEQJg023qmN78fv00vwlAtfECvY7tjT9E2m3LDp33pXrcRYbFOQzPu+tWFfA=="], + "sst-linux-x64": ["sst-linux-x64@3.17.6", "", { "os": "linux", "cpu": "x64" }, "sha512-qneh7uWDiTUYx8X1Y3h2YVw3SJ0ybBBlRrVybIvCM09JqQ8+qq/XjKXGzA/3/EF0Jr7Ug8cARSn9CwxhdQGN7Q=="], - "sst-win32-arm64": ["sst-win32-arm64@3.17.8", "", { "os": "win32", "cpu": "arm64" }, "sha512-pilx0n8gm4aHJae/vNiqIwZkWF3tdwWzD/ON7hkytw+CVSZ0FXtyFW/yO/+2u3Yw0Kj0lSWPnUqYgm/eHPLwQA=="], + "sst-linux-x86": ["sst-linux-x86@3.17.6", "", { "os": "linux", "cpu": "none" }, "sha512-pU3D5OeqnmfxGqN31DxuwWnc1OayxhkErnITHhZ39D0MTiwbIgCapH26FuLW8B08/uxJWG8djUlOboCRhSBvWA=="], - "sst-win32-x64": ["sst-win32-x64@3.17.8", "", { "os": "win32", "cpu": "x64" }, "sha512-Jb0FVRyiOtESudF1V8ucW65PuHrx/iOHUamIO0JnbujWNHZBTRPB2QHN1dbewgkueYDaCmyS8lvuIImLwYJnzQ=="], + "sst-win32-arm64": ["sst-win32-arm64@3.17.6", "", { "os": "win32", "cpu": "arm64" }, "sha512-Rr3RTYWAsH9sM9CbM/sAZCk7dB1OsSAljjJuuHMvdSAYW3RDpXEza0PBJGxnBID2eOrpswEchzMPL2d8LtL7oA=="], - "sst-win32-x86": ["sst-win32-x86@3.17.8", "", { "os": "win32", "cpu": "none" }, "sha512-oVmFa/PoElQmfnGJlB0w6rPXiYuldiagO6AbrLMT/6oAnWerLQ8Uhv9tJWfMh3xtPLImQLTjxDo1v0AIzEv9QA=="], + "sst-win32-x64": ["sst-win32-x64@3.17.6", "", { "os": "win32", "cpu": "x64" }, "sha512-yZ3roxwI0Wve9PFzdrrF1kfzCmIMFCCoa8qKeXY7LxCJ4QQIqHbCOccLK1Wv/MIU/mcZHWXTQVCLHw77uaa0GQ=="], - "statuses": ["statuses@2.0.2", "", {}, "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw=="], + "sst-win32-x86": ["sst-win32-x86@3.17.6", "", { "os": "win32", "cpu": "none" }, "sha512-zV7TJWPJN9PmIXr15iXFSs0tbGsa52oBR3+xiKrUj2qj9XsZe7HBFwskRnHyiFq0durZY9kk9ZtoVlpuUuzr1g=="], + + "stacktracey": ["stacktracey@2.1.8", "", { "dependencies": { "as-table": "^1.0.36", "get-source": "^2.0.12" } }, "sha512-Kpij9riA+UNg7TnphqjH7/CzctQ/owJGNbFkfEeve4Z4uxT5+JapVLFXcsurIfN34gnTWZNJ/f7NMG0E8JDzTw=="], + + "statuses": ["statuses@2.0.1", "", {}, "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ=="], "stoppable": ["stoppable@1.1.0", "", {}, "sha512-KXDYZ9dszj6bzvnEMRYvxgeTHU74QBFL54XKtP3nyMuJ81CFYtABZ3bAzL2EdFUaEwJOBOgENyFj3R7oTzDyyw=="], "stream-replace-string": ["stream-replace-string@2.0.0", "", {}, "sha512-TlnjJ1C0QrmxRNrON00JvaFFlNh5TTG00APw23j74ET7gkQpTASi6/L2fuiav8pzK715HXtUeClpBTw2NPSn6w=="], - "streamx": ["streamx@2.22.1", "", { "dependencies": { "fast-fifo": 
"^1.3.2", "text-decoder": "^1.1.0" }, "optionalDependencies": { "bare-events": "^2.2.0" } }, "sha512-znKXEBxfatz2GBNK02kRnCXjV+AA4kjZIUxeWSr3UGirZMJfTE9uiwKHobnbgxWyL/JWro8tTq+vOqAK1/qbSA=="], + "stream-shift": ["stream-shift@1.0.3", "", {}, "sha512-76ORR0DO1o1hlKwTbi/DM3EXWGf3ZJYO8cXX5RJwnul2DEg2oyoZyjLNoQM8WsvZiFKCRfC1O0J7iCvie3RZmQ=="], + + "streamx": ["streamx@2.22.0", "", { "dependencies": { "fast-fifo": "^1.3.2", "text-decoder": "^1.1.0" }, "optionalDependencies": { "bare-events": "^2.2.0" } }, "sha512-sLh1evHOzBy/iWRiR6d1zRcLao4gGZr3C1kzNz4fopCOKJb6xD9ub8Mpi9Mr1R6id5o43S+d93fI48UC5uM9aw=="], "string-width": ["string-width@7.2.0", "", { "dependencies": { "emoji-regex": "^10.3.0", "get-east-asian-width": "^1.0.0", "strip-ansi": "^7.1.0" } }, "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ=="], @@ -1487,33 +1514,39 @@ "strip-ansi": ["strip-ansi@7.1.0", "", { "dependencies": { "ansi-regex": "^6.0.1" } }, "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ=="], - "strip-json-comments": ["strip-json-comments@2.0.1", "", {}, "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ=="], + "strip-json-comments": ["strip-json-comments@3.1.1", "", {}, "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig=="], - "style-to-js": ["style-to-js@1.1.17", "", { "dependencies": { "style-to-object": "1.0.9" } }, "sha512-xQcBGDxJb6jjFCTzvQtfiPn6YvvP2O8U1MDIPNfJQlWMYfktPy+iGsHE7cssjs7y84d9fQaK4UF3RIJaAHSoYA=="], + "strtok3": ["strtok3@10.2.2", "", { "dependencies": { "@tokenizer/token": "^0.3.0", "peek-readable": "^7.0.0" } }, "sha512-Xt18+h4s7Z8xyZ0tmBoRmzxcop97R4BAh+dXouUDCYn+Em+1P3qpkUfI5ueWLT8ynC5hZ+q4iPEmGG1urvQGBg=="], - "style-to-object": ["style-to-object@1.0.9", "", { "dependencies": { "inline-style-parser": "0.2.4" } }, "sha512-G4qppLgKu/k6FwRpHiGiKPaPTFcG3g4wNVX/Qsfu+RqQM30E7Tyu/TEgxcL9PNLF5pdRLwQdE3YKKf+KF2Dzlw=="], + "style-to-js": ["style-to-js@1.1.16", "", { "dependencies": { "style-to-object": "1.0.8" } }, "sha512-/Q6ld50hKYPH3d/r6nr117TZkHR0w0kGGIVfpG9N6D8NymRPM9RqCUv4pRpJ62E5DqOYx2AFpbZMyCPnjQCnOw=="], - "supports-color": ["supports-color@10.0.0", "", {}, "sha512-HRVVSbCCMbj7/kdWF9Q+bbckjBHLtHMEoJWlkmYzzdwhYMkjkOwubLM6t7NbWKjgKamGDrWL1++KrjUO1t9oAQ=="], + "style-to-object": ["style-to-object@1.0.8", "", { "dependencies": { "inline-style-parser": "0.2.4" } }, "sha512-xT47I/Eo0rwJmaXC4oilDGDWLohVhR6o/xAQcPQN8q6QBuZVL8qMYL85kLmST5cPjAorwvqIA4qXTRQoYHaL6g=="], - "tar-fs": ["tar-fs@3.1.0", "", { "dependencies": { "pump": "^3.0.0", "tar-stream": "^3.1.5" }, "optionalDependencies": { "bare-fs": "^4.0.1", "bare-path": "^3.0.0" } }, "sha512-5Mty5y/sOF1YWj1J6GiBodjlDc05CUR8PKXrsnFAiSG0xA+GHeWLovaZPYUDXkH/1iKRf2+M5+OrRgzC7O9b7w=="], + "supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], + + "swr": ["swr@2.3.3", "", { "dependencies": { "dequal": "^2.0.3", "use-sync-external-store": "^1.4.0" }, "peerDependencies": { "react": "^16.11.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" } }, "sha512-dshNvs3ExOqtZ6kJBaAsabhPdHyeY4P2cKwRCniDVifBMoG/SVI7tfLWqPXriVspf2Rg4tPzXJTnwaihIeFw2A=="], + + "tar-fs": ["tar-fs@3.0.9", "", { "dependencies": { "pump": "^3.0.0", "tar-stream": "^3.1.5" }, "optionalDependencies": { "bare-fs": "^4.0.1", "bare-path": "^3.0.0" } }, 
"sha512-XF4w9Xp+ZQgifKakjZYmFdkLoSWd34VGKcsTCwlNWM7QG3ZbaxnTsaBwnjFZqHRf/rROxaR8rXnbtwdvaDI+lA=="], "tar-stream": ["tar-stream@3.1.7", "", { "dependencies": { "b4a": "^1.6.4", "fast-fifo": "^1.2.0", "streamx": "^2.15.0" } }, "sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ=="], "text-decoder": ["text-decoder@1.2.3", "", { "dependencies": { "b4a": "^1.6.4" } }, "sha512-3/o9z3X0X0fTupwsYvR03pJ/DjWuqqrfwBgTQzdWDiQSm9KitAyz/9WqsT2JQW7KV2m+bC2ol/zqpW37NHxLaA=="], + "thread-stream": ["thread-stream@0.15.2", "", { "dependencies": { "real-require": "^0.1.0" } }, "sha512-UkEhKIg2pD+fjkHQKyJO3yoIvAP3N6RlNFt2dUhcS1FGvCD1cQa1M/PGknCLFIyZdtJOWQjejp7bdNqmN7zwdA=="], + + "throttleit": ["throttleit@2.1.0", "", {}, "sha512-nt6AMGKW1p/70DF/hGBdJB57B8Tspmbp5gfJ8ilhLnt7kkr2ye7hzD6NVG8GGErk2HWF34igrL2CXmNIkzKqKw=="], + "tiny-inflate": ["tiny-inflate@1.0.3", "", {}, "sha512-pkY1fj1cKHb2seWDy0B16HeWyczlJA9/WW3u3c4z/NiWDsO3DOU5D7nhTLE9CF0yXv/QZFY7sEJmj24dK+Rrqw=="], "tinyexec": ["tinyexec@0.3.2", "", {}, "sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA=="], "tinyglobby": ["tinyglobby@0.2.14", "", { "dependencies": { "fdir": "^6.4.4", "picomatch": "^4.0.2" } }, "sha512-tX5e7OM1HnYr2+a2C/4V0htOcSQcoSTH9KgJnVvNm5zm/cyEWKJ7j7YutsH9CxMdtOkkLFy2AHrMci9IM8IPZQ=="], - "to-buffer": ["to-buffer@1.2.1", "", { "dependencies": { "isarray": "^2.0.5", "safe-buffer": "^5.2.1", "typed-array-buffer": "^1.0.3" } }, "sha512-tB82LpAIWjhLYbqjx3X4zEeHN6M8CiuOEy2JY8SEQVdYRe3CCHOFaqrBW1doLDrfpWhplcW7BL+bO3/6S3pcDQ=="], - - "toad-cache": ["toad-cache@3.7.0", "", {}, "sha512-/m8M+2BJUpoJdgAHoG+baCwBT+tf2VraSfkBgl0Y00qIWt41DJ8R5B8nsEw0I58YwF5IZH6z24/2TobDKnqSWw=="], - "toidentifier": ["toidentifier@1.0.1", "", {}, "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA=="], - "toolbeam-docs-theme": ["toolbeam-docs-theme@0.4.3", "", { "peerDependencies": { "@astrojs/starlight": "^0.34.3", "astro": "^5.7.13" } }, "sha512-3um/NsSq4xFeKbKrNGPHIzfTixwnEVvroqA8Q+lecnYHHJ5TtiYTggHDqewOW+I67t0J1IVBwVKUPjxiQfIcog=="], + "token-types": ["token-types@6.0.0", "", { "dependencies": { "@tokenizer/token": "^0.3.0", "ieee754": "^1.2.1" } }, "sha512-lbDrTLVsHhOMljPscd0yitpozq7Ga2M5Cvez5AjGg8GASBjtt6iERCAJ93yommPmz62fb45oFIXHEZ3u9bfJEA=="], + + "toolbeam-docs-theme": ["toolbeam-docs-theme@0.4.1", "", { "peerDependencies": { "@astrojs/starlight": "^0.34.3", "astro": "^5.7.13" } }, "sha512-lTI4dHZaVNQky29m7sb36Oy4tWPwxsCuFxFjF8hgGW0vpV+S6qPvI9SwsJFvdE/OHO5DoI7VMbryV1pxZHkkHQ=="], "tr46": ["tr46@0.0.3", "", {}, "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw=="], @@ -1521,9 +1554,11 @@ "trough": ["trough@2.2.0", "", {}, "sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw=="], + "ts-lsp-client": ["ts-lsp-client@1.0.3", "", { "dependencies": { "json-rpc-2.0": "^1.7.0", "pino": "^7.0.5", "pino-pretty": "^5.1.3", "tslib": "~2.6.2" } }, "sha512-0ItrsqvNUM9KNFGbeT1N8jSi9gvasGOvxJUXjGf4P2TX0w250AUWLeRStaSrQbYcFDshDtE5d4BshUmYwodDgw=="], + "tsconfck": ["tsconfck@3.1.6", "", { "peerDependencies": { "typescript": "^5.0.0" }, "optionalPeers": ["typescript"], "bin": { "tsconfck": "bin/tsconfck.js" } }, "sha512-ks6Vjr/jEw0P1gmOVwutM3B7fWxoWBL2KRDb1JfqGVawBmO5UsvmWOQFGHBPl5yxYz4eERr19E6L7NMv+Fej4w=="], - "tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], + "tslib": 
["tslib@2.6.3", "", {}, "sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ=="], "tunnel-agent": ["tunnel-agent@0.6.0", "", { "dependencies": { "safe-buffer": "^5.0.1" } }, "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w=="], @@ -1533,12 +1568,12 @@ "type-is": ["type-is@2.0.1", "", { "dependencies": { "content-type": "^1.0.5", "media-typer": "^1.1.0", "mime-types": "^3.0.0" } }, "sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw=="], - "typed-array-buffer": ["typed-array-buffer@1.0.3", "", { "dependencies": { "call-bound": "^1.0.3", "es-errors": "^1.3.0", "is-typed-array": "^1.1.14" } }, "sha512-nAYYwfY3qnzX30IkA6AQZjVbtK6duGontcQm1WSG1MD94YLqK0515GNApXkoxKOWMusVssAHWLh9SeaoefYFGw=="], - "typescript": ["typescript@5.8.2", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-aJn6wq13/afZp/jT9QZmwEjDqqvSGp1VT5GVg+f/t6/oVyrgXM6BY1h9BRh/O5p3PlUPAe+WuiEZOmb/49RqoQ=="], "ufo": ["ufo@1.6.1", "", {}, "sha512-9a4/uxlTWJ4+a5i0ooc1rU7C7YOw3wT+UGqdeNNHWnOF9qcMBgLRS+4IYUqbczewFx4mLEig6gawh7X6mFlEkA=="], + "uint8array-extras": ["uint8array-extras@1.4.0", "", {}, "sha512-ZPtzy0hu4cZjv3z5NW9gfKnNLjoz4y6uv4HlelAjDK7sY/xOkKZv9xK/WQpcsBB3jEybChz9DPC2U/+cusjJVQ=="], + "ultrahtml": ["ultrahtml@1.6.0", "", {}, "sha512-R9fBn90VTJrqqLDwyMph+HGne8eqY1iPfYhPzZrvKpIfwkWZbcYlfpsb8B9dTvBfpy1/hqAD7Wi8EKfP9e8zdw=="], "uncrypto": ["uncrypto@0.1.3", "", {}, "sha512-Ql87qFHB3s/De2ClA9e0gsnS6zXG27SkTiSJwjCc9MebbfapQfuPzumMIUMi38ezPZVNFcHI9sUIepeQfw8J8Q=="], @@ -1555,7 +1590,7 @@ "unified": ["unified@11.0.5", "", { "dependencies": { "@types/unist": "^3.0.0", "bail": "^2.0.0", "devlop": "^1.0.0", "extend": "^3.0.0", "is-plain-obj": "^4.0.0", "trough": "^2.0.0", "vfile": "^6.0.0" } }, "sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA=="], - "unifont": ["unifont@0.5.2", "", { "dependencies": { "css-tree": "^3.0.0", "ofetch": "^1.4.1", "ohash": "^2.0.0" } }, "sha512-LzR4WUqzH9ILFvjLAUU7dK3Lnou/qd5kD+IakBtBK4S15/+x2y9VX+DcWQv6s551R6W+vzwgVS6tFg3XggGBgg=="], + "unifont": ["unifont@0.5.0", "", { "dependencies": { "css-tree": "^3.0.0", "ohash": "^2.0.0" } }, "sha512-4DueXMP5Hy4n607sh+vJ+rajoLu778aU3GzqeTCqsD/EaUcvqZT9wPC8kgK6Vjh22ZskrxyRCR71FwNOaYn6jA=="], "unist-util-find-after": ["unist-util-find-after@5.0.0", "", { "dependencies": { "@types/unist": "^3.0.0", "unist-util-is": "^6.0.0" } }, "sha512-amQa0Ep2m6hE2g72AugUItjbuM8X8cGQnFoHk0pGfrFeT9GZhzN5SW8nRsiGKK7Aif4CrACPENkA6P/Lw6fHGQ=="], @@ -1577,27 +1612,23 @@ "unist-util-visit-parents": ["unist-util-visit-parents@6.0.1", "", { "dependencies": { "@types/unist": "^3.0.0", "unist-util-is": "^6.0.0" } }, "sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw=="], - "universal-github-app-jwt": ["universal-github-app-jwt@2.2.2", "", {}, "sha512-dcmbeSrOdTnsjGjUfAlqNDJrhxXizjAz94ija9Qw8YkZ1uu0d+GoZzyH+Jb9tIIqvGsadUfwg+22k5aDqqwzbw=="], - - "universal-user-agent": ["universal-user-agent@7.0.3", "", {}, "sha512-TmnEAEAsBJVZM/AADELsK76llnwcf9vMKuPz8JflO1frO8Lchitr0fNaN9d+Ap0BjKtqWqd/J17qeDnXh8CL2A=="], - "unpipe": ["unpipe@1.0.0", "", {}, "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ=="], - "unstorage": ["unstorage@1.16.1", "", { "dependencies": { "anymatch": "^3.1.3", "chokidar": "^4.0.3", "destr": "^2.0.5", "h3": "^1.15.3", "lru-cache": "^10.4.3", "node-fetch-native": "^1.6.6", "ofetch": "^1.4.1", "ufo": 
"^1.6.1" }, "peerDependencies": { "@azure/app-configuration": "^1.8.0", "@azure/cosmos": "^4.2.0", "@azure/data-tables": "^13.3.0", "@azure/identity": "^4.6.0", "@azure/keyvault-secrets": "^4.9.0", "@azure/storage-blob": "^12.26.0", "@capacitor/preferences": "^6.0.3 || ^7.0.0", "@deno/kv": ">=0.9.0", "@netlify/blobs": "^6.5.0 || ^7.0.0 || ^8.1.0 || ^9.0.0 || ^10.0.0", "@planetscale/database": "^1.19.0", "@upstash/redis": "^1.34.3", "@vercel/blob": ">=0.27.1", "@vercel/kv": "^1.0.1", "aws4fetch": "^1.0.20", "db0": ">=0.2.1", "idb-keyval": "^6.2.1", "ioredis": "^5.4.2", "uploadthing": "^7.4.4" }, "optionalPeers": ["@azure/app-configuration", "@azure/cosmos", "@azure/data-tables", "@azure/identity", "@azure/keyvault-secrets", "@azure/storage-blob", "@capacitor/preferences", "@deno/kv", "@netlify/blobs", "@planetscale/database", "@upstash/redis", "@vercel/blob", "@vercel/kv", "aws4fetch", "db0", "idb-keyval", "ioredis", "uploadthing"] }, "sha512-gdpZ3guLDhz+zWIlYP1UwQ259tG5T5vYRzDaHMkQ1bBY1SQPutvZnrRjTFaWUUpseErJIgAZS51h6NOcZVZiqQ=="], + "unstorage": ["unstorage@1.16.0", "", { "dependencies": { "anymatch": "^3.1.3", "chokidar": "^4.0.3", "destr": "^2.0.5", "h3": "^1.15.2", "lru-cache": "^10.4.3", "node-fetch-native": "^1.6.6", "ofetch": "^1.4.1", "ufo": "^1.6.1" }, "peerDependencies": { "@azure/app-configuration": "^1.8.0", "@azure/cosmos": "^4.2.0", "@azure/data-tables": "^13.3.0", "@azure/identity": "^4.6.0", "@azure/keyvault-secrets": "^4.9.0", "@azure/storage-blob": "^12.26.0", "@capacitor/preferences": "^6.0.3 || ^7.0.0", "@deno/kv": ">=0.9.0", "@netlify/blobs": "^6.5.0 || ^7.0.0 || ^8.1.0", "@planetscale/database": "^1.19.0", "@upstash/redis": "^1.34.3", "@vercel/blob": ">=0.27.1", "@vercel/kv": "^1.0.1", "aws4fetch": "^1.0.20", "db0": ">=0.2.1", "idb-keyval": "^6.2.1", "ioredis": "^5.4.2", "uploadthing": "^7.4.4" }, "optionalPeers": ["@azure/app-configuration", "@azure/cosmos", "@azure/data-tables", "@azure/identity", "@azure/keyvault-secrets", "@azure/storage-blob", "@capacitor/preferences", "@deno/kv", "@netlify/blobs", "@planetscale/database", "@upstash/redis", "@vercel/blob", "@vercel/kv", "aws4fetch", "db0", "idb-keyval", "ioredis", "uploadthing"] }, "sha512-WQ37/H5A7LcRPWfYOrDa1Ys02xAbpPJq6q5GkO88FBXVSQzHd7+BjEwfRqyaSWCv9MbsJy058GWjjPjcJ16GGA=="], "update-browserslist-db": ["update-browserslist-db@1.1.3", "", { "dependencies": { "escalade": "^3.2.0", "picocolors": "^1.1.1" }, "peerDependencies": { "browserslist": ">= 4.21.0" }, "bin": { "update-browserslist-db": "cli.js" } }, "sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw=="], - "uri-js": ["uri-js@4.4.1", "", { "dependencies": { "punycode": "^2.1.0" } }, "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg=="], - "url": ["url@0.10.3", "", { "dependencies": { "punycode": "1.3.2", "querystring": "0.2.0" } }, "sha512-hzSUW2q06EqL1gKM/a+obYHLIO6ct2hwPuviqTTOcfFVc61UbfJ2Q32+uGL/HCPxKqrdGB5QUwIe7UqlDgwsOQ=="], + "use-sync-external-store": ["use-sync-external-store@1.5.0", "", { "peerDependencies": { "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" } }, "sha512-Rb46I4cGGVBmjamjphe8L/UnvJD+uPPtTkNvX5mZgqdbavhI4EbgIWJiIHXJ8bc/i9EQGPRh4DwEURJ552Do0A=="], + "util": ["util@0.12.5", "", { "dependencies": { "inherits": "^2.0.3", "is-arguments": "^1.0.4", "is-generator-function": "^1.0.7", "is-typed-array": "^1.1.3", "which-typed-array": "^1.1.2" } }, "sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA=="], 
"util-deprecate": ["util-deprecate@1.0.2", "", {}, "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="], "uuid": ["uuid@8.0.0", "", { "bin": { "uuid": "dist/bin/uuid" } }, "sha512-jOXGuXZAWdsTH7eZLtyXMqUb9EcWMGZNbL9YcGBJl4MH4nrxHmZJhEHvyLFrkxo+28uLb/NYRcStH48fnD0Vzw=="], - "validate-html-nesting": ["validate-html-nesting@1.2.3", "", {}, "sha512-kdkWdCl6eCeLlRShJKbjVOU2kFKxMF8Ghu50n+crEoyx+VKm3FxAxF9z4DCy6+bbTOqNW0+jcIYRnjoIRzigRw=="], + "validate-html-nesting": ["validate-html-nesting@1.2.2", "", {}, "sha512-hGdgQozCsQJMyfK5urgFcWEqsSSrK63Awe0t/IMR0bZ0QMtnuaiHzThW81guu3qx9abLi99NEuiaN6P9gVYsNg=="], "vary": ["vary@1.1.2", "", {}, "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg=="], @@ -1609,13 +1640,17 @@ "vite": ["vite@6.3.5", "", { "dependencies": { "esbuild": "^0.25.0", "fdir": "^6.4.4", "picomatch": "^4.0.2", "postcss": "^8.5.3", "rollup": "^4.34.9", "tinyglobby": "^0.2.13" }, "optionalDependencies": { "fsevents": "~2.3.3" }, "peerDependencies": { "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", "jiti": ">=1.21.0", "less": "*", "lightningcss": "^1.21.0", "sass": "*", "sass-embedded": "*", "stylus": "*", "sugarss": "*", "terser": "^5.16.0", "tsx": "^4.8.1", "yaml": "^2.4.2" }, "optionalPeers": ["@types/node", "jiti", "less", "lightningcss", "sass", "sass-embedded", "stylus", "sugarss", "terser", "tsx", "yaml"], "bin": { "vite": "bin/vite.js" } }, "sha512-cZn6NDFE7wdTpINgs++ZJ4N49W2vRp8LCKrn3Ob1kYNtOo21vfDoaV5GzBfLU4MovSAB8uNRm4jgzVQZ+mBzPQ=="], - "vite-plugin-solid": ["vite-plugin-solid@2.11.7", "", { "dependencies": { "@babel/core": "^7.23.3", "@types/babel__core": "^7.20.4", "babel-preset-solid": "^1.8.4", "merge-anything": "^5.1.7", "solid-refresh": "^0.6.3", "vitefu": "^1.0.4" }, "peerDependencies": { "@testing-library/jest-dom": "^5.16.6 || ^5.17.0 || ^6.*", "solid-js": "^1.7.2", "vite": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0" }, "optionalPeers": ["@testing-library/jest-dom"] }, "sha512-5TgK1RnE449g0Ryxb9BXqem89RSy7fE8XGVCo+Gw84IHgPuPVP7nYNP6WBVAaY/0xw+OqfdQee+kusL0y3XYNg=="], + "vite-plugin-solid": ["vite-plugin-solid@2.11.6", "", { "dependencies": { "@babel/core": "^7.23.3", "@types/babel__core": "^7.20.4", "babel-preset-solid": "^1.8.4", "merge-anything": "^5.1.7", "solid-refresh": "^0.6.3", "vitefu": "^1.0.4" }, "peerDependencies": { "@testing-library/jest-dom": "^5.16.6 || ^5.17.0 || ^6.*", "solid-js": "^1.7.2", "vite": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0" }, "optionalPeers": ["@testing-library/jest-dom"] }, "sha512-Sl5CTqJTGyEeOsmdH6BOgalIZlwH3t4/y0RQuFLMGnvWMBvxb4+lq7x3BSiAw6etf0QexfNJW7HSOO/Qf7pigg=="], - "vitefu": ["vitefu@1.1.1", "", { "peerDependencies": { "vite": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0-beta.0" }, "optionalPeers": ["vite"] }, "sha512-B/Fegf3i8zh0yFbpzZ21amWzHmuNlLlmJT6n7bu5e+pCHUKQIfXSYokrqOBGEMMe9UG2sostKQF9mml/vYaWJQ=="], + "vitefu": ["vitefu@1.0.6", "", { "peerDependencies": { "vite": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0" }, "optionalPeers": ["vite"] }, "sha512-+Rex1GlappUyNN6UfwbVZne/9cYC4+R2XDk9xkNXBKMw6HQagdX9PgZ8V2v1WUSK1wfBLp7qbI1+XSNIlB1xmA=="], "vscode-jsonrpc": ["vscode-jsonrpc@8.2.1", "", {}, "sha512-kdjOSJ2lLIn7r1rtrMbbNCHjyMPfRnowdKjBQ+mGq6NAW5QY2bEZC/khaC5OR8svbbjvLEaIXkOq45e2X9BIbQ=="], - "vscode-languageserver-types": ["vscode-languageserver-types@3.17.5", "", {}, "sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg=="], + "vscode-languageclient": ["vscode-languageclient@8.1.0", "", { 
"dependencies": { "minimatch": "^5.1.0", "semver": "^7.3.7", "vscode-languageserver-protocol": "3.17.3" } }, "sha512-GL4QdbYUF/XxQlAsvYWZRV3V34kOkpRlvV60/72ghHfsYFnS/v2MANZ9P6sHmxFcZKOse8O+L9G7Czg0NUWing=="], + + "vscode-languageserver-protocol": ["vscode-languageserver-protocol@3.17.3", "", { "dependencies": { "vscode-jsonrpc": "8.1.0", "vscode-languageserver-types": "3.17.3" } }, "sha512-924/h0AqsMtA5yK22GgMtCYiMdCOtWTSGgUOkgEDX+wk2b0x4sAfLiO4NxBxqbiVtz7K7/1/RgVrVI0NClZwqA=="], + + "vscode-languageserver-types": ["vscode-languageserver-types@3.17.3", "", {}, "sha512-SYU4z1dL0PyIMd4Vj8YOqFvHu7Hz/enbWtpfnVbJHU4Nd1YNYx8u0ennumc6h48GQNeOLxmwySmnADouT/AuZA=="], "web-namespaces": ["web-namespaces@2.0.1", "", {}, "sha512-bKr1DkiNa2krS7qxNtdrtHAmzuYGFQLiQ13TsorsdT6ULTkPLKuu5+GsFpDlg6JFjUTwX2DyhMPG2be8uPrqsQ=="], @@ -1623,17 +1658,15 @@ "whatwg-url": ["whatwg-url@5.0.0", "", { "dependencies": { "tr46": "~0.0.3", "webidl-conversions": "^3.0.0" } }, "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw=="], - "which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="], - "which-pm-runs": ["which-pm-runs@1.1.0", "", {}, "sha512-n1brCuqClxfFfq/Rb0ICg9giSZqCS+pLtccdag6C2HyufBrh3fBOiy9nb6ggRMvWOVH5GrdJskj5iGTZNxd7SA=="], "which-typed-array": ["which-typed-array@1.1.19", "", { "dependencies": { "available-typed-arrays": "^1.0.7", "call-bind": "^1.0.8", "call-bound": "^1.0.4", "for-each": "^0.3.5", "get-proto": "^1.0.1", "gopd": "^1.2.0", "has-tostringtag": "^1.0.2" } }, "sha512-rEvr90Bck4WZt9HHFC4DJMsjvu7x+r6bImz0/BrbWb7A2djJ8hnZMrWnHo9F8ssv0OMErasDhftrfROTyqSDrw=="], "widest-line": ["widest-line@5.0.0", "", { "dependencies": { "string-width": "^7.0.0" } }, "sha512-c9bZp7b5YtRj2wOe6dlj32MK+Bx/M/d+9VB2SHM1OtsUHR0aV0tdP6DWh/iMt0kWi1t5g1Iudu6hQRNd1A4PVA=="], - "workerd": ["workerd@1.20250709.0", "", { "optionalDependencies": { "@cloudflare/workerd-darwin-64": "1.20250709.0", "@cloudflare/workerd-darwin-arm64": "1.20250709.0", "@cloudflare/workerd-linux-64": "1.20250709.0", "@cloudflare/workerd-linux-arm64": "1.20250709.0", "@cloudflare/workerd-windows-64": "1.20250709.0" }, "bin": { "workerd": "bin/workerd" } }, "sha512-BqLPpmvRN+TYUSG61OkWamsGdEuMwgvabP8m0QOHIfofnrD2YVyWqE1kXJ0GH5EsVEuWamE5sR8XpTfsGBmIpg=="], + "workerd": ["workerd@1.20250525.0", "", { "optionalDependencies": { "@cloudflare/workerd-darwin-64": "1.20250525.0", "@cloudflare/workerd-darwin-arm64": "1.20250525.0", "@cloudflare/workerd-linux-64": "1.20250525.0", "@cloudflare/workerd-linux-arm64": "1.20250525.0", "@cloudflare/workerd-windows-64": "1.20250525.0" }, "bin": { "workerd": "bin/workerd" } }, "sha512-SXJgLREy/Aqw2J71Oah0Pbu+SShbqbTExjVQyRBTM1r7MG7fS5NUlknhnt6sikjA/t4cO09Bi8OJqHdTkrcnYQ=="], - "wrangler": ["wrangler@4.24.3", "", { "dependencies": { "@cloudflare/kv-asset-handler": "0.4.0", "@cloudflare/unenv-preset": "2.3.3", "blake3-wasm": "2.1.5", "esbuild": "0.25.4", "miniflare": "4.20250709.0", "path-to-regexp": "6.3.0", "unenv": "2.0.0-rc.17", "workerd": "1.20250709.0" }, "optionalDependencies": { "fsevents": "~2.3.2" }, "peerDependencies": { "@cloudflare/workers-types": "^4.20250709.0" }, "optionalPeers": ["@cloudflare/workers-types"], "bin": { "wrangler": "bin/wrangler.js", "wrangler2": "bin/wrangler.js" } }, "sha512-stB1Wfs5NKlspsAzz8SBujBKsDqT5lpCyrL+vSUMy3uueEtI1A5qyORbKoJhIguEbwHfWS39mBsxzm6Vm1J2cg=="], + "wrangler": 
["wrangler@4.19.1", "", { "dependencies": { "@cloudflare/kv-asset-handler": "0.4.0", "@cloudflare/unenv-preset": "2.3.2", "blake3-wasm": "2.1.5", "esbuild": "0.25.4", "miniflare": "4.20250525.1", "path-to-regexp": "6.3.0", "unenv": "2.0.0-rc.17", "workerd": "1.20250525.0" }, "optionalDependencies": { "fsevents": "~2.3.2" }, "peerDependencies": { "@cloudflare/workers-types": "^4.20250525.0" }, "optionalPeers": ["@cloudflare/workers-types"], "bin": { "wrangler": "bin/wrangler.js", "wrangler2": "bin/wrangler.js" } }, "sha512-b+ed2SJKauHgndl4Im1wHE+FeSSlrdlEZNuvpc8q/94k4EmRxRkXnwBAsVWuicBxG3HStFLQPGGlvL8wGKTtHw=="], "wrap-ansi": ["wrap-ansi@9.0.0", "", { "dependencies": { "ansi-styles": "^6.2.1", "string-width": "^7.0.0", "strip-ansi": "^7.1.0" } }, "sha512-G8ura3S+3Z2G+mkgNRq8dqaFZAuxfsxpBB8OCTGRTCtp+l/v9nbFNmCUP1BZMts3G1142MsZfn6eeUKrr4PD1Q=="], @@ -1663,45 +1696,43 @@ "yoctocolors": ["yoctocolors@2.1.1", "", {}, "sha512-GQHQqAopRhwU8Kt1DDM8NjibDXHC8eoh1erhGAJPEyveY9qqVeXvVikNKrDz69sHowPMorbPUrH/mx8c50eiBQ=="], - "youch": ["youch@4.1.0-beta.10", "", { "dependencies": { "@poppinss/colors": "^4.1.5", "@poppinss/dumper": "^0.6.4", "@speed-highlight/core": "^1.2.7", "cookie": "^1.0.2", "youch-core": "^0.3.3" } }, "sha512-rLfVLB4FgQneDr0dv1oddCVZmKjcJ6yX6mS4pU82Mq/Dt9a3cLZQ62pDBL4AUO+uVrCvtWz3ZFUL2HFAFJ/BXQ=="], + "youch": ["youch@3.3.4", "", { "dependencies": { "cookie": "^0.7.1", "mustache": "^4.2.0", "stacktracey": "^2.1.8" } }, "sha512-UeVBXie8cA35DS6+nBkls68xaBBXCye0CNznrhszZjTbRVnJKQuNsyLKBTTL4ln1o1rh2PKtv35twV7irj5SEg=="], - "youch-core": ["youch-core@0.3.3", "", { "dependencies": { "@poppinss/exception": "^1.2.2", "error-stack-parser-es": "^1.0.5" } }, "sha512-ho7XuGjLaJ2hWHoK8yFnsUGy2Y5uDpqSTq1FkHLK4/oqKtyUU1AFbOOxY4IpC9f0fTLjwYbslUz0Po5BpD1wrA=="], + "zod": ["zod@3.24.2", "", {}, "sha512-lY7CDW43ECgW9u1TcT3IoXHflywfVqDYze4waEz812jR/bZ8FHDsl7pFQoSZTz5N+2NqRXs8GBwnAwo3ZNxqhQ=="], - "zod": ["zod@3.25.49", "", {}, "sha512-JMMPMy9ZBk3XFEdbM3iL1brx4NUSejd6xr3ELrrGEfGb355gjhiAWtG3K5o+AViV/3ZfkIrCzXsZn6SbLwTR8Q=="], - - "zod-openapi": ["zod-openapi@4.1.0", "", { "peerDependencies": { "zod": "^3.21.4" } }, "sha512-bRCwRYhEO9CmFLyKgJX8h6j1dRtRiwOe+TLzMVPyV0pRW5vRIgb1rLgIGcuRZ5z3MmSVrZqbv3yva4IJrtZK4g=="], + "zod-openapi": ["zod-openapi@4.2.4", "", { "peerDependencies": { "zod": "^3.21.4" } }, "sha512-tsrQpbpqFCXqVXUzi3TPwFhuMtLN3oNZobOtYnK6/5VkXsNdnIgyNr4r8no4wmYluaxzN3F7iS+8xCW8BmMQ8g=="], "zod-to-json-schema": ["zod-to-json-schema@3.24.5", "", { "peerDependencies": { "zod": "^3.24.1" } }, "sha512-/AuWwMP+YqiPbsJx5D6TfgRTc4kTLjsh5SOcd4bLsfUg2RcEXrFMJl1DGgdHy2aCfsIA/cr/1JM0xcB2GZji8g=="], "zod-to-ts": ["zod-to-ts@1.2.0", "", { "peerDependencies": { "typescript": "^4.9.4 || ^5.0.2", "zod": "^3" } }, "sha512-x30XE43V+InwGpvTySRNz9kB7qFU8DlyEy7BsSTCHPH1R0QasMmHWZDCzYm6bVXtj/9NNJAZF3jW8rzFvH5OFA=="], + "zod-validation-error": ["zod-validation-error@3.5.2", "", { "peerDependencies": { "zod": "^3.25.0" } }, "sha512-mdi7YOLtram5dzJ5aDtm1AG9+mxRma1iaMrZdYIpFO7epdKBUwLHIxTF8CPDeCQ828zAXYtizrKlEJAtzgfgrw=="], + "zwitch": ["zwitch@2.0.4", "", {}, "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A=="], - "@ai-sdk/amazon-bedrock/@ai-sdk/provider": ["@ai-sdk/provider@1.1.3", "", { "dependencies": { "json-schema": "^0.4.0" } }, "sha512-qZMxYJ0qqX/RfnuIaab+zp8UAeJn/ygXXAffR5I4N0n1IrvA6qBsjc8hXLmBiMV2zoXlifkacF7sEFnYnjBcqg=="], - - "@ai-sdk/amazon-bedrock/@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@2.2.8", "", { "dependencies": { "@ai-sdk/provider": "1.1.3", 
"nanoid": "^3.3.8", "secure-json-parse": "^2.7.0" }, "peerDependencies": { "zod": "^3.23.8" } }, "sha512-fqhG+4sCVv8x7nFzYnFo19ryhAa3w096Kmc3hWxMQfW/TubPOmt3A6tYZhl4mUfQWWQMsuSkLrtjlWuXBVSGQA=="], - "@ai-sdk/amazon-bedrock/aws4fetch": ["aws4fetch@1.0.20", "", {}, "sha512-/djoAN709iY65ETD6LKCtyyEI04XIBP5xVvfmNxsEP0uJB5tyaGBztSryRr4HqMStr9R06PisQE7m9zDTXKu6g=="], - "@ai-sdk/anthropic/@ai-sdk/provider": ["@ai-sdk/provider@1.1.3", "", { "dependencies": { "json-schema": "^0.4.0" } }, "sha512-qZMxYJ0qqX/RfnuIaab+zp8UAeJn/ygXXAffR5I4N0n1IrvA6qBsjc8hXLmBiMV2zoXlifkacF7sEFnYnjBcqg=="], - - "@ai-sdk/anthropic/@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@2.2.8", "", { "dependencies": { "@ai-sdk/provider": "1.1.3", "nanoid": "^3.3.8", "secure-json-parse": "^2.7.0" }, "peerDependencies": { "zod": "^3.23.8" } }, "sha512-fqhG+4sCVv8x7nFzYnFo19ryhAa3w096Kmc3hWxMQfW/TubPOmt3A6tYZhl4mUfQWWQMsuSkLrtjlWuXBVSGQA=="], - - "@ampproject/remapping/@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.29", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-uw6guiW/gcAGPDhLmd77/6lW8QLeiV5RUTsAX46Db6oLhGaVj4lhnPwb184s1bkc8kdVg/+h988dro8GRDpmYQ=="], + "@ampproject/remapping/@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.25", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ=="], "@astrojs/mdx/@astrojs/markdown-remark": ["@astrojs/markdown-remark@6.3.2", "", { "dependencies": { "@astrojs/internal-helpers": "0.6.1", "@astrojs/prism": "3.3.0", "github-slugger": "^2.0.0", "hast-util-from-html": "^2.0.3", "hast-util-to-text": "^4.0.2", "import-meta-resolve": "^4.1.0", "js-yaml": "^4.1.0", "mdast-util-definitions": "^6.0.0", "rehype-raw": "^7.0.0", "rehype-stringify": "^10.0.1", "remark-gfm": "^4.0.1", "remark-parse": "^11.0.0", "remark-rehype": "^11.1.2", "remark-smartypants": "^3.0.2", "shiki": "^3.2.1", "smol-toml": "^1.3.1", "unified": "^11.0.5", "unist-util-remove-position": "^5.0.0", "unist-util-visit": "^5.0.0", "unist-util-visit-parents": "^6.0.1", "vfile": "^6.0.3" } }, "sha512-bO35JbWpVvyKRl7cmSJD822e8YA8ThR/YbUsciWNA7yTcqpIAL2hJDToWP5KcZBWxGT6IOdOkHSXARSNZc4l/Q=="], + "@aws-crypto/crc32/tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], + "@aws-crypto/util/@smithy/util-utf8": ["@smithy/util-utf8@2.3.0", "", { "dependencies": { "@smithy/util-buffer-from": "^2.2.0", "tslib": "^2.6.2" } }, "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A=="], + "@aws-crypto/util/tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], + + "@aws-sdk/types/tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], + "@babel/core/semver": ["semver@6.3.1", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA=="], - "@babel/generator/@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.29", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-uw6guiW/gcAGPDhLmd77/6lW8QLeiV5RUTsAX46Db6oLhGaVj4lhnPwb184s1bkc8kdVg/+h988dro8GRDpmYQ=="], + 
"@babel/generator/@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.25", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ=="], "@babel/helper-compilation-targets/lru-cache": ["lru-cache@5.1.1", "", { "dependencies": { "yallist": "^3.0.2" } }, "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w=="], "@babel/helper-compilation-targets/semver": ["semver@6.3.1", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA=="], - "@jridgewell/gen-mapping/@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.29", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-uw6guiW/gcAGPDhLmd77/6lW8QLeiV5RUTsAX46Db6oLhGaVj4lhnPwb184s1bkc8kdVg/+h988dro8GRDpmYQ=="], + "@jridgewell/gen-mapping/@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.25", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ=="], "@openauthjs/openauth/@standard-schema/spec": ["@standard-schema/spec@1.0.0-beta.3", "", {}, "sha512-0ifF3BjA1E8SY9C+nUew8RefNOIq0cDlYALPty4rhUm8Rrl6tCM8hBT4bhGhx7I7iXD0uAgt50lgo8dD73ACMw=="], @@ -1713,10 +1744,28 @@ "@rollup/pluginutils/estree-walker": ["estree-walker@2.0.2", "", {}, "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w=="], + "@smithy/eventstream-codec/tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], + + "@smithy/is-array-buffer/tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], + + "@smithy/types/tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], + + "@smithy/util-buffer-from/tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], + + "@smithy/util-hex-encoding/tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], + + "@smithy/util-utf8/tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], + + "@swc/helpers/tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], + "ansi-align/string-width": ["string-width@4.2.3", "", { "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } }, "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="], "anymatch/picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="], + "args/camelcase": ["camelcase@5.0.0", "", {}, "sha512-faqwZqnWxbxn+F1d399ygeamQNy3lPp/H9H6rNrqYh4FSVCtcY+3cub1MxA8o9mDd55mM8Aghuu/kuyYA6VTsA=="], + + "args/chalk": ["chalk@2.4.2", "", { "dependencies": { "ansi-styles": "^3.2.1", "escape-string-regexp": "^1.0.5", "supports-color": "^5.3.0" } }, 
"sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ=="], + "astro/diff": ["diff@5.2.0", "", {}, "sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A=="], "astro/sharp": ["sharp@0.33.5", "", { "dependencies": { "color": "^4.2.3", "detect-libc": "^2.0.3", "semver": "^7.6.3" }, "optionalDependencies": { "@img/sharp-darwin-arm64": "0.33.5", "@img/sharp-darwin-x64": "0.33.5", "@img/sharp-libvips-darwin-arm64": "1.0.4", "@img/sharp-libvips-darwin-x64": "1.0.4", "@img/sharp-libvips-linux-arm": "1.0.5", "@img/sharp-libvips-linux-arm64": "1.0.4", "@img/sharp-libvips-linux-s390x": "1.0.4", "@img/sharp-libvips-linux-x64": "1.0.4", "@img/sharp-libvips-linuxmusl-arm64": "1.0.4", "@img/sharp-libvips-linuxmusl-x64": "1.0.4", "@img/sharp-linux-arm": "0.33.5", "@img/sharp-linux-arm64": "0.33.5", "@img/sharp-linux-s390x": "0.33.5", "@img/sharp-linux-x64": "0.33.5", "@img/sharp-linuxmusl-arm64": "0.33.5", "@img/sharp-linuxmusl-x64": "0.33.5", "@img/sharp-wasm32": "0.33.5", "@img/sharp-win32-ia32": "0.33.5", "@img/sharp-win32-x64": "0.33.5" } }, "sha512-haPVm1EkS9pgvHrQ/F3Xy+hgcuMV0Wm9vfIBSiwZ05k+xgb0PkBQpGsAA/oWdDobNaZTH5ppvHtzCFbnSEwHVw=="], @@ -1727,56 +1776,56 @@ "express/cookie": ["cookie@0.7.2", "", {}, "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w=="], + "get-source/source-map": ["source-map@0.6.1", "", {}, "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="], + "hast-util-to-parse5/property-information": ["property-information@6.5.0", "", {}, "sha512-PgTgs/BlvHxOu8QuEN7wi5A0OmXaBcHpmCSTehcs6Uuu9IkDIEo13Hy7n898RHfrQ49vKCoGeWZSaAK01nwVig=="], - "http-errors/statuses": ["statuses@2.0.1", "", {}, "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ=="], + "mdast-util-find-and-replace/escape-string-regexp": ["escape-string-regexp@5.0.0", "", {}, "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw=="], "miniflare/acorn": ["acorn@8.14.0", "", { "bin": { "acorn": "bin/acorn" } }, "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA=="], "miniflare/sharp": ["sharp@0.33.5", "", { "dependencies": { "color": "^4.2.3", "detect-libc": "^2.0.3", "semver": "^7.6.3" }, "optionalDependencies": { "@img/sharp-darwin-arm64": "0.33.5", "@img/sharp-darwin-x64": "0.33.5", "@img/sharp-libvips-darwin-arm64": "1.0.4", "@img/sharp-libvips-darwin-x64": "1.0.4", "@img/sharp-libvips-linux-arm": "1.0.5", "@img/sharp-libvips-linux-arm64": "1.0.4", "@img/sharp-libvips-linux-s390x": "1.0.4", "@img/sharp-libvips-linux-x64": "1.0.4", "@img/sharp-libvips-linuxmusl-arm64": "1.0.4", "@img/sharp-libvips-linuxmusl-x64": "1.0.4", "@img/sharp-linux-arm": "0.33.5", "@img/sharp-linux-arm64": "0.33.5", "@img/sharp-linux-s390x": "0.33.5", "@img/sharp-linux-x64": "0.33.5", "@img/sharp-linuxmusl-arm64": "0.33.5", "@img/sharp-linuxmusl-x64": "0.33.5", "@img/sharp-wasm32": "0.33.5", "@img/sharp-win32-ia32": "0.33.5", "@img/sharp-win32-x64": "0.33.5" } }, "sha512-haPVm1EkS9pgvHrQ/F3Xy+hgcuMV0Wm9vfIBSiwZ05k+xgb0PkBQpGsAA/oWdDobNaZTH5ppvHtzCFbnSEwHVw=="], - "miniflare/zod": ["zod@3.22.3", "", {}, "sha512-EjIevzuJRiRPbVH4mGc8nApb/lVLKVpmUhAaR5R5doKGfAnGJ6Gr3CViAVjP+4FWSxCsybeWQdcgCtbX+7oZug=="], - - "opencode/remeda": ["remeda@2.22.3", "", { "dependencies": { "type-fest": "^4.40.1" } }, 
"sha512-Ka6965m9Zu9OLsysWxVf3jdJKmp6+PKzDv7HWHinEevf0JOJ9y02YpjiC/sKxRpCqGhVyvm1U+0YIj+E6DMgKw=="], - - "opencontrol/@modelcontextprotocol/sdk": ["@modelcontextprotocol/sdk@1.6.1", "", { "dependencies": { "content-type": "^1.0.5", "cors": "^2.8.5", "eventsource": "^3.0.2", "express": "^5.0.1", "express-rate-limit": "^7.5.0", "pkce-challenge": "^4.1.0", "raw-body": "^3.0.0", "zod": "^3.23.8", "zod-to-json-schema": "^3.24.1" } }, "sha512-oxzMzYCkZHMntzuyerehK3fV6A2Kwh5BD6CGEJSVDU2QNEhfLOptf2X7esQgaHZXHZY0oHmMsOtIDLP71UJXgA=="], - "opencontrol/hono": ["hono@4.7.4", "", {}, "sha512-Pst8FuGqz3L7tFF+u9Pu70eI0xa5S3LPUmrNd5Jm8nTHze9FxLTK9Kaj5g/k4UcwuJSXTP65SyHOPLrffpcAJg=="], - "opencontrol/zod": ["zod@3.24.2", "", {}, "sha512-lY7CDW43ECgW9u1TcT3IoXHflywfVqDYze4waEz812jR/bZ8FHDsl7pFQoSZTz5N+2NqRXs8GBwnAwo3ZNxqhQ=="], - "opencontrol/zod-to-json-schema": ["zod-to-json-schema@3.24.3", "", { "peerDependencies": { "zod": "^3.24.1" } }, "sha512-HIAfWdYIt1sssHfYZFCXp4rU1w2r8hVVXYIlmoa0r0gABLs5di3RCqPU5DDROogVz1pAdYBaz7HK5n9pSUNs3A=="], "openid-client/jose": ["jose@4.15.9", "", {}, "sha512-1vUQX+IdDMVPj4k8kOxgUqlcK518yluMuGZwqlr44FS1ppZB/5GWh4rZG89erpOBOJjU/OBsnCVFfapsRz6nEA=="], "parse-entities/@types/unist": ["@types/unist@2.0.11", "", {}, "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA=="], + "pino-abstract-transport/split2": ["split2@4.2.0", "", {}, "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg=="], + + "pino-pretty/chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="], + + "pino-pretty/jmespath": ["jmespath@0.15.0", "", {}, "sha512-+kHj8HXArPfpPEKGLZ+kB5ONRTCiGQXo8RQYL0hH8t6pWXUBBK5KkkQmTNOwKK4LEsd0yTsgtjJVm4UBSZea4w=="], + "prebuild-install/tar-fs": ["tar-fs@2.1.3", "", { "dependencies": { "chownr": "^1.1.1", "mkdirp-classic": "^0.5.2", "pump": "^3.0.0", "tar-stream": "^2.1.4" } }, "sha512-090nwYJDmlhwFwEW3QQl+vaNnxsO2yVsd45eTKRBzSzu+hlb1w2K9inVq5b0ngXuLVqQ4ApvsUHHnu/zQNkWAg=="], "prompts/kleur": ["kleur@3.0.3", "", {}, "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w=="], + "rc/strip-json-comments": ["strip-json-comments@2.0.1", "", {}, "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ=="], + "router/path-to-regexp": ["path-to-regexp@8.2.0", "", {}, "sha512-TdrF7fW9Rphjq4RjrW0Kp2AW0Ahwu9sRGTkS6bvDi0SCwZlEZYmcfDbEsTz8RVk0EHIS/Vd1bv3JhG+1xZuAyQ=="], "sitemap/@types/node": ["@types/node@17.0.45", "", {}, "sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw=="], "sitemap/sax": ["sax@1.4.1", "", {}, "sha512-+aWOz7yVScEGoKNd4PA10LZ8sk0A/z5+nXQG5giUO5rprX9jgYsTdov9qCchZiPIZezbZH+jRut8nPodFAX4Jg=="], - "sst/jose": ["jose@5.2.3", "", {}, "sha512-KUXdbctm1uHVL8BYhnyHkgp3zDX5KW8ZhAKVFEfUbU2P8Alpzjb+48hHvjOdQIyPshoblhzsuqOwEEAbtHVirA=="], - - "to-buffer/isarray": ["isarray@2.0.5", "", {}, "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw=="], + "token-types/ieee754": ["ieee754@1.2.1", "", {}, "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA=="], "unicode-trie/pako": ["pako@0.2.9", "", {}, "sha512-NUcwaKxUxWrZLpDG+z/xZaCgQITkA/Dv4V/T6bw7VON6l1Xz/VnrBqrYjZQ12TamKHzITTfOEIYUj48y2KXImA=="], "unstorage/lru-cache": ["lru-cache@10.4.3", "", {}, 
"sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="], - "uri-js/punycode": ["punycode@2.3.1", "", {}, "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg=="], + "vscode-languageserver-protocol/vscode-jsonrpc": ["vscode-jsonrpc@8.1.0", "", {}, "sha512-6TDy/abTQk+zDGYazgbIPc+4JoXdwC8NHU9Pbn4UJP1fehUyZmM4RHp5IthX7A6L5KS30PRui+j+tbbMMMafdw=="], "wrangler/esbuild": ["esbuild@0.25.4", "", { "optionalDependencies": { "@esbuild/aix-ppc64": "0.25.4", "@esbuild/android-arm": "0.25.4", "@esbuild/android-arm64": "0.25.4", "@esbuild/android-x64": "0.25.4", "@esbuild/darwin-arm64": "0.25.4", "@esbuild/darwin-x64": "0.25.4", "@esbuild/freebsd-arm64": "0.25.4", "@esbuild/freebsd-x64": "0.25.4", "@esbuild/linux-arm": "0.25.4", "@esbuild/linux-arm64": "0.25.4", "@esbuild/linux-ia32": "0.25.4", "@esbuild/linux-loong64": "0.25.4", "@esbuild/linux-mips64el": "0.25.4", "@esbuild/linux-ppc64": "0.25.4", "@esbuild/linux-riscv64": "0.25.4", "@esbuild/linux-s390x": "0.25.4", "@esbuild/linux-x64": "0.25.4", "@esbuild/netbsd-arm64": "0.25.4", "@esbuild/netbsd-x64": "0.25.4", "@esbuild/openbsd-arm64": "0.25.4", "@esbuild/openbsd-x64": "0.25.4", "@esbuild/sunos-x64": "0.25.4", "@esbuild/win32-arm64": "0.25.4", "@esbuild/win32-ia32": "0.25.4", "@esbuild/win32-x64": "0.25.4" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-8pgjLUcUjcgDg+2Q4NYXnPbo/vncAY4UmyaCm0jZevERqCHZIaWwdJHkf8XQtu4AxSKCdvrUbT0XUr1IdZzI8Q=="], - "xml2js/sax": ["sax@1.4.1", "", {}, "sha512-+aWOz7yVScEGoKNd4PA10LZ8sk0A/z5+nXQG5giUO5rprX9jgYsTdov9qCchZiPIZezbZH+jRut8nPodFAX4Jg=="], - "yargs/yargs-parser": ["yargs-parser@22.0.0", "", {}, "sha512-rwu/ClNdSMpkSrUb+d6BRsSkLUq1fmfsY6TOpYzTwvwkg1/NRG85KBy3kq++A8LKQwX6lsu+aWad+2khvuXrqw=="], + "youch/cookie": ["cookie@0.7.2", "", {}, "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w=="], + "@astrojs/mdx/@astrojs/markdown-remark/@astrojs/prism": ["@astrojs/prism@3.3.0", "", { "dependencies": { "prismjs": "^1.30.0" } }, "sha512-q8VwfU/fDZNoDOf+r7jUnMC2//H2l0TuQ6FkGJL8vD8nw/q5KiL3DS1KKBI3QhI9UQhpJ5dc7AtqfbXWuOgLCQ=="], "@aws-crypto/util/@smithy/util-utf8/@smithy/util-buffer-from": ["@smithy/util-buffer-from@2.2.0", "", { "dependencies": { "@smithy/is-array-buffer": "^2.2.0", "tslib": "^2.6.2" } }, "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA=="], @@ -1787,11 +1836,13 @@ "ansi-align/string-width/strip-ansi": ["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="], - "opencontrol/@modelcontextprotocol/sdk/pkce-challenge": ["pkce-challenge@4.1.0", "", {}, "sha512-ZBmhE1C9LcPoH9XZSdwiPtbPHZROwAnMy+kIFQVrnMCxY4Cudlz3gBOpzilgc0jOgRaiT3sIWfpMomW2ar2orQ=="], + "args/chalk/ansi-styles": ["ansi-styles@3.2.1", "", { "dependencies": { "color-convert": "^1.9.0" } }, "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA=="], - "opencontrol/@modelcontextprotocol/sdk/zod": ["zod@3.25.49", "", {}, "sha512-JMMPMy9ZBk3XFEdbM3iL1brx4NUSejd6xr3ELrrGEfGb355gjhiAWtG3K5o+AViV/3ZfkIrCzXsZn6SbLwTR8Q=="], + "args/chalk/supports-color": ["supports-color@5.5.0", "", { "dependencies": { "has-flag": "^3.0.0" } }, "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow=="], - "opencontrol/@modelcontextprotocol/sdk/zod-to-json-schema": ["zod-to-json-schema@3.24.5", "", { 
"peerDependencies": { "zod": "^3.24.1" } }, "sha512-/AuWwMP+YqiPbsJx5D6TfgRTc4kTLjsh5SOcd4bLsfUg2RcEXrFMJl1DGgdHy2aCfsIA/cr/1JM0xcB2GZji8g=="], + "bl/buffer/ieee754": ["ieee754@1.2.1", "", {}, "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA=="], + + "pino-pretty/chalk/ansi-styles": ["ansi-styles@4.3.0", "", { "dependencies": { "color-convert": "^2.0.1" } }, "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg=="], "prebuild-install/tar-fs/tar-stream": ["tar-stream@2.2.0", "", { "dependencies": { "bl": "^4.0.3", "end-of-stream": "^1.4.1", "fs-constants": "^1.0.0", "inherits": "^2.0.3", "readable-stream": "^3.1.1" } }, "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ=="], @@ -1848,5 +1899,11 @@ "@aws-crypto/util/@smithy/util-utf8/@smithy/util-buffer-from/@smithy/is-array-buffer": ["@smithy/is-array-buffer@2.2.0", "", { "dependencies": { "tslib": "^2.6.2" } }, "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA=="], "ansi-align/string-width/strip-ansi/ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="], + + "args/chalk/ansi-styles/color-convert": ["color-convert@1.9.3", "", { "dependencies": { "color-name": "1.1.3" } }, "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg=="], + + "args/chalk/supports-color/has-flag": ["has-flag@3.0.0", "", {}, "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw=="], + + "args/chalk/ansi-styles/color-convert/color-name": ["color-name@1.1.3", "", {}, "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw=="], } } diff --git a/infra/app.ts b/infra/app.ts index 5c646d97..834936b7 100644 --- a/infra/app.ts +++ b/infra/app.ts @@ -4,8 +4,6 @@ export const domain = (() => { return `${$app.stage}.dev.opencode.ai` })() -const GITHUB_APP_ID = new sst.Secret("GITHUB_APP_ID") -const GITHUB_APP_PRIVATE_KEY = new sst.Secret("GITHUB_APP_PRIVATE_KEY") const bucket = new sst.cloudflare.Bucket("Bucket") export const api = new sst.cloudflare.Worker("Api", { @@ -15,7 +13,7 @@ export const api = new sst.cloudflare.Worker("Api", { WEB_DOMAIN: domain, }, url: true, - link: [bucket, GITHUB_APP_ID, GITHUB_APP_PRIVATE_KEY], + link: [bucket], transform: { worker: (args) => { args.logpush = true @@ -41,8 +39,6 @@ new sst.cloudflare.x.Astro("Web", { domain, path: "packages/web", environment: { - // For astro config - SST_STAGE: $app.stage, VITE_API_URL: api.url, }, }) diff --git a/install b/install index 46de9e35..e18bd7bb 100755 --- a/install +++ b/install @@ -48,7 +48,7 @@ if [ -z "$requested_version" ]; then url="https://github.com/sst/opencode/releases/latest/download/$filename" specific_version=$(curl -s https://api.github.com/repos/sst/opencode/releases/latest | awk -F'"' '/"tag_name": "/ {gsub(/^v/, "", $4); print $4}') - if [[ $? -ne 0 || -z "$specific_version" ]]; then + if [[ $? 
-ne 0 ]]; then echo "${RED}Failed to fetch version information${NC}" exit 1 fi @@ -186,3 +186,4 @@ if [ -n "${GITHUB_ACTIONS-}" ] && [ "${GITHUB_ACTIONS}" == "true" ]; then echo "$INSTALL_DIR" >> $GITHUB_PATH print_message info "Added $INSTALL_DIR to \$GITHUB_PATH" fi + diff --git a/opencode.json b/opencode.json index 2fa64407..57b94008 100644 --- a/opencode.json +++ b/opencode.json @@ -1,9 +1,19 @@ { "$schema": "https://opencode.ai/config.json", - "mcp": { - "weather": { - "type": "local", - "command": ["opencode", "x", "@h1deya/mcp-server-weather"] + "experimental": { + "hook": { + "file_edited": { + ".json": [ + { + "command": ["bun", "run", "prettier", "$FILE"] + } + ] + }, + "session_completed": [ + { + "command": ["touch", "./node_modules/foo"] + } + ] } } } diff --git a/package.json b/package.json index 9c98d4dc..ed4fcded 100644 --- a/package.json +++ b/package.json @@ -5,9 +5,7 @@ "type": "module", "packageManager": "bun@1.2.14", "scripts": { - "dev": "bun run packages/opencode/src/index.ts", "typecheck": "bun run --filter='*' typecheck", - "stainless": "./scripts/stainless", "postinstall": "./scripts/hooks" }, "workspaces": { @@ -17,13 +15,13 @@ "catalog": { "typescript": "5.8.2", "@types/node": "22.13.9", - "zod": "3.25.49", - "ai": "5.0.0-beta.21" + "zod": "3.24.2", + "ai": "4.3.16" } }, "devDependencies": { "prettier": "3.5.3", - "sst": "3.17.8" + "sst": "3.17.6" }, "repository": { "type": "git", @@ -31,13 +29,17 @@ }, "license": "MIT", "prettier": { - "semi": false, - "printWidth": 120 + "semi": false + }, + "overrides": { + "zod": "3.24.2" }, "trustedDependencies": [ "esbuild", "protobufjs", "sharp" ], - "patchedDependencies": {} + "patchedDependencies": { + "ai@4.3.16": "patches/ai@4.3.16.patch" + } } diff --git a/packages/function/package.json b/packages/function/package.json index 633aeff8..81a1edc9 100644 --- a/packages/function/package.json +++ b/packages/function/package.json @@ -8,10 +8,5 @@ "@cloudflare/workers-types": "4.20250522.0", "typescript": "catalog:", "@types/node": "catalog:" - }, - "dependencies": { - "@octokit/auth-app": "8.0.1", - "@octokit/rest": "22.0.0", - "jose": "6.0.11" } } diff --git a/packages/function/src/api.ts b/packages/function/src/api.ts index 4ba7cbf6..701b9854 100644 --- a/packages/function/src/api.ts +++ b/packages/function/src/api.ts @@ -1,9 +1,5 @@ import { DurableObject } from "cloudflare:workers" import { randomUUID } from "node:crypto" -import { jwtVerify, createRemoteJWKSet } from "jose" -import { createAppAuth } from "@octokit/auth-app" -import { Octokit } from "@octokit/rest" -import { Resource } from "sst" type Env = { SYNC_SERVER: DurableObjectNamespace @@ -44,8 +40,7 @@ export class SyncServer extends DurableObject { const sessionID = await this.getSessionID() if ( !key.startsWith(`session/info/${sessionID}`) && - !key.startsWith(`session/message/${sessionID}/`) && - !key.startsWith(`session/part/${sessionID}/`) + !key.startsWith(`session/message/${sessionID}/`) ) return new Response("Error: Invalid key", { status: 400 }) @@ -75,7 +70,7 @@ export class SyncServer extends DurableObject { } public async getData() { - const data = (await this.ctx.storage.list()) as Map + const data = await this.ctx.storage.list() return Array.from(data.entries()) .filter(([key, _]) => key.startsWith("session/")) .map(([key, content]) => ({ key, content })) @@ -112,7 +107,7 @@ export class SyncServer extends DurableObject { } export default { - async fetch(request: Request, env: Env, ctx: ExecutionContext): Promise { + async fetch(request: Request, 
env: Env, ctx: ExecutionContext) { const url = new URL(request.url) const splits = url.pathname.split("/") const method = splits[1] @@ -189,7 +184,8 @@ export default { } const id = url.searchParams.get("id") console.log("share_poll", id) - if (!id) return new Response("Error: Share ID is required", { status: 400 }) + if (!id) + return new Response("Error: Share ID is required", { status: 400 }) const stub = env.SYNC_SERVER.get(env.SYNC_SERVER.idFromName(id)) return stub.fetch(request) } @@ -197,7 +193,8 @@ export default { if (request.method === "GET" && method === "share_data") { const id = url.searchParams.get("id") console.log("share_data", id) - if (!id) return new Response("Error: Share ID is required", { status: 400 }) + if (!id) + return new Response("Error: Share ID is required", { status: 400 }) const stub = env.SYNC_SERVER.get(env.SYNC_SERVER.idFromName(id)) const data = await stub.getData() @@ -211,13 +208,8 @@ export default { return } if (type === "message") { - messages[d.content.id] = { - parts: [], - ...d.content, - } - } - if (type === "part") { - messages[d.content.messageID].parts.push(d.content) + const [, messageID] = splits + messages[messageID] = d.content } }) @@ -231,95 +223,5 @@ export default { }, ) } - - /** - * Used by the GitHub action to get GitHub installation access token given the OIDC token - */ - if (request.method === "POST" && method === "exchange_github_app_token") { - const EXPECTED_AUDIENCE = "opencode-github-action" - const GITHUB_ISSUER = "https://token.actions.githubusercontent.com" - const JWKS_URL = `${GITHUB_ISSUER}/.well-known/jwks` - - // get Authorization header - const authHeader = request.headers.get("Authorization") - const token = authHeader?.replace(/^Bearer /, "") - if (!token) - return new Response(JSON.stringify({ error: "Authorization header is required" }), { - status: 401, - headers: { "Content-Type": "application/json" }, - }) - - // verify token - const JWKS = createRemoteJWKSet(new URL(JWKS_URL)) - let owner, repo - try { - const { payload } = await jwtVerify(token, JWKS, { - issuer: GITHUB_ISSUER, - audience: EXPECTED_AUDIENCE, - }) - const sub = payload.sub // e.g. 
'repo:my-org/my-repo:ref:refs/heads/main' - const parts = sub.split(":")[1].split("/") - owner = parts[0] - repo = parts[1] - } catch (err) { - console.error("Token verification failed:", err) - return new Response(JSON.stringify({ error: "Invalid or expired token" }), { - status: 403, - headers: { "Content-Type": "application/json" }, - }) - } - - // Create app JWT token - const auth = createAppAuth({ - appId: Resource.GITHUB_APP_ID.value, - privateKey: Resource.GITHUB_APP_PRIVATE_KEY.value, - }) - const appAuth = await auth({ type: "app" }) - - // Lookup installation - const octokit = new Octokit({ auth: appAuth.token }) - const { data: installation } = await octokit.apps.getRepoInstallation({ owner, repo }) - - // Get installation token - const installationAuth = await auth({ type: "installation", installationId: installation.id }) - - return new Response(JSON.stringify({ token: installationAuth.token }), { - headers: { "Content-Type": "application/json" }, - }) - } - - /** - * Used by the opencode CLI to check if the GitHub app is installed - */ - if (request.method === "GET" && method === "get_github_app_installation") { - const owner = url.searchParams.get("owner") - const repo = url.searchParams.get("repo") - - const auth = createAppAuth({ - appId: Resource.GITHUB_APP_ID.value, - privateKey: Resource.GITHUB_APP_PRIVATE_KEY.value, - }) - const appAuth = await auth({ type: "app" }) - - // Lookup installation - const octokit = new Octokit({ auth: appAuth.token }) - let installation - try { - const ret = await octokit.apps.getRepoInstallation({ owner, repo }) - installation = ret.data - } catch (err) { - if (err instanceof Error && err.message.includes("Not Found")) { - // not installed - } else { - throw err - } - } - - return new Response(JSON.stringify({ installation }), { - headers: { "Content-Type": "application/json" }, - }) - } - - return new Response("Not Found", { status: 404 }) }, } diff --git a/packages/function/sst-env.d.ts b/packages/function/sst-env.d.ts index dab7de3f..fd95edbb 100644 --- a/packages/function/sst-env.d.ts +++ b/packages/function/sst-env.d.ts @@ -6,28 +6,20 @@ import "sst" declare module "sst" { export interface Resource { - "GITHUB_APP_ID": { - "type": "sst.sst.Secret" - "value": string - } - "GITHUB_APP_PRIVATE_KEY": { - "type": "sst.sst.Secret" - "value": string - } - "Web": { - "type": "sst.cloudflare.Astro" - "url": string + Web: { + type: "sst.cloudflare.Astro" + url: string } } } -// cloudflare -import * as cloudflare from "@cloudflare/workers-types"; +// cloudflare +import * as cloudflare from "@cloudflare/workers-types" declare module "sst" { export interface Resource { - "Api": cloudflare.Service - "Bucket": cloudflare.R2Bucket + Api: cloudflare.Service + Bucket: cloudflare.R2Bucket } } import "sst" -export {} \ No newline at end of file +export {} diff --git a/packages/opencode/.gitignore b/packages/opencode/.gitignore index e057ca61..66857d89 100644 --- a/packages/opencode/.gitignore +++ b/packages/opencode/.gitignore @@ -1,3 +1,4 @@ +node_modules research dist gen diff --git a/packages/opencode/config.schema.json b/packages/opencode/config.schema.json new file mode 100644 index 00000000..35dfd6f1 --- /dev/null +++ b/packages/opencode/config.schema.json @@ -0,0 +1,369 @@ +{ + "type": "object", + "properties": { + "$schema": { + "type": "string", + "description": "JSON schema reference for configuration validation" + }, + "theme": { + "type": "string", + "description": "Theme name to use for the interface" + }, + "keybinds": { + "type": "object", 
+ "properties": { + "leader": { + "type": "string", + "description": "Leader key for keybind combinations" + }, + "help": { + "type": "string", + "description": "Show help dialog" + }, + "editor_open": { + "type": "string", + "description": "Open external editor" + }, + "session_new": { + "type": "string", + "description": "Create a new session" + }, + "session_list": { + "type": "string", + "description": "List all sessions" + }, + "session_share": { + "type": "string", + "description": "Share current session" + }, + "session_interrupt": { + "type": "string", + "description": "Interrupt current session" + }, + "session_compact": { + "type": "string", + "description": "Toggle compact mode for session" + }, + "tool_details": { + "type": "string", + "description": "Show tool details" + }, + "model_list": { + "type": "string", + "description": "List available models" + }, + "theme_list": { + "type": "string", + "description": "List available themes" + }, + "project_init": { + "type": "string", + "description": "Initialize project configuration" + }, + "input_clear": { + "type": "string", + "description": "Clear input field" + }, + "input_paste": { + "type": "string", + "description": "Paste from clipboard" + }, + "input_submit": { + "type": "string", + "description": "Submit input" + }, + "input_newline": { + "type": "string", + "description": "Insert newline in input" + }, + "history_previous": { + "type": "string", + "description": "Navigate to previous history item" + }, + "history_next": { + "type": "string", + "description": "Navigate to next history item" + }, + "messages_page_up": { + "type": "string", + "description": "Scroll messages up by one page" + }, + "messages_page_down": { + "type": "string", + "description": "Scroll messages down by one page" + }, + "messages_half_page_up": { + "type": "string", + "description": "Scroll messages up by half page" + }, + "messages_half_page_down": { + "type": "string", + "description": "Scroll messages down by half page" + }, + "messages_previous": { + "type": "string", + "description": "Navigate to previous message" + }, + "messages_next": { + "type": "string", + "description": "Navigate to next message" + }, + "messages_first": { + "type": "string", + "description": "Navigate to first message" + }, + "messages_last": { + "type": "string", + "description": "Navigate to last message" + }, + "app_exit": { + "type": "string", + "description": "Exit the application" + } + }, + "additionalProperties": false, + "description": "Custom keybind configurations" + }, + "autoshare": { + "type": "boolean", + "description": "Share newly created sessions automatically" + }, + "autoupdate": { + "type": "boolean", + "description": "Automatically update to the latest version" + }, + "disabled_providers": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Disable providers that are loaded automatically" + }, + "model": { + "type": "string", + "description": "Model to use in the format of provider/model, eg anthropic/claude-2" + }, + "provider": { + "type": "object", + "additionalProperties": { + "type": "object", + "properties": { + "api": { + "type": "string" + }, + "name": { + "type": "string" + }, + "env": { + "type": "array", + "items": { + "type": "string" + } + }, + "id": { + "type": "string" + }, + "npm": { + "type": "string" + }, + "models": { + "type": "object", + "additionalProperties": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "attachment": { + "type": "boolean" + }, + "reasoning": { + 
"type": "boolean" + }, + "temperature": { + "type": "boolean" + }, + "tool_call": { + "type": "boolean" + }, + "cost": { + "type": "object", + "properties": { + "input": { + "type": "number" + }, + "output": { + "type": "number" + }, + "cache_read": { + "type": "number" + }, + "cache_write": { + "type": "number" + } + }, + "required": ["input", "output"], + "additionalProperties": false + }, + "limit": { + "type": "object", + "properties": { + "context": { + "type": "number" + }, + "output": { + "type": "number" + } + }, + "required": ["context", "output"], + "additionalProperties": false + }, + "id": { + "type": "string" + }, + "options": { + "type": "object", + "additionalProperties": {} + } + }, + "additionalProperties": false + } + }, + "options": { + "type": "object", + "additionalProperties": {} + } + }, + "required": ["models"], + "additionalProperties": false + }, + "description": "Custom provider configurations and model overrides" + }, + "mcp": { + "type": "object", + "additionalProperties": { + "anyOf": [ + { + "type": "object", + "properties": { + "type": { + "type": "string", + "const": "local", + "description": "Type of MCP server connection" + }, + "command": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Command and arguments to run the MCP server" + }, + "environment": { + "type": "object", + "additionalProperties": { + "type": "string" + }, + "description": "Environment variables to set when running the MCP server" + }, + "enabled": { + "type": "boolean", + "description": "Enable or disable the MCP server on startup" + } + }, + "required": ["type", "command"], + "additionalProperties": false + }, + { + "type": "object", + "properties": { + "type": { + "type": "string", + "const": "remote", + "description": "Type of MCP server connection" + }, + "url": { + "type": "string", + "description": "URL of the remote MCP server" + }, + "enabled": { + "type": "boolean", + "description": "Enable or disable the MCP server on startup" + } + }, + "required": ["type", "url"], + "additionalProperties": false + } + ] + }, + "description": "MCP (Model Context Protocol) server configurations" + }, + "instructions": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Additional instruction files or patterns to include" + }, + "experimental": { + "type": "object", + "properties": { + "hook": { + "type": "object", + "properties": { + "file_edited": { + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "type": "object", + "properties": { + "command": { + "type": "array", + "items": { + "type": "string" + } + }, + "environment": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "required": ["command"], + "additionalProperties": false + } + } + }, + "session_completed": { + "type": "array", + "items": { + "type": "object", + "properties": { + "command": { + "type": "array", + "items": { + "type": "string" + } + }, + "environment": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "required": ["command"], + "additionalProperties": false + } + } + }, + "additionalProperties": false + } + }, + "additionalProperties": false + } + }, + "additionalProperties": false, + "$schema": "http://json-schema.org/draft-07/schema#" +} diff --git a/packages/opencode/package.json b/packages/opencode/package.json index 8541e018..fae2e342 100644 --- a/packages/opencode/package.json +++ b/packages/opencode/package.json @@ -17,33 +17,37 @@ "devDependencies": { 
"@ai-sdk/amazon-bedrock": "2.2.10", "@ai-sdk/anthropic": "1.2.12", - "@standard-schema/spec": "1.0.0", "@tsconfig/bun": "1.0.7", "@types/bun": "latest", "@types/turndown": "5.0.5", "@types/yargs": "17.0.33", "typescript": "catalog:", - "vscode-languageserver-types": "3.17.5", "zod-to-json-schema": "3.24.5" }, "dependencies": { "@clack/prompts": "0.11.0", - "@hono/zod-validator": "0.4.2", - "@modelcontextprotocol/sdk": "1.15.1", + "@flystorage/file-storage": "1.1.0", + "@flystorage/local-fs": "1.1.0", + "@hono/zod-validator": "0.5.0", "@openauthjs/openauth": "0.4.3", + "@standard-schema/spec": "1.0.0", "ai": "catalog:", "decimal.js": "10.5.0", "diff": "8.0.2", + "env-paths": "3.0.0", "hono": "4.7.10", "hono-openapi": "0.4.8", "isomorphic-git": "1.32.1", "open": "10.1.2", "remeda": "2.22.3", + "ts-lsp-client": "1.0.3", "turndown": "7.2.0", "vscode-jsonrpc": "8.2.1", + "vscode-languageclient": "8", "xdg-basedir": "5.1.0", "yargs": "18.0.0", "zod": "catalog:", - "zod-openapi": "4.1.0" + "zod-openapi": "4.2.4", + "zod-validation-error": "3.5.2" } } diff --git a/packages/opencode/script/publish.ts b/packages/opencode/script/publish.ts index 08e042d7..3f4c2005 100755 --- a/packages/opencode/script/publish.ts +++ b/packages/opencode/script/publish.ts @@ -9,7 +9,7 @@ const snapshot = process.argv.includes("--snapshot") const version = snapshot ? `0.0.0-${new Date().toISOString().slice(0, 16).replace(/[-:T]/g, "")}` - : await $`git describe --tags --abbrev=0` + : await $`git describe --tags --exact-match HEAD` .text() .then((x) => x.substring(1).trim()) .catch(() => { @@ -57,7 +57,8 @@ for (const [os, arch] of targets) { 2, ), ) - if (!dry) await $`cd dist/${name} && bun publish --access public --tag ${npmTag}` + if (!dry) + await $`cd dist/${name} && bun publish --access public --tag ${npmTag}` optionalDependencies[name] = version } @@ -81,7 +82,8 @@ await Bun.file(`./dist/${pkg.name}/package.json`).write( 2, ), ) -if (!dry) await $`cd ./dist/${pkg.name} && bun publish --access public --tag ${npmTag}` +if (!dry) + await $`cd ./dist/${pkg.name} && bun publish --access public --tag ${npmTag}` if (!snapshot) { // Github Release @@ -89,11 +91,15 @@ if (!snapshot) { await $`cd dist/${key}/bin && zip -r ../../${key}.zip *` } - const previous = await fetch("https://api.github.com/repos/sst/opencode/releases/latest") + const previous = await fetch( + "https://api.github.com/repos/sst/opencode/releases/latest", + ) .then((res) => res.json()) .then((data) => data.tag_name) - const commits = await fetch(`https://api.github.com/repos/sst/opencode/compare/${previous}...HEAD`) + const commits = await fetch( + `https://api.github.com/repos/sst/opencode/compare/${previous}...HEAD`, + ) .then((res) => res.json()) .then((data) => data.commits || []) @@ -103,7 +109,6 @@ if (!snapshot) { const lower = x.toLowerCase() return ( !lower.includes("ignore:") && - !lower.includes("chore:") && !lower.includes("ci:") && !lower.includes("wip:") && !lower.includes("docs:") && @@ -112,13 +117,26 @@ if (!snapshot) { }) .join("\n") - if (!dry) await $`gh release create v${version} --title "v${version}" --notes ${notes} ./dist/*.zip` + if (!dry) + await $`gh release create v${version} --title "v${version}" --notes ${notes} ./dist/*.zip` // Calculate SHA values - const arm64Sha = await $`sha256sum ./dist/opencode-linux-arm64.zip | cut -d' ' -f1`.text().then((x) => x.trim()) - const x64Sha = await $`sha256sum ./dist/opencode-linux-x64.zip | cut -d' ' -f1`.text().then((x) => x.trim()) - const macX64Sha = await $`sha256sum 
./dist/opencode-darwin-x64.zip | cut -d' ' -f1`.text().then((x) => x.trim()) - const macArm64Sha = await $`sha256sum ./dist/opencode-darwin-arm64.zip | cut -d' ' -f1`.text().then((x) => x.trim()) + const arm64Sha = + await $`sha256sum ./dist/opencode-linux-arm64.zip | cut -d' ' -f1` + .text() + .then((x) => x.trim()) + const x64Sha = + await $`sha256sum ./dist/opencode-linux-x64.zip | cut -d' ' -f1` + .text() + .then((x) => x.trim()) + const macX64Sha = + await $`sha256sum ./dist/opencode-darwin-x64.zip | cut -d' ' -f1` + .text() + .then((x) => x.trim()) + const macArm64Sha = + await $`sha256sum ./dist/opencode-darwin-arm64.zip | cut -d' ' -f1` + .text() + .then((x) => x.trim()) // AUR package const pkgbuild = [ @@ -152,7 +170,9 @@ if (!snapshot) { for (const pkg of ["opencode", "opencode-bin"]) { await $`rm -rf ./dist/aur-${pkg}` await $`git clone ssh://aur@aur.archlinux.org/${pkg}.git ./dist/aur-${pkg}` - await Bun.file(`./dist/aur-${pkg}/PKGBUILD`).write(pkgbuild.replace("${pkg}", pkg)) + await Bun.file(`./dist/aur-${pkg}/PKGBUILD`).write( + pkgbuild.replace("${pkg}", pkg), + ) await $`cd ./dist/aur-${pkg} && makepkg --printsrcinfo > .SRCINFO` await $`cd ./dist/aur-${pkg} && git add PKGBUILD .SRCINFO` await $`cd ./dist/aur-${pkg} && git commit -m "Update to v${version}"` diff --git a/packages/opencode/script/schema.ts b/packages/opencode/script/schema.ts index 008c168c..1c0067c7 100755 --- a/packages/opencode/script/schema.ts +++ b/packages/opencode/script/schema.ts @@ -4,32 +4,5 @@ import "zod-openapi/extend" import { Config } from "../src/config/config" import { zodToJsonSchema } from "zod-to-json-schema" -const file = process.argv[2] - -const result = zodToJsonSchema(Config.Info, { - /** - * We'll use the `default` values of the field as the only value in `examples`. - * This will ensure no docs are needed to be read, as the configuration is - * self-documenting. - * - * See https://json-schema.org/draft/2020-12/draft-bhutton-json-schema-validation-00#rfc.section.9.5 - */ - postProcess(jsonSchema) { - const schema = jsonSchema as typeof jsonSchema & { - examples?: unknown[] - } - if (schema && typeof schema === "object" && "type" in schema && schema.type === "string" && schema?.default) { - if (!schema.examples) { - schema.examples = [schema.default] - } - - schema.description = [schema.description || "", `default: \`${schema.default}\``] - .filter(Boolean) - .join("\n\n") - .trim() - } - - return jsonSchema - }, -}) -await Bun.write(file, JSON.stringify(result, null, 2)) +const result = zodToJsonSchema(Config.Info) +await Bun.write("config.schema.json", JSON.stringify(result, null, 2)) diff --git a/packages/opencode/src/app/app.ts b/packages/opencode/src/app/app.ts index fc7f49cb..6467d252 100644 --- a/packages/opencode/src/app/app.ts +++ b/packages/opencode/src/app/app.ts @@ -12,6 +12,7 @@ export namespace App { export const Info = z .object({ + user: z.string(), hostname: z.string(), git: z.boolean(), path: z.object({ @@ -44,14 +45,23 @@ export namespace App { } export const provideExisting = ctx.provide - export async function provide(input: Input, cb: (app: App.Info) => Promise) { + export async function provide( + input: Input, + cb: (app: App.Info) => Promise, + ) { log.info("creating", { cwd: input.cwd, }) - const git = await Filesystem.findUp(".git", input.cwd).then(([x]) => (x ? path.dirname(x) : undefined)) + const git = await Filesystem.findUp(".git", input.cwd).then(([x]) => + x ? 
path.dirname(x) : undefined, + ) log.info("git", { git }) - const data = path.join(Global.Path.data, "project", git ? directory(git) : "global") + const data = path.join( + Global.Path.data, + "project", + git ? directory(git) : "global", + ) const stateFile = Bun.file(path.join(data, APP_JSON)) const state = (await stateFile.json().catch(() => ({}))) as { initialized: number @@ -69,6 +79,7 @@ export namespace App { const root = git ?? input.cwd const info: Info = { + user: os.userInfo().username, hostname: os.hostname(), time: { initialized: state.initialized, diff --git a/packages/opencode/src/auth/anthropic.ts b/packages/opencode/src/auth/anthropic.ts index d3228cb8..df4af692 100644 --- a/packages/opencode/src/auth/anthropic.ts +++ b/packages/opencode/src/auth/anthropic.ts @@ -4,18 +4,20 @@ import { Auth } from "./index" export namespace AuthAnthropic { const CLIENT_ID = "9d1c250a-e61b-44d9-88ed-5944d1962f5e" - export async function authorize(mode: "max" | "console") { + export async function authorize() { const pkce = await generatePKCE() - - const url = new URL( - `https://${mode === "console" ? "console.anthropic.com" : "claude.ai"}/oauth/authorize`, - import.meta.url, - ) + const url = new URL("https://claude.ai/oauth/authorize", import.meta.url) url.searchParams.set("code", "true") url.searchParams.set("client_id", CLIENT_ID) url.searchParams.set("response_type", "code") - url.searchParams.set("redirect_uri", "https://console.anthropic.com/oauth/code/callback") - url.searchParams.set("scope", "org:create_api_key user:profile user:inference") + url.searchParams.set( + "redirect_uri", + "https://console.anthropic.com/oauth/code/callback", + ) + url.searchParams.set( + "scope", + "org:create_api_key user:profile user:inference", + ) url.searchParams.set("code_challenge", pkce.challenge) url.searchParams.set("code_challenge_method", "S256") url.searchParams.set("state", pkce.verifier) @@ -43,28 +45,32 @@ export namespace AuthAnthropic { }) if (!result.ok) throw new ExchangeFailed() const json = await result.json() - return { + await Auth.set("anthropic", { + type: "oauth", refresh: json.refresh_token as string, access: json.access_token as string, expires: Date.now() + json.expires_in * 1000, - } + }) } export async function access() { const info = await Auth.get("anthropic") if (!info || info.type !== "oauth") return if (info.access && info.expires > Date.now()) return info.access - const response = await fetch("https://console.anthropic.com/v1/oauth/token", { - method: "POST", - headers: { - "Content-Type": "application/json", + const response = await fetch( + "https://console.anthropic.com/v1/oauth/token", + { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify({ + grant_type: "refresh_token", + refresh_token: info.refresh, + client_id: CLIENT_ID, + }), }, - body: JSON.stringify({ - grant_type: "refresh_token", - refresh_token: info.refresh, - client_id: CLIENT_ID, - }), - }) + ) if (!response.ok) return const json = await response.json() await Auth.set("anthropic", { diff --git a/packages/opencode/src/auth/copilot.ts b/packages/opencode/src/auth/copilot.ts index 042f7c35..4bbbaf2c 100644 --- a/packages/opencode/src/auth/copilot.ts +++ b/packages/opencode/src/auth/copilot.ts @@ -4,7 +4,9 @@ import path from "path" export const AuthCopilot = lazy(async () => { const file = Bun.file(path.join(Global.Path.state, "plugin", "copilot.ts")) - const response = 
fetch("https://raw.githubusercontent.com/sst/opencode-github-copilot/refs/heads/main/auth.ts") + const response = fetch( + "https://raw.githubusercontent.com/sst/opencode-github-copilot/refs/heads/main/auth.ts", + ) .then((x) => Bun.write(file, x)) .catch(() => {}) diff --git a/packages/opencode/src/auth/github-copilot.ts b/packages/opencode/src/auth/github-copilot.ts index ba5274e5..c03caa26 100644 --- a/packages/opencode/src/auth/github-copilot.ts +++ b/packages/opencode/src/auth/github-copilot.ts @@ -122,7 +122,10 @@ export namespace AuthGithubCopilot { return tokenData.token } - export const DeviceCodeError = NamedError.create("DeviceCodeError", z.object({})) + export const DeviceCodeError = NamedError.create( + "DeviceCodeError", + z.object({}), + ) export const TokenExchangeError = NamedError.create( "TokenExchangeError", diff --git a/packages/opencode/src/bun/index.ts b/packages/opencode/src/bun/index.ts index eea46737..7a7d89cf 100644 --- a/packages/opencode/src/bun/index.ts +++ b/packages/opencode/src/bun/index.ts @@ -8,7 +8,10 @@ import { readableStreamToText } from "bun" export namespace BunProc { const log = Log.create({ service: "bun" }) - export async function run(cmd: string[], options?: Bun.SpawnOptions.OptionsObject) { + export async function run( + cmd: string[], + options?: Bun.SpawnOptions.OptionsObject, + ) { log.info("running", { cmd: [which(), ...cmd], ...options, @@ -23,17 +26,9 @@ export namespace BunProc { BUN_BE_BUN: "1", }, }) - const code = await result.exited - const stdout = result.stdout - ? typeof result.stdout === "number" - ? result.stdout - : await readableStreamToText(result.stdout) - : undefined - const stderr = result.stderr - ? typeof result.stderr === "number" - ? result.stderr - : await readableStreamToText(result.stderr) - : undefined + const code = await result.exited; + const stdout = result.stdout ? typeof result.stdout === "number" ? result.stdout : await readableStreamToText(result.stdout) : undefined + const stderr = result.stderr ? typeof result.stderr === "number" ? 
result.stderr : await readableStreamToText(result.stderr) : undefined log.info("done", { code, stdout, @@ -60,22 +55,13 @@ export namespace BunProc { export async function install(pkg: string, version = "latest") { const mod = path.join(Global.Path.cache, "node_modules", pkg) const pkgjson = Bun.file(path.join(Global.Path.cache, "package.json")) - const parsed = await pkgjson.json().catch(async () => { - const result = { dependencies: {} } - await Bun.write(pkgjson.name!, JSON.stringify(result, null, 2)) - return result - }) + const parsed = await pkgjson.json().catch(() => ({ + dependencies: {}, + })) if (parsed.dependencies[pkg] === version) return mod - - // Build command arguments - const args = ["add", "--force", "--exact", "--cwd", Global.Path.cache, pkg + "@" + version] - - // Let Bun handle registry resolution: - // - If .npmrc files exist, Bun will use them automatically - // - If no .npmrc files exist, Bun will default to https://registry.npmjs.org - log.info("installing package using Bun's default registry resolution", { pkg, version }) - - await BunProc.run(args, { + parsed.dependencies[pkg] = version + await Bun.write(pkgjson, JSON.stringify(parsed, null, 2)) + await BunProc.run(["install", "--registry=https://registry.npmjs.org"], { cwd: Global.Path.cache, }).catch((e) => { throw new InstallFailedError( @@ -85,8 +71,6 @@ export namespace BunProc { }, ) }) - parsed.dependencies[pkg] = version - await Bun.write(pkgjson.name!, JSON.stringify(parsed, null, 2)) return mod } } diff --git a/packages/opencode/src/bus/index.ts b/packages/opencode/src/bus/index.ts index 0353da90..8461269a 100644 --- a/packages/opencode/src/bus/index.ts +++ b/packages/opencode/src/bus/index.ts @@ -18,7 +18,10 @@ export namespace Bus { const registry = new Map() - export function event(type: Type, properties: Properties) { + export function event( + type: Type, + properties: Properties, + ) { const result = { type, properties, @@ -69,7 +72,10 @@ export namespace Bus { export function subscribe( def: Definition, - callback: (event: { type: Definition["type"]; properties: z.infer }) => void, + callback: (event: { + type: Definition["type"] + properties: z.infer + }) => void, ) { return raw(def.type, callback) } diff --git a/packages/opencode/src/cli/bootstrap.ts b/packages/opencode/src/cli/bootstrap.ts index 4419773b..9ae274ed 100644 --- a/packages/opencode/src/cli/bootstrap.ts +++ b/packages/opencode/src/cli/bootstrap.ts @@ -1,15 +1,20 @@ import { App } from "../app/app" import { ConfigHooks } from "../config/hooks" +import { FileWatcher } from "../file/watch" import { Format } from "../format" import { LSP } from "../lsp" import { Share } from "../share/share" -export async function bootstrap(input: App.Input, cb: (app: App.Info) => Promise) { +export async function bootstrap( + input: App.Input, + cb: (app: App.Info) => Promise, +) { return App.provide(input, async (app) => { Share.init() Format.init() ConfigHooks.init() LSP.init() + FileWatcher.init() return cb(app) }) diff --git a/packages/opencode/src/cli/cmd/auth.ts b/packages/opencode/src/cli/cmd/auth.ts index fd7c0e64..9e8da95b 100644 --- a/packages/opencode/src/cli/cmd/auth.ts +++ b/packages/opencode/src/cli/cmd/auth.ts @@ -15,7 +15,11 @@ export const AuthCommand = cmd({ command: "auth", describe: "manage credentials", builder: (yargs) => - yargs.command(AuthLoginCommand).command(AuthLogoutCommand).command(AuthListCommand).demandCommand(), + yargs + .command(AuthLoginCommand) + .command(AuthLogoutCommand) + .command(AuthListCommand) + 
.demandCommand(), async handler() {}, }) @@ -27,7 +31,9 @@ export const AuthListCommand = cmd({ UI.empty() const authPath = path.join(Global.Path.data, "auth.json") const homedir = os.homedir() - const displayPath = authPath.startsWith(homedir) ? authPath.replace(homedir, "~") : authPath + const displayPath = authPath.startsWith(homedir) + ? authPath.replace(homedir, "~") + : authPath prompts.intro(`Credentials ${UI.Style.TEXT_DIM}${displayPath}`) const results = await Auth.all().then((x) => Object.entries(x)) const database = await ModelsDev.get() @@ -108,7 +114,8 @@ export const AuthLoginCommand = cmd({ if (provider === "other") { provider = await prompts.text({ message: "Enter provider id", - validate: (x) => (x.match(/^[a-z-]+$/) ? undefined : "a-z and hyphens only"), + validate: (x) => + x.match(/^[a-z-]+$/) ? undefined : "a-z and hyphens only", }) if (prompts.isCancel(provider)) throw new UI.CancelledError() provider = provider.replace(/^@ai-sdk\//, "") @@ -120,7 +127,7 @@ export const AuthLoginCommand = cmd({ if (provider === "amazon-bedrock") { prompts.log.info( - "Amazon bedrock can be configured with standard AWS environment variables like AWS_BEARER_TOKEN_BEDROCK, AWS_PROFILE or AWS_ACCESS_KEY_ID", + "Amazon bedrock can be configured with standard AWS environment variables like AWS_PROFILE or AWS_ACCESS_KEY_ID", ) prompts.outro("Done") return @@ -132,24 +139,20 @@ export const AuthLoginCommand = cmd({ options: [ { label: "Claude Pro/Max", - value: "max", + value: "oauth", }, { - label: "Create API Key", - value: "console", - }, - { - label: "Manually enter API Key", + label: "API Key", value: "api", }, ], }) if (prompts.isCancel(method)) throw new UI.CancelledError() - if (method === "max") { + if (method === "oauth") { // some weird bug where program exits without this await new Promise((resolve) => setTimeout(resolve, 10)) - const { url, verifier } = await AuthAnthropic.authorize("max") + const { url, verifier } = await AuthAnthropic.authorize() prompts.note("Trying to open browser...") try { await open(url) @@ -166,66 +169,13 @@ export const AuthLoginCommand = cmd({ }) if (prompts.isCancel(code)) throw new UI.CancelledError() - try { - const credentials = await AuthAnthropic.exchange(code, verifier) - await Auth.set("anthropic", { - type: "oauth", - refresh: credentials.refresh, - access: credentials.access, - expires: credentials.expires, + await AuthAnthropic.exchange(code, verifier) + .then(() => { + prompts.log.success("Login successful") }) - prompts.log.success("Login successful") - } catch { - prompts.log.error("Invalid code") - } - prompts.outro("Done") - return - } - - if (method === "console") { - // some weird bug where program exits without this - await new Promise((resolve) => setTimeout(resolve, 10)) - const { url, verifier } = await AuthAnthropic.authorize("console") - prompts.note("Trying to open browser...") - try { - await open(url) - } catch (e) { - prompts.log.error( - "Failed to open browser perhaps you are running without a display or X server, please open the following URL in your browser:", - ) - } - prompts.log.info(url) - - const code = await prompts.text({ - message: "Paste the authorization code here: ", - validate: (x) => (x.length > 0 ? 
undefined : "Required"), - }) - if (prompts.isCancel(code)) throw new UI.CancelledError() - - try { - const credentials = await AuthAnthropic.exchange(code, verifier) - const accessToken = credentials.access - const response = await fetch("https://api.anthropic.com/api/oauth/claude_cli/create_api_key", { - method: "POST", - headers: { - Authorization: `Bearer ${accessToken}`, - "Content-Type": "application/x-www-form-urlencoded", - Accept: "application/json, text/plain, */*", - }, + .catch(() => { + prompts.log.error("Invalid code") }) - if (!response.ok) { - throw new Error("Failed to create API key") - } - const json = await response.json() - await Auth.set("anthropic", { - type: "api", - key: json.raw_key, - }) - - prompts.log.success("Login successful - API key created and saved") - } catch (error) { - prompts.log.error("Invalid code or failed to create API key") - } prompts.outro("Done") return } @@ -236,13 +186,17 @@ export const AuthLoginCommand = cmd({ await new Promise((resolve) => setTimeout(resolve, 10)) const deviceInfo = await copilot.authorize() - prompts.note(`Please visit: ${deviceInfo.verification}\nEnter code: ${deviceInfo.user}`) + prompts.note( + `Please visit: ${deviceInfo.verification}\nEnter code: ${deviceInfo.user}`, + ) const spinner = prompts.spinner() spinner.start("Waiting for authorization...") while (true) { - await new Promise((resolve) => setTimeout(resolve, deviceInfo.interval * 1000)) + await new Promise((resolve) => + setTimeout(resolve, deviceInfo.interval * 1000), + ) const response = await copilot.poll(deviceInfo.device) if (response.status === "pending") continue if (response.status === "success") { @@ -294,7 +248,12 @@ export const AuthLogoutCommand = cmd({ const providerID = await prompts.select({ message: "Select provider", options: credentials.map(([key, value]) => ({ - label: (database[key]?.name || key) + UI.Style.TEXT_DIM + " (" + value.type + ")", + label: + (database[key]?.name || key) + + UI.Style.TEXT_DIM + + " (" + + value.type + + ")", value: key, })), }) diff --git a/packages/opencode/src/cli/cmd/debug/file.ts b/packages/opencode/src/cli/cmd/debug/file.ts index f773dbd9..021c49db 100644 --- a/packages/opencode/src/cli/cmd/debug/file.ts +++ b/packages/opencode/src/cli/cmd/debug/file.ts @@ -31,6 +31,7 @@ const FileStatusCommand = cmd({ export const FileCommand = cmd({ command: "file", - builder: (yargs) => yargs.command(FileReadCommand).command(FileStatusCommand).demandCommand(), + builder: (yargs) => + yargs.command(FileReadCommand).command(FileStatusCommand).demandCommand(), async handler() {}, }) diff --git a/packages/opencode/src/cli/cmd/debug/index.ts b/packages/opencode/src/cli/cmd/debug/index.ts index 77f4129a..e748183e 100644 --- a/packages/opencode/src/cli/cmd/debug/index.ts +++ b/packages/opencode/src/cli/cmd/debug/index.ts @@ -3,7 +3,6 @@ import { cmd } from "../cmd" import { FileCommand } from "./file" import { LSPCommand } from "./lsp" import { RipgrepCommand } from "./ripgrep" -import { ScrapCommand } from "./scrap" import { SnapshotCommand } from "./snapshot" export const DebugCommand = cmd({ @@ -13,13 +12,14 @@ export const DebugCommand = cmd({ .command(LSPCommand) .command(RipgrepCommand) .command(FileCommand) - .command(ScrapCommand) .command(SnapshotCommand) .command({ command: "wait", async handler() { await bootstrap({ cwd: process.cwd() }, async () => { - await new Promise((resolve) => setTimeout(resolve, 1_000 * 60 * 60 * 24)) + await new Promise((resolve) => + setTimeout(resolve, 1_000 * 60 * 60 * 24), + ) }) }, }) 
diff --git a/packages/opencode/src/cli/cmd/debug/lsp.ts b/packages/opencode/src/cli/cmd/debug/lsp.ts index ac1bac7c..d596bf6c 100644 --- a/packages/opencode/src/cli/cmd/debug/lsp.ts +++ b/packages/opencode/src/cli/cmd/debug/lsp.ts @@ -6,13 +6,14 @@ import { Log } from "../../../util/log" export const LSPCommand = cmd({ command: "lsp", builder: (yargs) => - yargs.command(DiagnosticsCommand).command(SymbolsCommand).command(DocumentSymbolsCommand).demandCommand(), + yargs.command(DiagnosticsCommand).command(SymbolsCommand).demandCommand(), async handler() {}, }) const DiagnosticsCommand = cmd({ command: "diagnostics <file>", - builder: (yargs) => yargs.positional("file", { type: "string", demandOption: true }), + builder: (yargs) => + yargs.positional("file", { type: "string", demandOption: true }), async handler(args) { await bootstrap({ cwd: process.cwd() }, async () => { await LSP.touchFile(args.file, true) @@ -23,24 +24,14 @@ const DiagnosticsCommand = cmd({ export const SymbolsCommand = cmd({ command: "symbols <query>", - builder: (yargs) => yargs.positional("query", { type: "string", demandOption: true }), + builder: (yargs) => + yargs.positional("query", { type: "string", demandOption: true }), async handler(args) { await bootstrap({ cwd: process.cwd() }, async () => { + await LSP.touchFile("./src/index.ts", true) using _ = Log.Default.time("symbols") const results = await LSP.workspaceSymbol(args.query) console.log(JSON.stringify(results, null, 2)) }) }, }) - -export const DocumentSymbolsCommand = cmd({ - command: "document-symbols <uri>", - builder: (yargs) => yargs.positional("uri", { type: "string", demandOption: true }), - async handler(args) { - await bootstrap({ cwd: process.cwd() }, async () => { - using _ = Log.Default.time("document-symbols") - const results = await LSP.documentSymbol(args.uri) - console.log(JSON.stringify(results, null, 2)) - }) - }, -}) diff --git a/packages/opencode/src/cli/cmd/debug/ripgrep.ts b/packages/opencode/src/cli/cmd/debug/ripgrep.ts index b8005c90..c7136867 100644 --- a/packages/opencode/src/cli/cmd/debug/ripgrep.ts +++ b/packages/opencode/src/cli/cmd/debug/ripgrep.ts @@ -5,7 +5,12 @@ import { cmd } from "../cmd" export const RipgrepCommand = cmd({ command: "rg", - builder: (yargs) => yargs.command(TreeCommand).command(FilesCommand).command(SearchCommand).demandCommand(), + builder: (yargs) => + yargs + .command(TreeCommand) + .command(FilesCommand) + .command(SearchCommand) + .demandCommand(), async handler() {}, }) @@ -45,7 +50,7 @@ const FilesCommand = cmd({ const files = await Ripgrep.files({ cwd: app.path.cwd, query: args.query, - glob: args.glob ?
[args.glob] : undefined, + glob: args.glob, limit: args.limit, }) console.log(files.join("\n")) diff --git a/packages/opencode/src/cli/cmd/debug/scrap.ts b/packages/opencode/src/cli/cmd/debug/scrap.ts deleted file mode 100644 index 3ba53e3f..00000000 --- a/packages/opencode/src/cli/cmd/debug/scrap.ts +++ /dev/null @@ -1,7 +0,0 @@ -import { cmd } from "../cmd" - -export const ScrapCommand = cmd({ - command: "scrap", - builder: (yargs) => yargs, - async handler() {}, -}) diff --git a/packages/opencode/src/cli/cmd/debug/snapshot.ts b/packages/opencode/src/cli/cmd/debug/snapshot.ts index 48d7f91e..a6d129d5 100644 --- a/packages/opencode/src/cli/cmd/debug/snapshot.ts +++ b/packages/opencode/src/cli/cmd/debug/snapshot.ts @@ -4,11 +4,15 @@ import { cmd } from "../cmd" export const SnapshotCommand = cmd({ command: "snapshot", - builder: (yargs) => yargs.command(CreateCommand).command(RestoreCommand).command(DiffCommand).demandCommand(), + builder: (yargs) => + yargs + .command(SnapshotCreateCommand) + .command(SnapshotRestoreCommand) + .demandCommand(), async handler() {}, }) -const CreateCommand = cmd({ +export const SnapshotCreateCommand = cmd({ command: "create", async handler() { await bootstrap({ cwd: process.cwd() }, async () => { @@ -18,7 +22,7 @@ const CreateCommand = cmd({ }, }) -const RestoreCommand = cmd({ +export const SnapshotRestoreCommand = cmd({ command: "restore ", builder: (yargs) => yargs.positional("commit", { @@ -33,20 +37,3 @@ const RestoreCommand = cmd({ }) }, }) - -export const DiffCommand = cmd({ - command: "diff ", - describe: "diff", - builder: (yargs) => - yargs.positional("commit", { - type: "string", - description: "commit", - demandOption: true, - }), - async handler(args) { - await bootstrap({ cwd: process.cwd() }, async () => { - const diff = await Snapshot.diff("test", args.commit) - console.log(diff) - }) - }, -}) diff --git a/packages/opencode/src/cli/cmd/generate.ts b/packages/opencode/src/cli/cmd/generate.ts index d6ed0eb1..0cef1077 100644 --- a/packages/opencode/src/cli/cmd/generate.ts +++ b/packages/opencode/src/cli/cmd/generate.ts @@ -10,6 +10,9 @@ export const GenerateCommand = { const dir = "gen" await fs.rmdir(dir, { recursive: true }).catch(() => {}) await fs.mkdir(dir, { recursive: true }) - await Bun.write(path.join(dir, "openapi.json"), JSON.stringify(specs, null, 2)) + await Bun.write( + path.join(dir, "openapi.json"), + JSON.stringify(specs, null, 2), + ) }, } satisfies CommandModule diff --git a/packages/opencode/src/cli/cmd/install-github.ts b/packages/opencode/src/cli/cmd/install-github.ts deleted file mode 100644 index 604d76a4..00000000 --- a/packages/opencode/src/cli/cmd/install-github.ts +++ /dev/null @@ -1,235 +0,0 @@ -import { $ } from "bun" -import path from "path" -import { exec } from "child_process" -import * as prompts from "@clack/prompts" -import { map, pipe, sortBy, values } from "remeda" -import { UI } from "../ui" -import { cmd } from "./cmd" -import { ModelsDev } from "../../provider/models" -import { App } from "../../app/app" - -const WORKFLOW_FILE = ".github/workflows/opencode.yml" - -export const InstallGithubCommand = cmd({ - command: "install-github", - describe: "install the GitHub agent", - async handler() { - await App.provide({ cwd: process.cwd() }, async () => { - UI.empty() - prompts.intro("Install GitHub agent") - const app = await getAppInfo() - await installGitHubApp() - - const providers = await ModelsDev.get() - const provider = await promptProvider() - const model = await promptModel() - //const key = await 
promptKey() - - await addWorkflowFiles() - printNextSteps() - - function printNextSteps() { - let step2 - if (provider === "amazon-bedrock") { - step2 = - "Configure OIDC in AWS - https://docs.github.com/en/actions/how-tos/security-for-github-actions/security-hardening-your-deployments/configuring-openid-connect-in-amazon-web-services" - } else { - const url = `https://github.com/organizations/${app.owner}/settings/secrets/actions` - const env = providers[provider].env - const envStr = - env.length === 1 - ? `\`${env[0]}\` secret` - : `\`${[env.slice(0, -1).join("\`, \`"), ...env.slice(-1)].join("\` and \`")}\` secrets` - step2 = `Add ${envStr} for ${providers[provider].name} - ${url}` - } - - prompts.outro( - [ - "Next steps:", - ` 1. Commit "${WORKFLOW_FILE}" file and push`, - ` 2. ${step2}`, - " 3. Learn how to use the GitHub agent - https://docs.opencode.ai/docs/github/getting-started", - ].join("\n"), - ) - } - - async function getAppInfo() { - const app = App.info() - if (!app.git) { - prompts.log.error(`Could not find git repository. Please run this command from a git repository.`) - throw new UI.CancelledError() - } - - // Get repo info - const info = await $`git remote get-url origin`.quiet().nothrow().text() - // match https or git pattern - // ie. https://github.com/sst/opencode.git - // ie. git@github.com:sst/opencode.git - const parsed = info.match(/git@github\.com:(.*)\.git/) ?? info.match(/github\.com\/(.*)\.git/) - if (!parsed) { - prompts.log.error(`Could not find git repository. Please run this command from a git repository.`) - throw new UI.CancelledError() - } - const [owner, repo] = parsed[1].split("/") - return { owner, repo, root: app.path.root } - } - - async function promptProvider() { - const priority: Record = { - anthropic: 0, - "github-copilot": 1, - openai: 2, - google: 3, - } - let provider = await prompts.select({ - message: "Select provider", - maxItems: 8, - options: [ - ...pipe( - providers, - values(), - sortBy( - (x) => priority[x.id] ?? 99, - (x) => x.name ?? x.id, - ), - map((x) => ({ - label: x.name, - value: x.id, - hint: priority[x.id] === 0 ? "recommended" : undefined, - })), - ), - { - value: "other", - label: "Other", - }, - ], - }) - - if (prompts.isCancel(provider)) throw new UI.CancelledError() - if (provider === "other") { - provider = await prompts.text({ - message: "Enter provider id", - validate: (x) => (x.match(/^[a-z-]+$/) ? undefined : "a-z and hyphens only"), - }) - if (prompts.isCancel(provider)) throw new UI.CancelledError() - provider = provider.replace(/^@ai-sdk\//, "") - if (prompts.isCancel(provider)) throw new UI.CancelledError() - prompts.log.warn( - `This only stores a credential for ${provider} - you will need configure it in opencode.json, check the docs for examples.`, - ) - } - - return provider - } - - async function promptModel() { - const providerData = providers[provider]! - - const model = await prompts.select({ - message: "Select model", - maxItems: 8, - options: pipe( - providerData.models, - values(), - sortBy((x) => x.name ?? x.id), - map((x) => ({ - label: x.name ?? 
x.id, - value: x.id, - })), - ), - }) - - if (prompts.isCancel(model)) throw new UI.CancelledError() - return model - } - - async function installGitHubApp() { - const s = prompts.spinner() - s.start("Installing GitHub app") - - // Get installation - const installation = await getInstallation() - if (installation) return s.stop("GitHub app already installed") - - // Open browser - const url = "https://github.com/apps/opencode-agent" - const command = - process.platform === "darwin" - ? `open "${url}"` - : process.platform === "win32" - ? `start "${url}"` - : `xdg-open "${url}"` - - exec(command, (error) => { - if (error) { - prompts.log.warn(`Could not open browser. Please visit: ${url}`) - } - }) - - // Wait for installation - s.message("Waiting for GitHub app to be installed") - const MAX_RETRIES = 60 - let retries = 0 - do { - const installation = await getInstallation() - if (installation) break - - if (retries > MAX_RETRIES) { - s.stop( - `Failed to detect GitHub app installation. Make sure to install the app for the \`${app.owner}/${app.repo}\` repository.`, - ) - throw new UI.CancelledError() - } - - retries++ - await new Promise((resolve) => setTimeout(resolve, 1000)) - } while (true) - - s.stop("Installed GitHub app") - - async function getInstallation() { - return await fetch(`https://api.opencode.ai/get_github_app_installation?owner=${app.owner}&repo=${app.repo}`) - .then((res) => res.json()) - .then((data) => data.installation) - } - } - - async function addWorkflowFiles() { - const envStr = - provider === "amazon-bedrock" - ? "" - : `\n env:${providers[provider].env.map((e) => `\n ${e}: \${{ secrets.${e} }}`).join("")}` - - await Bun.write( - path.join(app.root, WORKFLOW_FILE), - ` -name: opencode - -on: - issue_comment: - types: [created] - -jobs: - opencode: - if: startsWith(github.event.comment.body, 'hey opencode') - runs-on: ubuntu-latest - permissions: - id-token: write - steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - fetch-depth: 1 - - - name: Run opencode - uses: sst/opencode/sdks/github@github-v1${envStr} - with: - model: ${provider}/${model} -`.trim(), - ) - - prompts.log.success(`Added workflow file: "${WORKFLOW_FILE}"`) - } - }) - }, -}) diff --git a/packages/opencode/src/cli/cmd/mcp.ts b/packages/opencode/src/cli/cmd/mcp.ts deleted file mode 100644 index 5f8b6e5d..00000000 --- a/packages/opencode/src/cli/cmd/mcp.ts +++ /dev/null @@ -1,79 +0,0 @@ -import { cmd } from "./cmd" -import { Client } from "@modelcontextprotocol/sdk/client/index.js" -import { StreamableHTTPClientTransport } from "@modelcontextprotocol/sdk/client/streamableHttp.js" -import * as prompts from "@clack/prompts" -import { UI } from "../ui" - -export const McpCommand = cmd({ - command: "mcp", - builder: (yargs) => yargs.command(McpAddCommand).demandCommand(), - async handler() {}, -}) - -export const McpAddCommand = cmd({ - command: "add", - describe: "add an MCP server", - async handler() { - UI.empty() - prompts.intro("Add MCP server") - - const name = await prompts.text({ - message: "Enter MCP server name", - validate: (x) => (x.length > 0 ? 
undefined : "Required"), - }) - if (prompts.isCancel(name)) throw new UI.CancelledError() - - const type = await prompts.select({ - message: "Select MCP server type", - options: [ - { - label: "Local", - value: "local", - hint: "Run a local command", - }, - { - label: "Remote", - value: "remote", - hint: "Connect to a remote URL", - }, - ], - }) - if (prompts.isCancel(type)) throw new UI.CancelledError() - - if (type === "local") { - const command = await prompts.text({ - message: "Enter command to run", - placeholder: "e.g., opencode x @modelcontextprotocol/server-filesystem", - validate: (x) => (x.length > 0 ? undefined : "Required"), - }) - if (prompts.isCancel(command)) throw new UI.CancelledError() - - prompts.log.info(`Local MCP server "${name}" configured with command: ${command}`) - prompts.outro("MCP server added successfully") - return - } - - if (type === "remote") { - const url = await prompts.text({ - message: "Enter MCP server URL", - placeholder: "e.g., https://example.com/mcp", - validate: (x) => { - if (x.length === 0) return "Required" - const isValid = URL.canParse(x) - return isValid ? undefined : "Invalid URL" - }, - }) - if (prompts.isCancel(url)) throw new UI.CancelledError() - - const client = new Client({ - name: "opencode", - version: "1.0.0", - }) - const transport = new StreamableHTTPClientTransport(new URL(url)) - await client.connect(transport) - prompts.log.info(`Remote MCP server "${name}" configured with URL: ${url}`) - } - - prompts.outro("MCP server added successfully") - }, -}) diff --git a/packages/opencode/src/cli/cmd/run.ts b/packages/opencode/src/cli/cmd/run.ts index fe15a0bd..1905aa17 100644 --- a/packages/opencode/src/cli/cmd/run.ts +++ b/packages/opencode/src/cli/cmd/run.ts @@ -2,14 +2,12 @@ import type { Argv } from "yargs" import { Bus } from "../../bus" import { Provider } from "../../provider/provider" import { Session } from "../../session" +import { Message } from "../../session/message" import { UI } from "../ui" import { cmd } from "./cmd" import { Flag } from "../../flag/flag" import { Config } from "../../config/config" import { bootstrap } from "../bootstrap" -import { MessageV2 } from "../../session/message-v2" -import { Mode } from "../../session/mode" -import { Identifier } from "../../id/id" const TOOL: Record = { todowrite: ["Todo", UI.Style.TEXT_WARNING_BOLD], @@ -54,22 +52,13 @@ export const RunCommand = cmd({ alias: ["m"], describe: "model to use in the format of provider/model", }) - .option("mode", { - type: "string", - describe: "mode to use", - }) }, handler: async (args) => { - let message = args.message.join(" ") - - if (!process.stdin.isTTY) message += "\n" + (await Bun.stdin.text()) - + const message = args.message.join(" ") await bootstrap({ cwd: process.cwd() }, async () => { const session = await (async () => { if (args.continue) { - const list = Session.list() - const first = await list.next() - await list.return() + const first = await Session.list().next() if (first.done) return return first.value } @@ -84,27 +73,32 @@ export const RunCommand = cmd({ return } + const isPiped = !process.stdout.isTTY + UI.empty() UI.println(UI.logo()) UI.empty() + UI.println(UI.Style.TEXT_NORMAL_BOLD + "> ", message) + UI.empty() const cfg = await Config.get() - if (cfg.share === "auto" || Flag.OPENCODE_AUTO_SHARE || args.share) { - try { - await Session.share(session.id) - UI.println(UI.Style.TEXT_INFO_BOLD + "~ https://opencode.ai/s/" + session.id.slice(-8)) - } catch (error) { - if (error instanceof Error && 
error.message.includes("disabled")) { - UI.println(UI.Style.TEXT_DANGER_BOLD + "! " + error.message) - } else { - throw error - } - } + if (cfg.autoshare || Flag.OPENCODE_AUTO_SHARE || args.share) { + await Session.share(session.id) + UI.println( + UI.Style.TEXT_INFO_BOLD + + "~ https://opencode.ai/s/" + + session.id.slice(-8), + ) } UI.empty() - const { providerID, modelID } = args.model ? Provider.parseModel(args.model) : await Provider.defaultModel() - UI.println(UI.Style.TEXT_NORMAL_BOLD + "@ ", UI.Style.TEXT_NORMAL + `${providerID}/${modelID}`) + const { providerID, modelID } = args.model + ? Provider.parseModel(args.model) + : await Provider.defaultModel() + UI.println( + UI.Style.TEXT_NORMAL_BOLD + "@ ", + UI.Style.TEXT_NORMAL + `${providerID}/${modelID}`, + ) UI.empty() function printEvent(color: string, type: string, title: string) { @@ -116,73 +110,52 @@ export const RunCommand = cmd({ ) } - let text = "" - Bus.subscribe(MessageV2.Event.PartUpdated, async (evt) => { - if (evt.properties.part.sessionID !== session.id) return - if (evt.properties.part.messageID === messageID) return + Bus.subscribe(Message.Event.PartUpdated, async (evt) => { + if (evt.properties.sessionID !== session.id) return const part = evt.properties.part + const message = await Session.getMessage( + evt.properties.sessionID, + evt.properties.messageID, + ) - if (part.type === "tool" && part.state.status === "completed") { - const [tool, color] = TOOL[part.tool] ?? [part.tool, UI.Style.TEXT_INFO_BOLD] - const title = - part.state.title || Object.keys(part.state.input).length > 0 ? JSON.stringify(part.state.input) : "Unknown" - printEvent(color, tool, title) + if ( + part.type === "tool-invocation" && + part.toolInvocation.state === "result" + ) { + const metadata = message.metadata.tool[part.toolInvocation.toolCallId] + const [tool, color] = TOOL[part.toolInvocation.toolName] ?? [ + part.toolInvocation.toolName, + UI.Style.TEXT_INFO_BOLD, + ] + printEvent(color, tool, metadata?.title || "Unknown") } if (part.type === "text") { - text = part.text - - if (part.time?.end) { + if (part.text.includes("\n")) { UI.empty() - UI.println(UI.markdown(text)) + UI.println(part.text) UI.empty() - text = "" return } + printEvent(UI.Style.TEXT_NORMAL_BOLD, "Text", part.text) } }) - let errorMsg: string | undefined - Bus.subscribe(Session.Event.Error, async (evt) => { - const { sessionID, error } = evt.properties - if (sessionID !== session.id || !error) return - let err = String(error.name) - - if ("data" in error && error.data && "message" in error.data) { - err = error.data.message - } - errorMsg = errorMsg ? errorMsg + "\n" + err : err - - UI.error(err) - }) - - const mode = args.mode ? await Mode.get(args.mode) : await Mode.list().then((x) => x[0]) - - const messageID = Identifier.ascending("message") const result = await Session.chat({ sessionID: session.id, - messageID, - ...(mode.model - ? 
mode.model - : { - providerID, - modelID, - }), - mode: mode.name, + providerID, + modelID, parts: [ { - id: Identifier.ascending("part"), type: "text", text: message, }, ], }) - const isPiped = !process.stdout.isTTY if (isPiped) { const match = result.parts.findLast((x) => x.type === "text") - if (match) process.stdout.write(UI.markdown(match.text)) - if (errorMsg) process.stdout.write(errorMsg) + if (match) process.stdout.write(match.text) } UI.empty() }) diff --git a/packages/opencode/src/cli/cmd/serve.ts b/packages/opencode/src/cli/cmd/serve.ts index 6dab38f1..f3686f30 100644 --- a/packages/opencode/src/cli/cmd/serve.ts +++ b/packages/opencode/src/cli/cmd/serve.ts @@ -38,7 +38,9 @@ export const ServeCommand = cmd({ hostname, }) - console.log(`opencode server listening on http://${server.hostname}:${server.port}`) + console.log( + `opencode server listening on http://${server.hostname}:${server.port}`, + ) await new Promise(() => {}) diff --git a/packages/opencode/src/cli/cmd/stats.ts b/packages/opencode/src/cli/cmd/stats.ts deleted file mode 100644 index 39ae86ba..00000000 --- a/packages/opencode/src/cli/cmd/stats.ts +++ /dev/null @@ -1,98 +0,0 @@ -import { cmd } from "./cmd" - -interface SessionStats { - totalSessions: number - totalMessages: number - totalCost: number - totalTokens: { - input: number - output: number - reasoning: number - cache: { - read: number - write: number - } - } - toolUsage: Record - dateRange: { - earliest: number - latest: number - } - days: number - costPerDay: number -} - -export const StatsCommand = cmd({ - command: "stats", - handler: async () => {}, -}) - -export function displayStats(stats: SessionStats) { - const width = 56 - - function renderRow(label: string, value: string): string { - const availableWidth = width - 1 - const paddingNeeded = availableWidth - label.length - value.length - const padding = Math.max(0, paddingNeeded) - return `│${label}${" ".repeat(padding)}${value} │` - } - - // Overview section - console.log("┌────────────────────────────────────────────────────────┐") - console.log("│ OVERVIEW │") - console.log("├────────────────────────────────────────────────────────┤") - console.log(renderRow("Sessions", stats.totalSessions.toLocaleString())) - console.log(renderRow("Messages", stats.totalMessages.toLocaleString())) - console.log(renderRow("Days", stats.days.toString())) - console.log("└────────────────────────────────────────────────────────┘") - console.log() - - // Cost & Tokens section - console.log("┌────────────────────────────────────────────────────────┐") - console.log("│ COST & TOKENS │") - console.log("├────────────────────────────────────────────────────────┤") - const cost = isNaN(stats.totalCost) ? 0 : stats.totalCost - const costPerDay = isNaN(stats.costPerDay) ? 
0 : stats.costPerDay - console.log(renderRow("Total Cost", `$${cost.toFixed(2)}`)) - console.log(renderRow("Cost/Day", `$${costPerDay.toFixed(2)}`)) - console.log(renderRow("Input", formatNumber(stats.totalTokens.input))) - console.log(renderRow("Output", formatNumber(stats.totalTokens.output))) - console.log(renderRow("Cache Read", formatNumber(stats.totalTokens.cache.read))) - console.log(renderRow("Cache Write", formatNumber(stats.totalTokens.cache.write))) - console.log("└────────────────────────────────────────────────────────┘") - console.log() - - // Tool Usage section - if (Object.keys(stats.toolUsage).length > 0) { - const sortedTools = Object.entries(stats.toolUsage) - .sort(([, a], [, b]) => b - a) - .slice(0, 10) - - console.log("┌────────────────────────────────────────────────────────┐") - console.log("│ TOOL USAGE │") - console.log("├────────────────────────────────────────────────────────┤") - - const maxCount = Math.max(...sortedTools.map(([, count]) => count)) - const totalToolUsage = Object.values(stats.toolUsage).reduce((a, b) => a + b, 0) - - for (const [tool, count] of sortedTools) { - const barLength = Math.max(1, Math.floor((count / maxCount) * 20)) - const bar = "█".repeat(barLength) - const percentage = ((count / totalToolUsage) * 100).toFixed(1) - - const content = ` ${tool.padEnd(10)} ${bar.padEnd(20)} ${count.toString().padStart(3)} (${percentage.padStart(4)}%)` - const padding = Math.max(0, width - content.length) - console.log(`│${content}${" ".repeat(padding)} │`) - } - console.log("└────────────────────────────────────────────────────────┘") - } - console.log() -} -function formatNumber(num: number): string { - if (num >= 1000000) { - return (num / 1000000).toFixed(1) + "M" - } else if (num >= 1000) { - return (num / 1000).toFixed(1) + "K" - } - return num.toString() -} diff --git a/packages/opencode/src/cli/cmd/tui.ts b/packages/opencode/src/cli/cmd/tui.ts index aa49a856..04e90978 100644 --- a/packages/opencode/src/cli/cmd/tui.ts +++ b/packages/opencode/src/cli/cmd/tui.ts @@ -9,33 +9,15 @@ import fs from "fs/promises" import { Installation } from "../../installation" import { Config } from "../../config/config" import { Bus } from "../../bus" -import { Log } from "../../util/log" -import { FileWatcher } from "../../file/watch" -import { Mode } from "../../session/mode" export const TuiCommand = cmd({ command: "$0 [project]", describe: "start opencode tui", builder: (yargs) => - yargs - .positional("project", { - type: "string", - describe: "path to start opencode in", - }) - .option("model", { - type: "string", - alias: ["m"], - describe: "model to use in the format of provider/model", - }) - .option("prompt", { - alias: ["p"], - type: "string", - describe: "prompt to use", - }) - .option("mode", { - type: "string", - describe: "mode to use", - }), + yargs.positional("project", { + type: "string", + describe: "path to start opencode in", + }), handler: async (args) => { while (true) { const cwd = args.project ? 
path.resolve(args.project) : process.cwd() @@ -46,7 +28,6 @@ export const TuiCommand = cmd({ return } const result = await bootstrap({ cwd }, async (app) => { - FileWatcher.init() const providers = await Provider.list() if (Object.keys(providers).length === 0) { return "needs_provider" @@ -58,7 +39,9 @@ export const TuiCommand = cmd({ }) let cmd = ["go", "run", "./main.go"] - let cwd = Bun.fileURLToPath(new URL("../../../../tui/cmd/opencode", import.meta.url)) + let cwd = Bun.fileURLToPath( + new URL("../../../../tui/cmd/opencode", import.meta.url), + ) if (Bun.embeddedFiles.length > 0) { const blob = Bun.embeddedFiles[0] as File let binaryName = blob.name @@ -74,26 +57,16 @@ export const TuiCommand = cmd({ cwd = process.cwd() cmd = [binary] } - Log.Default.info("tui", { - cmd, - }) const proc = Bun.spawn({ - cmd: [ - ...cmd, - ...(args.model ? ["--model", args.model] : []), - ...(args.prompt ? ["--prompt", args.prompt] : []), - ...(args.mode ? ["--mode", args.mode] : []), - ], + cmd: [...cmd, ...process.argv.slice(2)], cwd, stdout: "inherit", stderr: "inherit", stdin: "inherit", env: { ...process.env, - CGO_ENABLED: "0", OPENCODE_SERVER: server.url.toString(), OPENCODE_APP_INFO: JSON.stringify(app), - OPENCODE_MODES: JSON.stringify(await Mode.list()), }, onExit: () => { server.stop() diff --git a/packages/opencode/src/cli/cmd/upgrade.ts b/packages/opencode/src/cli/cmd/upgrade.ts index 17d18168..759ab5ae 100644 --- a/packages/opencode/src/cli/cmd/upgrade.ts +++ b/packages/opencode/src/cli/cmd/upgrade.ts @@ -27,26 +27,22 @@ export const UpgradeCommand = { const detectedMethod = await Installation.method() const method = (args.method as Installation.Method) ?? detectedMethod if (method === "unknown") { - prompts.log.error(`opencode is installed to ${process.execPath} and seems to be managed by a package manager`) + prompts.log.error( + `opencode is installed to ${process.execPath} and seems to be managed by a package manager`, + ) prompts.outro("Done") return } prompts.log.info("Using method: " + method) const target = args.target ?? (await Installation.latest()) - - if (Installation.VERSION === target) { - prompts.log.warn(`opencode upgrade skipped: ${target} is already installed`) - prompts.outro("Done") - return - } - prompts.log.info(`From ${Installation.VERSION} → ${target}`) const spinner = prompts.spinner() spinner.start("Upgrading...") const err = await Installation.upgrade(method, target).catch((err) => err) if (err) { spinner.stop("Upgrade failed") - if (err instanceof Installation.UpgradeFailedError) prompts.log.error(err.data.stderr) + if (err instanceof Installation.UpgradeFailedError) + prompts.log.error(err.data.stderr) else if (err instanceof Error) prompts.log.error(err.message) prompts.outro("Done") return diff --git a/packages/opencode/src/cli/error.ts b/packages/opencode/src/cli/error.ts index 261206a1..752ad696 100644 --- a/packages/opencode/src/cli/error.ts +++ b/packages/opencode/src/cli/error.ts @@ -5,11 +5,14 @@ import { UI } from "./ui" export function FormatError(input: unknown) { if (MCP.Failed.isInstance(input)) return `MCP server "${input.data.name}" failed. 
Note, opencode does not support MCP authentication yet.` - if (Config.JsonError.isInstance(input)) return `Config file at ${input.data.path} is not valid JSON` + if (Config.JsonError.isInstance(input)) + return `Config file at ${input.data.path} is not valid JSON` if (Config.InvalidError.isInstance(input)) return [ `Config file at ${input.data.path} is invalid`, - ...(input.data.issues?.map((issue) => "↳ " + issue.message + " " + issue.path.join(".")) ?? []), + ...(input.data.issues?.map( + (issue) => "↳ " + issue.message + " " + issue.path.join("."), + ) ?? []), ].join("\n") if (UI.CancelledError.isInstance(input)) return "" diff --git a/packages/opencode/src/cli/ui.ts b/packages/opencode/src/cli/ui.ts index 0fa4d1ce..9801b459 100644 --- a/packages/opencode/src/cli/ui.ts +++ b/packages/opencode/src/cli/ui.ts @@ -76,8 +76,4 @@ export namespace UI { export function error(message: string) { println(Style.TEXT_DANGER_BOLD + "Error: " + Style.TEXT_NORMAL + message) } - - export function markdown(text: string): string { - return text - } } diff --git a/packages/opencode/src/config/config.ts b/packages/opencode/src/config/config.ts index 781e47c7..eb67778e 100644 --- a/packages/opencode/src/config/config.ts +++ b/packages/opencode/src/config/config.ts @@ -4,7 +4,7 @@ import { z } from "zod" import { App } from "../app/app" import { Filesystem } from "../util/filesystem" import { ModelsDev } from "../provider/models" -import { mergeDeep, pipe } from "remeda" +import { mergeDeep } from "remeda" import { Global } from "../global" import fs from "fs/promises" import { lazy } from "../util/lazy" @@ -21,17 +21,6 @@ export namespace Config { result = mergeDeep(result, await load(resolved)) } } - - // Handle migration from autoshare to share field - if (result.autoshare === true && !result.share) { - result.share = "auto" - } - - if (!result.username) { - const os = await import("os") - result.username = os.userInfo().username - } - log.info("loaded", result) return result @@ -40,12 +29,18 @@ export namespace Config { export const McpLocal = z .object({ type: z.literal("local").describe("Type of MCP server connection"), - command: z.string().array().describe("Command and arguments to run the MCP server"), + command: z + .string() + .array() + .describe("Command and arguments to run the MCP server"), environment: z .record(z.string(), z.string()) .optional() .describe("Environment variables to set when running the MCP server"), - enabled: z.boolean().optional().describe("Enable or disable the MCP server on startup"), + enabled: z + .boolean() + .optional() + .describe("Enable or disable the MCP server on startup"), }) .strict() .openapi({ @@ -56,8 +51,10 @@ export namespace Config { .object({ type: z.literal("remote").describe("Type of MCP server connection"), url: z.string().describe("URL of the remote MCP server"), - enabled: z.boolean().optional().describe("Enable or disable the MCP server on startup"), - headers: z.record(z.string(), z.string()).optional().describe("Headers to send with the request"), + enabled: z + .boolean() + .optional() + .describe("Enable or disable the MCP server on startup"), }) .strict() .openapi({ @@ -67,107 +64,105 @@ export namespace Config { export const Mcp = z.discriminatedUnion("type", [McpLocal, McpRemote]) export type Mcp = z.infer - export const Mode = z - .object({ - model: z.string().optional(), - prompt: z.string().optional(), - tools: z.record(z.string(), z.boolean()).optional(), - }) - .openapi({ - ref: "ModeConfig", - }) - export type Mode = z.infer - export 
const Keybinds = z .object({ - leader: z.string().optional().default("ctrl+x").describe("Leader key for keybind combinations"), - app_help: z.string().optional().default("h").describe("Show help dialog"), - switch_mode: z.string().optional().default("tab").describe("Next mode"), - switch_mode_reverse: z.string().optional().default("shift+tab").describe("Previous Mode"), - editor_open: z.string().optional().default("e").describe("Open external editor"), - session_export: z.string().optional().default("x").describe("Export session to editor"), - session_new: z.string().optional().default("n").describe("Create a new session"), - session_list: z.string().optional().default("l").describe("List all sessions"), - session_share: z.string().optional().default("s").describe("Share current session"), - session_unshare: z.string().optional().default("u").describe("Unshare current session"), - session_interrupt: z.string().optional().default("esc").describe("Interrupt current session"), - session_compact: z.string().optional().default("c").describe("Compact the session"), - tool_details: z.string().optional().default("d").describe("Toggle tool details"), - model_list: z.string().optional().default("m").describe("List available models"), - theme_list: z.string().optional().default("t").describe("List available themes"), - file_list: z.string().optional().default("f").describe("List files"), - file_close: z.string().optional().default("esc").describe("Close file"), - file_search: z.string().optional().default("/").describe("Search file"), - file_diff_toggle: z.string().optional().default("v").describe("Split/unified diff"), - project_init: z.string().optional().default("i").describe("Create/update AGENTS.md"), - input_clear: z.string().optional().default("ctrl+c").describe("Clear input field"), - input_paste: z.string().optional().default("ctrl+v").describe("Paste from clipboard"), - input_submit: z.string().optional().default("enter").describe("Submit input"), - input_newline: z.string().optional().default("shift+enter,ctrl+j").describe("Insert newline in input"), - messages_page_up: z.string().optional().default("pgup").describe("Scroll messages up by one page"), - messages_page_down: z.string().optional().default("pgdown").describe("Scroll messages down by one page"), - messages_half_page_up: z.string().optional().default("ctrl+alt+u").describe("Scroll messages up by half page"), + leader: z + .string() + .optional() + .describe("Leader key for keybind combinations"), + help: z.string().optional().describe("Show help dialog"), + editor_open: z.string().optional().describe("Open external editor"), + session_new: z.string().optional().describe("Create a new session"), + session_list: z.string().optional().describe("List all sessions"), + session_share: z.string().optional().describe("Share current session"), + session_interrupt: z + .string() + .optional() + .describe("Interrupt current session"), + session_compact: z + .string() + .optional() + .describe("Toggle compact mode for session"), + tool_details: z.string().optional().describe("Show tool details"), + model_list: z.string().optional().describe("List available models"), + theme_list: z.string().optional().describe("List available themes"), + project_init: z + .string() + .optional() + .describe("Initialize project configuration"), + input_clear: z.string().optional().describe("Clear input field"), + input_paste: z.string().optional().describe("Paste from clipboard"), + input_submit: z.string().optional().describe("Submit input"), + 
input_newline: z.string().optional().describe("Insert newline in input"), + history_previous: z + .string() + .optional() + .describe("Navigate to previous history item"), + history_next: z + .string() + .optional() + .describe("Navigate to next history item"), + messages_page_up: z + .string() + .optional() + .describe("Scroll messages up by one page"), + messages_page_down: z + .string() + .optional() + .describe("Scroll messages down by one page"), + messages_half_page_up: z + .string() + .optional() + .describe("Scroll messages up by half page"), messages_half_page_down: z .string() .optional() - .default("ctrl+alt+d") .describe("Scroll messages down by half page"), - messages_previous: z.string().optional().default("ctrl+up").describe("Navigate to previous message"), - messages_next: z.string().optional().default("ctrl+down").describe("Navigate to next message"), - messages_first: z.string().optional().default("ctrl+g").describe("Navigate to first message"), - messages_last: z.string().optional().default("ctrl+alt+g").describe("Navigate to last message"), - messages_layout_toggle: z.string().optional().default("p").describe("Toggle layout"), - messages_copy: z.string().optional().default("y").describe("Copy message"), - messages_revert: z.string().optional().default("r").describe("Revert message"), - app_exit: z.string().optional().default("ctrl+c,q").describe("Exit the application"), + messages_previous: z + .string() + .optional() + .describe("Navigate to previous message"), + messages_next: z.string().optional().describe("Navigate to next message"), + messages_first: z + .string() + .optional() + .describe("Navigate to first message"), + messages_last: z.string().optional().describe("Navigate to last message"), + app_exit: z.string().optional().describe("Exit the application"), }) .strict() .openapi({ ref: "KeybindsConfig", }) - - export const Layout = z.enum(["auto", "stretch"]).openapi({ - ref: "LayoutConfig", - }) - export type Layout = z.infer - export const Info = z .object({ - $schema: z.string().optional().describe("JSON schema reference for configuration validation"), - theme: z.string().optional().describe("Theme name to use for the interface"), - keybinds: Keybinds.optional().describe("Custom keybind configurations"), - share: z - .enum(["manual", "auto", "disabled"]) + $schema: z + .string() .optional() - .describe( - "Control sharing behavior:'manual' allows manual sharing via commands, 'auto' enables automatic sharing, 'disabled' disables all sharing", - ), + .describe("JSON schema reference for configuration validation"), + theme: z + .string() + .optional() + .describe("Theme name to use for the interface"), + keybinds: Keybinds.optional().describe("Custom keybind configurations"), autoshare: z .boolean() .optional() - .describe("@deprecated Use 'share' field instead. 
Share newly created sessions automatically"), - autoupdate: z.boolean().optional().describe("Automatically update to the latest version"), - disabled_providers: z.array(z.string()).optional().describe("Disable providers that are loaded automatically"), - model: z.string().describe("Model to use in the format of provider/model, eg anthropic/claude-2").optional(), - small_model: z + .describe("Share newly created sessions automatically"), + autoupdate: z + .boolean() + .optional() + .describe("Automatically update to the latest version"), + disabled_providers: z + .array(z.string()) + .optional() + .describe("Disable providers that are loaded automatically"), + model: z .string() .describe( - "Small model to use for tasks like summarization and title generation in the format of provider/model", + "Model to use in the format of provider/model, eg anthropic/claude-2", ) .optional(), - username: z - .string() - .optional() - .describe("Custom username to display in conversations instead of system username"), - mode: z - .object({ - build: Mode.optional(), - plan: Mode.optional(), - }) - .catchall(Mode) - .optional() - .describe("Modes configuration, see https://opencode.ai/docs/modes"), - log_level: Log.Level.optional().describe("Minimum log level to write to log files"), provider: z .record( ModelsDev.Provider.partial().extend({ @@ -177,9 +172,14 @@ export namespace Config { ) .optional() .describe("Custom provider configurations and model overrides"), - mcp: z.record(z.string(), Mcp).optional().describe("MCP (Model Context Protocol) server configurations"), - instructions: z.array(z.string()).optional().describe("Additional instruction files or patterns to include"), - layout: Layout.optional().describe("@deprecated Always uses stretch layout."), + mcp: z + .record(z.string(), Mcp) + .optional() + .describe("MCP (Model Context Protocol) server configurations"), + instructions: z + .array(z.string()) + .optional() + .describe("Additional instruction files or patterns to include"), experimental: z .object({ hook: z @@ -215,11 +215,7 @@ export namespace Config { export type Info = z.output export const global = lazy(async () => { - let result = pipe( - {}, - mergeDeep(await load(path.join(Global.Path.config, "config.json"))), - mergeDeep(await load(path.join(Global.Path.config, "opencode.json"))), - ) + let result = await load(path.join(Global.Path.config, "config.json")) await import(path.join(Global.Path.config, "config"), { with: { @@ -231,7 +227,10 @@ export namespace Config { if (provider && model) result.model = `${provider}/${model}` result["$schema"] = "https://opencode.ai/config.json" result = mergeDeep(result, rest) - await Bun.write(path.join(Global.Path.config, "config.json"), JSON.stringify(result, null, 2)) + await Bun.write( + path.join(Global.Path.config, "config.json"), + JSON.stringify(result, null, 2), + ) await fs.unlink(path.join(Global.Path.config, "config")) }) .catch(() => {}) @@ -239,47 +238,19 @@ export namespace Config { return result }) - async function load(configPath: string) { - let text = await Bun.file(configPath) - .text() + async function load(path: string) { + const data = await Bun.file(path) + .json() .catch((err) => { - if (err.code === "ENOENT") return - throw new JsonError({ path: configPath }, { cause: err }) + if (err.code === "ENOENT") return {} + throw new JsonError({ path }, { cause: err }) }) - if (!text) return {} - - text = text.replace(/\{env:([^}]+)\}/g, (_, varName) => { - return process.env[varName] || "" - }) - - const fileMatches = 
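// A minimal sketch of the {env:...} substitution performed by load() above:
// placeholders are replaced with environment variables before the JSON is
// parsed, and unset variables collapse to an empty string. The key names in
// the sample config are illustrative, not taken from the schema.
const raw = '{ "provider": { "openai": { "options": { "apiKey": "{env:OPENAI_API_KEY}" } } } }'
const substituted = raw.replace(/\{env:([^}]+)\}/g, (_, name) => process.env[name] || "")
const parsedSample = JSON.parse(substituted)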
text.match(/"?\{file:([^}]+)\}"?/g) - if (fileMatches) { - const configDir = path.dirname(configPath) - for (const match of fileMatches) { - const filePath = match.replace(/^"?\{file:/, "").replace(/\}"?$/, "") - const resolvedPath = path.isAbsolute(filePath) ? filePath : path.resolve(configDir, filePath) - const fileContent = await Bun.file(resolvedPath).text() - text = text.replace(match, JSON.stringify(fileContent)) - } - } - - let data: any - try { - data = JSON.parse(text) - } catch (err) { - throw new JsonError({ path: configPath }, { cause: err as Error }) - } const parsed = Info.safeParse(data) - if (parsed.success) { - if (!parsed.data.$schema) { - parsed.data.$schema = "https://opencode.ai/config.json" - await Bun.write(configPath, JSON.stringify(parsed.data, null, 2)) - } - return parsed.data - } - throw new InvalidError({ path: configPath, issues: parsed.error.issues }) + if (parsed.success) return parsed.data + throw new InvalidError({ path, issues: parsed.error.issues }) } + export const JsonError = NamedError.create( "ConfigJsonError", z.object({ diff --git a/packages/opencode/src/config/hooks.ts b/packages/opencode/src/config/hooks.ts index 973575b7..ffa2475f 100644 --- a/packages/opencode/src/config/hooks.ts +++ b/packages/opencode/src/config/hooks.ts @@ -22,7 +22,9 @@ export namespace ConfigHooks { command: item.command, }) Bun.spawn({ - cmd: item.command.map((x) => x.replace("$FILE", payload.properties.file)), + cmd: item.command.map((x) => + x.replace("$FILE", payload.properties.file), + ), env: item.environment, cwd: app.path.cwd, stdout: "ignore", diff --git a/packages/opencode/src/file/fzf.ts b/packages/opencode/src/file/fzf.ts index 1376af8c..6d52702f 100644 --- a/packages/opencode/src/file/fzf.ts +++ b/packages/opencode/src/file/fzf.ts @@ -45,7 +45,10 @@ export namespace Fzf { log.info("found", { filepath }) return { filepath } } - filepath = path.join(Global.Path.bin, "fzf" + (process.platform === "win32" ? ".exe" : "")) + filepath = path.join( + Global.Path.bin, + "fzf" + (process.platform === "win32" ? ".exe" : ""), + ) const file = Bun.file(filepath) if (!(await file.exists())) { @@ -53,15 +56,18 @@ export namespace Fzf { const arch = archMap[process.arch as keyof typeof archMap] ?? "amd64" const config = PLATFORM[process.platform as keyof typeof PLATFORM] - if (!config) throw new UnsupportedPlatformError({ platform: process.platform }) + if (!config) + throw new UnsupportedPlatformError({ platform: process.platform }) const version = VERSION - const platformName = process.platform === "win32" ? "windows" : process.platform + const platformName = + process.platform === "win32" ? 
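// A minimal sketch of the $FILE substitution in the config hooks above: every
// element of the configured command has the $FILE placeholder replaced with
// the path of the file that triggered the hook. The command and path below
// are illustrative.
const hookCommand = ["prettier", "--write", "$FILE"]
const editedFile = "src/index.ts"
const hookCmd = hookCommand.map((x) => x.replace("$FILE", editedFile))
// → ["prettier", "--write", "src/index.ts"]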
"windows" : process.platform const filename = `fzf-${version}-${platformName}_${arch}.${config.extension}` const url = `https://github.com/junegunn/fzf/releases/download/v${version}/${filename}` const response = await fetch(url) - if (!response.ok) throw new DownloadFailedError({ url, status: response.status }) + if (!response.ok) + throw new DownloadFailedError({ url, status: response.status }) const buffer = await response.arrayBuffer() const archivePath = path.join(Global.Path.bin, filename) @@ -80,11 +86,14 @@ export namespace Fzf { }) } if (config.extension === "zip") { - const proc = Bun.spawn(["unzip", "-j", archivePath, "fzf.exe", "-d", Global.Path.bin], { - cwd: Global.Path.bin, - stderr: "pipe", - stdout: "ignore", - }) + const proc = Bun.spawn( + ["unzip", "-j", archivePath, "fzf.exe", "-d", Global.Path.bin], + { + cwd: Global.Path.bin, + stderr: "pipe", + stdout: "ignore", + }, + ) await proc.exited if (proc.exitCode !== 0) throw new ExtractionFailedError({ diff --git a/packages/opencode/src/file/index.ts b/packages/opencode/src/file/index.ts index b99f35e1..d9e61fa5 100644 --- a/packages/opencode/src/file/index.ts +++ b/packages/opencode/src/file/index.ts @@ -11,19 +11,6 @@ import { Log } from "../util/log" export namespace File { const log = Log.create({ service: "file" }) - export const Info = z - .object({ - path: z.string(), - added: z.number().int(), - removed: z.number().int(), - status: z.enum(["added", "deleted", "modified"]), - }) - .openapi({ - ref: "File", - }) - - export type Info = z.infer - export const Event = { Edited: Bus.event( "file.edited", @@ -37,16 +24,20 @@ export namespace File { const app = App.info() if (!app.git) return [] - const diffOutput = await $`git diff --numstat HEAD`.cwd(app.path.cwd).quiet().nothrow().text() + const diffOutput = await $`git diff --numstat HEAD` + .cwd(app.path.cwd) + .quiet() + .nothrow() + .text() - const changedFiles: Info[] = [] + const changedFiles = [] if (diffOutput.trim()) { const lines = diffOutput.trim().split("\n") for (const line of lines) { const [added, removed, filepath] = line.split("\t") changedFiles.push({ - path: filepath, + file: filepath, added: added === "-" ? 0 : parseInt(added, 10), removed: removed === "-" ? 
0 : parseInt(removed, 10), status: "modified", @@ -54,16 +45,22 @@ export namespace File { } } - const untrackedOutput = await $`git ls-files --others --exclude-standard`.cwd(app.path.cwd).quiet().nothrow().text() + const untrackedOutput = await $`git ls-files --others --exclude-standard` + .cwd(app.path.cwd) + .quiet() + .nothrow() + .text() if (untrackedOutput.trim()) { const untrackedFiles = untrackedOutput.trim().split("\n") for (const filepath of untrackedFiles) { try { - const content = await Bun.file(path.join(app.path.root, filepath)).text() + const content = await Bun.file( + path.join(app.path.root, filepath), + ).text() const lines = content.split("\n").length changedFiles.push({ - path: filepath, + file: filepath, added: lines, removed: 0, status: "added", @@ -75,13 +72,17 @@ export namespace File { } // Get deleted files - const deletedOutput = await $`git diff --name-only --diff-filter=D HEAD`.cwd(app.path.cwd).quiet().nothrow().text() + const deletedOutput = await $`git diff --name-only --diff-filter=D HEAD` + .cwd(app.path.cwd) + .quiet() + .nothrow() + .text() if (deletedOutput.trim()) { const deletedFiles = deletedOutput.trim().split("\n") for (const filepath of deletedFiles) { changedFiles.push({ - path: filepath, + file: filepath, added: 0, removed: 0, // Could get original line count but would require another git command status: "deleted", @@ -91,7 +92,7 @@ export namespace File { return changedFiles.map((x) => ({ ...x, - path: path.relative(app.path.cwd, path.join(app.path.root, x.path)), + file: path.relative(app.path.cwd, path.join(app.path.root, x.file)), })) } @@ -111,7 +112,11 @@ export namespace File { filepath: rel, }) if (diff !== "unmodified") { - const original = await $`git show HEAD:${rel}`.cwd(app.path.root).quiet().nothrow().text() + const original = await $`git show HEAD:${rel}` + .cwd(app.path.root) + .quiet() + .nothrow() + .text() const patch = createPatch(file, original, content, "old", "new", { context: Infinity, }) diff --git a/packages/opencode/src/file/ripgrep.ts b/packages/opencode/src/file/ripgrep.ts index 05ebbe7d..a975d34b 100644 --- a/packages/opencode/src/file/ripgrep.ts +++ b/packages/opencode/src/file/ripgrep.ts @@ -34,27 +34,25 @@ export namespace Ripgrep { export const Match = z.object({ type: z.literal("match"), - data: z - .object({ - path: z.object({ - text: z.string(), - }), - lines: z.object({ - text: z.string(), - }), - line_number: z.number(), - absolute_offset: z.number(), - submatches: z.array( - z.object({ - match: z.object({ - text: z.string(), - }), - start: z.number(), - end: z.number(), + data: z.object({ + path: z.object({ + text: z.string(), + }), + lines: z.object({ + text: z.string(), + }), + line_number: z.number(), + absolute_offset: z.number(), + submatches: z.array( + z.object({ + match: z.object({ + text: z.string(), }), - ), - }) - .openapi({ ref: "Match" }), + start: z.number(), + end: z.number(), + }), + ), + }), }) const End = z.object({ @@ -124,11 +122,15 @@ export namespace Ripgrep { const state = lazy(async () => { let filepath = Bun.which("rg") if (filepath) return { filepath } - filepath = path.join(Global.Path.bin, "rg" + (process.platform === "win32" ? ".exe" : "")) + filepath = path.join( + Global.Path.bin, + "rg" + (process.platform === "win32" ? 
".exe" : ""), + ) const file = Bun.file(filepath) if (!(await file.exists())) { - const platformKey = `${process.arch}-${process.platform}` as keyof typeof PLATFORM + const platformKey = + `${process.arch}-${process.platform}` as keyof typeof PLATFORM const config = PLATFORM[platformKey] if (!config) throw new UnsupportedPlatformError({ platform: platformKey }) @@ -137,7 +139,8 @@ export namespace Ripgrep { const url = `https://github.com/BurntSushi/ripgrep/releases/download/${version}/${filename}` const response = await fetch(url) - if (!response.ok) throw new DownloadFailedError({ url, status: response.status }) + if (!response.ok) + throw new DownloadFailedError({ url, status: response.status }) const buffer = await response.arrayBuffer() const archivePath = path.join(Global.Path.bin, filename) @@ -161,11 +164,14 @@ export namespace Ripgrep { }) } if (config.extension === "zip") { - const proc = Bun.spawn(["unzip", "-j", archivePath, "*/rg.exe", "-d", Global.Path.bin], { - cwd: Global.Path.bin, - stderr: "pipe", - stdout: "ignore", - }) + const proc = Bun.spawn( + ["unzip", "-j", archivePath, "*/rg.exe", "-d", Global.Path.bin], + { + cwd: Global.Path.bin, + stderr: "pipe", + stdout: "ignore", + }, + ) await proc.exited if (proc.exitCode !== 0) throw new ExtractionFailedError({ @@ -187,16 +193,17 @@ export namespace Ripgrep { return filepath } - export async function files(input: { cwd: string; query?: string; glob?: string[]; limit?: number }) { - const commands = [`${$.escape(await filepath())} --files --follow --hidden --glob='!.git/*'`] - - if (input.glob) { - for (const g of input.glob) { - commands[0] += ` --glob='${g}'` - } - } - - if (input.query) commands.push(`${await Fzf.filepath()} --filter=${input.query}`) + export async function files(input: { + cwd: string + query?: string + glob?: string + limit?: number + }) { + const commands = [ + `${await filepath()} --files --hidden --glob='!.git/*' ${input.glob ? 
`--glob='${input.glob}'` : ``}`, + ] + if (input.query) + commands.push(`${await Fzf.filepath()} --filter=${input.query}`) if (input.limit) commands.push(`head -n ${input.limit}`) const joined = commands.join(" | ") const result = await $`${{ raw: joined }}`.cwd(input.cwd).nothrow().text() @@ -303,8 +310,18 @@ export namespace Ripgrep { return lines.join("\n") } - export async function search(input: { cwd: string; pattern: string; glob?: string[]; limit?: number }) { - const args = [`${await filepath()}`, "--json", "--hidden", "--glob='!.git/*'"] + export async function search(input: { + cwd: string + pattern: string + glob?: string[] + limit?: number + }) { + const args = [ + `${await filepath()}`, + "--json", + "--hidden", + "--glob='!.git/*'", + ] if (input.glob) { for (const g of input.glob) { diff --git a/packages/opencode/src/file/time.ts b/packages/opencode/src/file/time.ts index 453259e8..53132197 100644 --- a/packages/opencode/src/file/time.ts +++ b/packages/opencode/src/file/time.ts @@ -1,8 +1,6 @@ import { App } from "../app/app" -import { Log } from "../util/log" export namespace FileTime { - const log = Log.create({ service: "file.time" }) export const state = App.state("tool.filetimes", () => { const read: { [sessionID: string]: { @@ -15,7 +13,6 @@ export namespace FileTime { }) export function read(sessionID: string, file: string) { - log.info("read", { sessionID, file }) const { read } = state() read[sessionID] = read[sessionID] || {} read[sessionID][file] = new Date() @@ -27,7 +24,10 @@ export namespace FileTime { export async function assert(sessionID: string, filepath: string) { const time = get(sessionID, filepath) - if (!time) throw new Error(`You must read the file ${filepath} before overwriting it. Use the Read tool first`) + if (!time) + throw new Error( + `You must read the file ${filepath} before overwriting it. 
Use the Read tool first`, + ) const stats = await Bun.file(filepath).stat() if (stats.mtime.getTime() > time.getTime()) { throw new Error( diff --git a/packages/opencode/src/file/watch.ts b/packages/opencode/src/file/watch.ts index 383ad6f3..1d12168f 100644 --- a/packages/opencode/src/file/watch.ts +++ b/packages/opencode/src/file/watch.ts @@ -21,20 +21,23 @@ export namespace FileWatcher { "file.watcher", () => { const app = App.use() - if (!app.info.git) return {} try { - const watcher = fs.watch(app.info.path.cwd, { recursive: true }, (event, file) => { - log.info("change", { file, event }) - if (!file) return - // for some reason async local storage is lost here - // https://github.com/oven-sh/bun/issues/20754 - App.provideExisting(app, async () => { - Bus.publish(Event.Updated, { - file, - event, + const watcher = fs.watch( + app.info.path.cwd, + { recursive: true }, + (event, file) => { + log.info("change", { file, event }) + if (!file) return + // for some reason async local storage is lost here + // https://github.com/oven-sh/bun/issues/20754 + App.provideExisting(app, async () => { + Bus.publish(Event.Updated, { + file, + event, + }) }) - }) - }) + }, + ) return { watcher } } catch { return {} @@ -46,7 +49,7 @@ export namespace FileWatcher { ) export function init() { - if (Flag.OPENCODE_DISABLE_WATCHER || true) return + if (Flag.OPENCODE_DISABLE_WATCHER) return state() } } diff --git a/packages/opencode/src/format/formatter.ts b/packages/opencode/src/format/formatter.ts index 83e359f6..60ca6a32 100644 --- a/packages/opencode/src/format/formatter.ts +++ b/packages/opencode/src/format/formatter.ts @@ -1,7 +1,5 @@ import { App } from "../app/app" import { BunProc } from "../bun" -import { Filesystem } from "../util/filesystem" -import path from "path" export interface Info { name: string @@ -31,7 +29,7 @@ export const mix: Info = { export const prettier: Info = { name: "prettier", - command: [BunProc.which(), "x", "prettier", "--write", "$FILE"], + command: [BunProc.which(), "run", "prettier", "--write", "$FILE"], environment: { BUN_BE_BUN: "1", }, @@ -64,12 +62,23 @@ export const prettier: Info = { ".gql", ], async enabled() { - const app = App.info() - const nms = await Filesystem.findUp("node_modules", app.path.cwd, app.path.root) - for (const item of nms) { - if (await Bun.file(path.join(item, ".bin", "prettier")).exists()) return true + // this is more complicated because we only want to use prettier if it's + // being used with the current project + try { + const proc = Bun.spawn({ + cmd: [BunProc.which(), "run", "prettier", "--version"], + cwd: App.info().path.cwd, + env: { + BUN_BE_BUN: "1", + }, + stdout: "ignore", + stderr: "ignore", + }) + const exit = await proc.exited + return exit === 0 + } catch { + return false } - return false }, } @@ -85,7 +94,21 @@ export const zig: Info = { export const clang: Info = { name: "clang-format", command: ["clang-format", "-i", "$FILE"], - extensions: [".c", ".cc", ".cpp", ".cxx", ".c++", ".h", ".hh", ".hpp", ".hxx", ".h++", ".ino", ".C", ".H"], + extensions: [ + ".c", + ".cc", + ".cpp", + ".cxx", + ".c++", + ".h", + ".hh", + ".hpp", + ".hxx", + ".h++", + ".ino", + ".C", + ".H", + ], async enabled() { return Bun.which("clang-format") !== null }, @@ -105,29 +128,7 @@ export const ruff: Info = { command: ["ruff", "format", "$FILE"], extensions: [".py", ".pyi"], async enabled() { - if (!Bun.which("ruff")) return false - const app = App.info() - const configs = ["pyproject.toml", "ruff.toml", ".ruff.toml"] - for (const config of configs) { 
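// A minimal sketch of the FileTime guard above: a file may only be overwritten
// after it has been read, and only if it has not changed on disk since that
// read. The Map-based store stands in for the per-session state in the
// original; Bun.file(...).stat() is used as in the diff.
const readTimes = new Map<string, Date>()

function markRead(file: string) {
  readTimes.set(file, new Date())
}

async function assertFresh(file: string) {
  const time = readTimes.get(file)
  if (!time) throw new Error(`You must read the file ${file} before overwriting it`)
  const stats = await Bun.file(file).stat()
  if (stats.mtime.getTime() > time.getTime()) throw new Error(`File ${file} has been modified since it was last read`)
}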
- const found = await Filesystem.findUp(config, app.path.cwd, app.path.root) - if (found.length > 0) { - if (config === "pyproject.toml") { - const content = await Bun.file(found[0]).text() - if (content.includes("[tool.ruff]")) return true - } else { - return true - } - } - } - const deps = ["requirements.txt", "pyproject.toml", "Pipfile"] - for (const dep of deps) { - const found = await Filesystem.findUp(dep, app.path.cwd, app.path.root) - if (found.length > 0) { - const content = await Bun.file(found[0]).text() - if (content.includes("ruff")) return true - } - } - return false + return Bun.which("ruff") !== null }, } diff --git a/packages/opencode/src/global/index.ts b/packages/opencode/src/global/index.ts index b083e94d..24e9b6dd 100644 --- a/packages/opencode/src/global/index.ts +++ b/packages/opencode/src/global/index.ts @@ -23,17 +23,7 @@ export namespace Global { await Promise.all([ fs.mkdir(Global.Path.data, { recursive: true }), fs.mkdir(Global.Path.config, { recursive: true }), + fs.mkdir(Global.Path.cache, { recursive: true }), fs.mkdir(Global.Path.providers, { recursive: true }), fs.mkdir(Global.Path.state, { recursive: true }), ]) - -const CACHE_VERSION = "2" - -const version = await Bun.file(path.join(Global.Path.cache, "version")) - .text() - .catch(() => "0") - -if (version !== CACHE_VERSION) { - await fs.rm(Global.Path.cache, { recursive: true, force: true }) - await Bun.file(path.join(Global.Path.cache, "version")).write(CACHE_VERSION) -} diff --git a/packages/opencode/src/id/id.ts b/packages/opencode/src/id/id.ts index 6c1edd50..cf9a3042 100644 --- a/packages/opencode/src/id/id.ts +++ b/packages/opencode/src/id/id.ts @@ -6,7 +6,6 @@ export namespace Identifier { session: "ses", message: "msg", user: "usr", - part: "prt", } as const export function schema(prefix: keyof typeof prefixes) { @@ -27,7 +26,11 @@ export namespace Identifier { return generateID(prefix, true, given) } - function generateID(prefix: keyof typeof prefixes, descending: boolean, given?: string): string { + function generateID( + prefix: keyof typeof prefixes, + descending: boolean, + given?: string, + ): string { if (!given) { return generateNewID(prefix, descending) } @@ -39,7 +42,8 @@ export namespace Identifier { } function randomBase62(length: number): string { - const chars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + const chars = + "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" let result = "" const bytes = randomBytes(length) for (let i = 0; i < length; i++) { @@ -48,7 +52,10 @@ export namespace Identifier { return result } - function generateNewID(prefix: keyof typeof prefixes, descending: boolean): string { + function generateNewID( + prefix: keyof typeof prefixes, + descending: boolean, + ): string { const currentTimestamp = Date.now() if (currentTimestamp !== lastTimestamp) { @@ -66,6 +73,11 @@ export namespace Identifier { timeBytes[i] = Number((now >> BigInt(40 - 8 * i)) & BigInt(0xff)) } - return prefixes[prefix] + "_" + timeBytes.toString("hex") + randomBase62(LENGTH - 12) + return ( + prefixes[prefix] + + "_" + + timeBytes.toString("hex") + + randomBase62(LENGTH - 12) + ) } } diff --git a/packages/opencode/src/index.ts b/packages/opencode/src/index.ts index 73ff26c6..3ca40ce7 100644 --- a/packages/opencode/src/index.ts +++ b/packages/opencode/src/index.ts @@ -14,12 +14,6 @@ import { FormatError } from "./cli/error" import { ServeCommand } from "./cli/cmd/serve" import { TuiCommand } from "./cli/cmd/tui" import { DebugCommand } from 
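// A minimal sketch of the identifier layout in id.ts above: a short type
// prefix, an underscore, six bytes of the millisecond timestamp rendered as
// twelve hex characters (so ids sort by creation time), then random base62
// padding. The overall id length and the monotonic/descending handling are
// simplified here.
import { randomBytes } from "crypto"

function sketchId(prefix: string, tail = 14): string {
  const now = BigInt(Date.now())
  const timeBytes = Buffer.alloc(6)
  for (let i = 0; i < 6; i++) {
    timeBytes[i] = Number((now >> BigInt(40 - 8 * i)) & BigInt(0xff))
  }
  const chars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
  let random = ""
  for (const byte of randomBytes(tail)) random += chars[byte % chars.length]
  return prefix + "_" + timeBytes.toString("hex") + random
}

// sketchId("ses") → e.g. "ses_01985f0c2b7a..." (lexicographically ordered by time)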
"./cli/cmd/debug" -import { StatsCommand } from "./cli/cmd/stats" -import { McpCommand } from "./cli/cmd/mcp" -import { InstallGithubCommand } from "./cli/cmd/install-github" -import { Trace } from "./trace" - -Trace.init() const cancel = new AbortController() @@ -45,32 +39,13 @@ const cli = yargs(hideBin(process.argv)) type: "boolean", }) .middleware(async () => { - await Log.init({ print: process.argv.includes("--print-logs"), dev: Installation.isDev() }) - - try { - const { Config } = await import("./config/config") - const { App } = await import("./app/app") - - App.provide({ cwd: process.cwd() }, async () => { - const cfg = await Config.get() - if (cfg.log_level) { - Log.setLevel(cfg.log_level as Log.Level) - } else { - const defaultLevel = Installation.isDev() ? "DEBUG" : "INFO" - Log.setLevel(defaultLevel) - } - }) - } catch (e) { - Log.Default.error("failed to load config", { error: e }) - } - + await Log.init({ print: process.argv.includes("--print-logs") }) Log.Default.info("opencode", { version: Installation.VERSION, args: process.argv.slice(2), }) }) .usage("\n" + UI.logo()) - .command(McpCommand) .command(TuiCommand) .command(RunCommand) .command(GenerateCommand) @@ -79,10 +54,11 @@ const cli = yargs(hideBin(process.argv)) .command(UpgradeCommand) .command(ServeCommand) .command(ModelsCommand) - .command(StatsCommand) - .command(InstallGithubCommand) .fail((msg) => { - if (msg.startsWith("Unknown argument") || msg.startsWith("Not enough non-option arguments")) { + if ( + msg.startsWith("Unknown argument") || + msg.startsWith("Not enough non-option arguments") + ) { cli.showHelp("log") } }) @@ -121,7 +97,10 @@ try { Log.Default.error("fatal", data) const formatted = FormatError(e) if (formatted) UI.error(formatted) - if (formatted === undefined) UI.error("Unexpected error, check log file at " + Log.file() + " for more details") + if (formatted === undefined) + UI.error( + "Unexpected error, check log file at " + Log.file() + " for more details", + ) process.exitCode = 1 } diff --git a/packages/opencode/src/installation/index.ts b/packages/opencode/src/installation/index.ts index ab631a8d..4af5c807 100644 --- a/packages/opencode/src/installation/index.ts +++ b/packages/opencode/src/installation/index.ts @@ -135,17 +135,12 @@ export namespace Installation { }) } - export const VERSION = typeof OPENCODE_VERSION === "string" ? OPENCODE_VERSION : "dev" + export const VERSION = + typeof OPENCODE_VERSION === "string" ? 
OPENCODE_VERSION : "dev" export async function latest() { return fetch("https://api.github.com/repos/sst/opencode/releases/latest") .then((res) => res.json()) - .then((data) => { - if (typeof data.tag_name !== "string") { - log.error("GitHub API error", data) - throw new Error("failed to fetch latest version") - } - return data.tag_name.slice(1) as string - }) + .then((data) => data.tag_name.slice(1) as string) } } diff --git a/packages/opencode/src/lsp/client.ts b/packages/opencode/src/lsp/client.ts index c63e0259..f06a8c68 100644 --- a/packages/opencode/src/lsp/client.ts +++ b/packages/opencode/src/lsp/client.ts @@ -1,5 +1,9 @@ import path from "path" -import { createMessageConnection, StreamMessageReader, StreamMessageWriter } from "vscode-jsonrpc/node" +import { + createMessageConnection, + StreamMessageReader, + StreamMessageWriter, +} from "vscode-jsonrpc/node" import type { Diagnostic as VSCodeDiagnostic } from "vscode-languageserver-types" import { App } from "../app/app" import { Log } from "../util/log" @@ -34,54 +38,45 @@ export namespace LSPClient { ), } - export async function create(input: { serverID: string; server: LSPServer.Handle; root: string }) { + export async function create(serverID: string, server: LSPServer.Handle) { const app = App.info() - const l = log.clone().tag("serverID", input.serverID) - l.info("starting client") + log.info("starting client", { id: serverID }) const connection = createMessageConnection( - new StreamMessageReader(input.server.process.stdout), - new StreamMessageWriter(input.server.process.stdin), + new StreamMessageReader(server.process.stdout), + new StreamMessageWriter(server.process.stdin), ) const diagnostics = new Map() connection.onNotification("textDocument/publishDiagnostics", (params) => { const path = new URL(params.uri).pathname - l.info("textDocument/publishDiagnostics", { + log.info("textDocument/publishDiagnostics", { path, }) const exists = diagnostics.has(path) diagnostics.set(path, params.diagnostics) - if (!exists && input.serverID === "typescript") return - Bus.publish(Event.Diagnostics, { path, serverID: input.serverID }) - }) - connection.onRequest("window/workDoneProgress/create", (params) => { - l.info("window/workDoneProgress/create", params) - return null + if (!exists && serverID === "typescript") return + Bus.publish(Event.Diagnostics, { path, serverID }) }) connection.onRequest("workspace/configuration", async () => { return [{}] }) connection.listen() - l.info("sending initialize") + log.info("sending initialize", { id: serverID }) await withTimeout( connection.sendRequest("initialize", { - rootUri: "file://" + input.root, - processId: input.server.process.pid, + processId: server.process.pid, workspaceFolders: [ { name: "workspace", - uri: "file://" + input.root, + uri: "file://" + app.path.cwd, }, ], initializationOptions: { - ...input.server.initialization, + ...server.initialization, }, capabilities: { - window: { - workDoneProgress: true, - }, workspace: { configuration: true, }, @@ -97,33 +92,28 @@ export namespace LSPClient { }, }), 5_000, - ).catch((err) => { - l.error("initialize error", { error: err }) - throw new InitializeError( - { serverID: input.serverID }, - { - cause: err, - }, - ) + ).catch(() => { + throw new InitializeError({ serverID }) }) - await connection.sendNotification("initialized", {}) + log.info("initialized") const files: { [path: string]: number } = {} const result = { - root: input.root, get serverID() { - return input.serverID + return serverID }, get connection() { return 
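// A minimal sketch of the latest-version lookup above: the GitHub releases API
// returns a tag such as "v0.3.1" and the leading "v" is stripped. The guard on
// tag_name mirrors the longer variant shown on the left side of the diff.
async function latestVersion(): Promise<string> {
  const res = await fetch("https://api.github.com/repos/sst/opencode/releases/latest")
  const data: any = await res.json()
  if (typeof data.tag_name !== "string") throw new Error("failed to fetch latest version")
  return data.tag_name.slice(1)
}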
connection }, notify: { async open(input: { path: string }) { - input.path = path.isAbsolute(input.path) ? input.path : path.resolve(app.path.cwd, input.path) + input.path = path.isAbsolute(input.path) + ? input.path + : path.resolve(app.path.cwd, input.path) const file = Bun.file(input.path) const text = await file.text() const version = files[input.path] @@ -155,13 +145,18 @@ export namespace LSPClient { return diagnostics }, async waitForDiagnostics(input: { path: string }) { - input.path = path.isAbsolute(input.path) ? input.path : path.resolve(app.path.cwd, input.path) + input.path = path.isAbsolute(input.path) + ? input.path + : path.resolve(app.path.cwd, input.path) log.info("waiting for diagnostics", input) let unsub: () => void return await withTimeout( new Promise((resolve) => { unsub = Bus.subscribe(Event.Diagnostics, (event) => { - if (event.properties.path === input.path && event.properties.serverID === result.serverID) { + if ( + event.properties.path === input.path && + event.properties.serverID === result.serverID + ) { log.info("got diagnostics", input) unsub?.() resolve() @@ -176,16 +171,14 @@ export namespace LSPClient { }) }, async shutdown() { - l.info("shutting down") + log.info("shutting down", { serverID }) connection.end() connection.dispose() - input.server.process.kill() - l.info("shutdown") + server.process.kill("SIGTERM") + log.info("shutdown", { serverID }) }, } - l.info("initialized") - return result } } diff --git a/packages/opencode/src/lsp/index.ts b/packages/opencode/src/lsp/index.ts index 8f6c1c9c..2c73feb8 100644 --- a/packages/opencode/src/lsp/index.ts +++ b/packages/opencode/src/lsp/index.ts @@ -3,65 +3,64 @@ import { Log } from "../util/log" import { LSPClient } from "./client" import path from "path" import { LSPServer } from "./server" +import { Ripgrep } from "../file/ripgrep" import { z } from "zod" export namespace LSP { const log = Log.create({ service: "lsp" }) - export const Range = z - .object({ - start: z.object({ - line: z.number(), - character: z.number(), - }), - end: z.object({ - line: z.number(), - character: z.number(), - }), - }) - .openapi({ - ref: "Range", - }) - export type Range = z.infer - export const Symbol = z .object({ name: z.string(), kind: z.number(), location: z.object({ uri: z.string(), - range: Range, + range: z.object({ + start: z.object({ + line: z.number(), + character: z.number(), + }), + end: z.object({ + line: z.number(), + character: z.number(), + }), + }), }), }) .openapi({ - ref: "Symbol", + ref: "LSP.Symbol", }) export type Symbol = z.infer - export const DocumentSymbol = z - .object({ - name: z.string(), - detail: z.string().optional(), - kind: z.number(), - range: Range, - selectionRange: Range, - }) - .openapi({ - ref: "DocumentSymbol", - }) - export type DocumentSymbol = z.infer - const state = App.state( "lsp", - async () => { - const clients: LSPClient.Info[] = [] + async (app) => { + log.info("initializing") + const clients = new Map() + for (const server of Object.values(LSPServer)) { + for (const extension of server.extensions) { + const [file] = await Ripgrep.files({ + cwd: app.path.cwd, + glob: "*" + extension, + }) + if (!file) continue + const handle = await server.spawn(App.info()) + if (!handle) break + const client = await LSPClient.create(server.id, handle).catch( + () => {}, + ) + if (!client) break + clients.set(server.id, client) + break + } + } + log.info("initialized") return { - broken: new Set(), clients, } }, async (state) => { - for (const client of state.clients) { + for (const 
client of state.clients.values()) { await client.shutdown() } }, @@ -71,44 +70,16 @@ export namespace LSP { return state() } - async function getClients(file: string) { - const s = await state() - const extension = path.parse(file).ext - const result: LSPClient.Info[] = [] - for (const server of Object.values(LSPServer)) { - if (!server.extensions.includes(extension)) continue - const root = await server.root(file, App.info()) - if (!root) continue - if (s.broken.has(root + server.id)) continue - - const match = s.clients.find((x) => x.root === root && x.serverID === server.id) - if (match) { - result.push(match) - continue - } - const handle = await server.spawn(App.info(), root) - if (!handle) continue - const client = await LSPClient.create({ - serverID: server.id, - server: handle, - root, - }).catch((err) => { - s.broken.add(root + server.id) - handle.process.kill() - log.error("", { error: err }) - }) - if (!client) continue - s.clients.push(client) - result.push(client) - } - return result - } - export async function touchFile(input: string, waitForDiagnostics?: boolean) { - const clients = await getClients(input) + const extension = path.parse(input).ext + const matches = Object.values(LSPServer) + .filter((x) => x.extensions.includes(extension)) + .map((x) => x.id) await run(async (client) => { - if (!clients.includes(client)) return - const wait = waitForDiagnostics ? client.waitForDiagnostics({ path: input }) : Promise.resolve() + if (!matches.includes(client.serverID)) return + const wait = waitForDiagnostics + ? client.waitForDiagnostics({ path: input }) + : Promise.resolve() await client.notify.open({ path: input }) return wait }) @@ -126,7 +97,11 @@ export namespace LSP { return results } - export async function hover(input: { file: string; line: number; character: number }) { + export async function hover(input: { + file: string + line: number + character: number + }) { return run((client) => { return client.connection.sendRequest("textDocument/hover", { textDocument: { @@ -140,74 +115,18 @@ export namespace LSP { }) } - enum SymbolKind { - File = 1, - Module = 2, - Namespace = 3, - Package = 4, - Class = 5, - Method = 6, - Property = 7, - Field = 8, - Constructor = 9, - Enum = 10, - Interface = 11, - Function = 12, - Variable = 13, - Constant = 14, - String = 15, - Number = 16, - Boolean = 17, - Array = 18, - Object = 19, - Key = 20, - Null = 21, - EnumMember = 22, - Struct = 23, - Event = 24, - Operator = 25, - TypeParameter = 26, - } - - const kinds = [ - SymbolKind.Class, - SymbolKind.Function, - SymbolKind.Method, - SymbolKind.Interface, - SymbolKind.Variable, - SymbolKind.Constant, - SymbolKind.Struct, - SymbolKind.Enum, - ] - export async function workspaceSymbol(query: string) { return run((client) => - client.connection - .sendRequest("workspace/symbol", { - query, - }) - .then((result: any) => result.filter((x: LSP.Symbol) => kinds.includes(x.kind))) - .then((result: any) => result.slice(0, 10)) - .catch(() => []), + client.connection.sendRequest("workspace/symbol", { + query, + }), ).then((result) => result.flat() as LSP.Symbol[]) } - export async function documentSymbol(uri: string) { - return run((client) => - client.connection - .sendRequest("textDocument/documentSymbol", { - textDocument: { - uri, - }, - }) - .catch(() => []), - ) - .then((result) => result.flat() as (LSP.DocumentSymbol | LSP.Symbol)[]) - .then((result) => result.filter(Boolean)) - } - - async function run(input: (client: LSPClient.Info) => Promise): Promise { - const clients = await 
state().then((x) => x.clients) + async function run( + input: (client: LSPClient.Info) => Promise, + ): Promise { + const clients = await state().then((x) => [...x.clients.values()]) const tasks = clients.map((x) => input(x)) return Promise.all(tasks) } diff --git a/packages/opencode/src/lsp/language.ts b/packages/opencode/src/lsp/language.ts index 61686bd9..6c0da51b 100644 --- a/packages/opencode/src/lsp/language.ts +++ b/packages/opencode/src/lsp/language.ts @@ -94,6 +94,4 @@ export const LANGUAGE_EXTENSIONS: Record = { ".yml": "yaml", ".mjs": "javascript", ".cjs": "javascript", - ".zig": "zig", - ".zon": "zig", } as const diff --git a/packages/opencode/src/lsp/server.ts b/packages/opencode/src/lsp/server.ts index 8c843fea..ce7972f5 100644 --- a/packages/opencode/src/lsp/server.ts +++ b/packages/opencode/src/lsp/server.ts @@ -4,9 +4,6 @@ import path from "path" import { Global } from "../global" import { Log } from "../util/log" import { BunProc } from "../bun" -import { $ } from "bun" -import fs from "fs/promises" -import { Filesystem } from "../util/filesystem" export namespace LSPServer { const log = Log.create({ service: "lsp.server" }) @@ -16,44 +13,31 @@ export namespace LSPServer { initialization?: Record } - type RootFunction = (file: string, app: App.Info) => Promise - - const NearestRoot = (patterns: string[]): RootFunction => { - return async (file, app) => { - const files = Filesystem.up({ - targets: patterns, - start: path.dirname(file), - stop: app.path.root, - }) - const first = await files.next() - await files.return() - if (!first.value) return app.path.root - return path.dirname(first.value) - } - } - export interface Info { id: string extensions: string[] - global?: boolean - root: RootFunction - spawn(app: App.Info, root: string): Promise + spawn(app: App.Info): Promise } export const Typescript: Info = { id: "typescript", - root: NearestRoot(["tsconfig.json", "package.json", "jsconfig.json"]), extensions: [".ts", ".tsx", ".js", ".jsx", ".mjs", ".cjs", ".mts", ".cts"], - async spawn(app, root) { - const tsserver = await Bun.resolve("typescript/lib/tsserver.js", app.path.cwd).catch(() => {}) + async spawn(app) { + const tsserver = await Bun.resolve( + "typescript/lib/tsserver.js", + app.path.cwd, + ).catch(() => {}) if (!tsserver) return - const proc = spawn(BunProc.which(), ["x", "typescript-language-server", "--stdio"], { - cwd: root, - env: { - ...process.env, - BUN_BE_BUN: "1", + const proc = spawn( + BunProc.which(), + ["x", "typescript-language-server", "--stdio"], + { + env: { + ...process.env, + BUN_BE_BUN: "1", + }, }, - }) + ) return { process: proc, initialization: { @@ -67,13 +51,8 @@ export namespace LSPServer { export const Gopls: Info = { id: "golang", - root: async (file, app) => { - const work = await NearestRoot(["go.work"])(file, app) - if (work) return work - return NearestRoot(["go.mod", "go.sum"])(file, app) - }, extensions: [".go"], - async spawn(_, root) { + async spawn() { let bin = Bun.which("gopls", { PATH: process.env["PATH"] + ":" + Global.Path.bin, }) @@ -92,24 +71,24 @@ export namespace LSPServer { log.error("Failed to install gopls") return } - bin = path.join(Global.Path.bin, "gopls" + (process.platform === "win32" ? ".exe" : "")) + bin = path.join( + Global.Path.bin, + "gopls" + (process.platform === "win32" ? 
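// A minimal sketch of the NearestRoot helper above: starting from the edited
// file's directory, walk toward the workspace root and use the first directory
// containing one of the marker files (tsconfig.json, package.json, ...) as the
// language server root. Plain fs calls stand in for the Filesystem.up helper
// used in the original.
import fs from "fs/promises"
import path from "path"

async function nearestRoot(file: string, stop: string, targets: string[]): Promise<string> {
  let dir = path.dirname(file)
  while (true) {
    for (const target of targets) {
      const found = await fs.access(path.join(dir, target)).then(() => true, () => false)
      if (found) return dir
    }
    if (dir === stop || dir === path.dirname(dir)) return stop
    dir = path.dirname(dir)
  }
}

// nearestRoot("/repo/packages/app/src/index.ts", "/repo", ["tsconfig.json", "package.json"])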
".exe" : ""), + ) log.info(`installed gopls`, { bin, }) } return { - process: spawn(bin!, { - cwd: root, - }), + process: spawn(bin!), } }, } export const RubyLsp: Info = { id: "ruby-lsp", - root: NearestRoot(["Gemfile"]), extensions: [".rb", ".rake", ".gemspec", ".ru"], - async spawn(_, root) { + async spawn() { let bin = Bun.which("ruby-lsp", { PATH: process.env["PATH"] + ":" + Global.Path.bin, }) @@ -132,15 +111,16 @@ export namespace LSPServer { log.error("Failed to install ruby-lsp") return } - bin = path.join(Global.Path.bin, "ruby-lsp" + (process.platform === "win32" ? ".exe" : "")) + bin = path.join( + Global.Path.bin, + "ruby-lsp" + (process.platform === "win32" ? ".exe" : ""), + ) log.info(`installed ruby-lsp`, { bin, }) } return { - process: spawn(bin!, ["--stdio"], { - cwd: root, - }), + process: spawn(bin!, ["--stdio"]), } }, } @@ -148,178 +128,20 @@ export namespace LSPServer { export const Pyright: Info = { id: "pyright", extensions: [".py", ".pyi"], - root: NearestRoot(["pyproject.toml", "setup.py", "setup.cfg", "requirements.txt", "Pipfile", "pyrightconfig.json"]), - async spawn(_, root) { - const proc = spawn(BunProc.which(), ["x", "pyright-langserver", "--stdio"], { - cwd: root, - env: { - ...process.env, - BUN_BE_BUN: "1", + async spawn() { + const proc = spawn( + BunProc.which(), + ["x", "pyright-langserver", "--stdio"], + { + env: { + ...process.env, + BUN_BE_BUN: "1", + }, }, - }) + ) return { process: proc, } }, } - - export const ElixirLS: Info = { - id: "elixir-ls", - extensions: [".ex", ".exs"], - root: NearestRoot(["mix.exs", "mix.lock"]), - async spawn(_, root) { - let binary = Bun.which("elixir-ls") - if (!binary) { - const elixirLsPath = path.join(Global.Path.bin, "elixir-ls") - binary = path.join( - Global.Path.bin, - "elixir-ls-master", - "release", - process.platform === "win32" ? "language_server.bar" : "language_server.sh", - ) - - if (!(await Bun.file(binary).exists())) { - const elixir = Bun.which("elixir") - if (!elixir) { - log.error("elixir is required to run elixir-ls") - return - } - - log.info("downloading elixir-ls from GitHub releases") - - const response = await fetch("https://github.com/elixir-lsp/elixir-ls/archive/refs/heads/master.zip") - if (!response.ok) return - const zipPath = path.join(Global.Path.bin, "elixir-ls.zip") - await Bun.file(zipPath).write(response) - - await $`unzip -o -q ${zipPath}`.cwd(Global.Path.bin).nothrow() - - await fs.rm(zipPath, { - force: true, - recursive: true, - }) - - await $`mix deps.get && mix compile && mix elixir_ls.release2 -o release` - .quiet() - .cwd(path.join(Global.Path.bin, "elixir-ls-master")) - .env({ MIX_ENV: "prod", ...process.env }) - - log.info(`installed elixir-ls`, { - path: elixirLsPath, - }) - } - } - - return { - process: spawn(binary, { - cwd: root, - }), - } - }, - } - - export const Zls: Info = { - id: "zls", - extensions: [".zig", ".zon"], - root: NearestRoot(["build.zig"]), - async spawn(_, root) { - let bin = Bun.which("zls", { - PATH: process.env["PATH"] + ":" + Global.Path.bin, - }) - - if (!bin) { - const zig = Bun.which("zig") - if (!zig) { - log.error("Zig is required to use zls. 
Please install Zig first.") - return - } - - log.info("downloading zls from GitHub releases") - - const releaseResponse = await fetch("https://api.github.com/repos/zigtools/zls/releases/latest") - if (!releaseResponse.ok) { - log.error("Failed to fetch zls release info") - return - } - - const release = await releaseResponse.json() - - const platform = process.platform - const arch = process.arch - let assetName = "" - - let zlsArch: string = arch - if (arch === "arm64") zlsArch = "aarch64" - else if (arch === "x64") zlsArch = "x86_64" - else if (arch === "ia32") zlsArch = "x86" - - let zlsPlatform: string = platform - if (platform === "darwin") zlsPlatform = "macos" - else if (platform === "win32") zlsPlatform = "windows" - - const ext = platform === "win32" ? "zip" : "tar.xz" - - assetName = `zls-${zlsArch}-${zlsPlatform}.${ext}` - - const supportedCombos = [ - "zls-x86_64-linux.tar.xz", - "zls-x86_64-macos.tar.xz", - "zls-x86_64-windows.zip", - "zls-aarch64-linux.tar.xz", - "zls-aarch64-macos.tar.xz", - "zls-aarch64-windows.zip", - "zls-x86-linux.tar.xz", - "zls-x86-windows.zip", - ] - - if (!supportedCombos.includes(assetName)) { - log.error(`Platform ${platform} and architecture ${arch} is not supported by zls`) - return - } - - const asset = release.assets.find((a: any) => a.name === assetName) - if (!asset) { - log.error(`Could not find asset ${assetName} in latest zls release`) - return - } - - const downloadUrl = asset.browser_download_url - const downloadResponse = await fetch(downloadUrl) - if (!downloadResponse.ok) { - log.error("Failed to download zls") - return - } - - const tempPath = path.join(Global.Path.bin, assetName) - await Bun.file(tempPath).write(downloadResponse) - - if (ext === "zip") { - await $`unzip -o -q ${tempPath}`.cwd(Global.Path.bin).nothrow() - } else { - await $`tar -xf ${tempPath}`.cwd(Global.Path.bin).nothrow() - } - - await fs.rm(tempPath, { force: true }) - - bin = path.join(Global.Path.bin, "zls" + (platform === "win32" ? 
".exe" : "")) - - if (!(await Bun.file(bin).exists())) { - log.error("Failed to extract zls binary") - return - } - - if (platform !== "win32") { - await $`chmod +x ${bin}`.nothrow() - } - - log.info(`installed zls`, { bin }) - } - - return { - process: spawn(bin, { - cwd: root, - }), - } - }, - } } diff --git a/packages/opencode/src/mcp/index.ts b/packages/opencode/src/mcp/index.ts index 34aec640..ded7d0d2 100644 --- a/packages/opencode/src/mcp/index.ts +++ b/packages/opencode/src/mcp/index.ts @@ -37,7 +37,6 @@ export namespace MCP { transport: { type: "sse", url: mcp.url, - headers: mcp.headers, }, }).catch(() => {}) if (!client) { diff --git a/packages/opencode/src/provider/provider.ts b/packages/opencode/src/provider/provider.ts index df3a0e48..f05d15ce 100644 --- a/packages/opencode/src/provider/provider.ts +++ b/packages/opencode/src/provider/provider.ts @@ -21,7 +21,7 @@ import { AuthCopilot } from "../auth/copilot" import { ModelsDev } from "./models" import { NamedError } from "../util/error" import { Auth } from "../auth" -import { TaskTool } from "../tool/task" +// import { TaskTool } from "../tool/task" export namespace Provider { const log = Log.create({ service: "provider" }) @@ -91,7 +91,8 @@ export namespace Provider { if (!info || info.type !== "oauth") return if (!info.access || info.expires < Date.now()) { const tokens = await copilot.access(info.refresh) - if (!tokens) throw new Error("GitHub Copilot authentication expired") + if (!tokens) + throw new Error("GitHub Copilot authentication expired") await Auth.set("github-copilot", { type: "oauth", ...tokens, @@ -99,27 +100,25 @@ export namespace Provider { info.access = tokens.access } let isAgentCall = false - let isVisionRequest = false try { - const body = typeof init.body === "string" ? JSON.parse(init.body) : init.body + const body = + typeof init.body === "string" + ? JSON.parse(init.body) + : init.body if (body?.messages) { - isAgentCall = body.messages.some((msg: any) => msg.role && ["tool", "assistant"].includes(msg.role)) - isVisionRequest = body.messages.some( + isAgentCall = body.messages.some( (msg: any) => - Array.isArray(msg.content) && msg.content.some((part: any) => part.type === "image_url"), + msg.role && ["tool", "assistant"].includes(msg.role), ) } } catch {} - const headers: Record = { + const headers = { ...init.headers, ...copilot.HEADERS, Authorization: `Bearer ${info.access}`, "Openai-Intent": "conversation-edits", "X-Initiator": isAgentCall ? "agent" : "user", } - if (isVisionRequest) { - headers["Copilot-Vision-Request"] = "true" - } delete headers["x-api-key"] return fetch(input, { ...init, @@ -139,12 +138,14 @@ export namespace Provider { } }, "amazon-bedrock": async () => { - if (!process.env["AWS_PROFILE"] && !process.env["AWS_ACCESS_KEY_ID"] && !process.env["AWS_BEARER_TOKEN_BEDROCK"]) + if (!process.env["AWS_PROFILE"] && !process.env["AWS_ACCESS_KEY_ID"]) return { autoload: false } const region = process.env["AWS_REGION"] ?? 
"us-east-1" - const { fromNodeProviderChain } = await import(await BunProc.install("@aws-sdk/credential-providers")) + const { fromNodeProviderChain } = await import( + await BunProc.install("@aws-sdk/credential-providers") + ) return { autoload: true, options: { @@ -156,7 +157,9 @@ export namespace Provider { switch (regionPrefix) { case "us": { - const modelRequiresPrefix = ["claude", "deepseek"].some((m) => modelID.includes(m)) + const modelRequiresPrefix = ["claude", "deepseek"].some((m) => + modelID.includes(m), + ) if (modelRequiresPrefix) { modelID = `${regionPrefix}.${modelID}` } @@ -171,18 +174,25 @@ export namespace Provider { "eu-south-1", "eu-south-2", ].some((r) => region.includes(r)) - const modelRequiresPrefix = ["claude", "nova-lite", "nova-micro", "llama3", "pixtral"].some((m) => - modelID.includes(m), - ) + const modelRequiresPrefix = [ + "claude", + "nova-lite", + "nova-micro", + "llama3", + "pixtral", + ].some((m) => modelID.includes(m)) if (regionRequiresPrefix && modelRequiresPrefix) { modelID = `${regionPrefix}.${modelID}` } break } case "ap": { - const modelRequiresPrefix = ["claude", "nova-lite", "nova-micro", "nova-pro"].some((m) => - modelID.includes(m), - ) + const modelRequiresPrefix = [ + "claude", + "nova-lite", + "nova-micro", + "nova-pro", + ].some((m) => modelID.includes(m)) if (modelRequiresPrefix) { regionPrefix = "apac" modelID = `${regionPrefix}.${modelID}` @@ -220,7 +230,10 @@ export namespace Provider { options: Record } } = {} - const models = new Map() + const models = new Map< + string, + { info: ModelsDev.Model; language: LanguageModel } + >() const sdk = new Map() log.info("init") @@ -235,7 +248,7 @@ export namespace Provider { if (!provider) { const info = database[id] if (!info) return - if (info.api && !options["baseURL"]) options["baseURL"] = info.api + if (info.api) options["baseURL"] = info.api providers[id] = { source, info, @@ -272,20 +285,14 @@ export namespace Provider { reasoning: model.reasoning ?? existing?.reasoning ?? false, temperature: model.temperature ?? existing?.temperature ?? false, tool_call: model.tool_call ?? existing?.tool_call ?? true, - cost: - !model.cost && !existing?.cost - ? { - input: 0, - output: 0, - cache_read: 0, - cache_write: 0, - } - : { - cache_read: 0, - cache_write: 0, - ...existing?.cost, - ...model.cost, - }, + cost: { + ...existing?.cost, + ...model.cost, + input: 0, + output: 0, + cache_read: 0, + cache_write: 0, + }, options: { ...existing?.options, ...model.options, @@ -301,7 +308,9 @@ export namespace Provider { database[providerID] = parsed } - const disabled = await Config.get().then((cfg) => new Set(cfg.disabled_providers ?? [])) + const disabled = await Config.get().then( + (cfg) => new Set(cfg.disabled_providers ?? []), + ) // load env for (const [providerID, provider] of Object.entries(database)) { if (disabled.has(providerID)) continue @@ -328,7 +337,12 @@ export namespace Provider { if (disabled.has(providerID)) continue const result = await fn(database[providerID]) if (result && (result.autoload || providers[providerID])) { - mergeProvider(providerID, result.options ?? {}, "custom", result.getModel) + mergeProvider( + providerID, + result.options ?? {}, + "custom", + result.getModel, + ) } } @@ -365,7 +379,7 @@ export namespace Provider { const existing = s.sdk.get(provider.id) if (existing) return existing const pkg = provider.npm ?? 
provider.id - const mod = await import(await BunProc.install(pkg, "beta")) + const mod = await import(await BunProc.install(pkg, "latest")) const fn = mod[Object.keys(mod).find((key) => key.startsWith("create"))!] const loaded = fn(s.providers[provider.id]?.options) s.sdk.set(provider.id, loaded) @@ -392,7 +406,9 @@ export namespace Provider { const sdk = await getSDK(provider.info) try { - const language = provider.getModel ? await provider.getModel(sdk, modelID) : sdk.languageModel(modelID) + const language = provider.getModel + ? await provider.getModel(sdk, modelID) + : sdk.languageModel(modelID) log.info("found", { providerID, modelID }) s.models.set(key, { info, @@ -415,29 +431,14 @@ export namespace Provider { } } - export async function getSmallModel(providerID: string) { - const cfg = await Config.get() - - if (cfg.small_model) { - const parsed = parseModel(cfg.small_model) - return getModel(parsed.providerID, parsed.modelID) - } - - const provider = await state().then((state) => state.providers[providerID]) - if (!provider) return - const priority = ["3-5-haiku", "3.5-haiku", "gemini-2.5-flash"] - for (const item of priority) { - for (const model of Object.keys(provider.info.models)) { - if (model.includes(item)) return getModel(providerID, model) - } - } - } - const priority = ["gemini-2.5-pro-preview", "codex-mini", "claude-sonnet-4"] export function sort(models: ModelsDev.Model[]) { return sortBy( models, - [(model) => priority.findIndex((filter) => model.id.includes(filter)), "desc"], + [ + (model) => priority.findIndex((filter) => model.id.includes(filter)), + "desc", + ], [(model) => (model.id.includes("latest") ? 0 : 1), "asc"], [(model) => model.id, "desc"], ) @@ -448,7 +449,11 @@ export namespace Provider { if (cfg.model) return parseModel(cfg.model) const provider = await list() .then((val) => Object.values(val)) - .then((x) => x.find((p) => !cfg.provider || Object.keys(cfg.provider).includes(p.info.id))) + .then((x) => + x.find( + (p) => !cfg.provider || Object.keys(cfg.provider).includes(p.info.id), + ), + ) if (!provider) throw new Error("no providers found") const [model] = sort(Object.values(provider.info.models)) if (!model) throw new Error("no models found") @@ -481,7 +486,7 @@ export namespace Provider { WriteTool, TodoWriteTool, TodoReadTool, - TaskTool, + // TaskTool, ] const TOOL_MAPPING: Record = { @@ -494,10 +499,7 @@ export namespace Provider { ...t, parameters: optionalToNullable(t.parameters), })), - google: TOOLS.map((t) => ({ - ...t, - parameters: sanitizeGeminiParameters(t.parameters), - })), + google: TOOLS, } export async function tools(providerID: string) { @@ -511,60 +513,6 @@ export namespace Provider { return TOOL_MAPPING[providerID] ?? TOOLS } - function sanitizeGeminiParameters(schema: z.ZodTypeAny, visited = new Set()): z.ZodTypeAny { - if (!schema || visited.has(schema)) { - return schema - } - visited.add(schema) - - if (schema instanceof z.ZodDefault) { - const innerSchema = schema.removeDefault() - // Handle Gemini's incompatibility with `default` on `anyOf` (unions). - if (innerSchema instanceof z.ZodUnion) { - // The schema was `z.union(...).default(...)`, which is not allowed. - // We strip the default and return the sanitized union. - return sanitizeGeminiParameters(innerSchema, visited) - } - // Otherwise, the default is on a regular type, which is allowed. - // We recurse on the inner type and then re-apply the default. 
- return sanitizeGeminiParameters(innerSchema, visited).default(schema._def.defaultValue()) - } - - if (schema instanceof z.ZodOptional) { - return z.optional(sanitizeGeminiParameters(schema.unwrap(), visited)) - } - - if (schema instanceof z.ZodObject) { - const newShape: Record = {} - for (const [key, value] of Object.entries(schema.shape)) { - newShape[key] = sanitizeGeminiParameters(value as z.ZodTypeAny, visited) - } - return z.object(newShape) - } - - if (schema instanceof z.ZodArray) { - return z.array(sanitizeGeminiParameters(schema.element, visited)) - } - - if (schema instanceof z.ZodUnion) { - // This schema corresponds to `anyOf` in JSON Schema. - // We recursively sanitize each option in the union. - const sanitizedOptions = schema.options.map((option: z.ZodTypeAny) => sanitizeGeminiParameters(option, visited)) - return z.union(sanitizedOptions as [z.ZodTypeAny, z.ZodTypeAny, ...z.ZodTypeAny[]]) - } - - if (schema instanceof z.ZodString) { - const newSchema = z.string({ description: schema.description }) - const safeChecks = ["min", "max", "length", "regex", "startsWith", "endsWith", "includes", "trim"] - // rome-ignore lint/suspicious/noExplicitAny: - ;(newSchema._def as any).checks = (schema._def as z.ZodStringDef).checks.filter((check) => - safeChecks.includes(check.kind), - ) - return newSchema - } - - return schema - } function optionalToNullable(schema: z.ZodTypeAny): z.ZodTypeAny { if (schema instanceof z.ZodObject) { const shape = schema.shape @@ -588,11 +536,9 @@ export namespace Provider { if (schema instanceof z.ZodUnion) { return z.union( - schema.options.map((option: z.ZodTypeAny) => optionalToNullable(option)) as [ - z.ZodTypeAny, - z.ZodTypeAny, - ...z.ZodTypeAny[], - ], + schema.options.map((option: z.ZodTypeAny) => + optionalToNullable(option), + ) as [z.ZodTypeAny, z.ZodTypeAny, ...z.ZodTypeAny[]], ) } @@ -613,4 +559,12 @@ export namespace Provider { providerID: z.string(), }), ) + + export const AuthError = NamedError.create( + "ProviderAuthError", + z.object({ + providerID: z.string(), + message: z.string(), + }), + ) } diff --git a/packages/opencode/src/provider/transform.ts b/packages/opencode/src/provider/transform.ts index 433e9f41..d5e140b3 100644 --- a/packages/opencode/src/provider/transform.ts +++ b/packages/opencode/src/provider/transform.ts @@ -1,21 +1,22 @@ -import type { ModelMessage } from "ai" +import type { LanguageModelV1Prompt } from "ai" import { unique } from "remeda" export namespace ProviderTransform { - export function message(msgs: ModelMessage[], providerID: string, modelID: string) { + export function message( + msgs: LanguageModelV1Prompt, + providerID: string, + modelID: string, + ) { if (providerID === "anthropic" || modelID.includes("anthropic")) { const system = msgs.filter((msg) => msg.role === "system").slice(0, 2) const final = msgs.filter((msg) => msg.role !== "system").slice(-2) for (const msg of unique([...system, ...final])) { - msg.providerOptions = { - ...msg.providerOptions, + msg.providerMetadata = { + ...msg.providerMetadata, anthropic: { cacheControl: { type: "ephemeral" }, }, - openaiCompatible: { - cache_control: { type: "ephemeral" }, - }, } } } @@ -24,8 +25,8 @@ export namespace ProviderTransform { const final = msgs.filter((msg) => msg.role !== "system").slice(-2) for (const msg of unique([...system, ...final])) { - msg.providerOptions = { - ...msg.providerOptions, + msg.providerMetadata = { + ...msg.providerMetadata, bedrock: { cachePoint: { type: "ephemeral" }, }, diff --git 
a/packages/opencode/src/server/server.ts b/packages/opencode/src/server/server.ts index 4e6ebfbb..df645cd8 100644 --- a/packages/opencode/src/server/server.ts +++ b/packages/opencode/src/server/server.ts @@ -6,6 +6,7 @@ import { streamSSE } from "hono/streaming" import { Session } from "../session" import { resolver, validator as zValidator } from "hono-openapi/zod" import { z } from "zod" +import { Message } from "../session/message" import { Provider } from "../provider/provider" import { App } from "../app/app" import { mapValues } from "remeda" @@ -15,8 +16,6 @@ import { Ripgrep } from "../file/ripgrep" import { Config } from "../config/config" import { File } from "../file" import { LSP } from "../lsp" -import { MessageV2 } from "../session/message-v2" -import { Mode } from "../session/mode" const ERRORS = { 400: { @@ -52,9 +51,12 @@ export namespace Server { status: 400, }) } - return c.json(new NamedError.Unknown({ message: err.toString() }).toObject(), { - status: 400, - }) + return c.json( + new NamedError.Unknown({ message: err.toString() }).toObject(), + { + status: 400, + }, + ) }) .use(async (c, next) => { log.info("request", { @@ -269,7 +271,6 @@ export namespace Server { zValidator( "json", z.object({ - messageID: z.string(), providerID: z.string(), modelID: z.string(), }), @@ -406,14 +407,7 @@ export namespace Server { description: "List of messages", content: { "application/json": { - schema: resolver( - z - .object({ - info: MessageV2.Info, - parts: MessageV2.Part.array(), - }) - .array(), - ), + schema: resolver(Message.Info.array()), }, }, }, @@ -439,7 +433,7 @@ export namespace Server { description: "Created message", content: { "application/json": { - schema: resolver(MessageV2.Assistant), + schema: resolver(Message.Info), }, }, }, @@ -451,7 +445,14 @@ export namespace Server { id: z.string().openapi({ description: "Session ID" }), }), ), - zValidator("json", Session.ChatInput.omit({ sessionID: true })), + zValidator( + "json", + z.object({ + providerID: z.string(), + modelID: z.string(), + parts: Message.MessagePart.array(), + }), + ), async (c) => { const sessionID = c.req.valid("param").id const body = c.req.valid("json") @@ -480,10 +481,15 @@ export namespace Server { }, }), async (c) => { - const providers = await Provider.list().then((x) => mapValues(x, (item) => item.info)) + const providers = await Provider.list().then((x) => + mapValues(x, (item) => item.info), + ) return c.json({ providers: Object.values(providers), - default: mapValues(providers, (item) => Provider.sort(Object.values(item.models))[0].id), + default: mapValues( + providers, + (item) => Provider.sort(Object.values(item.models))[0].id, + ), }) }, ) @@ -560,7 +566,7 @@ export namespace Server { description: "Symbols", content: { "application/json": { - schema: resolver(LSP.Symbol.array()), + schema: resolver(z.unknown().array()), }, }, }, @@ -623,7 +629,16 @@ export namespace Server { description: "File status", content: { "application/json": { - schema: resolver(File.Info.array()), + schema: resolver( + z + .object({ + file: z.string(), + added: z.number().int(), + removed: z.number().int(), + status: z.enum(["added", "deleted", "modified"]), + }) + .array(), + ), }, }, }, @@ -634,75 +649,6 @@ export namespace Server { return c.json(content) }, ) - .post( - "/log", - describeRoute({ - description: "Write a log entry to the server logs", - responses: { - 200: { - description: "Log entry written successfully", - content: { - "application/json": { - schema: resolver(z.boolean()), - }, - }, - }, - 
}, - }), - zValidator( - "json", - z.object({ - service: z.string().openapi({ description: "Service name for the log entry" }), - level: z.enum(["debug", "info", "error", "warn"]).openapi({ description: "Log level" }), - message: z.string().openapi({ description: "Log message" }), - extra: z - .record(z.string(), z.any()) - .optional() - .openapi({ description: "Additional metadata for the log entry" }), - }), - ), - async (c) => { - const { service, level, message, extra } = c.req.valid("json") - const logger = Log.create({ service }) - - switch (level) { - case "debug": - logger.debug(message, extra) - break - case "info": - logger.info(message, extra) - break - case "error": - logger.error(message, extra) - break - case "warn": - logger.warn(message, extra) - break - } - - return c.json(true) - }, - ) - .get( - "/mode", - describeRoute({ - description: "List all modes", - responses: { - 200: { - description: "List of modes", - content: { - "application/json": { - schema: resolver(Mode.Info.array()), - }, - }, - }, - }, - }), - async (c) => { - const modes = await Mode.list() - return c.json(modes) - }, - ) return result } diff --git a/packages/opencode/src/session/index.ts b/packages/opencode/src/session/index.ts index 2b358bec..b07dc31c 100644 --- a/packages/opencode/src/session/index.ts +++ b/packages/opencode/src/session/index.ts @@ -1,51 +1,44 @@ import path from "path" -import { Decimal } from "decimal.js" -import { z, ZodSchema } from "zod" +import { App } from "../app/app" +import { Identifier } from "../id/id" +import { Storage } from "../storage/storage" +import { Log } from "../util/log" import { generateText, LoadAPIKeyError, + convertToCoreMessages, streamText, tool, - wrapLanguageModel, type Tool as AITool, type LanguageModelUsage, + type CoreMessage, + type UIMessage, type ProviderMetadata, - type ModelMessage, - stepCountIs, - type StreamTextResult, + wrapLanguageModel, } from "ai" +import { z, ZodSchema } from "zod" +import { Decimal } from "decimal.js" import PROMPT_INITIALIZE from "../session/prompt/initialize.txt" -import PROMPT_PLAN from "../session/prompt/plan.txt" -import PROMPT_ANTHROPIC_SPOOF from "../session/prompt/anthropic_spoof.txt" -import { App } from "../app/app" -import { Bus } from "../bus" -import { Config } from "../config/config" -import { Flag } from "../flag/flag" -import { Identifier } from "../id/id" -import { Installation } from "../installation" -import { MCP } from "../mcp" -import { Provider } from "../provider/provider" -import { ProviderTransform } from "../provider/transform" -import type { ModelsDev } from "../provider/models" import { Share } from "../share/share" -import { Snapshot } from "../snapshot" -import { Storage } from "../storage/storage" -import { Log } from "../util/log" +import { Message } from "./message" +import { Bus } from "../bus" +import { Provider } from "../provider/provider" +import { MCP } from "../mcp" import { NamedError } from "../util/error" +import type { Tool } from "../tool/tool" import { SystemPrompt } from "./system" -import { FileTime } from "../file/time" -import { MessageV2 } from "./message-v2" -import { Mode } from "./mode" -import { LSP } from "../lsp" -import { ReadTool } from "../tool/read" +import { Flag } from "../flag/flag" +import type { ModelsDev } from "../provider/models" +import { Installation } from "../installation" +import { Config } from "../config/config" +import { ProviderTransform } from "../provider/transform" +import { Snapshot } from "../snapshot" export namespace Session { const 
log = Log.create({ service: "session" }) - const OUTPUT_TOKEN_MAX = 32_000 - export const Info = z .object({ id: Identifier.schema("session"), @@ -106,8 +99,7 @@ export namespace Session { Error: Bus.event( "session.error", z.object({ - sessionID: z.string().optional(), - error: MessageV2.Assistant.shape.error, + error: Message.Info.shape.metadata.shape.error, }), ), } @@ -116,7 +108,7 @@ export namespace Session { "session", () => { const sessions = new Map() - const messages = new Map() + const messages = new Map() const pending = new Map() return { @@ -137,7 +129,9 @@ export namespace Session { id: Identifier.descending("session"), version: Installation.VERSION, parentID, - title: (parentID ? "Child session - " : "New Session - ") + new Date().toISOString(), + title: + (parentID ? "Child session - " : "New Session - ") + + new Date().toISOString(), time: { created: Date.now(), updated: Date.now(), @@ -147,16 +141,12 @@ export namespace Session { state().sessions.set(result.id, result) await Storage.writeJSON("session/info/" + result.id, result) const cfg = await Config.get() - if (!result.parentID && (Flag.OPENCODE_AUTO_SHARE || cfg.share === "auto")) - share(result.id) - .then((share) => { - update(result.id, (draft) => { - draft.share = share - }) - }) - .catch(() => { - // Silently ignore sharing errors during session creation + if (!result.parentID && (Flag.OPENCODE_AUTO_SHARE || cfg.autoshare)) + share(result.id).then((share) => { + update(result.id, (draft) => { + draft.share = share }) + }) Bus.publish(Event.Updated, { info: result, }) @@ -178,11 +168,6 @@ export namespace Session { } export async function share(id: string) { - const cfg = await Config.get() - if (cfg.share === "disabled") { - throw new Error("Sharing is disabled in configuration") - } - const session = await get(id) if (session.share) return session.share const share = await Share.create(id) @@ -194,22 +179,17 @@ export namespace Session { await Storage.writeJSON("session/share/" + id, share) await Share.sync("session/info/" + id, session) for (const msg of await messages(id)) { - await Share.sync("session/message/" + id + "/" + msg.info.id, msg.info) - for (const part of msg.parts) { - await Share.sync("session/part/" + id + "/" + msg.info.id + "/" + part.id, part) - } + await Share.sync("session/message/" + id + "/" + msg.id, msg) } return share } export async function unshare(id: string) { - const share = await getShare(id) - if (!share) return await Storage.remove("session/share/" + id) await update(id, (draft) => { draft.share = undefined }) - await Share.remove(id, share.secret) + await Share.remove(id) } export async function update(id: string, editor: (session: Info) => void) { @@ -227,37 +207,24 @@ export namespace Session { } export async function messages(sessionID: string) { - const result = [] as { - info: MessageV2.Info - parts: MessageV2.Part[] - }[] - for (const p of await Storage.list("session/message/" + sessionID)) { - const read = await Storage.readJSON(p) - result.push({ - info: read, - parts: await parts(sessionID, read.id), - }) - } - result.sort((a, b) => (a.info.id > b.info.id ? 
1 : -1)) - return result - } - - export async function getMessage(sessionID: string, messageID: string) { - return Storage.readJSON("session/message/" + sessionID + "/" + messageID) - } - - export async function parts(sessionID: string, messageID: string) { - const result = [] as MessageV2.Part[] - for (const item of await Storage.list("session/part/" + sessionID + "/" + messageID)) { - const read = await Storage.readJSON(item) + const result = [] as Message.Info[] + const list = Storage.list("session/message/" + sessionID) + for await (const p of list) { + const read = await Storage.readJSON(p) result.push(read) } result.sort((a, b) => (a.id > b.id ? 1 : -1)) return result } + export async function getMessage(sessionID: string, messageID: string) { + return Storage.readJSON( + "session/message/" + sessionID + "/" + messageID, + ) + } + export async function* list() { - for (const item of await Storage.list("session/info")) { + for await (const item of Storage.list("session/info")) { const sessionID = path.basename(item, ".json") yield get(sessionID) } @@ -265,7 +232,7 @@ export namespace Session { export async function children(parentID: string) { const result = [] as Session.Info[] - for (const item of await Storage.list("session/info")) { + for await (const item of Storage.list("session/info")) { const sessionID = path.basename(item, ".json") const session = await get(sessionID) if (session.parentID !== parentID) continue @@ -304,57 +271,26 @@ export namespace Session { } } - async function updateMessage(msg: MessageV2.Info) { - await Storage.writeJSON("session/message/" + msg.sessionID + "/" + msg.id, msg) - Bus.publish(MessageV2.Event.Updated, { + async function updateMessage(msg: Message.Info) { + await Storage.writeJSON( + "session/message/" + msg.metadata.sessionID + "/" + msg.id, + msg, + ) + Bus.publish(Message.Event.Updated, { info: msg, }) } - async function updatePart(part: MessageV2.Part) { - await Storage.writeJSON(["session", "part", part.sessionID, part.messageID, part.id].join("/"), part) - Bus.publish(MessageV2.Event.PartUpdated, { - part, - }) - return part - } - - export const ChatInput = z.object({ - sessionID: Identifier.schema("session"), - messageID: Identifier.schema("message").optional(), - providerID: z.string(), - modelID: z.string(), - mode: z.string().optional(), - parts: z.array( - z.discriminatedUnion("type", [ - MessageV2.TextPart.omit({ - messageID: true, - sessionID: true, - }) - .partial({ - id: true, - }) - .openapi({ - ref: "TextPartInput", - }), - MessageV2.FilePart.omit({ - messageID: true, - sessionID: true, - }) - .partial({ - id: true, - }) - .openapi({ - ref: "FilePartInput", - }), - ]), - ), - }) - - export async function chat(input: z.infer) { + export async function chat(input: { + sessionID: string + providerID: string + modelID: string + parts: Message.MessagePart[] + system?: string[] + tools?: Tool.Info[] + }) { const l = log.clone().tag("session", input.sessionID) l.info("chatting") - const model = await Provider.getModel(input.providerID, input.modelID) let msgs = await messages(input.sessionID) const session = await get(input.sessionID) @@ -363,18 +299,20 @@ export namespace Session { const trimmed = [] for (const msg of msgs) { if ( - msg.info.id > session.revert.messageID || - (msg.info.id === session.revert.messageID && session.revert.part === 0) + msg.id > session.revert.messageID || + (msg.id === session.revert.messageID && session.revert.part === 0) ) { - await Storage.remove("session/message/" + input.sessionID + "/" + 
msg.info.id) - await Bus.publish(MessageV2.Event.Removed, { + await Storage.remove( + "session/message/" + input.sessionID + "/" + msg.id, + ) + await Bus.publish(Message.Event.Removed, { sessionID: input.sessionID, - messageID: msg.info.id, + messageID: msg.id, }) continue } - if (msg.info.id === session.revert.messageID) { + if (msg.id === session.revert.messageID) { if (session.revert.part === 0) break msg.parts = msg.parts.slice(0, session.revert.part) } @@ -386,14 +324,23 @@ export namespace Session { }) } - const previous = msgs.filter((x) => x.info.role === "assistant").at(-1)?.info as MessageV2.Assistant - const outputLimit = Math.min(model.info.limit.output, OUTPUT_TOKEN_MAX) || OUTPUT_TOKEN_MAX + const previous = msgs.at(-1) // auto summarize if too long - if (previous && previous.tokens) { + if (previous?.metadata.assistant) { const tokens = - previous.tokens.input + previous.tokens.cache.read + previous.tokens.cache.write + previous.tokens.output - if (model.info.limit.context && tokens > Math.max((model.info.limit.context - outputLimit) * 0.9, 0)) { + previous.metadata.assistant.tokens.input + + previous.metadata.assistant.tokens.cache.read + + previous.metadata.assistant.tokens.cache.write + + previous.metadata.assistant.tokens.output + if ( + model.info.limit.context && + tokens > + Math.max( + (model.info.limit.context - (model.info.limit.output ?? 0)) * 0.9, + 0, + ) + ) { await summarize({ sessionID: input.sessionID, providerID: input.providerID, @@ -405,164 +352,32 @@ export namespace Session { using abort = lock(input.sessionID) - const lastSummary = msgs.findLast((msg) => msg.info.role === "assistant" && msg.info.summary === true) - if (lastSummary) msgs = msgs.filter((msg) => msg.info.id >= lastSummary.info.id) - - const userMsg: MessageV2.Info = { - id: input.messageID ?? Identifier.ascending("message"), - role: "user", - sessionID: input.sessionID, - time: { - created: Date.now(), - }, - } + const lastSummary = msgs.findLast( + (msg) => msg.metadata.assistant?.summary === true, + ) + if (lastSummary) msgs = msgs.filter((msg) => msg.id >= lastSummary.id) const app = App.info() - const userParts = await Promise.all( - input.parts.map(async (part): Promise => { - if (part.type === "file") { - const url = new URL(part.url) - switch (url.protocol) { - case "file:": - // have to normalize, symbol search returns absolute paths - // Decode the pathname since URL constructor doesn't automatically decode it - const pathname = decodeURIComponent(url.pathname) - const relativePath = pathname.replace(app.path.cwd, ".") - const filePath = path.join(app.path.cwd, relativePath) - - if (part.mime === "text/plain") { - let offset: number | undefined = undefined - let limit: number | undefined = undefined - const range = { - start: url.searchParams.get("start"), - end: url.searchParams.get("end"), - } - if (range.start != null) { - const filePath = part.url.split("?")[0] - let start = parseInt(range.start) - let end = range.end ? 
parseInt(range.end) : undefined - // some LSP servers (eg, gopls) don't give full range in - // workspace/symbol searches, so we'll try to find the - // symbol in the document to get the full range - if (start === end) { - const symbols = await LSP.documentSymbol(filePath) - for (const symbol of symbols) { - let range: LSP.Range | undefined - if ("range" in symbol) { - range = symbol.range - } else if ("location" in symbol) { - range = symbol.location.range - } - if (range?.start?.line && range?.start?.line === start) { - start = range.start.line - end = range?.end?.line ?? start - break - } - } - offset = Math.max(start - 2, 0) - if (end) { - limit = end - offset + 2 - } - } - } - const args = { filePath, offset, limit } - const result = await ReadTool.execute(args, { - sessionID: input.sessionID, - abort: abort.signal, - messageID: userMsg.id, - metadata: async () => {}, - }) - return [ - { - id: Identifier.ascending("part"), - messageID: userMsg.id, - sessionID: input.sessionID, - type: "text", - synthetic: true, - text: `Called the Read tool with the following input: ${JSON.stringify(args)}`, - }, - { - id: Identifier.ascending("part"), - messageID: userMsg.id, - sessionID: input.sessionID, - type: "text", - synthetic: true, - text: result.output, - }, - ] - } - - let file = Bun.file(filePath) - FileTime.read(input.sessionID, filePath) - return [ - { - id: Identifier.ascending("part"), - messageID: userMsg.id, - sessionID: input.sessionID, - type: "text", - text: `Called the Read tool with the following input: {\"filePath\":\"${pathname}\"}`, - synthetic: true, - }, - { - id: Identifier.ascending("part"), - messageID: userMsg.id, - sessionID: input.sessionID, - type: "file", - url: `data:${part.mime};base64,` + Buffer.from(await file.bytes()).toString("base64"), - mime: part.mime, - filename: part.filename!, - }, - ] - } - } - return [ - { - id: Identifier.ascending("part"), - ...part, - messageID: userMsg.id, - sessionID: input.sessionID, - }, - ] - }), - ).then((x) => x.flat()) - - if (input.mode === "plan") - userParts.push({ - id: Identifier.ascending("part"), - messageID: userMsg.id, - sessionID: input.sessionID, - type: "text", - text: PROMPT_PLAN, - synthetic: true, - }) - if (msgs.length === 0 && !session.parentID) { - const small = (await Provider.getSmallModel(input.providerID)) ?? model generateText({ - maxOutputTokens: input.providerID === "google" ? 1024 : 20, - providerOptions: small.info.options, + maxTokens: input.providerID === "google" ? 
1024 : 20, + providerOptions: model.info.options, messages: [ ...SystemPrompt.title(input.providerID).map( - (x): ModelMessage => ({ + (x): CoreMessage => ({ role: "system", content: x, }), ), - ...MessageV2.toModelMessage([ + ...convertToCoreMessages([ { - info: { - id: Identifier.ascending("message"), - role: "user", - sessionID: input.sessionID, - time: { - created: Date.now(), - }, - }, - parts: userParts, + role: "user", + content: "", + parts: toParts(input.parts), }, ]), ], - model: small.language, + model: model.language, }) .then((result) => { if (result.text) @@ -572,128 +387,217 @@ export namespace Session { }) .catch(() => {}) } - await updateMessage(userMsg) - for (const part of userParts) { - await updatePart(part) + const snapshot = await Snapshot.create(input.sessionID) + const msg: Message.Info = { + role: "user", + id: Identifier.ascending("message"), + parts: input.parts, + metadata: { + time: { + created: Date.now(), + }, + sessionID: input.sessionID, + tool: {}, + snapshot, + }, } - msgs.push({ info: userMsg, parts: userParts }) + await updateMessage(msg) + msgs.push(msg) - const mode = await Mode.get(input.mode ?? "build") - let system = input.providerID === "anthropic" ? [PROMPT_ANTHROPIC_SPOOF.trim()] : [] - system.push(...(mode.prompt ? [mode.prompt] : SystemPrompt.provider(input.modelID))) + const system = input.system ?? SystemPrompt.provider(input.providerID) system.push(...(await SystemPrompt.environment())) system.push(...(await SystemPrompt.custom())) - // max 2 system prompt messages for caching purposes - const [first, ...rest] = system - system = [first, rest.join("\n")] - const assistantMsg: MessageV2.Info = { + const next: Message.Info = { id: Identifier.ascending("message"), role: "assistant", - system, - path: { - cwd: app.path.cwd, - root: app.path.root, + parts: [], + metadata: { + snapshot, + assistant: { + system, + path: { + cwd: app.path.cwd, + root: app.path.root, + }, + cost: 0, + tokens: { + input: 0, + output: 0, + reasoning: 0, + cache: { read: 0, write: 0 }, + }, + modelID: input.modelID, + providerID: input.providerID, + }, + time: { + created: Date.now(), + }, + sessionID: input.sessionID, + tool: {}, }, - cost: 0, - tokens: { - input: 0, - output: 0, - reasoning: 0, - cache: { read: 0, write: 0 }, - }, - modelID: input.modelID, - providerID: input.providerID, - time: { - created: Date.now(), - }, - sessionID: input.sessionID, } - await updateMessage(assistantMsg) + await updateMessage(next) const tools: Record = {} - const processor = createProcessor(assistantMsg, model.info) - for (const item of await Provider.tools(input.providerID)) { - if (mode.tools[item.id] === false) continue - if (session.parentID && item.id === "task") continue - tools[item.id] = tool({ + tools[item.id.replaceAll(".", "_")] = tool({ id: item.id as any, description: item.description, - inputSchema: item.parameters as ZodSchema, - async execute(args, options) { - const result = await item.execute(args, { - sessionID: input.sessionID, - abort: abort.signal, - messageID: assistantMsg.id, - metadata: async (val) => { - const match = processor.partFromToolCall(options.toolCallId) - if (match && match.state.status === "running") { - await updatePart({ - ...match, - state: { - title: val.title, - metadata: val.metadata, - status: "running", - input: args, - time: { - start: Date.now(), - }, + parameters: item.parameters as ZodSchema, + async execute(args, opts) { + const start = Date.now() + try { + const result = await item.execute(args, { + sessionID: input.sessionID, 
+ abort: abort.signal, + messageID: next.id, + metadata: async (val) => { + next.metadata.tool[opts.toolCallId] = { + ...val, + time: { + start: 0, + end: 0, }, - }) - } - }, - }) - return result - }, - toModelOutput(result) { - return { - type: "text", - value: result.output, + } + await updateMessage(next) + }, + }) + next.metadata!.tool![opts.toolCallId] = { + ...result.metadata, + snapshot: await Snapshot.create(input.sessionID), + time: { + start, + end: Date.now(), + }, + } + await updateMessage(next) + return result.output + } catch (e: any) { + next.metadata!.tool![opts.toolCallId] = { + error: true, + message: e.toString(), + title: e.toString(), + snapshot: await Snapshot.create(input.sessionID), + time: { + start, + end: Date.now(), + }, + } + await updateMessage(next) + return e.toString() } }, }) } for (const [key, item] of Object.entries(await MCP.tools())) { - if (mode.tools[key] === false) continue const execute = item.execute if (!execute) continue item.execute = async (args, opts) => { - const result = await execute(args, opts) - const output = result.content - .filter((x: any) => x.type === "text") - .map((x: any) => x.text) - .join("\n\n") - - return { - output, - } - } - item.toModelOutput = (result) => { - return { - type: "text", - value: result.output, + const start = Date.now() + try { + const result = await execute(args, opts) + next.metadata!.tool![opts.toolCallId] = { + ...result.metadata, + snapshot: await Snapshot.create(input.sessionID), + time: { + start, + end: Date.now(), + }, + } + await updateMessage(next) + return result.content + .filter((x: any) => x.type === "text") + .map((x: any) => x.text) + .join("\n\n") + } catch (e: any) { + next.metadata!.tool![opts.toolCallId] = { + error: true, + message: e.toString(), + snapshot: await Snapshot.create(input.sessionID), + title: "mcp", + time: { + start, + end: Date.now(), + }, + } + await updateMessage(next) + return e.toString() } } tools[key] = item } - const stream = streamText({ - onError() {}, + let text: Message.TextPart | undefined + const result = streamText({ + onStepFinish: async (step) => { + log.info("step finish", { finishReason: step.finishReason }) + const assistant = next.metadata!.assistant! 
+ const usage = getUsage(model.info, step.usage, step.providerMetadata) + assistant.cost += usage.cost + assistant.tokens = usage.tokens + await updateMessage(next) + if (text) { + Bus.publish(Message.Event.PartUpdated, { + part: text, + messageID: next.id, + sessionID: next.metadata.sessionID, + }) + } + text = undefined + }, + onError(err) { + log.error("callback error", err) + switch (true) { + case LoadAPIKeyError.isInstance(err.error): + next.metadata.error = new Provider.AuthError( + { + providerID: input.providerID, + message: err.error.message, + }, + { cause: err.error }, + ).toObject() + break + case err.error instanceof Error: + next.metadata.error = new NamedError.Unknown( + { message: err.error.toString() }, + { cause: err.error }, + ).toObject() + break + default: + next.metadata.error = new NamedError.Unknown( + { message: JSON.stringify(err.error) }, + { cause: err.error }, + ) + } + Bus.publish(Event.Error, { + error: next.metadata.error, + }) + }, + // async prepareStep(step) { + // next.parts.push({ + // type: "step-start", + // }) + // await updateMessage(next) + // return step + // }, + toolCallStreaming: true, maxRetries: 10, - maxOutputTokens: outputLimit, + maxTokens: Math.max(0, model.info.limit.output) || undefined, abortSignal: abort.signal, - stopWhen: stepCountIs(1000), + maxSteps: 1000, providerOptions: model.info.options, messages: [ ...system.map( - (x): ModelMessage => ({ + (x): CoreMessage => ({ role: "system", content: x, }), ), - ...MessageV2.toModelMessage(msgs), + ...convertToCoreMessages( + msgs.map(toUIMessage).filter((x) => x.parts.length > 0), + ), ], temperature: model.info.temperature ? 0 : undefined, tools: model.info.tool_call === false ? undefined : tools, @@ -703,8 +607,11 @@ export namespace Session { { async transformParams(args) { if (args.type === "stream") { - // @ts-expect-error - args.params.prompt = ProviderTransform.message(args.params.prompt, input.providerID, input.modelID) + args.params.prompt = ProviderTransform.message( + args.params.prompt, + input.providerID, + input.modelID, + ) } return args.params }, @@ -712,265 +619,173 @@ export namespace Session { ], }), }) - const result = await processor.process(stream) - return result - } - - function createProcessor(assistantMsg: MessageV2.Assistant, model: ModelsDev.Model) { - const toolCalls: Record = {} - return { - partFromToolCall(toolCallID: string) { - return toolCalls[toolCallID] - }, - async process(stream: StreamTextResult, never>) { - try { - let currentText: MessageV2.TextPart | undefined - - for await (const value of stream.fullStream) { - log.info("part", { - type: value.type, + try { + for await (const value of result.fullStream) { + l.info("part", { + type: value.type, + }) + switch (value.type) { + case "step-start": + next.parts.push({ + type: "step-start", }) - switch (value.type) { - case "start": - const snapshot = await Snapshot.create(assistantMsg.sessionID) - if (snapshot) - await updatePart({ - id: Identifier.ascending("part"), - messageID: assistantMsg.id, - sessionID: assistantMsg.sessionID, - type: "snapshot", - snapshot, - }) - break - - case "tool-input-start": - const part = await updatePart({ - id: Identifier.ascending("part"), - messageID: assistantMsg.id, - sessionID: assistantMsg.sessionID, - type: "tool", - tool: value.toolName, - callID: value.id, - state: { - status: "pending", - }, - }) - toolCalls[value.id] = part as MessageV2.ToolPart - break - - case "tool-input-delta": - break - - case "tool-call": { - const match = 
toolCalls[value.toolCallId] - if (match) { - const part = await updatePart({ - ...match, - state: { - status: "running", - input: value.input, - time: { - start: Date.now(), - }, - }, - }) - toolCalls[value.toolCallId] = part as MessageV2.ToolPart - } - break - } - case "tool-result": { - const match = toolCalls[value.toolCallId] - if (match && match.state.status === "running") { - await updatePart({ - ...match, - state: { - status: "completed", - input: value.input, - output: value.output.output, - metadata: value.output.metadata, - title: value.output.title, - time: { - start: match.state.time.start, - end: Date.now(), - }, - }, - }) - delete toolCalls[value.toolCallId] - const snapshot = await Snapshot.create(assistantMsg.sessionID) - if (snapshot) - await updatePart({ - id: Identifier.ascending("part"), - messageID: assistantMsg.id, - sessionID: assistantMsg.sessionID, - type: "snapshot", - snapshot, - }) - } - break + break + case "text-delta": + if (!text) { + text = { + type: "text", + text: value.textDelta, } + next.parts.push(text) + break + } else text.text += value.textDelta + break - case "tool-error": { - const match = toolCalls[value.toolCallId] - if (match && match.state.status === "running") { - await updatePart({ - ...match, - state: { - status: "error", - input: value.input, - error: (value.error as any).toString(), - time: { - start: match.state.time.start, - end: Date.now(), - }, - }, - }) - delete toolCalls[value.toolCallId] - const snapshot = await Snapshot.create(assistantMsg.sessionID) - if (snapshot) - await updatePart({ - id: Identifier.ascending("part"), - messageID: assistantMsg.id, - sessionID: assistantMsg.sessionID, - type: "snapshot", - snapshot, - }) - } - break - } - - case "error": - throw value.error - - case "start-step": - await updatePart({ - id: Identifier.ascending("part"), - messageID: assistantMsg.id, - sessionID: assistantMsg.sessionID, - type: "step-start", - }) - break - - case "finish-step": - const usage = getUsage(model, value.usage, value.providerMetadata) - assistantMsg.cost += usage.cost - assistantMsg.tokens = usage.tokens - await updatePart({ - id: Identifier.ascending("part"), - messageID: assistantMsg.id, - sessionID: assistantMsg.sessionID, - type: "step-finish", - tokens: usage.tokens, - cost: usage.cost, - }) - await updateMessage(assistantMsg) - break - - case "text-start": - currentText = { - id: Identifier.ascending("part"), - messageID: assistantMsg.id, - sessionID: assistantMsg.sessionID, - type: "text", - text: "", - time: { - start: Date.now(), - }, - } - break - - case "text": - if (currentText) { - currentText.text += value.text - await updatePart(currentText) - } - break - - case "text-end": - if (currentText && currentText.text) { - currentText.time = { - start: Date.now(), - end: Date.now(), - } - await updatePart(currentText) - } - currentText = undefined - break - - case "finish": - assistantMsg.time.completed = Date.now() - await updateMessage(assistantMsg) - break - - default: - log.info("unhandled", { - ...value, - }) - continue - } + case "tool-call": { + const [match] = next.parts.flatMap((p) => + p.type === "tool-invocation" && + p.toolInvocation.toolCallId === value.toolCallId + ? 
[p] + : [], + ) + if (!match) break + match.toolInvocation.args = value.args + match.toolInvocation.state = "call" + Bus.publish(Message.Event.PartUpdated, { + part: match, + messageID: next.id, + sessionID: next.metadata.sessionID, + }) + break } - } catch (e) { - log.error("", { - error: e, - }) - switch (true) { - case e instanceof DOMException && e.name === "AbortError": - assistantMsg.error = new MessageV2.AbortedError( - { message: e.message }, - { - cause: e, - }, - ).toObject() - break - case MessageV2.OutputLengthError.isInstance(e): - assistantMsg.error = e - break - case LoadAPIKeyError.isInstance(e): - assistantMsg.error = new MessageV2.AuthError( - { - providerID: model.id, - message: e.message, - }, - { cause: e }, - ).toObject() - break - case e instanceof Error: - assistantMsg.error = new NamedError.Unknown({ message: e.toString() }, { cause: e }).toObject() - break - default: - assistantMsg.error = new NamedError.Unknown({ message: JSON.stringify(e) }, { cause: e }) - } - Bus.publish(Event.Error, { - sessionID: assistantMsg.sessionID, - error: assistantMsg.error, - }) - } - const p = await parts(assistantMsg.sessionID, assistantMsg.id) - for (const part of p) { - if (part.type === "tool" && part.state.status !== "completed") { - updatePart({ - ...part, - state: { - status: "error", - error: "Tool execution aborted", - time: { - start: Date.now(), - end: Date.now(), - }, - input: {}, + + case "tool-call-streaming-start": + next.parts.push({ + type: "tool-invocation", + toolInvocation: { + state: "partial-call", + toolName: value.toolName, + toolCallId: value.toolCallId, + args: {}, }, }) - } + Bus.publish(Message.Event.PartUpdated, { + part: next.parts[next.parts.length - 1], + messageID: next.id, + sessionID: next.metadata.sessionID, + }) + break + + case "tool-call-delta": + continue + + // for some reason ai sdk claims to not send this part but it does + // @ts-expect-error + case "tool-result": + const match = next.parts.find( + (p) => + p.type === "tool-invocation" && + // @ts-expect-error + p.toolInvocation.toolCallId === value.toolCallId, + ) + if (match && match.type === "tool-invocation") { + match.toolInvocation = { + // @ts-expect-error + args: value.args, + // @ts-expect-error + toolCallId: value.toolCallId, + // @ts-expect-error + toolName: value.toolName, + state: "result", + // @ts-expect-error + result: value.result as string, + } + Bus.publish(Message.Event.PartUpdated, { + part: match, + messageID: next.id, + sessionID: next.metadata.sessionID, + }) + } + break + + case "finish": + log.info("message finish", { + reason: value.finishReason, + }) + const assistant = next.metadata!.assistant! 
+ const usage = getUsage( + model.info, + value.usage, + value.providerMetadata, + ) + assistant.cost += usage.cost + await updateMessage(next) + if (value.finishReason === "length") + throw new Message.OutputLengthError({}) + break + default: + l.info("unhandled", { + type: value.type, + }) + continue } - assistantMsg.time.completed = Date.now() - await updateMessage(assistantMsg) - return { info: assistantMsg, parts: p } - }, + await updateMessage(next) + } + } catch (e: any) { + log.error("stream error", { + error: e, + }) + switch (true) { + case Message.OutputLengthError.isInstance(e): + next.metadata.error = e + break + case LoadAPIKeyError.isInstance(e): + next.metadata.error = new Provider.AuthError( + { + providerID: input.providerID, + message: e.message, + }, + { cause: e }, + ).toObject() + break + case e instanceof Error: + next.metadata.error = new NamedError.Unknown( + { message: e.toString() }, + { cause: e }, + ).toObject() + break + default: + next.metadata.error = new NamedError.Unknown( + { message: JSON.stringify(e) }, + { cause: e }, + ) + } + Bus.publish(Event.Error, { + error: next.metadata.error, + }) } + next.metadata!.time.completed = Date.now() + for (const part of next.parts) { + if ( + part.type === "tool-invocation" && + part.toolInvocation.state !== "result" + ) { + part.toolInvocation = { + ...part.toolInvocation, + state: "result", + result: "request was aborted", + } + } + } + await updateMessage(next) + return next } - export async function revert(_input: { sessionID: string; messageID: string; part: number }) { - // TODO - /* + export async function revert(input: { + sessionID: string + messageID: string + part: number + }) { const message = await getMessage(input.sessionID, input.messageID) if (!message) return const part = message.parts[input.part] @@ -998,66 +813,77 @@ export namespace Session { snapshot, } }) - */ } export async function unrevert(sessionID: string) { const session = await get(sessionID) if (!session) return if (!session.revert) return - if (session.revert.snapshot) await Snapshot.restore(sessionID, session.revert.snapshot) + if (session.revert.snapshot) + await Snapshot.restore(sessionID, session.revert.snapshot) update(sessionID, (draft) => { draft.revert = undefined }) } - export async function summarize(input: { sessionID: string; providerID: string; modelID: string }) { + export async function summarize(input: { + sessionID: string + providerID: string + modelID: string + }) { using abort = lock(input.sessionID) const msgs = await messages(input.sessionID) - const lastSummary = msgs.findLast((msg) => msg.info.role === "assistant" && msg.info.summary === true) - const filtered = msgs.filter((msg) => !lastSummary || msg.info.id >= lastSummary.info.id) + const lastSummary = msgs.findLast( + (msg) => msg.metadata.assistant?.summary === true, + )?.id + const filtered = msgs.filter((msg) => !lastSummary || msg.id >= lastSummary) const model = await Provider.getModel(input.providerID, input.modelID) const app = App.info() const system = SystemPrompt.summarize(input.providerID) - const next: MessageV2.Info = { + const next: Message.Info = { id: Identifier.ascending("message"), role: "assistant", - sessionID: input.sessionID, - system, - path: { - cwd: app.path.cwd, - root: app.path.root, - }, - summary: true, - cost: 0, - modelID: input.modelID, - providerID: input.providerID, - tokens: { - input: 0, - output: 0, - reasoning: 0, - cache: { read: 0, write: 0 }, - }, - time: { - created: Date.now(), + parts: [], + metadata: { + tool: {}, 
+ sessionID: input.sessionID, + assistant: { + system, + path: { + cwd: app.path.cwd, + root: app.path.root, + }, + summary: true, + cost: 0, + modelID: input.modelID, + providerID: input.providerID, + tokens: { + input: 0, + output: 0, + reasoning: 0, + cache: { read: 0, write: 0 }, + }, + }, + time: { + created: Date.now(), + }, }, } await updateMessage(next) - const processor = createProcessor(next, model.info) - const stream = streamText({ - maxRetries: 10, + let text: Message.TextPart | undefined + const result = streamText({ abortSignal: abort.signal, model: model.language, messages: [ ...system.map( - (x): ModelMessage => ({ + (x): CoreMessage => ({ role: "system", content: x, }), ), - ...MessageV2.toModelMessage(filtered), + ...convertToCoreMessages(filtered.map(toUIMessage)), { role: "user", content: [ @@ -1068,10 +894,46 @@ export namespace Session { ], }, ], + onStepFinish: async (step) => { + const assistant = next.metadata!.assistant! + const usage = getUsage(model.info, step.usage, step.providerMetadata) + assistant.cost += usage.cost + assistant.tokens = usage.tokens + await updateMessage(next) + if (text) { + Bus.publish(Message.Event.PartUpdated, { + part: text, + messageID: next.id, + sessionID: next.metadata.sessionID, + }) + } + text = undefined + }, + async onFinish(input) { + const assistant = next.metadata!.assistant! + const usage = getUsage(model.info, input.usage, input.providerMetadata) + assistant.cost += usage.cost + assistant.tokens = usage.tokens + next.metadata!.time.completed = Date.now() + await updateMessage(next) + }, }) - const result = await processor.process(stream) - return result + for await (const value of result.fullStream) { + switch (value.type) { + case "text-delta": + if (!text) { + text = { + type: "text", + text: value.textDelta, + } + next.parts.push(text) + } else text.text += value.textDelta + + await updateMessage(next) + break + } + } } function lock(sessionID: string) { @@ -1091,25 +953,40 @@ export namespace Session { } } - function getUsage(model: ModelsDev.Model, usage: LanguageModelUsage, metadata?: ProviderMetadata) { + function getUsage( + model: ModelsDev.Model, + usage: LanguageModelUsage, + metadata?: ProviderMetadata, + ) { const tokens = { - input: usage.inputTokens ?? 0, - output: usage.outputTokens ?? 0, + input: usage.promptTokens ?? 0, + output: usage.completionTokens ?? 0, reasoning: 0, cache: { write: (metadata?.["anthropic"]?.["cacheCreationInputTokens"] ?? // @ts-expect-error metadata?.["bedrock"]?.["usage"]?.["cacheWriteInputTokens"] ?? 0) as number, - read: usage.cachedInputTokens ?? 0, + read: (metadata?.["anthropic"]?.["cacheReadInputTokens"] ?? + // @ts-expect-error + metadata?.["bedrock"]?.["usage"]?.["cacheReadInputTokens"] ?? + 0) as number, }, } return { cost: new Decimal(0) .add(new Decimal(tokens.input).mul(model.cost.input).div(1_000_000)) .add(new Decimal(tokens.output).mul(model.cost.output).div(1_000_000)) - .add(new Decimal(tokens.cache.read).mul(model.cost.cache_read ?? 0).div(1_000_000)) - .add(new Decimal(tokens.cache.write).mul(model.cost.cache_write ?? 0).div(1_000_000)) + .add( + new Decimal(tokens.cache.read) + .mul(model.cost.cache_read ?? 0) + .div(1_000_000), + ) + .add( + new Decimal(tokens.cache.write) + .mul(model.cost.cache_write ?? 
0) + .div(1_000_000), + ) .toNumber(), tokens, } @@ -1125,17 +1002,14 @@ export namespace Session { sessionID: string modelID: string providerID: string - messageID: string }) { const app = App.info() await Session.chat({ sessionID: input.sessionID, - messageID: input.messageID, providerID: input.providerID, modelID: input.modelID, parts: [ { - id: Identifier.ascending("part"), type: "text", text: PROMPT_INITIALIZE.replace("${path}", app.path.root), }, @@ -1144,3 +1018,57 @@ export namespace Session { await App.initialize() } } + +function toUIMessage(msg: Message.Info): UIMessage { + if (msg.role === "assistant") { + return { + id: msg.id, + role: "assistant", + content: "", + parts: toParts(msg.parts), + } + } + + if (msg.role === "user") { + return { + id: msg.id, + role: "user", + content: "", + parts: toParts(msg.parts), + } + } + + throw new Error("not implemented") +} + +function toParts(parts: Message.MessagePart[]): UIMessage["parts"] { + const result: UIMessage["parts"] = [] + for (const part of parts) { + switch (part.type) { + case "text": + result.push({ type: "text", text: part.text }) + break + case "file": + result.push({ + type: "file", + data: part.url, + mimeType: part.mediaType, + }) + break + case "tool-invocation": + result.push({ + type: "tool-invocation", + toolInvocation: part.toolInvocation, + }) + break + case "step-start": + result.push({ + type: "step-start", + }) + break + default: + break + } + } + return result +} diff --git a/packages/opencode/src/session/message-v2.ts b/packages/opencode/src/session/message-v2.ts deleted file mode 100644 index c74a39a7..00000000 --- a/packages/opencode/src/session/message-v2.ts +++ /dev/null @@ -1,460 +0,0 @@ -import z from "zod" -import { Bus } from "../bus" -import { NamedError } from "../util/error" -import { Message } from "./message" -import { convertToModelMessages, type ModelMessage, type UIMessage } from "ai" -import { Identifier } from "../id/id" - -export namespace MessageV2 { - export const OutputLengthError = NamedError.create("MessageOutputLengthError", z.object({})) - export const AbortedError = NamedError.create("MessageAbortedError", z.object({})) - export const AuthError = NamedError.create( - "ProviderAuthError", - z.object({ - providerID: z.string(), - message: z.string(), - }), - ) - - export const ToolStatePending = z - .object({ - status: z.literal("pending"), - }) - .openapi({ - ref: "ToolStatePending", - }) - - export type ToolStatePending = z.infer - - export const ToolStateRunning = z - .object({ - status: z.literal("running"), - input: z.any(), - title: z.string().optional(), - metadata: z.record(z.any()).optional(), - time: z.object({ - start: z.number(), - }), - }) - .openapi({ - ref: "ToolStateRunning", - }) - export type ToolStateRunning = z.infer - - export const ToolStateCompleted = z - .object({ - status: z.literal("completed"), - input: z.record(z.any()), - output: z.string(), - title: z.string(), - metadata: z.record(z.any()), - time: z.object({ - start: z.number(), - end: z.number(), - }), - }) - .openapi({ - ref: "ToolStateCompleted", - }) - export type ToolStateCompleted = z.infer - - export const ToolStateError = z - .object({ - status: z.literal("error"), - input: z.record(z.any()), - error: z.string(), - time: z.object({ - start: z.number(), - end: z.number(), - }), - }) - .openapi({ - ref: "ToolStateError", - }) - export type ToolStateError = z.infer - - export const ToolState = z - .discriminatedUnion("status", [ToolStatePending, ToolStateRunning, ToolStateCompleted, 
ToolStateError]) - .openapi({ - ref: "ToolState", - }) - - const PartBase = z.object({ - id: z.string(), - sessionID: z.string(), - messageID: z.string(), - }) - - export const SnapshotPart = PartBase.extend({ - type: z.literal("snapshot"), - snapshot: z.string(), - }).openapi({ - ref: "SnapshotPart", - }) - export type SnapshotPart = z.infer - - export const TextPart = PartBase.extend({ - type: z.literal("text"), - text: z.string(), - synthetic: z.boolean().optional(), - time: z - .object({ - start: z.number(), - end: z.number().optional(), - }) - .optional(), - }).openapi({ - ref: "TextPart", - }) - export type TextPart = z.infer - - export const ToolPart = PartBase.extend({ - type: z.literal("tool"), - callID: z.string(), - tool: z.string(), - state: ToolState, - }).openapi({ - ref: "ToolPart", - }) - export type ToolPart = z.infer - - export const FilePart = PartBase.extend({ - type: z.literal("file"), - mime: z.string(), - filename: z.string().optional(), - url: z.string(), - }).openapi({ - ref: "FilePart", - }) - export type FilePart = z.infer - - export const StepStartPart = PartBase.extend({ - type: z.literal("step-start"), - }).openapi({ - ref: "StepStartPart", - }) - export type StepStartPart = z.infer - - export const StepFinishPart = PartBase.extend({ - type: z.literal("step-finish"), - cost: z.number(), - tokens: z.object({ - input: z.number(), - output: z.number(), - reasoning: z.number(), - cache: z.object({ - read: z.number(), - write: z.number(), - }), - }), - }).openapi({ - ref: "StepFinishPart", - }) - export type StepFinishPart = z.infer - - const Base = z.object({ - id: z.string(), - sessionID: z.string(), - }) - - export const User = Base.extend({ - role: z.literal("user"), - time: z.object({ - created: z.number(), - }), - }).openapi({ - ref: "UserMessage", - }) - export type User = z.infer - - export const Part = z - .discriminatedUnion("type", [TextPart, FilePart, ToolPart, StepStartPart, StepFinishPart, SnapshotPart]) - .openapi({ - ref: "Part", - }) - export type Part = z.infer - - export const Assistant = Base.extend({ - role: z.literal("assistant"), - time: z.object({ - created: z.number(), - completed: z.number().optional(), - }), - error: z - .discriminatedUnion("name", [ - AuthError.Schema, - NamedError.Unknown.Schema, - OutputLengthError.Schema, - AbortedError.Schema, - ]) - .optional(), - system: z.string().array(), - modelID: z.string(), - providerID: z.string(), - path: z.object({ - cwd: z.string(), - root: z.string(), - }), - summary: z.boolean().optional(), - cost: z.number(), - tokens: z.object({ - input: z.number(), - output: z.number(), - reasoning: z.number(), - cache: z.object({ - read: z.number(), - write: z.number(), - }), - }), - }).openapi({ - ref: "AssistantMessage", - }) - export type Assistant = z.infer - - export const Info = z.discriminatedUnion("role", [User, Assistant]).openapi({ - ref: "Message", - }) - export type Info = z.infer - - export const Event = { - Updated: Bus.event( - "message.updated", - z.object({ - info: Info, - }), - ), - Removed: Bus.event( - "message.removed", - z.object({ - sessionID: z.string(), - messageID: z.string(), - }), - ), - PartUpdated: Bus.event( - "message.part.updated", - z.object({ - part: Part, - }), - ), - } - - export function fromV1(v1: Message.Info) { - if (v1.role === "assistant") { - const info: Assistant = { - id: v1.id, - sessionID: v1.metadata.sessionID, - role: "assistant", - time: { - created: v1.metadata.time.created, - completed: v1.metadata.time.completed, - }, - cost: 
v1.metadata.assistant!.cost, - path: v1.metadata.assistant!.path, - summary: v1.metadata.assistant!.summary, - tokens: v1.metadata.assistant!.tokens, - modelID: v1.metadata.assistant!.modelID, - providerID: v1.metadata.assistant!.providerID, - system: v1.metadata.assistant!.system, - error: v1.metadata.error, - } - const parts = v1.parts.flatMap((part): Part[] => { - const base = { - id: Identifier.ascending("part"), - messageID: v1.id, - sessionID: v1.metadata.sessionID, - } - if (part.type === "text") { - return [ - { - ...base, - type: "text", - text: part.text, - }, - ] - } - if (part.type === "step-start") { - return [ - { - ...base, - type: "step-start", - }, - ] - } - if (part.type === "tool-invocation") { - return [ - { - ...base, - type: "tool", - callID: part.toolInvocation.toolCallId, - tool: part.toolInvocation.toolName, - state: (() => { - if (part.toolInvocation.state === "partial-call") { - return { - status: "pending", - } - } - - const { title, time, ...metadata } = v1.metadata.tool[part.toolInvocation.toolCallId] ?? {} - if (part.toolInvocation.state === "call") { - return { - status: "running", - input: part.toolInvocation.args, - time: { - start: time?.start, - }, - } - } - - if (part.toolInvocation.state === "result") { - return { - status: "completed", - input: part.toolInvocation.args, - output: part.toolInvocation.result, - title, - time, - metadata, - } - } - throw new Error("unknown tool invocation state") - })(), - }, - ] - } - return [] - }) - return { - info, - parts, - } - } - - if (v1.role === "user") { - const info: User = { - id: v1.id, - sessionID: v1.metadata.sessionID, - role: "user", - time: { - created: v1.metadata.time.created, - }, - } - const parts = v1.parts.flatMap((part): Part[] => { - const base = { - id: Identifier.ascending("part"), - messageID: v1.id, - sessionID: v1.metadata.sessionID, - } - if (part.type === "text") { - return [ - { - ...base, - type: "text", - text: part.text, - }, - ] - } - if (part.type === "file") { - return [ - { - ...base, - type: "file", - mime: part.mediaType, - filename: part.filename, - url: part.url, - }, - ] - } - return [] - }) - return { info, parts } - } - - throw new Error("unknown message type") - } - - export function toModelMessage( - input: { - info: Info - parts: Part[] - }[], - ): ModelMessage[] { - const result: UIMessage[] = [] - - for (const msg of input) { - if (msg.parts.length === 0) continue - - if (msg.info.role === "user") { - result.push({ - id: msg.info.id, - role: "user", - parts: msg.parts.flatMap((part): UIMessage["parts"] => { - if (part.type === "text") - return [ - { - type: "text", - text: part.text, - }, - ] - if (part.type === "file") - return [ - { - type: "file", - url: part.url, - mediaType: part.mime, - filename: part.filename, - }, - ] - return [] - }), - }) - } - - if (msg.info.role === "assistant") { - result.push({ - id: msg.info.id, - role: "assistant", - parts: msg.parts.flatMap((part): UIMessage["parts"] => { - if (part.type === "text") - return [ - { - type: "text", - text: part.text, - }, - ] - if (part.type === "step-start") - return [ - { - type: "step-start", - }, - ] - if (part.type === "tool") { - if (part.state.status === "completed") - return [ - { - type: ("tool-" + part.tool) as `tool-${string}`, - state: "output-available", - toolCallId: part.callID, - input: part.state.input, - output: part.state.output, - }, - ] - if (part.state.status === "error") - return [ - { - type: ("tool-" + part.tool) as `tool-${string}`, - state: "output-error", - toolCallId: 
part.callID, - input: part.state.input, - errorText: part.state.error, - }, - ] - } - - return [] - }), - }) - } - } - - return convertToModelMessages(result) - } -} diff --git a/packages/opencode/src/session/message.ts b/packages/opencode/src/session/message.ts index e71c35c5..2d319e87 100644 --- a/packages/opencode/src/session/message.ts +++ b/packages/opencode/src/session/message.ts @@ -1,14 +1,12 @@ import z from "zod" +import { Bus } from "../bus" +import { Provider } from "../provider/provider" import { NamedError } from "../util/error" export namespace Message { - export const OutputLengthError = NamedError.create("MessageOutputLengthError", z.object({})) - export const AuthError = NamedError.create( - "ProviderAuthError", - z.object({ - providerID: z.string(), - message: z.string(), - }), + export const OutputLengthError = NamedError.create( + "MessageOutputLengthError", + z.object({}), ) export const ToolCall = z @@ -51,9 +49,11 @@ export namespace Message { }) export type ToolResult = z.infer - export const ToolInvocation = z.discriminatedUnion("state", [ToolCall, ToolPartialCall, ToolResult]).openapi({ - ref: "ToolInvocation", - }) + export const ToolInvocation = z + .discriminatedUnion("state", [ToolCall, ToolPartialCall, ToolResult]) + .openapi({ + ref: "ToolInvocation", + }) export type ToolInvocation = z.infer export const TextPart = z @@ -122,7 +122,14 @@ export namespace Message { export type StepStartPart = z.infer export const MessagePart = z - .discriminatedUnion("type", [TextPart, ReasoningPart, ToolInvocationPart, SourceUrlPart, FilePart, StepStartPart]) + .discriminatedUnion("type", [ + TextPart, + ReasoningPart, + ToolInvocationPart, + SourceUrlPart, + FilePart, + StepStartPart, + ]) .openapi({ ref: "MessagePart", }) @@ -140,7 +147,11 @@ export namespace Message { completed: z.number().optional(), }), error: z - .discriminatedUnion("name", [AuthError.Schema, NamedError.Unknown.Schema, OutputLengthError.Schema]) + .discriminatedUnion("name", [ + Provider.AuthError.Schema, + NamedError.Unknown.Schema, + OutputLengthError.Schema, + ]) .optional(), sessionID: z.string(), tool: z.record( @@ -186,4 +197,28 @@ export namespace Message { ref: "Message", }) export type Info = z.infer + + export const Event = { + Updated: Bus.event( + "message.updated", + z.object({ + info: Info, + }), + ), + Removed: Bus.event( + "message.removed", + z.object({ + sessionID: z.string(), + messageID: z.string(), + }), + ), + PartUpdated: Bus.event( + "message.part.updated", + z.object({ + part: MessagePart, + sessionID: z.string(), + messageID: z.string(), + }), + ), + } } diff --git a/packages/opencode/src/session/mode.ts b/packages/opencode/src/session/mode.ts deleted file mode 100644 index eb9e6927..00000000 --- a/packages/opencode/src/session/mode.ts +++ /dev/null @@ -1,69 +0,0 @@ -import { mergeDeep } from "remeda" -import { App } from "../app/app" -import { Config } from "../config/config" -import z from "zod" - -export namespace Mode { - export const Info = z - .object({ - name: z.string(), - model: z - .object({ - modelID: z.string(), - providerID: z.string(), - }) - .optional(), - prompt: z.string().optional(), - tools: z.record(z.boolean()), - }) - .openapi({ - ref: "Mode", - }) - export type Info = z.infer - const state = App.state("mode", async () => { - const cfg = await Config.get() - const mode = mergeDeep( - { - build: {}, - plan: { - tools: { - write: false, - edit: false, - patch: false, - }, - }, - }, - cfg.mode ?? 
{}, - ) - const result: Record = {} - for (const [key, value] of Object.entries(mode)) { - let item = result[key] - if (!item) - item = result[key] = { - name: key, - tools: {}, - } - const model = value.model ?? cfg.model - if (model) { - const [providerID, ...rest] = model.split("/") - const modelID = rest.join("/") - item.model = { - modelID, - providerID, - } - } - if (value.prompt) item.prompt = value.prompt - if (value.tools) item.tools = value.tools - } - - return result - }) - - export async function get(mode: string) { - return state().then((x) => x[mode]) - } - - export async function list() { - return state().then((x) => Object.values(x)) - } -} diff --git a/packages/opencode/src/session/prompt/beast.txt b/packages/opencode/src/session/prompt/beast.txt deleted file mode 100644 index 473f0286..00000000 --- a/packages/opencode/src/session/prompt/beast.txt +++ /dev/null @@ -1,117 +0,0 @@ -You are opencode, an autonomous agent - please keep going until the user's query is completely resolved, before ending your turn and yielding back to the user. - -Your thinking should be thorough and so it's fine if it's very long. However, avoid unnecessary repetition and verbosity. You should be concise, but thorough. - -You MUST iterate and keep going until the problem is solved. - -You have everything you need to resolve this problem. I want you to fully solve this autonomously before coming back to me. - -Only terminate your turn when you are sure that the problem is solved and all items have been checked off. Use the TodoWrite and TodoRead tools to track and manage steps. Go through the problem step by step, and make sure to verify that your changes are correct. Once each step is finished mark it as completed with the TodoWrite tool. NEVER end your turn without having truly and completely solved the problem, use the TodoRead tool to make sure all steps are complete, and when you say you are going to make a tool call, make sure you ACTUALLY make the tool call, instead of ending your turn. If a step is impossible to complete, mark it as cancelled using the TodoWrite tool. - -THE PROBLEM CAN NOT BE SOLVED WITHOUT EXTENSIVE INTERNET RESEARCH. - -You must use the webfetch tool to recursively gather all information from URLs provided to you by the user, as well as any links you find in the content of those pages. - -Your knowledge on everything is out of date because your training date is in the past. - -You CANNOT successfully complete this task without using Bing to verify your understanding of third party packages and dependencies is up to date. You must use the webfetch tool to search bing for how to properly use libraries, packages, frameworks, dependencies, etc. every single time you install or implement one. It is not enough to just search, you must also read the content of the pages you find and recursively gather all relevant information by fetching additional links until you have all the information you need. - -If the user request is "resume" or "continue" or "try again",use the TodoRead tool to find the next pending step. Continue from that step, and do not hand back control to the user until the entire todo list is complete and all steps are marked as complete or cancelled. Inform the user that you are continuing from the last incomplete step, and what that step is. - -Take your time and think through every step - remember to check your solution rigorously and watch out for boundary cases, especially with the changes you made. Use the sequential thinking tool if available. 
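The Mode configuration removed above resolves a model reference of the form `provider/model`; everything after the first slash is the model id, which may itself contain slashes. A minimal standalone sketch of that split (`parseModelRef` is an illustrative name, not a helper from this codebase):

```ts
// Minimal sketch of the provider/model split used by the removed Mode config.
// parseModelRef is an illustrative name, not a helper from this codebase.
function parseModelRef(ref: string): { providerID: string; modelID: string } {
  const [providerID = "", ...rest] = ref.split("/")
  // Everything after the first "/" is the model id, which may itself contain slashes.
  return { providerID, modelID: rest.join("/") }
}

// parseModelRef("openrouter/anthropic/claude-sonnet-4")
//   -> { providerID: "openrouter", modelID: "anthropic/claude-sonnet-4" }
```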
Your solution must be perfect. If not, continue working on it. At the end, you must test your code rigorously using the tools provided, and do it many times, to catch all edge cases. If it is not robust, update the plan and iterate more and make it perfect. Failing to test your code sufficiently rigorously is the NUMBER ONE failure mode on these types of tasks; run the build, and verify that the changes you made actually build; make sure you handle all edge cases, and run existing tests if they are provided. - -You MUST plan extensively before each tool call, and reflect extensively on the outcomes of the previous tool calls. DO NOT do this entire process by making tool calls only, as this can impair your ability to solve the problem and think insightfully. - -You MUST keep working until the problem is completely solved, and all steps in the todo list are complete. Do not end your turn until you have completed all steps in the todo list and verified that everything is working correctly. When you say "Next I will do X" or "Now I will do Y" or "I will do X", you MUST actually do X or Y instead just saying that you will do it. - -You MUST use the ToolRead tool to verify that all steps are complete or cancelled before ending your turn. If any steps are incomplete, you MUST continue working on them until they are all complete. - -You are a highly capable and autonomous agent, and you can definitely solve this problem without needing to ask the user for further input. - -# Workflow -1. Fetch any URL's provided by the user using the `webfetch` tool. -2. Understand the problem deeply. Carefully read the issue and think critically about what is required. Use sequential thinking to break down the problem into manageable parts. Consider the following: - - What is the expected behavior? - - What are the edge cases? - - What are the potential pitfalls? - - How does this fit into the larger context of the codebase? - - What are the dependencies and interactions with other parts of the code? -3. Investigate the codebase. Explore relevant files, search for key functions, and gather context. -4. Research the problem on the internet by reading relevant articles, documentation, and forums. -5. Develop a clear, step-by-step plan. Break down the fix into manageable, incremental steps. Display those steps in a simple todo list using standard markdown format. Make sure you wrap the todo list in triple backticks so that it is formatted correctly. -6. Implement the fix incrementally. Make small, testable code changes. -7. Debug as needed. Use debugging techniques to isolate and resolve issues. -8. Test frequently. Run tests after each change to verify correctness. -9. Iterate until the root cause is fixed and all tests pass. -10. Reflect and validate comprehensively. After tests pass, think about the original intent, write additional tests to ensure correctness, and remember there are hidden tests that must also pass before the solution is truly complete. - -Refer to the detailed sections below for more information on each step. - -## 1. Fetch Provided URLs -- If the user provides a URL, use the `webfetch` tool to retrieve the content of the provided URL. -- After fetching, review the content returned by the fetch tool. -- If you find any additional URLs or links that are relevant, use the `webfetch` tool again to retrieve those links. -- Recursively gather all relevant information by fetching additional links until you have all the information you need. - -## 2. 
Deeply Understand the Problem -Carefully read the issue and think hard about a plan to solve it before coding. Use the sequential thinking tool if available. - -## 3. Codebase Investigation -- Explore relevant files and directories. -- Search for key functions, classes, or variables related to the issue. -- Read and understand relevant code snippets. -- Identify the root cause of the problem. -- Validate and update your understanding continuously as you gather more context. - -## 4. Internet Research -- Use the `webfetch` tool to search bing by fetching the URL `https://www.bing.com/search?q=your+search+query`. -- After fetching, review the content returned by the fetch tool. -- If you find any additional URLs or links that are relevant, use the `webfetch` tool again to retrieve those links. -- Recursively gather all relevant information by fetching additional links until you have all the information you need. - -## 5. Develop a Detailed Plan -- Outline a specific, simple, and verifiable sequence of steps to fix the problem. -- Add steps using the TodoWrite tool. -- Each time you complete a step, mark it as complete using the TodoWrite tool. -- Each time you check off a step, use the TodoRead tool and display the updated todo list to the user in markdown format. -- You MUST continue on to the next step after checking off a step instead of ending your turn and asking the user what they want to do next. -- You may only end your turn when all steps in the todo list are marked as complete or cancelled. - -## 6. Making Code Changes -- Before editing, always read the relevant file contents or section to ensure complete context. -- Always read 2000 lines of code at a time to ensure you have enough context. -- Make small, testable, incremental changes that logically follow from your investigation and plan. -- When using the edit tool, include 3-5 lines of unchanged code before and after the string you want to replace, to make it unambiguous which part of the file should be edited. -- If a patch or edit is not applied correctly, attempt to reapply it. -- Always validate that your changes build and pass tests after each change. -- If the build fails or test fail, debug why before proceeding, update the plan as needed. - -## 7. Debugging -- Use the `lsp_diagnostics` tool to check for any problems in the code. -- Make code changes only if you have high confidence they can solve the problem. -- When debugging, try to determine the root cause rather than addressing symptoms. -- Debug for as long as needed to identify the root cause and identify a fix. -- Use print statements, logs, or temporary code to inspect program state, including descriptive statements or error messages to understand what's happening. -- To test hypotheses, you can also add test statements or functions. -- Revisit your assumptions if unexpected behavior occurs. - -# How to create a Todo List - Use the following format to show the todo list: - ```markdown - - [ ] Step 1: Description of the first step - - [ ] Step 2: Description of the second step - - [ ] Step 3: Description of the third step - ``` -Do not ever use HTML tags or any other formatting for the todo list, as it will not be rendered correctly. Always use the markdown format shown above. - -# Communication Guidelines -Always communicate clearly and concisely in a casual, friendly yet professional tone. - - -"Let me fetch the URL you provided to gather more information." -"Ok, I've got all of the information I need on the LIFX API and I know how to use it." 
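The research workflow above searches by fetching a Bing URL of the form `https://www.bing.com/search?q=your+search+query`. A small sketch of constructing that URL with proper encoding; the helper name is illustrative only:

```ts
// Sketch: build the Bing search URL described above, URL-encoding the query.
// bingSearchUrl is an illustrative name, not an existing helper.
function bingSearchUrl(query: string): string {
  return `https://www.bing.com/search?q=${encodeURIComponent(query)}`
}

// bingSearchUrl("bun spawn timeout") -> "https://www.bing.com/search?q=bun%20spawn%20timeout"
```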
-"Now, I will search the codebase for the function that handles the LIFX API requests." -"I need to update several files here - stand by" -"OK! Now let's run the tests to make sure everything is working correctly." -"Whelp - I see we have some problems. Let's fix those up." - diff --git a/packages/opencode/src/session/prompt/gemini.txt b/packages/opencode/src/session/prompt/gemini.txt deleted file mode 100644 index 87fe422b..00000000 --- a/packages/opencode/src/session/prompt/gemini.txt +++ /dev/null @@ -1,155 +0,0 @@ -You are opencode, an interactive CLI agent specializing in software engineering tasks. Your primary goal is to help users safely and efficiently, adhering strictly to the following instructions and utilizing your available tools. - -# Core Mandates - -- **Conventions:** Rigorously adhere to existing project conventions when reading or modifying code. Analyze surrounding code, tests, and configuration first. -- **Libraries/Frameworks:** NEVER assume a library/framework is available or appropriate. Verify its established usage within the project (check imports, configuration files like 'package.json', 'Cargo.toml', 'requirements.txt', 'build.gradle', etc., or observe neighboring files) before employing it. -- **Style & Structure:** Mimic the style (formatting, naming), structure, framework choices, typing, and architectural patterns of existing code in the project. -- **Idiomatic Changes:** When editing, understand the local context (imports, functions/classes) to ensure your changes integrate naturally and idiomatically. -- **Comments:** Add code comments sparingly. Focus on *why* something is done, especially for complex logic, rather than *what* is done. Only add high-value comments if necessary for clarity or if requested by the user. Do not edit comments that are separate from the code you are changing. *NEVER* talk to the user or describe your changes through comments. -- **Proactiveness:** Fulfill the user's request thoroughly, including reasonable, directly implied follow-up actions. -- **Confirm Ambiguity/Expansion:** Do not take significant actions beyond the clear scope of the request without confirming with the user. If asked *how* to do something, explain first, don't just do it. -- **Explaining Changes:** After completing a code modification or file operation *do not* provide summaries unless asked. -- **Path Construction:** Before using any file system tool (e.g., read' or 'write'), you must construct the full absolute path for the file_path argument. Always combine the absolute path of the project's root directory with the file's path relative to the root. For example, if the project root is /path/to/project/ and the file is foo/bar/baz.txt, the final path you must use is /path/to/project/foo/bar/baz.txt. If the user provides a relative path, you must resolve it against the root directory to create an absolute path. -- **Do Not revert changes:** Do not revert changes to the codebase unless asked to do so by the user. Only revert changes made by you if they have resulted in an error or if the user has explicitly asked you to revert the changes. - -# Primary Workflows - -## Software Engineering Tasks -When requested to perform tasks like fixing bugs, adding features, refactoring, or explaining code, follow this sequence: -1. **Understand:** Think about the user's request and the relevant codebase context. Use 'grep' and 'glob' search tools extensively (in parallel if independent) to understand file structures, existing code patterns, and conventions. 
Use 'read' to understand context and validate any assumptions you may have. -2. **Plan:** Build a coherent and grounded (based on the understanding in step 1) plan for how you intend to resolve the user's task. Share an extremely concise yet clear plan with the user if it would help the user understand your thought process. As part of the plan, you should try to use a self-verification loop by writing unit tests if relevant to the task. Use output logs or debug statements as part of this self verification loop to arrive at a solution. -3. **Implement:** Use the available tools (e.g., 'edit', 'write' 'bash' ...) to act on the plan, strictly adhering to the project's established conventions (detailed under 'Core Mandates'). -4. **Verify (Tests):** If applicable and feasible, verify the changes using the project's testing procedures. Identify the correct test commands and frameworks by examining 'README' files, build/package configuration (e.g., 'package.json'), or existing test execution patterns. NEVER assume standard test commands. -5. **Verify (Standards):** VERY IMPORTANT: After making code changes, execute the project-specific build, linting and type-checking commands (e.g., 'tsc', 'npm run lint', 'ruff check .') that you have identified for this project (or obtained from the user). This ensures code quality and adherence to standards. If unsure about these commands, you can ask the user if they'd like you to run them and if so how to. - -## New Applications - -**Goal:** Autonomously implement and deliver a visually appealing, substantially complete, and functional prototype. Utilize all tools at your disposal to implement the application. Some tools you may especially find useful are 'write', 'edit' and 'bash'. - -1. **Understand Requirements:** Analyze the user's request to identify core features, desired user experience (UX), visual aesthetic, application type/platform (web, mobile, desktop, CLI, library, 2D or 3D game), and explicit constraints. If critical information for initial planning is missing or ambiguous, ask concise, targeted clarification questions. -2. **Propose Plan:** Formulate an internal development plan. Present a clear, concise, high-level summary to the user. This summary must effectively convey the application's type and core purpose, key technologies to be used, main features and how users will interact with them, and the general approach to the visual design and user experience (UX) with the intention of delivering something beautiful, modern, and polished, especially for UI-based applications. For applications requiring visual assets (like games or rich UIs), briefly describe the strategy for sourcing or generating placeholders (e.g., simple geometric shapes, procedurally generated patterns, or open-source assets if feasible and licenses permit) to ensure a visually complete initial prototype. Ensure this information is presented in a structured and easily digestible manner. -3. **User Approval:** Obtain user approval for the proposed plan. -4. **Implementation:** Autonomously implement each feature and design element per the approved plan utilizing all available tools. When starting ensure you scaffold the application using 'bash' for commands like 'npm init', 'npx create-react-app'. Aim for full scope completion. 
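The verification steps above call for discovering the project's own test, lint, and build commands from files such as `package.json` rather than assuming standard ones. A minimal sketch of that lookup; Node's `fs` API and the function name are choices made here for illustration:

```ts
import { readFileSync } from "node:fs"
import { join } from "node:path"

// Sketch: read package.json and report which verification scripts exist,
// instead of assuming standard commands. verificationScripts is an illustrative name.
function verificationScripts(projectRoot: string): Record<string, string> {
  const pkg = JSON.parse(readFileSync(join(projectRoot, "package.json"), "utf8"))
  const scripts: Record<string, string> = pkg.scripts ?? {}
  const interesting = ["test", "lint", "typecheck", "build"]
  return Object.fromEntries(
    interesting.filter((name) => name in scripts).map((name) => [name, scripts[name]]),
  )
}

// verificationScripts("/path/to/project") might return { test: "bun test", lint: "eslint ." }
```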
Proactively create or source necessary placeholder assets (e.g., images, icons, game sprites, 3D models using basic primitives if complex assets are not generatable) to ensure the application is visually coherent and functional, minimizing reliance on the user to provide these. If the model can generate simple assets (e.g., a uniformly colored square sprite, a simple 3D cube), it should do so. Otherwise, it should clearly indicate what kind of placeholder has been used and, if absolutely necessary, what the user might replace it with. Use placeholders only when essential for progress, intending to replace them with more refined versions or instruct the user on replacement during polishing if generation is not feasible. -5. **Verify:** Review work against the original request, the approved plan. Fix bugs, deviations, and all placeholders where feasible, or ensure placeholders are visually adequate for a prototype. Ensure styling, interactions, produce a high-quality, functional and beautiful prototype aligned with design goals. Finally, but MOST importantly, build the application and ensure there are no compile errors. -6. **Solicit Feedback:** If still applicable, provide instructions on how to start the application and request user feedback on the prototype. - -# Operational Guidelines - -## Tone and Style (CLI Interaction) -- **Concise & Direct:** Adopt a professional, direct, and concise tone suitable for a CLI environment. -- **Minimal Output:** Aim for fewer than 3 lines of text output (excluding tool use/code generation) per response whenever practical. Focus strictly on the user's query. -- **Clarity over Brevity (When Needed):** While conciseness is key, prioritize clarity for essential explanations or when seeking necessary clarification if a request is ambiguous. -- **No Chitchat:** Avoid conversational filler, preambles ("Okay, I will now..."), or postambles ("I have finished the changes..."). Get straight to the action or answer. -- **Formatting:** Use GitHub-flavored Markdown. Responses will be rendered in monospace. -- **Tools vs. Text:** Use tools for actions, text output *only* for communication. Do not add explanatory comments within tool calls or code blocks unless specifically part of the required code/command itself. -- **Handling Inability:** If unable/unwilling to fulfill a request, state so briefly (1-2 sentences) without excessive justification. Offer alternatives if appropriate. - -## Security and Safety Rules -- **Explain Critical Commands:** Before executing commands with 'bash' that modify the file system, codebase, or system state, you *must* provide a brief explanation of the command's purpose and potential impact. Prioritize user understanding and safety. You should not ask permission to use the tool; the user will be presented with a confirmation dialogue upon use (you do not need to tell them this). -- **Security First:** Always apply security best practices. Never introduce code that exposes, logs, or commits secrets, API keys, or other sensitive information. - -## Tool Usage -- **File Paths:** Always use absolute paths when referring to files with tools like 'read' or 'write'. Relative paths are not supported. You must provide an absolute path. -- **Parallelism:** Execute multiple independent tool calls in parallel when feasible (i.e. searching the codebase). -- **Command Execution:** Use the 'bash' tool for running shell commands, remembering the safety rule to explain modifying commands first. 
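The file-path rule above requires absolute paths, resolving any user-supplied relative path against the project directory; the same `path.isAbsolute(...) ? ... : path.join(...)` pattern appears in the edit and glob tools later in this diff. A small sketch, assuming the working directory is passed in explicitly:

```ts
import path from "node:path"

// Sketch: resolve a possibly-relative user path against a known working directory,
// mirroring the path.isAbsolute(...) ? ... : path.join(cwd, ...) pattern used by the tools.
function resolveUserPath(input: string, cwd: string): string {
  return path.isAbsolute(input) ? input : path.join(cwd, input)
}

// resolveUserPath("foo/bar/baz.txt", "/path/to/project") -> "/path/to/project/foo/bar/baz.txt"
```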
-- **Background Processes:** Use background processes (via \`&\`) for commands that are unlikely to stop on their own, e.g. \`node server.js &\`. If unsure, ask the user. -- **Interactive Commands:** Try to avoid shell commands that are likely to require user interaction (e.g. \`git rebase -i\`). Use non-interactive versions of commands (e.g. \`npm init -y\` instead of \`npm init\`) when available, and otherwise remind the user that interactive shell commands are not supported and may cause hangs until canceled by the user. -- **Respect User Confirmations:** Most tool calls (also denoted as 'function calls') will first require confirmation from the user, where they will either approve or cancel the function call. If a user cancels a function call, respect their choice and do _not_ try to make the function call again. It is okay to request the tool call again _only_ if the user requests that same tool call on a subsequent prompt. When a user cancels a function call, assume best intentions from the user and consider inquiring if they prefer any alternative paths forward. - -## Interaction Details -- **Help Command:** The user can use '/help' to display help information. -- **Feedback:** To report a bug or provide feedback, please use the /bug command. - -# Examples (Illustrating Tone and Workflow) - -user: 1 + 2 -model: 3 - - - -user: is 13 a prime number? -model: true - - - -user: list files here. -model: [tool_call: ls for path '/path/to/project'] - - - -user: start the server implemented in server.js -model: [tool_call: bash for 'node server.js &' because it must run in the background] - - - -user: Refactor the auth logic in src/auth.py to use the requests library instead of urllib. -model: Okay, I can refactor 'src/auth.py'. -First, I'll analyze the code and check for a test safety net before planning any changes. -[tool_call: glob for path 'tests/test_auth.py'] -[tool_call: read for absolute_path '/path/to/tests/test_auth.py'] -(After analysis) -Great, 'tests/test_auth.py' exists and covers the core authentication logic. With this safety net in place, I can safely plan the refactoring. -I'll also confirm 'requests' is a dependency. -[tool_call: read for absolute_path '/path/to/requirements.txt'] -(After analysis) -Looks good, 'requests' is available. - -Here's the plan: -1. Replace the 'urllib' calls with 'requests'. -2. Add proper 'try...except' error handling for the new network calls. -3. Remove the old 'urllib' import. -4. Run the project's linter and tests to verify the changes. - -Should I proceed? -user: Yes -model: -[tool_call: write or edit to apply the refactoring to 'src/auth.py'] -Refactoring complete. Running verification... -[tool_call: bash for 'ruff check src/auth.py && pytest'] -(After verification passes) -All checks passed. This is a stable checkpoint. - - -user: Delete the temp directory. -model: I can run \`rm -rf /path/to/project/temp\`. This will permanently delete the directory and all its contents. - - - -user: Write tests for someFile.ts -model: -Okay, I can write those tests. First, I'll read \`someFile.ts\` to understand its functionality. -[tool_call: read for absolute_path '/path/to/someFile.ts' or use glob to find \`someFile.ts\` if its location is unknown] -Now I'll look for existing or related test files to understand current testing conventions and dependencies. 
-[tool_call: glob for paths ['**/*.test.ts', 'src/**/*.spec.ts'] assuming someFile.ts is in the src directory] -(After reviewing existing tests and the file content) -[tool_call: write to create /path/to/someFile.test.ts with the test code] -I've written the tests. Now I'll run the project's test command to verify them. -[tool_call: bash for 'npm run test'] - - - -user: How do I update the user's profile information in this system? -model: -I'm not immediately sure how user profile information is updated. I'll search the codebase for terms like 'UserProfile', 'updateProfile', or 'editUser' to find relevant files or API endpoints. -[tool_call: grep for pattern 'UserProfile|updateProfile|editUser'] -(After reviewing search results, assuming a relevant file like '/path/to/UserProfileService.java' was found) -Okay, \`/path/to/UserProfileService.java\` seems like the most relevant file. I'll read its content to understand how updates are handled. -[tool_call: read for absolute_path '/path/to/UserProfileService.java'] -(After reading the file) -It appears the \`updateUserProfile\` method in \`UserProfileService.java\` is responsible for this. It expects a user ID and a \`UserProfileDTO\` object... - - - -user: Where are all the 'app.config' files in this project? I need to check their settings. -model: -[tool_call: glob for pattern '**/app.config'] -(Assuming GlobTool returns a list of paths like ['/path/to/moduleA/app.config', '/path/to/moduleB/app.config']) -I found the following 'app.config' files: -- /path/to/moduleA/app.config -- /path/to/moduleB/app.config -To help you check their settings, I can read their contents. Which one would you like to start with, or should I read all of them? - - -# Final Reminder -Your core function is efficient and safe assistance. Balance extreme conciseness with the crucial need for clarity, especially regarding safety and potential system modifications. Always prioritize user control and project conventions. Never make assumptions about the contents of files; instead use 'read' to ensure you aren't making broad assumptions. Finally, you are an agent - please keep going until the user's query is completely resolved. diff --git a/packages/opencode/src/session/prompt/plan.txt b/packages/opencode/src/session/prompt/plan.txt deleted file mode 100644 index fffbfffc..00000000 --- a/packages/opencode/src/session/prompt/plan.txt +++ /dev/null @@ -1,3 +0,0 @@ - -Plan mode is active. The user indicated that they do not want you to execute yet -- you MUST NOT make any edits, run any non-readonly tools (including changing configs or making commits), or otherwise make any changes to the system. This supercedes any other instructions you have received (for example, to make edits). - diff --git a/packages/opencode/src/session/prompt/title.txt b/packages/opencode/src/session/prompt/title.txt index ac82d60a..373456a7 100644 --- a/packages/opencode/src/session/prompt/title.txt +++ b/packages/opencode/src/session/prompt/title.txt @@ -1,31 +1,11 @@ - -Generate a conversation thread title from the user message. - +Generate a short title based on the first message a user begins a conversation with. CRITICAL: Your response must be EXACTLY one line with NO line breaks, newlines, or multiple sentences. - -You are generating titles for a coding assistant conversation. 
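Both versions of the title prompt in this hunk require a single-line result of at most 50 characters. A sketch of normalizing a raw model reply to those constraints; the function name is illustrative only:

```ts
// Sketch: normalize a generated title to the prompt's constraints above,
// a single line of at most 50 characters. normalizeTitle is an illustrative name.
function normalizeTitle(raw: string): string {
  const singleLine = raw.replace(/\s+/g, " ").trim() // collapse newlines and extra spaces
  return singleLine.length <= 50 ? singleLine : singleLine.slice(0, 50).trimEnd()
}

// normalizeTitle("Debugging production 500 errors\n") -> "Debugging production 500 errors"
```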
- +Requirements: +- Maximum 50 characters +- Single line only - NO newlines or line breaks +- Summary of the user's message +- No quotes, colons, or special formatting +- Do not include explanatory text like "summary:" or similar +- Your entire response becomes the title - -- Max 50 chars, single line -- Focus on the specific action or question -- Keep technical terms, numbers, and filenames exactly as written -- Preserve HTTP status codes (401, 404, 500, etc) as numbers -- For file references, include the filename -- Avoid filler words: the, this, my, a, an, properly -- NEVER assume their tech stack or domain -- Use -ing verbs consistently for actions -- Write like a chat thread title, not a blog post - - - -"debug 500 errors in production" → "Debugging production 500 errors" -"refactor user service" → "Refactoring user service" -"why is app.js failing" → "Analyzing app.js failure" -"implement rate limiting" → "Implementing rate limiting" - - - -Return only the thread title text on a single line with no newlines, explanations, or additional formatting. -You should NEVER reply to the user's message. You can only generate titles. - +IMPORTANT: Return only the title text on a single line. Do not add any explanations, formatting, or additional text. diff --git a/packages/opencode/src/session/system.ts b/packages/opencode/src/session/system.ts index 375b627b..722964ea 100644 --- a/packages/opencode/src/session/system.ts +++ b/packages/opencode/src/session/system.ts @@ -7,17 +7,23 @@ import path from "path" import os from "os" import PROMPT_ANTHROPIC from "./prompt/anthropic.txt" -import PROMPT_BEAST from "./prompt/beast.txt" -import PROMPT_GEMINI from "./prompt/gemini.txt" import PROMPT_ANTHROPIC_SPOOF from "./prompt/anthropic_spoof.txt" import PROMPT_SUMMARIZE from "./prompt/summarize.txt" import PROMPT_TITLE from "./prompt/title.txt" export namespace SystemPrompt { - export function provider(modelID: string) { - if (modelID.includes("gpt-") || modelID.includes("o1") || modelID.includes("o3")) return [PROMPT_BEAST] - if (modelID.includes("gemini-")) return [PROMPT_GEMINI] - return [PROMPT_ANTHROPIC] + export function provider(providerID: string) { + const result = [] + switch (providerID) { + case "anthropic": + result.push(PROMPT_ANTHROPIC_SPOOF.trim()) + result.push(PROMPT_ANTHROPIC) + break + default: + result.push(PROMPT_ANTHROPIC) + break + } + return result } export async function environment() { diff --git a/packages/opencode/src/share/share.ts b/packages/opencode/src/share/share.ts index 2996e4d9..f58468ce 100644 --- a/packages/opencode/src/share/share.ts +++ b/packages/opencode/src/share/share.ts @@ -53,7 +53,9 @@ export namespace Share { export const URL = process.env["OPENCODE_API"] ?? - (Installation.isSnapshot() || Installation.isDev() ? "https://api.dev.opencode.ai" : "https://api.opencode.ai") + (Installation.isSnapshot() || Installation.isDev() + ? 
"https://api.dev.opencode.ai" + : "https://api.opencode.ai") export async function create(sessionID: string) { return fetch(`${URL}/share_create`, { @@ -64,10 +66,13 @@ export namespace Share { .then((x) => x as { url: string; secret: string }) } - export async function remove(sessionID: string, secret: string) { + export async function remove(id: string) { + const share = await Session.getShare(id).catch(() => {}) + if (!share) return + const { secret } = share return fetch(`${URL}/share_delete`, { method: "POST", - body: JSON.stringify({ sessionID, secret }), + body: JSON.stringify({ id, secret }), }).then((x) => x.json()) } } diff --git a/packages/opencode/src/snapshot/index.ts b/packages/opencode/src/snapshot/index.ts index b23de6e0..bf8ea05f 100644 --- a/packages/opencode/src/snapshot/index.ts +++ b/packages/opencode/src/snapshot/index.ts @@ -1,7 +1,14 @@ import { App } from "../app/app" -import { $ } from "bun" +import { + add, + commit, + init, + checkout, + statusMatrix, + remove, +} from "isomorphic-git" import path from "path" -import fs from "fs/promises" +import fs from "fs" import { Ripgrep } from "../file/ripgrep" import { Log } from "../util/log" @@ -9,57 +16,66 @@ export namespace Snapshot { const log = Log.create({ service: "snapshot" }) export async function create(sessionID: string) { - log.info("creating snapshot") const app = App.info() - - // not a git repo, check if too big to snapshot - if (!app.git) { - const files = await Ripgrep.files({ - cwd: app.path.cwd, - limit: 1000, - }) - log.info("found files", { count: files.length }) - if (files.length >= 1000) return - } - const git = gitdir(sessionID) - if (await fs.mkdir(git, { recursive: true })) { - await $`git init` - .env({ - ...process.env, - GIT_DIR: git, - GIT_WORK_TREE: app.path.root, + const files = await Ripgrep.files({ + cwd: app.path.cwd, + limit: app.git ? 
undefined : 1000, + }) + // not a git repo and too big to snapshot + if (!app.git && files.length === 1000) return + await init({ + dir: app.path.cwd, + gitdir: git, + fs, + }) + const status = await statusMatrix({ + fs, + gitdir: git, + dir: app.path.cwd, + }) + await add({ + fs, + gitdir: git, + parallel: true, + dir: app.path.cwd, + filepath: files, + }) + for (const [file, _head, workdir, stage] of status) { + if (workdir === 0 && stage === 1) { + log.info("remove", { file }) + await remove({ + fs, + gitdir: git, + dir: app.path.cwd, + filepath: file, }) - .quiet() - .nothrow() - log.info("initialized") + } } - - await $`git --git-dir ${git} add .`.quiet().cwd(app.path.cwd).nothrow() - log.info("added files") - - const result = - await $`git --git-dir ${git} commit -m "snapshot" --no-gpg-sign --author="opencode "` - .quiet() - .cwd(app.path.cwd) - .nothrow() - - const match = result.stdout.toString().match(/\[.+ ([a-f0-9]+)\]/) - if (!match) return - return match![1] + const result = await commit({ + fs, + gitdir: git, + dir: app.path.cwd, + message: "snapshot", + author: { + name: "opencode", + email: "mail@opencode.ai", + }, + }) + log.info("commit", { result }) + return result } - export async function restore(sessionID: string, snapshot: string) { - log.info("restore", { commit: snapshot }) + export async function restore(sessionID: string, commit: string) { + log.info("restore", { commit }) const app = App.info() - const git = gitdir(sessionID) - await $`git --git-dir=${git} checkout ${snapshot} --force`.quiet().cwd(app.path.root) - } - - export async function diff(sessionID: string, commit: string) { - const git = gitdir(sessionID) - const result = await $`git --git-dir=${git} diff -R ${commit}`.quiet().cwd(App.info().path.root) - return result.stdout.toString("utf8") + await checkout({ + fs, + gitdir: gitdir(sessionID), + dir: app.path.cwd, + ref: commit, + force: true, + }) } function gitdir(sessionID: string) { diff --git a/packages/opencode/src/storage/storage.ts b/packages/opencode/src/storage/storage.ts index 97efcef7..eee33a09 100644 --- a/packages/opencode/src/storage/storage.ts +++ b/packages/opencode/src/storage/storage.ts @@ -4,136 +4,61 @@ import { Bus } from "../bus" import path from "path" import z from "zod" import fs from "fs/promises" -import { MessageV2 } from "../session/message-v2" -import { Identifier } from "../id/id" export namespace Storage { const log = Log.create({ service: "storage" }) export const Event = { - Write: Bus.event("storage.write", z.object({ key: z.string(), content: z.any() })), + Write: Bus.event( + "storage.write", + z.object({ key: z.string(), content: z.any() }), + ), } - type Migration = (dir: string) => Promise - - const MIGRATIONS: Migration[] = [ - async (dir: string) => { - try { - const files = new Bun.Glob("session/message/*/*.json").scanSync({ - cwd: dir, - absolute: true, - }) - for (const file of files) { - const content = await Bun.file(file).json() - if (!content.metadata) continue - log.info("migrating to v2 message", { file }) - try { - const result = MessageV2.fromV1(content) - await Bun.write( - file, - JSON.stringify( - { - ...result.info, - parts: result.parts, - }, - null, - 2, - ), - ) - } catch (e) { - await fs.rename(file, file.replace("storage", "broken")) - } - } - } catch {} - }, - async (dir: string) => { - const files = new Bun.Glob("session/message/*/*.json").scanSync({ - cwd: dir, - absolute: true, - }) - for (const file of files) { - try { - const { parts, ...info } = await Bun.file(file).json() - if 
(!parts) continue - for (const part of parts) { - const id = Identifier.ascending("part") - await Bun.write( - [dir, "session", "part", info.sessionID, info.id, id + ".json"].join("/"), - JSON.stringify({ - ...part, - id, - sessionID: info.sessionID, - messageID: info.id, - ...(part.type === "tool" ? { callID: part.id } : {}), - }), - ) - } - await Bun.write(file, JSON.stringify(info, null, 2)) - } catch (e) {} - } - }, - ] - - const state = App.state("storage", async () => { + const state = App.state("storage", () => { const app = App.info() - const dir = path.normalize(path.join(app.path.data, "storage")) - await fs.mkdir(dir, { recursive: true }) - const migration = await Bun.file(path.join(dir, "migration")) - .json() - .then((x) => parseInt(x)) - .catch(() => 0) - for (let index = migration; index < MIGRATIONS.length; index++) { - log.info("running migration", { index }) - const migration = MIGRATIONS[index] - await migration(dir) - await Bun.write(path.join(dir, "migration"), (index + 1).toString()) - } + const dir = path.join(app.path.data, "storage") + log.info("init", { path: dir }) return { dir, } }) export async function remove(key: string) { - const dir = await state().then((x) => x.dir) - const target = path.join(dir, key + ".json") + const target = path.join(state().dir, key + ".json") await fs.unlink(target).catch(() => {}) } export async function removeDir(key: string) { - const dir = await state().then((x) => x.dir) - const target = path.join(dir, key) + const target = path.join(state().dir, key) await fs.rm(target, { recursive: true, force: true }).catch(() => {}) } export async function readJSON(key: string) { - const dir = await state().then((x) => x.dir) - return Bun.file(path.join(dir, key + ".json")).json() as Promise + return Bun.file(path.join(state().dir, key + ".json")).json() as Promise } export async function writeJSON(key: string, content: T) { - const dir = await state().then((x) => x.dir) - const target = path.join(dir, key + ".json") + const target = path.join(state().dir, key + ".json") const tmp = target + Date.now() + ".tmp" - await Bun.write(tmp, JSON.stringify(content, null, 2)) + await Bun.write(tmp, JSON.stringify(content)) await fs.rename(tmp, target).catch(() => {}) await fs.unlink(tmp).catch(() => {}) Bus.publish(Event.Write, { key, content }) } const glob = new Bun.Glob("**/*") - export async function list(prefix: string) { - const dir = await state().then((x) => x.dir) + export async function* list(prefix: string) { try { - const result = await Array.fromAsync( - glob.scan({ - cwd: path.join(dir, prefix), - onlyFiles: true, - }), - ).then((items) => items.map((item) => path.join(prefix, item.slice(0, -5)))) - result.sort() - return result + for await (const item of glob.scan({ + cwd: path.join(state().dir, prefix), + onlyFiles: true, + })) { + const result = path.join(prefix, item.slice(0, -5)) + yield result + } } catch { - return [] + return } } } diff --git a/packages/opencode/src/tool/bash.ts b/packages/opencode/src/tool/bash.ts index 050a5a97..3ef44bd5 100644 --- a/packages/opencode/src/tool/bash.ts +++ b/packages/opencode/src/tool/bash.ts @@ -4,6 +4,25 @@ import DESCRIPTION from "./bash.txt" import { App } from "../app/app" const MAX_OUTPUT_LENGTH = 30000 +const BANNED_COMMANDS = [ + "alias", + "curl", + "curlie", + "wget", + "axel", + "aria2c", + "nc", + "telnet", + "lynx", + "w3m", + "links", + "httpie", + "xh", + "http-prompt", + "chrome", + "firefox", + "safari", +] const DEFAULT_TIMEOUT = 1 * 60 * 1000 const MAX_TIMEOUT = 10 * 60 * 
1000 @@ -12,7 +31,12 @@ export const BashTool = Tool.define({ description: DESCRIPTION, parameters: z.object({ command: z.string().describe("The command to execute"), - timeout: z.number().min(0).max(MAX_TIMEOUT).describe("Optional timeout in milliseconds").optional(), + timeout: z + .number() + .min(0) + .max(MAX_TIMEOUT) + .describe("Optional timeout in milliseconds") + .optional(), description: z .string() .describe( @@ -21,6 +45,8 @@ }), async execute(params, ctx) { const timeout = Math.min(params.timeout ?? DEFAULT_TIMEOUT, MAX_TIMEOUT) + if (BANNED_COMMANDS.some((item) => params.command.startsWith(item))) + throw new Error(`Command '${params.command}' is not allowed`) const process = Bun.spawn({ cmd: ["bash", "-c", params.command], @@ -36,14 +62,21 @@ const stderr = await new Response(process.stderr).text() return { - title: params.command, metadata: { stderr, stdout, exit: process.exitCode, description: params.description, + title: params.command, }, - output: [`<stdout>`, stdout ?? "", `</stdout>`, `<stderr>`, stderr ?? "", `</stderr>`].join("\n"), + output: [ + `<stdout>`, + stdout ?? "", + `</stdout>`, + `<stderr>`, + stderr ?? "", + `</stderr>`, + ].join("\n"), } }, }) diff --git a/packages/opencode/src/tool/edit.ts b/packages/opencode/src/tool/edit.ts index 4b9f355e..fb02a536 100644 --- a/packages/opencode/src/tool/edit.ts +++ b/packages/opencode/src/tool/edit.ts @@ -20,8 +20,15 @@ parameters: z.object({ filePath: z.string().describe("The absolute path to the file to modify"), oldString: z.string().describe("The text to replace"), - newString: z.string().describe("The text to replace it with (must be different from oldString)"), - replaceAll: z.boolean().optional().describe("Replace all occurrences of oldString (default false)"), + newString: z + .string() + .describe( + "The text to replace it with (must be different from old_string)", + ), + replaceAll: z + .boolean() + .optional() + .describe("Replace all occurrences of old_string (default false)"), }), async execute(params, ctx) { if (!params.filePath) { @@ -33,7 +40,9 @@ } const app = App.info() - const filepath = path.isAbsolute(params.filePath) ? params.filePath : path.join(app.path.cwd, params.filePath) + const filepath = path.isAbsolute(params.filePath) + ? 
params.filePath + : path.join(app.path.cwd, params.filePath) await Permission.ask({ id: "edit", @@ -61,11 +70,17 @@ export const EditTool = Tool.define({ const file = Bun.file(filepath) const stats = await file.stat().catch(() => {}) if (!stats) throw new Error(`File ${filepath} not found`) - if (stats.isDirectory()) throw new Error(`Path is a directory, not a file: ${filepath}`) + if (stats.isDirectory()) + throw new Error(`Path is a directory, not a file: ${filepath}`) await FileTime.assert(ctx.sessionID, filepath) contentOld = await file.text() - contentNew = replace(contentOld, params.oldString, params.newString, params.replaceAll) + contentNew = replace( + contentOld, + params.oldString, + params.newString, + params.replaceAll, + ) await file.write(contentNew) await Bus.publish(File.Event.Edited, { file: filepath, @@ -73,7 +88,9 @@ export const EditTool = Tool.define({ contentNew = await file.text() })() - const diff = trimDiff(createTwoFilesPatch(filepath, filepath, contentOld, contentNew)) + const diff = trimDiff( + createTwoFilesPatch(filepath, filepath, contentOld, contentNew), + ) FileTime.read(ctx.sessionID, filepath) @@ -86,24 +103,24 @@ export const EditTool = Tool.define({ output += `\nThis file has errors, please fix\n\n${issues.map(LSP.Diagnostic.pretty).join("\n")}\n\n` continue } - output += `\n\n${file}\n${issues - .filter((item) => item.severity === 1) - .map(LSP.Diagnostic.pretty) - .join("\n")}\n\n` + output += `\n\n${file}\n${issues.map(LSP.Diagnostic.pretty).join("\n")}\n\n` } return { metadata: { diagnostics, diff, + title: `${path.relative(app.path.root, filepath)}`, }, - title: `${path.relative(app.path.root, filepath)}`, output, } }, }) -export type Replacer = (content: string, find: string) => Generator +export type Replacer = ( + content: string, + find: string, +) => Generator export const SimpleReplacer: Replacer = function* (_content, find) { yield find @@ -191,7 +208,10 @@ export const BlockAnchorReplacer: Replacer = function* (content, find) { } } -export const WhitespaceNormalizedReplacer: Replacer = function* (content, find) { +export const WhitespaceNormalizedReplacer: Replacer = function* ( + content, + find, +) { const normalizeWhitespace = (text: string) => text.replace(/\s+/g, " ").trim() const normalizedFind = normalizeWhitespace(find) @@ -209,7 +229,9 @@ export const WhitespaceNormalizedReplacer: Replacer = function* (content, find) // Find the actual substring in the original line that matches const words = find.trim().split(/\s+/) if (words.length > 0) { - const pattern = words.map((word) => word.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")).join("\\s+") + const pattern = words + .map((word) => word.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")) + .join("\\s+") try { const regex = new RegExp(pattern) const match = line.match(regex) @@ -248,7 +270,9 @@ export const IndentationFlexibleReplacer: Replacer = function* (content, find) { }), ) - return lines.map((line) => (line.trim().length === 0 ? line : line.slice(minIndent))).join("\n") + return lines + .map((line) => (line.trim().length === 0 ? 
line : line.slice(minIndent))) + .join("\n") } const normalizedFind = removeIndentation(find) @@ -399,7 +423,10 @@ export const ContextAwareReplacer: Replacer = function* (content, find) { } } - if (totalNonEmptyLines === 0 || matchingLines / totalNonEmptyLines >= 0.5) { + if ( + totalNonEmptyLines === 0 || + matchingLines / totalNonEmptyLines >= 0.5 + ) { yield block break // Only match the first occurrence } @@ -446,7 +473,12 @@ function trimDiff(diff: string): string { return trimmedLines.join("\n") } -export function replace(content: string, oldString: string, newString: string, replaceAll = false): string { +export function replace( + content: string, + oldString: string, + newString: string, + replaceAll = false, +): string { if (oldString === newString) { throw new Error("oldString and newString must be different") } @@ -457,10 +489,10 @@ export function replace(content: string, oldString: string, newString: string, r BlockAnchorReplacer, WhitespaceNormalizedReplacer, IndentationFlexibleReplacer, - // EscapeNormalizedReplacer, - // TrimmedBoundaryReplacer, - // ContextAwareReplacer, - // MultiOccurrenceReplacer, + EscapeNormalizedReplacer, + TrimmedBoundaryReplacer, + ContextAwareReplacer, + MultiOccurrenceReplacer, ]) { for (const search of replacer(content, oldString)) { const index = content.indexOf(search) @@ -470,7 +502,11 @@ export function replace(content: string, oldString: string, newString: string, r } const lastIndex = content.lastIndexOf(search) if (index !== lastIndex) continue - return content.substring(0, index) + newString + content.substring(index + search.length) + return ( + content.substring(0, index) + + newString + + content.substring(index + search.length) + ) } } throw new Error("oldString not found in content or was found multiple times") diff --git a/packages/opencode/src/tool/edit.txt b/packages/opencode/src/tool/edit.txt index 8bf8844d..fff2eca6 100644 --- a/packages/opencode/src/tool/edit.txt +++ b/packages/opencode/src/tool/edit.txt @@ -2,8 +2,8 @@ Performs exact string replacements in files. Usage: - You must use your `Read` tool at least once in the conversation before editing. This tool will error if you attempt an edit without reading the file. -- When editing text from Read tool output, ensure you preserve the exact indentation (tabs/spaces) as it appears AFTER the line number prefix. The line number prefix format is: spaces + line number + tab. Everything after that tab is the actual file content to match. Never include any part of the line number prefix in the oldString or newString. +- When editing text from Read tool output, ensure you preserve the exact indentation (tabs/spaces) as it appears AFTER the line number prefix. The line number prefix format is: spaces + line number + tab. Everything after that tab is the actual file content to match. Never include any part of the line number prefix in the old_string or new_string. - ALWAYS prefer editing existing files in the codebase. NEVER write new files unless explicitly required. - Only use emojis if the user explicitly requests it. Avoid adding emojis to files unless asked. -- The edit will FAIL if `oldString` is not unique in the file. Either provide a larger string with more surrounding context to make it unique or use `replaceAll` to change every instance of `oldString`. -- Use `replaceAll` for replacing and renaming strings across the file. This parameter is useful if you want to rename a variable for instance. +- The edit will FAIL if `old_string` is not unique in the file. 
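This uniqueness requirement mirrors the check in the `replace` function above, which only substitutes a candidate match when `indexOf` and `lastIndexOf` agree. A reduced sketch of that rule for plain exact-string matching:

```ts
// Reduced sketch of the uniqueness rule enforced by replace() above:
// only substitute when the search string appears exactly once.
function replaceUnique(content: string, oldString: string, newString: string): string {
  const index = content.indexOf(oldString)
  if (index === -1) throw new Error("oldString not found in content")
  if (index !== content.lastIndexOf(oldString))
    throw new Error("oldString is not unique; add surrounding context or use replaceAll")
  return content.slice(0, index) + newString + content.slice(index + oldString.length)
}

// replaceUnique("const a = 1", "a = 1", "a = 2") -> "const a = 2"
```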
Either provide a larger string with more surrounding context to make it unique or use `replace_all` to change every instance of `old_string`. +- Use `replace_all` for replacing and renaming strings across the file. This parameter is useful if you want to rename a variable for instance. diff --git a/packages/opencode/src/tool/glob.ts b/packages/opencode/src/tool/glob.ts index 6496099e..70d5df47 100644 --- a/packages/opencode/src/tool/glob.ts +++ b/packages/opencode/src/tool/glob.ts @@ -20,14 +20,16 @@ export const GlobTool = Tool.define({ async execute(params) { const app = App.info() let search = params.path ?? app.path.cwd - search = path.isAbsolute(search) ? search : path.resolve(app.path.cwd, search) + search = path.isAbsolute(search) + ? search + : path.resolve(app.path.cwd, search) const limit = 100 const files = [] let truncated = false for (const file of await Ripgrep.files({ cwd: search, - glob: [params.pattern], + glob: params.pattern, })) { if (files.length >= limit) { truncated = true @@ -51,15 +53,17 @@ export const GlobTool = Tool.define({ output.push(...files.map((f) => f.path)) if (truncated) { output.push("") - output.push("(Results are truncated. Consider using a more specific path or pattern.)") + output.push( + "(Results are truncated. Consider using a more specific path or pattern.)", + ) } } return { - title: path.relative(app.path.root, search), metadata: { count: files.length, truncated, + title: path.relative(app.path.root, search), }, output: output.join("\n"), } diff --git a/packages/opencode/src/tool/grep.ts b/packages/opencode/src/tool/grep.ts index 898e3e73..e4ceec5e 100644 --- a/packages/opencode/src/tool/grep.ts +++ b/packages/opencode/src/tool/grep.ts @@ -9,9 +9,21 @@ export const GrepTool = Tool.define({ id: "grep", description: DESCRIPTION, parameters: z.object({ - pattern: z.string().describe("The regex pattern to search for in file contents"), - path: z.string().optional().describe("The directory to search in. Defaults to the current working directory."), - include: z.string().optional().describe('File pattern to include in the search (e.g. "*.js", "*.{ts,tsx}")'), + pattern: z + .string() + .describe("The regex pattern to search for in file contents"), + path: z + .string() + .optional() + .describe( + "The directory to search in. Defaults to the current working directory.", + ), + include: z + .string() + .optional() + .describe( + 'File pattern to include in the search (e.g. 
"*.js", "*.{ts,tsx}")', + ), }), async execute(params) { if (!params.pattern) { @@ -39,8 +51,7 @@ export const GrepTool = Tool.define({ if (exitCode === 1) { return { - title: params.pattern, - metadata: { matches: 0, truncated: false }, + metadata: { matches: 0, truncated: false, title: params.pattern }, output: "No files found", } } @@ -55,11 +66,12 @@ export const GrepTool = Tool.define({ for (const line of lines) { if (!line) continue - const [filePath, lineNumStr, ...lineTextParts] = line.split(":") - if (!filePath || !lineNumStr || lineTextParts.length === 0) continue + const parts = line.split(":", 3) + if (parts.length < 3) continue - const lineNum = parseInt(lineNumStr, 10) - const lineText = lineTextParts.join(":") + const filePath = parts[0] + const lineNum = parseInt(parts[1], 10) + const lineText = parts[2] const file = Bun.file(filePath) const stats = await file.stat().catch(() => null) @@ -81,8 +93,7 @@ export const GrepTool = Tool.define({ if (finalMatches.length === 0) { return { - title: params.pattern, - metadata: { matches: 0, truncated: false }, + metadata: { matches: 0, truncated: false, title: params.pattern }, output: "No files found", } } @@ -103,14 +114,16 @@ export const GrepTool = Tool.define({ if (truncated) { outputLines.push("") - outputLines.push("(Results are truncated. Consider using a more specific path or pattern.)") + outputLines.push( + "(Results are truncated. Consider using a more specific path or pattern.)", + ) } return { - title: params.pattern, metadata: { matches: finalMatches.length, truncated, + title: params.pattern, }, output: outputLines.join("\n"), } diff --git a/packages/opencode/src/tool/ls.ts b/packages/opencode/src/tool/ls.ts index d96e27e9..bfceba85 100644 --- a/packages/opencode/src/tool/ls.ts +++ b/packages/opencode/src/tool/ls.ts @@ -16,19 +16,6 @@ export const IGNORE_PATTERNS = [ "obj/", ".idea/", ".vscode/", - ".zig-cache/", - "zig-out", - ".coverage", - "coverage/", - "vendor/", - "tmp/", - "temp/", - ".cache/", - "cache/", - "logs/", - ".venv/", - "venv/", - "env/", ] const LIMIT = 100 @@ -37,8 +24,16 @@ export const ListTool = Tool.define({ id: "list", description: DESCRIPTION, parameters: z.object({ - path: z.string().describe("The absolute path to the directory to list (must be absolute, not relative)").optional(), - ignore: z.array(z.string()).describe("List of glob patterns to ignore").optional(), + path: z + .string() + .describe( + "The absolute path to the directory to list (must be absolute, not relative)", + ) + .optional(), + ignore: z + .array(z.string()) + .describe("List of glob patterns to ignore") + .optional(), }), async execute(params) { const app = App.info() @@ -49,7 +44,8 @@ export const ListTool = Tool.define({ for await (const file of glob.scan({ cwd: searchPath, dot: true })) { if (IGNORE_PATTERNS.some((p) => file.includes(p))) continue - if (params.ignore?.some((pattern) => new Bun.Glob(pattern).match(file))) continue + if (params.ignore?.some((pattern) => new Bun.Glob(pattern).match(file))) + continue files.push(file) if (files.length >= LIMIT) break } @@ -103,10 +99,10 @@ export const ListTool = Tool.define({ const output = `${searchPath}/\n` + renderDir(".", 0) return { - title: path.relative(app.path.root, searchPath), metadata: { count: files.length, truncated: files.length >= LIMIT, + title: path.relative(app.path.root, searchPath), }, output, } diff --git a/packages/opencode/src/tool/lsp-diagnostics.ts b/packages/opencode/src/tool/lsp-diagnostics.ts index fc9699bf..97ae7a26 100644 --- 
a/packages/opencode/src/tool/lsp-diagnostics.ts +++ b/packages/opencode/src/tool/lsp-diagnostics.ts @@ -13,16 +13,20 @@ export const LspDiagnosticTool = Tool.define({ }), execute: async (args) => { const app = App.info() - const normalized = path.isAbsolute(args.path) ? args.path : path.join(app.path.cwd, args.path) + const normalized = path.isAbsolute(args.path) + ? args.path + : path.join(app.path.cwd, args.path) await LSP.touchFile(normalized, true) const diagnostics = await LSP.diagnostics() const file = diagnostics[normalized] return { - title: path.relative(app.path.root, normalized), metadata: { diagnostics, + title: path.relative(app.path.root, normalized), }, - output: file?.length ? file.map(LSP.Diagnostic.pretty).join("\n") : "No errors found", + output: file?.length + ? file.map(LSP.Diagnostic.pretty).join("\n") + : "No errors found", } }, }) diff --git a/packages/opencode/src/tool/lsp-hover.ts b/packages/opencode/src/tool/lsp-hover.ts index bb94ddb3..f5031f85 100644 --- a/packages/opencode/src/tool/lsp-hover.ts +++ b/packages/opencode/src/tool/lsp-hover.ts @@ -15,7 +15,9 @@ export const LspHoverTool = Tool.define({ }), execute: async (args) => { const app = App.info() - const file = path.isAbsolute(args.file) ? args.file : path.join(app.path.cwd, args.file) + const file = path.isAbsolute(args.file) + ? args.file + : path.join(app.path.cwd, args.file) await LSP.touchFile(file, true) const result = await LSP.hover({ ...args, @@ -23,9 +25,14 @@ export const LspHoverTool = Tool.define({ }) return { - title: path.relative(app.path.root, file) + ":" + args.line + ":" + args.character, metadata: { result, + title: + path.relative(app.path.root, file) + + ":" + + args.line + + ":" + + args.character, }, output: JSON.stringify(result, null, 2), } diff --git a/packages/opencode/src/tool/multiedit.ts b/packages/opencode/src/tool/multiedit.ts index 041893b9..f8d0f16c 100644 --- a/packages/opencode/src/tool/multiedit.ts +++ b/packages/opencode/src/tool/multiedit.ts @@ -10,7 +10,9 @@ export const MultiEditTool = Tool.define({ description: DESCRIPTION, parameters: z.object({ filePath: z.string().describe("The absolute path to the file to modify"), - edits: z.array(EditTool.parameters).describe("Array of edit operations to perform sequentially on the file"), + edits: z + .array(EditTool.parameters) + .describe("Array of edit operations to perform sequentially on the file"), }), async execute(params, ctx) { const results = [] @@ -28,9 +30,9 @@ export const MultiEditTool = Tool.define({ } const app = App.info() return { - title: path.relative(app.path.root, params.filePath), metadata: { results: results.map((r) => r.metadata), + title: path.relative(app.path.root, params.filePath), }, output: results.at(-1)!.output, } diff --git a/packages/opencode/src/tool/multiedit.txt b/packages/opencode/src/tool/multiedit.txt index bb481512..1b32bbd9 100644 --- a/packages/opencode/src/tool/multiedit.txt +++ b/packages/opencode/src/tool/multiedit.txt @@ -8,9 +8,9 @@ Before using this tool: To make multiple file edits, provide the following: 1. file_path: The absolute path to the file to modify (must be absolute, not relative) 2. edits: An array of edit operations to perform, where each edit contains: - - oldString: The text to replace (must match the file contents exactly, including all whitespace and indentation) - - newString: The edited text to replace the oldString - - replaceAll: Replace all occurrences of oldString. This parameter is optional and defaults to false. 
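A rough sketch of the sequential-edit semantics this tool description lays out, assuming one plain object per edit; the `applyEdits` helper, its error message, and the handling of an empty `old_string` are illustrative rather than taken from the patch:

```ts
// Hypothetical illustration only: each edit is applied to the output of the
// previous one, replace_all defaults to false, and a missing old_string fails.
// The real tool additionally rejects an old_string that matches more than once.
interface Edit {
  old_string: string
  new_string: string
  replace_all?: boolean
}

function applyEdits(content: string, edits: Edit[]): string {
  return edits.reduce((text, edit) => {
    if (edit.replace_all) return text.split(edit.old_string).join(edit.new_string)
    const index = text.indexOf(edit.old_string)
    if (index === -1) throw new Error("old_string not found")
    return text.slice(0, index) + edit.new_string + text.slice(index + edit.old_string.length)
  }, content)
}
```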
+ - old_string: The text to replace (must match the file contents exactly, including all whitespace and indentation) + - new_string: The edited text to replace the old_string + - replace_all: Replace all occurrences of old_string. This parameter is optional and defaults to false. IMPORTANT: - All edits are applied in sequence, in the order they are provided @@ -24,8 +24,8 @@ CRITICAL REQUIREMENTS: 3. Plan your edits carefully to avoid conflicts between sequential operations WARNING: -- The tool will fail if edits.oldString doesn't match the file contents exactly (including whitespace) -- The tool will fail if edits.oldString and edits.newString are the same +- The tool will fail if edits.old_string doesn't match the file contents exactly (including whitespace) +- The tool will fail if edits.old_string and edits.new_string are the same - Since edits are applied in sequence, ensure that earlier edits don't affect the text that later edits are trying to find When making edits: @@ -33,9 +33,9 @@ When making edits: - Do not leave the code in a broken state - Always use absolute file paths (starting with /) - Only use emojis if the user explicitly requests it. Avoid adding emojis to files unless asked. -- Use replaceAll for replacing and renaming strings across the file. This parameter is useful if you want to rename a variable for instance. +- Use replace_all for replacing and renaming strings across the file. This parameter is useful if you want to rename a variable for instance. If you want to create a new file, use: - A new file path, including dir name if needed -- First edit: empty oldString and the new file's contents as newString +- First edit: empty old_string and the new file's contents as new_string - Subsequent edits: normal edit operations on the created content diff --git a/packages/opencode/src/tool/patch.ts b/packages/opencode/src/tool/patch.ts index 11cc56c9..6266d163 100644 --- a/packages/opencode/src/tool/patch.ts +++ b/packages/opencode/src/tool/patch.ts @@ -6,7 +6,9 @@ import { FileTime } from "../file/time" import DESCRIPTION from "./patch.txt" const PatchParams = z.object({ - patchText: z.string().describe("The full patch text that describes all changes to be made"), + patchText: z + .string() + .describe("The full patch text that describes all changes to be made"), }) interface Change { @@ -40,7 +42,10 @@ function identifyFilesNeeded(patchText: string): string[] { const files: string[] = [] const lines = patchText.split("\n") for (const line of lines) { - if (line.startsWith("*** Update File:") || line.startsWith("*** Delete File:")) { + if ( + line.startsWith("*** Update File:") || + line.startsWith("*** Delete File:") + ) { const filePath = line.split(":", 2)[1]?.trim() if (filePath) files.push(filePath) } @@ -60,7 +65,10 @@ function identifyFilesAdded(patchText: string): string[] { return files } -function textToPatch(patchText: string, _currentFiles: Record): [PatchOperation[], number] { +function textToPatch( + patchText: string, + _currentFiles: Record, +): [PatchOperation[], number] { const operations: PatchOperation[] = [] const lines = patchText.split("\n") let i = 0 @@ -85,7 +93,11 @@ function textToPatch(patchText: string, _currentFiles: Record): const changes: PatchChange[] = [] i++ - while (i < lines.length && !lines[i].startsWith("@@") && !lines[i].startsWith("***")) { + while ( + i < lines.length && + !lines[i].startsWith("@@") && + !lines[i].startsWith("***") + ) { const changeLine = lines[i] if (changeLine.startsWith(" ")) { changes.push({ type: "keep", 
content: changeLine.substring(1) }) @@ -139,7 +151,10 @@ function textToPatch(patchText: string, _currentFiles: Record): return [operations, fuzz] } -function patchToCommit(operations: PatchOperation[], currentFiles: Record): Commit { +function patchToCommit( + operations: PatchOperation[], + currentFiles: Record, +): Commit { const changes: Record = {} for (const op of operations) { @@ -158,7 +173,9 @@ function patchToCommit(operations: PatchOperation[], currentFiles: Record line.includes(hunk.contextLine)) + const contextIndex = lines.findIndex((line) => + line.includes(hunk.contextLine), + ) if (contextIndex === -1) { throw new Error(`Context line not found: ${hunk.contextLine}`) } @@ -187,7 +204,11 @@ function patchToCommit(operations: PatchOperation[], currentFiles: Record 3) { - throw new Error(`patch contains fuzzy matches (fuzz level: ${fuzz}). Please make your context lines more precise`) + throw new Error( + `patch contains fuzzy matches (fuzz level: ${fuzz}). Please make your context lines more precise`, + ) } // Convert patch to commit @@ -320,7 +343,11 @@ export const PatchTool = Tool.define({ const newContent = change.new_content || "" // Calculate diff statistics - const [, additions, removals] = generateDiff(oldContent, newContent, filePath) + const [, additions, removals] = generateDiff( + oldContent, + newContent, + filePath, + ) totalAdditions += additions totalRemovals += removals @@ -331,11 +358,11 @@ export const PatchTool = Tool.define({ const output = result return { - title: `${filesToRead.length} files`, metadata: { changed: changedFiles, additions: totalAdditions, removals: totalRemovals, + title: `${filesToRead.length} files`, }, output, } diff --git a/packages/opencode/src/tool/read.ts b/packages/opencode/src/tool/read.ts index 81414186..f524389e 100644 --- a/packages/opencode/src/tool/read.ts +++ b/packages/opencode/src/tool/read.ts @@ -7,6 +7,7 @@ import { FileTime } from "../file/time" import DESCRIPTION from "./read.txt" import { App } from "../app/app" +const MAX_READ_SIZE = 250 * 1024 const DEFAULT_READ_LIMIT = 2000 const MAX_LINE_LENGTH = 2000 @@ -15,8 +16,14 @@ export const ReadTool = Tool.define({ description: DESCRIPTION, parameters: z.object({ filePath: z.string().describe("The path to the file to read"), - offset: z.number().describe("The line number to start reading from (0-based)").optional(), - limit: z.number().describe("The number of lines to read (defaults to 2000)").optional(), + offset: z + .number() + .describe("The line number to start reading from (0-based)") + .optional(), + limit: z + .number() + .describe("The number of lines to read (defaults to 2000)") + .optional(), }), async execute(params, ctx) { let filePath = params.filePath @@ -33,25 +40,38 @@ export const ReadTool = Tool.define({ const suggestions = dirEntries .filter( (entry) => - entry.toLowerCase().includes(base.toLowerCase()) || base.toLowerCase().includes(entry.toLowerCase()), + entry.toLowerCase().includes(base.toLowerCase()) || + base.toLowerCase().includes(entry.toLowerCase()), ) .map((entry) => path.join(dir, entry)) .slice(0, 3) if (suggestions.length > 0) { - throw new Error(`File not found: ${filePath}\n\nDid you mean one of these?\n${suggestions.join("\n")}`) + throw new Error( + `File not found: ${filePath}\n\nDid you mean one of these?\n${suggestions.join("\n")}`, + ) } throw new Error(`File not found: ${filePath}`) } + const stats = await file.stat() + if (stats.size > MAX_READ_SIZE) + throw new Error( + `File is too large (${stats.size} bytes). 
Maximum size is ${MAX_READ_SIZE} bytes`, + ) const limit = params.limit ?? DEFAULT_READ_LIMIT const offset = params.offset || 0 const isImage = isImageFile(filePath) - if (isImage) throw new Error(`This is an image file of type: ${isImage}\nUse a different tool to process images`) + if (isImage) + throw new Error( + `This is an image file of type: ${isImage}\nUse a different tool to process images`, + ) const lines = await file.text().then((text) => text.split("\n")) const raw = lines.slice(offset, offset + limit).map((line) => { - return line.length > MAX_LINE_LENGTH ? line.substring(0, MAX_LINE_LENGTH) + "..." : line + return line.length > MAX_LINE_LENGTH + ? line.substring(0, MAX_LINE_LENGTH) + "..." + : line }) const content = raw.map((line, index) => { return `${(index + offset + 1).toString().padStart(5, "0")}| ${line}` @@ -62,19 +82,21 @@ export const ReadTool = Tool.define({ output += content.join("\n") if (lines.length > offset + content.length) { - output += `\n\n(File has more lines. Use 'offset' parameter to read beyond line ${offset + content.length})` + output += `\n\n(File has more lines. Use 'offset' parameter to read beyond line ${ + offset + content.length + })` } output += "\n" // just warms the lsp client - LSP.touchFile(filePath, false) + await LSP.touchFile(filePath, false) FileTime.read(ctx.sessionID, filePath) return { - title: path.relative(App.info().path.root, filePath), output, metadata: { preview, + title: path.relative(App.info().path.root, filePath), }, } }, diff --git a/packages/opencode/src/tool/read.txt b/packages/opencode/src/tool/read.txt index be9e9e0c..d1bf8c5d 100644 --- a/packages/opencode/src/tool/read.txt +++ b/packages/opencode/src/tool/read.txt @@ -2,7 +2,7 @@ Reads a file from the local filesystem. You can access any file directly by usin Assume this tool is able to read all files on the machine. If the User provides a path to a file assume that path is valid. It is okay to read a file that does not exist; an error will be returned. 
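A small sketch of the numbering and truncation behaviour added in read.ts above; the constants mirror the diff, while `renderRead` is an assumed name used only for illustration:

```ts
// Slice by offset/limit, clip lines longer than 2000 chars, and prefix each
// line with a zero-padded 1-based line number, matching the read tool output.
const DEFAULT_READ_LIMIT = 2000
const MAX_LINE_LENGTH = 2000

function renderRead(text: string, offset = 0, limit = DEFAULT_READ_LIMIT): string {
  return text
    .split("\n")
    .slice(offset, offset + limit)
    .map((line) => (line.length > MAX_LINE_LENGTH ? line.slice(0, MAX_LINE_LENGTH) + "..." : line))
    .map((line, i) => `${(i + offset + 1).toString().padStart(5, "0")}| ${line}`)
    .join("\n")
}
```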
Usage: -- The filePath parameter must be an absolute path, not a relative path +- The file_path parameter must be an absolute path, not a relative path - By default, it reads up to 2000 lines starting from the beginning of the file - You can optionally specify a line offset and limit (especially handy for long files), but it's recommended to read the whole file by not providing these parameters - Any lines longer than 2000 characters will be truncated diff --git a/packages/opencode/src/tool/task.ts b/packages/opencode/src/tool/task.ts index c26ca7e5..2796d0fe 100644 --- a/packages/opencode/src/tool/task.ts +++ b/packages/opencode/src/tool/task.ts @@ -3,33 +3,41 @@ import DESCRIPTION from "./task.txt" import { z } from "zod" import { Session } from "../session" import { Bus } from "../bus" -import { MessageV2 } from "../session/message-v2" -import { Identifier } from "../id/id" +import { Message } from "../session/message" export const TaskTool = Tool.define({ id: "task", description: DESCRIPTION, parameters: z.object({ - description: z.string().describe("A short (3-5 words) description of the task"), + description: z + .string() + .describe("A short (3-5 words) description of the task"), prompt: z.string().describe("The task for the agent to perform"), }), async execute(params, ctx) { const session = await Session.create(ctx.sessionID) const msg = await Session.getMessage(ctx.sessionID, ctx.messageID) - if (msg.role !== "assistant") throw new Error("Not an assistant message") + const metadata = msg.metadata.assistant! - const messageID = Identifier.ascending("message") - const parts: Record = {} - const unsub = Bus.subscribe(MessageV2.Event.PartUpdated, async (evt) => { - if (evt.properties.part.sessionID !== session.id) return - if (evt.properties.part.messageID === messageID) return - if (evt.properties.part.type !== "tool") return - parts[evt.properties.part.id] = evt.properties.part + function summary(input: Message.Info) { + const result = [] + + for (const part of input.parts) { + if (part.type === "tool-invocation") { + result.push({ + toolInvocation: part.toolInvocation, + metadata: input.metadata.tool[part.toolInvocation.toolCallId], + }) + } + } + return result + } + + const unsub = Bus.subscribe(Message.Event.Updated, async (evt) => { + if (evt.properties.info.metadata.sessionID !== session.id) return ctx.metadata({ title: params.description, - metadata: { - summary: Object.values(parts).sort((a, b) => a.id?.localeCompare(b.id)), - }, + summary: summary(evt.properties.info), }) }) @@ -37,13 +45,11 @@ export const TaskTool = Tool.define({ Session.abort(session.id) }) const result = await Session.chat({ - messageID, sessionID: session.id, - modelID: msg.modelID, - providerID: msg.providerID, + modelID: metadata.modelID, + providerID: metadata.providerID, parts: [ { - id: Identifier.ascending("part"), type: "text", text: params.prompt, }, @@ -51,9 +57,9 @@ export const TaskTool = Tool.define({ }) unsub() return { - title: params.description, metadata: { - summary: result.parts.filter((x) => x.type === "tool"), + title: params.description, + summary: summary(result), }, output: result.parts.findLast((x) => x.type === "text")!.text, } diff --git a/packages/opencode/src/tool/todo.ts b/packages/opencode/src/tool/todo.ts index 8a330c2d..33ac3d12 100644 --- a/packages/opencode/src/tool/todo.ts +++ b/packages/opencode/src/tool/todo.ts @@ -5,8 +5,12 @@ import { App } from "../app/app" const TodoInfo = z.object({ content: z.string().min(1).describe("Brief description of the task"), - 
status: z.enum(["pending", "in_progress", "completed", "cancelled"]).describe("Current status of the task"), - priority: z.enum(["high", "medium", "low"]).describe("Priority level of the task"), + status: z + .enum(["pending", "in_progress", "completed"]) + .describe("Current status of the task"), + priority: z + .enum(["high", "medium", "low"]) + .describe("Priority level of the task"), id: z.string().describe("Unique identifier for the todo item"), }) type TodoInfo = z.infer @@ -28,9 +32,9 @@ export const TodoWriteTool = Tool.define({ const todos = state() todos[opts.sessionID] = params.todos return { - title: `${params.todos.filter((x) => x.status !== "completed").length} todos`, output: JSON.stringify(params.todos, null, 2), metadata: { + title: `${params.todos.filter((x) => x.status !== "completed").length} todos`, todos: params.todos, }, } @@ -44,9 +48,9 @@ export const TodoReadTool = Tool.define({ async execute(_params, opts) { const todos = state()[opts.sessionID] ?? [] return { - title: `${todos.filter((x) => x.status !== "completed").length} todos`, metadata: { todos, + title: `${todos.filter((x) => x.status !== "completed").length} todos`, }, output: JSON.stringify(todos, null, 2), } diff --git a/packages/opencode/src/tool/tool.ts b/packages/opencode/src/tool/tool.ts index f44322ed..8c1cbf48 100644 --- a/packages/opencode/src/tool/tool.ts +++ b/packages/opencode/src/tool/tool.ts @@ -2,15 +2,19 @@ import type { StandardSchemaV1 } from "@standard-schema/spec" export namespace Tool { interface Metadata { + title: string [key: string]: any } export type Context = { sessionID: string messageID: string abort: AbortSignal - metadata(input: { title?: string; metadata?: M }): void + metadata(meta: M): void } - export interface Info { + export interface Info< + Parameters extends StandardSchemaV1 = StandardSchemaV1, + M extends Metadata = Metadata, + > { id: string description: string parameters: Parameters @@ -18,15 +22,15 @@ export namespace Tool { args: StandardSchemaV1.InferOutput, ctx: Context, ): Promise<{ - title: string metadata: M output: string }> } - export function define( - input: Info, - ): Info { + export function define< + Parameters extends StandardSchemaV1, + Result extends Metadata, + >(input: Info): Info { return input } } diff --git a/packages/opencode/src/tool/webfetch.ts b/packages/opencode/src/tool/webfetch.ts index 235d2113..5b7b9f9d 100644 --- a/packages/opencode/src/tool/webfetch.ts +++ b/packages/opencode/src/tool/webfetch.ts @@ -14,7 +14,9 @@ export const WebFetchTool = Tool.define({ url: z.string().describe("The URL to fetch content from"), format: z .enum(["text", "markdown", "html"]) - .describe("The format to return the content in (text, markdown, or html)"), + .describe( + "The format to return the content in (text, markdown, or html)", + ), timeout: z .number() .min(0) @@ -24,11 +26,17 @@ export const WebFetchTool = Tool.define({ }), async execute(params, ctx) { // Validate URL - if (!params.url.startsWith("http://") && !params.url.startsWith("https://")) { + if ( + !params.url.startsWith("http://") && + !params.url.startsWith("https://") + ) { throw new Error("URL must start with http:// or https://") } - const timeout = Math.min((params.timeout ?? DEFAULT_TIMEOUT / 1000) * 1000, MAX_TIMEOUT) + const timeout = Math.min( + (params.timeout ?? 
DEFAULT_TIMEOUT / 1000) * 1000, + MAX_TIMEOUT, + ) const controller = new AbortController() const timeoutId = setTimeout(() => controller.abort(), timeout) @@ -38,7 +46,8 @@ export const WebFetchTool = Tool.define({ headers: { "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36", - Accept: "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8", + Accept: + "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8", "Accept-Language": "en-US,en;q=0.9", }, }) @@ -70,14 +79,16 @@ export const WebFetchTool = Tool.define({ const text = await extractTextFromHTML(content) return { output: text, - title, - metadata: {}, + metadata: { + title, + }, } } return { output: content, - title, - metadata: {}, + metadata: { + title, + }, } case "markdown": @@ -85,28 +96,32 @@ export const WebFetchTool = Tool.define({ const markdown = convertHTMLToMarkdown(content) return { output: markdown, - title, - metadata: {}, + metadata: { + title, + }, } } return { output: "```\n" + content + "\n```", - title, - metadata: {}, + metadata: { + title, + }, } case "html": return { output: content, - title, - metadata: {}, + metadata: { + title, + }, } default: return { output: content, - title, - metadata: {}, + metadata: { + title, + }, } } }, @@ -128,7 +143,16 @@ async function extractTextFromHTML(html: string) { .on("*", { element(element) { // Reset skip flag when entering other elements - if (!["script", "style", "noscript", "iframe", "object", "embed"].includes(element.tagName)) { + if ( + ![ + "script", + "style", + "noscript", + "iframe", + "object", + "embed", + ].includes(element.tagName) + ) { skipContent = false } }, diff --git a/packages/opencode/src/tool/write.ts b/packages/opencode/src/tool/write.ts index be92d626..b0515805 100644 --- a/packages/opencode/src/tool/write.ts +++ b/packages/opencode/src/tool/write.ts @@ -13,12 +13,18 @@ export const WriteTool = Tool.define({ id: "write", description: DESCRIPTION, parameters: z.object({ - filePath: z.string().describe("The absolute path to the file to write (must be absolute, not relative)"), + filePath: z + .string() + .describe( + "The absolute path to the file to write (must be absolute, not relative)", + ), content: z.string().describe("The content to write to the file"), }), async execute(params, ctx) { const app = App.info() - const filepath = path.isAbsolute(params.filePath) ? params.filePath : path.join(app.path.cwd, params.filePath) + const filepath = path.isAbsolute(params.filePath) + ? params.filePath + : path.join(app.path.cwd, params.filePath) const file = Bun.file(filepath) const exists = await file.exists() @@ -27,7 +33,9 @@ export const WriteTool = Tool.define({ await Permission.ask({ id: "write", sessionID: ctx.sessionID, - title: exists ? "Overwrite this file: " + filepath : "Create new file: " + filepath, + title: exists + ? 
"Overwrite this file: " + filepath + : "Create new file: " + filepath, metadata: { filePath: filepath, content: params.content, @@ -54,11 +62,11 @@ export const WriteTool = Tool.define({ } return { - title: path.relative(app.path.root, filepath), metadata: { diagnostics, filepath, exists: exists, + title: path.relative(app.path.root, filepath), }, output, } diff --git a/packages/opencode/src/trace/index.ts b/packages/opencode/src/trace/index.ts deleted file mode 100644 index 8dba93d5..00000000 --- a/packages/opencode/src/trace/index.ts +++ /dev/null @@ -1,53 +0,0 @@ -import { Global } from "../global" -import { Installation } from "../installation" -import path from "path" - -export namespace Trace { - export function init() { - if (!Installation.isDev()) return - const writer = Bun.file(path.join(Global.Path.data, "log", "fetch.log")).writer() - - const originalFetch = globalThis.fetch - // @ts-expect-error - globalThis.fetch = async (input: RequestInfo | URL, init?: RequestInit) => { - const url = typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url - const method = init?.method || "GET" - - const urlObj = new URL(url) - - writer.write(`\n${method} ${urlObj.pathname}${urlObj.search} HTTP/1.1\n`) - writer.write(`Host: ${urlObj.host}\n`) - - if (init?.headers) { - if (init.headers instanceof Headers) { - init.headers.forEach((value, key) => { - writer.write(`${key}: ${value}\n`) - }) - } else { - for (const [key, value] of Object.entries(init.headers)) { - writer.write(`${key}: ${value}\n`) - } - } - } - - if (init?.body) { - writer.write(`\n${init.body}`) - } - writer.flush() - const response = await originalFetch(input, init) - const clonedResponse = response.clone() - writer.write(`\nHTTP/1.1 ${response.status} ${response.statusText}\n`) - response.headers.forEach((value, key) => { - writer.write(`${key}: ${value}\n`) - }) - if (clonedResponse.body) { - clonedResponse.text().then(async (x) => { - writer.write(`\n${x}\n`) - }) - } - writer.flush() - - return response - } - } -} diff --git a/packages/opencode/src/util/error.ts b/packages/opencode/src/util/error.ts index 53b434c6..be8764ee 100644 --- a/packages/opencode/src/util/error.ts +++ b/packages/opencode/src/util/error.ts @@ -7,7 +7,10 @@ export abstract class NamedError extends Error { abstract schema(): ZodSchema abstract toObject(): { name: string; data: any } - static create(name: Name, data: Data) { + static create( + name: Name, + data: Data, + ) { const schema = z .object({ name: z.literal(name), diff --git a/packages/opencode/src/util/filesystem.ts b/packages/opencode/src/util/filesystem.ts index d5149cf3..c4fd163c 100644 --- a/packages/opencode/src/util/filesystem.ts +++ b/packages/opencode/src/util/filesystem.ts @@ -1,17 +1,7 @@ import { exists } from "fs/promises" -import { dirname, join, relative } from "path" +import { dirname, join } from "path" export namespace Filesystem { - export function overlaps(a: string, b: string) { - const relA = relative(a, b) - const relB = relative(b, a) - return !relA || !relA.startsWith("..") || !relB || !relB.startsWith("..") - } - - export function contains(parent: string, child: string) { - return relative(parent, child).startsWith("..") - } - export async function findUp(target: string, start: string, stop?: string) { let current = start const result = [] @@ -26,21 +16,6 @@ export namespace Filesystem { return result } - export async function* up(options: { targets: string[]; start: string; stop?: string }) { - const { targets, start, stop } = 
options - let current = start - while (true) { - for (const target of targets) { - const search = join(current, target) - if (await exists(search)) yield search - } - if (stop === current) break - const parent = dirname(current) - if (parent === current) break - current = parent - } - } - export async function globUp(pattern: string, start: string, stop?: string) { let current = start const result = [] diff --git a/packages/opencode/src/util/log.ts b/packages/opencode/src/util/log.ts index c3cb04d2..b73e2dee 100644 --- a/packages/opencode/src/util/log.ts +++ b/packages/opencode/src/util/log.ts @@ -1,60 +1,15 @@ import path from "path" import fs from "fs/promises" import { Global } from "../global" -import z from "zod" - export namespace Log { - export const Level = z.enum(["DEBUG", "INFO", "WARN", "ERROR"]).openapi({ ref: "LogLevel", description: "Log level" }) - export type Level = z.infer - - const levelPriority: Record = { - DEBUG: 0, - INFO: 1, - WARN: 2, - ERROR: 3, - } - - let currentLevel: Level = "INFO" - - export function setLevel(level: Level) { - currentLevel = level - } - - export function getLevel(): Level { - return currentLevel - } - - function shouldLog(level: Level): boolean { - return levelPriority[level] >= levelPriority[currentLevel] - } - - export type Logger = { - debug(message?: any, extra?: Record): void - info(message?: any, extra?: Record): void - error(message?: any, extra?: Record): void - warn(message?: any, extra?: Record): void - tag(key: string, value: string): Logger - clone(): Logger - time( - message: string, - extra?: Record, - ): { - stop(): void - [Symbol.dispose](): void - } - } - - const loggers = new Map() - export const Default = create({ service: "default" }) export interface Options { print: boolean - dev?: boolean - level?: Level } let logpath = "" + export function file() { return logpath } @@ -66,7 +21,7 @@ export namespace Log { if (options.print) return logpath = path.join( dir, - options.dev ? "dev.log" : new Date().toISOString().split(".")[0].replace(/:/g, "") + ".log", + new Date().toISOString().split(".")[0].replace(/:/g, "") + ".log", ) const logfile = Bun.file(logpath) await fs.truncate(logpath).catch(() => {}) @@ -79,64 +34,50 @@ export namespace Log { } async function cleanup(dir: string) { - const glob = new Bun.Glob("????-??-??T??????.log") - const files = await Array.fromAsync( - glob.scan({ - cwd: dir, - absolute: true, - }), - ) + const entries = await fs.readdir(dir, { withFileTypes: true }) + const files = entries + .filter((entry) => entry.isFile() && entry.name.endsWith(".log")) + .map((entry) => path.join(dir, entry.name)) + if (files.length <= 5) return const filesToDelete = files.slice(0, -10) - await Promise.all(filesToDelete.map((file) => fs.unlink(file).catch(() => {}))) + + await Promise.all( + filesToDelete.map((file) => fs.unlink(file).catch(() => {})), + ) } let last = Date.now() export function create(tags?: Record) { tags = tags || {} - const service = tags["service"] - if (service && typeof service === "string") { - const cached = loggers.get(service) - if (cached) { - return cached - } - } - function build(message: any, extra?: Record) { const prefix = Object.entries({ ...tags, ...extra, }) .filter(([_, value]) => value !== undefined && value !== null) - .map(([key, value]) => `${key}=${typeof value === "object" ? 
JSON.stringify(value) : value}`) + .map(([key, value]) => `${key}=${value}`) .join(" ") const next = new Date() const diff = next.getTime() - last last = next.getTime() - return [next.toISOString().split(".")[0], "+" + diff + "ms", prefix, message].filter(Boolean).join(" ") + "\n" + return ( + [next.toISOString().split(".")[0], "+" + diff + "ms", prefix, message] + .filter(Boolean) + .join(" ") + "\n" + ) } - const result: Logger = { - debug(message?: any, extra?: Record) { - if (shouldLog("DEBUG")) { - process.stderr.write("DEBUG " + build(message, extra)) - } - }, + const result = { info(message?: any, extra?: Record) { - if (shouldLog("INFO")) { - process.stderr.write("INFO " + build(message, extra)) - } + process.stderr.write("INFO " + build(message, extra)) }, error(message?: any, extra?: Record) { - if (shouldLog("ERROR")) { - process.stderr.write("ERROR " + build(message, extra)) - } + process.stderr.write("ERROR " + build(message, extra)) }, warn(message?: any, extra?: Record) { - if (shouldLog("WARN")) { - process.stderr.write("WARN " + build(message, extra)) - } + process.stderr.write("WARN " + build(message, extra)) }, tag(key: string, value: string) { if (tags) tags[key] = value @@ -164,10 +105,6 @@ export namespace Log { }, } - if (service && typeof service === "string") { - loggers.set(service, result) - } - return result } } diff --git a/packages/opencode/sst-env.d.ts b/packages/opencode/sst-env.d.ts index b6a7e906..0397645b 100644 --- a/packages/opencode/sst-env.d.ts +++ b/packages/opencode/sst-env.d.ts @@ -6,4 +6,4 @@ /// import "sst" -export {} \ No newline at end of file +export {} diff --git a/packages/opencode/test/bun.test.ts b/packages/opencode/test/bun.test.ts deleted file mode 100644 index 18f0db6b..00000000 --- a/packages/opencode/test/bun.test.ts +++ /dev/null @@ -1,53 +0,0 @@ -import { describe, expect, test } from "bun:test" -import fs from "fs/promises" -import path from "path" - -describe("BunProc registry configuration", () => { - test("should not contain hardcoded registry parameters", async () => { - // Read the bun/index.ts file - const bunIndexPath = path.join(__dirname, "../src/bun/index.ts") - const content = await fs.readFile(bunIndexPath, "utf-8") - - // Verify that no hardcoded registry is present - expect(content).not.toContain("--registry=") - expect(content).not.toContain("hasNpmRcConfig") - expect(content).not.toContain("NpmRc") - }) - - test("should use Bun's default registry resolution", async () => { - // Read the bun/index.ts file - const bunIndexPath = path.join(__dirname, "../src/bun/index.ts") - const content = await fs.readFile(bunIndexPath, "utf-8") - - // Verify that it uses Bun's default resolution - expect(content).toContain("Bun's default registry resolution") - expect(content).toContain("Bun will use them automatically") - expect(content).toContain("No need to pass --registry flag") - }) - - test("should have correct command structure without registry", async () => { - // Read the bun/index.ts file - const bunIndexPath = path.join(__dirname, "../src/bun/index.ts") - const content = await fs.readFile(bunIndexPath, "utf-8") - - // Extract the install function - const installFunctionMatch = content.match(/export async function install[\s\S]*?^ }/m) - expect(installFunctionMatch).toBeTruthy() - - if (installFunctionMatch) { - const installFunction = installFunctionMatch[0] - - // Verify expected arguments are present - expect(installFunction).toContain('"add"') - expect(installFunction).toContain('"--force"') - 
expect(installFunction).toContain('"--exact"') - expect(installFunction).toContain('"--cwd"') - expect(installFunction).toContain('Global.Path.cache') - expect(installFunction).toContain('pkg + "@" + version') - - // Verify no registry argument is added - expect(installFunction).not.toContain('"--registry"') - expect(installFunction).not.toContain('args.push("--registry') - } - }) -}) diff --git a/packages/opencode/test/tool/edit.test.ts b/packages/opencode/test/tool/edit.test.ts index 6906062d..6de4f2a7 100644 --- a/packages/opencode/test/tool/edit.test.ts +++ b/packages/opencode/test/tool/edit.test.ts @@ -17,7 +17,12 @@ const testCases: TestCase[] = [ replace: 'console.log("universe");', }, { - content: ["if (condition) {", " doSomething();", " doSomethingElse();", "}"].join("\n"), + content: [ + "if (condition) {", + " doSomething();", + " doSomethingElse();", + "}", + ].join("\n"), find: [" doSomething();", " doSomethingElse();"].join("\n"), replace: [" doNewThing();", " doAnotherThing();"].join("\n"), }, @@ -48,8 +53,15 @@ const testCases: TestCase[] = [ " return result;", "}", ].join("\n"), - find: ["function calculate(a, b) {", " // different middle content", " return result;", "}"].join("\n"), - replace: ["function calculate(a, b) {", " return a * b * 2;", "}"].join("\n"), + find: [ + "function calculate(a, b) {", + " // different middle content", + " return result;", + "}", + ].join("\n"), + replace: ["function calculate(a, b) {", " return a * b * 2;", "}"].join( + "\n", + ), }, { content: [ @@ -64,7 +76,13 @@ const testCases: TestCase[] = [ "}", ].join("\n"), find: ["class MyClass {", " // different implementation", "}"].join("\n"), - replace: ["class MyClass {", " constructor() {", " this.value = 42;", " }", "}"].join("\n"), + replace: [ + "class MyClass {", + " constructor() {", + " this.value = 42;", + " }", + "}", + ].join("\n"), }, // WhitespaceNormalizedReplacer cases @@ -86,21 +104,48 @@ const testCases: TestCase[] = [ // IndentationFlexibleReplacer cases { - content: [" function nested() {", ' console.log("deeply nested");', " return true;", " }"].join( - "\n", - ), - find: ["function nested() {", ' console.log("deeply nested");', " return true;", "}"].join("\n"), - replace: ["function nested() {", ' console.log("updated");', " return false;", "}"].join("\n"), + content: [ + " function nested() {", + ' console.log("deeply nested");', + " return true;", + " }", + ].join("\n"), + find: [ + "function nested() {", + ' console.log("deeply nested");', + " return true;", + "}", + ].join("\n"), + replace: [ + "function nested() {", + ' console.log("updated");', + " return false;", + "}", + ].join("\n"), }, { - content: [" if (true) {", ' console.log("level 1");', ' console.log("level 2");', " }"].join("\n"), - find: ["if (true) {", 'console.log("level 1");', ' console.log("level 2");', "}"].join("\n"), + content: [ + " if (true) {", + ' console.log("level 1");', + ' console.log("level 2");', + " }", + ].join("\n"), + find: [ + "if (true) {", + 'console.log("level 1");', + ' console.log("level 2");', + "}", + ].join("\n"), replace: ["if (true) {", 'console.log("updated");', "}"].join("\n"), }, // replaceAll option cases { - content: ['console.log("test");', 'console.log("test");', 'console.log("test");'].join("\n"), + content: [ + 'console.log("test");', + 'console.log("test");', + 'console.log("test");', + ].join("\n"), find: 'console.log("test");', replace: 'console.log("updated");', all: true, @@ -168,7 +213,9 @@ const testCases: TestCase[] = [ // MultiOccurrenceReplacer cases 
(with replaceAll) { - content: ["debug('start');", "debug('middle');", "debug('end');"].join("\n"), + content: ["debug('start');", "debug('middle');", "debug('end');"].join( + "\n", + ), find: "debug", replace: "log", all: true, @@ -192,7 +239,9 @@ const testCases: TestCase[] = [ replace: "const value = 24;", }, { - content: ["", " if (condition) {", " doSomething();", " }", ""].join("\n"), + content: ["", " if (condition) {", " doSomething();", " }", ""].join( + "\n", + ), find: ["if (condition) {", " doSomething();", "}"].join("\n"), replace: ["if (condition) {", " doNothing();", "}"].join("\n"), }, @@ -213,7 +262,9 @@ const testCases: TestCase[] = [ " return result;", "}", ].join("\n"), - replace: ["function calculate(a, b) {", " return (a + b) * 2;", "}"].join("\n"), + replace: ["function calculate(a, b) {", " return (a + b) * 2;", "}"].join( + "\n", + ), }, { content: [ @@ -227,8 +278,15 @@ const testCases: TestCase[] = [ " }", "}", ].join("\n"), - find: ["class TestClass {", " // different implementation", " // with multiple lines", "}"].join("\n"), - replace: ["class TestClass {", " getValue() { return 42; }", "}"].join("\n"), + find: [ + "class TestClass {", + " // different implementation", + " // with multiple lines", + "}", + ].join("\n"), + replace: ["class TestClass {", " getValue() { return 42; }", "}"].join( + "\n", + ), }, // Combined edge cases for new replacers @@ -238,7 +296,9 @@ const testCases: TestCase[] = [ replace: 'console.log("updated");', }, { - content: [" ", "function test() {", " return 'value';", "}", " "].join("\n"), + content: [" ", "function test() {", " return 'value';", "}", " "].join( + "\n", + ), find: ["function test() {", "return 'value';", "}"].join("\n"), replace: ["function test() {", "return 'new value';", "}"].join("\n"), }, @@ -286,7 +346,13 @@ const testCases: TestCase[] = [ // ContextAwareReplacer - test with trailing newline in find string { - content: ["class Test {", " method1() {", " return 1;", " }", "}"].join("\n"), + content: [ + "class Test {", + " method1() {", + " return 1;", + " }", + "}", + ].join("\n"), find: [ "class Test {", " // different content", @@ -335,7 +401,12 @@ describe("EditTool Replacers", () => { replace(testCase.content, testCase.find, testCase.replace, testCase.all) }).toThrow() } else { - const result = replace(testCase.content, testCase.find, testCase.replace, testCase.all) + const result = replace( + testCase.content, + testCase.find, + testCase.replace, + testCase.all, + ) expect(result).toContain(testCase.replace) } }) diff --git a/packages/opencode/test/tool/tool.test.ts b/packages/opencode/test/tool/tool.test.ts index 88325029..4723a61d 100644 --- a/packages/opencode/test/tool/tool.test.ts +++ b/packages/opencode/test/tool/tool.test.ts @@ -42,7 +42,10 @@ describe("tool.glob", () => { describe("tool.ls", () => { test("basic", async () => { const result = await App.provide({ cwd: process.cwd() }, async () => { - return await ListTool.execute({ path: "./example", ignore: [".git"] }, ctx) + return await ListTool.execute( + { path: "./example", ignore: [".git"] }, + ctx, + ) }) expect(result.output).toMatchSnapshot() }) diff --git a/packages/tui/.gitignore b/packages/tui/.gitignore index 4685365f..aac2e0bd 100644 --- a/packages/tui/.gitignore +++ b/packages/tui/.gitignore @@ -1,2 +1 @@ opencode-test -cmd/opencode/opencode diff --git a/packages/tui/AGENTS.md b/packages/tui/AGENTS.md new file mode 100644 index 00000000..753374f9 --- /dev/null +++ b/packages/tui/AGENTS.md @@ -0,0 +1,25 @@ +# TUI Agent Guidelines + 
+## Build/Test Commands + +- **Build**: `go build ./cmd/opencode` (builds main binary) +- **Test**: `go test ./...` (runs all tests) +- **Single test**: `go test ./internal/theme -run TestLoadThemesFromJSON` (specific test) +- **Release build**: Uses `.goreleaser.yml` configuration + +## Code Style + +- **Language**: Go 1.24+ with standard formatting (`gofmt`) +- **Imports**: Group standard, third-party, local packages with blank lines +- **Naming**: Go conventions - PascalCase exports, camelCase private, ALL_CAPS constants +- **Error handling**: Return errors explicitly, use `fmt.Errorf` for wrapping +- **Structs**: Define clear interfaces, embed when appropriate +- **Testing**: Use table-driven tests, `t.TempDir()` for file operations + +## Architecture + +- **TUI Framework**: Bubble Tea v2 with Lipgloss v2 for styling +- **Client**: Generated OpenAPI client communicates with TypeScript server +- **Components**: Reusable UI components in `internal/components/` +- **Themes**: JSON-based theming system with override hierarchy +- **State**: Centralized app state with message passing diff --git a/packages/tui/cmd/opencode/main.go b/packages/tui/cmd/opencode/main.go index a0b2a376..a9283525 100644 --- a/packages/tui/cmd/opencode/main.go +++ b/packages/tui/cmd/opencode/main.go @@ -5,18 +5,14 @@ import ( "encoding/json" "log/slog" "os" - "os/signal" + "path/filepath" "strings" - "syscall" tea "github.com/charmbracelet/bubbletea/v2" - flag "github.com/spf13/pflag" "github.com/sst/opencode-sdk-go" "github.com/sst/opencode-sdk-go/option" "github.com/sst/opencode/internal/app" - "github.com/sst/opencode/internal/clipboard" "github.com/sst/opencode/internal/tui" - "github.com/sst/opencode/internal/util" ) var Version = "dev" @@ -27,11 +23,6 @@ func main() { version = "v" + Version } - var model *string = flag.String("model", "", "model to begin with") - var prompt *string = flag.String("prompt", "", "prompt to begin with") - var mode *string = flag.String("mode", "", "mode to begin with") - flag.Parse() - url := os.Getenv("OPENCODE_SERVER") appInfoStr := os.Getenv("OPENCODE_APP_INFO") @@ -42,36 +33,39 @@ func main() { os.Exit(1) } - modesStr := os.Getenv("OPENCODE_MODES") - var modes []opencode.Mode - err = json.Unmarshal([]byte(modesStr), &modes) + logfile := filepath.Join(appInfo.Path.Data, "log", "tui.log") + if _, err := os.Stat(filepath.Dir(logfile)); os.IsNotExist(err) { + err := os.MkdirAll(filepath.Dir(logfile), 0755) + if err != nil { + slog.Error("Failed to create log directory", "error", err) + os.Exit(1) + } + } + file, err := os.Create(logfile) if err != nil { - slog.Error("Failed to unmarshal modes", "error", err) + slog.Error("Failed to create log file", "error", err) os.Exit(1) } + defer file.Close() + logger := slog.New(slog.NewTextHandler(file, &slog.HandlerOptions{Level: slog.LevelDebug})) + slog.SetDefault(logger) + + slog.Debug("TUI launched", "app", appInfo) httpClient := opencode.NewClient( option.WithBaseURL(url), ) - apiHandler := util.NewAPILogHandler(httpClient, "tui", slog.LevelDebug) - logger := slog.New(apiHandler) - slog.SetDefault(logger) - - slog.Debug("TUI launched", "app", appInfoStr, "modes", modesStr) - - go func() { - err = clipboard.Init() - if err != nil { - slog.Error("Failed to initialize clipboard", "error", err) - } - }() + if err != nil { + slog.Error("Failed to create client", "error", err) + os.Exit(1) + } // Create main context for the application ctx, cancel := context.WithCancel(context.Background()) defer cancel() - app_, err := app.New(ctx, 
version, appInfo, modes, httpClient, model, prompt, mode) + app_, err := app.New(ctx, version, appInfo, httpClient) if err != nil { panic(err) } @@ -79,14 +73,10 @@ func main() { program := tea.NewProgram( tui.NewModel(app_), tea.WithAltScreen(), - // tea.WithKeyboardEnhancements(), + tea.WithKeyboardEnhancements(), tea.WithMouseCellMotion(), ) - // Set up signal handling for graceful shutdown - sigChan := make(chan os.Signal, 1) - signal.Notify(sigChan, syscall.SIGTERM, syscall.SIGINT) - go func() { stream := httpClient.Event.ListStreaming(ctx) for stream.Next() { @@ -99,13 +89,6 @@ func main() { } }() - // Handle signals in a separate goroutine - go func() { - sig := <-sigChan - slog.Info("Received signal, shutting down gracefully", "signal", sig) - program.Quit() - }() - // Run the TUI result, err := program.Run() if err != nil { diff --git a/packages/tui/go.mod b/packages/tui/go.mod index f2ad0c56..6cd1bae6 100644 --- a/packages/tui/go.mod +++ b/packages/tui/go.mod @@ -6,27 +6,20 @@ require ( github.com/BurntSushi/toml v1.5.0 github.com/alecthomas/chroma/v2 v2.18.0 github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1 - github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4 + github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.3 github.com/charmbracelet/glamour v0.10.0 - github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3 - github.com/charmbracelet/x/ansi v0.9.3 - github.com/charmbracelet/x/input v0.3.7 - github.com/google/uuid v1.6.0 + github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.1 + github.com/charmbracelet/x/ansi v0.8.0 github.com/lithammer/fuzzysearch v1.1.8 github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 github.com/muesli/reflow v0.3.0 github.com/muesli/termenv v0.16.0 github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 github.com/sst/opencode-sdk-go v0.1.0-alpha.8 - golang.org/x/image v0.28.0 + github.com/tidwall/gjson v1.14.4 rsc.io/qr v0.2.0 ) -replace ( - github.com/charmbracelet/x/input => ./input - github.com/sst/opencode-sdk-go => ./sdk -) - require golang.org/x/exp v0.0.0-20250305212735-054e65f0b394 // indirect require ( @@ -34,6 +27,7 @@ require ( github.com/atombender/go-jsonschema v0.20.0 // indirect github.com/charmbracelet/lipgloss v1.1.1-0.20250404203927-76690c660834 // indirect github.com/charmbracelet/x/exp/slice v0.0.0-20250327172914-2fdc97757edf // indirect + github.com/charmbracelet/x/input v0.3.5-0.20250424101541-abb4d9a9b197 // indirect github.com/charmbracelet/x/windows v0.2.1 // indirect github.com/dprotaso/go-yit v0.0.0-20220510233725-9ba8df137936 // indirect github.com/fsnotify/fsnotify v1.8.0 // indirect @@ -53,23 +47,23 @@ require ( github.com/sosodev/duration v1.3.1 // indirect github.com/speakeasy-api/openapi-overlay v0.9.0 // indirect github.com/spf13/cobra v1.9.1 // indirect - github.com/tidwall/gjson v1.14.4 // indirect github.com/tidwall/match v1.1.1 // indirect github.com/tidwall/pretty v1.2.1 // indirect github.com/tidwall/sjson v1.2.5 // indirect github.com/vmware-labs/yaml-jsonpath v0.3.2 // indirect - golang.org/x/mod v0.25.0 // indirect - golang.org/x/tools v0.34.0 // indirect + golang.org/x/mod v0.24.0 // indirect + golang.org/x/tools v0.31.0 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect ) require ( - github.com/atotto/clipboard v0.1.4 // indirect + github.com/atotto/clipboard v0.1.4 github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect github.com/aymerick/douceur v0.2.0 // indirect github.com/charmbracelet/colorprofile v0.3.1 // indirect - github.com/charmbracelet/x/cellbuf v0.0.14-0.20250505150409-97991a1f17d1 // indirect 
+ github.com/charmbracelet/x/cellbuf v0.0.14-0.20250501183327-ad3bc78c6a81 // indirect github.com/charmbracelet/x/term v0.2.1 // indirect + github.com/disintegration/imaging v1.6.2 github.com/dlclark/regexp2 v1.11.5 // indirect github.com/google/go-cmp v0.7.0 // indirect github.com/gorilla/css v1.0.1 // indirect @@ -81,15 +75,16 @@ require ( github.com/muesli/cancelreader v0.2.2 // indirect github.com/rivo/uniseg v0.4.7 github.com/rogpeppe/go-internal v1.14.1 // indirect - github.com/spf13/pflag v1.0.6 + github.com/spf13/pflag v1.0.6 // indirect github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect github.com/yuin/goldmark v1.7.8 // indirect github.com/yuin/goldmark-emoji v1.0.5 // indirect - golang.org/x/net v0.41.0 // indirect - golang.org/x/sync v0.15.0 // indirect - golang.org/x/sys v0.33.0 // indirect - golang.org/x/term v0.32.0 // indirect - golang.org/x/text v0.26.0 + golang.org/x/image v0.26.0 + golang.org/x/net v0.39.0 // indirect + golang.org/x/sync v0.13.0 // indirect + golang.org/x/sys v0.32.0 // indirect + golang.org/x/term v0.31.0 // indirect + golang.org/x/text v0.24.0 gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/packages/tui/go.sum b/packages/tui/go.sum index 370ea712..ac6981f2 100644 --- a/packages/tui/go.sum +++ b/packages/tui/go.sum @@ -22,24 +22,26 @@ github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuP github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4= github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1 h1:swACzss0FjnyPz1enfX56GKkLiuKg5FlyVmOLIlU2kE= github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1/go.mod h1:6HamsBKWqEC/FVHuQMHgQL+knPyvHH55HwJDHl/adMw= -github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4 h1:UgUuKKvBwgqm2ZEL+sKv/OLeavrUb4gfHgdxe6oIOno= -github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4/go.mod h1:0wWFRpsgF7vHsCukVZ5LAhZkiR4j875H6KEM2/tFQmA= +github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.3 h1:5A2e3myxXMpCES+kjEWgGsaf9VgZXjZbLi5iMTH7j40= +github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.3/go.mod h1:ZFDg5oPjyRYrPAa3iFrtP1DO8xy+LUQxd9JFHEcuwJY= github.com/charmbracelet/colorprofile v0.3.1 h1:k8dTHMd7fgw4bnFd7jXTLZrSU/CQrKnL3m+AxCzDz40= github.com/charmbracelet/colorprofile v0.3.1/go.mod h1:/GkGusxNs8VB/RSOh3fu0TJmQ4ICMMPApIIVn0KszZ0= github.com/charmbracelet/glamour v0.10.0 h1:MtZvfwsYCx8jEPFJm3rIBFIMZUfUJ765oX8V6kXldcY= github.com/charmbracelet/glamour v0.10.0/go.mod h1:f+uf+I/ChNmqo087elLnVdCiVgjSKWuXa/l6NU2ndYk= github.com/charmbracelet/lipgloss v1.1.1-0.20250404203927-76690c660834 h1:ZR7e0ro+SZZiIZD7msJyA+NjkCNNavuiPBLgerbOziE= github.com/charmbracelet/lipgloss v1.1.1-0.20250404203927-76690c660834/go.mod h1:aKC/t2arECF6rNOnaKaVU6y4t4ZeHQzqfxedE/VkVhA= -github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3 h1:W6DpZX6zSkZr0iFq6JVh1vItLoxfYtNlaxOJtWp8Kis= -github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3/go.mod h1:65HTtKURcv/ict9ZQhr6zT84JqIjMcJbyrZYHHKNfKA= -github.com/charmbracelet/x/ansi v0.9.3 h1:BXt5DHS/MKF+LjuK4huWrC6NCvHtexww7dMayh6GXd0= -github.com/charmbracelet/x/ansi v0.9.3/go.mod h1:3RQDQ6lDnROptfpWuUVIUG64bD2g2BgntdxH0Ya5TeE= -github.com/charmbracelet/x/cellbuf v0.0.14-0.20250505150409-97991a1f17d1 h1:MTSs/nsZNfZPbYk/r9hluK2BtwoqvEYruAujNVwgDv0= -github.com/charmbracelet/x/cellbuf v0.0.14-0.20250505150409-97991a1f17d1/go.mod h1:xBlh2Yi3DL3zy/2n15kITpg0YZardf/aa/hgUaIM6Rk= +github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.1 h1:D9AJJuYTN5pvz6mpIGO1ijLKpfTYSHOtKGgwoTQ4Gog= +github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.1/go.mod 
h1:tRlx/Hu0lo/j9viunCN2H+Ze6JrmdjQlXUQvvArgaOc= +github.com/charmbracelet/x/ansi v0.8.0 h1:9GTq3xq9caJW8ZrBTe0LIe2fvfLR/bYXKTx2llXn7xE= +github.com/charmbracelet/x/ansi v0.8.0/go.mod h1:wdYl/ONOLHLIVmQaxbIYEC/cRKOQyjTkowiI4blgS9Q= +github.com/charmbracelet/x/cellbuf v0.0.14-0.20250501183327-ad3bc78c6a81 h1:iGrflaL5jQW6crML+pZx/ulWAVZQR3CQoRGvFsr2Tyg= +github.com/charmbracelet/x/cellbuf v0.0.14-0.20250501183327-ad3bc78c6a81/go.mod h1:poPFOXFTsJsnLbkV3H2KxAAXT7pdjxxLujLocWjkyzM= github.com/charmbracelet/x/exp/golden v0.0.0-20250207160936-21c02780d27a h1:FsHEJ52OC4VuTzU8t+n5frMjLvpYWEznSr/u8tnkCYw= github.com/charmbracelet/x/exp/golden v0.0.0-20250207160936-21c02780d27a/go.mod h1:wDlXFlCrmJ8J+swcL/MnGUuYnqgQdW9rhSD61oNMb6U= github.com/charmbracelet/x/exp/slice v0.0.0-20250327172914-2fdc97757edf h1:rLG0Yb6MQSDKdB52aGX55JT1oi0P0Kuaj7wi1bLUpnI= github.com/charmbracelet/x/exp/slice v0.0.0-20250327172914-2fdc97757edf/go.mod h1:B3UgsnsBZS/eX42BlaNiJkD1pPOUa+oF1IYC6Yd2CEU= +github.com/charmbracelet/x/input v0.3.5-0.20250424101541-abb4d9a9b197 h1:fsWj8NF5njyMVzELc7++HsvRDvgz3VcgGAUgWBDWWWM= +github.com/charmbracelet/x/input v0.3.5-0.20250424101541-abb4d9a9b197/go.mod h1:xseGeVftoP9rVI+/8WKYrJFH6ior6iERGvklwwHz5+s= github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ= github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg= github.com/charmbracelet/x/windows v0.2.1 h1:3x7vnbpQrjpuq/4L+I4gNsG5htYoCiA5oe9hLjAij5I= @@ -52,6 +54,8 @@ github.com/davecgh/go-spew v0.0.0-20161028175848-04cdfd42973b/go.mod h1:J7Y8YcW2 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/disintegration/imaging v1.6.2 h1:w1LecBlG2Lnp8B3jk5zSuNqd7b4DXhcjwek1ei82L+c= +github.com/disintegration/imaging v1.6.2/go.mod h1:44/5580QXChDfwIclfc/PCwrr44amcmDAg8hxG0Ewe4= github.com/dlclark/regexp2 v1.11.5 h1:Q/sSnsKerHeCkc/jSTNq1oCm7KiVgUMZRDUoRu0JQZQ= github.com/dlclark/regexp2 v1.11.5/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= github.com/dprotaso/go-yit v0.0.0-20191028211022-135eb7262960/go.mod h1:9HQzr9D/0PGwMEbC3d5AB7oi67+h4TsQqItC1GVYG58= @@ -88,8 +92,6 @@ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= -github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/gorilla/css v1.0.1 h1:ntNaBIghp6JmvWnxbZKANoLyuXTPZ4cAMlo6RyhlbO8= github.com/gorilla/css v1.0.1/go.mod h1:BvnYkspnSzMmwRK+b8/xgNPLiIuNZr6vbZBTPQ2A3b0= github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM= @@ -179,6 +181,8 @@ github.com/spf13/cobra v1.9.1 h1:CXSaggrXdbHK9CF+8ywj8Amf7PBRmPCOJugH954Nnlo= github.com/spf13/cobra v1.9.1/go.mod h1:nDyEzZ8ogv936Cinf6g1RU9MRY64Ir93oCnqb9wxYW0= github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o= github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/sst/opencode-sdk-go v0.1.0-alpha.8 
h1:Tp7nbckbMCwAA/ieVZeeZCp79xXtrPMaWLRk5mhNwrw= +github.com/sst/opencode-sdk-go v0.1.0-alpha.8/go.mod h1:uagorfAHZsVy6vf0xY6TlQraM4uCILdZ5tKKhl1oToM= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v0.0.0-20161117074351-18a02ba4a312/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= @@ -214,13 +218,14 @@ golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPh golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/exp v0.0.0-20250305212735-054e65f0b394 h1:nDVHiLt8aIbd/VzvPWN6kSOPE7+F/fNFDSXLVYkE/Iw= golang.org/x/exp v0.0.0-20250305212735-054e65f0b394/go.mod h1:sIifuuw/Yco/y6yb6+bDNfyeQ/MdPUy/hKEMYQV17cM= -golang.org/x/image v0.28.0 h1:gdem5JW1OLS4FbkWgLO+7ZeFzYtL3xClb97GaUzYMFE= -golang.org/x/image v0.28.0/go.mod h1:GUJYXtnGKEUgggyzh+Vxt+AviiCcyiwpsl8iQ8MvwGY= +golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/image v0.26.0 h1:4XjIFEZWQmCZi6Wv8BoxsDhRU3RVnLX04dToTDAEPlY= +golang.org/x/image v0.26.0/go.mod h1:lcxbMFAovzpnJxzXS3nyL83K27tmqtKzIJpctK8YO5c= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= -golang.org/x/mod v0.25.0 h1:n7a+ZbQKQA/Ysbyb0/6IbB1H/X41mKgbhfv7AfG/44w= -golang.org/x/mod v0.25.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= +golang.org/x/mod v0.24.0 h1:ZfthKaKaT4NrhGVZHO1/WDTwGES4De8KtWO0SIbNJMU= +golang.org/x/mod v0.24.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= @@ -231,15 +236,15 @@ golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= -golang.org/x/net v0.41.0 h1:vBTly1HeNPEn3wtREYfy4GZ/NECgw2Cnl+nK6Nz3uvw= -golang.org/x/net v0.41.0/go.mod h1:B/K4NNqkfmg07DQYrbwvSluqCJOOXwUjeb/5lOisjbA= +golang.org/x/net v0.39.0 h1:ZCu7HMWDxpXpaiKdhzIfaltL9Lp31x/3fCP11bc6/fY= +golang.org/x/net v0.39.0/go.mod h1:X7NRbYVEA+ewNkCNyJ513WmMdQ3BineSwVtN2zD/d+E= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8= -golang.org/x/sync v0.15.0/go.mod 
h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sync v0.13.0 h1:AauUjRAJ9OSnvULf/ARrrVywoJDy0YS2AwQ98I37610= +golang.org/x/sync v0.13.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -258,28 +263,28 @@ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= -golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/sys v0.32.0 h1:s77OFDvIQeibCmezSnk/q6iAfkdiQaJi4VzroCFrN20= +golang.org/x/sys v0.32.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= -golang.org/x/term v0.32.0 h1:DR4lr0TjUs3epypdhTOkMmuF5CDFJ/8pOnbzMZPQ7bg= -golang.org/x/term v0.32.0/go.mod h1:uZG1FhGx848Sqfsq4/DlJr3xGGsYMu/L5GW4abiaEPQ= +golang.org/x/term v0.31.0 h1:erwDkOK1Msy6offm1mOgvspSkslFnIGsFnxOKoufg3o= +golang.org/x/term v0.31.0/go.mod h1:R4BeIy7D95HzImkxGkTW1UQTtP54tio2RyHz7PwK0aw= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= -golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M= -golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA= +golang.org/x/text v0.24.0 h1:dd5Bzh4yt5KYA8f9CJHCP4FB4D51c2c6JvN37xJJkJ0= +golang.org/x/text v0.24.0/go.mod h1:L8rBsPeo2pSS+xqN0d5u2ikmjtmoJbDBT1b7nHvFCdU= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= -golang.org/x/tools v0.34.0 h1:qIpSLOxeCYGg9TrcJokLBG4KFA6d795g0xkBkiESGlo= -golang.org/x/tools v0.34.0/go.mod h1:pAP9OwEaY1CAW3HOmg3hLZC5Z0CCmzjAF2UQMSqNARg= +golang.org/x/tools v0.31.0 h1:0EedkvKDbh+qistFTd0Bcwe/YLh4vHwWEkiI0toFIBU= +golang.org/x/tools v0.31.0/go.mod h1:naFTU+Cev749tSJRXJlna0T3WxKvb1kWEx15xA4SdmQ= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors 
v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/packages/tui/input/cancelreader_other.go b/packages/tui/input/cancelreader_other.go deleted file mode 100644 index dbd22a2e..00000000 --- a/packages/tui/input/cancelreader_other.go +++ /dev/null @@ -1,14 +0,0 @@ -//go:build !windows -// +build !windows - -package input - -import ( - "io" - - "github.com/muesli/cancelreader" -) - -func newCancelreader(r io.Reader, _ int) (cancelreader.CancelReader, error) { - return cancelreader.NewReader(r) //nolint:wrapcheck -} diff --git a/packages/tui/input/cancelreader_windows.go b/packages/tui/input/cancelreader_windows.go deleted file mode 100644 index 19abfce4..00000000 --- a/packages/tui/input/cancelreader_windows.go +++ /dev/null @@ -1,143 +0,0 @@ -//go:build windows -// +build windows - -package input - -import ( - "fmt" - "io" - "os" - "sync" - - xwindows "github.com/charmbracelet/x/windows" - "github.com/muesli/cancelreader" - "golang.org/x/sys/windows" -) - -type conInputReader struct { - cancelMixin - conin windows.Handle - originalMode uint32 -} - -var _ cancelreader.CancelReader = &conInputReader{} - -func newCancelreader(r io.Reader, flags int) (cancelreader.CancelReader, error) { - fallback := func(io.Reader) (cancelreader.CancelReader, error) { - return cancelreader.NewReader(r) - } - - var dummy uint32 - if f, ok := r.(cancelreader.File); !ok || f.Fd() != os.Stdin.Fd() || - // If data was piped to the standard input, it does not emit events - // anymore. We can detect this if the console mode cannot be set anymore, - // in this case, we fallback to the default cancelreader implementation. - windows.GetConsoleMode(windows.Handle(f.Fd()), &dummy) != nil { - return fallback(r) - } - - conin, err := windows.GetStdHandle(windows.STD_INPUT_HANDLE) - if err != nil { - return fallback(r) - } - - // Discard any pending input events. - if err := xwindows.FlushConsoleInputBuffer(conin); err != nil { - return fallback(r) - } - - modes := []uint32{ - windows.ENABLE_WINDOW_INPUT, - windows.ENABLE_EXTENDED_FLAGS, - } - - // Enabling mouse mode implicitly blocks console text selection. Thus, we - // need to enable it only if the mouse mode is requested. - // In order to toggle mouse mode, the caller must recreate the reader with - // the appropriate flag toggled. - if flags&FlagMouseMode != 0 { - modes = append(modes, windows.ENABLE_MOUSE_INPUT) - } - - originalMode, err := prepareConsole(conin, modes...) - if err != nil { - return nil, fmt.Errorf("failed to prepare console input: %w", err) - } - - return &conInputReader{ - conin: conin, - originalMode: originalMode, - }, nil -} - -// Cancel implements cancelreader.CancelReader. -func (r *conInputReader) Cancel() bool { - r.setCanceled() - - return windows.CancelIoEx(r.conin, nil) == nil || windows.CancelIo(r.conin) == nil -} - -// Close implements cancelreader.CancelReader. -func (r *conInputReader) Close() error { - if r.originalMode != 0 { - err := windows.SetConsoleMode(r.conin, r.originalMode) - if err != nil { - return fmt.Errorf("reset console mode: %w", err) - } - } - - return nil -} - -// Read implements cancelreader.CancelReader. 
-func (r *conInputReader) Read(data []byte) (int, error) { - if r.isCanceled() { - return 0, cancelreader.ErrCanceled - } - - var n uint32 - if err := windows.ReadFile(r.conin, data, &n, nil); err != nil { - return int(n), fmt.Errorf("read console input: %w", err) - } - - return int(n), nil -} - -func prepareConsole(input windows.Handle, modes ...uint32) (originalMode uint32, err error) { - err = windows.GetConsoleMode(input, &originalMode) - if err != nil { - return 0, fmt.Errorf("get console mode: %w", err) - } - - var newMode uint32 - for _, mode := range modes { - newMode |= mode - } - - err = windows.SetConsoleMode(input, newMode) - if err != nil { - return 0, fmt.Errorf("set console mode: %w", err) - } - - return originalMode, nil -} - -// cancelMixin represents a goroutine-safe cancelation status. -type cancelMixin struct { - unsafeCanceled bool - lock sync.Mutex -} - -func (c *cancelMixin) setCanceled() { - c.lock.Lock() - defer c.lock.Unlock() - - c.unsafeCanceled = true -} - -func (c *cancelMixin) isCanceled() bool { - c.lock.Lock() - defer c.lock.Unlock() - - return c.unsafeCanceled -} diff --git a/packages/tui/input/clipboard.go b/packages/tui/input/clipboard.go deleted file mode 100644 index 725a2d95..00000000 --- a/packages/tui/input/clipboard.go +++ /dev/null @@ -1,25 +0,0 @@ -package input - -import "github.com/charmbracelet/x/ansi" - -// ClipboardSelection represents a clipboard selection. The most common -// clipboard selections are "system" and "primary" and selections. -type ClipboardSelection = byte - -// Clipboard selections. -const ( - SystemClipboard ClipboardSelection = ansi.SystemClipboard - PrimaryClipboard ClipboardSelection = ansi.PrimaryClipboard -) - -// ClipboardEvent is a clipboard read message event. This message is emitted when -// a terminal receives an OSC52 clipboard read message event. -type ClipboardEvent struct { - Content string - Selection ClipboardSelection -} - -// String returns the string representation of the clipboard message. -func (e ClipboardEvent) String() string { - return e.Content -} diff --git a/packages/tui/input/color.go b/packages/tui/input/color.go deleted file mode 100644 index 9bcf7499..00000000 --- a/packages/tui/input/color.go +++ /dev/null @@ -1,136 +0,0 @@ -package input - -import ( - "fmt" - "image/color" - "math" -) - -// ForegroundColorEvent represents a foreground color event. This event is -// emitted when the terminal requests the terminal foreground color using -// [ansi.RequestForegroundColor]. -type ForegroundColorEvent struct{ color.Color } - -// String returns the hex representation of the color. -func (e ForegroundColorEvent) String() string { - return colorToHex(e.Color) -} - -// IsDark returns whether the color is dark. -func (e ForegroundColorEvent) IsDark() bool { - return isDarkColor(e.Color) -} - -// BackgroundColorEvent represents a background color event. This event is -// emitted when the terminal requests the terminal background color using -// [ansi.RequestBackgroundColor]. -type BackgroundColorEvent struct{ color.Color } - -// String returns the hex representation of the color. -func (e BackgroundColorEvent) String() string { - return colorToHex(e) -} - -// IsDark returns whether the color is dark. -func (e BackgroundColorEvent) IsDark() bool { - return isDarkColor(e.Color) -} - -// CursorColorEvent represents a cursor color change event. This event is -// emitted when the program requests the terminal cursor color using -// [ansi.RequestCursorColor]. 
-type CursorColorEvent struct{ color.Color } - -// String returns the hex representation of the color. -func (e CursorColorEvent) String() string { - return colorToHex(e) -} - -// IsDark returns whether the color is dark. -func (e CursorColorEvent) IsDark() bool { - return isDarkColor(e) -} - -type shiftable interface { - ~uint | ~uint16 | ~uint32 | ~uint64 -} - -func shift[T shiftable](x T) T { - if x > 0xff { - x >>= 8 - } - return x -} - -func colorToHex(c color.Color) string { - if c == nil { - return "" - } - r, g, b, _ := c.RGBA() - return fmt.Sprintf("#%02x%02x%02x", shift(r), shift(g), shift(b)) -} - -func getMaxMin(a, b, c float64) (ma, mi float64) { - if a > b { - ma = a - mi = b - } else { - ma = b - mi = a - } - if c > ma { - ma = c - } else if c < mi { - mi = c - } - return ma, mi -} - -func round(x float64) float64 { - return math.Round(x*1000) / 1000 -} - -// rgbToHSL converts an RGB triple to an HSL triple. -func rgbToHSL(r, g, b uint8) (h, s, l float64) { - // convert uint32 pre-multiplied value to uint8 - // The r,g,b values are divided by 255 to change the range from 0..255 to 0..1: - Rnot := float64(r) / 255 - Gnot := float64(g) / 255 - Bnot := float64(b) / 255 - Cmax, Cmin := getMaxMin(Rnot, Gnot, Bnot) - Δ := Cmax - Cmin - // Lightness calculation: - l = (Cmax + Cmin) / 2 - // Hue and Saturation Calculation: - if Δ == 0 { - h = 0 - s = 0 - } else { - switch Cmax { - case Rnot: - h = 60 * (math.Mod((Gnot-Bnot)/Δ, 6)) - case Gnot: - h = 60 * (((Bnot - Rnot) / Δ) + 2) - case Bnot: - h = 60 * (((Rnot - Gnot) / Δ) + 4) - } - if h < 0 { - h += 360 - } - - s = Δ / (1 - math.Abs((2*l)-1)) - } - - return h, round(s), round(l) -} - -// isDarkColor returns whether the given color is dark. -func isDarkColor(c color.Color) bool { - if c == nil { - return true - } - - r, g, b, _ := c.RGBA() - _, _, l := rgbToHSL(uint8(r>>8), uint8(g>>8), uint8(b>>8)) //nolint:gosec - return l < 0.5 -} diff --git a/packages/tui/input/cursor.go b/packages/tui/input/cursor.go deleted file mode 100644 index cf4e973d..00000000 --- a/packages/tui/input/cursor.go +++ /dev/null @@ -1,7 +0,0 @@ -package input - -import "image" - -// CursorPositionEvent represents a cursor position event. Where X is the -// zero-based column and Y is the zero-based row. -type CursorPositionEvent image.Point diff --git a/packages/tui/input/da1.go b/packages/tui/input/da1.go deleted file mode 100644 index c2cd94cf..00000000 --- a/packages/tui/input/da1.go +++ /dev/null @@ -1,18 +0,0 @@ -package input - -import "github.com/charmbracelet/x/ansi" - -// PrimaryDeviceAttributesEvent is an event that represents the terminal -// primary device attributes. -type PrimaryDeviceAttributesEvent []int - -func parsePrimaryDevAttrs(params ansi.Params) Event { - // Primary Device Attributes - da1 := make(PrimaryDeviceAttributesEvent, len(params)) - for i, p := range params { - if !p.HasMore() { - da1[i] = p.Param(0) - } - } - return da1 -} diff --git a/packages/tui/input/doc.go b/packages/tui/input/doc.go deleted file mode 100644 index 2877d496..00000000 --- a/packages/tui/input/doc.go +++ /dev/null @@ -1,6 +0,0 @@ -// Package input provides a set of utilities for handling input events in a -// terminal environment. It includes support for reading input events, parsing -// escape sequences, and handling clipboard events. -// The package is designed to work with various terminal types and supports -// customization through flags and options. 
-package input diff --git a/packages/tui/input/driver.go b/packages/tui/input/driver.go deleted file mode 100644 index 1e34677a..00000000 --- a/packages/tui/input/driver.go +++ /dev/null @@ -1,192 +0,0 @@ -//nolint:unused,revive,nolintlint -package input - -import ( - "bytes" - "io" - "unicode/utf8" - - "github.com/muesli/cancelreader" -) - -// Logger is a simple logger interface. -type Logger interface { - Printf(format string, v ...any) -} - -// win32InputState is a state machine for parsing key events from the Windows -// Console API into escape sequences and utf8 runes, and keeps track of the last -// control key state to determine modifier key changes. It also keeps track of -// the last mouse button state and window size changes to determine which mouse -// buttons were released and to prevent multiple size events from firing. -type win32InputState struct { - ansiBuf [256]byte - ansiIdx int - utf16Buf [2]rune - utf16Half bool - lastCks uint32 // the last control key state for the previous event - lastMouseBtns uint32 // the last mouse button state for the previous event - lastWinsizeX, lastWinsizeY int16 // the last window size for the previous event to prevent multiple size events from firing -} - -// Reader represents an input event reader. It reads input events and parses -// escape sequences from the terminal input buffer and translates them into -// human‑readable events. -type Reader struct { - rd cancelreader.CancelReader - table map[string]Key // table is a lookup table for key sequences. - term string // $TERM - paste []byte // bracketed paste buffer; nil when disabled - buf [256]byte // read buffer - partialSeq []byte // holds incomplete escape sequences - keyState win32InputState - parser Parser - logger Logger -} - -// NewReader returns a new input event reader. -func NewReader(r io.Reader, termType string, flags int) (*Reader, error) { - d := new(Reader) - cr, err := newCancelreader(r, flags) - if err != nil { - return nil, err - } - - d.rd = cr - d.table = buildKeysTable(flags, termType) - d.term = termType - d.parser.flags = flags - return d, nil -} - -// SetLogger sets a logger for the reader. -func (d *Reader) SetLogger(l Logger) { d.logger = l } - -// Read implements io.Reader. -func (d *Reader) Read(p []byte) (int, error) { return d.rd.Read(p) } - -// Cancel cancels the underlying reader. -func (d *Reader) Cancel() bool { return d.rd.Cancel() } - -// Close closes the underlying reader. -func (d *Reader) Close() error { return d.rd.Close() } - -func (d *Reader) readEvents() ([]Event, error) { - nb, err := d.rd.Read(d.buf[:]) - if err != nil { - return nil, err - } - - var events []Event - - // Combine any partial sequence from previous read with new data. - var buf []byte - if len(d.partialSeq) > 0 { - buf = make([]byte, len(d.partialSeq)+nb) - copy(buf, d.partialSeq) - copy(buf[len(d.partialSeq):], d.buf[:nb]) - d.partialSeq = nil - } else { - buf = d.buf[:nb] - } - - // Fast path: direct lookup for simple escape sequences. - if bytes.HasPrefix(buf, []byte{0x1b}) { - if k, ok := d.table[string(buf)]; ok { - if d.logger != nil { - d.logger.Printf("input: %q", buf) - } - events = append(events, KeyPressEvent(k)) - return events, nil - } - } - - var i int - for i < len(buf) { - consumed, ev := d.parser.parseSequence(buf[i:]) - if d.logger != nil && consumed > 0 { - d.logger.Printf("input: %q", buf[i:i+consumed]) - } - - // Incomplete sequence – store remainder and exit. 
- if consumed == 0 && ev == nil { - rem := len(buf) - i - if rem > 0 { - d.partialSeq = make([]byte, rem) - copy(d.partialSeq, buf[i:]) - } - break - } - - // Handle bracketed paste specially so we don’t emit a paste event for - // every byte. - if d.paste != nil { - if _, ok := ev.(PasteEndEvent); !ok { - d.paste = append(d.paste, buf[i]) - i++ - continue - } - } - - switch ev.(type) { - case PasteStartEvent: - d.paste = []byte{} - case PasteEndEvent: - var paste []rune - for len(d.paste) > 0 { - r, w := utf8.DecodeRune(d.paste) - if r != utf8.RuneError { - paste = append(paste, r) - } - d.paste = d.paste[w:] - } - d.paste = nil - events = append(events, PasteEvent(paste)) - case nil: - i++ - continue - } - - if mevs, ok := ev.(MultiEvent); ok { - events = append(events, []Event(mevs)...) - } else { - events = append(events, ev) - } - i += consumed - } - - // Collapse bursts of wheel/motion events into a single event each. - events = coalesceMouseEvents(events) - return events, nil -} - -// coalesceMouseEvents reduces the volume of MouseWheelEvent and MouseMotionEvent -// objects that arrive in rapid succession by keeping only the most recent -// event in each contiguous run. -func coalesceMouseEvents(in []Event) []Event { - if len(in) < 2 { - return in - } - - out := make([]Event, 0, len(in)) - for _, ev := range in { - switch ev.(type) { - case MouseWheelEvent: - if len(out) > 0 { - if _, ok := out[len(out)-1].(MouseWheelEvent); ok { - out[len(out)-1] = ev // replace previous wheel event - continue - } - } - case MouseMotionEvent: - if len(out) > 0 { - if _, ok := out[len(out)-1].(MouseMotionEvent); ok { - out[len(out)-1] = ev // replace previous motion event - continue - } - } - } - out = append(out, ev) - } - return out -} diff --git a/packages/tui/input/driver_other.go b/packages/tui/input/driver_other.go deleted file mode 100644 index fd3df06c..00000000 --- a/packages/tui/input/driver_other.go +++ /dev/null @@ -1,17 +0,0 @@ -//go:build !windows -// +build !windows - -package input - -// ReadEvents reads input events from the terminal. -// -// It reads the events available in the input buffer and returns them. -func (d *Reader) ReadEvents() ([]Event, error) { - return d.readEvents() -} - -// parseWin32InputKeyEvent parses a Win32 input key events. This function is -// only available on Windows. 
-func (p *Parser) parseWin32InputKeyEvent(*win32InputState, uint16, uint16, rune, bool, uint32, uint16) Event { - return nil -} diff --git a/packages/tui/input/driver_test.go b/packages/tui/input/driver_test.go deleted file mode 100644 index affdf5b8..00000000 --- a/packages/tui/input/driver_test.go +++ /dev/null @@ -1,25 +0,0 @@ -package input - -import ( - "io" - "strings" - "testing" -) - -func BenchmarkDriver(b *testing.B) { - input := "\x1b\x1b[Ztest\x00\x1b]10;1234/1234/1234\x07\x1b[27;2;27~" - rdr := strings.NewReader(input) - drv, err := NewReader(rdr, "dumb", 0) - if err != nil { - b.Fatalf("could not create driver: %v", err) - } - - b.ReportAllocs() - b.ResetTimer() - for i := 0; i < b.N; i++ { - rdr.Reset(input) - if _, err := drv.ReadEvents(); err != nil && err != io.EOF { - b.Errorf("error reading input: %v", err) - } - } -} diff --git a/packages/tui/input/driver_windows.go b/packages/tui/input/driver_windows.go deleted file mode 100644 index acdd6984..00000000 --- a/packages/tui/input/driver_windows.go +++ /dev/null @@ -1,620 +0,0 @@ -//go:build windows -// +build windows - -package input - -import ( - "errors" - "fmt" - "strings" - "time" - "unicode" - "unicode/utf16" - "unicode/utf8" - - "github.com/charmbracelet/x/ansi" - xwindows "github.com/charmbracelet/x/windows" - "github.com/muesli/cancelreader" - "golang.org/x/sys/windows" -) - -// ReadEvents reads input events from the terminal. -// -// It reads the events available in the input buffer and returns them. -func (d *Reader) ReadEvents() ([]Event, error) { - events, err := d.handleConInput() - if errors.Is(err, errNotConInputReader) { - return d.readEvents() - } - return events, err -} - -var errNotConInputReader = fmt.Errorf("handleConInput: not a conInputReader") - -func (d *Reader) handleConInput() ([]Event, error) { - cc, ok := d.rd.(*conInputReader) - if !ok { - return nil, errNotConInputReader - } - - var ( - events []xwindows.InputRecord - err error - ) - for { - // Peek up to 256 events, this is to allow for sequences events reported as - // key events. - events, err = peekNConsoleInputs(cc.conin, 256) - if cc.isCanceled() { - return nil, cancelreader.ErrCanceled - } - if err != nil { - return nil, fmt.Errorf("peek coninput events: %w", err) - } - if len(events) > 0 { - break - } - - // Sleep for a bit to avoid busy waiting. - time.Sleep(10 * time.Millisecond) - } - - events, err = readNConsoleInputs(cc.conin, uint32(len(events))) - if cc.isCanceled() { - return nil, cancelreader.ErrCanceled - } - if err != nil { - return nil, fmt.Errorf("read coninput events: %w", err) - } - - var evs []Event - for _, event := range events { - if e := d.parser.parseConInputEvent(event, &d.keyState); e != nil { - if multi, ok := e.(MultiEvent); ok { - evs = append(evs, multi...) 
- } else { - evs = append(evs, e) - } - } - } - - return evs, nil -} - -func (p *Parser) parseConInputEvent(event xwindows.InputRecord, keyState *win32InputState) Event { - switch event.EventType { - case xwindows.KEY_EVENT: - kevent := event.KeyEvent() - return p.parseWin32InputKeyEvent(keyState, kevent.VirtualKeyCode, kevent.VirtualScanCode, - kevent.Char, kevent.KeyDown, kevent.ControlKeyState, kevent.RepeatCount) - - case xwindows.WINDOW_BUFFER_SIZE_EVENT: - wevent := event.WindowBufferSizeEvent() - if wevent.Size.X != keyState.lastWinsizeX || wevent.Size.Y != keyState.lastWinsizeY { - keyState.lastWinsizeX, keyState.lastWinsizeY = wevent.Size.X, wevent.Size.Y - return WindowSizeEvent{ - Width: int(wevent.Size.X), - Height: int(wevent.Size.Y), - } - } - case xwindows.MOUSE_EVENT: - mevent := event.MouseEvent() - Event := mouseEvent(keyState.lastMouseBtns, mevent) - keyState.lastMouseBtns = mevent.ButtonState - return Event - case xwindows.FOCUS_EVENT: - fevent := event.FocusEvent() - if fevent.SetFocus { - return FocusEvent{} - } - return BlurEvent{} - case xwindows.MENU_EVENT: - // ignore - } - return nil -} - -func mouseEventButton(p, s uint32) (MouseButton, bool) { - var isRelease bool - button := MouseNone - btn := p ^ s - if btn&s == 0 { - isRelease = true - } - - if btn == 0 { - switch { - case s&xwindows.FROM_LEFT_1ST_BUTTON_PRESSED > 0: - button = MouseLeft - case s&xwindows.FROM_LEFT_2ND_BUTTON_PRESSED > 0: - button = MouseMiddle - case s&xwindows.RIGHTMOST_BUTTON_PRESSED > 0: - button = MouseRight - case s&xwindows.FROM_LEFT_3RD_BUTTON_PRESSED > 0: - button = MouseBackward - case s&xwindows.FROM_LEFT_4TH_BUTTON_PRESSED > 0: - button = MouseForward - } - return button, isRelease - } - - switch btn { - case xwindows.FROM_LEFT_1ST_BUTTON_PRESSED: // left button - button = MouseLeft - case xwindows.RIGHTMOST_BUTTON_PRESSED: // right button - button = MouseRight - case xwindows.FROM_LEFT_2ND_BUTTON_PRESSED: // middle button - button = MouseMiddle - case xwindows.FROM_LEFT_3RD_BUTTON_PRESSED: // unknown (possibly mouse backward) - button = MouseBackward - case xwindows.FROM_LEFT_4TH_BUTTON_PRESSED: // unknown (possibly mouse forward) - button = MouseForward - } - - return button, isRelease -} - -func mouseEvent(p uint32, e xwindows.MouseEventRecord) (ev Event) { - var mod KeyMod - var isRelease bool - if e.ControlKeyState&(xwindows.LEFT_ALT_PRESSED|xwindows.RIGHT_ALT_PRESSED) != 0 { - mod |= ModAlt - } - if e.ControlKeyState&(xwindows.LEFT_CTRL_PRESSED|xwindows.RIGHT_CTRL_PRESSED) != 0 { - mod |= ModCtrl - } - if e.ControlKeyState&(xwindows.SHIFT_PRESSED) != 0 { - mod |= ModShift - } - - m := Mouse{ - X: int(e.MousePositon.X), - Y: int(e.MousePositon.Y), - Mod: mod, - } - - wheelDirection := int16(highWord(e.ButtonState)) //nolint:gosec - switch e.EventFlags { - case 0, xwindows.DOUBLE_CLICK: - m.Button, isRelease = mouseEventButton(p, e.ButtonState) - case xwindows.MOUSE_WHEELED: - if wheelDirection > 0 { - m.Button = MouseWheelUp - } else { - m.Button = MouseWheelDown - } - case xwindows.MOUSE_HWHEELED: - if wheelDirection > 0 { - m.Button = MouseWheelRight - } else { - m.Button = MouseWheelLeft - } - case xwindows.MOUSE_MOVED: - m.Button, _ = mouseEventButton(p, e.ButtonState) - return MouseMotionEvent(m) - } - - if isWheel(m.Button) { - return MouseWheelEvent(m) - } else if isRelease { - return MouseReleaseEvent(m) - } - - return MouseClickEvent(m) -} - -func highWord(data uint32) uint16 { - return uint16((data & 0xFFFF0000) >> 16) //nolint:gosec -} - -func 
readNConsoleInputs(console windows.Handle, maxEvents uint32) ([]xwindows.InputRecord, error) { - if maxEvents == 0 { - return nil, fmt.Errorf("maxEvents cannot be zero") - } - - records := make([]xwindows.InputRecord, maxEvents) - n, err := readConsoleInput(console, records) - return records[:n], err -} - -func readConsoleInput(console windows.Handle, inputRecords []xwindows.InputRecord) (uint32, error) { - if len(inputRecords) == 0 { - return 0, fmt.Errorf("size of input record buffer cannot be zero") - } - - var read uint32 - - err := xwindows.ReadConsoleInput(console, &inputRecords[0], uint32(len(inputRecords)), &read) //nolint:gosec - - return read, err //nolint:wrapcheck -} - -func peekConsoleInput(console windows.Handle, inputRecords []xwindows.InputRecord) (uint32, error) { - if len(inputRecords) == 0 { - return 0, fmt.Errorf("size of input record buffer cannot be zero") - } - - var read uint32 - - err := xwindows.PeekConsoleInput(console, &inputRecords[0], uint32(len(inputRecords)), &read) //nolint:gosec - - return read, err //nolint:wrapcheck -} - -func peekNConsoleInputs(console windows.Handle, maxEvents uint32) ([]xwindows.InputRecord, error) { - if maxEvents == 0 { - return nil, fmt.Errorf("maxEvents cannot be zero") - } - - records := make([]xwindows.InputRecord, maxEvents) - n, err := peekConsoleInput(console, records) - return records[:n], err -} - -// parseWin32InputKeyEvent parses a single key event from either the Windows -// Console API or win32-input-mode events. When state is nil, it means this is -// an event from win32-input-mode. Otherwise, it's a key event from the Windows -// Console API and needs a state to decode ANSI escape sequences and utf16 -// runes. -func (p *Parser) parseWin32InputKeyEvent(state *win32InputState, vkc uint16, _ uint16, r rune, keyDown bool, cks uint32, repeatCount uint16) (event Event) { - defer func() { - // Respect the repeat count. - if repeatCount > 1 { - var multi MultiEvent - for i := 0; i < int(repeatCount); i++ { - multi = append(multi, event) - } - event = multi - } - }() - if state != nil { - defer func() { - state.lastCks = cks - }() - } - - var utf8Buf [utf8.UTFMax]byte - var key Key - if state != nil && state.utf16Half { - state.utf16Half = false - state.utf16Buf[1] = r - codepoint := utf16.DecodeRune(state.utf16Buf[0], state.utf16Buf[1]) - rw := utf8.EncodeRune(utf8Buf[:], codepoint) - r, _ = utf8.DecodeRune(utf8Buf[:rw]) - key.Code = r - key.Text = string(r) - key.Mod = translateControlKeyState(cks) - key = ensureKeyCase(key, cks) - if keyDown { - return KeyPressEvent(key) - } - return KeyReleaseEvent(key) - } - - var baseCode rune - switch { - case vkc == 0: - // Zero means this event is either an escape code or a unicode - // codepoint. - if state != nil && state.ansiIdx == 0 && r != ansi.ESC { - // This is a unicode codepoint. - baseCode = r - break - } - - if state != nil { - // Collect ANSI escape code. - state.ansiBuf[state.ansiIdx] = byte(r) - state.ansiIdx++ - if state.ansiIdx <= 2 { - // We haven't received enough bytes to determine if this is an - // ANSI escape code. - return nil - } - if r == ansi.ESC { - // We're expecting a closing String Terminator [ansi.ST]. 
- return nil - } - - n, event := p.parseSequence(state.ansiBuf[:state.ansiIdx]) - if n == 0 { - return nil - } - if _, ok := event.(UnknownEvent); ok { - return nil - } - - state.ansiIdx = 0 - return event - } - case vkc == xwindows.VK_BACK: - baseCode = KeyBackspace - case vkc == xwindows.VK_TAB: - baseCode = KeyTab - case vkc == xwindows.VK_RETURN: - baseCode = KeyEnter - case vkc == xwindows.VK_SHIFT: - //nolint:nestif - if cks&xwindows.SHIFT_PRESSED != 0 { - if cks&xwindows.ENHANCED_KEY != 0 { - baseCode = KeyRightShift - } else { - baseCode = KeyLeftShift - } - } else if state != nil { - if state.lastCks&xwindows.SHIFT_PRESSED != 0 { - if state.lastCks&xwindows.ENHANCED_KEY != 0 { - baseCode = KeyRightShift - } else { - baseCode = KeyLeftShift - } - } - } - case vkc == xwindows.VK_CONTROL: - if cks&xwindows.LEFT_CTRL_PRESSED != 0 { - baseCode = KeyLeftCtrl - } else if cks&xwindows.RIGHT_CTRL_PRESSED != 0 { - baseCode = KeyRightCtrl - } else if state != nil { - if state.lastCks&xwindows.LEFT_CTRL_PRESSED != 0 { - baseCode = KeyLeftCtrl - } else if state.lastCks&xwindows.RIGHT_CTRL_PRESSED != 0 { - baseCode = KeyRightCtrl - } - } - case vkc == xwindows.VK_MENU: - if cks&xwindows.LEFT_ALT_PRESSED != 0 { - baseCode = KeyLeftAlt - } else if cks&xwindows.RIGHT_ALT_PRESSED != 0 { - baseCode = KeyRightAlt - } else if state != nil { - if state.lastCks&xwindows.LEFT_ALT_PRESSED != 0 { - baseCode = KeyLeftAlt - } else if state.lastCks&xwindows.RIGHT_ALT_PRESSED != 0 { - baseCode = KeyRightAlt - } - } - case vkc == xwindows.VK_PAUSE: - baseCode = KeyPause - case vkc == xwindows.VK_CAPITAL: - baseCode = KeyCapsLock - case vkc == xwindows.VK_ESCAPE: - baseCode = KeyEscape - case vkc == xwindows.VK_SPACE: - baseCode = KeySpace - case vkc == xwindows.VK_PRIOR: - baseCode = KeyPgUp - case vkc == xwindows.VK_NEXT: - baseCode = KeyPgDown - case vkc == xwindows.VK_END: - baseCode = KeyEnd - case vkc == xwindows.VK_HOME: - baseCode = KeyHome - case vkc == xwindows.VK_LEFT: - baseCode = KeyLeft - case vkc == xwindows.VK_UP: - baseCode = KeyUp - case vkc == xwindows.VK_RIGHT: - baseCode = KeyRight - case vkc == xwindows.VK_DOWN: - baseCode = KeyDown - case vkc == xwindows.VK_SELECT: - baseCode = KeySelect - case vkc == xwindows.VK_SNAPSHOT: - baseCode = KeyPrintScreen - case vkc == xwindows.VK_INSERT: - baseCode = KeyInsert - case vkc == xwindows.VK_DELETE: - baseCode = KeyDelete - case vkc >= '0' && vkc <= '9': - baseCode = rune(vkc) - case vkc >= 'A' && vkc <= 'Z': - // Convert to lowercase. 
- baseCode = rune(vkc) + 32 - case vkc == xwindows.VK_LWIN: - baseCode = KeyLeftSuper - case vkc == xwindows.VK_RWIN: - baseCode = KeyRightSuper - case vkc == xwindows.VK_APPS: - baseCode = KeyMenu - case vkc >= xwindows.VK_NUMPAD0 && vkc <= xwindows.VK_NUMPAD9: - baseCode = rune(vkc-xwindows.VK_NUMPAD0) + KeyKp0 - case vkc == xwindows.VK_MULTIPLY: - baseCode = KeyKpMultiply - case vkc == xwindows.VK_ADD: - baseCode = KeyKpPlus - case vkc == xwindows.VK_SEPARATOR: - baseCode = KeyKpComma - case vkc == xwindows.VK_SUBTRACT: - baseCode = KeyKpMinus - case vkc == xwindows.VK_DECIMAL: - baseCode = KeyKpDecimal - case vkc == xwindows.VK_DIVIDE: - baseCode = KeyKpDivide - case vkc >= xwindows.VK_F1 && vkc <= xwindows.VK_F24: - baseCode = rune(vkc-xwindows.VK_F1) + KeyF1 - case vkc == xwindows.VK_NUMLOCK: - baseCode = KeyNumLock - case vkc == xwindows.VK_SCROLL: - baseCode = KeyScrollLock - case vkc == xwindows.VK_LSHIFT: - baseCode = KeyLeftShift - case vkc == xwindows.VK_RSHIFT: - baseCode = KeyRightShift - case vkc == xwindows.VK_LCONTROL: - baseCode = KeyLeftCtrl - case vkc == xwindows.VK_RCONTROL: - baseCode = KeyRightCtrl - case vkc == xwindows.VK_LMENU: - baseCode = KeyLeftAlt - case vkc == xwindows.VK_RMENU: - baseCode = KeyRightAlt - case vkc == xwindows.VK_VOLUME_MUTE: - baseCode = KeyMute - case vkc == xwindows.VK_VOLUME_DOWN: - baseCode = KeyLowerVol - case vkc == xwindows.VK_VOLUME_UP: - baseCode = KeyRaiseVol - case vkc == xwindows.VK_MEDIA_NEXT_TRACK: - baseCode = KeyMediaNext - case vkc == xwindows.VK_MEDIA_PREV_TRACK: - baseCode = KeyMediaPrev - case vkc == xwindows.VK_MEDIA_STOP: - baseCode = KeyMediaStop - case vkc == xwindows.VK_MEDIA_PLAY_PAUSE: - baseCode = KeyMediaPlayPause - case vkc == xwindows.VK_OEM_1: - baseCode = ';' - case vkc == xwindows.VK_OEM_PLUS: - baseCode = '+' - case vkc == xwindows.VK_OEM_COMMA: - baseCode = ',' - case vkc == xwindows.VK_OEM_MINUS: - baseCode = '-' - case vkc == xwindows.VK_OEM_PERIOD: - baseCode = '.' - case vkc == xwindows.VK_OEM_2: - baseCode = '/' - case vkc == xwindows.VK_OEM_3: - baseCode = '`' - case vkc == xwindows.VK_OEM_4: - baseCode = '[' - case vkc == xwindows.VK_OEM_5: - baseCode = '\\' - case vkc == xwindows.VK_OEM_6: - baseCode = ']' - case vkc == xwindows.VK_OEM_7: - baseCode = '\'' - } - - if utf16.IsSurrogate(r) { - if state != nil { - state.utf16Buf[0] = r - state.utf16Half = true - } - return nil - } - - // AltGr is left ctrl + right alt. On non-US keyboards, this is used to type - // special characters and produce printable events. - // XXX: Should this be a KeyMod? - altGr := cks&(xwindows.LEFT_CTRL_PRESSED|xwindows.RIGHT_ALT_PRESSED) == xwindows.LEFT_CTRL_PRESSED|xwindows.RIGHT_ALT_PRESSED - - var text string - keyCode := baseCode - if !unicode.IsControl(r) { - rw := utf8.EncodeRune(utf8Buf[:], r) - keyCode, _ = utf8.DecodeRune(utf8Buf[:rw]) - if unicode.IsPrint(keyCode) && (cks == 0 || - cks == xwindows.SHIFT_PRESSED || - cks == xwindows.CAPSLOCK_ON || - altGr) { - // If the control key state is 0, shift is pressed, or caps lock - // then the key event is a printable event i.e. [text] is not empty. - text = string(keyCode) - } - } - - key.Code = keyCode - key.Text = text - key.Mod = translateControlKeyState(cks) - key.BaseCode = baseCode - key = ensureKeyCase(key, cks) - if keyDown { - return KeyPressEvent(key) - } - - return KeyReleaseEvent(key) -} - -// ensureKeyCase ensures that the key's text is in the correct case based on the -// control key state. 
-func ensureKeyCase(key Key, cks uint32) Key { - if len(key.Text) == 0 { - return key - } - - hasShift := cks&xwindows.SHIFT_PRESSED != 0 - hasCaps := cks&xwindows.CAPSLOCK_ON != 0 - if hasShift || hasCaps { - if unicode.IsLower(key.Code) { - key.ShiftedCode = unicode.ToUpper(key.Code) - key.Text = string(key.ShiftedCode) - } - } else { - if unicode.IsUpper(key.Code) { - key.ShiftedCode = unicode.ToLower(key.Code) - key.Text = string(key.ShiftedCode) - } - } - - return key -} - -// translateControlKeyState translates the control key state from the Windows -// Console API into a Mod bitmask. -func translateControlKeyState(cks uint32) (m KeyMod) { - if cks&xwindows.LEFT_CTRL_PRESSED != 0 || cks&xwindows.RIGHT_CTRL_PRESSED != 0 { - m |= ModCtrl - } - if cks&xwindows.LEFT_ALT_PRESSED != 0 || cks&xwindows.RIGHT_ALT_PRESSED != 0 { - m |= ModAlt - } - if cks&xwindows.SHIFT_PRESSED != 0 { - m |= ModShift - } - if cks&xwindows.CAPSLOCK_ON != 0 { - m |= ModCapsLock - } - if cks&xwindows.NUMLOCK_ON != 0 { - m |= ModNumLock - } - if cks&xwindows.SCROLLLOCK_ON != 0 { - m |= ModScrollLock - } - return -} - -//nolint:unused -func keyEventString(vkc, sc uint16, r rune, keyDown bool, cks uint32, repeatCount uint16) string { - var s strings.Builder - s.WriteString("vkc: ") - s.WriteString(fmt.Sprintf("%d, 0x%02x", vkc, vkc)) - s.WriteString(", sc: ") - s.WriteString(fmt.Sprintf("%d, 0x%02x", sc, sc)) - s.WriteString(", r: ") - s.WriteString(fmt.Sprintf("%q", r)) - s.WriteString(", down: ") - s.WriteString(fmt.Sprintf("%v", keyDown)) - s.WriteString(", cks: [") - if cks&xwindows.LEFT_ALT_PRESSED != 0 { - s.WriteString("left alt, ") - } - if cks&xwindows.RIGHT_ALT_PRESSED != 0 { - s.WriteString("right alt, ") - } - if cks&xwindows.LEFT_CTRL_PRESSED != 0 { - s.WriteString("left ctrl, ") - } - if cks&xwindows.RIGHT_CTRL_PRESSED != 0 { - s.WriteString("right ctrl, ") - } - if cks&xwindows.SHIFT_PRESSED != 0 { - s.WriteString("shift, ") - } - if cks&xwindows.CAPSLOCK_ON != 0 { - s.WriteString("caps lock, ") - } - if cks&xwindows.NUMLOCK_ON != 0 { - s.WriteString("num lock, ") - } - if cks&xwindows.SCROLLLOCK_ON != 0 { - s.WriteString("scroll lock, ") - } - if cks&xwindows.ENHANCED_KEY != 0 { - s.WriteString("enhanced key, ") - } - s.WriteString("], repeat count: ") - s.WriteString(fmt.Sprintf("%d", repeatCount)) - return s.String() -} diff --git a/packages/tui/input/driver_windows_test.go b/packages/tui/input/driver_windows_test.go deleted file mode 100644 index 45371fd1..00000000 --- a/packages/tui/input/driver_windows_test.go +++ /dev/null @@ -1,271 +0,0 @@ -package input - -import ( - "encoding/binary" - "image/color" - "reflect" - "testing" - "unicode/utf16" - - "github.com/charmbracelet/x/ansi" - xwindows "github.com/charmbracelet/x/windows" - "golang.org/x/sys/windows" -) - -func TestWindowsInputEvents(t *testing.T) { - cases := []struct { - name string - events []xwindows.InputRecord - expected []Event - sequence bool // indicates that the input events are ANSI sequence or utf16 - }{ - { - name: "single key event", - events: []xwindows.InputRecord{ - encodeKeyEvent(xwindows.KeyEventRecord{ - KeyDown: true, - Char: 'a', - VirtualKeyCode: 'A', - }), - }, - expected: []Event{KeyPressEvent{Code: 'a', BaseCode: 'a', Text: "a"}}, - }, - { - name: "single key event with control key", - events: []xwindows.InputRecord{ - encodeKeyEvent(xwindows.KeyEventRecord{ - KeyDown: true, - Char: 'a', - VirtualKeyCode: 'A', - ControlKeyState: xwindows.LEFT_CTRL_PRESSED, - }), - }, - expected: []Event{KeyPressEvent{Code: 'a', 
BaseCode: 'a', Mod: ModCtrl}}, - }, - { - name: "escape alt key event", - events: []xwindows.InputRecord{ - encodeKeyEvent(xwindows.KeyEventRecord{ - KeyDown: true, - Char: ansi.ESC, - VirtualKeyCode: ansi.ESC, - ControlKeyState: xwindows.LEFT_ALT_PRESSED, - }), - }, - expected: []Event{KeyPressEvent{Code: ansi.ESC, BaseCode: ansi.ESC, Mod: ModAlt}}, - }, - { - name: "single shifted key event", - events: []xwindows.InputRecord{ - encodeKeyEvent(xwindows.KeyEventRecord{ - KeyDown: true, - Char: 'A', - VirtualKeyCode: 'A', - ControlKeyState: xwindows.SHIFT_PRESSED, - }), - }, - expected: []Event{KeyPressEvent{Code: 'A', BaseCode: 'a', Text: "A", Mod: ModShift}}, - }, - { - name: "utf16 rune", - events: encodeUtf16Rune('😊'), // smiley emoji '😊' - expected: []Event{ - KeyPressEvent{Code: '😊', Text: "😊"}, - }, - sequence: true, - }, - { - name: "background color response", - events: encodeSequence("\x1b]11;rgb:ff/ff/ff\x07"), - expected: []Event{BackgroundColorEvent{Color: color.RGBA{R: 0xff, G: 0xff, B: 0xff, A: 0xff}}}, - sequence: true, - }, - { - name: "st terminated background color response", - events: encodeSequence("\x1b]11;rgb:ffff/ffff/ffff\x1b\\"), - expected: []Event{BackgroundColorEvent{Color: color.RGBA{R: 0xff, G: 0xff, B: 0xff, A: 0xff}}}, - sequence: true, - }, - { - name: "simple mouse event", - events: []xwindows.InputRecord{ - encodeMouseEvent(xwindows.MouseEventRecord{ - MousePositon: windows.Coord{X: 10, Y: 20}, - ButtonState: xwindows.FROM_LEFT_1ST_BUTTON_PRESSED, - EventFlags: 0, - }), - encodeMouseEvent(xwindows.MouseEventRecord{ - MousePositon: windows.Coord{X: 10, Y: 20}, - EventFlags: 0, - }), - }, - expected: []Event{ - MouseClickEvent{Button: MouseLeft, X: 10, Y: 20}, - MouseReleaseEvent{Button: MouseLeft, X: 10, Y: 20}, - }, - }, - { - name: "focus event", - events: []xwindows.InputRecord{ - encodeFocusEvent(xwindows.FocusEventRecord{ - SetFocus: true, - }), - encodeFocusEvent(xwindows.FocusEventRecord{ - SetFocus: false, - }), - }, - expected: []Event{ - FocusEvent{}, - BlurEvent{}, - }, - }, - { - name: "window size event", - events: []xwindows.InputRecord{ - encodeWindowBufferSizeEvent(xwindows.WindowBufferSizeRecord{ - Size: windows.Coord{X: 10, Y: 20}, - }), - }, - expected: []Event{ - WindowSizeEvent{Width: 10, Height: 20}, - }, - }, - } - - // p is the parser to parse the input events - var p Parser - - // keep track of the state of the driver to handle ANSI sequences and utf16 - var state win32InputState - for _, tc := range cases { - t.Run(tc.name, func(t *testing.T) { - if tc.sequence { - var Event Event - for _, ev := range tc.events { - if ev.EventType != xwindows.KEY_EVENT { - t.Fatalf("expected key event, got %v", ev.EventType) - } - - key := ev.KeyEvent() - Event = p.parseWin32InputKeyEvent(&state, key.VirtualKeyCode, key.VirtualScanCode, key.Char, key.KeyDown, key.ControlKeyState, key.RepeatCount) - } - if len(tc.expected) != 1 { - t.Fatalf("expected 1 event, got %d", len(tc.expected)) - } - if !reflect.DeepEqual(Event, tc.expected[0]) { - t.Errorf("expected %v, got %v", tc.expected[0], Event) - } - } else { - if len(tc.events) != len(tc.expected) { - t.Fatalf("expected %d events, got %d", len(tc.expected), len(tc.events)) - } - for j, ev := range tc.events { - Event := p.parseConInputEvent(ev, &state) - if !reflect.DeepEqual(Event, tc.expected[j]) { - t.Errorf("expected %#v, got %#v", tc.expected[j], Event) - } - } - } - }) - } -} - -func boolToUint32(b bool) uint32 { - if b { - return 1 - } - return 0 -} - -func encodeMenuEvent(menu 
xwindows.MenuEventRecord) xwindows.InputRecord { - var bts [16]byte - binary.LittleEndian.PutUint32(bts[0:4], menu.CommandID) - return xwindows.InputRecord{ - EventType: xwindows.MENU_EVENT, - Event: bts, - } -} - -func encodeWindowBufferSizeEvent(size xwindows.WindowBufferSizeRecord) xwindows.InputRecord { - var bts [16]byte - binary.LittleEndian.PutUint16(bts[0:2], uint16(size.Size.X)) - binary.LittleEndian.PutUint16(bts[2:4], uint16(size.Size.Y)) - return xwindows.InputRecord{ - EventType: xwindows.WINDOW_BUFFER_SIZE_EVENT, - Event: bts, - } -} - -func encodeFocusEvent(focus xwindows.FocusEventRecord) xwindows.InputRecord { - var bts [16]byte - if focus.SetFocus { - bts[0] = 1 - } - return xwindows.InputRecord{ - EventType: xwindows.FOCUS_EVENT, - Event: bts, - } -} - -func encodeMouseEvent(mouse xwindows.MouseEventRecord) xwindows.InputRecord { - var bts [16]byte - binary.LittleEndian.PutUint16(bts[0:2], uint16(mouse.MousePositon.X)) - binary.LittleEndian.PutUint16(bts[2:4], uint16(mouse.MousePositon.Y)) - binary.LittleEndian.PutUint32(bts[4:8], mouse.ButtonState) - binary.LittleEndian.PutUint32(bts[8:12], mouse.ControlKeyState) - binary.LittleEndian.PutUint32(bts[12:16], mouse.EventFlags) - return xwindows.InputRecord{ - EventType: xwindows.MOUSE_EVENT, - Event: bts, - } -} - -func encodeKeyEvent(key xwindows.KeyEventRecord) xwindows.InputRecord { - var bts [16]byte - binary.LittleEndian.PutUint32(bts[0:4], boolToUint32(key.KeyDown)) - binary.LittleEndian.PutUint16(bts[4:6], key.RepeatCount) - binary.LittleEndian.PutUint16(bts[6:8], key.VirtualKeyCode) - binary.LittleEndian.PutUint16(bts[8:10], key.VirtualScanCode) - binary.LittleEndian.PutUint16(bts[10:12], uint16(key.Char)) - binary.LittleEndian.PutUint32(bts[12:16], key.ControlKeyState) - return xwindows.InputRecord{ - EventType: xwindows.KEY_EVENT, - Event: bts, - } -} - -// encodeSequence encodes a string of ANSI escape sequences into a slice of -// Windows input key records. -func encodeSequence(s string) (evs []xwindows.InputRecord) { - var state byte - for len(s) > 0 { - seq, _, n, newState := ansi.DecodeSequence(s, state, nil) - for i := 0; i < n; i++ { - evs = append(evs, encodeKeyEvent(xwindows.KeyEventRecord{ - KeyDown: true, - Char: rune(seq[i]), - })) - } - state = newState - s = s[n:] - } - return -} - -func encodeUtf16Rune(r rune) []xwindows.InputRecord { - r1, r2 := utf16.EncodeRune(r) - return encodeUtf16Pair(r1, r2) -} - -func encodeUtf16Pair(r1, r2 rune) []xwindows.InputRecord { - return []xwindows.InputRecord{ - encodeKeyEvent(xwindows.KeyEventRecord{ - KeyDown: true, - Char: r1, - }), - encodeKeyEvent(xwindows.KeyEventRecord{ - KeyDown: true, - Char: r2, - }), - } -} diff --git a/packages/tui/input/focus.go b/packages/tui/input/focus.go deleted file mode 100644 index 796d95f6..00000000 --- a/packages/tui/input/focus.go +++ /dev/null @@ -1,9 +0,0 @@ -package input - -// FocusEvent represents a terminal focus event. -// This occurs when the terminal gains focus. -type FocusEvent struct{} - -// BlurEvent represents a terminal blur event. -// This occurs when the terminal loses focus. 
-type BlurEvent struct{} diff --git a/packages/tui/input/focus_test.go b/packages/tui/input/focus_test.go deleted file mode 100644 index 2d35e476..00000000 --- a/packages/tui/input/focus_test.go +++ /dev/null @@ -1,27 +0,0 @@ -package input - -import ( - "testing" -) - -func TestFocus(t *testing.T) { - var p Parser - _, e := p.parseSequence([]byte("\x1b[I")) - switch e.(type) { - case FocusEvent: - // ok - default: - t.Error("invalid sequence") - } -} - -func TestBlur(t *testing.T) { - var p Parser - _, e := p.parseSequence([]byte("\x1b[O")) - switch e.(type) { - case BlurEvent: - // ok - default: - t.Error("invalid sequence") - } -} diff --git a/packages/tui/input/go.mod b/packages/tui/input/go.mod deleted file mode 100644 index 36a9a92a..00000000 --- a/packages/tui/input/go.mod +++ /dev/null @@ -1,18 +0,0 @@ -module github.com/charmbracelet/x/input - -go 1.23.0 - -require ( - github.com/charmbracelet/x/ansi v0.9.3 - github.com/charmbracelet/x/windows v0.2.1 - github.com/muesli/cancelreader v0.2.2 - github.com/rivo/uniseg v0.4.7 - github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e - golang.org/x/sys v0.33.0 -) - -require ( - github.com/lucasb-eyer/go-colorful v1.2.0 // indirect - github.com/mattn/go-runewidth v0.0.16 // indirect - golang.org/x/exp v0.0.0-20231006140011-7918f672742d // indirect -) diff --git a/packages/tui/input/go.sum b/packages/tui/input/go.sum deleted file mode 100644 index 7bc7a2eb..00000000 --- a/packages/tui/input/go.sum +++ /dev/null @@ -1,19 +0,0 @@ -github.com/charmbracelet/x/ansi v0.9.3 h1:BXt5DHS/MKF+LjuK4huWrC6NCvHtexww7dMayh6GXd0= -github.com/charmbracelet/x/ansi v0.9.3/go.mod h1:3RQDQ6lDnROptfpWuUVIUG64bD2g2BgntdxH0Ya5TeE= -github.com/charmbracelet/x/windows v0.2.1 h1:3x7vnbpQrjpuq/4L+I4gNsG5htYoCiA5oe9hLjAij5I= -github.com/charmbracelet/x/windows v0.2.1/go.mod h1:ptZp16h40gDYqs5TSawSVW+yiLB13j4kSMA0lSCHL0M= -github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY= -github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0= -github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc= -github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= -github.com/muesli/cancelreader v0.2.2 h1:3I4Kt4BQjOR54NavqnDogx/MIoWBFa0StPA8ELUXHmA= -github.com/muesli/cancelreader v0.2.2/go.mod h1:3XuTXfFS2VjM+HTLZY9Ak0l6eUKfijIfMUZ4EgX0QYo= -github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= -github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= -github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= -github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no= -github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM= -golang.org/x/exp v0.0.0-20231006140011-7918f672742d h1:jtJma62tbqLibJ5sFQz8bKtEM8rJBtfilJ2qTU199MI= -golang.org/x/exp v0.0.0-20231006140011-7918f672742d/go.mod h1:ldy0pHrwJyGW56pPQzzkH36rKxoZW1tw7ZJpeKx+hdo= -golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= -golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= diff --git a/packages/tui/input/input.go b/packages/tui/input/input.go deleted file mode 100644 index da5e4f0b..00000000 --- a/packages/tui/input/input.go +++ /dev/null @@ -1,45 +0,0 @@ -package input - -import ( - "fmt" - "strings" -) - -// Event represents a terminal event. 
-type Event any - -// UnknownEvent represents an unknown event. -type UnknownEvent string - -// String returns a string representation of the unknown event. -func (e UnknownEvent) String() string { - return fmt.Sprintf("%q", string(e)) -} - -// MultiEvent represents multiple messages event. -type MultiEvent []Event - -// String returns a string representation of the multiple messages event. -func (e MultiEvent) String() string { - var sb strings.Builder - for _, ev := range e { - sb.WriteString(fmt.Sprintf("%v\n", ev)) - } - return sb.String() -} - -// WindowSizeEvent is used to report the terminal size. Note that Windows does -// not have support for reporting resizes via SIGWINCH signals and relies on -// the Windows Console API to report window size changes. -type WindowSizeEvent struct { - Width int - Height int -} - -// WindowOpEvent is a window operation (XTWINOPS) report event. This is used to -// report various window operations such as reporting the window size or cell -// size. -type WindowOpEvent struct { - Op int - Args []int -} diff --git a/packages/tui/input/key.go b/packages/tui/input/key.go deleted file mode 100644 index 8d3e3ebe..00000000 --- a/packages/tui/input/key.go +++ /dev/null @@ -1,574 +0,0 @@ -package input - -import ( - "fmt" - "strings" - "unicode" - - "github.com/charmbracelet/x/ansi" -) - -const ( - // KeyExtended is a special key code used to signify that a key event - // contains multiple runes. - KeyExtended = unicode.MaxRune + 1 -) - -// Special key symbols. -const ( - - // Special keys. - - KeyUp rune = KeyExtended + iota + 1 - KeyDown - KeyRight - KeyLeft - KeyBegin - KeyFind - KeyInsert - KeyDelete - KeySelect - KeyPgUp - KeyPgDown - KeyHome - KeyEnd - - // Keypad keys. - - KeyKpEnter - KeyKpEqual - KeyKpMultiply - KeyKpPlus - KeyKpComma - KeyKpMinus - KeyKpDecimal - KeyKpDivide - KeyKp0 - KeyKp1 - KeyKp2 - KeyKp3 - KeyKp4 - KeyKp5 - KeyKp6 - KeyKp7 - KeyKp8 - KeyKp9 - - //nolint:godox - // The following are keys defined in the Kitty keyboard protocol. - // TODO: Investigate the names of these keys. - - KeyKpSep - KeyKpUp - KeyKpDown - KeyKpLeft - KeyKpRight - KeyKpPgUp - KeyKpPgDown - KeyKpHome - KeyKpEnd - KeyKpInsert - KeyKpDelete - KeyKpBegin - - // Function keys. - - KeyF1 - KeyF2 - KeyF3 - KeyF4 - KeyF5 - KeyF6 - KeyF7 - KeyF8 - KeyF9 - KeyF10 - KeyF11 - KeyF12 - KeyF13 - KeyF14 - KeyF15 - KeyF16 - KeyF17 - KeyF18 - KeyF19 - KeyF20 - KeyF21 - KeyF22 - KeyF23 - KeyF24 - KeyF25 - KeyF26 - KeyF27 - KeyF28 - KeyF29 - KeyF30 - KeyF31 - KeyF32 - KeyF33 - KeyF34 - KeyF35 - KeyF36 - KeyF37 - KeyF38 - KeyF39 - KeyF40 - KeyF41 - KeyF42 - KeyF43 - KeyF44 - KeyF45 - KeyF46 - KeyF47 - KeyF48 - KeyF49 - KeyF50 - KeyF51 - KeyF52 - KeyF53 - KeyF54 - KeyF55 - KeyF56 - KeyF57 - KeyF58 - KeyF59 - KeyF60 - KeyF61 - KeyF62 - KeyF63 - - //nolint:godox - // The following are keys defined in the Kitty keyboard protocol. - // TODO: Investigate the names of these keys. - - KeyCapsLock - KeyScrollLock - KeyNumLock - KeyPrintScreen - KeyPause - KeyMenu - - KeyMediaPlay - KeyMediaPause - KeyMediaPlayPause - KeyMediaReverse - KeyMediaStop - KeyMediaFastForward - KeyMediaRewind - KeyMediaNext - KeyMediaPrev - KeyMediaRecord - - KeyLowerVol - KeyRaiseVol - KeyMute - - KeyLeftShift - KeyLeftAlt - KeyLeftCtrl - KeyLeftSuper - KeyLeftHyper - KeyLeftMeta - KeyRightShift - KeyRightAlt - KeyRightCtrl - KeyRightSuper - KeyRightHyper - KeyRightMeta - KeyIsoLevel3Shift - KeyIsoLevel5Shift - - // Special names in C0. 
- - KeyBackspace = rune(ansi.DEL) - KeyTab = rune(ansi.HT) - KeyEnter = rune(ansi.CR) - KeyReturn = KeyEnter - KeyEscape = rune(ansi.ESC) - KeyEsc = KeyEscape - - // Special names in G0. - - KeySpace = rune(ansi.SP) -) - -// KeyPressEvent represents a key press event. -type KeyPressEvent Key - -// String implements [fmt.Stringer] and is quite useful for matching key -// events. For details, on what this returns see [Key.String]. -func (k KeyPressEvent) String() string { - return Key(k).String() -} - -// Keystroke returns the keystroke representation of the [Key]. While less type -// safe than looking at the individual fields, it will usually be more -// convenient and readable to use this method when matching against keys. -// -// Note that modifier keys are always printed in the following order: -// - ctrl -// - alt -// - shift -// - meta -// - hyper -// - super -// -// For example, you'll always see "ctrl+shift+alt+a" and never -// "shift+ctrl+alt+a". -func (k KeyPressEvent) Keystroke() string { - return Key(k).Keystroke() -} - -// Key returns the underlying key event. This is a syntactic sugar for casting -// the key event to a [Key]. -func (k KeyPressEvent) Key() Key { - return Key(k) -} - -// KeyReleaseEvent represents a key release event. -type KeyReleaseEvent Key - -// String implements [fmt.Stringer] and is quite useful for matching key -// events. For details, on what this returns see [Key.String]. -func (k KeyReleaseEvent) String() string { - return Key(k).String() -} - -// Keystroke returns the keystroke representation of the [Key]. While less type -// safe than looking at the individual fields, it will usually be more -// convenient and readable to use this method when matching against keys. -// -// Note that modifier keys are always printed in the following order: -// - ctrl -// - alt -// - shift -// - meta -// - hyper -// - super -// -// For example, you'll always see "ctrl+shift+alt+a" and never -// "shift+ctrl+alt+a". -func (k KeyReleaseEvent) Keystroke() string { - return Key(k).Keystroke() -} - -// Key returns the underlying key event. This is a convenience method and -// syntactic sugar to satisfy the [KeyEvent] interface, and cast the key event to -// [Key]. -func (k KeyReleaseEvent) Key() Key { - return Key(k) -} - -// KeyEvent represents a key event. This can be either a key press or a key -// release event. -type KeyEvent interface { - fmt.Stringer - - // Key returns the underlying key event. - Key() Key -} - -// Key represents a Key press or release event. It contains information about -// the Key pressed, like the runes, the type of Key, and the modifiers pressed. -// There are a couple general patterns you could use to check for key presses -// or releases: -// -// // Switch on the string representation of the key (shorter) -// switch ev := ev.(type) { -// case KeyPressEvent: -// switch ev.String() { -// case "enter": -// fmt.Println("you pressed enter!") -// case "a": -// fmt.Println("you pressed a!") -// } -// } -// -// // Switch on the key type (more foolproof) -// switch ev := ev.(type) { -// case KeyEvent: -// // catch both KeyPressEvent and KeyReleaseEvent -// switch key := ev.Key(); key.Code { -// case KeyEnter: -// fmt.Println("you pressed enter!") -// default: -// switch key.Text { -// case "a": -// fmt.Println("you pressed a!") -// } -// } -// } -// -// Note that [Key.Text] will be empty for special keys like [KeyEnter], -// [KeyTab], and for keys that don't represent printable characters like key -// combos with modifier keys. 
In other words, [Key.Text] is populated only for -// keys that represent printable characters shifted or unshifted (like 'a', -// 'A', '1', '!', etc.). -type Key struct { - // Text contains the actual characters received. This is usually the same as - // [Key.Code]. When [Key.Text] is non-empty, it indicates that the key - // pressed represents printable character(s). - Text string - - // Mod represents modifier keys, like [ModCtrl], [ModAlt], and so on. - Mod KeyMod - - // Code represents the key pressed. This is usually a special key like - // [KeyTab], [KeyEnter], [KeyF1], or a printable character like 'a'. - Code rune - - // ShiftedCode is the actual, shifted key pressed by the user. For example, - // if the user presses shift+a, or caps lock is on, [Key.ShiftedCode] will - // be 'A' and [Key.Code] will be 'a'. - // - // In the case of non-latin keyboards, like Arabic, [Key.ShiftedCode] is the - // unshifted key on the keyboard. - // - // This is only available with the Kitty Keyboard Protocol or the Windows - // Console API. - ShiftedCode rune - - // BaseCode is the key pressed according to the standard PC-101 key layout. - // On international keyboards, this is the key that would be pressed if the - // keyboard was set to US PC-101 layout. - // - // For example, if the user presses 'q' on a French AZERTY keyboard, - // [Key.BaseCode] will be 'q'. - // - // This is only available with the Kitty Keyboard Protocol or the Windows - // Console API. - BaseCode rune - - // IsRepeat indicates whether the key is being held down and sending events - // repeatedly. - // - // This is only available with the Kitty Keyboard Protocol or the Windows - // Console API. - IsRepeat bool -} - -// String implements [fmt.Stringer] and is quite useful for matching key -// events. It will return the textual representation of the [Key] if there is -// one; otherwise, it will fall back to [Key.Keystroke]. -// -// For example, you'll always get "?" instead of "shift+/" on a US ANSI -// keyboard. -func (k Key) String() string { - if len(k.Text) > 0 && k.Text != " " { - return k.Text - } - return k.Keystroke() -} - -// Keystroke returns the keystroke representation of the [Key]. While less type -// safe than looking at the individual fields, it will usually be more -// convenient and readable to use this method when matching against keys. -// -// Note that modifier keys are always printed in the following order: -// - ctrl -// - alt -// - shift -// - meta -// - hyper -// - super -// -// For example, you'll always see "ctrl+shift+alt+a" and never -// "shift+ctrl+alt+a". -func (k Key) Keystroke() string { - var sb strings.Builder - if k.Mod.Contains(ModCtrl) && k.Code != KeyLeftCtrl && k.Code != KeyRightCtrl { - sb.WriteString("ctrl+") - } - if k.Mod.Contains(ModAlt) && k.Code != KeyLeftAlt && k.Code != KeyRightAlt { - sb.WriteString("alt+") - } - if k.Mod.Contains(ModShift) && k.Code != KeyLeftShift && k.Code != KeyRightShift { - sb.WriteString("shift+") - } - if k.Mod.Contains(ModMeta) && k.Code != KeyLeftMeta && k.Code != KeyRightMeta { - sb.WriteString("meta+") - } - if k.Mod.Contains(ModHyper) && k.Code != KeyLeftHyper && k.Code != KeyRightHyper { - sb.WriteString("hyper+") - } - if k.Mod.Contains(ModSuper) && k.Code != KeyLeftSuper && k.Code != KeyRightSuper { - sb.WriteString("super+") - } - - if kt, ok := keyTypeString[k.Code]; ok { - sb.WriteString(kt) - } else { - code := k.Code - if k.BaseCode != 0 { - // If a [Key.BaseCode] is present, use it to represent a key using the standard - // PC-101 key layout.
- code = k.BaseCode - } - - switch code { - case KeySpace: - // Space is the only invisible printable character. - sb.WriteString("space") - case KeyExtended: - // Write the actual text of the key when the key contains multiple - // runes. - sb.WriteString(k.Text) - default: - sb.WriteRune(code) - } - } - - return sb.String() -} - -var keyTypeString = map[rune]string{ - KeyEnter: "enter", - KeyTab: "tab", - KeyBackspace: "backspace", - KeyEscape: "esc", - KeySpace: "space", - KeyUp: "up", - KeyDown: "down", - KeyLeft: "left", - KeyRight: "right", - KeyBegin: "begin", - KeyFind: "find", - KeyInsert: "insert", - KeyDelete: "delete", - KeySelect: "select", - KeyPgUp: "pgup", - KeyPgDown: "pgdown", - KeyHome: "home", - KeyEnd: "end", - KeyKpEnter: "kpenter", - KeyKpEqual: "kpequal", - KeyKpMultiply: "kpmul", - KeyKpPlus: "kpplus", - KeyKpComma: "kpcomma", - KeyKpMinus: "kpminus", - KeyKpDecimal: "kpperiod", - KeyKpDivide: "kpdiv", - KeyKp0: "kp0", - KeyKp1: "kp1", - KeyKp2: "kp2", - KeyKp3: "kp3", - KeyKp4: "kp4", - KeyKp5: "kp5", - KeyKp6: "kp6", - KeyKp7: "kp7", - KeyKp8: "kp8", - KeyKp9: "kp9", - - // Kitty keyboard extension - KeyKpSep: "kpsep", - KeyKpUp: "kpup", - KeyKpDown: "kpdown", - KeyKpLeft: "kpleft", - KeyKpRight: "kpright", - KeyKpPgUp: "kppgup", - KeyKpPgDown: "kppgdown", - KeyKpHome: "kphome", - KeyKpEnd: "kpend", - KeyKpInsert: "kpinsert", - KeyKpDelete: "kpdelete", - KeyKpBegin: "kpbegin", - - KeyF1: "f1", - KeyF2: "f2", - KeyF3: "f3", - KeyF4: "f4", - KeyF5: "f5", - KeyF6: "f6", - KeyF7: "f7", - KeyF8: "f8", - KeyF9: "f9", - KeyF10: "f10", - KeyF11: "f11", - KeyF12: "f12", - KeyF13: "f13", - KeyF14: "f14", - KeyF15: "f15", - KeyF16: "f16", - KeyF17: "f17", - KeyF18: "f18", - KeyF19: "f19", - KeyF20: "f20", - KeyF21: "f21", - KeyF22: "f22", - KeyF23: "f23", - KeyF24: "f24", - KeyF25: "f25", - KeyF26: "f26", - KeyF27: "f27", - KeyF28: "f28", - KeyF29: "f29", - KeyF30: "f30", - KeyF31: "f31", - KeyF32: "f32", - KeyF33: "f33", - KeyF34: "f34", - KeyF35: "f35", - KeyF36: "f36", - KeyF37: "f37", - KeyF38: "f38", - KeyF39: "f39", - KeyF40: "f40", - KeyF41: "f41", - KeyF42: "f42", - KeyF43: "f43", - KeyF44: "f44", - KeyF45: "f45", - KeyF46: "f46", - KeyF47: "f47", - KeyF48: "f48", - KeyF49: "f49", - KeyF50: "f50", - KeyF51: "f51", - KeyF52: "f52", - KeyF53: "f53", - KeyF54: "f54", - KeyF55: "f55", - KeyF56: "f56", - KeyF57: "f57", - KeyF58: "f58", - KeyF59: "f59", - KeyF60: "f60", - KeyF61: "f61", - KeyF62: "f62", - KeyF63: "f63", - - // Kitty keyboard extension - KeyCapsLock: "capslock", - KeyScrollLock: "scrolllock", - KeyNumLock: "numlock", - KeyPrintScreen: "printscreen", - KeyPause: "pause", - KeyMenu: "menu", - KeyMediaPlay: "mediaplay", - KeyMediaPause: "mediapause", - KeyMediaPlayPause: "mediaplaypause", - KeyMediaReverse: "mediareverse", - KeyMediaStop: "mediastop", - KeyMediaFastForward: "mediafastforward", - KeyMediaRewind: "mediarewind", - KeyMediaNext: "medianext", - KeyMediaPrev: "mediaprev", - KeyMediaRecord: "mediarecord", - KeyLowerVol: "lowervol", - KeyRaiseVol: "raisevol", - KeyMute: "mute", - KeyLeftShift: "leftshift", - KeyLeftAlt: "leftalt", - KeyLeftCtrl: "leftctrl", - KeyLeftSuper: "leftsuper", - KeyLeftHyper: "lefthyper", - KeyLeftMeta: "leftmeta", - KeyRightShift: "rightshift", - KeyRightAlt: "rightalt", - KeyRightCtrl: "rightctrl", - KeyRightSuper: "rightsuper", - KeyRightHyper: "righthyper", - KeyRightMeta: "rightmeta", - KeyIsoLevel3Shift: "isolevel3shift", - KeyIsoLevel5Shift: "isolevel5shift", -} diff --git a/packages/tui/input/key_test.go 
b/packages/tui/input/key_test.go deleted file mode 100644 index 9bf4d9a5..00000000 --- a/packages/tui/input/key_test.go +++ /dev/null @@ -1,880 +0,0 @@ -package input - -import ( - "bytes" - "context" - "errors" - "flag" - "fmt" - "image/color" - "io" - "math/rand" - "reflect" - "regexp" - "runtime" - "sort" - "strings" - "sync" - "testing" - "time" - - "github.com/charmbracelet/x/ansi" - "github.com/charmbracelet/x/ansi/kitty" -) - -var sequences = buildKeysTable(FlagTerminfo, "dumb") - -func TestKeyString(t *testing.T) { - t.Run("alt+space", func(t *testing.T) { - k := KeyPressEvent{Code: KeySpace, Mod: ModAlt} - if got := k.String(); got != "alt+space" { - t.Fatalf(`expected a "alt+space", got %q`, got) - } - }) - - t.Run("runes", func(t *testing.T) { - k := KeyPressEvent{Code: 'a', Text: "a"} - if got := k.String(); got != "a" { - t.Fatalf(`expected an "a", got %q`, got) - } - }) - - t.Run("invalid", func(t *testing.T) { - k := KeyPressEvent{Code: 99999} - if got := k.String(); got != "𘚟" { - t.Fatalf(`expected a "unknown", got %q`, got) - } - }) - - t.Run("space", func(t *testing.T) { - k := KeyPressEvent{Code: KeySpace, Text: " "} - if got := k.String(); got != "space" { - t.Fatalf(`expected a "space", got %q`, got) - } - }) - - t.Run("shift+space", func(t *testing.T) { - k := KeyPressEvent{Code: KeySpace, Mod: ModShift} - if got := k.String(); got != "shift+space" { - t.Fatalf(`expected a "shift+space", got %q`, got) - } - }) - - t.Run("?", func(t *testing.T) { - k := KeyPressEvent{Code: '/', Mod: ModShift, Text: "?"} - if got := k.String(); got != "?" { - t.Fatalf(`expected a "?", got %q`, got) - } - }) -} - -type seqTest struct { - seq []byte - Events []Event -} - -var f3CurPosRegexp = regexp.MustCompile(`\x1b\[1;(\d+)R`) - -// buildBaseSeqTests returns sequence tests that are valid for the -// detectSequence() function. -func buildBaseSeqTests() []seqTest { - td := []seqTest{} - for seq, key := range sequences { - k := KeyPressEvent(key) - st := seqTest{seq: []byte(seq), Events: []Event{k}} - - // XXX: This is a special case to handle F3 key sequence and cursor - // position report having the same sequence. See [parseCsi] for more - // information. - if f3CurPosRegexp.MatchString(seq) { - st.Events = []Event{k, CursorPositionEvent{Y: 0, X: int(key.Mod)}} - } - td = append(td, st) - } - - // Additional special cases. - td = append(td, - // Unrecognized CSI sequence. - seqTest{ - []byte{'\x1b', '[', '-', '-', '-', '-', 'X'}, - []Event{ - UnknownEvent([]byte{'\x1b', '[', '-', '-', '-', '-', 'X'}), - }, - }, - // A lone space character. - seqTest{ - []byte{' '}, - []Event{ - KeyPressEvent{Code: KeySpace, Text: " "}, - }, - }, - // An escape character with the alt modifier. - seqTest{ - []byte{'\x1b', ' '}, - []Event{ - KeyPressEvent{Code: KeySpace, Mod: ModAlt}, - }, - }, - ) - return td -} - -func TestParseSequence(t *testing.T) { - td := buildBaseSeqTests() - td = append(td, - // Background color. - seqTest{ - []byte("\x1b]11;rgb:1234/1234/1234\x07"), - []Event{BackgroundColorEvent{ - Color: color.RGBA{R: 0x12, G: 0x12, B: 0x12, A: 0xff}, - }}, - }, - seqTest{ - []byte("\x1b]11;rgb:1234/1234/1234\x1b\\"), - []Event{BackgroundColorEvent{ - Color: color.RGBA{R: 0x12, G: 0x12, B: 0x12, A: 0xff}, - }}, - }, - seqTest{ - []byte("\x1b]11;rgb:1234/1234/1234\x1b"), // Incomplete sequences are ignored. - []Event{ - UnknownEvent("\x1b]11;rgb:1234/1234/1234\x1b"), - }, - }, - - // Kitty Graphics response. 
- seqTest{ - []byte("\x1b_Ga=t;OK\x1b\\"), - []Event{KittyGraphicsEvent{ - Options: kitty.Options{Action: kitty.Transmit}, - Payload: []byte("OK"), - }}, - }, - seqTest{ - []byte("\x1b_Gi=99,I=13;OK\x1b\\"), - []Event{KittyGraphicsEvent{ - Options: kitty.Options{ID: 99, Number: 13}, - Payload: []byte("OK"), - }}, - }, - seqTest{ - []byte("\x1b_Gi=1337,q=1;EINVAL:your face\x1b\\"), - []Event{KittyGraphicsEvent{ - Options: kitty.Options{ID: 1337, Quite: 1}, - Payload: []byte("EINVAL:your face"), - }}, - }, - - // Xterm modifyOtherKeys CSI 27 ; ; ~ - seqTest{ - []byte("\x1b[27;3;20320~"), - []Event{KeyPressEvent{Code: '你', Mod: ModAlt}}, - }, - seqTest{ - []byte("\x1b[27;3;65~"), - []Event{KeyPressEvent{Code: 'A', Mod: ModAlt}}, - }, - seqTest{ - []byte("\x1b[27;3;8~"), - []Event{KeyPressEvent{Code: KeyBackspace, Mod: ModAlt}}, - }, - seqTest{ - []byte("\x1b[27;3;27~"), - []Event{KeyPressEvent{Code: KeyEscape, Mod: ModAlt}}, - }, - seqTest{ - []byte("\x1b[27;3;127~"), - []Event{KeyPressEvent{Code: KeyBackspace, Mod: ModAlt}}, - }, - - // Xterm report window text area size. - seqTest{ - []byte("\x1b[4;24;80t"), - []Event{ - WindowOpEvent{Op: 4, Args: []int{24, 80}}, - }, - }, - - // Kitty keyboard / CSI u (fixterms) - seqTest{ - []byte("\x1b[1B"), - []Event{KeyPressEvent{Code: KeyDown}}, - }, - seqTest{ - []byte("\x1b[1;B"), - []Event{KeyPressEvent{Code: KeyDown}}, - }, - seqTest{ - []byte("\x1b[1;4B"), - []Event{KeyPressEvent{Mod: ModShift | ModAlt, Code: KeyDown}}, - }, - seqTest{ - []byte("\x1b[1;4:1B"), - []Event{KeyPressEvent{Mod: ModShift | ModAlt, Code: KeyDown}}, - }, - seqTest{ - []byte("\x1b[1;4:2B"), - []Event{KeyPressEvent{Mod: ModShift | ModAlt, Code: KeyDown, IsRepeat: true}}, - }, - seqTest{ - []byte("\x1b[1;4:3B"), - []Event{KeyReleaseEvent{Mod: ModShift | ModAlt, Code: KeyDown}}, - }, - seqTest{ - []byte("\x1b[8~"), - []Event{KeyPressEvent{Code: KeyEnd}}, - }, - seqTest{ - []byte("\x1b[8;~"), - []Event{KeyPressEvent{Code: KeyEnd}}, - }, - seqTest{ - []byte("\x1b[8;10~"), - []Event{KeyPressEvent{Mod: ModShift | ModMeta, Code: KeyEnd}}, - }, - seqTest{ - []byte("\x1b[27;4u"), - []Event{KeyPressEvent{Mod: ModShift | ModAlt, Code: KeyEscape}}, - }, - seqTest{ - []byte("\x1b[127;4u"), - []Event{KeyPressEvent{Mod: ModShift | ModAlt, Code: KeyBackspace}}, - }, - seqTest{ - []byte("\x1b[57358;4u"), - []Event{KeyPressEvent{Mod: ModShift | ModAlt, Code: KeyCapsLock}}, - }, - seqTest{ - []byte("\x1b[9;2u"), - []Event{KeyPressEvent{Mod: ModShift, Code: KeyTab}}, - }, - seqTest{ - []byte("\x1b[195;u"), - []Event{KeyPressEvent{Text: "Ã", Code: 'Ã'}}, - }, - seqTest{ - []byte("\x1b[20320;2u"), - []Event{KeyPressEvent{Text: "你", Mod: ModShift, Code: '你'}}, - }, - seqTest{ - []byte("\x1b[195;:1u"), - []Event{KeyPressEvent{Text: "Ã", Code: 'Ã'}}, - }, - seqTest{ - []byte("\x1b[195;2:3u"), - []Event{KeyReleaseEvent{Code: 'Ã', Text: "Ã", Mod: ModShift}}, - }, - seqTest{ - []byte("\x1b[195;2:2u"), - []Event{KeyPressEvent{Code: 'Ã', Text: "Ã", IsRepeat: true, Mod: ModShift}}, - }, - seqTest{ - []byte("\x1b[195;2:1u"), - []Event{KeyPressEvent{Code: 'Ã', Text: "Ã", Mod: ModShift}}, - }, - seqTest{ - []byte("\x1b[195;2:3u"), - []Event{KeyReleaseEvent{Code: 'Ã', Text: "Ã", Mod: ModShift}}, - }, - seqTest{ - []byte("\x1b[97;2;65u"), - []Event{KeyPressEvent{Code: 'a', Text: "A", Mod: ModShift}}, - }, - seqTest{ - []byte("\x1b[97;;229u"), - []Event{KeyPressEvent{Code: 'a', Text: "å"}}, - }, - - // focus/blur - seqTest{ - []byte{'\x1b', '[', 'I'}, - []Event{ - FocusEvent{}, - }, - }, - seqTest{ - 
[]byte{'\x1b', '[', 'O'}, - []Event{ - BlurEvent{}, - }, - }, - // Mouse event. - seqTest{ - []byte{'\x1b', '[', 'M', byte(32) + 0b0100_0000, byte(65), byte(49)}, - []Event{ - MouseWheelEvent{X: 32, Y: 16, Button: MouseWheelUp}, - }, - }, - // SGR Mouse event. - seqTest{ - []byte("\x1b[<0;33;17M"), - []Event{ - MouseClickEvent{X: 32, Y: 16, Button: MouseLeft}, - }, - }, - // Runes. - seqTest{ - []byte{'a'}, - []Event{ - KeyPressEvent{Code: 'a', Text: "a"}, - }, - }, - seqTest{ - []byte{'\x1b', 'a'}, - []Event{ - KeyPressEvent{Code: 'a', Mod: ModAlt}, - }, - }, - seqTest{ - []byte{'a', 'a', 'a'}, - []Event{ - KeyPressEvent{Code: 'a', Text: "a"}, - KeyPressEvent{Code: 'a', Text: "a"}, - KeyPressEvent{Code: 'a', Text: "a"}, - }, - }, - // Multi-byte rune. - seqTest{ - []byte("☃"), - []Event{ - KeyPressEvent{Code: '☃', Text: "☃"}, - }, - }, - seqTest{ - []byte("\x1b☃"), - []Event{ - KeyPressEvent{Code: '☃', Mod: ModAlt}, - }, - }, - // Standalone control characters. - seqTest{ - []byte{'\x1b'}, - []Event{ - KeyPressEvent{Code: KeyEscape}, - }, - }, - seqTest{ - []byte{ansi.SOH}, - []Event{ - KeyPressEvent{Code: 'a', Mod: ModCtrl}, - }, - }, - seqTest{ - []byte{'\x1b', ansi.SOH}, - []Event{ - KeyPressEvent{Code: 'a', Mod: ModCtrl | ModAlt}, - }, - }, - seqTest{ - []byte{ansi.NUL}, - []Event{ - KeyPressEvent{Code: KeySpace, Mod: ModCtrl}, - }, - }, - seqTest{ - []byte{'\x1b', ansi.NUL}, - []Event{ - KeyPressEvent{Code: KeySpace, Mod: ModCtrl | ModAlt}, - }, - }, - // C1 control characters. - seqTest{ - []byte{'\x80'}, - []Event{ - KeyPressEvent{Code: rune(0x80 - '@'), Mod: ModCtrl | ModAlt}, - }, - }, - ) - - if runtime.GOOS != "windows" { - // Sadly, utf8.DecodeRune([]byte(0xfe)) returns a valid rune on windows. - // This is incorrect, but it makes our test fail if we try it out. - td = append(td, seqTest{ - []byte{'\xfe'}, - []Event{ - UnknownEvent(rune(0xfe)), - }, - }) - } - - var p Parser - for _, tc := range td { - t.Run(fmt.Sprintf("%q", string(tc.seq)), func(t *testing.T) { - var events []Event - buf := tc.seq - for len(buf) > 0 { - width, Event := p.parseSequence(buf) - switch Event := Event.(type) { - case MultiEvent: - events = append(events, Event...) - default: - events = append(events, Event) - } - buf = buf[width:] - } - if !reflect.DeepEqual(tc.Events, events) { - t.Errorf("\nexpected event for %q:\n %#v\ngot:\n %#v", tc.seq, tc.Events, events) - } - }) - } -} - -func TestReadLongInput(t *testing.T) { - expect := make([]Event, 1000) - for i := range 1000 { - expect[i] = KeyPressEvent{Code: 'a', Text: "a"} - } - input := strings.Repeat("a", 1000) - drv, err := NewReader(strings.NewReader(input), "dumb", 0) - if err != nil { - t.Fatalf("unexpected input driver error: %v", err) - } - - var Events []Event - for { - events, err := drv.ReadEvents() - if err == io.EOF { - break - } - if err != nil { - t.Fatalf("unexpected input error: %v", err) - } - Events = append(Events, events...) 
- } - - if !reflect.DeepEqual(expect, Events) { - t.Errorf("unexpected messages, expected:\n %+v\ngot:\n %+v", expect, Events) - } -} - -func TestReadInput(t *testing.T) { - type test struct { - keyname string - in []byte - out []Event - } - testData := []test{ - { - "a", - []byte{'a'}, - []Event{ - KeyPressEvent{Code: 'a', Text: "a"}, - }, - }, - { - "space", - []byte{' '}, - []Event{ - KeyPressEvent{Code: KeySpace, Text: " "}, - }, - }, - { - "a alt+a", - []byte{'a', '\x1b', 'a'}, - []Event{ - KeyPressEvent{Code: 'a', Text: "a"}, - KeyPressEvent{Code: 'a', Mod: ModAlt}, - }, - }, - { - "a alt+a a", - []byte{'a', '\x1b', 'a', 'a'}, - []Event{ - KeyPressEvent{Code: 'a', Text: "a"}, - KeyPressEvent{Code: 'a', Mod: ModAlt}, - KeyPressEvent{Code: 'a', Text: "a"}, - }, - }, - { - "ctrl+a", - []byte{byte(ansi.SOH)}, - []Event{ - KeyPressEvent{Code: 'a', Mod: ModCtrl}, - }, - }, - { - "ctrl+a ctrl+b", - []byte{byte(ansi.SOH), byte(ansi.STX)}, - []Event{ - KeyPressEvent{Code: 'a', Mod: ModCtrl}, - KeyPressEvent{Code: 'b', Mod: ModCtrl}, - }, - }, - { - "alt+a", - []byte{byte(0x1b), 'a'}, - []Event{ - KeyPressEvent{Code: 'a', Mod: ModAlt}, - }, - }, - { - "a b c d", - []byte{'a', 'b', 'c', 'd'}, - []Event{ - KeyPressEvent{Code: 'a', Text: "a"}, - KeyPressEvent{Code: 'b', Text: "b"}, - KeyPressEvent{Code: 'c', Text: "c"}, - KeyPressEvent{Code: 'd', Text: "d"}, - }, - }, - { - "up", - []byte("\x1b[A"), - []Event{ - KeyPressEvent{Code: KeyUp}, - }, - }, - { - "wheel up", - []byte{'\x1b', '[', 'M', byte(32) + 0b0100_0000, byte(65), byte(49)}, - []Event{ - MouseWheelEvent{X: 32, Y: 16, Button: MouseWheelUp}, - }, - }, - { - "left motion release", - []byte{ - '\x1b', '[', 'M', byte(32) + 0b0010_0000, byte(32 + 33), byte(16 + 33), - '\x1b', '[', 'M', byte(32) + 0b0000_0011, byte(64 + 33), byte(32 + 33), - }, - []Event{ - MouseMotionEvent{X: 32, Y: 16, Button: MouseLeft}, - MouseReleaseEvent{X: 64, Y: 32, Button: MouseNone}, - }, - }, - { - "shift+tab", - []byte{'\x1b', '[', 'Z'}, - []Event{ - KeyPressEvent{Code: KeyTab, Mod: ModShift}, - }, - }, - { - "enter", - []byte{'\r'}, - []Event{KeyPressEvent{Code: KeyEnter}}, - }, - { - "alt+enter", - []byte{'\x1b', '\r'}, - []Event{ - KeyPressEvent{Code: KeyEnter, Mod: ModAlt}, - }, - }, - { - "insert", - []byte{'\x1b', '[', '2', '~'}, - []Event{ - KeyPressEvent{Code: KeyInsert}, - }, - }, - { - "ctrl+alt+a", - []byte{'\x1b', byte(ansi.SOH)}, - []Event{ - KeyPressEvent{Code: 'a', Mod: ModCtrl | ModAlt}, - }, - }, - { - "CSI?----X?", - []byte{'\x1b', '[', '-', '-', '-', '-', 'X'}, - []Event{UnknownEvent([]byte{'\x1b', '[', '-', '-', '-', '-', 'X'})}, - }, - // Powershell sequences. 
- { - "up", - []byte{'\x1b', 'O', 'A'}, - []Event{KeyPressEvent{Code: KeyUp}}, - }, - { - "down", - []byte{'\x1b', 'O', 'B'}, - []Event{KeyPressEvent{Code: KeyDown}}, - }, - { - "right", - []byte{'\x1b', 'O', 'C'}, - []Event{KeyPressEvent{Code: KeyRight}}, - }, - { - "left", - []byte{'\x1b', 'O', 'D'}, - []Event{KeyPressEvent{Code: KeyLeft}}, - }, - { - "alt+enter", - []byte{'\x1b', '\x0d'}, - []Event{KeyPressEvent{Code: KeyEnter, Mod: ModAlt}}, - }, - { - "alt+backspace", - []byte{'\x1b', '\x7f'}, - []Event{KeyPressEvent{Code: KeyBackspace, Mod: ModAlt}}, - }, - { - "ctrl+space", - []byte{'\x00'}, - []Event{KeyPressEvent{Code: KeySpace, Mod: ModCtrl}}, - }, - { - "ctrl+alt+space", - []byte{'\x1b', '\x00'}, - []Event{KeyPressEvent{Code: KeySpace, Mod: ModCtrl | ModAlt}}, - }, - { - "esc", - []byte{'\x1b'}, - []Event{KeyPressEvent{Code: KeyEscape}}, - }, - { - "alt+esc", - []byte{'\x1b', '\x1b'}, - []Event{KeyPressEvent{Code: KeyEscape, Mod: ModAlt}}, - }, - { - "a b o", - []byte{ - '\x1b', '[', '2', '0', '0', '~', - 'a', ' ', 'b', - '\x1b', '[', '2', '0', '1', '~', - 'o', - }, - []Event{ - PasteStartEvent{}, - PasteEvent("a b"), - PasteEndEvent{}, - KeyPressEvent{Code: 'o', Text: "o"}, - }, - }, - { - "a\x03\nb", - []byte{ - '\x1b', '[', '2', '0', '0', '~', - 'a', '\x03', '\n', 'b', - '\x1b', '[', '2', '0', '1', '~', - }, - []Event{ - PasteStartEvent{}, - PasteEvent("a\x03\nb"), - PasteEndEvent{}, - }, - }, - { - "?0xfe?", - []byte{'\xfe'}, - []Event{ - UnknownEvent(rune(0xfe)), - }, - }, - { - "a ?0xfe? b", - []byte{'a', '\xfe', ' ', 'b'}, - []Event{ - KeyPressEvent{Code: 'a', Text: "a"}, - UnknownEvent(rune(0xfe)), - KeyPressEvent{Code: KeySpace, Text: " "}, - KeyPressEvent{Code: 'b', Text: "b"}, - }, - }, - } - - for i, td := range testData { - t.Run(fmt.Sprintf("%d: %s", i, td.keyname), func(t *testing.T) { - Events := testReadInputs(t, bytes.NewReader(td.in)) - var buf strings.Builder - for i, Event := range Events { - if i > 0 { - buf.WriteByte(' ') - } - if s, ok := Event.(fmt.Stringer); ok { - buf.WriteString(s.String()) - } else { - fmt.Fprintf(&buf, "%#v:%T", Event, Event) - } - } - - if len(Events) != len(td.out) { - t.Fatalf("unexpected message list length: got %d, expected %d\n got: %#v\n expected: %#v\n", len(Events), len(td.out), Events, td.out) - } - - if !reflect.DeepEqual(td.out, Events) { - t.Fatalf("expected:\n%#v\ngot:\n%#v", td.out, Events) - } - }) - } -} - -func testReadInputs(t *testing.T, input io.Reader) []Event { - // We'll check that the input reader finishes at the end - // without error. - var wg sync.WaitGroup - var inputErr error - ctx, cancel := context.WithCancel(context.Background()) - defer func() { - cancel() - wg.Wait() - if inputErr != nil && !errors.Is(inputErr, io.EOF) { - t.Fatalf("unexpected input error: %v", inputErr) - } - }() - - dr, err := NewReader(input, "dumb", 0) - if err != nil { - t.Fatalf("unexpected input driver error: %v", err) - } - - // The messages we're consuming. - EventsC := make(chan Event) - - // Start the reader in the background. - wg.Add(1) - go func() { - defer wg.Done() - var events []Event - events, inputErr = dr.ReadEvents() - out: - for _, ev := range events { - select { - case EventsC <- ev: - case <-ctx.Done(): - break out - } - } - EventsC <- nil - }() - - var Events []Event -loop: - for { - select { - case Event := <-EventsC: - if Event == nil { - // end of input marker for the test. 
- break loop - } - Events = append(Events, Event) - case <-time.After(2 * time.Second): - t.Errorf("timeout waiting for input event") - break loop - } - } - return Events -} - -// randTest defines the test input and expected output for a sequence -// of interleaved control sequences and control characters. -type randTest struct { - data []byte - lengths []int - names []string -} - -// seed is the random seed to randomize the input. This helps check -// that all the sequences get ultimately exercised. -var seed = flag.Int64("seed", 0, "random seed (0 to autoselect)") - -// genRandomData generates a randomized test, with a random seed unless -// the seed flag was set. -func genRandomData(logfn func(int64), length int) randTest { - // We'll use a random source. However, we give the user the option - // to override it to a specific value for reproduceability. - s := *seed - if s == 0 { - s = time.Now().UnixNano() - } - // Inform the user so they know what to reuse to get the same data. - logfn(s) - return genRandomDataWithSeed(s, length) -} - -// genRandomDataWithSeed generates a randomized test with a fixed seed. -func genRandomDataWithSeed(s int64, length int) randTest { - src := rand.NewSource(s) - r := rand.New(src) - - // allseqs contains all the sequences, in sorted order. We sort - // to make the test deterministic (when the seed is also fixed). - type seqpair struct { - seq string - name string - } - var allseqs []seqpair - for seq, key := range sequences { - allseqs = append(allseqs, seqpair{seq, key.String()}) - } - sort.Slice(allseqs, func(i, j int) bool { return allseqs[i].seq < allseqs[j].seq }) - - // res contains the computed test. - var res randTest - - for len(res.data) < length { - alt := r.Intn(2) - prefix := "" - esclen := 0 - if alt == 1 { - prefix = "alt+" - esclen = 1 - } - kind := r.Intn(3) - switch kind { - case 0: - // A control character. - if alt == 1 { - res.data = append(res.data, '\x1b') - } - res.data = append(res.data, 1) - res.names = append(res.names, "ctrl+"+prefix+"a") - res.lengths = append(res.lengths, 1+esclen) - - case 1, 2: - // A sequence. - seqi := r.Intn(len(allseqs)) - s := allseqs[seqi] - if strings.Contains(s.name, "alt+") || strings.Contains(s.name, "meta+") { - esclen = 0 - prefix = "" - alt = 0 - } - if alt == 1 { - res.data = append(res.data, '\x1b') - } - res.data = append(res.data, s.seq...) - if strings.HasPrefix(s.name, "ctrl+") { - prefix = "ctrl+" + prefix - } - name := prefix + strings.TrimPrefix(s.name, "ctrl+") - res.names = append(res.names, name) - res.lengths = append(res.lengths, len(s.seq)+esclen) - } - } - return res -} - -func FuzzParseSequence(f *testing.F) { - var p Parser - for seq := range sequences { - f.Add(seq) - } - f.Add("\x1b]52;?\x07") // OSC 52 - f.Add("\x1b]11;rgb:0000/0000/0000\x1b\\") // OSC 11 - f.Add("\x1bP>|charm terminal(0.1.2)\x1b\\") // DCS (XTVERSION) - f.Add("\x1b_Gi=123\x1b\\") // APC - f.Fuzz(func(t *testing.T, seq string) { - n, _ := p.parseSequence([]byte(seq)) - if n == 0 && seq != "" { - t.Errorf("expected a non-zero width for %q", seq) - } - }) -} - -// BenchmarkDetectSequenceMap benchmarks the map-based sequence -// detector. 
-func BenchmarkDetectSequenceMap(b *testing.B) { - var p Parser - td := genRandomDataWithSeed(123, 10000) - for i := 0; i < b.N; i++ { - for j, w := 0, 0; j < len(td.data); j += w { - w, _ = p.parseSequence(td.data[j:]) - } - } -} diff --git a/packages/tui/input/kitty.go b/packages/tui/input/kitty.go deleted file mode 100644 index 4da00b50..00000000 --- a/packages/tui/input/kitty.go +++ /dev/null @@ -1,353 +0,0 @@ -package input - -import ( - "unicode" - "unicode/utf8" - - "github.com/charmbracelet/x/ansi" - "github.com/charmbracelet/x/ansi/kitty" -) - -// KittyGraphicsEvent represents a Kitty Graphics response event. -// -// See https://sw.kovidgoyal.net/kitty/graphics-protocol/ -type KittyGraphicsEvent struct { - Options kitty.Options - Payload []byte -} - -// KittyEnhancementsEvent represents a Kitty enhancements event. -type KittyEnhancementsEvent int - -// Kitty keyboard enhancement constants. -// See https://sw.kovidgoyal.net/kitty/keyboard-protocol/#progressive-enhancement -const ( - KittyDisambiguateEscapeCodes KittyEnhancementsEvent = 1 << iota - KittyReportEventTypes - KittyReportAlternateKeys - KittyReportAllKeysAsEscapeCodes - KittyReportAssociatedText -) - -// Contains reports whether m contains the given enhancements. -func (e KittyEnhancementsEvent) Contains(enhancements KittyEnhancementsEvent) bool { - return e&enhancements == enhancements -} - -// Kitty Clipboard Control Sequences. -var kittyKeyMap = map[int]Key{ - ansi.BS: {Code: KeyBackspace}, - ansi.HT: {Code: KeyTab}, - ansi.CR: {Code: KeyEnter}, - ansi.ESC: {Code: KeyEscape}, - ansi.DEL: {Code: KeyBackspace}, - - 57344: {Code: KeyEscape}, - 57345: {Code: KeyEnter}, - 57346: {Code: KeyTab}, - 57347: {Code: KeyBackspace}, - 57348: {Code: KeyInsert}, - 57349: {Code: KeyDelete}, - 57350: {Code: KeyLeft}, - 57351: {Code: KeyRight}, - 57352: {Code: KeyUp}, - 57353: {Code: KeyDown}, - 57354: {Code: KeyPgUp}, - 57355: {Code: KeyPgDown}, - 57356: {Code: KeyHome}, - 57357: {Code: KeyEnd}, - 57358: {Code: KeyCapsLock}, - 57359: {Code: KeyScrollLock}, - 57360: {Code: KeyNumLock}, - 57361: {Code: KeyPrintScreen}, - 57362: {Code: KeyPause}, - 57363: {Code: KeyMenu}, - 57364: {Code: KeyF1}, - 57365: {Code: KeyF2}, - 57366: {Code: KeyF3}, - 57367: {Code: KeyF4}, - 57368: {Code: KeyF5}, - 57369: {Code: KeyF6}, - 57370: {Code: KeyF7}, - 57371: {Code: KeyF8}, - 57372: {Code: KeyF9}, - 57373: {Code: KeyF10}, - 57374: {Code: KeyF11}, - 57375: {Code: KeyF12}, - 57376: {Code: KeyF13}, - 57377: {Code: KeyF14}, - 57378: {Code: KeyF15}, - 57379: {Code: KeyF16}, - 57380: {Code: KeyF17}, - 57381: {Code: KeyF18}, - 57382: {Code: KeyF19}, - 57383: {Code: KeyF20}, - 57384: {Code: KeyF21}, - 57385: {Code: KeyF22}, - 57386: {Code: KeyF23}, - 57387: {Code: KeyF24}, - 57388: {Code: KeyF25}, - 57389: {Code: KeyF26}, - 57390: {Code: KeyF27}, - 57391: {Code: KeyF28}, - 57392: {Code: KeyF29}, - 57393: {Code: KeyF30}, - 57394: {Code: KeyF31}, - 57395: {Code: KeyF32}, - 57396: {Code: KeyF33}, - 57397: {Code: KeyF34}, - 57398: {Code: KeyF35}, - 57399: {Code: KeyKp0}, - 57400: {Code: KeyKp1}, - 57401: {Code: KeyKp2}, - 57402: {Code: KeyKp3}, - 57403: {Code: KeyKp4}, - 57404: {Code: KeyKp5}, - 57405: {Code: KeyKp6}, - 57406: {Code: KeyKp7}, - 57407: {Code: KeyKp8}, - 57408: {Code: KeyKp9}, - 57409: {Code: KeyKpDecimal}, - 57410: {Code: KeyKpDivide}, - 57411: {Code: KeyKpMultiply}, - 57412: {Code: KeyKpMinus}, - 57413: {Code: KeyKpPlus}, - 57414: {Code: KeyKpEnter}, - 57415: {Code: KeyKpEqual}, - 57416: {Code: KeyKpSep}, - 57417: {Code: KeyKpLeft}, - 57418: 
{Code: KeyKpRight}, - 57419: {Code: KeyKpUp}, - 57420: {Code: KeyKpDown}, - 57421: {Code: KeyKpPgUp}, - 57422: {Code: KeyKpPgDown}, - 57423: {Code: KeyKpHome}, - 57424: {Code: KeyKpEnd}, - 57425: {Code: KeyKpInsert}, - 57426: {Code: KeyKpDelete}, - 57427: {Code: KeyKpBegin}, - 57428: {Code: KeyMediaPlay}, - 57429: {Code: KeyMediaPause}, - 57430: {Code: KeyMediaPlayPause}, - 57431: {Code: KeyMediaReverse}, - 57432: {Code: KeyMediaStop}, - 57433: {Code: KeyMediaFastForward}, - 57434: {Code: KeyMediaRewind}, - 57435: {Code: KeyMediaNext}, - 57436: {Code: KeyMediaPrev}, - 57437: {Code: KeyMediaRecord}, - 57438: {Code: KeyLowerVol}, - 57439: {Code: KeyRaiseVol}, - 57440: {Code: KeyMute}, - 57441: {Code: KeyLeftShift}, - 57442: {Code: KeyLeftCtrl}, - 57443: {Code: KeyLeftAlt}, - 57444: {Code: KeyLeftSuper}, - 57445: {Code: KeyLeftHyper}, - 57446: {Code: KeyLeftMeta}, - 57447: {Code: KeyRightShift}, - 57448: {Code: KeyRightCtrl}, - 57449: {Code: KeyRightAlt}, - 57450: {Code: KeyRightSuper}, - 57451: {Code: KeyRightHyper}, - 57452: {Code: KeyRightMeta}, - 57453: {Code: KeyIsoLevel3Shift}, - 57454: {Code: KeyIsoLevel5Shift}, -} - -func init() { - // These are some faulty C0 mappings some terminals such as WezTerm have - // and doesn't follow the specs. - kittyKeyMap[ansi.NUL] = Key{Code: KeySpace, Mod: ModCtrl} - for i := ansi.SOH; i <= ansi.SUB; i++ { - if _, ok := kittyKeyMap[i]; !ok { - kittyKeyMap[i] = Key{Code: rune(i + 0x60), Mod: ModCtrl} - } - } - for i := ansi.FS; i <= ansi.US; i++ { - if _, ok := kittyKeyMap[i]; !ok { - kittyKeyMap[i] = Key{Code: rune(i + 0x40), Mod: ModCtrl} - } - } -} - -const ( - kittyShift = 1 << iota - kittyAlt - kittyCtrl - kittySuper - kittyHyper - kittyMeta - kittyCapsLock - kittyNumLock -) - -func fromKittyMod(mod int) KeyMod { - var m KeyMod - if mod&kittyShift != 0 { - m |= ModShift - } - if mod&kittyAlt != 0 { - m |= ModAlt - } - if mod&kittyCtrl != 0 { - m |= ModCtrl - } - if mod&kittySuper != 0 { - m |= ModSuper - } - if mod&kittyHyper != 0 { - m |= ModHyper - } - if mod&kittyMeta != 0 { - m |= ModMeta - } - if mod&kittyCapsLock != 0 { - m |= ModCapsLock - } - if mod&kittyNumLock != 0 { - m |= ModNumLock - } - return m -} - -// parseKittyKeyboard parses a Kitty Keyboard Protocol sequence. -// -// In `CSI u`, this is parsed as: -// -// CSI codepoint ; modifiers u -// codepoint: ASCII Dec value -// -// The Kitty Keyboard Protocol extends this with optional components that can be -// enabled progressively. The full sequence is parsed as: -// -// CSI unicode-key-code:alternate-key-codes ; modifiers:event-type ; text-as-codepoints u -// -// See https://sw.kovidgoyal.net/kitty/keyboard-protocol/ -func parseKittyKeyboard(params ansi.Params) (Event Event) { - var isRelease bool - var key Key - - // The index of parameters separated by semicolons ';'. Sub parameters are - // separated by colons ':'. - var paramIdx int - var sudIdx int // The sub parameter index - for _, p := range params { - // Kitty Keyboard Protocol has 3 optional components. 
- switch paramIdx { - case 0: - switch sudIdx { - case 0: - var foundKey bool - code := p.Param(1) // CSI u has a default value of 1 - key, foundKey = kittyKeyMap[code] - if !foundKey { - r := rune(code) - if !utf8.ValidRune(r) { - r = utf8.RuneError - } - - key.Code = r - } - - case 2: - // shifted key + base key - if b := rune(p.Param(1)); unicode.IsPrint(b) { - // XXX: When alternate key reporting is enabled, the protocol - // can return 3 things, the unicode codepoint of the key, - // the shifted codepoint of the key, and the standard - // PC-101 key layout codepoint. - // This is useful to create an unambiguous mapping of keys - // when using a different language layout. - key.BaseCode = b - } - fallthrough - - case 1: - // shifted key - if s := rune(p.Param(1)); unicode.IsPrint(s) { - // XXX: We swap keys here because we want the shifted key - // to be the Rune that is returned by the event. - // For example, shift+a should produce "A" not "a". - // In such a case, we set AltRune to the original key "a" - // and Rune to "A". - key.ShiftedCode = s - } - } - case 1: - switch sudIdx { - case 0: - mod := p.Param(1) - if mod > 1 { - key.Mod = fromKittyMod(mod - 1) - if key.Mod > ModShift { - // XXX: We need to clear the text if we have a modifier key - // other than a [ModShift] key. - key.Text = "" - } - } - - case 1: - switch p.Param(1) { - case 2: - key.IsRepeat = true - case 3: - isRelease = true - } - case 2: - } - case 2: - if code := p.Param(0); code != 0 { - key.Text += string(rune(code)) - } - } - - sudIdx++ - if !p.HasMore() { - paramIdx++ - sudIdx = 0 - } - } - - //nolint:nestif - if len(key.Text) == 0 && unicode.IsPrint(key.Code) && - (key.Mod <= ModShift || key.Mod == ModCapsLock || key.Mod == ModShift|ModCapsLock) { - if key.Mod == 0 { - key.Text = string(key.Code) - } else { - desiredCase := unicode.ToLower - if key.Mod.Contains(ModShift) || key.Mod.Contains(ModCapsLock) { - desiredCase = unicode.ToUpper - } - if key.ShiftedCode != 0 { - key.Text = string(key.ShiftedCode) - } else { - key.Text = string(desiredCase(key.Code)) - } - } - } - - if isRelease { - return KeyReleaseEvent(key) - } - - return KeyPressEvent(key) -} - -// parseKittyKeyboardExt parses a Kitty Keyboard Protocol sequence extensions -// for non CSI u sequences. This includes things like CSI A, SS3 A and others, -// and CSI ~. -func parseKittyKeyboardExt(params ansi.Params, k KeyPressEvent) Event { - // Handle Kitty keyboard protocol - if len(params) > 2 && // We have at least 3 parameters - params[0].Param(1) == 1 && // The first parameter is 1 (defaults to 1) - params[1].HasMore() { // The second parameter is a subparameter (separated by a ":") - switch params[2].Param(1) { // The third parameter is the event type (defaults to 1) - case 2: - k.IsRepeat = true - case 3: - return KeyReleaseEvent(k) - } - } - return k -} diff --git a/packages/tui/input/mod.go b/packages/tui/input/mod.go deleted file mode 100644 index c0076276..00000000 --- a/packages/tui/input/mod.go +++ /dev/null @@ -1,37 +0,0 @@ -package input - -// KeyMod represents modifier keys. -type KeyMod int - -// Modifier keys. -const ( - ModShift KeyMod = 1 << iota - ModAlt - ModCtrl - ModMeta - - // These modifiers are used with the Kitty protocol. - // XXX: Meta and Super are swapped in the Kitty protocol, - // this is to preserve compatibility with XTerm modifiers. - - ModHyper - ModSuper // Windows/Command keys - - // These are key lock states. 
- - ModCapsLock - ModNumLock - ModScrollLock // Defined in Windows API only -) - -// Contains reports whether m contains the given modifiers. -// -// Example: -// -// m := ModAlt | ModCtrl -// m.Contains(ModCtrl) // true -// m.Contains(ModAlt | ModCtrl) // true -// m.Contains(ModAlt | ModCtrl | ModShift) // false -func (m KeyMod) Contains(mods KeyMod) bool { - return m&mods == mods -} diff --git a/packages/tui/input/mode.go b/packages/tui/input/mode.go deleted file mode 100644 index ea1ba571..00000000 --- a/packages/tui/input/mode.go +++ /dev/null @@ -1,14 +0,0 @@ -package input - -import "github.com/charmbracelet/x/ansi" - -// ModeReportEvent is a message that represents a mode report event (DECRPM). -// -// See: https://vt100.net/docs/vt510-rm/DECRPM.html -type ModeReportEvent struct { - // Mode is the mode number. - Mode ansi.Mode - - // Value is the mode value. - Value ansi.ModeSetting -} diff --git a/packages/tui/input/mouse.go b/packages/tui/input/mouse.go deleted file mode 100644 index d97eb72e..00000000 --- a/packages/tui/input/mouse.go +++ /dev/null @@ -1,292 +0,0 @@ -package input - -import ( - "fmt" - - "github.com/charmbracelet/x/ansi" -) - -// MouseButton represents the button that was pressed during a mouse message. -type MouseButton = ansi.MouseButton - -// Mouse event buttons -// -// This is based on X11 mouse button codes. -// -// 1 = left button -// 2 = middle button (pressing the scroll wheel) -// 3 = right button -// 4 = turn scroll wheel up -// 5 = turn scroll wheel down -// 6 = push scroll wheel left -// 7 = push scroll wheel right -// 8 = 4th button (aka browser backward button) -// 9 = 5th button (aka browser forward button) -// 10 -// 11 -// -// Other buttons are not supported. -const ( - MouseNone = ansi.MouseNone - MouseLeft = ansi.MouseLeft - MouseMiddle = ansi.MouseMiddle - MouseRight = ansi.MouseRight - MouseWheelUp = ansi.MouseWheelUp - MouseWheelDown = ansi.MouseWheelDown - MouseWheelLeft = ansi.MouseWheelLeft - MouseWheelRight = ansi.MouseWheelRight - MouseBackward = ansi.MouseBackward - MouseForward = ansi.MouseForward - MouseButton10 = ansi.MouseButton10 - MouseButton11 = ansi.MouseButton11 -) - -// MouseEvent represents a mouse message. This is a generic mouse message that -// can represent any kind of mouse event. -type MouseEvent interface { - fmt.Stringer - - // Mouse returns the underlying mouse event. - Mouse() Mouse -} - -// Mouse represents a Mouse message. Use [MouseEvent] to represent all mouse -// messages. -// -// The X and Y coordinates are zero-based, with (0,0) being the upper left -// corner of the terminal. -// -// // Catch all mouse events -// switch Event := Event.(type) { -// case MouseEvent: -// m := Event.Mouse() -// fmt.Println("Mouse event:", m.X, m.Y, m) -// } -// -// // Only catch mouse click events -// switch Event := Event.(type) { -// case MouseClickEvent: -// fmt.Println("Mouse click event:", Event.X, Event.Y, Event) -// } -type Mouse struct { - X, Y int - Button MouseButton - Mod KeyMod -} - -// String returns a string representation of the mouse message. -func (m Mouse) String() (s string) { - if m.Mod.Contains(ModCtrl) { - s += "ctrl+" - } - if m.Mod.Contains(ModAlt) { - s += "alt+" - } - if m.Mod.Contains(ModShift) { - s += "shift+" - } - - str := m.Button.String() - if str == "" { - s += "unknown" - } else if str != "none" { // motion events don't have a button - s += str - } - - return s -} - -// MouseClickEvent represents a mouse button click event. 
-type MouseClickEvent Mouse - -// String returns a string representation of the mouse click event. -func (e MouseClickEvent) String() string { - return Mouse(e).String() -} - -// Mouse returns the underlying mouse event. This is a convenience method and -// syntactic sugar to satisfy the [MouseEvent] interface, and cast the mouse -// event to [Mouse]. -func (e MouseClickEvent) Mouse() Mouse { - return Mouse(e) -} - -// MouseReleaseEvent represents a mouse button release event. -type MouseReleaseEvent Mouse - -// String returns a string representation of the mouse release event. -func (e MouseReleaseEvent) String() string { - return Mouse(e).String() -} - -// Mouse returns the underlying mouse event. This is a convenience method and -// syntactic sugar to satisfy the [MouseEvent] interface, and cast the mouse -// event to [Mouse]. -func (e MouseReleaseEvent) Mouse() Mouse { - return Mouse(e) -} - -// MouseWheelEvent represents a mouse wheel message event. -type MouseWheelEvent Mouse - -// String returns a string representation of the mouse wheel event. -func (e MouseWheelEvent) String() string { - return Mouse(e).String() -} - -// Mouse returns the underlying mouse event. This is a convenience method and -// syntactic sugar to satisfy the [MouseEvent] interface, and cast the mouse -// event to [Mouse]. -func (e MouseWheelEvent) Mouse() Mouse { - return Mouse(e) -} - -// MouseMotionEvent represents a mouse motion event. -type MouseMotionEvent Mouse - -// String returns a string representation of the mouse motion event. -func (e MouseMotionEvent) String() string { - m := Mouse(e) - if m.Button != 0 { - return m.String() + "+motion" - } - return m.String() + "motion" -} - -// Mouse returns the underlying mouse event. This is a convenience method and -// syntactic sugar to satisfy the [MouseEvent] interface, and cast the mouse -// event to [Mouse]. -func (e MouseMotionEvent) Mouse() Mouse { - return Mouse(e) -} - -// Parse SGR-encoded mouse events; SGR extended mouse events. SGR mouse events -// look like: -// -// ESC [ < Cb ; Cx ; Cy (M or m) -// -// where: -// -// Cb is the encoded button code -// Cx is the x-coordinate of the mouse -// Cy is the y-coordinate of the mouse -// M is for button press, m is for button release -// -// https://invisible-island.net/xterm/ctlseqs/ctlseqs.html#h3-Extended-coordinates -func parseSGRMouseEvent(cmd ansi.Cmd, params ansi.Params) Event { - x, _, ok := params.Param(1, 1) - if !ok { - x = 1 - } - y, _, ok := params.Param(2, 1) - if !ok { - y = 1 - } - release := cmd.Final() == 'm' - b, _, _ := params.Param(0, 0) - mod, btn, _, isMotion := parseMouseButton(b) - - // (1,1) is the upper left. We subtract 1 to normalize it to (0,0). - x-- - y-- - - m := Mouse{X: x, Y: y, Button: btn, Mod: mod} - - // Wheel buttons don't have release events - // Motion can be reported as a release event in some terminals (Windows Terminal) - if isWheel(m.Button) { - return MouseWheelEvent(m) - } else if !isMotion && release { - return MouseReleaseEvent(m) - } else if isMotion { - return MouseMotionEvent(m) - } - return MouseClickEvent(m) -} - -const x10MouseByteOffset = 32 - -// Parse X10-encoded mouse events; the simplest kind. The last release of X10 -// was December 1986, by the way. The original X10 mouse protocol limits the Cx -// and Cy coordinates to 223 (=255-032). 
-// -// X10 mouse events look like: -// -// ESC [M Cb Cx Cy -// -// See: http://www.xfree86.org/current/ctlseqs.html#Mouse%20Tracking -func parseX10MouseEvent(buf []byte) Event { - v := buf[3:6] - b := int(v[0]) - if b >= x10MouseByteOffset { - // XXX: b < 32 should be impossible, but we're being defensive. - b -= x10MouseByteOffset - } - - mod, btn, isRelease, isMotion := parseMouseButton(b) - - // (1,1) is the upper left. We subtract 1 to normalize it to (0,0). - x := int(v[1]) - x10MouseByteOffset - 1 - y := int(v[2]) - x10MouseByteOffset - 1 - - m := Mouse{X: x, Y: y, Button: btn, Mod: mod} - if isWheel(m.Button) { - return MouseWheelEvent(m) - } else if isMotion { - return MouseMotionEvent(m) - } else if isRelease { - return MouseReleaseEvent(m) - } - return MouseClickEvent(m) -} - -// See: https://invisible-island.net/xterm/ctlseqs/ctlseqs.html#h3-Extended-coordinates -func parseMouseButton(b int) (mod KeyMod, btn MouseButton, isRelease bool, isMotion bool) { - // mouse bit shifts - const ( - bitShift = 0b0000_0100 - bitAlt = 0b0000_1000 - bitCtrl = 0b0001_0000 - bitMotion = 0b0010_0000 - bitWheel = 0b0100_0000 - bitAdd = 0b1000_0000 // additional buttons 8-11 - - bitsMask = 0b0000_0011 - ) - - // Modifiers - if b&bitAlt != 0 { - mod |= ModAlt - } - if b&bitCtrl != 0 { - mod |= ModCtrl - } - if b&bitShift != 0 { - mod |= ModShift - } - - if b&bitAdd != 0 { - btn = MouseBackward + MouseButton(b&bitsMask) - } else if b&bitWheel != 0 { - btn = MouseWheelUp + MouseButton(b&bitsMask) - } else { - btn = MouseLeft + MouseButton(b&bitsMask) - // X10 reports a button release as 0b0000_0011 (3) - if b&bitsMask == bitsMask { - btn = MouseNone - isRelease = true - } - } - - // Motion bit doesn't get reported for wheel events. - if b&bitMotion != 0 && !isWheel(btn) { - isMotion = true - } - - return //nolint:nakedret -} - -// isWheel returns true if the mouse event is a wheel event. 
-func isWheel(btn MouseButton) bool { - return btn >= MouseWheelUp && btn <= MouseWheelRight -} diff --git a/packages/tui/input/mouse_test.go b/packages/tui/input/mouse_test.go deleted file mode 100644 index d55e4148..00000000 --- a/packages/tui/input/mouse_test.go +++ /dev/null @@ -1,481 +0,0 @@ -package input - -import ( - "fmt" - "testing" - - "github.com/charmbracelet/x/ansi" - "github.com/charmbracelet/x/ansi/parser" -) - -func TestMouseEvent_String(t *testing.T) { - tt := []struct { - name string - event Event - expected string - }{ - { - name: "unknown", - event: MouseClickEvent{Button: MouseButton(0xff)}, - expected: "unknown", - }, - { - name: "left", - event: MouseClickEvent{Button: MouseLeft}, - expected: "left", - }, - { - name: "right", - event: MouseClickEvent{Button: MouseRight}, - expected: "right", - }, - { - name: "middle", - event: MouseClickEvent{Button: MouseMiddle}, - expected: "middle", - }, - { - name: "release", - event: MouseReleaseEvent{Button: MouseNone}, - expected: "", - }, - { - name: "wheelup", - event: MouseWheelEvent{Button: MouseWheelUp}, - expected: "wheelup", - }, - { - name: "wheeldown", - event: MouseWheelEvent{Button: MouseWheelDown}, - expected: "wheeldown", - }, - { - name: "wheelleft", - event: MouseWheelEvent{Button: MouseWheelLeft}, - expected: "wheelleft", - }, - { - name: "wheelright", - event: MouseWheelEvent{Button: MouseWheelRight}, - expected: "wheelright", - }, - { - name: "motion", - event: MouseMotionEvent{Button: MouseNone}, - expected: "motion", - }, - { - name: "shift+left", - event: MouseReleaseEvent{Button: MouseLeft, Mod: ModShift}, - expected: "shift+left", - }, - { - name: "shift+left", event: MouseClickEvent{Button: MouseLeft, Mod: ModShift}, - expected: "shift+left", - }, - { - name: "ctrl+shift+left", - event: MouseClickEvent{Button: MouseLeft, Mod: ModCtrl | ModShift}, - expected: "ctrl+shift+left", - }, - { - name: "alt+left", - event: MouseClickEvent{Button: MouseLeft, Mod: ModAlt}, - expected: "alt+left", - }, - { - name: "ctrl+left", - event: MouseClickEvent{Button: MouseLeft, Mod: ModCtrl}, - expected: "ctrl+left", - }, - { - name: "ctrl+alt+left", - event: MouseClickEvent{Button: MouseLeft, Mod: ModAlt | ModCtrl}, - expected: "ctrl+alt+left", - }, - { - name: "ctrl+alt+shift+left", - event: MouseClickEvent{Button: MouseLeft, Mod: ModAlt | ModCtrl | ModShift}, - expected: "ctrl+alt+shift+left", - }, - { - name: "ignore coordinates", - event: MouseClickEvent{X: 100, Y: 200, Button: MouseLeft}, - expected: "left", - }, - { - name: "broken type", - event: MouseClickEvent{Button: MouseButton(120)}, - expected: "unknown", - }, - } - - for i := range tt { - tc := tt[i] - - t.Run(tc.name, func(t *testing.T) { - actual := fmt.Sprint(tc.event) - - if tc.expected != actual { - t.Fatalf("expected %q but got %q", - tc.expected, - actual, - ) - } - }) - } -} - -func TestParseX10MouseDownEvent(t *testing.T) { - encode := func(b byte, x, y int) []byte { - return []byte{ - '\x1b', - '[', - 'M', - byte(32) + b, - byte(x + 32 + 1), - byte(y + 32 + 1), - } - } - - tt := []struct { - name string - buf []byte - expected Event - }{ - // Position. - { - name: "zero position", - buf: encode(0b0000_0000, 0, 0), - expected: MouseClickEvent{X: 0, Y: 0, Button: MouseLeft}, - }, - { - name: "max position", - buf: encode(0b0000_0000, 222, 222), // Because 255 (max int8) - 32 - 1. - expected: MouseClickEvent{X: 222, Y: 222, Button: MouseLeft}, - }, - // Simple. 
- { - name: "left", - buf: encode(0b0000_0000, 32, 16), - expected: MouseClickEvent{X: 32, Y: 16, Button: MouseLeft}, - }, - { - name: "left in motion", - buf: encode(0b0010_0000, 32, 16), - expected: MouseMotionEvent{X: 32, Y: 16, Button: MouseLeft}, - }, - { - name: "middle", - buf: encode(0b0000_0001, 32, 16), - expected: MouseClickEvent{X: 32, Y: 16, Button: MouseMiddle}, - }, - { - name: "middle in motion", - buf: encode(0b0010_0001, 32, 16), - expected: MouseMotionEvent{X: 32, Y: 16, Button: MouseMiddle}, - }, - { - name: "right", - buf: encode(0b0000_0010, 32, 16), - expected: MouseClickEvent{X: 32, Y: 16, Button: MouseRight}, - }, - { - name: "right in motion", - buf: encode(0b0010_0010, 32, 16), - expected: MouseMotionEvent{X: 32, Y: 16, Button: MouseRight}, - }, - { - name: "motion", - buf: encode(0b0010_0011, 32, 16), - expected: MouseMotionEvent{X: 32, Y: 16, Button: MouseNone}, - }, - { - name: "wheel up", - buf: encode(0b0100_0000, 32, 16), - expected: MouseWheelEvent{X: 32, Y: 16, Button: MouseWheelUp}, - }, - { - name: "wheel down", - buf: encode(0b0100_0001, 32, 16), - expected: MouseWheelEvent{X: 32, Y: 16, Button: MouseWheelDown}, - }, - { - name: "wheel left", - buf: encode(0b0100_0010, 32, 16), - expected: MouseWheelEvent{X: 32, Y: 16, Button: MouseWheelLeft}, - }, - { - name: "wheel right", - buf: encode(0b0100_0011, 32, 16), - expected: MouseWheelEvent{X: 32, Y: 16, Button: MouseWheelRight}, - }, - { - name: "release", - buf: encode(0b0000_0011, 32, 16), - expected: MouseReleaseEvent{X: 32, Y: 16, Button: MouseNone}, - }, - { - name: "backward", - buf: encode(0b1000_0000, 32, 16), - expected: MouseClickEvent{X: 32, Y: 16, Button: MouseBackward}, - }, - { - name: "forward", - buf: encode(0b1000_0001, 32, 16), - expected: MouseClickEvent{X: 32, Y: 16, Button: MouseForward}, - }, - { - name: "button 10", - buf: encode(0b1000_0010, 32, 16), - expected: MouseClickEvent{X: 32, Y: 16, Button: MouseButton10}, - }, - { - name: "button 11", - buf: encode(0b1000_0011, 32, 16), - expected: MouseClickEvent{X: 32, Y: 16, Button: MouseButton11}, - }, - // Combinations. - { - name: "alt+right", - buf: encode(0b0000_1010, 32, 16), - expected: MouseClickEvent{X: 32, Y: 16, Mod: ModAlt, Button: MouseRight}, - }, - { - name: "ctrl+right", - buf: encode(0b0001_0010, 32, 16), - expected: MouseClickEvent{X: 32, Y: 16, Mod: ModCtrl, Button: MouseRight}, - }, - { - name: "left in motion", - buf: encode(0b0010_0000, 32, 16), - expected: MouseMotionEvent{X: 32, Y: 16, Button: MouseLeft}, - }, - { - name: "alt+right in motion", - buf: encode(0b0010_1010, 32, 16), - expected: MouseMotionEvent{X: 32, Y: 16, Mod: ModAlt, Button: MouseRight}, - }, - { - name: "ctrl+right in motion", - buf: encode(0b0011_0010, 32, 16), - expected: MouseMotionEvent{X: 32, Y: 16, Mod: ModCtrl, Button: MouseRight}, - }, - { - name: "ctrl+alt+right", - buf: encode(0b0001_1010, 32, 16), - expected: MouseClickEvent{X: 32, Y: 16, Mod: ModAlt | ModCtrl, Button: MouseRight}, - }, - { - name: "ctrl+wheel up", - buf: encode(0b0101_0000, 32, 16), - expected: MouseWheelEvent{X: 32, Y: 16, Mod: ModCtrl, Button: MouseWheelUp}, - }, - { - name: "alt+wheel down", - buf: encode(0b0100_1001, 32, 16), - expected: MouseWheelEvent{X: 32, Y: 16, Mod: ModAlt, Button: MouseWheelDown}, - }, - { - name: "ctrl+alt+wheel down", - buf: encode(0b0101_1001, 32, 16), - expected: MouseWheelEvent{X: 32, Y: 16, Mod: ModAlt | ModCtrl, Button: MouseWheelDown}, - }, - // Overflow position. 
- { - name: "overflow position", - buf: encode(0b0010_0000, 250, 223), // Because 255 (max int8) - 32 - 1. - expected: MouseMotionEvent{X: -6, Y: -33, Button: MouseLeft}, - }, - } - - for i := range tt { - tc := tt[i] - - t.Run(tc.name, func(t *testing.T) { - actual := parseX10MouseEvent(tc.buf) - - if tc.expected != actual { - t.Fatalf("expected %#v but got %#v", - tc.expected, - actual, - ) - } - }) - } -} - -func TestParseSGRMouseEvent(t *testing.T) { - type csiSequence struct { - params []ansi.Param - cmd ansi.Cmd - } - encode := func(b, x, y int, r bool) *csiSequence { - re := 'M' - if r { - re = 'm' - } - return &csiSequence{ - params: []ansi.Param{ - ansi.Param(b), - ansi.Param(x + 1), - ansi.Param(y + 1), - }, - cmd: ansi.Cmd(re) | ('<' << parser.PrefixShift), - } - } - - tt := []struct { - name string - buf *csiSequence - expected Event - }{ - // Position. - { - name: "zero position", - buf: encode(0, 0, 0, false), - expected: MouseClickEvent{X: 0, Y: 0, Button: MouseLeft}, - }, - { - name: "225 position", - buf: encode(0, 225, 225, false), - expected: MouseClickEvent{X: 225, Y: 225, Button: MouseLeft}, - }, - // Simple. - { - name: "left", - buf: encode(0, 32, 16, false), - expected: MouseClickEvent{X: 32, Y: 16, Button: MouseLeft}, - }, - { - name: "left in motion", - buf: encode(32, 32, 16, false), - expected: MouseMotionEvent{X: 32, Y: 16, Button: MouseLeft}, - }, - { - name: "left", - buf: encode(0, 32, 16, true), - expected: MouseReleaseEvent{X: 32, Y: 16, Button: MouseLeft}, - }, - { - name: "middle", - buf: encode(1, 32, 16, false), - expected: MouseClickEvent{X: 32, Y: 16, Button: MouseMiddle}, - }, - { - name: "middle in motion", - buf: encode(33, 32, 16, false), - expected: MouseMotionEvent{X: 32, Y: 16, Button: MouseMiddle}, - }, - { - name: "middle", - buf: encode(1, 32, 16, true), - expected: MouseReleaseEvent{X: 32, Y: 16, Button: MouseMiddle}, - }, - { - name: "right", - buf: encode(2, 32, 16, false), - expected: MouseClickEvent{X: 32, Y: 16, Button: MouseRight}, - }, - { - name: "right", - buf: encode(2, 32, 16, true), - expected: MouseReleaseEvent{X: 32, Y: 16, Button: MouseRight}, - }, - { - name: "motion", - buf: encode(35, 32, 16, false), - expected: MouseMotionEvent{X: 32, Y: 16, Button: MouseNone}, - }, - { - name: "wheel up", - buf: encode(64, 32, 16, false), - expected: MouseWheelEvent{X: 32, Y: 16, Button: MouseWheelUp}, - }, - { - name: "wheel down", - buf: encode(65, 32, 16, false), - expected: MouseWheelEvent{X: 32, Y: 16, Button: MouseWheelDown}, - }, - { - name: "wheel left", - buf: encode(66, 32, 16, false), - expected: MouseWheelEvent{X: 32, Y: 16, Button: MouseWheelLeft}, - }, - { - name: "wheel right", - buf: encode(67, 32, 16, false), - expected: MouseWheelEvent{X: 32, Y: 16, Button: MouseWheelRight}, - }, - { - name: "backward", - buf: encode(128, 32, 16, false), - expected: MouseClickEvent{X: 32, Y: 16, Button: MouseBackward}, - }, - { - name: "backward in motion", - buf: encode(160, 32, 16, false), - expected: MouseMotionEvent{X: 32, Y: 16, Button: MouseBackward}, - }, - { - name: "forward", - buf: encode(129, 32, 16, false), - expected: MouseClickEvent{X: 32, Y: 16, Button: MouseForward}, - }, - { - name: "forward in motion", - buf: encode(161, 32, 16, false), - expected: MouseMotionEvent{X: 32, Y: 16, Button: MouseForward}, - }, - // Combinations. 
- { - name: "alt+right", - buf: encode(10, 32, 16, false), - expected: MouseClickEvent{X: 32, Y: 16, Mod: ModAlt, Button: MouseRight}, - }, - { - name: "ctrl+right", - buf: encode(18, 32, 16, false), - expected: MouseClickEvent{X: 32, Y: 16, Mod: ModCtrl, Button: MouseRight}, - }, - { - name: "ctrl+alt+right", - buf: encode(26, 32, 16, false), - expected: MouseClickEvent{X: 32, Y: 16, Mod: ModAlt | ModCtrl, Button: MouseRight}, - }, - { - name: "alt+wheel", - buf: encode(73, 32, 16, false), - expected: MouseWheelEvent{X: 32, Y: 16, Mod: ModAlt, Button: MouseWheelDown}, - }, - { - name: "ctrl+wheel", - buf: encode(81, 32, 16, false), - expected: MouseWheelEvent{X: 32, Y: 16, Mod: ModCtrl, Button: MouseWheelDown}, - }, - { - name: "ctrl+alt+wheel", - buf: encode(89, 32, 16, false), - expected: MouseWheelEvent{X: 32, Y: 16, Mod: ModAlt | ModCtrl, Button: MouseWheelDown}, - }, - { - name: "ctrl+alt+shift+wheel", - buf: encode(93, 32, 16, false), - expected: MouseWheelEvent{X: 32, Y: 16, Mod: ModAlt | ModShift | ModCtrl, Button: MouseWheelDown}, - }, - } - - for i := range tt { - tc := tt[i] - - t.Run(tc.name, func(t *testing.T) { - actual := parseSGRMouseEvent(tc.buf.cmd, tc.buf.params) - if tc.expected != actual { - t.Fatalf("expected %#v but got %#v", - tc.expected, - actual, - ) - } - }) - } -} diff --git a/packages/tui/input/parse.go b/packages/tui/input/parse.go deleted file mode 100644 index ad8e2184..00000000 --- a/packages/tui/input/parse.go +++ /dev/null @@ -1,1030 +0,0 @@ -package input - -import ( - "bytes" - "encoding/base64" - "slices" - "strings" - "unicode" - "unicode/utf8" - - "github.com/charmbracelet/x/ansi" - "github.com/charmbracelet/x/ansi/parser" - "github.com/rivo/uniseg" -) - -// Flags to control the behavior of the parser. -const ( - // When this flag is set, the driver will treat both Ctrl+Space and Ctrl+@ - // as the same key sequence. - // - // Historically, the ANSI specs generate NUL (0x00) on both the Ctrl+Space - // and Ctrl+@ key sequences. This flag allows the driver to treat both as - // the same key sequence. - FlagCtrlAt = 1 << iota - - // When this flag is set, the driver will treat the Tab key and Ctrl+I as - // the same key sequence. - // - // Historically, the ANSI specs generate HT (0x09) on both the Tab key and - // Ctrl+I. This flag allows the driver to treat both as the same key - // sequence. - FlagCtrlI - - // When this flag is set, the driver will treat the Enter key and Ctrl+M as - // the same key sequence. - // - // Historically, the ANSI specs generate CR (0x0D) on both the Enter key - // and Ctrl+M. This flag allows the driver to treat both as the same key. - FlagCtrlM - - // When this flag is set, the driver will treat Escape and Ctrl+[ as - // the same key sequence. - // - // Historically, the ANSI specs generate ESC (0x1B) on both the Escape key - // and Ctrl+[. This flag allows the driver to treat both as the same key - // sequence. - FlagCtrlOpenBracket - - // When this flag is set, the driver will send a BS (0x08 byte) character - // instead of a DEL (0x7F byte) character when the Backspace key is - // pressed. - // - // The VT100 terminal has both a Backspace and a Delete key. The VT220 - // terminal dropped the Backspace key and replaced it with the Delete key. - // Both terminals send a DEL character when the Delete key is pressed. - // Modern terminals and PCs later readded the Delete key but used a - // different key sequence, and the Backspace key was standardized to send a - // DEL character. 
- FlagBackspace - - // When this flag is set, the driver will recognize the Find key instead of - // treating it as a Home key. - // - // The Find key was part of the VT220 keyboard, and is no longer used in - // modern day PCs. - FlagFind - - // When this flag is set, the driver will recognize the Select key instead - // of treating it as a End key. - // - // The Symbol key was part of the VT220 keyboard, and is no longer used in - // modern day PCs. - FlagSelect - - // When this flag is set, the driver will use Terminfo databases to - // overwrite the default key sequences. - FlagTerminfo - - // When this flag is set, the driver will preserve function keys (F13-F63) - // as symbols. - // - // Since these keys are not part of today's standard 20th century keyboard, - // we treat them as F1-F12 modifier keys i.e. ctrl/shift/alt + Fn combos. - // Key definitions come from Terminfo, this flag is only useful when - // FlagTerminfo is not set. - FlagFKeys - - // When this flag is set, the driver will enable mouse mode on Windows. - // This is only useful on Windows and has no effect on other platforms. - FlagMouseMode -) - -// Parser is a parser for input escape sequences. -type Parser struct { - flags int -} - -// NewParser returns a new input parser. This is a low-level parser that parses -// escape sequences into human-readable events. -// This differs from [ansi.Parser] and [ansi.DecodeSequence] in which it -// recognizes incorrect sequences that some terminals may send. -// -// For instance, the X10 mouse protocol sends a `CSI M` sequence followed by 3 -// bytes. If the parser doesn't recognize the 3 bytes, they might be echoed to -// the terminal output causing a mess. -// -// Another example is how URxvt sends invalid sequences for modified keys using -// invalid CSI final characters like '$'. -// -// Use flags to control the behavior of ambiguous key sequences. -func NewParser(flags int) *Parser { - return &Parser{flags: flags} -} - -// parseSequence finds the first recognized event sequence and returns it along -// with its length. -// -// It will return zero and nil no sequence is recognized or when the buffer is -// empty. If a sequence is not supported, an UnknownEvent is returned. -func (p *Parser) parseSequence(buf []byte) (n int, Event Event) { - if len(buf) == 0 { - return 0, nil - } - - switch b := buf[0]; b { - case ansi.ESC: - if len(buf) == 1 { - // Escape key - return 1, KeyPressEvent{Code: KeyEscape} - } - - switch bPrime := buf[1]; bPrime { - case 'O': // Esc-prefixed SS3 - return p.parseSs3(buf) - case 'P': // Esc-prefixed DCS - return p.parseDcs(buf) - case '[': // Esc-prefixed CSI - return p.parseCsi(buf) - case ']': // Esc-prefixed OSC - return p.parseOsc(buf) - case '_': // Esc-prefixed APC - return p.parseApc(buf) - case '^': // Esc-prefixed PM - return p.parseStTerminated(ansi.PM, '^', nil)(buf) - case 'X': // Esc-prefixed SOS - return p.parseStTerminated(ansi.SOS, 'X', nil)(buf) - default: - n, e := p.parseSequence(buf[1:]) - if k, ok := e.(KeyPressEvent); ok { - k.Text = "" - k.Mod |= ModAlt - return n + 1, k - } - - // Not a key sequence, nor an alt modified key sequence. In that - // case, just report a single escape key. 
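// A minimal usage sketch of how this low-level API is typically driven;
// drainEvents is a hypothetical helper (the real entry points here are
// NewParser and parseSequence), mirroring the loop in parse_test.go further
// below: consume n bytes per decoded Event and stop once a sequence is
// incomplete (n == 0).
func drainEvents(p *Parser, buf []byte) []Event {
	var events []Event
	for len(buf) > 0 {
		n, ev := p.parseSequence(buf)
		if n == 0 {
			break // incomplete sequence; wait for more input
		}
		if ev != nil {
			events = append(events, ev)
		}
		buf = buf[n:]
	}
	return events
}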
- return 1, KeyPressEvent{Code: KeyEscape} - } - case ansi.SS3: - return p.parseSs3(buf) - case ansi.DCS: - return p.parseDcs(buf) - case ansi.CSI: - return p.parseCsi(buf) - case ansi.OSC: - return p.parseOsc(buf) - case ansi.APC: - return p.parseApc(buf) - case ansi.PM: - return p.parseStTerminated(ansi.PM, '^', nil)(buf) - case ansi.SOS: - return p.parseStTerminated(ansi.SOS, 'X', nil)(buf) - default: - if b <= ansi.US || b == ansi.DEL || b == ansi.SP { - return 1, p.parseControl(b) - } else if b >= ansi.PAD && b <= ansi.APC { - // C1 control code - // UTF-8 never starts with a C1 control code - // Encode these as Ctrl+Alt+ - code := rune(b) - 0x40 - return 1, KeyPressEvent{Code: code, Mod: ModCtrl | ModAlt} - } - return p.parseUtf8(buf) - } -} - -func (p *Parser) parseCsi(b []byte) (int, Event) { - if len(b) == 2 && b[0] == ansi.ESC { - // short cut if this is an alt+[ key - return 2, KeyPressEvent{Text: string(rune(b[1])), Mod: ModAlt} - } - - var cmd ansi.Cmd - var params [parser.MaxParamsSize]ansi.Param - var paramsLen int - - var i int - if b[i] == ansi.CSI || b[i] == ansi.ESC { - i++ - } - if i < len(b) && b[i-1] == ansi.ESC && b[i] == '[' { - i++ - } - - // Initial CSI byte - if i < len(b) && b[i] >= '<' && b[i] <= '?' { - cmd |= ansi.Cmd(b[i]) << parser.PrefixShift - } - - // Scan parameter bytes in the range 0x30-0x3F - var j int - for j = 0; i < len(b) && paramsLen < len(params) && b[i] >= 0x30 && b[i] <= 0x3F; i, j = i+1, j+1 { - if b[i] >= '0' && b[i] <= '9' { - if params[paramsLen] == parser.MissingParam { - params[paramsLen] = 0 - } - params[paramsLen] *= 10 - params[paramsLen] += ansi.Param(b[i]) - '0' - } - if b[i] == ':' { - params[paramsLen] |= parser.HasMoreFlag - } - if b[i] == ';' || b[i] == ':' { - paramsLen++ - if paramsLen < len(params) { - // Don't overflow the params slice - params[paramsLen] = parser.MissingParam - } - } - } - - if j > 0 && paramsLen < len(params) { - // has parameters - paramsLen++ - } - - // Scan intermediate bytes in the range 0x20-0x2F - var intermed byte - for ; i < len(b) && b[i] >= 0x20 && b[i] <= 0x2F; i++ { - intermed = b[i] - } - - // Set the intermediate byte - cmd |= ansi.Cmd(intermed) << parser.IntermedShift - - // Scan final byte in the range 0x40-0x7E - if i >= len(b) { - // Incomplete sequence - return 0, nil - } - if b[i] < 0x40 || b[i] > 0x7E { - // Special case for URxvt keys - // CSI $ is an invalid sequence, but URxvt uses it for - // shift modified keys. - if b[i-1] == '$' { - n, ev := p.parseCsi(append(b[:i-1], '~')) - if k, ok := ev.(KeyPressEvent); ok { - k.Mod |= ModShift - return n, k - } - } - return i, UnknownEvent(b[:i-1]) - } - - // Add the final byte - cmd |= ansi.Cmd(b[i]) - i++ - - pa := ansi.Params(params[:paramsLen]) - switch cmd { - case 'y' | '?'<= 3 { - pa = pa[:3] - return i, parseSGRMouseEvent(cmd, pa) - } - case 'm' | '>'< R (which is modified F3) when the cursor is at the - // row 1. In this case, we report both messages. - // - // For a non ambiguous cursor position report, use - // [ansi.RequestExtendedCursorPosition] (DECXCPR) instead. 
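// A hedged sketch of the SGR button encoding exercised by the mouse tests
// earlier in this diff and dispatched to parseSGRMouseEvent above: the low
// two bits select the button, bits 2-4 carry shift/alt/ctrl, bit 5 marks
// motion, bit 6 the wheel group, and bit 7 the extra (backward/forward)
// buttons. decodeSGRButton is a hypothetical helper name.
func decodeSGRButton(b int) (btn int, mod KeyMod, motion, wheel bool) {
	if b&4 != 0 {
		mod |= ModShift
	}
	if b&8 != 0 {
		mod |= ModAlt
	}
	if b&16 != 0 {
		mod |= ModCtrl
	}
	motion = b&32 != 0
	wheel = b&64 != 0
	btn = b & 3 // within the basic group: 0=left, 1=middle, 2=right
	return btn, mod, motion, wheel
}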
- return i, MultiEvent{KeyPressEvent{Code: KeyF3, Mod: KeyMod(col - 1)}, m} - } - - return i, m - } - - if paramsLen != 0 { - break - } - - // Unmodified key F3 (CSI R) - fallthrough - case 'a', 'b', 'c', 'd', 'A', 'B', 'C', 'D', 'E', 'F', 'H', 'P', 'Q', 'S', 'Z': - var k KeyPressEvent - switch cmd { - case 'a', 'b', 'c', 'd': - k = KeyPressEvent{Code: KeyUp + rune(cmd-'a'), Mod: ModShift} - case 'A', 'B', 'C', 'D': - k = KeyPressEvent{Code: KeyUp + rune(cmd-'A')} - case 'E': - k = KeyPressEvent{Code: KeyBegin} - case 'F': - k = KeyPressEvent{Code: KeyEnd} - case 'H': - k = KeyPressEvent{Code: KeyHome} - case 'P', 'Q', 'R', 'S': - k = KeyPressEvent{Code: KeyF1 + rune(cmd-'P')} - case 'Z': - k = KeyPressEvent{Code: KeyTab, Mod: ModShift} - } - id, _, _ := pa.Param(0, 1) - if id == 0 { - id = 1 - } - mod, _, _ := pa.Param(1, 1) - if mod == 0 { - mod = 1 - } - if paramsLen > 1 && id == 1 && mod != -1 { - // CSI 1 ; A - k.Mod |= KeyMod(mod - 1) - } - // Don't forget to handle Kitty keyboard protocol - return i, parseKittyKeyboardExt(pa, k) - case 'M': - // Handle X10 mouse - if i+2 >= len(b) { - // Incomplete sequence - return 0, nil - } - // PERFORMANCE: Do not use append here, as it will allocate a new slice - // for every mouse event. Instead, pass a sub-slice of the original - // buffer. - return i + 3, parseX10MouseEvent(b[i-1 : i+3]) - case 'y' | '$'< 1 && mod != -1 { - k.Mod |= KeyMod(mod - 1) - } - - // Handle URxvt weird keys - switch cmd { - case '~': - // Don't forget to handle Kitty keyboard protocol - return i, parseKittyKeyboardExt(pa, k) - case '^': - k.Mod |= ModCtrl - case '@': - k.Mod |= ModCtrl | ModShift - } - - return i, k - } - - case 't': - param, _, ok := pa.Param(0, 0) - if !ok { - break - } - - var winop WindowOpEvent - winop.Op = param - for j := 1; j < paramsLen; j++ { - val, _, ok := pa.Param(j, 0) - if ok { - winop.Args = append(winop.Args, val) - } - } - - return i, winop - } - return i, UnknownEvent(b[:i]) -} - -// parseSs3 parses a SS3 sequence. 
-// See https://vt100.net/docs/vt220-rm/chapter4.html#S4.4.4.2 -func (p *Parser) parseSs3(b []byte) (int, Event) { - if len(b) == 2 && b[0] == ansi.ESC { - // short cut if this is an alt+O key - return 2, KeyPressEvent{Code: rune(b[1]), Mod: ModAlt} - } - - var i int - if b[i] == ansi.SS3 || b[i] == ansi.ESC { - i++ - } - if i < len(b) && b[i-1] == ansi.ESC && b[i] == 'O' { - i++ - } - - // Scan numbers from 0-9 - var mod int - for ; i < len(b) && b[i] >= '0' && b[i] <= '9'; i++ { - mod *= 10 - mod += int(b[i]) - '0' - } - - // Scan a GL character - // A GL character is a single byte in the range 0x21-0x7E - // See https://vt100.net/docs/vt220-rm/chapter2.html#S2.3.2 - if i >= len(b) { - // Incomplete sequence - return 0, nil - } - if b[i] < 0x21 || b[i] > 0x7E { - return i, UnknownEvent(b[:i]) - } - - // GL character(s) - gl := b[i] - i++ - - var k KeyPressEvent - switch gl { - case 'a', 'b', 'c', 'd': - k = KeyPressEvent{Code: KeyUp + rune(gl-'a'), Mod: ModCtrl} - case 'A', 'B', 'C', 'D': - k = KeyPressEvent{Code: KeyUp + rune(gl-'A')} - case 'E': - k = KeyPressEvent{Code: KeyBegin} - case 'F': - k = KeyPressEvent{Code: KeyEnd} - case 'H': - k = KeyPressEvent{Code: KeyHome} - case 'P', 'Q', 'R', 'S': - k = KeyPressEvent{Code: KeyF1 + rune(gl-'P')} - case 'M': - k = KeyPressEvent{Code: KeyKpEnter} - case 'X': - k = KeyPressEvent{Code: KeyKpEqual} - case 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y': - k = KeyPressEvent{Code: KeyKpMultiply + rune(gl-'j')} - default: - return i, UnknownEvent(b[:i]) - } - - // Handle weird SS3 Func - if mod > 0 { - k.Mod |= KeyMod(mod - 1) - } - - return i, k -} - -func (p *Parser) parseOsc(b []byte) (int, Event) { - defaultKey := func() KeyPressEvent { - return KeyPressEvent{Code: rune(b[1]), Mod: ModAlt} - } - if len(b) == 2 && b[0] == ansi.ESC { - // short cut if this is an alt+] key - return 2, defaultKey() - } - - var i int - if b[i] == ansi.OSC || b[i] == ansi.ESC { - i++ - } - if i < len(b) && b[i-1] == ansi.ESC && b[i] == ']' { - i++ - } - - // Parse OSC command - // An OSC sequence is terminated by a BEL, ESC, or ST character - var start, end int - cmd := -1 - for ; i < len(b) && b[i] >= '0' && b[i] <= '9'; i++ { - if cmd == -1 { - cmd = 0 - } else { - cmd *= 10 - } - cmd += int(b[i]) - '0' - } - - if i < len(b) && b[i] == ';' { - // mark the start of the sequence data - i++ - start = i - } - - for ; i < len(b); i++ { - // advance to the end of the sequence - if slices.Contains([]byte{ansi.BEL, ansi.ESC, ansi.ST, ansi.CAN, ansi.SUB}, b[i]) { - break - } - } - - if i >= len(b) { - // Incomplete sequence - return 0, nil - } - - end = i // end of the sequence data - i++ - - // Check 7-bit ST (string terminator) character - switch b[i-1] { - case ansi.CAN, ansi.SUB: - return i, UnknownEvent(b[:i]) - case ansi.ESC: - if i >= len(b) || b[i] != '\\' { - if cmd == -1 || (start == 0 && end == 2) { - return 2, defaultKey() - } - - // If we don't have a valid ST terminator, then this is a - // cancelled sequence and should be ignored. - return i, UnknownEvent(b[:i]) - } - - i++ - } - - if end <= start { - return i, UnknownEvent(b[:i]) - } - - // PERFORMANCE: Only allocate the data string if we know we have a handler - // for the command. This avoids allocations for unknown OSC sequences that - // can be sent in high frequency by trackpads. 
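// A small sketch of the payload the OSC 52 branch just below receives: a
// clipboard response arrives as ESC ] 52 ; <selection> ; <base64 data> ST,
// so after the command number is stripped the data looks like "c;aGVsbG8=".
// decodeOSC52 is a hypothetical helper; it assumes the strings and
// encoding/base64 imports already present in this file.
func decodeOSC52(data string) (selection byte, content string, ok bool) {
	parts := strings.Split(data, ";")
	if len(parts) != 2 || len(parts[0]) < 1 {
		return 0, "", false
	}
	raw, err := base64.StdEncoding.DecodeString(parts[1])
	if err != nil {
		return 0, "", false
	}
	return parts[0][0], string(raw), true
}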
- switch cmd { - case 10, 11, 12: - data := string(b[start:end]) - color := ansi.XParseColor(data) - switch cmd { - case 10: - return i, ForegroundColorEvent{color} - case 11: - return i, BackgroundColorEvent{color} - case 12: - return i, CursorColorEvent{color} - } - case 52: - data := string(b[start:end]) - parts := strings.Split(data, ";") - if len(parts) == 0 { - return i, ClipboardEvent{} - } - if len(parts) != 2 || len(parts[0]) < 1 { - break - } - - b64 := parts[1] - bts, err := base64.StdEncoding.DecodeString(b64) - if err != nil { - break - } - - sel := ClipboardSelection(parts[0][0]) //nolint:unconvert - return i, ClipboardEvent{Selection: sel, Content: string(bts)} - } - - return i, UnknownEvent(b[:i]) -} - -// parseStTerminated parses a control sequence that gets terminated by a ST character. -func (p *Parser) parseStTerminated( - intro8, intro7 byte, - fn func([]byte) Event, -) func([]byte) (int, Event) { - defaultKey := func(b []byte) (int, Event) { - switch intro8 { - case ansi.SOS: - return 2, KeyPressEvent{Code: 'x', Mod: ModShift | ModAlt} - case ansi.PM, ansi.APC: - return 2, KeyPressEvent{Code: rune(b[1]), Mod: ModAlt} - } - return 0, nil - } - return func(b []byte) (int, Event) { - if len(b) == 2 && b[0] == ansi.ESC { - return defaultKey(b) - } - - var i int - if b[i] == intro8 || b[i] == ansi.ESC { - i++ - } - if i < len(b) && b[i-1] == ansi.ESC && b[i] == intro7 { - i++ - } - - // Scan control sequence - // Most common control sequence is terminated by a ST character - // ST is a 7-bit string terminator character is (ESC \) - start := i - for ; i < len(b); i++ { - if slices.Contains([]byte{ansi.ESC, ansi.ST, ansi.CAN, ansi.SUB}, b[i]) { - break - } - } - - if i >= len(b) { - // Incomplete sequence - return 0, nil - } - - end := i // end of the sequence data - i++ - - // Check 7-bit ST (string terminator) character - switch b[i-1] { - case ansi.CAN, ansi.SUB: - return i, UnknownEvent(b[:i]) - case ansi.ESC: - if i >= len(b) || b[i] != '\\' { - if start == end { - return defaultKey(b) - } - - // If we don't have a valid ST terminator, then this is a - // cancelled sequence and should be ignored. - return i, UnknownEvent(b[:i]) - } - - i++ - } - - // Call the function to parse the sequence and return the result - if fn != nil { - if e := fn(b[start:end]); e != nil { - return i, e - } - } - - return i, UnknownEvent(b[:i]) - } -} - -func (p *Parser) parseDcs(b []byte) (int, Event) { - if len(b) == 2 && b[0] == ansi.ESC { - // short cut if this is an alt+P key - return 2, KeyPressEvent{Code: 'p', Mod: ModShift | ModAlt} - } - - var params [16]ansi.Param - var paramsLen int - var cmd ansi.Cmd - - // DCS sequences are introduced by DCS (0x90) or ESC P (0x1b 0x50) - var i int - if b[i] == ansi.DCS || b[i] == ansi.ESC { - i++ - } - if i < len(b) && b[i-1] == ansi.ESC && b[i] == 'P' { - i++ - } - - // initial DCS byte - if i < len(b) && b[i] >= '<' && b[i] <= '?' 
{ - cmd |= ansi.Cmd(b[i]) << parser.PrefixShift - } - - // Scan parameter bytes in the range 0x30-0x3F - var j int - for j = 0; i < len(b) && paramsLen < len(params) && b[i] >= 0x30 && b[i] <= 0x3F; i, j = i+1, j+1 { - if b[i] >= '0' && b[i] <= '9' { - if params[paramsLen] == parser.MissingParam { - params[paramsLen] = 0 - } - params[paramsLen] *= 10 - params[paramsLen] += ansi.Param(b[i]) - '0' - } - if b[i] == ':' { - params[paramsLen] |= parser.HasMoreFlag - } - if b[i] == ';' || b[i] == ':' { - paramsLen++ - if paramsLen < len(params) { - // Don't overflow the params slice - params[paramsLen] = parser.MissingParam - } - } - } - - if j > 0 && paramsLen < len(params) { - // has parameters - paramsLen++ - } - - // Scan intermediate bytes in the range 0x20-0x2F - var intermed byte - for j := 0; i < len(b) && b[i] >= 0x20 && b[i] <= 0x2F; i, j = i+1, j+1 { - intermed = b[i] - } - - // set intermediate byte - cmd |= ansi.Cmd(intermed) << parser.IntermedShift - - // Scan final byte in the range 0x40-0x7E - if i >= len(b) { - // Incomplete sequence - return 0, nil - } - if b[i] < 0x40 || b[i] > 0x7E { - return i, UnknownEvent(b[:i]) - } - - // Add the final byte - cmd |= ansi.Cmd(b[i]) - i++ - - start := i // start of the sequence data - for ; i < len(b); i++ { - if b[i] == ansi.ST || b[i] == ansi.ESC { - break - } - } - - if i >= len(b) { - // Incomplete sequence - return 0, nil - } - - end := i // end of the sequence data - i++ - - // Check 7-bit ST (string terminator) character - if i < len(b) && b[i-1] == ansi.ESC && b[i] == '\\' { - i++ - } - - pa := ansi.Params(params[:paramsLen]) - switch cmd { - case 'r' | '+'<'< 1 { - g.Payload = parts[1] - } - return g - } - - return nil - })(b) -} - -func (p *Parser) parseUtf8(b []byte) (int, Event) { - if len(b) == 0 { - return 0, nil - } - - c := b[0] - if c <= ansi.US || c == ansi.DEL || c == ansi.SP { - // Control codes get handled by parseControl - return 1, p.parseControl(c) - } else if c > ansi.US && c < ansi.DEL { - // ASCII printable characters - code := rune(c) - k := KeyPressEvent{Code: code, Text: string(code)} - if unicode.IsUpper(code) { - // Convert upper case letters to lower case + shift modifier - k.Code = unicode.ToLower(code) - k.ShiftedCode = code - k.Mod |= ModShift - } - - return 1, k - } - - code, _ := utf8.DecodeRune(b) - if code == utf8.RuneError { - return 1, UnknownEvent(b[0]) - } - - cluster, _, _, _ := uniseg.FirstGraphemeCluster(b, -1) - // PERFORMANCE: Use RuneCount to check for multi-rune graphemes instead of - // looping over the string representation. 
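// Sketch of what the RuneCount check just below distinguishes, assuming the
// uniseg and unicode/utf8 imports at the top of this file: a multi-rune
// grapheme cluster such as "e" followed by U+0301 (combining acute) has no
// single rune Code, so it is reported as KeyExtended with the whole cluster
// preserved in Text. exampleCluster is a hypothetical helper.
func exampleCluster(s string) KeyPressEvent {
	cluster, _, _, _ := uniseg.FirstGraphemeCluster([]byte(s), -1)
	code, _ := utf8.DecodeRune(cluster)
	if utf8.RuneCount(cluster) > 1 {
		code = KeyExtended
	}
	return KeyPressEvent{Code: code, Text: string(cluster)}
}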
- if utf8.RuneCount(cluster) > 1 { - code = KeyExtended - } - - return len(cluster), KeyPressEvent{Code: code, Text: string(cluster)} -} - -func (p *Parser) parseControl(b byte) Event { - switch b { - case ansi.NUL: - if p.flags&FlagCtrlAt != 0 { - return KeyPressEvent{Code: '@', Mod: ModCtrl} - } - return KeyPressEvent{Code: KeySpace, Mod: ModCtrl} - case ansi.BS: - return KeyPressEvent{Code: 'h', Mod: ModCtrl} - case ansi.HT: - if p.flags&FlagCtrlI != 0 { - return KeyPressEvent{Code: 'i', Mod: ModCtrl} - } - return KeyPressEvent{Code: KeyTab} - case ansi.CR: - if p.flags&FlagCtrlM != 0 { - return KeyPressEvent{Code: 'm', Mod: ModCtrl} - } - return KeyPressEvent{Code: KeyEnter} - case ansi.ESC: - if p.flags&FlagCtrlOpenBracket != 0 { - return KeyPressEvent{Code: '[', Mod: ModCtrl} - } - return KeyPressEvent{Code: KeyEscape} - case ansi.DEL: - if p.flags&FlagBackspace != 0 { - return KeyPressEvent{Code: KeyDelete} - } - return KeyPressEvent{Code: KeyBackspace} - case ansi.SP: - return KeyPressEvent{Code: KeySpace, Text: " "} - default: - if b >= ansi.SOH && b <= ansi.SUB { - // Use lower case letters for control codes - code := rune(b + 0x60) - return KeyPressEvent{Code: code, Mod: ModCtrl} - } else if b >= ansi.FS && b <= ansi.US { - code := rune(b + 0x40) - return KeyPressEvent{Code: code, Mod: ModCtrl} - } - return UnknownEvent(b) - } -} diff --git a/packages/tui/input/parse_test.go b/packages/tui/input/parse_test.go deleted file mode 100644 index dc892e0c..00000000 --- a/packages/tui/input/parse_test.go +++ /dev/null @@ -1,47 +0,0 @@ -package input - -import ( - "image/color" - "reflect" - "testing" - - "github.com/charmbracelet/x/ansi" -) - -func TestParseSequence_Events(t *testing.T) { - input := []byte("\x1b\x1b[Ztest\x00\x1b]10;rgb:1234/1234/1234\x07\x1b[27;2;27~\x1b[?1049;2$y\x1b[4;1$y") - want := []Event{ - KeyPressEvent{Code: KeyTab, Mod: ModShift | ModAlt}, - KeyPressEvent{Code: 't', Text: "t"}, - KeyPressEvent{Code: 'e', Text: "e"}, - KeyPressEvent{Code: 's', Text: "s"}, - KeyPressEvent{Code: 't', Text: "t"}, - KeyPressEvent{Code: KeySpace, Mod: ModCtrl}, - ForegroundColorEvent{color.RGBA{R: 0x12, G: 0x12, B: 0x12, A: 0xff}}, - KeyPressEvent{Code: KeyEscape, Mod: ModShift}, - ModeReportEvent{Mode: ansi.AltScreenSaveCursorMode, Value: ansi.ModeReset}, - ModeReportEvent{Mode: ansi.InsertReplaceMode, Value: ansi.ModeSet}, - } - - var p Parser - for i := 0; len(input) != 0; i++ { - if i >= len(want) { - t.Fatalf("reached end of want events") - } - n, got := p.parseSequence(input) - if !reflect.DeepEqual(got, want[i]) { - t.Errorf("got %#v (%T), want %#v (%T)", got, got, want[i], want[i]) - } - input = input[n:] - } -} - -func BenchmarkParseSequence(b *testing.B) { - var p Parser - input := []byte("\x1b\x1b[Ztest\x00\x1b]10;1234/1234/1234\x07\x1b[27;2;27~") - b.ReportAllocs() - b.ResetTimer() - for i := 0; i < b.N; i++ { - p.parseSequence(input) - } -} diff --git a/packages/tui/input/paste.go b/packages/tui/input/paste.go deleted file mode 100644 index 4e8fe68c..00000000 --- a/packages/tui/input/paste.go +++ /dev/null @@ -1,13 +0,0 @@ -package input - -// PasteEvent is an message that is emitted when a terminal receives pasted text -// using bracketed-paste. -type PasteEvent string - -// PasteStartEvent is an message that is emitted when the terminal starts the -// bracketed-paste text. -type PasteStartEvent struct{} - -// PasteEndEvent is an message that is emitted when the terminal ends the -// bracketed-paste text. 
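// A minimal sketch of the wire framing behind these paste events, assuming
// the terminal has bracketed paste enabled (CSI ? 2004 h): the pasted text
// arrives wrapped between CSI 200 ~ and CSI 201 ~, which this package
// surfaces as a PasteStartEvent, the PasteEvent payload, and a PasteEndEvent.
// bracketedPaste is a hypothetical helper showing the terminal-side framing.
func bracketedPaste(text string) []byte {
	return []byte("\x1b[200~" + text + "\x1b[201~")
}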
-type PasteEndEvent struct{} diff --git a/packages/tui/input/table.go b/packages/tui/input/table.go deleted file mode 100644 index d2373236..00000000 --- a/packages/tui/input/table.go +++ /dev/null @@ -1,389 +0,0 @@ -package input - -import ( - "maps" - "strconv" - - "github.com/charmbracelet/x/ansi" -) - -// buildKeysTable builds a table of key sequences and their corresponding key -// events based on the VT100/VT200, XTerm, and Urxvt terminal specs. -func buildKeysTable(flags int, term string) map[string]Key { - nul := Key{Code: KeySpace, Mod: ModCtrl} // ctrl+@ or ctrl+space - if flags&FlagCtrlAt != 0 { - nul = Key{Code: '@', Mod: ModCtrl} - } - - tab := Key{Code: KeyTab} // ctrl+i or tab - if flags&FlagCtrlI != 0 { - tab = Key{Code: 'i', Mod: ModCtrl} - } - - enter := Key{Code: KeyEnter} // ctrl+m or enter - if flags&FlagCtrlM != 0 { - enter = Key{Code: 'm', Mod: ModCtrl} - } - - esc := Key{Code: KeyEscape} // ctrl+[ or escape - if flags&FlagCtrlOpenBracket != 0 { - esc = Key{Code: '[', Mod: ModCtrl} // ctrl+[ or escape - } - - del := Key{Code: KeyBackspace} - if flags&FlagBackspace != 0 { - del.Code = KeyDelete - } - - find := Key{Code: KeyHome} - if flags&FlagFind != 0 { - find.Code = KeyFind - } - - sel := Key{Code: KeyEnd} - if flags&FlagSelect != 0 { - sel.Code = KeySelect - } - - // The following is a table of key sequences and their corresponding key - // events based on the VT100/VT200 terminal specs. - // - // See: https://vt100.net/docs/vt100-ug/chapter3.html#S3.2 - // See: https://vt100.net/docs/vt220-rm/chapter3.html - // - // XXX: These keys may be overwritten by other options like XTerm or - // Terminfo. - table := map[string]Key{ - // C0 control characters - string(byte(ansi.NUL)): nul, - string(byte(ansi.SOH)): {Code: 'a', Mod: ModCtrl}, - string(byte(ansi.STX)): {Code: 'b', Mod: ModCtrl}, - string(byte(ansi.ETX)): {Code: 'c', Mod: ModCtrl}, - string(byte(ansi.EOT)): {Code: 'd', Mod: ModCtrl}, - string(byte(ansi.ENQ)): {Code: 'e', Mod: ModCtrl}, - string(byte(ansi.ACK)): {Code: 'f', Mod: ModCtrl}, - string(byte(ansi.BEL)): {Code: 'g', Mod: ModCtrl}, - string(byte(ansi.BS)): {Code: 'h', Mod: ModCtrl}, - string(byte(ansi.HT)): tab, - string(byte(ansi.LF)): {Code: 'j', Mod: ModCtrl}, - string(byte(ansi.VT)): {Code: 'k', Mod: ModCtrl}, - string(byte(ansi.FF)): {Code: 'l', Mod: ModCtrl}, - string(byte(ansi.CR)): enter, - string(byte(ansi.SO)): {Code: 'n', Mod: ModCtrl}, - string(byte(ansi.SI)): {Code: 'o', Mod: ModCtrl}, - string(byte(ansi.DLE)): {Code: 'p', Mod: ModCtrl}, - string(byte(ansi.DC1)): {Code: 'q', Mod: ModCtrl}, - string(byte(ansi.DC2)): {Code: 'r', Mod: ModCtrl}, - string(byte(ansi.DC3)): {Code: 's', Mod: ModCtrl}, - string(byte(ansi.DC4)): {Code: 't', Mod: ModCtrl}, - string(byte(ansi.NAK)): {Code: 'u', Mod: ModCtrl}, - string(byte(ansi.SYN)): {Code: 'v', Mod: ModCtrl}, - string(byte(ansi.ETB)): {Code: 'w', Mod: ModCtrl}, - string(byte(ansi.CAN)): {Code: 'x', Mod: ModCtrl}, - string(byte(ansi.EM)): {Code: 'y', Mod: ModCtrl}, - string(byte(ansi.SUB)): {Code: 'z', Mod: ModCtrl}, - string(byte(ansi.ESC)): esc, - string(byte(ansi.FS)): {Code: '\\', Mod: ModCtrl}, - string(byte(ansi.GS)): {Code: ']', Mod: ModCtrl}, - string(byte(ansi.RS)): {Code: '^', Mod: ModCtrl}, - string(byte(ansi.US)): {Code: '_', Mod: ModCtrl}, - - // Special keys in G0 - string(byte(ansi.SP)): {Code: KeySpace, Text: " "}, - string(byte(ansi.DEL)): del, - - // Special keys - - "\x1b[Z": {Code: KeyTab, Mod: ModShift}, - - "\x1b[1~": find, - "\x1b[2~": {Code: KeyInsert}, - "\x1b[3~": {Code: 
KeyDelete}, - "\x1b[4~": sel, - "\x1b[5~": {Code: KeyPgUp}, - "\x1b[6~": {Code: KeyPgDown}, - "\x1b[7~": {Code: KeyHome}, - "\x1b[8~": {Code: KeyEnd}, - - // Normal mode - "\x1b[A": {Code: KeyUp}, - "\x1b[B": {Code: KeyDown}, - "\x1b[C": {Code: KeyRight}, - "\x1b[D": {Code: KeyLeft}, - "\x1b[E": {Code: KeyBegin}, - "\x1b[F": {Code: KeyEnd}, - "\x1b[H": {Code: KeyHome}, - "\x1b[P": {Code: KeyF1}, - "\x1b[Q": {Code: KeyF2}, - "\x1b[R": {Code: KeyF3}, - "\x1b[S": {Code: KeyF4}, - - // Application Cursor Key Mode (DECCKM) - "\x1bOA": {Code: KeyUp}, - "\x1bOB": {Code: KeyDown}, - "\x1bOC": {Code: KeyRight}, - "\x1bOD": {Code: KeyLeft}, - "\x1bOE": {Code: KeyBegin}, - "\x1bOF": {Code: KeyEnd}, - "\x1bOH": {Code: KeyHome}, - "\x1bOP": {Code: KeyF1}, - "\x1bOQ": {Code: KeyF2}, - "\x1bOR": {Code: KeyF3}, - "\x1bOS": {Code: KeyF4}, - - // Keypad Application Mode (DECKPAM) - - "\x1bOM": {Code: KeyKpEnter}, - "\x1bOX": {Code: KeyKpEqual}, - "\x1bOj": {Code: KeyKpMultiply}, - "\x1bOk": {Code: KeyKpPlus}, - "\x1bOl": {Code: KeyKpComma}, - "\x1bOm": {Code: KeyKpMinus}, - "\x1bOn": {Code: KeyKpDecimal}, - "\x1bOo": {Code: KeyKpDivide}, - "\x1bOp": {Code: KeyKp0}, - "\x1bOq": {Code: KeyKp1}, - "\x1bOr": {Code: KeyKp2}, - "\x1bOs": {Code: KeyKp3}, - "\x1bOt": {Code: KeyKp4}, - "\x1bOu": {Code: KeyKp5}, - "\x1bOv": {Code: KeyKp6}, - "\x1bOw": {Code: KeyKp7}, - "\x1bOx": {Code: KeyKp8}, - "\x1bOy": {Code: KeyKp9}, - - // Function keys - - "\x1b[11~": {Code: KeyF1}, - "\x1b[12~": {Code: KeyF2}, - "\x1b[13~": {Code: KeyF3}, - "\x1b[14~": {Code: KeyF4}, - "\x1b[15~": {Code: KeyF5}, - "\x1b[17~": {Code: KeyF6}, - "\x1b[18~": {Code: KeyF7}, - "\x1b[19~": {Code: KeyF8}, - "\x1b[20~": {Code: KeyF9}, - "\x1b[21~": {Code: KeyF10}, - "\x1b[23~": {Code: KeyF11}, - "\x1b[24~": {Code: KeyF12}, - "\x1b[25~": {Code: KeyF13}, - "\x1b[26~": {Code: KeyF14}, - "\x1b[28~": {Code: KeyF15}, - "\x1b[29~": {Code: KeyF16}, - "\x1b[31~": {Code: KeyF17}, - "\x1b[32~": {Code: KeyF18}, - "\x1b[33~": {Code: KeyF19}, - "\x1b[34~": {Code: KeyF20}, - } - - // CSI ~ sequence keys - csiTildeKeys := map[string]Key{ - "1": find, "2": {Code: KeyInsert}, - "3": {Code: KeyDelete}, "4": sel, - "5": {Code: KeyPgUp}, "6": {Code: KeyPgDown}, - "7": {Code: KeyHome}, "8": {Code: KeyEnd}, - // There are no 9 and 10 keys - "11": {Code: KeyF1}, "12": {Code: KeyF2}, - "13": {Code: KeyF3}, "14": {Code: KeyF4}, - "15": {Code: KeyF5}, "17": {Code: KeyF6}, - "18": {Code: KeyF7}, "19": {Code: KeyF8}, - "20": {Code: KeyF9}, "21": {Code: KeyF10}, - "23": {Code: KeyF11}, "24": {Code: KeyF12}, - "25": {Code: KeyF13}, "26": {Code: KeyF14}, - "28": {Code: KeyF15}, "29": {Code: KeyF16}, - "31": {Code: KeyF17}, "32": {Code: KeyF18}, - "33": {Code: KeyF19}, "34": {Code: KeyF20}, - } - - // URxvt keys - // See https://manpages.ubuntu.com/manpages/trusty/man7/urxvt.7.html#key%20codes - table["\x1b[a"] = Key{Code: KeyUp, Mod: ModShift} - table["\x1b[b"] = Key{Code: KeyDown, Mod: ModShift} - table["\x1b[c"] = Key{Code: KeyRight, Mod: ModShift} - table["\x1b[d"] = Key{Code: KeyLeft, Mod: ModShift} - table["\x1bOa"] = Key{Code: KeyUp, Mod: ModCtrl} - table["\x1bOb"] = Key{Code: KeyDown, Mod: ModCtrl} - table["\x1bOc"] = Key{Code: KeyRight, Mod: ModCtrl} - table["\x1bOd"] = Key{Code: KeyLeft, Mod: ModCtrl} - //nolint:godox - // TODO: invistigate if shift-ctrl arrow keys collide with DECCKM keys i.e. 
- // "\x1bOA", "\x1bOB", "\x1bOC", "\x1bOD" - - // URxvt modifier CSI ~ keys - for k, v := range csiTildeKeys { - key := v - // Normal (no modifier) already defined part of VT100/VT200 - // Shift modifier - key.Mod = ModShift - table["\x1b["+k+"$"] = key - // Ctrl modifier - key.Mod = ModCtrl - table["\x1b["+k+"^"] = key - // Shift-Ctrl modifier - key.Mod = ModShift | ModCtrl - table["\x1b["+k+"@"] = key - } - - // URxvt F keys - // Note: Shift + F1-F10 generates F11-F20. - // This means Shift + F1 and Shift + F2 will generate F11 and F12, the same - // applies to Ctrl + Shift F1 & F2. - // - // P.S. Don't like this? Blame URxvt, configure your terminal to use - // different escapes like XTerm, or switch to a better terminal ¯\_(ツ)_/¯ - // - // See https://manpages.ubuntu.com/manpages/trusty/man7/urxvt.7.html#key%20codes - table["\x1b[23$"] = Key{Code: KeyF11, Mod: ModShift} - table["\x1b[24$"] = Key{Code: KeyF12, Mod: ModShift} - table["\x1b[25$"] = Key{Code: KeyF13, Mod: ModShift} - table["\x1b[26$"] = Key{Code: KeyF14, Mod: ModShift} - table["\x1b[28$"] = Key{Code: KeyF15, Mod: ModShift} - table["\x1b[29$"] = Key{Code: KeyF16, Mod: ModShift} - table["\x1b[31$"] = Key{Code: KeyF17, Mod: ModShift} - table["\x1b[32$"] = Key{Code: KeyF18, Mod: ModShift} - table["\x1b[33$"] = Key{Code: KeyF19, Mod: ModShift} - table["\x1b[34$"] = Key{Code: KeyF20, Mod: ModShift} - table["\x1b[11^"] = Key{Code: KeyF1, Mod: ModCtrl} - table["\x1b[12^"] = Key{Code: KeyF2, Mod: ModCtrl} - table["\x1b[13^"] = Key{Code: KeyF3, Mod: ModCtrl} - table["\x1b[14^"] = Key{Code: KeyF4, Mod: ModCtrl} - table["\x1b[15^"] = Key{Code: KeyF5, Mod: ModCtrl} - table["\x1b[17^"] = Key{Code: KeyF6, Mod: ModCtrl} - table["\x1b[18^"] = Key{Code: KeyF7, Mod: ModCtrl} - table["\x1b[19^"] = Key{Code: KeyF8, Mod: ModCtrl} - table["\x1b[20^"] = Key{Code: KeyF9, Mod: ModCtrl} - table["\x1b[21^"] = Key{Code: KeyF10, Mod: ModCtrl} - table["\x1b[23^"] = Key{Code: KeyF11, Mod: ModCtrl} - table["\x1b[24^"] = Key{Code: KeyF12, Mod: ModCtrl} - table["\x1b[25^"] = Key{Code: KeyF13, Mod: ModCtrl} - table["\x1b[26^"] = Key{Code: KeyF14, Mod: ModCtrl} - table["\x1b[28^"] = Key{Code: KeyF15, Mod: ModCtrl} - table["\x1b[29^"] = Key{Code: KeyF16, Mod: ModCtrl} - table["\x1b[31^"] = Key{Code: KeyF17, Mod: ModCtrl} - table["\x1b[32^"] = Key{Code: KeyF18, Mod: ModCtrl} - table["\x1b[33^"] = Key{Code: KeyF19, Mod: ModCtrl} - table["\x1b[34^"] = Key{Code: KeyF20, Mod: ModCtrl} - table["\x1b[23@"] = Key{Code: KeyF11, Mod: ModShift | ModCtrl} - table["\x1b[24@"] = Key{Code: KeyF12, Mod: ModShift | ModCtrl} - table["\x1b[25@"] = Key{Code: KeyF13, Mod: ModShift | ModCtrl} - table["\x1b[26@"] = Key{Code: KeyF14, Mod: ModShift | ModCtrl} - table["\x1b[28@"] = Key{Code: KeyF15, Mod: ModShift | ModCtrl} - table["\x1b[29@"] = Key{Code: KeyF16, Mod: ModShift | ModCtrl} - table["\x1b[31@"] = Key{Code: KeyF17, Mod: ModShift | ModCtrl} - table["\x1b[32@"] = Key{Code: KeyF18, Mod: ModShift | ModCtrl} - table["\x1b[33@"] = Key{Code: KeyF19, Mod: ModShift | ModCtrl} - table["\x1b[34@"] = Key{Code: KeyF20, Mod: ModShift | ModCtrl} - - // Register Alt + combinations - // XXX: this must come after URxvt but before XTerm keys to register URxvt - // keys with alt modifier - tmap := map[string]Key{} - for seq, key := range table { - key := key - key.Mod |= ModAlt - key.Text = "" // Clear runes - tmap["\x1b"+seq] = key - } - maps.Copy(table, tmap) - - // XTerm modifiers - // These are offset by 1 to be compatible with our Mod type. 
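// A hedged sketch of what the registration loops below produce, assuming the
// xtermMod offset of +1: for ModCtrl (4) the modifier parameter is "5", so
// cursor keys gain entries like ESC [ 1 ; 5 A for ctrl+up and tilde keys gain
// entries like ESC [ 3 ; 5 ~ for ctrl+delete. exampleXtermEntries is a
// hypothetical helper listing a few of the generated table entries.
func exampleXtermEntries() map[string]Key {
	return map[string]Key{
		"\x1b[1;5A": {Code: KeyUp, Mod: ModCtrl},      // ctrl+up
		"\x1b[3;5~": {Code: KeyDelete, Mod: ModCtrl},  // ctrl+delete
		"\x1b[1;2C": {Code: KeyRight, Mod: ModShift},  // shift+right
	}
}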
- // See https://invisible-island.net/xterm/ctlseqs/ctlseqs.html#h3-PC-Style-Function-Keys - modifiers := []KeyMod{ - ModShift, // 1 - ModAlt, // 2 - ModShift | ModAlt, // 3 - ModCtrl, // 4 - ModShift | ModCtrl, // 5 - ModAlt | ModCtrl, // 6 - ModShift | ModAlt | ModCtrl, // 7 - ModMeta, // 8 - ModMeta | ModShift, // 9 - ModMeta | ModAlt, // 10 - ModMeta | ModShift | ModAlt, // 11 - ModMeta | ModCtrl, // 12 - ModMeta | ModShift | ModCtrl, // 13 - ModMeta | ModAlt | ModCtrl, // 14 - ModMeta | ModShift | ModAlt | ModCtrl, // 15 - } - - // SS3 keypad function keys - ss3FuncKeys := map[string]Key{ - // These are defined in XTerm - // Taken from Foot keymap.h and XTerm modifyOtherKeys - // https://codeberg.org/dnkl/foot/src/branch/master/keymap.h - "M": {Code: KeyKpEnter}, "X": {Code: KeyKpEqual}, - "j": {Code: KeyKpMultiply}, "k": {Code: KeyKpPlus}, - "l": {Code: KeyKpComma}, "m": {Code: KeyKpMinus}, - "n": {Code: KeyKpDecimal}, "o": {Code: KeyKpDivide}, - "p": {Code: KeyKp0}, "q": {Code: KeyKp1}, - "r": {Code: KeyKp2}, "s": {Code: KeyKp3}, - "t": {Code: KeyKp4}, "u": {Code: KeyKp5}, - "v": {Code: KeyKp6}, "w": {Code: KeyKp7}, - "x": {Code: KeyKp8}, "y": {Code: KeyKp9}, - } - - // XTerm keys - csiFuncKeys := map[string]Key{ - "A": {Code: KeyUp}, "B": {Code: KeyDown}, - "C": {Code: KeyRight}, "D": {Code: KeyLeft}, - "E": {Code: KeyBegin}, "F": {Code: KeyEnd}, - "H": {Code: KeyHome}, "P": {Code: KeyF1}, - "Q": {Code: KeyF2}, "R": {Code: KeyF3}, - "S": {Code: KeyF4}, - } - - // CSI 27 ; ; ~ keys defined in XTerm modifyOtherKeys - modifyOtherKeys := map[int]Key{ - ansi.BS: {Code: KeyBackspace}, - ansi.HT: {Code: KeyTab}, - ansi.CR: {Code: KeyEnter}, - ansi.ESC: {Code: KeyEscape}, - ansi.DEL: {Code: KeyBackspace}, - } - - for _, m := range modifiers { - // XTerm modifier offset +1 - xtermMod := strconv.Itoa(int(m) + 1) - - // CSI 1 ; - for k, v := range csiFuncKeys { - // Functions always have a leading 1 param - seq := "\x1b[1;" + xtermMod + k - key := v - key.Mod = m - table[seq] = key - } - // SS3 - for k, v := range ss3FuncKeys { - seq := "\x1bO" + xtermMod + k - key := v - key.Mod = m - table[seq] = key - } - // CSI ; ~ - for k, v := range csiTildeKeys { - seq := "\x1b[" + k + ";" + xtermMod + "~" - key := v - key.Mod = m - table[seq] = key - } - // CSI 27 ; ; ~ - for k, v := range modifyOtherKeys { - code := strconv.Itoa(k) - seq := "\x1b[27;" + xtermMod + ";" + code + "~" - key := v - key.Mod = m - table[seq] = key - } - } - - // Register terminfo keys - // XXX: this might override keys already registered in table - if flags&FlagTerminfo != 0 { - titable := buildTerminfoKeys(flags, term) - maps.Copy(table, titable) - } - - return table -} diff --git a/packages/tui/input/termcap.go b/packages/tui/input/termcap.go deleted file mode 100644 index 3502189f..00000000 --- a/packages/tui/input/termcap.go +++ /dev/null @@ -1,54 +0,0 @@ -package input - -import ( - "bytes" - "encoding/hex" - "strings" -) - -// CapabilityEvent represents a Termcap/Terminfo response event. Termcap -// responses are generated by the terminal in response to RequestTermcap -// (XTGETTCAP) requests. 
-// -// See: https://invisible-island.net/xterm/ctlseqs/ctlseqs.html#h3-Operating-System-Commands -type CapabilityEvent string - -func parseTermcap(data []byte) CapabilityEvent { - // XTGETTCAP - if len(data) == 0 { - return CapabilityEvent("") - } - - var tc strings.Builder - split := bytes.Split(data, []byte{';'}) - for _, s := range split { - parts := bytes.SplitN(s, []byte{'='}, 2) - if len(parts) == 0 { - return CapabilityEvent("") - } - - name, err := hex.DecodeString(string(parts[0])) - if err != nil || len(name) == 0 { - continue - } - - var value []byte - if len(parts) > 1 { - value, err = hex.DecodeString(string(parts[1])) - if err != nil { - continue - } - } - - if tc.Len() > 0 { - tc.WriteByte(';') - } - tc.WriteString(string(name)) - if len(value) > 0 { - tc.WriteByte('=') - tc.WriteString(string(value)) - } - } - - return CapabilityEvent(tc.String()) -} diff --git a/packages/tui/input/terminfo.go b/packages/tui/input/terminfo.go deleted file mode 100644 index a54da2c3..00000000 --- a/packages/tui/input/terminfo.go +++ /dev/null @@ -1,277 +0,0 @@ -package input - -import ( - "strings" - - "github.com/xo/terminfo" -) - -func buildTerminfoKeys(flags int, term string) map[string]Key { - table := make(map[string]Key) - ti, _ := terminfo.Load(term) - if ti == nil { - return table - } - - tiTable := defaultTerminfoKeys(flags) - - // Default keys - for name, seq := range ti.StringCapsShort() { - if !strings.HasPrefix(name, "k") || len(seq) == 0 { - continue - } - - if k, ok := tiTable[name]; ok { - table[string(seq)] = k - } - } - - // Extended keys - for name, seq := range ti.ExtStringCapsShort() { - if !strings.HasPrefix(name, "k") || len(seq) == 0 { - continue - } - - if k, ok := tiTable[name]; ok { - table[string(seq)] = k - } - } - - return table -} - -// This returns a map of terminfo keys to key events. It's a mix of ncurses -// terminfo default and user-defined key capabilities. 
-// Upper-case caps that are defined in the default terminfo database are -// - kNXT -// - kPRV -// - kHOM -// - kEND -// - kDC -// - kIC -// - kLFT -// - kRIT -// -// See https://man7.org/linux/man-pages/man5/terminfo.5.html -// See https://github.com/mirror/ncurses/blob/master/include/Caps-ncurses -func defaultTerminfoKeys(flags int) map[string]Key { - keys := map[string]Key{ - "kcuu1": {Code: KeyUp}, - "kUP": {Code: KeyUp, Mod: ModShift}, - "kUP3": {Code: KeyUp, Mod: ModAlt}, - "kUP4": {Code: KeyUp, Mod: ModShift | ModAlt}, - "kUP5": {Code: KeyUp, Mod: ModCtrl}, - "kUP6": {Code: KeyUp, Mod: ModShift | ModCtrl}, - "kUP7": {Code: KeyUp, Mod: ModAlt | ModCtrl}, - "kUP8": {Code: KeyUp, Mod: ModShift | ModAlt | ModCtrl}, - "kcud1": {Code: KeyDown}, - "kDN": {Code: KeyDown, Mod: ModShift}, - "kDN3": {Code: KeyDown, Mod: ModAlt}, - "kDN4": {Code: KeyDown, Mod: ModShift | ModAlt}, - "kDN5": {Code: KeyDown, Mod: ModCtrl}, - "kDN7": {Code: KeyDown, Mod: ModAlt | ModCtrl}, - "kDN6": {Code: KeyDown, Mod: ModShift | ModCtrl}, - "kDN8": {Code: KeyDown, Mod: ModShift | ModAlt | ModCtrl}, - "kcub1": {Code: KeyLeft}, - "kLFT": {Code: KeyLeft, Mod: ModShift}, - "kLFT3": {Code: KeyLeft, Mod: ModAlt}, - "kLFT4": {Code: KeyLeft, Mod: ModShift | ModAlt}, - "kLFT5": {Code: KeyLeft, Mod: ModCtrl}, - "kLFT6": {Code: KeyLeft, Mod: ModShift | ModCtrl}, - "kLFT7": {Code: KeyLeft, Mod: ModAlt | ModCtrl}, - "kLFT8": {Code: KeyLeft, Mod: ModShift | ModAlt | ModCtrl}, - "kcuf1": {Code: KeyRight}, - "kRIT": {Code: KeyRight, Mod: ModShift}, - "kRIT3": {Code: KeyRight, Mod: ModAlt}, - "kRIT4": {Code: KeyRight, Mod: ModShift | ModAlt}, - "kRIT5": {Code: KeyRight, Mod: ModCtrl}, - "kRIT6": {Code: KeyRight, Mod: ModShift | ModCtrl}, - "kRIT7": {Code: KeyRight, Mod: ModAlt | ModCtrl}, - "kRIT8": {Code: KeyRight, Mod: ModShift | ModAlt | ModCtrl}, - "kich1": {Code: KeyInsert}, - "kIC": {Code: KeyInsert, Mod: ModShift}, - "kIC3": {Code: KeyInsert, Mod: ModAlt}, - "kIC4": {Code: KeyInsert, Mod: ModShift | ModAlt}, - "kIC5": {Code: KeyInsert, Mod: ModCtrl}, - "kIC6": {Code: KeyInsert, Mod: ModShift | ModCtrl}, - "kIC7": {Code: KeyInsert, Mod: ModAlt | ModCtrl}, - "kIC8": {Code: KeyInsert, Mod: ModShift | ModAlt | ModCtrl}, - "kdch1": {Code: KeyDelete}, - "kDC": {Code: KeyDelete, Mod: ModShift}, - "kDC3": {Code: KeyDelete, Mod: ModAlt}, - "kDC4": {Code: KeyDelete, Mod: ModShift | ModAlt}, - "kDC5": {Code: KeyDelete, Mod: ModCtrl}, - "kDC6": {Code: KeyDelete, Mod: ModShift | ModCtrl}, - "kDC7": {Code: KeyDelete, Mod: ModAlt | ModCtrl}, - "kDC8": {Code: KeyDelete, Mod: ModShift | ModAlt | ModCtrl}, - "khome": {Code: KeyHome}, - "kHOM": {Code: KeyHome, Mod: ModShift}, - "kHOM3": {Code: KeyHome, Mod: ModAlt}, - "kHOM4": {Code: KeyHome, Mod: ModShift | ModAlt}, - "kHOM5": {Code: KeyHome, Mod: ModCtrl}, - "kHOM6": {Code: KeyHome, Mod: ModShift | ModCtrl}, - "kHOM7": {Code: KeyHome, Mod: ModAlt | ModCtrl}, - "kHOM8": {Code: KeyHome, Mod: ModShift | ModAlt | ModCtrl}, - "kend": {Code: KeyEnd}, - "kEND": {Code: KeyEnd, Mod: ModShift}, - "kEND3": {Code: KeyEnd, Mod: ModAlt}, - "kEND4": {Code: KeyEnd, Mod: ModShift | ModAlt}, - "kEND5": {Code: KeyEnd, Mod: ModCtrl}, - "kEND6": {Code: KeyEnd, Mod: ModShift | ModCtrl}, - "kEND7": {Code: KeyEnd, Mod: ModAlt | ModCtrl}, - "kEND8": {Code: KeyEnd, Mod: ModShift | ModAlt | ModCtrl}, - "kpp": {Code: KeyPgUp}, - "kprv": {Code: KeyPgUp}, - "kPRV": {Code: KeyPgUp, Mod: ModShift}, - "kPRV3": {Code: KeyPgUp, Mod: ModAlt}, - "kPRV4": {Code: KeyPgUp, Mod: ModShift | ModAlt}, - "kPRV5": {Code: KeyPgUp, 
Mod: ModCtrl}, - "kPRV6": {Code: KeyPgUp, Mod: ModShift | ModCtrl}, - "kPRV7": {Code: KeyPgUp, Mod: ModAlt | ModCtrl}, - "kPRV8": {Code: KeyPgUp, Mod: ModShift | ModAlt | ModCtrl}, - "knp": {Code: KeyPgDown}, - "knxt": {Code: KeyPgDown}, - "kNXT": {Code: KeyPgDown, Mod: ModShift}, - "kNXT3": {Code: KeyPgDown, Mod: ModAlt}, - "kNXT4": {Code: KeyPgDown, Mod: ModShift | ModAlt}, - "kNXT5": {Code: KeyPgDown, Mod: ModCtrl}, - "kNXT6": {Code: KeyPgDown, Mod: ModShift | ModCtrl}, - "kNXT7": {Code: KeyPgDown, Mod: ModAlt | ModCtrl}, - "kNXT8": {Code: KeyPgDown, Mod: ModShift | ModAlt | ModCtrl}, - - "kbs": {Code: KeyBackspace}, - "kcbt": {Code: KeyTab, Mod: ModShift}, - - // Function keys - // This only includes the first 12 function keys. The rest are treated - // as modifiers of the first 12. - // Take a look at XTerm modifyFunctionKeys - // - // XXX: To use unambiguous function keys, use fixterms or kitty clipboard. - // - // See https://invisible-island.net/xterm/manpage/xterm.html#VT100-Widget-Resources:modifyFunctionKeys - // See https://invisible-island.net/xterm/terminfo.html - - "kf1": {Code: KeyF1}, - "kf2": {Code: KeyF2}, - "kf3": {Code: KeyF3}, - "kf4": {Code: KeyF4}, - "kf5": {Code: KeyF5}, - "kf6": {Code: KeyF6}, - "kf7": {Code: KeyF7}, - "kf8": {Code: KeyF8}, - "kf9": {Code: KeyF9}, - "kf10": {Code: KeyF10}, - "kf11": {Code: KeyF11}, - "kf12": {Code: KeyF12}, - "kf13": {Code: KeyF1, Mod: ModShift}, - "kf14": {Code: KeyF2, Mod: ModShift}, - "kf15": {Code: KeyF3, Mod: ModShift}, - "kf16": {Code: KeyF4, Mod: ModShift}, - "kf17": {Code: KeyF5, Mod: ModShift}, - "kf18": {Code: KeyF6, Mod: ModShift}, - "kf19": {Code: KeyF7, Mod: ModShift}, - "kf20": {Code: KeyF8, Mod: ModShift}, - "kf21": {Code: KeyF9, Mod: ModShift}, - "kf22": {Code: KeyF10, Mod: ModShift}, - "kf23": {Code: KeyF11, Mod: ModShift}, - "kf24": {Code: KeyF12, Mod: ModShift}, - "kf25": {Code: KeyF1, Mod: ModCtrl}, - "kf26": {Code: KeyF2, Mod: ModCtrl}, - "kf27": {Code: KeyF3, Mod: ModCtrl}, - "kf28": {Code: KeyF4, Mod: ModCtrl}, - "kf29": {Code: KeyF5, Mod: ModCtrl}, - "kf30": {Code: KeyF6, Mod: ModCtrl}, - "kf31": {Code: KeyF7, Mod: ModCtrl}, - "kf32": {Code: KeyF8, Mod: ModCtrl}, - "kf33": {Code: KeyF9, Mod: ModCtrl}, - "kf34": {Code: KeyF10, Mod: ModCtrl}, - "kf35": {Code: KeyF11, Mod: ModCtrl}, - "kf36": {Code: KeyF12, Mod: ModCtrl}, - "kf37": {Code: KeyF1, Mod: ModShift | ModCtrl}, - "kf38": {Code: KeyF2, Mod: ModShift | ModCtrl}, - "kf39": {Code: KeyF3, Mod: ModShift | ModCtrl}, - "kf40": {Code: KeyF4, Mod: ModShift | ModCtrl}, - "kf41": {Code: KeyF5, Mod: ModShift | ModCtrl}, - "kf42": {Code: KeyF6, Mod: ModShift | ModCtrl}, - "kf43": {Code: KeyF7, Mod: ModShift | ModCtrl}, - "kf44": {Code: KeyF8, Mod: ModShift | ModCtrl}, - "kf45": {Code: KeyF9, Mod: ModShift | ModCtrl}, - "kf46": {Code: KeyF10, Mod: ModShift | ModCtrl}, - "kf47": {Code: KeyF11, Mod: ModShift | ModCtrl}, - "kf48": {Code: KeyF12, Mod: ModShift | ModCtrl}, - "kf49": {Code: KeyF1, Mod: ModAlt}, - "kf50": {Code: KeyF2, Mod: ModAlt}, - "kf51": {Code: KeyF3, Mod: ModAlt}, - "kf52": {Code: KeyF4, Mod: ModAlt}, - "kf53": {Code: KeyF5, Mod: ModAlt}, - "kf54": {Code: KeyF6, Mod: ModAlt}, - "kf55": {Code: KeyF7, Mod: ModAlt}, - "kf56": {Code: KeyF8, Mod: ModAlt}, - "kf57": {Code: KeyF9, Mod: ModAlt}, - "kf58": {Code: KeyF10, Mod: ModAlt}, - "kf59": {Code: KeyF11, Mod: ModAlt}, - "kf60": {Code: KeyF12, Mod: ModAlt}, - "kf61": {Code: KeyF1, Mod: ModShift | ModAlt}, - "kf62": {Code: KeyF2, Mod: ModShift | ModAlt}, - "kf63": {Code: KeyF3, Mod: ModShift | ModAlt}, - 
} - - // Preserve F keys from F13 to F63 instead of using them for F-keys - // modifiers. - if flags&FlagFKeys != 0 { - keys["kf13"] = Key{Code: KeyF13} - keys["kf14"] = Key{Code: KeyF14} - keys["kf15"] = Key{Code: KeyF15} - keys["kf16"] = Key{Code: KeyF16} - keys["kf17"] = Key{Code: KeyF17} - keys["kf18"] = Key{Code: KeyF18} - keys["kf19"] = Key{Code: KeyF19} - keys["kf20"] = Key{Code: KeyF20} - keys["kf21"] = Key{Code: KeyF21} - keys["kf22"] = Key{Code: KeyF22} - keys["kf23"] = Key{Code: KeyF23} - keys["kf24"] = Key{Code: KeyF24} - keys["kf25"] = Key{Code: KeyF25} - keys["kf26"] = Key{Code: KeyF26} - keys["kf27"] = Key{Code: KeyF27} - keys["kf28"] = Key{Code: KeyF28} - keys["kf29"] = Key{Code: KeyF29} - keys["kf30"] = Key{Code: KeyF30} - keys["kf31"] = Key{Code: KeyF31} - keys["kf32"] = Key{Code: KeyF32} - keys["kf33"] = Key{Code: KeyF33} - keys["kf34"] = Key{Code: KeyF34} - keys["kf35"] = Key{Code: KeyF35} - keys["kf36"] = Key{Code: KeyF36} - keys["kf37"] = Key{Code: KeyF37} - keys["kf38"] = Key{Code: KeyF38} - keys["kf39"] = Key{Code: KeyF39} - keys["kf40"] = Key{Code: KeyF40} - keys["kf41"] = Key{Code: KeyF41} - keys["kf42"] = Key{Code: KeyF42} - keys["kf43"] = Key{Code: KeyF43} - keys["kf44"] = Key{Code: KeyF44} - keys["kf45"] = Key{Code: KeyF45} - keys["kf46"] = Key{Code: KeyF46} - keys["kf47"] = Key{Code: KeyF47} - keys["kf48"] = Key{Code: KeyF48} - keys["kf49"] = Key{Code: KeyF49} - keys["kf50"] = Key{Code: KeyF50} - keys["kf51"] = Key{Code: KeyF51} - keys["kf52"] = Key{Code: KeyF52} - keys["kf53"] = Key{Code: KeyF53} - keys["kf54"] = Key{Code: KeyF54} - keys["kf55"] = Key{Code: KeyF55} - keys["kf56"] = Key{Code: KeyF56} - keys["kf57"] = Key{Code: KeyF57} - keys["kf58"] = Key{Code: KeyF58} - keys["kf59"] = Key{Code: KeyF59} - keys["kf60"] = Key{Code: KeyF60} - keys["kf61"] = Key{Code: KeyF61} - keys["kf62"] = Key{Code: KeyF62} - keys["kf63"] = Key{Code: KeyF63} - } - - return keys -} diff --git a/packages/tui/input/xterm.go b/packages/tui/input/xterm.go deleted file mode 100644 index b3bbc308..00000000 --- a/packages/tui/input/xterm.go +++ /dev/null @@ -1,47 +0,0 @@ -package input - -import ( - "github.com/charmbracelet/x/ansi" -) - -func parseXTermModifyOtherKeys(params ansi.Params) Event { - // XTerm modify other keys starts with ESC [ 27 ; ; ~ - xmod, _, _ := params.Param(1, 1) - xrune, _, _ := params.Param(2, 1) - mod := KeyMod(xmod - 1) - r := rune(xrune) - - switch r { - case ansi.BS: - return KeyPressEvent{Mod: mod, Code: KeyBackspace} - case ansi.HT: - return KeyPressEvent{Mod: mod, Code: KeyTab} - case ansi.CR: - return KeyPressEvent{Mod: mod, Code: KeyEnter} - case ansi.ESC: - return KeyPressEvent{Mod: mod, Code: KeyEscape} - case ansi.DEL: - return KeyPressEvent{Mod: mod, Code: KeyBackspace} - } - - // CSI 27 ; ; ~ keys defined in XTerm modifyOtherKeys - k := KeyPressEvent{Code: r, Mod: mod} - if k.Mod <= ModShift { - k.Text = string(r) - } - - return k -} - -// TerminalVersionEvent is a message that represents the terminal version. -type TerminalVersionEvent string - -// ModifyOtherKeysEvent represents a modifyOtherKeys event. 
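// A brief sketch of the CSI 27 ; <mod> ; <codepoint> ~ form handled by
// parseXTermModifyOtherKeys above: the modifier parameter is the mask plus
// one and the codepoint is the unshifted key, so ctrl+enter arrives as
// ESC [ 27 ; 5 ; 13 ~. exampleModifyOtherKeys is a hypothetical helper.
func exampleModifyOtherKeys() KeyPressEvent {
	mod := KeyMod(5 - 1) // 4 == ModCtrl
	// decoded form of ESC [ 27 ; 5 ; 13 ~ (13 is CR, i.e. the Enter key)
	return KeyPressEvent{Code: KeyEnter, Mod: mod}
}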
-// -// 0: disable -// 1: enable mode 1 -// 2: enable mode 2 -// -// See: https://invisible-island.net/xterm/ctlseqs/ctlseqs.html#h3-Functions-using-CSI-_-ordered-by-the-final-character_s_ -// See: https://invisible-island.net/xterm/manpage/xterm.html#VT100-Widget-Resources:modifyOtherKeys -type ModifyOtherKeysEvent uint8 diff --git a/packages/tui/internal/app/app.go b/packages/tui/internal/app/app.go index 8f7e2793..9b341c19 100644 --- a/packages/tui/internal/app/app.go +++ b/packages/tui/internal/app/app.go @@ -12,47 +12,28 @@ import ( tea "github.com/charmbracelet/bubbletea/v2" "github.com/sst/opencode-sdk-go" - "github.com/sst/opencode/internal/clipboard" "github.com/sst/opencode/internal/commands" "github.com/sst/opencode/internal/components/toast" "github.com/sst/opencode/internal/config" - "github.com/sst/opencode/internal/id" "github.com/sst/opencode/internal/styles" "github.com/sst/opencode/internal/theme" "github.com/sst/opencode/internal/util" ) -type Message struct { - Info opencode.MessageUnion - Parts []opencode.PartUnion -} - type App struct { - Info opencode.App - Modes []opencode.Mode - Providers []opencode.Provider - Version string - StatePath string - Config *opencode.Config - Client *opencode.Client - State *config.State - ModeIndex int - Mode *opencode.Mode - Provider *opencode.Provider - Model *opencode.Model - Session *opencode.Session - Messages []Message - Commands commands.CommandRegistry - InitialModel *string - InitialPrompt *string - IntitialMode *string - compactCancel context.CancelFunc - IsLeaderSequence bool + Info opencode.App + Version string + StatePath string + Config *opencode.Config + Client *opencode.Client + State *config.State + Provider *opencode.Provider + Model *opencode.Model + Session *opencode.Session + Messages []opencode.Message + Commands commands.CommandRegistry } -type SessionCreatedMsg = struct { - Session *opencode.Session -} type SessionSelectedMsg = *opencode.Session type SessionLoadedMsg struct{} type ModelSelectedMsg struct { @@ -63,10 +44,13 @@ type SessionClearedMsg struct{} type CompactSessionMsg struct{} type SendMsg struct { Text string - Attachments []opencode.FilePartInputParam + Attachments []Attachment } -type SetEditorContentMsg struct { - Text string +type CompletionDialogTriggeredMsg struct { + InitialValue string +} +type OptimisticMessageAddedMsg struct { + Message opencode.Message } type FileRenderedMsg struct { FilePath string @@ -76,11 +60,7 @@ func New( ctx context.Context, version string, appInfo opencode.App, - modes []opencode.Mode, httpClient *opencode.Client, - initialModel *string, - initialPrompt *string, - initialMode *string, ) (*App, error) { util.RootPath = appInfo.Path.Root util.CwdPath = appInfo.Path.Cwd @@ -101,36 +81,14 @@ func New( config.SaveState(appStatePath, appState) } - if appState.ModeModel == nil { - appState.ModeModel = make(map[string]config.ModeModel) - } - if configInfo.Theme != "" { appState.Theme = configInfo.Theme } - var modeIndex int - var mode *opencode.Mode - modeName := "build" - if appState.Mode != "" { - modeName = appState.Mode - } - if initialMode != nil && *initialMode != "" { - modeName = *initialMode - } - for i, m := range modes { - if m.Name == modeName { - modeIndex = i - break - } - } - mode = &modes[modeIndex] - - if mode.Model.ModelID != "" { - appState.ModeModel[mode.Name] = config.ModeModel{ - ProviderID: mode.Model.ProviderID, - ModelID: mode.Model.ModelID, - } + if configInfo.Model != "" { + splits := strings.Split(configInfo.Model, "/") + appState.Provider = 
splits[0] + appState.Model = strings.Join(splits[1:], "/") } if err := theme.LoadThemesFromDirectories( @@ -154,21 +112,15 @@ func New( slog.Debug("Loaded config", "config", configInfo) app := &App{ - Info: appInfo, - Modes: modes, - Version: version, - StatePath: appStatePath, - Config: configInfo, - State: appState, - Client: httpClient, - ModeIndex: modeIndex, - Mode: mode, - Session: &opencode.Session{}, - Messages: []Message{}, - Commands: commands.LoadFromConfig(configInfo), - InitialModel: initialModel, - InitialPrompt: initialPrompt, - IntitialMode: initialMode, + Info: appInfo, + Version: version, + StatePath: appStatePath, + Config: configInfo, + State: appState, + Client: httpClient, + Session: &opencode.Session{}, + Messages: []opencode.Message{}, + Commands: commands.LoadFromConfig(configInfo), } return app, nil @@ -177,11 +129,7 @@ func New( func (a *App) Key(commandName commands.CommandName) string { t := theme.CurrentTheme() base := styles.NewStyle().Background(t.Background()).Foreground(t.Text()).Bold(true).Render - muted := styles.NewStyle(). - Background(t.Background()). - Foreground(t.TextMuted()). - Faint(true). - Render + muted := styles.NewStyle().Background(t.Background()).Foreground(t.TextMuted()).Faint(true).Render command := a.Commands[commandName] kb := command.Keybindings[0] key := kb.Key @@ -191,170 +139,69 @@ func (a *App) Key(commandName commands.CommandName) string { return base(key) + muted(" "+command.Description) } -func (a *App) SetClipboard(text string) tea.Cmd { - var cmds []tea.Cmd - cmds = append(cmds, func() tea.Msg { - clipboard.Write(clipboard.FmtText, []byte(text)) - return nil - }) - // try to set the clipboard using OSC52 for terminals that support it - cmds = append(cmds, tea.SetClipboard(text)) - return tea.Sequence(cmds...) 
-} - -func (a *App) cycleMode(forward bool) (*App, tea.Cmd) { - if forward { - a.ModeIndex++ - if a.ModeIndex >= len(a.Modes) { - a.ModeIndex = 0 - } - } else { - a.ModeIndex-- - if a.ModeIndex < 0 { - a.ModeIndex = len(a.Modes) - 1 - } - } - a.Mode = &a.Modes[a.ModeIndex] - - modelID := a.Mode.Model.ModelID - providerID := a.Mode.Model.ProviderID - if modelID == "" { - if model, ok := a.State.ModeModel[a.Mode.Name]; ok { - modelID = model.ModelID - providerID = model.ProviderID - } - } - - if modelID != "" { - for _, provider := range a.Providers { - if provider.ID == providerID { - a.Provider = &provider - for _, model := range provider.Models { - if model.ID == modelID { - a.Model = &model - break - } - } - break - } - } - } - - a.State.Mode = a.Mode.Name - - return a, func() tea.Msg { - a.SaveState() - return nil - } -} - -func (a *App) SwitchMode() (*App, tea.Cmd) { - return a.cycleMode(true) -} - -func (a *App) SwitchModeReverse() (*App, tea.Cmd) { - return a.cycleMode(false) -} - func (a *App) InitializeProvider() tea.Cmd { - providersResponse, err := a.Client.App.Providers(context.Background()) - if err != nil { - slog.Error("Failed to list providers", "error", err) - // TODO: notify user - return nil - } - providers := providersResponse.Providers - var defaultProvider *opencode.Provider - var defaultModel *opencode.Model - - var anthropic *opencode.Provider - for _, provider := range providers { - if provider.ID == "anthropic" { - anthropic = &provider + return func() tea.Msg { + providersResponse, err := a.Client.Config.Providers(context.Background()) + if err != nil { + slog.Error("Failed to list providers", "error", err) + // TODO: notify user + return nil } - } + providers := providersResponse.Providers + var defaultProvider *opencode.Provider + var defaultModel *opencode.Model - // default to anthropic if available - if anthropic != nil { - defaultProvider = anthropic - defaultModel = getDefaultModel(providersResponse, *anthropic) - } - - for _, provider := range providers { - if defaultProvider == nil || defaultModel == nil { - defaultProvider = &provider - defaultModel = getDefaultModel(providersResponse, provider) - } - providers = append(providers, provider) - } - if len(providers) == 0 { - slog.Error("No providers configured") - return nil - } - - a.Providers = providers - - // retains backwards compatibility with old state format - if model, ok := a.State.ModeModel[a.State.Mode]; ok { - a.State.Provider = model.ProviderID - a.State.Model = model.ModelID - } - - var currentProvider *opencode.Provider - var currentModel *opencode.Model - for _, provider := range providers { - if provider.ID == a.State.Provider { - currentProvider = &provider - - for _, model := range provider.Models { - if model.ID == a.State.Model { - currentModel = &model - } + var anthropic *opencode.Provider + for _, provider := range providers { + if provider.ID == "anthropic" { + anthropic = &provider } } - } - if currentProvider == nil || currentModel == nil { - currentProvider = defaultProvider - currentModel = defaultModel - } - var initialProvider *opencode.Provider - var initialModel *opencode.Model - if a.InitialModel != nil && *a.InitialModel != "" { - splits := strings.Split(*a.InitialModel, "/") + // default to anthropic if available + if anthropic != nil { + defaultProvider = anthropic + defaultModel = getDefaultModel(providersResponse, *anthropic) + } + for _, provider := range providers { - if provider.ID == splits[0] { - initialProvider = &provider + if defaultProvider == nil || 
defaultModel == nil { + defaultProvider = &provider + defaultModel = getDefaultModel(providersResponse, provider) + } + providers = append(providers, provider) + } + if len(providers) == 0 { + slog.Error("No providers configured") + return nil + } + + var currentProvider *opencode.Provider + var currentModel *opencode.Model + for _, provider := range providers { + if provider.ID == a.State.Provider { + currentProvider = &provider + for _, model := range provider.Models { - modelID := strings.Join(splits[1:], "/") - if model.ID == modelID { - initialModel = &model + if model.ID == a.State.Model { + currentModel = &model } } } } - } + if currentProvider == nil || currentModel == nil { + currentProvider = defaultProvider + currentModel = defaultModel + } - if initialProvider != nil && initialModel != nil { - currentProvider = initialProvider - currentModel = initialModel + return ModelSelectedMsg{ + Provider: *currentProvider, + Model: *currentModel, + } } - - var cmds []tea.Cmd - cmds = append(cmds, util.CmdHandler(ModelSelectedMsg{ - Provider: *currentProvider, - Model: *currentModel, - })) - if a.InitialPrompt != nil && *a.InitialPrompt != "" { - cmds = append(cmds, util.CmdHandler(SendMsg{Text: *a.InitialPrompt})) - } - return tea.Sequence(cmds...) } -func getDefaultModel( - response *opencode.AppProvidersResponse, - provider opencode.Provider, -) *opencode.Model { +func getDefaultModel(response *opencode.ConfigProvidersResponse, provider opencode.Provider) *opencode.Model { if match, ok := response.Default[provider.ID]; ok { model := provider.Models[match] return &model @@ -366,16 +213,20 @@ func getDefaultModel( return nil } +type Attachment struct { + FilePath string + FileName string + MimeType string + Content []byte +} + func (a *App) IsBusy() bool { if len(a.Messages) == 0 { return false } lastMessage := a.Messages[len(a.Messages)-1] - if casted, ok := lastMessage.Info.(opencode.AssistantMessage); ok { - return casted.Time.Completed == 0 - } - return false + return lastMessage.Metadata.Time.Completed == 0 } func (a *App) SaveState() { @@ -395,11 +246,10 @@ func (a *App) InitializeProject(ctx context.Context) tea.Cmd { } a.Session = session - cmds = append(cmds, util.CmdHandler(SessionCreatedMsg{Session: session})) + cmds = append(cmds, util.CmdHandler(SessionSelectedMsg(session))) go func() { _, err := a.Client.Session.Init(ctx, a.Session.ID, opencode.SessionInitParams{ - MessageID: opencode.F(id.Ascending(id.Message)), ProviderID: opencode.F(a.Provider.ID), ModelID: opencode.F(a.Model.ID), }) @@ -413,30 +263,13 @@ func (a *App) InitializeProject(ctx context.Context) tea.Cmd { } func (a *App) CompactSession(ctx context.Context) tea.Cmd { - if a.compactCancel != nil { - a.compactCancel() - } - - compactCtx, cancel := context.WithCancel(ctx) - a.compactCancel = cancel - go func() { - defer func() { - a.compactCancel = nil - }() - - _, err := a.Client.Session.Summarize( - compactCtx, - a.Session.ID, - opencode.SessionSummarizeParams{ - ProviderID: opencode.F(a.Provider.ID), - ModelID: opencode.F(a.Model.ID), - }, - ) + _, err := a.Client.Session.Summarize(ctx, a.Session.ID, opencode.SessionSummarizeParams{ + ProviderID: opencode.F(a.Provider.ID), + ModelID: opencode.F(a.Model.ID), + }) if err != nil { - if compactCtx.Err() != context.Canceled { - slog.Error("Failed to compact session", "error", err) - } + slog.Error("Failed to compact session", "error", err) } }() return nil @@ -459,80 +292,45 @@ func (a *App) CreateSession(ctx context.Context) (*opencode.Session, error) { return 
session, nil } -func (a *App) SendChatMessage( - ctx context.Context, - text string, - attachments []opencode.FilePartInputParam, -) (*App, tea.Cmd) { +func (a *App) SendChatMessage(ctx context.Context, text string, attachments []Attachment) tea.Cmd { var cmds []tea.Cmd if a.Session.ID == "" { session, err := a.CreateSession(ctx) if err != nil { - return a, toast.NewErrorToast(err.Error()) + return toast.NewErrorToast(err.Error()) } a.Session = session - cmds = append(cmds, util.CmdHandler(SessionCreatedMsg{Session: session})) + cmds = append(cmds, util.CmdHandler(SessionSelectedMsg(session))) } - message := opencode.UserMessage{ - ID: id.Ascending(id.Message), - SessionID: a.Session.ID, - Role: opencode.UserMessageRoleUser, - Time: opencode.UserMessageTime{ - Created: float64(time.Now().UnixMilli()), + optimisticMessage := opencode.Message{ + ID: fmt.Sprintf("optimistic-%d", time.Now().UnixNano()), + Role: opencode.MessageRoleUser, + Parts: []opencode.MessagePart{{ + Type: opencode.MessagePartTypeText, + Text: text, + }}, + Metadata: opencode.MessageMetadata{ + SessionID: a.Session.ID, + Time: opencode.MessageMetadataTime{ + Created: float64(time.Now().Unix()), + }, }, } - parts := []opencode.PartUnion{opencode.TextPart{ - ID: id.Ascending(id.Part), - MessageID: message.ID, - SessionID: a.Session.ID, - Type: opencode.TextPartTypeText, - Text: text, - }} - if len(attachments) > 0 { - for _, attachment := range attachments { - parts = append(parts, opencode.FilePart{ - ID: id.Ascending(id.Part), - MessageID: message.ID, - SessionID: a.Session.ID, - Type: opencode.FilePartTypeFile, - Filename: attachment.Filename.Value, - Mime: attachment.Mime.Value, - URL: attachment.URL.Value, - }) - } - } - - a.Messages = append(a.Messages, Message{Info: message, Parts: parts}) + a.Messages = append(a.Messages, optimisticMessage) + cmds = append(cmds, util.CmdHandler(OptimisticMessageAddedMsg{Message: optimisticMessage})) cmds = append(cmds, func() tea.Msg { - partsParam := []opencode.SessionChatParamsPartUnion{} - for _, part := range parts { - switch casted := part.(type) { - case opencode.TextPart: - partsParam = append(partsParam, opencode.TextPartInputParam{ - ID: opencode.F(casted.ID), - Type: opencode.F(opencode.TextPartInputType(casted.Type)), - Text: opencode.F(casted.Text), - }) - case opencode.FilePart: - partsParam = append(partsParam, opencode.FilePartInputParam{ - ID: opencode.F(casted.ID), - Mime: opencode.F(casted.Mime), - Type: opencode.F(opencode.FilePartInputType(casted.Type)), - URL: opencode.F(casted.URL), - Filename: opencode.F(casted.Filename), - }) - } - } - _, err := a.Client.Session.Chat(ctx, a.Session.ID, opencode.SessionChatParams{ - Parts: opencode.F(partsParam), - MessageID: opencode.F(message.ID), + Parts: opencode.F([]opencode.MessagePartUnionParam{ + opencode.TextPartParam{ + Type: opencode.F(opencode.TextPartTypeText), + Text: opencode.F(text), + }, + }), ProviderID: opencode.F(a.Provider.ID), ModelID: opencode.F(a.Model.ID), - Mode: opencode.F(a.Mode.Name), }) if err != nil { errormsg := fmt.Sprintf("failed to send message: %v", err) @@ -544,16 +342,10 @@ func (a *App) SendChatMessage( // The actual response will come through SSE // For now, just return success - return a, tea.Batch(cmds...) + return tea.Batch(cmds...) 
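The reworked SendChatMessage above appends an optimistic user message locally and then issues the chat request in a command, leaving the real assistant response to arrive over the SSE stream. A minimal sketch of that request, using only the SDK types visible in this hunk; the helper name and the `*opencode.Client` parameter are assumptions:

```go
package example

import (
	"context"

	"github.com/sst/opencode-sdk-go"
)

// sendText mirrors the request shape used by SendChatMessage: a single text
// part plus the currently selected provider and model. Error handling and the
// helper signature are illustrative, not part of the SDK.
func sendText(ctx context.Context, client *opencode.Client, sessionID, providerID, modelID, text string) error {
	_, err := client.Session.Chat(ctx, sessionID, opencode.SessionChatParams{
		Parts: opencode.F([]opencode.MessagePartUnionParam{
			opencode.TextPartParam{
				Type: opencode.F(opencode.TextPartTypeText),
				Text: opencode.F(text),
			},
		}),
		ProviderID: opencode.F(providerID),
		ModelID:    opencode.F(modelID),
	})
	return err
}
```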
} func (a *App) Cancel(ctx context.Context, sessionID string) error { - // Cancel any running compact operation - if a.compactCancel != nil { - a.compactCancel() - a.compactCancel = nil - } - _, err := a.Client.Session.Abort(ctx, sessionID) if err != nil { slog.Error("Failed to cancel session", "error", err) @@ -587,30 +379,20 @@ func (a *App) DeleteSession(ctx context.Context, sessionID string) error { return nil } -func (a *App) ListMessages(ctx context.Context, sessionId string) ([]Message, error) { +func (a *App) ListMessages(ctx context.Context, sessionId string) ([]opencode.Message, error) { response, err := a.Client.Session.Messages(ctx, sessionId) if err != nil { return nil, err } if response == nil { - return []Message{}, nil - } - messages := []Message{} - for _, message := range *response { - msg := Message{ - Info: message.Info.AsUnion(), - Parts: []opencode.PartUnion{}, - } - for _, part := range message.Parts { - msg.Parts = append(msg.Parts, part.AsUnion()) - } - messages = append(messages, msg) + return []opencode.Message{}, nil } + messages := *response return messages, nil } func (a *App) ListProviders(ctx context.Context) ([]opencode.Provider, error) { - response, err := a.Client.App.Providers(ctx) + response, err := a.Client.Config.Providers(ctx) if err != nil { return nil, err } diff --git a/packages/tui/internal/clipboard/clipboard.go b/packages/tui/internal/clipboard/clipboard.go deleted file mode 100644 index 70e05bd2..00000000 --- a/packages/tui/internal/clipboard/clipboard.go +++ /dev/null @@ -1,155 +0,0 @@ -// Copyright 2021 The golang.design Initiative Authors. -// All rights reserved. Use of this source code is governed -// by a MIT license that can be found in the LICENSE file. -// -// Written by Changkun Ou - -/* -Package clipboard provides cross platform clipboard access and supports -macOS/Linux/Windows/Android/iOS platform. Before interacting with the -clipboard, one must call Init to assert if it is possible to use this -package: - - err := clipboard.Init() - if err != nil { - panic(err) - } - -The most common operations are `Read` and `Write`. To use them: - - // write/read text format data of the clipboard, and - // the byte buffer regarding the text are UTF8 encoded. - clipboard.Write(clipboard.FmtText, []byte("text data")) - clipboard.Read(clipboard.FmtText) - - // write/read image format data of the clipboard, and - // the byte buffer regarding the image are PNG encoded. - clipboard.Write(clipboard.FmtImage, []byte("image data")) - clipboard.Read(clipboard.FmtImage) - -Note that read/write regarding image format assumes that the bytes are -PNG encoded since it serves the alpha blending purpose that might be -used in other graphical software. - -In addition, `clipboard.Write` returns a channel that can receive an -empty struct as a signal, which indicates the corresponding write call -to the clipboard is outdated, meaning the clipboard has been overwritten -by others and the previously written data is lost. For instance: - - changed := clipboard.Write(clipboard.FmtText, []byte("text data")) - - select { - case <-changed: - println(`"text data" is no longer available from clipboard.`) - } - -You can ignore the returning channel if you don't need this type of -notification. 
Furthermore, when you need more than just knowing whether -clipboard data is changed, use the watcher API: - - ch := clipboard.Watch(context.TODO(), clipboard.FmtText) - for data := range ch { - // print out clipboard data whenever it is changed - println(string(data)) - } -*/ -package clipboard - -import ( - "context" - "errors" - "fmt" - "os" - "sync" -) - -var ( - // activate only for running tests. - debug = false - errUnavailable = errors.New("clipboard unavailable") - errUnsupported = errors.New("unsupported format") - errNoCgo = errors.New("clipboard: cannot use when CGO_ENABLED=0") -) - -// Format represents the format of clipboard data. -type Format int - -// All sorts of supported clipboard data -const ( - // FmtText indicates plain text clipboard format - FmtText Format = iota - // FmtImage indicates image/png clipboard format - FmtImage -) - -var ( - // Due to the limitation on operating systems (such as darwin), - // concurrent read can even cause panic, use a global lock to - // guarantee one read at a time. - lock = sync.Mutex{} - initOnce sync.Once - initError error -) - -// Init initializes the clipboard package. It returns an error -// if the clipboard is not available to use. This may happen if the -// target system lacks required dependency, such as libx11-dev in X11 -// environment. For example, -// -// err := clipboard.Init() -// if err != nil { -// panic(err) -// } -// -// If Init returns an error, any subsequent Read/Write/Watch call -// may result in an unrecoverable panic. -func Init() error { - initOnce.Do(func() { - initError = initialize() - }) - return initError -} - -// Read returns a chunk of bytes of the clipboard data if it presents -// in the desired format t presents. Otherwise, it returns nil. -func Read(t Format) []byte { - lock.Lock() - defer lock.Unlock() - - buf, err := read(t) - if err != nil { - if debug { - fmt.Fprintf(os.Stderr, "read clipboard err: %v\n", err) - } - return nil - } - return buf -} - -// Write writes a given buffer to the clipboard in a specified format. -// Write returned a receive-only channel can receive an empty struct -// as a signal, which indicates the clipboard has been overwritten from -// this write. -// If format t indicates an image, then the given buf assumes -// the image data is PNG encoded. -func Write(t Format, buf []byte) <-chan struct{} { - lock.Lock() - defer lock.Unlock() - - changed, err := write(t, buf) - if err != nil { - if debug { - fmt.Fprintf(os.Stderr, "write to clipboard err: %v\n", err) - } - return nil - } - return changed -} - -// Watch returns a receive-only channel that received the clipboard data -// whenever any change of clipboard data in the desired format happens. -// -// The returned channel will be closed if the given context is canceled. -func Watch(ctx context.Context, t Format) <-chan []byte { - return watch(ctx, t) -} diff --git a/packages/tui/internal/clipboard/clipboard_darwin.go b/packages/tui/internal/clipboard/clipboard_darwin.go deleted file mode 100644 index ead6811f..00000000 --- a/packages/tui/internal/clipboard/clipboard_darwin.go +++ /dev/null @@ -1,266 +0,0 @@ -// Copyright 2021 The golang.design Initiative Authors. -// All rights reserved. Use of this source code is governed -// by a MIT license that can be found in the LICENSE file. 
-// -// Written by Changkun Ou - -//go:build darwin - -package clipboard - -import ( - "bytes" - "context" - "fmt" - "os" - "os/exec" - "strconv" - "strings" - "sync" - "time" -) - -var ( - lastChangeCount int64 - changeCountMu sync.Mutex -) - -func initialize() error { return nil } - -func read(t Format) (buf []byte, err error) { - switch t { - case FmtText: - return readText() - case FmtImage: - return readImage() - default: - return nil, errUnsupported - } -} - -func readText() ([]byte, error) { - // Check if clipboard contains string data - checkScript := ` - try - set clipboardTypes to (clipboard info) - repeat with aType in clipboardTypes - if (first item of aType) is string then - return "hastext" - end if - end repeat - return "notext" - on error - return "error" - end try - ` - - cmd := exec.Command("osascript", "-e", checkScript) - checkOut, err := cmd.Output() - if err != nil { - return nil, errUnavailable - } - - checkOut = bytes.TrimSpace(checkOut) - if !bytes.Equal(checkOut, []byte("hastext")) { - return nil, errUnavailable - } - - // Now get the actual text - cmd = exec.Command("osascript", "-e", "get the clipboard") - out, err := cmd.Output() - if err != nil { - return nil, errUnavailable - } - // Remove trailing newline that osascript adds - out = bytes.TrimSuffix(out, []byte("\n")) - - // If clipboard was set to empty string, return nil - if len(out) == 0 { - return nil, nil - } - return out, nil -} -func readImage() ([]byte, error) { - // AppleScript to read image data from clipboard as base64 - script := ` - try - set theData to the clipboard as «class PNGf» - return theData - on error - return "" - end try - ` - - cmd := exec.Command("osascript", "-e", script) - out, err := cmd.Output() - if err != nil { - return nil, errUnavailable - } - - // Check if we got any data - out = bytes.TrimSpace(out) - if len(out) == 0 { - return nil, errUnavailable - } - - // The output is in hex format (e.g., «data PNGf89504E...») - // We need to extract and convert it - outStr := string(out) - if !strings.HasPrefix(outStr, "«data PNGf") || !strings.HasSuffix(outStr, "»") { - return nil, errUnavailable - } - - // Extract hex data - hexData := strings.TrimPrefix(outStr, "«data PNGf") - hexData = strings.TrimSuffix(hexData, "»") - - // Convert hex to bytes - buf := make([]byte, len(hexData)/2) - for i := 0; i < len(hexData); i += 2 { - b, err := strconv.ParseUint(hexData[i:i+2], 16, 8) - if err != nil { - return nil, errUnavailable - } - buf[i/2] = byte(b) - } - - return buf, nil -} - -// write writes the given data to clipboard and -// returns true if success or false if failed. 
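readImage above recovers PNG bytes from osascript's «data PNGf…» literal by parsing the payload two hex characters at a time with strconv.ParseUint. For well-formed, even-length input, encoding/hex performs the same conversion; a small sketch of the equivalent decode (the helper name is illustrative, errUnavailable is the package's existing sentinel):

```go
package clipboard

import (
	"encoding/hex"
	"strings"
)

// decodePNGf strips the «data PNGf … » wrapper produced by AppleScript and
// decodes the hex payload into raw PNG bytes, matching the behavior of the
// manual ParseUint loop in readImage.
func decodePNGf(out string) ([]byte, error) {
	if !strings.HasPrefix(out, "«data PNGf") || !strings.HasSuffix(out, "»") {
		return nil, errUnavailable
	}
	payload := strings.TrimSuffix(strings.TrimPrefix(out, "«data PNGf"), "»")
	return hex.DecodeString(payload)
}
```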
-func write(t Format, buf []byte) (<-chan struct{}, error) { - var err error - switch t { - case FmtText: - err = writeText(buf) - case FmtImage: - err = writeImage(buf) - default: - return nil, errUnsupported - } - - if err != nil { - return nil, err - } - - // Update change count - changeCountMu.Lock() - lastChangeCount++ - currentCount := lastChangeCount - changeCountMu.Unlock() - - // use unbuffered channel to prevent goroutine leak - changed := make(chan struct{}, 1) - go func() { - for { - time.Sleep(time.Second) - changeCountMu.Lock() - if lastChangeCount != currentCount { - changeCountMu.Unlock() - changed <- struct{}{} - close(changed) - return - } - changeCountMu.Unlock() - } - }() - return changed, nil -} - -func writeText(buf []byte) error { - if len(buf) == 0 { - // Clear clipboard - script := `set the clipboard to ""` - cmd := exec.Command("osascript", "-e", script) - if err := cmd.Run(); err != nil { - return errUnavailable - } - return nil - } - - // Escape the text for AppleScript - text := string(buf) - text = strings.ReplaceAll(text, "\\", "\\\\") - text = strings.ReplaceAll(text, "\"", "\\\"") - - script := fmt.Sprintf(`set the clipboard to "%s"`, text) - cmd := exec.Command("osascript", "-e", script) - if err := cmd.Run(); err != nil { - return errUnavailable - } - return nil -} -func writeImage(buf []byte) error { - if len(buf) == 0 { - // Clear clipboard - script := `set the clipboard to ""` - cmd := exec.Command("osascript", "-e", script) - if err := cmd.Run(); err != nil { - return errUnavailable - } - return nil - } - - // Create a temporary file to store the PNG data - tmpFile, err := os.CreateTemp("", "clipboard*.png") - if err != nil { - return errUnavailable - } - defer os.Remove(tmpFile.Name()) - - if _, err := tmpFile.Write(buf); err != nil { - tmpFile.Close() - return errUnavailable - } - tmpFile.Close() - - // Use osascript to set clipboard to the image file - script := fmt.Sprintf(` - set theFile to POSIX file "%s" - set theImage to read theFile as «class PNGf» - set the clipboard to theImage - `, tmpFile.Name()) - - cmd := exec.Command("osascript", "-e", script) - if err := cmd.Run(); err != nil { - return errUnavailable - } - return nil -} -func watch(ctx context.Context, t Format) <-chan []byte { - recv := make(chan []byte, 1) - ti := time.NewTicker(time.Second) - - // Get initial clipboard content - var lastContent []byte - if b := Read(t); b != nil { - lastContent = make([]byte, len(b)) - copy(lastContent, b) - } - - go func() { - defer close(recv) - defer ti.Stop() - - for { - select { - case <-ctx.Done(): - return - case <-ti.C: - b := Read(t) - if b == nil { - continue - } - - // Check if content changed - if !bytes.Equal(lastContent, b) { - recv <- b - lastContent = make([]byte, len(b)) - copy(lastContent, b) - } - } - } - }() - return recv -} diff --git a/packages/tui/internal/clipboard/clipboard_linux.go b/packages/tui/internal/clipboard/clipboard_linux.go deleted file mode 100644 index 10190639..00000000 --- a/packages/tui/internal/clipboard/clipboard_linux.go +++ /dev/null @@ -1,311 +0,0 @@ -// Copyright 2021 The golang.design Initiative Authors. -// All rights reserved. Use of this source code is governed -// by a MIT license that can be found in the LICENSE file. 
-// -// Written by Changkun Ou - -//go:build linux - -package clipboard - -import ( - "bytes" - "context" - "fmt" - "log/slog" - "os" - "os/exec" - "strings" - "sync" - "time" -) - -var ( - // Clipboard tools in order of preference - clipboardTools = []struct { - name string - readCmd []string - writeCmd []string - readImg []string - writeImg []string - available bool - }{ - { - name: "xclip", - readCmd: []string{"xclip", "-selection", "clipboard", "-o"}, - writeCmd: []string{"xclip", "-selection", "clipboard"}, - readImg: []string{"xclip", "-selection", "clipboard", "-t", "image/png", "-o"}, - writeImg: []string{"xclip", "-selection", "clipboard", "-t", "image/png"}, - }, - { - name: "xsel", - readCmd: []string{"xsel", "--clipboard", "--output"}, - writeCmd: []string{"xsel", "--clipboard", "--input"}, - readImg: []string{"xsel", "--clipboard", "--output"}, - writeImg: []string{"xsel", "--clipboard", "--input"}, - }, - { - name: "wl-copy", - readCmd: []string{"wl-paste", "-n"}, - writeCmd: []string{"wl-copy"}, - readImg: []string{"wl-paste", "-t", "image/png", "-n"}, - writeImg: []string{"wl-copy", "-t", "image/png"}, - }, - } - - selectedTool int = -1 - toolMutex sync.Mutex - lastChangeTime time.Time - changeTimeMu sync.Mutex -) - -func initialize() error { - toolMutex.Lock() - defer toolMutex.Unlock() - - if selectedTool >= 0 { - return nil // Already initialized - } - - order := []string{"xclip", "xsel", "wl-copy"} - if os.Getenv("WAYLAND_DISPLAY") != "" { - order = []string{"wl-copy", "xclip", "xsel"} - } - - for _, name := range order { - for i, tool := range clipboardTools { - if tool.name == name { - cmd := exec.Command("which", tool.name) - if err := cmd.Run(); err == nil { - clipboardTools[i].available = true - if selectedTool < 0 { - selectedTool = i - slog.Debug("Clipboard tool found", "tool", tool.name) - } - } - break - } - } - } - - if selectedTool < 0 { - slog.Warn( - "No clipboard utility found on system. Copy/paste functionality will be disabled. See https://opencode.ai/docs/troubleshooting/ for more information.", - ) - return fmt.Errorf(`%w: No clipboard utility found. Install one of the following: - -For X11 systems: - apt install -y xclip - # or - apt install -y xsel - -For Wayland systems: - apt install -y wl-clipboard - -If running in a headless environment, you may also need: - apt install -y xvfb - # and run: - Xvfb :99 -screen 0 1024x768x24 > /dev/null 2>&1 & - export DISPLAY=:99.0`, errUnavailable) - } - - return nil -} - -func read(t Format) (buf []byte, err error) { - // Ensure clipboard is initialized before attempting to read - if err := initialize(); err != nil { - slog.Debug("Clipboard read failed: not initialized", "error", err) - return nil, err - } - - toolMutex.Lock() - tool := clipboardTools[selectedTool] - toolMutex.Unlock() - - switch t { - case FmtText: - return readText(tool) - case FmtImage: - return readImage(tool) - default: - return nil, errUnsupported - } -} - -func readText(tool struct { - name string - readCmd []string - writeCmd []string - readImg []string - writeImg []string - available bool -}) ([]byte, error) { - // First check if clipboard contains text - cmd := exec.Command(tool.readCmd[0], tool.readCmd[1:]...) 
- out, err := cmd.Output() - if err != nil { - // Check if it's because clipboard contains non-text data - if tool.name == "xclip" { - // xclip returns error when clipboard doesn't contain requested type - checkCmd := exec.Command("xclip", "-selection", "clipboard", "-t", "TARGETS", "-o") - targets, _ := checkCmd.Output() - if bytes.Contains(targets, []byte("image/png")) && - !bytes.Contains(targets, []byte("UTF8_STRING")) { - return nil, errUnavailable - } - } - return nil, errUnavailable - } - - return out, nil -} - -func readImage(tool struct { - name string - readCmd []string - writeCmd []string - readImg []string - writeImg []string - available bool -}) ([]byte, error) { - if tool.name == "xsel" { - // xsel doesn't support image types well, return error - return nil, errUnavailable - } - - cmd := exec.Command(tool.readImg[0], tool.readImg[1:]...) - out, err := cmd.Output() - if err != nil { - return nil, errUnavailable - } - - // Verify it's PNG data - if len(out) < 8 || - !bytes.Equal(out[:8], []byte{0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A}) { - return nil, errUnavailable - } - - return out, nil -} - -func write(t Format, buf []byte) (<-chan struct{}, error) { - // Ensure clipboard is initialized before attempting to write - if err := initialize(); err != nil { - return nil, err - } - - toolMutex.Lock() - tool := clipboardTools[selectedTool] - toolMutex.Unlock() - - var cmd *exec.Cmd - switch t { - case FmtText: - if len(buf) == 0 { - // Write empty string - cmd = exec.Command(tool.writeCmd[0], tool.writeCmd[1:]...) - cmd.Stdin = bytes.NewReader([]byte{}) - } else { - cmd = exec.Command(tool.writeCmd[0], tool.writeCmd[1:]...) - cmd.Stdin = bytes.NewReader(buf) - } - case FmtImage: - if tool.name == "xsel" { - // xsel doesn't support image types well - return nil, errUnavailable - } - if len(buf) == 0 { - // Clear clipboard - cmd = exec.Command(tool.writeCmd[0], tool.writeCmd[1:]...) - cmd.Stdin = bytes.NewReader([]byte{}) - } else { - cmd = exec.Command(tool.writeImg[0], tool.writeImg[1:]...) 
- cmd.Stdin = bytes.NewReader(buf) - } - default: - return nil, errUnsupported - } - - if err := cmd.Run(); err != nil { - return nil, errUnavailable - } - - // Update change time - changeTimeMu.Lock() - lastChangeTime = time.Now() - currentTime := lastChangeTime - changeTimeMu.Unlock() - - // Create change notification channel - changed := make(chan struct{}, 1) - go func() { - for { - time.Sleep(time.Second) - changeTimeMu.Lock() - if !lastChangeTime.Equal(currentTime) { - changeTimeMu.Unlock() - changed <- struct{}{} - close(changed) - return - } - changeTimeMu.Unlock() - } - }() - - return changed, nil -} - -func watch(ctx context.Context, t Format) <-chan []byte { - recv := make(chan []byte, 1) - - // Ensure clipboard is initialized before starting watch - if err := initialize(); err != nil { - close(recv) - return recv - } - - ti := time.NewTicker(time.Second) - - // Get initial clipboard content - var lastContent []byte - if b := Read(t); b != nil { - lastContent = make([]byte, len(b)) - copy(lastContent, b) - } - - go func() { - defer close(recv) - defer ti.Stop() - - for { - select { - case <-ctx.Done(): - return - case <-ti.C: - b := Read(t) - if b == nil { - continue - } - - // Check if content changed - if !bytes.Equal(lastContent, b) { - recv <- b - lastContent = make([]byte, len(b)) - copy(lastContent, b) - } - } - } - }() - return recv -} - -// Helper function to check clipboard content type for xclip -func getClipboardTargets() []string { - cmd := exec.Command("xclip", "-selection", "clipboard", "-t", "TARGETS", "-o") - out, err := cmd.Output() - if err != nil { - return nil - } - return strings.Split(string(out), "\n") -} diff --git a/packages/tui/internal/clipboard/clipboard_nocgo.go b/packages/tui/internal/clipboard/clipboard_nocgo.go deleted file mode 100644 index 7b3e05f6..00000000 --- a/packages/tui/internal/clipboard/clipboard_nocgo.go +++ /dev/null @@ -1,25 +0,0 @@ -//go:build !windows && !darwin && !linux && !cgo - -package clipboard - -import "context" - -func initialize() error { - return errNoCgo -} - -func read(t Format) (buf []byte, err error) { - panic("clipboard: cannot use when CGO_ENABLED=0") -} - -func readc(t string) ([]byte, error) { - panic("clipboard: cannot use when CGO_ENABLED=0") -} - -func write(t Format, buf []byte) (<-chan struct{}, error) { - panic("clipboard: cannot use when CGO_ENABLED=0") -} - -func watch(ctx context.Context, t Format) <-chan []byte { - panic("clipboard: cannot use when CGO_ENABLED=0") -} diff --git a/packages/tui/internal/clipboard/clipboard_windows.go b/packages/tui/internal/clipboard/clipboard_windows.go deleted file mode 100644 index bd042cda..00000000 --- a/packages/tui/internal/clipboard/clipboard_windows.go +++ /dev/null @@ -1,551 +0,0 @@ -// Copyright 2021 The golang.design Initiative Authors. -// All rights reserved. Use of this source code is governed -// by a MIT license that can be found in the LICENSE file. -// -// Written by Changkun Ou - -//go:build windows - -package clipboard - -// Interacting with Clipboard on Windows: -// https://docs.microsoft.com/zh-cn/windows/win32/dataxchg/using-the-clipboard - -import ( - "bytes" - "context" - "encoding/binary" - "errors" - "fmt" - "image" - "image/color" - "image/png" - "reflect" - "runtime" - "syscall" - "time" - "unicode/utf16" - "unsafe" - - "golang.org/x/image/bmp" -) - -func initialize() error { return nil } - -// readText reads the clipboard and returns the text data if presents. 
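The Linux implementation's initialize() probes for xclip, xsel, and wl-clipboard, preferring wl-copy when WAYLAND_DISPLAY is set. The same preference order can be expressed with exec.LookPath instead of shelling out to `which`; a sketch, with an illustrative function name:

```go
package clipboard

import (
	"errors"
	"os"
	"os/exec"
)

// pickTool returns the first clipboard utility found on PATH, trying Wayland's
// wl-copy first when a Wayland session is detected, otherwise the X11 tools.
// exec.LookPath replaces the `which` subprocess used by initialize().
func pickTool() (string, error) {
	order := []string{"xclip", "xsel", "wl-copy"}
	if os.Getenv("WAYLAND_DISPLAY") != "" {
		order = []string{"wl-copy", "xclip", "xsel"}
	}
	for _, name := range order {
		if _, err := exec.LookPath(name); err == nil {
			return name, nil
		}
	}
	return "", errors.New("no clipboard utility found")
}
```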
-// The caller is responsible for opening/closing the clipboard before -// calling this function. -func readText() (buf []byte, err error) { - hMem, _, err := getClipboardData.Call(cFmtUnicodeText) - if hMem == 0 { - return nil, err - } - p, _, err := gLock.Call(hMem) - if p == 0 { - return nil, err - } - defer gUnlock.Call(hMem) - - // Find NUL terminator - n := 0 - for ptr := unsafe.Pointer(p); *(*uint16)(ptr) != 0; n++ { - ptr = unsafe.Pointer(uintptr(ptr) + - unsafe.Sizeof(*((*uint16)(unsafe.Pointer(p))))) - } - - var s []uint16 - h := (*reflect.SliceHeader)(unsafe.Pointer(&s)) - h.Data = p - h.Len = n - h.Cap = n - return []byte(string(utf16.Decode(s))), nil -} - -// writeText writes given data to the clipboard. It is the caller's -// responsibility for opening/closing the clipboard before calling -// this function. -func writeText(buf []byte) error { - r, _, err := emptyClipboard.Call() - if r == 0 { - return fmt.Errorf("failed to clear clipboard: %w", err) - } - - // empty text, we are done here. - if len(buf) == 0 { - return nil - } - - s, err := syscall.UTF16FromString(string(buf)) - if err != nil { - return fmt.Errorf("failed to convert given string: %w", err) - } - - hMem, _, err := gAlloc.Call(gmemMoveable, uintptr(len(s)*int(unsafe.Sizeof(s[0])))) - if hMem == 0 { - return fmt.Errorf("failed to alloc global memory: %w", err) - } - - p, _, err := gLock.Call(hMem) - if p == 0 { - return fmt.Errorf("failed to lock global memory: %w", err) - } - defer gUnlock.Call(hMem) - - // no return value - memMove.Call(p, uintptr(unsafe.Pointer(&s[0])), - uintptr(len(s)*int(unsafe.Sizeof(s[0])))) - - v, _, err := setClipboardData.Call(cFmtUnicodeText, hMem) - if v == 0 { - gFree.Call(hMem) - return fmt.Errorf("failed to set text to clipboard: %w", err) - } - - return nil -} - -// readImage reads the clipboard and returns PNG encoded image data -// if presents. The caller is responsible for opening/closing the -// clipboard before calling this function. -func readImage() ([]byte, error) { - hMem, _, err := getClipboardData.Call(cFmtDIBV5) - if hMem == 0 { - // second chance to try FmtDIB - return readImageDib() - } - p, _, err := gLock.Call(hMem) - if p == 0 { - return nil, err - } - defer gUnlock.Call(hMem) - - // inspect header information - info := (*bitmapV5Header)(unsafe.Pointer(p)) - - // maybe deal with other formats? - if info.BitCount != 32 { - return nil, errUnsupported - } - - var data []byte - sh := (*reflect.SliceHeader)(unsafe.Pointer(&data)) - sh.Data = uintptr(p) - sh.Cap = int(info.Size + 4*uint32(info.Width)*uint32(info.Height)) - sh.Len = int(info.Size + 4*uint32(info.Width)*uint32(info.Height)) - img := image.NewRGBA(image.Rect(0, 0, int(info.Width), int(info.Height))) - offset := int(info.Size) - stride := int(info.Width) - for y := 0; y < int(info.Height); y++ { - for x := 0; x < int(info.Width); x++ { - idx := offset + 4*(y*stride+x) - xhat := (x + int(info.Width)) % int(info.Width) - yhat := int(info.Height) - 1 - y - r := data[idx+2] - g := data[idx+1] - b := data[idx+0] - a := data[idx+3] - img.SetRGBA(xhat, yhat, color.RGBA{r, g, b, a}) - } - } - // always use PNG encoding. 
- var buf bytes.Buffer - png.Encode(&buf, img) - return buf.Bytes(), nil -} - -func readImageDib() ([]byte, error) { - const ( - fileHeaderLen = 14 - infoHeaderLen = 40 - cFmtDIB = 8 - ) - - hClipDat, _, err := getClipboardData.Call(cFmtDIB) - if err != nil { - return nil, errors.New("not dib format data: " + err.Error()) - } - pMemBlk, _, err := gLock.Call(hClipDat) - if pMemBlk == 0 { - return nil, errors.New("failed to call global lock: " + err.Error()) - } - defer gUnlock.Call(hClipDat) - - bmpHeader := (*bitmapHeader)(unsafe.Pointer(pMemBlk)) - dataSize := bmpHeader.SizeImage + fileHeaderLen + infoHeaderLen - - if bmpHeader.SizeImage == 0 && bmpHeader.Compression == 0 { - iSizeImage := bmpHeader.Height * ((bmpHeader.Width*uint32(bmpHeader.BitCount)/8 + 3) &^ 3) - dataSize += iSizeImage - } - buf := new(bytes.Buffer) - binary.Write(buf, binary.LittleEndian, uint16('B')|(uint16('M')<<8)) - binary.Write(buf, binary.LittleEndian, uint32(dataSize)) - binary.Write(buf, binary.LittleEndian, uint32(0)) - const sizeof_colorbar = 0 - binary.Write(buf, binary.LittleEndian, uint32(fileHeaderLen+infoHeaderLen+sizeof_colorbar)) - j := 0 - for i := fileHeaderLen; i < int(dataSize); i++ { - binary.Write(buf, binary.BigEndian, *(*byte)(unsafe.Pointer(pMemBlk + uintptr(j)))) - j++ - } - return bmpToPng(buf) -} - -func bmpToPng(bmpBuf *bytes.Buffer) (buf []byte, err error) { - var f bytes.Buffer - original_image, err := bmp.Decode(bmpBuf) - if err != nil { - return nil, err - } - err = png.Encode(&f, original_image) - if err != nil { - return nil, err - } - return f.Bytes(), nil -} - -func writeImage(buf []byte) error { - r, _, err := emptyClipboard.Call() - if r == 0 { - return fmt.Errorf("failed to clear clipboard: %w", err) - } - - // empty text, we are done here. - if len(buf) == 0 { - return nil - } - - img, err := png.Decode(bytes.NewReader(buf)) - if err != nil { - return fmt.Errorf("input bytes is not PNG encoded: %w", err) - } - - offset := unsafe.Sizeof(bitmapV5Header{}) - width := img.Bounds().Dx() - height := img.Bounds().Dy() - imageSize := 4 * width * height - - data := make([]byte, int(offset)+imageSize) - for y := 0; y < height; y++ { - for x := 0; x < width; x++ { - idx := int(offset) + 4*(y*width+x) - r, g, b, a := img.At(x, height-1-y).RGBA() - data[idx+2] = uint8(r) - data[idx+1] = uint8(g) - data[idx+0] = uint8(b) - data[idx+3] = uint8(a) - } - } - - info := bitmapV5Header{} - info.Size = uint32(offset) - info.Width = int32(width) - info.Height = int32(height) - info.Planes = 1 - info.Compression = 0 // BI_RGB - info.SizeImage = uint32(4 * info.Width * info.Height) - info.RedMask = 0xff0000 // default mask - info.GreenMask = 0xff00 - info.BlueMask = 0xff - info.AlphaMask = 0xff000000 - info.BitCount = 32 // we only deal with 32 bpp at the moment. - // Use calibrated RGB values as Go's image/png assumes linear color space. 
- // Other options: - // - LCS_CALIBRATED_RGB = 0x00000000 - // - LCS_sRGB = 0x73524742 - // - LCS_WINDOWS_COLOR_SPACE = 0x57696E20 - // https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-wmf/eb4bbd50-b3ce-4917-895c-be31f214797f - info.CSType = 0x73524742 - // Use GL_IMAGES for GamutMappingIntent - // Other options: - // - LCS_GM_ABS_COLORIMETRIC = 0x00000008 - // - LCS_GM_BUSINESS = 0x00000001 - // - LCS_GM_GRAPHICS = 0x00000002 - // - LCS_GM_IMAGES = 0x00000004 - // https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-wmf/9fec0834-607d-427d-abd5-ab240fb0db38 - info.Intent = 4 // LCS_GM_IMAGES - - infob := make([]byte, int(unsafe.Sizeof(info))) - for i, v := range *(*[unsafe.Sizeof(info)]byte)(unsafe.Pointer(&info)) { - infob[i] = v - } - copy(data[:], infob[:]) - - hMem, _, err := gAlloc.Call(gmemMoveable, - uintptr(len(data)*int(unsafe.Sizeof(data[0])))) - if hMem == 0 { - return fmt.Errorf("failed to alloc global memory: %w", err) - } - - p, _, err := gLock.Call(hMem) - if p == 0 { - return fmt.Errorf("failed to lock global memory: %w", err) - } - defer gUnlock.Call(hMem) - - memMove.Call(p, uintptr(unsafe.Pointer(&data[0])), - uintptr(len(data)*int(unsafe.Sizeof(data[0])))) - - v, _, err := setClipboardData.Call(cFmtDIBV5, hMem) - if v == 0 { - gFree.Call(hMem) - return fmt.Errorf("failed to set text to clipboard: %w", err) - } - - return nil -} - -func read(t Format) (buf []byte, err error) { - // On Windows, OpenClipboard and CloseClipboard must be executed on - // the same thread. Thus, lock the OS thread for further execution. - runtime.LockOSThread() - defer runtime.UnlockOSThread() - - var format uintptr - switch t { - case FmtImage: - format = cFmtDIBV5 - case FmtText: - fallthrough - default: - format = cFmtUnicodeText - } - - // check if clipboard is avaliable for the requested format - r, _, err := isClipboardFormatAvailable.Call(format) - if r == 0 { - return nil, errUnavailable - } - - // try again until open clipboard successed - for { - r, _, _ = openClipboard.Call() - if r == 0 { - continue - } - break - } - defer closeClipboard.Call() - - switch format { - case cFmtDIBV5: - return readImage() - case cFmtUnicodeText: - fallthrough - default: - return readText() - } -} - -// write writes the given data to clipboard and -// returns true if success or false if failed. -func write(t Format, buf []byte) (<-chan struct{}, error) { - errch := make(chan error) - changed := make(chan struct{}, 1) - go func() { - // make sure GetClipboardSequenceNumber happens with - // OpenClipboard on the same thread. - runtime.LockOSThread() - defer runtime.UnlockOSThread() - for { - r, _, _ := openClipboard.Call(0) - if r == 0 { - continue - } - break - } - - // var param uintptr - switch t { - case FmtImage: - err := writeImage(buf) - if err != nil { - errch <- err - closeClipboard.Call() - return - } - case FmtText: - fallthrough - default: - // param = cFmtUnicodeText - err := writeText(buf) - if err != nil { - errch <- err - closeClipboard.Call() - return - } - } - // Close the clipboard otherwise other applications cannot - // paste the data. 
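The Windows read and write paths pin the goroutine to one OS thread because OpenClipboard and CloseClipboard must be paired on the same thread. The pattern in isolation, with an illustrative helper name:

```go
package clipboard

import "runtime"

// withClipboardThread keeps fn on a single OS thread, mirroring the
// runtime.LockOSThread/UnlockOSThread pairing used around the clipboard
// syscalls above.
func withClipboardThread(fn func() error) error {
	runtime.LockOSThread()
	defer runtime.UnlockOSThread()
	return fn()
}
```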
- closeClipboard.Call() - - cnt, _, _ := getClipboardSequenceNumber.Call() - errch <- nil - for { - time.Sleep(time.Second) - cur, _, _ := getClipboardSequenceNumber.Call() - if cur != cnt { - changed <- struct{}{} - close(changed) - return - } - } - }() - err := <-errch - if err != nil { - return nil, err - } - return changed, nil -} - -func watch(ctx context.Context, t Format) <-chan []byte { - recv := make(chan []byte, 1) - ready := make(chan struct{}) - go func() { - // not sure if we are too slow or the user too fast :) - ti := time.NewTicker(time.Second) - cnt, _, _ := getClipboardSequenceNumber.Call() - ready <- struct{}{} - for { - select { - case <-ctx.Done(): - close(recv) - return - case <-ti.C: - cur, _, _ := getClipboardSequenceNumber.Call() - if cnt != cur { - b := Read(t) - if b == nil { - continue - } - recv <- b - cnt = cur - } - } - } - }() - <-ready - return recv -} - -const ( - cFmtBitmap = 2 // Win+PrintScreen - cFmtUnicodeText = 13 - cFmtDIBV5 = 17 - // Screenshot taken from special shortcut is in different format (why??), see: - // https://jpsoft.com/forums/threads/detecting-clipboard-format.5225/ - cFmtDataObject = 49161 // Shift+Win+s, returned from enumClipboardFormats - gmemMoveable = 0x0002 -) - -// BITMAPV5Header structure, see: -// https://docs.microsoft.com/en-us/windows/win32/api/wingdi/ns-wingdi-bitmapv5header -type bitmapV5Header struct { - Size uint32 - Width int32 - Height int32 - Planes uint16 - BitCount uint16 - Compression uint32 - SizeImage uint32 - XPelsPerMeter int32 - YPelsPerMeter int32 - ClrUsed uint32 - ClrImportant uint32 - RedMask uint32 - GreenMask uint32 - BlueMask uint32 - AlphaMask uint32 - CSType uint32 - Endpoints struct { - CiexyzRed, CiexyzGreen, CiexyzBlue struct { - CiexyzX, CiexyzY, CiexyzZ int32 // FXPT2DOT30 - } - } - GammaRed uint32 - GammaGreen uint32 - GammaBlue uint32 - Intent uint32 - ProfileData uint32 - ProfileSize uint32 - Reserved uint32 -} - -type bitmapHeader struct { - Size uint32 - Width uint32 - Height uint32 - PLanes uint16 - BitCount uint16 - Compression uint32 - SizeImage uint32 - XPelsPerMeter uint32 - YPelsPerMeter uint32 - ClrUsed uint32 - ClrImportant uint32 -} - -// Calling a Windows DLL, see: -// https://github.com/golang/go/wiki/WindowsDLLs -var ( - user32 = syscall.MustLoadDLL("user32") - // Opens the clipboard for examination and prevents other - // applications from modifying the clipboard content. - // https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-openclipboard - openClipboard = user32.MustFindProc("OpenClipboard") - // Closes the clipboard. - // https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-closeclipboard - closeClipboard = user32.MustFindProc("CloseClipboard") - // Empties the clipboard and frees handles to data in the clipboard. - // The function then assigns ownership of the clipboard to the - // window that currently has the clipboard open. - // https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-emptyclipboard - emptyClipboard = user32.MustFindProc("EmptyClipboard") - // Retrieves data from the clipboard in a specified format. - // The clipboard must have been opened previously. - // https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-getclipboarddata - getClipboardData = user32.MustFindProc("GetClipboardData") - // Places data on the clipboard in a specified clipboard format. - // The window must be the current clipboard owner, and the - // application must have called the OpenClipboard function. 
(When - // responding to the WM_RENDERFORMAT message, the clipboard owner - // must not call OpenClipboard before calling SetClipboardData.) - // https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-setclipboarddata - setClipboardData = user32.MustFindProc("SetClipboardData") - // Determines whether the clipboard contains data in the specified format. - // https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-isclipboardformatavailable - isClipboardFormatAvailable = user32.MustFindProc("IsClipboardFormatAvailable") - // Clipboard data formats are stored in an ordered list. To perform - // an enumeration of clipboard data formats, you make a series of - // calls to the EnumClipboardFormats function. For each call, the - // format parameter specifies an available clipboard format, and the - // function returns the next available clipboard format. - // https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-isclipboardformatavailable - enumClipboardFormats = user32.MustFindProc("EnumClipboardFormats") - // Retrieves the clipboard sequence number for the current window station. - // https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-getclipboardsequencenumber - getClipboardSequenceNumber = user32.MustFindProc("GetClipboardSequenceNumber") - // Registers a new clipboard format. This format can then be used as - // a valid clipboard format. - // https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-registerclipboardformata - registerClipboardFormatA = user32.MustFindProc("RegisterClipboardFormatA") - - kernel32 = syscall.NewLazyDLL("kernel32") - - // Locks a global memory object and returns a pointer to the first - // byte of the object's memory block. - // https://docs.microsoft.com/en-us/windows/win32/api/winbase/nf-winbase-globallock - gLock = kernel32.NewProc("GlobalLock") - // Decrements the lock count associated with a memory object that was - // allocated with GMEM_MOVEABLE. This function has no effect on memory - // objects allocated with GMEM_FIXED. - // https://docs.microsoft.com/en-us/windows/win32/api/winbase/nf-winbase-globalunlock - gUnlock = kernel32.NewProc("GlobalUnlock") - // Allocates the specified number of bytes from the heap. - // https://docs.microsoft.com/en-us/windows/win32/api/winbase/nf-winbase-globalalloc - gAlloc = kernel32.NewProc("GlobalAlloc") - // Frees the specified global memory object and invalidates its handle. 
- // https://docs.microsoft.com/en-us/windows/win32/api/winbase/nf-winbase-globalfree - gFree = kernel32.NewProc("GlobalFree") - memMove = kernel32.NewProc("RtlMoveMemory") -) diff --git a/packages/tui/internal/commands/command.go b/packages/tui/internal/commands/command.go index 5a981549..9c4da12e 100644 --- a/packages/tui/internal/commands/command.go +++ b/packages/tui/internal/commands/command.go @@ -29,7 +29,7 @@ type Command struct { Name CommandName Description string Keybindings []Keybinding - Trigger []string + Trigger string } func (c Command) Keys() []string { @@ -40,21 +40,6 @@ func (c Command) Keys() []string { return keys } -func (c Command) HasTrigger() bool { - return len(c.Trigger) > 0 -} - -func (c Command) PrimaryTrigger() string { - if len(c.Trigger) > 0 { - return c.Trigger[0] - } - return "" -} - -func (c Command) MatchesTrigger(trigger string) bool { - return slices.Contains(c.Trigger, trigger) -} - type CommandRegistry map[CommandName]Command func (r CommandRegistry) Sorted() []Command { @@ -86,8 +71,6 @@ func (r CommandRegistry) Matches(msg tea.KeyPressMsg, leader bool) []Command { const ( AppHelpCommand CommandName = "app_help" - SwitchModeCommand CommandName = "switch_mode" - SwitchModeReverseCommand CommandName = "switch_mode_reverse" EditorOpenCommand CommandName = "editor_open" SessionNewCommand CommandName = "session_new" SessionListCommand CommandName = "session_list" @@ -95,7 +78,6 @@ const ( SessionUnshareCommand CommandName = "session_unshare" SessionInterruptCommand CommandName = "session_interrupt" SessionCompactCommand CommandName = "session_compact" - SessionExportCommand CommandName = "session_export" ToolDetailsCommand CommandName = "tool_details" ModelListCommand CommandName = "model_list" ThemeListCommand CommandName = "theme_list" @@ -153,53 +135,37 @@ func LoadFromConfig(config *opencode.Config) CommandRegistry { Name: AppHelpCommand, Description: "show help", Keybindings: parseBindings("h"), - Trigger: []string{"help"}, - }, - { - Name: SwitchModeCommand, - Description: "next mode", - Keybindings: parseBindings("tab"), - }, - { - Name: SwitchModeReverseCommand, - Description: "previous mode", - Keybindings: parseBindings("shift+tab"), + Trigger: "help", }, { Name: EditorOpenCommand, Description: "open editor", Keybindings: parseBindings("e"), - Trigger: []string{"editor"}, - }, - { - Name: SessionExportCommand, - Description: "export conversation", - Keybindings: parseBindings("x"), - Trigger: []string{"export"}, + Trigger: "editor", }, { Name: SessionNewCommand, Description: "new session", Keybindings: parseBindings("n"), - Trigger: []string{"new", "clear"}, + Trigger: "new", }, { Name: SessionListCommand, Description: "list sessions", Keybindings: parseBindings("l"), - Trigger: []string{"sessions", "resume", "continue"}, + Trigger: "sessions", }, { Name: SessionShareCommand, Description: "share session", Keybindings: parseBindings("s"), - Trigger: []string{"share"}, + Trigger: "share", }, { Name: SessionUnshareCommand, Description: "unshare session", Keybindings: parseBindings("u"), - Trigger: []string{"unshare"}, + Trigger: "unshare", }, { Name: SessionInterruptCommand, @@ -210,32 +176,32 @@ func LoadFromConfig(config *opencode.Config) CommandRegistry { Name: SessionCompactCommand, Description: "compact the session", Keybindings: parseBindings("c"), - Trigger: []string{"compact", "summarize"}, + Trigger: "compact", }, { Name: ToolDetailsCommand, Description: "toggle tool details", Keybindings: parseBindings("d"), - Trigger: 
[]string{"details"}, + Trigger: "details", }, { Name: ModelListCommand, Description: "list models", Keybindings: parseBindings("m"), - Trigger: []string{"models"}, + Trigger: "models", }, { Name: ThemeListCommand, Description: "list themes", Keybindings: parseBindings("t"), - Trigger: []string{"themes"}, + Trigger: "themes", + }, + { + Name: FileListCommand, + Description: "list files", + Keybindings: parseBindings("f"), + Trigger: "files", }, - // { - // Name: FileListCommand, - // Description: "list files", - // Keybindings: parseBindings("f"), - // Trigger: []string{"files"}, - // }, { Name: FileCloseCommand, Description: "close file", @@ -255,7 +221,7 @@ func LoadFromConfig(config *opencode.Config) CommandRegistry { Name: ProjectInitCommand, Description: "create/update AGENTS.md", Keybindings: parseBindings("i"), - Trigger: []string{"init"}, + Trigger: "init", }, { Name: InputClearCommand, @@ -265,7 +231,7 @@ func LoadFromConfig(config *opencode.Config) CommandRegistry { { Name: InputPasteCommand, Description: "paste content", - Keybindings: parseBindings("ctrl+v", "super+v"), + Keybindings: parseBindings("ctrl+v"), }, { Name: InputSubmitCommand, @@ -336,7 +302,7 @@ func LoadFromConfig(config *opencode.Config) CommandRegistry { Name: AppExitCommand, Description: "exit the app", Keybindings: parseBindings("ctrl+c", "q"), - Trigger: []string{"exit", "quit", "q"}, + Trigger: "exit", }, } registry := make(CommandRegistry) @@ -344,10 +310,6 @@ func LoadFromConfig(config *opencode.Config) CommandRegistry { marshalled, _ := json.Marshal(config.Keybinds) json.Unmarshal(marshalled, &keybinds) for _, command := range defaults { - // Remove share/unshare commands if sharing is disabled - if config.Share == opencode.ConfigShareDisabled && (command.Name == SessionShareCommand || command.Name == SessionUnshareCommand) { - continue - } if keybind, ok := keybinds[string(command.Name)]; ok && keybind != "" { command.Keybindings = parseBindings(keybind) } diff --git a/packages/tui/internal/completions/commands.go b/packages/tui/internal/completions/commands.go index 2ffe3ea9..c73923e8 100644 --- a/packages/tui/internal/completions/commands.go +++ b/packages/tui/internal/completions/commands.go @@ -8,6 +8,7 @@ import ( "github.com/lithammer/fuzzysearch/fuzzy" "github.com/sst/opencode/internal/app" "github.com/sst/opencode/internal/commands" + "github.com/sst/opencode/internal/components/dialog" "github.com/sst/opencode/internal/styles" "github.com/sst/opencode/internal/theme" ) @@ -16,7 +17,7 @@ type CommandCompletionProvider struct { app *app.App } -func NewCommandCompletionProvider(app *app.App) CompletionProvider { +func NewCommandCompletionProvider(app *app.App) dialog.CompletionProvider { return &CommandCompletionProvider{app: app} } @@ -28,37 +29,24 @@ func (c *CommandCompletionProvider) GetEmptyMessage() string { return "no matching commands" } -func (c *CommandCompletionProvider) getCommandCompletionItem( - cmd commands.Command, - space int, -) CompletionSuggestion { - displayFunc := func(s styles.Style) string { - t := theme.CurrentTheme() - spacer := strings.Repeat(" ", space) - display := " /" + cmd.PrimaryTrigger() + s. - Foreground(t.TextMuted()). 
- Render(spacer+cmd.Description) - return display - } - +func getCommandCompletionItem(cmd commands.Command, space int, t theme.Theme) dialog.CompletionItemI { + spacer := strings.Repeat(" ", space) + title := " /" + cmd.Trigger + styles.NewStyle().Foreground(t.TextMuted()).Render(spacer+cmd.Description) value := string(cmd.Name) - return CompletionSuggestion{ - Display: displayFunc, - Value: value, - ProviderID: c.GetId(), - RawData: cmd, - } + return dialog.NewCompletionItem(dialog.CompletionItem{ + Title: title, + Value: value, + }) } -func (c *CommandCompletionProvider) GetChildEntries( - query string, -) ([]CompletionSuggestion, error) { +func (c *CommandCompletionProvider) GetChildEntries(query string) ([]dialog.CompletionItemI, error) { + t := theme.CurrentTheme() commands := c.app.Commands space := 1 for _, cmd := range c.app.Commands { - if cmd.HasTrigger() && lipgloss.Width(cmd.PrimaryTrigger()) > space { - space = lipgloss.Width(cmd.PrimaryTrigger()) + if lipgloss.Width(cmd.Trigger) > space { + space = lipgloss.Width(cmd.Trigger) } } space += 2 @@ -66,44 +54,41 @@ func (c *CommandCompletionProvider) GetChildEntries( sorted := commands.Sorted() if query == "" { // If no query, return all commands - items := []CompletionSuggestion{} + items := []dialog.CompletionItemI{} for _, cmd := range sorted { - if !cmd.HasTrigger() { + if cmd.Trigger == "" { continue } - space := space - lipgloss.Width(cmd.PrimaryTrigger()) - items = append(items, c.getCommandCompletionItem(cmd, space)) + space := space - lipgloss.Width(cmd.Trigger) + items = append(items, getCommandCompletionItem(cmd, space, t)) } return items, nil } + // Use fuzzy matching for commands var commandNames []string - commandMap := make(map[string]CompletionSuggestion) + commandMap := make(map[string]dialog.CompletionItemI) for _, cmd := range sorted { - if !cmd.HasTrigger() { + if cmd.Trigger == "" { continue } - space := space - lipgloss.Width(cmd.PrimaryTrigger()) - for _, trigger := range cmd.Trigger { - commandNames = append(commandNames, trigger) - commandMap[trigger] = c.getCommandCompletionItem(cmd, space) - } + space := space - lipgloss.Width(cmd.Trigger) + commandNames = append(commandNames, cmd.Trigger) + commandMap[cmd.Trigger] = getCommandCompletionItem(cmd, space, t) } - matches := fuzzy.RankFindFold(query, commandNames) + // Find fuzzy matches + matches := fuzzy.RankFind(query, commandNames) + + // Sort by score (best matches first) sort.Sort(matches) - // Convert matches to completion items, deduplicating by command name - items := []CompletionSuggestion{} - seen := make(map[string]bool) + // Convert matches to completion items + items := []dialog.CompletionItemI{} for _, match := range matches { if item, ok := commandMap[match.Target]; ok { - // Use the command's value (name) as the deduplication key - if !seen[item.Value] { - seen[item.Value] = true - items = append(items, item) - } + items = append(items, item) } } return items, nil diff --git a/packages/tui/internal/completions/files-folders.go b/packages/tui/internal/completions/files-folders.go new file mode 100644 index 00000000..8d6b9958 --- /dev/null +++ b/packages/tui/internal/completions/files-folders.go @@ -0,0 +1,115 @@ +package completions + +import ( + "context" + "log/slog" + "sort" + "strconv" + "strings" + + "github.com/sst/opencode-sdk-go" + "github.com/sst/opencode/internal/app" + "github.com/sst/opencode/internal/components/dialog" + "github.com/sst/opencode/internal/styles" + "github.com/sst/opencode/internal/theme" +) + +type 
filesAndFoldersContextGroup struct { + app *app.App + prefix string + gitFiles []dialog.CompletionItemI +} + +func (cg *filesAndFoldersContextGroup) GetId() string { + return cg.prefix +} + +func (cg *filesAndFoldersContextGroup) GetEmptyMessage() string { + return "no matching files" +} + +func (cg *filesAndFoldersContextGroup) getGitFiles() []dialog.CompletionItemI { + t := theme.CurrentTheme() + items := make([]dialog.CompletionItemI, 0) + base := styles.NewStyle().Background(t.BackgroundElement()) + green := base.Foreground(t.Success()).Render + red := base.Foreground(t.Error()).Render + + status, _ := cg.app.Client.File.Status(context.Background()) + if status != nil { + files := *status + sort.Slice(files, func(i, j int) bool { + return files[i].Added+files[i].Removed > files[j].Added+files[j].Removed + }) + + for _, file := range files { + title := file.File + if file.Added > 0 { + title += green(" +" + strconv.Itoa(int(file.Added))) + } + if file.Removed > 0 { + title += red(" -" + strconv.Itoa(int(file.Removed))) + } + item := dialog.NewCompletionItem(dialog.CompletionItem{ + Title: title, + Value: file.File, + }) + items = append(items, item) + } + } + + return items +} + +func (cg *filesAndFoldersContextGroup) GetChildEntries( + query string, +) ([]dialog.CompletionItemI, error) { + items := make([]dialog.CompletionItemI, 0) + + query = strings.TrimSpace(query) + if query == "" { + items = append(items, cg.gitFiles...) + } + + files, err := cg.app.Client.Find.Files( + context.Background(), + opencode.FindFilesParams{Query: opencode.F(query)}, + ) + if err != nil { + slog.Error("Failed to get completion items", "error", err) + return items, err + } + if files == nil { + return items, nil + } + + for _, file := range *files { + exists := false + for _, existing := range cg.gitFiles { + if existing.GetValue() == file { + if query != "" { + items = append(items, existing) + } + exists = true + } + } + if !exists { + item := dialog.NewCompletionItem(dialog.CompletionItem{ + Title: file, + Value: file, + }) + items = append(items, item) + } + } + + return items, nil +} + +func NewFileAndFolderContextGroup(app *app.App) dialog.CompletionProvider { + cg := &filesAndFoldersContextGroup{ + app: app, + prefix: "file", + } + cg.gitFiles = cg.getGitFiles() + return cg +} diff --git a/packages/tui/internal/completions/files.go b/packages/tui/internal/completions/files.go deleted file mode 100644 index bece89a8..00000000 --- a/packages/tui/internal/completions/files.go +++ /dev/null @@ -1,126 +0,0 @@ -package completions - -import ( - "context" - "log/slog" - "sort" - "strconv" - "strings" - - "github.com/sst/opencode-sdk-go" - "github.com/sst/opencode/internal/app" - "github.com/sst/opencode/internal/styles" - "github.com/sst/opencode/internal/theme" -) - -type filesContextGroup struct { - app *app.App - gitFiles []CompletionSuggestion -} - -func (cg *filesContextGroup) GetId() string { - return "files" -} - -func (cg *filesContextGroup) GetEmptyMessage() string { - return "no matching files" -} - -func (cg *filesContextGroup) getGitFiles() []CompletionSuggestion { - items := make([]CompletionSuggestion, 0) - - status, _ := cg.app.Client.File.Status(context.Background()) - if status != nil { - files := *status - sort.Slice(files, func(i, j int) bool { - return files[i].Added+files[i].Removed > files[j].Added+files[j].Removed - }) - - for _, file := range files { - displayFunc := func(s styles.Style) string { - t := theme.CurrentTheme() - green := s.Foreground(t.Success()).Render - red := 
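The command completion provider earlier in this hunk ranks slash-command triggers with github.com/lithammer/fuzzysearch: fuzzy.RankFind drops non-matching candidates and sort.Sort orders the rest by edit distance. A self-contained illustration of that ranking step; the function name is illustrative:

```go
package example

import (
	"sort"

	"github.com/lithammer/fuzzysearch/fuzzy"
)

// rankTriggers returns the candidates that fuzzily match query, ordered
// best-first (smallest Levenshtein distance), the same combination
// GetChildEntries uses for slash commands.
func rankTriggers(query string, candidates []string) []string {
	matches := fuzzy.RankFind(query, candidates)
	sort.Sort(matches)
	ordered := make([]string, 0, len(matches))
	for _, m := range matches {
		ordered = append(ordered, m.Target)
	}
	return ordered
}
```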
s.Foreground(t.Error()).Render - display := file.Path - if file.Added > 0 { - display += green(" +" + strconv.Itoa(int(file.Added))) - } - if file.Removed > 0 { - display += red(" -" + strconv.Itoa(int(file.Removed))) - } - return display - } - item := CompletionSuggestion{ - Display: displayFunc, - Value: file.Path, - ProviderID: cg.GetId(), - RawData: file, - } - items = append(items, item) - } - } - - return items -} - -func (cg *filesContextGroup) GetChildEntries( - query string, -) ([]CompletionSuggestion, error) { - items := make([]CompletionSuggestion, 0) - - query = strings.TrimSpace(query) - if query == "" { - items = append(items, cg.gitFiles...) - } - - files, err := cg.app.Client.Find.Files( - context.Background(), - opencode.FindFilesParams{Query: opencode.F(query)}, - ) - if err != nil { - slog.Error("Failed to get completion items", "error", err) - return items, err - } - if files == nil { - return items, nil - } - - for _, file := range *files { - exists := false - for _, existing := range cg.gitFiles { - if existing.Value == file { - if query != "" { - items = append(items, existing) - } - exists = true - } - } - if !exists { - displayFunc := func(s styles.Style) string { - // t := theme.CurrentTheme() - // return s.Foreground(t.Text()).Render(file) - return s.Render(file) - } - - item := CompletionSuggestion{ - Display: displayFunc, - Value: file, - ProviderID: cg.GetId(), - RawData: file, - } - items = append(items, item) - } - } - - return items, nil -} - -func NewFileContextGroup(app *app.App) CompletionProvider { - cg := &filesContextGroup{ - app: app, - } - go func() { - cg.gitFiles = cg.getGitFiles() - }() - return cg -} diff --git a/packages/tui/internal/completions/manager.go b/packages/tui/internal/completions/manager.go new file mode 100644 index 00000000..5368208f --- /dev/null +++ b/packages/tui/internal/completions/manager.go @@ -0,0 +1,32 @@ +package completions + +import ( + "strings" + + "github.com/sst/opencode/internal/app" + "github.com/sst/opencode/internal/components/dialog" +) + +type CompletionManager struct { + providers map[string]dialog.CompletionProvider +} + +func NewCompletionManager(app *app.App) *CompletionManager { + return &CompletionManager{ + providers: map[string]dialog.CompletionProvider{ + "files": NewFileAndFolderContextGroup(app), + "commands": NewCommandCompletionProvider(app), + }, + } +} + +func (m *CompletionManager) DefaultProvider() dialog.CompletionProvider { + return m.providers["commands"] +} + +func (m *CompletionManager) GetProvider(input string) dialog.CompletionProvider { + if strings.HasPrefix(input, "/") { + return m.providers["commands"] + } + return m.providers["files"] +} diff --git a/packages/tui/internal/completions/provider.go b/packages/tui/internal/completions/provider.go deleted file mode 100644 index dc11522c..00000000 --- a/packages/tui/internal/completions/provider.go +++ /dev/null @@ -1,8 +0,0 @@ -package completions - -// CompletionProvider defines the interface for completion data providers -type CompletionProvider interface { - GetId() string - GetChildEntries(query string) ([]CompletionSuggestion, error) - GetEmptyMessage() string -} diff --git a/packages/tui/internal/completions/suggestion.go b/packages/tui/internal/completions/suggestion.go deleted file mode 100644 index fac6b681..00000000 --- a/packages/tui/internal/completions/suggestion.go +++ /dev/null @@ -1,24 +0,0 @@ -package completions - -import "github.com/sst/opencode/internal/styles" - -// CompletionSuggestion represents a data-only 
completion suggestion -// with no styling or rendering logic -type CompletionSuggestion struct { - // The text to be displayed in the list. May contain minimal inline - // ANSI styling if intrinsic to the data (e.g., git diff colors). - Display func(styles.Style) string - - // The value to be used when the item is selected (e.g., inserted into the editor). - Value string - - // An optional, longer description to be displayed. - Description string - - // The ID of the provider that generated this suggestion. - ProviderID string - - // The raw, underlying data object (e.g., opencode.Symbol, commands.Command). - // This allows the selection handler to perform rich actions. - RawData any -} diff --git a/packages/tui/internal/completions/symbols.go b/packages/tui/internal/completions/symbols.go deleted file mode 100644 index 725e2e69..00000000 --- a/packages/tui/internal/completions/symbols.go +++ /dev/null @@ -1,119 +0,0 @@ -package completions - -import ( - "context" - "fmt" - "log/slog" - "strings" - - "github.com/sst/opencode-sdk-go" - "github.com/sst/opencode/internal/app" - "github.com/sst/opencode/internal/styles" - "github.com/sst/opencode/internal/theme" -) - -type symbolsContextGroup struct { - app *app.App -} - -func (cg *symbolsContextGroup) GetId() string { - return "symbols" -} - -func (cg *symbolsContextGroup) GetEmptyMessage() string { - return "no matching symbols" -} - -type SymbolKind int - -const ( - SymbolKindFile SymbolKind = 1 - SymbolKindModule SymbolKind = 2 - SymbolKindNamespace SymbolKind = 3 - SymbolKindPackage SymbolKind = 4 - SymbolKindClass SymbolKind = 5 - SymbolKindMethod SymbolKind = 6 - SymbolKindProperty SymbolKind = 7 - SymbolKindField SymbolKind = 8 - SymbolKindConstructor SymbolKind = 9 - SymbolKindEnum SymbolKind = 10 - SymbolKindInterface SymbolKind = 11 - SymbolKindFunction SymbolKind = 12 - SymbolKindVariable SymbolKind = 13 - SymbolKindConstant SymbolKind = 14 - SymbolKindString SymbolKind = 15 - SymbolKindNumber SymbolKind = 16 - SymbolKindBoolean SymbolKind = 17 - SymbolKindArray SymbolKind = 18 - SymbolKindObject SymbolKind = 19 - SymbolKindKey SymbolKind = 20 - SymbolKindNull SymbolKind = 21 - SymbolKindEnumMember SymbolKind = 22 - SymbolKindStruct SymbolKind = 23 - SymbolKindEvent SymbolKind = 24 - SymbolKindOperator SymbolKind = 25 - SymbolKindTypeParameter SymbolKind = 26 -) - -func (cg *symbolsContextGroup) GetChildEntries( - query string, -) ([]CompletionSuggestion, error) { - items := make([]CompletionSuggestion, 0) - - query = strings.TrimSpace(query) - if query == "" { - return items, nil - } - - symbols, err := cg.app.Client.Find.Symbols( - context.Background(), - opencode.FindSymbolsParams{Query: opencode.F(query)}, - ) - if err != nil { - slog.Error("Failed to get symbol completion items", "error", err) - return items, err - } - if symbols == nil { - return items, nil - } - - for _, sym := range *symbols { - parts := strings.Split(sym.Name, ".") - lastPart := parts[len(parts)-1] - start := int(sym.Location.Range.Start.Line) - end := int(sym.Location.Range.End.Line) - - displayFunc := func(s styles.Style) string { - t := theme.CurrentTheme() - base := s.Foreground(t.Text()).Render - muted := s.Foreground(t.TextMuted()).Render - display := base(lastPart) - - uriParts := strings.Split(sym.Location.Uri, "/") - lastTwoParts := uriParts[len(uriParts)-2:] - joined := strings.Join(lastTwoParts, "/") - display += muted(fmt.Sprintf(" %s", joined)) - - display += muted(fmt.Sprintf(":L%d-%d", start, end)) - return display - } - - value := 
fmt.Sprintf("%s?start=%d&end=%d", sym.Location.Uri, start, end) - - item := CompletionSuggestion{ - Display: displayFunc, - Value: value, - ProviderID: cg.GetId(), - RawData: sym, - } - items = append(items, item) - } - - return items, nil -} - -func NewSymbolsContextGroup(app *app.App) CompletionProvider { - return &symbolsContextGroup{ - app: app, - } -} diff --git a/packages/tui/internal/components/chat/cache.go b/packages/tui/internal/components/chat/cache.go index 454f1a5a..1586c2cc 100644 --- a/packages/tui/internal/components/chat/cache.go +++ b/packages/tui/internal/components/chat/cache.go @@ -1,28 +1,28 @@ package chat import ( + "crypto/sha256" "encoding/hex" "fmt" - "hash/fnv" "sync" ) -// PartCache caches rendered messages to avoid re-rendering -type PartCache struct { +// MessageCache caches rendered messages to avoid re-rendering +type MessageCache struct { mu sync.RWMutex cache map[string]string } -// NewPartCache creates a new message cache -func NewPartCache() *PartCache { - return &PartCache{ +// NewMessageCache creates a new message cache +func NewMessageCache() *MessageCache { + return &MessageCache{ cache: make(map[string]string), } } // generateKey creates a unique key for a message based on its content and rendering parameters -func (c *PartCache) GenerateKey(params ...any) string { - h := fnv.New64a() +func (c *MessageCache) GenerateKey(params ...any) string { + h := sha256.New() for _, param := range params { h.Write(fmt.Appendf(nil, ":%v", param)) } @@ -30,7 +30,7 @@ func (c *PartCache) GenerateKey(params ...any) string { } // Get retrieves a cached rendered message -func (c *PartCache) Get(key string) (string, bool) { +func (c *MessageCache) Get(key string) (string, bool) { c.mu.RLock() defer c.mu.RUnlock() @@ -39,14 +39,14 @@ func (c *PartCache) Get(key string) (string, bool) { } // Set stores a rendered message in the cache -func (c *PartCache) Set(key string, content string) { +func (c *MessageCache) Set(key string, content string) { c.mu.Lock() defer c.mu.Unlock() c.cache[key] = content } // Clear removes all entries from the cache -func (c *PartCache) Clear() { +func (c *MessageCache) Clear() { c.mu.Lock() defer c.mu.Unlock() @@ -54,7 +54,7 @@ func (c *PartCache) Clear() { } // Size returns the number of cached entries -func (c *PartCache) Size() int { +func (c *MessageCache) Size() int { c.mu.RLock() defer c.mu.RUnlock() diff --git a/packages/tui/internal/components/chat/editor.go b/packages/tui/internal/components/chat/editor.go index ef129765..669ef47d 100644 --- a/packages/tui/internal/components/chat/editor.go +++ b/packages/tui/internal/components/chat/editor.go @@ -1,25 +1,18 @@ package chat import ( - "encoding/base64" "fmt" "log/slog" - "net/url" - "os" - "path/filepath" - "strconv" "strings" "github.com/charmbracelet/bubbles/v2/spinner" tea "github.com/charmbracelet/bubbletea/v2" "github.com/charmbracelet/lipgloss/v2" - "github.com/google/uuid" - "github.com/sst/opencode-sdk-go" "github.com/sst/opencode/internal/app" - "github.com/sst/opencode/internal/clipboard" "github.com/sst/opencode/internal/commands" "github.com/sst/opencode/internal/components/dialog" "github.com/sst/opencode/internal/components/textarea" + "github.com/sst/opencode/internal/image" "github.com/sst/opencode/internal/styles" "github.com/sst/opencode/internal/theme" "github.com/sst/opencode/internal/util" @@ -27,11 +20,10 @@ import ( type EditorComponent interface { tea.Model - tea.ViewModel - Content() string + View(width int) string + Content(width int) string Lines() int 
Value() string - Length() int Focused() bool Focus() (tea.Model, tea.Cmd) Blur() @@ -39,19 +31,15 @@ type EditorComponent interface { Clear() (tea.Model, tea.Cmd) Paste() (tea.Model, tea.Cmd) Newline() (tea.Model, tea.Cmd) - SetValue(value string) - SetValueWithAttachments(value string) SetInterruptKeyInDebounce(inDebounce bool) - SetExitKeyInDebounce(inDebounce bool) } type editorComponent struct { app *app.App - width int textarea textarea.Model + attachments []app.Attachment spinner spinner.Model interruptKeyInDebounce bool - exitKeyInDebounce bool } func (m *editorComponent) Init() tea.Cmd { @@ -63,9 +51,6 @@ func (m *editorComponent) Update(msg tea.Msg) (tea.Model, tea.Cmd) { var cmd tea.Cmd switch msg := msg.(type) { - case tea.WindowSizeMsg: - m.width = msg.Width - 4 - return m, nil case spinner.TickMsg: m.spinner, cmd = m.spinner.Update(msg) return m, cmd @@ -76,93 +61,29 @@ func (m *editorComponent) Update(msg tea.Msg) (tea.Model, tea.Cmd) { cmds = append(cmds, cmd) return m, tea.Batch(cmds...) } - case tea.PasteMsg: - text := string(msg) - text = strings.ReplaceAll(text, "\\", "") - text, err := strconv.Unquote(`"` + text + `"`) - if err != nil { - slog.Error("Failed to unquote text", "error", err) - m.textarea.InsertRunesFromUserInput([]rune(msg)) - return m, nil - } - if _, err := os.Stat(text); err != nil { - slog.Error("Failed to paste file", "error", err) - m.textarea.InsertRunesFromUserInput([]rune(msg)) - return m, nil - } - - filePath := text - - attachment := m.createAttachmentFromFile(filePath) - if attachment == nil { - m.textarea.InsertRunesFromUserInput([]rune(msg)) - return m, nil - } - - m.textarea.InsertAttachment(attachment) - m.textarea.InsertString(" ") - case tea.ClipboardMsg: - text := string(msg) - m.textarea.InsertRunesFromUserInput([]rune(text)) case dialog.ThemeSelectedMsg: - m.textarea = updateTextareaStyles(m.textarea) + m.textarea = createTextArea(&m.textarea) m.spinner = createSpinner() return m, tea.Batch(m.spinner.Tick, m.textarea.Focus()) case dialog.CompletionSelectedMsg: - switch msg.Item.ProviderID { - case "commands": - commandName := strings.TrimPrefix(msg.Item.Value, "/") + if msg.IsCommand { + commandName := strings.TrimPrefix(msg.CompletionValue, "/") updated, cmd := m.Clear() m = updated.(*editorComponent) cmds = append(cmds, cmd) cmds = append(cmds, util.CmdHandler(commands.ExecuteCommandMsg(m.app.Commands[commands.CommandName(commandName)]))) return m, tea.Batch(cmds...) - case "files": - atIndex := m.textarea.LastRuneIndex('@') - if atIndex == -1 { - // Should not happen, but as a fallback, just insert. - m.textarea.InsertString(msg.Item.Value + " ") - return m, nil + } else { + existingValue := m.textarea.Value() + + // Replace the current token (after last space) + lastSpaceIndex := strings.LastIndex(existingValue, " ") + if lastSpaceIndex == -1 { + m.textarea.SetValue(msg.CompletionValue + " ") + } else { + modifiedValue := existingValue[:lastSpaceIndex+1] + msg.CompletionValue + m.textarea.SetValue(modifiedValue + " ") } - - // The range to replace is from the '@' up to the current cursor position. - // Replace the search term (e.g., "@search") with an empty string first. - cursorCol := m.textarea.CursorColumn() - m.textarea.ReplaceRange(atIndex, cursorCol, "") - - // Now, insert the attachment at the position where the '@' was. - // The cursor is now at `atIndex` after the replacement. 
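// For reference, a minimal standalone sketch of the token replacement that the
// new CompletionSelectedMsg branch above performs for non-command completions:
// everything after the last space is treated as the token being completed and
// is swapped for the completion value. replaceLastToken is an illustrative
// helper name, not part of the diff.
package main

import (
	"fmt"
	"strings"
)

func replaceLastToken(existing, completion string) string {
	lastSpace := strings.LastIndex(existing, " ")
	if lastSpace == -1 {
		// The whole input is the token being completed.
		return completion + " "
	}
	// Keep everything up to and including the last space, then append the completion.
	return existing[:lastSpace+1] + completion + " "
}

func main() {
	fmt.Println(replaceLastToken("fix the bug in mess", "messages.go"))
	// -> "fix the bug in messages.go "
}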
- filePath := msg.Item.Value - attachment := m.createAttachmentFromPath(filePath) - m.textarea.InsertAttachment(attachment) - m.textarea.InsertString(" ") - return m, nil - case "symbols": - atIndex := m.textarea.LastRuneIndex('@') - if atIndex == -1 { - // Should not happen, but as a fallback, just insert. - m.textarea.InsertString(msg.Item.Value + " ") - return m, nil - } - - cursorCol := m.textarea.CursorColumn() - m.textarea.ReplaceRange(atIndex, cursorCol, "") - - symbol := msg.Item.RawData.(opencode.Symbol) - parts := strings.Split(symbol.Name, ".") - lastPart := parts[len(parts)-1] - attachment := &textarea.Attachment{ - ID: uuid.NewString(), - Display: "@" + lastPart, - URL: msg.Item.Value, - Filename: lastPart, - MediaType: "text/plain", - } - m.textarea.InsertAttachment(attachment) - m.textarea.InsertString(" ") - return m, nil - default: - slog.Debug("Unknown provider", "provider", msg.Item.ProviderID) return m, nil } } @@ -176,12 +97,7 @@ func (m *editorComponent) Update(msg tea.Msg) (tea.Model, tea.Cmd) { return m, tea.Batch(cmds...) } -func (m *editorComponent) Content() string { - width := m.width - if m.app.Session.ID == "" { - width = min(width, 80) - } - +func (m *editorComponent) Content(width int) string { t := theme.CurrentTheme() base := styles.NewStyle().Foreground(t.Text()).Background(t.Background()).Render muted := styles.NewStyle().Foreground(t.TextMuted()).Background(t.Background()).Render @@ -196,38 +112,23 @@ func (m *editorComponent) Content() string { prompt, m.textarea.View(), ) - borderForeground := t.Border() - if m.app.IsLeaderSequence { - borderForeground = t.Accent() - } textarea = styles.NewStyle(). Background(t.BackgroundElement()). Width(width). PaddingTop(1). PaddingBottom(1). BorderStyle(lipgloss.ThickBorder()). - BorderForeground(borderForeground). + BorderForeground(t.Border()). BorderBackground(t.Background()). BorderLeft(true). BorderRight(true). 
Render(textarea) hint := base(m.getSubmitKeyText()) + muted(" send ") - if m.exitKeyInDebounce { - keyText := m.getExitKeyText() - hint = base(keyText+" again") + muted(" to exit") - } else if m.app.IsBusy() { + if m.app.IsBusy() { keyText := m.getInterruptKeyText() if m.interruptKeyInDebounce { - hint = muted( - "working", - ) + m.spinner.View() + muted( - " ", - ) + base( - keyText+" again", - ) + muted( - " interrupt", - ) + hint = muted("working") + m.spinner.View() + muted(" ") + base(keyText+" again") + muted(" interrupt") } else { hint = muted("working") + m.spinner.View() + muted(" ") + base(keyText) + muted(" interrupt") } @@ -248,12 +149,7 @@ func (m *editorComponent) Content() string { return content } -func (m *editorComponent) View() string { - width := m.width - if m.app.Session.ID == "" { - width = min(width, 80) - } - +func (m *editorComponent) View(width int) string { if m.Lines() > 1 { return lipgloss.Place( width, @@ -264,7 +160,7 @@ func (m *editorComponent) View() string { styles.WhitespaceStyle(theme.CurrentTheme().Background()), ) } - return m.Content() + return m.Content(width) } func (m *editorComponent) Focused() bool { @@ -287,47 +183,26 @@ func (m *editorComponent) Value() string { return m.textarea.Value() } -func (m *editorComponent) Length() int { - return m.textarea.Length() -} - func (m *editorComponent) Submit() (tea.Model, tea.Cmd) { value := strings.TrimSpace(m.Value()) if value == "" { return m, nil } - - switch value { - case "exit", "quit", "q", ":q": - return m, tea.Quit - } - if len(value) > 0 && value[len(value)-1] == '\\' { // If the last character is a backslash, remove it and add a newline - backslashCol := m.textarea.CurrentRowLength() - 1 - m.textarea.ReplaceRange(backslashCol, backslashCol+1, "") - m.textarea.InsertString("\n") + m.textarea.SetValue(value[:len(value)-1] + "\n") return m, nil } var cmds []tea.Cmd - - attachments := m.textarea.GetAttachments() - fileParts := make([]opencode.FilePartInputParam, 0) - for _, attachment := range attachments { - fileParts = append(fileParts, opencode.FilePartInputParam{ - Type: opencode.F(opencode.FilePartInputTypeFile), - Mime: opencode.F(attachment.MediaType), - URL: opencode.F(attachment.URL), - Filename: opencode.F(attachment.Filename), - }) - } - updated, cmd := m.Clear() m = updated.(*editorComponent) cmds = append(cmds, cmd) - cmds = append(cmds, util.CmdHandler(app.SendMsg{Text: value, Attachments: fileParts})) + attachments := m.attachments + m.attachments = nil + + cmds = append(cmds, util.CmdHandler(app.SendMsg{Text: value, Attachments: attachments})) return m, tea.Batch(cmds...) 
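// A small sketch of the Submit preprocessing shown above, assuming only what
// the diff shows: a value ending in a backslash is not sent; the backslash is
// replaced with a newline so the user keeps typing. normalizeSubmit and its
// two return values are illustrative, not the component's API.
package main

import (
	"fmt"
	"strings"
)

// normalizeSubmit reports whether the value should be sent now (ok) and the
// text to either send or put back into the editor.
func normalizeSubmit(value string) (text string, ok bool) {
	value = strings.TrimSpace(value)
	if value == "" {
		return "", false
	}
	if value[len(value)-1] == '\\' {
		// Trailing backslash: continue the message on a new line instead of sending.
		return value[:len(value)-1] + "\n", false
	}
	return value, true
}

func main() {
	fmt.Println(normalizeSubmit(`continue this \`)) // "continue this \n", not sent
	fmt.Println(normalizeSubmit("ship it"))         // "ship it", sent
}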
} @@ -337,31 +212,19 @@ func (m *editorComponent) Clear() (tea.Model, tea.Cmd) { } func (m *editorComponent) Paste() (tea.Model, tea.Cmd) { - imageBytes := clipboard.Read(clipboard.FmtImage) - if imageBytes != nil { - attachmentCount := len(m.textarea.GetAttachments()) - attachmentIndex := attachmentCount + 1 - base64EncodedFile := base64.StdEncoding.EncodeToString(imageBytes) - attachment := &textarea.Attachment{ - ID: uuid.NewString(), - MediaType: "image/png", - Display: fmt.Sprintf("[Image #%d]", attachmentIndex), - Filename: fmt.Sprintf("image-%d.png", attachmentIndex), - URL: fmt.Sprintf("data:image/png;base64,%s", base64EncodedFile), - } - m.textarea.InsertAttachment(attachment) - m.textarea.InsertString(" ") + imageBytes, text, err := image.GetImageFromClipboard() + if err != nil { + slog.Error(err.Error()) return m, nil } - - textBytes := clipboard.Read(clipboard.FmtText) - if textBytes != nil { - m.textarea.InsertRunesFromUserInput([]rune(string(textBytes))) - return m, nil + if len(imageBytes) != 0 { + attachmentName := fmt.Sprintf("clipboard-image-%d", len(m.attachments)) + attachment := app.Attachment{FilePath: attachmentName, FileName: attachmentName, Content: imageBytes, MimeType: "image/png"} + m.attachments = append(m.attachments, attachment) + } else { + m.textarea.SetValue(m.textarea.Value() + text) } - - // fallback to reading the clipboard using OSC52 - return m, tea.ReadClipboard + return m, nil } func (m *editorComponent) Newline() (tea.Model, tea.Cmd) { @@ -373,46 +236,6 @@ func (m *editorComponent) SetInterruptKeyInDebounce(inDebounce bool) { m.interruptKeyInDebounce = inDebounce } -func (m *editorComponent) SetValue(value string) { - m.textarea.SetValue(value) -} - -func (m *editorComponent) SetValueWithAttachments(value string) { - m.textarea.Reset() - - i := 0 - for i < len(value) { - // Check if filepath and add attachment - if value[i] == '@' { - start := i + 1 - end := start - for end < len(value) && value[end] != ' ' && value[end] != '\t' && value[end] != '\n' && value[end] != '\r' { - end++ - } - - if end > start { - filePath := value[start:end] - if _, err := os.Stat(filePath); err == nil { - attachment := m.createAttachmentFromFile(filePath) - if attachment != nil { - m.textarea.InsertAttachment(attachment) - i = end - continue - } - } - } - } - - // Not a valid file path, insert the character normally - m.textarea.InsertRune(rune(value[i])) - i++ - } -} - -func (m *editorComponent) SetExitKeyInDebounce(inDebounce bool) { - m.exitKeyInDebounce = inDebounce -} - func (m *editorComponent) getInterruptKeyText() string { return m.app.Commands[commands.SessionInterruptCommand].Keys()[0] } @@ -421,39 +244,34 @@ func (m *editorComponent) getSubmitKeyText() string { return m.app.Commands[commands.InputSubmitCommand].Keys()[0] } -func (m *editorComponent) getExitKeyText() string { - return m.app.Commands[commands.AppExitCommand].Keys()[0] -} - -func updateTextareaStyles(ta textarea.Model) textarea.Model { +func createTextArea(existing *textarea.Model) textarea.Model { t := theme.CurrentTheme() bgColor := t.BackgroundElement() textColor := t.Text() textMutedColor := t.TextMuted() + ta := textarea.New() + ta.Styles.Blurred.Base = styles.NewStyle().Foreground(textColor).Background(bgColor).Lipgloss() ta.Styles.Blurred.CursorLine = styles.NewStyle().Background(bgColor).Lipgloss() - ta.Styles.Blurred.Placeholder = styles.NewStyle(). - Foreground(textMutedColor). - Background(bgColor). 
- Lipgloss() + ta.Styles.Blurred.Placeholder = styles.NewStyle().Foreground(textMutedColor).Background(bgColor).Lipgloss() ta.Styles.Blurred.Text = styles.NewStyle().Foreground(textColor).Background(bgColor).Lipgloss() ta.Styles.Focused.Base = styles.NewStyle().Foreground(textColor).Background(bgColor).Lipgloss() ta.Styles.Focused.CursorLine = styles.NewStyle().Background(bgColor).Lipgloss() - ta.Styles.Focused.Placeholder = styles.NewStyle(). - Foreground(textMutedColor). - Background(bgColor). - Lipgloss() + ta.Styles.Focused.Placeholder = styles.NewStyle().Foreground(textMutedColor).Background(bgColor).Lipgloss() ta.Styles.Focused.Text = styles.NewStyle().Foreground(textColor).Background(bgColor).Lipgloss() - ta.Styles.Attachment = styles.NewStyle(). - Foreground(t.Secondary()). - Background(bgColor). - Lipgloss() - ta.Styles.SelectedAttachment = styles.NewStyle(). - Foreground(t.Text()). - Background(t.Secondary()). - Lipgloss() ta.Styles.Cursor.Color = t.Primary() + + ta.Prompt = " " + ta.ShowLineNumbers = false + ta.CharLimit = -1 + + if existing != nil { + ta.SetValue(existing.Value()) + // ta.SetWidth(existing.Width()) + ta.SetHeight(existing.Height()) + } + return ta } @@ -473,85 +291,12 @@ func createSpinner() spinner.Model { func NewEditorComponent(app *app.App) EditorComponent { s := createSpinner() + ta := createTextArea(nil) - ta := textarea.New() - ta.Prompt = " " - ta.ShowLineNumbers = false - ta.CharLimit = -1 - ta = updateTextareaStyles(ta) - - m := &editorComponent{ + return &editorComponent{ app: app, textarea: ta, spinner: s, interruptKeyInDebounce: false, } - - return m -} - -func getMediaTypeFromExtension(ext string) string { - switch strings.ToLower(ext) { - case ".jpg": - return "image/jpeg" - case ".png", ".jpeg", ".gif", ".webp": - return "image/" + ext[1:] - case ".pdf": - return "application/pdf" - default: - return "text/plain" - } -} - -func (m *editorComponent) createAttachmentFromFile(filePath string) *textarea.Attachment { - ext := strings.ToLower(filepath.Ext(filePath)) - mediaType := getMediaTypeFromExtension(ext) - - // For text files, create a simple file reference - if mediaType == "text/plain" { - return &textarea.Attachment{ - ID: uuid.NewString(), - Display: "@" + filePath, - URL: fmt.Sprintf("file://./%s", filePath), - Filename: filePath, - MediaType: mediaType, - } - } - - // For binary files (images, PDFs), read and encode - fileBytes, err := os.ReadFile(filePath) - if err != nil { - slog.Error("Failed to read file", "error", err) - return nil - } - - base64EncodedFile := base64.StdEncoding.EncodeToString(fileBytes) - url := fmt.Sprintf("data:%s;base64,%s", mediaType, base64EncodedFile) - attachmentCount := len(m.textarea.GetAttachments()) - attachmentIndex := attachmentCount + 1 - label := "File" - if strings.HasPrefix(mediaType, "image/") { - label = "Image" - } - - return &textarea.Attachment{ - ID: uuid.NewString(), - MediaType: mediaType, - Display: fmt.Sprintf("[%s #%d]", label, attachmentIndex), - URL: url, - Filename: filePath, - } -} - -func (m *editorComponent) createAttachmentFromPath(filePath string) *textarea.Attachment { - extension := filepath.Ext(filePath) - mediaType := getMediaTypeFromExtension(extension) - - return &textarea.Attachment{ - ID: uuid.NewString(), - Display: "@" + filePath, - URL: fmt.Sprintf("file://./%s", url.PathEscape(filePath)), - Filename: filePath, - MediaType: mediaType, - } } diff --git a/packages/tui/internal/components/chat/message.go b/packages/tui/internal/components/chat/message.go index 
d3263b05..4dde09ea 100644 --- a/packages/tui/internal/components/chat/message.go +++ b/packages/tui/internal/components/chat/message.go @@ -9,14 +9,15 @@ import ( "github.com/charmbracelet/lipgloss/v2" "github.com/charmbracelet/lipgloss/v2/compat" - "github.com/charmbracelet/x/ansi" - "github.com/muesli/reflow/truncate" "github.com/sst/opencode-sdk-go" "github.com/sst/opencode/internal/app" + "github.com/sst/opencode/internal/commands" "github.com/sst/opencode/internal/components/diff" + "github.com/sst/opencode/internal/layout" "github.com/sst/opencode/internal/styles" "github.com/sst/opencode/internal/theme" "github.com/sst/opencode/internal/util" + "github.com/tidwall/gjson" "golang.org/x/text/cases" "golang.org/x/text/language" ) @@ -109,6 +110,7 @@ func WithPaddingBottom(padding int) renderingOption { func renderContentBlock( app *app.App, content string, + highlight bool, width int, options ...renderingOption, ) string { @@ -133,6 +135,7 @@ func renderContentBlock( style := styles.NewStyle(). Foreground(renderer.textColor). Background(t.BackgroundPanel()). + Width(width). PaddingTop(renderer.paddingTop). PaddingBottom(renderer.paddingBottom). PaddingLeft(renderer.paddingLeft). @@ -157,6 +160,18 @@ func renderContentBlock( BorderRightBackground(t.Background()) } + if highlight { + style = style. + BorderLeftForeground(borderColor). + BorderRightForeground(borderColor) + } + } + + if highlight { + style = style. + Foreground(t.Text()). + Background(t.BackgroundElement()). + Bold(true) } content = style.Render(content) @@ -171,45 +186,48 @@ func renderContentBlock( } } + if highlight { + copy := app.Key(commands.MessagesCopyCommand) + // revert := app.Key(commands.MessagesRevertCommand) + + background := t.Background() + header := layout.Render( + layout.FlexOptions{ + Background: &background, + Direction: layout.Row, + Justify: layout.JustifyCenter, + Align: layout.AlignStretch, + Width: width - 2, + Gap: 5, + }, + layout.FlexItem{ + View: copy, + }, + // layout.FlexItem{ + // View: revert, + // }, + ) + header = styles.NewStyle().Background(t.Background()).Padding(0, 1).Render(header) + + content = "\n\n\n" + header + "\n\n" + content + "\n\n\n" + } + return content } func renderText( app *app.App, - message opencode.MessageUnion, + message opencode.Message, text string, author string, showToolDetails bool, + highlight bool, width int, - extra string, - toolCalls ...opencode.ToolPart, + toolCalls ...opencode.ToolInvocationPart, ) string { t := theme.CurrentTheme() - var ts time.Time - backgroundColor := t.BackgroundPanel() - var content string - switch casted := message.(type) { - case opencode.AssistantMessage: - ts = time.UnixMilli(int64(casted.Time.Created)) - content = util.ToMarkdown(text, width, backgroundColor) - case opencode.UserMessage: - ts = time.UnixMilli(int64(casted.Time.Created)) - base := styles.NewStyle().Foreground(t.Text()).Background(backgroundColor) - words := strings.Fields(text) - for i, word := range words { - if strings.HasPrefix(word, "@") { - words[i] = base.Foreground(t.Secondary()).Render(word + " ") - } else { - words[i] = base.Render(word + " ") - } - } - text = strings.Join(words, "") - text = ansi.WordwrapWc(text, width-6, " -") - content = base.Width(width - 6).Render(text) - } - - timestamp := ts. + timestamp := time.UnixMilli(int64(message.Metadata.Time.Created)). Local(). 
Format("02 Jan 2006 03:04 PM") if time.Now().Format("02 Jan 2006") == timestamp[:11] { @@ -219,12 +237,30 @@ func renderText( info := fmt.Sprintf("%s (%s)", author, timestamp) info = styles.NewStyle().Foreground(t.TextMuted()).Render(info) + backgroundColor := t.BackgroundPanel() + if highlight { + backgroundColor = t.BackgroundElement() + } + messageStyle := styles.NewStyle().Background(backgroundColor) + if message.Role == opencode.MessageRoleUser { + messageStyle = messageStyle.Width(width - 6) + } + + content := messageStyle.Render(text) + if message.Role == opencode.MessageRoleAssistant { + content = util.ToMarkdown(text, width, backgroundColor) + } + if !showToolDetails && toolCalls != nil && len(toolCalls) > 0 { content = content + "\n\n" for _, toolCall := range toolCalls { - title := renderToolTitle(toolCall, width) + title := renderToolTitle(toolCall, message.Metadata, width) + metadata := opencode.MessageMetadataTool{} + if _, ok := message.Metadata.Tool[toolCall.ToolInvocation.ToolCallID]; ok { + metadata = message.Metadata.Tool[toolCall.ToolInvocation.ToolCallID] + } style := styles.NewStyle() - if toolCall.State.Status == opencode.ToolPartStateStatusError { + if _, ok := metadata.ExtraFields["error"]; ok { style = style.Foreground(t.Error()) } title = style.Render(title) @@ -233,25 +269,23 @@ func renderText( } } - sections := []string{content, info} - if extra != "" { - sections = append(sections, "\n"+extra) - } - content = strings.Join(sections, "\n") + content = strings.Join([]string{content, info}, "\n") - switch message.(type) { - case opencode.UserMessage: + switch message.Role { + case opencode.MessageRoleUser: return renderContentBlock( app, content, + highlight, width, WithTextColor(t.Text()), WithBorderColorRight(t.Secondary()), ) - case opencode.AssistantMessage: + case opencode.MessageRoleAssistant: return renderContentBlock( app, content, + highlight, width, WithBorderColor(t.Accent()), ) @@ -261,31 +295,39 @@ func renderText( func renderToolDetails( app *app.App, - toolCall opencode.ToolPart, + toolCall opencode.ToolInvocationPart, + messageMetadata opencode.MessageMetadata, + highlight bool, width int, ) string { ignoredTools := []string{"todoread"} - if slices.Contains(ignoredTools, toolCall.Tool) { + if slices.Contains(ignoredTools, toolCall.ToolInvocation.ToolName) { return "" } - if toolCall.State.Status == opencode.ToolPartStateStatusPending { - title := renderToolTitle(toolCall, width) - return renderContentBlock(app, title, width) + toolCallID := toolCall.ToolInvocation.ToolCallID + metadata := opencode.MessageMetadataTool{} + if _, ok := messageMetadata.Tool[toolCallID]; ok { + metadata = messageMetadata.Tool[toolCallID] } var result *string - if toolCall.State.Output != "" { - result = &toolCall.State.Output + if toolCall.ToolInvocation.Result != "" { + result = &toolCall.ToolInvocation.Result } - toolInputMap := make(map[string]any) - if toolCall.State.Input != nil { - value := toolCall.State.Input + if toolCall.ToolInvocation.State == "partial-call" { + title := renderToolTitle(toolCall, messageMetadata, width) + return renderContentBlock(app, title, highlight, width) + } + + toolArgsMap := make(map[string]any) + if toolCall.ToolInvocation.Args != nil { + value := toolCall.ToolInvocation.Args if m, ok := value.(map[string]any); ok { - toolInputMap = m - keys := make([]string, 0, len(toolInputMap)) - for key := range toolInputMap { + toolArgsMap = m + keys := make([]string, 0, len(toolArgsMap)) + for key := range toolArgsMap { keys = append(keys, 
key) } slices.Sort(keys) @@ -293,151 +335,150 @@ func renderToolDetails( } body := "" + finished := result != nil && *result != "" t := theme.CurrentTheme() backgroundColor := t.BackgroundPanel() borderColor := t.BackgroundPanel() - defaultStyle := styles.NewStyle().Background(backgroundColor).Width(width - 6).Render + if highlight { + backgroundColor = t.BackgroundElement() + borderColor = t.BorderActive() + } - if toolCall.State.Metadata != nil { - metadata := toolCall.State.Metadata.(map[string]any) - switch toolCall.Tool { - case "read": - var preview any - if metadata != nil { - preview = metadata["preview"] - } - if preview != nil && toolInputMap["filePath"] != nil { - filename := toolInputMap["filePath"].(string) - body = preview.(string) - body = util.RenderFile(filename, body, width, util.WithTruncate(6)) - } - case "edit": - if filename, ok := toolInputMap["filePath"].(string); ok { - var diffField any - if metadata != nil { - diffField = metadata["diff"] + switch toolCall.ToolInvocation.ToolName { + case "read": + preview := metadata.ExtraFields["preview"] + if preview != nil && toolArgsMap["filePath"] != nil { + filename := toolArgsMap["filePath"].(string) + body = preview.(string) + body = util.RenderFile(filename, body, width, util.WithTruncate(6)) + } + case "edit": + if filename, ok := toolArgsMap["filePath"].(string); ok { + diffField := metadata.ExtraFields["diff"] + if diffField != nil { + patch := diffField.(string) + var formattedDiff string + formattedDiff, _ = diff.FormatUnifiedDiff( + filename, + patch, + diff.WithWidth(width-2), + ) + body = strings.TrimSpace(formattedDiff) + style := styles.NewStyle(). + Background(backgroundColor). + Foreground(t.TextMuted()). + Padding(1, 2). + Width(width - 4) + if highlight { + style = style.Foreground(t.Text()).Bold(true) } - if diffField != nil { - patch := diffField.(string) - var formattedDiff string - if width < 120 { - formattedDiff, _ = diff.FormatUnifiedDiff( - filename, - patch, - diff.WithWidth(width-2), - ) - } else { - formattedDiff, _ = diff.FormatDiff( - filename, - patch, - diff.WithWidth(width-2), - ) - } - body = strings.TrimSpace(formattedDiff) - style := styles.NewStyle(). - Background(backgroundColor). - Foreground(t.TextMuted()). - Padding(1, 2). 
- Width(width - 4) - if diagnostics := renderDiagnostics(metadata, filename, backgroundColor, width-6); diagnostics != "" { - diagnostics = style.Render(diagnostics) - body += "\n" + diagnostics - } + if diagnostics := renderDiagnostics(metadata, filename); diagnostics != "" { + diagnostics = style.Render(diagnostics) + body += "\n" + diagnostics + } - title := renderToolTitle(toolCall, width) - title = style.Render(title) - content := title + "\n" + body - content = renderContentBlock( - app, - content, - width, - WithPadding(0), - WithBorderColor(borderColor), - ) - return content + title := renderToolTitle(toolCall, messageMetadata, width) + title = style.Render(title) + content := title + "\n" + body + content = renderContentBlock( + app, + content, + highlight, + width, + WithPadding(0), + WithBorderColor(borderColor), + ) + return content + } + } + case "write": + if filename, ok := toolArgsMap["filePath"].(string); ok { + if content, ok := toolArgsMap["content"].(string); ok { + body = util.RenderFile(filename, content, width) + if diagnostics := renderDiagnostics(metadata, filename); diagnostics != "" { + body += "\n\n" + diagnostics } } - case "write": - if filename, ok := toolInputMap["filePath"].(string); ok { - if content, ok := toolInputMap["content"].(string); ok { - body = util.RenderFile(filename, content, width) - if diagnostics := renderDiagnostics(metadata, filename, backgroundColor, width-4); diagnostics != "" { - body += "\n\n" + diagnostics - } - } - } - case "bash": - stdout := metadata["stdout"] - if stdout != nil { - command := toolInputMap["command"].(string) - body = fmt.Sprintf("```console\n> %s\n%s```", command, stdout) - body = util.ToMarkdown(body, width, backgroundColor) - } - case "webfetch": - if format, ok := toolInputMap["format"].(string); ok && result != nil { - body = *result - body = util.TruncateHeight(body, 10) - if format == "html" || format == "markdown" { - body = util.ToMarkdown(body, width, backgroundColor) - } - } - case "todowrite": - todos := metadata["todos"] - if todos != nil { - for _, item := range todos.([]any) { - todo := item.(map[string]any) - content := todo["content"].(string) - switch todo["status"] { - case "completed": - body += fmt.Sprintf("- [x] %s\n", content) - case "cancelled": - // strike through cancelled todo - body += fmt.Sprintf("- [~] ~~%s~~\n", content) - case "in_progress": - // highlight in progress todo - body += fmt.Sprintf("- [ ] `%s`\n", content) - default: - body += fmt.Sprintf("- [ ] %s\n", content) - } - } - body = util.ToMarkdown(body, width, backgroundColor) - } - case "task": - summary := metadata["summary"] - if summary != nil { - toolcalls := summary.([]any) - steps := []string{} - for _, item := range toolcalls { - data, _ := json.Marshal(item) - var toolCall opencode.ToolPart - _ = json.Unmarshal(data, &toolCall) - step := renderToolTitle(toolCall, width) - step = "∟ " + step - steps = append(steps, step) - } - body = strings.Join(steps, "\n") - } - body = defaultStyle(body) - default: - if result == nil { - empty := "" - result = &empty - } + } + case "bash": + stdout := metadata.ExtraFields["stdout"] + if stdout != nil { + command := toolArgsMap["command"].(string) + body = fmt.Sprintf("```console\n> %s\n%s```", command, stdout) + body = util.ToMarkdown(body, width, backgroundColor) + } + case "webfetch": + if format, ok := toolArgsMap["format"].(string); ok && result != nil { body = *result body = util.TruncateHeight(body, 10) - body = defaultStyle(body) + if format == "html" || format == 
"markdown" { + body = util.ToMarkdown(body, width, backgroundColor) + } } + case "todowrite": + todos := metadata.JSON.ExtraFields["todos"] + if !todos.IsNull() && finished { + strTodos := todos.Raw() + todos := gjson.Parse(strTodos) + for _, todo := range todos.Array() { + content := todo.Get("content").String() + switch todo.Get("status").String() { + case "completed": + body += fmt.Sprintf("- [x] %s\n", content) + // case "in-progress": + // body += fmt.Sprintf("- [ ] %s\n", content) + default: + body += fmt.Sprintf("- [ ] %s\n", content) + } + } + body = util.ToMarkdown(body, width, backgroundColor) + } + case "task": + summary := metadata.JSON.ExtraFields["summary"] + if !summary.IsNull() { + strValue := summary.Raw() + toolcalls := gjson.Parse(strValue).Array() + + steps := []string{} + for _, toolcall := range toolcalls { + call := toolcall.Value().(map[string]any) + if toolInvocation, ok := call["toolInvocation"].(map[string]any); ok { + data, _ := json.Marshal(toolInvocation) + var toolCall opencode.ToolInvocationPart + _ = json.Unmarshal(data, &toolCall) + + if metadata, ok := call["metadata"].(map[string]any); ok { + data, _ = json.Marshal(metadata) + var toolMetadata opencode.MessageMetadataTool + _ = json.Unmarshal(data, &toolMetadata) + + step := renderToolTitle(toolCall, messageMetadata, width) + step = "∟ " + step + steps = append(steps, step) + } + } + } + body = strings.Join(steps, "\n") + } + default: + if result == nil { + empty := "" + result = &empty + } + body = *result + body = util.TruncateHeight(body, 10) } error := "" - if toolCall.State.Status == opencode.ToolPartStateStatusError { - error = toolCall.State.Error + if err, ok := metadata.ExtraFields["error"].(bool); ok && err { + if message, ok := metadata.ExtraFields["message"].(string); ok { + error = message + } } if error != "" { body = styles.NewStyle(). - Width(width - 6). Foreground(t.Error()). Background(backgroundColor). 
Render(error) @@ -446,22 +487,19 @@ func renderToolDetails( if body == "" && error == "" && result != nil { body = *result body = util.TruncateHeight(body, 10) - body = defaultStyle(body) } - if body == "" { - body = defaultStyle("") - } - - title := renderToolTitle(toolCall, width) + title := renderToolTitle(toolCall, messageMetadata, width) content := title + "\n\n" + body - return renderContentBlock(app, content, width, WithBorderColor(borderColor)) + return renderContentBlock(app, content, highlight, width, WithBorderColor(borderColor)) } func renderToolName(name string) string { switch name { case "webfetch": return "Fetch" + case "todowrite", "todoread": + return "Plan" default: normalizedName := name if after, ok := strings.CutPrefix(name, "opencode_"); ok { @@ -471,54 +509,21 @@ func renderToolName(name string) string { } } -func getTodoPhase(metadata map[string]any) string { - todos, ok := metadata["todos"].([]any) - if !ok || len(todos) == 0 { - return "Plan" - } - - counts := map[string]int{"pending": 0, "completed": 0} - for _, item := range todos { - if todo, ok := item.(map[string]any); ok { - if status, ok := todo["status"].(string); ok { - counts[status]++ - } - } - } - - total := len(todos) - switch { - case counts["pending"] == total: - return "Creating plan" - case counts["completed"] == total: - return "Completing plan" - default: - return "Updating plan" - } -} - -func getTodoTitle(toolCall opencode.ToolPart) string { - if toolCall.State.Status == opencode.ToolPartStateStatusCompleted { - if metadata, ok := toolCall.State.Metadata.(map[string]any); ok { - return getTodoPhase(metadata) - } - } - return "Plan" -} - func renderToolTitle( - toolCall opencode.ToolPart, + toolCall opencode.ToolInvocationPart, + messageMetadata opencode.MessageMetadata, width int, ) string { - if toolCall.State.Status == opencode.ToolPartStateStatusPending { - title := renderToolAction(toolCall.Tool) - return styles.NewStyle().Width(width - 6).Render(title) + // TODO: handle truncate to width + + if toolCall.ToolInvocation.State == "partial-call" { + return renderToolAction(toolCall.ToolInvocation.ToolName) } toolArgs := "" toolArgsMap := make(map[string]any) - if toolCall.State.Input != nil { - value := toolCall.State.Input + if toolCall.ToolInvocation.Args != nil { + value := toolCall.ToolInvocation.Args if m, ok := value.(map[string]any); ok { toolArgsMap = m @@ -536,8 +541,8 @@ func renderToolTitle( } } - title := renderToolName(toolCall.Tool) - switch toolCall.Tool { + title := renderToolName(toolCall.ToolInvocation.ToolName) + switch toolCall.ToolInvocation.ToolName { case "read": toolArgs = renderArgs(&toolArgsMap, "filePath") title = fmt.Sprintf("%s %s", title, toolArgs) @@ -552,23 +557,19 @@ func renderToolTitle( case "webfetch": toolArgs = renderArgs(&toolArgsMap, "url") title = fmt.Sprintf("%s %s", title, toolArgs) - case "todowrite": - title = getTodoTitle(toolCall) - case "todoread": - return "Plan" + case "todowrite", "todoread": + // title is just the tool name default: - toolName := renderToolName(toolCall.Tool) + toolName := renderToolName(toolCall.ToolInvocation.ToolName) title = fmt.Sprintf("%s %s", toolName, toolArgs) } - - title = truncate.StringWithTail(title, uint(width-6), "...") return title } func renderToolAction(name string) string { switch name { case "task": - return "Planning..." + return "Searching..." case "bash": return "Writing command..." 
case "edit": @@ -639,13 +640,8 @@ type Diagnostic struct { } // renderDiagnostics formats LSP diagnostics for display in the TUI -func renderDiagnostics( - metadata map[string]any, - filePath string, - backgroundColor compat.AdaptiveColor, - width int, -) string { - if diagnosticsData, ok := metadata["diagnostics"].(map[string]any); ok { +func renderDiagnostics(metadata opencode.MessageMetadataTool, filePath string) string { + if diagnosticsData, ok := metadata.ExtraFields["diagnostics"].(map[string]any); ok { if fileDiagnostics, ok := diagnosticsData[filePath].([]any); ok { var errorDiagnostics []string for _, diagInterface := range fileDiagnostics { @@ -680,15 +676,9 @@ func renderDiagnostics( var result strings.Builder for _, diagnostic := range errorDiagnostics { if result.Len() > 0 { - result.WriteString("\n\n") + result.WriteString("\n") } - diagnostic = ansi.WordwrapWc(diagnostic, width, " -") - result.WriteString( - styles.NewStyle(). - Background(backgroundColor). - Foreground(t.Error()). - Render(diagnostic), - ) + result.WriteString(styles.NewStyle().Foreground(t.Error()).Render(diagnostic)) } return result.String() } diff --git a/packages/tui/internal/components/chat/messages.go b/packages/tui/internal/components/chat/messages.go index acc7b887..52288078 100644 --- a/packages/tui/internal/components/chat/messages.go +++ b/packages/tui/internal/components/chat/messages.go @@ -1,7 +1,6 @@ package chat import ( - "fmt" "strings" "github.com/charmbracelet/bubbles/v2/viewport" @@ -10,8 +9,6 @@ import ( "github.com/sst/opencode-sdk-go" "github.com/sst/opencode/internal/app" "github.com/sst/opencode/internal/components/dialog" - "github.com/sst/opencode/internal/components/toast" - "github.com/sst/opencode/internal/layout" "github.com/sst/opencode/internal/styles" "github.com/sst/opencode/internal/theme" "github.com/sst/opencode/internal/util" @@ -19,30 +16,36 @@ import ( type MessagesComponent interface { tea.Model - tea.ViewModel + View(width, height int) string + SetWidth(width int) tea.Cmd PageUp() (tea.Model, tea.Cmd) PageDown() (tea.Model, tea.Cmd) HalfPageUp() (tea.Model, tea.Cmd) HalfPageDown() (tea.Model, tea.Cmd) + First() (tea.Model, tea.Cmd) + Last() (tea.Model, tea.Cmd) + Previous() (tea.Model, tea.Cmd) + Next() (tea.Model, tea.Cmd) ToolDetailsVisible() bool - GotoTop() (tea.Model, tea.Cmd) - GotoBottom() (tea.Model, tea.Cmd) - CopyLastMessage() (tea.Model, tea.Cmd) + Selected() string } type messagesComponent struct { - width, height int + width int app *app.App - header string viewport viewport.Model - cache *PartCache - loading bool - showToolDetails bool + cache *MessageCache rendering bool - dirty bool + showToolDetails bool tail bool partCount int lineCount int + selectedPart int + selectedText string +} +type renderFinishedMsg struct{} +type selectedMessagePartChangedMsg struct { + part int } type ToggleToolDetailsMsg struct{} @@ -51,61 +54,57 @@ func (m *messagesComponent) Init() tea.Cmd { return tea.Batch(m.viewport.Init()) } +func (m *messagesComponent) Selected() string { + return m.selectedText +} + func (m *messagesComponent) Update(msg tea.Msg) (tea.Model, tea.Cmd) { var cmds []tea.Cmd switch msg := msg.(type) { - case tea.WindowSizeMsg: - effectiveWidth := msg.Width - 4 - // Clear cache on resize since width affects rendering - if m.width != effectiveWidth { - m.cache.Clear() - } - m.width = effectiveWidth - m.height = msg.Height - 7 - m.viewport.SetWidth(m.width) - m.loading = true - return m, m.Reload() case app.SendMsg: m.viewport.GotoBottom() m.tail = 
true + m.selectedPart = -1 + return m, nil + case app.OptimisticMessageAddedMsg: + m.renderView(m.width) + if m.tail { + m.viewport.GotoBottom() + } return m, nil case dialog.ThemeSelectedMsg: m.cache.Clear() - m.loading = true + m.rendering = true return m, m.Reload() case ToggleToolDetailsMsg: m.showToolDetails = !m.showToolDetails + m.rendering = true return m, m.Reload() case app.SessionLoadedMsg, app.SessionClearedMsg: m.cache.Clear() m.tail = true - m.loading = true + m.rendering = true return m, m.Reload() - - case opencode.EventListResponseEventSessionUpdated: - if msg.Properties.Info.ID == m.app.Session.ID { - m.header = m.renderHeader() - } - case opencode.EventListResponseEventMessageUpdated: - if msg.Properties.Info.SessionID == m.app.Session.ID { - cmds = append(cmds, m.renderView()) - } - case opencode.EventListResponseEventMessagePartUpdated: - if msg.Properties.Part.SessionID == m.app.Session.ID { - cmds = append(cmds, m.renderView()) - } - case renderCompleteMsg: - m.partCount = msg.partCount - m.lineCount = msg.lineCount + case renderFinishedMsg: m.rendering = false - m.loading = false - m.viewport.SetHeight(m.height - lipgloss.Height(m.header)) - m.viewport.SetContent(msg.content) if m.tail { m.viewport.GotoBottom() } - if m.dirty { - cmds = append(cmds, m.renderView()) + case selectedMessagePartChangedMsg: + return m, m.Reload() + case opencode.EventListResponseEventSessionUpdated: + if msg.Properties.Info.ID == m.app.Session.ID { + m.renderView(m.width) + if m.tail { + m.viewport.GotoBottom() + } + } + case opencode.EventListResponseEventMessageUpdated: + if msg.Properties.Info.Metadata.SessionID == m.app.Session.ID { + m.renderView(m.width) + if m.tail { + m.viewport.GotoBottom() + } } } @@ -117,377 +116,224 @@ func (m *messagesComponent) Update(msg tea.Msg) (tea.Model, tea.Cmd) { return m, tea.Batch(cmds...) 
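// The renderView body that follows memoizes rendered blocks in the
// MessageCache introduced earlier in this diff; the cache key is a sha256
// hash over every parameter that affects rendering (message ID, part text,
// width, tool-detail visibility, whether the part is selected). A minimal
// standalone sketch of that keying scheme, with renderBlock standing in for
// the real renderText/renderToolDetails calls:
package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
)

func generateKey(params ...any) string {
	h := sha256.New()
	for _, p := range params {
		h.Write(fmt.Appendf(nil, ":%v", p))
	}
	return hex.EncodeToString(h.Sum(nil))
}

func main() {
	cache := map[string]string{}
	renderBlock := func(text string, width int) string { return fmt.Sprintf("[%d] %s", width, text) }

	key := generateKey("msg_123", "hello", 80, false)
	if _, ok := cache[key]; !ok {
		cache[key] = renderBlock("hello", 80) // cache miss: render once and store
	}
	fmt.Println(cache[key]) // [80] hello

	// Any change in a keyed parameter (here the width) produces a new key,
	// which is why the cache is cleared wholesale when the window is resized.
	fmt.Println(generateKey("msg_123", "hello", 120, false) == key) // false
}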
} -type renderCompleteMsg struct { - content string - partCount int - lineCount int -} +func (m *messagesComponent) renderView(width int) { + measure := util.Measure("messages.renderView") + defer measure("messageCount", len(m.app.Messages)) -func (m *messagesComponent) renderView() tea.Cmd { - m.header = m.renderHeader() + t := theme.CurrentTheme() + blocks := make([]string, 0) + m.partCount = 0 + m.lineCount = 0 - if m.rendering { - m.dirty = true - return func() tea.Msg { - return nil - } - } - m.dirty = false - m.rendering = true + orphanedToolCalls := make([]opencode.ToolInvocationPart, 0) - return func() tea.Msg { - measure := util.Measure("messages.renderView") - defer measure() + for _, message := range m.app.Messages { + var content string + var cached bool - t := theme.CurrentTheme() - blocks := make([]string, 0) - partCount := 0 - lineCount := 0 - - orphanedToolCalls := make([]opencode.ToolPart, 0) - - width := m.width // always use full width - - for _, message := range m.app.Messages { - var content string - var cached bool - - switch casted := message.Info.(type) { - case opencode.UserMessage: - for partIndex, part := range message.Parts { - switch part := part.(type) { - case opencode.TextPart: - if part.Synthetic { - continue - } - remainingParts := message.Parts[partIndex+1:] - fileParts := make([]opencode.FilePart, 0) - for _, part := range remainingParts { - switch part := part.(type) { - case opencode.FilePart: - fileParts = append(fileParts, part) - } - } - flexItems := []layout.FlexItem{} - if len(fileParts) > 0 { - fileStyle := styles.NewStyle().Background(t.BackgroundElement()).Foreground(t.TextMuted()).Padding(0, 1) - mediaTypeStyle := styles.NewStyle().Background(t.Secondary()).Foreground(t.BackgroundPanel()).Padding(0, 1) - for _, filePart := range fileParts { - mediaType := "" - switch filePart.Mime { - case "text/plain": - mediaType = "txt" - case "image/png", "image/jpeg", "image/gif", "image/webp": - mediaType = "img" - mediaTypeStyle = mediaTypeStyle.Background(t.Accent()) - case "application/pdf": - mediaType = "pdf" - mediaTypeStyle = mediaTypeStyle.Background(t.Primary()) - } - flexItems = append(flexItems, layout.FlexItem{ - View: mediaTypeStyle.Render(mediaType) + fileStyle.Render(filePart.Filename), - }) - } - } - bgColor := t.BackgroundPanel() - files := layout.Render( - layout.FlexOptions{ - Background: &bgColor, - Width: width - 6, - Direction: layout.Column, - }, - flexItems..., + switch message.Role { + case opencode.MessageRoleUser: + for _, part := range message.Parts { + switch part := part.AsUnion().(type) { + case opencode.TextPart: + key := m.cache.GenerateKey(message.ID, part.Text, width, m.selectedPart == m.partCount) + content, cached = m.cache.Get(key) + if !cached { + content = renderText( + m.app, + message, + part.Text, + m.app.Info.User, + m.showToolDetails, + m.partCount == m.selectedPart, + width, ) + m.cache.Set(key, content) + } + if content != "" { + m = m.updateSelected(content, part.Text) + blocks = append(blocks, content) + } + } + } - key := m.cache.GenerateKey(casted.ID, part.Text, width, files) + case opencode.MessageRoleAssistant: + hasTextPart := false + for partIndex, p := range message.Parts { + switch part := p.AsUnion().(type) { + case opencode.TextPart: + hasTextPart = true + finished := message.Metadata.Time.Completed > 0 + remainingParts := message.Parts[partIndex+1:] + toolCallParts := make([]opencode.ToolInvocationPart, 0) + + // sometimes tool calls happen without an assistant message + // these should be 
included in this assistant message as well + if len(orphanedToolCalls) > 0 { + toolCallParts = append(toolCallParts, orphanedToolCalls...) + orphanedToolCalls = make([]opencode.ToolInvocationPart, 0) + } + + remaining := true + for _, part := range remainingParts { + if !remaining { + break + } + switch part := part.AsUnion().(type) { + case opencode.TextPart: + // we only want tool calls associated with the current text part. + // if we hit another text part, we're done. + remaining = false + case opencode.ToolInvocationPart: + toolCallParts = append(toolCallParts, part) + if part.ToolInvocation.State != "result" { + // i don't think there's a case where a tool call isn't in result state + // and the message time is 0, but just in case + finished = false + } + } + } + + if finished { + key := m.cache.GenerateKey(message.ID, p.Text, width, m.showToolDetails, m.selectedPart == m.partCount) content, cached = m.cache.Get(key) if !cached { content = renderText( m.app, - message.Info, - part.Text, - m.app.Config.Username, + message, + p.Text, + message.Metadata.Assistant.ModelID, m.showToolDetails, + m.partCount == m.selectedPart, width, - files, - ) - content = lipgloss.PlaceHorizontal( - m.width, - lipgloss.Center, - content, - styles.WhitespaceStyle(t.Background()), + toolCallParts..., ) m.cache.Set(key, content) } - if content != "" { - partCount++ - lineCount += lipgloss.Height(content) + 1 - blocks = append(blocks, content) - } + } else { + content = renderText( + m.app, + message, + p.Text, + message.Metadata.Assistant.ModelID, + m.showToolDetails, + m.partCount == m.selectedPart, + width, + toolCallParts..., + ) } - } - - case opencode.AssistantMessage: - hasTextPart := false - for partIndex, p := range message.Parts { - switch part := p.(type) { - case opencode.TextPart: - hasTextPart = true - finished := part.Time.End > 0 - remainingParts := message.Parts[partIndex+1:] - toolCallParts := make([]opencode.ToolPart, 0) - - // sometimes tool calls happen without an assistant message - // these should be included in this assistant message as well - if len(orphanedToolCalls) > 0 { - toolCallParts = append(toolCallParts, orphanedToolCalls...) - orphanedToolCalls = make([]opencode.ToolPart, 0) + if content != "" { + m = m.updateSelected(content, p.Text) + blocks = append(blocks, content) + } + case opencode.ToolInvocationPart: + if !m.showToolDetails { + if !hasTextPart { + orphanedToolCalls = append(orphanedToolCalls, part) } + continue + } - remaining := true - for _, part := range remainingParts { - if !remaining { - break - } - switch part := part.(type) { - case opencode.TextPart: - // we only want tool calls associated with the current text part. - // if we hit another text part, we're done. 
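// A compact sketch of the grouping rule implemented just below: tool-call
// parts that follow an assistant text part are rendered with that text part,
// and the scan stops as soon as the next text part begins. The part type here
// is a simplified stand-in for the SDK's message part union.
package main

import "fmt"

type kind int

const (
	textKind kind = iota
	toolKind
)

type part struct {
	kind kind
	name string
}

// toolCallsFor collects the tool parts belonging to the text part at index i.
func toolCallsFor(parts []part, i int) []part {
	calls := []part{}
	for _, p := range parts[i+1:] {
		if p.kind == textKind {
			break // the next text part starts a new block
		}
		calls = append(calls, p)
	}
	return calls
}

func main() {
	parts := []part{
		{textKind, "let me check the file"},
		{toolKind, "read"},
		{toolKind, "grep"},
		{textKind, "here is the fix"},
		{toolKind, "edit"},
	}
	fmt.Println(len(toolCallsFor(parts, 0))) // 2 (read, grep)
	fmt.Println(len(toolCallsFor(parts, 3))) // 1 (edit)
}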
- remaining = false - case opencode.ToolPart: - toolCallParts = append(toolCallParts, part) - if part.State.Status != opencode.ToolPartStateStatusCompleted && part.State.Status != opencode.ToolPartStateStatusError { - // i don't think there's a case where a tool call isn't in result state - // and the message time is 0, but just in case - finished = false - } - } - } - - if finished { - key := m.cache.GenerateKey(casted.ID, part.Text, width, m.showToolDetails) - content, cached = m.cache.Get(key) - if !cached { - content = renderText( - m.app, - message.Info, - part.Text, - casted.ModelID, - m.showToolDetails, - width, - "", - toolCallParts..., - ) - content = lipgloss.PlaceHorizontal( - m.width, - lipgloss.Center, - content, - styles.WhitespaceStyle(t.Background()), - ) - m.cache.Set(key, content) - } - } else { - content = renderText( - m.app, - message.Info, - part.Text, - casted.ModelID, - m.showToolDetails, - width, - "", - toolCallParts..., - ) - content = lipgloss.PlaceHorizontal( - m.width, - lipgloss.Center, - content, - styles.WhitespaceStyle(t.Background()), - ) - } - if content != "" { - partCount++ - lineCount += lipgloss.Height(content) + 1 - blocks = append(blocks, content) - } - case opencode.ToolPart: - if !m.showToolDetails { - if !hasTextPart { - orphanedToolCalls = append(orphanedToolCalls, part) - } - continue - } - - if part.State.Status == opencode.ToolPartStateStatusCompleted || part.State.Status == opencode.ToolPartStateStatusError { - key := m.cache.GenerateKey(casted.ID, - part.ID, - m.showToolDetails, - width, - ) - content, cached = m.cache.Get(key) - if !cached { - content = renderToolDetails( - m.app, - part, - width, - ) - content = lipgloss.PlaceHorizontal( - m.width, - lipgloss.Center, - content, - styles.WhitespaceStyle(t.Background()), - ) - m.cache.Set(key, content) - } - } else { - // if the tool call isn't finished, don't cache + if part.ToolInvocation.State == "result" { + key := m.cache.GenerateKey(message.ID, + part.ToolInvocation.ToolCallID, + m.showToolDetails, + width, + m.partCount == m.selectedPart, + ) + content, cached = m.cache.Get(key) + if !cached { content = renderToolDetails( m.app, part, + message.Metadata, + m.partCount == m.selectedPart, width, ) - content = lipgloss.PlaceHorizontal( - m.width, - lipgloss.Center, - content, - styles.WhitespaceStyle(t.Background()), - ) - } - if content != "" { - partCount++ - lineCount += lipgloss.Height(content) + 1 - blocks = append(blocks, content) + m.cache.Set(key, content) } + } else { + // if the tool call isn't finished, don't cache + content = renderToolDetails( + m.app, + part, + message.Metadata, + m.partCount == m.selectedPart, + width, + ) + } + if content != "" { + m = m.updateSelected(content, "") + blocks = append(blocks, content) } } } - - error := "" - if assistant, ok := message.Info.(opencode.AssistantMessage); ok { - switch err := assistant.Error.AsUnion().(type) { - case nil: - case opencode.AssistantMessageErrorMessageOutputLengthError: - error = "Message output length exceeded" - case opencode.ProviderAuthError: - error = err.Data.Message - case opencode.MessageAbortedError: - error = "Request was aborted" - case opencode.UnknownError: - error = err.Data.Message - } - } - - if error != "" { - error = styles.NewStyle().Width(width - 6).Render(error) - error = renderContentBlock( - m.app, - error, - width, - WithBorderColor(t.Error()), - ) - error = lipgloss.PlaceHorizontal( - m.width, - lipgloss.Center, - error, - styles.WhitespaceStyle(t.Background()), - ) - blocks = 
append(blocks, error) - lineCount += lipgloss.Height(error) + 1 - } } - content := "\n" + strings.Join(blocks, "\n\n") - return renderCompleteMsg{ - content: content, - partCount: partCount, - lineCount: lineCount, + error := "" + switch err := message.Metadata.Error.AsUnion().(type) { + case nil: + case opencode.MessageMetadataErrorMessageOutputLengthError: + error = "Message output length exceeded" + case opencode.ProviderAuthError: + error = err.Data.Message + case opencode.UnknownError: + error = err.Data.Message + } + + if error != "" { + error = renderContentBlock( + m.app, + error, + false, + width, + WithBorderColor(t.Error()), + ) + blocks = append(blocks, error) + m.lineCount += lipgloss.Height(error) + 1 } } + + m.viewport.SetContent("\n" + strings.Join(blocks, "\n\n")) + if m.selectedPart == m.partCount { + m.viewport.GotoBottom() + } + } -func (m *messagesComponent) renderHeader() string { +func (m *messagesComponent) updateSelected(content string, selectedText string) *messagesComponent { + if m.selectedPart == m.partCount { + m.viewport.SetYOffset(m.lineCount - (m.viewport.Height() / 2) + 4) + m.selectedText = selectedText + } + m.partCount++ + m.lineCount += lipgloss.Height(content) + 1 + return m +} + +func (m *messagesComponent) header(width int) string { if m.app.Session.ID == "" { return "" } - headerWidth := m.width - t := theme.CurrentTheme() base := styles.NewStyle().Foreground(t.Text()).Background(t.Background()).Render muted := styles.NewStyle().Foreground(t.TextMuted()).Background(t.Background()).Render headerLines := []string{} headerLines = append( headerLines, - util.ToMarkdown("# "+m.app.Session.Title, headerWidth-6, t.Background()), + util.ToMarkdown("# "+m.app.Session.Title, width-6, t.Background()), ) - - share := "" if m.app.Session.Share.URL != "" { - share = muted(m.app.Session.Share.URL + " /unshare") + headerLines = append(headerLines, muted(m.app.Session.Share.URL+" /unshare")) } else { - share = base("/share") + muted(" to create a shareable link") + headerLines = append(headerLines, base("/share")+muted(" to create a shareable link")) } - - sessionInfo := "" - tokens := float64(0) - cost := float64(0) - contextWindow := m.app.Model.Limit.Context - - for _, message := range m.app.Messages { - if assistant, ok := message.Info.(opencode.AssistantMessage); ok { - cost += assistant.Cost - usage := assistant.Tokens - if usage.Output > 0 { - if assistant.Summary { - tokens = usage.Output - continue - } - tokens = (usage.Input + - usage.Cache.Write + - usage.Cache.Read + - usage.Output + - usage.Reasoning) - } - } - } - - // Check if current model is a subscription model (cost is 0 for both input and output) - isSubscriptionModel := m.app.Model != nil && - m.app.Model.Cost.Input == 0 && m.app.Model.Cost.Output == 0 - - sessionInfo = styles.NewStyle(). - Foreground(t.TextMuted()). - Background(t.Background()). 
- Render(formatTokensAndCost(tokens, contextWindow, cost, isSubscriptionModel)) - - background := t.Background() - - var items []layout.FlexItem - justify := layout.JustifyEnd - - if m.app.Config.Share != opencode.ConfigShareDisabled { - items = append(items, layout.FlexItem{View: share}) - justify = layout.JustifySpaceBetween - } - - items = append(items, layout.FlexItem{View: sessionInfo}) - - headerRow := layout.Render( - layout.FlexOptions{ - Background: &background, - Direction: layout.Row, - Justify: justify, - Align: layout.AlignStretch, - Width: headerWidth - 6, - }, - items..., - ) - - headerLines = append(headerLines, headerRow) - header := strings.Join(headerLines, "\n") + header = styles.NewStyle(). Background(t.Background()). - Width(headerWidth). + Width(width). PaddingLeft(2). PaddingRight(2). BorderLeft(true). @@ -496,83 +342,50 @@ func (m *messagesComponent) renderHeader() string { BorderForeground(t.BackgroundElement()). BorderStyle(lipgloss.ThickBorder()). Render(header) - header = lipgloss.PlaceHorizontal( - m.width, - lipgloss.Center, - header, - styles.WhitespaceStyle(t.Background()), - ) return "\n" + header + "\n" } -func formatTokensAndCost( - tokens float64, - contextWindow float64, - cost float64, - isSubscriptionModel bool, -) string { - // Format tokens in human-readable format (e.g., 110K, 1.2M) - var formattedTokens string - switch { - case tokens >= 1_000_000: - formattedTokens = fmt.Sprintf("%.1fM", float64(tokens)/1_000_000) - case tokens >= 1_000: - formattedTokens = fmt.Sprintf("%.1fK", float64(tokens)/1_000) - default: - formattedTokens = fmt.Sprintf("%d", int(tokens)) - } - - // Remove .0 suffix if present - if strings.HasSuffix(formattedTokens, ".0K") { - formattedTokens = strings.Replace(formattedTokens, ".0K", "K", 1) - } - if strings.HasSuffix(formattedTokens, ".0M") { - formattedTokens = strings.Replace(formattedTokens, ".0M", "M", 1) - } - - percentage := 0.0 - if contextWindow > 0 { - percentage = (float64(tokens) / float64(contextWindow)) * 100 - } - - if isSubscriptionModel { - return fmt.Sprintf( - "%s/%d%%", - formattedTokens, - int(percentage), - ) - } - - formattedCost := fmt.Sprintf("$%.2f", cost) - return fmt.Sprintf( - "%s/%d%% (%s)", - formattedTokens, - int(percentage), - formattedCost, - ) -} - -func (m *messagesComponent) View() string { +func (m *messagesComponent) View(width, height int) string { t := theme.CurrentTheme() - if m.loading { + if m.rendering { return lipgloss.Place( - m.width, - m.height, + width, + height, lipgloss.Center, lipgloss.Center, styles.NewStyle().Background(t.Background()).Render(""), styles.WhitespaceStyle(t.Background()), ) } + header := m.header(width) + m.viewport.SetWidth(width) + m.viewport.SetHeight(height - lipgloss.Height(header)) return styles.NewStyle(). Background(t.Background()). 
- Render(m.header + "\n" + m.viewport.View()) + Render(header + "\n" + m.viewport.View()) +} + +func (m *messagesComponent) SetWidth(width int) tea.Cmd { + if m.width == width { + return nil + } + // Clear cache on resize since width affects rendering + if m.width != width { + m.cache.Clear() + } + m.width = width + m.viewport.SetWidth(width) + m.renderView(width) + return nil } func (m *messagesComponent) Reload() tea.Cmd { - return m.renderView() + return func() tea.Msg { + m.renderView(m.width) + return renderFinishedMsg{} + } } func (m *messagesComponent) PageUp() (tea.Model, tea.Cmd) { @@ -595,50 +408,61 @@ func (m *messagesComponent) HalfPageDown() (tea.Model, tea.Cmd) { return m, nil } +func (m *messagesComponent) Previous() (tea.Model, tea.Cmd) { + m.tail = false + if m.selectedPart < 0 { + m.selectedPart = m.partCount + } + m.selectedPart-- + if m.selectedPart < 0 { + m.selectedPart = 0 + } + return m, util.CmdHandler(selectedMessagePartChangedMsg{ + part: m.selectedPart, + }) +} + +func (m *messagesComponent) Next() (tea.Model, tea.Cmd) { + m.tail = false + m.selectedPart++ + if m.selectedPart >= m.partCount { + m.selectedPart = m.partCount + } + return m, util.CmdHandler(selectedMessagePartChangedMsg{ + part: m.selectedPart, + }) +} + +func (m *messagesComponent) First() (tea.Model, tea.Cmd) { + m.selectedPart = 0 + m.tail = false + return m, util.CmdHandler(selectedMessagePartChangedMsg{ + part: m.selectedPart, + }) +} + +func (m *messagesComponent) Last() (tea.Model, tea.Cmd) { + m.selectedPart = m.partCount - 1 + m.tail = true + return m, util.CmdHandler(selectedMessagePartChangedMsg{ + part: m.selectedPart, + }) +} + func (m *messagesComponent) ToolDetailsVisible() bool { return m.showToolDetails } -func (m *messagesComponent) GotoTop() (tea.Model, tea.Cmd) { - m.viewport.GotoTop() - return m, nil -} - -func (m *messagesComponent) GotoBottom() (tea.Model, tea.Cmd) { - m.viewport.GotoBottom() - return m, nil -} - -func (m *messagesComponent) CopyLastMessage() (tea.Model, tea.Cmd) { - if len(m.app.Messages) == 0 { - return m, nil - } - lastMessage := m.app.Messages[len(m.app.Messages)-1] - var lastTextPart *opencode.TextPart - for _, part := range lastMessage.Parts { - if p, ok := part.(opencode.TextPart); ok { - lastTextPart = &p - } - } - if lastTextPart == nil { - return m, nil - } - var cmds []tea.Cmd - cmds = append(cmds, m.app.SetClipboard(lastTextPart.Text)) - cmds = append(cmds, toast.NewSuccessToast("Message copied to clipboard")) - return m, tea.Batch(cmds...) 
-} - func NewMessagesComponent(app *app.App) MessagesComponent { vp := viewport.New() vp.KeyMap = viewport.KeyMap{} - vp.MouseWheelDelta = 4 return &messagesComponent{ app: app, viewport: vp, showToolDetails: true, - cache: NewPartCache(), + cache: NewMessageCache(), tail: true, + selectedPart: -1, } } diff --git a/packages/tui/internal/components/commands/commands.go b/packages/tui/internal/components/commands/commands.go index 7f293230..f3080b38 100644 --- a/packages/tui/internal/components/commands/commands.go +++ b/packages/tui/internal/components/commands/commands.go @@ -56,8 +56,8 @@ func (c *commandsComponent) View() string { var untriggeredCommands []commands.Command for _, cmd := range c.app.Commands.Sorted() { - if c.showAll || cmd.HasTrigger() { - if cmd.HasTrigger() { + if c.showAll || cmd.Trigger != "" { + if cmd.Trigger != "" { triggeredCommands = append(triggeredCommands, cmd) } else if c.showAll { untriggeredCommands = append(untriggeredCommands, cmd) @@ -97,8 +97,8 @@ func (c *commandsComponent) View() string { for _, cmd := range commandsToShow { trigger := "" - if cmd.HasTrigger() { - trigger = "/" + cmd.PrimaryTrigger() + if cmd.Trigger != "" { + trigger = "/" + cmd.Trigger } else { trigger = string(cmd.Name) } diff --git a/packages/tui/internal/components/dialog/complete.go b/packages/tui/internal/components/dialog/complete.go index f18d9751..f204d910 100644 --- a/packages/tui/internal/components/dialog/complete.go +++ b/packages/tui/internal/components/dialog/complete.go @@ -2,25 +2,70 @@ package dialog import ( "log/slog" - "sort" - "strings" "github.com/charmbracelet/bubbles/v2/key" "github.com/charmbracelet/bubbles/v2/textarea" tea "github.com/charmbracelet/bubbletea/v2" "github.com/charmbracelet/lipgloss/v2" - "github.com/lithammer/fuzzysearch/fuzzy" - "github.com/muesli/reflow/truncate" - "github.com/sst/opencode/internal/completions" + "github.com/sst/opencode/internal/app" "github.com/sst/opencode/internal/components/list" "github.com/sst/opencode/internal/styles" "github.com/sst/opencode/internal/theme" "github.com/sst/opencode/internal/util" ) +type CompletionItem struct { + Title string + Value string +} + +type CompletionItemI interface { + list.ListItem + GetValue() string + DisplayValue() string +} + +func (ci *CompletionItem) Render(selected bool, width int) string { + t := theme.CurrentTheme() + baseStyle := styles.NewStyle().Foreground(t.Text()) + + itemStyle := baseStyle. + Background(t.BackgroundElement()). + Width(width). 
+ Padding(0, 1) + + if selected { + itemStyle = itemStyle.Foreground(t.Primary()) + } + + title := itemStyle.Render( + ci.DisplayValue(), + ) + return title +} + +func (ci *CompletionItem) DisplayValue() string { + return ci.Title +} + +func (ci *CompletionItem) GetValue() string { + return ci.Value +} + +func NewCompletionItem(completionItem CompletionItem) CompletionItemI { + return &completionItem +} + +type CompletionProvider interface { + GetId() string + GetChildEntries(query string) ([]CompletionItemI, error) + GetEmptyMessage() string +} + type CompletionSelectedMsg struct { - Item completions.CompletionSuggestion - SearchString string + SearchString string + CompletionValue string + IsCommand bool } type CompletionDialogCompleteItemMsg struct { @@ -34,16 +79,16 @@ type CompletionDialog interface { tea.ViewModel SetWidth(width int) IsEmpty() bool + SetProvider(provider CompletionProvider) } type completionDialogComponent struct { query string - providers []completions.CompletionProvider + completionProvider CompletionProvider width int height int pseudoSearchTextArea textarea.Model - list list.List[completions.CompletionSuggestion] - trigger string + list list.List[CompletionItemI] } type completionDialogKeyMap struct { @@ -56,7 +101,7 @@ var completionDialogKeys = completionDialogKeyMap{ key.WithKeys("tab", "enter", "right"), ), Cancel: key.NewBinding( - key.WithKeys("space", " ", "esc", "backspace", "ctrl+h", "ctrl+c"), + key.WithKeys(" ", "esc", "backspace", "ctrl+c"), ), } @@ -64,60 +109,13 @@ func (c *completionDialogComponent) Init() tea.Cmd { return nil } -func (c *completionDialogComponent) getAllCompletions(query string) tea.Cmd { - return func() tea.Msg { - allItems := make([]completions.CompletionSuggestion, 0) - providersWithResults := 0 - - // Collect results from all providers - for _, provider := range c.providers { - items, err := provider.GetChildEntries(query) - if err != nil { - slog.Error( - "Failed to get completion items", - "provider", - provider.GetId(), - "error", - err, - ) - continue - } - if len(items) > 0 { - providersWithResults++ - allItems = append(allItems, items...) 
- } - } - - // If there's a query, use fuzzy ranking to sort results - if query != "" && providersWithResults > 1 { - t := theme.CurrentTheme() - baseStyle := styles.NewStyle().Background(t.BackgroundElement()) - // Create a slice of display values for fuzzy matching - displayValues := make([]string, len(allItems)) - for i, item := range allItems { - displayValues[i] = item.Display(baseStyle) - } - - matches := fuzzy.RankFindFold(query, displayValues) - sort.Sort(matches) - - // Reorder items based on fuzzy ranking - rankedItems := make([]completions.CompletionSuggestion, 0, len(matches)) - for _, match := range matches { - rankedItems = append(rankedItems, allItems[match.OriginalIndex]) - } - - return rankedItems - } - - return allItems - } -} func (c *completionDialogComponent) Update(msg tea.Msg) (tea.Model, tea.Cmd) { var cmds []tea.Cmd switch msg := msg.(type) { - case []completions.CompletionSuggestion: + case []CompletionItemI: c.list.SetItems(msg) + case app.CompletionDialogTriggeredMsg: + c.pseudoSearchTextArea.SetValue(msg.InitialValue) case tea.KeyMsg: if c.pseudoSearchTextArea.Focused() { if !key.Matches(msg, completionDialogKeys.Complete) { @@ -125,16 +123,26 @@ func (c *completionDialogComponent) Update(msg tea.Msg) (tea.Model, tea.Cmd) { c.pseudoSearchTextArea, cmd = c.pseudoSearchTextArea.Update(msg) cmds = append(cmds, cmd) - fullValue := c.pseudoSearchTextArea.Value() - query := strings.TrimPrefix(fullValue, c.trigger) + var query string + query = c.pseudoSearchTextArea.Value() + if query != "" { + query = query[1:] + } if query != c.query { c.query = query - cmds = append(cmds, c.getAllCompletions(query)) + cmd = func() tea.Msg { + items, err := c.completionProvider.GetChildEntries(query) + if err != nil { + slog.Error("Failed to get completion items", "error", err) + } + return items + } + cmds = append(cmds, cmd) } u, cmd := c.list.Update(msg) - c.list = u.(list.List[completions.CompletionSuggestion]) + c.list = u.(list.List[CompletionItemI]) cmds = append(cmds, cmd) } @@ -146,18 +154,22 @@ func (c *completionDialogComponent) Update(msg tea.Msg) (tea.Model, tea.Cmd) { } return c, c.complete(item) case key.Matches(msg, completionDialogKeys.Cancel): - value := c.pseudoSearchTextArea.Value() - width := lipgloss.Width(value) - triggerWidth := lipgloss.Width(c.trigger) - // Only close on backspace when there are no characters left, unless we're back to just the trigger - if (msg.String() != "backspace" && msg.String() != "ctrl+h") || (width <= triggerWidth && value != c.trigger) { + // Only close on backspace when there are no characters left + if msg.String() != "backspace" || len(c.pseudoSearchTextArea.Value()) <= 0 { return c, c.close() } } return c, tea.Batch(cmds...) } else { - cmds = append(cmds, c.getAllCompletions("")) + cmd := func() tea.Msg { + items, err := c.completionProvider.GetChildEntries("") + if err != nil { + slog.Error("Failed to get completion items", "error", err) + } + return items + } + cmds = append(cmds, cmd) cmds = append(cmds, c.pseudoSearchTextArea.Focus()) return c, tea.Batch(cmds...) } @@ -168,11 +180,22 @@ func (c *completionDialogComponent) Update(msg tea.Msg) (tea.Model, tea.Cmd) { func (c *completionDialogComponent) View() string { t := theme.CurrentTheme() - c.list.SetMaxWidth(c.width) + baseStyle := styles.NewStyle().Foreground(t.Text()) - return styles.NewStyle(). - Padding(0, 1). - Foreground(t.Text()). 
+ maxWidth := 40 + completions := c.list.GetItems() + + for _, cmd := range completions { + title := cmd.DisplayValue() + if len(title) > maxWidth-4 { + maxWidth = len(title) + 4 + } + } + + c.list.SetMaxWidth(maxWidth) + + return baseStyle. + Padding(0, 0). Background(t.BackgroundElement()). BorderStyle(lipgloss.ThickBorder()). BorderLeft(true). @@ -191,12 +214,28 @@ func (c *completionDialogComponent) IsEmpty() bool { return c.list.IsEmpty() } -func (c *completionDialogComponent) complete(item completions.CompletionSuggestion) tea.Cmd { +func (c *completionDialogComponent) SetProvider(provider CompletionProvider) { + if c.completionProvider.GetId() != provider.GetId() { + c.completionProvider = provider + c.list.SetEmptyMessage(" " + provider.GetEmptyMessage()) + c.list.SetItems([]CompletionItemI{}) + } +} + +func (c *completionDialogComponent) complete(item CompletionItemI) tea.Cmd { value := c.pseudoSearchTextArea.Value() + if value == "" { + return nil + } + + // Check if this is a command completion + isCommand := c.completionProvider.GetId() == "commands" + return tea.Batch( util.CmdHandler(CompletionSelectedMsg{ - SearchString: value, - Item: item, + SearchString: value, + CompletionValue: item.GetValue(), + IsCommand: isCommand, }), c.close(), ) @@ -208,76 +247,28 @@ func (c *completionDialogComponent) close() tea.Cmd { return util.CmdHandler(CompletionDialogCloseMsg{}) } -func NewCompletionDialogComponent( - trigger string, - providers ...completions.CompletionProvider, -) CompletionDialog { +func NewCompletionDialogComponent(completionProvider CompletionProvider) CompletionDialog { ti := textarea.New() - ti.SetValue(trigger) - - // Use a generic empty message if we have multiple providers - emptyMessage := "no matching items" - if len(providers) == 1 { - emptyMessage = providers[0].GetEmptyMessage() - } - - // Define render function for completion suggestions - renderFunc := func(item completions.CompletionSuggestion, selected bool, width int, baseStyle styles.Style) string { - t := theme.CurrentTheme() - style := baseStyle - - if selected { - style = style.Background(t.BackgroundElement()).Foreground(t.Primary()) - } else { - style = style.Background(t.BackgroundElement()).Foreground(t.Text()) - } - - // The item.Display string already has any inline colors from the provider - truncatedStr := truncate.String(item.Display(style), uint(width-4)) - return style.Width(width - 4).Render(truncatedStr) - } - - // Define selectable function - all completion suggestions are selectable - selectableFunc := func(item completions.CompletionSuggestion) bool { - return true - } li := list.NewListComponent( - list.WithItems([]completions.CompletionSuggestion{}), - list.WithMaxVisibleHeight[completions.CompletionSuggestion](7), - list.WithFallbackMessage[completions.CompletionSuggestion](emptyMessage), - list.WithAlphaNumericKeys[completions.CompletionSuggestion](false), - list.WithRenderFunc(renderFunc), - list.WithSelectableFunc(selectableFunc), + []CompletionItemI{}, + 7, + completionProvider.GetEmptyMessage(), + false, ) - c := &completionDialogComponent{ - query: "", - providers: providers, - pseudoSearchTextArea: ti, - list: li, - trigger: trigger, - } - - // Load initial items from all providers go func() { - allItems := make([]completions.CompletionSuggestion, 0) - for _, provider := range providers { - items, err := provider.GetChildEntries("") - if err != nil { - slog.Error( - "Failed to get completion items", - "provider", - provider.GetId(), - "error", - err, - ) - continue - } - 
allItems = append(allItems, items...) + items, err := completionProvider.GetChildEntries("") + if err != nil { + slog.Error("Failed to get completion items", "error", err) } - li.SetItems(allItems) + li.SetItems(items) }() - return c + return &completionDialogComponent{ + query: "", + completionProvider: completionProvider, + pseudoSearchTextArea: ti, + list: li, + } } diff --git a/packages/tui/internal/components/dialog/find.go b/packages/tui/internal/components/dialog/find.go index 40be600c..3ca0d105 100644 --- a/packages/tui/internal/components/dialog/find.go +++ b/packages/tui/internal/components/dialog/find.go @@ -3,8 +3,9 @@ package dialog import ( "log/slog" + "github.com/charmbracelet/bubbles/v2/key" + "github.com/charmbracelet/bubbles/v2/textinput" tea "github.com/charmbracelet/bubbletea/v2" - "github.com/sst/opencode/internal/completions" "github.com/sst/opencode/internal/components/list" "github.com/sst/opencode/internal/components/modal" "github.com/sst/opencode/internal/layout" @@ -13,20 +14,12 @@ import ( "github.com/sst/opencode/internal/util" ) -const ( - findDialogWidth = 76 -) - type FindSelectedMsg struct { FilePath string } type FindDialogCloseMsg struct{} -type findInitialSuggestionsMsg struct { - suggestions []completions.CompletionSuggestion -} - type FindDialog interface { layout.Modal tea.Model @@ -34,159 +27,120 @@ type FindDialog interface { SetWidth(width int) SetHeight(height int) IsEmpty() bool -} - -// findItem is a custom list item for file suggestions -type findItem struct { - suggestion completions.CompletionSuggestion -} - -func (f findItem) Render( - selected bool, - width int, - baseStyle styles.Style, -) string { - t := theme.CurrentTheme() - - itemStyle := baseStyle. - Background(t.BackgroundPanel()). - Foreground(t.TextMuted()) - - if selected { - itemStyle = itemStyle.Foreground(t.Primary()) - } - - return itemStyle.PaddingLeft(1).Render(f.suggestion.Display(itemStyle)) -} - -func (f findItem) Selectable() bool { - return true + SetProvider(provider CompletionProvider) } type findDialogComponent struct { - completionProvider completions.CompletionProvider - allSuggestions []completions.CompletionSuggestion + query string + completionProvider CompletionProvider width, height int modal *modal.Modal - searchDialog *SearchDialog - dialogWidth int + textInput textinput.Model + list list.List[CompletionItemI] +} + +type findDialogKeyMap struct { + Select key.Binding + Cancel key.Binding +} + +var findDialogKeys = findDialogKeyMap{ + Select: key.NewBinding( + key.WithKeys("enter"), + ), + Cancel: key.NewBinding( + key.WithKeys("esc"), + ), } func (f *findDialogComponent) Init() tea.Cmd { - return tea.Batch( - f.loadInitialSuggestions(), - f.searchDialog.Init(), - ) -} - -func (f *findDialogComponent) loadInitialSuggestions() tea.Cmd { - return func() tea.Msg { - items, err := f.completionProvider.GetChildEntries("") - if err != nil { - slog.Error("Failed to get initial completion items", "error", err) - return findInitialSuggestionsMsg{suggestions: []completions.CompletionSuggestion{}} - } - return findInitialSuggestionsMsg{suggestions: items} - } + return textinput.Blink } func (f *findDialogComponent) Update(msg tea.Msg) (tea.Model, tea.Cmd) { + var cmd tea.Cmd + var cmds []tea.Cmd + switch msg := msg.(type) { - case findInitialSuggestionsMsg: - // Handle initial suggestions setup - f.allSuggestions = msg.suggestions - - // Calculate dialog width - f.dialogWidth = f.calculateDialogWidth() - - // Initialize search dialog with calculated width - 
f.searchDialog = NewSearchDialog("Search files...", 10) - f.searchDialog.SetWidth(f.dialogWidth) - - // Convert to list items - items := make([]list.Item, len(f.allSuggestions)) - for i, suggestion := range f.allSuggestions { - items[i] = findItem{suggestion: suggestion} + case []CompletionItemI: + f.list.SetItems(msg) + case tea.KeyMsg: + switch msg.String() { + case "ctrl+c": + if f.textInput.Value() == "" { + return f, nil + } + f.textInput.SetValue("") + return f.update(msg) } - f.searchDialog.SetItems(items) - // Update modal with calculated width - f.modal = modal.New( - modal.WithTitle("Find Files"), - modal.WithMaxWidth(f.dialogWidth+4), - ) + switch { + case key.Matches(msg, findDialogKeys.Select): + item, i := f.list.GetSelectedItem() + if i == -1 { + return f, nil + } + return f, f.selectFile(item) + case key.Matches(msg, findDialogKeys.Cancel): + return f, f.Close() + default: + f.textInput, cmd = f.textInput.Update(msg) + cmds = append(cmds, cmd) - return f, f.searchDialog.Init() - - case []completions.CompletionSuggestion: - // Store suggestions and convert to findItem for the search dialog - f.allSuggestions = msg - items := make([]list.Item, len(msg)) - for i, suggestion := range msg { - items[i] = findItem{suggestion: suggestion} + f, cmd = f.update(msg) + cmds = append(cmds, cmd) } - f.searchDialog.SetItems(items) - return f, nil + } - case SearchSelectionMsg: - // Handle selection from search dialog - now we can directly access the suggestion - if item, ok := msg.Item.(findItem); ok { - return f, f.selectFile(item.suggestion) - } - return f, nil + return f, tea.Batch(cmds...) +} - case SearchCancelledMsg: - return f, f.Close() +func (f *findDialogComponent) update(msg tea.Msg) (*findDialogComponent, tea.Cmd) { + var cmd tea.Cmd + var cmds []tea.Cmd - case SearchQueryChangedMsg: - // Update completion items based on search query - return f, func() tea.Msg { - items, err := f.completionProvider.GetChildEntries(msg.Query) + query := f.textInput.Value() + if query != f.query { + f.query = query + cmd = func() tea.Msg { + items, err := f.completionProvider.GetChildEntries(query) if err != nil { slog.Error("Failed to get completion items", "error", err) - return []completions.CompletionSuggestion{} } return items } - - case tea.WindowSizeMsg: - f.width = msg.Width - f.height = msg.Height - // Recalculate width based on new viewport size - oldWidth := f.dialogWidth - f.dialogWidth = f.calculateDialogWidth() - if oldWidth != f.dialogWidth { - f.searchDialog.SetWidth(f.dialogWidth) - // Update modal max width too - f.modal = modal.New( - modal.WithTitle("Find Files"), - modal.WithMaxWidth(f.dialogWidth+4), - ) - } - f.searchDialog.SetHeight(msg.Height) + cmds = append(cmds, cmd) } - // Forward all other messages to the search dialog - updatedDialog, cmd := f.searchDialog.Update(msg) - f.searchDialog = updatedDialog.(*SearchDialog) - return f, cmd + u, cmd := f.list.Update(msg) + f.list = u.(list.List[CompletionItemI]) + cmds = append(cmds, cmd) + + return f, tea.Batch(cmds...) } func (f *findDialogComponent) View() string { - return f.searchDialog.View() -} + t := theme.CurrentTheme() + f.textInput.SetWidth(f.width - 8) + f.list.SetMaxWidth(f.width - 4) + inputView := f.textInput.View() + inputView = styles.NewStyle(). + Background(t.BackgroundPanel()). + Height(1). + Width(f.width-4). + Padding(0, 0). 
+ Render(inputView) -func (f *findDialogComponent) calculateDialogWidth() int { - // Use fixed width unless viewport is smaller - if f.width > 0 && f.width < findDialogWidth+10 { - return f.width - 10 - } - return findDialogWidth + listView := f.list.View() + return styles.NewStyle().Height(12).Render(inputView + "\n" + listView) } func (f *findDialogComponent) SetWidth(width int) { f.width = width - f.searchDialog.SetWidth(f.dialogWidth) + if width > 4 { + f.textInput.SetWidth(width - 4) + f.list.SetMaxWidth(width - 4) + } } func (f *findDialogComponent) SetHeight(height int) { @@ -194,14 +148,20 @@ func (f *findDialogComponent) SetHeight(height int) { } func (f *findDialogComponent) IsEmpty() bool { - return f.searchDialog.GetQuery() == "" + return f.list.IsEmpty() } -func (f *findDialogComponent) selectFile(item completions.CompletionSuggestion) tea.Cmd { +func (f *findDialogComponent) SetProvider(provider CompletionProvider) { + f.completionProvider = provider + f.list.SetEmptyMessage(" " + provider.GetEmptyMessage()) + f.list.SetItems([]CompletionItemI{}) +} + +func (f *findDialogComponent) selectFile(item CompletionItemI) tea.Cmd { return tea.Sequence( f.Close(), util.CmdHandler(FindSelectedMsg{ - FilePath: item.Value, + FilePath: item.GetValue(), }), ) } @@ -211,26 +171,65 @@ func (f *findDialogComponent) Render(background string) string { } func (f *findDialogComponent) Close() tea.Cmd { - f.searchDialog.SetQuery("") - f.searchDialog.Blur() + f.textInput.Reset() + f.textInput.Blur() return util.CmdHandler(modal.CloseModalMsg{}) } -func NewFindDialog(completionProvider completions.CompletionProvider) FindDialog { - component := &findDialogComponent{ - completionProvider: completionProvider, - dialogWidth: findDialogWidth, - allSuggestions: []completions.CompletionSuggestion{}, +func createTextInput(existing *textinput.Model) textinput.Model { + t := theme.CurrentTheme() + bgColor := t.BackgroundPanel() + textColor := t.Text() + textMutedColor := t.TextMuted() + + ti := textinput.New() + + ti.Styles.Blurred.Placeholder = styles.NewStyle().Foreground(textMutedColor).Background(bgColor).Lipgloss() + ti.Styles.Blurred.Text = styles.NewStyle().Foreground(textColor).Background(bgColor).Lipgloss() + ti.Styles.Focused.Placeholder = styles.NewStyle().Foreground(textMutedColor).Background(bgColor).Lipgloss() + ti.Styles.Focused.Text = styles.NewStyle().Foreground(textColor).Background(bgColor).Lipgloss() + ti.Styles.Cursor.Color = t.Primary() + ti.VirtualCursor = true + + ti.Prompt = " " + ti.CharLimit = -1 + ti.Focus() + + if existing != nil { + ti.SetValue(existing.Value()) + ti.SetWidth(existing.Width()) } - // Create search dialog and modal with fixed width - component.searchDialog = NewSearchDialog("Search files...", 10) - component.searchDialog.SetWidth(findDialogWidth) + return ti +} - component.modal = modal.New( - modal.WithTitle("Find Files"), - modal.WithMaxWidth(findDialogWidth+4), +func NewFindDialog(completionProvider CompletionProvider) FindDialog { + ti := createTextInput(nil) + + li := list.NewListComponent( + []CompletionItemI{}, + 10, // max visible items + completionProvider.GetEmptyMessage(), + false, ) - return component + // Load initial items + go func() { + items, err := completionProvider.GetChildEntries("") + if err != nil { + slog.Error("Failed to get completion items", "error", err) + } + li.SetItems(items) + }() + + return &findDialogComponent{ + query: "", + completionProvider: completionProvider, + textInput: ti, + list: li, + modal: modal.New( + 
modal.WithTitle("Find Files"), + modal.WithMaxWidth(80), + ), + } } diff --git a/packages/tui/internal/components/dialog/help.go b/packages/tui/internal/components/dialog/help.go index 80123165..4107384f 100644 --- a/packages/tui/internal/components/dialog/help.go +++ b/packages/tui/internal/components/dialog/help.go @@ -30,10 +30,9 @@ func (h *helpDialog) Update(msg tea.Msg) (tea.Model, tea.Cmd) { case tea.WindowSizeMsg: h.width = msg.Width h.height = msg.Height - // Set viewport size with some padding for the modal, but cap at reasonable width - maxWidth := min(80, msg.Width-8) - h.viewport = viewport.New(viewport.WithWidth(maxWidth-4), viewport.WithHeight(msg.Height-6)) - h.commandsComponent.SetSize(maxWidth-4, msg.Height-6) + // Set viewport size with some padding for the modal + h.viewport = viewport.New(viewport.WithWidth(msg.Width-4), viewport.WithHeight(msg.Height-6)) + h.commandsComponent.SetSize(msg.Width-4, msg.Height-6) } // Update viewport content @@ -49,7 +48,7 @@ func (h *helpDialog) Update(msg tea.Msg) (tea.Model, tea.Cmd) { func (h *helpDialog) View() string { t := theme.CurrentTheme() - h.commandsComponent.SetBackgroundColor(t.BackgroundPanel()) + h.commandsComponent.SetBackgroundColor(t.BackgroundElement()) return h.viewport.View() } @@ -70,11 +69,11 @@ func NewHelpDialog(app *app.App) HelpDialog { return &helpDialog{ app: app, commandsComponent: commandsComponent.New(app, - commandsComponent.WithBackground(theme.CurrentTheme().BackgroundPanel()), + commandsComponent.WithBackground(theme.CurrentTheme().BackgroundElement()), commandsComponent.WithShowAll(true), commandsComponent.WithKeybinds(true), ), - modal: modal.New(modal.WithTitle("Help"), modal.WithMaxWidth(80)), + modal: modal.New(modal.WithTitle("Help")), viewport: vp, } } diff --git a/packages/tui/internal/components/dialog/models.go b/packages/tui/internal/components/dialog/models.go index 8f1069fc..4ebf572e 100644 --- a/packages/tui/internal/components/dialog/models.go +++ b/packages/tui/internal/components/dialog/models.go @@ -8,7 +8,6 @@ import ( "github.com/charmbracelet/bubbles/v2/key" tea "github.com/charmbracelet/bubbletea/v2" - "github.com/lithammer/fuzzysearch/fuzzy" "github.com/sst/opencode-sdk-go" "github.com/sst/opencode/internal/app" "github.com/sst/opencode/internal/components/list" @@ -23,7 +22,6 @@ const ( numVisibleModels = 10 minDialogWidth = 40 maxDialogWidth = 80 - maxRecentModels = 5 ) // ModelDialog interface for the model selection dialog @@ -32,13 +30,13 @@ type ModelDialog interface { } type modelDialog struct { - app *app.App - allModels []ModelWithProvider - width int - height int - modal *modal.Modal - searchDialog *SearchDialog - dialogWidth int + app *app.App + allModels []ModelWithProvider + width int + height int + modal *modal.Modal + modelList list.List[ModelItem] + dialogWidth int } type ModelWithProvider struct { @@ -46,42 +44,39 @@ type ModelWithProvider struct { Provider opencode.Provider } -// modelItem is a custom list item for model selections -type modelItem struct { - model ModelWithProvider +type ModelItem struct { + ModelName string + ProviderName string } -func (m modelItem) Render( - selected bool, - width int, - baseStyle styles.Style, -) string { +func (m ModelItem) Render(selected bool, width int) string { t := theme.CurrentTheme() - itemStyle := baseStyle. - Background(t.BackgroundPanel()). 
- Foreground(t.Text()) - if selected { - itemStyle = itemStyle.Foreground(t.Primary()) + displayText := fmt.Sprintf("%s (%s)", m.ModelName, m.ProviderName) + return styles.NewStyle(). + Background(t.Primary()). + Foreground(t.BackgroundElement()). + Width(width). + PaddingLeft(1). + Render(displayText) + } else { + modelStyle := styles.NewStyle(). + Foreground(t.Text()). + Background(t.BackgroundElement()) + providerStyle := styles.NewStyle(). + Foreground(t.TextMuted()). + Background(t.BackgroundElement()) + + modelPart := modelStyle.Render(m.ModelName) + providerPart := providerStyle.Render(fmt.Sprintf(" (%s)", m.ProviderName)) + + combinedText := modelPart + providerPart + return styles.NewStyle(). + Background(t.BackgroundElement()). + PaddingLeft(1). + Render(combinedText) } - - providerStyle := baseStyle. - Foreground(t.TextMuted()). - Background(t.BackgroundPanel()) - - modelPart := itemStyle.Render(m.model.Model.Name) - providerPart := providerStyle.Render(fmt.Sprintf(" %s", m.model.Provider.Name)) - - combinedText := modelPart + providerPart - return baseStyle. - Background(t.BackgroundPanel()). - PaddingLeft(1). - Render(combinedText) -} - -func (m modelItem) Selectable() bool { - return true } type modelKeyMap struct { @@ -102,67 +97,52 @@ var modelKeys = modelKeyMap{ func (m *modelDialog) Init() tea.Cmd { m.setupAllModels() - return m.searchDialog.Init() + return nil } func (m *modelDialog) Update(msg tea.Msg) (tea.Model, tea.Cmd) { switch msg := msg.(type) { - case SearchSelectionMsg: - // Handle selection from search dialog - if item, ok := msg.Item.(modelItem); ok { - return m, tea.Sequence( - util.CmdHandler(modal.CloseModalMsg{}), - util.CmdHandler( - app.ModelSelectedMsg{ - Provider: item.model.Provider, - Model: item.model.Model, - }), - ) - } - return m, util.CmdHandler(modal.CloseModalMsg{}) - case SearchCancelledMsg: - return m, util.CmdHandler(modal.CloseModalMsg{}) - - case SearchRemoveItemMsg: - if item, ok := msg.Item.(modelItem); ok { - if m.isModelInRecentSection(item.model, msg.Index) { - m.app.State.RemoveModelFromRecentlyUsed(item.model.Provider.ID, item.model.Model.ID) - m.app.SaveState() - items := m.buildDisplayList(m.searchDialog.GetQuery()) - m.searchDialog.SetItems(items) + case tea.KeyMsg: + switch { + case key.Matches(msg, modelKeys.Enter): + _, selectedIndex := m.modelList.GetSelectedItem() + if selectedIndex >= 0 && selectedIndex < len(m.allModels) { + selectedModel := m.allModels[selectedIndex] + return m, tea.Sequence( + util.CmdHandler(modal.CloseModalMsg{}), + util.CmdHandler( + app.ModelSelectedMsg{ + Provider: selectedModel.Provider, + Model: selectedModel.Model, + }), + ) } + return m, util.CmdHandler(modal.CloseModalMsg{}) + case key.Matches(msg, modelKeys.Escape): + return m, util.CmdHandler(modal.CloseModalMsg{}) } - return m, nil - - case SearchQueryChangedMsg: - // Update the list based on search query - items := m.buildDisplayList(msg.Query) - m.searchDialog.SetItems(items) - return m, nil - case tea.WindowSizeMsg: m.width = msg.Width m.height = msg.Height - m.searchDialog.SetWidth(m.dialogWidth) - m.searchDialog.SetHeight(msg.Height) } - updatedDialog, cmd := m.searchDialog.Update(msg) - m.searchDialog = updatedDialog.(*SearchDialog) + // Update the list component + updatedList, cmd := m.modelList.Update(msg) + m.modelList = updatedList.(list.List[ModelItem]) return m, cmd } func (m *modelDialog) View() string { - return m.searchDialog.View() + return m.modelList.View() } -func (m *modelDialog) calculateOptimalWidth(models 
[]ModelWithProvider) int { +func (m *modelDialog) calculateOptimalWidth(modelItems []ModelItem) int { maxWidth := minDialogWidth - for _, model := range models { + for _, item := range modelItems { // Calculate the width needed for this item: "ModelName (ProviderName)" // Add 4 for the parentheses, space, and some padding - itemWidth := len(model.Model.Name) + len(model.Provider.Name) + 4 + itemWidth := len(item.ModelName) + len(item.ProviderName) + 4 if itemWidth > maxWidth { maxWidth = itemWidth } @@ -190,16 +170,22 @@ func (m *modelDialog) setupAllModels() { m.sortModels() - // Calculate optimal width based on all models - m.dialogWidth = m.calculateOptimalWidth(m.allModels) + modelItems := make([]ModelItem, len(m.allModels)) + for i, modelWithProvider := range m.allModels { + modelItems[i] = ModelItem{ + ModelName: modelWithProvider.Model.Name, + ProviderName: modelWithProvider.Provider.Name, + } + } - // Initialize search dialog - m.searchDialog = NewSearchDialog("Search models...", numVisibleModels) - m.searchDialog.SetWidth(m.dialogWidth) + m.dialogWidth = m.calculateOptimalWidth(modelItems) - // Build initial display list (empty query shows grouped view) - items := m.buildDisplayList("") - m.searchDialog.SetItems(items) + m.modelList = list.NewListComponent(modelItems, numVisibleModels, "No models available", true) + m.modelList.SetMaxWidth(m.dialogWidth) + + if len(m.allModels) > 0 { + m.modelList.SetSelectedIndex(0) + } } func (m *modelDialog) sortModels() { @@ -262,176 +248,6 @@ func (m *modelDialog) getModelUsageTime(providerID, modelID string) time.Time { return time.Time{} } -// buildDisplayList creates the list items based on search query -func (m *modelDialog) buildDisplayList(query string) []list.Item { - if query != "" { - // Search mode: use fuzzy matching - return m.buildSearchResults(query) - } else { - // Grouped mode: show Recent section and provider groups - return m.buildGroupedResults() - } -} - -// buildSearchResults creates a flat list of search results using fuzzy matching -func (m *modelDialog) buildSearchResults(query string) []list.Item { - type modelMatch struct { - model ModelWithProvider - score int - } - - modelNames := []string{} - modelMap := make(map[string]ModelWithProvider) - - // Create search strings and perform fuzzy matching - for _, model := range m.allModels { - searchStr := fmt.Sprintf("%s %s", model.Model.Name, model.Provider.Name) - modelNames = append(modelNames, searchStr) - modelMap[searchStr] = model - - searchStr = fmt.Sprintf("%s %s", model.Provider.Name, model.Model.Name) - modelNames = append(modelNames, searchStr) - modelMap[searchStr] = model - } - - matches := fuzzy.RankFindFold(query, modelNames) - sort.Sort(matches) - - items := []list.Item{} - seenModels := make(map[string]bool) - - for _, match := range matches { - model := modelMap[match.Target] - // Create a unique key to avoid duplicates - key := fmt.Sprintf("%s:%s", model.Provider.ID, model.Model.ID) - if seenModels[key] { - continue - } - seenModels[key] = true - items = append(items, modelItem{model: model}) - } - - return items -} - -// buildGroupedResults creates a grouped list with Recent section and provider groups -func (m *modelDialog) buildGroupedResults() []list.Item { - var items []list.Item - - // Add Recent section - recentModels := m.getRecentModels(maxRecentModels) - if len(recentModels) > 0 { - items = append(items, list.HeaderItem("Recent")) - for _, model := range recentModels { - items = append(items, modelItem{model: model}) - } - } - - // Group models 
by provider - providerGroups := make(map[string][]ModelWithProvider) - for _, model := range m.allModels { - providerName := model.Provider.Name - providerGroups[providerName] = append(providerGroups[providerName], model) - } - - // Get sorted provider names for consistent order - var providerNames []string - for name := range providerGroups { - providerNames = append(providerNames, name) - } - sort.Strings(providerNames) - - // Add provider groups - for _, providerName := range providerNames { - models := providerGroups[providerName] - - // Sort models within provider group - sort.Slice(models, func(i, j int) bool { - modelA := models[i] - modelB := models[j] - - usageA := m.getModelUsageTime(modelA.Provider.ID, modelA.Model.ID) - usageB := m.getModelUsageTime(modelB.Provider.ID, modelB.Model.ID) - - // Sort by usage time first, then by release date, then alphabetically - if !usageA.IsZero() && !usageB.IsZero() { - return usageA.After(usageB) - } - if !usageA.IsZero() && usageB.IsZero() { - return true - } - if usageA.IsZero() && !usageB.IsZero() { - return false - } - - // Sort by release date if available - if modelA.Model.ReleaseDate != "" && modelB.Model.ReleaseDate != "" { - dateA := m.parseReleaseDate(modelA.Model.ReleaseDate) - dateB := m.parseReleaseDate(modelB.Model.ReleaseDate) - if !dateA.IsZero() && !dateB.IsZero() { - return dateA.After(dateB) - } - } - - return modelA.Model.Name < modelB.Model.Name - }) - - // Add provider header - items = append(items, list.HeaderItem(providerName)) - - // Add models in this provider group - for _, model := range models { - items = append(items, modelItem{model: model}) - } - } - - return items -} - -// getRecentModels returns the most recently used models -func (m *modelDialog) getRecentModels(limit int) []ModelWithProvider { - var recentModels []ModelWithProvider - - // Get recent models from app state - for _, usage := range m.app.State.RecentlyUsedModels { - if len(recentModels) >= limit { - break - } - - // Find the corresponding model - for _, model := range m.allModels { - if model.Provider.ID == usage.ProviderID && model.Model.ID == usage.ModelID { - recentModels = append(recentModels, model) - break - } - } - } - - return recentModels -} - -func (m *modelDialog) isModelInRecentSection(model ModelWithProvider, index int) bool { - // Only check if we're in grouped mode (no search query) - if m.searchDialog.GetQuery() != "" { - return false - } - - recentModels := m.getRecentModels(maxRecentModels) - if len(recentModels) == 0 { - return false - } - - // Index 0 is the "Recent" header, so recent models are at indices 1 to len(recentModels) - if index >= 1 && index <= len(recentModels) { - if index-1 < len(recentModels) { - recentModel := recentModels[index-1] - return recentModel.Provider.ID == model.Provider.ID && recentModel.Model.ID == model.Model.ID - } - } - - return false -} - func (m *modelDialog) Render(background string) string { return m.modal.Render(m.View(), background) } diff --git a/packages/tui/internal/components/dialog/permission.go b/packages/tui/internal/components/dialog/permission.go new file mode 100644 index 00000000..5bc40624 --- /dev/null +++ b/packages/tui/internal/components/dialog/permission.go @@ -0,0 +1,496 @@ +package dialog + +import ( + "fmt" + "github.com/charmbracelet/bubbles/v2/key" + "github.com/charmbracelet/bubbles/v2/viewport" + tea "github.com/charmbracelet/bubbletea/v2" + "github.com/charmbracelet/lipgloss/v2" + "github.com/sst/opencode/internal/styles" + 
"github.com/sst/opencode/internal/theme" + "github.com/sst/opencode/internal/util" + "strings" +) + +type PermissionAction string + +// Permission responses +const ( + PermissionAllow PermissionAction = "allow" + PermissionAllowForSession PermissionAction = "allow_session" + PermissionDeny PermissionAction = "deny" +) + +// PermissionResponseMsg represents the user's response to a permission request +type PermissionResponseMsg struct { + // Permission permission.PermissionRequest + Action PermissionAction +} + +// PermissionDialogComponent interface for permission dialog component +type PermissionDialogComponent interface { + tea.Model + tea.ViewModel + // SetPermissions(permission permission.PermissionRequest) tea.Cmd +} + +type permissionsMapping struct { + Left key.Binding + Right key.Binding + EnterSpace key.Binding + Allow key.Binding + AllowSession key.Binding + Deny key.Binding + Tab key.Binding +} + +var permissionsKeys = permissionsMapping{ + Left: key.NewBinding( + key.WithKeys("left"), + key.WithHelp("←", "switch options"), + ), + Right: key.NewBinding( + key.WithKeys("right"), + key.WithHelp("→", "switch options"), + ), + EnterSpace: key.NewBinding( + key.WithKeys("enter", " "), + key.WithHelp("enter/space", "confirm"), + ), + Allow: key.NewBinding( + key.WithKeys("a"), + key.WithHelp("a", "allow"), + ), + AllowSession: key.NewBinding( + key.WithKeys("s"), + key.WithHelp("s", "allow for session"), + ), + Deny: key.NewBinding( + key.WithKeys("d"), + key.WithHelp("d", "deny"), + ), + Tab: key.NewBinding( + key.WithKeys("tab"), + key.WithHelp("tab", "switch options"), + ), +} + +// permissionDialogComponent is the implementation of PermissionDialog +type permissionDialogComponent struct { + width int + height int + // permission permission.PermissionRequest + windowSize tea.WindowSizeMsg + contentViewPort viewport.Model + selectedOption int // 0: Allow, 1: Allow for session, 2: Deny + + diffCache map[string]string + markdownCache map[string]string +} + +func (p *permissionDialogComponent) Init() tea.Cmd { + return p.contentViewPort.Init() +} + +func (p *permissionDialogComponent) Update(msg tea.Msg) (tea.Model, tea.Cmd) { + var cmds []tea.Cmd + + switch msg := msg.(type) { + case tea.WindowSizeMsg: + p.windowSize = msg + cmd := p.SetSize() + cmds = append(cmds, cmd) + p.markdownCache = make(map[string]string) + p.diffCache = make(map[string]string) + // case tea.KeyMsg: + // switch { + // case key.Matches(msg, permissionsKeys.Right) || key.Matches(msg, permissionsKeys.Tab): + // p.selectedOption = (p.selectedOption + 1) % 3 + // return p, nil + // case key.Matches(msg, permissionsKeys.Left): + // p.selectedOption = (p.selectedOption + 2) % 3 + // case key.Matches(msg, permissionsKeys.EnterSpace): + // return p, p.selectCurrentOption() + // case key.Matches(msg, permissionsKeys.Allow): + // return p, util.CmdHandler(PermissionResponseMsg{Action: PermissionAllow, Permission: p.permission}) + // case key.Matches(msg, permissionsKeys.AllowSession): + // return p, util.CmdHandler(PermissionResponseMsg{Action: PermissionAllowForSession, Permission: p.permission}) + // case key.Matches(msg, permissionsKeys.Deny): + // return p, util.CmdHandler(PermissionResponseMsg{Action: PermissionDeny, Permission: p.permission}) + // default: + // // Pass other keys to viewport + // viewPort, cmd := p.contentViewPort.Update(msg) + // p.contentViewPort = viewPort + // cmds = append(cmds, cmd) + // } + } + + return p, tea.Batch(cmds...) 
+} + +func (p *permissionDialogComponent) selectCurrentOption() tea.Cmd { + var action PermissionAction + + switch p.selectedOption { + case 0: + action = PermissionAllow + case 1: + action = PermissionAllowForSession + case 2: + action = PermissionDeny + } + + return util.CmdHandler(PermissionResponseMsg{Action: action}) // , Permission: p.permission}) +} + +func (p *permissionDialogComponent) renderButtons() string { + t := theme.CurrentTheme() + baseStyle := styles.NewStyle().Foreground(t.Text()) + + allowStyle := baseStyle + allowSessionStyle := baseStyle + denyStyle := baseStyle + spacerStyle := baseStyle.Background(t.Background()) + + // Style the selected button + switch p.selectedOption { + case 0: + allowStyle = allowStyle.Background(t.Primary()).Foreground(t.Background()) + allowSessionStyle = allowSessionStyle.Background(t.Background()).Foreground(t.Primary()) + denyStyle = denyStyle.Background(t.Background()).Foreground(t.Primary()) + case 1: + allowStyle = allowStyle.Background(t.Background()).Foreground(t.Primary()) + allowSessionStyle = allowSessionStyle.Background(t.Primary()).Foreground(t.Background()) + denyStyle = denyStyle.Background(t.Background()).Foreground(t.Primary()) + case 2: + allowStyle = allowStyle.Background(t.Background()).Foreground(t.Primary()) + allowSessionStyle = allowSessionStyle.Background(t.Background()).Foreground(t.Primary()) + denyStyle = denyStyle.Background(t.Primary()).Foreground(t.Background()) + } + + allowButton := allowStyle.Padding(0, 1).Render("Allow (a)") + allowSessionButton := allowSessionStyle.Padding(0, 1).Render("Allow for session (s)") + denyButton := denyStyle.Padding(0, 1).Render("Deny (d)") + + content := lipgloss.JoinHorizontal( + lipgloss.Left, + allowButton, + spacerStyle.Render(" "), + allowSessionButton, + spacerStyle.Render(" "), + denyButton, + spacerStyle.Render(" "), + ) + + remainingWidth := p.width - lipgloss.Width(content) + if remainingWidth > 0 { + content = spacerStyle.Render(strings.Repeat(" ", remainingWidth)) + content + } + return content +} + +func (p *permissionDialogComponent) renderHeader() string { + return "NOT IMPLEMENTED" + // t := theme.CurrentTheme() + // baseStyle := styles.BaseStyle() + // + // toolKey := baseStyle.Foreground(t.TextMuted()).Bold(true).Render("Tool") + // toolValue := baseStyle. + // Foreground(t.Text()). + // Width(p.width - lipgloss.Width(toolKey)). + // Render(fmt.Sprintf(": %s", p.permission.ToolName)) + // + // pathKey := baseStyle.Foreground(t.TextMuted()).Bold(true).Render("Path") + // + // // Get the current working directory to display relative path + // relativePath := p.permission.Path + // if filepath.IsAbs(relativePath) { + // if cwd, err := filepath.Rel(config.WorkingDirectory(), relativePath); err == nil { + // relativePath = cwd + // } + // } + // + // pathValue := baseStyle. + // Foreground(t.Text()). + // Width(p.width - lipgloss.Width(pathKey)). 
+ // Render(fmt.Sprintf(": %s", relativePath)) + // + // headerParts := []string{ + // lipgloss.JoinHorizontal( + // lipgloss.Left, + // toolKey, + // toolValue, + // ), + // baseStyle.Render(strings.Repeat(" ", p.width)), + // lipgloss.JoinHorizontal( + // lipgloss.Left, + // pathKey, + // pathValue, + // ), + // baseStyle.Render(strings.Repeat(" ", p.width)), + // } + // + // // Add tool-specific header information + // switch p.permission.ToolName { + // case "bash": + // headerParts = append(headerParts, baseStyle.Foreground(t.TextMuted()).Width(p.width).Bold(true).Render("Command")) + // case "edit": + // headerParts = append(headerParts, baseStyle.Foreground(t.TextMuted()).Width(p.width).Bold(true).Render("Diff")) + // case "write": + // headerParts = append(headerParts, baseStyle.Foreground(t.TextMuted()).Width(p.width).Bold(true).Render("Diff")) + // case "fetch": + // headerParts = append(headerParts, baseStyle.Foreground(t.TextMuted()).Width(p.width).Bold(true).Render("URL")) + // } + // + // return lipgloss.NewStyle().Background(t.Background()).Render(lipgloss.JoinVertical(lipgloss.Left, headerParts...)) +} + +func (p *permissionDialogComponent) renderBashContent() string { + // t := theme.CurrentTheme() + // baseStyle := styles.BaseStyle() + // + // if pr, ok := p.permission.Params.(tools.BashPermissionsParams); ok { + // content := fmt.Sprintf("```bash\n%s\n```", pr.Command) + // + // // Use the cache for markdown rendering + // renderedContent := p.GetOrSetMarkdown(p.permission.ID, func() (string, error) { + // r := styles.GetMarkdownRenderer(p.width - 10) + // s, err := r.Render(content) + // return s + // }) + // + // finalContent := baseStyle. + // Width(p.contentViewPort.Width). + // Render(renderedContent) + // p.contentViewPort.SetContent(finalContent) + // return p.styleViewport() + // } + return "" +} + +func (p *permissionDialogComponent) renderEditContent() string { + // if pr, ok := p.permission.Params.(tools.EditPermissionsParams); ok { + // diff := p.GetOrSetDiff(p.permission.ID, func() (string, error) { + // return diff.FormatDiff(pr.Diff, diff.WithTotalWidth(p.contentViewPort.Width)) + // }) + // + // p.contentViewPort.SetContent(diff) + // return p.styleViewport() + // } + return "" +} + +func (p *permissionDialogComponent) renderPatchContent() string { + // if pr, ok := p.permission.Params.(tools.EditPermissionsParams); ok { + // diff := p.GetOrSetDiff(p.permission.ID, func() (string, error) { + // return diff.FormatDiff(pr.Diff, diff.WithTotalWidth(p.contentViewPort.Width)) + // }) + // + // p.contentViewPort.SetContent(diff) + // return p.styleViewport() + // } + return "" +} + +func (p *permissionDialogComponent) renderWriteContent() string { + // if pr, ok := p.permission.Params.(tools.WritePermissionsParams); ok { + // // Use the cache for diff rendering + // diff := p.GetOrSetDiff(p.permission.ID, func() (string, error) { + // return diff.FormatDiff(pr.Diff, diff.WithTotalWidth(p.contentViewPort.Width)) + // }) + // + // p.contentViewPort.SetContent(diff) + // return p.styleViewport() + // } + return "" +} + +func (p *permissionDialogComponent) renderFetchContent() string { + // t := theme.CurrentTheme() + // baseStyle := styles.BaseStyle() + // + // if pr, ok := p.permission.Params.(tools.FetchPermissionsParams); ok { + // content := fmt.Sprintf("```bash\n%s\n```", pr.URL) + // + // // Use the cache for markdown rendering + // renderedContent := p.GetOrSetMarkdown(p.permission.ID, func() (string, error) { + // r := styles.GetMarkdownRenderer(p.width - 
10) + // s, err := r.Render(content) + // return s + // }) + // + // finalContent := baseStyle. + // Width(p.contentViewPort.Width). + // Render(renderedContent) + // p.contentViewPort.SetContent(finalContent) + // return p.styleViewport() + // } + return "" +} + +func (p *permissionDialogComponent) renderDefaultContent() string { + // t := theme.CurrentTheme() + // baseStyle := styles.BaseStyle() + // + // content := p.permission.Description + // + // // Use the cache for markdown rendering + // renderedContent := p.GetOrSetMarkdown(p.permission.ID, func() (string, error) { + // r := styles.GetMarkdownRenderer(p.width - 10) + // s, err := r.Render(content) + // return s + // }) + // + // finalContent := baseStyle. + // Width(p.contentViewPort.Width). + // Render(renderedContent) + // p.contentViewPort.SetContent(finalContent) + // + // if renderedContent == "" { + // return "" + // } + // + return p.styleViewport() +} + +func (p *permissionDialogComponent) styleViewport() string { + t := theme.CurrentTheme() + contentStyle := styles.NewStyle().Background(t.Background()) + + return contentStyle.Render(p.contentViewPort.View()) +} + +func (p *permissionDialogComponent) render() string { + return "NOT IMPLEMENTED" + // t := theme.CurrentTheme() + // baseStyle := styles.BaseStyle() + // + // title := baseStyle. + // Bold(true). + // Width(p.width - 4). + // Foreground(t.Primary()). + // Render("Permission Required") + // // Render header + // headerContent := p.renderHeader() + // // Render buttons + // buttons := p.renderButtons() + // + // // Calculate content height dynamically based on window size + // p.contentViewPort.Height = p.height - lipgloss.Height(headerContent) - lipgloss.Height(buttons) - 2 - lipgloss.Height(title) + // p.contentViewPort.Width = p.width - 4 + // + // // Render content based on tool type + // var contentFinal string + // switch p.permission.ToolName { + // case "bash": + // contentFinal = p.renderBashContent() + // case "edit": + // contentFinal = p.renderEditContent() + // case "patch": + // contentFinal = p.renderPatchContent() + // case "write": + // contentFinal = p.renderWriteContent() + // case "fetch": + // contentFinal = p.renderFetchContent() + // default: + // contentFinal = p.renderDefaultContent() + // } + // + // content := lipgloss.JoinVertical( + // lipgloss.Top, + // title, + // baseStyle.Render(strings.Repeat(" ", lipgloss.Width(title))), + // headerContent, + // contentFinal, + // buttons, + // baseStyle.Render(strings.Repeat(" ", p.width-4)), + // ) + // + // return baseStyle. + // Padding(1, 0, 0, 1). + // Border(lipgloss.RoundedBorder()). + // BorderBackground(t.Background()). + // BorderForeground(t.TextMuted()). + // Width(p.width). + // Height(p.height). 
+ // Render( + // content, + // ) +} + +func (p *permissionDialogComponent) View() string { + return p.render() +} + +func (p *permissionDialogComponent) SetSize() tea.Cmd { + // if p.permission.ID == "" { + // return nil + // } + // switch p.permission.ToolName { + // case "bash": + // p.width = int(float64(p.windowSize.Width) * 0.4) + // p.height = int(float64(p.windowSize.Height) * 0.3) + // case "edit": + // p.width = int(float64(p.windowSize.Width) * 0.8) + // p.height = int(float64(p.windowSize.Height) * 0.8) + // case "write": + // p.width = int(float64(p.windowSize.Width) * 0.8) + // p.height = int(float64(p.windowSize.Height) * 0.8) + // case "fetch": + // p.width = int(float64(p.windowSize.Width) * 0.4) + // p.height = int(float64(p.windowSize.Height) * 0.3) + // default: + // p.width = int(float64(p.windowSize.Width) * 0.7) + // p.height = int(float64(p.windowSize.Height) * 0.5) + // } + return nil +} + +// func (p *permissionDialogCmp) SetPermissions(permission permission.PermissionRequest) tea.Cmd { +// p.permission = permission +// return p.SetSize() +// } + +// Helper to get or set cached diff content +func (c *permissionDialogComponent) GetOrSetDiff(key string, generator func() (string, error)) string { + if cached, ok := c.diffCache[key]; ok { + return cached + } + + content, err := generator() + if err != nil { + return fmt.Sprintf("Error formatting diff: %v", err) + } + + c.diffCache[key] = content + + return content +} + +// Helper to get or set cached markdown content +func (c *permissionDialogComponent) GetOrSetMarkdown(key string, generator func() (string, error)) string { + if cached, ok := c.markdownCache[key]; ok { + return cached + } + + content, err := generator() + if err != nil { + return fmt.Sprintf("Error rendering markdown: %v", err) + } + + c.markdownCache[key] = content + + return content +} + +func NewPermissionDialogCmp() PermissionDialogComponent { + // Create viewport for content + contentViewport := viewport.New() // (0, 0) + + return &permissionDialogComponent{ + contentViewPort: contentViewport, + selectedOption: 0, // Default to "Allow" + diffCache: make(map[string]string), + markdownCache: make(map[string]string), + } +} diff --git a/packages/tui/internal/components/dialog/search.go b/packages/tui/internal/components/dialog/search.go deleted file mode 100644 index cdb2b824..00000000 --- a/packages/tui/internal/components/dialog/search.go +++ /dev/null @@ -1,247 +0,0 @@ -package dialog - -import ( - "github.com/charmbracelet/bubbles/v2/key" - "github.com/charmbracelet/bubbles/v2/textinput" - tea "github.com/charmbracelet/bubbletea/v2" - "github.com/charmbracelet/lipgloss/v2" - "github.com/sst/opencode/internal/components/list" - "github.com/sst/opencode/internal/styles" - "github.com/sst/opencode/internal/theme" -) - -// SearchQueryChangedMsg is emitted when the search query changes -type SearchQueryChangedMsg struct { - Query string -} - -// SearchSelectionMsg is emitted when an item is selected -type SearchSelectionMsg struct { - Item any - Index int -} - -// SearchCancelledMsg is emitted when the search is cancelled -type SearchCancelledMsg struct{} - -// SearchRemoveItemMsg is emitted when Ctrl+X is pressed to remove an item -type SearchRemoveItemMsg struct { - Item any - Index int -} - -// SearchDialog is a reusable component that combines a text input with a list -type SearchDialog struct { - textInput textinput.Model - list list.List[list.Item] - width int - height int - focused bool -} - -type searchKeyMap struct { - Up key.Binding - Down 
key.Binding - Enter key.Binding - Escape key.Binding - Remove key.Binding -} - -var searchKeys = searchKeyMap{ - Up: key.NewBinding( - key.WithKeys("up", "ctrl+p"), - key.WithHelp("↑", "previous item"), - ), - Down: key.NewBinding( - key.WithKeys("down", "ctrl+n"), - key.WithHelp("↓", "next item"), - ), - Enter: key.NewBinding( - key.WithKeys("enter"), - key.WithHelp("enter", "select"), - ), - Escape: key.NewBinding( - key.WithKeys("esc"), - key.WithHelp("esc", "cancel"), - ), - Remove: key.NewBinding( - key.WithKeys("ctrl+x"), - key.WithHelp("ctrl+x", "remove from recent"), - ), -} - -// NewSearchDialog creates a new SearchDialog -func NewSearchDialog(placeholder string, maxVisibleHeight int) *SearchDialog { - t := theme.CurrentTheme() - bgColor := t.BackgroundElement() - textColor := t.Text() - textMutedColor := t.TextMuted() - - ti := textinput.New() - ti.Placeholder = placeholder - ti.Styles.Blurred.Placeholder = styles.NewStyle(). - Foreground(textMutedColor). - Background(bgColor). - Lipgloss() - ti.Styles.Blurred.Text = styles.NewStyle(). - Foreground(textColor). - Background(bgColor). - Lipgloss() - ti.Styles.Focused.Placeholder = styles.NewStyle(). - Foreground(textMutedColor). - Background(bgColor). - Lipgloss() - ti.Styles.Focused.Text = styles.NewStyle(). - Foreground(textColor). - Background(bgColor). - Lipgloss() - ti.Styles.Focused.Prompt = styles.NewStyle(). - Background(bgColor). - Lipgloss() - ti.Styles.Cursor.Color = t.Primary() - ti.VirtualCursor = true - - ti.Prompt = " " - ti.CharLimit = -1 - ti.Focus() - - emptyList := list.NewListComponent( - list.WithItems([]list.Item{}), - list.WithMaxVisibleHeight[list.Item](maxVisibleHeight), - list.WithFallbackMessage[list.Item](" No items"), - list.WithAlphaNumericKeys[list.Item](false), - list.WithRenderFunc( - func(item list.Item, selected bool, width int, baseStyle styles.Style) string { - return item.Render(selected, width, baseStyle) - }, - ), - list.WithSelectableFunc(func(item list.Item) bool { - return item.Selectable() - }), - ) - - return &SearchDialog{ - textInput: ti, - list: emptyList, - focused: true, - } -} - -func (s *SearchDialog) Init() tea.Cmd { - return textinput.Blink -} - -func (s *SearchDialog) Update(msg tea.Msg) (tea.Model, tea.Cmd) { - var cmds []tea.Cmd - - switch msg := msg.(type) { - case tea.KeyMsg: - switch msg.String() { - case "ctrl+c": - value := s.textInput.Value() - if value == "" { - return s, nil - } - s.textInput.Reset() - cmds = append(cmds, func() tea.Msg { - return SearchQueryChangedMsg{Query: ""} - }) - } - - switch { - case key.Matches(msg, searchKeys.Escape): - return s, func() tea.Msg { return SearchCancelledMsg{} } - - case key.Matches(msg, searchKeys.Enter): - if selectedItem, idx := s.list.GetSelectedItem(); idx != -1 { - return s, func() tea.Msg { - return SearchSelectionMsg{Item: selectedItem, Index: idx} - } - } - - case key.Matches(msg, searchKeys.Remove): - if selectedItem, idx := s.list.GetSelectedItem(); idx != -1 { - return s, func() tea.Msg { - return SearchRemoveItemMsg{Item: selectedItem, Index: idx} - } - } - - case key.Matches(msg, searchKeys.Up): - var cmd tea.Cmd - listModel, cmd := s.list.Update(msg) - s.list = listModel.(list.List[list.Item]) - if cmd != nil { - cmds = append(cmds, cmd) - } - - case key.Matches(msg, searchKeys.Down): - var cmd tea.Cmd - listModel, cmd := s.list.Update(msg) - s.list = listModel.(list.List[list.Item]) - if cmd != nil { - cmds = append(cmds, cmd) - } - - default: - oldValue := s.textInput.Value() - var cmd tea.Cmd - s.textInput, 
cmd = s.textInput.Update(msg) - if cmd != nil { - cmds = append(cmds, cmd) - } - if newValue := s.textInput.Value(); newValue != oldValue { - cmds = append(cmds, func() tea.Msg { - return SearchQueryChangedMsg{Query: newValue} - }) - } - } - } - - return s, tea.Batch(cmds...) -} - -func (s *SearchDialog) View() string { - s.list.SetMaxWidth(s.width) - listView := s.list.View() - listView = lipgloss.PlaceVertical(s.list.GetMaxVisibleHeight(), lipgloss.Top, listView) - textinput := s.textInput.View() - return textinput + "\n\n" + listView -} - -// SetWidth sets the width of the search dialog -func (s *SearchDialog) SetWidth(width int) { - s.width = width - s.textInput.SetWidth(width - 2) // Account for padding and borders -} - -// SetHeight sets the height of the search dialog -func (s *SearchDialog) SetHeight(height int) { - s.height = height -} - -// SetItems updates the list items -func (s *SearchDialog) SetItems(items []list.Item) { - s.list.SetItems(items) -} - -// GetQuery returns the current search query -func (s *SearchDialog) GetQuery() string { - return s.textInput.Value() -} - -// SetQuery sets the search query -func (s *SearchDialog) SetQuery(query string) { - s.textInput.SetValue(query) -} - -// Focus focuses the search dialog -func (s *SearchDialog) Focus() { - s.focused = true - s.textInput.Focus() -} - -// Blur removes focus from the search dialog -func (s *SearchDialog) Blur() { - s.focused = false - s.textInput.Blur() -} diff --git a/packages/tui/internal/components/dialog/session.go b/packages/tui/internal/components/dialog/session.go index 307897bc..6f18dba0 100644 --- a/packages/tui/internal/components/dialog/session.go +++ b/packages/tui/internal/components/dialog/session.go @@ -28,26 +28,17 @@ type SessionDialog interface { type sessionItem struct { title string isDeleteConfirming bool - isCurrentSession bool } -func (s sessionItem) Render( - selected bool, - width int, - isFirstInViewport bool, - baseStyle styles.Style, -) string { +func (s sessionItem) Render(selected bool, width int) string { t := theme.CurrentTheme() + baseStyle := styles.NewStyle() var text string if s.isDeleteConfirming { text = "Press again to confirm delete" } else { - if s.isCurrentSession { - text = "● " + s.title - } else { - text = s.title - } + text = s.title } truncatedStr := truncate.StringWithTail(text, uint(width-1), "...") @@ -61,14 +52,6 @@ func (s sessionItem) Render( Foreground(t.BackgroundElement()). Width(width). PaddingLeft(1) - } else if s.isCurrentSession { - // Different style for current session when selected - itemStyle = baseStyle. - Background(t.Primary()). - Foreground(t.BackgroundElement()). - Width(width). - PaddingLeft(1). - Bold(true) } else { // Normal selection itemStyle = baseStyle. @@ -83,12 +66,6 @@ func (s sessionItem) Render( itemStyle = baseStyle. Foreground(t.Error()). PaddingLeft(1) - } else if s.isCurrentSession { - // Highlight current session when not selected - itemStyle = baseStyle. - Foreground(t.Primary()). - PaddingLeft(1). - Bold(true) } else { itemStyle = baseStyle. 
PaddingLeft(1) @@ -98,10 +75,6 @@ func (s sessionItem) Render( return itemStyle.Render(truncatedStr) } -func (s sessionItem) Selectable() bool { - return true -} - type sessionDialog struct { width int height int @@ -137,13 +110,6 @@ func (s *sessionDialog) Update(msg tea.Msg) (tea.Model, tea.Cmd) { util.CmdHandler(app.SessionSelectedMsg(&selectedSession)), ) } - case "n": - s.app.Session = &opencode.Session{} - s.app.Messages = []app.Message{} - return s, tea.Sequence( - util.CmdHandler(modal.CloseModalMsg{}), - util.CmdHandler(app.SessionClearedMsg{}), - ) case "x", "delete", "backspace": if _, idx := s.list.GetSelectedItem(); idx >= 0 && idx < len(s.sessions) { if s.deleteConfirmation == idx { @@ -184,21 +150,10 @@ func (s *sessionDialog) Render(background string) string { listView := s.list.View() t := theme.CurrentTheme() - keyStyle := styles.NewStyle().Foreground(t.Text()).Background(t.BackgroundPanel()).Render - mutedStyle := styles.NewStyle().Foreground(t.TextMuted()).Background(t.BackgroundPanel()).Render - - leftHelp := keyStyle("n") + mutedStyle(" new session") - rightHelp := keyStyle("x/del") + mutedStyle(" delete session") - - bgColor := t.BackgroundPanel() - helpText := layout.Render(layout.FlexOptions{ - Direction: layout.Row, - Justify: layout.JustifySpaceBetween, - Width: layout.Current.Container.Width - 14, - Background: &bgColor, - }, layout.FlexItem{View: leftHelp}, layout.FlexItem{View: rightHelp}) - - helpText = styles.NewStyle().PaddingLeft(1).PaddingTop(1).Render(helpText) + helpStyle := styles.NewStyle().PaddingLeft(1).PaddingTop(1) + helpText := styles.NewStyle().Foreground(t.Text()).Render("x/del") + helpText = helpText + styles.NewStyle().Background(t.BackgroundElement()).Foreground(t.TextMuted()).Render(" delete session") + helpText = helpStyle.Render(helpText) content := strings.Join([]string{listView, helpText}, "\n") @@ -213,7 +168,6 @@ func (s *sessionDialog) updateListItems() { item := sessionItem{ title: sess.Title, isDeleteConfirming: s.deleteConfirmation == i, - isCurrentSession: s.app.Session != nil && s.app.Session.ID == sess.ID, } items = append(items, item) } @@ -249,23 +203,15 @@ func NewSessionDialog(app *app.App) SessionDialog { items = append(items, sessionItem{ title: sess.Title, isDeleteConfirming: false, - isCurrentSession: app.Session != nil && app.Session.ID == sess.ID, }) } + // Create a generic list component listComponent := list.NewListComponent( - list.WithItems(items), - list.WithMaxVisibleHeight[sessionItem](10), - list.WithFallbackMessage[sessionItem]("No sessions available"), - list.WithAlphaNumericKeys[sessionItem](true), - list.WithRenderFunc( - func(item sessionItem, selected bool, width int, baseStyle styles.Style) string { - return item.Render(selected, width, false, baseStyle) - }, - ), - list.WithSelectableFunc(func(item sessionItem) bool { - return true - }), + items, + 10, // maxVisibleSessions + "No sessions available", + true, // useAlphaNumericKeys ) listComponent.SetMaxWidth(layout.Current.Container.Width - 12) diff --git a/packages/tui/internal/components/dialog/theme.go b/packages/tui/internal/components/dialog/theme.go index c71cddc8..b6e97061 100644 --- a/packages/tui/internal/components/dialog/theme.go +++ b/packages/tui/internal/components/dialog/theme.go @@ -5,7 +5,6 @@ import ( list "github.com/sst/opencode/internal/components/list" "github.com/sst/opencode/internal/components/modal" "github.com/sst/opencode/internal/layout" - "github.com/sst/opencode/internal/styles" "github.com/sst/opencode/internal/theme" 
"github.com/sst/opencode/internal/util" ) @@ -25,7 +24,7 @@ type themeDialog struct { height int modal *modal.Modal - list list.List[list.Item] + list list.List[list.StringItem] originalTheme string themeApplied bool } @@ -43,18 +42,16 @@ func (t *themeDialog) Update(msg tea.Msg) (tea.Model, tea.Cmd) { switch msg.String() { case "enter": if item, idx := t.list.GetSelectedItem(); idx >= 0 { - if stringItem, ok := item.(list.StringItem); ok { - selectedTheme := string(stringItem) - if err := theme.SetTheme(selectedTheme); err != nil { - // status.Error(err.Error()) - return t, nil - } - t.themeApplied = true - return t, tea.Sequence( - util.CmdHandler(modal.CloseModalMsg{}), - util.CmdHandler(ThemeSelectedMsg{ThemeName: selectedTheme}), - ) + selectedTheme := string(item) + if err := theme.SetTheme(selectedTheme); err != nil { + // status.Error(err.Error()) + return t, nil } + t.themeApplied = true + return t, tea.Sequence( + util.CmdHandler(modal.CloseModalMsg{}), + util.CmdHandler(ThemeSelectedMsg{ThemeName: selectedTheme}), + ) } } @@ -64,13 +61,11 @@ func (t *themeDialog) Update(msg tea.Msg) (tea.Model, tea.Cmd) { var cmd tea.Cmd listModel, cmd := t.list.Update(msg) - t.list = listModel.(list.List[list.Item]) + t.list = listModel.(list.List[list.StringItem]) if item, newIdx := t.list.GetSelectedItem(); newIdx >= 0 && newIdx != prevIdx { - if stringItem, ok := item.(list.StringItem); ok { - theme.SetTheme(string(stringItem)) - return t, util.CmdHandler(ThemeSelectedMsg{ThemeName: string(stringItem)}) - } + theme.SetTheme(string(item)) + return t, util.CmdHandler(ThemeSelectedMsg{ThemeName: string(item)}) } return t, cmd } @@ -99,32 +94,21 @@ func NewThemeDialog() ThemeDialog { } } - // Convert themes to list items - items := make([]list.Item, len(themes)) - for i, theme := range themes { - items[i] = list.StringItem(theme) - } - - listComponent := list.NewListComponent( - list.WithItems(items), - list.WithMaxVisibleHeight[list.Item](10), - list.WithFallbackMessage[list.Item]("No themes available"), - list.WithAlphaNumericKeys[list.Item](true), - list.WithRenderFunc(func(item list.Item, selected bool, width int, baseStyle styles.Style) string { - return item.Render(selected, width, baseStyle) - }), - list.WithSelectableFunc(func(item list.Item) bool { - return item.Selectable() - }), + list := list.NewStringList( + themes, + 10, // maxVisibleThemes + "No themes available", + true, ) // Set the initial selection to the current theme - listComponent.SetSelectedIndex(selectedIdx) + list.SetSelectedIndex(selectedIdx) // Set the max width for the list to match the modal width - listComponent.SetMaxWidth(36) // 40 (modal max width) - 4 (modal padding) + list.SetMaxWidth(36) // 40 (modal max width) - 4 (modal padding) + return &themeDialog{ - list: listComponent, + list: list, modal: modal.New(modal.WithTitle("Select Theme"), modal.WithMaxWidth(40)), originalTheme: currentTheme, themeApplied: false, diff --git a/packages/tui/internal/components/diff/diff.go b/packages/tui/internal/components/diff/diff.go index da2e007c..03f58cc2 100644 --- a/packages/tui/internal/components/diff/diff.go +++ b/packages/tui/internal/components/diff/diff.go @@ -38,10 +38,6 @@ const ( LineRemoved // Line removed from the old file ) -var ( - ansiRegex = regexp.MustCompile(`\x1b(?:[@-Z\\-_]|\[[0-9?]*(?:;[0-9?]*)*[@-~])`) -) - // Segment represents a portion of a line for intra-line highlighting type Segment struct { Start int @@ -552,6 +548,7 @@ func createStyles(t theme.Theme) (removedLineStyle, addedLineStyle, 
contextLineS // applyHighlighting applies intra-line highlighting to a piece of text func applyHighlighting(content string, segments []Segment, segmentType LineType, highlightBg compat.AdaptiveColor) string { // Find all ANSI sequences in the content + ansiRegex := regexp.MustCompile(`\x1b(?:[@-Z\\-_]|\[[0-9?]*(?:;[0-9?]*)*[@-~])`) ansiMatches := ansiRegex.FindAllStringIndex(content, -1) // Build a mapping of visible character positions to their actual indices diff --git a/packages/tui/internal/components/list/list.go b/packages/tui/internal/components/list/list.go index fd2d7d93..a7ea3458 100644 --- a/packages/tui/internal/components/list/list.go +++ b/packages/tui/internal/components/list/list.go @@ -5,88 +5,16 @@ import ( "github.com/charmbracelet/bubbles/v2/key" tea "github.com/charmbracelet/bubbletea/v2" - "github.com/charmbracelet/lipgloss/v2" "github.com/muesli/reflow/truncate" "github.com/sst/opencode/internal/styles" "github.com/sst/opencode/internal/theme" ) -// Item interface that all list items must implement -type Item interface { - Render(selected bool, width int, baseStyle styles.Style) string - Selectable() bool +type ListItem interface { + Render(selected bool, width int) string } -// RenderFunc defines how to render an item in the list -type RenderFunc[T any] func(item T, selected bool, width int, baseStyle styles.Style) string - -// SelectableFunc defines whether an item is selectable -type SelectableFunc[T any] func(item T) bool - -// Options holds configuration for the list component -type Options[T any] struct { - items []T - maxVisibleHeight int - fallbackMsg string - useAlphaNumericKeys bool - renderItem RenderFunc[T] - isSelectable SelectableFunc[T] - baseStyle styles.Style -} - -// Option is a function that configures the list component -type Option[T any] func(*Options[T]) - -// WithItems sets the initial items for the list -func WithItems[T any](items []T) Option[T] { - return func(o *Options[T]) { - o.items = items - } -} - -// WithMaxVisibleHeight sets the maximum visible height in lines -func WithMaxVisibleHeight[T any](height int) Option[T] { - return func(o *Options[T]) { - o.maxVisibleHeight = height - } -} - -// WithFallbackMessage sets the message to show when the list is empty -func WithFallbackMessage[T any](msg string) Option[T] { - return func(o *Options[T]) { - o.fallbackMsg = msg - } -} - -// WithAlphaNumericKeys enables j/k navigation keys -func WithAlphaNumericKeys[T any](enabled bool) Option[T] { - return func(o *Options[T]) { - o.useAlphaNumericKeys = enabled - } -} - -// WithRenderFunc sets the function to render items -func WithRenderFunc[T any](fn RenderFunc[T]) Option[T] { - return func(o *Options[T]) { - o.renderItem = fn - } -} - -// WithSelectableFunc sets the function to determine if items are selectable -func WithSelectableFunc[T any](fn SelectableFunc[T]) Option[T] { - return func(o *Options[T]) { - o.isSelectable = fn - } -} - -// WithStyle sets the base style that gets passed to render functions -func WithStyle[T any](style styles.Style) Option[T] { - return func(o *Options[T]) { - o.baseStyle = style - } -} - -type List[T any] interface { +type List[T ListItem] interface { tea.Model tea.ViewModel SetMaxWidth(maxWidth int) @@ -96,21 +24,17 @@ type List[T any] interface { SetSelectedIndex(idx int) SetEmptyMessage(msg string) IsEmpty() bool - GetMaxVisibleHeight() int } -type listComponent[T any] struct { +type listComponent[T ListItem] struct { fallbackMsg string items []T selectedIdx int maxWidth int - maxVisibleHeight int + 
maxVisibleItems int useAlphaNumericKeys bool width int height int - renderItem RenderFunc[T] - isSelectable SelectableFunc[T] - baseStyle styles.Style } type listKeyMap struct { @@ -122,11 +46,11 @@ type listKeyMap struct { var simpleListKeys = listKeyMap{ Up: key.NewBinding( - key.WithKeys("up", "ctrl+p"), + key.WithKeys("up"), key.WithHelp("↑", "previous list item"), ), Down: key.NewBinding( - key.WithKeys("down", "ctrl+n"), + key.WithKeys("down"), key.WithHelp("↓", "next list item"), ), UpAlpha: key.NewBinding( @@ -148,10 +72,14 @@ func (c *listComponent[T]) Update(msg tea.Msg) (tea.Model, tea.Cmd) { case tea.KeyMsg: switch { case key.Matches(msg, simpleListKeys.Up) || (c.useAlphaNumericKeys && key.Matches(msg, simpleListKeys.UpAlpha)): - c.moveUp() + if c.selectedIdx > 0 { + c.selectedIdx-- + } return c, nil case key.Matches(msg, simpleListKeys.Down) || (c.useAlphaNumericKeys && key.Matches(msg, simpleListKeys.DownAlpha)): - c.moveDown() + if c.selectedIdx < len(c.items)-1 { + c.selectedIdx++ + } return c, nil } } @@ -159,50 +87,8 @@ func (c *listComponent[T]) Update(msg tea.Msg) (tea.Model, tea.Cmd) { return c, nil } -// moveUp moves the selection up, skipping non-selectable items -func (c *listComponent[T]) moveUp() { - if len(c.items) == 0 { - return - } - - // Find the previous selectable item - for i := c.selectedIdx - 1; i >= 0; i-- { - if c.isSelectable(c.items[i]) { - c.selectedIdx = i - return - } - } - - // If no selectable item found above, stay at current position -} - -// moveDown moves the selection down, skipping non-selectable items -func (c *listComponent[T]) moveDown() { - if len(c.items) == 0 { - return - } - - originalIdx := c.selectedIdx - for { - if c.selectedIdx < len(c.items)-1 { - c.selectedIdx++ - } else { - break - } - - if c.isSelectable(c.items[c.selectedIdx]) { - return - } - - // Prevent infinite loop - if c.selectedIdx == originalIdx { - break - } - } -} - func (c *listComponent[T]) GetSelectedItem() (T, int) { - if len(c.items) > 0 && c.isSelectable(c.items[c.selectedIdx]) { + if len(c.items) > 0 { return c.items[c.selectedIdx], c.selectedIdx } @@ -211,13 +97,8 @@ func (c *listComponent[T]) GetSelectedItem() (T, int) { } func (c *listComponent[T]) SetItems(items []T) { - c.items = items c.selectedIdx = 0 - - // Ensure initial selection is on a selectable item - if len(items) > 0 && !c.isSelectable(items[0]) { - c.moveDown() - } + c.items = items } func (c *listComponent[T]) GetItems() []T { @@ -242,145 +123,57 @@ func (c *listComponent[T]) SetSelectedIndex(idx int) { } } -func (c *listComponent[T]) GetMaxVisibleHeight() int { - return c.maxVisibleHeight -} - func (c *listComponent[T]) View() string { items := c.items maxWidth := c.maxWidth if maxWidth == 0 { maxWidth = 80 // Default width if not set } + maxVisibleItems := min(c.maxVisibleItems, len(items)) + startIdx := 0 if len(items) <= 0 { return c.fallbackMsg } - // Calculate viewport based on actual heights - startIdx, endIdx := c.calculateViewport() + if len(items) > maxVisibleItems { + halfVisible := maxVisibleItems / 2 + if c.selectedIdx >= halfVisible && c.selectedIdx < len(items)-halfVisible { + startIdx = c.selectedIdx - halfVisible + } else if c.selectedIdx >= len(items)-halfVisible { + startIdx = len(items) - maxVisibleItems + } + } - listItems := make([]string, 0, endIdx-startIdx) + endIdx := min(startIdx+maxVisibleItems, len(items)) + + listItems := make([]string, 0, maxVisibleItems) for i := startIdx; i < endIdx; i++ { item := items[i] - - // Special handling for HeaderItem to remove top 
margin on first item - if i == startIdx { - // Check if this is a HeaderItem - if _, ok := any(item).(Item); ok { - if headerItem, isHeader := any(item).(HeaderItem); isHeader { - // Render header without top margin when it's first - t := theme.CurrentTheme() - truncatedStr := truncate.StringWithTail(string(headerItem), uint(maxWidth-1), "...") - headerStyle := c.baseStyle. - Foreground(t.Accent()). - Bold(true). - MarginBottom(0). - PaddingLeft(1) - listItems = append(listItems, headerStyle.Render(truncatedStr)) - continue - } - } - } - - title := c.renderItem(item, i == c.selectedIdx, maxWidth, c.baseStyle) + title := item.Render(i == c.selectedIdx, maxWidth) listItems = append(listItems, title) } return strings.Join(listItems, "\n") } -// calculateViewport determines which items to show based on available space -func (c *listComponent[T]) calculateViewport() (startIdx, endIdx int) { - items := c.items - if len(items) == 0 { - return 0, 0 - } - - // Calculate heights of all items - itemHeights := make([]int, len(items)) - for i, item := range items { - rendered := c.renderItem(item, false, c.maxWidth, c.baseStyle) - itemHeights[i] = lipgloss.Height(rendered) - } - - // Find the range of items that fit within maxVisibleHeight - // Start by trying to center the selected item - start := 0 - end := len(items) - - // Calculate height from start to selected - heightToSelected := 0 - for i := 0; i <= c.selectedIdx && i < len(items); i++ { - heightToSelected += itemHeights[i] - } - - // If selected item is beyond visible height, scroll to show it - if heightToSelected > c.maxVisibleHeight { - // Start from selected and work backwards to find start - currentHeight := itemHeights[c.selectedIdx] - start = c.selectedIdx - - for i := c.selectedIdx - 1; i >= 0 && currentHeight+itemHeights[i] <= c.maxVisibleHeight; i-- { - currentHeight += itemHeights[i] - start = i - } - } - - // Calculate end based on start - currentHeight := 0 - for i := start; i < len(items); i++ { - if currentHeight+itemHeights[i] > c.maxVisibleHeight { - end = i - break - } - currentHeight += itemHeights[i] - } - - return start, end -} - -func abs(x int) int { - if x < 0 { - return -x - } - return x -} - -func max(a, b int) int { - if a > b { - return a - } - return b -} - -func NewListComponent[T any](opts ...Option[T]) List[T] { - options := &Options[T]{ - baseStyle: styles.NewStyle(), // Default empty style - } - - for _, opt := range opts { - opt(options) - } - +func NewListComponent[T ListItem](items []T, maxVisibleItems int, fallbackMsg string, useAlphaNumericKeys bool) List[T] { return &listComponent[T]{ - fallbackMsg: options.fallbackMsg, - items: options.items, - maxVisibleHeight: options.maxVisibleHeight, - useAlphaNumericKeys: options.useAlphaNumericKeys, + fallbackMsg: fallbackMsg, + items: items, + maxVisibleItems: maxVisibleItems, + useAlphaNumericKeys: useAlphaNumericKeys, selectedIdx: 0, - renderItem: options.renderItem, - isSelectable: options.isSelectable, - baseStyle: options.baseStyle, } } -// StringItem is a simple implementation of Item for string values +// StringItem is a simple implementation of ListItem for string values type StringItem string -func (s StringItem) Render(selected bool, width int, baseStyle styles.Style) string { +func (s StringItem) Render(selected bool, width int) string { t := theme.CurrentTheme() + baseStyle := styles.NewStyle() truncatedStr := truncate.StringWithTail(string(s), uint(width-1), "...") @@ -400,32 +193,11 @@ func (s StringItem) Render(selected bool, width int, baseStyle 
styles.Style) str return itemStyle.Render(truncatedStr) } -func (s StringItem) Selectable() bool { - return true +// NewStringList creates a new list component with string items +func NewStringList(items []string, maxVisibleItems int, fallbackMsg string, useAlphaNumericKeys bool) List[StringItem] { + stringItems := make([]StringItem, len(items)) + for i, item := range items { + stringItems[i] = StringItem(item) + } + return NewListComponent(stringItems, maxVisibleItems, fallbackMsg, useAlphaNumericKeys) } - -// HeaderItem is a non-selectable header item for grouping -type HeaderItem string - -func (h HeaderItem) Render(selected bool, width int, baseStyle styles.Style) string { - t := theme.CurrentTheme() - - truncatedStr := truncate.StringWithTail(string(h), uint(width-1), "...") - - headerStyle := baseStyle. - Foreground(t.Accent()). - Bold(true). - MarginTop(1). - MarginBottom(0). - PaddingLeft(1) - - return headerStyle.Render(truncatedStr) -} - -func (h HeaderItem) Selectable() bool { - return false -} - -// Ensure StringItem and HeaderItem implement Item -var _ Item = StringItem("") -var _ Item = HeaderItem("") diff --git a/packages/tui/internal/components/list/list_test.go b/packages/tui/internal/components/list/list_test.go deleted file mode 100644 index 4d954409..00000000 --- a/packages/tui/internal/components/list/list_test.go +++ /dev/null @@ -1,219 +0,0 @@ -package list - -import ( - "testing" - - tea "github.com/charmbracelet/bubbletea/v2" - "github.com/sst/opencode/internal/styles" -) - -// testItem is a simple test implementation of ListItem -type testItem struct { - value string -} - -func (t testItem) Render( - selected bool, - width int, - isFirstInViewport bool, - baseStyle styles.Style, -) string { - return t.value -} - -func (t testItem) Selectable() bool { - return true -} - -// createTestList creates a list with test items for testing -func createTestList() *listComponent[testItem] { - items := []testItem{ - {value: "item1"}, - {value: "item2"}, - {value: "item3"}, - } - list := NewListComponent( - WithItems(items), - WithMaxVisibleItems[testItem](5), - WithFallbackMessage[testItem]("empty"), - WithAlphaNumericKeys[testItem](false), - WithRenderFunc( - func(item testItem, selected bool, width int, baseStyle styles.Style) string { - return item.Render(selected, width, false, baseStyle) - }, - ), - WithSelectableFunc(func(item testItem) bool { - return item.Selectable() - }), - WithHeightFunc(func(item testItem, isFirstInViewport bool) int { - return 1 - }), - ) - - return list.(*listComponent[testItem]) -} - -func TestArrowKeyNavigation(t *testing.T) { - list := createTestList() - - // Test down arrow navigation - downKey := tea.KeyPressMsg{Code: tea.KeyDown} - updatedModel, _ := list.Update(downKey) - list = updatedModel.(*listComponent[testItem]) - _, idx := list.GetSelectedItem() - if idx != 1 { - t.Errorf("Expected selected index 1 after down arrow, got %d", idx) - } - - // Test up arrow navigation - upKey := tea.KeyPressMsg{Code: tea.KeyUp} - updatedModel, _ = list.Update(upKey) - list = updatedModel.(*listComponent[testItem]) - _, idx = list.GetSelectedItem() - if idx != 0 { - t.Errorf("Expected selected index 0 after up arrow, got %d", idx) - } -} - -func TestJKKeyNavigation(t *testing.T) { - items := []testItem{ - {value: "item1"}, - {value: "item2"}, - {value: "item3"}, - } - // Create list with alpha keys enabled - list := NewListComponent( - WithItems(items), - WithMaxVisibleItems[testItem](5), - WithFallbackMessage[testItem]("empty"), - 
WithAlphaNumericKeys[testItem](true), - WithRenderFunc( - func(item testItem, selected bool, width int, baseStyle styles.Style) string { - return item.Render(selected, width, false, baseStyle) - }, - ), - WithSelectableFunc(func(item testItem) bool { - return item.Selectable() - }), - WithHeightFunc(func(item testItem, isFirstInViewport bool) int { - return 1 - }), - ) - - // Test j key (down) - jKey := tea.KeyPressMsg{Code: 'j', Text: "j"} - updatedModel, _ := list.Update(jKey) - list = updatedModel.(*listComponent[testItem]) - _, idx := list.GetSelectedItem() - if idx != 1 { - t.Errorf("Expected selected index 1 after 'j' key, got %d", idx) - } - - // Test k key (up) - kKey := tea.KeyPressMsg{Code: 'k', Text: "k"} - updatedModel, _ = list.Update(kKey) - list = updatedModel.(*listComponent[testItem]) - _, idx = list.GetSelectedItem() - if idx != 0 { - t.Errorf("Expected selected index 0 after 'k' key, got %d", idx) - } -} - -func TestCtrlNavigation(t *testing.T) { - list := createTestList() - - // Test Ctrl-N (down) - ctrlN := tea.KeyPressMsg{Code: 'n', Mod: tea.ModCtrl} - updatedModel, _ := list.Update(ctrlN) - list = updatedModel.(*listComponent[testItem]) - _, idx := list.GetSelectedItem() - if idx != 1 { - t.Errorf("Expected selected index 1 after Ctrl-N, got %d", idx) - } - - // Test Ctrl-P (up) - ctrlP := tea.KeyPressMsg{Code: 'p', Mod: tea.ModCtrl} - updatedModel, _ = list.Update(ctrlP) - list = updatedModel.(*listComponent[testItem]) - _, idx = list.GetSelectedItem() - if idx != 0 { - t.Errorf("Expected selected index 0 after Ctrl-P, got %d", idx) - } -} - -func TestNavigationBoundaries(t *testing.T) { - list := createTestList() - - // Test up arrow at first item (should stay at 0) - upKey := tea.KeyPressMsg{Code: tea.KeyUp} - updatedModel, _ := list.Update(upKey) - list = updatedModel.(*listComponent[testItem]) - _, idx := list.GetSelectedItem() - if idx != 0 { - t.Errorf("Expected to stay at index 0 when pressing up at first item, got %d", idx) - } - - // Move to last item - downKey := tea.KeyPressMsg{Code: tea.KeyDown} - updatedModel, _ = list.Update(downKey) - list = updatedModel.(*listComponent[testItem]) - updatedModel, _ = list.Update(downKey) - list = updatedModel.(*listComponent[testItem]) - _, idx = list.GetSelectedItem() - if idx != 2 { - t.Errorf("Expected to be at index 2, got %d", idx) - } - - // Test down arrow at last item (should stay at 2) - updatedModel, _ = list.Update(downKey) - list = updatedModel.(*listComponent[testItem]) - _, idx = list.GetSelectedItem() - if idx != 2 { - t.Errorf("Expected to stay at index 2 when pressing down at last item, got %d", idx) - } -} - -func TestEmptyList(t *testing.T) { - emptyList := NewListComponent( - WithItems([]testItem{}), - WithMaxVisibleItems[testItem](5), - WithFallbackMessage[testItem]("empty"), - WithAlphaNumericKeys[testItem](false), - WithRenderFunc( - func(item testItem, selected bool, width int, baseStyle styles.Style) string { - return item.Render(selected, width, false, baseStyle) - }, - ), - WithSelectableFunc(func(item testItem) bool { - return item.Selectable() - }), - WithHeightFunc(func(item testItem, isFirstInViewport bool) int { - return 1 - }), - ) - - // Test navigation on empty list (should not crash) - downKey := tea.KeyPressMsg{Code: tea.KeyDown} - upKey := tea.KeyPressMsg{Code: tea.KeyUp} - ctrlN := tea.KeyPressMsg{Code: 'n', Mod: tea.ModCtrl} - ctrlP := tea.KeyPressMsg{Code: 'p', Mod: tea.ModCtrl} - - updatedModel, _ := emptyList.Update(downKey) - emptyList = 
updatedModel.(*listComponent[testItem]) - updatedModel, _ = emptyList.Update(upKey) - emptyList = updatedModel.(*listComponent[testItem]) - updatedModel, _ = emptyList.Update(ctrlN) - emptyList = updatedModel.(*listComponent[testItem]) - updatedModel, _ = emptyList.Update(ctrlP) - emptyList = updatedModel.(*listComponent[testItem]) - - // Verify empty list behavior - _, idx := emptyList.GetSelectedItem() - if idx != -1 { - t.Errorf("Expected index -1 for empty list, got %d", idx) - } - - if !emptyList.IsEmpty() { - t.Error("Expected IsEmpty() to return true for empty list") - } -} diff --git a/packages/tui/internal/components/modal/modal.go b/packages/tui/internal/components/modal/modal.go index 09989d8e..aa81a83e 100644 --- a/packages/tui/internal/components/modal/modal.go +++ b/packages/tui/internal/components/modal/modal.go @@ -90,12 +90,12 @@ func (m *Modal) Render(contentView string, background string) string { innerWidth := outerWidth - 4 - baseStyle := styles.NewStyle().Foreground(t.TextMuted()).Background(t.BackgroundPanel()) + baseStyle := styles.NewStyle().Foreground(t.TextMuted()).Background(t.BackgroundElement()) var finalContent string if m.title != "" { titleStyle := baseStyle. - Foreground(t.Text()). + Foreground(t.Primary()). Bold(true). Padding(0, 1) diff --git a/packages/tui/internal/components/qr/qr.go b/packages/tui/internal/components/qr/qr.go index 233bcf52..ccf28200 100644 --- a/packages/tui/internal/components/qr/qr.go +++ b/packages/tui/internal/components/qr/qr.go @@ -23,7 +23,7 @@ func Generate(text string) (string, int, error) { } // Create lipgloss style for QR code with theme colors - qrStyle := styles.NewStyle().Foreground(t.Text()).Background(t.Background()) + qrStyle := styles.NewStyleWithColors(t.Text(), t.Background()) var result strings.Builder diff --git a/packages/tui/internal/components/status/status.go b/packages/tui/internal/components/status/status.go index 8ab54277..791267e0 100644 --- a/packages/tui/internal/components/status/status.go +++ b/packages/tui/internal/components/status/status.go @@ -1,14 +1,12 @@ package status import ( - "os" + "fmt" "strings" tea "github.com/charmbracelet/bubbletea/v2" "github.com/charmbracelet/lipgloss/v2" - "github.com/charmbracelet/lipgloss/v2/compat" "github.com/sst/opencode/internal/app" - "github.com/sst/opencode/internal/commands" "github.com/sst/opencode/internal/styles" "github.com/sst/opencode/internal/theme" ) @@ -21,7 +19,6 @@ type StatusComponent interface { type statusComponent struct { app *app.App width int - cwd string } func (m statusComponent) Init() tea.Cmd { @@ -55,6 +52,38 @@ func (m statusComponent) logo() string { Render(open + code + version) } +func formatTokensAndCost(tokens float64, contextWindow float64, cost float64) string { + // Format tokens in human-readable format (e.g., 110K, 1.2M) + var formattedTokens string + switch { + case tokens >= 1_000_000: + formattedTokens = fmt.Sprintf("%.1fM", float64(tokens)/1_000_000) + case tokens >= 1_000: + formattedTokens = fmt.Sprintf("%.1fK", float64(tokens)/1_000) + default: + formattedTokens = fmt.Sprintf("%d", int(tokens)) + } + + // Remove .0 suffix if present + if strings.HasSuffix(formattedTokens, ".0K") { + formattedTokens = strings.Replace(formattedTokens, ".0K", "K", 1) + } + if strings.HasSuffix(formattedTokens, ".0M") { + formattedTokens = strings.Replace(formattedTokens, ".0M", "M", 1) + } + + // Format cost with $ symbol and 2 decimal places + formattedCost := fmt.Sprintf("$%.2f", cost) + percentage := (float64(tokens) / 
float64(contextWindow)) * 100 + + return fmt.Sprintf( + "Context: %s (%d%%), Cost: %s", + formattedTokens, + int(percentage), + formattedCost, + ) +} + func (m statusComponent) View() string { t := theme.CurrentTheme() logo := m.logo() @@ -63,70 +92,46 @@ func (m statusComponent) View() string { Foreground(t.TextMuted()). Background(t.BackgroundPanel()). Padding(0, 1). - Render(m.cwd) + Render(m.app.Info.Path.Cwd) - var modeBackground compat.AdaptiveColor - var modeForeground compat.AdaptiveColor - switch m.app.ModeIndex { - case 0: - modeBackground = t.BackgroundElement() - modeForeground = t.TextMuted() - case 1: - modeBackground = t.Secondary() - modeForeground = t.BackgroundPanel() - case 2: - modeBackground = t.Accent() - modeForeground = t.BackgroundPanel() - case 3: - modeBackground = t.Success() - modeForeground = t.BackgroundPanel() - case 4: - modeBackground = t.Warning() - modeForeground = t.BackgroundPanel() - case 5: - modeBackground = t.Primary() - modeForeground = t.BackgroundPanel() - case 6: - modeBackground = t.Error() - modeForeground = t.BackgroundPanel() - default: - modeBackground = t.Secondary() - modeForeground = t.BackgroundPanel() + sessionInfo := "" + if m.app.Session.ID != "" { + tokens := float64(0) + cost := float64(0) + contextWindow := m.app.Model.Limit.Context + + for _, message := range m.app.Messages { + cost += message.Metadata.Assistant.Cost + usage := message.Metadata.Assistant.Tokens + if usage.Output > 0 { + if message.Metadata.Assistant.Summary { + tokens = usage.Output + continue + } + tokens = (usage.Input + + usage.Cache.Write + + usage.Cache.Read + + usage.Output + + usage.Reasoning) + } + } + + sessionInfo = styles.NewStyle(). + Foreground(t.TextMuted()). + Background(t.BackgroundElement()). + Padding(0, 1). + Render(formatTokensAndCost(tokens, contextWindow, cost)) } - command := m.app.Commands[commands.SwitchModeCommand] - kb := command.Keybindings[0] - key := kb.Key - if kb.RequiresLeader { - key = m.app.Config.Keybinds.Leader + " " + kb.Key - } - - modeStyle := styles.NewStyle().Background(modeBackground).Foreground(modeForeground) - modeNameStyle := modeStyle.Bold(true).Render - modeDescStyle := modeStyle.Render - mode := modeNameStyle(strings.ToUpper(m.app.Mode.Name)) + modeDescStyle(" MODE") - mode = modeStyle. - Padding(0, 1). - BorderLeft(true). - BorderStyle(lipgloss.ThickBorder()). - BorderForeground(modeBackground). - BorderBackground(t.BackgroundPanel()). - Render(mode) - - mode = styles.NewStyle(). - Faint(true). - Background(t.BackgroundPanel()). - Foreground(t.TextMuted()). 
- Render(key+" ") + - mode + // diagnostics := styles.Padded().Background(t.BackgroundElement()).Render(m.projectDiagnostics()) space := max( 0, - m.width-lipgloss.Width(logo)-lipgloss.Width(cwd)-lipgloss.Width(mode), + m.width-lipgloss.Width(logo)-lipgloss.Width(cwd)-lipgloss.Width(sessionInfo), ) spacer := styles.NewStyle().Background(t.BackgroundPanel()).Width(space).Render("") - status := logo + cwd + spacer + mode + status := logo + cwd + spacer + sessionInfo blank := styles.NewStyle().Background(t.Background()).Width(m.width).Render("") return blank + "\n" + status @@ -137,12 +142,5 @@ func NewStatusCmp(app *app.App) StatusComponent { app: app, } - homePath, err := os.UserHomeDir() - cwdPath := app.Info.Path.Cwd - if err == nil && homePath != "" && strings.HasPrefix(cwdPath, homePath) { - cwdPath = "~" + cwdPath[len(homePath):] - } - statusComponent.cwd = cwdPath - return statusComponent } diff --git a/packages/tui/internal/components/textarea/textarea.go b/packages/tui/internal/components/textarea/textarea.go index cc073e27..2ca08bb8 100644 --- a/packages/tui/internal/components/textarea/textarea.go +++ b/packages/tui/internal/components/textarea/textarea.go @@ -9,8 +9,7 @@ import ( "time" "unicode" - "slices" - + "github.com/atotto/clipboard" "github.com/charmbracelet/bubbles/v2/cursor" "github.com/charmbracelet/bubbles/v2/key" tea "github.com/charmbracelet/bubbletea/v2" @@ -18,6 +17,7 @@ import ( "github.com/charmbracelet/x/ansi" rw "github.com/mattn/go-runewidth" "github.com/rivo/uniseg" + "slices" ) const ( @@ -32,145 +32,6 @@ const ( maxLines = 10000 ) -// Attachment represents a special object within the text, distinct from regular characters. -type Attachment struct { - ID string // A unique identifier for this attachment instance - Display string // e.g., "@filename.txt" - URL string - Filename string - MediaType string -} - -// Helper functions for converting between runes and any slices - -// runesToInterfaces converts a slice of runes to a slice of interfaces -func runesToInterfaces(runes []rune) []any { - result := make([]any, len(runes)) - for i, r := range runes { - result[i] = r - } - return result -} - -// interfacesToRunes converts a slice of interfaces to a slice of runes (for display purposes) -func interfacesToRunes(items []any) []rune { - var result []rune - for _, item := range items { - switch val := item.(type) { - case rune: - result = append(result, val) - case *Attachment: - result = append(result, []rune(val.Display)...) - } - } - return result -} - -// copyInterfaceSlice creates a copy of an any slice -func copyInterfaceSlice(src []any) []any { - dst := make([]any, len(src)) - copy(dst, src) - return dst -} - -// interfacesToString converts a slice of interfaces to a string for display -func interfacesToString(items []any) string { - var s strings.Builder - for _, item := range items { - switch val := item.(type) { - case rune: - s.WriteRune(val) - case *Attachment: - s.WriteString(val.Display) - } - } - return s.String() -} - -// isAttachmentAtCursor checks if the cursor is positioned on or immediately after an attachment. -// This allows for proper highlighting even when the cursor is technically at the position -// after the attachment object in the underlying slice. -func (m Model) isAttachmentAtCursor() (*Attachment, int, int) { - if m.row >= len(m.value) { - return nil, -1, -1 - } - - row := m.value[m.row] - col := m.col - - if col < 0 || col > len(row) { - return nil, -1, -1 - } - - // Check if the cursor is at the same index as an attachment. 
- if col < len(row) { - if att, ok := row[col].(*Attachment); ok { - return att, col, col - } - } - - // Check if the cursor is immediately after an attachment. This is a common - // state, for example, after just inserting one. - if col > 0 && col <= len(row) { - if att, ok := row[col-1].(*Attachment); ok { - return att, col - 1, col - 1 - } - } - - return nil, -1, -1 -} - -// renderLineWithAttachments renders a line with proper attachment highlighting -func (m Model) renderLineWithAttachments( - items []any, - style lipgloss.Style, -) string { - var s strings.Builder - currentAttachment, _, _ := m.isAttachmentAtCursor() - - for _, item := range items { - switch val := item.(type) { - case rune: - s.WriteString(style.Render(string(val))) - case *Attachment: - // Check if this is the attachment the cursor is currently on - if currentAttachment != nil && currentAttachment.ID == val.ID { - // Cursor is on this attachment, highlight it - s.WriteString(m.Styles.SelectedAttachment.Render(val.Display)) - } else { - s.WriteString(m.Styles.Attachment.Render(val.Display)) - } - } - } - return s.String() -} - -// getRuneAt safely gets a rune at a specific position, returns 0 if not a rune -func getRuneAt(items []any, index int) rune { - if index < 0 || index >= len(items) { - return 0 - } - if r, ok := items[index].(rune); ok { - return r - } - return 0 -} - -// isSpaceAt checks if the item at index is a space rune -func isSpaceAt(items []any, index int) bool { - r := getRuneAt(items, index) - return r != 0 && unicode.IsSpace(r) -} - -// setRuneAt safely sets a rune at a specific position if it's a rune -func setRuneAt(items []any, index int, r rune) { - if index >= 0 && index < len(items) { - if _, ok := items[index].(rune); ok { - items[index] = r - } - } -} - // Internal messages for clipboard operations. type ( pasteMsg string @@ -209,96 +70,30 @@ type KeyMap struct { // upon the textarea. 
func DefaultKeyMap() KeyMap { return KeyMap{ - CharacterForward: key.NewBinding( - key.WithKeys("right", "ctrl+f"), - key.WithHelp("right", "character forward"), - ), - CharacterBackward: key.NewBinding( - key.WithKeys("left", "ctrl+b"), - key.WithHelp("left", "character backward"), - ), - WordForward: key.NewBinding( - key.WithKeys("alt+right", "ctrl+right", "alt+f"), - key.WithHelp("alt+right", "word forward"), - ), - WordBackward: key.NewBinding( - key.WithKeys("alt+left", "ctrl+left", "alt+b"), - key.WithHelp("alt+left", "word backward"), - ), - LineNext: key.NewBinding( - key.WithKeys("down", "ctrl+n"), - key.WithHelp("down", "next line"), - ), - LinePrevious: key.NewBinding( - key.WithKeys("up", "ctrl+p"), - key.WithHelp("up", "previous line"), - ), - DeleteWordBackward: key.NewBinding( - key.WithKeys("alt+backspace", "ctrl+w"), - key.WithHelp("alt+backspace", "delete word backward"), - ), - DeleteWordForward: key.NewBinding( - key.WithKeys("alt+delete", "alt+d"), - key.WithHelp("alt+delete", "delete word forward"), - ), - DeleteAfterCursor: key.NewBinding( - key.WithKeys("ctrl+k"), - key.WithHelp("ctrl+k", "delete after cursor"), - ), - DeleteBeforeCursor: key.NewBinding( - key.WithKeys("ctrl+u"), - key.WithHelp("ctrl+u", "delete before cursor"), - ), - InsertNewline: key.NewBinding( - key.WithKeys("enter", "ctrl+m"), - key.WithHelp("enter", "insert newline"), - ), - DeleteCharacterBackward: key.NewBinding( - key.WithKeys("backspace", "ctrl+h"), - key.WithHelp("backspace", "delete character backward"), - ), - DeleteCharacterForward: key.NewBinding( - key.WithKeys("delete", "ctrl+d"), - key.WithHelp("delete", "delete character forward"), - ), - LineStart: key.NewBinding( - key.WithKeys("home", "ctrl+a"), - key.WithHelp("home", "line start"), - ), - LineEnd: key.NewBinding( - key.WithKeys("end", "ctrl+e"), - key.WithHelp("end", "line end"), - ), - Paste: key.NewBinding( - key.WithKeys("ctrl+v"), - key.WithHelp("ctrl+v", "paste"), - ), - InputBegin: key.NewBinding( - key.WithKeys("alt+<", "ctrl+home"), - key.WithHelp("alt+<", "input begin"), - ), - InputEnd: key.NewBinding( - key.WithKeys("alt+>", "ctrl+end"), - key.WithHelp("alt+>", "input end"), - ), + CharacterForward: key.NewBinding(key.WithKeys("right", "ctrl+f"), key.WithHelp("right", "character forward")), + CharacterBackward: key.NewBinding(key.WithKeys("left", "ctrl+b"), key.WithHelp("left", "character backward")), + WordForward: key.NewBinding(key.WithKeys("alt+right", "alt+f"), key.WithHelp("alt+right", "word forward")), + WordBackward: key.NewBinding(key.WithKeys("alt+left", "alt+b"), key.WithHelp("alt+left", "word backward")), + LineNext: key.NewBinding(key.WithKeys("down", "ctrl+n"), key.WithHelp("down", "next line")), + LinePrevious: key.NewBinding(key.WithKeys("up", "ctrl+p"), key.WithHelp("up", "previous line")), + DeleteWordBackward: key.NewBinding(key.WithKeys("alt+backspace", "ctrl+w"), key.WithHelp("alt+backspace", "delete word backward")), + DeleteWordForward: key.NewBinding(key.WithKeys("alt+delete", "alt+d"), key.WithHelp("alt+delete", "delete word forward")), + DeleteAfterCursor: key.NewBinding(key.WithKeys("ctrl+k"), key.WithHelp("ctrl+k", "delete after cursor")), + DeleteBeforeCursor: key.NewBinding(key.WithKeys("ctrl+u"), key.WithHelp("ctrl+u", "delete before cursor")), + InsertNewline: key.NewBinding(key.WithKeys("enter", "ctrl+m"), key.WithHelp("enter", "insert newline")), + DeleteCharacterBackward: key.NewBinding(key.WithKeys("backspace", "ctrl+h"), key.WithHelp("backspace", "delete character backward")), 
+ DeleteCharacterForward: key.NewBinding(key.WithKeys("delete", "ctrl+d"), key.WithHelp("delete", "delete character forward")), + LineStart: key.NewBinding(key.WithKeys("home", "ctrl+a"), key.WithHelp("home", "line start")), + LineEnd: key.NewBinding(key.WithKeys("end", "ctrl+e"), key.WithHelp("end", "line end")), + Paste: key.NewBinding(key.WithKeys("ctrl+v"), key.WithHelp("ctrl+v", "paste")), + InputBegin: key.NewBinding(key.WithKeys("alt+<", "ctrl+home"), key.WithHelp("alt+<", "input begin")), + InputEnd: key.NewBinding(key.WithKeys("alt+>", "ctrl+end"), key.WithHelp("alt+>", "input end")), - CapitalizeWordForward: key.NewBinding( - key.WithKeys("alt+c"), - key.WithHelp("alt+c", "capitalize word forward"), - ), - LowercaseWordForward: key.NewBinding( - key.WithKeys("alt+l"), - key.WithHelp("alt+l", "lowercase word forward"), - ), - UppercaseWordForward: key.NewBinding( - key.WithKeys("alt+u"), - key.WithHelp("alt+u", "uppercase word forward"), - ), + CapitalizeWordForward: key.NewBinding(key.WithKeys("alt+c"), key.WithHelp("alt+c", "capitalize word forward")), + LowercaseWordForward: key.NewBinding(key.WithKeys("alt+l"), key.WithHelp("alt+l", "lowercase word forward")), + UppercaseWordForward: key.NewBinding(key.WithKeys("alt+u"), key.WithHelp("alt+u", "uppercase word forward")), - TransposeCharacterBackward: key.NewBinding( - key.WithKeys("ctrl+t"), - key.WithHelp("ctrl+t", "transpose character backward"), - ), + TransposeCharacterBackward: key.NewBinding(key.WithKeys("ctrl+t"), key.WithHelp("ctrl+t", "transpose character backward")), } } @@ -365,11 +160,9 @@ type CursorStyle struct { // states. The appropriate styles will be chosen based on the focus state of // the textarea. type Styles struct { - Focused StyleState - Blurred StyleState - Cursor CursorStyle - Attachment lipgloss.Style - SelectedAttachment lipgloss.Style + Focused StyleState + Blurred StyleState + Cursor CursorStyle } // StyleState that will be applied to the text area. @@ -424,22 +217,13 @@ func (s StyleState) computedText() lipgloss.Style { // line is the input to the text wrapping function. This is stored in a struct // so that it can be hashed and memoized. type line struct { - content []any // Contains runes and *Attachment - width int + runes []rune + width int } // Hash returns a hash of the line. func (w line) Hash() string { - var s strings.Builder - for _, item := range w.content { - switch v := item.(type) { - case rune: - s.WriteRune(v) - case *Attachment: - s.WriteString(v.ID) - } - } - v := fmt.Sprintf("%s:%d", s.String(), w.width) + v := fmt.Sprintf("%s:%d", string(w.runes), w.width) return fmt.Sprintf("%x", sha256.Sum256([]byte(v))) } @@ -448,7 +232,7 @@ type Model struct { Err error // General settings. - cache *MemoCache[line, [][]any] + cache *MemoCache[line, [][]rune] // Prompt is printed at the beginning of each line. // @@ -511,14 +295,14 @@ type Model struct { // if there are more lines than the permitted height. height int - // Underlying text value. Contains either rune or *Attachment types. - value [][]any + // Underlying text value. + value [][]rune // focus indicates whether user input focus should be on this input // component. When false, ignore keyboard input and hide the cursor. focus bool - // Cursor column (slice index). + // Cursor column. col int // Cursor row. 
@@ -544,14 +328,14 @@ func New() Model { MaxWidth: defaultMaxWidth, Prompt: lipgloss.ThickBorder().Left + " ", Styles: styles, - cache: NewMemoCache[line, [][]any](maxLines), + cache: NewMemoCache[line, [][]rune](maxLines), EndOfBufferCharacter: ' ', ShowLineNumbers: true, VirtualCursor: true, virtualCursor: cur, KeyMap: DefaultKeyMap(), - value: make([][]any, minHeight, maxLines), + value: make([][]rune, minHeight, maxLines), focus: false, col: 0, row: 0, @@ -570,40 +354,25 @@ func DefaultStyles(isDark bool) Styles { var s Styles s.Focused = StyleState{ - Base: lipgloss.NewStyle(), - CursorLine: lipgloss.NewStyle(). - Background(lightDark(lipgloss.Color("255"), lipgloss.Color("0"))), - CursorLineNumber: lipgloss.NewStyle(). - Foreground(lightDark(lipgloss.Color("240"), lipgloss.Color("240"))), - EndOfBuffer: lipgloss.NewStyle(). - Foreground(lightDark(lipgloss.Color("254"), lipgloss.Color("0"))), - LineNumber: lipgloss.NewStyle(). - Foreground(lightDark(lipgloss.Color("249"), lipgloss.Color("7"))), - Placeholder: lipgloss.NewStyle().Foreground(lipgloss.Color("240")), - Prompt: lipgloss.NewStyle().Foreground(lipgloss.Color("7")), - Text: lipgloss.NewStyle(), + Base: lipgloss.NewStyle(), + CursorLine: lipgloss.NewStyle().Background(lightDark(lipgloss.Color("255"), lipgloss.Color("0"))), + CursorLineNumber: lipgloss.NewStyle().Foreground(lightDark(lipgloss.Color("240"), lipgloss.Color("240"))), + EndOfBuffer: lipgloss.NewStyle().Foreground(lightDark(lipgloss.Color("254"), lipgloss.Color("0"))), + LineNumber: lipgloss.NewStyle().Foreground(lightDark(lipgloss.Color("249"), lipgloss.Color("7"))), + Placeholder: lipgloss.NewStyle().Foreground(lipgloss.Color("240")), + Prompt: lipgloss.NewStyle().Foreground(lipgloss.Color("7")), + Text: lipgloss.NewStyle(), } s.Blurred = StyleState{ - Base: lipgloss.NewStyle(), - CursorLine: lipgloss.NewStyle(). - Foreground(lightDark(lipgloss.Color("245"), lipgloss.Color("7"))), - CursorLineNumber: lipgloss.NewStyle(). - Foreground(lightDark(lipgloss.Color("249"), lipgloss.Color("7"))), - EndOfBuffer: lipgloss.NewStyle(). - Foreground(lightDark(lipgloss.Color("254"), lipgloss.Color("0"))), - LineNumber: lipgloss.NewStyle(). - Foreground(lightDark(lipgloss.Color("249"), lipgloss.Color("7"))), - Placeholder: lipgloss.NewStyle().Foreground(lipgloss.Color("240")), - Prompt: lipgloss.NewStyle().Foreground(lipgloss.Color("7")), - Text: lipgloss.NewStyle(). - Foreground(lightDark(lipgloss.Color("245"), lipgloss.Color("7"))), + Base: lipgloss.NewStyle(), + CursorLine: lipgloss.NewStyle().Foreground(lightDark(lipgloss.Color("245"), lipgloss.Color("7"))), + CursorLineNumber: lipgloss.NewStyle().Foreground(lightDark(lipgloss.Color("249"), lipgloss.Color("7"))), + EndOfBuffer: lipgloss.NewStyle().Foreground(lightDark(lipgloss.Color("254"), lipgloss.Color("0"))), + LineNumber: lipgloss.NewStyle().Foreground(lightDark(lipgloss.Color("249"), lipgloss.Color("7"))), + Placeholder: lipgloss.NewStyle().Foreground(lipgloss.Color("240")), + Prompt: lipgloss.NewStyle().Foreground(lipgloss.Color("7")), + Text: lipgloss.NewStyle().Foreground(lightDark(lipgloss.Color("245"), lipgloss.Color("7"))), } - s.Attachment = lipgloss.NewStyle(). - Background(lipgloss.Color("11")). - Foreground(lipgloss.Color("0")) - s.SelectedAttachment = lipgloss.NewStyle(). - Background(lipgloss.Color("11")). 
- Foreground(lipgloss.Color("0")) s.Cursor = CursorStyle{ Color: lipgloss.Color("7"), Shape: tea.CursorBlock, @@ -652,85 +421,16 @@ func (m *Model) SetValue(s string) { // InsertString inserts a string at the cursor position. func (m *Model) InsertString(s string) { - m.InsertRunesFromUserInput([]rune(s)) + m.insertRunesFromUserInput([]rune(s)) } // InsertRune inserts a rune at the cursor position. func (m *Model) InsertRune(r rune) { - m.InsertRunesFromUserInput([]rune{r}) + m.insertRunesFromUserInput([]rune{r}) } -// InsertAttachment inserts an attachment at the cursor position. -func (m *Model) InsertAttachment(att *Attachment) { - if m.CharLimit > 0 { - availSpace := m.CharLimit - m.Length() - // If the char limit's been reached, cancel. - if availSpace <= 0 { - return - } - } - - // Insert the attachment at the current cursor position - m.value[m.row] = append( - m.value[m.row][:m.col], - append([]any{att}, m.value[m.row][m.col:]...)...) - m.col++ - m.SetCursorColumn(m.col) -} - -// ReplaceRange replaces text from startCol to endCol on the current row with the given string. -// This preserves attachments outside the replaced range. -func (m *Model) ReplaceRange(startCol, endCol int, replacement string) { - if m.row >= len(m.value) || startCol < 0 || endCol < startCol { - return - } - - // Ensure bounds are within the current row - rowLen := len(m.value[m.row]) - startCol = max(0, min(startCol, rowLen)) - endCol = max(startCol, min(endCol, rowLen)) - - // Create new row content: before + replacement + after - before := m.value[m.row][:startCol] - after := m.value[m.row][endCol:] - replacementRunes := runesToInterfaces([]rune(replacement)) - - // Combine the parts - newRow := make([]any, 0, len(before)+len(replacementRunes)+len(after)) - newRow = append(newRow, before...) - newRow = append(newRow, replacementRunes...) - newRow = append(newRow, after...) - - m.value[m.row] = newRow - - // Position cursor at end of replacement - m.col = startCol + len(replacementRunes) - m.SetCursorColumn(m.col) -} - -// CurrentRowLength returns the length of the current row. -func (m *Model) CurrentRowLength() int { - if m.row >= len(m.value) { - return 0 - } - return len(m.value[m.row]) -} - -// GetAttachments returns all attachments in the textarea. -func (m Model) GetAttachments() []*Attachment { - var attachments []*Attachment - for _, row := range m.value { - for _, item := range row { - if att, ok := item.(*Attachment); ok { - attachments = append(attachments, att) - } - } - } - return attachments -} - -// InsertRunesFromUserInput inserts runes at the current cursor position. -func (m *Model) InsertRunesFromUserInput(runes []rune) { +// insertRunesFromUserInput inserts runes at the current cursor position. +func (m *Model) insertRunesFromUserInput(runes []rune) { // Clean up any special characters in the input provided by the // clipboard. This avoids bugs due to e.g. tab characters and // whatnot. @@ -781,22 +481,23 @@ func (m *Model) InsertRunesFromUserInput(runes []rune) { // Save the remainder of the original line at the current // cursor position. - tail := copyInterfaceSlice(m.value[m.row][m.col:]) + tail := make([]rune, len(m.value[m.row][m.col:])) + copy(tail, m.value[m.row][m.col:]) // Paste the first line at the current cursor position. - m.value[m.row] = append(m.value[m.row][:m.col], runesToInterfaces(lines[0])...) + m.value[m.row] = append(m.value[m.row][:m.col], lines[0]...) m.col += len(lines[0]) if numExtraLines := len(lines) - 1; numExtraLines > 0 { // Add the new lines. 
// We try to reuse the slice if there's already space. - var newGrid [][]any + var newGrid [][]rune if cap(m.value) >= len(m.value)+numExtraLines { // Can reuse the extra space. newGrid = m.value[:len(m.value)+numExtraLines] } else { // No space left; need a new slice. - newGrid = make([][]any, len(m.value)+numExtraLines) + newGrid = make([][]rune, len(m.value)+numExtraLines) copy(newGrid, m.value[:m.row+1]) } // Add all the rows that were after the cursor in the original @@ -806,7 +507,7 @@ func (m *Model) InsertRunesFromUserInput(runes []rune) { // Insert all the new lines in the middle. for _, l := range lines[1:] { m.row++ - m.value[m.row] = runesToInterfaces(l) + m.value[m.row] = l m.col = len(l) } } @@ -825,14 +526,7 @@ func (m Model) Value() string { var v strings.Builder for _, l := range m.value { - for _, item := range l { - switch val := item.(type) { - case rune: - v.WriteRune(val) - case *Attachment: - v.WriteString(val.Display) - } - } + v.WriteString(string(l)) v.WriteByte('\n') } @@ -843,14 +537,7 @@ func (m Model) Value() string { func (m *Model) Length() int { var l int for _, row := range m.value { - for _, item := range row { - switch val := item.(type) { - case rune: - l += rw.RuneWidth(val) - case *Attachment: - l += uniseg.StringWidth(val.Display) - } - } + l += uniseg.StringWidth(string(row)) } // We add len(m.value) to include the newline characters. return l + len(m.value) - 1 @@ -866,29 +553,6 @@ func (m Model) Line() int { return m.row } -// CursorColumn returns the cursor's column position (slice index). -func (m Model) CursorColumn() int { - return m.col -} - -// LastRuneIndex returns the index of the last occurrence of a rune on the current line, -// searching backwards from the current cursor position. -// Returns -1 if the rune is not found before the cursor. -func (m Model) LastRuneIndex(r rune) int { - if m.row >= len(m.value) { - return -1 - } - // Iterate backwards from just before the cursor position - for i := m.col - 1; i >= 0; i-- { - if i < len(m.value[m.row]) { - if item, ok := m.value[m.row][i].(rune); ok && item == r { - return i - } - } - } - return -1 -} - func (m *Model) Newline() { if m.MaxHeight > 0 && len(m.value) >= m.MaxHeight { return @@ -897,117 +561,39 @@ func (m *Model) Newline() { m.splitLine(m.row, m.col) } -// mapVisualOffsetToSliceIndex converts a visual column offset to a slice index. -// This is used to maintain the cursor's horizontal position when moving vertically. -func (m *Model) mapVisualOffsetToSliceIndex(row int, charOffset int) int { - if row < 0 || row >= len(m.value) { - return 0 - } - - offset := 0 - // Find the slice index that corresponds to the visual offset. - for i, item := range m.value[row] { - var itemWidth int - switch v := item.(type) { - case rune: - itemWidth = rw.RuneWidth(v) - case *Attachment: - itemWidth = uniseg.StringWidth(v.Display) - } - - // If the target offset falls within the current item, this is our index. - if offset+itemWidth > charOffset { - // Decide whether to stick with the previous index or move to the current - // one based on which is closer to the target offset. - if (charOffset - offset) > ((offset + itemWidth) - charOffset) { - return i + 1 - } - return i - } - offset += itemWidth - } - - return len(m.value[row]) -} - // CursorDown moves the cursor down by one line. +// Returns whether or not the cursor blink should be reset. 
func (m *Model) CursorDown() { li := m.LineInfo() charOffset := max(m.lastCharOffset, li.CharOffset) m.lastCharOffset = charOffset if li.RowOffset+1 >= li.Height && m.row < len(m.value)-1 { - // Move to the next model line m.row++ - - // We want to land on the first wrapped line of the new model line. - grid := m.memoizedWrap(m.value[m.row], m.width) - targetLineContent := grid[0] - - // Find position within the first wrapped line. - offset := 0 - colInLine := 0 - for i, item := range targetLineContent { - var itemWidth int - switch v := item.(type) { - case rune: - itemWidth = rw.RuneWidth(v) - case *Attachment: - itemWidth = uniseg.StringWidth(v.Display) - } - if offset+itemWidth > charOffset { - // Decide whether to stick with the previous index or move to the current - // one based on which is closer to the target offset. - if (charOffset - offset) > ((offset + itemWidth) - charOffset) { - colInLine = i + 1 - } else { - colInLine = i - } - goto foundNextLine - } - offset += itemWidth - } - colInLine = len(targetLineContent) - foundNextLine: - m.col = colInLine // startCol is 0 for the first wrapped line - } else if li.RowOffset+1 < li.Height { - // Move to the next wrapped line within the same model line - grid := m.memoizedWrap(m.value[m.row], m.width) - targetLineContent := grid[li.RowOffset+1] - - startCol := 0 - for i := 0; i < li.RowOffset+1; i++ { - startCol += len(grid[i]) - } - - // Find position within the target wrapped line. - offset := 0 - colInLine := 0 - for i, item := range targetLineContent { - var itemWidth int - switch v := item.(type) { - case rune: - itemWidth = rw.RuneWidth(v) - case *Attachment: - itemWidth = uniseg.StringWidth(v.Display) - } - if offset+itemWidth > charOffset { - // Decide whether to stick with the previous index or move to the current - // one based on which is closer to the target offset. - if (charOffset - offset) > ((offset + itemWidth) - charOffset) { - colInLine = i + 1 - } else { - colInLine = i - } - goto foundSameLine - } - offset += itemWidth - } - colInLine = len(targetLineContent) - foundSameLine: - m.col = startCol + colInLine + m.col = 0 + } else { + // Move the cursor to the start of the next line so that we can get + // the line information. We need to add 2 columns to account for the + // trailing space wrapping. + const trailingSpace = 2 + m.col = min(li.StartColumn+li.Width+trailingSpace, len(m.value[m.row])-1) + } + + nli := m.LineInfo() + m.col = nli.StartColumn + + if nli.Width <= 0 { + return + } + + offset := 0 + for offset < charOffset { + if m.row >= len(m.value) || m.col >= len(m.value[m.row]) || offset >= nli.CharWidth-1 { + break + } + offset += rw.RuneWidth(m.value[m.row][m.col]) + m.col++ } - m.SetCursorColumn(m.col) } // CursorUp moves the cursor up by one line. @@ -1017,79 +603,32 @@ func (m *Model) CursorUp() { m.lastCharOffset = charOffset if li.RowOffset <= 0 && m.row > 0 { - // Move to the previous model line. We want to land on the last wrapped - // line of the previous model line. m.row-- - grid := m.memoizedWrap(m.value[m.row], m.width) - targetLineContent := grid[len(grid)-1] - - // Find start of last wrapped line. - startCol := len(m.value[m.row]) - len(targetLineContent) - - // Find position within the last wrapped line. 
- offset := 0 - colInLine := 0 - for i, item := range targetLineContent { - var itemWidth int - switch v := item.(type) { - case rune: - itemWidth = rw.RuneWidth(v) - case *Attachment: - itemWidth = uniseg.StringWidth(v.Display) - } - if offset+itemWidth > charOffset { - // Decide whether to stick with the previous index or move to the current - // one based on which is closer to the target offset. - if (charOffset - offset) > ((offset + itemWidth) - charOffset) { - colInLine = i + 1 - } else { - colInLine = i - } - goto foundPrevLine - } - offset += itemWidth - } - colInLine = len(targetLineContent) - foundPrevLine: - m.col = startCol + colInLine - } else if li.RowOffset > 0 { - // Move to the previous wrapped line within the same model line. - grid := m.memoizedWrap(m.value[m.row], m.width) - targetLineContent := grid[li.RowOffset-1] - - startCol := 0 - for i := 0; i < li.RowOffset-1; i++ { - startCol += len(grid[i]) - } - - // Find position within the target wrapped line. - offset := 0 - colInLine := 0 - for i, item := range targetLineContent { - var itemWidth int - switch v := item.(type) { - case rune: - itemWidth = rw.RuneWidth(v) - case *Attachment: - itemWidth = uniseg.StringWidth(v.Display) - } - if offset+itemWidth > charOffset { - // Decide whether to stick with the previous index or move to the current - // one based on which is closer to the target offset. - if (charOffset - offset) > ((offset + itemWidth) - charOffset) { - colInLine = i + 1 - } else { - colInLine = i - } - goto foundSameLine - } - offset += itemWidth - } - colInLine = len(targetLineContent) - foundSameLine: - m.col = startCol + colInLine + m.col = len(m.value[m.row]) + } else { + // Move the cursor to the end of the previous line. + // This can be done by moving the cursor to the start of the line and + // then subtracting 2 to account for the trailing space we keep on + // soft-wrapped lines. + const trailingSpace = 2 + m.col = li.StartColumn - trailingSpace + } + + nli := m.LineInfo() + m.col = nli.StartColumn + + if nli.Width <= 0 { + return + } + + offset := 0 + for offset < charOffset { + if m.col >= len(m.value[m.row]) || offset >= nli.CharWidth-1 { + break + } + offset += rw.RuneWidth(m.value[m.row][m.col]) + m.col++ } - m.SetCursorColumn(m.col) } // SetCursorColumn moves the cursor to the given position. If the position is @@ -1141,7 +680,7 @@ func (m *Model) Blur() { // Reset sets the input to its default state with no input. 
func (m *Model) Reset() { - m.value = make([][]any, minHeight, maxLines) + m.value = make([][]rune, minHeight, maxLines) m.col = 0 m.row = 0 m.SetCursorColumn(0) @@ -1202,7 +741,7 @@ func (m *Model) deleteWordLeft() { oldCol := m.col //nolint:ifshort m.SetCursorColumn(m.col - 1) - for isSpaceAt(m.value[m.row], m.col) { + for unicode.IsSpace(m.value[m.row][m.col]) { if m.col <= 0 { break } @@ -1211,7 +750,7 @@ func (m *Model) deleteWordLeft() { } for m.col > 0 { - if !isSpaceAt(m.value[m.row], m.col) { + if !unicode.IsSpace(m.value[m.row][m.col]) { m.SetCursorColumn(m.col - 1) } else { if m.col > 0 { @@ -1237,13 +776,13 @@ func (m *Model) deleteWordRight() { oldCol := m.col - for m.col < len(m.value[m.row]) && isSpaceAt(m.value[m.row], m.col) { + for m.col < len(m.value[m.row]) && unicode.IsSpace(m.value[m.row][m.col]) { // ignore series of whitespace after cursor m.SetCursorColumn(m.col + 1) } for m.col < len(m.value[m.row]) { - if !isSpaceAt(m.value[m.row], m.col) { + if !unicode.IsSpace(m.value[m.row][m.col]) { m.SetCursorColumn(m.col + 1) } else { break @@ -1293,13 +832,13 @@ func (m *Model) characterLeft(insideLine bool) { func (m *Model) wordLeft() { for { m.characterLeft(true /* insideLine */) - if m.col < len(m.value[m.row]) && !isSpaceAt(m.value[m.row], m.col) { + if m.col < len(m.value[m.row]) && !unicode.IsSpace(m.value[m.row][m.col]) { break } } for m.col > 0 { - if isSpaceAt(m.value[m.row], m.col-1) { + if unicode.IsSpace(m.value[m.row][m.col-1]) { break } m.SetCursorColumn(m.col - 1) @@ -1315,7 +854,7 @@ func (m *Model) wordRight() { func (m *Model) doWordRight(fn func(charIdx int, pos int)) { // Skip spaces forward. - for m.col >= len(m.value[m.row]) || isSpaceAt(m.value[m.row], m.col) { + for m.col >= len(m.value[m.row]) || unicode.IsSpace(m.value[m.row][m.col]) { if m.row == len(m.value)-1 && m.col == len(m.value[m.row]) { // End of text. break @@ -1325,7 +864,7 @@ func (m *Model) doWordRight(fn func(charIdx int, pos int)) { charIdx := 0 for m.col < len(m.value[m.row]) { - if isSpaceAt(m.value[m.row], m.col) { + if unicode.IsSpace(m.value[m.row][m.col]) { break } fn(charIdx, m.col) @@ -1337,18 +876,14 @@ func (m *Model) doWordRight(fn func(charIdx int, pos int)) { // uppercaseRight changes the word to the right to uppercase. func (m *Model) uppercaseRight() { m.doWordRight(func(_ int, i int) { - if r, ok := m.value[m.row][i].(rune); ok { - m.value[m.row][i] = unicode.ToUpper(r) - } + m.value[m.row][i] = unicode.ToUpper(m.value[m.row][i]) }) } // lowercaseRight changes the word to the right to lowercase. func (m *Model) lowercaseRight() { m.doWordRight(func(_ int, i int) { - if r, ok := m.value[m.row][i].(rune); ok { - m.value[m.row][i] = unicode.ToLower(r) - } + m.value[m.row][i] = unicode.ToLower(m.value[m.row][i]) }) } @@ -1356,9 +891,7 @@ func (m *Model) lowercaseRight() { func (m *Model) capitalizeRight() { m.doWordRight(func(charIdx int, i int) { if charIdx == 0 { - if r, ok := m.value[m.row][i].(rune); ok { - m.value[m.row][i] = unicode.ToTitle(r) - } + m.value[m.row][i] = unicode.ToTitle(m.value[m.row][i]) } }) } @@ -1372,39 +905,34 @@ func (m Model) LineInfo() LineInfo { // m.col and counting the number of runes that we need to skip. var counter int for i, line := range grid { - start := counter - end := counter + len(line) - - if m.col >= start && m.col <= end { - // This is the wrapped line the cursor is on. 
- - // Special case: if the cursor is at the end of a wrapped line, - // and there's another wrapped line after it, the cursor should - // be considered at the beginning of the next line. - if m.col == end && i < len(grid)-1 { - nextLine := grid[i+1] - return LineInfo{ - CharOffset: 0, - ColumnOffset: 0, - Height: len(grid), - RowOffset: i + 1, - StartColumn: end, - Width: len(nextLine), - CharWidth: uniseg.StringWidth(interfacesToString(nextLine)), - } - } - + // We've found the line that we are on + if counter+len(line) == m.col && i+1 < len(grid) { + // We wrap around to the next line if we are at the end of the + // previous line so that we can be at the very beginning of the row return LineInfo{ - CharOffset: uniseg.StringWidth(interfacesToString(line[:max(0, m.col-start)])), - ColumnOffset: m.col - start, + CharOffset: 0, + ColumnOffset: 0, Height: len(grid), - RowOffset: i, - StartColumn: start, - Width: len(line), - CharWidth: uniseg.StringWidth(interfacesToString(line)), + RowOffset: i + 1, + StartColumn: m.col, + Width: len(grid[i+1]), + CharWidth: uniseg.StringWidth(string(line)), } } - counter = end + + if counter+len(line) >= m.col { + return LineInfo{ + CharOffset: uniseg.StringWidth(string(line[:max(0, m.col-counter)])), + ColumnOffset: m.col - counter, + Height: len(grid), + RowOffset: i, + StartColumn: counter, + Width: len(line), + CharWidth: uniseg.StringWidth(string(line)), + } + } + + counter += len(line) } return LineInfo{} } @@ -1532,18 +1060,17 @@ func (m Model) Update(msg tea.Msg) (Model, tea.Cmd) { var cmds []tea.Cmd - if m.row >= len(m.value) { - m.value = append(m.value, make([]any, 0)) - } if m.value[m.row] == nil { - m.value[m.row] = make([]any, 0) + m.value[m.row] = make([]rune, 0) } if m.MaxHeight > 0 && m.MaxHeight != m.cache.Capacity() { - m.cache = NewMemoCache[line, [][]any](m.MaxHeight) + m.cache = NewMemoCache[line, [][]rune](m.MaxHeight) } switch msg := msg.(type) { + case tea.PasteMsg: + m.insertRunesFromUserInput([]rune(msg)) case tea.KeyPressMsg: switch { case key.Matches(msg, m.KeyMap.DeleteAfterCursor): @@ -1566,9 +1093,11 @@ func (m Model) Update(msg tea.Msg) (Model, tea.Cmd) { m.mergeLineAbove(m.row) break } - if len(m.value[m.row]) > 0 && m.col > 0 { - m.value[m.row] = slices.Delete(m.value[m.row], m.col-1, m.col) - m.SetCursorColumn(m.col - 1) + if len(m.value[m.row]) > 0 { + m.value[m.row] = append(m.value[m.row][:max(0, m.col-1)], m.value[m.row][m.col:]...) 
+ if m.col > 0 { + m.SetCursorColumn(m.col - 1) + } } case key.Matches(msg, m.KeyMap.DeleteCharacterForward): if len(m.value[m.row]) > 0 && m.col < len(m.value[m.row]) { @@ -1603,6 +1132,8 @@ func (m Model) Update(msg tea.Msg) (Model, tea.Cmd) { m.CursorDown() case key.Matches(msg, m.KeyMap.WordForward): m.wordRight() + case key.Matches(msg, m.KeyMap.Paste): + return m, Paste case key.Matches(msg, m.KeyMap.CharacterBackward): m.characterLeft(false /* insideLine */) case key.Matches(msg, m.KeyMap.LinePrevious): @@ -1623,11 +1154,11 @@ func (m Model) Update(msg tea.Msg) (Model, tea.Cmd) { m.transposeLeft() default: - m.InsertRunesFromUserInput([]rune(msg.Text)) + m.insertRunesFromUserInput([]rune(msg.Text)) } case pasteMsg: - m.InsertRunesFromUserInput([]rune(msg)) + m.insertRunesFromUserInput([]rune(msg)) case pasteErrMsg: m.Err = msg @@ -1695,8 +1226,7 @@ func (m Model) View() string { widestLineNumber = lnw } - wrappedLineStr := interfacesToString(wrappedLine) - strwidth := uniseg.StringWidth(wrappedLineStr) + strwidth := uniseg.StringWidth(string(wrappedLine)) padding := m.width - strwidth // If the trailing space causes the line to be wider than the // width, we should not draw it to the screen since it will result @@ -1706,46 +1236,22 @@ func (m Model) View() string { // The character causing the line to be wider than the width is // guaranteed to be a space since any other character would // have been wrapped. - wrappedLineStr = strings.TrimSuffix(wrappedLineStr, " ") - padding = m.width - uniseg.StringWidth(wrappedLineStr) + wrappedLine = []rune(strings.TrimSuffix(string(wrappedLine), " ")) + padding -= m.width - strwidth } - if m.row == l && lineInfo.RowOffset == wl { - // Render the part of the line before the cursor - s.WriteString( - m.renderLineWithAttachments( - wrappedLine[:lineInfo.ColumnOffset], - style, - ), - ) - + s.WriteString(style.Render(string(wrappedLine[:lineInfo.ColumnOffset]))) if m.col >= len(line) && lineInfo.CharOffset >= m.width { m.virtualCursor.SetChar(" ") s.WriteString(m.virtualCursor.View()) - } else if lineInfo.ColumnOffset < len(wrappedLine) { - // Render the item under the cursor - item := wrappedLine[lineInfo.ColumnOffset] - if att, ok := item.(*Attachment); ok { - // Item at cursor is an attachment. Render it with the selection style. - // This becomes the "cursor" visually. - s.WriteString(m.Styles.SelectedAttachment.Render(att.Display)) - } else { - // Item at cursor is a rune. Render it with the virtual cursor. 
- m.virtualCursor.SetChar(string(item.(rune))) - s.WriteString(style.Render(m.virtualCursor.View())) - } - - // Render the part of the line after the cursor - s.WriteString(m.renderLineWithAttachments(wrappedLine[lineInfo.ColumnOffset+1:], style)) } else { - // Cursor is at the end of the line - m.virtualCursor.SetChar(" ") + m.virtualCursor.SetChar(string(wrappedLine[lineInfo.ColumnOffset])) s.WriteString(style.Render(m.virtualCursor.View())) + s.WriteString(style.Render(string(wrappedLine[lineInfo.ColumnOffset+1:]))) } } else { - s.WriteString(m.renderLineWithAttachments(wrappedLine, style)) + s.WriteString(style.Render(string(wrappedLine))) } - s.WriteString(style.Render(strings.Repeat(" ", max(0, padding)))) s.WriteRune('\n') newLines++ @@ -1937,12 +1443,12 @@ func (m Model) Cursor() *tea.Cursor { return c } -func (m Model) memoizedWrap(content []any, width int) [][]any { - input := line{content: content, width: width} +func (m Model) memoizedWrap(runes []rune, width int) [][]rune { + input := line{runes: runes, width: width} if v, ok := m.cache.Get(input); ok { return v } - v := wrapInterfaces(content, width) + v := wrap(runes, width) m.cache.Set(input, v) return v } @@ -2008,7 +1514,8 @@ func (m *Model) splitLine(row, col int) { // the cursor, take the content after the cursor and make it the content of // the line underneath, and shift the remaining lines down by one head, tailSrc := m.value[row][:col], m.value[row][col:] - tail := copyInterfaceSlice(tailSrc) + tail := make([]rune, len(tailSrc)) + copy(tail, tailSrc) m.value = append(m.value[:row+1], m.value[row:]...) @@ -2019,97 +1526,75 @@ func (m *Model) splitLine(row, col int) { m.row++ } -func itemWidth(item any) int { - switch v := item.(type) { - case rune: - return rw.RuneWidth(v) - case *Attachment: - return uniseg.StringWidth(v.Display) +// Paste is a command for pasting from the clipboard into the text input. +func Paste() tea.Msg { + str, err := clipboard.ReadAll() + if err != nil { + return pasteErrMsg{err} } - return 0 + return pasteMsg(str) } -func wrapInterfaces(content []any, width int) [][]any { - if width <= 0 { - return [][]any{content} - } - +func wrap(runes []rune, width int) [][]rune { var ( - lines = [][]any{{}} - word = []any{} - wordW int - lineW int - spaceW int - inSpaces bool + lines = [][]rune{{}} + word = []rune{} + row int + spaces int ) - for _, item := range content { - itemW := 0 - isSpace := false - - if r, ok := item.(rune); ok { - if unicode.IsSpace(r) { - isSpace = true - } - itemW = rw.RuneWidth(r) - } else if att, ok := item.(*Attachment); ok { - itemW = uniseg.StringWidth(att.Display) - } - - if isSpace { - if !inSpaces { - // End of a word - if lineW > 0 && lineW+wordW > width { - lines = append(lines, word) - lineW = wordW - } else { - lines[len(lines)-1] = append(lines[len(lines)-1], word...) - lineW += wordW - } - word = nil - wordW = 0 - } - inSpaces = true - spaceW += itemW - } else { // It's not a space, it's a character for a word. - if inSpaces { - // We just finished a block of spaces. Handle them now. - lineW += spaceW - for i := 0; i < spaceW; i++ { - lines[len(lines)-1] = append(lines[len(lines)-1], rune(' ')) - } - if lineW > width { - // The spaces made the line overflow. Start a new line for the upcoming word. - lines = append(lines, []any{}) - lineW = 0 - } - spaceW = 0 - } - inSpaces = false - word = append(word, item) - wordW += itemW - } - } - - // Handle any remaining word/spaces at the end of the content. 
- if wordW > 0 { - if lineW > 0 && lineW+wordW > width { - lines = append(lines, word) - lineW = wordW + // Word wrap the runes + for _, r := range runes { + if unicode.IsSpace(r) { + spaces++ } else { - lines[len(lines)-1] = append(lines[len(lines)-1], word...) - lineW += wordW + word = append(word, r) + } + + if spaces > 0 { //nolint:nestif + if uniseg.StringWidth(string(lines[row]))+uniseg.StringWidth(string(word))+spaces > width { + row++ + lines = append(lines, []rune{}) + lines[row] = append(lines[row], word...) + lines[row] = append(lines[row], repeatSpaces(spaces)...) + spaces = 0 + word = nil + } else { + lines[row] = append(lines[row], word...) + lines[row] = append(lines[row], repeatSpaces(spaces)...) + spaces = 0 + word = nil + } + } else { + // If the last character is a double-width rune, then we may not be able to add it to this line + // as it might cause us to go past the width. + lastCharLen := rw.RuneWidth(word[len(word)-1]) + if uniseg.StringWidth(string(word))+lastCharLen > width { + // If the current line has any content, let's move to the next + // line because the current word fills up the entire line. + if len(lines[row]) > 0 { + row++ + lines = append(lines, []rune{}) + } + lines[row] = append(lines[row], word...) + word = nil + } } } - if spaceW > 0 { - // There are trailing spaces. Add them. - for i := 0; i < spaceW; i++ { - lines[len(lines)-1] = append(lines[len(lines)-1], rune(' ')) - lineW += 1 - } - if lineW > width { - lines = append(lines, []any{}) - } + + if uniseg.StringWidth(string(lines[row]))+uniseg.StringWidth(string(word))+spaces >= width { + lines = append(lines, []rune{}) + lines[row+1] = append(lines[row+1], word...) + // We add an extra space at the end of the line to account for the + // trailing space at the end of the previous soft-wrapped lines so that + // behaviour when navigating is consistent and so that we don't need to + // continually add edges to handle the last line of the wrapped input. + spaces++ + lines[row+1] = append(lines[row+1], repeatSpaces(spaces)...) + } else { + lines[row] = append(lines[row], word...) + spaces++ + lines[row] = append(lines[row], repeatSpaces(spaces)...) 
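// Editor's note (illustrative sketch, not part of this diff): by this reading of the
// new wrap() in this hunk, every soft-wrapped segment -- including the final one --
// is emitted with one extra trailing space appended via repeatSpaces (assumed here to
// return n space runes). CursorUp/CursorDown lean on that invariant when they apply
// their trailingSpace offset, so navigation needs no special case for the last
// wrapped line. A hedged sketch of the expected behaviour under those assumptions:
//
//	segs := wrap([]rune("foo bar"), 4)
//	for _, s := range segs {
//		fmt.Printf("%q\n", string(s)) // "foo " then "bar ", by this reading
//	}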
} return lines diff --git a/packages/tui/internal/config/config.go b/packages/tui/internal/config/config.go index d20376dd..3dd6fcf5 100644 --- a/packages/tui/internal/config/config.go +++ b/packages/tui/internal/config/config.go @@ -16,27 +16,18 @@ type ModelUsage struct { LastUsed time.Time `toml:"last_used"` } -type ModeModel struct { - ProviderID string `toml:"provider_id"` - ModelID string `toml:"model_id"` -} - type State struct { - Theme string `toml:"theme"` - ModeModel map[string]ModeModel `toml:"mode_model"` - Provider string `toml:"provider"` - Model string `toml:"model"` - Mode string `toml:"mode"` - RecentlyUsedModels []ModelUsage `toml:"recently_used_models"` - MessagesRight bool `toml:"messages_right"` - SplitDiff bool `toml:"split_diff"` + Theme string `toml:"theme"` + Provider string `toml:"provider"` + Model string `toml:"model"` + RecentlyUsedModels []ModelUsage `toml:"recently_used_models"` + MessagesRight bool `toml:"messages_right"` + SplitDiff bool `toml:"split_diff"` } func NewState() *State { return &State{ Theme: "opencode", - Mode: "build", - ModeModel: make(map[string]ModeModel), RecentlyUsedModels: make([]ModelUsage, 0), } } @@ -69,15 +60,6 @@ func (s *State) UpdateModelUsage(providerID, modelID string) { } } -func (s *State) RemoveModelFromRecentlyUsed(providerID, modelID string) { - for i, usage := range s.RecentlyUsedModels { - if usage.ProviderID == providerID && usage.ModelID == modelID { - s.RecentlyUsedModels = append(s.RecentlyUsedModels[:i], s.RecentlyUsedModels[i+1:]...) - return - } - } -} - // SaveState writes the provided Config struct to the specified TOML file. // It will create the file if it doesn't exist, or overwrite it if it does. func SaveState(filePath string, state *State) error { diff --git a/packages/tui/internal/id/id.go b/packages/tui/internal/id/id.go deleted file mode 100644 index 0490b8f2..00000000 --- a/packages/tui/internal/id/id.go +++ /dev/null @@ -1,96 +0,0 @@ -package id - -import ( - "crypto/rand" - "encoding/hex" - "fmt" - "strings" - "sync" - "time" -) - -const ( - PrefixSession = "ses" - PrefixMessage = "msg" - PrefixUser = "usr" - PrefixPart = "prt" -) - -const length = 26 - -var ( - lastTimestamp int64 - counter int64 - mu sync.Mutex -) - -type Prefix string - -const ( - Session Prefix = PrefixSession - Message Prefix = PrefixMessage - User Prefix = PrefixUser - Part Prefix = PrefixPart -) - -func ValidatePrefix(id string, prefix Prefix) bool { - return strings.HasPrefix(id, string(prefix)) -} - -func Ascending(prefix Prefix, given ...string) string { - return generateID(prefix, false, given...) -} - -func Descending(prefix Prefix, given ...string) string { - return generateID(prefix, true, given...) 
-} - -func generateID(prefix Prefix, descending bool, given ...string) string { - if len(given) > 0 && given[0] != "" { - if !strings.HasPrefix(given[0], string(prefix)) { - panic(fmt.Sprintf("ID %s does not start with %s", given[0], string(prefix))) - } - return given[0] - } - - return generateNewID(prefix, descending) -} - -func randomBase62(length int) string { - const chars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" - result := make([]byte, length) - bytes := make([]byte, length) - rand.Read(bytes) - - for i := 0; i < length; i++ { - result[i] = chars[bytes[i]%62] - } - - return string(result) -} - -func generateNewID(prefix Prefix, descending bool) string { - mu.Lock() - defer mu.Unlock() - - currentTimestamp := time.Now().UnixMilli() - - if currentTimestamp != lastTimestamp { - lastTimestamp = currentTimestamp - counter = 0 - } - counter++ - - now := uint64(currentTimestamp)*0x1000 + uint64(counter) - - if descending { - now = ^now - } - - timeBytes := make([]byte, 6) - for i := 0; i < 6; i++ { - timeBytes[i] = byte((now >> (40 - 8*i)) & 0xff) - } - - return string(prefix) + "_" + hex.EncodeToString(timeBytes) + randomBase62(length-12) -} \ No newline at end of file diff --git a/packages/tui/internal/image/clipboard_unix.go b/packages/tui/internal/image/clipboard_unix.go new file mode 100644 index 00000000..2653d8ca --- /dev/null +++ b/packages/tui/internal/image/clipboard_unix.go @@ -0,0 +1,46 @@ +//go:build !windows + +package image + +import ( + "bytes" + "fmt" + "github.com/atotto/clipboard" + "image" +) + +func GetImageFromClipboard() ([]byte, string, error) { + text, err := clipboard.ReadAll() + if err != nil { + return nil, "", fmt.Errorf("Error reading clipboard") + } + + if text == "" { + return nil, "", nil + } + + binaryData := []byte(text) + imageBytes, err := binaryToImage(binaryData) + if err != nil { + return nil, text, nil + } + return imageBytes, "", nil + +} + +func binaryToImage(data []byte) ([]byte, error) { + reader := bytes.NewReader(data) + img, _, err := image.Decode(reader) + if err != nil { + return nil, fmt.Errorf("Unable to covert bytes to image") + } + + return ImageToBytes(img) +} + +func min(a, b int) int { + if a < b { + return a + } + return b +} diff --git a/packages/tui/internal/image/clipboard_windows.go b/packages/tui/internal/image/clipboard_windows.go new file mode 100644 index 00000000..6431ce3d --- /dev/null +++ b/packages/tui/internal/image/clipboard_windows.go @@ -0,0 +1,192 @@ +//go:build windows + +package image + +import ( + "bytes" + "fmt" + "image" + "image/color" + "log/slog" + "syscall" + "unsafe" +) + +var ( + user32 = syscall.NewLazyDLL("user32.dll") + kernel32 = syscall.NewLazyDLL("kernel32.dll") + openClipboard = user32.NewProc("OpenClipboard") + closeClipboard = user32.NewProc("CloseClipboard") + getClipboardData = user32.NewProc("GetClipboardData") + isClipboardFormatAvailable = user32.NewProc("IsClipboardFormatAvailable") + globalLock = kernel32.NewProc("GlobalLock") + globalUnlock = kernel32.NewProc("GlobalUnlock") + globalSize = kernel32.NewProc("GlobalSize") +) + +const ( + CF_TEXT = 1 + CF_UNICODETEXT = 13 + CF_DIB = 8 +) + +type BITMAPINFOHEADER struct { + BiSize uint32 + BiWidth int32 + BiHeight int32 + BiPlanes uint16 + BiBitCount uint16 + BiCompression uint32 + BiSizeImage uint32 + BiXPelsPerMeter int32 + BiYPelsPerMeter int32 + BiClrUsed uint32 + BiClrImportant uint32 +} + +func GetImageFromClipboard() ([]byte, string, error) { + ret, _, _ := openClipboard.Call(0) + if ret == 0 { + return nil, 
"", fmt.Errorf("failed to open clipboard") + } + defer func(closeClipboard *syscall.LazyProc, a ...uintptr) { + _, _, err := closeClipboard.Call(a...) + if err != nil { + slog.Error("close clipboard failed") + return + } + }(closeClipboard) + isTextAvailable, _, _ := isClipboardFormatAvailable.Call(uintptr(CF_TEXT)) + isUnicodeTextAvailable, _, _ := isClipboardFormatAvailable.Call(uintptr(CF_UNICODETEXT)) + + if isTextAvailable != 0 || isUnicodeTextAvailable != 0 { + // Get text from clipboard + var formatToUse uintptr = CF_TEXT + if isUnicodeTextAvailable != 0 { + formatToUse = CF_UNICODETEXT + } + + hClipboardText, _, _ := getClipboardData.Call(formatToUse) + if hClipboardText != 0 { + textPtr, _, _ := globalLock.Call(hClipboardText) + if textPtr != 0 { + defer func(globalUnlock *syscall.LazyProc, a ...uintptr) { + _, _, err := globalUnlock.Call(a...) + if err != nil { + slog.Error("Global unlock failed") + return + } + }(globalUnlock, hClipboardText) + + // Get clipboard text + var clipboardText string + if formatToUse == CF_UNICODETEXT { + // Convert wide string to Go string + clipboardText = syscall.UTF16ToString((*[1 << 20]uint16)(unsafe.Pointer(textPtr))[:]) + } else { + // Get size of ANSI text + size, _, _ := globalSize.Call(hClipboardText) + if size > 0 { + // Convert ANSI string to Go string + textBytes := make([]byte, size) + copy(textBytes, (*[1 << 20]byte)(unsafe.Pointer(textPtr))[:size:size]) + clipboardText = bytesToString(textBytes) + } + } + + // Check if the text is not empty + if clipboardText != "" { + return nil, clipboardText, nil + } + } + } + } + hClipboardData, _, _ := getClipboardData.Call(uintptr(CF_DIB)) + if hClipboardData == 0 { + return nil, "", fmt.Errorf("failed to get clipboard data") + } + + dataPtr, _, _ := globalLock.Call(hClipboardData) + if dataPtr == 0 { + return nil, "", fmt.Errorf("failed to lock clipboard data") + } + defer func(globalUnlock *syscall.LazyProc, a ...uintptr) { + _, _, err := globalUnlock.Call(a...) 
+ if err != nil { + slog.Error("Global unlock failed") + return + } + }(globalUnlock, hClipboardData) + + bmiHeader := (*BITMAPINFOHEADER)(unsafe.Pointer(dataPtr)) + + width := int(bmiHeader.BiWidth) + height := int(bmiHeader.BiHeight) + if height < 0 { + height = -height + } + bitsPerPixel := int(bmiHeader.BiBitCount) + + img := image.NewRGBA(image.Rect(0, 0, width, height)) + + var bitsOffset uintptr + if bitsPerPixel <= 8 { + numColors := uint32(1) << bitsPerPixel + if bmiHeader.BiClrUsed > 0 { + numColors = bmiHeader.BiClrUsed + } + bitsOffset = unsafe.Sizeof(*bmiHeader) + uintptr(numColors*4) + } else { + bitsOffset = unsafe.Sizeof(*bmiHeader) + } + + for y := range height { + for x := range width { + + srcY := height - y - 1 + if bmiHeader.BiHeight < 0 { + srcY = y + } + + var pixelPointer unsafe.Pointer + var r, g, b, a uint8 + + switch bitsPerPixel { + case 24: + stride := (width*3 + 3) &^ 3 + pixelPointer = unsafe.Pointer(dataPtr + bitsOffset + uintptr(srcY*stride+x*3)) + b = *(*byte)(pixelPointer) + g = *(*byte)(unsafe.Add(pixelPointer, 1)) + r = *(*byte)(unsafe.Add(pixelPointer, 2)) + a = 255 + case 32: + pixelPointer = unsafe.Pointer(dataPtr + bitsOffset + uintptr(srcY*width*4+x*4)) + b = *(*byte)(pixelPointer) + g = *(*byte)(unsafe.Add(pixelPointer, 1)) + r = *(*byte)(unsafe.Add(pixelPointer, 2)) + a = *(*byte)(unsafe.Add(pixelPointer, 3)) + if a == 0 { + a = 255 + } + default: + return nil, "", fmt.Errorf("unsupported bit count: %d", bitsPerPixel) + } + + img.Set(x, y, color.RGBA{R: r, G: g, B: b, A: a}) + } + } + + imageBytes, err := ImageToBytes(img) + if err != nil { + return nil, "", err + } + return imageBytes, "", nil +} + +func bytesToString(b []byte) string { + i := bytes.IndexByte(b, 0) + if i == -1 { + return string(b) + } + return string(b[:i]) +} diff --git a/packages/tui/internal/image/images.go b/packages/tui/internal/image/images.go new file mode 100644 index 00000000..742eb30a --- /dev/null +++ b/packages/tui/internal/image/images.go @@ -0,0 +1,86 @@ +package image + +import ( + "bytes" + "fmt" + "image" + "image/color" + "image/png" + "os" + "strings" + + "github.com/charmbracelet/lipgloss/v2" + "github.com/disintegration/imaging" + "github.com/lucasb-eyer/go-colorful" + _ "golang.org/x/image/webp" +) + +func ValidateFileSize(filePath string, sizeLimit int64) (bool, error) { + fileInfo, err := os.Stat(filePath) + if err != nil { + return false, fmt.Errorf("error getting file info: %w", err) + } + + if fileInfo.Size() > sizeLimit { + return true, nil + } + + return false, nil +} + +func ToString(width int, img image.Image) string { + img = imaging.Resize(img, width, 0, imaging.Lanczos) + b := img.Bounds() + imageWidth := b.Max.X + h := b.Max.Y + str := strings.Builder{} + + for heightCounter := 0; heightCounter < h; heightCounter += 2 { + for x := range imageWidth { + c1, _ := colorful.MakeColor(img.At(x, heightCounter)) + color1 := lipgloss.Color(c1.Hex()) + + var color2 color.Color + if heightCounter+1 < h { + c2, _ := colorful.MakeColor(img.At(x, heightCounter+1)) + color2 = lipgloss.Color(c2.Hex()) + } else { + color2 = color1 + } + + str.WriteString(lipgloss.NewStyle().Foreground(color1). 
+ Background(color2).Render("▀")) + } + + str.WriteString("\n") + } + + return str.String() +} + +func ImagePreview(width int, filename string) (string, error) { + imageContent, err := os.Open(filename) + if err != nil { + return "", err + } + defer imageContent.Close() + + img, _, err := image.Decode(imageContent) + if err != nil { + return "", err + } + + imageString := ToString(width, img) + + return imageString, nil +} + +func ImageToBytes(image image.Image) ([]byte, error) { + buf := new(bytes.Buffer) + err := png.Encode(buf, image) + if err != nil { + return nil, err + } + + return buf.Bytes(), nil +} diff --git a/packages/tui/internal/layout/flex_example_test.go b/packages/tui/internal/layout/flex_example_test.go new file mode 100644 index 00000000..a03346eb --- /dev/null +++ b/packages/tui/internal/layout/flex_example_test.go @@ -0,0 +1,41 @@ +package layout_test + +import ( + "fmt" + "github.com/sst/opencode/internal/layout" +) + +func ExampleRender_withGap() { + // Create a horizontal layout with 3px gap between items + result := layout.Render( + layout.FlexOptions{ + Direction: layout.Row, + Width: 30, + Height: 1, + Gap: 3, + }, + layout.FlexItem{View: "Item1"}, + layout.FlexItem{View: "Item2"}, + layout.FlexItem{View: "Item3"}, + ) + fmt.Println(result) + // Output: Item1 Item2 Item3 +} + +func ExampleRender_withGapAndJustify() { + // Create a horizontal layout with gap and space-between justification + result := layout.Render( + layout.FlexOptions{ + Direction: layout.Row, + Width: 30, + Height: 1, + Gap: 2, + Justify: layout.JustifySpaceBetween, + }, + layout.FlexItem{View: "A"}, + layout.FlexItem{View: "B"}, + layout.FlexItem{View: "C"}, + ) + fmt.Println(result) + // Output: A B C +} diff --git a/packages/tui/internal/layout/flex_test.go b/packages/tui/internal/layout/flex_test.go new file mode 100644 index 00000000..cad38dc8 --- /dev/null +++ b/packages/tui/internal/layout/flex_test.go @@ -0,0 +1,90 @@ +package layout + +import ( + "strings" + "testing" +) + +func TestFlexGap(t *testing.T) { + tests := []struct { + name string + opts FlexOptions + items []FlexItem + expected string + }{ + { + name: "Row with gap", + opts: FlexOptions{ + Direction: Row, + Width: 20, + Height: 1, + Gap: 2, + }, + items: []FlexItem{ + {View: "A"}, + {View: "B"}, + {View: "C"}, + }, + expected: "A B C", + }, + { + name: "Column with gap", + opts: FlexOptions{ + Direction: Column, + Width: 1, + Height: 5, + Gap: 1, + Align: AlignStart, + }, + items: []FlexItem{ + {View: "A", FixedSize: 1}, + {View: "B", FixedSize: 1}, + {View: "C", FixedSize: 1}, + }, + expected: "A\n \nB\n \nC", + }, + { + name: "Row with gap and justify space between", + opts: FlexOptions{ + Direction: Row, + Width: 15, + Height: 1, + Gap: 1, + Justify: JustifySpaceBetween, + }, + items: []FlexItem{ + {View: "A"}, + {View: "B"}, + {View: "C"}, + }, + expected: "A B C", + }, + { + name: "No gap specified", + opts: FlexOptions{ + Direction: Row, + Width: 10, + Height: 1, + }, + items: []FlexItem{ + {View: "A"}, + {View: "B"}, + {View: "C"}, + }, + expected: "ABC", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := Render(tt.opts, tt.items...) 
+ // Trim any trailing spaces for comparison + result = strings.TrimRight(result, " ") + expected := strings.TrimRight(tt.expected, " ") + + if result != expected { + t.Errorf("Render() = %q, want %q", result, expected) + } + }) + } +} diff --git a/packages/tui/internal/layout/overlay.go b/packages/tui/internal/layout/overlay.go index 08016e31..48064c91 100644 --- a/packages/tui/internal/layout/overlay.go +++ b/packages/tui/internal/layout/overlay.go @@ -15,11 +15,6 @@ import ( "github.com/sst/opencode/internal/util" ) -var ( - // ANSI escape sequence regex - ansiRegex = regexp.MustCompile(`\x1b\[[0-9;]*m`) -) - // Split a string into lines, additionally returning the size of the widest line. func getLines(s string) (lines []string, widest int) { lines = strings.Split(s, "\n") @@ -277,6 +272,9 @@ func combineStyles(bgStyle ansiStyle, fgColor *compat.AdaptiveColor) string { // getStyleAtPosition extracts the active ANSI style at a given visual position func getStyleAtPosition(s string, targetPos int) ansiStyle { + // ANSI escape sequence regex + ansiRegex := regexp.MustCompile(`\x1b\[[0-9;]*m`) + visualPos := 0 currentStyle := ansiStyle{} diff --git a/packages/tui/internal/styles/markdown.go b/packages/tui/internal/styles/markdown.go index 9ff87c4e..14db7546 100644 --- a/packages/tui/internal/styles/markdown.go +++ b/packages/tui/internal/styles/markdown.go @@ -284,6 +284,7 @@ func generateMarkdownStyleConfig(backgroundColor compat.AdaptiveColor) ansi.Styl Table: ansi.StyleTable{ StyleBlock: ansi.StyleBlock{ StylePrimitive: ansi.StylePrimitive{ + BlockPrefix: "\n", BlockSuffix: "\n", }, }, diff --git a/packages/tui/internal/tui/tui.go b/packages/tui/internal/tui/tui.go index a81c1a6b..cc437f80 100644 --- a/packages/tui/internal/tui/tui.go +++ b/packages/tui/internal/tui/tui.go @@ -2,11 +2,9 @@ package tui import ( "context" - "fmt" "log/slog" "os" "os/exec" - "slices" "strings" "time" @@ -25,7 +23,6 @@ import ( "github.com/sst/opencode/internal/components/modal" "github.com/sst/opencode/internal/components/status" "github.com/sst/opencode/internal/components/toast" - "github.com/sst/opencode/internal/config" "github.com/sst/opencode/internal/layout" "github.com/sst/opencode/internal/styles" "github.com/sst/opencode/internal/theme" @@ -35,27 +32,16 @@ import ( // InterruptDebounceTimeoutMsg is sent when the interrupt key debounce timeout expires type InterruptDebounceTimeoutMsg struct{} -// ExitDebounceTimeoutMsg is sent when the exit key debounce timeout expires -type ExitDebounceTimeoutMsg struct{} - // InterruptKeyState tracks the state of interrupt key presses for debouncing type InterruptKeyState int -// ExitKeyState tracks the state of exit key presses for debouncing -type ExitKeyState int - const ( InterruptKeyIdle InterruptKeyState = iota InterruptKeyFirstPress ) -const ( - ExitKeyIdle ExitKeyState = iota - ExitKeyFirstPress -) - const interruptDebounceTimeout = 1 * time.Second -const exitDebounceTimeout = 1 * time.Second +const fileViewerFullWidthCutoff = 160 type appModel struct { width, height int @@ -65,17 +51,19 @@ type appModel struct { editor chat.EditorComponent messages chat.MessagesComponent completions dialog.CompletionDialog - commandProvider completions.CompletionProvider - fileProvider completions.CompletionProvider - symbolsProvider completions.CompletionProvider + completionManager *completions.CompletionManager showCompletionDialog bool leaderBinding *key.Binding - // isLeaderSequence bool - toastManager *toast.ToastManager - interruptKeyState InterruptKeyState - 
exitKeyState ExitKeyState - messagesRight bool - fileViewer fileviewer.Model + isLeaderSequence bool + toastManager *toast.ToastManager + interruptKeyState InterruptKeyState + lastScroll time.Time + messagesRight bool + fileViewer fileviewer.Model + lastMouse tea.Mouse + fileViewerStart int + fileViewerEnd int + fileViewerHit bool } func (a appModel) Init() tea.Cmd { @@ -102,6 +90,43 @@ func (a appModel) Init() tea.Cmd { return tea.Batch(cmds...) } +var BUGGED_SCROLL_KEYS = map[string]bool{ + "0": true, + "1": true, + "2": true, + "3": true, + "4": true, + "5": true, + "6": true, + "7": true, + "8": true, + "9": true, + "M": true, + "m": true, + "[": true, + ";": true, +} + +func isScrollRelatedInput(keyString string) bool { + if len(keyString) == 0 { + return false + } + + for _, char := range keyString { + charStr := string(char) + if !BUGGED_SCROLL_KEYS[charStr] { + return false + } + } + + if len(keyString) > 3 && + (keyString[len(keyString)-1] == 'M' || keyString[len(keyString)-1] == 'm') { + return true + } + + return len(keyString) > 1 +} + func (a appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) { var cmd tea.Cmd var cmds []tea.Cmd @@ -109,6 +134,9 @@ func (a appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) { switch msg := msg.(type) { case tea.KeyPressMsg: keyString := msg.String() + if time.Since(a.lastScroll) < time.Millisecond*100 && (BUGGED_SCROLL_KEYS[keyString] || isScrollRelatedInput(keyString)) { + return a, nil + } // 1. Handle active modal if a.modal != nil { @@ -137,45 +165,46 @@ func (a appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) { } // 2. Check for commands that require leader - if a.app.IsLeaderSequence { - matches := a.app.Commands.Matches(msg, a.app.IsLeaderSequence) - a.app.IsLeaderSequence = false + if a.isLeaderSequence { + matches := a.app.Commands.Matches(msg, a.isLeaderSequence) + a.isLeaderSequence = false if len(matches) > 0 { return a, util.CmdHandler(commands.ExecuteCommandsMsg(matches)) } } // 3. Handle completions trigger - if keyString == "/" && - !a.showCompletionDialog && - a.editor.Value() == "" { + if keyString == "/" && !a.showCompletionDialog { a.showCompletionDialog = true - updated, cmd := a.editor.Update(msg) - a.editor = updated.(chat.EditorComponent) - cmds = append(cmds, cmd) + initialValue := "/" + currentInput := a.editor.Value() - // Set command provider for command completion - a.completions = dialog.NewCompletionDialogComponent("/", a.commandProvider) - updated, cmd = a.completions.Update(msg) + // if the input doesn't end with a space, + // then we want to include the last word + // (ie, `packages/`) + if !strings.HasSuffix(currentInput, " ") { + words := strings.Split(a.editor.Value(), " ") + if len(words) > 0 { + lastWord := words[len(words)-1] + lastWord = strings.TrimSpace(lastWord) + initialValue = lastWord + "/" + } + } + + updated, cmd := a.completions.Update( + app.CompletionDialogTriggeredMsg{ + InitialValue: initialValue, + }, + ) a.completions = updated.(dialog.CompletionDialog) cmds = append(cmds, cmd) - return a, tea.Sequence(cmds...) 
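// Editor's note (illustrative sketch, not part of this diff): the "/" trigger handled
// above seeds the completion dialog from the word under the cursor. By this reading,
// with the editor containing "open packages" (no trailing space), pressing "/" sends
// CompletionDialogTriggeredMsg{InitialValue: "packages/"}; with an empty editor or a
// trailing space it falls back to plain "/". The same derivation in isolation:
//
//	input := "open packages"
//	initial := "/"
//	if !strings.HasSuffix(input, " ") {
//		words := strings.Split(input, " ")
//		initial = strings.TrimSpace(words[len(words)-1]) + "/"
//	}
//	fmt.Println(initial) // packages/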
- } - - // Handle file completions trigger - if keyString == "@" && - !a.showCompletionDialog { - a.showCompletionDialog = true - - updated, cmd := a.editor.Update(msg) + updated, cmd = a.editor.Update(msg) a.editor = updated.(chat.EditorComponent) cmds = append(cmds, cmd) - // Set both file and symbols providers for @ completion - a.completions = dialog.NewCompletionDialogComponent("@", a.fileProvider, a.symbolsProvider) - updated, cmd = a.completions.Update(msg) + updated, cmd = a.updateCompletions(msg) a.completions = updated.(dialog.CompletionDialog) cmds = append(cmds, cmd) @@ -184,8 +213,8 @@ func (a appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) { if a.showCompletionDialog { switch keyString { - case "tab", "enter", "esc", "ctrl+c", "up", "down", "ctrl+p", "ctrl+n": - updated, cmd := a.completions.Update(msg) + case "tab", "enter", "esc", "ctrl+c": + updated, cmd := a.updateCompletions(msg) a.completions = updated.(dialog.CompletionDialog) cmds = append(cmds, cmd) return a, tea.Batch(cmds...) @@ -195,7 +224,7 @@ func (a appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) { a.editor = updated.(chat.EditorComponent) cmds = append(cmds, cmd) - updated, cmd = a.completions.Update(msg) + updated, cmd = a.updateCompletions(msg) a.completions = updated.(dialog.CompletionDialog) cmds = append(cmds, cmd) @@ -212,21 +241,15 @@ func (a appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) { // 5. Check for leader key activation if a.leaderBinding != nil && - !a.app.IsLeaderSequence && + !a.isLeaderSequence && key.Matches(msg, *a.leaderBinding) { - a.app.IsLeaderSequence = true + a.isLeaderSequence = true return a, nil } - // 6 Handle input clear command - inputClearCommand := a.app.Commands[commands.InputClearCommand] - if inputClearCommand.Matches(msg, a.app.IsLeaderSequence) && a.editor.Length() > 0 { - return a, util.CmdHandler(commands.ExecuteCommandMsg(inputClearCommand)) - } - - // 7. Handle interrupt key debounce for session interrupt + // 6. Handle interrupt key debounce for session interrupt interruptCommand := a.app.Commands[commands.SessionInterruptCommand] - if interruptCommand.Matches(msg, a.app.IsLeaderSequence) && a.app.IsBusy() { + if interruptCommand.Matches(msg, a.isLeaderSequence) && a.app.IsBusy() { switch a.interruptKeyState { case InterruptKeyIdle: // First interrupt key press - start debounce timer @@ -243,56 +266,48 @@ func (a appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) { } } - // 8. Handle exit key debounce for app exit when using non-leader command - exitCommand := a.app.Commands[commands.AppExitCommand] - if exitCommand.Matches(msg, a.app.IsLeaderSequence) { - switch a.exitKeyState { - case ExitKeyIdle: - // First exit key press - start debounce timer - a.exitKeyState = ExitKeyFirstPress - a.editor.SetExitKeyInDebounce(true) - return a, tea.Tick(exitDebounceTimeout, func(t time.Time) tea.Msg { - return ExitDebounceTimeoutMsg{} - }) - case ExitKeyFirstPress: - // Second exit key press within timeout - actually exit - a.exitKeyState = ExitKeyIdle - a.editor.SetExitKeyInDebounce(false) - return a, util.CmdHandler(commands.ExecuteCommandMsg(exitCommand)) - } - } - - // 9. Check again for commands that don't require leader (excluding interrupt when busy and exit when in debounce) - matches := a.app.Commands.Matches(msg, a.app.IsLeaderSequence) + // 7. 
Check again for commands that don't require leader (excluding interrupt when busy) + matches := a.app.Commands.Matches(msg, a.isLeaderSequence) if len(matches) > 0 { // Skip interrupt key if we're in debounce mode and app is busy - if interruptCommand.Matches(msg, a.app.IsLeaderSequence) && a.app.IsBusy() && a.interruptKeyState != InterruptKeyIdle { + if interruptCommand.Matches(msg, a.isLeaderSequence) && a.app.IsBusy() && a.interruptKeyState != InterruptKeyIdle { return a, nil } return a, util.CmdHandler(commands.ExecuteCommandsMsg(matches)) } - // Fallback: suspend if ctrl+z is pressed and no user keybind matched - if keyString == "ctrl+z" { - return a, tea.Suspend - } - - // 10. Fallback to editor. This is for other characters like backspace, tab, etc. + // 7. Fallback to editor. This is for other characters + // like backspace, tab, etc. updatedEditor, cmd := a.editor.Update(msg) a.editor = updatedEditor.(chat.EditorComponent) return a, cmd case tea.MouseWheelMsg: + a.lastScroll = time.Now() if a.modal != nil { - u, cmd := a.modal.Update(msg) - a.modal = u.(layout.Modal) - cmds = append(cmds, cmd) - return a, tea.Batch(cmds...) + return a, nil + } + + var cmd tea.Cmd + if a.fileViewerHit { + a.fileViewer, cmd = a.fileViewer.Update(msg) + cmds = append(cmds, cmd) + } else { + updated, cmd := a.messages.Update(msg) + a.messages = updated.(chat.MessagesComponent) + cmds = append(cmds, cmd) } - updated, cmd := a.messages.Update(msg) - a.messages = updated.(chat.MessagesComponent) - cmds = append(cmds, cmd) return a, tea.Batch(cmds...) + case tea.MouseMotionMsg: + a.lastMouse = msg.Mouse() + a.fileViewerHit = a.fileViewer.HasFile() && + a.lastMouse.X > a.fileViewerStart && + a.lastMouse.X < a.fileViewerEnd + case tea.MouseClickMsg: + a.lastMouse = msg.Mouse() + a.fileViewerHit = a.fileViewer.HasFile() && + a.lastMouse.X > a.fileViewerStart && + a.lastMouse.X < a.fileViewerEnd case tea.BackgroundColorMsg: styles.Terminal = &styles.TerminalInfo{ Background: msg.Color, @@ -330,13 +345,7 @@ func (a appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) { return a, toast.NewErrorToast(msg.Error()) case app.SendMsg: a.showCompletionDialog = false - a.app, cmd = a.app.SendChatMessage(context.Background(), msg.Text, msg.Attachments) - cmds = append(cmds, cmd) - case app.SetEditorContentMsg: - // Set the editor content without sending - a.editor.SetValueWithAttachments(msg.Text) - updated, cmd := a.editor.Focus() - a.editor = updated.(chat.EditorComponent) + cmd := a.app.SendChatMessage(context.Background(), msg.Text, msg.Attachments) cmds = append(cmds, cmd) case dialog.CompletionDialogCloseMsg: a.showCompletionDialog = false @@ -348,76 +357,45 @@ func (a appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) { case opencode.EventListResponseEventSessionDeleted: if a.app.Session != nil && msg.Properties.Info.ID == a.app.Session.ID { a.app.Session = &opencode.Session{} - a.app.Messages = []app.Message{} + a.app.Messages = []opencode.Message{} } return a, toast.NewSuccessToast("Session deleted successfully") case opencode.EventListResponseEventSessionUpdated: if msg.Properties.Info.ID == a.app.Session.ID { a.app.Session = &msg.Properties.Info } - case opencode.EventListResponseEventMessagePartUpdated: - slog.Info("message part updated", "message", msg.Properties.Part.MessageID, "part", msg.Properties.Part.ID) - if msg.Properties.Part.SessionID == a.app.Session.ID { - messageIndex := slices.IndexFunc(a.app.Messages, func(m app.Message) bool { - switch casted := m.Info.(type) { - case opencode.UserMessage: - 
return casted.ID == msg.Properties.Part.MessageID - case opencode.AssistantMessage: - return casted.ID == msg.Properties.Part.MessageID - } - return false - }) - if messageIndex > -1 { - message := a.app.Messages[messageIndex] - partIndex := slices.IndexFunc(message.Parts, func(p opencode.PartUnion) bool { - switch casted := p.(type) { - case opencode.TextPart: - return casted.ID == msg.Properties.Part.ID - case opencode.FilePart: - return casted.ID == msg.Properties.Part.ID - case opencode.ToolPart: - return casted.ID == msg.Properties.Part.ID - case opencode.StepStartPart: - return casted.ID == msg.Properties.Part.ID - case opencode.StepFinishPart: - return casted.ID == msg.Properties.Part.ID - } - return false - }) - if partIndex > -1 { - message.Parts[partIndex] = msg.Properties.Part.AsUnion() - } - if partIndex == -1 { - message.Parts = append(message.Parts, msg.Properties.Part.AsUnion()) - } - a.app.Messages[messageIndex] = message - } - } case opencode.EventListResponseEventMessageUpdated: - if msg.Properties.Info.SessionID == a.app.Session.ID { - matchIndex := slices.IndexFunc(a.app.Messages, func(m app.Message) bool { - switch casted := m.Info.(type) { - case opencode.UserMessage: - return casted.ID == msg.Properties.Info.ID - case opencode.AssistantMessage: - return casted.ID == msg.Properties.Info.ID - } - return false - }) + if msg.Properties.Info.Metadata.SessionID == a.app.Session.ID { + exists := false + optimisticReplaced := false - if matchIndex > -1 { - match := a.app.Messages[matchIndex] - a.app.Messages[matchIndex] = app.Message{ - Info: msg.Properties.Info.AsUnion(), - Parts: match.Parts, + // First check if this is replacing an optimistic message + if msg.Properties.Info.Role == opencode.MessageRoleUser { + // Look for optimistic messages to replace + for i, m := range a.app.Messages { + if strings.HasPrefix(m.ID, "optimistic-") && m.Role == opencode.MessageRoleUser { + // Replace the optimistic message with the real one + a.app.Messages[i] = msg.Properties.Info + exists = true + optimisticReplaced = true + break + } } } - if matchIndex == -1 { - a.app.Messages = append(a.app.Messages, app.Message{ - Info: msg.Properties.Info.AsUnion(), - Parts: []opencode.PartUnion{}, - }) + // If not replacing optimistic, check for existing message with same ID + if !optimisticReplaced { + for i, m := range a.app.Messages { + if m.ID == msg.Properties.Info.ID { + a.app.Messages[i] = msg.Properties.Info + exists = true + break + } + } + } + + if !exists { + a.app.Messages = append(a.app.Messages, msg.Properties.Info) } } case opencode.EventListResponseEventSessionError: @@ -439,7 +417,14 @@ func (a appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) { case tea.WindowSizeMsg: msg.Height -= 2 // Make space for the status bar a.width, a.height = msg.Width, msg.Height - container := min(a.width, 86) + container := min(a.width, 84) + if a.fileViewer.HasFile() { + if a.width < fileViewerFullWidthCutoff { + container = a.width + } else { + container = min(min(a.width, max(a.width/2, 50)), 84) + } + } layout.Current = &layout.LayoutInfo{ Viewport: layout.Dimensions{ Width: a.width, @@ -449,25 +434,35 @@ func (a appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) { Width: container, }, } + mainWidth := layout.Current.Container.Width + a.messages.SetWidth(mainWidth - 4) + + sideWidth := a.width - mainWidth + if a.width < fileViewerFullWidthCutoff { + sideWidth = a.width + } + a.fileViewerStart = mainWidth + a.fileViewerEnd = a.fileViewerStart + sideWidth + if a.messagesRight { + 
a.fileViewerStart = 0 + a.fileViewerEnd = sideWidth + } + a.fileViewer, cmd = a.fileViewer.SetSize(sideWidth, layout.Current.Viewport.Height) + cmds = append(cmds, cmd) case app.SessionSelectedMsg: messages, err := a.app.ListMessages(context.Background(), msg.ID) if err != nil { - slog.Error("Failed to list messages", "error", err.Error()) + slog.Error("Failed to list messages", "error", err) return a, toast.NewErrorToast("Failed to open session") } a.app.Session = msg a.app.Messages = messages return a, util.CmdHandler(app.SessionLoadedMsg{}) - case app.SessionCreatedMsg: - a.app.Session = msg.Session - return a, util.CmdHandler(app.SessionLoadedMsg{}) case app.ModelSelectedMsg: a.app.Provider = &msg.Provider a.app.Model = &msg.Model - a.app.State.ModeModel[a.app.Mode.Name] = config.ModeModel{ - ProviderID: msg.Provider.ID, - ModelID: msg.Model.ID, - } + a.app.State.Provider = msg.Provider.ID + a.app.State.Model = msg.Model.ID a.app.State.UpdateModelUsage(msg.Provider.ID, msg.Model.ID) a.app.SaveState() case dialog.ThemeSelectedMsg: @@ -485,10 +480,6 @@ func (a appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) { // Reset interrupt key state after timeout a.interruptKeyState = InterruptKeyIdle a.editor.SetInterruptKeyInDebounce(false) - case ExitDebounceTimeoutMsg: - // Reset exit key state after timeout - a.exitKeyState = ExitKeyIdle - a.editor.SetExitKeyInDebounce(false) case dialog.FindSelectedMsg: return a.openFile(msg.FilePath) } @@ -528,22 +519,48 @@ func (a appModel) View() string { t := theme.CurrentTheme() var mainLayout string - + mainWidth := layout.Current.Container.Width - 4 if a.app.Session.ID == "" { - mainLayout = a.home() + mainLayout = a.home(mainWidth) } else { - mainLayout = a.chat() + mainLayout = a.chat(mainWidth) } mainLayout = styles.NewStyle(). Background(t.Background()). Padding(0, 2). 
Render(mainLayout) - mainLayout = lipgloss.PlaceHorizontal( - a.width, - lipgloss.Center, - mainLayout, - styles.WhitespaceStyle(t.Background()), - ) + + mainHeight := lipgloss.Height(mainLayout) + + if a.fileViewer.HasFile() { + file := a.fileViewer.View() + baseStyle := styles.NewStyle().Background(t.BackgroundPanel()) + sidePanel := baseStyle.Height(mainHeight).Render(file) + if a.width >= fileViewerFullWidthCutoff { + if a.messagesRight { + mainLayout = lipgloss.JoinHorizontal( + lipgloss.Top, + sidePanel, + mainLayout, + ) + } else { + mainLayout = lipgloss.JoinHorizontal( + lipgloss.Top, + mainLayout, + sidePanel, + ) + } + } else { + mainLayout = sidePanel + } + } else { + mainLayout = lipgloss.PlaceHorizontal( + a.width, + lipgloss.Center, + mainLayout, + styles.WhitespaceStyle(t.Background()), + ) + } mainStyle := styles.NewStyle().Background(t.Background()) mainLayout = mainStyle.Render(mainLayout) @@ -579,9 +596,8 @@ func (a appModel) openFile(filepath string) (tea.Model, tea.Cmd) { return a, cmd } -func (a appModel) home() string { +func (a appModel) home(width int) string { t := theme.CurrentTheme() - effectiveWidth := a.width - 4 baseStyle := styles.NewStyle().Background(t.Background()) base := baseStyle.Render muted := styles.NewStyle().Foreground(t.TextMuted()).Background(t.Background()).Render @@ -612,7 +628,7 @@ func (a appModel) home() string { logoAndVersion := strings.Join([]string{logo, version}, "\n") logoAndVersion = lipgloss.PlaceHorizontal( - effectiveWidth, + width, lipgloss.Center, logoAndVersion, styles.WhitespaceStyle(t.Background()), @@ -623,7 +639,7 @@ func (a appModel) home() string { cmdcomp.WithLimit(6), ) cmds := lipgloss.PlaceHorizontal( - effectiveWidth, + width, lipgloss.Center, commandsView.View(), styles.WhitespaceStyle(t.Background()), @@ -635,16 +651,19 @@ func (a appModel) home() string { lines = append(lines, logoAndVersion) lines = append(lines, "") lines = append(lines, "") + // lines = append(lines, base("cwd ")+muted(cwd)) + // lines = append(lines, base("config ")+muted(config)) + // lines = append(lines, "") lines = append(lines, cmds) lines = append(lines, "") lines = append(lines, "") mainHeight := lipgloss.Height(strings.Join(lines, "\n")) - editorView := a.editor.View() - editorWidth := lipgloss.Width(editorView) + editorWidth := min(width, 80) + editorView := a.editor.View(editorWidth) editorView = lipgloss.PlaceHorizontal( - effectiveWidth, + width, lipgloss.Center, editorView, styles.WhitespaceStyle(t.Background()), @@ -654,7 +673,7 @@ func (a appModel) home() string { editorLines := a.editor.Lines() mainLayout := lipgloss.Place( - effectiveWidth, + width, a.height, lipgloss.Center, lipgloss.Center, @@ -662,14 +681,14 @@ func (a appModel) home() string { styles.WhitespaceStyle(t.Background()), ) - editorX := (effectiveWidth - editorWidth) / 2 + editorX := (width - editorWidth) / 2 editorY := (a.height / 2) + (mainHeight / 2) - 2 if editorLines > 1 { mainLayout = layout.PlaceOverlay( editorX, editorY, - a.editor.Content(), + a.editor.Content(editorWidth), mainLayout, ) } @@ -690,31 +709,23 @@ func (a appModel) home() string { return mainLayout } -func (a appModel) chat() string { - effectiveWidth := a.width - 4 - t := theme.CurrentTheme() - editorView := a.editor.View() +func (a appModel) chat(width int) string { + editorView := a.editor.View(width) lines := a.editor.Lines() - messagesView := a.messages.View() + messagesView := a.messages.View(width, a.height-5) editorWidth := lipgloss.Width(editorView) editorHeight := max(lines, 5) 
- editorView = lipgloss.PlaceHorizontal( - effectiveWidth, - lipgloss.Center, - editorView, - styles.WhitespaceStyle(t.Background()), - ) mainLayout := messagesView + "\n" + editorView - editorX := (effectiveWidth - editorWidth) / 2 + editorX := (a.width - editorWidth) / 2 if lines > 1 { editorY := a.height - editorHeight mainLayout = layout.PlaceOverlay( editorX, editorY, - a.editor.Content(), + a.editor.Content(width), mainLayout, ) } @@ -745,14 +756,6 @@ func (a appModel) executeCommand(command commands.Command) (tea.Model, tea.Cmd) case commands.AppHelpCommand: helpDialog := dialog.NewHelpDialog(a.app) a.modal = helpDialog - case commands.SwitchModeCommand: - updated, cmd := a.app.SwitchMode() - a.app = updated - cmds = append(cmds, cmd) - case commands.SwitchModeReverseCommand: - updated, cmd := a.app.SwitchModeReverse() - a.app = updated - cmds = append(cmds, cmd) case commands.EditorOpenCommand: if a.app.IsBusy() { // status.Warn("Agent is working, please wait...") @@ -794,8 +797,11 @@ func (a appModel) executeCommand(command commands.Command) (tea.Model, tea.Cmd) return nil } os.Remove(tmpfile.Name()) - return app.SetEditorContentMsg{ - Text: string(content), + // attachments := m.attachments + // m.attachments = nil + return app.SendMsg{ + Text: string(content), + Attachments: []app.Attachment{}, // attachments, } }) cmds = append(cmds, cmd) @@ -804,7 +810,7 @@ func (a appModel) executeCommand(command commands.Command) (tea.Model, tea.Cmd) return a, nil } a.app.Session = &opencode.Session{} - a.app.Messages = []app.Message{} + a.app.Messages = []opencode.Message{} cmds = append(cmds, util.CmdHandler(app.SessionClearedMsg{})) case commands.SessionListCommand: sessionDialog := dialog.NewSessionDialog(a.app) @@ -819,7 +825,7 @@ func (a appModel) executeCommand(command commands.Command) (tea.Model, tea.Cmd) return a, toast.NewErrorToast("Failed to share session") } shareUrl := response.Share.URL - cmds = append(cmds, a.app.SetClipboard(shareUrl)) + cmds = append(cmds, tea.SetClipboard(shareUrl)) cmds = append(cmds, toast.NewSuccessToast("Share URL copied to clipboard!")) case commands.SessionUnshareCommand: if a.app.Session.ID == "" { @@ -844,56 +850,6 @@ func (a appModel) executeCommand(command commands.Command) (tea.Model, tea.Cmd) } // TODO: block until compaction is complete a.app.CompactSession(context.Background()) - case commands.SessionExportCommand: - if a.app.Session.ID == "" { - return a, toast.NewErrorToast("No active session to export.") - } - - // Use current conversation history - messages := a.app.Messages - if len(messages) == 0 { - return a, toast.NewInfoToast("No messages to export.") - } - - // Format to Markdown - markdownContent := formatConversationToMarkdown(messages) - - // Check if EDITOR is set - editor := os.Getenv("EDITOR") - if editor == "" { - return a, toast.NewErrorToast("No EDITOR set, can't open editor") - } - - // Create and write to temp file - tmpfile, err := os.CreateTemp("", "conversation-*.md") - if err != nil { - slog.Error("Failed to create temp file", "error", err) - return a, toast.NewErrorToast("Failed to create temporary file.") - } - - _, err = tmpfile.WriteString(markdownContent) - if err != nil { - slog.Error("Failed to write to temp file", "error", err) - tmpfile.Close() - os.Remove(tmpfile.Name()) - return a, toast.NewErrorToast("Failed to write conversation to file.") - } - tmpfile.Close() - - // Open in editor - c := exec.Command(editor, tmpfile.Name()) - c.Stdin = os.Stdin - c.Stdout = os.Stdout - c.Stderr = os.Stderr - cmd = 
tea.ExecProcess(c, func(err error) tea.Msg { - if err != nil { - slog.Error("Failed to open editor for conversation", "error", err) - } - // Clean up the file after editor closes - os.Remove(tmpfile.Name()) - return nil - }) - cmds = append(cmds, cmd) case commands.ToolDetailsCommand: message := "Tool details are now visible" if a.messages.ToolDetailsVisible() { @@ -907,11 +863,12 @@ func (a appModel) executeCommand(command commands.Command) (tea.Model, tea.Cmd) case commands.ThemeListCommand: themeDialog := dialog.NewThemeDialog() a.modal = themeDialog - // case commands.FileListCommand: - // a.editor.Blur() - // findDialog := dialog.NewFindDialog(a.fileProvider) - // cmds = append(cmds, findDialog.Init()) - // a.modal = findDialog + case commands.FileListCommand: + a.editor.Blur() + provider := completions.NewFileAndFolderContextGroup(a.app) + findDialog := dialog.NewFindDialog(provider) + findDialog.SetWidth(layout.Current.Container.Width - 8) + a.modal = findDialog case commands.FileCloseCommand: a.fileViewer, cmd = a.fileViewer.Clear() cmds = append(cmds, cmd) @@ -944,11 +901,11 @@ func (a appModel) executeCommand(command commands.Command) (tea.Model, tea.Cmd) a.editor = updated.(chat.EditorComponent) cmds = append(cmds, cmd) case commands.MessagesFirstCommand: - updated, cmd := a.messages.GotoTop() + updated, cmd := a.messages.First() a.messages = updated.(chat.MessagesComponent) cmds = append(cmds, cmd) case commands.MessagesLastCommand: - updated, cmd := a.messages.GotoBottom() + updated, cmd := a.messages.Last() a.messages = updated.(chat.MessagesComponent) cmds = append(cmds, cmd) case commands.MessagesPageUpCommand: @@ -987,14 +944,26 @@ func (a appModel) executeCommand(command commands.Command) (tea.Model, tea.Cmd) a.messages = updated.(chat.MessagesComponent) cmds = append(cmds, cmd) } + case commands.MessagesPreviousCommand: + updated, cmd := a.messages.Previous() + a.messages = updated.(chat.MessagesComponent) + cmds = append(cmds, cmd) + case commands.MessagesNextCommand: + updated, cmd := a.messages.Next() + a.messages = updated.(chat.MessagesComponent) + cmds = append(cmds, cmd) case commands.MessagesLayoutToggleCommand: a.messagesRight = !a.messagesRight a.app.State.MessagesRight = a.messagesRight a.app.SaveState() case commands.MessagesCopyCommand: - updated, cmd := a.messages.CopyLastMessage() - a.messages = updated.(chat.MessagesComponent) - cmds = append(cmds, cmd) + selected := a.messages.Selected() + if selected != "" { + cmd = tea.SetClipboard(selected) + cmds = append(cmds, cmd) + cmd = toast.NewSuccessToast("Message copied to clipboard") + cmds = append(cmds, cmd) + } case commands.MessagesRevertCommand: case commands.AppExitCommand: return a, tea.Quit @@ -1002,14 +971,22 @@ func (a appModel) executeCommand(command commands.Command) (tea.Model, tea.Cmd) return a, tea.Batch(cmds...) 
} +func (a appModel) updateCompletions(msg tea.Msg) (tea.Model, tea.Cmd) { + currentInput := a.editor.Value() + if currentInput != "" { + provider := a.completionManager.GetProvider(currentInput) + a.completions.SetProvider(provider) + } + return a.completions.Update(msg) +} + func NewModel(app *app.App) tea.Model { - commandProvider := completions.NewCommandCompletionProvider(app) - fileProvider := completions.NewFileContextGroup(app) - symbolsProvider := completions.NewSymbolsContextGroup(app) + completionManager := completions.NewCompletionManager(app) + initialProvider := completionManager.DefaultProvider() messages := chat.NewMessagesComponent(app) editor := chat.NewEditorComponent(app) - completions := dialog.NewCompletionDialogComponent("/", commandProvider) + completions := dialog.NewCompletionDialogComponent(initialProvider) var leaderBinding *key.Binding if app.Config.Keybinds.Leader != "" { @@ -1023,58 +1000,15 @@ func NewModel(app *app.App) tea.Model { editor: editor, messages: messages, completions: completions, - commandProvider: commandProvider, - fileProvider: fileProvider, - symbolsProvider: symbolsProvider, + completionManager: completionManager, leaderBinding: leaderBinding, + isLeaderSequence: false, showCompletionDialog: false, toastManager: toast.NewToastManager(), interruptKeyState: InterruptKeyIdle, - exitKeyState: ExitKeyIdle, fileViewer: fileviewer.New(app), messagesRight: app.State.MessagesRight, } return model } - -func formatConversationToMarkdown(messages []app.Message) string { - var builder strings.Builder - - builder.WriteString("# Conversation History\n\n") - - for _, msg := range messages { - builder.WriteString("---\n\n") - - var role string - var timestamp time.Time - - switch info := msg.Info.(type) { - case opencode.UserMessage: - role = "User" - timestamp = time.UnixMilli(int64(info.Time.Created)) - case opencode.AssistantMessage: - role = "Assistant" - timestamp = time.UnixMilli(int64(info.Time.Created)) - default: - continue - } - - builder.WriteString( - fmt.Sprintf("**%s** (*%s*)\n\n", role, timestamp.Format("2006-01-02 15:04:05")), - ) - - for _, part := range msg.Parts { - switch p := part.(type) { - case opencode.TextPart: - builder.WriteString(p.Text + "\n\n") - case opencode.FilePart: - builder.WriteString(fmt.Sprintf("[File: %s]\n\n", p.Filename)) - case opencode.ToolPart: - builder.WriteString(fmt.Sprintf("[Tool: %s]\n\n", p.Tool)) - } - } - } - - return builder.String() -} diff --git a/packages/tui/internal/util/apilogger.go b/packages/tui/internal/util/apilogger.go deleted file mode 100644 index b439bbec..00000000 --- a/packages/tui/internal/util/apilogger.go +++ /dev/null @@ -1,123 +0,0 @@ -package util - -import ( - "context" - "log/slog" - "sync" - - opencode "github.com/sst/opencode-sdk-go" -) - -type APILogHandler struct { - client *opencode.Client - service string - level slog.Level - attrs []slog.Attr - groups []string - mu sync.Mutex -} - -func NewAPILogHandler(client *opencode.Client, service string, level slog.Level) *APILogHandler { - return &APILogHandler{ - client: client, - service: service, - level: level, - attrs: make([]slog.Attr, 0), - groups: make([]string, 0), - } -} - -func (h *APILogHandler) Enabled(_ context.Context, level slog.Level) bool { - return level >= h.level -} - -func (h *APILogHandler) Handle(ctx context.Context, r slog.Record) error { - var apiLevel opencode.AppLogParamsLevel - switch r.Level { - case slog.LevelDebug: - apiLevel = opencode.AppLogParamsLevelDebug - case slog.LevelInfo: - apiLevel = 
opencode.AppLogParamsLevelInfo - case slog.LevelWarn: - apiLevel = opencode.AppLogParamsLevelWarn - case slog.LevelError: - apiLevel = opencode.AppLogParamsLevelError - default: - apiLevel = opencode.AppLogParamsLevelInfo - } - - extra := make(map[string]any) - - h.mu.Lock() - for _, attr := range h.attrs { - extra[attr.Key] = attr.Value.Any() - } - h.mu.Unlock() - - r.Attrs(func(attr slog.Attr) bool { - extra[attr.Key] = attr.Value.Any() - return true - }) - - params := opencode.AppLogParams{ - Service: opencode.F(h.service), - Level: opencode.F(apiLevel), - Message: opencode.F(r.Message), - } - - if len(extra) > 0 { - params.Extra = opencode.F(extra) - } - - go func() { - _, err := h.client.App.Log(context.Background(), params) - if err != nil { - // Fallback: we can't log the error using slog as it would create a loop - // TODO: fallback file? - } - }() - - return nil -} - -// WithAttrs returns a new Handler whose attributes consist of -// both the receiver's attributes and the arguments. -func (h *APILogHandler) WithAttrs(attrs []slog.Attr) slog.Handler { - h.mu.Lock() - defer h.mu.Unlock() - - newHandler := &APILogHandler{ - client: h.client, - service: h.service, - level: h.level, - attrs: make([]slog.Attr, len(h.attrs)+len(attrs)), - groups: make([]string, len(h.groups)), - } - - copy(newHandler.attrs, h.attrs) - copy(newHandler.attrs[len(h.attrs):], attrs) - copy(newHandler.groups, h.groups) - - return newHandler -} - -// WithGroup returns a new Handler with the given group appended to -// the receiver's existing groups. -func (h *APILogHandler) WithGroup(name string) slog.Handler { - h.mu.Lock() - defer h.mu.Unlock() - - newHandler := &APILogHandler{ - client: h.client, - service: h.service, - level: h.level, - attrs: make([]slog.Attr, len(h.attrs)), - groups: make([]string, len(h.groups)+1), - } - - copy(newHandler.attrs, h.attrs) - copy(newHandler.groups, h.groups) - newHandler.groups[len(h.groups)] = name - - return newHandler -} diff --git a/packages/tui/internal/util/file.go b/packages/tui/internal/util/file.go index b079f24c..2c0987dc 100644 --- a/packages/tui/internal/util/file.go +++ b/packages/tui/internal/util/file.go @@ -83,7 +83,7 @@ func Extension(path string) string { } func ToMarkdown(content string, width int, backgroundColor compat.AdaptiveColor) string { - r := styles.GetMarkdownRenderer(width-6, backgroundColor) + r := styles.GetMarkdownRenderer(width-7, backgroundColor) content = strings.ReplaceAll(content, RootPath+"/", "") rendered, _ := r.Render(content) lines := strings.Split(rendered, "\n") diff --git a/packages/tui/sdk/.devcontainer/devcontainer.json b/packages/tui/sdk/.devcontainer/devcontainer.json deleted file mode 100644 index 889ae347..00000000 --- a/packages/tui/sdk/.devcontainer/devcontainer.json +++ /dev/null @@ -1,7 +0,0 @@ -// For format details, see https://aka.ms/devcontainer.json. 
For config options, see the -// README at: https://github.com/devcontainers/templates/tree/main/src/debian -{ - "name": "Development", - "image": "mcr.microsoft.com/devcontainers/go:1.23-bookworm", - "postCreateCommand": "go mod tidy" -} diff --git a/packages/tui/sdk/.github/workflows/ci.yml b/packages/tui/sdk/.github/workflows/ci.yml deleted file mode 100644 index 4bf1e907..00000000 --- a/packages/tui/sdk/.github/workflows/ci.yml +++ /dev/null @@ -1,49 +0,0 @@ -name: CI -on: - push: - branches-ignore: - - 'generated' - - 'codegen/**' - - 'integrated/**' - - 'stl-preview-head/**' - - 'stl-preview-base/**' - pull_request: - branches-ignore: - - 'stl-preview-head/**' - - 'stl-preview-base/**' - -jobs: - lint: - timeout-minutes: 10 - name: lint - runs-on: ${{ github.repository == 'stainless-sdks/opencode-go' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }} - if: github.event_name == 'push' || github.event.pull_request.head.repo.fork - - steps: - - uses: actions/checkout@v4 - - - name: Setup go - uses: actions/setup-go@v5 - with: - go-version-file: ./go.mod - - - name: Run lints - run: ./scripts/lint - test: - timeout-minutes: 10 - name: test - runs-on: ${{ github.repository == 'stainless-sdks/opencode-go' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }} - if: github.event_name == 'push' || github.event.pull_request.head.repo.fork - steps: - - uses: actions/checkout@v4 - - - name: Setup go - uses: actions/setup-go@v5 - with: - go-version-file: ./go.mod - - - name: Bootstrap - run: ./scripts/bootstrap - - - name: Run tests - run: ./scripts/test diff --git a/packages/tui/sdk/.gitignore b/packages/tui/sdk/.gitignore deleted file mode 100644 index c6d05015..00000000 --- a/packages/tui/sdk/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -.prism.log -codegen.log -Brewfile.lock.json -.idea/ diff --git a/packages/tui/sdk/.release-please-manifest.json b/packages/tui/sdk/.release-please-manifest.json deleted file mode 100644 index c373724d..00000000 --- a/packages/tui/sdk/.release-please-manifest.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - ".": "0.1.0-alpha.8" -} \ No newline at end of file diff --git a/packages/tui/sdk/.stats.yml b/packages/tui/sdk/.stats.yml deleted file mode 100644 index 02591cbb..00000000 --- a/packages/tui/sdk/.stats.yml +++ /dev/null @@ -1,4 +0,0 @@ -configured_endpoints: 22 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/opencode%2Fopencode-a1da357fcefd3105736841fbf44018022fade78e67ffc81e178cf9196da723ee.yml -openapi_spec_hash: 9bd27afcc5b8f43d8e4223f7c984035f -config_hash: 62b73a3397120578a992bffd1e69386a diff --git a/packages/tui/sdk/Brewfile b/packages/tui/sdk/Brewfile deleted file mode 100644 index 577e34a4..00000000 --- a/packages/tui/sdk/Brewfile +++ /dev/null @@ -1 +0,0 @@ -brew "go" diff --git a/packages/tui/sdk/CHANGELOG.md b/packages/tui/sdk/CHANGELOG.md deleted file mode 100644 index bc407fad..00000000 --- a/packages/tui/sdk/CHANGELOG.md +++ /dev/null @@ -1,73 +0,0 @@ -# Changelog - -## 0.1.0-alpha.8 (2025-07-02) - -Full Changelog: [v0.1.0-alpha.7...v0.1.0-alpha.8](https://github.com/sst/opencode-sdk-go/compare/v0.1.0-alpha.7...v0.1.0-alpha.8) - -### Features - -* **api:** update via SDK Studio ([651e937](https://github.com/sst/opencode-sdk-go/commit/651e937c334e1caba3b968e6cac865c219879519)) - -## 0.1.0-alpha.7 (2025-06-30) - -Full Changelog: [v0.1.0-alpha.6...v0.1.0-alpha.7](https://github.com/sst/opencode-sdk-go/compare/v0.1.0-alpha.6...v0.1.0-alpha.7) - -### Features - -* **api:** update via SDK Studio 
([13550a5](https://github.com/sst/opencode-sdk-go/commit/13550a5c65d77325e945ed99fe0799cd1107b775)) -* **api:** update via SDK Studio ([7b73730](https://github.com/sst/opencode-sdk-go/commit/7b73730c7fa62ba966dda3541c3e97b49be8d2bf)) - - -### Chores - -* **ci:** only run for pushes and fork pull requests ([bea59b8](https://github.com/sst/opencode-sdk-go/commit/bea59b886800ef555f89c47a9256d6392ed2e53d)) - -## 0.1.0-alpha.6 (2025-06-28) - -Full Changelog: [v0.1.0-alpha.5...v0.1.0-alpha.6](https://github.com/sst/opencode-sdk-go/compare/v0.1.0-alpha.5...v0.1.0-alpha.6) - -### Bug Fixes - -* don't try to deserialize as json when ResponseBodyInto is []byte ([5988d04](https://github.com/sst/opencode-sdk-go/commit/5988d04839cb78b6613057280b91b72a60fef33d)) - -## 0.1.0-alpha.5 (2025-06-27) - -Full Changelog: [v0.1.0-alpha.4...v0.1.0-alpha.5](https://github.com/sst/opencode-sdk-go/compare/v0.1.0-alpha.4...v0.1.0-alpha.5) - -### Features - -* **api:** update via SDK Studio ([9e39a59](https://github.com/sst/opencode-sdk-go/commit/9e39a59b3d5d1bd5e64633732521fb28362cc70e)) - -## 0.1.0-alpha.4 (2025-06-27) - -Full Changelog: [v0.1.0-alpha.3...v0.1.0-alpha.4](https://github.com/sst/opencode-sdk-go/compare/v0.1.0-alpha.3...v0.1.0-alpha.4) - -### Features - -* **api:** update via SDK Studio ([9609d1b](https://github.com/sst/opencode-sdk-go/commit/9609d1b1db7806d00cb846c9914cb4935cdedf52)) - -## 0.1.0-alpha.3 (2025-06-27) - -Full Changelog: [v0.1.0-alpha.2...v0.1.0-alpha.3](https://github.com/sst/opencode-sdk-go/compare/v0.1.0-alpha.2...v0.1.0-alpha.3) - -### Features - -* **api:** update via SDK Studio ([57f3230](https://github.com/sst/opencode-sdk-go/commit/57f32309023cc1f0f20c20d02a3907e390a71f61)) - -## 0.1.0-alpha.2 (2025-06-27) - -Full Changelog: [v0.1.0-alpha.1...v0.1.0-alpha.2](https://github.com/sst/opencode-sdk-go/compare/v0.1.0-alpha.1...v0.1.0-alpha.2) - -### Features - -* **api:** update via SDK Studio ([a766f1c](https://github.com/sst/opencode-sdk-go/commit/a766f1c54f02bbc1380151b0e22d97cc2c5892e6)) - -## 0.1.0-alpha.1 (2025-06-27) - -Full Changelog: [v0.0.1-alpha.0...v0.1.0-alpha.1](https://github.com/sst/opencode-sdk-go/compare/v0.0.1-alpha.0...v0.1.0-alpha.1) - -### Features - -* **api:** update via SDK Studio ([27b7376](https://github.com/sst/opencode-sdk-go/commit/27b7376310466ee17a63f2104f546b53a2b8361a)) -* **api:** update via SDK Studio ([0a73e04](https://github.com/sst/opencode-sdk-go/commit/0a73e04c23c90b2061611edaa8fd6282dc0ce397)) -* **api:** update via SDK Studio ([9b7883a](https://github.com/sst/opencode-sdk-go/commit/9b7883a144eeac526d9d04538e0876a9d18bb844)) diff --git a/packages/tui/sdk/CONTRIBUTING.md b/packages/tui/sdk/CONTRIBUTING.md deleted file mode 100644 index 34620a3c..00000000 --- a/packages/tui/sdk/CONTRIBUTING.md +++ /dev/null @@ -1,66 +0,0 @@ -## Setting up the environment - -To set up the repository, run: - -```sh -$ ./scripts/bootstrap -$ ./scripts/build -``` - -This will install all the required dependencies and build the SDK. - -You can also [install go 1.18+ manually](https://go.dev/doc/install). - -## Modifying/Adding code - -Most of the SDK is generated code. Modifications to code will be persisted between generations, but may -result in merge conflicts between manual patches and changes from the generator. The generator will never -modify the contents of the `lib/` and `examples/` directories. - -## Adding and running examples - -All files in the `examples/` directory are not modified by the generator and can be freely edited or added to. 
- -```go -# add an example to examples//main.go - -package main - -func main() { - // ... -} -``` - -```sh -$ go run ./examples/ -``` - -## Using the repository from source - -To use a local version of this library from source in another project, edit the `go.mod` with a replace -directive. This can be done through the CLI with the following: - -```sh -$ go mod edit -replace github.com/sst/opencode-sdk-go=/path/to/opencode-sdk-go -``` - -## Running tests - -Most tests require you to [set up a mock server](https://github.com/stoplightio/prism) against the OpenAPI spec to run the tests. - -```sh -# you will need npm installed -$ npx prism mock path/to/your/openapi.yml -``` - -```sh -$ ./scripts/test -``` - -## Formatting - -This library uses the standard gofmt code formatter: - -```sh -$ ./scripts/format -``` diff --git a/packages/tui/sdk/LICENSE b/packages/tui/sdk/LICENSE deleted file mode 100644 index a56ceacd..00000000 --- a/packages/tui/sdk/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. 
The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2025 Opencode - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/packages/tui/sdk/README.md b/packages/tui/sdk/README.md deleted file mode 100644 index 2588b614..00000000 --- a/packages/tui/sdk/README.md +++ /dev/null @@ -1,354 +0,0 @@ -# Opencode Go API Library - -Go Reference - -The Opencode Go library provides convenient access to the [Opencode REST API](https://opencode.ai/docs) -from applications written in Go. - -It is generated with [Stainless](https://www.stainless.com/). - -## Installation - - - -```go -import ( - "github.com/sst/opencode-sdk-go" // imported as opencode -) -``` - - - -Or to pin the version: - - - -```sh -go get -u 'github.com/sst/opencode-sdk-go@v0.1.0-alpha.8' -``` - - - -## Requirements - -This library requires Go 1.18+. - -## Usage - -The full API of this library can be found in [api.md](api.md). - -```go -package main - -import ( - "context" - "fmt" - - "github.com/sst/opencode-sdk-go" -) - -func main() { - client := opencode.NewClient() - sessions, err := client.Session.List(context.TODO()) - if err != nil { - panic(err.Error()) - } - fmt.Printf("%+v\n", sessions) -} - -``` - -### Request fields - -All request parameters are wrapped in a generic `Field` type, -which we use to distinguish zero values from null or omitted fields. - -This prevents accidentally sending a zero value if you forget a required parameter, -and enables explicitly sending `null`, `false`, `''`, or `0` on optional parameters. -Any field not specified is not sent. - -To construct fields with values, use the helpers `String()`, `Int()`, `Float()`, or most commonly, the generic `F[T]()`. -To send a null, use `Null[T]()`, and to send a nonconforming value, use `Raw[T](any)`. For example: - -```go -params := FooParams{ - Name: opencode.F("hello"), - - // Explicitly send `"description": null` - Description: opencode.Null[string](), - - Point: opencode.F(opencode.Point{ - X: opencode.Int(0), - Y: opencode.Int(1), - - // In cases where the API specifies a given type, - // but you want to send something else, use `Raw`: - Z: opencode.Raw[int64](0.01), // sends a float - }), -} -``` - -### Response objects - -All fields in response structs are value types (not pointers or wrappers). - -If a given field is `null`, not present, or invalid, the corresponding field -will simply be its zero value. 
- -All response structs also include a special `JSON` field, containing more detailed -information about each property, which you can use like so: - -```go -if res.Name == "" { - // true if `"name"` is either not present or explicitly null - res.JSON.Name.IsNull() - - // true if the `"name"` key was not present in the response JSON at all - res.JSON.Name.IsMissing() - - // When the API returns data that cannot be coerced to the expected type: - if res.JSON.Name.IsInvalid() { - raw := res.JSON.Name.Raw() - - legacyName := struct{ - First string `json:"first"` - Last string `json:"last"` - }{} - json.Unmarshal([]byte(raw), &legacyName) - name = legacyName.First + " " + legacyName.Last - } -} -``` - -These `.JSON` structs also include an `Extras` map containing -any properties in the json response that were not specified -in the struct. This can be useful for API features not yet -present in the SDK. - -```go -body := res.JSON.ExtraFields["my_unexpected_field"].Raw() -``` - -### RequestOptions - -This library uses the functional options pattern. Functions defined in the -`option` package return a `RequestOption`, which is a closure that mutates a -`RequestConfig`. These options can be supplied to the client or at individual -requests. For example: - -```go -client := opencode.NewClient( - // Adds a header to every request made by the client - option.WithHeader("X-Some-Header", "custom_header_info"), -) - -client.Session.List(context.TODO(), ..., - // Override the header - option.WithHeader("X-Some-Header", "some_other_custom_header_info"), - // Add an undocumented field to the request body, using sjson syntax - option.WithJSONSet("some.json.path", map[string]string{"my": "object"}), -) -``` - -See the [full list of request options](https://pkg.go.dev/github.com/sst/opencode-sdk-go/option). - -### Pagination - -This library provides some conveniences for working with paginated list endpoints. - -You can use `.ListAutoPaging()` methods to iterate through items across all pages: - -Or you can use simple `.List()` methods to fetch a single page and receive a standard response object -with additional helper methods like `.GetNextPage()`, e.g.: - -### Errors - -When the API returns a non-success status code, we return an error with type -`*opencode.Error`. This contains the `StatusCode`, `*http.Request`, and -`*http.Response` values of the request, as well as the JSON of the error body -(much like other response objects in the SDK). - -To handle errors, we recommend that you use the `errors.As` pattern: - -```go -_, err := client.Session.List(context.TODO()) -if err != nil { - var apierr *opencode.Error - if errors.As(err, &apierr) { - println(string(apierr.DumpRequest(true))) // Prints the serialized HTTP request - println(string(apierr.DumpResponse(true))) // Prints the serialized HTTP response - } - panic(err.Error()) // GET "/session": 400 Bad Request { ... } -} -``` - -When other errors occur, they are returned unwrapped; for example, -if HTTP transport fails, you might receive `*url.Error` wrapping `*net.OpError`. - -### Timeouts - -Requests do not time out by default; use context to configure a timeout for a request lifecycle. - -Note that if a request is [retried](#retries), the context timeout does not start over. -To set a per-retry timeout, use `option.WithRequestTimeout()`. - -```go -// This sets the timeout for the request, including all the retries. 
-ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) -defer cancel() -client.Session.List( - ctx, - // This sets the per-retry timeout - option.WithRequestTimeout(20*time.Second), -) -``` - -### File uploads - -Request parameters that correspond to file uploads in multipart requests are typed as -`param.Field[io.Reader]`. The contents of the `io.Reader` will by default be sent as a multipart form -part with the file name of "anonymous_file" and content-type of "application/octet-stream". - -The file name and content-type can be customized by implementing `Name() string` or `ContentType() -string` on the run-time type of `io.Reader`. Note that `os.File` implements `Name() string`, so a -file returned by `os.Open` will be sent with the file name on disk. - -We also provide a helper `opencode.FileParam(reader io.Reader, filename string, contentType string)` -which can be used to wrap any `io.Reader` with the appropriate file name and content type. - -### Retries - -Certain errors will be automatically retried 2 times by default, with a short exponential backoff. -We retry by default all connection errors, 408 Request Timeout, 409 Conflict, 429 Rate Limit, -and >=500 Internal errors. - -You can use the `WithMaxRetries` option to configure or disable this: - -```go -// Configure the default for all requests: -client := opencode.NewClient( - option.WithMaxRetries(0), // default is 2 -) - -// Override per-request: -client.Session.List(context.TODO(), option.WithMaxRetries(5)) -``` - -### Accessing raw response data (e.g. response headers) - -You can access the raw HTTP response data by using the `option.WithResponseInto()` request option. This is useful when -you need to examine response headers, status codes, or other details. - -```go -// Create a variable to store the HTTP response -var response *http.Response -sessions, err := client.Session.List(context.TODO(), option.WithResponseInto(&response)) -if err != nil { - // handle error -} -fmt.Printf("%+v\n", sessions) - -fmt.Printf("Status Code: %d\n", response.StatusCode) -fmt.Printf("Headers: %+#v\n", response.Header) -``` - -### Making custom/undocumented requests - -This library is typed for convenient access to the documented API. If you need to access undocumented -endpoints, params, or response properties, the library can still be used. - -#### Undocumented endpoints - -To make requests to undocumented endpoints, you can use `client.Get`, `client.Post`, and other HTTP verbs. -`RequestOptions` on the client, such as retries, will be respected when making these requests. - -```go -var ( - // params can be an io.Reader, a []byte, an encoding/json serializable object, - // or a "…Params" struct defined in this library. - params map[string]interface{} - - // result can be an []byte, *http.Response, a encoding/json deserializable object, - // or a model defined in this library. - result *http.Response -) -err := client.Post(context.Background(), "/unspecified", params, &result) -if err != nil { - … -} -``` - -#### Undocumented request params - -To make requests using undocumented parameters, you may use either the `option.WithQuerySet()` -or the `option.WithJSONSet()` methods. 
- -```go -params := FooNewParams{ - ID: opencode.F("id_xxxx"), - Data: opencode.F(FooNewParamsData{ - FirstName: opencode.F("John"), - }), -} -client.Foo.New(context.Background(), params, option.WithJSONSet("data.last_name", "Doe")) -``` - -#### Undocumented response properties - -To access undocumented response properties, you may either access the raw JSON of the response as a string -with `result.JSON.RawJSON()`, or get the raw JSON of a particular field on the result with -`result.JSON.Foo.Raw()`. - -Any fields that are not present on the response struct will be saved and can be accessed by `result.JSON.ExtraFields()` which returns the extra fields as a `map[string]Field`. - -### Middleware - -We provide `option.WithMiddleware` which applies the given -middleware to requests. - -```go -func Logger(req *http.Request, next option.MiddlewareNext) (res *http.Response, err error) { - // Before the request - start := time.Now() - LogReq(req) - - // Forward the request to the next handler - res, err = next(req) - - // Handle stuff after the request - end := time.Now() - LogRes(res, err, start - end) - - return res, err -} - -client := opencode.NewClient( - option.WithMiddleware(Logger), -) -``` - -When multiple middlewares are provided as variadic arguments, the middlewares -are applied left to right. If `option.WithMiddleware` is given -multiple times, for example first in the client then the method, the -middleware in the client will run first and the middleware given in the method -will run next. - -You may also replace the default `http.Client` with -`option.WithHTTPClient(client)`. Only one http client is -accepted (this overwrites any previous client) and receives requests after any -middleware has been applied. - -## Semantic versioning - -This package generally follows [SemVer](https://semver.org/spec/v2.0.0.html) conventions, though certain backwards-incompatible changes may be released as minor versions: - -1. Changes to library internals which are technically public but not intended or documented for external use. _(Please open a GitHub issue to let us know if you are relying on such internals.)_ -2. Changes that we do not expect to impact the vast majority of users in practice. - -We take backwards-compatibility seriously and work hard to ensure you can rely on a smooth upgrade experience. - -We are keen for your feedback; please open an [issue](https://www.github.com/sst/opencode-sdk-go/issues) with questions, bugs, or suggestions. - -## Contributing - -See [the contributing documentation](./CONTRIBUTING.md). diff --git a/packages/tui/sdk/SECURITY.md b/packages/tui/sdk/SECURITY.md deleted file mode 100644 index 6912e12b..00000000 --- a/packages/tui/sdk/SECURITY.md +++ /dev/null @@ -1,27 +0,0 @@ -# Security Policy - -## Reporting Security Issues - -This SDK is generated by [Stainless Software Inc](http://stainless.com). Stainless takes security seriously, and encourages you to report any security vulnerability promptly so that appropriate action can be taken. - -To report a security issue, please contact the Stainless team at security@stainless.com. - -## Responsible Disclosure - -We appreciate the efforts of security researchers and individuals who help us maintain the security of -SDKs we generate. If you believe you have found a security vulnerability, please adhere to responsible -disclosure practices by allowing us a reasonable amount of time to investigate and address the issue -before making any information public. 
- -## Reporting Non-SDK Related Security Issues - -If you encounter security issues that are not directly related to SDKs but pertain to the services -or products provided by Opencode, please follow the respective company's security reporting guidelines. - -### Opencode Terms and Policies - -Please contact support@sst.dev for any questions or concerns regarding the security of our services. - ---- - -Thank you for helping us keep the SDKs and systems they interact with secure. diff --git a/packages/tui/sdk/aliases.go b/packages/tui/sdk/aliases.go deleted file mode 100644 index 6ab36d04..00000000 --- a/packages/tui/sdk/aliases.go +++ /dev/null @@ -1,43 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -package opencode - -import ( - "github.com/sst/opencode-sdk-go/internal/apierror" - "github.com/sst/opencode-sdk-go/shared" -) - -type Error = apierror.Error - -// This is an alias to an internal type. -type MessageAbortedError = shared.MessageAbortedError - -// This is an alias to an internal type. -type MessageAbortedErrorName = shared.MessageAbortedErrorName - -// This is an alias to an internal value. -const MessageAbortedErrorNameMessageAbortedError = shared.MessageAbortedErrorNameMessageAbortedError - -// This is an alias to an internal type. -type ProviderAuthError = shared.ProviderAuthError - -// This is an alias to an internal type. -type ProviderAuthErrorData = shared.ProviderAuthErrorData - -// This is an alias to an internal type. -type ProviderAuthErrorName = shared.ProviderAuthErrorName - -// This is an alias to an internal value. -const ProviderAuthErrorNameProviderAuthError = shared.ProviderAuthErrorNameProviderAuthError - -// This is an alias to an internal type. -type UnknownError = shared.UnknownError - -// This is an alias to an internal type. -type UnknownErrorData = shared.UnknownErrorData - -// This is an alias to an internal type. -type UnknownErrorName = shared.UnknownErrorName - -// This is an alias to an internal value. 
-const UnknownErrorNameUnknownError = shared.UnknownErrorNameUnknownError diff --git a/packages/tui/sdk/api.md b/packages/tui/sdk/api.md deleted file mode 100644 index 15b177e9..00000000 --- a/packages/tui/sdk/api.md +++ /dev/null @@ -1,112 +0,0 @@ -# Shared Response Types - -- shared.MessageAbortedError -- shared.ProviderAuthError -- shared.UnknownError - -# Event - -Response Types: - -- opencode.EventListResponse - -Methods: - -- client.Event.List(ctx context.Context) (opencode.EventListResponse, error) - -# App - -Response Types: - -- opencode.App -- opencode.LogLevel -- opencode.Mode -- opencode.Model -- opencode.Provider -- opencode.AppProvidersResponse - -Methods: - -- client.App.Get(ctx context.Context) (opencode.App, error) -- client.App.Init(ctx context.Context) (bool, error) -- client.App.Log(ctx context.Context, body opencode.AppLogParams) (bool, error) -- client.App.Modes(ctx context.Context) ([]opencode.Mode, error) -- client.App.Providers(ctx context.Context) (opencode.AppProvidersResponse, error) - -# Find - -Response Types: - -- opencode.Match -- opencode.Symbol - -Methods: - -- client.Find.Files(ctx context.Context, query opencode.FindFilesParams) ([]string, error) -- client.Find.Symbols(ctx context.Context, query opencode.FindSymbolsParams) ([]opencode.Symbol, error) -- client.Find.Text(ctx context.Context, query opencode.FindTextParams) ([]opencode.Match, error) - -# File - -Response Types: - -- opencode.File -- opencode.FileReadResponse - -Methods: - -- client.File.Read(ctx context.Context, query opencode.FileReadParams) (opencode.FileReadResponse, error) -- client.File.Status(ctx context.Context) ([]opencode.File, error) - -# Config - -Response Types: - -- opencode.Config -- opencode.KeybindsConfig -- opencode.McpLocalConfig -- opencode.McpRemoteConfig -- opencode.ModeConfig - -Methods: - -- client.Config.Get(ctx context.Context) (opencode.Config, error) - -# Session - -Params Types: - -- opencode.FilePartInputParam -- opencode.TextPartInputParam - -Response Types: - -- opencode.AssistantMessage -- opencode.FilePart -- opencode.Message -- opencode.Part -- opencode.Session -- opencode.SnapshotPart -- opencode.StepFinishPart -- opencode.StepStartPart -- opencode.TextPart -- opencode.ToolPart -- opencode.ToolStateCompleted -- opencode.ToolStateError -- opencode.ToolStatePending -- opencode.ToolStateRunning -- opencode.UserMessage -- opencode.SessionMessagesResponse - -Methods: - -- client.Session.New(ctx context.Context) (opencode.Session, error) -- client.Session.List(ctx context.Context) ([]opencode.Session, error) -- client.Session.Delete(ctx context.Context, id string) (bool, error) -- client.Session.Abort(ctx context.Context, id string) (bool, error) -- client.Session.Chat(ctx context.Context, id string, body opencode.SessionChatParams) (opencode.AssistantMessage, error) -- client.Session.Init(ctx context.Context, id string, body opencode.SessionInitParams) (bool, error) -- client.Session.Messages(ctx context.Context, id string) ([]opencode.SessionMessagesResponse, error) -- client.Session.Share(ctx context.Context, id string) (opencode.Session, error) -- client.Session.Summarize(ctx context.Context, id string, body opencode.SessionSummarizeParams) (bool, error) -- client.Session.Unshare(ctx context.Context, id string) (opencode.Session, error) diff --git a/packages/tui/sdk/app.go b/packages/tui/sdk/app.go deleted file mode 100644 index aa47e83b..00000000 --- a/packages/tui/sdk/app.go +++ /dev/null @@ -1,384 +0,0 @@ -// File generated from our OpenAPI spec by 
Stainless. See CONTRIBUTING.md for details. - -package opencode - -import ( - "context" - "net/http" - - "github.com/sst/opencode-sdk-go/internal/apijson" - "github.com/sst/opencode-sdk-go/internal/param" - "github.com/sst/opencode-sdk-go/internal/requestconfig" - "github.com/sst/opencode-sdk-go/option" -) - -// AppService contains methods and other services that help with interacting with -// the opencode API. -// -// Note, unlike clients, this service does not read variables from the environment -// automatically. You should not instantiate this service directly, and instead use -// the [NewAppService] method instead. -type AppService struct { - Options []option.RequestOption -} - -// NewAppService generates a new service that applies the given options to each -// request. These options are applied after the parent client's options (if there -// is one), and before any request-specific options. -func NewAppService(opts ...option.RequestOption) (r *AppService) { - r = &AppService{} - r.Options = opts - return -} - -// Get app info -func (r *AppService) Get(ctx context.Context, opts ...option.RequestOption) (res *App, err error) { - opts = append(r.Options[:], opts...) - path := "app" - err = requestconfig.ExecuteNewRequest(ctx, http.MethodGet, path, nil, &res, opts...) - return -} - -// Initialize the app -func (r *AppService) Init(ctx context.Context, opts ...option.RequestOption) (res *bool, err error) { - opts = append(r.Options[:], opts...) - path := "app/init" - err = requestconfig.ExecuteNewRequest(ctx, http.MethodPost, path, nil, &res, opts...) - return -} - -// Write a log entry to the server logs -func (r *AppService) Log(ctx context.Context, body AppLogParams, opts ...option.RequestOption) (res *bool, err error) { - opts = append(r.Options[:], opts...) - path := "log" - err = requestconfig.ExecuteNewRequest(ctx, http.MethodPost, path, body, &res, opts...) - return -} - -// List all modes -func (r *AppService) Modes(ctx context.Context, opts ...option.RequestOption) (res *[]Mode, err error) { - opts = append(r.Options[:], opts...) - path := "mode" - err = requestconfig.ExecuteNewRequest(ctx, http.MethodGet, path, nil, &res, opts...) - return -} - -// List all providers -func (r *AppService) Providers(ctx context.Context, opts ...option.RequestOption) (res *AppProvidersResponse, err error) { - opts = append(r.Options[:], opts...) - path := "config/providers" - err = requestconfig.ExecuteNewRequest(ctx, http.MethodGet, path, nil, &res, opts...) 
- return -} - -type App struct { - Git bool `json:"git,required"` - Hostname string `json:"hostname,required"` - Path AppPath `json:"path,required"` - Time AppTime `json:"time,required"` - JSON appJSON `json:"-"` -} - -// appJSON contains the JSON metadata for the struct [App] -type appJSON struct { - Git apijson.Field - Hostname apijson.Field - Path apijson.Field - Time apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *App) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r appJSON) RawJSON() string { - return r.raw -} - -type AppPath struct { - Config string `json:"config,required"` - Cwd string `json:"cwd,required"` - Data string `json:"data,required"` - Root string `json:"root,required"` - State string `json:"state,required"` - JSON appPathJSON `json:"-"` -} - -// appPathJSON contains the JSON metadata for the struct [AppPath] -type appPathJSON struct { - Config apijson.Field - Cwd apijson.Field - Data apijson.Field - Root apijson.Field - State apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *AppPath) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r appPathJSON) RawJSON() string { - return r.raw -} - -type AppTime struct { - Initialized float64 `json:"initialized"` - JSON appTimeJSON `json:"-"` -} - -// appTimeJSON contains the JSON metadata for the struct [AppTime] -type appTimeJSON struct { - Initialized apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *AppTime) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r appTimeJSON) RawJSON() string { - return r.raw -} - -// Log level -type LogLevel string - -const ( - LogLevelDebug LogLevel = "DEBUG" - LogLevelInfo LogLevel = "INFO" - LogLevelWarn LogLevel = "WARN" - LogLevelError LogLevel = "ERROR" -) - -func (r LogLevel) IsKnown() bool { - switch r { - case LogLevelDebug, LogLevelInfo, LogLevelWarn, LogLevelError: - return true - } - return false -} - -type Mode struct { - Name string `json:"name,required"` - Tools map[string]bool `json:"tools,required"` - Model ModeModel `json:"model"` - Prompt string `json:"prompt"` - JSON modeJSON `json:"-"` -} - -// modeJSON contains the JSON metadata for the struct [Mode] -type modeJSON struct { - Name apijson.Field - Tools apijson.Field - Model apijson.Field - Prompt apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *Mode) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r modeJSON) RawJSON() string { - return r.raw -} - -type ModeModel struct { - ModelID string `json:"modelID,required"` - ProviderID string `json:"providerID,required"` - JSON modeModelJSON `json:"-"` -} - -// modeModelJSON contains the JSON metadata for the struct [ModeModel] -type modeModelJSON struct { - ModelID apijson.Field - ProviderID apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *ModeModel) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r modeModelJSON) RawJSON() string { - return r.raw -} - -type Model struct { - ID string `json:"id,required"` - Attachment bool `json:"attachment,required"` - Cost ModelCost `json:"cost,required"` - Limit ModelLimit `json:"limit,required"` - Name string `json:"name,required"` - Options map[string]interface{} `json:"options,required"` - Reasoning bool `json:"reasoning,required"` - ReleaseDate string 
`json:"release_date,required"` - Temperature bool `json:"temperature,required"` - ToolCall bool `json:"tool_call,required"` - JSON modelJSON `json:"-"` -} - -// modelJSON contains the JSON metadata for the struct [Model] -type modelJSON struct { - ID apijson.Field - Attachment apijson.Field - Cost apijson.Field - Limit apijson.Field - Name apijson.Field - Options apijson.Field - Reasoning apijson.Field - ReleaseDate apijson.Field - Temperature apijson.Field - ToolCall apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *Model) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r modelJSON) RawJSON() string { - return r.raw -} - -type ModelCost struct { - Input float64 `json:"input,required"` - Output float64 `json:"output,required"` - CacheRead float64 `json:"cache_read"` - CacheWrite float64 `json:"cache_write"` - JSON modelCostJSON `json:"-"` -} - -// modelCostJSON contains the JSON metadata for the struct [ModelCost] -type modelCostJSON struct { - Input apijson.Field - Output apijson.Field - CacheRead apijson.Field - CacheWrite apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *ModelCost) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r modelCostJSON) RawJSON() string { - return r.raw -} - -type ModelLimit struct { - Context float64 `json:"context,required"` - Output float64 `json:"output,required"` - JSON modelLimitJSON `json:"-"` -} - -// modelLimitJSON contains the JSON metadata for the struct [ModelLimit] -type modelLimitJSON struct { - Context apijson.Field - Output apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *ModelLimit) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r modelLimitJSON) RawJSON() string { - return r.raw -} - -type Provider struct { - ID string `json:"id,required"` - Env []string `json:"env,required"` - Models map[string]Model `json:"models,required"` - Name string `json:"name,required"` - API string `json:"api"` - Npm string `json:"npm"` - JSON providerJSON `json:"-"` -} - -// providerJSON contains the JSON metadata for the struct [Provider] -type providerJSON struct { - ID apijson.Field - Env apijson.Field - Models apijson.Field - Name apijson.Field - API apijson.Field - Npm apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *Provider) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r providerJSON) RawJSON() string { - return r.raw -} - -type AppProvidersResponse struct { - Default map[string]string `json:"default,required"` - Providers []Provider `json:"providers,required"` - JSON appProvidersResponseJSON `json:"-"` -} - -// appProvidersResponseJSON contains the JSON metadata for the struct -// [AppProvidersResponse] -type appProvidersResponseJSON struct { - Default apijson.Field - Providers apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *AppProvidersResponse) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r appProvidersResponseJSON) RawJSON() string { - return r.raw -} - -type AppLogParams struct { - // Log level - Level param.Field[AppLogParamsLevel] `json:"level,required"` - // Log message - Message param.Field[string] `json:"message,required"` - // Service name for the log entry - Service param.Field[string] `json:"service,required"` - // Additional metadata for the log entry - Extra 
param.Field[map[string]interface{}] `json:"extra"` -} - -func (r AppLogParams) MarshalJSON() (data []byte, err error) { - return apijson.MarshalRoot(r) -} - -// Log level -type AppLogParamsLevel string - -const ( - AppLogParamsLevelDebug AppLogParamsLevel = "debug" - AppLogParamsLevelInfo AppLogParamsLevel = "info" - AppLogParamsLevelError AppLogParamsLevel = "error" - AppLogParamsLevelWarn AppLogParamsLevel = "warn" -) - -func (r AppLogParamsLevel) IsKnown() bool { - switch r { - case AppLogParamsLevelDebug, AppLogParamsLevelInfo, AppLogParamsLevelError, AppLogParamsLevelWarn: - return true - } - return false -} diff --git a/packages/tui/sdk/app_test.go b/packages/tui/sdk/app_test.go deleted file mode 100644 index 16bb8ff8..00000000 --- a/packages/tui/sdk/app_test.go +++ /dev/null @@ -1,131 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -package opencode_test - -import ( - "context" - "errors" - "os" - "testing" - - "github.com/sst/opencode-sdk-go" - "github.com/sst/opencode-sdk-go/internal/testutil" - "github.com/sst/opencode-sdk-go/option" -) - -func TestAppGet(t *testing.T) { - t.Skip("skipped: tests are disabled for the time being") - baseURL := "http://localhost:4010" - if envURL, ok := os.LookupEnv("TEST_API_BASE_URL"); ok { - baseURL = envURL - } - if !testutil.CheckTestServer(t, baseURL) { - return - } - client := opencode.NewClient( - option.WithBaseURL(baseURL), - ) - _, err := client.App.Get(context.TODO()) - if err != nil { - var apierr *opencode.Error - if errors.As(err, &apierr) { - t.Log(string(apierr.DumpRequest(true))) - } - t.Fatalf("err should be nil: %s", err.Error()) - } -} - -func TestAppInit(t *testing.T) { - t.Skip("skipped: tests are disabled for the time being") - baseURL := "http://localhost:4010" - if envURL, ok := os.LookupEnv("TEST_API_BASE_URL"); ok { - baseURL = envURL - } - if !testutil.CheckTestServer(t, baseURL) { - return - } - client := opencode.NewClient( - option.WithBaseURL(baseURL), - ) - _, err := client.App.Init(context.TODO()) - if err != nil { - var apierr *opencode.Error - if errors.As(err, &apierr) { - t.Log(string(apierr.DumpRequest(true))) - } - t.Fatalf("err should be nil: %s", err.Error()) - } -} - -func TestAppLogWithOptionalParams(t *testing.T) { - t.Skip("skipped: tests are disabled for the time being") - baseURL := "http://localhost:4010" - if envURL, ok := os.LookupEnv("TEST_API_BASE_URL"); ok { - baseURL = envURL - } - if !testutil.CheckTestServer(t, baseURL) { - return - } - client := opencode.NewClient( - option.WithBaseURL(baseURL), - ) - _, err := client.App.Log(context.TODO(), opencode.AppLogParams{ - Level: opencode.F(opencode.AppLogParamsLevelDebug), - Message: opencode.F("message"), - Service: opencode.F("service"), - Extra: opencode.F(map[string]interface{}{ - "foo": "bar", - }), - }) - if err != nil { - var apierr *opencode.Error - if errors.As(err, &apierr) { - t.Log(string(apierr.DumpRequest(true))) - } - t.Fatalf("err should be nil: %s", err.Error()) - } -} - -func TestAppModes(t *testing.T) { - t.Skip("skipped: tests are disabled for the time being") - baseURL := "http://localhost:4010" - if envURL, ok := os.LookupEnv("TEST_API_BASE_URL"); ok { - baseURL = envURL - } - if !testutil.CheckTestServer(t, baseURL) { - return - } - client := opencode.NewClient( - option.WithBaseURL(baseURL), - ) - _, err := client.App.Modes(context.TODO()) - if err != nil { - var apierr *opencode.Error - if errors.As(err, &apierr) { - t.Log(string(apierr.DumpRequest(true))) - } - 
t.Fatalf("err should be nil: %s", err.Error()) - } -} - -func TestAppProviders(t *testing.T) { - t.Skip("skipped: tests are disabled for the time being") - baseURL := "http://localhost:4010" - if envURL, ok := os.LookupEnv("TEST_API_BASE_URL"); ok { - baseURL = envURL - } - if !testutil.CheckTestServer(t, baseURL) { - return - } - client := opencode.NewClient( - option.WithBaseURL(baseURL), - ) - _, err := client.App.Providers(context.TODO()) - if err != nil { - var apierr *opencode.Error - if errors.As(err, &apierr) { - t.Log(string(apierr.DumpRequest(true))) - } - t.Fatalf("err should be nil: %s", err.Error()) - } -} diff --git a/packages/tui/sdk/client.go b/packages/tui/sdk/client.go deleted file mode 100644 index 955eb7d6..00000000 --- a/packages/tui/sdk/client.go +++ /dev/null @@ -1,123 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -package opencode - -import ( - "context" - "net/http" - "os" - - "github.com/sst/opencode-sdk-go/internal/requestconfig" - "github.com/sst/opencode-sdk-go/option" -) - -// Client creates a struct with services and top level methods that help with -// interacting with the opencode API. You should not instantiate this client -// directly, and instead use the [NewClient] method instead. -type Client struct { - Options []option.RequestOption - Event *EventService - App *AppService - Find *FindService - File *FileService - Config *ConfigService - Session *SessionService -} - -// DefaultClientOptions read from the environment (OPENCODE_BASE_URL). This should -// be used to initialize new clients. -func DefaultClientOptions() []option.RequestOption { - defaults := []option.RequestOption{option.WithEnvironmentProduction()} - if o, ok := os.LookupEnv("OPENCODE_BASE_URL"); ok { - defaults = append(defaults, option.WithBaseURL(o)) - } - return defaults -} - -// NewClient generates a new client with the default option read from the -// environment (OPENCODE_BASE_URL). The option passed in as arguments are applied -// after these default arguments, and all option will be passed down to the -// services and requests that this client makes. -func NewClient(opts ...option.RequestOption) (r *Client) { - opts = append(DefaultClientOptions(), opts...) - - r = &Client{Options: opts} - - r.Event = NewEventService(opts...) - r.App = NewAppService(opts...) - r.Find = NewFindService(opts...) - r.File = NewFileService(opts...) - r.Config = NewConfigService(opts...) - r.Session = NewSessionService(opts...) - - return -} - -// Execute makes a request with the given context, method, URL, request params, -// response, and request options. This is useful for hitting undocumented endpoints -// while retaining the base URL, auth, retries, and other options from the client. -// -// If a byte slice or an [io.Reader] is supplied to params, it will be used as-is -// for the request body. -// -// The params is by default serialized into the body using [encoding/json]. If your -// type implements a MarshalJSON function, it will be used instead to serialize the -// request. If a URLQuery method is implemented, the returned [url.Values] will be -// used as query strings to the url. -// -// If your params struct uses [param.Field], you must provide either [MarshalJSON], -// [URLQuery], and/or [MarshalForm] functions. It is undefined behavior to use a -// struct uses [param.Field] without specifying how it is serialized. -// -// Any "…Params" object defined in this library can be used as the request -// argument. 
Note that 'path' arguments will not be forwarded into the url. -// -// The response body will be deserialized into the res variable, depending on its -// type: -// -// - A pointer to a [*http.Response] is populated by the raw response. -// - A pointer to a byte array will be populated with the contents of the request -// body. -// - A pointer to any other type uses this library's default JSON decoding, which -// respects UnmarshalJSON if it is defined on the type. -// - A nil value will not read the response body. -// -// For even greater flexibility, see [option.WithResponseInto] and -// [option.WithResponseBodyInto]. -func (r *Client) Execute(ctx context.Context, method string, path string, params interface{}, res interface{}, opts ...option.RequestOption) error { - opts = append(r.Options, opts...) - return requestconfig.ExecuteNewRequest(ctx, method, path, params, res, opts...) -} - -// Get makes a GET request with the given URL, params, and optionally deserializes -// to a response. See [Execute] documentation on the params and response. -func (r *Client) Get(ctx context.Context, path string, params interface{}, res interface{}, opts ...option.RequestOption) error { - return r.Execute(ctx, http.MethodGet, path, params, res, opts...) -} - -// Post makes a POST request with the given URL, params, and optionally -// deserializes to a response. See [Execute] documentation on the params and -// response. -func (r *Client) Post(ctx context.Context, path string, params interface{}, res interface{}, opts ...option.RequestOption) error { - return r.Execute(ctx, http.MethodPost, path, params, res, opts...) -} - -// Put makes a PUT request with the given URL, params, and optionally deserializes -// to a response. See [Execute] documentation on the params and response. -func (r *Client) Put(ctx context.Context, path string, params interface{}, res interface{}, opts ...option.RequestOption) error { - return r.Execute(ctx, http.MethodPut, path, params, res, opts...) -} - -// Patch makes a PATCH request with the given URL, params, and optionally -// deserializes to a response. See [Execute] documentation on the params and -// response. -func (r *Client) Patch(ctx context.Context, path string, params interface{}, res interface{}, opts ...option.RequestOption) error { - return r.Execute(ctx, http.MethodPatch, path, params, res, opts...) -} - -// Delete makes a DELETE request with the given URL, params, and optionally -// deserializes to a response. See [Execute] documentation on the params and -// response. -func (r *Client) Delete(ctx context.Context, path string, params interface{}, res interface{}, opts ...option.RequestOption) error { - return r.Execute(ctx, http.MethodDelete, path, params, res, opts...) -} diff --git a/packages/tui/sdk/client_test.go b/packages/tui/sdk/client_test.go deleted file mode 100644 index 0f5b8205..00000000 --- a/packages/tui/sdk/client_test.go +++ /dev/null @@ -1,332 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
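Execute and its Get/Post/Put/Patch/Delete wrappers keep the client's base URL, retries, and options while letting callers hit arbitrary paths. A sketch of fetching the raw body of the config endpoint this way; the URL is a placeholder and the "config" path matches the generated ConfigService shown later in this diff.

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/sst/opencode-sdk-go"
	"github.com/sst/opencode-sdk-go/option"
)

func main() {
	client := opencode.NewClient(option.WithBaseURL("http://localhost:4096")) // placeholder URL

	// Decoding into a byte slice keeps the raw response body; a typed struct
	// or *http.Response would also work, per the Execute documentation above.
	var body []byte
	if err := client.Get(context.TODO(), "config", nil, &body); err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(body))
}
```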
- -package opencode_test - -import ( - "context" - "fmt" - "io" - "net/http" - "reflect" - "testing" - "time" - - "github.com/sst/opencode-sdk-go" - "github.com/sst/opencode-sdk-go/internal" - "github.com/sst/opencode-sdk-go/option" -) - -type closureTransport struct { - fn func(req *http.Request) (*http.Response, error) -} - -func (t *closureTransport) RoundTrip(req *http.Request) (*http.Response, error) { - return t.fn(req) -} - -func TestUserAgentHeader(t *testing.T) { - var userAgent string - client := opencode.NewClient( - option.WithHTTPClient(&http.Client{ - Transport: &closureTransport{ - fn: func(req *http.Request) (*http.Response, error) { - userAgent = req.Header.Get("User-Agent") - return &http.Response{ - StatusCode: http.StatusOK, - }, nil - }, - }, - }), - ) - client.Session.List(context.Background()) - if userAgent != fmt.Sprintf("Opencode/Go %s", internal.PackageVersion) { - t.Errorf("Expected User-Agent to be correct, but got: %#v", userAgent) - } -} - -func TestRetryAfter(t *testing.T) { - retryCountHeaders := make([]string, 0) - client := opencode.NewClient( - option.WithHTTPClient(&http.Client{ - Transport: &closureTransport{ - fn: func(req *http.Request) (*http.Response, error) { - retryCountHeaders = append(retryCountHeaders, req.Header.Get("X-Stainless-Retry-Count")) - return &http.Response{ - StatusCode: http.StatusTooManyRequests, - Header: http.Header{ - http.CanonicalHeaderKey("Retry-After"): []string{"0.1"}, - }, - }, nil - }, - }, - }), - ) - _, err := client.Session.List(context.Background()) - if err == nil { - t.Error("Expected there to be a cancel error") - } - - attempts := len(retryCountHeaders) - if attempts != 3 { - t.Errorf("Expected %d attempts, got %d", 3, attempts) - } - - expectedRetryCountHeaders := []string{"0", "1", "2"} - if !reflect.DeepEqual(retryCountHeaders, expectedRetryCountHeaders) { - t.Errorf("Expected %v retry count headers, got %v", expectedRetryCountHeaders, retryCountHeaders) - } -} - -func TestDeleteRetryCountHeader(t *testing.T) { - retryCountHeaders := make([]string, 0) - client := opencode.NewClient( - option.WithHTTPClient(&http.Client{ - Transport: &closureTransport{ - fn: func(req *http.Request) (*http.Response, error) { - retryCountHeaders = append(retryCountHeaders, req.Header.Get("X-Stainless-Retry-Count")) - return &http.Response{ - StatusCode: http.StatusTooManyRequests, - Header: http.Header{ - http.CanonicalHeaderKey("Retry-After"): []string{"0.1"}, - }, - }, nil - }, - }, - }), - option.WithHeaderDel("X-Stainless-Retry-Count"), - ) - _, err := client.Session.List(context.Background()) - if err == nil { - t.Error("Expected there to be a cancel error") - } - - expectedRetryCountHeaders := []string{"", "", ""} - if !reflect.DeepEqual(retryCountHeaders, expectedRetryCountHeaders) { - t.Errorf("Expected %v retry count headers, got %v", expectedRetryCountHeaders, retryCountHeaders) - } -} - -func TestOverwriteRetryCountHeader(t *testing.T) { - retryCountHeaders := make([]string, 0) - client := opencode.NewClient( - option.WithHTTPClient(&http.Client{ - Transport: &closureTransport{ - fn: func(req *http.Request) (*http.Response, error) { - retryCountHeaders = append(retryCountHeaders, req.Header.Get("X-Stainless-Retry-Count")) - return &http.Response{ - StatusCode: http.StatusTooManyRequests, - Header: http.Header{ - http.CanonicalHeaderKey("Retry-After"): []string{"0.1"}, - }, - }, nil - }, - }, - }), - option.WithHeader("X-Stainless-Retry-Count", "42"), - ) - _, err := client.Session.List(context.Background()) - if err 
== nil { - t.Error("Expected there to be a cancel error") - } - - expectedRetryCountHeaders := []string{"42", "42", "42"} - if !reflect.DeepEqual(retryCountHeaders, expectedRetryCountHeaders) { - t.Errorf("Expected %v retry count headers, got %v", expectedRetryCountHeaders, retryCountHeaders) - } -} - -func TestRetryAfterMs(t *testing.T) { - attempts := 0 - client := opencode.NewClient( - option.WithHTTPClient(&http.Client{ - Transport: &closureTransport{ - fn: func(req *http.Request) (*http.Response, error) { - attempts++ - return &http.Response{ - StatusCode: http.StatusTooManyRequests, - Header: http.Header{ - http.CanonicalHeaderKey("Retry-After-Ms"): []string{"100"}, - }, - }, nil - }, - }, - }), - ) - _, err := client.Session.List(context.Background()) - if err == nil { - t.Error("Expected there to be a cancel error") - } - if want := 3; attempts != want { - t.Errorf("Expected %d attempts, got %d", want, attempts) - } -} - -func TestContextCancel(t *testing.T) { - client := opencode.NewClient( - option.WithHTTPClient(&http.Client{ - Transport: &closureTransport{ - fn: func(req *http.Request) (*http.Response, error) { - <-req.Context().Done() - return nil, req.Context().Err() - }, - }, - }), - ) - cancelCtx, cancel := context.WithCancel(context.Background()) - cancel() - _, err := client.Session.List(cancelCtx) - if err == nil { - t.Error("Expected there to be a cancel error") - } -} - -func TestContextCancelDelay(t *testing.T) { - client := opencode.NewClient( - option.WithHTTPClient(&http.Client{ - Transport: &closureTransport{ - fn: func(req *http.Request) (*http.Response, error) { - <-req.Context().Done() - return nil, req.Context().Err() - }, - }, - }), - ) - cancelCtx, cancel := context.WithTimeout(context.Background(), 2*time.Millisecond) - defer cancel() - _, err := client.Session.List(cancelCtx) - if err == nil { - t.Error("expected there to be a cancel error") - } -} - -func TestContextDeadline(t *testing.T) { - testTimeout := time.After(3 * time.Second) - testDone := make(chan struct{}) - - deadline := time.Now().Add(100 * time.Millisecond) - deadlineCtx, cancel := context.WithDeadline(context.Background(), deadline) - defer cancel() - - go func() { - client := opencode.NewClient( - option.WithHTTPClient(&http.Client{ - Transport: &closureTransport{ - fn: func(req *http.Request) (*http.Response, error) { - <-req.Context().Done() - return nil, req.Context().Err() - }, - }, - }), - ) - _, err := client.Session.List(deadlineCtx) - if err == nil { - t.Error("expected there to be a deadline error") - } - close(testDone) - }() - - select { - case <-testTimeout: - t.Fatal("client didn't finish in time") - case <-testDone: - if diff := time.Since(deadline); diff < -30*time.Millisecond || 30*time.Millisecond < diff { - t.Fatalf("client did not return within 30ms of context deadline, got %s", diff) - } - } -} - -func TestContextDeadlineStreaming(t *testing.T) { - testTimeout := time.After(3 * time.Second) - testDone := make(chan struct{}) - - deadline := time.Now().Add(100 * time.Millisecond) - deadlineCtx, cancel := context.WithDeadline(context.Background(), deadline) - defer cancel() - - go func() { - client := opencode.NewClient( - option.WithHTTPClient(&http.Client{ - Transport: &closureTransport{ - fn: func(req *http.Request) (*http.Response, error) { - return &http.Response{ - StatusCode: 200, - Status: "200 OK", - Body: io.NopCloser( - io.Reader(readerFunc(func([]byte) (int, error) { - <-req.Context().Done() - return 0, req.Context().Err() - })), - ), - }, nil - }, - }, - }), 
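The cancellation and retry tests above lean on ordinary context plumbing, and the same pattern applies in application code. A short sketch that bounds a Session.List call with a deadline and attaches a client-wide header; the header name, timeout, and URL are illustrative.

```go
package main

import (
	"context"
	"log"
	"time"

	"github.com/sst/opencode-sdk-go"
	"github.com/sst/opencode-sdk-go/option"
)

func main() {
	client := opencode.NewClient(
		option.WithBaseURL("http://localhost:4096"),      // placeholder URL
		option.WithHeader("X-Request-Source", "example"), // illustrative header
	)

	// Bound the call with a deadline; on expiry the error surfaces ctx.Err().
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	if _, err := client.Session.List(ctx); err != nil {
		log.Fatal(err)
	}
	log.Println("session list fetched before the deadline")
}
```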
- ) - stream := client.Event.ListStreaming(deadlineCtx) - for stream.Next() { - _ = stream.Current() - } - if stream.Err() == nil { - t.Error("expected there to be a deadline error") - } - close(testDone) - }() - - select { - case <-testTimeout: - t.Fatal("client didn't finish in time") - case <-testDone: - if diff := time.Since(deadline); diff < -30*time.Millisecond || 30*time.Millisecond < diff { - t.Fatalf("client did not return within 30ms of context deadline, got %s", diff) - } - } -} - -func TestContextDeadlineStreamingWithRequestTimeout(t *testing.T) { - testTimeout := time.After(3 * time.Second) - testDone := make(chan struct{}) - deadline := time.Now().Add(100 * time.Millisecond) - - go func() { - client := opencode.NewClient( - option.WithHTTPClient(&http.Client{ - Transport: &closureTransport{ - fn: func(req *http.Request) (*http.Response, error) { - return &http.Response{ - StatusCode: 200, - Status: "200 OK", - Body: io.NopCloser( - io.Reader(readerFunc(func([]byte) (int, error) { - <-req.Context().Done() - return 0, req.Context().Err() - })), - ), - }, nil - }, - }, - }), - ) - stream := client.Event.ListStreaming(context.Background(), option.WithRequestTimeout((100 * time.Millisecond))) - for stream.Next() { - _ = stream.Current() - } - if stream.Err() == nil { - t.Error("expected there to be a deadline error") - } - close(testDone) - }() - - select { - case <-testTimeout: - t.Fatal("client didn't finish in time") - case <-testDone: - if diff := time.Since(deadline); diff < -30*time.Millisecond || 30*time.Millisecond < diff { - t.Fatalf("client did not return within 30ms of context deadline, got %s", diff) - } - } -} - -type readerFunc func([]byte) (int, error) - -func (f readerFunc) Read(p []byte) (int, error) { return f(p) } -func (f readerFunc) Close() error { return nil } diff --git a/packages/tui/sdk/config.go b/packages/tui/sdk/config.go deleted file mode 100644 index 34788dac..00000000 --- a/packages/tui/sdk/config.go +++ /dev/null @@ -1,709 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -package opencode - -import ( - "context" - "net/http" - "reflect" - - "github.com/sst/opencode-sdk-go/internal/apijson" - "github.com/sst/opencode-sdk-go/internal/requestconfig" - "github.com/sst/opencode-sdk-go/option" - "github.com/tidwall/gjson" -) - -// ConfigService contains methods and other services that help with interacting -// with the opencode API. -// -// Note, unlike clients, this service does not read variables from the environment -// automatically. You should not instantiate this service directly, and instead use -// the [NewConfigService] method instead. -type ConfigService struct { - Options []option.RequestOption -} - -// NewConfigService generates a new service that applies the given options to each -// request. These options are applied after the parent client's options (if there -// is one), and before any request-specific options. -func NewConfigService(opts ...option.RequestOption) (r *ConfigService) { - r = &ConfigService{} - r.Options = opts - return -} - -// Get config info -func (r *ConfigService) Get(ctx context.Context, opts ...option.RequestOption) (res *Config, err error) { - opts = append(r.Options[:], opts...) - path := "config" - err = requestconfig.ExecuteNewRequest(ctx, http.MethodGet, path, nil, &res, opts...) - return -} - -type Config struct { - // JSON schema reference for configuration validation - Schema string `json:"$schema"` - // @deprecated Use 'share' field instead. 
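ConfigService.Get above is the typed wrapper for the config endpoint. A minimal sketch of fetching the config and unwrapping API failures into *opencode.Error, following the pattern used in the deleted tests (placeholder URL):

```go
package main

import (
	"context"
	"errors"
	"fmt"
	"log"

	"github.com/sst/opencode-sdk-go"
	"github.com/sst/opencode-sdk-go/option"
)

func main() {
	client := opencode.NewClient(option.WithBaseURL("http://localhost:4096")) // placeholder URL

	cfg, err := client.Config.Get(context.TODO())
	if err != nil {
		// API failures unwrap to *opencode.Error, which can dump the request.
		var apierr *opencode.Error
		if errors.As(err, &apierr) {
			log.Println(string(apierr.DumpRequest(true)))
		}
		log.Fatal(err)
	}
	fmt.Println("model:", cfg.Model)
	fmt.Println("theme:", cfg.Theme)
}
```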
Share newly created sessions - // automatically - Autoshare bool `json:"autoshare"` - // Automatically update to the latest version - Autoupdate bool `json:"autoupdate"` - // Disable providers that are loaded automatically - DisabledProviders []string `json:"disabled_providers"` - Experimental ConfigExperimental `json:"experimental"` - // Additional instruction files or patterns to include - Instructions []string `json:"instructions"` - // Custom keybind configurations - Keybinds KeybindsConfig `json:"keybinds"` - // @deprecated Always uses stretch layout. - Layout ConfigLayout `json:"layout"` - // Minimum log level to write to log files - LogLevel LogLevel `json:"log_level"` - // MCP (Model Context Protocol) server configurations - Mcp map[string]ConfigMcp `json:"mcp"` - // Modes configuration, see https://opencode.ai/docs/modes - Mode ConfigMode `json:"mode"` - // Model to use in the format of provider/model, eg anthropic/claude-2 - Model string `json:"model"` - // Custom provider configurations and model overrides - Provider map[string]ConfigProvider `json:"provider"` - // Control sharing behavior:'manual' allows manual sharing via commands, 'auto' - // enables automatic sharing, 'disabled' disables all sharing - Share ConfigShare `json:"share"` - // Theme name to use for the interface - Theme string `json:"theme"` - // Custom username to display in conversations instead of system username - Username string `json:"username"` - JSON configJSON `json:"-"` -} - -// configJSON contains the JSON metadata for the struct [Config] -type configJSON struct { - Schema apijson.Field - Autoshare apijson.Field - Autoupdate apijson.Field - DisabledProviders apijson.Field - Experimental apijson.Field - Instructions apijson.Field - Keybinds apijson.Field - Layout apijson.Field - LogLevel apijson.Field - Mcp apijson.Field - Mode apijson.Field - Model apijson.Field - Provider apijson.Field - Share apijson.Field - Theme apijson.Field - Username apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *Config) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r configJSON) RawJSON() string { - return r.raw -} - -type ConfigExperimental struct { - Hook ConfigExperimentalHook `json:"hook"` - JSON configExperimentalJSON `json:"-"` -} - -// configExperimentalJSON contains the JSON metadata for the struct -// [ConfigExperimental] -type configExperimentalJSON struct { - Hook apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *ConfigExperimental) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r configExperimentalJSON) RawJSON() string { - return r.raw -} - -type ConfigExperimentalHook struct { - FileEdited map[string][]ConfigExperimentalHookFileEdited `json:"file_edited"` - SessionCompleted []ConfigExperimentalHookSessionCompleted `json:"session_completed"` - JSON configExperimentalHookJSON `json:"-"` -} - -// configExperimentalHookJSON contains the JSON metadata for the struct -// [ConfigExperimentalHook] -type configExperimentalHookJSON struct { - FileEdited apijson.Field - SessionCompleted apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *ConfigExperimentalHook) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r configExperimentalHookJSON) RawJSON() string { - return r.raw -} - -type ConfigExperimentalHookFileEdited struct { - Command []string `json:"command,required"` - Environment 
map[string]string `json:"environment"` - JSON configExperimentalHookFileEditedJSON `json:"-"` -} - -// configExperimentalHookFileEditedJSON contains the JSON metadata for the struct -// [ConfigExperimentalHookFileEdited] -type configExperimentalHookFileEditedJSON struct { - Command apijson.Field - Environment apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *ConfigExperimentalHookFileEdited) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r configExperimentalHookFileEditedJSON) RawJSON() string { - return r.raw -} - -type ConfigExperimentalHookSessionCompleted struct { - Command []string `json:"command,required"` - Environment map[string]string `json:"environment"` - JSON configExperimentalHookSessionCompletedJSON `json:"-"` -} - -// configExperimentalHookSessionCompletedJSON contains the JSON metadata for the -// struct [ConfigExperimentalHookSessionCompleted] -type configExperimentalHookSessionCompletedJSON struct { - Command apijson.Field - Environment apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *ConfigExperimentalHookSessionCompleted) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r configExperimentalHookSessionCompletedJSON) RawJSON() string { - return r.raw -} - -// @deprecated Always uses stretch layout. -type ConfigLayout string - -const ( - ConfigLayoutAuto ConfigLayout = "auto" - ConfigLayoutStretch ConfigLayout = "stretch" -) - -func (r ConfigLayout) IsKnown() bool { - switch r { - case ConfigLayoutAuto, ConfigLayoutStretch: - return true - } - return false -} - -type ConfigMcp struct { - // Type of MCP server connection - Type ConfigMcpType `json:"type,required"` - // This field can have the runtime type of [[]string]. - Command interface{} `json:"command"` - // Enable or disable the MCP server on startup - Enabled bool `json:"enabled"` - // This field can have the runtime type of [map[string]string]. - Environment interface{} `json:"environment"` - // This field can have the runtime type of [map[string]string]. - Headers interface{} `json:"headers"` - // URL of the remote MCP server - URL string `json:"url"` - JSON configMcpJSON `json:"-"` - union ConfigMcpUnion -} - -// configMcpJSON contains the JSON metadata for the struct [ConfigMcp] -type configMcpJSON struct { - Type apijson.Field - Command apijson.Field - Enabled apijson.Field - Environment apijson.Field - Headers apijson.Field - URL apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r configMcpJSON) RawJSON() string { - return r.raw -} - -func (r *ConfigMcp) UnmarshalJSON(data []byte) (err error) { - *r = ConfigMcp{} - err = apijson.UnmarshalRoot(data, &r.union) - if err != nil { - return err - } - return apijson.Port(r.union, &r) -} - -// AsUnion returns a [ConfigMcpUnion] interface which you can cast to the specific -// types for more type safety. -// -// Possible runtime types of the union are [McpLocalConfig], [McpRemoteConfig]. -func (r ConfigMcp) AsUnion() ConfigMcpUnion { - return r.union -} - -// Union satisfied by [McpLocalConfig] or [McpRemoteConfig]. 
-type ConfigMcpUnion interface { - implementsConfigMcp() -} - -func init() { - apijson.RegisterUnion( - reflect.TypeOf((*ConfigMcpUnion)(nil)).Elem(), - "type", - apijson.UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(McpLocalConfig{}), - DiscriminatorValue: "local", - }, - apijson.UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(McpRemoteConfig{}), - DiscriminatorValue: "remote", - }, - ) -} - -// Type of MCP server connection -type ConfigMcpType string - -const ( - ConfigMcpTypeLocal ConfigMcpType = "local" - ConfigMcpTypeRemote ConfigMcpType = "remote" -) - -func (r ConfigMcpType) IsKnown() bool { - switch r { - case ConfigMcpTypeLocal, ConfigMcpTypeRemote: - return true - } - return false -} - -// Modes configuration, see https://opencode.ai/docs/modes -type ConfigMode struct { - Build ModeConfig `json:"build"` - Plan ModeConfig `json:"plan"` - ExtraFields map[string]ModeConfig `json:"-,extras"` - JSON configModeJSON `json:"-"` -} - -// configModeJSON contains the JSON metadata for the struct [ConfigMode] -type configModeJSON struct { - Build apijson.Field - Plan apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *ConfigMode) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r configModeJSON) RawJSON() string { - return r.raw -} - -type ConfigProvider struct { - Models map[string]ConfigProviderModel `json:"models,required"` - ID string `json:"id"` - API string `json:"api"` - Env []string `json:"env"` - Name string `json:"name"` - Npm string `json:"npm"` - Options map[string]interface{} `json:"options"` - JSON configProviderJSON `json:"-"` -} - -// configProviderJSON contains the JSON metadata for the struct [ConfigProvider] -type configProviderJSON struct { - Models apijson.Field - ID apijson.Field - API apijson.Field - Env apijson.Field - Name apijson.Field - Npm apijson.Field - Options apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *ConfigProvider) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r configProviderJSON) RawJSON() string { - return r.raw -} - -type ConfigProviderModel struct { - ID string `json:"id"` - Attachment bool `json:"attachment"` - Cost ConfigProviderModelsCost `json:"cost"` - Limit ConfigProviderModelsLimit `json:"limit"` - Name string `json:"name"` - Options map[string]interface{} `json:"options"` - Reasoning bool `json:"reasoning"` - ReleaseDate string `json:"release_date"` - Temperature bool `json:"temperature"` - ToolCall bool `json:"tool_call"` - JSON configProviderModelJSON `json:"-"` -} - -// configProviderModelJSON contains the JSON metadata for the struct -// [ConfigProviderModel] -type configProviderModelJSON struct { - ID apijson.Field - Attachment apijson.Field - Cost apijson.Field - Limit apijson.Field - Name apijson.Field - Options apijson.Field - Reasoning apijson.Field - ReleaseDate apijson.Field - Temperature apijson.Field - ToolCall apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *ConfigProviderModel) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r configProviderModelJSON) RawJSON() string { - return r.raw -} - -type ConfigProviderModelsCost struct { - Input float64 `json:"input,required"` - Output float64 `json:"output,required"` - CacheRead float64 `json:"cache_read"` - CacheWrite float64 `json:"cache_write"` - JSON configProviderModelsCostJSON `json:"-"` -} - -// 
configProviderModelsCostJSON contains the JSON metadata for the struct -// [ConfigProviderModelsCost] -type configProviderModelsCostJSON struct { - Input apijson.Field - Output apijson.Field - CacheRead apijson.Field - CacheWrite apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *ConfigProviderModelsCost) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r configProviderModelsCostJSON) RawJSON() string { - return r.raw -} - -type ConfigProviderModelsLimit struct { - Context float64 `json:"context,required"` - Output float64 `json:"output,required"` - JSON configProviderModelsLimitJSON `json:"-"` -} - -// configProviderModelsLimitJSON contains the JSON metadata for the struct -// [ConfigProviderModelsLimit] -type configProviderModelsLimitJSON struct { - Context apijson.Field - Output apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *ConfigProviderModelsLimit) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r configProviderModelsLimitJSON) RawJSON() string { - return r.raw -} - -// Control sharing behavior:'manual' allows manual sharing via commands, 'auto' -// enables automatic sharing, 'disabled' disables all sharing -type ConfigShare string - -const ( - ConfigShareManual ConfigShare = "manual" - ConfigShareAuto ConfigShare = "auto" - ConfigShareDisabled ConfigShare = "disabled" -) - -func (r ConfigShare) IsKnown() bool { - switch r { - case ConfigShareManual, ConfigShareAuto, ConfigShareDisabled: - return true - } - return false -} - -type KeybindsConfig struct { - // Exit the application - AppExit string `json:"app_exit,required"` - // Show help dialog - AppHelp string `json:"app_help,required"` - // Open external editor - EditorOpen string `json:"editor_open,required"` - // Close file - FileClose string `json:"file_close,required"` - // Split/unified diff - FileDiffToggle string `json:"file_diff_toggle,required"` - // List files - FileList string `json:"file_list,required"` - // Search file - FileSearch string `json:"file_search,required"` - // Clear input field - InputClear string `json:"input_clear,required"` - // Insert newline in input - InputNewline string `json:"input_newline,required"` - // Paste from clipboard - InputPaste string `json:"input_paste,required"` - // Submit input - InputSubmit string `json:"input_submit,required"` - // Leader key for keybind combinations - Leader string `json:"leader,required"` - // Copy message - MessagesCopy string `json:"messages_copy,required"` - // Navigate to first message - MessagesFirst string `json:"messages_first,required"` - // Scroll messages down by half page - MessagesHalfPageDown string `json:"messages_half_page_down,required"` - // Scroll messages up by half page - MessagesHalfPageUp string `json:"messages_half_page_up,required"` - // Navigate to last message - MessagesLast string `json:"messages_last,required"` - // Toggle layout - MessagesLayoutToggle string `json:"messages_layout_toggle,required"` - // Navigate to next message - MessagesNext string `json:"messages_next,required"` - // Scroll messages down by one page - MessagesPageDown string `json:"messages_page_down,required"` - // Scroll messages up by one page - MessagesPageUp string `json:"messages_page_up,required"` - // Navigate to previous message - MessagesPrevious string `json:"messages_previous,required"` - // Revert message - MessagesRevert string `json:"messages_revert,required"` - // List available models - ModelList 
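The ConfigShare constants above control sharing behavior. A small illustrative helper (reportSharing is a made-up name for this sketch) that inspects sharing-related fields on a Config normally fetched via Config.Get:

```go
package main

import (
	"fmt"

	"github.com/sst/opencode-sdk-go"
)

// reportSharing inspects sharing-related fields on a fetched Config value.
func reportSharing(cfg *opencode.Config) {
	switch cfg.Share {
	case opencode.ConfigShareDisabled:
		fmt.Println("sharing is disabled")
	case opencode.ConfigShareAuto:
		fmt.Println("sessions are shared automatically")
	default:
		fmt.Println("sessions are shared manually via commands")
	}
	if len(cfg.DisabledProviders) > 0 {
		fmt.Println("disabled providers:", cfg.DisabledProviders)
	}
}

func main() {
	// A constructed value stands in for the result of client.Config.Get.
	reportSharing(&opencode.Config{Share: opencode.ConfigShareAuto})
}
```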
string `json:"model_list,required"` - // Create/update AGENTS.md - ProjectInit string `json:"project_init,required"` - // Compact the session - SessionCompact string `json:"session_compact,required"` - // Export session to editor - SessionExport string `json:"session_export,required"` - // Interrupt current session - SessionInterrupt string `json:"session_interrupt,required"` - // List all sessions - SessionList string `json:"session_list,required"` - // Create a new session - SessionNew string `json:"session_new,required"` - // Share current session - SessionShare string `json:"session_share,required"` - // Unshare current session - SessionUnshare string `json:"session_unshare,required"` - // Next mode - SwitchMode string `json:"switch_mode,required"` - // Previous Mode - SwitchModeReverse string `json:"switch_mode_reverse,required"` - // List available themes - ThemeList string `json:"theme_list,required"` - // Toggle tool details - ToolDetails string `json:"tool_details,required"` - JSON keybindsConfigJSON `json:"-"` -} - -// keybindsConfigJSON contains the JSON metadata for the struct [KeybindsConfig] -type keybindsConfigJSON struct { - AppExit apijson.Field - AppHelp apijson.Field - EditorOpen apijson.Field - FileClose apijson.Field - FileDiffToggle apijson.Field - FileList apijson.Field - FileSearch apijson.Field - InputClear apijson.Field - InputNewline apijson.Field - InputPaste apijson.Field - InputSubmit apijson.Field - Leader apijson.Field - MessagesCopy apijson.Field - MessagesFirst apijson.Field - MessagesHalfPageDown apijson.Field - MessagesHalfPageUp apijson.Field - MessagesLast apijson.Field - MessagesLayoutToggle apijson.Field - MessagesNext apijson.Field - MessagesPageDown apijson.Field - MessagesPageUp apijson.Field - MessagesPrevious apijson.Field - MessagesRevert apijson.Field - ModelList apijson.Field - ProjectInit apijson.Field - SessionCompact apijson.Field - SessionExport apijson.Field - SessionInterrupt apijson.Field - SessionList apijson.Field - SessionNew apijson.Field - SessionShare apijson.Field - SessionUnshare apijson.Field - SwitchMode apijson.Field - SwitchModeReverse apijson.Field - ThemeList apijson.Field - ToolDetails apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *KeybindsConfig) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r keybindsConfigJSON) RawJSON() string { - return r.raw -} - -type McpLocalConfig struct { - // Command and arguments to run the MCP server - Command []string `json:"command,required"` - // Type of MCP server connection - Type McpLocalConfigType `json:"type,required"` - // Enable or disable the MCP server on startup - Enabled bool `json:"enabled"` - // Environment variables to set when running the MCP server - Environment map[string]string `json:"environment"` - JSON mcpLocalConfigJSON `json:"-"` -} - -// mcpLocalConfigJSON contains the JSON metadata for the struct [McpLocalConfig] -type mcpLocalConfigJSON struct { - Command apijson.Field - Type apijson.Field - Enabled apijson.Field - Environment apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *McpLocalConfig) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r mcpLocalConfigJSON) RawJSON() string { - return r.raw -} - -func (r McpLocalConfig) implementsConfigMcp() {} - -// Type of MCP server connection -type McpLocalConfigType string - -const ( - McpLocalConfigTypeLocal McpLocalConfigType = "local" -) - -func (r 
McpLocalConfigType) IsKnown() bool { - switch r { - case McpLocalConfigTypeLocal: - return true - } - return false -} - -type McpRemoteConfig struct { - // Type of MCP server connection - Type McpRemoteConfigType `json:"type,required"` - // URL of the remote MCP server - URL string `json:"url,required"` - // Enable or disable the MCP server on startup - Enabled bool `json:"enabled"` - // Headers to send with the request - Headers map[string]string `json:"headers"` - JSON mcpRemoteConfigJSON `json:"-"` -} - -// mcpRemoteConfigJSON contains the JSON metadata for the struct [McpRemoteConfig] -type mcpRemoteConfigJSON struct { - Type apijson.Field - URL apijson.Field - Enabled apijson.Field - Headers apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *McpRemoteConfig) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r mcpRemoteConfigJSON) RawJSON() string { - return r.raw -} - -func (r McpRemoteConfig) implementsConfigMcp() {} - -// Type of MCP server connection -type McpRemoteConfigType string - -const ( - McpRemoteConfigTypeRemote McpRemoteConfigType = "remote" -) - -func (r McpRemoteConfigType) IsKnown() bool { - switch r { - case McpRemoteConfigTypeRemote: - return true - } - return false -} - -type ModeConfig struct { - Model string `json:"model"` - Prompt string `json:"prompt"` - Tools map[string]bool `json:"tools"` - JSON modeConfigJSON `json:"-"` -} - -// modeConfigJSON contains the JSON metadata for the struct [ModeConfig] -type modeConfigJSON struct { - Model apijson.Field - Prompt apijson.Field - Tools apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *ModeConfig) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r modeConfigJSON) RawJSON() string { - return r.raw -} diff --git a/packages/tui/sdk/config_test.go b/packages/tui/sdk/config_test.go deleted file mode 100644 index 86e058a9..00000000 --- a/packages/tui/sdk/config_test.go +++ /dev/null @@ -1,36 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -package opencode_test - -import ( - "context" - "errors" - "os" - "testing" - - "github.com/sst/opencode-sdk-go" - "github.com/sst/opencode-sdk-go/internal/testutil" - "github.com/sst/opencode-sdk-go/option" -) - -func TestConfigGet(t *testing.T) { - t.Skip("skipped: tests are disabled for the time being") - baseURL := "http://localhost:4010" - if envURL, ok := os.LookupEnv("TEST_API_BASE_URL"); ok { - baseURL = envURL - } - if !testutil.CheckTestServer(t, baseURL) { - return - } - client := opencode.NewClient( - option.WithBaseURL(baseURL), - ) - _, err := client.Config.Get(context.TODO()) - if err != nil { - var apierr *opencode.Error - if errors.As(err, &apierr) { - t.Log(string(apierr.DumpRequest(true))) - } - t.Fatalf("err should be nil: %s", err.Error()) - } -} diff --git a/packages/tui/sdk/event.go b/packages/tui/sdk/event.go deleted file mode 100644 index 9002d2aa..00000000 --- a/packages/tui/sdk/event.go +++ /dev/null @@ -1,1186 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
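ConfigMcp is a discriminated union over the local and remote MCP variants defined above, and AsUnion recovers the concrete type. A sketch of dispatching on it; describeMcp is an illustrative name, and the map would normally be the Mcp field of a fetched Config.

```go
package main

import (
	"fmt"

	"github.com/sst/opencode-sdk-go"
)

// describeMcp prints how each configured MCP server is reached, using AsUnion
// to recover the concrete local or remote variant.
func describeMcp(servers map[string]opencode.ConfigMcp) {
	for name, server := range servers {
		switch v := server.AsUnion().(type) {
		case opencode.McpLocalConfig:
			fmt.Printf("%s: local command %v\n", name, v.Command)
		case opencode.McpRemoteConfig:
			fmt.Printf("%s: remote server at %s\n", name, v.URL)
		}
	}
}

func main() {
	// The map would normally come from client.Config.Get; an empty map keeps
	// this sketch self-contained.
	describeMcp(map[string]opencode.ConfigMcp{})
}
```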
- -package opencode - -import ( - "context" - "net/http" - "reflect" - - "github.com/sst/opencode-sdk-go/internal/apijson" - "github.com/sst/opencode-sdk-go/internal/requestconfig" - "github.com/sst/opencode-sdk-go/option" - "github.com/sst/opencode-sdk-go/packages/ssestream" - "github.com/sst/opencode-sdk-go/shared" - "github.com/tidwall/gjson" -) - -// EventService contains methods and other services that help with interacting with -// the opencode API. -// -// Note, unlike clients, this service does not read variables from the environment -// automatically. You should not instantiate this service directly, and instead use -// the [NewEventService] method instead. -type EventService struct { - Options []option.RequestOption -} - -// NewEventService generates a new service that applies the given options to each -// request. These options are applied after the parent client's options (if there -// is one), and before any request-specific options. -func NewEventService(opts ...option.RequestOption) (r *EventService) { - r = &EventService{} - r.Options = opts - return -} - -// Get events -func (r *EventService) ListStreaming(ctx context.Context, opts ...option.RequestOption) (stream *ssestream.Stream[EventListResponse]) { - var ( - raw *http.Response - err error - ) - opts = append(r.Options[:], opts...) - path := "event" - err = requestconfig.ExecuteNewRequest(ctx, http.MethodGet, path, nil, &raw, opts...) - return ssestream.NewStream[EventListResponse](ssestream.NewDecoder(raw), err) -} - -type EventListResponse struct { - // This field can have the runtime type of - // [EventListResponseEventLspClientDiagnosticsProperties], - // [EventListResponseEventPermissionUpdatedProperties], - // [EventListResponseEventFileEditedProperties], - // [EventListResponseEventInstallationUpdatedProperties], - // [EventListResponseEventMessageUpdatedProperties], - // [EventListResponseEventMessageRemovedProperties], - // [EventListResponseEventMessagePartUpdatedProperties], - // [EventListResponseEventStorageWriteProperties], - // [EventListResponseEventSessionUpdatedProperties], - // [EventListResponseEventSessionDeletedProperties], - // [EventListResponseEventSessionIdleProperties], - // [EventListResponseEventSessionErrorProperties], - // [EventListResponseEventFileWatcherUpdatedProperties]. - Properties interface{} `json:"properties,required"` - Type EventListResponseType `json:"type,required"` - JSON eventListResponseJSON `json:"-"` - union EventListResponseUnion -} - -// eventListResponseJSON contains the JSON metadata for the struct -// [EventListResponse] -type eventListResponseJSON struct { - Properties apijson.Field - Type apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r eventListResponseJSON) RawJSON() string { - return r.raw -} - -func (r *EventListResponse) UnmarshalJSON(data []byte) (err error) { - *r = EventListResponse{} - err = apijson.UnmarshalRoot(data, &r.union) - if err != nil { - return err - } - return apijson.Port(r.union, &r) -} - -// AsUnion returns a [EventListResponseUnion] interface which you can cast to the -// specific types for more type safety. 
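ListStreaming above returns a server-sent-events stream. A minimal consumption loop, following the Next/Current/Err pattern used in the deleted client tests (placeholder URL):

```go
package main

import (
	"context"
	"log"

	"github.com/sst/opencode-sdk-go"
	"github.com/sst/opencode-sdk-go/option"
)

func main() {
	client := opencode.NewClient(option.WithBaseURL("http://localhost:4096")) // placeholder URL

	// ListStreaming opens a server-sent-events connection; Next blocks until
	// the next event arrives or the stream ends.
	stream := client.Event.ListStreaming(context.Background())
	for stream.Next() {
		evt := stream.Current()
		log.Printf("event: %s", evt.Type)
	}
	if err := stream.Err(); err != nil {
		log.Fatal(err)
	}
}
```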
-// -// Possible runtime types of the union are -// [EventListResponseEventLspClientDiagnostics], -// [EventListResponseEventPermissionUpdated], [EventListResponseEventFileEdited], -// [EventListResponseEventInstallationUpdated], -// [EventListResponseEventMessageUpdated], [EventListResponseEventMessageRemoved], -// [EventListResponseEventMessagePartUpdated], -// [EventListResponseEventStorageWrite], [EventListResponseEventSessionUpdated], -// [EventListResponseEventSessionDeleted], [EventListResponseEventSessionIdle], -// [EventListResponseEventSessionError], -// [EventListResponseEventFileWatcherUpdated]. -func (r EventListResponse) AsUnion() EventListResponseUnion { - return r.union -} - -// Union satisfied by [EventListResponseEventLspClientDiagnostics], -// [EventListResponseEventPermissionUpdated], [EventListResponseEventFileEdited], -// [EventListResponseEventInstallationUpdated], -// [EventListResponseEventMessageUpdated], [EventListResponseEventMessageRemoved], -// [EventListResponseEventMessagePartUpdated], -// [EventListResponseEventStorageWrite], [EventListResponseEventSessionUpdated], -// [EventListResponseEventSessionDeleted], [EventListResponseEventSessionIdle], -// [EventListResponseEventSessionError] or -// [EventListResponseEventFileWatcherUpdated]. -type EventListResponseUnion interface { - implementsEventListResponse() -} - -func init() { - apijson.RegisterUnion( - reflect.TypeOf((*EventListResponseUnion)(nil)).Elem(), - "type", - apijson.UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(EventListResponseEventLspClientDiagnostics{}), - DiscriminatorValue: "lsp.client.diagnostics", - }, - apijson.UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(EventListResponseEventPermissionUpdated{}), - DiscriminatorValue: "permission.updated", - }, - apijson.UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(EventListResponseEventFileEdited{}), - DiscriminatorValue: "file.edited", - }, - apijson.UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(EventListResponseEventInstallationUpdated{}), - DiscriminatorValue: "installation.updated", - }, - apijson.UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(EventListResponseEventMessageUpdated{}), - DiscriminatorValue: "message.updated", - }, - apijson.UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(EventListResponseEventMessageRemoved{}), - DiscriminatorValue: "message.removed", - }, - apijson.UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(EventListResponseEventMessagePartUpdated{}), - DiscriminatorValue: "message.part.updated", - }, - apijson.UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(EventListResponseEventStorageWrite{}), - DiscriminatorValue: "storage.write", - }, - apijson.UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(EventListResponseEventSessionUpdated{}), - DiscriminatorValue: "session.updated", - }, - apijson.UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(EventListResponseEventSessionDeleted{}), - DiscriminatorValue: "session.deleted", - }, - apijson.UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(EventListResponseEventSessionIdle{}), - DiscriminatorValue: "session.idle", - }, - apijson.UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(EventListResponseEventSessionError{}), - DiscriminatorValue: "session.error", - }, - apijson.UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(EventListResponseEventFileWatcherUpdated{}), - DiscriminatorValue: 
"file.watcher.updated", - }, - ) -} - -type EventListResponseEventLspClientDiagnostics struct { - Properties EventListResponseEventLspClientDiagnosticsProperties `json:"properties,required"` - Type EventListResponseEventLspClientDiagnosticsType `json:"type,required"` - JSON eventListResponseEventLspClientDiagnosticsJSON `json:"-"` -} - -// eventListResponseEventLspClientDiagnosticsJSON contains the JSON metadata for -// the struct [EventListResponseEventLspClientDiagnostics] -type eventListResponseEventLspClientDiagnosticsJSON struct { - Properties apijson.Field - Type apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *EventListResponseEventLspClientDiagnostics) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r eventListResponseEventLspClientDiagnosticsJSON) RawJSON() string { - return r.raw -} - -func (r EventListResponseEventLspClientDiagnostics) implementsEventListResponse() {} - -type EventListResponseEventLspClientDiagnosticsProperties struct { - Path string `json:"path,required"` - ServerID string `json:"serverID,required"` - JSON eventListResponseEventLspClientDiagnosticsPropertiesJSON `json:"-"` -} - -// eventListResponseEventLspClientDiagnosticsPropertiesJSON contains the JSON -// metadata for the struct [EventListResponseEventLspClientDiagnosticsProperties] -type eventListResponseEventLspClientDiagnosticsPropertiesJSON struct { - Path apijson.Field - ServerID apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *EventListResponseEventLspClientDiagnosticsProperties) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r eventListResponseEventLspClientDiagnosticsPropertiesJSON) RawJSON() string { - return r.raw -} - -type EventListResponseEventLspClientDiagnosticsType string - -const ( - EventListResponseEventLspClientDiagnosticsTypeLspClientDiagnostics EventListResponseEventLspClientDiagnosticsType = "lsp.client.diagnostics" -) - -func (r EventListResponseEventLspClientDiagnosticsType) IsKnown() bool { - switch r { - case EventListResponseEventLspClientDiagnosticsTypeLspClientDiagnostics: - return true - } - return false -} - -type EventListResponseEventPermissionUpdated struct { - Properties EventListResponseEventPermissionUpdatedProperties `json:"properties,required"` - Type EventListResponseEventPermissionUpdatedType `json:"type,required"` - JSON eventListResponseEventPermissionUpdatedJSON `json:"-"` -} - -// eventListResponseEventPermissionUpdatedJSON contains the JSON metadata for the -// struct [EventListResponseEventPermissionUpdated] -type eventListResponseEventPermissionUpdatedJSON struct { - Properties apijson.Field - Type apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *EventListResponseEventPermissionUpdated) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r eventListResponseEventPermissionUpdatedJSON) RawJSON() string { - return r.raw -} - -func (r EventListResponseEventPermissionUpdated) implementsEventListResponse() {} - -type EventListResponseEventPermissionUpdatedProperties struct { - ID string `json:"id,required"` - Metadata map[string]interface{} `json:"metadata,required"` - SessionID string `json:"sessionID,required"` - Time EventListResponseEventPermissionUpdatedPropertiesTime `json:"time,required"` - Title string `json:"title,required"` - JSON eventListResponseEventPermissionUpdatedPropertiesJSON `json:"-"` -} - -// 
eventListResponseEventPermissionUpdatedPropertiesJSON contains the JSON metadata -// for the struct [EventListResponseEventPermissionUpdatedProperties] -type eventListResponseEventPermissionUpdatedPropertiesJSON struct { - ID apijson.Field - Metadata apijson.Field - SessionID apijson.Field - Time apijson.Field - Title apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *EventListResponseEventPermissionUpdatedProperties) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r eventListResponseEventPermissionUpdatedPropertiesJSON) RawJSON() string { - return r.raw -} - -type EventListResponseEventPermissionUpdatedPropertiesTime struct { - Created float64 `json:"created,required"` - JSON eventListResponseEventPermissionUpdatedPropertiesTimeJSON `json:"-"` -} - -// eventListResponseEventPermissionUpdatedPropertiesTimeJSON contains the JSON -// metadata for the struct [EventListResponseEventPermissionUpdatedPropertiesTime] -type eventListResponseEventPermissionUpdatedPropertiesTimeJSON struct { - Created apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *EventListResponseEventPermissionUpdatedPropertiesTime) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r eventListResponseEventPermissionUpdatedPropertiesTimeJSON) RawJSON() string { - return r.raw -} - -type EventListResponseEventPermissionUpdatedType string - -const ( - EventListResponseEventPermissionUpdatedTypePermissionUpdated EventListResponseEventPermissionUpdatedType = "permission.updated" -) - -func (r EventListResponseEventPermissionUpdatedType) IsKnown() bool { - switch r { - case EventListResponseEventPermissionUpdatedTypePermissionUpdated: - return true - } - return false -} - -type EventListResponseEventFileEdited struct { - Properties EventListResponseEventFileEditedProperties `json:"properties,required"` - Type EventListResponseEventFileEditedType `json:"type,required"` - JSON eventListResponseEventFileEditedJSON `json:"-"` -} - -// eventListResponseEventFileEditedJSON contains the JSON metadata for the struct -// [EventListResponseEventFileEdited] -type eventListResponseEventFileEditedJSON struct { - Properties apijson.Field - Type apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *EventListResponseEventFileEdited) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r eventListResponseEventFileEditedJSON) RawJSON() string { - return r.raw -} - -func (r EventListResponseEventFileEdited) implementsEventListResponse() {} - -type EventListResponseEventFileEditedProperties struct { - File string `json:"file,required"` - JSON eventListResponseEventFileEditedPropertiesJSON `json:"-"` -} - -// eventListResponseEventFileEditedPropertiesJSON contains the JSON metadata for -// the struct [EventListResponseEventFileEditedProperties] -type eventListResponseEventFileEditedPropertiesJSON struct { - File apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *EventListResponseEventFileEditedProperties) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r eventListResponseEventFileEditedPropertiesJSON) RawJSON() string { - return r.raw -} - -type EventListResponseEventFileEditedType string - -const ( - EventListResponseEventFileEditedTypeFileEdited EventListResponseEventFileEditedType = "file.edited" -) - -func (r EventListResponseEventFileEditedType) IsKnown() bool { - 
switch r { - case EventListResponseEventFileEditedTypeFileEdited: - return true - } - return false -} - -type EventListResponseEventInstallationUpdated struct { - Properties EventListResponseEventInstallationUpdatedProperties `json:"properties,required"` - Type EventListResponseEventInstallationUpdatedType `json:"type,required"` - JSON eventListResponseEventInstallationUpdatedJSON `json:"-"` -} - -// eventListResponseEventInstallationUpdatedJSON contains the JSON metadata for the -// struct [EventListResponseEventInstallationUpdated] -type eventListResponseEventInstallationUpdatedJSON struct { - Properties apijson.Field - Type apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *EventListResponseEventInstallationUpdated) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r eventListResponseEventInstallationUpdatedJSON) RawJSON() string { - return r.raw -} - -func (r EventListResponseEventInstallationUpdated) implementsEventListResponse() {} - -type EventListResponseEventInstallationUpdatedProperties struct { - Version string `json:"version,required"` - JSON eventListResponseEventInstallationUpdatedPropertiesJSON `json:"-"` -} - -// eventListResponseEventInstallationUpdatedPropertiesJSON contains the JSON -// metadata for the struct [EventListResponseEventInstallationUpdatedProperties] -type eventListResponseEventInstallationUpdatedPropertiesJSON struct { - Version apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *EventListResponseEventInstallationUpdatedProperties) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r eventListResponseEventInstallationUpdatedPropertiesJSON) RawJSON() string { - return r.raw -} - -type EventListResponseEventInstallationUpdatedType string - -const ( - EventListResponseEventInstallationUpdatedTypeInstallationUpdated EventListResponseEventInstallationUpdatedType = "installation.updated" -) - -func (r EventListResponseEventInstallationUpdatedType) IsKnown() bool { - switch r { - case EventListResponseEventInstallationUpdatedTypeInstallationUpdated: - return true - } - return false -} - -type EventListResponseEventMessageUpdated struct { - Properties EventListResponseEventMessageUpdatedProperties `json:"properties,required"` - Type EventListResponseEventMessageUpdatedType `json:"type,required"` - JSON eventListResponseEventMessageUpdatedJSON `json:"-"` -} - -// eventListResponseEventMessageUpdatedJSON contains the JSON metadata for the -// struct [EventListResponseEventMessageUpdated] -type eventListResponseEventMessageUpdatedJSON struct { - Properties apijson.Field - Type apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *EventListResponseEventMessageUpdated) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r eventListResponseEventMessageUpdatedJSON) RawJSON() string { - return r.raw -} - -func (r EventListResponseEventMessageUpdated) implementsEventListResponse() {} - -type EventListResponseEventMessageUpdatedProperties struct { - Info Message `json:"info,required"` - JSON eventListResponseEventMessageUpdatedPropertiesJSON `json:"-"` -} - -// eventListResponseEventMessageUpdatedPropertiesJSON contains the JSON metadata -// for the struct [EventListResponseEventMessageUpdatedProperties] -type eventListResponseEventMessageUpdatedPropertiesJSON struct { - Info apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r 
*EventListResponseEventMessageUpdatedProperties) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r eventListResponseEventMessageUpdatedPropertiesJSON) RawJSON() string { - return r.raw -} - -type EventListResponseEventMessageUpdatedType string - -const ( - EventListResponseEventMessageUpdatedTypeMessageUpdated EventListResponseEventMessageUpdatedType = "message.updated" -) - -func (r EventListResponseEventMessageUpdatedType) IsKnown() bool { - switch r { - case EventListResponseEventMessageUpdatedTypeMessageUpdated: - return true - } - return false -} - -type EventListResponseEventMessageRemoved struct { - Properties EventListResponseEventMessageRemovedProperties `json:"properties,required"` - Type EventListResponseEventMessageRemovedType `json:"type,required"` - JSON eventListResponseEventMessageRemovedJSON `json:"-"` -} - -// eventListResponseEventMessageRemovedJSON contains the JSON metadata for the -// struct [EventListResponseEventMessageRemoved] -type eventListResponseEventMessageRemovedJSON struct { - Properties apijson.Field - Type apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *EventListResponseEventMessageRemoved) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r eventListResponseEventMessageRemovedJSON) RawJSON() string { - return r.raw -} - -func (r EventListResponseEventMessageRemoved) implementsEventListResponse() {} - -type EventListResponseEventMessageRemovedProperties struct { - MessageID string `json:"messageID,required"` - SessionID string `json:"sessionID,required"` - JSON eventListResponseEventMessageRemovedPropertiesJSON `json:"-"` -} - -// eventListResponseEventMessageRemovedPropertiesJSON contains the JSON metadata -// for the struct [EventListResponseEventMessageRemovedProperties] -type eventListResponseEventMessageRemovedPropertiesJSON struct { - MessageID apijson.Field - SessionID apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *EventListResponseEventMessageRemovedProperties) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r eventListResponseEventMessageRemovedPropertiesJSON) RawJSON() string { - return r.raw -} - -type EventListResponseEventMessageRemovedType string - -const ( - EventListResponseEventMessageRemovedTypeMessageRemoved EventListResponseEventMessageRemovedType = "message.removed" -) - -func (r EventListResponseEventMessageRemovedType) IsKnown() bool { - switch r { - case EventListResponseEventMessageRemovedTypeMessageRemoved: - return true - } - return false -} - -type EventListResponseEventMessagePartUpdated struct { - Properties EventListResponseEventMessagePartUpdatedProperties `json:"properties,required"` - Type EventListResponseEventMessagePartUpdatedType `json:"type,required"` - JSON eventListResponseEventMessagePartUpdatedJSON `json:"-"` -} - -// eventListResponseEventMessagePartUpdatedJSON contains the JSON metadata for the -// struct [EventListResponseEventMessagePartUpdated] -type eventListResponseEventMessagePartUpdatedJSON struct { - Properties apijson.Field - Type apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *EventListResponseEventMessagePartUpdated) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r eventListResponseEventMessagePartUpdatedJSON) RawJSON() string { - return r.raw -} - -func (r EventListResponseEventMessagePartUpdated) implementsEventListResponse() 
{} - -type EventListResponseEventMessagePartUpdatedProperties struct { - Part Part `json:"part,required"` - JSON eventListResponseEventMessagePartUpdatedPropertiesJSON `json:"-"` -} - -// eventListResponseEventMessagePartUpdatedPropertiesJSON contains the JSON -// metadata for the struct [EventListResponseEventMessagePartUpdatedProperties] -type eventListResponseEventMessagePartUpdatedPropertiesJSON struct { - Part apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *EventListResponseEventMessagePartUpdatedProperties) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r eventListResponseEventMessagePartUpdatedPropertiesJSON) RawJSON() string { - return r.raw -} - -type EventListResponseEventMessagePartUpdatedType string - -const ( - EventListResponseEventMessagePartUpdatedTypeMessagePartUpdated EventListResponseEventMessagePartUpdatedType = "message.part.updated" -) - -func (r EventListResponseEventMessagePartUpdatedType) IsKnown() bool { - switch r { - case EventListResponseEventMessagePartUpdatedTypeMessagePartUpdated: - return true - } - return false -} - -type EventListResponseEventStorageWrite struct { - Properties EventListResponseEventStorageWriteProperties `json:"properties,required"` - Type EventListResponseEventStorageWriteType `json:"type,required"` - JSON eventListResponseEventStorageWriteJSON `json:"-"` -} - -// eventListResponseEventStorageWriteJSON contains the JSON metadata for the struct -// [EventListResponseEventStorageWrite] -type eventListResponseEventStorageWriteJSON struct { - Properties apijson.Field - Type apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *EventListResponseEventStorageWrite) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r eventListResponseEventStorageWriteJSON) RawJSON() string { - return r.raw -} - -func (r EventListResponseEventStorageWrite) implementsEventListResponse() {} - -type EventListResponseEventStorageWriteProperties struct { - Key string `json:"key,required"` - Content interface{} `json:"content"` - JSON eventListResponseEventStorageWritePropertiesJSON `json:"-"` -} - -// eventListResponseEventStorageWritePropertiesJSON contains the JSON metadata for -// the struct [EventListResponseEventStorageWriteProperties] -type eventListResponseEventStorageWritePropertiesJSON struct { - Key apijson.Field - Content apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *EventListResponseEventStorageWriteProperties) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r eventListResponseEventStorageWritePropertiesJSON) RawJSON() string { - return r.raw -} - -type EventListResponseEventStorageWriteType string - -const ( - EventListResponseEventStorageWriteTypeStorageWrite EventListResponseEventStorageWriteType = "storage.write" -) - -func (r EventListResponseEventStorageWriteType) IsKnown() bool { - switch r { - case EventListResponseEventStorageWriteTypeStorageWrite: - return true - } - return false -} - -type EventListResponseEventSessionUpdated struct { - Properties EventListResponseEventSessionUpdatedProperties `json:"properties,required"` - Type EventListResponseEventSessionUpdatedType `json:"type,required"` - JSON eventListResponseEventSessionUpdatedJSON `json:"-"` -} - -// eventListResponseEventSessionUpdatedJSON contains the JSON metadata for the -// struct [EventListResponseEventSessionUpdated] -type 
eventListResponseEventSessionUpdatedJSON struct { - Properties apijson.Field - Type apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *EventListResponseEventSessionUpdated) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r eventListResponseEventSessionUpdatedJSON) RawJSON() string { - return r.raw -} - -func (r EventListResponseEventSessionUpdated) implementsEventListResponse() {} - -type EventListResponseEventSessionUpdatedProperties struct { - Info Session `json:"info,required"` - JSON eventListResponseEventSessionUpdatedPropertiesJSON `json:"-"` -} - -// eventListResponseEventSessionUpdatedPropertiesJSON contains the JSON metadata -// for the struct [EventListResponseEventSessionUpdatedProperties] -type eventListResponseEventSessionUpdatedPropertiesJSON struct { - Info apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *EventListResponseEventSessionUpdatedProperties) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r eventListResponseEventSessionUpdatedPropertiesJSON) RawJSON() string { - return r.raw -} - -type EventListResponseEventSessionUpdatedType string - -const ( - EventListResponseEventSessionUpdatedTypeSessionUpdated EventListResponseEventSessionUpdatedType = "session.updated" -) - -func (r EventListResponseEventSessionUpdatedType) IsKnown() bool { - switch r { - case EventListResponseEventSessionUpdatedTypeSessionUpdated: - return true - } - return false -} - -type EventListResponseEventSessionDeleted struct { - Properties EventListResponseEventSessionDeletedProperties `json:"properties,required"` - Type EventListResponseEventSessionDeletedType `json:"type,required"` - JSON eventListResponseEventSessionDeletedJSON `json:"-"` -} - -// eventListResponseEventSessionDeletedJSON contains the JSON metadata for the -// struct [EventListResponseEventSessionDeleted] -type eventListResponseEventSessionDeletedJSON struct { - Properties apijson.Field - Type apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *EventListResponseEventSessionDeleted) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r eventListResponseEventSessionDeletedJSON) RawJSON() string { - return r.raw -} - -func (r EventListResponseEventSessionDeleted) implementsEventListResponse() {} - -type EventListResponseEventSessionDeletedProperties struct { - Info Session `json:"info,required"` - JSON eventListResponseEventSessionDeletedPropertiesJSON `json:"-"` -} - -// eventListResponseEventSessionDeletedPropertiesJSON contains the JSON metadata -// for the struct [EventListResponseEventSessionDeletedProperties] -type eventListResponseEventSessionDeletedPropertiesJSON struct { - Info apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *EventListResponseEventSessionDeletedProperties) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r eventListResponseEventSessionDeletedPropertiesJSON) RawJSON() string { - return r.raw -} - -type EventListResponseEventSessionDeletedType string - -const ( - EventListResponseEventSessionDeletedTypeSessionDeleted EventListResponseEventSessionDeletedType = "session.deleted" -) - -func (r EventListResponseEventSessionDeletedType) IsKnown() bool { - switch r { - case EventListResponseEventSessionDeletedTypeSessionDeleted: - return true - } - return false -} - -type EventListResponseEventSessionIdle struct { - 
Properties EventListResponseEventSessionIdleProperties `json:"properties,required"` - Type EventListResponseEventSessionIdleType `json:"type,required"` - JSON eventListResponseEventSessionIdleJSON `json:"-"` -} - -// eventListResponseEventSessionIdleJSON contains the JSON metadata for the struct -// [EventListResponseEventSessionIdle] -type eventListResponseEventSessionIdleJSON struct { - Properties apijson.Field - Type apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *EventListResponseEventSessionIdle) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r eventListResponseEventSessionIdleJSON) RawJSON() string { - return r.raw -} - -func (r EventListResponseEventSessionIdle) implementsEventListResponse() {} - -type EventListResponseEventSessionIdleProperties struct { - SessionID string `json:"sessionID,required"` - JSON eventListResponseEventSessionIdlePropertiesJSON `json:"-"` -} - -// eventListResponseEventSessionIdlePropertiesJSON contains the JSON metadata for -// the struct [EventListResponseEventSessionIdleProperties] -type eventListResponseEventSessionIdlePropertiesJSON struct { - SessionID apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *EventListResponseEventSessionIdleProperties) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r eventListResponseEventSessionIdlePropertiesJSON) RawJSON() string { - return r.raw -} - -type EventListResponseEventSessionIdleType string - -const ( - EventListResponseEventSessionIdleTypeSessionIdle EventListResponseEventSessionIdleType = "session.idle" -) - -func (r EventListResponseEventSessionIdleType) IsKnown() bool { - switch r { - case EventListResponseEventSessionIdleTypeSessionIdle: - return true - } - return false -} - -type EventListResponseEventSessionError struct { - Properties EventListResponseEventSessionErrorProperties `json:"properties,required"` - Type EventListResponseEventSessionErrorType `json:"type,required"` - JSON eventListResponseEventSessionErrorJSON `json:"-"` -} - -// eventListResponseEventSessionErrorJSON contains the JSON metadata for the struct -// [EventListResponseEventSessionError] -type eventListResponseEventSessionErrorJSON struct { - Properties apijson.Field - Type apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *EventListResponseEventSessionError) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r eventListResponseEventSessionErrorJSON) RawJSON() string { - return r.raw -} - -func (r EventListResponseEventSessionError) implementsEventListResponse() {} - -type EventListResponseEventSessionErrorProperties struct { - Error EventListResponseEventSessionErrorPropertiesError `json:"error"` - SessionID string `json:"sessionID"` - JSON eventListResponseEventSessionErrorPropertiesJSON `json:"-"` -} - -// eventListResponseEventSessionErrorPropertiesJSON contains the JSON metadata for -// the struct [EventListResponseEventSessionErrorProperties] -type eventListResponseEventSessionErrorPropertiesJSON struct { - Error apijson.Field - SessionID apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *EventListResponseEventSessionErrorProperties) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r eventListResponseEventSessionErrorPropertiesJSON) RawJSON() string { - return r.raw -} - -type EventListResponseEventSessionErrorPropertiesError 
struct { - // This field can have the runtime type of [shared.ProviderAuthErrorData], - // [shared.UnknownErrorData], [interface{}]. - Data interface{} `json:"data,required"` - Name EventListResponseEventSessionErrorPropertiesErrorName `json:"name,required"` - JSON eventListResponseEventSessionErrorPropertiesErrorJSON `json:"-"` - union EventListResponseEventSessionErrorPropertiesErrorUnion -} - -// eventListResponseEventSessionErrorPropertiesErrorJSON contains the JSON metadata -// for the struct [EventListResponseEventSessionErrorPropertiesError] -type eventListResponseEventSessionErrorPropertiesErrorJSON struct { - Data apijson.Field - Name apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r eventListResponseEventSessionErrorPropertiesErrorJSON) RawJSON() string { - return r.raw -} - -func (r *EventListResponseEventSessionErrorPropertiesError) UnmarshalJSON(data []byte) (err error) { - *r = EventListResponseEventSessionErrorPropertiesError{} - err = apijson.UnmarshalRoot(data, &r.union) - if err != nil { - return err - } - return apijson.Port(r.union, &r) -} - -// AsUnion returns a [EventListResponseEventSessionErrorPropertiesErrorUnion] -// interface which you can cast to the specific types for more type safety. -// -// Possible runtime types of the union are [shared.ProviderAuthError], -// [shared.UnknownError], -// [EventListResponseEventSessionErrorPropertiesErrorMessageOutputLengthError], -// [shared.MessageAbortedError]. -func (r EventListResponseEventSessionErrorPropertiesError) AsUnion() EventListResponseEventSessionErrorPropertiesErrorUnion { - return r.union -} - -// Union satisfied by [shared.ProviderAuthError], [shared.UnknownError], -// [EventListResponseEventSessionErrorPropertiesErrorMessageOutputLengthError] or -// [shared.MessageAbortedError]. 
-type EventListResponseEventSessionErrorPropertiesErrorUnion interface { - ImplementsEventListResponseEventSessionErrorPropertiesError() -} - -func init() { - apijson.RegisterUnion( - reflect.TypeOf((*EventListResponseEventSessionErrorPropertiesErrorUnion)(nil)).Elem(), - "name", - apijson.UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(shared.ProviderAuthError{}), - DiscriminatorValue: "ProviderAuthError", - }, - apijson.UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(shared.UnknownError{}), - DiscriminatorValue: "UnknownError", - }, - apijson.UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(EventListResponseEventSessionErrorPropertiesErrorMessageOutputLengthError{}), - DiscriminatorValue: "MessageOutputLengthError", - }, - apijson.UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(shared.MessageAbortedError{}), - DiscriminatorValue: "MessageAbortedError", - }, - ) -} - -type EventListResponseEventSessionErrorPropertiesErrorMessageOutputLengthError struct { - Data interface{} `json:"data,required"` - Name EventListResponseEventSessionErrorPropertiesErrorMessageOutputLengthErrorName `json:"name,required"` - JSON eventListResponseEventSessionErrorPropertiesErrorMessageOutputLengthErrorJSON `json:"-"` -} - -// eventListResponseEventSessionErrorPropertiesErrorMessageOutputLengthErrorJSON -// contains the JSON metadata for the struct -// [EventListResponseEventSessionErrorPropertiesErrorMessageOutputLengthError] -type eventListResponseEventSessionErrorPropertiesErrorMessageOutputLengthErrorJSON struct { - Data apijson.Field - Name apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *EventListResponseEventSessionErrorPropertiesErrorMessageOutputLengthError) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r eventListResponseEventSessionErrorPropertiesErrorMessageOutputLengthErrorJSON) RawJSON() string { - return r.raw -} - -func (r EventListResponseEventSessionErrorPropertiesErrorMessageOutputLengthError) ImplementsEventListResponseEventSessionErrorPropertiesError() { -} - -type EventListResponseEventSessionErrorPropertiesErrorMessageOutputLengthErrorName string - -const ( - EventListResponseEventSessionErrorPropertiesErrorMessageOutputLengthErrorNameMessageOutputLengthError EventListResponseEventSessionErrorPropertiesErrorMessageOutputLengthErrorName = "MessageOutputLengthError" -) - -func (r EventListResponseEventSessionErrorPropertiesErrorMessageOutputLengthErrorName) IsKnown() bool { - switch r { - case EventListResponseEventSessionErrorPropertiesErrorMessageOutputLengthErrorNameMessageOutputLengthError: - return true - } - return false -} - -type EventListResponseEventSessionErrorPropertiesErrorName string - -const ( - EventListResponseEventSessionErrorPropertiesErrorNameProviderAuthError EventListResponseEventSessionErrorPropertiesErrorName = "ProviderAuthError" - EventListResponseEventSessionErrorPropertiesErrorNameUnknownError EventListResponseEventSessionErrorPropertiesErrorName = "UnknownError" - EventListResponseEventSessionErrorPropertiesErrorNameMessageOutputLengthError EventListResponseEventSessionErrorPropertiesErrorName = "MessageOutputLengthError" - EventListResponseEventSessionErrorPropertiesErrorNameMessageAbortedError EventListResponseEventSessionErrorPropertiesErrorName = "MessageAbortedError" -) - -func (r EventListResponseEventSessionErrorPropertiesErrorName) IsKnown() bool { - switch r { - case 
EventListResponseEventSessionErrorPropertiesErrorNameProviderAuthError, EventListResponseEventSessionErrorPropertiesErrorNameUnknownError, EventListResponseEventSessionErrorPropertiesErrorNameMessageOutputLengthError, EventListResponseEventSessionErrorPropertiesErrorNameMessageAbortedError: - return true - } - return false -} - -type EventListResponseEventSessionErrorType string - -const ( - EventListResponseEventSessionErrorTypeSessionError EventListResponseEventSessionErrorType = "session.error" -) - -func (r EventListResponseEventSessionErrorType) IsKnown() bool { - switch r { - case EventListResponseEventSessionErrorTypeSessionError: - return true - } - return false -} - -type EventListResponseEventFileWatcherUpdated struct { - Properties EventListResponseEventFileWatcherUpdatedProperties `json:"properties,required"` - Type EventListResponseEventFileWatcherUpdatedType `json:"type,required"` - JSON eventListResponseEventFileWatcherUpdatedJSON `json:"-"` -} - -// eventListResponseEventFileWatcherUpdatedJSON contains the JSON metadata for the -// struct [EventListResponseEventFileWatcherUpdated] -type eventListResponseEventFileWatcherUpdatedJSON struct { - Properties apijson.Field - Type apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *EventListResponseEventFileWatcherUpdated) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r eventListResponseEventFileWatcherUpdatedJSON) RawJSON() string { - return r.raw -} - -func (r EventListResponseEventFileWatcherUpdated) implementsEventListResponse() {} - -type EventListResponseEventFileWatcherUpdatedProperties struct { - Event EventListResponseEventFileWatcherUpdatedPropertiesEvent `json:"event,required"` - File string `json:"file,required"` - JSON eventListResponseEventFileWatcherUpdatedPropertiesJSON `json:"-"` -} - -// eventListResponseEventFileWatcherUpdatedPropertiesJSON contains the JSON -// metadata for the struct [EventListResponseEventFileWatcherUpdatedProperties] -type eventListResponseEventFileWatcherUpdatedPropertiesJSON struct { - Event apijson.Field - File apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *EventListResponseEventFileWatcherUpdatedProperties) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r eventListResponseEventFileWatcherUpdatedPropertiesJSON) RawJSON() string { - return r.raw -} - -type EventListResponseEventFileWatcherUpdatedPropertiesEvent string - -const ( - EventListResponseEventFileWatcherUpdatedPropertiesEventRename EventListResponseEventFileWatcherUpdatedPropertiesEvent = "rename" - EventListResponseEventFileWatcherUpdatedPropertiesEventChange EventListResponseEventFileWatcherUpdatedPropertiesEvent = "change" -) - -func (r EventListResponseEventFileWatcherUpdatedPropertiesEvent) IsKnown() bool { - switch r { - case EventListResponseEventFileWatcherUpdatedPropertiesEventRename, EventListResponseEventFileWatcherUpdatedPropertiesEventChange: - return true - } - return false -} - -type EventListResponseEventFileWatcherUpdatedType string - -const ( - EventListResponseEventFileWatcherUpdatedTypeFileWatcherUpdated EventListResponseEventFileWatcherUpdatedType = "file.watcher.updated" -) - -func (r EventListResponseEventFileWatcherUpdatedType) IsKnown() bool { - switch r { - case EventListResponseEventFileWatcherUpdatedTypeFileWatcherUpdated: - return true - } - return false -} - -type EventListResponseType string - -const ( - EventListResponseTypeLspClientDiagnostics 
EventListResponseType = "lsp.client.diagnostics" - EventListResponseTypePermissionUpdated EventListResponseType = "permission.updated" - EventListResponseTypeFileEdited EventListResponseType = "file.edited" - EventListResponseTypeInstallationUpdated EventListResponseType = "installation.updated" - EventListResponseTypeMessageUpdated EventListResponseType = "message.updated" - EventListResponseTypeMessageRemoved EventListResponseType = "message.removed" - EventListResponseTypeMessagePartUpdated EventListResponseType = "message.part.updated" - EventListResponseTypeStorageWrite EventListResponseType = "storage.write" - EventListResponseTypeSessionUpdated EventListResponseType = "session.updated" - EventListResponseTypeSessionDeleted EventListResponseType = "session.deleted" - EventListResponseTypeSessionIdle EventListResponseType = "session.idle" - EventListResponseTypeSessionError EventListResponseType = "session.error" - EventListResponseTypeFileWatcherUpdated EventListResponseType = "file.watcher.updated" -) - -func (r EventListResponseType) IsKnown() bool { - switch r { - case EventListResponseTypeLspClientDiagnostics, EventListResponseTypePermissionUpdated, EventListResponseTypeFileEdited, EventListResponseTypeInstallationUpdated, EventListResponseTypeMessageUpdated, EventListResponseTypeMessageRemoved, EventListResponseTypeMessagePartUpdated, EventListResponseTypeStorageWrite, EventListResponseTypeSessionUpdated, EventListResponseTypeSessionDeleted, EventListResponseTypeSessionIdle, EventListResponseTypeSessionError, EventListResponseTypeFileWatcherUpdated: - return true - } - return false -} diff --git a/packages/tui/sdk/examples/.keep b/packages/tui/sdk/examples/.keep deleted file mode 100644 index d8c73e93..00000000 --- a/packages/tui/sdk/examples/.keep +++ /dev/null @@ -1,4 +0,0 @@ -File generated from our OpenAPI spec by Stainless. - -This directory can be used to store example files demonstrating usage of this SDK. -It is ignored by Stainless code generation and its content (other than this keep file) won't be touched. \ No newline at end of file diff --git a/packages/tui/sdk/field.go b/packages/tui/sdk/field.go deleted file mode 100644 index 56d2f890..00000000 --- a/packages/tui/sdk/field.go +++ /dev/null @@ -1,50 +0,0 @@ -package opencode - -import ( - "github.com/sst/opencode-sdk-go/internal/param" - "io" -) - -// F is a param field helper used to initialize a [param.Field] generic struct. -// This helps specify null, zero values, and overrides, as well as normal values. -// You can read more about this in our [README]. -// -// [README]: https://pkg.go.dev/github.com/sst/opencode-sdk-go#readme-request-fields -func F[T any](value T) param.Field[T] { return param.Field[T]{Value: value, Present: true} } - -// Null is a param field helper which explicitly sends null to the API. -func Null[T any]() param.Field[T] { return param.Field[T]{Null: true, Present: true} } - -// Raw is a param field helper for specifying values for fields when the -// type you are looking to send is different from the type that is specified in -// the SDK. For example, if the type of the field is an integer, but you want -// to send a float, you could do that by setting the corresponding field with -// Raw[int](0.5). -func Raw[T any](value any) param.Field[T] { return param.Field[T]{Raw: value, Present: true} } - -// Int is a param field helper which helps specify integers. This is -// particularly helpful when specifying integer constants for fields. 
-func Int(value int64) param.Field[int64] { return F(value) } - -// String is a param field helper which helps specify strings. -func String(value string) param.Field[string] { return F(value) } - -// Float is a param field helper which helps specify floats. -func Float(value float64) param.Field[float64] { return F(value) } - -// Bool is a param field helper which helps specify bools. -func Bool(value bool) param.Field[bool] { return F(value) } - -// FileParam is a param field helper which helps files with a mime content-type. -func FileParam(reader io.Reader, filename string, contentType string) param.Field[io.Reader] { - return F[io.Reader](&file{reader, filename, contentType}) -} - -type file struct { - io.Reader - name string - contentType string -} - -func (f *file) ContentType() string { return f.contentType } -func (f *file) Filename() string { return f.name } diff --git a/packages/tui/sdk/file.go b/packages/tui/sdk/file.go deleted file mode 100644 index 0a8a4b2b..00000000 --- a/packages/tui/sdk/file.go +++ /dev/null @@ -1,142 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -package opencode - -import ( - "context" - "net/http" - "net/url" - - "github.com/sst/opencode-sdk-go/internal/apijson" - "github.com/sst/opencode-sdk-go/internal/apiquery" - "github.com/sst/opencode-sdk-go/internal/param" - "github.com/sst/opencode-sdk-go/internal/requestconfig" - "github.com/sst/opencode-sdk-go/option" -) - -// FileService contains methods and other services that help with interacting with -// the opencode API. -// -// Note, unlike clients, this service does not read variables from the environment -// automatically. You should not instantiate this service directly, and instead use -// the [NewFileService] method instead. -type FileService struct { - Options []option.RequestOption -} - -// NewFileService generates a new service that applies the given options to each -// request. These options are applied after the parent client's options (if there -// is one), and before any request-specific options. -func NewFileService(opts ...option.RequestOption) (r *FileService) { - r = &FileService{} - r.Options = opts - return -} - -// Read a file -func (r *FileService) Read(ctx context.Context, query FileReadParams, opts ...option.RequestOption) (res *FileReadResponse, err error) { - opts = append(r.Options[:], opts...) - path := "file" - err = requestconfig.ExecuteNewRequest(ctx, http.MethodGet, path, query, &res, opts...) - return -} - -// Get file status -func (r *FileService) Status(ctx context.Context, opts ...option.RequestOption) (res *[]File, err error) { - opts = append(r.Options[:], opts...) - path := "file/status" - err = requestconfig.ExecuteNewRequest(ctx, http.MethodGet, path, nil, &res, opts...) 
- return -} - -type File struct { - Added int64 `json:"added,required"` - Path string `json:"path,required"` - Removed int64 `json:"removed,required"` - Status FileStatus `json:"status,required"` - JSON fileJSON `json:"-"` -} - -// fileJSON contains the JSON metadata for the struct [File] -type fileJSON struct { - Added apijson.Field - Path apijson.Field - Removed apijson.Field - Status apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *File) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r fileJSON) RawJSON() string { - return r.raw -} - -type FileStatus string - -const ( - FileStatusAdded FileStatus = "added" - FileStatusDeleted FileStatus = "deleted" - FileStatusModified FileStatus = "modified" -) - -func (r FileStatus) IsKnown() bool { - switch r { - case FileStatusAdded, FileStatusDeleted, FileStatusModified: - return true - } - return false -} - -type FileReadResponse struct { - Content string `json:"content,required"` - Type FileReadResponseType `json:"type,required"` - JSON fileReadResponseJSON `json:"-"` -} - -// fileReadResponseJSON contains the JSON metadata for the struct -// [FileReadResponse] -type fileReadResponseJSON struct { - Content apijson.Field - Type apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *FileReadResponse) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r fileReadResponseJSON) RawJSON() string { - return r.raw -} - -type FileReadResponseType string - -const ( - FileReadResponseTypeRaw FileReadResponseType = "raw" - FileReadResponseTypePatch FileReadResponseType = "patch" -) - -func (r FileReadResponseType) IsKnown() bool { - switch r { - case FileReadResponseTypeRaw, FileReadResponseTypePatch: - return true - } - return false -} - -type FileReadParams struct { - Path param.Field[string] `query:"path,required"` -} - -// URLQuery serializes [FileReadParams]'s query parameters as `url.Values`. -func (r FileReadParams) URLQuery() (v url.Values) { - return apiquery.MarshalWithSettings(r, apiquery.QuerySettings{ - ArrayFormat: apiquery.ArrayQueryFormatComma, - NestedFormat: apiquery.NestedQueryFormatBrackets, - }) -} diff --git a/packages/tui/sdk/file_test.go b/packages/tui/sdk/file_test.go deleted file mode 100644 index 60212ea2..00000000 --- a/packages/tui/sdk/file_test.go +++ /dev/null @@ -1,60 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
- -package opencode_test - -import ( - "context" - "errors" - "os" - "testing" - - "github.com/sst/opencode-sdk-go" - "github.com/sst/opencode-sdk-go/internal/testutil" - "github.com/sst/opencode-sdk-go/option" -) - -func TestFileRead(t *testing.T) { - t.Skip("skipped: tests are disabled for the time being") - baseURL := "http://localhost:4010" - if envURL, ok := os.LookupEnv("TEST_API_BASE_URL"); ok { - baseURL = envURL - } - if !testutil.CheckTestServer(t, baseURL) { - return - } - client := opencode.NewClient( - option.WithBaseURL(baseURL), - ) - _, err := client.File.Read(context.TODO(), opencode.FileReadParams{ - Path: opencode.F("path"), - }) - if err != nil { - var apierr *opencode.Error - if errors.As(err, &apierr) { - t.Log(string(apierr.DumpRequest(true))) - } - t.Fatalf("err should be nil: %s", err.Error()) - } -} - -func TestFileStatus(t *testing.T) { - t.Skip("skipped: tests are disabled for the time being") - baseURL := "http://localhost:4010" - if envURL, ok := os.LookupEnv("TEST_API_BASE_URL"); ok { - baseURL = envURL - } - if !testutil.CheckTestServer(t, baseURL) { - return - } - client := opencode.NewClient( - option.WithBaseURL(baseURL), - ) - _, err := client.File.Status(context.TODO()) - if err != nil { - var apierr *opencode.Error - if errors.As(err, &apierr) { - t.Log(string(apierr.DumpRequest(true))) - } - t.Fatalf("err should be nil: %s", err.Error()) - } -} diff --git a/packages/tui/sdk/find.go b/packages/tui/sdk/find.go deleted file mode 100644 index 10f92fc0..00000000 --- a/packages/tui/sdk/find.go +++ /dev/null @@ -1,322 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -package opencode - -import ( - "context" - "net/http" - "net/url" - - "github.com/sst/opencode-sdk-go/internal/apijson" - "github.com/sst/opencode-sdk-go/internal/apiquery" - "github.com/sst/opencode-sdk-go/internal/param" - "github.com/sst/opencode-sdk-go/internal/requestconfig" - "github.com/sst/opencode-sdk-go/option" -) - -// FindService contains methods and other services that help with interacting with -// the opencode API. -// -// Note, unlike clients, this service does not read variables from the environment -// automatically. You should not instantiate this service directly, and instead use -// the [NewFindService] method instead. -type FindService struct { - Options []option.RequestOption -} - -// NewFindService generates a new service that applies the given options to each -// request. These options are applied after the parent client's options (if there -// is one), and before any request-specific options. -func NewFindService(opts ...option.RequestOption) (r *FindService) { - r = &FindService{} - r.Options = opts - return -} - -// Find files -func (r *FindService) Files(ctx context.Context, query FindFilesParams, opts ...option.RequestOption) (res *[]string, err error) { - opts = append(r.Options[:], opts...) - path := "find/file" - err = requestconfig.ExecuteNewRequest(ctx, http.MethodGet, path, query, &res, opts...) - return -} - -// Find workspace symbols -func (r *FindService) Symbols(ctx context.Context, query FindSymbolsParams, opts ...option.RequestOption) (res *[]Symbol, err error) { - opts = append(r.Options[:], opts...) - path := "find/symbol" - err = requestconfig.ExecuteNewRequest(ctx, http.MethodGet, path, query, &res, opts...) 
- return -} - -// Find text in files -func (r *FindService) Text(ctx context.Context, query FindTextParams, opts ...option.RequestOption) (res *[]Match, err error) { - opts = append(r.Options[:], opts...) - path := "find" - err = requestconfig.ExecuteNewRequest(ctx, http.MethodGet, path, query, &res, opts...) - return -} - -type Match struct { - AbsoluteOffset float64 `json:"absolute_offset,required"` - LineNumber float64 `json:"line_number,required"` - Lines MatchLines `json:"lines,required"` - Path MatchPath `json:"path,required"` - Submatches []MatchSubmatch `json:"submatches,required"` - JSON matchJSON `json:"-"` -} - -// matchJSON contains the JSON metadata for the struct [Match] -type matchJSON struct { - AbsoluteOffset apijson.Field - LineNumber apijson.Field - Lines apijson.Field - Path apijson.Field - Submatches apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *Match) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r matchJSON) RawJSON() string { - return r.raw -} - -type MatchLines struct { - Text string `json:"text,required"` - JSON matchLinesJSON `json:"-"` -} - -// matchLinesJSON contains the JSON metadata for the struct [MatchLines] -type matchLinesJSON struct { - Text apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *MatchLines) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r matchLinesJSON) RawJSON() string { - return r.raw -} - -type MatchPath struct { - Text string `json:"text,required"` - JSON matchPathJSON `json:"-"` -} - -// matchPathJSON contains the JSON metadata for the struct [MatchPath] -type matchPathJSON struct { - Text apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *MatchPath) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r matchPathJSON) RawJSON() string { - return r.raw -} - -type MatchSubmatch struct { - End float64 `json:"end,required"` - Match MatchSubmatchesMatch `json:"match,required"` - Start float64 `json:"start,required"` - JSON matchSubmatchJSON `json:"-"` -} - -// matchSubmatchJSON contains the JSON metadata for the struct [MatchSubmatch] -type matchSubmatchJSON struct { - End apijson.Field - Match apijson.Field - Start apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *MatchSubmatch) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r matchSubmatchJSON) RawJSON() string { - return r.raw -} - -type MatchSubmatchesMatch struct { - Text string `json:"text,required"` - JSON matchSubmatchesMatchJSON `json:"-"` -} - -// matchSubmatchesMatchJSON contains the JSON metadata for the struct -// [MatchSubmatchesMatch] -type matchSubmatchesMatchJSON struct { - Text apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *MatchSubmatchesMatch) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r matchSubmatchesMatchJSON) RawJSON() string { - return r.raw -} - -type Symbol struct { - Kind float64 `json:"kind,required"` - Location SymbolLocation `json:"location,required"` - Name string `json:"name,required"` - JSON symbolJSON `json:"-"` -} - -// symbolJSON contains the JSON metadata for the struct [Symbol] -type symbolJSON struct { - Kind apijson.Field - Location apijson.Field - Name apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *Symbol) UnmarshalJSON(data []byte) 
(err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r symbolJSON) RawJSON() string { - return r.raw -} - -type SymbolLocation struct { - Range SymbolLocationRange `json:"range,required"` - Uri string `json:"uri,required"` - JSON symbolLocationJSON `json:"-"` -} - -// symbolLocationJSON contains the JSON metadata for the struct [SymbolLocation] -type symbolLocationJSON struct { - Range apijson.Field - Uri apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *SymbolLocation) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r symbolLocationJSON) RawJSON() string { - return r.raw -} - -type SymbolLocationRange struct { - End SymbolLocationRangeEnd `json:"end,required"` - Start SymbolLocationRangeStart `json:"start,required"` - JSON symbolLocationRangeJSON `json:"-"` -} - -// symbolLocationRangeJSON contains the JSON metadata for the struct -// [SymbolLocationRange] -type symbolLocationRangeJSON struct { - End apijson.Field - Start apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *SymbolLocationRange) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r symbolLocationRangeJSON) RawJSON() string { - return r.raw -} - -type SymbolLocationRangeEnd struct { - Character float64 `json:"character,required"` - Line float64 `json:"line,required"` - JSON symbolLocationRangeEndJSON `json:"-"` -} - -// symbolLocationRangeEndJSON contains the JSON metadata for the struct -// [SymbolLocationRangeEnd] -type symbolLocationRangeEndJSON struct { - Character apijson.Field - Line apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *SymbolLocationRangeEnd) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r symbolLocationRangeEndJSON) RawJSON() string { - return r.raw -} - -type SymbolLocationRangeStart struct { - Character float64 `json:"character,required"` - Line float64 `json:"line,required"` - JSON symbolLocationRangeStartJSON `json:"-"` -} - -// symbolLocationRangeStartJSON contains the JSON metadata for the struct -// [SymbolLocationRangeStart] -type symbolLocationRangeStartJSON struct { - Character apijson.Field - Line apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *SymbolLocationRangeStart) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r symbolLocationRangeStartJSON) RawJSON() string { - return r.raw -} - -type FindFilesParams struct { - Query param.Field[string] `query:"query,required"` -} - -// URLQuery serializes [FindFilesParams]'s query parameters as `url.Values`. -func (r FindFilesParams) URLQuery() (v url.Values) { - return apiquery.MarshalWithSettings(r, apiquery.QuerySettings{ - ArrayFormat: apiquery.ArrayQueryFormatComma, - NestedFormat: apiquery.NestedQueryFormatBrackets, - }) -} - -type FindSymbolsParams struct { - Query param.Field[string] `query:"query,required"` -} - -// URLQuery serializes [FindSymbolsParams]'s query parameters as `url.Values`. -func (r FindSymbolsParams) URLQuery() (v url.Values) { - return apiquery.MarshalWithSettings(r, apiquery.QuerySettings{ - ArrayFormat: apiquery.ArrayQueryFormatComma, - NestedFormat: apiquery.NestedQueryFormatBrackets, - }) -} - -type FindTextParams struct { - Pattern param.Field[string] `query:"pattern,required"` -} - -// URLQuery serializes [FindTextParams]'s query parameters as `url.Values`. 
-func (r FindTextParams) URLQuery() (v url.Values) { - return apiquery.MarshalWithSettings(r, apiquery.QuerySettings{ - ArrayFormat: apiquery.ArrayQueryFormatComma, - NestedFormat: apiquery.NestedQueryFormatBrackets, - }) -} diff --git a/packages/tui/sdk/find_test.go b/packages/tui/sdk/find_test.go deleted file mode 100644 index e2f1caa1..00000000 --- a/packages/tui/sdk/find_test.go +++ /dev/null @@ -1,86 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -package opencode_test - -import ( - "context" - "errors" - "os" - "testing" - - "github.com/sst/opencode-sdk-go" - "github.com/sst/opencode-sdk-go/internal/testutil" - "github.com/sst/opencode-sdk-go/option" -) - -func TestFindFiles(t *testing.T) { - t.Skip("skipped: tests are disabled for the time being") - baseURL := "http://localhost:4010" - if envURL, ok := os.LookupEnv("TEST_API_BASE_URL"); ok { - baseURL = envURL - } - if !testutil.CheckTestServer(t, baseURL) { - return - } - client := opencode.NewClient( - option.WithBaseURL(baseURL), - ) - _, err := client.Find.Files(context.TODO(), opencode.FindFilesParams{ - Query: opencode.F("query"), - }) - if err != nil { - var apierr *opencode.Error - if errors.As(err, &apierr) { - t.Log(string(apierr.DumpRequest(true))) - } - t.Fatalf("err should be nil: %s", err.Error()) - } -} - -func TestFindSymbols(t *testing.T) { - t.Skip("skipped: tests are disabled for the time being") - baseURL := "http://localhost:4010" - if envURL, ok := os.LookupEnv("TEST_API_BASE_URL"); ok { - baseURL = envURL - } - if !testutil.CheckTestServer(t, baseURL) { - return - } - client := opencode.NewClient( - option.WithBaseURL(baseURL), - ) - _, err := client.Find.Symbols(context.TODO(), opencode.FindSymbolsParams{ - Query: opencode.F("query"), - }) - if err != nil { - var apierr *opencode.Error - if errors.As(err, &apierr) { - t.Log(string(apierr.DumpRequest(true))) - } - t.Fatalf("err should be nil: %s", err.Error()) - } -} - -func TestFindText(t *testing.T) { - t.Skip("skipped: tests are disabled for the time being") - baseURL := "http://localhost:4010" - if envURL, ok := os.LookupEnv("TEST_API_BASE_URL"); ok { - baseURL = envURL - } - if !testutil.CheckTestServer(t, baseURL) { - return - } - client := opencode.NewClient( - option.WithBaseURL(baseURL), - ) - _, err := client.Find.Text(context.TODO(), opencode.FindTextParams{ - Pattern: opencode.F("pattern"), - }) - if err != nil { - var apierr *opencode.Error - if errors.As(err, &apierr) { - t.Log(string(apierr.DumpRequest(true))) - } - t.Fatalf("err should be nil: %s", err.Error()) - } -} diff --git a/packages/tui/sdk/go.mod b/packages/tui/sdk/go.mod deleted file mode 100644 index 2817d301..00000000 --- a/packages/tui/sdk/go.mod +++ /dev/null @@ -1,13 +0,0 @@ -module github.com/sst/opencode-sdk-go - -go 1.21 - -require ( - github.com/tidwall/gjson v1.14.4 - github.com/tidwall/sjson v1.2.5 -) - -require ( - github.com/tidwall/match v1.1.1 // indirect - github.com/tidwall/pretty v1.2.1 // indirect -) diff --git a/packages/tui/sdk/go.sum b/packages/tui/sdk/go.sum deleted file mode 100644 index a70a5e0a..00000000 --- a/packages/tui/sdk/go.sum +++ /dev/null @@ -1,10 +0,0 @@ -github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= -github.com/tidwall/gjson v1.14.4 h1:uo0p8EbA09J7RQaflQ1aBRffTR7xedD2bcIVSYxLnkM= -github.com/tidwall/gjson v1.14.4/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= -github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= 
-github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= -github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= -github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4= -github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= -github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY= -github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28= diff --git a/packages/tui/sdk/internal/apierror/apierror.go b/packages/tui/sdk/internal/apierror/apierror.go deleted file mode 100644 index 24307fc3..00000000 --- a/packages/tui/sdk/internal/apierror/apierror.go +++ /dev/null @@ -1,53 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -package apierror - -import ( - "fmt" - "net/http" - "net/http/httputil" - - "github.com/sst/opencode-sdk-go/internal/apijson" -) - -// Error represents an error that originates from the API, i.e. when a request is -// made and the API returns a response with a HTTP status code. Other errors are -// not wrapped by this SDK. -type Error struct { - JSON errorJSON `json:"-"` - StatusCode int - Request *http.Request - Response *http.Response -} - -// errorJSON contains the JSON metadata for the struct [Error] -type errorJSON struct { - raw string - ExtraFields map[string]apijson.Field -} - -func (r *Error) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r errorJSON) RawJSON() string { - return r.raw -} - -func (r *Error) Error() string { - // Attempt to re-populate the response body - return fmt.Sprintf("%s \"%s\": %d %s %s", r.Request.Method, r.Request.URL, r.Response.StatusCode, http.StatusText(r.Response.StatusCode), r.JSON.RawJSON()) -} - -func (r *Error) DumpRequest(body bool) []byte { - if r.Request.GetBody != nil { - r.Request.Body, _ = r.Request.GetBody() - } - out, _ := httputil.DumpRequestOut(r.Request, body) - return out -} - -func (r *Error) DumpResponse(body bool) []byte { - out, _ := httputil.DumpResponse(r.Response, body) - return out -} diff --git a/packages/tui/sdk/internal/apiform/encoder.go b/packages/tui/sdk/internal/apiform/encoder.go deleted file mode 100644 index 243a1a12..00000000 --- a/packages/tui/sdk/internal/apiform/encoder.go +++ /dev/null @@ -1,383 +0,0 @@ -package apiform - -import ( - "fmt" - "io" - "mime/multipart" - "net/textproto" - "path" - "reflect" - "sort" - "strconv" - "strings" - "sync" - "time" - - "github.com/sst/opencode-sdk-go/internal/param" -) - -var encoders sync.Map // map[encoderEntry]encoderFunc - -func Marshal(value interface{}, writer *multipart.Writer) error { - e := &encoder{dateFormat: time.RFC3339} - return e.marshal(value, writer) -} - -func MarshalRoot(value interface{}, writer *multipart.Writer) error { - e := &encoder{root: true, dateFormat: time.RFC3339} - return e.marshal(value, writer) -} - -type encoder struct { - dateFormat string - root bool -} - -type encoderFunc func(key string, value reflect.Value, writer *multipart.Writer) error - -type encoderField struct { - tag parsedStructTag - fn encoderFunc - idx []int -} - -type encoderEntry struct { - reflect.Type - dateFormat string - root bool -} - -func (e *encoder) marshal(value interface{}, writer *multipart.Writer) error { - val := reflect.ValueOf(value) - if !val.IsValid() { - return nil - } - typ := val.Type() - enc := e.typeEncoder(typ) - return enc("", val, writer) -} - -func (e *encoder) 
typeEncoder(t reflect.Type) encoderFunc { - entry := encoderEntry{ - Type: t, - dateFormat: e.dateFormat, - root: e.root, - } - - if fi, ok := encoders.Load(entry); ok { - return fi.(encoderFunc) - } - - // To deal with recursive types, populate the map with an - // indirect func before we build it. This type waits on the - // real func (f) to be ready and then calls it. This indirect - // func is only used for recursive types. - var ( - wg sync.WaitGroup - f encoderFunc - ) - wg.Add(1) - fi, loaded := encoders.LoadOrStore(entry, encoderFunc(func(key string, v reflect.Value, writer *multipart.Writer) error { - wg.Wait() - return f(key, v, writer) - })) - if loaded { - return fi.(encoderFunc) - } - - // Compute the real encoder and replace the indirect func with it. - f = e.newTypeEncoder(t) - wg.Done() - encoders.Store(entry, f) - return f -} - -func (e *encoder) newTypeEncoder(t reflect.Type) encoderFunc { - if t.ConvertibleTo(reflect.TypeOf(time.Time{})) { - return e.newTimeTypeEncoder() - } - if t.ConvertibleTo(reflect.TypeOf((*io.Reader)(nil)).Elem()) { - return e.newReaderTypeEncoder() - } - e.root = false - switch t.Kind() { - case reflect.Pointer: - inner := t.Elem() - - innerEncoder := e.typeEncoder(inner) - return func(key string, v reflect.Value, writer *multipart.Writer) error { - if !v.IsValid() || v.IsNil() { - return nil - } - return innerEncoder(key, v.Elem(), writer) - } - case reflect.Struct: - return e.newStructTypeEncoder(t) - case reflect.Slice, reflect.Array: - return e.newArrayTypeEncoder(t) - case reflect.Map: - return e.newMapEncoder(t) - case reflect.Interface: - return e.newInterfaceEncoder() - default: - return e.newPrimitiveTypeEncoder(t) - } -} - -func (e *encoder) newPrimitiveTypeEncoder(t reflect.Type) encoderFunc { - switch t.Kind() { - // Note that we could use `gjson` to encode these types but it would complicate our - // code more and this current code shouldn't cause any issues - case reflect.String: - return func(key string, v reflect.Value, writer *multipart.Writer) error { - return writer.WriteField(key, v.String()) - } - case reflect.Bool: - return func(key string, v reflect.Value, writer *multipart.Writer) error { - if v.Bool() { - return writer.WriteField(key, "true") - } - return writer.WriteField(key, "false") - } - case reflect.Int, reflect.Int16, reflect.Int32, reflect.Int64: - return func(key string, v reflect.Value, writer *multipart.Writer) error { - return writer.WriteField(key, strconv.FormatInt(v.Int(), 10)) - } - case reflect.Uint, reflect.Uint16, reflect.Uint32, reflect.Uint64: - return func(key string, v reflect.Value, writer *multipart.Writer) error { - return writer.WriteField(key, strconv.FormatUint(v.Uint(), 10)) - } - case reflect.Float32: - return func(key string, v reflect.Value, writer *multipart.Writer) error { - return writer.WriteField(key, strconv.FormatFloat(v.Float(), 'f', -1, 32)) - } - case reflect.Float64: - return func(key string, v reflect.Value, writer *multipart.Writer) error { - return writer.WriteField(key, strconv.FormatFloat(v.Float(), 'f', -1, 64)) - } - default: - return func(key string, v reflect.Value, writer *multipart.Writer) error { - return fmt.Errorf("unknown type received at primitive encoder: %s", t.String()) - } - } -} - -func (e *encoder) newArrayTypeEncoder(t reflect.Type) encoderFunc { - itemEncoder := e.typeEncoder(t.Elem()) - - return func(key string, v reflect.Value, writer *multipart.Writer) error { - if key != "" { - key = key + "." 
- } - for i := 0; i < v.Len(); i++ { - err := itemEncoder(key+strconv.Itoa(i), v.Index(i), writer) - if err != nil { - return err - } - } - return nil - } -} - -func (e *encoder) newStructTypeEncoder(t reflect.Type) encoderFunc { - if t.Implements(reflect.TypeOf((*param.FieldLike)(nil)).Elem()) { - return e.newFieldTypeEncoder(t) - } - - encoderFields := []encoderField{} - extraEncoder := (*encoderField)(nil) - - // This helper allows us to recursively collect field encoders into a flat - // array. The parameter `index` keeps track of the access patterns necessary - // to get to some field. - var collectEncoderFields func(r reflect.Type, index []int) - collectEncoderFields = func(r reflect.Type, index []int) { - for i := 0; i < r.NumField(); i++ { - idx := append(index, i) - field := t.FieldByIndex(idx) - if !field.IsExported() { - continue - } - // If this is an embedded struct, traverse one level deeper to extract - // the field and get their encoders as well. - if field.Anonymous { - collectEncoderFields(field.Type, idx) - continue - } - // If json tag is not present, then we skip, which is intentionally - // different behavior from the stdlib. - ptag, ok := parseFormStructTag(field) - if !ok { - continue - } - // We only want to support unexported field if they're tagged with - // `extras` because that field shouldn't be part of the public API. We - // also want to only keep the top level extras - if ptag.extras && len(index) == 0 { - extraEncoder = &encoderField{ptag, e.typeEncoder(field.Type.Elem()), idx} - continue - } - if ptag.name == "-" { - continue - } - - dateFormat, ok := parseFormatStructTag(field) - oldFormat := e.dateFormat - if ok { - switch dateFormat { - case "date-time": - e.dateFormat = time.RFC3339 - case "date": - e.dateFormat = "2006-01-02" - } - } - encoderFields = append(encoderFields, encoderField{ptag, e.typeEncoder(field.Type), idx}) - e.dateFormat = oldFormat - } - } - collectEncoderFields(t, []int{}) - - // Ensure deterministic output by sorting by lexicographic order - sort.Slice(encoderFields, func(i, j int) bool { - return encoderFields[i].tag.name < encoderFields[j].tag.name - }) - - return func(key string, value reflect.Value, writer *multipart.Writer) error { - if key != "" { - key = key + "." 
- } - - for _, ef := range encoderFields { - field := value.FieldByIndex(ef.idx) - err := ef.fn(key+ef.tag.name, field, writer) - if err != nil { - return err - } - } - - if extraEncoder != nil { - err := e.encodeMapEntries(key, value.FieldByIndex(extraEncoder.idx), writer) - if err != nil { - return err - } - } - - return nil - } -} - -func (e *encoder) newFieldTypeEncoder(t reflect.Type) encoderFunc { - f, _ := t.FieldByName("Value") - enc := e.typeEncoder(f.Type) - - return func(key string, value reflect.Value, writer *multipart.Writer) error { - present := value.FieldByName("Present") - if !present.Bool() { - return nil - } - null := value.FieldByName("Null") - if null.Bool() { - return nil - } - raw := value.FieldByName("Raw") - if !raw.IsNil() { - return e.typeEncoder(raw.Type())(key, raw, writer) - } - return enc(key, value.FieldByName("Value"), writer) - } -} - -func (e *encoder) newTimeTypeEncoder() encoderFunc { - format := e.dateFormat - return func(key string, value reflect.Value, writer *multipart.Writer) error { - return writer.WriteField(key, value.Convert(reflect.TypeOf(time.Time{})).Interface().(time.Time).Format(format)) - } -} - -func (e encoder) newInterfaceEncoder() encoderFunc { - return func(key string, value reflect.Value, writer *multipart.Writer) error { - value = value.Elem() - if !value.IsValid() { - return nil - } - return e.typeEncoder(value.Type())(key, value, writer) - } -} - -var quoteEscaper = strings.NewReplacer("\\", "\\\\", `"`, "\\\"") - -func escapeQuotes(s string) string { - return quoteEscaper.Replace(s) -} - -func (e *encoder) newReaderTypeEncoder() encoderFunc { - return func(key string, value reflect.Value, writer *multipart.Writer) error { - reader := value.Convert(reflect.TypeOf((*io.Reader)(nil)).Elem()).Interface().(io.Reader) - filename := "anonymous_file" - contentType := "application/octet-stream" - if named, ok := reader.(interface{ Filename() string }); ok { - filename = named.Filename() - } else if named, ok := reader.(interface{ Name() string }); ok { - filename = path.Base(named.Name()) - } - if typed, ok := reader.(interface{ ContentType() string }); ok { - contentType = typed.ContentType() - } - - // Below is taken almost 1-for-1 from [multipart.CreateFormFile] - h := make(textproto.MIMEHeader) - h.Set("Content-Disposition", fmt.Sprintf(`form-data; name="%s"; filename="%s"`, escapeQuotes(key), escapeQuotes(filename))) - h.Set("Content-Type", contentType) - filewriter, err := writer.CreatePart(h) - if err != nil { - return err - } - _, err = io.Copy(filewriter, reader) - return err - } -} - -// Given a []byte of json (may either be an empty object or an object that already contains entries) -// encode all of the entries in the map to the json byte array. -func (e *encoder) encodeMapEntries(key string, v reflect.Value, writer *multipart.Writer) error { - type mapPair struct { - key string - value reflect.Value - } - - if key != "" { - key = key + "." 
- } - - pairs := []mapPair{} - - iter := v.MapRange() - for iter.Next() { - if iter.Key().Type().Kind() == reflect.String { - pairs = append(pairs, mapPair{key: iter.Key().String(), value: iter.Value()}) - } else { - return fmt.Errorf("cannot encode a map with a non string key") - } - } - - // Ensure deterministic output - sort.Slice(pairs, func(i, j int) bool { - return pairs[i].key < pairs[j].key - }) - - elementEncoder := e.typeEncoder(v.Type().Elem()) - for _, p := range pairs { - err := elementEncoder(key+string(p.key), p.value, writer) - if err != nil { - return err - } - } - - return nil -} - -func (e *encoder) newMapEncoder(t reflect.Type) encoderFunc { - return func(key string, value reflect.Value, writer *multipart.Writer) error { - return e.encodeMapEntries(key, value, writer) - } -} diff --git a/packages/tui/sdk/internal/apiform/form.go b/packages/tui/sdk/internal/apiform/form.go deleted file mode 100644 index 5445116e..00000000 --- a/packages/tui/sdk/internal/apiform/form.go +++ /dev/null @@ -1,5 +0,0 @@ -package apiform - -type Marshaler interface { - MarshalMultipart() ([]byte, string, error) -} diff --git a/packages/tui/sdk/internal/apiform/form_test.go b/packages/tui/sdk/internal/apiform/form_test.go deleted file mode 100644 index 39d1460c..00000000 --- a/packages/tui/sdk/internal/apiform/form_test.go +++ /dev/null @@ -1,440 +0,0 @@ -package apiform - -import ( - "bytes" - "mime/multipart" - "strings" - "testing" - "time" -) - -func P[T any](v T) *T { return &v } - -type Primitives struct { - A bool `form:"a"` - B int `form:"b"` - C uint `form:"c"` - D float64 `form:"d"` - E float32 `form:"e"` - F []int `form:"f"` -} - -type PrimitivePointers struct { - A *bool `form:"a"` - B *int `form:"b"` - C *uint `form:"c"` - D *float64 `form:"d"` - E *float32 `form:"e"` - F *[]int `form:"f"` -} - -type Slices struct { - Slice []Primitives `form:"slices"` -} - -type DateTime struct { - Date time.Time `form:"date" format:"date"` - DateTime time.Time `form:"date-time" format:"date-time"` -} - -type AdditionalProperties struct { - A bool `form:"a"` - Extras map[string]interface{} `form:"-,extras"` -} - -type TypedAdditionalProperties struct { - A bool `form:"a"` - Extras map[string]int `form:"-,extras"` -} - -type EmbeddedStructs struct { - AdditionalProperties - A *int `form:"number2"` - Extras map[string]interface{} `form:"-,extras"` -} - -type Recursive struct { - Name string `form:"name"` - Child *Recursive `form:"child"` -} - -type UnknownStruct struct { - Unknown interface{} `form:"unknown"` -} - -type UnionStruct struct { - Union Union `form:"union" format:"date"` -} - -type Union interface { - union() -} - -type UnionInteger int64 - -func (UnionInteger) union() {} - -type UnionStructA struct { - Type string `form:"type"` - A string `form:"a"` - B string `form:"b"` -} - -func (UnionStructA) union() {} - -type UnionStructB struct { - Type string `form:"type"` - A string `form:"a"` -} - -func (UnionStructB) union() {} - -type UnionTime time.Time - -func (UnionTime) union() {} - -type ReaderStruct struct { -} - -var tests = map[string]struct { - buf string - val interface{} -}{ - "map_string": { - `--xxx -Content-Disposition: form-data; name="foo" - -bar ---xxx-- -`, - map[string]string{"foo": "bar"}, - }, - - "map_interface": { - `--xxx -Content-Disposition: form-data; name="a" - -1 ---xxx -Content-Disposition: form-data; name="b" - -str ---xxx -Content-Disposition: form-data; name="c" - -false ---xxx-- -`, - map[string]interface{}{"a": float64(1), "b": "str", "c": false}, - }, - - 
"primitive_struct": { - `--xxx -Content-Disposition: form-data; name="a" - -false ---xxx -Content-Disposition: form-data; name="b" - -237628372683 ---xxx -Content-Disposition: form-data; name="c" - -654 ---xxx -Content-Disposition: form-data; name="d" - -9999.43 ---xxx -Content-Disposition: form-data; name="e" - -43.76 ---xxx -Content-Disposition: form-data; name="f.0" - -1 ---xxx -Content-Disposition: form-data; name="f.1" - -2 ---xxx -Content-Disposition: form-data; name="f.2" - -3 ---xxx -Content-Disposition: form-data; name="f.3" - -4 ---xxx-- -`, - Primitives{A: false, B: 237628372683, C: uint(654), D: 9999.43, E: 43.76, F: []int{1, 2, 3, 4}}, - }, - - "slices": { - `--xxx -Content-Disposition: form-data; name="slices.0.a" - -false ---xxx -Content-Disposition: form-data; name="slices.0.b" - -237628372683 ---xxx -Content-Disposition: form-data; name="slices.0.c" - -654 ---xxx -Content-Disposition: form-data; name="slices.0.d" - -9999.43 ---xxx -Content-Disposition: form-data; name="slices.0.e" - -43.76 ---xxx -Content-Disposition: form-data; name="slices.0.f.0" - -1 ---xxx -Content-Disposition: form-data; name="slices.0.f.1" - -2 ---xxx -Content-Disposition: form-data; name="slices.0.f.2" - -3 ---xxx -Content-Disposition: form-data; name="slices.0.f.3" - -4 ---xxx-- -`, - Slices{ - Slice: []Primitives{{A: false, B: 237628372683, C: uint(654), D: 9999.43, E: 43.76, F: []int{1, 2, 3, 4}}}, - }, - }, - - "primitive_pointer_struct": { - `--xxx -Content-Disposition: form-data; name="a" - -false ---xxx -Content-Disposition: form-data; name="b" - -237628372683 ---xxx -Content-Disposition: form-data; name="c" - -654 ---xxx -Content-Disposition: form-data; name="d" - -9999.43 ---xxx -Content-Disposition: form-data; name="e" - -43.76 ---xxx -Content-Disposition: form-data; name="f.0" - -1 ---xxx -Content-Disposition: form-data; name="f.1" - -2 ---xxx -Content-Disposition: form-data; name="f.2" - -3 ---xxx -Content-Disposition: form-data; name="f.3" - -4 ---xxx -Content-Disposition: form-data; name="f.4" - -5 ---xxx-- -`, - PrimitivePointers{ - A: P(false), - B: P(237628372683), - C: P(uint(654)), - D: P(9999.43), - E: P(float32(43.76)), - F: &[]int{1, 2, 3, 4, 5}, - }, - }, - - "datetime_struct": { - `--xxx -Content-Disposition: form-data; name="date" - -2006-01-02 ---xxx -Content-Disposition: form-data; name="date-time" - -2006-01-02T15:04:05Z ---xxx-- -`, - DateTime{ - Date: time.Date(2006, time.January, 2, 0, 0, 0, 0, time.UTC), - DateTime: time.Date(2006, time.January, 2, 15, 4, 5, 0, time.UTC), - }, - }, - - "additional_properties": { - `--xxx -Content-Disposition: form-data; name="a" - -true ---xxx -Content-Disposition: form-data; name="bar" - -value ---xxx -Content-Disposition: form-data; name="foo" - -true ---xxx-- -`, - AdditionalProperties{ - A: true, - Extras: map[string]interface{}{ - "bar": "value", - "foo": true, - }, - }, - }, - - "recursive_struct": { - `--xxx -Content-Disposition: form-data; name="child.name" - -Alex ---xxx -Content-Disposition: form-data; name="name" - -Robert ---xxx-- -`, - Recursive{Name: "Robert", Child: &Recursive{Name: "Alex"}}, - }, - - "unknown_struct_number": { - `--xxx -Content-Disposition: form-data; name="unknown" - -12 ---xxx-- -`, - UnknownStruct{ - Unknown: 12., - }, - }, - - "unknown_struct_map": { - `--xxx -Content-Disposition: form-data; name="unknown.foo" - -bar ---xxx-- -`, - UnknownStruct{ - Unknown: map[string]interface{}{ - "foo": "bar", - }, - }, - }, - - "union_integer": { - `--xxx -Content-Disposition: form-data; name="union" - -12 
---xxx-- -`, - UnionStruct{ - Union: UnionInteger(12), - }, - }, - - "union_struct_discriminated_a": { - `--xxx -Content-Disposition: form-data; name="union.a" - -foo ---xxx -Content-Disposition: form-data; name="union.b" - -bar ---xxx -Content-Disposition: form-data; name="union.type" - -typeA ---xxx-- -`, - - UnionStruct{ - Union: UnionStructA{ - Type: "typeA", - A: "foo", - B: "bar", - }, - }, - }, - - "union_struct_discriminated_b": { - `--xxx -Content-Disposition: form-data; name="union.a" - -foo ---xxx -Content-Disposition: form-data; name="union.type" - -typeB ---xxx-- -`, - UnionStruct{ - Union: UnionStructB{ - Type: "typeB", - A: "foo", - }, - }, - }, - - "union_struct_time": { - `--xxx -Content-Disposition: form-data; name="union" - -2010-05-23 ---xxx-- -`, - UnionStruct{ - Union: UnionTime(time.Date(2010, 05, 23, 0, 0, 0, 0, time.UTC)), - }, - }, -} - -func TestEncode(t *testing.T) { - for name, test := range tests { - t.Run(name, func(t *testing.T) { - buf := bytes.NewBuffer(nil) - writer := multipart.NewWriter(buf) - writer.SetBoundary("xxx") - err := Marshal(test.val, writer) - if err != nil { - t.Errorf("serialization of %v failed with error %v", test.val, err) - } - err = writer.Close() - if err != nil { - t.Errorf("serialization of %v failed with error %v", test.val, err) - } - raw := buf.Bytes() - if string(raw) != strings.ReplaceAll(test.buf, "\n", "\r\n") { - t.Errorf("expected %+#v to serialize to '%s' but got '%s'", test.val, test.buf, string(raw)) - } - }) - } -} diff --git a/packages/tui/sdk/internal/apiform/tag.go b/packages/tui/sdk/internal/apiform/tag.go deleted file mode 100644 index b22e054f..00000000 --- a/packages/tui/sdk/internal/apiform/tag.go +++ /dev/null @@ -1,48 +0,0 @@ -package apiform - -import ( - "reflect" - "strings" -) - -const jsonStructTag = "json" -const formStructTag = "form" -const formatStructTag = "format" - -type parsedStructTag struct { - name string - required bool - extras bool - metadata bool -} - -func parseFormStructTag(field reflect.StructField) (tag parsedStructTag, ok bool) { - raw, ok := field.Tag.Lookup(formStructTag) - if !ok { - raw, ok = field.Tag.Lookup(jsonStructTag) - } - if !ok { - return - } - parts := strings.Split(raw, ",") - if len(parts) == 0 { - return tag, false - } - tag.name = parts[0] - for _, part := range parts[1:] { - switch part { - case "required": - tag.required = true - case "extras": - tag.extras = true - case "metadata": - tag.metadata = true - } - } - return -} - -func parseFormatStructTag(field reflect.StructField) (format string, ok bool) { - format, ok = field.Tag.Lookup(formatStructTag) - return -} diff --git a/packages/tui/sdk/internal/apijson/decoder.go b/packages/tui/sdk/internal/apijson/decoder.go deleted file mode 100644 index 68b7ed6b..00000000 --- a/packages/tui/sdk/internal/apijson/decoder.go +++ /dev/null @@ -1,670 +0,0 @@ -package apijson - -import ( - "encoding/json" - "errors" - "fmt" - "reflect" - "strconv" - "sync" - "time" - "unsafe" - - "github.com/tidwall/gjson" -) - -// decoders is a synchronized map with roughly the following type: -// map[reflect.Type]decoderFunc -var decoders sync.Map - -// Unmarshal is similar to [encoding/json.Unmarshal] and parses the JSON-encoded -// data and stores it in the given pointer. -func Unmarshal(raw []byte, to any) error { - d := &decoderBuilder{dateFormat: time.RFC3339} - return d.unmarshal(raw, to) -} - -// UnmarshalRoot is like Unmarshal, but doesn't try to call MarshalJSON on the -// root element. 
Useful if a struct's UnmarshalJSON is overrode to use the -// behavior of this encoder versus the standard library. -func UnmarshalRoot(raw []byte, to any) error { - d := &decoderBuilder{dateFormat: time.RFC3339, root: true} - return d.unmarshal(raw, to) -} - -// decoderBuilder contains the 'compile-time' state of the decoder. -type decoderBuilder struct { - // Whether or not this is the first element and called by [UnmarshalRoot], see - // the documentation there to see why this is necessary. - root bool - // The dateFormat (a format string for [time.Format]) which is chosen by the - // last struct tag that was seen. - dateFormat string -} - -// decoderState contains the 'run-time' state of the decoder. -type decoderState struct { - strict bool - exactness exactness -} - -// Exactness refers to how close to the type the result was if deserialization -// was successful. This is useful in deserializing unions, where you want to try -// each entry, first with strict, then with looser validation, without actually -// having to do a lot of redundant work by marshalling twice (or maybe even more -// times). -type exactness int8 - -const ( - // Some values had to fudged a bit, for example by converting a string to an - // int, or an enum with extra values. - loose exactness = iota - // There are some extra arguments, but other wise it matches the union. - extras - // Exactly right. - exact -) - -type decoderFunc func(node gjson.Result, value reflect.Value, state *decoderState) error - -type decoderField struct { - tag parsedStructTag - fn decoderFunc - idx []int - goname string -} - -type decoderEntry struct { - reflect.Type - dateFormat string - root bool -} - -func (d *decoderBuilder) unmarshal(raw []byte, to any) error { - value := reflect.ValueOf(to).Elem() - result := gjson.ParseBytes(raw) - if !value.IsValid() { - return fmt.Errorf("apijson: cannot marshal into invalid value") - } - return d.typeDecoder(value.Type())(result, value, &decoderState{strict: false, exactness: exact}) -} - -func (d *decoderBuilder) typeDecoder(t reflect.Type) decoderFunc { - entry := decoderEntry{ - Type: t, - dateFormat: d.dateFormat, - root: d.root, - } - - if fi, ok := decoders.Load(entry); ok { - return fi.(decoderFunc) - } - - // To deal with recursive types, populate the map with an - // indirect func before we build it. This type waits on the - // real func (f) to be ready and then calls it. This indirect - // func is only used for recursive types. - var ( - wg sync.WaitGroup - f decoderFunc - ) - wg.Add(1) - fi, loaded := decoders.LoadOrStore(entry, decoderFunc(func(node gjson.Result, v reflect.Value, state *decoderState) error { - wg.Wait() - return f(node, v, state) - })) - if loaded { - return fi.(decoderFunc) - } - - // Compute the real decoder and replace the indirect func with it. 
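// Aside: a concrete illustration of the exactness levels described above,
// using fixtures from the json_test.go removed further down in this diff.
// A plain Unmarshal runs non-strict, so coercions are accepted and merely
// downgrade exactness to loose:
//
//	var n int
//	_ = Unmarshal([]byte(`"65"`), &n) // n == 65; the string was fudged into an int
//
// The distinction only becomes observable inside unions. With
// ComplexUnionA{Boo string; Foo bool} and ComplexUnionB{Boo bool; Foo string}
// both registered as variants, the payload
//
//	{"union":{"boo":"12","foo":true}}
//
// decodes as ComplexUnionA: it matches with exactness == exact, whereas
// ComplexUnionB only matches after loose coercion, and the most exact
// variant wins.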
- f = d.newTypeDecoder(t) - wg.Done() - decoders.Store(entry, f) - return f -} - -func indirectUnmarshalerDecoder(n gjson.Result, v reflect.Value, state *decoderState) error { - return v.Addr().Interface().(json.Unmarshaler).UnmarshalJSON([]byte(n.Raw)) -} - -func unmarshalerDecoder(n gjson.Result, v reflect.Value, state *decoderState) error { - if v.Kind() == reflect.Pointer && v.CanSet() { - v.Set(reflect.New(v.Type().Elem())) - } - return v.Interface().(json.Unmarshaler).UnmarshalJSON([]byte(n.Raw)) -} - -func (d *decoderBuilder) newTypeDecoder(t reflect.Type) decoderFunc { - if t.ConvertibleTo(reflect.TypeOf(time.Time{})) { - return d.newTimeTypeDecoder(t) - } - if !d.root && t.Implements(reflect.TypeOf((*json.Unmarshaler)(nil)).Elem()) { - return unmarshalerDecoder - } - if !d.root && reflect.PointerTo(t).Implements(reflect.TypeOf((*json.Unmarshaler)(nil)).Elem()) { - if _, ok := unionVariants[t]; !ok { - return indirectUnmarshalerDecoder - } - } - d.root = false - - if _, ok := unionRegistry[t]; ok { - return d.newUnionDecoder(t) - } - - switch t.Kind() { - case reflect.Pointer: - inner := t.Elem() - innerDecoder := d.typeDecoder(inner) - - return func(n gjson.Result, v reflect.Value, state *decoderState) error { - if !v.IsValid() { - return fmt.Errorf("apijson: unexpected invalid reflection value %+#v", v) - } - - newValue := reflect.New(inner).Elem() - err := innerDecoder(n, newValue, state) - if err != nil { - return err - } - - v.Set(newValue.Addr()) - return nil - } - case reflect.Struct: - return d.newStructTypeDecoder(t) - case reflect.Array: - fallthrough - case reflect.Slice: - return d.newArrayTypeDecoder(t) - case reflect.Map: - return d.newMapDecoder(t) - case reflect.Interface: - return func(node gjson.Result, value reflect.Value, state *decoderState) error { - if !value.IsValid() { - return fmt.Errorf("apijson: unexpected invalid value %+#v", value) - } - if node.Value() != nil && value.CanSet() { - value.Set(reflect.ValueOf(node.Value())) - } - return nil - } - default: - return d.newPrimitiveTypeDecoder(t) - } -} - -// newUnionDecoder returns a decoderFunc that deserializes into a union using an -// algorithm roughly similar to Pydantic's [smart algorithm]. -// -// Conceptually this is equivalent to choosing the best schema based on how 'exact' -// the deserialization is for each of the schemas. -// -// If there is a tie in the level of exactness, then the tie is broken -// left-to-right. 
-// -// [smart algorithm]: https://docs.pydantic.dev/latest/concepts/unions/#smart-mode -func (d *decoderBuilder) newUnionDecoder(t reflect.Type) decoderFunc { - unionEntry, ok := unionRegistry[t] - if !ok { - panic("apijson: couldn't find union of type " + t.String() + " in union registry") - } - decoders := []decoderFunc{} - for _, variant := range unionEntry.variants { - decoder := d.typeDecoder(variant.Type) - decoders = append(decoders, decoder) - } - return func(n gjson.Result, v reflect.Value, state *decoderState) error { - // If there is a discriminator match, circumvent the exactness logic entirely - for idx, variant := range unionEntry.variants { - decoder := decoders[idx] - if variant.TypeFilter != n.Type { - continue - } - - if len(unionEntry.discriminatorKey) != 0 { - discriminatorValue := n.Get(unionEntry.discriminatorKey).Value() - if discriminatorValue == variant.DiscriminatorValue { - inner := reflect.New(variant.Type).Elem() - err := decoder(n, inner, state) - v.Set(inner) - return err - } - } - } - - // Set bestExactness to worse than loose - bestExactness := loose - 1 - for idx, variant := range unionEntry.variants { - decoder := decoders[idx] - if variant.TypeFilter != n.Type { - continue - } - sub := decoderState{strict: state.strict, exactness: exact} - inner := reflect.New(variant.Type).Elem() - err := decoder(n, inner, &sub) - if err != nil { - continue - } - if sub.exactness == exact { - v.Set(inner) - return nil - } - if sub.exactness > bestExactness { - v.Set(inner) - bestExactness = sub.exactness - } - } - - if bestExactness < loose { - return errors.New("apijson: was not able to coerce type as union") - } - - if guardStrict(state, bestExactness != exact) { - return errors.New("apijson: was not able to coerce type as union strictly") - } - - return nil - } -} - -func (d *decoderBuilder) newMapDecoder(t reflect.Type) decoderFunc { - keyType := t.Key() - itemType := t.Elem() - itemDecoder := d.typeDecoder(itemType) - - return func(node gjson.Result, value reflect.Value, state *decoderState) (err error) { - mapValue := reflect.MakeMapWithSize(t, len(node.Map())) - - node.ForEach(func(key, value gjson.Result) bool { - // It's fine for us to just use `ValueOf` here because the key types will - // always be primitive types so we don't need to decode it using the standard pattern - keyValue := reflect.ValueOf(key.Value()) - if !keyValue.IsValid() { - if err == nil { - err = fmt.Errorf("apijson: received invalid key type %v", keyValue.String()) - } - return false - } - if keyValue.Type() != keyType { - if err == nil { - err = fmt.Errorf("apijson: expected key type %v but got %v", keyType, keyValue.Type()) - } - return false - } - - itemValue := reflect.New(itemType).Elem() - itemerr := itemDecoder(value, itemValue, state) - if itemerr != nil { - if err == nil { - err = itemerr - } - return false - } - - mapValue.SetMapIndex(keyValue, itemValue) - return true - }) - - if err != nil { - return err - } - value.Set(mapValue) - return nil - } -} - -func (d *decoderBuilder) newArrayTypeDecoder(t reflect.Type) decoderFunc { - itemDecoder := d.typeDecoder(t.Elem()) - - return func(node gjson.Result, value reflect.Value, state *decoderState) (err error) { - if !node.IsArray() { - return fmt.Errorf("apijson: could not deserialize to an array") - } - - arrayNode := node.Array() - - arrayValue := reflect.MakeSlice(reflect.SliceOf(t.Elem()), len(arrayNode), len(arrayNode)) - for i, itemNode := range arrayNode { - err = itemDecoder(itemNode, arrayValue.Index(i), state) - if err != 
nil { - return err - } - } - - value.Set(arrayValue) - return nil - } -} - -func (d *decoderBuilder) newStructTypeDecoder(t reflect.Type) decoderFunc { - // map of json field name to struct field decoders - decoderFields := map[string]decoderField{} - anonymousDecoders := []decoderField{} - extraDecoder := (*decoderField)(nil) - inlineDecoder := (*decoderField)(nil) - - for i := 0; i < t.NumField(); i++ { - idx := []int{i} - field := t.FieldByIndex(idx) - if !field.IsExported() { - continue - } - // If this is an embedded struct, traverse one level deeper to extract - // the fields and get their encoders as well. - if field.Anonymous { - anonymousDecoders = append(anonymousDecoders, decoderField{ - fn: d.typeDecoder(field.Type), - idx: idx[:], - }) - continue - } - // If json tag is not present, then we skip, which is intentionally - // different behavior from the stdlib. - ptag, ok := parseJSONStructTag(field) - if !ok { - continue - } - // We only want to support unexported fields if they're tagged with - // `extras` because that field shouldn't be part of the public API. - if ptag.extras { - extraDecoder = &decoderField{ptag, d.typeDecoder(field.Type.Elem()), idx, field.Name} - continue - } - if ptag.inline { - inlineDecoder = &decoderField{ptag, d.typeDecoder(field.Type), idx, field.Name} - continue - } - if ptag.metadata { - continue - } - - oldFormat := d.dateFormat - dateFormat, ok := parseFormatStructTag(field) - if ok { - switch dateFormat { - case "date-time": - d.dateFormat = time.RFC3339 - case "date": - d.dateFormat = "2006-01-02" - } - } - decoderFields[ptag.name] = decoderField{ptag, d.typeDecoder(field.Type), idx, field.Name} - d.dateFormat = oldFormat - } - - return func(node gjson.Result, value reflect.Value, state *decoderState) (err error) { - if field := value.FieldByName("JSON"); field.IsValid() { - if raw := field.FieldByName("raw"); raw.IsValid() { - setUnexportedField(raw, node.Raw) - } - } - - for _, decoder := range anonymousDecoders { - // ignore errors - decoder.fn(node, value.FieldByIndex(decoder.idx), state) - } - - if inlineDecoder != nil { - var meta Field - dest := value.FieldByIndex(inlineDecoder.idx) - isValid := false - if dest.IsValid() && node.Type != gjson.Null { - err = inlineDecoder.fn(node, dest, state) - if err == nil { - isValid = true - } - } - - if node.Type == gjson.Null { - meta = Field{ - raw: node.Raw, - status: null, - } - } else if !isValid { - meta = Field{ - raw: node.Raw, - status: invalid, - } - } else if isValid { - meta = Field{ - raw: node.Raw, - status: valid, - } - } - if metadata := getSubField(value, inlineDecoder.idx, inlineDecoder.goname); metadata.IsValid() { - metadata.Set(reflect.ValueOf(meta)) - } - return err - } - - typedExtraType := reflect.Type(nil) - typedExtraFields := reflect.Value{} - if extraDecoder != nil { - typedExtraType = value.FieldByIndex(extraDecoder.idx).Type() - typedExtraFields = reflect.MakeMap(typedExtraType) - } - untypedExtraFields := map[string]Field{} - - for fieldName, itemNode := range node.Map() { - df, explicit := decoderFields[fieldName] - var ( - dest reflect.Value - fn decoderFunc - meta Field - ) - if explicit { - fn = df.fn - dest = value.FieldByIndex(df.idx) - } - if !explicit && extraDecoder != nil { - dest = reflect.New(typedExtraType.Elem()).Elem() - fn = extraDecoder.fn - } - - isValid := false - if dest.IsValid() && itemNode.Type != gjson.Null { - err = fn(itemNode, dest, state) - if err == nil { - isValid = true - } - } - - if itemNode.Type == gjson.Null { - meta = Field{ - raw: 
itemNode.Raw, - status: null, - } - } else if !isValid { - meta = Field{ - raw: itemNode.Raw, - status: invalid, - } - } else if isValid { - meta = Field{ - raw: itemNode.Raw, - status: valid, - } - } - - if explicit { - if metadata := getSubField(value, df.idx, df.goname); metadata.IsValid() { - metadata.Set(reflect.ValueOf(meta)) - } - } - if !explicit { - untypedExtraFields[fieldName] = meta - } - if !explicit && extraDecoder != nil { - typedExtraFields.SetMapIndex(reflect.ValueOf(fieldName), dest) - } - } - - if extraDecoder != nil && typedExtraFields.Len() > 0 { - value.FieldByIndex(extraDecoder.idx).Set(typedExtraFields) - } - - // Set exactness to 'extras' if there are untyped, extra fields. - if len(untypedExtraFields) > 0 && state.exactness > extras { - state.exactness = extras - } - - if metadata := getSubField(value, []int{-1}, "ExtraFields"); metadata.IsValid() && len(untypedExtraFields) > 0 { - metadata.Set(reflect.ValueOf(untypedExtraFields)) - } - return nil - } -} - -func (d *decoderBuilder) newPrimitiveTypeDecoder(t reflect.Type) decoderFunc { - switch t.Kind() { - case reflect.String: - return func(n gjson.Result, v reflect.Value, state *decoderState) error { - v.SetString(n.String()) - if guardStrict(state, n.Type != gjson.String) { - return fmt.Errorf("apijson: failed to parse string strictly") - } - // Everything that is not an object can be loosely stringified. - if n.Type == gjson.JSON { - return fmt.Errorf("apijson: failed to parse string") - } - if guardUnknown(state, v) { - return fmt.Errorf("apijson: failed string enum validation") - } - return nil - } - case reflect.Bool: - return func(n gjson.Result, v reflect.Value, state *decoderState) error { - v.SetBool(n.Bool()) - if guardStrict(state, n.Type != gjson.True && n.Type != gjson.False) { - return fmt.Errorf("apijson: failed to parse bool strictly") - } - // Numbers and strings that are either 'true' or 'false' can be loosely - // deserialized as bool. - if n.Type == gjson.String && (n.Raw != "true" && n.Raw != "false") || n.Type == gjson.JSON { - return fmt.Errorf("apijson: failed to parse bool") - } - if guardUnknown(state, v) { - return fmt.Errorf("apijson: failed bool enum validation") - } - return nil - } - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - return func(n gjson.Result, v reflect.Value, state *decoderState) error { - v.SetInt(n.Int()) - if guardStrict(state, n.Type != gjson.Number || n.Num != float64(int(n.Num))) { - return fmt.Errorf("apijson: failed to parse int strictly") - } - // Numbers, booleans, and strings that maybe look like numbers can be - // loosely deserialized as numbers. - if n.Type == gjson.JSON || (n.Type == gjson.String && !canParseAsNumber(n.Str)) { - return fmt.Errorf("apijson: failed to parse int") - } - if guardUnknown(state, v) { - return fmt.Errorf("apijson: failed int enum validation") - } - return nil - } - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: - return func(n gjson.Result, v reflect.Value, state *decoderState) error { - v.SetUint(n.Uint()) - if guardStrict(state, n.Type != gjson.Number || n.Num != float64(int(n.Num)) || n.Num < 0) { - return fmt.Errorf("apijson: failed to parse uint strictly") - } - // Numbers, booleans, and strings that maybe look like numbers can be - // loosely deserialized as uint. 
- if n.Type == gjson.JSON || (n.Type == gjson.String && !canParseAsNumber(n.Str)) { - return fmt.Errorf("apijson: failed to parse uint") - } - if guardUnknown(state, v) { - return fmt.Errorf("apijson: failed uint enum validation") - } - return nil - } - case reflect.Float32, reflect.Float64: - return func(n gjson.Result, v reflect.Value, state *decoderState) error { - v.SetFloat(n.Float()) - if guardStrict(state, n.Type != gjson.Number) { - return fmt.Errorf("apijson: failed to parse float strictly") - } - // Numbers, booleans, and strings that maybe look like numbers can be - // loosely deserialized as floats. - if n.Type == gjson.JSON || (n.Type == gjson.String && !canParseAsNumber(n.Str)) { - return fmt.Errorf("apijson: failed to parse float") - } - if guardUnknown(state, v) { - return fmt.Errorf("apijson: failed float enum validation") - } - return nil - } - default: - return func(node gjson.Result, v reflect.Value, state *decoderState) error { - return fmt.Errorf("unknown type received at primitive decoder: %s", t.String()) - } - } -} - -func (d *decoderBuilder) newTimeTypeDecoder(t reflect.Type) decoderFunc { - format := d.dateFormat - return func(n gjson.Result, v reflect.Value, state *decoderState) error { - parsed, err := time.Parse(format, n.Str) - if err == nil { - v.Set(reflect.ValueOf(parsed).Convert(t)) - return nil - } - - if guardStrict(state, true) { - return err - } - - layouts := []string{ - "2006-01-02", - "2006-01-02T15:04:05Z07:00", - "2006-01-02T15:04:05Z0700", - "2006-01-02T15:04:05", - "2006-01-02 15:04:05Z07:00", - "2006-01-02 15:04:05Z0700", - "2006-01-02 15:04:05", - } - - for _, layout := range layouts { - parsed, err := time.Parse(layout, n.Str) - if err == nil { - v.Set(reflect.ValueOf(parsed).Convert(t)) - return nil - } - } - - return fmt.Errorf("unable to leniently parse date-time string: %s", n.Str) - } -} - -func setUnexportedField(field reflect.Value, value interface{}) { - reflect.NewAt(field.Type(), unsafe.Pointer(field.UnsafeAddr())).Elem().Set(reflect.ValueOf(value)) -} - -func guardStrict(state *decoderState, cond bool) bool { - if !cond { - return false - } - - if state.strict { - return true - } - - state.exactness = loose - return false -} - -func canParseAsNumber(str string) bool { - _, err := strconv.ParseFloat(str, 64) - return err == nil -} - -func guardUnknown(state *decoderState, v reflect.Value) bool { - if have, ok := v.Interface().(interface{ IsKnown() bool }); guardStrict(state, ok && !have.IsKnown()) { - return true - } - return false -} diff --git a/packages/tui/sdk/internal/apijson/encoder.go b/packages/tui/sdk/internal/apijson/encoder.go deleted file mode 100644 index 0e5f89e1..00000000 --- a/packages/tui/sdk/internal/apijson/encoder.go +++ /dev/null @@ -1,398 +0,0 @@ -package apijson - -import ( - "bytes" - "encoding/json" - "fmt" - "reflect" - "sort" - "strconv" - "strings" - "sync" - "time" - - "github.com/tidwall/sjson" - - "github.com/sst/opencode-sdk-go/internal/param" -) - -var encoders sync.Map // map[encoderEntry]encoderFunc - -func Marshal(value interface{}) ([]byte, error) { - e := &encoder{dateFormat: time.RFC3339} - return e.marshal(value) -} - -func MarshalRoot(value interface{}) ([]byte, error) { - e := &encoder{root: true, dateFormat: time.RFC3339} - return e.marshal(value) -} - -type encoder struct { - dateFormat string - root bool -} - -type encoderFunc func(value reflect.Value) ([]byte, error) - -type encoderField struct { - tag parsedStructTag - fn encoderFunc - idx []int -} - -type encoderEntry struct { - 
reflect.Type - dateFormat string - root bool -} - -func (e *encoder) marshal(value interface{}) ([]byte, error) { - val := reflect.ValueOf(value) - if !val.IsValid() { - return nil, nil - } - typ := val.Type() - enc := e.typeEncoder(typ) - return enc(val) -} - -func (e *encoder) typeEncoder(t reflect.Type) encoderFunc { - entry := encoderEntry{ - Type: t, - dateFormat: e.dateFormat, - root: e.root, - } - - if fi, ok := encoders.Load(entry); ok { - return fi.(encoderFunc) - } - - // To deal with recursive types, populate the map with an - // indirect func before we build it. This type waits on the - // real func (f) to be ready and then calls it. This indirect - // func is only used for recursive types. - var ( - wg sync.WaitGroup - f encoderFunc - ) - wg.Add(1) - fi, loaded := encoders.LoadOrStore(entry, encoderFunc(func(v reflect.Value) ([]byte, error) { - wg.Wait() - return f(v) - })) - if loaded { - return fi.(encoderFunc) - } - - // Compute the real encoder and replace the indirect func with it. - f = e.newTypeEncoder(t) - wg.Done() - encoders.Store(entry, f) - return f -} - -func marshalerEncoder(v reflect.Value) ([]byte, error) { - return v.Interface().(json.Marshaler).MarshalJSON() -} - -func indirectMarshalerEncoder(v reflect.Value) ([]byte, error) { - return v.Addr().Interface().(json.Marshaler).MarshalJSON() -} - -func (e *encoder) newTypeEncoder(t reflect.Type) encoderFunc { - if t.ConvertibleTo(reflect.TypeOf(time.Time{})) { - return e.newTimeTypeEncoder() - } - if !e.root && t.Implements(reflect.TypeOf((*json.Marshaler)(nil)).Elem()) { - return marshalerEncoder - } - if !e.root && reflect.PointerTo(t).Implements(reflect.TypeOf((*json.Marshaler)(nil)).Elem()) { - return indirectMarshalerEncoder - } - e.root = false - switch t.Kind() { - case reflect.Pointer: - inner := t.Elem() - - innerEncoder := e.typeEncoder(inner) - return func(v reflect.Value) ([]byte, error) { - if !v.IsValid() || v.IsNil() { - return nil, nil - } - return innerEncoder(v.Elem()) - } - case reflect.Struct: - return e.newStructTypeEncoder(t) - case reflect.Array: - fallthrough - case reflect.Slice: - return e.newArrayTypeEncoder(t) - case reflect.Map: - return e.newMapEncoder(t) - case reflect.Interface: - return e.newInterfaceEncoder() - default: - return e.newPrimitiveTypeEncoder(t) - } -} - -func (e *encoder) newPrimitiveTypeEncoder(t reflect.Type) encoderFunc { - switch t.Kind() { - // Note that we could use `gjson` to encode these types but it would complicate our - // code more and this current code shouldn't cause any issues - case reflect.String: - return func(v reflect.Value) ([]byte, error) { - return json.Marshal(v.Interface()) - } - case reflect.Bool: - return func(v reflect.Value) ([]byte, error) { - if v.Bool() { - return []byte("true"), nil - } - return []byte("false"), nil - } - case reflect.Int, reflect.Int16, reflect.Int32, reflect.Int64: - return func(v reflect.Value) ([]byte, error) { - return []byte(strconv.FormatInt(v.Int(), 10)), nil - } - case reflect.Uint, reflect.Uint16, reflect.Uint32, reflect.Uint64: - return func(v reflect.Value) ([]byte, error) { - return []byte(strconv.FormatUint(v.Uint(), 10)), nil - } - case reflect.Float32: - return func(v reflect.Value) ([]byte, error) { - return []byte(strconv.FormatFloat(v.Float(), 'f', -1, 32)), nil - } - case reflect.Float64: - return func(v reflect.Value) ([]byte, error) { - return []byte(strconv.FormatFloat(v.Float(), 'f', -1, 64)), nil - } - default: - return func(v reflect.Value) ([]byte, error) { - return nil, fmt.Errorf("unknown 
type received at primitive encoder: %s", t.String()) - } - } -} - -func (e *encoder) newArrayTypeEncoder(t reflect.Type) encoderFunc { - itemEncoder := e.typeEncoder(t.Elem()) - - return func(value reflect.Value) ([]byte, error) { - json := []byte("[]") - for i := 0; i < value.Len(); i++ { - var value, err = itemEncoder(value.Index(i)) - if err != nil { - return nil, err - } - if value == nil { - // Assume that empty items should be inserted as `null` so that the output array - // will be the same length as the input array - value = []byte("null") - } - - json, err = sjson.SetRawBytes(json, "-1", value) - if err != nil { - return nil, err - } - } - - return json, nil - } -} - -func (e *encoder) newStructTypeEncoder(t reflect.Type) encoderFunc { - if t.Implements(reflect.TypeOf((*param.FieldLike)(nil)).Elem()) { - return e.newFieldTypeEncoder(t) - } - - encoderFields := []encoderField{} - extraEncoder := (*encoderField)(nil) - - // This helper allows us to recursively collect field encoders into a flat - // array. The parameter `index` keeps track of the access patterns necessary - // to get to some field. - var collectEncoderFields func(r reflect.Type, index []int) - collectEncoderFields = func(r reflect.Type, index []int) { - for i := 0; i < r.NumField(); i++ { - idx := append(index, i) - field := t.FieldByIndex(idx) - if !field.IsExported() { - continue - } - // If this is an embedded struct, traverse one level deeper to extract - // the field and get their encoders as well. - if field.Anonymous { - collectEncoderFields(field.Type, idx) - continue - } - // If json tag is not present, then we skip, which is intentionally - // different behavior from the stdlib. - ptag, ok := parseJSONStructTag(field) - if !ok { - continue - } - // We only want to support unexported field if they're tagged with - // `extras` because that field shouldn't be part of the public API. 
We - // also want to only keep the top level extras - if ptag.extras && len(index) == 0 { - extraEncoder = &encoderField{ptag, e.typeEncoder(field.Type.Elem()), idx} - continue - } - if ptag.name == "-" { - continue - } - - dateFormat, ok := parseFormatStructTag(field) - oldFormat := e.dateFormat - if ok { - switch dateFormat { - case "date-time": - e.dateFormat = time.RFC3339 - case "date": - e.dateFormat = "2006-01-02" - } - } - encoderFields = append(encoderFields, encoderField{ptag, e.typeEncoder(field.Type), idx}) - e.dateFormat = oldFormat - } - } - collectEncoderFields(t, []int{}) - - // Ensure deterministic output by sorting by lexicographic order - sort.Slice(encoderFields, func(i, j int) bool { - return encoderFields[i].tag.name < encoderFields[j].tag.name - }) - - return func(value reflect.Value) (json []byte, err error) { - json = []byte("{}") - - for _, ef := range encoderFields { - field := value.FieldByIndex(ef.idx) - encoded, err := ef.fn(field) - if err != nil { - return nil, err - } - if encoded == nil { - continue - } - json, err = sjson.SetRawBytes(json, ef.tag.name, encoded) - if err != nil { - return nil, err - } - } - - if extraEncoder != nil { - json, err = e.encodeMapEntries(json, value.FieldByIndex(extraEncoder.idx)) - if err != nil { - return nil, err - } - } - return - } -} - -func (e *encoder) newFieldTypeEncoder(t reflect.Type) encoderFunc { - f, _ := t.FieldByName("Value") - enc := e.typeEncoder(f.Type) - - return func(value reflect.Value) (json []byte, err error) { - present := value.FieldByName("Present") - if !present.Bool() { - return nil, nil - } - null := value.FieldByName("Null") - if null.Bool() { - return []byte("null"), nil - } - raw := value.FieldByName("Raw") - if !raw.IsNil() { - return e.typeEncoder(raw.Type())(raw) - } - return enc(value.FieldByName("Value")) - } -} - -func (e *encoder) newTimeTypeEncoder() encoderFunc { - format := e.dateFormat - return func(value reflect.Value) (json []byte, err error) { - return []byte(`"` + value.Convert(reflect.TypeOf(time.Time{})).Interface().(time.Time).Format(format) + `"`), nil - } -} - -func (e encoder) newInterfaceEncoder() encoderFunc { - return func(value reflect.Value) ([]byte, error) { - value = value.Elem() - if !value.IsValid() { - return nil, nil - } - return e.typeEncoder(value.Type())(value) - } -} - -// Given a []byte of json (may either be an empty object or an object that already contains entries) -// encode all of the entries in the map to the json byte array. 
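// Aside: encodeMapEntries below leans on sjson.SetRawBytes, which treats its
// key argument as a path, so literal keys have to be escaped first. A minimal
// sketch of the calls involved (the keys and values here are made up):
//
//	out := []byte("{}")
//	out, _ = sjson.SetRawBytes(out, "a", []byte("1"))        // {"a":1}
//	out, _ = sjson.SetRawBytes(out, "b", []byte(`"x"`))      // {"a":1,"b":"x"}
//	out, _ = sjson.SetRawBytes(out, `foo\.bar`, []byte("2")) // {"a":1,"b":"x","foo.bar":2}
//
// Without the escaping (handled by sjsonReplacer at the bottom of this file),
// a key like "foo.bar" would be written as the nested object {"foo":{"bar":2}}.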
-func (e *encoder) encodeMapEntries(json []byte, v reflect.Value) ([]byte, error) { - type mapPair struct { - key []byte - value reflect.Value - } - - pairs := []mapPair{} - keyEncoder := e.typeEncoder(v.Type().Key()) - - iter := v.MapRange() - for iter.Next() { - var encodedKeyString string - if iter.Key().Type().Kind() == reflect.String { - encodedKeyString = iter.Key().String() - } else { - var err error - encodedKeyBytes, err := keyEncoder(iter.Key()) - if err != nil { - return nil, err - } - encodedKeyString = string(encodedKeyBytes) - } - encodedKey := []byte(sjsonReplacer.Replace(encodedKeyString)) - pairs = append(pairs, mapPair{key: encodedKey, value: iter.Value()}) - } - - // Ensure deterministic output - sort.Slice(pairs, func(i, j int) bool { - return bytes.Compare(pairs[i].key, pairs[j].key) < 0 - }) - - elementEncoder := e.typeEncoder(v.Type().Elem()) - for _, p := range pairs { - encodedValue, err := elementEncoder(p.value) - if err != nil { - return nil, err - } - if len(encodedValue) == 0 { - continue - } - json, err = sjson.SetRawBytes(json, string(p.key), encodedValue) - if err != nil { - return nil, err - } - } - - return json, nil -} - -func (e *encoder) newMapEncoder(t reflect.Type) encoderFunc { - return func(value reflect.Value) ([]byte, error) { - json := []byte("{}") - var err error - json, err = e.encodeMapEntries(json, value) - if err != nil { - return nil, err - } - return json, nil - } -} - -// If we want to set a literal key value into JSON using sjson, we need to make sure it doesn't have -// special characters that sjson interprets as a path. -var sjsonReplacer *strings.Replacer = strings.NewReplacer(".", "\\.", ":", "\\:", "*", "\\*") diff --git a/packages/tui/sdk/internal/apijson/field.go b/packages/tui/sdk/internal/apijson/field.go deleted file mode 100644 index 3ef207c5..00000000 --- a/packages/tui/sdk/internal/apijson/field.go +++ /dev/null @@ -1,41 +0,0 @@ -package apijson - -import "reflect" - -type status uint8 - -const ( - missing status = iota - null - invalid - valid -) - -type Field struct { - raw string - status status -} - -// Returns true if the field is explicitly `null` _or_ if it is not present at all (ie, missing). -// To check if the field's key is present in the JSON with an explicit null value, -// you must check `f.IsNull() && !f.IsMissing()`. 
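// Aside: a short sketch of how these statuses surface after a decode, using
// the JSONFieldStruct fixture from the json_test.go removed further below:
//
//	var row JSONFieldStruct
//	_ = Unmarshal([]byte(`{"a":true,"c":null}`), &row)
//
//	row.JSON.C.IsNull()    // true  ("c" is explicitly null)
//	row.JSON.C.IsMissing() // false (the key "c" was present)
//	row.JSON.D.IsNull()    // true  (missing keys also report as null)
//	row.JSON.D.IsMissing() // true
//
// So "present but explicitly null" is exactly C.IsNull() && !C.IsMissing().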
-func (j Field) IsNull() bool { return j.status <= null } -func (j Field) IsMissing() bool { return j.status == missing } -func (j Field) IsInvalid() bool { return j.status == invalid } -func (j Field) Raw() string { return j.raw } - -func getSubField(root reflect.Value, index []int, name string) reflect.Value { - strct := root.FieldByIndex(index[:len(index)-1]) - if !strct.IsValid() { - panic("couldn't find encapsulating struct for field " + name) - } - meta := strct.FieldByName("JSON") - if !meta.IsValid() { - return reflect.Value{} - } - field := meta.FieldByName(name) - if !field.IsValid() { - return reflect.Value{} - } - return field -} diff --git a/packages/tui/sdk/internal/apijson/field_test.go b/packages/tui/sdk/internal/apijson/field_test.go deleted file mode 100644 index 2e170c76..00000000 --- a/packages/tui/sdk/internal/apijson/field_test.go +++ /dev/null @@ -1,66 +0,0 @@ -package apijson - -import ( - "testing" - "time" - - "github.com/sst/opencode-sdk-go/internal/param" -) - -type Struct struct { - A string `json:"a"` - B int64 `json:"b"` -} - -type FieldStruct struct { - A param.Field[string] `json:"a"` - B param.Field[int64] `json:"b"` - C param.Field[Struct] `json:"c"` - D param.Field[time.Time] `json:"d" format:"date"` - E param.Field[time.Time] `json:"e" format:"date-time"` - F param.Field[int64] `json:"f"` -} - -func TestFieldMarshal(t *testing.T) { - tests := map[string]struct { - value interface{} - expected string - }{ - "null_string": {param.Field[string]{Present: true, Null: true}, "null"}, - "null_int": {param.Field[int]{Present: true, Null: true}, "null"}, - "null_int64": {param.Field[int64]{Present: true, Null: true}, "null"}, - "null_struct": {param.Field[Struct]{Present: true, Null: true}, "null"}, - - "string": {param.Field[string]{Present: true, Value: "string"}, `"string"`}, - "int": {param.Field[int]{Present: true, Value: 123}, "123"}, - "int64": {param.Field[int64]{Present: true, Value: int64(123456789123456789)}, "123456789123456789"}, - "struct": {param.Field[Struct]{Present: true, Value: Struct{A: "yo", B: 123}}, `{"a":"yo","b":123}`}, - - "string_raw": {param.Field[int]{Present: true, Raw: "string"}, `"string"`}, - "int_raw": {param.Field[int]{Present: true, Raw: 123}, "123"}, - "int64_raw": {param.Field[int]{Present: true, Raw: int64(123456789123456789)}, "123456789123456789"}, - "struct_raw": {param.Field[int]{Present: true, Raw: Struct{A: "yo", B: 123}}, `{"a":"yo","b":123}`}, - - "param_struct": { - FieldStruct{ - A: param.Field[string]{Present: true, Value: "hello"}, - B: param.Field[int64]{Present: true, Value: int64(12)}, - D: param.Field[time.Time]{Present: true, Value: time.Date(2023, time.March, 18, 14, 47, 38, 0, time.UTC)}, - E: param.Field[time.Time]{Present: true, Value: time.Date(2023, time.March, 18, 14, 47, 38, 0, time.UTC)}, - }, - `{"a":"hello","b":12,"d":"2023-03-18","e":"2023-03-18T14:47:38Z"}`, - }, - } - - for name, test := range tests { - t.Run(name, func(t *testing.T) { - b, err := Marshal(test.value) - if err != nil { - t.Fatalf("didn't expect error %v", err) - } - if string(b) != test.expected { - t.Fatalf("expected %s, received %s", test.expected, string(b)) - } - }) - } -} diff --git a/packages/tui/sdk/internal/apijson/json_test.go b/packages/tui/sdk/internal/apijson/json_test.go deleted file mode 100644 index e6563448..00000000 --- a/packages/tui/sdk/internal/apijson/json_test.go +++ /dev/null @@ -1,617 +0,0 @@ -package apijson - -import ( - "reflect" - "strings" - "testing" - "time" - - "github.com/tidwall/gjson" -) - 
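// Aside: the lenient fallback layouts in decoder.go above are what the
// date_time_*_coerce cases in the table below exercise; for example:
//
//	var ts time.Time
//	_ = Unmarshal([]byte(`"2007-03-01 13:03:05Z"`), &ts)
//	// RFC 3339 parsing fails (no "T" separator), so the decoder falls back
//	// to the "2006-01-02 15:04:05Z07:00" layout and ts comes out as
//	// 2007-03-01 13:03:05 UTC.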
-func P[T any](v T) *T { return &v } - -type Primitives struct { - A bool `json:"a"` - B int `json:"b"` - C uint `json:"c"` - D float64 `json:"d"` - E float32 `json:"e"` - F []int `json:"f"` -} - -type PrimitivePointers struct { - A *bool `json:"a"` - B *int `json:"b"` - C *uint `json:"c"` - D *float64 `json:"d"` - E *float32 `json:"e"` - F *[]int `json:"f"` -} - -type Slices struct { - Slice []Primitives `json:"slices"` -} - -type DateTime struct { - Date time.Time `json:"date" format:"date"` - DateTime time.Time `json:"date-time" format:"date-time"` -} - -type AdditionalProperties struct { - A bool `json:"a"` - ExtraFields map[string]interface{} `json:"-,extras"` -} - -type TypedAdditionalProperties struct { - A bool `json:"a"` - ExtraFields map[string]int `json:"-,extras"` -} - -type EmbeddedStruct struct { - A bool `json:"a"` - B string `json:"b"` - - JSON EmbeddedStructJSON -} - -type EmbeddedStructJSON struct { - A Field - B Field - ExtraFields map[string]Field - raw string -} - -type EmbeddedStructs struct { - EmbeddedStruct - A *int `json:"a"` - ExtraFields map[string]interface{} `json:"-,extras"` - - JSON EmbeddedStructsJSON -} - -type EmbeddedStructsJSON struct { - A Field - ExtraFields map[string]Field - raw string -} - -type Recursive struct { - Name string `json:"name"` - Child *Recursive `json:"child"` -} - -type JSONFieldStruct struct { - A bool `json:"a"` - B int64 `json:"b"` - C string `json:"c"` - D string `json:"d"` - ExtraFields map[string]int64 `json:"-,extras"` - JSON JSONFieldStructJSON `json:"-,metadata"` -} - -type JSONFieldStructJSON struct { - A Field - B Field - C Field - D Field - ExtraFields map[string]Field - raw string -} - -type UnknownStruct struct { - Unknown interface{} `json:"unknown"` -} - -type UnionStruct struct { - Union Union `json:"union" format:"date"` -} - -type Union interface { - union() -} - -type Inline struct { - InlineField Primitives `json:"-,inline"` - JSON InlineJSON `json:"-,metadata"` -} - -type InlineArray struct { - InlineField []string `json:"-,inline"` - JSON InlineJSON `json:"-,metadata"` -} - -type InlineJSON struct { - InlineField Field - raw string -} - -type UnionInteger int64 - -func (UnionInteger) union() {} - -type UnionStructA struct { - Type string `json:"type"` - A string `json:"a"` - B string `json:"b"` -} - -func (UnionStructA) union() {} - -type UnionStructB struct { - Type string `json:"type"` - A string `json:"a"` -} - -func (UnionStructB) union() {} - -type UnionTime time.Time - -func (UnionTime) union() {} - -func init() { - RegisterUnion(reflect.TypeOf((*Union)(nil)).Elem(), "type", - UnionVariant{ - TypeFilter: gjson.String, - Type: reflect.TypeOf(UnionTime{}), - }, - UnionVariant{ - TypeFilter: gjson.Number, - Type: reflect.TypeOf(UnionInteger(0)), - }, - UnionVariant{ - TypeFilter: gjson.JSON, - DiscriminatorValue: "typeA", - Type: reflect.TypeOf(UnionStructA{}), - }, - UnionVariant{ - TypeFilter: gjson.JSON, - DiscriminatorValue: "typeB", - Type: reflect.TypeOf(UnionStructB{}), - }, - ) -} - -type ComplexUnionStruct struct { - Union ComplexUnion `json:"union"` -} - -type ComplexUnion interface { - complexUnion() -} - -type ComplexUnionA struct { - Boo string `json:"boo"` - Foo bool `json:"foo"` -} - -func (ComplexUnionA) complexUnion() {} - -type ComplexUnionB struct { - Boo bool `json:"boo"` - Foo string `json:"foo"` -} - -func (ComplexUnionB) complexUnion() {} - -type ComplexUnionC struct { - Boo int64 `json:"boo"` -} - -func (ComplexUnionC) complexUnion() {} - -type ComplexUnionTypeA struct { - Baz int64 
`json:"baz"` - Type TypeA `json:"type"` -} - -func (ComplexUnionTypeA) complexUnion() {} - -type TypeA string - -func (t TypeA) IsKnown() bool { - return t == "a" -} - -type ComplexUnionTypeB struct { - Baz int64 `json:"baz"` - Type TypeB `json:"type"` -} - -type TypeB string - -func (t TypeB) IsKnown() bool { - return t == "b" -} - -type UnmarshalStruct struct { - Foo string `json:"foo"` - prop bool `json:"-"` -} - -func (r *UnmarshalStruct) UnmarshalJSON(json []byte) error { - r.prop = true - return UnmarshalRoot(json, r) -} - -func (ComplexUnionTypeB) complexUnion() {} - -func init() { - RegisterUnion(reflect.TypeOf((*ComplexUnion)(nil)).Elem(), "", - UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(ComplexUnionA{}), - }, - UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(ComplexUnionB{}), - }, - UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(ComplexUnionC{}), - }, - UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(ComplexUnionTypeA{}), - }, - UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(ComplexUnionTypeB{}), - }, - ) -} - -type MarshallingUnionStruct struct { - Union MarshallingUnion -} - -func (r *MarshallingUnionStruct) UnmarshalJSON(data []byte) (err error) { - *r = MarshallingUnionStruct{} - err = UnmarshalRoot(data, &r.Union) - return -} - -func (r MarshallingUnionStruct) MarshalJSON() (data []byte, err error) { - return MarshalRoot(r.Union) -} - -type MarshallingUnion interface { - marshallingUnion() -} - -type MarshallingUnionA struct { - Boo string `json:"boo"` -} - -func (MarshallingUnionA) marshallingUnion() {} - -func (r *MarshallingUnionA) UnmarshalJSON(data []byte) (err error) { - return UnmarshalRoot(data, r) -} - -type MarshallingUnionB struct { - Foo string `json:"foo"` -} - -func (MarshallingUnionB) marshallingUnion() {} - -func (r *MarshallingUnionB) UnmarshalJSON(data []byte) (err error) { - return UnmarshalRoot(data, r) -} - -func init() { - RegisterUnion( - reflect.TypeOf((*MarshallingUnion)(nil)).Elem(), - "", - UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(MarshallingUnionA{}), - }, - UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(MarshallingUnionB{}), - }, - ) -} - -var tests = map[string]struct { - buf string - val interface{} -}{ - "true": {"true", true}, - "false": {"false", false}, - "int": {"1", 1}, - "int_bigger": {"12324", 12324}, - "int_string_coerce": {`"65"`, 65}, - "int_boolean_coerce": {"true", 1}, - "int64": {"1", int64(1)}, - "int64_huge": {"123456789123456789", int64(123456789123456789)}, - "uint": {"1", uint(1)}, - "uint_bigger": {"12324", uint(12324)}, - "uint_coerce": {`"65"`, uint(65)}, - "float_1.54": {"1.54", float32(1.54)}, - "float_1.89": {"1.89", float64(1.89)}, - "string": {`"str"`, "str"}, - "string_int_coerce": {`12`, "12"}, - "array_string": {`["foo","bar"]`, []string{"foo", "bar"}}, - "array_int": {`[1,2]`, []int{1, 2}}, - "array_int_coerce": {`["1",2]`, []int{1, 2}}, - - "ptr_true": {"true", P(true)}, - "ptr_false": {"false", P(false)}, - "ptr_int": {"1", P(1)}, - "ptr_int_bigger": {"12324", P(12324)}, - "ptr_int_string_coerce": {`"65"`, P(65)}, - "ptr_int_boolean_coerce": {"true", P(1)}, - "ptr_int64": {"1", P(int64(1))}, - "ptr_int64_huge": {"123456789123456789", P(int64(123456789123456789))}, - "ptr_uint": {"1", P(uint(1))}, - "ptr_uint_bigger": {"12324", P(uint(12324))}, - "ptr_uint_coerce": {`"65"`, P(uint(65))}, - "ptr_float_1.54": {"1.54", P(float32(1.54))}, - "ptr_float_1.89": {"1.89", P(float64(1.89))}, - - 
"date_time": {`"2007-03-01T13:00:00Z"`, time.Date(2007, time.March, 1, 13, 0, 0, 0, time.UTC)}, - "date_time_nano_coerce": {`"2007-03-01T13:03:05.123456789Z"`, time.Date(2007, time.March, 1, 13, 3, 5, 123456789, time.UTC)}, - - "date_time_missing_t_coerce": {`"2007-03-01 13:03:05Z"`, time.Date(2007, time.March, 1, 13, 3, 5, 0, time.UTC)}, - "date_time_missing_timezone_coerce": {`"2007-03-01T13:03:05"`, time.Date(2007, time.March, 1, 13, 3, 5, 0, time.UTC)}, - // note: using -1200 to minimize probability of conflicting with the local timezone of the test runner - // see https://en.wikipedia.org/wiki/UTC%E2%88%9212:00 - "date_time_missing_timezone_colon_coerce": {`"2007-03-01T13:03:05-1200"`, time.Date(2007, time.March, 1, 13, 3, 5, 0, time.FixedZone("", -12*60*60))}, - "date_time_nano_missing_t_coerce": {`"2007-03-01 13:03:05.123456789Z"`, time.Date(2007, time.March, 1, 13, 3, 5, 123456789, time.UTC)}, - - "map_string": {`{"foo":"bar"}`, map[string]string{"foo": "bar"}}, - "map_string_with_sjson_path_chars": {`{":a.b.c*:d*-1e.f":"bar"}`, map[string]string{":a.b.c*:d*-1e.f": "bar"}}, - "map_interface": {`{"a":1,"b":"str","c":false}`, map[string]interface{}{"a": float64(1), "b": "str", "c": false}}, - - "primitive_struct": { - `{"a":false,"b":237628372683,"c":654,"d":9999.43,"e":43.76,"f":[1,2,3,4]}`, - Primitives{A: false, B: 237628372683, C: uint(654), D: 9999.43, E: 43.76, F: []int{1, 2, 3, 4}}, - }, - - "slices": { - `{"slices":[{"a":false,"b":237628372683,"c":654,"d":9999.43,"e":43.76,"f":[1,2,3,4]}]}`, - Slices{ - Slice: []Primitives{{A: false, B: 237628372683, C: uint(654), D: 9999.43, E: 43.76, F: []int{1, 2, 3, 4}}}, - }, - }, - - "primitive_pointer_struct": { - `{"a":false,"b":237628372683,"c":654,"d":9999.43,"e":43.76,"f":[1,2,3,4,5]}`, - PrimitivePointers{ - A: P(false), - B: P(237628372683), - C: P(uint(654)), - D: P(9999.43), - E: P(float32(43.76)), - F: &[]int{1, 2, 3, 4, 5}, - }, - }, - - "datetime_struct": { - `{"date":"2006-01-02","date-time":"2006-01-02T15:04:05Z"}`, - DateTime{ - Date: time.Date(2006, time.January, 2, 0, 0, 0, 0, time.UTC), - DateTime: time.Date(2006, time.January, 2, 15, 4, 5, 0, time.UTC), - }, - }, - - "additional_properties": { - `{"a":true,"bar":"value","foo":true}`, - AdditionalProperties{ - A: true, - ExtraFields: map[string]interface{}{ - "bar": "value", - "foo": true, - }, - }, - }, - - "embedded_struct": { - `{"a":1,"b":"bar"}`, - EmbeddedStructs{ - EmbeddedStruct: EmbeddedStruct{ - A: true, - B: "bar", - JSON: EmbeddedStructJSON{ - A: Field{raw: `1`, status: valid}, - B: Field{raw: `"bar"`, status: valid}, - raw: `{"a":1,"b":"bar"}`, - }, - }, - A: P(1), - ExtraFields: map[string]interface{}{"b": "bar"}, - JSON: EmbeddedStructsJSON{ - A: Field{raw: `1`, status: valid}, - ExtraFields: map[string]Field{ - "b": {raw: `"bar"`, status: valid}, - }, - raw: `{"a":1,"b":"bar"}`, - }, - }, - }, - - "recursive_struct": { - `{"child":{"name":"Alex"},"name":"Robert"}`, - Recursive{Name: "Robert", Child: &Recursive{Name: "Alex"}}, - }, - - "metadata_coerce": { - `{"a":"12","b":"12","c":null,"extra_typed":12,"extra_untyped":{"foo":"bar"}}`, - JSONFieldStruct{ - A: false, - B: 12, - C: "", - JSON: JSONFieldStructJSON{ - raw: `{"a":"12","b":"12","c":null,"extra_typed":12,"extra_untyped":{"foo":"bar"}}`, - A: Field{raw: `"12"`, status: invalid}, - B: Field{raw: `"12"`, status: valid}, - C: Field{raw: "null", status: null}, - D: Field{raw: "", status: missing}, - ExtraFields: map[string]Field{ - "extra_typed": { - raw: "12", - status: valid, - }, - 
"extra_untyped": { - raw: `{"foo":"bar"}`, - status: invalid, - }, - }, - }, - ExtraFields: map[string]int64{ - "extra_typed": 12, - "extra_untyped": 0, - }, - }, - }, - - "unknown_struct_number": { - `{"unknown":12}`, - UnknownStruct{ - Unknown: 12., - }, - }, - - "unknown_struct_map": { - `{"unknown":{"foo":"bar"}}`, - UnknownStruct{ - Unknown: map[string]interface{}{ - "foo": "bar", - }, - }, - }, - - "union_integer": { - `{"union":12}`, - UnionStruct{ - Union: UnionInteger(12), - }, - }, - - "union_struct_discriminated_a": { - `{"union":{"a":"foo","b":"bar","type":"typeA"}}`, - UnionStruct{ - Union: UnionStructA{ - Type: "typeA", - A: "foo", - B: "bar", - }, - }, - }, - - "union_struct_discriminated_b": { - `{"union":{"a":"foo","type":"typeB"}}`, - UnionStruct{ - Union: UnionStructB{ - Type: "typeB", - A: "foo", - }, - }, - }, - - "union_struct_time": { - `{"union":"2010-05-23"}`, - UnionStruct{ - Union: UnionTime(time.Date(2010, 05, 23, 0, 0, 0, 0, time.UTC)), - }, - }, - - "complex_union_a": { - `{"union":{"boo":"12","foo":true}}`, - ComplexUnionStruct{Union: ComplexUnionA{Boo: "12", Foo: true}}, - }, - - "complex_union_b": { - `{"union":{"boo":true,"foo":"12"}}`, - ComplexUnionStruct{Union: ComplexUnionB{Boo: true, Foo: "12"}}, - }, - - "complex_union_c": { - `{"union":{"boo":12}}`, - ComplexUnionStruct{Union: ComplexUnionC{Boo: 12}}, - }, - - "complex_union_type_a": { - `{"union":{"baz":12,"type":"a"}}`, - ComplexUnionStruct{Union: ComplexUnionTypeA{Baz: 12, Type: TypeA("a")}}, - }, - - "complex_union_type_b": { - `{"union":{"baz":12,"type":"b"}}`, - ComplexUnionStruct{Union: ComplexUnionTypeB{Baz: 12, Type: TypeB("b")}}, - }, - - "marshalling_union_a": { - `{"boo":"hello"}`, - MarshallingUnionStruct{Union: MarshallingUnionA{Boo: "hello"}}, - }, - "marshalling_union_b": { - `{"foo":"hi"}`, - MarshallingUnionStruct{Union: MarshallingUnionB{Foo: "hi"}}, - }, - - "unmarshal": { - `{"foo":"hello"}`, - &UnmarshalStruct{Foo: "hello", prop: true}, - }, - - "array_of_unmarshal": { - `[{"foo":"hello"}]`, - []UnmarshalStruct{{Foo: "hello", prop: true}}, - }, - - "inline_coerce": { - `{"a":false,"b":237628372683,"c":654,"d":9999.43,"e":43.76,"f":[1,2,3,4]}`, - Inline{ - InlineField: Primitives{A: false, B: 237628372683, C: 0x28e, D: 9999.43, E: 43.76, F: []int{1, 2, 3, 4}}, - JSON: InlineJSON{ - InlineField: Field{raw: "{\"a\":false,\"b\":237628372683,\"c\":654,\"d\":9999.43,\"e\":43.76,\"f\":[1,2,3,4]}", status: 3}, - raw: "{\"a\":false,\"b\":237628372683,\"c\":654,\"d\":9999.43,\"e\":43.76,\"f\":[1,2,3,4]}", - }, - }, - }, - - "inline_array_coerce": { - `["Hello","foo","bar"]`, - InlineArray{ - InlineField: []string{"Hello", "foo", "bar"}, - JSON: InlineJSON{ - InlineField: Field{raw: `["Hello","foo","bar"]`, status: 3}, - raw: `["Hello","foo","bar"]`, - }, - }, - }, -} - -func TestDecode(t *testing.T) { - for name, test := range tests { - t.Run(name, func(t *testing.T) { - result := reflect.New(reflect.TypeOf(test.val)) - if err := Unmarshal([]byte(test.buf), result.Interface()); err != nil { - t.Fatalf("deserialization of %v failed with error %v", result, err) - } - if !reflect.DeepEqual(result.Elem().Interface(), test.val) { - t.Fatalf("expected '%s' to deserialize to \n%#v\nbut got\n%#v", test.buf, test.val, result.Elem().Interface()) - } - }) - } -} - -func TestEncode(t *testing.T) { - for name, test := range tests { - if strings.HasSuffix(name, "_coerce") { - continue - } - t.Run(name, func(t *testing.T) { - raw, err := Marshal(test.val) - if err != nil { - t.Fatalf("serialization of 
%v failed with error %v", test.val, err) - } - if string(raw) != test.buf { - t.Fatalf("expected %+#v to serialize to %s but got %s", test.val, test.buf, string(raw)) - } - }) - } -} diff --git a/packages/tui/sdk/internal/apijson/port.go b/packages/tui/sdk/internal/apijson/port.go deleted file mode 100644 index 502ab778..00000000 --- a/packages/tui/sdk/internal/apijson/port.go +++ /dev/null @@ -1,120 +0,0 @@ -package apijson - -import ( - "fmt" - "reflect" -) - -// Port copies over values from one struct to another struct. -func Port(from any, to any) error { - toVal := reflect.ValueOf(to) - fromVal := reflect.ValueOf(from) - - if toVal.Kind() != reflect.Ptr || toVal.IsNil() { - return fmt.Errorf("destination must be a non-nil pointer") - } - - for toVal.Kind() == reflect.Ptr { - toVal = toVal.Elem() - } - toType := toVal.Type() - - for fromVal.Kind() == reflect.Ptr { - fromVal = fromVal.Elem() - } - fromType := fromVal.Type() - - if toType.Kind() != reflect.Struct { - return fmt.Errorf("destination must be a non-nil pointer to a struct (%v %v)", toType, toType.Kind()) - } - - values := map[string]reflect.Value{} - fields := map[string]reflect.Value{} - - fromJSON := fromVal.FieldByName("JSON") - toJSON := toVal.FieldByName("JSON") - - // Iterate through the fields of v and load all the "normal" fields in the struct to the map of - // string to reflect.Value, as well as their raw .JSON.Foo counterpart indicated by j. - var getFields func(t reflect.Type, v reflect.Value) - getFields = func(t reflect.Type, v reflect.Value) { - j := v.FieldByName("JSON") - - // Recurse into anonymous fields first, since the fields on the object should win over the fields in the - // embedded object. - for i := 0; i < t.NumField(); i++ { - field := t.Field(i) - if field.Anonymous { - getFields(field.Type, v.Field(i)) - continue - } - } - - for i := 0; i < t.NumField(); i++ { - field := t.Field(i) - ptag, ok := parseJSONStructTag(field) - if !ok || ptag.name == "-" { - continue - } - values[ptag.name] = v.Field(i) - if j.IsValid() { - fields[ptag.name] = j.FieldByName(field.Name) - } - } - } - getFields(fromType, fromVal) - - // Use the values from the previous step to populate the 'to' struct. 
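// Aside: a minimal sketch of what Port is for, using the Card / CardVisa
// fixtures from port_test.go below: flattening one union variant into the
// "combined" struct that carries the superset of fields.
//
//	visa := CardVisa{
//		Processor: "visa",
//		IsFoo:     true,
//		Data:      CardVisaData{Foo: "foo"},
//	}
//	var card Card
//	if err := Port(visa, &card); err != nil {
//		// handle the error
//	}
//	// card.Processor == "visa", card.IsFoo == true, and card.Data holds the
//	// CardVisaData. Fields are matched by json tag name, not by position.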
- for i := 0; i < toType.NumField(); i++ { - field := toType.Field(i) - ptag, ok := parseJSONStructTag(field) - if !ok { - continue - } - if ptag.name == "-" { - continue - } - if value, ok := values[ptag.name]; ok { - delete(values, ptag.name) - if field.Type.Kind() == reflect.Interface { - toVal.Field(i).Set(value) - } else { - switch value.Kind() { - case reflect.String: - toVal.Field(i).SetString(value.String()) - case reflect.Bool: - toVal.Field(i).SetBool(value.Bool()) - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - toVal.Field(i).SetInt(value.Int()) - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: - toVal.Field(i).SetUint(value.Uint()) - case reflect.Float32, reflect.Float64: - toVal.Field(i).SetFloat(value.Float()) - default: - toVal.Field(i).Set(value) - } - } - } - - if fromJSONField, ok := fields[ptag.name]; ok { - if toJSONField := toJSON.FieldByName(field.Name); toJSONField.IsValid() { - toJSONField.Set(fromJSONField) - } - } - } - - // Finally, copy over the .JSON.raw and .JSON.ExtraFields - if toJSON.IsValid() { - if raw := toJSON.FieldByName("raw"); raw.IsValid() { - setUnexportedField(raw, fromJSON.Interface().(interface{ RawJSON() string }).RawJSON()) - } - - if toExtraFields := toJSON.FieldByName("ExtraFields"); toExtraFields.IsValid() { - if fromExtraFields := fromJSON.FieldByName("ExtraFields"); fromExtraFields.IsValid() { - setUnexportedField(toExtraFields, fromExtraFields.Interface()) - } - } - } - - return nil -} diff --git a/packages/tui/sdk/internal/apijson/port_test.go b/packages/tui/sdk/internal/apijson/port_test.go deleted file mode 100644 index 11540533..00000000 --- a/packages/tui/sdk/internal/apijson/port_test.go +++ /dev/null @@ -1,257 +0,0 @@ -package apijson - -import ( - "reflect" - "testing" -) - -type Metadata struct { - CreatedAt string `json:"created_at"` -} - -// Card is the "combined" type of CardVisa and CardMastercard -type Card struct { - Processor CardProcessor `json:"processor"` - Data any `json:"data"` - IsFoo bool `json:"is_foo"` - IsBar bool `json:"is_bar"` - Metadata Metadata `json:"metadata"` - Value interface{} `json:"value"` - - JSON cardJSON -} - -type cardJSON struct { - Processor Field - Data Field - IsFoo Field - IsBar Field - Metadata Field - Value Field - ExtraFields map[string]Field - raw string -} - -func (r cardJSON) RawJSON() string { return r.raw } - -type CardProcessor string - -// CardVisa -type CardVisa struct { - Processor CardVisaProcessor `json:"processor"` - Data CardVisaData `json:"data"` - IsFoo bool `json:"is_foo"` - Metadata Metadata `json:"metadata"` - Value string `json:"value"` - - JSON cardVisaJSON -} - -type cardVisaJSON struct { - Processor Field - Data Field - IsFoo Field - Metadata Field - Value Field - ExtraFields map[string]Field - raw string -} - -func (r cardVisaJSON) RawJSON() string { return r.raw } - -type CardVisaProcessor string - -type CardVisaData struct { - Foo string `json:"foo"` -} - -// CardMastercard -type CardMastercard struct { - Processor CardMastercardProcessor `json:"processor"` - Data CardMastercardData `json:"data"` - IsBar bool `json:"is_bar"` - Metadata Metadata `json:"metadata"` - Value bool `json:"value"` - - JSON cardMastercardJSON -} - -type cardMastercardJSON struct { - Processor Field - Data Field - IsBar Field - Metadata Field - Value Field - ExtraFields map[string]Field - raw string -} - -func (r cardMastercardJSON) RawJSON() string { return r.raw } - -type CardMastercardProcessor string - -type 
CardMastercardData struct { - Bar int64 `json:"bar"` -} - -type CommonFields struct { - Metadata Metadata `json:"metadata"` - Value string `json:"value"` - - JSON commonFieldsJSON -} - -type commonFieldsJSON struct { - Metadata Field - Value Field - ExtraFields map[string]Field - raw string -} - -type CardEmbedded struct { - CommonFields - Processor CardVisaProcessor `json:"processor"` - Data CardVisaData `json:"data"` - IsFoo bool `json:"is_foo"` - - JSON cardEmbeddedJSON -} - -type cardEmbeddedJSON struct { - Processor Field - Data Field - IsFoo Field - ExtraFields map[string]Field - raw string -} - -func (r cardEmbeddedJSON) RawJSON() string { return r.raw } - -var portTests = map[string]struct { - from any - to any -}{ - "visa to card": { - CardVisa{ - Processor: "visa", - IsFoo: true, - Data: CardVisaData{ - Foo: "foo", - }, - Metadata: Metadata{ - CreatedAt: "Mar 29 2024", - }, - Value: "value", - JSON: cardVisaJSON{ - raw: `{"processor":"visa","is_foo":true,"data":{"foo":"foo"}}`, - Processor: Field{raw: `"visa"`, status: valid}, - IsFoo: Field{raw: `true`, status: valid}, - Data: Field{raw: `{"foo":"foo"}`, status: valid}, - Value: Field{raw: `"value"`, status: valid}, - ExtraFields: map[string]Field{"extra": {raw: `"yo"`, status: valid}}, - }, - }, - Card{ - Processor: "visa", - IsFoo: true, - IsBar: false, - Data: CardVisaData{ - Foo: "foo", - }, - Metadata: Metadata{ - CreatedAt: "Mar 29 2024", - }, - Value: "value", - JSON: cardJSON{ - raw: `{"processor":"visa","is_foo":true,"data":{"foo":"foo"}}`, - Processor: Field{raw: `"visa"`, status: valid}, - IsFoo: Field{raw: `true`, status: valid}, - Data: Field{raw: `{"foo":"foo"}`, status: valid}, - Value: Field{raw: `"value"`, status: valid}, - ExtraFields: map[string]Field{"extra": {raw: `"yo"`, status: valid}}, - }, - }, - }, - "mastercard to card": { - CardMastercard{ - Processor: "mastercard", - IsBar: true, - Data: CardMastercardData{ - Bar: 13, - }, - Value: false, - }, - Card{ - Processor: "mastercard", - IsFoo: false, - IsBar: true, - Data: CardMastercardData{ - Bar: 13, - }, - Value: false, - }, - }, - "embedded to card": { - CardEmbedded{ - CommonFields: CommonFields{ - Metadata: Metadata{ - CreatedAt: "Mar 29 2024", - }, - Value: "embedded_value", - JSON: commonFieldsJSON{ - Metadata: Field{raw: `{"created_at":"Mar 29 2024"}`, status: valid}, - Value: Field{raw: `"embedded_value"`, status: valid}, - raw: `should not matter`, - }, - }, - Processor: "visa", - IsFoo: true, - Data: CardVisaData{ - Foo: "embedded_foo", - }, - JSON: cardEmbeddedJSON{ - raw: `{"processor":"visa","is_foo":true,"data":{"foo":"embedded_foo"},"metadata":{"created_at":"Mar 29 2024"},"value":"embedded_value"}`, - Processor: Field{raw: `"visa"`, status: valid}, - IsFoo: Field{raw: `true`, status: valid}, - Data: Field{raw: `{"foo":"embedded_foo"}`, status: valid}, - }, - }, - Card{ - Processor: "visa", - IsFoo: true, - IsBar: false, - Data: CardVisaData{ - Foo: "embedded_foo", - }, - Metadata: Metadata{ - CreatedAt: "Mar 29 2024", - }, - Value: "embedded_value", - JSON: cardJSON{ - raw: `{"processor":"visa","is_foo":true,"data":{"foo":"embedded_foo"},"metadata":{"created_at":"Mar 29 2024"},"value":"embedded_value"}`, - Processor: Field{raw: `"visa"`, status: 0x3}, - IsFoo: Field{raw: "true", status: 0x3}, - Data: Field{raw: `{"foo":"embedded_foo"}`, status: 0x3}, - Metadata: Field{raw: `{"created_at":"Mar 29 2024"}`, status: 0x3}, - Value: Field{raw: `"embedded_value"`, status: 0x3}, - }, - }, - }, -} - -func TestPort(t *testing.T) { - for name, test 
:= range portTests { - t.Run(name, func(t *testing.T) { - toVal := reflect.New(reflect.TypeOf(test.to)) - - err := Port(test.from, toVal.Interface()) - if err != nil { - t.Fatalf("port of %v failed with error %v", test.from, err) - } - - if !reflect.DeepEqual(toVal.Elem().Interface(), test.to) { - t.Fatalf("expected:\n%+#v\n\nto port to:\n%+#v\n\nbut got:\n%+#v", test.from, test.to, toVal.Elem().Interface()) - } - }) - } -} diff --git a/packages/tui/sdk/internal/apijson/registry.go b/packages/tui/sdk/internal/apijson/registry.go deleted file mode 100644 index 119cc5ff..00000000 --- a/packages/tui/sdk/internal/apijson/registry.go +++ /dev/null @@ -1,41 +0,0 @@ -package apijson - -import ( - "reflect" - - "github.com/tidwall/gjson" -) - -type UnionVariant struct { - TypeFilter gjson.Type - DiscriminatorValue interface{} - Type reflect.Type -} - -var unionRegistry = map[reflect.Type]unionEntry{} -var unionVariants = map[reflect.Type]interface{}{} - -type unionEntry struct { - discriminatorKey string - variants []UnionVariant -} - -func RegisterUnion(typ reflect.Type, discriminator string, variants ...UnionVariant) { - unionRegistry[typ] = unionEntry{ - discriminatorKey: discriminator, - variants: variants, - } - for _, variant := range variants { - unionVariants[variant.Type] = typ - } -} - -// Useful to wrap a union type to force it to use [apijson.UnmarshalJSON] since you cannot define an -// UnmarshalJSON function on the interface itself. -type UnionUnmarshaler[T any] struct { - Value T -} - -func (c *UnionUnmarshaler[T]) UnmarshalJSON(buf []byte) error { - return UnmarshalRoot(buf, &c.Value) -} diff --git a/packages/tui/sdk/internal/apijson/tag.go b/packages/tui/sdk/internal/apijson/tag.go deleted file mode 100644 index 812fb3ca..00000000 --- a/packages/tui/sdk/internal/apijson/tag.go +++ /dev/null @@ -1,47 +0,0 @@ -package apijson - -import ( - "reflect" - "strings" -) - -const jsonStructTag = "json" -const formatStructTag = "format" - -type parsedStructTag struct { - name string - required bool - extras bool - metadata bool - inline bool -} - -func parseJSONStructTag(field reflect.StructField) (tag parsedStructTag, ok bool) { - raw, ok := field.Tag.Lookup(jsonStructTag) - if !ok { - return - } - parts := strings.Split(raw, ",") - if len(parts) == 0 { - return tag, false - } - tag.name = parts[0] - for _, part := range parts[1:] { - switch part { - case "required": - tag.required = true - case "extras": - tag.extras = true - case "metadata": - tag.metadata = true - case "inline": - tag.inline = true - } - } - return -} - -func parseFormatStructTag(field reflect.StructField) (format string, ok bool) { - format, ok = field.Tag.Lookup(formatStructTag) - return -} diff --git a/packages/tui/sdk/internal/apiquery/encoder.go b/packages/tui/sdk/internal/apiquery/encoder.go deleted file mode 100644 index 0922c231..00000000 --- a/packages/tui/sdk/internal/apiquery/encoder.go +++ /dev/null @@ -1,341 +0,0 @@ -package apiquery - -import ( - "encoding/json" - "fmt" - "reflect" - "strconv" - "strings" - "sync" - "time" - - "github.com/sst/opencode-sdk-go/internal/param" -) - -var encoders sync.Map // map[reflect.Type]encoderFunc - -type encoder struct { - dateFormat string - root bool - settings QuerySettings -} - -type encoderFunc func(key string, value reflect.Value) []Pair - -type encoderField struct { - tag parsedStructTag - fn encoderFunc - idx []int -} - -type encoderEntry struct { - reflect.Type - dateFormat string - root bool - settings QuerySettings -} - -type Pair struct { - key string - 
value string -} - -func (e *encoder) typeEncoder(t reflect.Type) encoderFunc { - entry := encoderEntry{ - Type: t, - dateFormat: e.dateFormat, - root: e.root, - settings: e.settings, - } - - if fi, ok := encoders.Load(entry); ok { - return fi.(encoderFunc) - } - - // To deal with recursive types, populate the map with an - // indirect func before we build it. This type waits on the - // real func (f) to be ready and then calls it. This indirect - // func is only used for recursive types. - var ( - wg sync.WaitGroup - f encoderFunc - ) - wg.Add(1) - fi, loaded := encoders.LoadOrStore(entry, encoderFunc(func(key string, v reflect.Value) []Pair { - wg.Wait() - return f(key, v) - })) - if loaded { - return fi.(encoderFunc) - } - - // Compute the real encoder and replace the indirect func with it. - f = e.newTypeEncoder(t) - wg.Done() - encoders.Store(entry, f) - return f -} - -func marshalerEncoder(key string, value reflect.Value) []Pair { - s, _ := value.Interface().(json.Marshaler).MarshalJSON() - return []Pair{{key, string(s)}} -} - -func (e *encoder) newTypeEncoder(t reflect.Type) encoderFunc { - if t.ConvertibleTo(reflect.TypeOf(time.Time{})) { - return e.newTimeTypeEncoder(t) - } - if !e.root && t.Implements(reflect.TypeOf((*json.Marshaler)(nil)).Elem()) { - return marshalerEncoder - } - e.root = false - switch t.Kind() { - case reflect.Pointer: - encoder := e.typeEncoder(t.Elem()) - return func(key string, value reflect.Value) (pairs []Pair) { - if !value.IsValid() || value.IsNil() { - return - } - pairs = encoder(key, value.Elem()) - return - } - case reflect.Struct: - return e.newStructTypeEncoder(t) - case reflect.Array: - fallthrough - case reflect.Slice: - return e.newArrayTypeEncoder(t) - case reflect.Map: - return e.newMapEncoder(t) - case reflect.Interface: - return e.newInterfaceEncoder() - default: - return e.newPrimitiveTypeEncoder(t) - } -} - -func (e *encoder) newStructTypeEncoder(t reflect.Type) encoderFunc { - if t.Implements(reflect.TypeOf((*param.FieldLike)(nil)).Elem()) { - return e.newFieldTypeEncoder(t) - } - - encoderFields := []encoderField{} - - // This helper allows us to recursively collect field encoders into a flat - // array. The parameter `index` keeps track of the access patterns necessary - // to get to some field. - var collectEncoderFields func(r reflect.Type, index []int) - collectEncoderFields = func(r reflect.Type, index []int) { - for i := 0; i < r.NumField(); i++ { - idx := append(index, i) - field := t.FieldByIndex(idx) - if !field.IsExported() { - continue - } - // If this is an embedded struct, traverse one level deeper to extract - // the field and get their encoders as well. - if field.Anonymous { - collectEncoderFields(field.Type, idx) - continue - } - // If query tag is not present, then we skip, which is intentionally - // different behavior from the stdlib. 
- ptag, ok := parseQueryStructTag(field) - if !ok { - continue - } - - if ptag.name == "-" && !ptag.inline { - continue - } - - dateFormat, ok := parseFormatStructTag(field) - oldFormat := e.dateFormat - if ok { - switch dateFormat { - case "date-time": - e.dateFormat = time.RFC3339 - case "date": - e.dateFormat = "2006-01-02" - } - } - encoderFields = append(encoderFields, encoderField{ptag, e.typeEncoder(field.Type), idx}) - e.dateFormat = oldFormat - } - } - collectEncoderFields(t, []int{}) - - return func(key string, value reflect.Value) (pairs []Pair) { - for _, ef := range encoderFields { - var subkey string = e.renderKeyPath(key, ef.tag.name) - if ef.tag.inline { - subkey = key - } - - field := value.FieldByIndex(ef.idx) - pairs = append(pairs, ef.fn(subkey, field)...) - } - return - } -} - -func (e *encoder) newMapEncoder(t reflect.Type) encoderFunc { - keyEncoder := e.typeEncoder(t.Key()) - elementEncoder := e.typeEncoder(t.Elem()) - return func(key string, value reflect.Value) (pairs []Pair) { - iter := value.MapRange() - for iter.Next() { - encodedKey := keyEncoder("", iter.Key()) - if len(encodedKey) != 1 { - panic("Unexpected number of parts for encoded map key. Are you using a non-primitive for this map?") - } - subkey := encodedKey[0].value - keyPath := e.renderKeyPath(key, subkey) - pairs = append(pairs, elementEncoder(keyPath, iter.Value())...) - } - return - } -} - -func (e *encoder) renderKeyPath(key string, subkey string) string { - if len(key) == 0 { - return subkey - } - if e.settings.NestedFormat == NestedQueryFormatDots { - return fmt.Sprintf("%s.%s", key, subkey) - } - return fmt.Sprintf("%s[%s]", key, subkey) -} - -func (e *encoder) newArrayTypeEncoder(t reflect.Type) encoderFunc { - switch e.settings.ArrayFormat { - case ArrayQueryFormatComma: - innerEncoder := e.typeEncoder(t.Elem()) - return func(key string, v reflect.Value) []Pair { - elements := []string{} - for i := 0; i < v.Len(); i++ { - for _, pair := range innerEncoder("", v.Index(i)) { - elements = append(elements, pair.value) - } - } - if len(elements) == 0 { - return []Pair{} - } - return []Pair{{key, strings.Join(elements, ",")}} - } - case ArrayQueryFormatRepeat: - innerEncoder := e.typeEncoder(t.Elem()) - return func(key string, value reflect.Value) (pairs []Pair) { - for i := 0; i < value.Len(); i++ { - pairs = append(pairs, innerEncoder(key, value.Index(i))...) - } - return pairs - } - case ArrayQueryFormatIndices: - panic("The array indices format is not supported yet") - case ArrayQueryFormatBrackets: - innerEncoder := e.typeEncoder(t.Elem()) - return func(key string, value reflect.Value) []Pair { - pairs := []Pair{} - for i := 0; i < value.Len(); i++ { - pairs = append(pairs, innerEncoder(key+"[]", value.Index(i))...) 
- } - return pairs - } - default: - panic(fmt.Sprintf("Unknown ArrayFormat value: %d", e.settings.ArrayFormat)) - } -} - -func (e *encoder) newPrimitiveTypeEncoder(t reflect.Type) encoderFunc { - switch t.Kind() { - case reflect.Pointer: - inner := t.Elem() - - innerEncoder := e.newPrimitiveTypeEncoder(inner) - return func(key string, v reflect.Value) []Pair { - if !v.IsValid() || v.IsNil() { - return nil - } - return innerEncoder(key, v.Elem()) - } - case reflect.String: - return func(key string, v reflect.Value) []Pair { - return []Pair{{key, v.String()}} - } - case reflect.Bool: - return func(key string, v reflect.Value) []Pair { - if v.Bool() { - return []Pair{{key, "true"}} - } - return []Pair{{key, "false"}} - } - case reflect.Int, reflect.Int16, reflect.Int32, reflect.Int64: - return func(key string, v reflect.Value) []Pair { - return []Pair{{key, strconv.FormatInt(v.Int(), 10)}} - } - case reflect.Uint, reflect.Uint16, reflect.Uint32, reflect.Uint64: - return func(key string, v reflect.Value) []Pair { - return []Pair{{key, strconv.FormatUint(v.Uint(), 10)}} - } - case reflect.Float32, reflect.Float64: - return func(key string, v reflect.Value) []Pair { - return []Pair{{key, strconv.FormatFloat(v.Float(), 'f', -1, 64)}} - } - case reflect.Complex64, reflect.Complex128: - bitSize := 64 - if t.Kind() == reflect.Complex128 { - bitSize = 128 - } - return func(key string, v reflect.Value) []Pair { - return []Pair{{key, strconv.FormatComplex(v.Complex(), 'f', -1, bitSize)}} - } - default: - return func(key string, v reflect.Value) []Pair { - return nil - } - } -} - -func (e *encoder) newFieldTypeEncoder(t reflect.Type) encoderFunc { - f, _ := t.FieldByName("Value") - enc := e.typeEncoder(f.Type) - - return func(key string, value reflect.Value) []Pair { - present := value.FieldByName("Present") - if !present.Bool() { - return nil - } - null := value.FieldByName("Null") - if null.Bool() { - // TODO: Error? 
- return nil - } - raw := value.FieldByName("Raw") - if !raw.IsNil() { - return e.typeEncoder(raw.Type())(key, raw) - } - return enc(key, value.FieldByName("Value")) - } -} - -func (e *encoder) newTimeTypeEncoder(t reflect.Type) encoderFunc { - format := e.dateFormat - return func(key string, value reflect.Value) []Pair { - return []Pair{{ - key, - value.Convert(reflect.TypeOf(time.Time{})).Interface().(time.Time).Format(format), - }} - } -} - -func (e encoder) newInterfaceEncoder() encoderFunc { - return func(key string, value reflect.Value) []Pair { - value = value.Elem() - if !value.IsValid() { - return nil - } - return e.typeEncoder(value.Type())(key, value) - } - -} diff --git a/packages/tui/sdk/internal/apiquery/query.go b/packages/tui/sdk/internal/apiquery/query.go deleted file mode 100644 index 6f90e993..00000000 --- a/packages/tui/sdk/internal/apiquery/query.go +++ /dev/null @@ -1,50 +0,0 @@ -package apiquery - -import ( - "net/url" - "reflect" - "time" -) - -func MarshalWithSettings(value interface{}, settings QuerySettings) url.Values { - e := encoder{time.RFC3339, true, settings} - kv := url.Values{} - val := reflect.ValueOf(value) - if !val.IsValid() { - return nil - } - typ := val.Type() - for _, pair := range e.typeEncoder(typ)("", val) { - kv.Add(pair.key, pair.value) - } - return kv -} - -func Marshal(value interface{}) url.Values { - return MarshalWithSettings(value, QuerySettings{}) -} - -type Queryer interface { - URLQuery() url.Values -} - -type QuerySettings struct { - NestedFormat NestedQueryFormat - ArrayFormat ArrayQueryFormat -} - -type NestedQueryFormat int - -const ( - NestedQueryFormatBrackets NestedQueryFormat = iota - NestedQueryFormatDots -) - -type ArrayQueryFormat int - -const ( - ArrayQueryFormatComma ArrayQueryFormat = iota - ArrayQueryFormatRepeat - ArrayQueryFormatIndices - ArrayQueryFormatBrackets -) diff --git a/packages/tui/sdk/internal/apiquery/query_test.go b/packages/tui/sdk/internal/apiquery/query_test.go deleted file mode 100644 index 1e740d6a..00000000 --- a/packages/tui/sdk/internal/apiquery/query_test.go +++ /dev/null @@ -1,335 +0,0 @@ -package apiquery - -import ( - "net/url" - "testing" - "time" -) - -func P[T any](v T) *T { return &v } - -type Primitives struct { - A bool `query:"a"` - B int `query:"b"` - C uint `query:"c"` - D float64 `query:"d"` - E float32 `query:"e"` - F []int `query:"f"` -} - -type PrimitivePointers struct { - A *bool `query:"a"` - B *int `query:"b"` - C *uint `query:"c"` - D *float64 `query:"d"` - E *float32 `query:"e"` - F *[]int `query:"f"` -} - -type Slices struct { - Slice []Primitives `query:"slices"` - Mixed []interface{} `query:"mixed"` -} - -type DateTime struct { - Date time.Time `query:"date" format:"date"` - DateTime time.Time `query:"date-time" format:"date-time"` -} - -type AdditionalProperties struct { - A bool `query:"a"` - Extras map[string]interface{} `query:"-,inline"` -} - -type Recursive struct { - Name string `query:"name"` - Child *Recursive `query:"child"` -} - -type UnknownStruct struct { - Unknown interface{} `query:"unknown"` -} - -type UnionStruct struct { - Union Union `query:"union" format:"date"` -} - -type Union interface { - union() -} - -type UnionInteger int64 - -func (UnionInteger) union() {} - -type UnionString string - -func (UnionString) union() {} - -type UnionStructA struct { - Type string `query:"type"` - A string `query:"a"` - B string `query:"b"` -} - -func (UnionStructA) union() {} - -type UnionStructB struct { - Type string `query:"type"` - A string `query:"a"` -} - -func 
(UnionStructB) union() {} - -type UnionTime time.Time - -func (UnionTime) union() {} - -type DeeplyNested struct { - A DeeplyNested1 `query:"a"` -} - -type DeeplyNested1 struct { - B DeeplyNested2 `query:"b"` -} - -type DeeplyNested2 struct { - C DeeplyNested3 `query:"c"` -} - -type DeeplyNested3 struct { - D *string `query:"d"` -} - -var tests = map[string]struct { - enc string - val interface{} - settings QuerySettings -}{ - "primitives": { - "a=false&b=237628372683&c=654&d=9999.43&e=43.7599983215332&f=1,2,3,4", - Primitives{A: false, B: 237628372683, C: uint(654), D: 9999.43, E: 43.76, F: []int{1, 2, 3, 4}}, - QuerySettings{}, - }, - - "slices_brackets": { - `mixed[]=1&mixed[]=2.3&mixed[]=hello&slices[][a]=false&slices[][a]=false&slices[][b]=237628372683&slices[][b]=237628372683&slices[][c]=654&slices[][c]=654&slices[][d]=9999.43&slices[][d]=9999.43&slices[][e]=43.7599983215332&slices[][e]=43.7599983215332&slices[][f][]=1&slices[][f][]=2&slices[][f][]=3&slices[][f][]=4&slices[][f][]=1&slices[][f][]=2&slices[][f][]=3&slices[][f][]=4`, - Slices{ - Slice: []Primitives{ - {A: false, B: 237628372683, C: uint(654), D: 9999.43, E: 43.76, F: []int{1, 2, 3, 4}}, - {A: false, B: 237628372683, C: uint(654), D: 9999.43, E: 43.76, F: []int{1, 2, 3, 4}}, - }, - Mixed: []interface{}{1, 2.3, "hello"}, - }, - QuerySettings{ArrayFormat: ArrayQueryFormatBrackets}, - }, - - "slices_comma": { - `mixed=1,2.3,hello`, - Slices{ - Mixed: []interface{}{1, 2.3, "hello"}, - }, - QuerySettings{ArrayFormat: ArrayQueryFormatComma}, - }, - - "slices_repeat": { - `mixed=1&mixed=2.3&mixed=hello&slices[a]=false&slices[a]=false&slices[b]=237628372683&slices[b]=237628372683&slices[c]=654&slices[c]=654&slices[d]=9999.43&slices[d]=9999.43&slices[e]=43.7599983215332&slices[e]=43.7599983215332&slices[f]=1&slices[f]=2&slices[f]=3&slices[f]=4&slices[f]=1&slices[f]=2&slices[f]=3&slices[f]=4`, - Slices{ - Slice: []Primitives{ - {A: false, B: 237628372683, C: uint(654), D: 9999.43, E: 43.76, F: []int{1, 2, 3, 4}}, - {A: false, B: 237628372683, C: uint(654), D: 9999.43, E: 43.76, F: []int{1, 2, 3, 4}}, - }, - Mixed: []interface{}{1, 2.3, "hello"}, - }, - QuerySettings{ArrayFormat: ArrayQueryFormatRepeat}, - }, - - "primitive_pointer_struct": { - "a=false&b=237628372683&c=654&d=9999.43&e=43.7599983215332&f=1,2,3,4,5", - PrimitivePointers{ - A: P(false), - B: P(237628372683), - C: P(uint(654)), - D: P(9999.43), - E: P(float32(43.76)), - F: &[]int{1, 2, 3, 4, 5}, - }, - QuerySettings{}, - }, - - "datetime_struct": { - `date=2006-01-02&date-time=2006-01-02T15:04:05Z`, - DateTime{ - Date: time.Date(2006, time.January, 2, 0, 0, 0, 0, time.UTC), - DateTime: time.Date(2006, time.January, 2, 15, 4, 5, 0, time.UTC), - }, - QuerySettings{}, - }, - - "additional_properties": { - `a=true&bar=value&foo=true`, - AdditionalProperties{ - A: true, - Extras: map[string]interface{}{ - "bar": "value", - "foo": true, - }, - }, - QuerySettings{}, - }, - - "recursive_struct_brackets": { - `child[name]=Alex&name=Robert`, - Recursive{Name: "Robert", Child: &Recursive{Name: "Alex"}}, - QuerySettings{NestedFormat: NestedQueryFormatBrackets}, - }, - - "recursive_struct_dots": { - `child.name=Alex&name=Robert`, - Recursive{Name: "Robert", Child: &Recursive{Name: "Alex"}}, - QuerySettings{NestedFormat: NestedQueryFormatDots}, - }, - - "unknown_struct_number": { - `unknown=12`, - UnknownStruct{ - Unknown: 12., - }, - QuerySettings{}, - }, - - "unknown_struct_map_brackets": { - `unknown[foo]=bar`, - UnknownStruct{ - Unknown: map[string]interface{}{ - "foo": "bar", 
- }, - }, - QuerySettings{NestedFormat: NestedQueryFormatBrackets}, - }, - - "unknown_struct_map_dots": { - `unknown.foo=bar`, - UnknownStruct{ - Unknown: map[string]interface{}{ - "foo": "bar", - }, - }, - QuerySettings{NestedFormat: NestedQueryFormatDots}, - }, - - "union_string": { - `union=hello`, - UnionStruct{ - Union: UnionString("hello"), - }, - QuerySettings{}, - }, - - "union_integer": { - `union=12`, - UnionStruct{ - Union: UnionInteger(12), - }, - QuerySettings{}, - }, - - "union_struct_discriminated_a": { - `union[a]=foo&union[b]=bar&union[type]=typeA`, - UnionStruct{ - Union: UnionStructA{ - Type: "typeA", - A: "foo", - B: "bar", - }, - }, - QuerySettings{}, - }, - - "union_struct_discriminated_b": { - `union[a]=foo&union[type]=typeB`, - UnionStruct{ - Union: UnionStructB{ - Type: "typeB", - A: "foo", - }, - }, - QuerySettings{}, - }, - - "union_struct_time": { - `union=2010-05-23`, - UnionStruct{ - Union: UnionTime(time.Date(2010, 05, 23, 0, 0, 0, 0, time.UTC)), - }, - QuerySettings{}, - }, - - "deeply_nested_brackets": { - `a[b][c][d]=hello`, - DeeplyNested{ - A: DeeplyNested1{ - B: DeeplyNested2{ - C: DeeplyNested3{ - D: P("hello"), - }, - }, - }, - }, - QuerySettings{NestedFormat: NestedQueryFormatBrackets}, - }, - - "deeply_nested_dots": { - `a.b.c.d=hello`, - DeeplyNested{ - A: DeeplyNested1{ - B: DeeplyNested2{ - C: DeeplyNested3{ - D: P("hello"), - }, - }, - }, - }, - QuerySettings{NestedFormat: NestedQueryFormatDots}, - }, - - "deeply_nested_brackets_empty": { - ``, - DeeplyNested{ - A: DeeplyNested1{ - B: DeeplyNested2{ - C: DeeplyNested3{ - D: nil, - }, - }, - }, - }, - QuerySettings{NestedFormat: NestedQueryFormatBrackets}, - }, - - "deeply_nested_dots_empty": { - ``, - DeeplyNested{ - A: DeeplyNested1{ - B: DeeplyNested2{ - C: DeeplyNested3{ - D: nil, - }, - }, - }, - }, - QuerySettings{NestedFormat: NestedQueryFormatDots}, - }, -} - -func TestEncode(t *testing.T) { - for name, test := range tests { - t.Run(name, func(t *testing.T) { - values := MarshalWithSettings(test.val, test.settings) - str, _ := url.QueryUnescape(values.Encode()) - if str != test.enc { - t.Fatalf("expected %+#v to serialize to %s but got %s", test.val, test.enc, str) - } - }) - } -} diff --git a/packages/tui/sdk/internal/apiquery/tag.go b/packages/tui/sdk/internal/apiquery/tag.go deleted file mode 100644 index 7ccd739c..00000000 --- a/packages/tui/sdk/internal/apiquery/tag.go +++ /dev/null @@ -1,41 +0,0 @@ -package apiquery - -import ( - "reflect" - "strings" -) - -const queryStructTag = "query" -const formatStructTag = "format" - -type parsedStructTag struct { - name string - omitempty bool - inline bool -} - -func parseQueryStructTag(field reflect.StructField) (tag parsedStructTag, ok bool) { - raw, ok := field.Tag.Lookup(queryStructTag) - if !ok { - return - } - parts := strings.Split(raw, ",") - if len(parts) == 0 { - return tag, false - } - tag.name = parts[0] - for _, part := range parts[1:] { - switch part { - case "omitempty": - tag.omitempty = true - case "inline": - tag.inline = true - } - } - return -} - -func parseFormatStructTag(field reflect.StructField) (format string, ok bool) { - format, ok = field.Tag.Lookup(formatStructTag) - return -} diff --git a/packages/tui/sdk/internal/param/field.go b/packages/tui/sdk/internal/param/field.go deleted file mode 100644 index 4d0fd9c6..00000000 --- a/packages/tui/sdk/internal/param/field.go +++ /dev/null @@ -1,29 +0,0 @@ -package param - -import ( - "fmt" -) - -type FieldLike interface{ field() } - -// Field is a wrapper used for all 
values sent to the API, -// to distinguish zero values from null or omitted fields. -// -// It also allows sending arbitrary deserializable values. -// -// To instantiate a Field, use the helpers exported from -// the package root: `F()`, `Null()`, `Raw()`, etc. -type Field[T any] struct { - FieldLike - Value T - Null bool - Present bool - Raw any -} - -func (f Field[T]) String() string { - if s, ok := any(f.Value).(fmt.Stringer); ok { - return s.String() - } - return fmt.Sprintf("%v", f.Value) -} diff --git a/packages/tui/sdk/internal/requestconfig/requestconfig.go b/packages/tui/sdk/internal/requestconfig/requestconfig.go deleted file mode 100644 index 91b70cca..00000000 --- a/packages/tui/sdk/internal/requestconfig/requestconfig.go +++ /dev/null @@ -1,629 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -package requestconfig - -import ( - "bytes" - "context" - "encoding/json" - "fmt" - "io" - "math" - "math/rand" - "mime" - "net/http" - "net/url" - "runtime" - "strconv" - "strings" - "time" - - "github.com/sst/opencode-sdk-go/internal" - "github.com/sst/opencode-sdk-go/internal/apierror" - "github.com/sst/opencode-sdk-go/internal/apiform" - "github.com/sst/opencode-sdk-go/internal/apiquery" - "github.com/sst/opencode-sdk-go/internal/param" -) - -func getDefaultHeaders() map[string]string { - return map[string]string{ - "User-Agent": fmt.Sprintf("Opencode/Go %s", internal.PackageVersion), - } -} - -func getNormalizedOS() string { - switch runtime.GOOS { - case "ios": - return "iOS" - case "android": - return "Android" - case "darwin": - return "MacOS" - case "window": - return "Windows" - case "freebsd": - return "FreeBSD" - case "openbsd": - return "OpenBSD" - case "linux": - return "Linux" - default: - return fmt.Sprintf("Other:%s", runtime.GOOS) - } -} - -func getNormalizedArchitecture() string { - switch runtime.GOARCH { - case "386": - return "x32" - case "amd64": - return "x64" - case "arm": - return "arm" - case "arm64": - return "arm64" - default: - return fmt.Sprintf("other:%s", runtime.GOARCH) - } -} - -func getPlatformProperties() map[string]string { - return map[string]string{ - "X-Stainless-Lang": "go", - "X-Stainless-Package-Version": internal.PackageVersion, - "X-Stainless-OS": getNormalizedOS(), - "X-Stainless-Arch": getNormalizedArchitecture(), - "X-Stainless-Runtime": "go", - "X-Stainless-Runtime-Version": runtime.Version(), - } -} - -type RequestOption interface { - Apply(*RequestConfig) error -} - -type RequestOptionFunc func(*RequestConfig) error -type PreRequestOptionFunc func(*RequestConfig) error - -func (s RequestOptionFunc) Apply(r *RequestConfig) error { return s(r) } -func (s PreRequestOptionFunc) Apply(r *RequestConfig) error { return s(r) } - -func NewRequestConfig(ctx context.Context, method string, u string, body interface{}, dst interface{}, opts ...RequestOption) (*RequestConfig, error) { - var reader io.Reader - - contentType := "application/json" - hasSerializationFunc := false - - if body, ok := body.(json.Marshaler); ok { - content, err := body.MarshalJSON() - if err != nil { - return nil, err - } - reader = bytes.NewBuffer(content) - hasSerializationFunc = true - } - if body, ok := body.(apiform.Marshaler); ok { - var ( - content []byte - err error - ) - content, contentType, err = body.MarshalMultipart() - if err != nil { - return nil, err - } - reader = bytes.NewBuffer(content) - hasSerializationFunc = true - } - if body, ok := body.(apiquery.Queryer); ok { - hasSerializationFunc = true - params := 
body.URLQuery().Encode() - if params != "" { - u = u + "?" + params - } - } - if body, ok := body.([]byte); ok { - reader = bytes.NewBuffer(body) - hasSerializationFunc = true - } - if body, ok := body.(io.Reader); ok { - reader = body - hasSerializationFunc = true - } - - // Fallback to json serialization if none of the serialization functions that we expect - // to see is present. - if body != nil && !hasSerializationFunc { - content, err := json.Marshal(body) - if err != nil { - return nil, err - } - reader = bytes.NewBuffer(content) - } - - req, err := http.NewRequestWithContext(ctx, method, u, nil) - if err != nil { - return nil, err - } - if reader != nil { - req.Header.Set("Content-Type", contentType) - } - - req.Header.Set("Accept", "application/json") - req.Header.Set("X-Stainless-Retry-Count", "0") - req.Header.Set("X-Stainless-Timeout", "0") - for k, v := range getDefaultHeaders() { - req.Header.Add(k, v) - } - - for k, v := range getPlatformProperties() { - req.Header.Add(k, v) - } - cfg := RequestConfig{ - MaxRetries: 2, - Context: ctx, - Request: req, - HTTPClient: http.DefaultClient, - Body: reader, - } - cfg.ResponseBodyInto = dst - err = cfg.Apply(opts...) - if err != nil { - return nil, err - } - - // This must run after `cfg.Apply(...)` above in case the request timeout gets modified. We also only - // apply our own logic for it if it's still "0" from above. If it's not, then it was deleted or modified - // by the user and we should respect that. - if req.Header.Get("X-Stainless-Timeout") == "0" { - if cfg.RequestTimeout == time.Duration(0) { - req.Header.Del("X-Stainless-Timeout") - } else { - req.Header.Set("X-Stainless-Timeout", strconv.Itoa(int(cfg.RequestTimeout.Seconds()))) - } - } - - return &cfg, nil -} - -func UseDefaultParam[T any](dst *param.Field[T], src *T) { - if !dst.Present && src != nil { - dst.Value = *src - dst.Present = true - } -} - -// This interface is primarily used to describe an [*http.Client], but also -// supports custom HTTP implementations. -type HTTPDoer interface { - Do(req *http.Request) (*http.Response, error) -} - -// RequestConfig represents all the state related to one request. -// -// Editing the variables inside RequestConfig directly is unstable api. Prefer -// composing the RequestOption instead if possible. -type RequestConfig struct { - MaxRetries int - RequestTimeout time.Duration - Context context.Context - Request *http.Request - BaseURL *url.URL - // DefaultBaseURL will be used if BaseURL is not explicitly overridden using - // WithBaseURL. - DefaultBaseURL *url.URL - CustomHTTPDoer HTTPDoer - HTTPClient *http.Client - Middlewares []middleware - // If ResponseBodyInto not nil, then we will attempt to deserialize into - // ResponseBodyInto. If Destination is a []byte, then it will return the body as - // is. - ResponseBodyInto interface{} - // ResponseInto copies the \*http.Response of the corresponding request into the - // given address - ResponseInto **http.Response - Body io.Reader -} - -// middleware is exactly the same type as the Middleware type found in the [option] package, -// but it is redeclared here for circular dependency issues. -type middleware = func(*http.Request, middlewareNext) (*http.Response, error) - -// middlewareNext is exactly the same type as the MiddlewareNext type found in the [option] package, -// but it is redeclared here for circular dependency issues. 
-type middlewareNext = func(*http.Request) (*http.Response, error) - -func applyMiddleware(middleware middleware, next middlewareNext) middlewareNext { - return func(req *http.Request) (res *http.Response, err error) { - return middleware(req, next) - } -} - -func shouldRetry(req *http.Request, res *http.Response) bool { - // If there is no way to recover the Body, then we shouldn't retry. - if req.Body != nil && req.GetBody == nil { - return false - } - - // If there is no response, that indicates that there is a connection error - // so we retry the request. - if res == nil { - return true - } - - // If the header explicitly wants a retry behavior, respect that over the - // http status code. - if res.Header.Get("x-should-retry") == "true" { - return true - } - if res.Header.Get("x-should-retry") == "false" { - return false - } - - return res.StatusCode == http.StatusRequestTimeout || - res.StatusCode == http.StatusConflict || - res.StatusCode == http.StatusTooManyRequests || - res.StatusCode >= http.StatusInternalServerError -} - -func parseRetryAfterHeader(resp *http.Response) (time.Duration, bool) { - if resp == nil { - return 0, false - } - - type retryData struct { - header string - units time.Duration - - // custom is used when the regular algorithm failed and is optional. - // the returned duration is used verbatim (units is not applied). - custom func(string) (time.Duration, bool) - } - - nop := func(string) (time.Duration, bool) { return 0, false } - - // the headers are listed in order of preference - retries := []retryData{ - { - header: "Retry-After-Ms", - units: time.Millisecond, - custom: nop, - }, - { - header: "Retry-After", - units: time.Second, - - // retry-after values are expressed in either number of - // seconds or an HTTP-date indicating when to try again - custom: func(ra string) (time.Duration, bool) { - t, err := time.Parse(time.RFC1123, ra) - if err != nil { - return 0, false - } - return time.Until(t), true - }, - }, - } - - for _, retry := range retries { - v := resp.Header.Get(retry.header) - if v == "" { - continue - } - if retryAfter, err := strconv.ParseFloat(v, 64); err == nil { - return time.Duration(retryAfter * float64(retry.units)), true - } - if d, ok := retry.custom(v); ok { - return d, true - } - } - - return 0, false -} - -// isBeforeContextDeadline reports whether the non-zero Time t is -// before ctx's deadline. If ctx does not have a deadline, it -// always reports true (the deadline is considered infinite). -func isBeforeContextDeadline(t time.Time, ctx context.Context) bool { - d, ok := ctx.Deadline() - if !ok { - return true - } - return t.Before(d) -} - -// bodyWithTimeout is an io.ReadCloser which can observe a context's cancel func -// to handle timeouts etc. It wraps an existing io.ReadCloser. -type bodyWithTimeout struct { - stop func() // stops the time.Timer waiting to cancel the request - rc io.ReadCloser -} - -func (b *bodyWithTimeout) Read(p []byte) (n int, err error) { - n, err = b.rc.Read(p) - if err == nil { - return n, nil - } - if err == io.EOF { - return n, err - } - return n, err -} - -func (b *bodyWithTimeout) Close() error { - err := b.rc.Close() - b.stop() - return err -} - -func retryDelay(res *http.Response, retryCount int) time.Duration { - // If the API asks us to wait a certain amount of time (and it's a reasonable amount), - // just do what it says. 
- - if retryAfterDelay, ok := parseRetryAfterHeader(res); ok && 0 <= retryAfterDelay && retryAfterDelay < time.Minute { - return retryAfterDelay - } - - maxDelay := 8 * time.Second - delay := time.Duration(0.5 * float64(time.Second) * math.Pow(2, float64(retryCount))) - if delay > maxDelay { - delay = maxDelay - } - - jitter := rand.Int63n(int64(delay / 4)) - delay -= time.Duration(jitter) - return delay -} - -func (cfg *RequestConfig) Execute() (err error) { - if cfg.BaseURL == nil { - if cfg.DefaultBaseURL != nil { - cfg.BaseURL = cfg.DefaultBaseURL - } else { - return fmt.Errorf("requestconfig: base url is not set") - } - } - - cfg.Request.URL, err = cfg.BaseURL.Parse(strings.TrimLeft(cfg.Request.URL.String(), "/")) - if err != nil { - return err - } - - if cfg.Body != nil && cfg.Request.Body == nil { - switch body := cfg.Body.(type) { - case *bytes.Buffer: - b := body.Bytes() - cfg.Request.ContentLength = int64(body.Len()) - cfg.Request.GetBody = func() (io.ReadCloser, error) { return io.NopCloser(bytes.NewReader(b)), nil } - cfg.Request.Body, _ = cfg.Request.GetBody() - case *bytes.Reader: - cfg.Request.ContentLength = int64(body.Len()) - cfg.Request.GetBody = func() (io.ReadCloser, error) { - _, err := body.Seek(0, 0) - return io.NopCloser(body), err - } - cfg.Request.Body, _ = cfg.Request.GetBody() - default: - if rc, ok := body.(io.ReadCloser); ok { - cfg.Request.Body = rc - } else { - cfg.Request.Body = io.NopCloser(body) - } - } - } - - handler := cfg.HTTPClient.Do - if cfg.CustomHTTPDoer != nil { - handler = cfg.CustomHTTPDoer.Do - } - for i := len(cfg.Middlewares) - 1; i >= 0; i -= 1 { - handler = applyMiddleware(cfg.Middlewares[i], handler) - } - - // Don't send the current retry count in the headers if the caller modified the header defaults. - shouldSendRetryCount := cfg.Request.Header.Get("X-Stainless-Retry-Count") == "0" - - var res *http.Response - var cancel context.CancelFunc - for retryCount := 0; retryCount <= cfg.MaxRetries; retryCount += 1 { - ctx := cfg.Request.Context() - if cfg.RequestTimeout != time.Duration(0) && isBeforeContextDeadline(time.Now().Add(cfg.RequestTimeout), ctx) { - ctx, cancel = context.WithTimeout(ctx, cfg.RequestTimeout) - defer func() { - // The cancel function is nil if it was handed off to be handled in a different scope. - if cancel != nil { - cancel() - } - }() - } - - req := cfg.Request.Clone(ctx) - if shouldSendRetryCount { - req.Header.Set("X-Stainless-Retry-Count", strconv.Itoa(retryCount)) - } - - res, err = handler(req) - if ctx != nil && ctx.Err() != nil { - return ctx.Err() - } - if !shouldRetry(cfg.Request, res) || retryCount >= cfg.MaxRetries { - break - } - - // Prepare next request and wait for the retry delay - if cfg.Request.GetBody != nil { - cfg.Request.Body, err = cfg.Request.GetBody() - if err != nil { - return err - } - } - - // Can't actually refresh the body, so we don't attempt to retry here - if cfg.Request.GetBody == nil && cfg.Request.Body != nil { - break - } - - time.Sleep(retryDelay(res, retryCount)) - } - - // Save *http.Response if it is requested to, even if there was an error making the request. This is - // useful in cases where you might want to debug by inspecting the response. Note that if err != nil, - // the response should be generally be empty, but there are edge cases. 
- if cfg.ResponseInto != nil { - *cfg.ResponseInto = res - } - if responseBodyInto, ok := cfg.ResponseBodyInto.(**http.Response); ok { - *responseBodyInto = res - } - - // If there was a connection error in the final request or any other transport error, - // return that early without trying to coerce into an APIError. - if err != nil { - return err - } - - if res.StatusCode >= 400 { - contents, err := io.ReadAll(res.Body) - res.Body.Close() - if err != nil { - return err - } - - // If there is an APIError, re-populate the response body so that debugging - // utilities can conveniently dump the response without issue. - res.Body = io.NopCloser(bytes.NewBuffer(contents)) - - // Load the contents into the error format if it is provided. - aerr := apierror.Error{Request: cfg.Request, Response: res, StatusCode: res.StatusCode} - err = aerr.UnmarshalJSON(contents) - if err != nil { - return err - } - return &aerr - } - - _, intoCustomResponseBody := cfg.ResponseBodyInto.(**http.Response) - if cfg.ResponseBodyInto == nil || intoCustomResponseBody { - // We aren't reading the response body in this scope, but whoever is will need the - // cancel func from the context to observe request timeouts. - // Put the cancel function in the response body so it can be handled elsewhere. - if cancel != nil { - res.Body = &bodyWithTimeout{rc: res.Body, stop: cancel} - cancel = nil - } - return nil - } - - contents, err := io.ReadAll(res.Body) - res.Body.Close() - if err != nil { - return fmt.Errorf("error reading response body: %w", err) - } - - // If we are not json, return plaintext - contentType := res.Header.Get("content-type") - mediaType, _, _ := mime.ParseMediaType(contentType) - isJSON := strings.Contains(mediaType, "application/json") || strings.HasSuffix(mediaType, "+json") - if !isJSON { - switch dst := cfg.ResponseBodyInto.(type) { - case *string: - *dst = string(contents) - case **string: - tmp := string(contents) - *dst = &tmp - case *[]byte: - *dst = contents - default: - return fmt.Errorf("expected destination type of 'string' or '[]byte' for responses with content-type '%s' that is not 'application/json'", contentType) - } - return nil - } - - switch dst := cfg.ResponseBodyInto.(type) { - // If the response happens to be a byte array, deserialize the body as-is. - case *[]byte: - *dst = contents - default: - err = json.NewDecoder(bytes.NewReader(contents)).Decode(cfg.ResponseBodyInto) - if err != nil { - return fmt.Errorf("error parsing response json: %w", err) - } - } - - return nil -} - -func ExecuteNewRequest(ctx context.Context, method string, u string, body interface{}, dst interface{}, opts ...RequestOption) error { - cfg, err := NewRequestConfig(ctx, method, u, body, dst, opts...) 
- if err != nil { - return err - } - return cfg.Execute() -} - -func (cfg *RequestConfig) Clone(ctx context.Context) *RequestConfig { - if cfg == nil { - return nil - } - req := cfg.Request.Clone(ctx) - var err error - if req.Body != nil { - req.Body, err = req.GetBody() - } - if err != nil { - return nil - } - new := &RequestConfig{ - MaxRetries: cfg.MaxRetries, - RequestTimeout: cfg.RequestTimeout, - Context: ctx, - Request: req, - BaseURL: cfg.BaseURL, - HTTPClient: cfg.HTTPClient, - Middlewares: cfg.Middlewares, - } - - return new -} - -func (cfg *RequestConfig) Apply(opts ...RequestOption) error { - for _, opt := range opts { - err := opt.Apply(cfg) - if err != nil { - return err - } - } - return nil -} - -// PreRequestOptions is used to collect all the options which need to be known before -// a call to [RequestConfig.ExecuteNewRequest], such as path parameters -// or global defaults. -// PreRequestOptions will return a [RequestConfig] with the options applied. -// -// Only request option functions of type [PreRequestOptionFunc] are applied. -func PreRequestOptions(opts ...RequestOption) (RequestConfig, error) { - cfg := RequestConfig{} - for _, opt := range opts { - if opt, ok := opt.(PreRequestOptionFunc); ok { - err := opt.Apply(&cfg) - if err != nil { - return cfg, err - } - } - } - return cfg, nil -} - -// WithDefaultBaseURL returns a RequestOption that sets the client's default Base URL. -// This is always overridden by setting a base URL with WithBaseURL. -// WithBaseURL should be used instead of WithDefaultBaseURL except in internal code. -func WithDefaultBaseURL(baseURL string) RequestOption { - u, err := url.Parse(baseURL) - return RequestOptionFunc(func(r *RequestConfig) error { - if err != nil { - return err - } - r.DefaultBaseURL = u - return nil - }) -} diff --git a/packages/tui/sdk/internal/testutil/testutil.go b/packages/tui/sdk/internal/testutil/testutil.go deleted file mode 100644 index 826d266f..00000000 --- a/packages/tui/sdk/internal/testutil/testutil.go +++ /dev/null @@ -1,27 +0,0 @@ -package testutil - -import ( - "net/http" - "os" - "strconv" - "testing" -) - -func CheckTestServer(t *testing.T, url string) bool { - if _, err := http.Get(url); err != nil { - const SKIP_MOCK_TESTS = "SKIP_MOCK_TESTS" - if str, ok := os.LookupEnv(SKIP_MOCK_TESTS); ok { - skip, err := strconv.ParseBool(str) - if err != nil { - t.Fatalf("strconv.ParseBool(os.LookupEnv(%s)) failed: %s", SKIP_MOCK_TESTS, err) - } - if skip { - t.Skip("The test will not run without a mock Prism server running against your OpenAPI spec") - return false - } - t.Errorf("The test will not run without a mock Prism server running against your OpenAPI spec. You can set the environment variable %s to true to skip running any tests that require the mock server", SKIP_MOCK_TESTS) - return false - } - } - return true -} diff --git a/packages/tui/sdk/internal/version.go b/packages/tui/sdk/internal/version.go deleted file mode 100644 index 64dcebbb..00000000 --- a/packages/tui/sdk/internal/version.go +++ /dev/null @@ -1,5 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -package internal - -const PackageVersion = "0.1.0-alpha.8" // x-release-please-version diff --git a/packages/tui/sdk/lib/.keep b/packages/tui/sdk/lib/.keep deleted file mode 100644 index 5e2c99fd..00000000 --- a/packages/tui/sdk/lib/.keep +++ /dev/null @@ -1,4 +0,0 @@ -File generated from our OpenAPI spec by Stainless. - -This directory can be used to store custom files to expand the SDK. 
-It is ignored by Stainless code generation and its content (other than this keep file) won't be touched. \ No newline at end of file diff --git a/packages/tui/sdk/option/middleware.go b/packages/tui/sdk/option/middleware.go deleted file mode 100644 index 8ec9dd60..00000000 --- a/packages/tui/sdk/option/middleware.go +++ /dev/null @@ -1,38 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -package option - -import ( - "log" - "net/http" - "net/http/httputil" -) - -// WithDebugLog logs the HTTP request and response content. -// If the logger parameter is nil, it uses the default logger. -// -// WithDebugLog is for debugging and development purposes only. -// It should not be used in production code. The behavior and interface -// of WithDebugLog is not guaranteed to be stable. -func WithDebugLog(logger *log.Logger) RequestOption { - return WithMiddleware(func(req *http.Request, nxt MiddlewareNext) (*http.Response, error) { - if logger == nil { - logger = log.Default() - } - - if reqBytes, err := httputil.DumpRequest(req, true); err == nil { - logger.Printf("Request Content:\n%s\n", reqBytes) - } - - resp, err := nxt(req) - if err != nil { - return resp, err - } - - if respBytes, err := httputil.DumpResponse(resp, true); err == nil { - logger.Printf("Response Content:\n%s\n", respBytes) - } - - return resp, err - }) -} diff --git a/packages/tui/sdk/option/requestoption.go b/packages/tui/sdk/option/requestoption.go deleted file mode 100644 index 313552e9..00000000 --- a/packages/tui/sdk/option/requestoption.go +++ /dev/null @@ -1,266 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -package option - -import ( - "bytes" - "fmt" - "io" - "net/http" - "net/url" - "strings" - "time" - - "github.com/sst/opencode-sdk-go/internal/requestconfig" - "github.com/tidwall/sjson" -) - -// RequestOption is an option for the requests made by the opencode API Client -// which can be supplied to clients, services, and methods. You can read more about this functional -// options pattern in our [README]. -// -// [README]: https://pkg.go.dev/github.com/sst/opencode-sdk-go#readme-requestoptions -type RequestOption = requestconfig.RequestOption - -// WithBaseURL returns a RequestOption that sets the BaseURL for the client. -// -// For security reasons, ensure that the base URL is trusted. -func WithBaseURL(base string) RequestOption { - u, err := url.Parse(base) - return requestconfig.RequestOptionFunc(func(r *requestconfig.RequestConfig) error { - if err != nil { - return fmt.Errorf("requestoption: WithBaseURL failed to parse url %s\n", err) - } - - if u.Path != "" && !strings.HasSuffix(u.Path, "/") { - u.Path += "/" - } - r.BaseURL = u - return nil - }) -} - -// HTTPClient is primarily used to describe an [*http.Client], but also -// supports custom implementations. -// -// For bespoke implementations, prefer using an [*http.Client] with a -// custom transport. See [http.RoundTripper] for further information. -type HTTPClient interface { - Do(*http.Request) (*http.Response, error) -} - -// WithHTTPClient returns a RequestOption that changes the underlying http client used to make this -// request, which by default is [http.DefaultClient]. -// -// For custom uses cases, it is recommended to provide an [*http.Client] with a custom -// [http.RoundTripper] as its transport, rather than directly implementing [HTTPClient]. 
-func WithHTTPClient(client HTTPClient) RequestOption { - return requestconfig.RequestOptionFunc(func(r *requestconfig.RequestConfig) error { - if client == nil { - return fmt.Errorf("requestoption: custom http client cannot be nil") - } - - if c, ok := client.(*http.Client); ok { - // Prefer the native client if possible. - r.HTTPClient = c - r.CustomHTTPDoer = nil - } else { - r.CustomHTTPDoer = client - } - - return nil - }) -} - -// MiddlewareNext is a function which is called by a middleware to pass an HTTP request -// to the next stage in the middleware chain. -type MiddlewareNext = func(*http.Request) (*http.Response, error) - -// Middleware is a function which intercepts HTTP requests, processing or modifying -// them, and then passing the request to the next middleware or handler -// in the chain by calling the provided MiddlewareNext function. -type Middleware = func(*http.Request, MiddlewareNext) (*http.Response, error) - -// WithMiddleware returns a RequestOption that applies the given middleware -// to the requests made. Each middleware will execute in the order they were given. -func WithMiddleware(middlewares ...Middleware) RequestOption { - return requestconfig.RequestOptionFunc(func(r *requestconfig.RequestConfig) error { - r.Middlewares = append(r.Middlewares, middlewares...) - return nil - }) -} - -// WithMaxRetries returns a RequestOption that sets the maximum number of retries that the client -// attempts to make. When given 0, the client only makes one request. By -// default, the client retries two times. -// -// WithMaxRetries panics when retries is negative. -func WithMaxRetries(retries int) RequestOption { - if retries < 0 { - panic("option: cannot have fewer than 0 retries") - } - return requestconfig.RequestOptionFunc(func(r *requestconfig.RequestConfig) error { - r.MaxRetries = retries - return nil - }) -} - -// WithHeader returns a RequestOption that sets the header value to the associated key. It overwrites -// any value if there was one already present. -func WithHeader(key, value string) RequestOption { - return requestconfig.RequestOptionFunc(func(r *requestconfig.RequestConfig) error { - r.Request.Header.Set(key, value) - return nil - }) -} - -// WithHeaderAdd returns a RequestOption that adds the header value to the associated key. It appends -// onto any existing values. -func WithHeaderAdd(key, value string) RequestOption { - return requestconfig.RequestOptionFunc(func(r *requestconfig.RequestConfig) error { - r.Request.Header.Add(key, value) - return nil - }) -} - -// WithHeaderDel returns a RequestOption that deletes the header value(s) associated with the given key. -func WithHeaderDel(key string) RequestOption { - return requestconfig.RequestOptionFunc(func(r *requestconfig.RequestConfig) error { - r.Request.Header.Del(key) - return nil - }) -} - -// WithQuery returns a RequestOption that sets the query value to the associated key. It overwrites -// any value if there was one already present. -func WithQuery(key, value string) RequestOption { - return requestconfig.RequestOptionFunc(func(r *requestconfig.RequestConfig) error { - query := r.Request.URL.Query() - query.Set(key, value) - r.Request.URL.RawQuery = query.Encode() - return nil - }) -} - -// WithQueryAdd returns a RequestOption that adds the query value to the associated key. It appends -// onto any existing values. 
-func WithQueryAdd(key, value string) RequestOption { - return requestconfig.RequestOptionFunc(func(r *requestconfig.RequestConfig) error { - query := r.Request.URL.Query() - query.Add(key, value) - r.Request.URL.RawQuery = query.Encode() - return nil - }) -} - -// WithQueryDel returns a RequestOption that deletes the query value(s) associated with the key. -func WithQueryDel(key string) RequestOption { - return requestconfig.RequestOptionFunc(func(r *requestconfig.RequestConfig) error { - query := r.Request.URL.Query() - query.Del(key) - r.Request.URL.RawQuery = query.Encode() - return nil - }) -} - -// WithJSONSet returns a RequestOption that sets the body's JSON value associated with the key. -// The key accepts a string as defined by the [sjson format]. -// -// [sjson format]: https://github.com/tidwall/sjson -func WithJSONSet(key string, value interface{}) RequestOption { - return requestconfig.RequestOptionFunc(func(r *requestconfig.RequestConfig) (err error) { - var b []byte - - if r.Body == nil { - b, err = sjson.SetBytes(nil, key, value) - if err != nil { - return err - } - } else if buffer, ok := r.Body.(*bytes.Buffer); ok { - b = buffer.Bytes() - b, err = sjson.SetBytes(b, key, value) - if err != nil { - return err - } - } else { - return fmt.Errorf("cannot use WithJSONSet on a body that is not serialized as *bytes.Buffer") - } - - r.Body = bytes.NewBuffer(b) - return nil - }) -} - -// WithJSONDel returns a RequestOption that deletes the body's JSON value associated with the key. -// The key accepts a string as defined by the [sjson format]. -// -// [sjson format]: https://github.com/tidwall/sjson -func WithJSONDel(key string) RequestOption { - return requestconfig.RequestOptionFunc(func(r *requestconfig.RequestConfig) (err error) { - if buffer, ok := r.Body.(*bytes.Buffer); ok { - b := buffer.Bytes() - b, err = sjson.DeleteBytes(b, key) - if err != nil { - return err - } - r.Body = bytes.NewBuffer(b) - return nil - } - - return fmt.Errorf("cannot use WithJSONDel on a body that is not serialized as *bytes.Buffer") - }) -} - -// WithResponseBodyInto returns a RequestOption that overwrites the deserialization target with -// the given destination. If provided, we don't deserialize into the default struct. -func WithResponseBodyInto(dst any) RequestOption { - return requestconfig.RequestOptionFunc(func(r *requestconfig.RequestConfig) error { - r.ResponseBodyInto = dst - return nil - }) -} - -// WithResponseInto returns a RequestOption that copies the [*http.Response] into the given address. -func WithResponseInto(dst **http.Response) RequestOption { - return requestconfig.RequestOptionFunc(func(r *requestconfig.RequestConfig) error { - r.ResponseInto = dst - return nil - }) -} - -// WithRequestBody returns a RequestOption that provides a custom serialized body with the given -// content type. -// -// body accepts an io.Reader or raw []bytes. -func WithRequestBody(contentType string, body any) RequestOption { - return requestconfig.RequestOptionFunc(func(r *requestconfig.RequestConfig) error { - if reader, ok := body.(io.Reader); ok { - r.Body = reader - return r.Apply(WithHeader("Content-Type", contentType)) - } - - if b, ok := body.([]byte); ok { - r.Body = bytes.NewBuffer(b) - return r.Apply(WithHeader("Content-Type", contentType)) - } - - return fmt.Errorf("body must be a byte slice or implement io.Reader") - }) -} - -// WithRequestTimeout returns a RequestOption that sets the timeout for -// each request attempt. 
This should be smaller than the timeout defined in -// the context, which spans all retries. -func WithRequestTimeout(dur time.Duration) RequestOption { - return requestconfig.RequestOptionFunc(func(r *requestconfig.RequestConfig) error { - r.RequestTimeout = dur - return nil - }) -} - -// WithEnvironmentProduction returns a RequestOption that sets the current -// environment to be the "production" environment. An environment specifies which base URL -// to use by default. -func WithEnvironmentProduction() RequestOption { - return requestconfig.WithDefaultBaseURL("http://localhost:54321/") -} diff --git a/packages/tui/sdk/packages/ssestream/ssestream.go b/packages/tui/sdk/packages/ssestream/ssestream.go deleted file mode 100644 index cc0afb7b..00000000 --- a/packages/tui/sdk/packages/ssestream/ssestream.go +++ /dev/null @@ -1,181 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -package ssestream - -import ( - "bufio" - "bytes" - "encoding/json" - "io" - "net/http" - "strings" -) - -type Decoder interface { - Event() Event - Next() bool - Close() error - Err() error -} - -func NewDecoder(res *http.Response) Decoder { - if res == nil || res.Body == nil { - return nil - } - - var decoder Decoder - contentType := res.Header.Get("content-type") - if t, ok := decoderTypes[contentType]; ok { - decoder = t(res.Body) - } else { - scn := bufio.NewScanner(res.Body) - scn.Buffer(nil, bufio.MaxScanTokenSize<<9) - decoder = &eventStreamDecoder{rc: res.Body, scn: scn} - } - return decoder -} - -var decoderTypes = map[string](func(io.ReadCloser) Decoder){} - -func RegisterDecoder(contentType string, decoder func(io.ReadCloser) Decoder) { - decoderTypes[strings.ToLower(contentType)] = decoder -} - -type Event struct { - Type string - Data []byte -} - -// A base implementation of a Decoder for text/event-stream. -type eventStreamDecoder struct { - evt Event - rc io.ReadCloser - scn *bufio.Scanner - err error -} - -func (s *eventStreamDecoder) Next() bool { - if s.err != nil { - return false - } - - event := "" - data := bytes.NewBuffer(nil) - - for s.scn.Scan() { - txt := s.scn.Bytes() - - // Dispatch event on an empty line - if len(txt) == 0 { - s.evt = Event{ - Type: event, - Data: data.Bytes(), - } - return true - } - - // Split a string like "event: bar" into name="event" and value=" bar". - name, value, _ := bytes.Cut(txt, []byte(":")) - - // Consume an optional space after the colon if it exists. - if len(value) > 0 && value[0] == ' ' { - value = value[1:] - } - - switch string(name) { - case "": - // An empty line in the for ": something" is a comment and should be ignored. - continue - case "event": - event = string(value) - case "data": - _, s.err = data.Write(value) - if s.err != nil { - break - } - _, s.err = data.WriteRune('\n') - if s.err != nil { - break - } - } - } - - if s.scn.Err() != nil { - s.err = s.scn.Err() - } - - return false -} - -func (s *eventStreamDecoder) Event() Event { - return s.evt -} - -func (s *eventStreamDecoder) Close() error { - return s.rc.Close() -} - -func (s *eventStreamDecoder) Err() error { - return s.err -} - -type Stream[T any] struct { - decoder Decoder - cur T - err error -} - -func NewStream[T any](decoder Decoder, err error) *Stream[T] { - return &Stream[T]{ - decoder: decoder, - err: err, - } -} - -// Next returns false if the stream has ended or an error occurred. -// Call Stream.Current() to get the current value. -// Call Stream.Err() to get the error. 
-// -// for stream.Next() { -// data := stream.Current() -// } -// -// if stream.Err() != nil { -// ... -// } -func (s *Stream[T]) Next() bool { - if s.err != nil { - return false - } - - for s.decoder.Next() { - var nxt T - s.err = json.Unmarshal(s.decoder.Event().Data, &nxt) - if s.err != nil { - return false - } - s.cur = nxt - return true - } - - // decoder.Next() may be false because of an error - s.err = s.decoder.Err() - - return false -} - -func (s *Stream[T]) Current() T { - return s.cur -} - -func (s *Stream[T]) Err() error { - return s.err -} - -func (s *Stream[T]) Close() error { - if s.decoder == nil { - // already closed - return nil - } - return s.decoder.Close() -} diff --git a/packages/tui/sdk/release-please-config.json b/packages/tui/sdk/release-please-config.json deleted file mode 100644 index a38198ec..00000000 --- a/packages/tui/sdk/release-please-config.json +++ /dev/null @@ -1,67 +0,0 @@ -{ - "packages": { - ".": {} - }, - "$schema": "https://raw.githubusercontent.com/stainless-api/release-please/main/schemas/config.json", - "include-v-in-tag": true, - "include-component-in-tag": false, - "versioning": "prerelease", - "prerelease": true, - "bump-minor-pre-major": true, - "bump-patch-for-minor-pre-major": false, - "pull-request-header": "Automated Release PR", - "pull-request-title-pattern": "release: ${version}", - "changelog-sections": [ - { - "type": "feat", - "section": "Features" - }, - { - "type": "fix", - "section": "Bug Fixes" - }, - { - "type": "perf", - "section": "Performance Improvements" - }, - { - "type": "revert", - "section": "Reverts" - }, - { - "type": "chore", - "section": "Chores" - }, - { - "type": "docs", - "section": "Documentation" - }, - { - "type": "style", - "section": "Styles" - }, - { - "type": "refactor", - "section": "Refactors" - }, - { - "type": "test", - "section": "Tests", - "hidden": true - }, - { - "type": "build", - "section": "Build System" - }, - { - "type": "ci", - "section": "Continuous Integration", - "hidden": true - } - ], - "release-type": "go", - "extra-files": [ - "internal/version.go", - "README.md" - ] -} \ No newline at end of file diff --git a/packages/tui/sdk/scripts/bootstrap b/packages/tui/sdk/scripts/bootstrap deleted file mode 100755 index d6ac1654..00000000 --- a/packages/tui/sdk/scripts/bootstrap +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/env bash - -set -e - -cd "$(dirname "$0")/.." - -if [ -f "Brewfile" ] && [ "$(uname -s)" = "Darwin" ] && [ "$SKIP_BREW" != "1" ]; then - brew bundle check >/dev/null 2>&1 || { - echo "==> Installing Homebrew dependencies…" - brew bundle - } -fi - -echo "==> Installing Go dependencies…" - -go mod tidy -e diff --git a/packages/tui/sdk/scripts/format b/packages/tui/sdk/scripts/format deleted file mode 100755 index db2a3fa2..00000000 --- a/packages/tui/sdk/scripts/format +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env bash - -set -e - -cd "$(dirname "$0")/.." - -echo "==> Running gofmt -s -w" -gofmt -s -w . diff --git a/packages/tui/sdk/scripts/lint b/packages/tui/sdk/scripts/lint deleted file mode 100755 index 7e03a7be..00000000 --- a/packages/tui/sdk/scripts/lint +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/env bash - -set -e - -cd "$(dirname "$0")/.." - -echo "==> Running Go build" -go build ./... - -echo "==> Checking tests compile" -go test -run=^$ ./... 
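For context on the request-option helpers removed earlier in this diff (WithQueryAdd, WithJSONSet, WithRequestBody, WithResponseInto, WithRequestTimeout, and friends): each With* function returns a RequestOption that mutates the per-request config, and options can be supplied both when constructing the client and on individual calls. A minimal sketch, not the SDK's documented usage, assuming the github.com/sst/opencode-sdk-go and .../option import paths that appear in the deleted test file below, a hypothetical local server, and a hypothetical "directory" query key:

package main

import (
	"context"
	"log"
	"net/http"
	"time"

	"github.com/sst/opencode-sdk-go"        // assumed module path, as used in the deleted tests below
	"github.com/sst/opencode-sdk-go/option" // assumed home of the With* helpers shown above
)

func main() {
	client := opencode.NewClient(
		option.WithBaseURL("http://localhost:4096/"), // hypothetical local server
	)

	// Per-call options: add a query parameter, cap the per-attempt timeout,
	// and capture the raw *http.Response alongside the decoded result.
	var httpRes *http.Response
	sessions, err := client.Session.List(
		context.TODO(),
		option.WithQueryAdd("directory", "."), // hypothetical query key, for illustration only
		option.WithRequestTimeout(10*time.Second),
		option.WithResponseInto(&httpRes),
	)
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("status=%d sessions=%d", httpRes.StatusCode, len(*sessions))
}

Note that WithRequestTimeout bounds a single attempt; as the doc comment above says, it should be smaller than any context deadline, which spans all retries.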
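Similarly, the ssestream package removed above exposes a small iterator-style API: NewDecoder wraps an *http.Response and selects a decoder by Content-Type, NewStream[T] JSON-unmarshals each event's data field into T, and consumption follows the Next/Current/Err loop sketched in the Stream.Next doc comment. A minimal consumption sketch, assuming the github.com/sst/opencode-sdk-go/packages/ssestream import path, a hypothetical local /event endpoint, and a hypothetical eventPayload type:

package main

import (
	"fmt"
	"log"
	"net/http"

	"github.com/sst/opencode-sdk-go/packages/ssestream" // assumed import path for the package above
)

// eventPayload is a hypothetical shape for the JSON carried in each SSE "data" field.
type eventPayload struct {
	Type string `json:"type"`
}

func main() {
	// Hypothetical SSE endpoint; any text/event-stream response works here.
	res, err := http.Get("http://localhost:4096/event")
	if err != nil {
		log.Fatal(err)
	}

	// NewDecoder picks a decoder from the response Content-Type;
	// NewStream[T] unmarshals each event's Data into eventPayload.
	stream := ssestream.NewStream[eventPayload](ssestream.NewDecoder(res), err)
	defer stream.Close()

	for stream.Next() {
		fmt.Println("event:", stream.Current().Type)
	}
	if stream.Err() != nil {
		log.Fatal(stream.Err())
	}
}

This mirrors the usage comment on Stream.Next: Next returns false both at end-of-stream and on error, so the Err check after the loop is what distinguishes a clean close from a failure.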
diff --git a/packages/tui/sdk/scripts/mock b/packages/tui/sdk/scripts/mock deleted file mode 100755 index d2814ae6..00000000 --- a/packages/tui/sdk/scripts/mock +++ /dev/null @@ -1,41 +0,0 @@ -#!/usr/bin/env bash - -set -e - -cd "$(dirname "$0")/.." - -if [[ -n "$1" && "$1" != '--'* ]]; then - URL="$1" - shift -else - URL="$(grep 'openapi_spec_url' .stats.yml | cut -d' ' -f2)" -fi - -# Check if the URL is empty -if [ -z "$URL" ]; then - echo "Error: No OpenAPI spec path/url provided or found in .stats.yml" - exit 1 -fi - -echo "==> Starting mock server with URL ${URL}" - -# Run prism mock on the given spec -if [ "$1" == "--daemon" ]; then - npm exec --package=@stainless-api/prism-cli@5.8.5 -- prism mock "$URL" &> .prism.log & - - # Wait for server to come online - echo -n "Waiting for server" - while ! grep -q "✖ fatal\|Prism is listening" ".prism.log" ; do - echo -n "." - sleep 0.1 - done - - if grep -q "✖ fatal" ".prism.log"; then - cat .prism.log - exit 1 - fi - - echo -else - npm exec --package=@stainless-api/prism-cli@5.8.5 -- prism mock "$URL" -fi diff --git a/packages/tui/sdk/scripts/test b/packages/tui/sdk/scripts/test deleted file mode 100755 index efebceae..00000000 --- a/packages/tui/sdk/scripts/test +++ /dev/null @@ -1,56 +0,0 @@ -#!/usr/bin/env bash - -set -e - -cd "$(dirname "$0")/.." - -RED='\033[0;31m' -GREEN='\033[0;32m' -YELLOW='\033[0;33m' -NC='\033[0m' # No Color - -function prism_is_running() { - curl --silent "http://localhost:4010" >/dev/null 2>&1 -} - -kill_server_on_port() { - pids=$(lsof -t -i tcp:"$1" || echo "") - if [ "$pids" != "" ]; then - kill "$pids" - echo "Stopped $pids." - fi -} - -function is_overriding_api_base_url() { - [ -n "$TEST_API_BASE_URL" ] -} - -if ! is_overriding_api_base_url && ! prism_is_running ; then - # When we exit this script, make sure to kill the background mock server process - trap 'kill_server_on_port 4010' EXIT - - # Start the dev server - ./scripts/mock --daemon -fi - -if is_overriding_api_base_url ; then - echo -e "${GREEN}✔ Running tests against ${TEST_API_BASE_URL}${NC}" - echo -elif ! prism_is_running ; then - echo -e "${RED}ERROR:${NC} The test suite will not run without a mock Prism server" - echo -e "running against your OpenAPI spec." - echo - echo -e "To run the server, pass in the path or url of your OpenAPI" - echo -e "spec to the prism command:" - echo - echo -e " \$ ${YELLOW}npm exec --package=@stoplight/prism-cli@~5.3.2 -- prism mock path/to/your.openapi.yml${NC}" - echo - - exit 1 -else - echo -e "${GREEN}✔ Mock prism server is running with your OpenAPI spec${NC}" - echo -fi - -echo "==> Running tests" -go test ./... "$@" diff --git a/packages/tui/sdk/session.go b/packages/tui/sdk/session.go deleted file mode 100644 index bfb2e277..00000000 --- a/packages/tui/sdk/session.go +++ /dev/null @@ -1,1667 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -package opencode - -import ( - "context" - "errors" - "fmt" - "net/http" - "reflect" - - "github.com/sst/opencode-sdk-go/internal/apijson" - "github.com/sst/opencode-sdk-go/internal/param" - "github.com/sst/opencode-sdk-go/internal/requestconfig" - "github.com/sst/opencode-sdk-go/option" - "github.com/sst/opencode-sdk-go/shared" - "github.com/tidwall/gjson" -) - -// SessionService contains methods and other services that help with interacting -// with the opencode API. -// -// Note, unlike clients, this service does not read variables from the environment -// automatically. 
You should not instantiate this service directly, and instead use -// the [NewSessionService] method instead. -type SessionService struct { - Options []option.RequestOption -} - -// NewSessionService generates a new service that applies the given options to each -// request. These options are applied after the parent client's options (if there -// is one), and before any request-specific options. -func NewSessionService(opts ...option.RequestOption) (r *SessionService) { - r = &SessionService{} - r.Options = opts - return -} - -// Create a new session -func (r *SessionService) New(ctx context.Context, opts ...option.RequestOption) (res *Session, err error) { - opts = append(r.Options[:], opts...) - path := "session" - err = requestconfig.ExecuteNewRequest(ctx, http.MethodPost, path, nil, &res, opts...) - return -} - -// List all sessions -func (r *SessionService) List(ctx context.Context, opts ...option.RequestOption) (res *[]Session, err error) { - opts = append(r.Options[:], opts...) - path := "session" - err = requestconfig.ExecuteNewRequest(ctx, http.MethodGet, path, nil, &res, opts...) - return -} - -// Delete a session and all its data -func (r *SessionService) Delete(ctx context.Context, id string, opts ...option.RequestOption) (res *bool, err error) { - opts = append(r.Options[:], opts...) - if id == "" { - err = errors.New("missing required id parameter") - return - } - path := fmt.Sprintf("session/%s", id) - err = requestconfig.ExecuteNewRequest(ctx, http.MethodDelete, path, nil, &res, opts...) - return -} - -// Abort a session -func (r *SessionService) Abort(ctx context.Context, id string, opts ...option.RequestOption) (res *bool, err error) { - opts = append(r.Options[:], opts...) - if id == "" { - err = errors.New("missing required id parameter") - return - } - path := fmt.Sprintf("session/%s/abort", id) - err = requestconfig.ExecuteNewRequest(ctx, http.MethodPost, path, nil, &res, opts...) - return -} - -// Create and send a new message to a session -func (r *SessionService) Chat(ctx context.Context, id string, body SessionChatParams, opts ...option.RequestOption) (res *AssistantMessage, err error) { - opts = append(r.Options[:], opts...) - if id == "" { - err = errors.New("missing required id parameter") - return - } - path := fmt.Sprintf("session/%s/message", id) - err = requestconfig.ExecuteNewRequest(ctx, http.MethodPost, path, body, &res, opts...) - return -} - -// Analyze the app and create an AGENTS.md file -func (r *SessionService) Init(ctx context.Context, id string, body SessionInitParams, opts ...option.RequestOption) (res *bool, err error) { - opts = append(r.Options[:], opts...) - if id == "" { - err = errors.New("missing required id parameter") - return - } - path := fmt.Sprintf("session/%s/init", id) - err = requestconfig.ExecuteNewRequest(ctx, http.MethodPost, path, body, &res, opts...) - return -} - -// List messages for a session -func (r *SessionService) Messages(ctx context.Context, id string, opts ...option.RequestOption) (res *[]SessionMessagesResponse, err error) { - opts = append(r.Options[:], opts...) - if id == "" { - err = errors.New("missing required id parameter") - return - } - path := fmt.Sprintf("session/%s/message", id) - err = requestconfig.ExecuteNewRequest(ctx, http.MethodGet, path, nil, &res, opts...) - return -} - -// Share a session -func (r *SessionService) Share(ctx context.Context, id string, opts ...option.RequestOption) (res *Session, err error) { - opts = append(r.Options[:], opts...) 
- if id == "" { - err = errors.New("missing required id parameter") - return - } - path := fmt.Sprintf("session/%s/share", id) - err = requestconfig.ExecuteNewRequest(ctx, http.MethodPost, path, nil, &res, opts...) - return -} - -// Summarize the session -func (r *SessionService) Summarize(ctx context.Context, id string, body SessionSummarizeParams, opts ...option.RequestOption) (res *bool, err error) { - opts = append(r.Options[:], opts...) - if id == "" { - err = errors.New("missing required id parameter") - return - } - path := fmt.Sprintf("session/%s/summarize", id) - err = requestconfig.ExecuteNewRequest(ctx, http.MethodPost, path, body, &res, opts...) - return -} - -// Unshare the session -func (r *SessionService) Unshare(ctx context.Context, id string, opts ...option.RequestOption) (res *Session, err error) { - opts = append(r.Options[:], opts...) - if id == "" { - err = errors.New("missing required id parameter") - return - } - path := fmt.Sprintf("session/%s/share", id) - err = requestconfig.ExecuteNewRequest(ctx, http.MethodDelete, path, nil, &res, opts...) - return -} - -type AssistantMessage struct { - ID string `json:"id,required"` - Cost float64 `json:"cost,required"` - ModelID string `json:"modelID,required"` - Path AssistantMessagePath `json:"path,required"` - ProviderID string `json:"providerID,required"` - Role AssistantMessageRole `json:"role,required"` - SessionID string `json:"sessionID,required"` - System []string `json:"system,required"` - Time AssistantMessageTime `json:"time,required"` - Tokens AssistantMessageTokens `json:"tokens,required"` - Error AssistantMessageError `json:"error"` - Summary bool `json:"summary"` - JSON assistantMessageJSON `json:"-"` -} - -// assistantMessageJSON contains the JSON metadata for the struct -// [AssistantMessage] -type assistantMessageJSON struct { - ID apijson.Field - Cost apijson.Field - ModelID apijson.Field - Path apijson.Field - ProviderID apijson.Field - Role apijson.Field - SessionID apijson.Field - System apijson.Field - Time apijson.Field - Tokens apijson.Field - Error apijson.Field - Summary apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *AssistantMessage) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r assistantMessageJSON) RawJSON() string { - return r.raw -} - -func (r AssistantMessage) implementsMessage() {} - -type AssistantMessagePath struct { - Cwd string `json:"cwd,required"` - Root string `json:"root,required"` - JSON assistantMessagePathJSON `json:"-"` -} - -// assistantMessagePathJSON contains the JSON metadata for the struct -// [AssistantMessagePath] -type assistantMessagePathJSON struct { - Cwd apijson.Field - Root apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *AssistantMessagePath) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r assistantMessagePathJSON) RawJSON() string { - return r.raw -} - -type AssistantMessageRole string - -const ( - AssistantMessageRoleAssistant AssistantMessageRole = "assistant" -) - -func (r AssistantMessageRole) IsKnown() bool { - switch r { - case AssistantMessageRoleAssistant: - return true - } - return false -} - -type AssistantMessageTime struct { - Created float64 `json:"created,required"` - Completed float64 `json:"completed"` - JSON assistantMessageTimeJSON `json:"-"` -} - -// assistantMessageTimeJSON contains the JSON metadata for the struct -// [AssistantMessageTime] -type assistantMessageTimeJSON struct { - 
Created apijson.Field - Completed apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *AssistantMessageTime) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r assistantMessageTimeJSON) RawJSON() string { - return r.raw -} - -type AssistantMessageTokens struct { - Cache AssistantMessageTokensCache `json:"cache,required"` - Input float64 `json:"input,required"` - Output float64 `json:"output,required"` - Reasoning float64 `json:"reasoning,required"` - JSON assistantMessageTokensJSON `json:"-"` -} - -// assistantMessageTokensJSON contains the JSON metadata for the struct -// [AssistantMessageTokens] -type assistantMessageTokensJSON struct { - Cache apijson.Field - Input apijson.Field - Output apijson.Field - Reasoning apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *AssistantMessageTokens) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r assistantMessageTokensJSON) RawJSON() string { - return r.raw -} - -type AssistantMessageTokensCache struct { - Read float64 `json:"read,required"` - Write float64 `json:"write,required"` - JSON assistantMessageTokensCacheJSON `json:"-"` -} - -// assistantMessageTokensCacheJSON contains the JSON metadata for the struct -// [AssistantMessageTokensCache] -type assistantMessageTokensCacheJSON struct { - Read apijson.Field - Write apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *AssistantMessageTokensCache) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r assistantMessageTokensCacheJSON) RawJSON() string { - return r.raw -} - -type AssistantMessageError struct { - // This field can have the runtime type of [shared.ProviderAuthErrorData], - // [shared.UnknownErrorData], [interface{}]. - Data interface{} `json:"data,required"` - Name AssistantMessageErrorName `json:"name,required"` - JSON assistantMessageErrorJSON `json:"-"` - union AssistantMessageErrorUnion -} - -// assistantMessageErrorJSON contains the JSON metadata for the struct -// [AssistantMessageError] -type assistantMessageErrorJSON struct { - Data apijson.Field - Name apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r assistantMessageErrorJSON) RawJSON() string { - return r.raw -} - -func (r *AssistantMessageError) UnmarshalJSON(data []byte) (err error) { - *r = AssistantMessageError{} - err = apijson.UnmarshalRoot(data, &r.union) - if err != nil { - return err - } - return apijson.Port(r.union, &r) -} - -// AsUnion returns a [AssistantMessageErrorUnion] interface which you can cast to -// the specific types for more type safety. -// -// Possible runtime types of the union are [shared.ProviderAuthError], -// [shared.UnknownError], [AssistantMessageErrorMessageOutputLengthError], -// [shared.MessageAbortedError]. -func (r AssistantMessageError) AsUnion() AssistantMessageErrorUnion { - return r.union -} - -// Union satisfied by [shared.ProviderAuthError], [shared.UnknownError], -// [AssistantMessageErrorMessageOutputLengthError] or [shared.MessageAbortedError]. 
-type AssistantMessageErrorUnion interface { - ImplementsAssistantMessageError() -} - -func init() { - apijson.RegisterUnion( - reflect.TypeOf((*AssistantMessageErrorUnion)(nil)).Elem(), - "name", - apijson.UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(shared.ProviderAuthError{}), - DiscriminatorValue: "ProviderAuthError", - }, - apijson.UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(shared.UnknownError{}), - DiscriminatorValue: "UnknownError", - }, - apijson.UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(AssistantMessageErrorMessageOutputLengthError{}), - DiscriminatorValue: "MessageOutputLengthError", - }, - apijson.UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(shared.MessageAbortedError{}), - DiscriminatorValue: "MessageAbortedError", - }, - ) -} - -type AssistantMessageErrorMessageOutputLengthError struct { - Data interface{} `json:"data,required"` - Name AssistantMessageErrorMessageOutputLengthErrorName `json:"name,required"` - JSON assistantMessageErrorMessageOutputLengthErrorJSON `json:"-"` -} - -// assistantMessageErrorMessageOutputLengthErrorJSON contains the JSON metadata for -// the struct [AssistantMessageErrorMessageOutputLengthError] -type assistantMessageErrorMessageOutputLengthErrorJSON struct { - Data apijson.Field - Name apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *AssistantMessageErrorMessageOutputLengthError) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r assistantMessageErrorMessageOutputLengthErrorJSON) RawJSON() string { - return r.raw -} - -func (r AssistantMessageErrorMessageOutputLengthError) ImplementsAssistantMessageError() {} - -type AssistantMessageErrorMessageOutputLengthErrorName string - -const ( - AssistantMessageErrorMessageOutputLengthErrorNameMessageOutputLengthError AssistantMessageErrorMessageOutputLengthErrorName = "MessageOutputLengthError" -) - -func (r AssistantMessageErrorMessageOutputLengthErrorName) IsKnown() bool { - switch r { - case AssistantMessageErrorMessageOutputLengthErrorNameMessageOutputLengthError: - return true - } - return false -} - -type AssistantMessageErrorName string - -const ( - AssistantMessageErrorNameProviderAuthError AssistantMessageErrorName = "ProviderAuthError" - AssistantMessageErrorNameUnknownError AssistantMessageErrorName = "UnknownError" - AssistantMessageErrorNameMessageOutputLengthError AssistantMessageErrorName = "MessageOutputLengthError" - AssistantMessageErrorNameMessageAbortedError AssistantMessageErrorName = "MessageAbortedError" -) - -func (r AssistantMessageErrorName) IsKnown() bool { - switch r { - case AssistantMessageErrorNameProviderAuthError, AssistantMessageErrorNameUnknownError, AssistantMessageErrorNameMessageOutputLengthError, AssistantMessageErrorNameMessageAbortedError: - return true - } - return false -} - -type FilePart struct { - ID string `json:"id,required"` - MessageID string `json:"messageID,required"` - Mime string `json:"mime,required"` - SessionID string `json:"sessionID,required"` - Type FilePartType `json:"type,required"` - URL string `json:"url,required"` - Filename string `json:"filename"` - JSON filePartJSON `json:"-"` -} - -// filePartJSON contains the JSON metadata for the struct [FilePart] -type filePartJSON struct { - ID apijson.Field - MessageID apijson.Field - Mime apijson.Field - SessionID apijson.Field - Type apijson.Field - URL apijson.Field - Filename apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - 
-func (r *FilePart) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r filePartJSON) RawJSON() string { - return r.raw -} - -func (r FilePart) implementsPart() {} - -type FilePartType string - -const ( - FilePartTypeFile FilePartType = "file" -) - -func (r FilePartType) IsKnown() bool { - switch r { - case FilePartTypeFile: - return true - } - return false -} - -type FilePartInputParam struct { - Mime param.Field[string] `json:"mime,required"` - Type param.Field[FilePartInputType] `json:"type,required"` - URL param.Field[string] `json:"url,required"` - ID param.Field[string] `json:"id"` - Filename param.Field[string] `json:"filename"` -} - -func (r FilePartInputParam) MarshalJSON() (data []byte, err error) { - return apijson.MarshalRoot(r) -} - -func (r FilePartInputParam) implementsSessionChatParamsPartUnion() {} - -type FilePartInputType string - -const ( - FilePartInputTypeFile FilePartInputType = "file" -) - -func (r FilePartInputType) IsKnown() bool { - switch r { - case FilePartInputTypeFile: - return true - } - return false -} - -type Message struct { - ID string `json:"id,required"` - Role MessageRole `json:"role,required"` - SessionID string `json:"sessionID,required"` - // This field can have the runtime type of [UserMessageTime], - // [AssistantMessageTime]. - Time interface{} `json:"time,required"` - Cost float64 `json:"cost"` - // This field can have the runtime type of [AssistantMessageError]. - Error interface{} `json:"error"` - ModelID string `json:"modelID"` - // This field can have the runtime type of [AssistantMessagePath]. - Path interface{} `json:"path"` - ProviderID string `json:"providerID"` - Summary bool `json:"summary"` - // This field can have the runtime type of [[]string]. - System interface{} `json:"system"` - // This field can have the runtime type of [AssistantMessageTokens]. - Tokens interface{} `json:"tokens"` - JSON messageJSON `json:"-"` - union MessageUnion -} - -// messageJSON contains the JSON metadata for the struct [Message] -type messageJSON struct { - ID apijson.Field - Role apijson.Field - SessionID apijson.Field - Time apijson.Field - Cost apijson.Field - Error apijson.Field - ModelID apijson.Field - Path apijson.Field - ProviderID apijson.Field - Summary apijson.Field - System apijson.Field - Tokens apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r messageJSON) RawJSON() string { - return r.raw -} - -func (r *Message) UnmarshalJSON(data []byte) (err error) { - *r = Message{} - err = apijson.UnmarshalRoot(data, &r.union) - if err != nil { - return err - } - return apijson.Port(r.union, &r) -} - -// AsUnion returns a [MessageUnion] interface which you can cast to the specific -// types for more type safety. -// -// Possible runtime types of the union are [UserMessage], [AssistantMessage]. -func (r Message) AsUnion() MessageUnion { - return r.union -} - -// Union satisfied by [UserMessage] or [AssistantMessage]. 
-type MessageUnion interface { - implementsMessage() -} - -func init() { - apijson.RegisterUnion( - reflect.TypeOf((*MessageUnion)(nil)).Elem(), - "role", - apijson.UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(UserMessage{}), - DiscriminatorValue: "user", - }, - apijson.UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(AssistantMessage{}), - DiscriminatorValue: "assistant", - }, - ) -} - -type MessageRole string - -const ( - MessageRoleUser MessageRole = "user" - MessageRoleAssistant MessageRole = "assistant" -) - -func (r MessageRole) IsKnown() bool { - switch r { - case MessageRoleUser, MessageRoleAssistant: - return true - } - return false -} - -type Part struct { - ID string `json:"id,required"` - MessageID string `json:"messageID,required"` - SessionID string `json:"sessionID,required"` - Type PartType `json:"type,required"` - CallID string `json:"callID"` - Cost float64 `json:"cost"` - Filename string `json:"filename"` - Mime string `json:"mime"` - Snapshot string `json:"snapshot"` - // This field can have the runtime type of [ToolPartState]. - State interface{} `json:"state"` - Synthetic bool `json:"synthetic"` - Text string `json:"text"` - // This field can have the runtime type of [TextPartTime]. - Time interface{} `json:"time"` - // This field can have the runtime type of [StepFinishPartTokens]. - Tokens interface{} `json:"tokens"` - Tool string `json:"tool"` - URL string `json:"url"` - JSON partJSON `json:"-"` - union PartUnion -} - -// partJSON contains the JSON metadata for the struct [Part] -type partJSON struct { - ID apijson.Field - MessageID apijson.Field - SessionID apijson.Field - Type apijson.Field - CallID apijson.Field - Cost apijson.Field - Filename apijson.Field - Mime apijson.Field - Snapshot apijson.Field - State apijson.Field - Synthetic apijson.Field - Text apijson.Field - Time apijson.Field - Tokens apijson.Field - Tool apijson.Field - URL apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r partJSON) RawJSON() string { - return r.raw -} - -func (r *Part) UnmarshalJSON(data []byte) (err error) { - *r = Part{} - err = apijson.UnmarshalRoot(data, &r.union) - if err != nil { - return err - } - return apijson.Port(r.union, &r) -} - -// AsUnion returns a [PartUnion] interface which you can cast to the specific types -// for more type safety. -// -// Possible runtime types of the union are [TextPart], [FilePart], [ToolPart], -// [StepStartPart], [StepFinishPart], [SnapshotPart]. -func (r Part) AsUnion() PartUnion { - return r.union -} - -// Union satisfied by [TextPart], [FilePart], [ToolPart], [StepStartPart], -// [StepFinishPart] or [SnapshotPart]. 
-type PartUnion interface { - implementsPart() -} - -func init() { - apijson.RegisterUnion( - reflect.TypeOf((*PartUnion)(nil)).Elem(), - "type", - apijson.UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(TextPart{}), - DiscriminatorValue: "text", - }, - apijson.UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(FilePart{}), - DiscriminatorValue: "file", - }, - apijson.UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(ToolPart{}), - DiscriminatorValue: "tool", - }, - apijson.UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(StepStartPart{}), - DiscriminatorValue: "step-start", - }, - apijson.UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(StepFinishPart{}), - DiscriminatorValue: "step-finish", - }, - apijson.UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(SnapshotPart{}), - DiscriminatorValue: "snapshot", - }, - ) -} - -type PartType string - -const ( - PartTypeText PartType = "text" - PartTypeFile PartType = "file" - PartTypeTool PartType = "tool" - PartTypeStepStart PartType = "step-start" - PartTypeStepFinish PartType = "step-finish" - PartTypeSnapshot PartType = "snapshot" -) - -func (r PartType) IsKnown() bool { - switch r { - case PartTypeText, PartTypeFile, PartTypeTool, PartTypeStepStart, PartTypeStepFinish, PartTypeSnapshot: - return true - } - return false -} - -type Session struct { - ID string `json:"id,required"` - Time SessionTime `json:"time,required"` - Title string `json:"title,required"` - Version string `json:"version,required"` - ParentID string `json:"parentID"` - Revert SessionRevert `json:"revert"` - Share SessionShare `json:"share"` - JSON sessionJSON `json:"-"` -} - -// sessionJSON contains the JSON metadata for the struct [Session] -type sessionJSON struct { - ID apijson.Field - Time apijson.Field - Title apijson.Field - Version apijson.Field - ParentID apijson.Field - Revert apijson.Field - Share apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *Session) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r sessionJSON) RawJSON() string { - return r.raw -} - -type SessionTime struct { - Created float64 `json:"created,required"` - Updated float64 `json:"updated,required"` - JSON sessionTimeJSON `json:"-"` -} - -// sessionTimeJSON contains the JSON metadata for the struct [SessionTime] -type sessionTimeJSON struct { - Created apijson.Field - Updated apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *SessionTime) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r sessionTimeJSON) RawJSON() string { - return r.raw -} - -type SessionRevert struct { - MessageID string `json:"messageID,required"` - Part float64 `json:"part,required"` - Snapshot string `json:"snapshot"` - JSON sessionRevertJSON `json:"-"` -} - -// sessionRevertJSON contains the JSON metadata for the struct [SessionRevert] -type sessionRevertJSON struct { - MessageID apijson.Field - Part apijson.Field - Snapshot apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *SessionRevert) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r sessionRevertJSON) RawJSON() string { - return r.raw -} - -type SessionShare struct { - URL string `json:"url,required"` - JSON sessionShareJSON `json:"-"` -} - -// sessionShareJSON contains the JSON metadata for the struct [SessionShare] -type sessionShareJSON struct { - URL 
apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *SessionShare) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r sessionShareJSON) RawJSON() string { - return r.raw -} - -type SnapshotPart struct { - ID string `json:"id,required"` - MessageID string `json:"messageID,required"` - SessionID string `json:"sessionID,required"` - Snapshot string `json:"snapshot,required"` - Type SnapshotPartType `json:"type,required"` - JSON snapshotPartJSON `json:"-"` -} - -// snapshotPartJSON contains the JSON metadata for the struct [SnapshotPart] -type snapshotPartJSON struct { - ID apijson.Field - MessageID apijson.Field - SessionID apijson.Field - Snapshot apijson.Field - Type apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *SnapshotPart) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r snapshotPartJSON) RawJSON() string { - return r.raw -} - -func (r SnapshotPart) implementsPart() {} - -type SnapshotPartType string - -const ( - SnapshotPartTypeSnapshot SnapshotPartType = "snapshot" -) - -func (r SnapshotPartType) IsKnown() bool { - switch r { - case SnapshotPartTypeSnapshot: - return true - } - return false -} - -type StepFinishPart struct { - ID string `json:"id,required"` - Cost float64 `json:"cost,required"` - MessageID string `json:"messageID,required"` - SessionID string `json:"sessionID,required"` - Tokens StepFinishPartTokens `json:"tokens,required"` - Type StepFinishPartType `json:"type,required"` - JSON stepFinishPartJSON `json:"-"` -} - -// stepFinishPartJSON contains the JSON metadata for the struct [StepFinishPart] -type stepFinishPartJSON struct { - ID apijson.Field - Cost apijson.Field - MessageID apijson.Field - SessionID apijson.Field - Tokens apijson.Field - Type apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *StepFinishPart) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r stepFinishPartJSON) RawJSON() string { - return r.raw -} - -func (r StepFinishPart) implementsPart() {} - -type StepFinishPartTokens struct { - Cache StepFinishPartTokensCache `json:"cache,required"` - Input float64 `json:"input,required"` - Output float64 `json:"output,required"` - Reasoning float64 `json:"reasoning,required"` - JSON stepFinishPartTokensJSON `json:"-"` -} - -// stepFinishPartTokensJSON contains the JSON metadata for the struct -// [StepFinishPartTokens] -type stepFinishPartTokensJSON struct { - Cache apijson.Field - Input apijson.Field - Output apijson.Field - Reasoning apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *StepFinishPartTokens) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r stepFinishPartTokensJSON) RawJSON() string { - return r.raw -} - -type StepFinishPartTokensCache struct { - Read float64 `json:"read,required"` - Write float64 `json:"write,required"` - JSON stepFinishPartTokensCacheJSON `json:"-"` -} - -// stepFinishPartTokensCacheJSON contains the JSON metadata for the struct -// [StepFinishPartTokensCache] -type stepFinishPartTokensCacheJSON struct { - Read apijson.Field - Write apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *StepFinishPartTokensCache) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r stepFinishPartTokensCacheJSON) RawJSON() string { - return r.raw -} - -type StepFinishPartType 
string - -const ( - StepFinishPartTypeStepFinish StepFinishPartType = "step-finish" -) - -func (r StepFinishPartType) IsKnown() bool { - switch r { - case StepFinishPartTypeStepFinish: - return true - } - return false -} - -type StepStartPart struct { - ID string `json:"id,required"` - MessageID string `json:"messageID,required"` - SessionID string `json:"sessionID,required"` - Type StepStartPartType `json:"type,required"` - JSON stepStartPartJSON `json:"-"` -} - -// stepStartPartJSON contains the JSON metadata for the struct [StepStartPart] -type stepStartPartJSON struct { - ID apijson.Field - MessageID apijson.Field - SessionID apijson.Field - Type apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *StepStartPart) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r stepStartPartJSON) RawJSON() string { - return r.raw -} - -func (r StepStartPart) implementsPart() {} - -type StepStartPartType string - -const ( - StepStartPartTypeStepStart StepStartPartType = "step-start" -) - -func (r StepStartPartType) IsKnown() bool { - switch r { - case StepStartPartTypeStepStart: - return true - } - return false -} - -type TextPart struct { - ID string `json:"id,required"` - MessageID string `json:"messageID,required"` - SessionID string `json:"sessionID,required"` - Text string `json:"text,required"` - Type TextPartType `json:"type,required"` - Synthetic bool `json:"synthetic"` - Time TextPartTime `json:"time"` - JSON textPartJSON `json:"-"` -} - -// textPartJSON contains the JSON metadata for the struct [TextPart] -type textPartJSON struct { - ID apijson.Field - MessageID apijson.Field - SessionID apijson.Field - Text apijson.Field - Type apijson.Field - Synthetic apijson.Field - Time apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *TextPart) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r textPartJSON) RawJSON() string { - return r.raw -} - -func (r TextPart) implementsPart() {} - -type TextPartType string - -const ( - TextPartTypeText TextPartType = "text" -) - -func (r TextPartType) IsKnown() bool { - switch r { - case TextPartTypeText: - return true - } - return false -} - -type TextPartTime struct { - Start float64 `json:"start,required"` - End float64 `json:"end"` - JSON textPartTimeJSON `json:"-"` -} - -// textPartTimeJSON contains the JSON metadata for the struct [TextPartTime] -type textPartTimeJSON struct { - Start apijson.Field - End apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *TextPartTime) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r textPartTimeJSON) RawJSON() string { - return r.raw -} - -type TextPartInputParam struct { - Text param.Field[string] `json:"text,required"` - Type param.Field[TextPartInputType] `json:"type,required"` - ID param.Field[string] `json:"id"` - Synthetic param.Field[bool] `json:"synthetic"` - Time param.Field[TextPartInputTimeParam] `json:"time"` -} - -func (r TextPartInputParam) MarshalJSON() (data []byte, err error) { - return apijson.MarshalRoot(r) -} - -func (r TextPartInputParam) implementsSessionChatParamsPartUnion() {} - -type TextPartInputType string - -const ( - TextPartInputTypeText TextPartInputType = "text" -) - -func (r TextPartInputType) IsKnown() bool { - switch r { - case TextPartInputTypeText: - return true - } - return false -} - -type TextPartInputTimeParam struct { - Start param.Field[float64] 
`json:"start,required"` - End param.Field[float64] `json:"end"` -} - -func (r TextPartInputTimeParam) MarshalJSON() (data []byte, err error) { - return apijson.MarshalRoot(r) -} - -type ToolPart struct { - ID string `json:"id,required"` - CallID string `json:"callID,required"` - MessageID string `json:"messageID,required"` - SessionID string `json:"sessionID,required"` - State ToolPartState `json:"state,required"` - Tool string `json:"tool,required"` - Type ToolPartType `json:"type,required"` - JSON toolPartJSON `json:"-"` -} - -// toolPartJSON contains the JSON metadata for the struct [ToolPart] -type toolPartJSON struct { - ID apijson.Field - CallID apijson.Field - MessageID apijson.Field - SessionID apijson.Field - State apijson.Field - Tool apijson.Field - Type apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *ToolPart) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r toolPartJSON) RawJSON() string { - return r.raw -} - -func (r ToolPart) implementsPart() {} - -type ToolPartState struct { - Status ToolPartStateStatus `json:"status,required"` - Error string `json:"error"` - // This field can have the runtime type of [interface{}], [map[string]interface{}]. - Input interface{} `json:"input"` - // This field can have the runtime type of [map[string]interface{}]. - Metadata interface{} `json:"metadata"` - Output string `json:"output"` - // This field can have the runtime type of [ToolStateRunningTime], - // [ToolStateCompletedTime], [ToolStateErrorTime]. - Time interface{} `json:"time"` - Title string `json:"title"` - JSON toolPartStateJSON `json:"-"` - union ToolPartStateUnion -} - -// toolPartStateJSON contains the JSON metadata for the struct [ToolPartState] -type toolPartStateJSON struct { - Status apijson.Field - Error apijson.Field - Input apijson.Field - Metadata apijson.Field - Output apijson.Field - Time apijson.Field - Title apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r toolPartStateJSON) RawJSON() string { - return r.raw -} - -func (r *ToolPartState) UnmarshalJSON(data []byte) (err error) { - *r = ToolPartState{} - err = apijson.UnmarshalRoot(data, &r.union) - if err != nil { - return err - } - return apijson.Port(r.union, &r) -} - -// AsUnion returns a [ToolPartStateUnion] interface which you can cast to the -// specific types for more type safety. -// -// Possible runtime types of the union are [ToolStatePending], [ToolStateRunning], -// [ToolStateCompleted], [ToolStateError]. -func (r ToolPartState) AsUnion() ToolPartStateUnion { - return r.union -} - -// Union satisfied by [ToolStatePending], [ToolStateRunning], [ToolStateCompleted] -// or [ToolStateError]. 
-type ToolPartStateUnion interface { - implementsToolPartState() -} - -func init() { - apijson.RegisterUnion( - reflect.TypeOf((*ToolPartStateUnion)(nil)).Elem(), - "status", - apijson.UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(ToolStatePending{}), - DiscriminatorValue: "pending", - }, - apijson.UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(ToolStateRunning{}), - DiscriminatorValue: "running", - }, - apijson.UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(ToolStateCompleted{}), - DiscriminatorValue: "completed", - }, - apijson.UnionVariant{ - TypeFilter: gjson.JSON, - Type: reflect.TypeOf(ToolStateError{}), - DiscriminatorValue: "error", - }, - ) -} - -type ToolPartStateStatus string - -const ( - ToolPartStateStatusPending ToolPartStateStatus = "pending" - ToolPartStateStatusRunning ToolPartStateStatus = "running" - ToolPartStateStatusCompleted ToolPartStateStatus = "completed" - ToolPartStateStatusError ToolPartStateStatus = "error" -) - -func (r ToolPartStateStatus) IsKnown() bool { - switch r { - case ToolPartStateStatusPending, ToolPartStateStatusRunning, ToolPartStateStatusCompleted, ToolPartStateStatusError: - return true - } - return false -} - -type ToolPartType string - -const ( - ToolPartTypeTool ToolPartType = "tool" -) - -func (r ToolPartType) IsKnown() bool { - switch r { - case ToolPartTypeTool: - return true - } - return false -} - -type ToolStateCompleted struct { - Input map[string]interface{} `json:"input,required"` - Metadata map[string]interface{} `json:"metadata,required"` - Output string `json:"output,required"` - Status ToolStateCompletedStatus `json:"status,required"` - Time ToolStateCompletedTime `json:"time,required"` - Title string `json:"title,required"` - JSON toolStateCompletedJSON `json:"-"` -} - -// toolStateCompletedJSON contains the JSON metadata for the struct -// [ToolStateCompleted] -type toolStateCompletedJSON struct { - Input apijson.Field - Metadata apijson.Field - Output apijson.Field - Status apijson.Field - Time apijson.Field - Title apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *ToolStateCompleted) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r toolStateCompletedJSON) RawJSON() string { - return r.raw -} - -func (r ToolStateCompleted) implementsToolPartState() {} - -type ToolStateCompletedStatus string - -const ( - ToolStateCompletedStatusCompleted ToolStateCompletedStatus = "completed" -) - -func (r ToolStateCompletedStatus) IsKnown() bool { - switch r { - case ToolStateCompletedStatusCompleted: - return true - } - return false -} - -type ToolStateCompletedTime struct { - End float64 `json:"end,required"` - Start float64 `json:"start,required"` - JSON toolStateCompletedTimeJSON `json:"-"` -} - -// toolStateCompletedTimeJSON contains the JSON metadata for the struct -// [ToolStateCompletedTime] -type toolStateCompletedTimeJSON struct { - End apijson.Field - Start apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *ToolStateCompletedTime) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r toolStateCompletedTimeJSON) RawJSON() string { - return r.raw -} - -type ToolStateError struct { - Error string `json:"error,required"` - Input map[string]interface{} `json:"input,required"` - Status ToolStateErrorStatus `json:"status,required"` - Time ToolStateErrorTime `json:"time,required"` - JSON toolStateErrorJSON `json:"-"` -} - -// toolStateErrorJSON 
contains the JSON metadata for the struct [ToolStateError] -type toolStateErrorJSON struct { - Error apijson.Field - Input apijson.Field - Status apijson.Field - Time apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *ToolStateError) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r toolStateErrorJSON) RawJSON() string { - return r.raw -} - -func (r ToolStateError) implementsToolPartState() {} - -type ToolStateErrorStatus string - -const ( - ToolStateErrorStatusError ToolStateErrorStatus = "error" -) - -func (r ToolStateErrorStatus) IsKnown() bool { - switch r { - case ToolStateErrorStatusError: - return true - } - return false -} - -type ToolStateErrorTime struct { - End float64 `json:"end,required"` - Start float64 `json:"start,required"` - JSON toolStateErrorTimeJSON `json:"-"` -} - -// toolStateErrorTimeJSON contains the JSON metadata for the struct -// [ToolStateErrorTime] -type toolStateErrorTimeJSON struct { - End apijson.Field - Start apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *ToolStateErrorTime) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r toolStateErrorTimeJSON) RawJSON() string { - return r.raw -} - -type ToolStatePending struct { - Status ToolStatePendingStatus `json:"status,required"` - JSON toolStatePendingJSON `json:"-"` -} - -// toolStatePendingJSON contains the JSON metadata for the struct -// [ToolStatePending] -type toolStatePendingJSON struct { - Status apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *ToolStatePending) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r toolStatePendingJSON) RawJSON() string { - return r.raw -} - -func (r ToolStatePending) implementsToolPartState() {} - -type ToolStatePendingStatus string - -const ( - ToolStatePendingStatusPending ToolStatePendingStatus = "pending" -) - -func (r ToolStatePendingStatus) IsKnown() bool { - switch r { - case ToolStatePendingStatusPending: - return true - } - return false -} - -type ToolStateRunning struct { - Status ToolStateRunningStatus `json:"status,required"` - Time ToolStateRunningTime `json:"time,required"` - Input interface{} `json:"input"` - Metadata map[string]interface{} `json:"metadata"` - Title string `json:"title"` - JSON toolStateRunningJSON `json:"-"` -} - -// toolStateRunningJSON contains the JSON metadata for the struct -// [ToolStateRunning] -type toolStateRunningJSON struct { - Status apijson.Field - Time apijson.Field - Input apijson.Field - Metadata apijson.Field - Title apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *ToolStateRunning) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r toolStateRunningJSON) RawJSON() string { - return r.raw -} - -func (r ToolStateRunning) implementsToolPartState() {} - -type ToolStateRunningStatus string - -const ( - ToolStateRunningStatusRunning ToolStateRunningStatus = "running" -) - -func (r ToolStateRunningStatus) IsKnown() bool { - switch r { - case ToolStateRunningStatusRunning: - return true - } - return false -} - -type ToolStateRunningTime struct { - Start float64 `json:"start,required"` - JSON toolStateRunningTimeJSON `json:"-"` -} - -// toolStateRunningTimeJSON contains the JSON metadata for the struct -// [ToolStateRunningTime] -type toolStateRunningTimeJSON struct { - Start apijson.Field - raw string - ExtraFields 
map[string]apijson.Field -} - -func (r *ToolStateRunningTime) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r toolStateRunningTimeJSON) RawJSON() string { - return r.raw -} - -type UserMessage struct { - ID string `json:"id,required"` - Role UserMessageRole `json:"role,required"` - SessionID string `json:"sessionID,required"` - Time UserMessageTime `json:"time,required"` - JSON userMessageJSON `json:"-"` -} - -// userMessageJSON contains the JSON metadata for the struct [UserMessage] -type userMessageJSON struct { - ID apijson.Field - Role apijson.Field - SessionID apijson.Field - Time apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *UserMessage) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r userMessageJSON) RawJSON() string { - return r.raw -} - -func (r UserMessage) implementsMessage() {} - -type UserMessageRole string - -const ( - UserMessageRoleUser UserMessageRole = "user" -) - -func (r UserMessageRole) IsKnown() bool { - switch r { - case UserMessageRoleUser: - return true - } - return false -} - -type UserMessageTime struct { - Created float64 `json:"created,required"` - JSON userMessageTimeJSON `json:"-"` -} - -// userMessageTimeJSON contains the JSON metadata for the struct [UserMessageTime] -type userMessageTimeJSON struct { - Created apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *UserMessageTime) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r userMessageTimeJSON) RawJSON() string { - return r.raw -} - -type SessionMessagesResponse struct { - Info Message `json:"info,required"` - Parts []Part `json:"parts,required"` - JSON sessionMessagesResponseJSON `json:"-"` -} - -// sessionMessagesResponseJSON contains the JSON metadata for the struct -// [SessionMessagesResponse] -type sessionMessagesResponseJSON struct { - Info apijson.Field - Parts apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *SessionMessagesResponse) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r sessionMessagesResponseJSON) RawJSON() string { - return r.raw -} - -type SessionChatParams struct { - ModelID param.Field[string] `json:"modelID,required"` - Parts param.Field[[]SessionChatParamsPartUnion] `json:"parts,required"` - ProviderID param.Field[string] `json:"providerID,required"` - MessageID param.Field[string] `json:"messageID"` - Mode param.Field[string] `json:"mode"` -} - -func (r SessionChatParams) MarshalJSON() (data []byte, err error) { - return apijson.MarshalRoot(r) -} - -type SessionChatParamsPart struct { - Type param.Field[SessionChatParamsPartsType] `json:"type,required"` - ID param.Field[string] `json:"id"` - Filename param.Field[string] `json:"filename"` - Mime param.Field[string] `json:"mime"` - Synthetic param.Field[bool] `json:"synthetic"` - Text param.Field[string] `json:"text"` - Time param.Field[interface{}] `json:"time"` - URL param.Field[string] `json:"url"` -} - -func (r SessionChatParamsPart) MarshalJSON() (data []byte, err error) { - return apijson.MarshalRoot(r) -} - -func (r SessionChatParamsPart) implementsSessionChatParamsPartUnion() {} - -// Satisfied by [TextPartInputParam], [FilePartInputParam], -// [SessionChatParamsPart]. 
-type SessionChatParamsPartUnion interface { - implementsSessionChatParamsPartUnion() -} - -type SessionChatParamsPartsType string - -const ( - SessionChatParamsPartsTypeText SessionChatParamsPartsType = "text" - SessionChatParamsPartsTypeFile SessionChatParamsPartsType = "file" -) - -func (r SessionChatParamsPartsType) IsKnown() bool { - switch r { - case SessionChatParamsPartsTypeText, SessionChatParamsPartsTypeFile: - return true - } - return false -} - -type SessionInitParams struct { - MessageID param.Field[string] `json:"messageID,required"` - ModelID param.Field[string] `json:"modelID,required"` - ProviderID param.Field[string] `json:"providerID,required"` -} - -func (r SessionInitParams) MarshalJSON() (data []byte, err error) { - return apijson.MarshalRoot(r) -} - -type SessionSummarizeParams struct { - ModelID param.Field[string] `json:"modelID,required"` - ProviderID param.Field[string] `json:"providerID,required"` -} - -func (r SessionSummarizeParams) MarshalJSON() (data []byte, err error) { - return apijson.MarshalRoot(r) -} diff --git a/packages/tui/sdk/session_test.go b/packages/tui/sdk/session_test.go deleted file mode 100644 index b96a98b9..00000000 --- a/packages/tui/sdk/session_test.go +++ /dev/null @@ -1,268 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -package opencode_test - -import ( - "context" - "errors" - "os" - "testing" - - "github.com/sst/opencode-sdk-go" - "github.com/sst/opencode-sdk-go/internal/testutil" - "github.com/sst/opencode-sdk-go/option" -) - -func TestSessionNew(t *testing.T) { - t.Skip("skipped: tests are disabled for the time being") - baseURL := "http://localhost:4010" - if envURL, ok := os.LookupEnv("TEST_API_BASE_URL"); ok { - baseURL = envURL - } - if !testutil.CheckTestServer(t, baseURL) { - return - } - client := opencode.NewClient( - option.WithBaseURL(baseURL), - ) - _, err := client.Session.New(context.TODO()) - if err != nil { - var apierr *opencode.Error - if errors.As(err, &apierr) { - t.Log(string(apierr.DumpRequest(true))) - } - t.Fatalf("err should be nil: %s", err.Error()) - } -} - -func TestSessionList(t *testing.T) { - t.Skip("skipped: tests are disabled for the time being") - baseURL := "http://localhost:4010" - if envURL, ok := os.LookupEnv("TEST_API_BASE_URL"); ok { - baseURL = envURL - } - if !testutil.CheckTestServer(t, baseURL) { - return - } - client := opencode.NewClient( - option.WithBaseURL(baseURL), - ) - _, err := client.Session.List(context.TODO()) - if err != nil { - var apierr *opencode.Error - if errors.As(err, &apierr) { - t.Log(string(apierr.DumpRequest(true))) - } - t.Fatalf("err should be nil: %s", err.Error()) - } -} - -func TestSessionDelete(t *testing.T) { - t.Skip("skipped: tests are disabled for the time being") - baseURL := "http://localhost:4010" - if envURL, ok := os.LookupEnv("TEST_API_BASE_URL"); ok { - baseURL = envURL - } - if !testutil.CheckTestServer(t, baseURL) { - return - } - client := opencode.NewClient( - option.WithBaseURL(baseURL), - ) - _, err := client.Session.Delete(context.TODO(), "id") - if err != nil { - var apierr *opencode.Error - if errors.As(err, &apierr) { - t.Log(string(apierr.DumpRequest(true))) - } - t.Fatalf("err should be nil: %s", err.Error()) - } -} - -func TestSessionAbort(t *testing.T) { - t.Skip("skipped: tests are disabled for the time being") - baseURL := "http://localhost:4010" - if envURL, ok := os.LookupEnv("TEST_API_BASE_URL"); ok { - baseURL = envURL - } - if !testutil.CheckTestServer(t, baseURL) { - return - } 
- client := opencode.NewClient( - option.WithBaseURL(baseURL), - ) - _, err := client.Session.Abort(context.TODO(), "id") - if err != nil { - var apierr *opencode.Error - if errors.As(err, &apierr) { - t.Log(string(apierr.DumpRequest(true))) - } - t.Fatalf("err should be nil: %s", err.Error()) - } -} - -func TestSessionChatWithOptionalParams(t *testing.T) { - t.Skip("skipped: tests are disabled for the time being") - baseURL := "http://localhost:4010" - if envURL, ok := os.LookupEnv("TEST_API_BASE_URL"); ok { - baseURL = envURL - } - if !testutil.CheckTestServer(t, baseURL) { - return - } - client := opencode.NewClient( - option.WithBaseURL(baseURL), - ) - _, err := client.Session.Chat( - context.TODO(), - "id", - opencode.SessionChatParams{ - ModelID: opencode.F("modelID"), - Parts: opencode.F([]opencode.SessionChatParamsPartUnion{opencode.TextPartInputParam{ - Text: opencode.F("text"), - Type: opencode.F(opencode.TextPartInputTypeText), - ID: opencode.F("id"), - Synthetic: opencode.F(true), - Time: opencode.F(opencode.TextPartInputTimeParam{ - Start: opencode.F(0.000000), - End: opencode.F(0.000000), - }), - }}), - ProviderID: opencode.F("providerID"), - MessageID: opencode.F("msg"), - Mode: opencode.F("mode"), - }, - ) - if err != nil { - var apierr *opencode.Error - if errors.As(err, &apierr) { - t.Log(string(apierr.DumpRequest(true))) - } - t.Fatalf("err should be nil: %s", err.Error()) - } -} - -func TestSessionInit(t *testing.T) { - t.Skip("skipped: tests are disabled for the time being") - baseURL := "http://localhost:4010" - if envURL, ok := os.LookupEnv("TEST_API_BASE_URL"); ok { - baseURL = envURL - } - if !testutil.CheckTestServer(t, baseURL) { - return - } - client := opencode.NewClient( - option.WithBaseURL(baseURL), - ) - _, err := client.Session.Init( - context.TODO(), - "id", - opencode.SessionInitParams{ - MessageID: opencode.F("messageID"), - ModelID: opencode.F("modelID"), - ProviderID: opencode.F("providerID"), - }, - ) - if err != nil { - var apierr *opencode.Error - if errors.As(err, &apierr) { - t.Log(string(apierr.DumpRequest(true))) - } - t.Fatalf("err should be nil: %s", err.Error()) - } -} - -func TestSessionMessages(t *testing.T) { - t.Skip("skipped: tests are disabled for the time being") - baseURL := "http://localhost:4010" - if envURL, ok := os.LookupEnv("TEST_API_BASE_URL"); ok { - baseURL = envURL - } - if !testutil.CheckTestServer(t, baseURL) { - return - } - client := opencode.NewClient( - option.WithBaseURL(baseURL), - ) - _, err := client.Session.Messages(context.TODO(), "id") - if err != nil { - var apierr *opencode.Error - if errors.As(err, &apierr) { - t.Log(string(apierr.DumpRequest(true))) - } - t.Fatalf("err should be nil: %s", err.Error()) - } -} - -func TestSessionShare(t *testing.T) { - t.Skip("skipped: tests are disabled for the time being") - baseURL := "http://localhost:4010" - if envURL, ok := os.LookupEnv("TEST_API_BASE_URL"); ok { - baseURL = envURL - } - if !testutil.CheckTestServer(t, baseURL) { - return - } - client := opencode.NewClient( - option.WithBaseURL(baseURL), - ) - _, err := client.Session.Share(context.TODO(), "id") - if err != nil { - var apierr *opencode.Error - if errors.As(err, &apierr) { - t.Log(string(apierr.DumpRequest(true))) - } - t.Fatalf("err should be nil: %s", err.Error()) - } -} - -func TestSessionSummarize(t *testing.T) { - t.Skip("skipped: tests are disabled for the time being") - baseURL := "http://localhost:4010" - if envURL, ok := os.LookupEnv("TEST_API_BASE_URL"); ok { - baseURL = envURL - } - if 
!testutil.CheckTestServer(t, baseURL) { - return - } - client := opencode.NewClient( - option.WithBaseURL(baseURL), - ) - _, err := client.Session.Summarize( - context.TODO(), - "id", - opencode.SessionSummarizeParams{ - ModelID: opencode.F("modelID"), - ProviderID: opencode.F("providerID"), - }, - ) - if err != nil { - var apierr *opencode.Error - if errors.As(err, &apierr) { - t.Log(string(apierr.DumpRequest(true))) - } - t.Fatalf("err should be nil: %s", err.Error()) - } -} - -func TestSessionUnshare(t *testing.T) { - t.Skip("skipped: tests are disabled for the time being") - baseURL := "http://localhost:4010" - if envURL, ok := os.LookupEnv("TEST_API_BASE_URL"); ok { - baseURL = envURL - } - if !testutil.CheckTestServer(t, baseURL) { - return - } - client := opencode.NewClient( - option.WithBaseURL(baseURL), - ) - _, err := client.Session.Unshare(context.TODO(), "id") - if err != nil { - var apierr *opencode.Error - if errors.As(err, &apierr) { - t.Log(string(apierr.DumpRequest(true))) - } - t.Fatalf("err should be nil: %s", err.Error()) - } -} diff --git a/packages/tui/sdk/shared/shared.go b/packages/tui/sdk/shared/shared.go deleted file mode 100644 index 58baf3d9..00000000 --- a/packages/tui/sdk/shared/shared.go +++ /dev/null @@ -1,173 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -package shared - -import ( - "github.com/sst/opencode-sdk-go/internal/apijson" -) - -type MessageAbortedError struct { - Data interface{} `json:"data,required"` - Name MessageAbortedErrorName `json:"name,required"` - JSON messageAbortedErrorJSON `json:"-"` -} - -// messageAbortedErrorJSON contains the JSON metadata for the struct -// [MessageAbortedError] -type messageAbortedErrorJSON struct { - Data apijson.Field - Name apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *MessageAbortedError) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r messageAbortedErrorJSON) RawJSON() string { - return r.raw -} - -func (r MessageAbortedError) ImplementsEventListResponseEventSessionErrorPropertiesError() {} - -func (r MessageAbortedError) ImplementsAssistantMessageError() {} - -type MessageAbortedErrorName string - -const ( - MessageAbortedErrorNameMessageAbortedError MessageAbortedErrorName = "MessageAbortedError" -) - -func (r MessageAbortedErrorName) IsKnown() bool { - switch r { - case MessageAbortedErrorNameMessageAbortedError: - return true - } - return false -} - -type ProviderAuthError struct { - Data ProviderAuthErrorData `json:"data,required"` - Name ProviderAuthErrorName `json:"name,required"` - JSON providerAuthErrorJSON `json:"-"` -} - -// providerAuthErrorJSON contains the JSON metadata for the struct -// [ProviderAuthError] -type providerAuthErrorJSON struct { - Data apijson.Field - Name apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *ProviderAuthError) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r providerAuthErrorJSON) RawJSON() string { - return r.raw -} - -func (r ProviderAuthError) ImplementsEventListResponseEventSessionErrorPropertiesError() {} - -func (r ProviderAuthError) ImplementsAssistantMessageError() {} - -type ProviderAuthErrorData struct { - Message string `json:"message,required"` - ProviderID string `json:"providerID,required"` - JSON providerAuthErrorDataJSON `json:"-"` -} - -// providerAuthErrorDataJSON contains the JSON metadata for the struct -// [ProviderAuthErrorData] -type 
providerAuthErrorDataJSON struct { - Message apijson.Field - ProviderID apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *ProviderAuthErrorData) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r providerAuthErrorDataJSON) RawJSON() string { - return r.raw -} - -type ProviderAuthErrorName string - -const ( - ProviderAuthErrorNameProviderAuthError ProviderAuthErrorName = "ProviderAuthError" -) - -func (r ProviderAuthErrorName) IsKnown() bool { - switch r { - case ProviderAuthErrorNameProviderAuthError: - return true - } - return false -} - -type UnknownError struct { - Data UnknownErrorData `json:"data,required"` - Name UnknownErrorName `json:"name,required"` - JSON unknownErrorJSON `json:"-"` -} - -// unknownErrorJSON contains the JSON metadata for the struct [UnknownError] -type unknownErrorJSON struct { - Data apijson.Field - Name apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *UnknownError) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r unknownErrorJSON) RawJSON() string { - return r.raw -} - -func (r UnknownError) ImplementsEventListResponseEventSessionErrorPropertiesError() {} - -func (r UnknownError) ImplementsAssistantMessageError() {} - -type UnknownErrorData struct { - Message string `json:"message,required"` - JSON unknownErrorDataJSON `json:"-"` -} - -// unknownErrorDataJSON contains the JSON metadata for the struct -// [UnknownErrorData] -type unknownErrorDataJSON struct { - Message apijson.Field - raw string - ExtraFields map[string]apijson.Field -} - -func (r *UnknownErrorData) UnmarshalJSON(data []byte) (err error) { - return apijson.UnmarshalRoot(data, r) -} - -func (r unknownErrorDataJSON) RawJSON() string { - return r.raw -} - -type UnknownErrorName string - -const ( - UnknownErrorNameUnknownError UnknownErrorName = "UnknownError" -) - -func (r UnknownErrorName) IsKnown() bool { - switch r { - case UnknownErrorNameUnknownError: - return true - } - return false -} diff --git a/packages/tui/sdk/usage_test.go b/packages/tui/sdk/usage_test.go deleted file mode 100644 index ef7ce8bd..00000000 --- a/packages/tui/sdk/usage_test.go +++ /dev/null @@ -1,32 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
- -package opencode_test - -import ( - "context" - "os" - "testing" - - "github.com/sst/opencode-sdk-go" - "github.com/sst/opencode-sdk-go/internal/testutil" - "github.com/sst/opencode-sdk-go/option" -) - -func TestUsage(t *testing.T) { - baseURL := "http://localhost:4010" - if envURL, ok := os.LookupEnv("TEST_API_BASE_URL"); ok { - baseURL = envURL - } - if !testutil.CheckTestServer(t, baseURL) { - return - } - client := opencode.NewClient( - option.WithBaseURL(baseURL), - ) - sessions, err := client.Session.List(context.TODO()) - if err != nil { - t.Error(err) - return - } - t.Logf("%+v\n", sessions) -} diff --git a/packages/web/astro.config.mjs b/packages/web/astro.config.mjs index a250ce60..0c3a5ecc 100644 --- a/packages/web/astro.config.mjs +++ b/packages/web/astro.config.mjs @@ -7,7 +7,6 @@ import theme from "toolbeam-docs-theme" import config from "./config.mjs" import { rehypeHeadingIds } from "@astrojs/markdown-remark" import rehypeAutolinkHeadings from "rehype-autolink-headings" -import { spawnSync } from "child_process" const github = "https://github.com/sst/opencode" @@ -21,24 +20,18 @@ export default defineConfig({ devToolbar: { enabled: false, }, - server: { - host: "0.0.0.0", - }, markdown: { - rehypePlugins: [rehypeHeadingIds, [rehypeAutolinkHeadings, { behavior: "wrap" }]], + rehypePlugins: [ + rehypeHeadingIds, + [rehypeAutolinkHeadings, { behavior: "wrap" }], + ], }, - build: {}, integrations: [ - configSchema(), solidJs(), starlight({ title: "opencode", - lastUpdated: true, expressiveCode: { themes: ["github-light", "github-dark"] }, - social: [ - { icon: "github", label: "GitHub", href: config.github }, - { icon: "discord", label: "Dscord", href: config.discord }, - ], + social: [{ icon: "github", label: "GitHub", href: config.github }], head: [ { tag: "link", @@ -63,16 +56,12 @@ export default defineConfig({ sidebar: [ "docs", "docs/cli", - "docs/share", - "docs/modes", "docs/rules", "docs/config", "docs/models", "docs/themes", "docs/keybinds", - "docs/enterprise", "docs/mcp-servers", - "docs/troubleshooting", ], components: { Hero: "./src/components/Hero.astro", @@ -86,19 +75,4 @@ export default defineConfig({ ], }), ], - redirects: { - "/discord": "https://discord.gg/opencode", - }, }) - -function configSchema() { - return { - name: "configSchema", - hooks: { - "astro:build:done": async () => { - console.log("generating config schema") - spawnSync("../opencode/script/schema.ts", ["./dist/config.json"]) - }, - }, - } -} diff --git a/packages/web/config.mjs b/packages/web/config.mjs index bb1ec003..b6ec26e3 100644 --- a/packages/web/config.mjs +++ b/packages/web/config.mjs @@ -1,12 +1,7 @@ -const stage = process.env.SST_STAGE || "dev" - export default { - url: stage === "production" - ? 
"https://opencode.ai" - : `https://${stage}.opencode.ai`, + url: "https://opencode.ai", socialCard: "https://social-cards.sst.dev", github: "https://github.com/sst/opencode", - discord: "https://opencode.ai/discord", headerLinks: [ { name: "Home", url: "/" }, { name: "Docs", url: "/docs/" }, diff --git a/packages/web/package.json b/packages/web/package.json index 548c84c3..383b979f 100644 --- a/packages/web/package.json +++ b/packages/web/package.json @@ -4,7 +4,6 @@ "version": "0.0.1", "scripts": { "dev": "astro dev", - "dev:remote": "sst shell --stage=dev --target=Web astro dev", "start": "astro dev", "build": "astro build", "preview": "astro preview", @@ -25,13 +24,11 @@ "lang-map": "0.4.0", "luxon": "3.6.1", "marked": "15.0.12", - "marked-shiki": "1.2.0", "rehype-autolink-headings": "7.1.0", - "remeda": "2.26.0", "sharp": "0.32.5", "shiki": "3.4.2", "solid-js": "1.9.7", - "toolbeam-docs-theme": "0.4.3" + "toolbeam-docs-theme": "0.4.1" }, "devDependencies": { "opencode": "workspace:*", diff --git a/packages/web/public/theme.json b/packages/web/public/theme.json index b3e97f7c..0b1b95f0 100644 --- a/packages/web/public/theme.json +++ b/packages/web/public/theme.json @@ -88,7 +88,14 @@ "syntaxOperator": { "$ref": "#/definitions/colorValue" }, "syntaxPunctuation": { "$ref": "#/definitions/colorValue" } }, - "required": ["primary", "secondary", "accent", "text", "textMuted", "background"], + "required": [ + "primary", + "secondary", + "accent", + "text", + "textMuted", + "background" + ], "additionalProperties": false } }, diff --git a/packages/web/src/assets/lander/screenshot.png b/packages/web/src/assets/lander/screenshot.png index feb61758..d49a62b4 100644 Binary files a/packages/web/src/assets/lander/screenshot.png and b/packages/web/src/assets/lander/screenshot.png differ diff --git a/packages/web/src/components/CodeBlock.tsx b/packages/web/src/components/CodeBlock.tsx new file mode 100644 index 00000000..4c6aab48 --- /dev/null +++ b/packages/web/src/components/CodeBlock.tsx @@ -0,0 +1,56 @@ +import { + type JSX, + onCleanup, + splitProps, + createEffect, + createResource, +} from "solid-js" +import { codeToHtml } from "shiki" +import styles from "./codeblock.module.css" +import { transformerNotationDiff } from "@shikijs/transformers" + +interface CodeBlockProps extends JSX.HTMLAttributes { + code: string + lang?: string + onRendered?: () => void +} +function CodeBlock(props: CodeBlockProps) { + const [local, rest] = splitProps(props, ["code", "lang", "onRendered"]) + let containerRef!: HTMLDivElement + + const [html] = createResource( + () => [local.code, local.lang], + async ([code, lang]) => { + return (await codeToHtml(code || "", { + lang: lang || "text", + themes: { + light: "github-light", + dark: "github-dark", + }, + transformers: [transformerNotationDiff()], + })) as string + }, + ) + + onCleanup(() => { + if (containerRef) containerRef.innerHTML = "" + }) + + createEffect(() => { + if (html() && containerRef) { + containerRef.innerHTML = html() as string + + local.onRendered?.() + } + }) + + return ( + <> + {html() ? ( +
+ ) : null} + + ) +} + +export default CodeBlock diff --git a/packages/web/src/components/share/content-diff.tsx b/packages/web/src/components/DiffView.tsx similarity index 64% rename from packages/web/src/components/share/content-diff.tsx rename to packages/web/src/components/DiffView.tsx index 45249e0c..66dd7f0f 100644 --- a/packages/web/src/components/share/content-diff.tsx +++ b/packages/web/src/components/DiffView.tsx @@ -1,7 +1,7 @@ +import { type Component, createMemo } from "solid-js" import { parsePatch } from "diff" -import { createMemo } from "solid-js" -import { ContentCode } from "./content-code" -import styles from "./content-diff.module.css" +import CodeBlock from "./CodeBlock" +import styles from "./diffview.module.css" type DiffRow = { left: string @@ -9,12 +9,14 @@ type DiffRow = { type: "added" | "removed" | "unchanged" | "modified" } -interface Props { +interface DiffViewProps { diff: string lang?: string + class?: string } -export function ContentDiff(props: Props) { +const DiffView: Component = (props) => { + const rows = createMemo(() => { const diffRows: DiffRow[] = [] @@ -31,20 +33,20 @@ export function ContentDiff(props: Props) { const content = line.slice(1) const prefix = line[0] - if (prefix === "-") { + if (prefix === '-') { // Look ahead for consecutive additions to pair with removals const removals: string[] = [content] let j = i + 1 // Collect all consecutive removals - while (j < lines.length && lines[j][0] === "-") { + while (j < lines.length && lines[j][0] === '-') { removals.push(lines[j].slice(1)) j++ } // Collect all consecutive additions that follow const additions: string[] = [] - while (j < lines.length && lines[j][0] === "+") { + while (j < lines.length && lines[j][0] === '+') { additions.push(lines[j].slice(1)) j++ } @@ -60,39 +62,39 @@ export function ContentDiff(props: Props) { diffRows.push({ left: removals[k], right: additions[k], - type: "modified", + type: "modified" }) } else if (hasLeft) { // Pure removal diffRows.push({ left: removals[k], right: "", - type: "removed", + type: "removed" }) } else if (hasRight) { // Pure addition - only create if we actually have content diffRows.push({ left: "", right: additions[k], - type: "added", + type: "added" }) } } i = j - } else if (prefix === "+") { + } else if (prefix === '+') { // Standalone addition (not paired with removal) diffRows.push({ left: "", right: content, - type: "added", + type: "added" }) i++ - } else if (prefix === " ") { + } else if (prefix === ' ') { diffRows.push({ - left: content === "" ? " " : content, - right: content === "" ? 
" " : content, - type: "unchanged", + left: content, + right: content, + type: "unchanged" }) i++ } else { @@ -110,7 +112,7 @@ export function ContentDiff(props: Props) { }) const mobileRows = createMemo(() => { - const mobileBlocks: { type: "removed" | "added" | "unchanged"; lines: string[] }[] = [] + const mobileBlocks: { type: 'removed' | 'added' | 'unchanged', lines: string[] }[] = [] const currentRows = rows() let i = 0 @@ -119,15 +121,15 @@ export function ContentDiff(props: Props) { const addedLines: string[] = [] // Collect consecutive modified/removed/added rows - while ( - i < currentRows.length && - (currentRows[i].type === "modified" || currentRows[i].type === "removed" || currentRows[i].type === "added") - ) { + while (i < currentRows.length && + (currentRows[i].type === 'modified' || + currentRows[i].type === 'removed' || + currentRows[i].type === 'added')) { const row = currentRows[i] - if (row.left && (row.type === "removed" || row.type === "modified")) { + if (row.left && (row.type === 'removed' || row.type === 'modified')) { removedLines.push(row.left) } - if (row.right && (row.type === "added" || row.type === "modified")) { + if (row.right && (row.type === 'added' || row.type === 'modified')) { addedLines.push(row.right) } i++ @@ -135,17 +137,17 @@ export function ContentDiff(props: Props) { // Add grouped blocks if (removedLines.length > 0) { - mobileBlocks.push({ type: "removed", lines: removedLines }) + mobileBlocks.push({ type: 'removed', lines: removedLines }) } if (addedLines.length > 0) { - mobileBlocks.push({ type: "added", lines: addedLines }) + mobileBlocks.push({ type: 'added', lines: addedLines }) } // Add unchanged rows as-is - if (i < currentRows.length && currentRows[i].type === "unchanged") { + if (i < currentRows.length && currentRows[i].type === 'unchanged') { mobileBlocks.push({ - type: "unchanged", - lines: [currentRows[i].left], + type: 'unchanged', + lines: [currentRows[i].left] }) i++ } @@ -155,27 +157,41 @@ export function ContentDiff(props: Props) { }) return ( -
-
+
+
{rows().map((r) => ( -
-
- +
+
+
-
- +
+
))}
-
+
{mobileRows().map((block) => ( -
+
{block.lines.map((line) => ( -
- -
+ ))}
))} @@ -184,6 +200,8 @@ export function ContentDiff(props: Props) { ) } +export default DiffView + // const testDiff = `--- combined_before.txt 2025-06-24 16:38:08 // +++ combined_after.txt 2025-06-24 16:38:12 // @@ -1,21 +1,25 @@ @@ -192,12 +210,12 @@ export function ContentDiff(props: Props) { // -old content // +added line // +new content -// +// // -removed empty line below // +added empty line above -// +// // - tab indented -// -trailing spaces +// -trailing spaces // -very long line that will definitely wrap in most editors and cause potential alignment issues when displayed in a two column diff view // -unicode content: 🚀 ✨ 中文 // -mixed content with tabs and spaces @@ -208,14 +226,14 @@ export function ContentDiff(props: Props) { // +different unicode: 🎉 💻 日本語 // +normalized content with consistent spacing // +newline to content -// +// // -content to remove -// -whitespace only: +// -whitespace only: // -multiple // -consecutive // -deletions // -single deletion -// + +// + // +single addition // +first addition // +second addition diff --git a/packages/web/src/components/Head.astro b/packages/web/src/components/Head.astro index f6166f58..3d9bc0f5 100644 --- a/packages/web/src/components/Head.astro +++ b/packages/web/src/components/Head.astro @@ -36,10 +36,6 @@ if (isDocs) { } --- -{ slug === "" && ( -{title} | AI coding agent built for the terminal -)} - { (isDocs || !slug.startsWith("s")) && ( diff --git a/packages/web/src/components/MarkdownView.tsx b/packages/web/src/components/MarkdownView.tsx new file mode 100644 index 00000000..5e21c0d7 --- /dev/null +++ b/packages/web/src/components/MarkdownView.tsx @@ -0,0 +1,21 @@ +import { type JSX, splitProps, createResource } from "solid-js" +import { marked } from "marked" +import styles from "./markdownview.module.css" + +interface MarkdownViewProps extends JSX.HTMLAttributes { + markdown: string +} + +function MarkdownView(props: MarkdownViewProps) { + const [local, rest] = splitProps(props, ["markdown"]) + const [html] = createResource(() => local.markdown, async (markdown) => { + return marked.parse(markdown) + }) + + return ( +
+ ) +} + +export default MarkdownView + diff --git a/packages/web/src/components/Share.tsx b/packages/web/src/components/Share.tsx index 7d9265bb..fd828629 100644 --- a/packages/web/src/components/Share.tsx +++ b/packages/web/src/components/Share.tsx @@ -1,18 +1,82 @@ -import { For, Show, onMount, Suspense, onCleanup, createMemo, createSignal, SuspenseList, createEffect } from "solid-js" +import { type JSX } from "solid-js" +import { + For, + Show, + Match, + Switch, + onMount, + onCleanup, + splitProps, + createMemo, + createEffect, + createSignal, +} from "solid-js" +import map from "lang-map" import { DateTime } from "luxon" -import { createStore, reconcile, unwrap } from "solid-js/store" -import { mapValues } from "remeda" -import { IconArrowDown } from "./icons" -import { IconOpencode } from "./icons/custom" +import { createStore, reconcile } from "solid-js/store" +import type { Diagnostic } from "vscode-languageserver-types" +import { + IconOpenAI, + IconGemini, + IconOpencode, + IconAnthropic, +} from "./icons/custom" +import { + IconFolder, + IconHashtag, + IconSparkles, + IconGlobeAlt, + IconDocument, + IconQueueList, + IconUserCircle, + IconCheckCircle, + IconChevronDown, + IconCommandLine, + IconChevronRight, + IconDocumentPlus, + IconPencilSquare, + IconRectangleStack, + IconMagnifyingGlass, + IconWrenchScrewdriver, + IconDocumentMagnifyingGlass, + IconArrowDown, +} from "./icons" +import DiffView from "./DiffView" +import CodeBlock from "./CodeBlock" +import MarkdownView from "./MarkdownView" import styles from "./share.module.css" -import type { MessageV2 } from "opencode/session/message-v2" import type { Message } from "opencode/session/message" import type { Session } from "opencode/session/index" -import { Part, ProviderIcon } from "./share/part" -type MessageWithParts = MessageV2.Info & { parts: MessageV2.Part[] } +const MIN_DURATION = 2 -type Status = "disconnected" | "connecting" | "connected" | "error" | "reconnecting" +type Status = + | "disconnected" + | "connecting" + | "connected" + | "error" + | "reconnecting" + +type TodoStatus = "pending" | "in_progress" | "completed" + +interface Todo { + id: string + content: string + status: TodoStatus + priority: "low" | "medium" | "high" +} + +function sortTodosByStatus(todos: Todo[]) { + const statusPriority: Record = { + in_progress: 0, + pending: 1, + completed: 2, + } + + return todos + .slice() + .sort((a, b) => statusPriority[a.status] - statusPriority[b.status]) +} function scrollToAnchor(id: string) { const el = document.getElementById(id) @@ -21,6 +85,146 @@ function scrollToAnchor(id: string) { el.scrollIntoView({ behavior: "smooth" }) } +function stripWorkingDirectory(filePath?: string, workingDir?: string) { + if (filePath === undefined || workingDir === undefined) return filePath + + const prefix = workingDir.endsWith("/") ? workingDir : workingDir + "/" + + if (filePath === workingDir) { + return "" + } + + if (filePath.startsWith(prefix)) { + return filePath.slice(prefix.length) + } + + return filePath +} + +function getShikiLang(filename: string) { + const ext = filename.split(".").pop()?.toLowerCase() ?? "" + + // map.languages(ext) returns an array of matching Linguist language names (e.g. ['TypeScript']) + const langs = map.languages(ext) + const type = langs?.[0]?.toLowerCase() + + // Overrride any specific language mappings + const overrides: Record = { + conf: "shellscript", + } + + return type ? (overrides[type] ?? 
type) : "plaintext"
+}
+
+function formatDuration(ms: number): string {
+  const ONE_SECOND = 1000
+  const ONE_MINUTE = 60 * ONE_SECOND
+
+  if (ms >= ONE_MINUTE) {
+    const minutes = Math.floor(ms / ONE_MINUTE)
+    return minutes === 1 ? `1min` : `${minutes}mins`
+  }
+
+  if (ms >= ONE_SECOND) {
+    const seconds = Math.floor(ms / ONE_SECOND)
+    return `${seconds}s`
+  }
+
+  return `${ms}ms`
+}
+
+// Converts nested objects/arrays into [path, value] pairs.
+// E.g. {a:{b:{c:1}}, d:[{e:2}, 3]} => [["a.b.c",1], ["d[0].e",2], ["d[1]",3]]
+function flattenToolArgs(obj: any, prefix: string = ""): Array<[string, any]> {
+  const entries: Array<[string, any]> = []
+
+  for (const [key, value] of Object.entries(obj)) {
+    const path = prefix ? `${prefix}.${key}` : key
+
+    if (value !== null && typeof value === "object") {
+      if (Array.isArray(value)) {
+        value.forEach((item, index) => {
+          const arrayPath = `${path}[${index}]`
+          if (item !== null && typeof item === "object") {
+            entries.push(...flattenToolArgs(item, arrayPath))
+          } else {
+            entries.push([arrayPath, item])
+          }
+        })
+      } else {
+        entries.push(...flattenToolArgs(value, path))
+      }
+    } else {
+      entries.push([path, value])
+    }
+  }
+
+  return entries
+}
+
+function formatErrorString(error: string): JSX.Element {
+  const errorMarker = "Error: "
+  const startsWithError = error.startsWith(errorMarker)
+
+  return startsWithError ? (
+
+      
+        Error
+      
+      {error.slice(errorMarker.length)}
+    
+ ) : ( +
+      {error}
+    
+ ) +} + +function getDiagnostics( + diagnosticsByFile: Record, + currentFile: string, +): JSX.Element[] { + // Return a flat array of error diagnostics, in the format: + // "Error [65:20] Property 'x' does not exist on type 'Y'" + const result: JSX.Element[] = [] + + if ( + diagnosticsByFile === undefined || + diagnosticsByFile[currentFile] === undefined + ) + return result + + for (const diags of Object.values(diagnosticsByFile)) { + for (const d of diags) { + // Only keep diagnostics explicitly marked as Error (severity === 1) + if (d.severity !== 1) continue + + const line = d.range.start.line + 1 // 1-based + const column = d.range.start.character + 1 // 1-based + + result.push( +
+          
+            Error
+          
+          
+            [{line}:{column}]
+          
+          {d.message}
+        
, + ) + } + } + + return result +} + +function stripEnclosingTag(text: string): string { + const wrappedRe = /^\s*<([A-Za-z]\w*)>\s*([\s\S]*?)\s*<\/\1>\s*$/ + const match = text.match(wrappedRe) + return match ? match[2] : text +} + function getStatusText(status: [Status, string?]): string { switch (status[0]) { case "connected": @@ -38,17 +242,362 @@ function getStatusText(status: [Status, string?]): string { } } +function ProviderIcon(props: { provider: string; size?: number }) { + const size = props.size || 16 + return ( + }> + + + + + + + + + + + ) +} + +interface ResultsButtonProps extends JSX.HTMLAttributes { + showCopy?: string + hideCopy?: string + results: boolean +} +function ResultsButton(props: ResultsButtonProps) { + const [local, rest] = splitProps(props, ["results", "showCopy", "hideCopy"]) + return ( + + ) +} + +interface TextPartProps extends JSX.HTMLAttributes { + text: string + expand?: boolean + invert?: boolean + highlight?: boolean +} +function TextPart(props: TextPartProps) { + const [local, rest] = splitProps(props, [ + "text", + "expand", + "invert", + "highlight", + ]) + const [expanded, setExpanded] = createSignal(false) + const [overflowed, setOverflowed] = createSignal(false) + let preEl: HTMLPreElement | undefined + + function checkOverflow() { + if (preEl && !local.expand) { + setOverflowed(preEl.scrollHeight > preEl.clientHeight + 1) + } + } + + onMount(() => { + checkOverflow() + window.addEventListener("resize", checkOverflow) + }) + + createEffect(() => { + local.text + setTimeout(checkOverflow, 0) + }) + + onCleanup(() => { + window.removeEventListener("resize", checkOverflow) + }) + + return ( +
+
 (preEl = el)}>{local.text}
+ {((!local.expand && overflowed()) || expanded()) && ( + + )} +
+ ) +} + +interface ErrorPartProps extends JSX.HTMLAttributes { + expand?: boolean +} +function ErrorPart(props: ErrorPartProps) { + const [local, rest] = splitProps(props, ["expand", "children"]) + const [expanded, setExpanded] = createSignal(false) + const [overflowed, setOverflowed] = createSignal(false) + let preEl: HTMLElement | undefined + + function checkOverflow() { + if (preEl && !local.expand) { + setOverflowed(preEl.scrollHeight > preEl.clientHeight + 1) + } + } + + onMount(() => { + checkOverflow() + window.addEventListener("resize", checkOverflow) + }) + + createEffect(() => { + local.children + setTimeout(checkOverflow, 0) + }) + + onCleanup(() => { + window.removeEventListener("resize", checkOverflow) + }) + + return ( +
+
(preEl = el)}> + {local.children} +
+ {((!local.expand && overflowed()) || expanded()) && ( + + )} +
+ ) +} + +interface MarkdownPartProps extends JSX.HTMLAttributes { + text: string + expand?: boolean + highlight?: boolean +} +function MarkdownPart(props: MarkdownPartProps) { + const [local, rest] = splitProps(props, ["text", "expand", "highlight"]) + const [expanded, setExpanded] = createSignal(false) + const [overflowed, setOverflowed] = createSignal(false) + let divEl: HTMLDivElement | undefined + + function checkOverflow() { + if (divEl && !local.expand) { + setOverflowed(divEl.scrollHeight > divEl.clientHeight + 1) + } + } + + onMount(() => { + checkOverflow() + window.addEventListener("resize", checkOverflow) + }) + + createEffect(() => { + local.text + setTimeout(checkOverflow, 0) + }) + + onCleanup(() => { + window.removeEventListener("resize", checkOverflow) + }) + + return ( +
+ (divEl = el)} + /> + {((!local.expand && overflowed()) || expanded()) && ( + + )} +
+ ) +} + +interface TerminalPartProps extends JSX.HTMLAttributes { + command: string + error?: string + result?: string + desc?: string + expand?: boolean +} +function TerminalPart(props: TerminalPartProps) { + const [local, rest] = splitProps(props, [ + "command", + "error", + "result", + "desc", + "expand", + ]) + const [expanded, setExpanded] = createSignal(false) + const [overflowed, setOverflowed] = createSignal(false) + let preEl: HTMLElement | undefined + + function checkOverflow() { + if (!preEl) return + + const code = preEl.getElementsByTagName("code")[0] + + if (code && !local.expand) { + setOverflowed(preEl.clientHeight < code.offsetHeight) + } + } + + onMount(() => { + window.addEventListener("resize", checkOverflow) + }) + + onCleanup(() => { + window.removeEventListener("resize", checkOverflow) + }) + + return ( +
+
+
+ {local.desc} +
+
+ + + + (preEl = el)} + code={local.error || ""} + /> + + + (preEl = el)} + code={local.result || ""} + /> + + +
+
+ {((!local.expand && overflowed()) || expanded()) && ( + + )} +
+ ) +} + +function ToolFooter(props: { time: number }) { + return props.time > MIN_DURATION ? ( + + {formatDuration(props.time)} + + ) : ( +
+ ) +} + +interface AnchorProps extends JSX.HTMLAttributes { + id: string +} +function AnchorIcon(props: AnchorProps) { + const [local, rest] = splitProps(props, ["id", "children"]) + const [copied, setCopied] = createSignal(false) + + return ( + + ) +} + export default function Share(props: { id: string api: string info: Session.Info - messages: Record + messages: Record }) { let lastScrollY = 0 - let hasScrolledToAnchor = false + let hasScrolled = false let scrollTimeout: number | undefined - let scrollSentinel: HTMLElement | undefined - let scrollObserver: IntersectionObserver | undefined const id = props.id const params = new URLSearchParams(window.location.search) @@ -56,17 +605,23 @@ export default function Share(props: { const [showScrollButton, setShowScrollButton] = createSignal(false) const [isButtonHovered, setIsButtonHovered] = createSignal(false) - const [isNearBottom, setIsNearBottom] = createSignal(false) + + const anchorId = createMemo(() => { + const raw = window.location.hash.slice(1) + const [id] = raw.split("-") + return id + }) const [store, setStore] = createStore<{ info?: Session.Info - messages: Record - }>({ info: props.info, messages: mapValues(props.messages, (x: any) => "metadata" in x ? fromV1(x) : x) }) - const messages = createMemo(() => Object.values(store.messages).toSorted((a, b) => a.id?.localeCompare(b.id))) - const [connectionStatus, setConnectionStatus] = createSignal<[Status, string?]>(["disconnected", "Disconnected"]) - createEffect(() => { - console.log(unwrap(store)) - }) + messages: Record + }>({ info: props.info, messages: props.messages }) + const messages = createMemo(() => + Object.values(store.messages).toSorted((a, b) => a.id?.localeCompare(b.id)), + ) + const [connectionStatus, setConnectionStatus] = createSignal< + [Status, string?] + >(["disconnected", "Disconnected"]) onMount(() => { const apiUrl = props.api @@ -121,21 +676,12 @@ export default function Share(props: { } if (type === "message") { const [, messageID] = splits - if ("metadata" in d.content) { - d.content = fromV1(d.content) - } - d.content.parts = d.content.parts ?? store.messages[messageID]?.parts ?? 
[] setStore("messages", messageID, reconcile(d.content)) - } - if (type === "part") { - setStore("messages", d.content.messageID, "parts", arr => { - const index = arr.findIndex((x) => x.id === d.content.id) - if (index === -1) - arr.push(d.content) - if (index > -1) - arr[index] = d.content - return [...arr] - }) + + if (!hasScrolled && messageID === anchorId()) { + scrollToAnchor(window.location.hash.slice(1)) + hasScrolled = true + } } } catch (error) { console.error("Error parsing WebSocket message:", error) @@ -155,7 +701,10 @@ export default function Share(props: { // Try to reconnect after 2 seconds clearTimeout(reconnectTimer) - reconnectTimer = window.setTimeout(setupWebSocket, 2000) as unknown as number + reconnectTimer = window.setTimeout( + setupWebSocket, + 2000, + ) as unknown as number } } @@ -176,9 +725,10 @@ export default function Share(props: { const currentScrollY = window.scrollY const isScrollingDown = currentScrollY > lastScrollY const scrolled = currentScrollY > 200 // Show after scrolling 200px + const isNearBottom = window.innerHeight + currentScrollY >= document.body.scrollHeight - 100 // Only show when scrolling down, scrolled enough, and not near bottom - const shouldShow = isScrollingDown && scrolled && !isNearBottom() + const shouldShow = isScrollingDown && scrolled && !isNearBottom // Update last scroll position lastScrollY = currentScrollY @@ -194,7 +744,7 @@ export default function Share(props: { if (!isButtonHovered()) { setShowScrollButton(false) } - }, 1500) + }, 3000) } else if (!isButtonHovered()) { // Only hide if not hovered (to prevent disappearing while user is about to click) setShowScrollButton(false) @@ -206,26 +756,6 @@ export default function Share(props: { onMount(() => { lastScrollY = window.scrollY // Initialize scroll position - - // Create sentinel element - const sentinel = document.createElement("div") - sentinel.style.height = "1px" - sentinel.style.position = "absolute" - sentinel.style.bottom = "100px" - sentinel.style.width = "100%" - sentinel.style.pointerEvents = "none" - document.body.appendChild(sentinel) - - // Create intersection observer - const observer = new IntersectionObserver((entries) => { - setIsNearBottom(entries[0].isIntersecting) - }) - observer.observe(sentinel) - - // Store references for cleanup - scrollSentinel = sentinel - scrollObserver = observer - checkScrollNeed() window.addEventListener("scroll", checkScrollNeed) window.addEventListener("resize", checkScrollNeed) @@ -234,15 +764,6 @@ export default function Share(props: { onCleanup(() => { window.removeEventListener("scroll", checkScrollNeed) window.removeEventListener("resize", checkScrollNeed) - - // Clean up observer and sentinel - if (scrollObserver) { - scrollObserver.disconnect() - } - if (scrollSentinel) { - document.body.removeChild(scrollSentinel) - } - if (scrollTimeout) { clearTimeout(scrollTimeout) } @@ -253,7 +774,7 @@ export default function Share(props: { rootDir: undefined as string | undefined, created: undefined as number | undefined, completed: undefined as number | undefined, - messages: [] as MessageWithParts[], + messages: [] as Message.Info[], models: {} as Record, cost: 0, tokens: { @@ -265,26 +786,42 @@ export default function Share(props: { result.created = props.info.time.created - const msgs = messages() - for (let i = 0; i < msgs.length; i++) { - const msg = msgs[i] + for (let i = 0; i < messages().length; i++) { + const msg = messages()[i] + + // TODO: Cleanup + // const system = result.messages.length === 0 && msg.role === 
"system" + const assistant = msg.metadata?.assistant + + // if (system) { + // for (const part of msg.parts) { + // if (part.type === "text") { + // result.system.push(part.text) + // } + // } + // result.created = msg.metadata?.time.created + // continue + // } result.messages.push(msg) - if (msg.role === "assistant") { - result.cost += msg.cost - result.tokens.input += msg.tokens.input - result.tokens.output += msg.tokens.output - result.tokens.reasoning += msg.tokens.reasoning + if (assistant) { + result.cost += assistant.cost + result.tokens.input += assistant.tokens.input + result.tokens.output += assistant.tokens.output + result.tokens.reasoning += assistant.tokens.reasoning - result.models[`${msg.providerID} ${msg.modelID}`] = [msg.providerID, msg.modelID] + result.models[`${assistant.providerID} ${assistant.modelID}`] = [ + assistant.providerID, + assistant.modelID, + ] - if (msg.path.root) { - result.rootDir = msg.path.root + if (assistant.path?.root) { + result.rootDir = assistant.path.root } - if (msg.time.completed) { - result.completed = msg.time.completed + if (msg.metadata?.time.completed) { + result.completed = msg.metadata?.time.completed } } } @@ -292,13 +829,15 @@ export default function Share(props: { }) return ( -
-
-

{store.info?.title}

-
-
    -
  • -
    +
    +
    +
    +

    {store.info?.title}

    +
    +
    +
      +
    • +
      @@ -308,11 +847,11 @@ export default function Share(props: { {Object.values(data().models).length > 0 ? ( {([provider, model]) => ( -
    • -
      - +
    • +
      +
      - {model} + {model}
    • )} @@ -323,69 +862,1025 @@ export default function Share(props: { )}
    -
    - {DateTime.fromMillis(data().created || 0).toLocaleString(DateTime.DATETIME_MED)} +
    + {data().created ? ( + + {DateTime.fromMillis(data().created || 0).toLocaleString( + DateTime.DATETIME_MED, + )} + + ) : ( + + Started at — + + )}
    +
    - 0} fallback={

    Waiting for messages...

    }> + 0} + fallback={

    Waiting for messages...

    } + >
    - - - {(msg, msgIndex) => { - const filteredParts = createMemo(() => - msg.parts.filter((x, index) => { - if (x.type === "step-start" && index > 0) return false - if (x.type === "snapshot") return false - if (x.type === "step-finish") return false - if (x.type === "text" && x.synthetic === true) return false - if (x.type === "tool" && x.tool === "todoread") return false - if (x.type === "text" && !x.text) return false - if (x.type === "tool" && (x.state.status === "pending" || x.state.status === "running")) - return false - return true + + {(msg, msgIndex) => ( + + {(part, partIndex) => { + if ( + (part.type === "step-start" && + (partIndex() > 0 || !msg.metadata?.assistant)) || + (msg.role === "assistant" && + part.type === "tool-invocation" && + part.toolInvocation.toolName === "todoread") + ) + return null + + const anchor = createMemo(() => `${msg.id}-${partIndex()}`) + const [showResults, setShowResults] = createSignal(false) + const isLastPart = createMemo( + () => + data().messages.length === msgIndex() + 1 && + msg.parts.length === partIndex() + 1, + ) + const toolData = createMemo(() => { + if ( + msg.role !== "assistant" || + part.type !== "tool-invocation" + ) + return {} + + const metadata = + msg.metadata?.tool[part.toolInvocation.toolCallId] + const args = part.toolInvocation.args + const result = + part.toolInvocation.state === "result" && + part.toolInvocation.result + const duration = DateTime.fromMillis( + metadata?.time.end || 0, + ) + .diff(DateTime.fromMillis(metadata?.time.start || 0)) + .toMillis() + + return { metadata, args, result, duration } }) - ) + return ( + + {/* User text */} + + {(part) => ( +
    +
    + + + +
    +
    +
    + +
    +
    + )} +
    + {/* AI text */} + + {(part) => ( +
    +
    + + + +
    +
    +
    + + + + {DateTime.fromMillis( + data().completed || 0, + ).toLocaleString(DateTime.DATETIME_MED)} + + +
    +
    + )} +
    + {/* AI model */} + + {(assistant) => { + return ( +
    +
    + + + +
    +
    +
    +
    +
    + + {assistant().providerID} + +
    + + {assistant().modelID} + +
    +
    +
    + ) + }} +
    - return ( - - - {(part, partIndex) => { - const last = createMemo( - () => - data().messages.length === msgIndex() + 1 && filteredParts().length === partIndex() + 1, - ) - - onMount(() => { - const hash = window.location.hash.slice(1) - // Wait till all parts are loaded - if ( - hash !== "" && - !hasScrolledToAnchor && - filteredParts().length === partIndex() + 1 && - data().messages.length === msgIndex() + 1 - ) { - hasScrolledToAnchor = true - scrollToAnchor(hash) + {/* Grep tool */} + + {(_part) => { + const matches = () => toolData()?.metadata?.matches + const splitArgs = () => { + const { pattern, ...rest } = toolData()?.args + return { pattern, rest } } - }) - return - }} - - - ) - }} -
    -
    + return ( +
    +
    + + + +
    +
    +
    +
    +
    + Grep + “{splitArgs().pattern}” +
    + 0 + } + > +
    + + {([name, value]) => ( + <> +
    +
    {name}
    +
    {value}
    + + )} +
    +
    +
    + + 0}> +
    + + setShowResults((e) => !e) + } + /> + + + +
    +
    + +
    + +
    +
    +
    +
    + +
    +
    + ) + }} + + {/* Glob tool */} + + {(_part) => { + const count = () => toolData()?.metadata?.count + const pattern = () => toolData()?.args.pattern + + return ( +
    +
    + + + +
    +
    +
    +
    +
    + Glob + “{pattern()}” +
    + + 0}> +
    + + setShowResults((e) => !e) + } + /> + + + +
    +
    + +
    + +
    +
    +
    +
    + +
    +
    + ) + }} +
    + {/* LS tool */} + + {(_part) => { + const path = createMemo(() => + toolData()?.args?.path !== data().rootDir + ? stripWorkingDirectory( + toolData()?.args?.path, + data().rootDir, + ) + : toolData()?.args?.path, + ) + + return ( +
    +
    + + + +
    +
    +
    +
    +
    + LS + + {path()} + +
    + + +
    + + setShowResults((e) => !e) + } + /> + + + +
    +
    +
    +
    + +
    +
    + ) + }} +
    + {/* Read tool */} + + {(_part) => { + const filePath = createMemo(() => + stripWorkingDirectory( + toolData()?.args?.filePath, + data().rootDir, + ), + ) + const hasError = () => toolData()?.metadata?.error + const preview = () => toolData()?.metadata?.preview + + return ( +
    +
    + + + +
    +
    +
    +
    +
    + Read + + {filePath()} + +
    + + +
    + + {formatErrorString( + toolData()?.result, + )} + +
    +
    + {/* Always try to show CodeBlock if preview is available (even if empty string) */} + +
    + + setShowResults((e) => !e) + } + /> + +
    + +
    +
    +
    +
    + {/* Fallback to TextPart if preview is not a string (e.g. undefined) AND result exists */} + +
    + + setShowResults((e) => !e) + } + /> + + + +
    +
    +
    +
    + +
    +
    + ) + }} +
    + {/* Write tool */} + + {(_part) => { + const filePath = createMemo(() => + stripWorkingDirectory( + toolData()?.args?.filePath, + data().rootDir, + ), + ) + const hasError = () => toolData()?.metadata?.error + const content = () => toolData()?.args?.content + const diagnostics = createMemo(() => + getDiagnostics( + toolData()?.metadata?.diagnostics, + toolData()?.args.filePath, + ), + ) + + return ( +
    +
    + + + +
    +
    +
    +
    +
    + Write + + {filePath()} + +
    + 0}> + {diagnostics()} + + + +
    + + {formatErrorString( + toolData()?.result + )} + +
    +
    + +
    + + setShowResults((e) => !e) + } + /> + +
    + +
    +
    +
    +
    +
    +
    + +
    +
    + ) + }} +
    + {/* Edit tool */} + + {(_part) => { + const diff = () => toolData()?.metadata?.diff + const message = () => toolData()?.metadata?.message + const hasError = () => toolData()?.metadata?.error + const filePath = createMemo(() => + stripWorkingDirectory( + toolData()?.args.filePath, + data().rootDir, + ), + ) + const diagnostics = createMemo(() => + getDiagnostics( + toolData()?.metadata?.diagnostics, + toolData()?.args.filePath, + ), + ) + + return ( +
    +
    + + + +
    +
    +
    +
    +
    + Edit + + {filePath()} + +
    + + +
    + + {formatErrorString(message())} + +
    +
    + +
    + +
    +
    +
    + 0}> + {diagnostics()} + +
    + +
    +
    + ) + }} +
    + {/* Bash tool */} + + {(_part) => { + const command = () => toolData()?.metadata?.title + const desc = () => toolData()?.metadata?.description + const result = () => toolData()?.metadata?.stdout + const error = () => toolData()?.metadata?.stderr + + return ( +
    +
    + + + +
    +
    +
    + {command() && ( +
    + +
    + )} + +
    +
    + ) + }} +
    + {/* Todo write */} + + {(_part) => { + const todos = createMemo(() => + sortTodosByStatus(toolData()?.args?.todos ?? []), + ) + const starting = () => + todos().every((t) => t.status === "pending") + const finished = () => + todos().every((t) => t.status === "completed") + + return ( +
    +
    + + + +
    +
    +
    +
    +
    + + + + Creating plan + + + Completing plan + + + +
    + 0}> +
      + + {(todo) => ( +
    • + + {todo.content} +
    • + )} +
      +
    +
    +
    + +
    +
    + ) + }} +
    + {/* Fetch tool */} + + {(_part) => { + const url = () => toolData()?.args.url + const format = () => toolData()?.args.format + const hasError = () => toolData()?.metadata?.error + + return ( +
    +
    + + + +
    +
    +
    +
    +
    + Fetch + {url()} +
    + + +
    + + {formatErrorString( + toolData()?.result, + )} + +
    +
    + +
    + + setShowResults((e) => !e) + } + /> + +
    + +
    +
    +
    +
    +
    +
    + +
    +
    + ) + }} +
    + {/* Tool call */} + + {(part) => { + return ( +
    +
    + + + +
    +
    +
    +
    +
    + {part().toolInvocation.toolName} +
    +
    + + {(arg) => ( + <> +
    +
    {arg[0]}
    +
    {arg[1]}
    + + )} +
    +
    + + +
    + + setShowResults((e) => !e) + } + /> + + + +
    +
    + + + +
    +
    + +
    +
    + ) + }} +
    + {/* Fallback */} + +
    +
    + + + } + > + + + + + + + + + +
    +
    +
    +
    +
    + {part.type} +
    + +
    +
    +
    +
    + + ) + }} + + )} +
    +

    {getStatusText(connectionStatus())}

    @@ -400,11 +1895,19 @@ export default function Share(props: {
  • Input Tokens - {data().tokens.input ? {data().tokens.input} : } + {data().tokens.input ? ( + {data().tokens.input} + ) : ( + + )}
  • Output Tokens - {data().tokens.output ? {data().tokens.output} : } + {data().tokens.output ? ( + {data().tokens.output} + ) : ( + + )}
  • Reasoning Tokens @@ -430,7 +1933,10 @@ export default function Share(props: { "overflow-y": "auto", }} > - 0} fallback={

    Waiting for messages...

    }> + 0} + fallback={

    Waiting for messages...

    } + >
      {(msg) => ( @@ -458,7 +1964,9 @@ export default function Share(props: {
) } - -export function fromV1(v1: Message.Info): MessageWithParts { - if (v1.role === "assistant") { - return { - id: v1.id, - sessionID: v1.metadata.sessionID, - role: "assistant", - time: { - created: v1.metadata.time.created, - completed: v1.metadata.time.completed, - }, - cost: v1.metadata.assistant!.cost, - path: v1.metadata.assistant!.path, - summary: v1.metadata.assistant!.summary, - tokens: v1.metadata.assistant!.tokens ?? { - input: 0, - output: 0, - cache: { - read: 0, - write: 0, - }, - reasoning: 0, - }, - modelID: v1.metadata.assistant!.modelID, - providerID: v1.metadata.assistant!.providerID, - system: v1.metadata.assistant!.system, - error: v1.metadata.error, - parts: v1.parts.flatMap((part, index): MessageV2.Part[] => { - const base = { - id: index.toString(), - messageID: v1.id, - sessionID: v1.metadata.sessionID, - } - if (part.type === "text") { - return [ - { - ...base, - type: "text", - text: part.text, - }, - ] - } - if (part.type === "step-start") { - return [ - { - ...base, - type: "step-start", - }, - ] - } - if (part.type === "tool-invocation") { - return [ - { - ...base, - type: "tool", - callID: part.toolInvocation.toolCallId, - tool: part.toolInvocation.toolName, - state: (() => { - if (part.toolInvocation.state === "partial-call") { - return { - status: "pending", - } - } - - const { title, time, ...metadata } = v1.metadata.tool[part.toolInvocation.toolCallId] - if (part.toolInvocation.state === "call") { - return { - status: "running", - input: part.toolInvocation.args, - time: { - start: time.start, - }, - } - } - - if (part.toolInvocation.state === "result") { - return { - status: "completed", - input: part.toolInvocation.args, - output: part.toolInvocation.result, - title, - time, - metadata, - } - } - throw new Error("unknown tool invocation state") - })(), - }, - ] - } - return [] - }), - } - } - - if (v1.role === "user") { - return { - id: v1.id, - sessionID: v1.metadata.sessionID, - role: "user", - time: { - created: v1.metadata.time.created, - }, - parts: v1.parts.flatMap((part, index): MessageV2.Part[] => { - const base = { - id: index.toString(), - messageID: v1.id, - sessionID: v1.metadata.sessionID, - } - if (part.type === "text") { - return [ - { - ...base, - type: "text", - text: part.text, - }, - ] - } - if (part.type === "file") { - return [ - { - ...base, - type: "file", - mime: part.mediaType, - filename: part.filename, - url: part.url, - }, - ] - } - return [] - }), - } - } - - throw new Error("unknown message type") -} diff --git a/packages/web/src/components/codeblock.module.css b/packages/web/src/components/codeblock.module.css new file mode 100644 index 00000000..ddd88ef1 --- /dev/null +++ b/packages/web/src/components/codeblock.module.css @@ -0,0 +1,11 @@ +.codeblock { + pre { + --shiki-dark-bg: var(--sl-color-bg-surface) !important; + background-color: var(--sl-color-bg-surface) !important; + + span { + white-space: break-spaces; + } + } +} + diff --git a/packages/web/src/components/diffview.module.css b/packages/web/src/components/diffview.module.css new file mode 100644 index 00000000..a748c5d0 --- /dev/null +++ b/packages/web/src/components/diffview.module.css @@ -0,0 +1,121 @@ +.diff { + display: flex; + flex-direction: column; + border: 1px solid var(--sl-color-divider); + background-color: var(--sl-color-bg-surface); + border-radius: 0.25rem; +} + +.desktopView { + display: block; +} + +.mobileView { + display: none; +} + +.mobileBlock { + display: flex; + flex-direction: column; +} + +.row { + display: grid; + 
grid-template-columns: 1fr 1fr; + align-items: stretch; +} + +.beforeColumn, +.afterColumn { + display: flex; + flex-direction: column; + overflow-x: visible; + min-width: 0; + align-items: stretch; +} + +.beforeColumn { + border-right: 1px solid var(--sl-color-divider); +} + +.diff > .row:first-child [data-section="cell"]:first-child { + padding-top: 0.5rem; +} + +.diff > .row:last-child [data-section="cell"]:last-child { + padding-bottom: 0.5rem; +} + +[data-section="cell"] { + position: relative; + flex: 1; + display: flex; + flex-direction: column; + + width: 100%; + padding: 0.1875rem 0.5rem 0.1875rem 2.2ch; + margin: 0; + + &[data-display-mobile="true"] { + display: none; + } + + pre { + --shiki-dark-bg: var(--sl-color-bg-surface) !important; + background-color: var(--sl-color-bg-surface) !important; + + white-space: pre-wrap; + word-break: break-word; + + code > span:empty::before { + content: "\00a0"; + white-space: pre; + display: inline-block; + width: 0; + } + } +} + +[data-diff-type="removed"] { + background-color: var(--sl-color-red-low); + + pre { + --shiki-dark-bg: var(--sl-color-red-low) !important; + background-color: var(--sl-color-red-low) !important; + } + + &::before { + content: "-"; + position: absolute; + left: 0.5ch; + user-select: none; + color: var(--sl-color-red-high); + } +} + +[data-diff-type="added"] { + background-color: var(--sl-color-green-low); + + pre { + --shiki-dark-bg: var(--sl-color-green-low) !important; + background-color: var(--sl-color-green-low) !important; + } + + &::before { + content: "+"; + position: absolute; + left: 0.6ch; + user-select: none; + color: var(--sl-color-green-high); + } +} + +@media (max-width: 40rem) { + .desktopView { + display: none; + } + + .mobileView { + display: block; + } +} diff --git a/packages/web/src/components/icons/custom.tsx b/packages/web/src/components/icons/custom.tsx index ba06ddfb..b4e32d0c 100644 --- a/packages/web/src/components/icons/custom.tsx +++ b/packages/web/src/components/icons/custom.tsx @@ -39,30 +39,8 @@ export function IconGemini(props: JSX.SvgSVGAttributes) { export function IconOpencode(props: JSX.SvgSVGAttributes) { return ( - + ) } - -// https://icones.js.org/collection/ri?s=meta&icon=ri:meta-fill -export function IconMeta(props: JSX.SvgSVGAttributes) { - return ( - - - - ) -} - -// https://icones.js.org/collection/ri?s=robot&icon=ri:robot-2-line -export function IconRobot(props: JSX.SvgSVGAttributes) { - return ( - - - ) -} diff --git a/packages/web/src/components/icons/index.tsx b/packages/web/src/components/icons/index.tsx index 62445611..a788d8f4 100644 --- a/packages/web/src/components/icons/index.tsx +++ b/packages/web/src/components/icons/index.tsx @@ -3,7 +3,12 @@ import { type JSX } from "solid-js" export function IconAcademicCap(props: JSX.SvgSVGAttributes) { return ( - + ) { ) } -export function IconAdjustmentsHorizontal(props: JSX.SvgSVGAttributes) { +export function IconAdjustmentsHorizontal( + props: JSX.SvgSVGAttributes, +) { return ( - + ) } -export function IconAdjustmentsVertical(props: JSX.SvgSVGAttributes) { +export function IconAdjustmentsVertical( + props: JSX.SvgSVGAttributes, +) { return ( - + ) } -export function IconArchiveBoxArrowDown(props: JSX.SvgSVGAttributes) { +export function IconArchiveBoxArrowDown( + props: JSX.SvgSVGAttributes, +) { return ( - + ) } -export function IconArchiveBoxXMark(props: JSX.SvgSVGAttributes) { +export function IconArchiveBoxXMark( + props: JSX.SvgSVGAttributes, +) { return ( - + ) } export function IconArchiveBox(props: 
[icons diff, SVG markup elided: the recoverable change wraps long icon component signatures onto multiple lines, e.g.

-export function IconArrowDownCircle(props: JSX.SvgSVGAttributes) {
+export function IconArrowDownCircle(
+  props: JSX.SvgSVGAttributes,
+) {

The same reflow applies to the other long icon names, including IconArrowDownOnSquareStack, IconArrowLeftOnRectangle, IconChatBubbleBottomCenterText, IconChevronDoubleDown, IconClipboardDocumentCheck, IconDocumentMagnifyingGlass, IconMagnifyingGlassCircle, IconPresentationChartBar, IconQuestionMarkCircle, and IconWrenchScrewdriver.]
{ return ( - + ) { } export function IconXMark(props: JSX.SvgSVGAttributes) { return ( - + ) { // index export function IconCommand(props: JSX.SvgSVGAttributes) { return ( - + ) { } export function IconLetter(props: JSX.SvgSVGAttributes) { return ( - + ) { } export function IconMultiSelect(props: JSX.SvgSVGAttributes) { return ( - + ) { } export function IconSettings(props: JSX.SvgSVGAttributes) { return ( - + ) { } export function IconSingleSelect(props: JSX.SvgSVGAttributes) { return ( - + *:last-child { + margin-bottom: 0; + } + + pre { + white-space: pre-wrap; + border-radius: 0.25rem; + border: 1px solid rgba(0, 0, 0, 0.2); + padding: 0.5rem 0.75rem; + font-size: 0.75rem; + } + + code { + font-weight: 500; + + &:not(pre code) { + &::before { + content: "`"; + font-weight: 700; + } + &::after { + content: "`"; + font-weight: 700; + } + } + } +} diff --git a/packages/web/src/components/share.module.css b/packages/web/src/components/share.module.css index 9930e6b5..dafbdd8a 100644 --- a/packages/web/src/components/share.module.css +++ b/packages/web/src/components/share.module.css @@ -15,106 +15,118 @@ --lg-tool-width: 56rem; --term-icon: url("data:image/svg+xml,%3Csvg%20xmlns%3D'http%3A%2F%2Fwww.w3.org%2F2000%2Fsvg'%20viewBox%3D'0%200%2060%2016'%20preserveAspectRatio%3D'xMidYMid%20meet'%3E%3Ccircle%20cx%3D'8'%20cy%3D'8'%20r%3D'8'%2F%3E%3Ccircle%20cx%3D'30'%20cy%3D'8'%20r%3D'8'%2F%3E%3Ccircle%20cx%3D'52'%20cy%3D'8'%20r%3D'8'%2F%3E%3C%2Fsvg%3E"); +} - [data-component="header"] { +[data-element-button-text] { + cursor: pointer; + appearance: none; + background-color: transparent; + border: none; + padding: 0; + color: var(--sl-color-text-secondary); + + &:hover { + color: var(--sl-color-text); + } + + &[data-element-button-more] { display: flex; - flex-direction: column; + align-items: center; + gap: 0.125rem; + + span[data-button-icon] { + line-height: 1; + opacity: 0.85; + svg { + display: block; + } + } + } +} + +[data-element-label] { + text-transform: uppercase; + letter-spacing: -0.5px; + color: var(--sl-color-text-dimmed); +} + +.header { + display: flex; + flex-direction: column; + gap: 1rem; + + @media (max-width: 30rem) { gap: 1rem; - - @media (max-width: 30rem) { - gap: 1rem; - } } - [data-component="header-title"] { - font-size: 2.75rem; - font-weight: 500; - line-height: 1.2; - letter-spacing: -0.05em; - display: -webkit-box; - -webkit-box-orient: vertical; - -webkit-line-clamp: 3; - line-clamp: 3; - overflow: hidden; - - @media (max-width: 30rem) { - font-size: 1.75rem; - line-height: 1.25; + [data-section="title"] { + h1 { + font-size: 2.75rem; + font-weight: 500; + line-height: 1.2; + letter-spacing: -0.05em; + display: -webkit-box; + -webkit-box-orient: vertical; -webkit-line-clamp: 3; + overflow: hidden; + + @media (max-width: 30rem) { + font-size: 1.75rem; + line-height: 1.25; + -webkit-line-clamp: 3; + } } } - [data-component="header-details"] { + [data-section="row"] { display: flex; flex-direction: column; gap: 0.5rem; } - [data-component="header-stats"] { + [data-section="stats"] { list-style-type: none; padding: 0; margin: 0; display: flex; gap: 0.5rem 0.875rem; flex-wrap: wrap; - max-width: var(--lg-tool-width); - [data-slot="item"] { + li { display: flex; align-items: center; - gap: 0.3125rem; + gap: 0.5rem; font-size: 0.875rem; span[data-placeholder] { color: var(--sl-color-text-dimmed); } } - - [data-slot="icon"] { - flex: 0 0 auto; - color: var(--sl-color-text-dimmed); - opacity: 0.85; - - svg { - display: block; - } - } - - [data-slot="model"] { - color: 
var(--sl-color-text); - } } - [data-component="header-time"] { - color: var(--sl-color-text-dimmed); - font-size: 0.875rem; - } + [data-section="stats"] { + li { + gap: 0.3125rem; - [data-component="text-button"] { - cursor: pointer; - appearance: none; - background-color: transparent; - border: none; - padding: 0; - color: var(--sl-color-text-secondary); - - &:hover { - color: var(--sl-color-text); - } - - &[data-element-button-more] { - display: flex; - align-items: center; - gap: 0.125rem; - - span[data-button-icon] { - line-height: 1; + [data-stat-icon] { + flex: 0 0 auto; + color: var(--sl-color-text-dimmed); opacity: 0.85; - svg { display: block; } } + + span[data-stat-model] { + color: var(--sl-color-text); + } + } + } + + [data-section="time"] { + span { + color: var(--sl-color-text-dimmed); + font-size: 0.875rem; } } } @@ -158,12 +170,10 @@ svg:nth-child(3) { display: none; } - &:hover { svg:nth-child(1) { display: none; } - svg:nth-child(2) { display: block; } @@ -203,14 +213,12 @@ opacity: 1; visibility: visible; } - a, a:hover { svg:nth-child(1), svg:nth-child(2) { display: none; } - svg:nth-child(3) { display: block; } @@ -245,7 +253,7 @@ line-height: 18px; font-size: 0.875rem; color: var(--sl-color-text-secondary); - max-width: var(--md-tool-width); + max-width: var(--sm-tool-width); display: flex; align-items: flex-start; @@ -256,7 +264,7 @@ } b { - color: var(--sl-color-text); + color: var(--sl-color-text); word-break: break-all; font-weight: 500; } @@ -340,7 +348,8 @@ } [data-part-type="tool-grep"] { - &:not(:has([data-part-tool-args])) > [data-section="content"] > [data-part-tool-body] { + &:not(:has([data-part-tool-args])) + > [data-section="content"] > [data-part-tool-body] { gap: 0.5rem; } } @@ -365,7 +374,6 @@ } } } - [data-part-type="summary"] { & > [data-section="decoration"] { span:first-child { @@ -380,19 +388,15 @@ &[data-status="connected"] { background-color: var(--sl-color-green); } - &[data-status="connecting"] { background-color: var(--sl-color-orange); } - &[data-status="disconnected"] { background-color: var(--sl-color-divider); } - &[data-status="reconnecting"] { background-color: var(--sl-color-orange); } - &[data-status="error"] { background-color: var(--sl-color-red); } @@ -489,11 +493,7 @@ } } - &[data-background="none"] { - background-color: transparent; - } - - &[data-background="blue"] { + &[data-highlight="true"] { background-color: var(--sl-color-blue-low); } @@ -502,7 +502,6 @@ display: block; } } - &[data-expanded="false"] { pre { display: -webkit-box; @@ -538,25 +537,20 @@ span { margin-right: 0.25rem; - &:last-child { margin-right: 0; } } - span[data-color="red"] { color: var(--sl-color-red); } - span[data-color="dimmed"] { color: var(--sl-color-text-dimmed); } - span[data-marker="label"] { text-transform: uppercase; letter-spacing: -0.5px; } - span[data-separator] { margin-right: 0.375rem; } @@ -568,7 +562,6 @@ display: block; } } - &[data-expanded="false"] { [data-section="content"] { display: -webkit-box; @@ -583,6 +576,7 @@ padding: 2px 0; font-size: 0.75rem; } + } .message-terminal { @@ -618,7 +612,7 @@ } &::before { - content: ""; + content: ''; position: absolute; pointer-events: none; top: 8px; @@ -658,7 +652,6 @@ display: block; } } - &[data-expanded="false"] { pre { display: -webkit-box; @@ -676,7 +669,7 @@ } .message-markdown { - border: 1px solid var(--sl-color-blue-high); + background-color: var(--sl-color-bg-surface); padding: 0.5rem calc(0.5rem + 3px); border-radius: 0.25rem; display: flex; @@ -701,7 +694,6 @@ display: block; } 
} - &[data-expanded="false"] { [data-element-markdown] { display: -webkit-box; @@ -759,14 +751,10 @@ &[data-status="pending"] { color: var(--sl-color-text); } - &[data-status="in_progress"] { color: var(--sl-color-text); - & > span { - border-color: var(--sl-color-orange); - } - + & > span { border-color: var(--sl-color-orange); } & > span::before { content: ""; position: absolute; @@ -777,14 +765,10 @@ box-shadow: inset 1rem 1rem var(--sl-color-orange-low); } } - &[data-status="completed"] { color: var(--sl-color-text-secondary); - & > span { - border-color: var(--sl-color-green-low); - } - + & > span { border-color: var(--sl-color-green-low); } & > span::before { content: ""; position: absolute; @@ -815,9 +799,7 @@ display: flex; align-items: center; justify-content: center; - transition: - all 0.15s ease, - opacity 0.5s ease; + transition: all 0.15s ease, opacity 0.5s ease; z-index: 100; appearance: none; opacity: 1; diff --git a/packages/web/src/components/share/common.tsx b/packages/web/src/components/share/common.tsx deleted file mode 100644 index cab2dbdb..00000000 --- a/packages/web/src/components/share/common.tsx +++ /dev/null @@ -1,77 +0,0 @@ -import { createSignal, onCleanup, splitProps } from "solid-js" -import type { JSX } from "solid-js/jsx-runtime" -import { IconCheckCircle, IconHashtag } from "../icons" - -interface AnchorProps extends JSX.HTMLAttributes { - id: string -} -export function AnchorIcon(props: AnchorProps) { - const [local, rest] = splitProps(props, ["id", "children"]) - const [copied, setCopied] = createSignal(false) - - return ( - - ) -} - -export function createOverflow() { - const [overflow, setOverflow] = createSignal(false) - return { - get status() { - return overflow() - }, - ref(el: HTMLElement) { - const ro = new ResizeObserver(() => { - if (el.scrollHeight > el.clientHeight + 1) { - setOverflow(true) - } - return - }) - ro.observe(el) - - onCleanup(() => { - ro.disconnect() - }) - }, - } -} - -export function formatDuration(ms: number): string { - const ONE_SECOND = 1000 - const ONE_MINUTE = 60 * ONE_SECOND - - if (ms >= ONE_MINUTE) { - const minutes = Math.floor(ms / ONE_MINUTE) - return minutes === 1 ? 
`1min` : `${minutes}mins` - } - - if (ms >= ONE_SECOND) { - const seconds = Math.floor(ms / ONE_SECOND) - return `${seconds}s` - } - - return `${ms}ms` -} diff --git a/packages/web/src/components/share/content-bash.module.css b/packages/web/src/components/share/content-bash.module.css deleted file mode 100644 index 0915282d..00000000 --- a/packages/web/src/components/share/content-bash.module.css +++ /dev/null @@ -1,85 +0,0 @@ -.root { - display: contents; - - [data-slot="expand-button"] { - flex: 0 0 auto; - padding: 2px 0; - font-size: 0.75rem; - } - - [data-slot="body"] { - border: 1px solid var(--sl-color-divider); - border-radius: 0.25rem; - overflow: hidden; - width: 100%; - } - - [data-slot="header"] { - position: relative; - border-bottom: 1px solid var(--sl-color-divider); - width: 100%; - height: 1.625rem; - text-align: center; - padding: 0 3.25rem; - - > span { - max-width: min(100%, 140ch); - display: inline-block; - white-space: nowrap; - overflow: hidden; - line-height: 1.625rem; - font-size: 0.75rem; - text-overflow: ellipsis; - color: var(--sl-color-text-dimmed); - } - - &::before { - content: ""; - position: absolute; - pointer-events: none; - top: 8px; - left: 10px; - width: 2rem; - height: 0.5rem; - line-height: 0; - background-color: var(--sl-color-hairline); - mask-image: var(--term-icon); - mask-repeat: no-repeat; - } - } - - [data-slot="content"] { - display: flex; - flex-direction: column; - padding: 0.5rem calc(0.5rem + 3px); - - pre { - --shiki-dark-bg: var(--sl-color-bg) !important; - background-color: var(--sl-color-bg) !important; - line-height: 1.6; - font-size: 0.75rem; - white-space: pre-wrap; - word-break: break-word; - margin: 0; - - span { - white-space: break-spaces; - } - } - } - - [data-slot="output"] { - display: -webkit-box; - -webkit-box-orient: vertical; - -webkit-line-clamp: 10; - line-clamp: 10; - overflow: hidden; - } - - &[data-expanded] [data-slot="output"] { - display: block; - -webkit-line-clamp: none; - line-clamp: none; - overflow: visible; - } -} diff --git a/packages/web/src/components/share/content-bash.tsx b/packages/web/src/components/share/content-bash.tsx deleted file mode 100644 index 5ccd95c0..00000000 --- a/packages/web/src/components/share/content-bash.tsx +++ /dev/null @@ -1,67 +0,0 @@ -import style from "./content-bash.module.css" -import { createResource, createSignal } from "solid-js" -import { createOverflow } from "./common" -import { codeToHtml } from "shiki" - -interface Props { - command: string - output: string - description?: string - expand?: boolean -} - -export function ContentBash(props: Props) { - const [commandHtml] = createResource( - () => props.command, - async (command) => { - return codeToHtml(command || "", { - lang: "bash", - themes: { - light: "github-light", - dark: "github-dark", - }, - }) - }, - ) - - const [outputHtml] = createResource( - () => props.output, - async (output) => { - return codeToHtml(output || "", { - lang: "console", - themes: { - light: "github-light", - dark: "github-dark", - }, - }) - }, - ) - - const [expanded, setExpanded] = createSignal(false) - const overflow = createOverflow() - - return ( -
-    [markup elided: the root <div> renders the optional {props.description} header, the highlighted command
-    and output blocks from commandHtml()/outputHtml(), and an expand button when {!props.expand && overflow.status}]
- ) -} diff --git a/packages/web/src/components/share/content-code.module.css b/packages/web/src/components/share/content-code.module.css deleted file mode 100644 index ec159d64..00000000 --- a/packages/web/src/components/share/content-code.module.css +++ /dev/null @@ -1,26 +0,0 @@ -.root { - border: 1px solid var(--sl-color-divider); - background-color: var(--sl-color-bg-surface); - border-radius: 0.25rem; - padding: 0.5rem calc(0.5rem + 3px); - - &[data-flush="true"] { - border: none; - background-color: transparent; - padding: 0; - border-radius: 0; - } - - pre { - --shiki-dark-bg: var(--sl-color-bg-surface) !important; - background-color: var(--sl-color-bg-surface) !important; - line-height: 1.6; - font-size: 0.75rem; - white-space: pre-wrap; - word-break: break-word; - - span { - white-space: break-spaces; - } - } -} diff --git a/packages/web/src/components/share/content-code.tsx b/packages/web/src/components/share/content-code.tsx deleted file mode 100644 index 2f383b8b..00000000 --- a/packages/web/src/components/share/content-code.tsx +++ /dev/null @@ -1,32 +0,0 @@ -import { codeToHtml, bundledLanguages } from "shiki" -import { createResource, Suspense } from "solid-js" -import { transformerNotationDiff } from "@shikijs/transformers" -import style from "./content-code.module.css" - -interface Props { - code: string - lang?: string - flush?: boolean -} -export function ContentCode(props: Props) { - const [html] = createResource( - () => [props.code, props.lang], - async ([code, lang]) => { - // TODO: For testing delays - // await new Promise((resolve) => setTimeout(resolve, 3000)) - return (await codeToHtml(code || "", { - lang: lang && lang in bundledLanguages ? lang : "text", - themes: { - light: "github-light", - dark: "github-dark", - }, - transformers: [transformerNotationDiff()], - })) as string - }, - ) - return ( - -
- - ) -} diff --git a/packages/web/src/components/share/content-diff.module.css b/packages/web/src/components/share/content-diff.module.css deleted file mode 100644 index 5bf6e224..00000000 --- a/packages/web/src/components/share/content-diff.module.css +++ /dev/null @@ -1,154 +0,0 @@ -.root { - display: flex; - flex-direction: column; - border: 1px solid var(--sl-color-divider); - background-color: var(--sl-color-bg-surface); - border-radius: 0.25rem; - - [data-component="desktop"] { - display: block; - } - - [data-component="mobile"] { - display: none; - } - - [data-component="diff-block"] { - display: flex; - flex-direction: column; - } - - [data-component="diff-row"] { - display: grid; - grid-template-columns: 1fr 1fr; - align-items: stretch; - - &:first-child { - [data-slot="before"], - [data-slot="after"] { - padding-top: 0.25rem; - } - } - - &:last-child { - [data-slot="before"], - [data-slot="after"] { - padding-bottom: 0.25rem; - } - } - - [data-slot="before"], - [data-slot="after"] { - position: relative; - display: flex; - flex-direction: column; - overflow-x: visible; - min-width: 0; - align-items: stretch; - padding: 0 1rem 0 2.2ch; - - &[data-diff-type="removed"] { - background-color: var(--sl-color-red-low); - - pre { - --shiki-dark-bg: var(--sl-color-red-low) !important; - background-color: var(--sl-color-red-low) !important; - } - - &::before { - content: "-"; - position: absolute; - left: 0.6ch; - top: 1px; - user-select: none; - color: var(--sl-color-red-high); - } - } - - &[data-diff-type="added"] { - background-color: var(--sl-color-green-low); - - pre { - --shiki-dark-bg: var(--sl-color-green-low) !important; - background-color: var(--sl-color-green-low) !important; - } - - &::before { - content: "+"; - position: absolute; - user-select: none; - color: var(--sl-color-green-high); - left: 0.6ch; - top: 1px; - } - } - } - - [data-slot="before"] { - border-right: 1px solid var(--sl-color-divider); - } - } - - [data-component="mobile"] { - - & > [data-component="diff-block"]:first-child > div { - padding-top: 0.25rem; - } - - & > [data-component="diff-block"]:last-child > div { - padding-bottom: 0.25rem; - } - - & > [data-component="diff-block"] > div { - padding: 0 1rem 0 2.2ch; - - &[data-diff-type="removed"] { - position: relative; - background-color: var(--sl-color-red-low); - - pre { - --shiki-dark-bg: var(--sl-color-red-low) !important; - background-color: var(--sl-color-red-low) !important; - } - - &::before { - content: "-"; - position: absolute; - left: 0.6ch; - top: 1px; - user-select: none; - color: var(--sl-color-red-high); - } - } - - &[data-diff-type="added"] { - position: relative; - background-color: var(--sl-color-green-low); - - pre { - --shiki-dark-bg: var(--sl-color-green-low) !important; - background-color: var(--sl-color-green-low) !important; - } - - &::before { - content: "+"; - position: absolute; - left: 0.6ch; - top: 1px; - user-select: none; - color: var(--sl-color-green-high); - } - } - } - } - - @media (max-width: 40rem) { - [data-component="desktop"] { - display: none; - } - - [data-component="mobile"] { - display: block; - } - } -} diff --git a/packages/web/src/components/share/content-error.module.css b/packages/web/src/components/share/content-error.module.css deleted file mode 100644 index 6303be63..00000000 --- a/packages/web/src/components/share/content-error.module.css +++ /dev/null @@ -1,65 +0,0 @@ -.root { - background-color: var(--sl-color-bg-surface); - padding: 0.5rem calc(0.5rem + 3px); - border-radius: 0.25rem; - display: flex; 
- flex-direction: column; - align-items: flex-start; - gap: 1rem; - align-self: flex-start; - - [data-section="content"] { - pre { - margin-bottom: 0.5rem; - line-height: 1.5; - font-size: 0.75rem; - white-space: pre-wrap; - word-break: break-word; - - &:last-child { - margin-bottom: 0; - } - - span { - margin-right: 0.25rem; - &:last-child { - margin-right: 0; - } - } - span[data-color="red"] { - color: var(--sl-color-red); - } - span[data-color="dimmed"] { - color: var(--sl-color-text-dimmed); - } - span[data-marker="label"] { - text-transform: uppercase; - letter-spacing: -0.5px; - } - span[data-separator] { - margin-right: 0.375rem; - } - } - } - - &[data-expanded="true"] { - [data-section="content"] { - display: block; - } - } - &[data-expanded="false"] { - [data-section="content"] { - display: -webkit-box; - -webkit-box-orient: vertical; - -webkit-line-clamp: 7; - overflow: hidden; - } - } - - button { - flex: 0 0 auto; - padding: 2px 0; - font-size: 0.75rem; - } - -} diff --git a/packages/web/src/components/share/content-error.tsx b/packages/web/src/components/share/content-error.tsx deleted file mode 100644 index b6d7023b..00000000 --- a/packages/web/src/components/share/content-error.tsx +++ /dev/null @@ -1,31 +0,0 @@ -import style from "./content-error.module.css" -import { type JSX, createSignal } from "solid-js" -import { createOverflow } from "./common" - -interface Props extends JSX.HTMLAttributes { - expand?: boolean -} -export function ContentError(props: Props) { - const [expanded, setExpanded] = createSignal(false) - const overflow = createOverflow() - - return ( -
-    [markup elided: the root <div> renders {props.children} as the error content and an expand/collapse
-    button when ((!props.expand && overflow.status) || expanded())]
- ) -} diff --git a/packages/web/src/components/share/content-markdown.module.css b/packages/web/src/components/share/content-markdown.module.css deleted file mode 100644 index 3e38ddf0..00000000 --- a/packages/web/src/components/share/content-markdown.module.css +++ /dev/null @@ -1,131 +0,0 @@ -.root { - display: flex; - flex-direction: column; - align-items: flex-start; - gap: 1rem; - - [data-slot="expand-button"] { - flex: 0 0 auto; - padding: 2px 0; - font-size: 0.857em; - } - - [data-slot="markdown"] { - display: -webkit-box; - -webkit-box-orient: vertical; - -webkit-line-clamp: 3; - line-clamp: 3; - overflow: hidden; - - [data-expanded] & { - display: block; - } - - font-size: 1em; - line-height: 1.5; - - p, - blockquote, - ul, - ol, - dl, - table, - pre { - margin-bottom: 1rem; - } - - strong { - font-weight: 600; - } - - ol { - list-style-position: inside; - padding-left: 0.75rem; - } - - ul { - padding-left: 1.5rem; - } - - h1, - h2, - h3, - h4, - h5, - h6 { - font-size: 1em; - font-weight: 600; - margin-bottom: 0.5rem; - } - - & > *:last-child { - margin-bottom: 0; - } - - pre { - --shiki-dark-bg: var(--sl-color-bg-surface) !important; - background-color: var(--sl-color-bg-surface) !important; - padding: 0.5rem 0.75rem; - line-height: 1.6; - font-size: 0.857em; - white-space: pre-wrap; - word-break: break-word; - - span { - white-space: break-spaces; - } - } - - code { - font-weight: 500; - - &:not(pre code) { - &::before { - content: "`"; - font-weight: 700; - } - - &::after { - content: "`"; - font-weight: 700; - } - } - } - - table { - border-collapse: collapse; - width: 100%; - } - - th, - td { - border: 1px solid var(--sl-color-border); - padding: 0.5rem 0.75rem; - text-align: left; - } - - th { - border-bottom: 1px solid var(--sl-color-border); - } - - /* Remove outer borders */ - table tr:first-child th, - table tr:first-child td { - border-top: none; - } - - table tr:last-child td { - border-bottom: none; - } - - table th:first-child, - table td:first-child { - border-left: none; - } - - table th:last-child, - table td:last-child { - border-right: none; - } - } -} diff --git a/packages/web/src/components/share/content-markdown.tsx b/packages/web/src/components/share/content-markdown.tsx deleted file mode 100644 index 69cde82b..00000000 --- a/packages/web/src/components/share/content-markdown.tsx +++ /dev/null @@ -1,67 +0,0 @@ -import { marked } from "marked" -import { codeToHtml } from "shiki" -import markedShiki from "marked-shiki" -import { createOverflow } from "./common" -import { CopyButton } from "./copy-button" -import { createResource, createSignal } from "solid-js" -import { transformerNotationDiff } from "@shikijs/transformers" -import style from "./content-markdown.module.css" - -const markedWithShiki = marked.use( - markedShiki({ - highlight(code, lang) { - return codeToHtml(code, { - lang: lang || "text", - themes: { - light: "github-light", - dark: "github-dark", - }, - transformers: [transformerNotationDiff()], - }) - }, - }), -) - -interface Props { - text: string - expand?: boolean - highlight?: boolean -} -export function ContentMarkdown(props: Props) { - const [html] = createResource( - () => strip(props.text), - async (markdown) => { - return markedWithShiki.parse(markdown) - }, - ) - const [expanded, setExpanded] = createSignal(false) - const overflow = createOverflow() - - return ( -
-    [markup elided: the root <div> renders the parsed markdown from html() and an expand button when
-    {!props.expand && overflow.status}]
- ) -} - -function strip(text: string): string { - const wrappedRe = /^\s*<([A-Za-z]\w*)>\s*([\s\S]*?)\s*<\/\1>\s*$/ - const match = text.match(wrappedRe) - return match ? match[2] : text -} diff --git a/packages/web/src/components/share/content-text.module.css b/packages/web/src/components/share/content-text.module.css deleted file mode 100644 index a3842275..00000000 --- a/packages/web/src/components/share/content-text.module.css +++ /dev/null @@ -1,57 +0,0 @@ -.root { - color: var(--sl-color-text); - background-color: var(--sl-color-bg-surface); - padding: 0.5rem calc(0.5rem + 3px); - padding-right: calc(1rem + 18px); - border-radius: 0.25rem; - display: flex; - flex-direction: column; - align-items: flex-start; - gap: 1rem; - align-self: flex-start; - font-size: 0.875rem; - - &[data-compact] { - font-size: 0.75rem; - color: var(--sl-color-text-dimmed); - } - - [data-slot="text"] { - line-height: 1.5; - white-space: pre-wrap; - overflow-wrap: anywhere; - display: -webkit-box; - -webkit-box-orient: vertical; - -webkit-line-clamp: 3; - line-clamp: 3; - overflow: hidden; - - [data-expanded] & { - display: block; - } - } - - [data-slot="expand-button"] { - flex: 0 0 auto; - padding: 2px 0; - font-size: 0.75rem; - } - - &[data-theme="invert"] { - background-color: var(--sl-color-blue-high); - color: var(--sl-color-text-invert); - - [data-slot="expand-button"] { - opacity: 0.85; - color: var(--sl-color-text-invert); - - &:hover { - opacity: 1; - } - } - } - - &[data-theme="blue"] { - background-color: var(--sl-color-blue-low); - } -} diff --git a/packages/web/src/components/share/content-text.tsx b/packages/web/src/components/share/content-text.tsx deleted file mode 100644 index c52e0dfc..00000000 --- a/packages/web/src/components/share/content-text.tsx +++ /dev/null @@ -1,35 +0,0 @@ -import style from "./content-text.module.css" -import { createSignal } from "solid-js" -import { createOverflow } from "./common" - -interface Props { - text: string - expand?: boolean - compact?: boolean -} -export function ContentText(props: Props) { - const [expanded, setExpanded] = createSignal(false) - const overflow = createOverflow() - - return ( -
-    [markup elided: the root <div> renders {props.text} in a pre-wrapped text block and an expand/collapse
-    button when ((!props.expand && overflow.status) || expanded())]
- ) -} diff --git a/packages/web/src/components/share/copy-button.module.css b/packages/web/src/components/share/copy-button.module.css deleted file mode 100644 index 9da67a1b..00000000 --- a/packages/web/src/components/share/copy-button.module.css +++ /dev/null @@ -1,31 +0,0 @@ -.root { - position: absolute; - opacity: 0; - visibility: hidden; - transition: opacity 0.15s ease; - - button { - cursor: pointer; - background: none; - border: none; - padding: 0.125rem; - background-color: var(--sl-color-bg); - color: var(--sl-color-text-secondary); - - svg { - display: block; - width: 1rem; - height: 1rem; - } - - &[data-copied="true"] { - color: var(--sl-color-green-high); - } - } -} - -/* Show copy button when parent is hovered */ -*:hover > .root { - opacity: 1; - visibility: visible; -} diff --git a/packages/web/src/components/share/copy-button.tsx b/packages/web/src/components/share/copy-button.tsx deleted file mode 100644 index ad2e83b2..00000000 --- a/packages/web/src/components/share/copy-button.tsx +++ /dev/null @@ -1,36 +0,0 @@ -import { createSignal } from "solid-js" -import { IconClipboard, IconCheckCircle } from "../icons" -import styles from "./copy-button.module.css" - -interface CopyButtonProps { - text: string -} - -export function CopyButton(props: CopyButtonProps) { - const [copied, setCopied] = createSignal(false) - - function handleCopyClick() { - if (props.text) { - navigator.clipboard.writeText(props.text) - .catch((err) => console.error("Copy failed", err)) - - setCopied(true) - setTimeout(() => setCopied(false), 2000) - } - } - - return ( -
-    [markup elided: a <button onClick={handleCopyClick}> that shows IconCheckCircle instead of IconClipboard
-    while copied() is true]
- ) -} diff --git a/packages/web/src/components/share/part.module.css b/packages/web/src/components/share/part.module.css deleted file mode 100644 index ffae0c3b..00000000 --- a/packages/web/src/components/share/part.module.css +++ /dev/null @@ -1,419 +0,0 @@ -.root { - display: flex; - gap: 0.625rem; - - [data-component="decoration"] { - flex: 0 0 auto; - display: flex; - flex-direction: column; - gap: 0.625rem; - align-items: center; - justify-content: flex-start; - - [data-slot="anchor"] { - position: relative; - - a:first-child { - display: block; - flex: 0 0 auto; - width: 18px; - opacity: 0.65; - - svg { - color: var(--sl-color-text-secondary); - display: block; - - &:nth-child(3) { - color: var(--sl-color-green-high); - } - } - - svg:nth-child(2), - svg:nth-child(3) { - display: none; - } - - &:hover { - svg:nth-child(1) { - display: none; - } - - svg:nth-child(2) { - display: block; - } - } - } - - [data-copied] & { - a, - a:hover { - svg:nth-child(1), - svg:nth-child(2) { - display: none; - } - - svg:nth-child(3) { - display: block; - } - } - } - } - - [data-slot="bar"] { - width: 3px; - height: 100%; - border-radius: 1px; - background-color: var(--sl-color-hairline); - } - - [data-slot="tooltip"] { - position: absolute; - top: 50%; - left: calc(100% + 12px); - transform: translate(0, -50%); - line-height: 1.1; - padding: 0.375em 0.5em calc(0.375em + 2px); - background: var(--sl-color-white); - color: var(--sl-color-text-invert); - font-size: 0.6875rem; - border-radius: 7px; - white-space: nowrap; - - z-index: 1; - opacity: 0; - visibility: hidden; - - &::after { - content: ""; - position: absolute; - top: 50%; - left: -15px; - transform: translateY(-50%); - border: 8px solid transparent; - border-right-color: var(--sl-color-white); - } - - [data-copied] & { - opacity: 1; - visibility: visible; - } - } - } - - [data-component="content"] { - flex: 1 1 auto; - min-width: 0; - padding: 0 0 1rem; - display: flex; - flex-direction: column; - gap: 1rem; - } - - [data-component="spacer"] { - height: 0rem; - } - - [data-component="content-footer"] { - align-self: flex-start; - font-size: 0.75rem; - color: var(--sl-color-text-dimmed); - } - - [data-component="user-text"] { - min-width: 0; - display: flex; - flex-direction: column; - gap: 1rem; - flex-grow: 1; - max-width: var(--md-tool-width); - } - - [data-component="assistant-text"] { - min-width: 0; - display: flex; - flex-direction: column; - gap: 1rem; - flex-grow: 1; - max-width: var(--md-tool-width); - - & > [data-component="assistant-text-markdown"] { - align-self: flex-start; - font-size: 0.875rem; - border: 1px solid var(--sl-color-blue-high); - padding: 0.5rem calc(0.5rem + 3px); - border-radius: 0.25rem; - position: relative; - - [data-component="copy-button"] { - top: 0.5rem; - right: calc(0.5rem - 1px); - } - } - } - - [data-component="step-start"] { - display: flex; - flex-direction: column; - align-items: flex-start; - gap: 0.375rem; - - [data-slot="provider"] { - line-height: 18px; - font-size: 0.875rem; - text-transform: uppercase; - letter-spacing: -0.5px; - color: var(--sl-color-text-secondary); - } - - [data-slot="model"] { - line-height: 1.5; - } - } - - [data-component="attachment"] { - display: flex; - flex-direction: column; - align-items: flex-start; - gap: 0.375rem; - padding-bottom: 1rem; - - [data-slot="copy"] { - line-height: 18px; - font-size: 0.875rem; - text-transform: uppercase; - letter-spacing: -0.5px; - color: var(--sl-color-text-secondary); - } - - [data-slot="filename"] { - line-height: 1.5; - 
font-size: 0.875rem; - font-weight: 500; - max-width: var(--md-tool-width); - } - } - - [data-component="button-text"] { - cursor: pointer; - appearance: none; - background-color: transparent; - border: none; - padding: 0; - color: var(--sl-color-text-secondary); - font-size: 0.75rem; - - &:hover { - color: var(--sl-color-text); - } - - &[data-more] { - display: flex; - align-items: center; - gap: 0.125rem; - - span[data-slot="icon"] { - line-height: 1; - opacity: 0.85; - - svg { - display: block; - } - } - } - } - - [data-component="tool"] { - display: flex; - flex-direction: column; - align-items: flex-start; - gap: 0.375rem; - - &[data-tool="bash"] { - max-width: var(--sm-tool-width); - } - - &[data-tool="error"] { - max-width: var(--md-tool-width); - } - - &[data-tool="read"], - &[data-tool="edit"], - &[data-tool="list"], - &[data-tool="glob"], - &[data-tool="grep"], - &[data-tool="write"], - &[data-tool="webfetch"] { - [data-component="tool-result"] { - max-width: var(--sm-tool-width); - } - } - &[data-tool="edit"] { - [data-component="tool-result"] { - max-width: var(--lg-tool-width); - align-items: stretch; - width: 100%; - } - } - &[data-tool="task"] { - [data-component="tool-input"] { - font-size: 0.75rem; - line-height: 1.5; - max-width: var(--md-tool-width); - display: -webkit-box; - -webkit-line-clamp: 3; - -webkit-box-orient: vertical; - overflow: hidden; - } - [data-component="tool-output"] { - max-width: var(--sm-tool-width); - font-size: 0.75rem; - border: 1px solid var(--sl-color-divider); - padding: 0.5rem calc(0.5rem + 3px); - border-radius: 0.25rem; - position: relative; - - [data-component="copy-button"] { - top: 0.5rem; - right: calc(0.5rem - 1px); - } - } - } - } - - [data-component="tool-title"] { - line-height: 18px; - font-size: 0.875rem; - color: var(--sl-color-text-secondary); - max-width: var(--md-tool-width); - display: flex; - align-items: flex-start; - gap: 0.375rem; - - [data-slot="name"] { - text-transform: uppercase; - letter-spacing: -0.5px; - } - - [data-slot="target"] { - color: var(--sl-color-text); - word-break: break-all; - font-weight: 500; - } - } - - [data-component="tool-result"] { - display: flex; - flex-direction: column; - align-items: flex-start; - gap: 0.5rem; - } - - [data-component="todos"] { - list-style-type: none; - padding: 0; - margin: 0; - width: 100%; - max-width: var(--sm-tool-width); - border: 1px solid var(--sl-color-divider); - border-radius: 0.25rem; - - [data-slot="item"] { - margin: 0; - position: relative; - padding-left: 1.5rem; - font-size: 0.75rem; - padding: 0.375rem 0.625rem 0.375rem 1.75rem; - border-bottom: 1px solid var(--sl-color-divider); - line-height: 1.5; - word-break: break-word; - - &:last-child { - border-bottom: none; - } - - & > span { - position: absolute; - display: inline-block; - left: 0.5rem; - top: calc(0.5rem + 1px); - width: 0.75rem; - height: 0.75rem; - border: 1px solid var(--sl-color-divider); - border-radius: 0.15rem; - - &::before { - } - } - - &[data-status="pending"] { - color: var(--sl-color-text); - } - - &[data-status="in_progress"] { - color: var(--sl-color-text); - - & > span { - border-color: var(--sl-color-orange); - } - - & > span::before { - content: ""; - position: absolute; - top: 2px; - left: 2px; - width: calc(0.75rem - 2px - 4px); - height: calc(0.75rem - 2px - 4px); - box-shadow: inset 1rem 1rem var(--sl-color-orange-low); - } - } - - &[data-status="completed"] { - color: var(--sl-color-text-secondary); - - & > span { - border-color: var(--sl-color-green-low); - } - - & > 
span::before { - content: ""; - position: absolute; - top: 2px; - left: 2px; - width: calc(0.75rem - 2px - 4px); - height: calc(0.75rem - 2px - 4px); - box-shadow: inset 1rem 1rem var(--sl-color-green); - - transform-origin: bottom left; - clip-path: polygon(14% 44%, 0 65%, 50% 100%, 100% 16%, 80% 0%, 43% 62%); - } - } - } - } - - [data-component="tool-args"] { - display: inline-grid; - align-items: center; - grid-template-columns: max-content max-content minmax(0, 1fr); - max-width: var(--md-tool-width); - gap: 0.25rem 0.375rem; - - & > div:nth-child(3n + 1) { - width: 8px; - height: 2px; - border-radius: 1px; - background: var(--sl-color-divider); - } - - & > div:nth-child(3n + 2), - & > div:nth-child(3n + 3) { - font-size: 0.75rem; - line-height: 1.5; - } - - & > div:nth-child(3n + 3) { - padding-left: 0.125rem; - word-break: break-word; - color: var(--sl-color-text-secondary); - } - } -} diff --git a/packages/web/src/components/share/part.tsx b/packages/web/src/components/share/part.tsx deleted file mode 100644 index 4a9320e6..00000000 --- a/packages/web/src/components/share/part.tsx +++ /dev/null @@ -1,757 +0,0 @@ -import map from "lang-map" -import { DateTime } from "luxon" -import { For, Show, Match, Switch, type JSX, createMemo, createSignal, type ParentProps } from "solid-js" -import { - IconHashtag, - IconSparkles, - IconGlobeAlt, - IconDocument, - IconPaperClip, - IconQueueList, - IconUserCircle, - IconCommandLine, - IconCheckCircle, - IconChevronDown, - IconChevronRight, - IconDocumentPlus, - IconPencilSquare, - IconRectangleStack, - IconMagnifyingGlass, - IconDocumentMagnifyingGlass, -} from "../icons" -import { IconMeta, IconRobot, IconOpenAI, IconGemini, IconAnthropic } from "../icons/custom" -import { ContentCode } from "./content-code" -import { ContentDiff } from "./content-diff" -import { ContentText } from "./content-text" -import { ContentBash } from "./content-bash" -import { ContentError } from "./content-error" -import { formatDuration } from "../share/common" -import { ContentMarkdown } from "./content-markdown" -import type { MessageV2 } from "opencode/session/message-v2" -import type { Diagnostic } from "vscode-languageserver-types" - -import styles from "./part.module.css" - -const MIN_DURATION = 2000 - -export interface PartProps { - index: number - message: MessageV2.Info - part: MessageV2.Part - last: boolean -} - -export function Part(props: PartProps) { - const [copied, setCopied] = createSignal(false) - const id = createMemo(() => props.message.id + "-" + props.index) - - return ( -
- -
- {props.message.role === "user" && props.part.type === "text" && ( -
- -
- )} - {props.message.role === "assistant" && props.part.type === "text" && ( -
-
- -
- {props.last && props.message.role === "assistant" && props.message.time.completed && ( -
- {DateTime.fromMillis(props.message.time.completed).toLocaleString(DateTime.DATETIME_MED)} -
- )} -
- )} - {props.message.role === "user" && props.part.type === "file" && ( -
-
Attachment
-
{props.part.filename}
-
- )} - {props.part.type === "step-start" && props.message.role === "assistant" && ( -
-
{props.message.providerID}
-
{props.message.modelID}
-
- )} - {props.part.type === "tool" && props.part.state.status === "error" && ( -
- {formatErrorString(props.part.state.error)} - -
- )} - {props.part.type === "tool" && - props.part.state.status === "completed" && - props.message.role === "assistant" && ( - <> -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - - )} -
-
-  )
-}
-
-type ToolProps = {
-  id: MessageV2.ToolPart["id"]
-  tool: MessageV2.ToolPart["tool"]
-  state: MessageV2.ToolStateCompleted
-  message: MessageV2.Assistant
-  isLastPart?: boolean
-}
-
-interface Todo {
-  id: string
-  content: string
-  status: "pending" | "in_progress" | "completed"
-  priority: "low" | "medium" | "high"
-}
-
-function stripWorkingDirectory(filePath?: string, workingDir?: string) {
-  if (filePath === undefined || workingDir === undefined) return filePath
-
-  const prefix = workingDir.endsWith("/") ? workingDir : workingDir + "/"
-
-  if (filePath === workingDir) {
-    return ""
-  }
-
-  if (filePath.startsWith(prefix)) {
-    return filePath.slice(prefix.length)
-  }
-
-  return filePath
-}
-
-function getShikiLang(filename: string) {
-  const ext = filename.split(".").pop()?.toLowerCase() ?? ""
-  const langs = map.languages(ext)
-  const type = langs?.[0]?.toLowerCase()
-
-  const overrides: Record = {
-    conf: "shellscript",
-  }
-
-  return type ? (overrides[type] ?? type) : "plaintext"
-}
-
-function getDiagnostics(diagnosticsByFile: Record, currentFile: string): JSX.Element[] {
-  const result: JSX.Element[] = []
-
-  if (diagnosticsByFile === undefined || diagnosticsByFile[currentFile] === undefined) return result
-
-  for (const diags of Object.values(diagnosticsByFile)) {
-    for (const d of diags) {
-      if (d.severity !== 1) continue
-
-      const line = d.range.start.line + 1
-      const column = d.range.start.character + 1
-
-      result.push(
-          
-            Error
-          
-          
-            [{line}:{column}]
-          
-          {d.message}
-        
,
-      )
-    }
-  }
-
-  return result
-}
-
-function formatErrorString(error: string): JSX.Element {
-  const errorMarker = "Error: "
-  const startsWithError = error.startsWith(errorMarker)
-
-  return startsWithError ? (
-
-      
-        Error
-      
-      {error.slice(errorMarker.length)}
-    
- ) : ( -
-      {error}
-    
-  )
-}
-
-export function TodoWriteTool(props: ToolProps) {
-  const priority: Record = {
-    in_progress: 0,
-    pending: 1,
-    completed: 2,
-  }
-  const todos = createMemo(() =>
-    ((props.state.input?.todos ?? []) as Todo[]).slice().sort((a, b) => priority[a.status] - priority[b.status]),
-  )
-  const starting = () => todos().every((t: Todo) => t.status === "pending")
-  const finished = () => todos().every((t: Todo) => t.status === "completed")
-
-  return (
-    <>
-
- - - Creating plan - Completing plan - - -
- 0}> -
    - - {(todo) => ( -
  • - - {todo.content} -
  • - )} -
    -
-
- - ) -} - -export function GrepTool(props: ToolProps) { - return ( - <> -
- Grep - “{props.state.input.pattern}” -
-
- - 0}> - - - - - - - - -
- - ) -} - -export function ListTool(props: ToolProps) { - const path = createMemo(() => - props.state.input?.path !== props.message.path.cwd - ? stripWorkingDirectory(props.state.input?.path, props.message.path.cwd) - : props.state.input?.path, - ) - - return ( - <> -
- LS - - {path()} - -
-
- - - - - - - -
- - ) -} - -export function WebFetchTool(props: ToolProps) { - return ( - <> -
- Fetch - {props.state.input.url} -
-
- - - {formatErrorString(props.state.output)} - - - - - - - -
- - ) -} - -export function ReadTool(props: ToolProps) { - const filePath = createMemo(() => stripWorkingDirectory(props.state.input?.filePath, props.message.path.cwd)) - - return ( - <> -
- Read - - {filePath()} - -
-
- - - {formatErrorString(props.state.output)} - - - - - - - - - - - - -
- - ) -} - -export function WriteTool(props: ToolProps) { - const filePath = createMemo(() => stripWorkingDirectory(props.state.input?.filePath, props.message.path.cwd)) - const diagnostics = createMemo(() => getDiagnostics(props.state.metadata?.diagnostics, props.state.input.filePath)) - - return ( - <> -
- Write - - {filePath()} - -
- 0}> - {diagnostics()} - -
- - - {formatErrorString(props.state.output)} - - - - - - - -
- - ) -} - -export function EditTool(props: ToolProps) { - const filePath = createMemo(() => stripWorkingDirectory(props.state.input.filePath, props.message.path.cwd)) - const diagnostics = createMemo(() => getDiagnostics(props.state.metadata?.diagnostics, props.state.input.filePath)) - - return ( - <> -
- Edit - - {filePath()} - -
-
- - - {formatErrorString(props.state.metadata?.message || "")} - - -
- -
-
-
-
- 0}> - {diagnostics()} - - - ) -} - -export function BashTool(props: ToolProps) { - return ( - - ) -} - -export function GlobTool(props: ToolProps) { - return ( - <> -
- Glob - “{props.state.input.pattern}” -
- - 0}> -
- - - -
-
- - - -
- - ) -} - -interface ResultsButtonProps extends ParentProps { - showCopy?: string - hideCopy?: string -} -function ResultsButton(props: ResultsButtonProps) { - const [show, setShow] = createSignal(false) - - return ( - <> - - {props.children} - - ) -} - -export function Spacer() { - return
-} - -function Footer(props: ParentProps<{ title: string }>) { - return ( -
- {props.children} -
- ) -} - -function ToolFooter(props: { time: number }) { - return props.time > MIN_DURATION &&
{formatDuration(props.time)}
-} - -function TaskTool(props: ToolProps) { - return ( - <> -
- Task - {props.state.input.description} -
-
- “{props.state.input.prompt}” -
- -
- -
-
- - ) -} - -export function FallbackTool(props: ToolProps) { - return ( - <> -
- {props.tool} -
-
- - {(arg) => ( - <> -
-
{arg[0]}
-
{arg[1]}
- - )} -
-
- - -
- - - -
-
-
- - ) -} - -// Converts nested objects/arrays into [path, value] pairs. -// E.g. {a:{b:{c:1}}, d:[{e:2}, 3]} => [["a.b.c",1], ["d[0].e",2], ["d[1]",3]] -function flattenToolArgs(obj: any, prefix: string = ""): Array<[string, any]> { - const entries: Array<[string, any]> = [] - - for (const [key, value] of Object.entries(obj)) { - const path = prefix ? `${prefix}.${key}` : key - - if (value !== null && typeof value === "object") { - if (Array.isArray(value)) { - value.forEach((item, index) => { - const arrayPath = `${path}[${index}]` - if (item !== null && typeof item === "object") { - entries.push(...flattenToolArgs(item, arrayPath)) - } else { - entries.push([arrayPath, item]) - } - }) - } else { - entries.push(...flattenToolArgs(value, path)) - } - } else { - entries.push([path, value]) - } - } - - return entries -} - -function getProvider(model: string) { - const lowerModel = model.toLowerCase() - - if (/claude|anthropic/.test(lowerModel)) return "anthropic" - if (/gpt|o[1-4]|codex|openai/.test(lowerModel)) return "openai" - if (/gemini|palm|bard|google/.test(lowerModel)) return "gemini" - if (/llama|meta/.test(lowerModel)) return "meta" - - return "any" -} - -export function ProviderIcon(props: { model: string; size?: number }) { - const provider = getProvider(props.model) - const size = props.size || 16 - return ( - }> - - - - - - - - - - - - - - ) -} diff --git a/packages/web/src/content/docs/docs/cli.mdx b/packages/web/src/content/docs/docs/cli.mdx index 61592461..49d343be 100644 --- a/packages/web/src/content/docs/docs/cli.mdx +++ b/packages/web/src/content/docs/docs/cli.mdx @@ -39,12 +39,12 @@ opencode run Explain the use of context in Go #### Flags -| Flag | Short | Description | -| ------------ | ----- | ------------------------------------------ | -| `--continue` | `-c` | Continue the last session | -| `--session` | `-s` | Session ID to continue | -| `--share` | | Share the session | -| `--model` | `-m` | Model to use in the form of provider/model | +| Flag | Short | Description | +| ----------------- | ----- | --------------------- | +| `--continue` | `-c` | Continue the last session | +| `--session` | `-s` | Session ID to continue | +| `--share` | | Share the session | +| `--model` | `-m` | Model to use in the form of provider/model | --- @@ -66,7 +66,7 @@ Logs you into a provider and saves them in the credentials file in `~/.local/sha opencode auth login ``` -When opencode starts up it loads the providers from the credentials file. And if there are any keys defined in your environments or a `.env` file in your project. +When opencode starts up it will loads the providers from the credentials file. And if there are any keys defined in your environments or a `.env` file in your project. --- @@ -122,11 +122,8 @@ opencode upgrade v0.1.48 The opencode CLI takes the following flags. 
-| Flag | Short | Description | -| -------------- | ----- | -------------------- | -| `--help` | `-h` | Display help | -| `--version` | | Print version number | -| `--print-logs` | | Print logs to stderr | -| `--prompt` | `-p` | Prompt to use | -| `--model` | `-m` | Model to use in the form of provider/model | -| `--mode` | | Mode to use | +| Flag | Short | Description | +| ----------------- | ----- | --------------------- | +| `--help` | `-h` | Display help | +| `--version` | | Print version number | +| `--print-logs` | | Print logs to stderr | diff --git a/packages/web/src/content/docs/docs/config.mdx b/packages/web/src/content/docs/docs/config.mdx index 8cb91169..d88749c6 100644 --- a/packages/web/src/content/docs/docs/config.mdx +++ b/packages/web/src/content/docs/docs/config.mdx @@ -3,35 +3,25 @@ title: Config description: Using the opencode JSON config. --- -You can configure opencode using a JSON config file. +You can configure opencode using a JSON config file that can be placed in: -```json title="opencode.json" +- Globally under `~/.config/opencode/config.json`. +- Your project root under `opencode.json`. This is safe to be checked into Git and uses the same schema as the global one. + +```json { "$schema": "https://opencode.ai/config.json", "theme": "opencode", "model": "anthropic/claude-sonnet-4-20250514", + "autoshare": false, "autoupdate": true } ``` -This can be used to configure opencode globally or for a specific project. - ---- - -### Global - -Place your global opencode config in `~/.config/opencode/opencode.json`. You'll want to use the global config for things like themes, providers, or keybinds. - ---- - -### Per project - -You can also add a `opencode.json` in your project. This is useful for configuring providers or modes specific to your project. +In most cases, you'll want to use the global config for things like themes, providers, or keybinds. Having a config per project is useful if you are using different providers for your company. When opencode starts up, it looks for a config file in the current directory or traverse up to the nearest Git directory. -This is also safe to be checked into Git and uses the same schema as the global one. - --- ## Schema @@ -42,25 +32,6 @@ Your editor should be able to validate and autocomplete based on the schema. --- -### Modes - -opencode comes with two built-in modes: _build_, the default with all tools enabled. And _plan_, restricted mode with file modification tools disabled. You can override these built-in modes or define your own custom modes with the `mode` option. - -```json title="opencode.json" -{ - "$schema": "https://opencode.ai/config.json", - "mode": { - "build": { }, - "plan": { }, - "my-custom-mode": { } - } -} -``` - -[Learn more here](/docs/modes). - ---- - ### Models You can configure the providers and models you want to use in your opencode config through the `provider` and `model` options. @@ -68,12 +39,12 @@ You can configure the providers and models you want to use in your opencode conf ```json title="opencode.json" { "$schema": "https://opencode.ai/config.json", - "provider": {}, + "provider": { }, "model": "" } ``` -You can also configure [local models](/docs/models#local). [Learn more](/docs/models). +[Learn more here](/docs/models). --- @@ -92,74 +63,6 @@ You can configure the theme you want to use in your opencode config through the --- -### Layout - -You can configure the layout of the TUI with the `layout` option. 
- -```json title="opencode.json" -{ - "$schema": "https://opencode.ai/config.json", - "layout": "stretch" -} -``` - -This takes: - -- `"auto"`: Centers content with padding. This is the default. -- `"stretch"`: Uses full terminal width. - ---- - -### Logging - -Logs are written to: - -- **macOS/Linux**: `~/.local/share/opencode/log/` -- **Windows**: `%APPDATA%\opencode\log\` - -You can configure the minimum log level through the `log_level` option. - -```json title="opencode.json" -{ - "$schema": "https://opencode.ai/config.json", - "log_level": "INFO" -} -``` - -With the following options: - -| Level | Description | -| ------- | ---------------------------------------- | -| `DEBUG` | All messages including debug information | -| `INFO` | Informational messages and above | -| `WARN` | Warnings and errors only | -| `ERROR` | Errors only | - -The **default** log level is `INFO`. If you are running opencode locally in development mode it's set to `DEBUG`. - ---- - -### Sharing - -You can configure the [share](/docs/share) feature through the `share` option. - -```json title="opencode.json" -{ - "$schema": "https://opencode.ai/config.json", - "share": "manual" -} -``` - -This takes: - -- `"manual"` - Allow manual sharing via commands (default) -- `"auto"` - Automatically share new conversations -- `"disabled"` - Disable sharing entirely - -By default, sharing is set to manual mode where you need to explicitly share conversations using the `/share` command. - ---- - ### Keybinds You can customize your keybinds through the `keybinds` option. @@ -167,7 +70,7 @@ You can customize your keybinds through the `keybinds` option. ```json title="opencode.json" { "$schema": "https://opencode.ai/config.json", - "keybinds": {} + "keybinds": { } } ``` @@ -175,19 +78,6 @@ You can customize your keybinds through the `keybinds` option. --- -### Autoupdate - -opencode will automatically download any new updates when it starts up. You can disable this with the `autoupdate` option. - -```json title="opencode.json" -{ - "$schema": "https://opencode.ai/config.json", - "autoupdate": false -} -``` - ---- - ### MCP servers You can configure MCP servers you want to use through the `mcp` option. @@ -195,7 +85,7 @@ You can configure MCP servers you want to use through the `mcp` option. ```json title="opencode.json" { "$schema": "https://opencode.ai/config.json", - "mcp": {} + "mcp": { } } ``` @@ -203,22 +93,6 @@ You can configure MCP servers you want to use through the `mcp` option. --- -### Instructions - -You can configure the instructions for the model you're using through the `instructions` option. - -```json title="opencode.json" -{ - "$schema": "https://opencode.ai/config.json", - "instructions": ["CONTRIBUTING.md", "docs/guidelines.md", ".cursor/rules/*.md"] -} -``` - -This takes an array of paths and glob patterns to instruction files. [Learn more -about rules here](/docs/rules). - ---- - ### Disabled providers You can disable providers that are loaded automatically through the `disabled_providers` option. This is useful when you want to prevent certain providers from being loaded even if their credentials are available. @@ -231,66 +105,6 @@ You can disable providers that are loaded automatically through the `disabled_pr ``` The `disabled_providers` option accepts an array of provider IDs. 
When a provider is disabled: - - It won't be loaded even if environment variables are set - It won't be loaded even if API keys are configured through `opencode auth login` - The provider's models won't appear in the model selection list - ---- - -## Variables - -You can use variable substitution in your config files to reference environment variables and file contents. - ---- - -### Env vars - -Use `{env:VARIABLE_NAME}` to substitute environment variables: - -```json title="opencode.json" -{ - "$schema": "https://opencode.ai/config.json", - "model": "{env:OPENCODE_MODEL}", - "provider": { - "anthropic": { - "options": { - "apiKey": "{env:ANTHROPIC_API_KEY}" - } - } - } -} -``` - -If the environment variable is not set, it will be replaced with an empty string. - ---- - -### Files - -Use `{file:path/to/file}` to substitute the contents of a file: - -```json title="opencode.json" -{ - "$schema": "https://opencode.ai/config.json", - "instructions": ["{file:./custom-instructions.md}"], - "provider": { - "openai": { - "options": { - "apiKey": "{file:~/.secrets/openai-key}" - } - } - } -} -``` - -File paths can be: - -- Relative to the config file directory -- Or absolute paths starting with `/` or `~` - -These are useful for: - -- Keeping sensitive data like API keys in separate files. -- Including large instruction files without cluttering your config. -- Sharing common configuration snippets across multiple config files. diff --git a/packages/web/src/content/docs/docs/enterprise.mdx b/packages/web/src/content/docs/docs/enterprise.mdx deleted file mode 100644 index d73d1d3a..00000000 --- a/packages/web/src/content/docs/docs/enterprise.mdx +++ /dev/null @@ -1,102 +0,0 @@ ---- -title: Enterprise -description: Using opencode in your organization. ---- - -opencode does not store any of your code or context data. This makes it easy for -you to use opencode at your organization. - -To get started, we recommend: - -1. Do a trial internally with your team. -2. [**Contact us**](mailto:hello@sst.dev) to discuss pricing and implementation options. - ---- - -## Trial - -Since opencode is open source and does not store any of your code or context data, your developers can simply [get started](/docs/) and carry out a trial. - ---- - -### Data handling - -**opencode does not store your code or context data.** All processing happens locally or through direct API calls to your AI provider. - -The only caveat here is the optional `/share` feature. - ---- - -#### Sharing conversations - -If a user enables the `/share` feature, the conversation and the data associated with it are sent to the service we use to host these shares pages at opencode.ai. - -The data is currently served through our CDN's edge network, and is cached on the edge near your users. - -We recommend you disable this for your trial. - -```json title="opencode.json" -{ - "$schema": "https://opencode.ai/config.json", - "share": "disabled" -} -``` - -[Learn more about sharing](/docs/share). - ---- - -### Code ownership - -**You own all code produced by opencode.** There are no licensing restrictions or ownership claims. - ---- - -## Deployment - -Once you have completed your trial and you are ready to self-host opencode at -your organization, you can [**contact us**](mailto:hello@sst.dev) to discuss -pricing and implementation options. - ---- - -### SSO - -SSO integration can be implemented for enterprise deployments after your trial. 
-This will allow your team's session data and shared conversations to be protected -by your enterprise's authentication system. - ---- - -### Private NPM - -opencode supports private npm registries through Bun's native `.npmrc` file support. If your organization uses a private registry, such as JFrog Artifactory, Nexus, or similar, ensure developers are authenticated before running opencode. - -To set up authentication with your private registry: - -```bash -npm login --registry=https://your-company.jfrog.io/api/npm/npm-virtual/ -``` - -This creates `~/.npmrc` with authentication details. opencode will automatically -pick this up. - -:::caution -You must be logged into the private registry before running opencode. -::: - -Alternatively, you can manually configure a `.npmrc` file: - -```bash title="~/.npmrc" -registry=https://your-company.jfrog.io/api/npm/npm-virtual/ -//your-company.jfrog.io/api/npm/npm-virtual/:_authToken=${NPM_AUTH_TOKEN} -``` - -Developers must be logged into the private registry before running opencode to ensure packages can be installed from your enterprise registry. - ---- - -### Self-hosting - -The share feature can be self-hosted and the share pages can be made accessible -only after the user has been authenticated. diff --git a/packages/web/src/content/docs/docs/index.mdx b/packages/web/src/content/docs/docs/index.mdx index 0484e9b3..b39ce452 100644 --- a/packages/web/src/content/docs/docs/index.mdx +++ b/packages/web/src/content/docs/docs/index.mdx @@ -3,7 +3,7 @@ title: Intro description: Get started with opencode. --- -import { Tabs, TabItem } from "@astrojs/starlight/components" +import { Tabs, TabItem } from '@astrojs/starlight/components'; [**opencode**](/) is an AI coding agent built for the terminal. It features: @@ -22,24 +22,24 @@ import { Tabs, TabItem } from "@astrojs/starlight/components" - ```bash - npm install -g opencode-ai - ``` + ```bash + npm install -g opencode-ai + ``` - ```bash - bun install -g opencode-ai - ``` + ```bash + bun install -g opencode-ai + ``` - ```bash - pnpm install -g opencode-ai - ``` + ```bash + pnpm install -g opencode-ai + ``` - ```bash - yarn global add opencode-ai - ``` + ```bash + yarn global add opencode-ai + ``` diff --git a/packages/web/src/content/docs/docs/keybinds.mdx b/packages/web/src/content/docs/docs/keybinds.mdx index 1d0c53b3..8c5aa2c4 100644 --- a/packages/web/src/content/docs/docs/keybinds.mdx +++ b/packages/web/src/content/docs/docs/keybinds.mdx @@ -9,45 +9,32 @@ opencode has a list of keybinds that you can customize through the opencode conf { "$schema": "https://opencode.ai/config.json", "keybinds": { - "leader": "ctrl+x", - "app_help": "h", - "switch_mode": "tab", - + "help": "h", "editor_open": "e", - "session_new": "n", "session_list": "l", "session_share": "s", - "session_unshare": "u", "session_interrupt": "esc", "session_compact": "c", - "tool_details": "d", "model_list": "m", "theme_list": "t", "project_init": "i", - - "file_list": "f", - "file_close": "esc", - "file_diff_toggle": "v", - "input_clear": "ctrl+c", "input_paste": "ctrl+v", "input_submit": "enter", "input_newline": "shift+enter,ctrl+j", - + "history_previous": "up", + "history_next": "down", "messages_page_up": "pgup", "messages_page_down": "pgdown", "messages_half_page_up": "ctrl+alt+u", "messages_half_page_down": "ctrl+alt+d", - "messages_previous": "ctrl+up", - "messages_next": "ctrl+down", + "messages_previous": "ctrl+alt+k", + "messages_next": "ctrl+alt+j", "messages_first": "ctrl+g", "messages_last": "ctrl+alt+g", - 
"messages_layout_toggle": "p", - "messages_copy": "y", - "messages_revert": "r", "app_exit": "ctrl+c,q" } } diff --git a/packages/web/src/content/docs/docs/mcp-servers.mdx b/packages/web/src/content/docs/docs/mcp-servers.mdx index 985570a4..72c33e8a 100644 --- a/packages/web/src/content/docs/docs/mcp-servers.mdx +++ b/packages/web/src/content/docs/docs/mcp-servers.mdx @@ -18,24 +18,18 @@ You can define MCP servers in your opencode config under `mcp`. ### Local -Add local MCP servers under `mcp` with `"type": "local"`. +Add a local MCP servers under `mcp.localmcp`. ```json title="opencode.json" { "$schema": "https://opencode.ai/config.json", "mcp": { - "my-local-mcp-server": { + "localmcp": { "type": "local", "command": ["bun", "x", "my-mcp-command"], "enabled": true, "environment": { "MY_ENV_VAR": "my_env_var_value" - } - }, { - "my-different-local-mcp-server": { - "type": "local", - "command": ["bun", "x", "my-other-mcp-command"], - "enabled": true } } } @@ -45,19 +39,16 @@ You can also disable a server by setting `enabled` to `false`. This is useful if ### Remote -Add remote MCP servers under `mcp` with `"type": "remote"`. +Add a remote MCP servers under `mcp.remotemcp`. ```json title="opencode.json" { "$schema": "https://opencode.ai/config.json", "mcp": { - "my-remote-mcp": { + "remotemcp": { "type": "remote", "url": "https://my-mcp-server.com", - "enabled": true, - "headers": { - "Authorization": "Bearer MY_API_KEY" - } + "enabled": true } } } diff --git a/packages/web/src/content/docs/docs/models.mdx b/packages/web/src/content/docs/docs/models.mdx index cde1d2a4..f60a2544 100644 --- a/packages/web/src/content/docs/docs/models.mdx +++ b/packages/web/src/content/docs/docs/models.mdx @@ -11,14 +11,10 @@ opencode uses the [AI SDK](https://ai-sdk.dev/) and [Models.dev](https://models. You can configure providers in your opencode config under the `provider` section. ---- - ### Defaults Most popular providers are preloaded by default. If you've added the credentials for a provider through `opencode auth login`, they'll be available when you start opencode. ---- - ### Custom You can add custom providers by specifying the npm package for the provider and the models you want to use. @@ -27,60 +23,13 @@ You can add custom providers by specifying the npm package for the provider and { "$schema": "https://opencode.ai/config.json", "provider": { - "moonshot": { - "npm": "@ai-sdk/openai-compatible", - "options": { - "baseURL": "https://api.moonshot.ai/v1" - }, + "openrouter": { + "npm": "@openrouter/ai-sdk-provider", + "name": "OpenRouter", + "options": {}, "models": { - "kimi-k2-0711-preview": {} - } - } - } -} -``` - ---- - -### Base URL - -You can customize the base URL for any provider by setting the `baseURL` option. This is useful when using proxy services or custom endpoints. - -```json title="opencode.json" {6} -{ - "$schema": "https://opencode.ai/config.json", - "provider": { - "anthropic": { - "options": { - "baseURL": "https://api.anthropic.com/v1" - } - } - } -} -``` - ---- - -### Local - -You can configure local model like ones served through LM Studio or Ollama. To -do so, you'll need to specify a couple of things. 
- -Here's an example of configuring a local model from LM Studio: - -```json title="opencode.json" {4-15} -{ - "$schema": "https://opencode.ai/config.json", - "provider": { - "lmstudio": { - "npm": "@ai-sdk/openai-compatible", - "name": "LM Studio (local)", - "options": { - "baseURL": "http://127.0.0.1:1234/v1" - }, - "models": { - "google/gemma-3n-e4b": { - "name": "Gemma 3n-e4b (local)" + "anthropic/claude-3.5-sonnet": { + "name": "Claude 3.5 Sonnet" } } } @@ -88,15 +37,9 @@ Here's an example of configuring a local model from LM Studio: } ``` -In this example: +### Local -- `lmstudio` is the custom provider ID. We'll use this later. -- `npm` specifies the package to use for this provider. Here, `@ai-sdk/openai-compatible` is used for any OpenAI-compatible API. -- `name` is the display name for the provider in the UI. -- `options.baseURL` is the endpoint for the local server. -- `models` is a map of model IDs to their configurations. The model name will be displayed in the model selection list. - -Similarly, to configure a local model from Ollama: +To configure a local model, specify the npm package to use and the `baseURL`. ```json title="opencode.json" {5,7} { @@ -115,18 +58,6 @@ Similarly, to configure a local model from Ollama: } ``` -To set one of these as the default model, you can set the `model` key at the -root. - -```json title="opencode.json" {3} -{ - "$schema": "https://opencode.ai/config.json", - "model": "lmstudio/google/gemma-3n-e4b" -} -``` - -Here the full ID is `provider_id/model_id`, where `provider_id` is the key in the `provider` list we set above and `model_id` is the key from the `provider.models` list. - --- ## Select a model diff --git a/packages/web/src/content/docs/docs/modes.mdx b/packages/web/src/content/docs/docs/modes.mdx deleted file mode 100644 index 97e9248e..00000000 --- a/packages/web/src/content/docs/docs/modes.mdx +++ /dev/null @@ -1,199 +0,0 @@ ---- -title: Modes -description: Different modes for different use cases. ---- - -Modes in opencode allow you to customize the behavior, tools, and prompts for different use cases. - -It comes with two built-in modes: **build** and **plan**. You can customize -these or configure your own through the opencode config. - -:::tip -Use the plan mode to analyze code and review suggestions without making any code -changes. -::: - -You can switch between modes during a session or configure them in your config file. - ---- - -## Built-in - -opencode comes with two built-in modes. - ---- - -### Build - -Build is the **default** mode with all tools enabled. This is the standard mode for development work where you need full access to file operations and system commands. - ---- - -### Plan - -A restricted mode designed for planning and analysis. In plan mode, the following tools are disabled by default: - -- `write` - Cannot create new files -- `edit` - Cannot modify existing files -- `patch` - Cannot apply patches -- `bash` - Cannot execute shell commands - -This mode is useful when you want the AI to analyze code, suggest changes, or create plans without making any actual modifications to your codebase. - ---- - -## Switching - -You can switch between modes during a session using the _Tab_ key. Or your configured `switch_mode` keybind. - ---- - -## Configure - -You can customize the built-in modes or create your own in the opencode [config](/docs/config). 
- -```json title="opencode.json" -{ - "$schema": "https://opencode.ai/config.json", - "mode": { - "build": { - "model": "anthropic/claude-sonnet-4-20250514", - "prompt": "{file:./prompts/build.txt}", - "tools": { - "write": true, - "edit": true, - "bash": true - } - }, - "plan": { - "model": "anthropic/claude-haiku-4-20250514", - "tools": { - "write": false, - "edit": false, - "bash": false - } - } - } -} -``` - -Let's look at these options in detail. - ---- - -### Model - -Use the `model` config to override the default model for this mode. Useful for using different models optimized for different tasks. For example, a faster model for planning, a more capable model for implementation. - -```json title="opencode.json" -{ - "mode": { - "plan": { - "model": "anthropic/claude-haiku-4-20250514" - } - } -} -``` - ---- - -### Prompt - -Specify a custom system prompt file for this mode with the `prompt` config. The prompt file should contain instructions specific to the mode's purpose. - -```json title="opencode.json" -{ - "mode": { - "review": { - "prompt": "{file:./prompts/code-review.txt}" - } - } -} -``` - -This path is relative to where the config file is located. So this works for -both the global opencode config and the project specific config. - ---- - -### Tools - -Control which tools are available in this mode with the `tools` config. You can enable or disable specific tools by setting them to `true` or `false`. - -```json -{ - "mode": { - "readonly": { - "tools": { - "write": false, - "edit": false, - "bash": false, - "read": true, - "grep": true, - "glob": true - } - } - } -} -``` - -If no tools are specified, all tools are enabled by default. - ---- - -#### Available tools - -Here are all the tools can be controlled through the mode config. - -| Tool | Description | -| ----------- | ----------------------- | -| `bash` | Execute shell commands | -| `edit` | Modify existing files | -| `write` | Create new files | -| `read` | Read file contents | -| `grep` | Search file contents | -| `glob` | Find files by pattern | -| `list` | List directory contents | -| `patch` | Apply patches to files | -| `todowrite` | Manage todo lists | -| `todoread` | Read todo lists | -| `webfetch` | Fetch web content | - ---- - -## Custom modes - -You can create your own custom modes by adding them to the `mode` configuration. For example, a documentation mode that focuses on reading and analysis. - -```json title="opencode.json" {4-14} -{ - "$schema": "https://opencode.ai/config.json", - "mode": { - "docs": { - "prompt": "{file:./prompts/documentation.txt}", - "tools": { - "write": true, - "edit": true, - "bash": false, - "read": true, - "grep": true, - "glob": true - } - } - } -} -``` - ---- - -### Use cases - -Here are some common use cases for different modes. - -- **Build mode**: Full development work with all tools enabled -- **Plan mode**: Analysis and planning without making changes -- **Review mode**: Code review with read-only access plus documentation tools -- **Debug mode**: Focused on investigation with bash and read tools enabled -- **Docs mode**: Documentation writing with file operations but no system commands - -You might also find different models are good for different use cases. diff --git a/packages/web/src/content/docs/docs/rules.mdx b/packages/web/src/content/docs/docs/rules.mdx index aa5590bb..aed08535 100644 --- a/packages/web/src/content/docs/docs/rules.mdx +++ b/packages/web/src/content/docs/docs/rules.mdx @@ -31,20 +31,17 @@ You can also just create this file manually. 
Here's an example of some things yo This is an SST v3 monorepo with TypeScript. The project uses bun workspaces for package management. ## Project Structure - - `packages/` - Contains all workspace packages (functions, core, web, etc.) - `infra/` - Infrastructure definitions split by service (storage.ts, api.ts, web.ts) - `sst.config.ts` - Main SST configuration with dynamic imports ## Code Standards - - Use TypeScript with strict mode enabled - Shared code goes in `packages/core/` with proper exports configuration - Functions go in `packages/functions/` - Infrastructure should be split into logical files in `infra/` ## Monorepo Conventions - - Import shared modules using workspace names: `@my-app/core/example` ``` @@ -76,77 +73,3 @@ So when opencode starts, it looks for: 2. **Global file** by checking `~/.config/opencode/AGENTS.md` If you have both global and project-specific rules, opencode will combine them together. - ---- - -## Custom Instructions - -You can specify custom instruction files in your `opencode.json` or the global `~/.config/opencode/opencode.json`. This allows you and your team to reuse existing rules rather than having to duplicate them to AGENTS.md. - -Example: - -```json title="opencode.json" -{ - "$schema": "https://opencode.ai/config.json", - "instructions": ["CONTRIBUTING.md", "docs/guidelines.md", ".cursor/rules/*.md"] -} -``` - -All instruction files are combined with your `AGENTS.md` files. - ---- - -## Referencing External Files - -While opencode doesn't automatically parse file references in `AGENTS.md`, you can achieve similar functionality in two ways: - -### Using opencode.json - -The recommended approach is to use the `instructions` field in `opencode.json`: - -```json title="opencode.json" -{ - "$schema": "https://opencode.ai/config.json", - "instructions": ["docs/development-standards.md", "test/testing-guidelines.md", "packages/*/AGENTS.md"] -} -``` - -### Manual Instructions in AGENTS.md - -You can teach opencode to read external files by providing explicit instructions in your `AGENTS.md`. Here's a practical example: - -```markdown title="AGENTS.md" -# TypeScript Project Rules - -## External File Loading - -CRITICAL: When you encounter a file reference (e.g., @rules/general.md), use your Read tool to load it on a need-to-know basis. They're relevant to the SPECIFIC task at hand. - -Instructions: - -- Do NOT preemptively load all references - use lazy loading based on actual need -- When loaded, treat content as mandatory instructions that override defaults -- Follow references recursively when needed - -## Development Guidelines - -For TypeScript code style and best practices: @docs/typescript-guidelines.md -For React component architecture and hooks patterns: @docs/react-patterns.md -For REST API design and error handling: @docs/api-standards.md -For testing strategies and coverage requirements: @test/testing-guidelines.md - -## General Guidelines - -Read the following file immediately as it's relevant to all workflows: @rules/general-guidelines.md. -``` - -This approach allows you to: - -- Create modular, reusable rule files -- Share rules across projects via symlinks or git submodules -- Keep AGENTS.md concise while referencing detailed guidelines -- Ensure opencode loads files only when needed for the specific task - -:::tip -For monorepos or projects with shared standards, using `opencode.json` with glob patterns (like `packages/*/AGENTS.md`) is more maintainable than manual instructions. 
-::: diff --git a/packages/web/src/content/docs/docs/share.mdx b/packages/web/src/content/docs/docs/share.mdx deleted file mode 100644 index efb54c2d..00000000 --- a/packages/web/src/content/docs/docs/share.mdx +++ /dev/null @@ -1,128 +0,0 @@ ---- -title: Share -description: Share your opencode conversations. ---- - -opencode's share feature allows you to create public links to your opencode conversations, so you can collaborate with teammates or get help from others. - -:::note -Shared conversations are publicly accessible to anyone with the link. -::: - ---- - -## How it works - -When you share a conversation, opencode: - -1. Creates a unique public URL for your session -2. Syncs your conversation history to our servers -3. Makes the conversation accessible via the shareable link — `opencode.ai/s/` - ---- - -## Sharing - -opencode supports three sharing modes that control how conversations are shared: - ---- - -### Manual (default) - -By default, opencode uses manual sharing mode. Sessions are not shared automatically, but you can manually share them using the `/share` command: - -``` -/share -``` - -This will generate a unique URL that'll be copied to your clipboard. - -To explicitly set manual mode in your [config file](/docs/config): - -```json title="opencode.json" -{ - "$schema": "https://opencode.ai/config.json", - "share": "manual" -} -``` - ---- - -### Auto-share - -You can enable automatic sharing for all new conversations by setting the `share` option to `"auto"` in your [config file](/docs/config): - -```json title="opencode.json" -{ - "$schema": "https://opencode.ai/config.json", - "share": "auto" -} -``` - -With auto-share enabled, every new conversation will automatically be shared and a link will be generated. - ---- - -### Disabled - -You can disable sharing entirely by setting the `share` option to `"disabled"` in your [config file](/docs/config): - -```json title="opencode.json" -{ - "$schema": "https://opencode.ai/config.json", - "share": "disabled" -} -``` - -To enforce this across your team for a given project, add it to the `opencode.json` in your project and check into Git. - ---- - -## Un-sharing - -To stop sharing a conversation and remove it from public access: - -``` -/unshare -``` - -This will remove the share link and delete the data related to the conversation. - ---- - -## Privacy - -There are a few things to keep in mind when sharing a conversation. - ---- - -### Data retention - -Shared conversations remain accessible until you explicitly unshare them. This -includes: - -- Full conversation history -- All messages and responses -- Session metadata - ---- - -### Recommendations - -- Only share conversations that don't contain sensitive information. -- Review conversation content before sharing. -- Unshare conversations when collaboration is complete. -- Avoid sharing conversations with proprietary code or confidential data. -- For sensitive projects, disable sharing entirely. - ---- - -## For enterprises - -For enterprise deployments, the share feature can be: - -- **Disabled** entirely for security compliance -- **Restricted** to users authenticated through SSO only -- **Self-hosted** on your own infrastructure - -[Learn more](/docs/enterprise) about using opencode in your organization. 
diff --git a/packages/web/src/content/docs/docs/themes.mdx b/packages/web/src/content/docs/docs/themes.mdx index 3defceae..da612284 100644 --- a/packages/web/src/content/docs/docs/themes.mdx +++ b/packages/web/src/content/docs/docs/themes.mdx @@ -9,34 +9,22 @@ By default, opencode uses our own `opencode` theme. --- -## Terminal requirements - -For themes to display correctly with their full color palette, your terminal must support **truecolor** (24-bit color). Most modern terminals support this by default, but you may need to enable it: - -- **Check support**: Run `echo $COLORTERM` - it should output `truecolor` or `24bit` -- **Enable truecolor**: Set the environment variable `COLORTERM=truecolor` in your shell profile -- **Terminal compatibility**: Ensure your terminal emulator supports 24-bit color (most modern terminals like iTerm2, Alacritty, Kitty, Windows Terminal, and recent versions of GNOME Terminal do) - -Without truecolor support, themes may appear with reduced color accuracy or fall back to the nearest 256-color approximation. - ---- - ## Built-in themes opencode comes with several built-in themes. -| Name | Description | -| ------------ | ------------------------------------------ | -| `system` | Adapts to your terminal's background color | -| `tokyonight` | Based on the Tokyonight theme | -| `everforest` | Based on the Everforest theme | -| `ayu` | Based on the Ayu dark theme | -| `catppuccin` | Based on the Catppuccin theme | -| `gruvbox` | Based on the Gruvbox theme | -| `kanagawa` | Based on the Kanagawa theme | -| `nord` | Based on the Nord theme | -| `matrix` | Hacker-style green on black theme | -| `one-dark` | Based on the Atom One Dark theme | +| Name | Description | +| --- | --- | +| `system` | Adapts to your terminal's background color | +| `tokyonight` | Based on the Tokyonight theme | +| `everforest` | Based on the Everforest theme | +| `ayu` | Based on the Ayu dark theme | +| `catppuccin` | Based on the Catppuccin theme | +| `gruvbox` | Based on the Gruvbox theme | +| `kanagawa` | Based on the Kanagawa theme | +| `nord` | Based on the Nord theme | +| `matrix` | Hacker-style green on black theme | +| `one-dark` | Based on the Atom One Dark theme | And more, we are constantly adding new themes. @@ -73,7 +61,7 @@ You can select a theme by bringing up the theme select with the `/theme` command ## Custom themes -opencode supports a flexible JSON-based theme system that allows users to create and customize themes easily. +opencode supports a flexible JSON-based theme system that allows users to create and customize themes easily. --- diff --git a/packages/web/src/content/docs/docs/troubleshooting.mdx b/packages/web/src/content/docs/docs/troubleshooting.mdx deleted file mode 100644 index 9d7dd33a..00000000 --- a/packages/web/src/content/docs/docs/troubleshooting.mdx +++ /dev/null @@ -1,143 +0,0 @@ ---- -title: Troubleshooting -description: Common issues and how to resolve them. ---- - -To debug any issues with opencode, you can check the logs or the session data -that it stores locally. - ---- - -### Logs - -Log files are written to: - -- **macOS/Linux**: `~/.local/share/opencode/log/` -- **Windows**: `%APPDATA%\opencode\log\` - -Log files are named with timestamps (e.g., `2025-01-09T123456.log`) and the most recent 10 log files are kept. - -You can configure the log level in your [config file](/docs/config#logging) to get more detailed debug information. 
- ---- - -### Storage - -opencode stores session data and other application data on disk at: - -- **macOS/Linux**: `~/.local/share/opencode/` -- **Windows**: `%USERPROFILE%\.local\share\opencode` - -This directory contains: - -- `auth.json` - Authentication data like API keys, OAuth tokens -- `log/` - Application logs -- `project/` - Project-specific data like session and message data - - If the project is within a Git repo, it is stored in `.//storage/` - - If it is not a Git repo, it is stored in `./global/storage/` - ---- - -## Getting help - -If you're experiencing issues with opencode: - -1. **Report issues on GitHub** - - The best way to report bugs or request features is through our GitHub repository: - - [**github.com/sst/opencode/issues**](https://github.com/sst/opencode/issues) - - Before creating a new issue, search existing issues to see if your problem has already been reported. - -2. **Join our Discord** - - For real-time help and community discussion, join our Discord server: - - [**opencode.ai/discord**](https://opencode.ai/discord) - ---- - -## Common issues - -Here are some common issues and how to resolve them. - ---- - -### opencode won't start - -1. Check the logs for error messages -2. Try running with `--print-logs` to see output in the terminal -3. Ensure you have the latest version with `opencode upgrade` - ---- - -### Authentication issues - -1. Try re-authenticating with `opencode auth login ` -2. Check that your API keys are valid -3. Ensure your network allows connections to the provider's API - ---- - -### Model not available - -1. Check that you've authenticated with the provider -2. Verify the model name in your config is correct -3. Some models may require specific access or subscriptions - ---- - -### Copy/paste not working on Linux - -Linux users need to have one of the following clipboard utilities installed for copy/paste functionality to work: - -**For X11 systems:** - -```bash -apt install -y xclip -# or -apt install -y xsel -``` - -**For Wayland systems:** - -```bash -apt install -y wl-clipboard -``` - -**For headless environments:** - -```bash -apt install -y xvfb -# and run: -Xvfb :99 -screen 0 1024x768x24 > /dev/null 2>&1 & -export DISPLAY=:99.0 -``` - -opencode will detect if you're using Wayland and prefer `wl-clipboard`, otherwise it will try to find clipboard tools in order of: `xclip` and `xsel`. - ---- - -### How to select and copy text in the TUI - -There are several ways to copy text from opencode's TUI: - -- **Copy latest message**: Use `y` to copy the most recent message in your current session to the clipboard -- **Export session**: Use `/export` (or `x`) to open the current session as plain text in your `$EDITOR` (requires the `EDITOR` environment variable to be set) - -We're working on adding click & drag text selection in a future update. - ---- - -### TUI not rendering full width - -By default, opencode's TUI uses an "auto" layout that centers content with padding. If you want the TUI to use the full width of your terminal, you can configure the layout setting: - -```json title="opencode.json" -{ - "layout": "stretch" -} -``` - -Read more about this in the [config docs](/docs/config#layout). diff --git a/packages/web/src/pages/s/[id].astro b/packages/web/src/pages/s/[id].astro index fadf0eb0..a81d2e78 100644 --- a/packages/web/src/pages/s/[id].astro +++ b/packages/web/src/pages/s/[id].astro @@ -23,8 +23,9 @@ const models: Set = new Set(); const version = data.info.version ? 
`v${data.info.version}` : "v0.0.1"; Object.values(data.messages).forEach((d) => { - if (d.role === "assistant" && d.modelID) { - models.add(d.modelID); + const assistant = d.metadata?.assistant; + if (assistant) { + models.add(assistant.modelID); } }); @@ -38,19 +39,8 @@ const encodedTitle = encodeURIComponent( ) ); -const modelsArray = Array.from(models); -let modelParam; -if (modelsArray.length === 1) { - modelParam = modelsArray[0]; -} -else if (modelsArray.length === 2) { - modelParam = encodeURIComponent(`${modelsArray[0]} & ${modelsArray[1]}`); -} -else { - modelParam = encodeURIComponent(`${modelsArray[0]} & ${modelsArray.length - 1} others`); -} +const ogImage = `${config.socialCard}/opencode-share/${encodedTitle}.png?model=${Array.from(models).join(",")}&version=${version}&id=${id}`; -const ogImage = `${config.socialCard}/opencode-share/${encodedTitle}.png?model=${modelParam}&version=${version}&id=${id}`; --- + extensions: Record; /** All languages keyed by file-extension */ - languages: Record + languages: Record; } /** @@ -14,14 +14,14 @@ declare module "lang-map" { * const { extensions, languages } = map(); * ``` */ - function map(): MapReturn + function map(): MapReturn; /** Static method: get extensions for a given language */ namespace map { - function extensions(language: string): string[] + function extensions(language: string): string[]; /** Static method: get languages for a given extension */ - function languages(extension: string): string[] + function languages(extension: string): string[]; } - export = map + export = map; } diff --git a/patches/ai@4.3.16.patch b/patches/ai@4.3.16.patch new file mode 100644 index 00000000..7d6df589 --- /dev/null +++ b/patches/ai@4.3.16.patch @@ -0,0 +1,13 @@ +diff --git a/dist/index.mjs b/dist/index.mjs +index 92a80377692488c4ba8801ce33e7736ad7055e43..add6281bbecaa1c03d3b48eb99aead4a7a7336b2 100644 +--- a/dist/index.mjs ++++ b/dist/index.mjs +@@ -1593,7 +1593,7 @@ function prepareCallSettings({ + return { + maxTokens, + // TODO v5 remove default 0 for temperature +- temperature: temperature != null ? temperature : 0, ++ temperature: temperature, + topP, + topK, + presencePenalty, diff --git a/scripts/publish-github-action.ts b/scripts/publish-github-action.ts deleted file mode 100755 index 9d0cbb7e..00000000 --- a/scripts/publish-github-action.ts +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/env bun - -import { $ } from "bun" - -try { - await $`git tag -d github-v1` - await $`git push origin :refs/tags/github-v1` -} catch (e: any) { - if (e instanceof $.ShellError && e.stderr.toString().match(/tag \S+ not found/)) { - console.log("tag not found, continuing...") - } else { - throw e - } -} -await $`git tag -a github-v1 -m "Update github-v1 to latest"` -await $`git push origin github-v1` diff --git a/scripts/stainless b/scripts/stainless deleted file mode 100755 index 4b417a00..00000000 --- a/scripts/stainless +++ /dev/null @@ -1,29 +0,0 @@ -#!/bin/bash - -set -e - -echo "Starting opencode server on port 4096..." -bun run ./packages/opencode/src/index.ts serve --port 4096 & -SERVER_PID=$! - -echo "Waiting for server to start..." -sleep 3 - -echo "Fetching OpenAPI spec from http://127.0.0.1:4096/doc..." -curl -s http://127.0.0.1:4096/doc > openapi.json - -echo "Stopping server..." -kill $SERVER_PID - -echo "Running stl builds create..." -stl builds create --branch dev --pull --allow-empty --targets go - -echo "Cleaning up..." 
-rm -rf packages/tui/sdk -mv opencode-go/ packages/tui/sdk/ -rm -rf packages/tui/sdk/.git - -echo "Kicking off production build..." -stl builds create --branch main --wait false - -echo "Done!" diff --git a/scripts/stats.ts b/scripts/stats.ts index bce21185..b30e57d9 100755 --- a/scripts/stats.ts +++ b/scripts/stats.ts @@ -26,9 +26,13 @@ async function fetchNpmDownloads(packageName: string): Promise { // Use a range from 2020 to current year + 5 years to ensure it works forever const currentYear = new Date().getFullYear() const endYear = currentYear + 5 - const response = await fetch(`https://api.npmjs.org/downloads/range/2020-01-01:${endYear}-12-31/${packageName}`) + const response = await fetch( + `https://api.npmjs.org/downloads/range/2020-01-01:${endYear}-12-31/${packageName}`, + ) if (!response.ok) { - console.warn(`Failed to fetch npm downloads for ${packageName}: ${response.status}`) + console.warn( + `Failed to fetch npm downloads for ${packageName}: ${response.status}`, + ) return 0 } const data: NpmDownloadsRange = await response.json() @@ -49,7 +53,9 @@ async function fetchReleases(): Promise { const response = await fetch(url) if (!response.ok) { - throw new Error(`GitHub API error: ${response.status} ${response.statusText}`) + throw new Error( + `GitHub API error: ${response.status} ${response.statusText}`, + ) } const batch: Release[] = await response.json() @@ -60,7 +66,6 @@ async function fetchReleases(): Promise { if (batch.length < per) break page++ - await new Promise((resolve) => setTimeout(resolve, 1000)) } return releases @@ -110,7 +115,11 @@ async function save(githubTotal: number, npmDownloads: number) { for (let i = lines.length - 1; i >= 0; i--) { const line = lines[i].trim() - if (line.startsWith("|") && !line.includes("Date") && !line.includes("---")) { + if ( + line.startsWith("|") && + !line.includes("Date") && + !line.includes("---") + ) { const match = line.match( /\|\s*[\d-]+\s*\|\s*([\d,]+)\s*(?:\([^)]*\))?\s*\|\s*([\d,]+)\s*(?:\([^)]*\))?\s*\|\s*([\d,]+)\s*(?:\([^)]*\))?\s*\|/, ) @@ -138,7 +147,11 @@ async function save(githubTotal: number, npmDownloads: number) { ? ` (${githubChange.toLocaleString()})` : " (+0)" const npmChangeStr = - npmChange > 0 ? ` (+${npmChange.toLocaleString()})` : npmChange < 0 ? ` (${npmChange.toLocaleString()})` : " (+0)" + npmChange > 0 + ? ` (+${npmChange.toLocaleString()})` + : npmChange < 0 + ? ` (${npmChange.toLocaleString()})` + : " (+0)" const totalChangeStr = totalChange > 0 ? 
` (+${totalChange.toLocaleString()})` @@ -169,7 +182,9 @@ const { total: githubTotal, stats } = calculate(releases) console.log("Fetching npm all-time downloads for opencode-ai...\n") const npmDownloads = await fetchNpmDownloads("opencode-ai") -console.log(`Fetched npm all-time downloads: ${npmDownloads.toLocaleString()}\n`) +console.log( + `Fetched npm all-time downloads: ${npmDownloads.toLocaleString()}\n`, +) await save(githubTotal, npmDownloads) @@ -187,18 +202,24 @@ console.log("-".repeat(60)) stats .sort((a, b) => b.downloads - a.downloads) .forEach((release) => { - console.log(`${release.tag.padEnd(15)} ${release.downloads.toLocaleString().padStart(10)} downloads`) + console.log( + `${release.tag.padEnd(15)} ${release.downloads.toLocaleString().padStart(10)} downloads`, + ) if (release.assets.length > 1) { release.assets .sort((a, b) => b.downloads - a.downloads) .forEach((asset) => { - console.log(` └─ ${asset.name.padEnd(25)} ${asset.downloads.toLocaleString().padStart(8)}`) + console.log( + ` └─ ${asset.name.padEnd(25)} ${asset.downloads.toLocaleString().padStart(8)}`, + ) }) } }) console.log("-".repeat(60)) -console.log(`GitHub Total: ${githubTotal.toLocaleString()} downloads across ${releases.length} releases`) +console.log( + `GitHub Total: ${githubTotal.toLocaleString()} downloads across ${releases.length} releases`, +) console.log(`npm Total: ${npmDownloads.toLocaleString()} downloads`) console.log(`Combined Total: ${totalDownloads.toLocaleString()} downloads`) diff --git a/sdks/github/action.yml b/sdks/github/action.yml deleted file mode 100644 index 8501ce09..00000000 --- a/sdks/github/action.yml +++ /dev/null @@ -1,58 +0,0 @@ -name: "opencode GitHub Action" -description: "Run opencode in GitHub Actions workflows" -branding: - icon: "code" - color: "orange" - -inputs: - model: - description: "Model to use" - required: false - - share: - description: "Share the opencode session (defaults to true for public repos)" - required: false - -outputs: - share_url: - description: "URL to share the opencode execution" - value: ${{ steps.run_opencode.outputs.share_url }} - -runs: - using: "composite" - steps: - - name: Setup Node.js - uses: actions/setup-node@v4 - with: - node-version: 22 - - - name: Install Bun - uses: oven-sh/setup-bun@v2 - with: - bun-version: 1.2.16 - - - name: Install Dependencies - shell: bash - run: | - cd ${GITHUB_ACTION_PATH} - bun install - - - name: Install opencode - shell: bash - run: curl -fsSL https://opencode.ai/install | bash - - - name: Run opencode - shell: bash - id: run_opencode - run: | - bun run ${GITHUB_ACTION_PATH}/src/index.ts - env: - INPUT_MODEL: ${{ inputs.model }} - INPUT_SHARE: ${{ inputs.share }} - - #- name: Testing - # shell: bash - # run: | - # gh pr comment ${{ github.event.number }} --body "This is an automated comment" - # env: - # GH_TOKEN: ${{ github.token }} diff --git a/sdks/github/bun.lock b/sdks/github/bun.lock deleted file mode 100644 index 25587751..00000000 --- a/sdks/github/bun.lock +++ /dev/null @@ -1,157 +0,0 @@ -{ - "lockfileVersion": 1, - "workspaces": { - "": { - "name": "github", - "dependencies": { - "@actions/core": "^1.11.1", - "@actions/github": "^6.0.1", - "@octokit/graphql": "^9.0.1", - "@octokit/rest": "^22.0.0", - }, - "devDependencies": { - "@octokit/webhooks-types": "^7.6.1", - "@types/bun": "latest", - "@types/node": "^24.0.10", - }, - "peerDependencies": { - "typescript": "^5", - }, - }, - }, - "packages": { - "@actions/core": ["@actions/core@1.11.1", "", { "dependencies": { "@actions/exec": "^1.1.1", 
"@actions/http-client": "^2.0.1" } }, "sha512-hXJCSrkwfA46Vd9Z3q4cpEpHB1rL5NG04+/rbqW9d3+CSvtB1tYe8UTpAlixa1vj0m/ULglfEK2UKxMGxCxv5A=="], - - "@actions/exec": ["@actions/exec@1.1.1", "", { "dependencies": { "@actions/io": "^1.0.1" } }, "sha512-+sCcHHbVdk93a0XT19ECtO/gIXoxvdsgQLzb2fE2/5sIZmWQuluYyjPQtrtTHdU1YzTZ7bAPN4sITq2xi1679w=="], - - "@actions/github": ["@actions/github@6.0.1", "", { "dependencies": { "@actions/http-client": "^2.2.0", "@octokit/core": "^5.0.1", "@octokit/plugin-paginate-rest": "^9.2.2", "@octokit/plugin-rest-endpoint-methods": "^10.4.0", "@octokit/request": "^8.4.1", "@octokit/request-error": "^5.1.1", "undici": "^5.28.5" } }, "sha512-xbZVcaqD4XnQAe35qSQqskb3SqIAfRyLBrHMd/8TuL7hJSz2QtbDwnNM8zWx4zO5l2fnGtseNE3MbEvD7BxVMw=="], - - "@actions/http-client": ["@actions/http-client@2.2.3", "", { "dependencies": { "tunnel": "^0.0.6", "undici": "^5.25.4" } }, "sha512-mx8hyJi/hjFvbPokCg4uRd4ZX78t+YyRPtnKWwIl+RzNaVuFpQHfmlGVfsKEJN8LwTCvL+DfVgAM04XaHkm6bA=="], - - "@actions/io": ["@actions/io@1.1.3", "", {}, "sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q=="], - - "@fastify/busboy": ["@fastify/busboy@2.1.1", "", {}, "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA=="], - - "@octokit/auth-token": ["@octokit/auth-token@4.0.0", "", {}, "sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA=="], - - "@octokit/core": ["@octokit/core@5.2.2", "", { "dependencies": { "@octokit/auth-token": "^4.0.0", "@octokit/graphql": "^7.1.0", "@octokit/request": "^8.4.1", "@octokit/request-error": "^5.1.1", "@octokit/types": "^13.0.0", "before-after-hook": "^2.2.0", "universal-user-agent": "^6.0.0" } }, "sha512-/g2d4sW9nUDJOMz3mabVQvOGhVa4e/BN/Um7yca9Bb2XTzPPnfTWHWQg+IsEYO7M3Vx+EXvaM/I2pJWIMun1bg=="], - - "@octokit/endpoint": ["@octokit/endpoint@9.0.6", "", { "dependencies": { "@octokit/types": "^13.1.0", "universal-user-agent": "^6.0.0" } }, "sha512-H1fNTMA57HbkFESSt3Y9+FBICv+0jFceJFPWDePYlR/iMGrwM5ph+Dd4XRQs+8X+PUFURLQgX9ChPfhJ/1uNQw=="], - - "@octokit/graphql": ["@octokit/graphql@9.0.1", "", { "dependencies": { "@octokit/request": "^10.0.2", "@octokit/types": "^14.0.0", "universal-user-agent": "^7.0.0" } }, "sha512-j1nQNU1ZxNFx2ZtKmL4sMrs4egy5h65OMDmSbVyuCzjOcwsHq6EaYjOTGXPQxgfiN8dJ4CriYHk6zF050WEULg=="], - - "@octokit/openapi-types": ["@octokit/openapi-types@25.1.0", "", {}, "sha512-idsIggNXUKkk0+BExUn1dQ92sfysJrje03Q0bv0e+KPLrvyqZF8MnBpFz8UNfYDwB3Ie7Z0TByjWfzxt7vseaA=="], - - "@octokit/plugin-paginate-rest": ["@octokit/plugin-paginate-rest@9.2.2", "", { "dependencies": { "@octokit/types": "^12.6.0" }, "peerDependencies": { "@octokit/core": "5" } }, "sha512-u3KYkGF7GcZnSD/3UP0S7K5XUFT2FkOQdcfXZGZQPGv3lm4F2Xbf71lvjldr8c1H3nNbF+33cLEkWYbokGWqiQ=="], - - "@octokit/plugin-request-log": ["@octokit/plugin-request-log@6.0.0", "", { "peerDependencies": { "@octokit/core": ">=6" } }, "sha512-UkOzeEN3W91/eBq9sPZNQ7sUBvYCqYbrrD8gTbBuGtHEuycE4/awMXcYvx6sVYo7LypPhmQwwpUe4Yyu4QZN5Q=="], - - "@octokit/plugin-rest-endpoint-methods": ["@octokit/plugin-rest-endpoint-methods@10.4.1", "", { "dependencies": { "@octokit/types": "^12.6.0" }, "peerDependencies": { "@octokit/core": "5" } }, "sha512-xV1b+ceKV9KytQe3zCVqjg+8GTGfDYwaT1ATU5isiUyVtlVAO3HNdzpS4sr4GBx4hxQ46s7ITtZrAsxG22+rVg=="], - - "@octokit/request": ["@octokit/request@8.4.1", "", { "dependencies": { "@octokit/endpoint": "^9.0.6", "@octokit/request-error": "^5.1.1", "@octokit/types": "^13.1.0", "universal-user-agent": "^6.0.0" } }, 
"sha512-qnB2+SY3hkCmBxZsR/MPCybNmbJe4KAlfWErXq+rBKkQJlbjdJeS85VI9r8UqeLYLvnAenU8Q1okM/0MBsAGXw=="], - - "@octokit/request-error": ["@octokit/request-error@5.1.1", "", { "dependencies": { "@octokit/types": "^13.1.0", "deprecation": "^2.0.0", "once": "^1.4.0" } }, "sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g=="], - - "@octokit/rest": ["@octokit/rest@22.0.0", "", { "dependencies": { "@octokit/core": "^7.0.2", "@octokit/plugin-paginate-rest": "^13.0.1", "@octokit/plugin-request-log": "^6.0.0", "@octokit/plugin-rest-endpoint-methods": "^16.0.0" } }, "sha512-z6tmTu9BTnw51jYGulxrlernpsQYXpui1RK21vmXn8yF5bp6iX16yfTtJYGK5Mh1qDkvDOmp2n8sRMcQmR8jiA=="], - - "@octokit/types": ["@octokit/types@14.1.0", "", { "dependencies": { "@octokit/openapi-types": "^25.1.0" } }, "sha512-1y6DgTy8Jomcpu33N+p5w58l6xyt55Ar2I91RPiIA0xCJBXyUAhXCcmZaDWSANiha7R9a6qJJ2CRomGPZ6f46g=="], - - "@octokit/webhooks-types": ["@octokit/webhooks-types@7.6.1", "", {}, "sha512-S8u2cJzklBC0FgTwWVLaM8tMrDuDMVE4xiTK4EYXM9GntyvrdbSoxqDQa+Fh57CCNApyIpyeqPhhFEmHPfrXgw=="], - - "@types/bun": ["@types/bun@1.2.18", "", { "dependencies": { "bun-types": "1.2.18" } }, "sha512-Xf6RaWVheyemaThV0kUfaAUvCNokFr+bH8Jxp+tTZfx7dAPA8z9ePnP9S9+Vspzuxxx9JRAXhnyccRj3GyCMdQ=="], - - "@types/node": ["@types/node@24.0.13", "", { "dependencies": { "undici-types": "~7.8.0" } }, "sha512-Qm9OYVOFHFYg3wJoTSrz80hoec5Lia/dPp84do3X7dZvLikQvM1YpmvTBEdIr/e+U8HTkFjLHLnl78K/qjf+jQ=="], - - "@types/react": ["@types/react@19.1.8", "", { "dependencies": { "csstype": "^3.0.2" } }, "sha512-AwAfQ2Wa5bCx9WP8nZL2uMZWod7J7/JSplxbTmBQ5ms6QpqNYm672H0Vu9ZVKVngQ+ii4R/byguVEUZQyeg44g=="], - - "before-after-hook": ["before-after-hook@2.2.3", "", {}, "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ=="], - - "bun-types": ["bun-types@1.2.18", "", { "dependencies": { "@types/node": "*" }, "peerDependencies": { "@types/react": "^19" } }, "sha512-04+Eha5NP7Z0A9YgDAzMk5PHR16ZuLVa83b26kH5+cp1qZW4F6FmAURngE7INf4tKOvCE69vYvDEwoNl1tGiWw=="], - - "csstype": ["csstype@3.1.3", "", {}, "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw=="], - - "deprecation": ["deprecation@2.3.1", "", {}, "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ=="], - - "fast-content-type-parse": ["fast-content-type-parse@3.0.0", "", {}, "sha512-ZvLdcY8P+N8mGQJahJV5G4U88CSvT1rP8ApL6uETe88MBXrBHAkZlSEySdUlyztF7ccb+Znos3TFqaepHxdhBg=="], - - "once": ["once@1.4.0", "", { "dependencies": { "wrappy": "1" } }, "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w=="], - - "tunnel": ["tunnel@0.0.6", "", {}, "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg=="], - - "typescript": ["typescript@5.8.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ=="], - - "undici": ["undici@5.29.0", "", { "dependencies": { "@fastify/busboy": "^2.0.0" } }, "sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg=="], - - "undici-types": ["undici-types@7.8.0", "", {}, "sha512-9UJ2xGDvQ43tYyVMpuHlsgApydB8ZKfVYTsLDhXkFL/6gfkp+U8xTGdh8pMJv1SpZna0zxG1DwsKZsreLbXBxw=="], - - "universal-user-agent": ["universal-user-agent@7.0.3", "", {}, "sha512-TmnEAEAsBJVZM/AADELsK76llnwcf9vMKuPz8JflO1frO8Lchitr0fNaN9d+Ap0BjKtqWqd/J17qeDnXh8CL2A=="], - - 
"wrappy": ["wrappy@1.0.2", "", {}, "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="], - - "@octokit/core/@octokit/graphql": ["@octokit/graphql@7.1.1", "", { "dependencies": { "@octokit/request": "^8.4.1", "@octokit/types": "^13.0.0", "universal-user-agent": "^6.0.0" } }, "sha512-3mkDltSfcDUoa176nlGoA32RGjeWjl3K7F/BwHwRMJUW/IteSa4bnSV8p2ThNkcIcZU2umkZWxwETSSCJf2Q7g=="], - - "@octokit/core/@octokit/types": ["@octokit/types@13.10.0", "", { "dependencies": { "@octokit/openapi-types": "^24.2.0" } }, "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA=="], - - "@octokit/core/universal-user-agent": ["universal-user-agent@6.0.1", "", {}, "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ=="], - - "@octokit/endpoint/@octokit/types": ["@octokit/types@13.10.0", "", { "dependencies": { "@octokit/openapi-types": "^24.2.0" } }, "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA=="], - - "@octokit/endpoint/universal-user-agent": ["universal-user-agent@6.0.1", "", {}, "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ=="], - - "@octokit/graphql/@octokit/request": ["@octokit/request@10.0.3", "", { "dependencies": { "@octokit/endpoint": "^11.0.0", "@octokit/request-error": "^7.0.0", "@octokit/types": "^14.0.0", "fast-content-type-parse": "^3.0.0", "universal-user-agent": "^7.0.2" } }, "sha512-V6jhKokg35vk098iBqp2FBKunk3kMTXlmq+PtbV9Gl3TfskWlebSofU9uunVKhUN7xl+0+i5vt0TGTG8/p/7HA=="], - - "@octokit/plugin-paginate-rest/@octokit/types": ["@octokit/types@12.6.0", "", { "dependencies": { "@octokit/openapi-types": "^20.0.0" } }, "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw=="], - - "@octokit/plugin-request-log/@octokit/core": ["@octokit/core@7.0.3", "", { "dependencies": { "@octokit/auth-token": "^6.0.0", "@octokit/graphql": "^9.0.1", "@octokit/request": "^10.0.2", "@octokit/request-error": "^7.0.0", "@octokit/types": "^14.0.0", "before-after-hook": "^4.0.0", "universal-user-agent": "^7.0.0" } }, "sha512-oNXsh2ywth5aowwIa7RKtawnkdH6LgU1ztfP9AIUCQCvzysB+WeU8o2kyyosDPwBZutPpjZDKPQGIzzrfTWweQ=="], - - "@octokit/plugin-rest-endpoint-methods/@octokit/types": ["@octokit/types@12.6.0", "", { "dependencies": { "@octokit/openapi-types": "^20.0.0" } }, "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw=="], - - "@octokit/request/@octokit/types": ["@octokit/types@13.10.0", "", { "dependencies": { "@octokit/openapi-types": "^24.2.0" } }, "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA=="], - - "@octokit/request/universal-user-agent": ["universal-user-agent@6.0.1", "", {}, "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ=="], - - "@octokit/request-error/@octokit/types": ["@octokit/types@13.10.0", "", { "dependencies": { "@octokit/openapi-types": "^24.2.0" } }, "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA=="], - - "@octokit/rest/@octokit/core": ["@octokit/core@7.0.3", "", { "dependencies": { "@octokit/auth-token": "^6.0.0", "@octokit/graphql": "^9.0.1", "@octokit/request": "^10.0.2", "@octokit/request-error": "^7.0.0", "@octokit/types": "^14.0.0", "before-after-hook": "^4.0.0", "universal-user-agent": "^7.0.0" } }, 
"sha512-oNXsh2ywth5aowwIa7RKtawnkdH6LgU1ztfP9AIUCQCvzysB+WeU8o2kyyosDPwBZutPpjZDKPQGIzzrfTWweQ=="], - - "@octokit/rest/@octokit/plugin-paginate-rest": ["@octokit/plugin-paginate-rest@13.1.1", "", { "dependencies": { "@octokit/types": "^14.1.0" }, "peerDependencies": { "@octokit/core": ">=6" } }, "sha512-q9iQGlZlxAVNRN2jDNskJW/Cafy7/XE52wjZ5TTvyhyOD904Cvx//DNyoO3J/MXJ0ve3rPoNWKEg5iZrisQSuw=="], - - "@octokit/rest/@octokit/plugin-rest-endpoint-methods": ["@octokit/plugin-rest-endpoint-methods@16.0.0", "", { "dependencies": { "@octokit/types": "^14.1.0" }, "peerDependencies": { "@octokit/core": ">=6" } }, "sha512-kJVUQk6/dx/gRNLWUnAWKFs1kVPn5O5CYZyssyEoNYaFedqZxsfYs7DwI3d67hGz4qOwaJ1dpm07hOAD1BXx6g=="], - - "@octokit/core/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@24.2.0", "", {}, "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg=="], - - "@octokit/endpoint/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@24.2.0", "", {}, "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg=="], - - "@octokit/graphql/@octokit/request/@octokit/endpoint": ["@octokit/endpoint@11.0.0", "", { "dependencies": { "@octokit/types": "^14.0.0", "universal-user-agent": "^7.0.2" } }, "sha512-hoYicJZaqISMAI3JfaDr1qMNi48OctWuOih1m80bkYow/ayPw6Jj52tqWJ6GEoFTk1gBqfanSoI1iY99Z5+ekQ=="], - - "@octokit/graphql/@octokit/request/@octokit/request-error": ["@octokit/request-error@7.0.0", "", { "dependencies": { "@octokit/types": "^14.0.0" } }, "sha512-KRA7VTGdVyJlh0cP5Tf94hTiYVVqmt2f3I6mnimmaVz4UG3gQV/k4mDJlJv3X67iX6rmN7gSHCF8ssqeMnmhZg=="], - - "@octokit/plugin-paginate-rest/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@20.0.0", "", {}, "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA=="], - - "@octokit/plugin-request-log/@octokit/core/@octokit/auth-token": ["@octokit/auth-token@6.0.0", "", {}, "sha512-P4YJBPdPSpWTQ1NU4XYdvHvXJJDxM6YwpS0FZHRgP7YFkdVxsWcpWGy/NVqlAA7PcPCnMacXlRm1y2PFZRWL/w=="], - - "@octokit/plugin-request-log/@octokit/core/@octokit/request": ["@octokit/request@10.0.3", "", { "dependencies": { "@octokit/endpoint": "^11.0.0", "@octokit/request-error": "^7.0.0", "@octokit/types": "^14.0.0", "fast-content-type-parse": "^3.0.0", "universal-user-agent": "^7.0.2" } }, "sha512-V6jhKokg35vk098iBqp2FBKunk3kMTXlmq+PtbV9Gl3TfskWlebSofU9uunVKhUN7xl+0+i5vt0TGTG8/p/7HA=="], - - "@octokit/plugin-request-log/@octokit/core/@octokit/request-error": ["@octokit/request-error@7.0.0", "", { "dependencies": { "@octokit/types": "^14.0.0" } }, "sha512-KRA7VTGdVyJlh0cP5Tf94hTiYVVqmt2f3I6mnimmaVz4UG3gQV/k4mDJlJv3X67iX6rmN7gSHCF8ssqeMnmhZg=="], - - "@octokit/plugin-request-log/@octokit/core/before-after-hook": ["before-after-hook@4.0.0", "", {}, "sha512-q6tR3RPqIB1pMiTRMFcZwuG5T8vwp+vUvEG0vuI6B+Rikh5BfPp2fQ82c925FOs+b0lcFQ8CFrL+KbilfZFhOQ=="], - - "@octokit/plugin-rest-endpoint-methods/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@20.0.0", "", {}, "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA=="], - - "@octokit/request-error/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@24.2.0", "", {}, "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg=="], - - "@octokit/request/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@24.2.0", "", {}, 
"sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg=="], - - "@octokit/rest/@octokit/core/@octokit/auth-token": ["@octokit/auth-token@6.0.0", "", {}, "sha512-P4YJBPdPSpWTQ1NU4XYdvHvXJJDxM6YwpS0FZHRgP7YFkdVxsWcpWGy/NVqlAA7PcPCnMacXlRm1y2PFZRWL/w=="], - - "@octokit/rest/@octokit/core/@octokit/request": ["@octokit/request@10.0.3", "", { "dependencies": { "@octokit/endpoint": "^11.0.0", "@octokit/request-error": "^7.0.0", "@octokit/types": "^14.0.0", "fast-content-type-parse": "^3.0.0", "universal-user-agent": "^7.0.2" } }, "sha512-V6jhKokg35vk098iBqp2FBKunk3kMTXlmq+PtbV9Gl3TfskWlebSofU9uunVKhUN7xl+0+i5vt0TGTG8/p/7HA=="], - - "@octokit/rest/@octokit/core/@octokit/request-error": ["@octokit/request-error@7.0.0", "", { "dependencies": { "@octokit/types": "^14.0.0" } }, "sha512-KRA7VTGdVyJlh0cP5Tf94hTiYVVqmt2f3I6mnimmaVz4UG3gQV/k4mDJlJv3X67iX6rmN7gSHCF8ssqeMnmhZg=="], - - "@octokit/rest/@octokit/core/before-after-hook": ["before-after-hook@4.0.0", "", {}, "sha512-q6tR3RPqIB1pMiTRMFcZwuG5T8vwp+vUvEG0vuI6B+Rikh5BfPp2fQ82c925FOs+b0lcFQ8CFrL+KbilfZFhOQ=="], - - "@octokit/plugin-request-log/@octokit/core/@octokit/request/@octokit/endpoint": ["@octokit/endpoint@11.0.0", "", { "dependencies": { "@octokit/types": "^14.0.0", "universal-user-agent": "^7.0.2" } }, "sha512-hoYicJZaqISMAI3JfaDr1qMNi48OctWuOih1m80bkYow/ayPw6Jj52tqWJ6GEoFTk1gBqfanSoI1iY99Z5+ekQ=="], - - "@octokit/rest/@octokit/core/@octokit/request/@octokit/endpoint": ["@octokit/endpoint@11.0.0", "", { "dependencies": { "@octokit/types": "^14.0.0", "universal-user-agent": "^7.0.2" } }, "sha512-hoYicJZaqISMAI3JfaDr1qMNi48OctWuOih1m80bkYow/ayPw6Jj52tqWJ6GEoFTk1gBqfanSoI1iY99Z5+ekQ=="], - } -} diff --git a/sdks/github/package.json b/sdks/github/package.json deleted file mode 100644 index e1b9222e..00000000 --- a/sdks/github/package.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "name": "github", - "type": "module", - "private": true, - "devDependencies": { - "@octokit/webhooks-types": "^7.6.1", - "@types/bun": "latest", - "@types/node": "^24.0.10" - }, - "peerDependencies": { - "typescript": "^5" - }, - "dependencies": { - "@actions/core": "^1.11.1", - "@actions/github": "^6.0.1", - "@octokit/graphql": "^9.0.1", - "@octokit/rest": "^22.0.0" - } -} diff --git a/sdks/github/src/index.ts b/sdks/github/src/index.ts deleted file mode 100644 index fd6e08aa..00000000 --- a/sdks/github/src/index.ts +++ /dev/null @@ -1,541 +0,0 @@ -#!/usr/bin/env bun - -import os from "os" -import path from "path" -import { $ } from "bun" -import { Octokit } from "@octokit/rest" -import { graphql } from "@octokit/graphql" -import * as core from "@actions/core" -import * as github from "@actions/github" -import type { IssueCommentEvent } from "@octokit/webhooks-types" -import type { GitHubIssue, GitHubPullRequest, IssueQueryResponse, PullRequestQueryResponse } from "./types" - -if (github.context.eventName !== "issue_comment") { - core.setFailed(`Unsupported event type: ${github.context.eventName}`) - process.exit(1) -} - -const { owner, repo } = github.context.repo -const payload = github.context.payload as IssueCommentEvent -const actor = github.context.actor -const issueId = payload.issue.number -const body = payload.comment.body - -let appToken: string -let octoRest: Octokit -let octoGraph: typeof graphql -let commentId: number -let gitCredentials: string -let shareUrl: string | undefined -let state: - | { - type: "issue" - issue: GitHubIssue - } - | { - type: "local-pr" - pr: GitHubPullRequest - } - | { - type: "fork-pr" - pr: 
GitHubPullRequest - } - -async function run() { - try { - const match = body.match(/^hey\s*opencode,?\s*(.*)$/) - if (!match?.[1]) throw new Error("Command must start with `hey opencode`") - const userPrompt = match[1] - - const oidcToken = await generateGitHubToken() - appToken = await exchangeForAppToken(oidcToken) - octoRest = new Octokit({ auth: appToken }) - octoGraph = graphql.defaults({ - headers: { authorization: `token ${appToken}` }, - }) - - await configureGit(appToken) - await assertPermissions() - - const comment = await createComment("opencode started...") - commentId = comment.data.id - - // Set state - const repoData = await fetchRepo() - if (payload.issue.pull_request) { - const prData = await fetchPR() - state = { - type: prData.headRepository.nameWithOwner === prData.baseRepository.nameWithOwner ? "local-pr" : "fork-pr", - pr: prData, - } - } else { - state = { - type: "issue", - issue: await fetchIssue(), - } - } - - // Setup git branch - if (state.type === "local-pr") await checkoutLocalBranch(state.pr) - else if (state.type === "fork-pr") await checkoutForkBranch(state.pr) - - // Prompt - const share = process.env.INPUT_SHARE === "true" || !repoData.data.private - const promptData = state.type === "issue" ? buildPromptDataForIssue(state.issue) : buildPromptDataForPR(state.pr) - const responseRet = await runOpencode(`${userPrompt}\n\n${promptData}`, { - share, - }) - - const response = responseRet.stdout - shareUrl = responseRet.stderr.match(/https:\/\/opencode\.ai\/s\/\w+/)?.[0] - - // Comment and push changes - if (await branchIsDirty()) { - const summary = - (await runOpencode(`Summarize the following in less than 40 characters:\n\n${response}`, { share: false })) - ?.stdout || `Fix issue: ${payload.issue.title}` - - if (state.type === "issue") { - const branch = await pushToNewBranch(summary) - const pr = await createPR(repoData.data.default_branch, branch, summary, `${response}\n\nCloses #${issueId}`) - await updateComment(`opencode created pull request #${pr}`) - } else if (state.type === "local-pr") { - await pushToCurrentBranch(summary) - await updateComment(response) - } else if (state.type === "fork-pr") { - await pushToForkBranch(summary, state.pr) - await updateComment(response) - } - } else { - await updateComment(response) - } - await restoreGitConfig() - await revokeAppToken() - } catch (e: any) { - await restoreGitConfig() - await revokeAppToken() - console.error(e) - let msg = e - if (e instanceof $.ShellError) { - msg = e.stderr.toString() - } else if (e instanceof Error) { - msg = e.message - } - if (commentId) await updateComment(msg) - core.setFailed(`opencode failed with error: ${msg}`) - // Also output the clean error message for the action to capture - //core.setOutput("prepare_error", e.message); - process.exit(1) - } -} - -if (import.meta.main) { - run() -} - -async function generateGitHubToken() { - try { - return await core.getIDToken("opencode-github-action") - } catch (error) { - console.error("Failed to get OIDC token:", error) - throw new Error("Could not fetch an OIDC token. 
Make sure to add `id-token: write` to your workflow permissions.") - } -} - -async function exchangeForAppToken(oidcToken: string) { - const response = await fetch("https://api.opencode.ai/exchange_github_app_token", { - method: "POST", - headers: { - Authorization: `Bearer ${oidcToken}`, - }, - }) - - if (!response.ok) { - const responseJson = (await response.json()) as { error?: string } - throw new Error(`App token exchange failed: ${response.status} ${response.statusText} - ${responseJson.error}`) - } - - const responseJson = (await response.json()) as { token: string } - return responseJson.token -} - -async function configureGit(appToken: string) { - console.log("Configuring git...") - const config = "http.https://github.com/.extraheader" - const ret = await $`git config --local --get ${config}` - gitCredentials = ret.stdout.toString().trim() - - const newCredentials = Buffer.from(`x-access-token:${appToken}`, "utf8").toString("base64") - - await $`git config --local --unset-all ${config}` - await $`git config --local ${config} "AUTHORIZATION: basic ${newCredentials}"` - await $`git config --global user.name "opencode-agent[bot]"` - await $`git config --global user.email "opencode-agent[bot]@users.noreply.github.com"` -} - -async function checkoutLocalBranch(pr: GitHubPullRequest) { - console.log("Checking out local branch...") - - const branch = pr.headRefName - const depth = Math.max(pr.commits.totalCount, 20) - - await $`git fetch origin --depth=${depth} ${branch}` - await $`git checkout ${branch}` -} - -async function checkoutForkBranch(pr: GitHubPullRequest) { - console.log("Checking out fork branch...") - - const remoteBranch = pr.headRefName - const localBranch = generateBranchName() - const depth = Math.max(pr.commits.totalCount, 20) - - await $`git remote add fork https://github.com/${pr.headRepository.nameWithOwner}.git` - await $`git fetch fork --depth=${depth} ${remoteBranch}` - await $`git checkout -b ${localBranch} fork/${remoteBranch}` -} - -async function restoreGitConfig() { - if (!gitCredentials) return - const config = "http.https://github.com/.extraheader" - await $`git config --local ${config} "${gitCredentials}"` -} - -async function assertPermissions() { - console.log(`Asserting permissions for user ${actor}...`) - - let permission - try { - const response = await octoRest.repos.getCollaboratorPermissionLevel({ - owner, - repo, - username: actor, - }) - - permission = response.data.permission - console.log(` permission: ${permission}`) - } catch (error) { - console.error(`Failed to check permissions: ${error}`) - throw new Error(`Failed to check permissions for user ${actor}: ${error}`) - } - - if (!["admin", "write"].includes(permission)) throw new Error(`User ${actor} does not have write permissions`) -} - -function buildComment(content: string) { - const runId = process.env.GITHUB_RUN_ID! - const runUrl = `/${owner}/${repo}/actions/runs/${runId}` - return [content, "\n\n", shareUrl ? `[view session](${shareUrl}) | ` : "", `[view log](${runUrl})`].join("") -} - -async function createComment(body: string) { - console.log("Creating comment...") - return await octoRest.rest.issues.createComment({ - owner, - repo, - issue_number: issueId, - body: buildComment(body), - }) -} - -async function updateComment(body: string) { - console.log("Updating comment...") - return await octoRest.rest.issues.updateComment({ - owner, - repo, - comment_id: commentId, - body: buildComment(body), - }) -} - -function generateBranchName() { - const type = state.type === "issue" ? 
"issue" : "pr" - const timestamp = new Date() - .toISOString() - .replace(/[:-]/g, "") - .replace(/\.\d{3}Z/, "") - .split("T") - .join("_") - return `opencode/${type}${issueId}-${timestamp}` -} - -async function pushToCurrentBranch(summary: string) { - console.log("Pushing to current branch...") - await $`git add .` - await $`git commit -m "${summary} - -Co-authored-by: ${actor} <${actor}@users.noreply.github.com>"` - await $`git push` -} - -async function pushToForkBranch(summary: string, pr: GitHubPullRequest) { - console.log("Pushing to fork branch...") - - const remoteBranch = pr.headRefName - - await $`git add .` - await $`git commit -m "${summary} - -Co-authored-by: ${actor} <${actor}@users.noreply.github.com>"` - await $`git push fork HEAD:${remoteBranch}` -} - -async function pushToNewBranch(summary: string) { - console.log("Pushing to new branch...") - const branch = generateBranchName() - await $`git checkout -b ${branch}` - await $`git add .` - await $`git commit -m "${summary} - -Co-authored-by: ${actor} <${actor}@users.noreply.github.com>"` - await $`git push -u origin ${branch}` - return branch -} - -async function createPR(base: string, branch: string, title: string, body: string) { - console.log("Creating pull request...") - const pr = await octoRest.rest.pulls.create({ - owner, - repo, - head: branch, - base, - title, - body: buildComment(body), - }) - return pr.data.number -} - -async function runOpencode( - prompt: string, - opts?: { - share?: boolean - }, -) { - console.log("Running opencode...") - - const promptPath = path.join(os.tmpdir(), "PROMPT") - await Bun.write(promptPath, prompt) - const ret = await $`cat ${promptPath} | opencode run -m ${process.env.INPUT_MODEL} ${opts?.share ? "--share" : ""}` - return { - stdout: ret.stdout.toString().trim(), - stderr: ret.stderr.toString().trim(), - } -} - -async function branchIsDirty() { - console.log("Checking if branch is dirty...") - const ret = await $`git status --porcelain` - return ret.stdout.toString().trim().length > 0 -} - -async function fetchRepo() { - return await octoRest.rest.repos.get({ owner, repo }) -} - -async function fetchIssue() { - console.log("Fetching prompt data for issue...") - const issueResult = await octoGraph( - ` -query($owner: String!, $repo: String!, $number: Int!) { - repository(owner: $owner, name: $repo) { - issue(number: $number) { - title - body - author { - login - } - createdAt - state - comments(first: 100) { - nodes { - id - databaseId - body - author { - login - } - createdAt - } - } - } - } -}`, - { - owner, - repo, - number: issueId, - }, - ) - - const issue = issueResult.repository.issue - if (!issue) throw new Error(`Issue #${issueId} not found`) - - return issue -} - -function buildPromptDataForIssue(issue: GitHubIssue) { - const comments = (issue.comments?.nodes || []) - .filter((c) => { - const id = parseInt(c.databaseId) - return id !== commentId && id !== payload.comment.id - }) - .map((c) => ` - ${c.author.login} at ${c.createdAt}: ${c.body}`) - - return [ - "Here is the context for the issue:", - `- Title: ${issue.title}`, - `- Body: ${issue.body}`, - `- Author: ${issue.author.login}`, - `- Created At: ${issue.createdAt}`, - `- State: ${issue.state}`, - ...(comments.length > 0 ? ["- Comments:", ...comments] : []), - ].join("\n") -} - -async function fetchPR() { - console.log("Fetching prompt data for PR...") - const prResult = await octoGraph( - ` -query($owner: String!, $repo: String!, $number: Int!) 
{ - repository(owner: $owner, name: $repo) { - pullRequest(number: $number) { - title - body - author { - login - } - baseRefName - headRefName - headRefOid - createdAt - additions - deletions - state - baseRepository { - nameWithOwner - } - headRepository { - nameWithOwner - } - commits(first: 100) { - totalCount - nodes { - commit { - oid - message - author { - name - email - } - } - } - } - files(first: 100) { - nodes { - path - additions - deletions - changeType - } - } - comments(first: 100) { - nodes { - id - databaseId - body - author { - login - } - createdAt - } - } - reviews(first: 100) { - nodes { - id - databaseId - author { - login - } - body - state - submittedAt - comments(first: 100) { - nodes { - id - databaseId - body - path - line - author { - login - } - createdAt - } - } - } - } - } - } -}`, - { - owner, - repo, - number: issueId, - }, - ) - - const pr = prResult.repository.pullRequest - if (!pr) throw new Error(`PR #${issueId} not found`) - - return pr -} - -function buildPromptDataForPR(pr: GitHubPullRequest) { - const comments = (pr.comments?.nodes || []) - .filter((c) => { - const id = parseInt(c.databaseId) - return id !== commentId && id !== payload.comment.id - }) - .map((c) => ` - ${c.author.login} at ${c.createdAt}: ${c.body}`) - - const files = (pr.files.nodes || []).map((f) => ` - ${f.path} (${f.changeType}) +${f.additions}/-${f.deletions}`) - const reviewData = (pr.reviews.nodes || []).map((r) => { - const comments = (r.comments.nodes || []).map((c) => ` - ${c.path}:${c.line ?? "?"}: ${c.body}`) - return [ - ` - ${r.author.login} at ${r.submittedAt}:`, - ` - Review body: ${r.body}`, - ...(comments.length > 0 ? [" - Comments:", ...comments] : []), - ] - }) - - return [ - "Here is the context for the pull request:", - `- Title: ${pr.title}`, - `- Body: ${pr.body}`, - `- Author: ${pr.author.login}`, - `- Created At: ${pr.createdAt}`, - `- Base Branch: ${pr.baseRefName}`, - `- Head Branch: ${pr.headRefName}`, - `- State: ${pr.state}`, - `- Additions: ${pr.additions}`, - `- Deletions: ${pr.deletions}`, - `- Total Commits: ${pr.commits.totalCount}`, - `- Changed Files: ${pr.files.nodes.length} files`, - ...(comments.length > 0 ? ["- Comments:", ...comments] : []), - ...(files.length > 0 ? ["- Changed files:", ...files] : []), - ...(reviewData.length > 0 ? 
["- Reviews:", ...reviewData] : []), - ].join("\n") -} - -async function revokeAppToken() { - if (!appToken) return - - await fetch("https://api.github.com/installation/token", { - method: "DELETE", - headers: { - Authorization: `Bearer ${appToken}`, - Accept: "application/vnd.github+json", - "X-GitHub-Api-Version": "2022-11-28", - }, - }) -} diff --git a/sdks/github/src/types.ts b/sdks/github/src/types.ts deleted file mode 100644 index fe0058fb..00000000 --- a/sdks/github/src/types.ts +++ /dev/null @@ -1,103 +0,0 @@ -// Types for GitHub GraphQL query responses -export type GitHubAuthor = { - login: string; - name?: string; -}; - -export type GitHubComment = { - id: string; - databaseId: string; - body: string; - author: GitHubAuthor; - createdAt: string; -}; - -export type GitHubReviewComment = GitHubComment & { - path: string; - line: number | null; -}; - -export type GitHubCommit = { - oid: string; - message: string; - author: { - name: string; - email: string; - }; -}; - -export type GitHubFile = { - path: string; - additions: number; - deletions: number; - changeType: string; -}; - -export type GitHubReview = { - id: string; - databaseId: string; - author: GitHubAuthor; - body: string; - state: string; - submittedAt: string; - comments: { - nodes: GitHubReviewComment[]; - }; -}; - -export type GitHubPullRequest = { - title: string; - body: string; - author: GitHubAuthor; - baseRefName: string; - headRefName: string; - headRefOid: string; - createdAt: string; - additions: number; - deletions: number; - state: string; - baseRepository: { - nameWithOwner: string; - }; - headRepository: { - nameWithOwner: string; - }; - commits: { - totalCount: number; - nodes: Array<{ - commit: GitHubCommit; - }>; - }; - files: { - nodes: GitHubFile[]; - }; - comments: { - nodes: GitHubComment[]; - }; - reviews: { - nodes: GitHubReview[]; - }; -}; - -export type GitHubIssue = { - title: string; - body: string; - author: GitHubAuthor; - createdAt: string; - state: string; - comments: { - nodes: GitHubComment[]; - }; -}; - -export type PullRequestQueryResponse = { - repository: { - pullRequest: GitHubPullRequest; - }; -}; - -export type IssueQueryResponse = { - repository: { - issue: GitHubIssue; - }; -}; diff --git a/sdks/github/sst-env.d.ts b/sdks/github/sst-env.d.ts deleted file mode 100644 index b6a7e906..00000000 --- a/sdks/github/sst-env.d.ts +++ /dev/null @@ -1,9 +0,0 @@ -/* This file is auto-generated by SST. Do not edit. 
*/ -/* tslint:disable */ -/* eslint-disable */ -/* deno-fmt-ignore-file */ - -/// - -import "sst" -export {} \ No newline at end of file diff --git a/sdks/github/tsconfig.json b/sdks/github/tsconfig.json deleted file mode 100644 index 59435b49..00000000 --- a/sdks/github/tsconfig.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "compilerOptions": { - // Environment setup & latest features - "lib": ["ESNext"], - "target": "ESNext", - "module": "ESNext", - "moduleDetection": "force", - "jsx": "react-jsx", - "allowJs": true, - - // Bundler mode - "moduleResolution": "bundler", - "allowImportingTsExtensions": true, - "verbatimModuleSyntax": true, - "noEmit": true, - - // Best practices - "strict": true, - "skipLibCheck": true, - "noFallthroughCasesInSwitch": true, - "noUncheckedIndexedAccess": true, - "noImplicitOverride": true, - - // Some stricter flags (disabled by default) - "noUnusedLocals": false, - "noUnusedParameters": false, - "noPropertyAccessFromIndexSignature": false - } -} diff --git a/sst-env.d.ts b/sst-env.d.ts index 2c3e3d5a..627d74a5 100644 --- a/sst-env.d.ts +++ b/sst-env.d.ts @@ -12,14 +12,6 @@ declare module "sst" { "Bucket": { "type": "sst.cloudflare.Bucket" } - "GITHUB_APP_ID": { - "type": "sst.sst.Secret" - "value": string - } - "GITHUB_APP_PRIVATE_KEY": { - "type": "sst.sst.Secret" - "value": string - } "Web": { "type": "sst.cloudflare.Astro" "url": string diff --git a/stainless-workspace.json b/stainless-workspace.json deleted file mode 100644 index b4230b05..00000000 --- a/stainless-workspace.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "project": "opencode", - "openapi_spec": "openapi.json", - "stainless_config": "stainless.yml" -} diff --git a/stainless.yml b/stainless.yml index 42f42b4c..f8d654fb 100644 --- a/stainless.yml +++ b/stainless.yml @@ -34,7 +34,6 @@ resources: models: unknownError: UnknownError providerAuthError: ProviderAuthError - messageAbortedError: MessageAbortedError event: methods: @@ -48,29 +47,17 @@ resources: app: models: app: App - logLevel: LogLevel - provider: Provider - model: Model - mode: Mode methods: get: get /app init: post /app/init - log: post /log - modes: get /mode - providers: get /config/providers find: - models: - match: Match - symbol: Symbol methods: text: get /find files: get /find/file symbols: get /find/symbol file: - models: - file: File methods: read: get /file status: get /file/status @@ -78,33 +65,29 @@ resources: config: models: config: Config - keybindsConfig: KeybindsConfig - mcpLocalConfig: McpLocalConfig - mcpRemoteConfig: McpRemoteConfig - modeConfig: ModeConfig + keybinds: KeybindsConfig + mcpLocal: McpLocalConfig + mcpRemote: McpRemoteConfig + provider: Provider + model: Model methods: get: get /config + providers: get /config/providers session: models: session: Session message: Message - part: Part + toolCall: ToolCall + toolPartialCall: ToolPartialCall + toolResult: ToolResult textPart: TextPart - textPartInput: TextPartInput + reasoningPart: ReasoningPart + toolInvocationPart: ToolInvocationPart + sourceUrlPart: SourceUrlPart filePart: FilePart - filePartInput: FilePartInput - toolPart: ToolPart stepStartPart: StepStartPart - stepFinishPart: StepFinishPart - snapshotPart: SnapshotPart - assistantMessage: AssistantMessage - userMessage: UserMessage - toolStatePending: ToolStatePending - toolStateRunning: ToolStateRunning - toolStateCompleted: ToolStateCompleted - toolStateError: ToolStateError - + messagePart: MessagePart methods: list: get /session create: post /session @@ -128,13 +111,9 @@ readme: example_requests: 
default: type: request - endpoint: get /session + endpoint: get /event params: {} headline: - type: request - endpoint: get /session - params: {} - streaming: type: request endpoint: get /event params: {}