Mirror of https://github.com/roc-lang/roc.git (synced 2025-08-04 12:18:19 +00:00)

Merge remote-tracking branch 'origin/main' into str-docs
Commit d9774109f1
780 changed files with 56418 additions and 26481 deletions
@@ -1,4 +0,0 @@
AUTHORS
nix
.envrc
.gitignore
3  .github/FUNDING.yml  vendored  Normal file
@@ -0,0 +1,3 @@
# These are supported funding model platforms

github: roc-lang
2  .github/dependabot.yml  vendored
@@ -7,3 +7,5 @@ updates:
day: "monday"
time: "07:00"
timezone: "Europe/Brussels"
# Disable all version updates, only critical security updates will be submitted
open-pull-requests-limit: 0
15  .github/workflows/benchmarks.yml  vendored
@@ -23,25 +23,18 @@ jobs:
|
|||
ref: "main"
|
||||
clean: "true"
|
||||
|
||||
- name: Earthly version
|
||||
run: earthly --version
|
||||
|
||||
- name: on main; prepare a self-contained benchmark folder
|
||||
run: ./ci/safe-earthly.sh --build-arg BENCH_SUFFIX=main +prep-bench-folder
|
||||
run: nix develop -c ./ci/benchmarks/prep_folder.sh main
|
||||
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
clean: "false" # we want to keep the benchmark folder
|
||||
|
||||
- name: on current branch; prepare a self-contained benchmark folder
|
||||
run: ./ci/safe-earthly.sh +prep-bench-folder
|
||||
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: stable
|
||||
run: nix develop -c ./ci/benchmarks/prep_folder.sh branch
|
||||
|
||||
- name: build benchmark runner
|
||||
run: cd ci/bench-runner && cargo build --release && cd ../..
|
||||
run: nix develop -c bash -c "cd ci/benchmarks/bench-runner && cargo build --release && cd ../../.."
|
||||
|
||||
- name: run benchmarks with regression check
|
||||
run: ./ci/bench-runner/target/release/bench-runner --check-executables-changed
|
||||
run: nix develop -c ./ci/benchmarks/bench-runner/target/release/bench-runner --check-executables-changed
|
||||
|
|
30  .github/workflows/markdown_link_check.yml  vendored  Normal file
@@ -0,0 +1,30 @@
|
|||
on:
|
||||
pull_request:
|
||||
schedule:
|
||||
- cron: '0 9 * * *' # 9=9am utc+0
|
||||
|
||||
name: Check Markdown links
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
markdown-link-check:
|
||||
runs-on: ubuntu-22.04
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: gaurav-nelson/github-action-markdown-link-check@v1
|
||||
with:
|
||||
use-quiet-mode: 'yes'
|
||||
use-verbose-mode: 'yes'
|
||||
base-branch: 'main'
|
||||
check-modified-files-only: 'yes'
|
||||
if: ${{ github.event_name == 'pull_request' }}
|
||||
- uses: gaurav-nelson/github-action-markdown-link-check@v1
|
||||
with:
|
||||
use-quiet-mode: 'yes'
|
||||
use-verbose-mode: 'yes'
|
||||
base-branch: 'main'
|
||||
check-modified-files-only: 'no'
|
||||
if: ${{ github.event_name == 'schedule' }}
|
26  .github/workflows/nightly_linux_x86_64.yml  vendored
@@ -3,14 +3,12 @@ on:
|
|||
- cron: '0 9 * * *'
|
||||
|
||||
name: Nightly Release Linux x86_64
|
||||
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Rust tests, build and package nightly release
|
||||
runs-on: [self-hosted, i5-4690K]
|
||||
runs-on: [self-hosted, i7-6700K]
|
||||
timeout-minutes: 90
|
||||
env:
|
||||
FORCE_COLOR: 1 # for earthly logging
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
|
@@ -21,14 +19,26 @@
|
|||
run: RUSTFLAGS="-C target-cpu=x86-64" cargo build --features with_sound --release --locked
|
||||
# target-cpu=x86-64 -> For maximal compatibility for all CPU's. Note that this setting will likely make the compiler slower.
|
||||
|
||||
- name: Make release tar archive
|
||||
run: ./ci/package_release.sh roc_linux_x86_64.tar.gz
|
||||
- name: get commit SHA
|
||||
run: echo "SHA=$(git rev-parse --short "$GITHUB_SHA")" >> $GITHUB_ENV
|
||||
|
||||
- name: get date
|
||||
run: echo "DATE=$(date "+%Y-%m-%d")" >> $GITHUB_ENV
|
||||
|
||||
- name: build file name
|
||||
env:
|
||||
DATE: ${{ env.DATE }}
|
||||
SHA: ${{ env.SHA }}
|
||||
run: echo "RELEASE_TAR_FILENAME=roc_nightly-linux_x86_64-$DATE-$SHA.tar.gz" >> $GITHUB_ENV
|
||||
|
||||
- name: Make nightly release tar archive
|
||||
run: ./ci/package_release.sh ${{ env.RELEASE_TAR_FILENAME }}
|
||||
|
||||
- name: Upload roc nightly tar. Actually uploading to github releases has to be done manually.
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: roc_nightly-linux_x86_64.tar.gz
|
||||
path: roc_linux_x86_64.tar.gz
|
||||
name: ${{ env.RELEASE_TAR_FILENAME }}
|
||||
path: ${{ env.RELEASE_TAR_FILENAME }}
|
||||
retention-days: 4
|
||||
|
||||
- name: build wasm repl
|
||||
|
|
|
@@ -18,6 +18,18 @@
|
|||
|
||||
- name: run tests
|
||||
run: cargo test --locked --release
|
||||
|
||||
- name: get commit SHA
|
||||
run: echo "SHA=$(git rev-parse --short "$GITHUB_SHA")" >> $GITHUB_ENV
|
||||
|
||||
- name: get date
|
||||
run: echo "DATE=$(date "+%Y-%m-%d")" >> $GITHUB_ENV
|
||||
|
||||
- name: build file name
|
||||
env:
|
||||
DATE: ${{ env.DATE }}
|
||||
SHA: ${{ env.SHA }}
|
||||
run: echo "RELEASE_TAR_FILENAME=roc_nightly-macos_12_apple_silicon-$DATE-$SHA.tar.gz" >> $GITHUB_ENV
|
||||
|
||||
- name: write version to file
|
||||
run: ./ci/write_version.sh
|
||||
|
@@ -26,7 +38,7 @@ jobs:
|
|||
run: cargo build --locked --release
|
||||
|
||||
- name: package release
|
||||
run: ./ci/package_release.sh roc_darwin_apple_silicon.tar.gz
|
||||
run: ./ci/package_release.sh ${{ env.RELEASE_TAR_FILENAME }}
|
||||
|
||||
- name: print short commit SHA
|
||||
run: git rev-parse --short "$GITHUB_SHA"
|
||||
|
@@ -35,6 +47,6 @@ jobs:
|
|||
- name: Upload artifact Actually uploading to github releases has to be done manually
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: roc_nightly-macos_apple_silicon.tar.gz
|
||||
path: roc_darwin_apple_silicon.tar.gz
|
||||
name: ${{ env.RELEASE_TAR_FILENAME }}
|
||||
path: ${{ env.RELEASE_TAR_FILENAME }}
|
||||
retention-days: 4
|
||||
|
|
77  .github/workflows/nightly_macos_x86_64.yml  vendored
@@ -1,20 +1,26 @@
|
|||
on:
|
||||
schedule:
|
||||
- cron: '0 9 * * 1' # 9=9am utc+0, 1=monday
|
||||
- cron: '0 9 * * *' # 9=9am utc+0
|
||||
|
||||
name: Nightly Release macOS x86_64
|
||||
|
||||
env:
|
||||
ZIG_VERSION: 0.9.1
|
||||
LLVM_SYS_130_PREFIX: /usr/local/opt/llvm
|
||||
LLVM_SYS_130_PREFIX: /usr/local/opt/llvm@13
|
||||
|
||||
jobs:
|
||||
test-and-build:
|
||||
name: Rust tests, build and package nightly release
|
||||
runs-on: [macos-12]
|
||||
timeout-minutes: 90
|
||||
test-build-upload:
|
||||
name: build, test, package and upload nightly release
|
||||
strategy:
|
||||
matrix:
|
||||
os: [ macos-11, macos-12 ]
|
||||
runs-on: ${{ matrix.os }}
|
||||
timeout-minutes: 120
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: write version to file
|
||||
run: ./ci/write_version.sh
|
||||
|
||||
- name: Install zig
|
||||
run: |
|
||||
|
@@ -24,30 +30,57 @@ jobs:
|
|||
run: zig version
|
||||
- name: Install LLVM
|
||||
run: brew install llvm@13
|
||||
|
||||
# build has to be done before tests #2572
|
||||
- name: build release
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: build
|
||||
args: --release --locked
|
||||
- name: execute rust tests
|
||||
|
||||
- name: execute rust tests if macos 12
|
||||
if: endsWith(matrix.os, '12')
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: test
|
||||
args: --locked # no --release yet until #3166 is fixed
|
||||
- name: write version to file
|
||||
run: ./ci/write_version.sh
|
||||
- name: package release
|
||||
run: ./ci/package_release.sh roc_darwin_x86_64.tar.gz
|
||||
- name: Create pre-release with test_archive.tar.gz
|
||||
uses: Anton-4/deploy-nightly@1609d8dfe211b078674801113ab7a2ec2938b2a9
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # automatically provided by github actions
|
||||
args: --release --locked -- --skip opaque_wrap_function --skip bool_list_literal
|
||||
|
||||
- name: execute rust tests if macos 11
|
||||
if: endsWith(matrix.os, '11')
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
upload_url: https://uploads.github.com/repos/roc-lang/roc/releases/51880579/assets{?name,label}
|
||||
release_id: 51880579
|
||||
asset_path: ./roc_darwin_x86_64.tar.gz
|
||||
asset_name: roc_nightly-macos_x86_64-$$.tar.gz # $$ inserts 6 char commit hash and date (YYYY-MM-DD)
|
||||
asset_content_type: application/gzip
|
||||
max_releases: 3
|
||||
command: test
|
||||
args: --release --locked -- --skip opaque_wrap_function --skip bool_list_literal --skip platform_switching_swift --skip swift_ui
|
||||
# swift tests are skipped because of "Could not find or use auto-linked library 'swiftCompatibilityConcurrency'" on macos-11 x86_64 CI machine
|
||||
# this issue may be caused by using older versions of XCode
|
||||
|
||||
- name: get commit SHA
|
||||
run: echo "SHA=$(git rev-parse --short "$GITHUB_SHA")" >> $GITHUB_ENV
|
||||
|
||||
- name: get date
|
||||
run: echo "DATE=$(date "+%Y-%m-%d")" >> $GITHUB_ENV
|
||||
|
||||
- name: get macos version if 11
|
||||
if: endsWith(matrix.os, '11')
|
||||
run: echo "MACOSVERSION=11" >> $GITHUB_ENV
|
||||
|
||||
- name: get macos version if 12
|
||||
if: endsWith(matrix.os, '12')
|
||||
run: echo "MACOSVERSION=12" >> $GITHUB_ENV
|
||||
|
||||
- name: build file name
|
||||
env:
|
||||
DATE: ${{ env.DATE }}
|
||||
SHA: ${{ env.SHA }}
|
||||
run: echo "RELEASE_TAR_FILENAME=roc_nightly-macos_${MACOSVERSION}_x86_64-$DATE-$SHA.tar.gz" >> $GITHUB_ENV
|
||||
|
||||
- name: package release
|
||||
run: ./ci/package_release.sh ${{ env.RELEASE_TAR_FILENAME }}
|
||||
|
||||
- name: Upload artifact. Actually uploading to github releases has to be done manually.
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{ env.RELEASE_TAR_FILENAME }}
|
||||
path: ${{ env.RELEASE_TAR_FILENAME }}
|
||||
retention-days: 4
|
||||
|
||||
|
|
16  .github/workflows/nightly_netlify_build_deploy.yml  vendored  Normal file
@@ -0,0 +1,16 @@
|
|||
on:
|
||||
schedule:
|
||||
- cron: '0 9 * * *'
|
||||
|
||||
name: Nightly netlify build and deploy
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- name: trigger netlify build and deploy
|
||||
env:
|
||||
HOOK: ${{ secrets.NETLIFY_BUILD_HOOK }}
|
||||
run: |
|
||||
curl -X POST -d {} https://api.netlify.com/build_hooks/${HOOK}
|
5  .github/workflows/nix_linux_x86_64.yml  vendored
@@ -20,4 +20,7 @@ jobs:
|
|||
clean: "true"
|
||||
|
||||
- name: execute tests with --release
|
||||
run: /home/big-ci-user/.nix-profile/bin/nix develop -c cargo test --locked --release
|
||||
run: nix develop -c cargo test --locked --release
|
||||
|
||||
- name: test wasm32 cli_run
|
||||
run: nix develop -c cargo test --locked --release --features="wasm32-cli-run"
|
||||
|
|
|
@@ -31,6 +31,9 @@ jobs:
|
|||
- name: execute tests with --release
|
||||
run: nix develop -c cargo test --locked --release
|
||||
|
||||
# we run the llvm wasm tests only on this machine because it is fast and wasm should be cross-platform
|
||||
- name: test launching the editor
|
||||
run: cargo test --release --locked editor_launch_test::launch -- --ignored # `--ignored` to run this test that is ignored for "normal" runs
|
||||
|
||||
# we run the llvm wasm tests only on this machine because it is fast and wasm should be cross-target
|
||||
- name: execute llvm wasm tests with --release
|
||||
run: nix develop -c cargo test-gen-llvm-wasm --locked --release
|
||||
|
|
31  .github/workflows/nix_macos_x86_64.yml  vendored  Normal file
@@ -0,0 +1,31 @@
|
|||
on: [pull_request]
|
||||
|
||||
name: Nix macOS x86_64 cargo test
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
RUST_BACKTRACE: 1
|
||||
|
||||
jobs:
|
||||
nix-macos-x86-64:
|
||||
name: nix-macos-x86-64
|
||||
runs-on: [macos-12]
|
||||
timeout-minutes: 90
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
clean: "true"
|
||||
|
||||
- uses: cachix/install-nix-action@v15
|
||||
|
||||
# to cache nix packages
|
||||
- uses: cachix/cachix-action@v10
|
||||
with:
|
||||
name: enigmaticsunrise
|
||||
authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
|
||||
|
||||
- name: execute cli_run tests only, the full tests take too long but are run nightly
|
||||
run: nix develop -c cargo test --locked --release -p roc_cli
|
9  .github/workflows/spellcheck.yml  vendored
@@ -12,7 +12,7 @@ env:
|
|||
jobs:
|
||||
spell-check:
|
||||
name: spell check
|
||||
runs-on: [self-hosted, i7-6700K]
|
||||
runs-on: [self-hosted]
|
||||
timeout-minutes: 10
|
||||
env:
|
||||
FORCE_COLOR: 1
|
||||
|
@@ -21,8 +21,5 @@ jobs:
|
|||
with:
|
||||
clean: "true"
|
||||
|
||||
- name: Earthly version
|
||||
run: earthly --version
|
||||
|
||||
- name: install spell checker, do spell check
|
||||
run: ./ci/safe-earthly.sh +check-typos
|
||||
- name: do spell check with typos-cli 1.0.11 # to reproduce locally: cargo install typos-cli --version 1.0.11
|
||||
run: typos
|
||||
|
|
|
@@ -5,7 +5,7 @@ on:
|
|||
name: Test latest nightly release for macOS Apple Silicon
|
||||
|
||||
jobs:
|
||||
test-and-build:
|
||||
test-nightly:
|
||||
name: test nightly macos aarch64
|
||||
runs-on: [self-hosted, macOS, ARM64]
|
||||
timeout-minutes: 90
|
||||
|
@@ -16,7 +16,7 @@ jobs:
|
|||
run: curl https://api.github.com/repos/roc-lang/roc/releases > roc_releases.json
|
||||
|
||||
- name: get the url of today`s release for macos apple silicon
|
||||
run: echo "RELEASE_URL=$(./ci/get_latest_release_url.sh)" >> $GITHUB_ENV
|
||||
run: echo "RELEASE_URL=$(./ci/get_latest_release_url.sh silicon)" >> $GITHUB_ENV
|
||||
|
||||
- name: get the archive from the url
|
||||
run: curl -OL ${{ env.RELEASE_URL }}
|
||||
|
@@ -28,7 +28,7 @@ jobs:
|
|||
run: ls | grep "roc_nightly.*tar\.gz" | xargs tar -xzvf
|
||||
|
||||
- name: test roc hello world
|
||||
run: ./roc examples/hello-world/main.roc
|
||||
run: ./roc examples/helloWorld.roc
|
||||
|
||||
|
||||
|
51  .github/workflows/test_nightly_many_os.yml  vendored  Normal file
@@ -0,0 +1,51 @@
|
|||
on:
|
||||
schedule:
|
||||
- cron: '0 13 * * *'
|
||||
|
||||
name: Test latest nightly release for macOS, ubu 20.04, ubu 22.04 x86_64
|
||||
|
||||
jobs:
|
||||
test-nightly:
|
||||
name: test nightly macos 11, macos 12, ubu 20.04, ubu 22.04
|
||||
strategy:
|
||||
matrix:
|
||||
os: [ macos-11, macos-12, ubuntu-20.04, ubuntu-22.04 ]
|
||||
runs-on: ${{ matrix.os }}
|
||||
timeout-minutes: 90
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: fetch releases data and save to file
|
||||
run: |
|
||||
curl --request GET \
|
||||
--url https://api.github.com/repos/roc-lang/roc/releases \
|
||||
--header 'authorization: Bearer ${{ secrets.GITHUB_TOKEN }}' \
|
||||
--header 'content-type: application/json' \
|
||||
--output roc_releases.json
|
||||
|
||||
- name: get the url of today`s release for linux (x86_64)
|
||||
if: startsWith(matrix.os, 'ubuntu')
|
||||
run: echo "RELEASE_URL=$(./ci/get_latest_release_url.sh linux_x86_64)" >> $GITHUB_ENV
|
||||
|
||||
- name: get the url of today`s release for macos 11 (x86_64)
|
||||
if: startsWith(matrix.os, 'macos-11')
|
||||
run: echo "RELEASE_URL=$(./ci/get_latest_release_url.sh macos_11_x86_64)" >> $GITHUB_ENV
|
||||
|
||||
- name: get the url of today`s release for macos 12 (x86_64)
|
||||
if: startsWith(matrix.os, 'macos-12')
|
||||
run: echo "RELEASE_URL=$(./ci/get_latest_release_url.sh macos_12_x86_64)" >> $GITHUB_ENV
|
||||
|
||||
- name: get the archive from the url
|
||||
run: curl -OL ${{ env.RELEASE_URL }}
|
||||
|
||||
- name: remove everything in this dir except the tar # we want to test like a user who would have downloaded the release, so we clean up all files from the repo checkout
|
||||
run: ls | grep -v "roc_nightly.*tar\.gz" | xargs rm -rf
|
||||
|
||||
- name: decompress the tar
|
||||
run: ls | grep "roc_nightly.*tar\.gz" | xargs tar -xzvf
|
||||
|
||||
- name: test roc hello world
|
||||
run: ./roc examples/helloWorld.roc
|
||||
|
||||
|
||||
|
9  .github/workflows/ubuntu_x86_64.yml  vendored
@@ -26,6 +26,9 @@ jobs:
|
|||
|
||||
- name: zig fmt check, zig tests
|
||||
run: cd crates/compiler/builtins/bitcode && ./run-tests.sh
|
||||
|
||||
- name: roc format check on builtins
|
||||
run: cargo run --locked --release format --check crates/compiler/builtins/roc
|
||||
|
||||
- name: zig wasm tests
|
||||
run: cd crates/compiler/builtins/bitcode && ./run-wasm-tests.sh
|
||||
|
@@ -33,6 +36,9 @@ jobs:
|
|||
- name: regular rust tests
|
||||
run: cargo test --locked --release --features with_sound serde --workspace && sccache --show-stats
|
||||
|
||||
- name: test launching the editor
|
||||
run: cargo test --release --locked editor_launch_test::launch -- --ignored # `--ignored` to run this test that is ignored for "normal" runs
|
||||
|
||||
- name: test the dev backend # these tests require an explicit feature flag
|
||||
run: cargo test --locked --release --package test_gen --no-default-features --features gen-dev && sccache --show-stats
|
||||
|
||||
|
@@ -42,6 +48,9 @@ jobs:
|
|||
- name: run `roc test` on Str builtins
|
||||
run: cargo run --locked --release -- test crates/compiler/builtins/roc/Str.roc && sccache --show-stats
|
||||
|
||||
- name: run `roc test` on Dict builtins
|
||||
run: cargo run --locked --release -- test crates/compiler/builtins/roc/Dict.roc && sccache --show-stats
|
||||
|
||||
#TODO pass --locked into the script here as well, this avoids rebuilding dependencies unnecessarily
|
||||
- name: wasm repl test
|
||||
run: crates/repl_test/test_wasm.sh && sccache --show-stats
|
||||
|
|
52  .github/workflows/windows.yml  vendored  Normal file
@@ -0,0 +1,52 @@
|
|||
on: [pull_request]
|
||||
|
||||
name: Test windows build
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
RUST_BACKTRACE: 1
|
||||
|
||||
jobs:
|
||||
windows-cargo-build:
|
||||
name: windows-cargo-build
|
||||
runs-on: windows-2022
|
||||
env:
|
||||
LLVM_SYS_130_PREFIX: C:\LLVM-13.0.1-win64
|
||||
|
||||
timeout-minutes: 150
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
- run: Add-Content -Path "$env:GITHUB_ENV" -Value "GITHUB_RUNNER_CPU=$((Get-CimInstance Win32_Processor).Name)"
|
||||
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
with:
|
||||
shared-key: "rust-cache-windows-${{env.GITHUB_RUNNER_CPU}}"
|
||||
|
||||
- name: download and install zig
|
||||
run: |
|
||||
curl.exe --output "C:\zig-windows-x86_64-0.9.1.zip" --url https://ziglang.org/download/0.9.1/zig-windows-x86_64-0.9.1.zip
|
||||
cd C:\
|
||||
7z x zig-windows-x86_64-0.9.1.zip
|
||||
Add-Content $env:GITHUB_PATH "C:\zig-windows-x86_64-0.9.1\"
|
||||
|
||||
- name: zig version
|
||||
run: zig version
|
||||
|
||||
- name: set up llvm 13
|
||||
run: |
|
||||
curl.exe -L -O https://github.com/roc-lang/llvm-package-windows/releases/download/v13.0.1/LLVM-13.0.1-win64.7z
|
||||
7z x LLVM-13.0.1-win64.7z -oC:\LLVM-13.0.1-win64
|
||||
|
||||
- name: Build tests --release without running. Twice for zig lld-link error.
|
||||
run: cargo test --locked --release --no-run || cargo test --locked --release --no-run
|
||||
|
||||
# Why are these tests not build with previous command? => fingerprint error. Use `CARGO_LOG=cargo::core::compiler::fingerprint=info` to investigate
|
||||
- name: Build specific tests without running. Twice for zig lld-link error.
|
||||
run: cargo test --locked --release --no-run -p roc_ident -p roc_region -p roc_collections -p roc_can -p roc_types -p roc_solve -p roc_mono -p roc_gen_dev -p roc_gen_wasm -p roc_serialize -p roc_editor -p roc_linker || cargo test --locked --release --no-run -p roc_ident -p roc_region -p roc_collections -p roc_can -p roc_types -p roc_solve -p roc_mono -p roc_gen_dev -p roc_gen_wasm -p roc_serialize -p roc_editor -p roc_linker
|
||||
|
||||
- name: Actually run the tests.
|
||||
run: cargo test --locked --release -p roc_ident -p roc_region -p roc_collections -p roc_can -p roc_types -p roc_solve -p roc_mono -p roc_gen_dev -p roc_gen_wasm -p roc_serialize -p roc_editor -p roc_linker
|
5  .gitignore  vendored
@@ -6,6 +6,9 @@ zig-cache
|
|||
*.rs.bk
|
||||
*.o
|
||||
*.obj
|
||||
*.dll
|
||||
*.lib
|
||||
*.def
|
||||
*.tmp
|
||||
*.wasm
|
||||
*.exe
|
||||
|
@@ -36,7 +39,7 @@ editor/benches/resources/50000_lines.roc
|
|||
editor/benches/resources/500_lines.roc
|
||||
|
||||
# file editor creates when no arg is passed
|
||||
new-roc-project
|
||||
roc-projects
|
||||
|
||||
# rust cache (sccache folder)
|
||||
sccache_dir
|
||||
|
|
30  AUTHORS
@@ -65,6 +65,7 @@ Mats Sigge <<mats.sigge@gmail.com>>
|
|||
Drew Lazzeri <dlazzeri1@gmail.com>
|
||||
Tom Dohrmann <erbse.13@gmx.de>
|
||||
Elijah Schow <elijah.schow@gmail.com>
|
||||
Emi Simpson <emi@alchemi.dev>
|
||||
Derek Gustafson <degustaf@gmail.com>
|
||||
Philippe Vinchon <p.vinchon@gmail.com>
|
||||
Pierre-Henri Trivier <phtrivier@yahoo.fr>
|
||||
|
@@ -74,17 +75,21 @@ Ananda Umamil <zweimach@zweimach.org>
|
|||
SylvanSign <jake.d.bray@gmail.com>
|
||||
Nikita Mounier <36044205+nikitamounier@users.noreply.github.com>
|
||||
Cai Bingjun <62678643+C-BJ@users.noreply.github.com>
|
||||
Kevin Gillette <kgillette628@gmail.com>
|
||||
Jared Cone <jared.cone@gmail.com>
|
||||
Sean Hagstrom <sean@seanhagstrom.com>
|
||||
Kas Buunk <kasbuunk@icloud.com>
|
||||
Kas Buunk <kasbuunk@icloud.com>
|
||||
Tommy Graves <tommy@rwx.com>
|
||||
Oskar Hahn <mail@oshahn.de>
|
||||
Nuno Ferreira <nunogcferreira@gmail.com>
|
||||
Jonas Schell <jonasschell@ocupe.org>
|
||||
Mfon Eti-mfon <mfonetimfon@gmail.com>
|
||||
Drake Bennion <drake.bennion@gmail.com>
|
||||
Hashi364 <49736221+Kiyoshi364@users.noreply.github.com>
|
||||
Jared Forsyth <jared@jaredforsyth.com>
|
||||
Patrick Kilgore <git@pck.email>
|
||||
Marten/Qqwy <w-m@wmcode.nl>
|
||||
Tobias Steckenborn <tobias.steckenborn@consolvis.de>
|
||||
Christoph Rüßler <christoph.ruessler@mailbox.org>
|
||||
Ralf Engbers <raleng@users.noreply.github.com>
|
||||
Mostly Void <7rat13@gmail.com>
|
||||
|
@@ -94,4 +99,25 @@ David A. Kunz <david.kunz@sap.com>
|
|||
Paul Young <84700+paulyoung@users.noreply.github.com>
|
||||
Rod <randomer@users.noreply.github.com>
|
||||
Marko Vujanic <crashxx@gmail.com>
|
||||
kilianv <r0754877>
|
||||
KilianVounckx <kilianvounckx@hotmail.be>
|
||||
David Dunn <26876072+doubledup@users.noreply.github.com>
|
||||
Jelle Besseling <jelle@pingiun.com>
|
||||
isaacthefallenapple <isaacthefallenapple@gmail.com>
|
||||
Bryce Miller <sandprickle@users.noreply.github.com>
|
||||
Bjørn Madsen <bm@aeons.dk>
|
||||
Vilem <17603372+buggymcbugfix@users.noreply.github.com>
|
||||
J Teeuwissen <jelleteeuwissen@hotmail.nl>
|
||||
Matthieu Pizenberg <matthieu.pizenberg@gmail.com>
|
||||
rezzaghi <lbrezzaghi@gmail.com>
|
||||
João Mota <jackthemotorcycle@gmail.com>
|
||||
Marcos Prieto <marcospri@gmail.com>
|
||||
Prajwal S N <prajwalnadig21@gmail.com>
|
||||
Christopher Duncan <chris.duncan.arauz+git@protonmail.com>
|
||||
Luke Boswell <lukewilliamboswell@gmail.com>
|
||||
Luca Cervello <luca.cervello@gmail.com>
|
||||
Josh Mak <joshmak@berkeley.edu>
|
||||
Travis Staloch <twostepted@gmail.com>
|
||||
Nick Gravgaard <nick@nickgravgaard.com>
|
||||
Keerthana Kasthuril <76804118+keerthanak-tw@users.noreply.github.com>
|
||||
Salman Shaik <salmansiddiq.shaik@gmail.com>
|
||||
Austin Clements <austinclementsbass@gmail.com>
|
||||
|
|
|
@@ -1,13 +1,12 @@
|
|||
# Building the Roc compiler from source
|
||||
|
||||
Installation should be a smooth process, let us now if anything does not work perfectly on [Roc Zulip](https://roc.zulipchat.com) or by creating an issue.
|
||||
If you run into any problems getting Roc built from source, please ask for help in the `#beginners` channel on [Roc Zulip](https://roc.zulipchat.com) (the fastest way), or create an issue in this repo!
|
||||
|
||||
## Using Nix
|
||||
|
||||
We highly recommend Using [nix](https://nixos.org/download.html) to quickly install all dependencies necessary to build roc.
|
||||
On macOS and Linux, we highly recommend using [nix](https://nixos.org/download.html) to quickly install all dependencies necessary to build roc.
|
||||
|
||||
> See issue [#3863](https://github.com/roc-lang/roc/issues/3863) if you encounter "version GLIBC_2.34 not found".
|
||||
> This error can occur if you ran `cargo build` in the same folder without nix.
|
||||
:warning: If you tried to run `cargo` in the repo folder before installing nix, make sure to execute `cargo clean` first. To prevent you from executing `cargo` outside of nix, tools like [direnv](https://github.com/nix-community/nix-direnv) and [lorri](https://github.com/nix-community/lorri) can put you in a nix shell automatically when you `cd` into the directory.
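
In other words, if the repo already contains build artifacts from a non-nix `cargo` run, clear them out before entering the nix shell described below:

```sh
cargo clean   # remove artifacts from builds done outside of nix
nix develop   # then re-enter the nix shell and build from a clean slate
```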
|
||||
|
||||
### On Linux x86_64/aarch64 or MacOS aarch64/arm64/x86_64
|
||||
|
||||
|
@@ -16,17 +15,22 @@ We highly recommend Using [nix](https://nixos.org/download.html) to quickly inst
|
|||
If you are running ArchLinux or a derivative like Manjaro, you'll need to run `sudo sysctl -w kernel.unprivileged_userns_clone=1` before installing nix.
|
||||
|
||||
Install nix (not necessary on NixOS):
|
||||
|
||||
- If you are using WSL (Windows subsystem for Linux):
|
||||
|
||||
```sh
|
||||
sh <(curl -L https://nixos.org/nix/install) --no-daemon
|
||||
```
|
||||
|
||||
- For everything else:
|
||||
|
||||
```sh
|
||||
sh <(curl -L https://nixos.org/nix/install) --daemon
|
||||
```
|
||||
|
||||
Open a new terminal and install nixFlakes in your environment:
|
||||
```
|
||||
nix-env -iA nixpkgs.nixFlakes
|
||||
```
|
||||
Open a new terminal and edit either `~/.config/nix/nix.conf` or `/etc/nix/nix.conf` and add:
|
||||
|
||||
Edit either `~/.config/nix/nix.conf` or `/etc/nix/nix.conf` and add:
|
||||
```
|
||||
```text
|
||||
experimental-features = nix-command flakes
|
||||
```
|
||||
|
||||
|
@@ -36,9 +40,11 @@ If you don't know how to do this, restarting your computer will also do the job.
|
|||
#### Usage
|
||||
|
||||
Now with nix set up, you just need to run one command from the roc project root directory:
|
||||
```
|
||||
|
||||
```sh
|
||||
nix develop
|
||||
```
|
||||
|
||||
You should be in a shell with everything needed to build already installed.
|
||||
Use `cargo run help` to see all subcommands.
|
||||
To use the `repl` subcommand, execute `cargo run repl`.
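
For example, a first session inside the nix shell could look like this (the example program path is only an illustration):

```sh
cargo run help                        # list all roc subcommands
cargo run repl                        # start the interactive repl
cargo run -- examples/helloWorld.roc  # build and run a Roc program
```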
|
||||
|
@ -57,7 +63,8 @@ The editor is a :construction:WIP:construction: and not ready yet to replace you
|
|||
`cargo run edit` should work on NixOS and MacOS. If you use Linux x86_64, follow the instructions below.
|
||||
|
||||
If you're not already in a nix shell, execute `nix develop` at the root of the repo folder and then execute:
|
||||
```
|
||||
|
||||
```sh
|
||||
nixVulkanIntel cargo run edit
|
||||
```
|
||||
|
||||
|
@@ -70,16 +77,16 @@ That will help us improve this document for everyone who reads it in the future!
|
|||
|
||||
To build the compiler, you need these installed:
|
||||
|
||||
* [Zig](https://ziglang.org/), see below for version
|
||||
* `libxkbcommon` - macOS seems to have it already; on Ubuntu or Debian you can get it with `apt-get install libxkbcommon-dev`
|
||||
* On Debian/Ubuntu `sudo apt-get install pkg-config`
|
||||
* LLVM, see below for version
|
||||
* [rust](https://rustup.rs/)
|
||||
* Also run `cargo install bindgen` after installing rust. You may need to open a new terminal.
|
||||
- [Zig](https://ziglang.org/), see below for version
|
||||
- `libxkbcommon` - macOS seems to have it already; on Ubuntu or Debian you can get it with `apt-get install libxkbcommon-dev`
|
||||
- On Debian/Ubuntu `sudo apt-get install pkg-config`
|
||||
- LLVM, see below for version
|
||||
- [rust](https://rustup.rs/)
|
||||
- Also run `cargo install bindgen` after installing rust. You may need to open a new terminal.
|
||||
|
||||
To run the test suite (via `cargo test`), you additionally need to install:
|
||||
|
||||
* [`valgrind`](https://www.valgrind.org/) (needs special treatment to [install on macOS](https://stackoverflow.com/a/61359781)
|
||||
- [`valgrind`](https://www.valgrind.org/) (needs special treatment to [install on macOS](https://stackoverflow.com/a/61359781))
|
||||
Alternatively, you can use `cargo test --no-fail-fast` or `cargo test -p specific_tests` to skip over the valgrind failures & tests.
|
||||
|
||||
For debugging LLVM IR, we use [DebugIR](https://github.com/vaivaswatha/debugir). This dependency is only required to build with the `--debug` flag, and for normal development you should be fine without it.
|
||||
|
@@ -88,7 +95,7 @@ For debugging LLVM IR, we use [DebugIR](https://github.com/vaivaswatha/debugir).
|
|||
|
||||
You may see an error like this during builds:
|
||||
|
||||
```
|
||||
```text
|
||||
/usr/bin/ld: cannot find -lxcb-render
|
||||
/usr/bin/ld: cannot find -lxcb-shape
|
||||
/usr/bin/ld: cannot find -lxcb-xfixes
|
||||
|
@@ -96,35 +103,42 @@ You may see an error like this during builds:
|
|||
|
||||
If so, you can fix it like so:
|
||||
|
||||
```
|
||||
```sh
|
||||
sudo apt-get install libxcb-render0-dev libxcb-shape0-dev libxcb-xfixes0-dev
|
||||
```
|
||||
|
||||
### Zig
|
||||
|
||||
**version: 0.9.1**
|
||||
|
||||
For any OS, you can use [`zigup`](https://github.com/marler8997/zigup) to manage zig installations.
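
For example, a sketch with `zigup` (the subcommand names are assumptions about zigup's CLI, not taken from this document):

```sh
zigup fetch 0.9.1     # download Zig 0.9.1 (assumes a `fetch` subcommand)
zigup default 0.9.1   # make it the active `zig` on PATH (assumes a `default` subcommand)
zig version           # should print 0.9.1
```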
|
||||
|
||||
If you prefer a package manager, you can try the following:
|
||||
- For MacOS, you can install with `brew install zig`
|
||||
|
||||
- For MacOS, you can install with `brew install zig@0.9.1`
|
||||
- For Ubuntu, you can use Snap: install with `snap install zig --classic --beta`
|
||||
- For other systems, check out this [page](https://github.com/ziglang/zig/wiki/Install-Zig-from-a-Package-Manager)
|
||||
|
||||
If you want to install it manually, you can also download Zig directly [here](https://ziglang.org/download/). Just make sure you download the right version; the bleeding-edge master build is the first download link on this page.
|
||||
|
||||
> WINDOWS NOTE: when you unpack the Zig archive on Windows, the result is nested in an extra directory. The instructions on the zig website will seem to not work. So, double-check that the path to the zig executable does not include the same directory name twice.
|
||||
|
||||
### LLVM
|
||||
|
||||
**version: 13.0.x**
|
||||
|
||||
For macOS, you can install LLVM 13 using `brew install llvm@13` and then adding
|
||||
`$(brew --prefix llvm@13)/bin` to your `PATH`. You can confirm this worked by
|
||||
running `llc --version` - it should mention "LLVM version 13.0.0" at the top.
|
||||
running `llc --version` - it should mention "LLVM version 13.0.1" at the top.
|
||||
You may also need to manually specify a prefix env var like so:
|
||||
```
|
||||
|
||||
```sh
|
||||
export LLVM_SYS_130_PREFIX=/usr/local/opt/llvm@13
|
||||
```
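
Putting the macOS steps together, a minimal sketch (this assumes Homebrew's default prefix; check `brew --prefix llvm@13` on your machine):

```sh
export PATH="$(brew --prefix llvm@13)/bin:$PATH"
export LLVM_SYS_130_PREFIX="$(brew --prefix llvm@13)"
llc --version   # should mention LLVM version 13.0.x near the top
```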
|
||||
|
||||
For Ubuntu and Debian:
|
||||
```
|
||||
|
||||
```sh
|
||||
sudo apt -y install lsb-release software-properties-common gnupg
|
||||
wget https://apt.llvm.org/llvm.sh
|
||||
chmod +x llvm.sh
|
||||
|
@@ -134,11 +148,11 @@ chmod +x llvm.sh
|
|||
If you use this script, you'll need to add `clang` to your `PATH`.
|
||||
By default, the script installs it as `clang-13`. You can address this with symlinks like so:
|
||||
|
||||
```
|
||||
```sh
|
||||
sudo ln -s /usr/bin/clang-13 /usr/bin/clang
|
||||
```
|
||||
|
||||
There are also alternative installation options at http://releases.llvm.org/download.html
|
||||
There are also alternative installation options at <http://releases.llvm.org/download.html>
|
||||
|
||||
[Troubleshooting](#troubleshooting)
|
||||
|
||||
|
@ -150,18 +164,18 @@ To use the `repl` subcommand, execute `cargo run repl`.
|
|||
|
||||
### LLVM installation on Linux
|
||||
|
||||
For a current list of all dependency versions and their names in apt, see the Earthfile.
|
||||
|
||||
On some Linux systems we've seen the error "failed to run custom build command for x11".
|
||||
On Ubuntu, running `sudo apt install pkg-config cmake libx11-dev` fixed this.
|
||||
|
||||
If you encounter `cannot find -lz` run `sudo apt install zlib1g-dev`.
|
||||
|
||||
If you encounter:
|
||||
```
|
||||
|
||||
```text
|
||||
error: No suitable version of LLVM was found system-wide or pointed
|
||||
to by LLVM_SYS_130_PREFIX.
|
||||
```
|
||||
|
||||
Add `export LLVM_SYS_130_PREFIX=/usr/lib/llvm-13` to your `~/.bashrc` or equivalent file for your shell.
|
||||
|
||||
### LLVM installation on macOS
|
||||
|
@@ -170,7 +184,7 @@ If installing LLVM fails, it might help to run `sudo xcode-select -r` before ins
|
|||
|
||||
It might also be useful to add these exports to your shell:
|
||||
|
||||
```
|
||||
```sh
|
||||
export LDFLAGS="-L/usr/local/opt/llvm/lib -Wl,-rpath,/usr/local/opt/llvm/lib"
|
||||
export CPPFLAGS="-I/usr/local/opt/llvm/include"
|
||||
```
|
||||
|
@@ -178,24 +192,24 @@ export CPPFLAGS="-I/usr/local/opt/llvm/include"
|
|||
### LLVM installation on Windows
|
||||
|
||||
**Warning** While `cargo build` works on windows, linking roc programs does not yet, see issue #2608. This also means the repl, the editor and many tests will not work on windows.
|
||||
Installing LLVM's prebuilt binaries doesn't seem to be enough for the `llvm-sys` crate that Roc depends on, so I had to follow the steps below:
|
||||
The official LLVM pre-built binaries for Windows lack features that roc needs. Instead:
|
||||
|
||||
1. I downloaded and installed [Build Tools for Visual Studio 2019](https://visualstudio.microsoft.com/thank-you-downloading-visual-studio/?sku=BuildTools&rel=16) (a full Visual Studio install should work too; the Build Tools are just the CLI tools, which is all I wanted)
|
||||
1. Download the custom LLVM 7z archive [here](https://github.com/PLC-lang/llvm-package-windows/releases/tag/v13.0.0).
|
||||
1. Download the custom LLVM 7z archive [here](https://github.com/roc-lang/llvm-package-windows/releases/download/v13.0.1/LLVM-13.0.1-win64.7z).
|
||||
1. [Download 7-zip](https://www.7-zip.org/) to be able to extract this archive.
|
||||
1. Extract the 7z file to where you want to permanently keep the folder.
|
||||
1. In powershell, set the `LLVM_SYS_130_PREFIX` environment variable (check [here](https://docs.microsoft.com/en-us/powershell/module/microsoft.powershell.core/about/about_environment_variables?view=powershell-7.2#saving-changes-to-environment-variables) to make this a permanent environment variable):
|
||||
```
|
||||
[Environment]::SetEnvironmentVariable(
|
||||
"Path",
|
||||
[Environment]::GetEnvironmentVariable("Path", "User") + ";C:\Users\anton\Downloads\LLVM-13.0.0-win64\bin",
|
||||
"User"
|
||||
)
|
||||
```
|
||||
1. Extract the 7z file to where you want to permanently keep the folder. We recommend you pick a path without any spaces in it.
|
||||
1. In powershell, set the `LLVM_SYS_130_PREFIX` environment variable (check [here](https://docs.microsoft.com/en-us/powershell/module/microsoft.powershell.core/about/about_environment_variables?view=powershell-7.2#saving-environment-variables-with-the-system-control-panel) to make this a permanent environment variable):
|
||||
|
||||
```text
|
||||
<# ! Replace YOUR_USERNAME ! #>
|
||||
$env:LLVM_SYS_130_PREFIX = 'C:\Users\YOUR_USERNAME\Downloads\LLVM-13.0.1-win64'
|
||||
```
|
||||
|
||||
Once all that was done, `cargo build` ran successfully for Roc!
|
||||
|
||||
#### Build issues on Windows
|
||||
|
||||
If you see the build failing because some internal file is not available, it might be your anti-virus program. Cargo's behavior is kind of similar to a virus (downloading files from the internet, creating many files), and this has been known to cause problems.
|
||||
|
||||
### Build speed on WSL/WSL2
|
||||
|
||||
If your Roc project folder is in the Windows filesystem but you're compiling from Linux, rebuilds may be as much as 20x slower than they should be!
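
A common workaround (an assumption here, not spelled out in this excerpt) is to keep the checkout on the Linux side of the filesystem instead of under `/mnt/c`, for example:

```sh
# paths are illustrative
mv /mnt/c/Users/you/roc ~/roc
cd ~/roc
cargo build
```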
|
||||
|
@@ -208,7 +222,7 @@ makes build times a lot faster, and I highly recommend it.
|
|||
|
||||
Create `~/.cargo/config.toml` if it does not exist and add this to it:
|
||||
|
||||
```
|
||||
```toml
|
||||
[build]
|
||||
# Link with lld, per https://github.com/rust-lang/rust/issues/39915#issuecomment-538049306
|
||||
# Use target-cpu=native, per https://deterministic.space/high-performance-rust.html
|
||||
|
|
|
@@ -8,20 +8,20 @@ In the interest of fostering an open and welcoming environment, we as participan
|
|||
|
||||
Examples of behavior that contributes to creating a positive environment include:
|
||||
|
||||
* Demonstrating empathy and kindness toward other people
|
||||
* Being respectful of differing opinions, viewpoints, and experiences
|
||||
* Kindly giving and gracefully accepting constructive feedback
|
||||
* Accepting responsibility and apologizing to those affected by our mistakes, and learning from the experience
|
||||
* Focusing on what is best not just for us as individuals, but for the overall
|
||||
- Demonstrating empathy and kindness toward other people
|
||||
- Being respectful of differing opinions, viewpoints, and experiences
|
||||
- Kindly giving and gracefully accepting constructive feedback
|
||||
- Accepting responsibility and apologizing to those affected by our mistakes, and learning from the experience
|
||||
- Focusing on what is best not just for us as individuals, but for the overall
|
||||
community
|
||||
|
||||
Examples of unacceptable behavior include:
|
||||
|
||||
* The use of sexualized language or imagery, and sexual attention or advances of any kind
|
||||
* Trolling, insulting or derogatory comments, and personal or political attacks
|
||||
* Public or private harassment
|
||||
* Publishing others' private information, such as a physical or email address, without their explicit permission
|
||||
* Telling others to be less sensitive, or that they should not feel hurt or offended by something
|
||||
- The use of sexualized language or imagery, and sexual attention or advances of any kind
|
||||
- Trolling, insulting or derogatory comments, and personal or political attacks
|
||||
- Public or private harassment
|
||||
- Publishing others' private information, such as a physical or email address, without their explicit permission
|
||||
- Telling others to be less sensitive, or that they should not feel hurt or offended by something
|
||||
|
||||
## Enforcement Responsibilities
|
||||
|
||||
|
@@ -41,4 +41,4 @@ Moderators who do not follow or enforce the Code of Conduct in good faith may fa
|
|||
|
||||
## Attribution
|
||||
|
||||
This Code of Conduct is adapted from the Contributor Covenant, version 1.4, available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
|
||||
This Code of Conduct is adapted from the Contributor Covenant, version 1.4, available at <https://www.contributor-covenant.org/version/1/4/code-of-conduct.html>
|
||||
|
|
|
@@ -2,7 +2,20 @@
|
|||
|
||||
## Code of Conduct
|
||||
|
||||
We are committed to providing a friendly, safe and welcoming environment for all. Make sure to take a look at the [Code of Conduct](CodeOfConduct.md)!
|
||||
We are committed to providing a friendly, safe and welcoming environment for all. Make sure to take a look at the [Code of Conduct](CODE_OF_CONDUCT.md)!
|
||||
|
||||
## How to contribute
|
||||
|
||||
All contributions are appreciated! Typo fixes, bug fixes, feature requests,
|
||||
and bug reports are all helpful for the project.
|
||||
|
||||
If you are looking for a good place to start, consider reaching out on the `#contributing` channel on [Roc Zulip][roc-zulip].
|
||||
Before making your first pull request, definitely talk to an existing contributor on [Roc Zulip][roc-zulip] first about what you plan to do! This can not only avoid duplicated effort, it can also avoid making a whole PR only to discover it won't be accepted because the change doesn't fit with the goals of the language's design or implementation.
|
||||
|
||||
If you are interested in larger, implementation- or research-heavy projects
|
||||
related to Roc, check out [Roc Project Ideas][project-ideas] and reach out to us
|
||||
on Zulip! These projects may be suitable for academic theses, independent
|
||||
research, or even just valuable projects to learn from and improve Roc with.
|
||||
|
||||
## Building from Source
|
||||
|
||||
|
@@ -10,30 +23,70 @@ Check [Building from source](BUILDING_FROM_SOURCE.md) for instructions.
|
|||
|
||||
## Running Tests
|
||||
|
||||
Most contributors execute the following commands befor pushing their code:
|
||||
```
|
||||
Most contributors execute the following commands before pushing their code:
|
||||
|
||||
```sh
|
||||
cargo test
|
||||
cargo fmt --all -- --check
|
||||
cargo clippy --workspace --tests -- --deny warnings
|
||||
```
|
||||
|
||||
Execute `cargo fmt --all` to fix the formatting.
|
||||
|
||||
## Generating Docs
|
||||
|
||||
If you make changes to [Roc's Standard Library](https://www.roc-lang.org/builtins/Str), you can add comments to the code following [the CommonMark Spec](https://spec.commonmark.org/current/) to further explain your intentions. You can view these changes locally with:
|
||||
|
||||
```sh
|
||||
cargo run docs crates/compiler/builtins/roc
|
||||
```
|
||||
|
||||
This command will generate the documentation in the [`generated-docs`](generated-docs) directory.
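
The generated docs are static files, so any static file server can preview them; for example (the server and port are just an illustration, not part of the roc tooling):

```sh
cargo run docs crates/compiler/builtins/roc
python3 -m http.server 8080 --directory generated-docs
# then open http://localhost:8080 in a browser
```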
|
||||
|
||||
## Contribution Tips
|
||||
|
||||
- If you've never made a pull request on github before, [this](https://www.freecodecamp.org/news/how-to-make-your-first-pull-request-on-github-3/) will be a good place to start.
|
||||
- Create an issue if the purpose of a struct/field/type/function/... is not immediately clear from its name or nearby comments.
|
||||
- You can find good first issues [here](https://github.com/roc-lang/roc/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22).
|
||||
- Before making your first pull request, definitely talk to an existing contributor on [Roc Zulip](https://roc.zulipchat.com) first about what you plan to do! This can not only avoid duplicated effort, it can also avoid making a whole PR only to discover it won't be accepted because the change doesn't fit with the goals of the language's design or implementation.
|
||||
- You can find good first issues [here][good-first-issues]. Once you have gained some experience you can take a look at the [intermediate issues](https://github.com/roc-lang/roc/issues?q=is%3Aopen+is%3Aissue+label%3A%22intermediate+issue%22).
|
||||
- [Fork](https://github.com/roc-lang/roc/fork) the repo so that you can apply your changes first on your own copy of the roc repo.
|
||||
- It's a good idea to open a draft pull request as you begin working on something. This way, others can see that you're working on it, which avoids duplicate effort, and others can give feedback sooner rather than later if they notice a problem in the direction things are going. Click the button "ready for review" when it's ready.
|
||||
- All your commits need to be signed to prevent impersonation:
|
||||
1. If you have a Yubikey, follow [guide 1](https://dev.to/paulmicheli/using-your-yubikey-to-get-started-with-gpg-3h4k), [guide 2](https://dev.to/paulmicheli/using-your-yubikey-for-signed-git-commits-4l73) and skip the steps below.
|
||||
2. [Make a key to sign your commits.](https://docs.github.com/en/authentication/managing-commit-signature-verification/generating-a-new-gpg-key).
|
||||
3. [Configure git to use your key.](https://docs.github.com/en/authentication/managing-commit-signature-verification/telling-git-about-your-signing-key)
|
||||
4. Make git sign your commits automatically:
|
||||
```
|
||||
git config --global commit.gpgsign true
|
||||
```
|
||||
- All your commits need to be signed [to prevent impersonation](https://dev.to/martiliones/how-i-got-linus-torvalds-in-my-contributors-on-github-3k4g):
|
||||
- If you don't have signing set up on your device and you only want to change a single file, it will be easier to use [github's edit button](https://docs.github.com/en/repositories/working-with-files/managing-files/editing-files). This will sign your commit automatically.
|
||||
- For multi-file or complex changes you will want to set up signing on your device:
|
||||
1. If you have a Yubikey, follow [guide 1](https://dev.to/paulmicheli/using-your-yubikey-to-get-started-with-gpg-3h4k), [guide 2](https://dev.to/paulmicheli/using-your-yubikey-for-signed-git-commits-4l73) and skip the steps below.
|
||||
2. [Make a key to sign your commits.](https://docs.github.com/en/authentication/managing-commit-signature-verification/generating-a-new-gpg-key)
|
||||
3. [Configure git to use your key.](https://docs.github.com/en/authentication/managing-commit-signature-verification/telling-git-about-your-signing-key)
|
||||
4. Make git sign your commits automatically:
|
||||
|
||||
```sh
|
||||
git config --global commit.gpgsign true
|
||||
```
|
||||
|
||||
### Forgot to sign commits?
|
||||
|
||||
You can find which commits need to be signed by running `git log --show-signature`.
|
||||
|
||||
If you have only one commit, running `git commit --amend --no-edit -S` would sign the latest commit 🚀.
|
||||
|
||||
In case you have multiple commits, you can sign them in two ways:
|
||||
1. Switching to interactive rebase mode and editing the file:
|
||||
- Enter interactive mode by running `git rebase -i HEAD~n`, where `n` is the number of commits up to the most current commit you would like to see.
|
||||
- This would display a set of commits in a text file like below:
|
||||
```
|
||||
pick hash2 commit message 2
|
||||
pick hash1 commit message 1
|
||||
```
|
||||
- After every commit you want to sign, add `exec git commit --amend --no-edit -S`.
|
||||
2. Or run git rebase recursively:
|
||||
- Find the oldest commit you want to sign, using the `git log --show-signature` command.
|
||||
- Run the command `git rebase --exec 'git commit --amend --no-edit -n -S' -i HASH` which would sign all commits up to commit `HASH`.
|
||||
|
||||
If you already pushed unsigned commits, you may have to do a force push with `git push origin -f <branch_name>`.
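
Putting the second approach together, signing the last few commits and updating an already-pushed branch looks roughly like this (the branch name and commit count are placeholders):

```sh
# sign every commit from HEAD~3 up to HEAD; assumes GPG signing is already configured
git rebase --exec 'git commit --amend --no-edit -n -S' -i HEAD~3
# the history was rewritten, so the branch has to be force pushed
git push origin -f my-branch
```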
|
||||
|
||||
## Can we do better?
|
||||
|
||||
Feel free to open an issue if you think this document can be improved or is unclear in any way.
|
||||
|
||||
[roc-zulip]: https://roc.zulipchat.com
|
||||
[good-first-issues]: https://github.com/roc-lang/roc/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22
|
||||
[project-ideas]: https://docs.google.com/document/d/1mMaxIi7vxyUyNAUCs98d68jYj6C9Fpq4JIZRU735Kwg/edit?usp=sharing
|
||||
|
|
479  Cargo.lock  generated  (file diff suppressed because it is too large)
114  Cargo.toml
@@ -1,39 +1,7 @@
|
|||
[workspace]
|
||||
members = [
|
||||
"crates/compiler/ident",
|
||||
"crates/compiler/region",
|
||||
"crates/compiler/collections",
|
||||
"crates/compiler/exhaustive",
|
||||
"crates/compiler/module",
|
||||
"crates/compiler/parse",
|
||||
"crates/compiler/can",
|
||||
"crates/compiler/problem",
|
||||
"crates/compiler/solve_problem",
|
||||
"crates/compiler/types",
|
||||
"crates/compiler/builtins",
|
||||
"crates/compiler/constrain",
|
||||
"crates/compiler/unify",
|
||||
"crates/compiler/solve",
|
||||
"crates/compiler/late_solve",
|
||||
"crates/compiler/fmt",
|
||||
"crates/compiler/derive_key",
|
||||
"crates/compiler/mono",
|
||||
"crates/compiler/alias_analysis",
|
||||
"crates/compiler/test_mono",
|
||||
"crates/compiler/test_derive",
|
||||
"crates/compiler/load",
|
||||
"crates/compiler/load_internal",
|
||||
"crates/compiler/gen_llvm",
|
||||
"crates/compiler/gen_dev",
|
||||
"crates/compiler/gen_wasm",
|
||||
"crates/compiler/build",
|
||||
"crates/compiler/arena_pool",
|
||||
"crates/compiler/test_gen",
|
||||
"crates/compiler/roc_target",
|
||||
"crates/compiler/debug_flags",
|
||||
"crates/vendor/inkwell",
|
||||
"crates/vendor/pathfinding",
|
||||
"crates/vendor/pretty",
|
||||
"crates/compiler/*",
|
||||
"crates/vendor/*",
|
||||
"crates/glue",
|
||||
"crates/editor",
|
||||
"crates/ast",
|
||||
|
@@ -56,13 +24,15 @@ members = [
|
|||
"crates/wasi-libc-sys",
|
||||
]
|
||||
exclude = [
|
||||
"ci/benchmarks/bench-runner",
|
||||
# Examples sometimes have Rust hosts in their platforms. The compiler should ignore those.
|
||||
"crates/cli_testing_examples",
|
||||
"examples",
|
||||
"ci/bench-runner",
|
||||
# Ignore building these normally. They are only imported by tests.
|
||||
# The tests will still correctly build them.
|
||||
"crates/cli_utils",
|
||||
"crates/compiler/test_mono_macros",
|
||||
"crates/compiler/str",
|
||||
# `cargo build` would cause roc_std to be built with default features which errors on windows
|
||||
"crates/roc_std",
|
||||
]
|
||||
|
@@ -73,6 +43,80 @@ exclude = [
|
|||
# workspace, and without `resolver = "2"` here, you can't use `-p` like this.
|
||||
resolver = "2"
|
||||
|
||||
[workspace.dependencies]
|
||||
# NOTE: roc-lang/inkwell is a fork of TheDan64/inkwell which does not change anything.
|
||||
#
|
||||
# The reason for this fork is that the way Inkwell is designed, you have to use
|
||||
# a particular branch (e.g. "llvm8-0") in Cargo.toml. That would be fine, except that
|
||||
# breaking changes get pushed directly to that branch, which breaks our build
|
||||
# without warning.
|
||||
#
|
||||
# We tried referencing a specific rev on TheDan64/inkwell directly (instead of branch),
|
||||
# but although that worked locally, it did not work on GitHub Actions. (After a few
|
||||
# hours of investigation, gave up trying to figure out why.) So this is the workaround:
|
||||
# having an immutable tag on the roc-lang/inkwell fork which points to
|
||||
# a particular "release" of Inkwell.
|
||||
#
|
||||
# When we want to update Inkwell, we can sync up roc-lang/inkwell to the latest
|
||||
# commit of TheDan64/inkwell, push a new tag which points to the latest commit,
|
||||
# change the tag value in this Cargo.toml to point to that tag, and `cargo update`.
|
||||
# This way, GitHub Actions works and nobody's builds get broken.
|
||||
inkwell = { git = "https://github.com/roc-lang/inkwell", branch = "master", features = [ "llvm13-0" ] }
|
||||
|
||||
arrayvec = "0.7.2"
|
||||
bincode = "1.3.3"
|
||||
bitvec = "1.0.1"
|
||||
bumpalo = { version = "3.11.0", features = ["collections"] }
|
||||
capstone = "0.11.0"
|
||||
clap = { version = "3.2.20", default-features = false, features = ["std", "color", "suggestions"] }
|
||||
const_format = { version = "0.2.23", features = ["const_generics"] }
|
||||
criterion = { git = "https://github.com/Anton-4/criterion.rs", features = ["html_reports"]}
|
||||
crossbeam = "0.8.2"
|
||||
distance = "0.4.0"
|
||||
encode_unicode = "1.0.0"
|
||||
errno = "0.2.8"
|
||||
fnv = "1.0.7"
|
||||
hashbrown = { version = "0.12.3", features = [ "bumpalo" ] }
|
||||
iced-x86 = { version = "1.15.0", default-features = false, features = ["std", "decoder", "op_code_info", "instr_info"] }
|
||||
im = "15.0.0"
|
||||
im-rc = "15.0.0"
|
||||
indoc = "1.0.7"
|
||||
insta = "1.20.0"
|
||||
lazy_static = "1.4.0"
|
||||
libc = "0.2.135"
|
||||
libloading = "0.7.1"
|
||||
mach_object = "0.1"
|
||||
maplit = "1.0.2"
|
||||
memmap2 = "0.5.7"
|
||||
mimalloc = { version = "0.1.26", default-features = false }
|
||||
packed_struct = "0.10.0"
|
||||
page_size = "0.4.2"
|
||||
parking_lot = "0.12"
|
||||
peg = "0.8.1"
|
||||
pretty_assertions = "1.3.0"
|
||||
quickcheck = "1.0.3"
|
||||
quickcheck_macros = "1.0.0"
|
||||
regex = "1.5.5"
|
||||
rustyline = {git = "https://github.com/roc-lang/rustyline", rev = "e74333c"}
|
||||
rustyline-derive = {git = "https://github.com/roc-lang/rustyline", rev = "e74333c"}
|
||||
serde = { version = "1.0.144", features = ["derive"] }
|
||||
signal-hook = "0.3.14"
|
||||
snafu = { version = "0.7.1", features = ["backtraces"] }
|
||||
static_assertions = "1.1.0"
|
||||
strip-ansi-escapes = "0.1.1"
|
||||
strum = { version = "0.24.1", features = ["derive"] }
|
||||
target-lexicon = "0.12.3"
|
||||
tempfile = "3.2.0"
|
||||
unicode-segmentation = "1.10.0"
|
||||
walkdir = "2.3.2"
|
||||
wasm3 = { git = "https://github.com/roc-lang/wasm3-rs", rev = "f0f807d1fc0a50d1d68e5799e54ee62c05af00f5" }
|
||||
wyhash = "0.5.0"
|
||||
|
||||
# TODO: Deal with the update of object to 0.27.
|
||||
# It looks like it breaks linking the generated objects.
|
||||
# Probably just need to specify an extra field that used to be implicit or something.
|
||||
object = { version = "0.29.0", features = ["read", "write"] }
|
||||
|
||||
# Optimizations based on https://deterministic.space/high-performance-rust.html
|
||||
[profile.release]
|
||||
lto = "thin"
|
||||
|
|
150  Earthfile
@@ -1,150 +0,0 @@
|
|||
FROM rust:1.61.0-slim-bullseye # make sure to update rust-toolchain.toml too so that everything uses the same rust version
|
||||
WORKDIR /earthbuild
|
||||
|
||||
prep-debian:
|
||||
RUN apt -y update
|
||||
|
||||
install-other-libs:
|
||||
FROM +prep-debian
|
||||
RUN apt -y install wget git
|
||||
RUN apt -y install libxcb-shape0-dev libxcb-xfixes0-dev # for editor clipboard
|
||||
RUN apt -y install libasound2-dev # for editor sounds
|
||||
RUN apt -y install libunwind-dev pkg-config libx11-dev zlib1g-dev
|
||||
RUN apt -y install unzip # for www/build.sh
|
||||
|
||||
install-zig-llvm-valgrind:
|
||||
FROM +install-other-libs
|
||||
# editor
|
||||
RUN apt -y install libxkbcommon-dev
|
||||
# zig
|
||||
RUN wget -c https://ziglang.org/download/0.9.1/zig-linux-x86_64-0.9.1.tar.xz --no-check-certificate
|
||||
RUN tar -xf zig-linux-x86_64-0.9.1.tar.xz
|
||||
RUN ln -s /earthbuild/zig-linux-x86_64-0.9.1/zig /bin/zig
|
||||
# zig builtins wasm tests
|
||||
RUN apt -y install build-essential
|
||||
RUN cargo install wasmer-cli --features "singlepass"
|
||||
RUN cargo install bindgen
|
||||
# llvm
|
||||
RUN apt -y install lsb-release software-properties-common gnupg
|
||||
RUN wget https://apt.llvm.org/llvm.sh
|
||||
RUN chmod +x llvm.sh
|
||||
RUN ./llvm.sh 13
|
||||
RUN ln -s /usr/bin/clang-13 /usr/bin/clang
|
||||
# use lld as linker
|
||||
RUN ln -s /usr/bin/lld-13 /usr/bin/ld.lld
|
||||
ENV RUSTFLAGS="-C link-arg=-fuse-ld=lld -C target-cpu=native"
|
||||
# valgrind
|
||||
RUN apt -y install valgrind
|
||||
# wasm repl & tests
|
||||
RUN rustup target add wasm32-unknown-unknown wasm32-wasi
|
||||
RUN apt -y install libssl-dev
|
||||
RUN OPENSSL_NO_VENDOR=1 cargo install wasm-pack
|
||||
# criterion
|
||||
RUN cargo install cargo-criterion
|
||||
# sccache
|
||||
RUN cargo install sccache
|
||||
RUN sccache -V
|
||||
ENV RUSTC_WRAPPER=/usr/local/cargo/bin/sccache
|
||||
ENV SCCACHE_DIR=/earthbuild/sccache_dir
|
||||
ENV CARGO_INCREMENTAL=0 # no need to recompile package when using new function
|
||||
|
||||
copy-dirs:
|
||||
FROM +install-zig-llvm-valgrind
|
||||
COPY --dir crates examples Cargo.toml Cargo.lock version.txt www ./
|
||||
|
||||
test-zig:
|
||||
FROM +install-zig-llvm-valgrind
|
||||
COPY --dir crates/compiler/builtins/bitcode ./
|
||||
RUN cd bitcode && ./run-tests.sh && ./run-wasm-tests.sh
|
||||
|
||||
build-rust-test:
|
||||
FROM +copy-dirs
|
||||
RUN echo "deb http://deb.debian.org/debian testing main contrib non-free" >> /etc/apt/sources.list # to get gcc 10.3
|
||||
RUN apt -y update
|
||||
RUN apt -y install gcc-10 g++-10 && rm /usr/bin/gcc && ln -s /usr/bin/gcc-10 /usr/bin/gcc # gcc-9 maybe causes segfault
|
||||
RUN gcc --version
|
||||
RUN --mount=type=cache,target=$SCCACHE_DIR \
|
||||
cargo test --locked --release --features with_sound serde --workspace --no-run && sccache --show-stats
|
||||
|
||||
check-typos:
|
||||
RUN cargo install typos-cli --version 1.0.11 # version set to prevent confusion if the version is updated automatically
|
||||
COPY --dir .github ci crates examples nightly_benches www *.md LEGAL_DETAILS flake.nix version.txt ./
|
||||
RUN typos
|
||||
|
||||
test-rust:
|
||||
FROM +build-rust-test
|
||||
ENV ROC_WORKSPACE_DIR=/earthbuild
|
||||
ENV RUST_BACKTRACE=1
|
||||
# for race condition problem with cli test
|
||||
ENV ROC_NUM_WORKERS=1
|
||||
# run one of the benchmarks to make sure the host is compiled
|
||||
# not pre-compiling the host can cause race conditions
|
||||
RUN gcc --version
|
||||
RUN echo "4" | cargo run --release examples/benchmarks/NQueens.roc
|
||||
RUN --mount=type=cache,target=$SCCACHE_DIR \
|
||||
cargo test --locked --release --features with_sound serde --workspace && sccache --show-stats
|
||||
# test the dev and wasm backend: they require an explicit feature flag.
|
||||
RUN --mount=type=cache,target=$SCCACHE_DIR \
|
||||
cargo test --locked --release --package test_gen --no-default-features --features gen-dev && sccache --show-stats
|
||||
# gen-wasm has some multithreading problems to do with the wasmer runtime. Run it single-threaded as a separate job
|
||||
RUN --mount=type=cache,target=$SCCACHE_DIR \
|
||||
cargo test --locked --release --package test_gen --no-default-features --features gen-wasm -- --test-threads=1 && sccache --show-stats
|
||||
# run `roc test` on Str builtins
|
||||
RUN --mount=type=cache,target=$SCCACHE_DIR \
|
||||
cargo run --release -- test crates/compiler/builtins/roc/Str.roc && sccache --show-stats
|
||||
# repl_test: build the compiler for wasm target, then run the tests on native target
|
||||
RUN --mount=type=cache,target=$SCCACHE_DIR \
|
||||
crates/repl_test/test_wasm.sh && sccache --show-stats
|
||||
# run i386 (32-bit linux) cli tests
|
||||
# NOTE: disabled until zig 0.9
|
||||
# RUN echo "4" | cargo run --locked --release --features="target-x86" -- --target=x86_32 examples/benchmarks/NQueens.roc
|
||||
# RUN --mount=type=cache,target=$SCCACHE_DIR \
|
||||
# cargo test --locked --release --features with_sound serde --test cli_run i386 --features="i386-cli-run" && sccache --show-stats
|
||||
# make sure website deployment works (that is, make sure build.sh returns status code 0)
|
||||
ENV REPL_DEBUG=1
|
||||
RUN bash www/build.sh
|
||||
|
||||
|
||||
verify-no-git-changes:
|
||||
FROM +test-rust
|
||||
# If running tests caused anything to be changed or added (without being
|
||||
# included in a .gitignore somewhere), fail the build!
|
||||
#
|
||||
# How it works: the `git ls-files` command lists all the modified or
|
||||
# uncommitted files in the working tree, the `| grep -E .` command returns a
|
||||
# zero exit code if it listed any files and nonzero otherwise (which is the
|
||||
# opposite of what we want), and the `!` at the start inverts the exit code.
|
||||
RUN ! git ls-files --deleted --modified --others --exclude-standard | grep -E .
|
||||
|
||||
test-all:
|
||||
BUILD +test-zig
|
||||
BUILD +test-rust
|
||||
BUILD +verify-no-git-changes
|
||||
|
||||
build-nightly-release:
|
||||
FROM +test-rust
|
||||
COPY --dir .git LICENSE LEGAL_DETAILS ci ./
|
||||
# version.txt is used by the CLI: roc --version
|
||||
RUN ./ci/write_version.sh
|
||||
RUN RUSTFLAGS="-C target-cpu=x86-64" cargo build --features with_sound --release
|
||||
RUN ./ci/package_release.sh roc_linux_x86_64.tar.gz
|
||||
SAVE ARTIFACT ./roc_linux_x86_64.tar.gz AS LOCAL roc_linux_x86_64.tar.gz
|
||||
|
||||
# compile everything needed for benchmarks and output a self-contained dir from which benchmarks can be run.
|
||||
prep-bench-folder:
|
||||
FROM +copy-dirs
|
||||
# to make use of avx, avx2, sse2, sse4.2... instructions
|
||||
ENV RUSTFLAGS="-C link-arg=-fuse-ld=lld -C target-cpu=native"
|
||||
ARG BENCH_SUFFIX=branch
|
||||
RUN cargo criterion -V
|
||||
RUN --mount=type=cache,target=$SCCACHE_DIR cd crates/cli && cargo criterion --no-run
|
||||
RUN mkdir -p bench-folder/crates/compiler/builtins/bitcode/src
|
||||
RUN mkdir -p bench-folder/target/release/deps
|
||||
RUN mkdir -p bench-folder/examples/benchmarks
|
||||
RUN cp examples/benchmarks/*.roc bench-folder/examples/benchmarks/
|
||||
RUN cp -r examples/benchmarks/platform bench-folder/examples/benchmarks/
|
||||
RUN cp crates/compiler/builtins/bitcode/src/str.zig bench-folder/crates/compiler/builtins/bitcode/src
|
||||
RUN cp target/release/roc bench-folder/target/release
|
||||
# copy the most recent time bench to bench-folder
|
||||
RUN cp target/release/deps/`ls -t target/release/deps/ | grep time_bench | head -n 1` bench-folder/target/release/deps/time_bench
|
||||
SAVE ARTIFACT bench-folder AS LOCAL bench-folder-$BENCH_SUFFIX
|
130 FAQ.md
|
@ -1,3 +1,5 @@
|
|||
Click the ☰ button in the top left to see and search the table of contents.
|
||||
|
||||
# Frequently Asked Questions
|
||||
|
||||
## Where did the name Roc come from?
|
||||
|
@ -29,7 +31,7 @@ fantastical, and it has incredible potential for puns. Here are some different w
|
|||
|
||||
Fun fact: "roc" translates to 鹏 in Chinese, [which means](https://www.mdbg.net/chinese/dictionary?page=worddict&wdrst=0&wdqb=%E9%B9%8F) "a large fabulous bird."
|
||||
|
||||
# Why make a new editor instead of making an LSP plugin for VSCode, Vim or Emacs?
|
||||
## Why make a new editor instead of making an LSP plugin for VSCode, Vim or Emacs?
|
||||
|
||||
The Roc editor is one of the key areas where we want to innovate. Constraining ourselves to a plugin for existing editors would severely limit our possibilities for innovation.
|
||||
|
||||
|
@ -45,6 +47,16 @@ This is an unusual approach, but there are more details in [this 2021 interview]
|
|||
|
||||
In the meantime, using CoffeeScript syntax highlighting for .roc files turns out to work surprisingly well!
|
||||
|
||||
## Why won't the editor be able to edit non-roc files like .md, .gitignore, .yml, ... ?
|
||||
|
||||
The downside of having the Roc editor support files other than .roc is that it seems extremely difficult to avoid scope creep if we allow it. For example, it starts with just editing json as plaintext but then it's annoying that there's no syntax highlighting, so maybe we add the capability to do syntax highlighting for json but of course then some people want it for toml, .md, etc, so we need to add a way to specify custom syntax highlighting rules for all of those.
|
||||
|
||||
Then of course people don't want to be copy/pasting syntax highlighting rules from online, so maybe someone develops a third party "plugin manager" for the editor to distribute these syntax highlighting definitions.
|
||||
So maybe we add sharing syntax highlighting as a first-class thing, so people don't have to download a separate tool to use their editor normally but then some people who are using it for .json and .yaml start using it for .css too. Syntax highlighting is okay but it's annoying that they don't get error reporting when they mess up syntax or type an invalid selector or import and pretty soon there's demand for the Roc editor to do all the hardest parts of VS code.
|
||||
|
||||
We have to draw the line somewhere in there...but where to draw it?
|
||||
It seems like drawing a bright line at .roc files is the most straightforward. It means the roc editor is the absolute best at editing .roc files and it isn't a weak editor for anything else because it doesn't try to be an editor for anything else and it means the scope is very clear.
|
||||
|
||||
## Why is there no way to specify "import everything this module exposes" in `imports`?
|
||||
|
||||
In [Elm](https://elm-lang.org), it's possible to import a module in a way that brings everything that module
|
||||
|
@@ -95,20 +107,20 @@ the function might give different answers.

 Both of these would make revising code riskier across the entire language, which is very undesirable.

-Another option would be to define that function equality always returns `False`. So both of these would evaluate
-to `False`:
+Another option would be to define that function equality always returns `false`. So both of these would evaluate
+to `false`:

 - `(\x -> x + 1) == (\x -> 1 + x)`
 - `(\x -> x + 1) == (\x -> x + 1)`

 This makes function equality effectively useless, while still technically allowing it. It has some other downsides:

-- Now if you put a function inside a record, using `==` on that record will still type-check, but it will then return `False`. This could lead to bugs if you didn't realize you had accidentally put a function in there - for example, because you were actually storing a different type (e.g. an opaque type) and didn't realize it had a function inside it.
+- Now if you put a function inside a record, using `==` on that record will still type-check, but it will then return `false`. This could lead to bugs if you didn't realize you had accidentally put a function in there - for example, because you were actually storing a different type (e.g. an opaque type) and didn't realize it had a function inside it.
 - If you put a function (or a value containing a function) into a `Dict` or `Set`, you'll never be able to get it out again. This is a common problem with [NaN](https://en.wikipedia.org/wiki/NaN), which is also defined not to be equal to itself.
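
A hypothetical sketch of that record downside (not Roc's actual behavior; it only illustrates the "always return `false`" design being discussed):

```elixir
# If comparing functions always yielded Bool.false, then comparing a record
# to itself would report inequality whenever any of its fields is a function.
user = { name: "Ada", greet: \name -> Str.concat "Hi, " name }
user == user # would evaluate to Bool.false under that design
```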

-The first of these problems could be addressed by having function equality always return `True` instead of `False` (since that way it would not affect other fields' equality checks in a record), but that design has its own problems:
+The first of these problems could be addressed by having function equality always return true instead of false (since that way it would not affect other fields' equality checks in a record), but that design has its own problems:

-- Although function equality is still useless, `(\x -> x + 1) == (\x -> x)` returns `True`. Even if it didn't lead to bugs in practice, this would certainly be surprising and confusing to beginners.
+- Although function equality is still useless, `(\x -> x + 1) == (\x -> x)` returns `Bool.true`. Even if it didn't lead to bugs in practice, this would certainly be surprising and confusing to beginners.
 - Now if you put several different functions into a `Dict` or `Set`, only one of them will be kept; the others will be discarded or overwritten. This could cause bugs if a value stored a function internally, and then other functions relied on that internal function for correctness.

 Each of these designs makes Roc a language that's some combination of more error-prone, more confusing, and more
|
||||
|
@ -309,56 +321,83 @@ Here are some more details about the downsides as I see them.
|
|||
|
||||
### Currying and the `|>` operator
|
||||
|
||||
In Roc, this code produces `"Hello, World!"`
|
||||
In Roc, both of these expressions evaluate to `"Hello, World!"`
|
||||
|
||||
```elm
|
||||
"Hello, World"
|
||||
|> Str.concat "!"
|
||||
```elixir
|
||||
Str.concat "Hello, " "World!"
|
||||
```
|
||||
|
||||
This is because Roc's `|>` operator uses the expression before the `|>` as the _first_ argument to the function
|
||||
after it. For functions where both arguments have the same type, but it's obvious which argument goes where (e.g.
|
||||
`Str.concat "Hello, " "World!"`, `List.concat [1, 2] [3, 4]`), this works out well. Another example would
|
||||
be `|> Num.sub 1`, which subtracts 1 from whatever came before the `|>`.
|
||||
```elixir
|
||||
"Hello, "
|
||||
|> Str.concat "World!"
|
||||
```
|
||||
|
||||
For this reason, "pipeline-friendliness" in Roc means that the first argument to each function is typically
|
||||
the one that's most likely to be built up using a pipeline. For example, `List.map`:
|
||||
In curried languages with a `|>` operator, the first expression still returns `"Hello, World!"` but the second one returns `"World!Hello, "`. This is because Roc's `|>` operator uses the expression before the `|>` as the _first_ argument, whereas in curried languages, `|>` uses it as the _last_ argument.
|
||||
|
||||
```elm
|
||||
(For example, this is how `|>` works in both [F#](https://docs.microsoft.com/en-us/dotnet/fsharp/language-reference/symbol-and-operator-reference/#function-symbols-and-operators) and in [Elm](https://package.elm-lang.org/packages/elm/core/1.0.5/Basics#|%3E), both of which are curried languages. In contrast, Roc's `|>` design uses the same argument ordering as [Elixir](https://hexdocs.pm/elixir/1.14.0/Kernel.html#%7C%3E/2) and [Gleam](https://gleam.run/book/tour/functions.html#pipe-operator), neither of which is a curried language.)
|
||||
|
||||
This comes up in other situations as well. For example, consider subtraction and division:

```elixir
someNumber
|> Num.div 2
```

```elixir
someNumber
|> Num.sub 1
```

What do you expect these expressions to do?

In Roc, the first divides `someNumber` by 2 and the second one subtracts 1 from `someNumber`. In languages where `|>` uses the other argument ordering, the first example instead takes 2 and divides it by `someNumber`, while the second takes 1 and subtracts `someNumber` from it. This was a pain point I ran into with curried languages, and I was pleasantly surprised that changing the argument ordering in `|>` addressed the pain point.
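
To make that concrete, here is a minimal sketch using the same `someNumber`, `Num.div`, and `Num.sub` from above (illustrative only):

```elixir
# Roc: |> passes the left-hand side as the first argument.
someNumber |> Num.div 2 # same as Num.div someNumber 2, i.e. someNumber / 2
someNumber |> Num.sub 1 # same as Num.sub someNumber 1, i.e. someNumber - 1

# In a language whose |> passes it as the last argument, the same pipelines
# would instead mean Num.div 2 someNumber and Num.sub 1 someNumber.
```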
|
||||
|
||||
This style has a second benefit when it comes to higher-order functions. Consider these two examples:
|
||||
|
||||
```elixir
|
||||
answer = List.map numbers \num ->
|
||||
someFunction
|
||||
"some argument"
|
||||
anotherArg
|
||||
someOtherArg
|
||||
```
|
||||
|
||||
```elixir
|
||||
numbers
|
||||
|> List.map Num.abs
|
||||
|> List.map Num.abs
|
||||
```
|
||||
|
||||
This argument ordering convention also often makes it possible to pass anonymous functions to higher-order
|
||||
functions without needing parentheses, like so:
|
||||
In Roc, `List.map` takes a list and then a function. Because of the way `|>` works in Roc, `numbers |> List.map Num.abs` passes `numbers` as the first argument to `List.map`, and `Num.abs` as the second argument. So both of these examples work fine.
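
Spelled out as a small sketch, with `numbers` and `Num.abs` from the example above:

```elixir
# Both of these pass `numbers` as List.map's first argument
# and `Num.abs` as its second:
List.map numbers Num.abs
numbers |> List.map Num.abs
```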
|
||||
|
||||
```elm
|
||||
List.map numbers \num -> Num.abs (num - 1)
|
||||
In a curried language, these two examples couldn't both be valid. In order for `|> List.map Num.abs` to work in a curried language (where `|>` works the other way), `List.map` would have to take its arguments in the opposite order: the function first and the list second.
|
||||
|
||||
This means the first example would have to change from this...
|
||||
|
||||
```elixir
|
||||
answer = List.map numbers \num ->
|
||||
someFunction
|
||||
"some argument"
|
||||
anotherArg
|
||||
someOtherArg
|
||||
```
|
||||
|
||||
(If the arguments were reversed, this would be `List.map (\num -> Num.abs (num - 1)) numbers` and the
|
||||
extra parentheses would be required.)
|
||||
...to this:
|
||||
|
||||
Neither of these benefits is compatible with the argument ordering currying encourages. Currying encourages
|
||||
`List.map` to take the `List` as its second argument instead of the first, so that you can partially apply it
|
||||
like `(List.map Num.abs)`; if Roc introduced currying but kept the order of `List.map` the same way it is today,
|
||||
then partially applying `List.map` (e.g. `(List.map numbers)`) would be much less useful than if the arguments
|
||||
were swapped - but that in turn would make it less useful with `|>` and would require parentheses when passing
|
||||
it an anonymous function.
|
||||
```elixir
|
||||
answer =
|
||||
List.map
|
||||
(\num ->
|
||||
someFunction
|
||||
"some argument"
|
||||
anotherArg
|
||||
someOtherArg
|
||||
)
|
||||
numbers
|
||||
```
|
||||
|
||||
This is a fundamental design tension. One argument order works well with `|>` (at least the way it works in Roc
|
||||
today) and with passing anonymous functions to higher-order functions, and the other works well with currying.
|
||||
It's impossible to have both.
|
||||
This was also a pain point I'd encountered in curried languages. I prefer the way the former example reads, but that style doesn't work with the argument order that currying encourages for higher-order functions like `List.map`. (Prior to using curried languages, I'd used [CoffeeScript](https://coffeescript.org/) in a functional style with [`_.map`](https://underscorejs.org/#map), and was disappointed to realize that I could no longer use the enjoyable style of `answer = _.map numbers (num) -> …` as I had before. In Roc, this style works.)
|
||||
|
||||
Of note, one possible design is to have currying while also having `|>` pass the _last_ argument instead of the first.
|
||||
This is what Elm does, and it makes pipeline-friendliness and curry-friendliness the same thing. However, it also
|
||||
means that either `|> Str.concat "!"` would add the `"!"` to the front of the string, or else `Str.concat`'s
|
||||
arguments would have to be flipped - meaning that `Str.concat "Hello, World" "!"` would evaluate to `"!Hello, World"`.
|
||||
|
||||
The only way to have `Str.concat` work the way it does in Roc today (where both pipelines and non-pipeline calling
|
||||
do what you'd want them to) is to order function arguments in a way that is not conducive to currying. This design
|
||||
tension only exists if there's currying in the language; without it, you can order arguments for pipeline-friendliness
|
||||
without concern.
|
||||
As a historical note, these stylistic benefits (of `|> Num.sub 1` working as expected, and being able to write `List.map numbers \num ->`) were not among the original reasons Roc did not have currying. These benefits were discovered after the decision had already been made that Roc would not be a curried language, and they served to reinforce after the fact that the decision was the right one for Roc given the language's goals.
|
||||
|
||||
### Currying and learning curve
|
||||
|
||||
|
@@ -406,9 +445,10 @@ reverseSort = \list -> List.reverse (List.sort list)

 I've consistently found that I can more quickly and accurately understand function definitions that use
 named arguments, even though the code is longer. I suspect this is because I'm faster at reading than I am at
-desugaring, and whenever I read the top version I end up needing to mentally desugar it into the bottom version.
+eta-expanding (e.g. converting `List.sort` into `\l -> List.sort l`). Whenever I read
+the top version I end up needing to mentally eta-expand it into the bottom version.
 In more complex examples (this is among the tamest pointfree function composition examples I've seen), I make
-a mistake in my mental desugaring, and misunderstand what the function is doing - which can cause bugs.
+a mistake in my mental eta-expansion, and misunderstand what the function is doing - which can cause bugs.

 I assumed I would get faster and more accurate at this over time. However, by now it's been about a decade
 since I first learned about the technique, and I'm still slower and less accurate at reading code that uses
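
For reference, a minimal sketch of that eta-expansion (arbitrary names, same `List.sort` as above):

```elixir
# These two definitions are equivalent; the second is the eta-expanded form,
# which spells out the argument that the first leaves implicit.
sortAsc = List.sort
sortAscExpanded = \l -> List.sort l
```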
|
||||
|
@ -475,4 +515,4 @@ The split of Rust for the compiler and Zig for the standard library has worked w
|
|||
|
||||
## Why is the website so basic?
|
||||
|
||||
We have a very basic website on purpose, it helps set expectations that roc is a work in progress and not ready yet for a first release.
|
||||
We have a very basic website on purpose, it helps set expectations that roc is a work in progress and not ready yet for a first release.
|
||||
|
|
|
@ -1,14 +1,16 @@
|
|||
# Work in progress!
|
||||
|
||||
Roc is not ready for a 0.1 release yet, but we do have:
|
||||
|
||||
- [**installation** guide](https://github.com/roc-lang/roc/tree/main/getting_started)
|
||||
- [**tutorial**](https://github.com/roc-lang/roc/blob/main/TUTORIAL.md)
|
||||
- [**docs** for the standard library](https://www.roc-lang.org/builtins/Str)
|
||||
- [frequently asked questions](https://github.com/roc-lang/roc/blob/main/FAQ.md)
|
||||
- [Zulip chat](https://roc.zulipchat.com) for help, questions and discussions
|
||||
|
||||
If you'd like to get involved in contributing to the language, the Zulip chat is also the best place to get help with [good first issues](https://github.com/roc-lang/roc/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22).
|
||||
|
||||
# Sponsors
|
||||
## Sponsors
|
||||
|
||||
We are very grateful to our sponsors [NoRedInk](https://www.noredink.com/), [rwx](https://www.rwx.com), and [Tweede golf](https://tweedegolf.nl/en).
|
||||
|
||||
|
|
838 TUTORIAL.md
File diff suppressed because it is too large.
|
@ -30,7 +30,7 @@ checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a"
|
|||
|
||||
[[package]]
|
||||
name = "bench-runner"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"clap",
|
||||
"data-encoding",
|
||||
|
@ -240,9 +240,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "regex"
|
||||
version = "1.5.4"
|
||||
version = "1.5.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461"
|
||||
checksum = "1a11647b6b25ff05a515cb92c365cec08801e83423a235b51e231e1808747286"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"memchr",
|
|
@ -1,13 +1,13 @@
|
|||
[package]
|
||||
name = "bench-runner"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
edition = "2021"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
clap = { version = "3.1.15", features = ["derive"] }
|
||||
regex = "1.5.4"
|
||||
regex = "1.5.5"
|
||||
is_executable = "1.0.1"
|
||||
ring = "0.16.20"
|
||||
data-encoding = "2.3.2"
|
|
@ -33,7 +33,7 @@ fn main() {
|
|||
|
||||
if check_if_bench_executables_changed() {
|
||||
println!(
|
||||
"Comparison of sha256 of executables reveals changes, doing full benchmarks..."
|
||||
"\n\nComparison of sha256 of executables reveals changes, doing full benchmarks...\n\n"
|
||||
);
|
||||
|
||||
let all_regressed_benches = do_all_benches(optional_args.nr_repeat_benchmarks);
|
||||
|
@ -51,8 +51,8 @@ fn main() {
|
|||
eprintln!(
|
||||
r#"I can't find bench-folder-main and bench-folder-branch from the current directory.
|
||||
I should be executed from the repo root.
|
||||
Use `./ci/safe-earthly.sh --build-arg BENCH_SUFFIX=main +prep-bench-folder` to generate bench-folder-main.
|
||||
Use `./ci/safe-earthly.sh +prep-bench-folder` to generate bench-folder-branch."#
|
||||
Use `./ci/benchmarks/prep_folder.sh main` to generate bench-folder-main.
|
||||
Use `./ci/benchmarks/prep_folder.sh branch` to generate bench-folder-branch."#
|
||||
);
|
||||
|
||||
process::exit(1)
|
||||
|
@ -85,6 +85,8 @@ fn do_all_benches(nr_repeat_benchmarks: usize) -> HashSet<String> {
|
|||
return HashSet::new();
|
||||
}
|
||||
|
||||
println!("\n\nDoing benchmarks {:?} times to reduce flukes.\n\n", nr_repeat_benchmarks);
|
||||
|
||||
for _ in 1..nr_repeat_benchmarks {
|
||||
delete_old_bench_results();
|
||||
do_benchmark("main");
|
||||
|
@ -112,7 +114,7 @@ fn do_benchmark(branch_name: &'static str) -> HashSet<String> {
|
|||
))
|
||||
.args(&["--bench", "--noplot"])
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::piped())
|
||||
.stderr(Stdio::inherit())
|
||||
.spawn()
|
||||
.unwrap_or_else(|_| panic!("Failed to benchmark {}.", branch_name));
|
||||
|
||||
|
@ -133,14 +135,14 @@ fn do_benchmark(branch_name: &'static str) -> HashSet<String> {
|
|||
"Failed to get line that contains benchmark name from last_three_lines_queue.",
|
||||
);
|
||||
|
||||
let regex_match = bench_name_regex.find(regressed_bench_name_line).expect("This line should hoave the benchmark name between double quotes but I could not match it");
|
||||
let regex_match = bench_name_regex.find(regressed_bench_name_line).expect("This line should have the benchmark name between double quotes but I could not match it");
|
||||
|
||||
regressed_benches.insert(regex_match.as_str().to_string().replace("\"", ""));
|
||||
}
|
||||
|
||||
last_three_lines_queue.push_front(line_str.clone());
|
||||
|
||||
println!("bench {:?}: {:?}", branch_name, line_str);
|
||||
println!(">>bench {:?}: {:?}", branch_name, line_str);
|
||||
}
|
||||
|
||||
regressed_benches
|
||||
|
@@ -186,8 +188,20 @@ fn sha256_digest<R: Read>(mut reader: R) -> Result<Digest, io::Error> {
 }

 fn sha_file(file_path: &Path) -> Result<String, io::Error> {
-    let input = File::open(file_path)?;
-    let reader = BufReader::new(input);
+    // Debug info is dependent on the dir in which executable was created,
+    // so we need to strip that to be able to compare binaries.
+    let no_debug_info_file_path = file_path.to_str().unwrap().to_string() + ("_no_debug_info");
+    std::fs::copy(file_path, &no_debug_info_file_path)?;
+
+    let strip_output = Command::new("strip")
+        .args(["--strip-debug", &no_debug_info_file_path])
+        .output()
+        .expect("failed to execute process");
+
+    assert!(strip_output.status.success());
+
+    let no_debug_info_file = File::open(no_debug_info_file_path)?;
+    let reader = BufReader::new(no_debug_info_file);
     let digest = sha256_digest(reader)?;

     Ok(HEXUPPER.encode(digest.as_ref()))
|
||||
|
@ -227,9 +241,10 @@ fn calc_hashes_for_folder(benches_path_str: &str) -> HashMap<String, String> {
|
|||
}
|
||||
|
||||
fn check_if_bench_executables_changed() -> bool {
|
||||
let bench_folder_str = "/examples/benchmarks/";
|
||||
let bench_folder_str = "/crates/cli_testing_examples/benchmarks/";
|
||||
|
||||
let main_benches_path_str = [BENCH_FOLDER_MAIN, bench_folder_str].join("");
|
||||
|
||||
let main_bench_hashes = calc_hashes_for_folder(&main_benches_path_str);
|
||||
|
||||
let branch_benches_path_str = [BENCH_FOLDER_BRANCH, bench_folder_str].join("");
|
26 ci/benchmarks/prep_folder.sh (Executable file)
|
@@ -0,0 +1,26 @@
#!/usr/bin/env bash

# compile everything needed for benchmarks and output a self-contained dir from which benchmarks can be run.

# https://vaneyckt.io/posts/safer_bash_scripts_with_set_euxo_pipefail/
set -euxo pipefail

# to make use of avx, avx2, sse2, sse4.2... instructions
RUSTFLAGS="-C link-arg=-fuse-ld=lld -C target-cpu=native"
BENCH_SUFFIX=$1

cargo criterion -V
cd crates/cli && cargo criterion --no-run && cd ../..
mkdir -p bench-folder/crates/cli_testing_examples/benchmarks
mkdir -p bench-folder/crates/compiler/builtins/bitcode/src
mkdir -p bench-folder/target/release/deps
mkdir -p bench-folder/target/release/lib
cp "crates/cli_testing_examples/benchmarks/"*".roc" bench-folder/crates/cli_testing_examples/benchmarks/
cp -r crates/cli_testing_examples/benchmarks/platform bench-folder/crates/cli_testing_examples/benchmarks/
cp crates/compiler/builtins/bitcode/src/str.zig bench-folder/crates/compiler/builtins/bitcode/src
cp target/release/roc bench-folder/target/release
cp -r target/release/lib bench-folder/target/release

# copy the most recent time bench to bench-folder
cp target/release/deps/`ls -t target/release/deps/ | grep time_bench | head -n 1` bench-folder/target/release/deps/time_bench
mv bench-folder bench-folder-$BENCH_SUFFIX
|
|
@ -1,5 +1,8 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
# https://vaneyckt.io/posts/safer_bash_scripts_with_set_euxo_pipefail/
|
||||
set -euxo pipefail
|
||||
|
||||
mkdir -p $HOME/.cargo
|
||||
echo -e "[build]\nrustflags = [\"-C\", \"link-arg=-fuse-ld=lld\", \"-C\", \"target-cpu=native\"]" > $HOME/.cargo/config
|
||||
|
||||
|
|
|
@ -2,7 +2,10 @@
|
|||
|
||||
# assumes roc_releases.json is present
|
||||
|
||||
LATEST_RELEASE_URL=`cat roc_releases.json | jq --arg today $(date +'%Y-%m-%d') '.[0] | .assets | map(.browser_download_url) | map(select(. | contains("silicon-\($today)"))) | .[0]'`
|
||||
# https://vaneyckt.io/posts/safer_bash_scripts_with_set_euxo_pipefail/
|
||||
set -euxo pipefail
|
||||
|
||||
LATEST_RELEASE_URL=`cat roc_releases.json | jq --arg arch $1 --arg today $(date +'%Y-%m-%d') '.[0] | .assets | map(.browser_download_url) | map(select(. | contains("\($arch)-\($today)"))) | .[0]'`
|
||||
|
||||
if [[ "$LATEST_RELEASE_URL" == "null" ]]
|
||||
then
|
||||
|
|
|
@ -1,4 +1,8 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
# https://vaneyckt.io/posts/safer_bash_scripts_with_set_euxo_pipefail/
|
||||
set -euxo pipefail
|
||||
|
||||
cp target/release/roc ./roc # to be able to exclude "target" later in the tar command
|
||||
cp -r target/release/lib ./lib
|
||||
tar -czvf $1 --exclude="target" --exclude="zig-cache" roc lib LICENSE LEGAL_DETAILS examples/hello-world crates/roc_std
|
||||
tar -czvf $1 --exclude="target" --exclude="zig-cache" roc lib LICENSE LEGAL_DETAILS examples/helloWorld.roc examples/platform-switching examples/cli crates/roc_std
|
||||
|
|
|
@ -1,23 +0,0 @@
|
|||
#!/usr/bin/env bash
|
||||
LOG_FILE="earthly_log.txt"
|
||||
touch $LOG_FILE
|
||||
|
||||
# first arg + everything after
|
||||
ARGS=${@:1}
|
||||
FULL_CMD="earthly --config ci/earthly-conf.yml $ARGS"
|
||||
echo $FULL_CMD
|
||||
script -efq $LOG_FILE -c "$FULL_CMD"
|
||||
EXIT_CODE=$?
|
||||
|
||||
if grep -q "failed to mount" "$LOG_FILE"; then
|
||||
echo ""
|
||||
echo ""
|
||||
echo "------<<<<<<!!!!!!>>>>>>------"
|
||||
echo "DETECTED FAILURE TO MOUNT ERROR: running without cache"
|
||||
echo "------<<<<<<!!!!!!>>>>>>------"
|
||||
echo ""
|
||||
echo ""
|
||||
earthly --config ci/earthly-conf.yml --no-cache $ARGS
|
||||
else
|
||||
exit $EXIT_CODE
|
||||
fi
|
|
@ -1,3 +1,7 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
# https://vaneyckt.io/posts/safer_bash_scripts_with_set_euxo_pipefail/
|
||||
set -euxo pipefail
|
||||
|
||||
# version.txt is used by the CLI: roc --version
|
||||
printf 'nightly pre-release, built from commit ' > version.txt && git log --pretty=format:'%h' -n 1 >> version.txt && printf ' on ' >> version.txt && date -u >> version.txt
|
|
@ -1,3 +1,6 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
# https://vaneyckt.io/posts/safer_bash_scripts_with_set_euxo_pipefail/
|
||||
set -euxo pipefail
|
||||
|
||||
crates/repl_wasm/build-www.sh `pwd`/roc_repl_wasm.tar.gz
|
||||
|
|
158 crates/README.md (Normal file)
|
@ -0,0 +1,158 @@
|
|||
# Roc Internals
|
||||
|
||||
Roc has different rust crates for various binaries and libraries. Their roles are briefly described below. If you'd like to learn more, have any questions, or suspect something is out of date, please start a discussion on the [Roc Zulip](https://roc.zulipchat.com/)!
|
||||
|
||||
You can use `cargo doc` to generate docs for a specific package; e.g.
|
||||
|
||||
```
|
||||
cargo doc --package roc_ast --open
|
||||
```
|
||||
|
||||
## `ast/` - `roc_ast`
|
||||
|
||||
Code to represent the [Abstract Syntax Tree](https://en.wikipedia.org/wiki/Abstract_syntax_tree) as used by the editor. In contrast to the compiler, the types in this AST do not keep track of the location of the matching code in the source file.
|
||||
|
||||
## `cli/` - `roc_cli`
|
||||
|
||||
The `roc` binary that brings together all functionality in the Roc toolset.
|
||||
|
||||
## `cli_utils/` - `cli_utils`
|
||||
|
||||
Provides shared code for cli tests and benchmarks.
|
||||
|
||||
## `code_markup/` - `roc_code_markup`
|
||||
|
||||
A [markup language](https://en.wikipedia.org/wiki/Markup_language) to display Roc code in the editor.
|
||||
|
||||
## `compiler/`
|
||||
|
||||
Compiles `.roc` files and combines them with their platform into an executable binary. See [compiler/README.md](./compiler/README.md) for more information.
|
||||
|
||||
TODO explain what "compiler frontend" is
|
||||
TODO explain what "compiler backend" is
|
||||
|
||||
The compiler includes the following sub-crates:
|
||||
- `roc_alias_analysis` Performs analysis and optimizations to remove unneeded [reference counts](https://en.wikipedia.org/wiki/Reference_counting) at runtime, and supports in-place mutation.
|
||||
- `arena_pool` An implementation of an [arena allocator](https://mgravell.github.io/Pipelines.Sockets.Unofficial/docs/arenas.html) designed for the compiler's workloads.
|
||||
- `roc_build` Responsible for coordinating building and linking of a Roc app with its host.
|
||||
- `roc_builtins` provides the Roc functions and modules that are implicitly imported into every module. See [README.md](./compiler/builtins/README.md) for more information.
|
||||
- `roc_can` [Canonicalize](https://en.wikipedia.org/wiki/Canonicalization) a roc [abstract syntax tree](https://en.wikipedia.org/wiki/Abstract_syntax_tree), [resolving symbols](https://stackoverflow.com/a/1175493/4200103), [re-ordering definitions](https://www.oreilly.com/library/view/c-high-performance/9781787120952/546b5677-9157-4333-bc90-16db696436ac.xhtml), and preparing a module for [type inference](https://en.wikipedia.org/wiki/Type_inference).
|
||||
- `roc_collections` Domain-specific collections created for the needs of the compiler.
|
||||
- `roc_constrain` Responsible for building the set of constraints that are used during [type inference](https://en.wikipedia.org/wiki/Type_inference) of a program, and for gathering context needed for pleasant error messages when a type error occurs.
|
||||
- `roc_debug_flags` Environment variables that can be toggled to aid debugging of the compiler itself.
|
||||
- `roc_derive` provides auto-derivers for builtin abilities like `Hash` and `Decode`.
|
||||
- `roc_exhaustive` provides [exhaustiveness](https://dev.to/babak/exhaustive-type-checking-with-typescript-4l3f) checking for Roc.
|
||||
- `roc_fmt` The roc code formatter.
|
||||
- `roc_gen_dev` provides the compiler backend to generate Roc binaries fast, for a nice developer experience. See [README.md](./compiler/gen_dev/README.md) for more information.
|
||||
- `roc_gen_llvm` provides the LLVM backend to generate Roc binaries. Used to generate a binary with the fastest possible execution speed.
|
||||
- `roc_gen_wasm` provides the WASM backend to generate Roc binaries. See [README.md](./compiler/gen_wasm/README.md) for more information.
|
||||
- `roc_ident` Implements data structures used for efficiently representing small strings, like identifiers.
|
||||
- `roc_intern` provides generic interners for concurrent and single-thread use cases.
|
||||
- `roc_late_solve` provides type unification and solving primitives from the perspective of the compiler backend.
|
||||
- `roc_load` Used to load a .roc file and coordinate the compiler pipeline, including parsing, type checking, and [code generation](https://en.wikipedia.org/wiki/Code_generation_(compiler)).
|
||||
- `roc_load_internal` The internal implementation of roc_load, separate from roc_load to support caching.
|
||||
- `roc_module` Implements data structures used for efficiently representing unique modules and identifiers in Roc programs.
|
||||
- `roc_mono` Roc's main intermediate representation (IR), which is responsible for [monomorphization](https://en.wikipedia.org/wiki/Monomorphization), defunctionalization, inserting [ref-count](https://en.wikipedia.org/wiki/Reference_counting) instructions, and transforming a Roc program into a form that is easy to consume by a backend.
|
||||
- `roc_parse` Implements the Roc parser, which transforms a textual representation of a Roc program to an [abstract syntax tree](https://en.wikipedia.org/wiki/Abstract_syntax_tree).
|
||||
- `roc_problem` provides types to describe problems that can occur when compiling `.roc` code.
|
||||
- `roc_region` Data structures for storing source-code-location information, used heavily for contextual error messages.
|
||||
- `roc_target` provides types and helpers for compiler targets such as `default_x86_64`.
|
||||
- `roc_serialize` provides helpers for serializing and deserializing to/from bytes.
|
||||
- `roc_solve` The entry point of Roc's [type inference](https://en.wikipedia.org/wiki/Type_inference) system. Implements type inference and specialization of abilities.
|
||||
- `roc_solve_problem` provides types to describe problems that can occur during solving.
|
||||
- `roc_str` provides `Roc` styled collection [reference counting](https://en.wikipedia.org/wiki/Reference_counting). See [README.md](./compiler/str/README.md) for more information.
|
||||
- `test_derive` Tests Roc's auto-derivers.
|
||||
- `test_gen` contains all of Roc's [code generation](https://en.wikipedia.org/wiki/Code_generation_(compiler)) tests. See [README.md](./compiler/test_gen/README.md) for more information.
|
||||
- `test_mono` Tests Roc's generation of the mono intermediate representation.
|
||||
- `test_mono_macros` Macros for use in `test_mono`.
|
||||
- `roc_types` Various representations and utilities for dealing with types in the Roc compiler.
|
||||
- `roc_unify` Implements Roc's unification algorithm, the heart of Roc's [type inference](https://en.wikipedia.org/wiki/Type_inference).
|
||||
|
||||
## `docs/` - `roc_docs`
|
||||
|
||||
Generates html documentation from Roc files.
|
||||
Used for [roc-lang.org/builtins/Num](https://www.roc-lang.org/builtins/Num).
|
||||
|
||||
## `docs_cli/` - `roc_docs_cli` library and `roc-docs` binary
|
||||
|
||||
Provides a binary that is only used for static build servers.
|
||||
|
||||
## `editor/` - `roc_editor`
|
||||
|
||||
Roc's editor. See [README.md](./editor/README.md) for more information.
|
||||
|
||||
## `error_macros/` - `roc_error_macros`
|
||||
|
||||
Provides macros for consistent reporting of errors in Roc's rust code.
|
||||
|
||||
## `glue/` - `roc_glue`
|
||||
|
||||
The `roc_glue` crate generates code needed for platform hosts to communicate with Roc apps. This tool is not necessary for writing a platform in another language; however, it's a great convenience! Currently supports Rust platforms, and the plan is to support any language via a plugin model.
|
||||
|
||||
## `highlight/` - `roc_highlight`
|
||||
|
||||
Provides syntax highlighting for the editor by transforming a string to markup nodes.
|
||||
|
||||
## `linker/` - `roc_linker`
|
||||
|
||||
Surgical linker that links platforms to Roc applications. We created our own linker for performance, since regular linkers add complexity that is not needed for linking Roc apps. Because we want `roc` to manage the build system and final linking of the executable, it is significantly less practical to use a regular linker. See [README.md](./linker/README.md) for more information.
|
||||
|
||||
## `repl_cli/` - `roc_repl_cli`
|
||||
|
||||
Command Line Interface (CLI) functionality for the Read-Evaluate-Print Loop (REPL).
|
||||
|
||||
## `repl_eval/` - `roc_repl_eval`
|
||||
|
||||
Provides the functionality for the REPL to evaluate Roc expressions.
|
||||
|
||||
## `repl_expect/` - `roc_repl_expect`
|
||||
|
||||
Supports evaluating `expect` and printing contextual information when they fail.
|
||||
|
||||
## `repl_test/` - `repl_test`
|
||||
|
||||
Tests the roc REPL.
|
||||
|
||||
## `repl_wasm/` - `roc_repl_wasm`
|
||||
|
||||
Provides a build of the REPL for the Roc website using WebAssembly. See [README.md](./repl_wasm/README.md) for more information.
|
||||
|
||||
## `reporting/` - `roc_reporting`
|
||||
|
||||
Responsible for generating warning and error messages.
|
||||
|
||||
## `roc_std/` - `roc_std`
|
||||
|
||||
Provides Rust representations of Roc data structures.
|
||||
|
||||
## `test_utils/` - `roc_test_utils`
|
||||
|
||||
Provides testing utility functions for use throughout the Rust code base.
|
||||
|
||||
## `tracing/` - `roc_tracing`
|
||||
|
||||
Provides tracing utility functions for various executable entry points.
|
||||
|
||||
## `utils/` - `roc_utils`
|
||||
|
||||
Provides utility functions used all over the code base.
|
||||
|
||||
## `vendor/`
|
||||
|
||||
These are files that were originally obtained somewhere else (e.g. crates.io) but which we needed to fork for some Roc-specific reason. See [README.md](./vendor/README.md) for more information.
|
||||
|
||||
## `wasi-libc-sys/` - `wasi_libc_sys`
|
||||
|
||||
Provides a Rust wrapper for the WebAssembly test platform built on libc and is primarily used for testing purposes.
|
||||
|
||||
# Building a Roc Application
|
||||
|
||||
Below is a simplified diagram to illustrate how a Roc application and host are combined to build an executable file.
|
||||
|
||||

|
||||
|
||||
# Roc Compiler Stages
|
||||
|
||||
Below is a simplified diagram to illustrate the different stages of the Roc Compiler.
|
||||
|
||||

|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "roc_ast"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
authors = ["The Roc Contributors"]
|
||||
license = "UPL-1.0"
|
||||
edition = "2021"
|
||||
|
@ -21,15 +21,17 @@ roc_load = { path = "../compiler/load" }
|
|||
roc_target = { path = "../compiler/roc_target" }
|
||||
roc_error_macros = { path = "../error_macros" }
|
||||
roc_reporting = { path = "../reporting" }
|
||||
arrayvec = "0.7.2"
|
||||
bumpalo = { version = "3.8.0", features = ["collections"] }
|
||||
page_size = "0.4.2"
|
||||
snafu = { version = "0.7.1", features = ["backtraces"] }
|
||||
|
||||
ven_graph = { path = "../vendor/pathfinding" }
|
||||
libc = "0.2.106"
|
||||
|
||||
arrayvec.workspace = true
|
||||
bumpalo.workspace = true
|
||||
page_size.workspace = true
|
||||
snafu.workspace = true
|
||||
libc.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
indoc = "1.0.7"
|
||||
indoc.workspace = true
|
||||
|
||||
[target.'cfg(windows)'.dependencies]
|
||||
winapi = { version = "0.3.9", features = ["memoryapi"]}
|
||||
|
|
|
@ -3,7 +3,7 @@ use roc_module::ident::{Lowercase, TagName};
|
|||
use roc_module::symbol::Symbol;
|
||||
use roc_region::all::{Loc, Region};
|
||||
use roc_types::subs::{VarId, Variable};
|
||||
use roc_types::types::{AliasKind, Problem, RecordField};
|
||||
use roc_types::types::{AliasKind, RecordField};
|
||||
use std::collections::HashMap;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
|
@ -44,7 +44,7 @@ pub enum SolvedType {
|
|||
EmptyTagUnion,
|
||||
/// A type from an Invalid module
|
||||
#[allow(unused)]
|
||||
Erroneous(Problem),
|
||||
Erroneous,
|
||||
|
||||
Alias(
|
||||
Symbol,
|
||||
|
|
|
@ -2199,7 +2199,7 @@ pub mod test_constrain {
|
|||
Foo
|
||||
"#
|
||||
),
|
||||
"[Foo]*",
|
||||
"[Foo]",
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -2235,7 +2235,7 @@ pub mod test_constrain {
|
|||
if True then Green else Red
|
||||
"#
|
||||
),
|
||||
"[Green, Red]*",
|
||||
"[Green, Red]",
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -2249,7 +2249,7 @@ pub mod test_constrain {
|
|||
Red -> Purple
|
||||
"#
|
||||
),
|
||||
"[Blue, Purple]*",
|
||||
"[Blue, Purple]",
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -2302,7 +2302,7 @@ pub mod test_constrain {
|
|||
\a, b -> Pair a b
|
||||
"#
|
||||
),
|
||||
"a, b -> [Pair a b]*",
|
||||
"a, b -> [Pair a b]",
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -2445,7 +2445,7 @@ pub mod test_constrain {
|
|||
curryPair
|
||||
"#
|
||||
),
|
||||
"a -> (b -> [Pair a b]*)",
|
||||
"a -> (b -> [Pair a b])",
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -2658,7 +2658,7 @@ pub mod test_constrain {
|
|||
B -> Y
|
||||
"#
|
||||
),
|
||||
"[A, B] -> [X, Y]*",
|
||||
"[A, B] -> [X, Y]",
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -2674,7 +2674,7 @@ pub mod test_constrain {
|
|||
_ -> Z
|
||||
"#
|
||||
),
|
||||
"[A, B]* -> [X, Y, Z]*",
|
||||
"[A, B]* -> [X, Y, Z]",
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -2689,7 +2689,7 @@ pub mod test_constrain {
|
|||
A N -> Y
|
||||
"#
|
||||
),
|
||||
"[A [M, N]] -> [X, Y]*",
|
||||
"[A [M, N]] -> [X, Y]",
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -2705,7 +2705,7 @@ pub mod test_constrain {
|
|||
A _ -> Z
|
||||
"#
|
||||
),
|
||||
"[A [M, N]] -> [X, Y, Z]*",
|
||||
"[A [M, N]] -> [X, Y, Z]",
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -2737,7 +2737,7 @@ pub mod test_constrain {
|
|||
A N -> X
|
||||
"#
|
||||
),
|
||||
"[A [M, N], B] -> [X]*",
|
||||
"[A [M, N], B] -> [X]",
|
||||
)
|
||||
}
|
||||
|
||||
|
|
|
@ -323,7 +323,7 @@ fn from_pending_alias<'a>(
|
|||
let symbol = name.value;
|
||||
|
||||
match to_annotation2(env, scope, &ann.value, ann.region) {
|
||||
Annotation2::Erroneous(_) => todo!(),
|
||||
Annotation2::Erroneous => todo!(),
|
||||
Annotation2::Annotation {
|
||||
named_rigids,
|
||||
unnamed_rigids,
|
||||
|
@ -419,7 +419,7 @@ fn canonicalize_pending_def<'a>(
|
|||
// but the rigids can show up in type error messages, so still register them
|
||||
|
||||
match to_annotation2(env, scope, &loc_ann.value, loc_ann.region) {
|
||||
Annotation2::Erroneous(_) => todo!(),
|
||||
Annotation2::Erroneous => todo!(),
|
||||
Annotation2::Annotation {
|
||||
named_rigids,
|
||||
unnamed_rigids,
|
||||
|
@ -468,7 +468,7 @@ fn canonicalize_pending_def<'a>(
|
|||
|
||||
TypedBody(loc_pattern, loc_can_pattern, loc_ann, loc_expr) => {
|
||||
match to_annotation2(env, scope, &loc_ann.value, loc_ann.region) {
|
||||
Annotation2::Erroneous(_) => todo!(),
|
||||
Annotation2::Erroneous => todo!(),
|
||||
Annotation2::Annotation {
|
||||
named_rigids,
|
||||
unnamed_rigids,
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
use roc_module::symbol::IdentId;
|
||||
use std::fmt::Write as _; // import without risk of name clashing
|
||||
|
||||
use crate::{
|
||||
lang::core::expr::{expr2::Expr2, expr2_to_string::expr2_to_string},
|
||||
|
@ -35,11 +36,12 @@ pub fn def2_to_string(node_id: DefId, pool: &Pool) -> String {
|
|||
identifier_id,
|
||||
expr_id,
|
||||
} => {
|
||||
full_string.push_str(&format!(
|
||||
let _ = write!(
|
||||
full_string,
|
||||
"Def2::ValueDef(identifier_id: >>{:?}), expr_id: >>{:?})",
|
||||
identifier_id,
|
||||
expr2_to_string(*expr_id, pool)
|
||||
));
|
||||
);
|
||||
}
|
||||
Def2::Blank => {
|
||||
full_string.push_str("Def2::Blank");
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
use super::expr2::{Expr2, ExprId};
|
||||
use crate::{
|
||||
lang::core::{expr::record_field::RecordField, val_def::value_def_to_string},
|
||||
mem_pool::pool::Pool,
|
||||
};
|
||||
|
||||
use super::expr2::{Expr2, ExprId};
|
||||
use roc_types::subs::Variable;
|
||||
use std::fmt::Write as _; // import without risk of name clashing
|
||||
|
||||
pub fn expr2_to_string(node_id: ExprId, pool: &Pool) -> String {
|
||||
let mut full_string = String::new();
|
||||
|
@ -31,14 +31,11 @@ fn expr2_to_string_helper(
|
|||
out_string.push_str(&get_spacing(indent_level));
|
||||
|
||||
match expr2 {
|
||||
Expr2::SmallStr(arr_string) => out_string.push_str(&format!(
|
||||
"{}{}{}",
|
||||
"SmallStr(\"",
|
||||
arr_string.as_str(),
|
||||
"\")",
|
||||
)),
|
||||
Expr2::SmallStr(arr_string) => {
|
||||
let _ = write!(out_string, "SmallStr(\"{}\")", arr_string.as_str());
|
||||
}
|
||||
Expr2::Str(pool_str) => {
|
||||
out_string.push_str(&format!("{}{}{}", "Str(\"", pool_str.as_str(pool), "\")",))
|
||||
let _ = write!(out_string, "Str(\"{}\")", pool_str.as_str(pool));
|
||||
}
|
||||
Expr2::Blank => out_string.push_str("Blank"),
|
||||
Expr2::EmptyRecord => out_string.push_str("EmptyRecord"),
|
||||
|
@ -46,7 +43,7 @@ fn expr2_to_string_helper(
|
|||
out_string.push_str("Record:\n");
|
||||
out_string.push_str(&var_to_string(record_var, indent_level + 1));
|
||||
|
||||
out_string.push_str(&format!("{}fields: [\n", get_spacing(indent_level + 1)));
|
||||
let _ = writeln!(out_string, "{}fields: [", get_spacing(indent_level + 1));
|
||||
|
||||
let mut first_child = true;
|
||||
|
||||
|
@ -59,43 +56,46 @@ fn expr2_to_string_helper(
|
|||
|
||||
match field {
|
||||
RecordField::InvalidLabelOnly(pool_str, var) => {
|
||||
out_string.push_str(&format!(
|
||||
let _ = write!(
|
||||
out_string,
|
||||
"{}({}, Var({:?})",
|
||||
get_spacing(indent_level + 2),
|
||||
pool_str.as_str(pool),
|
||||
var,
|
||||
));
|
||||
);
|
||||
}
|
||||
RecordField::LabelOnly(pool_str, var, symbol) => {
|
||||
out_string.push_str(&format!(
|
||||
let _ = write!(
|
||||
out_string,
|
||||
"{}({}, Var({:?}), Symbol({:?})",
|
||||
get_spacing(indent_level + 2),
|
||||
pool_str.as_str(pool),
|
||||
var,
|
||||
symbol
|
||||
));
|
||||
);
|
||||
}
|
||||
RecordField::LabeledValue(pool_str, var, val_node_id) => {
|
||||
out_string.push_str(&format!(
|
||||
"{}({}, Var({:?}), Expr2(\n",
|
||||
let _ = writeln!(
|
||||
out_string,
|
||||
"{}({}, Var({:?}), Expr2(",
|
||||
get_spacing(indent_level + 2),
|
||||
pool_str.as_str(pool),
|
||||
var,
|
||||
));
|
||||
);
|
||||
|
||||
let val_expr2 = pool.get(*val_node_id);
|
||||
expr2_to_string_helper(val_expr2, indent_level + 3, pool, out_string);
|
||||
out_string.push_str(&format!("{})\n", get_spacing(indent_level + 2)));
|
||||
let _ = writeln!(out_string, "{})", get_spacing(indent_level + 2));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
out_string.push_str(&format!("{}]\n", get_spacing(indent_level + 1)));
|
||||
let _ = writeln!(out_string, "{}]", get_spacing(indent_level + 1));
|
||||
}
|
||||
Expr2::List { elem_var, elems } => {
|
||||
out_string.push_str("List:\n");
|
||||
out_string.push_str(&var_to_string(elem_var, indent_level + 1));
|
||||
out_string.push_str(&format!("{}elems: [\n", get_spacing(indent_level + 1)));
|
||||
let _ = writeln!(out_string, "{}elems: [\n", get_spacing(indent_level + 1));
|
||||
|
||||
let mut first_elt = true;
|
||||
|
||||
|
@ -111,42 +111,44 @@ fn expr2_to_string_helper(
|
|||
expr2_to_string_helper(elem_expr2, indent_level + 2, pool, out_string)
|
||||
}
|
||||
|
||||
out_string.push_str(&format!("{}]\n", get_spacing(indent_level + 1)));
|
||||
let _ = writeln!(out_string, "{}]", get_spacing(indent_level + 1));
|
||||
}
|
||||
Expr2::InvalidLookup(pool_str) => {
|
||||
out_string.push_str(&format!("InvalidLookup({})", pool_str.as_str(pool)));
|
||||
let _ = write!(out_string, "InvalidLookup({})", pool_str.as_str(pool));
|
||||
}
|
||||
Expr2::SmallInt { text, .. } => {
|
||||
out_string.push_str(&format!("SmallInt({})", text.as_str(pool)));
|
||||
let _ = write!(out_string, "SmallInt({})", text.as_str(pool));
|
||||
}
|
||||
Expr2::LetValue {
|
||||
def_id, body_id, ..
|
||||
} => {
|
||||
out_string.push_str(&format!(
|
||||
let _ = write!(
|
||||
out_string,
|
||||
"LetValue(def_id: >>{:?}), body_id: >>{:?})",
|
||||
value_def_to_string(pool.get(*def_id), pool),
|
||||
pool.get(*body_id)
|
||||
));
|
||||
);
|
||||
}
|
||||
Expr2::Call { .. } => {
|
||||
out_string.push_str(&format!("Call({:?})", expr2,));
|
||||
let _ = write!(out_string, "Call({:?})", expr2);
|
||||
}
|
||||
Expr2::Closure { args, .. } => {
|
||||
out_string.push_str("Closure:\n");
|
||||
out_string.push_str(&format!("{}args: [\n", get_spacing(indent_level + 1)));
|
||||
let _ = writeln!(out_string, "{}args: [", get_spacing(indent_level + 1));
|
||||
|
||||
for (_, pattern_id) in args.iter(pool) {
|
||||
let arg_pattern2 = pool.get(*pattern_id);
|
||||
|
||||
out_string.push_str(&format!(
|
||||
"{}{:?}\n",
|
||||
let _ = writeln!(
|
||||
out_string,
|
||||
"{}{:?}",
|
||||
get_spacing(indent_level + 2),
|
||||
arg_pattern2
|
||||
));
|
||||
);
|
||||
}
|
||||
}
|
||||
&Expr2::Var { .. } => {
|
||||
out_string.push_str(&format!("{:?}", expr2,));
|
||||
let _ = write!(out_string, "{:?}", expr2);
|
||||
}
|
||||
Expr2::RuntimeError { .. } => {
|
||||
out_string.push_str("RuntimeError\n");
|
||||
|
|
|
@ -278,7 +278,7 @@ pub fn expr_to_expr2<'a>(
|
|||
}
|
||||
}
|
||||
|
||||
Access(record_expr, field) => {
|
||||
RecordAccess(record_expr, field) => {
|
||||
// TODO
|
||||
let region = ZERO;
|
||||
let (record_expr_id, output) = to_expr_id(env, scope, record_expr, region);
|
||||
|
@ -295,7 +295,7 @@ pub fn expr_to_expr2<'a>(
|
|||
)
|
||||
}
|
||||
|
||||
AccessorFunction(field) => (
|
||||
RecordAccessorFunction(field) => (
|
||||
Expr2::Accessor {
|
||||
function_var: env.var_store.fresh(),
|
||||
record_var: env.var_store.fresh(),
|
||||
|
@ -352,7 +352,7 @@ pub fn expr_to_expr2<'a>(
|
|||
|
||||
for (node_id, branch) in can_branches.iter_node_ids().zip(branches.iter()) {
|
||||
let (can_when_branch, branch_references) =
|
||||
canonicalize_when_branch(env, scope, *branch, &mut output);
|
||||
canonicalize_when_branch(env, scope, branch, &mut output);
|
||||
|
||||
output.references.union_mut(branch_references);
|
||||
|
||||
|
|
|
@ -3,7 +3,7 @@ use roc_module::ident::Lowercase;
|
|||
use roc_module::symbol::Symbol;
|
||||
use roc_types::subs::Variable;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Default)]
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Default)]
|
||||
pub struct IntroducedVariables {
|
||||
// Rigids must be unique within a type annotation.
|
||||
// E.g. in `identity : a -> a`, there should only be one
|
||||
|
|
|
@ -447,6 +447,10 @@ pub fn to_pattern2<'a>(
|
|||
unreachable!("should have been handled in RecordDestructure");
|
||||
}
|
||||
|
||||
Tuple(..) => todo!(),
|
||||
List(..) => todo!(),
|
||||
ListRest => todo!(),
|
||||
|
||||
Malformed(_str) => {
|
||||
let problem = MalformedPatternProblem::Unknown;
|
||||
malformed_pattern(env, problem, region)
|
||||
|
|
|
@ -7,7 +7,7 @@ use roc_error_macros::todo_abilities;
|
|||
use roc_module::ident::{Ident, Lowercase, TagName, Uppercase};
|
||||
use roc_module::symbol::Symbol;
|
||||
use roc_region::all::{Loc, Region};
|
||||
use roc_types::types::{AliasKind, Problem, RecordField};
|
||||
use roc_types::types::{AliasKind, RecordField};
|
||||
use roc_types::{subs::Variable, types::ErrorType};
|
||||
|
||||
use crate::lang::env::Env;
|
||||
|
@ -185,7 +185,7 @@ pub enum Annotation2 {
|
|||
symbols: MutSet<Symbol>,
|
||||
signature: Signature,
|
||||
},
|
||||
Erroneous(roc_types::types::Problem),
|
||||
Erroneous,
|
||||
}
|
||||
|
||||
pub fn to_annotation2<'a>(
|
||||
|
@ -346,8 +346,8 @@ pub fn to_type2<'a>(
|
|||
references.symbols.insert(symbol);
|
||||
Type2::Alias(symbol, args, actual)
|
||||
}
|
||||
TypeApply::Erroneous(_problem) => {
|
||||
// Type2::Erroneous(problem)
|
||||
TypeApply::Erroneous => {
|
||||
// Type2::Erroneous
|
||||
todo!()
|
||||
}
|
||||
}
|
||||
|
@ -414,24 +414,7 @@ pub fn to_type2<'a>(
|
|||
for (node_id, (label, field)) in field_types.iter_node_ids().zip(field_types_map) {
|
||||
let poolstr = PoolStr::new(label.as_str(), env.pool);
|
||||
|
||||
let rec_field = match field {
|
||||
RecordField::Optional(_) => {
|
||||
let field_id = env.pool.add(field.into_inner());
|
||||
RecordField::Optional(field_id)
|
||||
}
|
||||
RecordField::RigidOptional(_) => {
|
||||
let field_id = env.pool.add(field.into_inner());
|
||||
RecordField::RigidOptional(field_id)
|
||||
}
|
||||
RecordField::Demanded(_) => {
|
||||
let field_id = env.pool.add(field.into_inner());
|
||||
RecordField::Demanded(field_id)
|
||||
}
|
||||
RecordField::Required(_) => {
|
||||
let field_id = env.pool.add(field.into_inner());
|
||||
RecordField::Required(field_id)
|
||||
}
|
||||
};
|
||||
let rec_field = field.map_owned(|field| env.pool.add(field));
|
||||
env.pool[node_id] = (poolstr, rec_field);
|
||||
}
|
||||
|
||||
|
@ -738,7 +721,7 @@ fn can_tags<'a>(
|
|||
enum TypeApply {
|
||||
Apply(Symbol, PoolVec<Type2>),
|
||||
Alias(Symbol, PoolVec<TypeId>, TypeId),
|
||||
Erroneous(roc_types::types::Problem),
|
||||
Erroneous,
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
|
@ -761,7 +744,7 @@ fn to_type_apply<'a>(
|
|||
Err(problem) => {
|
||||
env.problem(roc_problem::can::Problem::RuntimeError(problem));
|
||||
|
||||
return TypeApply::Erroneous(Problem::UnrecognizedIdent(ident.into()));
|
||||
return TypeApply::Erroneous;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
|
@ -772,7 +755,7 @@ fn to_type_apply<'a>(
|
|||
// it was imported but it doesn't expose this ident.
|
||||
env.problem(roc_problem::can::Problem::RuntimeError(problem));
|
||||
|
||||
return TypeApply::Erroneous(Problem::UnrecognizedIdent((*ident).into()));
|
||||
return TypeApply::Erroneous;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
@ -792,14 +775,7 @@ fn to_type_apply<'a>(
|
|||
let mut substitutions: MutMap<Variable, TypeId> = MutMap::default();
|
||||
|
||||
if alias.targs.len() != args.len() {
|
||||
let error = TypeApply::Erroneous(Problem::BadTypeArguments {
|
||||
symbol,
|
||||
region,
|
||||
alias_needs: alias.targs.len() as u8,
|
||||
type_got: args.len() as u8,
|
||||
alias_kind: AliasKind::Structural,
|
||||
});
|
||||
return error;
|
||||
return TypeApply::Erroneous;
|
||||
}
|
||||
|
||||
let arguments = PoolVec::with_capacity(type_arguments.len() as u32, env.pool);
|
||||
|
|
|
@ -169,7 +169,7 @@ impl Scope {
|
|||
aliases.insert(symbol, alias);
|
||||
}
|
||||
|
||||
let idents = Symbol::default_in_scope();
|
||||
let idents = Symbol::apply_types_in_scope();
|
||||
let idents: MutMap<_, _> = idents.into_iter().collect();
|
||||
|
||||
Scope {
|
||||
|
|
|
@ -1,3 +1,8 @@
//! Library for the Roc AST
//!
//! Code to represent the [Abstract Syntax Tree](https://en.wikipedia.org/wiki/Abstract_syntax_tree)
//! as used by the editor. In contrast to the compiler, the types in this AST do
//! not keep track of the location of the matching code in the source file.
pub mod ast_error;
mod builtin_aliases;
mod canonicalization;
@ -4,7 +4,7 @@ use crate::lang::core::{expr::expr2::ExprId, header::AppHeader};
|
|||
pub fn parse_from_string(_header_str: &str, ast_node_id: ExprId) -> AppHeader {
|
||||
AppHeader {
|
||||
app_name: "\"untitled-app\"".to_owned(),
|
||||
packages_base: "\"platform/main.roc\"".to_owned(),
|
||||
packages_base: "\"rust-platform/main.roc\"".to_owned(),
|
||||
imports: vec![],
|
||||
provides: vec!["main".to_owned()],
|
||||
ast_node_id,
|
||||
|
|
|
@ -13,7 +13,7 @@ use roc_types::subs::{
|
|||
Subs, SubsSlice, UnionLambdas, UnionTags, Variable, VariableSubsSlice,
|
||||
};
|
||||
use roc_types::types::{
|
||||
gather_fields_unsorted_iter, Alias, AliasKind, Category, ErrorType, PatternCategory,
|
||||
gather_fields_unsorted_iter, Alias, AliasKind, Category, ErrorType, PatternCategory, Polarity,
|
||||
RecordField,
|
||||
};
|
||||
use roc_unify::unify::unify;
|
||||
|
@ -82,7 +82,6 @@ pub enum TypeError {
|
|||
BadExpr(Region, Category, ErrorType, Expected<ErrorType>),
|
||||
BadPattern(Region, PatternCategory, ErrorType, PExpected<ErrorType>),
|
||||
CircularType(Region, Symbol, ErrorType),
|
||||
BadType(roc_types::types::Problem),
|
||||
UnexposedLookup(Symbol),
|
||||
}
|
||||
|
||||
|
@ -228,7 +227,13 @@ fn solve<'a>(
|
|||
expectation.get_type_ref(),
|
||||
);
|
||||
|
||||
match unify(&mut UEnv::new(subs), actual, expected, Mode::EQ) {
|
||||
match unify(
|
||||
&mut UEnv::new(subs),
|
||||
actual,
|
||||
expected,
|
||||
Mode::EQ,
|
||||
Polarity::OF_VALUE,
|
||||
) {
|
||||
Success {
|
||||
vars,
|
||||
must_implement_ability: _,
|
||||
|
@ -252,13 +257,6 @@ fn solve<'a>(
|
|||
|
||||
problems.push(problem);
|
||||
|
||||
state
|
||||
}
|
||||
BadType(vars, problem) => {
|
||||
introduce(subs, rank, pools, &vars);
|
||||
|
||||
problems.push(TypeError::BadType(problem));
|
||||
|
||||
state
|
||||
}
|
||||
}
|
||||
|
@ -327,7 +325,13 @@ fn solve<'a>(
|
|||
expectation.get_type_ref(),
|
||||
);
|
||||
|
||||
match unify(&mut UEnv::new(subs), actual, expected, Mode::EQ) {
|
||||
match unify(
|
||||
&mut UEnv::new(subs),
|
||||
actual,
|
||||
expected,
|
||||
Mode::EQ,
|
||||
Polarity::OF_VALUE,
|
||||
) {
|
||||
Success {
|
||||
vars,
|
||||
must_implement_ability: _,
|
||||
|
@ -352,13 +356,6 @@ fn solve<'a>(
|
|||
|
||||
problems.push(problem);
|
||||
|
||||
state
|
||||
}
|
||||
BadType(vars, problem) => {
|
||||
introduce(subs, rank, pools, &vars);
|
||||
|
||||
problems.push(TypeError::BadType(problem));
|
||||
|
||||
state
|
||||
}
|
||||
}
|
||||
|
@ -404,7 +401,13 @@ fn solve<'a>(
|
|||
);
|
||||
|
||||
// TODO(ayazhafiz): presence constraints for Expr2/Type2
|
||||
match unify(&mut UEnv::new(subs), actual, expected, Mode::EQ) {
|
||||
match unify(
|
||||
&mut UEnv::new(subs),
|
||||
actual,
|
||||
expected,
|
||||
Mode::EQ,
|
||||
Polarity::OF_PATTERN,
|
||||
) {
|
||||
Success {
|
||||
vars,
|
||||
must_implement_ability: _,
|
||||
|
@ -428,13 +431,6 @@ fn solve<'a>(
|
|||
|
||||
problems.push(problem);
|
||||
|
||||
state
|
||||
}
|
||||
BadType(vars, problem) => {
|
||||
introduce(subs, rank, pools, &vars);
|
||||
|
||||
problems.push(TypeError::BadType(problem));
|
||||
|
||||
state
|
||||
}
|
||||
}
|
||||
|
@ -718,7 +714,13 @@ fn solve<'a>(
|
|||
);
|
||||
let includes = type_to_var(arena, mempool, subs, rank, pools, cached_aliases, &tag_ty);
|
||||
|
||||
match unify(&mut UEnv::new(subs), actual, includes, Mode::PRESENT) {
|
||||
match unify(
|
||||
&mut UEnv::new(subs),
|
||||
actual,
|
||||
includes,
|
||||
Mode::PRESENT,
|
||||
Polarity::OF_PATTERN,
|
||||
) {
|
||||
Success {
|
||||
vars,
|
||||
must_implement_ability: _,
|
||||
|
@ -743,13 +745,6 @@ fn solve<'a>(
|
|||
|
||||
problems.push(problem);
|
||||
|
||||
state
|
||||
}
|
||||
BadType(vars, problem) => {
|
||||
introduce(subs, rank, pools, &vars);
|
||||
|
||||
problems.push(TypeError::BadType(problem));
|
||||
|
||||
state
|
||||
}
|
||||
}
|
||||
|
@ -834,6 +829,15 @@ fn type_to_variable<'a>(
|
|||
cached,
|
||||
mempool.get(*type_id),
|
||||
)),
|
||||
RigidRequired(type_id) => RigidRequired(type_to_variable(
|
||||
arena,
|
||||
mempool,
|
||||
subs,
|
||||
rank,
|
||||
pools,
|
||||
cached,
|
||||
mempool.get(*type_id),
|
||||
)),
|
||||
Optional(type_id) => Optional(type_to_variable(
|
||||
arena,
|
||||
mempool,
|
||||
|
@ -925,7 +929,7 @@ fn type_to_variable<'a>(
|
|||
arg_vars.push(arg_var);
|
||||
}
|
||||
|
||||
let arg_vars = AliasVariables::insert_into_subs(subs, arg_vars, []);
|
||||
let arg_vars = AliasVariables::insert_into_subs(subs, arg_vars, [], []);
|
||||
|
||||
let alias_var = type_to_variable(arena, mempool, subs, rank, pools, cached, alias_type);
|
||||
|
||||
|
@ -1182,7 +1186,7 @@ fn circular_error(
|
|||
loc_var: &Loc<Variable>,
|
||||
) {
|
||||
let var = loc_var.value;
|
||||
let (error_type, _) = subs.var_to_error_type(var);
|
||||
let error_type = subs.var_to_error_type(var, Polarity::Pos);
|
||||
let problem = TypeError::CircularType(loc_var.region, symbol, error_type);
|
||||
|
||||
subs.set_content(var, Content::Error);
|
||||
|
@ -1422,8 +1426,6 @@ fn adjust_rank_content(
|
|||
|
||||
rank
|
||||
}
|
||||
|
||||
Erroneous(_) => group_rank,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1562,7 +1564,7 @@ fn instantiate_rigids_help(
|
|||
}
|
||||
}
|
||||
|
||||
EmptyRecord | EmptyTagUnion | Erroneous(_) => {}
|
||||
EmptyRecord | EmptyTagUnion => {}
|
||||
|
||||
Record(fields, ext_var) => {
|
||||
for index in fields.iter_variables() {
|
||||
|
@ -1742,7 +1744,7 @@ fn deep_copy_var_help(
|
|||
Func(arg_vars, new_closure_var, new_ret_var)
|
||||
}
|
||||
|
||||
same @ EmptyRecord | same @ EmptyTagUnion | same @ Erroneous(_) => same,
|
||||
same @ EmptyRecord | same @ EmptyTagUnion => same,
|
||||
|
||||
Record(fields, ext_var) => {
|
||||
let record_fields = {
|
||||
1 crates/building_a_roc_application.svg Normal file
File diff suppressed because one or more lines are too long
After Width: | Height: | Size: 120 KiB
|
@ -1,11 +1,11 @@
[package]
name = "roc_cli"
version = "0.1.0"
version = "0.0.1"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
repository = "https://github.com/roc-lang/roc"
edition = "2021"
description = "A CLI for Roc"
description = "The Roc binary that brings together all functionality in the Roc toolset."
default-run = "roc"

[[bin]]
@ -24,7 +24,7 @@ editor = ["roc_editor"]
|
|||
|
||||
run-wasm32 = ["wasmer", "wasmer-wasi"]
|
||||
|
||||
# Compiling for a different platform than the host can cause linker errors.
|
||||
# Compiling for a different target than the current machine can cause linker errors.
|
||||
target-arm = ["roc_build/target-arm", "roc_repl_cli/target-arm"]
|
||||
target-aarch64 = ["roc_build/target-aarch64", "roc_repl_cli/target-aarch64"]
|
||||
target-x86 = ["roc_build/target-x86", "roc_repl_cli/target-x86"]
|
||||
|
@ -60,25 +60,26 @@ roc_editor = { path = "../editor", optional = true }
|
|||
roc_linker = { path = "../linker" }
|
||||
roc_repl_cli = { path = "../repl_cli", optional = true }
|
||||
roc_tracing = { path = "../tracing" }
|
||||
clap = { version = "3.1.15", default-features = false, features = ["std", "color", "suggestions"] }
|
||||
const_format = { version = "0.2.23", features = ["const_generics"] }
|
||||
bumpalo = { version = "3.8.0", features = ["collections"] }
|
||||
mimalloc = { version = "0.1.26", default-features = false }
|
||||
libc = "0.2.106"
|
||||
errno = "0.2.8"
|
||||
roc_intern = { path = "../compiler/intern" }
|
||||
roc_gen_llvm = {path = "../compiler/gen_llvm"}
|
||||
|
||||
ven_pretty = { path = "../vendor/pretty" }
|
||||
|
||||
target-lexicon = "0.12.3"
|
||||
tempfile = "3.2.0"
|
||||
wasmer-wasi = { version = "2.2.1", optional = true }
|
||||
|
||||
libloading = "0.7.1"
|
||||
roc_gen_llvm = {path = "../compiler/gen_llvm"}
|
||||
inkwell = {path = "../vendor/inkwell"}
|
||||
signal-hook = "0.3.14"
|
||||
clap.workspace = true
|
||||
const_format.workspace = true
|
||||
mimalloc.workspace = true
|
||||
bumpalo.workspace = true
|
||||
libc.workspace = true
|
||||
errno.workspace = true
|
||||
target-lexicon.workspace = true
|
||||
tempfile.workspace = true
|
||||
strum.workspace = true
|
||||
libloading.workspace = true
|
||||
signal-hook.workspace = true
|
||||
|
||||
[target.'cfg(windows)'.dependencies]
|
||||
memexec = "0.2.0"
|
||||
inkwell.workspace = true
|
||||
|
||||
# for now, uses unix/libc functions that windows does not support
|
||||
[target.'cfg(not(windows))'.dependencies]
|
||||
|
@ -93,14 +94,15 @@ wasmer = { version = "2.2.1", optional = true, default-features = false, feature
|
|||
|
||||
[dev-dependencies]
|
||||
wasmer-wasi = "2.2.1"
|
||||
pretty_assertions = "1.0.0"
|
||||
pretty_assertions = "1.3.0"
|
||||
roc_test_utils = { path = "../test_utils" }
|
||||
roc_utils = { path = "../utils" }
|
||||
indoc = "1.0.7"
|
||||
serial_test = "0.9.0"
|
||||
criterion = { git = "https://github.com/Anton-4/criterion.rs"}
|
||||
cli_utils = { path = "../cli_utils" }
|
||||
strum = "0.24.0"
|
||||
strum_macros = "0.24"
|
||||
once_cell = "1.15.0"
|
||||
parking_lot = "0.12"
|
||||
|
||||
# Wasmer singlepass compiler only works on x86_64.
|
||||
[target.'cfg(target_arch = "x86_64")'.dev-dependencies]
|
||||
@ -2,16 +2,19 @@
# Running the benchmarks

Install cargo criterion:
```
```sh
cargo install cargo-criterion
```

To prevent stack overflow on the `CFold` benchmark:
```
```sh
ulimit -s unlimited
```

In the `cli` folder execute:
```
```sh
cargo criterion
```
```
@ -1,15 +1,13 @@
|
|||
use bumpalo::Bump;
|
||||
use roc_build::{
|
||||
link::{link, preprocess_host_wasm32, rebuild_host, LinkType, LinkingStrategy},
|
||||
program::{self, Problems},
|
||||
program::{self, CodeGenOptions, Problems},
|
||||
};
|
||||
use roc_builtins::bitcode;
|
||||
use roc_collections::VecMap;
|
||||
use roc_load::{
|
||||
EntryPoint, ExecutionMode, Expectations, LoadConfig, LoadMonomorphizedError, LoadedModule,
|
||||
EntryPoint, ExecutionMode, ExpectMetadata, LoadConfig, LoadMonomorphizedError, LoadedModule,
|
||||
LoadingProblem, Threading,
|
||||
};
|
||||
use roc_module::symbol::{Interns, ModuleId};
|
||||
use roc_mono::ir::OptLevel;
|
||||
use roc_reporting::report::RenderTarget;
|
||||
use roc_target::TargetInfo;
|
||||
|
@ -30,12 +28,11 @@ fn report_timing(buf: &mut String, label: &str, duration: Duration) {
|
|||
.unwrap()
|
||||
}
|
||||
|
||||
pub struct BuiltFile {
|
||||
pub struct BuiltFile<'a> {
|
||||
pub binary_path: PathBuf,
|
||||
pub problems: Problems,
|
||||
pub total_time: Duration,
|
||||
pub expectations: VecMap<ModuleId, Expectations>,
|
||||
pub interns: Interns,
|
||||
pub expect_metadata: ExpectMetadata<'a>,
|
||||
}
|
||||
|
||||
pub enum BuildOrdering {
|
||||
|
@ -60,16 +57,15 @@ pub fn build_file<'a>(
|
|||
arena: &'a Bump,
|
||||
target: &Triple,
|
||||
app_module_path: PathBuf,
|
||||
opt_level: OptLevel,
|
||||
emit_debug_info: bool,
|
||||
code_gen_options: CodeGenOptions,
|
||||
emit_timings: bool,
|
||||
link_type: LinkType,
|
||||
linking_strategy: LinkingStrategy,
|
||||
precompiled: bool,
|
||||
prebuilt: bool,
|
||||
threading: Threading,
|
||||
wasm_dev_stack_bytes: Option<u32>,
|
||||
order: BuildOrdering,
|
||||
) -> Result<BuiltFile, BuildFileError<'a>> {
|
||||
) -> Result<BuiltFile<'a>, BuildFileError<'a>> {
|
||||
let compilation_start = Instant::now();
|
||||
let target_info = TargetInfo::from(target);
|
||||
|
||||
|
@ -121,7 +117,7 @@ pub fn build_file<'a>(
|
|||
|
||||
match roc_target::OperatingSystem::from(target.operating_system) {
|
||||
Wasi => {
|
||||
if matches!(opt_level, OptLevel::Development) {
|
||||
if matches!(code_gen_options.opt_level, OptLevel::Development) {
|
||||
("wasm", "wasm", Some("wasm"))
|
||||
} else {
|
||||
("zig", "bc", Some("wasm"))
|
||||
|
@ -151,7 +147,7 @@ pub fn build_file<'a>(
|
|||
// TODO this should probably be moved before load_and_monomorphize.
|
||||
// To do this we will need to preprocess files just for their exported symbols.
|
||||
// Also, we should no longer need to do this once we have platforms on
|
||||
// a package repository, as we can then get precompiled hosts from there.
|
||||
// a package repository, as we can then get prebuilt platforms from there.
|
||||
|
||||
let exposed_values = loaded
|
||||
.exposed_to_host
|
||||
|
@ -180,9 +176,9 @@ pub fn build_file<'a>(
|
|||
};
|
||||
|
||||
let rebuild_thread = spawn_rebuild_thread(
|
||||
opt_level,
|
||||
code_gen_options.opt_level,
|
||||
linking_strategy,
|
||||
precompiled,
|
||||
prebuilt,
|
||||
host_input_path.clone(),
|
||||
preprocessed_host_path.clone(),
|
||||
binary_path.clone(),
|
||||
|
@ -191,14 +187,6 @@ pub fn build_file<'a>(
|
|||
exposed_closure_types,
|
||||
);
|
||||
|
||||
// TODO try to move as much of this linking as possible to the precompiled
|
||||
// host, to minimize the amount of host-application linking required.
|
||||
let app_o_file = Builder::new()
|
||||
.prefix("roc_app")
|
||||
.suffix(&format!(".{}", app_extension))
|
||||
.tempfile()
|
||||
.map_err(|err| todo!("TODO Gracefully handle tempfile creation error {:?}", err))?;
|
||||
let app_o_file = app_o_file.path();
|
||||
let buf = &mut String::with_capacity(1024);
|
||||
|
||||
let mut it = loaded.timings.iter().peekable();
|
||||
|
@ -249,21 +237,20 @@ pub fn build_file<'a>(
|
|||
// inside a nested scope without causing a borrow error!
|
||||
let mut loaded = loaded;
|
||||
let problems = program::report_problems_monomorphized(&mut loaded);
|
||||
let expectations = std::mem::take(&mut loaded.expectations);
|
||||
let loaded = loaded;
|
||||
|
||||
let interns = loaded.interns.clone();
|
||||
|
||||
enum HostRebuildTiming {
|
||||
BeforeApp(u128),
|
||||
ConcurrentWithApp(JoinHandle<u128>),
|
||||
}
|
||||
|
||||
let rebuild_timing = if linking_strategy == LinkingStrategy::Additive {
|
||||
let rebuild_duration = rebuild_thread.join().unwrap();
|
||||
if emit_timings && !precompiled {
|
||||
let rebuild_duration = rebuild_thread
|
||||
.join()
|
||||
.expect("Failed to (re)build platform.");
|
||||
if emit_timings && !prebuilt {
|
||||
println!(
|
||||
"Finished rebuilding and preprocessing the host in {} ms\n",
|
||||
"Finished rebuilding the platform in {} ms\n",
|
||||
rebuild_duration
|
||||
);
|
||||
}
|
||||
|
@ -272,14 +259,12 @@ pub fn build_file<'a>(
|
|||
HostRebuildTiming::ConcurrentWithApp(rebuild_thread)
|
||||
};
|
||||
|
||||
let code_gen_timing = program::gen_from_mono_module(
|
||||
let (roc_app_bytes, code_gen_timing, expect_metadata) = program::gen_from_mono_module(
|
||||
arena,
|
||||
loaded,
|
||||
&app_module_path,
|
||||
target,
|
||||
app_o_file,
|
||||
opt_level,
|
||||
emit_debug_info,
|
||||
code_gen_options,
|
||||
&preprocessed_host_path,
|
||||
wasm_dev_stack_bytes,
|
||||
);
|
||||
|
@ -294,18 +279,10 @@ pub fn build_file<'a>(
|
|||
"Generate Assembly from Mono IR",
|
||||
code_gen_timing.code_gen,
|
||||
);
|
||||
report_timing(buf, "Emit .o file", code_gen_timing.emit_o_file);
|
||||
|
||||
let compilation_end = compilation_start.elapsed();
|
||||
|
||||
let size = std::fs::metadata(&app_o_file)
|
||||
.unwrap_or_else(|err| {
|
||||
panic!(
|
||||
"Could not open {:?} - which was supposed to have been generated. Error: {:?}",
|
||||
app_o_file, err
|
||||
);
|
||||
})
|
||||
.len();
|
||||
let size = roc_app_bytes.len();
|
||||
|
||||
if emit_timings {
|
||||
println!(
|
||||
|
@ -321,29 +298,44 @@ pub fn build_file<'a>(
|
|||
}
|
||||
|
||||
if let HostRebuildTiming::ConcurrentWithApp(thread) = rebuild_timing {
|
||||
let rebuild_duration = thread.join().unwrap();
|
||||
if emit_timings && !precompiled {
|
||||
let rebuild_duration = thread.join().expect("Failed to (re)build platform.");
|
||||
if emit_timings && !prebuilt {
|
||||
println!(
|
||||
"Finished rebuilding and preprocessing the host in {} ms\n",
|
||||
"Finished rebuilding the platform in {} ms\n",
|
||||
rebuild_duration
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Step 2: link the precompiled host and compiled app
|
||||
// Step 2: link the prebuilt platform and compiled app
|
||||
let link_start = Instant::now();
|
||||
let problems = match (linking_strategy, link_type) {
|
||||
(LinkingStrategy::Surgical, _) => {
|
||||
roc_linker::link_preprocessed_host(target, &host_input_path, app_o_file, &binary_path);
|
||||
roc_linker::link_preprocessed_host(
|
||||
target,
|
||||
&host_input_path,
|
||||
&roc_app_bytes,
|
||||
&binary_path,
|
||||
);
|
||||
|
||||
problems
|
||||
}
|
||||
(LinkingStrategy::Additive, _) | (LinkingStrategy::Legacy, LinkType::None) => {
|
||||
// Just copy the object file to the output folder.
|
||||
binary_path.set_extension(app_extension);
|
||||
std::fs::copy(app_o_file, &binary_path).unwrap();
|
||||
std::fs::write(&binary_path, &*roc_app_bytes).unwrap();
|
||||
problems
|
||||
}
|
||||
(LinkingStrategy::Legacy, _) => {
|
||||
let app_o_file = Builder::new()
|
||||
.prefix("roc_app")
|
||||
.suffix(&format!(".{}", app_extension))
|
||||
.tempfile()
|
||||
.map_err(|err| todo!("TODO Gracefully handle tempfile creation error {:?}", err))?;
|
||||
let app_o_file = app_o_file.path();
|
||||
|
||||
std::fs::write(app_o_file, &*roc_app_bytes).unwrap();
|
||||
|
||||
let mut inputs = vec![
|
||||
host_input_path.as_path().to_str().unwrap(),
|
||||
app_o_file.to_str().unwrap(),
|
||||
|
@ -351,7 +343,7 @@ pub fn build_file<'a>(
|
|||
|
||||
let str_host_obj_path = bitcode::get_builtins_host_obj_path();
|
||||
|
||||
if matches!(opt_level, OptLevel::Development) {
|
||||
if matches!(code_gen_options.backend, program::CodeGenBackend::Assembly) {
|
||||
inputs.push(&str_host_obj_path);
|
||||
}
|
||||
|
||||
|
@ -388,8 +380,7 @@ pub fn build_file<'a>(
|
|||
binary_path,
|
||||
problems,
|
||||
total_time,
|
||||
interns,
|
||||
expectations,
|
||||
expect_metadata,
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -397,7 +388,7 @@ pub fn build_file<'a>(
|
|||
fn spawn_rebuild_thread(
|
||||
opt_level: OptLevel,
|
||||
linking_strategy: LinkingStrategy,
|
||||
precompiled: bool,
|
||||
prebuilt: bool,
|
||||
host_input_path: PathBuf,
|
||||
preprocessed_host_path: PathBuf,
|
||||
binary_path: PathBuf,
|
||||
|
@ -407,13 +398,16 @@ fn spawn_rebuild_thread(
|
|||
) -> std::thread::JoinHandle<u128> {
|
||||
let thread_local_target = target.clone();
|
||||
std::thread::spawn(move || {
|
||||
if !precompiled {
|
||||
println!("🔨 Rebuilding host...");
|
||||
if !prebuilt {
|
||||
// Printing to stderr because we want stdout to contain only the output of the roc program.
|
||||
// We are aware of the trade-offs.
|
||||
// `cargo run` follows the same approach
|
||||
eprintln!("🔨 Rebuilding platform...");
|
||||
}
|
||||
|
||||
let rebuild_host_start = Instant::now();
|
||||
|
||||
if !precompiled {
|
||||
if !prebuilt {
|
||||
match linking_strategy {
|
||||
LinkingStrategy::Additive => {
|
||||
let host_dest = rebuild_host(
|
||||
|
|
|
@ -55,8 +55,7 @@ fn flatten_directories(files: std::vec::Vec<PathBuf>) -> std::vec::Vec<PathBuf>
|
|||
}
|
||||
|
||||
fn is_roc_file(path: &Path) -> bool {
|
||||
let ext = path.extension().and_then(OsStr::to_str);
|
||||
return matches!(ext, Some("roc"));
|
||||
matches!(path.extension().and_then(OsStr::to_str), Some("roc"))
|
||||
}
|
||||
|
||||
pub fn format(files: std::vec::Vec<PathBuf>, mode: FormatMode) -> Result<(), String> {
|
||||
|
@ -101,11 +100,11 @@ pub fn format(files: std::vec::Vec<PathBuf>, mode: FormatMode) -> Result<(), Str
|
|||
|
||||
let mut before_file = file.clone();
|
||||
before_file.set_extension("roc-format-failed-ast-before");
|
||||
std::fs::write(&before_file, &format!("{:#?}\n", ast)).unwrap();
|
||||
std::fs::write(&before_file, &format!("{:#?}\n", ast_normalized)).unwrap();
|
||||
|
||||
let mut after_file = file.clone();
|
||||
after_file.set_extension("roc-format-failed-ast-after");
|
||||
std::fs::write(&after_file, &format!("{:#?}\n", reparsed_ast)).unwrap();
|
||||
std::fs::write(&after_file, &format!("{:#?}\n", reparsed_ast_normalized)).unwrap();
|
||||
|
||||
internal_error!(
|
||||
"Formatting bug; formatting didn't reparse as the same tree\n\n\
|
||||
|
@ -158,7 +157,9 @@ fn parse_all<'a>(arena: &'a Bump, src: &'a str) -> Result<Ast<'a>, SyntaxError<'
|
|||
let (module, state) = module::parse_header(arena, State::new(src.as_bytes()))
|
||||
.map_err(|e| SyntaxError::Header(e.problem))?;
|
||||
|
||||
let (_, defs, _) = module_defs().parse(arena, state).map_err(|(_, e, _)| e)?;
|
||||
let (_, defs, _) = module_defs()
|
||||
.parse(arena, state, 0)
|
||||
.map_err(|(_, e, _)| e)?;
|
||||
|
||||
Ok(Ast { module, defs })
|
||||
}
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
//! Provides the core CLI functionality for the Roc binary.
|
||||
|
||||
#[macro_use]
|
||||
extern crate const_format;
|
||||
|
||||
|
@ -5,17 +7,18 @@ use build::BuiltFile;
|
|||
use bumpalo::Bump;
|
||||
use clap::{Arg, ArgMatches, Command, ValueSource};
|
||||
use roc_build::link::{LinkType, LinkingStrategy};
|
||||
use roc_collections::VecMap;
|
||||
use roc_build::program::{CodeGenBackend, CodeGenOptions, Problems};
|
||||
use roc_error_macros::{internal_error, user_error};
|
||||
use roc_load::{Expectations, LoadingProblem, Threading};
|
||||
use roc_module::symbol::{Interns, ModuleId};
|
||||
use roc_load::{ExpectMetadata, LoadingProblem, Threading};
|
||||
use roc_mono::ir::OptLevel;
|
||||
use std::env;
|
||||
use std::ffi::{CString, OsStr};
|
||||
use std::io;
|
||||
use std::mem::ManuallyDrop;
|
||||
use std::os::raw::{c_char, c_int};
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process;
|
||||
use strum::{EnumIter, IntoEnumIterator, IntoStaticStr};
|
||||
use target_lexicon::BinaryFormat;
|
||||
use target_lexicon::{
|
||||
Architecture, Environment, OperatingSystem, Triple, Vendor, X86_32Architecture,
|
||||
|
@ -42,6 +45,7 @@ pub const CMD_VERSION: &str = "version";
|
|||
pub const CMD_FORMAT: &str = "format";
|
||||
pub const CMD_TEST: &str = "test";
|
||||
pub const CMD_GLUE: &str = "glue";
|
||||
pub const CMD_GEN_STUB_LIB: &str = "gen-stub-lib";
|
||||
|
||||
pub const FLAG_DEBUG: &str = "debug";
|
||||
pub const FLAG_DEV: &str = "dev";
|
||||
|
@ -53,7 +57,7 @@ pub const FLAG_NO_LINK: &str = "no-link";
|
|||
pub const FLAG_TARGET: &str = "target";
|
||||
pub const FLAG_TIME: &str = "time";
|
||||
pub const FLAG_LINKER: &str = "linker";
|
||||
pub const FLAG_PRECOMPILED: &str = "precompiled-host";
|
||||
pub const FLAG_PREBUILT: &str = "prebuilt-platform";
|
||||
pub const FLAG_CHECK: &str = "check";
|
||||
pub const FLAG_WASM_STACK_SIZE_KB: &str = "wasm-stack-size-kb";
|
||||
pub const ROC_FILE: &str = "ROC_FILE";
|
||||
|
@ -67,51 +71,51 @@ const VERSION: &str = include_str!("../../../version.txt");
|
|||
pub fn build_app<'a>() -> Command<'a> {
|
||||
let flag_optimize = Arg::new(FLAG_OPTIMIZE)
|
||||
.long(FLAG_OPTIMIZE)
|
||||
.help("Optimize the compiled program to run faster. (Optimization takes time to complete.)")
|
||||
.help("Optimize the compiled program to run faster\n(Optimization takes time to complete.)")
|
||||
.required(false);
|
||||
|
||||
let flag_max_threads = Arg::new(FLAG_MAX_THREADS)
|
||||
.long(FLAG_MAX_THREADS)
|
||||
.help("Limit the number of threads (and hence cores) used during compilation.")
|
||||
.help("Limit the number of threads (and hence cores) used during compilation")
|
||||
.takes_value(true)
|
||||
.validator(|s| s.parse::<usize>())
|
||||
.required(false);
|
||||
|
||||
let flag_opt_size = Arg::new(FLAG_OPT_SIZE)
|
||||
.long(FLAG_OPT_SIZE)
|
||||
.help("Optimize the compiled program to have a small binary size. (Optimization takes time to complete.)")
|
||||
.help("Optimize the compiled program to have a small binary size\n(Optimization takes time to complete.)")
|
||||
.required(false);
|
||||
|
||||
let flag_dev = Arg::new(FLAG_DEV)
|
||||
.long(FLAG_DEV)
|
||||
.help("Make compilation finish as soon as possible, at the expense of runtime performance.")
|
||||
.help("Make compilation finish as soon as possible, at the expense of runtime performance")
|
||||
.required(false);
|
||||
|
||||
let flag_debug = Arg::new(FLAG_DEBUG)
|
||||
.long(FLAG_DEBUG)
|
||||
.help("Store LLVM debug information in the generated program.")
|
||||
.help("Store LLVM debug information in the generated program")
|
||||
.required(false);
|
||||
|
||||
let flag_time = Arg::new(FLAG_TIME)
|
||||
.long(FLAG_TIME)
|
||||
.help("Prints detailed compilation time information.")
|
||||
.help("Print detailed compilation time information")
|
||||
.required(false);
|
||||
|
||||
let flag_linker = Arg::new(FLAG_LINKER)
|
||||
.long(FLAG_LINKER)
|
||||
.help("Sets which linker to use. The surgical linker is enabled by default only when building for wasm32 or x86_64 Linux, because those are the only targets it currently supports. Otherwise the legacy linker is used by default.")
|
||||
.help("Set which linker to use\n(The surgical linker is enabled by default only when building for wasm32 or x86_64 Linux, because those are the only targets it currently supports. Otherwise the legacy linker is used by default.)")
|
||||
.possible_values(["surgical", "legacy"])
|
||||
.required(false);
|
||||
|
||||
let flag_precompiled = Arg::new(FLAG_PRECOMPILED)
|
||||
.long(FLAG_PRECOMPILED)
|
||||
.help("Assumes the host has been precompiled and skips recompiling the host. (Enabled by default when using `roc build` with a --target other than `--target host`)")
|
||||
let flag_prebuilt = Arg::new(FLAG_PREBUILT)
|
||||
.long(FLAG_PREBUILT)
|
||||
.help("Assume the platform has been prebuilt and skip rebuilding the platform\n(This is enabled by default when using `roc build` with a --target other than `--target <current machine>`.)")
|
||||
.possible_values(["true", "false"])
|
||||
.required(false);
|
||||
|
||||
let flag_wasm_stack_size_kb = Arg::new(FLAG_WASM_STACK_SIZE_KB)
|
||||
.long(FLAG_WASM_STACK_SIZE_KB)
|
||||
.help("Stack size in kilobytes for wasm32 target. Only applies when --dev also provided.")
|
||||
.help("Stack size in kilobytes for wasm32 target\n(This only applies when --dev also provided.)")
|
||||
.takes_value(true)
|
||||
.validator(|s| s.parse::<u32>())
|
||||
.required(false);
|
||||
|
@ -123,7 +127,7 @@ pub fn build_app<'a>() -> Command<'a> {
|
|||
.default_value(DEFAULT_ROC_FILENAME);
|
||||
|
||||
let args_for_app = Arg::new(ARGS_FOR_APP)
|
||||
.help("Arguments to pass into the app being run, e.g. `roc run -- arg1 arg2`")
|
||||
.help("Arguments to pass into the app being run\ne.g. `roc run -- arg1 arg2`")
|
||||
.allow_invalid_utf8(true)
|
||||
.multiple_values(true)
|
||||
.takes_value(true)
|
||||
|
@ -132,7 +136,7 @@ pub fn build_app<'a>() -> Command<'a> {
|
|||
|
||||
let app = Command::new("roc")
|
||||
.version(concatcp!(VERSION, "\n"))
|
||||
.about("Runs the given .roc file, if there are no compilation errors.\nUse one of the SUBCOMMANDS below to do something else!")
|
||||
.about("Run the given .roc file, if there are no compilation errors.\nYou can use one of the SUBCOMMANDS below to do something else!")
|
||||
.subcommand(Command::new(CMD_BUILD)
|
||||
.about("Build a binary from the given .roc file, but don't run it")
|
||||
.arg(flag_optimize.clone())
|
||||
|
@ -142,26 +146,28 @@ pub fn build_app<'a>() -> Command<'a> {
|
|||
.arg(flag_debug.clone())
|
||||
.arg(flag_time.clone())
|
||||
.arg(flag_linker.clone())
|
||||
.arg(flag_precompiled.clone())
|
||||
.arg(flag_prebuilt.clone())
|
||||
.arg(flag_wasm_stack_size_kb.clone())
|
||||
.arg(
|
||||
Arg::new(FLAG_TARGET)
|
||||
.long(FLAG_TARGET)
|
||||
.help("Choose a different target")
|
||||
.default_value(Target::default().as_str())
|
||||
.possible_values(Target::OPTIONS)
|
||||
.default_value(Target::default().into())
|
||||
.possible_values(Target::iter().map(|target| {
|
||||
Into::<&'static str>::into(target)
|
||||
}))
|
||||
.required(false),
|
||||
)
|
||||
.arg(
|
||||
Arg::new(FLAG_LIB)
|
||||
.long(FLAG_LIB)
|
||||
.help("Build a C library instead of an executable.")
|
||||
.help("Build a C library instead of an executable")
|
||||
.required(false),
|
||||
)
|
||||
.arg(
|
||||
Arg::new(FLAG_NO_LINK)
|
||||
.long(FLAG_NO_LINK)
|
||||
.help("Does not link. Instead just outputs the `.o` file")
|
||||
.help("Do not link\n(Instead, just output the `.o` file.)")
|
||||
.required(false),
|
||||
)
|
||||
.arg(
|
||||
|
@ -173,7 +179,7 @@ pub fn build_app<'a>() -> Command<'a> {
|
|||
)
|
||||
)
|
||||
.subcommand(Command::new(CMD_TEST)
|
||||
.about("Run all top-level `expect`s in a main module and any modules it imports.")
|
||||
.about("Run all top-level `expect`s in a main module and any modules it imports")
|
||||
.arg(flag_optimize.clone())
|
||||
.arg(flag_max_threads.clone())
|
||||
.arg(flag_opt_size.clone())
|
||||
|
@ -181,7 +187,7 @@ pub fn build_app<'a>() -> Command<'a> {
|
|||
.arg(flag_debug.clone())
|
||||
.arg(flag_time.clone())
|
||||
.arg(flag_linker.clone())
|
||||
.arg(flag_precompiled.clone())
|
||||
.arg(flag_prebuilt.clone())
|
||||
.arg(
|
||||
Arg::new(ROC_FILE)
|
||||
.help("The .roc file for the main module")
|
||||
|
@ -203,12 +209,12 @@ pub fn build_app<'a>() -> Command<'a> {
|
|||
.arg(flag_debug.clone())
|
||||
.arg(flag_time.clone())
|
||||
.arg(flag_linker.clone())
|
||||
.arg(flag_precompiled.clone())
|
||||
.arg(flag_prebuilt.clone())
|
||||
.arg(roc_file_to_run.clone())
|
||||
.arg(args_for_app.clone())
|
||||
)
|
||||
.subcommand(Command::new(CMD_DEV)
|
||||
.about("`check` a .roc file, and then run it if there were no errors.")
|
||||
.about("`check` a .roc file, and then run it if there were no errors")
|
||||
.arg(flag_optimize.clone())
|
||||
.arg(flag_max_threads.clone())
|
||||
.arg(flag_opt_size.clone())
|
||||
|
@ -216,7 +222,7 @@ pub fn build_app<'a>() -> Command<'a> {
|
|||
.arg(flag_debug.clone())
|
||||
.arg(flag_time.clone())
|
||||
.arg(flag_linker.clone())
|
||||
.arg(flag_precompiled.clone())
|
||||
.arg(flag_prebuilt.clone())
|
||||
.arg(roc_file_to_run.clone())
|
||||
.arg(args_for_app.clone())
|
||||
)
|
||||
|
@ -231,14 +237,14 @@ pub fn build_app<'a>() -> Command<'a> {
|
|||
.arg(
|
||||
Arg::new(FLAG_CHECK)
|
||||
.long(FLAG_CHECK)
|
||||
.help("Checks that specified files are formatted. If formatting is needed, it will return a non-zero exit code.")
|
||||
.help("Checks that specified files are formatted\n(If formatting is needed, return a non-zero exit code.)")
|
||||
.required(false),
|
||||
)
|
||||
)
|
||||
.subcommand(Command::new(CMD_VERSION)
|
||||
.about(concatcp!("Print the Roc compiler’s version, which is currently ", VERSION)))
|
||||
.subcommand(Command::new(CMD_CHECK)
|
||||
.about("Check the code for problems, but doesn’t build or run it")
|
||||
.about("Check the code for problems, but don’t build or run it")
|
||||
.arg(flag_time.clone())
|
||||
.arg(flag_max_threads.clone())
|
||||
.arg(
|
||||
|
@ -260,7 +266,7 @@ pub fn build_app<'a>() -> Command<'a> {
|
|||
)
|
||||
)
|
||||
.subcommand(Command::new(CMD_GLUE)
|
||||
.about("Generate glue code between a platform's Roc API and its host language.")
|
||||
.about("Generate glue code between a platform's Roc API and its host language")
|
||||
.arg(
|
||||
Arg::new(ROC_FILE)
|
||||
.help("The .roc file for the platform module")
|
||||
|
@ -269,20 +275,39 @@ pub fn build_app<'a>() -> Command<'a> {
|
|||
)
|
||||
.arg(
|
||||
Arg::new(GLUE_FILE)
|
||||
.help("The filename for the generated glue code. Currently, this must be a .rs file because only Rust glue generation is supported so far.")
|
||||
.help("The filename for the generated glue code\n(Currently, this must be a .rs file because only Rust glue generation is supported so far.)")
|
||||
.allow_invalid_utf8(true)
|
||||
.required(true)
|
||||
)
|
||||
)
|
||||
.subcommand(Command::new(CMD_GEN_STUB_LIB)
|
||||
.about("Generate a stubbed shared library that can be used for linking a platform binary.\nThe stubbed library has prototypes, but no function bodies.\n\nNote: This command will be removed in favor of just using `roc build` once all platforms support the surgical linker")
|
||||
.arg(
|
||||
Arg::new(ROC_FILE)
|
||||
.help("The .roc file for an app using the platform")
|
||||
.allow_invalid_utf8(true)
|
||||
.required(true)
|
||||
)
|
||||
.arg(
|
||||
Arg::new(FLAG_TARGET)
|
||||
.long(FLAG_TARGET)
|
||||
.help("Choose a different target")
|
||||
.default_value(Target::default().into())
|
||||
.possible_values(Target::iter().map(|target| {
|
||||
Into::<&'static str>::into(target)
|
||||
}))
|
||||
.required(false),
|
||||
)
|
||||
)
|
||||
.trailing_var_arg(true)
|
||||
.arg(flag_optimize)
|
||||
.arg(flag_max_threads.clone())
|
||||
.arg(flag_max_threads.clone())
|
||||
.arg(flag_opt_size)
|
||||
.arg(flag_dev)
|
||||
.arg(flag_debug)
|
||||
.arg(flag_time)
|
||||
.arg(flag_linker)
|
||||
.arg(flag_precompiled)
|
||||
.arg(flag_prebuilt)
|
||||
.arg(roc_file_to_run.required(false))
|
||||
.arg(args_for_app);
|
||||
|
||||
|
@ -294,7 +319,7 @@ pub fn build_app<'a>() -> Command<'a> {
|
|||
Arg::new(DIRECTORY_OR_FILES)
|
||||
.multiple_values(true)
|
||||
.required(false)
|
||||
.help("(optional) The directory or files to open on launch."),
|
||||
.help("(optional) The directory or files to open on launch"),
|
||||
),
|
||||
)
|
||||
} else {
|
||||
|
@ -396,7 +421,7 @@ pub fn test(matches: &ArgMatches, triple: Triple) -> io::Result<i32> {
|
|||
|
||||
let interns = loaded.interns.clone();
|
||||
|
||||
let (lib, expects) = roc_repl_expect::run::expect_mono_module_to_dylib(
|
||||
let (lib, expects, layout_interner) = roc_repl_expect::run::expect_mono_module_to_dylib(
|
||||
arena,
|
||||
target.clone(),
|
||||
loaded,
|
||||
|
@ -410,11 +435,12 @@ pub fn test(matches: &ArgMatches, triple: Triple) -> io::Result<i32> {
|
|||
|
||||
let mut writer = std::io::stdout();
|
||||
|
||||
let (failed, passed) = roc_repl_expect::run::run_expects(
|
||||
let (failed, passed) = roc_repl_expect::run::run_toplevel_expects(
|
||||
&mut writer,
|
||||
roc_reporting::report::RenderTarget::ColorTerminal,
|
||||
arena,
|
||||
interns,
|
||||
&layout_interner.into_global(),
|
||||
&lib,
|
||||
&mut expectations,
|
||||
expects,
|
||||
|
@ -458,18 +484,33 @@ pub fn build(
|
|||
use build::build_file;
|
||||
use BuildConfig::*;
|
||||
|
||||
let arena = Bump::new();
|
||||
let filename = matches.value_of_os(ROC_FILE).unwrap();
|
||||
let opt_level = match (
|
||||
matches.is_present(FLAG_OPTIMIZE),
|
||||
matches.is_present(FLAG_OPT_SIZE),
|
||||
matches.is_present(FLAG_DEV),
|
||||
) {
|
||||
(true, false, false) => OptLevel::Optimize,
|
||||
(false, true, false) => OptLevel::Size,
|
||||
(false, false, true) => OptLevel::Development,
|
||||
(false, false, false) => OptLevel::Normal,
|
||||
_ => user_error!("build can be only one of `--dev`, `--optimize`, or `--opt-size`"),
|
||||
// the process will end after this function,
|
||||
// so we don't want to spend time freeing these values
|
||||
let arena = ManuallyDrop::new(Bump::new());
|
||||
|
||||
let code_gen_backend = if matches!(triple.architecture, Architecture::Wasm32) {
|
||||
CodeGenBackend::Wasm
|
||||
} else {
|
||||
match matches.is_present(FLAG_DEV) {
|
||||
true => CodeGenBackend::Assembly,
|
||||
false => CodeGenBackend::Llvm,
|
||||
}
|
||||
};
|
||||
|
||||
let opt_level = if let BuildConfig::BuildAndRunIfNoErrors = config {
|
||||
OptLevel::Development
|
||||
} else {
|
||||
match (
|
||||
matches.is_present(FLAG_OPTIMIZE),
|
||||
matches.is_present(FLAG_OPT_SIZE),
|
||||
) {
|
||||
(true, false) => OptLevel::Optimize,
|
||||
(false, true) => OptLevel::Size,
|
||||
(false, false) => OptLevel::Normal,
|
||||
(true, true) => {
|
||||
user_error!("build can be only one of `--optimize` and `--opt-size`")
|
||||
}
|
||||
}
|
||||
};
|
||||
let emit_debug_info = matches.is_present(FLAG_DEBUG);
|
||||
let emit_timings = matches.is_present(FLAG_TIME);
|
||||
|
@ -485,7 +526,7 @@ pub fn build(
|
|||
};
|
||||
|
||||
let wasm_dev_backend = matches!(opt_level, OptLevel::Development)
|
||||
&& matches!(triple.architecture, Architecture::Wasm32);
|
||||
&& matches!(code_gen_backend, CodeGenBackend::Wasm);
|
||||
|
||||
let linking_strategy = if wasm_dev_backend {
|
||||
LinkingStrategy::Additive
|
||||
|
@ -497,14 +538,16 @@ pub fn build(
|
|||
LinkingStrategy::Surgical
|
||||
};
|
||||
|
||||
let precompiled = if matches.is_present(FLAG_PRECOMPILED) {
|
||||
matches.value_of(FLAG_PRECOMPILED) == Some("true")
|
||||
let prebuilt = if matches.is_present(FLAG_PREBUILT) {
|
||||
matches.value_of(FLAG_PREBUILT) == Some("true")
|
||||
} else {
|
||||
// When compiling for a different target, default to assuming a precompiled host.
|
||||
// Otherwise compilation would most likely fail because many toolchains assume you're compiling for the host
|
||||
// When compiling for a different target, default to assuming a prebuilt platform.
|
||||
// Otherwise compilation would most likely fail because many toolchains assume you're compiling for the current machine.
|
||||
// We make an exception for Wasm, because cross-compiling is the norm in that case.
|
||||
triple != Triple::host() && !matches!(triple.architecture, Architecture::Wasm32)
|
||||
};
|
||||
|
||||
let filename = matches.value_of_os(ROC_FILE).unwrap();
|
||||
let path = Path::new(filename);
|
||||
|
||||
// Spawn the root task
|
||||
|
@ -536,16 +579,22 @@ pub fn build(
|
|||
BuildAndRunIfNoErrors => BuildOrdering::BuildIfChecks,
|
||||
_ => BuildOrdering::AlwaysBuild,
|
||||
};
|
||||
|
||||
let code_gen_options = CodeGenOptions {
|
||||
backend: code_gen_backend,
|
||||
opt_level,
|
||||
emit_debug_info,
|
||||
};
|
||||
|
||||
let res_binary_path = build_file(
|
||||
&arena,
|
||||
&triple,
|
||||
path.to_path_buf(),
|
||||
opt_level,
|
||||
emit_debug_info,
|
||||
code_gen_options,
|
||||
emit_timings,
|
||||
link_type,
|
||||
linking_strategy,
|
||||
precompiled,
|
||||
prebuilt,
|
||||
threading,
|
||||
wasm_dev_stack_bytes,
|
||||
build_ordering,
|
||||
|
@ -556,97 +605,43 @@ pub fn build(
|
|||
binary_path,
|
||||
problems,
|
||||
total_time,
|
||||
expectations,
|
||||
interns,
|
||||
expect_metadata,
|
||||
}) => {
|
||||
match config {
|
||||
BuildOnly => {
|
||||
// If possible, report the generated executable name relative to the current dir.
|
||||
let generated_filename = binary_path
|
||||
.strip_prefix(env::current_dir().unwrap())
|
||||
.unwrap_or(&binary_path);
|
||||
.unwrap_or(&binary_path)
|
||||
.to_str()
|
||||
.unwrap();
|
||||
|
||||
// No need to waste time freeing this memory,
|
||||
// since the process is about to exit anyway.
|
||||
std::mem::forget(arena);
|
||||
// std::mem::forget(arena);
|
||||
|
||||
println!(
|
||||
"\x1B[{}m{}\x1B[39m {} and \x1B[{}m{}\x1B[39m {} found in {} ms while successfully building:\n\n {}",
|
||||
if problems.errors == 0 {
|
||||
32 // green
|
||||
} else {
|
||||
33 // yellow
|
||||
},
|
||||
problems.errors,
|
||||
if problems.errors == 1 {
|
||||
"error"
|
||||
} else {
|
||||
"errors"
|
||||
},
|
||||
if problems.warnings == 0 {
|
||||
32 // green
|
||||
} else {
|
||||
33 // yellow
|
||||
},
|
||||
problems.warnings,
|
||||
if problems.warnings == 1 {
|
||||
"warning"
|
||||
} else {
|
||||
"warnings"
|
||||
},
|
||||
total_time.as_millis(),
|
||||
generated_filename.to_str().unwrap()
|
||||
);
|
||||
print_problems(problems, total_time);
|
||||
println!(" while successfully building:\n\n {generated_filename}");
|
||||
|
||||
// Return a nonzero exit code if there were problems
|
||||
Ok(problems.exit_code())
|
||||
}
|
||||
BuildAndRun => {
|
||||
if problems.errors > 0 || problems.warnings > 0 {
|
||||
print_problems(problems, total_time);
|
||||
println!(
|
||||
"\x1B[{}m{}\x1B[39m {} and \x1B[{}m{}\x1B[39m {} found in {} ms.\n\nRunning program anyway…\n\n\x1B[36m{}\x1B[39m",
|
||||
if problems.errors == 0 {
|
||||
32 // green
|
||||
} else {
|
||||
33 // yellow
|
||||
},
|
||||
problems.errors,
|
||||
if problems.errors == 1 {
|
||||
"error"
|
||||
} else {
|
||||
"errors"
|
||||
},
|
||||
if problems.warnings == 0 {
|
||||
32 // green
|
||||
} else {
|
||||
33 // yellow
|
||||
},
|
||||
problems.warnings,
|
||||
if problems.warnings == 1 {
|
||||
"warning"
|
||||
} else {
|
||||
"warnings"
|
||||
},
|
||||
total_time.as_millis(),
|
||||
".\n\nRunning program anyway…\n\n\x1B[36m{}\x1B[39m",
|
||||
"─".repeat(80)
|
||||
);
|
||||
}
|
||||
|
||||
let args = matches.values_of_os(ARGS_FOR_APP).unwrap_or_default();
|
||||
|
||||
let mut bytes = std::fs::read(&binary_path).unwrap();
|
||||
// don't waste time deallocating; the process ends anyway
|
||||
// ManuallyDrop will leak the bytes because we don't drop manually
|
||||
let bytes = &ManuallyDrop::new(std::fs::read(&binary_path).unwrap());
|
||||
|
||||
let x = roc_run(
|
||||
arena,
|
||||
opt_level,
|
||||
triple,
|
||||
args,
|
||||
&mut bytes,
|
||||
expectations,
|
||||
interns,
|
||||
);
|
||||
std::mem::forget(bytes);
|
||||
x
|
||||
roc_run(&arena, opt_level, triple, args, bytes, expect_metadata)
|
||||
}
|
||||
BuildAndRunIfNoErrors => {
|
||||
debug_assert!(
|
||||
|
@ -654,34 +649,20 @@ pub fn build(
|
|||
"if there are errors, they should have been returned as an error variant"
|
||||
);
|
||||
if problems.warnings > 0 {
|
||||
print_problems(problems, total_time);
|
||||
println!(
|
||||
"\x1B[32m0\x1B[39m errors and \x1B[33m{}\x1B[39m {} found in {} ms.\n\nRunning program…\n\n\x1B[36m{}\x1B[39m",
|
||||
problems.warnings,
|
||||
if problems.warnings == 1 {
|
||||
"warning"
|
||||
} else {
|
||||
"warnings"
|
||||
},
|
||||
total_time.as_millis(),
|
||||
".\n\nRunning program…\n\n\x1B[36m{}\x1B[39m",
|
||||
"─".repeat(80)
|
||||
);
|
||||
}
|
||||
|
||||
let args = matches.values_of_os(ARGS_FOR_APP).unwrap_or_default();
|
||||
|
||||
let mut bytes = std::fs::read(&binary_path).unwrap();
|
||||
// don't waste time deallocating; the process ends anyway
|
||||
// ManuallyDrop will leak the bytes because we don't drop manually
|
||||
let bytes = &ManuallyDrop::new(std::fs::read(&binary_path).unwrap());
|
||||
|
||||
let x = roc_run(
|
||||
arena,
|
||||
opt_level,
|
||||
triple,
|
||||
args,
|
||||
&mut bytes,
|
||||
expectations,
|
||||
interns,
|
||||
);
|
||||
std::mem::forget(bytes);
|
||||
x
|
||||
roc_run(&arena, opt_level, triple, args, bytes, expect_metadata)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -693,40 +674,17 @@ pub fn build(
|
|||
|
||||
let problems = roc_build::program::report_problems_typechecked(&mut module);
|
||||
|
||||
let mut output = format!(
|
||||
"\x1B[{}m{}\x1B[39m {} and \x1B[{}m{}\x1B[39m {} found in {} ms.\n\nYou can run the program anyway with \x1B[32mroc run",
|
||||
if problems.errors == 0 {
|
||||
32 // green
|
||||
} else {
|
||||
33 // yellow
|
||||
},
|
||||
problems.errors,
|
||||
if problems.errors == 1 {
|
||||
"error"
|
||||
} else {
|
||||
"errors"
|
||||
},
|
||||
if problems.warnings == 0 {
|
||||
32 // green
|
||||
} else {
|
||||
33 // yellow
|
||||
},
|
||||
problems.warnings,
|
||||
if problems.warnings == 1 {
|
||||
"warning"
|
||||
} else {
|
||||
"warnings"
|
||||
},
|
||||
total_time.as_millis(),
|
||||
);
|
||||
print_problems(problems, total_time);
|
||||
|
||||
print!(".\n\nYou can run the program anyway with \x1B[32mroc run");
|
||||
|
||||
// If you're running "main.roc" then you can just do `roc run`
|
||||
// to re-run the program.
|
||||
if filename != DEFAULT_ROC_FILENAME {
|
||||
output.push(' ');
|
||||
output.push_str(&filename.to_string_lossy());
|
||||
print!(" {}", &filename.to_string_lossy());
|
||||
}
|
||||
|
||||
println!("{}\x1B[39m", output);
|
||||
println!("\x1B[39m");
|
||||
|
||||
Ok(problems.exit_code())
|
||||
}
|
||||
|
@ -741,14 +699,41 @@ pub fn build(
|
|||
}
|
||||
}
|
||||
|
||||
fn print_problems(problems: Problems, total_time: std::time::Duration) {
|
||||
const GREEN: usize = 32;
|
||||
const YELLOW: usize = 33;
|
||||
|
||||
print!(
|
||||
"\x1B[{}m{}\x1B[39m {} and \x1B[{}m{}\x1B[39m {} found in {} ms",
|
||||
match problems.errors {
|
||||
0 => GREEN,
|
||||
_ => YELLOW,
|
||||
},
|
||||
problems.errors,
|
||||
match problems.errors {
|
||||
1 => "error",
|
||||
_ => "errors",
|
||||
},
|
||||
match problems.warnings {
|
||||
0 => GREEN,
|
||||
_ => YELLOW,
|
||||
},
|
||||
problems.warnings,
|
||||
match problems.warnings {
|
||||
1 => "warning",
|
||||
_ => "warnings",
|
||||
},
|
||||
total_time.as_millis(),
|
||||
);
|
||||
}
|
||||
|
||||
fn roc_run<'a, I: IntoIterator<Item = &'a OsStr>>(
|
||||
arena: Bump, // This should be passed an owned value, not a reference, so we can usefully mem::forget it!
|
||||
arena: &Bump,
|
||||
opt_level: OptLevel,
|
||||
triple: Triple,
|
||||
args: I,
|
||||
binary_bytes: &mut [u8],
|
||||
expectations: VecMap<ModuleId, Expectations>,
|
||||
interns: Interns,
|
||||
binary_bytes: &[u8],
|
||||
expect_metadata: ExpectMetadata,
|
||||
) -> io::Result<i32> {
|
||||
match triple.architecture {
|
||||
Architecture::Wasm32 => {
|
||||
|
@ -759,10 +744,6 @@ fn roc_run<'a, I: IntoIterator<Item = &'a OsStr>>(
|
|||
.strip_prefix(env::current_dir().unwrap())
|
||||
.unwrap_or(path);
|
||||
|
||||
// No need to waste time freeing this memory,
|
||||
// since the process is about to exit anyway.
|
||||
std::mem::forget(arena);
|
||||
|
||||
#[cfg(target_family = "unix")]
|
||||
{
|
||||
use std::os::unix::ffi::OsStrExt;
|
||||
|
@ -787,11 +768,21 @@ fn roc_run<'a, I: IntoIterator<Item = &'a OsStr>>(
|
|||
|
||||
Ok(0)
|
||||
}
|
||||
_ => roc_run_native(arena, opt_level, args, binary_bytes, expectations, interns),
|
||||
_ => roc_run_native(arena, opt_level, args, binary_bytes, expect_metadata),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_family = "unix")]
|
||||
fn os_str_as_utf8_bytes(os_str: &OsStr) -> &[u8] {
|
||||
use std::os::unix::ffi::OsStrExt;
|
||||
os_str.as_bytes()
|
||||
}
|
||||
|
||||
#[cfg(not(target_family = "unix"))]
|
||||
fn os_str_as_utf8_bytes(os_str: &OsStr) -> &[u8] {
|
||||
os_str.to_str().unwrap().as_bytes()
|
||||
}
|
||||
|
||||
fn make_argv_envp<'a, I: IntoIterator<Item = S>, S: AsRef<OsStr>>(
|
||||
arena: &'a Bump,
|
||||
executable: &ExecutableFile,
|
||||
|
@ -801,10 +792,9 @@ fn make_argv_envp<'a, I: IntoIterator<Item = S>, S: AsRef<OsStr>>(
|
|||
bumpalo::collections::Vec<'a, CString>,
|
||||
) {
|
||||
use bumpalo::collections::CollectIn;
|
||||
use std::os::unix::ffi::OsStrExt;
|
||||
|
||||
let path = executable.as_path();
|
||||
let path_cstring = CString::new(path.as_os_str().as_bytes()).unwrap();
|
||||
let path_cstring = CString::new(os_str_as_utf8_bytes(path.as_os_str())).unwrap();
|
||||
|
||||
// argv is an array of pointers to strings passed to the new program
|
||||
// as its command-line arguments. By convention, the first of these
|
||||
|
@ -813,7 +803,7 @@ fn make_argv_envp<'a, I: IntoIterator<Item = S>, S: AsRef<OsStr>>(
|
|||
// by a NULL pointer. (Thus, in the new program, argv[argc] will be NULL.)
|
||||
let it = args
|
||||
.into_iter()
|
||||
.map(|x| CString::new(x.as_ref().as_bytes()).unwrap());
|
||||
.map(|x| CString::new(os_str_as_utf8_bytes(x.as_ref())).unwrap());
|
||||
|
||||
let argv_cstrings: bumpalo::collections::Vec<CString> =
|
||||
std::iter::once(path_cstring).chain(it).collect_in(arena);
|
||||
|
@ -821,12 +811,17 @@ fn make_argv_envp<'a, I: IntoIterator<Item = S>, S: AsRef<OsStr>>(
|
|||
// envp is an array of pointers to strings, conventionally of the
|
||||
// form key=value, which are passed as the environment of the new
|
||||
// program. The envp array must be terminated by a NULL pointer.
|
||||
let mut buffer = Vec::with_capacity(100);
|
||||
let envp_cstrings: bumpalo::collections::Vec<CString> = std::env::vars_os()
|
||||
.flat_map(|(k, v)| {
|
||||
[
|
||||
CString::new(k.as_bytes()).unwrap(),
|
||||
CString::new(v.as_bytes()).unwrap(),
|
||||
]
|
||||
.map(|(k, v)| {
|
||||
buffer.clear();
|
||||
|
||||
use std::io::Write;
|
||||
buffer.write_all(os_str_as_utf8_bytes(&k)).unwrap();
|
||||
buffer.write_all(b"=").unwrap();
|
||||
buffer.write_all(os_str_as_utf8_bytes(&v)).unwrap();
|
||||
|
||||
CString::new(buffer.as_slice()).unwrap()
|
||||
})
|
||||
.collect_in(arena);
|
||||
|
||||
|
@ -836,35 +831,32 @@ fn make_argv_envp<'a, I: IntoIterator<Item = S>, S: AsRef<OsStr>>(
|
|||
/// Run on the native OS (not on wasm)
|
||||
#[cfg(target_family = "unix")]
|
||||
fn roc_run_native<I: IntoIterator<Item = S>, S: AsRef<OsStr>>(
|
||||
arena: Bump,
|
||||
arena: &Bump,
|
||||
opt_level: OptLevel,
|
||||
args: I,
|
||||
binary_bytes: &mut [u8],
|
||||
expectations: VecMap<ModuleId, Expectations>,
|
||||
interns: Interns,
|
||||
binary_bytes: &[u8],
|
||||
expect_metadata: ExpectMetadata,
|
||||
) -> std::io::Result<i32> {
|
||||
use bumpalo::collections::CollectIn;
|
||||
|
||||
unsafe {
|
||||
let executable = roc_run_executable_file_path(binary_bytes)?;
|
||||
let (argv_cstrings, envp_cstrings) = make_argv_envp(&arena, &executable, args);
|
||||
let (argv_cstrings, envp_cstrings) = make_argv_envp(arena, &executable, args);
|
||||
|
||||
let argv: bumpalo::collections::Vec<*const c_char> = argv_cstrings
|
||||
.iter()
|
||||
.map(|s| s.as_ptr())
|
||||
.chain([std::ptr::null()])
|
||||
.collect_in(&arena);
|
||||
.collect_in(arena);
|
||||
|
||||
let envp: bumpalo::collections::Vec<*const c_char> = envp_cstrings
|
||||
.iter()
|
||||
.map(|s| s.as_ptr())
|
||||
.chain([std::ptr::null()])
|
||||
.collect_in(&arena);
|
||||
.collect_in(arena);
|
||||
|
||||
match opt_level {
|
||||
OptLevel::Development => {
|
||||
roc_run_native_debug(executable, &argv, &envp, expectations, interns)
|
||||
}
|
||||
OptLevel::Development => roc_dev_native(arena, executable, argv, envp, expect_metadata),
|
||||
OptLevel::Normal | OptLevel::Size | OptLevel::Optimize => {
|
||||
roc_run_native_fast(executable, &argv, &envp);
|
||||
}
|
||||
|
@ -928,12 +920,9 @@ impl ExecutableFile {
|
|||
|
||||
#[cfg(target_family = "windows")]
|
||||
ExecutableFile::OnDisk(_, path) => {
|
||||
let _ = argv;
|
||||
let _ = envp;
|
||||
use memexec::memexec_exe;
|
||||
let bytes = std::fs::read(path).unwrap();
|
||||
memexec_exe(&bytes).unwrap();
|
||||
std::process::exit(0);
|
||||
let path_cstring = CString::new(path.to_str().unwrap()).unwrap();
|
||||
|
||||
libc::execve(path_cstring.as_ptr().cast(), argv.as_ptr(), envp.as_ptr())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -941,18 +930,80 @@ impl ExecutableFile {
|
|||
|
||||
// with Expect
|
||||
#[cfg(target_family = "unix")]
|
||||
unsafe fn roc_run_native_debug(
|
||||
_executable: ExecutableFile,
|
||||
_argv: &[*const c_char],
|
||||
_envp: &[*const c_char],
|
||||
_expectations: VecMap<ModuleId, Expectations>,
|
||||
_interns: Interns,
|
||||
) {
|
||||
todo!()
|
||||
fn roc_dev_native(
|
||||
arena: &Bump,
|
||||
executable: ExecutableFile,
|
||||
argv: bumpalo::collections::Vec<*const c_char>,
|
||||
envp: bumpalo::collections::Vec<*const c_char>,
|
||||
expect_metadata: ExpectMetadata,
|
||||
) -> ! {
|
||||
use roc_repl_expect::run::ExpectMemory;
|
||||
use signal_hook::{consts::signal::SIGCHLD, consts::signal::SIGUSR1, iterator::Signals};
|
||||
|
||||
let ExpectMetadata {
|
||||
mut expectations,
|
||||
interns,
|
||||
layout_interner,
|
||||
} = expect_metadata;
|
||||
|
||||
let mut signals = Signals::new(&[SIGCHLD, SIGUSR1]).unwrap();
|
||||
|
||||
// let shm_name =
|
||||
let shm_name = format!("/roc_expect_buffer_{}", std::process::id());
|
||||
let memory = ExpectMemory::create_or_reuse_mmap(&shm_name);
|
||||
|
||||
let layout_interner = layout_interner.into_global();
|
||||
|
||||
let mut writer = std::io::stdout();
|
||||
|
||||
match unsafe { libc::fork() } {
|
||||
0 => unsafe {
|
||||
// we are the child
|
||||
|
||||
executable.execve(&argv, &envp);
|
||||
|
||||
// Display a human-friendly error message
|
||||
println!("Error {:?}", std::io::Error::last_os_error());
|
||||
|
||||
std::process::exit(1);
|
||||
},
|
||||
-1 => {
|
||||
// something failed
|
||||
|
||||
// Display a human-friendly error message
|
||||
println!("Error {:?}", std::io::Error::last_os_error());
|
||||
|
||||
std::process::exit(1)
|
||||
}
|
||||
1.. => {
|
||||
for sig in &mut signals {
|
||||
match sig {
|
||||
SIGCHLD => break,
|
||||
SIGUSR1 => {
|
||||
// this is the signal we use for an expect failure. Let's see what the child told us
|
||||
|
||||
roc_repl_expect::run::render_expects_in_memory(
|
||||
&mut writer,
|
||||
arena,
|
||||
&mut expectations,
|
||||
&interns,
|
||||
&layout_interner,
|
||||
&memory,
|
||||
)
|
||||
.unwrap();
|
||||
}
|
||||
_ => println!("received signal {}", sig),
|
||||
}
|
||||
}
|
||||
|
||||
std::process::exit(0)
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
fn roc_run_executable_file_path(binary_bytes: &mut [u8]) -> std::io::Result<ExecutableFile> {
|
||||
fn roc_run_executable_file_path(binary_bytes: &[u8]) -> std::io::Result<ExecutableFile> {
|
||||
// on linux, we use the `memfd_create` function to create an in-memory anonymous file.
|
||||
let flags = 0;
|
||||
let anonymous_file_name = "roc_file_descriptor\0";
|
||||
|
@ -974,7 +1025,7 @@ fn roc_run_executable_file_path(binary_bytes: &mut [u8]) -> std::io::Result<Exec
|
|||
}
|
||||
|
||||
#[cfg(all(target_family = "unix", not(target_os = "linux")))]
|
||||
fn roc_run_executable_file_path(binary_bytes: &mut [u8]) -> std::io::Result<ExecutableFile> {
|
||||
fn roc_run_executable_file_path(binary_bytes: &[u8]) -> std::io::Result<ExecutableFile> {
|
||||
use std::fs::OpenOptions;
|
||||
use std::io::Write;
|
||||
use std::os::unix::fs::OpenOptionsExt;
|
||||
|
@ -1000,7 +1051,7 @@ fn roc_run_executable_file_path(binary_bytes: &mut [u8]) -> std::io::Result<Exec
|
|||
}
|
||||
|
||||
#[cfg(all(target_family = "windows"))]
|
||||
fn roc_run_executable_file_path(binary_bytes: &mut [u8]) -> std::io::Result<ExecutableFile> {
|
||||
fn roc_run_executable_file_path(binary_bytes: &[u8]) -> std::io::Result<ExecutableFile> {
|
||||
use std::fs::OpenOptions;
|
||||
use std::io::Write;
|
||||
|
||||
|
@ -1027,12 +1078,11 @@ fn roc_run_executable_file_path(binary_bytes: &mut [u8]) -> std::io::Result<Exec
|
|||
/// Run on the native OS (not on wasm)
|
||||
#[cfg(not(target_family = "unix"))]
|
||||
fn roc_run_native<I: IntoIterator<Item = S>, S: AsRef<OsStr>>(
|
||||
arena: Bump, // This should be passed an owned value, not a reference, so we can usefully mem::forget it!
|
||||
arena: &Bump, // This should be passed an owned value, not a reference, so we can usefully mem::forget it!
|
||||
opt_level: OptLevel,
|
||||
_args: I,
|
||||
binary_bytes: &mut [u8],
|
||||
_expectations: VecMap<ModuleId, Expectations>,
|
||||
_interns: Interns,
|
||||
args: I,
|
||||
binary_bytes: &[u8],
|
||||
_expect_metadata: ExpectMetadata,
|
||||
) -> io::Result<i32> {
|
||||
use bumpalo::collections::CollectIn;
|
||||
|
||||
|
@ -1040,26 +1090,24 @@ fn roc_run_native<I: IntoIterator<Item = S>, S: AsRef<OsStr>>(
|
|||
let executable = roc_run_executable_file_path(binary_bytes)?;
|
||||
|
||||
// TODO forward the arguments
|
||||
// let (argv_cstrings, envp_cstrings) = make_argv_envp(&arena, &executable, args);
|
||||
let argv_cstrings = bumpalo::vec![ in &arena; CString::default()];
|
||||
let envp_cstrings = bumpalo::vec![ in &arena; CString::default()];
|
||||
let (argv_cstrings, envp_cstrings) = make_argv_envp(&arena, &executable, args);
|
||||
|
||||
let argv: bumpalo::collections::Vec<*const c_char> = argv_cstrings
|
||||
.iter()
|
||||
.map(|s| s.as_ptr())
|
||||
.chain([std::ptr::null()])
|
||||
.collect_in(&arena);
|
||||
.collect_in(arena);
|
||||
|
||||
let envp: bumpalo::collections::Vec<*const c_char> = envp_cstrings
|
||||
.iter()
|
||||
.map(|s| s.as_ptr())
|
||||
.chain([std::ptr::null()])
|
||||
.collect_in(&arena);
|
||||
.collect_in(arena);
|
||||
|
||||
match opt_level {
|
||||
OptLevel::Development => {
|
||||
// roc_run_native_debug(executable, &argv, &envp, expectations, interns)
|
||||
todo!()
|
||||
internal_error!("running `expect`s does not currently work on windows")
|
||||
}
|
||||
OptLevel::Normal | OptLevel::Size | OptLevel::Optimize => {
|
||||
roc_run_native_fast(executable, &argv, &envp);
|
||||
|
@@ -1106,12 +1154,17 @@ fn run_with_wasmer<I: Iterator<Item = S>, S: AsRef<[u8]>>(_wasm_path: &std::path
     println!("Running wasm files is not supported on this target.");
 }

-#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+#[derive(Debug, Copy, Clone, EnumIter, IntoStaticStr, PartialEq, Eq)]
 pub enum Target {
+    #[strum(serialize = "system")]
     System,
+    #[strum(serialize = "linux32")]
     Linux32,
+    #[strum(serialize = "linux64")]
     Linux64,
+    #[strum(serialize = "windows64")]
     Windows64,
+    #[strum(serialize = "wasm32")]
     Wasm32,
 }

@@ -1122,27 +1175,6 @@ impl Default for Target {
 }

 impl Target {
-    const fn as_str(&self) -> &'static str {
-        use Target::*;
-
-        match self {
-            System => "system",
-            Linux32 => "linux32",
-            Linux64 => "linux64",
-            Windows64 => "windows64",
-            Wasm32 => "wasm32",
-        }
-    }
-
-    /// NOTE keep up to date!
-    const OPTIONS: &'static [&'static str] = &[
-        Target::System.as_str(),
-        Target::Linux32.as_str(),
-        Target::Linux64.as_str(),
-        Target::Windows64.as_str(),
-        Target::Wasm32.as_str(),
-    ];
-
     pub fn to_triple(self) -> Triple {
         use Target::*;

@@ -1172,7 +1204,7 @@ impl Target {
         Wasm32 => Triple {
             architecture: Architecture::Wasm32,
             vendor: Vendor::Unknown,
-            operating_system: OperatingSystem::Unknown,
+            operating_system: OperatingSystem::Wasi,
             environment: Environment::Unknown,
             binary_format: BinaryFormat::Wasm,
         },
@@ -1188,7 +1220,7 @@ impl From<&Target> for Triple {

 impl std::fmt::Display for Target {
     fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
-        write!(f, "{}", self.as_str())
+        write!(f, "{}", Into::<&'static str>::into(self))
     }
 }
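The Target hunks above trade the hand-written as_str/OPTIONS table for strum derives. A small sketch of that pattern, assuming the `strum`/`strum_macros` crates; the enum here is a trimmed stand-in, not the full Target type.

use strum::IntoEnumIterator;
use strum_macros::{EnumIter, IntoStaticStr};

#[derive(Debug, Copy, Clone, EnumIter, IntoStaticStr, PartialEq, Eq)]
enum Target {
    #[strum(serialize = "system")]
    System,
    #[strum(serialize = "wasm32")]
    Wasm32,
}

fn main() {
    // IntoStaticStr replaces the manual `as_str` match...
    let name: &'static str = Target::Wasm32.into();
    assert_eq!(name, "wasm32");

    // ...and EnumIter replaces the "NOTE keep up to date!" OPTIONS list.
    let all: Vec<&'static str> = Target::iter().map(Into::into).collect();
    assert_eq!(all, ["system", "wasm32"]);
}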
@@ -1,10 +1,11 @@
+//! The `roc` binary that brings together all functionality in the Roc toolset.
 use roc_build::link::LinkType;
 use roc_cli::build::check_file;
 use roc_cli::{
     build_app, format, test, BuildConfig, FormatMode, Target, CMD_BUILD, CMD_CHECK, CMD_DEV,
-    CMD_DOCS, CMD_EDIT, CMD_FORMAT, CMD_GLUE, CMD_REPL, CMD_RUN, CMD_TEST, CMD_VERSION,
-    DIRECTORY_OR_FILES, FLAG_CHECK, FLAG_LIB, FLAG_NO_LINK, FLAG_TARGET, FLAG_TIME, GLUE_FILE,
-    ROC_FILE,
+    CMD_DOCS, CMD_EDIT, CMD_FORMAT, CMD_GEN_STUB_LIB, CMD_GLUE, CMD_REPL, CMD_RUN, CMD_TEST,
+    CMD_VERSION, DIRECTORY_OR_FILES, FLAG_CHECK, FLAG_LIB, FLAG_NO_LINK, FLAG_TARGET, FLAG_TIME,
+    GLUE_FILE, ROC_FILE,
 };
 use roc_docs::generate_docs_html;
 use roc_error_macros::user_error;
@@ -93,6 +94,12 @@ fn main() -> io::Result<()> {
                 Ok(1)
             }
         }
+        Some((CMD_GEN_STUB_LIB, matches)) => {
+            let input_path = Path::new(matches.value_of_os(ROC_FILE).unwrap());
+            let target: Target = matches.value_of_t(FLAG_TARGET).unwrap_or_default();
+
+            roc_linker::generate_stub_lib(input_path, &target.to_triple())
+        }
         Some((CMD_BUILD, matches)) => {
             let target: Target = matches.value_of_t(FLAG_TARGET).unwrap_or_default();

@@ -171,14 +178,7 @@ fn main() -> io::Result<()> {
                 }
             }
         }
-        Some((CMD_REPL, _)) => {
-            {
-                roc_repl_cli::main()?;
-
-                // Exit 0 if the repl exited normally
-                Ok(0)
-            }
-        }
+        Some((CMD_REPL, _)) => Ok(roc_repl_cli::main()),
         Some((CMD_EDIT, matches)) => {
            match matches
                .values_of_os(DIRECTORY_OR_FILES)
(File diff suppressed because it is too large.)

crates/cli/tests/editor.rs (new file, 55 lines)
@@ -0,0 +1,55 @@
#[cfg(test)]
mod editor_launch_test {

    use core::time;
    use std::{
        env,
        process::{Command, Stdio},
        thread,
    };

    use cli_utils::helpers::build_roc_bin_cached;
    use roc_cli::CMD_EDIT;
    use roc_utils::root_dir;
    use std::io::Read;

    // ignored because we don't want to bring up the editor window during regular tests, only on specific CI machines
    #[ignore]
    #[test]
    fn launch() {
        let root_dir = root_dir();

        // The editor expects to be run from the root of the repo, so it can find the cli-platform to init a new project folder.
        env::set_current_dir(&root_dir)
            .unwrap_or_else(|_| panic!("Failed to set current dir to {:?}", root_dir));

        let roc_binary_path = build_roc_bin_cached();

        let mut roc_process = Command::new(roc_binary_path)
            .arg(CMD_EDIT)
            .stdout(Stdio::piped())
            .spawn()
            .expect("Failed to start editor from cli.");

        // wait for editor to show
        thread::sleep(time::Duration::from_millis(2000));

        // We extract 12 bytes from the logs for verification
        let mut stdout_buffer = [0; 12];
        let mut stdout = roc_process.stdout.take().unwrap();
        stdout.read_exact(&mut stdout_buffer).unwrap();

        match roc_process.try_wait() {
            Ok(Some(status)) => panic!(
                "The editor exited with status \"{status}\" but I expected it to still be running."
            ),
            Ok(None) => {
                // The editor is still running as desired, we check if logs are as expected:
                assert_eq!("Loading file", std::str::from_utf8(&stdout_buffer).unwrap());
                // Kill the editor, we don't want it to stay open forever.
                roc_process.kill().unwrap();
            }
            Err(e) => panic!("Failed to wait launch editor cli command: {e}"),
        }
    }
}
|
@ -65,32 +65,63 @@ export fn roc_panic(c_ptr: *anyopaque, tag_id: u32) callconv(.C) void {
|
|||
std.process.exit(0);
|
||||
}
|
||||
|
||||
extern fn kill(pid: c_int, sig: c_int) c_int;
|
||||
extern fn shm_open(name: *const i8, oflag: c_int, mode: c_uint) c_int;
|
||||
extern fn mmap(addr: ?*anyopaque, length: c_uint, prot: c_int, flags: c_int, fd: c_int, offset: c_uint) *anyopaque;
|
||||
extern fn getppid() c_int;
|
||||
|
||||
fn roc_getppid() callconv(.C) c_int {
|
||||
return getppid();
|
||||
}
|
||||
|
||||
fn roc_getppid_windows_stub() callconv(.C) c_int {
|
||||
return 0;
|
||||
}
|
||||
|
||||
fn roc_send_signal(pid: c_int, sig: c_int) callconv(.C) c_int {
|
||||
return kill(pid, sig);
|
||||
}
|
||||
fn roc_shm_open(name: *const i8, oflag: c_int, mode: c_uint) callconv(.C) c_int {
|
||||
return shm_open(name, oflag, mode);
|
||||
}
|
||||
fn roc_mmap(addr: ?*anyopaque, length: c_uint, prot: c_int, flags: c_int, fd: c_int, offset: c_uint) callconv(.C) *anyopaque {
|
||||
return mmap(addr, length, prot, flags, fd, offset);
|
||||
}
|
||||
|
||||
comptime {
|
||||
if (builtin.os.tag == .macos or builtin.os.tag == .linux) {
|
||||
@export(roc_getppid, .{ .name = "roc_getppid", .linkage = .Strong });
|
||||
@export(roc_mmap, .{ .name = "roc_mmap", .linkage = .Strong });
|
||||
@export(roc_send_signal, .{ .name = "roc_send_signal", .linkage = .Strong });
|
||||
@export(roc_shm_open, .{ .name = "roc_shm_open", .linkage = .Strong });
|
||||
}
|
||||
|
||||
if (builtin.os.tag == .windows) {
|
||||
@export(roc_getppid_windows_stub, .{ .name = "roc_getppid", .linkage = .Strong });
|
||||
}
|
||||
}
|
||||
|
||||
const Unit = extern struct {};
|
||||
|
||||
pub export fn main() i32 {
|
||||
const stdout = std.io.getStdOut().writer();
|
||||
const stderr = std.io.getStdErr().writer();
|
||||
|
||||
// start time
|
||||
var ts1: std.os.timespec = undefined;
|
||||
std.os.clock_gettime(std.os.CLOCK.REALTIME, &ts1) catch unreachable;
|
||||
var timer = std.time.Timer.start() catch unreachable;
|
||||
|
||||
// actually call roc to populate the callresult
|
||||
var callresult = RocStr.empty();
|
||||
roc__mainForHost_1_exposed_generic(&callresult);
|
||||
|
||||
// end time
|
||||
var ts2: std.os.timespec = undefined;
|
||||
std.os.clock_gettime(std.os.CLOCK.REALTIME, &ts2) catch unreachable;
|
||||
const nanos = timer.read();
|
||||
const seconds = (@intToFloat(f64, nanos) / 1_000_000_000.0);
|
||||
|
||||
// stdout the result
|
||||
stdout.print("{s}\n", .{callresult.asSlice()}) catch unreachable;
|
||||
|
||||
callresult.deinit();
|
||||
|
||||
const delta = to_seconds(ts2) - to_seconds(ts1);
|
||||
|
||||
stderr.print("runtime: {d:.3}ms\n", .{delta * 1000}) catch unreachable;
|
||||
stderr.print("runtime: {d:.3}ms\n", .{seconds * 1000}) catch unreachable;
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
|
|
@ -64,32 +64,63 @@ export fn roc_panic(c_ptr: *anyopaque, tag_id: u32) callconv(.C) void {
|
|||
std.process.exit(0);
|
||||
}
|
||||
|
||||
extern fn kill(pid: c_int, sig: c_int) c_int;
|
||||
extern fn shm_open(name: *const i8, oflag: c_int, mode: c_uint) c_int;
|
||||
extern fn mmap(addr: ?*anyopaque, length: c_uint, prot: c_int, flags: c_int, fd: c_int, offset: c_uint) *anyopaque;
|
||||
extern fn getppid() c_int;
|
||||
|
||||
fn roc_getppid() callconv(.C) c_int {
|
||||
return getppid();
|
||||
}
|
||||
|
||||
fn roc_getppid_windows_stub() callconv(.C) c_int {
|
||||
return 0;
|
||||
}
|
||||
|
||||
fn roc_send_signal(pid: c_int, sig: c_int) callconv(.C) c_int {
|
||||
return kill(pid, sig);
|
||||
}
|
||||
fn roc_shm_open(name: *const i8, oflag: c_int, mode: c_uint) callconv(.C) c_int {
|
||||
return shm_open(name, oflag, mode);
|
||||
}
|
||||
fn roc_mmap(addr: ?*anyopaque, length: c_uint, prot: c_int, flags: c_int, fd: c_int, offset: c_uint) callconv(.C) *anyopaque {
|
||||
return mmap(addr, length, prot, flags, fd, offset);
|
||||
}
|
||||
|
||||
comptime {
|
||||
if (builtin.os.tag == .macos or builtin.os.tag == .linux) {
|
||||
@export(roc_getppid, .{ .name = "roc_getppid", .linkage = .Strong });
|
||||
@export(roc_mmap, .{ .name = "roc_mmap", .linkage = .Strong });
|
||||
@export(roc_send_signal, .{ .name = "roc_send_signal", .linkage = .Strong });
|
||||
@export(roc_shm_open, .{ .name = "roc_shm_open", .linkage = .Strong });
|
||||
}
|
||||
|
||||
if (builtin.os.tag == .windows) {
|
||||
@export(roc_getppid_windows_stub, .{ .name = "roc_getppid", .linkage = .Strong });
|
||||
}
|
||||
}
|
||||
|
||||
const Unit = extern struct {};
|
||||
|
||||
pub export fn main() i32 {
|
||||
const stdout = std.io.getStdOut().writer();
|
||||
const stderr = std.io.getStdErr().writer();
|
||||
|
||||
// start time
|
||||
var ts1: std.os.timespec = undefined;
|
||||
std.os.clock_gettime(std.os.CLOCK.REALTIME, &ts1) catch unreachable;
|
||||
var timer = std.time.Timer.start() catch unreachable;
|
||||
|
||||
// actually call roc to populate the callresult
|
||||
var callresult = RocStr.empty();
|
||||
roc__mainForHost_1_exposed_generic(&callresult);
|
||||
|
||||
// end time
|
||||
var ts2: std.os.timespec = undefined;
|
||||
std.os.clock_gettime(std.os.CLOCK.REALTIME, &ts2) catch unreachable;
|
||||
const nanos = timer.read();
|
||||
const seconds = (@intToFloat(f64, nanos) / 1_000_000_000.0);
|
||||
|
||||
// stdout the result
|
||||
stdout.print("{s}\n", .{callresult.asSlice()}) catch unreachable;
|
||||
|
||||
callresult.deinit();
|
||||
|
||||
const delta = to_seconds(ts2) - to_seconds(ts1);
|
||||
|
||||
stderr.print("runtime: {d:.3}ms\n", .{delta * 1000}) catch unreachable;
|
||||
stderr.print("runtime: {d:.3}ms\n", .{seconds * 1000}) catch unreachable;
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
|
|
@@ -1,3 +1,3 @@
-interface Foo
+interface ExposedNotDefined
     exposes [bar]
     imports []
@@ -1,11 +1,13 @@
 app "type-error"
-    packages { pf: "../../../../examples/interactive/cli-platform/main.roc" }
-    imports [pf.Stdout.{ line }, pf.Task.{ await }]
+    packages { pf: "../../../../examples/cli/cli-platform/main.roc" }
+    imports [pf.Stdout.{ line }, pf.Task.{ await }, pf.Program]
     provides [main] to pf

 main =
     _ <- await (line "a")
     _ <- await (line "b")
     _ <- await (line "c")
-    _ <- await (line d)
+    _ <- await (line "d")
     line "e"
+    # Type mismatch because this line is missing:
+    # |> Program.quick
crates/cli_testing_examples/.gitignore (new file, vendored, 6 lines)

@@ -0,0 +1,6 @@
*.dSYM
libhost.a
libapp.so
dynhost
preprocessedhost
metadata
@ -1,4 +1,5 @@
|
|||
const std = @import("std");
|
||||
const builtin = @import("builtin");
|
||||
const testing = std.testing;
|
||||
const expectEqual = testing.expectEqual;
|
||||
const expect = testing.expect;
|
||||
|
@ -13,7 +14,6 @@ comptime {
|
|||
// -fcompiler-rt in link.rs instead of doing this. Note that this
|
||||
// workaround is present in many host.zig files, so make sure to undo
|
||||
// it everywhere!
|
||||
const builtin = @import("builtin");
|
||||
if (builtin.os.tag == .macos) {
|
||||
_ = @import("compiler_rt");
|
||||
}
|
||||
|
@ -81,25 +81,56 @@ export fn roc_memset(dst: [*]u8, value: i32, size: usize) callconv(.C) void {
|
|||
return memset(dst, value, size);
|
||||
}
|
||||
|
||||
extern fn kill(pid: c_int, sig: c_int) c_int;
|
||||
extern fn shm_open(name: *const i8, oflag: c_int, mode: c_uint) c_int;
|
||||
extern fn mmap(addr: ?*anyopaque, length: c_uint, prot: c_int, flags: c_int, fd: c_int, offset: c_uint) *anyopaque;
|
||||
extern fn getppid() c_int;
|
||||
|
||||
fn roc_getppid() callconv(.C) c_int {
|
||||
return getppid();
|
||||
}
|
||||
|
||||
fn roc_getppid_windows_stub() callconv(.C) c_int {
|
||||
return 0;
|
||||
}
|
||||
|
||||
fn roc_send_signal(pid: c_int, sig: c_int) callconv(.C) c_int {
|
||||
return kill(pid, sig);
|
||||
}
|
||||
fn roc_shm_open(name: *const i8, oflag: c_int, mode: c_uint) callconv(.C) c_int {
|
||||
return shm_open(name, oflag, mode);
|
||||
}
|
||||
fn roc_mmap(addr: ?*anyopaque, length: c_uint, prot: c_int, flags: c_int, fd: c_int, offset: c_uint) callconv(.C) *anyopaque {
|
||||
return mmap(addr, length, prot, flags, fd, offset);
|
||||
}
|
||||
|
||||
comptime {
|
||||
if (builtin.os.tag == .macos or builtin.os.tag == .linux) {
|
||||
@export(roc_getppid, .{ .name = "roc_getppid", .linkage = .Strong });
|
||||
@export(roc_mmap, .{ .name = "roc_mmap", .linkage = .Strong });
|
||||
@export(roc_send_signal, .{ .name = "roc_send_signal", .linkage = .Strong });
|
||||
@export(roc_shm_open, .{ .name = "roc_shm_open", .linkage = .Strong });
|
||||
}
|
||||
|
||||
if (builtin.os.tag == .windows) {
|
||||
@export(roc_getppid_windows_stub, .{ .name = "roc_getppid", .linkage = .Strong });
|
||||
}
|
||||
}
|
||||
|
||||
pub export fn main() u8 {
|
||||
const stdout = std.io.getStdOut().writer();
|
||||
const stderr = std.io.getStdErr().writer();
|
||||
|
||||
// start time
|
||||
var ts1: std.os.timespec = undefined;
|
||||
std.os.clock_gettime(std.os.CLOCK.REALTIME, &ts1) catch unreachable;
|
||||
var timer = std.time.Timer.start() catch unreachable;
|
||||
|
||||
const result = roc__mainForHost_1_exposed(10);
|
||||
|
||||
// end time
|
||||
var ts2: std.os.timespec = undefined;
|
||||
std.os.clock_gettime(std.os.CLOCK.REALTIME, &ts2) catch unreachable;
|
||||
const nanos = timer.read();
|
||||
const seconds = (@intToFloat(f64, nanos) / 1_000_000_000.0);
|
||||
|
||||
stdout.print("{d}\n", .{result}) catch unreachable;
|
||||
|
||||
const delta = to_seconds(ts2) - to_seconds(ts1);
|
||||
|
||||
stderr.print("runtime: {d:.3}ms\n", .{delta * 1000}) catch unreachable;
|
||||
const stderr = std.io.getStdErr().writer();
|
||||
stderr.print("runtime: {d:.3}ms\n", .{seconds * 1000}) catch unreachable;
|
||||
|
||||
return 0;
|
||||
}
|
|
@ -80,6 +80,42 @@ export fn roc_memset(dst: [*]u8, value: i32, size: usize) callconv(.C) void {
|
|||
return memset(dst, value, size);
|
||||
}
|
||||
|
||||
extern fn kill(pid: c_int, sig: c_int) c_int;
|
||||
extern fn shm_open(name: *const i8, oflag: c_int, mode: c_uint) c_int;
|
||||
extern fn mmap(addr: ?*anyopaque, length: c_uint, prot: c_int, flags: c_int, fd: c_int, offset: c_uint) *anyopaque;
|
||||
extern fn getppid() c_int;
|
||||
|
||||
fn roc_getppid() callconv(.C) c_int {
|
||||
return getppid();
|
||||
}
|
||||
|
||||
fn roc_getppid_windows_stub() callconv(.C) c_int {
|
||||
return 0;
|
||||
}
|
||||
|
||||
fn roc_send_signal(pid: c_int, sig: c_int) callconv(.C) c_int {
|
||||
return kill(pid, sig);
|
||||
}
|
||||
fn roc_shm_open(name: *const i8, oflag: c_int, mode: c_uint) callconv(.C) c_int {
|
||||
return shm_open(name, oflag, mode);
|
||||
}
|
||||
fn roc_mmap(addr: ?*anyopaque, length: c_uint, prot: c_int, flags: c_int, fd: c_int, offset: c_uint) callconv(.C) *anyopaque {
|
||||
return mmap(addr, length, prot, flags, fd, offset);
|
||||
}
|
||||
|
||||
comptime {
|
||||
if (builtin.os.tag == .macos or builtin.os.tag == .linux) {
|
||||
@export(roc_getppid, .{ .name = "roc_getppid", .linkage = .Strong });
|
||||
@export(roc_mmap, .{ .name = "roc_mmap", .linkage = .Strong });
|
||||
@export(roc_send_signal, .{ .name = "roc_send_signal", .linkage = .Strong });
|
||||
@export(roc_shm_open, .{ .name = "roc_shm_open", .linkage = .Strong });
|
||||
}
|
||||
|
||||
if (builtin.os.tag == .windows) {
|
||||
@export(roc_getppid_windows_stub, .{ .name = "roc_getppid", .linkage = .Strong });
|
||||
}
|
||||
}
|
||||
|
||||
// warning! the array is currently stack-allocated so don't make this too big
|
||||
const NUM_NUMS = 100;
|
||||
|
||||
|
@ -103,9 +139,7 @@ pub export fn main() u8 {
|
|||
|
||||
var roc_list = RocList{ .elements = numbers, .length = NUM_NUMS, .capacity = NUM_NUMS };
|
||||
|
||||
// start time
|
||||
var ts1: std.os.timespec = undefined;
|
||||
std.os.clock_gettime(std.os.CLOCK.REALTIME, &ts1) catch unreachable;
|
||||
var timer = std.time.Timer.start() catch unreachable;
|
||||
|
||||
// actually call roc to populate the callresult
|
||||
const callresult: RocList = roc__mainForHost_1_exposed(roc_list);
|
||||
|
@ -114,9 +148,8 @@ pub export fn main() u8 {
|
|||
const length = std.math.min(20, callresult.length);
|
||||
var result = callresult.elements[0..length];
|
||||
|
||||
// end time
|
||||
var ts2: std.os.timespec = undefined;
|
||||
std.os.clock_gettime(std.os.CLOCK.REALTIME, &ts2) catch unreachable;
|
||||
const nanos = timer.read();
|
||||
const seconds = (@intToFloat(f64, nanos) / 1_000_000_000.0);
|
||||
|
||||
for (result) |x, i| {
|
||||
if (i == 0) {
|
||||
|
@ -128,9 +161,8 @@ pub export fn main() u8 {
|
|||
}
|
||||
}
|
||||
|
||||
// TODO apparently the typestamps are still (partially) undefined?
|
||||
// const delta = to_seconds(ts2) - to_seconds(ts1);
|
||||
// stderr.print("runtime: {d:.3}ms\n", .{delta * 1000}) catch unreachable;
|
||||
const stderr = std.io.getStdErr().writer();
|
||||
stderr.print("runtime: {d:.3}ms\n", .{seconds * 1000}) catch unreachable;
|
||||
|
||||
return 0;
|
||||
}
|
|
@ -20,7 +20,7 @@ initialModel = \start -> {
|
|||
cameFrom: Dict.empty,
|
||||
}
|
||||
|
||||
cheapestOpen : (position -> F64), Model position -> Result position {}
|
||||
cheapestOpen : (position -> F64), Model position -> Result position {} | position has Eq
|
||||
cheapestOpen = \costFn, model ->
|
||||
model.openSet
|
||||
|> Set.toList
|
||||
|
@ -35,13 +35,13 @@ cheapestOpen = \costFn, model ->
|
|||
|> Result.map .position
|
||||
|> Result.mapErr (\_ -> {})
|
||||
|
||||
reconstructPath : Dict position position, position -> List position
|
||||
reconstructPath : Dict position position, position -> List position | position has Eq
|
||||
reconstructPath = \cameFrom, goal ->
|
||||
when Dict.get cameFrom goal is
|
||||
Err _ -> []
|
||||
Ok next -> List.append (reconstructPath cameFrom next) goal
|
||||
|
||||
updateCost : position, position, Model position -> Model position
|
||||
updateCost : position, position, Model position -> Model position | position has Eq
|
||||
updateCost = \current, neighbor, model ->
|
||||
newCameFrom =
|
||||
Dict.insert model.cameFrom neighbor current
|
||||
|
@ -70,7 +70,7 @@ updateCost = \current, neighbor, model ->
|
|||
else
|
||||
model
|
||||
|
||||
astar : (position, position -> F64), (position -> Set position), position, Model position -> Result (List position) {}
|
||||
astar : (position, position -> F64), (position -> Set position), position, Model position -> Result (List position) {} | position has Eq
|
||||
astar = \costFn, moveFn, goal, model ->
|
||||
when cheapestOpen (\source -> costFn source goal) model is
|
||||
Err {} -> Err {}
|
|
@ -1,7 +1,7 @@
|
|||
interface Base64 exposes [fromBytes, fromStr, toBytes, toStr] imports [Base64.Decode, Base64.Encode]
|
||||
|
||||
# base 64 encoding from a sequence of bytes
|
||||
fromBytes : List U8 -> Result Str [InvalidInput]*
|
||||
fromBytes : List U8 -> Result Str [InvalidInput]
|
||||
fromBytes = \bytes ->
|
||||
when Base64.Decode.fromBytes bytes is
|
||||
Ok v ->
|
||||
|
@ -11,16 +11,16 @@ fromBytes = \bytes ->
|
|||
Err InvalidInput
|
||||
|
||||
# base 64 encoding from a string
|
||||
fromStr : Str -> Result Str [InvalidInput]*
|
||||
fromStr : Str -> Result Str [InvalidInput]
|
||||
fromStr = \str ->
|
||||
fromBytes (Str.toUtf8 str)
|
||||
|
||||
# base64-encode bytes to the original
|
||||
toBytes : Str -> Result (List U8) [InvalidInput]*
|
||||
toBytes : Str -> Result (List U8) [InvalidInput]
|
||||
toBytes = \str ->
|
||||
Ok (Base64.Encode.toBytes str)
|
||||
|
||||
toStr : Str -> Result Str [InvalidInput]*
|
||||
toStr : Str -> Result Str [InvalidInput]
|
||||
toStr = \str ->
|
||||
when toBytes str is
|
||||
Ok bytes ->
|
|
@ -1,13 +1,13 @@
|
|||
interface Base64.Decode exposes [fromBytes] imports [Bytes.Decode.{ Decoder, DecodeProblem }]
|
||||
interface Base64.Decode exposes [fromBytes] imports [Bytes.Decode.{ ByteDecoder, DecodeProblem }]
|
||||
|
||||
fromBytes : List U8 -> Result Str DecodeProblem
|
||||
fromBytes = \bytes ->
|
||||
Bytes.Decode.decode bytes (decodeBase64 (List.len bytes))
|
||||
|
||||
decodeBase64 : Nat -> Decoder Str
|
||||
decodeBase64 : Nat -> ByteDecoder Str
|
||||
decodeBase64 = \width -> Bytes.Decode.loop loopHelp { remaining: width, string: "" }
|
||||
|
||||
loopHelp : { remaining : Nat, string : Str } -> Decoder (Bytes.Decode.Step { remaining : Nat, string : Str } Str)
|
||||
loopHelp : { remaining : Nat, string : Str } -> ByteDecoder (Bytes.Decode.Step { remaining : Nat, string : Str } Str)
|
||||
loopHelp = \{ remaining, string } ->
|
||||
if remaining >= 3 then
|
||||
x, y, z <- Bytes.Decode.map3 Bytes.Decode.u8 Bytes.Decode.u8 Bytes.Decode.u8
|
|
@ -1,6 +1,6 @@
|
|||
interface Base64.Encode
|
||||
exposes [toBytes]
|
||||
imports [Bytes.Encode.{ Encoder }]
|
||||
imports [Bytes.Encode.{ ByteEncoder }]
|
||||
|
||||
InvalidChar : U8
|
||||
|
||||
|
@ -13,7 +13,7 @@ toBytes = \str ->
|
|||
|> Bytes.Encode.sequence
|
||||
|> Bytes.Encode.encode
|
||||
|
||||
encodeChunks : List U8 -> List Encoder
|
||||
encodeChunks : List U8 -> List ByteEncoder
|
||||
encodeChunks = \bytes ->
|
||||
List.walk bytes { output: [], accum: None } folder
|
||||
|> encodeResidual
|
||||
|
@ -21,7 +21,7 @@ encodeChunks = \bytes ->
|
|||
coerce : Nat, a -> a
|
||||
coerce = \_, x -> x
|
||||
|
||||
# folder : { output : List Encoder, accum : State }, U8 -> { output : List Encoder, accum : State }
|
||||
# folder : { output : List ByteEncoder, accum : State }, U8 -> { output : List ByteEncoder, accum : State }
|
||||
folder = \{ output, accum }, char ->
|
||||
when accum is
|
||||
Unreachable n -> coerce n { output, accum: Unreachable n }
|
||||
|
@ -40,7 +40,7 @@ folder = \{ output, accum }, char ->
|
|||
{ output, accum: None }
|
||||
|
||||
# SGVs bG8g V29y bGQ=
|
||||
# encodeResidual : { output : List Encoder, accum : State } -> List Encoder
|
||||
# encodeResidual : { output : List ByteEncoder, accum : State } -> List ByteEncoder
|
||||
encodeResidual = \{ output, accum } ->
|
||||
when accum is
|
||||
Unreachable _ -> output
|
||||
|
@ -59,8 +59,8 @@ encodeResidual = \{ output, accum } ->
|
|||
equals : U8
|
||||
equals = 61
|
||||
|
||||
# Convert 4 characters to 24 bits (as an Encoder)
|
||||
encodeCharacters : U8, U8, U8, U8 -> Result Encoder InvalidChar
|
||||
# Convert 4 characters to 24 bits (as an ByteEncoder)
|
||||
encodeCharacters : U8, U8, U8, U8 -> Result ByteEncoder InvalidChar
|
||||
encodeCharacters = \a, b, c, d ->
|
||||
if !(isValidChar a) then
|
||||
Err a
|
||||
|
@ -131,19 +131,19 @@ encodeCharacters = \a, b, c, d ->
|
|||
isValidChar : U8 -> Bool
|
||||
isValidChar = \c ->
|
||||
if isAlphaNum c then
|
||||
True
|
||||
Bool.true
|
||||
else
|
||||
when c is
|
||||
43 ->
|
||||
# '+'
|
||||
True
|
||||
Bool.true
|
||||
|
||||
47 ->
|
||||
# '/'
|
||||
True
|
||||
Bool.true
|
||||
|
||||
_ ->
|
||||
False
|
||||
Bool.false
|
||||
|
||||
isAlphaNum : U8 -> Bool
|
||||
isAlphaNum = \key ->
|
|
@ -1,13 +1,13 @@
|
|||
interface Bytes.Decode exposes [Decoder, decode, map, map2, u8, loop, Step, succeed, DecodeProblem, after, map3] imports []
|
||||
interface Bytes.Decode exposes [ByteDecoder, decode, map, map2, u8, loop, Step, succeed, DecodeProblem, after, map3] imports []
|
||||
|
||||
State : { bytes : List U8, cursor : Nat }
|
||||
|
||||
DecodeProblem : [OutOfBytes]
|
||||
|
||||
Decoder a := State -> [Good State a, Bad DecodeProblem]
|
||||
ByteDecoder a := State -> [Good State a, Bad DecodeProblem]
|
||||
|
||||
decode : List U8, Decoder a -> Result a DecodeProblem
|
||||
decode = \bytes, @Decoder decoder ->
|
||||
decode : List U8, ByteDecoder a -> Result a DecodeProblem
|
||||
decode = \bytes, @ByteDecoder decoder ->
|
||||
when decoder { bytes, cursor: 0 } is
|
||||
Good _ value ->
|
||||
Ok value
|
||||
|
@ -15,12 +15,12 @@ decode = \bytes, @Decoder decoder ->
|
|||
Bad e ->
|
||||
Err e
|
||||
|
||||
succeed : a -> Decoder a
|
||||
succeed = \value -> @Decoder \state -> Good state value
|
||||
succeed : a -> ByteDecoder a
|
||||
succeed = \value -> @ByteDecoder \state -> Good state value
|
||||
|
||||
map : Decoder a, (a -> b) -> Decoder b
|
||||
map = \@Decoder decoder, transform ->
|
||||
@Decoder
|
||||
map : ByteDecoder a, (a -> b) -> ByteDecoder b
|
||||
map = \@ByteDecoder decoder, transform ->
|
||||
@ByteDecoder
|
||||
\state ->
|
||||
when decoder state is
|
||||
Good state1 value ->
|
||||
|
@ -29,9 +29,9 @@ map = \@Decoder decoder, transform ->
|
|||
Bad e ->
|
||||
Bad e
|
||||
|
||||
map2 : Decoder a, Decoder b, (a, b -> c) -> Decoder c
|
||||
map2 = \@Decoder decoder1, @Decoder decoder2, transform ->
|
||||
@Decoder
|
||||
map2 : ByteDecoder a, ByteDecoder b, (a, b -> c) -> ByteDecoder c
|
||||
map2 = \@ByteDecoder decoder1, @ByteDecoder decoder2, transform ->
|
||||
@ByteDecoder
|
||||
\state1 ->
|
||||
when decoder1 state1 is
|
||||
Good state2 a ->
|
||||
|
@ -45,9 +45,9 @@ map2 = \@Decoder decoder1, @Decoder decoder2, transform ->
|
|||
Bad e ->
|
||||
Bad e
|
||||
|
||||
map3 : Decoder a, Decoder b, Decoder c, (a, b, c -> d) -> Decoder d
|
||||
map3 = \@Decoder decoder1, @Decoder decoder2, @Decoder decoder3, transform ->
|
||||
@Decoder
|
||||
map3 : ByteDecoder a, ByteDecoder b, ByteDecoder c, (a, b, c -> d) -> ByteDecoder d
|
||||
map3 = \@ByteDecoder decoder1, @ByteDecoder decoder2, @ByteDecoder decoder3, transform ->
|
||||
@ByteDecoder
|
||||
\state1 ->
|
||||
when decoder1 state1 is
|
||||
Good state2 a ->
|
||||
|
@ -66,21 +66,21 @@ map3 = \@Decoder decoder1, @Decoder decoder2, @Decoder decoder3, transform ->
|
|||
Bad e ->
|
||||
Bad e
|
||||
|
||||
after : Decoder a, (a -> Decoder b) -> Decoder b
|
||||
after = \@Decoder decoder, transform ->
|
||||
@Decoder
|
||||
after : ByteDecoder a, (a -> ByteDecoder b) -> ByteDecoder b
|
||||
after = \@ByteDecoder decoder, transform ->
|
||||
@ByteDecoder
|
||||
\state ->
|
||||
when decoder state is
|
||||
Good state1 value ->
|
||||
(@Decoder decoder1) = transform value
|
||||
(@ByteDecoder decoder1) = transform value
|
||||
|
||||
decoder1 state1
|
||||
|
||||
Bad e ->
|
||||
Bad e
|
||||
|
||||
u8 : Decoder U8
|
||||
u8 = @Decoder
|
||||
u8 : ByteDecoder U8
|
||||
u8 = @ByteDecoder
|
||||
\state ->
|
||||
when List.get state.bytes state.cursor is
|
||||
Ok b ->
|
||||
|
@ -91,14 +91,14 @@ u8 = @Decoder
|
|||
|
||||
Step state b : [Loop state, Done b]
|
||||
|
||||
loop : (state -> Decoder (Step state a)), state -> Decoder a
|
||||
loop : (state -> ByteDecoder (Step state a)), state -> ByteDecoder a
|
||||
loop = \stepper, initial ->
|
||||
@Decoder
|
||||
@ByteDecoder
|
||||
\state ->
|
||||
loopHelp stepper initial state
|
||||
|
||||
loopHelp = \stepper, accum, state ->
|
||||
(@Decoder stepper1) = stepper accum
|
||||
(@ByteDecoder stepper1) = stepper accum
|
||||
|
||||
when stepper1 state is
|
||||
Good newState (Done value) ->
|
|
@ -1,30 +1,30 @@
|
|||
interface Bytes.Encode exposes [Encoder, sequence, u8, u16, bytes, empty, encode] imports []
|
||||
interface Bytes.Encode exposes [ByteEncoder, sequence, u8, u16, bytes, empty, encode] imports []
|
||||
|
||||
Endianness : [BE, LE]
|
||||
|
||||
Encoder : [Signed8 I8, Unsigned8 U8, Signed16 Endianness I16, Unsigned16 Endianness U16, Sequence Nat (List Encoder), Bytes (List U8)]
|
||||
ByteEncoder : [Signed8 I8, Unsigned8 U8, Signed16 Endianness I16, Unsigned16 Endianness U16, Sequence Nat (List ByteEncoder), Bytes (List U8)]
|
||||
|
||||
u8 : U8 -> Encoder
|
||||
u8 : U8 -> ByteEncoder
|
||||
u8 = \value -> Unsigned8 value
|
||||
|
||||
empty : Encoder
|
||||
empty : ByteEncoder
|
||||
empty =
|
||||
foo : List Encoder
|
||||
foo : List ByteEncoder
|
||||
foo = []
|
||||
|
||||
Sequence 0 foo
|
||||
|
||||
u16 : Endianness, U16 -> Encoder
|
||||
u16 : Endianness, U16 -> ByteEncoder
|
||||
u16 = \endianness, value -> Unsigned16 endianness value
|
||||
|
||||
bytes : List U8 -> Encoder
|
||||
bytes : List U8 -> ByteEncoder
|
||||
bytes = \bs -> Bytes bs
|
||||
|
||||
sequence : List Encoder -> Encoder
|
||||
sequence : List ByteEncoder -> ByteEncoder
|
||||
sequence = \encoders ->
|
||||
Sequence (getWidths encoders 0) encoders
|
||||
|
||||
getWidth : Encoder -> Nat
|
||||
getWidth : ByteEncoder -> Nat
|
||||
getWidth = \encoder ->
|
||||
when encoder is
|
||||
Signed8 _ -> 1
|
||||
|
@ -40,18 +40,18 @@ getWidth = \encoder ->
|
|||
Sequence w _ -> w
|
||||
Bytes bs -> List.len bs
|
||||
|
||||
getWidths : List Encoder, Nat -> Nat
|
||||
getWidths : List ByteEncoder, Nat -> Nat
|
||||
getWidths = \encoders, initial ->
|
||||
List.walk encoders initial \accum, encoder -> accum + getWidth encoder
|
||||
|
||||
encode : Encoder -> List U8
|
||||
encode : ByteEncoder -> List U8
|
||||
encode = \encoder ->
|
||||
output = List.repeat 0 (getWidth encoder)
|
||||
|
||||
encodeHelp encoder 0 output
|
||||
|> .output
|
||||
|
||||
encodeHelp : Encoder, Nat, List U8 -> { output : List U8, offset : Nat }
|
||||
encodeHelp : ByteEncoder, Nat, List U8 -> { output : List U8, offset : Nat }
|
||||
encodeHelp = \encoder, offset, output ->
|
||||
when encoder is
|
||||
Unsigned8 value ->
|
|
@@ -36,7 +36,7 @@ nestHelp = \{ s, f, m, x } ->

 Expr : [Val I64, Var Str, Add Expr Expr, Mul Expr Expr, Pow Expr Expr, Ln Expr]

-divmod : I64, I64 -> Result { div : I64, mod : I64 } [DivByZero]*
+divmod : I64, I64 -> Result { div : I64, mod : I64 } [DivByZero]
 divmod = \l, r ->
     when Pair (Num.divTruncChecked l r) (Num.remChecked l r) is
         Pair (Ok div) (Ok mod) -> Ok { div, mod }
@@ -5,7 +5,7 @@ app "issue2279"

 main =
     text =
-        if True then
+        if Bool.true then
            Issue2279Help.text
        else
            Issue2279Help.asText 42
@@ -28,7 +28,7 @@ lengthHelp = \foobar, acc ->
 safe : I64, I64, ConsList I64 -> Bool
 safe = \queen, diagonal, xs ->
     when xs is
-        Nil -> True
+        Nil -> Bool.true
         Cons q t ->
             queen != q && queen != q + diagonal && queen != q - diagonal && safe queen (diagonal + 1) t
@ -70,8 +70,8 @@ setBlack = \tree ->
|
|||
isRed : Tree a b -> Bool
|
||||
isRed = \tree ->
|
||||
when tree is
|
||||
Node Red _ _ _ _ -> True
|
||||
_ -> False
|
||||
Node Red _ _ _ _ -> Bool.true
|
||||
_ -> Bool.false
|
||||
|
||||
lt = \x, y -> x < y
|
||||
|
|
@ -74,8 +74,8 @@ setBlack = \tree ->
|
|||
isRed : Tree a b -> Bool
|
||||
isRed = \tree ->
|
||||
when tree is
|
||||
Node Red _ _ _ _ -> True
|
||||
_ -> False
|
||||
Node Red _ _ _ _ -> Bool.true
|
||||
_ -> Bool.false
|
||||
|
||||
ins : Tree I64 Bool, I64, Bool -> Tree I64 Bool
|
||||
ins = \tree, kx, vx ->
|
||||
|
@ -93,13 +93,13 @@ ins = \tree, kx, vx ->
|
|||
when Num.compare kx ky is
|
||||
LT ->
|
||||
when isRed a is
|
||||
True -> balanceLeft (ins a kx vx) ky vy b
|
||||
False -> Node Black (ins a kx vx) ky vy b
|
||||
Bool.true -> balanceLeft (ins a kx vx) ky vy b
|
||||
Bool.false -> Node Black (ins a kx vx) ky vy b
|
||||
|
||||
GT ->
|
||||
when isRed b is
|
||||
True -> balanceRight a ky vy (ins b kx vx)
|
||||
False -> Node Black a ky vy (ins b kx vx)
|
||||
Bool.true -> balanceRight a ky vy (ins b kx vx)
|
||||
Bool.false -> Node Black a ky vy (ins b kx vx)
|
||||
|
||||
EQ ->
|
||||
Node Black a kx vx b
|
||||
|
@ -137,8 +137,8 @@ balanceRight = \l, k, v, r ->
|
|||
isBlack : Color -> Bool
|
||||
isBlack = \c ->
|
||||
when c is
|
||||
Black -> True
|
||||
Red -> False
|
||||
Black -> Bool.true
|
||||
Red -> Bool.false
|
||||
|
||||
Del a b : [Del (Tree a b) Bool]
|
||||
|
||||
|
@ -155,10 +155,10 @@ makeBlack : Map -> Del I64 Bool
|
|||
makeBlack = \t ->
|
||||
when t is
|
||||
Node Red l k v r ->
|
||||
Del (Node Black l k v r) False
|
||||
Del (Node Black l k v r) Bool.false
|
||||
|
||||
_ ->
|
||||
Del t True
|
||||
Del t Bool.true
|
||||
|
||||
rebalanceLeft = \c, l, k, v, r ->
|
||||
when l is
|
||||
|
@ -166,7 +166,7 @@ rebalanceLeft = \c, l, k, v, r ->
|
|||
Del (balanceLeft (setRed l) k v r) (isBlack c)
|
||||
|
||||
Node Red lx kx vx rx ->
|
||||
Del (Node Black lx kx vx (balanceLeft (setRed rx) k v r)) False
|
||||
Del (Node Black lx kx vx (balanceLeft (setRed rx) k v r)) Bool.false
|
||||
|
||||
_ ->
|
||||
boom "unreachable"
|
||||
|
@ -177,7 +177,7 @@ rebalanceRight = \c, l, k, v, r ->
|
|||
Del (balanceRight l k v (setRed r)) (isBlack c)
|
||||
|
||||
Node Red lx kx vx rx ->
|
||||
Del (Node Black (balanceRight l k v (setRed lx)) kx vx rx) False
|
||||
Del (Node Black (balanceRight l k v (setRed lx)) kx vx rx) Bool.false
|
||||
|
||||
_ ->
|
||||
boom "unreachable"
|
||||
|
@ -187,24 +187,24 @@ delMin = \t ->
|
|||
Node Black Leaf k v r ->
|
||||
when r is
|
||||
Leaf ->
|
||||
Delmin (Del Leaf True) k v
|
||||
Delmin (Del Leaf Bool.true) k v
|
||||
|
||||
_ ->
|
||||
Delmin (Del (setBlack r) False) k v
|
||||
Delmin (Del (setBlack r) Bool.false) k v
|
||||
|
||||
Node Red Leaf k v r ->
|
||||
Delmin (Del r False) k v
|
||||
Delmin (Del r Bool.false) k v
|
||||
|
||||
Node c l k v r ->
|
||||
when delMin l is
|
||||
Delmin (Del lx True) kx vx ->
|
||||
Delmin (Del lx Bool.true) kx vx ->
|
||||
Delmin (rebalanceRight c lx k v r) kx vx
|
||||
|
||||
Delmin (Del lx False) kx vx ->
|
||||
Delmin (Del (Node c lx k v r) False) kx vx
|
||||
Delmin (Del lx Bool.false) kx vx ->
|
||||
Delmin (Del (Node c lx k v r) Bool.false) kx vx
|
||||
|
||||
Leaf ->
|
||||
Delmin (Del t False) 0 False
|
||||
Delmin (Del t Bool.false) 0 Bool.false
|
||||
|
||||
delete : Tree I64 Bool, I64 -> Tree I64 Bool
|
||||
delete = \t, k ->
|
||||
|
@ -216,32 +216,32 @@ del : Tree I64 Bool, I64 -> Del I64 Bool
|
|||
del = \t, k ->
|
||||
when t is
|
||||
Leaf ->
|
||||
Del Leaf False
|
||||
Del Leaf Bool.false
|
||||
|
||||
Node cx lx kx vx rx ->
|
||||
if (k < kx) then
|
||||
when del lx k is
|
||||
Del ly True ->
|
||||
Del ly Bool.true ->
|
||||
rebalanceRight cx ly kx vx rx
|
||||
|
||||
Del ly False ->
|
||||
Del (Node cx ly kx vx rx) False
|
||||
Del ly Bool.false ->
|
||||
Del (Node cx ly kx vx rx) Bool.false
|
||||
else if (k > kx) then
|
||||
when del rx k is
|
||||
Del ry True ->
|
||||
Del ry Bool.true ->
|
||||
rebalanceLeft cx lx kx vx ry
|
||||
|
||||
Del ry False ->
|
||||
Del (Node cx lx kx vx ry) False
|
||||
Del ry Bool.false ->
|
||||
Del (Node cx lx kx vx ry) Bool.false
|
||||
else
|
||||
when rx is
|
||||
Leaf ->
|
||||
if isBlack cx then makeBlack lx else Del lx False
|
||||
if isBlack cx then makeBlack lx else Del lx Bool.false
|
||||
|
||||
Node _ _ _ _ _ ->
|
||||
when delMin rx is
|
||||
Delmin (Del ry True) ky vy ->
|
||||
Delmin (Del ry Bool.true) ky vy ->
|
||||
rebalanceLeft cx lx ky vy ry
|
||||
|
||||
Delmin (Del ry False) ky vy ->
|
||||
Del (Node cx lx ky vy ry) False
|
||||
Delmin (Del ry Bool.false) ky vy ->
|
||||
Del (Node cx lx ky vy ry) Bool.false
|
|
@@ -17,9 +17,12 @@ main =
 # Task.putLine "No test \(ns)"
 showBool : Bool -> Str
 showBool = \b ->
-    when b is
-        True -> "True"
-        False -> "False"
+    if
+        b
+    then
+        "True"
+    else
+        "False"

 test1 : Bool
 test1 =
@ -66,13 +66,13 @@ getInt =
|
|||
Effect.after
|
||||
Effect.getInt
|
||||
\{ isError, value } ->
|
||||
when isError is
|
||||
True ->
|
||||
# when errorCode is
|
||||
# # A -> Task.fail InvalidCharacter
|
||||
# # B -> Task.fail IOError
|
||||
# _ ->
|
||||
Task.succeed -1
|
||||
|
||||
False ->
|
||||
Task.succeed value
|
||||
if
|
||||
isError
|
||||
then
|
||||
# when errorCode is
|
||||
# # A -> Task.fail InvalidCharacter
|
||||
# # B -> Task.fail IOError
|
||||
# _ ->
|
||||
Task.succeed -1
|
||||
else
|
||||
Task.succeed value
|
|
@ -1,4 +1,5 @@
|
|||
const std = @import("std");
|
||||
const builtin = @import("builtin");
|
||||
const str = @import("str");
|
||||
const RocStr = str.RocStr;
|
||||
const testing = std.testing;
|
||||
|
@ -15,7 +16,6 @@ comptime {
|
|||
// -fcompiler-rt in link.rs instead of doing this. Note that this
|
||||
// workaround is present in many host.zig files, so make sure to undo
|
||||
// it everywhere!
|
||||
const builtin = @import("builtin");
|
||||
if (builtin.os.tag == .macos) {
|
||||
_ = @import("compiler_rt");
|
||||
}
|
||||
|
@ -85,10 +85,49 @@ export fn roc_memset(dst: [*]u8, value: i32, size: usize) callconv(.C) void {
|
|||
return memset(dst, value, size);
|
||||
}
|
||||
|
||||
extern fn kill(pid: c_int, sig: c_int) c_int;
|
||||
extern fn shm_open(name: *const i8, oflag: c_int, mode: c_uint) c_int;
|
||||
extern fn mmap(addr: ?*anyopaque, length: c_uint, prot: c_int, flags: c_int, fd: c_int, offset: c_uint) *anyopaque;
|
||||
extern fn getppid() c_int;
|
||||
|
||||
fn roc_getppid() callconv(.C) c_int {
|
||||
return getppid();
|
||||
}
|
||||
|
||||
fn roc_getppid_windows_stub() callconv(.C) c_int {
|
||||
return 0;
|
||||
}
|
||||
|
||||
fn roc_send_signal(pid: c_int, sig: c_int) callconv(.C) c_int {
|
||||
return kill(pid, sig);
|
||||
}
|
||||
fn roc_shm_open(name: *const i8, oflag: c_int, mode: c_uint) callconv(.C) c_int {
|
||||
return shm_open(name, oflag, mode);
|
||||
}
|
||||
fn roc_mmap(addr: ?*anyopaque, length: c_uint, prot: c_int, flags: c_int, fd: c_int, offset: c_uint) callconv(.C) *anyopaque {
|
||||
return mmap(addr, length, prot, flags, fd, offset);
|
||||
}
|
||||
|
||||
comptime {
|
||||
if (builtin.os.tag == .macos or builtin.os.tag == .linux) {
|
||||
@export(roc_getppid, .{ .name = "roc_getppid", .linkage = .Strong });
|
||||
@export(roc_mmap, .{ .name = "roc_mmap", .linkage = .Strong });
|
||||
@export(roc_send_signal, .{ .name = "roc_send_signal", .linkage = .Strong });
|
||||
@export(roc_shm_open, .{ .name = "roc_shm_open", .linkage = .Strong });
|
||||
}
|
||||
|
||||
if (builtin.os.tag == .windows) {
|
||||
@export(roc_getppid_windows_stub, .{ .name = "roc_getppid", .linkage = .Strong });
|
||||
}
|
||||
}
|
||||
|
||||
const Unit = extern struct {};
|
||||
|
||||
pub export fn main() callconv(.C) u8 {
|
||||
const size = @intCast(usize, roc__mainForHost_size());
|
||||
pub fn main() !u8 {
|
||||
const stderr = std.io.getStdErr().writer();
|
||||
|
||||
// The size might be zero; if so, make it at least 8 so that we don't have a nullptr
|
||||
const size = std.math.max(@intCast(usize, roc__mainForHost_size()), 8);
|
||||
const raw_output = roc_alloc(@intCast(usize, size), @alignOf(u64)).?;
|
||||
var output = @ptrCast([*]u8, raw_output);
|
||||
|
||||
|
@ -107,7 +146,6 @@ pub export fn main() callconv(.C) u8 {
|
|||
const nanos = timer.read();
|
||||
const seconds = (@intToFloat(f64, nanos) / 1_000_000_000.0);
|
||||
|
||||
const stderr = std.io.getStdErr().writer();
|
||||
stderr.print("runtime: {d:.3}ms\n", .{seconds * 1000}) catch unreachable;
|
||||
|
||||
return 0;
|
||||
|
@ -120,7 +158,8 @@ fn to_seconds(tms: std.os.timespec) f64 {
|
|||
fn call_the_closure(closure_data_pointer: [*]u8) void {
|
||||
const allocator = std.heap.page_allocator;
|
||||
|
||||
const size = roc__mainForHost_1__Fx_result_size();
|
||||
// The size might be zero; if so, make it at least 8 so that we don't have a nullptr
|
||||
const size = std.math.max(roc__mainForHost_1__Fx_result_size(), 8);
|
||||
const raw_output = allocator.allocAdvanced(u8, @alignOf(u64), @intCast(usize, size), .at_least) catch unreachable;
|
||||
var output = @ptrCast([*]u8, raw_output);
|
||||
|
||||
|
@ -187,14 +226,14 @@ fn roc_fx_getInt_64bit() callconv(.C) GetInt {
|
|||
|
||||
fn roc_fx_getInt_32bit(output: *GetInt) callconv(.C) void {
|
||||
if (roc_fx_getInt_help()) |value| {
|
||||
const get_int = GetInt{ .is_error = false, .value = value, .error_code = false };
|
||||
const get_int = GetInt{ .is_error = false, .value = value };
|
||||
output.* = get_int;
|
||||
} else |err| switch (err) {
|
||||
error.InvalidCharacter => {
|
||||
output.* = GetInt{ .is_error = true, .value = 0, .error_code = false };
|
||||
output.* = GetInt{ .is_error = true, .value = 0 };
|
||||
},
|
||||
else => {
|
||||
output.* = GetInt{ .is_error = true, .value = 0, .error_code = true };
|
||||
output.* = GetInt{ .is_error = true, .value = 0 };
|
||||
},
|
||||
}
|
||||
|
||||
|
@@ -208,7 +247,9 @@ fn roc_fx_getInt_help() !i64 {
     const stdin = std.io.getStdIn().reader();
     var buf: [40]u8 = undefined;

-    const line: []u8 = (try stdin.readUntilDelimiterOrEof(&buf, '\n')) orelse "";
+    // make sure to strip `\r` on windows
+    const raw_line: []u8 = (try stdin.readUntilDelimiterOrEof(&buf, '\n')) orelse "";
+    const line = std.mem.trimRight(u8, raw_line, &std.ascii.spaces);

     return std.fmt.parseInt(i64, line, 10);
 }
crates/cli_utils/Cargo.lock (generated, 4301 lines changed; diff suppressed because it is too large)

Some files were not shown because too many files have changed in this diff.