Merge branch 'main' into dependabot/cargo/tracing-0.1.40

commit 9a8d8ed243
Author: Anton-4, 2024-04-15 14:05:47 +02:00 (committed by GitHub)
Signature: no known key found for this signature in database (GPG key ID: B5690EEEBB952194)
522 changed files with 19943 additions and 32274 deletions

View file

@ -38,13 +38,21 @@ ROC_PRINT_UNDERIVABLE = "0"
ROC_TRACE_COMPACTION = "0"
ROC_PRINT_UNIFICATIONS_DERIVED = "0"
ROC_PRINT_MISMATCHES = "0"
ROC_PRINT_FIXPOINT_FIXING = "0"
ROC_VERIFY_RIGID_LET_GENERALIZED = "0"
ROC_VERIFY_OCCURS_ONE_RECURSION = "0"
ROC_CHECK_MONO_IR = "0"
ROC_PRINT_IR_AFTER_SPECIALIZATION = "0"
ROC_PRINT_IR_AFTER_RESET_REUSE = "0"
ROC_PRINT_IR_AFTER_DROP_SPECIALIZATION = "0"
ROC_PRINT_IR_AFTER_REFCOUNT = "0"
ROC_PRINT_RUNTIME_ERROR_GEN = "0"
ROC_PRINT_IR_AFTER_TRMC = "0"
ROC_PRINT_IR_AFTER_DROP_SPECIALIZATION = "0"
ROC_DEBUG_ALIAS_ANALYSIS = "0"
ROC_PRINT_RUNTIME_ERROR_GEN = "0"
ROC_PRINT_LLVM_FN_VERIFICATION = "0"
ROC_WRITE_FINAL_WASM = "0"
ROC_LOG_WASM_INTERP = "0"
ROC_PRINT_LOAD_LOG = "0"
ROC_SKIP_SUBS_CACHE = "0"
ROC_PRINT_BUILD_COMMANDS = "0"
ROC_PRINT_BUILD_COMMANDS_WITH_ENV_VARS = "0"

.gitattributes (vendored): 4 changes
View file

@ -1,2 +1,4 @@
# Require roc files to be checlked out with Unix line endings, even on windows
# Require roc files to be checked out with Unix line endings, even on windows
*.roc text eol=lf
crates/compiler/test_mono/generated/* linguist-generated=true

View file

@ -6,18 +6,18 @@ on:
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
# use .tar.gz for quick testing
ARCHIVE_FORMAT: .tar.br
# Make a new basic-cli git tag and set it here before starting this workflow
RELEASE_TAG: 0.7.1
RELEASE_TAG: 0.9.0
jobs:
prepare:
runs-on: [ubuntu-20.04]
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
repository: roc-lang/basic-cli
@ -38,9 +38,13 @@ jobs:
- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-linux_arm64-latest.tar.gz
- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-macos_x86_64-latest.tar.gz
- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-macos_apple_silicon-latest.tar.gz
#- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-linux_x86_64-TESTING.tar.gz
#- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-linux_arm64-TESTING.tar.gz
#- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-macos_x86_64-TESTING.tar.gz
#- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-macos_apple_silicon-TESTING.tar.gz
- name: Save roc_nightly archives
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
path: roc_nightly-*
@ -48,18 +52,18 @@ jobs:
runs-on: [ubuntu-20.04]
needs: [prepare]
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Download the previously uploaded roc_nightly archives
uses: actions/download-artifact@v3
uses: actions/download-artifact@v4
- name: build basic-cli with surgical linker and also with legacy linker
env:
CARGO_BUILD_TARGET: x86_64-unknown-linux-musl
run: ./ci/build_basic_cli.sh linux_x86_64 "--linker legacy"
- name: Save .rh, .rm and .o file
uses: actions/upload-artifact@v3
- name: Save .rh, .rm and .o file
uses: actions/upload-artifact@v4
with:
name: linux-x86_64-files
path: |
@ -72,10 +76,10 @@ jobs:
runs-on: [self-hosted, Linux, ARM64]
needs: [prepare]
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Download the previously uploaded roc_nightly archives
uses: actions/download-artifact@v3
uses: actions/download-artifact@v4
- name: build basic-cli
env:
@ -85,8 +89,8 @@ jobs:
CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_RUSTFLAGS: "-Clink-self-contained=yes -Clinker=rust-lld"
run: ./ci/build_basic_cli.sh linux_arm64
- name: Save .o file
uses: actions/upload-artifact@v3
- name: Save .o file
uses: actions/upload-artifact@v4
with:
name: linux-arm64-files
path: |
@ -96,15 +100,15 @@ jobs:
runs-on: [macos-11] # I expect the generated files to work on macOS 12 and up
needs: [prepare]
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Download the previously uploaded roc_nightly archives
uses: actions/download-artifact@v3
uses: actions/download-artifact@v4
- run: ./ci/build_basic_cli.sh macos_x86_64
- name: Save .o files
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: macos-x86_64-files
path: |
@ -115,15 +119,15 @@ jobs:
runs-on: [self-hosted, macOS, ARM64]
needs: [prepare]
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Download the previously uploaded roc_nightly archives
uses: actions/download-artifact@v3
uses: actions/download-artifact@v4
- run: ./ci/build_basic_cli.sh macos_apple_silicon
- name: Save macos-arm64.o file
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: macos-apple-silicon-files
path: |
@ -134,13 +138,13 @@ jobs:
name: create release archive
runs-on: [ubuntu-20.04]
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: remove all folders except the ci folder
run: ls | grep -v ci | xargs rm -rf
- name: Download the previously uploaded files
uses: actions/download-artifact@v3
uses: actions/download-artifact@v4
- name: mv roc nightly and simplify name
run: mv $(ls -d artifact/* | grep "roc_nightly.*tar\.gz" | grep "linux_x86_64") ./roc_nightly.tar.gz
@ -157,7 +161,7 @@ jobs:
- run: git clone https://github.com/roc-lang/basic-cli.git
- run: cp macos-apple-silicon-files/* ./basic-cli/platform
- run: cp linux-x86_64-files/* ./basic-cli/platform
- run: cp linux-arm64-files/* ./basic-cli/platform
@ -177,14 +181,14 @@ jobs:
- run: echo "TAR_FILENAME=$(ls -d basic-cli/platform/* | grep ${{ env.ARCHIVE_FORMAT }})" >> $GITHUB_ENV
- name: Upload platform archive
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: basic-cli-platform
path: |
${{ env.TAR_FILENAME }}
- name: Upload docs archive
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: release-assets-docs
path: |
@ -196,7 +200,7 @@ jobs:
steps:
- name: Download the previously uploaded files
uses: actions/download-artifact@v3
uses: actions/download-artifact@v4
- name: mv roc nightly and simplify name
run: mv $(ls -d artifact/* | grep "roc_nightly.*tar\.gz" | grep "linux_x86_64") ./roc_nightly.tar.gz
@ -232,7 +236,7 @@ jobs:
mkdir platform
# move all files to platform dir
find . -maxdepth 1 -type f -exec mv {} platform/ \;
mkdir temp-basic-cli
cd temp-basic-cli
git clone https://github.com/roc-lang/basic-cli.git
@ -242,9 +246,8 @@ jobs:
cp -r ci ../..
cp -r LICENSE ../..
# LICENSE is necessary for command test
- name: run tests
run: |
cd basic-cli-platform
ROC=./roc_nightly/roc EXAMPLES_DIR=./examples/ ROC_BUILD_FLAGS=--prebuilt-platform ./ci/all_tests.sh

View file

@ -11,7 +11,7 @@ jobs:
runs-on: [self-hosted, Linux, ARM64]
steps:
- name: clone basic-cli repo
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
repository: roc-lang/basic-cli
ref: main
@ -19,7 +19,7 @@ jobs:
- name: get latest roc nightly
run: |
curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-linux_arm64-latest.tar.gz
- name: rename nightly tar
run: mv $(ls | grep "roc_nightly.*tar\.gz") roc_nightly.tar.gz
@ -35,7 +35,7 @@ jobs:
- run: expect -v
# Run all tests
# Run all tests
- run: ROC=./roc_nightly/roc EXAMPLES_DIR=./examples/ ./ci/all_tests.sh
######
@ -44,7 +44,7 @@ jobs:
- name: Remove roc_nightly folder to keep things simple (we'll download it again later)
run: rm -rf roc_nightly
- name: Get the repo of the latest basic-cli release
run: |
git clone --depth 1 https://github.com/roc-lang/basic-cli
@ -53,10 +53,6 @@ jobs:
latestTag=$(git describe --tags $(git rev-list --tags --max-count=1))
git checkout $latestTag
# temp issue with new string interpolation syntax
# TODO undo when 0.7.2 or 0.8.0 is released
- run: sed -i 's/\$//g' basic-cli/examples/tcp-client.roc
- name: Run all tests with latest roc nightly and latest basic-cli release
run: |
sed -i 's/x86_64/arm64/g' ./ci/test_latest_release.sh

View file

@ -1,12 +1,12 @@
on:
# pull_request:
#pull_request:
workflow_dispatch:
# this cancels workflows currently in progress if you start a new one
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
# use .tar.gz for quick testing
ARCHIVE_FORMAT: .tar.br
@ -16,15 +16,21 @@ jobs:
fetch-releases:
runs-on: [ubuntu-20.04]
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
# get latest nightly releases
- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-linux_x86_64-latest.tar.gz
- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-linux_arm64-latest.tar.gz
- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-macos_x86_64-latest.tar.gz
- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-macos_apple_silicon-latest.tar.gz
#- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-linux_x86_64-TESTING.tar.gz
#- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-linux_arm64-TESTING.tar.gz
#- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-macos_x86_64-TESTING.tar.gz
#- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-macos_apple_silicon-TESTING.tar.gz
- name: Save roc_nightly archives
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
path: roc_nightly-*
@ -32,18 +38,18 @@ jobs:
runs-on: [ubuntu-20.04]
needs: [fetch-releases]
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Download the previously uploaded roc_nightly archives
uses: actions/download-artifact@v3
uses: actions/download-artifact@v4
- name: build basic-webserver with legacy linker
env:
CARGO_BUILD_TARGET: x86_64-unknown-linux-musl
run: ./ci/build_basic_webserver.sh linux_x86_64 "--linker legacy"
- name: Save .rh, .rm and .o file
uses: actions/upload-artifact@v3
- name: Save .rh, .rm and .o file
uses: actions/upload-artifact@v4
with:
name: linux-x86_64-files
path: |
@ -56,10 +62,10 @@ jobs:
runs-on: [self-hosted, Linux, ARM64]
needs: [fetch-releases]
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Download the previously uploaded roc_nightly archives
uses: actions/download-artifact@v3
uses: actions/download-artifact@v4
- name: build basic-webserver
env:
@ -69,8 +75,8 @@ jobs:
CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_RUSTFLAGS: "-Clink-self-contained=yes -Clinker=rust-lld"
run: ./ci/build_basic_webserver.sh linux_arm64
- name: Save .o file
uses: actions/upload-artifact@v3
- name: Save .o file
uses: actions/upload-artifact@v4
with:
name: linux-arm64-files
path: |
@ -80,15 +86,15 @@ jobs:
runs-on: [macos-11] # I expect the generated files to work on macOS 12 and 13
needs: [fetch-releases]
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Download the previously uploaded roc_nightly archives
uses: actions/download-artifact@v3
uses: actions/download-artifact@v4
- run: ./ci/build_basic_webserver.sh macos_x86_64
- name: Save .o files
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: macos-x86_64-files
path: |
@ -99,15 +105,15 @@ jobs:
runs-on: [self-hosted, macOS, ARM64]
needs: [fetch-releases]
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Download the previously uploaded roc_nightly archives
uses: actions/download-artifact@v3
uses: actions/download-artifact@v4
- run: ./ci/build_basic_webserver.sh macos_apple_silicon
- name: Save macos-arm64.o file
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: macos-apple-silicon-files
path: |
@ -118,13 +124,13 @@ jobs:
name: create release archive
runs-on: [ubuntu-20.04]
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: remove all folders except the ci folder
run: ls | grep -v ci | xargs rm -rf
- name: Download the previously uploaded files
uses: actions/download-artifact@v3
uses: actions/download-artifact@v4
- name: mv roc nightly and simplify name
run: mv $(ls -d artifact/* | grep "roc_nightly.*tar\.gz" | grep "linux_x86_64") ./roc_nightly.tar.gz
@ -145,7 +151,7 @@ jobs:
cd ..
- run: cp macos-apple-silicon-files/* ./basic-webserver/platform
- run: cp linux-x86_64-files/* ./basic-webserver/platform
- run: cp linux-arm64-files/* ./basic-webserver/platform
@ -157,7 +163,7 @@ jobs:
- run: echo "TAR_FILENAME=$(ls -d basic-webserver/platform/* | grep ${{ env.ARCHIVE_FORMAT }})" >> $GITHUB_ENV
- name: Upload platform archive
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: basic-webserver-platform
path: |

View file

@ -15,7 +15,7 @@ jobs:
env:
FORCE_COLOR: 1
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: "main"
clean: "true"
@ -23,7 +23,7 @@ jobs:
- name: on main; prepare a self-contained benchmark folder
run: nix develop -c ./ci/benchmarks/prep_folder.sh main
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
clean: "false" # we want to keep the benchmark folder

View file

@ -15,16 +15,61 @@ jobs:
run_tests: ${{ steps.filecheck.outputs.run_tests }}
steps:
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Check if only css, html or md files changed
id: filecheck
id: check_ignored_files
run: |
git fetch origin ${{ github.base_ref }}
if git diff --name-only origin/${{ github.base_ref }} HEAD | grep -qvE '(\.md$|\.css$|\.html$|^AUTHORS$)'; then
echo "run_tests=full" >> $GITHUB_OUTPUT
echo "should_run_tests=y" >> $GITHUB_OUTPUT
else
echo "run_tests=none" >> $GITHUB_OUTPUT
echo "should_run_tests=n" >> $GITHUB_OUTPUT
fi
- name: Check if only comments changed in rust files
id: check_rs_comments
run: |
git fetch origin ${{ github.base_ref }}
if git diff --name-only origin/${{ github.base_ref }} HEAD | grep -qvE '(\.md$|\.css$|\.html$|^AUTHORS$|\.rs|\.roc)'; then
echo "should_run_tests=y" >> $GITHUB_OUTPUT
else
if git diff --unified=0 origin/${{ github.base_ref }} HEAD '*.rs' | grep -E --color=never '^[+-]' | grep -qvE '^(\+\+\+|\-\-\-|[+-]\s*($|\/\/[^\/]|\/\*.*\*\/\s*$))'; then
echo "should_run_tests=y" >> $GITHUB_OUTPUT
else
echo "should_run_tests=n" >> $GITHUB_OUTPUT
fi
fi
- name: Check if only comments changed in roc files
id: check_roc_comments
run: |
git fetch origin ${{ github.base_ref }}
if git diff --name-only origin/${{ github.base_ref }} HEAD | grep -qvE '(\.md$|\.css$|\.html$|^AUTHORS$|\.rs|\.roc)'; then
echo "should_run_tests=y" >> $GITHUB_OUTPUT
else
if git diff --unified=0 origin/${{ github.base_ref }} HEAD '*.roc' | grep -E --color=never '^[+-]' | grep -qvE '^(\+\+\+|\-\-\-|[+-]\s*($|#))'; then
echo "should_run_tests=y" >> $GITHUB_OUTPUT
else
echo "should_run_tests=n" >> $GITHUB_OUTPUT
fi
fi
- name: Check if tests need to run based on earlier checks
id: filecheck
run: |
if [ ${{ steps.check_ignored_files.outputs.should_run_tests }} = "y" ]; then
if [ ${{ steps.check_rs_comments.outputs.should_run_tests }} = "y" ]; then
echo "run_tests=full" >> $GITHUB_OUTPUT
else
if [ ${{ steps.check_roc_comments.outputs.should_run_tests }} = "y" ]; then
echo "run_tests=full" >> $GITHUB_OUTPUT
else
echo "run_tests=none" >> $GITHUB_OUTPUT
fi
fi
else
echo "run_tests=none" >> $GITHUB_OUTPUT
fi
- run: echo "debug output ${{ steps.filecheck.outputs.run_tests }}"
@ -95,7 +140,7 @@ jobs:
needs: [check-changes]
if: needs.check-changes.outputs.run_tests == 'none'
steps:
- run: echo "Only non-code files changed. CI manager did not run any workflows."
- run: echo "Only non-code files changed and/or rs and/or roc comment lines changed. CI manager did not run any workflows."
# we need a single end job for the required checks under branch protection rules
finish:
@ -112,7 +157,3 @@ jobs:
fi
- run: echo "Workflow succeeded :)"

View file

@ -1,6 +1,6 @@
on:
pull_request:
name: devtools nix files test - linux
concurrency:
@ -13,7 +13,7 @@ jobs:
runs-on: [ubuntu-20.04]
timeout-minutes: 120
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Only run all steps if a nix file changed
run: |
@ -25,7 +25,7 @@ jobs:
echo "A nix file was changed. No need to run tests."
echo "nix_changed=false" >> $GITHUB_ENV
fi
- uses: cachix/install-nix-action@v23
if: env.nix_changed == 'true'
@ -53,6 +53,3 @@ jobs:
echo "locally deleting devtools/flake.lock and following the"
echo "instructions in devtools/README.md. This will create a"
echo "new flake.lock you should use to replace the old devtools/flake.lock"

View file

@ -13,7 +13,7 @@ jobs:
runs-on: [self-hosted, macOS, ARM64]
timeout-minutes: 120
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Only run all steps if a nix file changed
run: |
@ -47,6 +47,3 @@ jobs:
echo "locally deleting devtools/flake.lock and following the"
echo "instructions in devtools/README.md. This will create a"
echo "new flake.lock you should use to replace the old devtools/flake.lock"

View file

@ -9,7 +9,7 @@ jobs:
runs-on: [ubuntu-22.04]
timeout-minutes: 120
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Copy example docker file
run: cp docker/nightly-ubuntu-latest/docker-compose.example.yml docker/nightly-ubuntu-latest/docker-compose.yml
@ -26,7 +26,7 @@ jobs:
runs-on: [ubuntu-22.04]
timeout-minutes: 120
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Copy example docker file
run: cp docker/nightly-ubuntu-2204/docker-compose.example.yml docker/nightly-ubuntu-2204/docker-compose.yml
@ -42,7 +42,7 @@ jobs:
runs-on: [ubuntu-22.04]
timeout-minutes: 120
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Copy example docker file
run: cp docker/nightly-ubuntu-2004/docker-compose.example.yml docker/nightly-ubuntu-2004/docker-compose.yml
@ -58,7 +58,7 @@ jobs:
runs-on: [ubuntu-22.04]
timeout-minutes: 120
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Copy example docker file
run: cp docker/nightly-debian-latest/docker-compose.example.yml docker/nightly-debian-latest/docker-compose.yml
@ -74,7 +74,7 @@ jobs:
runs-on: [ubuntu-22.04]
timeout-minutes: 120
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Copy example docker file
run: cp docker/nightly-debian-bookworm/docker-compose.example.yml docker/nightly-debian-bookworm/docker-compose.yml
@ -90,7 +90,7 @@ jobs:
runs-on: [ubuntu-22.04]
timeout-minutes: 120
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Copy example docker file
run: cp docker/nightly-debian-buster/docker-compose.example.yml docker/nightly-debian-buster/docker-compose.yml
@ -100,4 +100,3 @@ jobs:
- name: Run hello world test
run: docker-compose -f docker/nightly-debian-buster/docker-compose.yml run roc examples/helloWorld.roc

View file

@ -1,33 +1,33 @@
on:
workflow_call:
workflow_call:
name: Macos x86-64 rust tests
env:
RUST_BACKTRACE: 1
RUST_BACKTRACE: 1
jobs:
test-rust-macos-x86-64:
runs-on: [self-hosted, macOS, X64]
timeout-minutes: 90
env:
RUSTC_WRAPPER: /Users/username1/.cargo/bin/sccache
steps:
- uses: actions/checkout@v3
test-rust-macos-x86-64:
runs-on: [self-hosted, macOS, X64]
timeout-minutes: 90
env:
RUSTC_WRAPPER: /Users/username1/.cargo/bin/sccache
steps:
- uses: actions/checkout@v4
- name: set LLVM_SYS_160_PREFIX
run: echo "LLVM_SYS_160_PREFIX=$(brew --prefix llvm@16)" >> $GITHUB_ENV
- name: set LLVM_SYS_160_PREFIX
run: echo "LLVM_SYS_160_PREFIX=$(brew --prefix llvm@16)" >> $GITHUB_ENV
- name: Update PATH to use zig 11
run: |
echo "PATH=/Users/username1/Downloads/zig-macos-x86_64-0.11.0:$PATH" >> $GITHUB_ENV
- run: zig version
- name: Update PATH to use zig 11
run: |
echo "PATH=/Users/username1/Downloads/zig-macos-x86_64-0.11.0:$PATH" >> $GITHUB_ENV
- name: test_gen llvm tests
run: cargo nextest-gen-llvm --release --no-fail-fast --locked -E "package(test_gen) - test(gen_str::str_append_scalar)"
- run: zig version
- name: regular rust tests
run: cargo test --locked --release -- --skip opaque_wrap_function --skip gen_list::bool_list_literal --skip platform_switching_swift --skip swift_ui --skip gen_str::str_append_scalar --skip gen_tags::phantom_polymorphic_record && sccache --show-stats
# swift tests are skipped because of "Could not find or use auto-linked library 'swiftCompatibilityConcurrency'" on macos-11 x86_64 CI machine
# this issue may be caused by using older versions of XCode
- name: test_gen llvm tests
run: cargo nextest-gen-llvm --release --no-fail-fast --locked -E "package(test_gen) - test(gen_str::str_append_scalar)"
- name: regular rust tests
run: cargo test --locked --release -- --skip opaque_wrap_function --skip gen_list::bool_list_literal --skip platform_switching_swift --skip swift_ui --skip gen_tags::phantom_polymorphic_record && sccache --show-stats
# swift tests are skipped because of "Could not find or use auto-linked library 'swiftCompatibilityConcurrency'" on macos-11 x86_64 CI machine
# this issue may be caused by using older versions of XCode

View file

@ -13,7 +13,7 @@ jobs:
markdown-link-check:
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- uses: gaurav-nelson/github-action-markdown-link-check@v1
with:
use-quiet-mode: 'yes'

View file

@ -1,38 +1,38 @@
on:
# pull_request:
#pull_request:
workflow_dispatch:
schedule:
- cron: '0 9 * * *'
name: Nightly Release Linux arm64/aarch64
jobs:
build:
name: build and package nightly release
runs-on: [self-hosted, Linux, ARM64]
timeout-minutes: 90
timeout-minutes: 110
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Update PATH to use zig 11
run: |
echo "PATH=/home/username/Downloads/zig-linux-aarch64-0.11.0:$PATH" >> $GITHUB_ENV
- run: zig version
- name: create version.txt
run: ./ci/write_version.sh
- name: build release with lto
run: cargo build --profile=release-with-lto --locked --bin roc --bin roc_ls
run: cargo build --profile=release-with-lto --locked --bin roc --bin roc_language_server
- name: get commit SHA
run: echo "SHA=$(git rev-parse --short "$GITHUB_SHA")" >> $GITHUB_ENV
- name: get date
run: echo "DATE=$(date "+%Y-%m-%d")" >> $GITHUB_ENV
- name: build file name
env:
DATE: ${{ env.DATE }}
@ -42,12 +42,12 @@ jobs:
# this makes the roc binary a lot smaller
- name: strip debug info
run: strip ./target/release-with-lto/roc
- name: Make nightly release tar archive
run: ./ci/package_release.sh ${{ env.RELEASE_FOLDER_NAME }}
- name: Upload roc nightly tar. Actually uploading to github releases has to be done manually.
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: ${{ env.RELEASE_FOLDER_NAME }}.tar.gz
path: ${{ env.RELEASE_FOLDER_NAME }}.tar.gz

View file

@ -1,36 +1,36 @@
on:
# pull_request:
#pull_request:
workflow_dispatch:
schedule:
- cron: '0 9 * * *'
name: Nightly Release Linux x86_64
jobs:
build:
name: build and package nightly release
runs-on: [self-hosted, i7-6700K]
timeout-minutes: 90
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Update PATH to use zig 11
run: |
echo "PATH=/home/big-ci-user/Downloads/zig-linux-x86_64-0.11.0:$PATH" >> $GITHUB_ENV
- run: zig version
- name: create version.txt
run: ./ci/write_version.sh
- name: build release with lto
run: RUSTFLAGS="-C target-cpu=x86-64" cargo build --profile=release-with-lto --locked --bin roc --bin roc_ls
run: RUSTFLAGS="-C target-cpu=x86-64" cargo build --profile=release-with-lto --locked --bin roc --bin roc_language_server
# target-cpu=x86-64 -> For maximal compatibility for all CPU's. This was also faster in our tests: https://roc.zulipchat.com/#narrow/stream/231635-compiler-development/topic/.2Ecargo.2Fconfig.2Etoml/near/325726299
- name: get commit SHA
run: echo "SHA=$(git rev-parse --short "$GITHUB_SHA")" >> $GITHUB_ENV
- name: get date
run: echo "DATE=$(date "+%Y-%m-%d")" >> $GITHUB_ENV
@ -38,12 +38,12 @@ jobs:
run: ./ci/www-repl.sh
- name: Upload wasm repl tar. Actually uploading to github releases has to be done manually.
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: roc_repl_wasm.tar.gz
path: roc_repl_wasm.tar.gz
retention-days: 4
- name: build file name
env:
DATE: ${{ env.DATE }}
@ -53,12 +53,12 @@ jobs:
# this makes the roc binary a lot smaller
- name: strip debug info
run: strip ./target/release-with-lto/roc
- name: Make nightly release tar archive
run: ./ci/package_release.sh ${{ env.RELEASE_FOLDER_NAME }}
- name: Upload roc nightly tar. Actually uploading to github releases has to be done manually.
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: ${{ env.RELEASE_FOLDER_NAME }}.tar.gz
path: ${{ env.RELEASE_FOLDER_NAME }}.tar.gz

View file

@ -1,5 +1,5 @@
on:
# pull_request:
#pull_request:
workflow_dispatch:
schedule:
- cron: '0 9 * * *'
@ -16,8 +16,8 @@ jobs:
runs-on: [self-hosted, macOS, ARM64]
timeout-minutes: 90
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- run: zig version
- name: llvm version
@ -25,24 +25,24 @@ jobs:
- name: run tests
run: cargo test --locked --release
- name: get commit SHA
run: echo "SHA=$(git rev-parse --short "$GITHUB_SHA")" >> $GITHUB_ENV
- name: get date
run: echo "DATE=$(date "+%Y-%m-%d")" >> $GITHUB_ENV
- name: build file name
env:
DATE: ${{ env.DATE }}
SHA: ${{ env.SHA }}
run: echo "RELEASE_FOLDER_NAME=roc_nightly-macos_apple_silicon-$DATE-$SHA" >> $GITHUB_ENV
run: echo "RELEASE_FOLDER_NAME=roc_nightly-macos_apple_silicon-$DATE-$SHA" >> $GITHUB_ENV
- name: write version to file
run: ./ci/write_version.sh
- name: build nightly release
run: cargo build --locked --profile=release-with-lto --bin roc --bin roc_ls
run: cargo build --locked --profile=release-with-lto --bin roc --bin roc_language_server
- name: package release
run: ./ci/package_release.sh ${{ env.RELEASE_FOLDER_NAME }}
@ -61,7 +61,7 @@ jobs:
- name: print date
run: date
- name: Upload artifact Actually uploading to github releases has to be done manually
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: ${{ env.RELEASE_FOLDER_NAME }}.tar.gz
path: ${{ env.RELEASE_FOLDER_NAME }}.tar.gz

View file

@ -1,63 +1,62 @@
on:
# pull_request:
workflow_dispatch:
schedule:
- cron: '0 9 * * *' # 9=9am utc+0
#pull_request:
workflow_dispatch:
schedule:
- cron: "0 9 * * *" # 9=9am utc+0
name: Nightly Release macOS x86_64
env:
LLVM_SYS_160_PREFIX: /usr/local/opt/llvm@16
LLVM_SYS_160_PREFIX: /usr/local/opt/llvm@16
jobs:
test-build-upload:
name: build, test, package and upload nightly release
runs-on: [self-hosted, macOS, X64]
timeout-minutes: 120
steps:
- uses: actions/checkout@v3
test-build-upload:
name: build, test, package and upload nightly release
runs-on: [self-hosted, macOS, X64]
timeout-minutes: 120
steps:
- uses: actions/checkout@v4
- name: Update PATH to use zig 11
run: |
echo "PATH=/Users/username1/Downloads/zig-macos-x86_64-0.11.0:$PATH" >> $GITHUB_ENV
- run: zig version
- name: write version to file
run: ./ci/write_version.sh
- name: execute rust tests
run: cargo test --release --locked -- --skip opaque_wrap_function --skip gen_list::bool_list_literal --skip platform_switching_swift --skip swift_ui --skip gen_str::str_append_scalar --skip gen_tags::phantom_polymorphic_record
# swift tests are skipped because of "Could not find or use auto-linked library 'swiftCompatibilityConcurrency'" on macos-11 x86_64 CI machine
# this issue may be caused by using older versions of XCode
- name: Update PATH to use zig 11
run: |
echo "PATH=/Users/username1/Downloads/zig-macos-x86_64-0.11.0:$PATH" >> $GITHUB_ENV
- name: build release
run: RUSTFLAGS="-C target-cpu=x86-64" cargo build --profile=release-with-lto --locked --bin roc --bin roc_ls
# target-cpu=x86-64 -> For maximal compatibility for all CPU's.
- name: get commit SHA
run: echo "SHA=$(git rev-parse --short "$GITHUB_SHA")" >> $GITHUB_ENV
- name: get date
run: echo "DATE=$(date "+%Y-%m-%d")" >> $GITHUB_ENV
- name: build file name
env:
DATE: ${{ env.DATE }}
SHA: ${{ env.SHA }}
run: echo "RELEASE_FOLDER_NAME=roc_nightly-macos_x86_64-$DATE-$SHA" >> $GITHUB_ENV
- run: zig version
# this makes the roc binary a lot smaller
- name: strip debug info
run: strip ./target/release-with-lto/roc
- name: package release
run: ./ci/package_release.sh ${{ env.RELEASE_FOLDER_NAME }}
- name: Upload artifact. Actually uploading to github releases has to be done manually.
uses: actions/upload-artifact@v3
with:
name: ${{ env.RELEASE_FOLDER_NAME }}.tar.gz
path: ${{ env.RELEASE_FOLDER_NAME }}.tar.gz
retention-days: 4
- name: write version to file
run: ./ci/write_version.sh
- name: execute rust tests
run: cargo test --release --locked -- --skip opaque_wrap_function --skip gen_list::bool_list_literal --skip platform_switching_swift --skip swift_ui --skip gen_tags::phantom_polymorphic_record
# swift tests are skipped because of "Could not find or use auto-linked library 'swiftCompatibilityConcurrency'" on macos-11 x86_64 CI machine
# this issue may be caused by using older versions of XCode
- name: build release
run: RUSTFLAGS="-C target-cpu=x86-64" cargo build --profile=release-with-lto --locked --bin roc --bin roc_language_server
# target-cpu=x86-64 -> For maximal compatibility for all CPU's.
- name: get commit SHA
run: echo "SHA=$(git rev-parse --short "$GITHUB_SHA")" >> $GITHUB_ENV
- name: get date
run: echo "DATE=$(date "+%Y-%m-%d")" >> $GITHUB_ENV
- name: build file name
env:
DATE: ${{ env.DATE }}
SHA: ${{ env.SHA }}
run: echo "RELEASE_FOLDER_NAME=roc_nightly-macos_x86_64-$DATE-$SHA" >> $GITHUB_ENV
# this makes the roc binary a lot smaller
- name: strip debug info
run: strip ./target/release-with-lto/roc
- name: package release
run: ./ci/package_release.sh ${{ env.RELEASE_FOLDER_NAME }}
- name: Upload artifact. Actually uploading to github releases has to be done manually.
uses: actions/upload-artifact@v4
with:
name: ${{ env.RELEASE_FOLDER_NAME }}.tar.gz
path: ${{ env.RELEASE_FOLDER_NAME }}.tar.gz
retention-days: 4

View file

@ -1,25 +1,25 @@
on:
# pull_request:
#pull_request:
workflow_dispatch:
schedule:
- cron: '0 9 * * *'
name: Nightly Release Old Linux arm64 using Earthly
jobs:
build:
name: build and package nightly release
runs-on: [self-hosted, Linux, ARM64]
timeout-minutes: 180
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: get commit SHA
run: echo "SHA=$(git rev-parse --short "$GITHUB_SHA")" >> $GITHUB_ENV
- name: get date
run: echo "DATE=$(date "+%Y-%m-%d")" >> $GITHUB_ENV
- name: build file name
env:
DATE: ${{ env.DATE }}
@ -32,7 +32,7 @@ jobs:
run: earthly +build-nightly-release --RELEASE_FOLDER_NAME=${{ env.RELEASE_FOLDER_NAME }} --ZIG_ARCH=aarch64
- name: Upload roc nightly tar. Actually uploading to github releases has to be done manually.
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: ${{ env.RELEASE_FOLDER_NAME }}.tar.gz
path: ${{ env.RELEASE_FOLDER_NAME }}.tar.gz

View file

@ -1,41 +1,41 @@
on:
# pull_request:
#pull_request:
workflow_dispatch:
schedule:
- cron: '0 9 * * *'
name: Nightly Release Old Linux x86_64 using Earthly
jobs:
build:
name: build and package nightly release
runs-on: [ubuntu-20.04]
timeout-minutes: 90
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: get commit SHA
run: echo "SHA=$(git rev-parse --short "$GITHUB_SHA")" >> $GITHUB_ENV
- name: get date
run: echo "DATE=$(date "+%Y-%m-%d")" >> $GITHUB_ENV
- name: build file name
env:
DATE: ${{ env.DATE }}
SHA: ${{ env.SHA }}
run: echo "RELEASE_FOLDER_NAME=roc_nightly-old_linux_x86_64-$DATE-$SHA" >> $GITHUB_ENV
- name: install earthly
run: sudo /bin/sh -c 'wget https://github.com/earthly/earthly/releases/latest/download/earthly-linux-amd64 -O /usr/local/bin/earthly && chmod +x /usr/local/bin/earthly && /usr/local/bin/earthly bootstrap --with-autocomplete'
- run: earthly --version
- name: build release with earthly
run: earthly +build-nightly-release --RELEASE_FOLDER_NAME=${{ env.RELEASE_FOLDER_NAME }} --RUSTFLAGS="-C target-cpu=x86-64"
- name: Upload roc nightly tar. Actually uploading to github releases has to be done manually.
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: ${{ env.RELEASE_FOLDER_NAME }}.tar.gz
path: ${{ env.RELEASE_FOLDER_NAME }}.tar.gz

View file

@ -1,6 +1,6 @@
on:
workflow_call:
name: test cargo build on linux arm64 inside nix
env:
@ -12,11 +12,11 @@ jobs:
runs-on: [self-hosted, Linux, ARM64]
timeout-minutes: 150
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: test release build
run: nix develop -c cargo build --release --locked
# TODO
# TODO
#- name: build tests without running
# run: cargo test --no-run --release

View file

@ -1,7 +1,7 @@
on:
workflow_call:
name: test default.nix on linux arm64
name: test default.nix on linux arm64
env:
RUST_BACKTRACE: 1
@ -12,7 +12,7 @@ jobs:
runs-on: [self-hosted, Linux, ARM64]
timeout-minutes: 150
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: test building default.nix
run: nix-build

View file

@ -12,10 +12,10 @@ jobs:
runs-on: [self-hosted, i5-4690K]
timeout-minutes: 90
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: test building default.nix
run: nix-build
run: nix-build
- name: execute tests with --release
run: nix develop -c cargo test --locked --release

View file

@ -12,7 +12,7 @@ jobs:
runs-on: [self-hosted, macOS, ARM64]
timeout-minutes: 90
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
# These started to accumulate quickly since #5990, not sure why
- name: Clean up old nix shells

View file

@ -12,7 +12,7 @@ jobs:
runs-on: [macos-12]
timeout-minutes: 90
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- uses: cachix/install-nix-action@v22

View file

@ -18,7 +18,7 @@ jobs:
env:
FORCE_COLOR: 1
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- run: cargo install typos-cli --version 1.0.11

View file

@ -9,7 +9,7 @@ jobs:
runs-on: [self-hosted, macOS, ARM64]
timeout-minutes: 90
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: get the latest release archive
run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-macos_apple_silicon-latest.tar.gz
@ -45,6 +45,3 @@ jobs:
cp target/release/repl_basic_test ../../roc_nightly
cd ../../roc_nightly
./repl_basic_test

View file

@ -13,14 +13,14 @@ jobs:
runs-on: ${{ matrix.os }}
timeout-minutes: 90
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- uses: goto-bus-stop/setup-zig@v2
with:
version: 0.11.0
- name: get the latest release archive for linux (x86_64)
if: startsWith(matrix.os, 'ubuntu')
run: |
run: |
curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-linux_x86_64-latest.tar.gz
@ -40,7 +40,3 @@ jobs:
rm -rf roc_nightly
curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-old_linux_x86_64-latest.tar.gz
./ci/basic_nightly_test.sh

View file

@ -14,7 +14,7 @@ jobs:
env:
RUSTC_WRAPPER: /home/big-ci-user/.cargo/bin/sccache
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Check for duplicate AUTHORS
run: diff <(sort AUTHORS) <(sort AUTHORS | uniq) # The < operator treats a string as a file. diff 'succeeds' if no difference.
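The AUTHORS duplicate check above relies on bash process substitution: `<(command)` exposes a command's output as a file path, so `diff` compares the sorted list with its de-duplicated form and exits non-zero as soon as any name appears twice. A small self-contained demo, using a made-up sample file rather than the real AUTHORS:

#!/usr/bin/env bash
# Demo only; authors_demo.txt is hypothetical sample data.
printf 'alice\nbob\nalice\n' > authors_demo.txt

if diff <(sort authors_demo.txt) <(sort authors_demo.txt | uniq) > /dev/null; then
  echo "no duplicate entries"
else
  echo "duplicate entries found"  # taken here, because 'alice' appears twice
fi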
@ -22,7 +22,7 @@ jobs:
- name: Update PATH to use zig 11
run: |
echo "PATH=/home/big-ci-user/Downloads/zig-linux-x86_64-0.11.0:$PATH" >> $GITHUB_ENV
- run: zig version
- name: zig fmt check, zig tests
@ -31,6 +31,9 @@ jobs:
- name: roc format check on builtins
run: cargo run --locked --release format --check crates/compiler/builtins/roc
- name: ensure there are no unused dependencies
run: cargo +nightly-2023-08-20 udeps --all-targets
- name: zig wasm tests
run: cd crates/compiler/builtins/bitcode && ./run-wasm-tests.sh
@ -38,9 +41,12 @@ jobs:
# see #5904 for skipped test
run: cargo test --locked --release -- --skip cli_run::expects_dev_and_test && sccache --show-stats
- name: tests examples in docs
run: cargo test --doc --release
- name: check that the platform`s produced dylib is loadable
run: cd examples/platform-switching/rust-platform && LD_LIBRARY_PATH=. cargo test --release --locked
- name: test the dev backend # these tests require an explicit feature flag
run: cargo test --locked --release --package test_gen --no-default-features --features gen-dev && sccache --show-stats

View file

@ -15,7 +15,7 @@ jobs:
timeout-minutes: 150
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- run: Add-Content -Path "$env:GITHUB_ENV" -Value "GITHUB_RUNNER_CPU=$((Get-CimInstance Win32_Processor).Name)"
@ -29,8 +29,8 @@ jobs:
- name: zig version
run: zig version
- name: install rust nightly 1.71.0
run: rustup install nightly-2023-05-28
- name: install rust nightly 1.72.0
run: rustup install nightly-2023-08-20
- name: set up llvm 16
run: |

View file

@ -15,7 +15,7 @@ jobs:
timeout-minutes: 150
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- run: Add-Content -Path "$env:GITHUB_ENV" -Value "GITHUB_RUNNER_CPU=$((Get-CimInstance Win32_Processor).Name)"
@ -37,29 +37,26 @@ jobs:
cd crates\compiler\builtins\bitcode\
zig build test
- name: install rust nightly 1.71.0
run: rustup install nightly-2023-05-28
- name: install rust nightly 1.73.0
run: rustup install nightly-2023-08-20
- name: set up llvm 16
run: |
curl.exe -f -L -O -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" https://github.com/roc-lang/llvm-package-windows/releases/download/v16.0.6/LLVM-16.0.6-win64.7z
7z x LLVM-16.0.6-win64.7z -oC:\LLVM-16.0.6-win64
- name: Build tests --release without running.
- name: Build tests --release without running.
run: cargo test --locked --release --no-run
# Why are these tests not build with previous command? => fingerprint error. Use `CARGO_LOG=cargo::core::compiler::fingerprint=info` to investigate
- name: Build specific tests without running.
run: cargo test --locked --release --no-run -p roc_ident -p roc_region -p roc_collections -p roc_can -p roc_types -p roc_solve -p roc_mono -p roc_gen_dev -p roc_gen_wasm -p roc_serialize -p roc_linker -p roc_cli -p test_gen
- name: Build specific tests without running.
run: cargo test --locked --release --no-run -p roc_ident -p roc_region -p roc_collections -p roc_can -p roc_types -p roc_solve -p roc_mono -p roc_gen_dev -p roc_gen_wasm -p roc_serialize -p roc_linker -p roc_cli -p test_gen
- name: Test setjmp/longjmp logic
run: cargo test-gen-dev --locked --release nat_alias && cargo test-gen-dev --locked --release a_crash
- name: Run gen tests
run: cargo test-gen-llvm --locked --release gen_str
run: cargo test-gen-llvm --locked --release gen_str
- name: Actually run the tests.
run: cargo test --locked --release -p roc_ident -p roc_region -p roc_collections -p roc_can -p roc_types -p roc_solve -p roc_mono -p roc_gen_dev -p roc_gen_wasm -p roc_serialize -p roc_linker -p roc_cli
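If the fingerprint problem mentioned in the comment above needs to be diagnosed, cargo can log its rebuild decisions. A sketch of how that hint might be used locally (the grep filter and log file name are assumptions, not part of this commit):

# Capture cargo's fingerprint (rebuild-decision) logging while building the tests.
CARGO_LOG=cargo::core::compiler::fingerprint=info \
  cargo test --locked --release --no-run 2> fingerprint.log

# The log (written to stderr) notes, per unit, which input was considered dirty and forced a rebuild.
grep -i dirty fingerprint.log | head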

.gitignore (vendored): 9 changes
View file

@ -109,4 +109,11 @@ www/content/examples
www/examples-main.zip
www/examples-main
examples/static-site-gen/**/*.html
examples/static-site-gen/**/*.html
# glue auto-generated fixture code
crates/glue/tests/fixtures/*/Cargo.toml
crates/glue/tests/fixtures/*/build.rs
crates/glue/tests/fixtures/*/host.c
crates/glue/tests/fixtures/*/src/main.rs
crates/glue/tests/fixtures/*/test_glue/

View file

@ -10,7 +10,7 @@ On MacOS and Linux, we highly recommend Using [nix](https://nixos.org/download.h
### On Linux x86_64 or MacOS aarch64/arm64/x86_64
#### Install
#### Installing Nix
If you are running ArchLinux or a derivative like Manjaro, you'll need to run `sudo sysctl -w kernel.unprivileged_userns_clone=1` before installing nix.
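Note that `sysctl -w` only changes the value for the running kernel. A hedged sketch of making the setting persistent across reboots on such systems (the drop-in file name is an assumption):

# One-off, as described above:
sudo sysctl -w kernel.unprivileged_userns_clone=1

# Persist across reboots via a sysctl drop-in (hypothetical file name):
echo 'kernel.unprivileged_userns_clone = 1' | sudo tee /etc/sysctl.d/99-unprivileged-userns.conf
sudo sysctl --system  # reload all sysctl configuration files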

Cargo.lock (generated): 216 changes
View file

@ -40,9 +40,9 @@ dependencies = [
[[package]]
name = "aligned"
version = "0.4.1"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "80a21b9440a626c7fc8573a9e3d3a06b75c7c97754c2949bc7857b90353ca655"
checksum = "377e4c0ba83e4431b10df45c1d4666f178ea9c552cac93e60c3a88bf32785923"
dependencies = [
"as-slice",
]
@ -707,7 +707,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856"
dependencies = [
"cfg-if",
"hashbrown 0.14.3",
"hashbrown",
"lock_api",
"once_cell",
"parking_lot_core",
@ -771,6 +771,12 @@ dependencies = [
"winapi",
]
[[package]]
name = "dissimilar"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "86e3bdc80eee6e16b2b6b0f87fbc98c04bee3455e35174c0de1a125d0688c632"
[[package]]
name = "distance"
version = "0.4.0"
@ -838,6 +844,19 @@ dependencies = [
"regex",
]
[[package]]
name = "env_logger"
version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4cd405aab171cb85d6735e5c8d9db038c17d3ca007a4d2c25f337935c3d90580"
dependencies = [
"humantime",
"is-terminal",
"log",
"regex",
"termcolor",
]
[[package]]
name = "equivalent"
version = "1.0.1"
@ -864,6 +883,16 @@ dependencies = [
"str-buf",
]
[[package]]
name = "expect-test"
version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "30d9eafeadd538e68fb28016364c9732d78e420b9ff8853fa5e4058861e9f8d3"
dependencies = [
"dissimilar",
"once_cell",
]
[[package]]
name = "fd-lock"
version = "3.0.13"
@ -1048,9 +1077,9 @@ checksum = "6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0"
[[package]]
name = "h2"
version = "0.3.21"
version = "0.3.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91fc23aa11be92976ef4729127f1a74adf36d8436f7816b185d18df956790833"
checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8"
dependencies = [
"bytes",
"fnv",
@ -1058,7 +1087,7 @@ dependencies = [
"futures-sink",
"futures-util",
"http",
"indexmap 1.9.3",
"indexmap",
"slab",
"tokio",
"tokio-util",
@ -1071,12 +1100,6 @@ version = "1.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7"
[[package]]
name = "hashbrown"
version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
[[package]]
name = "hashbrown"
version = "0.14.3"
@ -1151,6 +1174,12 @@ version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
[[package]]
name = "humantime"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
[[package]]
name = "hyper"
version = "0.14.27"
@ -1259,16 +1288,6 @@ dependencies = [
"version_check",
]
[[package]]
name = "indexmap"
version = "1.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99"
dependencies = [
"autocfg",
"hashbrown 0.12.3",
]
[[package]]
name = "indexmap"
version = "2.1.0"
@ -1276,7 +1295,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f"
dependencies = [
"equivalent",
"hashbrown 0.14.3",
"hashbrown",
]
[[package]]
@ -1327,6 +1346,17 @@ version = "2.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28b29a3cd74f0f4598934efe3aeba42bae0eb4680554128851ebbecb02af14e6"
[[package]]
name = "is-terminal"
version = "0.4.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0bad00257d07be169d870ab665980b06cdb366d792ad690bf2e76876dc503455"
dependencies = [
"hermit-abi 0.3.3",
"rustix",
"windows-sys 0.52.0",
]
[[package]]
name = "itertools"
version = "0.9.0"
@ -1563,9 +1593,9 @@ dependencies = [
[[package]]
name = "mio"
version = "0.8.8"
version = "0.8.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2"
checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c"
dependencies = [
"libc",
"wasi",
@ -1663,8 +1693,8 @@ checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441"
dependencies = [
"crc32fast",
"flate2",
"hashbrown 0.14.3",
"indexmap 2.1.0",
"hashbrown",
"indexmap",
"memchr",
"ruzstd",
]
@ -1732,33 +1762,6 @@ dependencies = [
"windows-targets 0.48.5",
]
[[package]]
name = "peg"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "400bcab7d219c38abf8bd7cc2054eb9bbbd4312d66f6a5557d572a203f646f61"
dependencies = [
"peg-macros",
"peg-runtime",
]
[[package]]
name = "peg-macros"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "46e61cce859b76d19090f62da50a9fe92bab7c2a5f09e183763559a2ac392c90"
dependencies = [
"peg-runtime",
"proc-macro2",
"quote",
]
[[package]]
name = "peg-runtime"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "36bae92c60fa2398ce4678b98b2c4b5a7c61099961ca1fa305aec04a9ad28922"
[[package]]
name = "percent-encoding"
version = "2.3.0"
@ -1922,7 +1925,7 @@ version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "588f6378e4dd99458b60ec275b4477add41ce4fa9f64dcba6f15adccb19b50d6"
dependencies = [
"env_logger",
"env_logger 0.8.4",
"log",
"rand",
]
@ -2150,6 +2153,7 @@ version = "0.0.1"
dependencies = [
"bumpalo",
"indoc",
"regex",
"roc_build",
"roc_cli",
"roc_repl_cli",
@ -2273,6 +2277,7 @@ dependencies = [
"roc_collections",
"roc_command_utils",
"roc_constrain",
"roc_debug_flags",
"roc_error_macros",
"roc_gen_dev",
"roc_gen_llvm",
@ -2306,7 +2311,6 @@ dependencies = [
"roc_module",
"roc_region",
"roc_target",
"tempfile",
]
[[package]]
@ -2369,6 +2373,7 @@ dependencies = [
"mimalloc",
"parking_lot",
"pretty_assertions",
"regex",
"roc_build",
"roc_builtins",
"roc_can",
@ -2409,7 +2414,7 @@ dependencies = [
"bitvec",
"bumpalo",
"fnv",
"hashbrown 0.14.3",
"hashbrown",
"im",
"im-rc",
"smallvec",
@ -2472,8 +2477,6 @@ name = "roc_docs"
version = "0.0.1"
dependencies = [
"bumpalo",
"peg",
"pretty_assertions",
"pulldown-cmark",
"roc_builtins",
"roc_can",
@ -2604,7 +2607,7 @@ dependencies = [
"cli_utils",
"dircpy",
"fnv",
"indexmap 2.1.0",
"indexmap",
"indoc",
"libc",
"libloading",
@ -2649,6 +2652,11 @@ name = "roc_lang_srv"
version = "0.0.1"
dependencies = [
"bumpalo",
"env_logger 0.10.2",
"expect-test",
"futures",
"indoc",
"log",
"parking_lot",
"roc_can",
"roc_collections",
@ -2735,7 +2743,7 @@ dependencies = [
"roc_solve",
"roc_solve_problem",
"roc_target",
"roc_test_utils",
"roc_test_utils_dir",
"roc_types",
"ven_pretty",
]
@ -2770,7 +2778,7 @@ dependencies = [
"roc_solve",
"roc_solve_problem",
"roc_target",
"roc_test_utils",
"roc_test_utils_dir",
"roc_tracing",
"roc_types",
"roc_unify",
@ -2798,7 +2806,7 @@ dependencies = [
"arrayvec 0.7.4",
"bitvec",
"bumpalo",
"hashbrown 0.14.3",
"hashbrown",
"parking_lot",
"roc_builtins",
"roc_can",
@ -2978,7 +2986,6 @@ dependencies = [
"roc_repl_eval",
"roc_reporting",
"roc_target",
"unicode-segmentation",
]
[[package]]
@ -3046,9 +3053,7 @@ dependencies = [
"arrayvec 0.7.4",
"bumpalo",
"indoc",
"insta",
"lazy_static",
"libtest-mimic",
"pretty_assertions",
"regex",
"roc_builtins",
@ -3062,7 +3067,6 @@ dependencies = [
"roc_exhaustive",
"roc_load",
"roc_module",
"roc_packaging",
"roc_parse",
"roc_problem",
"roc_region",
@ -3114,6 +3118,7 @@ dependencies = [
name = "roc_target"
version = "0.0.1"
dependencies = [
"roc_error_macros",
"strum",
"strum_macros",
"target-lexicon",
@ -3124,6 +3129,12 @@ name = "roc_test_utils"
version = "0.0.1"
dependencies = [
"pretty_assertions",
]
[[package]]
name = "roc_test_utils_dir"
version = "0.0.1"
dependencies = [
"remove_dir_all 0.8.2",
]
@ -3831,9 +3842,7 @@ version = "0.0.1"
dependencies = [
"bumpalo",
"indoc",
"insta",
"lazy_static",
"pretty_assertions",
"regex",
"roc_can",
"roc_derive",
@ -3864,6 +3873,7 @@ dependencies = [
"roc_parse",
"roc_region",
"roc_test_utils",
"roc_test_utils_dir",
"walkdir",
]
@ -4187,10 +4197,8 @@ version = "0.0.1"
dependencies = [
"bumpalo",
"indoc",
"insta",
"lazy_static",
"libtest-mimic",
"pretty_assertions",
"regex",
"roc_builtins",
"roc_collections",
@ -4546,6 +4554,15 @@ dependencies = [
"windows-targets 0.48.5",
]
[[package]]
name = "windows-sys"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
dependencies = [
"windows-targets 0.52.0",
]
[[package]]
name = "windows-targets"
version = "0.42.2"
@ -4576,6 +4593,21 @@ dependencies = [
"windows_x86_64_msvc 0.48.5",
]
[[package]]
name = "windows-targets"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd"
dependencies = [
"windows_aarch64_gnullvm 0.52.0",
"windows_aarch64_msvc 0.52.0",
"windows_i686_gnu 0.52.0",
"windows_i686_msvc 0.52.0",
"windows_x86_64_gnu 0.52.0",
"windows_x86_64_gnullvm 0.52.0",
"windows_x86_64_msvc 0.52.0",
]
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.42.2"
@ -4588,6 +4620,12 @@ version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea"
[[package]]
name = "windows_aarch64_msvc"
version = "0.42.2"
@ -4600,6 +4638,12 @@ version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
[[package]]
name = "windows_aarch64_msvc"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef"
[[package]]
name = "windows_i686_gnu"
version = "0.42.2"
@ -4612,6 +4656,12 @@ version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
[[package]]
name = "windows_i686_gnu"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313"
[[package]]
name = "windows_i686_msvc"
version = "0.42.2"
@ -4624,6 +4674,12 @@ version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
[[package]]
name = "windows_i686_msvc"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a"
[[package]]
name = "windows_x86_64_gnu"
version = "0.42.2"
@ -4636,6 +4692,12 @@ version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
[[package]]
name = "windows_x86_64_gnu"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.42.2"
@ -4648,6 +4710,12 @@ version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e"
[[package]]
name = "windows_x86_64_msvc"
version = "0.42.2"
@ -4660,6 +4728,12 @@ version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
[[package]]
name = "windows_x86_64_msvc"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04"
[[package]]
name = "winreg"
version = "0.50.0"

View file

@ -17,6 +17,7 @@ members = [
"crates/repl_expect",
"crates/roc_std",
"crates/test_utils",
"crates/test_utils_dir",
"crates/valgrind",
"crates/tracing",
"crates/utils/*",
@ -33,7 +34,6 @@ exclude = [
"ci/benchmarks/bench-runner",
"ci/repl_basic_test",
# Examples sometimes have Rust hosts in their platforms. The compiler should ignore those.
"crates/cli_testing_examples",
"examples",
]
# Needed to be able to run `cargo run -p roc_cli --no-default-features` -
@ -125,7 +125,6 @@ packed_struct = "0.10.1"
page_size = "0.5.0"
palette = "0.6.1"
parking_lot = "0.12"
peg = "0.8.1"
perfcnt = "0.8.0"
pest = "2.5.6"
pest_derive = "2.5.6"
@ -196,4 +195,4 @@ lto = "thin" # TODO: We could consider full here since this is only used for pac
[profile.debug-full]
inherits = "dev"
debug = true
debug = true

View file

@ -1,6 +1,6 @@
VERSION 0.6
FROM rust:1.71.1-slim-buster # make sure to update rust-toolchain.toml too so that everything uses the same rust version
FROM rust:1.73.0-slim-buster # make sure to update rust-toolchain.toml too so that everything uses the same rust version
WORKDIR /earthbuild
prep-debian:
@ -32,7 +32,7 @@ install-zig-llvm:
RUN apt -y install libpolly-16-dev # required by llvm-sys crate
ENV RUSTFLAGS="-C link-arg=-fuse-ld=lld -C target-cpu=native"
RUN apt -y install libssl-dev
RUN OPENSSL_NO_VENDOR=1 cargo install wasm-pack
RUN wget https://rustwasm.github.io/wasm-pack/installer/init.sh -O init.sh && sh init.sh
# sccache
RUN cargo install sccache --locked
RUN sccache -V
@ -53,7 +53,7 @@ build-nightly-release:
COPY --dir .git LICENSE LEGAL_DETAILS ci ./
# version.txt is used by the CLI: roc --version
RUN ./ci/write_version.sh
RUN RUSTFLAGS=$RUSTFLAGS cargo build --profile=release-with-lto --locked --bin roc --bin roc_ls
RUN RUSTFLAGS=$RUSTFLAGS cargo build --profile=release-with-lto --locked --bin roc --bin roc_language_server
RUN ./ci/package_release.sh $RELEASE_FOLDER_NAME
RUN ls
SAVE ARTIFACT ./$RELEASE_FOLDER_NAME.tar.gz AS LOCAL $RELEASE_FOLDER_NAME.tar.gz

View file

@ -5,7 +5,7 @@
- [**installation** guide](https://github.com/roc-lang/roc/tree/main/getting_started)
- [**tutorial**](https://roc-lang.org/tutorial)
- [**docs** for the standard library](https://www.roc-lang.org/builtins)
- [**examples**](https://github.com/roc-lang/examples/tree/main/examples)
- [**examples**](https://www.roc-lang.org/examples)
- [**faq**: frequently asked questions](https://github.com/roc-lang/roc/blob/main/www/content/faq.md)
- [**group chat**](https://roc.zulipchat.com) for help, questions and discussions
@ -33,6 +33,9 @@ If you would like your company to become a corporate sponsor of Roc's developmen
We'd also like to express our gratitude to our generous [individual sponsors](https://github.com/sponsors/roc-lang/)! A special thanks to those sponsoring $25/month or more:
* [Krzysztof G.](https://github.com/krzysztofgb)
* [Sam Mohr](https://github.com/smores56)
* [Steven Chen](https://github.com/megakilo)
* [Drew Lazzeri](https://github.com/asteroidb612)
* [Alex Binaei](https://github.com/mrmizz)
* [Jono Mallanyk](https://github.com/jonomallanyk)

View file

@ -87,7 +87,10 @@ fn do_all_benches(nr_repeat_benchmarks: usize) -> HashSet<String> {
return HashSet::new();
}
println!("\n\nDoing benchmarks {:?} times to reduce flukes.\n\n", nr_repeat_benchmarks);
println!(
"\n\nDoing benchmarks {:?} times to reduce flukes.\n\n",
nr_repeat_benchmarks
);
for _ in 1..nr_repeat_benchmarks {
delete_old_bench_results();
@ -111,19 +114,16 @@ fn do_all_benches(nr_repeat_benchmarks: usize) -> HashSet<String> {
// returns Vec with names of regressed benchmarks
fn do_benchmark(branch_name: &'static str) -> HashSet<String> {
let mut bench_cmd =
Command::new(format!(
"./bench-folder-{}/target/release/deps/time_bench",
branch_name
));
let mut bench_cmd = Command::new(format!(
"./bench-folder-{}/target/release/deps/time_bench",
branch_name
));
let bench_cmd_w_args =
bench_cmd.args(&["--bench", "--noplot"]);
let bench_cmd_w_args = bench_cmd.args(&["--bench", "--noplot"]);
let bench_cmd_as_str = format!("{bench_cmd_w_args:?}");
let mut bench_cmd_child =
bench_cmd_w_args
let mut bench_cmd_child = bench_cmd_w_args
.stdout(Stdio::piped())
.stderr(Stdio::inherit())
.spawn()
@ -163,8 +163,7 @@ fn do_benchmark(branch_name: &'static str) -> HashSet<String> {
"Error: time-bench execution failed with exit code {}.\n\
See output above for error info.\n\
Command was:\n\t{}",
exit_status,
bench_cmd_as_str
exit_status, bench_cmd_as_str
);
}
@ -220,7 +219,7 @@ fn sha_file(file_path: &Path) -> Result<String, io::Error> {
assert!(disassembly_output.status.success());
let mut reader = BufReader::new(disassembly_output.stdout.as_slice());
// the first line contains the path, so we skip it
let mut _discard_lines = String::new();
reader.read_line(&mut _discard_lines)?;
@ -265,7 +264,7 @@ fn calc_hashes_for_folder(benches_path_str: &str) -> HashMap<String, String> {
}
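// Detects whether the benchmark sources changed by comparing file hashes (see calc_hashes_for_folder).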
fn check_if_bench_executables_changed() -> bool {
let bench_folder_str = "/crates/cli_testing_examples/benchmarks/";
let bench_folder_str = "/crates/cli/tests/benchmarks/";
let main_benches_path_str = [BENCH_FOLDER_MAIN, bench_folder_str].join("");

View file

@ -11,11 +11,11 @@ BENCH_SUFFIX=$1
cargo criterion -V
cd crates/cli && cargo criterion --no-run && cd ../..
mkdir -p bench-folder/crates/cli_testing_examples/benchmarks
mkdir -p bench-folder/crates/cli/tests/benchmarks
mkdir -p bench-folder/crates/compiler/builtins/bitcode/src
mkdir -p bench-folder/target/release/deps
cp "crates/cli_testing_examples/benchmarks/"*".roc" bench-folder/crates/cli_testing_examples/benchmarks/
cp -r crates/cli_testing_examples/benchmarks/platform bench-folder/crates/cli_testing_examples/benchmarks/
cp "crates/cli/tests/benchmarks/"*".roc" bench-folder/crates/cli/tests/benchmarks/
cp -r crates/cli/tests/benchmarks/platform bench-folder/crates/cli/tests/benchmarks/
cp crates/compiler/builtins/bitcode/src/str.zig bench-folder/crates/compiler/builtins/bitcode/src
cp target/release/roc bench-folder/target/release

View file

@ -5,19 +5,19 @@ set -euxo pipefail
# this makes the binaries a lot smaller
strip ./target/release-with-lto/roc
strip ./target/release-with-lto/roc_ls
strip ./target/release-with-lto/roc_language_server
# to be able to delete "target" later
cp target/release-with-lto/roc ./roc
cp target/release-with-lto/roc_ls ./roc_lang_server
cp target/release-with-lto/roc_language_server ./roc_language_server
# delete unnecessary files and folders
git clean -fdx --exclude roc --exclude roc_lang_server
git clean -fdx --exclude roc --exclude roc_language_server
mkdir $1
mv roc roc_lang_server LICENSE LEGAL_DETAILS $1
mv roc roc_language_server LICENSE LEGAL_DETAILS $1
mkdir $1/examples
mv examples/helloWorld.roc examples/platform-switching examples/cli $1/examples

View file

@ -7,7 +7,7 @@ use std::time::Duration;
fn roc_repl_session() -> Result<PtyReplSession, Error> {
let roc_repl = PtyReplSession {
echo_on: false,
prompt: "\u{1b}[0K\u{1b}[34\u{1b}[0m ".to_string(),
prompt: "\u{1b}[0K\u{1b}[1;36\u{1b}[0m ".to_string(),
pty_session: spawn("./roc repl", Some(7000))?,
quit_command: None,
};
@ -23,8 +23,8 @@ fn main() -> Result<(), Error> {
thread::sleep(Duration::from_secs(1));
match repl.exp_regex(r".*2\u{1b}\[35m : \u{1b}\[0mNum *.*") {
Ok((a, b)) => {
match repl.exp_regex(r".*2\u{1b}\[1;32m : \u{1b}\[0mNum *.*") { // 2 : Num
Ok(_) => {
println!("Expected output received.");
return Ok(());
}

View file

@ -71,6 +71,7 @@ inkwell.workspace = true
libc.workspace = true
libloading.workspace = true
mimalloc.workspace = true
regex.workspace = true
signal-hook.workspace = true
strum.workspace = true
target-lexicon.workspace = true

View file

@ -1,6 +0,0 @@
fn main() {
// workaround for issue https://github.com/NixOS/nixpkgs/issues/166205 . This println can be removed when this issue is fixed. Upgrading to LLVM 14 could also fix this issue.
// also see https://github.com/NixOS/nixpkgs/pull/181485
#[cfg(all(target_os = "macos", target_arch = "aarch64"))]
println!("cargo:rustc-link-lib=c++abi")
}

View file

@ -13,14 +13,17 @@ use roc_build::program::{
handle_error_module, handle_loading_problem, standard_load_config, BuildFileError,
BuildOrdering, BuiltFile, CodeGenBackend, CodeGenOptions, DEFAULT_ROC_FILENAME,
};
use roc_collections::MutMap;
use roc_error_macros::{internal_error, user_error};
use roc_gen_dev::AssemblyBackendMode;
use roc_gen_llvm::llvm::build::LlvmBackendMode;
use roc_load::{ExpectMetadata, Threading};
use roc_module::symbol::ModuleId;
use roc_mono::ir::OptLevel;
use roc_packaging::cache::RocCacheDir;
use roc_packaging::tarball::Compression;
use roc_target::Target;
use roc_reporting::report::ANSI_STYLE_CODES;
use roc_target::{Architecture, Target};
use std::env;
use std::ffi::{CString, OsStr, OsString};
use std::io;
@ -28,9 +31,8 @@ use std::mem::ManuallyDrop;
use std::os::raw::{c_char, c_int};
use std::path::{Path, PathBuf};
use std::process;
use std::time::Instant;
use std::time::{Duration, Instant};
use strum::IntoEnumIterator;
use target_lexicon::{Architecture, Triple};
#[cfg(not(target_os = "linux"))]
use tempfile::TempDir;
@ -61,6 +63,7 @@ pub const FLAG_LIB: &str = "lib";
pub const FLAG_NO_LINK: &str = "no-link";
pub const FLAG_TARGET: &str = "target";
pub const FLAG_TIME: &str = "time";
pub const FLAG_VERBOSE: &str = "verbose";
pub const FLAG_LINKER: &str = "linker";
pub const FLAG_PREBUILT: &str = "prebuilt-platform";
pub const FLAG_CHECK: &str = "check";
@ -68,6 +71,7 @@ pub const FLAG_STDIN: &str = "stdin";
pub const FLAG_STDOUT: &str = "stdout";
pub const FLAG_WASM_STACK_SIZE_KB: &str = "wasm-stack-size-kb";
pub const FLAG_OUTPUT: &str = "output";
pub const FLAG_FUZZ: &str = "fuzz";
pub const ROC_FILE: &str = "ROC_FILE";
pub const ROC_DIR: &str = "ROC_DIR";
pub const GLUE_DIR: &str = "GLUE_DIR";
@ -111,7 +115,7 @@ pub fn build_app() -> Command {
let flag_profiling = Arg::new(FLAG_PROFILING)
.long(FLAG_PROFILING)
.help("Keep debug info in the final generated program even in optmized builds")
.help("Keep debug info in the final generated program even in optimized builds")
.action(ArgAction::SetTrue)
.required(false);
@ -139,6 +143,12 @@ pub fn build_app() -> Command {
.value_parser(value_parser!(u32))
.required(false);
let flag_fuzz = Arg::new(FLAG_FUZZ)
.long(FLAG_FUZZ)
.help("Instrument the roc binary for fuzzing with roc-fuzz")
.action(ArgAction::SetTrue)
.required(false);
let roc_file_to_run = Arg::new(ROC_FILE)
.help("The .roc file of an app to run")
.value_parser(value_parser!(PathBuf))
@ -175,6 +185,7 @@ pub fn build_app() -> Command {
.arg(flag_time.clone())
.arg(flag_linker.clone())
.arg(flag_prebuilt.clone())
.arg(flag_fuzz.clone())
.arg(flag_wasm_stack_size_kb)
.arg(
Arg::new(FLAG_TARGET)
@ -225,6 +236,14 @@ pub fn build_app() -> Command {
.arg(flag_time.clone())
.arg(flag_linker.clone())
.arg(flag_prebuilt.clone())
.arg(flag_fuzz.clone())
.arg(
Arg::new(FLAG_VERBOSE)
.long(FLAG_VERBOSE)
.help("Print detailed test statistics by module")
.action(ArgAction::SetTrue)
.required(false)
)
.arg(
Arg::new(ROC_FILE)
.help("The .roc file for the main module")
@ -248,6 +267,7 @@ pub fn build_app() -> Command {
.arg(flag_time.clone())
.arg(flag_linker.clone())
.arg(flag_prebuilt.clone())
.arg(flag_fuzz.clone())
.arg(roc_file_to_run.clone())
.arg(args_for_app.clone().last(true))
)
@ -262,11 +282,12 @@ pub fn build_app() -> Command {
.arg(flag_time.clone())
.arg(flag_linker.clone())
.arg(flag_prebuilt.clone())
.arg(flag_fuzz.clone())
.arg(roc_file_to_run.clone())
.arg(args_for_app.clone().last(true))
)
.subcommand(Command::new(CMD_FORMAT)
.about("Format a .roc file using standard Roc formatting")
.about("Format a .roc file or the .roc files contained in a directory using standard\nRoc formatting")
.arg(
Arg::new(DIRECTORY_OR_FILES)
.index(1)
@ -294,6 +315,7 @@ pub fn build_app() -> Command {
.action(ArgAction::SetTrue)
.required(false),
)
.after_help("If DIRECTORY_OR_FILES is omitted, the .roc files in the current working\ndirectory are formatted.")
)
.subcommand(Command::new(CMD_VERSION)
.about(concatcp!("Print the Roc compiler's version, which is currently ", VERSION)))
@ -392,6 +414,7 @@ pub fn build_app() -> Command {
.arg(flag_time)
.arg(flag_linker)
.arg(flag_prebuilt)
.arg(flag_fuzz)
.arg(roc_file_to_run)
.arg(args_for_app.trailing_var_arg(true))
}
@ -418,16 +441,22 @@ fn opt_level_from_flags(matches: &ArgMatches) -> OptLevel {
}
#[cfg(windows)]
pub fn test(_matches: &ArgMatches, _triple: Triple) -> io::Result<i32> {
pub fn test(_matches: &ArgMatches, _target: Target) -> io::Result<i32> {
todo!("running tests does not work on windows right now")
}
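/// Per-module results of running top-level `expect`s; used for the `--verbose` summary.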
struct ModuleTestResults {
module_id: ModuleId,
failed_count: usize,
passed_count: usize,
tests_duration: Duration,
}
#[cfg(not(windows))]
pub fn test(matches: &ArgMatches, triple: Triple) -> io::Result<i32> {
pub fn test(matches: &ArgMatches, target: Target) -> io::Result<i32> {
use roc_build::program::report_problems_monomorphized;
use roc_load::{ExecutionMode, FunctionKind, LoadConfig, LoadMonomorphizedError};
use roc_packaging::cache;
use roc_target::TargetInfo;
let start_time = Instant::now();
let arena = Bump::new();
@ -464,14 +493,12 @@ pub fn test(matches: &ArgMatches, triple: Triple) -> io::Result<i32> {
}
let arena = &arena;
let target = &triple;
let target_info = TargetInfo::from(target);
// TODO may need to determine this dynamically based on dev builds.
let function_kind = FunctionKind::LambdaSet;
// Step 1: compile the app and generate the .o file
let load_config = LoadConfig {
target_info,
target,
function_kind,
// TODO: expose this from CLI?
render: roc_reporting::report::RenderTarget::ColorTerminal,
@ -500,15 +527,17 @@ pub fn test(matches: &ArgMatches, triple: Triple) -> io::Result<i32> {
let mut expectations = std::mem::take(&mut loaded.expectations);
let interns = loaded.interns.clone();
let sources = loaded.sources.clone();
let (lib, expects, layout_interner) = roc_repl_expect::run::expect_mono_module_to_dylib(
arena,
target.clone(),
loaded,
opt_level,
LlvmBackendMode::CliTest,
)
.unwrap();
let (dyn_lib, expects_by_module, layout_interner) =
roc_repl_expect::run::expect_mono_module_to_dylib(
arena,
target,
loaded,
opt_level,
LlvmBackendMode::CliTest,
)
.unwrap();
// Print warnings before running tests.
{
@ -517,7 +546,7 @@ pub fn test(matches: &ArgMatches, triple: Triple) -> io::Result<i32> {
"if there were errors, we would have already exited."
);
if problems.warnings > 0 {
problems.print_to_stdout(start_time.elapsed());
problems.print_error_warning_count(start_time.elapsed());
println!(".\n\nRunning tests…\n\n\x1B[36m{}\x1B[39m", "".repeat(80));
}
}
@ -528,21 +557,45 @@ pub fn test(matches: &ArgMatches, triple: Triple) -> io::Result<i32> {
let mut writer = std::io::stdout();
let (failed, passed) = roc_repl_expect::run::run_toplevel_expects(
&mut writer,
roc_reporting::report::RenderTarget::ColorTerminal,
arena,
interns,
&layout_interner.into_global(),
&lib,
&mut expectations,
expects,
)
.unwrap();
let mut total_failed_count = 0;
let mut total_passed_count = 0;
let total_time = start_time.elapsed();
let mut results_by_module = Vec::new();
let global_layout_interner = layout_interner.into_global();
if failed == 0 && passed == 0 {
let compilation_duration = start_time.elapsed();
for (module_id, expects) in expects_by_module.into_iter() {
let test_start_time = Instant::now();
let (failed_count, passed_count) = roc_repl_expect::run::run_toplevel_expects(
&mut writer,
roc_reporting::report::RenderTarget::ColorTerminal,
arena,
interns,
&global_layout_interner,
&dyn_lib,
&mut expectations,
expects,
)
.unwrap();
let tests_duration = test_start_time.elapsed();
results_by_module.push(ModuleTestResults {
module_id,
failed_count,
passed_count,
tests_duration,
});
total_failed_count += failed_count;
total_passed_count += passed_count;
}
let total_duration = start_time.elapsed();
if total_failed_count == 0 && total_passed_count == 0 {
// TODO print this in a more nicely formatted way!
println!("No expectations were found.");
@ -553,21 +606,55 @@ pub fn test(matches: &ArgMatches, triple: Triple) -> io::Result<i32> {
// running tests altogether!
Ok(2)
} else {
let failed_color = if failed == 0 {
32 // green
if matches.get_flag(FLAG_VERBOSE) {
println!("Compiled in {} ms.", compilation_duration.as_millis());
for module_test_results in results_by_module {
print_test_results(module_test_results, &sources);
}
} else {
31 // red
};
let test_summary_str =
test_summary(total_failed_count, total_passed_count, total_duration);
println!("{test_summary_str}");
}
println!(
"\n\x1B[{failed_color}m{failed}\x1B[39m failed and \x1B[32m{passed}\x1B[39m passed in {} ms.\n",
total_time.as_millis(),
);
Ok((failed > 0) as i32)
Ok((total_failed_count > 0) as i32)
}
}
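/// Prints the test summary for a single module, labeled with the module's file name.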
fn print_test_results(
module_test_results: ModuleTestResults,
sources: &MutMap<ModuleId, (PathBuf, Box<str>)>,
) {
let ModuleTestResults {
module_id,
failed_count,
passed_count,
tests_duration,
} = module_test_results;
let test_summary_str = test_summary(failed_count, passed_count, tests_duration);
let (module_path, _) = sources.get(&module_id).unwrap();
let module_name = module_path.file_name().unwrap().to_str().unwrap();
println!("\n{module_name}:\n {test_summary_str}",);
}
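/// Formats "<failed> failed and <passed> passed in <n> ms.", with the failed count in red when nonzero and green otherwise.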
fn test_summary(failed_count: usize, passed_count: usize, tests_duration: Duration) -> String {
let failed_color = if failed_count == 0 {
ANSI_STYLE_CODES.green
} else {
ANSI_STYLE_CODES.red
};
let passed_color = ANSI_STYLE_CODES.green;
let reset = ANSI_STYLE_CODES.reset;
format!(
"{failed_color}{failed_count}{reset} failed and {passed_color}{passed_count}{reset} passed in {} ms.",
tests_duration.as_millis()
)
}
/// Find the element of `options` with the smallest edit distance to
/// `reference`. Returns a tuple containing the element and the distance, or
/// `None` if the `options` `Vec` is empty.
@ -582,7 +669,7 @@ pub fn build(
matches: &ArgMatches,
subcommands: &[String],
config: BuildConfig,
triple: Triple,
target: Target,
out_path: Option<&Path>,
roc_cache_dir: RocCacheDir<'_>,
link_type: LinkType,
@ -693,7 +780,7 @@ pub fn build(
// Note: This allows using `--dev` with `--optimize`.
// This means frontend optimizations and dev backend.
let code_gen_backend = if matches.get_flag(FLAG_DEV) {
if matches!(triple.architecture, Architecture::Wasm32) {
if matches!(target.architecture(), Architecture::Wasm32) {
CodeGenBackend::Wasm
} else {
CodeGenBackend::Assembly(AssemblyBackendMode::Binary)
@ -727,7 +814,7 @@ pub fn build(
let linking_strategy = if wasm_dev_backend {
LinkingStrategy::Additive
} else if !roc_linker::supported(link_type, &triple)
} else if !roc_linker::supported(link_type, target)
|| matches.get_one::<String>(FLAG_LINKER).map(|s| s.as_str()) == Some("legacy")
{
LinkingStrategy::Legacy
@ -736,8 +823,8 @@ pub fn build(
};
let prebuilt = {
let cross_compile = triple != Triple::host();
let targeting_wasm = matches!(triple.architecture, Architecture::Wasm32);
let cross_compile = target != Target::default();
let targeting_wasm = matches!(target.architecture(), Architecture::Wasm32);
matches.get_flag(FLAG_PREBUILT) ||
// When compiling for a different target, assume a prebuilt platform.
@ -747,6 +834,11 @@ pub fn build(
(cross_compile && !targeting_wasm)
};
let fuzz = matches.get_flag(FLAG_FUZZ);
if fuzz && !matches!(code_gen_backend, CodeGenBackend::Llvm(_)) {
user_error!("Cannot instrument binary for fuzzing while using a dev backend.");
}
let wasm_dev_stack_bytes: Option<u32> = matches
.try_get_one::<u32>(FLAG_WASM_STACK_SIZE_KB)
.ok()
@ -763,13 +855,14 @@ pub fn build(
opt_level,
emit_debug_info,
emit_llvm_ir,
fuzz,
};
let load_config = standard_load_config(&triple, build_ordering, threading);
let load_config = standard_load_config(target, build_ordering, threading);
let res_binary_path = build_file(
&arena,
&triple,
target,
path.to_owned(),
code_gen_options,
emit_timings,
@ -802,7 +895,7 @@ pub fn build(
// since the process is about to exit anyway.
// std::mem::forget(arena);
problems.print_to_stdout(total_time);
problems.print_error_warning_count(total_time);
println!(" while successfully building:\n\n {generated_filename}");
// Return a nonzero exit code if there were problems
@ -810,7 +903,7 @@ pub fn build(
}
BuildAndRun => {
if problems.fatally_errored {
problems.print_to_stdout(total_time);
problems.print_error_warning_count(total_time);
println!(
".\n\nCannot run program due to fatal error…\n\n\x1B[36m{}\x1B[39m",
"".repeat(80)
@ -820,7 +913,7 @@ pub fn build(
return Ok(problems.exit_code());
}
if problems.errors > 0 || problems.warnings > 0 {
problems.print_to_stdout(total_time);
problems.print_error_warning_count(total_time);
println!(
".\n\nRunning program anyway…\n\n\x1B[36m{}\x1B[39m",
"".repeat(80)
@ -836,11 +929,11 @@ pub fn build(
// ManuallyDrop will leak the bytes because we don't drop manually
let bytes = &ManuallyDrop::new(std::fs::read(&binary_path).unwrap());
roc_run(&arena, opt_level, triple, args, bytes, expect_metadata)
roc_run(&arena, opt_level, target, args, bytes, expect_metadata)
}
BuildAndRunIfNoErrors => {
if problems.fatally_errored {
problems.print_to_stdout(total_time);
problems.print_error_warning_count(total_time);
println!(
".\n\nCannot run program due to fatal error…\n\n\x1B[36m{}\x1B[39m",
"".repeat(80)
@ -855,7 +948,7 @@ pub fn build(
);
if problems.warnings > 0 {
problems.print_to_stdout(total_time);
problems.print_error_warning_count(total_time);
println!(
".\n\nRunning program…\n\n\x1B[36m{}\x1B[39m",
"".repeat(80)
@ -871,7 +964,7 @@ pub fn build(
// ManuallyDrop will leak the bytes because we don't drop manually
let bytes = &ManuallyDrop::new(std::fs::read(&binary_path).unwrap());
roc_run(&arena, opt_level, triple, args, bytes, expect_metadata)
roc_run(&arena, opt_level, target, args, bytes, expect_metadata)
}
}
}
@ -885,12 +978,12 @@ pub fn build(
fn roc_run<'a, I: IntoIterator<Item = &'a OsStr>>(
arena: &Bump,
opt_level: OptLevel,
triple: Triple,
target: Target,
args: I,
binary_bytes: &[u8],
expect_metadata: ExpectMetadata,
) -> io::Result<i32> {
match triple.architecture {
match target.architecture() {
Architecture::Wasm32 => {
let executable = roc_run_executable_file_path(binary_bytes)?;
let path = executable.as_path();

View file

@ -15,7 +15,7 @@ use roc_gen_dev::AssemblyBackendMode;
use roc_gen_llvm::llvm::build::LlvmBackendMode;
use roc_load::{FunctionKind, LoadingProblem, Threading};
use roc_packaging::cache::{self, RocCacheDir};
use roc_target::{get_target_triple_str, Target};
use roc_target::Target;
use std::fs::{self, FileType};
use std::io::{self, Read, Write};
use std::path::{Path, PathBuf};
@ -49,7 +49,7 @@ fn main() -> io::Result<()> {
&matches,
&subcommands,
BuildConfig::BuildAndRunIfNoErrors,
Triple::host(),
Triple::host().into(),
None,
RocCacheDir::Persistent(cache::roc_cache_dir().as_path()),
LinkType::Executable,
@ -64,7 +64,7 @@ fn main() -> io::Result<()> {
matches,
&subcommands,
BuildConfig::BuildAndRun,
Triple::host(),
Triple::host().into(),
None,
RocCacheDir::Persistent(cache::roc_cache_dir().as_path()),
LinkType::Executable,
@ -77,7 +77,7 @@ fn main() -> io::Result<()> {
}
Some((CMD_TEST, matches)) => {
if matches.contains_id(ROC_FILE) {
test(matches, Triple::host())
test(matches, Triple::host().into())
} else {
eprintln!("What .roc file do you want to test? Specify it at the end of the `roc test` command.");
@ -90,7 +90,7 @@ fn main() -> io::Result<()> {
matches,
&subcommands,
BuildConfig::BuildAndRunIfNoErrors,
Triple::host(),
Triple::host().into(),
None,
RocCacheDir::Persistent(cache::roc_cache_dir().as_path()),
LinkType::Executable,
@ -130,7 +130,7 @@ fn main() -> io::Result<()> {
roc_linker::generate_stub_lib(
input_path,
RocCacheDir::Persistent(cache::roc_cache_dir().as_path()),
&target.to_triple(),
target,
function_kind,
);
Ok(0)
@ -142,24 +142,22 @@ fn main() -> io::Result<()> {
.and_then(|s| Target::from_str(s).ok())
.unwrap_or_default();
let triple = target.to_triple();
let function_kind = FunctionKind::LambdaSet;
let (platform_path, stub_lib, stub_dll_symbols) = roc_linker::generate_stub_lib(
input_path,
RocCacheDir::Persistent(cache::roc_cache_dir().as_path()),
&triple,
target,
function_kind,
);
// TODO: pipeline the executable location through here.
// Currently it is essentially hardcoded as platform_path/dynhost.
roc_linker::preprocess_host(
&triple,
target,
&platform_path.with_file_name("main.roc"),
// The target triple string must be derived from the triple to convert from the generic
// `system` target to the exact specific target.
&platform_path
.with_file_name(format!("{}.rh", get_target_triple_str(&triple).unwrap())),
&platform_path.with_file_name(format!("{}.rh", target)),
&stub_lib,
&stub_dll_symbols,
);
@ -184,7 +182,7 @@ fn main() -> io::Result<()> {
matches,
&subcommands,
BuildConfig::BuildOnly,
target.to_triple(),
target,
out_path,
RocCacheDir::Persistent(cache::roc_cache_dir().as_path()),
link_type,
@ -210,33 +208,7 @@ fn main() -> io::Result<()> {
threading,
) {
Ok((problems, total_time)) => {
println!(
"\x1B[{}m{}\x1B[39m {} and \x1B[{}m{}\x1B[39m {} found in {} ms.",
if problems.errors == 0 {
32 // green
} else {
33 // yellow
},
problems.errors,
if problems.errors == 1 {
"error"
} else {
"errors"
},
if problems.warnings == 0 {
32 // green
} else {
33 // yellow
},
problems.warnings,
if problems.warnings == 1 {
"warning"
} else {
"warnings"
},
total_time.as_millis(),
);
problems.print_error_warning_count(total_time);
Ok(problems.exit_code())
}
@ -287,6 +259,7 @@ fn main() -> io::Result<()> {
values.push(os_string.to_owned());
}
}
None if from_stdin || to_stdout => {}
None => {
let mut os_string_values: Vec<OsString> = Vec::new();

View file

@ -8,7 +8,7 @@ quicksort = \originalList ->
quicksortHelp originalList 0 (n - 1)
quicksortHelp : List (Num a), Nat, Nat -> List (Num a)
quicksortHelp : List (Num a), U64, U64 -> List (Num a)
quicksortHelp = \list, low, high ->
if low < high then
when partition low high list is
@ -19,7 +19,7 @@ quicksortHelp = \list, low, high ->
else
list
partition : Nat, Nat, List (Num a) -> [Pair Nat (List (Num a))]
partition : U64, U64, List (Num a) -> [Pair U64 (List (Num a))]
partition = \low, high, initialList ->
when List.get initialList high is
Ok pivot ->
@ -30,7 +30,7 @@ partition = \low, high, initialList ->
Err _ ->
Pair low initialList
partitionHelp : Nat, Nat, List (Num c), Nat, Num c -> [Pair Nat (List (Num c))]
partitionHelp : U64, U64, List (Num c), U64, Num c -> [Pair U64 (List (Num c))]
partitionHelp = \i, j, list, high, pivot ->
if j < high then
when List.get list j is
@ -45,7 +45,7 @@ partitionHelp = \i, j, list, high, pivot ->
else
Pair i list
swap : Nat, Nat, List a -> List a
swap : U64, U64, List a -> List a
swap = \i, j, list ->
when Pair (List.get list i) (List.get list j) is
Pair (Ok atI) (Ok atJ) ->

View file

@ -4,10 +4,10 @@ fromBytes : List U8 -> Result Str DecodeProblem
fromBytes = \bytes ->
Bytes.Decode.decode bytes (decodeBase64 (List.len bytes))
decodeBase64 : Nat -> ByteDecoder Str
decodeBase64 : U64 -> ByteDecoder Str
decodeBase64 = \width -> Bytes.Decode.loop loopHelp { remaining: width, string: "" }
loopHelp : { remaining : Nat, string : Str } -> ByteDecoder (Bytes.Decode.Step { remaining : Nat, string : Str } Str)
loopHelp : { remaining : U64, string : Str } -> ByteDecoder (Bytes.Decode.Step { remaining : U64, string : Str } Str)
loopHelp = \{ remaining, string } ->
if remaining >= 3 then
x, y, z <- Bytes.Decode.map3 Bytes.Decode.u8 Bytes.Decode.u8 Bytes.Decode.u8

View file

@ -18,7 +18,7 @@ encodeChunks = \bytes ->
List.walk bytes { output: [], accum: None } folder
|> encodeResidual
coerce : Nat, a -> a
coerce : U64, a -> a
coerce = \_, x -> x
# folder : { output : List ByteEncoder, accum : State }, U8 -> { output : List ByteEncoder, accum : State }

View file

@ -1,6 +1,6 @@
interface Bytes.Decode exposes [ByteDecoder, decode, map, map2, u8, loop, Step, succeed, DecodeProblem, after, map3] imports []
State : { bytes : List U8, cursor : Nat }
State : { bytes : List U8, cursor : U64 }
DecodeProblem : [OutOfBytes]

View file

@ -2,7 +2,7 @@ interface Bytes.Encode exposes [ByteEncoder, sequence, u8, u16, bytes, empty, en
Endianness : [BE, LE]
ByteEncoder : [Signed8 I8, Unsigned8 U8, Signed16 Endianness I16, Unsigned16 Endianness U16, Sequence Nat (List ByteEncoder), Bytes (List U8)]
ByteEncoder : [Signed8 I8, Unsigned8 U8, Signed16 Endianness I16, Unsigned16 Endianness U16, Sequence U64 (List ByteEncoder), Bytes (List U8)]
u8 : U8 -> ByteEncoder
u8 = \value -> Unsigned8 value
@ -24,7 +24,7 @@ sequence : List ByteEncoder -> ByteEncoder
sequence = \encoders ->
Sequence (getWidths encoders 0) encoders
getWidth : ByteEncoder -> Nat
getWidth : ByteEncoder -> U64
getWidth = \encoder ->
when encoder is
Signed8 _ -> 1
@ -40,7 +40,7 @@ getWidth = \encoder ->
Sequence w _ -> w
Bytes bs -> List.len bs
getWidths : List ByteEncoder, Nat -> Nat
getWidths : List ByteEncoder, U64 -> U64
getWidths = \encoders, initial ->
List.walk encoders initial \accum, encoder -> accum + getWidth encoder
@ -51,7 +51,7 @@ encode = \encoder ->
encodeHelp encoder 0 output
|> .output
encodeHelp : ByteEncoder, Nat, List U8 -> { output : List U8, offset : Nat }
encodeHelp : ByteEncoder, U64, List U8 -> { output : List U8, offset : U64 }
encodeHelp = \encoder, offset, output ->
when encoder is
Unsigned8 value ->

View file

@ -10,7 +10,7 @@ show = \list ->
|> List.map Num.toStr
|> Str.joinWith ", "
"[\(content)]"
"[$(content)]"
sortBy : List a, (a -> Num *) -> List a
sortBy = \list, toComparable ->
@ -24,7 +24,7 @@ sortWith = \list, order ->
quicksortHelp list order 0 (n - 1)
quicksortHelp : List a, Order a, Nat, Nat -> List a
quicksortHelp : List a, Order a, U64, U64 -> List a
quicksortHelp = \list, order, low, high ->
if low < high then
when partition low high list order is
@ -35,7 +35,7 @@ quicksortHelp = \list, order, low, high ->
else
list
partition : Nat, Nat, List a, Order a -> [Pair Nat (List a)]
partition : U64, U64, List a, Order a -> [Pair U64 (List a)]
partition = \low, high, initialList, order ->
when List.get initialList high is
Ok pivot ->
@ -46,7 +46,7 @@ partition = \low, high, initialList, order ->
Err _ ->
Pair low initialList
partitionHelp : Nat, Nat, List c, Order c, Nat, c -> [Pair Nat (List c)]
partitionHelp : U64, U64, List c, Order c, U64, c -> [Pair U64 (List c)]
partitionHelp = \i, j, list, order, high, pivot ->
if j < high then
when List.get list j is
@ -63,7 +63,7 @@ partitionHelp = \i, j, list, order, high, pivot ->
else
Pair i list
swap : Nat, Nat, List a -> List a
swap : U64, U64, List a -> List a
swap = \i, j, list ->
when Pair (List.get list i) (List.get list j) is
Pair (Ok atI) (Ok atJ) ->

View file

@ -15,7 +15,7 @@ closure1 = \_ ->
Task.succeed (foo toUnitBorrowed "a long string such that it's malloced")
|> Task.map \_ -> {}
toUnitBorrowed = \x -> Str.countGraphemes x
toUnitBorrowed = \x -> Str.countUtf8Bytes x
foo = \f, x -> f x

View file

@ -26,7 +26,7 @@ showRBTree = \tree, showKey, showValue ->
sL = nodeInParens left showKey showValue
sR = nodeInParens right showKey showValue
"Node \(sColor) \(sKey) \(sValue) \(sL) \(sR)"
"Node $(sColor) $(sKey) $(sValue) $(sL) $(sR)"
nodeInParens : RedBlackTree k v, (k -> Str), (v -> Str) -> Str
nodeInParens = \tree, showKey, showValue ->
@ -37,7 +37,7 @@ nodeInParens = \tree, showKey, showValue ->
Node _ _ _ _ _ ->
inner = showRBTree tree showKey showValue
"(\(inner))"
"($(inner))"
showColor : NodeColor -> Str
showColor = \color ->

View file

@ -14,7 +14,7 @@ main =
#
# _ ->
# ns = Num.toStr n
# Task.putLine "No test \(ns)"
# Task.putLine "No test $(ns)"
showBool : Bool -> Str
showBool = \b ->
if

View file

@ -11,12 +11,14 @@ extern crate roc_module;
mod cli_run {
use cli_utils::helpers::{
extract_valgrind_errors, file_path_from_root, fixture_file, fixtures_dir, has_error,
known_bad_file, run_cmd, run_roc, run_with_valgrind, strip_colors, Out, ValgrindError,
known_bad_file, run_cmd, run_roc, run_with_valgrind, Out, ValgrindError,
ValgrindErrorXWhat,
};
use const_format::concatcp;
use indoc::indoc;
use regex::Regex;
use roc_cli::{CMD_BUILD, CMD_CHECK, CMD_DEV, CMD_FORMAT, CMD_RUN, CMD_TEST};
use roc_reporting::report::strip_colors;
use roc_test_utils::assert_multiline_str_eq;
use serial_test::serial;
use std::iter;
@ -198,7 +200,7 @@ mod cli_run {
vec.into_iter()
};
let out = match cli_mode {
let cmd_output = match cli_mode {
CliMode::RocBuild => {
run_roc_on_failure_is_panic(
file,
@ -294,39 +296,47 @@ mod cli_run {
}
};
let mut actual = strip_colors(&out.stdout);
// e.g. "1 failed and 0 passed in 123 ms."
if let Some(split) = actual.rfind("passed in ") {
let (before_first_digit, _) = actual.split_at(split);
actual = format!("{before_first_digit}passed in <ignored for test> ms.");
}
let self_path = file.display().to_string();
actual = actual.replace(&self_path, "<ignored for tests>");
if !actual.ends_with(expected_ending) {
let actual_cmd_stdout = ignore_test_timings(&strip_colors(&cmd_output.stdout))
.replace(&self_path, "<ignored for tests>");
if !actual_cmd_stdout.ends_with(expected_ending) {
panic!(
"> expected output to end with:\n{}\n> but instead got:\n{}\n> stderr was:\n{}",
expected_ending, actual, out.stderr
expected_ending, actual_cmd_stdout, cmd_output.stderr
);
}
if !out.status.success() && !matches!(cli_mode, CliMode::RocTest) {
if !cmd_output.status.success() && !matches!(cli_mode, CliMode::RocTest) {
// We don't need stdout, Cargo prints it for us.
panic!(
"Example program exited with status {:?}\nstderr was:\n{:#?}",
out.status, out.stderr
cmd_output.status, cmd_output.stderr
);
}
}
}
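/// Replaces timings like " in 123 ms." with a placeholder so expected test output does not depend on how long the run took.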
fn ignore_test_timings(cmd_output: &str) -> String {
let regex = Regex::new(r" in (\d+) ms\.").expect("Invalid regex pattern");
let replacement = " in <ignored for test> ms.";
regex.replace_all(cmd_output, replacement).to_string()
}
// when you want to run `roc test` to execute `expect`s, perhaps on a library rather than an application.
fn test_roc_expect(dir_name: &str, roc_filename: &str) {
fn test_roc_expect(dir_name: &str, roc_filename: &str, flags: &[&str], expected_ending: &str) {
let path = file_path_from_root(dir_name, roc_filename);
let out = run_roc([CMD_TEST, path.to_str().unwrap()], &[], &[]);
assert!(out.status.success());
check_output_with_stdin(
&path,
&[],
flags,
&[],
&[],
expected_ending,
UseValgrind::Yes,
TestCliCommands::Test,
);
}
// when you don't need args, stdin or extra_env
@ -545,16 +555,18 @@ mod cli_run {
// on the building of the platform
test_roc_app(
"crates/cli_testing_examples/expects",
"crates/cli/tests/expects",
"expects.roc",
&[],
&[],
&[],
indoc!(
r#"
EXPECT FAILED in tests/expects/expects.roc
This expectation failed:
19 expect words == []
28 expect words == []
^^^^^^^^^^^
When it failed, these variables had these values:
@ -562,12 +574,12 @@ mod cli_run {
words : List Str
words = ["this", "will", "for", "sure", "be", "a", "large", "string", "so", "when", "we", "split", "it", "it", "will", "use", "seamless", "slices", "which", "affect", "printing"]
[<ignored for tests>:22] x = 42
[<ignored for tests>:23] "Fjoer en ferdjer frieten oan dyn geve lea" = "Fjoer en ferdjer frieten oan dyn geve lea"
[<ignored for tests>:24] "this is line 24" = "this is line 24"
[<ignored for tests>:13] x = "abc"
[<ignored for tests>:13] x = 10
[<ignored for tests>:13] x = (A (B C))
[<ignored for tests>:31] x = 42
[<ignored for tests>:33] "Fjoer en ferdjer frieten oan dyn geve lea" = "Fjoer en ferdjer frieten oan dyn geve lea"
[<ignored for tests>:35] "this is line 24" = "this is line 24"
[<ignored for tests>:21] x = "abc"
[<ignored for tests>:21] x = 10
[<ignored for tests>:21] x = (A (B C))
Program finished!
"#
),
@ -576,38 +588,100 @@ mod cli_run {
);
test_roc_app(
"crates/cli_testing_examples/expects",
"crates/cli/tests/expects",
"expects.roc",
&[],
&[],
&[],
indoc!(
r#"
EXPECT FAILED in tests/expects/expects.roc
This expectation failed:
6> expect
7> a = 1
8> b = 2
9>
10> a == b
9 expect a == 2
^^^^^^
When it failed, these variables had these values:
a : Num *
a = 1
b : Num *
EXPECT FAILED in tests/expects/expects.roc
This expectation failed:
10 expect a == 3
^^^^^^
When it failed, these variables had these values:
a : Num *
a = 1
EXPECT FAILED in tests/expects/expects.roc
This expectation failed:
14> expect
15> a = makeA
16> b = 2i64
17>
18> a == b
When it failed, these variables had these values:
a : Int Signed64
a = 1
b : I64
b = 2
1 failed and 0 passed in <ignored for test> ms."#
1 failed and 0 passed in <ignored for test> ms.
"#
),
UseValgrind::Yes,
TestCliCommands::Test,
);
}
#[test]
#[cfg_attr(windows, ignore)]
fn transitive_expects() {
test_roc_expect(
"crates/cli/tests/expects_transitive",
"main.roc",
&[],
indoc!(
r#"
0 failed and 3 passed in <ignored for test> ms.
"#
),
);
}
#[test]
#[cfg_attr(windows, ignore)]
fn transitive_expects_verbose() {
test_roc_expect(
"crates/cli/tests/expects_transitive",
"main.roc",
&["--verbose"],
indoc!(
r#"
Compiled in <ignored for test> ms.
Direct.roc:
0 failed and 2 passed in <ignored for test> ms.
Transitive.roc:
0 failed and 1 passed in <ignored for test> ms.
"#
),
);
}
#[test]
#[cfg_attr(
windows,
@ -629,7 +703,7 @@ mod cli_run {
)]
fn fibonacci() {
test_roc_app_slim(
"crates/cli_testing_examples/algorithms",
"crates/cli/tests/algorithms",
"fibonacci.roc",
"",
UseValgrind::Yes,
@ -668,7 +742,7 @@ mod cli_run {
#[cfg_attr(windows, ignore)]
fn quicksort() {
test_roc_app_slim(
"crates/cli_testing_examples/algorithms",
"crates/cli/tests/algorithms",
"quicksort.roc",
"[0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2]\n",
UseValgrind::Yes,
@ -871,10 +945,10 @@ mod cli_run {
&[],
indoc!(
r#"
This roc file can print it's own source code. The source is:
This roc file can print its own source code. The source is:
app "ingested-file"
packages { pf: "https://github.com/roc-lang/basic-cli/releases/download/0.7.1/Icc3xJoIixF3hCcfXrDwLCu4wQHtNdPyoJkEbkgIElA.tar.br" }
packages { pf: "https://github.com/roc-lang/basic-cli/releases/download/0.9.0/oKWkaruh2zXxin_xfsYsCJobH1tO8_JvNkFzDwwzNUQ.tar.br" }
imports [
pf.Stdout,
"ingested-file.roc" as ownCode : Str,
@ -882,7 +956,7 @@ mod cli_run {
provides [main] to pf
main =
Stdout.line "\nThis roc file can print it's own source code. The source is:\n\n\(ownCode)"
Stdout.line "\nThis roc file can print its own source code. The source is:\n\n$(ownCode)"
"#
),
@ -912,9 +986,9 @@ mod cli_run {
#[cfg_attr(windows, ignore)]
fn parse_movies_csv() {
test_roc_app_slim(
"examples/parser/examples",
"examples/parser",
"parse-movies-csv.roc",
"Parse success!\n",
"2 movies were found:\n\nThe movie 'Airplane!' was released in 1980 and stars Robert Hays and Julie Hagerty\nThe movie 'Caddyshack' was released in 1980 and stars Chevy Chase, Rodney Dangerfield, Ted Knight, Michael O'Keefe and Bill Murray\n\nParse success!\n\n",
UseValgrind::No,
)
}
@ -924,19 +998,13 @@ mod cli_run {
#[cfg_attr(windows, ignore)]
fn parse_letter_counts() {
test_roc_app_slim(
"examples/parser/examples",
"examples/parser",
"letter-counts.roc",
"I counted 7 letter A's!\n",
UseValgrind::No,
)
}
#[test]
#[cfg_attr(windows, ignore)]
fn parse_http() {
test_roc_expect("examples/parser/package", "ParserHttp.roc")
}
#[test]
#[cfg_attr(windows, ignore)]
fn inspect_logging() {
@ -1333,7 +1401,7 @@ mod cli_run {
&[],
indoc!(
r#"
TYPE MISMATCH tests/known_bad/TypeError.roc
TYPE MISMATCH in tests/known_bad/TypeError.roc
Something is off with the body of the main definition:
@ -1362,6 +1430,29 @@ mod cli_run {
);
}
#[test]
fn known_type_error_with_long_path() {
check_compile_error(
&known_bad_file("UnusedImportButWithALongFileNameForTesting.roc"),
&[],
indoc!(
r#"
UNUSED IMPORT in ...nown_bad/UnusedImportButWithALongFileNameForTesting.roc
Nothing from Symbol is used in this module.
3 imports [Symbol.{ Ident }]
^^^^^^^^^^^^^^^^
Since Symbol isn't used, you don't need to import it.
0 errors and 1 warning found in <ignored for test> ms."#
),
);
}
#[test]
fn exposed_not_defined() {
check_compile_error(
@ -1369,7 +1460,7 @@ mod cli_run {
&[],
indoc!(
r#"
MISSING DEFINITION tests/known_bad/ExposedNotDefined.roc
MISSING DEFINITION in tests/known_bad/ExposedNotDefined.roc
bar is listed as exposed, but it isn't defined in this module.
@ -1390,7 +1481,7 @@ mod cli_run {
&[],
indoc!(
r#"
UNUSED IMPORT tests/known_bad/UnusedImport.roc
UNUSED IMPORT in tests/known_bad/UnusedImport.roc
Nothing from Symbol is used in this module.
@ -1413,7 +1504,7 @@ mod cli_run {
&[],
indoc!(
r#"
UNKNOWN GENERATES FUNCTION tests/known_bad/UnknownGeneratesWith.roc
UNKNOWN GENERATES FUNCTION in tests/known_bad/UnknownGeneratesWith.roc
I don't know how to generate the foobar function.

View file

@ -3,14 +3,23 @@ app "expects-test"
imports []
provides [main] to pf
expect
makeA =
a = 1
b = 2
expect a == 2
expect a == 3
a
expect
a = makeA
b = 2i64
a == b
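# dbg a value of any type and return it unchanged; used below with a Str, a U8, and a tag union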
polyDbg = \x ->
dbg x
x
main =
@ -20,10 +29,12 @@ main =
x = 42
dbg x
dbg "Fjoer en ferdjer frieten oan dyn geve lea"
dbg "this is line 24"
r = {x : polyDbg "abc", y: polyDbg 10u8, z : polyDbg (A (B C))}
r = { x: polyDbg "abc", y: polyDbg 10u8, z: polyDbg (A (B C)) }
when r is
_ -> "Program finished!\n"

View file

@ -0,0 +1,14 @@
interface Direct
exposes [
addAndStringify,
]
imports [
Transitive,
]
addAndStringify = \num1, num2 ->
Num.toStr (Transitive.add num1 num2)
expect addAndStringify 1 2 == "3"
expect addAndStringify 3 4 == "7"

View file

@ -0,0 +1,9 @@
interface Transitive
exposes [
add,
]
imports []
add = \num1, num2 -> (num1 + num2)
expect add 1 2 == 3

View file

@ -0,0 +1,5 @@
package "transitive-tests"
exposes [
Direct,
]
packages {}

View file

@ -3,4 +3,4 @@ app "packages-test"
imports [json.JsonParser, csv.Csv]
provides [main] to pf
main = "Hello, World! \(JsonParser.example) \(Csv.example)"
main = "Hello, World! $(JsonParser.example) $(Csv.example)"

View file

@ -0,0 +1,7 @@
interface UnusedImportButWithALongFileNameForTesting
exposes [plainText, emText]
imports [Symbol.{ Ident }]
plainText = \str -> PlainText str
emText = \str -> EmText str

File diff suppressed because one or more lines are too long

View file

@ -98,22 +98,6 @@ pub fn path_to_binary(binary_name: &str) -> PathBuf {
path
}
pub fn strip_colors(str: &str) -> String {
use roc_reporting::report::ANSI_STYLE_CODES;
str.replace(ANSI_STYLE_CODES.red, "")
.replace(ANSI_STYLE_CODES.green, "")
.replace(ANSI_STYLE_CODES.yellow, "")
.replace(ANSI_STYLE_CODES.blue, "")
.replace(ANSI_STYLE_CODES.magenta, "")
.replace(ANSI_STYLE_CODES.cyan, "")
.replace(ANSI_STYLE_CODES.white, "")
.replace(ANSI_STYLE_CODES.bold, "")
.replace(ANSI_STYLE_CODES.underline, "")
.replace(ANSI_STYLE_CODES.reset, "")
.replace(ANSI_STYLE_CODES.color_reset, "")
}
pub fn run_roc_with_stdin<I, S>(args: I, stdin_vals: &[&str]) -> Out
where
I: IntoIterator<Item = S>,
@ -420,14 +404,15 @@ pub fn extract_valgrind_errors(xml: &str) -> Result<Vec<ValgrindError>, serde_xm
Ok(answer)
}
// start the dir with crates/cli_testing_examples
// start the dir with crates/cli/tests
#[allow(dead_code)]
pub fn cli_testing_dir(dir_name: &str) -> PathBuf {
let mut path = root_dir();
// Descend into crates/cli/tests/{dir_name}
path.push("crates");
path.push("cli_testing_examples");
path.push("cli");
path.push("tests");
path.extend(dir_name.split('/')); // Make slashes cross-target
path

View file

@ -1121,7 +1121,7 @@ fn lowlevel_spec<'a>(
// just dream up a unit value
builder.add_make_tuple(block, &[])
}
ListLen => {
ListLenUsize | ListLenU64 => {
// TODO should this touch the heap cell?
// just dream up a unit value
builder.add_make_tuple(block, &[])
@ -1173,6 +1173,16 @@ fn lowlevel_spec<'a>(
_ => unreachable!(),
}
}
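// read the list's bag and cell, record an update on the existing cell, and return the bag in a fresh heap cell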
ListClone => {
let list = env.symbols[&arguments[0]];
let bag = builder.add_get_tuple_field(block, list, LIST_BAG_INDEX)?;
let cell = builder.add_get_tuple_field(block, list, LIST_CELL_INDEX)?;
let _unit = builder.add_update(block, update_mode_var, cell)?;
with_new_heap_cell(builder, block, bag)
}
ListSwap => {
let list = env.symbols[&arguments[0]];
@ -1220,7 +1230,7 @@ fn lowlevel_spec<'a>(
builder.add_make_tuple(block, &[cell, bag])
}
StrFromUtf8Range => {
StrFromUtf8 => {
let list = env.symbols[&arguments[0]];
let cell = builder.add_get_tuple_field(block, list, LIST_CELL_INDEX)?;

View file

@ -12,6 +12,7 @@ roc_bitcode = { path = "../builtins/bitcode" }
roc_can = { path = "../can" }
roc_collections = { path = "../collections" }
roc_constrain = { path = "../constrain" }
roc_debug_flags = { path = "../debug_flags" }
roc_error_macros = { path = "../../error_macros" }
roc_gen_dev = { path = "../gen_dev", default-features = false }
roc_gen_llvm = { path = "../gen_llvm" }

View file

@ -1,8 +1,10 @@
use crate::target::{arch_str, target_zig_str};
use crate::target::arch_str;
use libloading::{Error, Library};
use roc_command_utils::{cargo, clang, rustup, zig};
use roc_debug_flags;
use roc_error_macros::internal_error;
use roc_mono::ir::OptLevel;
use roc_target::{Architecture, OperatingSystem, Target};
use std::collections::HashMap;
use std::ffi::OsString;
use std::fs::DirEntry;
@ -10,7 +12,6 @@ use std::io;
use std::path::{Path, PathBuf};
use std::process::{self, Child, Command};
use std::{env, fs};
use target_lexicon::{Architecture, OperatingSystem, Triple};
use wasi_libc_sys::{WASI_COMPILER_RT_PATH, WASI_LIBC_PATH};
pub use roc_linker::LinkType;
@ -27,46 +28,33 @@ pub enum LinkingStrategy {
/// input_paths can include the host as well as the app. e.g. &["host.o", "roc_app.o"]
pub fn link(
target: &Triple,
target: Target,
output_path: PathBuf,
input_paths: &[&str],
link_type: LinkType,
) -> io::Result<(Child, PathBuf)> {
match target {
Triple {
architecture: Architecture::Wasm32,
..
} => link_wasm32(target, output_path, input_paths, link_type),
Triple {
operating_system: OperatingSystem::Linux,
..
} => link_linux(target, output_path, input_paths, link_type),
Triple {
operating_system: OperatingSystem::Darwin,
..
} => link_macos(target, output_path, input_paths, link_type),
Triple {
operating_system: OperatingSystem::Windows,
..
} => link_windows(target, output_path, input_paths, link_type),
match target.arch_os() {
(Architecture::Wasm32, _) => link_wasm32(target, output_path, input_paths, link_type),
(_, OperatingSystem::Linux) => link_linux(target, output_path, input_paths, link_type),
(_, OperatingSystem::Mac) => link_macos(target, output_path, input_paths, link_type),
(_, OperatingSystem::Windows) => link_windows(output_path, input_paths, link_type),
_ => internal_error!("TODO gracefully handle unsupported target: {:?}", target),
}
}
/// Same format as the precompiled host filename, except with a file extension like ".o" or ".obj"
pub fn legacy_host_file(target: &Triple, platform_main_roc: &Path) -> Option<PathBuf> {
let os = roc_target::OperatingSystem::from(target.operating_system);
let lib_ext = os.static_library_file_ext();
pub fn legacy_host_file(target: Target, platform_main_roc: &Path) -> PathBuf {
let lib_ext = target.static_library_file_ext();
let file_name = roc_linker::preprocessed_host_filename(target)?
let file_name = roc_linker::preprocessed_host_filename(target)
.replace(roc_linker::PRECOMPILED_HOST_EXT, lib_ext);
let lib_path = platform_main_roc.with_file_name(file_name);
if lib_path.exists() {
Some(lib_path)
lib_path
} else {
let obj_ext = os.object_file_ext();
Some(lib_path.with_extension(obj_ext))
let obj_ext = target.object_file_ext();
lib_path.with_extension(obj_ext)
}
}
@ -299,7 +287,7 @@ pub fn build_zig_host_wasm32(
"c",
"-target",
"wasm32-wasi",
// "-femit-llvm-ir=/home/folkertdev/roc/roc/crates/cli_testing_examples/benchmarks/platform/host.ll",
// "-femit-llvm-ir=/home/folkertdev/roc/roc/crates/cli/tests/benchmarks/platform/host.ll",
"-fPIC",
"-fstrip",
]);
@ -315,7 +303,7 @@ pub fn build_zig_host_wasm32(
#[allow(clippy::too_many_arguments)]
pub fn build_c_host_native(
target: &Triple,
target: Target,
env_path: &str,
env_home: &str,
env_cpath: &str,
@ -334,7 +322,7 @@ pub fn build_c_host_native(
.args(sources)
.args(["-o", dest]);
if let Some(shared_lib_path) = shared_lib_path {
match target.operating_system {
match target.operating_system() {
OperatingSystem::Windows => {
// just use zig as a C compiler
@ -346,7 +334,7 @@ pub fn build_c_host_native(
env_home,
dest,
sources[0],
get_target_str(target),
"native",
opt_level,
Some(shared_lib_path),
builtins_host_path,
@ -405,7 +393,7 @@ pub fn build_swift_host_native(
.env("HOME", env_home);
match arch {
Architecture::Aarch64(_) => command.arg("-arm64"),
Architecture::Aarch64 => command.arg("-arm64"),
_ => command.arg(format!("-{arch}")),
};
@ -436,7 +424,7 @@ pub fn build_swift_host_native(
pub fn rebuild_host(
opt_level: OptLevel,
target: &Triple,
target: Target,
platform_main_roc: &Path,
shared_lib_path: Option<&Path>,
) -> PathBuf {
@ -449,14 +437,12 @@ pub fn rebuild_host(
let swift_host_src = platform_main_roc.with_file_name("host.swift");
let swift_host_header_src = platform_main_roc.with_file_name("host.h");
let os = roc_target::OperatingSystem::from(target.operating_system);
let executable_extension = match os {
roc_target::OperatingSystem::Windows => "exe",
roc_target::OperatingSystem::Unix => "",
roc_target::OperatingSystem::Wasi => "",
let executable_extension = match target.operating_system() {
OperatingSystem::Windows => "exe",
_ => "",
};
let host_dest = if matches!(target.architecture, Architecture::Wasm32) {
let host_dest = if matches!(target.architecture(), Architecture::Wasm32) {
if matches!(opt_level, OptLevel::Development) {
platform_main_roc.with_extension("o")
} else {
@ -467,7 +453,7 @@ pub fn rebuild_host(
.with_file_name("dynhost")
.with_extension(executable_extension)
} else {
legacy_host_file(target, platform_main_roc).unwrap()
legacy_host_file(target, platform_main_roc)
};
let env_path = env::var("PATH").unwrap_or_else(|_| "".to_string());
@ -479,7 +465,7 @@ pub fn rebuild_host(
if zig_host_src.exists() {
// Compile host.zig
let zig_cmd = match target.architecture {
let zig_cmd = match target.architecture() {
Architecture::Wasm32 => {
let emit_bin = if matches!(opt_level, OptLevel::Development) {
format!("-femit-bin={}", host_dest.to_str().unwrap())
@ -500,12 +486,15 @@ pub fn rebuild_host(
&env_home,
host_dest.to_str().unwrap(),
zig_host_src.to_str().unwrap(),
get_target_str(target),
// This used to be "native" but that caused segfaults that were hard to
// reproduce and investigate.
// For context: github.com/roc-lang/roc/pull/6591#issuecomment-2039808944
"x86_64-native",
opt_level,
shared_lib_path,
builtins_host_tempfile.path(),
),
Architecture::X86_32(_) => build_zig_host_native(
Architecture::X86_32 => build_zig_host_native(
&env_path,
&env_home,
host_dest.to_str().unwrap(),
@ -515,17 +504,17 @@ pub fn rebuild_host(
shared_lib_path,
builtins_host_tempfile.path(),
),
Architecture::Aarch64(_) => build_zig_host_native(
Architecture::Aarch64 => build_zig_host_native(
&env_path,
&env_home,
host_dest.to_str().unwrap(),
zig_host_src.to_str().unwrap(),
target_zig_str(target),
"native",
opt_level,
shared_lib_path,
builtins_host_tempfile.path(),
),
_ => internal_error!("Unsupported architecture {:?}", target.architecture),
_ => internal_error!("Unsupported architecture {:?}", target.architecture()),
};
run_build_command(zig_cmd, "host.zig", 0);
@ -537,7 +526,7 @@ pub fn rebuild_host(
// on windows, we need the nightly toolchain so we can use `-Z export-executable-symbols`
// using `+nightly` only works when running cargo through rustup
let mut cmd = rustup();
cmd.args(["run", "nightly-2023-05-28", "cargo"]);
cmd.args(["run", "nightly-2023-08-20", "cargo"]);
cmd
} else {
@ -613,7 +602,8 @@ pub fn rebuild_host(
// Clean up c_host.o
if c_host_dest.exists() {
std::fs::remove_file(c_host_dest).unwrap();
// there can be a race condition on this file cleanup
let _ = std::fs::remove_file(c_host_dest);
}
}
} else if rust_host_src.exists() {
@ -712,7 +702,7 @@ pub fn rebuild_host(
swift_host_header_src
.exists()
.then(|| swift_host_header_src.to_str().unwrap()),
target.architecture,
target.architecture(),
);
run_build_command(swiftc_cmd, "host.swift", 0);
@ -782,16 +772,6 @@ fn find_in_folder_or_subfolders(path: &PathBuf, folder_to_find: &str) -> Vec<Dir
matching_dirs
}
fn get_target_str(target: &Triple) -> &str {
if target.operating_system == OperatingSystem::Windows
&& target.environment == target_lexicon::Environment::Gnu
{
"x86_64-windows-gnu"
} else {
"native"
}
}
fn nix_paths() -> Vec<String> {
let mut paths = vec![];
@ -810,15 +790,15 @@ fn nix_glibc_path_opt() -> Option<OsString> {
env::var_os("NIX_GLIBC_PATH")
}
fn build_path<const N: usize>(segments: [&str; N]) -> Option<PathBuf> {
fn build_path_or_panic<const N: usize>(segments: [&str; N]) -> PathBuf {
let mut guess_path = PathBuf::new();
for s in segments {
guess_path.push(s);
}
if guess_path.exists() {
Some(guess_path)
guess_path
} else {
None
panic!("{} does not exist.", guess_path.display());
}
}
@ -848,13 +828,24 @@ fn strs_to_path(strs: &[&str]) -> PathBuf {
strs.iter().collect()
}
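/// Extra linker flags taken from the ROC_LINK_FLAGS environment variable (split on whitespace); a temporary workaround until surgical linking lands.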
fn extra_link_flags() -> Vec<String> {
match env::var("ROC_LINK_FLAGS") {
Ok(flags) => {
println!("⚠️ CAUTION: The ROC_LINK_FLAGS environment variable is a temporary workaround, and will no longer do anything once surgical linking lands! If you're concerned about what this means for your use case, please ask about it on Zulip.");
flags
}
Err(_) => "".to_string(),
}.split_whitespace().map(|x| x.to_owned()).collect()
}
fn link_linux(
target: &Triple,
target: Target,
output_path: PathBuf,
input_paths: &[&str],
link_type: LinkType,
) -> io::Result<(Child, PathBuf)> {
let architecture = format!("{}-linux-gnu", target.architecture);
let architecture = format!("{}-linux-gnu", target.architecture());
// Command::new("cp")
// .args(&[input_paths[0], "/home/folkertdev/roc/wasm/host.o"])
@ -866,7 +857,7 @@ fn link_linux(
// .output()
// .unwrap();
if let Architecture::X86_32(_) = target.architecture {
if let Architecture::X86_32 = target.architecture() {
return Ok((
zig()
.args(["build-exe"])
@ -914,7 +905,7 @@ fn link_linux(
let scrt1_name = "Scrt1.o";
let scrt1_path = look_for_library(&lib_dirs, scrt1_name);
// Unwrap all the paths at once so we can inform the user of all missing libs at once
// Unwrap all the paths at once so we can inform the user of any missing libs
let (libgcc_path, crti_path, crtn_path, scrt1_path) =
match (libgcc_path, crti_path, crtn_path, scrt1_path) {
(Some(libgcc), Some(crti), Some(crtn), Some(scrt1)) => (libgcc, crti, crtn, scrt1),
@ -951,57 +942,50 @@ fn link_linux(
}
};
let ld_linux = match target.architecture {
let (libgcc_path_str, crti_path_str, crtn_path_str, scrt1_path_str) = (
libgcc_path.to_string_lossy(),
crti_path.to_string_lossy(),
crtn_path.to_string_lossy(),
scrt1_path.to_string_lossy(),
);
let ld_linux_path = match target.architecture() {
Architecture::X86_64 => {
// give preference to nix_path if it's defined; this prevents bugs
if let Some(nix_glibc_path) = nix_glibc_path_opt() {
build_path([
build_path_or_panic([
&nix_glibc_path.into_string().unwrap(),
"ld-linux-x86-64.so.2",
])
} else {
build_path(["/lib64", "ld-linux-x86-64.so.2"])
build_path_or_panic(["/lib64", "ld-linux-x86-64.so.2"])
}
}
Architecture::Aarch64(_) => build_path(["/lib", "ld-linux-aarch64.so.1"]),
Architecture::Aarch64 => build_path_or_panic(["/lib", "ld-linux-aarch64.so.1"]),
_ => internal_error!(
"TODO gracefully handle unsupported linux architecture: {:?}",
target.architecture
target.architecture()
),
};
let ld_linux = ld_linux.unwrap();
let ld_linux = ld_linux.to_str().unwrap();
let mut soname;
let ld_linux_path_str = &ld_linux_path.to_string_lossy();
let (base_args, output_path) = match link_type {
LinkType::Executable => (
// Presumably this S stands for Static, since if we include Scrt1.o
// in the linking for dynamic builds, linking fails.
vec![scrt1_path.to_string_lossy().into_owned()],
vec![scrt1_path_str.to_string()],
output_path,
),
LinkType::Dylib => {
// TODO: do we actually need the version number on this?
// Do we even need the "-soname" argument?
//
// See https://software.intel.com/content/www/us/en/develop/articles/create-a-unix-including-linux-shared-library.html
soname = output_path.clone();
soname.set_extension("so.1");
let mut output_path = output_path;
output_path.set_extension("so.1.0");
output_path.set_extension("so");
(
// TODO: find a way to avoid using a vec! here - should theoretically be
// able to do this somehow using &[] but the borrow checker isn't having it.
// Also find a way to have these be string slices instead of Strings.
vec![
"-shared".to_string(),
"-soname".to_string(),
soname.as_path().to_str().unwrap().to_string(),
],
vec!["-shared".to_string()],
output_path,
)
}
@ -1013,9 +997,9 @@ fn link_linux(
// NOTE: order of arguments to `ld` matters here!
// The `-l` flags should go after the `.o` arguments
let mut command = Command::new("ld");
let mut ld_command = Command::new("ld");
command
ld_command
// Don't allow LD_ env vars to affect this
.env_clear()
.env("PATH", &env_path)
@ -1031,12 +1015,13 @@ fn link_linux(
"-A",
arch_str(target),
"-pie",
&*crti_path.to_string_lossy(),
&*crtn_path.to_string_lossy(),
&crti_path_str,
&crtn_path_str,
])
.args(&base_args)
.args(["-dynamic-linker", ld_linux])
.args(["-dynamic-linker", ld_linux_path_str])
.args(input_paths)
.args(extra_link_flags())
// ld.lld requires this argument, and does not accept --arch
// .args(&["-L/usr/lib/x86_64-linux-gnu"])
.args([
@ -1049,19 +1034,20 @@ fn link_linux(
"-lrt",
"-lutil",
"-lc_nonshared",
libgcc_path.to_str().unwrap(),
&libgcc_path_str,
// Output
"-o",
output_path.as_path().to_str().unwrap(), // app (or app.so or app.dylib etc.)
]);
debug_print_command(&ld_command);
let output = command.spawn()?;
let ld_output = ld_command.spawn()?;
Ok((output, output_path))
Ok((ld_output, output_path))
}
fn link_macos(
target: &Triple,
target: Target,
output_path: PathBuf,
input_paths: &[&str],
link_type: LinkType,
@ -1078,9 +1064,9 @@ fn link_macos(
LinkType::None => internal_error!("link_macos should not be called with link type of none"),
};
let arch = match target.architecture {
Architecture::Aarch64(_) => "arm64".to_string(),
_ => target.architecture.to_string(),
let arch = match target.architecture() {
Architecture::Aarch64 => "arm64".to_string(),
_ => target.architecture().to_string(),
};
let mut ld_command = Command::new("ld");
@ -1108,7 +1094,8 @@ fn link_macos(
"-macos_version_min",
&get_macos_version(),
])
.args(input_paths);
.args(input_paths)
.args(extra_link_flags());
let sdk_path = "/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/lib";
if Path::new(sdk_path).exists() {
@ -1116,18 +1103,6 @@ fn link_macos(
ld_command.arg(format!("-L{sdk_path}/swift"));
};
let roc_link_flags = match env::var("ROC_LINK_FLAGS") {
Ok(flags) => {
println!("⚠️ CAUTION: The ROC_LINK_FLAGS environment variable is a temporary workaround, and will no longer do anything once surgical linking lands! If you're concerned about what this means for your use case, please ask about it on Zulip.");
flags
}
Err(_) => "".to_string(),
};
for roc_link_flag in roc_link_flags.split_whitespace() {
ld_command.arg(roc_link_flag);
}
ld_command.args([
// Libraries - see https://github.com/roc-lang/roc/pull/554#discussion_r496392274
// for discussion and further references
@ -1162,14 +1137,18 @@ fn link_macos(
output_path.to_str().unwrap(), // app
]);
debug_print_command(&ld_command);
let mut ld_child = ld_command.spawn()?;
match target.architecture {
Architecture::Aarch64(_) => {
match target.architecture() {
Architecture::Aarch64 => {
ld_child.wait()?;
let codesign_child = Command::new("codesign")
.args(["-s", "-", output_path.to_str().unwrap()])
.spawn()?;
let mut codesign_cmd = Command::new("codesign");
codesign_cmd.args(["-s", "-", output_path.to_str().unwrap()]);
debug_print_command(&codesign_cmd);
let codesign_child = codesign_cmd.spawn()?;
Ok((codesign_child, output_path))
}
@ -1178,8 +1157,11 @@ fn link_macos(
}
fn get_macos_version() -> String {
let cmd_stdout = Command::new("sw_vers")
.arg("-productVersion")
let mut cmd = Command::new("sw_vers");
cmd.arg("-productVersion");
debug_print_command(&cmd);
let cmd_stdout = cmd
.output()
.expect("Failed to execute command 'sw_vers -productVersion'")
.stdout;
@ -1196,7 +1178,7 @@ fn get_macos_version() -> String {
}
fn link_wasm32(
_target: &Triple,
_target: Target,
output_path: PathBuf,
input_paths: &[&str],
_link_type: LinkType,
@ -1221,8 +1203,9 @@ fn link_wasm32(
"-fstrip",
"-O",
"ReleaseSmall",
"-rdynamic",
// useful for debugging
// "-femit-llvm-ir=/home/folkertdev/roc/roc/crates/cli_testing_examples/benchmarks/platform/host.ll",
// "-femit-llvm-ir=/home/folkertdev/roc/roc/crates/cli/tests/benchmarks/platform/host.ll",
])
.spawn()?;
@ -1230,7 +1213,6 @@ fn link_wasm32(
}
fn link_windows(
target: &Triple,
output_path: PathBuf,
input_paths: &[&str],
link_type: LinkType,
@ -1263,7 +1245,7 @@ fn link_windows(
.args(input_paths)
.args([
"-target",
get_target_str(target),
"native",
"--subsystem",
"console",
"-lc",
@ -1279,7 +1261,7 @@ fn link_windows(
pub fn llvm_module_to_dylib(
module: &inkwell::module::Module,
target: &Triple,
target: Target,
opt_level: OptLevel,
) -> Result<Library, Error> {
use crate::target::{self, convert_opt_level};
@ -1303,7 +1285,7 @@ pub fn llvm_module_to_dylib(
// Link app.o into a dylib - e.g. app.so or app.dylib
let (mut child, dylib_path) = link(
&Triple::host(),
target,
app_o_file.clone(),
&[app_o_file.to_str().unwrap()],
LinkType::Dylib,
@ -1320,7 +1302,7 @@ pub fn llvm_module_to_dylib(
// Load the dylib
let path = dylib_path.as_path().to_str().unwrap();
if matches!(target.architecture, Architecture::Aarch64(_)) {
if matches!(target.architecture(), Architecture::Aarch64) {
// On AArch64 darwin machines, calling `ldopen` on Roc-generated libs from multiple threads
// sometimes fails with
// cannot dlopen until fork() handlers have completed
@ -1382,15 +1364,11 @@ pub fn preprocess_host_wasm32(host_input_path: &Path, preprocessed_host_path: &P
}
fn run_build_command(mut command: Command, file_to_build: &str, flaky_fail_counter: usize) {
let mut command_string = std::ffi::OsString::new();
command_string.push(command.get_program());
for arg in command.get_args() {
command_string.push(" ");
command_string.push(arg);
}
let cmd_str = command_string.to_str().unwrap();
let command_string = stringify_command(&command, false);
let cmd_str = &command_string;
roc_debug_flags::dbg_do!(roc_debug_flags::ROC_PRINT_BUILD_COMMANDS, {
print_command_str(cmd_str);
});
let cmd_output = command.output().unwrap();
let max_flaky_fail_count = 10;
@ -1428,3 +1406,47 @@ fn run_build_command(mut command: Command, file_to_build: &str, flaky_fail_count
}
}
}
/// Stringify a command for printing
/// e.g. `HOME=~ zig build-exe foo.zig -o foo`
fn stringify_command(cmd: &Command, include_env_vars: bool) -> String {
let mut command_string = std::ffi::OsString::new();
if include_env_vars {
for (name, opt_val) in cmd.get_envs() {
command_string.push(name);
command_string.push("=");
if let Some(val) = opt_val {
command_string.push(val);
} else {
command_string.push("''");
}
command_string.push(" ");
}
}
command_string.push(cmd.get_program());
for arg in cmd.get_args() {
command_string.push(" ");
command_string.push(arg);
}
String::from(command_string.to_str().unwrap())
}
#[cfg(debug_assertions)]
fn print_command_str(s: &str) {
println!("\nRoc build command:\n{}\n", s);
}
fn debug_print_command(_cmd: &Command) {
// This debug macro is compiled out in release mode, so the argument is unused
roc_debug_flags::dbg_do!(roc_debug_flags::ROC_PRINT_BUILD_COMMANDS_WITH_ENV_VARS, {
print_command_str(&stringify_command(_cmd, true));
});
roc_debug_flags::dbg_do!(roc_debug_flags::ROC_PRINT_BUILD_COMMANDS, {
print_command_str(&stringify_command(_cmd, false));
});
}
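For context, the command-stringification idea above can be reproduced with nothing but the standard library's `Command` accessors; the helper name `command_to_string` below is illustrative and not part of this codebase:

```rust
use std::process::Command;

/// Illustrative helper (not from the Roc codebase): render a Command as a
/// shell-like string, optionally prefixed by its explicitly-set env vars.
fn command_to_string(cmd: &Command, include_env_vars: bool) -> String {
    let mut out = String::new();

    if include_env_vars {
        for (name, opt_val) in cmd.get_envs() {
            out.push_str(&name.to_string_lossy());
            out.push('=');
            // A `None` value means the variable is explicitly removed for the child.
            out.push_str(&opt_val.map_or("''".into(), |v| v.to_string_lossy().into_owned()));
            out.push(' ');
        }
    }

    out.push_str(&cmd.get_program().to_string_lossy());
    for arg in cmd.get_args() {
        out.push(' ');
        out.push_str(&arg.to_string_lossy());
    }

    out
}

fn main() {
    let mut cmd = Command::new("zig");
    cmd.args(["build-exe", "foo.zig"]).env("HOME", "~");
    // Prints something like: HOME=~ zig build-exe foo.zig
    println!("{}", command_to_string(&cmd, true));
}
```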

View file

@ -17,7 +17,7 @@ use roc_reporting::{
cli::{report_problems, Problems},
report::{RenderTarget, DEFAULT_PALETTE},
};
use roc_target::{OperatingSystem, TargetInfo};
use roc_target::{Architecture, Target};
use std::ffi::OsStr;
use std::ops::Deref;
use std::{
@ -25,7 +25,6 @@ use std::{
thread::JoinHandle,
time::{Duration, Instant},
};
use target_lexicon::Triple;
#[cfg(feature = "target-wasm32")]
use roc_collections::all::MutSet;
@ -86,6 +85,7 @@ pub struct CodeGenOptions {
pub opt_level: OptLevel,
pub emit_debug_info: bool,
pub emit_llvm_ir: bool,
pub fuzz: bool,
}
type GenFromMono<'a> = (CodeObject, CodeGenTiming, ExpectMetadata<'a>);
@ -95,7 +95,7 @@ pub fn gen_from_mono_module<'a>(
arena: &'a bumpalo::Bump,
loaded: MonomorphizedModule<'a>,
roc_file_path: &Path,
target: &target_lexicon::Triple,
target: Target,
code_gen_options: CodeGenOptions,
preprocessed_host_path: &Path,
wasm_dev_stack_bytes: Option<u32>,
@ -103,6 +103,7 @@ pub fn gen_from_mono_module<'a>(
let path = roc_file_path;
let debug = code_gen_options.emit_debug_info;
let emit_llvm_ir = code_gen_options.emit_llvm_ir;
let fuzz = code_gen_options.fuzz;
let opt = code_gen_options.opt_level;
match code_gen_options.backend {
@ -131,6 +132,7 @@ pub fn gen_from_mono_module<'a>(
backend_mode,
debug,
emit_llvm_ir,
fuzz,
),
}
}
@ -143,11 +145,12 @@ fn gen_from_mono_module_llvm<'a>(
arena: &'a bumpalo::Bump,
loaded: MonomorphizedModule<'a>,
roc_file_path: &Path,
target: &target_lexicon::Triple,
target: Target,
opt_level: OptLevel,
backend_mode: LlvmBackendMode,
emit_debug_info: bool,
emit_llvm_ir: bool,
fuzz: bool,
) -> GenFromMono<'a> {
use crate::target::{self, convert_opt_level};
use inkwell::attributes::{Attribute, AttributeLoc};
@ -158,16 +161,14 @@ fn gen_from_mono_module_llvm<'a>(
let all_code_gen_start = Instant::now();
// Generate the binary
let target_info = roc_target::TargetInfo::from(target);
let context = Context::create();
let module = arena.alloc(module_from_builtins(target, &context, "app"));
// mark our zig-defined builtins as internal
let app_ll_file = {
let mut temp = PathBuf::from(roc_file_path);
temp.set_extension("ll");
let mut roc_file_path_buf = PathBuf::from(roc_file_path);
roc_file_path_buf.set_extension("ll");
temp
roc_file_path_buf
};
let kind_id = Attribute::get_named_enum_kind_id("alwaysinline");
@ -207,7 +208,7 @@ fn gen_from_mono_module_llvm<'a>(
context: &context,
interns: loaded.interns,
module,
target_info,
target,
mode: backend_mode,
exposed_to_host: loaded
@ -274,26 +275,21 @@ fn gen_from_mono_module_llvm<'a>(
);
}
if emit_llvm_ir {
eprintln!("Emitting LLVM IR to {}", &app_ll_file.display());
module.print_to_file(&app_ll_file).unwrap();
}
// Uncomment this to see the module's optimized LLVM instruction output:
// env.module.print_to_stderr();
// annotate the LLVM IR output with debug info
// so errors are reported with the line number of the LLVM source
let memory_buffer = if cfg!(feature = "sanitizers") && std::env::var("ROC_SANITIZERS").is_ok() {
let gen_sanitizers = cfg!(feature = "sanitizers") && std::env::var("ROC_SANITIZERS").is_ok();
let memory_buffer = if fuzz || gen_sanitizers {
let dir = tempfile::tempdir().unwrap();
let dir = dir.into_path();
let app_ll_file = dir.join("app.ll");
let app_bc_file = dir.join("app.bc");
let app_o_file = dir.join("app.o");
let temp_app_ll_file = dir.join("app.ll");
let temp_app_processed_file = dir.join("app_processed.ll"); // app.ll with llvm passes applied
let temp_app_processed_file_str = temp_app_processed_file.to_str().unwrap().to_owned();
let temp_app_o_file = dir.join("app.o");
// write the ll code to a file, so we can modify it
module.print_to_file(&app_ll_file).unwrap();
module.print_to_file(&temp_app_ll_file).unwrap();
// Apply coverage passes.
// Note, this is specifically tailored for `cargo afl` and afl++.
@ -301,33 +297,27 @@ fn gen_from_mono_module_llvm<'a>(
let mut passes = vec![];
let mut extra_args = vec![];
let mut unrecognized = vec![];
for sanitizer in std::env::var("ROC_SANITIZERS")
.unwrap()
.split(',')
.map(|x| x.trim())
{
match sanitizer {
"address" => passes.push("asan-module"),
"memory" => passes.push("msan-module"),
"thread" => passes.push("tsan-module"),
"cargo-fuzz" => {
passes.push("sancov-module");
extra_args.extend_from_slice(&[
"-sanitizer-coverage-level=3",
"-sanitizer-coverage-prune-blocks=0",
"-sanitizer-coverage-inline-8bit-counters",
"-sanitizer-coverage-pc-table",
]);
if fuzz {
passes.push("sancov-module");
extra_args.extend_from_slice(&[
"-sanitizer-coverage-level=4",
"-sanitizer-coverage-inline-8bit-counters",
"-sanitizer-coverage-pc-table",
"-sanitizer-coverage-trace-compares",
]);
}
if gen_sanitizers {
for sanitizer in std::env::var("ROC_SANITIZERS")
.unwrap()
.split(',')
.map(|x| x.trim())
{
match sanitizer {
"address" => passes.push("asan-module"),
"memory" => passes.push("msan-module"),
"thread" => passes.push("tsan-module"),
x => unrecognized.push(x.to_owned()),
}
"afl.rs" => {
passes.push("sancov-module");
extra_args.extend_from_slice(&[
"-sanitizer-coverage-level=3",
"-sanitizer-coverage-prune-blocks=0",
"-sanitizer-coverage-trace-pc-guard",
]);
}
x => unrecognized.push(x.to_owned()),
}
}
if !unrecognized.is_empty() {
@ -341,44 +331,62 @@ fn gen_from_mono_module_llvm<'a>(
}
use std::process::Command;
let mut opt = Command::new("opt");
opt.args([
app_ll_file.to_str().unwrap(),
"-o",
app_bc_file.to_str().unwrap(),
])
.args(extra_args);
if !passes.is_empty() {
opt.arg(format!("-passes={}", passes.join(",")));
}
let opt = opt.output().unwrap();
assert!(opt.stderr.is_empty(), "{opt:#?}");
// apply passes to app.ll
let mut opt_command = Command::new("opt");
opt_command
.args([
temp_app_ll_file.to_str().unwrap(),
"-o",
&temp_app_processed_file_str,
])
.args(extra_args);
if !passes.is_empty() {
opt_command.arg(format!("-passes={}", passes.join(",")));
}
let opt_output = opt_command.output().unwrap();
assert!(opt_output.stderr.is_empty(), "{opt_output:#?}");
if emit_llvm_ir {
eprintln!("Emitting LLVM IR to {}", &app_ll_file.display());
std::fs::copy(temp_app_processed_file, app_ll_file).unwrap();
}
// write the .o file. Note that this builds the .o for the local machine,
// and ignores the `target_machine` entirely.
//
// different systems name this executable differently, so we shotgun for
// the most common ones and then give up.
let bc_to_object = Command::new("llc")
let bc_to_object_output = Command::new("llc")
.args([
"-relocation-model=pic",
"-filetype=obj",
app_bc_file.to_str().unwrap(),
&temp_app_processed_file_str,
"-o",
app_o_file.to_str().unwrap(),
temp_app_o_file.to_str().unwrap(),
])
.output()
.unwrap();
assert!(bc_to_object.status.success(), "{bc_to_object:#?}");
assert!(
bc_to_object_output.status.success(),
"{bc_to_object_output:#?}"
);
MemoryBuffer::create_from_file(&app_o_file).expect("memory buffer creation works")
MemoryBuffer::create_from_file(&temp_app_o_file).expect("memory buffer creation works")
} else {
if emit_llvm_ir {
eprintln!("Emitting LLVM IR to {}", &app_ll_file.display());
module.print_to_file(&app_ll_file).unwrap();
}
// Emit the .o file
use target_lexicon::Architecture;
match target.architecture {
Architecture::X86_64 | Architecture::X86_32(_) | Architecture::Aarch64(_) => {
match target.architecture() {
Architecture::X86_64 | Architecture::X86_32 | Architecture::Aarch64 => {
let reloc = RelocMode::PIC;
let target_machine =
target::target_machine(target, convert_opt_level(opt_level), reloc).unwrap();
@ -394,7 +402,7 @@ fn gen_from_mono_module_llvm<'a>(
}
_ => internal_error!(
"TODO gracefully handle unsupported architecture: {:?}",
target.architecture
target.architecture()
),
}
};
@ -421,21 +429,19 @@ fn gen_from_mono_module_llvm<'a>(
fn gen_from_mono_module_dev<'a>(
arena: &'a bumpalo::Bump,
loaded: MonomorphizedModule<'a>,
target: &target_lexicon::Triple,
target: Target,
preprocessed_host_path: &Path,
wasm_dev_stack_bytes: Option<u32>,
backend_mode: AssemblyBackendMode,
) -> GenFromMono<'a> {
use target_lexicon::Architecture;
match target.architecture {
match target.architecture() {
Architecture::Wasm32 => gen_from_mono_module_dev_wasm32(
arena,
loaded,
preprocessed_host_path,
wasm_dev_stack_bytes,
),
Architecture::X86_64 | Architecture::Aarch64(_) => {
Architecture::X86_64 | Architecture::Aarch64 => {
gen_from_mono_module_dev_assembly(arena, loaded, target, backend_mode)
}
_ => todo!(),
@ -446,15 +452,13 @@ fn gen_from_mono_module_dev<'a>(
pub fn gen_from_mono_module_dev<'a>(
arena: &'a bumpalo::Bump,
loaded: MonomorphizedModule<'a>,
target: &target_lexicon::Triple,
target: Target,
_host_input_path: &Path,
_wasm_dev_stack_bytes: Option<u32>,
backend_mode: AssemblyBackendMode,
) -> GenFromMono<'a> {
use target_lexicon::Architecture;
match target.architecture {
Architecture::X86_64 | Architecture::Aarch64(_) => {
match target.architecture() {
Architecture::X86_64 | Architecture::Aarch64 => {
gen_from_mono_module_dev_assembly(arena, loaded, target, backend_mode)
}
_ => todo!(),
@ -538,7 +542,7 @@ fn gen_from_mono_module_dev_wasm32<'a>(
fn gen_from_mono_module_dev_assembly<'a>(
arena: &'a bumpalo::Bump,
loaded: MonomorphizedModule<'a>,
target: &target_lexicon::Triple,
target: Target,
backend_mode: AssemblyBackendMode,
) -> GenFromMono<'a> {
let all_code_gen_start = Instant::now();
@ -650,7 +654,7 @@ pub fn handle_error_module(
let problems = report_problems_typechecked(&mut module);
problems.print_to_stdout(total_time);
problems.print_error_warning_count(total_time);
if print_run_anyway_hint {
// If you're running "main.roc" then you can just do `roc run`
@ -676,19 +680,17 @@ pub fn handle_loading_problem(problem: LoadingProblem) -> std::io::Result<i32> {
_ => {
// TODO: tighten up the types here, we should always end up with a
// formatted report from load.
print!("Failed with error: {problem:?}");
println!("Failed with error: {problem:?}");
Ok(1)
}
}
}
pub fn standard_load_config(
target: &Triple,
target: Target,
order: BuildOrdering,
threading: Threading,
) -> LoadConfig {
let target_info = TargetInfo::from(target);
let exec_mode = match order {
BuildOrdering::BuildIfChecks => ExecutionMode::ExecutableIfCheck,
BuildOrdering::AlwaysBuild => ExecutionMode::Executable,
@ -706,7 +708,7 @@ pub fn standard_load_config(
};
LoadConfig {
target_info,
target,
function_kind,
render: RenderTarget::ColorTerminal,
palette: DEFAULT_PALETTE,
@ -718,7 +720,7 @@ pub fn standard_load_config(
#[allow(clippy::too_many_arguments)]
pub fn build_file<'a>(
arena: &'a Bump,
target: &Triple,
target: Target,
app_module_path: PathBuf,
code_gen_options: CodeGenOptions,
emit_timings: bool,
@ -756,7 +758,7 @@ pub fn build_file<'a>(
#[allow(clippy::too_many_arguments)]
fn build_loaded_file<'a>(
arena: &'a Bump,
target: &Triple,
target: Target,
app_module_path: PathBuf,
code_gen_options: CodeGenOptions,
emit_timings: bool,
@ -768,8 +770,6 @@ fn build_loaded_file<'a>(
compilation_start: Instant,
out_path: Option<&Path>,
) -> Result<BuiltFile<'a>, BuildFileError<'a>> {
let operating_system = roc_target::OperatingSystem::from(target.operating_system);
let platform_main_roc = match &loaded.entry_point {
EntryPoint::Executable { platform_path, .. } => platform_path.to_path_buf(),
_ => unreachable!(),
@ -781,9 +781,9 @@ fn build_loaded_file<'a>(
if is_platform_prebuilt && linking_strategy == LinkingStrategy::Surgical {
// Fall back to legacy linking if the preprocessed host file does not exist, but a legacy host does exist.
let preprocessed_host_path = platform_main_roc
.with_file_name(roc_linker::preprocessed_host_filename(target).unwrap());
let legacy_host_path = legacy_host_file(target, &platform_main_roc).unwrap();
let preprocessed_host_path =
platform_main_roc.with_file_name(roc_linker::preprocessed_host_filename(target));
let legacy_host_path = legacy_host_file(target, &platform_main_roc);
if !preprocessed_host_path.exists() && legacy_host_path.exists() {
linking_strategy = LinkingStrategy::Legacy;
}
@ -791,15 +791,15 @@ fn build_loaded_file<'a>(
// the preprocessed host is stored beside the platform's main.roc
let preprocessed_host_path = if linking_strategy == LinkingStrategy::Legacy {
if let roc_target::OperatingSystem::Wasi = operating_system {
if target == Target::Wasm32 {
// when compiling a wasm application, we implicitly assume here that the host is in zig
// and has a file called "host.zig"
platform_main_roc.with_file_name("host.zig")
} else {
legacy_host_file(target, &platform_main_roc).unwrap()
legacy_host_file(target, &platform_main_roc)
}
} else {
platform_main_roc.with_file_name(roc_linker::preprocessed_host_filename(target).unwrap())
platform_main_roc.with_file_name(roc_linker::preprocessed_host_filename(target))
};
let output_exe_path = match out_path {
@ -830,22 +830,12 @@ fn build_loaded_file<'a>(
if ends_with_sep {
let filename = app_module_path.file_name().unwrap_or_default();
with_output_extension(
&path.join(filename),
operating_system,
linking_strategy,
link_type,
)
with_output_extension(&path.join(filename), target, linking_strategy, link_type)
} else {
path.to_path_buf()
}
}
None => with_output_extension(
&app_module_path,
operating_system,
linking_strategy,
link_type,
),
None => with_output_extension(&app_module_path, target, linking_strategy, link_type),
};
// We don't need to spawn a rebuild thread when using a prebuilt host.
@ -1007,13 +997,13 @@ fn build_loaded_file<'a>(
std::fs::write(&output_exe_path, &*roc_app_bytes).unwrap();
}
(LinkingStrategy::Legacy, _) => {
let extension = if matches!(operating_system, roc_target::OperatingSystem::Wasi) {
let extension = if target == Target::Wasm32 {
// Legacy linker is only used by the llvm wasm backend, not dev.
// llvm wasm backend directly emits a bitcode file when targeting wasi, not a `.o` or `.wasm` file.
// If we set the extension wrong, zig will print a ton of warnings when linking.
"bc"
} else {
operating_system.object_file_ext()
target.object_file_ext()
};
let app_o_file = tempfile::Builder::new()
.prefix("roc_app")
@ -1113,10 +1103,9 @@ fn spawn_rebuild_thread(
platform_main_roc: PathBuf,
preprocessed_host_path: PathBuf,
output_exe_path: PathBuf,
target: &Triple,
target: Target,
dll_stub_symbols: Vec<String>,
) -> std::thread::JoinHandle<u128> {
let thread_local_target = target.clone();
std::thread::spawn(move || {
// Printing to stderr because we want stdout to contain only the output of the roc program.
// We are aware of the trade-offs.
@ -1127,19 +1116,14 @@ fn spawn_rebuild_thread(
match linking_strategy {
LinkingStrategy::Additive => {
let host_dest = rebuild_host(
opt_level,
&thread_local_target,
platform_main_roc.as_path(),
None,
);
let host_dest = rebuild_host(opt_level, target, platform_main_roc.as_path(), None);
preprocess_host_wasm32(host_dest.as_path(), &preprocessed_host_path);
}
LinkingStrategy::Surgical => {
build_and_preprocess_host_lowlevel(
opt_level,
&thread_local_target,
target,
platform_main_roc.as_path(),
preprocessed_host_path.as_path(),
&dll_stub_symbols,
@ -1150,12 +1134,7 @@ fn spawn_rebuild_thread(
std::fs::copy(&preprocessed_host_path, output_exe_path.as_path()).unwrap();
}
LinkingStrategy::Legacy => {
rebuild_host(
opt_level,
&thread_local_target,
platform_main_roc.as_path(),
None,
);
rebuild_host(opt_level, target, platform_main_roc.as_path(), None);
}
}
@ -1165,7 +1144,7 @@ fn spawn_rebuild_thread(
pub fn build_and_preprocess_host(
opt_level: OptLevel,
target: &Triple,
target: Target,
platform_main_roc: &Path,
preprocessed_host_path: &Path,
exposed_symbols: roc_linker::ExposedSymbols,
@ -1183,7 +1162,7 @@ pub fn build_and_preprocess_host(
fn build_and_preprocess_host_lowlevel(
opt_level: OptLevel,
target: &Triple,
target: Target,
platform_main_roc: &Path,
preprocessed_host_path: &Path,
stub_dll_symbols: &[String],
@ -1216,12 +1195,12 @@ pub fn check_file<'a>(
// only used for generating errors. We don't do code generation, so hardcoding should be fine
// we need monomorphization for exhaustiveness checking
let target_info = TargetInfo::default_x86_64();
let target = Target::LinuxX64;
// Step 1: compile the app and generate the .o file
let load_config = LoadConfig {
target_info,
target,
// TODO: we may not want this for just checking.
function_kind: FunctionKind::LambdaSet,
// TODO: expose this from CLI?
@ -1284,13 +1263,14 @@ pub fn build_str_test<'a>(
app_module_source: &'a str,
assume_prebuild: bool,
) -> Result<BuiltFile<'a>, BuildFileError<'a>> {
let triple = target_lexicon::Triple::host();
let target = target_lexicon::Triple::host().into();
let code_gen_options = CodeGenOptions {
backend: CodeGenBackend::Llvm(LlvmBackendMode::Binary),
opt_level: OptLevel::Normal,
emit_debug_info: false,
emit_llvm_ir: false,
fuzz: false,
};
let emit_timings = false;
@ -1302,7 +1282,7 @@ pub fn build_str_test<'a>(
let build_ordering = BuildOrdering::AlwaysBuild;
let threading = Threading::AtMost(2);
let load_config = standard_load_config(&triple, build_ordering, threading);
let load_config = standard_load_config(target, build_ordering, threading);
let compilation_start = std::time::Instant::now();
@ -1319,7 +1299,7 @@ pub fn build_str_test<'a>(
build_loaded_file(
arena,
&triple,
target,
app_module_path.to_path_buf(),
code_gen_options,
emit_timings,
@ -1335,15 +1315,15 @@ pub fn build_str_test<'a>(
fn with_output_extension(
path: &Path,
os: OperatingSystem,
target: Target,
linking_strategy: LinkingStrategy,
link_type: LinkType,
) -> PathBuf {
match (linking_strategy, link_type) {
(LinkingStrategy::Additive, _) | (LinkingStrategy::Legacy, LinkType::None) => {
// Additive linking and no linking both output the object file type.
path.with_extension(os.object_file_ext())
path.with_extension(target.object_file_ext())
}
_ => path.with_extension(os.executable_file_ext().unwrap_or_default()),
_ => path.with_extension(target.executable_file_ext().unwrap_or_default()),
}
}

View file

@ -1,121 +1,65 @@
use inkwell::{
targets::{CodeModel, InitializationConfig, RelocMode, Target, TargetMachine, TargetTriple},
targets::{
CodeModel, InitializationConfig, RelocMode, Target as LlvmTarget, TargetMachine,
TargetTriple,
},
OptimizationLevel,
};
use roc_error_macros::internal_error;
use roc_mono::ir::OptLevel;
use target_lexicon::{Architecture, Environment, OperatingSystem, Triple};
use roc_target::{Architecture, Target};
pub fn target_triple_str(target: &Triple) -> &'static str {
pub fn target_triple_str(target: Target) -> &'static str {
// Best guide I've found on how to determine these magic strings:
//
// https://stackoverflow.com/questions/15036909/clang-how-to-list-supported-target-architectures
match target {
Triple {
architecture: Architecture::X86_64,
operating_system: OperatingSystem::Linux,
..
} => "x86_64-unknown-linux-gnu",
Triple {
architecture: Architecture::X86_32(target_lexicon::X86_32Architecture::I386),
operating_system: OperatingSystem::Linux,
..
} => "i386-unknown-linux-gnu",
Triple {
architecture: Architecture::Wasm32,
..
} => "wasm32-unknown-unknown",
Triple {
architecture: Architecture::Aarch64(_),
operating_system: OperatingSystem::Linux,
..
} => "aarch64-unknown-linux-gnu",
Triple {
architecture: Architecture::Aarch64(_),
operating_system: OperatingSystem::Darwin,
..
} => "aarch64-apple-darwin",
Triple {
architecture: Architecture::X86_64,
operating_system: OperatingSystem::Darwin,
..
} => "x86_64-unknown-darwin10",
Triple {
architecture: Architecture::X86_64,
operating_system: OperatingSystem::Windows,
..
} => "x86_64-pc-windows-gnu",
Target::LinuxArm64 => "aarch64-unknown-linux-gnu",
Target::LinuxX32 => "i386-unknown-linux-gnu",
Target::LinuxX64 => "x86_64-unknown-linux-gnu",
Target::MacArm64 => "aarch64-apple-darwin",
Target::MacX64 => "x86_64-unknown-darwin10",
Target::Wasm32 => "wasm32-unknown-unknown",
Target::WinX64 => "x86_64-pc-windows-gnu",
_ => internal_error!("TODO gracefully handle unsupported target: {:?}", target),
}
}
pub fn target_zig_str(target: &Triple) -> &'static str {
pub fn target_zig_str(target: Target) -> &'static str {
// Zig has its own architecture mappings, defined here:
// https://github.com/ziglang/zig/blob/master/tools/process_headers.zig
//
// and an open proposal to unify them with the more typical "target triples":
// https://github.com/ziglang/zig/issues/4911
match target {
Triple {
architecture: Architecture::X86_64,
operating_system: OperatingSystem::Linux,
environment: Environment::Musl,
..
} => "x86_64-linux-musl",
Triple {
architecture: Architecture::X86_64,
operating_system: OperatingSystem::Linux,
..
} => "x86_64-linux-gnu",
Triple {
architecture: Architecture::X86_32(target_lexicon::X86_32Architecture::I386),
operating_system: OperatingSystem::Linux,
environment: Environment::Musl,
..
} => "i386-linux-musl",
Triple {
architecture: Architecture::X86_32(target_lexicon::X86_32Architecture::I386),
operating_system: OperatingSystem::Linux,
..
} => "i386-linux-gnu",
Triple {
architecture: Architecture::Aarch64(_),
operating_system: OperatingSystem::Linux,
..
} => "aarch64-linux-gnu",
Triple {
architecture: Architecture::X86_64,
operating_system: OperatingSystem::Darwin,
..
} => "x86_64-macos-none",
Triple {
architecture: Architecture::Aarch64(_),
operating_system: OperatingSystem::Darwin,
..
} => "aarch64-macos-none",
Target::LinuxArm64 => "aarch64-linux-gnu",
Target::LinuxX32 => "i386-linux-gnu",
Target::LinuxX64 => "x86_64-linux-gnu",
Target::MacArm64 => "aarch64-macos-none",
Target::MacX64 => "x86_64-macos-none",
_ => internal_error!("TODO gracefully handle unsupported target: {:?}", target),
}
}
pub fn init_arch(target: &Triple) {
match target.architecture {
Architecture::X86_64 | Architecture::X86_32(_)
pub fn init_arch(target: Target) {
match target.architecture() {
Architecture::X86_64 | Architecture::X86_32
if cfg!(any(feature = "target-x86", feature = "target-x86_64")) =>
{
Target::initialize_x86(&InitializationConfig::default());
LlvmTarget::initialize_x86(&InitializationConfig::default());
}
Architecture::Aarch64(_) if cfg!(feature = "target-aarch64") => {
Target::initialize_aarch64(&InitializationConfig::default());
Architecture::Aarch64 if cfg!(feature = "target-aarch64") => {
LlvmTarget::initialize_aarch64(&InitializationConfig::default());
}
Architecture::Arm(_) if cfg!(feature = "target-arm") => {
Target::initialize_arm(&InitializationConfig::default());
Architecture::Aarch32 if cfg!(feature = "target-arm") => {
LlvmTarget::initialize_arm(&InitializationConfig::default());
}
Architecture::Wasm32 if cfg!(feature = "target-wasm32") => {
Target::initialize_webassembly(&InitializationConfig::default());
LlvmTarget::initialize_webassembly(&InitializationConfig::default());
}
_ => internal_error!(
"TODO gracefully handle unsupported target architecture: {:?}",
target.architecture
target.architecture()
),
}
}
@ -123,25 +67,25 @@ pub fn init_arch(target: &Triple) {
/// NOTE: arch_str is *not* the same as the beginning of the magic target triple
/// string! For example, if it's "x86-64" here, the magic target triple string
/// will begin with "x86_64" (with an underscore) instead.
pub fn arch_str(target: &Triple) -> &'static str {
pub fn arch_str(target: Target) -> &'static str {
// Best guide I've found on how to determine these magic strings:
//
// https://stackoverflow.com/questions/15036909/clang-how-to-list-supported-target-architectures
match target.architecture {
Architecture::X86_64 if cfg!(feature = "target-x86_64") => "x86-64",
Architecture::X86_32(_) if cfg!(feature = "target-x86") => "x86",
Architecture::Aarch64(_) if cfg!(feature = "target-aarch64") => "aarch64",
Architecture::Arm(_) if cfg!(feature = "target-arm") => "arm",
Architecture::Wasm32 if cfg!(feature = "target-webassembly") => "wasm32",
match target.architecture() {
roc_target::Architecture::X86_64 if cfg!(feature = "target-x86_64") => "x86-64",
roc_target::Architecture::X86_32 if cfg!(feature = "target-x86") => "x86",
roc_target::Architecture::Aarch64 if cfg!(feature = "target-aarch64") => "aarch64",
roc_target::Architecture::Aarch32 if cfg!(feature = "target-arm") => "arm",
roc_target::Architecture::Wasm32 if cfg!(feature = "target-webassembly") => "wasm32",
_ => internal_error!(
"TODO gracefully handle unsupported target architecture: {:?}",
target.architecture
target.architecture()
),
}
}
pub fn target_machine(
target: &Triple,
target: Target,
opt: OptimizationLevel,
reloc: RelocMode,
) -> Option<TargetMachine> {
@ -150,11 +94,7 @@ pub fn target_machine(
init_arch(target);
let code_model = match target {
Triple {
operating_system: OperatingSystem::Darwin,
architecture: Architecture::Aarch64(_),
..
} => {
Target::MacArm64 => {
// We used to have a problem that LLVM 12 would not compile our programs without a large code model.
// The reason was not totally clear to us, but one guess is a few special-cases in
// llvm/lib/Target/AArch64/AArch64ISelLowering.cpp (instructions)
@ -168,7 +108,7 @@ pub fn target_machine(
_ => CodeModel::Default,
};
Target::from_name(arch).unwrap().create_target_machine(
LlvmTarget::from_name(arch).unwrap().create_target_machine(
&TargetTriple::create(target_triple_str(target)),
"generic",
"",

View file

@ -13,6 +13,3 @@ roc_module = { path = "../module" }
roc_region = { path = "../region" }
roc_target = { path = "../roc_target" }
roc_error_macros = { path = "../../error_macros" }
tempfile.workspace = true

View file

@ -6,7 +6,7 @@ Builtins are the functions and modules that are implicitly imported into every m
Edit the appropriate `roc/*.roc` file with your new implementation. All normal rules for writing Roc code apply. Be sure to add a declaration, a definition, and some documentation, and add it to the exposes list in the module head.
Next, look towards the bottom of the `compiler/module/src/symbol.rs` file. Inside the `define_builtins!` macro, there is a list for each of the builtin modules and the function or value names it contains. Add a new entry to the appropriate list for your new function.
For each of the builtin modules, there is a file in `compiler/test_gen/src/` like `gen_num.rs`, `gen_str.rs` etc. Add new tests for the module you are changing to the appropriate file here. You can look at the existing test cases for examples and inspiration.
@ -22,14 +22,14 @@ Some of these have `#` inside their name (`first#list`, `#lt` ..). This is a tri
But we can use these values and some of these are necessary for implementing builtins. For example, `List.get` returns tags, and it is not easy for us to create tags when composing LLVM. What is easier however, is:
- ..writing `List.#getUnsafe` that has the dangerous signature of `List elem, Nat -> elem` in LLVM
- ..writing `List elem, Nat -> Result elem [OutOfBounds]*` in a type safe way that uses `getUnsafe` internally, only after it checks if the `elem` at `Nat` index exists.
- ..writing `List.#getUnsafe` that has the dangerous signature of `List elem, U64 -> elem` in LLVM
- ..writing `List elem, U64 -> Result elem [OutOfBounds]*` in a type safe way that uses `getUnsafe` internally, only after it checks if the `elem` at `U64` index exists.
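To make the split concrete, here is a minimal Rust sketch of the same "unsafe core, safe wrapper" shape; `get_unsafe` and `get` are illustrative names, not the actual builtin symbols or their real signatures:

```rust
/// Unchecked getter with a "dangerous" signature: the caller must guarantee
/// that `index < list.len()`.
fn get_unsafe<T: Clone>(list: &[T], index: usize) -> T {
    list[index].clone()
}

/// Safe wrapper: performs the bounds check and turns failure into a value,
/// only deferring to the unchecked getter when the element exists.
fn get<T: Clone>(list: &[T], index: usize) -> Result<T, &'static str> {
    if index < list.len() {
        Ok(get_unsafe(list, index))
    } else {
        Err("OutOfBounds")
    }
}

fn main() {
    let xs = vec![1, 2, 3];
    assert_eq!(get(&xs, 1), Ok(2));
    assert_eq!(get(&xs, 9), Err("OutOfBounds"));
}
```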
### can/src/builtins.rs
Right at the top of this module is a function called `builtin_defs`. All this is doing is mapping the `Symbol` defined in `module/src/symbol.rs` to its implementation. Some of the builtins are quite complex, such as `list_get`. What makes `list_get` complex is that it returns tags, and in order to return tags it first has to defer to lower-level functions via an if statement.
Let's look at `List.repeat : elem, Nat -> List elem`, which is more straight-forward, and points directly to its lower level implementation:
Let's look at `List.repeat : elem, U64 -> List elem`, which is more straightforward, and points directly to its lower-level implementation:
```rust
fn list_repeat(symbol: Symbol, var_store: &mut VarStore) -> Def {
@ -106,7 +106,7 @@ fn atan() {
But replace `Num.atan` and the type signature with the new builtin.
### test_gen/test/*.rs
### test_gen/test/\*.rs
In this directory, there are a couple files like `gen_num.rs`, `gen_str.rs`, etc. For the `Str` module builtins, put the test in `gen_str.rs`, etc. Find the one for the new builtin, and add a test like:
@ -123,5 +123,5 @@ But replace `Num.atan`, the return value, and the return type with your new buil
When implementing a new builtin, it is often easy to copy and paste the implementation for an existing builtin. This can take you quite far since many builtins are very similar, but it also risks forgetting to change one small part of what you copy and pasted and losing a lot of time later on when you can't figure out why things don't work. So, speaking from experience, even if you are copying an existing builtin, try to implement it manually without copying and pasting. Two recent instances of this (as of September 7th, 2020):
- `List.keepIf` did not work for a long time because in builtins its `LowLevel` was `ListMap`. This was because I copy and pasted the `List.map` implementation in `builtins.rs`.
- `List.walkBackwards` had mysterious memory bugs for a little while because in `unique.rs` its return type was `list_type(flex(b))` instead of `flex(b)`, since it was copy and pasted from `List.keepIf`.

View file

@ -24,6 +24,9 @@ pub const RocDec = extern struct {
pub const one_point_zero_i128: i128 = math.pow(i128, 10, RocDec.decimal_places);
pub const one_point_zero: RocDec = .{ .num = one_point_zero_i128 };
pub const two_point_zero: RocDec = RocDec.add(RocDec.one_point_zero, RocDec.one_point_zero);
pub const zero_point_five: RocDec = RocDec.div(RocDec.one_point_zero, RocDec.two_point_zero);
pub fn fromU64(num: u64) RocDec {
return .{ .num = num * one_point_zero_i128 };
}
@ -223,6 +226,10 @@ pub const RocDec = extern struct {
return self.num;
}
pub fn fromI128(num: i128) RocDec {
return .{ .num = num };
}
pub fn eq(self: RocDec, other: RocDec) bool {
return self.num == other.num;
}
@ -340,6 +347,77 @@ pub const RocDec = extern struct {
}
}
fn trunc(self: RocDec) RocDec {
return RocDec.sub(self, self.fract());
}
fn fract(self: RocDec) RocDec {
const sign = std.math.sign(self.num);
const digits = @mod(sign * self.num, RocDec.one_point_zero.num);
return RocDec{ .num = sign * digits };
}
// Returns the nearest integer to self. If a value is half-way between two integers, round away from 0.0.
fn round(arg1: RocDec) RocDec {
// trunc() rounds towards zero, giving us the integer part
const tmp = arg1.trunc();
const sign = std.math.sign(arg1.num);
const abs_fract = sign * arg1.fract().num;
if (abs_fract >= RocDec.zero_point_five.num) {
return RocDec.add(tmp, RocDec{ .num = sign * RocDec.one_point_zero.num });
} else {
return tmp;
}
}
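The trunc/fract/round trio above is plain fixed-point arithmetic on an i128 scaled by 10^18. A small self-contained Rust sketch of the same round-half-away-from-zero rule (constants and names are illustrative, not the Dec implementation):

```rust
// Fixed-point sketch: values are stored as i128 scaled by 10^18,
// mirroring the Dec layout described above.
const ONE: i128 = 1_000_000_000_000_000_000; // 10^18
const HALF: i128 = ONE / 2;

fn trunc(n: i128) -> i128 {
    // Chop off the fractional digits; integer division rounds toward zero.
    (n / ONE) * ONE
}

fn fract(n: i128) -> i128 {
    n - trunc(n)
}

/// Round to the nearest integer, breaking ties away from zero.
fn round(n: i128) -> i128 {
    let t = trunc(n);
    if fract(n).abs() >= HALF {
        t + ONE * n.signum()
    } else {
        t
    }
}

fn main() {
    assert_eq!(round(123_450_000_000_000_000_000) / ONE, 123); // 123.45 -> 123
    assert_eq!(round(HALF) / ONE, 1); // 0.5 -> 1
    assert_eq!(round(-HALF) / ONE, -1); // -0.5 -> -1
}
```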
// Returns the largest integer less than or equal to itself
fn floor(arg1: RocDec) RocDec {
const tmp = arg1.trunc();
if (arg1.num < 0 and arg1.fract().num != 0) {
return RocDec.sub(tmp, RocDec.one_point_zero);
} else {
return tmp;
}
}
// Returns the smallest integer greater than or equal to itself
fn ceiling(arg1: RocDec) RocDec {
const tmp = arg1.trunc();
if (arg1.num > 0 and arg1.fract().num != 0) {
return RocDec.add(tmp, RocDec.one_point_zero);
} else {
return tmp;
}
}
fn powInt(base: RocDec, exponent: i128) RocDec {
if (exponent == 0) {
return RocDec.one_point_zero;
} else if (exponent > 0) {
if (@mod(exponent, 2) == 0) {
const half_power = RocDec.powInt(base, exponent >> 1); // `>> 1` == `/ 2`
return RocDec.mul(half_power, half_power);
} else {
return RocDec.mul(base, RocDec.powInt(base, exponent - 1));
}
} else {
return RocDec.div(RocDec.one_point_zero, RocDec.powInt(base, -exponent));
}
}
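`powInt` above is ordinary exponentiation by squaring, with a reciprocal for negative exponents. A minimal Rust sketch of the same recursion, written over `f64` only to keep it short (the real code works on the Dec fixed-point type):

```rust
fn pow_int(base: f64, exp: i64) -> f64 {
    if exp == 0 {
        1.0
    } else if exp > 0 {
        if exp % 2 == 0 {
            // Compute the half-power once and square it.
            let half = pow_int(base, exp / 2);
            half * half
        } else {
            base * pow_int(base, exp - 1)
        }
    } else {
        // Negative exponent: take the reciprocal of the positive power.
        1.0 / pow_int(base, -exp)
    }
}

fn main() {
    assert_eq!(pow_int(2.0, 10), 1024.0);
    assert_eq!(pow_int(0.5, 2), 0.25);
    assert_eq!(pow_int(2.0, -2), 0.25);
}
```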
fn pow(base: RocDec, exponent: RocDec) RocDec {
if (exponent.trunc().num == exponent.num) {
return base.powInt(@divTrunc(exponent.num, RocDec.one_point_zero_i128));
} else {
return fromF64(std.math.pow(f64, base.toF64(), exponent.toF64())).?;
}
}
pub fn mul(self: RocDec, other: RocDec) RocDec {
const answer = RocDec.mulWithOverflow(self, other);
@ -1195,6 +1273,153 @@ test "log: 1" {
try expectEqual(RocDec.fromU64(0), RocDec.log(RocDec.fromU64(1)));
}
test "fract: 0" {
var roc_str = RocStr.init("0", 1);
var dec = RocDec.fromStr(roc_str).?;
try expectEqual(RocDec{ .num = 0 }, dec.fract());
}
test "fract: 1" {
var roc_str = RocStr.init("1", 1);
var dec = RocDec.fromStr(roc_str).?;
try expectEqual(RocDec{ .num = 0 }, dec.fract());
}
test "fract: 123.45" {
var roc_str = RocStr.init("123.45", 6);
var dec = RocDec.fromStr(roc_str).?;
try expectEqual(RocDec{ .num = 450000000000000000 }, dec.fract());
}
test "fract: -123.45" {
var roc_str = RocStr.init("-123.45", 7);
var dec = RocDec.fromStr(roc_str).?;
try expectEqual(RocDec{ .num = -450000000000000000 }, dec.fract());
}
test "fract: .45" {
var roc_str = RocStr.init(".45", 3);
var dec = RocDec.fromStr(roc_str).?;
try expectEqual(RocDec{ .num = 450000000000000000 }, dec.fract());
}
test "fract: -0.00045" {
const dec: RocDec = .{ .num = -450000000000000 };
const res = dec.fract();
try expectEqual(dec.num, res.num);
}
test "trunc: 0" {
var roc_str = RocStr.init("0", 1);
var dec = RocDec.fromStr(roc_str).?;
try expectEqual(RocDec{ .num = 0 }, dec.trunc());
}
test "trunc: 1" {
var roc_str = RocStr.init("1", 1);
var dec = RocDec.fromStr(roc_str).?;
try expectEqual(RocDec.one_point_zero, dec.trunc());
}
test "trunc: 123.45" {
var roc_str = RocStr.init("123.45", 6);
var dec = RocDec.fromStr(roc_str).?;
try expectEqual(RocDec{ .num = 123000000000000000000 }, dec.trunc());
}
test "trunc: -123.45" {
var roc_str = RocStr.init("-123.45", 7);
var dec = RocDec.fromStr(roc_str).?;
try expectEqual(RocDec{ .num = -123000000000000000000 }, dec.trunc());
}
test "trunc: .45" {
var roc_str = RocStr.init(".45", 3);
var dec = RocDec.fromStr(roc_str).?;
try expectEqual(RocDec{ .num = 0 }, dec.trunc());
}
test "trunc: -0.00045" {
const dec: RocDec = .{ .num = -450000000000000 };
const res = dec.trunc();
try expectEqual(RocDec{ .num = 0 }, res);
}
test "round: 123.45" {
var roc_str = RocStr.init("123.45", 6);
var dec = RocDec.fromStr(roc_str).?;
try expectEqual(RocDec{ .num = 123000000000000000000 }, dec.round());
}
test "round: -123.45" {
var roc_str = RocStr.init("-123.45", 7);
var dec = RocDec.fromStr(roc_str).?;
try expectEqual(RocDec{ .num = -123000000000000000000 }, dec.round());
}
test "round: 0.5" {
var roc_str = RocStr.init("0.5", 3);
var dec = RocDec.fromStr(roc_str).?;
try expectEqual(RocDec.one_point_zero, dec.round());
}
test "round: -0.5" {
var roc_str = RocStr.init("-0.5", 4);
var dec = RocDec.fromStr(roc_str).?;
try expectEqual(RocDec{ .num = -1000000000000000000 }, dec.round());
}
test "powInt: 3.1 ^ 0" {
var roc_str = RocStr.init("3.1", 3);
var dec = RocDec.fromStr(roc_str).?;
try expectEqual(RocDec.one_point_zero, dec.powInt(0));
}
test "powInt: 3.1 ^ 1" {
var roc_str = RocStr.init("3.1", 3);
var dec = RocDec.fromStr(roc_str).?;
try expectEqual(dec, dec.powInt(1));
}
test "powInt: 2 ^ 2" {
var roc_str = RocStr.init("4", 1);
var dec = RocDec.fromStr(roc_str).?;
try expectEqual(dec, RocDec.two_point_zero.powInt(2));
}
test "powInt: 0.5 ^ 2" {
var roc_str = RocStr.init("0.25", 4);
var dec = RocDec.fromStr(roc_str).?;
try expectEqual(dec, RocDec.zero_point_five.powInt(2));
}
test "pow: 0.5 ^ 2.0" {
var roc_str = RocStr.init("0.25", 4);
var dec = RocDec.fromStr(roc_str).?;
try expectEqual(dec, RocDec.zero_point_five.pow(RocDec.two_point_zero));
}
// exports
pub fn fromStr(arg: RocStr) callconv(.C) num_.NumParseResult(i128) {
@ -1254,6 +1479,10 @@ pub fn toI128(arg: RocDec) callconv(.C) i128 {
return @call(.always_inline, RocDec.toI128, .{arg});
}
pub fn fromI128(arg: i128) callconv(.C) RocDec {
return @call(.always_inline, RocDec.fromI128, .{arg});
}
pub fn eqC(arg1: RocDec, arg2: RocDec) callconv(.C) bool {
return @call(.always_inline, RocDec.eq, .{ arg1, arg2 });
}
@ -1295,6 +1524,10 @@ pub fn logC(arg: RocDec) callconv(.C) i128 {
return @call(.always_inline, RocDec.log, .{arg}).num;
}
pub fn powC(arg1: RocDec, arg2: RocDec) callconv(.C) i128 {
return @call(.always_inline, RocDec.pow, .{ arg1, arg2 }).num;
}
pub fn sinC(arg: RocDec) callconv(.C) i128 {
return @call(.always_inline, RocDec.sin, .{arg}).num;
}
@ -1342,3 +1575,30 @@ pub fn mulOrPanicC(arg1: RocDec, arg2: RocDec) callconv(.C) RocDec {
pub fn mulSaturatedC(arg1: RocDec, arg2: RocDec) callconv(.C) RocDec {
return @call(.always_inline, RocDec.mulSaturated, .{ arg1, arg2 });
}
pub fn exportRound(comptime T: type, comptime name: []const u8) void {
comptime var f = struct {
fn func(input: RocDec) callconv(.C) T {
return @as(T, @intCast(@divFloor(input.round().num, RocDec.one_point_zero_i128)));
}
}.func;
@export(f, .{ .name = name ++ @typeName(T), .linkage = .Strong });
}
pub fn exportFloor(comptime T: type, comptime name: []const u8) void {
comptime var f = struct {
fn func(input: RocDec) callconv(.C) T {
return @as(T, @intCast(@divFloor(input.floor().num, RocDec.one_point_zero_i128)));
}
}.func;
@export(f, .{ .name = name ++ @typeName(T), .linkage = .Strong });
}
pub fn exportCeiling(comptime T: type, comptime name: []const u8) void {
comptime var f = struct {
fn func(input: RocDec) callconv(.C) T {
return @as(T, @intCast(@divFloor(input.ceiling().num, RocDec.one_point_zero_i128)));
}
}.func;
@export(f, .{ .name = name ++ @typeName(T), .linkage = .Strong });
}
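The `exportRound`/`exportFloor`/`exportCeiling` helpers all follow one comptime pattern: stamp out one C-ABI wrapper per integer type and derive the exported symbol name from the type. A rough Rust analogue of that code-generation pattern, with hypothetical symbol names:

```rust
// Rough analogue of the Zig comptime export pattern above: a macro stamps out
// one C-ABI wrapper per target type. (The real helpers round/floor/ceil first;
// this sketch only floor-divides the already-scaled value, to keep the focus
// on the per-type export pattern.)
macro_rules! export_dec_to_int {
    ($t:ty, $name:ident) => {
        #[no_mangle]
        pub extern "C" fn $name(scaled: i128) -> $t {
            const ONE: i128 = 1_000_000_000_000_000_000; // 10^18 scale, as above
            // div_euclid rounds toward negative infinity, like Zig's @divFloor.
            scaled.div_euclid(ONE) as $t
        }
    };
}

// Hypothetical symbol names, for illustration only.
export_dec_to_int!(i32, roc_dec_to_i32);
export_dec_to_int!(i64, roc_dec_to_i64);

fn main() {
    assert_eq!(roc_dec_to_i64(3_000_000_000_000_000_000), 3);
    assert_eq!(roc_dec_to_i32(-1_500_000_000_000_000_000), -2); // floor of -1.5
}
```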

File diff suppressed because it is too large

View file

@ -472,7 +472,7 @@ pub fn listMap4(
}
pub fn listWithCapacity(
capacity: usize,
capacity: u64,
alignment: u32,
element_width: usize,
) callconv(.C) RocList {
@ -482,16 +482,22 @@ pub fn listWithCapacity(
pub fn listReserve(
list: RocList,
alignment: u32,
spare: usize,
spare: u64,
element_width: usize,
update_mode: UpdateMode,
) callconv(.C) RocList {
const old_length = list.len();
if ((update_mode == .InPlace or list.isUnique()) and list.getCapacity() >= list.len() + spare) {
const original_len = list.len();
const cap = @as(u64, @intCast(list.getCapacity()));
const desired_cap = @as(u64, @intCast(original_len)) +| spare;
if ((update_mode == .InPlace or list.isUnique()) and cap >= desired_cap) {
return list;
} else {
var output = list.reallocate(alignment, old_length + spare, element_width);
output.length = old_length;
// Make sure on 32-bit targets we don't accidentally wrap when we cast our u64 desired capacity down to usize.
const reserve_size: u64 = @min(desired_cap, @as(u64, @intCast(std.math.maxInt(usize))));
var output = list.reallocate(alignment, @as(usize, @intCast(reserve_size)), element_width);
output.length = original_len;
return output;
}
}
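The reserve logic above reduces to: compute the desired capacity with a saturating 64-bit add (Zig's `+|`), then clamp to what `usize` can hold before reallocating. A small Rust sketch of just that arithmetic, with an illustrative function name:

```rust
// Saturating add in u64, then clamp to usize::MAX so the final cast is
// lossless even on 32-bit targets.
fn desired_reserve(current_len: usize, spare: u64) -> usize {
    let desired_cap = (current_len as u64).saturating_add(spare);
    let clamped = desired_cap.min(usize::MAX as u64);
    clamped as usize
}

fn main() {
    assert_eq!(desired_reserve(10, 5), 15);
    // Even an absurd request cannot overflow the cast back to usize.
    assert_eq!(desired_reserve(1, u64::MAX), usize::MAX);
}
```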
@ -577,13 +583,13 @@ pub fn listSwap(
list: RocList,
alignment: u32,
element_width: usize,
index_1: usize,
index_2: usize,
index_1: u64,
index_2: u64,
update_mode: UpdateMode,
) callconv(.C) RocList {
const size = list.len();
const size = @as(u64, @intCast(list.len()));
if (index_1 == index_2 or index_1 >= size or index_2 >= size) {
// Either index out of bounds so we just return
// Either one index was out of bounds, or both indices were the same; just return
return list;
}
@ -596,7 +602,11 @@ pub fn listSwap(
};
const source_ptr = @as([*]u8, @ptrCast(newList.bytes));
swapElements(source_ptr, element_width, index_1, index_2);
// We already verified that both indices are less than the stored list length,
// which is usize, so casting them to usize will definitely be lossless.
swapElements(source_ptr, element_width, @as(usize, @intCast(index_1)), @as(usize, @intCast(index_2)));
return newList;
}
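Several list builtins in this file take 64-bit indices from Roc but index into a host-sized buffer, and they all use the same trick: compare against the length in u64 space first, then cast down to usize, which is then guaranteed lossless. A short Rust sketch of that pattern (the function name is illustrative):

```rust
// Bounds-check-then-cast: indices arrive as u64, the length fits in usize,
// so comparing in u64 space first makes the later cast provably lossless.
fn swap_checked<T>(items: &mut [T], i: u64, j: u64) {
    let len = items.len() as u64;
    if i == j || i >= len || j >= len {
        // Out of bounds (or a no-op swap): leave the slice unchanged.
        return;
    }
    // Both indices are < len <= usize::MAX, so these casts cannot truncate.
    items.swap(i as usize, j as usize);
}

fn main() {
    let mut xs = [1, 2, 3, 4];
    swap_checked(&mut xs, 0, 3);
    assert_eq!(xs, [4, 2, 3, 1]);
    swap_checked(&mut xs, 0, 99); // out of bounds: no change
    assert_eq!(xs, [4, 2, 3, 1]);
}
```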
@ -605,12 +615,12 @@ pub fn listSublist(
list: RocList,
alignment: u32,
element_width: usize,
start: usize,
len: usize,
start_u64: u64,
len_u64: u64,
dec: Dec,
) callconv(.C) RocList {
const size = list.len();
if (len == 0 or start >= size) {
if (size == 0 or start_u64 >= @as(u64, @intCast(size))) {
// Decrement the reference counts of all elements.
if (list.bytes) |source_ptr| {
var i: usize = 0;
@ -629,9 +639,26 @@ pub fn listSublist(
}
if (list.bytes) |source_ptr| {
const keep_len = @min(len, size - start);
// This cast is lossless because we would have early-returned already
// if `start_u64` were greater than `size`, and `size` fits in usize.
const start: usize = @intCast(start_u64);
const drop_start_len = start;
const drop_end_len = size - (start + keep_len);
// (size - start) can't overflow because we would have early-returned already
// if `start` were greater than `size`.
const size_minus_start = size - start;
// This outer cast to usize is lossless. size, start, and size_minus_start all fit in usize,
// and @min guarantees that if `len_u64` gets returned, it's because it was smaller
// than something that fit in usize.
const keep_len = @as(usize, @intCast(@min(len_u64, @as(u64, @intCast(size_minus_start)))));
// This can't overflow because if len > size_minus_start,
// then keep_len == size_minus_start and this will be 0.
// Alternatively, if len <= size_minus_start, then keep_len will
// be equal to len, meaning keep_len <= size_minus_start too,
// which in turn means this won't overflow.
const drop_end_len = size_minus_start - keep_len;
// Decrement the reference counts of elements before `start`.
var i: usize = 0;
@ -671,28 +698,33 @@ pub fn listDropAt(
list: RocList,
alignment: u32,
element_width: usize,
drop_index: usize,
drop_index_u64: u64,
dec: Dec,
) callconv(.C) RocList {
const size = list.len();
const size_u64 = @as(u64, @intCast(size));
// If dropping the first or last element, return a seamless slice.
// For simplicity, do this by calling listSublist.
// In the future, we can test if it is faster to manually inline the important parts here.
if (drop_index == 0) {
if (drop_index_u64 == 0) {
return listSublist(list, alignment, element_width, 1, size -| 1, dec);
} else if (drop_index == size -| 1) {
} else if (drop_index_u64 == size_u64 - 1) { // It's fine if (size - 1) wraps when size == 0 here,
// because in that case this branch is always safe to take: no matter what
// drop_index was, size == 0 means an empty list will always be returned.
return listSublist(list, alignment, element_width, 0, size -| 1, dec);
}
if (list.bytes) |source_ptr| {
if (drop_index >= size) {
if (drop_index_u64 >= size_u64) {
return list;
}
if (drop_index < size) {
const element = source_ptr + drop_index * element_width;
dec(element);
}
// This cast must be lossless, because we would have just early-returned if drop_index
// were greater than or equal to `size`, and we know `size` fits in usize.
const drop_index: usize = @intCast(drop_index_u64);
const element = source_ptr + drop_index * element_width;
dec(element);
// NOTE
// we need to return an empty list explicitly,
@ -906,7 +938,7 @@ pub fn listConcat(list_a: RocList, list_b: RocList, alignment: u32, element_widt
pub fn listReplaceInPlace(
list: RocList,
index: usize,
index: u64,
element: Opaque,
element_width: usize,
out_element: ?[*]u8,
@ -916,14 +948,15 @@ pub fn listReplaceInPlace(
// at the time of writing, the function is implemented roughly as
// `if inBounds then LowLevelListReplace input index item else input`
// so we don't do a bounds check here. Hence, the list is also non-empty,
// because inserting into an empty list is always out of bounds
return listReplaceInPlaceHelp(list, index, element, element_width, out_element);
// because inserting into an empty list is always out of bounds,
// and it's always safe to cast index to usize.
return listReplaceInPlaceHelp(list, @as(usize, @intCast(index)), element, element_width, out_element);
}
pub fn listReplace(
list: RocList,
alignment: u32,
index: usize,
index: u64,
element: Opaque,
element_width: usize,
out_element: ?[*]u8,
@ -933,8 +966,9 @@ pub fn listReplace(
// at the time of writing, the function is implemented roughly as
// `if inBounds then LowLevelListReplace input index item else input`
// so we don't do a bounds check here. Hence, the list is also non-empty,
// because inserting into an empty list is always out of bounds
return listReplaceInPlaceHelp(list.makeUnique(alignment, element_width), index, element, element_width, out_element);
// because inserting into an empty list is always out of bounds,
// and it's always safe to cast index to usize.
return listReplaceInPlaceHelp(list.makeUnique(alignment, element_width), @as(usize, @intCast(index)), element, element_width, out_element);
}
inline fn listReplaceInPlaceHelp(
@ -962,6 +996,14 @@ pub fn listIsUnique(
return list.isEmpty() or list.isUnique();
}
pub fn listClone(
list: RocList,
alignment: u32,
element_width: usize,
) callconv(.C) RocList {
return list.makeUnique(alignment, element_width);
}
pub fn listCapacity(
list: RocList,
) callconv(.C) usize {

View file

@ -36,6 +36,7 @@ comptime {
exportDecFn(dec.fromStr, "from_str");
exportDecFn(dec.fromU64C, "from_u64");
exportDecFn(dec.logC, "log");
exportDecFn(dec.powC, "pow");
exportDecFn(dec.mulC, "mul_with_overflow");
exportDecFn(dec.mulOrPanicC, "mul_or_panic");
exportDecFn(dec.mulSaturatedC, "mul_saturated");
@ -48,10 +49,15 @@ comptime {
exportDecFn(dec.tanC, "tan");
exportDecFn(dec.toF64, "to_f64");
exportDecFn(dec.toI128, "to_i128");
exportDecFn(dec.fromI128, "from_i128");
exportDecFn(dec.toStr, "to_str");
inline for (INTEGERS) |T| {
dec.exportFromInt(T, ROC_BUILTINS ++ ".dec.from_int.");
dec.exportRound(T, ROC_BUILTINS ++ ".dec.round.");
dec.exportFloor(T, ROC_BUILTINS ++ ".dec.floor.");
dec.exportCeiling(T, ROC_BUILTINS ++ ".dec.ceiling.");
}
}
@ -75,6 +81,7 @@ comptime {
exportListFn(list.listReplaceInPlace, "replace_in_place");
exportListFn(list.listSwap, "swap");
exportListFn(list.listIsUnique, "is_unique");
exportListFn(list.listClone, "clone");
exportListFn(list.listCapacity, "capacity");
exportListFn(list.listAllocationPtr, "allocation_ptr");
exportListFn(list.listReleaseExcessCapacity, "release_excess_capacity");
@ -89,11 +96,6 @@ const FLOATS = [_]type{ f32, f64 };
const NUMBERS = INTEGERS ++ FLOATS;
comptime {
exportNumFn(num.bytesToU16C, "bytes_to_u16");
exportNumFn(num.bytesToU32C, "bytes_to_u32");
exportNumFn(num.bytesToU64C, "bytes_to_u64");
exportNumFn(num.bytesToU128C, "bytes_to_u128");
exportNumFn(num.shiftRightZeroFillI128, "shift_right_zero_fill.i128");
exportNumFn(num.shiftRightZeroFillU128, "shift_right_zero_fill.u128");
@ -109,19 +111,10 @@ comptime {
exportNumFn(num.lessThanOrEqualU128, "less_than_or_equal.u128");
exportNumFn(num.greaterThanU128, "greater_than.u128");
exportNumFn(num.greaterThanOrEqualU128, "greater_than_or_equal.u128");
exportNumFn(num.compareI128, "compare.i128");
exportNumFn(num.compareU128, "compare.u128");
exportNumFn(num.lessThanI128, "less_than.i128");
exportNumFn(num.lessThanOrEqualI128, "less_than_or_equal.i128");
exportNumFn(num.greaterThanI128, "greater_than.i128");
exportNumFn(num.greaterThanOrEqualI128, "greater_than_or_equal.i128");
exportNumFn(num.lessThanU128, "less_than.u128");
exportNumFn(num.lessThanOrEqualU128, "less_than_or_equal.u128");
exportNumFn(num.greaterThanU128, "greater_than.u128");
exportNumFn(num.greaterThanOrEqualU128, "greater_than_or_equal.u128");
exportNumFn(num.f32ToParts, "f32_to_parts");
exportNumFn(num.f64ToParts, "f64_to_parts");
exportNumFn(num.f32FromParts, "f32_from_parts");
exportNumFn(num.f64FromParts, "f64_from_parts");
inline for (INTEGERS, 0..) |T, i| {
num.exportPow(T, ROC_BUILTINS ++ "." ++ NUM ++ ".pow_int.");
@ -134,6 +127,9 @@ comptime {
num.exportCeiling(f32, T, ROC_BUILTINS ++ "." ++ NUM ++ ".ceiling_f32.");
num.exportCeiling(f64, T, ROC_BUILTINS ++ "." ++ NUM ++ ".ceiling_f64.");
num.exportNumToFloatCast(T, f32, ROC_BUILTINS ++ "." ++ NUM ++ ".num_to_float_cast_f32.");
num.exportNumToFloatCast(T, f64, ROC_BUILTINS ++ "." ++ NUM ++ ".num_to_float_cast_f64.");
num.exportAddWithOverflow(T, ROC_BUILTINS ++ "." ++ NUM ++ ".add_with_overflow.");
num.exportAddOrPanic(T, ROC_BUILTINS ++ "." ++ NUM ++ ".add_or_panic.");
num.exportAddSaturatedInt(T, ROC_BUILTINS ++ "." ++ NUM ++ ".add_saturated.");
@ -190,34 +186,28 @@ comptime {
const str = @import("str.zig");
comptime {
exportStrFn(str.init, "init");
exportStrFn(str.strToScalarsC, "to_scalars");
exportStrFn(str.strSplit, "str_split");
exportStrFn(str.countSegments, "count_segments");
exportStrFn(str.countGraphemeClusters, "count_grapheme_clusters");
exportStrFn(str.countUtf8Bytes, "count_utf8_bytes");
exportStrFn(str.isEmpty, "is_empty");
exportStrFn(str.getCapacity, "capacity");
exportStrFn(str.startsWith, "starts_with");
exportStrFn(str.startsWithScalar, "starts_with_scalar");
exportStrFn(str.endsWith, "ends_with");
exportStrFn(str.strConcatC, "concat");
exportStrFn(str.strJoinWithC, "joinWith");
exportStrFn(str.strNumberOfBytes, "number_of_bytes");
exportStrFn(str.strEqual, "equal");
exportStrFn(str.substringUnsafe, "substring_unsafe");
exportStrFn(str.getUnsafe, "get_unsafe");
exportStrFn(str.reserve, "reserve");
exportStrFn(str.getScalarUnsafe, "get_scalar_unsafe");
exportStrFn(str.appendScalar, "append_scalar");
exportStrFn(str.substringUnsafeC, "substring_unsafe");
exportStrFn(str.getUnsafeC, "get_unsafe");
exportStrFn(str.reserveC, "reserve");
exportStrFn(str.strToUtf8C, "to_utf8");
exportStrFn(str.fromUtf8RangeC, "from_utf8_range");
exportStrFn(str.repeat, "repeat");
exportStrFn(str.fromUtf8C, "from_utf8");
exportStrFn(str.repeatC, "repeat");
exportStrFn(str.strTrim, "trim");
exportStrFn(str.strTrimStart, "trim_start");
exportStrFn(str.strTrimEnd, "trim_end");
exportStrFn(str.strCloneTo, "clone_to");
exportStrFn(str.withCapacity, "with_capacity");
exportStrFn(str.strGraphemes, "graphemes");
exportStrFn(str.withCapacityC, "with_capacity");
exportStrFn(str.strAllocationPtr, "allocation_ptr");
exportStrFn(str.strReleaseExcessCapacity, "release_excess_capacity");

View file

@ -15,6 +15,18 @@ pub fn NumParseResult(comptime T: type) type {
};
}
pub const F32Parts = extern struct {
fraction: u32,
exponent: u8,
sign: bool,
};
pub const F64Parts = extern struct {
fraction: u64,
exponent: u16,
sign: bool,
};
pub const U256 = struct {
hi: u128,
lo: u128,
@ -86,6 +98,15 @@ pub fn exportParseFloat(comptime T: type, comptime name: []const u8) void {
@export(f, .{ .name = name ++ @typeName(T), .linkage = .Strong });
}
pub fn exportNumToFloatCast(comptime T: type, comptime F: type, comptime name: []const u8) void {
comptime var f = struct {
fn func(x: T) callconv(.C) F {
return @floatFromInt(x);
}
}.func;
@export(f, .{ .name = name ++ @typeName(T), .linkage = .Strong });
}
pub fn exportPow(comptime T: type, comptime name: []const u8) void {
comptime var f = struct {
fn func(base: T, exp: T) callconv(.C) T {
@ -274,42 +295,6 @@ pub fn exportToIntCheckingMaxAndMin(comptime From: type, comptime To: type, comp
@export(f, .{ .name = name ++ @typeName(From), .linkage = .Strong });
}
pub fn bytesToU16C(arg: RocList, position: usize) callconv(.C) u16 {
return @call(.always_inline, bytesToU16, .{ arg, position });
}
fn bytesToU16(arg: RocList, position: usize) u16 {
const bytes = @as([*]const u8, @ptrCast(arg.bytes));
return @as(u16, @bitCast([_]u8{ bytes[position], bytes[position + 1] }));
}
pub fn bytesToU32C(arg: RocList, position: usize) callconv(.C) u32 {
return @call(.always_inline, bytesToU32, .{ arg, position });
}
fn bytesToU32(arg: RocList, position: usize) u32 {
const bytes = @as([*]const u8, @ptrCast(arg.bytes));
return @as(u32, @bitCast([_]u8{ bytes[position], bytes[position + 1], bytes[position + 2], bytes[position + 3] }));
}
pub fn bytesToU64C(arg: RocList, position: usize) callconv(.C) u64 {
return @call(.always_inline, bytesToU64, .{ arg, position });
}
fn bytesToU64(arg: RocList, position: usize) u64 {
const bytes = @as([*]const u8, @ptrCast(arg.bytes));
return @as(u64, @bitCast([_]u8{ bytes[position], bytes[position + 1], bytes[position + 2], bytes[position + 3], bytes[position + 4], bytes[position + 5], bytes[position + 6], bytes[position + 7] }));
}
pub fn bytesToU128C(arg: RocList, position: usize) callconv(.C) u128 {
return @call(.always_inline, bytesToU128, .{ arg, position });
}
fn bytesToU128(arg: RocList, position: usize) u128 {
const bytes = @as([*]const u8, @ptrCast(arg.bytes));
return @as(u128, @bitCast([_]u8{ bytes[position], bytes[position + 1], bytes[position + 2], bytes[position + 3], bytes[position + 4], bytes[position + 5], bytes[position + 6], bytes[position + 7], bytes[position + 8], bytes[position + 9], bytes[position + 10], bytes[position + 11], bytes[position + 12], bytes[position + 13], bytes[position + 14], bytes[position + 15] }));
}
fn isMultipleOf(comptime T: type, lhs: T, rhs: T) bool {
if (rhs == 0 or rhs == -1) {
// lhs is a multiple of rhs iff
@ -657,3 +642,29 @@ pub fn exportCountOneBits(comptime T: type, comptime name: []const u8) void {
}.func;
@export(f, .{ .name = name ++ @typeName(T), .linkage = .Strong });
}
pub fn f32ToParts(self: f32) callconv(.C) F32Parts {
const u32Value = @as(u32, @bitCast(self));
return F32Parts{
.fraction = u32Value & 0x7fffff,
.exponent = @truncate(u32Value >> 23 & 0xff),
.sign = u32Value >> 31 & 1 == 1,
};
}
pub fn f64ToParts(self: f64) callconv(.C) F64Parts {
const u64Value = @as(u64, @bitCast(self));
return F64Parts{
.fraction = u64Value & 0xfffffffffffff,
.exponent = @truncate(u64Value >> 52 & 0x7ff),
.sign = u64Value >> 63 & 1 == 1,
};
}
pub fn f32FromParts(parts: F32Parts) callconv(.C) f32 {
return @as(f32, @bitCast(parts.fraction & 0x7fffff | (@as(u32, parts.exponent) << 23) | (@as(u32, @intFromBool(parts.sign)) << 31)));
}
pub fn f64FromParts(parts: F64Parts) callconv(.C) f64 {
return @as(f64, @bitCast(parts.fraction & 0xfffffffffffff | (@as(u64, parts.exponent & 0x7ff) << 52) | (@as(u64, @intFromBool(parts.sign)) << 63)));
}
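`f32ToParts`/`f64ToParts` are straight IEEE-754 bit surgery: 23/52 fraction bits, 8/11 exponent bits, one sign bit. A short Rust sketch of the f32 case using `to_bits`/`from_bits`, to make the masks concrete:

```rust
// IEEE-754 single precision: 1 sign bit, 8 exponent bits, 23 fraction bits,
// matching the masks used above.
#[derive(Debug, PartialEq)]
struct F32Parts {
    fraction: u32,
    exponent: u8,
    sign: bool,
}

fn f32_to_parts(x: f32) -> F32Parts {
    let bits = x.to_bits();
    F32Parts {
        fraction: bits & 0x7f_ffff,
        exponent: ((bits >> 23) & 0xff) as u8,
        sign: (bits >> 31) & 1 == 1,
    }
}

fn f32_from_parts(p: F32Parts) -> f32 {
    f32::from_bits((p.fraction & 0x7f_ffff) | ((p.exponent as u32) << 23) | ((p.sign as u32) << 31))
}

fn main() {
    let parts = f32_to_parts(-1.5);
    // -1.5 = (-1)^1 * 1.5 * 2^0, so the biased exponent is 127 and the fraction is 0b100...0.
    assert_eq!(parts, F32Parts { fraction: 1 << 22, exponent: 127, sign: true });
    assert_eq!(f32_from_parts(parts), -1.5);
}
```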

View file

@ -1,6 +1,5 @@
const utils = @import("utils.zig");
const RocList = @import("list.zig").RocList;
const grapheme = @import("helpers/grapheme.zig");
const UpdateMode = utils.UpdateMode;
const std = @import("std");
const mem = std.mem;
@ -552,242 +551,6 @@ pub fn strNumberOfBytes(string: RocStr) callconv(.C) usize {
return string.len();
}
// Str.toScalars
pub fn strToScalarsC(str: RocStr) callconv(.C) RocList {
return @call(.always_inline, strToScalars, .{str});
}
fn strToScalars(string: RocStr) callconv(.C) RocList {
const len = string.len();
if (len == 0) {
return RocList.empty();
}
var capacity = len;
if (!string.isSmallStr()) {
capacity = string.getCapacity();
}
// For purposes of preallocation, assume the number of code points is the same
// as the number of bytes. Since every code point occupies at least one byte,
// this might be longer than necessary, but it definitely should not require a
// second allocation.
var answer = RocList.allocate(@alignOf(u32), capacity, @sizeOf(u32));
// `orelse unreachable` is fine here, because we already did an early
// return to verify the string was nonempty.
var answer_elems = answer.elements(u32) orelse unreachable;
var src_index: usize = 0;
var answer_index: usize = 0;
while (src_index < len) {
src_index += writeNextScalar(string, src_index, answer_elems, answer_index);
answer_index += 1;
}
answer.length = answer_index;
return answer;
}
// Given a non-empty RocStr, and a src_index byte index into that string,
// and a destination [*]u32, and an index into that destination,
// Parses the next scalar value out of the string (at the given byte index),
// writes it into the destination, and returns the number of bytes parsed.
inline fn writeNextScalar(non_empty_string: RocStr, src_index: usize, dest: [*]u32, dest_index: usize) usize {
const utf8_byte = non_empty_string.getUnchecked(src_index);
// How UTF-8 bytes work:
// https://docs.teradata.com/r/Teradata-Database-International-Character-Set-Support/June-2017/Client-Character-Set-Options/UTF8-Client-Character-Set-Support/UTF8-Multibyte-Sequences
if (utf8_byte <= 127) {
// It's an ASCII character. Copy it over directly.
dest[dest_index] = @as(u32, @intCast(utf8_byte));
return 1;
} else if (utf8_byte >> 5 == 0b0000_0110) {
// Its three high order bits are 110, so this is a two-byte sequence.
// Example:
// utf-8: 1100 1111 1011 0001
// code pt: 0000 0011 1111 0001 (decimal: 1009)
// Discard the first byte's high order bits of 110.
var code_pt = @as(u32, @intCast(utf8_byte & 0b0001_1111));
// Discard the second byte's high order bits of 10.
code_pt <<= 6;
code_pt |= non_empty_string.getUnchecked(src_index + 1) & 0b0011_1111;
dest[dest_index] = code_pt;
return 2;
} else if (utf8_byte >> 4 == 0b0000_1110) {
// Its four high order bits are 1110, so this is a three-byte sequence.
// Discard the first byte's high order bits of 1110.
var code_pt = @as(u32, @intCast(utf8_byte & 0b0000_1111));
// Discard the second byte's high order bits of 10.
code_pt <<= 6;
code_pt |= non_empty_string.getUnchecked(src_index + 1) & 0b0011_1111;
// Discard the third byte's high order bits of 10 (same as second byte).
code_pt <<= 6;
code_pt |= non_empty_string.getUnchecked(src_index + 2) & 0b0011_1111;
dest[dest_index] = code_pt;
return 3;
} else {
// This must be a four-byte sequence, so the five high order bits should be 11110.
// Discard the first byte's high order bits of 11110.
var code_pt = @as(u32, @intCast(utf8_byte & 0b0000_0111));
// Discard the second byte's high order bits of 10.
code_pt <<= 6;
code_pt |= non_empty_string.getUnchecked(src_index + 1) & 0b0011_1111;
// Discard the third byte's high order bits of 10 (same as second byte).
code_pt <<= 6;
code_pt |= non_empty_string.getUnchecked(src_index + 2) & 0b0011_1111;
// Discard the fourth byte's high order bits of 10 (same as second and third).
code_pt <<= 6;
code_pt |= non_empty_string.getUnchecked(src_index + 3) & 0b0011_1111;
dest[dest_index] = code_pt;
return 4;
}
}
test "strToScalars: empty string" {
const str = RocStr.fromSlice("");
defer RocStr.decref(str);
const expected = RocList.empty();
const actual = strToScalars(str);
defer actual.decref(@sizeOf(u32));
try expect(RocList.eql(actual, expected));
}
test "strToScalars: One ASCII char" {
const str = RocStr.fromSlice("R");
defer RocStr.decref(str);
const expected_array = [_]u32{82};
const expected = RocList.fromSlice(u32, expected_array[0..expected_array.len]);
defer expected.decref(@sizeOf(u32));
const actual = strToScalars(str);
defer actual.decref(@sizeOf(u32));
try expect(RocList.eql(actual, expected));
}
test "strToScalars: Multiple ASCII chars" {
const str = RocStr.fromSlice("Roc!");
defer RocStr.decref(str);
const expected_array = [_]u32{ 82, 111, 99, 33 };
const expected = RocList.fromSlice(u32, expected_array[0..expected_array.len]);
defer expected.decref(@sizeOf(u32));
const actual = strToScalars(str);
defer actual.decref(@sizeOf(u32));
try expect(RocList.eql(actual, expected));
}
test "strToScalars: One 2-byte UTF-8 character" {
const str = RocStr.fromSlice("é");
defer RocStr.decref(str);
const expected_array = [_]u32{233};
const expected = RocList.fromSlice(u32, expected_array[0..expected_array.len]);
defer expected.decref(@sizeOf(u32));
const actual = strToScalars(str);
defer actual.decref(@sizeOf(u32));
try expect(RocList.eql(actual, expected));
}
test "strToScalars: Multiple 2-byte UTF-8 characters" {
const str = RocStr.fromSlice("Cäfés");
defer RocStr.decref(str);
const expected_array = [_]u32{ 67, 228, 102, 233, 115 };
const expected = RocList.fromSlice(u32, expected_array[0..expected_array.len]);
defer expected.decref(@sizeOf(u32));
const actual = strToScalars(str);
defer actual.decref(@sizeOf(u32));
try expect(RocList.eql(actual, expected));
}
test "strToScalars: One 3-byte UTF-8 character" {
const str = RocStr.fromSlice("鹏");
defer RocStr.decref(str);
const expected_array = [_]u32{40527};
const expected = RocList.fromSlice(u32, expected_array[0..expected_array.len]);
defer expected.decref(@sizeOf(u32));
const actual = strToScalars(str);
defer actual.decref(@sizeOf(u32));
try expect(RocList.eql(actual, expected));
}
test "strToScalars: Multiple 3-byte UTF-8 characters" {
const str = RocStr.fromSlice("鹏很有趣");
defer RocStr.decref(str);
const expected_array = [_]u32{ 40527, 24456, 26377, 36259 };
const expected = RocList.fromSlice(u32, expected_array[0..expected_array.len]);
defer expected.decref(@sizeOf(u32));
const actual = strToScalars(str);
defer actual.decref(@sizeOf(u32));
try expect(RocList.eql(actual, expected));
}
test "strToScalars: One 4-byte UTF-8 character" {
// from https://design215.com/toolbox/utf8-4byte-characters.php
const str = RocStr.fromSlice("𒀀");
defer RocStr.decref(str);
const expected_array = [_]u32{73728};
const expected = RocList.fromSlice(u32, expected_array[0..expected_array.len]);
defer expected.decref(@sizeOf(u32));
const actual = strToScalars(str);
defer actual.decref(@sizeOf(u32));
try expect(RocList.eql(actual, expected));
}
test "strToScalars: Multiple 4-byte UTF-8 characters" {
// from https://design215.com/toolbox/utf8-4byte-characters.php
const str = RocStr.fromSlice("𒀀𒀁");
defer RocStr.decref(str);
const expected_array = [_]u32{ 73728, 73729 };
const expected = RocList.fromSlice(u32, expected_array[0..expected_array.len]);
defer expected.decref(@sizeOf(u32));
const actual = strToScalars(str);
defer actual.decref(@sizeOf(u32));
try expect(RocList.eql(actual, expected));
}
// Str.fromInt
pub fn exportFromInt(comptime T: type, comptime name: []const u8) void {
comptime var f = struct {
@@ -862,64 +625,27 @@ fn initFromBigStr(slice_bytes: [*]u8, len: usize, alloc_ptr: usize) RocStr {
}
fn strSplitHelp(array: [*]RocStr, string: RocStr, delimiter: RocStr) void {
var ret_array_index: usize = 0;
var slice_start_index: usize = 0;
var str_index: usize = 0;
const bytes = string.asU8ptr();
const len = string.len();
const alloc_ptr = @intFromPtr(string.getAllocationPtr()) >> 1;
const init_fn = if (string.isSmallStr())
&initFromSmallStr
else
&initFromBigStr;
const delimiter_bytes_ptrs = delimiter.asU8ptr();
const delimiter_len = delimiter.len();
if (len >= delimiter_len and delimiter_len > 0) {
const end_index: usize = len - delimiter_len + 1;
while (str_index <= end_index) {
var delimiter_index: usize = 0;
var matches_delimiter = true;
while (delimiter_index < delimiter_len) {
var delimiterChar = delimiter_bytes_ptrs[delimiter_index];
if (str_index + delimiter_index >= len) {
matches_delimiter = false;
break;
}
var strChar = bytes[str_index + delimiter_index];
if (delimiterChar != strChar) {
matches_delimiter = false;
break;
}
delimiter_index += 1;
}
if (matches_delimiter) {
const segment_len: usize = str_index - slice_start_index;
array[ret_array_index] = init_fn(@constCast(bytes) + slice_start_index, segment_len, alloc_ptr);
slice_start_index = str_index + delimiter_len;
ret_array_index += 1;
str_index += delimiter_len;
} else {
str_index += 1;
}
}
if (delimiter.len() == 0) {
string.incref(1);
array[0] = string;
return;
}
array[ret_array_index] = init_fn(@constCast(bytes) + slice_start_index, len - slice_start_index, alloc_ptr);
var it = std.mem.split(u8, string.asSlice(), delimiter.asSlice());
if (!string.isSmallStr()) {
// Correct refcount for all of the splits made.
string.incref(ret_array_index + 1);
var i: usize = 0;
var offset: usize = 0;
while (it.next()) |zig_slice| {
const roc_slice = substringUnsafe(string, offset, zig_slice.len);
array[i] = roc_slice;
i += 1;
offset += zig_slice.len + delimiter.len();
}
// Correct refcount for all of the splits made.
string.incref(i); // i == array.len()
}
test "strSplitHelp: empty delimiter" {
@@ -1245,44 +971,15 @@ test "strSplitHelp: overlapping delimiter 2" {
// needs to be broken into, so that we can allocate an array
// of that size. It always returns at least 1.
pub fn countSegments(string: RocStr, delimiter: RocStr) callconv(.C) usize {
const bytes = string.asU8ptr();
const len = string.len();
const delimiter_bytes_ptrs = delimiter.asU8ptr();
const delimiter_len = delimiter.len();
var count: usize = 1;
if (len >= delimiter_len and delimiter_len > 0) {
var str_index: usize = 0;
const end_cond: usize = len - delimiter_len + 1;
while (str_index < end_cond) {
var delimiter_index: usize = 0;
var matches_delimiter = true;
while (delimiter_index < delimiter_len) {
const delimiterChar = delimiter_bytes_ptrs[delimiter_index];
const strChar = bytes[str_index + delimiter_index];
if (delimiterChar != strChar) {
matches_delimiter = false;
break;
}
delimiter_index += 1;
}
if (matches_delimiter) {
count += 1;
str_index += delimiter_len;
} else {
str_index += 1;
}
}
if (delimiter.isEmpty()) {
return 1;
}
var it = std.mem.split(u8, string.asSlice(), delimiter.asSlice());
var count: usize = 0;
while (it.next()) |_| : (count += 1) {}
return count;
}
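// A brief sketch of the "at least 1" guarantee described above, assuming the
// RocStr helpers and `expectEqual` already imported in this file: a delimiter
// that never matches still yields exactly one segment.
test "countSegments: sketch of the at-least-one guarantee" {
    const str = RocStr.fromSlice("abc");
    defer str.decref();
    const delimiter = RocStr.fromSlice("z");
    defer delimiter.decref();
    try expectEqual(countSegments(str, delimiter), 1);
}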
@@ -1371,127 +1068,8 @@ test "countSegments: overlapping delimiter 2" {
try expectEqual(segments_count, 3);
}
// Str.countGraphemeClusters
pub fn countGraphemeClusters(string: RocStr) callconv(.C) usize {
if (string.isEmpty()) {
return 0;
}
const bytes_len = string.len();
const bytes_ptr = string.asU8ptr();
var bytes = bytes_ptr[0..bytes_len];
var iter = (unicode.Utf8View.init(bytes) catch unreachable).iterator();
var count: usize = 0;
var grapheme_break_state: ?grapheme.BoundClass = null;
var grapheme_break_state_ptr = &grapheme_break_state;
var opt_last_codepoint: ?u21 = null;
while (iter.nextCodepoint()) |cur_codepoint| {
if (opt_last_codepoint) |last_codepoint| {
var did_break = grapheme.isGraphemeBreak(last_codepoint, cur_codepoint, grapheme_break_state_ptr);
if (did_break) {
count += 1;
grapheme_break_state = null;
}
}
opt_last_codepoint = cur_codepoint;
}
// If there are no breaks, but the str is not empty, then there
// must be a single grapheme
if (bytes_len != 0) {
count += 1;
}
return count;
}
// Str.graphemes
pub fn strGraphemes(roc_str: RocStr) callconv(.C) RocList {
var break_state: ?grapheme.BoundClass = null;
var opt_last_codepoint: ?u21 = null;
var index: usize = 0;
var last_codepoint_len: u8 = 0;
const alloc_ptr = @intFromPtr(roc_str.getAllocationPtr()) >> 1;
const init_fn = if (roc_str.isSmallStr())
&initFromSmallStr
else
&initFromBigStr;
var result = RocList.allocate(@alignOf(RocStr), countGraphemeClusters(roc_str), @sizeOf(RocStr));
const graphemes = result.elements(RocStr) orelse return result;
var slice = roc_str.asSlice();
var iter = (unicode.Utf8View.init(slice) catch unreachable).iterator();
while (iter.nextCodepoint()) |cur_codepoint| {
const cur_codepoint_len = unicode.utf8CodepointSequenceLength(cur_codepoint) catch unreachable;
if (opt_last_codepoint) |last_codepoint| {
var did_break = grapheme.isGraphemeBreak(last_codepoint, cur_codepoint, &break_state);
if (did_break) {
graphemes[index] = init_fn(@constCast(slice.ptr), last_codepoint_len, alloc_ptr);
slice = slice[last_codepoint_len..];
index += 1;
break_state = null;
last_codepoint_len = 0;
}
}
last_codepoint_len += cur_codepoint_len;
opt_last_codepoint = cur_codepoint;
}
// Append last grapheme
graphemes[index] = init_fn(@constCast(slice.ptr), slice.len, alloc_ptr);
if (!roc_str.isSmallStr()) {
// Correct refcount for all of the splits made.
roc_str.incref(index + 1);
}
return result;
}
// These tests exercise both countGraphemeClusters() and strGraphemes().
fn graphemesTest(input: []const u8, expected: []const []const u8) !void {
const rocstr = RocStr.fromSlice(input);
defer rocstr.decref();
const count = countGraphemeClusters(rocstr);
try expectEqual(expected.len, count);
const graphemes = strGraphemes(rocstr);
defer graphemes.decref(@sizeOf(u8));
if (input.len == 0) return; // empty string
const elems = graphemes.elements(RocStr) orelse unreachable;
for (expected, 0..) |g, i| {
try std.testing.expectEqualStrings(g, elems[i].asSlice());
}
}
test "graphemes: empty string" {
try graphemesTest("", &.{});
}
test "graphemes: ascii characters" {
try graphemesTest("abcd", &.{ "a", "b", "c", "d" });
}
test "graphemes: utf8 characters" {
try graphemesTest("ãxā", &.{ "ã", "x", "ā" });
}
test "graphemes: emojis" {
try graphemesTest("🤔🤔🤔", &.{ "🤔", "🤔", "🤔" });
}
test "graphemes: emojis and utf8 characters" {
try graphemesTest("🤔å🤔¥🤔ç", &.{ "🤔", "å", "🤔", "¥", "🤔", "ç" });
}
test "graphemes: emojis, utf8, and ascii characters" {
try graphemesTest("6🤔å🤔e¥🤔çpp", &.{ "6", "🤔", "å", "🤔", "e", "¥", "🤔", "ç", "p", "p" });
}
pub fn countUtf8Bytes(string: RocStr) callconv(.C) usize {
return string.len();
pub fn countUtf8Bytes(string: RocStr) callconv(.C) u64 {
return @intCast(string.len());
}
pub fn isEmpty(string: RocStr) callconv(.C) bool {
@@ -1502,7 +1080,14 @@ pub fn getCapacity(string: RocStr) callconv(.C) usize {
return string.getCapacity();
}
pub fn substringUnsafe(string: RocStr, start: usize, length: usize) callconv(.C) RocStr {
pub fn substringUnsafeC(string: RocStr, start_u64: u64, length_u64: u64) callconv(.C) RocStr {
const start: usize = @intCast(start_u64);
const length: usize = @intCast(length_u64);
return substringUnsafe(string, start, length);
}
fn substringUnsafe(string: RocStr, start: usize, length: usize) RocStr {
if (string.isSmallStr()) {
if (start == 0) {
var output = string;
@@ -1534,8 +1119,8 @@ pub fn substringUnsafe(string: RocStr, start: usize, length: usize) callconv(.C)
return RocStr.empty();
}
pub fn getUnsafe(string: RocStr, index: usize) callconv(.C) u8 {
return string.getUnchecked(index);
pub fn getUnsafeC(string: RocStr, index: u64) callconv(.C) u8 {
return string.getUnchecked(@intCast(index));
}
test "substringUnsafe: start" {
@@ -1598,7 +1183,8 @@ pub fn startsWith(string: RocStr, prefix: RocStr) callconv(.C) bool {
}
// Str.repeat
pub fn repeat(string: RocStr, count: usize) callconv(.C) RocStr {
pub fn repeatC(string: RocStr, count_u64: u64) callconv(.C) RocStr {
const count: usize = @intCast(count_u64);
const bytes_len = string.len();
const bytes_ptr = string.asU8ptr();
@@ -1614,44 +1200,6 @@ pub fn repeat(string: RocStr, count: usize) callconv(.C) RocStr {
return ret_string;
}
// Str.startsWithScalar
pub fn startsWithScalar(string: RocStr, prefix: u32) callconv(.C) bool {
const len = string.len();
if (len == 0) {
return false;
}
// Write this (non-empty) string's first scalar into `first_scalar`
var first_scalar: [1]u32 = undefined;
_ = writeNextScalar(string, 0, &first_scalar, 0);
// Return whether `first_scalar` equals `prefix`
return @as(*u32, @ptrCast(&first_scalar)).* == prefix;
}
test "startsWithScalar: empty string" {
const whole = RocStr.empty();
const prefix: u32 = 'x';
try expect(!startsWithScalar(whole, prefix));
}
test "startsWithScalar: ascii char" {
const whole = RocStr.fromSlice("foobar");
const prefix: u32 = 'f';
try expect(startsWithScalar(whole, prefix));
}
test "startsWithScalar: emoji" {
const yes = RocStr.fromSlice("💖foobar");
const no = RocStr.fromSlice("foobar");
const prefix: u32 = '💖';
try expect(startsWithScalar(yes, prefix));
try expect(!startsWithScalar(no, prefix));
}
test "startsWith: foo starts with fo" {
const foo = RocStr.fromSlice("foo");
const fo = RocStr.fromSlice("fo");
@@ -1891,29 +1439,25 @@ inline fn strToBytes(arg: RocStr) RocList {
}
const FromUtf8Result = extern struct {
byte_index: usize,
byte_index: u64,
string: RocStr,
is_ok: bool,
problem_code: Utf8ByteProblem,
};
const CountAndStart = extern struct {
count: usize,
start: usize,
};
pub fn fromUtf8RangeC(
pub fn fromUtf8C(
list: RocList,
start: usize,
count: usize,
update_mode: UpdateMode,
) callconv(.C) FromUtf8Result {
return fromUtf8Range(list, start, count, update_mode);
return fromUtf8(list, update_mode);
}
pub fn fromUtf8Range(arg: RocList, start: usize, count: usize, update_mode: UpdateMode) FromUtf8Result {
if (arg.len() == 0 or count == 0) {
arg.decref(RocStr.alignment);
pub fn fromUtf8(
list: RocList,
update_mode: UpdateMode,
) FromUtf8Result {
if (list.len() == 0) {
list.decref(1); // Alignment 1 for List U8
return FromUtf8Result{
.is_ok = true,
.string = RocStr.empty(),
@@ -1921,11 +1465,11 @@ pub fn fromUtf8Range(arg: RocList, start: usize, count: usize, update_mode: Upda
.problem_code = Utf8ByteProblem.InvalidStartByte,
};
}
const bytes = @as([*]const u8, @ptrCast(arg.bytes))[start .. start + count];
const bytes = @as([*]const u8, @ptrCast(list.bytes))[0..list.len()];
if (isValidUnicode(bytes)) {
// Make a seamless slice of the input.
const string = RocStr.fromSubListUnsafe(arg, start, count, update_mode);
const string = RocStr.fromSubListUnsafe(list, 0, list.len(), update_mode);
return FromUtf8Result{
.is_ok = true,
.string = string,
@@ -1933,25 +1477,25 @@ pub fn fromUtf8Range(arg: RocList, start: usize, count: usize, update_mode: Upda
.problem_code = Utf8ByteProblem.InvalidStartByte,
};
} else {
const temp = errorToProblem(@as([*]u8, @ptrCast(arg.bytes)), arg.length);
const temp = errorToProblem(bytes);
// decref the list
arg.decref(RocStr.alignment);
list.decref(1); // Alignment 1 for List U8
return FromUtf8Result{
.is_ok = false,
.string = RocStr.empty(),
.byte_index = temp.index,
.byte_index = @intCast(temp.index),
.problem_code = temp.problem,
};
}
}
fn errorToProblem(bytes: [*]u8, length: usize) struct { index: usize, problem: Utf8ByteProblem } {
fn errorToProblem(bytes: []const u8) struct { index: usize, problem: Utf8ByteProblem } {
const len = bytes.len;
var index: usize = 0;
while (index < length) {
const nextNumBytes = numberOfNextCodepointBytes(bytes, length, index) catch |err| {
while (index < len) {
const nextNumBytes = numberOfNextCodepointBytes(bytes, index) catch |err| {
switch (err) {
error.UnexpectedEof => {
return .{ .index = index, .problem = Utf8ByteProblem.UnexpectedEndOfSequence };
@@ -2025,13 +1569,13 @@ const Utf8DecodeError = error{
// Essentially unicode.utf8ValidateSlice -> https://github.com/ziglang/zig/blob/0.7.x/lib/std/unicode.zig#L156
// but only for the next codepoint from the index. Then we return the number of bytes of that codepoint.
// TODO: we only ever use the values 0-4, so can we use smaller int than `usize`?
pub fn numberOfNextCodepointBytes(ptr: [*]u8, len: usize, index: usize) Utf8DecodeError!usize {
const codepoint_len = try unicode.utf8ByteSequenceLength(ptr[index]);
pub fn numberOfNextCodepointBytes(bytes: []const u8, index: usize) Utf8DecodeError!usize {
const codepoint_len = try unicode.utf8ByteSequenceLength(bytes[index]);
const codepoint_end_index = index + codepoint_len;
if (codepoint_end_index > len) {
if (codepoint_end_index > bytes.len) {
return error.UnexpectedEof;
}
_ = try unicode.utf8Decode(ptr[index..codepoint_end_index]);
_ = try unicode.utf8Decode(bytes[index..codepoint_end_index]);
return codepoint_end_index - index;
}
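// A small sketch of the behavior described above, assuming `std` is imported as
// in the rest of this file: "é" is a two-byte sequence, and truncating it
// mid-codepoint reports an unexpected end of input.
test "numberOfNextCodepointBytes: sketch" {
    const bytes = [_]u8{ 0xC3, 0xA9 }; // UTF-8 encoding of "é"
    try std.testing.expectEqual(@as(usize, 2), try numberOfNextCodepointBytes(bytes[0..], 0));
    try std.testing.expectError(error.UnexpectedEof, numberOfNextCodepointBytes(bytes[0..1], 0));
}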
@@ -2047,11 +1591,11 @@ pub const Utf8ByteProblem = enum(u8) {
};
fn validateUtf8Bytes(bytes: [*]u8, length: usize) FromUtf8Result {
return fromUtf8Range(RocList{ .bytes = bytes, .length = length, .capacity_or_alloc_ptr = length }, 0, length, .Immutable);
return fromUtf8(RocList{ .bytes = bytes, .length = length, .capacity_or_alloc_ptr = length }, .Immutable);
}
fn validateUtf8BytesX(str: RocList) FromUtf8Result {
return fromUtf8Range(str, 0, str.len(), .Immutable);
return fromUtf8(str, .Immutable);
}
fn expectOk(result: FromUtf8Result) !void {
@@ -2068,7 +1612,7 @@ fn sliceHelp(bytes: [*]const u8, length: usize) RocList {
}
fn toErrUtf8ByteResponse(index: usize, problem: Utf8ByteProblem) FromUtf8Result {
return FromUtf8Result{ .is_ok = false, .string = RocStr.empty(), .byte_index = index, .problem_code = problem };
return FromUtf8Result{ .is_ok = false, .string = RocStr.empty(), .byte_index = @as(u64, @intCast(index)), .problem_code = problem };
}
// NOTE on memory: the validate function consumes a RC token of the input. Since
@@ -2130,7 +1674,7 @@ fn expectErr(list: RocList, index: usize, err: Utf8DecodeError, problem: Utf8Byt
const str_ptr = @as([*]u8, @ptrCast(list.bytes));
const len = list.length;
try expectError(err, numberOfNextCodepointBytes(str_ptr, len, index));
try expectError(err, numberOfNextCodepointBytes(str_ptr[0..len], index));
try expectEqual(toErrUtf8ByteResponse(index, problem), validateUtf8Bytes(str_ptr, len));
}
@@ -2761,80 +2305,13 @@ test "capacity: big string" {
try expect(data.getCapacity() >= data_bytes.len);
}
pub fn appendScalar(string: RocStr, scalar_u32: u32) callconv(.C) RocStr {
const scalar = @as(u21, @intCast(scalar_u32));
const width = std.unicode.utf8CodepointSequenceLength(scalar) catch unreachable;
var output = string.reallocate(string.len() + width);
var slice = output.asSliceWithCapacityMut();
_ = std.unicode.utf8Encode(scalar, slice[string.len() .. string.len() + width]) catch unreachable;
return output;
pub fn reserveC(string: RocStr, spare_u64: u64) callconv(.C) RocStr {
return reserve(string, @intCast(spare_u64));
}
test "appendScalar: small A" {
const A: []const u8 = "A";
const data_bytes = "hello";
var data = RocStr.init(data_bytes, data_bytes.len);
const actual = appendScalar(data, A[0]);
defer actual.decref();
const expected_bytes = "helloA";
const expected = RocStr.init(expected_bytes, expected_bytes.len);
defer expected.decref();
try expect(actual.eq(expected));
}
test "appendScalar: small 😀" {
const data_bytes = "hello";
var data = RocStr.init(data_bytes, data_bytes.len);
const actual = appendScalar(data, 0x1F600);
defer actual.decref();
const expected_bytes = "hello😀";
const expected = RocStr.init(expected_bytes, expected_bytes.len);
defer expected.decref();
try expect(actual.eq(expected));
}
test "appendScalar: big A" {
const A: []const u8 = "A";
const data_bytes = "a string so large that it must be heap-allocated";
var data = RocStr.init(data_bytes, data_bytes.len);
const actual = appendScalar(data, A[0]);
defer actual.decref();
const expected_bytes = "a string so large that it must be heap-allocatedA";
const expected = RocStr.init(expected_bytes, expected_bytes.len);
defer expected.decref();
try expect(actual.eq(expected));
}
test "appendScalar: big 😀" {
const data_bytes = "a string so large that it must be heap-allocated";
var data = RocStr.init(data_bytes, data_bytes.len);
const actual = appendScalar(data, 0x1F600);
defer actual.decref();
const expected_bytes = "a string so large that it must be heap-allocated😀";
const expected = RocStr.init(expected_bytes, expected_bytes.len);
defer expected.decref();
try expect(actual.eq(expected));
}
pub fn reserve(string: RocStr, spare: usize) callconv(.C) RocStr {
fn reserve(string: RocStr, spare: usize) RocStr {
const old_length = string.len();
if (string.getCapacity() >= old_length + spare) {
return string;
} else {
@@ -2844,32 +2321,12 @@ pub fn reserve(string: RocStr, spare: usize) callconv(.C) RocStr {
}
}
pub fn withCapacity(capacity: usize) callconv(.C) RocStr {
var str = RocStr.allocate(capacity);
pub fn withCapacityC(capacity: u64) callconv(.C) RocStr {
var str = RocStr.allocate(@intCast(capacity));
str.setLen(0);
return str;
}
pub fn getScalarUnsafe(string: RocStr, index: usize) callconv(.C) extern struct { bytesParsed: usize, scalar: u32 } {
const slice = string.asSlice();
const bytesParsed = @as(usize, @intCast(std.unicode.utf8ByteSequenceLength(slice[index]) catch unreachable));
const scalar = std.unicode.utf8Decode(slice[index .. index + bytesParsed]) catch unreachable;
return .{ .bytesParsed = bytesParsed, .scalar = @as(u32, @intCast(scalar)) };
}
test "getScalarUnsafe" {
const data_bytes = "A";
var data = RocStr.init(data_bytes, data_bytes.len);
const result = getScalarUnsafe(data, 0);
const expected = try std.unicode.utf8Decode("A");
try expectEqual(result.scalar, @as(u32, @intCast(expected)));
try expectEqual(result.bytesParsed, 1);
}
pub fn strCloneTo(
string: RocStr,
ptr: [*]u8,

View file

@@ -51,7 +51,7 @@ false = @Bool False
## gate. The infix operator `&&` can also be used as shorthand for
## `Bool.and`.
##
## ```
## ```roc
## expect (Bool.and Bool.true Bool.true) == Bool.true
## expect (Bool.true && Bool.true) == Bool.true
## expect (Bool.false && Bool.true) == Bool.false
@@ -66,7 +66,7 @@ false = @Bool False
## In these languages the compiler will skip evaluating the expression after the
## first operator under certain circumstances. For example, an expression like
## `enablePets && likesDogs user` would compile to:
## ```
## ```roc
## if enablePets then
## likesDogs user
## else
@@ -80,7 +80,7 @@ and : Bool, Bool -> Bool
## Returns `Bool.true` when either input is a `Bool.true`. This is equivalent to
## the logic [OR](https://en.wikipedia.org/wiki/Logical_disjunction) gate.
## The infix operator `||` can also be used as shorthand for `Bool.or`.
## ```
## ```roc
## expect (Bool.or Bool.false Bool.true) == Bool.true
## expect (Bool.true || Bool.true) == Bool.true
## expect (Bool.false || Bool.true) == Bool.true
@@ -98,7 +98,7 @@ or : Bool, Bool -> Bool
## Returns `Bool.false` when given `Bool.true`, and vice versa. This is
## equivalent to the logic [NOT](https://en.wikipedia.org/wiki/Negation)
## gate. The operator `!` can also be used as shorthand for `Bool.not`.
## ```
## ```roc
## expect (Bool.not Bool.false) == Bool.true
## expect (!Bool.false) == Bool.true
## ```
@@ -111,7 +111,7 @@ not : Bool -> Bool
##
## **Note** that `isNotEq` does not accept arguments whose types contain
## functions.
## ```
## ```roc
## expect (Bool.isNotEq Bool.false Bool.true) == Bool.true
## expect (Bool.false != Bool.false) == Bool.false
## expect "Apples" != "Oranges"

View file

@@ -10,13 +10,13 @@ interface Box
## the value from the stack to the heap. This may provide a performance
## optimization for advanced use cases with large values. A platform may require
## that some values are boxed.
## ```
## ```roc
## expect Box.unbox (Box.box "Stack Faster") == "Stack Faster"
## ```
box : a -> Box a
## Returns a boxed value.
## ```
## ```roc
## expect Box.unbox (Box.box "Stack Faster") == "Stack Faster"
## ```
unbox : Box a -> a

View file

@@ -44,7 +44,6 @@ interface Decode
I32,
I64,
I128,
Nat,
F32,
F64,
Dec,
@@ -60,7 +59,7 @@ DecodeError : [TooShort]
## This can be useful when creating a [custom](#custom) decoder or when
## using [fromBytesPartial](#fromBytesPartial), for example when writing unit
## tests such as:
## ```
## ```roc
## expect
## input = "\"hello\", " |> Str.toUtf8
## actual = Decode.fromBytesPartial input Json.json
@@ -104,7 +103,7 @@ DecoderFormatting implements
## `Skip` if the field is not a part of the decoded record.
##
## `finalizer` should produce the record value from the decoded `state`.
record : state, (state, Str -> [Keep (Decoder state fmt), Skip]), (state -> Result val DecodeError) -> Decoder val fmt where fmt implements DecoderFormatting
record : state, (state, Str -> [Keep (Decoder state fmt), Skip]), (state, fmt -> Result val DecodeError) -> Decoder val fmt where fmt implements DecoderFormatting
## `tuple state stepElem finalizer` decodes a tuple element-by-element.
##
@@ -113,12 +112,12 @@ DecoderFormatting implements
## index passed to `stepElem` is 0-indexed.
##
## `finalizer` should produce the tuple value from the decoded `state`.
tuple : state, (state, Nat -> [Next (Decoder state fmt), TooLong]), (state -> Result val DecodeError) -> Decoder val fmt where fmt implements DecoderFormatting
tuple : state, (state, U64 -> [Next (Decoder state fmt), TooLong]), (state -> Result val DecodeError) -> Decoder val fmt where fmt implements DecoderFormatting
## Build a custom [Decoder] function. For example, the implementation of
## `decodeBool` could be defined as follows:
##
## ```
## ```roc
## decodeBool = Decode.custom \bytes, @Json {} ->
## when bytes is
## ['f', 'a', 'l', 's', 'e', ..] -> { result: Ok Bool.false, rest: List.dropFirst bytes 5 }
@@ -133,7 +132,7 @@ decodeWith : List U8, Decoder val fmt, fmt -> DecodeResult val where fmt impleme
decodeWith = \bytes, @Decoder decode, fmt -> decode bytes fmt
## Decode a `List U8` of UTF-8 bytes and return a [DecodeResult](#DecodeResult).
## ```
## ```roc
## expect
## input = "\"hello\", " |> Str.toUtf8
## actual = Decode.fromBytesPartial input Json.json
@@ -147,7 +146,7 @@ fromBytesPartial = \bytes, fmt -> decodeWith bytes decoder fmt
## Decode a `List U8` of UTF-8 bytes and return a [Result] that expects no
## leftover bytes. On success this returns `Ok val`; if any bytes remain, it
## returns `Err Leftover (List U8)`.
## ```
## ```roc
## expect
## input = "\"hello\", " |> Str.toUtf8
## actual = Decode.fromBytes input Json.json

Some files were not shown because too many files have changed in this diff