Mirror of https://github.com/roc-lang/roc.git, synced 2025-08-03 11:52:19 +00:00

Merge branch 'main' of github.com:roc-lang/roc into rust-1-77-2-upgrade

Commit fb7fa99b2c: 631 changed files with 21948 additions and 16419 deletions

.github/workflows/basic_cli_build_release.yml (vendored, 14 lines changed)
@@ -11,7 +11,7 @@ env:
   # use .tar.gz for quick testing
   ARCHIVE_FORMAT: .tar.br
   # Make a new basic-cli git tag and set it here before starting this workflow
-  RELEASE_TAG: 0.7.1
+  RELEASE_TAG: 0.9.1

 jobs:
   prepare:

@@ -38,6 +38,10 @@ jobs:
       - run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-linux_arm64-latest.tar.gz
       - run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-macos_x86_64-latest.tar.gz
       - run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-macos_apple_silicon-latest.tar.gz
       #- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-linux_x86_64-TESTING.tar.gz
       #- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-linux_arm64-TESTING.tar.gz
       #- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-macos_x86_64-TESTING.tar.gz
       #- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-macos_apple_silicon-TESTING.tar.gz

       - name: Save roc_nightly archives
         uses: actions/upload-artifact@v4

@@ -93,7 +97,7 @@ jobs:
           basic-cli/platform/linux-arm64.o

   build-macos-x86_64-files:
-    runs-on: [macos-11] # I expect the generated files to work on macOS 12 and up
+    runs-on: [macos-12] # I expect the generated files to work on macOS 12 and up
     needs: [prepare]
     steps:
       - uses: actions/checkout@v4

@@ -154,7 +158,11 @@ jobs:
       - name: rename nightly folder
        run: mv roc_nightly* roc_nightly

-      - run: git clone https://github.com/roc-lang/basic-cli.git
+      - run: |
+          git clone https://github.com/roc-lang/basic-cli.git
+          cd basic-cli
+          git checkout $RELEASE_TAG
+          cd ..

       - run: cp macos-apple-silicon-files/* ./basic-cli/platform
@@ -1,5 +1,5 @@
 on:
-  # pull_request:
+  #pull_request:
   workflow_dispatch:

 # this cancels workflows currently in progress if you start a new one

@@ -18,10 +18,16 @@ jobs:
     steps:
       - uses: actions/checkout@v4

       # get latest nightly releases
       - run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-linux_x86_64-latest.tar.gz
       - run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-linux_arm64-latest.tar.gz
       - run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-macos_x86_64-latest.tar.gz
       - run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-macos_apple_silicon-latest.tar.gz
       #- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-linux_x86_64-TESTING.tar.gz
       #- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-linux_arm64-TESTING.tar.gz
       #- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-macos_x86_64-TESTING.tar.gz
       #- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-macos_apple_silicon-TESTING.tar.gz

       - name: Save roc_nightly archives
         uses: actions/upload-artifact@v4

@@ -77,7 +83,7 @@ jobs:
           basic-webserver/platform/linux-arm64.o

   build-macos-x86_64-files:
-    runs-on: [macos-11] # I expect the generated files to work on macOS 12 and 13
+    runs-on: [macos-12] # I expect the generated files to work on macOS 12 and up
     needs: [fetch-releases]
     steps:
       - uses: actions/checkout@v4

.github/workflows/macos_x86_64.yml (vendored, 2 lines changed)
@@ -29,5 +29,5 @@ jobs:
       - name: regular rust tests
         run: cargo test --locked --release -- --skip opaque_wrap_function --skip gen_list::bool_list_literal --skip platform_switching_swift --skip swift_ui --skip gen_tags::phantom_polymorphic_record && sccache --show-stats
-        # swift tests are skipped because of "Could not find or use auto-linked library 'swiftCompatibilityConcurrency'" on macos-11 x86_64 CI machine
+        # swift tests are skipped because of "Could not find or use auto-linked library 'swiftCompatibilityConcurrency'" on macos x86_64 CI machine
         # this issue may be caused by using older versions of XCode

.github/workflows/nightly_linux_arm64.yml (vendored, 2 lines changed)
@@ -1,5 +1,5 @@
 on:
-  # pull_request:
+  #pull_request:
   workflow_dispatch:
   schedule:
     - cron: '0 9 * * *'

.github/workflows/nightly_linux_x86_64.yml (vendored, 2 lines changed)
@@ -1,5 +1,5 @@
 on:
-  # pull_request:
+  #pull_request:
   workflow_dispatch:
   schedule:
     - cron: '0 9 * * *'
@@ -1,5 +1,5 @@
 on:
-  # pull_request:
+  #pull_request:
   workflow_dispatch:
   schedule:
     - cron: '0 9 * * *'

@@ -14,14 +14,17 @@ jobs:
   test-and-build:
     name: Rust tests, build and package nightly release
     runs-on: [self-hosted, macOS, ARM64]
     env:
       LIBRARY_PATH: /opt/homebrew/Cellar/zstd/1.5.6/lib
     timeout-minutes: 90
     steps:
       - uses: actions/checkout@v4

       - run: zig version
       - name: Update PATH to use zig 11
         run: |
           echo "PATH=/Users/m1ci/Downloads/zig-macos-aarch64-0.11.0:$PATH" >> $GITHUB_ENV

       - name: llvm version
         run: llc --version | grep LLVM
       - run: zig version

       - name: run tests
         run: cargo test --locked --release

.github/workflows/nightly_macos_x86_64.yml (vendored, 4 lines changed)
@@ -1,5 +1,5 @@
 on:
-  # pull_request:
+  #pull_request:
   workflow_dispatch:
   schedule:
     - cron: "0 9 * * *" # 9=9am utc+0

@@ -28,7 +28,7 @@ jobs:
       - name: execute rust tests
         run: cargo test --release --locked -- --skip opaque_wrap_function --skip gen_list::bool_list_literal --skip platform_switching_swift --skip swift_ui --skip gen_tags::phantom_polymorphic_record
-        # swift tests are skipped because of "Could not find or use auto-linked library 'swiftCompatibilityConcurrency'" on macos-11 x86_64 CI machine
+        # swift tests are skipped because of "Could not find or use auto-linked library 'swiftCompatibilityConcurrency'" on macos x86_64 CI machine
         # this issue may be caused by using older versions of XCode

       - name: build release
@@ -1,5 +1,5 @@
 on:
-  # pull_request:
+  #pull_request:
   workflow_dispatch:
   schedule:
     - cron: '0 9 * * *'

@@ -1,5 +1,5 @@
 on:
-  # pull_request:
+  #pull_request:
   workflow_dispatch:
   schedule:
     - cron: '0 9 * * *'

.github/workflows/test_nightly_many_os.yml (vendored, 7 lines changed)
@@ -9,7 +9,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        os: [ macos-11, macos-12, macos-13, ubuntu-20.04, ubuntu-22.04 ]
+        os: [ macos-12, macos-13, ubuntu-20.04, ubuntu-22.04 ]
     runs-on: ${{ matrix.os }}
     timeout-minutes: 90
     steps:

@@ -18,12 +18,15 @@ jobs:
         with:
           version: 0.11.0

       - name: Install zlib on macOS-13
         if: matrix.os == 'macos-13'
         run: brew install zlib

       - name: get the latest release archive for linux (x86_64)
         if: startsWith(matrix.os, 'ubuntu')
         run: |
           curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-linux_x86_64-latest.tar.gz

       - name: get the latest release archive for macos (x86_64)
         if: startsWith(matrix.os, 'macos')
         run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-macos_x86_64-latest.tar.gz

.github/workflows/ubuntu_x86_64.yml (vendored, 12 lines changed)
@@ -11,8 +11,6 @@ jobs:
     name: test zig, rust, wasm...
     runs-on: [self-hosted, i7-6700K]
     timeout-minutes: 90
-    env:
-      RUSTC_WRAPPER: /home/big-ci-user/.cargo/bin/sccache
     steps:
       - uses: actions/checkout@v4

@@ -39,7 +37,7 @@ jobs:
       - name: regular rust tests
         # see #5904 for skipped test
-        run: cargo test --locked --release -- --skip cli_run::expects_dev_and_test && sccache --show-stats
+        run: cargo test --locked --release -- --skip cli_run::expects_dev_and_test

       - name: tests examples in docs
         run: cargo test --doc --release

@@ -48,19 +46,19 @@ jobs:
         run: cd examples/platform-switching/rust-platform && LD_LIBRARY_PATH=. cargo test --release --locked

       - name: test the dev backend # these tests require an explicit feature flag
-        run: cargo test --locked --release --package test_gen --no-default-features --features gen-dev && sccache --show-stats
+        run: cargo test --locked --release --package test_gen --no-default-features --features gen-dev

       - name: test gen-wasm single threaded # gen-wasm has some multithreading problems to do with the wasmer runtime
-        run: cargo test --locked --release --package test_gen --no-default-features --features gen-wasm -- --test-threads=1 && sccache --show-stats
+        run: cargo test --locked --release --package test_gen --no-default-features --features gen-wasm -- --test-threads=1

       - name: roc test all builtins
         run: ./ci/roc_test_builtins.sh

       - name: wasm repl test
-        run: crates/repl_test/test_wasm.sh && sccache --show-stats
+        run: crates/repl_test/test_wasm.sh

       - name: test building wasm repl
-        run: ./ci/www-repl.sh && sccache --show-stats
+        run: ./ci/www-repl.sh

       #TODO i386 (32-bit linux) cli tests
       #TODO verify-no-git-changes

Cargo.lock (generated, 74 lines changed)
|
@ -40,9 +40,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "aligned"
|
||||
version = "0.4.1"
|
||||
version = "0.4.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "80a21b9440a626c7fc8573a9e3d3a06b75c7c97754c2949bc7857b90353ca655"
|
||||
checksum = "377e4c0ba83e4431b10df45c1d4666f178ea9c552cac93e60c3a88bf32785923"
|
||||
dependencies = [
|
||||
"as-slice",
|
||||
]
|
||||
|
@ -2216,12 +2216,27 @@ dependencies = [
|
|||
"cc",
|
||||
"libc",
|
||||
"once_cell",
|
||||
"spin",
|
||||
"untrusted",
|
||||
"spin 0.5.2",
|
||||
"untrusted 0.7.1",
|
||||
"web-sys",
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ring"
|
||||
version = "0.17.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"cfg-if",
|
||||
"getrandom",
|
||||
"libc",
|
||||
"spin 0.9.8",
|
||||
"untrusted 0.9.0",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rlimit"
|
||||
version = "0.9.1"
|
||||
|
@ -2329,6 +2344,7 @@ dependencies = [
|
|||
"roc_problem",
|
||||
"roc_region",
|
||||
"roc_serialize",
|
||||
"roc_test_utils",
|
||||
"roc_types",
|
||||
"static_assertions",
|
||||
"ven_pretty",
|
||||
|
@ -2486,12 +2502,14 @@ dependencies = [
|
|||
"roc_module",
|
||||
"roc_packaging",
|
||||
"roc_parse",
|
||||
"roc_problem",
|
||||
"roc_region",
|
||||
"roc_reporting",
|
||||
"roc_solve",
|
||||
"roc_target",
|
||||
"roc_types",
|
||||
"snafu",
|
||||
"ven_pretty",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -2531,6 +2549,7 @@ version = "0.0.1"
|
|||
dependencies = [
|
||||
"bumpalo",
|
||||
"roc_collections",
|
||||
"roc_error_macros",
|
||||
"roc_module",
|
||||
"roc_parse",
|
||||
"roc_region",
|
||||
|
@ -2648,7 +2667,7 @@ name = "roc_ident"
|
|||
version = "0.0.1"
|
||||
|
||||
[[package]]
|
||||
name = "roc_lang_srv"
|
||||
name = "roc_language_server"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"bumpalo",
|
||||
|
@ -2743,7 +2762,7 @@ dependencies = [
|
|||
"roc_solve",
|
||||
"roc_solve_problem",
|
||||
"roc_target",
|
||||
"roc_test_utils",
|
||||
"roc_test_utils_dir",
|
||||
"roc_types",
|
||||
"ven_pretty",
|
||||
]
|
||||
|
@ -2778,7 +2797,7 @@ dependencies = [
|
|||
"roc_solve",
|
||||
"roc_solve_problem",
|
||||
"roc_target",
|
||||
"roc_test_utils",
|
||||
"roc_test_utils_dir",
|
||||
"roc_tracing",
|
||||
"roc_types",
|
||||
"roc_unify",
|
||||
|
@ -3129,6 +3148,12 @@ name = "roc_test_utils"
|
|||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"pretty_assertions",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "roc_test_utils_dir"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"remove_dir_all 0.8.2",
|
||||
]
|
||||
|
||||
|
@ -3221,12 +3246,12 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "rustls"
|
||||
version = "0.21.7"
|
||||
version = "0.21.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cd8d6c9f025a446bc4d18ad9632e69aec8f287aa84499ee335599fabd20c3fd8"
|
||||
checksum = "7fecbfb7b1444f477b345853b1fce097a2c6fb637b2bfb87e6bc5db0f043fae4"
|
||||
dependencies = [
|
||||
"log",
|
||||
"ring",
|
||||
"ring 0.17.8",
|
||||
"rustls-webpki",
|
||||
"sct",
|
||||
]
|
||||
|
@ -3242,12 +3267,12 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "rustls-webpki"
|
||||
version = "0.101.6"
|
||||
version = "0.101.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3c7d5dece342910d9ba34d259310cae3e0154b873b35408b787b59bce53d34fe"
|
||||
checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765"
|
||||
dependencies = [
|
||||
"ring",
|
||||
"untrusted",
|
||||
"ring 0.17.8",
|
||||
"untrusted 0.9.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -3362,8 +3387,8 @@ version = "0.7.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4"
|
||||
dependencies = [
|
||||
"ring",
|
||||
"untrusted",
|
||||
"ring 0.16.20",
|
||||
"untrusted 0.7.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -3592,6 +3617,12 @@ version = "0.5.2"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d"
|
||||
|
||||
[[package]]
|
||||
name = "spin"
|
||||
version = "0.9.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67"
|
||||
|
||||
[[package]]
|
||||
name = "stable_deref_trait"
|
||||
version = "1.2.0"
|
||||
|
@ -3867,6 +3898,7 @@ dependencies = [
|
|||
"roc_parse",
|
||||
"roc_region",
|
||||
"roc_test_utils",
|
||||
"roc_test_utils_dir",
|
||||
"walkdir",
|
||||
]
|
||||
|
||||
|
@ -4086,9 +4118,9 @@ checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52"
|
|||
|
||||
[[package]]
|
||||
name = "tracing"
|
||||
version = "0.1.39"
|
||||
version = "0.1.40"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ee2ef2af84856a50c1d430afce2fdded0a4ec7eda868db86409b4543df0797f9"
|
||||
checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef"
|
||||
dependencies = [
|
||||
"pin-project-lite",
|
||||
"tracing-attributes",
|
||||
|
@ -4269,6 +4301,12 @@ version = "0.7.1"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a"
|
||||
|
||||
[[package]]
|
||||
name = "untrusted"
|
||||
version = "0.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"
|
||||
|
||||
[[package]]
|
||||
name = "url"
|
||||
version = "2.4.1"
|
||||
|
|
|
@@ -17,6 +17,7 @@ members = [
     "crates/repl_expect",
     "crates/roc_std",
     "crates/test_utils",
+    "crates/test_utils_dir",
     "crates/valgrind",
     "crates/tracing",
     "crates/utils/*",

@@ -26,7 +27,7 @@ members = [
     "crates/wasi-libc-sys",
     "crates/wasm_module",
     "crates/wasm_interp",
-    "crates/lang_srv",
+    "crates/language_server",
 ]

 exclude = [

@@ -158,7 +159,7 @@ tar = "0.4.38"
 target-lexicon = "0.12.6"
 tempfile = "=3.2.0"
 threadpool = "1.8.1"
-tracing = { version = "0.1.37", features = ["release_max_level_off"] }
+tracing = { version = "0.1.40", features = ["release_max_level_off"] }
 tracing-appender = "0.2.2"
 tracing-subscriber = { version = "0.3.16", features = ["env-filter"] }
 unicode-segmentation = "1.10.1"

@@ -194,4 +195,4 @@ lto = "thin" # TODO: We could consider full here since this is only used for pac

 [profile.debug-full]
 inherits = "dev"
-debug = true
+debug = true

README.md (41 lines changed)
@@ -2,7 +2,7 @@
 [Roc](https://www.roc-lang.org) is not ready for a 0.1 release yet, but we do have:

-- [**installation** guide](https://github.com/roc-lang/roc/tree/main/getting_started)
+- [**installation** guide](https://www.roc-lang.org/install)
 - [**tutorial**](https://roc-lang.org/tutorial)
 - [**docs** for the standard library](https://www.roc-lang.org/builtins)
 - [**examples**](https://www.roc-lang.org/examples)

@@ -14,6 +14,7 @@ If you'd like to contribute, check out [good first issues](https://github.com/ro
 ## Sponsors

 You can 💜 **sponsor** 💜 Roc on:

 - [GitHub](https://github.com/sponsors/roc-lang)
 - [Liberapay](https://liberapay.com/roc_lang)

@@ -33,22 +34,26 @@ If you would like your company to become a corporate sponsor of Roc's developmen
 We'd also like to express our gratitude to our generous [individual sponsors](https://github.com/sponsors/roc-lang/)! A special thanks to those sponsoring $25/month or more:

-* [Krzysztof G.](https://github.com/krzysztofgb)
-* [Sam Mohr](https://github.com/smores56)
-* [Steven Chen](https://github.com/megakilo)
-* [Drew Lazzeri](https://github.com/asteroidb612)
-* [Alex Binaei](https://github.com/mrmizz)
-* [Jono Mallanyk](https://github.com/jonomallanyk)
-* [Chris Packett](https://github.com/chris-packett)
-* [James Birtles](https://github.com/jamesbirtles)
-* [Ivo Balbaert](https://github.com/Ivo-Balbaert)
-* [Lucas Rosa](https://github.com/rvcas)
-* [Jonas Schell](https://github.com/Ocupe)
-* [Christopher Dolan](https://github.com/cdolan)
-* [Nick Gravgaard](https://github.com/nick-gravgaard)
-* [Zeljko Nesic](https://github.com/popara)
-* [Shritesh Bhattarai](https://github.com/shritesh)
-* [Richard Feldman](https://github.com/rtfeldman)
-* [Ayaz Hafiz](https://github.com/ayazhafiz)
+- [Jackson Lucky](https://github.com/jluckyiv)
+- [Agus Zubiaga](https://github.com/agu-z)
+- [Angelo Ceccato](https://github.com/AngeloChecked)
+- [Niclas Overby](https://github.com/noverby)
+- [Krzysztof G.](https://github.com/krzysztofgb)
+- [Sam Mohr](https://github.com/smores56)
+- [Steven Chen](https://github.com/megakilo)
+- [Drew Lazzeri](https://github.com/asteroidb612)
+- [Alex Binaei](https://github.com/mrmizz)
+- [Jono Mallanyk](https://github.com/jonomallanyk)
+- [Chris Packett](https://github.com/chris-packett)
+- [James Birtles](https://github.com/jamesbirtles)
+- [Ivo Balbaert](https://github.com/Ivo-Balbaert)
+- [Lucas Rosa](https://github.com/rvcas)
+- [Jonas Schell](https://github.com/Ocupe)
+- [Christopher Dolan](https://github.com/cdolan)
+- [Nick Gravgaard](https://github.com/nick-gravgaard)
+- [Zeljko Nesic](https://github.com/popara)
+- [Shritesh Bhattarai](https://github.com/shritesh)
+- [Richard Feldman](https://github.com/rtfeldman)
+- [Ayaz Hafiz](https://github.com/ayazhafiz)

 Thank you all so much for helping Roc progress!
|
@@ -8,11 +8,8 @@ use roc_fmt::def::fmt_defs;
 use roc_fmt::module::fmt_module;
 use roc_fmt::spaces::RemoveSpaces;
 use roc_fmt::{Ast, Buf};
-use roc_parse::{
-    module::{self, module_defs},
-    parser::{Parser, SyntaxError},
-    state::State,
-};
+use roc_parse::module::parse_module_defs;
+use roc_parse::{module, parser::SyntaxError, state::State};

 #[derive(Copy, Clone, Debug)]
 pub enum FormatMode {

@@ -67,6 +64,7 @@ fn is_roc_file(path: &Path) -> bool {
 pub fn format_files(files: std::vec::Vec<PathBuf>, mode: FormatMode) -> Result<(), String> {
     let arena = Bump::new();
+    let mut files_to_reformat = Vec::new(); // to track which files failed `roc format --check`

     for file in flatten_directories(files) {
         let src = std::fs::read_to_string(&file).unwrap();

@@ -75,9 +73,10 @@ pub fn format_files(files: std::vec::Vec<PathBuf>, mode: FormatMode) -> Result<(
             Ok(buf) => {
                 match mode {
                     FormatMode::CheckOnly => {
-                        // If we notice that this file needs to be formatted, return early
+                        // If a file fails `format --check`, add it to the file
+                        // list for reporting afterwards.
                         if buf.as_str() != src {
-                            return Err("One or more files need to be reformatted.".to_string());
+                            files_to_reformat.push(file.display().to_string());
                         }
                     }
                     FormatMode::WriteToFile => {

@@ -155,7 +154,14 @@ pub fn format_files(files: std::vec::Vec<PathBuf>, mode: FormatMode) -> Result<(
             },
         }
     }

     // After processing all files, check if any files failed `format --check`
     if !files_to_reformat.is_empty() {
         let file_list = files_to_reformat.join(", ");
         return Err(format!(
             "The following file(s) failed `roc format --check`:\n\t{}\nYou can fix this with `roc format filename.roc`.",
             file_list
         ));
     }
     Ok(())
 }

@@ -228,7 +234,9 @@ fn parse_all<'a>(arena: &'a Bump, src: &'a str) -> Result<Ast<'a>, SyntaxError<'
     let (module, state) = module::parse_header(arena, State::new(src.as_bytes()))
         .map_err(|e| SyntaxError::Header(e.problem))?;

-    let (_, defs, _) = module_defs().parse(arena, state, 0).map_err(|(_, e)| e)?;
+    let (module, defs) = module.upgrade_header_imports(arena);
+
+    let defs = parse_module_defs(arena, state, defs)?;

     Ok(Ast { module, defs })
 }

@@ -240,3 +248,105 @@ fn fmt_all<'a>(buf: &mut Buf<'a>, ast: &'a Ast) {
     buf.fmt_end_of_file();
 }

 #[cfg(test)]
 mod tests {
     use super::*;
     use std::fs::File;
     use std::io::Write;
     use tempfile::{tempdir, TempDir};

     const FORMATTED_ROC: &str = r#"app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.10.0/vNe6s9hWzoTZtFmNkvEICPErI9ptji_ySjicO6CkucY.tar.br" }

import pf.Stdout
import pf.Task

main =
    Stdout.line! "I'm a Roc application!""#;

     const UNFORMATTED_ROC: &str = r#"app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.10.0/vNe6s9hWzoTZtFmNkvEICPErI9ptji_ySjicO6CkucY.tar.br" }


import pf.Stdout

import pf.Task

main =
    Stdout.line! "I'm a Roc application!"
"#;

     fn setup_test_file(dir: &Path, file_name: &str, contents: &str) -> PathBuf {
         let file_path = dir.join(file_name);
         let mut file = File::create(&file_path).unwrap();
         writeln!(file, "{}", contents).unwrap();
         file.flush().unwrap();
         file_path
     }

     fn cleanup_temp_dir(dir: TempDir) {
         let result = dir.close();
         assert!(result.is_ok(), "Failed to delete temp directory");
     }

     #[test]
     fn test_single_file_needs_reformatting() {
         let dir = tempdir().unwrap();
         let file_path = setup_test_file(dir.path(), "test1.roc", UNFORMATTED_ROC);

         let result = format_files(vec![file_path.clone()], FormatMode::CheckOnly);
         assert!(result.is_err());
         assert_eq!(
             result.unwrap_err(),
             format!(
                 "The following file(s) failed `roc format --check`:\n\t{}\nYou can fix this with `roc format filename.roc`.",
                 &file_path.as_path().to_str().unwrap()
             )
         );

         cleanup_temp_dir(dir);
     }

     #[test]
     fn test_multiple_files_needs_reformatting() {
         let dir = tempdir().unwrap();
         let file1 = setup_test_file(dir.path(), "test1.roc", UNFORMATTED_ROC);
         let file2 = setup_test_file(dir.path(), "test2.roc", UNFORMATTED_ROC);

         let result = format_files(vec![file1, file2], FormatMode::CheckOnly);
         assert!(result.is_err());
         let error_message = result.unwrap_err();
         assert!(error_message.contains("test1.roc") && error_message.contains("test2.roc"));

         cleanup_temp_dir(dir);
     }

     #[test]
     fn test_no_files_need_reformatting() {
         let dir = tempdir().unwrap();
         let file_path = setup_test_file(dir.path(), "formatted.roc", FORMATTED_ROC);

         let result = format_files(vec![file_path], FormatMode::CheckOnly);
         assert!(result.is_ok());

         cleanup_temp_dir(dir);
     }

     #[test]
     fn test_some_files_need_reformatting() {
         let dir = tempdir().unwrap();
         let file_formatted = setup_test_file(dir.path(), "formatted.roc", FORMATTED_ROC);
         let file1_unformated = setup_test_file(dir.path(), "test1.roc", UNFORMATTED_ROC);
         let file2_unformated = setup_test_file(dir.path(), "test2.roc", UNFORMATTED_ROC);

         let result = format_files(
             vec![file_formatted, file1_unformated, file2_unformated],
             FormatMode::CheckOnly,
         );
         assert!(result.is_err());
         let error_message = result.unwrap_err();
         assert!(error_message.contains("test1.roc") && error_message.contains("test2.roc"));
         assert!(!error_message.contains("formatted.roc"));

         cleanup_temp_dir(dir);
     }
 }
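
For context, the check mode this change reports on is driven from the CLI; a minimal usage sketch, with hypothetical file names:

    # List every file that fails the format check, without rewriting anything:
    roc format --check src/Main.roc src/Other.roc

    # Reformat a file that was reported:
    roc format src/Main.roc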
@@ -72,6 +72,7 @@ pub const FLAG_STDOUT: &str = "stdout";
 pub const FLAG_WASM_STACK_SIZE_KB: &str = "wasm-stack-size-kb";
 pub const FLAG_OUTPUT: &str = "output";
 pub const FLAG_FUZZ: &str = "fuzz";
+pub const FLAG_MAIN: &str = "main";
 pub const ROC_FILE: &str = "ROC_FILE";
 pub const ROC_DIR: &str = "ROC_DIR";
 pub const GLUE_DIR: &str = "GLUE_DIR";

@@ -149,6 +150,12 @@ pub fn build_app() -> Command {
         .action(ArgAction::SetTrue)
         .required(false);

+    let flag_main = Arg::new(FLAG_MAIN)
+        .long(FLAG_MAIN)
+        .help("The .roc file of the main app/package module to resolve dependencies from")
+        .value_parser(value_parser!(PathBuf))
+        .required(false);
+
     let roc_file_to_run = Arg::new(ROC_FILE)
         .help("The .roc file of an app to run")
         .value_parser(value_parser!(PathBuf))

@@ -227,6 +234,7 @@ pub fn build_app() -> Command {
     )
     .subcommand(Command::new(CMD_TEST)
         .about("Run all top-level `expect`s in a main module and any modules it imports")
+        .arg(flag_main.clone())
         .arg(flag_optimize.clone())
         .arg(flag_max_threads.clone())
         .arg(flag_opt_size.clone())

@@ -246,7 +254,7 @@ pub fn build_app() -> Command {
     )
     .arg(
         Arg::new(ROC_FILE)
-            .help("The .roc file for the main module")
+            .help("The .roc file to test")
             .value_parser(value_parser!(PathBuf))
             .required(false)
             .default_value(DEFAULT_ROC_FILENAME)

@@ -321,11 +329,12 @@ pub fn build_app() -> Command {
         .about(concatcp!("Print the Roc compiler’s version, which is currently ", VERSION)))
     .subcommand(Command::new(CMD_CHECK)
         .about("Check the code for problems, but don’t build or run it")
+        .arg(flag_main.clone())
         .arg(flag_time.clone())
         .arg(flag_max_threads.clone())
         .arg(
             Arg::new(ROC_FILE)
-                .help("The .roc file of an app to check")
+                .help("The .roc file to check")
                 .value_parser(value_parser!(PathBuf))
                 .required(false)
                 .default_value(DEFAULT_ROC_FILENAME),

@@ -496,6 +505,8 @@ pub fn test(matches: &ArgMatches, target: Target) -> io::Result<i32> {
     // TODO may need to determine this dynamically based on dev builds.
     let function_kind = FunctionKind::LambdaSet;

+    let opt_main_path = matches.get_one::<PathBuf>(FLAG_MAIN);
+
     // Step 1: compile the app and generate the .o file
     let load_config = LoadConfig {
         target,

@@ -509,6 +520,7 @@
     let load_result = roc_load::load_and_monomorphize(
         arena,
         path.to_path_buf(),
+        opt_main_path.cloned(),
         RocCacheDir::Persistent(cache::roc_cache_dir().as_path()),
         load_config,
     );

@@ -747,7 +759,7 @@ pub fn build(
     }

     // Rather than building an executable or library, we're building
-    // a tarball so this code can be distributed via a HTTPS
+    // a tarball so this code can be distributed via HTTPS
     let filename = roc_packaging::tarball::build(path, compression)?;
     let total_time_ms = start_time.elapsed().as_millis();
     let total_time = if total_time_ms > 1000 {
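
For context, a minimal sketch of how the new --main flag added to `roc check` and `roc test` is meant to be invoked; the paths here are hypothetical:

    # Check or test a module that imports through a package shorthand,
    # resolving the shorthand from an explicit app/package main file:
    roc check MyModule.roc --main path/to/main.roc
    roc test MyModule.roc --main path/to/main.roc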
|
@ -5,7 +5,7 @@ use roc_build::program::{check_file, CodeGenBackend};
|
|||
use roc_cli::{
|
||||
build_app, format_files, format_src, test, BuildConfig, FormatMode, CMD_BUILD, CMD_CHECK,
|
||||
CMD_DEV, CMD_DOCS, CMD_FORMAT, CMD_GEN_STUB_LIB, CMD_GLUE, CMD_PREPROCESS_HOST, CMD_REPL,
|
||||
CMD_RUN, CMD_TEST, CMD_VERSION, DIRECTORY_OR_FILES, FLAG_CHECK, FLAG_DEV, FLAG_LIB,
|
||||
CMD_RUN, CMD_TEST, CMD_VERSION, DIRECTORY_OR_FILES, FLAG_CHECK, FLAG_DEV, FLAG_LIB, FLAG_MAIN,
|
||||
FLAG_NO_LINK, FLAG_OUTPUT, FLAG_STDIN, FLAG_STDOUT, FLAG_TARGET, FLAG_TIME, GLUE_DIR,
|
||||
GLUE_SPEC, ROC_FILE,
|
||||
};
|
||||
|
@ -200,9 +200,12 @@ fn main() -> io::Result<()> {
|
|||
Some(n) => Threading::AtMost(*n),
|
||||
};
|
||||
|
||||
let opt_main_path = matches.get_one::<PathBuf>(FLAG_MAIN);
|
||||
|
||||
match check_file(
|
||||
&arena,
|
||||
roc_file_path.to_owned(),
|
||||
opt_main_path.cloned(),
|
||||
emit_timings,
|
||||
RocCacheDir::Persistent(cache::roc_cache_dir().as_path()),
|
||||
threading,
|
||||
|
|
|
@ -1,4 +1,7 @@
|
|||
interface Base64 exposes [fromBytes, fromStr, toBytes, toStr] imports [Base64.Decode, Base64.Encode]
|
||||
interface Base64 exposes [fromBytes, fromStr, toBytes, toStr] imports []
|
||||
|
||||
import Base64.Decode
|
||||
import Base64.Encode
|
||||
|
||||
# base 64 encoding from a sequence of bytes
|
||||
fromBytes : List U8 -> Result Str [InvalidInput]
|
||||
|
|
|
@ -1,4 +1,6 @@
|
|||
interface Base64.Decode exposes [fromBytes] imports [Bytes.Decode.{ ByteDecoder, DecodeProblem }]
|
||||
interface Base64.Decode exposes [fromBytes] imports []
|
||||
|
||||
import Bytes.Decode exposing [ByteDecoder, DecodeProblem]
|
||||
|
||||
fromBytes : List U8 -> Result Str DecodeProblem
|
||||
fromBytes = \bytes ->
|
||||
|
|
|
@ -1,6 +1,9 @@
|
|||
interface Base64.Encode
|
||||
exposes [toBytes]
|
||||
imports [Bytes.Encode.{ ByteEncoder }]
|
||||
imports []
|
||||
|
||||
|
||||
import Bytes.Encode exposing [ByteEncoder]
|
||||
|
||||
InvalidChar : U8
|
||||
|
||||
|
|
|
@ -1,11 +1,11 @@
|
|||
app "args"
|
||||
packages { pf: "https://github.com/roc-lang/basic-cli/releases/download/0.8.1/x8URkvfyi9I0QhmVG98roKBUs_AZRkLFwFJVJ3942YA.tar.br" }
|
||||
imports [pf.Stdout, pf.Arg, pf.Task.{ Task }]
|
||||
provides [main] to pf
|
||||
app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.10.0/vNe6s9hWzoTZtFmNkvEICPErI9ptji_ySjicO6CkucY.tar.br" }
|
||||
|
||||
import pf.Stdout
|
||||
import pf.Arg
|
||||
import pf.Task exposing [Task]
|
||||
|
||||
main : Task {} I32
|
||||
main =
|
||||
args <- Arg.list |> Task.await
|
||||
args = Arg.list!
|
||||
parser =
|
||||
divCmd =
|
||||
Arg.succeed (\dividend -> \divisor -> Div (Num.toF64 dividend) (Num.toF64 divisor))
|
||||
|
@ -55,9 +55,8 @@ main =
|
|||
|> Num.toStr
|
||||
|> Stdout.line
|
||||
|
||||
Err helpMenu ->
|
||||
{} <- Stdout.line helpMenu |> Task.await
|
||||
Task.err 1
|
||||
Err helpMenuErr ->
|
||||
Task.err (Exit 1 "unable to parse args: $(Inspect.toStr helpMenuErr)")
|
||||
|
||||
runCmd = \cmd ->
|
||||
when cmd is
|
|
@ -1,7 +1,8 @@
|
|||
app "countdown"
|
||||
packages { pf: "https://github.com/roc-lang/basic-cli/releases/download/0.8.1/x8URkvfyi9I0QhmVG98roKBUs_AZRkLFwFJVJ3942YA.tar.br" }
|
||||
imports [pf.Stdin, pf.Stdout, pf.Task.{ await, loop }]
|
||||
provides [main] to pf
|
||||
app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.10.0/vNe6s9hWzoTZtFmNkvEICPErI9ptji_ySjicO6CkucY.tar.br" }
|
||||
|
||||
import pf.Stdin
|
||||
import pf.Stdout
|
||||
import pf.Task exposing [await, loop]
|
||||
|
||||
main =
|
||||
_ <- await (Stdout.line "\nLet's count down from 3 together - all you have to do is press <ENTER>.")
|
|
@ -1,21 +1,20 @@
|
|||
app "echo"
|
||||
packages { pf: "https://github.com/roc-lang/basic-cli/releases/download/0.8.1/x8URkvfyi9I0QhmVG98roKBUs_AZRkLFwFJVJ3942YA.tar.br" }
|
||||
imports [pf.Stdin, pf.Stdout, pf.Task.{ Task }]
|
||||
provides [main] to pf
|
||||
app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.10.0/vNe6s9hWzoTZtFmNkvEICPErI9ptji_ySjicO6CkucY.tar.br" }
|
||||
|
||||
import pf.Stdin
|
||||
import pf.Stdout
|
||||
import pf.Task exposing [Task]
|
||||
|
||||
main : Task {} I32
|
||||
main =
|
||||
_ <- Task.await (Stdout.line "🗣 Shout into this cave and hear the echo! 👂👂👂")
|
||||
|
||||
Task.loop {} tick
|
||||
|
||||
tick : {} -> Task [Step {}, Done {}] *
|
||||
tick : {} -> Task [Step {}, Done {}] _
|
||||
tick = \{} ->
|
||||
shout <- Task.await Stdin.line
|
||||
|
||||
when shout is
|
||||
Input s -> Stdout.line (echo s) |> Task.map Step
|
||||
End -> Stdout.line (echo "Received end of input (EOF).") |> Task.map Done
|
||||
when Stdin.line |> Task.result! is
|
||||
Ok str -> Stdout.line (echo str) |> Task.map Step
|
||||
Err (StdinErr EndOfFile) -> Stdout.line (echo "Received end of input (EOF).") |> Task.map Done
|
||||
Err (StdinErr err) -> Stdout.line (echo "Unable to read input $(Inspect.toStr err)") |> Task.map Done
|
||||
|
||||
echo : Str -> Str
|
||||
echo = \shout ->
|
|
@ -1,9 +1,10 @@
|
|||
app "env"
|
||||
packages { pf: "https://github.com/roc-lang/basic-cli/releases/download/0.8.1/x8URkvfyi9I0QhmVG98roKBUs_AZRkLFwFJVJ3942YA.tar.br" }
|
||||
imports [pf.Stdout, pf.Stderr, pf.Env, pf.Task.{ Task }]
|
||||
provides [main] to pf
|
||||
app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.10.0/vNe6s9hWzoTZtFmNkvEICPErI9ptji_ySjicO6CkucY.tar.br" }
|
||||
|
||||
import pf.Stdout
|
||||
import pf.Stderr
|
||||
import pf.Env
|
||||
import pf.Task exposing [Task]
|
||||
|
||||
main : Task {} I32
|
||||
main =
|
||||
task =
|
||||
Env.decode "EDITOR"
|
36
crates/cli/tests/cli/fileBROKEN.roc
Normal file
36
crates/cli/tests/cli/fileBROKEN.roc
Normal file
|
@ -0,0 +1,36 @@
|
|||
app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.10.0/vNe6s9hWzoTZtFmNkvEICPErI9ptji_ySjicO6CkucY.tar.br" }
|
||||
|
||||
import pf.Stdout
|
||||
import pf.Task exposing [Task]
|
||||
import pf.File
|
||||
import pf.Path
|
||||
import pf.Env
|
||||
import pf.Dir
|
||||
|
||||
main : Task {} [Exit I32 Str]_
|
||||
main =
|
||||
path = Path.fromStr "out.txt"
|
||||
|
||||
task =
|
||||
cwd = Env.cwd!
|
||||
Stdout.line! "cwd: $(Path.display cwd)"
|
||||
dirEntries = Dir.list! cwd
|
||||
contentsStr = Str.joinWith (List.map dirEntries Path.display) "\n "
|
||||
Stdout.line! "Directory contents:\n $(contentsStr)\n"
|
||||
Stdout.line! "Writing a string to out.txt"
|
||||
File.writeUtf8! path "a string!"
|
||||
contents = File.readUtf8! path
|
||||
Stdout.line! "I read the file back. Its contents: \"$(contents)\""
|
||||
|
||||
when Task.result! task is
|
||||
Ok {} -> Stdout.line! "Successfully wrote a string to out.txt"
|
||||
Err err ->
|
||||
msg =
|
||||
when err is
|
||||
FileWriteErr _ PermissionDenied -> "PermissionDenied"
|
||||
FileWriteErr _ Unsupported -> "Unsupported"
|
||||
FileWriteErr _ (Unrecognized _ other) -> other
|
||||
FileReadErr _ _ -> "Error reading file"
|
||||
_ -> "Uh oh, there was an error!"
|
||||
|
||||
Task.err (Exit 1 msg)
|
13
crates/cli/tests/cli/form.roc
Normal file
13
crates/cli/tests/cli/form.roc
Normal file
|
@ -0,0 +1,13 @@
|
|||
app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.10.0/vNe6s9hWzoTZtFmNkvEICPErI9ptji_ySjicO6CkucY.tar.br" }
|
||||
|
||||
import pf.Stdin
|
||||
import pf.Stdout
|
||||
import pf.Task exposing [await, Task]
|
||||
|
||||
main =
|
||||
Stdout.line! "What's your first name?"
|
||||
firstName = Stdin.line!
|
||||
Stdout.line! "What's your last name?"
|
||||
lastName = Stdin.line!
|
||||
|
||||
Stdout.line "Hi, $(firstName) $(lastName)! 👋"
|
24
crates/cli/tests/cli/http-get.roc
Normal file
24
crates/cli/tests/cli/http-get.roc
Normal file
|
@ -0,0 +1,24 @@
|
|||
app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.10.0/vNe6s9hWzoTZtFmNkvEICPErI9ptji_ySjicO6CkucY.tar.br" }
|
||||
|
||||
import pf.Http
|
||||
import pf.Task exposing [Task]
|
||||
import pf.Stdout
|
||||
|
||||
main =
|
||||
request = {
|
||||
method: Get,
|
||||
headers: [],
|
||||
url: "http://www.example.com",
|
||||
mimeType: "",
|
||||
body: [],
|
||||
timeout: TimeoutMilliseconds 5000,
|
||||
}
|
||||
|
||||
resp = Http.send! request
|
||||
|
||||
output =
|
||||
when resp |> Http.handleStringResponse is
|
||||
Err err -> crash (Http.errorToString err)
|
||||
Ok body -> body
|
||||
|
||||
Stdout.line output
|
11
crates/cli/tests/cli/ingested-file-bytes-no-ann.roc
Normal file
11
crates/cli/tests/cli/ingested-file-bytes-no-ann.roc
Normal file
|
@ -0,0 +1,11 @@
|
|||
app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.10.0/vNe6s9hWzoTZtFmNkvEICPErI9ptji_ySjicO6CkucY.tar.br" }
|
||||
|
||||
import pf.Stdout
|
||||
import "ingested-file.roc" as license
|
||||
|
||||
main =
|
||||
license
|
||||
|> List.map Num.toU64
|
||||
|> List.sum
|
||||
|> Num.toStr
|
||||
|> Stdout.line!
|
12
crates/cli/tests/cli/ingested-file-bytes.roc
Normal file
12
crates/cli/tests/cli/ingested-file-bytes.roc
Normal file
|
@ -0,0 +1,12 @@
|
|||
app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.10.0/vNe6s9hWzoTZtFmNkvEICPErI9ptji_ySjicO6CkucY.tar.br" }
|
||||
|
||||
import pf.Stdout
|
||||
import "ingested-file.roc" as license : _ # A type hole can also be used here.
|
||||
|
||||
main =
|
||||
# Due to how license is used, it will be a List U8.
|
||||
license
|
||||
|> List.map Num.toU64
|
||||
|> List.sum
|
||||
|> Num.toStr
|
||||
|> Stdout.line!
|
7
crates/cli/tests/cli/ingested-file.roc
Normal file
7
crates/cli/tests/cli/ingested-file.roc
Normal file
|
@ -0,0 +1,7 @@
|
|||
app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.10.0/vNe6s9hWzoTZtFmNkvEICPErI9ptji_ySjicO6CkucY.tar.br" }
|
||||
|
||||
import pf.Stdout
|
||||
import "ingested-file.roc" as ownCode : Str
|
||||
|
||||
main =
|
||||
Stdout.line! "\nThis roc file can print its own source code. The source is:\n\n$(ownCode)"
|
|
@ -1,15 +1,12 @@
|
|||
app "example"
|
||||
packages {
|
||||
cli: "https://github.com/roc-lang/basic-cli/releases/download/0.8.1/x8URkvfyi9I0QhmVG98roKBUs_AZRkLFwFJVJ3942YA.tar.br",
|
||||
parser: "https://github.com/lukewilliamboswell/roc-parser/releases/download/0.5.2/9VrPjwfQQ1QeSL3CfmWr2Pr9DESdDIXy97pwpuq84Ck.tar.br",
|
||||
}
|
||||
imports [
|
||||
cli.Stdout,
|
||||
cli.Stderr,
|
||||
parser.Core.{ Parser, buildPrimitiveParser, many },
|
||||
parser.String.{ parseStr },
|
||||
]
|
||||
provides [main] to cli
|
||||
app [main] {
|
||||
cli: platform "https://github.com/roc-lang/basic-cli/releases/download/0.10.0/vNe6s9hWzoTZtFmNkvEICPErI9ptji_ySjicO6CkucY.tar.br",
|
||||
parser: "https://github.com/lukewilliamboswell/roc-parser/releases/download/0.5.2/9VrPjwfQQ1QeSL3CfmWr2Pr9DESdDIXy97pwpuq84Ck.tar.br",
|
||||
}
|
||||
|
||||
import cli.Stdout
|
||||
import cli.Stderr
|
||||
import parser.Core exposing [Parser, buildPrimitiveParser, many]
|
||||
import parser.String exposing [parseStr]
|
||||
|
||||
main =
|
||||
lettersInput = "AAAiBByAABBwBtCCCiAyArBBx"
|
|
@ -1,22 +1,18 @@
|
|||
app "example"
|
||||
packages {
|
||||
pf: "https://github.com/roc-lang/basic-cli/releases/download/0.8.1/x8URkvfyi9I0QhmVG98roKBUs_AZRkLFwFJVJ3942YA.tar.br",
|
||||
parser: "https://github.com/lukewilliamboswell/roc-parser/releases/download/0.5.2/9VrPjwfQQ1QeSL3CfmWr2Pr9DESdDIXy97pwpuq84Ck.tar.br",
|
||||
}
|
||||
imports [
|
||||
pf.Stdout,
|
||||
pf.Stderr,
|
||||
pf.Task.{ Task },
|
||||
parser.Core.{ Parser, map, keep },
|
||||
parser.String.{ strFromUtf8 },
|
||||
parser.CSV.{ CSV },
|
||||
]
|
||||
provides [main] to pf
|
||||
app [main] {
|
||||
pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.10.0/vNe6s9hWzoTZtFmNkvEICPErI9ptji_ySjicO6CkucY.tar.br",
|
||||
parser: "https://github.com/lukewilliamboswell/roc-parser/releases/download/0.5.2/9VrPjwfQQ1QeSL3CfmWr2Pr9DESdDIXy97pwpuq84Ck.tar.br",
|
||||
}
|
||||
|
||||
import pf.Stdout
|
||||
import pf.Stderr
|
||||
import pf.Task exposing [Task]
|
||||
import parser.Core exposing [Parser, map, keep]
|
||||
import parser.String exposing [strFromUtf8]
|
||||
import parser.CSV exposing [CSV]
|
||||
|
||||
input : Str
|
||||
input = "Airplane!,1980,\"Robert Hays,Julie Hagerty\"\r\nCaddyshack,1980,\"Chevy Chase,Rodney Dangerfield,Ted Knight,Michael O'Keefe,Bill Murray\""
|
||||
|
||||
main : Task {} *
|
||||
main =
|
||||
when CSV.parseStr movieInfoParser input is
|
||||
Ok movies ->
|
|
@ -54,6 +54,7 @@ mod cli_run {
|
|||
const OPTIMIZE_FLAG: &str = concatcp!("--", roc_cli::FLAG_OPTIMIZE);
|
||||
const LINKER_FLAG: &str = concatcp!("--", roc_cli::FLAG_LINKER);
|
||||
const CHECK_FLAG: &str = concatcp!("--", roc_cli::FLAG_CHECK);
|
||||
#[allow(dead_code)]
|
||||
const PREBUILT_PLATFORM: &str = concatcp!("--", roc_cli::FLAG_PREBUILT);
|
||||
#[allow(dead_code)]
|
||||
const TARGET_FLAG: &str = concatcp!("--", roc_cli::FLAG_TARGET);
|
||||
|
@ -646,6 +647,78 @@ mod cli_run {
|
|||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(windows, ignore)]
|
||||
fn test_module_imports_pkg_w_flag() {
|
||||
test_roc_expect(
|
||||
"crates/cli/tests/module_imports_pkg",
|
||||
"Module.roc",
|
||||
&["--main", "tests/module_imports_pkg/app.roc"],
|
||||
indoc!(
|
||||
r#"
|
||||
0 failed and 1 passed in <ignored for test> ms.
|
||||
"#
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(windows, ignore)]
|
||||
fn test_module_imports_pkg_no_flag() {
|
||||
test_roc_expect(
|
||||
"crates/cli/tests/module_imports_pkg",
|
||||
"Module.roc",
|
||||
&[],
|
||||
indoc!(
|
||||
r#"
|
||||
── UNRECOGNIZED PACKAGE in tests/module_imports_pkg/Module.roc ─────────────────
|
||||
|
||||
This module is trying to import from `pkg`:
|
||||
|
||||
3│ import pkg.Foo
|
||||
^^^^^^^
|
||||
|
||||
A lowercase name indicates a package shorthand, but I don't know which
|
||||
packages are available.
|
||||
|
||||
When checking a module directly, I look for a `main.roc` app or
|
||||
package to resolve shorthands from.
|
||||
|
||||
You can create it, or specify an existing one with the --main flag."#
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(windows, ignore)]
|
||||
fn test_module_imports_unknown_pkg() {
|
||||
test_roc_expect(
|
||||
"crates/cli/tests/module_imports_pkg",
|
||||
"ImportsUnknownPkg.roc",
|
||||
&["--main", "tests/module_imports_pkg/app.roc"],
|
||||
indoc!(
|
||||
r#"
|
||||
── UNRECOGNIZED PACKAGE in tests/module_imports_pkg/ImportsUnknownPkg.roc ──────
|
||||
|
||||
This module is trying to import from `cli`:
|
||||
|
||||
3│ import cli.Foo
|
||||
^^^^^^^
|
||||
|
||||
A lowercase name indicates a package shorthand, but I don't recognize
|
||||
this one. Did you mean one of these?
|
||||
|
||||
pkg
|
||||
|
||||
Note: I'm using the following module to resolve package shorthands:
|
||||
|
||||
tests/module_imports_pkg/app.roc
|
||||
|
||||
You can specify a different one with the --main flag."#
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(windows, ignore)]
|
||||
fn transitive_expects() {
|
||||
|
@ -715,29 +788,6 @@ mod cli_run {
|
|||
test_roc_app_slim("examples/gui", "hello-guiBROKEN.roc", "", UseValgrind::No)
|
||||
}
|
||||
|
||||
#[cfg_attr(windows, ignore)] // flaky error; issue #5024
|
||||
#[serial(breakout)]
|
||||
#[test]
|
||||
fn breakout() {
|
||||
test_roc_app_slim(
|
||||
"examples/gui/breakout",
|
||||
"breakoutBROKEN.roc",
|
||||
"",
|
||||
UseValgrind::No,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[serial(breakout)]
|
||||
fn breakout_hello_gui() {
|
||||
test_roc_app_slim(
|
||||
"examples/gui/breakout",
|
||||
"hello-guiBROKEN.roc",
|
||||
"",
|
||||
UseValgrind::No,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(windows, ignore)]
|
||||
fn quicksort() {
|
||||
|
@ -777,7 +827,7 @@ mod cli_run {
|
|||
#[cfg_attr(windows, ignore = "missing __udivdi3 and some other symbols")]
|
||||
#[serial(cli_platform)]
|
||||
fn cli_args_check() {
|
||||
let path = file_path_from_root("examples/cli", "argsBROKEN.roc");
|
||||
let path = file_path_from_root("crates/cli/tests/cli", "argsBROKEN.roc");
|
||||
let out = run_roc([CMD_CHECK, path.to_str().unwrap()], &[], &[]);
|
||||
assert!(out.status.success());
|
||||
}
|
||||
|
@ -804,7 +854,7 @@ mod cli_run {
|
|||
#[cfg_attr(windows, ignore)]
|
||||
#[serial(cli_platform)]
|
||||
fn cli_countdown_check() {
|
||||
let path = file_path_from_root("examples/cli", "countdown.roc");
|
||||
let path = file_path_from_root("crates/cli/tests/cli", "countdown.roc");
|
||||
let out = run_roc([CMD_CHECK, path.to_str().unwrap()], &[], &[]);
|
||||
assert!(out.status.success());
|
||||
}
|
||||
|
@ -813,7 +863,7 @@ mod cli_run {
|
|||
#[cfg_attr(windows, ignore)]
|
||||
#[serial(cli_platform)]
|
||||
fn cli_echo_check() {
|
||||
let path = file_path_from_root("examples/cli", "echo.roc");
|
||||
let path = file_path_from_root("crates/cli/tests/cli", "echo.roc");
|
||||
let out = run_roc([CMD_CHECK, path.to_str().unwrap()], &[], &[]);
|
||||
assert!(out.status.success());
|
||||
}
|
||||
|
@ -822,7 +872,7 @@ mod cli_run {
|
|||
#[cfg_attr(windows, ignore)]
|
||||
#[serial(cli_platform)]
|
||||
fn cli_file_check() {
|
||||
let path = file_path_from_root("examples/cli", "fileBROKEN.roc");
|
||||
let path = file_path_from_root("crates/cli/tests/cli", "fileBROKEN.roc");
|
||||
let out = run_roc([CMD_CHECK, path.to_str().unwrap()], &[], &[]);
|
||||
assert!(out.status.success());
|
||||
}
|
||||
|
@ -831,7 +881,7 @@ mod cli_run {
|
|||
#[cfg_attr(windows, ignore)]
|
||||
#[serial(cli_platform)]
|
||||
fn cli_form_check() {
|
||||
let path = file_path_from_root("examples/cli", "form.roc");
|
||||
let path = file_path_from_root("crates/cli/tests/cli", "form.roc");
|
||||
let out = run_roc([CMD_CHECK, path.to_str().unwrap()], &[], &[]);
|
||||
assert!(out.status.success());
|
||||
}
|
||||
|
@ -840,7 +890,7 @@ mod cli_run {
|
|||
#[cfg_attr(windows, ignore)]
|
||||
#[serial(cli_platform)]
|
||||
fn cli_http_get_check() {
|
||||
let path = file_path_from_root("examples/cli", "http-get.roc");
|
||||
let path = file_path_from_root("crates/cli/tests/cli", "http-get.roc");
|
||||
let out = run_roc([CMD_CHECK, path.to_str().unwrap()], &[], &[]);
|
||||
assert!(out.status.success());
|
||||
}
|
||||
|
@ -896,27 +946,12 @@ mod cli_run {
|
|||
test_roc_app_slim("examples/swiftui", "main.roc", "", UseValgrind::No)
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(windows, ignore)]
|
||||
fn static_site_gen() {
|
||||
test_roc_app(
|
||||
"examples/static-site-gen",
|
||||
"static-site.roc",
|
||||
&[],
|
||||
&[Arg::ExamplePath("input"), Arg::ExamplePath("output")],
|
||||
&[],
|
||||
"Processed 4 files with 3 successes and 0 errors\n",
|
||||
UseValgrind::No,
|
||||
TestCliCommands::Run,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[serial(cli_platform)]
|
||||
#[cfg_attr(windows, ignore)]
|
||||
fn with_env_vars() {
|
||||
test_roc_app(
|
||||
"examples/cli",
|
||||
"crates/cli/tests/cli",
|
||||
"env.roc",
|
||||
&[],
|
||||
&[],
|
||||
|
@ -938,28 +973,16 @@ mod cli_run {
|
|||
#[cfg_attr(windows, ignore)]
|
||||
fn ingested_file() {
|
||||
test_roc_app(
|
||||
"examples/cli",
|
||||
"crates/cli/tests/cli",
|
||||
"ingested-file.roc",
|
||||
&[],
|
||||
&[],
|
||||
&[],
|
||||
indoc!(
|
||||
r#"
|
||||
This roc file can print its own source code. The source is:
|
||||
|
||||
app "ingested-file"
|
||||
packages { pf: "https://github.com/roc-lang/basic-cli/releases/download/0.8.1/x8URkvfyi9I0QhmVG98roKBUs_AZRkLFwFJVJ3942YA.tar.br" }
|
||||
imports [
|
||||
pf.Stdout,
|
||||
"ingested-file.roc" as ownCode : Str,
|
||||
]
|
||||
provides [main] to pf
|
||||
|
||||
main =
|
||||
Stdout.line "\nThis roc file can print its own source code. The source is:\n\n$(ownCode)"
|
||||
|
||||
"#
|
||||
),
|
||||
format!(
|
||||
"\nThis roc file can print its own source code. The source is:\n\n{}\n",
|
||||
include_str!("cli/ingested-file.roc")
|
||||
)
|
||||
.as_str(),
|
||||
UseValgrind::No,
|
||||
TestCliCommands::Run,
|
||||
)
|
||||
|
@ -970,12 +993,27 @@ mod cli_run {
|
|||
#[cfg_attr(windows, ignore)]
|
||||
fn ingested_file_bytes() {
|
||||
test_roc_app(
|
||||
"examples/cli",
|
||||
"crates/cli/tests/cli",
|
||||
"ingested-file-bytes.roc",
|
||||
&[],
|
||||
&[],
|
||||
&[],
|
||||
"162088\n",
|
||||
"27101\n",
|
||||
UseValgrind::No,
|
||||
TestCliCommands::Run,
|
||||
)
|
||||
}
|
||||
#[test]
|
||||
#[serial(cli_platform)]
|
||||
#[cfg_attr(windows, ignore)]
|
||||
fn ingested_file_bytes_no_ann() {
|
||||
test_roc_app(
|
||||
"crates/cli/tests/cli",
|
||||
"ingested-file-bytes-no-ann.roc",
|
||||
&[],
|
||||
&[],
|
||||
&[],
|
||||
"27101\n",
|
||||
UseValgrind::No,
|
||||
TestCliCommands::Run,
|
||||
)
|
||||
|
@ -986,8 +1024,8 @@ mod cli_run {
|
|||
#[cfg_attr(windows, ignore)]
|
||||
fn parse_movies_csv() {
|
||||
test_roc_app_slim(
|
||||
"examples/parser",
|
||||
"parse-movies-csv.roc",
|
||||
"crates/cli/tests/cli",
|
||||
"parser-movies-csv.roc",
|
||||
"2 movies were found:\n\nThe movie 'Airplane!' was released in 1980 and stars Robert Hays and Julie Hagerty\nThe movie 'Caddyshack' was released in 1980 and stars Chevy Chase, Rodney Dangerfield, Ted Knight, Michael O'Keefe and Bill Murray\n\nParse success!\n\n",
|
||||
UseValgrind::No,
|
||||
)
|
||||
|
@ -998,8 +1036,8 @@ mod cli_run {
|
|||
#[cfg_attr(windows, ignore)]
|
||||
fn parse_letter_counts() {
|
||||
test_roc_app_slim(
|
||||
"examples/parser",
|
||||
"letter-counts.roc",
|
||||
"crates/cli/tests/cli",
|
||||
"parser-letter-counts.roc",
|
||||
"I counted 7 letter A's!\n",
|
||||
UseValgrind::No,
|
||||
)
|
||||
|
@ -1025,20 +1063,21 @@ mod cli_run {
|
|||
// TODO not sure if this cfg should still be here: #[cfg(not(debug_assertions))]
|
||||
// this is for testing the benchmarks, to perform proper benchmarks see crates/cli/benches/README.md
|
||||
mod test_benchmarks {
|
||||
#[allow(unused_imports)]
|
||||
use super::{TestCliCommands, UseValgrind};
|
||||
use cli_utils::helpers::cli_testing_dir;
|
||||
|
||||
#[allow(unused_imports)]
|
||||
use super::{check_output_with_stdin, OPTIMIZE_FLAG, PREBUILT_PLATFORM};
|
||||
|
||||
#[allow(unused_imports)]
|
||||
use std::{path::Path, sync::Once};
|
||||
|
||||
static BENCHMARKS_BUILD_PLATFORM: Once = Once::new();
|
||||
|
||||
fn test_benchmark(
|
||||
roc_filename: &str,
|
||||
stdin: &[&str],
|
||||
expected_ending: &str,
|
||||
use_valgrind: UseValgrind,
|
||||
_use_valgrind: UseValgrind,
|
||||
) {
|
||||
let file_name = cli_testing_dir("benchmarks").join(roc_filename);
|
||||
|
||||
|
@ -1062,15 +1101,18 @@ mod cli_run {
|
|||
}
|
||||
|
||||
#[cfg(all(not(feature = "wasm32-cli-run"), not(feature = "i386-cli-run")))]
|
||||
check_output_regular(&file_name, stdin, expected_ending, use_valgrind);
|
||||
check_output_regular(&file_name, stdin, expected_ending, _use_valgrind);
|
||||
|
||||
#[cfg(feature = "wasm32-cli-run")]
|
||||
check_output_wasm(&file_name, stdin, expected_ending);
|
||||
|
||||
#[cfg(feature = "i386-cli-run")]
|
||||
check_output_i386(&file_name, stdin, expected_ending, use_valgrind);
|
||||
check_output_i386(&file_name, stdin, expected_ending, _use_valgrind);
|
||||
}
|
||||
|
||||
#[cfg(all(not(feature = "wasm32-cli-run"), not(feature = "i386-cli-run")))]
|
||||
static BENCHMARKS_BUILD_PLATFORM: Once = Once::new();
|
||||
|
||||
#[cfg(all(not(feature = "wasm32-cli-run"), not(feature = "i386-cli-run")))]
|
||||
fn check_output_regular(
|
||||
file_name: &Path,
|
||||
|
@ -1439,7 +1481,7 @@ mod cli_run {
|
|||
r#"
|
||||
── UNUSED IMPORT in ...nown_bad/UnusedImportButWithALongFileNameForTesting.roc ─
|
||||
|
||||
Nothing from Symbol is used in this module.
|
||||
Symbol is imported but not used.
|
||||
|
||||
3│ imports [Symbol.{ Ident }]
|
||||
^^^^^^^^^^^^^^^^
|
||||
|
@ -1483,7 +1525,7 @@ mod cli_run {
|
|||
r#"
|
||||
── UNUSED IMPORT in tests/known_bad/UnusedImport.roc ───────────────────────────
|
||||
|
||||
Nothing from Symbol is used in this module.
|
||||
Symbol is imported but not used.
|
||||
|
||||
3│ imports [Symbol.{ Ident }]
|
||||
^^^^^^^^^^^^^^^^
|
||||
|
|
|
@ -1,6 +1,4 @@
|
|||
app "formatted"
|
||||
packages { pf: "platform/main.roc" } imports []
|
||||
provides [main] to pf
|
||||
app [main] { pf: "platform/main.roc" }
|
||||
|
||||
main : Str
|
||||
main = Dep1.value1 {}
|
||||
|
|
|
@ -1,6 +1,4 @@
|
|||
app "formatted"
|
||||
packages { pf: "platform/main.roc" }
|
||||
provides [main] to pf
|
||||
app [main] { pf: "platform/main.roc" }
|
||||
|
||||
main : Str
|
||||
main = Dep1.value1 {}
|
||||
|
|
|
@ -1,6 +1,4 @@
|
|||
app "formatted"
|
||||
packages { pf: "platform/main.roc" } imports []
|
||||
provides [main] to pf
|
||||
app [main] { pf: "platform/main.roc" }
|
||||
|
||||
main : Str
|
||||
main = Dep1.value1 {}
|
||||
|
|
|
@ -1,3 +1 @@
|
|||
package "csv"
|
||||
exposes [Csv]
|
||||
packages {}
|
||||
package [Csv] {}
|
||||
|
|
|
@ -1,3 +1 @@
|
|||
package "json"
|
||||
exposes [JsonParser]
|
||||
packages {}
|
||||
package [JsonParser] {}
|
||||
|
|
|
@ -0,0 +1,7 @@
|
|||
module [valueFromPkg]
|
||||
|
||||
import cli.Foo
|
||||
|
||||
valueFromPkg = Foo.foo
|
||||
|
||||
expect valueFromPkg == "Foo"
|
7
crates/cli/tests/module_imports_pkg/Module.roc
Normal file
7
crates/cli/tests/module_imports_pkg/Module.roc
Normal file
|
@ -0,0 +1,7 @@
|
|||
module [valueFromPkg]
|
||||
|
||||
import pkg.Foo
|
||||
|
||||
valueFromPkg = Foo.foo
|
||||
|
||||
expect valueFromPkg == "Foo"
|
8
crates/cli/tests/module_imports_pkg/app.roc
Normal file
8
crates/cli/tests/module_imports_pkg/app.roc
Normal file
|
@ -0,0 +1,8 @@
|
|||
app [main] {
|
||||
pkg: "./pkg/main.roc"
|
||||
}
|
||||
|
||||
import Module
|
||||
|
||||
main =
|
||||
Module.valueFromPkg
|
3
crates/cli/tests/module_imports_pkg/pkg/Foo.roc
Normal file
3
crates/cli/tests/module_imports_pkg/pkg/Foo.roc
Normal file
|
@ -0,0 +1,3 @@
|
|||
module [foo]
|
||||
|
||||
foo = "Foo"
|
1
crates/cli/tests/module_imports_pkg/pkg/main.roc
Normal file
1
crates/cli/tests/module_imports_pkg/pkg/main.roc
Normal file
|
@ -0,0 +1 @@
|
|||
package [Foo] {}
|
|
@ -50,8 +50,15 @@ pub fn legacy_host_file(target: Target, platform_main_roc: &Path) -> PathBuf {
        .replace(roc_linker::PRECOMPILED_HOST_EXT, lib_ext);

    let lib_path = platform_main_roc.with_file_name(file_name);

    let default_host_path: PathBuf = platform_main_roc
        .with_file_name("libhost")
        .with_extension(lib_ext);

    if lib_path.exists() {
        lib_path
    } else if default_host_path.exists() {
        default_host_path
    } else {
        let obj_ext = target.object_file_ext();
        lib_path.with_extension(obj_ext)
@ -1132,6 +1139,8 @@ fn link_macos(
        // "-lgcc", // TODO will eventually need compiler_rt from gcc or something - see https://github.com/roc-lang/roc/pull/554#discussion_r496370840
        "-framework",
        "Security",
        "-framework",
        "SystemConfiguration",
        // Output
        "-o",
        output_path.to_str().unwrap(), // app
@ -735,9 +735,14 @@ pub fn build_file<'a>(
    let compilation_start = Instant::now();

    // Step 1: compile the app and generate the .o file
    let loaded =
        roc_load::load_and_monomorphize(arena, app_module_path.clone(), roc_cache_dir, load_config)
            .map_err(|e| BuildFileError::from_mono_error(e, compilation_start))?;
    let loaded = roc_load::load_and_monomorphize(
        arena,
        app_module_path.clone(),
        None,
        roc_cache_dir,
        load_config,
    )
    .map_err(|e| BuildFileError::from_mono_error(e, compilation_start))?;

    build_loaded_file(
        arena,
@ -1187,6 +1192,7 @@ fn build_and_preprocess_host_lowlevel(
pub fn check_file<'a>(
    arena: &'a Bump,
    roc_file_path: PathBuf,
    opt_main_path: Option<PathBuf>,
    emit_timings: bool,
    roc_cache_dir: RocCacheDir<'_>,
    threading: Threading,

@ -1209,8 +1215,13 @@ pub fn check_file<'a>(
        threading,
        exec_mode: ExecutionMode::Check,
    };
    let mut loaded =
        roc_load::load_and_typecheck(arena, roc_file_path, roc_cache_dir, load_config)?;
    let mut loaded = roc_load::load_and_typecheck(
        arena,
        roc_file_path,
        opt_main_path,
        roc_cache_dir,
        load_config,
    )?;

    let buf = &mut String::with_capacity(1024);

@ -1292,6 +1303,7 @@ pub fn build_str_test<'a>(
        PathBuf::from("valgrind_test.roc"),
        app_module_source,
        app_module_path.to_path_buf(),
        None,
        roc_cache_dir,
        load_config,
    )
@ -80,7 +80,7 @@ It's one thing to actually write these functions, it's _another_ thing to let th

## Specifying how we pass args to the function

### builtins/mono/src/borrow.rs
### builtins/mono/src/inc_dec.rs

After we have all of this, we need to specify if the arguments we're passing are owned, borrowed or irrelevant. Towards the bottom of this file, add a new case for your builtin and specify each arg. Be sure to read the comment, as it explains this in more detail.
@ -1,5 +1,6 @@
const std = @import("std");
const utils = @import("utils.zig");
const str = @import("str.zig");
const UpdateMode = utils.UpdateMode;
const mem = std.mem;
const math = std.math;

@ -1033,3 +1034,34 @@ test "listConcat: non-unique with unique overlapping" {

    try expect(concatted.eql(wanted));
}

pub fn listConcatUtf8(
    list: RocList,
    string: str.RocStr,
) callconv(.C) RocList {
    if (string.len() == 0) {
        return list;
    } else {
        const combined_length = list.len() + string.len();

        // List U8 has alignment 1 and element_width 1
        var result = list.reallocate(1, combined_length, 1);
        // We just allocated combined_length, which is > 0 because string.len() > 0
        var bytes = result.bytes orelse unreachable;
        @memcpy(bytes[list.len()..combined_length], string.asU8ptr()[0..string.len()]);

        return result;
    }
}

test "listConcatUtf8" {
    const list = RocList.fromSlice(u8, &[_]u8{ 1, 2, 3, 4 });
    defer list.decref(1);
    const string_bytes = "🐦";
    const string = str.RocStr.init(string_bytes, string_bytes.len);
    defer string.decref();
    const ret = listConcatUtf8(list, string);
    const expected = RocList.fromSlice(u8, &[_]u8{ 1, 2, 3, 4, 240, 159, 144, 166 });
    defer expected.decref(1);
    try expect(ret.eql(expected));
}
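As a cross-check of the bytes used in the Zig test above: "🐦" (U+1F426) encodes to 240, 159, 144, 166 in UTF-8. A small Roc-level sketch of the same fact (illustrative module, not part of this commit):

module []

# U+1F426 is F0 9F 90 A6 in UTF-8, i.e. the decimal bytes 240, 159, 144, 166.
expect Str.toUtf8 "🐦" == [240, 159, 144, 166]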
@ -85,6 +85,7 @@ comptime {
    exportListFn(list.listCapacity, "capacity");
    exportListFn(list.listAllocationPtr, "allocation_ptr");
    exportListFn(list.listReleaseExcessCapacity, "release_excess_capacity");
    exportListFn(list.listConcatUtf8, "concat_utf8");
}

// Num Module
@ -1,6 +1,4 @@
interface Bool
    exposes [Bool, Eq, true, false, and, or, not, isEq, isNotEq]
    imports []
module [Bool, Eq, true, false, and, or, not, isEq, isNotEq]

## Defines a type that can be compared for total equality.
##
@ -2,9 +2,7 @@
## - Holding unknown Roc types when developing [platforms](https://github.com/roc-lang/roc/wiki/Roc-concepts-explained#platform).
## - To improve performance in rare cases.
##
interface Box
    exposes [box, unbox]
    imports []
module [box, unbox]

## Allocates a value on the heap. Boxing is an expensive process as it copies
## the value from the stack to the heap. This may provide a performance
@ -1,55 +1,53 @@
interface Decode
    exposes [
        DecodeError, DecodeResult, Decoder, Decoding, DecoderFormatting, decoder,
        u8, u16, u32, u64, u128, i8, i16, i32, i64, i128, f32, f64, dec, bool,
        string, list, record, tuple, custom, decodeWith, fromBytesPartial, fromBytes, mapResult,
    ]
    imports [
        List,
        Result.{ Result },
        Num.{ U8, U16, U32, U64, U128, I8, I16, I32, I64, I128, F32, F64, Dec },
        Bool.{ Bool },
    ]
module [
    DecodeError, DecodeResult, Decoder, Decoding, DecoderFormatting, decoder,
    u8, u16, u32, u64, u128, i8, i16, i32, i64, i128, f32, f64, dec, bool,
    string, list, record, tuple, custom, decodeWith, fromBytesPartial, fromBytes, mapResult,
]

import List
import Result exposing [Result]
import Num exposing [U8, U16, U32, U64, U128, I8, I16, I32, I64, I128, F32, F64, Dec]
import Bool exposing [Bool]

## Error types when decoding a `List U8` of utf-8 bytes using a [Decoder]
DecodeError : [TooShort]

@ -103,7 +101,7 @@ DecoderFormatting implements
## `Skip` if the field is not a part of the decoded record.
##
## `finalizer` should produce the record value from the decoded `state`.
record : state, (state, Str -> [Keep (Decoder state fmt), Skip]), (state -> Result val DecodeError) -> Decoder val fmt where fmt implements DecoderFormatting
record : state, (state, Str -> [Keep (Decoder state fmt), Skip]), (state, fmt -> Result val DecodeError) -> Decoder val fmt where fmt implements DecoderFormatting

## `tuple state stepElem finalizer` decodes a tuple element-by-element.
##
@ -1,43 +1,41 @@
interface Dict
    exposes [
        Dict, empty, withCapacity, single, clear, capacity, reserve, releaseExcessCapacity,
        len, isEmpty, get, contains, insert, remove, update, walk, walkUntil, keepIf, dropIf,
        toList, fromList, keys, values, insertAll, keepShared, removeAll, map, joinMap,
    ]
    imports [
        Bool.{ Bool, Eq },
        Result.{ Result },
        List,
        Str,
        Num.{ U64, F32, U32, U8, I8 },
        Hash.{ Hasher, Hash },
        Inspect.{ Inspect, Inspector, InspectFormatter },
    ]
module [
    Dict, empty, withCapacity, single, clear, capacity, reserve, releaseExcessCapacity,
    len, isEmpty, get, contains, insert, remove, update, walk, walkUntil, keepIf, dropIf,
    toList, fromList, keys, values, insertAll, keepShared, removeAll, map, joinMap,
]

import Bool exposing [Bool, Eq]
import Result exposing [Result]
import List
import Str
import Num exposing [U64, F32, U32, U8]
import Hash exposing [Hasher, Hash]
import Inspect exposing [Inspect, Inspector, InspectFormatter]

## A [dictionary](https://en.wikipedia.org/wiki/Associative_array) that lets you
## associate keys with values.
@ -1,51 +1,49 @@
interface Encode
    exposes [
        Encoder, Encoding, toEncoder, EncoderFormatting,
        u8, u16, u32, u64, u128, i8, i16, i32, i64, i128, f32, f64, dec, bool,
        string, list, record, tag, tuple, custom, appendWith, append, toBytes,
    ]
    imports [
        Num.{ U8, U16, U32, U64, U128, I8, I16, I32, I64, I128, F32, F64, Dec },
        Bool.{ Bool },
    ]
module [
    Encoder, Encoding, toEncoder, EncoderFormatting,
    u8, u16, u32, u64, u128, i8, i16, i32, i64, i128, f32, f64, dec, bool,
    string, list, record, tag, tuple, custom, appendWith, append, toBytes,
]

import Num exposing [U8, U16, U32, U64, U128, I8, I16, I32, I64, I128, F32, F64, Dec]
import Bool exposing [Bool]

Encoder fmt := List U8, fmt -> List U8 where fmt implements EncoderFormatting
@ -1,31 +1,42 @@
interface Hash
    exposes [
        Hash, Hasher, hash, addBytes, addU8, addU16, addU32, addU64, addU128,
        hashBool, hashI8, hashI16, hashI32, hashI64, hashI128, hashDec, complete,
        hashStrBytes, hashList, hashUnordered,
    ] imports [
        Bool.{ Bool },
        List,
        Str,
        Num.{ U8, U16, U32, U64, U128, I8, I16, I32, I64, I128, Dec },
    ]
module [
    Hash, Hasher, hash, addBytes, addU8, addU16, addU32, addU64, addU128,
    hashBool, hashI8, hashI16, hashI32, hashI64, hashI128, hashDec, complete,
    hashStrBytes, hashList, hashUnordered,
]

import Bool exposing [Bool]
import List
import Str
import Num exposing [U8, U16, U32, U64, U128, I8, I16, I32, I64, I128, Dec]

## A value that can be hashed.
Hash implements
@ -1,46 +1,44 @@
interface Inspect
    exposes [
        Inspect, Inspector, InspectFormatter, ElemWalker, KeyValWalker, inspect, init,
        list, set, dict, tag, tuple, record, bool, str, function, opaque,
        u8, i8, u16, i16, u32, i32, u64, i64, u128, i128, f32, f64, dec,
        custom, apply, toInspector, toStr,
    ]
    imports [
        Bool.{ Bool },
        Num.{ U8, U16, U32, U64, U128, I8, I16, I32, I64, I128, F32, F64, Dec },
        List,
        Str,
    ]
module [
    Inspect, Inspector, InspectFormatter, ElemWalker, KeyValWalker, inspect, init,
    list, set, dict, tag, tuple, record, bool, str, function, opaque,
    u8, i8, u16, i16, u32, i32, u64, i64, u128, i128, f32, f64, dec,
    custom, apply, toInspector, toStr,
]

import Bool exposing [Bool]
import Num exposing [U8, U16, U32, U64, U128, I8, I16, I32, I64, I128, F32, F64, Dec]
import List
import Str

KeyValWalker state collection key val : collection, state, (state, key, val -> state) -> state
ElemWalker state collection elem : collection, state, (state, elem -> state) -> state
@ -1,81 +1,80 @@
interface List
    exposes [
        isEmpty, get, set, replace, update, append, appendIfOk, prepend, prependIfOk,
        map, len, withCapacity, walkBackwards, concat, first, single, repeat, reverse,
        join, keepIf, contains, sum, walk, last, keepOks, keepErrs, mapWithIndex,
        map2, map3, product, walkWithIndex, walkUntil, walkWithIndexUntil, walkFrom,
        walkFromUntil, range, sortWith, swap, dropAt, min, max, map4, mapTry, walkTry,
        joinMap, any, takeFirst, takeLast, dropFirst, dropLast, findFirst, findLast,
        findFirstIndex, findLastIndex, sublist, intersperse, split, splitFirst, splitLast,
        startsWith, endsWith, all, dropIf, sortAsc, sortDesc, reserve, releaseExcessCapacity,
        walkBackwardsUntil, countIf, chunksOf,
    ]
    imports [
        Bool.{ Bool, Eq },
        Result.{ Result },
        Num.{ U64, Num, Int },
    ]
module [
    isEmpty, get, set, replace, update, append, appendIfOk, prepend, prependIfOk,
    map, len, withCapacity, walkBackwards, concat, first, single, repeat, reverse,
    join, keepIf, contains, sum, walk, last, keepOks, keepErrs, mapWithIndex,
    map2, map3, product, walkWithIndex, walkUntil, walkWithIndexUntil, walkFrom,
    walkFromUntil, range, sortWith, swap, dropAt, min, max, map4, mapTry, walkTry,
    joinMap, any, takeFirst, takeLast, dropFirst, dropLast, findFirst, findLast,
    findFirstIndex, findLastIndex, sublist, intersperse, split, splitFirst, splitLast,
    startsWith, endsWith, all, dropIf, sortAsc, sortDesc, reserve, releaseExcessCapacity,
    walkBackwardsUntil, countIf, chunksOf, concatUtf8,
]

import Bool exposing [Bool, Eq]
import Result exposing [Result]
import Num exposing [U64, Num, U8]

## ## Types
##

@ -1326,3 +1325,12 @@ iterBackwardsHelp = \list, state, f, prevIndex ->
            Break b -> Break b
    else
        Continue state

## Concatenates the bytes of a string encoded as utf8 to a list of bytes.
## ```roc
## expect (List.concatUtf8 [1, 2, 3, 4] "🐦") == [1, 2, 3, 4, 240, 159, 144, 166]
## ```
concatUtf8 : List U8, Str -> List U8

expect (List.concatUtf8 [1, 2, 3, 4] "🐦") == [1, 2, 3, 4, 240, 159, 144, 166]
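Beyond the doc example above, a minimal sketch of how the new `concatUtf8` composes with `List.walk` (the `encodeAll` helper and module are illustrative, not part of this commit):

module []

# Fold List.concatUtf8 over several strings to build one utf-8 byte buffer.
encodeAll : List Str -> List U8
encodeAll = \strings ->
    List.walk strings [] \buffer, s -> List.concatUtf8 buffer s

expect encodeAll ["ab", "c"] == [97, 98, 99]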
@ -1,166 +1,168 @@
interface Num
    exposes [
        Num, Int, Frac, Integer, FloatingPoint,
        I128, I64, I32, I16, I8, U128, U64, U32, U16, U8,
        Signed128, Signed64, Signed32, Signed16, Signed8,
        Unsigned128, Unsigned64, Unsigned32, Unsigned16, Unsigned8,
        Dec, F64, F32, Decimal, Binary32, Binary64,
        e, pi, tau, abs, absDiff, neg, add, sub, mul, min, max,
        isLt, isLte, isGt, isGte, isApproxEq, sin, cos, tan, atan, acos, asin,
        isZero, isEven, isOdd, toFrac, isPositive, isNegative, isNaN, isInfinite, isFinite,
        rem, remChecked, div, divChecked, sqrt, sqrtChecked, log, logChecked,
        round, ceiling, floor, compare, pow, powInt,
        countLeadingZeroBits, countTrailingZeroBits, countOneBits,
        addWrap, addChecked, addSaturated, bitwiseAnd, bitwiseXor, bitwiseOr, bitwiseNot,
        shiftLeftBy, shiftRightBy, shiftRightZfBy,
        subWrap, subChecked, subSaturated, mulWrap, mulSaturated, mulChecked,
        intCast, divCeil, divCeilChecked, divTrunc, divTruncChecked, toStr, isMultipleOf,
        minI8, maxI8, minU8, maxU8, minI16, maxI16, minU16, maxU16,
        minI32, maxI32, minU32, maxU32, minI64, maxI64, minU64, maxU64,
        minI128, maxI128, minU128, maxU128, minF32, maxF32, minF64, maxF64,
        toI8, toI8Checked, toI16, toI16Checked, toI32, toI32Checked, toI64, toI64Checked,
        toI128, toI128Checked, toU8, toU8Checked, toU16, toU16Checked, toU32, toU32Checked,
        toU64, toU64Checked, toU128, toU128Checked, toF32, toF32Checked, toF64, toF64Checked,
        withoutDecimalPoint, withDecimalPoint,
        f32ToParts, f64ToParts, f32FromParts, f64FromParts,
    ]
    imports [
        Bool.{ Bool },
        Result.{ Result },
    ]
module [
    Num, Int, Frac, Integer, FloatingPoint,
    I128, I64, I32, I16, I8, U128, U64, U32, U16, U8,
    Signed128, Signed64, Signed32, Signed16, Signed8,
    Unsigned128, Unsigned64, Unsigned32, Unsigned16, Unsigned8,
    Dec, F64, F32, Decimal, Binary32, Binary64,
    e, pi, tau, abs, absDiff, neg, add, sub, mul, min, max,
    isLt, isLte, isGt, isGte, isApproxEq, sin, cos, tan, atan, acos, asin,
    isZero, isEven, isOdd, toFrac, isPositive, isNegative, isNaN, isInfinite, isFinite,
    rem, remChecked, div, divChecked, sqrt, sqrtChecked, log, logChecked,
    round, ceiling, floor, compare, pow, powInt,
    countLeadingZeroBits, countTrailingZeroBits, countOneBits,
    addWrap, addChecked, addSaturated, bitwiseAnd, bitwiseXor, bitwiseOr, bitwiseNot,
    shiftLeftBy, shiftRightBy, shiftRightZfBy,
    subWrap, subChecked, subSaturated, mulWrap, mulSaturated, mulChecked,
    intCast, divCeil, divCeilChecked, divTrunc, divTruncChecked, toStr, isMultipleOf,
    minI8, maxI8, minU8, maxU8, minI16, maxI16, minU16, maxU16,
    minI32, maxI32, minU32, maxU32, minI64, maxI64, minU64, maxU64,
    minI128, maxI128, minU128, maxU128, minF32, maxF32, minF64, maxF64,
    toI8, toI8Checked, toI16, toI16Checked, toI32, toI32Checked, toI64, toI64Checked,
    toI128, toI128Checked, toU8, toU8Checked, toU16, toU16Checked, toU32, toU32Checked,
    toU64, toU64Checked, toU128, toU128Checked, toF32, toF32Checked, toF64, toF64Checked,
    withoutDecimalPoint, withDecimalPoint,
    f32ToParts, f64ToParts, f32FromParts, f64FromParts,
    nanF32, nanF64, infinityF32, infinityF64,
]

import Bool exposing [Bool]
import Result exposing [Result]

## Represents a number that could be either an [Int] or a [Frac].
##
@ -1001,7 +1003,7 @@ bitwiseNot = \n ->
## ```roc
## shiftLeftBy 0b0000_0011 2 == 0b0000_1100
##
## 0b0000_0101 |> shiftLeftBy 2 == 0b0000_1100
## 0b0000_0101 |> shiftLeftBy 2 == 0b0001_0100
## ```
## In some languages `shiftLeftBy` is implemented as a binary operator `<<`.
shiftLeftBy : Int a, U8 -> Int a
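The corrected doc example above can be verified directly; a small sketch (illustrative module, not part of this commit):

module []

# 0b0000_0011 is 3, and 3 << 2 == 12 == 0b0000_1100.
# 0b0000_0101 is 5, and 5 << 2 == 20 == 0b0001_0100, matching the fixed example.
expect (0b0000_0011 |> Num.shiftLeftBy 2) == 0b0000_1100
expect (0b0000_0101 |> Num.shiftLeftBy 2) == 0b0001_0100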
@ -1435,3 +1437,19 @@ f32FromParts : { sign : Bool, exponent : U8, fraction : U32 } -> F32
## The fraction should not be bigger than 0x000F_FFFF_FFFF_FFFF, any bigger value will be truncated.
## The exponent should not be bigger than 0x07FF, any bigger value will be truncated.
f64FromParts : { sign : Bool, exponent : U16, fraction : U64 } -> F64

## The value for not-a-number for a [F32] according to the IEEE 754 standard.
nanF32 : F32
nanF32 = 0.0f32 / 0.0

## The value for not-a-number for a [F64] according to the IEEE 754 standard.
nanF64 : F64
nanF64 = 0.0f64 / 0.0

## The value for infinity for a [F32] according to the IEEE 754 standard.
infinityF32 : F32
infinityF32 = 1.0f32 / 0.0

## The value for infinity for a [F64] according to the IEEE 754 standard.
infinityF64 : F64
infinityF64 = 1.0f64 / 0.0
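A short sketch of the properties these new constants should satisfy under the IEEE 754 semantics referenced above (illustrative module, not part of this commit):

module []

# NaN is never finite and is reported by isNaN; infinity is infinite but not NaN.
expect Num.isNaN Num.nanF64
expect Num.isInfinite Num.infinityF64
expect Bool.not (Num.isFinite Num.infinityF32)
expect Bool.not (Num.isNaN Num.infinityF32)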
@ -1,6 +1,6 @@
interface Result
    exposes [Result, isOk, isErr, map, mapErr, try, onErr, withDefault]
    imports [Bool.{ Bool }]
module [Result, isOk, isErr, map, mapErr, try, onErr, withDefault]

import Bool exposing [Bool]

## The result of an operation that could fail: either the operation went
## okay, or else there was an error of some sort.
@ -1,37 +1,35 @@
interface Set
    exposes [
        Set, empty, withCapacity, reserve, releaseExcessCapacity, single, walk, walkUntil,
        keepIf, dropIf, insert, len, isEmpty, capacity, remove, contains, toList, fromList,
        union, intersection, difference, map, joinMap,
    ]
    imports [
        List,
        Bool.{ Bool, Eq },
        Dict.{ Dict },
        Num.{ U64 },
        Hash.{ Hash, Hasher },
        Inspect.{ Inspect, Inspector, InspectFormatter },
    ]
module [
    Set, empty, withCapacity, reserve, releaseExcessCapacity, single, walk, walkUntil,
    keepIf, dropIf, insert, len, isEmpty, capacity, remove, contains, toList, fromList,
    union, intersection, difference, map, joinMap,
]

import List
import Bool exposing [Bool, Eq]
import Dict
import Num exposing [U64]
import Hash exposing [Hash, Hasher]
import Inspect exposing [Inspect, Inspector, InspectFormatter]

## Provides a [set](https://en.wikipedia.org/wiki/Set_(abstract_data_type))
## type which stores a collection of unique values, without any ordering
@ -326,55 +326,53 @@
## If a situation like this comes up, a slice can be turned into a separate string by using [`Str.concat`](https://www.roc-lang.org/builtins/Str#concat) to concatenate the slice onto an empty string (or one created with [`Str.withCapacity`](https://www.roc-lang.org/builtins/Str#withCapacity)).
##
## Currently, the only way to get seamless slices of strings is by calling certain `Str` functions which return them. In general, `Str` functions which accept a string and return a subset of that string tend to do this. [`Str.trim`](https://www.roc-lang.org/builtins/Str#trim) is another example of a function which returns a seamless slice.
interface Str
    exposes [
        Utf8Problem, Utf8ByteProblem, concat, isEmpty, joinWith, split, repeat,
        countUtf8Bytes, toUtf8, fromUtf8, startsWith, endsWith, trim, trimStart, trimEnd,
        toDec, toF64, toF32, toU128, toI128, toU64, toI64, toU32, toI32, toU16, toI16, toU8, toI8,
        replaceEach, replaceFirst, replaceLast, splitFirst, splitLast,
        walkUtf8, walkUtf8WithIndex, reserve, releaseExcessCapacity, withCapacity, withPrefix, contains,
    ]
    imports [
        Bool.{ Bool, Eq },
        Result.{ Result },
        List,
        Num.{ Num, U8, U16, U32, U64, U128, I8, I16, I32, I64, I128, F32, F64, Dec },
    ]
module [
    Utf8Problem, Utf8ByteProblem, concat, isEmpty, joinWith, split, repeat,
    countUtf8Bytes, toUtf8, fromUtf8, startsWith, endsWith, trim, trimStart, trimEnd,
    toDec, toF64, toF32, toU128, toI128, toU64, toI64, toU32, toI32, toU16, toI16, toU8, toI8,
    replaceEach, replaceFirst, replaceLast, splitFirst, splitLast,
    walkUtf8, walkUtf8WithIndex, reserve, releaseExcessCapacity, withCapacity, withPrefix, contains,
]

import Bool exposing [Bool]
import Result exposing [Result]
import List
import Num exposing [Num, U8, U16, U32, U64, U128, I8, I16, I32, I64, I128, F32, F64, Dec]

Utf8ByteProblem : [
    InvalidStartByte,
@ -1,44 +1,18 @@
## THIS MODULE IS DEPRECATED AND CURRENTLY IN THE PROCESS OF BEING REMOVED
## FROM STD LIBRARY
interface TotallyNotJson
    exposes [
        Json,
        json,
        jsonWithOptions,
    ]
    imports [
        List,
        Str,
        Result.{ Result },
        Encode,
        Encode.{ Encoder, EncoderFormatting, appendWith },
        Decode,
        Decode.{ DecoderFormatting, DecodeResult },
        Num.{ U8, U16, U32, U64, U128, I8, I16, I32, I64, I128, F32, F64, Dec },
        Bool.{ Bool, Eq },
        Result,
    ]
module [
    Json,
    json,
    jsonWithOptions,
]

import List
import Str
import Result
import Encode exposing [EncoderFormatting, appendWith]
import Decode exposing [DecoderFormatting, DecodeResult]
import Num exposing [U8, U16, U64, F32, F64, Dec]
import Bool exposing [Bool]

## An opaque type with the `EncoderFormatting` and
## `DecoderFormatting` abilities.
@ -232,14 +206,20 @@ escapedByteToJson = \b ->
encodeList = \lst, encodeElem ->
    Encode.custom \bytes, @Json {} ->
        writeList = \{ buffer, elemsLeft }, elem ->
            bufferWithElem = appendWith buffer (encodeElem elem) (@Json {})
            bufferWithSuffix =
                if elemsLeft > 1 then
                    List.append bufferWithElem (Num.toU8 ',')
                else
                    bufferWithElem
            beforeBufferLen = buffer |> List.len

            { buffer: bufferWithSuffix, elemsLeft: elemsLeft - 1 }
            bufferWithElem = appendWith buffer (encodeElem elem) (@Json {})
            # If our encoder returned [] we just skip the elem
            if bufferWithElem |> List.len == beforeBufferLen then
                { buffer: bufferWithElem, elemsLeft: elemsLeft - 1 }
            else
                bufferWithSuffix =
                    if elemsLeft > 1 then
                        List.append bufferWithElem (Num.toU8 ',')
                    else
                        bufferWithElem

                { buffer: bufferWithSuffix, elemsLeft: elemsLeft - 1 }

        head = List.append bytes (Num.toU8 '[')
        { buffer: withList } = List.walk lst { buffer: head, elemsLeft: List.len lst } writeList
@ -249,21 +229,27 @@ encodeList = \lst, encodeElem ->
encodeRecord = \fields ->
    Encode.custom \bytes, @Json {} ->
        writeRecord = \{ buffer, fieldsLeft }, { key, value } ->
            fieldName = key
            bufferWithKeyValue =
                List.append buffer (Num.toU8 '"')
                |> List.concat (Str.toUtf8 fieldName)
                |> List.append (Num.toU8 '"')
                |> List.append (Num.toU8 ':') # Note we need to encode using the json config here
                |> appendWith value (@Json {})

            bufferWithSuffix =
                if fieldsLeft > 1 then
                    List.append bufferWithKeyValue (Num.toU8 ',')
                else
                    bufferWithKeyValue
            fieldValue = [] |> appendWith value (json)
            # If our encoder returned [] we just skip the field
            if fieldValue == [] then
                { buffer, fieldsLeft: fieldsLeft - 1 }
            else
                fieldName = key
                bufferWithKeyValue =
                    List.append buffer (Num.toU8 '"')
                    |> List.concat (Str.toUtf8 fieldName)
                    |> List.append (Num.toU8 '"')
                    |> List.append (Num.toU8 ':') # Note we need to encode using the json config here
                    |> List.concat fieldValue

            { buffer: bufferWithSuffix, fieldsLeft: fieldsLeft - 1 }
                bufferWithSuffix =
                    if fieldsLeft > 1 then
                        List.append bufferWithKeyValue (Num.toU8 ',')
                    else
                        bufferWithKeyValue

                { buffer: bufferWithSuffix, fieldsLeft: fieldsLeft - 1 }

        bytesHead = List.append bytes (Num.toU8 '{')
        { buffer: bytesWithRecord } = List.walk fields { buffer: bytesHead, fieldsLeft: List.len fields } writeRecord
@ -273,16 +259,21 @@ encodeRecord = \fields ->
encodeTuple = \elems ->
    Encode.custom \bytes, @Json {} ->
        writeTuple = \{ buffer, elemsLeft }, elemEncoder ->
            bufferWithElem =
                appendWith buffer elemEncoder (@Json {})
            beforeBufferLen = buffer |> List.len

            bufferWithSuffix =
                if elemsLeft > 1 then
                    List.append bufferWithElem (Num.toU8 ',')
                else
                    bufferWithElem
            bufferWithElem = appendWith buffer (elemEncoder) (@Json {})

            { buffer: bufferWithSuffix, elemsLeft: elemsLeft - 1 }
            # If our encoder returned [] we just skip the elem
            if bufferWithElem |> List.len == beforeBufferLen then
                { buffer: bufferWithElem, elemsLeft: elemsLeft - 1 }
            else
                bufferWithSuffix =
                    if elemsLeft > 1 then
                        List.append bufferWithElem (Num.toU8 ',')
                    else
                        bufferWithElem

                { buffer: bufferWithSuffix, elemsLeft: elemsLeft - 1 }

        bytesHead = List.append bytes (Num.toU8 '[')
        { buffer: bytesWithRecord } = List.walk elems { buffer: bytesHead, elemsLeft: List.len elems } writeTuple
@ -1273,7 +1264,7 @@ decodeRecord = \initialState, stepField, finalizer -> Decode.custom \bytes, @Jso
        rest = List.dropFirst bytesAfterValue n

        # Build final record from decoded fields and values
        when finalizer updatedRecord is
        when finalizer updatedRecord json is
            Ok val -> { result: Ok val, rest }
            Err e -> { result: Err e, rest }
@ -1,3 +1,15 @@
package "builtins"
    exposes [Str, Num, Bool, Result, List, Dict, Set, Decode, Encode, Hash, Box, TotallyNotJson, Inspect]
    packages {}
package [
    Str,
    Num,
    Bool,
    Result,
    List,
    Dict,
    Set,
    Decode,
    Encode,
    Hash,
    Box,
    TotallyNotJson,
    Inspect,
] {}
@ -390,6 +390,7 @@ pub const LIST_RESERVE: &str = "roc_builtins.list.reserve";
pub const LIST_CAPACITY: &str = "roc_builtins.list.capacity";
pub const LIST_ALLOCATION_PTR: &str = "roc_builtins.list.allocation_ptr";
pub const LIST_RELEASE_EXCESS_CAPACITY: &str = "roc_builtins.list.release_excess_capacity";
pub const LIST_CONCAT_UTF8: &str = "roc_builtins.list.concat_utf8";

pub const DEC_ABS: &str = "roc_builtins.dec.abs";
pub const DEC_ACOS: &str = "roc_builtins.dec.acos";
@ -17,6 +17,7 @@ roc_problem = { path = "../problem" }
roc_region = { path = "../region" }
roc_serialize = { path = "../serialize" }
roc_types = { path = "../types" }
roc_test_utils = { path = "../../test_utils" }

ven_pretty = { path = "../../vendor/pretty" }
@ -1,5 +1,5 @@
use crate::env::Env;
use crate::procedure::References;
use crate::procedure::{QualifiedReference, References};
use crate::scope::{PendingAbilitiesInScope, Scope};
use roc_collections::{ImMap, MutSet, SendMap, VecMap, VecSet};
use roc_module::ident::{Ident, Lowercase, TagName};

@ -17,7 +17,7 @@ use roc_types::types::{
pub struct Annotation {
    pub typ: Type,
    pub introduced_variables: IntroducedVariables,
    pub references: VecSet<Symbol>,
    pub references: References,
    pub aliases: VecMap<Symbol, Alias>,
}

@ -28,9 +28,7 @@ impl Annotation {
        references: &mut References,
        introduced_variables: &mut IntroducedVariables,
    ) {
        for symbol in self.references.iter() {
            references.insert_type_lookup(*symbol);
        }
        references.union_mut(&self.references);

        introduced_variables.union(&self.introduced_variables);

@ -291,7 +289,7 @@ pub(crate) fn canonicalize_annotation(
    annotation_for: AnnotationFor,
) -> Annotation {
    let mut introduced_variables = IntroducedVariables::default();
    let mut references = VecSet::default();
    let mut references = References::new();
    let mut aliases = VecMap::default();

    let (annotation, region) = match annotation {

@ -381,13 +379,17 @@ pub(crate) fn make_apply_symbol(
    scope: &mut Scope,
    module_name: &str,
    ident: &str,
    references: &mut References,
) -> Result<Symbol, Type> {
    if module_name.is_empty() {
        // Since module_name was empty, this is an unqualified type.
        // Look it up in scope!

        match scope.lookup_str(ident, region) {
            Ok(symbol) => Ok(symbol),
            Ok(symbol) => {
                references.insert_type_lookup(symbol, QualifiedReference::Unqualified);
                Ok(symbol)
            }
            Err(problem) => {
                env.problem(roc_problem::can::Problem::RuntimeError(problem));

@ -396,7 +398,10 @@ pub(crate) fn make_apply_symbol(
        }
    } else {
        match env.qualified_lookup(scope, module_name, ident, region) {
            Ok(symbol) => Ok(symbol),
            Ok(symbol) => {
                references.insert_type_lookup(symbol, QualifiedReference::Qualified);
                Ok(symbol)
            }
            Err(problem) => {
                // Either the module wasn't imported, or
                // it was imported but it doesn't expose this ident.

@ -537,7 +542,7 @@ fn can_annotation_help(
    var_store: &mut VarStore,
    introduced_variables: &mut IntroducedVariables,
    local_aliases: &mut VecMap<Symbol, Alias>,
    references: &mut VecSet<Symbol>,
    references: &mut References,
) -> Type {
    use roc_parse::ast::TypeAnnotation::*;

@ -580,15 +585,14 @@ fn can_annotation_help(
            Type::Function(args, Box::new(closure), Box::new(ret))
        }
        Apply(module_name, ident, type_arguments) => {
            let symbol = match make_apply_symbol(env, region, scope, module_name, ident) {
            let symbol = match make_apply_symbol(env, region, scope, module_name, ident, references)
            {
                Err(problem) => return problem,
                Ok(symbol) => symbol,
            };

            let mut args = Vec::new();

            references.insert(symbol);

            if scope.abilities_store.is_ability(symbol) {
                let fresh_ty_var = find_fresh_var_name(introduced_variables);

@ -744,11 +748,13 @@ fn can_annotation_help(
            let mut vars = Vec::with_capacity(loc_vars.len());
            let mut lowercase_vars: Vec<Loc<AliasVar>> = Vec::with_capacity(loc_vars.len());

            references.insert(symbol);
            references.insert_type_lookup(symbol, QualifiedReference::Unqualified);

            for loc_var in *loc_vars {
                let var = match loc_var.value {
                    Pattern::Identifier(name) if name.chars().next().unwrap().is_lowercase() => {
                    Pattern::Identifier { ident: name, .. }
                        if name.chars().next().unwrap().is_lowercase() =>
                    {
                        name
                    }
                    _ => unreachable!("I thought this was validated during parsing"),

@ -1055,7 +1061,7 @@ fn canonicalize_has_clause(
    introduced_variables: &mut IntroducedVariables,
    clause: &Loc<roc_parse::ast::ImplementsClause<'_>>,
    pending_abilities_in_scope: &PendingAbilitiesInScope,
    references: &mut VecSet<Symbol>,
    references: &mut References,
) -> Result<(), Type> {
    let Loc {
        region,

@ -1078,7 +1084,7 @@ fn canonicalize_has_clause(
    {
        let ability = match ability {
            TypeAnnotation::Apply(module_name, ident, _type_arguments) => {
                let symbol = make_apply_symbol(env, region, scope, module_name, ident)?;
                let symbol = make_apply_symbol(env, region, scope, module_name, ident, references)?;

                // Ability defined locally, whose members we are constructing right now...
                if !pending_abilities_in_scope.contains_key(&symbol)

@ -1096,7 +1102,6 @@ fn canonicalize_has_clause(
        }
    };

    references.insert(ability);
    let already_seen = can_abilities.insert(ability);

    if already_seen {

@ -1130,7 +1135,7 @@ fn can_extension_type(
    var_store: &mut VarStore,
    introduced_variables: &mut IntroducedVariables,
    local_aliases: &mut VecMap<Symbol, Alias>,
    references: &mut VecSet<Symbol>,
    references: &mut References,
    opt_ext: &Option<&Loc<TypeAnnotation>>,
    ext_problem_kind: roc_problem::can::ExtensionTypeKind,
) -> (Type, ExtImplicitOpenness) {

@ -1333,7 +1338,7 @@ fn can_assigned_fields<'a>(
    var_store: &mut VarStore,
    introduced_variables: &mut IntroducedVariables,
    local_aliases: &mut VecMap<Symbol, Alias>,
    references: &mut VecSet<Symbol>,
    references: &mut References,
) -> SendMap<Lowercase, RecordField<Type>> {
    use roc_parse::ast::AssignedField::*;
    use roc_types::types::RecordField::*;

@ -1448,7 +1453,7 @@ fn can_assigned_tuple_elems(
    var_store: &mut VarStore,
    introduced_variables: &mut IntroducedVariables,
    local_aliases: &mut VecMap<Symbol, Alias>,
    references: &mut VecSet<Symbol>,
    references: &mut References,
) -> VecMap<usize, Type> {
    let mut elem_types = VecMap::with_capacity(elems.len());

@ -1482,7 +1487,7 @@ fn can_tags<'a>(
    var_store: &mut VarStore,
    introduced_variables: &mut IntroducedVariables,
    local_aliases: &mut VecMap<Symbol, Alias>,
    references: &mut VecSet<Symbol>,
    references: &mut References,
) -> Vec<(TagName, Vec<Type>)> {
    let mut tag_types = Vec::with_capacity(tags.len());
@ -150,6 +150,7 @@ map_symbol_to_lowlevel_and_arity! {
    ListSwap; LIST_SWAP; 3,
    ListGetCapacity; LIST_CAPACITY; 1,
    ListReleaseExcessCapacity; LIST_RELEASE_EXCESS_CAPACITY; 1,
    ListConcatUtf8; LIST_CONCAT_UTF8; 2,

    ListGetUnsafe; DICT_LIST_GET_UNSAFE; 2,
@ -28,6 +28,8 @@ use roc_collections::{ImSet, MutMap, SendMap};
use roc_error_macros::internal_error;
use roc_module::ident::Ident;
use roc_module::ident::Lowercase;
use roc_module::ident::ModuleName;
use roc_module::ident::QualifiedModuleName;
use roc_module::symbol::IdentId;
use roc_module::symbol::ModuleId;
use roc_module::symbol::Symbol;

@ -52,6 +54,10 @@ use roc_types::types::MemberImpl;
use roc_types::types::OptAbleType;
use roc_types::types::{Alias, Type};
use std::fmt::Debug;
use std::fs;
use std::io::Read;
use std::path::PathBuf;
use std::sync::Arc;

#[derive(Clone, Debug)]
pub struct Def {

@ -158,6 +164,12 @@ enum PendingValueDef<'a> {
        &'a Loc<ast::TypeAnnotation<'a>>,
        &'a Loc<ast::Expr<'a>>,
    ),
    /// Ingested file
    IngestedFile(
        Loc<Pattern>,
        Option<Loc<ast::TypeAnnotation<'a>>>,
        Loc<ast::StrLiteral<'a>>,
    ),
}

impl PendingValueDef<'_> {

@ -166,6 +178,7 @@ impl PendingValueDef<'_> {
            PendingValueDef::AnnotationOnly(_, loc_pattern, _) => loc_pattern,
            PendingValueDef::Body(loc_pattern, _) => loc_pattern,
            PendingValueDef::TypedBody(_, loc_pattern, _, _) => loc_pattern,
            PendingValueDef::IngestedFile(loc_pattern, _, _) => loc_pattern,
        }
    }
}
@ -357,9 +370,7 @@ fn canonicalize_alias<'a>(
    );

    // Record all the annotation's references in output.references.lookups
    for symbol in can_ann.references {
        output.references.insert_type_lookup(symbol);
    }
    output.references.union_mut(&can_ann.references);

    let mut can_vars: Vec<Loc<AliasVar>> = Vec::with_capacity(vars.len());
    let mut is_phantom = false;

@ -428,36 +439,54 @@ fn canonicalize_alias<'a>(
        return Err(());
    }

    let num_unbound = named.len() + wildcards.len() + inferred.len();
    if num_unbound > 0 {
        let one_occurrence = named
            .iter()
            .map(|nv| Loc::at(nv.first_seen(), nv.variable()))
            .chain(wildcards)
            .chain(inferred)
            .next()
            .unwrap()
            .region;
    // Report errors for wildcards (*), underscores (_), and named vars that weren't declared.
    let mut no_problems = true;

        env.problems.push(Problem::UnboundTypeVariable {
    if let Some(loc_var) = wildcards.first() {
        env.problems.push(Problem::WildcardNotAllowed {
            typ: symbol,
            num_unbound,
            one_occurrence,
            num_wildcards: wildcards.len(),
            one_occurrence: loc_var.region,
            kind,
        });

        // Bail out
        return Err(());
        no_problems = false;
    }

    Ok(create_alias(
        symbol,
        name.region,
        can_vars.clone(),
        infer_ext_in_output,
        can_ann.typ,
        kind,
    ))
    if let Some(loc_var) = inferred.first() {
        env.problems.push(Problem::UnderscoreNotAllowed {
            typ: symbol,
            num_underscores: inferred.len(),
            one_occurrence: loc_var.region,
            kind,
        });

        no_problems = false;
    }

    if let Some(nv) = named.first() {
        env.problems.push(Problem::UndeclaredTypeVar {
            typ: symbol,
            num_unbound: named.len(),
            one_occurrence: nv.first_seen(),
            kind,
        });

        no_problems = false;
    }

    if no_problems {
        Ok(create_alias(
            symbol,
            name.region,
            can_vars.clone(),
            infer_ext_in_output,
            can_ann.typ,
            kind,
        ))
    } else {
        Err(())
    }
}

/// Canonicalizes a claimed ability implementation like `{ eq }` or `{ eq: myEq }`.
@ -495,7 +524,7 @@ fn canonicalize_claimed_ability_impl<'a>(
    // OPTION-1: The implementation identifier is the only identifier of that name in the
    // scope. For example,
    //
    //     interface F imports [] exposes []
    //     module []
    //
    //     Hello := {} implements [Encoding.{ toEncoder }]
    //

@ -507,7 +536,9 @@ fn canonicalize_claimed_ability_impl<'a>(
    // OPTION-2: The implementation identifier is a unique shadow of the ability member,
    // which has also been explicitly imported. For example,
    //
    //     interface F imports [Encoding.{ toEncoder }] exposes []
    //     module []
    //
    //     import Encoding exposing [toEncoder]
    //
    //     Hello := {} implements [Encoding.{ toEncoder }]
    //

@ -703,6 +734,8 @@ fn canonicalize_opaque<'a>(
        AliasKind::Opaque,
    )?;

    let mut references = References::new();

    let mut derived_defs = Vec::new();
    if let Some(has_abilities) = has_abilities {
        let has_abilities = has_abilities.value.collection();

@ -721,7 +754,8 @@ fn canonicalize_opaque<'a>(
            // Op := {} has [Eq]
            let (ability, members) = match ability.value {
                ast::TypeAnnotation::Apply(module_name, ident, []) => {
                    match make_apply_symbol(env, region, scope, module_name, ident) {
                    match make_apply_symbol(env, region, scope, module_name, ident, &mut references)
                    {
                        Ok(ability) => {
                            let opt_members = scope
                                .abilities_store

@ -914,6 +948,8 @@ fn canonicalize_opaque<'a>(
        }
    }

    output.references.union_mut(&references);

    Ok(CanonicalizedOpaque {
        opaque_def: alias,
        derived_defs,
@ -928,7 +964,12 @@ pub(crate) fn canonicalize_defs<'a>(
    scope: &mut Scope,
    loc_defs: &'a mut roc_parse::ast::Defs<'a>,
    pattern_type: PatternType,
) -> (CanDefs, Output, MutMap<Symbol, Region>) {
) -> (
    CanDefs,
    Output,
    MutMap<Symbol, Region>,
    Vec<IntroducedImport>,
) {
    // Canonicalizing defs while detecting shadowing involves a multi-step process:
    //
    // 1. Go through each of the patterns.

@ -978,6 +1019,7 @@ pub(crate) fn canonicalize_defs<'a>(
            env,
            var_store,
            value_def,
            region,
            scope,
            &pending_abilities_in_scope,
            &mut output,

@ -1034,7 +1076,12 @@ fn canonicalize_value_defs<'a>(
    pattern_type: PatternType,
    mut aliases: VecMap<Symbol, Alias>,
    mut symbols_introduced: MutMap<Symbol, Region>,
) -> (CanDefs, Output, MutMap<Symbol, Region>) {
) -> (
    CanDefs,
    Output,
    MutMap<Symbol, Region>,
    Vec<IntroducedImport>,
) {
    // Canonicalize all the patterns, record shadowing problems, and store
    // the ast::Expr values in pending_exprs for further canonicalization
    // once we've finished assembling the entire scope.

@ -1043,6 +1090,8 @@ fn canonicalize_value_defs<'a>(
    let mut pending_expects = Vec::with_capacity(value_defs.len());
    let mut pending_expect_fx = Vec::with_capacity(value_defs.len());

    let mut imports_introduced = Vec::with_capacity(value_defs.len());

    for loc_pending_def in value_defs {
        match loc_pending_def.value {
            PendingValue::Def(pending_def) => {

@ -1062,6 +1111,11 @@ fn canonicalize_value_defs<'a>(
            PendingValue::ExpectFx(pending_expect) => {
                pending_expect_fx.push(pending_expect);
            }
            PendingValue::ModuleImport(introduced_import) => {
                imports_introduced.push(introduced_import);
            }
            PendingValue::InvalidIngestedFile => { /* skip */ }
            PendingValue::ImportNameConflict => { /* skip */ }
        }
    }

@ -1171,7 +1225,7 @@ fn canonicalize_value_defs<'a>(
        aliases,
    };

    (can_defs, output, symbols_introduced)
    (can_defs, output, symbols_introduced, imports_introduced)
}

struct CanonicalizedTypeDefs<'a> {

@ -1385,9 +1439,7 @@ fn resolve_abilities(
    );

    // Record all the annotation's references in output.references.lookups
    for symbol in member_annot.references {
        output.references.insert_type_lookup(symbol);
    }
    output.references.union_mut(&member_annot.references);

    // What variables in the annotation are bound to the parent ability, and what variables
    // are bound to some other ability?
@ -2302,6 +2354,75 @@ fn canonicalize_pending_value_def<'a>(
                None,
            )
        }
        IngestedFile(loc_pattern, opt_loc_ann, path_literal) => {
            let relative_path =
                if let ast::StrLiteral::PlainLine(ingested_path) = path_literal.value {
                    ingested_path
                } else {
                    todo!(
                        "Only plain strings are supported. Other cases should be made impossible here"
                    );
                };

            let mut file_path: PathBuf = env.module_path.into();
            // Remove the header file name and push the new path.
            file_path.pop();
            file_path.push(relative_path);

            let mut bytes = vec![];

            let expr = match fs::File::open(&file_path)
                .and_then(|mut file| file.read_to_end(&mut bytes))
            {
                Ok(_) => Expr::IngestedFile(file_path.into(), Arc::new(bytes), var_store.fresh()),
                Err(e) => {
                    env.problems.push(Problem::FileProblem {
                        filename: file_path.to_path_buf(),
                        error: e.kind(),
                    });

                    Expr::RuntimeError(RuntimeError::ReadIngestedFileError {
                        filename: file_path.to_path_buf(),
                        error: e.kind(),
                        region: path_literal.region,
                    })
                }
            };

            let loc_expr = Loc::at(path_literal.region, expr);

            let opt_loc_can_ann = if let Some(loc_ann) = opt_loc_ann {
                let can_ann = canonicalize_annotation(
                    env,
                    scope,
                    &loc_ann.value,
                    loc_ann.region,
                    var_store,
                    pending_abilities_in_scope,
                    AnnotationFor::Value,
                );

                output.references.union_mut(&can_ann.references);

                Some(Loc::at(loc_ann.region, can_ann))
            } else {
                None
            };

            let def = single_can_def(
                loc_pattern,
                loc_expr,
                var_store.fresh(),
                opt_loc_can_ann,
                SendMap::default(),
            );

            DefOutput {
                output,
                references: DefReferences::Value(References::new()),
                def,
            }
        }
    };

    // Disallow ability specializations that aren't on the toplevel (note: we might loosen this
@ -2460,7 +2581,7 @@ pub fn can_defs_with_return<'a>(
    loc_defs: &'a mut Defs<'a>,
    loc_ret: &'a Loc<ast::Expr<'a>>,
) -> (Expr, Output) {
    let (unsorted, defs_output, symbols_introduced) = canonicalize_defs(
    let (unsorted, defs_output, symbols_introduced, imports_introduced) = canonicalize_defs(
        env,
        Output::default(),
        var_store,

@ -2494,6 +2615,8 @@ pub fn can_defs_with_return<'a>(
        }
    }

    report_unused_imports(imports_introduced, &output.references, env, scope);

    let mut loc_expr: Loc<Expr> = ret_expr;

    for declaration in declarations.into_iter().rev() {

@ -2503,6 +2626,28 @@ pub fn can_defs_with_return<'a>(
    (loc_expr.value, output)
}

pub fn report_unused_imports(
    imports_introduced: Vec<IntroducedImport>,
    references: &References,
    env: &mut Env<'_>,
    scope: &mut Scope,
) {
    for import in imports_introduced {
        if references.has_module_lookup(import.module_id) {
            for (symbol, region) in &import.exposed_symbols {
                if !references.has_unqualified_type_or_value_lookup(*symbol)
                    && !scope.abilities_store.is_specialization_name(*symbol)
                    && !import.is_task(env)
                {
                    env.problem(Problem::UnusedImport(*symbol, *region));
                }
            }
        } else if !import.is_task(env) {
            env.problem(Problem::UnusedModuleImport(import.module_id, import.region));
        }
    }
}

fn decl_to_let(decl: Declaration, loc_ret: Loc<Expr>) -> Loc<Expr> {
    match decl {
        Declaration::Declare(def) => {
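For reference, a hypothetical Roc module that would trip both branches of `report_unused_imports` above (module and import names are illustrative, not part of this commit):

module [greeting]

# `Ident` is exposed here but never referenced below, so the per-symbol
# branch reports UnusedImport for it.
import Symbol exposing [Ident]

# `Json` is imported but the module is never looked up at all, so the
# else branch reports UnusedModuleImport.
import Json

greeting : Str
greeting = "hello"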
@ -2570,7 +2715,7 @@ fn to_pending_alias_or_opaque<'a>(

    for loc_var in vars.iter() {
        match loc_var.value {
            ast::Pattern::Identifier(name)
            ast::Pattern::Identifier { ident: name, .. }
                if name.chars().next().unwrap().is_lowercase() =>
            {
                let lowercase = Lowercase::from(name);

@ -2750,7 +2895,10 @@ enum PendingValue<'a> {
    Dbg(PendingExpectOrDbg<'a>),
    Expect(PendingExpectOrDbg<'a>),
    ExpectFx(PendingExpectOrDbg<'a>),
    ModuleImport(IntroducedImport),
    SignatureDefMismatch,
    InvalidIngestedFile,
    ImportNameConflict,
}

struct PendingExpectOrDbg<'a> {

@ -2758,10 +2906,28 @@ struct PendingExpectOrDbg<'a> {
    preceding_comment: Region,
}

pub struct IntroducedImport {
    module_id: ModuleId,
    region: Region,
    exposed_symbols: Vec<(Symbol, Region)>,
}

impl IntroducedImport {
    pub fn is_task(&self, env: &Env<'_>) -> bool {
        // Temporarily needed for `!` convenience. Can be removed when Task becomes a builtin.
        match env.qualified_module_ids.get_name(self.module_id) {
            Some(name) => name.as_inner().as_str() == "Task",
            None => false,
        }
    }
}

#[allow(clippy::too_many_arguments)]
fn to_pending_value_def<'a>(
    env: &mut Env<'a>,
    var_store: &mut VarStore,
    def: &'a ast::ValueDef<'a>,
    region: Region,
    scope: &mut Scope,
    pending_abilities_in_scope: &PendingAbilitiesInScope,
    output: &mut Output,
@ -2874,6 +3040,118 @@ fn to_pending_value_def<'a>(
|
|||
condition,
|
||||
preceding_comment: *preceding_comment,
|
||||
}),
|
||||
|
||||
        ModuleImport(module_import) => {
            let qualified_module_name: QualifiedModuleName = module_import.name.value.into();
            let module_name = qualified_module_name.module.clone();
            let pq_module_name = qualified_module_name.into_pq_module_name(env.opt_shorthand);

            let module_id = env
                .qualified_module_ids
                .get_id(&pq_module_name)
                .expect("Module id should have been added in load");

            let name_with_alias = match module_import.alias {
                Some(alias) => ModuleName::from(alias.item.value.as_str()),
                None => module_name.clone(),
            };

            if let Err(existing_import) =
                scope
                    .modules
                    .insert(name_with_alias.clone(), module_id, region)
            {
                env.problems.push(Problem::ImportNameConflict {
                    name: name_with_alias,
                    is_alias: module_import.alias.is_some(),
                    new_module_id: module_id,
                    new_import_region: region,
                    existing_import,
                });

                return PendingValue::ImportNameConflict;
            }

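            // Hypothetical example (not part of this diff): two imports that end up bound
            // to the same name in scope, e.g. an `import Json` followed by another module
            // imported `as Json`, would hit the `scope.modules.insert` error above and be
            // reported as `Problem::ImportNameConflict` (with `is_alias` telling the report
            // whether the clashing name came from an alias).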
let exposed_names = module_import
|
||||
.exposed
|
||||
.map(|kw| kw.item.items)
|
||||
.unwrap_or_default();
|
||||
|
||||
if exposed_names.is_empty() && !env.home.is_builtin() && module_id.is_automatically_imported() {
|
||||
env.problems.push(Problem::ExplicitBuiltinImport(module_id, region));
|
||||
}
|
||||
|
||||
let exposed_ids = env
|
||||
.dep_idents
|
||||
.get(&module_id)
|
||||
.expect("Module id should have been added in load");
|
||||
|
||||
let mut exposed_symbols = Vec::with_capacity(exposed_names.len());
|
||||
|
||||
for loc_name in exposed_names {
|
||||
let exposed_name = loc_name.value.item();
|
||||
let name = exposed_name.as_str();
|
||||
let ident = Ident::from(name);
|
||||
|
||||
match exposed_ids.get_id(name) {
|
||||
Some(ident_id) => {
|
||||
let symbol = Symbol::new(module_id, ident_id);
|
||||
exposed_symbols.push((symbol, loc_name.region));
|
||||
|
||||
if let Err((_shadowed_symbol, existing_symbol_region)) = scope.import_symbol(ident, symbol, loc_name.region) {
|
||||
if symbol.is_automatically_imported() {
|
||||
env.problem(Problem::ExplicitBuiltinTypeImport(
|
||||
symbol,
|
||||
loc_name.region,
|
||||
));
|
||||
} else {
|
||||
env.problem(Problem::ImportShadowsSymbol {
|
||||
region: loc_name.region,
|
||||
new_symbol: symbol,
|
||||
existing_symbol_region,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
None => {
|
||||
env.problem(Problem::RuntimeError(RuntimeError::ValueNotExposed {
|
||||
module_name: module_name.clone(),
|
||||
ident,
|
||||
region: loc_name.region,
|
||||
exposed_values: exposed_ids.exposed_values(),
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
PendingValue::ModuleImport(IntroducedImport {
|
||||
module_id,
|
||||
region,
|
||||
exposed_symbols,
|
||||
})
|
||||
}
|
||||
IngestedFileImport(ingested_file) => {
|
||||
let loc_name = ingested_file.name.item;
|
||||
|
||||
let symbol = match scope.introduce(loc_name.value.into(), loc_name.region) {
|
||||
Ok(symbol ) => symbol,
|
||||
Err((original, shadow, _)) => {
|
||||
env.problem(Problem::Shadowing {
|
||||
original_region: original.region,
|
||||
shadow,
|
||||
kind: ShadowKind::Variable
|
||||
});
|
||||
|
||||
return PendingValue::InvalidIngestedFile;
|
||||
}
|
||||
};
|
||||
|
||||
let loc_pattern = Loc::at(loc_name.region, Pattern::Identifier(symbol));
|
||||
|
||||
PendingValue::Def(PendingValueDef::IngestedFile(loc_pattern, ingested_file.annotation.map(|ann| ann.annotation), ingested_file.path))
|
||||
}
|
||||
Stmt(_) => internal_error!("a Stmt was not desugared correctly, should have been converted to a Body(...) in desugar"),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -23,9 +23,10 @@ fn to_encoder<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
|
|||
let opaque_ref = alloc_pat(ast::Pattern::OpaqueRef(at_opaque));
|
||||
let opaque_apply_pattern = ast::Pattern::Apply(
|
||||
opaque_ref,
|
||||
&*env
|
||||
.arena
|
||||
.alloc([Loc::at(DERIVED_REGION, ast::Pattern::Identifier(payload))]),
|
||||
&*env.arena.alloc([Loc::at(
|
||||
DERIVED_REGION,
|
||||
ast::Pattern::Identifier { ident: payload },
|
||||
)]),
|
||||
);
|
||||
|
||||
// Encode.toEncoder payload
|
||||
|
@ -96,8 +97,8 @@ fn decoder<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
|
|||
// Decode.mapResult (Decode.decodeWith bytes Decode.decoder fmt) @Opaq
|
||||
let custom_closure = ast::Expr::Closure(
|
||||
env.arena.alloc([
|
||||
Loc::at(DERIVED_REGION, ast::Pattern::Identifier(bytes)),
|
||||
Loc::at(DERIVED_REGION, ast::Pattern::Identifier(fmt)),
|
||||
Loc::at(DERIVED_REGION, ast::Pattern::Identifier { ident: bytes }),
|
||||
Loc::at(DERIVED_REGION, ast::Pattern::Identifier { ident: fmt }),
|
||||
]),
|
||||
alloc_expr(call_map_result),
|
||||
);
|
||||
|
@ -127,9 +128,10 @@ fn hash<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
|
|||
let opaque_ref = alloc_pat(ast::Pattern::OpaqueRef(at_opaque));
|
||||
let opaque_apply_pattern = ast::Pattern::Apply(
|
||||
opaque_ref,
|
||||
&*env
|
||||
.arena
|
||||
.alloc([Loc::at(DERIVED_REGION, ast::Pattern::Identifier(payload))]),
|
||||
&*env.arena.alloc([Loc::at(
|
||||
DERIVED_REGION,
|
||||
ast::Pattern::Identifier { ident: payload },
|
||||
)]),
|
||||
);
|
||||
|
||||
// Hash.hash hasher payload
|
||||
|
@ -154,7 +156,7 @@ fn hash<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
|
|||
// \hasher, @Opaq payload -> Hash.hash hasher payload
|
||||
ast::Expr::Closure(
|
||||
env.arena.alloc([
|
||||
Loc::at(DERIVED_REGION, ast::Pattern::Identifier(hasher)),
|
||||
Loc::at(DERIVED_REGION, ast::Pattern::Identifier { ident: hasher }),
|
||||
Loc::at(DERIVED_REGION, opaque_apply_pattern),
|
||||
]),
|
||||
call_member,
|
||||
|
@ -172,16 +174,18 @@ fn is_eq<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
|
|||
// \@Opaq payload1
|
||||
let opaque1 = ast::Pattern::Apply(
|
||||
opaque_ref,
|
||||
&*env
|
||||
.arena
|
||||
.alloc([Loc::at(DERIVED_REGION, ast::Pattern::Identifier(payload1))]),
|
||||
&*env.arena.alloc([Loc::at(
|
||||
DERIVED_REGION,
|
||||
ast::Pattern::Identifier { ident: payload1 },
|
||||
)]),
|
||||
);
|
||||
// \@Opaq payload2
|
||||
let opaque2 = ast::Pattern::Apply(
|
||||
opaque_ref,
|
||||
&*env
|
||||
.arena
|
||||
.alloc([Loc::at(DERIVED_REGION, ast::Pattern::Identifier(payload2))]),
|
||||
&*env.arena.alloc([Loc::at(
|
||||
DERIVED_REGION,
|
||||
ast::Pattern::Identifier { ident: payload2 },
|
||||
)]),
|
||||
);
|
||||
|
||||
// Bool.isEq payload1 payload2
|
||||
|
@ -224,9 +228,10 @@ fn to_inspector<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
|
|||
let opaque_ref = alloc_pat(ast::Pattern::OpaqueRef(at_opaque));
|
||||
let opaque_apply_pattern = ast::Pattern::Apply(
|
||||
opaque_ref,
|
||||
&*env
|
||||
.arena
|
||||
.alloc([Loc::at(DERIVED_REGION, ast::Pattern::Identifier(payload))]),
|
||||
&*env.arena.alloc([Loc::at(
|
||||
DERIVED_REGION,
|
||||
ast::Pattern::Identifier { ident: payload },
|
||||
)]),
|
||||
);
|
||||
|
||||
// Inspect.toInspector payload
|
||||
|
@ -276,8 +281,10 @@ fn to_inspector<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
|
|||
));
|
||||
|
||||
let custom_closure = alloc_expr(ast::Expr::Closure(
|
||||
env.arena
|
||||
.alloc([Loc::at(DERIVED_REGION, ast::Pattern::Identifier(fmt))]),
|
||||
env.arena.alloc([Loc::at(
|
||||
DERIVED_REGION,
|
||||
ast::Pattern::Identifier { ident: fmt },
|
||||
)]),
|
||||
apply_opaque_inspector,
|
||||
));
|
||||
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
#![allow(clippy::manual_map)]
|
||||
|
||||
use crate::suffixed::{apply_task_await, unwrap_suffixed_expression, EUnwrapped};
|
||||
use bumpalo::collections::Vec;
|
||||
use bumpalo::Bump;
|
||||
use roc_error_macros::internal_error;
|
||||
|
@ -8,8 +9,8 @@ use roc_module::called_via::{BinOp, CalledVia};
|
|||
use roc_module::ident::ModuleName;
|
||||
use roc_parse::ast::Expr::{self, *};
|
||||
use roc_parse::ast::{
|
||||
AssignedField, Collection, Pattern, RecordBuilderField, StrLiteral, StrSegment, ValueDef,
|
||||
WhenBranch,
|
||||
AssignedField, Collection, ModuleImportParams, Pattern, RecordBuilderField, StrLiteral,
|
||||
StrSegment, ValueDef, WhenBranch,
|
||||
};
|
||||
use roc_region::all::{LineInfo, Loc, Region};
|
||||
|
||||
|
@ -92,9 +93,10 @@ fn desugar_value_def<'a>(
|
|||
ann_pattern,
|
||||
ann_type,
|
||||
comment: *comment,
|
||||
body_pattern,
|
||||
body_pattern: desugar_loc_pattern(arena, body_pattern, src, line_info, module_path),
|
||||
body_expr: desugar_expr(arena, body_expr, src, line_info, module_path),
|
||||
},
|
||||
|
||||
Dbg {
|
||||
condition,
|
||||
preceding_comment,
|
||||
|
@ -128,6 +130,40 @@ fn desugar_value_def<'a>(
|
|||
preceding_comment: *preceding_comment,
|
||||
}
|
||||
}
|
||||
ModuleImport(roc_parse::ast::ModuleImport {
|
||||
before_name,
|
||||
name,
|
||||
params,
|
||||
alias,
|
||||
exposed,
|
||||
}) => {
|
||||
let desugared_params =
|
||||
params.map(|ModuleImportParams { before, params }| ModuleImportParams {
|
||||
before,
|
||||
params: desugar_field_collection(arena, params, src, line_info, module_path),
|
||||
});
|
||||
|
||||
ModuleImport(roc_parse::ast::ModuleImport {
|
||||
before_name,
|
||||
name: *name,
|
||||
params: desugared_params,
|
||||
alias: *alias,
|
||||
exposed: *exposed,
|
||||
})
|
||||
}
|
||||
IngestedFileImport(_) => *def,
|
||||
|
||||
Stmt(stmt_expr) => {
|
||||
// desugar into a Body({}, stmt_expr)
|
||||
let loc_pattern = arena.alloc(Loc::at(
|
||||
stmt_expr.region,
|
||||
Pattern::RecordDestructure(Collection::empty()),
|
||||
));
|
||||
Body(
|
||||
loc_pattern,
|
||||
desugar_expr(arena, stmt_expr, src, line_info, module_path),
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -137,227 +173,109 @@ pub fn desugar_defs_node_values<'a>(
|
|||
src: &'a str,
|
||||
line_info: &mut Option<LineInfo>,
|
||||
module_path: &str,
|
||||
top_level_def: bool,
|
||||
) {
|
||||
for value_def in defs.value_defs.iter_mut() {
|
||||
*value_def = desugar_value_def(arena, arena.alloc(*value_def), src, line_info, module_path);
|
||||
}
|
||||
}
|
||||
|
||||
fn desugar_defs_node_suffixed<'a>(
|
||||
arena: &'a Bump,
|
||||
loc_expr: &'a Loc<Expr<'a>>,
|
||||
) -> &'a Loc<Expr<'a>> {
|
||||
match loc_expr.value {
|
||||
Defs(defs, loc_ret) => {
|
||||
match defs.search_suffixed_defs() {
|
||||
None => loc_expr,
|
||||
Some((tag_index, value_index)) => {
|
||||
if defs.value_defs.len() == 1 {
|
||||
// We have only one value_def and it must be Suffixed
|
||||
// replace Defs with an Apply(Task.await) and Closure of loc_return
|
||||
|
||||
debug_assert!(
|
||||
value_index == 0,
|
||||
"we have only one value_def and so it must be Suffixed "
|
||||
);
|
||||
|
||||
// Unwrap Suffixed def within Apply, and the pattern so we can use in the call to Task.await
|
||||
let (suffixed_sub_apply_loc, pattern) = unwrap_suffixed_def_and_pattern(
|
||||
arena,
|
||||
loc_expr.region,
|
||||
defs.value_defs[0],
|
||||
);
|
||||
|
||||
// Create Closure for the result of the recursion,
|
||||
// use the pattern from our Suffixed Def as closure argument
|
||||
let closure_expr = Closure(arena.alloc([*pattern]), loc_ret);
|
||||
|
||||
// Apply arguments to Task.await, first is the unwrapped Suffix expr second is the Closure
|
||||
let mut task_await_apply_args: Vec<&'a Loc<Expr<'a>>> = Vec::new_in(arena);
|
||||
|
||||
task_await_apply_args
|
||||
.push(arena.alloc(Loc::at(loc_expr.region, suffixed_sub_apply_loc)));
|
||||
task_await_apply_args
|
||||
.push(arena.alloc(Loc::at(loc_expr.region, closure_expr)));
|
||||
|
||||
arena.alloc(Loc::at(
|
||||
loc_expr.region,
|
||||
Apply(
|
||||
arena.alloc(Loc {
|
||||
region: loc_expr.region,
|
||||
value: Var {
|
||||
module_name: ModuleName::TASK,
|
||||
ident: "await",
|
||||
},
|
||||
}),
|
||||
arena.alloc(task_await_apply_args),
|
||||
CalledVia::BangSuffix,
|
||||
),
|
||||
))
|
||||
} else if value_index == 0 {
|
||||
// We have a Suffixed in first index, and also other nodes in Defs
|
||||
// pop the first Suffixed and recurse on Defs (without first) to handle any other Suffixed
|
||||
// the result will be wrapped in an Apply(Task.await) and Closure
|
||||
|
||||
debug_assert!(
|
||||
defs.value_defs.len() > 1,
|
||||
"we know we have other Defs that will need to be considered"
|
||||
);
|
||||
|
||||
// Unwrap Suffixed def within Apply, and the pattern so we can use in the call to Task.await
|
||||
let (suffixed_sub_apply_loc, pattern) = unwrap_suffixed_def_and_pattern(
|
||||
arena,
|
||||
loc_expr.region,
|
||||
defs.value_defs[0],
|
||||
);
|
||||
|
||||
// Get a mutable copy of the defs
|
||||
let mut copied_defs = defs.clone();
|
||||
|
||||
// Remove the suffixed def
|
||||
copied_defs.remove_value_def(tag_index);
|
||||
|
||||
// Recurse using new Defs to get new expression
|
||||
let new_loc_expr = desugar_defs_node_suffixed(
|
||||
arena,
|
||||
arena.alloc(Loc::at(
|
||||
loc_expr.region,
|
||||
Defs(arena.alloc(copied_defs), loc_ret),
|
||||
)),
|
||||
);
|
||||
|
||||
// Create Closure for the result of the recursion,
|
||||
// use the pattern from our Suffixed Def as closure argument
|
||||
let closure_expr = Closure(arena.alloc([*pattern]), new_loc_expr);
|
||||
|
||||
// Apply arguments to Task.await, first is the unwrapped Suffix expr second is the Closure
|
||||
let mut task_await_apply_args: Vec<&'a Loc<Expr<'a>>> = Vec::new_in(arena);
|
||||
|
||||
task_await_apply_args
|
||||
.push(arena.alloc(Loc::at(loc_expr.region, suffixed_sub_apply_loc)));
|
||||
task_await_apply_args
|
||||
.push(arena.alloc(Loc::at(loc_expr.region, closure_expr)));
|
||||
|
||||
arena.alloc(Loc::at(
|
||||
loc_expr.region,
|
||||
Apply(
|
||||
arena.alloc(Loc {
|
||||
region: loc_expr.region,
|
||||
value: Var {
|
||||
module_name: ModuleName::TASK,
|
||||
ident: "await",
|
||||
},
|
||||
}),
|
||||
arena.alloc(task_await_apply_args),
|
||||
CalledVia::BangSuffix,
|
||||
),
|
||||
))
|
||||
} else {
|
||||
// The first Suffixed is in the middle of our Defs
|
||||
// We will keep the defs before the Suffixed in our Defs node
|
||||
// We take the defs after the Suffixed and create a new Defs node using the current loc_return
|
||||
// Then recurse on the new Defs node, wrap the result in an Apply(Task.await) and Closure,
|
||||
// which will become the new loc_return
|
||||
|
||||
let (before, after) = {
|
||||
let values = defs.split_values_either_side_of(tag_index);
|
||||
(values.before, values.after)
|
||||
};
|
||||
|
||||
// If there are no defs after, then just use loc_ret as we dont need a Defs node
|
||||
let defs_after_suffixed_desugared = {
|
||||
if !after.is_empty() {
|
||||
desugar_defs_node_suffixed(
|
||||
arena,
|
||||
arena.alloc(Loc::at(
|
||||
loc_expr.region,
|
||||
Defs(arena.alloc(after), loc_ret),
|
||||
)),
|
||||
)
|
||||
} else {
|
||||
loc_ret
|
||||
}
|
||||
};
|
||||
|
||||
// Unwrap Suffixed def within Apply, and the pattern so we can use in the call to Task.await
|
||||
let (suffixed_sub_apply_loc, pattern) = unwrap_suffixed_def_and_pattern(
|
||||
arena,
|
||||
loc_expr.region,
|
||||
defs.value_defs[value_index],
|
||||
);
|
||||
|
||||
// Create Closure for the result of the recursion,
|
||||
// use the pattern from our Suffixed Def as closure argument
|
||||
let closure_expr =
|
||||
Closure(arena.alloc([*pattern]), defs_after_suffixed_desugared);
|
||||
|
||||
// Apply arguments to Task.await, first is the unwrapped Suffix expr second is the Closure
|
||||
let mut task_await_apply_args: Vec<&'a Loc<Expr<'a>>> = Vec::new_in(arena);
|
||||
|
||||
task_await_apply_args
|
||||
.push(arena.alloc(Loc::at(loc_expr.region, suffixed_sub_apply_loc)));
|
||||
task_await_apply_args
|
||||
.push(arena.alloc(Loc::at(loc_expr.region, closure_expr)));
|
||||
|
||||
let new_loc_return = arena.alloc(Loc::at(
|
||||
loc_expr.region,
|
||||
Apply(
|
||||
arena.alloc(Loc {
|
||||
region: loc_expr.region,
|
||||
value: Var {
|
||||
module_name: ModuleName::TASK,
|
||||
ident: "await",
|
||||
},
|
||||
}),
|
||||
arena.alloc(task_await_apply_args),
|
||||
CalledVia::BangSuffix,
|
||||
),
|
||||
));
|
||||
|
||||
arena.alloc(Loc::at(
|
||||
loc_expr.region,
|
||||
Defs(arena.alloc(before), new_loc_return),
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
// `desugar_defs_node_values` is called recursively in `desugar_expr`
|
||||
// and we only want to unwrap suffixed nodes if they are a top level def.
|
||||
//
|
||||
// check here first so we only unwrap the expressions once, and after they have
|
||||
// been desugared
|
||||
if top_level_def {
|
||||
for value_def in defs.value_defs.iter_mut() {
|
||||
*value_def = desugar_value_def_suffixed(arena, *value_def);
|
||||
}
|
||||
_ => unreachable!(
|
||||
"should only be passed a Defs node as it is called from within desugar_expr for Defs"
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
// Unwrap Suffixed def within Apply, and the pattern so we can use in the call to Task.await
|
||||
fn unwrap_suffixed_def_and_pattern<'a>(
|
||||
arena: &'a Bump,
|
||||
region: Region,
|
||||
value_def: ValueDef<'a>,
|
||||
) -> (
|
||||
roc_parse::ast::Expr<'a>,
|
||||
&'a Loc<roc_parse::ast::Pattern<'a>>,
|
||||
) {
|
||||
/// For each top-level ValueDef in our module, we will unwrap any suffixed
/// expressions
///
/// e.g. `say! "hi"` desugars to `Task.await (say "hi") \{} -> ...`
pub fn desugar_value_def_suffixed<'a>(arena: &'a Bump, value_def: ValueDef<'a>) -> ValueDef<'a> {
    use ValueDef::*;
|
||||
|
||||
match value_def {
|
||||
ValueDef::Body(pattern, suffixed_expression) => match suffixed_expression.value {
|
||||
// The Suffixed has arguments applied e.g. `Stdout.line! "Hello World"`
|
||||
Apply(sub_loc, suffixed_args, called_via) => match sub_loc.value {
|
||||
Suffixed(sub_expr) => (
|
||||
Apply(
|
||||
arena.alloc(Loc::at(region, *sub_expr)),
|
||||
suffixed_args,
|
||||
called_via,
|
||||
Body(loc_pattern, loc_expr) => {
|
||||
// note called_from_def is passed as `false` as this is a top_level_def
|
||||
match unwrap_suffixed_expression(arena, loc_expr, None) {
|
||||
Ok(new_expr) => Body(loc_pattern, new_expr),
|
||||
Err(EUnwrapped::UnwrappedSubExpr {
|
||||
sub_arg,
|
||||
sub_pat,
|
||||
sub_new,
|
||||
}) => desugar_value_def_suffixed(
|
||||
arena,
|
||||
Body(
|
||||
loc_pattern,
|
||||
apply_task_await(arena, loc_expr.region, sub_arg, sub_pat, sub_new),
|
||||
),
|
||||
pattern,
|
||||
),
|
||||
_ => unreachable!("should have a suffixed Apply inside Body def"),
|
||||
},
|
||||
// The Suffixed has NIL arguments applied e.g. `Stdin.line!`
|
||||
Suffixed(sub_expr) => (*sub_expr, pattern),
|
||||
_ => {
|
||||
unreachable!("should have a suffixed Apply inside Body def")
|
||||
Err(..) => Body(
|
||||
loc_pattern,
|
||||
arena.alloc(Loc::at(loc_expr.region, MalformedSuffixed(loc_expr))),
|
||||
),
|
||||
}
|
||||
},
|
||||
_ => unreachable!("should have a suffixed Body def"),
|
||||
}
|
||||
ann @ Annotation(_, _) => ann,
|
||||
AnnotatedBody {
|
||||
ann_pattern,
|
||||
ann_type,
|
||||
comment,
|
||||
body_pattern,
|
||||
body_expr,
|
||||
} => {
|
||||
// note called_from_def is passed as `false` as this is a top_level_def
|
||||
match unwrap_suffixed_expression(arena, body_expr, None) {
|
||||
Ok(new_expr) => AnnotatedBody {
|
||||
ann_pattern,
|
||||
ann_type,
|
||||
comment,
|
||||
body_pattern,
|
||||
body_expr: new_expr,
|
||||
},
|
||||
Err(EUnwrapped::UnwrappedSubExpr {
|
||||
sub_arg,
|
||||
sub_pat,
|
||||
sub_new,
|
||||
}) => desugar_value_def_suffixed(
|
||||
arena,
|
||||
AnnotatedBody {
|
||||
ann_pattern,
|
||||
ann_type,
|
||||
comment,
|
||||
body_pattern,
|
||||
body_expr: apply_task_await(
|
||||
arena,
|
||||
body_expr.region,
|
||||
sub_arg,
|
||||
sub_pat,
|
||||
sub_new,
|
||||
),
|
||||
},
|
||||
),
|
||||
Err(..) => AnnotatedBody {
|
||||
ann_pattern,
|
||||
ann_type,
|
||||
comment,
|
||||
body_pattern,
|
||||
body_expr: arena.alloc(Loc::at(body_expr.region, MalformedSuffixed(body_expr))),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// TODO support desugaring of Dbg, Expect, and ExpectFx
|
||||
Dbg { .. } | Expect { .. } | ExpectFx { .. } => value_def,
|
||||
ModuleImport { .. } | IngestedFileImport(_) => value_def,
|
||||
|
||||
Stmt(..) => {
|
||||
internal_error!(
|
||||
"this should have been desugared into a Body(..) before this call in desugar_expr"
|
||||
)
|
||||
}
|
||||
}
|
||||
}
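// Illustrative sketch (not part of this diff) of the transformation this
// function performs, written out per the doc comment above in Roc-like
// pseudocode:
//
//     main =
//         say! "hi"          # a suffixed statement...
//         Stdout.line "ok"
//
// becomes, roughly:
//
//     main =
//         Task.await (say "hi") \{} ->
//             Stdout.line "ok"
//
// i.e. the suffixed call is unwrapped into an `Apply` of `Task.await` whose
// second argument is a closure over the rest of the def.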
|
||||
|
||||
|
@ -380,12 +298,12 @@ pub fn desugar_expr<'a>(
|
|||
| Underscore { .. }
|
||||
| MalformedIdent(_, _)
|
||||
| MalformedClosure
|
||||
| MalformedSuffixed(..)
|
||||
| PrecedenceConflict { .. }
|
||||
| MultipleRecordBuilders { .. }
|
||||
| UnappliedRecordBuilder { .. }
|
||||
| Tag(_)
|
||||
| OpaqueRef(_)
|
||||
| IngestedFile(_, _)
|
||||
| Crash => loc_expr,
|
||||
|
||||
Str(str_literal) => match str_literal {
|
||||
|
@ -436,6 +354,15 @@ pub fn desugar_expr<'a>(
|
|||
|
||||
arena.alloc(Loc { region, value })
|
||||
}
|
||||
// desugar the sub_expression, but leave the TaskAwaitBang as this will
|
||||
// be unwrapped later in desugar_value_def_suffixed
|
||||
TaskAwaitBang(sub_expr) => {
|
||||
let intermediate = arena.alloc(Loc::at(loc_expr.region, **sub_expr));
|
||||
let new_sub_loc_expr = desugar_expr(arena, intermediate, src, line_info, module_path);
|
||||
let new_sub_expr = arena.alloc(new_sub_loc_expr.value);
|
||||
|
||||
arena.alloc(Loc::at(loc_expr.region, TaskAwaitBang(new_sub_expr)))
|
||||
}
|
||||
RecordAccess(sub_expr, paths) => {
|
||||
let region = loc_expr.region;
|
||||
let loc_sub_expr = Loc {
|
||||
|
@ -471,15 +398,7 @@ pub fn desugar_expr<'a>(
|
|||
})
|
||||
}
|
||||
Record(fields) => {
|
||||
let mut allocated = Vec::with_capacity_in(fields.len(), arena);
|
||||
for field in fields.iter() {
|
||||
let value = desugar_field(arena, &field.value, src, line_info, module_path);
|
||||
allocated.push(Loc {
|
||||
value,
|
||||
region: field.region,
|
||||
});
|
||||
}
|
||||
let fields = fields.replace_items(allocated.into_bump_slice());
|
||||
let fields = desugar_field_collection(arena, *fields, src, line_info, module_path);
|
||||
arena.alloc(Loc {
|
||||
region: loc_expr.region,
|
||||
value: Record(fields),
|
||||
|
@ -581,14 +500,10 @@ pub fn desugar_expr<'a>(
|
|||
),
|
||||
Defs(defs, loc_ret) => {
|
||||
let mut defs = (*defs).clone();
|
||||
desugar_defs_node_values(arena, &mut defs, src, line_info, module_path);
|
||||
desugar_defs_node_values(arena, &mut defs, src, line_info, module_path, false);
|
||||
let loc_ret = desugar_expr(arena, loc_ret, src, line_info, module_path);
|
||||
|
||||
// Desugar any Suffixed nodes
|
||||
desugar_defs_node_suffixed(
|
||||
arena,
|
||||
arena.alloc(Loc::at(loc_expr.region, Defs(arena.alloc(defs), loc_ret))),
|
||||
)
|
||||
arena.alloc(Loc::at(loc_expr.region, Defs(arena.alloc(defs), loc_ret)))
|
||||
}
|
||||
Apply(loc_fn, loc_args, called_via) => {
|
||||
let mut desugared_args = Vec::with_capacity_in(loc_args.len(), arena);
|
||||
|
@ -839,31 +754,31 @@ pub fn desugar_expr<'a>(
|
|||
region: loc_expr.region,
|
||||
})
|
||||
}
|
||||
LowLevelDbg(_, _, _) => unreachable!("Only exists after desugaring"),
|
||||
Suffixed(expr) => {
|
||||
// Rewrite `Suffixed(BinOps([args...], Var(...)))` to `BinOps([args...], Suffixed(Var(...)))`
|
||||
// This is to handle cases like e.g. `"Hello" |> line!`
|
||||
if let BinOps(args, sub_expr) = expr {
|
||||
return desugar_expr(
|
||||
arena,
|
||||
|
||||
// Replace an empty final def with a `Task.ok {}`
|
||||
EmptyDefsFinal => {
|
||||
let mut apply_args: Vec<&'a Loc<Expr<'a>>> = Vec::new_in(arena);
|
||||
apply_args
|
||||
.push(arena.alloc(Loc::at(loc_expr.region, Expr::Record(Collection::empty()))));
|
||||
|
||||
arena.alloc(Loc::at(
|
||||
loc_expr.region,
|
||||
Expr::Apply(
|
||||
arena.alloc(Loc::at(
|
||||
loc_expr.region,
|
||||
BinOps(
|
||||
args,
|
||||
arena.alloc(Loc::at(sub_expr.region, Suffixed(&sub_expr.value))),
|
||||
),
|
||||
Expr::Var {
|
||||
module_name: ModuleName::TASK,
|
||||
ident: "ok",
|
||||
},
|
||||
)),
|
||||
src,
|
||||
line_info,
|
||||
module_path,
|
||||
);
|
||||
}
|
||||
|
||||
// Suffixed are also desugared in Defs
|
||||
// Any nodes that don't get desugared will be caught by canonicalize_expr
|
||||
// and we can handle those cases as required
|
||||
loc_expr
|
||||
arena.alloc(apply_args),
|
||||
CalledVia::BangSuffix,
|
||||
),
|
||||
))
|
||||
}
|
||||
|
||||
// note this only exists after desugaring
|
||||
LowLevelDbg(_, _, _) => loc_expr,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -917,6 +832,24 @@ fn desugar_str_segments<'a>(
|
|||
.into_bump_slice()
|
||||
}
|
||||
|
||||
fn desugar_field_collection<'a>(
    arena: &'a Bump,
    fields: Collection<'a, Loc<AssignedField<'a, Expr<'a>>>>,
    src: &'a str,
    line_info: &mut Option<LineInfo>,
    module_path: &str,
) -> Collection<'a, Loc<AssignedField<'a, Expr<'a>>>> {
    let mut allocated = Vec::with_capacity_in(fields.len(), arena);

    for field in fields.iter() {
        let value = desugar_field(arena, &field.value, src, line_info, module_path);

        allocated.push(Loc::at(field.region, value));
    }

    fields.replace_items(allocated.into_bump_slice())
}

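// Note (not part of this diff): pulling this loop out of the `Record(fields)`
// arm lets the same desugaring be reused for record literals and for the new
// module import params (`ModuleImportParams`); both call sites appear in this
// change:
//
//     params: desugar_field_collection(arena, params, src, line_info, module_path),
//     let fields = desugar_field_collection(arena, *fields, src, line_info, module_path);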
fn desugar_field<'a>(
|
||||
arena: &'a Bump,
|
||||
field: &'a AssignedField<'a, Expr<'a>>,
|
||||
|
@ -1009,7 +942,7 @@ fn desugar_pattern<'a>(
|
|||
use roc_parse::ast::Pattern::*;
|
||||
|
||||
match pattern {
|
||||
Identifier(_)
|
||||
Identifier { .. }
|
||||
| Tag(_)
|
||||
| OpaqueRef(_)
|
||||
| NumLiteral(_)
|
||||
|
@ -1168,7 +1101,7 @@ fn record_builder_arg<'a>(
|
|||
|
||||
for label in apply_field_names.iter().rev() {
|
||||
let name = arena.alloc("#".to_owned() + label.value);
|
||||
let ident = roc_parse::ast::Pattern::Identifier(name);
|
||||
let ident = roc_parse::ast::Pattern::Identifier { ident: name };
|
||||
|
||||
let arg_pattern = arena.alloc(Loc {
|
||||
value: ident,
|
||||
|
|
|
@ -1,9 +1,11 @@
|
|||
use std::path::Path;
|
||||
|
||||
use crate::procedure::References;
|
||||
use crate::scope::Scope;
|
||||
use bumpalo::Bump;
|
||||
use roc_collections::{MutMap, VecSet};
|
||||
use roc_module::ident::{Ident, Lowercase, ModuleName};
|
||||
use roc_module::symbol::{IdentIdsByModule, ModuleId, ModuleIds, Symbol};
|
||||
use roc_module::ident::{Ident, ModuleName};
|
||||
use roc_module::symbol::{IdentIdsByModule, ModuleId, PQModuleName, PackageModuleIds, Symbol};
|
||||
use roc_problem::can::{Problem, RuntimeError};
|
||||
use roc_region::all::{Loc, Region};
|
||||
|
||||
|
@ -13,9 +15,11 @@ pub struct Env<'a> {
|
|||
/// are assumed to be relative to this path.
|
||||
pub home: ModuleId,
|
||||
|
||||
pub module_path: &'a Path,
|
||||
|
||||
pub dep_idents: &'a IdentIdsByModule,
|
||||
|
||||
pub module_ids: &'a ModuleIds,
|
||||
pub qualified_module_ids: &'a PackageModuleIds<'a>,
|
||||
|
||||
/// Problems we've encountered along the way, which will be reported to the user at the end.
|
||||
pub problems: Vec<Problem>,
|
||||
|
@ -35,26 +39,32 @@ pub struct Env<'a> {
|
|||
pub top_level_symbols: VecSet<Symbol>,
|
||||
|
||||
pub arena: &'a Bump,
|
||||
|
||||
pub opt_shorthand: Option<&'a str>,
|
||||
}
|
||||
|
||||
impl<'a> Env<'a> {
|
||||
pub fn new(
|
||||
arena: &'a Bump,
|
||||
home: ModuleId,
|
||||
module_path: &'a Path,
|
||||
dep_idents: &'a IdentIdsByModule,
|
||||
module_ids: &'a ModuleIds,
|
||||
qualified_module_ids: &'a PackageModuleIds<'a>,
|
||||
opt_shorthand: Option<&'a str>,
|
||||
) -> Env<'a> {
|
||||
Env {
|
||||
arena,
|
||||
home,
|
||||
module_path,
|
||||
dep_idents,
|
||||
module_ids,
|
||||
qualified_module_ids,
|
||||
problems: Vec::new(),
|
||||
closures: MutMap::default(),
|
||||
qualified_value_lookups: VecSet::default(),
|
||||
qualified_type_lookups: VecSet::default(),
|
||||
tailcallable_symbol: None,
|
||||
top_level_symbols: VecSet::default(),
|
||||
opt_shorthand,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -72,17 +82,20 @@ impl<'a> Env<'a> {
|
|||
|
||||
let module_name = ModuleName::from(module_name_str);
|
||||
|
||||
match self.module_ids.get_id(&module_name) {
|
||||
match scope.modules.get_id(&module_name) {
|
||||
Some(module_id) => self.qualified_lookup_help(scope, module_id, ident, region),
|
||||
None => Err(RuntimeError::ModuleNotImported {
|
||||
module_name,
|
||||
imported_modules: self
|
||||
.module_ids
|
||||
.available_modules()
|
||||
module_name: module_name.clone(),
|
||||
imported_modules: scope
|
||||
.modules
|
||||
.available_names()
|
||||
.map(|string| string.as_ref().into())
|
||||
.collect(),
|
||||
region,
|
||||
module_exists: false,
|
||||
module_exists: self
|
||||
.qualified_module_ids
|
||||
.get_id(&PQModuleName::Unqualified(module_name))
|
||||
.is_some(),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
@ -94,7 +107,11 @@ impl<'a> Env<'a> {
|
|||
ident: &str,
|
||||
region: Region,
|
||||
) -> Result<Symbol, RuntimeError> {
|
||||
self.qualified_lookup_help(scope, module_id, ident, region)
|
||||
if !scope.modules.has_id(module_id) {
|
||||
Err(self.module_exists_but_not_imported(scope, module_id, region))
|
||||
} else {
|
||||
self.qualified_lookup_help(scope, module_id, ident, region)
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns Err if the symbol resolved, but it was not exposed by the given module
|
||||
|
@ -153,43 +170,46 @@ impl<'a> Env<'a> {
|
|||
|
||||
Ok(symbol)
|
||||
}
|
||||
None => {
|
||||
let exposed_values = exposed_ids
|
||||
.ident_strs()
|
||||
.filter(|(_, ident)| ident.starts_with(|c: char| c.is_lowercase()))
|
||||
.map(|(_, ident)| Lowercase::from(ident))
|
||||
.collect();
|
||||
Err(RuntimeError::ValueNotExposed {
|
||||
module_name: self
|
||||
.module_ids
|
||||
.get_name(module_id)
|
||||
.expect("Module ID known, but not in the module IDs somehow")
|
||||
.clone(),
|
||||
ident: Ident::from(ident),
|
||||
region,
|
||||
exposed_values,
|
||||
})
|
||||
}
|
||||
None => Err(RuntimeError::ValueNotExposed {
|
||||
module_name: self
|
||||
.qualified_module_ids
|
||||
.get_name(module_id)
|
||||
.expect("Module ID known, but not in the module IDs somehow")
|
||||
.as_inner()
|
||||
.clone(),
|
||||
ident: Ident::from(ident),
|
||||
region,
|
||||
exposed_values: exposed_ids.exposed_values(),
|
||||
}),
|
||||
},
|
||||
None => Err(RuntimeError::ModuleNotImported {
|
||||
module_name: self
|
||||
.module_ids
|
||||
.get_name(module_id)
|
||||
.expect("Module ID known, but not in the module IDs somehow")
|
||||
.clone(),
|
||||
imported_modules: self
|
||||
.dep_idents
|
||||
.keys()
|
||||
.filter_map(|module_id| self.module_ids.get_name(*module_id))
|
||||
.map(|module_name| module_name.as_ref().into())
|
||||
.collect(),
|
||||
region,
|
||||
module_exists: true,
|
||||
}),
|
||||
_ => Err(self.module_exists_but_not_imported(scope, module_id, region)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
    fn module_exists_but_not_imported(
        &self,
        scope: &Scope,
        module_id: ModuleId,
        region: Region,
    ) -> RuntimeError {
        RuntimeError::ModuleNotImported {
            module_name: self
                .qualified_module_ids
                .get_name(module_id)
                .expect("Module ID known, but not in the module IDs somehow")
                .as_inner()
                .clone(),
            imported_modules: scope
                .modules
                .available_names()
                .map(|string| string.as_ref().into())
                .collect(),
            region,
            module_exists: true,
        }
    }

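    // Note (not part of this diff): both qualified-lookup paths now funnel the
    // "module is known to the build, but not imported into this scope" case
    // through this helper, e.g.
    //
    //     if !scope.modules.has_id(module_id) {
    //         Err(self.module_exists_but_not_imported(scope, module_id, region))
    //     } else {
    //         self.qualified_lookup_help(scope, module_id, ident, region)
    //     }
    //
    // so the error can list the modules that actually are in scope
    // (`scope.modules.available_names()`) rather than every known module.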
pub fn problem(&mut self, problem: Problem) {
|
||||
self.problems.push(problem)
|
||||
}
|
||||
|
|
|
@ -8,7 +8,7 @@ use crate::num::{
|
|||
int_expr_from_result, num_expr_from_result, FloatBound, IntBound, NumBound,
|
||||
};
|
||||
use crate::pattern::{canonicalize_pattern, BindingsFromPattern, Pattern, PermitShadows};
|
||||
use crate::procedure::References;
|
||||
use crate::procedure::{QualifiedReference, References};
|
||||
use crate::scope::Scope;
|
||||
use crate::traverse::{walk_expr, Visitor};
|
||||
use roc_collections::soa::Index;
|
||||
|
@ -27,8 +27,6 @@ use roc_types::num::SingleQuoteBound;
|
|||
use roc_types::subs::{ExhaustiveMark, IllegalCycleMark, RedundantMark, VarStore, Variable};
|
||||
use roc_types::types::{Alias, Category, IndexOrField, LambdaSet, OptAbleVar, Type};
|
||||
use std::fmt::{Debug, Display};
|
||||
use std::fs::File;
|
||||
use std::io::Read;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use std::{char, u32};
|
||||
|
@ -623,6 +621,9 @@ pub fn canonicalize_expr<'a>(
|
|||
use Expr::*;
|
||||
|
||||
let (expr, output) = match expr {
|
||||
&ast::Expr::EmptyDefsFinal => {
|
||||
internal_error!("EmptyDefsFinal should have been desugared")
|
||||
}
|
||||
&ast::Expr::Num(str) => {
|
||||
let answer = num_expr_from_result(var_store, finish_parsing_num(str), region, env);
|
||||
|
||||
|
@ -739,48 +740,6 @@ pub fn canonicalize_expr<'a>(
|
|||
|
||||
ast::Expr::Str(literal) => flatten_str_literal(env, var_store, scope, literal),
|
||||
|
||||
ast::Expr::IngestedFile(file_path, _) => match File::open(file_path) {
|
||||
Ok(mut file) => {
|
||||
let mut bytes = vec![];
|
||||
match file.read_to_end(&mut bytes) {
|
||||
Ok(_) => (
|
||||
Expr::IngestedFile(
|
||||
file_path.to_path_buf().into(),
|
||||
Arc::new(bytes),
|
||||
var_store.fresh(),
|
||||
),
|
||||
Output::default(),
|
||||
),
|
||||
Err(e) => {
|
||||
env.problems.push(Problem::FileProblem {
|
||||
filename: file_path.to_path_buf(),
|
||||
error: e.kind(),
|
||||
});
|
||||
|
||||
// This will not manifest as a real runtime error and is just returned to have a value here.
|
||||
// The pushed FileProblem will be fatal to compilation.
|
||||
(
|
||||
Expr::RuntimeError(roc_problem::can::RuntimeError::NoImplementation),
|
||||
Output::default(),
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
env.problems.push(Problem::FileProblem {
|
||||
filename: file_path.to_path_buf(),
|
||||
error: e.kind(),
|
||||
});
|
||||
|
||||
// This will not manifest as a real runtime error and is just returned to have a value here.
|
||||
// The pushed FileProblem will be fatal to compilation.
|
||||
(
|
||||
Expr::RuntimeError(roc_problem::can::RuntimeError::NoImplementation),
|
||||
Output::default(),
|
||||
)
|
||||
}
|
||||
},
|
||||
|
||||
ast::Expr::SingleQuote(string) => {
|
||||
let mut it = string.chars().peekable();
|
||||
if let Some(char) = it.next() {
|
||||
|
@ -882,7 +841,9 @@ pub fn canonicalize_expr<'a>(
|
|||
}
|
||||
Ok((name, opaque_def)) => {
|
||||
let argument = Box::new(args.pop().unwrap());
|
||||
output.references.insert_type_lookup(name);
|
||||
output
|
||||
.references
|
||||
.insert_type_lookup(name, QualifiedReference::Unqualified);
|
||||
|
||||
let (type_arguments, lambda_set_variables, specialized_def_type) =
|
||||
freshen_opaque_def(var_store, opaque_def);
|
||||
|
@ -1166,6 +1127,7 @@ pub fn canonicalize_expr<'a>(
|
|||
output,
|
||||
)
|
||||
}
|
||||
ast::Expr::TaskAwaitBang(..) => internal_error!("an Expr::TaskAwaitBang expression was not completely removed in desugar_value_def_suffixed"),
|
||||
ast::Expr::Tag(tag) => {
|
||||
let variant_var = var_store.fresh();
|
||||
let ext_var = var_store.fresh();
|
||||
|
@ -1193,7 +1155,9 @@ pub fn canonicalize_expr<'a>(
|
|||
}
|
||||
Ok((name, opaque_def)) => {
|
||||
let mut output = Output::default();
|
||||
output.references.insert_type_lookup(name);
|
||||
output
|
||||
.references
|
||||
.insert_type_lookup(name, QualifiedReference::Unqualified);
|
||||
|
||||
let (type_arguments, lambda_set_variables, specialized_def_type) =
|
||||
freshen_opaque_def(var_store, opaque_def);
|
||||
|
@ -1375,6 +1339,10 @@ pub fn canonicalize_expr<'a>(
|
|||
|
||||
(RuntimeError(problem), Output::default())
|
||||
}
|
||||
ast::Expr::MalformedSuffixed(..) => {
|
||||
use roc_problem::can::RuntimeError::*;
|
||||
(RuntimeError(MalformedSuffixed(region)), Output::default())
|
||||
}
|
||||
ast::Expr::MultipleRecordBuilders(sub_expr) => {
|
||||
use roc_problem::can::RuntimeError::*;
|
||||
|
||||
|
@ -1441,12 +1409,6 @@ pub fn canonicalize_expr<'a>(
|
|||
bad_expr
|
||||
);
|
||||
}
|
||||
bad_expr @ ast::Expr::Suffixed(_) => {
|
||||
internal_error!(
|
||||
"A suffixed expression did not get desugared somehow: {:#?}",
|
||||
bad_expr
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
// At the end, diff used_idents and defined_idents to see which were unused.
|
||||
|
@ -1890,7 +1852,9 @@ fn canonicalize_var_lookup(
|
|||
// Look it up in scope!
|
||||
match scope.lookup_str(ident, region) {
|
||||
Ok(symbol) => {
|
||||
output.references.insert_value_lookup(symbol);
|
||||
output
|
||||
.references
|
||||
.insert_value_lookup(symbol, QualifiedReference::Unqualified);
|
||||
|
||||
if scope.abilities_store.is_ability_member_name(symbol) {
|
||||
AbilityMember(
|
||||
|
@ -1913,7 +1877,9 @@ fn canonicalize_var_lookup(
|
|||
// Look it up in the env!
|
||||
match env.qualified_lookup(scope, module_name, ident, region) {
|
||||
Ok(symbol) => {
|
||||
output.references.insert_value_lookup(symbol);
|
||||
output
|
||||
.references
|
||||
.insert_value_lookup(symbol, QualifiedReference::Qualified);
|
||||
|
||||
if scope.abilities_store.is_ability_member_name(symbol) {
|
||||
AbilityMember(
|
||||
|
@ -2424,10 +2390,10 @@ pub fn is_valid_interpolation(expr: &ast::Expr<'_>) -> bool {
|
|||
| ast::Expr::Expect(_, _)
|
||||
| ast::Expr::When(_, _)
|
||||
| ast::Expr::Backpassing(_, _, _)
|
||||
| ast::Expr::IngestedFile(_, _)
|
||||
| ast::Expr::SpaceBefore(_, _)
|
||||
| ast::Expr::Str(StrLiteral::Block(_))
|
||||
| ast::Expr::SpaceAfter(_, _) => false,
|
||||
| ast::Expr::SpaceAfter(_, _)
|
||||
| ast::Expr::EmptyDefsFinal => false,
|
||||
// These can contain subexpressions, so we need to recursively check those
|
||||
ast::Expr::Str(StrLiteral::Line(segments)) => {
|
||||
segments.iter().all(|segment| match segment {
|
||||
|
@ -2453,13 +2419,15 @@ pub fn is_valid_interpolation(expr: &ast::Expr<'_>) -> bool {
|
|||
.iter()
|
||||
.all(|loc_field| is_valid_interpolation(&loc_field.value)),
|
||||
ast::Expr::MultipleRecordBuilders(loc_expr)
|
||||
| ast::Expr::MalformedSuffixed(loc_expr)
|
||||
| ast::Expr::UnappliedRecordBuilder(loc_expr)
|
||||
| ast::Expr::PrecedenceConflict(PrecedenceConflict { expr: loc_expr, .. })
|
||||
| ast::Expr::UnaryOp(loc_expr, _)
|
||||
| ast::Expr::Closure(_, loc_expr) => is_valid_interpolation(&loc_expr.value),
|
||||
ast::Expr::TupleAccess(sub_expr, _)
|
||||
| ast::Expr::ParensAround(sub_expr)
|
||||
| ast::Expr::RecordAccess(sub_expr, _) => is_valid_interpolation(sub_expr),
|
||||
| ast::Expr::RecordAccess(sub_expr, _)
|
||||
| ast::Expr::TaskAwaitBang(sub_expr) => is_valid_interpolation(sub_expr),
|
||||
ast::Expr::Apply(loc_expr, args, _called_via) => {
|
||||
is_valid_interpolation(&loc_expr.value)
|
||||
&& args
|
||||
|
@ -2512,7 +2480,6 @@ pub fn is_valid_interpolation(expr: &ast::Expr<'_>) -> bool {
|
|||
ast::RecordBuilderField::SpaceBefore(_, _)
|
||||
| ast::RecordBuilderField::SpaceAfter(_, _) => false,
|
||||
}),
|
||||
ast::Expr::Suffixed(_) => todo!(),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -25,6 +25,7 @@ pub mod pattern;
|
|||
pub mod procedure;
|
||||
pub mod scope;
|
||||
pub mod string;
|
||||
pub mod suffixed;
|
||||
pub mod traverse;
|
||||
|
||||
pub use derive::DERIVED_REGION;
|
||||
|
|
|
@ -1,19 +1,22 @@
|
|||
use std::path::Path;
|
||||
|
||||
use crate::abilities::{AbilitiesStore, ImplKey, PendingAbilitiesStore, ResolvedImpl};
|
||||
use crate::annotation::{canonicalize_annotation, AnnotationFor};
|
||||
use crate::def::{canonicalize_defs, Def};
|
||||
use crate::def::{canonicalize_defs, report_unused_imports, Def};
|
||||
use crate::effect_module::HostedGeneratedFunctions;
|
||||
use crate::env::Env;
|
||||
use crate::expr::{
|
||||
ClosureData, DbgLookup, Declarations, ExpectLookup, Expr, Output, PendingDerives,
|
||||
};
|
||||
use crate::pattern::{BindingsFromPattern, Pattern};
|
||||
use crate::procedure::References;
|
||||
use crate::scope::Scope;
|
||||
use bumpalo::Bump;
|
||||
use roc_collections::{MutMap, SendMap, VecMap, VecSet};
|
||||
use roc_error_macros::internal_error;
|
||||
use roc_module::ident::Ident;
|
||||
use roc_module::ident::Lowercase;
|
||||
use roc_module::symbol::{IdentIds, IdentIdsByModule, ModuleId, ModuleIds, Symbol};
|
||||
use roc_module::symbol::{IdentIds, IdentIdsByModule, ModuleId, PackageModuleIds, Symbol};
|
||||
use roc_parse::ast::{Defs, TypeAnnotation};
|
||||
use roc_parse::header::HeaderType;
|
||||
use roc_parse::pattern::PatternType;
|
||||
|
@ -127,7 +130,6 @@ pub struct Module {
|
|||
pub exposed_imports: MutMap<Symbol, Region>,
|
||||
pub exposed_symbols: VecSet<Symbol>,
|
||||
pub referenced_values: VecSet<Symbol>,
|
||||
pub referenced_types: VecSet<Symbol>,
|
||||
/// all aliases. `bool` indicates whether it is exposed
|
||||
pub aliases: MutMap<Symbol, (bool, Alias)>,
|
||||
pub rigid_variables: RigidVariables,
|
||||
|
@ -152,7 +154,6 @@ pub struct ModuleOutput {
|
|||
pub exposed_symbols: VecSet<Symbol>,
|
||||
pub problems: Vec<Problem>,
|
||||
pub referenced_values: VecSet<Symbol>,
|
||||
pub referenced_types: VecSet<Symbol>,
|
||||
pub symbols_from_requires: Vec<(Loc<Symbol>, Loc<Type>)>,
|
||||
pub pending_derives: PendingDerives,
|
||||
pub scope: Scope,
|
||||
|
@ -275,21 +276,38 @@ pub fn canonicalize_module_defs<'a>(
|
|||
loc_defs: &'a mut Defs<'a>,
|
||||
header_type: &roc_parse::header::HeaderType,
|
||||
home: ModuleId,
|
||||
module_path: &str,
|
||||
module_path: &'a str,
|
||||
src: &'a str,
|
||||
module_ids: &'a ModuleIds,
|
||||
qualified_module_ids: &'a PackageModuleIds<'a>,
|
||||
exposed_ident_ids: IdentIds,
|
||||
dep_idents: &'a IdentIdsByModule,
|
||||
aliases: MutMap<Symbol, Alias>,
|
||||
imported_abilities_state: PendingAbilitiesStore,
|
||||
exposed_imports: MutMap<Ident, (Symbol, Region)>,
|
||||
initial_scope: MutMap<Ident, (Symbol, Region)>,
|
||||
exposed_symbols: VecSet<Symbol>,
|
||||
symbols_from_requires: &[(Loc<Symbol>, Loc<TypeAnnotation<'a>>)],
|
||||
var_store: &mut VarStore,
|
||||
opt_shorthand: Option<&'a str>,
|
||||
) -> ModuleOutput {
|
||||
let mut can_exposed_imports = MutMap::default();
|
||||
let mut scope = Scope::new(home, exposed_ident_ids, imported_abilities_state);
|
||||
let mut env = Env::new(arena, home, dep_idents, module_ids);
|
||||
let mut scope = Scope::new(
|
||||
home,
|
||||
qualified_module_ids
|
||||
.get_name(home)
|
||||
.expect("home module not found")
|
||||
.as_inner()
|
||||
.to_owned(),
|
||||
exposed_ident_ids,
|
||||
imported_abilities_state,
|
||||
);
|
||||
let mut env = Env::new(
|
||||
arena,
|
||||
home,
|
||||
arena.alloc(Path::new(module_path)),
|
||||
dep_idents,
|
||||
qualified_module_ids,
|
||||
opt_shorthand,
|
||||
);
|
||||
|
||||
for (name, alias) in aliases.into_iter() {
|
||||
scope.add_alias(
|
||||
|
@ -312,30 +330,26 @@ pub fn canonicalize_module_defs<'a>(
|
|||
// visited a BinOp node we'd recursively try to apply this to each of its nested
|
||||
// operators, and then again on *their* nested operators, ultimately applying the
|
||||
// rules multiple times unnecessarily.
|
||||
crate::desugar::desugar_defs_node_values(arena, loc_defs, src, &mut None, module_path);
|
||||
|
||||
crate::desugar::desugar_defs_node_values(arena, loc_defs, src, &mut None, module_path, true);
|
||||
|
||||
let mut rigid_variables = RigidVariables::default();
|
||||
|
||||
// Exposed values are treated like defs that appear before any others, e.g.
|
||||
//
|
||||
// imports [Foo.{ bar, baz }]
|
||||
//
|
||||
// ...is basically the same as if we'd added these extra defs at the start of the module:
|
||||
//
|
||||
// bar = Foo.bar
|
||||
// baz = Foo.baz
|
||||
// Initial scope values are treated like defs that appear before any others.
|
||||
// They include builtin types that are automatically imported, and for a platform
|
||||
// package, the required values from the app.
|
||||
//
|
||||
// Here we essentially add those "defs" to "the beginning of the module"
|
||||
// by canonicalizing them right before we canonicalize the actual ast::Def nodes.
|
||||
for (ident, (symbol, region)) in exposed_imports {
|
||||
for (ident, (symbol, region)) in initial_scope {
|
||||
let first_char = ident.as_inline_str().as_str().chars().next().unwrap();
|
||||
|
||||
if first_char.is_lowercase() {
|
||||
match scope.import(ident, symbol, region) {
|
||||
match scope.import_symbol(ident, symbol, region) {
|
||||
Ok(()) => {
|
||||
// Add an entry to exposed_imports using the current module's name
|
||||
// as the key; e.g. if this is the Foo module and we have
|
||||
// exposes [Bar.{ baz }] then insert Foo.baz as the key, so when
|
||||
// Bar exposes [baz] then insert Foo.baz as the key, so when
|
||||
// anything references `baz` in this Foo module, it will resolve to Bar.baz.
|
||||
can_exposed_imports.insert(symbol, region);
|
||||
}
|
||||
|
@ -354,7 +368,7 @@ pub fn canonicalize_module_defs<'a>(
|
|||
|
||||
// but now we know this symbol by a different identifier, so we still need to add it to
|
||||
// the scope
|
||||
match scope.import(ident, symbol, region) {
|
||||
match scope.import_symbol(ident, symbol, region) {
|
||||
Ok(()) => {
|
||||
// here we do nothing special
|
||||
}
|
||||
|
@ -368,7 +382,7 @@ pub fn canonicalize_module_defs<'a>(
|
|||
}
|
||||
}
|
||||
|
||||
let (defs, output, symbols_introduced) = canonicalize_defs(
|
||||
let (defs, output, symbols_introduced, imports_introduced) = canonicalize_defs(
|
||||
&mut env,
|
||||
Output::default(),
|
||||
var_store,
|
||||
|
@ -409,18 +423,15 @@ pub fn canonicalize_module_defs<'a>(
|
|||
}
|
||||
|
||||
let mut referenced_values = VecSet::default();
|
||||
let mut referenced_types = VecSet::default();
|
||||
|
||||
// Gather up all the symbols that were referenced across all the defs' lookups.
|
||||
referenced_values.extend(output.references.value_lookups().copied());
|
||||
referenced_types.extend(output.references.type_lookups().copied());
|
||||
|
||||
// Gather up all the symbols that were referenced across all the defs' calls.
|
||||
referenced_values.extend(output.references.calls().copied());
|
||||
|
||||
// Gather up all the symbols that were referenced from other modules.
|
||||
referenced_values.extend(env.qualified_value_lookups.iter().copied());
|
||||
referenced_types.extend(env.qualified_type_lookups.iter().copied());
|
||||
|
||||
// NOTE previously we inserted builtin defs into the list of defs here
|
||||
// this is now done later, in file.rs.
|
||||
|
@ -432,6 +443,7 @@ pub fn canonicalize_module_defs<'a>(
|
|||
|
||||
let new_output = Output {
|
||||
aliases: output.aliases,
|
||||
references: output.references,
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
|
@ -481,6 +493,8 @@ pub fn canonicalize_module_defs<'a>(
|
|||
})
|
||||
.collect();
|
||||
|
||||
report_unused_imports(imports_introduced, &output.references, &mut env, &mut scope);
|
||||
|
||||
if let GeneratedInfo::Hosted {
|
||||
effect_symbol,
|
||||
generated_functions,
|
||||
|
@ -544,7 +558,7 @@ pub fn canonicalize_module_defs<'a>(
|
|||
let annotation = crate::annotation::Annotation {
|
||||
typ: def_annotation.signature,
|
||||
introduced_variables: def_annotation.introduced_variables,
|
||||
references: Default::default(),
|
||||
references: References::new(),
|
||||
aliases: Default::default(),
|
||||
};
|
||||
|
||||
|
@ -602,7 +616,7 @@ pub fn canonicalize_module_defs<'a>(
|
|||
let annotation = crate::annotation::Annotation {
|
||||
typ: def_annotation.signature,
|
||||
introduced_variables: def_annotation.introduced_variables,
|
||||
references: Default::default(),
|
||||
references: References::new(),
|
||||
aliases: Default::default(),
|
||||
};
|
||||
|
||||
|
@ -699,14 +713,12 @@ pub fn canonicalize_module_defs<'a>(
|
|||
|
||||
// Incorporate any remaining output.lookups entries into references.
|
||||
referenced_values.extend(output.references.value_lookups().copied());
|
||||
referenced_types.extend(output.references.type_lookups().copied());
|
||||
|
||||
// Incorporate any remaining output.calls entries into references.
|
||||
referenced_values.extend(output.references.calls().copied());
|
||||
|
||||
// Gather up all the symbols that were referenced from other modules.
|
||||
referenced_values.extend(env.qualified_value_lookups.iter().copied());
|
||||
referenced_types.extend(env.qualified_type_lookups.iter().copied());
|
||||
|
||||
let mut fix_closures_no_capture_symbols = VecSet::default();
|
||||
let mut fix_closures_closure_captures = VecMap::default();
|
||||
|
@ -802,7 +814,6 @@ pub fn canonicalize_module_defs<'a>(
|
|||
rigid_variables,
|
||||
declarations,
|
||||
referenced_values,
|
||||
referenced_types,
|
||||
exposed_imports: can_exposed_imports,
|
||||
problems: env.problems,
|
||||
symbols_from_requires,
|
||||
|
|
|
@ -265,7 +265,7 @@ pub fn canonicalize_def_header_pattern<'a>(
|
|||
|
||||
match pattern {
|
||||
// Identifiers that shadow ability members may appear (and may only appear) at the header of a def.
|
||||
Identifier(name) => {
|
||||
Identifier { ident: name } => {
|
||||
match scope.introduce_or_shadow_ability_member(
|
||||
pending_abilities_in_scope,
|
||||
(*name).into(),
|
||||
|
@ -373,7 +373,7 @@ pub fn canonicalize_pattern<'a>(
|
|||
use PatternType::*;
|
||||
|
||||
let can_pattern = match pattern {
|
||||
Identifier(name) => {
|
||||
Identifier { ident: name } => {
|
||||
match canonicalize_pattern_symbol(env, scope, output, region, permit_shadows, name) {
|
||||
Ok(symbol) => Pattern::Identifier(symbol),
|
||||
Err(pattern) => pattern,
|
||||
|
@ -446,7 +446,10 @@ pub fn canonicalize_pattern<'a>(
|
|||
let (type_arguments, lambda_set_variables, specialized_def_type) =
|
||||
freshen_opaque_def(var_store, opaque_def);
|
||||
|
||||
output.references.insert_type_lookup(opaque);
|
||||
output.references.insert_type_lookup(
|
||||
opaque,
|
||||
crate::procedure::QualifiedReference::Unqualified,
|
||||
);
|
||||
|
||||
Pattern::UnwrappedOpaque {
|
||||
whole_var: var_store.fresh(),
|
||||
|
@ -628,7 +631,7 @@ pub fn canonicalize_pattern<'a>(
|
|||
|
||||
for loc_pattern in patterns.iter() {
|
||||
match loc_pattern.value {
|
||||
Identifier(label) => {
|
||||
Identifier { ident: label } => {
|
||||
match scope.introduce(label.into(), region) {
|
||||
Ok(symbol) => {
|
||||
output.references.insert_bound(symbol);
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use crate::expr::Expr;
|
||||
use crate::pattern::Pattern;
|
||||
use roc_module::symbol::Symbol;
|
||||
use roc_module::symbol::{ModuleId, Symbol};
|
||||
use roc_region::all::{Loc, Region};
|
||||
use roc_types::subs::Variable;
|
||||
|
||||
|
@ -46,6 +46,23 @@ impl ReferencesBitflags {
|
|||
const TYPE_LOOKUP: Self = ReferencesBitflags(2);
|
||||
const CALL: Self = ReferencesBitflags(4);
|
||||
const BOUND: Self = ReferencesBitflags(8);
|
||||
const QUALIFIED: Self = ReferencesBitflags(16);
|
||||
const UNQUALIFIED: Self = ReferencesBitflags(32);
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
pub enum QualifiedReference {
    Unqualified,
    Qualified,
}

impl QualifiedReference {
    fn flags(&self, flags: ReferencesBitflags) -> ReferencesBitflags {
        match self {
            Self::Unqualified => ReferencesBitflags(flags.0 | ReferencesBitflags::UNQUALIFIED.0),
            Self::Qualified => ReferencesBitflags(flags.0 | ReferencesBitflags::QUALIFIED.0),
        }
    }
}

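// Illustrative sketch (not part of this diff): the qualification is OR'd into
// the existing lookup flags, so recording a qualified value lookup is roughly
//
//     references.insert_value_lookup(symbol, QualifiedReference::Qualified);
//     // stored flags: VALUE_LOOKUP | QUALIFIED
//
// while an unqualified lookup stores VALUE_LOOKUP | UNQUALIFIED, which is what
// `has_unqualified_type_or_value_lookup` checks when deciding whether an
// exposed import was actually used.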
#[derive(Clone, Debug, Default)]
|
||||
|
@ -108,12 +125,12 @@ impl References {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn insert_value_lookup(&mut self, symbol: Symbol) {
|
||||
self.insert(symbol, ReferencesBitflags::VALUE_LOOKUP);
|
||||
pub fn insert_value_lookup(&mut self, symbol: Symbol, qualified: QualifiedReference) {
|
||||
self.insert(symbol, qualified.flags(ReferencesBitflags::VALUE_LOOKUP));
|
||||
}
|
||||
|
||||
pub fn insert_type_lookup(&mut self, symbol: Symbol) {
|
||||
self.insert(symbol, ReferencesBitflags::TYPE_LOOKUP);
|
||||
pub fn insert_type_lookup(&mut self, symbol: Symbol, qualified: QualifiedReference) {
|
||||
self.insert(symbol, qualified.flags(ReferencesBitflags::TYPE_LOOKUP));
|
||||
}
|
||||
|
||||
pub fn insert_bound(&mut self, symbol: Symbol) {
|
||||
|
@ -178,7 +195,24 @@ impl References {
|
|||
false
|
||||
}
|
||||
|
||||
    pub fn has_unqualified_type_or_value_lookup(&self, symbol: Symbol) -> bool {
        let mask = ReferencesBitflags::VALUE_LOOKUP.0 | ReferencesBitflags::TYPE_LOOKUP.0;
        let it = self.symbols.iter().zip(self.bitflags.iter());

        for (a, b) in it {
            if *a == symbol && b.0 & mask > 0 && b.0 & ReferencesBitflags::UNQUALIFIED.0 > 0 {
                return true;
            }
        }

        false
    }

    pub fn references_type_def(&self, symbol: Symbol) -> bool {
        self.has_type_lookup(symbol)
    }

    pub fn has_module_lookup(&self, module_id: ModuleId) -> bool {
        self.symbols.iter().any(|sym| sym.module_id() == module_id)
    }
}
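// Note (not part of this diff): `has_module_lookup` works because every
// `Symbol` carries its defining `ModuleId`, so "was this module referenced?"
// reduces to scanning the recorded symbols, roughly
//
//     if references.has_module_lookup(import.module_id) { /* check per-symbol use */ }
//
// which is how `report_unused_imports` distinguishes an entirely unused module
// import from individual unused exposed symbols.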
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
use roc_collections::{VecMap, VecSet};
|
||||
use roc_error_macros::internal_error;
|
||||
use roc_module::ident::Ident;
|
||||
use roc_module::symbol::{IdentId, IdentIds, ModuleId, Symbol};
|
||||
use roc_module::ident::{Ident, ModuleName};
|
||||
use roc_module::symbol::{IdentId, IdentIds, ModuleId, ScopeModules, Symbol};
|
||||
use roc_problem::can::RuntimeError;
|
||||
use roc_region::all::{Loc, Region};
|
||||
use roc_types::subs::Variable;
|
||||
|
@ -29,8 +29,11 @@ pub struct Scope {
|
|||
/// The first `exposed_ident_count` identifiers are exposed
|
||||
exposed_ident_count: usize,
|
||||
|
||||
/// Identifiers that are imported (and introduced in the header)
|
||||
imports: Vec<(Ident, Symbol, Region)>,
|
||||
/// Modules that are imported
|
||||
pub modules: ScopeModules,
|
||||
|
||||
/// Identifiers that are imported
|
||||
imported_symbols: Vec<(Ident, Symbol, Region)>,
|
||||
|
||||
/// Shadows of an ability member, for example a local specialization of `eq` for the ability
|
||||
/// member `Eq implements eq : a, a -> Bool where a implements Eq` gets a shadow symbol it can use for its
|
||||
|
@ -50,16 +53,15 @@ pub struct Scope {
|
|||
impl Scope {
|
||||
pub fn new(
|
||||
home: ModuleId,
|
||||
module_name: ModuleName,
|
||||
initial_ident_ids: IdentIds,
|
||||
starting_abilities_store: PendingAbilitiesStore,
|
||||
) -> Scope {
|
||||
let default_imports =
|
||||
// Add all `Apply` types.
|
||||
(Symbol::apply_types_in_scope().into_iter())
|
||||
// Add all tag names we might want to suggest as hints in error messages.
|
||||
.chain(Symbol::symbols_in_scope_for_hints());
|
||||
|
||||
let default_imports = default_imports.map(|(a, (b, c))| (a, b, c)).collect();
|
||||
// Add all `Apply` types.
|
||||
let default_imports = Symbol::apply_types_in_scope()
|
||||
.into_iter()
|
||||
.map(|(a, (b, c))| (a, b, c))
|
||||
.collect();
|
||||
|
||||
Scope {
|
||||
home,
|
||||
|
@ -68,7 +70,8 @@ impl Scope {
|
|||
aliases: VecMap::default(),
|
||||
abilities_store: starting_abilities_store,
|
||||
shadows: VecMap::default(),
|
||||
imports: default_imports,
|
||||
modules: ScopeModules::new(home, module_name),
|
||||
imported_symbols: default_imports,
|
||||
ignored_locals: VecMap::default(),
|
||||
}
|
||||
}
|
||||
|
@ -82,9 +85,9 @@ impl Scope {
|
|||
}
|
||||
|
||||
pub fn add_docs_imports(&mut self) {
|
||||
self.imports
|
||||
self.imported_symbols
|
||||
.push(("Dict".into(), Symbol::DICT_DICT, Region::zero()));
|
||||
self.imports
|
||||
self.imported_symbols
|
||||
.push(("Set".into(), Symbol::SET_SET, Region::zero()));
|
||||
}
|
||||
|
||||
|
@ -113,7 +116,7 @@ impl Scope {
|
|||
|
||||
fn idents_in_scope(&self) -> impl Iterator<Item = Ident> + '_ {
|
||||
let it1 = self.locals.idents_in_scope();
|
||||
let it2 = self.imports.iter().map(|t| t.0.clone());
|
||||
let it2 = self.imported_symbols.iter().map(|t| t.0.clone());
|
||||
|
||||
it2.chain(it1)
|
||||
}
|
||||
|
@ -139,7 +142,7 @@ impl Scope {
|
|||
},
|
||||
None => {
|
||||
// opaque types can only be wrapped/unwrapped in the scope they are defined in (and below)
|
||||
let error = if let Some((_, decl_region)) = self.has_imported(opaque_str) {
|
||||
let error = if let Some((_, decl_region)) = self.has_imported_symbol(opaque_str) {
|
||||
// specific error for when the opaque is imported, which definitely does not work
|
||||
RuntimeError::OpaqueOutsideScope {
|
||||
opaque,
|
||||
|
@ -202,8 +205,8 @@ impl Scope {
|
|||
}
|
||||
}
|
||||
|
||||
fn has_imported(&self, ident: &str) -> Option<(Symbol, Region)> {
|
||||
for (import, shadow, original_region) in self.imports.iter() {
|
||||
fn has_imported_symbol(&self, ident: &str) -> Option<(Symbol, Region)> {
|
||||
for (import, shadow, original_region) in self.imported_symbols.iter() {
|
||||
if ident == import.as_str() {
|
||||
return Some((*shadow, *original_region));
|
||||
}
|
||||
|
@ -215,7 +218,7 @@ impl Scope {
|
|||
/// Is an identifier in scope, either in the locals or imports
|
||||
fn scope_contains_ident(&self, ident: &str) -> ContainsIdent {
|
||||
// exposed imports are likely to be small
|
||||
match self.has_imported(ident) {
|
||||
match self.has_imported_symbol(ident) {
|
||||
Some((symbol, region)) => ContainsIdent::InScope(symbol, region),
|
||||
None => self.locals.contains_ident(ident),
|
||||
}
|
||||
|
@ -379,19 +382,19 @@ impl Scope {
|
|||
///
|
||||
/// Returns Err if this would shadow an existing ident, including the
|
||||
/// Symbol and Region of the ident we already had in scope under that name.
|
||||
pub fn import(
|
||||
pub fn import_symbol(
|
||||
&mut self,
|
||||
ident: Ident,
|
||||
symbol: Symbol,
|
||||
region: Region,
|
||||
) -> Result<(), (Symbol, Region)> {
|
||||
if let Some((s, r)) = self.has_imported(ident.as_str()) {
|
||||
return Err((s, r));
|
||||
match self.scope_contains_ident(ident.as_str()) {
|
||||
ContainsIdent::InScope(symbol, region) => Err((symbol, region)),
|
||||
ContainsIdent::NotPresent | ContainsIdent::NotInScope(_) => {
|
||||
self.imported_symbols.push((ident, symbol, region));
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
self.imports.push((ident, symbol, region));
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn add_alias(
|
||||
|
@ -423,17 +426,22 @@ impl Scope {
|
|||
//
|
||||
// - abilities_store: ability definitions not allowed in inner scopes
|
||||
// - locals: everything introduced in the inner scope is marked as not in scope in the rollback
|
||||
// - imports: everything that was imported in the inner scope is removed in the rollback
|
||||
// - aliases: stored in a VecMap, we just discard anything added in an inner scope
|
||||
// - exposed_ident_count: unchanged
|
||||
// - home: unchanged
|
||||
let aliases_count = self.aliases.len();
|
||||
let ignored_locals_count = self.ignored_locals.len();
|
||||
let locals_snapshot = self.locals.in_scope.len();
|
||||
let imported_symbols_snapshot = self.imported_symbols.len();
|
||||
let imported_modules_snapshot = self.modules.len();
|
||||
|
||||
let result = f(self);
|
||||
|
||||
self.aliases.truncate(aliases_count);
|
||||
self.ignored_locals.truncate(ignored_locals_count);
|
||||
self.imported_symbols.truncate(imported_symbols_snapshot);
|
||||
self.modules.truncate(imported_modules_snapshot);
|
||||
|
||||
// anything added in the inner scope is no longer in scope now
|
||||
for i in locals_snapshot..self.locals.in_scope.len() {
|
||||
|
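The comment block above spells out the rollback strategy for inner scopes: take a length snapshot of every growable collection before running the inner closure, then truncate back to those snapshots afterwards. A minimal, self-contained sketch of that pattern (using a hypothetical `ToyScope`, not the real `Scope` type from this diff):

```rust
// Sketch of the snapshot-and-truncate rollback described above.
struct ToyScope {
    aliases: Vec<(String, String)>,
    imported_symbols: Vec<String>,
}

impl ToyScope {
    fn inner_scope<T>(&mut self, f: impl FnOnce(&mut ToyScope) -> T) -> T {
        // snapshot the lengths of everything an inner scope may grow
        let aliases_count = self.aliases.len();
        let imports_count = self.imported_symbols.len();

        let result = f(self);

        // anything added inside the inner scope is rolled back here
        self.aliases.truncate(aliases_count);
        self.imported_symbols.truncate(imports_count);

        result
    }
}

fn main() {
    let mut scope = ToyScope { aliases: Vec::new(), imported_symbols: Vec::new() };

    scope.inner_scope(|inner| {
        inner.imported_symbols.push("Dict".to_string());
        assert_eq!(inner.imported_symbols.len(), 1);
    });

    // the import made inside the inner scope is no longer visible
    assert!(scope.imported_symbols.is_empty());
}
```

Truncation is enough here because the collections only ever grow inside an inner scope; nothing introduced before the snapshot can be removed by the inner closure.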
@ -651,6 +659,7 @@ mod test {
|
|||
let _register_module_debug_names = ModuleIds::default();
|
||||
let mut scope = Scope::new(
|
||||
ModuleId::ATTR,
|
||||
"#Attr".into(),
|
||||
IdentIds::default(),
|
||||
PendingAbilitiesStore::default(),
|
||||
);
|
||||
|
@ -670,6 +679,7 @@ mod test {
|
|||
let _register_module_debug_names = ModuleIds::default();
|
||||
let mut scope = Scope::new(
|
||||
ModuleId::ATTR,
|
||||
"#Attr".into(),
|
||||
IdentIds::default(),
|
||||
PendingAbilitiesStore::default(),
|
||||
);
|
||||
|
@ -699,6 +709,7 @@ mod test {
|
|||
let _register_module_debug_names = ModuleIds::default();
|
||||
let mut scope = Scope::new(
|
||||
ModuleId::ATTR,
|
||||
"#Attr".into(),
|
||||
IdentIds::default(),
|
||||
PendingAbilitiesStore::default(),
|
||||
);
|
||||
|
@ -720,6 +731,7 @@ mod test {
|
|||
let _register_module_debug_names = ModuleIds::default();
|
||||
let scope = Scope::new(
|
||||
ModuleId::ATTR,
|
||||
"#Attr".into(),
|
||||
IdentIds::default(),
|
||||
PendingAbilitiesStore::default(),
|
||||
);
|
||||
|
@ -728,13 +740,7 @@ mod test {
|
|||
|
||||
assert_eq!(
|
||||
&idents,
|
||||
&[
|
||||
Ident::from("Str"),
|
||||
Ident::from("List"),
|
||||
Ident::from("Box"),
|
||||
Ident::from("Ok"),
|
||||
Ident::from("Err"),
|
||||
]
|
||||
&[Ident::from("Str"), Ident::from("List"), Ident::from("Box"),]
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -743,6 +749,7 @@ mod test {
|
|||
let _register_module_debug_names = ModuleIds::default();
|
||||
let mut scope = Scope::new(
|
||||
ModuleId::ATTR,
|
||||
"#Attr".into(),
|
||||
IdentIds::default(),
|
||||
PendingAbilitiesStore::default(),
|
||||
);
|
||||
|
@ -751,13 +758,7 @@ mod test {
|
|||
|
||||
assert_eq!(
|
||||
&idents,
|
||||
&[
|
||||
Ident::from("Str"),
|
||||
Ident::from("List"),
|
||||
Ident::from("Box"),
|
||||
Ident::from("Ok"),
|
||||
Ident::from("Err"),
|
||||
]
|
||||
&[Ident::from("Str"), Ident::from("List"), Ident::from("Box"),]
|
||||
);
|
||||
|
||||
let builtin_count = idents.len();
|
||||
|
@ -810,6 +811,7 @@ mod test {
|
|||
let _register_module_debug_names = ModuleIds::default();
|
||||
let mut scope = Scope::new(
|
||||
ModuleId::ATTR,
|
||||
"#Attr".into(),
|
||||
IdentIds::default(),
|
||||
PendingAbilitiesStore::default(),
|
||||
);
|
||||
|
@ -820,7 +822,7 @@ mod test {
|
|||
|
||||
assert!(scope.lookup(&ident, region).is_err());
|
||||
|
||||
assert!(scope.import(ident.clone(), symbol, region).is_ok());
|
||||
assert!(scope.import_symbol(ident.clone(), symbol, region).is_ok());
|
||||
|
||||
assert!(scope.lookup(&ident, region).is_ok());
|
||||
|
||||
|
@ -832,6 +834,7 @@ mod test {
|
|||
let _register_module_debug_names = ModuleIds::default();
|
||||
let mut scope = Scope::new(
|
||||
ModuleId::ATTR,
|
||||
"#Attr".into(),
|
||||
IdentIds::default(),
|
||||
PendingAbilitiesStore::default(),
|
||||
);
|
||||
|
@ -842,7 +845,7 @@ mod test {
|
|||
let region1 = Region::from_pos(Position { offset: 10 });
|
||||
let region2 = Region::from_pos(Position { offset: 20 });
|
||||
|
||||
scope.import(ident.clone(), symbol, region1).unwrap();
|
||||
scope.import_symbol(ident.clone(), symbol, region1).unwrap();
|
||||
|
||||
let (original, _ident, shadow_symbol) =
|
||||
scope.introduce(ident.clone(), region2).unwrap_err();
|
||||
|
|
879
crates/compiler/can/src/suffixed.rs
Normal file
879
crates/compiler/can/src/suffixed.rs
Normal file
|
@ -0,0 +1,879 @@
|
|||
#![allow(clippy::manual_map)]
|
||||
|
||||
use bumpalo::collections::Vec;
|
||||
use bumpalo::Bump;
|
||||
use roc_error_macros::internal_error;
|
||||
use roc_module::called_via::CalledVia;
|
||||
use roc_module::ident::ModuleName;
|
||||
use roc_parse::ast::Expr::{self, *};
|
||||
use roc_parse::ast::{is_expr_suffixed, Pattern, ValueDef, WhenBranch};
|
||||
use roc_region::all::{Loc, Region};
|
||||
use std::cell::Cell;
|
||||
|
||||
thread_local! {
|
||||
// we use a thread_local here so that tests consistently give the same pattern
|
||||
static SUFFIXED_ANSWER_COUNTER: Cell<usize> = Cell::new(0);
|
||||
}
|
||||
|
||||
/// Provide an intermediate answer expression and pattern when unwrapping a
|
||||
/// (sub) expression
|
||||
///
|
||||
/// e.g. `x = foo (bar!)` unwraps to `x = Task.await (bar) \#!a0 -> foo #!a0`
|
||||
fn next_suffixed_answer_pattern(arena: &Bump) -> (Expr, Pattern) {
|
||||
// Use the thread-local counter
|
||||
SUFFIXED_ANSWER_COUNTER.with(|counter| {
|
||||
let count = counter.get();
|
||||
counter.set(count + 1);
|
||||
|
||||
let answer_ident = arena.alloc(format!("#!a{}", count));
|
||||
|
||||
(
|
||||
Expr::Var {
|
||||
module_name: "",
|
||||
ident: answer_ident,
|
||||
},
|
||||
Pattern::Identifier {
|
||||
ident: answer_ident.as_str(),
|
||||
},
|
||||
)
|
||||
})
|
||||
}
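A small stand-alone sketch of the counter scheme used by `next_suffixed_answer_pattern` (no bumpalo or roc AST types, just the naming logic): each call yields the next `#!a{n}` identifier, and keeping the counter thread-local means parallel test threads each see a deterministic sequence starting at `#!a0`.

```rust
use std::cell::Cell;

thread_local! {
    // per-thread counter so every test thread numbers its answers from zero
    static ANSWER_COUNTER: Cell<usize> = Cell::new(0);
}

fn next_answer_name() -> String {
    ANSWER_COUNTER.with(|counter| {
        let count = counter.get();
        counter.set(count + 1);
        format!("#!a{}", count)
    })
}

fn main() {
    assert_eq!(next_answer_name(), "#!a0");
    assert_eq!(next_answer_name(), "#!a1");
}
```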
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum EUnwrapped<'a> {
|
||||
UnwrappedDefExpr(&'a Loc<Expr<'a>>),
|
||||
|
||||
UnwrappedSubExpr {
|
||||
/// the unwrapped expression argument for Task.await
|
||||
sub_arg: &'a Loc<Expr<'a>>,
|
||||
|
||||
/// the pattern for the closure
|
||||
sub_pat: &'a Loc<Pattern<'a>>,
|
||||
|
||||
/// the expression to replace the unwrapped
|
||||
sub_new: &'a Loc<Expr<'a>>,
|
||||
},
|
||||
|
||||
Malformed,
|
||||
}
|
||||
|
||||
fn init_unwrapped_err<'a>(
|
||||
arena: &'a Bump,
|
||||
unwrapped_expr: &'a Loc<Expr<'a>>,
|
||||
maybe_def_pat: Option<&'a Loc<Pattern<'a>>>,
|
||||
) -> Result<&'a Loc<Expr<'a>>, EUnwrapped<'a>> {
|
||||
match maybe_def_pat {
|
||||
Some(..) => {
|
||||
// we have a def pattern, so no need to generate a new pattern
|
||||
// as this should only be created in the first call from a def
|
||||
Err(EUnwrapped::UnwrappedDefExpr(unwrapped_expr))
|
||||
}
|
||||
None => {
|
||||
let (answer_var, answer_pat) = next_suffixed_answer_pattern(arena);
|
||||
let sub_new = arena.alloc(Loc::at(unwrapped_expr.region, answer_var));
|
||||
let sub_pat = arena.alloc(Loc::at(unwrapped_expr.region, answer_pat));
|
||||
|
||||
Err(EUnwrapped::UnwrappedSubExpr {
|
||||
sub_arg: unwrapped_expr,
|
||||
sub_pat,
|
||||
sub_new,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Descend through the AST and unwrap each suffixed expression
|
||||
/// when an expression is unwrapped, we apply a `Task.await` and
|
||||
/// then descend through the AST again until there are no more suffixed
|
||||
/// expressions, or we hit an error
|
||||
pub fn unwrap_suffixed_expression<'a>(
|
||||
arena: &'a Bump,
|
||||
loc_expr: &'a Loc<Expr<'a>>,
|
||||
maybe_def_pat: Option<&'a Loc<Pattern<'a>>>,
|
||||
) -> Result<&'a Loc<Expr<'a>>, EUnwrapped<'a>> {
|
||||
let unwrapped_expression = {
|
||||
match loc_expr.value {
|
||||
Expr::TaskAwaitBang(sub_expr) => {
|
||||
let unwrapped_sub_expr = arena.alloc(Loc::at(loc_expr.region, *sub_expr));
|
||||
|
||||
init_unwrapped_err(arena, unwrapped_sub_expr, maybe_def_pat)
|
||||
}
|
||||
|
||||
Expr::Defs(..) => unwrap_suffixed_expression_defs_help(arena, loc_expr, maybe_def_pat),
|
||||
|
||||
Expr::Apply(..) => {
|
||||
unwrap_suffixed_expression_apply_help(arena, loc_expr, maybe_def_pat)
|
||||
}
|
||||
|
||||
Expr::When(..) => unwrap_suffixed_expression_when_help(arena, loc_expr, maybe_def_pat),
|
||||
|
||||
Expr::If(..) => {
|
||||
unwrap_suffixed_expression_if_then_else_help(arena, loc_expr, maybe_def_pat)
|
||||
}
|
||||
|
||||
Expr::Closure(..) => {
|
||||
unwrap_suffixed_expression_closure_help(arena, loc_expr, maybe_def_pat)
|
||||
}
|
||||
|
||||
Expr::ParensAround(..) => {
|
||||
unwrap_suffixed_expression_parens_help(arena, loc_expr, maybe_def_pat)
|
||||
}
|
||||
|
||||
Expr::SpaceBefore(..) | Expr::SpaceAfter(..) => {
|
||||
internal_error!(
|
||||
"SpaceBefore and SpaceAfter should have been removed in desugar_expr"
|
||||
);
|
||||
}
|
||||
|
||||
Expr::BinOps(..) => {
|
||||
internal_error!("BinOps should have been desugared in desugar_expr");
|
||||
}
|
||||
|
||||
Expr::LowLevelDbg(dbg_src, arg, rest) => {
|
||||
if is_expr_suffixed(&arg.value) {
|
||||
// we cannot unwrap a suffixed expression within dbg
|
||||
// e.g. dbg (foo! "bar")
|
||||
return Err(EUnwrapped::Malformed);
|
||||
}
|
||||
|
||||
match unwrap_suffixed_expression(arena, rest, maybe_def_pat) {
|
||||
Ok(unwrapped_expr) => {
|
||||
let new_dbg = arena.alloc(Loc::at(
|
||||
loc_expr.region,
|
||||
LowLevelDbg(dbg_src, arg, unwrapped_expr),
|
||||
));
|
||||
return Ok(new_dbg);
|
||||
}
|
||||
Err(EUnwrapped::UnwrappedDefExpr(unwrapped_expr)) => {
|
||||
let new_dbg = arena.alloc(Loc::at(
|
||||
loc_expr.region,
|
||||
LowLevelDbg(dbg_src, arg, unwrapped_expr),
|
||||
));
|
||||
Err(EUnwrapped::UnwrappedDefExpr(new_dbg))
|
||||
}
|
||||
Err(EUnwrapped::UnwrappedSubExpr {
|
||||
sub_arg: unwrapped_expr,
|
||||
sub_pat,
|
||||
sub_new,
|
||||
}) => {
|
||||
let new_dbg = arena.alloc(Loc::at(
|
||||
loc_expr.region,
|
||||
LowLevelDbg(dbg_src, arg, unwrapped_expr),
|
||||
));
|
||||
Err(EUnwrapped::UnwrappedSubExpr {
|
||||
sub_arg: new_dbg,
|
||||
sub_pat,
|
||||
sub_new,
|
||||
})
|
||||
}
|
||||
Err(EUnwrapped::Malformed) => Err(EUnwrapped::Malformed),
|
||||
}
|
||||
}
|
||||
|
||||
Expr::Expect(condition, continuation) => {
|
||||
if is_expr_suffixed(&condition.value) {
|
||||
// we cannot unwrap a suffixed expression within expect
|
||||
// e.g. expect (foo! "bar")
|
||||
return Err(EUnwrapped::Malformed);
|
||||
}
|
||||
|
||||
match unwrap_suffixed_expression(arena, continuation, maybe_def_pat) {
|
||||
Ok(unwrapped_expr) => {
|
||||
let new_expect = arena
|
||||
.alloc(Loc::at(loc_expr.region, Expect(condition, unwrapped_expr)));
|
||||
return Ok(new_expect);
|
||||
}
|
||||
Err(EUnwrapped::UnwrappedDefExpr(unwrapped_expr)) => {
|
||||
let new_expect = arena
|
||||
.alloc(Loc::at(loc_expr.region, Expect(condition, unwrapped_expr)));
|
||||
Err(EUnwrapped::UnwrappedDefExpr(new_expect))
|
||||
}
|
||||
Err(EUnwrapped::UnwrappedSubExpr {
|
||||
sub_arg: unwrapped_expr,
|
||||
sub_pat,
|
||||
sub_new,
|
||||
}) => {
|
||||
let new_expect = arena
|
||||
.alloc(Loc::at(loc_expr.region, Expect(condition, unwrapped_expr)));
|
||||
Err(EUnwrapped::UnwrappedSubExpr {
|
||||
sub_arg: new_expect,
|
||||
sub_pat,
|
||||
sub_new,
|
||||
})
|
||||
}
|
||||
Err(EUnwrapped::Malformed) => Err(EUnwrapped::Malformed),
|
||||
}
|
||||
}
|
||||
|
||||
// we only need to unwrap some expressions, leave the rest as is
|
||||
_ => Ok(loc_expr),
|
||||
}
|
||||
};
|
||||
|
||||
// KEEP THIS HERE FOR DEBUGGING
|
||||
// USEFUL TO SEE THE UNWRAPPING
|
||||
// OF AST NODES AS THEY DESCEND
|
||||
// if is_expr_suffixed(&loc_expr.value) {
|
||||
// dbg!(&maybe_def_pat, &loc_expr, &unwrapped_expression);
|
||||
// }
|
||||
|
||||
unwrapped_expression
|
||||
}
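As a rough illustration of the doc comment above, here is the descend-and-rewrite idea on a deliberately tiny expression type (a conceptual sketch, not the roc AST or the actual `EUnwrapped` machinery): a def whose expression is suffixed becomes an `Await` node whose body is the rest of the program, and the rewrite recurses until no suffixed nodes remain.

```rust
#[derive(Debug)]
enum Expr {
    Var(String),
    // e.g. `foo!`
    Suffixed(Box<Expr>),
    // e.g. `x = <expr>` followed by <body>
    Def { pat: String, expr: Box<Expr>, body: Box<Expr> },
    // Task.await <arg> \<pat> -> <body>
    Await { arg: Box<Expr>, pat: String, body: Box<Expr> },
}

fn unwrap_suffixed(expr: Expr) -> Expr {
    match expr {
        // `x = foo!` followed by <body> becomes `Task.await foo \x -> <body>`
        Expr::Def { pat, expr, body } => match *expr {
            Expr::Suffixed(inner) => Expr::Await {
                arg: Box::new(unwrap_suffixed(*inner)),
                pat,
                body: Box::new(unwrap_suffixed(*body)),
            },
            other => Expr::Def {
                pat,
                expr: Box::new(unwrap_suffixed(other)),
                body: Box::new(unwrap_suffixed(*body)),
            },
        },
        // keep descending through wrappers we have already built
        Expr::Await { arg, pat, body } => Expr::Await {
            arg: Box::new(unwrap_suffixed(*arg)),
            pat,
            body: Box::new(unwrap_suffixed(*body)),
        },
        // vars (and everything else in a fuller AST) are left alone
        other => other,
    }
}

fn main() {
    // x = foo!
    // x
    let program = Expr::Def {
        pat: "x".to_string(),
        expr: Box::new(Expr::Suffixed(Box::new(Expr::Var("foo".to_string())))),
        body: Box::new(Expr::Var("x".to_string())),
    };

    // prints Await { arg: Var("foo"), pat: "x", body: Var("x") }
    println!("{:?}", unwrap_suffixed(program));
}
```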
|
||||
|
||||
pub fn unwrap_suffixed_expression_parens_help<'a>(
|
||||
arena: &'a Bump,
|
||||
loc_expr: &'a Loc<Expr<'a>>,
|
||||
_maybe_def_pat: Option<&'a Loc<Pattern<'a>>>,
|
||||
) -> Result<&'a Loc<Expr<'a>>, EUnwrapped<'a>> {
|
||||
match loc_expr.value {
|
||||
Expr::ParensAround(sub_loc_expr) => {
|
||||
// note we use `None` here as we always want to generate a new pattern from child expressions
|
||||
match unwrap_suffixed_expression(arena, arena.alloc(Loc::at_zero(*sub_loc_expr)), None)
|
||||
{
|
||||
Ok(new_expr) => {
|
||||
let new_parens = arena.alloc(Loc::at(
|
||||
loc_expr.region,
|
||||
ParensAround(arena.alloc(new_expr.value)),
|
||||
));
|
||||
Ok(new_parens)
|
||||
}
|
||||
Err(EUnwrapped::UnwrappedDefExpr(..)) => {
|
||||
internal_error!("unreachable, child expressions from ParensAround should generate UnwrappedSubExpr instead");
|
||||
}
|
||||
Err(EUnwrapped::UnwrappedSubExpr {
|
||||
sub_arg,
|
||||
sub_pat,
|
||||
sub_new,
|
||||
}) => {
|
||||
let new_parens = arena.alloc(Loc::at(
|
||||
loc_expr.region,
|
||||
ParensAround(arena.alloc(sub_new.value)),
|
||||
));
|
||||
Err(EUnwrapped::UnwrappedSubExpr {
|
||||
sub_arg,
|
||||
sub_pat,
|
||||
sub_new: new_parens,
|
||||
})
|
||||
}
|
||||
Err(err) => Err(err),
|
||||
}
|
||||
}
|
||||
_ => internal_error!("unreachable, expected a ParensAround node to be passed into unwrap_suffixed_expression_parens_help"),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn unwrap_suffixed_expression_closure_help<'a>(
|
||||
arena: &'a Bump,
|
||||
loc_expr: &'a Loc<Expr<'a>>,
|
||||
_maybe_def_pat: Option<&'a Loc<Pattern<'a>>>,
|
||||
) -> Result<&'a Loc<Expr<'a>>, EUnwrapped<'a>> {
|
||||
match loc_expr.value {
|
||||
Expr::Closure(closure_args, closure_loc_ret) => {
|
||||
// note we use `None` here as we don't want to pass a DefExpr up and
|
||||
// unwrap the definition pattern for the closure
|
||||
match unwrap_suffixed_expression(arena, closure_loc_ret, None) {
|
||||
Ok(unwrapped_expr) => {
|
||||
let new_closure = arena.alloc(Loc::at(loc_expr.region, Expr::Closure(closure_args, unwrapped_expr)));
|
||||
Ok(new_closure)
|
||||
}
|
||||
Err(EUnwrapped::UnwrappedSubExpr { sub_arg, sub_pat, sub_new }) => {
|
||||
let new_closure_loc_ret = apply_task_await(arena, loc_expr.region, sub_arg, sub_pat, sub_new);
|
||||
let new_closure = arena.alloc(Loc::at(loc_expr.region, Expr::Closure(closure_args, new_closure_loc_ret)));
|
||||
Ok(new_closure)
|
||||
}
|
||||
Err(err) => {
|
||||
debug_assert!(false,"the closure Defs was malformd, got {:#?}", err);
|
||||
Err(EUnwrapped::Malformed)
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => internal_error!("unreachable, expected a Closure node to be passed into unwrap_suffixed_expression_closure_help"),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn unwrap_suffixed_expression_apply_help<'a>(
|
||||
arena: &'a Bump,
|
||||
loc_expr: &'a Loc<Expr<'a>>,
|
||||
maybe_def_pat: Option<&'a Loc<Pattern<'a>>>,
|
||||
) -> Result<&'a Loc<Expr<'a>>, EUnwrapped<'a>> {
|
||||
match loc_expr.value {
|
||||
Expr::Apply(function, apply_args, called_via) => {
|
||||
|
||||
// Any suffixed arguments will be innermost, therefore we unwrap those first
|
||||
let local_args = arena.alloc_slice_copy(apply_args);
|
||||
for arg in local_args.iter_mut() {
|
||||
// Args are always expressions, don't pass `maybe_def_pat`
|
||||
match unwrap_suffixed_expression(arena, arg, None) {
|
||||
Ok(new_arg) => {
|
||||
*arg = new_arg;
|
||||
}
|
||||
Err(EUnwrapped::UnwrappedDefExpr(..)) => {
|
||||
internal_error!("unreachable, unwrapped arg cannot be def expression as `None` was passed as pattern");
|
||||
}
|
||||
Err(EUnwrapped::UnwrappedSubExpr { sub_arg, sub_pat, sub_new: new_arg }) => {
|
||||
|
||||
*arg = new_arg;
|
||||
|
||||
let new_apply = arena.alloc(Loc::at(loc_expr.region, Apply(function, local_args, called_via)));
|
||||
return Err(EUnwrapped::UnwrappedSubExpr { sub_arg, sub_pat, sub_new: new_apply});
|
||||
}
|
||||
Err(err) => return Err(err),
|
||||
}
|
||||
}
|
||||
|
||||
// special case for when our Apply function is a suffixed Var (but not multiple suffixed)
|
||||
if let Expr::TaskAwaitBang(sub_expr) = function.value {
|
||||
let unwrapped_function = arena.alloc(Loc::at(
|
||||
loc_expr.region,
|
||||
*sub_expr,
|
||||
));
|
||||
|
||||
let new_apply = arena.alloc(Loc::at(loc_expr.region, Expr::Apply(unwrapped_function, local_args, called_via)));
|
||||
|
||||
return init_unwrapped_err(arena, new_apply, maybe_def_pat);
|
||||
}
|
||||
|
||||
// function is another expression
|
||||
match unwrap_suffixed_expression(arena, function, maybe_def_pat) {
|
||||
Ok(new_function) => {
|
||||
let new_apply = arena.alloc(Loc::at(loc_expr.region, Expr::Apply(new_function, local_args, called_via)));
|
||||
Ok(new_apply)
|
||||
}
|
||||
Err(EUnwrapped::UnwrappedDefExpr(unwrapped_function)) => {
|
||||
let new_apply = arena.alloc(Loc::at(loc_expr.region, Expr::Apply(unwrapped_function, local_args, called_via)));
|
||||
Err(EUnwrapped::UnwrappedDefExpr(new_apply))
|
||||
}
|
||||
Err(EUnwrapped::UnwrappedSubExpr { sub_arg: unwrapped_function, sub_pat, sub_new }) => {
|
||||
|
||||
let new_apply = arena.alloc(Loc::at(loc_expr.region, Expr::Apply(sub_new, local_args, called_via)));
|
||||
|
||||
Err(EUnwrapped::UnwrappedSubExpr { sub_arg: unwrapped_function, sub_pat, sub_new:new_apply})
|
||||
}
|
||||
Err(err) => Err(err)
|
||||
}
|
||||
}
|
||||
_ => internal_error!("unreachable, expected an Apply node to be passed into unwrap_suffixed_expression_apply_help"),
|
||||
}
|
||||
}
|
||||
|
||||
/// Unwrap if-then-else statements
|
||||
pub fn unwrap_suffixed_expression_if_then_else_help<'a>(
|
||||
arena: &'a Bump,
|
||||
loc_expr: &'a Loc<Expr<'a>>,
|
||||
maybe_def_pat: Option<&'a Loc<Pattern<'a>>>,
|
||||
) -> Result<&'a Loc<Expr<'a>>, EUnwrapped<'a>> {
|
||||
match loc_expr.value {
|
||||
Expr::If(if_thens, final_else_branch) => {
|
||||
for (index, if_then) in if_thens.iter().enumerate() {
|
||||
let (current_if_then_statement, current_if_then_expression) = if_then;
|
||||
|
||||
// unwrap suffixed (innermost) expressions e.g. `if true then doThing! else ...`
|
||||
if is_expr_suffixed(¤t_if_then_expression.value) {
|
||||
// split if_thens around the current index
|
||||
let (before, after) = roc_parse::ast::split_around(if_thens, index);
|
||||
|
||||
match unwrap_suffixed_expression(arena, current_if_then_expression, None) {
|
||||
Ok(unwrapped_expression) => {
|
||||
let mut new_if_thens = Vec::new_in(arena);
|
||||
|
||||
new_if_thens.extend(before);
|
||||
new_if_thens.push((*current_if_then_statement, *unwrapped_expression));
|
||||
new_if_thens.extend(after);
|
||||
|
||||
let new_if = arena.alloc(Loc::at(
|
||||
loc_expr.region,
|
||||
Expr::If(
|
||||
arena.alloc_slice_copy(new_if_thens.as_slice()),
|
||||
final_else_branch,
|
||||
),
|
||||
));
|
||||
|
||||
return unwrap_suffixed_expression(arena, new_if, maybe_def_pat);
|
||||
}
|
||||
Err(EUnwrapped::UnwrappedDefExpr(..)) => {
|
||||
internal_error!("unexpected, unwrapped if-then-else Def expr should have intermediate answer as `None` was passed as pattern");
|
||||
}
|
||||
Err(EUnwrapped::UnwrappedSubExpr {
|
||||
sub_arg,
|
||||
sub_pat,
|
||||
sub_new,
|
||||
}) => {
|
||||
let unwrapped_expression =
|
||||
apply_task_await(arena, sub_arg.region, sub_arg, sub_pat, sub_new);
|
||||
|
||||
let mut new_if_thens = Vec::new_in(arena);
|
||||
|
||||
new_if_thens.extend(before);
|
||||
new_if_thens.push((*current_if_then_statement, *unwrapped_expression));
|
||||
new_if_thens.extend(after);
|
||||
|
||||
let new_if = arena.alloc(Loc::at(
|
||||
loc_expr.region,
|
||||
Expr::If(
|
||||
arena.alloc_slice_copy(new_if_thens.as_slice()),
|
||||
final_else_branch,
|
||||
),
|
||||
));
|
||||
|
||||
return unwrap_suffixed_expression(arena, new_if, maybe_def_pat);
|
||||
}
|
||||
Err(EUnwrapped::Malformed) => return Err(EUnwrapped::Malformed),
|
||||
}
|
||||
}
|
||||
|
||||
// unwrap suffixed statements e.g. `if isThing! then ...`
|
||||
// note we want to split and nest if-thens so we only run the Tasks
|
||||
// that are required
|
||||
if is_expr_suffixed(¤t_if_then_statement.value) {
|
||||
// split if_thens around the current index
|
||||
let (before, after) = roc_parse::ast::split_around(if_thens, index);
|
||||
|
||||
match unwrap_suffixed_expression(arena, current_if_then_statement, None) {
|
||||
Ok(unwrapped_statement) => {
|
||||
let mut new_if_thens = Vec::new_in(arena);
|
||||
|
||||
new_if_thens.push((*unwrapped_statement, *current_if_then_expression));
|
||||
new_if_thens.extend(after);
|
||||
|
||||
let new_if = arena.alloc(Loc::at(
|
||||
loc_expr.region,
|
||||
Expr::If(
|
||||
arena.alloc_slice_copy(new_if_thens.as_slice()),
|
||||
final_else_branch,
|
||||
),
|
||||
));
|
||||
|
||||
return unwrap_suffixed_expression(arena, new_if, maybe_def_pat);
|
||||
}
|
||||
Err(EUnwrapped::UnwrappedDefExpr(..)) => {
|
||||
internal_error!("unexpected, unwrapped if-then-else Def expr should have intermediate answer as `None` was passed as pattern");
|
||||
}
|
||||
Err(EUnwrapped::UnwrappedSubExpr {
|
||||
sub_arg,
|
||||
sub_pat,
|
||||
sub_new,
|
||||
}) => {
|
||||
if before.is_empty() {
|
||||
let mut new_if_thens = Vec::new_in(arena);
|
||||
|
||||
new_if_thens.extend(before);
|
||||
new_if_thens.push((*sub_new, *current_if_then_expression));
|
||||
new_if_thens.extend(after);
|
||||
|
||||
let new_if = arena.alloc(Loc::at(
|
||||
loc_expr.region,
|
||||
Expr::If(
|
||||
arena.alloc_slice_copy(new_if_thens.as_slice()),
|
||||
final_else_branch,
|
||||
),
|
||||
));
|
||||
|
||||
let unwrapped_if_then = apply_task_await(
|
||||
arena,
|
||||
sub_arg.region,
|
||||
sub_arg,
|
||||
sub_pat,
|
||||
new_if,
|
||||
);
|
||||
|
||||
return unwrap_suffixed_expression(
|
||||
arena,
|
||||
unwrapped_if_then,
|
||||
maybe_def_pat,
|
||||
);
|
||||
} else {
|
||||
let mut after_if_thens = Vec::new_in(arena);
|
||||
|
||||
after_if_thens.push((*sub_new, *current_if_then_expression));
|
||||
after_if_thens.extend(after);
|
||||
|
||||
let after_if = arena.alloc(Loc::at(
|
||||
loc_expr.region,
|
||||
Expr::If(
|
||||
arena.alloc_slice_copy(after_if_thens.as_slice()),
|
||||
final_else_branch,
|
||||
),
|
||||
));
|
||||
|
||||
let after_if_then = apply_task_await(
|
||||
arena,
|
||||
sub_arg.region,
|
||||
sub_arg,
|
||||
sub_pat,
|
||||
after_if,
|
||||
);
|
||||
|
||||
let before_if_then = arena.alloc(Loc::at(
|
||||
loc_expr.region,
|
||||
Expr::If(before, after_if_then),
|
||||
));
|
||||
|
||||
return unwrap_suffixed_expression(
|
||||
arena,
|
||||
before_if_then,
|
||||
maybe_def_pat,
|
||||
);
|
||||
}
|
||||
}
|
||||
Err(EUnwrapped::Malformed) => return Err(EUnwrapped::Malformed),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// check the final_else_branch
|
||||
match unwrap_suffixed_expression(arena, final_else_branch, None) {
|
||||
Ok(unwrapped_final_else) => {
|
||||
return Ok(arena.alloc(Loc::at(
|
||||
loc_expr.region,
|
||||
Expr::If(if_thens, unwrapped_final_else),
|
||||
)));
|
||||
}
|
||||
Err(EUnwrapped::UnwrappedDefExpr(..)) => {
|
||||
internal_error!("unexpected, unwrapped if-then-else Def expr should have intermediate answer as `None` was passed as pattern");
|
||||
}
|
||||
Err(EUnwrapped::UnwrappedSubExpr {
|
||||
sub_arg,
|
||||
sub_pat,
|
||||
sub_new,
|
||||
}) => {
|
||||
let unwrapped_final_else =
|
||||
apply_task_await(arena, sub_arg.region, sub_arg, sub_pat, sub_new);
|
||||
|
||||
let new_if = arena.alloc(Loc::at(
|
||||
loc_expr.region,
|
||||
Expr::If(if_thens, unwrapped_final_else),
|
||||
));
|
||||
|
||||
return unwrap_suffixed_expression(arena, new_if, maybe_def_pat);
|
||||
}
|
||||
Err(EUnwrapped::Malformed) => Err(EUnwrapped::Malformed),
|
||||
}
|
||||
}
|
||||
_ => internal_error!("unreachable, expected an If expression to desugar"),
|
||||
}
|
||||
}
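Both the if-then and the defs handling rely on splitting a sequence around the suffixed element so that everything before it stays in place and everything after it gets nested inside the new `Task.await`. The slicing step itself is simple; a sketch of the idea (`split_around` below is a local stand-in for illustration, not `roc_parse::ast::split_around`):

```rust
// Return everything before `target` and everything after it,
// dropping the target element itself so it can be handled separately.
fn split_around<T>(items: &[T], target: usize) -> (&[T], &[T]) {
    let (before, rest) = items.split_at(target);
    (before, &rest[1..])
}

fn main() {
    let defs = ["a", "b", "c", "d"];
    let (before, after) = split_around(&defs, 2);

    assert_eq!(before.to_vec(), vec!["a", "b"]);
    assert_eq!(after.to_vec(), vec!["d"]);
}
```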
|
||||
|
||||
pub fn unwrap_suffixed_expression_when_help<'a>(
|
||||
arena: &'a Bump,
|
||||
loc_expr: &'a Loc<Expr<'a>>,
|
||||
maybe_def_pat: Option<&'a Loc<Pattern<'a>>>,
|
||||
) -> Result<&'a Loc<Expr<'a>>, EUnwrapped<'a>> {
|
||||
match loc_expr.value {
|
||||
Expr::When(condition, branches) => {
|
||||
|
||||
// first unwrap any when branches values
|
||||
// e.g.
|
||||
// when foo is
|
||||
// [] -> line! "bar"
|
||||
// _ -> line! "baz"
|
||||
for (branch_index, WhenBranch{value: branch_loc_expr,patterns, guard}) in branches.iter().enumerate() {
|
||||
|
||||
// if the branch isn't suffixed we can leave it alone
|
||||
if is_expr_suffixed(&branch_loc_expr.value) {
|
||||
let unwrapped_branch_value = match unwrap_suffixed_expression(arena, branch_loc_expr, None) {
|
||||
Ok(unwrapped_branch_value) => unwrapped_branch_value,
|
||||
Err(EUnwrapped::UnwrappedSubExpr { sub_arg, sub_pat, sub_new }) => apply_task_await(arena, branch_loc_expr.region, sub_arg, sub_pat, sub_new),
|
||||
Err(..) => return Err(EUnwrapped::Malformed),
|
||||
};
|
||||
|
||||
// TODO: unwrap guard
|
||||
|
||||
let new_branch = WhenBranch{value: *unwrapped_branch_value, patterns, guard: *guard};
|
||||
let mut new_branches = Vec::new_in(arena);
|
||||
let (before, rest) = branches.split_at(branch_index);
|
||||
let after = &rest[1..];
|
||||
|
||||
new_branches.extend_from_slice(before);
|
||||
new_branches.push(arena.alloc(new_branch));
|
||||
new_branches.extend_from_slice(after);
|
||||
|
||||
let new_when = arena.alloc(Loc::at(loc_expr.region, Expr::When(condition, arena.alloc_slice_copy(new_branches.as_slice()))));
|
||||
|
||||
return unwrap_suffixed_expression(arena, new_when, maybe_def_pat);
|
||||
}
|
||||
}
|
||||
|
||||
// then unwrap the when condition
|
||||
match unwrap_suffixed_expression(arena, condition, None) {
|
||||
Ok(unwrapped_condition) => {
|
||||
let new_when = arena.alloc(Loc::at(loc_expr.region, Expr::When(unwrapped_condition, branches)));
|
||||
Ok(new_when)
|
||||
}
|
||||
Err(EUnwrapped::UnwrappedSubExpr { sub_arg, sub_pat, sub_new }) => {
|
||||
let new_when = arena.alloc(Loc::at(loc_expr.region, Expr::When(sub_new, branches)));
|
||||
let applied_task_await = apply_task_await(arena,loc_expr.region,sub_arg,sub_pat,new_when);
|
||||
Ok(applied_task_await)
|
||||
}
|
||||
Err(EUnwrapped::UnwrappedDefExpr(..))
|
||||
| Err(EUnwrapped::Malformed) => Err(EUnwrapped::Malformed)
|
||||
}
|
||||
|
||||
}
|
||||
_ => internal_error!("unreachable, expected a When node to be passed into unwrap_suffixed_expression_defs_help"),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn unwrap_suffixed_expression_defs_help<'a>(
|
||||
arena: &'a Bump,
|
||||
loc_expr: &'a Loc<Expr<'a>>,
|
||||
maybe_def_pat: Option<&'a Loc<Pattern<'a>>>,
|
||||
) -> Result<&'a Loc<Expr<'a>>, EUnwrapped<'a>> {
|
||||
match loc_expr.value {
|
||||
Expr::Defs(defs, loc_ret) => {
|
||||
|
||||
let mut local_defs = defs.clone();
|
||||
let tags = local_defs.tags.clone();
|
||||
|
||||
// try to unwrap each def; if none can be unwrapped, then try to unwrap the loc_ret
|
||||
for (tag_index, type_or_value_def_index) in tags.iter().enumerate() {
|
||||
use ValueDef::*;
|
||||
|
||||
let mut current_value_def = match type_or_value_def_index.split() {
|
||||
Ok(..) => {
|
||||
// ignore type definitions
|
||||
continue;
|
||||
},
|
||||
Err(value_index) => *local_defs.value_defs.get(value_index.index()).unwrap(),
|
||||
};
|
||||
|
||||
let maybe_suffixed_value_def = match current_value_def {
|
||||
Annotation(..) | Dbg{..} | Expect{..} | ExpectFx{..} | Stmt(..) | ModuleImport{..} | IngestedFileImport(_) => None,
|
||||
AnnotatedBody { body_pattern, body_expr, .. } => Some((body_pattern, body_expr)),
|
||||
Body (def_pattern, def_expr, .. ) => Some((def_pattern, def_expr)),
|
||||
};
|
||||
|
||||
match maybe_suffixed_value_def {
|
||||
None => {
|
||||
// We can't unwrap this def type, continue
|
||||
},
|
||||
Some((def_pattern, def_expr)) => {
|
||||
match unwrap_suffixed_expression(arena, def_expr, Some(def_pattern)) {
|
||||
Ok(unwrapped_def) => {
|
||||
current_value_def.replace_expr(unwrapped_def);
|
||||
local_defs.replace_with_value_def(tag_index, current_value_def, def_expr.region);
|
||||
}
|
||||
Err(EUnwrapped::UnwrappedDefExpr(unwrapped_expr)) => {
|
||||
let split_defs = local_defs.split_defs_around(tag_index);
|
||||
let before_empty = split_defs.before.is_empty();
|
||||
let after_empty = split_defs.after.is_empty();
|
||||
if before_empty && after_empty {
|
||||
// NIL before, NIL after -> SINGLE DEF
|
||||
let next_expr = match unwrap_suffixed_expression(arena,loc_ret,maybe_def_pat) {
|
||||
Ok(next_expr) => next_expr,
|
||||
Err(EUnwrapped::UnwrappedSubExpr { sub_arg, sub_pat, sub_new }) => {
|
||||
// We need to apply Task.ok here as the defs final expression was unwrapped
|
||||
apply_task_await(arena,def_expr.region,sub_arg,sub_pat,sub_new)
|
||||
}
|
||||
Err(EUnwrapped::UnwrappedDefExpr(..)) | Err(EUnwrapped::Malformed) => {
|
||||
// TODO handle the case when we have maybe_def_pat so we can return an unwrapped def expr up
|
||||
return Err(EUnwrapped::Malformed);
|
||||
},
|
||||
};
|
||||
return unwrap_suffixed_expression(arena, apply_task_await(arena,def_expr.region,unwrapped_expr,def_pattern,next_expr), maybe_def_pat);
|
||||
} else if before_empty {
|
||||
// NIL before, SOME after -> FIRST DEF
|
||||
let new_defs = arena.alloc(Loc::at(def_expr.region, Defs(arena.alloc(split_defs.after), loc_ret)));
|
||||
|
||||
let next_expr = match unwrap_suffixed_expression(arena,new_defs,maybe_def_pat){
|
||||
Ok(next_expr) => next_expr,
|
||||
Err(EUnwrapped::UnwrappedSubExpr { sub_arg, sub_pat, sub_new }) => {
|
||||
apply_task_await(arena, def_expr.region, sub_arg, sub_pat, sub_new)
|
||||
}
|
||||
Err(EUnwrapped::UnwrappedDefExpr(..)) | Err(EUnwrapped::Malformed) => {
|
||||
// TODO handle the case when we have maybe_def_pat so we can return an unwrapped def expr up
|
||||
return Err(EUnwrapped::Malformed);
|
||||
},
|
||||
};
|
||||
|
||||
return unwrap_suffixed_expression(arena, apply_task_await(arena,def_expr.region,unwrapped_expr,def_pattern,next_expr), maybe_def_pat);
|
||||
} else if after_empty {
|
||||
// SOME before, NIL after -> LAST DEF
|
||||
match unwrap_suffixed_expression(arena,loc_ret,maybe_def_pat){
|
||||
Ok(new_loc_ret) => {
|
||||
let applied_task_await = apply_task_await(arena, loc_expr.region, unwrapped_expr, def_pattern, new_loc_ret);
|
||||
let new_defs = arena.alloc(Loc::at(loc_expr.region,Defs(arena.alloc(split_defs.before), applied_task_await)));
|
||||
return unwrap_suffixed_expression(arena, new_defs, maybe_def_pat);
|
||||
},
|
||||
Err(EUnwrapped::UnwrappedSubExpr { sub_arg, sub_pat, sub_new }) => {
|
||||
let new_loc_ret = apply_task_await(arena,def_expr.region,sub_arg,sub_pat,sub_new);
|
||||
let applied_task_await = apply_task_await(arena, loc_expr.region, unwrapped_expr, def_pattern, new_loc_ret);
|
||||
let new_defs = arena.alloc(Loc::at(loc_expr.region,Defs(arena.alloc(split_defs.before), applied_task_await)));
|
||||
return unwrap_suffixed_expression(arena, new_defs, maybe_def_pat);
|
||||
}
|
||||
Err(EUnwrapped::UnwrappedDefExpr(..)) => {
|
||||
// TODO confirm this is correct with test case
|
||||
return Err(EUnwrapped::Malformed);
|
||||
}
|
||||
Err(EUnwrapped::Malformed) => {
|
||||
return Err(EUnwrapped::Malformed);
|
||||
},
|
||||
}
|
||||
} else {
|
||||
// SOME before, SOME after -> MIDDLE DEF
|
||||
let after_defs = arena.alloc(Loc::at(def_expr.region, Defs(arena.alloc(split_defs.after), loc_ret)));
|
||||
|
||||
match unwrap_suffixed_expression(arena,after_defs,maybe_def_pat){
|
||||
Ok(new_loc_ret) => {
|
||||
let applied_await = apply_task_await(arena, loc_expr.region, unwrapped_expr, def_pattern, new_loc_ret);
|
||||
let new_defs = arena.alloc(Loc::at(loc_expr.region,Defs(arena.alloc(split_defs.before), applied_await)));
|
||||
return unwrap_suffixed_expression(arena, new_defs, maybe_def_pat);
|
||||
},
|
||||
Err(EUnwrapped::UnwrappedSubExpr { sub_arg, sub_pat, sub_new }) => {
|
||||
let new_loc_ret = apply_task_await(arena, def_expr.region, sub_arg, sub_pat, sub_new);
|
||||
let applied_await = apply_task_await(arena, loc_expr.region, unwrapped_expr, def_pattern, new_loc_ret);
|
||||
let new_defs = arena.alloc(Loc::at(loc_expr.region,Defs(arena.alloc(split_defs.before), applied_await)));
|
||||
return unwrap_suffixed_expression(arena, new_defs, maybe_def_pat);
|
||||
}
|
||||
Err(EUnwrapped::UnwrappedDefExpr(..)) | Err(EUnwrapped::Malformed) => {
|
||||
// TODO handle the case when we have maybe_def_pat so we can return an unwrapped def expr up
|
||||
return Err(EUnwrapped::Malformed);
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
Err(EUnwrapped::UnwrappedSubExpr { sub_arg, sub_pat, sub_new }) => {
|
||||
let new_body_def = ValueDef::Body(def_pattern, sub_new);
|
||||
local_defs.replace_with_value_def(tag_index,new_body_def, sub_new.region);
|
||||
let new_defs_expr = arena.alloc(Loc::at(def_expr.region,Defs(arena.alloc(local_defs), loc_ret)));
|
||||
let replaced_def = apply_task_await(arena,def_expr.region,sub_arg,sub_pat,new_defs_expr);
|
||||
return unwrap_suffixed_expression(arena,replaced_def,maybe_def_pat);
|
||||
}
|
||||
Err(err) => return Err(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// try to unwrap the loc_ret
|
||||
match unwrap_suffixed_expression(arena,loc_ret,maybe_def_pat){
|
||||
Ok(new_loc_ret) => {
|
||||
Ok(arena.alloc(Loc::at(loc_expr.region,Defs(arena.alloc(local_defs), new_loc_ret))))
|
||||
},
|
||||
Err(EUnwrapped::UnwrappedSubExpr { sub_arg, sub_pat, sub_new }) => {
|
||||
let new_loc_ret = apply_task_await(arena, loc_expr.region,sub_arg,sub_pat,sub_new);
|
||||
let new_defs = arena.alloc(Loc::at(loc_expr.region,Defs(arena.alloc(local_defs), new_loc_ret)));
|
||||
unwrap_suffixed_expression(arena, new_defs, maybe_def_pat)
|
||||
}
|
||||
Err(EUnwrapped::UnwrappedDefExpr(..)) => {
|
||||
// TODO confirm this is correct with test case
|
||||
Err(EUnwrapped::Malformed)
|
||||
}
|
||||
Err(EUnwrapped::Malformed) => {
|
||||
Err(EUnwrapped::Malformed)
|
||||
},
|
||||
}
|
||||
},
|
||||
_ => internal_error!("unreachable, expected a Defs node to be passed into unwrap_suffixed_expression_defs_help"),
|
||||
}
|
||||
}
|
||||
|
||||
/// Helper for `Task.await (loc_arg) \loc_pat -> loc_new`
|
||||
pub fn apply_task_await<'a>(
|
||||
arena: &'a Bump,
|
||||
region: Region,
|
||||
loc_arg: &'a Loc<Expr<'a>>,
|
||||
loc_pat: &'a Loc<Pattern<'a>>,
|
||||
loc_new: &'a Loc<Expr<'a>>,
|
||||
) -> &'a Loc<Expr<'a>> {
|
||||
// If the pattern and the new expression are the same then we don't need to unwrap anything
|
||||
// e.g. `Task.await foo \{} -> Task.ok {}` is the same as `foo`
|
||||
if is_matching_empty_record(loc_pat, loc_new) {
|
||||
return loc_arg;
|
||||
}
|
||||
|
||||
// If the pattern and the new expression are matching intermediate answers then we don't need to unwrap anything
|
||||
// e.g. `Task.await foo \#!a1 -> Task.ok #!a1` is the same as `foo`
|
||||
if is_matching_intermediate_answer(loc_pat, loc_new) {
|
||||
return loc_arg;
|
||||
}
|
||||
|
||||
let mut task_await_apply_args: Vec<&'a Loc<Expr<'a>>> = Vec::new_in(arena);
|
||||
|
||||
// apply the unwrapped suffixed expression
|
||||
task_await_apply_args.push(loc_arg);
|
||||
|
||||
// apply the closure
|
||||
let mut closure_pattern = Vec::new_in(arena);
|
||||
closure_pattern.push(*loc_pat);
|
||||
task_await_apply_args.push(arena.alloc(Loc::at(
|
||||
region,
|
||||
Closure(arena.alloc_slice_copy(closure_pattern.as_slice()), loc_new),
|
||||
)));
|
||||
|
||||
arena.alloc(Loc::at(
|
||||
region,
|
||||
Apply(
|
||||
arena.alloc(Loc {
|
||||
region,
|
||||
value: Var {
|
||||
module_name: ModuleName::TASK,
|
||||
ident: "await",
|
||||
},
|
||||
}),
|
||||
arena.alloc(task_await_apply_args),
|
||||
CalledVia::BangSuffix,
|
||||
),
|
||||
))
|
||||
}
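The two short-circuits at the top of `apply_task_await` drop the wrapper when we would only await a value in order to return it unchanged. Sketched with `Result` standing in for `Task` (an analogy, not the roc implementation), this is the right-identity law of `and_then`: `Task.await foo \#!a1 -> Task.ok #!a1` behaves the same as plain `foo`.

```rust
fn main() {
    let foo: Result<i32, String> = Ok(42);

    // `Task.await foo \#!a0 -> Task.ok #!a0` corresponds to:
    let wrapped = foo.clone().and_then(|a0| Ok(a0));

    // ...and is the same value as plain `foo`, so the wrapper can be dropped
    assert_eq!(wrapped, foo);
}
```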
|
||||
|
||||
fn extract_wrapped_task_ok_value<'a>(loc_expr: &'a Loc<Expr<'a>>) -> Option<&'a Loc<Expr<'a>>> {
|
||||
match loc_expr.value {
|
||||
Expr::Apply(function, arguments, _) => match function.value {
|
||||
Var {
|
||||
module_name, ident, ..
|
||||
} if module_name == ModuleName::TASK && ident == "ok" => arguments.first().copied(),
|
||||
_ => None,
|
||||
},
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn is_matching_empty_record<'a>(
|
||||
loc_pat: &'a Loc<Pattern<'a>>,
|
||||
loc_expr: &'a Loc<Expr<'a>>,
|
||||
) -> bool {
|
||||
let is_empty_record = match extract_wrapped_task_ok_value(loc_expr) {
|
||||
Some(task_expr) => match task_expr.value {
|
||||
Expr::Record(collection) => collection.is_empty(),
|
||||
_ => false,
|
||||
},
|
||||
None => false,
|
||||
};
|
||||
|
||||
let is_pattern_empty_record = match loc_pat.value {
|
||||
Pattern::RecordDestructure(collection) => collection.is_empty(),
|
||||
_ => false,
|
||||
};
|
||||
|
||||
is_empty_record && is_pattern_empty_record
|
||||
}
|
||||
|
||||
pub fn is_matching_intermediate_answer<'a>(
|
||||
loc_pat: &'a Loc<Pattern<'a>>,
|
||||
loc_new: &'a Loc<Expr<'a>>,
|
||||
) -> bool {
|
||||
let pat_ident = match loc_pat.value {
|
||||
Pattern::Identifier { ident, .. } => Some(ident),
|
||||
_ => None,
|
||||
};
|
||||
let exp_ident = match loc_new.value {
|
||||
Expr::Var {
|
||||
module_name, ident, ..
|
||||
} if module_name.is_empty() && ident.starts_with('#') => Some(ident),
|
||||
_ => None,
|
||||
};
|
||||
let exp_ident_in_task = match extract_wrapped_task_ok_value(loc_new) {
|
||||
Some(task_expr) => match task_expr.value {
|
||||
Expr::Var {
|
||||
module_name, ident, ..
|
||||
} if module_name.is_empty() && ident.starts_with('#') => Some(ident),
|
||||
_ => None,
|
||||
},
|
||||
None => None,
|
||||
};
|
||||
match (pat_ident, exp_ident, exp_ident_in_task) {
|
||||
(Some(a), Some(b), None) => a == b,
|
||||
(Some(a), None, Some(b)) => a == b,
|
||||
_ => false,
|
||||
}
|
||||
}
|
|
@ -7,12 +7,13 @@ use roc_can::expr::Output;
|
|||
use roc_can::expr::{canonicalize_expr, Expr};
|
||||
use roc_can::scope::Scope;
|
||||
use roc_collections::all::MutMap;
|
||||
use roc_module::symbol::{IdentIds, Interns, ModuleId, ModuleIds, Symbol};
|
||||
use roc_module::symbol::{IdentIds, Interns, ModuleId, ModuleIds, PackageModuleIds, Symbol};
|
||||
use roc_problem::can::Problem;
|
||||
use roc_region::all::{Loc, Region};
|
||||
use roc_types::subs::{VarStore, Variable};
|
||||
use roc_types::types::{AliasVar, Type};
|
||||
use std::hash::Hash;
|
||||
use std::path::Path;
|
||||
|
||||
pub fn test_home() -> ModuleId {
|
||||
ModuleIds::default().get_or_insert(&"Test".into())
|
||||
|
@ -43,7 +44,7 @@ pub fn can_expr_with(arena: &Bump, home: ModuleId, expr_str: &str) -> CanExprOut
|
|||
|
||||
let mut var_store = VarStore::default();
|
||||
let var = var_store.fresh();
|
||||
let module_ids = ModuleIds::default();
|
||||
let qualified_module_ids = PackageModuleIds::default();
|
||||
|
||||
// Desugar operators (convert them to Apply calls, taking into account
|
||||
// operator precedence and associativity rules), before doing other canonicalization.
|
||||
|
@ -60,7 +61,12 @@ pub fn can_expr_with(arena: &Bump, home: ModuleId, expr_str: &str) -> CanExprOut
|
|||
arena.alloc("TestPath"),
|
||||
);
|
||||
|
||||
let mut scope = Scope::new(home, IdentIds::default(), Default::default());
|
||||
let mut scope = Scope::new(
|
||||
home,
|
||||
"TestPath".into(),
|
||||
IdentIds::default(),
|
||||
Default::default(),
|
||||
);
|
||||
scope.add_alias(
|
||||
Symbol::NUM_INT,
|
||||
Region::zero(),
|
||||
|
@ -74,7 +80,14 @@ pub fn can_expr_with(arena: &Bump, home: ModuleId, expr_str: &str) -> CanExprOut
|
|||
);
|
||||
|
||||
let dep_idents = IdentIds::exposed_builtins(0);
|
||||
let mut env = Env::new(arena, home, &dep_idents, &module_ids);
|
||||
let mut env = Env::new(
|
||||
arena,
|
||||
home,
|
||||
Path::new("Test.roc"),
|
||||
&dep_idents,
|
||||
&qualified_module_ids,
|
||||
None,
|
||||
);
|
||||
let (loc_expr, output) = canonicalize_expr(
|
||||
&mut env,
|
||||
&mut var_store,
|
||||
|
@ -87,7 +100,7 @@ pub fn can_expr_with(arena: &Bump, home: ModuleId, expr_str: &str) -> CanExprOut
|
|||
all_ident_ids.insert(home, scope.locals.ident_ids);
|
||||
|
||||
let interns = Interns {
|
||||
module_ids: env.module_ids.clone(),
|
||||
module_ids: env.qualified_module_ids.clone().into_module_ids(),
|
||||
all_ident_ids,
|
||||
};
|
||||
|
||||
|
|
922
crates/compiler/can/tests/test_suffixed.rs
Normal file
922
crates/compiler/can/tests/test_suffixed.rs
Normal file
|
@ -0,0 +1,922 @@
|
|||
#[cfg(test)]
|
||||
mod suffixed_tests {
|
||||
|
||||
use bumpalo::Bump;
|
||||
use roc_can::desugar::desugar_defs_node_values;
|
||||
use roc_parse::test_helpers::parse_defs_with;
|
||||
use roc_test_utils::assert_multiline_str_eq;
|
||||
|
||||
fn run_test(src: &str, expected: &str) {
|
||||
let arena = &Bump::new();
|
||||
let mut defs = parse_defs_with(arena, src).unwrap();
|
||||
desugar_defs_node_values(arena, &mut defs, src, &mut None, "test.roc", true);
|
||||
print!("{:#?}", &defs);
|
||||
assert_multiline_str_eq!(format!("{:?}", &defs).as_str(), expected);
|
||||
}
|
||||
|
||||
/**
|
||||
* This example tests a suffixed statement, followed
|
||||
* by a Body with an empty record pattern.
|
||||
*
|
||||
* The def final expression is explicitly provided.
|
||||
*
|
||||
```roc
|
||||
main =
|
||||
line! "Ahoy"
|
||||
{} = "There" |> Stdout.line!
|
||||
|
||||
Task.ok {}
|
||||
|
||||
main =
|
||||
Task.await [line "Ahoy"] \{} ->
|
||||
Task.await [Stdout.line "there"] \{} ->
|
||||
Task.ok {}
|
||||
|
||||
main =
|
||||
Task.await [line "Ahoy"] \{} -> Stdout.line "there"
|
||||
```
|
||||
*/
|
||||
#[test]
|
||||
fn multi_defs_stmts() {
|
||||
run_test(
|
||||
r#"
|
||||
main =
|
||||
line! "Ahoy"
|
||||
{} = "There" |> Stdout.line!
|
||||
|
||||
Task.ok {}
|
||||
"#,
|
||||
r#"Defs { tags: [Index(2147483648)], regions: [@0-125], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@0-4 Identifier { ident: "main" }, @29-36 Apply(@29-36 Var { module_name: "Task", ident: "await" }, [@29-36 Apply(@29-36 Var { module_name: "", ident: "line" }, [@30-36 Str(PlainLine("Ahoy"))], Space), @29-36 Closure([@29-36 RecordDestructure([])], @58-80 Apply(@58-80 Var { module_name: "Stdout", ident: "line" }, [@58-65 Str(PlainLine("There"))], BinOp(Pizza)))], BangSuffix))] }"#,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* The most simple suffixed example. A single statement
|
||||
* without arguments and a final expression.
|
||||
```roc
|
||||
main =
|
||||
foo!
|
||||
|
||||
ok {}
|
||||
|
||||
main =
|
||||
Task.await [foo] \{} ->
|
||||
ok {}
|
||||
```
|
||||
*/
|
||||
#[test]
|
||||
|
||||
fn basic() {
|
||||
run_test(
|
||||
r#"
|
||||
main =
|
||||
foo!
|
||||
|
||||
ok {}
|
||||
"#,
|
||||
r#"Defs { tags: [Index(2147483648)], regions: [@0-47], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@0-4 Identifier { ident: "main" }, @24-24 Apply(@24-24 Var { module_name: "Task", ident: "await" }, [@24-24 Var { module_name: "", ident: "foo" }, @24-24 Closure([@24-24 RecordDestructure([])], @42-47 Apply(@42-44 Var { module_name: "", ident: "ok" }, [@45-47 Record([])], Space))], BangSuffix))] }"#,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* A single suffixed statement with arguments applied.
|
||||
* Note there is no final expression.
|
||||
```roc
|
||||
main = foo! "bar" {} "baz"
|
||||
|
||||
main = foo "bar" {} "baz"
|
||||
```
|
||||
*/
|
||||
#[test]
|
||||
fn last_suffixed_single() {
|
||||
run_test(
|
||||
r#"
|
||||
main = foo! "bar" {} "baz"
|
||||
"#,
|
||||
r#"Defs { tags: [Index(2147483648)], regions: [@0-26], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@0-4 Identifier { ident: "main" }, @0-26 Apply(@0-26 Var { module_name: "", ident: "foo" }, [@12-17 Str(PlainLine("bar")), @18-20 Record([]), @21-26 Str(PlainLine("baz"))], Space))] }"#,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Multiple suffixed statements with no
|
||||
* arguments, and no final expression.
|
||||
```roc
|
||||
main =
|
||||
foo!
|
||||
bar!
|
||||
baz!
|
||||
|
||||
main =
|
||||
Task.await foo \{} ->
|
||||
Task.await bar \{} ->
|
||||
Task.await baz \{} ->
|
||||
Task.ok {}
|
||||
|
||||
main =
|
||||
Task.await foo \{} ->
|
||||
Task.await bar \{} ->
|
||||
baz
|
||||
```
|
||||
*/
|
||||
#[test]
|
||||
fn last_suffixed_multiple() {
|
||||
run_test(
|
||||
r#"
|
||||
main =
|
||||
foo!
|
||||
bar!
|
||||
baz!
|
||||
"#,
|
||||
r#"Defs { tags: [Index(2147483648)], regions: [@0-70], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@0-4 Identifier { ident: "main" }, @28-28 Apply(@28-28 Var { module_name: "Task", ident: "await" }, [@28-28 Var { module_name: "", ident: "foo" }, @28-28 Closure([@28-28 RecordDestructure([])], @45-49 Apply(@45-49 Var { module_name: "Task", ident: "await" }, [@45-49 Var { module_name: "", ident: "bar" }, @45-49 Closure([@45-49 RecordDestructure([])], @66-70 Var { module_name: "", ident: "baz" })], BangSuffix))], BangSuffix))] }"#,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* A definition with a closure that contains a Defs node, which also
|
||||
* contains a suffixed binops statement.
|
||||
```roc
|
||||
main =
|
||||
x = \msg ->
|
||||
msg |> line!
|
||||
ok {}
|
||||
|
||||
x "hi"
|
||||
|
||||
main =
|
||||
|
||||
x = \msg ->
|
||||
Task.await [line msg] \{} -> ok {}
|
||||
|
||||
x "hi"
|
||||
```
|
||||
*/
|
||||
#[test]
|
||||
fn closure_simple() {
|
||||
run_test(
|
||||
r#"
|
||||
main =
|
||||
x = \msg ->
|
||||
msg |> line!
|
||||
ok {}
|
||||
|
||||
x "hi"
|
||||
"#,
|
||||
r#"Defs { tags: [Index(2147483648)], regions: [@0-118], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@0-4 Identifier { ident: "main" }, @23-118 Defs(Defs { tags: [Index(2147483649)], regions: [@27-94], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@23-24 Identifier { ident: "x" }, @27-94 Closure([@28-31 Identifier { ident: "msg" }], @55-94 Defs(Defs { tags: [Index(2147483648)], regions: [@55-66], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@55-66 RecordDestructure([]), @55-66 Apply(@62-66 TaskAwaitBang(Var { module_name: "", ident: "line" }), [@55-58 Var { module_name: "", ident: "msg" }], BinOp(Pizza)))] }, @89-94 Apply(@89-91 Var { module_name: "", ident: "ok" }, [@92-94 Record([])], Space)))), Body(@23-24 Identifier { ident: "x" }, @27-94 Closure([@28-31 Identifier { ident: "msg" }], @55-66 Apply(@55-66 Var { module_name: "Task", ident: "await" }, [@55-66 Apply(@55-66 Var { module_name: "", ident: "line" }, [@55-58 Var { module_name: "", ident: "msg" }], BinOp(Pizza)), @55-66 Closure([@55-66 RecordDestructure([])], @89-94 Apply(@89-91 Var { module_name: "", ident: "ok" }, [@92-94 Record([])], Space))], BangSuffix)))] }, @112-118 Apply(@112-113 Var { module_name: "", ident: "x" }, [@114-118 Str(PlainLine("hi"))], Space)))] }"#,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Example of unwrapping a pipeline statement
|
||||
*
|
||||
* Note pipelines are desugared into Apply functions,
|
||||
* however this also tests the parser.
|
||||
*
|
||||
```roc
|
||||
main =
|
||||
"hello"
|
||||
|> Str.concat "world"
|
||||
|> line!
|
||||
|
||||
Task.ok {}
|
||||
|
||||
main =
|
||||
Task.await [line [Str.concat "hello" "world"]] \{} ->
|
||||
Task.ok {}
|
||||
|
||||
main =
|
||||
line (Str.concat "hello" "world")
|
||||
```
|
||||
*/
|
||||
#[test]
|
||||
fn simple_pizza() {
|
||||
run_test(
|
||||
r#"
|
||||
main =
|
||||
"hello"
|
||||
|> Str.concat "world"
|
||||
|> line!
|
||||
|
||||
Task.ok {}
|
||||
"#,
|
||||
r#"Defs { tags: [Index(2147483648)], regions: [@0-130], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@0-4 Identifier { ident: "main" }, @24-93 Apply(@24-93 Var { module_name: "", ident: "line" }, [@24-69 Apply(@51-61 Var { module_name: "Str", ident: "concat" }, [@24-31 Str(PlainLine("hello")), @62-69 Str(PlainLine("world"))], BinOp(Pizza))], BinOp(Pizza)))] }"#,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Example with a parens suffixed sub-expression
|
||||
* in the function part of an Apply.
|
||||
*
|
||||
* Note how the parens unwraps into an intermediate answer #!a0 instead of
|
||||
* unwrapping the def `do`.
|
||||
*
|
||||
```roc
|
||||
main =
|
||||
do = (sayMultiple!) "hi"
|
||||
do
|
||||
|
||||
main =
|
||||
Task.await [sayMultiple] \#!a0 ->
|
||||
do = (#!a0) "hi"
|
||||
do
|
||||
```
|
||||
*/
|
||||
#[test]
|
||||
fn body_parens_apply() {
|
||||
run_test(
|
||||
r#"
|
||||
main =
|
||||
do = (sayMultiple!) "hi"
|
||||
do
|
||||
"#,
|
||||
r##"Defs { tags: [Index(2147483648)], regions: [@0-66], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@0-4 Identifier { ident: "main" }, @28-47 Apply(@28-47 Var { module_name: "Task", ident: "await" }, [Var { module_name: "", ident: "sayMultiple" }, @28-47 Closure([Identifier { ident: "#!a0" }], @28-47 Defs(Defs { tags: [Index(2147483650)], regions: [@28-47], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@23-25 Identifier { ident: "do" }, @28-47 Apply(@29-41 ParensAround(TaskAwaitBang(Var { module_name: "", ident: "sayMultiple" })), [@43-47 Str(PlainLine("hi"))], Space)), Body(@23-25 Identifier { ident: "do" }, @28-47 Apply(@29-41 ParensAround(Var { module_name: "", ident: "#!a0" }), [@43-47 Str(PlainLine("hi"))], Space)), Body(@23-25 Identifier { ident: "do" }, @28-47 Apply(@29-41 ParensAround(Var { module_name: "", ident: "#!a0" }), [@43-47 Str(PlainLine("hi"))], Space))] }, @64-66 Var { module_name: "", ident: "do" }))], BangSuffix))] }"##,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Example of unwrapping mixed Body defs with
|
||||
* Var's of both single and multiple suffixes
|
||||
```roc
|
||||
main =
|
||||
a = foo!
|
||||
b = bar!!
|
||||
baz a b
|
||||
|
||||
main =
|
||||
Task.await [foo] \a ->
|
||||
b = bar!!
|
||||
baz a b
|
||||
|
||||
main =
|
||||
Task.await [foo] \a ->
|
||||
Task.await [bar] \#!a0 ->
|
||||
b = #!a0!
|
||||
baz a b
|
||||
|
||||
main =
|
||||
Task.await [foo] \a ->
|
||||
Task.await [bar] \#!a0 ->
|
||||
Task.await #!a0 \b -> baz a b
|
||||
```
|
||||
*/
|
||||
#[test]
|
||||
fn var_suffixes() {
|
||||
run_test(
|
||||
r#"
|
||||
main =
|
||||
a = foo!
|
||||
b = bar!!
|
||||
baz a b
|
||||
"#,
|
||||
r##"Defs { tags: [Index(2147483648)], regions: [@0-81], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@0-4 Identifier { ident: "main" }, @27-31 Apply(@27-31 Var { module_name: "Task", ident: "await" }, [@27-31 Var { module_name: "", ident: "foo" }, @27-31 Closure([@23-24 Identifier { ident: "a" }], @27-31 Apply(@27-31 Var { module_name: "Task", ident: "await" }, [@48-57 Var { module_name: "", ident: "bar" }, @27-31 Closure([@48-57 Identifier { ident: "#!a0" }], @48-57 Apply(@48-57 Var { module_name: "Task", ident: "await" }, [@48-57 Var { module_name: "", ident: "#!a0" }, @48-57 Closure([@48-49 Identifier { ident: "b" }], @74-81 Apply(@74-77 Var { module_name: "", ident: "baz" }, [@78-79 Var { module_name: "", ident: "a" }, @80-81 Var { module_name: "", ident: "b" }], Space))], BangSuffix))], BangSuffix))], BangSuffix))] }"##,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Example with a multiple suffixed Var
|
||||
*
|
||||
* Note it unwraps into an intermediate answer `#!a0`
|
||||
*
|
||||
```roc
|
||||
main =
|
||||
foo!!
|
||||
bar
|
||||
|
||||
main =
|
||||
Task.await [foo] \#!a0 ->
|
||||
#!a0!
|
||||
bar
|
||||
|
||||
main =
|
||||
Task.await [foo] \#!a0 ->
|
||||
Task.await [#!a0] \{} -> bar
|
||||
```
|
||||
*/
|
||||
#[test]
|
||||
fn multiple_suffix() {
|
||||
run_test(
|
||||
r#"
|
||||
main =
|
||||
foo!!
|
||||
bar
|
||||
"#,
|
||||
r##"Defs { tags: [Index(2147483648)], regions: [@0-49], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@0-4 Identifier { ident: "main" }, @24-49 Apply(@24-49 Var { module_name: "Task", ident: "await" }, [@29-29 Var { module_name: "", ident: "foo" }, @24-49 Closure([@29-29 Identifier { ident: "#!a0" }], @29-29 Apply(@29-29 Var { module_name: "Task", ident: "await" }, [@29-29 Var { module_name: "", ident: "#!a0" }, @29-29 Closure([@29-29 RecordDestructure([])], @46-49 Var { module_name: "", ident: "bar" })], BangSuffix))], BangSuffix))] }"##,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* A suffixed expression in the function part of the Apply
|
||||
```roc
|
||||
main =
|
||||
x = (foo! "bar") "hello"
|
||||
baz x
|
||||
|
||||
main =
|
||||
Task.await [foo "bar"] \#!a0 ->
|
||||
x = (#!a0) "hello"
|
||||
baz x
|
||||
```
|
||||
*/
|
||||
#[test]
|
||||
fn apply_function_suffixed() {
|
||||
run_test(
|
||||
r#"
|
||||
main =
|
||||
x = (foo! "bar") "hello"
|
||||
baz x
|
||||
"#,
|
||||
r##"Defs { tags: [Index(2147483648)], regions: [@0-70], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@0-4 Identifier { ident: "main" }, @28-48 Apply(@28-48 Var { module_name: "Task", ident: "await" }, [Apply(Var { module_name: "", ident: "foo" }, [@34-39 Str(PlainLine("bar"))], Space), @28-48 Closure([Identifier { ident: "#!a0" }], @28-48 Defs(Defs { tags: [Index(2147483650)], regions: [@28-48], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@24-25 Identifier { ident: "x" }, @28-48 Apply(@29-39 ParensAround(Apply(@29-32 TaskAwaitBang(Var { module_name: "", ident: "foo" }), [@34-39 Str(PlainLine("bar"))], Space)), [@41-48 Str(PlainLine("hello"))], Space)), Body(@24-25 Identifier { ident: "x" }, @28-48 Apply(@29-39 ParensAround(Var { module_name: "", ident: "#!a0" }), [@41-48 Str(PlainLine("hello"))], Space)), Body(@24-25 Identifier { ident: "x" }, @28-48 Apply(@29-39 ParensAround(Var { module_name: "", ident: "#!a0" }), [@41-48 Str(PlainLine("hello"))], Space))] }, @65-70 Apply(@65-68 Var { module_name: "", ident: "baz" }, [@69-70 Var { module_name: "", ident: "x" }], Space)))], BangSuffix))] }"##,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* A suffixed expression in an Apply argument position.
|
||||
```roc
|
||||
main =
|
||||
x = bar (foo! "hello")
|
||||
baz x
|
||||
|
||||
main =
|
||||
Task.await [foo "hello"] \#!a0 ->
|
||||
x = bar (#!a0)
|
||||
baz x
|
||||
```
|
||||
*/
|
||||
#[test]
|
||||
fn apply_argument_suffixed() {
|
||||
run_test(
|
||||
r#"
|
||||
main =
|
||||
x = bar (foo! "hello")
|
||||
baz x
|
||||
"#,
|
||||
r##"Defs { tags: [Index(2147483648)], regions: [@0-68], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@0-4 Identifier { ident: "main" }, @28-46 Apply(@28-46 Var { module_name: "Task", ident: "await" }, [Apply(Var { module_name: "", ident: "foo" }, [@38-45 Str(PlainLine("hello"))], Space), @28-46 Closure([Identifier { ident: "#!a0" }], @28-46 Defs(Defs { tags: [Index(2147483650)], regions: [@28-46], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@24-25 Identifier { ident: "x" }, @28-46 Apply(@28-31 Var { module_name: "", ident: "bar" }, [@33-45 ParensAround(Apply(@33-36 TaskAwaitBang(Var { module_name: "", ident: "foo" }), [@38-45 Str(PlainLine("hello"))], Space))], Space)), Body(@24-25 Identifier { ident: "x" }, @28-46 Apply(@28-31 Var { module_name: "", ident: "bar" }, [@33-45 ParensAround(Var { module_name: "", ident: "#!a0" })], Space)), Body(@24-25 Identifier { ident: "x" }, @28-46 Apply(@28-31 Var { module_name: "", ident: "bar" }, [@33-45 ParensAround(Var { module_name: "", ident: "#!a0" })], Space))] }, @63-68 Apply(@63-66 Var { module_name: "", ident: "baz" }, [@67-68 Var { module_name: "", ident: "x" }], Space)))], BangSuffix))] }"##,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Example where the suffixed def is not the first def
|
||||
```roc
|
||||
main =
|
||||
msg = "hello"
|
||||
x = foo! msg
|
||||
bar x
|
||||
|
||||
main =
|
||||
msg = "hello"
|
||||
Task.await [foo msg] \x -> bar x
|
||||
```
|
||||
*/
|
||||
#[test]
|
||||
fn multiple_def_first_suffixed() {
|
||||
run_test(
|
||||
r#"
|
||||
main =
|
||||
msg = "hello"
|
||||
x = foo! msg
|
||||
bar x
|
||||
"#,
|
||||
r#"Defs { tags: [Index(2147483648)], regions: [@0-88], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@0-4 Identifier { ident: "main" }, @0-88 Defs(Defs { tags: [Index(2147483649)], regions: [@30-37], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@24-27 Identifier { ident: "msg" }, @30-37 Str(PlainLine("hello"))), Body(@24-27 Identifier { ident: "msg" }, @30-37 Str(PlainLine("hello")))] }, @0-88 Apply(@0-88 Var { module_name: "Task", ident: "await" }, [@54-66 Apply(@54-66 Var { module_name: "", ident: "foo" }, [@63-66 Var { module_name: "", ident: "msg" }], Space), @0-88 Closure([@54-55 Identifier { ident: "x" }], @83-88 Apply(@83-86 Var { module_name: "", ident: "bar" }, [@87-88 Var { module_name: "", ident: "x" }], Space))], BangSuffix)))] }"#,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Annotated defs and a suffixed expression
|
||||
* with annotations inside a closure
|
||||
```roc
|
||||
main =
|
||||
x : Str -> Task _ _
|
||||
x = \msg ->
|
||||
|
||||
y : Task {} _
|
||||
y = line! msg
|
||||
y
|
||||
|
||||
x "foo"
|
||||
|
||||
main =
|
||||
x : Str -> Task _ _
|
||||
x = \msg ->
|
||||
Task.await [line msg] \y -> y
|
||||
|
||||
x "foo"
|
||||
```
|
||||
*/
|
||||
|
||||
#[test]
|
||||
fn closure_with_annotations() {
|
||||
run_test(
|
||||
r#"
|
||||
main =
|
||||
x : Str -> Task _ _
|
||||
x = \msg ->
|
||||
|
||||
y : Task {} _
|
||||
y = line! msg
|
||||
y
|
||||
|
||||
x "foo"
|
||||
"#,
|
||||
r#"Defs { tags: [Index(2147483648)], regions: [@0-187], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@0-4 Identifier { ident: "main" }, @0-187 Defs(Defs { tags: [Index(2147483650)], regions: [@60-162], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Annotation(@24-25 Identifier { ident: "x" }, @28-43 Function([@28-31 Apply("", "Str", [])], @35-43 Apply("", "Task", [@40-41 Inferred, @42-43 Inferred]))), AnnotatedBody { ann_pattern: @24-25 Identifier { ident: "x" }, ann_type: @28-43 Function([@28-31 Apply("", "Str", [])], @35-43 Apply("", "Task", [@40-41 Inferred, @42-43 Inferred])), comment: None, body_pattern: @60-61 Identifier { ident: "x" }, body_expr: @60-162 Closure([@65-68 Identifier { ident: "msg" }], @93-162 Defs(Defs { tags: [Index(2147483649)], regions: [@93-140], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Annotation(@93-94 Identifier { ident: "y" }, @97-106 Apply("", "Task", [@102-104 Record { fields: [], ext: None }, @105-106 Inferred])), AnnotatedBody { ann_pattern: @93-94 Identifier { ident: "y" }, ann_type: @97-106 Apply("", "Task", [@102-104 Record { fields: [], ext: None }, @105-106 Inferred]), comment: None, body_pattern: @127-128 Identifier { ident: "y" }, body_expr: @127-140 Apply(@131-135 TaskAwaitBang(Var { module_name: "", ident: "line" }), [@137-140 Var { module_name: "", ident: "msg" }], Space) }] }, @161-162 Var { module_name: "", ident: "y" })) }, AnnotatedBody { ann_pattern: @24-25 Identifier { ident: "x" }, ann_type: @28-43 Function([@28-31 Apply("", "Str", [])], @35-43 Apply("", "Task", [@40-41 Inferred, @42-43 Inferred])), comment: None, body_pattern: @60-61 Identifier { ident: "x" }, body_expr: @60-162 Closure([@65-68 Identifier { ident: "msg" }], @127-140 Apply(@127-140 Var { module_name: "Task", ident: "await" }, [@127-140 Apply(@127-140 Var { module_name: "", ident: "line" }, [@137-140 Var { module_name: "", ident: "msg" }], Space), @127-140 Closure([@127-128 Identifier { ident: "y" }], @161-162 Var { module_name: "", ident: "y" })], BangSuffix)) }] }, @180-187 Apply(@180-181 Var { module_name: "", ident: "x" }, [@182-187 Str(PlainLine("foo"))], Space)))] }"#,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Nested suffixed expressions
|
||||
```roc
|
||||
run = line! (nextMsg!)
|
||||
|
||||
run = Task.await nextMsg \#!a0 -> line! (#!a0)
|
||||
|
||||
run = Task.await nextMsg \#!a0 -> line (#!a0)
|
||||
```
|
||||
*/
|
||||
#[test]
|
||||
fn nested_simple() {
|
||||
run_test(
|
||||
r#"
|
||||
run = line! (nextMsg!)
|
||||
"#,
|
||||
r##"Defs { tags: [Index(2147483648)], regions: [@0-22], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@0-3 Identifier { ident: "run" }, @0-22 Apply(@0-22 Var { module_name: "Task", ident: "await" }, [Var { module_name: "", ident: "nextMsg" }, @0-22 Closure([Identifier { ident: "#!a0" }], @0-22 Apply(@0-22 Var { module_name: "", ident: "line" }, [@13-21 ParensAround(Var { module_name: "", ident: "#!a0" })], Space))], BangSuffix))] }"##,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Nested suffixed expressions
|
||||
```roc
|
||||
main =
|
||||
z = foo! (bar! baz) (blah stuff)
|
||||
doSomething z
|
||||
|
||||
main =
|
||||
Task.await [bar baz] \#!a0 ->
|
||||
z = foo! (#!a0) (blah stuff)
|
||||
doSomething z
|
||||
|
||||
main =
|
||||
Task.await [bar baz] \#!a0 ->
|
||||
Task.await [foo (#!a0) (blah stuff)] \z -> doSomething z
|
||||
```
|
||||
*/
|
||||
#[test]
|
||||
fn nested_complex() {
|
||||
run_test(
|
||||
r#"
|
||||
main =
|
||||
z = foo! (bar! baz) (blah stuff)
|
||||
doSomething z
|
||||
"#,
|
||||
r##"Defs { tags: [Index(2147483648)], regions: [@0-86], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@0-4 Identifier { ident: "main" }, @28-56 Apply(@28-56 Var { module_name: "Task", ident: "await" }, [Apply(Var { module_name: "", ident: "bar" }, [@39-42 Var { module_name: "", ident: "baz" }], Space), @28-56 Closure([Identifier { ident: "#!a0" }], @28-56 Apply(@28-56 Var { module_name: "Task", ident: "await" }, [@28-56 Apply(@28-56 Var { module_name: "", ident: "foo" }, [@34-42 ParensAround(Var { module_name: "", ident: "#!a0" }), @45-55 ParensAround(Apply(@45-49 Var { module_name: "", ident: "blah" }, [@50-55 Var { module_name: "", ident: "stuff" }], Space))], Space), @28-56 Closure([@24-25 Identifier { ident: "z" }], @73-86 Apply(@73-84 Var { module_name: "", ident: "doSomething" }, [@85-86 Var { module_name: "", ident: "z" }], Space))], BangSuffix))], BangSuffix))] }"##,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* A closure that contains a Defs node
|
||||
```roc
|
||||
main = foo "bar" {} "baz"
|
||||
|
||||
foo : Str, {}, Str -> Task {} I32
|
||||
foo = \a, _, b ->
|
||||
line! a
|
||||
line! b
|
||||
|
||||
Task.ok {}
|
||||
|
||||
foo : Str, {}, Str -> Task {} I32
|
||||
foo = \a, _, b ->
|
||||
Task.await line a \{} ->
|
||||
line! b
|
||||
|
||||
Task.ok {}
|
||||
|
||||
foo : Str, {}, Str -> Task {} I32
|
||||
foo = \a, _, b ->
|
||||
Task.await [line a] \{} ->
|
||||
Task.await [line b] \{} ->
|
||||
Task.ok {}
|
||||
|
||||
foo : Str, {}, Str -> Task {} I32
|
||||
foo = \a, _, b ->
|
||||
Task.await [line a] \{} -> line b
|
||||
```
|
||||
*/
|
||||
#[test]
|
||||
fn closure_with_defs() {
|
||||
run_test(
|
||||
r#"
|
||||
main =
|
||||
|
||||
foo : Str, {}, Str -> Task {} I32
|
||||
foo = \a, _, b ->
|
||||
line! a
|
||||
line! b
|
||||
|
||||
Task.ok {}
|
||||
|
||||
foo "bar" {} "baz"
|
||||
"#,
|
||||
r#"Defs { tags: [Index(2147483648)], regions: [@0-249], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@0-4 Identifier { ident: "main" }, @25-249 Defs(Defs { tags: [Index(2147483650)], regions: [@81-193], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Annotation(@25-28 Identifier { ident: "foo" }, @31-58 Function([@31-34 Apply("", "Str", []), @36-38 Record { fields: [], ext: None }, @40-43 Apply("", "Str", [])], @47-58 Apply("", "Task", [@52-54 Record { fields: [], ext: None }, @55-58 Apply("", "I32", [])]))), AnnotatedBody { ann_pattern: @25-28 Identifier { ident: "foo" }, ann_type: @31-58 Function([@31-34 Apply("", "Str", []), @36-38 Record { fields: [], ext: None }, @40-43 Apply("", "Str", [])], @47-58 Apply("", "Task", [@52-54 Record { fields: [], ext: None }, @55-58 Apply("", "I32", [])])), comment: None, body_pattern: @75-78 Identifier { ident: "foo" }, body_expr: @81-193 Closure([@82-83 Identifier { ident: "a" }, @85-86 Underscore(""), @88-89 Identifier { ident: "b" }], @114-193 Defs(Defs { tags: [Index(2147483648), Index(2147483649)], regions: [@119-121, @142-149], space_before: [Slice(start = 0, length = 0), Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0), Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@119-121 RecordDestructure([]), @119-121 Apply(@114-118 TaskAwaitBang(Var { module_name: "", ident: "line" }), [@120-121 Var { module_name: "", ident: "a" }], Space)), Body(@142-149 RecordDestructure([]), @142-149 Apply(@142-146 TaskAwaitBang(Var { module_name: "", ident: "line" }), [@148-149 Var { module_name: "", ident: "b" }], Space))] }, @183-193 Apply(@183-190 Var { module_name: "Task", ident: "ok" }, [@191-193 Record([])], Space))) }, AnnotatedBody { ann_pattern: @25-28 Identifier { ident: "foo" }, ann_type: @31-58 Function([@31-34 Apply("", "Str", []), @36-38 Record { fields: [], ext: None }, @40-43 Apply("", "Str", [])], @47-58 Apply("", "Task", [@52-54 Record { fields: [], ext: None }, @55-58 Apply("", "I32", [])])), comment: None, body_pattern: @75-78 Identifier { ident: "foo" }, body_expr: @81-193 Closure([@82-83 Identifier { ident: "a" }, @85-86 Underscore(""), @88-89 Identifier { ident: "b" }], @119-121 Apply(@119-121 Var { module_name: "Task", ident: "await" }, [@119-121 Apply(@119-121 Var { module_name: "", ident: "line" }, [@120-121 Var { module_name: "", ident: "a" }], Space), @119-121 Closure([@119-121 RecordDestructure([])], @142-149 Apply(@142-149 Var { module_name: "", ident: "line" }, [@148-149 Var { module_name: "", ident: "b" }], Space))], BangSuffix)) }] }, @231-249 Apply(@231-234 Var { module_name: "", ident: "foo" }, [@235-240 Str(PlainLine("bar")), @241-243 Record([]), @244-249 Str(PlainLine("baz"))], Space)))] }"#,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Test when the suffixed def being unwrapped is not the first or last
|
||||
```roc
|
||||
main =
|
||||
a = "Foo"
|
||||
Stdout.line! a
|
||||
|
||||
printBar!
|
||||
|
||||
printBar =
|
||||
b = "Bar"
|
||||
Stdout.line b
|
||||
|
||||
main =
|
||||
a = "Foo"
|
||||
Task.await [Stdout.line a] \{} ->
|
||||
printBar!
|
||||
|
||||
main =
|
||||
a = "Foo"
|
||||
Task.await [Stdout.line a] \{} ->
|
||||
Task.await [printBar] \{} ->
|
||||
Task.ok {}
|
||||
|
||||
main =
|
||||
a = "Foo"
|
||||
Task.await [Stdout.line a] \{} ->
|
||||
printBar
|
||||
```
|
||||
*/
|
||||
#[test]
|
||||
fn defs_suffixed_middle() {
|
||||
run_test(
|
||||
r#"
|
||||
main =
|
||||
a = "Foo"
|
||||
Stdout.line! a
|
||||
|
||||
printBar!
|
||||
|
||||
printBar =
|
||||
b = "Bar"
|
||||
Stdout.line b
|
||||
"#,
|
||||
r#"Defs { tags: [Index(2147483648), Index(2147483649)], regions: [@0-90, @120-186], space_before: [Slice(start = 0, length = 0), Slice(start = 0, length = 2)], space_after: [Slice(start = 0, length = 0), Slice(start = 2, length = 0)], spaces: [Newline, Newline], type_defs: [], value_defs: [Body(@0-4 Identifier { ident: "main" }, @23-90 Defs(Defs { tags: [Index(2147483649)], regions: [@27-32], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@23-24 Identifier { ident: "a" }, @27-32 Str(PlainLine("Foo"))), Body(@23-24 Identifier { ident: "a" }, @27-32 Str(PlainLine("Foo")))] }, @23-90 Apply(@23-90 Var { module_name: "Task", ident: "await" }, [@49-63 Apply(@49-63 Var { module_name: "Stdout", ident: "line" }, [@62-63 Var { module_name: "", ident: "a" }], Space), @23-90 Closure([@49-63 RecordDestructure([])], @81-90 Var { module_name: "", ident: "printBar" })], BangSuffix))), Body(@120-128 Identifier { ident: "printBar" }, @147-186 Defs(Defs { tags: [Index(2147483649)], regions: [@151-156], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@147-148 Identifier { ident: "b" }, @151-156 Str(PlainLine("Bar"))), Body(@147-148 Identifier { ident: "b" }, @151-156 Str(PlainLine("Bar")))] }, @173-186 Apply(@173-184 Var { module_name: "Stdout", ident: "line" }, [@185-186 Var { module_name: "", ident: "b" }], Space)))] }"#,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* A simple if-then-else expression whose suffixed conditions are split into nested Task.await calls
|
||||
```roc
|
||||
|
||||
main =
|
||||
isTrue = Task.ok Bool.true
|
||||
isFalse = Task.ok Bool.false
|
||||
|
||||
if isFalse! then
|
||||
line "fail"
|
||||
else if isTrue! then
|
||||
line "success"
|
||||
else
|
||||
line "fail"
|
||||
|
||||
main =
|
||||
isTrue = Task.ok Bool.true
isFalse = Task.ok Bool.false
|
||||
|
||||
Task.await isFalse \#!a0 ->
|
||||
if #!a0 then
|
||||
line "fail"
|
||||
else
|
||||
Task.await isTrue \#!a1 ->
|
||||
if #!a1 then
|
||||
line "success"
|
||||
else
|
||||
line "fail"
|
||||
```
|
||||
*/
|
||||
#[test]
|
||||
fn if_simple() {
|
||||
run_test(
|
||||
r#"
|
||||
main =
|
||||
isTrue = Task.ok Bool.true
|
||||
isFalse = Task.ok Bool.false
|
||||
|
||||
if isFalse! then
|
||||
line "fail"
|
||||
else if isTrue! then
|
||||
line "success"
|
||||
else
|
||||
line "fail"
|
||||
"#,
|
||||
r##"Defs { tags: [Index(2147483648)], regions: [@0-286], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@0-4 Identifier { ident: "main" }, @23-286 Defs(Defs { tags: [Index(2147483650), Index(2147483651)], regions: [@32-49, @76-94], space_before: [Slice(start = 0, length = 0), Slice(start = 0, length = 1)], space_after: [Slice(start = 0, length = 0), Slice(start = 1, length = 0)], spaces: [Newline], type_defs: [], value_defs: [Body(@23-29 Identifier { ident: "isTrue" }, @32-49 Apply(@32-39 Var { module_name: "Task", ident: "ok" }, [@40-49 Var { module_name: "Bool", ident: "true" }], Space)), Body(@66-73 Identifier { ident: "isFalse" }, @76-94 Apply(@76-83 Var { module_name: "Task", ident: "ok" }, [@84-94 Var { module_name: "Bool", ident: "false" }], Space)), Body(@23-29 Identifier { ident: "isTrue" }, @32-49 Apply(@32-39 Var { module_name: "Task", ident: "ok" }, [@40-49 Var { module_name: "Bool", ident: "true" }], Space)), Body(@66-73 Identifier { ident: "isFalse" }, @76-94 Apply(@76-83 Var { module_name: "Task", ident: "ok" }, [@84-94 Var { module_name: "Bool", ident: "false" }], Space))] }, @115-123 Apply(@115-123 Var { module_name: "Task", ident: "await" }, [@115-123 Var { module_name: "", ident: "isFalse" }, @115-123 Closure([@115-123 Identifier { ident: "#!a0" }], @112-286 If([(@115-123 Var { module_name: "", ident: "#!a0" }, @149-160 Apply(@149-153 Var { module_name: "", ident: "line" }, [@154-160 Str(PlainLine("fail"))], Space))], @185-192 Apply(@185-192 Var { module_name: "Task", ident: "await" }, [@185-192 Var { module_name: "", ident: "isTrue" }, @185-192 Closure([@185-192 Identifier { ident: "#!a1" }], @112-286 If([(@185-192 Var { module_name: "", ident: "#!a1" }, @219-233 Apply(@219-223 Var { module_name: "", ident: "line" }, [@224-233 Str(PlainLine("success"))], Space))], @275-286 Apply(@275-279 Var { module_name: "", ident: "line" }, [@280-286 Str(PlainLine("fail"))], Space)))], BangSuffix)))], BangSuffix)))] }"##,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* A more complex example including the use of nested Defs nodes
|
||||
```roc
|
||||
# OTHER DEFS AND INTERMEDIATE STEPS NOT SHOWN
|
||||
msg =
|
||||
Task.await isTrue \#!a0 ->
|
||||
if !(#!a0) then
|
||||
Task.await line "fail" \{} -> err 1
|
||||
else
|
||||
Task.await isFalsey Bool.false \#!a1 ->
|
||||
if (#!a1) then
|
||||
Task.await line "nope" \{} -> ok {}
|
||||
else
|
||||
line "success"
|
||||
```
|
||||
*/
|
||||
#[test]
|
||||
fn if_complex() {
|
||||
run_test(
|
||||
r#"
|
||||
main =
|
||||
isTrue = Task.ok Bool.true
|
||||
isFalsey = \x -> Task.ok x
|
||||
msg : Task {} I32
|
||||
msg =
|
||||
if !(isTrue!) then
|
||||
line! "fail"
|
||||
err 1
|
||||
else if (isFalsey! Bool.false) then
|
||||
line! "nope"
|
||||
ok {}
|
||||
else
|
||||
line! "success"
|
||||
|
||||
msg
|
||||
"#,
|
||||
r##"Defs { tags: [Index(2147483648)], regions: [@0-466], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@0-4 Identifier { ident: "main" }, @0-466 Defs(Defs { tags: [Index(2147483652), Index(2147483653), Index(2147483654)], regions: [@32-49, @77-92, @143-445], space_before: [Slice(start = 0, length = 0), Slice(start = 0, length = 1), Slice(start = 1, length = 1)], space_after: [Slice(start = 0, length = 0), Slice(start = 1, length = 0), Slice(start = 2, length = 0)], spaces: [Newline, Newline], type_defs: [], value_defs: [Body(@23-29 Identifier { ident: "isTrue" }, @32-49 Apply(@32-39 Var { module_name: "Task", ident: "ok" }, [@40-49 Var { module_name: "Bool", ident: "true" }], Space)), Body(@66-74 Identifier { ident: "isFalsey" }, @77-92 Closure([@78-79 Identifier { ident: "x" }], @83-92 Apply(@83-90 Var { module_name: "Task", ident: "ok" }, [@91-92 Var { module_name: "", ident: "x" }], Space))), Annotation(@109-112 Identifier { ident: "msg" }, @115-126 Apply("", "Task", [@120-122 Record { fields: [], ext: None }, @123-126 Apply("", "I32", [])])), AnnotatedBody { ann_pattern: @109-112 Identifier { ident: "msg" }, ann_type: @115-126 Apply("", "Task", [@120-122 Record { fields: [], ext: None }, @123-126 Apply("", "I32", [])]), comment: None, body_pattern: @143-146 Identifier { ident: "msg" }, body_expr: @143-445 If([(@173-183 Apply(@173-174 Var { module_name: "Bool", ident: "not" }, [@175-182 ParensAround(TaskAwaitBang(Var { module_name: "", ident: "isTrue" }))], UnaryOp(Not)), @213-256 Defs(Defs { tags: [Index(2147483648)], regions: [@218-225], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@218-225 RecordDestructure([]), @218-225 Apply(@213-217 TaskAwaitBang(Var { module_name: "", ident: "line" }), [@219-225 Str(PlainLine("fail"))], Space))] }, @251-256 Apply(@251-254 Var { module_name: "", ident: "err" }, [@255-256 Num("1")], Space))), (@285-307 ParensAround(Apply(@286-294 TaskAwaitBang(Var { module_name: "", ident: "isFalsey" }), [@296-306 Var { module_name: "Bool", ident: "false" }], Space)), @338-380 Defs(Defs { tags: [Index(2147483648)], regions: [@343-350], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@343-350 RecordDestructure([]), @343-350 Apply(@338-342 TaskAwaitBang(Var { module_name: "", ident: "line" }), [@344-350 Str(PlainLine("nope"))], Space))] }, @375-380 Apply(@375-377 Var { module_name: "", ident: "ok" }, [@378-380 Record([])], Space)))], @430-445 Apply(@430-434 TaskAwaitBang(Var { module_name: "", ident: "line" }), [@436-445 Str(PlainLine("success"))], Space)) }, Body(@23-29 Identifier { ident: "isTrue" }, @32-49 Apply(@32-39 Var { module_name: "Task", ident: "ok" }, [@40-49 Var { module_name: "Bool", ident: "true" }], Space)), Body(@66-74 Identifier { ident: "isFalsey" }, @77-92 Closure([@78-79 Identifier { ident: "x" }], @83-92 Apply(@83-90 Var { module_name: "Task", ident: "ok" }, [@91-92 Var { module_name: "", ident: "x" }], Space))), AnnotatedBody { ann_pattern: @109-112 Identifier { ident: "msg" }, ann_type: @115-126 Apply("", "Task", [@120-122 Record { fields: [], ext: None }, @123-126 Apply("", "I32", [])]), comment: None, body_pattern: @143-146 Identifier { ident: "msg" }, body_expr: Apply(Var { module_name: "Task", ident: "await" }, [Var { module_name: "", ident: "isTrue" }, Closure([Identifier { 
ident: "#!a0" }], @143-445 If([(@173-183 Apply(@173-174 Var { module_name: "Bool", ident: "not" }, [@175-182 ParensAround(Var { module_name: "", ident: "#!a0" })], UnaryOp(Not)), @218-225 Apply(@218-225 Var { module_name: "Task", ident: "await" }, [@218-225 Apply(@218-225 Var { module_name: "", ident: "line" }, [@219-225 Str(PlainLine("fail"))], Space), @218-225 Closure([@218-225 RecordDestructure([])], @251-256 Apply(@251-254 Var { module_name: "", ident: "err" }, [@255-256 Num("1")], Space))], BangSuffix))], Apply(Var { module_name: "Task", ident: "await" }, [Apply(Var { module_name: "", ident: "isFalsey" }, [@296-306 Var { module_name: "Bool", ident: "false" }], Space), Closure([Identifier { ident: "#!a1" }], @143-445 If([(@285-307 ParensAround(Var { module_name: "", ident: "#!a1" }), @343-350 Apply(@343-350 Var { module_name: "Task", ident: "await" }, [@343-350 Apply(@343-350 Var { module_name: "", ident: "line" }, [@344-350 Str(PlainLine("nope"))], Space), @343-350 Closure([@343-350 RecordDestructure([])], @375-380 Apply(@375-377 Var { module_name: "", ident: "ok" }, [@378-380 Record([])], Space))], BangSuffix))], @430-445 Apply(@430-445 Var { module_name: "", ident: "line" }, [@436-445 Str(PlainLine("success"))], Space)))], BangSuffix)))], BangSuffix) }] }, @463-466 Var { module_name: "", ident: "msg" }))] }"##,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Unwrap trailing binops
|
||||
```roc
|
||||
copy = \a,b ->
|
||||
Task.await line "FOO" \{} ->
|
||||
CMD.new "cp"
|
||||
|> mapErr! ERR
|
||||
|
||||
copy = \a,b ->
|
||||
Task.await line "FOO" \{} ->
|
||||
Task.await (CMD.new "cp" |> mapErr ERR) \#!a0 -> #!a0
|
||||
|
||||
copy = \a,b ->
|
||||
Task.await line "FOO" \{} ->
|
||||
CMD.new "cp" |> mapErr ERR
|
||||
```
|
||||
*/
|
||||
#[test]
|
||||
fn trailing_binops() {
|
||||
run_test(
|
||||
r#"
|
||||
copy = \a,b ->
|
||||
line! "FOO"
|
||||
|
||||
CMD.new "cp"
|
||||
|> mapErr! ERR
|
||||
"#,
|
||||
r#"Defs { tags: [Index(2147483648)], regions: [@0-103], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@0-4 Identifier { ident: "copy" }, @7-103 Closure([@8-9 Identifier { ident: "a" }, @10-11 Identifier { ident: "b" }], @36-42 Apply(@36-42 Var { module_name: "Task", ident: "await" }, [@36-42 Apply(@36-42 Var { module_name: "", ident: "line" }, [@37-42 Str(PlainLine("FOO"))], Space), @36-42 Closure([@36-42 RecordDestructure([])], @60-103 Apply(@60-103 Var { module_name: "", ident: "mapErr" }, [@60-72 Apply(@60-67 Var { module_name: "CMD", ident: "new" }, [@68-72 Str(PlainLine("cp"))], Space), @100-103 Tag("ERR")], BinOp(Pizza)))], BangSuffix)))] }"#,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Unwrap a when expression
|
||||
```roc
|
||||
list =
|
||||
when getList! is
|
||||
[] -> "empty"
|
||||
_ -> "non-empty"
|
||||
|
||||
list =
|
||||
Task.await getList \#!a0 ->
|
||||
when #!a0 is
|
||||
[] -> "empty"
|
||||
_ -> "non-empty"
|
||||
```
|
||||
*/
|
||||
#[test]
|
||||
fn when_simple() {
|
||||
run_test(
|
||||
r#"
|
||||
list =
|
||||
when getList! is
|
||||
[] -> "empty"
|
||||
_ -> "non-empty"
|
||||
"#,
|
||||
r##"Defs { tags: [Index(2147483648)], regions: [@0-111], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@0-4 Identifier { ident: "list" }, @0-111 Apply(@0-111 Var { module_name: "Task", ident: "await" }, [@29-37 Var { module_name: "", ident: "getList" }, @0-111 Closure([@29-37 Identifier { ident: "#!a0" }], @0-111 When(@29-37 Var { module_name: "", ident: "#!a0" }, [WhenBranch { patterns: [@61-63 List([])], value: @67-74 Str(PlainLine("empty")), guard: None }, WhenBranch { patterns: [@95-96 Underscore("")], value: @100-111 Str(PlainLine("non-empty")), guard: None }]))], BangSuffix))] }"##,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Unwrap a when expression with suffixed statements in its branches
|
||||
```roc
|
||||
list =
|
||||
when getList! is
|
||||
[] ->
|
||||
line! "foo"
|
||||
line! "bar"
|
||||
_ ->
|
||||
ok {}
|
||||
|
||||
list =
|
||||
Task.await getList \#!a0 ->
|
||||
when #!a0 is
|
||||
[] ->
|
||||
line! "foo"
|
||||
line! "bar"
|
||||
_ ->
|
||||
ok {}
|
||||
|
||||
list =
|
||||
Task.await getList \#!a0 ->
|
||||
when #!a0 is
|
||||
[] ->
|
||||
Task.await line "foo" \{} -> line! "bar"
|
||||
_ ->
|
||||
ok {}
|
||||
```
|
||||
*/
|
||||
#[test]
|
||||
fn when_branches() {
|
||||
run_test(
|
||||
r#"
|
||||
list =
|
||||
when getList! is
|
||||
[] ->
|
||||
line! "foo"
|
||||
line! "bar"
|
||||
_ ->
|
||||
ok {}
|
||||
"#,
|
||||
r##"Defs { tags: [Index(2147483648)], regions: [@0-195], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@0-4 Identifier { ident: "list" }, @0-195 Apply(@0-195 Var { module_name: "Task", ident: "await" }, [@29-37 Var { module_name: "", ident: "getList" }, @0-195 Closure([@29-37 Identifier { ident: "#!a0" }], @0-195 When(@29-37 Var { module_name: "", ident: "#!a0" }, [WhenBranch { patterns: [@61-63 List([])], value: @97-103 Apply(@97-103 Var { module_name: "Task", ident: "await" }, [@97-103 Apply(@97-103 Var { module_name: "", ident: "line" }, [@98-103 Str(PlainLine("foo"))], Space), @97-103 Closure([@97-103 RecordDestructure([])], @128-139 Apply(@128-139 Var { module_name: "", ident: "line" }, [@134-139 Str(PlainLine("bar"))], Space))], BangSuffix), guard: None }, WhenBranch { patterns: [@160-161 Underscore("")], value: @190-195 Apply(@190-192 Var { module_name: "", ident: "ok" }, [@193-195 Record([])], Space), guard: None }]))], BangSuffix))] }"##,
|
||||
);
|
||||
}
|
||||
|
||||
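/*
Expected desugaring (an informal sketch, inferred from the AST assertion below):

main =
    Task.await Stdin.line \result ->
        when result is
            End ->
                Task.ok {}

            Input name ->
                Stdout.line "Hello, $(name)"
*/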
#[test]
|
||||
fn trailing_suffix_inside_when() {
|
||||
run_test(
|
||||
r#"
|
||||
main =
|
||||
result = Stdin.line!
|
||||
|
||||
when result is
|
||||
End ->
|
||||
Task.ok {}
|
||||
|
||||
Input name ->
|
||||
Stdout.line! "Hello, $(name)"
|
||||
"#,
|
||||
r#"Defs { tags: [Index(2147483648)], regions: [@0-226], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@0-4 Identifier { ident: "main" }, @32-43 Apply(@32-43 Var { module_name: "Task", ident: "await" }, [@32-43 Var { module_name: "Stdin", ident: "line" }, @32-43 Closure([@23-29 Identifier { ident: "result" }], @61-226 When(@66-72 Var { module_name: "", ident: "result" }, [WhenBranch { patterns: [@96-99 Tag("End")], value: @127-137 Apply(@127-134 Var { module_name: "Task", ident: "ok" }, [@135-137 Record([])], Space), guard: None }, WhenBranch { patterns: [@159-169 Apply(@159-164 Tag("Input"), [@165-169 Identifier { ident: "name" }])], value: @197-226 Apply(@197-226 Var { module_name: "Stdout", ident: "line" }, [@210-226 Str(Line([Plaintext("Hello, "), Interpolated(@220-224 Var { module_name: "", ident: "name" })]))], Space), guard: None }]))], BangSuffix))] }"#,
|
||||
);
|
||||
}
|
||||
|
||||
/*
|
||||
main =
|
||||
foo = getFoo!
|
||||
dbg foo
|
||||
bar! foo
|
||||
|
||||
main =
|
||||
Task.await getFoo \foo ->
|
||||
dbg foo
|
||||
bar! foo
|
||||
|
||||
main =
|
||||
Task.await getFoo \foo ->
|
||||
dbg foo
|
||||
bar foo
|
||||
*/
|
||||
#[test]
|
||||
fn dbg_simple() {
|
||||
run_test(
|
||||
r#"
|
||||
main =
|
||||
foo = getFoo!
|
||||
dbg foo
|
||||
bar! foo
|
||||
"#,
|
||||
r#"Defs { tags: [Index(2147483648)], regions: [@0-85], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@0-4 Identifier { ident: "main" }, @29-36 Apply(@29-36 Var { module_name: "Task", ident: "await" }, [@29-36 Var { module_name: "", ident: "getFoo" }, @29-36 Closure([@23-26 Identifier { ident: "foo" }], @53-85 LowLevelDbg(("test.roc:4", " "), @57-60 Apply(@57-60 Var { module_name: "Inspect", ident: "toStr" }, [@57-60 Var { module_name: "", ident: "foo" }], Space), @77-85 Apply(@77-85 Var { module_name: "", ident: "bar" }, [@82-85 Var { module_name: "", ident: "foo" }], Space)))], BangSuffix))] }"#,
|
||||
);
|
||||
}
|
||||
|
||||
// main =
|
||||
// Task.await a \#!a0 ->
|
||||
// c = b #!a0
|
||||
// Task.ok c
|
||||
#[test]
|
||||
fn apply_argument_single() {
|
||||
run_test(
|
||||
r#"
|
||||
main =
|
||||
c = b a!
|
||||
c
|
||||
"#,
|
||||
r##"Defs { tags: [Index(2147483648)], regions: [@0-49], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@0-4 Identifier { ident: "main" }, @27-31 Apply(@27-31 Var { module_name: "Task", ident: "await" }, [@29-30 Var { module_name: "", ident: "a" }, @27-31 Closure([@29-30 Identifier { ident: "#!a0" }], @27-31 Defs(Defs { tags: [Index(2147483650)], regions: [@27-31], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@23-24 Identifier { ident: "c" }, @27-31 Apply(@27-28 Var { module_name: "", ident: "b" }, [@29-30 TaskAwaitBang(Var { module_name: "", ident: "a" })], Space)), Body(@23-24 Identifier { ident: "c" }, @27-31 Apply(@27-28 Var { module_name: "", ident: "b" }, [@29-30 Var { module_name: "", ident: "#!a0" }], Space)), Body(@23-24 Identifier { ident: "c" }, @27-31 Apply(@27-28 Var { module_name: "", ident: "b" }, [@29-30 Var { module_name: "", ident: "#!a0" }], Space))] }, @48-49 Var { module_name: "", ident: "c" }))], BangSuffix))] }"##,
|
||||
);
|
||||
}
|
||||
|
||||
// main =
|
||||
// Task.await a \#!a0 ->
|
||||
// Task.await x \#!a1 ->
|
||||
// c = b #!a0 #!a1
|
||||
// Task.ok c
|
||||
#[test]
|
||||
fn apply_argument_multiple() {
|
||||
run_test(
|
||||
r#"
|
||||
main =
|
||||
c = b a! x!
|
||||
c
|
||||
"#,
|
||||
r##"Defs { tags: [Index(2147483648)], regions: [@0-52], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@0-4 Identifier { ident: "main" }, @27-34 Apply(@27-34 Var { module_name: "Task", ident: "await" }, [@29-30 Var { module_name: "", ident: "a" }, @27-34 Closure([@29-30 Identifier { ident: "#!a0" }], @27-34 Apply(@27-34 Var { module_name: "Task", ident: "await" }, [@32-33 Var { module_name: "", ident: "x" }, @27-34 Closure([@32-33 Identifier { ident: "#!a1" }], @27-34 Defs(Defs { tags: [Index(2147483651)], regions: [@27-34], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@23-24 Identifier { ident: "c" }, @27-34 Apply(@27-28 Var { module_name: "", ident: "b" }, [@29-30 TaskAwaitBang(Var { module_name: "", ident: "a" }), @32-33 TaskAwaitBang(Var { module_name: "", ident: "x" })], Space)), Body(@23-24 Identifier { ident: "c" }, @27-34 Apply(@27-28 Var { module_name: "", ident: "b" }, [@29-30 Var { module_name: "", ident: "#!a0" }, @32-33 TaskAwaitBang(Var { module_name: "", ident: "x" })], Space)), Body(@23-24 Identifier { ident: "c" }, @27-34 Apply(@27-28 Var { module_name: "", ident: "b" }, [@29-30 Var { module_name: "", ident: "#!a0" }, @32-33 Var { module_name: "", ident: "#!a1" }], Space)), Body(@23-24 Identifier { ident: "c" }, @27-34 Apply(@27-28 Var { module_name: "", ident: "b" }, [@29-30 Var { module_name: "", ident: "#!a0" }, @32-33 Var { module_name: "", ident: "#!a1" }], Space))] }, @51-52 Var { module_name: "", ident: "c" }))], BangSuffix))], BangSuffix))] }"##,
|
||||
);
|
||||
}
|
||||
|
||||
// main =
|
||||
// Task.await a \#!a0 ->
|
||||
// c = b #!a0
|
||||
// Task.ok c
|
||||
#[test]
|
||||
fn bang_in_pipe_root() {
|
||||
run_test(
|
||||
r#"
|
||||
main =
|
||||
c = a! |> b
|
||||
c
|
||||
"#,
|
||||
r##"Defs { tags: [Index(2147483648)], regions: [@0-52], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@0-4 Identifier { ident: "main" }, @27-34 Apply(@27-34 Var { module_name: "Task", ident: "await" }, [@27-28 Var { module_name: "", ident: "a" }, @27-34 Closure([@27-28 Identifier { ident: "#!a0" }], @27-34 Defs(Defs { tags: [Index(2147483650)], regions: [@27-34], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@23-24 Identifier { ident: "c" }, @27-34 Apply(@33-34 Var { module_name: "", ident: "b" }, [@27-28 TaskAwaitBang(Var { module_name: "", ident: "a" })], BinOp(Pizza))), Body(@23-24 Identifier { ident: "c" }, @27-34 Apply(@33-34 Var { module_name: "", ident: "b" }, [@27-28 Var { module_name: "", ident: "#!a0" }], BinOp(Pizza))), Body(@23-24 Identifier { ident: "c" }, @27-34 Apply(@33-34 Var { module_name: "", ident: "b" }, [@27-28 Var { module_name: "", ident: "#!a0" }], BinOp(Pizza)))] }, @51-52 Var { module_name: "", ident: "c" }))], BangSuffix))] }"##,
|
||||
);
|
||||
}
|
||||
|
||||
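// Expected desugaring (sketch based on the AST assertion below): the expect is
// kept as-is and the trailing `x!` simply becomes `x`.
//
// main =
//     expect 1 == 2
//     x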
#[test]
|
||||
fn expect_then_bang() {
|
||||
run_test(
|
||||
r#"
|
||||
main =
|
||||
expect 1 == 2
|
||||
x!
|
||||
"#,
|
||||
r#"Defs { tags: [Index(2147483648)], regions: [@0-55], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@0-4 Identifier { ident: "main" }, @0-55 Expect(@30-36 Apply(@32-34 Var { module_name: "Bool", ident: "isEq" }, [@30-31 Num("1"), @35-36 Num("2")], BinOp(Equals)), @53-55 Var { module_name: "", ident: "x" }))] }"#,
|
||||
);
|
||||
}
|
||||
|
||||
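// Expected desugaring (sketch based on the AST assertion below): the trailing
// `c!` in the innermost branch simply becomes `c`.
//
// main =
//     when a is
//         0 ->
//             when b is
//                 1 ->
//                     c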
#[test]
|
||||
fn deep_when() {
|
||||
run_test(
|
||||
r#"
|
||||
main =
|
||||
when a is
|
||||
0 ->
|
||||
when b is
|
||||
1 ->
|
||||
c!
|
||||
"#,
|
||||
r#"Defs { tags: [Index(2147483648)], regions: [@0-159], space_before: [Slice(start = 0, length = 0)], space_after: [Slice(start = 0, length = 0)], spaces: [], type_defs: [], value_defs: [Body(@0-4 Identifier { ident: "main" }, @0-159 When(@28-29 Var { module_name: "", ident: "a" }, [WhenBranch { patterns: [@53-54 NumLiteral("0")], value: @82-159 When(@87-88 Var { module_name: "", ident: "b" }, [WhenBranch { patterns: [@120-121 NumLiteral("1")], value: @157-159 Var { module_name: "", ident: "c" }, guard: None }]), guard: None }]))] }"#,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test_suffixed_helpers {
|
||||
|
||||
use roc_can::suffixed::is_matching_intermediate_answer;
|
||||
use roc_module::called_via::CalledVia;
|
||||
use roc_module::ident::ModuleName;
|
||||
use roc_parse::ast::Expr;
|
||||
use roc_parse::ast::Pattern;
|
||||
use roc_region::all::Loc;
|
||||
|
||||
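// Checks that a generated `#!a0` pattern is recognized as matching the
// corresponding `#!a0` intermediate-answer variable.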
#[test]
|
||||
fn test_matching_answer() {
|
||||
let loc_pat = Loc::at_zero(Pattern::Identifier { ident: "#!a0" });
|
||||
let loc_new = Loc::at_zero(Expr::Var {
|
||||
module_name: "",
|
||||
ident: "#!a0",
|
||||
});
|
||||
|
||||
std::assert!(is_matching_intermediate_answer(&loc_pat, &loc_new));
|
||||
}
|
||||
|
||||
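// Checks that `Task.ok #!a0` (an Apply created via BangSuffix) also counts as a
// matching intermediate answer for the `#!a0` pattern.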
#[test]
|
||||
fn test_matching_answer_task_ok() {
|
||||
let loc_pat = Loc::at_zero(Pattern::Identifier { ident: "#!a0" });
|
||||
let intermediate = &[&Loc::at_zero(Expr::Var {
|
||||
module_name: "",
|
||||
ident: "#!a0",
|
||||
})];
|
||||
let task_ok = Loc::at_zero(Expr::Var {
|
||||
module_name: ModuleName::TASK,
|
||||
ident: "ok",
|
||||
});
|
||||
|
||||
let loc_new = Loc::at_zero(Expr::Apply(&task_ok, intermediate, CalledVia::BangSuffix));
|
||||
|
||||
std::assert!(is_matching_intermediate_answer(&loc_pat, &loc_new));
|
||||
}
|
||||
}
|
6
crates/compiler/checkmate/www/package-lock.json
generated
|
@ -7560,9 +7560,9 @@
|
|||
"dev": true
|
||||
},
|
||||
"node_modules/ejs": {
|
||||
"version": "3.1.9",
|
||||
"resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.9.tgz",
|
||||
"integrity": "sha512-rC+QVNMJWv+MtPgkt0y+0rVEIdbtxVADApW9JXrUVlzHetgcyczP/E7DJmWJ4fJCZF2cPcBk0laWO9ZHMG3DmQ==",
|
||||
"version": "3.1.10",
|
||||
"resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.10.tgz",
|
||||
"integrity": "sha512-UeJmFfOrAQS8OJWPZ4qtgHyWExa088/MtK5UEyoJGFH67cDEXkZSviOiKRCZ4Xij0zxI3JECgYs3oKx+AizQBA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"jake": "^10.8.5"
|
||||
|
|
|
@ -3682,7 +3682,7 @@ fn constraint_recursive_function(
|
|||
signature_closure_type,
|
||||
ret_type,
|
||||
),
|
||||
_ => todo!("TODO {:?}", (loc_symbol, &signature)),
|
||||
_ => todo!("TODO {:?}", (loc_symbol, types[signature])),
|
||||
};
|
||||
|
||||
let region = loc_function_def.region;
|
||||
|
|
File diff suppressed because it is too large
|
@ -1,10 +1,16 @@
|
|||
use roc_can::{abilities::SpecializationLambdaSets, module::ExposedByModule};
|
||||
use roc_can::{
|
||||
abilities::SpecializationLambdaSets,
|
||||
expr::{Expr, WhenBranch, WhenBranchPattern},
|
||||
module::ExposedByModule,
|
||||
pattern::Pattern,
|
||||
};
|
||||
use roc_checkmate::with_checkmate;
|
||||
use roc_error_macros::internal_error;
|
||||
use roc_module::symbol::{IdentIds, Symbol};
|
||||
use roc_region::all::Loc;
|
||||
use roc_solve_schema::UnificationMode;
|
||||
use roc_types::{
|
||||
subs::{instantiate_rigids, Subs, Variable},
|
||||
subs::{instantiate_rigids, RedundantMark, Subs, Variable},
|
||||
types::Polarity,
|
||||
};
|
||||
|
||||
|
@ -202,3 +208,42 @@ pub(crate) enum ExtensionKind {
|
|||
Record,
|
||||
TagUnion,
|
||||
}
|
||||
|
||||
/// Ok a -> Ok a
|
||||
/// A `when ... is` branch that matches `Ok a` and returns `Ok a`
|
||||
pub(crate) fn ok_to_ok_branch(
|
||||
pattern_var: Variable,
|
||||
result_var: Variable,
|
||||
field_var: Variable,
|
||||
symbol: &Symbol,
|
||||
env: &mut Env<'_>,
|
||||
) -> WhenBranch {
|
||||
WhenBranch {
|
||||
patterns: vec![WhenBranchPattern {
|
||||
pattern: Loc::at_zero(Pattern::AppliedTag {
|
||||
whole_var: pattern_var,
|
||||
ext_var: Variable::EMPTY_TAG_UNION,
|
||||
tag_name: "Ok".into(),
|
||||
arguments: vec![(field_var, Loc::at_zero(Pattern::Identifier(*symbol)))],
|
||||
}),
|
||||
degenerate: false,
|
||||
}],
|
||||
value: Loc::at_zero(Expr::Tag {
|
||||
tag_union_var: result_var,
|
||||
ext_var: env.new_ext_var(ExtensionKind::TagUnion),
|
||||
name: "Ok".into(),
|
||||
arguments: vec![(field_var, Loc::at_zero(Expr::Var(*symbol, field_var)))],
|
||||
}),
|
||||
guard: None,
|
||||
redundant: RedundantMark::known_non_redundant(),
|
||||
}
|
||||
}
|
||||
|
||||
/// `[]`
|
||||
/// Creates an empty list of the type provided.
|
||||
pub(crate) fn empty_list(var: Variable) -> Expr {
|
||||
Expr::List {
|
||||
elem_var: var,
|
||||
loc_elems: vec![],
|
||||
}
|
||||
}
|
||||
|
|
|
@ -12,5 +12,6 @@ roc_collections = { path = "../collections" }
|
|||
roc_module = { path = "../module" }
|
||||
roc_parse = { path = "../parse" }
|
||||
roc_region = { path = "../region" }
|
||||
roc_error_macros = { path = "../../error_macros" }
|
||||
|
||||
bumpalo.workspace = true
|
||||
|
|
|
@ -1,11 +1,15 @@
|
|||
use crate::annotation::{Formattable, Newlines, Parens};
|
||||
use crate::annotation::{is_collection_multiline, Formattable, Newlines, Parens};
|
||||
use crate::collection::{fmt_collection, Braces};
|
||||
use crate::expr::fmt_str_literal;
|
||||
use crate::pattern::fmt_pattern;
|
||||
use crate::spaces::{fmt_default_newline, fmt_spaces, INDENT};
|
||||
use crate::spaces::{fmt_default_newline, fmt_default_spaces, fmt_spaces, INDENT};
|
||||
use crate::Buf;
|
||||
use roc_parse::ast::{
|
||||
AbilityMember, Defs, Expr, ExtractSpaces, Pattern, Spaces, StrLiteral, TypeAnnotation, TypeDef,
|
||||
TypeHeader, ValueDef,
|
||||
AbilityMember, Defs, Expr, ExtractSpaces, ImportAlias, ImportAsKeyword, ImportExposingKeyword,
|
||||
ImportedModuleName, IngestedFileAnnotation, IngestedFileImport, ModuleImport,
|
||||
ModuleImportParams, Pattern, Spaces, StrLiteral, TypeAnnotation, TypeDef, TypeHeader, ValueDef,
|
||||
};
|
||||
use roc_parse::header::Keyword;
|
||||
use roc_region::all::Loc;
|
||||
|
||||
/// A Located formattable value is also formattable
|
||||
|
@ -183,6 +187,226 @@ impl<'a> Formattable for TypeHeader<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<'a> Formattable for ModuleImport<'a> {
|
||||
fn is_multiline(&self) -> bool {
|
||||
let Self {
|
||||
before_name,
|
||||
name,
|
||||
params,
|
||||
alias,
|
||||
exposed,
|
||||
} = self;
|
||||
|
||||
!before_name.is_empty()
|
||||
|| name.is_multiline()
|
||||
|| params.is_multiline()
|
||||
|| alias.is_multiline()
|
||||
|| match exposed {
|
||||
Some(a) => a.keyword.is_multiline() || is_collection_multiline(&a.item),
|
||||
None => false,
|
||||
}
|
||||
}
|
||||
|
||||
fn format_with_options(
|
||||
&self,
|
||||
buf: &mut Buf,
|
||||
_parens: Parens,
|
||||
_newlines: Newlines,
|
||||
indent: u16,
|
||||
) {
|
||||
let Self {
|
||||
before_name,
|
||||
name,
|
||||
params,
|
||||
alias,
|
||||
exposed,
|
||||
} = self;
|
||||
|
||||
buf.indent(indent);
|
||||
buf.push_str("import");
|
||||
|
||||
let indent = if !before_name.is_empty()
|
||||
|| (params.is_multiline() && exposed.is_some())
|
||||
|| alias.is_multiline()
|
||||
|| exposed.map_or(false, |e| e.keyword.is_multiline())
|
||||
{
|
||||
indent + INDENT
|
||||
} else {
|
||||
indent
|
||||
};
|
||||
|
||||
fmt_default_spaces(buf, before_name, indent);
|
||||
|
||||
name.format(buf, indent);
|
||||
params.format(buf, indent);
|
||||
alias.format(buf, indent);
|
||||
|
||||
if let Some(exposed) = exposed {
|
||||
exposed.keyword.format(buf, indent);
|
||||
fmt_collection(buf, indent, Braces::Square, exposed.item, Newlines::No);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Formattable for ModuleImportParams<'a> {
|
||||
fn is_multiline(&self) -> bool {
|
||||
let ModuleImportParams { before, params } = self;
|
||||
|
||||
!before.is_empty() || is_collection_multiline(params)
|
||||
}
|
||||
|
||||
fn format_with_options(&self, buf: &mut Buf, _parens: Parens, newlines: Newlines, indent: u16) {
|
||||
let ModuleImportParams { before, params } = self;
|
||||
|
||||
fmt_default_spaces(buf, before, indent);
|
||||
fmt_collection(buf, indent, Braces::Curly, *params, newlines);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Formattable for IngestedFileImport<'a> {
|
||||
fn is_multiline(&self) -> bool {
|
||||
let Self {
|
||||
before_path,
|
||||
path: _,
|
||||
name,
|
||||
annotation,
|
||||
} = self;
|
||||
!before_path.is_empty() || name.keyword.is_multiline() || annotation.is_multiline()
|
||||
}
|
||||
|
||||
fn format_with_options(
|
||||
&self,
|
||||
buf: &mut Buf,
|
||||
_parens: Parens,
|
||||
_newlines: Newlines,
|
||||
indent: u16,
|
||||
) {
|
||||
let Self {
|
||||
before_path,
|
||||
path,
|
||||
name,
|
||||
annotation,
|
||||
} = self;
|
||||
|
||||
buf.indent(indent);
|
||||
buf.push_str("import");
|
||||
|
||||
let indent = indent + INDENT;
|
||||
|
||||
fmt_default_spaces(buf, before_path, indent);
|
||||
fmt_str_literal(buf, path.value, indent);
|
||||
|
||||
name.keyword.format(buf, indent);
|
||||
buf.push_str(name.item.value);
|
||||
|
||||
annotation.format(buf, indent);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Formattable for ImportedModuleName<'a> {
|
||||
fn is_multiline(&self) -> bool {
|
||||
// No newlines in module name itself.
|
||||
false
|
||||
}
|
||||
|
||||
fn format_with_options(
|
||||
&self,
|
||||
buf: &mut Buf,
|
||||
_parens: Parens,
|
||||
_newlines: Newlines,
|
||||
indent: u16,
|
||||
) {
|
||||
buf.indent(indent);
|
||||
|
||||
if let Some(package_shorthand) = self.package {
|
||||
buf.push_str(package_shorthand);
|
||||
buf.push_str(".");
|
||||
}
|
||||
|
||||
self.name.format(buf, indent);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Formattable for ImportAlias<'a> {
|
||||
fn is_multiline(&self) -> bool {
|
||||
// No newlines in alias itself.
|
||||
false
|
||||
}
|
||||
|
||||
fn format_with_options(
|
||||
&self,
|
||||
buf: &mut Buf,
|
||||
_parens: Parens,
|
||||
_newlines: Newlines,
|
||||
indent: u16,
|
||||
) {
|
||||
buf.indent(indent);
|
||||
buf.push_str(self.as_str());
|
||||
}
|
||||
}
|
||||
|
||||
impl Formattable for ImportAsKeyword {
|
||||
fn is_multiline(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn format_with_options(
|
||||
&self,
|
||||
buf: &mut Buf<'_>,
|
||||
_parens: crate::annotation::Parens,
|
||||
_newlines: Newlines,
|
||||
indent: u16,
|
||||
) {
|
||||
buf.indent(indent);
|
||||
buf.push_str(ImportAsKeyword::KEYWORD);
|
||||
}
|
||||
}
|
||||
|
||||
impl Formattable for ImportExposingKeyword {
|
||||
fn is_multiline(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn format_with_options(
|
||||
&self,
|
||||
buf: &mut Buf<'_>,
|
||||
_parens: crate::annotation::Parens,
|
||||
_newlines: Newlines,
|
||||
indent: u16,
|
||||
) {
|
||||
buf.indent(indent);
|
||||
buf.push_str(ImportExposingKeyword::KEYWORD);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Formattable for IngestedFileAnnotation<'a> {
|
||||
fn is_multiline(&self) -> bool {
|
||||
let Self {
|
||||
before_colon,
|
||||
annotation,
|
||||
} = self;
|
||||
!before_colon.is_empty() || annotation.is_multiline()
|
||||
}
|
||||
|
||||
fn format_with_options(
|
||||
&self,
|
||||
buf: &mut Buf,
|
||||
_parens: Parens,
|
||||
_newlines: Newlines,
|
||||
indent: u16,
|
||||
) {
|
||||
let Self {
|
||||
before_colon,
|
||||
annotation,
|
||||
} = self;
|
||||
|
||||
fmt_default_spaces(buf, before_colon, indent);
|
||||
buf.push_str(":");
|
||||
buf.spaces(1);
|
||||
annotation.format(buf, indent);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Formattable for ValueDef<'a> {
|
||||
fn is_multiline(&self) -> bool {
|
||||
use roc_parse::ast::ValueDef::*;
|
||||
|
@ -196,10 +420,13 @@ impl<'a> Formattable for ValueDef<'a> {
|
|||
Expect { condition, .. } => condition.is_multiline(),
|
||||
ExpectFx { condition, .. } => condition.is_multiline(),
|
||||
Dbg { condition, .. } => condition.is_multiline(),
|
||||
ModuleImport(module_import) => module_import.is_multiline(),
|
||||
IngestedFileImport(ingested_file_import) => ingested_file_import.is_multiline(),
|
||||
Stmt(loc_expr) => loc_expr.is_multiline(),
|
||||
}
|
||||
}
|
||||
|
||||
fn format_with_options(&self, buf: &mut Buf, _parens: Parens, newlines: Newlines, indent: u16) {
|
||||
fn format_with_options(&self, buf: &mut Buf, parens: Parens, newlines: Newlines, indent: u16) {
|
||||
use roc_parse::ast::ValueDef::*;
|
||||
match self {
|
||||
Annotation(loc_pattern, loc_annotation) => {
|
||||
|
@ -238,6 +465,9 @@ impl<'a> Formattable for ValueDef<'a> {
|
|||
buf.newline();
|
||||
fmt_body(buf, &body_pattern.value, &body_expr.value, indent);
|
||||
}
|
||||
ModuleImport(module_import) => module_import.format(buf, indent),
|
||||
IngestedFileImport(ingested_file_import) => ingested_file_import.format(buf, indent),
|
||||
Stmt(loc_expr) => loc_expr.format_with_options(buf, parens, newlines, indent),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -357,9 +587,19 @@ pub fn fmt_defs(buf: &mut Buf, defs: &Defs, indent: u16) {
|
|||
}
|
||||
|
||||
pub fn fmt_body<'a>(buf: &mut Buf, pattern: &'a Pattern<'a>, body: &'a Expr<'a>, indent: u16) {
|
||||
pattern.format_with_options(buf, Parens::InApply, Newlines::No, indent);
|
||||
buf.indent(indent);
|
||||
buf.push_str(" =");
|
||||
// Check if this is an assignment into the unit value
|
||||
let is_unit_assignment = if let Pattern::RecordDestructure(collection) = pattern {
|
||||
collection.is_empty()
|
||||
} else {
|
||||
false
|
||||
};
|
||||
|
||||
// Don't format the `{} =` for defs with this pattern
|
||||
if !is_unit_assignment {
|
||||
pattern.format_with_options(buf, Parens::InApply, Newlines::No, indent);
|
||||
buf.indent(indent);
|
||||
buf.push_str(" =");
|
||||
}
|
||||
|
||||
if body.is_multiline() {
|
||||
match body {
|
||||
|
|
|
@ -9,8 +9,8 @@ use crate::spaces::{
|
|||
use crate::Buf;
|
||||
use roc_module::called_via::{self, BinOp};
|
||||
use roc_parse::ast::{
|
||||
AssignedField, Base, Collection, CommentOrNewline, Expr, ExtractSpaces, Pattern,
|
||||
RecordBuilderField, WhenBranch,
|
||||
is_expr_suffixed, AssignedField, Base, Collection, CommentOrNewline, Expr, ExtractSpaces,
|
||||
Pattern, RecordBuilderField, WhenBranch,
|
||||
};
|
||||
use roc_parse::ast::{StrLiteral, StrSegment};
|
||||
use roc_parse::ident::Accessor;
|
||||
|
@ -31,23 +31,27 @@ impl<'a> Formattable for Expr<'a> {
|
|||
true
|
||||
}
|
||||
|
||||
MalformedSuffixed(loc_expr) => loc_expr.is_multiline(),
|
||||
|
||||
// These expressions never have newlines
|
||||
Float(..)
|
||||
| Num(..)
|
||||
| NonBase10Int { .. }
|
||||
| SingleQuote(_)
|
||||
| RecordAccess(_, _)
|
||||
| AccessorFunction(_)
|
||||
| TupleAccess(_, _)
|
||||
| Var { .. }
|
||||
| Underscore { .. }
|
||||
| MalformedIdent(_, _)
|
||||
| MalformedClosure
|
||||
| Tag(_)
|
||||
| OpaqueRef(_)
|
||||
| IngestedFile(_, _)
|
||||
| EmptyDefsFinal
|
||||
| Crash => false,
|
||||
|
||||
RecordAccess(inner, _) | TupleAccess(inner, _) | TaskAwaitBang(inner) => {
|
||||
inner.is_multiline()
|
||||
}
|
||||
|
||||
// These expressions always have newlines
|
||||
Defs(_, _) | When(_, _) => true,
|
||||
|
||||
|
@ -107,7 +111,6 @@ impl<'a> Formattable for Expr<'a> {
|
|||
Tuple(fields) => is_collection_multiline(fields),
|
||||
RecordUpdate { fields, .. } => is_collection_multiline(fields),
|
||||
RecordBuilder(fields) => is_collection_multiline(fields),
|
||||
Suffixed(subexpr) => subexpr.is_multiline(),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -417,6 +420,9 @@ impl<'a> Formattable for Expr<'a> {
|
|||
indent,
|
||||
);
|
||||
}
|
||||
EmptyDefsFinal => {
|
||||
// no need to print anything
|
||||
}
|
||||
_ => {
|
||||
buf.ensure_ends_with_newline();
|
||||
buf.indent(indent);
|
||||
|
@ -433,6 +439,9 @@ impl<'a> Formattable for Expr<'a> {
|
|||
buf.push(')');
|
||||
}
|
||||
}
|
||||
EmptyDefsFinal => {
|
||||
// no need to print anything
|
||||
}
|
||||
Expect(condition, continuation) => {
|
||||
fmt_expect(buf, condition, continuation, self.is_multiline(), indent);
|
||||
}
|
||||
|
@ -504,19 +513,22 @@ impl<'a> Formattable for Expr<'a> {
|
|||
buf.push('.');
|
||||
buf.push_str(key);
|
||||
}
|
||||
TaskAwaitBang(expr) => {
|
||||
expr.format_with_options(buf, Parens::InApply, Newlines::Yes, indent);
|
||||
buf.push('!');
|
||||
}
|
||||
MalformedIdent(str, _) => {
|
||||
buf.indent(indent);
|
||||
buf.push_str(str)
|
||||
}
|
||||
MalformedSuffixed(loc_expr) => {
|
||||
buf.indent(indent);
|
||||
loc_expr.format_with_options(buf, parens, newlines, indent);
|
||||
}
|
||||
MalformedClosure => {}
|
||||
PrecedenceConflict { .. } => {}
|
||||
MultipleRecordBuilders { .. } => {}
|
||||
UnappliedRecordBuilder { .. } => {}
|
||||
IngestedFile(_, _) => {}
|
||||
Suffixed(sub_expr) => {
|
||||
sub_expr.format_with_options(buf, parens, newlines, indent);
|
||||
buf.push('!');
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -726,14 +738,32 @@ fn fmt_binops<'a>(
|
|||
|| loc_right_side.value.is_multiline()
|
||||
|| lefts.iter().any(|(expr, _)| expr.value.is_multiline());
|
||||
|
||||
let is_any_lefts_suffixed = lefts.iter().any(|(left, _)| is_expr_suffixed(&left.value));
|
||||
let is_right_suffixed = is_expr_suffixed(&loc_right_side.value);
|
||||
let is_any_suffixed = is_any_lefts_suffixed || is_right_suffixed;
|
||||
|
||||
let mut is_first = false;
|
||||
let mut adjusted_indent = indent;
|
||||
|
||||
if is_any_suffixed {
|
||||
// we only want to indent the remaining lines if this is a suffixed expression.
|
||||
is_first = true;
|
||||
}
|
||||
|
||||
for (loc_left_side, loc_binop) in lefts {
|
||||
let binop = loc_binop.value;
|
||||
|
||||
loc_left_side.format_with_options(buf, Parens::InOperator, Newlines::No, indent);
|
||||
loc_left_side.format_with_options(buf, Parens::InOperator, Newlines::No, adjusted_indent);
|
||||
|
||||
if is_first {
|
||||
// indent the remaining lines, but only if the expression is suffixed.
|
||||
is_first = false;
|
||||
adjusted_indent = indent + 4;
|
||||
}
|
||||
|
||||
if is_multiline {
|
||||
buf.ensure_ends_with_newline();
|
||||
buf.indent(indent);
|
||||
buf.indent(adjusted_indent);
|
||||
} else {
|
||||
buf.spaces(1);
|
||||
}
|
||||
|
@ -743,7 +773,7 @@ fn fmt_binops<'a>(
|
|||
buf.spaces(1);
|
||||
}
|
||||
|
||||
loc_right_side.format_with_options(buf, Parens::InOperator, Newlines::Yes, indent);
|
||||
loc_right_side.format_with_options(buf, Parens::InOperator, Newlines::Yes, adjusted_indent);
|
||||
}
|
||||
|
||||
fn format_spaces(buf: &mut Buf, spaces: &[CommentOrNewline], newlines: Newlines, indent: u16) {
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
use std::cmp::max;
|
||||
|
||||
use crate::annotation::{is_collection_multiline, Formattable, Newlines, Parens};
|
||||
use crate::collection::{fmt_collection, Braces};
|
||||
use crate::expr::fmt_str_literal;
|
||||
|
@ -5,12 +7,13 @@ use crate::spaces::RemoveSpaces;
|
|||
use crate::spaces::{fmt_comments_only, fmt_default_spaces, fmt_spaces, NewlineAt, INDENT};
|
||||
use crate::Buf;
|
||||
use bumpalo::Bump;
|
||||
use roc_parse::ast::{Collection, Header, Module, Spaced, Spaces};
|
||||
use roc_parse::ast::{Collection, CommentOrNewline, Header, Module, Spaced, Spaces};
|
||||
use roc_parse::header::{
|
||||
AppHeader, ExposedName, ExposesKeyword, GeneratesKeyword, HostedHeader, ImportsEntry,
|
||||
ImportsKeyword, InterfaceHeader, Keyword, KeywordItem, ModuleName, PackageEntry, PackageHeader,
|
||||
PackageKeyword, PackageName, PackagesKeyword, PlatformHeader, PlatformRequires,
|
||||
ProvidesKeyword, ProvidesTo, RequiresKeyword, To, ToKeyword, TypedIdent, WithKeyword,
|
||||
ImportsKeyword, Keyword, KeywordItem, ModuleHeader, ModuleName, PackageEntry, PackageHeader,
|
||||
PackageKeyword, PackageName, PackagesKeyword, PlatformHeader, PlatformKeyword,
|
||||
PlatformRequires, ProvidesKeyword, ProvidesTo, RequiresKeyword, To, ToKeyword, TypedIdent,
|
||||
WithKeyword,
|
||||
};
|
||||
use roc_parse::ident::UppercaseIdent;
|
||||
use roc_region::all::Loc;
|
||||
|
@ -18,8 +21,8 @@ use roc_region::all::Loc;
|
|||
pub fn fmt_module<'a>(buf: &mut Buf<'_>, module: &'a Module<'a>) {
|
||||
fmt_comments_only(buf, module.comments.iter(), NewlineAt::Bottom, 0);
|
||||
match &module.header {
|
||||
Header::Interface(header) => {
|
||||
fmt_interface_header(buf, header);
|
||||
Header::Module(header) => {
|
||||
fmt_module_header(buf, header);
|
||||
}
|
||||
Header::App(header) => {
|
||||
fmt_app_header(buf, header);
|
||||
|
@ -75,6 +78,7 @@ keywords! {
|
|||
RequiresKeyword,
|
||||
ProvidesKeyword,
|
||||
ToKeyword,
|
||||
PlatformKeyword,
|
||||
}
|
||||
|
||||
impl<V: Formattable> Formattable for Option<V> {
|
||||
|
@ -171,20 +175,25 @@ impl<'a, K: Formattable, V: Formattable> Formattable for KeywordItem<'a, K, V> {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn fmt_interface_header<'a>(buf: &mut Buf, header: &'a InterfaceHeader<'a>) {
|
||||
pub fn fmt_module_header<'a>(buf: &mut Buf, header: &'a ModuleHeader<'a>) {
|
||||
buf.indent(0);
|
||||
buf.push_str("interface");
|
||||
let indent = INDENT;
|
||||
fmt_default_spaces(buf, header.before_name, indent);
|
||||
buf.push_str("module");
|
||||
|
||||
// module name
|
||||
buf.indent(indent);
|
||||
buf.push_str(header.name.value.as_str());
|
||||
let mut indent = fmt_spaces_with_outdent(buf, header.after_keyword, 0);
|
||||
|
||||
header.exposes.keyword.format(buf, indent);
|
||||
fmt_exposes(buf, header.exposes.item, indent);
|
||||
header.imports.keyword.format(buf, indent);
|
||||
fmt_imports(buf, header.imports.item, indent);
|
||||
if let Some(params) = &header.params {
|
||||
if is_collection_multiline(¶ms.params) {
|
||||
indent = INDENT;
|
||||
}
|
||||
|
||||
fmt_collection(buf, indent, Braces::Curly, params.params, Newlines::Yes);
|
||||
|
||||
indent = fmt_spaces_with_outdent(buf, params.before_arrow, indent);
|
||||
buf.push_str("->");
|
||||
indent = fmt_spaces_with_outdent(buf, params.after_arrow, indent);
|
||||
}
|
||||
|
||||
fmt_exposes(buf, header.exposes, indent);
|
||||
}
|
||||
|
||||
pub fn fmt_hosted_header<'a>(buf: &mut Buf, header: &'a HostedHeader<'a>) {
|
||||
|
@ -207,34 +216,34 @@ pub fn fmt_hosted_header<'a>(buf: &mut Buf, header: &'a HostedHeader<'a>) {
pub fn fmt_app_header<'a>(buf: &mut Buf, header: &'a AppHeader<'a>) {
    buf.indent(0);
    buf.push_str("app");
    let indent = INDENT;
    fmt_default_spaces(buf, header.before_name, indent);

    fmt_str_literal(buf, header.name.value, indent);
    let indent = fmt_spaces_with_outdent(buf, header.before_provides, 0);
    fmt_exposes(buf, header.provides, indent);

    if let Some(packages) = &header.packages {
        packages.keyword.format(buf, indent);
        fmt_packages(buf, packages.item, indent);
        let indent = fmt_spaces_with_outdent(buf, header.before_packages, indent);
        fmt_packages(buf, header.packages.value, indent);
    }

pub fn fmt_spaces_with_outdent(buf: &mut Buf, spaces: &[CommentOrNewline], indent: u16) -> u16 {
    if spaces.iter().all(|c| c.is_newline()) {
        buf.spaces(1);
        indent
    } else {
        let indent = max(INDENT, indent + INDENT);
        fmt_default_spaces(buf, spaces, indent);
        indent
    }
    if let Some(imports) = &header.imports {
        imports.keyword.format(buf, indent);
        fmt_imports(buf, imports.item, indent);
    }
    header.provides.format(buf, indent);
}

pub fn fmt_package_header<'a>(buf: &mut Buf, header: &'a PackageHeader<'a>) {
    buf.indent(0);
    buf.push_str("package");
    let indent = INDENT;
    fmt_default_spaces(buf, header.before_name, indent);

    fmt_package_name(buf, header.name.value, indent);
    let indent = fmt_spaces_with_outdent(buf, header.before_exposes, 0);
    fmt_exposes(buf, header.exposes, indent);

    header.exposes.keyword.format(buf, indent);
    fmt_exposes(buf, header.exposes.item, indent);
    header.packages.keyword.format(buf, indent);
    fmt_packages(buf, header.packages.item, indent);
    let indent = fmt_spaces_with_outdent(buf, header.before_packages, indent);
    fmt_packages(buf, header.packages.value, indent);
}

pub fn fmt_platform_header<'a>(buf: &mut Buf, header: &'a PlatformHeader<'a>) {
@ -465,6 +474,15 @@ fn fmt_packages_entry(buf: &mut Buf, entry: &PackageEntry, indent: u16) {
    buf.push_str(entry.shorthand);
    buf.push(':');
    fmt_default_spaces(buf, entry.spaces_after_shorthand, indent);

    let indent = indent + INDENT;

    if let Some(spaces_after) = entry.platform_marker {
        buf.indent(indent);
        buf.push_str(roc_parse::keyword::PLATFORM);
        fmt_default_spaces(buf, spaces_after, indent);
    }

    fmt_package_name(buf, entry.package_name.value, indent);
}
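For context on the formatter changes above, here is a minimal sketch of the kind of source the new fmt_module_header handles. The module params, exposed name, and ingested-file import below are illustrative, but each individual form appears verbatim in the parser tests later in this diff: the old interface header is replaced by module, params (when present) are followed by ->, and imports move into the module body.

    module { echo, read } -> [menu]

    import "menu.json" as menuData : Str

    menu : Str
    menu = menuData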
@ -64,7 +64,7 @@ impl<'a> Formattable for Pattern<'a> {
            }
        },

        Pattern::Identifier(_)
        Pattern::Identifier { .. }
        | Pattern::Tag(_)
        | Pattern::OpaqueRef(_)
        | Pattern::Apply(_, _)
@ -88,9 +88,9 @@ impl<'a> Formattable for Pattern<'a> {
        use self::Pattern::*;

        match self {
            Identifier(string) => {
            Identifier { ident: string } => {
                buf.indent(indent);
                buf.push_str(string)
                buf.push_str(string);
            }
            Tag(name) | OpaqueRef(name) => {
                buf.indent(indent);
@ -4,13 +4,15 @@ use roc_module::called_via::{BinOp, UnaryOp};
|
|||
use roc_parse::{
|
||||
ast::{
|
||||
AbilityImpls, AbilityMember, AssignedField, Collection, CommentOrNewline, Defs, Expr,
|
||||
Header, Implements, ImplementsAbilities, ImplementsAbility, ImplementsClause, Module,
|
||||
Pattern, PatternAs, RecordBuilderField, Spaced, Spaces, StrLiteral, StrSegment, Tag,
|
||||
TypeAnnotation, TypeDef, TypeHeader, ValueDef, WhenBranch,
|
||||
Header, Implements, ImplementsAbilities, ImplementsAbility, ImplementsClause, ImportAlias,
|
||||
ImportAsKeyword, ImportExposingKeyword, ImportedModuleName, IngestedFileAnnotation,
|
||||
IngestedFileImport, Module, ModuleImport, ModuleImportParams, Pattern, PatternAs,
|
||||
RecordBuilderField, Spaced, Spaces, StrLiteral, StrSegment, Tag, TypeAnnotation, TypeDef,
|
||||
TypeHeader, ValueDef, WhenBranch,
|
||||
},
|
||||
header::{
|
||||
AppHeader, ExposedName, HostedHeader, ImportsEntry, InterfaceHeader, KeywordItem,
|
||||
ModuleName, PackageEntry, PackageHeader, PackageName, PlatformHeader, PlatformRequires,
|
||||
AppHeader, ExposedName, HostedHeader, ImportsEntry, KeywordItem, ModuleHeader, ModuleName,
|
||||
ModuleParams, PackageEntry, PackageHeader, PackageName, PlatformHeader, PlatformRequires,
|
||||
ProvidesTo, To, TypedIdent,
|
||||
},
|
||||
ident::{BadIdent, UppercaseIdent},
|
||||
|
@ -282,23 +284,26 @@ impl<'a> RemoveSpaces<'a> for ProvidesTo<'a> {
|
|||
impl<'a> RemoveSpaces<'a> for Module<'a> {
|
||||
fn remove_spaces(&self, arena: &'a Bump) -> Self {
|
||||
let header = match &self.header {
|
||||
Header::Interface(header) => Header::Interface(InterfaceHeader {
|
||||
before_name: &[],
|
||||
name: header.name.remove_spaces(arena),
|
||||
Header::Module(header) => Header::Module(ModuleHeader {
|
||||
after_keyword: &[],
|
||||
params: header.params.remove_spaces(arena),
|
||||
exposes: header.exposes.remove_spaces(arena),
|
||||
imports: header.imports.remove_spaces(arena),
|
||||
interface_imports: header.interface_imports.remove_spaces(arena),
|
||||
}),
|
||||
Header::App(header) => Header::App(AppHeader {
|
||||
before_name: &[],
|
||||
name: header.name.remove_spaces(arena),
|
||||
packages: header.packages.remove_spaces(arena),
|
||||
imports: header.imports.remove_spaces(arena),
|
||||
before_provides: &[],
|
||||
provides: header.provides.remove_spaces(arena),
|
||||
before_packages: &[],
|
||||
packages: header.packages.remove_spaces(arena),
|
||||
old_imports: header.old_imports.remove_spaces(arena),
|
||||
old_provides_to_new_package: header
|
||||
.old_provides_to_new_package
|
||||
.remove_spaces(arena),
|
||||
}),
|
||||
Header::Package(header) => Header::Package(PackageHeader {
|
||||
before_name: &[],
|
||||
name: header.name.remove_spaces(arena),
|
||||
before_exposes: &[],
|
||||
exposes: header.exposes.remove_spaces(arena),
|
||||
before_packages: &[],
|
||||
packages: header.packages.remove_spaces(arena),
|
||||
}),
|
||||
Header::Platform(header) => Header::Platform(PlatformHeader {
|
||||
|
@ -326,6 +331,16 @@ impl<'a> RemoveSpaces<'a> for Module<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<'a> RemoveSpaces<'a> for ModuleParams<'a> {
|
||||
fn remove_spaces(&self, arena: &'a Bump) -> Self {
|
||||
ModuleParams {
|
||||
params: self.params.remove_spaces(arena),
|
||||
before_arrow: &[],
|
||||
after_arrow: &[],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> RemoveSpaces<'a> for Region {
|
||||
fn remove_spaces(&self, _arena: &'a Bump) -> Self {
|
||||
Region::zero()
|
||||
|
@ -405,6 +420,10 @@ impl<'a> RemoveSpaces<'a> for PackageEntry<'a> {
|
|||
PackageEntry {
|
||||
shorthand: self.shorthand,
|
||||
spaces_after_shorthand: &[],
|
||||
platform_marker: match self.platform_marker {
|
||||
Some(_) => Some(&[]),
|
||||
None => None,
|
||||
},
|
||||
package_name: self.package_name.remove_spaces(arena),
|
||||
}
|
||||
}
|
||||
|
@ -567,6 +586,79 @@ impl<'a> RemoveSpaces<'a> for ValueDef<'a> {
|
|||
condition: arena.alloc(condition.remove_spaces(arena)),
|
||||
preceding_comment: Region::zero(),
|
||||
},
|
||||
ModuleImport(module_import) => ModuleImport(module_import.remove_spaces(arena)),
|
||||
IngestedFileImport(ingested_file_import) => {
|
||||
IngestedFileImport(ingested_file_import.remove_spaces(arena))
|
||||
}
|
||||
Stmt(loc_expr) => Stmt(arena.alloc(loc_expr.remove_spaces(arena))),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> RemoveSpaces<'a> for ModuleImport<'a> {
|
||||
fn remove_spaces(&self, arena: &'a Bump) -> Self {
|
||||
ModuleImport {
|
||||
before_name: &[],
|
||||
name: self.name.remove_spaces(arena),
|
||||
params: self.params.remove_spaces(arena),
|
||||
alias: self.alias.remove_spaces(arena),
|
||||
exposed: self.exposed.remove_spaces(arena),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> RemoveSpaces<'a> for ModuleImportParams<'a> {
|
||||
fn remove_spaces(&self, arena: &'a Bump) -> Self {
|
||||
ModuleImportParams {
|
||||
before: &[],
|
||||
params: self.params.remove_spaces(arena),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> RemoveSpaces<'a> for IngestedFileImport<'a> {
|
||||
fn remove_spaces(&self, arena: &'a Bump) -> Self {
|
||||
IngestedFileImport {
|
||||
before_path: &[],
|
||||
path: self.path.remove_spaces(arena),
|
||||
name: self.name.remove_spaces(arena),
|
||||
annotation: self.annotation.remove_spaces(arena),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> RemoveSpaces<'a> for ImportedModuleName<'a> {
|
||||
fn remove_spaces(&self, arena: &'a Bump) -> Self {
|
||||
ImportedModuleName {
|
||||
package: self.package.remove_spaces(arena),
|
||||
name: self.name.remove_spaces(arena),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> RemoveSpaces<'a> for ImportAlias<'a> {
|
||||
fn remove_spaces(&self, _arena: &'a Bump) -> Self {
|
||||
*self
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> RemoveSpaces<'a> for ImportAsKeyword {
|
||||
fn remove_spaces(&self, _arena: &'a Bump) -> Self {
|
||||
*self
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> RemoveSpaces<'a> for ImportExposingKeyword {
|
||||
fn remove_spaces(&self, _arena: &'a Bump) -> Self {
|
||||
*self
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> RemoveSpaces<'a> for IngestedFileAnnotation<'a> {
|
||||
fn remove_spaces(&self, arena: &'a Bump) -> Self {
|
||||
IngestedFileAnnotation {
|
||||
before_colon: &[],
|
||||
annotation: self.annotation.remove_spaces(arena),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -668,6 +760,7 @@ impl<'a> RemoveSpaces<'a> for StrSegment<'a> {
|
|||
impl<'a> RemoveSpaces<'a> for Expr<'a> {
|
||||
fn remove_spaces(&self, arena: &'a Bump) -> Self {
|
||||
match *self {
|
||||
Expr::EmptyDefsFinal => Expr::EmptyDefsFinal,
|
||||
Expr::Float(a) => Expr::Float(a),
|
||||
Expr::Num(a) => Expr::Num(a),
|
||||
Expr::NonBase10Int {
|
||||
|
@ -680,10 +773,10 @@ impl<'a> RemoveSpaces<'a> for Expr<'a> {
|
|||
is_negative,
|
||||
},
|
||||
Expr::Str(a) => Expr::Str(a.remove_spaces(arena)),
|
||||
Expr::IngestedFile(a, b) => Expr::IngestedFile(a, b),
|
||||
Expr::RecordAccess(a, b) => Expr::RecordAccess(arena.alloc(a.remove_spaces(arena)), b),
|
||||
Expr::AccessorFunction(a) => Expr::AccessorFunction(a),
|
||||
Expr::TupleAccess(a, b) => Expr::TupleAccess(arena.alloc(a.remove_spaces(arena)), b),
|
||||
Expr::TaskAwaitBang(a) => Expr::TaskAwaitBang(arena.alloc(a.remove_spaces(arena))),
|
||||
Expr::List(a) => Expr::List(a.remove_spaces(arena)),
|
||||
Expr::RecordUpdate { update, fields } => Expr::RecordUpdate {
|
||||
update: arena.alloc(update.remove_spaces(arena)),
|
||||
|
@ -755,13 +848,13 @@ impl<'a> RemoveSpaces<'a> for Expr<'a> {
|
|||
}
|
||||
Expr::MalformedIdent(a, b) => Expr::MalformedIdent(a, remove_spaces_bad_ident(b)),
|
||||
Expr::MalformedClosure => Expr::MalformedClosure,
|
||||
Expr::MalformedSuffixed(a) => Expr::MalformedSuffixed(a),
|
||||
Expr::PrecedenceConflict(a) => Expr::PrecedenceConflict(a),
|
||||
Expr::MultipleRecordBuilders(a) => Expr::MultipleRecordBuilders(a),
|
||||
Expr::UnappliedRecordBuilder(a) => Expr::UnappliedRecordBuilder(a),
|
||||
Expr::SpaceBefore(a, _) => a.remove_spaces(arena),
|
||||
Expr::SpaceAfter(a, _) => a.remove_spaces(arena),
|
||||
Expr::SingleQuote(a) => Expr::Num(a),
|
||||
Expr::Suffixed(a) => a.remove_spaces(arena),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -792,7 +885,7 @@ fn remove_spaces_bad_ident(ident: BadIdent) -> BadIdent {
|
|||
impl<'a> RemoveSpaces<'a> for Pattern<'a> {
|
||||
fn remove_spaces(&self, arena: &'a Bump) -> Self {
|
||||
match *self {
|
||||
Pattern::Identifier(a) => Pattern::Identifier(a),
|
||||
Pattern::Identifier { ident } => Pattern::Identifier { ident },
|
||||
Pattern::Tag(a) => Pattern::Tag(a),
|
||||
Pattern::OpaqueRef(a) => Pattern::OpaqueRef(a),
|
||||
Pattern::Apply(a, b) => Pattern::Apply(
|
||||
|
|
|
@ -1704,6 +1704,13 @@ trait Backend<'a> {
            self.build_fn_call(sym, intrinsic.to_string(), args, arg_layouts, ret_layout)
        }
        LowLevel::ListConcatUtf8 => self.build_fn_call(
            sym,
            bitcode::LIST_CONCAT_UTF8.to_string(),
            args,
            arg_layouts,
            ret_layout,
        ),
        LowLevel::PtrCast => {
            debug_assert_eq!(
                1,
@ -1124,7 +1124,11 @@ pub fn construct_optimization_passes<'a>(
        }
        OptLevel::Size => {
            pmb.set_optimization_level(OptimizationLevel::Default);
            // 2 is equivalent to `-Oz`.
            pmb.set_size_level(2);

            // TODO: For some usecase, like embedded, it is useful to expose this and tune it.
            // This really depends on if inlining causes enough simplifications to reduce code size.
            pmb.set_inliner_with_threshold(50);
        }
        OptLevel::Optimize => {
@ -1134,9 +1138,10 @@ pub fn construct_optimization_passes<'a>(
        }
    }

    // Add optimization passes for Size and Optimize.
    if matches!(opt_level, OptLevel::Size | OptLevel::Optimize) {
        // TODO figure out which of these actually help
    // Add extra optimization passes for Optimize.
    if matches!(opt_level, OptLevel::Optimize) {
        // TODO: figure out which of these actually help.
        // Note, llvm probably already runs all of these as part of Aggressive.

        // function passes
@ -845,6 +845,27 @@ pub(crate) fn run_low_level<'a, 'ctx>(
                }
            }
        }
        ListConcatUtf8 => {
            // List.concatUtf8: List U8, Str -> List U8
            arguments!(list, string);

            match env.target.ptr_width() {
                PtrWidth::Bytes4 => call_str_bitcode_fn(
                    env,
                    &[list, string],
                    &[],
                    BitcodeReturns::List,
                    bitcode::LIST_CONCAT_UTF8,
                ),
                PtrWidth::Bytes8 => call_list_bitcode_fn(
                    env,
                    &[list.into_struct_value()],
                    &[string],
                    BitcodeReturns::List,
                    bitcode::LIST_CONCAT_UTF8,
                ),
            }
        }
        NumToStr => {
            // Num.toStr : Num a -> Str
            arguments_with_layouts!((num, num_layout));
@ -481,6 +481,7 @@ impl<'a> LowLevelCall<'a> {
            backend.call_host_fn_after_loading_args(bitcode::LIST_CONCAT);
        }
        ListConcatUtf8 => self.load_args_and_call_zig(backend, bitcode::LIST_CONCAT_UTF8),

        ListReserve => {
            // List.reserve : List elem, U64 -> List elem
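The dev, LLVM, and Wasm backends above all route the new ListConcatUtf8 low-level to the same LIST_CONCAT_UTF8 bitcode function. Going by the type in the comment, a minimal Roc usage sketch, assuming the builtin is exposed as List.concatUtf8, would be:

    # List.concatUtf8 : List U8, Str -> List U8
    # appends the UTF-8 bytes of the string to the byte list
    expect List.concatUtf8 [72, 105] "!" == [72, 105, 33]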
@ -40,7 +40,7 @@ roc_problem = { path = "../problem" }
roc_region = { path = "../region" }
roc_solve_problem = { path = "../solve_problem" }
ven_pretty = { path = "../../vendor/pretty" }
roc_test_utils = { path = "../../test_utils" }
roc_test_utils_dir = { path = "../../test_utils_dir" }

indoc.workspace = true
insta.workspace = true
@ -76,6 +76,7 @@ fn write_types_for_module_real(module_id: ModuleId, filename: &str, output_path:
        PathBuf::from(filename),
        source,
        cwd,
        None,
        Default::default(),
        target,
        function_kind,
@ -104,12 +104,14 @@ pub fn load_and_monomorphize_from_str<'a>(
|
|||
filename: PathBuf,
|
||||
src: &'a str,
|
||||
src_dir: PathBuf,
|
||||
opt_main_path: Option<PathBuf>,
|
||||
roc_cache_dir: RocCacheDir<'_>,
|
||||
load_config: LoadConfig,
|
||||
) -> Result<MonomorphizedModule<'a>, LoadMonomorphizedError<'a>> {
|
||||
use LoadResult::*;
|
||||
|
||||
let load_start = LoadStart::from_str(arena, filename, src, roc_cache_dir, src_dir)?;
|
||||
let load_start =
|
||||
LoadStart::from_str(arena, filename, opt_main_path, src, roc_cache_dir, src_dir)?;
|
||||
let exposed_types = ExposedByModule::default();
|
||||
|
||||
match load(arena, load_start, exposed_types, roc_cache_dir, load_config)? {
|
||||
|
@ -121,6 +123,7 @@ pub fn load_and_monomorphize_from_str<'a>(
|
|||
pub fn load_and_monomorphize<'a>(
|
||||
arena: &'a Bump,
|
||||
filename: PathBuf,
|
||||
opt_main_path: Option<PathBuf>,
|
||||
roc_cache_dir: RocCacheDir<'_>,
|
||||
load_config: LoadConfig,
|
||||
) -> Result<MonomorphizedModule<'a>, LoadMonomorphizedError<'a>> {
|
||||
|
@ -129,6 +132,7 @@ pub fn load_and_monomorphize<'a>(
|
|||
let load_start = LoadStart::from_path(
|
||||
arena,
|
||||
filename,
|
||||
opt_main_path,
|
||||
load_config.render,
|
||||
roc_cache_dir,
|
||||
load_config.palette,
|
||||
|
@ -145,6 +149,7 @@ pub fn load_and_monomorphize<'a>(
|
|||
pub fn load_and_typecheck<'a>(
|
||||
arena: &'a Bump,
|
||||
filename: PathBuf,
|
||||
opt_main_path: Option<PathBuf>,
|
||||
roc_cache_dir: RocCacheDir<'_>,
|
||||
load_config: LoadConfig,
|
||||
) -> Result<LoadedModule, LoadingProblem<'a>> {
|
||||
|
@ -153,6 +158,7 @@ pub fn load_and_typecheck<'a>(
|
|||
let load_start = LoadStart::from_path(
|
||||
arena,
|
||||
filename,
|
||||
opt_main_path,
|
||||
load_config.render,
|
||||
roc_cache_dir,
|
||||
load_config.palette,
|
||||
|
@ -172,6 +178,7 @@ pub fn load_and_typecheck_str<'a>(
|
|||
filename: PathBuf,
|
||||
source: &'a str,
|
||||
src_dir: PathBuf,
|
||||
opt_main_path: Option<PathBuf>,
|
||||
target: Target,
|
||||
function_kind: FunctionKind,
|
||||
render: RenderTarget,
|
||||
|
@ -180,7 +187,14 @@ pub fn load_and_typecheck_str<'a>(
|
|||
) -> Result<LoadedModule, LoadingProblem<'a>> {
|
||||
use LoadResult::*;
|
||||
|
||||
let load_start = LoadStart::from_str(arena, filename, source, roc_cache_dir, src_dir)?;
|
||||
let load_start = LoadStart::from_str(
|
||||
arena,
|
||||
filename,
|
||||
opt_main_path,
|
||||
source,
|
||||
roc_cache_dir,
|
||||
src_dir,
|
||||
)?;
|
||||
|
||||
// NOTE: this function is meant for tests, and so we use single-threaded
|
||||
// solving so we don't use too many threads per-test. That gives higher
|
||||
|
|
|
@ -11,7 +11,7 @@ use roc_can::scope::Scope;
|
|||
use roc_collections::all::{ImMap, MutMap, SendSet};
|
||||
use roc_constrain::expr::constrain_expr;
|
||||
use roc_derive::SharedDerivedModule;
|
||||
use roc_module::symbol::{IdentIds, Interns, ModuleId, ModuleIds};
|
||||
use roc_module::symbol::{IdentIds, Interns, ModuleId, ModuleIds, PQModuleName, PackageModuleIds};
|
||||
use roc_parse::parser::{SourceError, SyntaxError};
|
||||
use roc_problem::can::Problem;
|
||||
use roc_region::all::Loc;
|
||||
|
@ -154,10 +154,10 @@ pub fn can_expr_with<'a>(
|
|||
let var = var_store.fresh();
|
||||
let var_index = constraints.push_variable(var);
|
||||
let expected = constraints.push_expected_type(Expected::NoExpectation(var_index));
|
||||
let mut module_ids = ModuleIds::default();
|
||||
let mut module_ids = PackageModuleIds::default();
|
||||
|
||||
// ensure the Test module is accessible in our tests
|
||||
module_ids.get_or_insert(&"Test".into());
|
||||
module_ids.get_or_insert(&PQModuleName::Unqualified("Test".into()));
|
||||
|
||||
// Desugar operators (convert them to Apply calls, taking into account
|
||||
// operator precedence and associativity rules), before doing other canonicalization.
|
||||
|
@ -174,10 +174,22 @@ pub fn can_expr_with<'a>(
|
|||
arena.alloc("TestPath"),
|
||||
);
|
||||
|
||||
let mut scope = Scope::new(home, IdentIds::default(), Default::default());
|
||||
let mut scope = Scope::new(
|
||||
home,
|
||||
"TestPath".into(),
|
||||
IdentIds::default(),
|
||||
Default::default(),
|
||||
);
|
||||
|
||||
let dep_idents = IdentIds::exposed_builtins(0);
|
||||
let mut env = Env::new(arena, home, &dep_idents, &module_ids);
|
||||
let mut env = Env::new(
|
||||
arena,
|
||||
home,
|
||||
Path::new("Test.roc"),
|
||||
&dep_idents,
|
||||
&module_ids,
|
||||
None,
|
||||
);
|
||||
let (loc_expr, output) = canonicalize_expr(
|
||||
&mut env,
|
||||
&mut var_store,
|
||||
|
@ -203,7 +215,7 @@ pub fn can_expr_with<'a>(
|
|||
all_ident_ids.insert(home, scope.locals.ident_ids);
|
||||
|
||||
let interns = Interns {
|
||||
module_ids: env.module_ids.clone(),
|
||||
module_ids: env.qualified_module_ids.clone().into_module_ids(),
|
||||
all_ident_ids,
|
||||
};
|
||||
|
||||
|
|
9
crates/compiler/load/tests/platform.roc
Normal file
@ -0,0 +1,9 @@
platform "test-platform"
    requires {} { main : * }
    exposes []
    packages {}
    imports []
    provides [mainForHost]

mainForHost : {} -> {}
mainForHost = \{} -> {}
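This new test platform requires a main value of any type from the app and exposes mainForHost : {} -> {} to the host. A smallest-possible app targeting it, using the same relative path the reporting tests below use, would be:

    app [main] { pf: platform "../../tests/platform.roc" }

    main = {}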
@ -28,6 +28,7 @@ mod test_reporting {
|
|||
use roc_reporting::report::{RocDocAllocator, RocDocBuilder};
|
||||
use roc_solve::FunctionKind;
|
||||
use roc_solve_problem::TypeError;
|
||||
use roc_test_utils_dir::TmpDir;
|
||||
use roc_types::subs::Subs;
|
||||
use std::path::PathBuf;
|
||||
|
||||
|
@ -115,7 +116,7 @@ mod test_reporting {
|
|||
// We can't have all tests use "tmp" because tests run in parallel,
|
||||
// so append the test name to the tmp path.
|
||||
let tmp = format!("tmp/{subdir}");
|
||||
let dir = roc_test_utils::TmpDir::new(&tmp);
|
||||
let dir = TmpDir::new(&tmp);
|
||||
|
||||
let filename = PathBuf::from("Test.roc");
|
||||
let file_path = dir.path().join(filename);
|
||||
|
@ -133,6 +134,7 @@ mod test_reporting {
|
|||
let result = roc_load::load_and_typecheck(
|
||||
arena,
|
||||
full_file_path,
|
||||
None,
|
||||
RocCacheDir::Disallowed,
|
||||
load_config,
|
||||
);
|
||||
|
@ -646,7 +648,7 @@ mod test_reporting {
|
|||
if true then 1 else 2
|
||||
"
|
||||
),
|
||||
@r"
|
||||
@r###"
|
||||
── UNRECOGNIZED NAME in /code/proj/Main.roc ────────────────────────────────────
|
||||
|
||||
Nothing is named `true` in this scope.
|
||||
|
@ -656,11 +658,11 @@ mod test_reporting {
|
|||
|
||||
Did you mean one of these?
|
||||
|
||||
Str
|
||||
Frac
|
||||
Num
|
||||
Str
|
||||
Err
|
||||
"
|
||||
U8
|
||||
"###
|
||||
);
|
||||
|
||||
test_report!(
|
||||
|
@ -811,10 +813,10 @@ mod test_reporting {
|
|||
|
||||
Did you mean one of these?
|
||||
|
||||
Ok
|
||||
List
|
||||
Err
|
||||
Box
|
||||
Str
|
||||
isDisabled
|
||||
"
|
||||
),
|
||||
);
|
||||
|
@ -2211,10 +2213,10 @@ mod test_reporting {
|
|||
|
||||
Did you mean one of these?
|
||||
|
||||
Ok
|
||||
U8
|
||||
Box
|
||||
Eq
|
||||
f
|
||||
"
|
||||
);
|
||||
|
||||
|
@ -4544,13 +4546,13 @@ mod test_reporting {
|
|||
|
||||
test_report!(
|
||||
comment_with_tab,
|
||||
"# comment with a \t\n4",
|
||||
"# comment with a \t char\n4",
|
||||
@r###"
|
||||
── TAB CHARACTER in tmp/comment_with_tab/Test.roc ──────────────────────────────
|
||||
|
||||
I encountered a tab character:
|
||||
|
||||
4│ # comment with a
|
||||
4│ # comment with a char
|
||||
^
|
||||
|
||||
Tab characters are not allowed in Roc code. Please use spaces instead!
|
||||
|
@ -4559,17 +4561,17 @@ mod test_reporting {
|
|||
|
||||
test_report!(
|
||||
comment_with_control_character,
|
||||
"# comment with a \x07\n",
|
||||
@r"
|
||||
"# comment with a \x07 char\n",
|
||||
@r###"
|
||||
── ASCII CONTROL CHARACTER in tmp/comment_with_control_character/Test.roc ──────
|
||||
|
||||
I encountered an ASCII control character:
|
||||
|
||||
4│ # comment with a
|
||||
4│ # comment with a char
|
||||
^
|
||||
|
||||
ASCII control characters are not allowed.
|
||||
"
|
||||
"###
|
||||
);
|
||||
|
||||
test_report!(
|
||||
|
@ -4767,33 +4769,38 @@ mod test_reporting {
|
|||
"
|
||||
);
|
||||
|
||||
test_report!(
|
||||
def_missing_final_expression,
|
||||
indoc!(
|
||||
r"
|
||||
f : Foo.foo
|
||||
"
|
||||
),
|
||||
@r#"
|
||||
── MISSING FINAL EXPRESSION in tmp/def_missing_final_expression/Test.roc ───────
|
||||
// TODO investigate this test. It was disabled in https://github.com/roc-lang/roc/pull/6634
|
||||
// as the way Defs without final expressions are handled. The changes probably shouldn't have
|
||||
// changed this error report. The exact same test_syntax test for this has not changed, so
|
||||
// we know the parser is parsing the same thing. Therefore the way the AST is desugared must be
|
||||
// the cause of the change in error report.
|
||||
// test_report!(
|
||||
// def_missing_final_expression,
|
||||
// indoc!(
|
||||
// r"
|
||||
// f : Foo.foo
|
||||
// "
|
||||
// ),
|
||||
// @r#"
|
||||
// ── MISSING FINAL EXPRESSION in tmp/def_missing_final_expression/Test.roc ───────
|
||||
|
||||
I am partway through parsing a definition, but I got stuck here:
|
||||
// I am partway through parsing a definition, but I got stuck here:
|
||||
|
||||
1│ app "test" provides [main] to "./platform"
|
||||
2│
|
||||
3│ main =
|
||||
4│ f : Foo.foo
|
||||
^
|
||||
// 1│ app "test" provides [main] to "./platform"
|
||||
// 2│
|
||||
// 3│ main =
|
||||
// 4│ f : Foo.foo
|
||||
// ^
|
||||
|
||||
This definition is missing a final expression. A nested definition
|
||||
must be followed by either another definition, or an expression
|
||||
// This definition is missing a final expression. A nested definition
|
||||
// must be followed by either another definition, or an expression
|
||||
|
||||
x = 4
|
||||
y = 2
|
||||
// x = 4
|
||||
// y = 2
|
||||
|
||||
x + y
|
||||
"#
|
||||
);
|
||||
// x + y
|
||||
// "#
|
||||
// );
|
||||
|
||||
test_report!(
|
||||
expression_indentation_end,
|
||||
|
@ -4908,25 +4915,260 @@ mod test_reporting {
|
|||
"
|
||||
);
|
||||
|
||||
test_report!(
|
||||
unfinished_import,
|
||||
indoc!(
|
||||
r"
|
||||
import [
|
||||
"
|
||||
),
|
||||
@r###"
|
||||
── UNFINISHED IMPORT in tmp/unfinished_import/Test.roc ─────────────────────────
|
||||
|
||||
I was partway through parsing an `import`, but I got stuck here:
|
||||
|
||||
4│ import [
|
||||
^
|
||||
|
||||
I was expecting to see a module name, like:
|
||||
|
||||
import BigNum
|
||||
|
||||
Or a package module name, like:
|
||||
|
||||
import pf.Stdout
|
||||
|
||||
Or a file path to ingest, like:
|
||||
|
||||
import "users.json" as users : Str
|
||||
"###
|
||||
);
|
||||
|
||||
test_report!(
|
||||
weird_import_params_record,
|
||||
indoc!(
|
||||
r"
|
||||
import Menu { x = 4 }
|
||||
"
|
||||
),@r###"
|
||||
── RECORD PARSE PROBLEM in tmp/weird_import_params_record/Test.roc ─────────────
|
||||
|
||||
I am partway through parsing a record, but I got stuck here:
|
||||
|
||||
4│ import Menu { x = 4 }
|
||||
^
|
||||
|
||||
TODO provide more context.
|
||||
"###
|
||||
);
|
||||
|
||||
test_report!(
|
||||
record_builder_in_module_params,
|
||||
indoc!(
|
||||
r"
|
||||
import Menu {
|
||||
echo,
|
||||
name: <- applyName
|
||||
}
|
||||
"
|
||||
),@r###"
|
||||
── RECORD BUILDER IN MODULE PARAMS in ...ord_builder_in_module_params/Test.roc ─
|
||||
|
||||
I was partway through parsing module params, but I got stuck here:
|
||||
|
||||
4│ import Menu {
|
||||
5│ echo,
|
||||
6│ name: <- applyName
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
|
||||
This looks like a record builder field, but those are not allowed in
|
||||
module params.
|
||||
"###
|
||||
);
|
||||
|
||||
test_report!(
|
||||
record_update_in_module_params,
|
||||
indoc!(
|
||||
r"
|
||||
import Menu { myParams & echo: echoFn }
|
||||
"
|
||||
),@r###"
|
||||
── RECORD UPDATE IN MODULE PARAMS in ...ecord_update_in_module_params/Test.roc ─
|
||||
|
||||
I was partway through parsing module params, but I got stuck here:
|
||||
|
||||
4│ import Menu { myParams & echo: echoFn }
|
||||
^^^^^^^^
|
||||
|
||||
It looks like you're trying to update a record, but module params
|
||||
require a standalone record literal.
|
||||
"###
|
||||
);
|
||||
|
||||
test_report!(
|
||||
unfinished_import_as_or_exposing,
|
||||
indoc!(
|
||||
r"
|
||||
import svg.Path a
|
||||
"
|
||||
),
|
||||
@r###"
|
||||
── UNFINISHED IMPORT in tmp/unfinished_import_as_or_exposing/Test.roc ──────────
|
||||
|
||||
I was partway through parsing an `import`, but I got stuck here:
|
||||
|
||||
4│ import svg.Path a
|
||||
^
|
||||
|
||||
I was expecting to see the `as` keyword, like:
|
||||
|
||||
import svg.Path as SvgPath
|
||||
|
||||
Or the `exposing` keyword, like:
|
||||
|
||||
import svg.Path exposing [arc, rx]
|
||||
|
||||
Or module params, like:
|
||||
|
||||
import Menu { echo, read }
|
||||
"###
|
||||
);
|
||||
|
||||
test_report!(
|
||||
unfinished_import_alias,
|
||||
indoc!(
|
||||
r"
|
||||
import svg.Path as
|
||||
"
|
||||
),
|
||||
@r###"
|
||||
── UNFINISHED IMPORT in tmp/unfinished_import_alias/Test.roc ───────────────────
|
||||
|
||||
I was partway through parsing an `import`, but I got stuck here:
|
||||
|
||||
4│ import svg.Path as
|
||||
^
|
||||
|
||||
I just saw the `as` keyword, so I was expecting to see an alias next.
|
||||
"###
|
||||
);
|
||||
|
||||
test_report!(
|
||||
lowercase_import_alias,
|
||||
indoc!(
|
||||
r"
|
||||
import svg.Path as path
|
||||
"
|
||||
),
|
||||
@r###"
|
||||
── LOWERCASE ALIAS in tmp/lowercase_import_alias/Test.roc ──────────────────────
|
||||
|
||||
This import is using a lowercase alias:
|
||||
|
||||
4│ import svg.Path as path
|
||||
^^^^
|
||||
|
||||
Module names and aliases must start with an uppercase letter.
|
||||
"###
|
||||
);
|
||||
|
||||
test_report!(
|
||||
unfinished_import_exposing,
|
||||
indoc!(
|
||||
r"
|
||||
import svg.Path exposing
|
||||
"
|
||||
),
|
||||
@r###"
|
||||
── UNFINISHED IMPORT in tmp/unfinished_import_exposing/Test.roc ────────────────
|
||||
|
||||
I was partway through parsing an `import`, but I got stuck here:
|
||||
|
||||
4│ import svg.Path exposing
|
||||
^
|
||||
|
||||
I just saw the `exposing` keyword, so I was expecting to see `[` next.
|
||||
"###);
|
||||
|
||||
test_report!(
|
||||
unfinished_import_exposing_name,
|
||||
indoc!(
|
||||
r"
|
||||
import svg.Path exposing [3
|
||||
"
|
||||
),
|
||||
@r###"
|
||||
── WEIRD EXPOSING in tmp/unfinished_import_exposing_name/Test.roc ──────────────
|
||||
|
||||
I'm partway through parsing an exposing list, but I got stuck here:
|
||||
|
||||
4│ import svg.Path exposing [3
|
||||
^
|
||||
|
||||
I was expecting a type, value, or function name next, like:
|
||||
|
||||
import Svg exposing [Path, arc, rx]
|
||||
"###);
|
||||
|
||||
test_report!(
|
||||
unfinished_ingested_file_name,
|
||||
indoc!(
|
||||
r#"
|
||||
import "example.json" as
|
||||
"#
|
||||
),
|
||||
@r###"
|
||||
── UNFINISHED IMPORT in tmp/unfinished_ingested_file_name/Test.roc ─────────────
|
||||
|
||||
I was partway through parsing an `import`, but I got stuck here:
|
||||
|
||||
4│ import "example.json" as
|
||||
^
|
||||
|
||||
I was expecting to see a name next, like:
|
||||
|
||||
import "users.json" as users : Str
|
||||
"###
|
||||
);
|
||||
|
||||
test_report!(
|
||||
ingested_file_import_ann_syntax_err,
|
||||
indoc!(
|
||||
r#"
|
||||
import "example.json" as example : List U8, U32
|
||||
"#
|
||||
),
|
||||
@r###"
|
||||
── UNFINISHED TYPE in tmp/ingested_file_import_ann_syntax_err/Test.roc ─────────
|
||||
|
||||
I am partway through parsing a type, but I got stuck here:
|
||||
|
||||
4│ import "example.json" as example : List U8, U32
|
||||
^
|
||||
|
||||
Note: I may be confused by indentation
|
||||
"###
|
||||
);
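Taken together, the import tests above exercise every accepted form of the new import statement. As a quick reference, using the module names, package shorthands, and file names from these test fixtures:

    import BigNum
    import pf.Stdout
    import svg.Path as SvgPath
    import svg.Path exposing [arc, rx]
    import Menu { echo, read }
    import "users.json" as users : Str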
|
||||
|
||||
// TODO could do better by pointing out we're parsing a function type
|
||||
test_report!(
|
||||
dict_type_formatting,
|
||||
indoc!(
|
||||
r#"
|
||||
app "dict" imports [ Dict ] provides [main] to "./platform"
|
||||
app "dict" imports [] provides [main] to "./platform"
|
||||
|
||||
myDict : Dict.Dict Num.I64 Str
|
||||
myDict : Dict Num.I64 Str
|
||||
myDict = Dict.insert (Dict.empty {}) "foo" 42
|
||||
|
||||
main = myDict
|
||||
"#
|
||||
),
|
||||
@r#"
|
||||
@r###"
|
||||
── TYPE MISMATCH in /code/proj/Main.roc ────────────────────────────────────────
|
||||
|
||||
Something is off with the body of the `myDict` definition:
|
||||
|
||||
3│ myDict : Dict.Dict Num.I64 Str
|
||||
3│ myDict : Dict Num.I64 Str
|
||||
4│ myDict = Dict.insert (Dict.empty {}) "foo" 42
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
|
@ -4937,14 +5179,14 @@ mod test_reporting {
|
|||
But the type annotation on `myDict` says it should be:
|
||||
|
||||
Dict I64 Str
|
||||
"#
|
||||
"###
|
||||
);
|
||||
|
||||
test_report!(
|
||||
alias_type_diff,
|
||||
indoc!(
|
||||
r#"
|
||||
app "test" imports [Set.{ Set }] provides [main] to "./platform"
|
||||
app "test" imports [] provides [main] to "./platform"
|
||||
|
||||
HSet a : Set a
|
||||
|
||||
|
@ -5799,9 +6041,9 @@ All branches in an `if` must have the same type!
|
|||
Did you mean one of these?
|
||||
|
||||
Str
|
||||
Err
|
||||
U8
|
||||
F64
|
||||
Box
|
||||
"###
|
||||
);
|
||||
|
||||
|
@ -6034,6 +6276,31 @@ In roc, functions are always written as a lambda, like{}
|
|||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn module_params_with_missing_arrow() {
|
||||
report_header_problem_as(
|
||||
indoc!(
|
||||
r#"
|
||||
module {echo, read} [menu]
|
||||
"#
|
||||
),
|
||||
indoc!(
|
||||
r#"
|
||||
── WEIRD MODULE PARAMS in /code/proj/Main.roc ──────────────────────────────────
|
||||
|
||||
I am partway through parsing a module header, but I got stuck here:
|
||||
|
||||
1│ module {echo, read} [menu]
|
||||
^
|
||||
|
||||
I am expecting `->` next, like:
|
||||
|
||||
module { echo, read } -> [menu]
|
||||
"#
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn platform_requires_rigids() {
|
||||
report_header_problem_as(
|
||||
|
@ -6103,9 +6370,7 @@ In roc, functions are always written as a lambda, like{}
|
|||
report_header_problem_as(
|
||||
indoc!(
|
||||
r"
|
||||
interface Foobar
|
||||
exposes [main, @Foo]
|
||||
imports [pf.Task, Base64]
|
||||
module [main, @Foo]
|
||||
"
|
||||
),
|
||||
indoc!(
|
||||
|
@ -6114,39 +6379,12 @@ In roc, functions are always written as a lambda, like{}
|
|||
|
||||
I am partway through parsing an `exposes` list, but I got stuck here:
|
||||
|
||||
1│ interface Foobar
|
||||
2│ exposes [main, @Foo]
|
||||
^
|
||||
1│ module [main, @Foo]
|
||||
^
|
||||
|
||||
I was expecting a type name, value name or function name next, like
|
||||
|
||||
exposes [Animal, default, tame]
|
||||
"
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_module_name() {
|
||||
report_header_problem_as(
|
||||
indoc!(
|
||||
r"
|
||||
interface foobar
|
||||
exposes [main, @Foo]
|
||||
imports [pf.Task, Base64]
|
||||
"
|
||||
),
|
||||
indoc!(
|
||||
r"
|
||||
── WEIRD MODULE NAME in /code/proj/Main.roc ────────────────────────────────────
|
||||
|
||||
I am partway through parsing a header, but got stuck here:
|
||||
|
||||
1│ interface foobar
|
||||
^
|
||||
|
||||
I am expecting a module name next, like BigNum or Main. Module names
|
||||
must start with an uppercase letter.
|
||||
[Animal, default, tame]
|
||||
"
|
||||
),
|
||||
)
|
||||
|
@ -7925,7 +8163,7 @@ In roc, functions are always written as a lambda, like{}
|
|||
"#
|
||||
),
|
||||
// TODO(opaques): error could be improved by saying that the opaque definition demands
|
||||
// that the argument be a U8, and linking to the definitin!
|
||||
// that the argument be a U8, and linking to the definition!
|
||||
@r#"
|
||||
── TYPE MISMATCH in /code/proj/Main.roc ────────────────────────────────────────
|
||||
|
||||
|
@ -8401,17 +8639,38 @@ In roc, functions are always written as a lambda, like{}
|
|||
a
|
||||
"
|
||||
),
|
||||
@r"
|
||||
── UNBOUND TYPE VARIABLE in /code/proj/Main.roc ────────────────────────────────
|
||||
@r###"
|
||||
── WILDCARD NOT ALLOWED HERE in /code/proj/Main.roc ────────────────────────────
|
||||
|
||||
The definition of `I` has an unbound type variable:
|
||||
The definition of `I` includes a wildcard (`*`) type variable:
|
||||
|
||||
4│ I : Num.Int *
|
||||
^
|
||||
|
||||
Tip: Type variables must be bound before the `:`. Perhaps you intended
|
||||
to add a type parameter to this type?
|
||||
"
|
||||
Type alias definitions may not use wildcard (`*`) type variables. Only
|
||||
named type variables are allowed.
|
||||
"###
|
||||
);
|
||||
|
||||
test_report!(
|
||||
underscore_in_alias,
|
||||
indoc!(
|
||||
r"
|
||||
I : Num.Int _
|
||||
a : I
|
||||
a
|
||||
"
|
||||
),
|
||||
@r###"
|
||||
── UNDERSCORE NOT ALLOWED HERE in /code/proj/Main.roc ──────────────────────────
|
||||
|
||||
The definition of `I` includes an inferred (`_`) type:
|
||||
|
||||
4│ I : Num.Int _
|
||||
^
|
||||
|
||||
Type alias definitions may not use inferred types (`_`).
|
||||
"###
|
||||
);
|
||||
|
||||
test_report!(
|
||||
|
@ -8423,17 +8682,17 @@ In roc, functions are always written as a lambda, like{}
|
|||
a
|
||||
"
|
||||
),
|
||||
@r"
|
||||
── UNBOUND TYPE VARIABLE in /code/proj/Main.roc ────────────────────────────────
|
||||
@r###"
|
||||
── WILDCARD NOT ALLOWED HERE in /code/proj/Main.roc ────────────────────────────
|
||||
|
||||
The definition of `I` has an unbound type variable:
|
||||
The definition of `I` includes a wildcard (`*`) type variable:
|
||||
|
||||
4│ I := Num.Int *
|
||||
^
|
||||
|
||||
Tip: Type variables must be bound before the `:=`. Perhaps you intended
|
||||
to add a type parameter to this type?
|
||||
"
|
||||
Opaque type definitions may not use wildcard (`*`) type variables. Only
|
||||
named type variables are allowed.
|
||||
"###
|
||||
);
|
||||
|
||||
test_report!(
|
||||
|
@ -8445,19 +8704,18 @@ In roc, functions are always written as a lambda, like{}
|
|||
a
|
||||
"
|
||||
),
|
||||
@r"
|
||||
── UNBOUND TYPE VARIABLE in /code/proj/Main.roc ────────────────────────────────
|
||||
@r###"
|
||||
── WILDCARD NOT ALLOWED HERE in /code/proj/Main.roc ────────────────────────────
|
||||
|
||||
The definition of `I` has 2 unbound type variables.
|
||||
|
||||
Here is one occurrence:
|
||||
The definition of `I` includes 2 wildcard (`*`) type variables. Here is
|
||||
one of them:
|
||||
|
||||
4│ I : [A (Num.Int *), B (Num.Int *)]
|
||||
^
|
||||
|
||||
Tip: Type variables must be bound before the `:`. Perhaps you intended
|
||||
to add a type parameter to this type?
|
||||
"
|
||||
Type alias definitions may not use wildcard (`*`) type variables. Only
|
||||
named type variables are allowed.
|
||||
"###
|
||||
);
|
||||
|
||||
test_report!(
|
||||
|
@ -8469,17 +8727,16 @@ In roc, functions are always written as a lambda, like{}
|
|||
a
|
||||
"
|
||||
),
|
||||
@r"
|
||||
── UNBOUND TYPE VARIABLE in /code/proj/Main.roc ────────────────────────────────
|
||||
@r###"
|
||||
── UNDERSCORE NOT ALLOWED HERE in /code/proj/Main.roc ──────────────────────────
|
||||
|
||||
The definition of `I` has an unbound type variable:
|
||||
The definition of `I` includes an inferred (`_`) type:
|
||||
|
||||
4│ I : Num.Int _
|
||||
^
|
||||
|
||||
Tip: Type variables must be bound before the `:`. Perhaps you intended
|
||||
to add a type parameter to this type?
|
||||
"
|
||||
Type alias definitions may not use inferred types (`_`).
|
||||
"###
|
||||
);
|
||||
|
||||
test_report!(
|
||||
|
@ -8491,17 +8748,19 @@ In roc, functions are always written as a lambda, like{}
|
|||
a
|
||||
"
|
||||
),
|
||||
@r"
|
||||
── UNBOUND TYPE VARIABLE in /code/proj/Main.roc ────────────────────────────────
|
||||
@r###"
|
||||
── UNDECLARED TYPE VARIABLE in /code/proj/Main.roc ─────────────────────────────
|
||||
|
||||
The definition of `I` has an unbound type variable:
|
||||
The definition of `I` includes an undeclared type variable:
|
||||
|
||||
4│ I : Num.Int a
|
||||
^
|
||||
|
||||
Tip: Type variables must be bound before the `:`. Perhaps you intended
|
||||
to add a type parameter to this type?
|
||||
"
|
||||
All type variables in type alias definitions must be declared.
|
||||
|
||||
Tip: You can declare type variables by putting them right before the `:`
|
||||
symbol, separated by spaces.
|
||||
"###
|
||||
);
|
||||
|
||||
test_report!(
|
||||
|
@ -9304,7 +9563,7 @@ In roc, functions are always written as a lambda, like{}
|
|||
type_error_in_apply_is_circular,
|
||||
indoc!(
|
||||
r#"
|
||||
app "test" imports [Set] provides [go] to "./platform"
|
||||
app "test" imports [] provides [go] to "./platform"
|
||||
|
||||
S a : { set : Set.Set a }
|
||||
|
||||
|
@ -10892,7 +11151,9 @@ In roc, functions are always written as a lambda, like{}
|
|||
function_cannot_derive_encoding,
|
||||
indoc!(
|
||||
r#"
|
||||
app "test" imports [Decode.{decoder}] provides [main] to "./platform"
|
||||
app "test" imports [] provides [main] to "./platform"
|
||||
|
||||
import Decode exposing [decoder]
|
||||
|
||||
main =
|
||||
myDecoder : Decoder (a -> a) fmt where fmt implements DecoderFormatting
|
||||
|
@ -10901,12 +11162,12 @@ In roc, functions are always written as a lambda, like{}
|
|||
myDecoder
|
||||
"#
|
||||
),
|
||||
@r"
|
||||
@r###"
|
||||
── TYPE MISMATCH in /code/proj/Main.roc ────────────────────────────────────────
|
||||
|
||||
This expression has a type that does not implement the abilities it's expected to:
|
||||
|
||||
5│ myDecoder = decoder
|
||||
7│ myDecoder = decoder
|
||||
^^^^^^^
|
||||
|
||||
I can't generate an implementation of the `Decoding` ability for
|
||||
|
@ -10914,14 +11175,16 @@ In roc, functions are always written as a lambda, like{}
|
|||
a -> a
|
||||
|
||||
Note: `Decoding` cannot be generated for functions.
|
||||
"
|
||||
"###
|
||||
);
|
||||
|
||||
test_report!(
|
||||
nested_opaque_cannot_derive_encoding,
|
||||
indoc!(
|
||||
r#"
|
||||
app "test" imports [Decode.{decoder}] provides [main] to "./platform"
|
||||
app "test" imports [] provides [main] to "./platform"
|
||||
|
||||
import Decode exposing [decoder]
|
||||
|
||||
A := {}
|
||||
|
||||
|
@ -10932,12 +11195,12 @@ In roc, functions are always written as a lambda, like{}
|
|||
myDecoder
|
||||
"#
|
||||
),
|
||||
@r"
|
||||
@r###"
|
||||
── TYPE MISMATCH in /code/proj/Main.roc ────────────────────────────────────────
|
||||
|
||||
This expression has a type that does not implement the abilities it's expected to:
|
||||
|
||||
7│ myDecoder = decoder
|
||||
9│ myDecoder = decoder
|
||||
^^^^^^^
|
||||
|
||||
I can't generate an implementation of the `Decoding` ability for
|
||||
|
@ -10952,7 +11215,7 @@ In roc, functions are always written as a lambda, like{}
|
|||
|
||||
Tip: `A` does not implement `Decoding`. Consider adding a custom
|
||||
implementation or `implements Decode.Decoding` to the definition of `A`.
|
||||
"
|
||||
"###
|
||||
);
|
||||
|
||||
test_report!(
|
||||
|
@ -11113,7 +11376,9 @@ In roc, functions are always written as a lambda, like{}
|
|||
infer_decoded_record_error_with_function_field,
|
||||
indoc!(
|
||||
r#"
|
||||
app "test" imports [TotallyNotJson] provides [main] to "./platform"
|
||||
app "test" imports [] provides [main] to "./platform"
|
||||
|
||||
import TotallyNotJson
|
||||
|
||||
main =
|
||||
decoded = Str.toUtf8 "{\"first\":\"ab\",\"second\":\"cd\"}" |> Decode.fromBytes TotallyNotJson.json
|
||||
|
@ -11122,12 +11387,12 @@ In roc, functions are always written as a lambda, like{}
|
|||
_ -> "something went wrong"
|
||||
"#
|
||||
),
|
||||
@r"
|
||||
@r###"
|
||||
── TYPE MISMATCH in /code/proj/Main.roc ────────────────────────────────────────
|
||||
|
||||
This expression has a type that does not implement the abilities it's expected to:
|
||||
|
||||
6│ Ok rcd -> rcd.first rcd.second
|
||||
8│ Ok rcd -> rcd.first rcd.second
|
||||
^^^^^^^^^
|
||||
|
||||
I can't generate an implementation of the `Decoding` ability for
|
||||
|
@ -11135,14 +11400,16 @@ In roc, functions are always written as a lambda, like{}
|
|||
* -> *
|
||||
|
||||
Note: `Decoding` cannot be generated for functions.
|
||||
"
|
||||
"###
|
||||
);
|
||||
|
||||
test_report!(
|
||||
record_with_optional_field_types_cannot_derive_decoding,
|
||||
indoc!(
|
||||
r#"
|
||||
app "test" imports [Decode.{decoder}] provides [main] to "./platform"
|
||||
app "test" imports [] provides [main] to "./platform"
|
||||
|
||||
import Decode exposing [decoder]
|
||||
|
||||
main =
|
||||
myDecoder : Decoder {x : Str, y ? Str} fmt where fmt implements DecoderFormatting
|
||||
|
@ -11151,12 +11418,12 @@ In roc, functions are always written as a lambda, like{}
|
|||
myDecoder
|
||||
"#
|
||||
),
|
||||
@r"
|
||||
@r###"
|
||||
── TYPE MISMATCH in /code/proj/Main.roc ────────────────────────────────────────
|
||||
|
||||
This expression has a type that does not implement the abilities it's expected to:
|
||||
|
||||
5│ myDecoder = decoder
|
||||
7│ myDecoder = decoder
|
||||
^^^^^^^
|
||||
|
||||
I can't generate an implementation of the `Decoding` ability for
|
||||
|
@ -11171,7 +11438,7 @@ In roc, functions are always written as a lambda, like{}
|
|||
over records that may or may not contain them at compile time, but are
|
||||
not a concept that extends to runtime!
|
||||
Maybe you wanted to use a `Result`?
|
||||
"
|
||||
"###
|
||||
);
|
||||
|
||||
test_report!(
|
||||
|
@ -11353,21 +11620,23 @@ In roc, functions are always written as a lambda, like{}
|
|||
unused_value_import,
|
||||
indoc!(
|
||||
r#"
|
||||
app "test" imports [List.{ concat }] provides [main] to "./platform"
|
||||
app "test" imports [] provides [main] to "./platform"
|
||||
|
||||
import List exposing [concat]
|
||||
|
||||
main = ""
|
||||
"#
|
||||
),
|
||||
@r#"
|
||||
@r###"
|
||||
── UNUSED IMPORT in /code/proj/Main.roc ────────────────────────────────────────
|
||||
|
||||
`List.concat` is not used in this module.
|
||||
List is imported but not used.
|
||||
|
||||
1│ app "test" imports [List.{ concat }] provides [main] to "./platform"
|
||||
^^^^^^
|
||||
3│ import List exposing [concat]
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Since `List.concat` isn't used, you don't need to import it.
|
||||
"#
|
||||
Since List isn't used, you don't need to import it.
|
||||
"###
|
||||
);
|
||||
|
||||
test_report!(
|
||||
|
@ -11389,7 +11658,9 @@ In roc, functions are always written as a lambda, like{}
|
|||
unnecessary_builtin_type_import,
|
||||
indoc!(
|
||||
r#"
|
||||
app "test" imports [Decode.{ DecodeError }] provides [main, E] to "./platform"
|
||||
app "test" imports [] provides [main, E] to "./platform"
|
||||
|
||||
import Decode exposing [DecodeError]
|
||||
|
||||
E : DecodeError
|
||||
|
||||
|
@ -11399,6 +11670,54 @@ In roc, functions are always written as a lambda, like{}
|
|||
@r"
|
||||
"
|
||||
);
|
||||
test_report!(
|
||||
unknown_shorthand_no_deps,
|
||||
indoc!(
|
||||
r#"
|
||||
import foo.Foo
|
||||
|
||||
Foo.foo
|
||||
"#
|
||||
),
|
||||
@r###"
|
||||
── UNRECOGNIZED PACKAGE in tmp/unknown_shorthand_no_deps/Test.roc ──────────────
|
||||
|
||||
This module is trying to import from `foo`:
|
||||
|
||||
4│ import foo.Foo
|
||||
^^^^^^^
|
||||
|
||||
A lowercase name indicates a package shorthand, but no packages have
|
||||
been specified.
|
||||
"###
|
||||
);
|
||||
|
||||
test_report!(
|
||||
unknown_shorthand_in_app,
|
||||
indoc!(
|
||||
r#"
|
||||
app [main] { pf: platform "../../tests/platform.roc" }
|
||||
|
||||
import foo.Foo
|
||||
|
||||
main =
|
||||
Foo.foo
|
||||
"#
|
||||
),
|
||||
@r###"
|
||||
── UNRECOGNIZED PACKAGE in tmp/unknown_shorthand_in_app/Test.roc ───────────────
|
||||
|
||||
This module is trying to import from `foo`:
|
||||
|
||||
3│ import foo.Foo
|
||||
^^^^^^^
|
||||
|
||||
A lowercase name indicates a package shorthand, but I don't recognize
|
||||
this one. Did you mean one of these?
|
||||
|
||||
pf
|
||||
"###
|
||||
);
|
||||
|
||||
test_report!(
|
||||
invalid_toplevel_cycle,
|
||||
|
@ -13260,7 +13579,7 @@ In roc, functions are always written as a lambda, like{}
|
|||
4│ crash "" ""
|
||||
^^^^^
|
||||
|
||||
`crash` must be given exacly one message to crash with.
|
||||
`crash` must be given exactly one message to crash with.
|
||||
"#
|
||||
);
|
||||
|
||||
|
@ -13658,7 +13977,9 @@ In roc, functions are always written as a lambda, like{}
|
|||
derive_decoding_for_tuple,
|
||||
indoc!(
|
||||
r#"
|
||||
app "test" imports [Decode.{decoder}] provides [main] to "./platform"
|
||||
app "test" imports [] provides [main] to "./platform"
|
||||
|
||||
import Decode exposing [decoder]
|
||||
|
||||
main =
|
||||
myDecoder : Decoder (U32, Str) fmt where fmt implements DecoderFormatting
|
||||
|
@ -13673,7 +13994,9 @@ In roc, functions are always written as a lambda, like{}
|
|||
cannot_decode_tuple_with_non_decode_element,
|
||||
indoc!(
|
||||
r#"
|
||||
app "test" imports [Decode.{decoder}] provides [main] to "./platform"
|
||||
app "test" imports [] provides [main] to "./platform"
|
||||
|
||||
import Decode exposing [decoder]
|
||||
|
||||
main =
|
||||
myDecoder : Decoder (U32, {} -> {}) fmt where fmt implements DecoderFormatting
|
||||
|
@ -13682,12 +14005,12 @@ In roc, functions are always written as a lambda, like{}
|
|||
myDecoder
|
||||
"#
|
||||
),
|
||||
@r"
|
||||
@r###"
|
||||
── TYPE MISMATCH in /code/proj/Main.roc ────────────────────────────────────────
|
||||
|
||||
This expression has a type that does not implement the abilities it's expected to:
|
||||
|
||||
5│ myDecoder = decoder
|
||||
7│ myDecoder = decoder
|
||||
^^^^^^^
|
||||
|
||||
I can't generate an implementation of the `Decoding` ability for
|
||||
|
@ -13695,7 +14018,7 @@ In roc, functions are always written as a lambda, like{}
|
|||
U32, {} -> {}
|
||||
|
||||
Note: `Decoding` cannot be generated for functions.
|
||||
"
|
||||
"###
|
||||
);
|
||||
|
||||
test_no_problem!(
|
||||
|
|
|
@ -40,7 +40,7 @@ parking_lot.workspace = true
tempfile.workspace = true

[dev-dependencies]
roc_test_utils = { path = "../../test_utils" }
roc_test_utils_dir = { path = "../../test_utils_dir" }

indoc.workspace = true
maplit.workspace = true
Some files were not shown because too many files have changed in this diff.