Mirror of https://github.com/roc-lang/roc.git, synced 2025-09-26 13:29:12 +00:00

Merge remote-tracking branch 'upstream/main' into unmacro_parsers
Commit ab217ede3f
680 changed files with 15718 additions and 14181 deletions
.github/workflows/basic_cli_build_release.yml (vendored, 10 lines changed)

@@ -11,7 +11,7 @@ env:
 # use .tar.gz for quick testing
 ARCHIVE_FORMAT: .tar.br
 # Make a new basic-cli git tag and set it here before starting this workflow
-RELEASE_TAG: 0.9.0
+RELEASE_TAG: 0.9.1

 jobs:
 prepare:

@@ -97,7 +97,7 @@ jobs:
 basic-cli/platform/linux-arm64.o

 build-macos-x86_64-files:
-runs-on: [macos-11] # I expect the generated files to work on macOS 12 and up
+runs-on: [macos-12] # I expect the generated files to work on macOS 12 and up
 needs: [prepare]
 steps:
 - uses: actions/checkout@v4

@@ -158,7 +158,11 @@ jobs:
 - name: rename nightly folder
 run: mv roc_nightly* roc_nightly

-- run: git clone https://github.com/roc-lang/basic-cli.git
+- run: |
+  git clone https://github.com/roc-lang/basic-cli.git
+  cd basic-cli
+  git checkout $RELEASE_TAG
+  cd ..

 - run: cp macos-apple-silicon-files/* ./basic-cli/platform
@@ -83,7 +83,7 @@ jobs:
 basic-webserver/platform/linux-arm64.o

 build-macos-x86_64-files:
-runs-on: [macos-11] # I expect the generated files to work on macOS 12 and 13
+runs-on: [macos-12] # I expect the generated files to work on macOS 12 and up
 needs: [fetch-releases]
 steps:
 - uses: actions/checkout@v4
.github/workflows/macos_x86_64.yml (vendored, 2 lines changed)

@@ -29,5 +29,5 @@ jobs:

 - name: regular rust tests
 run: cargo test --locked --release -- --skip opaque_wrap_function --skip gen_list::bool_list_literal --skip platform_switching_swift --skip swift_ui --skip gen_tags::phantom_polymorphic_record && sccache --show-stats
-# swift tests are skipped because of "Could not find or use auto-linked library 'swiftCompatibilityConcurrency'" on macos-11 x86_64 CI machine
+# swift tests are skipped because of "Could not find or use auto-linked library 'swiftCompatibilityConcurrency'" on macos x86_64 CI machine
 # this issue may be caused by using older versions of XCode
@@ -14,14 +14,17 @@ jobs:
 test-and-build:
 name: Rust tests, build and package nightly release
 runs-on: [self-hosted, macOS, ARM64]
+env:
+LIBRARY_PATH: /opt/homebrew/Cellar/zstd/1.5.6/lib
 timeout-minutes: 90
 steps:
 - uses: actions/checkout@v4

-- run: zig version
+- name: Update PATH to use zig 11
+run: |
+echo "PATH=/Users/m1ci/Downloads/zig-macos-aarch64-0.11.0:$PATH" >> $GITHUB_ENV

-- name: llvm version
-run: llc --version | grep LLVM
+- run: zig version

 - name: run tests
 run: cargo test --locked --release
.github/workflows/nightly_macos_x86_64.yml (vendored, 2 lines changed)

@@ -28,7 +28,7 @@ jobs:

 - name: execute rust tests
 run: cargo test --release --locked -- --skip opaque_wrap_function --skip gen_list::bool_list_literal --skip platform_switching_swift --skip swift_ui --skip gen_tags::phantom_polymorphic_record
-# swift tests are skipped because of "Could not find or use auto-linked library 'swiftCompatibilityConcurrency'" on macos-11 x86_64 CI machine
+# swift tests are skipped because of "Could not find or use auto-linked library 'swiftCompatibilityConcurrency'" on macos x86_64 CI machine
 # this issue may be caused by using older versions of XCode

 - name: build release
.github/workflows/test_nightly_many_os.yml (vendored, 7 lines changed)

@@ -9,7 +9,7 @@ jobs:
 strategy:
 fail-fast: false
 matrix:
-os: [ macos-11, macos-12, macos-13, ubuntu-20.04, ubuntu-22.04 ]
+os: [ macos-12, macos-13, ubuntu-20.04, ubuntu-22.04 ]
 runs-on: ${{ matrix.os }}
 timeout-minutes: 90
 steps:

@@ -18,12 +18,15 @@ jobs:
 with:
 version: 0.11.0

+- name: Install zlib on macOS-13
+if: matrix.os == 'macos-13'
+run: brew install zlib

 - name: get the latest release archive for linux (x86_64)
 if: startsWith(matrix.os, 'ubuntu')
 run: |
 curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-linux_x86_64-latest.tar.gz

 - name: get the latest release archive for macos (x86_64)
 if: startsWith(matrix.os, 'macos')
 run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-macos_x86_64-latest.tar.gz
.github/workflows/ubuntu_x86_64.yml (vendored, 14 lines changed)

@@ -11,8 +11,6 @@ jobs:
 name: test zig, rust, wasm...
 runs-on: [self-hosted, i7-6700K]
 timeout-minutes: 90
-env:
-RUSTC_WRAPPER: /home/big-ci-user/.cargo/bin/sccache
 steps:
 - uses: actions/checkout@v4

@@ -32,14 +30,14 @@ jobs:
 run: cargo run --locked --release format --check crates/compiler/builtins/roc

 - name: ensure there are no unused dependencies
-run: cargo +nightly-2023-08-20 udeps --all-targets
+run: cargo +nightly-2023-12-21 udeps --all-targets

 - name: zig wasm tests
 run: cd crates/compiler/builtins/bitcode && ./run-wasm-tests.sh

 - name: regular rust tests
 # see #5904 for skipped test
-run: cargo test --locked --release -- --skip cli_run::expects_dev_and_test && sccache --show-stats
+run: cargo test --locked --release -- --skip cli_run::expects_dev_and_test

 - name: tests examples in docs
 run: cargo test --doc --release

@@ -48,19 +46,19 @@ jobs:
 run: cd examples/platform-switching/rust-platform && LD_LIBRARY_PATH=. cargo test --release --locked

 - name: test the dev backend # these tests require an explicit feature flag
-run: cargo test --locked --release --package test_gen --no-default-features --features gen-dev && sccache --show-stats
+run: cargo test --locked --release --package test_gen --no-default-features --features gen-dev

 - name: test gen-wasm single threaded # gen-wasm has some multithreading problems to do with the wasmer runtime
-run: cargo test --locked --release --package test_gen --no-default-features --features gen-wasm -- --test-threads=1 && sccache --show-stats
+run: cargo test --locked --release --package test_gen --no-default-features --features gen-wasm -- --test-threads=1

 - name: roc test all builtins
 run: ./ci/roc_test_builtins.sh

 - name: wasm repl test
-run: crates/repl_test/test_wasm.sh && sccache --show-stats
+run: crates/repl_test/test_wasm.sh

 - name: test building wasm repl
-run: ./ci/www-repl.sh && sccache --show-stats
+run: ./ci/www-repl.sh

 #TODO i386 (32-bit linux) cli tests
 #TODO verify-no-git-changes
.github/workflows/windows_release_build.yml (vendored, 4 lines changed)

@@ -29,8 +29,8 @@ jobs:
 - name: zig version
 run: zig version

-- name: install rust nightly 1.72.0
-run: rustup install nightly-2023-08-20
+- name: install rust nightly 1.76.0
+run: rustup install nightly-2023-12-21

 - name: set up llvm 16
 run: |
.github/workflows/windows_tests.yml (vendored, 4 lines changed)

@@ -37,8 +37,8 @@ jobs:
 cd crates\compiler\builtins\bitcode\
 zig build test

-- name: install rust nightly 1.73.0
-run: rustup install nightly-2023-08-20
+- name: install rust nightly 1.76.0
+run: rustup install nightly-2023-12-21

 - name: set up llvm 16
 run: |
Cargo.lock (generated, 24 lines changed)

@@ -2502,12 +2502,14 @@ dependencies = [
 "roc_module",
 "roc_packaging",
 "roc_parse",
+"roc_problem",
 "roc_region",
 "roc_reporting",
 "roc_solve",
 "roc_target",
 "roc_types",
 "snafu",
+"ven_pretty",
 ]

 [[package]]

@@ -2799,6 +2801,8 @@ dependencies = [
 "roc_tracing",
 "roc_types",
 "roc_unify",
+"roc_work",
+"roc_worker",
 "tempfile",
 "ven_pretty",
 ]

@@ -3214,6 +3218,26 @@ dependencies = [
 "roc_error_macros",
 ]

+[[package]]
+name = "roc_work"
+version = "0.0.1"
+dependencies = [
+"roc_collections",
+"roc_error_macros",
+"roc_module",
+]
+
+[[package]]
+name = "roc_worker"
+version = "0.0.1"
+dependencies = [
+"crossbeam",
+"roc_collections",
+"roc_error_macros",
+"roc_module",
+"roc_work",
+]
+
 [[package]]
 name = "rustc-demangle"
 version = "0.1.23"
@@ -1,6 +1,6 @@

 VERSION 0.6
-FROM rust:1.73.0-slim-buster # make sure to update rust-toolchain.toml too so that everything uses the same rust version
+FROM rust:1.76.0-slim-buster # make sure to update rust-toolchain.toml too so that everything uses the same rust version
 WORKDIR /earthbuild

 prep-debian:
README.md (44 lines changed)

@@ -2,7 +2,7 @@

 [Roc](https://www.roc-lang.org) is not ready for a 0.1 release yet, but we do have:

-- [**installation** guide](https://github.com/roc-lang/roc/tree/main/getting_started)
+- [**installation** guide](https://www.roc-lang.org/install)
 - [**tutorial**](https://roc-lang.org/tutorial)
 - [**docs** for the standard library](https://www.roc-lang.org/builtins)
 - [**examples**](https://www.roc-lang.org/examples)

@@ -14,6 +14,8 @@ If you'd like to contribute, check out [good first issues](https://github.com/ro
 ## Sponsors

 You can 💜 **sponsor** 💜 Roc on:

+- [Every.org](https://www.every.org/roc-programming-language-foundation?donateTo=roc-programming-language-foundation) (supports credit card, bank, Venmo, PayPal, cryptocurrencies, and more)
 - [GitHub](https://github.com/sponsors/roc-lang)
 - [Liberapay](https://liberapay.com/roc_lang)

@@ -33,24 +35,26 @@ If you would like your company to become a corporate sponsor of Roc's developmen

 We'd also like to express our gratitude to our generous [individual sponsors](https://github.com/sponsors/roc-lang/)! A special thanks to those sponsoring $25/month or more:

-* [Angelo Ceccato](https://github.com/AngeloChecked)
-* [Niclas Overby](https://github.com/noverby)
-* [Krzysztof G.](https://github.com/krzysztofgb)
-* [Sam Mohr](https://github.com/smores56)
-* [Steven Chen](https://github.com/megakilo)
-* [Drew Lazzeri](https://github.com/asteroidb612)
-* [Alex Binaei](https://github.com/mrmizz)
-* [Jono Mallanyk](https://github.com/jonomallanyk)
-* [Chris Packett](https://github.com/chris-packett)
-* [James Birtles](https://github.com/jamesbirtles)
-* [Ivo Balbaert](https://github.com/Ivo-Balbaert)
-* [Lucas Rosa](https://github.com/rvcas)
-* [Jonas Schell](https://github.com/Ocupe)
-* [Christopher Dolan](https://github.com/cdolan)
-* [Nick Gravgaard](https://github.com/nick-gravgaard)
-* [Zeljko Nesic](https://github.com/popara)
-* [Shritesh Bhattarai](https://github.com/shritesh)
-* [Richard Feldman](https://github.com/rtfeldman)
-* [Ayaz Hafiz](https://github.com/ayazhafiz)
+- [Jackson Lucky](https://github.com/jluckyiv)
+- [Agus Zubiaga](https://github.com/agu-z)
+- [Angelo Ceccato](https://github.com/AngeloChecked)
+- [Niclas Overby](https://github.com/noverby)
+- [Krzysztof G.](https://github.com/krzysztofgb)
+- [Sam Mohr](https://github.com/smores56)
+- [Steven Chen](https://github.com/megakilo)
+- [Drew Lazzeri](https://github.com/asteroidb612)
+- [Alex Binaei](https://github.com/mrmizz)
+- [Jono Mallanyk](https://github.com/jonomallanyk)
+- [Chris Packett](https://github.com/chris-packett)
+- [James Birtles](https://github.com/jamesbirtles)
+- [Ivo Balbaert](https://github.com/Ivo-Balbaert)
+- [Lucas Rosa](https://github.com/rvcas)
+- [Jonas Schell](https://github.com/Ocupe)
+- [Christopher Dolan](https://github.com/cdolan)
+- [Nick Gravgaard](https://github.com/nick-gravgaard)
+- [Zeljko Nesic](https://github.com/popara)
+- [Shritesh Bhattarai](https://github.com/shritesh)
+- [Richard Feldman](https://github.com/rtfeldman)
+- [Ayaz Hafiz](https://github.com/ayazhafiz)

 Thank you all so much for helping Roc progress!
@@ -8,11 +8,8 @@ use roc_fmt::def::fmt_defs;
 use roc_fmt::module::fmt_module;
 use roc_fmt::spaces::RemoveSpaces;
 use roc_fmt::{Ast, Buf};
-use roc_parse::{
-module::{self, module_defs},
-parser::{Parser, SyntaxError},
-state::State,
-};
+use roc_parse::module::parse_module_defs;
+use roc_parse::{module, parser::SyntaxError, state::State};

 #[derive(Copy, Clone, Debug)]
 pub enum FormatMode {

@@ -67,6 +64,7 @@ fn is_roc_file(path: &Path) -> bool {

 pub fn format_files(files: std::vec::Vec<PathBuf>, mode: FormatMode) -> Result<(), String> {
 let arena = Bump::new();
+let mut files_to_reformat = Vec::new(); // to track which files failed `roc format --check`

 for file in flatten_directories(files) {
 let src = std::fs::read_to_string(&file).unwrap();

@@ -75,9 +73,10 @@ pub fn format_files(files: std::vec::Vec<PathBuf>, mode: FormatMode) -> Result<(
 Ok(buf) => {
 match mode {
 FormatMode::CheckOnly => {
-// If we notice that this file needs to be formatted, return early
+// If a file fails `format --check`, add it to the file
+// list for reporting afterwards.
 if buf.as_str() != src {
-return Err("One or more files need to be reformatted.".to_string());
+files_to_reformat.push(file.display().to_string());
 }
 }
 FormatMode::WriteToFile => {

@@ -155,7 +154,14 @@ pub fn format_files(files: std::vec::Vec<PathBuf>, mode: FormatMode) -> Result<(
 },
 }
 }
+// After processing all files, check if any files failed `format --check`
+if !files_to_reformat.is_empty() {
+let file_list = files_to_reformat.join(", ");
+return Err(format!(
+"The following file(s) failed `roc format --check`:\n\t{}\nYou can fix this with `roc format filename.roc`.",
+file_list
+));
+}
 Ok(())
 }

@@ -228,7 +234,9 @@ fn parse_all<'a>(arena: &'a Bump, src: &'a str) -> Result<Ast<'a>, SyntaxError<'
 let (module, state) = module::parse_header(arena, State::new(src.as_bytes()))
 .map_err(|e| SyntaxError::Header(e.problem))?;

-let (_, defs, _) = module_defs().parse(arena, state, 0).map_err(|(_, e)| e)?;
+let (module, defs) = module.upgrade_header_imports(arena);
+
+let defs = parse_module_defs(arena, state, defs)?;

 Ok(Ast { module, defs })
 }

@@ -240,3 +248,105 @@ fn fmt_all<'a>(buf: &mut Buf<'a>, ast: &'a Ast) {

 buf.fmt_end_of_file();
 }

+#[cfg(test)]
+mod tests {
+use super::*;
+use std::fs::File;
+use std::io::Write;
+use tempfile::{tempdir, TempDir};
+
+const FORMATTED_ROC: &str = r#"app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.10.0/vNe6s9hWzoTZtFmNkvEICPErI9ptji_ySjicO6CkucY.tar.br" }
+
+import pf.Stdout
+import pf.Task
+
+main =
+Stdout.line! "I'm a Roc application!""#;
+
+const UNFORMATTED_ROC: &str = r#"app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.10.0/vNe6s9hWzoTZtFmNkvEICPErI9ptji_ySjicO6CkucY.tar.br" }
+
+
+import pf.Stdout
+
+import pf.Task
+
+main =
+Stdout.line! "I'm a Roc application!"
+"#;
+
+fn setup_test_file(dir: &Path, file_name: &str, contents: &str) -> PathBuf {
+let file_path = dir.join(file_name);
+let mut file = File::create(&file_path).unwrap();
+writeln!(file, "{}", contents).unwrap();
+file.flush().unwrap();
+file_path
+}
+
+fn cleanup_temp_dir(dir: TempDir) {
+let result = dir.close();
+assert!(result.is_ok(), "Failed to delete temp directory");
+}
+
+#[test]
+fn test_single_file_needs_reformatting() {
+let dir = tempdir().unwrap();
+let file_path = setup_test_file(dir.path(), "test1.roc", UNFORMATTED_ROC);
+
+let result = format_files(vec![file_path.clone()], FormatMode::CheckOnly);
+assert!(result.is_err());
+assert_eq!(
+result.unwrap_err(),
+format!(
+"The following file(s) failed `roc format --check`:\n\t{}\nYou can fix this with `roc format filename.roc`.",
+&file_path.as_path().to_str().unwrap()
+)
+);
+
+cleanup_temp_dir(dir);
+}
+
+#[test]
+fn test_multiple_files_needs_reformatting() {
+let dir = tempdir().unwrap();
+let file1 = setup_test_file(dir.path(), "test1.roc", UNFORMATTED_ROC);
+let file2 = setup_test_file(dir.path(), "test2.roc", UNFORMATTED_ROC);
+
+let result = format_files(vec![file1, file2], FormatMode::CheckOnly);
+assert!(result.is_err());
+let error_message = result.unwrap_err();
+assert!(error_message.contains("test1.roc") && error_message.contains("test2.roc"));
+
+cleanup_temp_dir(dir);
+}
+
+#[test]
+fn test_no_files_need_reformatting() {
+let dir = tempdir().unwrap();
+let file_path = setup_test_file(dir.path(), "formatted.roc", FORMATTED_ROC);
+
+let result = format_files(vec![file_path], FormatMode::CheckOnly);
+assert!(result.is_ok());
+
+cleanup_temp_dir(dir);
+}
+
+#[test]
+fn test_some_files_need_reformatting() {
+let dir = tempdir().unwrap();
+let file_formatted = setup_test_file(dir.path(), "formatted.roc", FORMATTED_ROC);
+let file1_unformated = setup_test_file(dir.path(), "test1.roc", UNFORMATTED_ROC);
+let file2_unformated = setup_test_file(dir.path(), "test2.roc", UNFORMATTED_ROC);
+
+let result = format_files(
+vec![file_formatted, file1_unformated, file2_unformated],
+FormatMode::CheckOnly,
+);
+assert!(result.is_err());
+let error_message = result.unwrap_err();
+assert!(error_message.contains("test1.roc") && error_message.contains("test2.roc"));
+assert!(!error_message.contains("formatted.roc"));
+
+cleanup_temp_dir(dir);
+}
+}
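The check-only change above stops returning an error at the first unformatted file and instead collects every failing file, then reports them all at once. A minimal standalone sketch of that aggregation pattern follows; the stubbed formatter closure and all names are illustrative, not the real roc_fmt pipeline:

    // Sketch only: `format_source` stands in for the real formatter.
    fn check_files(
        sources: &[(String, String)], // (file name, file contents)
        format_source: impl Fn(&str) -> String,
    ) -> Result<(), String> {
        // Collect every file whose formatted output differs from the source,
        // instead of erroring out at the first mismatch.
        let failures: Vec<&str> = sources
            .iter()
            .filter(|(_, src)| format_source(src.as_str()) != *src)
            .map(|(name, _)| name.as_str())
            .collect();

        if failures.is_empty() {
            Ok(())
        } else {
            Err(format!(
                "The following file(s) failed `roc format --check`:\n\t{}",
                failures.join(", ")
            ))
        }
    }

    fn main() {
        // Pretend formatter: trims trailing blank lines and ends with one newline.
        let fake_format = |src: &str| format!("{}\n", src.trim_end());

        let sources = vec![
            ("formatted.roc".to_string(), "main = 1\n".to_string()),
            ("needs_fmt.roc".to_string(), "main = 1\n\n\n".to_string()),
        ];

        match check_files(&sources, fake_format) {
            Ok(()) => println!("all files already formatted"),
            Err(msg) => eprintln!("{msg}"),
        }
    }

Reporting the whole list in one error, as the diff and its new tests do, lets a CI run surface every unformatted file in a single pass.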
@@ -72,6 +72,7 @@ pub const FLAG_STDOUT: &str = "stdout";
 pub const FLAG_WASM_STACK_SIZE_KB: &str = "wasm-stack-size-kb";
 pub const FLAG_OUTPUT: &str = "output";
 pub const FLAG_FUZZ: &str = "fuzz";
+pub const FLAG_MAIN: &str = "main";
 pub const ROC_FILE: &str = "ROC_FILE";
 pub const ROC_DIR: &str = "ROC_DIR";
 pub const GLUE_DIR: &str = "GLUE_DIR";

@@ -149,6 +150,12 @@ pub fn build_app() -> Command {
 .action(ArgAction::SetTrue)
 .required(false);

+let flag_main = Arg::new(FLAG_MAIN)
+.long(FLAG_MAIN)
+.help("The .roc file of the main app/package module to resolve dependencies from")
+.value_parser(value_parser!(PathBuf))
+.required(false);
+
 let roc_file_to_run = Arg::new(ROC_FILE)
 .help("The .roc file of an app to run")
 .value_parser(value_parser!(PathBuf))

@@ -227,6 +234,7 @@ pub fn build_app() -> Command {
 )
 .subcommand(Command::new(CMD_TEST)
 .about("Run all top-level `expect`s in a main module and any modules it imports")
+.arg(flag_main.clone())
 .arg(flag_optimize.clone())
 .arg(flag_max_threads.clone())
 .arg(flag_opt_size.clone())

@@ -246,7 +254,7 @@ pub fn build_app() -> Command {
 )
 .arg(
 Arg::new(ROC_FILE)
-.help("The .roc file for the main module")
+.help("The .roc file to test")
 .value_parser(value_parser!(PathBuf))
 .required(false)
 .default_value(DEFAULT_ROC_FILENAME)

@@ -321,11 +329,12 @@ pub fn build_app() -> Command {
 .about(concatcp!("Print the Roc compiler’s version, which is currently ", VERSION)))
 .subcommand(Command::new(CMD_CHECK)
 .about("Check the code for problems, but don’t build or run it")
+.arg(flag_main.clone())
 .arg(flag_time.clone())
 .arg(flag_max_threads.clone())
 .arg(
 Arg::new(ROC_FILE)
-.help("The .roc file of an app to check")
+.help("The .roc file to check")
 .value_parser(value_parser!(PathBuf))
 .required(false)
 .default_value(DEFAULT_ROC_FILENAME),

@@ -496,6 +505,8 @@ pub fn test(matches: &ArgMatches, target: Target) -> io::Result<i32> {
 // TODO may need to determine this dynamically based on dev builds.
 let function_kind = FunctionKind::LambdaSet;

+let opt_main_path = matches.get_one::<PathBuf>(FLAG_MAIN);
+
 // Step 1: compile the app and generate the .o file
 let load_config = LoadConfig {
 target,

@@ -509,6 +520,7 @@ pub fn test(matches: &ArgMatches, target: Target) -> io::Result<i32> {
 let load_result = roc_load::load_and_monomorphize(
 arena,
 path.to_path_buf(),
+opt_main_path.cloned(),
 RocCacheDir::Persistent(cache::roc_cache_dir().as_path()),
 load_config,
 );

@@ -747,7 +759,7 @@ pub fn build(
 }

 // Rather than building an executable or library, we're building
-// a tarball so this code can be distributed via a HTTPS
+// a tarball so this code can be distributed via HTTPS
 let filename = roc_packaging::tarball::build(path, compression)?;
 let total_time_ms = start_time.elapsed().as_millis();
 let total_time = if total_time_ms > 1000 {
@@ -5,7 +5,7 @@ use roc_build::program::{check_file, CodeGenBackend};
 use roc_cli::{
 build_app, format_files, format_src, test, BuildConfig, FormatMode, CMD_BUILD, CMD_CHECK,
 CMD_DEV, CMD_DOCS, CMD_FORMAT, CMD_GEN_STUB_LIB, CMD_GLUE, CMD_PREPROCESS_HOST, CMD_REPL,
-CMD_RUN, CMD_TEST, CMD_VERSION, DIRECTORY_OR_FILES, FLAG_CHECK, FLAG_DEV, FLAG_LIB,
+CMD_RUN, CMD_TEST, CMD_VERSION, DIRECTORY_OR_FILES, FLAG_CHECK, FLAG_DEV, FLAG_LIB, FLAG_MAIN,
 FLAG_NO_LINK, FLAG_OUTPUT, FLAG_STDIN, FLAG_STDOUT, FLAG_TARGET, FLAG_TIME, GLUE_DIR,
 GLUE_SPEC, ROC_FILE,
 };

@@ -200,9 +200,12 @@ fn main() -> io::Result<()> {
 Some(n) => Threading::AtMost(*n),
 };

+let opt_main_path = matches.get_one::<PathBuf>(FLAG_MAIN);
+
 match check_file(
 &arena,
 roc_file_path.to_owned(),
+opt_main_path.cloned(),
 emit_timings,
 RocCacheDir::Persistent(cache::roc_cache_dir().as_path()),
 threading,
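The new `--main` flag above is ordinary clap plumbing: an optional PathBuf argument is declared once and each call site reads it with `get_one`, passing the resulting `Option<PathBuf>` through to loading or checking. A minimal sketch of that shape, assuming clap 4 as a dependency; the binary name "demo" and the printed messages are illustrative only:

    use clap::{value_parser, Arg, Command};
    use std::path::PathBuf;

    fn main() {
        // Declare an optional --main flag that takes a path, mirroring the pattern above.
        let matches = Command::new("demo")
            .arg(
                Arg::new("main")
                    .long("main")
                    .help("The .roc file of the main app/package module to resolve dependencies from")
                    .value_parser(value_parser!(PathBuf))
                    .required(false),
            )
            .get_matches();

        // `get_one` yields Option<&PathBuf>; `cloned()` produces the Option<PathBuf>
        // that a loader or checker call site would take by value.
        let opt_main_path: Option<PathBuf> = matches.get_one::<PathBuf>("main").cloned();

        match opt_main_path {
            Some(path) => println!("resolving package shorthands from {}", path.display()),
            None => println!("no --main given; falling back to a nearby main.roc"),
        }
    }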
@@ -1,4 +1,7 @@
-interface Base64 exposes [fromBytes, fromStr, toBytes, toStr] imports [Base64.Decode, Base64.Encode]
+interface Base64 exposes [fromBytes, fromStr, toBytes, toStr] imports []
+
+import Base64.Decode
+import Base64.Encode

 # base 64 encoding from a sequence of bytes
 fromBytes : List U8 -> Result Str [InvalidInput]
@@ -1,4 +1,6 @@
-interface Base64.Decode exposes [fromBytes] imports [Bytes.Decode.{ ByteDecoder, DecodeProblem }]
+interface Base64.Decode exposes [fromBytes] imports []
+
+import Bytes.Decode exposing [ByteDecoder, DecodeProblem]

 fromBytes : List U8 -> Result Str DecodeProblem
 fromBytes = \bytes ->
@@ -1,6 +1,9 @@
 interface Base64.Encode
 exposes [toBytes]
-imports [Bytes.Encode.{ ByteEncoder }]
+imports []
+
+import Bytes.Encode exposing [ByteEncoder]

 InvalidChar : U8
@@ -1,11 +1,11 @@
-app "args"
-packages { pf: "https://github.com/roc-lang/basic-cli/releases/download/0.9.0/oKWkaruh2zXxin_xfsYsCJobH1tO8_JvNkFzDwwzNUQ.tar.br" }
-imports [pf.Stdout, pf.Arg, pf.Task.{ Task }]
-provides [main] to pf
+app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.10.0/vNe6s9hWzoTZtFmNkvEICPErI9ptji_ySjicO6CkucY.tar.br" }
+
+import pf.Stdout
+import pf.Arg
+import pf.Task exposing [Task]

-main : Task {} I32
 main =
-args <- Arg.list |> Task.await
+args = Arg.list!
 parser =
 divCmd =
 Arg.succeed (\dividend -> \divisor -> Div (Num.toF64 dividend) (Num.toF64 divisor))

@@ -55,9 +55,8 @@ main =
 |> Num.toStr
 |> Stdout.line

-Err helpMenu ->
-{} <- Stdout.line helpMenu |> Task.await
-Task.err 1
+Err helpMenuErr ->
+Task.err (Exit 1 "unable to parse args: $(Inspect.toStr helpMenuErr)")

 runCmd = \cmd ->
 when cmd is
@@ -1,7 +1,8 @@
-app "countdown"
-packages { pf: "https://github.com/roc-lang/basic-cli/releases/download/0.9.0/oKWkaruh2zXxin_xfsYsCJobH1tO8_JvNkFzDwwzNUQ.tar.br" }
-imports [pf.Stdin, pf.Stdout, pf.Task.{ await, loop }]
-provides [main] to pf
+app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.10.0/vNe6s9hWzoTZtFmNkvEICPErI9ptji_ySjicO6CkucY.tar.br" }
+
+import pf.Stdin
+import pf.Stdout
+import pf.Task exposing [await, loop]

 main =
 _ <- await (Stdout.line "\nLet's count down from 3 together - all you have to do is press <ENTER>.")
@@ -1,21 +1,20 @@
-app "echo"
-packages { pf: "https://github.com/roc-lang/basic-cli/releases/download/0.9.0/oKWkaruh2zXxin_xfsYsCJobH1tO8_JvNkFzDwwzNUQ.tar.br" }
-imports [pf.Stdin, pf.Stdout, pf.Task.{ Task }]
-provides [main] to pf
+app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.10.0/vNe6s9hWzoTZtFmNkvEICPErI9ptji_ySjicO6CkucY.tar.br" }
+
+import pf.Stdin
+import pf.Stdout
+import pf.Task exposing [Task]

-main : Task {} I32
 main =
 _ <- Task.await (Stdout.line "🗣 Shout into this cave and hear the echo! 👂👂👂")

 Task.loop {} tick

-tick : {} -> Task [Step {}, Done {}] *
+tick : {} -> Task [Step {}, Done {}] _
 tick = \{} ->
-shout <- Task.await Stdin.line
-
-when shout is
-Input s -> Stdout.line (echo s) |> Task.map Step
-End -> Stdout.line (echo "Received end of input (EOF).") |> Task.map Done
+when Stdin.line |> Task.result! is
+Ok str -> Stdout.line (echo str) |> Task.map Step
+Err (StdinErr EndOfFile) -> Stdout.line (echo "Received end of input (EOF).") |> Task.map Done
+Err (StdinErr err) -> Stdout.line (echo "Unable to read input $(Inspect.toStr err)") |> Task.map Done

 echo : Str -> Str
 echo = \shout ->
@@ -1,9 +1,10 @@
-app "env"
-packages { pf: "https://github.com/roc-lang/basic-cli/releases/download/0.9.0/oKWkaruh2zXxin_xfsYsCJobH1tO8_JvNkFzDwwzNUQ.tar.br" }
-imports [pf.Stdout, pf.Stderr, pf.Env, pf.Task.{ Task }]
-provides [main] to pf
+app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.10.0/vNe6s9hWzoTZtFmNkvEICPErI9ptji_ySjicO6CkucY.tar.br" }
+
+import pf.Stdout
+import pf.Stderr
+import pf.Env
+import pf.Task exposing [Task]

-main : Task {} I32
 main =
 task =
 Env.decode "EDITOR"
crates/cli/tests/cli/fileBROKEN.roc (new file, 36 lines)

@@ -0,0 +1,36 @@
+app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.10.0/vNe6s9hWzoTZtFmNkvEICPErI9ptji_ySjicO6CkucY.tar.br" }
+
+import pf.Stdout
+import pf.Task exposing [Task]
+import pf.File
+import pf.Path
+import pf.Env
+import pf.Dir
+
+main : Task {} [Exit I32 Str]_
+main =
+path = Path.fromStr "out.txt"
+
+task =
+cwd = Env.cwd!
+Stdout.line! "cwd: $(Path.display cwd)"
+dirEntries = Dir.list! cwd
+contentsStr = Str.joinWith (List.map dirEntries Path.display) "\n "
+Stdout.line! "Directory contents:\n $(contentsStr)\n"
+Stdout.line! "Writing a string to out.txt"
+File.writeUtf8! path "a string!"
+contents = File.readUtf8! path
+Stdout.line! "I read the file back. Its contents: \"$(contents)\""
+
+when Task.result! task is
+Ok {} -> Stdout.line! "Successfully wrote a string to out.txt"
+Err err ->
+msg =
+when err is
+FileWriteErr _ PermissionDenied -> "PermissionDenied"
+FileWriteErr _ Unsupported -> "Unsupported"
+FileWriteErr _ (Unrecognized _ other) -> other
+FileReadErr _ _ -> "Error reading file"
+_ -> "Uh oh, there was an error!"
+
+Task.err (Exit 1 msg)
crates/cli/tests/cli/form.roc (new file, 13 lines)

@@ -0,0 +1,13 @@
+app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.10.0/vNe6s9hWzoTZtFmNkvEICPErI9ptji_ySjicO6CkucY.tar.br" }
+
+import pf.Stdin
+import pf.Stdout
+import pf.Task exposing [await, Task]
+
+main =
+Stdout.line! "What's your first name?"
+firstName = Stdin.line!
+Stdout.line! "What's your last name?"
+lastName = Stdin.line!
+
+Stdout.line "Hi, $(firstName) $(lastName)! 👋"
crates/cli/tests/cli/http-get.roc (new file, 24 lines)

@@ -0,0 +1,24 @@
+app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.10.0/vNe6s9hWzoTZtFmNkvEICPErI9ptji_ySjicO6CkucY.tar.br" }
+
+import pf.Http
+import pf.Task exposing [Task]
+import pf.Stdout
+
+main =
+request = {
+method: Get,
+headers: [],
+url: "http://www.example.com",
+mimeType: "",
+body: [],
+timeout: TimeoutMilliseconds 5000,
+}
+
+resp = Http.send! request
+
+output =
+when resp |> Http.handleStringResponse is
+Err err -> crash (Http.errorToString err)
+Ok body -> body
+
+Stdout.line output
crates/cli/tests/cli/ingested-file-bytes-no-ann.roc (new file, 11 lines)

@@ -0,0 +1,11 @@
+app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.10.0/vNe6s9hWzoTZtFmNkvEICPErI9ptji_ySjicO6CkucY.tar.br" }
+
+import pf.Stdout
+import "ingested-file.roc" as license
+
+main =
+license
+|> List.map Num.toU64
+|> List.sum
+|> Num.toStr
+|> Stdout.line!
crates/cli/tests/cli/ingested-file-bytes.roc (new file, 12 lines)

@@ -0,0 +1,12 @@
+app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.10.0/vNe6s9hWzoTZtFmNkvEICPErI9ptji_ySjicO6CkucY.tar.br" }
+
+import pf.Stdout
+import "ingested-file.roc" as license : _ # A type hole can also be used here.
+
+main =
+# Due to how license is used, it will be a List U8.
+license
+|> List.map Num.toU64
+|> List.sum
+|> Num.toStr
+|> Stdout.line!
crates/cli/tests/cli/ingested-file.roc (new file, 7 lines)

@@ -0,0 +1,7 @@
+app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.10.0/vNe6s9hWzoTZtFmNkvEICPErI9ptji_ySjicO6CkucY.tar.br" }
+
+import pf.Stdout
+import "ingested-file.roc" as ownCode : Str
+
+main =
+Stdout.line! "\nThis roc file can print its own source code. The source is:\n\n$(ownCode)"
@@ -1,15 +1,12 @@
-app "example"
-packages {
-cli: "https://github.com/roc-lang/basic-cli/releases/download/0.9.0/oKWkaruh2zXxin_xfsYsCJobH1tO8_JvNkFzDwwzNUQ.tar.br",
-parser: "https://github.com/lukewilliamboswell/roc-parser/releases/download/0.5.2/9VrPjwfQQ1QeSL3CfmWr2Pr9DESdDIXy97pwpuq84Ck.tar.br",
-}
-imports [
-cli.Stdout,
-cli.Stderr,
-parser.Core.{ Parser, buildPrimitiveParser, many },
-parser.String.{ parseStr },
-]
-provides [main] to cli
+app [main] {
+cli: platform "https://github.com/roc-lang/basic-cli/releases/download/0.10.0/vNe6s9hWzoTZtFmNkvEICPErI9ptji_ySjicO6CkucY.tar.br",
+parser: "https://github.com/lukewilliamboswell/roc-parser/releases/download/0.5.2/9VrPjwfQQ1QeSL3CfmWr2Pr9DESdDIXy97pwpuq84Ck.tar.br",
+}
+
+import cli.Stdout
+import cli.Stderr
+import parser.Core exposing [Parser, buildPrimitiveParser, many]
+import parser.String exposing [parseStr]

 main =
 lettersInput = "AAAiBByAABBwBtCCCiAyArBBx"
@@ -1,22 +1,18 @@
-app "example"
-packages {
-pf: "https://github.com/roc-lang/basic-cli/releases/download/0.9.0/oKWkaruh2zXxin_xfsYsCJobH1tO8_JvNkFzDwwzNUQ.tar.br",
-parser: "https://github.com/lukewilliamboswell/roc-parser/releases/download/0.5.2/9VrPjwfQQ1QeSL3CfmWr2Pr9DESdDIXy97pwpuq84Ck.tar.br",
-}
-imports [
-pf.Stdout,
-pf.Stderr,
-pf.Task.{ Task },
-parser.Core.{ Parser, map, keep },
-parser.String.{ strFromUtf8 },
-parser.CSV.{ CSV },
-]
-provides [main] to pf
+app [main] {
+pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.10.0/vNe6s9hWzoTZtFmNkvEICPErI9ptji_ySjicO6CkucY.tar.br",
+parser: "https://github.com/lukewilliamboswell/roc-parser/releases/download/0.5.2/9VrPjwfQQ1QeSL3CfmWr2Pr9DESdDIXy97pwpuq84Ck.tar.br",
+}
+
+import pf.Stdout
+import pf.Stderr
+import pf.Task exposing [Task]
+import parser.Core exposing [Parser, map, keep]
+import parser.String exposing [strFromUtf8]
+import parser.CSV exposing [CSV]

 input : Str
 input = "Airplane!,1980,\"Robert Hays,Julie Hagerty\"\r\nCaddyshack,1980,\"Chevy Chase,Rodney Dangerfield,Ted Knight,Michael O'Keefe,Bill Murray\""

-main : Task {} *
 main =
 when CSV.parseStr movieInfoParser input is
 Ok movies ->
@@ -54,6 +54,7 @@ mod cli_run {
 const OPTIMIZE_FLAG: &str = concatcp!("--", roc_cli::FLAG_OPTIMIZE);
 const LINKER_FLAG: &str = concatcp!("--", roc_cli::FLAG_LINKER);
 const CHECK_FLAG: &str = concatcp!("--", roc_cli::FLAG_CHECK);
+#[allow(dead_code)]
 const PREBUILT_PLATFORM: &str = concatcp!("--", roc_cli::FLAG_PREBUILT);
 #[allow(dead_code)]
 const TARGET_FLAG: &str = concatcp!("--", roc_cli::FLAG_TARGET);

@@ -646,6 +647,78 @@ mod cli_run {
 );
 }

+#[test]
+#[cfg_attr(windows, ignore)]
+fn test_module_imports_pkg_w_flag() {
+test_roc_expect(
+"crates/cli/tests/module_imports_pkg",
+"Module.roc",
+&["--main", "tests/module_imports_pkg/app.roc"],
+indoc!(
+r#"
+0 failed and 1 passed in <ignored for test> ms.
+"#
+),
+)
+}
+
+#[test]
+#[cfg_attr(windows, ignore)]
+fn test_module_imports_pkg_no_flag() {
+test_roc_expect(
+"crates/cli/tests/module_imports_pkg",
+"Module.roc",
+&[],
+indoc!(
+r#"
+── UNRECOGNIZED PACKAGE in tests/module_imports_pkg/Module.roc ─────────────────
+
+This module is trying to import from `pkg`:
+
+3│ import pkg.Foo
+^^^^^^^
+
+A lowercase name indicates a package shorthand, but I don't know which
+packages are available.
+
+When checking a module directly, I look for a `main.roc` app or
+package to resolve shorthands from.
+
+You can create it, or specify an existing one with the --main flag."#
+),
+)
+}
+
+#[test]
+#[cfg_attr(windows, ignore)]
+fn test_module_imports_unknown_pkg() {
+test_roc_expect(
+"crates/cli/tests/module_imports_pkg",
+"ImportsUnknownPkg.roc",
+&["--main", "tests/module_imports_pkg/app.roc"],
+indoc!(
+r#"
+── UNRECOGNIZED PACKAGE in tests/module_imports_pkg/ImportsUnknownPkg.roc ──────
+
+This module is trying to import from `cli`:
+
+3│ import cli.Foo
+^^^^^^^
+
+A lowercase name indicates a package shorthand, but I don't recognize
+this one. Did you mean one of these?
+
+pkg
+
+Note: I'm using the following module to resolve package shorthands:
+
+tests/module_imports_pkg/app.roc
+
+You can specify a different one with the --main flag."#
+),
+)
+}
+
 #[test]
 #[cfg_attr(windows, ignore)]
 fn transitive_expects() {
@@ -715,29 +788,6 @@ mod cli_run {
 test_roc_app_slim("examples/gui", "hello-guiBROKEN.roc", "", UseValgrind::No)
 }

-#[cfg_attr(windows, ignore)] // flaky error; issue #5024
-#[serial(breakout)]
-#[test]
-fn breakout() {
-test_roc_app_slim(
-"examples/gui/breakout",
-"breakoutBROKEN.roc",
-"",
-UseValgrind::No,
-)
-}
-
-#[test]
-#[serial(breakout)]
-fn breakout_hello_gui() {
-test_roc_app_slim(
-"examples/gui/breakout",
-"hello-guiBROKEN.roc",
-"",
-UseValgrind::No,
-)
-}
-
 #[test]
 #[cfg_attr(windows, ignore)]
 fn quicksort() {

@@ -777,7 +827,7 @@ mod cli_run {
 #[cfg_attr(windows, ignore = "missing __udivdi3 and some other symbols")]
 #[serial(cli_platform)]
 fn cli_args_check() {
-let path = file_path_from_root("examples/cli", "argsBROKEN.roc");
+let path = file_path_from_root("crates/cli/tests/cli", "argsBROKEN.roc");
 let out = run_roc([CMD_CHECK, path.to_str().unwrap()], &[], &[]);
 assert!(out.status.success());
 }

@@ -804,7 +854,7 @@ mod cli_run {
 #[cfg_attr(windows, ignore)]
 #[serial(cli_platform)]
 fn cli_countdown_check() {
-let path = file_path_from_root("examples/cli", "countdown.roc");
+let path = file_path_from_root("crates/cli/tests/cli", "countdown.roc");
 let out = run_roc([CMD_CHECK, path.to_str().unwrap()], &[], &[]);
 assert!(out.status.success());
 }

@@ -813,7 +863,7 @@ mod cli_run {
 #[cfg_attr(windows, ignore)]
 #[serial(cli_platform)]
 fn cli_echo_check() {
-let path = file_path_from_root("examples/cli", "echo.roc");
+let path = file_path_from_root("crates/cli/tests/cli", "echo.roc");
 let out = run_roc([CMD_CHECK, path.to_str().unwrap()], &[], &[]);
 assert!(out.status.success());
 }

@@ -822,7 +872,7 @@ mod cli_run {
 #[cfg_attr(windows, ignore)]
 #[serial(cli_platform)]
 fn cli_file_check() {
-let path = file_path_from_root("examples/cli", "fileBROKEN.roc");
+let path = file_path_from_root("crates/cli/tests/cli", "fileBROKEN.roc");
 let out = run_roc([CMD_CHECK, path.to_str().unwrap()], &[], &[]);
 assert!(out.status.success());
 }

@@ -831,7 +881,7 @@ mod cli_run {
 #[cfg_attr(windows, ignore)]
 #[serial(cli_platform)]
 fn cli_form_check() {
-let path = file_path_from_root("examples/cli", "form.roc");
+let path = file_path_from_root("crates/cli/tests/cli", "form.roc");
 let out = run_roc([CMD_CHECK, path.to_str().unwrap()], &[], &[]);
 assert!(out.status.success());
 }

@@ -840,7 +890,7 @@ mod cli_run {
 #[cfg_attr(windows, ignore)]
 #[serial(cli_platform)]
 fn cli_http_get_check() {
-let path = file_path_from_root("examples/cli", "http-get.roc");
+let path = file_path_from_root("crates/cli/tests/cli", "http-get.roc");
 let out = run_roc([CMD_CHECK, path.to_str().unwrap()], &[], &[]);
 assert!(out.status.success());
 }
@@ -896,27 +946,12 @@ mod cli_run {
 test_roc_app_slim("examples/swiftui", "main.roc", "", UseValgrind::No)
 }

-#[test]
-#[cfg_attr(windows, ignore)]
-fn static_site_gen() {
-test_roc_app(
-"examples/static-site-gen",
-"static-site.roc",
-&[],
-&[Arg::ExamplePath("input"), Arg::ExamplePath("output")],
-&[],
-"Processed 4 files with 3 successes and 0 errors\n",
-UseValgrind::No,
-TestCliCommands::Run,
-)
-}
-
 #[test]
 #[serial(cli_platform)]
 #[cfg_attr(windows, ignore)]
 fn with_env_vars() {
 test_roc_app(
-"examples/cli",
+"crates/cli/tests/cli",
 "env.roc",
 &[],
 &[],

@@ -938,28 +973,16 @@ mod cli_run {
 #[cfg_attr(windows, ignore)]
 fn ingested_file() {
 test_roc_app(
-"examples/cli",
+"crates/cli/tests/cli",
 "ingested-file.roc",
 &[],
 &[],
 &[],
-indoc!(
-r#"
-This roc file can print its own source code. The source is:
-
-app "ingested-file"
-packages { pf: "https://github.com/roc-lang/basic-cli/releases/download/0.9.0/oKWkaruh2zXxin_xfsYsCJobH1tO8_JvNkFzDwwzNUQ.tar.br" }
-imports [
-pf.Stdout,
-"ingested-file.roc" as ownCode : Str,
-]
-provides [main] to pf
-
-main =
-Stdout.line "\nThis roc file can print its own source code. The source is:\n\n$(ownCode)"
-
-"#
-),
+format!(
+"\nThis roc file can print its own source code. The source is:\n\n{}\n",
+include_str!("cli/ingested-file.roc")
+)
+.as_str(),
 UseValgrind::No,
 TestCliCommands::Run,
 )
@ -970,12 +993,27 @@ mod cli_run {
|
||||||
#[cfg_attr(windows, ignore)]
|
#[cfg_attr(windows, ignore)]
|
||||||
fn ingested_file_bytes() {
|
fn ingested_file_bytes() {
|
||||||
test_roc_app(
|
test_roc_app(
|
||||||
"examples/cli",
|
"crates/cli/tests/cli",
|
||||||
"ingested-file-bytes.roc",
|
"ingested-file-bytes.roc",
|
||||||
&[],
|
&[],
|
||||||
&[],
|
&[],
|
||||||
&[],
|
&[],
|
||||||
"162088\n",
|
"27101\n",
|
||||||
|
UseValgrind::No,
|
||||||
|
TestCliCommands::Run,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
#[test]
|
||||||
|
#[serial(cli_platform)]
|
||||||
|
#[cfg_attr(windows, ignore)]
|
||||||
|
fn ingested_file_bytes_no_ann() {
|
||||||
|
test_roc_app(
|
||||||
|
"crates/cli/tests/cli",
|
||||||
|
"ingested-file-bytes-no-ann.roc",
|
||||||
|
&[],
|
||||||
|
&[],
|
||||||
|
&[],
|
||||||
|
"27101\n",
|
||||||
UseValgrind::No,
|
UseValgrind::No,
|
||||||
TestCliCommands::Run,
|
TestCliCommands::Run,
|
||||||
)
|
)
|
||||||
|
@@ -986,8 +1024,8 @@ mod cli_run {
     #[cfg_attr(windows, ignore)]
     fn parse_movies_csv() {
         test_roc_app_slim(
-            "examples/parser",
-            "parse-movies-csv.roc",
+            "crates/cli/tests/cli",
+            "parser-movies-csv.roc",
             "2 movies were found:\n\nThe movie 'Airplane!' was released in 1980 and stars Robert Hays and Julie Hagerty\nThe movie 'Caddyshack' was released in 1980 and stars Chevy Chase, Rodney Dangerfield, Ted Knight, Michael O'Keefe and Bill Murray\n\nParse success!\n\n",
             UseValgrind::No,
         )
@@ -998,8 +1036,8 @@ mod cli_run {
     #[cfg_attr(windows, ignore)]
     fn parse_letter_counts() {
         test_roc_app_slim(
-            "examples/parser",
-            "letter-counts.roc",
+            "crates/cli/tests/cli",
+            "parser-letter-counts.roc",
             "I counted 7 letter A's!\n",
             UseValgrind::No,
         )
@@ -1025,20 +1063,21 @@ mod cli_run {
    // TODO not sure if this cfg should still be here: #[cfg(not(debug_assertions))]
    // this is for testing the benchmarks, to perform proper benchmarks see crates/cli/benches/README.md
    mod test_benchmarks {
+        #[allow(unused_imports)]
        use super::{TestCliCommands, UseValgrind};
        use cli_utils::helpers::cli_testing_dir;
 
+        #[allow(unused_imports)]
        use super::{check_output_with_stdin, OPTIMIZE_FLAG, PREBUILT_PLATFORM};
 
+        #[allow(unused_imports)]
        use std::{path::Path, sync::Once};
 
-        static BENCHMARKS_BUILD_PLATFORM: Once = Once::new();
-
        fn test_benchmark(
            roc_filename: &str,
            stdin: &[&str],
            expected_ending: &str,
-            use_valgrind: UseValgrind,
+            _use_valgrind: UseValgrind,
        ) {
            let file_name = cli_testing_dir("benchmarks").join(roc_filename);
 
@@ -1062,15 +1101,18 @@ mod cli_run {
        }
 
            #[cfg(all(not(feature = "wasm32-cli-run"), not(feature = "i386-cli-run")))]
-            check_output_regular(&file_name, stdin, expected_ending, use_valgrind);
+            check_output_regular(&file_name, stdin, expected_ending, _use_valgrind);
 
            #[cfg(feature = "wasm32-cli-run")]
            check_output_wasm(&file_name, stdin, expected_ending);
 
            #[cfg(feature = "i386-cli-run")]
-            check_output_i386(&file_name, stdin, expected_ending, use_valgrind);
+            check_output_i386(&file_name, stdin, expected_ending, _use_valgrind);
        }
 
+        #[cfg(all(not(feature = "wasm32-cli-run"), not(feature = "i386-cli-run")))]
+        static BENCHMARKS_BUILD_PLATFORM: Once = Once::new();
+
        #[cfg(all(not(feature = "wasm32-cli-run"), not(feature = "i386-cli-run")))]
        fn check_output_regular(
            file_name: &Path,
@@ -1439,7 +1481,7 @@ mod cli_run {
            r#"
            ── UNUSED IMPORT in ...nown_bad/UnusedImportButWithALongFileNameForTesting.roc ─
 
-            Nothing from Symbol is used in this module.
+            Symbol is imported but not used.
 
            3│  imports [Symbol.{ Ident }]
                         ^^^^^^^^^^^^^^^^
@@ -1483,7 +1525,7 @@ mod cli_run {
            r#"
            ── UNUSED IMPORT in tests/known_bad/UnusedImport.roc ───────────────────────────
 
-            Nothing from Symbol is used in this module.
+            Symbol is imported but not used.
 
            3│  imports [Symbol.{ Ident }]
                         ^^^^^^^^^^^^^^^^
@@ -1,6 +1,4 @@
-app "formatted"
-    packages { pf: "platform/main.roc" } imports []
-    provides [main] to pf
+app [main] { pf: "platform/main.roc" }
 
 main : Str
 main = Dep1.value1 {}
@@ -1,6 +1,4 @@
-app "formatted"
-    packages { pf: "platform/main.roc" }
-    provides [main] to pf
+app [main] { pf: "platform/main.roc" }
 
 main : Str
 main = Dep1.value1 {}
@@ -1,6 +1,4 @@
-app "formatted"
-    packages { pf: "platform/main.roc" } imports []
-    provides [main] to pf
+app [main] { pf: "platform/main.roc" }
 
 main : Str
 main = Dep1.value1 {}
@@ -1,3 +1 @@
-package "csv"
-    exposes [Csv]
-    packages {}
+package [Csv] {}
@@ -1,3 +1 @@
-package "json"
-    exposes [JsonParser]
-    packages {}
+package [JsonParser] {}
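For readers following the fixture churn above: these files are only being migrated to the new Roc header syntax, where the old multi-line `app` and `package` headers collapse into a single line that names the provided values and the packages record. A minimal before/after sketch, using the same placeholder platform path as the fixtures themselves:

    # old header style (removed in this diff)
    app "formatted"
        packages { pf: "platform/main.roc" }
        provides [main] to pf

    # new header style (added in this diff)
    app [main] { pf: "platform/main.roc" }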
@@ -0,0 +1,7 @@
+module [valueFromPkg]
+
+import cli.Foo
+
+valueFromPkg = Foo.foo
+
+expect valueFromPkg == "Foo"

crates/cli/tests/module_imports_pkg/Module.roc (new file)
@@ -0,0 +1,7 @@
+module [valueFromPkg]
+
+import pkg.Foo
+
+valueFromPkg = Foo.foo
+
+expect valueFromPkg == "Foo"

crates/cli/tests/module_imports_pkg/app.roc (new file)
@@ -0,0 +1,8 @@
+app [main] {
+    pkg: "./pkg/main.roc"
+}
+
+import Module
+
+main =
+    Module.valueFromPkg

crates/cli/tests/module_imports_pkg/pkg/Foo.roc (new file)
@@ -0,0 +1,3 @@
+module [foo]
+
+foo = "Foo"

crates/cli/tests/module_imports_pkg/pkg/main.roc (new file)
@@ -0,0 +1 @@
+package [Foo] {}
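Taken together, the new `module_imports_pkg` fixtures exercise package-qualified imports from inside a plain module: the app declares a `pkg` package shorthand, and `Module.roc` reaches through it with `import pkg.Foo`. A condensed sketch of the round trip, with contents taken directly from the fixtures above:

    # app.roc
    app [main] { pkg: "./pkg/main.roc" }
    import Module
    main = Module.valueFromPkg

    # Module.roc
    module [valueFromPkg]
    import pkg.Foo
    valueFromPkg = Foo.foo

    # pkg/Foo.roc, exposed via `package [Foo] {}` in pkg/main.roc
    module [foo]
    foo = "Foo"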
@@ -50,8 +50,15 @@ pub fn legacy_host_file(target: Target, platform_main_roc: &Path) -> PathBuf {
         .replace(roc_linker::PRECOMPILED_HOST_EXT, lib_ext);
 
     let lib_path = platform_main_roc.with_file_name(file_name);
+
+    let default_host_path: PathBuf = platform_main_roc
+        .with_file_name("libhost")
+        .with_extension(lib_ext);
+
     if lib_path.exists() {
         lib_path
+    } else if default_host_path.exists() {
+        default_host_path
     } else {
         let obj_ext = target.object_file_ext();
         lib_path.with_extension(obj_ext)
@@ -526,7 +533,7 @@ pub fn rebuild_host(
        // on windows, we need the nightly toolchain so we can use `-Z export-executable-symbols`
        // using `+nightly` only works when running cargo through rustup
        let mut cmd = rustup();
-        cmd.args(["run", "nightly-2023-08-20", "cargo"]);
+        cmd.args(["run", "nightly-2023-12-21", "cargo"]);
 
        cmd
    } else {
|
||||||
// "-lgcc", // TODO will eventually need compiler_rt from gcc or something - see https://github.com/roc-lang/roc/pull/554#discussion_r496370840
|
// "-lgcc", // TODO will eventually need compiler_rt from gcc or something - see https://github.com/roc-lang/roc/pull/554#discussion_r496370840
|
||||||
"-framework",
|
"-framework",
|
||||||
"Security",
|
"Security",
|
||||||
|
"-framework",
|
||||||
|
"SystemConfiguration",
|
||||||
// Output
|
// Output
|
||||||
"-o",
|
"-o",
|
||||||
output_path.to_str().unwrap(), // app
|
output_path.to_str().unwrap(), // app
|
||||||
|
|
|
@@ -735,9 +735,14 @@ pub fn build_file<'a>(
    let compilation_start = Instant::now();
 
    // Step 1: compile the app and generate the .o file
-    let loaded =
-        roc_load::load_and_monomorphize(arena, app_module_path.clone(), roc_cache_dir, load_config)
-            .map_err(|e| BuildFileError::from_mono_error(e, compilation_start))?;
+    let loaded = roc_load::load_and_monomorphize(
+        arena,
+        app_module_path.clone(),
+        None,
+        roc_cache_dir,
+        load_config,
+    )
+    .map_err(|e| BuildFileError::from_mono_error(e, compilation_start))?;
 
    build_loaded_file(
        arena,
@@ -1187,6 +1192,7 @@ fn build_and_preprocess_host_lowlevel(
 pub fn check_file<'a>(
    arena: &'a Bump,
    roc_file_path: PathBuf,
+    opt_main_path: Option<PathBuf>,
    emit_timings: bool,
    roc_cache_dir: RocCacheDir<'_>,
    threading: Threading,
@@ -1209,8 +1215,13 @@ pub fn check_file<'a>(
        threading,
        exec_mode: ExecutionMode::Check,
    };
-    let mut loaded =
-        roc_load::load_and_typecheck(arena, roc_file_path, roc_cache_dir, load_config)?;
+    let mut loaded = roc_load::load_and_typecheck(
+        arena,
+        roc_file_path,
+        opt_main_path,
+        roc_cache_dir,
+        load_config,
+    )?;
 
    let buf = &mut String::with_capacity(1024);
 
@@ -1292,6 +1303,7 @@ pub fn build_str_test<'a>(
        PathBuf::from("valgrind_test.roc"),
        app_module_source,
        app_module_path.to_path_buf(),
+        None,
        roc_cache_dir,
        load_config,
    )
@@ -80,7 +80,7 @@ It's one thing to actually write these functions, it's _another_ thing to let th
 
 ## Specifying how we pass args to the function
 
-### builtins/mono/src/borrow.rs
+### builtins/mono/src/inc_dec.rs
 
 After we have all of this, we need to specify if the arguments we're passing are owned, borrowed or irrelevant. Towards the bottom of this file, add a new case for your builtin and specify each arg. Be sure to read the comment, as it explains this in more detail.
 
|
@@ -1,5 +1,6 @@
 const std = @import("std");
 const utils = @import("utils.zig");
+const str = @import("str.zig");
 const UpdateMode = utils.UpdateMode;
 const mem = std.mem;
 const math = std.math;
|
@@ -1033,3 +1034,34 @@ test "listConcat: non-unique with unique overlapping" {
 
     try expect(concatted.eql(wanted));
 }
+
+pub fn listConcatUtf8(
+    list: RocList,
+    string: str.RocStr,
+) callconv(.C) RocList {
+    if (string.len() == 0) {
+        return list;
+    } else {
+        const combined_length = list.len() + string.len();
+
+        // List U8 has alignment 1 and element_width 1
+        var result = list.reallocate(1, combined_length, 1);
+        // We just allocated combined_length, which is > 0 because string.len() > 0
+        var bytes = result.bytes orelse unreachable;
+        @memcpy(bytes[list.len()..combined_length], string.asU8ptr()[0..string.len()]);
+
+        return result;
+    }
+}
+
+test "listConcatUtf8" {
+    const list = RocList.fromSlice(u8, &[_]u8{ 1, 2, 3, 4 });
+    defer list.decref(1);
+    const string_bytes = "🐦";
+    const string = str.RocStr.init(string_bytes, string_bytes.len);
+    defer string.decref();
+    const ret = listConcatUtf8(list, string);
+    const expected = RocList.fromSlice(u8, &[_]u8{ 1, 2, 3, 4, 240, 159, 144, 166 });
+    defer expected.decref(1);
+    try expect(ret.eql(expected));
+}
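At the Roc level, this new bitcode function backs `List.concatUtf8`, which appends a string's UTF-8 bytes onto a `List U8`. A minimal sketch of the observable behavior, mirroring the bytes used in the Zig test above:

    expect List.concatUtf8 [1, 2, 3, 4] "🐦" == [1, 2, 3, 4, 240, 159, 144, 166]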
|
@@ -85,6 +85,7 @@ comptime {
    exportListFn(list.listCapacity, "capacity");
    exportListFn(list.listAllocationPtr, "allocation_ptr");
    exportListFn(list.listReleaseExcessCapacity, "release_excess_capacity");
+    exportListFn(list.listConcatUtf8, "concat_utf8");
 }
 
 // Num Module
|
@@ -1,6 +1,4 @@
-interface Bool
-    exposes [Bool, Eq, true, false, and, or, not, isEq, isNotEq]
-    imports []
+module [Bool, Eq, true, false, and, or, not, isEq, isNotEq]
 
 ## Defines a type that can be compared for total equality.
 ##
|
@@ -2,9 +2,7 @@
 ## - Holding unknown Roc types when developing [platforms](https://github.com/roc-lang/roc/wiki/Roc-concepts-explained#platform).
 ## - To improve performance in rare cases.
 ##
-interface Box
-    exposes [box, unbox]
-    imports []
+module [box, unbox]
 
 ## Allocates a value on the heap. Boxing is an expensive process as it copies
 ## the value from the stack to the heap. This may provide a performance
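Since the `Box` doc above describes boxing as copying a value from the stack to the heap and unboxing as the inverse, here is a one-line usage sketch of the module's two exposed functions (the string literal is mine, chosen only for illustration):

    expect Box.unbox (Box.box "hello, box") == "hello, box"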
|
@ -1,55 +1,53 @@
|
||||||
interface Decode
|
module [
|
||||||
exposes [
|
DecodeError,
|
||||||
DecodeError,
|
DecodeResult,
|
||||||
DecodeResult,
|
Decoder,
|
||||||
Decoder,
|
Decoding,
|
||||||
Decoding,
|
DecoderFormatting,
|
||||||
DecoderFormatting,
|
decoder,
|
||||||
decoder,
|
u8,
|
||||||
u8,
|
u16,
|
||||||
u16,
|
u32,
|
||||||
u32,
|
u64,
|
||||||
u64,
|
u128,
|
||||||
u128,
|
i8,
|
||||||
i8,
|
i16,
|
||||||
i16,
|
i32,
|
||||||
i32,
|
i64,
|
||||||
i64,
|
i128,
|
||||||
i128,
|
f32,
|
||||||
f32,
|
f64,
|
||||||
f64,
|
dec,
|
||||||
dec,
|
bool,
|
||||||
bool,
|
string,
|
||||||
string,
|
list,
|
||||||
list,
|
record,
|
||||||
record,
|
tuple,
|
||||||
tuple,
|
custom,
|
||||||
custom,
|
decodeWith,
|
||||||
decodeWith,
|
fromBytesPartial,
|
||||||
fromBytesPartial,
|
fromBytes,
|
||||||
fromBytes,
|
mapResult,
|
||||||
mapResult,
|
]
|
||||||
]
|
|
||||||
imports [
|
import List
|
||||||
List,
|
import Result exposing [Result]
|
||||||
Result.{ Result },
|
import Num exposing [
|
||||||
Num.{
|
U8,
|
||||||
U8,
|
U16,
|
||||||
U16,
|
U32,
|
||||||
U32,
|
U64,
|
||||||
U64,
|
U128,
|
||||||
U128,
|
I8,
|
||||||
I8,
|
I16,
|
||||||
I16,
|
I32,
|
||||||
I32,
|
I64,
|
||||||
I64,
|
I128,
|
||||||
I128,
|
F32,
|
||||||
F32,
|
F64,
|
||||||
F64,
|
Dec,
|
||||||
Dec,
|
]
|
||||||
},
|
import Bool exposing [Bool]
|
||||||
Bool.{ Bool },
|
|
||||||
]
|
|
||||||
|
|
||||||
## Error types when decoding a `List U8` of utf-8 bytes using a [Decoder]
|
## Error types when decoding a `List U8` of utf-8 bytes using a [Decoder]
|
||||||
DecodeError : [TooShort]
|
DecodeError : [TooShort]
|
||||||
|
|
|
@ -1,43 +1,41 @@
|
||||||
interface Dict
|
module [
|
||||||
exposes [
|
Dict,
|
||||||
Dict,
|
empty,
|
||||||
empty,
|
withCapacity,
|
||||||
withCapacity,
|
single,
|
||||||
single,
|
clear,
|
||||||
clear,
|
capacity,
|
||||||
capacity,
|
reserve,
|
||||||
reserve,
|
releaseExcessCapacity,
|
||||||
releaseExcessCapacity,
|
len,
|
||||||
len,
|
isEmpty,
|
||||||
isEmpty,
|
get,
|
||||||
get,
|
contains,
|
||||||
contains,
|
insert,
|
||||||
insert,
|
remove,
|
||||||
remove,
|
update,
|
||||||
update,
|
walk,
|
||||||
walk,
|
walkUntil,
|
||||||
walkUntil,
|
keepIf,
|
||||||
keepIf,
|
dropIf,
|
||||||
dropIf,
|
toList,
|
||||||
toList,
|
fromList,
|
||||||
fromList,
|
keys,
|
||||||
keys,
|
values,
|
||||||
values,
|
insertAll,
|
||||||
insertAll,
|
keepShared,
|
||||||
keepShared,
|
removeAll,
|
||||||
removeAll,
|
map,
|
||||||
map,
|
joinMap,
|
||||||
joinMap,
|
]
|
||||||
]
|
|
||||||
imports [
|
import Bool exposing [Bool, Eq]
|
||||||
Bool.{ Bool, Eq },
|
import Result exposing [Result]
|
||||||
Result.{ Result },
|
import List
|
||||||
List,
|
import Str
|
||||||
Str,
|
import Num exposing [U64, F32, U32, U8]
|
||||||
Num.{ U64, F32, U32, U8, I8 },
|
import Hash exposing [Hasher, Hash]
|
||||||
Hash.{ Hasher, Hash },
|
import Inspect exposing [Inspect, Inspector, InspectFormatter]
|
||||||
Inspect.{ Inspect, Inspector, InspectFormatter },
|
|
||||||
]
|
|
||||||
|
|
||||||
## A [dictionary](https://en.wikipedia.org/wiki/Associative_array) that lets you
|
## A [dictionary](https://en.wikipedia.org/wiki/Associative_array) that lets you
|
||||||
## associate keys with values.
|
## associate keys with values.
|
||||||
|
|
|
@ -1,51 +1,49 @@
|
||||||
interface Encode
|
module [
|
||||||
exposes [
|
Encoder,
|
||||||
Encoder,
|
Encoding,
|
||||||
Encoding,
|
toEncoder,
|
||||||
toEncoder,
|
EncoderFormatting,
|
||||||
EncoderFormatting,
|
u8,
|
||||||
u8,
|
u16,
|
||||||
u16,
|
u32,
|
||||||
u32,
|
u64,
|
||||||
u64,
|
u128,
|
||||||
u128,
|
i8,
|
||||||
i8,
|
i16,
|
||||||
i16,
|
i32,
|
||||||
i32,
|
i64,
|
||||||
i64,
|
i128,
|
||||||
i128,
|
f32,
|
||||||
f32,
|
f64,
|
||||||
f64,
|
dec,
|
||||||
dec,
|
bool,
|
||||||
bool,
|
string,
|
||||||
string,
|
list,
|
||||||
list,
|
record,
|
||||||
record,
|
tag,
|
||||||
tag,
|
tuple,
|
||||||
tuple,
|
custom,
|
||||||
custom,
|
appendWith,
|
||||||
appendWith,
|
append,
|
||||||
append,
|
toBytes,
|
||||||
toBytes,
|
]
|
||||||
]
|
|
||||||
imports [
|
import Num exposing [
|
||||||
Num.{
|
U8,
|
||||||
U8,
|
U16,
|
||||||
U16,
|
U32,
|
||||||
U32,
|
U64,
|
||||||
U64,
|
U128,
|
||||||
U128,
|
I8,
|
||||||
I8,
|
I16,
|
||||||
I16,
|
I32,
|
||||||
I32,
|
I64,
|
||||||
I64,
|
I128,
|
||||||
I128,
|
F32,
|
||||||
F32,
|
F64,
|
||||||
F64,
|
Dec,
|
||||||
Dec,
|
]
|
||||||
},
|
import Bool exposing [Bool]
|
||||||
Bool.{ Bool },
|
|
||||||
]
|
|
||||||
|
|
||||||
Encoder fmt := List U8, fmt -> List U8 where fmt implements EncoderFormatting
|
Encoder fmt := List U8, fmt -> List U8 where fmt implements EncoderFormatting
|
||||||
|
|
||||||
|
|
|
@ -1,31 +1,42 @@
|
||||||
interface Hash
|
module [
|
||||||
exposes [
|
Hash,
|
||||||
Hash,
|
Hasher,
|
||||||
Hasher,
|
hash,
|
||||||
hash,
|
addBytes,
|
||||||
addBytes,
|
addU8,
|
||||||
addU8,
|
addU16,
|
||||||
addU16,
|
addU32,
|
||||||
addU32,
|
addU64,
|
||||||
addU64,
|
addU128,
|
||||||
addU128,
|
hashBool,
|
||||||
hashBool,
|
hashI8,
|
||||||
hashI8,
|
hashI16,
|
||||||
hashI16,
|
hashI32,
|
||||||
hashI32,
|
hashI64,
|
||||||
hashI64,
|
hashI128,
|
||||||
hashI128,
|
hashDec,
|
||||||
hashDec,
|
complete,
|
||||||
complete,
|
hashStrBytes,
|
||||||
hashStrBytes,
|
hashList,
|
||||||
hashList,
|
hashUnordered,
|
||||||
hashUnordered,
|
]
|
||||||
] imports [
|
|
||||||
Bool.{ Bool },
|
import Bool exposing [Bool]
|
||||||
List,
|
import List
|
||||||
Str,
|
import Str
|
||||||
Num.{ U8, U16, U32, U64, U128, I8, I16, I32, I64, I128, Dec },
|
import Num exposing [
|
||||||
]
|
U8,
|
||||||
|
U16,
|
||||||
|
U32,
|
||||||
|
U64,
|
||||||
|
U128,
|
||||||
|
I8,
|
||||||
|
I16,
|
||||||
|
I32,
|
||||||
|
I64,
|
||||||
|
I128,
|
||||||
|
Dec,
|
||||||
|
]
|
||||||
|
|
||||||
## A value that can be hashed.
|
## A value that can be hashed.
|
||||||
Hash implements
|
Hash implements
|
||||||
|
|
|
@ -1,46 +1,44 @@
|
||||||
interface Inspect
|
module [
|
||||||
exposes [
|
Inspect,
|
||||||
Inspect,
|
Inspector,
|
||||||
Inspector,
|
InspectFormatter,
|
||||||
InspectFormatter,
|
ElemWalker,
|
||||||
ElemWalker,
|
KeyValWalker,
|
||||||
KeyValWalker,
|
inspect,
|
||||||
inspect,
|
init,
|
||||||
init,
|
list,
|
||||||
list,
|
set,
|
||||||
set,
|
dict,
|
||||||
dict,
|
tag,
|
||||||
tag,
|
tuple,
|
||||||
tuple,
|
record,
|
||||||
record,
|
bool,
|
||||||
bool,
|
str,
|
||||||
str,
|
function,
|
||||||
function,
|
opaque,
|
||||||
opaque,
|
u8,
|
||||||
u8,
|
i8,
|
||||||
i8,
|
u16,
|
||||||
u16,
|
i16,
|
||||||
i16,
|
u32,
|
||||||
u32,
|
i32,
|
||||||
i32,
|
u64,
|
||||||
u64,
|
i64,
|
||||||
i64,
|
u128,
|
||||||
u128,
|
i128,
|
||||||
i128,
|
f32,
|
||||||
f32,
|
f64,
|
||||||
f64,
|
dec,
|
||||||
dec,
|
custom,
|
||||||
custom,
|
apply,
|
||||||
apply,
|
toInspector,
|
||||||
toInspector,
|
toStr,
|
||||||
toStr,
|
]
|
||||||
]
|
|
||||||
imports [
|
import Bool exposing [Bool]
|
||||||
Bool.{ Bool },
|
import Num exposing [U8, U16, U32, U64, U128, I8, I16, I32, I64, I128, F32, F64, Dec]
|
||||||
Num.{ U8, U16, U32, U64, U128, I8, I16, I32, I64, I128, F32, F64, Dec },
|
import List
|
||||||
List,
|
import Str
|
||||||
Str,
|
|
||||||
]
|
|
||||||
|
|
||||||
KeyValWalker state collection key val : collection, state, (state, key, val -> state) -> state
|
KeyValWalker state collection key val : collection, state, (state, key, val -> state) -> state
|
||||||
ElemWalker state collection elem : collection, state, (state, elem -> state) -> state
|
ElemWalker state collection elem : collection, state, (state, elem -> state) -> state
|
||||||
|
|
|
@ -1,81 +1,80 @@
|
||||||
interface List
|
module [
|
||||||
exposes [
|
isEmpty,
|
||||||
isEmpty,
|
get,
|
||||||
get,
|
set,
|
||||||
set,
|
replace,
|
||||||
replace,
|
update,
|
||||||
update,
|
append,
|
||||||
append,
|
appendIfOk,
|
||||||
appendIfOk,
|
prepend,
|
||||||
prepend,
|
prependIfOk,
|
||||||
prependIfOk,
|
map,
|
||||||
map,
|
len,
|
||||||
len,
|
withCapacity,
|
||||||
withCapacity,
|
walkBackwards,
|
||||||
walkBackwards,
|
concat,
|
||||||
concat,
|
first,
|
||||||
first,
|
single,
|
||||||
single,
|
repeat,
|
||||||
repeat,
|
reverse,
|
||||||
reverse,
|
join,
|
||||||
join,
|
keepIf,
|
||||||
keepIf,
|
contains,
|
||||||
contains,
|
sum,
|
||||||
sum,
|
walk,
|
||||||
walk,
|
last,
|
||||||
last,
|
keepOks,
|
||||||
keepOks,
|
keepErrs,
|
||||||
keepErrs,
|
mapWithIndex,
|
||||||
mapWithIndex,
|
map2,
|
||||||
map2,
|
map3,
|
||||||
map3,
|
product,
|
||||||
product,
|
walkWithIndex,
|
||||||
walkWithIndex,
|
walkUntil,
|
||||||
walkUntil,
|
walkWithIndexUntil,
|
||||||
walkWithIndexUntil,
|
walkFrom,
|
||||||
walkFrom,
|
walkFromUntil,
|
||||||
walkFromUntil,
|
range,
|
||||||
range,
|
sortWith,
|
||||||
sortWith,
|
swap,
|
||||||
swap,
|
dropAt,
|
||||||
dropAt,
|
min,
|
||||||
min,
|
max,
|
||||||
max,
|
map4,
|
||||||
map4,
|
mapTry,
|
||||||
mapTry,
|
walkTry,
|
||||||
walkTry,
|
joinMap,
|
||||||
joinMap,
|
any,
|
||||||
any,
|
takeFirst,
|
||||||
takeFirst,
|
takeLast,
|
||||||
takeLast,
|
dropFirst,
|
||||||
dropFirst,
|
dropLast,
|
||||||
dropLast,
|
findFirst,
|
||||||
findFirst,
|
findLast,
|
||||||
findLast,
|
findFirstIndex,
|
||||||
findFirstIndex,
|
findLastIndex,
|
||||||
findLastIndex,
|
sublist,
|
||||||
sublist,
|
intersperse,
|
||||||
intersperse,
|
split,
|
||||||
split,
|
splitFirst,
|
||||||
splitFirst,
|
splitLast,
|
||||||
splitLast,
|
startsWith,
|
||||||
startsWith,
|
endsWith,
|
||||||
endsWith,
|
all,
|
||||||
all,
|
dropIf,
|
||||||
dropIf,
|
sortAsc,
|
||||||
sortAsc,
|
sortDesc,
|
||||||
sortDesc,
|
reserve,
|
||||||
reserve,
|
releaseExcessCapacity,
|
||||||
releaseExcessCapacity,
|
walkBackwardsUntil,
|
||||||
walkBackwardsUntil,
|
countIf,
|
||||||
countIf,
|
chunksOf,
|
||||||
chunksOf,
|
concatUtf8,
|
||||||
]
|
]
|
||||||
imports [
|
|
||||||
Bool.{ Bool, Eq },
|
import Bool exposing [Bool, Eq]
|
||||||
Result.{ Result },
|
import Result exposing [Result]
|
||||||
Num.{ U64, Num, Int },
|
import Num exposing [U64, Num, U8]
|
||||||
]
|
|
||||||
|
|
||||||
## ## Types
|
## ## Types
|
||||||
##
|
##
|
||||||
|
@@ -1326,3 +1325,12 @@ iterBackwardsHelp = \list, state, f, prevIndex ->
                Break b -> Break b
        else
            Continue state
+
+## Concatenates the bytes of a string encoded as utf8 to a list of bytes.
+## ```roc
+## expect (List.concatUtf8 [1, 2, 3, 4] "🐦") == [1, 2, 3, 4, 240, 159, 144, 166]
+## ```
+concatUtf8 : List U8, Str -> List U8
+
+expect (List.concatUtf8 [1, 2, 3, 4] "🐦") == [1, 2, 3, 4, 240, 159, 144, 166]
+
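A further usage note, which is my own observation rather than something stated in this diff: because `concatUtf8` appends a string's UTF-8 bytes, concatenating onto an empty list should agree with `Str.toUtf8`:

    # assumption: equivalence inferred from the doc above, not asserted by this change
    expect List.concatUtf8 [] "abc" == Str.toUtf8 "abc"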
|
@ -1,166 +1,168 @@
|
||||||
interface Num
|
module [
|
||||||
exposes [
|
Num,
|
||||||
Num,
|
Int,
|
||||||
Int,
|
Frac,
|
||||||
Frac,
|
Integer,
|
||||||
Integer,
|
FloatingPoint,
|
||||||
FloatingPoint,
|
I128,
|
||||||
I128,
|
I64,
|
||||||
I64,
|
I32,
|
||||||
I32,
|
I16,
|
||||||
I16,
|
I8,
|
||||||
I8,
|
U128,
|
||||||
U128,
|
U64,
|
||||||
U64,
|
U32,
|
||||||
U32,
|
U16,
|
||||||
U16,
|
U8,
|
||||||
U8,
|
Signed128,
|
||||||
Signed128,
|
Signed64,
|
||||||
Signed64,
|
Signed32,
|
||||||
Signed32,
|
Signed16,
|
||||||
Signed16,
|
Signed8,
|
||||||
Signed8,
|
Unsigned128,
|
||||||
Unsigned128,
|
Unsigned64,
|
||||||
Unsigned64,
|
Unsigned32,
|
||||||
Unsigned32,
|
Unsigned16,
|
||||||
Unsigned16,
|
Unsigned8,
|
||||||
Unsigned8,
|
Dec,
|
||||||
Dec,
|
F64,
|
||||||
F64,
|
F32,
|
||||||
F32,
|
Decimal,
|
||||||
Decimal,
|
Binary32,
|
||||||
Binary32,
|
Binary64,
|
||||||
Binary64,
|
e,
|
||||||
e,
|
pi,
|
||||||
pi,
|
tau,
|
||||||
tau,
|
abs,
|
||||||
abs,
|
absDiff,
|
||||||
absDiff,
|
neg,
|
||||||
neg,
|
add,
|
||||||
add,
|
sub,
|
||||||
sub,
|
mul,
|
||||||
mul,
|
min,
|
||||||
min,
|
max,
|
||||||
max,
|
isLt,
|
||||||
isLt,
|
isLte,
|
||||||
isLte,
|
isGt,
|
||||||
isGt,
|
isGte,
|
||||||
isGte,
|
isApproxEq,
|
||||||
isApproxEq,
|
sin,
|
||||||
sin,
|
cos,
|
||||||
cos,
|
tan,
|
||||||
tan,
|
atan,
|
||||||
atan,
|
acos,
|
||||||
acos,
|
asin,
|
||||||
asin,
|
isZero,
|
||||||
isZero,
|
isEven,
|
||||||
isEven,
|
isOdd,
|
||||||
isOdd,
|
toFrac,
|
||||||
toFrac,
|
isPositive,
|
||||||
isPositive,
|
isNegative,
|
||||||
isNegative,
|
isNaN,
|
||||||
isNaN,
|
isInfinite,
|
||||||
isInfinite,
|
isFinite,
|
||||||
isFinite,
|
rem,
|
||||||
rem,
|
remChecked,
|
||||||
remChecked,
|
div,
|
||||||
div,
|
divChecked,
|
||||||
divChecked,
|
sqrt,
|
||||||
sqrt,
|
sqrtChecked,
|
||||||
sqrtChecked,
|
log,
|
||||||
log,
|
logChecked,
|
||||||
logChecked,
|
round,
|
||||||
round,
|
ceiling,
|
||||||
ceiling,
|
floor,
|
||||||
floor,
|
compare,
|
||||||
compare,
|
pow,
|
||||||
pow,
|
powInt,
|
||||||
powInt,
|
countLeadingZeroBits,
|
||||||
countLeadingZeroBits,
|
countTrailingZeroBits,
|
||||||
countTrailingZeroBits,
|
countOneBits,
|
||||||
countOneBits,
|
addWrap,
|
||||||
addWrap,
|
addChecked,
|
||||||
addChecked,
|
addSaturated,
|
||||||
addSaturated,
|
bitwiseAnd,
|
||||||
bitwiseAnd,
|
bitwiseXor,
|
||||||
bitwiseXor,
|
bitwiseOr,
|
||||||
bitwiseOr,
|
bitwiseNot,
|
||||||
bitwiseNot,
|
shiftLeftBy,
|
||||||
shiftLeftBy,
|
shiftRightBy,
|
||||||
shiftRightBy,
|
shiftRightZfBy,
|
||||||
shiftRightZfBy,
|
subWrap,
|
||||||
subWrap,
|
subChecked,
|
||||||
subChecked,
|
subSaturated,
|
||||||
subSaturated,
|
mulWrap,
|
||||||
mulWrap,
|
mulSaturated,
|
||||||
mulSaturated,
|
mulChecked,
|
||||||
mulChecked,
|
intCast,
|
||||||
intCast,
|
divCeil,
|
||||||
divCeil,
|
divCeilChecked,
|
||||||
divCeilChecked,
|
divTrunc,
|
||||||
divTrunc,
|
divTruncChecked,
|
||||||
divTruncChecked,
|
toStr,
|
||||||
toStr,
|
isMultipleOf,
|
||||||
isMultipleOf,
|
minI8,
|
||||||
minI8,
|
maxI8,
|
||||||
maxI8,
|
minU8,
|
||||||
minU8,
|
maxU8,
|
||||||
maxU8,
|
minI16,
|
||||||
minI16,
|
maxI16,
|
||||||
maxI16,
|
minU16,
|
||||||
minU16,
|
maxU16,
|
||||||
maxU16,
|
minI32,
|
||||||
minI32,
|
maxI32,
|
||||||
maxI32,
|
minU32,
|
||||||
minU32,
|
maxU32,
|
||||||
maxU32,
|
minI64,
|
||||||
minI64,
|
maxI64,
|
||||||
maxI64,
|
minU64,
|
||||||
minU64,
|
maxU64,
|
||||||
maxU64,
|
minI128,
|
||||||
minI128,
|
maxI128,
|
||||||
maxI128,
|
minU128,
|
||||||
minU128,
|
maxU128,
|
||||||
maxU128,
|
minF32,
|
||||||
minF32,
|
maxF32,
|
||||||
maxF32,
|
minF64,
|
||||||
minF64,
|
maxF64,
|
||||||
maxF64,
|
toI8,
|
||||||
toI8,
|
toI8Checked,
|
||||||
toI8Checked,
|
toI16,
|
||||||
toI16,
|
toI16Checked,
|
||||||
toI16Checked,
|
toI32,
|
||||||
toI32,
|
toI32Checked,
|
||||||
toI32Checked,
|
toI64,
|
||||||
toI64,
|
toI64Checked,
|
||||||
toI64Checked,
|
toI128,
|
||||||
toI128,
|
toI128Checked,
|
||||||
toI128Checked,
|
toU8,
|
||||||
toU8,
|
toU8Checked,
|
||||||
toU8Checked,
|
toU16,
|
||||||
toU16,
|
toU16Checked,
|
||||||
toU16Checked,
|
toU32,
|
||||||
toU32,
|
toU32Checked,
|
||||||
toU32Checked,
|
toU64,
|
||||||
toU64,
|
toU64Checked,
|
||||||
toU64Checked,
|
toU128,
|
||||||
toU128,
|
toU128Checked,
|
||||||
toU128Checked,
|
toF32,
|
||||||
toF32,
|
toF32Checked,
|
||||||
toF32Checked,
|
toF64,
|
||||||
toF64,
|
toF64Checked,
|
||||||
toF64Checked,
|
withoutDecimalPoint,
|
||||||
withoutDecimalPoint,
|
withDecimalPoint,
|
||||||
withDecimalPoint,
|
f32ToParts,
|
||||||
f32ToParts,
|
f64ToParts,
|
||||||
f64ToParts,
|
f32FromParts,
|
||||||
f32FromParts,
|
f64FromParts,
|
||||||
f64FromParts,
|
nanF32,
|
||||||
]
|
nanF64,
|
||||||
imports [
|
infinityF32,
|
||||||
Bool.{ Bool },
|
infinityF64,
|
||||||
Result.{ Result },
|
]
|
||||||
]
|
|
||||||
|
import Bool exposing [Bool]
|
||||||
|
import Result exposing [Result]
|
||||||
|
|
||||||
## Represents a number that could be either an [Int] or a [Frac].
|
## Represents a number that could be either an [Int] or a [Frac].
|
||||||
##
|
##
|
||||||
|
@@ -1435,3 +1437,19 @@ f32FromParts : { sign : Bool, exponent : U8, fraction : U32 } -> F32
 ## The fraction should not be bigger than 0x000F_FFFF_FFFF_FFFF, any bigger value will be truncated.
 ## The exponent should not be bigger than 0x07FF, any bigger value will be truncated.
 f64FromParts : { sign : Bool, exponent : U16, fraction : U64 } -> F64
+
+## The value for not-a-number for a [F32] according to the IEEE 754 standard.
+nanF32 : F32
+nanF32 = 0.0f32 / 0.0
+
+## The value for not-a-number for a [F64] according to the IEEE 754 standard.
+nanF64 : F64
+nanF64 = 0.0f64 / 0.0
+
+## The value for infinity for a [F32] according to the IEEE 754 standard.
+infinityF32 : F32
+infinityF32 = 1.0f32 / 0.0
+
+## The value for infinity for a [F64] according to the IEEE 754 standard.
+infinityF64 : F64
+infinityF64 = 1.0f64 / 0.0
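A quick sketch of how these new constants relate to the predicates already exposed by this module; the expectations are my own examples, assuming the IEEE 754 semantics described in the doc comments above:

    expect Num.isNaN Num.nanF32
    expect Num.isInfinite Num.infinityF64
    expect Num.isFinite Num.infinityF32 == Bool.false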
|
@@ -1,6 +1,6 @@
-interface Result
-    exposes [Result, isOk, isErr, map, mapErr, try, onErr, withDefault]
-    imports [Bool.{ Bool }]
+module [Result, isOk, isErr, map, mapErr, try, onErr, withDefault]
+
+import Bool exposing [Bool]
 
 ## The result of an operation that could fail: either the operation went
 ## okay, or else there was an error of some sort.
|
@ -1,37 +1,35 @@
|
||||||
interface Set
|
module [
|
||||||
exposes [
|
Set,
|
||||||
Set,
|
empty,
|
||||||
empty,
|
withCapacity,
|
||||||
withCapacity,
|
reserve,
|
||||||
reserve,
|
releaseExcessCapacity,
|
||||||
releaseExcessCapacity,
|
single,
|
||||||
single,
|
walk,
|
||||||
walk,
|
walkUntil,
|
||||||
walkUntil,
|
keepIf,
|
||||||
keepIf,
|
dropIf,
|
||||||
dropIf,
|
insert,
|
||||||
insert,
|
len,
|
||||||
len,
|
isEmpty,
|
||||||
isEmpty,
|
capacity,
|
||||||
capacity,
|
remove,
|
||||||
remove,
|
contains,
|
||||||
contains,
|
toList,
|
||||||
toList,
|
fromList,
|
||||||
fromList,
|
union,
|
||||||
union,
|
intersection,
|
||||||
intersection,
|
difference,
|
||||||
difference,
|
map,
|
||||||
map,
|
joinMap,
|
||||||
joinMap,
|
]
|
||||||
]
|
|
||||||
imports [
|
import List
|
||||||
List,
|
import Bool exposing [Bool, Eq]
|
||||||
Bool.{ Bool, Eq },
|
import Dict
|
||||||
Dict.{ Dict },
|
import Num exposing [U64]
|
||||||
Num.{ U64 },
|
import Hash exposing [Hash, Hasher]
|
||||||
Hash.{ Hash, Hasher },
|
import Inspect exposing [Inspect, Inspector, InspectFormatter]
|
||||||
Inspect.{ Inspect, Inspector, InspectFormatter },
|
|
||||||
]
|
|
||||||
|
|
||||||
## Provides a [set](https://en.wikipedia.org/wiki/Set_(abstract_data_type))
|
## Provides a [set](https://en.wikipedia.org/wiki/Set_(abstract_data_type))
|
||||||
## type which stores a collection of unique values, without any ordering
|
## type which stores a collection of unique values, without any ordering
|
||||||
|
|
|
@ -326,55 +326,53 @@
|
||||||
## If a situation like this comes up, a slice can be turned into a separate string by using [`Str.concat`](https://www.roc-lang.org/builtins/Str#concat) to concatenate the slice onto an empty string (or one created with [`Str.withCapacity`](https://www.roc-lang.org/builtins/Str#withCapacity)).
|
## If a situation like this comes up, a slice can be turned into a separate string by using [`Str.concat`](https://www.roc-lang.org/builtins/Str#concat) to concatenate the slice onto an empty string (or one created with [`Str.withCapacity`](https://www.roc-lang.org/builtins/Str#withCapacity)).
|
||||||
##
|
##
|
||||||
## Currently, the only way to get seamless slices of strings is by calling certain `Str` functions which return them. In general, `Str` functions which accept a string and return a subset of that string tend to do this. [`Str.trim`](https://www.roc-lang.org/builtins/Str#trim) is another example of a function which returns a seamless slice.
|
## Currently, the only way to get seamless slices of strings is by calling certain `Str` functions which return them. In general, `Str` functions which accept a string and return a subset of that string tend to do this. [`Str.trim`](https://www.roc-lang.org/builtins/Str#trim) is another example of a function which returns a seamless slice.
|
||||||
interface Str
|
module [
|
||||||
exposes [
|
Utf8Problem,
|
||||||
Utf8Problem,
|
Utf8ByteProblem,
|
||||||
Utf8ByteProblem,
|
concat,
|
||||||
concat,
|
isEmpty,
|
||||||
isEmpty,
|
joinWith,
|
||||||
joinWith,
|
split,
|
||||||
split,
|
repeat,
|
||||||
repeat,
|
countUtf8Bytes,
|
||||||
countUtf8Bytes,
|
toUtf8,
|
||||||
toUtf8,
|
fromUtf8,
|
||||||
fromUtf8,
|
startsWith,
|
||||||
startsWith,
|
endsWith,
|
||||||
endsWith,
|
trim,
|
||||||
trim,
|
trimStart,
|
||||||
trimStart,
|
trimEnd,
|
||||||
trimEnd,
|
toDec,
|
||||||
toDec,
|
toF64,
|
||||||
toF64,
|
toF32,
|
||||||
toF32,
|
toU128,
|
||||||
toU128,
|
toI128,
|
||||||
toI128,
|
toU64,
|
||||||
toU64,
|
toI64,
|
||||||
toI64,
|
toU32,
|
||||||
toU32,
|
toI32,
|
||||||
toI32,
|
toU16,
|
||||||
toU16,
|
toI16,
|
||||||
toI16,
|
toU8,
|
||||||
toU8,
|
toI8,
|
||||||
toI8,
|
replaceEach,
|
||||||
replaceEach,
|
replaceFirst,
|
||||||
replaceFirst,
|
replaceLast,
|
||||||
replaceLast,
|
splitFirst,
|
||||||
splitFirst,
|
splitLast,
|
||||||
splitLast,
|
walkUtf8,
|
||||||
walkUtf8,
|
walkUtf8WithIndex,
|
||||||
walkUtf8WithIndex,
|
reserve,
|
||||||
reserve,
|
releaseExcessCapacity,
|
||||||
releaseExcessCapacity,
|
withCapacity,
|
||||||
withCapacity,
|
withPrefix,
|
||||||
withPrefix,
|
contains,
|
||||||
contains,
|
]
|
||||||
]
|
|
||||||
imports [
|
import Bool exposing [Bool]
|
||||||
Bool.{ Bool, Eq },
|
import Result exposing [Result]
|
||||||
Result.{ Result },
|
import List
|
||||||
List,
|
import Num exposing [Num, U8, U16, U32, U64, U128, I8, I16, I32, I64, I128, F32, F64, Dec]
|
||||||
Num.{ Num, U8, U16, U32, U64, U128, I8, I16, I32, I64, I128, F32, F64, Dec },
|
|
||||||
]
|
|
||||||
|
|
||||||
Utf8ByteProblem : [
|
Utf8ByteProblem : [
|
||||||
InvalidStartByte,
|
InvalidStartByte,
|
||||||
|
|
|
@ -1,44 +1,18 @@
|
||||||
## THIS MODULE IS DEPRECATED AND CURRENTLY IN THE PROCESS OF BEING REMOVED
|
## THIS MODULE IS DEPRECATED AND CURRENTLY IN THE PROCESS OF BEING REMOVED
|
||||||
## FROM STD LIBRARY
|
## FROM STD LIBRARY
|
||||||
interface TotallyNotJson
|
module [
|
||||||
exposes [
|
Json,
|
||||||
Json,
|
json,
|
||||||
json,
|
jsonWithOptions,
|
||||||
jsonWithOptions,
|
]
|
||||||
]
|
|
||||||
imports [
|
import List
|
||||||
List,
|
import Str
|
||||||
Str,
|
import Result
|
||||||
Result.{ Result },
|
import Encode exposing [EncoderFormatting, appendWith]
|
||||||
Encode,
|
import Decode exposing [DecoderFormatting, DecodeResult]
|
||||||
Encode.{
|
import Num exposing [U8, U16, U64, F32, F64, Dec]
|
||||||
Encoder,
|
import Bool exposing [Bool]
|
||||||
EncoderFormatting,
|
|
||||||
appendWith,
|
|
||||||
},
|
|
||||||
Decode,
|
|
||||||
Decode.{
|
|
||||||
DecoderFormatting,
|
|
||||||
DecodeResult,
|
|
||||||
},
|
|
||||||
Num.{
|
|
||||||
U8,
|
|
||||||
U16,
|
|
||||||
U32,
|
|
||||||
U64,
|
|
||||||
U128,
|
|
||||||
I8,
|
|
||||||
I16,
|
|
||||||
I32,
|
|
||||||
I64,
|
|
||||||
I128,
|
|
||||||
F32,
|
|
||||||
F64,
|
|
||||||
Dec,
|
|
||||||
},
|
|
||||||
Bool.{ Bool, Eq },
|
|
||||||
Result,
|
|
||||||
]
|
|
||||||
|
|
||||||
## An opaque type with the `EncoderFormatting` and
|
## An opaque type with the `EncoderFormatting` and
|
||||||
## `DecoderFormatting` abilities.
|
## `DecoderFormatting` abilities.
|
||||||
|
|
|
@@ -1,3 +1,15 @@
-package "builtins"
-    exposes [Str, Num, Bool, Result, List, Dict, Set, Decode, Encode, Hash, Box, TotallyNotJson, Inspect]
-    packages {}
+package [
+    Str,
+    Num,
+    Bool,
+    Result,
+    List,
+    Dict,
+    Set,
+    Decode,
+    Encode,
+    Hash,
+    Box,
+    TotallyNotJson,
+    Inspect,
+] {}
|
@@ -390,6 +390,7 @@ pub const LIST_RESERVE: &str = "roc_builtins.list.reserve";
 pub const LIST_CAPACITY: &str = "roc_builtins.list.capacity";
 pub const LIST_ALLOCATION_PTR: &str = "roc_builtins.list.allocation_ptr";
 pub const LIST_RELEASE_EXCESS_CAPACITY: &str = "roc_builtins.list.release_excess_capacity";
+pub const LIST_CONCAT_UTF8: &str = "roc_builtins.list.concat_utf8";
 
 pub const DEC_ABS: &str = "roc_builtins.dec.abs";
 pub const DEC_ACOS: &str = "roc_builtins.dec.acos";
|
@ -1,5 +1,5 @@
|
||||||
use crate::env::Env;
|
use crate::env::Env;
|
||||||
use crate::procedure::References;
|
use crate::procedure::{QualifiedReference, References};
|
||||||
use crate::scope::{PendingAbilitiesInScope, Scope};
|
use crate::scope::{PendingAbilitiesInScope, Scope};
|
||||||
use roc_collections::{ImMap, MutSet, SendMap, VecMap, VecSet};
|
use roc_collections::{ImMap, MutSet, SendMap, VecMap, VecSet};
|
||||||
use roc_module::ident::{Ident, Lowercase, TagName};
|
use roc_module::ident::{Ident, Lowercase, TagName};
|
||||||
|
@ -17,7 +17,7 @@ use roc_types::types::{
|
||||||
pub struct Annotation {
|
pub struct Annotation {
|
||||||
pub typ: Type,
|
pub typ: Type,
|
||||||
pub introduced_variables: IntroducedVariables,
|
pub introduced_variables: IntroducedVariables,
|
||||||
pub references: VecSet<Symbol>,
|
pub references: References,
|
||||||
pub aliases: VecMap<Symbol, Alias>,
|
pub aliases: VecMap<Symbol, Alias>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -28,9 +28,7 @@ impl Annotation {
|
||||||
references: &mut References,
|
references: &mut References,
|
||||||
introduced_variables: &mut IntroducedVariables,
|
introduced_variables: &mut IntroducedVariables,
|
||||||
) {
|
) {
|
||||||
for symbol in self.references.iter() {
|
references.union_mut(&self.references);
|
||||||
references.insert_type_lookup(*symbol);
|
|
||||||
}
|
|
||||||
|
|
||||||
introduced_variables.union(&self.introduced_variables);
|
introduced_variables.union(&self.introduced_variables);
|
||||||
|
|
||||||
|
@ -291,7 +289,7 @@ pub(crate) fn canonicalize_annotation(
|
||||||
annotation_for: AnnotationFor,
|
annotation_for: AnnotationFor,
|
||||||
) -> Annotation {
|
) -> Annotation {
|
||||||
let mut introduced_variables = IntroducedVariables::default();
|
let mut introduced_variables = IntroducedVariables::default();
|
||||||
let mut references = VecSet::default();
|
let mut references = References::new();
|
||||||
let mut aliases = VecMap::default();
|
let mut aliases = VecMap::default();
|
||||||
|
|
||||||
let (annotation, region) = match annotation {
|
let (annotation, region) = match annotation {
|
||||||
|
@ -381,13 +379,17 @@ pub(crate) fn make_apply_symbol(
|
||||||
scope: &mut Scope,
|
scope: &mut Scope,
|
||||||
module_name: &str,
|
module_name: &str,
|
||||||
ident: &str,
|
ident: &str,
|
||||||
|
references: &mut References,
|
||||||
) -> Result<Symbol, Type> {
|
) -> Result<Symbol, Type> {
|
||||||
if module_name.is_empty() {
|
if module_name.is_empty() {
|
||||||
// Since module_name was empty, this is an unqualified type.
|
// Since module_name was empty, this is an unqualified type.
|
||||||
// Look it up in scope!
|
// Look it up in scope!
|
||||||
|
|
||||||
match scope.lookup_str(ident, region) {
|
match scope.lookup_str(ident, region) {
|
||||||
Ok(symbol) => Ok(symbol),
|
Ok(symbol) => {
|
||||||
|
references.insert_type_lookup(symbol, QualifiedReference::Unqualified);
|
||||||
|
Ok(symbol)
|
||||||
|
}
|
||||||
Err(problem) => {
|
Err(problem) => {
|
||||||
env.problem(roc_problem::can::Problem::RuntimeError(problem));
|
env.problem(roc_problem::can::Problem::RuntimeError(problem));
|
||||||
|
|
||||||
|
@ -396,7 +398,10 @@ pub(crate) fn make_apply_symbol(
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
match env.qualified_lookup(scope, module_name, ident, region) {
|
match env.qualified_lookup(scope, module_name, ident, region) {
|
||||||
Ok(symbol) => Ok(symbol),
|
Ok(symbol) => {
|
||||||
|
references.insert_type_lookup(symbol, QualifiedReference::Qualified);
|
||||||
|
Ok(symbol)
|
||||||
|
}
|
||||||
Err(problem) => {
|
Err(problem) => {
|
||||||
// Either the module wasn't imported, or
|
// Either the module wasn't imported, or
|
||||||
// it was imported but it doesn't expose this ident.
|
// it was imported but it doesn't expose this ident.
|
||||||
|
@ -537,7 +542,7 @@ fn can_annotation_help(
|
||||||
var_store: &mut VarStore,
|
var_store: &mut VarStore,
|
||||||
introduced_variables: &mut IntroducedVariables,
|
introduced_variables: &mut IntroducedVariables,
|
||||||
local_aliases: &mut VecMap<Symbol, Alias>,
|
local_aliases: &mut VecMap<Symbol, Alias>,
|
||||||
references: &mut VecSet<Symbol>,
|
references: &mut References,
|
||||||
) -> Type {
|
) -> Type {
|
||||||
use roc_parse::ast::TypeAnnotation::*;
|
use roc_parse::ast::TypeAnnotation::*;
|
||||||
|
|
||||||
|
@ -580,15 +585,14 @@ fn can_annotation_help(
|
||||||
Type::Function(args, Box::new(closure), Box::new(ret))
|
Type::Function(args, Box::new(closure), Box::new(ret))
|
||||||
}
|
}
|
||||||
Apply(module_name, ident, type_arguments) => {
|
Apply(module_name, ident, type_arguments) => {
|
||||||
let symbol = match make_apply_symbol(env, region, scope, module_name, ident) {
|
let symbol = match make_apply_symbol(env, region, scope, module_name, ident, references)
|
||||||
|
{
|
||||||
Err(problem) => return problem,
|
Err(problem) => return problem,
|
||||||
Ok(symbol) => symbol,
|
Ok(symbol) => symbol,
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut args = Vec::new();
|
let mut args = Vec::new();
|
||||||
|
|
||||||
references.insert(symbol);
|
|
||||||
|
|
||||||
if scope.abilities_store.is_ability(symbol) {
|
if scope.abilities_store.is_ability(symbol) {
|
||||||
let fresh_ty_var = find_fresh_var_name(introduced_variables);
|
let fresh_ty_var = find_fresh_var_name(introduced_variables);
|
||||||
|
|
||||||
|
@ -744,14 +748,15 @@ fn can_annotation_help(
|
||||||
let mut vars = Vec::with_capacity(loc_vars.len());
|
let mut vars = Vec::with_capacity(loc_vars.len());
|
||||||
let mut lowercase_vars: Vec<Loc<AliasVar>> = Vec::with_capacity(loc_vars.len());
|
let mut lowercase_vars: Vec<Loc<AliasVar>> = Vec::with_capacity(loc_vars.len());
|
||||||
|
|
||||||
references.insert(symbol);
|
references.insert_type_lookup(symbol, QualifiedReference::Unqualified);
|
||||||
|
|
||||||
for loc_var in *loc_vars {
|
for loc_var in *loc_vars {
|
||||||
let var = match loc_var.value {
|
let var = match loc_var.value {
|
||||||
Pattern::Identifier {
|
Pattern::Identifier { ident: name, .. }
|
||||||
ident: name,
|
if name.chars().next().unwrap().is_lowercase() =>
|
||||||
suffixed: _,
|
{
|
||||||
} if name.chars().next().unwrap().is_lowercase() => name,
|
name
|
||||||
|
}
|
||||||
_ => unreachable!("I thought this was validated during parsing"),
|
_ => unreachable!("I thought this was validated during parsing"),
|
||||||
};
|
};
|
||||||
let var_name = Lowercase::from(var);
|
let var_name = Lowercase::from(var);
|
||||||
|
@ -1056,7 +1061,7 @@ fn canonicalize_has_clause(
|
||||||
introduced_variables: &mut IntroducedVariables,
|
introduced_variables: &mut IntroducedVariables,
|
||||||
clause: &Loc<roc_parse::ast::ImplementsClause<'_>>,
|
clause: &Loc<roc_parse::ast::ImplementsClause<'_>>,
|
||||||
pending_abilities_in_scope: &PendingAbilitiesInScope,
|
pending_abilities_in_scope: &PendingAbilitiesInScope,
|
||||||
references: &mut VecSet<Symbol>,
|
references: &mut References,
|
||||||
) -> Result<(), Type> {
|
) -> Result<(), Type> {
|
||||||
let Loc {
|
let Loc {
|
||||||
region,
|
region,
|
||||||
|
@ -1079,7 +1084,7 @@ fn canonicalize_has_clause(
|
||||||
{
|
{
|
||||||
let ability = match ability {
|
let ability = match ability {
|
||||||
TypeAnnotation::Apply(module_name, ident, _type_arguments) => {
|
TypeAnnotation::Apply(module_name, ident, _type_arguments) => {
|
||||||
let symbol = make_apply_symbol(env, region, scope, module_name, ident)?;
|
let symbol = make_apply_symbol(env, region, scope, module_name, ident, references)?;
|
||||||
|
|
||||||
// Ability defined locally, whose members we are constructing right now...
|
// Ability defined locally, whose members we are constructing right now...
|
||||||
if !pending_abilities_in_scope.contains_key(&symbol)
|
if !pending_abilities_in_scope.contains_key(&symbol)
|
||||||
|
@ -1097,7 +1102,6 @@ fn canonicalize_has_clause(
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
references.insert(ability);
|
|
||||||
let already_seen = can_abilities.insert(ability);
|
let already_seen = can_abilities.insert(ability);
|
||||||
|
|
||||||
if already_seen {
|
if already_seen {
|
||||||
|
@ -1131,7 +1135,7 @@ fn can_extension_type(
|
||||||
var_store: &mut VarStore,
|
var_store: &mut VarStore,
|
||||||
introduced_variables: &mut IntroducedVariables,
|
introduced_variables: &mut IntroducedVariables,
|
||||||
local_aliases: &mut VecMap<Symbol, Alias>,
|
local_aliases: &mut VecMap<Symbol, Alias>,
|
||||||
references: &mut VecSet<Symbol>,
|
references: &mut References,
|
||||||
opt_ext: &Option<&Loc<TypeAnnotation>>,
|
opt_ext: &Option<&Loc<TypeAnnotation>>,
|
||||||
ext_problem_kind: roc_problem::can::ExtensionTypeKind,
|
ext_problem_kind: roc_problem::can::ExtensionTypeKind,
|
||||||
) -> (Type, ExtImplicitOpenness) {
|
) -> (Type, ExtImplicitOpenness) {
|
||||||
|
@ -1334,7 +1338,7 @@ fn can_assigned_fields<'a>(
|
||||||
var_store: &mut VarStore,
|
var_store: &mut VarStore,
|
||||||
introduced_variables: &mut IntroducedVariables,
|
introduced_variables: &mut IntroducedVariables,
|
||||||
local_aliases: &mut VecMap<Symbol, Alias>,
|
local_aliases: &mut VecMap<Symbol, Alias>,
|
||||||
references: &mut VecSet<Symbol>,
|
references: &mut References,
|
||||||
) -> SendMap<Lowercase, RecordField<Type>> {
|
) -> SendMap<Lowercase, RecordField<Type>> {
|
||||||
use roc_parse::ast::AssignedField::*;
|
use roc_parse::ast::AssignedField::*;
|
||||||
use roc_types::types::RecordField::*;
|
use roc_types::types::RecordField::*;
|
||||||
|
@ -1449,7 +1453,7 @@ fn can_assigned_tuple_elems(
|
||||||
var_store: &mut VarStore,
|
var_store: &mut VarStore,
|
||||||
introduced_variables: &mut IntroducedVariables,
|
introduced_variables: &mut IntroducedVariables,
|
||||||
local_aliases: &mut VecMap<Symbol, Alias>,
|
local_aliases: &mut VecMap<Symbol, Alias>,
|
||||||
references: &mut VecSet<Symbol>,
|
references: &mut References,
|
||||||
) -> VecMap<usize, Type> {
|
) -> VecMap<usize, Type> {
|
||||||
let mut elem_types = VecMap::with_capacity(elems.len());
|
let mut elem_types = VecMap::with_capacity(elems.len());
|
||||||
|
|
||||||
|
@ -1483,7 +1487,7 @@ fn can_tags<'a>(
|
||||||
var_store: &mut VarStore,
|
var_store: &mut VarStore,
|
||||||
introduced_variables: &mut IntroducedVariables,
|
introduced_variables: &mut IntroducedVariables,
|
||||||
local_aliases: &mut VecMap<Symbol, Alias>,
|
local_aliases: &mut VecMap<Symbol, Alias>,
|
||||||
references: &mut VecSet<Symbol>,
|
references: &mut References,
|
||||||
) -> Vec<(TagName, Vec<Type>)> {
|
) -> Vec<(TagName, Vec<Type>)> {
|
||||||
let mut tag_types = Vec::with_capacity(tags.len());
|
let mut tag_types = Vec::with_capacity(tags.len());
|
||||||
|
|
||||||
|
|
|
@@ -150,6 +150,7 @@ map_symbol_to_lowlevel_and_arity! {
    ListSwap; LIST_SWAP; 3,
    ListGetCapacity; LIST_CAPACITY; 1,
    ListReleaseExcessCapacity; LIST_RELEASE_EXCESS_CAPACITY; 1,
+    ListConcatUtf8; LIST_CONCAT_UTF8; 2,
 
    ListGetUnsafe; DICT_LIST_GET_UNSAFE; 2,
 
|
@ -28,6 +28,8 @@ use roc_collections::{ImSet, MutMap, SendMap};
|
||||||
use roc_error_macros::internal_error;
|
use roc_error_macros::internal_error;
|
||||||
use roc_module::ident::Ident;
|
use roc_module::ident::Ident;
|
||||||
use roc_module::ident::Lowercase;
|
use roc_module::ident::Lowercase;
|
||||||
|
use roc_module::ident::ModuleName;
|
||||||
|
use roc_module::ident::QualifiedModuleName;
|
||||||
use roc_module::symbol::IdentId;
|
use roc_module::symbol::IdentId;
|
||||||
use roc_module::symbol::ModuleId;
|
use roc_module::symbol::ModuleId;
|
||||||
use roc_module::symbol::Symbol;
|
use roc_module::symbol::Symbol;
|
||||||
|
@ -52,6 +54,10 @@ use roc_types::types::MemberImpl;
|
||||||
use roc_types::types::OptAbleType;
|
use roc_types::types::OptAbleType;
|
||||||
use roc_types::types::{Alias, Type};
|
use roc_types::types::{Alias, Type};
|
||||||
 use std::fmt::Debug;
+use std::fs;
+use std::io::Read;
+use std::path::PathBuf;
+use std::sync::Arc;
 
 #[derive(Clone, Debug)]
 pub struct Def {
@@ -158,6 +164,12 @@ enum PendingValueDef<'a> {
         &'a Loc<ast::TypeAnnotation<'a>>,
         &'a Loc<ast::Expr<'a>>,
     ),
+    /// Ingested file
+    IngestedFile(
+        Loc<Pattern>,
+        Option<Loc<ast::TypeAnnotation<'a>>>,
+        Loc<ast::StrLiteral<'a>>,
+    ),
 }
 
 impl PendingValueDef<'_> {
@@ -166,6 +178,7 @@ impl PendingValueDef<'_> {
             PendingValueDef::AnnotationOnly(_, loc_pattern, _) => loc_pattern,
             PendingValueDef::Body(loc_pattern, _) => loc_pattern,
             PendingValueDef::TypedBody(_, loc_pattern, _, _) => loc_pattern,
+            PendingValueDef::IngestedFile(loc_pattern, _, _) => loc_pattern,
         }
     }
 }
@@ -357,9 +370,7 @@ fn canonicalize_alias<'a>(
     );
 
     // Record all the annotation's references in output.references.lookups
-    for symbol in can_ann.references {
-        output.references.insert_type_lookup(symbol);
-    }
+    output.references.union_mut(&can_ann.references);
 
     let mut can_vars: Vec<Loc<AliasVar>> = Vec::with_capacity(vars.len());
     let mut is_phantom = false;
@@ -428,36 +439,54 @@ fn canonicalize_alias<'a>(
         return Err(());
     }
 
-    let num_unbound = named.len() + wildcards.len() + inferred.len();
-    if num_unbound > 0 {
-        let one_occurrence = named
-            .iter()
-            .map(|nv| Loc::at(nv.first_seen(), nv.variable()))
-            .chain(wildcards)
-            .chain(inferred)
-            .next()
-            .unwrap()
-            .region;
+    // Report errors for wildcards (*), underscores (_), and named vars that weren't declared.
+    let mut no_problems = true;
 
-        env.problems.push(Problem::UnboundTypeVariable {
+    if let Some(loc_var) = wildcards.first() {
+        env.problems.push(Problem::WildcardNotAllowed {
             typ: symbol,
-            num_unbound,
-            one_occurrence,
+            num_wildcards: wildcards.len(),
+            one_occurrence: loc_var.region,
             kind,
         });
 
-        // Bail out
-        return Err(());
+        no_problems = false;
     }
 
-    Ok(create_alias(
-        symbol,
-        name.region,
-        can_vars.clone(),
-        infer_ext_in_output,
-        can_ann.typ,
-        kind,
-    ))
+    if let Some(loc_var) = inferred.first() {
+        env.problems.push(Problem::UnderscoreNotAllowed {
+            typ: symbol,
+            num_underscores: inferred.len(),
+            one_occurrence: loc_var.region,
+            kind,
+        });
+
+        no_problems = false;
+    }
+
+    if let Some(nv) = named.first() {
+        env.problems.push(Problem::UndeclaredTypeVar {
+            typ: symbol,
+            num_unbound: named.len(),
+            one_occurrence: nv.first_seen(),
+            kind,
+        });
+
+        no_problems = false;
+    }
+
+    if no_problems {
+        Ok(create_alias(
+            symbol,
+            name.region,
+            can_vars.clone(),
+            infer_ext_in_output,
+            can_ann.typ,
+            kind,
+        ))
+    } else {
+        Err(())
+    }
 }
 
 /// Canonicalizes a claimed ability implementation like `{ eq }` or `{ eq: myEq }`.
@@ -495,7 +524,7 @@ fn canonicalize_claimed_ability_impl<'a>(
     // OPTION-1: The implementation identifier is the only identifier of that name in the
     //           scope. For example,
     //
-    //             interface F imports [] exposes []
+    //             module []
     //
     //             Hello := {} implements [Encoding.{ toEncoder }]
     //
@@ -507,7 +536,9 @@ fn canonicalize_claimed_ability_impl<'a>(
     // OPTION-2: The implementation identifier is a unique shadow of the ability member,
     //           which has also been explicitly imported. For example,
     //
-    //             interface F imports [Encoding.{ toEncoder }] exposes []
+    //             module []
+    //
+    //             import Encoding exposing [toEncoder]
     //
     //             Hello := {} implements [Encoding.{ toEncoder }]
     //
@@ -547,11 +578,7 @@ fn canonicalize_claimed_ability_impl<'a>(
         }
         AssignedField::RequiredValue(label, _spaces, value) => {
             let impl_ident = match value.value {
-                ast::Expr::Var {
-                    module_name,
-                    ident,
-                    suffixed: _,
-                } => {
+                ast::Expr::Var { module_name, ident } => {
                     if module_name.is_empty() {
                         ident
                     } else {
@@ -707,6 +734,8 @@ fn canonicalize_opaque<'a>(
         AliasKind::Opaque,
     )?;
 
+    let mut references = References::new();
+
     let mut derived_defs = Vec::new();
     if let Some(has_abilities) = has_abilities {
         let has_abilities = has_abilities.value.collection();
@@ -725,7 +754,8 @@ fn canonicalize_opaque<'a>(
             //   Op := {} has [Eq]
             let (ability, members) = match ability.value {
                 ast::TypeAnnotation::Apply(module_name, ident, []) => {
-                    match make_apply_symbol(env, region, scope, module_name, ident) {
+                    match make_apply_symbol(env, region, scope, module_name, ident, &mut references)
+                    {
                         Ok(ability) => {
                             let opt_members = scope
                                 .abilities_store
@@ -918,6 +948,8 @@ fn canonicalize_opaque<'a>(
         }
     }
 
+    output.references.union_mut(&references);
+
     Ok(CanonicalizedOpaque {
         opaque_def: alias,
         derived_defs,
@@ -932,7 +964,12 @@ pub(crate) fn canonicalize_defs<'a>(
     scope: &mut Scope,
     loc_defs: &'a mut roc_parse::ast::Defs<'a>,
     pattern_type: PatternType,
-) -> (CanDefs, Output, MutMap<Symbol, Region>) {
+) -> (
+    CanDefs,
+    Output,
+    MutMap<Symbol, Region>,
+    Vec<IntroducedImport>,
+) {
     // Canonicalizing defs while detecting shadowing involves a multi-step process:
     //
     // 1. Go through each of the patterns.
@@ -959,7 +996,7 @@ pub(crate) fn canonicalize_defs<'a>(
     // there are opaques that implement an ability using a value symbol). But, value symbols might
     // shadow symbols defined in a local ability def.
 
-    for (_, either_index) in loc_defs.tags.iter().enumerate() {
+    for either_index in loc_defs.tags.iter() {
        if let Ok(type_index) = either_index.split() {
            let type_def = &loc_defs.type_defs[type_index.index()];
            let pending_type_def = to_pending_type_def(env, type_def, scope, pattern_type);
@@ -982,6 +1019,7 @@ pub(crate) fn canonicalize_defs<'a>(
                env,
                var_store,
                value_def,
+                region,
                scope,
                &pending_abilities_in_scope,
                &mut output,
@@ -1038,7 +1076,12 @@ fn canonicalize_value_defs<'a>(
     pattern_type: PatternType,
     mut aliases: VecMap<Symbol, Alias>,
     mut symbols_introduced: MutMap<Symbol, Region>,
-) -> (CanDefs, Output, MutMap<Symbol, Region>) {
+) -> (
+    CanDefs,
+    Output,
+    MutMap<Symbol, Region>,
+    Vec<IntroducedImport>,
+) {
     // Canonicalize all the patterns, record shadowing problems, and store
     // the ast::Expr values in pending_exprs for further canonicalization
     // once we've finished assembling the entire scope.
@@ -1047,6 +1090,8 @@ fn canonicalize_value_defs<'a>(
     let mut pending_expects = Vec::with_capacity(value_defs.len());
     let mut pending_expect_fx = Vec::with_capacity(value_defs.len());
 
+    let mut imports_introduced = Vec::with_capacity(value_defs.len());
+
     for loc_pending_def in value_defs {
         match loc_pending_def.value {
             PendingValue::Def(pending_def) => {
@@ -1066,6 +1111,11 @@ fn canonicalize_value_defs<'a>(
             PendingValue::ExpectFx(pending_expect) => {
                 pending_expect_fx.push(pending_expect);
             }
+            PendingValue::ModuleImport(introduced_import) => {
+                imports_introduced.push(introduced_import);
+            }
+            PendingValue::InvalidIngestedFile => { /* skip */ }
+            PendingValue::ImportNameConflict => { /* skip */ }
         }
     }
 
@@ -1175,7 +1225,7 @@ fn canonicalize_value_defs<'a>(
         aliases,
     };
 
-    (can_defs, output, symbols_introduced)
+    (can_defs, output, symbols_introduced, imports_introduced)
 }
 
 struct CanonicalizedTypeDefs<'a> {
@@ -1389,9 +1439,7 @@ fn resolve_abilities(
         );
 
         // Record all the annotation's references in output.references.lookups
-        for symbol in member_annot.references {
-            output.references.insert_type_lookup(symbol);
-        }
+        output.references.union_mut(&member_annot.references);
 
         // What variables in the annotation are bound to the parent ability, and what variables
         // are bound to some other ability?
@@ -2306,6 +2354,75 @@ fn canonicalize_pending_value_def<'a>(
                 None,
             )
         }
+        IngestedFile(loc_pattern, opt_loc_ann, path_literal) => {
+            let relative_path =
+                if let ast::StrLiteral::PlainLine(ingested_path) = path_literal.value {
+                    ingested_path
+                } else {
+                    todo!(
+                        "Only plain strings are supported. Other cases should be made impossible here"
+                    );
+                };
+
+            let mut file_path: PathBuf = env.module_path.into();
+            // Remove the header file name and push the new path.
+            file_path.pop();
+            file_path.push(relative_path);
+
+            let mut bytes = vec![];
+
+            let expr = match fs::File::open(&file_path)
+                .and_then(|mut file| file.read_to_end(&mut bytes))
+            {
+                Ok(_) => Expr::IngestedFile(file_path.into(), Arc::new(bytes), var_store.fresh()),
+                Err(e) => {
+                    env.problems.push(Problem::FileProblem {
+                        filename: file_path.to_path_buf(),
+                        error: e.kind(),
+                    });
+
+                    Expr::RuntimeError(RuntimeError::ReadIngestedFileError {
+                        filename: file_path.to_path_buf(),
+                        error: e.kind(),
+                        region: path_literal.region,
+                    })
+                }
+            };
+
+            let loc_expr = Loc::at(path_literal.region, expr);
+
+            let opt_loc_can_ann = if let Some(loc_ann) = opt_loc_ann {
+                let can_ann = canonicalize_annotation(
+                    env,
+                    scope,
+                    &loc_ann.value,
+                    loc_ann.region,
+                    var_store,
+                    pending_abilities_in_scope,
+                    AnnotationFor::Value,
+                );
+
+                output.references.union_mut(&can_ann.references);
+
+                Some(Loc::at(loc_ann.region, can_ann))
+            } else {
+                None
+            };
+
+            let def = single_can_def(
+                loc_pattern,
+                loc_expr,
+                var_store.fresh(),
+                opt_loc_can_ann,
+                SendMap::default(),
+            );
+
+            DefOutput {
+                output,
+                references: DefReferences::Value(References::new()),
+                def,
+            }
+        }
     };
 
     // Disallow ability specializations that aren't on the toplevel (note: we might loosen this
@@ -2464,7 +2581,7 @@ pub fn can_defs_with_return<'a>(
     loc_defs: &'a mut Defs<'a>,
     loc_ret: &'a Loc<ast::Expr<'a>>,
 ) -> (Expr, Output) {
-    let (unsorted, defs_output, symbols_introduced) = canonicalize_defs(
+    let (unsorted, defs_output, symbols_introduced, imports_introduced) = canonicalize_defs(
         env,
         Output::default(),
         var_store,
@@ -2498,6 +2615,8 @@ pub fn can_defs_with_return<'a>(
         }
     }
 
+    report_unused_imports(imports_introduced, &output.references, env, scope);
+
     let mut loc_expr: Loc<Expr> = ret_expr;
 
     for declaration in declarations.into_iter().rev() {
@@ -2507,6 +2626,28 @@ pub fn can_defs_with_return<'a>(
     (loc_expr.value, output)
 }
 
+pub fn report_unused_imports(
+    imports_introduced: Vec<IntroducedImport>,
+    references: &References,
+    env: &mut Env<'_>,
+    scope: &mut Scope,
+) {
+    for import in imports_introduced {
+        if references.has_module_lookup(import.module_id) {
+            for (symbol, region) in &import.exposed_symbols {
+                if !references.has_unqualified_type_or_value_lookup(*symbol)
+                    && !scope.abilities_store.is_specialization_name(*symbol)
+                    && !import.is_task(env)
+                {
+                    env.problem(Problem::UnusedImport(*symbol, *region));
+                }
+            }
+        } else if !import.is_task(env) {
+            env.problem(Problem::UnusedModuleImport(import.module_id, import.region));
+        }
+    }
+}
+
 fn decl_to_let(decl: Declaration, loc_ret: Loc<Expr>) -> Loc<Expr> {
     match decl {
         Declaration::Declare(def) => {
@@ -2574,10 +2715,9 @@ fn to_pending_alias_or_opaque<'a>(
 
     for loc_var in vars.iter() {
         match loc_var.value {
-            ast::Pattern::Identifier {
-                ident: name,
-                suffixed: _,
-            } if name.chars().next().unwrap().is_lowercase() => {
+            ast::Pattern::Identifier { ident: name, .. }
+                if name.chars().next().unwrap().is_lowercase() =>
+            {
                 let lowercase = Lowercase::from(name);
                 can_rigids.push(Loc {
                     value: lowercase,
@@ -2755,7 +2895,10 @@ enum PendingValue<'a> {
     Dbg(PendingExpectOrDbg<'a>),
     Expect(PendingExpectOrDbg<'a>),
     ExpectFx(PendingExpectOrDbg<'a>),
+    ModuleImport(IntroducedImport),
     SignatureDefMismatch,
+    InvalidIngestedFile,
+    ImportNameConflict,
 }
 
 struct PendingExpectOrDbg<'a> {
@@ -2763,10 +2906,28 @@ struct PendingExpectOrDbg<'a> {
     preceding_comment: Region,
 }
 
+pub struct IntroducedImport {
+    module_id: ModuleId,
+    region: Region,
+    exposed_symbols: Vec<(Symbol, Region)>,
+}
+
+impl IntroducedImport {
+    pub fn is_task(&self, env: &Env<'_>) -> bool {
+        // Temporarily needed for `!` convenience. Can be removed when Task becomes a builtin.
+        match env.qualified_module_ids.get_name(self.module_id) {
+            Some(name) => name.as_inner().as_str() == "Task",
+            None => false,
+        }
+    }
+}
+
+#[allow(clippy::too_many_arguments)]
 fn to_pending_value_def<'a>(
     env: &mut Env<'a>,
     var_store: &mut VarStore,
     def: &'a ast::ValueDef<'a>,
+    region: Region,
     scope: &mut Scope,
     pending_abilities_in_scope: &PendingAbilitiesInScope,
     output: &mut Output,
@@ -2880,6 +3041,116 @@ fn to_pending_value_def<'a>(
             preceding_comment: *preceding_comment,
         }),
 
+        ModuleImport(module_import) => {
+            let qualified_module_name: QualifiedModuleName = module_import.name.value.into();
+            let module_name = qualified_module_name.module.clone();
+            let pq_module_name = qualified_module_name.into_pq_module_name(env.opt_shorthand);
+
+            let module_id = env
+                .qualified_module_ids
+                .get_id(&pq_module_name)
+                .expect("Module id should have been added in load");
+
+            let name_with_alias = match module_import.alias {
+                Some(alias) => ModuleName::from(alias.item.value.as_str()),
+                None => module_name.clone(),
+            };
+
+            if let Err(existing_import) =
+                scope
+                    .modules
+                    .insert(name_with_alias.clone(), module_id, region)
+            {
+                env.problems.push(Problem::ImportNameConflict {
+                    name: name_with_alias,
+                    is_alias: module_import.alias.is_some(),
+                    new_module_id: module_id,
+                    new_import_region: region,
+                    existing_import,
+                });
+
+                return PendingValue::ImportNameConflict;
+            }
+
+            let exposed_names = module_import
+                .exposed
+                .map(|kw| kw.item.items)
+                .unwrap_or_default();
+
+            if exposed_names.is_empty() && !env.home.is_builtin() && module_id.is_automatically_imported() {
+                env.problems.push(Problem::ExplicitBuiltinImport(module_id, region));
+            }
+
+            let exposed_ids = env
+                .dep_idents
+                .get(&module_id)
+                .expect("Module id should have been added in load");
+
+            let mut exposed_symbols = Vec::with_capacity(exposed_names.len());
+
+            for loc_name in exposed_names {
+                let exposed_name = loc_name.value.item();
+                let name = exposed_name.as_str();
+                let ident = Ident::from(name);
+
+                match exposed_ids.get_id(name) {
+                    Some(ident_id) => {
+                        let symbol = Symbol::new(module_id, ident_id);
+                        exposed_symbols.push((symbol, loc_name.region));
+
+                        if let Err((_shadowed_symbol, existing_symbol_region)) = scope.import_symbol(ident, symbol, loc_name.region) {
+                            if symbol.is_automatically_imported() {
+                                env.problem(Problem::ExplicitBuiltinTypeImport(
+                                    symbol,
+                                    loc_name.region,
+                                ));
+                            } else {
+                                env.problem(Problem::ImportShadowsSymbol {
+                                    region: loc_name.region,
+                                    new_symbol: symbol,
+                                    existing_symbol_region,
+                                })
+                            }
+                        }
+                    }
+                    None => {
+                        env.problem(Problem::RuntimeError(RuntimeError::ValueNotExposed {
+                            module_name: module_name.clone(),
+                            ident,
+                            region: loc_name.region,
+                            exposed_values: exposed_ids.exposed_values(),
+                        }))
+                    }
+                }
+
+            }
+
+            PendingValue::ModuleImport(IntroducedImport {
+                module_id,
+                region,
+                exposed_symbols,
+            })
+        }
+        IngestedFileImport(ingested_file) => {
+            let loc_name = ingested_file.name.item;
+
+            let symbol = match scope.introduce(loc_name.value.into(), loc_name.region) {
+                Ok(symbol) => symbol,
+                Err((original, shadow, _)) => {
+                    env.problem(Problem::Shadowing {
+                        original_region: original.region,
+                        shadow,
+                        kind: ShadowKind::Variable
+                    });
+
+                    return PendingValue::InvalidIngestedFile;
+                }
+            };
+
+            let loc_pattern = Loc::at(loc_name.region, Pattern::Identifier(symbol));
+
+            PendingValue::Def(PendingValueDef::IngestedFile(loc_pattern, ingested_file.annotation.map(|ann| ann.annotation), ingested_file.path))
+        }
         Stmt(_) => internal_error!("a Stmt was not desugared correctly, should have been converted to a Body(...) in desguar"),
     }
 }
@@ -25,10 +25,7 @@ fn to_encoder<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
         opaque_ref,
         &*env.arena.alloc([Loc::at(
             DERIVED_REGION,
-            ast::Pattern::Identifier {
-                ident: payload,
-                suffixed: 0,
-            },
+            ast::Pattern::Identifier { ident: payload },
         )]),
     );
 
@@ -37,12 +34,10 @@ fn to_encoder<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
         alloc_expr(ast::Expr::Var {
             module_name: "Encode",
             ident: "toEncoder",
-            suffixed: 0,
         }),
         &*env.arena.alloc([&*alloc_expr(ast::Expr::Var {
             module_name: "",
             ident: payload,
-            suffixed: 0,
         })]),
         roc_module::called_via::CalledVia::Space,
     ));
@@ -67,23 +62,19 @@ fn decoder<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
         alloc_expr(ast::Expr::Var {
             module_name: "Decode",
             ident: "decodeWith",
-            suffixed: 0,
         }),
         env.arena.alloc([
             &*alloc_expr(ast::Expr::Var {
                 module_name: "",
                 ident: bytes,
-                suffixed: 0,
             }),
             alloc_expr(ast::Expr::Var {
                 module_name: "Decode",
                 ident: "decoder",
-                suffixed: 0,
             }),
             alloc_expr(ast::Expr::Var {
                 module_name: "",
                 ident: fmt,
-                suffixed: 0,
             }),
         ]),
         CalledVia::Space,
@@ -94,7 +85,6 @@ fn decoder<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
         alloc_expr(ast::Expr::Var {
             module_name: "Decode",
             ident: "mapResult",
-            suffixed: 0,
         }),
         env.arena.alloc([
             &*alloc_expr(call_decode_with),
@@ -107,20 +97,8 @@ fn decoder<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
     // Decode.mapResult (Decode.decodeWith bytes Decode.decoder fmt) @Opaq
     let custom_closure = ast::Expr::Closure(
         env.arena.alloc([
-            Loc::at(
-                DERIVED_REGION,
-                ast::Pattern::Identifier {
-                    ident: bytes,
-                    suffixed: 0,
-                },
-            ),
-            Loc::at(
-                DERIVED_REGION,
-                ast::Pattern::Identifier {
-                    ident: fmt,
-                    suffixed: 0,
-                },
-            ),
+            Loc::at(DERIVED_REGION, ast::Pattern::Identifier { ident: bytes }),
+            Loc::at(DERIVED_REGION, ast::Pattern::Identifier { ident: fmt }),
         ]),
         alloc_expr(call_map_result),
     );
@@ -130,7 +108,6 @@ fn decoder<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
         alloc_expr(ast::Expr::Var {
             module_name: "Decode",
             ident: "custom",
-            suffixed: 0,
         }),
         env.arena.alloc([&*alloc_expr(custom_closure)]),
         CalledVia::Space,
@@ -153,10 +130,7 @@ fn hash<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
         opaque_ref,
         &*env.arena.alloc([Loc::at(
             DERIVED_REGION,
-            ast::Pattern::Identifier {
-                ident: payload,
-                suffixed: 0,
-            },
+            ast::Pattern::Identifier { ident: payload },
         )]),
     );
 
@@ -165,18 +139,15 @@ fn hash<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
         alloc_expr(ast::Expr::Var {
             module_name: "Hash",
             ident: "hash",
-            suffixed: 0,
         }),
         &*env.arena.alloc([
             &*alloc_expr(ast::Expr::Var {
                 module_name: "",
                 ident: hasher,
-                suffixed: 0,
             }),
             &*alloc_expr(ast::Expr::Var {
                 module_name: "",
                 ident: payload,
-                suffixed: 0,
             }),
         ]),
         roc_module::called_via::CalledVia::Space,
@@ -185,13 +156,7 @@ fn hash<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
     // \hasher, @Opaq payload -> Hash.hash hasher payload
     ast::Expr::Closure(
         env.arena.alloc([
-            Loc::at(
-                DERIVED_REGION,
-                ast::Pattern::Identifier {
-                    ident: hasher,
-                    suffixed: 0,
-                },
-            ),
+            Loc::at(DERIVED_REGION, ast::Pattern::Identifier { ident: hasher }),
             Loc::at(DERIVED_REGION, opaque_apply_pattern),
         ]),
         call_member,
@@ -211,10 +176,7 @@ fn is_eq<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
         opaque_ref,
         &*env.arena.alloc([Loc::at(
             DERIVED_REGION,
-            ast::Pattern::Identifier {
-                ident: payload1,
-                suffixed: 0,
-            },
+            ast::Pattern::Identifier { ident: payload1 },
         )]),
     );
     // \@Opaq payload2
@@ -222,10 +184,7 @@ fn is_eq<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
         opaque_ref,
         &*env.arena.alloc([Loc::at(
             DERIVED_REGION,
-            ast::Pattern::Identifier {
-                ident: payload2,
-                suffixed: 0,
-            },
+            ast::Pattern::Identifier { ident: payload2 },
         )]),
     );
 
@@ -234,18 +193,15 @@ fn is_eq<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
         alloc_expr(ast::Expr::Var {
             module_name: "Bool",
             ident: "isEq",
-            suffixed: 0,
         }),
         &*env.arena.alloc([
             &*alloc_expr(ast::Expr::Var {
                 module_name: "",
                 ident: payload1,
-                suffixed: 0,
             }),
             &*alloc_expr(ast::Expr::Var {
                 module_name: "",
                 ident: payload2,
-                suffixed: 0,
             }),
         ]),
         roc_module::called_via::CalledVia::Space,
@@ -274,10 +230,7 @@ fn to_inspector<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
         opaque_ref,
         &*env.arena.alloc([Loc::at(
             DERIVED_REGION,
-            ast::Pattern::Identifier {
-                ident: payload,
-                suffixed: 0,
-            },
+            ast::Pattern::Identifier { ident: payload },
         )]),
     );
 
@@ -286,12 +239,10 @@ fn to_inspector<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
         alloc_expr(ast::Expr::Var {
             module_name: "Inspect",
             ident: "toInspector",
-            suffixed: 0,
         }),
         &*env.arena.alloc([&*alloc_expr(ast::Expr::Var {
             module_name: "",
             ident: payload,
-            suffixed: 0,
         })]),
         roc_module::called_via::CalledVia::Space,
     ));
@@ -306,7 +257,6 @@ fn to_inspector<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
         alloc_expr(ast::Expr::Var {
             module_name: "Inspect",
             ident: "tag",
-            suffixed: 0,
         }),
         &*env.arena.alloc([&*opaque_name, &*to_inspector_list]),
         roc_module::called_via::CalledVia::Space,
@@ -319,14 +269,12 @@ fn to_inspector<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
         alloc_expr(ast::Expr::Var {
             module_name: "Inspect",
             ident: "apply",
-            suffixed: 0,
         }),
         &*env.arena.alloc([
             &*opaque_inspector,
             &*alloc_expr(ast::Expr::Var {
                 module_name: "",
                 ident: fmt,
-                suffixed: 0,
             }),
         ]),
         roc_module::called_via::CalledVia::Space,
@@ -335,10 +283,7 @@ fn to_inspector<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
     let custom_closure = alloc_expr(ast::Expr::Closure(
         env.arena.alloc([Loc::at(
             DERIVED_REGION,
-            ast::Pattern::Identifier {
-                ident: fmt,
-                suffixed: 0,
-            },
+            ast::Pattern::Identifier { ident: fmt },
         )]),
         apply_opaque_inspector,
     ));
@@ -348,7 +293,6 @@ fn to_inspector<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
         alloc_expr(ast::Expr::Var {
             module_name: "Inspect",
             ident: "custom",
-            suffixed: 0,
         }),
         env.arena.alloc([&*custom_closure]),
         CalledVia::Space,
@@ -9,7 +9,7 @@ use roc_module::called_via::{BinOp, CalledVia};
 use roc_module::ident::ModuleName;
 use roc_parse::ast::Expr::{self, *};
 use roc_parse::ast::{
-    wrap_in_task_ok, AssignedField, Collection, Pattern, RecordBuilderField, StrLiteral,
+    AssignedField, Collection, ModuleImportParams, Pattern, RecordBuilderField, StrLiteral,
     StrSegment, ValueDef, WhenBranch,
 };
 use roc_region::all::{LineInfo, Loc, Region};
@@ -57,11 +57,7 @@ fn new_op_call_expr<'a>(
             let args = arena.alloc([left, right]);
 
             let loc_expr = arena.alloc(Loc {
-                value: Expr::Var {
-                    module_name,
-                    ident,
-                    suffixed: 0,
-                },
+                value: Expr::Var { module_name, ident },
                 region: loc_op.region,
             });
 
@@ -134,6 +130,28 @@ fn desugar_value_def<'a>(
                 preceding_comment: *preceding_comment,
             }
         }
+        ModuleImport(roc_parse::ast::ModuleImport {
+            before_name,
+            name,
+            params,
+            alias,
+            exposed,
+        }) => {
+            let desugared_params =
+                params.map(|ModuleImportParams { before, params }| ModuleImportParams {
+                    before,
+                    params: desugar_field_collection(arena, params, src, line_info, module_path),
+                });
+
+            ModuleImport(roc_parse::ast::ModuleImport {
+                before_name,
+                name: *name,
+                params: desugared_params,
+                alias: *alias,
+                exposed: *exposed,
+            })
+        }
+        IngestedFileImport(_) => *def,
 
         Stmt(stmt_expr) => {
             // desugar into a Body({}, stmt_expr)
@@ -161,8 +179,8 @@ pub fn desugar_defs_node_values<'a>(
         *value_def = desugar_value_def(arena, arena.alloc(*value_def), src, line_info, module_path);
     }
 
-    // `desugar_defs_node_values` is called recursively in `desugar_expr` and we
-    // only we only want to unwrap suffixed nodes if they are a top level def.
+    // `desugar_defs_node_values` is called recursively in `desugar_expr`
+    // and we only want to unwrap suffixed nodes if they are a top level def.
     //
     // check here first so we only unwrap the expressions once, and after they have
     // been desugared
@@ -176,7 +194,7 @@ pub fn desugar_defs_node_values<'a>(
 /// For each top-level ValueDef in our module, we will unwrap any suffixed
 /// expressions
 ///
-/// e.g. `say! "hi"` desugars to `Task.await (say "hi") -> \{} -> ...`
+/// e.g. `say! "hi"` desugars to `Task.await (say "hi") \{} -> ...`
 pub fn desugar_value_def_suffixed<'a>(arena: &'a Bump, value_def: ValueDef<'a>) -> ValueDef<'a> {
     use ValueDef::*;
 
@@ -193,13 +211,7 @@ pub fn desugar_value_def_suffixed<'a>(arena: &'a Bump, value_def: ValueDef<'a>)
                 arena,
                 Body(
                     loc_pattern,
-                    apply_task_await(
-                        arena,
-                        loc_expr.region,
-                        sub_arg,
-                        sub_pat,
-                        wrap_in_task_ok(arena, sub_new),
-                    ),
+                    apply_task_await(arena, loc_expr.region, sub_arg, sub_pat, sub_new),
                 ),
             ),
             Err(..) => Body(
@@ -241,7 +253,7 @@ pub fn desugar_value_def_suffixed<'a>(arena: &'a Bump, value_def: ValueDef<'a>)
                         body_expr.region,
                         sub_arg,
                         sub_pat,
-                        wrap_in_task_ok(arena, sub_new),
+                        sub_new,
                     ),
                 },
             ),
@@ -257,6 +269,7 @@ pub fn desugar_value_def_suffixed<'a>(arena: &'a Bump, value_def: ValueDef<'a>)
 
         // TODO support desugaring of Dbg, Expect, and ExpectFx
         Dbg { .. } | Expect { .. } | ExpectFx { .. } => value_def,
+        ModuleImport { .. } | IngestedFileImport(_) => value_def,
 
         Stmt(..) => {
             internal_error!(
@@ -291,7 +304,6 @@ pub fn desugar_expr<'a>(
         | UnappliedRecordBuilder { .. }
         | Tag(_)
         | OpaqueRef(_)
-        | IngestedFile(_, _)
         | Crash => loc_expr,
 
         Str(str_literal) => match str_literal {
@@ -342,6 +354,15 @@ pub fn desugar_expr<'a>(
 
             arena.alloc(Loc { region, value })
         }
+        // desugar the sub_expression, but leave the TaskAwaitBang as this will
+        // be unwrapped later in desugar_value_def_suffixed
+        TaskAwaitBang(sub_expr) => {
+            let intermediate = arena.alloc(Loc::at(loc_expr.region, **sub_expr));
+            let new_sub_loc_expr = desugar_expr(arena, intermediate, src, line_info, module_path);
+            let new_sub_expr = arena.alloc(new_sub_loc_expr.value);
+
+            arena.alloc(Loc::at(loc_expr.region, TaskAwaitBang(new_sub_expr)))
+        }
         RecordAccess(sub_expr, paths) => {
             let region = loc_expr.region;
             let loc_sub_expr = Loc {
@@ -377,15 +398,7 @@ pub fn desugar_expr<'a>(
             })
         }
         Record(fields) => {
-            let mut allocated = Vec::with_capacity_in(fields.len(), arena);
-            for field in fields.iter() {
-                let value = desugar_field(arena, &field.value, src, line_info, module_path);
-                allocated.push(Loc {
-                    value,
-                    region: field.region,
-                });
-            }
-            let fields = fields.replace_items(allocated.into_bump_slice());
+            let fields = desugar_field_collection(arena, *fields, src, line_info, module_path);
             arena.alloc(Loc {
                 region: loc_expr.region,
                 value: Record(fields),
@@ -601,12 +614,10 @@ pub fn desugar_expr<'a>(
                 Negate => Var {
                     module_name: ModuleName::NUM,
                     ident: "neg",
-                    suffixed: 0,
                 },
                 Not => Var {
                     module_name: ModuleName::BOOL,
                     ident: "not",
-                    suffixed: 0,
                 },
             };
             let loc_fn_var = arena.alloc(Loc { region, value });
@@ -704,7 +715,6 @@ pub fn desugar_expr<'a>(
             let inspect_fn = Var {
                 module_name: ModuleName::INSPECT,
                 ident: "toStr",
-                suffixed: 0,
             };
             let loc_inspect_fn_var = arena.alloc(Loc {
                 value: inspect_fn,
@@ -759,7 +769,6 @@ pub fn desugar_expr<'a>(
                     Expr::Var {
                         module_name: ModuleName::TASK,
                         ident: "ok",
-                        suffixed: 0,
                     },
                 )),
                 arena.alloc(apply_args),
@@ -823,6 +832,24 @@ fn desugar_str_segments<'a>(
     .into_bump_slice()
 }
 
+fn desugar_field_collection<'a>(
+    arena: &'a Bump,
+    fields: Collection<'a, Loc<AssignedField<'a, Expr<'a>>>>,
+    src: &'a str,
+    line_info: &mut Option<LineInfo>,
+    module_path: &str,
+) -> Collection<'a, Loc<AssignedField<'a, Expr<'a>>>> {
+    let mut allocated = Vec::with_capacity_in(fields.len(), arena);
+
+    for field in fields.iter() {
+        let value = desugar_field(arena, &field.value, src, line_info, module_path);
+
+        allocated.push(Loc::at(field.region, value));
+    }
+
+    fields.replace_items(allocated.into_bump_slice())
+}
+
 fn desugar_field<'a>(
     arena: &'a Bump,
     field: &'a AssignedField<'a, Expr<'a>>,
@@ -855,7 +882,6 @@ fn desugar_field<'a>(
                 value: Var {
                     module_name: "",
                     ident: loc_str.value,
-                    suffixed: 0,
                 },
                 region: loc_str.region,
             };
@@ -1036,7 +1062,6 @@ fn record_builder_arg<'a>(
                 value: Expr::Var {
                     module_name: "",
                     ident: arena.alloc("#".to_owned() + label.value),
-                    suffixed: 0,
                 },
             });
 
@@ -1076,10 +1101,7 @@ fn record_builder_arg<'a>(
 
     for label in apply_field_names.iter().rev() {
         let name = arena.alloc("#".to_owned() + label.value);
-        let ident = roc_parse::ast::Pattern::Identifier {
-            ident: name,
-            suffixed: 0,
-        };
+        let ident = roc_parse::ast::Pattern::Identifier { ident: name };
 
         let arg_pattern = arena.alloc(Loc {
             value: ident,
@@ -1,9 +1,11 @@
+use std::path::Path;
+
 use crate::procedure::References;
 use crate::scope::Scope;
 use bumpalo::Bump;
 use roc_collections::{MutMap, VecSet};
-use roc_module::ident::{Ident, Lowercase, ModuleName};
-use roc_module::symbol::{IdentIdsByModule, ModuleId, ModuleIds, Symbol};
+use roc_module::ident::{Ident, ModuleName};
+use roc_module::symbol::{IdentIdsByModule, ModuleId, PQModuleName, PackageModuleIds, Symbol};
 use roc_problem::can::{Problem, RuntimeError};
 use roc_region::all::{Loc, Region};
 
@@ -13,9 +15,11 @@ pub struct Env<'a> {
     /// are assumed to be relative to this path.
     pub home: ModuleId,
 
+    pub module_path: &'a Path,
+
     pub dep_idents: &'a IdentIdsByModule,
 
-    pub module_ids: &'a ModuleIds,
+    pub qualified_module_ids: &'a PackageModuleIds<'a>,
 
     /// Problems we've encountered along the way, which will be reported to the user at the end.
     pub problems: Vec<Problem>,
@@ -35,26 +39,32 @@ pub struct Env<'a> {
     pub top_level_symbols: VecSet<Symbol>,
 
     pub arena: &'a Bump,
 
+    pub opt_shorthand: Option<&'a str>,
 }
 
 impl<'a> Env<'a> {
     pub fn new(
         arena: &'a Bump,
         home: ModuleId,
+        module_path: &'a Path,
         dep_idents: &'a IdentIdsByModule,
-        module_ids: &'a ModuleIds,
+        qualified_module_ids: &'a PackageModuleIds<'a>,
+        opt_shorthand: Option<&'a str>,
     ) -> Env<'a> {
         Env {
             arena,
             home,
+            module_path,
             dep_idents,
-            module_ids,
+            qualified_module_ids,
             problems: Vec::new(),
             closures: MutMap::default(),
             qualified_value_lookups: VecSet::default(),
             qualified_type_lookups: VecSet::default(),
            tailcallable_symbol: None,
            top_level_symbols: VecSet::default(),
+            opt_shorthand,
         }
     }
 
@@ -72,17 +82,20 @@ impl<'a> Env<'a> {
 
        let module_name = ModuleName::from(module_name_str);
 
-        match self.module_ids.get_id(&module_name) {
+        match scope.modules.get_id(&module_name) {
            Some(module_id) => self.qualified_lookup_help(scope, module_id, ident, region),
            None => Err(RuntimeError::ModuleNotImported {
-                module_name,
-                imported_modules: self
-                    .module_ids
-                    .available_modules()
+                module_name: module_name.clone(),
+                imported_modules: scope
+                    .modules
+                    .available_names()
                    .map(|string| string.as_ref().into())
                    .collect(),
                region,
-                module_exists: false,
+                module_exists: self
+                    .qualified_module_ids
+                    .get_id(&PQModuleName::Unqualified(module_name))
+                    .is_some(),
            }),
        }
    }
@@ -94,7 +107,11 @@ impl<'a> Env<'a> {
        ident: &str,
        region: Region,
    ) -> Result<Symbol, RuntimeError> {
-        self.qualified_lookup_help(scope, module_id, ident, region)
+        if !scope.modules.has_id(module_id) {
+            Err(self.module_exists_but_not_imported(scope, module_id, region))
+        } else {
+            self.qualified_lookup_help(scope, module_id, ident, region)
+        }
    }
 
    /// Returns Err if the symbol resolved, but it was not exposed by the given module
@@ -153,43 +170,46 @@ impl<'a> Env<'a> {
 
                        Ok(symbol)
                    }
-                    None => {
-                        let exposed_values = exposed_ids
-                            .ident_strs()
-                            .filter(|(_, ident)| ident.starts_with(|c: char| c.is_lowercase()))
-                            .map(|(_, ident)| Lowercase::from(ident))
-                            .collect();
-                        Err(RuntimeError::ValueNotExposed {
-                            module_name: self
-                                .module_ids
-                                .get_name(module_id)
-                                .expect("Module ID known, but not in the module IDs somehow")
-                                .clone(),
-                            ident: Ident::from(ident),
-                            region,
-                            exposed_values,
-                        })
-                    }
+                    None => Err(RuntimeError::ValueNotExposed {
+                        module_name: self
+                            .qualified_module_ids
+                            .get_name(module_id)
+                            .expect("Module ID known, but not in the module IDs somehow")
+                            .as_inner()
+                            .clone(),
+                        ident: Ident::from(ident),
+                        region,
+                        exposed_values: exposed_ids.exposed_values(),
+                    }),
                },
-                None => Err(RuntimeError::ModuleNotImported {
-                    module_name: self
-                        .module_ids
-                        .get_name(module_id)
-                        .expect("Module ID known, but not in the module IDs somehow")
-                        .clone(),
-                    imported_modules: self
-                        .dep_idents
-                        .keys()
-                        .filter_map(|module_id| self.module_ids.get_name(*module_id))
-                        .map(|module_name| module_name.as_ref().into())
-                        .collect(),
-                    region,
-                    module_exists: true,
-                }),
+                _ => Err(self.module_exists_but_not_imported(scope, module_id, region)),
            }
        }
    }
 
+    fn module_exists_but_not_imported(
+        &self,
+        scope: &Scope,
+        module_id: ModuleId,
+        region: Region,
+    ) -> RuntimeError {
+        RuntimeError::ModuleNotImported {
+            module_name: self
+                .qualified_module_ids
+                .get_name(module_id)
+                .expect("Module ID known, but not in the module IDs somehow")
+                .as_inner()
+                .clone(),
+            imported_modules: scope
+                .modules
+                .available_names()
+                .map(|string| string.as_ref().into())
+                .collect(),
+            region,
+            module_exists: true,
+        }
+    }
+
    pub fn problem(&mut self, problem: Problem) {
        self.problems.push(problem)
    }
@@ -8,7 +8,7 @@ use crate::num::{
     int_expr_from_result, num_expr_from_result, FloatBound, IntBound, NumBound,
 };
 use crate::pattern::{canonicalize_pattern, BindingsFromPattern, Pattern, PermitShadows};
-use crate::procedure::References;
+use crate::procedure::{QualifiedReference, References};
 use crate::scope::Scope;
 use crate::traverse::{walk_expr, Visitor};
 use roc_collections::soa::Index;
@@ -27,8 +27,6 @@ use roc_types::num::SingleQuoteBound;
 use roc_types::subs::{ExhaustiveMark, IllegalCycleMark, RedundantMark, VarStore, Variable};
 use roc_types::types::{Alias, Category, IndexOrField, LambdaSet, OptAbleVar, Type};
 use std::fmt::{Debug, Display};
-use std::fs::File;
-use std::io::Read;
 use std::path::PathBuf;
 use std::sync::Arc;
 use std::{char, u32};
@@ -742,48 +740,6 @@ pub fn canonicalize_expr<'a>(
 
         ast::Expr::Str(literal) => flatten_str_literal(env, var_store, scope, literal),
 
-        ast::Expr::IngestedFile(file_path, _) => match File::open(file_path) {
-            Ok(mut file) => {
-                let mut bytes = vec![];
-                match file.read_to_end(&mut bytes) {
-                    Ok(_) => (
-                        Expr::IngestedFile(
-                            file_path.to_path_buf().into(),
-                            Arc::new(bytes),
-                            var_store.fresh(),
-                        ),
-                        Output::default(),
-                    ),
-                    Err(e) => {
-                        env.problems.push(Problem::FileProblem {
-                            filename: file_path.to_path_buf(),
-                            error: e.kind(),
-                        });
-
-                        // This will not manifest as a real runtime error and is just returned to have a value here.
-                        // The pushed FileProblem will be fatal to compilation.
-                        (
-                            Expr::RuntimeError(roc_problem::can::RuntimeError::NoImplementation),
-                            Output::default(),
-                        )
-                    }
-                }
-            }
-            Err(e) => {
-                env.problems.push(Problem::FileProblem {
-                    filename: file_path.to_path_buf(),
-                    error: e.kind(),
-                });
-
-                // This will not manifest as a real runtime error and is just returned to have a value here.
-                // The pushed FileProblem will be fatal to compilation.
-                (
-                    Expr::RuntimeError(roc_problem::can::RuntimeError::NoImplementation),
-                    Output::default(),
-                )
-            }
-        },
-
         ast::Expr::SingleQuote(string) => {
             let mut it = string.chars().peekable();
             if let Some(char) = it.next() {
@@ -885,7 +841,9 @@ pub fn canonicalize_expr<'a>(
                 }
                 Ok((name, opaque_def)) => {
                     let argument = Box::new(args.pop().unwrap());
-                    output.references.insert_type_lookup(name);
+                    output
+                        .references
+                        .insert_type_lookup(name, QualifiedReference::Unqualified);
 
                     let (type_arguments, lambda_set_variables, specialized_def_type) =
                         freshen_opaque_def(var_store, opaque_def);
@@ -1019,11 +977,9 @@ pub fn canonicalize_expr<'a>(
                 (expr, output)
             }
         }
-        ast::Expr::Var {
-            module_name,
-            ident,
-            suffixed: _, // TODO should we use suffixed here?
-        } => canonicalize_var_lookup(env, var_store, scope, module_name, ident, region),
+        ast::Expr::Var { module_name, ident } => {
+            canonicalize_var_lookup(env, var_store, scope, module_name, ident, region)
+        }
         ast::Expr::Underscore(name) => {
             // we parse underscores, but they are not valid expression syntax
 
@@ -1171,6 +1127,7 @@ pub fn canonicalize_expr<'a>(
                 output,
             )
         }
+        ast::Expr::TaskAwaitBang(..) => internal_error!("a Expr::TaskAwaitBang expression was not completely removed in desugar_value_def_suffixed"),
        ast::Expr::Tag(tag) => {
            let variant_var = var_store.fresh();
            let ext_var = var_store.fresh();
@@ -1198,7 +1155,9 @@ pub fn canonicalize_expr<'a>(
                }
                Ok((name, opaque_def)) => {
                    let mut output = Output::default();
-                    output.references.insert_type_lookup(name);
+                    output
+                        .references
+                        .insert_type_lookup(name, QualifiedReference::Unqualified);
 
                    let (type_arguments, lambda_set_variables, specialized_def_type) =
                        freshen_opaque_def(var_store, opaque_def);
@@ -1893,7 +1852,9 @@ fn canonicalize_var_lookup(
        // Look it up in scope!
        match scope.lookup_str(ident, region) {
            Ok(symbol) => {
-                output.references.insert_value_lookup(symbol);
+                output
+                    .references
+                    .insert_value_lookup(symbol, QualifiedReference::Unqualified);
 
                if scope.abilities_store.is_ability_member_name(symbol) {
                    AbilityMember(
@@ -1916,7 +1877,9 @@ fn canonicalize_var_lookup(
        // Look it up in the env!
        match env.qualified_lookup(scope, module_name, ident, region) {
            Ok(symbol) => {
-                output.references.insert_value_lookup(symbol);
+                output
+                    .references
+                    .insert_value_lookup(symbol, QualifiedReference::Qualified);
 
                if scope.abilities_store.is_ability_member_name(symbol) {
                    AbilityMember(
@@ -2427,7 +2390,6 @@ pub fn is_valid_interpolation(expr: &ast::Expr<'_>) -> bool {
        | ast::Expr::Expect(_, _)
        | ast::Expr::When(_, _)
        | ast::Expr::Backpassing(_, _, _)
-        | ast::Expr::IngestedFile(_, _)
        | ast::Expr::SpaceBefore(_, _)
        | ast::Expr::Str(StrLiteral::Block(_))
        | ast::Expr::SpaceAfter(_, _)
@@ -2464,7 +2426,8 @@ pub fn is_valid_interpolation(expr: &ast::Expr<'_>) -> bool {
        | ast::Expr::Closure(_, loc_expr) => is_valid_interpolation(&loc_expr.value),
        ast::Expr::TupleAccess(sub_expr, _)
        | ast::Expr::ParensAround(sub_expr)
-        | ast::Expr::RecordAccess(sub_expr, _) => is_valid_interpolation(sub_expr),
+        | ast::Expr::RecordAccess(sub_expr, _)
+        | ast::Expr::TaskAwaitBang(sub_expr) => is_valid_interpolation(sub_expr),
        ast::Expr::Apply(loc_expr, args, _called_via) => {
            is_valid_interpolation(&loc_expr.value)
                && args
@ -1,19 +1,22 @@
|
||||||
|
use std::path::Path;
|
||||||
|
|
||||||
use crate::abilities::{AbilitiesStore, ImplKey, PendingAbilitiesStore, ResolvedImpl};
|
use crate::abilities::{AbilitiesStore, ImplKey, PendingAbilitiesStore, ResolvedImpl};
|
||||||
use crate::annotation::{canonicalize_annotation, AnnotationFor};
|
use crate::annotation::{canonicalize_annotation, AnnotationFor};
|
||||||
use crate::def::{canonicalize_defs, Def};
|
use crate::def::{canonicalize_defs, report_unused_imports, Def};
|
||||||
use crate::effect_module::HostedGeneratedFunctions;
|
use crate::effect_module::HostedGeneratedFunctions;
|
||||||
use crate::env::Env;
|
use crate::env::Env;
|
||||||
use crate::expr::{
|
use crate::expr::{
|
||||||
ClosureData, DbgLookup, Declarations, ExpectLookup, Expr, Output, PendingDerives,
|
ClosureData, DbgLookup, Declarations, ExpectLookup, Expr, Output, PendingDerives,
|
||||||
};
|
};
|
||||||
use crate::pattern::{BindingsFromPattern, Pattern};
|
use crate::pattern::{BindingsFromPattern, Pattern};
|
||||||
|
use crate::procedure::References;
|
||||||
use crate::scope::Scope;
|
use crate::scope::Scope;
|
||||||
use bumpalo::Bump;
|
use bumpalo::Bump;
|
||||||
use roc_collections::{MutMap, SendMap, VecMap, VecSet};
|
use roc_collections::{MutMap, SendMap, VecMap, VecSet};
|
||||||
use roc_error_macros::internal_error;
|
use roc_error_macros::internal_error;
|
||||||
use roc_module::ident::Ident;
|
use roc_module::ident::Ident;
|
||||||
use roc_module::ident::Lowercase;
|
use roc_module::ident::Lowercase;
|
||||||
use roc_module::symbol::{IdentIds, IdentIdsByModule, ModuleId, ModuleIds, Symbol};
|
use roc_module::symbol::{IdentIds, IdentIdsByModule, ModuleId, PackageModuleIds, Symbol};
|
||||||
use roc_parse::ast::{Defs, TypeAnnotation};
|
use roc_parse::ast::{Defs, TypeAnnotation};
|
||||||
use roc_parse::header::HeaderType;
|
use roc_parse::header::HeaderType;
|
||||||
use roc_parse::pattern::PatternType;
|
use roc_parse::pattern::PatternType;
|
||||||
|
@ -127,7 +130,6 @@ pub struct Module {
|
||||||
pub exposed_imports: MutMap<Symbol, Region>,
|
pub exposed_imports: MutMap<Symbol, Region>,
|
||||||
pub exposed_symbols: VecSet<Symbol>,
|
pub exposed_symbols: VecSet<Symbol>,
|
||||||
pub referenced_values: VecSet<Symbol>,
|
pub referenced_values: VecSet<Symbol>,
|
||||||
pub referenced_types: VecSet<Symbol>,
|
|
||||||
/// all aliases. `bool` indicates whether it is exposed
|
/// all aliases. `bool` indicates whether it is exposed
|
||||||
pub aliases: MutMap<Symbol, (bool, Alias)>,
|
pub aliases: MutMap<Symbol, (bool, Alias)>,
|
||||||
pub rigid_variables: RigidVariables,
|
pub rigid_variables: RigidVariables,
|
||||||
|
@ -152,7 +154,6 @@ pub struct ModuleOutput {
|
||||||
pub exposed_symbols: VecSet<Symbol>,
|
pub exposed_symbols: VecSet<Symbol>,
|
||||||
pub problems: Vec<Problem>,
|
pub problems: Vec<Problem>,
|
||||||
pub referenced_values: VecSet<Symbol>,
|
pub referenced_values: VecSet<Symbol>,
|
||||||
pub referenced_types: VecSet<Symbol>,
|
|
||||||
pub symbols_from_requires: Vec<(Loc<Symbol>, Loc<Type>)>,
|
pub symbols_from_requires: Vec<(Loc<Symbol>, Loc<Type>)>,
|
||||||
pub pending_derives: PendingDerives,
|
pub pending_derives: PendingDerives,
|
||||||
pub scope: Scope,
|
pub scope: Scope,
|
||||||
|
@ -275,21 +276,38 @@ pub fn canonicalize_module_defs<'a>(
|
||||||
loc_defs: &'a mut Defs<'a>,
|
loc_defs: &'a mut Defs<'a>,
|
||||||
header_type: &roc_parse::header::HeaderType,
|
header_type: &roc_parse::header::HeaderType,
|
||||||
home: ModuleId,
|
home: ModuleId,
|
||||||
module_path: &str,
|
module_path: &'a str,
|
||||||
src: &'a str,
|
src: &'a str,
|
||||||
module_ids: &'a ModuleIds,
|
qualified_module_ids: &'a PackageModuleIds<'a>,
|
||||||
exposed_ident_ids: IdentIds,
|
exposed_ident_ids: IdentIds,
|
||||||
dep_idents: &'a IdentIdsByModule,
|
dep_idents: &'a IdentIdsByModule,
|
||||||
aliases: MutMap<Symbol, Alias>,
|
aliases: MutMap<Symbol, Alias>,
|
||||||
imported_abilities_state: PendingAbilitiesStore,
|
imported_abilities_state: PendingAbilitiesStore,
|
||||||
exposed_imports: MutMap<Ident, (Symbol, Region)>,
|
initial_scope: MutMap<Ident, (Symbol, Region)>,
|
||||||
exposed_symbols: VecSet<Symbol>,
|
exposed_symbols: VecSet<Symbol>,
|
||||||
symbols_from_requires: &[(Loc<Symbol>, Loc<TypeAnnotation<'a>>)],
|
symbols_from_requires: &[(Loc<Symbol>, Loc<TypeAnnotation<'a>>)],
|
||||||
var_store: &mut VarStore,
|
var_store: &mut VarStore,
|
||||||
|
opt_shorthand: Option<&'a str>,
|
||||||
) -> ModuleOutput {
|
) -> ModuleOutput {
|
||||||
let mut can_exposed_imports = MutMap::default();
|
let mut can_exposed_imports = MutMap::default();
|
||||||
let mut scope = Scope::new(home, exposed_ident_ids, imported_abilities_state);
|
let mut scope = Scope::new(
|
||||||
let mut env = Env::new(arena, home, dep_idents, module_ids);
|
home,
|
||||||
|
qualified_module_ids
|
||||||
|
.get_name(home)
|
||||||
|
.expect("home module not found")
|
||||||
|
.as_inner()
|
||||||
|
.to_owned(),
|
||||||
|
exposed_ident_ids,
|
||||||
|
imported_abilities_state,
|
||||||
|
);
|
||||||
|
let mut env = Env::new(
|
||||||
|
arena,
|
||||||
|
home,
|
||||||
|
arena.alloc(Path::new(module_path)),
|
||||||
|
dep_idents,
|
||||||
|
qualified_module_ids,
|
||||||
|
opt_shorthand,
|
||||||
|
);
|
||||||
|
|
||||||
for (name, alias) in aliases.into_iter() {
|
for (name, alias) in aliases.into_iter() {
|
||||||
scope.add_alias(
|
scope.add_alias(
|
||||||
|
@ -317,26 +335,21 @@ pub fn canonicalize_module_defs<'a>(
|
||||||
|
|
||||||
let mut rigid_variables = RigidVariables::default();
|
let mut rigid_variables = RigidVariables::default();
|
||||||
|
|
||||||
// Exposed values are treated like defs that appear before any others, e.g.
|
// Initial scope values are treated like defs that appear before any others.
|
||||||
//
|
// They include builtin types that are automatically imported, and for a platform
|
||||||
// imports [Foo.{ bar, baz }]
|
// package, the required values from the app.
|
||||||
//
|
|
||||||
// ...is basically the same as if we'd added these extra defs at the start of the module:
|
|
||||||
//
|
|
||||||
// bar = Foo.bar
|
|
||||||
// baz = Foo.baz
|
|
||||||
//
|
//
|
||||||
// Here we essentially add those "defs" to "the beginning of the module"
|
// Here we essentially add those "defs" to "the beginning of the module"
|
||||||
// by canonicalizing them right before we canonicalize the actual ast::Def nodes.
|
// by canonicalizing them right before we canonicalize the actual ast::Def nodes.
|
||||||
for (ident, (symbol, region)) in exposed_imports {
|
for (ident, (symbol, region)) in initial_scope {
|
||||||
let first_char = ident.as_inline_str().as_str().chars().next().unwrap();
|
let first_char = ident.as_inline_str().as_str().chars().next().unwrap();
|
||||||
|
|
||||||
if first_char.is_lowercase() {
|
if first_char.is_lowercase() {
|
||||||
match scope.import(ident, symbol, region) {
|
match scope.import_symbol(ident, symbol, region) {
|
||||||
Ok(()) => {
|
Ok(()) => {
|
||||||
// Add an entry to exposed_imports using the current module's name
|
// Add an entry to exposed_imports using the current module's name
|
||||||
// as the key; e.g. if this is the Foo module and we have
|
// as the key; e.g. if this is the Foo module and we have
|
||||||
// exposes [Bar.{ baz }] then insert Foo.baz as the key, so when
|
// Bar exposes [baz] then insert Foo.baz as the key, so when
|
||||||
// anything references `baz` in this Foo module, it will resolve to Bar.baz.
|
// anything references `baz` in this Foo module, it will resolve to Bar.baz.
|
||||||
can_exposed_imports.insert(symbol, region);
|
can_exposed_imports.insert(symbol, region);
|
||||||
}
|
}
|
||||||
|
@ -355,7 +368,7 @@ pub fn canonicalize_module_defs<'a>(
|
||||||
|
|
||||||
// but now we know this symbol by a different identifier, so we still need to add it to
|
// but now we know this symbol by a different identifier, so we still need to add it to
|
||||||
// the scope
|
// the scope
|
||||||
match scope.import(ident, symbol, region) {
|
match scope.import_symbol(ident, symbol, region) {
|
||||||
Ok(()) => {
|
Ok(()) => {
|
||||||
// here we do nothing special
|
// here we do nothing special
|
||||||
}
|
}
|
||||||
|
@ -369,7 +382,7 @@ pub fn canonicalize_module_defs<'a>(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let (defs, output, symbols_introduced) = canonicalize_defs(
|
let (defs, output, symbols_introduced, imports_introduced) = canonicalize_defs(
|
||||||
&mut env,
|
&mut env,
|
||||||
Output::default(),
|
Output::default(),
|
||||||
var_store,
|
var_store,
|
||||||
|
@ -410,18 +423,15 @@ pub fn canonicalize_module_defs<'a>(
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut referenced_values = VecSet::default();
|
let mut referenced_values = VecSet::default();
|
||||||
let mut referenced_types = VecSet::default();
|
|
||||||
|
|
||||||
// Gather up all the symbols that were referenced across all the defs' lookups.
|
// Gather up all the symbols that were referenced across all the defs' lookups.
|
||||||
referenced_values.extend(output.references.value_lookups().copied());
|
referenced_values.extend(output.references.value_lookups().copied());
|
||||||
referenced_types.extend(output.references.type_lookups().copied());
|
|
||||||
|
|
||||||
// Gather up all the symbols that were referenced across all the defs' calls.
|
// Gather up all the symbols that were referenced across all the defs' calls.
|
||||||
referenced_values.extend(output.references.calls().copied());
|
referenced_values.extend(output.references.calls().copied());
|
||||||
|
|
||||||
// Gather up all the symbols that were referenced from other modules.
|
// Gather up all the symbols that were referenced from other modules.
|
||||||
referenced_values.extend(env.qualified_value_lookups.iter().copied());
|
referenced_values.extend(env.qualified_value_lookups.iter().copied());
|
||||||
referenced_types.extend(env.qualified_type_lookups.iter().copied());
|
|
||||||
|
|
||||||
// NOTE previously we inserted builtin defs into the list of defs here
|
// NOTE previously we inserted builtin defs into the list of defs here
|
||||||
// this is now done later, in file.rs.
|
// this is now done later, in file.rs.
|
||||||
|
@ -433,6 +443,7 @@ pub fn canonicalize_module_defs<'a>(
|
||||||
|
|
||||||
let new_output = Output {
|
let new_output = Output {
|
||||||
aliases: output.aliases,
|
aliases: output.aliases,
|
||||||
|
references: output.references,
|
||||||
..Default::default()
|
..Default::default()
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -482,6 +493,8 @@ pub fn canonicalize_module_defs<'a>(
|
||||||
})
|
})
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
|
report_unused_imports(imports_introduced, &output.references, &mut env, &mut scope);
|
||||||
|
|
||||||
if let GeneratedInfo::Hosted {
|
if let GeneratedInfo::Hosted {
|
||||||
effect_symbol,
|
effect_symbol,
|
||||||
generated_functions,
|
generated_functions,
|
||||||
|
@ -545,7 +558,7 @@ pub fn canonicalize_module_defs<'a>(
|
||||||
let annotation = crate::annotation::Annotation {
|
let annotation = crate::annotation::Annotation {
|
||||||
typ: def_annotation.signature,
|
typ: def_annotation.signature,
|
||||||
introduced_variables: def_annotation.introduced_variables,
|
introduced_variables: def_annotation.introduced_variables,
|
||||||
references: Default::default(),
|
references: References::new(),
|
||||||
aliases: Default::default(),
|
aliases: Default::default(),
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -603,7 +616,7 @@ pub fn canonicalize_module_defs<'a>(
|
||||||
let annotation = crate::annotation::Annotation {
|
let annotation = crate::annotation::Annotation {
|
||||||
typ: def_annotation.signature,
|
typ: def_annotation.signature,
|
||||||
introduced_variables: def_annotation.introduced_variables,
|
introduced_variables: def_annotation.introduced_variables,
|
||||||
references: Default::default(),
|
references: References::new(),
|
||||||
aliases: Default::default(),
|
aliases: Default::default(),
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -700,14 +713,12 @@ pub fn canonicalize_module_defs<'a>(
|
||||||
|
|
||||||
// Incorporate any remaining output.lookups entries into references.
|
// Incorporate any remaining output.lookups entries into references.
|
||||||
referenced_values.extend(output.references.value_lookups().copied());
|
referenced_values.extend(output.references.value_lookups().copied());
|
||||||
referenced_types.extend(output.references.type_lookups().copied());
|
|
||||||
|
|
||||||
// Incorporate any remaining output.calls entries into references.
|
// Incorporate any remaining output.calls entries into references.
|
||||||
referenced_values.extend(output.references.calls().copied());
|
referenced_values.extend(output.references.calls().copied());
|
||||||
|
|
||||||
// Gather up all the symbols that were referenced from other modules.
|
// Gather up all the symbols that were referenced from other modules.
|
||||||
referenced_values.extend(env.qualified_value_lookups.iter().copied());
|
referenced_values.extend(env.qualified_value_lookups.iter().copied());
|
||||||
referenced_types.extend(env.qualified_type_lookups.iter().copied());
|
|
||||||
|
|
||||||
let mut fix_closures_no_capture_symbols = VecSet::default();
|
let mut fix_closures_no_capture_symbols = VecSet::default();
|
||||||
let mut fix_closures_closure_captures = VecMap::default();
|
let mut fix_closures_closure_captures = VecMap::default();
|
||||||
|
@ -803,7 +814,6 @@ pub fn canonicalize_module_defs<'a>(
|
||||||
rigid_variables,
|
rigid_variables,
|
||||||
declarations,
|
declarations,
|
||||||
referenced_values,
|
referenced_values,
|
||||||
referenced_types,
|
|
||||||
exposed_imports: can_exposed_imports,
|
exposed_imports: can_exposed_imports,
|
||||||
problems: env.problems,
|
problems: env.problems,
|
||||||
symbols_from_requires,
|
symbols_from_requires,
|
||||||
|
|
|
@ -265,10 +265,7 @@ pub fn canonicalize_def_header_pattern<'a>(
|
||||||
|
|
||||||
match pattern {
|
match pattern {
|
||||||
// Identifiers that shadow ability members may appear (and may only appear) at the header of a def.
|
// Identifiers that shadow ability members may appear (and may only appear) at the header of a def.
|
||||||
Identifier {
|
Identifier { ident: name } => {
|
||||||
ident: name,
|
|
||||||
suffixed: _,
|
|
||||||
} => {
|
|
||||||
match scope.introduce_or_shadow_ability_member(
|
match scope.introduce_or_shadow_ability_member(
|
||||||
pending_abilities_in_scope,
|
pending_abilities_in_scope,
|
||||||
(*name).into(),
|
(*name).into(),
|
||||||
|
@ -376,13 +373,12 @@ pub fn canonicalize_pattern<'a>(
|
||||||
use PatternType::*;
|
use PatternType::*;
|
||||||
|
|
||||||
let can_pattern = match pattern {
|
let can_pattern = match pattern {
|
||||||
Identifier {
|
Identifier { ident: name } => {
|
||||||
ident: name,
|
match canonicalize_pattern_symbol(env, scope, output, region, permit_shadows, name) {
|
||||||
suffixed: _,
|
Ok(symbol) => Pattern::Identifier(symbol),
|
||||||
} => match canonicalize_pattern_symbol(env, scope, output, region, permit_shadows, name) {
|
Err(pattern) => pattern,
|
||||||
Ok(symbol) => Pattern::Identifier(symbol),
|
}
|
||||||
Err(pattern) => pattern,
|
}
|
||||||
},
|
|
||||||
Underscore(name) => {
|
Underscore(name) => {
|
||||||
// An underscored identifier can't be used, but we'll still add it to the scope
|
// An underscored identifier can't be used, but we'll still add it to the scope
|
||||||
// for better error messages if someone tries to use it.
|
// for better error messages if someone tries to use it.
|
||||||
|
@ -450,7 +446,10 @@ pub fn canonicalize_pattern<'a>(
|
||||||
let (type_arguments, lambda_set_variables, specialized_def_type) =
|
let (type_arguments, lambda_set_variables, specialized_def_type) =
|
||||||
freshen_opaque_def(var_store, opaque_def);
|
freshen_opaque_def(var_store, opaque_def);
|
||||||
|
|
||||||
output.references.insert_type_lookup(opaque);
|
output.references.insert_type_lookup(
|
||||||
|
opaque,
|
||||||
|
crate::procedure::QualifiedReference::Unqualified,
|
||||||
|
);
|
||||||
|
|
||||||
Pattern::UnwrappedOpaque {
|
Pattern::UnwrappedOpaque {
|
||||||
whole_var: var_store.fresh(),
|
whole_var: var_store.fresh(),
|
||||||
|
@ -632,10 +631,7 @@ pub fn canonicalize_pattern<'a>(
|
||||||
|
|
||||||
for loc_pattern in patterns.iter() {
|
for loc_pattern in patterns.iter() {
|
||||||
match loc_pattern.value {
|
match loc_pattern.value {
|
||||||
Identifier {
|
Identifier { ident: label } => {
|
||||||
ident: label,
|
|
||||||
suffixed: _,
|
|
||||||
} => {
|
|
||||||
match scope.introduce(label.into(), region) {
|
match scope.introduce(label.into(), region) {
|
||||||
Ok(symbol) => {
|
Ok(symbol) => {
|
||||||
output.references.insert_bound(symbol);
|
output.references.insert_bound(symbol);
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
use crate::expr::Expr;
|
use crate::expr::Expr;
|
||||||
use crate::pattern::Pattern;
|
use crate::pattern::Pattern;
|
||||||
use roc_module::symbol::Symbol;
|
use roc_module::symbol::{ModuleId, Symbol};
|
||||||
use roc_region::all::{Loc, Region};
|
use roc_region::all::{Loc, Region};
|
||||||
use roc_types::subs::Variable;
|
use roc_types::subs::Variable;
|
||||||
|
|
||||||
|
@ -46,6 +46,23 @@ impl ReferencesBitflags {
|
||||||
const TYPE_LOOKUP: Self = ReferencesBitflags(2);
|
const TYPE_LOOKUP: Self = ReferencesBitflags(2);
|
||||||
const CALL: Self = ReferencesBitflags(4);
|
const CALL: Self = ReferencesBitflags(4);
|
||||||
const BOUND: Self = ReferencesBitflags(8);
|
const BOUND: Self = ReferencesBitflags(8);
|
||||||
|
const QUALIFIED: Self = ReferencesBitflags(16);
|
||||||
|
const UNQUALIFIED: Self = ReferencesBitflags(32);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Copy, Clone, Debug)]
|
||||||
|
pub enum QualifiedReference {
|
||||||
|
Unqualified,
|
||||||
|
Qualified,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl QualifiedReference {
|
||||||
|
fn flags(&self, flags: ReferencesBitflags) -> ReferencesBitflags {
|
||||||
|
match self {
|
||||||
|
Self::Unqualified => ReferencesBitflags(flags.0 | ReferencesBitflags::UNQUALIFIED.0),
|
||||||
|
Self::Qualified => ReferencesBitflags(flags.0 | ReferencesBitflags::QUALIFIED.0),
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Debug, Default)]
|
#[derive(Clone, Debug, Default)]
|
||||||
|
@ -108,12 +125,12 @@ impl References {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn insert_value_lookup(&mut self, symbol: Symbol) {
|
pub fn insert_value_lookup(&mut self, symbol: Symbol, qualified: QualifiedReference) {
|
||||||
self.insert(symbol, ReferencesBitflags::VALUE_LOOKUP);
|
self.insert(symbol, qualified.flags(ReferencesBitflags::VALUE_LOOKUP));
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn insert_type_lookup(&mut self, symbol: Symbol) {
|
pub fn insert_type_lookup(&mut self, symbol: Symbol, qualified: QualifiedReference) {
|
||||||
self.insert(symbol, ReferencesBitflags::TYPE_LOOKUP);
|
self.insert(symbol, qualified.flags(ReferencesBitflags::TYPE_LOOKUP));
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn insert_bound(&mut self, symbol: Symbol) {
|
pub fn insert_bound(&mut self, symbol: Symbol) {
|
||||||
|
@ -178,7 +195,24 @@ impl References {
|
||||||
false
|
false
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn has_unqualified_type_or_value_lookup(&self, symbol: Symbol) -> bool {
|
||||||
|
let mask = ReferencesBitflags::VALUE_LOOKUP.0 | ReferencesBitflags::TYPE_LOOKUP.0;
|
||||||
|
let it = self.symbols.iter().zip(self.bitflags.iter());
|
||||||
|
|
||||||
|
for (a, b) in it {
|
||||||
|
if *a == symbol && b.0 & mask > 0 && b.0 & ReferencesBitflags::UNQUALIFIED.0 > 0 {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
false
|
||||||
|
}
|
||||||
|
|
||||||
pub fn references_type_def(&self, symbol: Symbol) -> bool {
|
pub fn references_type_def(&self, symbol: Symbol) -> bool {
|
||||||
self.has_type_lookup(symbol)
|
self.has_type_lookup(symbol)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn has_module_lookup(&self, module_id: ModuleId) -> bool {
|
||||||
|
self.symbols.iter().any(|sym| sym.module_id() == module_id)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
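The QUALIFIED and UNQUALIFIED bits introduced above are OR-ed into the existing lookup-kind bits by QualifiedReference::flags, and has_unqualified_type_or_value_lookup then tests for that combination. A minimal standalone sketch of the bit-combining pattern, assuming simplified stand-in types (Bits, Qualified) rather than the actual roc_can definitions, so it compiles on its own:

// A minimal sketch; Bits and Qualified are illustrative stand-ins for
// ReferencesBitflags and QualifiedReference, with the same constant values.
#[derive(Copy, Clone, Debug)]
struct Bits(u8);

impl Bits {
    const VALUE_LOOKUP: Bits = Bits(1);
    const QUALIFIED: Bits = Bits(16);
    const UNQUALIFIED: Bits = Bits(32);
}

#[derive(Copy, Clone)]
enum Qualified {
    No,
    Yes,
}

impl Qualified {
    // OR the qualification bit into the lookup-kind bits, mirroring flags() above.
    fn flags(self, base: Bits) -> Bits {
        match self {
            Qualified::No => Bits(base.0 | Bits::UNQUALIFIED.0),
            Qualified::Yes => Bits(base.0 | Bits::QUALIFIED.0),
        }
    }
}

fn main() {
    let bits = Qualified::No.flags(Bits::VALUE_LOOKUP);
    // An unqualified value lookup carries both the VALUE_LOOKUP and UNQUALIFIED
    // bits, which is the combination has_unqualified_type_or_value_lookup checks.
    assert_ne!(bits.0 & Bits::VALUE_LOOKUP.0, 0);
    assert_ne!(bits.0 & Bits::UNQUALIFIED.0, 0);
    assert_eq!(bits.0 & Bits::QUALIFIED.0, 0);
    println!("combined flags: {:?}", bits);
}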
@ -1,7 +1,7 @@
|
||||||
use roc_collections::{VecMap, VecSet};
|
use roc_collections::{VecMap, VecSet};
|
||||||
use roc_error_macros::internal_error;
|
use roc_error_macros::internal_error;
|
||||||
use roc_module::ident::Ident;
|
use roc_module::ident::{Ident, ModuleName};
|
||||||
use roc_module::symbol::{IdentId, IdentIds, ModuleId, Symbol};
|
use roc_module::symbol::{IdentId, IdentIds, ModuleId, ScopeModules, Symbol};
|
||||||
use roc_problem::can::RuntimeError;
|
use roc_problem::can::RuntimeError;
|
||||||
use roc_region::all::{Loc, Region};
|
use roc_region::all::{Loc, Region};
|
||||||
use roc_types::subs::Variable;
|
use roc_types::subs::Variable;
|
||||||
|
@ -29,8 +29,11 @@ pub struct Scope {
|
||||||
/// The first `exposed_ident_count` identifiers are exposed
|
/// The first `exposed_ident_count` identifiers are exposed
|
||||||
exposed_ident_count: usize,
|
exposed_ident_count: usize,
|
||||||
|
|
||||||
/// Identifiers that are imported (and introduced in the header)
|
/// Modules that are imported
|
||||||
imports: Vec<(Ident, Symbol, Region)>,
|
pub modules: ScopeModules,
|
||||||
|
|
||||||
|
/// Identifiers that are imported
|
||||||
|
imported_symbols: Vec<(Ident, Symbol, Region)>,
|
||||||
|
|
||||||
/// Shadows of an ability member, for example a local specialization of `eq` for the ability
|
/// Shadows of an ability member, for example a local specialization of `eq` for the ability
|
||||||
/// member `Eq implements eq : a, a -> Bool where a implements Eq` gets a shadow symbol it can use for its
|
/// member `Eq implements eq : a, a -> Bool where a implements Eq` gets a shadow symbol it can use for its
|
||||||
|
@ -50,16 +53,15 @@ pub struct Scope {
|
||||||
impl Scope {
|
impl Scope {
|
||||||
pub fn new(
|
pub fn new(
|
||||||
home: ModuleId,
|
home: ModuleId,
|
||||||
|
module_name: ModuleName,
|
||||||
initial_ident_ids: IdentIds,
|
initial_ident_ids: IdentIds,
|
||||||
starting_abilities_store: PendingAbilitiesStore,
|
starting_abilities_store: PendingAbilitiesStore,
|
||||||
) -> Scope {
|
) -> Scope {
|
||||||
let default_imports =
|
// Add all `Apply` types.
|
||||||
// Add all `Apply` types.
|
let default_imports = Symbol::apply_types_in_scope()
|
||||||
(Symbol::apply_types_in_scope().into_iter())
|
.into_iter()
|
||||||
// Add all tag names we might want to suggest as hints in error messages.
|
.map(|(a, (b, c))| (a, b, c))
|
||||||
.chain(Symbol::symbols_in_scope_for_hints());
|
.collect();
|
||||||
|
|
||||||
let default_imports = default_imports.map(|(a, (b, c))| (a, b, c)).collect();
|
|
||||||
|
|
||||||
Scope {
|
Scope {
|
||||||
home,
|
home,
|
||||||
|
@ -68,7 +70,8 @@ impl Scope {
|
||||||
aliases: VecMap::default(),
|
aliases: VecMap::default(),
|
||||||
abilities_store: starting_abilities_store,
|
abilities_store: starting_abilities_store,
|
||||||
shadows: VecMap::default(),
|
shadows: VecMap::default(),
|
||||||
imports: default_imports,
|
modules: ScopeModules::new(home, module_name),
|
||||||
|
imported_symbols: default_imports,
|
||||||
ignored_locals: VecMap::default(),
|
ignored_locals: VecMap::default(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -82,9 +85,9 @@ impl Scope {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn add_docs_imports(&mut self) {
|
pub fn add_docs_imports(&mut self) {
|
||||||
self.imports
|
self.imported_symbols
|
||||||
.push(("Dict".into(), Symbol::DICT_DICT, Region::zero()));
|
.push(("Dict".into(), Symbol::DICT_DICT, Region::zero()));
|
||||||
self.imports
|
self.imported_symbols
|
||||||
.push(("Set".into(), Symbol::SET_SET, Region::zero()));
|
.push(("Set".into(), Symbol::SET_SET, Region::zero()));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -113,7 +116,7 @@ impl Scope {
|
||||||
|
|
||||||
fn idents_in_scope(&self) -> impl Iterator<Item = Ident> + '_ {
|
fn idents_in_scope(&self) -> impl Iterator<Item = Ident> + '_ {
|
||||||
let it1 = self.locals.idents_in_scope();
|
let it1 = self.locals.idents_in_scope();
|
||||||
let it2 = self.imports.iter().map(|t| t.0.clone());
|
let it2 = self.imported_symbols.iter().map(|t| t.0.clone());
|
||||||
|
|
||||||
it2.chain(it1)
|
it2.chain(it1)
|
||||||
}
|
}
|
||||||
|
@ -139,7 +142,7 @@ impl Scope {
|
||||||
},
|
},
|
||||||
None => {
|
None => {
|
||||||
// opaque types can only be wrapped/unwrapped in the scope they are defined in (and below)
|
// opaque types can only be wrapped/unwrapped in the scope they are defined in (and below)
|
||||||
let error = if let Some((_, decl_region)) = self.has_imported(opaque_str) {
|
let error = if let Some((_, decl_region)) = self.has_imported_symbol(opaque_str) {
|
||||||
// specific error for when the opaque is imported, which definitely does not work
|
// specific error for when the opaque is imported, which definitely does not work
|
||||||
RuntimeError::OpaqueOutsideScope {
|
RuntimeError::OpaqueOutsideScope {
|
||||||
opaque,
|
opaque,
|
||||||
|
@ -202,8 +205,8 @@ impl Scope {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn has_imported(&self, ident: &str) -> Option<(Symbol, Region)> {
|
fn has_imported_symbol(&self, ident: &str) -> Option<(Symbol, Region)> {
|
||||||
for (import, shadow, original_region) in self.imports.iter() {
|
for (import, shadow, original_region) in self.imported_symbols.iter() {
|
||||||
if ident == import.as_str() {
|
if ident == import.as_str() {
|
||||||
return Some((*shadow, *original_region));
|
return Some((*shadow, *original_region));
|
||||||
}
|
}
|
||||||
|
@ -215,7 +218,7 @@ impl Scope {
|
||||||
/// Is an identifier in scope, either in the locals or imports
|
/// Is an identifier in scope, either in the locals or imports
|
||||||
fn scope_contains_ident(&self, ident: &str) -> ContainsIdent {
|
fn scope_contains_ident(&self, ident: &str) -> ContainsIdent {
|
||||||
// exposed imports are likely to be small
|
// exposed imports are likely to be small
|
||||||
match self.has_imported(ident) {
|
match self.has_imported_symbol(ident) {
|
||||||
Some((symbol, region)) => ContainsIdent::InScope(symbol, region),
|
Some((symbol, region)) => ContainsIdent::InScope(symbol, region),
|
||||||
None => self.locals.contains_ident(ident),
|
None => self.locals.contains_ident(ident),
|
||||||
}
|
}
|
||||||
|
@ -379,19 +382,19 @@ impl Scope {
|
||||||
///
|
///
|
||||||
/// Returns Err if this would shadow an existing ident, including the
|
/// Returns Err if this would shadow an existing ident, including the
|
||||||
/// Symbol and Region of the ident we already had in scope under that name.
|
/// Symbol and Region of the ident we already had in scope under that name.
|
||||||
pub fn import(
|
pub fn import_symbol(
|
||||||
&mut self,
|
&mut self,
|
||||||
ident: Ident,
|
ident: Ident,
|
||||||
symbol: Symbol,
|
symbol: Symbol,
|
||||||
region: Region,
|
region: Region,
|
||||||
) -> Result<(), (Symbol, Region)> {
|
) -> Result<(), (Symbol, Region)> {
|
||||||
if let Some((s, r)) = self.has_imported(ident.as_str()) {
|
match self.scope_contains_ident(ident.as_str()) {
|
||||||
return Err((s, r));
|
ContainsIdent::InScope(symbol, region) => Err((symbol, region)),
|
||||||
|
ContainsIdent::NotPresent | ContainsIdent::NotInScope(_) => {
|
||||||
|
self.imported_symbols.push((ident, symbol, region));
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
self.imports.push((ident, symbol, region));
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn add_alias(
|
pub fn add_alias(
|
||||||
|
@ -423,17 +426,22 @@ impl Scope {
|
||||||
//
|
//
|
||||||
// - abilities_store: ability definitions not allowed in inner scopes
|
// - abilities_store: ability definitions not allowed in inner scopes
|
||||||
// - locals: everything introduced in the inner scope is marked as not in scope in the rollback
|
// - locals: everything introduced in the inner scope is marked as not in scope in the rollback
|
||||||
|
// - imports: everything that was imported in the inner scope is removed in the rollback
|
||||||
// - aliases: stored in a VecMap, we just discard anything added in an inner scope
|
// - aliases: stored in a VecMap, we just discard anything added in an inner scope
|
||||||
// - exposed_ident_count: unchanged
|
// - exposed_ident_count: unchanged
|
||||||
// - home: unchanged
|
// - home: unchanged
|
||||||
let aliases_count = self.aliases.len();
|
let aliases_count = self.aliases.len();
|
||||||
let ignored_locals_count = self.ignored_locals.len();
|
let ignored_locals_count = self.ignored_locals.len();
|
||||||
let locals_snapshot = self.locals.in_scope.len();
|
let locals_snapshot = self.locals.in_scope.len();
|
||||||
|
let imported_symbols_snapshot = self.imported_symbols.len();
|
||||||
|
let imported_modules_snapshot = self.modules.len();
|
||||||
|
|
||||||
let result = f(self);
|
let result = f(self);
|
||||||
|
|
||||||
self.aliases.truncate(aliases_count);
|
self.aliases.truncate(aliases_count);
|
||||||
self.ignored_locals.truncate(ignored_locals_count);
|
self.ignored_locals.truncate(ignored_locals_count);
|
||||||
|
self.imported_symbols.truncate(imported_symbols_snapshot);
|
||||||
|
self.modules.truncate(imported_modules_snapshot);
|
||||||
|
|
||||||
// anything added in the inner scope is no longer in scope now
|
// anything added in the inner scope is no longer in scope now
|
||||||
for i in locals_snapshot..self.locals.in_scope.len() {
|
for i in locals_snapshot..self.locals.in_scope.len() {
|
||||||
|
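The hunk above extends the snapshot-and-truncate rollback used for inner scopes: the lengths of the imported-symbols and imported-modules lists are recorded before running the inner closure, and anything pushed during it is truncated away afterwards. A minimal sketch of that pattern with a toy scope type; the field and method names here are illustrative, not the real roc_can scope API:

// Toy scope showing the snapshot/truncate rollback; ToyScope and inner_scope
// are assumed names for illustration only.
struct ToyScope {
    imported_symbols: Vec<String>,
    imported_modules: Vec<String>,
}

impl ToyScope {
    fn inner_scope<T>(&mut self, f: impl FnOnce(&mut Self) -> T) -> T {
        // record how many imports existed before entering the inner scope
        let symbols_snapshot = self.imported_symbols.len();
        let modules_snapshot = self.imported_modules.len();

        let result = f(self);

        // everything imported inside the inner scope is removed in the rollback
        self.imported_symbols.truncate(symbols_snapshot);
        self.imported_modules.truncate(modules_snapshot);

        result
    }
}

fn main() {
    let mut scope = ToyScope {
        imported_symbols: Vec::new(),
        imported_modules: Vec::new(),
    };

    scope.inner_scope(|inner| {
        inner.imported_symbols.push("baz".to_string());
        inner.imported_modules.push("Bar".to_string());
        assert_eq!(inner.imported_symbols.len(), 1);
    });

    // rolled back after leaving the inner scope
    assert!(scope.imported_symbols.is_empty());
    assert!(scope.imported_modules.is_empty());
}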
@ -651,6 +659,7 @@ mod test {
|
||||||
let _register_module_debug_names = ModuleIds::default();
|
let _register_module_debug_names = ModuleIds::default();
|
||||||
let mut scope = Scope::new(
|
let mut scope = Scope::new(
|
||||||
ModuleId::ATTR,
|
ModuleId::ATTR,
|
||||||
|
"#Attr".into(),
|
||||||
IdentIds::default(),
|
IdentIds::default(),
|
||||||
PendingAbilitiesStore::default(),
|
PendingAbilitiesStore::default(),
|
||||||
);
|
);
|
||||||
|
@ -670,6 +679,7 @@ mod test {
|
||||||
let _register_module_debug_names = ModuleIds::default();
|
let _register_module_debug_names = ModuleIds::default();
|
||||||
let mut scope = Scope::new(
|
let mut scope = Scope::new(
|
||||||
ModuleId::ATTR,
|
ModuleId::ATTR,
|
||||||
|
"#Attr".into(),
|
||||||
IdentIds::default(),
|
IdentIds::default(),
|
||||||
PendingAbilitiesStore::default(),
|
PendingAbilitiesStore::default(),
|
||||||
);
|
);
|
||||||
|
@ -699,6 +709,7 @@ mod test {
|
||||||
let _register_module_debug_names = ModuleIds::default();
|
let _register_module_debug_names = ModuleIds::default();
|
||||||
let mut scope = Scope::new(
|
let mut scope = Scope::new(
|
||||||
ModuleId::ATTR,
|
ModuleId::ATTR,
|
||||||
|
"#Attr".into(),
|
||||||
IdentIds::default(),
|
IdentIds::default(),
|
||||||
PendingAbilitiesStore::default(),
|
PendingAbilitiesStore::default(),
|
||||||
);
|
);
|
||||||
|
@ -720,6 +731,7 @@ mod test {
|
||||||
let _register_module_debug_names = ModuleIds::default();
|
let _register_module_debug_names = ModuleIds::default();
|
||||||
let scope = Scope::new(
|
let scope = Scope::new(
|
||||||
ModuleId::ATTR,
|
ModuleId::ATTR,
|
||||||
|
"#Attr".into(),
|
||||||
IdentIds::default(),
|
IdentIds::default(),
|
||||||
PendingAbilitiesStore::default(),
|
PendingAbilitiesStore::default(),
|
||||||
);
|
);
|
||||||
|
@ -728,13 +740,7 @@ mod test {
|
||||||
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
&idents,
|
&idents,
|
||||||
&[
|
&[Ident::from("Str"), Ident::from("List"), Ident::from("Box"),]
|
||||||
Ident::from("Str"),
|
|
||||||
Ident::from("List"),
|
|
||||||
Ident::from("Box"),
|
|
||||||
Ident::from("Ok"),
|
|
||||||
Ident::from("Err"),
|
|
||||||
]
|
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -743,6 +749,7 @@ mod test {
|
||||||
let _register_module_debug_names = ModuleIds::default();
|
let _register_module_debug_names = ModuleIds::default();
|
||||||
let mut scope = Scope::new(
|
let mut scope = Scope::new(
|
||||||
ModuleId::ATTR,
|
ModuleId::ATTR,
|
||||||
|
"#Attr".into(),
|
||||||
IdentIds::default(),
|
IdentIds::default(),
|
||||||
PendingAbilitiesStore::default(),
|
PendingAbilitiesStore::default(),
|
||||||
);
|
);
|
||||||
|
@ -751,13 +758,7 @@ mod test {
|
||||||
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
&idents,
|
&idents,
|
||||||
&[
|
&[Ident::from("Str"), Ident::from("List"), Ident::from("Box"),]
|
||||||
Ident::from("Str"),
|
|
||||||
Ident::from("List"),
|
|
||||||
Ident::from("Box"),
|
|
||||||
Ident::from("Ok"),
|
|
||||||
Ident::from("Err"),
|
|
||||||
]
|
|
||||||
);
|
);
|
||||||
|
|
||||||
let builtin_count = idents.len();
|
let builtin_count = idents.len();
|
||||||
|
@ -810,6 +811,7 @@ mod test {
|
||||||
let _register_module_debug_names = ModuleIds::default();
|
let _register_module_debug_names = ModuleIds::default();
|
||||||
let mut scope = Scope::new(
|
let mut scope = Scope::new(
|
||||||
ModuleId::ATTR,
|
ModuleId::ATTR,
|
||||||
|
"#Attr".into(),
|
||||||
IdentIds::default(),
|
IdentIds::default(),
|
||||||
PendingAbilitiesStore::default(),
|
PendingAbilitiesStore::default(),
|
||||||
);
|
);
|
||||||
|
@ -820,7 +822,7 @@ mod test {
|
||||||
|
|
||||||
assert!(scope.lookup(&ident, region).is_err());
|
assert!(scope.lookup(&ident, region).is_err());
|
||||||
|
|
||||||
assert!(scope.import(ident.clone(), symbol, region).is_ok());
|
assert!(scope.import_symbol(ident.clone(), symbol, region).is_ok());
|
||||||
|
|
||||||
assert!(scope.lookup(&ident, region).is_ok());
|
assert!(scope.lookup(&ident, region).is_ok());
|
||||||
|
|
||||||
|
@ -832,6 +834,7 @@ mod test {
|
||||||
let _register_module_debug_names = ModuleIds::default();
|
let _register_module_debug_names = ModuleIds::default();
|
||||||
let mut scope = Scope::new(
|
let mut scope = Scope::new(
|
||||||
ModuleId::ATTR,
|
ModuleId::ATTR,
|
||||||
|
"#Attr".into(),
|
||||||
IdentIds::default(),
|
IdentIds::default(),
|
||||||
PendingAbilitiesStore::default(),
|
PendingAbilitiesStore::default(),
|
||||||
);
|
);
|
||||||
|
@ -842,7 +845,7 @@ mod test {
|
||||||
let region1 = Region::from_pos(Position { offset: 10 });
|
let region1 = Region::from_pos(Position { offset: 10 });
|
||||||
let region2 = Region::from_pos(Position { offset: 20 });
|
let region2 = Region::from_pos(Position { offset: 20 });
|
||||||
|
|
||||||
scope.import(ident.clone(), symbol, region1).unwrap();
|
scope.import_symbol(ident.clone(), symbol, region1).unwrap();
|
||||||
|
|
||||||
let (original, _ident, shadow_symbol) =
|
let (original, _ident, shadow_symbol) =
|
||||||
scope.introduce(ident.clone(), region2).unwrap_err();
|
scope.introduce(ident.clone(), region2).unwrap_err();
|
||||||
|
|
|
@ -6,7 +6,7 @@ use roc_error_macros::internal_error;
|
||||||
use roc_module::called_via::CalledVia;
|
use roc_module::called_via::CalledVia;
|
||||||
use roc_module::ident::ModuleName;
|
use roc_module::ident::ModuleName;
|
||||||
use roc_parse::ast::Expr::{self, *};
|
use roc_parse::ast::Expr::{self, *};
|
||||||
use roc_parse::ast::{is_loc_expr_suffixed, wrap_in_task_ok, Pattern, ValueDef, WhenBranch};
|
use roc_parse::ast::{is_expr_suffixed, Pattern, ValueDef, WhenBranch};
|
||||||
use roc_region::all::{Loc, Region};
|
use roc_region::all::{Loc, Region};
|
||||||
use std::cell::Cell;
|
use std::cell::Cell;
|
||||||
|
|
||||||
|
@ -31,11 +31,9 @@ fn next_suffixed_answer_pattern(arena: &Bump) -> (Expr, Pattern) {
|
||||||
Expr::Var {
|
Expr::Var {
|
||||||
module_name: "",
|
module_name: "",
|
||||||
ident: answer_ident,
|
ident: answer_ident,
|
||||||
suffixed: 0,
|
|
||||||
},
|
},
|
||||||
Pattern::Identifier {
|
Pattern::Identifier {
|
||||||
ident: answer_ident.as_str(),
|
ident: answer_ident.as_str(),
|
||||||
suffixed: 0,
|
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
|
@ -95,63 +93,10 @@ pub fn unwrap_suffixed_expression<'a>(
|
||||||
) -> Result<&'a Loc<Expr<'a>>, EUnwrapped<'a>> {
|
) -> Result<&'a Loc<Expr<'a>>, EUnwrapped<'a>> {
|
||||||
let unwrapped_expression = {
|
let unwrapped_expression = {
|
||||||
match loc_expr.value {
|
match loc_expr.value {
|
||||||
Expr::Var {
|
Expr::TaskAwaitBang(sub_expr) => {
|
||||||
module_name,
|
let unwrapped_sub_expr = arena.alloc(Loc::at(loc_expr.region, *sub_expr));
|
||||||
ident,
|
|
||||||
suffixed,
|
|
||||||
} => {
|
|
||||||
match suffixed {
|
|
||||||
0 => Ok(loc_expr),
|
|
||||||
1 => {
|
|
||||||
let unwrapped_var = arena.alloc(Loc::at(
|
|
||||||
loc_expr.region,
|
|
||||||
Expr::Var {
|
|
||||||
module_name,
|
|
||||||
ident,
|
|
||||||
suffixed: suffixed.saturating_sub(1),
|
|
||||||
},
|
|
||||||
));
|
|
||||||
|
|
||||||
init_unwrapped_err(arena, unwrapped_var, maybe_def_pat)
|
init_unwrapped_err(arena, unwrapped_sub_expr, maybe_def_pat)
|
||||||
}
|
|
||||||
_ => {
|
|
||||||
let unwrapped_var = arena.alloc(Loc::at(
|
|
||||||
loc_expr.region,
|
|
||||||
Expr::Var {
|
|
||||||
module_name,
|
|
||||||
ident,
|
|
||||||
suffixed: 0,
|
|
||||||
},
|
|
||||||
));
|
|
||||||
|
|
||||||
// we generate an intermediate pattern `#!a0` etc
|
|
||||||
// so we dont unwrap the definition pattern
|
|
||||||
let (mut answer_var, answer_pat) = next_suffixed_answer_pattern(arena);
|
|
||||||
|
|
||||||
// we transfer the suffix from the Var to the intermediate answer Var
|
|
||||||
// as that will need to be unwrapped in a future call
|
|
||||||
if let Expr::Var {
|
|
||||||
module_name: "",
|
|
||||||
ident: answer_ident,
|
|
||||||
suffixed: 0,
|
|
||||||
} = answer_var
|
|
||||||
{
|
|
||||||
answer_var = Expr::Var {
|
|
||||||
module_name: "",
|
|
||||||
ident: answer_ident,
|
|
||||||
suffixed: suffixed.saturating_sub(1),
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
internal_error!("expected a suffixed Var to be generated");
|
|
||||||
}
|
|
||||||
|
|
||||||
Err(EUnwrapped::UnwrappedSubExpr {
|
|
||||||
sub_arg: unwrapped_var,
|
|
||||||
sub_pat: arena.alloc(Loc::at(unwrapped_var.region, answer_pat)),
|
|
||||||
sub_new: arena.alloc(Loc::at(unwrapped_var.region, answer_var)),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
Expr::Defs(..) => unwrap_suffixed_expression_defs_help(arena, loc_expr, maybe_def_pat),
|
Expr::Defs(..) => unwrap_suffixed_expression_defs_help(arena, loc_expr, maybe_def_pat),
|
||||||
|
@ -177,11 +122,87 @@ pub fn unwrap_suffixed_expression<'a>(
|
||||||
Expr::SpaceBefore(..) | Expr::SpaceAfter(..) => {
|
Expr::SpaceBefore(..) | Expr::SpaceAfter(..) => {
|
||||||
internal_error!(
|
internal_error!(
|
||||||
"SpaceBefore and SpaceAfter should have been removed in desugar_expr"
|
"SpaceBefore and SpaceAfter should have been removed in desugar_expr"
|
||||||
)
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
Expr::BinOps(..) => {
|
Expr::BinOps(..) => {
|
||||||
internal_error!("BinOps should have been desugared in desugar_expr")
|
internal_error!("BinOps should have been desugared in desugar_expr");
|
||||||
|
}
|
||||||
|
|
||||||
|
Expr::LowLevelDbg(dbg_src, arg, rest) => {
|
||||||
|
if is_expr_suffixed(&arg.value) {
|
||||||
|
// we cannot unwrap a suffixed expression within dbg
|
||||||
|
// e.g. dbg (foo! "bar")
|
||||||
|
return Err(EUnwrapped::Malformed);
|
||||||
|
}
|
||||||
|
|
||||||
|
match unwrap_suffixed_expression(arena, rest, maybe_def_pat) {
|
||||||
|
Ok(unwrapped_expr) => {
|
||||||
|
let new_dbg = arena.alloc(Loc::at(
|
||||||
|
loc_expr.region,
|
||||||
|
LowLevelDbg(dbg_src, arg, unwrapped_expr),
|
||||||
|
));
|
||||||
|
return Ok(new_dbg);
|
||||||
|
}
|
||||||
|
Err(EUnwrapped::UnwrappedDefExpr(unwrapped_expr)) => {
|
||||||
|
let new_dbg = arena.alloc(Loc::at(
|
||||||
|
loc_expr.region,
|
||||||
|
LowLevelDbg(dbg_src, arg, unwrapped_expr),
|
||||||
|
));
|
||||||
|
Err(EUnwrapped::UnwrappedDefExpr(new_dbg))
|
||||||
|
}
|
||||||
|
Err(EUnwrapped::UnwrappedSubExpr {
|
||||||
|
sub_arg: unwrapped_expr,
|
||||||
|
sub_pat,
|
||||||
|
sub_new,
|
||||||
|
}) => {
|
||||||
|
let new_dbg = arena.alloc(Loc::at(
|
||||||
|
loc_expr.region,
|
||||||
|
LowLevelDbg(dbg_src, arg, unwrapped_expr),
|
||||||
|
));
|
||||||
|
Err(EUnwrapped::UnwrappedSubExpr {
|
||||||
|
sub_arg: new_dbg,
|
||||||
|
sub_pat,
|
||||||
|
sub_new,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
Err(EUnwrapped::Malformed) => Err(EUnwrapped::Malformed),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Expr::Expect(condition, continuation) => {
|
||||||
|
if is_expr_suffixed(&condition.value) {
|
||||||
|
// we cannot unwrap a suffixed expression within expect
|
||||||
|
// e.g. expect (foo! "bar")
|
||||||
|
return Err(EUnwrapped::Malformed);
|
||||||
|
}
|
||||||
|
|
||||||
|
match unwrap_suffixed_expression(arena, continuation, maybe_def_pat) {
|
||||||
|
Ok(unwrapped_expr) => {
|
||||||
|
let new_expect = arena
|
||||||
|
.alloc(Loc::at(loc_expr.region, Expect(condition, unwrapped_expr)));
|
||||||
|
return Ok(new_expect);
|
||||||
|
}
|
||||||
|
Err(EUnwrapped::UnwrappedDefExpr(unwrapped_expr)) => {
|
||||||
|
let new_expect = arena
|
||||||
|
.alloc(Loc::at(loc_expr.region, Expect(condition, unwrapped_expr)));
|
||||||
|
Err(EUnwrapped::UnwrappedDefExpr(new_expect))
|
||||||
|
}
|
||||||
|
Err(EUnwrapped::UnwrappedSubExpr {
|
||||||
|
sub_arg: unwrapped_expr,
|
||||||
|
sub_pat,
|
||||||
|
sub_new,
|
||||||
|
}) => {
|
||||||
|
let new_expect = arena
|
||||||
|
.alloc(Loc::at(loc_expr.region, Expect(condition, unwrapped_expr)));
|
||||||
|
Err(EUnwrapped::UnwrappedSubExpr {
|
||||||
|
sub_arg: new_expect,
|
||||||
|
sub_pat,
|
||||||
|
sub_new,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
Err(EUnwrapped::Malformed) => Err(EUnwrapped::Malformed),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// we only need to unwrap some expressions, leave the rest as is
|
// we only need to unwrap some expressions, leave the rest as is
|
||||||
|
@ -192,8 +213,8 @@ pub fn unwrap_suffixed_expression<'a>(
|
||||||
// KEEP THIS HERE FOR DEBUGGING
|
// KEEP THIS HERE FOR DEBUGGING
|
||||||
// USEFUL TO SEE THE UNWRAPPING
|
// USEFUL TO SEE THE UNWRAPPING
|
||||||
// OF AST NODES AS THEY DESCEND
|
// OF AST NODES AS THEY DESCEND
|
||||||
// if is_loc_expr_suffixed(loc_expr) {
|
// if is_expr_suffixed(&loc_expr.value) {
|
||||||
// dbg!(&loc_expr, &unwrapped_expression);
|
// dbg!(&maybe_def_pat, &loc_expr, &unwrapped_expression);
|
||||||
// }
|
// }
|
||||||
|
|
||||||
unwrapped_expression
|
unwrapped_expression
|
||||||
|
@ -248,19 +269,7 @@ pub fn unwrap_suffixed_expression_closure_help<'a>(
|
||||||
) -> Result<&'a Loc<Expr<'a>>, EUnwrapped<'a>> {
|
) -> Result<&'a Loc<Expr<'a>>, EUnwrapped<'a>> {
|
||||||
match loc_expr.value {
|
match loc_expr.value {
|
||||||
Expr::Closure(closure_args, closure_loc_ret) => {
|
Expr::Closure(closure_args, closure_loc_ret) => {
|
||||||
|
// note we use `None` here as we don't want to pass a DefExpr up and
|
||||||
// Check to make sure that arguments are not suffixed
|
|
||||||
let suffixed_arg_count = closure_args
|
|
||||||
.iter()
|
|
||||||
.filter(|loc_pat| loc_pat.value.is_suffixed())
|
|
||||||
.count();
|
|
||||||
|
|
||||||
if suffixed_arg_count > 0 {
|
|
||||||
debug_assert!(false,"closure arguments should not be suffixed");
|
|
||||||
return Err(EUnwrapped::Malformed);
|
|
||||||
}
|
|
||||||
|
|
||||||
// note we use `None` here as we don't want to pass a DefExpr up and
|
|
||||||
// unwrap the definition pattern for the closure
|
// unwrap the definition pattern for the closure
|
||||||
match unwrap_suffixed_expression(arena, closure_loc_ret, None) {
|
match unwrap_suffixed_expression(arena, closure_loc_ret, None) {
|
||||||
Ok(unwrapped_expr) => {
|
Ok(unwrapped_expr) => {
|
||||||
|
@ -292,17 +301,14 @@ pub fn unwrap_suffixed_expression_apply_help<'a>(
|
||||||
|
|
||||||
// Any suffixed arguments will be innermost, therefore we unwrap those first
|
// Any suffixed arguments will be innermost, therefore we unwrap those first
|
||||||
let local_args = arena.alloc_slice_copy(apply_args);
|
let local_args = arena.alloc_slice_copy(apply_args);
|
||||||
for (_, arg) in local_args.iter_mut().enumerate() {
|
for arg in local_args.iter_mut() {
|
||||||
match unwrap_suffixed_expression(arena, arg, maybe_def_pat) {
|
// Args are always expressions, don't pass `maybe_def_pat`
|
||||||
|
match unwrap_suffixed_expression(arena, arg, None) {
|
||||||
Ok(new_arg) => {
|
Ok(new_arg) => {
|
||||||
*arg = new_arg;
|
*arg = new_arg;
|
||||||
}
|
}
|
||||||
Err(EUnwrapped::UnwrappedDefExpr(unwrapped_arg)) => {
|
Err(EUnwrapped::UnwrappedDefExpr(..)) => {
|
||||||
*arg = unwrapped_arg;
|
internal_error!("unreachable, unwrapped arg cannot be def expression as `None` was passed as pattern");
|
||||||
|
|
||||||
let new_apply = arena.alloc(Loc::at(loc_expr.region, Apply(function, local_args, called_via)));
|
|
||||||
|
|
||||||
return Err(EUnwrapped::UnwrappedDefExpr(new_apply));
|
|
||||||
}
|
}
|
||||||
Err(EUnwrapped::UnwrappedSubExpr { sub_arg, sub_pat, sub_new: new_arg }) => {
|
Err(EUnwrapped::UnwrappedSubExpr { sub_arg, sub_pat, sub_new: new_arg }) => {
|
||||||
|
|
||||||
|
@ -316,21 +322,15 @@ pub fn unwrap_suffixed_expression_apply_help<'a>(
|
||||||
}
|
}
|
||||||
|
|
||||||
// special case for when our Apply function is a suffixed Var (but not multiple suffixed)
|
// special case for when our Apply function is a suffixed Var (but not multiple suffixed)
|
||||||
if let Expr::Var { module_name, ident, suffixed } = function.value {
|
if let Expr::TaskAwaitBang(sub_expr) = function.value {
|
||||||
if suffixed == 1 {
|
let unwrapped_function = arena.alloc(Loc::at(
|
||||||
let unwrapped_function = arena.alloc(Loc::at(
|
loc_expr.region,
|
||||||
loc_expr.region,
|
*sub_expr,
|
||||||
Expr::Var {
|
));
|
||||||
module_name,
|
|
||||||
ident,
|
|
||||||
suffixed: suffixed - 1,
|
|
||||||
},
|
|
||||||
));
|
|
||||||
|
|
||||||
let new_apply = arena.alloc(Loc::at(loc_expr.region, Expr::Apply(unwrapped_function, local_args, called_via)));
|
let new_apply = arena.alloc(Loc::at(loc_expr.region, Expr::Apply(unwrapped_function, local_args, called_via)));
|
||||||
|
|
||||||
return init_unwrapped_err(arena, new_apply, maybe_def_pat);
|
return init_unwrapped_err(arena, new_apply, maybe_def_pat);
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// function is another expression
|
// function is another expression
|
||||||
|
@ -368,7 +368,7 @@ pub fn unwrap_suffixed_expression_if_then_else_help<'a>(
|
||||||
let (current_if_then_statement, current_if_then_expression) = if_then;
|
let (current_if_then_statement, current_if_then_expression) = if_then;
|
||||||
|
|
||||||
// unwrap suffixed (innermost) expressions e.g. `if true then doThing! then ...`
|
// unwrap suffixed (innermost) expressions e.g. `if true then doThing! then ...`
|
||||||
if is_loc_expr_suffixed(current_if_then_expression) {
|
if is_expr_suffixed(¤t_if_then_expression.value) {
|
||||||
// split if_thens around the current index
|
// split if_thens around the current index
|
||||||
let (before, after) = roc_parse::ast::split_around(if_thens, index);
|
let (before, after) = roc_parse::ast::split_around(if_thens, index);
|
||||||
|
|
||||||
|
@ -398,13 +398,8 @@ pub fn unwrap_suffixed_expression_if_then_else_help<'a>(
|
||||||
sub_pat,
|
sub_pat,
|
||||||
sub_new,
|
sub_new,
|
||||||
}) => {
|
}) => {
|
||||||
let unwrapped_expression = apply_task_await(
|
let unwrapped_expression =
|
||||||
arena,
|
apply_task_await(arena, sub_arg.region, sub_arg, sub_pat, sub_new);
|
||||||
sub_arg.region,
|
|
||||||
sub_arg,
|
|
||||||
sub_pat,
|
|
||||||
wrap_in_task_ok(arena, sub_new),
|
|
||||||
);
|
|
||||||
|
|
||||||
let mut new_if_thens = Vec::new_in(arena);
|
let mut new_if_thens = Vec::new_in(arena);
|
||||||
|
|
||||||
|
@ -429,7 +424,7 @@ pub fn unwrap_suffixed_expression_if_then_else_help<'a>(
|
||||||
// unwrap suffixed statements e.g. `if isThing! then ...`
|
// unwrap suffixed statements e.g. `if isThing! then ...`
|
||||||
// note we want to split and nest if-then's so we only run Task's
|
// note we want to split and nest if-then's so we only run Task's
|
||||||
// that are required
|
// that are required
|
||||||
if is_loc_expr_suffixed(current_if_then_statement) {
|
if is_expr_suffixed(¤t_if_then_statement.value) {
|
||||||
// split if_thens around the current index
|
// split if_thens around the current index
|
||||||
let (before, after) = roc_parse::ast::split_around(if_thens, index);
|
let (before, after) = roc_parse::ast::split_around(if_thens, index);
|
||||||
|
|
||||||
|
@ -541,13 +536,8 @@ pub fn unwrap_suffixed_expression_if_then_else_help<'a>(
|
||||||
sub_pat,
|
sub_pat,
|
||||||
sub_new,
|
sub_new,
|
||||||
}) => {
|
}) => {
|
||||||
let unwrapped_final_else = apply_task_await(
|
let unwrapped_final_else =
|
||||||
arena,
|
apply_task_await(arena, sub_arg.region, sub_arg, sub_pat, sub_new);
|
||||||
sub_arg.region,
|
|
||||||
sub_arg,
|
|
||||||
sub_pat,
|
|
||||||
wrap_in_task_ok(arena, sub_new),
|
|
||||||
);
|
|
||||||
|
|
||||||
let new_if = arena.alloc(Loc::at(
|
let new_if = arena.alloc(Loc::at(
|
||||||
loc_expr.region,
|
loc_expr.region,
|
||||||
|
@ -572,20 +562,22 @@ pub fn unwrap_suffixed_expression_when_help<'a>(
|
||||||
Expr::When(condition, branches) => {
|
Expr::When(condition, branches) => {
|
||||||
|
|
||||||
// first unwrap any when branches values
|
// first unwrap any when branches values
|
||||||
// e.g.
|
// e.g.
|
||||||
// when foo is
|
// when foo is
|
||||||
// [] -> line! "bar"
|
// [] -> line! "bar"
|
||||||
// _ -> line! "baz"
|
// _ -> line! "baz"
|
||||||
for (branch_index, WhenBranch{value: branch_loc_expr,patterns, guard}) in branches.iter().enumerate() {
|
for (branch_index, WhenBranch{value: branch_loc_expr,patterns, guard}) in branches.iter().enumerate() {
|
||||||
|
|
||||||
// if the branch isn't suffixed we can leave it alone
|
// if the branch isn't suffixed we can leave it alone
|
||||||
if is_loc_expr_suffixed(branch_loc_expr) {
|
if is_expr_suffixed(&branch_loc_expr.value) {
|
||||||
let unwrapped_branch_value = match unwrap_suffixed_expression(arena, branch_loc_expr, None) {
|
let unwrapped_branch_value = match unwrap_suffixed_expression(arena, branch_loc_expr, None) {
|
||||||
Ok(unwrapped_branch_value) => unwrapped_branch_value,
|
Ok(unwrapped_branch_value) => unwrapped_branch_value,
|
||||||
Err(EUnwrapped::UnwrappedSubExpr { sub_arg, sub_pat, sub_new }) => apply_task_await(arena, branch_loc_expr.region, sub_arg, sub_pat, sub_new),
|
Err(EUnwrapped::UnwrappedSubExpr { sub_arg, sub_pat, sub_new }) => apply_task_await(arena, branch_loc_expr.region, sub_arg, sub_pat, sub_new),
|
||||||
Err(..) => return Err(EUnwrapped::Malformed),
|
Err(..) => return Err(EUnwrapped::Malformed),
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// TODO: unwrap guard
|
||||||
|
|
||||||
let new_branch = WhenBranch{value: *unwrapped_branch_value, patterns, guard: *guard};
|
let new_branch = WhenBranch{value: *unwrapped_branch_value, patterns, guard: *guard};
|
||||||
let mut new_branches = Vec::new_in(arena);
|
let mut new_branches = Vec::new_in(arena);
|
||||||
let (before, rest) = branches.split_at(branch_index);
|
let (before, rest) = branches.split_at(branch_index);
|
||||||
|
@ -645,7 +637,7 @@ pub fn unwrap_suffixed_expression_defs_help<'a>(
|
||||||
};
|
};
|
||||||
|
|
||||||
let maybe_suffixed_value_def = match current_value_def {
|
let maybe_suffixed_value_def = match current_value_def {
|
||||||
Annotation(..) | Dbg{..} | Expect{..} | ExpectFx{..} | Stmt(..) => None,
|
Annotation(..) | Dbg{..} | Expect{..} | ExpectFx{..} | Stmt(..) | ModuleImport{..} | IngestedFileImport(_) => None,
|
||||||
AnnotatedBody { body_pattern, body_expr, .. } => Some((body_pattern, body_expr)),
|
AnnotatedBody { body_pattern, body_expr, .. } => Some((body_pattern, body_expr)),
|
||||||
Body (def_pattern, def_expr, .. ) => Some((def_pattern, def_expr)),
|
Body (def_pattern, def_expr, .. ) => Some((def_pattern, def_expr)),
|
||||||
};
|
};
|
||||||
|
@ -670,7 +662,7 @@ pub fn unwrap_suffixed_expression_defs_help<'a>(
|
||||||
Ok(next_expr) => next_expr,
|
Ok(next_expr) => next_expr,
|
||||||
Err(EUnwrapped::UnwrappedSubExpr { sub_arg, sub_pat, sub_new }) => {
|
Err(EUnwrapped::UnwrappedSubExpr { sub_arg, sub_pat, sub_new }) => {
|
||||||
// We need to apply Task.ok here as the defs final expression was unwrapped
|
// We need to apply Task.ok here as the defs final expression was unwrapped
|
||||||
apply_task_await(arena,def_expr.region,sub_arg,sub_pat,wrap_in_task_ok(arena, sub_new))
|
apply_task_await(arena,def_expr.region,sub_arg,sub_pat,sub_new)
|
||||||
}
|
}
|
||||||
Err(EUnwrapped::UnwrappedDefExpr(..)) | Err(EUnwrapped::Malformed) => {
|
Err(EUnwrapped::UnwrappedDefExpr(..)) | Err(EUnwrapped::Malformed) => {
|
||||||
// TODO handle case when we have maybe_def_pat so can return an unwrapped up
|
// TODO handle case when we have maybe_def_pat so can return an unwrapped up
|
||||||
|
@ -816,7 +808,6 @@ pub fn apply_task_await<'a>(
|
||||||
value: Var {
|
value: Var {
|
||||||
module_name: ModuleName::TASK,
|
module_name: ModuleName::TASK,
|
||||||
ident: "await",
|
ident: "await",
|
||||||
suffixed: 0,
|
|
||||||
},
|
},
|
||||||
}),
|
}),
|
||||||
arena.alloc(task_await_apply_args),
|
arena.alloc(task_await_apply_args),
|
||||||
|
@ -857,15 +848,21 @@ fn is_matching_empty_record<'a>(
|
||||||
is_empty_record && is_pattern_empty_record
|
is_empty_record && is_pattern_empty_record
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_matching_intermediate_answer<'a>(
|
pub fn is_matching_intermediate_answer<'a>(
|
||||||
loc_pat: &'a Loc<Pattern<'a>>,
|
loc_pat: &'a Loc<Pattern<'a>>,
|
||||||
loc_expr: &'a Loc<Expr<'a>>,
|
loc_new: &'a Loc<Expr<'a>>,
|
||||||
) -> bool {
|
) -> bool {
|
||||||
let pat_ident = match loc_pat.value {
|
let pat_ident = match loc_pat.value {
|
||||||
Pattern::Identifier { ident, .. } => Some(ident),
|
Pattern::Identifier { ident, .. } => Some(ident),
|
||||||
_ => None,
|
_ => None,
|
||||||
};
|
};
|
||||||
let exp_ident = match extract_wrapped_task_ok_value(loc_expr) {
|
let exp_ident = match loc_new.value {
|
||||||
|
Expr::Var {
|
||||||
|
module_name, ident, ..
|
||||||
|
} if module_name.is_empty() && ident.starts_with('#') => Some(ident),
|
||||||
|
_ => None,
|
||||||
|
};
|
||||||
|
let exp_ident_in_task = match extract_wrapped_task_ok_value(loc_new) {
|
||||||
Some(task_expr) => match task_expr.value {
|
Some(task_expr) => match task_expr.value {
|
||||||
Expr::Var {
|
Expr::Var {
|
||||||
module_name, ident, ..
|
module_name, ident, ..
|
||||||
|
@ -874,8 +871,9 @@ fn is_matching_intermediate_answer<'a>(
|
||||||
},
|
},
|
||||||
None => None,
|
None => None,
|
||||||
};
|
};
|
||||||
match (pat_ident, exp_ident) {
|
match (pat_ident, exp_ident, exp_ident_in_task) {
|
||||||
(Some(a), Some(b)) => a == b,
|
(Some(a), Some(b), None) => a == b,
|
||||||
|
(Some(a), None, Some(b)) => a == b,
|
||||||
_ => false,
|
_ => false,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
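The updated is_matching_intermediate_answer above now accepts the generated '#'-prefixed variable either bare or wrapped in Task.ok. A minimal standalone sketch of that check over a toy AST; the Expr enum, TaskOk wrapper, and helper names below are simplified stand-ins, not the real roc_parse types:

// Toy AST: Var mirrors the { module_name, ident } shape used above, and TaskOk
// stands in for an expression already wrapped by Task.ok.
enum Expr<'a> {
    Var { module_name: &'a str, ident: &'a str },
    TaskOk(Box<Expr<'a>>),
}

// True when the expression is a generated intermediate answer variable
// (unqualified and starting with '#') whose name matches the pattern ident.
fn matches_generated_ident(expr: &Expr, pat_ident: &str) -> bool {
    match expr {
        Expr::Var { module_name, ident }
            if module_name.is_empty() && ident.starts_with('#') =>
        {
            *ident == pat_ident
        }
        _ => false,
    }
}

fn is_matching_intermediate_answer(pat_ident: &str, new_expr: &Expr) -> bool {
    match new_expr {
        // the answer may already be wrapped in Task.ok ...
        Expr::TaskOk(inner) => matches_generated_ident(inner, pat_ident),
        // ... or still be the bare generated variable
        other => matches_generated_ident(other, pat_ident),
    }
}

fn main() {
    let bare = Expr::Var { module_name: "", ident: "#!a0" };
    let wrapped = Expr::TaskOk(Box::new(Expr::Var { module_name: "", ident: "#!a0" }));
    assert!(is_matching_intermediate_answer("#!a0", &bare));
    assert!(is_matching_intermediate_answer("#!a0", &wrapped));
    assert!(!is_matching_intermediate_answer("#!a0", &Expr::Var { module_name: "Task", ident: "ok" }));
}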
@ -7,12 +7,13 @@ use roc_can::expr::Output;
|
||||||
use roc_can::expr::{canonicalize_expr, Expr};
|
use roc_can::expr::{canonicalize_expr, Expr};
|
||||||
use roc_can::scope::Scope;
|
use roc_can::scope::Scope;
|
||||||
use roc_collections::all::MutMap;
|
use roc_collections::all::MutMap;
|
||||||
use roc_module::symbol::{IdentIds, Interns, ModuleId, ModuleIds, Symbol};
|
use roc_module::symbol::{IdentIds, Interns, ModuleId, ModuleIds, PackageModuleIds, Symbol};
|
||||||
use roc_problem::can::Problem;
|
use roc_problem::can::Problem;
|
||||||
use roc_region::all::{Loc, Region};
|
use roc_region::all::{Loc, Region};
|
||||||
use roc_types::subs::{VarStore, Variable};
|
use roc_types::subs::{VarStore, Variable};
|
||||||
use roc_types::types::{AliasVar, Type};
|
use roc_types::types::{AliasVar, Type};
|
||||||
use std::hash::Hash;
|
use std::hash::Hash;
|
||||||
|
use std::path::Path;
|
||||||
|
|
||||||
pub fn test_home() -> ModuleId {
|
pub fn test_home() -> ModuleId {
|
||||||
ModuleIds::default().get_or_insert(&"Test".into())
|
ModuleIds::default().get_or_insert(&"Test".into())
|
||||||
|
@ -43,7 +44,7 @@ pub fn can_expr_with(arena: &Bump, home: ModuleId, expr_str: &str) -> CanExprOut
|
||||||
|
|
||||||
let mut var_store = VarStore::default();
|
let mut var_store = VarStore::default();
|
||||||
let var = var_store.fresh();
|
let var = var_store.fresh();
|
||||||
let module_ids = ModuleIds::default();
|
let qualified_module_ids = PackageModuleIds::default();
|
||||||
|
|
||||||
// Desugar operators (convert them to Apply calls, taking into account
|
// Desugar operators (convert them to Apply calls, taking into account
|
||||||
// operator precedence and associativity rules), before doing other canonicalization.
|
// operator precedence and associativity rules), before doing other canonicalization.
|
||||||
|
@ -60,7 +61,12 @@ pub fn can_expr_with(arena: &Bump, home: ModuleId, expr_str: &str) -> CanExprOut
|
||||||
arena.alloc("TestPath"),
|
arena.alloc("TestPath"),
|
||||||
);
|
);
|
||||||
|
|
||||||
let mut scope = Scope::new(home, IdentIds::default(), Default::default());
|
let mut scope = Scope::new(
|
||||||
|
home,
|
||||||
|
"TestPath".into(),
|
||||||
|
IdentIds::default(),
|
||||||
|
Default::default(),
|
||||||
|
);
|
||||||
scope.add_alias(
|
scope.add_alias(
|
||||||
Symbol::NUM_INT,
|
Symbol::NUM_INT,
|
||||||
Region::zero(),
|
Region::zero(),
|
||||||
|
@ -74,7 +80,14 @@ pub fn can_expr_with(arena: &Bump, home: ModuleId, expr_str: &str) -> CanExprOut
|
||||||
);
|
);
|
||||||
|
|
||||||
let dep_idents = IdentIds::exposed_builtins(0);
|
let dep_idents = IdentIds::exposed_builtins(0);
|
||||||
let mut env = Env::new(arena, home, &dep_idents, &module_ids);
|
let mut env = Env::new(
|
||||||
|
arena,
|
||||||
|
home,
|
||||||
|
Path::new("Test.roc"),
|
||||||
|
&dep_idents,
|
||||||
|
&qualified_module_ids,
|
||||||
|
None,
|
||||||
|
);
|
||||||
let (loc_expr, output) = canonicalize_expr(
|
let (loc_expr, output) = canonicalize_expr(
|
||||||
&mut env,
|
&mut env,
|
||||||
&mut var_store,
|
&mut var_store,
|
||||||
|
@ -87,7 +100,7 @@ pub fn can_expr_with(arena: &Bump, home: ModuleId, expr_str: &str) -> CanExprOut
|
||||||
all_ident_ids.insert(home, scope.locals.ident_ids);
|
all_ident_ids.insert(home, scope.locals.ident_ids);
|
||||||
|
|
||||||
let interns = Interns {
|
let interns = Interns {
|
||||||
module_ids: env.module_ids.clone(),
|
module_ids: env.qualified_module_ids.clone().into_module_ids(),
|
||||||
all_ident_ids,
|
all_ident_ids,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
File diff suppressed because one or more lines are too long
20
crates/compiler/checkmate/www/package-lock.json
generated
|
@ -6021,12 +6021,12 @@
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/braces": {
|
"node_modules/braces": {
|
||||||
"version": "3.0.2",
|
"version": "3.0.3",
|
||||||
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
|
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
|
||||||
"integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==",
|
"integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"fill-range": "^7.0.1"
|
"fill-range": "^7.1.1"
|
||||||
},
|
},
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=8"
|
"node": ">=8"
|
||||||
|
@ -7560,9 +7560,9 @@
|
||||||
"dev": true
|
"dev": true
|
||||||
},
|
},
|
||||||
"node_modules/ejs": {
|
"node_modules/ejs": {
|
||||||
"version": "3.1.9",
|
"version": "3.1.10",
|
||||||
"resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.9.tgz",
|
"resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.10.tgz",
|
||||||
"integrity": "sha512-rC+QVNMJWv+MtPgkt0y+0rVEIdbtxVADApW9JXrUVlzHetgcyczP/E7DJmWJ4fJCZF2cPcBk0laWO9ZHMG3DmQ==",
|
"integrity": "sha512-UeJmFfOrAQS8OJWPZ4qtgHyWExa088/MtK5UEyoJGFH67cDEXkZSviOiKRCZ4Xij0zxI3JECgYs3oKx+AizQBA==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"jake": "^10.8.5"
|
"jake": "^10.8.5"
|
||||||
|
@ -8887,9 +8887,9 @@
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/fill-range": {
|
"node_modules/fill-range": {
|
||||||
"version": "7.0.1",
|
"version": "7.1.1",
|
||||||
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
|
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
|
||||||
"integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==",
|
"integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"to-regex-range": "^5.0.1"
|
"to-regex-range": "^5.0.1"
|
||||||
|
|
|
@ -3682,7 +3682,7 @@ fn constraint_recursive_function(
                 signature_closure_type,
                 ret_type,
             ),
-        _ => todo!("TODO {:?}", (loc_symbol, &signature)),
+        _ => todo!("TODO {:?}", (loc_symbol, types[signature])),
     };
 
     let region = loc_function_def.region;

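The changed todo! now prints the type looked up from the types store instead of the raw signature handle. The general index-by-handle pattern it relies on looks like the toy example below; the Types/TypeId names are invented for illustration, not roc's real types.

    use std::ops::Index;

    struct Types(Vec<String>);

    #[derive(Clone, Copy, Debug)]
    struct TypeId(usize);

    impl Index<TypeId> for Types {
        type Output = String;
        fn index(&self, id: TypeId) -> &String {
            // Resolve the interned id to the stored type description.
            &self.0[id.0]
        }
    }

    fn main() {
        let types = Types(vec!["Str -> Str".to_string()]);
        let signature = TypeId(0);
        // Debug-print the resolved type rather than the opaque index.
        println!("TODO {:?}", ("loc_symbol", &types[signature]));
    }
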
@ -1,11 +1,15 @@
|
||||||
use crate::annotation::{Formattable, Newlines, Parens};
|
use crate::annotation::{is_collection_multiline, Formattable, Newlines, Parens};
|
||||||
|
use crate::collection::{fmt_collection, Braces};
|
||||||
|
use crate::expr::fmt_str_literal;
|
||||||
use crate::pattern::fmt_pattern;
|
use crate::pattern::fmt_pattern;
|
||||||
use crate::spaces::{fmt_default_newline, fmt_spaces, INDENT};
|
use crate::spaces::{fmt_default_newline, fmt_default_spaces, fmt_spaces, INDENT};
|
||||||
use crate::Buf;
|
use crate::Buf;
|
||||||
use roc_parse::ast::{
|
use roc_parse::ast::{
|
||||||
AbilityMember, Defs, Expr, ExtractSpaces, Pattern, Spaces, StrLiteral, TypeAnnotation, TypeDef,
|
AbilityMember, Defs, Expr, ExtractSpaces, ImportAlias, ImportAsKeyword, ImportExposingKeyword,
|
||||||
TypeHeader, ValueDef,
|
ImportedModuleName, IngestedFileAnnotation, IngestedFileImport, ModuleImport,
|
||||||
|
ModuleImportParams, Pattern, Spaces, StrLiteral, TypeAnnotation, TypeDef, TypeHeader, ValueDef,
|
||||||
};
|
};
|
||||||
|
use roc_parse::header::Keyword;
|
||||||
use roc_region::all::Loc;
|
use roc_region::all::Loc;
|
||||||
|
|
||||||
/// A Located formattable value is also formattable
|
/// A Located formattable value is also formattable
|
||||||
|
@ -183,6 +187,226 @@ impl<'a> Formattable for TypeHeader<'a> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl<'a> Formattable for ModuleImport<'a> {
|
||||||
|
fn is_multiline(&self) -> bool {
|
||||||
|
let Self {
|
||||||
|
before_name,
|
||||||
|
name,
|
||||||
|
params,
|
||||||
|
alias,
|
||||||
|
exposed,
|
||||||
|
} = self;
|
||||||
|
|
||||||
|
!before_name.is_empty()
|
||||||
|
|| name.is_multiline()
|
||||||
|
|| params.is_multiline()
|
||||||
|
|| alias.is_multiline()
|
||||||
|
|| match exposed {
|
||||||
|
Some(a) => a.keyword.is_multiline() || is_collection_multiline(&a.item),
|
||||||
|
None => false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn format_with_options(
|
||||||
|
&self,
|
||||||
|
buf: &mut Buf,
|
||||||
|
_parens: Parens,
|
||||||
|
_newlines: Newlines,
|
||||||
|
indent: u16,
|
||||||
|
) {
|
||||||
|
let Self {
|
||||||
|
before_name,
|
||||||
|
name,
|
||||||
|
params,
|
||||||
|
alias,
|
||||||
|
exposed,
|
||||||
|
} = self;
|
||||||
|
|
||||||
|
buf.indent(indent);
|
||||||
|
buf.push_str("import");
|
||||||
|
|
||||||
|
let indent = if !before_name.is_empty()
|
||||||
|
|| (params.is_multiline() && exposed.is_some())
|
||||||
|
|| alias.is_multiline()
|
||||||
|
|| exposed.map_or(false, |e| e.keyword.is_multiline())
|
||||||
|
{
|
||||||
|
indent + INDENT
|
||||||
|
} else {
|
||||||
|
indent
|
||||||
|
};
|
||||||
|
|
||||||
|
fmt_default_spaces(buf, before_name, indent);
|
||||||
|
|
||||||
|
name.format(buf, indent);
|
||||||
|
params.format(buf, indent);
|
||||||
|
alias.format(buf, indent);
|
||||||
|
|
||||||
|
if let Some(exposed) = exposed {
|
||||||
|
exposed.keyword.format(buf, indent);
|
||||||
|
fmt_collection(buf, indent, Braces::Square, exposed.item, Newlines::No);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> Formattable for ModuleImportParams<'a> {
|
||||||
|
fn is_multiline(&self) -> bool {
|
||||||
|
let ModuleImportParams { before, params } = self;
|
||||||
|
|
||||||
|
!before.is_empty() || is_collection_multiline(params)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn format_with_options(&self, buf: &mut Buf, _parens: Parens, newlines: Newlines, indent: u16) {
|
||||||
|
let ModuleImportParams { before, params } = self;
|
||||||
|
|
||||||
|
fmt_default_spaces(buf, before, indent);
|
||||||
|
fmt_collection(buf, indent, Braces::Curly, *params, newlines);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> Formattable for IngestedFileImport<'a> {
|
||||||
|
fn is_multiline(&self) -> bool {
|
||||||
|
let Self {
|
||||||
|
before_path,
|
||||||
|
path: _,
|
||||||
|
name,
|
||||||
|
annotation,
|
||||||
|
} = self;
|
||||||
|
!before_path.is_empty() || name.keyword.is_multiline() || annotation.is_multiline()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn format_with_options(
|
||||||
|
&self,
|
||||||
|
buf: &mut Buf,
|
||||||
|
_parens: Parens,
|
||||||
|
_newlines: Newlines,
|
||||||
|
indent: u16,
|
||||||
|
) {
|
||||||
|
let Self {
|
||||||
|
before_path,
|
||||||
|
path,
|
||||||
|
name,
|
||||||
|
annotation,
|
||||||
|
} = self;
|
||||||
|
|
||||||
|
buf.indent(indent);
|
||||||
|
buf.push_str("import");
|
||||||
|
|
||||||
|
let indent = indent + INDENT;
|
||||||
|
|
||||||
|
fmt_default_spaces(buf, before_path, indent);
|
||||||
|
fmt_str_literal(buf, path.value, indent);
|
||||||
|
|
||||||
|
name.keyword.format(buf, indent);
|
||||||
|
buf.push_str(name.item.value);
|
||||||
|
|
||||||
|
annotation.format(buf, indent);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> Formattable for ImportedModuleName<'a> {
|
||||||
|
fn is_multiline(&self) -> bool {
|
||||||
|
// No newlines in module name itself.
|
||||||
|
false
|
||||||
|
}
|
||||||
|
|
||||||
|
fn format_with_options(
|
||||||
|
&self,
|
||||||
|
buf: &mut Buf,
|
||||||
|
_parens: Parens,
|
||||||
|
_newlines: Newlines,
|
||||||
|
indent: u16,
|
||||||
|
) {
|
||||||
|
buf.indent(indent);
|
||||||
|
|
||||||
|
if let Some(package_shorthand) = self.package {
|
||||||
|
buf.push_str(package_shorthand);
|
||||||
|
buf.push_str(".");
|
||||||
|
}
|
||||||
|
|
||||||
|
self.name.format(buf, indent);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> Formattable for ImportAlias<'a> {
|
||||||
|
fn is_multiline(&self) -> bool {
|
||||||
|
// No newlines in alias itself.
|
||||||
|
false
|
||||||
|
}
|
||||||
|
|
||||||
|
fn format_with_options(
|
||||||
|
&self,
|
||||||
|
buf: &mut Buf,
|
||||||
|
_parens: Parens,
|
||||||
|
_newlines: Newlines,
|
||||||
|
indent: u16,
|
||||||
|
) {
|
||||||
|
buf.indent(indent);
|
||||||
|
buf.push_str(self.as_str());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Formattable for ImportAsKeyword {
|
||||||
|
fn is_multiline(&self) -> bool {
|
||||||
|
false
|
||||||
|
}
|
||||||
|
|
||||||
|
fn format_with_options(
|
||||||
|
&self,
|
||||||
|
buf: &mut Buf<'_>,
|
||||||
|
_parens: crate::annotation::Parens,
|
||||||
|
_newlines: Newlines,
|
||||||
|
indent: u16,
|
||||||
|
) {
|
||||||
|
buf.indent(indent);
|
||||||
|
buf.push_str(ImportAsKeyword::KEYWORD);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Formattable for ImportExposingKeyword {
|
||||||
|
fn is_multiline(&self) -> bool {
|
||||||
|
false
|
||||||
|
}
|
||||||
|
|
||||||
|
fn format_with_options(
|
||||||
|
&self,
|
||||||
|
buf: &mut Buf<'_>,
|
||||||
|
_parens: crate::annotation::Parens,
|
||||||
|
_newlines: Newlines,
|
||||||
|
indent: u16,
|
||||||
|
) {
|
||||||
|
buf.indent(indent);
|
||||||
|
buf.push_str(ImportExposingKeyword::KEYWORD);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> Formattable for IngestedFileAnnotation<'a> {
|
||||||
|
fn is_multiline(&self) -> bool {
|
||||||
|
let Self {
|
||||||
|
before_colon,
|
||||||
|
annotation,
|
||||||
|
} = self;
|
||||||
|
!before_colon.is_empty() || annotation.is_multiline()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn format_with_options(
|
||||||
|
&self,
|
||||||
|
buf: &mut Buf,
|
||||||
|
_parens: Parens,
|
||||||
|
_newlines: Newlines,
|
||||||
|
indent: u16,
|
||||||
|
) {
|
||||||
|
let Self {
|
||||||
|
before_colon,
|
||||||
|
annotation,
|
||||||
|
} = self;
|
||||||
|
|
||||||
|
fmt_default_spaces(buf, before_colon, indent);
|
||||||
|
buf.push_str(":");
|
||||||
|
buf.spaces(1);
|
||||||
|
annotation.format(buf, indent);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl<'a> Formattable for ValueDef<'a> {
|
impl<'a> Formattable for ValueDef<'a> {
|
||||||
fn is_multiline(&self) -> bool {
|
fn is_multiline(&self) -> bool {
|
||||||
use roc_parse::ast::ValueDef::*;
|
use roc_parse::ast::ValueDef::*;
|
||||||
|
@ -196,6 +420,8 @@ impl<'a> Formattable for ValueDef<'a> {
|
||||||
Expect { condition, .. } => condition.is_multiline(),
|
Expect { condition, .. } => condition.is_multiline(),
|
||||||
ExpectFx { condition, .. } => condition.is_multiline(),
|
ExpectFx { condition, .. } => condition.is_multiline(),
|
||||||
Dbg { condition, .. } => condition.is_multiline(),
|
Dbg { condition, .. } => condition.is_multiline(),
|
||||||
|
ModuleImport(module_import) => module_import.is_multiline(),
|
||||||
|
IngestedFileImport(ingested_file_import) => ingested_file_import.is_multiline(),
|
||||||
Stmt(loc_expr) => loc_expr.is_multiline(),
|
Stmt(loc_expr) => loc_expr.is_multiline(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -239,6 +465,8 @@ impl<'a> Formattable for ValueDef<'a> {
|
||||||
buf.newline();
|
buf.newline();
|
||||||
fmt_body(buf, &body_pattern.value, &body_expr.value, indent);
|
fmt_body(buf, &body_pattern.value, &body_expr.value, indent);
|
||||||
}
|
}
|
||||||
|
ModuleImport(module_import) => module_import.format(buf, indent),
|
||||||
|
IngestedFileImport(ingested_file_import) => ingested_file_import.format(buf, indent),
|
||||||
Stmt(loc_expr) => loc_expr.format_with_options(buf, parens, newlines, indent),
|
Stmt(loc_expr) => loc_expr.format_with_options(buf, parens, newlines, indent),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -9,7 +9,7 @@ use crate::spaces::{
|
||||||
use crate::Buf;
|
use crate::Buf;
|
||||||
use roc_module::called_via::{self, BinOp};
|
use roc_module::called_via::{self, BinOp};
|
||||||
use roc_parse::ast::{
|
use roc_parse::ast::{
|
||||||
is_loc_expr_suffixed, AssignedField, Base, Collection, CommentOrNewline, Expr, ExtractSpaces,
|
is_expr_suffixed, AssignedField, Base, Collection, CommentOrNewline, Expr, ExtractSpaces,
|
||||||
Pattern, RecordBuilderField, WhenBranch,
|
Pattern, RecordBuilderField, WhenBranch,
|
||||||
};
|
};
|
||||||
use roc_parse::ast::{StrLiteral, StrSegment};
|
use roc_parse::ast::{StrLiteral, StrSegment};
|
||||||
|
@ -38,19 +38,20 @@ impl<'a> Formattable for Expr<'a> {
|
||||||
| Num(..)
|
| Num(..)
|
||||||
| NonBase10Int { .. }
|
| NonBase10Int { .. }
|
||||||
| SingleQuote(_)
|
| SingleQuote(_)
|
||||||
| RecordAccess(_, _)
|
|
||||||
| AccessorFunction(_)
|
| AccessorFunction(_)
|
||||||
| TupleAccess(_, _)
|
|
||||||
| Var { .. }
|
| Var { .. }
|
||||||
| Underscore { .. }
|
| Underscore { .. }
|
||||||
| MalformedIdent(_, _)
|
| MalformedIdent(_, _)
|
||||||
| MalformedClosure
|
| MalformedClosure
|
||||||
| Tag(_)
|
| Tag(_)
|
||||||
| OpaqueRef(_)
|
| OpaqueRef(_)
|
||||||
| IngestedFile(_, _)
|
|
||||||
| EmptyDefsFinal
|
| EmptyDefsFinal
|
||||||
| Crash => false,
|
| Crash => false,
|
||||||
|
|
||||||
|
RecordAccess(inner, _) | TupleAccess(inner, _) | TaskAwaitBang(inner) => {
|
||||||
|
inner.is_multiline()
|
||||||
|
}
|
||||||
|
|
||||||
// These expressions always have newlines
|
// These expressions always have newlines
|
||||||
Defs(_, _) | When(_, _) => true,
|
Defs(_, _) | When(_, _) => true,
|
||||||
|
|
||||||
|
@ -170,11 +171,7 @@ impl<'a> Formattable for Expr<'a> {
|
||||||
Str(literal) => {
|
Str(literal) => {
|
||||||
fmt_str_literal(buf, *literal, indent);
|
fmt_str_literal(buf, *literal, indent);
|
||||||
}
|
}
|
||||||
Var {
|
Var { module_name, ident } => {
|
||||||
module_name,
|
|
||||||
ident,
|
|
||||||
suffixed,
|
|
||||||
} => {
|
|
||||||
buf.indent(indent);
|
buf.indent(indent);
|
||||||
if !module_name.is_empty() {
|
if !module_name.is_empty() {
|
||||||
buf.push_str(module_name);
|
buf.push_str(module_name);
|
||||||
|
@ -182,11 +179,6 @@ impl<'a> Formattable for Expr<'a> {
|
||||||
}
|
}
|
||||||
|
|
||||||
buf.push_str(ident);
|
buf.push_str(ident);
|
||||||
|
|
||||||
let count: u8 = *suffixed;
|
|
||||||
for _ in 0..count {
|
|
||||||
buf.push('!');
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
Underscore(name) => {
|
Underscore(name) => {
|
||||||
buf.indent(indent);
|
buf.indent(indent);
|
||||||
|
@ -512,60 +504,18 @@ impl<'a> Formattable for Expr<'a> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
RecordAccess(expr, key) => {
|
RecordAccess(expr, key) => {
|
||||||
// Check for any `!` suffixes and format these at the end of expression
|
expr.format_with_options(buf, Parens::InApply, Newlines::Yes, indent);
|
||||||
let (expr_to_format, suffix_count) = if let Var {
|
|
||||||
module_name,
|
|
||||||
ident,
|
|
||||||
suffixed,
|
|
||||||
} = expr
|
|
||||||
{
|
|
||||||
(
|
|
||||||
Var {
|
|
||||||
module_name,
|
|
||||||
ident,
|
|
||||||
suffixed: 0,
|
|
||||||
},
|
|
||||||
suffixed,
|
|
||||||
)
|
|
||||||
} else {
|
|
||||||
(**expr, &0u8)
|
|
||||||
};
|
|
||||||
|
|
||||||
expr_to_format.format_with_options(buf, Parens::InApply, Newlines::Yes, indent);
|
|
||||||
buf.push('.');
|
buf.push('.');
|
||||||
buf.push_str(key);
|
buf.push_str(key);
|
||||||
|
|
||||||
for _ in 0..*suffix_count {
|
|
||||||
buf.push('!');
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
TupleAccess(expr, key) => {
|
TupleAccess(expr, key) => {
|
||||||
// Check for any `!` suffixes and format these at the end of expression
|
expr.format_with_options(buf, Parens::InApply, Newlines::Yes, indent);
|
||||||
let (expr_to_format, suffix_count) = if let Var {
|
|
||||||
module_name,
|
|
||||||
ident,
|
|
||||||
suffixed,
|
|
||||||
} = expr
|
|
||||||
{
|
|
||||||
(
|
|
||||||
Var {
|
|
||||||
module_name,
|
|
||||||
ident,
|
|
||||||
suffixed: 0,
|
|
||||||
},
|
|
||||||
suffixed,
|
|
||||||
)
|
|
||||||
} else {
|
|
||||||
(**expr, &0u8)
|
|
||||||
};
|
|
||||||
|
|
||||||
expr_to_format.format_with_options(buf, Parens::InApply, Newlines::Yes, indent);
|
|
||||||
buf.push('.');
|
buf.push('.');
|
||||||
buf.push_str(key);
|
buf.push_str(key);
|
||||||
|
}
|
||||||
for _ in 0..*suffix_count {
|
TaskAwaitBang(expr) => {
|
||||||
buf.push('!');
|
expr.format_with_options(buf, Parens::InApply, Newlines::Yes, indent);
|
||||||
}
|
buf.push('!');
|
||||||
}
|
}
|
||||||
MalformedIdent(str, _) => {
|
MalformedIdent(str, _) => {
|
||||||
buf.indent(indent);
|
buf.indent(indent);
|
||||||
|
@ -579,7 +529,6 @@ impl<'a> Formattable for Expr<'a> {
|
||||||
PrecedenceConflict { .. } => {}
|
PrecedenceConflict { .. } => {}
|
||||||
MultipleRecordBuilders { .. } => {}
|
MultipleRecordBuilders { .. } => {}
|
||||||
UnappliedRecordBuilder { .. } => {}
|
UnappliedRecordBuilder { .. } => {}
|
||||||
IngestedFile(_, _) => {}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -789,8 +738,8 @@ fn fmt_binops<'a>(
|
||||||
|| loc_right_side.value.is_multiline()
|
|| loc_right_side.value.is_multiline()
|
||||||
|| lefts.iter().any(|(expr, _)| expr.value.is_multiline());
|
|| lefts.iter().any(|(expr, _)| expr.value.is_multiline());
|
||||||
|
|
||||||
let is_any_lefts_suffixed = lefts.iter().any(|(left, _)| is_loc_expr_suffixed(left));
|
let is_any_lefts_suffixed = lefts.iter().any(|(left, _)| is_expr_suffixed(&left.value));
|
||||||
let is_right_suffixed = is_loc_expr_suffixed(loc_right_side);
|
let is_right_suffixed = is_expr_suffixed(&loc_right_side.value);
|
||||||
let is_any_suffixed = is_any_lefts_suffixed || is_right_suffixed;
|
let is_any_suffixed = is_any_lefts_suffixed || is_right_suffixed;
|
||||||
|
|
||||||
let mut is_first = false;
|
let mut is_first = false;
|
||||||
|
|
|
@ -1,3 +1,5 @@
|
||||||
|
use std::cmp::max;
|
||||||
|
|
||||||
use crate::annotation::{is_collection_multiline, Formattable, Newlines, Parens};
|
use crate::annotation::{is_collection_multiline, Formattable, Newlines, Parens};
|
||||||
use crate::collection::{fmt_collection, Braces};
|
use crate::collection::{fmt_collection, Braces};
|
||||||
use crate::expr::fmt_str_literal;
|
use crate::expr::fmt_str_literal;
|
||||||
|
@ -5,12 +7,13 @@ use crate::spaces::RemoveSpaces;
|
||||||
use crate::spaces::{fmt_comments_only, fmt_default_spaces, fmt_spaces, NewlineAt, INDENT};
|
use crate::spaces::{fmt_comments_only, fmt_default_spaces, fmt_spaces, NewlineAt, INDENT};
|
||||||
use crate::Buf;
|
use crate::Buf;
|
||||||
use bumpalo::Bump;
|
use bumpalo::Bump;
|
||||||
use roc_parse::ast::{Collection, Header, Module, Spaced, Spaces};
|
use roc_parse::ast::{Collection, CommentOrNewline, Header, Module, Spaced, Spaces};
|
||||||
use roc_parse::header::{
|
use roc_parse::header::{
|
||||||
AppHeader, ExposedName, ExposesKeyword, GeneratesKeyword, HostedHeader, ImportsEntry,
|
AppHeader, ExposedName, ExposesKeyword, GeneratesKeyword, HostedHeader, ImportsEntry,
|
||||||
ImportsKeyword, InterfaceHeader, Keyword, KeywordItem, ModuleName, PackageEntry, PackageHeader,
|
ImportsKeyword, Keyword, KeywordItem, ModuleHeader, ModuleName, PackageEntry, PackageHeader,
|
||||||
PackageKeyword, PackageName, PackagesKeyword, PlatformHeader, PlatformRequires,
|
PackageKeyword, PackageName, PackagesKeyword, PlatformHeader, PlatformKeyword,
|
||||||
ProvidesKeyword, ProvidesTo, RequiresKeyword, To, ToKeyword, TypedIdent, WithKeyword,
|
PlatformRequires, ProvidesKeyword, ProvidesTo, RequiresKeyword, To, ToKeyword, TypedIdent,
|
||||||
|
WithKeyword,
|
||||||
};
|
};
|
||||||
use roc_parse::ident::UppercaseIdent;
|
use roc_parse::ident::UppercaseIdent;
|
||||||
use roc_region::all::Loc;
|
use roc_region::all::Loc;
|
||||||
|
@ -18,8 +21,8 @@ use roc_region::all::Loc;
|
||||||
pub fn fmt_module<'a>(buf: &mut Buf<'_>, module: &'a Module<'a>) {
|
pub fn fmt_module<'a>(buf: &mut Buf<'_>, module: &'a Module<'a>) {
|
||||||
fmt_comments_only(buf, module.comments.iter(), NewlineAt::Bottom, 0);
|
fmt_comments_only(buf, module.comments.iter(), NewlineAt::Bottom, 0);
|
||||||
match &module.header {
|
match &module.header {
|
||||||
Header::Interface(header) => {
|
Header::Module(header) => {
|
||||||
fmt_interface_header(buf, header);
|
fmt_module_header(buf, header);
|
||||||
}
|
}
|
||||||
Header::App(header) => {
|
Header::App(header) => {
|
||||||
fmt_app_header(buf, header);
|
fmt_app_header(buf, header);
|
||||||
|
@ -75,6 +78,7 @@ keywords! {
|
||||||
RequiresKeyword,
|
RequiresKeyword,
|
||||||
ProvidesKeyword,
|
ProvidesKeyword,
|
||||||
ToKeyword,
|
ToKeyword,
|
||||||
|
PlatformKeyword,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<V: Formattable> Formattable for Option<V> {
|
impl<V: Formattable> Formattable for Option<V> {
|
||||||
|
@ -171,20 +175,25 @@ impl<'a, K: Formattable, V: Formattable> Formattable for KeywordItem<'a, K, V> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn fmt_interface_header<'a>(buf: &mut Buf, header: &'a InterfaceHeader<'a>) {
|
pub fn fmt_module_header<'a>(buf: &mut Buf, header: &'a ModuleHeader<'a>) {
|
||||||
buf.indent(0);
|
buf.indent(0);
|
||||||
buf.push_str("interface");
|
buf.push_str("module");
|
||||||
let indent = INDENT;
|
|
||||||
fmt_default_spaces(buf, header.before_name, indent);
|
|
||||||
|
|
||||||
// module name
|
let mut indent = fmt_spaces_with_outdent(buf, header.after_keyword, 0);
|
||||||
buf.indent(indent);
|
|
||||||
buf.push_str(header.name.value.as_str());
|
|
||||||
|
|
||||||
header.exposes.keyword.format(buf, indent);
|
if let Some(params) = &header.params {
|
||||||
fmt_exposes(buf, header.exposes.item, indent);
|
if is_collection_multiline(¶ms.params) {
|
||||||
header.imports.keyword.format(buf, indent);
|
indent = INDENT;
|
||||||
fmt_imports(buf, header.imports.item, indent);
|
}
|
||||||
|
|
||||||
|
fmt_collection(buf, indent, Braces::Curly, params.params, Newlines::Yes);
|
||||||
|
|
||||||
|
indent = fmt_spaces_with_outdent(buf, params.before_arrow, indent);
|
||||||
|
buf.push_str("->");
|
||||||
|
indent = fmt_spaces_with_outdent(buf, params.after_arrow, indent);
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt_exposes(buf, header.exposes, indent);
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn fmt_hosted_header<'a>(buf: &mut Buf, header: &'a HostedHeader<'a>) {
|
pub fn fmt_hosted_header<'a>(buf: &mut Buf, header: &'a HostedHeader<'a>) {
|
||||||
|
@ -207,34 +216,34 @@ pub fn fmt_hosted_header<'a>(buf: &mut Buf, header: &'a HostedHeader<'a>) {
|
||||||
pub fn fmt_app_header<'a>(buf: &mut Buf, header: &'a AppHeader<'a>) {
|
pub fn fmt_app_header<'a>(buf: &mut Buf, header: &'a AppHeader<'a>) {
|
||||||
buf.indent(0);
|
buf.indent(0);
|
||||||
buf.push_str("app");
|
buf.push_str("app");
|
||||||
let indent = INDENT;
|
|
||||||
fmt_default_spaces(buf, header.before_name, indent);
|
|
||||||
|
|
||||||
fmt_str_literal(buf, header.name.value, indent);
|
let indent = fmt_spaces_with_outdent(buf, header.before_provides, 0);
|
||||||
|
fmt_exposes(buf, header.provides, indent);
|
||||||
|
|
||||||
if let Some(packages) = &header.packages {
|
let indent = fmt_spaces_with_outdent(buf, header.before_packages, indent);
|
||||||
packages.keyword.format(buf, indent);
|
fmt_packages(buf, header.packages.value, indent);
|
||||||
fmt_packages(buf, packages.item, indent);
|
}
|
||||||
|
|
||||||
|
pub fn fmt_spaces_with_outdent(buf: &mut Buf, spaces: &[CommentOrNewline], indent: u16) -> u16 {
|
||||||
|
if spaces.iter().all(|c| c.is_newline()) {
|
||||||
|
buf.spaces(1);
|
||||||
|
indent
|
||||||
|
} else {
|
||||||
|
let indent = max(INDENT, indent + INDENT);
|
||||||
|
fmt_default_spaces(buf, spaces, indent);
|
||||||
|
indent
|
||||||
}
|
}
|
||||||
if let Some(imports) = &header.imports {
|
|
||||||
imports.keyword.format(buf, indent);
|
|
||||||
fmt_imports(buf, imports.item, indent);
|
|
||||||
}
|
|
||||||
header.provides.format(buf, indent);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn fmt_package_header<'a>(buf: &mut Buf, header: &'a PackageHeader<'a>) {
|
pub fn fmt_package_header<'a>(buf: &mut Buf, header: &'a PackageHeader<'a>) {
|
||||||
buf.indent(0);
|
buf.indent(0);
|
||||||
buf.push_str("package");
|
buf.push_str("package");
|
||||||
let indent = INDENT;
|
|
||||||
fmt_default_spaces(buf, header.before_name, indent);
|
|
||||||
|
|
||||||
fmt_package_name(buf, header.name.value, indent);
|
let indent = fmt_spaces_with_outdent(buf, header.before_exposes, 0);
|
||||||
|
fmt_exposes(buf, header.exposes, indent);
|
||||||
|
|
||||||
header.exposes.keyword.format(buf, indent);
|
let indent = fmt_spaces_with_outdent(buf, header.before_packages, indent);
|
||||||
fmt_exposes(buf, header.exposes.item, indent);
|
fmt_packages(buf, header.packages.value, indent);
|
||||||
header.packages.keyword.format(buf, indent);
|
|
||||||
fmt_packages(buf, header.packages.item, indent);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn fmt_platform_header<'a>(buf: &mut Buf, header: &'a PlatformHeader<'a>) {
|
pub fn fmt_platform_header<'a>(buf: &mut Buf, header: &'a PlatformHeader<'a>) {
|
||||||
|
@ -465,6 +474,15 @@ fn fmt_packages_entry(buf: &mut Buf, entry: &PackageEntry, indent: u16) {
|
||||||
buf.push_str(entry.shorthand);
|
buf.push_str(entry.shorthand);
|
||||||
buf.push(':');
|
buf.push(':');
|
||||||
fmt_default_spaces(buf, entry.spaces_after_shorthand, indent);
|
fmt_default_spaces(buf, entry.spaces_after_shorthand, indent);
|
||||||
|
|
||||||
|
let indent = indent + INDENT;
|
||||||
|
|
||||||
|
if let Some(spaces_after) = entry.platform_marker {
|
||||||
|
buf.indent(indent);
|
||||||
|
buf.push_str(roc_parse::keyword::PLATFORM);
|
||||||
|
fmt_default_spaces(buf, spaces_after, indent);
|
||||||
|
}
|
||||||
|
|
||||||
fmt_package_name(buf, entry.package_name.value, indent);
|
fmt_package_name(buf, entry.package_name.value, indent);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -88,16 +88,9 @@ impl<'a> Formattable for Pattern<'a> {
|
||||||
use self::Pattern::*;
|
use self::Pattern::*;
|
||||||
|
|
||||||
match self {
|
match self {
|
||||||
Identifier {
|
Identifier { ident: string } => {
|
||||||
ident: string,
|
|
||||||
suffixed,
|
|
||||||
} => {
|
|
||||||
buf.indent(indent);
|
buf.indent(indent);
|
||||||
buf.push_str(string);
|
buf.push_str(string);
|
||||||
|
|
||||||
for _ in 0..*suffixed {
|
|
||||||
buf.push('!');
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
Tag(name) | OpaqueRef(name) => {
|
Tag(name) | OpaqueRef(name) => {
|
||||||
buf.indent(indent);
|
buf.indent(indent);
|
||||||
|
@ -277,21 +270,13 @@ impl<'a> Formattable for Pattern<'a> {
|
||||||
buf.indent(indent);
|
buf.indent(indent);
|
||||||
buf.push_str(string);
|
buf.push_str(string);
|
||||||
}
|
}
|
||||||
QualifiedIdentifier {
|
QualifiedIdentifier { module_name, ident } => {
|
||||||
module_name,
|
|
||||||
ident,
|
|
||||||
suffixed,
|
|
||||||
} => {
|
|
||||||
buf.indent(indent);
|
buf.indent(indent);
|
||||||
if !module_name.is_empty() {
|
if !module_name.is_empty() {
|
||||||
buf.push_str(module_name);
|
buf.push_str(module_name);
|
||||||
buf.push('.');
|
buf.push('.');
|
||||||
}
|
}
|
||||||
|
|
||||||
for _ in 0..*suffixed {
|
|
||||||
buf.push('!');
|
|
||||||
}
|
|
||||||
|
|
||||||
buf.push_str(ident);
|
buf.push_str(ident);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -4,13 +4,15 @@ use roc_module::called_via::{BinOp, UnaryOp};
|
||||||
use roc_parse::{
|
use roc_parse::{
|
||||||
ast::{
|
ast::{
|
||||||
AbilityImpls, AbilityMember, AssignedField, Collection, CommentOrNewline, Defs, Expr,
|
AbilityImpls, AbilityMember, AssignedField, Collection, CommentOrNewline, Defs, Expr,
|
||||||
Header, Implements, ImplementsAbilities, ImplementsAbility, ImplementsClause, Module,
|
Header, Implements, ImplementsAbilities, ImplementsAbility, ImplementsClause, ImportAlias,
|
||||||
Pattern, PatternAs, RecordBuilderField, Spaced, Spaces, StrLiteral, StrSegment, Tag,
|
ImportAsKeyword, ImportExposingKeyword, ImportedModuleName, IngestedFileAnnotation,
|
||||||
TypeAnnotation, TypeDef, TypeHeader, ValueDef, WhenBranch,
|
IngestedFileImport, Module, ModuleImport, ModuleImportParams, Pattern, PatternAs,
|
||||||
|
RecordBuilderField, Spaced, Spaces, StrLiteral, StrSegment, Tag, TypeAnnotation, TypeDef,
|
||||||
|
TypeHeader, ValueDef, WhenBranch,
|
||||||
},
|
},
|
||||||
header::{
|
header::{
|
||||||
AppHeader, ExposedName, HostedHeader, ImportsEntry, InterfaceHeader, KeywordItem,
|
AppHeader, ExposedName, HostedHeader, ImportsEntry, KeywordItem, ModuleHeader, ModuleName,
|
||||||
ModuleName, PackageEntry, PackageHeader, PackageName, PlatformHeader, PlatformRequires,
|
ModuleParams, PackageEntry, PackageHeader, PackageName, PlatformHeader, PlatformRequires,
|
||||||
ProvidesTo, To, TypedIdent,
|
ProvidesTo, To, TypedIdent,
|
||||||
},
|
},
|
||||||
ident::{BadIdent, UppercaseIdent},
|
ident::{BadIdent, UppercaseIdent},
|
||||||
|
@ -147,6 +149,14 @@ pub fn fmt_comments_only<'a, 'buf, I>(
|
||||||
}
|
}
|
||||||
|
|
||||||
fn fmt_comment(buf: &mut Buf, comment: &str) {
|
fn fmt_comment(buf: &mut Buf, comment: &str) {
|
||||||
|
// Format shebangs without whitespace. We look for " !" as well to fix incorrect formatting from
|
||||||
|
// the past.
|
||||||
|
if buf.is_empty() && (comment.starts_with('!') || comment.starts_with(" !")) {
|
||||||
|
buf.push('#');
|
||||||
|
buf.push_str(comment.trim());
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
// The '#' in a comment should always be preceded by a newline or a space,
|
// The '#' in a comment should always be preceded by a newline or a space,
|
||||||
// unless it's the very beginning of the buffer.
|
// unless it's the very beginning of the buffer.
|
||||||
if !buf.is_empty() && !buf.ends_with_space() && !buf.ends_with_newline() {
|
if !buf.is_empty() && !buf.ends_with_space() && !buf.ends_with_newline() {
|
||||||
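The new early-return above makes the formatter keep shebang comments flush with the `#` (and repairs earlier output that had inserted a space). Below is a standalone sketch of that rule with a plain String standing in for the formatter's Buf; the helper names are simplified and only the shebang branch mirrors the diff, the rest is an assumption about the usual "space before #" behaviour described in the surrounding comments.

    fn fmt_comment(out: &mut String, comment: &str) {
        // Shebangs at the very start of the output are emitted as `#!...`,
        // also fixing previously formatted `# !...` output.
        if out.is_empty() && (comment.starts_with('!') || comment.starts_with(" !")) {
            out.push('#');
            out.push_str(comment.trim());
            return;
        }

        // Otherwise make sure the '#' is preceded by a space or newline.
        if !out.is_empty() && !out.ends_with(' ') && !out.ends_with('\n') {
            out.push(' ');
        }
        out.push('#');
        out.push_str(comment);
    }

    fn main() {
        let mut out = String::new();
        fmt_comment(&mut out, "!/usr/bin/env roc");
        assert_eq!(out, "#!/usr/bin/env roc");
    }
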
|
@ -282,23 +292,26 @@ impl<'a> RemoveSpaces<'a> for ProvidesTo<'a> {
|
||||||
impl<'a> RemoveSpaces<'a> for Module<'a> {
|
impl<'a> RemoveSpaces<'a> for Module<'a> {
|
||||||
fn remove_spaces(&self, arena: &'a Bump) -> Self {
|
fn remove_spaces(&self, arena: &'a Bump) -> Self {
|
||||||
let header = match &self.header {
|
let header = match &self.header {
|
||||||
Header::Interface(header) => Header::Interface(InterfaceHeader {
|
Header::Module(header) => Header::Module(ModuleHeader {
|
||||||
before_name: &[],
|
after_keyword: &[],
|
||||||
name: header.name.remove_spaces(arena),
|
params: header.params.remove_spaces(arena),
|
||||||
exposes: header.exposes.remove_spaces(arena),
|
exposes: header.exposes.remove_spaces(arena),
|
||||||
imports: header.imports.remove_spaces(arena),
|
interface_imports: header.interface_imports.remove_spaces(arena),
|
||||||
}),
|
}),
|
||||||
Header::App(header) => Header::App(AppHeader {
|
Header::App(header) => Header::App(AppHeader {
|
||||||
before_name: &[],
|
before_provides: &[],
|
||||||
name: header.name.remove_spaces(arena),
|
|
||||||
packages: header.packages.remove_spaces(arena),
|
|
||||||
imports: header.imports.remove_spaces(arena),
|
|
||||||
provides: header.provides.remove_spaces(arena),
|
provides: header.provides.remove_spaces(arena),
|
||||||
|
before_packages: &[],
|
||||||
|
packages: header.packages.remove_spaces(arena),
|
||||||
|
old_imports: header.old_imports.remove_spaces(arena),
|
||||||
|
old_provides_to_new_package: header
|
||||||
|
.old_provides_to_new_package
|
||||||
|
.remove_spaces(arena),
|
||||||
}),
|
}),
|
||||||
Header::Package(header) => Header::Package(PackageHeader {
|
Header::Package(header) => Header::Package(PackageHeader {
|
||||||
before_name: &[],
|
before_exposes: &[],
|
||||||
name: header.name.remove_spaces(arena),
|
|
||||||
exposes: header.exposes.remove_spaces(arena),
|
exposes: header.exposes.remove_spaces(arena),
|
||||||
|
before_packages: &[],
|
||||||
packages: header.packages.remove_spaces(arena),
|
packages: header.packages.remove_spaces(arena),
|
||||||
}),
|
}),
|
||||||
Header::Platform(header) => Header::Platform(PlatformHeader {
|
Header::Platform(header) => Header::Platform(PlatformHeader {
|
||||||
|
@ -326,6 +339,16 @@ impl<'a> RemoveSpaces<'a> for Module<'a> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl<'a> RemoveSpaces<'a> for ModuleParams<'a> {
|
||||||
|
fn remove_spaces(&self, arena: &'a Bump) -> Self {
|
||||||
|
ModuleParams {
|
||||||
|
params: self.params.remove_spaces(arena),
|
||||||
|
before_arrow: &[],
|
||||||
|
after_arrow: &[],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl<'a> RemoveSpaces<'a> for Region {
|
impl<'a> RemoveSpaces<'a> for Region {
|
||||||
fn remove_spaces(&self, _arena: &'a Bump) -> Self {
|
fn remove_spaces(&self, _arena: &'a Bump) -> Self {
|
||||||
Region::zero()
|
Region::zero()
|
||||||
|
@ -405,6 +428,10 @@ impl<'a> RemoveSpaces<'a> for PackageEntry<'a> {
|
||||||
PackageEntry {
|
PackageEntry {
|
||||||
shorthand: self.shorthand,
|
shorthand: self.shorthand,
|
||||||
spaces_after_shorthand: &[],
|
spaces_after_shorthand: &[],
|
||||||
|
platform_marker: match self.platform_marker {
|
||||||
|
Some(_) => Some(&[]),
|
||||||
|
None => None,
|
||||||
|
},
|
||||||
package_name: self.package_name.remove_spaces(arena),
|
package_name: self.package_name.remove_spaces(arena),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -567,11 +594,83 @@ impl<'a> RemoveSpaces<'a> for ValueDef<'a> {
|
||||||
condition: arena.alloc(condition.remove_spaces(arena)),
|
condition: arena.alloc(condition.remove_spaces(arena)),
|
||||||
preceding_comment: Region::zero(),
|
preceding_comment: Region::zero(),
|
||||||
},
|
},
|
||||||
|
ModuleImport(module_import) => ModuleImport(module_import.remove_spaces(arena)),
|
||||||
|
IngestedFileImport(ingested_file_import) => {
|
||||||
|
IngestedFileImport(ingested_file_import.remove_spaces(arena))
|
||||||
|
}
|
||||||
Stmt(loc_expr) => Stmt(arena.alloc(loc_expr.remove_spaces(arena))),
|
Stmt(loc_expr) => Stmt(arena.alloc(loc_expr.remove_spaces(arena))),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl<'a> RemoveSpaces<'a> for ModuleImport<'a> {
|
||||||
|
fn remove_spaces(&self, arena: &'a Bump) -> Self {
|
||||||
|
ModuleImport {
|
||||||
|
before_name: &[],
|
||||||
|
name: self.name.remove_spaces(arena),
|
||||||
|
params: self.params.remove_spaces(arena),
|
||||||
|
alias: self.alias.remove_spaces(arena),
|
||||||
|
exposed: self.exposed.remove_spaces(arena),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> RemoveSpaces<'a> for ModuleImportParams<'a> {
|
||||||
|
fn remove_spaces(&self, arena: &'a Bump) -> Self {
|
||||||
|
ModuleImportParams {
|
||||||
|
before: &[],
|
||||||
|
params: self.params.remove_spaces(arena),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> RemoveSpaces<'a> for IngestedFileImport<'a> {
|
||||||
|
fn remove_spaces(&self, arena: &'a Bump) -> Self {
|
||||||
|
IngestedFileImport {
|
||||||
|
before_path: &[],
|
||||||
|
path: self.path.remove_spaces(arena),
|
||||||
|
name: self.name.remove_spaces(arena),
|
||||||
|
annotation: self.annotation.remove_spaces(arena),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> RemoveSpaces<'a> for ImportedModuleName<'a> {
|
||||||
|
fn remove_spaces(&self, arena: &'a Bump) -> Self {
|
||||||
|
ImportedModuleName {
|
||||||
|
package: self.package.remove_spaces(arena),
|
||||||
|
name: self.name.remove_spaces(arena),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> RemoveSpaces<'a> for ImportAlias<'a> {
|
||||||
|
fn remove_spaces(&self, _arena: &'a Bump) -> Self {
|
||||||
|
*self
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> RemoveSpaces<'a> for ImportAsKeyword {
|
||||||
|
fn remove_spaces(&self, _arena: &'a Bump) -> Self {
|
||||||
|
*self
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> RemoveSpaces<'a> for ImportExposingKeyword {
|
||||||
|
fn remove_spaces(&self, _arena: &'a Bump) -> Self {
|
||||||
|
*self
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> RemoveSpaces<'a> for IngestedFileAnnotation<'a> {
|
||||||
|
fn remove_spaces(&self, arena: &'a Bump) -> Self {
|
||||||
|
IngestedFileAnnotation {
|
||||||
|
before_colon: &[],
|
||||||
|
annotation: self.annotation.remove_spaces(arena),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl<'a> RemoveSpaces<'a> for Implements<'a> {
|
impl<'a> RemoveSpaces<'a> for Implements<'a> {
|
||||||
fn remove_spaces(&self, _arena: &'a Bump) -> Self {
|
fn remove_spaces(&self, _arena: &'a Bump) -> Self {
|
||||||
Implements::Implements
|
Implements::Implements
|
||||||
|
@ -682,10 +781,10 @@ impl<'a> RemoveSpaces<'a> for Expr<'a> {
|
||||||
is_negative,
|
is_negative,
|
||||||
},
|
},
|
||||||
Expr::Str(a) => Expr::Str(a.remove_spaces(arena)),
|
Expr::Str(a) => Expr::Str(a.remove_spaces(arena)),
|
||||||
Expr::IngestedFile(a, b) => Expr::IngestedFile(a, b),
|
|
||||||
Expr::RecordAccess(a, b) => Expr::RecordAccess(arena.alloc(a.remove_spaces(arena)), b),
|
Expr::RecordAccess(a, b) => Expr::RecordAccess(arena.alloc(a.remove_spaces(arena)), b),
|
||||||
Expr::AccessorFunction(a) => Expr::AccessorFunction(a),
|
Expr::AccessorFunction(a) => Expr::AccessorFunction(a),
|
||||||
Expr::TupleAccess(a, b) => Expr::TupleAccess(arena.alloc(a.remove_spaces(arena)), b),
|
Expr::TupleAccess(a, b) => Expr::TupleAccess(arena.alloc(a.remove_spaces(arena)), b),
|
||||||
|
Expr::TaskAwaitBang(a) => Expr::TaskAwaitBang(arena.alloc(a.remove_spaces(arena))),
|
||||||
Expr::List(a) => Expr::List(a.remove_spaces(arena)),
|
Expr::List(a) => Expr::List(a.remove_spaces(arena)),
|
||||||
Expr::RecordUpdate { update, fields } => Expr::RecordUpdate {
|
Expr::RecordUpdate { update, fields } => Expr::RecordUpdate {
|
||||||
update: arena.alloc(update.remove_spaces(arena)),
|
update: arena.alloc(update.remove_spaces(arena)),
|
||||||
|
@ -694,15 +793,7 @@ impl<'a> RemoveSpaces<'a> for Expr<'a> {
|
||||||
Expr::Record(a) => Expr::Record(a.remove_spaces(arena)),
|
Expr::Record(a) => Expr::Record(a.remove_spaces(arena)),
|
||||||
Expr::RecordBuilder(a) => Expr::RecordBuilder(a.remove_spaces(arena)),
|
Expr::RecordBuilder(a) => Expr::RecordBuilder(a.remove_spaces(arena)),
|
||||||
Expr::Tuple(a) => Expr::Tuple(a.remove_spaces(arena)),
|
Expr::Tuple(a) => Expr::Tuple(a.remove_spaces(arena)),
|
||||||
Expr::Var {
|
Expr::Var { module_name, ident } => Expr::Var { module_name, ident },
|
||||||
module_name,
|
|
||||||
ident,
|
|
||||||
suffixed,
|
|
||||||
} => Expr::Var {
|
|
||||||
module_name,
|
|
||||||
ident,
|
|
||||||
suffixed,
|
|
||||||
},
|
|
||||||
Expr::Underscore(a) => Expr::Underscore(a),
|
Expr::Underscore(a) => Expr::Underscore(a),
|
||||||
Expr::Tag(a) => Expr::Tag(a),
|
Expr::Tag(a) => Expr::Tag(a),
|
||||||
Expr::OpaqueRef(a) => Expr::OpaqueRef(a),
|
Expr::OpaqueRef(a) => Expr::OpaqueRef(a),
|
||||||
|
@ -802,7 +893,7 @@ fn remove_spaces_bad_ident(ident: BadIdent) -> BadIdent {
|
||||||
impl<'a> RemoveSpaces<'a> for Pattern<'a> {
|
impl<'a> RemoveSpaces<'a> for Pattern<'a> {
|
||||||
fn remove_spaces(&self, arena: &'a Bump) -> Self {
|
fn remove_spaces(&self, arena: &'a Bump) -> Self {
|
||||||
match *self {
|
match *self {
|
||||||
Pattern::Identifier { ident, suffixed } => Pattern::Identifier { ident, suffixed },
|
Pattern::Identifier { ident } => Pattern::Identifier { ident },
|
||||||
Pattern::Tag(a) => Pattern::Tag(a),
|
Pattern::Tag(a) => Pattern::Tag(a),
|
||||||
Pattern::OpaqueRef(a) => Pattern::OpaqueRef(a),
|
Pattern::OpaqueRef(a) => Pattern::OpaqueRef(a),
|
||||||
Pattern::Apply(a, b) => Pattern::Apply(
|
Pattern::Apply(a, b) => Pattern::Apply(
|
||||||
|
@ -835,15 +926,9 @@ impl<'a> RemoveSpaces<'a> for Pattern<'a> {
|
||||||
Pattern::Underscore(a) => Pattern::Underscore(a),
|
Pattern::Underscore(a) => Pattern::Underscore(a),
|
||||||
Pattern::Malformed(a) => Pattern::Malformed(a),
|
Pattern::Malformed(a) => Pattern::Malformed(a),
|
||||||
Pattern::MalformedIdent(a, b) => Pattern::MalformedIdent(a, remove_spaces_bad_ident(b)),
|
Pattern::MalformedIdent(a, b) => Pattern::MalformedIdent(a, remove_spaces_bad_ident(b)),
|
||||||
Pattern::QualifiedIdentifier {
|
Pattern::QualifiedIdentifier { module_name, ident } => {
|
||||||
module_name,
|
Pattern::QualifiedIdentifier { module_name, ident }
|
||||||
ident,
|
}
|
||||||
suffixed,
|
|
||||||
} => Pattern::QualifiedIdentifier {
|
|
||||||
module_name,
|
|
||||||
ident,
|
|
||||||
suffixed,
|
|
||||||
},
|
|
||||||
Pattern::SpaceBefore(a, _) => a.remove_spaces(arena),
|
Pattern::SpaceBefore(a, _) => a.remove_spaces(arena),
|
||||||
Pattern::SpaceAfter(a, _) => a.remove_spaces(arena),
|
Pattern::SpaceAfter(a, _) => a.remove_spaces(arena),
|
||||||
Pattern::SingleQuote(a) => Pattern::SingleQuote(a),
|
Pattern::SingleQuote(a) => Pattern::SingleQuote(a),
|
||||||
|
|
|
@ -1704,6 +1704,13 @@ trait Backend<'a> {
 
                 self.build_fn_call(sym, intrinsic.to_string(), args, arg_layouts, ret_layout)
             }
+            LowLevel::ListConcatUtf8 => self.build_fn_call(
+                sym,
+                bitcode::LIST_CONCAT_UTF8.to_string(),
+                args,
+                arg_layouts,
+                ret_layout,
+            ),
             LowLevel::PtrCast => {
                 debug_assert_eq!(
                     1,

@ -1124,7 +1124,11 @@ pub fn construct_optimization_passes<'a>(
         }
         OptLevel::Size => {
             pmb.set_optimization_level(OptimizationLevel::Default);
+            // 2 is equivalent to `-Oz`.
+            pmb.set_size_level(2);
+
             // TODO: For some usecase, like embedded, it is useful to expose this and tune it.
+            // This really depends on if inlining causes enough simplifications to reduce code size.
             pmb.set_inliner_with_threshold(50);
         }
         OptLevel::Optimize => {

@ -1134,9 +1138,10 @@ pub fn construct_optimization_passes<'a>(
         }
     }
 
-    // Add optimization passes for Size and Optimize.
-    if matches!(opt_level, OptLevel::Size | OptLevel::Optimize) {
-        // TODO figure out which of these actually help
+    // Add extra optimization passes for Optimize.
+    if matches!(opt_level, OptLevel::Optimize) {
+        // TODO: figure out which of these actually help.
+        // Note, llvm probably already runs all of these as part of Aggressive.
 
         // function passes
 
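The pmb calls in the size branch match inkwell's legacy PassManagerBuilder API. A minimal, self-contained sketch of the same configuration outside the compiler follows; the module name and the final run are assumptions for illustration, while the builder calls are the ones used in the hunk (this legacy API exists in inkwell builds against older LLVM versions).

    use inkwell::context::Context;
    use inkwell::passes::{PassManager, PassManagerBuilder};
    use inkwell::OptimizationLevel;

    fn main() {
        let context = Context::create();
        let module = context.create_module("size_demo");

        // Same knobs as the OptLevel::Size branch: -Oz-style size level plus a
        // small inlining threshold to keep generated code small.
        let pmb = PassManagerBuilder::create();
        pmb.set_optimization_level(OptimizationLevel::Default);
        pmb.set_size_level(2); // 2 corresponds to -Oz
        pmb.set_inliner_with_threshold(50);

        let mpm: PassManager<inkwell::module::Module<'_>> = PassManager::create(());
        pmb.populate_module_pass_manager(&mpm);
        mpm.run_on(&module);
    }
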
@ -845,6 +845,27 @@ pub(crate) fn run_low_level<'a, 'ctx>(
                 }
             }
         }
+        ListConcatUtf8 => {
+            // List.concatUtf8: List U8, Str -> List U8
+            arguments!(list, string);
+
+            match env.target.ptr_width() {
+                PtrWidth::Bytes4 => call_str_bitcode_fn(
+                    env,
+                    &[list, string],
+                    &[],
+                    BitcodeReturns::List,
+                    bitcode::LIST_CONCAT_UTF8,
+                ),
+                PtrWidth::Bytes8 => call_list_bitcode_fn(
+                    env,
+                    &[list.into_struct_value()],
+                    &[string],
+                    BitcodeReturns::List,
+                    bitcode::LIST_CONCAT_UTF8,
+                ),
+            }
+        }
         NumToStr => {
             // Num.toStr : Num a -> Str
             arguments_with_layouts!((num, num_layout));

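The new lowering forwards to the LIST_CONCAT_UTF8 bitcode builtin; per the signature comment, List.concatUtf8 appends a string's UTF-8 bytes onto a byte list. A plain-Rust illustration of that behaviour (not roc code, names are illustrative):

    fn concat_utf8(mut list: Vec<u8>, string: &str) -> Vec<u8> {
        // Append the string's UTF-8 bytes to the byte list.
        list.extend_from_slice(string.as_bytes());
        list
    }

    fn main() {
        assert_eq!(concat_utf8(b"ab".to_vec(), "cd"), b"abcd".to_vec());
    }
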
@ -481,6 +481,7 @@ impl<'a> LowLevelCall<'a> {
 
                 backend.call_host_fn_after_loading_args(bitcode::LIST_CONCAT);
             }
+            ListConcatUtf8 => self.load_args_and_call_zig(backend, bitcode::LIST_CONCAT_UTF8),
 
             ListReserve => {
                 // List.reserve : List elem, U64 -> List elem

@ -2161,13 +2162,13 @@ impl<'a> LowLevelCall<'a> {
 
             // Empty record is always equal to empty record.
             // There are no runtime arguments to check, so just emit true or false.
-            LayoutRepr::Struct(field_layouts) if field_layouts.is_empty() => {
+            LayoutRepr::Struct([]) => {
                 backend.code_builder.i32_const(!invert_result as i32);
             }
 
             // Void is always equal to void. This is the type for the contents of the empty list in `[] == []`
             // This instruction will never execute, but we need an i32 for module validation
-            LayoutRepr::Union(UnionLayout::NonRecursive(tags)) if tags.is_empty() => {
+            LayoutRepr::Union(UnionLayout::NonRecursive([])) => {
                 backend.code_builder.i32_const(!invert_result as i32);
             }
 

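The Wasm change above is a pure pattern refactor: matching `LayoutRepr::Struct([])` with a slice pattern means exactly what the old `if field_layouts.is_empty()` guard did. A tiny standalone example of that Rust idiom (the Layout enum here is a stand-in, not roc's):

    enum Layout<'a> {
        Struct(&'a [u32]),
        Other,
    }

    fn is_empty_struct(layout: &Layout) -> bool {
        match layout {
            // Same meaning as `Layout::Struct(fields) if fields.is_empty()`.
            Layout::Struct([]) => true,
            _ => false,
        }
    }

    fn main() {
        assert!(is_empty_struct(&Layout::Struct(&[])));
        assert!(!is_empty_struct(&Layout::Struct(&[1, 2])));
        assert!(!is_empty_struct(&Layout::Other));
    }
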
@ -76,6 +76,7 @@ fn write_types_for_module_real(module_id: ModuleId, filename: &str, output_path:
         PathBuf::from(filename),
         source,
         cwd,
+        None,
         Default::default(),
         target,
         function_kind,

@ -104,12 +104,14 @@ pub fn load_and_monomorphize_from_str<'a>(
|
||||||
filename: PathBuf,
|
filename: PathBuf,
|
||||||
src: &'a str,
|
src: &'a str,
|
||||||
src_dir: PathBuf,
|
src_dir: PathBuf,
|
||||||
|
opt_main_path: Option<PathBuf>,
|
||||||
roc_cache_dir: RocCacheDir<'_>,
|
roc_cache_dir: RocCacheDir<'_>,
|
||||||
load_config: LoadConfig,
|
load_config: LoadConfig,
|
||||||
) -> Result<MonomorphizedModule<'a>, LoadMonomorphizedError<'a>> {
|
) -> Result<MonomorphizedModule<'a>, LoadMonomorphizedError<'a>> {
|
||||||
use LoadResult::*;
|
use LoadResult::*;
|
||||||
|
|
||||||
let load_start = LoadStart::from_str(arena, filename, src, roc_cache_dir, src_dir)?;
|
let load_start =
|
||||||
|
LoadStart::from_str(arena, filename, opt_main_path, src, roc_cache_dir, src_dir)?;
|
||||||
let exposed_types = ExposedByModule::default();
|
let exposed_types = ExposedByModule::default();
|
||||||
|
|
||||||
match load(arena, load_start, exposed_types, roc_cache_dir, load_config)? {
|
match load(arena, load_start, exposed_types, roc_cache_dir, load_config)? {
|
||||||
|
@ -121,6 +123,7 @@ pub fn load_and_monomorphize_from_str<'a>(
|
||||||
pub fn load_and_monomorphize<'a>(
|
pub fn load_and_monomorphize<'a>(
|
||||||
arena: &'a Bump,
|
arena: &'a Bump,
|
||||||
filename: PathBuf,
|
filename: PathBuf,
|
||||||
|
opt_main_path: Option<PathBuf>,
|
||||||
roc_cache_dir: RocCacheDir<'_>,
|
roc_cache_dir: RocCacheDir<'_>,
|
||||||
load_config: LoadConfig,
|
load_config: LoadConfig,
|
||||||
) -> Result<MonomorphizedModule<'a>, LoadMonomorphizedError<'a>> {
|
) -> Result<MonomorphizedModule<'a>, LoadMonomorphizedError<'a>> {
|
||||||
|
@ -129,6 +132,7 @@ pub fn load_and_monomorphize<'a>(
|
||||||
let load_start = LoadStart::from_path(
|
let load_start = LoadStart::from_path(
|
||||||
arena,
|
arena,
|
||||||
filename,
|
filename,
|
||||||
|
opt_main_path,
|
||||||
load_config.render,
|
load_config.render,
|
||||||
roc_cache_dir,
|
roc_cache_dir,
|
||||||
load_config.palette,
|
load_config.palette,
|
||||||
|
@ -145,6 +149,7 @@ pub fn load_and_monomorphize<'a>(
|
||||||
pub fn load_and_typecheck<'a>(
|
pub fn load_and_typecheck<'a>(
|
||||||
arena: &'a Bump,
|
arena: &'a Bump,
|
||||||
filename: PathBuf,
|
filename: PathBuf,
|
||||||
|
opt_main_path: Option<PathBuf>,
|
||||||
roc_cache_dir: RocCacheDir<'_>,
|
roc_cache_dir: RocCacheDir<'_>,
|
||||||
load_config: LoadConfig,
|
load_config: LoadConfig,
|
||||||
) -> Result<LoadedModule, LoadingProblem<'a>> {
|
) -> Result<LoadedModule, LoadingProblem<'a>> {
|
||||||
|
@ -153,6 +158,7 @@ pub fn load_and_typecheck<'a>(
|
||||||
let load_start = LoadStart::from_path(
|
let load_start = LoadStart::from_path(
|
||||||
arena,
|
arena,
|
||||||
filename,
|
filename,
|
||||||
|
opt_main_path,
|
||||||
load_config.render,
|
load_config.render,
|
||||||
roc_cache_dir,
|
roc_cache_dir,
|
||||||
load_config.palette,
|
load_config.palette,
|
||||||
|
@ -172,6 +178,7 @@ pub fn load_and_typecheck_str<'a>(
|
||||||
filename: PathBuf,
|
filename: PathBuf,
|
||||||
source: &'a str,
|
source: &'a str,
|
||||||
src_dir: PathBuf,
|
src_dir: PathBuf,
|
||||||
|
opt_main_path: Option<PathBuf>,
|
||||||
target: Target,
|
target: Target,
|
||||||
function_kind: FunctionKind,
|
function_kind: FunctionKind,
|
||||||
render: RenderTarget,
|
render: RenderTarget,
|
||||||
|
@ -180,7 +187,14 @@ pub fn load_and_typecheck_str<'a>(
|
||||||
) -> Result<LoadedModule, LoadingProblem<'a>> {
|
) -> Result<LoadedModule, LoadingProblem<'a>> {
|
||||||
use LoadResult::*;
|
use LoadResult::*;
|
||||||
|
|
||||||
let load_start = LoadStart::from_str(arena, filename, source, roc_cache_dir, src_dir)?;
|
let load_start = LoadStart::from_str(
|
||||||
|
arena,
|
||||||
|
filename,
|
||||||
|
opt_main_path,
|
||||||
|
source,
|
||||||
|
roc_cache_dir,
|
||||||
|
src_dir,
|
||||||
|
)?;
|
||||||
|
|
||||||
// NOTE: this function is meant for tests, and so we use single-threaded
|
// NOTE: this function is meant for tests, and so we use single-threaded
|
||||||
// solving so we don't use too many threads per-test. That gives higher
|
// solving so we don't use too many threads per-test. That gives higher
|
||||||
|
|
|
@ -11,7 +11,7 @@ use roc_can::scope::Scope;
|
||||||
use roc_collections::all::{ImMap, MutMap, SendSet};
|
use roc_collections::all::{ImMap, MutMap, SendSet};
|
||||||
use roc_constrain::expr::constrain_expr;
|
use roc_constrain::expr::constrain_expr;
|
||||||
use roc_derive::SharedDerivedModule;
|
use roc_derive::SharedDerivedModule;
|
||||||
use roc_module::symbol::{IdentIds, Interns, ModuleId, ModuleIds};
|
use roc_module::symbol::{IdentIds, Interns, ModuleId, ModuleIds, PQModuleName, PackageModuleIds};
|
||||||
use roc_parse::parser::{SourceError, SyntaxError};
|
use roc_parse::parser::{SourceError, SyntaxError};
|
||||||
use roc_problem::can::Problem;
|
use roc_problem::can::Problem;
|
||||||
use roc_region::all::Loc;
|
use roc_region::all::Loc;
|
||||||
|
@ -154,10 +154,10 @@ pub fn can_expr_with<'a>(
|
||||||
let var = var_store.fresh();
|
let var = var_store.fresh();
|
||||||
let var_index = constraints.push_variable(var);
|
let var_index = constraints.push_variable(var);
|
||||||
let expected = constraints.push_expected_type(Expected::NoExpectation(var_index));
|
let expected = constraints.push_expected_type(Expected::NoExpectation(var_index));
|
||||||
let mut module_ids = ModuleIds::default();
|
let mut module_ids = PackageModuleIds::default();
|
||||||
|
|
||||||
// ensure the Test module is accessible in our tests
|
// ensure the Test module is accessible in our tests
|
||||||
module_ids.get_or_insert(&"Test".into());
|
module_ids.get_or_insert(&PQModuleName::Unqualified("Test".into()));
|
||||||
|
|
||||||
// Desugar operators (convert them to Apply calls, taking into account
|
// Desugar operators (convert them to Apply calls, taking into account
|
||||||
// operator precedence and associativity rules), before doing other canonicalization.
|
// operator precedence and associativity rules), before doing other canonicalization.
|
||||||
|
@ -174,10 +174,22 @@ pub fn can_expr_with<'a>(
|
||||||
arena.alloc("TestPath"),
|
arena.alloc("TestPath"),
|
||||||
);
|
);
|
||||||
|
|
||||||
let mut scope = Scope::new(home, IdentIds::default(), Default::default());
|
let mut scope = Scope::new(
|
||||||
|
home,
|
||||||
|
"TestPath".into(),
|
||||||
|
IdentIds::default(),
|
||||||
|
Default::default(),
|
||||||
|
);
|
||||||
|
|
||||||
let dep_idents = IdentIds::exposed_builtins(0);
|
let dep_idents = IdentIds::exposed_builtins(0);
|
||||||
let mut env = Env::new(arena, home, &dep_idents, &module_ids);
|
let mut env = Env::new(
|
||||||
|
arena,
|
||||||
|
home,
|
||||||
|
Path::new("Test.roc"),
|
||||||
|
&dep_idents,
|
||||||
|
&module_ids,
|
||||||
|
None,
|
||||||
|
);
|
||||||
let (loc_expr, output) = canonicalize_expr(
|
let (loc_expr, output) = canonicalize_expr(
|
||||||
&mut env,
|
&mut env,
|
||||||
&mut var_store,
|
&mut var_store,
|
||||||
|
@ -203,7 +215,7 @@ pub fn can_expr_with<'a>(
|
||||||
all_ident_ids.insert(home, scope.locals.ident_ids);
|
all_ident_ids.insert(home, scope.locals.ident_ids);
|
||||||
|
|
||||||
let interns = Interns {
|
let interns = Interns {
|
||||||
module_ids: env.module_ids.clone(),
|
module_ids: env.qualified_module_ids.clone().into_module_ids(),
|
||||||
all_ident_ids,
|
all_ident_ids,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
9
crates/compiler/load/tests/platform.roc
Normal file
|
@ -0,0 +1,9 @@
|
||||||
|
platform "test-platform"
|
||||||
|
requires {} { main : * }
|
||||||
|
exposes []
|
||||||
|
packages {}
|
||||||
|
imports []
|
||||||
|
provides [mainForHost]
|
||||||
|
|
||||||
|
mainForHost : {} -> {}
|
||||||
|
mainForHost = \{} -> {}
|
|
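Note: a minimal usage sketch (not part of the diff), assuming an app sitting next to this new test platform; the relative path and the value given to `main` are illustrative only.

    # hypothetical app targeting tests/platform.roc; `requires {} { main : * }` accepts any value
    app [main] { pf: platform "platform.roc" }

    main = {}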
@@ -134,6 +134,7 @@ mod test_reporting {
 let result = roc_load::load_and_typecheck(
 arena,
 full_file_path,
+None,
 RocCacheDir::Disallowed,
 load_config,
 );
@@ -647,7 +648,7 @@ mod test_reporting {
 if true then 1 else 2
 "
 ),
-@r"
+@r###"
 ── UNRECOGNIZED NAME in /code/proj/Main.roc ────────────────────────────────────

 Nothing is named `true` in this scope.
@@ -657,11 +658,11 @@ mod test_reporting {

 Did you mean one of these?

+Str
 Frac
 Num
-Str
-Err
-"
+U8
+"###
 );

 test_report!(
@@ -812,10 +813,10 @@ mod test_reporting {

 Did you mean one of these?

-Ok
 List
-Err
 Box
+Str
+isDisabled
 "
 ),
 );
@@ -2212,10 +2213,10 @@ mod test_reporting {

 Did you mean one of these?

-Ok
 U8
 Box
 Eq
+f
 "
 );

@@ -4545,13 +4546,13 @@ mod test_reporting {

 test_report!(
 comment_with_tab,
-"# comment with a \t\n4",
+"# comment with a \t char\n4",
 @r###"
 ── TAB CHARACTER in tmp/comment_with_tab/Test.roc ──────────────────────────────

 I encountered a tab character:

-4│ # comment with a
+4│ # comment with a char
 ^

 Tab characters are not allowed in Roc code. Please use spaces instead!
@@ -4560,17 +4561,17 @@ mod test_reporting {

 test_report!(
 comment_with_control_character,
-"# comment with a \x07\n",
-@r"
+"# comment with a \x07 char\n",
+@r###"
 ── ASCII CONTROL CHARACTER in tmp/comment_with_control_character/Test.roc ──────

 I encountered an ASCII control character:

-4│ # comment with a
+4│ # comment with a char
 ^

 ASCII control characters are not allowed.
-"
+"###
 );

 test_report!(
@@ -4771,7 +4772,7 @@ mod test_reporting {
 // TODO investigate this test. It was disabled in https://github.com/roc-lang/roc/pull/6634
 // as the way Defs without final expressions are handled. The changes probably shouldn't have
 // changed this error report. The exact same test_syntax test for this has not changed, so
-// we know the parser is parsing thesame thing. Therefore the way the AST is desugared must be
+// we know the parser is parsing the same thing. Therefore the way the AST is desugared must be
 // the cause of the change in error report.
 // test_report!(
 // def_missing_final_expression,
@@ -4914,25 +4915,260 @@ mod test_reporting {
 "
 );

+test_report!(
+unfinished_import,
+indoc!(
+r"
+import [
+"
+),
+@r###"
+── UNFINISHED IMPORT in tmp/unfinished_import/Test.roc ─────────────────────────
+
+I was partway through parsing an `import`, but I got stuck here:
+
+4│ import [
+^
+
+I was expecting to see a module name, like:
+
+import BigNum
+
+Or a package module name, like:
+
+import pf.Stdout
+
+Or a file path to ingest, like:
+
+import "users.json" as users : Str
+"###
+);
+
+test_report!(
+weird_import_params_record,
+indoc!(
+r"
+import Menu { x = 4 }
+"
+),@r###"
+── RECORD PARSE PROBLEM in tmp/weird_import_params_record/Test.roc ─────────────
+
+I am partway through parsing a record, but I got stuck here:
+
+4│ import Menu { x = 4 }
+^
+
+TODO provide more context.
+"###
+);
+
+test_report!(
+record_builder_in_module_params,
+indoc!(
+r"
+import Menu {
+echo,
+name: <- applyName
+}
+"
+),@r###"
+── RECORD BUILDER IN MODULE PARAMS in ...ord_builder_in_module_params/Test.roc ─
+
+I was partway through parsing module params, but I got stuck here:
+
+4│ import Menu {
+5│ echo,
+6│ name: <- applyName
+^^^^^^^^^^^^^^^^^^
+
+This looks like a record builder field, but those are not allowed in
+module params.
+"###
+);
+
+test_report!(
+record_update_in_module_params,
+indoc!(
+r"
+import Menu { myParams & echo: echoFn }
+"
+),@r###"
+── RECORD UPDATE IN MODULE PARAMS in ...ecord_update_in_module_params/Test.roc ─
+
+I was partway through parsing module params, but I got stuck here:
+
+4│ import Menu { myParams & echo: echoFn }
+^^^^^^^^
+
+It looks like you're trying to update a record, but module params
+require a standalone record literal.
+"###
+);
+
+test_report!(
+unfinished_import_as_or_exposing,
+indoc!(
+r"
+import svg.Path a
+"
+),
+@r###"
+── UNFINISHED IMPORT in tmp/unfinished_import_as_or_exposing/Test.roc ──────────
+
+I was partway through parsing an `import`, but I got stuck here:
+
+4│ import svg.Path a
+^
+
+I was expecting to see the `as` keyword, like:
+
+import svg.Path as SvgPath
+
+Or the `exposing` keyword, like:
+
+import svg.Path exposing [arc, rx]
+
+Or module params, like:
+
+import Menu { echo, read }
+"###
+);
+
+test_report!(
+unfinished_import_alias,
+indoc!(
+r"
+import svg.Path as
+"
+),
+@r###"
+── UNFINISHED IMPORT in tmp/unfinished_import_alias/Test.roc ───────────────────
+
+I was partway through parsing an `import`, but I got stuck here:
+
+4│ import svg.Path as
+^
+
+I just saw the `as` keyword, so I was expecting to see an alias next.
+"###
+);
+
+test_report!(
+lowercase_import_alias,
+indoc!(
+r"
+import svg.Path as path
+"
+),
+@r###"
+── LOWERCASE ALIAS in tmp/lowercase_import_alias/Test.roc ──────────────────────
+
+This import is using a lowercase alias:
+
+4│ import svg.Path as path
+^^^^
+
+Module names and aliases must start with an uppercase letter.
+"###
+);
+
+test_report!(
+unfinished_import_exposing,
+indoc!(
+r"
+import svg.Path exposing
+"
+),
+@r###"
+── UNFINISHED IMPORT in tmp/unfinished_import_exposing/Test.roc ────────────────
+
+I was partway through parsing an `import`, but I got stuck here:
+
+4│ import svg.Path exposing
+^
+
+I just saw the `exposing` keyword, so I was expecting to see `[` next.
+"###);
+
+test_report!(
+unfinished_import_exposing_name,
+indoc!(
+r"
+import svg.Path exposing [3
+"
+),
+@r###"
+── WEIRD EXPOSING in tmp/unfinished_import_exposing_name/Test.roc ──────────────
+
+I'm partway through parsing an exposing list, but I got stuck here:
+
+4│ import svg.Path exposing [3
+^
+
+I was expecting a type, value, or function name next, like:
+
+import Svg exposing [Path, arc, rx]
+"###);
+
+test_report!(
+unfinished_ingested_file_name,
+indoc!(
+r#"
+import "example.json" as
+"#
+),
+@r###"
+── UNFINISHED IMPORT in tmp/unfinished_ingested_file_name/Test.roc ─────────────
+
+I was partway through parsing an `import`, but I got stuck here:
+
+4│ import "example.json" as
+^
+
+I was expecting to see a name next, like:
+
+import "users.json" as users : Str
+"###
+);
+
+test_report!(
+ingested_file_import_ann_syntax_err,
+indoc!(
+r#"
+import "example.json" as example : List U8, U32
+"#
+),
+@r###"
+── UNFINISHED TYPE in tmp/ingested_file_import_ann_syntax_err/Test.roc ─────────
+
+I am partway through parsing a type, but I got stuck here:
+
+4│ import "example.json" as example : List U8, U32
+^
+
+Note: I may be confused by indentation
+"###
+);
+
 // TODO could do better by pointing out we're parsing a function type
 test_report!(
 dict_type_formatting,
 indoc!(
 r#"
-app "dict" imports [ Dict ] provides [main] to "./platform"
+app "dict" imports [] provides [main] to "./platform"

-myDict : Dict.Dict Num.I64 Str
+myDict : Dict Num.I64 Str
 myDict = Dict.insert (Dict.empty {}) "foo" 42

 main = myDict
 "#
 ),
-@r#"
+@r###"
 ── TYPE MISMATCH in /code/proj/Main.roc ────────────────────────────────────────

 Something is off with the body of the `myDict` definition:

-3│ myDict : Dict.Dict Num.I64 Str
+3│ myDict : Dict Num.I64 Str
 4│ myDict = Dict.insert (Dict.empty {}) "foo" 42
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

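For reference, the well-formed import shapes that these new parse-error reports point users toward, collected in one sketch (the names are the illustrative ones used inside the reports themselves):

    # module imports, with optional package shorthand, alias, exposing list, or module params
    import BigNum
    import pf.Stdout
    import svg.Path as SvgPath
    import svg.Path exposing [arc, rx]
    import Menu { echo, read }

    # ingesting a file as a value, with a type annotation
    import "users.json" as users : Str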
@@ -4943,14 +5179,14 @@ mod test_reporting {
 But the type annotation on `myDict` says it should be:

 Dict I64 Str
-"#
+"###
 );

 test_report!(
 alias_type_diff,
 indoc!(
 r#"
-app "test" imports [Set.{ Set }] provides [main] to "./platform"
+app "test" imports [] provides [main] to "./platform"

 HSet a : Set a

@@ -5805,9 +6041,9 @@ All branches in an `if` must have the same type!
 Did you mean one of these?

 Str
-Err
 U8
 F64
+Box
 "###
 );

@@ -6040,6 +6276,31 @@ In roc, functions are always written as a lambda, like{}
 )
 }

+#[test]
+fn module_params_with_missing_arrow() {
+report_header_problem_as(
+indoc!(
+r#"
+module {echo, read} [menu]
+"#
+),
+indoc!(
+r#"
+── WEIRD MODULE PARAMS in /code/proj/Main.roc ──────────────────────────────────
+
+I am partway through parsing a module header, but I got stuck here:
+
+1│ module {echo, read} [menu]
+^
+
+I am expecting `->` next, like:
+
+module { echo, read } -> [menu]
+"#
+),
+)
+}
+
 #[test]
 fn platform_requires_rigids() {
 report_header_problem_as(
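A short sketch of the parameterized-module syntax the report above expects; `Menu`, `echo`, and `read` are the illustrative names taken from these tests, and the pairing with an import is an assumption based on the other new reports.

    # Menu.roc: a module whose params come before its exposes list
    module { echo, read } -> [menu]

    # elsewhere, a consumer supplies the params at import time
    import Menu { echo, read }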
@@ -6109,9 +6370,7 @@ In roc, functions are always written as a lambda, like{}
 report_header_problem_as(
 indoc!(
 r"
-interface Foobar
-exposes [main, @Foo]
-imports [pf.Task, Base64]
+module [main, @Foo]
 "
 ),
 indoc!(
@@ -6120,39 +6379,12 @@ In roc, functions are always written as a lambda, like{}

 I am partway through parsing an `exposes` list, but I got stuck here:

-1│ interface Foobar
-2│ exposes [main, @Foo]
-^
+1│ module [main, @Foo]
+^

 I was expecting a type name, value name or function name next, like

-exposes [Animal, default, tame]
-"
-),
-)
-}
-
-#[test]
-fn invalid_module_name() {
-report_header_problem_as(
-indoc!(
-r"
-interface foobar
-exposes [main, @Foo]
-imports [pf.Task, Base64]
-"
-),
-indoc!(
-r"
-── WEIRD MODULE NAME in /code/proj/Main.roc ────────────────────────────────────
-
-I am partway through parsing a header, but got stuck here:
-
-1│ interface foobar
-^
-
-I am expecting a module name next, like BigNum or Main. Module names
-must start with an uppercase letter.
+[Animal, default, tame]
 "
 ),
 )
@@ -7931,7 +8163,7 @@ In roc, functions are always written as a lambda, like{}
 "#
 ),
 // TODO(opaques): error could be improved by saying that the opaque definition demands
-// that the argument be a U8, and linking to the definitin!
+// that the argument be a U8, and linking to the definition!
 @r#"
 ── TYPE MISMATCH in /code/proj/Main.roc ────────────────────────────────────────

@@ -8407,17 +8639,38 @@ In roc, functions are always written as a lambda, like{}
 a
 "
 ),
-@r"
-── UNBOUND TYPE VARIABLE in /code/proj/Main.roc ────────────────────────────────
+@r###"
+── WILDCARD NOT ALLOWED HERE in /code/proj/Main.roc ────────────────────────────

-The definition of `I` has an unbound type variable:
+The definition of `I` includes a wildcard (`*`) type variable:

 4│ I : Num.Int *
 ^

-Tip: Type variables must be bound before the `:`. Perhaps you intended
-to add a type parameter to this type?
-"
+Type alias definitions may not use wildcard (`*`) type variables. Only
+named type variables are allowed.
+"###
+);
+
+test_report!(
+underscore_in_alias,
+indoc!(
+r"
+I : Num.Int _
+a : I
+a
+"
+),
+@r###"
+── UNDERSCORE NOT ALLOWED HERE in /code/proj/Main.roc ──────────────────────────
+
+The definition of `I` includes an inferred (`_`) type:
+
+4│ I : Num.Int _
+^
+
+Type alias definitions may not use inferred types (`_`).
+"###
 );

 test_report!(
@@ -8429,17 +8682,17 @@ In roc, functions are always written as a lambda, like{}
 a
 "
 ),
-@r"
-── UNBOUND TYPE VARIABLE in /code/proj/Main.roc ────────────────────────────────
+@r###"
+── WILDCARD NOT ALLOWED HERE in /code/proj/Main.roc ────────────────────────────

-The definition of `I` has an unbound type variable:
+The definition of `I` includes a wildcard (`*`) type variable:

 4│ I := Num.Int *
 ^

-Tip: Type variables must be bound before the `:=`. Perhaps you intended
-to add a type parameter to this type?
-"
+Opaque type definitions may not use wildcard (`*`) type variables. Only
+named type variables are allowed.
+"###
 );

 test_report!(
@@ -8451,19 +8704,18 @@ In roc, functions are always written as a lambda, like{}
 a
 "
 ),
-@r"
-── UNBOUND TYPE VARIABLE in /code/proj/Main.roc ────────────────────────────────
+@r###"
+── WILDCARD NOT ALLOWED HERE in /code/proj/Main.roc ────────────────────────────

-The definition of `I` has 2 unbound type variables.
-
-Here is one occurrence:
+The definition of `I` includes 2 wildcard (`*`) type variables. Here is
+one of them:

 4│ I : [A (Num.Int *), B (Num.Int *)]
 ^

-Tip: Type variables must be bound before the `:`. Perhaps you intended
-to add a type parameter to this type?
-"
+Type alias definitions may not use wildcard (`*`) type variables. Only
+named type variables are allowed.
+"###
 );

 test_report!(
@@ -8475,17 +8727,16 @@ In roc, functions are always written as a lambda, like{}
 a
 "
 ),
-@r"
-── UNBOUND TYPE VARIABLE in /code/proj/Main.roc ────────────────────────────────
+@r###"
+── UNDERSCORE NOT ALLOWED HERE in /code/proj/Main.roc ──────────────────────────

-The definition of `I` has an unbound type variable:
+The definition of `I` includes an inferred (`_`) type:

 4│ I : Num.Int _
 ^

-Tip: Type variables must be bound before the `:`. Perhaps you intended
-to add a type parameter to this type?
-"
+Type alias definitions may not use inferred types (`_`).
+"###
 );

 test_report!(
@@ -8497,17 +8748,19 @@ In roc, functions are always written as a lambda, like{}
 a
 "
 ),
-@r"
-── UNBOUND TYPE VARIABLE in /code/proj/Main.roc ────────────────────────────────
+@r###"
+── UNDECLARED TYPE VARIABLE in /code/proj/Main.roc ─────────────────────────────

-The definition of `I` has an unbound type variable:
+The definition of `I` includes an undeclared type variable:

 4│ I : Num.Int a
 ^

-Tip: Type variables must be bound before the `:`. Perhaps you intended
-to add a type parameter to this type?
-"
+All type variables in type alias definitions must be declared.
+
+Tip: You can declare type variables by putting them right before the `:`
+symbol, separated by spaces.
+"###
 );

 test_report!(
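The shape these new reports steer toward, as a small sketch; the alias name `I` is the one used in the tests, and the accepted form is an assumption based on the tip about declaring type variables before the `:`.

    # rejected: wildcard and inferred types are not allowed in alias definitions
    # I : Num.Int *
    # I : Num.Int _

    # accepted: declare the type variable right before the `:`
    I a : Num.Int a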
@@ -9310,7 +9563,7 @@ In roc, functions are always written as a lambda, like{}
 type_error_in_apply_is_circular,
 indoc!(
 r#"
-app "test" imports [Set] provides [go] to "./platform"
+app "test" imports [] provides [go] to "./platform"

 S a : { set : Set.Set a }

@@ -10898,7 +11151,9 @@ In roc, functions are always written as a lambda, like{}
 function_cannot_derive_encoding,
 indoc!(
 r#"
-app "test" imports [Decode.{decoder}] provides [main] to "./platform"
+app "test" imports [] provides [main] to "./platform"

+import Decode exposing [decoder]
+
 main =
 myDecoder : Decoder (a -> a) fmt where fmt implements DecoderFormatting
@@ -10907,12 +11162,12 @@ In roc, functions are always written as a lambda, like{}
 myDecoder
 "#
 ),
-@r"
+@r###"
 ── TYPE MISMATCH in /code/proj/Main.roc ────────────────────────────────────────

 This expression has a type that does not implement the abilities it's expected to:

-5│ myDecoder = decoder
+7│ myDecoder = decoder
 ^^^^^^^

 I can't generate an implementation of the `Decoding` ability for
@@ -10920,14 +11175,16 @@ In roc, functions are always written as a lambda, like{}
 a -> a

 Note: `Decoding` cannot be generated for functions.
-"
+"###
 );

 test_report!(
 nested_opaque_cannot_derive_encoding,
 indoc!(
 r#"
-app "test" imports [Decode.{decoder}] provides [main] to "./platform"
+app "test" imports [] provides [main] to "./platform"

+import Decode exposing [decoder]
+
 A := {}

@@ -10938,12 +11195,12 @@ In roc, functions are always written as a lambda, like{}
 myDecoder
 "#
 ),
-@r"
+@r###"
 ── TYPE MISMATCH in /code/proj/Main.roc ────────────────────────────────────────

 This expression has a type that does not implement the abilities it's expected to:

-7│ myDecoder = decoder
+9│ myDecoder = decoder
 ^^^^^^^

 I can't generate an implementation of the `Decoding` ability for
@@ -10958,7 +11215,7 @@ In roc, functions are always written as a lambda, like{}

 Tip: `A` does not implement `Decoding`. Consider adding a custom
 implementation or `implements Decode.Decoding` to the definition of `A`.
-"
+"###
 );

 test_report!(
@@ -11119,7 +11376,9 @@ In roc, functions are always written as a lambda, like{}
 infer_decoded_record_error_with_function_field,
 indoc!(
 r#"
-app "test" imports [TotallyNotJson] provides [main] to "./platform"
+app "test" imports [] provides [main] to "./platform"

+import TotallyNotJson
+
 main =
 decoded = Str.toUtf8 "{\"first\":\"ab\",\"second\":\"cd\"}" |> Decode.fromBytes TotallyNotJson.json
@@ -11128,12 +11387,12 @@ In roc, functions are always written as a lambda, like{}
 _ -> "something went wrong"
 "#
 ),
-@r"
+@r###"
 ── TYPE MISMATCH in /code/proj/Main.roc ────────────────────────────────────────

 This expression has a type that does not implement the abilities it's expected to:

-6│ Ok rcd -> rcd.first rcd.second
+8│ Ok rcd -> rcd.first rcd.second
 ^^^^^^^^^

 I can't generate an implementation of the `Decoding` ability for
@@ -11141,14 +11400,16 @@ In roc, functions are always written as a lambda, like{}
 * -> *

 Note: `Decoding` cannot be generated for functions.
-"
+"###
 );

 test_report!(
 record_with_optional_field_types_cannot_derive_decoding,
 indoc!(
 r#"
-app "test" imports [Decode.{decoder}] provides [main] to "./platform"
+app "test" imports [] provides [main] to "./platform"

+import Decode exposing [decoder]
+
 main =
 myDecoder : Decoder {x : Str, y ? Str} fmt where fmt implements DecoderFormatting
@@ -11157,12 +11418,12 @@ In roc, functions are always written as a lambda, like{}
 myDecoder
 "#
 ),
-@r"
+@r###"
 ── TYPE MISMATCH in /code/proj/Main.roc ────────────────────────────────────────

 This expression has a type that does not implement the abilities it's expected to:

-5│ myDecoder = decoder
+7│ myDecoder = decoder
 ^^^^^^^

 I can't generate an implementation of the `Decoding` ability for
@@ -11177,7 +11438,7 @@ In roc, functions are always written as a lambda, like{}
 over records that may or may not contain them at compile time, but are
 not a concept that extends to runtime!
 Maybe you wanted to use a `Result`?
-"
+"###
 );

 test_report!(
@@ -11359,21 +11620,23 @@ In roc, functions are always written as a lambda, like{}
 unused_value_import,
 indoc!(
 r#"
-app "test" imports [List.{ concat }] provides [main] to "./platform"
+app "test" imports [] provides [main] to "./platform"

+import List exposing [concat]
+
 main = ""
 "#
 ),
-@r#"
+@r###"
 ── UNUSED IMPORT in /code/proj/Main.roc ────────────────────────────────────────

-`List.concat` is not used in this module.
+List is imported but not used.

-1│ app "test" imports [List.{ concat }] provides [main] to "./platform"
-^^^^^^
+3│ import List exposing [concat]
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

-Since `List.concat` isn't used, you don't need to import it.
-"#
+Since List isn't used, you don't need to import it.
+"###
 );

 test_report!(
@@ -11395,7 +11658,9 @@ In roc, functions are always written as a lambda, like{}
 unnecessary_builtin_type_import,
 indoc!(
 r#"
-app "test" imports [Decode.{ DecodeError }] provides [main, E] to "./platform"
+app "test" imports [] provides [main, E] to "./platform"

+import Decode exposing [DecodeError]
+
 E : DecodeError

@@ -11405,6 +11670,54 @@ In roc, functions are always written as a lambda, like{}
 @r"
 "
 );
+test_report!(
+unknown_shorthand_no_deps,
+indoc!(
+r#"
+import foo.Foo
+
+Foo.foo
+"#
+),
+@r###"
+── UNRECOGNIZED PACKAGE in tmp/unknown_shorthand_no_deps/Test.roc ──────────────
+
+This module is trying to import from `foo`:
+
+4│ import foo.Foo
+^^^^^^^
+
+A lowercase name indicates a package shorthand, but no packages have
+been specified.
+"###
+);
+
+test_report!(
+unknown_shorthand_in_app,
+indoc!(
+r#"
+app [main] { pf: platform "../../tests/platform.roc" }
+
+import foo.Foo
+
+main =
+Foo.foo
+"#
+),
+@r###"
+── UNRECOGNIZED PACKAGE in tmp/unknown_shorthand_in_app/Test.roc ───────────────
+
+This module is trying to import from `foo`:
+
+3│ import foo.Foo
+^^^^^^^
+
+A lowercase name indicates a package shorthand, but I don't recognize
+this one. Did you mean one of these?
+
+pf
+"###
+);

 test_report!(
 invalid_toplevel_cycle,
@@ -13266,7 +13579,7 @@ In roc, functions are always written as a lambda, like{}
 4│ crash "" ""
 ^^^^^

-`crash` must be given exacly one message to crash with.
+`crash` must be given exactly one message to crash with.
 "#
 );

@@ -13664,7 +13977,9 @@ In roc, functions are always written as a lambda, like{}
 derive_decoding_for_tuple,
 indoc!(
 r#"
-app "test" imports [Decode.{decoder}] provides [main] to "./platform"
+app "test" imports [] provides [main] to "./platform"

+import Decode exposing [decoder]
+
 main =
 myDecoder : Decoder (U32, Str) fmt where fmt implements DecoderFormatting
@@ -13679,7 +13994,9 @@ In roc, functions are always written as a lambda, like{}
 cannot_decode_tuple_with_non_decode_element,
 indoc!(
 r#"
-app "test" imports [Decode.{decoder}] provides [main] to "./platform"
+app "test" imports [] provides [main] to "./platform"

+import Decode exposing [decoder]
+
 main =
 myDecoder : Decoder (U32, {} -> {}) fmt where fmt implements DecoderFormatting
@@ -13688,12 +14005,12 @@ In roc, functions are always written as a lambda, like{}
 myDecoder
 "#
 ),
-@r"
+@r###"
 ── TYPE MISMATCH in /code/proj/Main.roc ────────────────────────────────────────

 This expression has a type that does not implement the abilities it's expected to:

-5│ myDecoder = decoder
+7│ myDecoder = decoder
 ^^^^^^^

 I can't generate an implementation of the `Decoding` ability for
@@ -13701,7 +14018,7 @@ In roc, functions are always written as a lambda, like{}
 U32, {} -> {}

 Note: `Decoding` cannot be generated for functions.
-"
+"###
 );

 test_no_problem!(
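The recurring migration in these fixtures, shown as a before/after sketch (module and value names are the ones from the tests):

    # before: exposed values pulled in through the app header
    app "test" imports [Decode.{decoder}] provides [main] to "./platform"

    # after: an empty header imports list plus an `import` statement in the body
    app "test" imports [] provides [main] to "./platform"

    import Decode exposing [decoder]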
@@ -10,6 +10,7 @@ version.workspace = true
 [dependencies]
 roc_builtins = { path = "../builtins" }
 roc_can = { path = "../can" }
+roc_work = { path = "../work" }
 roc_checkmate = { path = "../checkmate" }
 roc_collections = { path = "../collections" }
 roc_constrain = { path = "../constrain" }
@@ -31,6 +32,7 @@ roc_target = { path = "../roc_target" }
 roc_tracing = { path = "../../tracing" }
 roc_types = { path = "../types" }
 roc_unify = { path = "../unify" }
+roc_worker = { path = "../worker" }

 ven_pretty = { path = "../../vendor/pretty" }

@@ -212,11 +212,7 @@ fn generate_entry_docs(
 match either_index.split() {
 Err(value_index) => match &defs.value_defs[value_index.index()] {
 ValueDef::Annotation(loc_pattern, loc_ann) => {
-if let Pattern::Identifier {
-ident: identifier,
-suffixed: _,
-} = loc_pattern.value
-{
+if let Pattern::Identifier { ident: identifier } = loc_pattern.value {
 // Check if this module exposes the def
 if let Some(ident_id) = ident_ids.get_id(identifier) {
 let name = identifier.to_string();
@@ -237,11 +233,7 @@ fn generate_entry_docs(
 ann_type,
 ..
 } => {
-if let Pattern::Identifier {
-ident: identifier,
-suffixed: _,
-} = ann_pattern.value
-{
+if let Pattern::Identifier { ident: identifier } = ann_pattern.value {
 // Check if this module exposes the def
 if let Some(ident_id) = ident_ids.get_id(identifier) {
 let doc_def = DocDef {
@@ -257,11 +249,7 @@ fn generate_entry_docs(
 }

 ValueDef::Body(pattern, _) => {
-if let Pattern::Identifier {
-ident: identifier,
-suffixed: _,
-} = pattern.value
-{
+if let Pattern::Identifier { ident: identifier } = pattern.value {
 // Check if this module exposes the def
 if let Some(ident_id) = ident_ids.get_id(identifier) {
 let doc_def = DocDef {
@@ -287,6 +275,12 @@ fn generate_entry_docs(
 ValueDef::ExpectFx { .. } => {
 // Don't generate docs for `expect-fx`s
 }
+ValueDef::ModuleImport { .. } => {
+// Don't generate docs for module imports
+}
+ValueDef::IngestedFileImport { .. } => {
+// Don't generate docs for ingested file imports
+}

 ValueDef::Stmt(loc_expr) => {
 if let roc_parse::ast::Expr::Var {
@@ -316,11 +310,7 @@ fn generate_entry_docs(
 let mut type_vars = Vec::new();

 for var in vars.iter() {
-if let Pattern::Identifier {
-ident: ident_name,
-suffixed: _,
-} = var.value
-{
+if let Pattern::Identifier { ident: ident_name } = var.value {
 type_vars.push(ident_name.to_string());
 }
 }
@@ -354,11 +344,7 @@ fn generate_entry_docs(
 let mut type_vars = Vec::new();

 for var in vars.iter() {
-if let Pattern::Identifier {
-ident: ident_name,
-suffixed: _,
-} = var.value
-{
+if let Pattern::Identifier { ident: ident_name } = var.value {
 type_vars.push(ident_name.to_string());
 }
 }
@@ -382,11 +368,7 @@ fn generate_entry_docs(
 let mut type_vars = Vec::new();

 for var in vars.iter() {
-if let Pattern::Identifier {
-ident: ident_name,
-suffixed: _,
-} = var.value
-{
+if let Pattern::Identifier { ident: ident_name } = var.value {
 type_vars.push(ident_name.to_string());
 }
 }
@@ -648,7 +630,7 @@ fn type_to_docs(in_func_type_ann: bool, type_annotation: ast::TypeAnnotation) ->
 .vars
 .iter()
 .filter_map(|loc_pattern| match loc_pattern.value {
-ast::Pattern::Identifier { ident, suffixed: _ } => Some(ident.to_string()),
+ast::Pattern::Identifier { ident } => Some(ident.to_string()),
 _ => None,
 })
 .collect(),
File diff suppressed because it is too large.
@@ -1,22 +0,0 @@
-fn report_missing_package_shorthand2<'a>(
-packages: &[Loc<PackageEntry>],
-imports: &[Loc<ImportsEntry>],
-) -> Option<LoadingProblem<'a>> {
-imports.iter().find_map(|i| match i.value {
-ImportsEntry::Module(_, _) | ImportsEntry::IngestedFile(_, _) => None,
-ImportsEntry::Package(shorthand, name, _) => {
-let name=name.as_str();
-if packages
-.iter()
-.find(|p| p.value.shorthand == shorthand)
-.is_none()
-{
-Some(
-LoadingProblem::FormattedReport(
-format!("The package shorthand '{shorthand}' that you are importing the module '{name}' from in '{shorthand}.{name}', doesn't exist in this module.\nImport it in the \"packages\" section of the header.")))
-} else {
-None
-}
-}
-})
-}
@@ -8,7 +8,6 @@ pub mod docs;
 pub mod file;
 pub mod module;
 mod module_cache;
-mod work;

 #[cfg(target_family = "wasm")]
 mod wasm_instant;
@@ -13,7 +13,7 @@ use roc_module::symbol::{
 };
 use roc_mono::ir::{GlueLayouts, HostExposedLambdaSets, LambdaSetId, Proc, ProcLayout, ProcsBase};
 use roc_mono::layout::{LayoutCache, STLayoutInterner};
-use roc_parse::ast::{CommentOrNewline, Defs, TypeAnnotation, ValueDef};
+use roc_parse::ast::{CommentOrNewline, Defs, TypeAnnotation};
 use roc_parse::header::{HeaderType, PackageName};
 use roc_region::all::{Loc, Region};
 use roc_solve::module::Solved;
@@ -30,6 +30,7 @@ use std::time::{Duration, Instant};
 #[derive(Debug)]
 pub struct LoadedModule {
 pub module_id: ModuleId,
+pub filename: PathBuf,
 pub interns: Interns,
 pub solved: Solved<Subs>,
 pub can_problems: MutMap<ModuleId, Vec<roc_problem::can::Problem>>,
@@ -54,6 +55,13 @@ pub struct LoadedModule {
 }

 impl LoadedModule {
+/// Infer the filename for the given ModuleId, based on this root module's filename.
+pub fn filename(&self, module_id: ModuleId) -> PathBuf {
+let module_name = self.interns.module_name(module_id);
+
+module_name.filename(&self.filename)
+}
+
 pub fn total_problems(&self) -> usize {
 let mut total = 0;

@@ -88,26 +96,20 @@ pub(crate) struct ModuleHeader<'a> {
 pub(crate) module_id: ModuleId,
 pub(crate) module_path: PathBuf,
 pub(crate) is_root_module: bool,
-pub(crate) exposed_ident_ids: IdentIds,
-pub(crate) deps_by_name: MutMap<PQModuleName<'a>, ModuleId>,
 pub(crate) packages: MutMap<&'a str, PackageName<'a>>,
-pub(crate) imported_modules: MutMap<ModuleId, Region>,
-pub(crate) package_qualified_imported_modules: MutSet<PackageQualified<'a, ModuleId>>,
-pub(crate) exposes: Vec<Symbol>,
-pub(crate) exposed_imports: MutMap<Ident, (Symbol, Region)>,
 pub(crate) parse_state: roc_parse::state::State<'a>,
 pub(crate) header_type: HeaderType<'a>,
 pub(crate) header_comments: &'a [CommentOrNewline<'a>],
-pub(crate) symbols_from_requires: Vec<(Loc<Symbol>, Loc<TypeAnnotation<'a>>)>,
+pub(crate) header_imports: Option<roc_parse::header::ImportsKeywordItem<'a>>,
 pub(crate) module_timing: ModuleTiming,
-pub(crate) defined_values: Vec<ValueDef<'a>>,
+pub(crate) opt_shorthand: Option<&'a str>,
 }

 #[derive(Debug)]
 pub(crate) struct ConstrainedModule {
 pub(crate) module: Module,
 pub(crate) declarations: Declarations,
-pub(crate) imported_modules: MutMap<ModuleId, Region>,
+pub(crate) available_modules: MutMap<ModuleId, Region>,
 pub(crate) constraints: Constraints,
 pub(crate) constraint: ConstraintSoa,
 pub(crate) ident_ids: IdentIds,
@@ -195,13 +197,17 @@ pub struct ParsedModule<'a> {
 pub src: &'a str,
 pub module_timing: ModuleTiming,
 pub deps_by_name: MutMap<PQModuleName<'a>, ModuleId>,
-pub imported_modules: MutMap<ModuleId, Region>,
 pub exposed_ident_ids: IdentIds,
-pub exposed_imports: MutMap<Ident, (Symbol, Region)>,
 pub parsed_defs: Defs<'a>,
 pub symbols_from_requires: Vec<(Loc<Symbol>, Loc<TypeAnnotation<'a>>)>,
 pub header_type: HeaderType<'a>,
 pub header_comments: &'a [CommentOrNewline<'a>],
+pub available_modules: MutMap<ModuleId, Region>,
+pub package_qualified_available_modules: MutSet<PackageQualified<'a, ModuleId>>,
+pub packages: MutMap<&'a str, PackageName<'a>>,
+pub initial_scope: MutMap<Ident, (Symbol, Region)>,
+pub exposes: Vec<Symbol>,
+pub opt_shorthand: Option<&'a str>,
 }

 #[derive(Debug)]
@@ -1,6 +1,8 @@
 interface Dep1
 exposes [three, str, Unit, Identity, one, two]
-imports [Dep3.Blah.{ foo }]
+imports []

+import Dep3Blah exposing [foo]
+
 one = 1

@@ -1,10 +1,11 @@
 interface Dep2
 exposes [one, two, blah]
-imports [Dep3.Blah.{ foo, bar }]
+imports []

+import Dep3Blah exposing [foo, bar]
+
 one = 1

 blah = foo

 two = 2.0

@@ -1,6 +0,0 @@
-interface Dep3.Other
-exposes [foo, bar]
-imports []
-
-foo = "foo from Dep3.Other"
-bar = "bar from Dep3.Other"
Some files were not shown because too many files have changed in this diff.