Merge branch 'main' of github.com:roc-lang/roc into editor_launch_fix

This commit is contained in:
Anton-4 2022-11-04 15:15:55 +01:00
commit 590c61a6fd
No known key found for this signature in database
GPG key ID: A13F4A6E21141925
320 changed files with 13622 additions and 6416 deletions

View file

@ -1,4 +0,0 @@
AUTHORS
nix
.envrc
.gitignore

View file

@ -23,25 +23,18 @@ jobs:
ref: "main"
clean: "true"
- name: Earthly version
run: earthly --version
- name: on main; prepare a self-contained benchmark folder
run: ./ci/safe-earthly.sh --build-arg BENCH_SUFFIX=main +prep-bench-folder
run: nix develop -c ./ci/benchmarks/prep_folder.sh main
- uses: actions/checkout@v3
with:
clean: "false" # we want to keep the benchmark folder
- name: on current branch; prepare a self-contained benchmark folder
run: ./ci/safe-earthly.sh +prep-bench-folder
- uses: actions-rs/toolchain@v1
with:
toolchain: stable
run: nix develop -c ./ci/benchmarks/prep_folder.sh branch
- name: build benchmark runner
run: cd ci/bench-runner && cargo build --release && cd ../..
run: nix develop -c bash -c "cd ci/benchmarks/bench-runner && cargo build --release && cd ../../.."
- name: run benchmarks with regression check
run: ./ci/bench-runner/target/release/bench-runner --check-executables-changed
run: nix develop -c ./ci/benchmarks/bench-runner/target/release/bench-runner --check-executables-changed

View file

@ -20,7 +20,7 @@ jobs:
clean: "true"
- name: execute tests with --release
run: /home/big-ci-user/.nix-profile/bin/nix develop -c cargo test --locked --release
run: nix develop -c cargo test --locked --release
- name: test wasm32 cli_run
run: /home/big-ci-user/.nix-profile/bin/nix develop -c cargo test --locked --release --features="wasm32-cli-run"
run: nix develop -c cargo test --locked --release --features="wasm32-cli-run"

View file

@ -15,10 +15,17 @@ jobs:
runs-on: windows-2022
env:
LLVM_SYS_130_PREFIX: C:\LLVM-13.0.1-win64
timeout-minutes: 90
timeout-minutes: 150
steps:
- uses: actions/checkout@v2
- run: Add-Content -Path "$env:GITHUB_ENV" -Value "GITHUB_RUNNER_CPU=$((Get-CimInstance Win32_Processor).Name)"
- uses: Swatinem/rust-cache@v2
with:
shared-key: "rust-cache-windows-${{env.GITHUB_RUNNER_CPU}}"
- name: download and install zig
run: |
curl.exe --output "C:\zig-windows-x86_64-0.9.1.zip" --url https://ziglang.org/download/0.9.1/zig-windows-x86_64-0.9.1.zip
@ -34,5 +41,12 @@ jobs:
curl.exe -L -O https://github.com/roc-lang/llvm-package-windows/releases/download/v13.0.1/LLVM-13.0.1-win64.7z
7z x LLVM-13.0.1-win64.7z -oC:\LLVM-13.0.1-win64
- name: build
run: cargo build
- name: Build tests --release without running. Twice for zig lld-link error.
run: cargo test --locked --release --no-run || cargo test --locked --release --no-run
# Why are these tests not build with previous command? => fingerprint error. Use `CARGO_LOG=cargo::core::compiler::fingerprint=info` to investigate
- name: Build specific tests without running. Twice for zig lld-link error.
run: cargo test --locked --release --no-run -p roc_ident -p roc_region -p roc_collections -p roc_can -p roc_types -p roc_solve -p roc_mono -p roc_gen_dev -p roc_gen_wasm -p roc_serialize -p roc_editor -p roc_linker || cargo test --locked --release --no-run -p roc_ident -p roc_region -p roc_collections -p roc_can -p roc_types -p roc_solve -p roc_mono -p roc_gen_dev -p roc_gen_wasm -p roc_serialize -p roc_editor -p roc_linker
- name: Actually run the tests.
run: cargo test --locked --release -p roc_ident -p roc_region -p roc_collections -p roc_can -p roc_types -p roc_solve -p roc_mono -p roc_gen_dev -p roc_gen_wasm -p roc_serialize -p roc_editor -p roc_linker

3
.gitignore vendored
View file

@ -6,6 +6,9 @@ zig-cache
*.rs.bk
*.o
*.obj
*.dll
*.lib
*.def
*.tmp
*.wasm
*.exe

View file

@ -113,3 +113,11 @@ João Mota <jackthemotorcycle@gmail.com>
Marcos Prieto <marcospri@gmail.com>
Prajwal S N <prajwalnadig21@gmail.com>
Christopher Duncan <chris.duncan.arauz+git@protonmail.com>
Luke Boswell <lukewilliamboswell@gmail.com>
Luca Cervello <luca.cervello@gmail.com>
Josh Mak <joshmak@berkeley.edu>
Travis Staloch <twostepted@gmail.com>
Nick Gravgaard <nick@nickgravgaard.com>
Keerthana Kasthuril <76804118+keerthanak-tw@users.noreply.github.com>
Salman Shaik <salmansiddiq.shaik@gmail.com>
Austin Clements <austinclementsbass@gmail.com>

View file

@ -1,6 +1,6 @@
# Building the Roc compiler from source
Installation should be a smooth process, let us know if anything does not work perfectly on [Roc Zulip](https://roc.zulipchat.com) or by creating an issue.
If you run into any problems getting Roc built from source, please ask for help in the `#beginners` channel on [Roc Zulip](https://roc.zulipchat.com) (the fastest way), or create an issue in this repo!
## Using Nix

View file

@ -33,22 +33,55 @@ cargo clippy --workspace --tests -- --deny warnings
Execute `cargo fmt --all` to fix the formatting.
## Generating Docs
If you make changes to [Roc's Standard Library](https://www.roc-lang.org/builtins/Str), you can add comments to the code following [the CommonMark Spec](https://spec.commonmark.org/current/) to further explain your intentions. You can view these changes locally with:
```sh
cargo run docs crates/compiler/builtins/roc
```
This command will generate the documentation in the [`generated-docs`](generated-docs) directory.
## Contribution Tips
- If you've never made a pull request on github before, [this](https://www.freecodecamp.org/news/how-to-make-your-first-pull-request-on-github-3/) will be a good place to start.
- Create an issue if the purpose of a struct/field/type/function/... is not immediately clear from its name or nearby comments.
- You can find good first issues [here][good-first-issues].
- You can find good first issues [here][good-first-issues]. Once you have gained some experience you can take a look at the [intermediate issues](https://github.com/roc-lang/roc/issues?q=is%3Aopen+is%3Aissue+label%3A%22intermediate+issue%22).
- [Fork](https://github.com/roc-lang/roc/fork) the repo so that you can apply your changes first on your own copy of the roc repo.
- It's a good idea to open a draft pull request as you begin working on something. This way, others can see that you're working on it, which avoids duplicate effort, and others can give feedback sooner rather than later if they notice a problem in the direction things are going. Click the button "ready for review" when it's ready.
- All your commits need to be signed [to prevent impersonation](https://dev.to/martiliones/how-i-got-linus-torvalds-in-my-contributors-on-github-3k4g):
1. If you have a Yubikey, follow [guide 1](https://dev.to/paulmicheli/using-your-yubikey-to-get-started-with-gpg-3h4k), [guide 2](https://dev.to/paulmicheli/using-your-yubikey-for-signed-git-commits-4l73) and skip the steps below.
2. [Make a key to sign your commits.](https://docs.github.com/en/authentication/managing-commit-signature-verification/generating-a-new-gpg-key).
3. [Configure git to use your key.](https://docs.github.com/en/authentication/managing-commit-signature-verification/telling-git-about-your-signing-key)
4. Make git sign your commits automatically:
- If you don't have signing set up on your device and you only want to change a single file, it will be easier to use [github's edit button](https://docs.github.com/en/repositories/working-with-files/managing-files/editing-files). This will sign your commit automatically.
- For multi-file or complex changes you will want to set up signing on your device:
1. If you have a Yubikey, follow [guide 1](https://dev.to/paulmicheli/using-your-yubikey-to-get-started-with-gpg-3h4k), [guide 2](https://dev.to/paulmicheli/using-your-yubikey-for-signed-git-commits-4l73) and skip the steps below.
2. [Make a key to sign your commits.](https://docs.github.com/en/authentication/managing-commit-signature-verification/generating-a-new-gpg-key)
3. [Configure git to use your key.](https://docs.github.com/en/authentication/managing-commit-signature-verification/telling-git-about-your-signing-key)
4. Make git sign your commits automatically:
```sh
git config --global commit.gpgsign true
```
```sh
git config --global commit.gpgsign true
```
### Forgot to sign commits?
You can find which commits need to be signed by running `git log --show-signature`.
If you have only one commit, running `git commit --amend --no-edit -S` would sign the latest commit 🚀.
In case you have multiple commits, you can sign them in two ways:
1. Switching to interactive rebase mode and editing the file:
- Enter into interactive mode, by running `git rebase -i HEAD~n` where `n` is the number of commits up to the most current commit you would like to see.
- This would display a set of commits in a text file like below:
```
pick hash2 commit message 2
pick hash1 commit message 1
```
- After every commit you want to sign, add `exec git commit --amend --no-edit -S`.
2. Or run git rebase recursively:
- Find the oldest commit you want to sign, using the `git log --show-signature` command.
- Run the command `git rebase --exec 'git commit --amend --no-edit -n -S' -i HASH` which would sign all commits up to commit `HASH`.
If you already pushed unsigned commits, you may have to do a force push with `git push origin -f <branch_name>`.
## Can we do better?

17
Cargo.lock generated
View file

@ -2104,12 +2104,6 @@ version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
[[package]]
name = "memexec"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc62ccb14881da5d1862cda3a9648fb4a4897b2aff0b2557b89da44a5e550b7c"
[[package]]
name = "memmap2"
version = "0.3.1"
@ -3404,7 +3398,6 @@ dependencies = [
"inkwell 0.1.0",
"libc",
"libloading",
"memexec",
"mimalloc",
"once_cell",
"parking_lot 0.12.1",
@ -3603,6 +3596,7 @@ name = "roc_exhaustive"
version = "0.0.1"
dependencies = [
"roc_collections",
"roc_error_macros",
"roc_module",
"roc_region",
]
@ -3757,7 +3751,6 @@ version = "0.0.1"
dependencies = [
"bincode",
"bumpalo",
"clap 3.2.20",
"iced-x86",
"indoc",
"libc",
@ -3767,7 +3760,9 @@ dependencies = [
"roc_build",
"roc_collections",
"roc_error_macros",
"roc_load",
"roc_mono",
"roc_reporting",
"serde",
"target-lexicon",
"tempfile",
@ -3962,6 +3957,7 @@ dependencies = [
"pretty_assertions",
"roc_build",
"roc_builtins",
"roc_can",
"roc_collections",
"roc_gen_llvm",
"roc_intern",
@ -3999,6 +3995,7 @@ dependencies = [
"roc_reporting",
"roc_target",
"roc_types",
"roc_utils",
"wasi_libc_sys",
"wasm-bindgen",
"wasm-bindgen-futures",
@ -4866,6 +4863,7 @@ dependencies = [
"roc_target",
"roc_types",
"roc_unify",
"roc_utils",
"target-lexicon",
"tempfile",
"wasi_libc_sys",
@ -5268,6 +5266,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
[[package]]
name = "wasi_libc_sys"
version = "0.0.1"
dependencies = [
"roc_utils",
]
[[package]]
name = "wasm-bindgen"

View file

@ -58,7 +58,7 @@ members = [
"crates/wasi-libc-sys",
]
exclude = [
"ci/bench-runner",
"ci/benchmarks/bench-runner",
# Examples sometimes have Rust hosts in their platforms. The compiler should ignore those.
"crates/cli_testing_examples",
"examples",

View file

@ -1,72 +0,0 @@
FROM rust:1.61.0-slim-bullseye # make sure to update rust-toolchain.toml too so that everything uses the same rust version
WORKDIR /earthbuild
prep-debian:
RUN apt -y update
install-other-libs:
FROM +prep-debian
RUN apt -y install wget git
RUN apt -y install libxcb-shape0-dev libxcb-xfixes0-dev # for editor clipboard
RUN apt -y install libasound2-dev # for editor sounds
RUN apt -y install libunwind-dev pkg-config libx11-dev zlib1g-dev
RUN apt -y install unzip # for www/build.sh
install-zig-llvm-valgrind:
FROM +install-other-libs
# editor
RUN apt -y install libxkbcommon-dev
# zig
RUN wget -c https://ziglang.org/download/0.9.1/zig-linux-x86_64-0.9.1.tar.xz --no-check-certificate
RUN tar -xf zig-linux-x86_64-0.9.1.tar.xz
RUN ln -s /earthbuild/zig-linux-x86_64-0.9.1/zig /bin/zig
# zig builtins wasm tests
RUN apt -y install build-essential
RUN cargo install wasmer-cli --features "singlepass"
RUN cargo install bindgen
# llvm
RUN apt -y install lsb-release software-properties-common gnupg
RUN wget https://apt.llvm.org/llvm.sh
RUN chmod +x llvm.sh
RUN ./llvm.sh 13
RUN ln -s /usr/bin/clang-13 /usr/bin/clang
# use lld as linker
RUN ln -s /usr/bin/lld-13 /usr/bin/ld.lld
ENV RUSTFLAGS="-C link-arg=-fuse-ld=lld -C target-cpu=native"
# valgrind
RUN apt -y install valgrind
# wasm repl & tests
RUN rustup target add wasm32-unknown-unknown wasm32-wasi
RUN apt -y install libssl-dev
RUN OPENSSL_NO_VENDOR=1 cargo install wasm-pack
# criterion
RUN cargo install cargo-criterion
# sccache
RUN cargo install sccache
RUN sccache -V
ENV RUSTC_WRAPPER=/usr/local/cargo/bin/sccache
ENV SCCACHE_DIR=/earthbuild/sccache_dir
ENV CARGO_INCREMENTAL=0 # no need to recompile package when using new function
copy-dirs:
FROM +install-zig-llvm-valgrind
COPY --dir crates Cargo.toml Cargo.lock version.txt www ./
# compile everything needed for benchmarks and output a self-contained dir from which benchmarks can be run.
prep-bench-folder:
FROM +copy-dirs
# to make use of avx, avx2, sse2, sse4.2... instructions
ENV RUSTFLAGS="-C link-arg=-fuse-ld=lld -C target-cpu=native"
ARG BENCH_SUFFIX=branch
RUN cargo criterion -V
RUN --mount=type=cache,target=$SCCACHE_DIR cd crates/cli && cargo criterion --no-run
RUN mkdir -p bench-folder/crates/cli_testing_examples/benchmarks
RUN mkdir -p bench-folder/crates/compiler/builtins/bitcode/src
RUN mkdir -p bench-folder/target/release/deps
RUN cp crates/cli_testing_examples/benchmarks/*.roc bench-folder/crates/cli_testing_examples/benchmarks/
RUN cp -r crates/cli_testing_examples/benchmarks/platform bench-folder/crates/cli_testing_examples/benchmarks/
RUN cp crates/compiler/builtins/bitcode/src/str.zig bench-folder/crates/compiler/builtins/bitcode/src
RUN cp target/release/roc bench-folder/target/release
# copy the most recent time bench to bench-folder
RUN cp target/release/deps/`ls -t target/release/deps/ | grep time_bench | head -n 1` bench-folder/target/release/deps/time_bench
SAVE ARTIFACT bench-folder AS LOCAL bench-folder-$BENCH_SUFFIX

12
FAQ.md
View file

@ -1,3 +1,5 @@
Click the ☰ button in the top left to see and search the table of contents.
# Frequently Asked Questions
## Where did the name Roc come from?
@ -45,6 +47,16 @@ This is an unusual approach, but there are more details in [this 2021 interview]
In the meantime, using CoffeeScript syntax highlighting for .roc files turns out to work surprisingly well!
## Why won't the editor be able to edit non-roc files like .md, .gitignore, .yml, ... ?
The downside of having the Roc editor support files other than .roc is that it seems extremely difficult to avoid scope creep if we allow it. For example, it starts with just editing json as plaintext but then it's annoying that there's no syntax highlighting, so maybe we add the capability to do syntax highlighting for json but of course then some people want it for toml, .md, etc, so we need to add a way to specify custom syntax highlighting rules for all of those.
Then of course people don't want to be copy/pasting syntax highlighting rules from online, so maybe someone develops a third party "plugin manager" for the editor to distribute these syntax highlighting definitions.
So maybe we add sharing syntax highlighting as a first-class thing, so people don't have to download a separate tool to use their editor normally but then some people who are using it for .json and .yaml start using it for .css too. Syntax highlighting is okay but it's annoying that they don't get error reporting when they mess up syntax or type an invalid selector or import and pretty soon there's demand for the Roc editor to do all the hardest parts of VS code.
We have to draw the line somewhere in there...but where to draw it?
It seems like drawing a bright line at .roc files is the most straightforward. It means the roc editor is the absolute best at editing .roc files and it isn't a weak editor for anything else because it doesn't try to be an editor for anything else and it means the scope is very clear.
## Why is there no way to specify "import everything this module exposes" in `imports`?
In [Elm](https://elm-lang.org), it's possible to import a module in a way that brings everything that module

View file

@ -1,8 +1,7 @@
# Tutorial
This is a tutorial for how to build Roc applications. It covers the REPL, basic
types (strings, lists, tags, and functions), syntax (`when`, `if then else`)
and more!
This is a tutorial to learn how to build Roc applications.
It covers the REPL, basic types like strings, lists, tags, and functions, syntax like `when` and `if then else`, and more!
Enjoy!
@ -10,7 +9,7 @@ Enjoy!
Learn how to install roc on your machine [here](https://github.com/roc-lang/roc/tree/main/getting_started#installation).
## Strings and Numbers
## REPL (Read - Eval - Print - Loop)
Let's start by getting acquainted with Roc's Read Eval Print Loop, or REPL for
short. Run this in a terminal:
@ -34,12 +33,14 @@ Try typing this in and pressing enter:
Congratulations! You've just written your first Roc code!
Specifically, you entered the *expression* `"Hello, World!"` into the REPL,
## Strings and Numbers
Previously you entered the *expression* `"Hello, World!"` into the REPL,
and the REPL printed it back out. It also printed `: Str`, which is the
expression's type. We'll talk about types later; for now, we'll ignore the `:`
and whatever comes after it whenever the REPL prints them.
Let's try putting in a more complicated expression:
Let's try a more complicated expression:
```coffee
>> 1 + 1
@ -111,7 +112,7 @@ to use them for more than that anyway!
Let's move out of the REPL and create our first Roc application.
Create a new file called `Hello.roc` and put this inside it:
Create a new file called `Hello.roc` and put the following code inside it:
```coffee
app "hello"
@ -195,7 +196,7 @@ you like, and everything will still work the same way!
This works because Roc expressions don't have *side effects*. We'll talk more
about side effects later.
## Functions and `if`
## Functions
So far we've called functions like `Num.toStr`, `Str.concat`, and `Stdout.line`.
Next let's try defining a function of our own.
@ -219,6 +220,10 @@ defines a function's arguments, and the expression after the `->` is the body
of the function. The expression at the end of the body (`Num.toStr (num1 + num2)`
in this case) is returned automatically.
Note that there is no separate syntax for named and anonymous functions in Roc.
## if then else
Let's modify the function to return an empty string if the numbers add to zero.
```coffee
@ -348,6 +353,7 @@ a function that takes a record and returns its `x` field. You can do this with a
For example:
```elm
# function returnFoo takes a Record and returns the 'foo' field of that record.
returnFoo = .foo
returnFoo { foo: "hi!", bar: "blah" }
@ -447,7 +453,8 @@ stoplightStr =
"yellow"
```
We can express this logic more concisely using `when`/`is` instead of `if`/`then`:
### Pattern matching
We can express the same logic more concisely using `when`/`is` instead of `if`/`then`:
```elm
stoplightStr =
@ -465,7 +472,7 @@ conditions are specified; here, we specify between `when` and `is` that we're ma
Besides being more concise, there are other advantages to using `when` here.
1. We don't have to specify an `else` branch, so the code can be more self-documenting about exactly what all the options are.
2. We get more compiler help. If we try deleting any of these branches, we'll get a compile-time error saying that we forgot to cover a case that could come up. For example, if we delete the `Green ->` branch, the compiler will say that we didn't handle the possibility that `stoplightColor` could be `Green`. It knows this because `Green` is one of the possibilities in our `stoplightColor = if …` definition.
2. We get more compiler help. If we try deleting any of these branches, we'll get a compile-time error saying that we forgot to cover a case that could come up. For example, if we delete the `Green ->` branch, the compiler will say that we didn't handle the possibility that `stoplightColor` could be `Green`. It knows this because `Green` is one of the possibilities in the `stoplightColor` definition we made earlier.
We can still have the equivalent of an `else` branch in our `when` if we like. Instead of writing "else", we write
"_ ->" like so:
@ -778,7 +785,7 @@ List.walk [1, 2, 3, 4, 5] { evens: [], odds: [] } \state, elem ->
`List.walk` walks through each element of the list, building up a state as it goes. At the end,
it returns the final state - whatever it ended up being after processing the last element. The `\state, elem ->`
function it takes as its last argument accepts both the current state as well as the current list element
function that `List.walk` takes as its last argument accepts both the current state as well as the current list element
it's looking at, and then returns the new state based on whatever it decides to do with that element.
In this example, we walk over the list `[1, 2, 3, 4, 5]` and add each element to either the `evens` or `odds`
@ -801,7 +808,7 @@ the initial state gets returned immediately.)
> **Note:** Other languages give this operation different names, such as "fold," "reduce," "accumulate,"
> "aggregate," "compress," and "inject."
### Getting an individual element from a list
### Getting an element from a List
Another thing we can do with a list is to get an individual element out of it. `List.get` is a common way to do this;
it takes a list and an index, and then returns the element at that index...if there is one. But what if there isn't?
@ -940,6 +947,8 @@ accuracy. If the annotation ever doesn't fit with the implementation, we'll get
The annotation `fullName : Str, Str -> Str` says "`fullName` is a function that takes two strings as
arguments and returns a string."
#### Strings
We can give type annotations to any value, not just functions. For example:
```coffee
@ -952,6 +961,8 @@ lastName = "Lee"
These annotations say that both `firstName` and `lastName` have the type `Str`.
#### Records
We can annotate records similarly. For example, we could move `firstName` and `lastName` into a record like so:
```coffee
@ -962,7 +973,9 @@ jen : { firstName : Str, lastName : Str }
jen = { firstName: "Jen", lastName: "Majura" }
```
When we have a recurring type annotation like this, it can be nice to give it its own name. We do this like
#### Type Aliasing
When we have a recurring type annotation like before, it can be nice to give it its own name. We do this like
so:
```coffee
@ -980,6 +993,8 @@ instead of to a value. Just like how you can read `name : Str` as "`name` has th
you can also read `Musician : { firstName : Str, lastName : Str }` as "`Musician` has the type
`{ firstName : Str, lastName : Str }`."
#### Tag Unions
We can also give type annotations to tag unions:
```coffee
@ -993,6 +1008,8 @@ colorFromStr = \string ->
You can read the type `[Red, Green, Yellow]` as "a tag union of the tags `Red`, `Green`, and `Yellow`."
#### List
When we annotate a list type, we have to specify the type of its elements:
```coffee
@ -1005,6 +1022,8 @@ You can read `List Str` as "a list of strings." Here, `Str` is a *type parameter
parameter; there's no way to give something a type of `List` without a type parameter - you have to specify
what type of list it is, such as `List Str` or `List Bool` or `List { firstName : Str, lastName : Str }`.
#### Wildcard type
There are some functions that work on any list, regardless of its type parameter. For example, `List.isEmpty`
has this type:
@ -1039,6 +1058,8 @@ We saw that `List.isEmpty` has the type `List * -> Bool`, so we might think the
`reverse : List * -> List *`. However, remember that we also saw that the type of the empty list is `List *`?
`List * -> List *` is actually the type of a function that always returns empty lists! That's not what we want.
#### Type Variables
What we want is something like one of these:
```coffee
@ -1070,6 +1091,63 @@ of the type annotation, or even the function's implementation! The only way to h
Similarly, the only way to have a function whose type is `a -> a` is if the function's implementation returns
its argument without modifying it in any way. This is known as [the identity function](https://en.wikipedia.org/wiki/Identity_function).
## Tag Unions
We can also annotate types that include tags:
```coffee
colorFromStr : Str -> [Red, Green, Yellow]
colorFromStr = \string ->
when string is
"red" -> Red
"green" -> Green
_ -> Yellow
```
You can read the type `[Red, Green, Yellow]` as "a *tag union* of the tags `Red`, `Green`, and `Yellow`."
Some tag unions have only one tag in them. For example:
```coffee
redTag : [Red]
redTag = Red
```
Tag unions can accumulate additional tags based on how they're used in the program. Consider this `if` expression:
```elm
\str ->
if Str.isEmpty str then
Ok "it was empty"
else
Err ["it was not empty"]
```
Here, Roc sees that the first branch has the type `[Ok Str]` and that the `else` branch has
the type `[Err (List Str)]`, so it concludes that the whole `if` expression evaluates to the
combination of those two tag unions: `[Ok Str, Err (List Str)]`.
This means the entire `\str -> …` function here has the type `Str -> [Ok Str, Err (List Str)]`.
However, it would be most common to annotate it as `Result Str (List Str)` instead, because
the `Result` type (for operations like `Result.withDefault`, which we saw earlier) is a type
alias for a tag union with `Ok` and `Err` tags that each have one payload:
```haskell
Result ok err : [Ok ok, Err err]
```
We just saw how tag unions get combined when different branches of a conditional return different tags. Another way tag unions can get combined is through pattern matching. For example:
```coffeescript
when color is
Red -> "red"
Yellow -> "yellow"
Green -> "green"
```
Here, Roc's compiler will infer that `color`'s type is `[Red, Yellow, Green]`, because
those are the three possibilities this `when` handles.
## Numeric types
Roc has different numeric types that each have different tradeoffs.
@ -1128,6 +1206,8 @@ Here are the different fixed-size integer types that Roc supports:
| `-170_141_183_460_469_231_731_687_303_715_884_105_728`<br/>`170_141_183_460_469_231_731_687_303_715_884_105_727` | `I128` | 16 Bytes |
| `0`<br/>(over 340 undecillion) `340_282_366_920_938_463_463_374_607_431_768_211_455` | `U128` | 16 Bytes |
#### Nat
Roc also has one variable-size integer type: `Nat` (short for "natural number").
The size of `Nat` is equal to the size of a memory address, which varies by system.
For example, when compiling for a 64-bit system, `Nat` works the same way as `U64`.
@ -1310,35 +1390,14 @@ An age-old debugging technique is printing out a variable to the terminal. In Ro
The failure output will include both the value of `x` as well as the comment immediately above it,
which lets you use that comment for extra context in your output.
## Interface modules
## Roc Modules
[This part of the tutorial has not been written yet. Coming soon!]
Every `.roc` file is a *module*. There are three types of modules:
- builtin
- app
- interface
## Builtin modules
There are several modules that are built into the Roc compiler, which are imported automatically into every
Roc module. They are:
1. `Bool`
2. `Str`
3. `Num`
4. `List`
5. `Result`
6. `Dict`
7. `Set`
You may have noticed that we already used the first five - for example, when we wrote `Str.concat` and `Num.isEven`,
we were referencing functions stored in the `Str` and `Num` modules.
These modules are not ordinary `.roc` files that live on your filesystem. Rather, they are built directly into the
Roc compiler. That's why they're called "builtins!"
Besides being built into the compiler, the builtin modules are different from other modules in that:
- They are always imported. You never need to add them to `imports`.
- All their types are imported unqualified automatically. So you never need to write `Num.Nat`, because it's as if the `Num` module was imported using `imports [Num.{ Nat }]` (and the same for all the other types in the `Num` module).
## The app module header
### App module
Let's take a closer look at the part of `Hello.roc` above `main`:
@ -1349,9 +1408,9 @@ app "hello"
provides main to pf
```
This is known as a *module header*. Every `.roc` file is a *module*, and there
are different types of modules. We know this particular one is an *application module*
This is known as a *module header*. We know this particular one is an *application module*
(or *app module* for short) because it begins with the `app` keyword.
Every Roc program has one app module.
The line `app "hello"` states that this module defines a Roc application, and
that building this application should produce an executable named `hello`. This
@ -1397,7 +1456,8 @@ this `imports` line tells the Roc compiler that when we call `Stdout.line`, it
should look for that `line` function in the `Stdout` module of the
`examples/cli/cli-platform/main.roc` package.
If we would like to include other modules in our application, say `AdditionalModule.roc` and `AnotherModule.roc`, then they can be imported directly in `imports` like this:
If we would like to include other modules in our application, say `AdditionalModule.roc`
and `AnotherModule.roc`, then they can be imported directly in `imports` like this:
```coffee
packages { pf: "examples/cli/cli-platform/main.roc" }
@ -1405,6 +1465,86 @@ imports [pf.Stdout, pf.Program, AdditionalModule, AnotherModule]
provides main to pf
```
### Interface module
Let's take a look at the following module header:
```coffee
interface Parser.Core
exposes [
Parser,
ParseResult,
buildPrimitiveParser
]
imports []
```
This says that the current .roc file is an *interface module* because it begins with the `interface` keyword.
We are naming this module when we write `interface Parser.Core`. It means that this file is in
a package `Parser` and the current module is named `Core`.
When we write `exposes [Parser, ParseResult, ...]`, it specifies the definitions we
want to *expose*. Exposing makes them importable from other modules.
Now lets import this interface from an *app module*:
```coffee
app 'interface-example'
packages { pf: "examples/cli/cli-platform/main.roc" }
imports [Parser.Core.{ Parser, buildPrimitiveParser }]
provides main to pf
```
Here we are importing a type and a function from the `Core` module of the `Parser` package. Now we can use e.g.
`buildPrimitiveParser` in this module without having to write `Parser.Core.buildPrimitiveParser`.
### Builtin modules
There are several modules that are built into the Roc compiler, which are imported automatically into every
Roc module. They are:
1. `Bool`
2. `Str`
3. `Num`
4. `List`
5. `Result`
6. `Dict`
7. `Set`
You may have noticed that we already used the first five - for example, when we wrote `Str.concat` and `Num.isEven`,
we were referencing functions stored in the `Str` and `Num` modules.
These modules are not ordinary `.roc` files that live on your filesystem. Rather, they are built directly into the
Roc compiler. That's why they're called "builtins!"
Besides being built into the compiler, the builtin modules are different from other modules in that:
- They are always imported. You never need to add them to `imports`.
- All their types are imported unqualified automatically. So you never need to write `Num.Nat`, because it's as if the `Num` module was imported using `imports [Num.{ Nat }]` (and the same for all the other types in the `Num` module).
## Platforms
TODO
## Comments
Comments that begin with `##` will be included in generated documentation (```roc docs```). They require a single space after the `##`, and can include code blocks by adding five spaces after `##`.
```coffee
## This is a comment for documentation, and includes a code block.
##
## x = 2
## expect x == 2
```
Roc also supports inline comments and line comments with `#`. They can be used to add information that won't be included in documentation.
```coffee
# This is a line comment that won't appear in documentation.
myFunction : U8 -> U8
myFunction = \bit -> bit % 2 # this is an inline comment
```
Roc does not have multiline comment syntax.
## Tasks
Tasks are technically not part of the Roc language, but they're very common in
@ -1536,6 +1676,8 @@ It's most common in Roc to call functions from other modules in a *qualified* wa
for a function with an uncommon name (like "await") which often gets called repeatedly
across a small number of lines of code.
### Backpassing
Speaking of calling `await` repeatedly, if we keep calling it more and more on this
code, we'll end up doing a lot of indenting. If we'd rather not indent so much, we
can rewrite `task` into this style which looks different but does the same thing:
@ -1637,6 +1779,38 @@ Some important things to note about backpassing and `await`:
- Backpassing syntax does not need to be used with `await` in particular. It can be used with any function.
- Roc's compiler treats functions defined with backpassing exactly the same way as functions defined the other way. The only difference between `\text ->` and `text <-` is how they look, so feel free to use whichever looks nicer to you!
### Empty Tag Unions
If you look up the type of [`Program.exit`](https://www.roc-lang.org/examples/cli/Program#exit),
you may notice that it takes a `Task` where the error type is `[]`. What does that mean?
Just like how `{}` is the type of an empty record, `[]` is the type of an empty tag union.
There is no way to create an empty tag union at runtime, since creating a tag union requires
making an actual tag, and an empty tag union has no tags in it!
This means if you have a function with the type `[] -> Str`, you can be sure that it will
never execute. It requires an argument that can't be provided! Similarly, if you have a
function with the type `Str -> []`, you can call it, but you can be sure it will not terminate
normally. The only way to implement a function like that is using [infinite recursion](https://en.wikipedia.org/wiki/Infinite_loop#Infinite_recursion), which will either run indefinitely or else crash with a [stack overflow](https://en.wikipedia.org/wiki/Stack_overflow).
Empty tag unions can be useful as type parameters. For example, a function with the type
`List [] -> Str` can be successfully called, but only if you pass it an empty list. That's because
an empty list has the type `List *`, which means it can be used wherever any type of `List` is
needed - even a `List []`!
Similarly, a function which accepts a `Result Str []` only accepts a "Result which is always `Ok`" - so you could call that function passing something like `Ok "hello"` with no problem,
but if you tried to give it an `Err`, you'd get a type mismatch.
Applying this to `Task`, a task with `[]` for its error type is a "task which can never fail." The only way to obtain one is by obtaining a task with an error type of `*`, since that works with any task. You can get one of these "tasks that can never fail" by using [`Task.succeed`](https://www.roc-lang.org/examples/cli/Task#succeed) or, more commonly, by handling all possible errors using [`Task.attempt`](https://www.roc-lang.org/examples/cli/Task#attempt).
## What now?
That's it, you can start writing Roc apps now!
Modifying an example from the [examples folder](./examples) is probably a good place to start.
[Advent of Code](https://adventofcode.com/2021) problems can also be fun to get to know Roc.
If you are hungry for more, check out the Advanced Concepts below.
## Appendix: Advanced Concepts
Here are some concepts you likely won't need as a beginner, but may want to know about eventually.
@ -1857,200 +2031,65 @@ type that accumulates more and more fields as it progresses through a series of
### Open and Closed Tag Unions
Just like how Roc has open records and closed records, it also has open and closed tag unions.
Just like how Roc has open records and closed records, it also has open and closed tag unions. Similarly to how an open record can have other fields besides the ones explicitly listed, an open tag union can have other tags beyond the ones explicitly listed.
The *open tag union* (or *open union* for short) `[Foo Str, Bar Bool]*` represents a tag that might
be `Foo Str` and might be `Bar Bool`, but might also be some other tag whose type isn't known at compile time.
Because an open union represents possibilities that are impossible to know ahead of time, any `when` I use on a
`[Foo Str, Bar Bool]*` value must include a catch-all `_ ->` branch. Otherwise, if one of those
unknown tags were to come up, the `when` would not know what to do with it! For example:
For example, here `[Red, Green]` is a closed union like the ones we saw earlier:
```coffee
example : [Foo Str, Bar Bool]* -> Bool
example = \tag ->
when tag is
Foo str -> Str.isEmpty str
Bar bool -> bool
_ -> Bool.false
```
colorToStr : [Red, Green] -> Str
colorToStr = \color ->
when color is
Red -> "red"
Green -> "green"
In contrast, a *closed tag union* (or *closed union*) like `[Foo Str, Bar Bool]` (without the `*`)
represents an exhaustive set of possible tags. If I use a `when` on one of these, I can match on `Foo`
only and then on `Bar` only, with no need for a catch-all branch. For example:
Now let's compare to an *open union* version:
```coffee
example : [Foo Str, Bar Bool] -> Bool
example = \tag ->
when tag is
Foo str -> Str.isEmpty str
Bar bool -> bool
colorOrOther : [Red, Green]* -> Str
colorOrOther = \color ->
when color is
Red -> "red"
Green -> "green"
_ -> "other"
```
If we were to remove the type annotations from the previous two code examples, Roc would infer the same
types for them anyway.
Two things have changed compared to the first example.
1. The `when color is` now has an extra branch: `_ -> "other"`
2. Since this branch matches any tag, the type annotation for the `color` argument changed from the closed union `[Red, Green]` to the _open union_ `[Red, Green]*`.
It would infer `tag : [Foo Str, Bar Bool]` for the latter example because the `when tag is` expression
only includes a `Foo Str` branch and a `Bar Bool` branch, and nothing else. Since the `when` doesn't handle
any other possibilities, these two tags must be the only possible ones the `tag` argument could be.
It would infer `tag : [Foo Str, Bar Bool]*` for the former example because the `when tag is` expression
includes a `Foo Str` branch and a `Bar Bool` branch - meaning we know about at least those two specific
possibilities - but also a `_ ->` branch, indicating that there may be other tags we don't know about. Since
the `when` is flexible enough to handle all possible tags, `tag` gets inferred as an open union.
Putting these together, whether a tag union is inferred to be open or closed depends on which possibilities
the implementation actually handles.
> **Aside:** As with open and closed records, we can use type annotations to make tag union types less flexible
> than what would be inferred. If we added a `_ ->` branch to the second example above, the compiler would still
> accept `example : [Foo Str, Bar Bool] -> Bool` as the type annotation, even though the catch-all branch
> would permit the more flexible `example : [Foo Str, Bar Bool]* -> Bool` annotation instead.
### Combining Open Unions
When we make a new record, it's inferred to be a closed record. For example, in `foo { a: "hi" }`,
the type of `{ a: "hi" }` is inferred to be `{ a : Str }`. In contrast, when we make a new tag, it's inferred
to be an open union. So in `foo (Bar "hi")`, the type of `Bar "hi"` is inferred to be `[Bar Str]*`.
This is because open unions can accumulate additional tags based on how they're used in the program,
whereas closed unions cannot. For example, let's look at this conditional:
```elm
if x > 5 then
"foo"
else
7
```
This will be a type mismatch because the two branches have incompatible types. Strings and numbers are not
type-compatible! Now let's look at another example:
```elm
if x > 5 then
Ok "foo"
else
Err "bar"
```
This shouldn't be a type mismatch, because we can see that the two branches are compatible; they are both
tags that could easily coexist in the same tag union. But if the compiler inferred the type of `Ok "foo"` to be
the closed union `[Ok Str]`, and likewise for `Err "bar"` and `[Err Str]`, then this would have to be
a type mismatch - because those two closed unions are incompatible.
Instead, the compiler infers `Ok "foo"` to be the open union `[Ok Str]*`, and `Err "bar"` to be the open
union `[Err Str]*`. Then, when using them together in this conditional, the inferred type of the conditional
becomes `[Ok Str, Err Str]*` - that is, the combination of the unions in each of its branches. (Branches in
a `when` work the same way with open unions.)
Earlier we saw how a function which accepts an open union must account for more possibilities, by including
catch-all `_ ->` patterns in its `when` expressions. So *accepting* an open union means you have more requirements.
In contrast, when you already *have* a value which is an open union, you have fewer requirements. A value
which is an open union (like `Ok "foo"`, which has the type `[Ok Str]*`) can be provided to anything that's
expecting a tag union (no matter whether it's open or closed), as long as the expected tag union includes at least
the tags in the open union you're providing.
So if I have an `[Ok Str]*` value, I can pass it to functions with any of these types (among others):
- `[Ok Str]* -> Bool`
- `[Ok Str] -> Bool`
- `[Ok Str, Err Bool]* -> Bool`
- `[Ok Str, Err Bool] -> Bool`
- `[Ok Str, Err Bool, Whatever]* -> Bool`
- `[Ok Str, Err Bool, Whatever] -> Bool`
- `Result Str Bool -> Bool`
- `[Err Bool, Whatever]* -> Bool`
That last one works because a function accepting an open union can accept any unrecognized tag, including
`Ok Str` - even though it is not mentioned as one of the tags in `[Err Bool, Whatever]*`! Remember, when
a function accepts an open tag union, any `when` branches on that union must include a catch-all `_ ->` branch,
which is the branch that will end up handling the `Ok Str` value we pass in.
However, I could not pass an `[Ok Str]*` to a function with a *closed* tag union argument that did not
mention `Ok Str` as one of its tags. So if I tried to pass `[Ok Str]*` to a function with the type
`[Err Bool, Whatever] -> Str`, I would get a type mismatch - because a `when` in that function could
be handling the `Err Bool` possibility and the `Whatever` possibility, and since it would not necessarily have
a catch-all `_ ->` branch, it might not know what to do with an `Ok Str` if it received one.
> **Note:** It wouldn't be accurate to say that a function which accepts an open union handles
> "all possible tags." For example, if I have a function `[Ok Str]* -> Bool` and I pass it
> `Ok 5`, that will still be a type mismatch. If you think about it, a `when` in that function might
> have the branch `Ok str ->` which assumes there's a string inside that `Ok`, and if `Ok 5` type-checked,
> then that assumption would be false and things would break!
>
> So `[Ok Str]*` is more restrictive than `[]*`. It's basically saying "this may or may not be an `Ok` tag,
> but if it is an `Ok` tag, then it's guaranteed to have a payload of exactly `Str`."
In summary, here's a way to think about the difference between open unions in a value you have, compared to a value you're accepting:
- If you *have* a closed union, that means it has all the tags it ever will, and can't accumulate more.
- If you *have* an open union, that means it can accumulate more tags through conditional branches.
- If you *accept* a closed union, that means you only have to handle the possibilities listed in the union.
- If you *accept* an open union, that means you have to handle the possibility that it has a tag you can't know about.
### Type Variables in Tag Unions
Earlier we saw these two examples, one with an open tag union and the other with a closed one:
Also like with open records, you can name the type variable in an open tag union. For example:
```coffee
example : [Foo Str, Bar Bool]* -> Bool
example = \tag ->
when tag is
Foo str -> Str.isEmpty str
Bar bool -> bool
_ -> Bool.false
```
```coffee
example : [Foo Str, Bar Bool] -> Bool
example = \tag ->
when tag is
Foo str -> Str.isEmpty str
Bar bool -> bool
```
Similarly to how there are open records with a `*`, closed records with nothing,
and constrained records with a named type variable, we can also have *constrained tag unions*
with a named type variable. Here's an example:
```coffee
example : [Foo Str, Bar Bool]a -> [Foo Str, Bar Bool]a
example = \tag ->
when tag is
Foo str -> Bar (Str.isEmpty str)
Bar _ -> Bar Bool.false
stopGoOther : [Red, Green]a -> [Stop, Go]a
stopGoOther = \color ->
when color is
Red -> Stop
Green -> Go
other -> other
```
This type says that the `example` function will take either a `Foo Str` tag, or a `Bar Bool` tag,
or possibly another tag we don't know about at compile time - and it also says that the function's
return type is the same as the type of its argument.
You can read this type annotation as "`stopGoOther` takes either a `Red` tag, a `Green` tag, or some other tag. It returns either a `Stop` tag, a `Go` tag, or any one of the tags it received in its argument."
So if we give this function a `[Foo Str, Bar Bool, Baz (List Str)]` argument, then it will be guaranteed
to return a `[Foo Str, Bar Bool, Baz (List Str)]` value. This is more constrained than a function that
returned `[Foo Str, Bar Bool]*` because that would say it could return *any* other tag (in addition to
the `Foo Str` and `Bar Bool` we already know about).
So let's say you called this `stopGoOther` function passing `Foo "hello"`. Then the `a` type variable would be the closed union `[Foo Str]`, and `stopGoOther` would return a union with the type `[Stop, Go][Foo Str]` - which is equivalent to `[Stop, Go, Foo Str]`.
If we removed the type annotation from `example` above, Roc's compiler would infer the same type anyway.
This may be surprising if you look closely at the body of the function, because:
Just like with records, you can replace the type variable in tag union types with a concrete type.
For example, `[Foo Str][Bar Bool][Baz (List Str)]` is equivalent to `[Foo Str, Bar Bool, Baz (List Str)]`.
- The return type includes `Foo Str`, but no branch explicitly returns `Foo`. Couldn't the return type be `[Bar Bool]a` instead?
- The argument type includes `Bar Bool` even though we never look at `Bar`'s payload. Couldn't the argument type be inferred to be `Bar *` instead of `Bar Bool`, since we never look at it?
Also just like with records, you can use this to compose tag union type aliases. For example, you can write `NetworkError : [Timeout, Disconnected]` and then `Problem : [InvalidInput, UnknownFormat]NetworkError`.
The reason it has this type is the `other -> other` branch. Take a look at that branch, and ask this question:
"What is the type of `other`?" There has to be exactly one answer! It can't be the case that `other` has one
type before the `->` and another type after it; whenever you see a named value in Roc, it is guaranteed to have
the same type everywhere it appears in that scope.
Note that a function which accepts an open union does not accept "all possible tags."
For example, if I have a function `[Ok Str]* -> Bool` and I pass it
`Ok 5`, that will still be a type mismatch. A `when` on that function's argument might
have the branch `Ok str ->` which assumes there's a string inside that `Ok`,
and if `Ok 5` type-checked, then that assumption would be false and things would break!
For this reason, any time you see a function that only runs a `when` on its only argument, and that `when`
includes a branch like `x -> x` or `other -> other`, the function's argument type and return type must necessarily
be equivalent.
So `[Ok Str]*` is more restrictive than `[]*`. It's basically saying "this may or may not be an `Ok` tag, but if it _is_ an `Ok` tag, then it's guaranteed to have a payload of exactly `Str`."
> **Note:** Just like with records, you can also replace the type variable in tag union types with a concrete type.
> For example, `[Foo Str][Bar Bool][Baz (List Str)]` is equivalent to `[Foo Str, Bar Bool, Baz (List Str)]`.
>
> Also just like with records, you can use this to compose tag union type aliases. For example, you can write
> `NetworkError : [Timeout, Disconnected]` and then `Problem : [InvalidInput, UnknownFormat]NetworkError`
> **Note:** As with open and closed records, we can use type annotations to make tag union types less flexible
> than what the compiler would infer. For example, if we changed the type of the second
> `colorOrOther` function from the open `[Red, Green]*` to the closed `[Red, Green]`, Roc's compiler
> would accept it as a valid annotation, but it would give a warning that the `_ -> "other"`
> branch had become unreachable.
### Phantom Types

View file

@ -240,9 +240,9 @@ dependencies = [
[[package]]
name = "regex"
version = "1.5.4"
version = "1.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461"
checksum = "1a11647b6b25ff05a515cb92c365cec08801e83423a235b51e231e1808747286"
dependencies = [
"aho-corasick",
"memchr",

View file

@ -7,7 +7,7 @@ edition = "2021"
[dependencies]
clap = { version = "3.1.15", features = ["derive"] }
regex = "1.5.4"
regex = "1.5.5"
is_executable = "1.0.1"
ring = "0.16.20"
data-encoding = "2.3.2"

View file

@ -33,7 +33,7 @@ fn main() {
if check_if_bench_executables_changed() {
println!(
"Comparison of sha256 of executables reveals changes, doing full benchmarks..."
"\n\nComparison of sha256 of executables reveals changes, doing full benchmarks...\n\n"
);
let all_regressed_benches = do_all_benches(optional_args.nr_repeat_benchmarks);
@ -51,8 +51,8 @@ fn main() {
eprintln!(
r#"I can't find bench-folder-main and bench-folder-branch from the current directory.
I should be executed from the repo root.
Use `./ci/safe-earthly.sh --build-arg BENCH_SUFFIX=main +prep-bench-folder` to generate bench-folder-main.
Use `./ci/safe-earthly.sh +prep-bench-folder` to generate bench-folder-branch."#
Use `./ci/benchmarks/prep_folder.sh main` to generate bench-folder-main.
Use `./ci/benchmarks/prep_folder.sh branch` to generate bench-folder-branch."#
);
process::exit(1)
@ -85,6 +85,8 @@ fn do_all_benches(nr_repeat_benchmarks: usize) -> HashSet<String> {
return HashSet::new();
}
println!("\n\nDoing benchmarks {:?} times to reduce flukes.\n\n", nr_repeat_benchmarks);
for _ in 1..nr_repeat_benchmarks {
delete_old_bench_results();
do_benchmark("main");
@ -112,7 +114,7 @@ fn do_benchmark(branch_name: &'static str) -> HashSet<String> {
))
.args(&["--bench", "--noplot"])
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.stderr(Stdio::inherit())
.spawn()
.unwrap_or_else(|_| panic!("Failed to benchmark {}.", branch_name));
@ -133,14 +135,14 @@ fn do_benchmark(branch_name: &'static str) -> HashSet<String> {
"Failed to get line that contains benchmark name from last_three_lines_queue.",
);
let regex_match = bench_name_regex.find(regressed_bench_name_line).expect("This line should hoave the benchmark name between double quotes but I could not match it");
let regex_match = bench_name_regex.find(regressed_bench_name_line).expect("This line should have the benchmark name between double quotes but I could not match it");
regressed_benches.insert(regex_match.as_str().to_string().replace("\"", ""));
}
last_three_lines_queue.push_front(line_str.clone());
println!("bench {:?}: {:?}", branch_name, line_str);
println!(">>bench {:?}: {:?}", branch_name, line_str);
}
regressed_benches
@ -186,8 +188,20 @@ fn sha256_digest<R: Read>(mut reader: R) -> Result<Digest, io::Error> {
}
fn sha_file(file_path: &Path) -> Result<String, io::Error> {
let input = File::open(file_path)?;
let reader = BufReader::new(input);
// Debug info is dependent on the dir in which executable was created,
// so we need to strip that to be able to compare binaries.
let no_debug_info_file_path = file_path.to_str().unwrap().to_string() + ("_no_debug_info");
std::fs::copy(file_path, &no_debug_info_file_path)?;
let strip_output = Command::new("strip")
.args(["--strip-debug", &no_debug_info_file_path])
.output()
.expect("failed to execute process");
assert!(strip_output.status.success());
let no_debug_info_file = File::open(no_debug_info_file_path)?;
let reader = BufReader::new(no_debug_info_file);
let digest = sha256_digest(reader)?;
Ok(HEXUPPER.encode(digest.as_ref()))

26
ci/benchmarks/prep_folder.sh Executable file
View file

@ -0,0 +1,26 @@
#!/usr/bin/env bash

# Compile everything needed for benchmarks and output a self-contained dir
# from which benchmarks can be run.
#
# Usage: prep_folder.sh <suffix>
#   <suffix> names the output folder, e.g. `main` or `branch`,
#   producing bench-folder-main / bench-folder-branch.

# https://vaneyckt.io/posts/safer_bash_scripts_with_set_euxo_pipefail/
set -euxo pipefail

# To make use of avx, avx2, sse2, sse4.2... instructions.
# Must be exported — a plain assignment is invisible to the cargo child
# processes below, and the flags would have no effect.
export RUSTFLAGS="-C link-arg=-fuse-ld=lld -C target-cpu=native"

BENCH_SUFFIX=$1

cargo criterion -V
cd crates/cli && cargo criterion --no-run && cd ../..

# Lay out the self-contained folder structure expected by the bench runner.
mkdir -p bench-folder/crates/cli_testing_examples/benchmarks
mkdir -p bench-folder/crates/compiler/builtins/bitcode/src
mkdir -p bench-folder/target/release/deps
mkdir -p bench-folder/target/release/lib

cp "crates/cli_testing_examples/benchmarks/"*".roc" bench-folder/crates/cli_testing_examples/benchmarks/
cp -r crates/cli_testing_examples/benchmarks/platform bench-folder/crates/cli_testing_examples/benchmarks/
cp crates/compiler/builtins/bitcode/src/str.zig bench-folder/crates/compiler/builtins/bitcode/src
cp target/release/roc bench-folder/target/release
cp -r target/release/lib bench-folder/target/release

# Copy the most recently built time_bench executable to bench-folder.
cp target/release/deps/"$(ls -t target/release/deps/ | grep time_bench | head -n 1)" bench-folder/target/release/deps/time_bench

mv bench-folder "bench-folder-$BENCH_SUFFIX"

View file

@ -1,5 +1,8 @@
#!/usr/bin/env bash
# https://vaneyckt.io/posts/safer_bash_scripts_with_set_euxo_pipefail/
set -euxo pipefail
mkdir -p $HOME/.cargo
echo -e "[build]\nrustflags = [\"-C\", \"link-arg=-fuse-ld=lld\", \"-C\", \"target-cpu=native\"]" > $HOME/.cargo/config

View file

@ -2,6 +2,9 @@
# assumes roc_releases.json is present
# https://vaneyckt.io/posts/safer_bash_scripts_with_set_euxo_pipefail/
set -euxo pipefail
LATEST_RELEASE_URL=`cat roc_releases.json | jq --arg arch $1 --arg today $(date +'%Y-%m-%d') '.[0] | .assets | map(.browser_download_url) | map(select(. | contains("\($arch)-\($today)"))) | .[0]'`
if [[ "$LATEST_RELEASE_URL" == "null" ]]

View file

@ -1,4 +1,8 @@
#!/usr/bin/env bash
# https://vaneyckt.io/posts/safer_bash_scripts_with_set_euxo_pipefail/
set -euxo pipefail
cp target/release/roc ./roc # to be able to exclude "target" later in the tar command
cp -r target/release/lib ./lib
tar -czvf $1 --exclude="target" --exclude="zig-cache" roc lib LICENSE LEGAL_DETAILS examples/helloWorld.roc examples/platform-switching examples/cli crates/roc_std

View file

@ -1,23 +0,0 @@
#!/usr/bin/env bash
LOG_FILE="earthly_log.txt"
touch $LOG_FILE
# first arg + everything after
ARGS=${@:1}
FULL_CMD="earthly --config ci/earthly-conf.yml $ARGS"
echo $FULL_CMD
script -efq $LOG_FILE -c "$FULL_CMD"
EXIT_CODE=$?
if grep -q "failed to mount" "$LOG_FILE"; then
echo ""
echo ""
echo "------<<<<<<!!!!!!>>>>>>------"
echo "DETECTED FAILURE TO MOUNT ERROR: running without cache"
echo "------<<<<<<!!!!!!>>>>>>------"
echo ""
echo ""
earthly --config ci/earthly-conf.yml --no-cache $ARGS
else
exit $EXIT_CODE
fi

View file

@ -1,3 +1,7 @@
#!/usr/bin/env bash
# https://vaneyckt.io/posts/safer_bash_scripts_with_set_euxo_pipefail/
set -euxo pipefail
# version.txt is used by the CLI: roc --version
printf 'nightly pre-release, built from commit ' > version.txt && git log --pretty=format:'%h' -n 1 >> version.txt && printf ' on ' >> version.txt && date -u >> version.txt

View file

@ -1,3 +1,6 @@
#!/usr/bin/env bash
# https://vaneyckt.io/posts/safer_bash_scripts_with_set_euxo_pipefail/
set -euxo pipefail
crates/repl_wasm/build-www.sh `pwd`/roc_repl_wasm.tar.gz

148
crates/README.md Normal file
View file

@ -0,0 +1,148 @@
# Roc Internals
Roc has different rust crates for various binaries and libraries. Their roles are briefly described below. If you'd like to learn more, have any questions, or suspect something is out of date, please start a discussion on the [Roc Zulip](https://roc.zulipchat.com/)!
You can use `cargo doc` to generate docs for a specific package; e.g.
```
cargo doc --package roc_ast --open
```
## `ast/` - `roc_ast`
Code to represent the [Abstract Syntax Tree](https://en.wikipedia.org/wiki/Abstract_syntax_tree) as used by the editor.
In contrast to the compiler, the types in this AST do not keep track of the location of the matching code in the source file.
## `cli/` - `roc_cli`
The `roc` binary that brings together all functionality in the Roc toolset.
## `cli_utils/` - `cli_utils`
Provides shared code for cli tests and benchmarks.
## `code_markup/` - `roc_code_markup`
A [markup language](https://en.wikipedia.org/wiki/Markup_language) to display Roc code in the editor.
## `compiler/`
Compiles `.roc` files and combines them with their platform into an executable binary. See [compiler/README.md](./compiler/README.md) for more information.
TODO explain what "compiler frontend" is
TODO explain what "compiler backend" is
The compiler includes the following sub-crates:
- `roc_alias_analysis` Performs analysis and optimizations to remove unneeded [reference counts](https://en.wikipedia.org/wiki/Reference_counting) at runtime, and supports in-place mutation.
- `arena-pool` An implementation of an [arena allocator](https://mgravell.github.io/Pipelines.Sockets.Unofficial/docs/arenas.html) designed for the compiler's workloads.
- `roc_build` Responsible for coordinating building and linking of a Roc app with its host.
- `roc_builtins` provides the Roc functions and modules that are implicitly imported into every module. See [README.md](./compiler/builtins/README.md) for more information.
- `roc_can` [Canonicalize](https://en.wikipedia.org/wiki/Canonicalization) a roc [abstract syntax tree](https://en.wikipedia.org/wiki/Abstract_syntax_tree), [resolving symbols](https://stackoverflow.com/a/1175493/4200103), [re-ordering definitions](https://www.oreilly.com/library/view/c-high-performance/9781787120952/546b5677-9157-4333-bc90-16db696436ac.xhtml), and preparing a module for [type inference](https://en.wikipedia.org/wiki/Type_inference).
- `roc_collections` Domain-specific collections created for the needs of the compiler.
- `roc_constrain` Responsible for building the set of constraints that are used during [type inference](https://en.wikipedia.org/wiki/Type_inference) of a program, and for gathering context needed for pleasant error messages when a type error occurs.
- `roc_debug_flags` Environment variables that can be toggled to aid debugging of the compiler itself.
- `roc_derive` provides auto-derivers for builtin abilities like `Hash` and `Decode`.
- `roc_exhaustive` provides [exhaustiveness](https://dev.to/babak/exhaustive-type-checking-with-typescript-4l3f) checking for Roc.
- `roc_fmt` The roc code formatter.
- `roc_gen_dev` provides the compiler backend to generate Roc binaries fast, for a nice developer experience. See [README.md](./compiler/gen_dev/README.md) for more information.
- `roc_gen_llvm` provides the LLVM backend to generate Roc binaries. Used to generate a binary with the fastest possible execution speed.
- `roc_gen_wasm` provides the WASM backend to generate Roc binaries. See [README.md](./compiler/gen_wasm/README.md) for more information.
- `roc_ident` Implements data structures used for efficiently representing small strings, like identifiers.
- `roc_intern` provides generic interners for concurrent and single-thread use cases.
- `roc_late_solve` provides type unification and solving primitives from the perspective of the compiler backend.
- `roc_load` Used to load a .roc file and coordinate the compiler pipeline, including parsing, type checking, and [code generation](https://en.wikipedia.org/wiki/Code_generation_(compiler)).
- `roc_load_internal` The internal implementation of roc_load, separate from roc_load to support caching.
- `roc_module` Implements data structures used for efficiently representing unique modules and identifiers in Roc programs.
- `roc_mono` Roc's main intermediate representation (IR), which is responsible for [monomorphization](https://en.wikipedia.org/wiki/Monomorphization), defunctionalization, inserting [ref-count](https://en.wikipedia.org/wiki/Reference_counting) instructions, and transforming a Roc program into a form that is easy to consume by a backend.
- `roc_parse` Implements the Roc parser, which transforms a textual representation of a Roc program to an [abstract syntax tree](https://en.wikipedia.org/wiki/Abstract_syntax_tree).
- `roc_problem` provides types to describe problems that can occur when compiling `.roc` code.
- `roc_region` Data structures for storing source-code-location information, used heavily for contextual error messages.
- `roc_target` provides types and helpers for compiler targets such as `default_x86_64`.
- `roc_serialize` provides helpers for serializing and deserializing to/from bytes.
- `roc_solve` The entry point of Roc's [type inference](https://en.wikipedia.org/wiki/Type_inference) system. Implements type inference and specialization of abilities.
- `roc_solve_problem` provides types to describe problems that can occur during solving.
- `roc_str` provides `Roc` styled collection [reference counting](https://en.wikipedia.org/wiki/Reference_counting). See [README.md](./compiler/str/README.md) for more information.
- `test_derive` Tests Roc's auto-derivers.
- `test_gen` contains all of Roc's [code generation](https://en.wikipedia.org/wiki/Code_generation_(compiler)) tests. See [README.md](./compiler/test_gen/README.md) for more information.
- `test_mono` Tests Roc's generation of the mono intermediate representation.
- `test_mono_macros` Macros for use in `test_mono`.
- `roc_types` Various representations and utilities for dealing with types in the Roc compiler.
- `roc_unify` Implements Roc's unification algorithm, the cornerstone of Roc's [type inference](https://en.wikipedia.org/wiki/Type_inference).
## `docs/` - `roc_docs`
Generates html documentation from Roc files.
Used for [roc-lang.org/builtins/Num](https://www.roc-lang.org/builtins/Num).
## `docs_cli/` - `roc_docs_cli` library and `roc-docs` binary
Provides a binary that is only used for static build servers.
## `editor/` - `roc_editor`
Roc's editor. See [README.md](./editor/README.md) for more information.
## `error_macros/` - `roc_error_macros`
Provides macros for consistent reporting of errors in Roc's rust code.
## `glue/` - `roc_glue`
The `roc_glue` crate generates code needed for platform hosts to communicate with Roc apps. This tool is not necessary for writing a platform in another language, however, it's a great convenience! Currently supports Rust platforms, and the plan is to support any language via a plugin model.
## `highlight/` - `roc_highlight`
Provides syntax highlighting for the editor by transforming a string to markup nodes.
## `linker/` - `roc_linker`
Surgical linker that links platforms to Roc applications. We created our own linker for performance, since regular linkers add complexity that is not needed for linking Roc apps. Because we want `roc` to manage the build system and final linking of the executable, it is significantly less practical to use a regular linker. See [README.md](./linker/README.md) for more information.
## `repl_cli/` - `roc_repl_cli`
Command Line Interface (CLI) functionality for the Read-Evaluate-Print-Loop (REPL).
## `repl_eval/` - `roc_repl_eval`
Provides the functionality for the REPL to evaluate Roc expressions.
## `repl_expect/` - `roc_repl_expect`
Supports evaluating `expect` and printing contextual information when they fail.
## `repl_test/` - `repl_test`
Tests the roc REPL.
## `repl_wasm/` - `roc_repl_wasm`
Provides a build of the REPL for the Roc website using WebAssembly. See [README.md](./repl_wasm/README.md) for more information.
## `reporting/` - `roc_reporting`
Responsible for generating warning and error messages.
## `roc_std/` - `roc_std`
Provides Rust representations of Roc data structures.
## `test_utils/` - `roc_test_utils`
Provides testing utility functions for use throughout the Rust code base.
## `tracing/` - `roc_tracing`
Provides tracing utility functions for various executable entry points.
## `utils/` - `roc_utils`
Provides utility functions used all over the code base.
## `vendor/`
These are files that were originally obtained somewhere else (e.g. crates.io) but which we needed to fork for some Roc-specific reason. See [README.md](./vendor/README.md) for more information.
## `wasi-libc-sys/` - `wasi_libc_sys`
Provides a Rust wrapper for the WebAssembly test platform built on libc and is primarily used for testing purposes.

View file

@ -2199,7 +2199,7 @@ pub mod test_constrain {
Foo
"#
),
"[Foo]*",
"[Foo]",
)
}
@ -2235,7 +2235,7 @@ pub mod test_constrain {
if True then Green else Red
"#
),
"[Green, Red]*",
"[Green, Red]",
)
}
@ -2249,7 +2249,7 @@ pub mod test_constrain {
Red -> Purple
"#
),
"[Blue, Purple]*",
"[Blue, Purple]",
)
}
@ -2302,7 +2302,7 @@ pub mod test_constrain {
\a, b -> Pair a b
"#
),
"a, b -> [Pair a b]*",
"a, b -> [Pair a b]",
);
}
@ -2445,7 +2445,7 @@ pub mod test_constrain {
curryPair
"#
),
"a -> (b -> [Pair a b]*)",
"a -> (b -> [Pair a b])",
);
}
@ -2658,7 +2658,7 @@ pub mod test_constrain {
B -> Y
"#
),
"[A, B] -> [X, Y]*",
"[A, B] -> [X, Y]",
)
}
@ -2674,7 +2674,7 @@ pub mod test_constrain {
_ -> Z
"#
),
"[A, B]* -> [X, Y, Z]*",
"[A, B]* -> [X, Y, Z]",
)
}
@ -2689,7 +2689,7 @@ pub mod test_constrain {
A N -> Y
"#
),
"[A [M, N]] -> [X, Y]*",
"[A [M, N]] -> [X, Y]",
)
}
@ -2705,7 +2705,7 @@ pub mod test_constrain {
A _ -> Z
"#
),
"[A [M, N]] -> [X, Y, Z]*",
"[A [M, N]] -> [X, Y, Z]",
)
}
@ -2737,7 +2737,7 @@ pub mod test_constrain {
A N -> X
"#
),
"[A [M, N], B] -> [X]*",
"[A [M, N], B] -> [X]",
)
}

View file

@ -352,7 +352,7 @@ pub fn expr_to_expr2<'a>(
for (node_id, branch) in can_branches.iter_node_ids().zip(branches.iter()) {
let (can_when_branch, branch_references) =
canonicalize_when_branch(env, scope, *branch, &mut output);
canonicalize_when_branch(env, scope, branch, &mut output);
output.references.union_mut(branch_references);

View file

@ -3,7 +3,7 @@ use roc_module::ident::Lowercase;
use roc_module::symbol::Symbol;
use roc_types::subs::Variable;
#[derive(Clone, Debug, PartialEq, Default)]
#[derive(Clone, Debug, PartialEq, Eq, Default)]
pub struct IntroducedVariables {
// Rigids must be unique within a type annotation.
// E.g. in `identity : a -> a`, there should only be one

View file

@ -447,6 +447,9 @@ pub fn to_pattern2<'a>(
unreachable!("should have been handled in RecordDestructure");
}
List(..) => todo!(),
ListRest => todo!(),
Malformed(_str) => {
let problem = MalformedPatternProblem::Unknown;
malformed_pattern(env, problem, region)

View file

@ -414,24 +414,7 @@ pub fn to_type2<'a>(
for (node_id, (label, field)) in field_types.iter_node_ids().zip(field_types_map) {
let poolstr = PoolStr::new(label.as_str(), env.pool);
let rec_field = match field {
RecordField::Optional(_) => {
let field_id = env.pool.add(field.into_inner());
RecordField::Optional(field_id)
}
RecordField::RigidOptional(_) => {
let field_id = env.pool.add(field.into_inner());
RecordField::RigidOptional(field_id)
}
RecordField::Demanded(_) => {
let field_id = env.pool.add(field.into_inner());
RecordField::Demanded(field_id)
}
RecordField::Required(_) => {
let field_id = env.pool.add(field.into_inner());
RecordField::Required(field_id)
}
};
let rec_field = field.map_owned(|field| env.pool.add(field));
env.pool[node_id] = (poolstr, rec_field);
}

View file

@ -1,3 +1,8 @@
//! Library to represent the [Abstract Syntax Tree](https://en.wikipedia.org/wiki/Abstract_syntax_tree).
//!
//! Used by roc_editor.
//! In contrast to the compiler, the types in this AST do not
//! keep track of the location of the matching code in the source file.
pub mod ast_error;
mod builtin_aliases;
mod canonicalization;

View file

@ -13,7 +13,7 @@ use roc_types::subs::{
Subs, SubsSlice, UnionLambdas, UnionTags, Variable, VariableSubsSlice,
};
use roc_types::types::{
gather_fields_unsorted_iter, Alias, AliasKind, Category, ErrorType, PatternCategory,
gather_fields_unsorted_iter, Alias, AliasKind, Category, ErrorType, PatternCategory, Polarity,
RecordField,
};
use roc_unify::unify::unify;
@ -228,7 +228,13 @@ fn solve<'a>(
expectation.get_type_ref(),
);
match unify(&mut UEnv::new(subs), actual, expected, Mode::EQ) {
match unify(
&mut UEnv::new(subs),
actual,
expected,
Mode::EQ,
Polarity::OF_VALUE,
) {
Success {
vars,
must_implement_ability: _,
@ -327,7 +333,13 @@ fn solve<'a>(
expectation.get_type_ref(),
);
match unify(&mut UEnv::new(subs), actual, expected, Mode::EQ) {
match unify(
&mut UEnv::new(subs),
actual,
expected,
Mode::EQ,
Polarity::OF_VALUE,
) {
Success {
vars,
must_implement_ability: _,
@ -404,7 +416,13 @@ fn solve<'a>(
);
// TODO(ayazhafiz): presence constraints for Expr2/Type2
match unify(&mut UEnv::new(subs), actual, expected, Mode::EQ) {
match unify(
&mut UEnv::new(subs),
actual,
expected,
Mode::EQ,
Polarity::OF_PATTERN,
) {
Success {
vars,
must_implement_ability: _,
@ -718,7 +736,13 @@ fn solve<'a>(
);
let includes = type_to_var(arena, mempool, subs, rank, pools, cached_aliases, &tag_ty);
match unify(&mut UEnv::new(subs), actual, includes, Mode::PRESENT) {
match unify(
&mut UEnv::new(subs),
actual,
includes,
Mode::PRESENT,
Polarity::OF_PATTERN,
) {
Success {
vars,
must_implement_ability: _,
@ -834,6 +858,15 @@ fn type_to_variable<'a>(
cached,
mempool.get(*type_id),
)),
RigidRequired(type_id) => RigidRequired(type_to_variable(
arena,
mempool,
subs,
rank,
pools,
cached,
mempool.get(*type_id),
)),
Optional(type_id) => Optional(type_to_variable(
arena,
mempool,
@ -925,7 +958,7 @@ fn type_to_variable<'a>(
arg_vars.push(arg_var);
}
let arg_vars = AliasVariables::insert_into_subs(subs, arg_vars, []);
let arg_vars = AliasVariables::insert_into_subs(subs, arg_vars, [], []);
let alias_var = type_to_variable(arena, mempool, subs, rank, pools, cached, alias_type);
@ -1182,7 +1215,7 @@ fn circular_error(
loc_var: &Loc<Variable>,
) {
let var = loc_var.value;
let (error_type, _) = subs.var_to_error_type(var);
let (error_type, _) = subs.var_to_error_type(var, Polarity::Pos);
let problem = TypeError::CircularType(loc_var.region, symbol, error_type);
subs.set_content(var, Content::Error);

View file

@ -77,9 +77,6 @@ roc_gen_llvm = {path = "../compiler/gen_llvm"}
inkwell = {path = "../vendor/inkwell"}
signal-hook = "0.3.14"
[target.'cfg(windows)'.dependencies]
memexec = "0.2.0"
# for now, uses unix/libc functions that windows does not support
[target.'cfg(not(windows))'.dependencies]
roc_repl_expect = { path = "../repl_expect" }

View file

@ -252,7 +252,9 @@ pub fn build_file<'a>(
}
let rebuild_timing = if linking_strategy == LinkingStrategy::Additive {
let rebuild_duration = rebuild_thread.join().unwrap();
let rebuild_duration = rebuild_thread
.join()
.expect("Failed to (re)build platform.");
if emit_timings && !prebuilt {
println!(
"Finished rebuilding the platform in {} ms\n",
@ -304,7 +306,7 @@ pub fn build_file<'a>(
}
if let HostRebuildTiming::ConcurrentWithApp(thread) = rebuild_timing {
let rebuild_duration = thread.join().unwrap();
let rebuild_duration = thread.join().expect("Failed to (re)build platform.");
if emit_timings && !prebuilt {
println!(
"Finished rebuilding the platform in {} ms\n",

View file

@ -157,7 +157,9 @@ fn parse_all<'a>(arena: &'a Bump, src: &'a str) -> Result<Ast<'a>, SyntaxError<'
let (module, state) = module::parse_header(arena, State::new(src.as_bytes()))
.map_err(|e| SyntaxError::Header(e.problem))?;
let (_, defs, _) = module_defs().parse(arena, state).map_err(|(_, e, _)| e)?;
let (_, defs, _) = module_defs()
.parse(arena, state, 0)
.map_err(|(_, e, _)| e)?;
Ok(Ast { module, defs })
}

View file

@ -1,3 +1,5 @@
//! Provides the core CLI functionality for the `roc` binary
#[macro_use]
extern crate const_format;
@ -44,6 +46,7 @@ pub const CMD_VERSION: &str = "version";
pub const CMD_FORMAT: &str = "format";
pub const CMD_TEST: &str = "test";
pub const CMD_GLUE: &str = "glue";
pub const CMD_GEN_STUB_LIB: &str = "gen-stub-lib";
pub const FLAG_DEBUG: &str = "debug";
pub const FLAG_DEV: &str = "dev";
@ -276,6 +279,23 @@ pub fn build_app<'a>() -> Command<'a> {
.required(true)
)
)
.subcommand(Command::new(CMD_GEN_STUB_LIB)
.about("Generate a stubbed shared library that can be used for linking a platform binary.\nThe stubbed library has prototypes, but no function bodies.\n\nNote: This command will be removed in favor of just using `roc build` once all platforms support the surgical linker")
.arg(
Arg::new(ROC_FILE)
.help("The .roc file for an app using the platform")
.allow_invalid_utf8(true)
.required(true)
)
.arg(
Arg::new(FLAG_TARGET)
.long(FLAG_TARGET)
.help("Choose a different target")
.default_value(Target::default().as_str())
.possible_values(Target::OPTIONS)
.required(false),
)
)
.trailing_var_arg(true)
.arg(flag_optimize)
.arg(flag_max_threads.clone())
@ -779,6 +799,52 @@ fn make_argv_envp<'a, I: IntoIterator<Item = S>, S: AsRef<OsStr>>(
(argv_cstrings, envp_cstrings)
}
#[cfg_attr(not(target_family = "windows"), allow(unused))]
fn make_argv_envp_windows<'a, I: IntoIterator<Item = S>, S: AsRef<OsStr>>(
arena: &'a Bump,
executable: &ExecutableFile,
args: I,
) -> (
bumpalo::collections::Vec<'a, CString>,
bumpalo::collections::Vec<'a, CString>,
) {
use bumpalo::collections::CollectIn;
let path = executable.as_path();
let path_cstring = CString::new(path.as_os_str().to_str().unwrap().as_bytes()).unwrap();
// argv is an array of pointers to strings passed to the new program
// as its command-line arguments. By convention, the first of these
// strings (i.e., argv[0]) should contain the filename associated
// with the file being executed. The argv array must be terminated
// by a NULL pointer. (Thus, in the new program, argv[argc] will be NULL.)
let it = args
.into_iter()
.map(|x| CString::new(x.as_ref().to_str().unwrap().as_bytes()).unwrap());
let argv_cstrings: bumpalo::collections::Vec<CString> =
std::iter::once(path_cstring).chain(it).collect_in(arena);
// envp is an array of pointers to strings, conventionally of the
// form key=value, which are passed as the environment of the new
// program. The envp array must be terminated by a NULL pointer.
let mut buffer = Vec::with_capacity(100);
let envp_cstrings: bumpalo::collections::Vec<CString> = std::env::vars_os()
.map(|(k, v)| {
buffer.clear();
use std::io::Write;
buffer.write_all(k.to_str().unwrap().as_bytes()).unwrap();
buffer.write_all(b"=").unwrap();
buffer.write_all(v.to_str().unwrap().as_bytes()).unwrap();
CString::new(buffer.as_slice()).unwrap()
})
.collect_in(arena);
(argv_cstrings, envp_cstrings)
}
/// Run on the native OS (not on wasm)
#[cfg(target_family = "unix")]
fn roc_run_native<I: IntoIterator<Item = S>, S: AsRef<OsStr>>(
@ -874,12 +940,9 @@ impl ExecutableFile {
#[cfg(target_family = "windows")]
ExecutableFile::OnDisk(_, path) => {
let _ = argv;
let _ = envp;
use memexec::memexec_exe;
let bytes = std::fs::read(path).unwrap();
memexec_exe(&bytes).unwrap();
std::process::exit(0);
let path_cstring = CString::new(path.to_str().unwrap()).unwrap();
libc::execve(path_cstring.as_ptr().cast(), argv.as_ptr(), envp.as_ptr())
}
}
}
@ -975,7 +1038,7 @@ fn roc_run_executable_file_path(binary_bytes: &[u8]) -> std::io::Result<Executab
fn roc_run_native<I: IntoIterator<Item = S>, S: AsRef<OsStr>>(
arena: Bump, // This should be passed an owned value, not a reference, so we can usefully mem::forget it!
opt_level: OptLevel,
_args: I,
args: I,
binary_bytes: &[u8],
_expectations: VecMap<ModuleId, Expectations>,
_interns: Interns,
@ -986,9 +1049,7 @@ fn roc_run_native<I: IntoIterator<Item = S>, S: AsRef<OsStr>>(
let executable = roc_run_executable_file_path(binary_bytes)?;
// TODO forward the arguments
// let (argv_cstrings, envp_cstrings) = make_argv_envp(&arena, &executable, args);
let argv_cstrings = bumpalo::vec![ in &arena; CString::default()];
let envp_cstrings = bumpalo::vec![ in &arena; CString::default()];
let (argv_cstrings, envp_cstrings) = make_argv_envp_windows(&arena, &executable, args);
let argv: bumpalo::collections::Vec<*const c_char> = argv_cstrings
.iter()

View file

@ -1,10 +1,11 @@
//! the `roc` binary
use roc_build::link::LinkType;
use roc_cli::build::check_file;
use roc_cli::{
build_app, format, test, BuildConfig, FormatMode, Target, CMD_BUILD, CMD_CHECK, CMD_DEV,
CMD_DOCS, CMD_EDIT, CMD_FORMAT, CMD_GLUE, CMD_REPL, CMD_RUN, CMD_TEST, CMD_VERSION,
DIRECTORY_OR_FILES, FLAG_CHECK, FLAG_LIB, FLAG_NO_LINK, FLAG_TARGET, FLAG_TIME, GLUE_FILE,
ROC_FILE,
CMD_DOCS, CMD_EDIT, CMD_FORMAT, CMD_GEN_STUB_LIB, CMD_GLUE, CMD_REPL, CMD_RUN, CMD_TEST,
CMD_VERSION, DIRECTORY_OR_FILES, FLAG_CHECK, FLAG_LIB, FLAG_NO_LINK, FLAG_TARGET, FLAG_TIME,
GLUE_FILE, ROC_FILE,
};
use roc_docs::generate_docs_html;
use roc_error_macros::user_error;
@ -93,6 +94,12 @@ fn main() -> io::Result<()> {
Ok(1)
}
}
Some((CMD_GEN_STUB_LIB, matches)) => {
let input_path = Path::new(matches.value_of_os(ROC_FILE).unwrap());
let target: Target = matches.value_of_t(FLAG_TARGET).unwrap_or_default();
roc_linker::generate_stub_lib(input_path, &target.to_triple())
}
Some((CMD_BUILD, matches)) => {
let target: Target = matches.value_of_t(FLAG_TARGET).unwrap_or_default();

View file

@ -43,7 +43,7 @@ mod cli_run {
#[cfg(not(all(target_os = "linux", target_arch = "x86_64")))]
const TEST_LEGACY_LINKER: bool = false;
#[cfg(not(target_os = "macos"))]
#[cfg(all(unix, not(target_os = "macos")))]
const ALLOW_VALGRIND: bool = true;
// Disallow valgrind on macOS by default, because it reports a ton
@ -52,6 +52,9 @@ mod cli_run {
#[cfg(target_os = "macos")]
const ALLOW_VALGRIND: bool = false;
#[cfg(windows)]
const ALLOW_VALGRIND: bool = false;
#[derive(Debug, PartialEq, Eq)]
enum Arg<'a> {
ExamplePath(&'a str),
@ -82,6 +85,12 @@ mod cli_run {
let (before_first_digit, _) = err.split_at(err.rfind("found in ").unwrap());
let err = format!("{}found in <ignored for test> ms.", before_first_digit);
// make paths consistent
let err = err.replace('\\', "/");
// consistency with typewriters, very important
let err = err.replace('\r', "");
assert_multiline_str_eq!(err.as_str(), expected);
}
@ -110,6 +119,11 @@ mod cli_run {
let ignorable = "🔨 Rebuilding platform...\n";
let stderr = compile_out.stderr.replacen(ignorable, "", 1);
// for some reason, llvm prints out this warning when targeting windows
let ignorable = "warning: ignoring debug info with an invalid version (0) in app\r\n";
let stderr = stderr.replacen(ignorable, "", 1);
let is_reporting_runtime = stderr.starts_with("runtime: ") && stderr.ends_with("ms\n");
if !(stderr.is_empty() || is_reporting_runtime) {
panic!("`roc` command had unexpected stderr: {}", stderr);
@ -149,7 +163,10 @@ mod cli_run {
let flags = {
let mut vec = flags.to_vec();
vec.push("--max-threads=1");
// max-threads segfaults on windows right now
if !cfg!(windows) {
vec.push("--max-threads=1");
}
vec.into_iter()
};
@ -397,6 +414,11 @@ mod cli_run {
)
}
#[cfg(windows)]
const LINE_ENDING: &str = "\r\n";
#[cfg(not(windows))]
const LINE_ENDING: &str = "\n";
#[test]
// uses C platform
fn platform_switching_main() {
@ -404,7 +426,7 @@ mod cli_run {
"examples/platform-switching",
"main.roc",
"rocLovesPlatforms",
"Which platform am I running on now?\n",
&("Which platform am I running on now?".to_string() + LINE_ENDING),
true,
)
}
@ -991,11 +1013,11 @@ mod cli_run {
This #UserApp.main value is a:
Task.Task {} * [Write [Stdout]*]* ?
Task.Task {} * [Write [Stdout]]
But the type annotation on main says it should be:
InternalProgram.InternalProgram ?
InternalProgram.InternalProgram
Tip: Type comparisons between an opaque type are only ever equal if
both types are the same opaque type. Did you mean to create an opaque
@ -1012,11 +1034,11 @@ mod cli_run {
This #UserApp.main value is a:
Task.Task {} * [Write [Stdout]*]* ?
Task.Task {} * [Write [Stdout]]
But toEffect needs its 1st argument to be:
InternalProgram.InternalProgram ?
InternalProgram.InternalProgram
Tip: Type comparisons between an opaque type are only ever equal if
both types are the same opaque type. Did you mean to create an opaque
@ -1132,7 +1154,7 @@ fn run_with_wasmer(wasm_path: &std::path::Path, stdin: &[&str]) -> String {
// .unwrap();
let store = Store::default();
let module = Module::from_file(&store, &wasm_path).unwrap();
let module = Module::from_file(&store, wasm_path).unwrap();
let mut fake_stdin = wasmer_wasi::Pipe::new();
let fake_stdout = wasmer_wasi::Pipe::new();

View file

@ -71,26 +71,21 @@ pub export fn main() i32 {
const stdout = std.io.getStdOut().writer();
const stderr = std.io.getStdErr().writer();
// start time
var ts1: std.os.timespec = undefined;
std.os.clock_gettime(std.os.CLOCK.REALTIME, &ts1) catch unreachable;
var timer = std.time.Timer.start() catch unreachable;
// actually call roc to populate the callresult
var callresult = RocStr.empty();
roc__mainForHost_1_exposed_generic(&callresult);
// end time
var ts2: std.os.timespec = undefined;
std.os.clock_gettime(std.os.CLOCK.REALTIME, &ts2) catch unreachable;
const nanos = timer.read();
const seconds = (@intToFloat(f64, nanos) / 1_000_000_000.0);
// stdout the result
stdout.print("{s}\n", .{callresult.asSlice()}) catch unreachable;
callresult.deinit();
const delta = to_seconds(ts2) - to_seconds(ts1);
stderr.print("runtime: {d:.3}ms\n", .{delta * 1000}) catch unreachable;
stderr.print("runtime: {d:.3}ms\n", .{seconds * 1000}) catch unreachable;
return 0;
}

View file

@ -70,26 +70,21 @@ pub export fn main() i32 {
const stdout = std.io.getStdOut().writer();
const stderr = std.io.getStdErr().writer();
// start time
var ts1: std.os.timespec = undefined;
std.os.clock_gettime(std.os.CLOCK.REALTIME, &ts1) catch unreachable;
var timer = std.time.Timer.start() catch unreachable;
// actually call roc to populate the callresult
var callresult = RocStr.empty();
roc__mainForHost_1_exposed_generic(&callresult);
// end time
var ts2: std.os.timespec = undefined;
std.os.clock_gettime(std.os.CLOCK.REALTIME, &ts2) catch unreachable;
const nanos = timer.read();
const seconds = (@intToFloat(f64, nanos) / 1_000_000_000.0);
// stdout the result
stdout.print("{s}\n", .{callresult.asSlice()}) catch unreachable;
callresult.deinit();
const delta = to_seconds(ts2) - to_seconds(ts1);
stderr.print("runtime: {d:.3}ms\n", .{delta * 1000}) catch unreachable;
stderr.print("runtime: {d:.3}ms\n", .{seconds * 1000}) catch unreachable;
return 0;
}

View file

@ -83,23 +83,18 @@ export fn roc_memset(dst: [*]u8, value: i32, size: usize) callconv(.C) void {
pub export fn main() u8 {
const stdout = std.io.getStdOut().writer();
const stderr = std.io.getStdErr().writer();
// start time
var ts1: std.os.timespec = undefined;
std.os.clock_gettime(std.os.CLOCK.REALTIME, &ts1) catch unreachable;
var timer = std.time.Timer.start() catch unreachable;
const result = roc__mainForHost_1_exposed(10);
// end time
var ts2: std.os.timespec = undefined;
std.os.clock_gettime(std.os.CLOCK.REALTIME, &ts2) catch unreachable;
const nanos = timer.read();
const seconds = (@intToFloat(f64, nanos) / 1_000_000_000.0);
stdout.print("{d}\n", .{result}) catch unreachable;
const delta = to_seconds(ts2) - to_seconds(ts1);
stderr.print("runtime: {d:.3}ms\n", .{delta * 1000}) catch unreachable;
const stderr = std.io.getStdErr().writer();
stderr.print("runtime: {d:.3}ms\n", .{seconds * 1000}) catch unreachable;
return 0;
}

View file

@ -103,9 +103,7 @@ pub export fn main() u8 {
var roc_list = RocList{ .elements = numbers, .length = NUM_NUMS, .capacity = NUM_NUMS };
// start time
var ts1: std.os.timespec = undefined;
std.os.clock_gettime(std.os.CLOCK.REALTIME, &ts1) catch unreachable;
var timer = std.time.Timer.start() catch unreachable;
// actually call roc to populate the callresult
const callresult: RocList = roc__mainForHost_1_exposed(roc_list);
@ -114,9 +112,8 @@ pub export fn main() u8 {
const length = std.math.min(20, callresult.length);
var result = callresult.elements[0..length];
// end time
var ts2: std.os.timespec = undefined;
std.os.clock_gettime(std.os.CLOCK.REALTIME, &ts2) catch unreachable;
const nanos = timer.read();
const seconds = (@intToFloat(f64, nanos) / 1_000_000_000.0);
for (result) |x, i| {
if (i == 0) {
@ -128,9 +125,8 @@ pub export fn main() u8 {
}
}
// TODO apparently the typestamps are still (partially) undefined?
// const delta = to_seconds(ts2) - to_seconds(ts1);
// stderr.print("runtime: {d:.3}ms\n", .{delta * 1000}) catch unreachable;
const stderr = std.io.getStdErr().writer();
stderr.print("runtime: {d:.3}ms\n", .{seconds * 1000}) catch unreachable;
return 0;
}

View file

@ -1,7 +1,7 @@
interface Base64 exposes [fromBytes, fromStr, toBytes, toStr] imports [Base64.Decode, Base64.Encode]
# base 64 encoding from a sequence of bytes
fromBytes : List U8 -> Result Str [InvalidInput]*
fromBytes : List U8 -> Result Str [InvalidInput]
fromBytes = \bytes ->
when Base64.Decode.fromBytes bytes is
Ok v ->
@ -11,16 +11,16 @@ fromBytes = \bytes ->
Err InvalidInput
# base 64 encoding from a string
fromStr : Str -> Result Str [InvalidInput]*
fromStr : Str -> Result Str [InvalidInput]
fromStr = \str ->
fromBytes (Str.toUtf8 str)
# base64-encode bytes to the original
toBytes : Str -> Result (List U8) [InvalidInput]*
toBytes : Str -> Result (List U8) [InvalidInput]
toBytes = \str ->
Ok (Base64.Encode.toBytes str)
toStr : Str -> Result Str [InvalidInput]*
toStr : Str -> Result Str [InvalidInput]
toStr = \str ->
when toBytes str is
Ok bytes ->

View file

@ -36,7 +36,7 @@ nestHelp = \{ s, f, m, x } ->
Expr : [Val I64, Var Str, Add Expr Expr, Mul Expr Expr, Pow Expr Expr, Ln Expr]
divmod : I64, I64 -> Result { div : I64, mod : I64 } [DivByZero]*
divmod : I64, I64 -> Result { div : I64, mod : I64 } [DivByZero]
divmod = \l, r ->
when Pair (Num.divTruncChecked l r) (Num.remChecked l r) is
Pair (Ok div) (Ok mod) -> Ok { div, mod }

View file

@ -1,5 +1,6 @@
const std = @import("std");
const str = @import("str");
const builtin = @import("builtin");
const RocStr = str.RocStr;
const testing = std.testing;
const expectEqual = testing.expectEqual;
@ -15,7 +16,6 @@ comptime {
// -fcompiler-rt in link.rs instead of doing this. Note that this
// workaround is present in many host.zig files, so make sure to undo
// it everywhere!
const builtin = @import("builtin");
if (builtin.os.tag == .macos) {
_ = @import("compiler_rt");
}
@ -210,7 +210,9 @@ fn roc_fx_getInt_help() !i64 {
const stdin = std.io.getStdIn().reader();
var buf: [40]u8 = undefined;
const line: []u8 = (try stdin.readUntilDelimiterOrEof(&buf, '\n')) orelse "";
// make sure to strip `\r` on windows
const raw_line: []u8 = (try stdin.readUntilDelimiterOrEof(&buf, '\n')) orelse "";
const line = std.mem.trimRight(u8, raw_line, &std.ascii.spaces);
return std.fmt.parseInt(i64, line, 10);
}

View file

@ -4,6 +4,7 @@ extern crate roc_load;
extern crate roc_module;
extern crate tempfile;
use roc_utils::cargo;
use roc_utils::root_dir;
use serde::Deserialize;
use serde_xml_rs::from_str;
@ -55,21 +56,20 @@ pub fn build_roc_bin_cached() -> PathBuf {
vec!["build", "--release", "--bin", "roc"]
};
let run_command = "cargo";
let mut cargo_cmd = cargo();
let output = Command::new(run_command)
.current_dir(root_project_dir)
.args(&args)
.output()
.unwrap();
cargo_cmd.current_dir(root_project_dir).args(&args);
if !output.status.success() {
let cargo_cmd_str = format!("{:?}", cargo_cmd);
let cargo_output = cargo_cmd.output().unwrap();
if !cargo_output.status.success() {
panic!(
"{} {} failed:\n\n stdout was:\n\n {}\n\n stderr was:\n\n {}\n",
run_command,
args.join(" "),
String::from_utf8(output.stdout).unwrap(),
String::from_utf8(output.stderr).unwrap()
"The following cargo command failed:\n\n {}\n\n stdout was:\n\n {}\n\n stderr was:\n\n {}\n",
cargo_cmd_str,
String::from_utf8(cargo_output.stdout).unwrap(),
String::from_utf8(cargo_output.stderr).unwrap()
);
}
}
@ -86,7 +86,7 @@ where
}
pub fn path_to_roc_binary() -> PathBuf {
path_to_binary("roc")
path_to_binary(if cfg!(windows) { "roc.exe" } else { "roc" })
}
pub fn path_to_binary(binary_name: &str) -> PathBuf {

View file

@ -1,2 +1,3 @@
//! Provides shared code for cli tests and benchmarks
pub mod bench_utils;
pub mod helpers;

View file

@ -4,21 +4,15 @@ use roc_builtins::bitcode;
use roc_error_macros::internal_error;
use roc_mono::ir::OptLevel;
use roc_utils::get_lib_path;
use roc_utils::{cargo, clang, zig};
use std::collections::HashMap;
use std::env;
use std::io;
use std::path::{Path, PathBuf};
use std::process::{self, Child, Command, Output};
use std::process::{self, Child, Command};
use target_lexicon::{Architecture, OperatingSystem, Triple};
use wasi_libc_sys::{WASI_COMPILER_RT_PATH, WASI_LIBC_PATH};
fn zig_executable() -> String {
match std::env::var("ROC_ZIG") {
Ok(path) => path,
Err(_) => "zig".into(),
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum LinkType {
// These numbers correspond to the --lib and --no-link flags
@ -113,9 +107,9 @@ pub fn build_zig_host_native(
target: &str,
opt_level: OptLevel,
shared_lib_path: Option<&Path>,
) -> Output {
let mut command = Command::new(&zig_executable());
command
) -> Command {
let mut zig_cmd = zig();
zig_cmd
.env_clear()
.env("PATH", env_path)
.env("HOME", env_home);
@ -130,7 +124,7 @@ pub fn build_zig_host_native(
bitcode::get_builtins_host_obj_path()
};
command.args(&[
zig_cmd.args([
"build-exe",
"-fPIE",
"-rdynamic", // make sure roc_alloc and friends are exposed
@ -138,12 +132,12 @@ pub fn build_zig_host_native(
&builtins_obj,
]);
} else {
command.args(&["build-obj", "-fPIC"]);
zig_cmd.args(["build-obj", "-fPIC"]);
}
command.args(&[
zig_cmd.args([
zig_host_src,
emit_bin,
&format!("-femit-bin={}", emit_bin),
"--pkg-begin",
"str",
zig_str_path,
@ -160,7 +154,7 @@ pub fn build_zig_host_native(
// when we use zig 0.9. It looks like zig 0.10 is going to fix
// this problem for us, so this is a temporary workaround
if !target.contains("windows") {
command.args(&[
zig_cmd.args([
// include the zig runtime
"-fcompiler-rt",
]);
@ -168,16 +162,16 @@ pub fn build_zig_host_native(
// valgrind does not yet support avx512 instructions, see #1963.
if env::var("NO_AVX512").is_ok() {
command.args(&["-mcpu", "x86_64"]);
zig_cmd.args(["-mcpu", "x86_64"]);
}
if matches!(opt_level, OptLevel::Optimize) {
command.args(&["-O", "ReleaseSafe"]);
zig_cmd.args(["-O", "ReleaseSafe"]);
} else if matches!(opt_level, OptLevel::Size) {
command.args(&["-O", "ReleaseSmall"]);
zig_cmd.args(["-O", "ReleaseSmall"]);
}
command.output().unwrap()
zig_cmd
}
#[cfg(windows)]
@ -191,27 +185,27 @@ pub fn build_zig_host_native(
target: &str,
opt_level: OptLevel,
shared_lib_path: Option<&Path>,
) -> Output {
let mut command = Command::new(&zig_executable());
command
) -> Command {
let mut zig_cmd = zig();
zig_cmd
.env_clear()
.env("PATH", env_path)
.env("HOME", env_home);
if let Some(shared_lib_path) = shared_lib_path {
command.args(&[
zig_cmd.args(&[
"build-exe",
"-fPIE",
// "-fPIE", PIE seems to fail on windows
shared_lib_path.to_str().unwrap(),
&bitcode::get_builtins_host_obj_path(),
&bitcode::get_builtins_windows_obj_path(),
]);
} else {
command.args(&["build-obj", "-fPIC"]);
zig_cmd.args(&["build-obj", "-fPIC"]);
}
command.args(&[
zig_cmd.args(&[
zig_host_src,
emit_bin,
&format!("-femit-bin={}", emit_bin),
"--pkg-begin",
"str",
zig_str_path,
@ -219,20 +213,20 @@ pub fn build_zig_host_native(
// include the zig runtime
// "-fcompiler-rt", compiler-rt causes segfaults on windows; investigate why
// include libc
"--library",
"c",
"-lc",
"-rdynamic",
// cross-compile?
"-target",
target,
]);
if matches!(opt_level, OptLevel::Optimize) {
command.args(&["-O", "ReleaseSafe"]);
zig_cmd.args(&["-O", "ReleaseSafe"]);
} else if matches!(opt_level, OptLevel::Size) {
command.args(&["-O", "ReleaseSmall"]);
zig_cmd.args(&["-O", "ReleaseSmall"]);
}
command.output().unwrap()
zig_cmd
}
#[cfg(target_os = "macos")]
@ -247,14 +241,11 @@ pub fn build_zig_host_native(
opt_level: OptLevel,
shared_lib_path: Option<&Path>,
// For compatibility with the non-macOS def above. Keep these in sync.
) -> Output {
) -> Command {
use serde_json::Value;
// Run `zig env` to find the location of zig's std/ directory
let zig_env_output = Command::new(&zig_executable())
.args(&["env"])
.output()
.unwrap();
let zig_env_output = zig().args(&["env"]).output().unwrap();
let zig_env_json = if zig_env_output.status.success() {
std::str::from_utf8(&zig_env_output.stdout).unwrap_or_else(|utf8_err| {
@ -291,24 +282,24 @@ pub fn build_zig_host_native(
zig_compiler_rt_path.push("special");
zig_compiler_rt_path.push("compiler_rt.zig");
let mut command = Command::new(&zig_executable());
command
let mut zig_cmd = zig();
zig_cmd
.env_clear()
.env("PATH", &env_path)
.env("HOME", &env_home);
if let Some(shared_lib_path) = shared_lib_path {
command.args(&[
zig_cmd.args(&[
"build-exe",
"-fPIE",
shared_lib_path.to_str().unwrap(),
&bitcode::get_builtins_host_obj_path(),
]);
} else {
command.args(&["build-obj", "-fPIC"]);
zig_cmd.args(&["build-obj", "-fPIC"]);
}
command.args(&[
zig_cmd.args(&[
zig_host_src,
emit_bin,
&format!("-femit-bin={}", emit_bin),
"--pkg-begin",
"str",
zig_str_path,
@ -323,11 +314,12 @@ pub fn build_zig_host_native(
"c",
]);
if matches!(opt_level, OptLevel::Optimize) {
command.args(&["-O", "ReleaseSafe"]);
zig_cmd.args(&["-O", "ReleaseSafe"]);
} else if matches!(opt_level, OptLevel::Size) {
command.args(&["-O", "ReleaseSmall"]);
zig_cmd.args(&["-O", "ReleaseSmall"]);
}
command.output().unwrap()
zig_cmd
}
pub fn build_zig_host_wasm32(
@ -338,7 +330,7 @@ pub fn build_zig_host_wasm32(
zig_str_path: &str,
opt_level: OptLevel,
shared_lib_path: Option<&Path>,
) -> Output {
) -> Command {
if shared_lib_path.is_some() {
unimplemented!("Linking a shared library to wasm not yet implemented");
}
@ -358,7 +350,7 @@ pub fn build_zig_host_wasm32(
// we'd like to compile with `-target wasm32-wasi` but that is blocked on
//
// https://github.com/ziglang/zig/issues/9414
let mut command = Command::new(&zig_executable());
let mut zig_cmd = zig();
let args = &[
"build-obj",
zig_host_src,
@ -379,18 +371,19 @@ pub fn build_zig_host_wasm32(
"--strip",
];
command
zig_cmd
.env_clear()
.env("PATH", env_path)
.env("HOME", env_home)
.args(args);
if matches!(opt_level, OptLevel::Optimize) {
command.args(&["-O", "ReleaseSafe"]);
zig_cmd.args(["-O", "ReleaseSafe"]);
} else if matches!(opt_level, OptLevel::Size) {
command.args(&["-O", "ReleaseSmall"]);
zig_cmd.args(["-O", "ReleaseSmall"]);
}
command.output().unwrap()
zig_cmd
}
#[allow(clippy::too_many_arguments)]
@ -403,15 +396,15 @@ pub fn build_c_host_native(
sources: &[&str],
opt_level: OptLevel,
shared_lib_path: Option<&Path>,
) -> Output {
let mut command = Command::new("clang");
command
) -> Command {
let mut clang_cmd = clang();
clang_cmd
.env_clear()
.env("PATH", &env_path)
.env("CPATH", &env_cpath)
.env("HOME", &env_home)
.env("PATH", env_path)
.env("CPATH", env_cpath)
.env("HOME", env_home)
.args(sources)
.args(&["-o", dest]);
.args(["-o", dest]);
if let Some(shared_lib_path) = shared_lib_path {
match target.operating_system {
OperatingSystem::Windows => {
@ -432,9 +425,14 @@ pub fn build_c_host_native(
);
}
_ => {
command.args(&[
clang_cmd.args([
shared_lib_path.to_str().unwrap(),
&bitcode::get_builtins_host_obj_path(),
// This line is commented out because
// @bhansconnect: With the addition of Str.graphemes, always
// linking the built-ins led to a surgical linker bug for
// optimized builds. Disabling until it is needed for dev
// builds.
// &bitcode::get_builtins_host_obj_path(),
"-fPIE",
"-pie",
"-lm",
@ -446,14 +444,15 @@ pub fn build_c_host_native(
}
}
} else {
command.args(&["-fPIC", "-c"]);
clang_cmd.args(["-fPIC", "-c"]);
}
if matches!(opt_level, OptLevel::Optimize) {
command.arg("-O3");
clang_cmd.arg("-O3");
} else if matches!(opt_level, OptLevel::Size) {
command.arg("-Os");
clang_cmd.arg("-Os");
}
command.output().unwrap()
clang_cmd
}
#[allow(clippy::too_many_arguments)]
@ -466,7 +465,7 @@ pub fn build_swift_host_native(
shared_lib_path: Option<&Path>,
objc_header_path: Option<&str>,
arch: Architecture,
) -> Output {
) -> Command {
if shared_lib_path.is_some() {
unimplemented!("Linking a shared library to Swift not yet implemented");
}
@ -474,8 +473,8 @@ pub fn build_swift_host_native(
let mut command = Command::new("arch");
command
.env_clear()
.env("PATH", &env_path)
.env("HOME", &env_home);
.env("PATH", env_path)
.env("HOME", env_home);
match arch {
Architecture::Aarch64(_) => command.arg("-arm64"),
@ -488,10 +487,10 @@ pub fn build_swift_host_native(
.args(sources)
.arg("-emit-object")
.arg("-parse-as-library")
.args(&["-o", dest]);
.args(["-o", dest]);
if let Some(objc_header) = objc_header_path {
command.args(&["-import-objc-header", objc_header]);
command.args(["-import-objc-header", objc_header]);
}
if matches!(opt_level, OptLevel::Optimize) {
@ -500,7 +499,7 @@ pub fn build_swift_host_native(
command.arg("-Osize");
}
command.output().unwrap()
command
}
pub fn rebuild_host(
@ -518,36 +517,33 @@ pub fn rebuild_host(
let swift_host_src = host_input_path.with_file_name("host.swift");
let swift_host_header_src = host_input_path.with_file_name("host.h");
let os = roc_target::OperatingSystem::from(target.operating_system);
let executable_extension = match os {
roc_target::OperatingSystem::Windows => "exe",
roc_target::OperatingSystem::Unix => "",
roc_target::OperatingSystem::Wasi => "",
};
let object_extension = match os {
roc_target::OperatingSystem::Windows => "obj",
roc_target::OperatingSystem::Unix => "o",
roc_target::OperatingSystem::Wasi => "o",
};
let host_dest = if matches!(target.architecture, Architecture::Wasm32) {
if matches!(opt_level, OptLevel::Development) {
host_input_path.with_file_name("host.o")
} else {
host_input_path.with_file_name("host.bc")
}
} else if shared_lib_path.is_some() {
host_input_path
.with_file_name("dynhost")
.with_extension(executable_extension)
} else {
let os = roc_target::OperatingSystem::from(target.operating_system);
if shared_lib_path.is_some() {
let extension = match os {
roc_target::OperatingSystem::Windows => "exe",
roc_target::OperatingSystem::Unix => "",
roc_target::OperatingSystem::Wasi => "",
};
host_input_path
.with_file_name("dynhost")
.with_extension(extension)
} else {
let extension = match os {
roc_target::OperatingSystem::Windows => "obj",
roc_target::OperatingSystem::Unix => "o",
roc_target::OperatingSystem::Wasi => "o",
};
host_input_path
.with_file_name("host")
.with_extension(extension)
}
host_input_path
.with_file_name("host")
.with_extension(object_extension)
};
let env_path = env::var("PATH").unwrap_or_else(|_| "".to_string());
@ -565,7 +561,7 @@ pub fn rebuild_host(
&zig_str_path
);
let output = match target.architecture {
let zig_cmd = match target.architecture {
Architecture::Wasm32 => {
let emit_bin = if matches!(opt_level, OptLevel::Development) {
format!("-femit-bin={}", host_dest.to_str().unwrap())
@ -583,8 +579,6 @@ pub fn rebuild_host(
)
}
Architecture::X86_64 => {
let emit_bin = format!("-femit-bin={}", host_dest.to_str().unwrap());
let target = match target.operating_system {
OperatingSystem::Windows => "x86_64-windows-gnu",
_ => "native",
@ -593,7 +587,7 @@ pub fn rebuild_host(
build_zig_host_native(
&env_path,
&env_home,
&emit_bin,
host_dest.to_str().unwrap(),
zig_host_src.to_str().unwrap(),
zig_str_path.to_str().unwrap(),
target,
@ -601,37 +595,31 @@ pub fn rebuild_host(
shared_lib_path,
)
}
Architecture::X86_32(_) => {
let emit_bin = format!("-femit-bin={}", host_dest.to_str().unwrap());
build_zig_host_native(
&env_path,
&env_home,
&emit_bin,
zig_host_src.to_str().unwrap(),
zig_str_path.to_str().unwrap(),
"i386-linux-musl",
opt_level,
shared_lib_path,
)
}
Architecture::X86_32(_) => build_zig_host_native(
&env_path,
&env_home,
host_dest.to_str().unwrap(),
zig_host_src.to_str().unwrap(),
zig_str_path.to_str().unwrap(),
"i386-linux-musl",
opt_level,
shared_lib_path,
),
Architecture::Aarch64(_) => {
let emit_bin = format!("-femit-bin={}", host_dest.to_str().unwrap());
build_zig_host_native(
&env_path,
&env_home,
&emit_bin,
zig_host_src.to_str().unwrap(),
zig_str_path.to_str().unwrap(),
target_zig_str(target),
opt_level,
shared_lib_path,
)
}
Architecture::Aarch64(_) => build_zig_host_native(
&env_path,
&env_home,
host_dest.to_str().unwrap(),
zig_host_src.to_str().unwrap(),
zig_str_path.to_str().unwrap(),
target_zig_str(target),
opt_level,
shared_lib_path,
),
_ => internal_error!("Unsupported architecture {:?}", target.architecture),
};
validate_output("host.zig", &zig_executable(), output)
run_build_command(zig_cmd, "host.zig")
} else if cargo_host_src.exists() {
// Compile and link Cargo.toml, if it exists
let cargo_dir = host_input_path.parent().unwrap();
@ -644,31 +632,33 @@ pub fn rebuild_host(
},
);
let mut command = Command::new("cargo");
command.arg("build").current_dir(cargo_dir);
let mut cargo_cmd = cargo();
cargo_cmd.arg("build").current_dir(cargo_dir);
// Rust doesn't expose size without editing the cargo.toml. Instead just use release.
if matches!(opt_level, OptLevel::Optimize | OptLevel::Size) {
command.arg("--release");
cargo_cmd.arg("--release");
}
let source_file = if shared_lib_path.is_some() {
command.env("RUSTFLAGS", "-C link-dead-code");
command.args(&["--bin", "host"]);
cargo_cmd.env("RUSTFLAGS", "-C link-dead-code");
cargo_cmd.args(["--bin", "host"]);
"src/main.rs"
} else {
command.arg("--lib");
cargo_cmd.arg("--lib");
"src/lib.rs"
};
let output = command.output().unwrap();
validate_output(source_file, "cargo build", output);
run_build_command(cargo_cmd, source_file);
if shared_lib_path.is_some() {
// For surgical linking, just copy the dynamically linked rust app.
std::fs::copy(cargo_out_dir.join("host"), &host_dest).unwrap();
let mut exe_path = cargo_out_dir.join("host");
exe_path.set_extension(executable_extension);
std::fs::copy(&exe_path, &host_dest).unwrap();
} else {
// Cargo hosts depend on a c wrapper for the api. Compile host.c as well.
let output = build_c_host_native(
let clang_cmd = build_c_host_native(
target,
&env_path,
&env_home,
@ -678,23 +668,22 @@ pub fn rebuild_host(
opt_level,
shared_lib_path,
);
validate_output("host.c", "clang", output);
let output = Command::new("ld")
.env_clear()
.env("PATH", &env_path)
.args(&[
"-r",
"-L",
cargo_out_dir.to_str().unwrap(),
c_host_dest.to_str().unwrap(),
"-lhost",
"-o",
host_dest.to_str().unwrap(),
])
.output()
.unwrap();
validate_output("c_host.o", "ld", output);
run_build_command(clang_cmd, "host.c");
let mut ld_cmd = Command::new("ld");
ld_cmd.env_clear().env("PATH", &env_path).args([
"-r",
"-L",
cargo_out_dir.to_str().unwrap(),
c_host_dest.to_str().unwrap(),
"-lhost",
"-o",
host_dest.to_str().unwrap(),
]);
run_build_command(ld_cmd, "c_host.o");
// Clean up c_host.o
if c_host_dest.exists() {
@ -703,25 +692,24 @@ pub fn rebuild_host(
}
} else if rust_host_src.exists() {
// Compile and link host.rs, if it exists
let mut command = Command::new("rustc");
command.args(&[
let mut rustc_cmd = Command::new("rustc");
rustc_cmd.args([
rust_host_src.to_str().unwrap(),
"-o",
rust_host_dest.to_str().unwrap(),
]);
if matches!(opt_level, OptLevel::Optimize) {
command.arg("-O");
rustc_cmd.arg("-O");
} else if matches!(opt_level, OptLevel::Size) {
command.arg("-C opt-level=s");
rustc_cmd.arg("-C opt-level=s");
}
let output = command.output().unwrap();
validate_output("host.rs", "rustc", output);
run_build_command(rustc_cmd, "host.rs");
// Rust hosts depend on a c wrapper for the api. Compile host.c as well.
if shared_lib_path.is_some() {
// If compiling to executable, let c deal with linking as well.
let output = build_c_host_native(
let clang_cmd = build_c_host_native(
target,
&env_path,
&env_home,
@ -734,9 +722,9 @@ pub fn rebuild_host(
opt_level,
shared_lib_path,
);
validate_output("host.c", "clang", output);
run_build_command(clang_cmd, "host.c");
} else {
let output = build_c_host_native(
let clang_cmd = build_c_host_native(
target,
&env_path,
&env_home,
@ -747,21 +735,19 @@ pub fn rebuild_host(
shared_lib_path,
);
validate_output("host.c", "clang", output);
let output = Command::new("ld")
.env_clear()
.env("PATH", &env_path)
.args(&[
"-r",
c_host_dest.to_str().unwrap(),
rust_host_dest.to_str().unwrap(),
"-o",
host_dest.to_str().unwrap(),
])
.output()
.unwrap();
run_build_command(clang_cmd, "host.c");
validate_output("rust_host.o", "ld", output);
let mut ld_cmd = Command::new("ld");
ld_cmd.env_clear().env("PATH", &env_path).args([
"-r",
c_host_dest.to_str().unwrap(),
rust_host_dest.to_str().unwrap(),
"-o",
host_dest.to_str().unwrap(),
]);
run_build_command(ld_cmd, "rust_host.o");
}
// Clean up rust_host.o and c_host.o
@ -773,7 +759,7 @@ pub fn rebuild_host(
}
} else if c_host_src.exists() {
// Compile host.c, if it exists
let output = build_c_host_native(
let clang_cmd = build_c_host_native(
target,
&env_path,
&env_home,
@ -783,10 +769,11 @@ pub fn rebuild_host(
opt_level,
shared_lib_path,
);
validate_output("host.c", "clang", output);
run_build_command(clang_cmd, "host.c");
} else if swift_host_src.exists() {
// Compile host.swift, if it exists
let output = build_swift_host_native(
let swiftc_cmd = build_swift_host_native(
&env_path,
&env_home,
host_dest.to_str().unwrap(),
@ -798,7 +785,8 @@ pub fn rebuild_host(
.then(|| swift_host_header_src.to_str().unwrap()),
target.architecture,
);
validate_output("host.swift", "swiftc", output);
run_build_command(swiftc_cmd, "host.swift");
}
host_dest
@ -867,10 +855,10 @@ fn link_linux(
if let Architecture::X86_32(_) = target.architecture {
return Ok((
Command::new(&zig_executable())
.args(&["build-exe"])
zig()
.args(["build-exe"])
.args(input_paths)
.args(&[
.args([
"-target",
"i386-linux-musl",
"-lc",
@ -1023,7 +1011,7 @@ fn link_linux(
.filter(|&(ref k, _)| k.starts_with("NIX_"))
.collect::<HashMap<String, String>>(),
)
.args(&[
.args([
"--gc-sections",
"--eh-frame-hdr",
"-A",
@ -1033,11 +1021,11 @@ fn link_linux(
&*crtn_path.to_string_lossy(),
])
.args(&base_args)
.args(&["-dynamic-linker", ld_linux])
.args(["-dynamic-linker", ld_linux])
.args(input_paths)
// ld.lld requires this argument, and does not accept --arch
// .args(&["-L/usr/lib/x86_64-linux-gnu"])
.args(&[
.args([
// Libraries - see https://github.com/roc-lang/roc/pull/554#discussion_r496365925
// for discussion and further references
"-lc",
@ -1088,7 +1076,7 @@ fn link_macos(
// The `-l` flags should go after the `.o` arguments
// Don't allow LD_ env vars to affect this
.env_clear()
.args(&[
.args([
// NOTE: we don't do --gc-sections on macOS because the default
// macOS linker doesn't support it, but it's a performance
// optimization, so if we ever switch to a different linker,
@ -1120,7 +1108,7 @@ fn link_macos(
ld_command.arg(roc_link_flag);
}
ld_command.args(&[
ld_command.args([
// Libraries - see https://github.com/roc-lang/roc/pull/554#discussion_r496392274
// for discussion and further references
"-lSystem",
@ -1160,7 +1148,7 @@ fn link_macos(
Architecture::Aarch64(_) => {
ld_child.wait()?;
let codesign_child = Command::new("codesign")
.args(&["-s", "-", output_path.to_str().unwrap()])
.args(["-s", "-", output_path.to_str().unwrap()])
.spawn()?;
Ok((codesign_child, output_path))
@ -1196,10 +1184,10 @@ fn link_wasm32(
let zig_str_path = find_zig_str_path();
let wasi_libc_path = find_wasi_libc_path();
let child = Command::new(&zig_executable())
let child = zig()
// .env_clear()
// .env("PATH", &env_path)
.args(&["build-exe"])
.args(["build-exe"])
.args(input_paths)
.args([
// include wasi libc
@ -1233,8 +1221,8 @@ fn link_windows(
match link_type {
LinkType::Dylib => {
let child = Command::new(&zig_executable())
.args(&["build-lib"])
let child = zig()
.args(["build-lib"])
.args(input_paths)
.args([
"-lc",
@ -1255,8 +1243,8 @@ fn link_windows(
Ok((child, output_path))
}
LinkType::Executable => {
let child = Command::new(&zig_executable())
.args(&["build-exe"])
let child = zig()
.args(["build-exe"])
.args(input_paths)
.args([
"-target",
@ -1343,7 +1331,7 @@ pub fn preprocess_host_wasm32(host_input_path: &Path, preprocessed_host_path: &P
(but seems to be an unofficial API)
*/
let mut command = Command::new(&zig_executable());
let mut zig_cmd = zig();
let args = &[
"wasm-ld",
&bitcode::get_builtins_wasm32_obj_path(),
@ -1358,28 +1346,30 @@ pub fn preprocess_host_wasm32(host_input_path: &Path, preprocessed_host_path: &P
"--relocatable",
];
command.args(args);
zig_cmd.args(args);
// println!("\npreprocess_host_wasm32");
// println!("zig {}\n", args.join(" "));
let output = command.output().unwrap();
validate_output(output_file, "zig", output)
run_build_command(zig_cmd, output_file)
}
fn validate_output(file_name: &str, cmd_name: &str, output: Output) {
if !output.status.success() {
match std::str::from_utf8(&output.stderr) {
fn run_build_command(mut command: Command, file_to_build: &str) {
let cmd_str = format!("{:?}", &command);
let cmd_output = command.output().unwrap();
if !cmd_output.status.success() {
match std::str::from_utf8(&cmd_output.stderr) {
Ok(stderr) => internal_error!(
"Failed to rebuild {} - stderr of the `{}` command was:\n{}",
file_name,
cmd_name,
"Error:\n Failed to rebuild {}:\n The executed command was:\n {}\n stderr of that command:\n {}",
file_to_build,
cmd_str,
stderr
),
Err(utf8_err) => internal_error!(
"Failed to rebuild {} - stderr of the `{}` command was invalid utf8 ({:?})",
file_name,
cmd_name,
"Error:\n Failed to rebuild {}:\n The executed command was:\n {}\n stderr of that command could not be parsed as valid utf8:\n {}",
file_to_build,
cmd_str,
utf8_err
),
}

View file

@ -347,7 +347,7 @@ fn gen_from_mono_module_llvm(
// run the debugir https://github.com/vaivaswatha/debugir tool
match Command::new("debugir")
.args(&["-instnamer", app_ll_file.to_str().unwrap()])
.args(["-instnamer", app_ll_file.to_str().unwrap()])
.output()
{
Ok(_) => {}
@ -369,7 +369,7 @@ fn gen_from_mono_module_llvm(
| Architecture::Aarch64(_)
| Architecture::Wasm32 => {
let ll_to_bc = Command::new("llvm-as")
.args(&[
.args([
app_ll_dbg_file.to_str().unwrap(),
"-o",
app_bc_file.to_str().unwrap(),

View file

@ -16,6 +16,7 @@ lazy_static = "1.4.0"
[build-dependencies]
# dunce can be removed once ziglang/zig#5109 is fixed
dunce = "1.0.3"
roc_utils = { path = "../../utils" }
[target.'cfg(target_os = "macos")'.build-dependencies]
tempfile = "3.2.0"

View file

@ -1,5 +1,6 @@
#!/usr/bin/env bash
# https://vaneyckt.io/posts/safer_bash_scripts_with_set_euxo_pipefail/
set -euxo pipefail
zig build-exe benchmark/dec.zig -O ReleaseFast --main-pkg-path .

View file

@ -745,37 +745,54 @@ pub fn listConcat(list_a: RocList, list_b: RocList, alignment: u32, element_widt
} else if (list_a.isUnique()) {
const total_length: usize = list_a.len() + list_b.len();
if (list_a.bytes) |source| {
const new_source = if (list_a.capacity >= total_length)
source
else
utils.unsafeReallocate(
source,
alignment,
list_a.len(),
total_length,
element_width,
);
const resized_list_a = list_a.reallocate(alignment, total_length, element_width);
if (list_b.bytes) |source_b| {
@memcpy(new_source + list_a.len() * element_width, source_b, list_b.len() * element_width);
}
// These must exist, otherwise, the lists would have been empty.
const source_a = resized_list_a.bytes orelse unreachable;
const source_b = list_b.bytes orelse unreachable;
@memcpy(source_a + list_a.len() * element_width, source_b, list_b.len() * element_width);
return RocList{ .bytes = new_source, .length = total_length, .capacity = total_length };
}
// decrement list b.
utils.decref(source_b, list_b.len(), alignment);
return resized_list_a;
} else if (list_b.isUnique()) {
const total_length: usize = list_a.len() + list_b.len();
const resized_list_b = list_b.reallocate(alignment, total_length, element_width);
// These must exist, otherwise, the lists would have been empty.
const source_a = list_a.bytes orelse unreachable;
const source_b = resized_list_b.bytes orelse unreachable;
// This is a bit special, we need to first copy the elements of list_b to the end,
// then copy the elements of list_a to the beginning.
// This first call must use mem.copy because the slices might overlap.
const byte_count_a = list_a.len() * element_width;
const byte_count_b = list_b.len() * element_width;
mem.copy(u8, source_b[byte_count_a .. byte_count_a + byte_count_b], source_b[0..byte_count_b]);
@memcpy(source_b, source_a, byte_count_a);
// decrement list a.
utils.decref(source_a, list_a.len(), alignment);
return resized_list_b;
}
const total_length: usize = list_a.len() + list_b.len();
const output = RocList.allocate(alignment, total_length, element_width);
if (output.bytes) |target| {
if (list_a.bytes) |source| {
@memcpy(target, source, list_a.len() * element_width);
}
if (list_b.bytes) |source| {
@memcpy(target + list_a.len() * element_width, source, list_b.len() * element_width);
}
}
// These must exist, otherwise, the lists would have been empty.
const target = output.bytes orelse unreachable;
const source_a = list_a.bytes orelse unreachable;
const source_b = list_b.bytes orelse unreachable;
@memcpy(target, source_a, list_a.len() * element_width);
@memcpy(target + list_a.len() * element_width, source_b, list_b.len() * element_width);
// decrement list a and b.
utils.decref(source_a, list_a.len(), alignment);
utils.decref(source_b, list_b.len(), alignment);
return output;
}

View file

@ -145,6 +145,7 @@ comptime {
exportStrFn(str.strTrimRight, "trim_right");
exportStrFn(str.strCloneTo, "clone_to");
exportStrFn(str.withCapacity, "with_capacity");
exportStrFn(str.strGraphemes, "graphemes");
inline for (INTEGERS) |T| {
str.exportFromInt(T, ROC_BUILTINS ++ "." ++ STR ++ ".from_int.");

View file

@ -1,5 +1,6 @@
const utils = @import("utils.zig");
const RocList = @import("list.zig").RocList;
const grapheme = @import("helpers/grapheme.zig");
const UpdateMode = utils.UpdateMode;
const std = @import("std");
const mem = std.mem;
@ -1212,7 +1213,6 @@ test "countSegments: string equals delimiter" {
}
// Str.countGraphemeClusters
const grapheme = @import("helpers/grapheme.zig");
pub fn countGraphemeClusters(string: RocStr) callconv(.C) usize {
if (string.isEmpty()) {
return 0;
@ -1248,59 +1248,76 @@ pub fn countGraphemeClusters(string: RocStr) callconv(.C) usize {
return count;
}
test "countGraphemeClusters: empty string" {
const count = countGraphemeClusters(RocStr.empty());
try expectEqual(count, 0);
// Str.graphemes
pub fn strGraphemes(roc_str: RocStr) callconv(.C) RocList {
var break_state: ?grapheme.BoundClass = null;
var opt_last_codepoint: ?u21 = null;
var index: usize = 0;
var last_codepoint_len: u8 = 0;
var result = RocList.allocate(@alignOf(RocStr), countGraphemeClusters(roc_str), @sizeOf(RocStr));
const graphemes = result.elements(RocStr) orelse return result;
var slice = roc_str.asSlice();
var iter = (unicode.Utf8View.init(slice) catch unreachable).iterator();
while (iter.nextCodepoint()) |cur_codepoint| {
const cur_codepoint_len = unicode.utf8CodepointSequenceLength(cur_codepoint) catch unreachable;
if (opt_last_codepoint) |last_codepoint| {
var did_break = grapheme.isGraphemeBreak(last_codepoint, cur_codepoint, &break_state);
if (did_break) {
graphemes[index] = RocStr.fromSlice(slice[0..last_codepoint_len]);
slice = slice[last_codepoint_len..];
index += 1;
break_state = null;
last_codepoint_len = 0;
}
}
last_codepoint_len += cur_codepoint_len;
opt_last_codepoint = cur_codepoint;
}
// Append last grapheme
graphemes[index] = RocStr.fromSlice(slice);
return result;
}
test "countGraphemeClusters: ascii characters" {
const bytes_arr = "abcd";
const bytes_len = bytes_arr.len;
const str = RocStr.init(bytes_arr, bytes_len);
defer str.deinit();
// these test both countGraphemeClusters() and strGraphemes()
fn graphemesTest(input: []const u8, expected: []const []const u8) !void {
const rocstr = RocStr.fromSlice(input);
defer rocstr.deinit();
const count = countGraphemeClusters(rocstr);
try expectEqual(expected.len, count);
const count = countGraphemeClusters(str);
try expectEqual(count, 4);
const graphemes = strGraphemes(rocstr);
defer graphemes.deinit(u8);
if (input.len == 0) return; // empty string
const elems = graphemes.elements(RocStr) orelse unreachable;
for (expected) |g, i| {
try std.testing.expectEqualStrings(g, elems[i].asSlice());
}
}
test "countGraphemeClusters: utf8 characters" {
const bytes_arr = "ãxā";
const bytes_len = bytes_arr.len;
const str = RocStr.init(bytes_arr, bytes_len);
defer str.deinit();
const count = countGraphemeClusters(str);
try expectEqual(count, 3);
test "graphemes: empty string" {
try graphemesTest("", &.{});
}
test "countGraphemeClusters: emojis" {
const bytes_arr = "🤔🤔🤔";
const bytes_len = bytes_arr.len;
const str = RocStr.init(bytes_arr, bytes_len);
defer str.deinit();
const count = countGraphemeClusters(str);
try expectEqual(count, 3);
test "graphemes: ascii characters" {
try graphemesTest("abcd", &.{ "a", "b", "c", "d" });
}
test "countGraphemeClusters: emojis and ut8 characters" {
const bytes_arr = "🤔å🤔¥🤔ç";
const bytes_len = bytes_arr.len;
const str = RocStr.init(bytes_arr, bytes_len);
defer str.deinit();
const count = countGraphemeClusters(str);
try expectEqual(count, 6);
test "graphemes: utf8 characters" {
try graphemesTest("ãxā", &.{ "ã", "x", "ā" });
}
test "countGraphemeClusters: emojis, ut8, and ascii characters" {
const bytes_arr = "6🤔å🤔e¥🤔çpp";
const bytes_len = bytes_arr.len;
const str = RocStr.init(bytes_arr, bytes_len);
defer str.deinit();
test "graphemes: emojis" {
try graphemesTest("🤔🤔🤔", &.{ "🤔", "🤔", "🤔" });
}
const count = countGraphemeClusters(str);
try expectEqual(count, 10);
test "graphemes: emojis and ut8 characters" {
try graphemesTest("🤔å🤔¥🤔ç", &.{ "🤔", "å", "🤔", "¥", "🤔", "ç" });
}
test "graphemes: emojis, ut8, and ascii characters" {
try graphemesTest("6🤔å🤔e¥🤔çpp", &.{ "6", "🤔", "å", "🤔", "e", "¥", "🤔", "ç", "p", "p" });
}
pub fn countUtf8Bytes(string: RocStr) callconv(.C) usize {

View file

@ -1,6 +1,5 @@
use std::convert::AsRef;
use roc_utils::zig;
use std::env;
use std::ffi::OsStr;
use std::fs;
use std::io;
use std::path::Path;
@ -14,13 +13,6 @@ use tempfile::tempdir;
/// To debug the zig code with debug prints, we need to disable the wasm code gen
const DEBUG: bool = false;
fn zig_executable() -> String {
match std::env::var("ROC_ZIG") {
Ok(path) => path,
Err(_) => "zig".into(),
}
}
fn main() {
println!("cargo:rerun-if-changed=build.rs");
@ -95,12 +87,13 @@ fn generate_object_file(bitcode_path: &Path, zig_object: &str, object_file_name:
println!("Compiling zig object `{}` to: {}", zig_object, src_obj);
if !DEBUG {
run_command(
&bitcode_path,
&zig_executable(),
&["build", zig_object, "-Drelease=true"],
0,
);
let mut zig_cmd = zig();
zig_cmd
.current_dir(bitcode_path)
.args(["build", zig_object, "-Drelease=true"]);
run_command(zig_cmd, 0);
println!("Moving zig object `{}` to: {}", zig_object, dest_obj);
@ -130,12 +123,13 @@ fn generate_bc_file(bitcode_path: &Path, zig_object: &str, file_name: &str) {
#[cfg(target_os = "macos")]
let _ = fs::remove_dir_all("./bitcode/zig-cache");
run_command(
&bitcode_path,
&zig_executable(),
&["build", zig_object, "-Drelease=true"],
0,
);
let mut zig_cmd = zig();
zig_cmd
.current_dir(bitcode_path)
.args(["build", zig_object, "-Drelease=true"]);
run_command(zig_cmd, 0);
}
pub fn get_lib_dir() -> PathBuf {
@ -174,7 +168,7 @@ fn copy_zig_builtins_to_target_dir(bitcode_path: &Path) {
// recursively copy all the .zig files from this directory, but do *not* recurse into zig-cache/
fn cp_unless_zig_cache(src_dir: &Path, target_dir: &Path) -> io::Result<()> {
// Make sure the destination directory exists before we try to copy anything into it.
std::fs::create_dir_all(&target_dir).unwrap_or_else(|err| {
std::fs::create_dir_all(target_dir).unwrap_or_else(|err| {
panic!(
"Failed to create output library directory for zig bitcode {:?}: {:?}",
target_dir, err
@ -204,19 +198,10 @@ fn cp_unless_zig_cache(src_dir: &Path, target_dir: &Path) -> io::Result<()> {
Ok(())
}
fn run_command<S, I: Copy, P: AsRef<Path> + Copy>(
path: P,
command_str: &str,
args: I,
flaky_fail_counter: usize,
) where
I: IntoIterator<Item = S>,
S: AsRef<OsStr>,
{
let output_result = Command::new(OsStr::new(&command_str))
.current_dir(path)
.args(args)
.output();
fn run_command(mut command: Command, flaky_fail_counter: usize) {
let command_str = format!("{:?}", &command);
let output_result = command.output();
match output_result {
Ok(output) => match output.status.success() {
@ -227,14 +212,14 @@ fn run_command<S, I: Copy, P: AsRef<Path> + Copy>(
Err(_) => format!("Failed to run \"{}\"", command_str),
};
// flaky test error that only occurs sometimes inside MacOS ci run
// Flaky test errors that only occur sometimes on MacOS ci server.
if error_str.contains("FileNotFound")
|| error_str.contains("unable to save cached ZIR code")
{
if flaky_fail_counter == 10 {
panic!("{} failed 10 times in a row. The following error is unlikely to be a flaky error: {}", command_str, error_str);
} else {
run_command(path, command_str, args, flaky_fail_counter + 1)
run_command(command, flaky_fail_counter + 1)
}
} else {
panic!("{} failed: {}", command_str, error_str);

View file

@ -2,28 +2,34 @@ interface Bool
exposes [Bool, Eq, true, false, and, or, not, isEq, isNotEq]
imports []
## A type that can be compared for total equality.
## Defines a type that can be compared for total equality.
##
## Total equality means that all values of the type can be compared to each
## other, and two values `a`, `b` are identical if and only if `isEq a b` is
## `Bool.true`.
##
## Not all types support total equality. For example, an [F32] or [F64] can
## be a `NaN` ([not a number](https://en.wikipedia.org/wiki/NaN)), and the [IEEE-754](https://en.wikipedia.org/wiki/IEEE_754)
## floating point standard specifies that two `NaN`s are never equal to each other.
## Not all types support total equality. For example, [F32] and [F64] can
## be a `NaN` ([Not a Number](https://en.wikipedia.org/wiki/NaN)), and the
## [IEEE-754](https://en.wikipedia.org/wiki/IEEE_754) floating point standard
## specifies that two `NaN`s are not equal.
Eq has
## Returns `Bool.true` if the two values are equal, and `Bool.false` otherwise.
## Returns `Bool.true` if the input values are equal. This is
## equivalent to the logic
## [XNOR](https://en.wikipedia.org/wiki/Logical_equality) gate. The infix
## operator `==` can be used as shorthand for `Bool.isEq`.
##
## `a == b` is shorthand for `Bool.isEq a b`.
## **Note** that when `isEq` is determined by the Roc compiler, values are
## compared using structural equality. The rules for this are as follows:
##
## When `isEq` is derived by the Roc compiler, values are compared via
## structural equality. Structural equality works as follows:
##
## 1. Tags are equal if they have the same tag name, and also their contents (if any) are equal.
## 2. Records are equal if all their fields are equal.
## 3. Collections ([Str], [List], [Dict], and [Set]) are equal if they are the same length, and also all their corresponding elements are equal.
## 4. [Num](Num#Num) values are equal if their numbers are equal, with one exception: if both arguments to `isEq` are *NaN*, then `isEq` returns `Bool.false`. See `Num.isNaN` for more about *NaN*.
## 5. Functions can never be compared for structural equality. Roc cannot derive `isEq` for types that contain functions!
## 1. Tags are equal if their name and also contents are equal.
## 2. Records are equal if their fields are equal.
## 3. The collections [Str], [List], [Dict], and [Set] are equal iff they
## are the same length and their elements are equal.
## 4. [Num] values are equal if their numbers are equal. However, if both
## inputs are *NaN* then `isEq` returns `Bool.false`. Refer to `Num.isNaN`
## for more detail.
## 5. Functions cannot be compared for structural equality, therefore Roc
## cannot derive `isEq` for types that contain functions.
isEq : a, a -> Bool | a has Eq
Bool := [True, False] has [Eq { isEq: boolIsEq }]
@ -38,67 +44,67 @@ true = @Bool True
false : Bool
false = @Bool False
## Returns `Bool.true` when given `Bool.true` and `Bool.true`, and `Bool.false` when either argument is `Bool.false`.
## Returns `Bool.true` when both inputs are `Bool.true`. This is equivalent to
## the logic [AND](https://en.wikipedia.org/wiki/Logical_conjunction)
## gate. The infix operator `&&` can also be used as shorthand for
## `Bool.and`.
##
## `a && b` is shorthand for `Bool.and a b`
## expect (Bool.and Bool.true Bool.true) == Bool.true
## expect (Bool.true && Bool.true) == Bool.true
## expect (Bool.false && Bool.true) == Bool.false
## expect (Bool.true && Bool.false) == Bool.false
## expect (Bool.false && Bool.false) == Bool.false
##
## >>> Bool.true && Bool.true
##
## >>> Bool.true && Bool.false
##
## >>> Bool.false && Bool.true
##
## >>> Bool.false && Bool.false
##
## ## Performance Notes
##
## In some languages, `&&` and `||` are special-cased in the compiler to skip
## evaluating the expression after the operator under certain circumstances.
## For example, in some languages, `enablePets && likesDogs user` would compile
## to the equivalent of:
## **Performance Note** that in Roc the `&&` and `||` work the same way as any
## other function. However, in some languages `&&` and `||` are special-cased.
## In these languages the compiler will skip evaluating the expression after the
## first operator under certain circumstances. For example an expression like
## `enablePets && likesDogs user` would compile to.
##
## if enablePets then
## likesDogs user
## else
## Bool.false
##
## In Roc, however, `&&` and `||` are not special. They work the same way as
## other functions. Conditionals like `if` and `when` have a performance cost,
## and sometimes calling a function like `likesDogs user` can be faster across
## the board than doing an `if` to decide whether to skip calling it.
##
## (Naturally, if you expect the `if` to improve performance, you can always add
## one explicitly!)
## Roc does not do this because conditionals like `if` and `when` have a
## performance cost. Calling a function can sometimes be faster across the board
## than doing an `if` to decide whether to skip calling it.
and : Bool, Bool -> Bool
## Returns `Bool.true` when given `Bool.true` for either argument, and `Bool.false` only when given `Bool.false` and `Bool.false`.
## Returns `Bool.true` when either input is a `Bool.true`. This is equivalent to
## the logic [OR](https://en.wikipedia.org/wiki/Logical_disjunction) gate.
## The infix operator `||` can also be used as shorthand for `Bool.or`.
##
## `a || b` is shorthand for `Bool.or a b`.
## expect (Bool.or Bool.false Bool.true) == Bool.true
## expect (Bool.true || Bool.true) == Bool.true
## expect (Bool.false || Bool.true) == Bool.true
## expect (Bool.true || Bool.false) == Bool.true
## expect (Bool.false || Bool.false) == Bool.false
##
## >>> Bool.true || Bool.true
##
## >>> Bool.true || Bool.false
##
## >>> Bool.false || Bool.true
##
## >>> Bool.false || Bool.false
##
## ## Performance Notes
##
## In some languages, `&&` and `||` are special-cased in the compiler to skip
## evaluating the expression after the operator under certain circumstances.
## In Roc, this is not the case. See the performance notes for [Bool.and] for details.
## **Performance Note** that in Roc the `&&` and `||` work the same way as any
## other functions. However, in some languages `&&` and `||` are special-cased.
## Refer to the note in `Bool.and` for more detail.
or : Bool, Bool -> Bool
# xor : Bool, Bool -> Bool # currently unimplemented
## Returns `Bool.false` when given `Bool.true`, and vice versa.
## Returns `Bool.false` when given `Bool.true`, and vice versa. This is
## equivalent to the logic [NOT](https://en.wikipedia.org/wiki/Negation)
## gate. The operator `!` can also be used as shorthand for `Bool.not`.
##
## expect (Bool.not Bool.false) == Bool.true
## expect (!Bool.false) == Bool.true
not : Bool -> Bool
## Calls [isEq] on the given values, then calls [not] on the result.
## This will call the function `Bool.isEq` on the inputs, and then `Bool.not`
## on the result. The is equivalent to the logic
## [XOR](https://en.wikipedia.org/wiki/Exclusive_or) gate. The infix operator
## `!=` can also be used as shorthand for `Bool.isNotEq`.
##
## `a != b` is shorthand for `Bool.isNotEq a b`
## **Note** that `isNotEq` does not accept arguments whose types contain
## functions.
##
## Note that `isNotEq` takes `'val` instead of `val`, which means `isNotEq` does not
## accept arguments whose types contain functions.
## expect (Bool.isNotEq Bool.false Bool.true) == Bool.true
## expect (Bool.false != Bool.false) == Bool.false
## expect "Apples" != "Oranges"
isNotEq : a, a -> Bool | a has Eq
isNotEq = \a, b -> structuralNotEq a b

View file

@ -2,7 +2,17 @@ interface Box
exposes [box, unbox]
imports []
## Allocate a value on the heap. Boxing is an expensive process as it copies
## the value from the stack to the heap. This may provide a performance
## optimization for advanced use cases with large values. A platform may require
## that some values are boxed.
##
## expect Box.unbox (Box.box "Stack Faster") == "Stack Faster"
box : a -> Box a
## Returns a boxed value.
##
## expect Box.unbox (Box.box "Stack Faster") == "Stack Faster"
unbox : Box a -> a
# # we'd need reset/reuse for box for this to be efficient

View file

@ -27,6 +27,7 @@ interface Decode
decodeWith,
fromBytesPartial,
fromBytes,
mapResult,
]
imports [
List,
@ -96,3 +97,6 @@ fromBytes = \bytes, fmt ->
Err TooShort -> Err TooShort
else
Err (Leftover rest)
## Transform the `result` field of a `DecodeResult` with the given function,
## leaving the remaining undecoded bytes (`rest`) unchanged. If the decode
## failed, the error is passed through untouched (via [Result.map]).
mapResult : DecodeResult a, (a -> b) -> DecodeResult b
mapResult = \{ result, rest }, mapper -> { result: Result.map result mapper, rest }

View file

@ -26,15 +26,20 @@ interface Dict
Hash.{ Hasher },
]
## A [dictionary](https://en.wikipedia.org/wiki/Associative_array) that lets you can associate keys with values.
## A [dictionary](https://en.wikipedia.org/wiki/Associative_array) that lets you
## associate keys with values.
##
## ### Inserting
##
## The most basic way to use a dictionary is to start with an empty one and then:
## 1. Call [Dict.insert] passing a key and a value, to associate that key with that value in the dictionary.
## 2. Later, call [Dict.get] passing the same key as before, and it will return the value you stored.
## The most basic way to use a dictionary is to start with an empty one and
## then:
## 1. Call [Dict.insert] passing a key and a value, to associate that key with
## that value in the dictionary.
## 2. Later, call [Dict.get] passing the same key as before, and it will return
## the value you stored.
##
## Here's an example of a dictionary which uses a city's name as the key, and its population as the associated value.
## Here's an example of a dictionary which uses a city's name as the key, and
## its population as the associated value.
##
## populationByCity =
## Dict.empty
@ -46,11 +51,12 @@ interface Dict
##
## ### Accessing keys or values
##
## We can use [Dict.keys] and [Dict.values] functions to get only the keys or only the values.
## We can use [Dict.keys] and [Dict.values] functions to get only the keys or
## only the values.
##
## You may notice that these lists have the same order as the original insertion order. This will be true if
## all you ever do is [insert] and [get] operations on the dictionary, but [remove] operations can change this order.
## Let's see how that looks.
## You may notice that these lists have the same order as the original insertion
## order. This will be true if all you ever do is [Dict.insert] and [Dict.get] operations
## on the dictionary, but [Dict.remove] operations can change this order.
##
## ### Removing
##
@ -62,30 +68,44 @@ interface Dict
## ==
## ["London", "Amsterdam", "Shanghai", "Delhi"]
##
## Notice that the order changed! Philadelphia has been not only removed from the list, but Amsterdam - the last
## entry we inserted - has been moved into the spot where Philadelphia was previously. This is exactly what
## [Dict.remove] does: it removes an element and moves the most recent insertion into the vacated spot.
## Notice that the order has changed. Philadelphia was not only removed from the
## list, but Amsterdam - the last entry we inserted - has been moved into the
## spot where Philadelphia was previously. This is exactly what [Dict.remove]
## does. It removes an element and moves the most recent insertion into the
## vacated spot.
##
## This move is done as a performance optimization, and it lets [remove] have
## [constant time complexity](https://en.wikipedia.org/wiki/Time_complexity#Constant_time). ##
## This move is done as a performance optimization, and it lets [Dict.remove]
## have [constant time complexity](https://en.wikipedia.org/wiki/Time_complexity#Constant_time).
##
## ### Equality
##
## When comparing two dictionaries for equality, they are `==` only if their both their contents and their
## orderings match. This preserves the property that if `dict1 == dict2`, you should be able to rely on
## `fn dict1 == fn dict2` also being `Bool.true`, even if `fn` relies on the dictionary's ordering.
Dict k v := List [Pair k v] has [Eq { isEq: dictEq }]
## Two dictionaries are equal when their contents and orderings match. This
## means that when `dict1 == dict2`, the expression `fn dict1 == fn dict2` will
## also evaluate to `Bool.true`. The function `fn` can count on the ordering of
## values in the dictionary to also match.
Dict k v := List [Pair k v] has [Eq]
dictEq = \@Dict l1, @Dict l2 -> l1 == l2
## An empty dictionary.
## Return an empty dictionary.
empty : Dict k v
empty = @Dict []
## Return a dictionary with space allocated for a number of entries. This
## may provide a performance optimization if you know how many entries will be
## inserted.
withCapacity : Nat -> Dict k v
withCapacity = \n -> @Dict (List.withCapacity n)
get : Dict k v, k -> Result v [KeyNotFound]* | k has Eq
## Get the value for a given key. If there is a value for the specified key it
## will return [Ok value], otherwise return [Err KeyNotFound].
##
## dictionary =
## Dict.empty
## |> Dict.insert 1 "Apple"
## |> Dict.insert 2 "Orange"
##
## expect Dict.get dictionary 1 == Ok "Apple"
## expect Dict.get dictionary 2000 == Err KeyNotFound
get : Dict k v, k -> Result v [KeyNotFound] | k has Eq
get = \@Dict list, needle ->
when List.findFirst list (\Pair key _ -> key == needle) is
Ok (Pair _ v) ->
@ -94,10 +114,27 @@ get = \@Dict list, needle ->
Err NotFound ->
Err KeyNotFound
## Iterate through the keys and values in the dictionary and call the provided
## function with signature `state, k, v -> state` for each value, with an
## initial `state` value provided for the first call.
##
## Entries are visited in the order they are stored internally (insertion
## order, unless it has been altered by [Dict.remove]).
##
## expect
##     Dict.empty
##     |> Dict.insert "Apples" 12
##     |> Dict.insert "Orange" 24
##     |> Dict.walk 0 (\count, _, qty -> count + qty)
##     |> Bool.isEq 36
walk : Dict k v, state, (state, k, v -> state) -> state
walk = \@Dict list, initialState, transform ->
List.walk list initialState (\state, Pair k v -> transform state k v)
## Insert a value into the dictionary at a specified key.
##
## expect
## Dict.empty
## |> Dict.insert "Apples" 12
## |> Dict.get "Apples"
## |> Bool.isEq (Ok 12)
insert : Dict k v, k, v -> Dict k v | k has Eq
insert = \@Dict list, k, v ->
when List.findFirstIndex list (\Pair key _ -> key == k) is
@ -109,10 +146,27 @@ insert = \@Dict list, k, v ->
|> List.set index (Pair k v)
|> @Dict
## Returns the number of values in the dictionary.
##
## expect
## Dict.empty
## |> Dict.insert "One" "A Song"
## |> Dict.insert "Two" "Candy Canes"
## |> Dict.insert "Three" "Boughs of Holly"
## |> Dict.len
## |> Bool.isEq 3
len : Dict k v -> Nat
len = \@Dict list ->
List.len list
## Remove a value from the dictionary for a specified key.
##
## expect
## Dict.empty
## |> Dict.insert "Some" "Value"
## |> Dict.remove "Some"
## |> Dict.len
## |> Bool.isEq 0
remove : Dict k v, k -> Dict k v | k has Eq
remove = \@Dict list, key ->
when List.findFirstIndex list (\Pair k _ -> k == key) is
@ -127,7 +181,20 @@ remove = \@Dict list, key ->
|> List.dropLast
|> @Dict
## Insert or remove a value in a Dict based on its presence
## Insert or remove a value for a specified key. This function enables a
## performance optimization for the use case of providing a default when a value
## is missing. This is more efficient than doing both a `Dict.get` and then a
## `Dict.insert` call, and supports being piped.
##
## alterValue : [Present Bool, Missing] -> [Present Bool, Missing]
## alterValue = \possibleValue ->
## when possibleValue is
## Missing -> Present Bool.false
## Present value -> if value then Missing else Present Bool.true
##
## expect Dict.update Dict.empty "a" alterValue == Dict.single "a" Bool.false
## expect Dict.update (Dict.single "a" Bool.false) "a" alterValue == Dict.single "a" Bool.true
## expect Dict.update (Dict.single "a" Bool.true) "a" alterValue == Dict.empty
update : Dict k v, k, ([Present v, Missing] -> [Present v, Missing]) -> Dict k v | k has Eq
update = \dict, key, alter ->
possibleValue =
@ -139,56 +206,139 @@ update = \dict, key, alter ->
Present value -> insert dict key value
Missing -> remove dict key
## Internal for testing only
# Internal for testing only
alterValue : [Present Bool, Missing] -> [Present Bool, Missing]
alterValue = \possibleValue ->
when possibleValue is
Missing -> Present Bool.false
Present value if Bool.not value -> Present Bool.true
Present _ -> Missing
Present value -> if value then Missing else Present Bool.true
expect update empty "a" alterValue == single "a" Bool.false
expect update (single "a" Bool.false) "a" alterValue == single "a" Bool.true
expect update (single "a" Bool.true) "a" alterValue == empty
## Check if the dictionary has a value for a specified key.
##
## expect
## Dict.empty
## |> Dict.insert 1234 "5678"
## |> Dict.contains 1234
contains : Dict k v, k -> Bool | k has Eq
contains = \@Dict list, needle ->
step = \_, Pair key _val ->
if key == needle then
Break {}
else
Continue {}
List.any list \Pair key _val -> key == needle
when List.iterate list {} step is
Continue _ -> Bool.false
Break _ -> Bool.true
expect contains empty "a" == Bool.false
expect contains (single "a" {}) "a" == Bool.true
expect contains (single "b" {}) "a" == Bool.false
expect
Dict.empty
|> Dict.insert 1234 "5678"
|> Dict.contains 1234
|> Bool.isEq Bool.true
## Returns a dictionary containing the key and value provided as input.
##
## expect
## Dict.single "A" "B"
## |> Bool.isEq (Dict.insert Dict.empty "A" "B")
single : k, v -> Dict k v
single = \key, value ->
@Dict [Pair key value]
## Returns a [List] of the dictionary's keys.
## Returns the keys of a dictionary as a [List].
##
## expect
## Dict.single 1 "One"
## |> Dict.insert 2 "Two"
## |> Dict.insert 3 "Three"
## |> Dict.insert 4 "Four"
## |> Dict.keys
## |> Bool.isEq [1,2,3,4]
keys : Dict k v -> List k
keys = \@Dict list ->
List.map list (\Pair k _ -> k)
## Returns a [List] of the Dict's values
## Returns the values of a dictionary as a [List].
##
## expect
## Dict.single 1 "One"
## |> Dict.insert 2 "Two"
## |> Dict.insert 3 "Three"
## |> Dict.insert 4 "Four"
## |> Dict.values
## |> Bool.isEq ["One","Two","Three","Four"]
values : Dict k v -> List v
values = \@Dict list ->
List.map list (\Pair _ v -> v)
# union : Dict k v, Dict k v -> Dict k v
## Combine two dictionaries by keeping the [union](https://en.wikipedia.org/wiki/Union_(set_theory))
## of all the key-value pairs. This means that all the key-value pairs in
## both dictionaries will be combined. Note that where there are pairs
## with the same key, the value contained in the first input will be
## retained, and the value in the second input will be ignored.
##
## first =
##     Dict.single 1 "Keep Me"
##     |> Dict.insert 2 "And Me"
##
## second =
##     Dict.single 1 "Not Me"
##     |> Dict.insert 3 "Me Too"
##     |> Dict.insert 4 "And Also Me"
##
## expected =
##     Dict.single 1 "Keep Me"
##     |> Dict.insert 2 "And Me"
##     |> Dict.insert 3 "Me Too"
##     |> Dict.insert 4 "And Also Me"
##
## expect
##     Dict.insertAll first second == expected
insertAll : Dict k v, Dict k v -> Dict k v | k has Eq
insertAll = \xs, @Dict ys ->
List.walk ys xs (\state, Pair k v -> Dict.insertIfVacant state k v)
# intersection : Dict k v, Dict k v -> Dict k v
## Combine two dictionaries by keeping the [intersection](https://en.wikipedia.org/wiki/Intersection_(set_theory))
## of all the key-value pairs. This means that we keep only those pairs
## that are in both dictionaries. Note that where there are pairs with
## the same key, the value contained in the first input will be retained,
## and the value in the second input will be ignored.
##
## first =
##     Dict.single 1 "Keep Me"
##     |> Dict.insert 2 "And Me"
##
## second =
##     Dict.single 1 "Keep Me"
##     |> Dict.insert 2 "And Me"
##     |> Dict.insert 3 "But Not Me"
##     |> Dict.insert 4 "Or Me"
##
## expect Dict.keepShared first second == first
keepShared : Dict k v, Dict k v -> Dict k v | k has Eq
keepShared = \@Dict xs, ys ->
List.keepIf xs (\Pair k _ -> Dict.contains ys k)
|> @Dict
# difference : Dict k v, Dict k v -> Dict k v
## Remove the key-value pairs in the first input that are also in the second
## using the [set difference](https://en.wikipedia.org/wiki/Complement_(set_theory)#Relative_complement)
## of the values. This means that we will be left with only those pairs that
## are in the first dictionary and whose keys are not in the second.
##
## first =
## Dict.single 1 "Keep Me"
## |> Dict.insert 2 "And Me"
## |> Dict.insert 3 "Remove Me"
##
## second =
## Dict.single 3 "Remove Me"
## |> Dict.insert 4 "I do nothing..."
##
## expected =
## Dict.single 1 "Keep Me"
## |> Dict.insert 2 "And Me"
##
## expect Dict.removeAll first second == expected
removeAll : Dict k v, Dict k v -> Dict k v | k has Eq
removeAll = \xs, @Dict ys ->
List.walk ys xs (\state, Pair k _ -> Dict.remove state k)
@ -224,11 +374,6 @@ LowLevelHasher := { originalSeed : U64, state : U64 } has [
addU32,
addU64,
addU128,
addI8,
addI16,
addI32,
addI64,
addI128,
complete,
},
]
@ -250,17 +395,6 @@ combineState = \@LowLevelHasher { originalSeed, state }, { a, b, seed, length }
complete = \@LowLevelHasher { state } -> state
addI8 = \hasher, i8 ->
addU8 hasher (Num.toU8 i8)
addI16 = \hasher, i16 ->
addU16 hasher (Num.toU16 i16)
addI32 = \hasher, i32 ->
addU32 hasher (Num.toU32 i32)
addI64 = \hasher, i64 ->
addU64 hasher (Num.toU64 i64)
addI128 = \hasher, i128 ->
addU128 hasher (Num.toU128 i128)
# These implementations hash each value individually with the seed and then mix
# the resulting hash with the state. There are other options that may be faster
# like using the output of the last hash as the seed to the current hash.

View file

@ -9,11 +9,11 @@ interface Hash
addU32,
addU64,
addU128,
addI8,
addI16,
addI32,
addI64,
addI128,
hashI8,
hashI16,
hashI32,
hashI64,
hashI128,
complete,
hashStrBytes,
hashList,
@ -55,21 +55,6 @@ Hasher has
## Adds a single U128 to the hasher.
addU128 : a, U128 -> a | a has Hasher
## Adds a single I8 to the hasher.
addI8 : a, I8 -> a | a has Hasher
## Adds a single I16 to the hasher.
addI16 : a, I16 -> a | a has Hasher
## Adds a single I32 to the hasher.
addI32 : a, I32 -> a | a has Hasher
## Adds a single I64 to the hasher.
addI64 : a, I64 -> a | a has Hasher
## Adds a single I128 to the hasher.
addI128 : a, I128 -> a | a has Hasher
## Completes the hasher, extracting a hash value from its
## accumulated hash state.
complete : a -> U64 | a has Hasher
@ -83,6 +68,26 @@ hashList = \hasher, lst ->
List.walk lst hasher \accumHasher, elem ->
hash accumHasher elem
## Adds a single I8 to a hasher.
hashI8 : a, I8 -> a | a has Hasher
hashI8 = \hasher, n -> addU8 hasher (Num.toU8 n)
## Adds a single I16 to a hasher.
hashI16 : a, I16 -> a | a has Hasher
hashI16 = \hasher, n -> addU16 hasher (Num.toU16 n)
## Adds a single I32 to a hasher.
hashI32 : a, I32 -> a | a has Hasher
hashI32 = \hasher, n -> addU32 hasher (Num.toU32 n)
## Adds a single I64 to a hasher.
hashI64 : a, I64 -> a | a has Hasher
hashI64 = \hasher, n -> addU64 hasher (Num.toU64 n)
## Adds a single I128 to a hasher.
hashI128 : a, I128 -> a | a has Hasher
hashI128 = \hasher, n -> addU128 hasher (Num.toU128 n)
## Adds a container of [Hash]able elements to a [Hasher] by hashing each element.
## The container is iterated using the walk method passed in.
## The order of the elements does not affect the final hash.

View file

@ -8,7 +8,6 @@ interface List
map,
len,
withCapacity,
iterate,
walkBackwards,
concat,
first,
@ -219,7 +218,7 @@ isEmpty = \list ->
# but will cause a reference count increment on the value it got out of the list
getUnsafe : List a, Nat -> a
get : List a, Nat -> Result a [OutOfBounds]*
get : List a, Nat -> Result a [OutOfBounds]
get = \list, index ->
if index < List.len list then
Ok (List.getUnsafe list index)
@ -298,7 +297,7 @@ reserve : List a, Nat -> List a
concat : List a, List a -> List a
## Returns the last element in the list, or `ListWasEmpty` if it was empty.
last : List a -> Result a [ListWasEmpty]*
last : List a -> Result a [ListWasEmpty]
last = \list ->
when List.get list (Num.subSaturated (List.len list) 1) is
Ok v -> Ok v
@ -683,7 +682,7 @@ sortDesc = \list -> List.sortWith list (\a, b -> Num.compare b a)
swap : List a, Nat, Nat -> List a
## Returns the first element in the list, or `ListWasEmpty` if it was empty.
first : List a -> Result a [ListWasEmpty]*
first : List a -> Result a [ListWasEmpty]
first = \list ->
when List.get list 0 is
Ok v -> Ok v
@ -776,7 +775,7 @@ drop = \list, n ->
## To replace the element at a given index, instead of dropping it, see [List.set].
dropAt : List elem, Nat -> List elem
min : List (Num a) -> Result (Num a) [ListWasEmpty]*
min : List (Num a) -> Result (Num a) [ListWasEmpty]
min = \list ->
when List.first list is
Ok initial ->
@ -793,7 +792,7 @@ minHelp = \list, initial ->
else
bestSoFar
max : List (Num a) -> Result (Num a) [ListWasEmpty]*
max : List (Num a) -> Result (Num a) [ListWasEmpty]
max = \list ->
when List.first list is
Ok initial ->
@ -820,7 +819,7 @@ joinMap = \list, mapper ->
## Returns the first element of the list satisfying a predicate function.
## If no satisfying element is found, an `Err NotFound` is returned.
findFirst : List elem, (elem -> Bool) -> Result elem [NotFound]*
findFirst : List elem, (elem -> Bool) -> Result elem [NotFound]
findFirst = \list, pred ->
callback = \_, elem ->
if pred elem then
@ -834,7 +833,7 @@ findFirst = \list, pred ->
## Returns the last element of the list satisfying a predicate function.
## If no satisfying element is found, an `Err NotFound` is returned.
findLast : List elem, (elem -> Bool) -> Result elem [NotFound]*
findLast : List elem, (elem -> Bool) -> Result elem [NotFound]
findLast = \list, pred ->
callback = \_, elem ->
if pred elem then
@ -849,7 +848,7 @@ findLast = \list, pred ->
## Returns the index at which the first element in the list
## satisfying a predicate function can be found.
## If no satisfying element is found, an `Err NotFound` is returned.
findFirstIndex : List elem, (elem -> Bool) -> Result Nat [NotFound]*
findFirstIndex : List elem, (elem -> Bool) -> Result Nat [NotFound]
findFirstIndex = \list, matcher ->
foundIndex = List.iterate list 0 \index, elem ->
if matcher elem then
@ -864,7 +863,7 @@ findFirstIndex = \list, matcher ->
## Returns the last index at which the first element in the list
## satisfying a predicate function can be found.
## If no satisfying element is found, an `Err NotFound` is returned.
findLastIndex : List elem, (elem -> Bool) -> Result Nat [NotFound]*
findLastIndex : List elem, (elem -> Bool) -> Result Nat [NotFound]
findLastIndex = \list, matches ->
foundIndex = List.iterateBackwards list (List.len list) \prevIndex, elem ->
if matches elem then
@ -962,7 +961,7 @@ split = \elements, userSplitIndex ->
## remaining elements after that occurrence. If the delimiter is not found, returns `Err`.
##
## List.splitFirst [Foo, Z, Bar, Z, Baz] Z == Ok { before: [Foo], after: [Bar, Baz] }
splitFirst : List elem, elem -> Result { before : List elem, after : List elem } [NotFound]* | elem has Eq
splitFirst : List elem, elem -> Result { before : List elem, after : List elem } [NotFound] | elem has Eq
splitFirst = \list, delimiter ->
when List.findFirstIndex list (\elem -> elem == delimiter) is
Ok index ->
@ -977,7 +976,7 @@ splitFirst = \list, delimiter ->
## remaining elements after that occurrence. If the delimiter is not found, returns `Err`.
##
## List.splitLast [Foo, Z, Bar, Z, Baz] Z == Ok { before: [Foo, Bar], after: [Baz] }
splitLast : List elem, elem -> Result { before : List elem, after : List elem } [NotFound]* | elem has Eq
splitLast : List elem, elem -> Result { before : List elem, after : List elem } [NotFound] | elem has Eq
splitLast = \list, delimiter ->
when List.findLastIndex list (\elem -> elem == delimiter) is
Ok index ->

View file

@ -743,7 +743,7 @@ atan : Frac a -> Frac a
## >>> Num.sqrt -4.0f64
sqrt : Frac a -> Frac a
sqrtChecked : Frac a -> Result (Frac a) [SqrtOfNegative]*
sqrtChecked : Frac a -> Result (Frac a) [SqrtOfNegative]
sqrtChecked = \x ->
if x < 0.0 then
Err SqrtOfNegative
@ -752,7 +752,7 @@ sqrtChecked = \x ->
log : Frac a -> Frac a
logChecked : Frac a -> Result (Frac a) [LogNeedsPositive]*
logChecked : Frac a -> Result (Frac a) [LogNeedsPositive]
logChecked = \x ->
if x <= 0.0 then
Err LogNeedsPositive
@ -791,7 +791,7 @@ logChecked = \x ->
## >>> |> Num.div 2.0
div : Frac a, Frac a -> Frac a
divChecked : Frac a, Frac a -> Result (Frac a) [DivByZero]*
divChecked : Frac a, Frac a -> Result (Frac a) [DivByZero]
divChecked = \a, b ->
if Num.isZero b then
Err DivByZero
@ -800,7 +800,7 @@ divChecked = \a, b ->
divCeil : Int a, Int a -> Int a
divCeilChecked : Int a, Int a -> Result (Int a) [DivByZero]*
divCeilChecked : Int a, Int a -> Result (Int a) [DivByZero]
divCeilChecked = \a, b ->
if Num.isZero b then
Err DivByZero
@ -825,7 +825,7 @@ divCeilChecked = \a, b ->
##
divTrunc : Int a, Int a -> Int a
divTruncChecked : Int a, Int a -> Result (Int a) [DivByZero]*
divTruncChecked : Int a, Int a -> Result (Int a) [DivByZero]
divTruncChecked = \a, b ->
if Num.isZero b then
Err DivByZero
@ -845,7 +845,7 @@ divTruncChecked = \a, b ->
## >>> Num.rem -8 -3
rem : Int a, Int a -> Int a
remChecked : Int a, Int a -> Result (Int a) [DivByZero]*
remChecked : Int a, Int a -> Result (Int a) [DivByZero]
remChecked = \a, b ->
if Num.isZero b then
Err DivByZero
@ -944,7 +944,7 @@ addSaturated : Num a, Num a -> Num a
##
## This is the same as [Num.add] except if the operation overflows, instead of
## panicking or returning ∞ or -∞, it will return `Err Overflow`.
addChecked : Num a, Num a -> Result (Num a) [Overflow]*
addChecked : Num a, Num a -> Result (Num a) [Overflow]
addChecked = \a, b ->
result = addCheckedLowlevel a b
@ -970,7 +970,7 @@ subSaturated : Num a, Num a -> Num a
##
## This is the same as [Num.sub] except if the operation overflows, instead of
## panicking or returning ∞ or -∞, it will return `Err Overflow`.
subChecked : Num a, Num a -> Result (Num a) [Overflow]*
subChecked : Num a, Num a -> Result (Num a) [Overflow]
subChecked = \a, b ->
result = subCheckedLowlevel a b
@ -994,7 +994,7 @@ mulSaturated : Num a, Num a -> Num a
##
## This is the same as [Num.mul] except if the operation overflows, instead of
## panicking or returning ∞ or -∞, it will return `Err Overflow`.
mulChecked : Num a, Num a -> Result (Num a) [Overflow]*
mulChecked : Num a, Num a -> Result (Num a) [Overflow]
mulChecked = \a, b ->
result = mulCheckedLowlevel a b
@ -1250,19 +1250,19 @@ toF64 : Num * -> F64
## Converts a [Int] to an [I8].
## If the given integer can't be precisely represented in an [I8], returns
## `Err OutOfBounds`.
toI8Checked : Int * -> Result I8 [OutOfBounds]*
toI16Checked : Int * -> Result I16 [OutOfBounds]*
toI32Checked : Int * -> Result I32 [OutOfBounds]*
toI64Checked : Int * -> Result I64 [OutOfBounds]*
toI128Checked : Int * -> Result I128 [OutOfBounds]*
toU8Checked : Int * -> Result U8 [OutOfBounds]*
toU16Checked : Int * -> Result U16 [OutOfBounds]*
toU32Checked : Int * -> Result U32 [OutOfBounds]*
toU64Checked : Int * -> Result U64 [OutOfBounds]*
toU128Checked : Int * -> Result U128 [OutOfBounds]*
toNatChecked : Int * -> Result Nat [OutOfBounds]*
toF32Checked : Num * -> Result F32 [OutOfBounds]*
toF64Checked : Num * -> Result F64 [OutOfBounds]*
toI8Checked : Int * -> Result I8 [OutOfBounds]
toI16Checked : Int * -> Result I16 [OutOfBounds]
toI32Checked : Int * -> Result I32 [OutOfBounds]
toI64Checked : Int * -> Result I64 [OutOfBounds]
toI128Checked : Int * -> Result I128 [OutOfBounds]
toU8Checked : Int * -> Result U8 [OutOfBounds]
toU16Checked : Int * -> Result U16 [OutOfBounds]
toU32Checked : Int * -> Result U32 [OutOfBounds]
toU64Checked : Int * -> Result U64 [OutOfBounds]
toU128Checked : Int * -> Result U128 [OutOfBounds]
toNatChecked : Int * -> Result Nat [OutOfBounds]
toF32Checked : Num * -> Result F32 [OutOfBounds]
toF64Checked : Num * -> Result F64 [OutOfBounds]
# Special Floating-Point operations
## When given a [F64] or [F32] value, returns `Bool.false` if that value is

View file

@ -16,9 +16,7 @@ interface Set
]
imports [List, Bool.{ Bool, Eq }, Dict.{ Dict }, Num.{ Nat }]
Set k := Dict.Dict k {} has [Eq { isEq: setEq }]
setEq = \@Set d1, @Set d2 -> d1 == d2
Set k := Dict.Dict k {} has [Eq]
fromDict : Dict k {} -> Set k
fromDict = \dict -> @Set dict

View file

@ -45,6 +45,7 @@ interface Str
walkScalarsUntil,
withCapacity,
withPrefix,
graphemes,
]
imports [
Bool.{ Bool, Eq },
@ -180,6 +181,9 @@ repeat : Str, Nat -> Str
## expect Str.countGraphemes "üïä" == 4
countGraphemes : Str -> Nat
## Split a string into its constituent grapheme clusters
graphemes : Str -> List Str
## If the string begins with a [Unicode code point](http://www.unicode.org/glossary/#code_point)
## equal to the given [U32], return `Bool.true`. Otherwise return `Bool.false`.
##
@ -216,7 +220,7 @@ toUtf8 : Str -> List U8
##
## expect Str.fromUtf8 [233, 185, 143] == Ok "鹏"
## expect Str.fromUtf8 [0xb0] == Err (BadUtf8 InvalidStartByte 0)
fromUtf8 : List U8 -> Result Str [BadUtf8 Utf8ByteProblem Nat]*
fromUtf8 : List U8 -> Result Str [BadUtf8 Utf8ByteProblem Nat]
fromUtf8 = \bytes ->
result = fromUtf8RangeLowlevel bytes 0 (List.len bytes)
@ -229,7 +233,7 @@ fromUtf8 = \bytes ->
## into a [Str]
##
## expect Str.fromUtf8Range [72, 105, 80, 103] { start : 0, count : 2 } == Ok "Hi"
fromUtf8Range : List U8, { start : Nat, count : Nat } -> Result Str [BadUtf8 Utf8ByteProblem Nat, OutOfBounds]*
fromUtf8Range : List U8, { start : Nat, count : Nat } -> Result Str [BadUtf8 Utf8ByteProblem Nat, OutOfBounds]
fromUtf8Range = \bytes, config ->
if config.start + config.count <= List.len bytes then
result = fromUtf8RangeLowlevel bytes config.start config.count
@ -284,7 +288,7 @@ trimRight : Str -> Str
## expect Str.toDec "10" == Ok 10dec
## expect Str.toDec "-0.25" == Ok -0.25dec
## expect Str.toDec "not a number" == Err InvalidNumStr
toDec : Str -> Result Dec [InvalidNumStr]*
toDec : Str -> Result Dec [InvalidNumStr]
toDec = \string -> strToNumHelp string
## Encode a [Str] to a [F64]. A [F64] value is a 64-bit
@ -293,7 +297,7 @@ toDec = \string -> strToNumHelp string
##
## expect Str.toF64 "0.10" == Ok 0.10f64
## expect Str.toF64 "not a number" == Err InvalidNumStr
toF64 : Str -> Result F64 [InvalidNumStr]*
toF64 : Str -> Result F64 [InvalidNumStr]
toF64 = \string -> strToNumHelp string
## Encode a [Str] to a [F32]. A [F32] value is a 32-bit
@ -302,7 +306,7 @@ toF64 = \string -> strToNumHelp string
##
## expect Str.toF32 "0.10" == Ok 0.10f32
## expect Str.toF32 "not a number" == Err InvalidNumStr
toF32 : Str -> Result F32 [InvalidNumStr]*
toF32 : Str -> Result F32 [InvalidNumStr]
toF32 = \string -> strToNumHelp string
## Convert a [Str] to a [Nat]. If the given number doesn't fit in [Nat], it will be [truncated](https://www.ualberta.ca/computing-science/media-library/teaching-resources/java/truncation-rounding.html).
@ -320,7 +324,7 @@ toF32 = \string -> strToNumHelp string
##
## expect Str.toNat "9_000_000_000" == Ok 9000000000
## expect Str.toNat "not a number" == Err InvalidNumStr
toNat : Str -> Result Nat [InvalidNumStr]*
toNat : Str -> Result Nat [InvalidNumStr]
toNat = \string -> strToNumHelp string
## Encode a [Str] to an unsigned [U128] integer. A [U128] value can hold numbers
@ -331,7 +335,7 @@ toNat = \string -> strToNumHelp string
## expect Str.toU128 "0.1" == Err InvalidNumStr
## expect Str.toU128 "-1" == Err InvalidNumStr
## expect Str.toU128 "not a number" == Err InvalidNumStr
toU128 : Str -> Result U128 [InvalidNumStr]*
toU128 : Str -> Result U128 [InvalidNumStr]
toU128 = \string -> strToNumHelp string
## Encode a [Str] to a signed [I128] integer. A [I128] value can hold numbers
@ -343,7 +347,7 @@ toU128 = \string -> strToNumHelp string
## expect Str.toI128 "-1" == Ok -1i128
## expect Str.toI128 "0.1" == Err InvalidNumStr
## expect Str.toI128 "not a number" == Err InvalidNumStr
toI128 : Str -> Result I128 [InvalidNumStr]*
toI128 : Str -> Result I128 [InvalidNumStr]
toI128 = \string -> strToNumHelp string
## Encode a [Str] to an unsigned [U64] integer. A [U64] value can hold numbers
@ -354,7 +358,7 @@ toI128 = \string -> strToNumHelp string
## expect Str.toU64 "0.1" == Err InvalidNumStr
## expect Str.toU64 "-1" == Err InvalidNumStr
## expect Str.toU64 "not a number" == Err InvalidNumStr
toU64 : Str -> Result U64 [InvalidNumStr]*
toU64 : Str -> Result U64 [InvalidNumStr]
toU64 = \string -> strToNumHelp string
## Encode a [Str] to a signed [I64] integer. A [I64] value can hold numbers
@ -365,7 +369,7 @@ toU64 = \string -> strToNumHelp string
## expect Str.toI64 "-1" == Ok -1i64
## expect Str.toI64 "0.1" == Err InvalidNumStr
## expect Str.toI64 "not a number" == Err InvalidNumStr
toI64 : Str -> Result I64 [InvalidNumStr]*
toI64 : Str -> Result I64 [InvalidNumStr]
toI64 = \string -> strToNumHelp string
## Encode a [Str] to an unsigned [U32] integer. A [U32] value can hold numbers
@ -376,7 +380,7 @@ toI64 = \string -> strToNumHelp string
## expect Str.toU32 "0.1" == Err InvalidNumStr
## expect Str.toU32 "-1" == Err InvalidNumStr
## expect Str.toU32 "not a number" == Err InvalidNumStr
toU32 : Str -> Result U32 [InvalidNumStr]*
toU32 : Str -> Result U32 [InvalidNumStr]
toU32 = \string -> strToNumHelp string
## Encode a [Str] to a signed [I32] integer. A [I32] value can hold numbers
@ -387,7 +391,7 @@ toU32 = \string -> strToNumHelp string
## expect Str.toI32 "-1" == Ok -1i32
## expect Str.toI32 "0.1" == Err InvalidNumStr
## expect Str.toI32 "not a number" == Err InvalidNumStr
toI32 : Str -> Result I32 [InvalidNumStr]*
toI32 : Str -> Result I32 [InvalidNumStr]
toI32 = \string -> strToNumHelp string
## Encode a [Str] to an unsigned [U16] integer. A [U16] value can hold numbers
@ -397,7 +401,7 @@ toI32 = \string -> strToNumHelp string
## expect Str.toU16 "0.1" == Err InvalidNumStr
## expect Str.toU16 "-1" == Err InvalidNumStr
## expect Str.toU16 "not a number" == Err InvalidNumStr
toU16 : Str -> Result U16 [InvalidNumStr]*
toU16 : Str -> Result U16 [InvalidNumStr]
toU16 = \string -> strToNumHelp string
## Encode a [Str] to a signed [I16] integer. A [I16] value can hold numbers
@ -408,7 +412,7 @@ toU16 = \string -> strToNumHelp string
## expect Str.toI16 "-1" == Ok -1i16
## expect Str.toI16 "0.1" == Err InvalidNumStr
## expect Str.toI16 "not a number" == Err InvalidNumStr
toI16 : Str -> Result I16 [InvalidNumStr]*
toI16 : Str -> Result I16 [InvalidNumStr]
toI16 = \string -> strToNumHelp string
## Encode a [Str] to an unsigned [U8] integer. A [U8] value can hold numbers
@ -418,7 +422,7 @@ toI16 = \string -> strToNumHelp string
## expect Str.toU8 "-0.1" == Err InvalidNumStr
## expect Str.toU8 "not a number" == Err InvalidNumStr
## expect Str.toU8 "1500" == Err InvalidNumStr
toU8 : Str -> Result U8 [InvalidNumStr]*
toU8 : Str -> Result U8 [InvalidNumStr]
toU8 = \string -> strToNumHelp string
## Encode a [Str] to a signed [I8] integer. A [I8] value can hold numbers
@ -428,7 +432,7 @@ toU8 = \string -> strToNumHelp string
## expect Str.toI8 "-15" == Ok -15i8
## expect Str.toI8 "150.00" == Err InvalidNumStr
## expect Str.toI8 "not a number" == Err InvalidNumStr
toI8 : Str -> Result I8 [InvalidNumStr]*
toI8 : Str -> Result I8 [InvalidNumStr]
toI8 = \string -> strToNumHelp string
## Get the byte at the given index, without performing a bounds check.
@ -447,7 +451,7 @@ substringUnsafe : Str, Nat, Nat -> Str
##
## expect Str.replaceEach "foo/bar/baz" "/" "_" == Ok "foo_bar_baz"
## expect Str.replaceEach "not here" "/" "_" == Err NotFound
replaceEach : Str, Str, Str -> Result Str [NotFound]*
replaceEach : Str, Str, Str -> Result Str [NotFound]
replaceEach = \haystack, needle, flower ->
when splitFirst haystack needle is
Ok { before, after } ->
@ -479,7 +483,7 @@ expect Str.replaceEach "abXdeXghi" "X" "_" == Ok "ab_de_ghi"
##
## expect Str.replaceFirst "foo/bar/baz" "/" "_" == Ok "foo_bar/baz"
## expect Str.replaceFirst "no slashes here" "/" "_" == Err NotFound
replaceFirst : Str, Str, Str -> Result Str [NotFound]*
replaceFirst : Str, Str, Str -> Result Str [NotFound]
replaceFirst = \haystack, needle, flower ->
when splitFirst haystack needle is
Ok { before, after } ->
@ -494,7 +498,7 @@ expect Str.replaceFirst "abXdeXghi" "X" "_" == Ok "ab_deXghi"
##
## expect Str.replaceLast "foo/bar/baz" "/" "_" == Ok "foo/bar_baz"
## expect Str.replaceLast "no slashes here" "/" "_" == Err NotFound
replaceLast : Str, Str, Str -> Result Str [NotFound]*
replaceLast : Str, Str, Str -> Result Str [NotFound]
replaceLast = \haystack, needle, flower ->
when splitLast haystack needle is
Ok { before, after } ->
@ -510,7 +514,7 @@ expect Str.replaceLast "abXdeXghi" "X" "_" == Ok "abXde_ghi"
##
## expect Str.splitFirst "foo/bar/baz" "/" == Ok { before: "foo", after: "bar/baz" }
## expect Str.splitFirst "no slashes here" "/" == Err NotFound
splitFirst : Str, Str -> Result { before : Str, after : Str } [NotFound]*
splitFirst : Str, Str -> Result { before : Str, after : Str } [NotFound]
splitFirst = \haystack, needle ->
when firstMatch haystack needle is
Some index ->
@ -563,7 +567,7 @@ firstMatchHelp = \haystack, needle, index, lastPossible ->
##
## expect Str.splitLast "foo/bar/baz" "/" == Ok { before: "foo/bar", after: "baz" }
## expect Str.splitLast "no slashes here" "/" == Err NotFound
splitLast : Str, Str -> Result { before : Str, after : Str } [NotFound]*
splitLast : Str, Str -> Result { before : Str, after : Str } [NotFound]
splitLast = \haystack, needle ->
when lastMatch haystack needle is
Some index ->
@ -680,7 +684,7 @@ appendScalarUnsafe : Str, U32 -> Str
##
## expect Str.appendScalar "H" 105 == Ok "Hi"
## expect Str.appendScalar "😢" 0xabcdef == Err InvalidScalar
appendScalar : Str, U32 -> Result Str [InvalidScalar]*
appendScalar : Str, U32 -> Result Str [InvalidScalar]
appendScalar = \string, scalar ->
if isValidScalar scalar then
Ok (appendScalarUnsafe string scalar)
@ -745,7 +749,7 @@ walkScalarsUntilHelp = \string, state, step, index, length ->
strToNum : Str -> { berrorcode : U8, aresult : Num * }
strToNumHelp : Str -> Result (Num a) [InvalidNumStr]*
strToNumHelp : Str -> Result (Num a) [InvalidNumStr]
strToNumHelp = \string ->
result : { berrorcode : U8, aresult : Num a }
result = strToNum string

View file

@ -362,6 +362,7 @@ pub const STR_APPEND_SCALAR: &str = "roc_builtins.str.append_scalar";
pub const STR_GET_SCALAR_UNSAFE: &str = "roc_builtins.str.get_scalar_unsafe";
pub const STR_CLONE_TO: &str = "roc_builtins.str.clone_to";
pub const STR_WITH_CAPACITY: &str = "roc_builtins.str.with_capacity";
pub const STR_GRAPHEMES: &str = "roc_builtins.str.graphemes";
pub const LIST_MAP: &str = "roc_builtins.list.map";
pub const LIST_MAP2: &str = "roc_builtins.list.map2";

View file

@ -28,7 +28,7 @@ pub struct MemberVariables {
/// The member and its signature is defined locally, in the module the store is created for.
/// We need to instantiate and introduce this during solving.
#[derive(Debug, Clone, PartialEq)]
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ResolvedMemberType(Variable);
/// Member type information that needs to be resolved from imports.
@ -56,7 +56,7 @@ impl ResolvePhase for Pending {
type MemberType = PendingMemberType;
}
#[derive(Default, Debug, Clone, Copy, PartialEq)]
#[derive(Default, Debug, Clone, Copy, PartialEq, Eq)]
pub struct Resolved;
impl ResolvePhase for Resolved {
type MemberType = ResolvedMemberType;
@ -127,7 +127,6 @@ pub struct ImplKey {
#[derive(Clone, Debug)]
pub enum ResolvedImpl {
Impl(MemberSpecializationInfo<Resolved>),
Derived,
Error,
}
@ -452,7 +451,7 @@ impl IAbilitiesStore<Resolved> {
Ok(())
}
MemberImpl::Derived | MemberImpl::Error => Err(MarkError::ImplIsNotCustom),
MemberImpl::Error => Err(MarkError::ImplIsNotCustom),
},
None => Err(MarkError::NoDeclaredImpl),
}
@ -498,7 +497,6 @@ impl IAbilitiesStore<Pending> {
self.import_specialization(specialization);
MemberImpl::Impl(specialization.symbol)
}
ResolvedImpl::Derived => MemberImpl::Derived,
ResolvedImpl::Error => MemberImpl::Error,
};
@ -957,14 +955,12 @@ mod serialize {
#[repr(C)]
enum SerMemberImpl {
Impl(Symbol),
Derived,
Error,
}
impl From<&MemberImpl> for SerMemberImpl {
fn from(k: &MemberImpl) -> Self {
match k {
MemberImpl::Impl(s) => Self::Impl(*s),
MemberImpl::Derived => Self::Derived,
MemberImpl::Error => Self::Error,
}
}
@ -973,7 +969,6 @@ mod serialize {
fn from(k: &SerMemberImpl) -> Self {
match k {
SerMemberImpl::Impl(s) => Self::Impl(*s),
SerMemberImpl::Derived => Self::Derived,
SerMemberImpl::Error => Self::Error,
}
}
@ -1134,14 +1129,12 @@ mod serialize {
#[repr(C)]
enum SerResolvedImpl {
Impl(SerMemberSpecInfo),
Derived,
Error,
}
impl SerResolvedImpl {
fn num_regions(&self) -> usize {
match self {
SerResolvedImpl::Impl(spec) => spec.1.len(),
SerResolvedImpl::Derived => 0,
SerResolvedImpl::Error => 0,
}
}
@ -1186,7 +1179,6 @@ mod serialize {
);
SerResolvedImpl::Impl(SerMemberSpecInfo(*symbol, regions, vars))
}
ResolvedImpl::Derived => SerResolvedImpl::Derived,
ResolvedImpl::Error => SerResolvedImpl::Error,
};
@ -1237,7 +1229,6 @@ mod serialize {
});
ResolvedImpl::Impl(spec_info)
}
SerResolvedImpl::Derived => ResolvedImpl::Derived,
SerResolvedImpl::Error => ResolvedImpl::Error,
};
@ -1310,7 +1301,7 @@ mod test {
store.register_declared_implementations(
Symbol::ATTR_ATTR,
[(Symbol::ARG_5, MemberImpl::Derived)],
[(Symbol::ARG_5, MemberImpl::Error)],
);
store

View file

@ -9,8 +9,8 @@ use roc_problem::can::ShadowKind;
use roc_region::all::{Loc, Region};
use roc_types::subs::{VarStore, Variable};
use roc_types::types::{
name_type_var, Alias, AliasCommon, AliasKind, AliasVar, LambdaSet, OptAbleType, OptAbleVar,
Problem, RecordField, Type, TypeExtension,
name_type_var, AbilitySet, Alias, AliasCommon, AliasKind, AliasVar, LambdaSet, OptAbleType,
OptAbleVar, Problem, RecordField, Type, TypeExtension,
};
#[derive(Clone, Debug)]
@ -105,10 +105,10 @@ impl OwnedNamedOrAble {
}
}
pub fn opt_ability(&self) -> Option<Symbol> {
pub fn opt_abilities(&self) -> Option<&AbilitySet> {
match self {
OwnedNamedOrAble::Named(_) => None,
OwnedNamedOrAble::Able(av) => Some(av.ability),
OwnedNamedOrAble::Able(av) => Some(&av.abilities),
}
}
}
@ -127,7 +127,7 @@ pub struct NamedVariable {
pub struct AbleVariable {
pub variable: Variable,
pub name: Lowercase,
pub ability: Symbol,
pub abilities: AbilitySet,
// NB: there may be multiple occurrences of a variable
pub first_seen: Region,
}
@ -136,9 +136,14 @@ pub struct AbleVariable {
pub struct IntroducedVariables {
pub wildcards: Vec<Loc<Variable>>,
pub lambda_sets: Vec<Variable>,
/// Explicit inference variables, i.e. `_`
pub inferred: Vec<Loc<Variable>>,
/// Named type variables
pub named: VecSet<NamedVariable>,
/// Named type variables bound to an ability
pub able: VecSet<AbleVariable>,
/// Extension variables which should be inferred in output position.
pub infer_ext_in_output: Vec<Variable>,
pub host_exposed_aliases: VecMap<Symbol, Variable>,
}
@ -150,6 +155,7 @@ impl IntroducedVariables {
.chain(self.inferred.iter().map(|v| &v.value))
.chain(self.named.iter().map(|nv| &nv.variable))
.chain(self.able.iter().map(|av| &av.variable))
.chain(self.infer_ext_in_output.iter())
.chain(self.host_exposed_aliases.values())
.all(|&v| v != var));
}
@ -166,12 +172,12 @@ impl IntroducedVariables {
self.named.insert(named_variable);
}
pub fn insert_able(&mut self, name: Lowercase, var: Loc<Variable>, ability: Symbol) {
pub fn insert_able(&mut self, name: Lowercase, var: Loc<Variable>, abilities: AbilitySet) {
self.debug_assert_not_already_present(var.value);
let able_variable = AbleVariable {
name,
ability,
abilities,
variable: var.value,
first_seen: var.region,
};
@ -189,6 +195,11 @@ impl IntroducedVariables {
self.inferred.push(var);
}
pub fn insert_infer_ext_in_output(&mut self, var: Variable) {
self.debug_assert_not_already_present(var);
self.infer_ext_in_output.push(var);
}
pub fn insert_lambda_set(&mut self, var: Variable) {
self.debug_assert_not_already_present(var);
self.lambda_sets.push(var);
@ -208,6 +219,8 @@ impl IntroducedVariables {
self.named.extend(other.named.iter().cloned());
self.able.extend(other.able.iter().cloned());
self.infer_ext_in_output
.extend(other.infer_ext_in_output.iter().cloned());
}
pub fn union_owned(&mut self, other: Self) {
@ -217,7 +230,8 @@ impl IntroducedVariables {
self.host_exposed_aliases.extend(other.host_exposed_aliases);
self.named.extend(other.named);
self.able.extend(other.able.iter().cloned());
self.able.extend(other.able);
self.infer_ext_in_output.extend(other.infer_ext_in_output);
}
pub fn var_by_name(&self, name: &Lowercase) -> Option<Variable> {
@ -260,14 +274,21 @@ fn malformed(env: &mut Env, region: Region, name: &str) {
env.problem(roc_problem::can::Problem::RuntimeError(problem));
}
pub(crate) enum AnnotationFor {
Value,
Alias,
Opaque,
}
/// Canonicalizes a top-level type annotation.
pub fn canonicalize_annotation(
pub(crate) fn canonicalize_annotation(
env: &mut Env,
scope: &mut Scope,
annotation: &TypeAnnotation,
region: Region,
var_store: &mut VarStore,
pending_abilities_in_scope: &PendingAbilitiesInScope,
annotation_for: AnnotationFor,
) -> Annotation {
let mut introduced_variables = IntroducedVariables::default();
let mut references = VecSet::default();
@ -301,8 +322,16 @@ pub fn canonicalize_annotation(
annot => (annot, region),
};
let pol = match annotation_for {
// Values always have positive polarity.
AnnotationFor::Value => CanPolarity::Pos,
AnnotationFor::Alias => CanPolarity::InAlias,
AnnotationFor::Opaque => CanPolarity::InOpaque,
};
let typ = can_annotation_help(
env,
pol,
annotation,
region,
scope,
@ -320,6 +349,32 @@ pub fn canonicalize_annotation(
}
}
#[derive(Clone, Copy, PartialEq, Eq)]
enum CanPolarity {
/// In an alias; polarity should be disregarded for now.
InAlias,
/// In an opaque type; polarity should be disregarded for now.
InOpaque,
Neg,
Pos,
}
impl CanPolarity {
fn set_neg(self) -> Self {
match self {
CanPolarity::InAlias | CanPolarity::InOpaque => self,
CanPolarity::Neg | CanPolarity::Pos => CanPolarity::Neg,
}
}
fn set_pos(self) -> Self {
match self {
CanPolarity::InAlias | CanPolarity::InOpaque => self,
CanPolarity::Neg | CanPolarity::Pos => CanPolarity::Pos,
}
}
}
pub(crate) fn make_apply_symbol(
env: &mut Env,
region: Region,
@ -450,7 +505,9 @@ pub fn find_type_def_symbols(
stack.push(&annotation.value);
for has_clause in clauses.iter() {
stack.push(&has_clause.value.ability.value);
for ab in has_clause.value.abilities {
stack.push(&ab.value);
}
}
}
Inferred | Wildcard | Malformed(_) => {}
@ -470,6 +527,7 @@ fn find_fresh_var_name(introduced_variables: &IntroducedVariables) -> Lowercase
#[allow(clippy::too_many_arguments)]
fn can_annotation_help(
env: &mut Env,
pol: CanPolarity,
annotation: &roc_parse::ast::TypeAnnotation,
region: Region,
scope: &mut Scope,
@ -487,6 +545,7 @@ fn can_annotation_help(
for arg in *argument_types {
let arg_ann = can_annotation_help(
env,
pol.set_neg(),
&arg.value,
arg.region,
scope,
@ -501,6 +560,7 @@ fn can_annotation_help(
let ret = can_annotation_help(
env,
pol.set_pos(),
&return_type.value,
return_type.region,
scope,
@ -537,13 +597,18 @@ fn can_annotation_help(
// Generate an variable bound to the ability so we can keep compiling.
let var = var_store.fresh();
introduced_variables.insert_able(fresh_ty_var, Loc::at(region, var), symbol);
introduced_variables.insert_able(
fresh_ty_var,
Loc::at(region, var),
AbilitySet::singleton(symbol),
);
return Type::Variable(var);
}
for arg in *type_arguments {
let arg_ann = can_annotation_help(
env,
pol,
&arg.value,
arg.region,
scope,
@ -578,7 +643,7 @@ fn can_annotation_help(
arg_ann.region,
OptAbleType {
typ: arg_ann.value,
opt_ability: alias_arg.value.opt_bound_ability,
opt_abilities: alias_arg.value.opt_bound_abilities.clone(),
},
));
}
@ -594,10 +659,33 @@ fn can_annotation_help(
lambda_set_variables.push(LambdaSet(Type::Variable(lvar)));
}
let mut infer_ext_in_output_types =
Vec::with_capacity(alias.infer_ext_in_output_variables.len());
for _ in 0..alias.infer_ext_in_output_variables.len() {
// Unfortunately the polarity might still be undetermined at this point,
// since this might be a delayed alias inside an alias. In these cases
// generate fresh variables to hold the extension-variables-to-be-inferred,
// which will be instantiated when the alias is used at a concrete site.
// Otherwise, instantiate the variables with how they should behave based
// on the polarity
let typ = match pol {
CanPolarity::InAlias | CanPolarity::Pos => {
let var = var_store.fresh();
introduced_variables.insert_infer_ext_in_output(var);
Type::Variable(var)
}
// TODO: determine for opaques
CanPolarity::InOpaque => Type::EmptyTagUnion,
CanPolarity::Neg => Type::EmptyTagUnion,
};
infer_ext_in_output_types.push(typ);
}
Type::DelayedAlias(AliasCommon {
symbol,
type_arguments: type_var_to_arg,
lambda_set_variables,
infer_ext_in_output_types,
})
}
None => Type::Apply(symbol, args, region),
@ -643,6 +731,7 @@ fn can_annotation_help(
let inner_type = can_annotation_help(
env,
CanPolarity::InOpaque,
&loc_inner.value,
region,
scope,
@ -673,7 +762,7 @@ fn can_annotation_help(
AliasVar {
name: var_name,
var,
opt_bound_ability: None,
opt_bound_abilities: None,
},
));
} else {
@ -688,7 +777,7 @@ fn can_annotation_help(
AliasVar {
name: var_name,
var,
opt_bound_ability: None,
opt_bound_abilities: None,
},
));
}
@ -742,10 +831,14 @@ fn can_annotation_help(
hidden_variables.remove(&loc_var.value.var);
}
// TODO: handle implicit ext variables in `as` aliases
let infer_ext_in_output = vec![];
scope.add_alias(
symbol,
region,
lowercase_vars,
infer_ext_in_output,
alias_actual,
AliasKind::Structural, // aliases in "as" are never opaque
);
@ -766,14 +859,13 @@ fn can_annotation_help(
} else {
Type::Alias {
symbol,
type_arguments: vars
.into_iter()
.map(|typ| OptAbleType {
typ,
opt_ability: None,
})
.collect(),
type_arguments: vars.into_iter().map(OptAbleType::unbound).collect(),
lambda_set_variables: alias.lambda_set_variables.clone(),
infer_ext_in_output_types: alias
.infer_ext_in_output_variables
.iter()
.map(|v| Type::Variable(*v))
.collect(),
actual: Box::new(alias.typ.clone()),
kind: alias.kind,
}
@ -783,6 +875,7 @@ fn can_annotation_help(
Record { fields, ext } => {
let ext_type = can_extension_type(
env,
pol,
scope,
var_store,
introduced_variables,
@ -806,6 +899,7 @@ fn can_annotation_help(
} else {
let field_types = can_assigned_fields(
env,
pol,
&fields.items,
region,
scope,
@ -821,6 +915,7 @@ fn can_annotation_help(
TagUnion { tags, ext, .. } => {
let ext_type = can_extension_type(
env,
pol,
scope,
var_store,
introduced_variables,
@ -844,6 +939,7 @@ fn can_annotation_help(
} else {
let mut tag_types = can_tags(
env,
pol,
tags.items,
region,
scope,
@ -863,6 +959,7 @@ fn can_annotation_help(
}
SpaceBefore(nested, _) | SpaceAfter(nested, _) => can_annotation_help(
env,
pol,
nested,
region,
scope,
@ -920,7 +1017,7 @@ fn canonicalize_has_clause(
) -> Result<(), Type> {
let Loc {
region,
value: roc_parse::ast::HasClause { var, ability },
value: roc_parse::ast::HasClause { var, abilities },
} = clause;
let region = *region;
@ -931,29 +1028,39 @@ fn canonicalize_has_clause(
);
let var_name = Lowercase::from(var_name);
let ability = match ability.value {
TypeAnnotation::Apply(module_name, ident, _type_arguments) => {
let symbol = make_apply_symbol(env, ability.region, scope, module_name, ident)?;
let mut can_abilities = AbilitySet::with_capacity(abilities.len());
for &Loc {
region,
value: ability,
} in *abilities
{
let ability = match ability {
TypeAnnotation::Apply(module_name, ident, _type_arguments) => {
let symbol = make_apply_symbol(env, region, scope, module_name, ident)?;
// Ability defined locally, whose members we are constructing right now...
if !pending_abilities_in_scope.contains_key(&symbol)
// Ability defined locally, whose members we are constructing right now...
if !pending_abilities_in_scope.contains_key(&symbol)
// or an ability that was imported from elsewhere
&& !scope.abilities_store.is_ability(symbol)
{
let region = ability.region;
{
env.problem(roc_problem::can::Problem::HasClauseIsNotAbility { region });
return Err(Type::Erroneous(Problem::HasClauseIsNotAbility(region)));
}
symbol
}
_ => {
env.problem(roc_problem::can::Problem::HasClauseIsNotAbility { region });
return Err(Type::Erroneous(Problem::HasClauseIsNotAbility(region)));
}
symbol
}
_ => {
let region = ability.region;
env.problem(roc_problem::can::Problem::HasClauseIsNotAbility { region });
return Err(Type::Erroneous(Problem::HasClauseIsNotAbility(region)));
}
};
};
references.insert(ability);
references.insert(ability);
let already_seen = can_abilities.insert(ability);
if already_seen {
env.problem(roc_problem::can::Problem::DuplicateHasAbility { ability, region });
}
}
if let Some(shadowing) = introduced_variables.named_var_by_name(&var_name) {
let var_name_ident = var_name.to_string().into();
@ -971,7 +1078,7 @@ fn canonicalize_has_clause(
let var = var_store.fresh();
introduced_variables.insert_able(var_name, Loc::at(region, var), ability);
introduced_variables.insert_able(var_name, Loc::at(region, var), can_abilities);
Ok(())
}
@ -979,6 +1086,7 @@ fn canonicalize_has_clause(
#[allow(clippy::too_many_arguments)]
fn can_extension_type<'a>(
env: &mut Env,
pol: CanPolarity,
scope: &mut Scope,
var_store: &mut VarStore,
introduced_variables: &mut IntroducedVariables,
@ -1003,15 +1111,16 @@ fn can_extension_type<'a>(
use roc_problem::can::ExtensionTypeKind;
let (empty_ext_type, valid_extension_type): (_, fn(&Type) -> bool) = match ext_problem_kind {
ExtensionTypeKind::Record => (Type::EmptyRec, valid_record_ext_type),
ExtensionTypeKind::TagUnion => (Type::EmptyTagUnion, valid_tag_ext_type),
let valid_extension_type: fn(&Type) -> bool = match ext_problem_kind {
ExtensionTypeKind::Record => valid_record_ext_type,
ExtensionTypeKind::TagUnion => valid_tag_ext_type,
};
match opt_ext {
Some(loc_ann) => {
let ext_type = can_annotation_help(
env,
pol,
&loc_ann.value,
loc_ann.region,
scope,
@ -1021,6 +1130,17 @@ fn can_extension_type<'a>(
references,
);
if valid_extension_type(shallow_dealias_with_scope(scope, &ext_type)) {
if matches!(loc_ann.extract_spaces().item, TypeAnnotation::Wildcard)
&& matches!(ext_problem_kind, ExtensionTypeKind::TagUnion)
&& pol == CanPolarity::Pos
{
// Wildcards are redundant in positive positions, since they will always be
// inferred as necessary there!
env.problem(roc_problem::can::Problem::UnnecessaryOutputWildcard {
region: loc_ann.region,
})
}
ext_type
} else {
// Report an error but mark the extension variable to be inferred
@ -1040,7 +1160,22 @@ fn can_extension_type<'a>(
Type::Variable(var)
}
}
None => empty_ext_type,
None => match ext_problem_kind {
ExtensionTypeKind::Record => Type::EmptyRec,
ExtensionTypeKind::TagUnion => {
// In negative positions a missing extension variable forces a closed tag union;
// otherwise, open-in-output-position means we give the tag an inference variable.
match pol {
CanPolarity::Neg | CanPolarity::InOpaque => Type::EmptyTagUnion,
CanPolarity::Pos | CanPolarity::InAlias => {
let var = var_store.fresh();
introduced_variables.insert_infer_ext_in_output(var);
Type::Variable(var)
}
}
}
},
}
}
@ -1123,7 +1258,7 @@ pub fn freshen_opaque_def(
.iter()
.map(|alias_var| OptAbleVar {
var: var_store.fresh(),
opt_ability: alias_var.value.opt_bound_ability,
opt_abilities: alias_var.value.opt_bound_abilities.clone(),
})
.collect();
@ -1170,6 +1305,7 @@ where
#[allow(clippy::too_many_arguments)]
fn can_assigned_fields<'a>(
env: &mut Env,
pol: CanPolarity,
fields: &&[Loc<AssignedField<'a, TypeAnnotation<'a>>>],
region: Region,
scope: &mut Scope,
@ -1199,6 +1335,7 @@ fn can_assigned_fields<'a>(
RequiredValue(field_name, _, annotation) => {
let field_type = can_annotation_help(
env,
pol,
&annotation.value,
annotation.region,
scope,
@ -1209,13 +1346,14 @@ fn can_assigned_fields<'a>(
);
let label = Lowercase::from(field_name.value);
field_types.insert(label.clone(), Required(field_type));
field_types.insert(label.clone(), RigidRequired(field_type));
break 'inner label;
}
OptionalValue(field_name, _, annotation) => {
let field_type = can_annotation_help(
env,
pol,
&annotation.value,
annotation.region,
scope,
@ -1246,7 +1384,7 @@ fn can_assigned_fields<'a>(
}
};
field_types.insert(field_name.clone(), Required(field_type));
field_types.insert(field_name.clone(), RigidRequired(field_type));
break 'inner field_name;
}
@ -1283,6 +1421,7 @@ fn can_assigned_fields<'a>(
#[allow(clippy::too_many_arguments)]
fn can_tags<'a>(
env: &mut Env,
pol: CanPolarity,
tags: &'a [Loc<Tag<'a>>],
region: Region,
scope: &mut Scope,
@ -1312,6 +1451,7 @@ fn can_tags<'a>(
for arg in args.iter() {
let ann = can_annotation_help(
env,
pol,
&arg.value,
arg.region,
scope,

View file

@ -125,6 +125,7 @@ map_symbol_to_lowlevel_and_arity! {
StrToNum; STR_TO_NUM; 1,
StrGetCapacity; STR_CAPACITY; 1,
StrWithCapacity; STR_WITH_CAPACITY; 1,
StrGraphemes; STR_GRAPHEMES; 1,
ListLen; LIST_LEN; 1,
ListWithCapacity; LIST_WITH_CAPACITY; 1,
@ -247,7 +248,7 @@ fn lowlevel_1(symbol: Symbol, op: LowLevel, var_store: &mut VarStore) -> Def {
let body = RunLowLevel {
op,
args: vec![(arg1_var, Var(Symbol::ARG_1))],
args: vec![(arg1_var, Var(Symbol::ARG_1, arg1_var))],
ret_var,
};
@ -268,8 +269,8 @@ fn lowlevel_2(symbol: Symbol, op: LowLevel, var_store: &mut VarStore) -> Def {
let body = RunLowLevel {
op,
args: vec![
(arg1_var, Var(Symbol::ARG_1)),
(arg2_var, Var(Symbol::ARG_2)),
(arg1_var, Var(Symbol::ARG_1, arg1_var)),
(arg2_var, Var(Symbol::ARG_2, arg2_var)),
],
ret_var,
};
@ -292,9 +293,9 @@ fn lowlevel_3(symbol: Symbol, op: LowLevel, var_store: &mut VarStore) -> Def {
let body = RunLowLevel {
op,
args: vec![
(arg1_var, Var(Symbol::ARG_1)),
(arg2_var, Var(Symbol::ARG_2)),
(arg3_var, Var(Symbol::ARG_3)),
(arg1_var, Var(Symbol::ARG_1, arg1_var)),
(arg2_var, Var(Symbol::ARG_2, arg2_var)),
(arg3_var, Var(Symbol::ARG_3, arg3_var)),
],
ret_var,
};
@ -322,10 +323,10 @@ fn lowlevel_4(symbol: Symbol, op: LowLevel, var_store: &mut VarStore) -> Def {
let body = RunLowLevel {
op,
args: vec![
(arg1_var, Var(Symbol::ARG_1)),
(arg2_var, Var(Symbol::ARG_2)),
(arg3_var, Var(Symbol::ARG_3)),
(arg4_var, Var(Symbol::ARG_4)),
(arg1_var, Var(Symbol::ARG_1, arg1_var)),
(arg2_var, Var(Symbol::ARG_2, arg2_var)),
(arg3_var, Var(Symbol::ARG_3, arg3_var)),
(arg4_var, Var(Symbol::ARG_4, arg4_var)),
],
ret_var,
};
@ -355,11 +356,11 @@ fn lowlevel_5(symbol: Symbol, op: LowLevel, var_store: &mut VarStore) -> Def {
let body = RunLowLevel {
op,
args: vec![
(arg1_var, Var(Symbol::ARG_1)),
(arg2_var, Var(Symbol::ARG_2)),
(arg3_var, Var(Symbol::ARG_3)),
(arg4_var, Var(Symbol::ARG_4)),
(arg5_var, Var(Symbol::ARG_5)),
(arg1_var, Var(Symbol::ARG_1, arg1_var)),
(arg2_var, Var(Symbol::ARG_2, arg2_var)),
(arg3_var, Var(Symbol::ARG_3, arg3_var)),
(arg4_var, Var(Symbol::ARG_4, arg4_var)),
(arg5_var, Var(Symbol::ARG_5, arg5_var)),
],
ret_var,
};
@ -486,7 +487,7 @@ fn to_num_checked(symbol: Symbol, var_store: &mut VarStore, lowlevel: LowLevel)
ext_var: var_store.fresh(),
field: "b".into(),
field_var: var_store.fresh(),
loc_expr: Box::new(no_region(Var(Symbol::ARG_2))),
loc_expr: Box::new(no_region(Var(Symbol::ARG_2, var_store.fresh()))),
},
),
// out of bounds!
@ -509,7 +510,7 @@ fn to_num_checked(symbol: Symbol, var_store: &mut VarStore, lowlevel: LowLevel)
ext_var: var_store.fresh(),
field: "a".into(),
field_var: num_var_2,
loc_expr: Box::new(no_region(Var(Symbol::ARG_2))),
loc_expr: Box::new(no_region(Var(Symbol::ARG_2, var_store.fresh()))),
},
],
var_store,
@ -523,7 +524,7 @@ fn to_num_checked(symbol: Symbol, var_store: &mut VarStore, lowlevel: LowLevel)
loc_pattern: no_region(Pattern::Identifier(Symbol::ARG_2)),
loc_expr: no_region(RunLowLevel {
op: lowlevel,
args: vec![(num_var_1, Var(Symbol::ARG_1))],
args: vec![(num_var_1, Var(Symbol::ARG_1, var_store.fresh()))],
ret_var: record_var,
}),
expr_var: record_var,
@ -549,7 +550,7 @@ fn to_num_is_zero(symbol: Symbol, var_store: &mut VarStore) -> Def {
let body = Expr::RunLowLevel {
op: LowLevel::Eq,
args: vec![
(num_var, Var(Symbol::ARG_1)),
(num_var, Var(Symbol::ARG_1, num_var)),
(
num_var,
Num(

View file

@ -1,3 +1,5 @@
use std::cell::Cell;
use crate::abilities::SpecializationId;
use crate::exhaustive::{ExhaustiveContext, SketchedRows};
use crate::expected::{Expected, PExpected};
@ -8,17 +10,17 @@ use roc_region::all::{Loc, Region};
use roc_types::subs::{ExhaustiveMark, IllegalCycleMark, Variable};
use roc_types::types::{Category, PatternCategory, Type};
#[derive(Debug)]
pub struct Constraints {
pub constraints: Vec<Constraint>,
pub types: Vec<Type>,
pub types: Vec<Cell<Type>>,
pub type_slices: Vec<TypeOrVar>,
pub variables: Vec<Variable>,
pub loc_symbols: Vec<(Symbol, Region)>,
pub let_constraints: Vec<LetConstraint>,
pub categories: Vec<Category>,
pub pattern_categories: Vec<PatternCategory>,
pub expectations: Vec<Expected<Type>>,
pub pattern_expectations: Vec<PExpected<Type>>,
pub expectations: Vec<Expected<TypeOrVar>>,
pub pattern_expectations: Vec<PExpected<TypeOrVar>>,
pub includes_tags: Vec<IncludesTag>,
pub strings: Vec<&'static str>,
pub sketched_rows: Vec<SketchedRows>,
@ -27,16 +29,44 @@ pub struct Constraints {
pub cycles: Vec<Cycle>,
}
impl std::fmt::Debug for Constraints {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("Constraints")
.field("constraints", &self.constraints)
.field("types", &"<types>")
.field("type_slices", &self.type_slices)
.field("variables", &self.variables)
.field("loc_symbols", &self.loc_symbols)
.field("let_constraints", &self.let_constraints)
.field("categories", &self.categories)
.field("pattern_categories", &self.pattern_categories)
.field("expectations", &"<expectations>")
.field("pattern_expectations", &"<pattern expectations>")
.field("includes_tags", &self.includes_tags)
.field("strings", &self.strings)
.field("sketched_rows", &self.sketched_rows)
.field("eq", &self.eq)
.field("pattern_eq", &self.pattern_eq)
.field("cycles", &self.cycles)
.finish()
}
}
impl Default for Constraints {
fn default() -> Self {
Self::new()
}
}
pub type ExpectedTypeIndex = Index<Expected<TypeOrVar>>;
pub type PExpectedTypeIndex = Index<PExpected<TypeOrVar>>;
pub type TypeOrVar = EitherIndex<Cell<Type>, Variable>;
impl Constraints {
pub fn new() -> Self {
let constraints = Vec::new();
let mut types = Vec::new();
let type_slices = Vec::with_capacity(16);
let variables = Vec::new();
let loc_symbols = Vec::new();
let let_constraints = Vec::new();
@ -52,9 +82,9 @@ impl Constraints {
let cycles = Vec::new();
types.extend([
Type::EmptyRec,
Type::EmptyTagUnion,
Type::Apply(Symbol::STR_STR, vec![], Region::zero()),
Cell::new(Type::EmptyRec),
Cell::new(Type::EmptyTagUnion),
Cell::new(Type::Apply(Symbol::STR_STR, vec![], Region::zero())),
]);
categories.extend([
@ -91,6 +121,7 @@ impl Constraints {
Self {
constraints,
types,
type_slices,
variables,
loc_symbols,
let_constraints,
@ -107,9 +138,9 @@ impl Constraints {
}
}
pub const EMPTY_RECORD: Index<Type> = Index::new(0);
pub const EMPTY_TAG_UNION: Index<Type> = Index::new(1);
pub const STR: Index<Type> = Index::new(2);
pub const EMPTY_RECORD: Index<Cell<Type>> = Index::new(0);
pub const EMPTY_TAG_UNION: Index<Cell<Type>> = Index::new(1);
pub const STR: Index<Cell<Type>> = Index::new(2);
pub const CATEGORY_RECORD: Index<Category> = Index::new(0);
pub const CATEGORY_FOREIGNCALL: Index<Category> = Index::new(1);
@ -139,7 +170,7 @@ impl Constraints {
pub const PCATEGORY_CHARACTER: Index<PatternCategory> = Index::new(10);
#[inline(always)]
pub fn push_type(&mut self, typ: Type) -> EitherIndex<Type, Variable> {
pub fn push_type(&mut self, typ: Type) -> EitherIndex<Cell<Type>, Variable> {
match typ {
Type::EmptyRec => EitherIndex::from_left(Self::EMPTY_RECORD),
Type::EmptyTagUnion => EitherIndex::from_left(Self::EMPTY_TAG_UNION),
@ -148,7 +179,7 @@ impl Constraints {
}
Type::Variable(var) => Self::push_type_variable(var),
other => {
let index: Index<Type> = Index::push_new(&mut self.types, other);
let index: Index<Cell<Type>> = Index::push_new(&mut self.types, Cell::new(other));
EitherIndex::from_left(index)
}
}
@ -175,7 +206,7 @@ impl Constraints {
}
#[inline(always)]
const fn push_type_variable(var: Variable) -> EitherIndex<Type, Variable> {
const fn push_type_variable(var: Variable) -> TypeOrVar {
// that's right, we use the variable's integer value as the index
// that way, we don't need to push anything onto a vector
let index: Index<Variable> = Index::new(var.index());
@ -183,11 +214,14 @@ impl Constraints {
EitherIndex::from_right(index)
}
#[inline(always)]
pub fn push_expected_type(&mut self, expected: Expected<Type>) -> Index<Expected<Type>> {
pub fn push_expected_type(&mut self, expected: Expected<TypeOrVar>) -> ExpectedTypeIndex {
Index::push_new(&mut self.expectations, expected)
}
pub fn push_pat_expected_type(&mut self, expected: PExpected<TypeOrVar>) -> PExpectedTypeIndex {
Index::push_new(&mut self.pattern_expectations, expected)
}
#[inline(always)]
pub fn push_category(&mut self, category: Category) -> Index<Category> {
match category {
@ -227,47 +261,39 @@ impl Constraints {
}
}
#[inline(always)]
pub fn equal_types(
&mut self,
typ: Type,
expected: Expected<Type>,
type_index: TypeOrVar,
expected_index: ExpectedTypeIndex,
category: Category,
region: Region,
) -> Constraint {
let type_index = self.push_type(typ);
let expected_index = Index::push_new(&mut self.expectations, expected);
let category_index = Self::push_category(self, category);
Constraint::Eq(Eq(type_index, expected_index, category_index, region))
}
#[inline(always)]
pub fn equal_types_var(
&mut self,
var: Variable,
expected: Expected<Type>,
expected_index: ExpectedTypeIndex,
category: Category,
region: Region,
) -> Constraint {
let type_index = Self::push_type_variable(var);
let expected_index = Index::push_new(&mut self.expectations, expected);
let category_index = Self::push_category(self, category);
Constraint::Eq(Eq(type_index, expected_index, category_index, region))
}
#[inline(always)]
pub fn equal_types_with_storage(
&mut self,
typ: Type,
expected: Expected<Type>,
type_index: TypeOrVar,
expected_index: ExpectedTypeIndex,
category: Category,
region: Region,
storage_var: Variable,
) -> Constraint {
let type_index = self.push_type(typ);
let expected_index = Index::push_new(&mut self.expectations, expected);
let category_index = Self::push_category(self, category);
let equal = Constraint::Eq(Eq(type_index, expected_index, category_index, region));
@ -287,13 +313,11 @@ impl Constraints {
pub fn equal_pattern_types(
&mut self,
typ: Type,
expected: PExpected<Type>,
type_index: TypeOrVar,
expected_index: PExpectedTypeIndex,
category: PatternCategory,
region: Region,
) -> Constraint {
let type_index = self.push_type(typ);
let expected_index = Index::push_new(&mut self.pattern_expectations, expected);
let category_index = Self::push_pattern_category(self, category);
Constraint::Pattern(type_index, expected_index, category_index, region)
@ -301,43 +325,34 @@ impl Constraints {
pub fn pattern_presence(
&mut self,
typ: Type,
expected: PExpected<Type>,
type_index: TypeOrVar,
expected_index: PExpectedTypeIndex,
category: PatternCategory,
region: Region,
) -> Constraint {
let type_index = self.push_type(typ);
let expected_index = Index::push_new(&mut self.pattern_expectations, expected);
let category_index = Index::push_new(&mut self.pattern_categories, category);
Constraint::PatternPresence(type_index, expected_index, category_index, region)
}
pub fn is_open_type(&mut self, typ: Type) -> Constraint {
let type_index = self.push_type(typ);
pub fn is_open_type(&mut self, type_index: TypeOrVar) -> Constraint {
Constraint::IsOpenType(type_index)
}
pub fn includes_tag<I>(
pub fn includes_tag(
&mut self,
typ: Type,
type_index: TypeOrVar,
tag_name: TagName,
types: I,
payloads: Slice<Variable>,
category: PatternCategory,
region: Region,
) -> Constraint
where
I: IntoIterator<Item = Type>,
{
let type_index = Index::push_new(&mut self.types, typ);
) -> Constraint {
let category_index = Index::push_new(&mut self.pattern_categories, category);
let types_slice = Slice::extend_new(&mut self.types, types);
let includes_tag = IncludesTag {
type_index,
tag_name,
types: types_slice,
types: payloads,
pattern_category: category_index,
region,
};
@ -347,7 +362,7 @@ impl Constraints {
Constraint::IncludesTag(includes_tag_index)
}
fn variable_slice<I>(&mut self, it: I) -> Slice<Variable>
pub fn variable_slice<I>(&mut self, it: I) -> Slice<Variable>
where
I: IntoIterator<Item = Variable>,
{
@ -360,24 +375,24 @@ impl Constraints {
fn def_types_slice<I>(&mut self, it: I) -> DefTypes
where
I: IntoIterator<Item = (Symbol, Loc<Type>)>,
I: IntoIterator<Item = (Symbol, Loc<TypeOrVar>)>,
I::IntoIter: ExactSizeIterator,
{
let it = it.into_iter();
let types_start = self.types.len();
let types_start = self.type_slices.len();
let loc_symbols_start = self.loc_symbols.len();
// because we have an ExactSizeIterator, we can reserve space here
let length = it.len();
self.types.reserve(length);
self.type_slices.reserve(length);
self.loc_symbols.reserve(length);
for (symbol, loc_type) in it {
let Loc { region, value } = loc_type;
self.types.push(value);
self.type_slices.push(value);
self.loc_symbols.push((symbol, region));
}
@ -448,7 +463,7 @@ impl Constraints {
where
I1: IntoIterator<Item = Variable>,
I2: IntoIterator<Item = Variable>,
I3: IntoIterator<Item = (Symbol, Loc<Type>)>,
I3: IntoIterator<Item = (Symbol, Loc<TypeOrVar>)>,
I3::IntoIter: ExactSizeIterator,
{
// defs and ret constraint are stored consequtively, so we only need to store one index
@ -494,7 +509,7 @@ impl Constraints {
) -> Constraint
where
I1: IntoIterator<Item = Variable>,
I2: IntoIterator<Item = (Symbol, Loc<Type>)>,
I2: IntoIterator<Item = (Symbol, Loc<TypeOrVar>)>,
I2::IntoIter: ExactSizeIterator,
{
// defs and ret constraint are stored consequtively, so we only need to store one index
@ -546,14 +561,10 @@ impl Constraints {
pub fn lookup(
&mut self,
symbol: Symbol,
expected: Expected<Type>,
expected_index: ExpectedTypeIndex,
region: Region,
) -> Constraint {
Constraint::Lookup(
symbol,
Index::push_new(&mut self.expectations, expected),
region,
)
Constraint::Lookup(symbol, expected_index, region)
}
pub fn contains_save_the_environment(&self, constraint: &Constraint) -> bool {
@ -592,20 +603,7 @@ impl Constraints {
pub fn store(
&mut self,
typ: Type,
variable: Variable,
filename: &'static str,
line_number: u32,
) -> Constraint {
let type_index = self.push_type(typ);
let string_index = Index::push_new(&mut self.strings, filename);
Constraint::Store(type_index, variable, string_index, line_number)
}
pub fn store_index(
&mut self,
type_index: EitherIndex<Type, Variable>,
type_index: TypeOrVar,
variable: Variable,
filename: &'static str,
line_number: u32,
@ -620,8 +618,8 @@ impl Constraints {
real_var: Variable,
real_region: Region,
category_and_expectation: Result<
(Category, Expected<Type>),
(PatternCategory, PExpected<Type>),
(Category, ExpectedTypeIndex),
(PatternCategory, PExpectedTypeIndex),
>,
sketched_rows: SketchedRows,
context: ExhaustiveContext,
@ -633,14 +631,12 @@ impl Constraints {
let equality = match category_and_expectation {
Ok((category, expected)) => {
let category = Index::push_new(&mut self.categories, category);
let expected = Index::push_new(&mut self.expectations, expected);
let equality = Eq(real_var, expected, category, real_region);
let equality = Index::push_new(&mut self.eq, equality);
Ok(equality)
}
Err((category, expected)) => {
let category = Index::push_new(&mut self.pattern_categories, category);
let expected = Index::push_new(&mut self.pattern_expectations, expected);
let equality = PatternEq(real_var, expected, category, real_region);
let equality = Index::push_new(&mut self.pattern_eq, equality);
Err(equality)
@ -682,16 +678,16 @@ roc_error_macros::assert_sizeof_aarch64!(Constraint, 3 * 8);
#[derive(Clone, Copy, Debug)]
pub struct Eq(
pub EitherIndex<Type, Variable>,
pub Index<Expected<Type>>,
pub TypeOrVar,
pub ExpectedTypeIndex,
pub Index<Category>,
pub Region,
);
#[derive(Clone, Copy, Debug)]
pub struct PatternEq(
pub EitherIndex<Type, Variable>,
pub Index<PExpected<Type>>,
pub TypeOrVar,
pub PExpectedTypeIndex,
pub Index<PatternCategory>,
pub Region,
);
@ -723,16 +719,11 @@ pub struct OpportunisticResolve {
#[derive(Clone, Copy)]
pub enum Constraint {
Eq(Eq),
Store(
EitherIndex<Type, Variable>,
Variable,
Index<&'static str>,
u32,
),
Lookup(Symbol, Index<Expected<Type>>, Region),
Store(TypeOrVar, Variable, Index<&'static str>, u32),
Lookup(Symbol, ExpectedTypeIndex, Region),
Pattern(
EitherIndex<Type, Variable>,
Index<PExpected<Type>>,
TypeOrVar,
PExpectedTypeIndex,
Index<PatternCategory>,
Region,
),
@ -747,11 +738,11 @@ pub enum Constraint {
Let(Index<LetConstraint>, Slice<Variable>),
And(Slice<Constraint>),
/// Presence constraints
IsOpenType(EitherIndex<Type, Variable>), // Theory; always applied to a variable? if yes the use that
IsOpenType(TypeOrVar), // Theory; always applied to a variable? if yes the use that
IncludesTag(Index<IncludesTag>),
PatternPresence(
EitherIndex<Type, Variable>,
Index<PExpected<Type>>,
TypeOrVar,
PExpectedTypeIndex,
Index<PatternCategory>,
Region,
),
@ -768,7 +759,7 @@ pub enum Constraint {
#[derive(Debug, Clone, Copy, Default)]
pub struct DefTypes {
pub types: Slice<Type>,
pub types: Slice<TypeOrVar>,
pub loc_symbols: Slice<(Symbol, Region)>,
}
@ -782,9 +773,9 @@ pub struct LetConstraint {
#[derive(Debug, Clone)]
pub struct IncludesTag {
pub type_index: Index<Type>,
pub type_index: TypeOrVar,
pub tag_name: TagName,
pub types: Slice<Type>,
pub types: Slice<Variable>,
pub pattern_category: Index<PatternCategory>,
pub region: Region,
}

View file

@ -1,7 +1,7 @@
use crate::{
def::Def,
expr::{AccessorData, ClosureData, Expr, Field, OpaqueWrapFunctionData, WhenBranchPattern},
pattern::{DestructType, Pattern, RecordDestruct},
pattern::{DestructType, ListPatterns, Pattern, RecordDestruct},
};
use roc_module::{
ident::{Lowercase, TagName},
@ -162,7 +162,7 @@ impl<'a> CopyEnv for AcrossSubs<'a> {
#[inline(always)]
fn clone_lambda_names(&mut self, lambda_names: SubsSlice<Symbol>) -> SubsSlice<Symbol> {
SubsSlice::extend_new(
&mut self.target.closure_names,
&mut self.target.symbol_names,
self.source.get_subs_slice(lambda_names).iter().cloned(),
)
}
@ -255,9 +255,9 @@ fn deep_copy_expr_help<C: CopyEnv>(env: &mut C, copied: &mut Vec<Variable>, expr
elem_var: sub!(*elem_var),
loc_elems: loc_elems.iter().map(|le| le.map(|e| go_help!(e))).collect(),
},
Var(sym) => Var(*sym),
Var(sym, var) => Var(*sym, sub!(*var)),
&AbilityMember(sym, specialization, specialization_var) => {
AbilityMember(sym, specialization, specialization_var)
AbilityMember(sym, specialization, sub!(specialization_var))
}
When {
loc_cond,
@ -707,6 +707,18 @@ fn deep_copy_pattern_help<C: CopyEnv>(
})
.collect(),
},
List {
list_var,
elem_var,
patterns: ListPatterns { patterns, opt_rest },
} => List {
list_var: sub!(*list_var),
elem_var: sub!(*elem_var),
patterns: ListPatterns {
patterns: patterns.iter().map(|lp| lp.map(|p| go_help!(p))).collect(),
opt_rest: *opt_rest,
},
},
NumLiteral(var, s, n, bound) => NumLiteral(sub!(*var), s.clone(), *n, *bound),
IntLiteral(v1, v2, s, n, bound) => IntLiteral(sub!(*v1), sub!(*v2), s.clone(), *n, *bound),
FloatLiteral(v1, v2, s, n, bound) => {
@ -821,11 +833,14 @@ fn deep_copy_type_vars<C: CopyEnv>(
let new_content = match content {
// The vars for which we want to do something interesting.
FlexVar(opt_name) => FlexVar(opt_name.map(|n| env.clone_name(n))),
FlexAbleVar(opt_name, ability) => {
FlexAbleVar(opt_name.map(|n| env.clone_name(n)), ability)
}
FlexAbleVar(opt_name, abilities) => FlexAbleVar(
opt_name.map(|n| env.clone_name(n)),
env.clone_lambda_names(abilities),
),
RigidVar(name) => RigidVar(env.clone_name(name)),
RigidAbleVar(name, ability) => RigidAbleVar(env.clone_name(name), ability),
RigidAbleVar(name, abilities) => {
RigidAbleVar(env.clone_name(name), env.clone_lambda_names(abilities))
}
// Everything else is a mechanical descent.
Structure(flat_type) => match flat_type {
@ -1044,8 +1059,8 @@ mod test {
use roc_region::all::Loc;
use roc_types::{
subs::{
self, Content, Content::*, Descriptor, FlatType, Mark, OptVariable, Rank, Subs,
SubsIndex, SubsSlice, Variable,
self, Content, Content::*, Descriptor, FlatType, GetSubsSlice, Mark, OptVariable, Rank,
Subs, SubsIndex, SubsSlice, Variable,
},
types::Uls,
};
@ -1107,7 +1122,8 @@ mod test {
let mut subs = Subs::new();
let field_name = SubsIndex::push_new(&mut subs.field_names, "a".into());
let var = new_var(&mut subs, FlexAbleVar(Some(field_name), Symbol::UNDERSCORE));
let abilities = SubsSlice::extend_new(&mut subs.symbol_names, [Symbol::UNDERSCORE]);
let var = new_var(&mut subs, FlexAbleVar(Some(field_name), abilities));
let mut copied = vec![];
@ -1116,8 +1132,9 @@ mod test {
assert_ne!(var, copy);
match subs.get_content_without_compacting(var) {
FlexAbleVar(Some(name), Symbol::UNDERSCORE) => {
FlexAbleVar(Some(name), abilities) => {
assert_eq!(subs[*name].as_str(), "a");
assert_eq!(subs.get_subs_slice(*abilities), [Symbol::UNDERSCORE]);
}
it => unreachable!("{:?}", it),
}
@ -1128,7 +1145,8 @@ mod test {
let mut subs = Subs::new();
let field_name = SubsIndex::push_new(&mut subs.field_names, "a".into());
let var = new_var(&mut subs, RigidAbleVar(field_name, Symbol::UNDERSCORE));
let abilities = SubsSlice::extend_new(&mut subs.symbol_names, [Symbol::UNDERSCORE]);
let var = new_var(&mut subs, RigidAbleVar(field_name, abilities));
let mut copied = vec![];
@ -1136,8 +1154,9 @@ mod test {
assert_ne!(var, copy);
match subs.get_content_without_compacting(var) {
RigidAbleVar(name, Symbol::UNDERSCORE) => {
RigidAbleVar(name, abilities) => {
assert_eq!(subs[*name].as_str(), "a");
assert_eq!(subs.get_subs_slice(*abilities), [Symbol::UNDERSCORE]);
}
it => internal_error!("{:?}", it),
}

View file

@ -5,8 +5,10 @@ use crate::abilities::PendingMemberType;
use crate::annotation::canonicalize_annotation;
use crate::annotation::find_type_def_symbols;
use crate::annotation::make_apply_symbol;
use crate::annotation::AnnotationFor;
use crate::annotation::IntroducedVariables;
use crate::annotation::OwnedNamedOrAble;
use crate::derive;
use crate::env::Env;
use crate::expr::AccessorData;
use crate::expr::AnnotatedMark;
@ -127,11 +129,7 @@ enum PendingValueDef<'a> {
&'a Loc<ast::TypeAnnotation<'a>>,
),
/// A body with no type annotation
Body(
&'a Loc<ast::Pattern<'a>>,
Loc<Pattern>,
&'a Loc<ast::Expr<'a>>,
),
Body(Loc<Pattern>, &'a Loc<ast::Expr<'a>>),
/// A body with a type annotation
TypedBody(
&'a Loc<ast::Pattern<'a>>,
@ -145,7 +143,7 @@ impl PendingValueDef<'_> {
fn loc_pattern(&self) -> &Loc<Pattern> {
match self {
PendingValueDef::AnnotationOnly(_, loc_pattern, _) => loc_pattern,
PendingValueDef::Body(_, loc_pattern, _) => loc_pattern,
PendingValueDef::Body(loc_pattern, _) => loc_pattern,
PendingValueDef::TypedBody(_, loc_pattern, _, _) => loc_pattern,
}
}
@ -168,6 +166,7 @@ enum PendingTypeDef<'a> {
/// An opaque type alias, e.g. `Age := U32`.
Opaque {
name_str: &'a str,
name: Loc<Symbol>,
vars: Vec<Loc<Lowercase>>,
ann: &'a Loc<ast::TypeAnnotation<'a>>,
@ -212,6 +211,7 @@ impl PendingTypeDef<'_> {
Some((name.value, region))
}
PendingTypeDef::Opaque {
name_str: _,
name,
vars: _,
ann,
@ -321,6 +321,10 @@ fn canonicalize_alias<'a>(
kind: AliasKind,
) -> Result<Alias, ()> {
let symbol = name.value;
let annotation_for = match kind {
AliasKind::Structural => AnnotationFor::Alias,
AliasKind::Opaque => AnnotationFor::Opaque,
};
let can_ann = canonicalize_annotation(
env,
scope,
@ -328,6 +332,7 @@ fn canonicalize_alias<'a>(
ann.region,
var_store,
pending_abilities_in_scope,
annotation_for,
);
// Record all the annotation's references in output.references.lookups
@ -343,6 +348,7 @@ fn canonicalize_alias<'a>(
able,
wildcards,
inferred,
infer_ext_in_output,
..
} = can_ann.introduced_variables;
@ -358,14 +364,14 @@ fn canonicalize_alias<'a>(
// This is a valid lowercase rigid var for the type def.
let named_variable = named.swap_remove(index);
let var = named_variable.variable();
let opt_bound_ability = named_variable.opt_ability();
let opt_bound_abilities = named_variable.opt_abilities().map(ToOwned::to_owned);
let name = named_variable.name();
can_vars.push(Loc {
value: AliasVar {
name,
var,
opt_bound_ability,
opt_bound_abilities,
},
region: loc_lowercase.region,
});
@ -387,7 +393,7 @@ fn canonicalize_alias<'a>(
value: AliasVar {
name: loc_lowercase.value.clone(),
var: var_store.fresh(),
opt_bound_ability: None,
opt_bound_abilities: None,
},
region: loc_lowercase.region,
});
@ -427,6 +433,7 @@ fn canonicalize_alias<'a>(
symbol,
name.region,
can_vars.clone(),
infer_ext_in_output,
can_ann.typ,
kind,
))
@ -641,6 +648,13 @@ fn separate_implemented_and_required_members(
}
}
type DerivedDef<'a> = Loc<PendingValue<'a>>;
struct CanonicalizedOpaque<'a> {
opaque_def: Alias,
derived_defs: Vec<DerivedDef<'a>>,
}
#[inline(always)]
#[allow(clippy::too_many_arguments)]
fn canonicalize_opaque<'a>(
@ -651,10 +665,11 @@ fn canonicalize_opaque<'a>(
pending_abilities_in_scope: &PendingAbilitiesInScope,
name: Loc<Symbol>,
name_str: &'a str,
ann: &'a Loc<ast::TypeAnnotation<'a>>,
vars: &[Loc<Lowercase>],
has_abilities: Option<&'a Loc<ast::HasAbilities<'a>>>,
) -> Result<Alias, ()> {
) -> Result<CanonicalizedOpaque<'a>, ()> {
let alias = canonicalize_alias(
env,
output,
@ -667,6 +682,7 @@ fn canonicalize_opaque<'a>(
AliasKind::Opaque,
)?;
let mut derived_defs = Vec::new();
if let Some(has_abilities) = has_abilities {
let has_abilities = has_abilities.value.collection();
@ -808,7 +824,24 @@ fn canonicalize_opaque<'a>(
.abilities_store
.register_declared_implementations(name.value, impls);
} else if let Some((_, members)) = ability.derivable_ability() {
let impls = members.iter().map(|member| (*member, MemberImpl::Derived));
let num_members = members.len();
derived_defs.reserve(num_members);
let mut impls = Vec::with_capacity(num_members);
for &member in members.iter() {
let (derived_impl, impl_pat, impl_body) =
derive::synthesize_member_impl(env, scope, name_str, member);
let derived_def = Loc::at(
derive::DERIVED_REGION,
PendingValue::Def(PendingValueDef::Body(impl_pat, impl_body)),
);
impls.push((member, MemberImpl::Impl(derived_impl)));
derived_defs.push(derived_def);
}
scope
.abilities_store
.register_declared_implementations(name.value, impls);
@ -834,7 +867,7 @@ fn canonicalize_opaque<'a>(
alias_var.region,
OptAbleType {
typ: Type::Variable(var_store.fresh()),
opt_ability: alias_var.value.opt_bound_ability,
opt_abilities: alias_var.value.opt_bound_abilities.clone(),
},
)
})
@ -844,6 +877,11 @@ fn canonicalize_opaque<'a>(
.iter()
.map(|_| LambdaSet(Type::Variable(var_store.fresh())))
.collect(),
infer_ext_in_output_types: alias
.infer_ext_in_output_variables
.iter()
.map(|_| Type::Variable(var_store.fresh()))
.collect(),
});
let old = output
@ -854,7 +892,10 @@ fn canonicalize_opaque<'a>(
}
}
Ok(alias)
Ok(CanonicalizedOpaque {
opaque_def: alias,
derived_defs,
})
}
#[inline(always)]
@ -929,7 +970,11 @@ pub(crate) fn canonicalize_defs<'a>(
scope.register_debug_idents();
}
let (aliases, symbols_introduced) = canonicalize_type_defs(
let CanonicalizedTypeDefs {
aliases,
symbols_introduced,
derived_defs,
} = canonicalize_type_defs(
env,
&mut output,
var_store,
@ -938,6 +983,11 @@ pub(crate) fn canonicalize_defs<'a>(
pending_type_defs,
);
// Add the derived ASTs, so that we create proper canonicalized defs for them.
// They can go at the end, and derived defs should never reference anything other than builtin
// ability members.
pending_value_defs.extend(derived_defs);
// Now that we have the scope completely assembled, and shadowing resolved,
// we're ready to canonicalize any body exprs.
canonicalize_value_defs(
@ -1086,6 +1136,12 @@ fn canonicalize_value_defs<'a>(
(can_defs, output, symbols_introduced)
}
struct CanonicalizedTypeDefs<'a> {
aliases: VecMap<Symbol, Alias>,
symbols_introduced: MutMap<Symbol, Region>,
derived_defs: Vec<DerivedDef<'a>>,
}
fn canonicalize_type_defs<'a>(
env: &mut Env<'a>,
output: &mut Output,
@ -1093,7 +1149,7 @@ fn canonicalize_type_defs<'a>(
scope: &mut Scope,
pending_abilities_in_scope: &PendingAbilitiesInScope,
pending_type_defs: Vec<PendingTypeDef<'a>>,
) -> (VecMap<Symbol, Alias>, MutMap<Symbol, Region>) {
) -> CanonicalizedTypeDefs<'a> {
enum TypeDef<'a> {
Alias(
Loc<Symbol>,
@ -1101,6 +1157,7 @@ fn canonicalize_type_defs<'a>(
&'a Loc<ast::TypeAnnotation<'a>>,
),
Opaque(
&'a str,
Loc<Symbol>,
Vec<Loc<Lowercase>>,
&'a Loc<ast::TypeAnnotation<'a>>,
@ -1129,6 +1186,7 @@ fn canonicalize_type_defs<'a>(
type_defs.insert(name.value, TypeDef::Alias(name, vars, ann));
}
PendingTypeDef::Opaque {
name_str,
name,
vars,
ann,
@ -1141,7 +1199,10 @@ fn canonicalize_type_defs<'a>(
// builtin abilities, and hence do not affect the type def sorting. We'll insert
// references of usages when canonicalizing the derives.
type_defs.insert(name.value, TypeDef::Opaque(name, vars, ann, derived));
type_defs.insert(
name.value,
TypeDef::Opaque(name_str, name, vars, ann, derived),
);
}
PendingTypeDef::Ability { name, members } => {
let mut referenced_symbols = Vec::with_capacity(2);
@ -1167,6 +1228,7 @@ fn canonicalize_type_defs<'a>(
let sorted = sort_type_defs_before_introduction(referenced_type_symbols);
let mut aliases = VecMap::default();
let mut abilities = MutMap::default();
let mut all_derived_defs = Vec::new();
for type_name in sorted {
match type_defs.remove(&type_name).unwrap() {
@ -1188,7 +1250,7 @@ fn canonicalize_type_defs<'a>(
}
}
TypeDef::Opaque(name, vars, ann, derived) => {
TypeDef::Opaque(name_str, name, vars, ann, derived) => {
let alias_and_derives = canonicalize_opaque(
env,
output,
@ -1196,13 +1258,19 @@ fn canonicalize_type_defs<'a>(
scope,
pending_abilities_in_scope,
name,
name_str,
ann,
&vars,
derived,
);
if let Ok(alias) = alias_and_derives {
aliases.insert(name.value, alias);
if let Ok(CanonicalizedOpaque {
opaque_def,
derived_defs,
}) = alias_and_derives
{
aliases.insert(name.value, opaque_def);
all_derived_defs.extend(derived_defs);
}
}
@ -1223,6 +1291,7 @@ fn canonicalize_type_defs<'a>(
*symbol,
alias.region,
alias.type_variables.clone(),
alias.infer_ext_in_output_variables.clone(),
alias.typ.clone(),
alias.kind,
);
@ -1238,7 +1307,11 @@ fn canonicalize_type_defs<'a>(
pending_abilities_in_scope,
);
(aliases, symbols_introduced)
CanonicalizedTypeDefs {
aliases,
symbols_introduced,
derived_defs: all_derived_defs,
}
}
/// Resolve all pending abilities, to add them to scope.
@ -1270,6 +1343,7 @@ fn resolve_abilities<'a>(
typ.region,
var_store,
pending_abilities_in_scope,
AnnotationFor::Value,
);
// Record all the annotation's references in output.references.lookups
@ -1286,7 +1360,7 @@ fn resolve_abilities<'a>(
.introduced_variables
.able
.iter()
.partition(|av| av.ability == ability);
.partition(|av| av.abilities.contains(&ability));
let var_bound_to_ability = match variables_bound_to_ability.as_slice() {
[one] => one.variable,
@ -1453,7 +1527,6 @@ impl DefOrdering {
#[inline(always)]
pub(crate) fn sort_can_defs_new(
env: &mut Env<'_>,
scope: &mut Scope,
var_store: &mut VarStore,
defs: CanDefs,
@ -1524,67 +1597,48 @@ pub(crate) fn sort_can_defs_new(
let def = defs.pop().unwrap();
let index = group.first_one().unwrap();
let bad_recursion_body = if def_ordering.direct_references.get_row_col(index, index)
{
// a definition like `x = x + 1`, which is invalid in roc.
// We need to convert the body of the def to a runtime error.
let symbol = def_ordering.get_symbol(index).unwrap();
if def_ordering.references.get_row_col(index, index) {
// push the "header" for this group of recursive definitions
let cycle_mark = IllegalCycleMark::new(var_store);
declarations.push_recursive_group(1, cycle_mark);
let entries = vec![make_cycle_entry(symbol, &def)];
// then push the definition
let (symbol, specializes) = match def.loc_pattern.value {
Pattern::Identifier(symbol) => (symbol, None),
let problem = Problem::RuntimeError(RuntimeError::CircularDef(entries.clone()));
env.problem(problem);
Pattern::AbilityMemberSpecialization { ident, specializes } => {
(ident, Some(specializes))
}
Some(Expr::RuntimeError(RuntimeError::CircularDef(entries)))
} else {
None
};
_ => {
internal_error!("destructures cannot participate in a recursive group; it's always a type error")
}
};
let is_illegally_self_recursive = bad_recursion_body.is_some();
let set_opt_invalid_recursion_body = |e: &mut Expr| match bad_recursion_body {
Some(err) => *e = err,
None => {}
};
if def_ordering.references.get_row_col(index, index) && !is_illegally_self_recursive
{
// this function calls itself, and must be typechecked as a recursive def
match def.loc_pattern.value {
Pattern::Identifier(symbol) => match def.loc_expr.value {
Closure(closure_data) => {
declarations.push_recursive_def(
Loc::at(def.loc_pattern.region, symbol),
Loc::at(def.loc_expr.region, closure_data),
def.expr_var,
def.annotation,
None,
);
}
e => todo!("{:?}", e),
},
Pattern::AbilityMemberSpecialization {
ident: symbol,
specializes,
} => match def.loc_expr.value {
Closure(closure_data) => {
declarations.push_recursive_def(
Loc::at(def.loc_pattern.region, symbol),
Loc::at(def.loc_expr.region, closure_data),
def.expr_var,
def.annotation,
Some(specializes),
);
}
_ => todo!(),
},
_ => todo!("{:?}", &def.loc_pattern.value),
match def.loc_expr.value {
Closure(closure_data) => {
declarations.push_recursive_def(
Loc::at(def.loc_pattern.region, symbol),
Loc::at(def.loc_expr.region, closure_data),
def.expr_var,
def.annotation,
specializes,
);
}
_ => {
declarations.push_value_def(
Loc::at(def.loc_pattern.region, symbol),
def.loc_expr,
def.expr_var,
def.annotation,
specializes,
);
}
}
} else {
match def.loc_pattern.value {
Pattern::Identifier(symbol) => match def.loc_expr.value {
Closure(mut closure_data) => {
set_opt_invalid_recursion_body(&mut closure_data.loc_body.value);
Closure(closure_data) => {
declarations.push_function_def(
Loc::at(def.loc_pattern.region, symbol),
Loc::at(def.loc_expr.region, closure_data),
@ -1594,9 +1648,6 @@ pub(crate) fn sort_can_defs_new(
);
}
_ => {
let mut def = def;
set_opt_invalid_recursion_body(&mut def.loc_expr.value);
declarations.push_value_def(
Loc::at(def.loc_pattern.region, symbol),
def.loc_expr,
@ -1610,9 +1661,7 @@ pub(crate) fn sort_can_defs_new(
ident: symbol,
specializes,
} => match def.loc_expr.value {
Closure(mut closure_data) => {
set_opt_invalid_recursion_body(&mut closure_data.loc_body.value);
Closure(closure_data) => {
declarations.push_function_def(
Loc::at(def.loc_pattern.region, symbol),
Loc::at(def.loc_expr.region, closure_data),
@ -1622,9 +1671,6 @@ pub(crate) fn sort_can_defs_new(
);
}
_ => {
let mut def = def;
set_opt_invalid_recursion_body(&mut def.loc_expr.value);
declarations.push_value_def(
Loc::at(def.loc_pattern.region, symbol),
def.loc_expr,
@ -1635,9 +1681,6 @@ pub(crate) fn sort_can_defs_new(
}
},
_ => {
let mut def = def;
set_opt_invalid_recursion_body(&mut def.loc_expr.value);
declarations.push_destructure_def(
def.loc_pattern,
def.loc_expr,
@ -1749,17 +1792,7 @@ pub(crate) fn sort_can_defs(
Pattern::AbilityMemberSpecialization { .. }
);
let declaration = if def_ordering.direct_references.get_row_col(index, index) {
// a definition like `x = x + 1`, which is invalid in roc
let symbol = def_ordering.get_symbol(index).unwrap();
let entries = vec![make_cycle_entry(symbol, &def)];
let problem = Problem::RuntimeError(RuntimeError::CircularDef(entries.clone()));
env.problem(problem);
Declaration::InvalidCycle(entries)
} else if def_ordering.references.get_row_col(index, index) {
let declaration = if def_ordering.references.get_row_col(index, index) {
debug_assert!(!is_specialization, "Self-recursive specializations can only be determined during solving - but it was determined for {:?} now, that's a bug!", def);
// this function calls itself, and must be typechecked as a recursive def
@ -1788,7 +1821,7 @@ pub(crate) fn sort_can_defs(
.strongly_connected_components_subset(group);
debug_assert!(
!group.iter_ones().any(|index| matches!((&defs[index]).as_ref().unwrap().loc_pattern.value, Pattern::AbilityMemberSpecialization{..})),
!group.iter_ones().any(|index| matches!(defs[index].as_ref().unwrap().loc_pattern.value, Pattern::AbilityMemberSpecialization{..})),
"A specialization is involved in a recursive cycle - this should not be knowable until solving");
let declaration = if direct_sccs.groups().count() == 1 {
@ -1888,6 +1921,14 @@ fn pattern_to_vars_by_symbol(
}
}
List {
patterns, elem_var, ..
} => {
for pat in patterns.patterns.iter() {
pattern_to_vars_by_symbol(vars_by_symbol, &pat.value, *elem_var);
}
}
NumLiteral(..)
| IntLiteral(..)
| FloatLiteral(..)
@ -1978,6 +2019,7 @@ fn canonicalize_pending_value_def<'a>(
loc_ann.region,
var_store,
pending_abilities_in_scope,
AnnotationFor::Value,
);
// Record all the annotation's references in output.references.lookups
@ -2077,6 +2119,7 @@ fn canonicalize_pending_value_def<'a>(
loc_ann.region,
var_store,
pending_abilities_in_scope,
AnnotationFor::Value,
);
// Record all the annotation's references in output.references.lookups
@ -2096,7 +2139,7 @@ fn canonicalize_pending_value_def<'a>(
Some(Loc::at(loc_ann.region, type_annotation)),
)
}
Body(_loc_pattern, loc_can_pattern, loc_expr) => {
Body(loc_can_pattern, loc_expr) => {
//
canonicalize_pending_body(
env,
@ -2369,6 +2412,7 @@ fn to_pending_alias_or_opaque<'a>(
}
}
let name_str = name.value;
let name = Loc {
region: name.region,
value: symbol,
@ -2381,6 +2425,7 @@ fn to_pending_alias_or_opaque<'a>(
ann,
},
AliasKind::Opaque => PendingTypeDef::Opaque {
name_str,
name,
vars: can_rigids,
ann,
@ -2572,11 +2617,7 @@ fn to_pending_value_def<'a>(
loc_pattern.region,
);
PendingValue::Def(PendingValueDef::Body(
loc_pattern,
loc_can_pattern,
loc_expr,
))
PendingValue::Def(PendingValueDef::Body(loc_can_pattern, loc_expr))
}
AnnotatedBody {
@ -2756,11 +2797,13 @@ fn correct_mutual_recursive_type_alias<'a>(
};
let mut new_lambda_sets = ImSet::default();
let mut new_infer_ext_vars = ImSet::default();
alias_type.instantiate_aliases(
alias_region,
&can_instantiate_symbol,
var_store,
&mut new_lambda_sets,
&mut new_infer_ext_vars,
);
let alias = if cycle.count_ones() > 1 {
@ -2782,6 +2825,11 @@ fn correct_mutual_recursive_type_alias<'a>(
.map(|var| LambdaSet(Type::Variable(*var))),
);
// add any new infer-in-output extension variables that the instantiation created to the current alias
alias
.infer_ext_in_output_variables
.extend(new_infer_ext_vars);
// Now mark the alias recursive, if it needs to be.
let rec = symbols_introduced[index];
let is_self_recursive = cycle.count_ones() == 1 && matrix.get_row_col(index, index);
@ -2856,10 +2904,14 @@ fn make_tag_union_of_alias_recursive<'a>(
let alias_opt_able_vars = alias.type_variables.iter().map(|l| OptAbleType {
typ: Type::Variable(l.value.var),
opt_ability: l.value.opt_bound_ability,
opt_abilities: l.value.opt_bound_abilities.clone(),
});
let lambda_set_vars = alias.lambda_set_variables.iter();
let infer_ext_in_output_variables = alias
.infer_ext_in_output_variables
.iter()
.map(|v| Type::Variable(*v));
let made_recursive = make_tag_union_recursive_help(
env,
@ -2867,6 +2919,7 @@ fn make_tag_union_of_alias_recursive<'a>(
alias_args,
alias_opt_able_vars,
lambda_set_vars,
infer_ext_in_output_variables,
alias.kind,
alias.region,
others,
@ -2921,6 +2974,7 @@ fn make_tag_union_recursive_help<'a, 'b>(
alias_args: impl Iterator<Item = Type>,
alias_opt_able_vars: impl Iterator<Item = OptAbleType>,
lambda_set_variables: impl Iterator<Item = &'b LambdaSet>,
infer_ext_in_output_variables: impl Iterator<Item = Type>,
alias_kind: AliasKind,
region: Region,
others: Vec<Symbol>,
@ -2950,6 +3004,7 @@ fn make_tag_union_recursive_help<'a, 'b>(
symbol,
type_arguments: alias_opt_able_vars.collect(),
lambda_set_variables: lambda_set_variables.cloned().collect(),
infer_ext_in_output_types: infer_ext_in_output_variables.collect(),
actual: Box::new(Type::Variable(recursion_variable)),
kind: AliasKind::Opaque,
},
@ -2982,6 +3037,7 @@ fn make_tag_union_recursive_help<'a, 'b>(
actual,
type_arguments,
lambda_set_variables,
infer_ext_in_output_types,
kind,
..
} => {
@ -2999,6 +3055,7 @@ fn make_tag_union_recursive_help<'a, 'b>(
alias_args.into_iter(),
type_arguments.iter().cloned(),
lambda_set_variables.iter(),
infer_ext_in_output_types.iter().cloned(),
*kind,
region,
others,

View file

@ -0,0 +1,249 @@
//! Derives parse trees for ability member impls of Opaques.
//! These are derived at canonicalization time rather than type-checking time,
//! as structural types are, due to the following reasons:
//! - Derived impls for opaques are not generalizable, and hence cannot be owned by the Derived
//! module, because they may require immediate specialization unknown to the Derived module.
//! - Derived impls for opaques are typically very small, effectively deferring the
//! implementation to the value they wrap.
use roc_error_macros::internal_error;
use roc_module::{called_via::CalledVia, symbol::Symbol};
use roc_parse::ast;
use roc_region::all::{Loc, Region};
use crate::{env::Env, pattern::Pattern, scope::Scope};
/// Synthesizes the parse-tree body of a derived `Encode.toEncoder` implementation
/// for the opaque type whose `@`-prefixed name is `at_opaque`:
///
///     \@Opaq payload -> Encode.toEncoder payload
///
/// All synthesized nodes are allocated in `env.arena` and located at `DERIVED_REGION`,
/// since they have no real source location.
fn to_encoder<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
    let alloc_pat = |it| env.arena.alloc(Loc::at(DERIVED_REGION, it));
    let alloc_expr = |it| env.arena.alloc(Loc::at(DERIVED_REGION, it));

    // Internal identifier for the wrapped value; the `#` prefix keeps it out of
    // the namespace of user-written identifiers.
    let payload = "#payload";

    // \@Opaq payload
    let opaque_ref = alloc_pat(ast::Pattern::OpaqueRef(at_opaque));
    let opaque_apply_pattern = ast::Pattern::Apply(
        opaque_ref,
        &*env
            .arena
            .alloc([Loc::at(DERIVED_REGION, ast::Pattern::Identifier(payload))]),
    );

    // Encode.toEncoder payload
    let call_member = alloc_expr(ast::Expr::Apply(
        alloc_expr(ast::Expr::Var {
            module_name: "Encode",
            ident: "toEncoder",
        }),
        &*env.arena.alloc([&*alloc_expr(ast::Expr::Var {
            module_name: "",
            ident: payload,
        })]),
        roc_module::called_via::CalledVia::Space,
    ));

    // \@Opaq payload -> Encode.toEncoder payload
    ast::Expr::Closure(
        env.arena
            .alloc([Loc::at(DERIVED_REGION, opaque_apply_pattern)]),
        call_member,
    )
}
/// Synthesizes the parse-tree body of a derived `Decode.decoder` implementation
/// for the opaque type whose `@`-prefixed name is `at_opaque`:
///
///     Decode.custom \bytes, fmt ->
///         Decode.mapResult (Decode.decodeWith bytes Decode.decoder fmt) @Opaq
///
/// i.e. it defers to the wrapped value's decoder and re-wraps the successful
/// result in the opaque constructor. Nodes are arena-allocated at `DERIVED_REGION`.
fn decoder<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
    let alloc_expr = |it| env.arena.alloc(Loc::at(DERIVED_REGION, it));

    let call_custom = {
        // Internal identifiers (`#`-prefixed, so they cannot be user-written).
        let bytes = "#bytes";
        let fmt = "#fmt";

        // Decode.decodeWith bytes Decode.decoder fmt
        let call_decode_with = ast::Expr::Apply(
            alloc_expr(ast::Expr::Var {
                module_name: "Decode",
                ident: "decodeWith",
            }),
            env.arena.alloc([
                &*alloc_expr(ast::Expr::Var {
                    module_name: "",
                    ident: bytes,
                }),
                alloc_expr(ast::Expr::Var {
                    module_name: "Decode",
                    ident: "decoder",
                }),
                alloc_expr(ast::Expr::Var {
                    module_name: "",
                    ident: fmt,
                }),
            ]),
            CalledVia::Space,
        );

        // Decode.mapResult (Decode.decodeWith bytes Decode.decoder fmt) @Opaq
        let call_map_result = ast::Expr::Apply(
            alloc_expr(ast::Expr::Var {
                module_name: "Decode",
                ident: "mapResult",
            }),
            env.arena.alloc([
                &*alloc_expr(call_decode_with),
                alloc_expr(ast::Expr::OpaqueRef(at_opaque)),
            ]),
            CalledVia::Space,
        );

        // \bytes, fmt ->
        //     Decode.mapResult (Decode.decodeWith bytes Decode.decoder fmt) @Opaq
        let custom_closure = ast::Expr::Closure(
            env.arena.alloc([
                Loc::at(DERIVED_REGION, ast::Pattern::Identifier(bytes)),
                Loc::at(DERIVED_REGION, ast::Pattern::Identifier(fmt)),
            ]),
            alloc_expr(call_map_result),
        );

        // Decode.custom \bytes, fmt -> ...
        ast::Expr::Apply(
            alloc_expr(ast::Expr::Var {
                module_name: "Decode",
                ident: "custom",
            }),
            env.arena.alloc([&*alloc_expr(custom_closure)]),
            CalledVia::Space,
        )
    };

    call_custom
}
/// Synthesizes the parse-tree body of a derived `Hash.hash` implementation
/// for the opaque type whose `@`-prefixed name is `at_opaque`:
///
///     \hasher, @Opaq payload -> Hash.hash hasher payload
///
/// i.e. hashing the opaque value simply hashes the value it wraps.
/// Nodes are arena-allocated at `DERIVED_REGION`.
fn hash<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
    let alloc_pat = |it| env.arena.alloc(Loc::at(DERIVED_REGION, it));
    let alloc_expr = |it| env.arena.alloc(Loc::at(DERIVED_REGION, it));

    // Internal identifiers (`#`-prefixed, so they cannot be user-written).
    let hasher = "#hasher";
    let payload = "#payload";

    // \@Opaq payload
    let opaque_ref = alloc_pat(ast::Pattern::OpaqueRef(at_opaque));
    let opaque_apply_pattern = ast::Pattern::Apply(
        opaque_ref,
        &*env
            .arena
            .alloc([Loc::at(DERIVED_REGION, ast::Pattern::Identifier(payload))]),
    );

    // Hash.hash hasher payload
    let call_member = alloc_expr(ast::Expr::Apply(
        alloc_expr(ast::Expr::Var {
            module_name: "Hash",
            ident: "hash",
        }),
        &*env.arena.alloc([
            &*alloc_expr(ast::Expr::Var {
                module_name: "",
                ident: hasher,
            }),
            &*alloc_expr(ast::Expr::Var {
                module_name: "",
                ident: payload,
            }),
        ]),
        roc_module::called_via::CalledVia::Space,
    ));

    // \hasher, @Opaq payload -> Hash.hash hasher payload
    ast::Expr::Closure(
        env.arena.alloc([
            Loc::at(DERIVED_REGION, ast::Pattern::Identifier(hasher)),
            Loc::at(DERIVED_REGION, opaque_apply_pattern),
        ]),
        call_member,
    )
}
fn is_eq<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
let alloc_pat = |it| env.arena.alloc(Loc::at(DERIVED_REGION, it));
let alloc_expr = |it| env.arena.alloc(Loc::at(DERIVED_REGION, it));
let payload1 = "#payload1";
let payload2 = "#payload2";
let opaque_ref = alloc_pat(ast::Pattern::OpaqueRef(at_opaque));
// \@Opaq payload1
let opaque1 = ast::Pattern::Apply(
opaque_ref,
&*env
.arena
.alloc([Loc::at(DERIVED_REGION, ast::Pattern::Identifier(payload1))]),
);
// \@Opaq payload2
let opaque2 = ast::Pattern::Apply(
opaque_ref,
&*env
.arena
.alloc([Loc::at(DERIVED_REGION, ast::Pattern::Identifier(payload2))]),
);
// Bool.isEq payload1 payload2
let call_member = alloc_expr(ast::Expr::Apply(
alloc_expr(ast::Expr::Var {
module_name: "Bool",
ident: "isEq",
}),
&*env.arena.alloc([
&*alloc_expr(ast::Expr::Var {
module_name: "",
ident: payload1,
}),
&*alloc_expr(ast::Expr::Var {
module_name: "",
ident: payload2,
}),
]),
roc_module::called_via::CalledVia::Space,
));
// \@Opaq payload1, @Opaq payload2 -> Bool.isEq payload1 payload2
ast::Expr::Closure(
env.arena.alloc([
Loc::at(DERIVED_REGION, opaque1),
Loc::at(DERIVED_REGION, opaque2),
]),
call_member,
)
}
/// All synthesized (derived) code shares this zero region, since it has no
/// real source location.
pub const DERIVED_REGION: Region = Region::zero();
/// Synthesizes an implementation of `ability_member` for the opaque type named
/// `opaque_name`, introducing a fresh symbol for it into `scope`.
///
/// Returns the implementation's symbol, its definition pattern, and its
/// arena-allocated definition body.
pub(crate) fn synthesize_member_impl<'a>(
    env: &mut Env<'a>,
    scope: &mut Scope,
    opaque_name: &'a str,
    ability_member: Symbol,
) -> (Symbol, Loc<Pattern>, &'a Loc<ast::Expr<'a>>) {
    // @Opaq
    let at_opaque = env.arena.alloc_str(&format!("@{}", opaque_name));

    // Pick the derived implementation's (hidden, `#`-prefixed) name and body
    // based on which ability member is being derived.
    let (derived_name, derived_body): (String, ast::Expr<'a>) = match ability_member {
        Symbol::ENCODE_TO_ENCODER => (
            format!("#{}_toEncoder", opaque_name),
            to_encoder(env, at_opaque),
        ),
        Symbol::DECODE_DECODER => (format!("#{}_decoder", opaque_name), decoder(env, at_opaque)),
        Symbol::HASH_HASH => (format!("#{}_hash", opaque_name), hash(env, at_opaque)),
        Symbol::BOOL_IS_EQ => (format!("#{}_isEq", opaque_name), is_eq(env, at_opaque)),
        other => internal_error!("{:?} is not a derivable ability member!", other),
    };

    let impl_symbol = scope
        .introduce_str(&derived_name, DERIVED_REGION)
        .expect("this name is not unique");

    let loc_def_pattern = Loc::at(DERIVED_REGION, Pattern::Identifier(impl_symbol));
    let loc_def_body = env.arena.alloc(Loc::at(DERIVED_REGION, derived_body));

    (impl_symbol, loc_def_pattern, loc_def_body)
}

View file

@ -124,14 +124,15 @@ fn build_effect_always(
Loc::at_zero(empty_record_pattern(var_store)),
)];
let body = Expr::Var(value_symbol);
let value_var = var_store.fresh();
let body = Expr::Var(value_symbol, value_var);
Expr::Closure(ClosureData {
function_type: var_store.fresh(),
closure_type: var_store.fresh(),
return_type: var_store.fresh(),
name: inner_closure_symbol,
captured_symbols: vec![(value_symbol, var_store.fresh())],
captured_symbols: vec![(value_symbol, value_var)],
recursive: Recursive::NotRecursive,
arguments,
loc_body: Box::new(Loc::at_zero(body)),
@ -231,20 +232,22 @@ fn build_effect_map(
.introduce("effect_map_thunk".into(), Region::zero())
.unwrap()
};
let thunk_var = var_store.fresh();
let mapper_symbol = {
scope
.introduce("effect_map_mapper".into(), Region::zero())
.unwrap()
};
let mapper_var = var_store.fresh();
let map_symbol = { scope.introduce("map".into(), Region::zero()).unwrap() };
// `thunk {}`
let force_thunk_call = {
let boxed = (
var_store.fresh(),
Loc::at_zero(Expr::Var(thunk_symbol)),
thunk_var,
Loc::at_zero(Expr::Var(thunk_symbol, thunk_var)),
var_store.fresh(),
var_store.fresh(),
);
@ -256,8 +259,8 @@ fn build_effect_map(
// `toEffect (thunk {})`
let mapper_call = {
let boxed = (
var_store.fresh(),
Loc::at_zero(Expr::Var(mapper_symbol)),
mapper_var,
Loc::at_zero(Expr::Var(mapper_symbol, mapper_var)),
var_store.fresh(),
var_store.fresh(),
);
@ -411,9 +414,9 @@ fn build_effect_map(
(map_symbol, def)
}
fn force_thunk(expr: Expr, var_store: &mut VarStore) -> Expr {
fn force_thunk(expr: Expr, thunk_var: Variable, var_store: &mut VarStore) -> Expr {
let boxed = (
var_store.fresh(),
thunk_var,
Loc::at_zero(expr),
var_store.fresh(),
var_store.fresh(),
@ -441,13 +444,19 @@ fn build_effect_after(
let outer_closure_symbol = new_symbol!(scope, "effect_after_inner");
// `effect {}`
let force_effect_call = force_thunk(Expr::Var(effect_symbol), var_store);
let force_effect_var = var_store.fresh();
let force_effect_call = force_thunk(
Expr::Var(effect_symbol, force_effect_var),
force_effect_var,
var_store,
);
// `toEffect (effect {})`
let to_effect_var = var_store.fresh();
let to_effect_call = {
let boxed = (
var_store.fresh(),
Loc::at_zero(Expr::Var(to_effect_symbol)),
to_effect_var,
Loc::at_zero(Expr::Var(to_effect_symbol, to_effect_var)),
var_store.fresh(),
var_store.fresh(),
);
@ -459,7 +468,12 @@ fn build_effect_after(
// let @Effect thunk = toEffect (effect {}) in thunk {}
let let_effect_thunk = {
// `thunk {}`
let force_inner_thunk_call = force_thunk(Expr::Var(thunk_symbol), var_store);
let force_inner_thunk_var = var_store.fresh();
let force_inner_thunk_call = force_thunk(
Expr::Var(thunk_symbol, force_inner_thunk_var),
force_inner_thunk_var,
var_store,
);
let (specialized_def_type, type_arguments, lambda_set_variables) =
build_fresh_opaque_variables(var_store);
@ -702,9 +716,10 @@ fn force_effect(
let ret_var = var_store.fresh();
let force_thunk_call = {
let thunk_var = var_store.fresh();
let boxed = (
var_store.fresh(),
Loc::at_zero(Expr::Var(thunk_symbol)),
thunk_var,
Loc::at_zero(Expr::Var(thunk_symbol, thunk_var)),
var_store.fresh(),
ret_var,
);
@ -884,6 +899,7 @@ fn build_effect_forever_inner_body(
effect: Symbol,
var_store: &mut VarStore,
) -> Expr {
let thunk1_var = var_store.fresh();
let thunk1_symbol = { scope.introduce("thunk1".into(), Region::zero()).unwrap() };
let thunk2_symbol = { scope.introduce("thunk2".into(), Region::zero()).unwrap() };
@ -909,7 +925,7 @@ fn build_effect_forever_inner_body(
Def {
loc_pattern: Loc::at_zero(pattern),
loc_expr: Loc::at_zero(Expr::Var(effect)),
loc_expr: Loc::at_zero(Expr::Var(effect, var_store.fresh())),
expr_var: var_store.fresh(),
pattern_vars,
annotation: None,
@ -920,8 +936,8 @@ fn build_effect_forever_inner_body(
let force_thunk_call = {
let ret_var = var_store.fresh();
let boxed = (
var_store.fresh(),
Loc::at_zero(Expr::Var(thunk1_symbol)),
thunk1_var,
Loc::at_zero(Expr::Var(thunk1_symbol, thunk1_var)),
var_store.fresh(),
ret_var,
);
@ -945,12 +961,13 @@ fn build_effect_forever_inner_body(
let forever_effect = {
let boxed = (
var_store.fresh(),
Loc::at_zero(Expr::Var(forever_symbol)),
Loc::at_zero(Expr::Var(forever_symbol, var_store.fresh())),
var_store.fresh(),
var_store.fresh(),
);
let arguments = vec![(var_store.fresh(), Loc::at_zero(Expr::Var(effect)))];
let effect_var = var_store.fresh();
let arguments = vec![(effect_var, Loc::at_zero(Expr::Var(effect, effect_var)))];
Expr::Call(Box::new(boxed), arguments, CalledVia::Space)
};
@ -1063,6 +1080,7 @@ fn build_effect_loop(
lambda_set_variables: vec![roc_types::types::LambdaSet(Type::Variable(
closure_var,
))],
infer_ext_in_output_types: vec![],
actual: Box::new(actual),
kind: AliasKind::Opaque,
}
@ -1198,14 +1216,16 @@ fn build_effect_loop_inner_body(
// `step state`
let rhs = {
let step_var = var_store.fresh();
let boxed = (
var_store.fresh(),
Loc::at_zero(Expr::Var(step_symbol)),
step_var,
Loc::at_zero(Expr::Var(step_symbol, step_var)),
var_store.fresh(),
var_store.fresh(),
);
let arguments = vec![(var_store.fresh(), Loc::at_zero(Expr::Var(state_symbol)))];
let state_var = var_store.fresh();
let arguments = vec![(state_var, Loc::at_zero(Expr::Var(state_symbol, state_var)))];
Expr::Call(Box::new(boxed), arguments, CalledVia::Space)
};
@ -1220,10 +1240,11 @@ fn build_effect_loop_inner_body(
// thunk1 {}
let force_thunk_call = {
let thunk1_var = var_store.fresh();
let ret_var = var_store.fresh();
let boxed = (
var_store.fresh(),
Loc::at_zero(Expr::Var(thunk1_symbol)),
thunk1_var,
Loc::at_zero(Expr::Var(thunk1_symbol, thunk1_var)),
var_store.fresh(),
ret_var,
);
@ -1236,16 +1257,22 @@ fn build_effect_loop_inner_body(
// recursive call `loop newState step`
let loop_new_state_step = {
let loop_var = var_store.fresh();
let boxed = (
var_store.fresh(),
Loc::at_zero(Expr::Var(loop_symbol)),
loop_var,
Loc::at_zero(Expr::Var(loop_symbol, loop_var)),
var_store.fresh(),
var_store.fresh(),
);
let new_state_var = var_store.fresh();
let step_var = var_store.fresh();
let arguments = vec![
(var_store.fresh(), Loc::at_zero(Expr::Var(new_state_symbol))),
(var_store.fresh(), Loc::at_zero(Expr::Var(step_symbol))),
(
new_state_var,
Loc::at_zero(Expr::Var(new_state_symbol, new_state_var)),
),
(step_var, Loc::at_zero(Expr::Var(step_symbol, step_var))),
];
Expr::Call(Box::new(boxed), arguments, CalledVia::Space)
};
@ -1283,7 +1310,7 @@ fn build_effect_loop_inner_body(
crate::expr::WhenBranch {
patterns: vec![done_pattern],
value: Loc::at_zero(Expr::Var(done_symbol)),
value: Loc::at_zero(Expr::Var(done_symbol, var_store.fresh())),
guard: None,
redundant: RedundantMark::new(var_store),
}
@ -1351,7 +1378,7 @@ pub fn build_host_exposed_def(
));
captured_symbols.push((arg_symbol, arg_var));
linked_symbol_arguments.push((arg_var, Expr::Var(arg_symbol)));
linked_symbol_arguments.push((arg_var, Expr::Var(arg_symbol, arg_var)));
}
let foreign_symbol_name = format!("roc_fx_{}", ident);
@ -1505,6 +1532,7 @@ fn build_effect_opaque(
symbol: effect_symbol,
type_arguments: vec![OptAbleType::unbound(Type::Variable(a_var))],
lambda_set_variables: vec![roc_types::types::LambdaSet(Type::Variable(closure_var))],
infer_ext_in_output_types: vec![],
actual: Box::new(actual),
kind: AliasKind::Opaque,
}
@ -1526,7 +1554,7 @@ fn build_fresh_opaque_variables(
);
let type_arguments = vec![OptAbleVar {
var: a_var,
opt_ability: None,
opt_abilities: None,
}];
let lambda_set_variables = vec![roc_types::types::LambdaSet(Type::Variable(closure_var))];

View file

@ -4,11 +4,14 @@ use roc_collections::all::HumanIndex;
use roc_collections::VecMap;
use roc_error_macros::internal_error;
use roc_exhaustive::{
is_useful, Ctor, CtorName, Error, Guard, Literal, Pattern, RenderAs, TagId, Union,
is_useful, Ctor, CtorName, Error, Guard, ListArity, Literal, Pattern, RenderAs, TagId, Union,
};
use roc_module::ident::{TagIdIntType, TagName};
use roc_module::ident::{Lowercase, TagIdIntType, TagName};
use roc_module::symbol::Symbol;
use roc_region::all::{Loc, Region};
use roc_types::subs::{Content, FlatType, RedundantMark, Subs, SubsFmtContent, Variable};
use roc_types::subs::{
Content, FlatType, GetSubsSlice, RedundantMark, Subs, SubsFmtContent, Variable,
};
use roc_types::types::AliasKind;
pub use roc_exhaustive::Context as ExhaustiveContext;
@ -22,12 +25,19 @@ pub struct ExhaustiveSummary {
pub redundancies: Vec<RedundantMark>,
}
#[derive(Debug)]
pub struct TypeError;
/// Exhaustiveness-checks [sketched rows][SketchedRows] against an expected type.
///
/// Returns an error if the sketch has a type error, in which case exhautiveness checking will not
/// have been performed.
pub fn check(
subs: &Subs,
real_var: Variable,
sketched_rows: SketchedRows,
context: ExhaustiveContext,
) -> ExhaustiveSummary {
) -> Result<ExhaustiveSummary, TypeError> {
let overall_region = sketched_rows.overall_region;
let mut all_errors = Vec::with_capacity(1);
@ -35,7 +45,7 @@ pub fn check(
non_redundant_rows,
errors,
redundancies,
} = sketched_rows.reify_to_non_redundant(subs, real_var);
} = sketched_rows.reify_to_non_redundant(subs, real_var)?;
all_errors.extend(errors);
let exhaustive = match roc_exhaustive::check(overall_region, context, non_redundant_rows) {
@ -46,11 +56,11 @@ pub fn check(
}
};
ExhaustiveSummary {
Ok(ExhaustiveSummary {
errors: all_errors,
exhaustive,
redundancies,
}
})
}
#[derive(Clone, Debug, PartialEq, Eq)]
@ -60,7 +70,8 @@ enum SketchedPattern {
/// A constructor whose expected union is not yet known.
/// We'll know the whole union when reifying the sketched pattern against an expected case type.
Ctor(TagName, Vec<SketchedPattern>),
KnownCtor(Union, IndexCtor<'static>, TagId, Vec<SketchedPattern>),
KnownCtor(Union, TagId, Vec<SketchedPattern>),
List(ListArity, Vec<SketchedPattern>),
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
@ -68,12 +79,42 @@ enum IndexCtor<'a> {
/// Index an opaque type. There should be one argument.
Opaque,
/// Index a record type. The arguments are the types of the record fields.
Record,
Record(&'a [Lowercase]),
/// Index a guard constructor. The arguments are a faux guard pattern, and then the real
/// pattern being guarded. E.g. `A B if g` becomes Guard { [True, (A B)] }.
Guard,
/// Index a tag union with the given tag constructor.
Tag(&'a TagName),
/// Index a list type. The argument is the element type.
List,
}
impl<'a> IndexCtor<'a> {
fn of_union(un: &'a Union, tag_id: TagId) -> Self {
let Union {
alternatives,
render_as,
} = un;
match render_as {
RenderAs::Tag => {
let tag_name = alternatives
.iter()
.find(|ctor| ctor.tag_id == tag_id)
.map(|Ctor { name, .. }| match name {
CtorName::Tag(tag) => tag,
CtorName::Opaque(_) => {
internal_error!("tag union should never have opaque alternative")
}
})
.expect("indexable tag ID must be known to alternatives");
Self::Tag(tag_name)
}
RenderAs::Opaque => Self::Opaque,
RenderAs::Record(fields) => Self::Record(fields),
RenderAs::Guard => Self::Guard,
}
}
}
/// Index a variable as a certain constructor, to get the expected argument types of that constructor.
@ -82,11 +123,11 @@ fn index_var(
mut var: Variable,
ctor: IndexCtor,
render_as: &RenderAs,
) -> Vec<Variable> {
) -> Result<Vec<Variable>, TypeError> {
if matches!(ctor, IndexCtor::Guard) {
// `A B if g` becomes Guard { [True, (A B)] }, so the arguments are a bool, and the type
// of the pattern.
return vec![Variable::BOOL, var];
return Ok(vec![Variable::BOOL, var]);
}
loop {
match subs.get_content_without_compacting(var) {
@ -95,10 +136,8 @@ fn index_var(
| Content::FlexAbleVar(_, _)
| Content::RigidAbleVar(_, _)
| Content::LambdaSet(_)
| Content::RangedNumber(..) => internal_error!("not a indexable constructor"),
Content::Error => {
internal_error!("errors should not be reachable during exhautiveness checking")
}
| Content::RangedNumber(..) => return Err(TypeError),
Content::Error => return Err(TypeError),
Content::RecursionVar {
structure,
opt_name: _,
@ -106,14 +145,19 @@ fn index_var(
var = *structure;
}
Content::Structure(structure) => match structure {
FlatType::Apply(_, _)
| FlatType::Func(_, _, _)
| FlatType::FunctionOrTagUnion(_, _, _) => {
internal_error!("not an indexable constructor")
FlatType::Func(_, _, _) | FlatType::FunctionOrTagUnion(_, _, _) => {
return Err(TypeError)
}
FlatType::Erroneous(_) => {
internal_error!("errors should not be reachable during exhautiveness checking")
FlatType::Erroneous(_) => return Err(TypeError),
FlatType::Apply(Symbol::LIST_LIST, args) => {
match (subs.get_subs_slice(*args), ctor) {
([elem_var], IndexCtor::List) => {
return Ok(vec![*elem_var]);
}
_ => internal_error!("list types can only be indexed by list patterns"),
}
}
FlatType::Apply(..) => internal_error!("not an indexable constructor"),
FlatType::Record(fields, ext) => {
let fields_order = match render_as {
RenderAs::Record(fields) => fields,
@ -137,7 +181,7 @@ fn index_var(
})
.collect();
return field_types;
return Ok(field_types);
}
FlatType::TagUnion(tags, ext) | FlatType::RecursiveTagUnion(_, tags, ext) => {
let tag_ctor = match ctor {
@ -155,10 +199,10 @@ fn index_var(
}
});
let vars = opt_vars.expect("constructor must be known in the indexable type if we are exhautiveness checking");
return vars;
return Ok(vars);
}
FlatType::EmptyRecord => {
debug_assert!(matches!(ctor, IndexCtor::Record));
debug_assert!(matches!(ctor, IndexCtor::Record(..)));
// If there are optional record fields we don't unify them, but we need to
// cover them. Since optional fields correspond to "any" patterns, we can pass
// through arbitrary types.
@ -168,7 +212,7 @@ fn index_var(
"record constructors must always be rendered as records"
),
};
return std::iter::repeat(Variable::NULL).take(num_fields).collect();
return Ok(std::iter::repeat(Variable::NULL).take(num_fields).collect());
}
FlatType::EmptyTagUnion => {
internal_error!("empty tag unions are not indexable")
@ -176,7 +220,7 @@ fn index_var(
},
Content::Alias(_, _, var, AliasKind::Opaque) => {
debug_assert!(matches!(ctor, IndexCtor::Opaque));
return vec![*var];
return Ok(vec![*var]);
}
Content::Alias(_, _, inner, AliasKind::Structural) => {
var = *inner;
@ -186,35 +230,44 @@ fn index_var(
}
impl SketchedPattern {
fn reify(self, subs: &Subs, real_var: Variable) -> Pattern {
fn reify(self, subs: &Subs, real_var: Variable) -> Result<Pattern, TypeError> {
match self {
Self::Anything => Pattern::Anything,
Self::Literal(lit) => Pattern::Literal(lit),
Self::KnownCtor(union, index_ctor, tag_id, patterns) => {
let arg_vars = index_var(subs, real_var, index_ctor, &union.render_as);
Self::Anything => Ok(Pattern::Anything),
Self::Literal(lit) => Ok(Pattern::Literal(lit)),
Self::KnownCtor(union, tag_id, patterns) => {
let index_ctor = IndexCtor::of_union(&union, tag_id);
let arg_vars = index_var(subs, real_var, index_ctor, &union.render_as)?;
debug_assert!(arg_vars.len() == patterns.len());
let args = (patterns.into_iter())
.zip(arg_vars)
.map(|(pat, var)| {
// FIXME
pat.reify(subs, var)
})
.collect();
.map(|(pat, var)| pat.reify(subs, var))
.collect::<Result<Vec<_>, _>>()?;
Pattern::Ctor(union, tag_id, args)
Ok(Pattern::Ctor(union, tag_id, args))
}
Self::Ctor(tag_name, patterns) => {
let arg_vars = index_var(subs, real_var, IndexCtor::Tag(&tag_name), &RenderAs::Tag);
let arg_vars =
index_var(subs, real_var, IndexCtor::Tag(&tag_name), &RenderAs::Tag)?;
let (union, tag_id) = convert_tag(subs, real_var, &tag_name);
debug_assert!(arg_vars.len() == patterns.len());
let args = (patterns.into_iter())
.zip(arg_vars)
.map(|(pat, var)| pat.reify(subs, var))
.collect();
.collect::<Result<Vec<_>, _>>()?;
Pattern::Ctor(union, tag_id, args)
Ok(Pattern::Ctor(union, tag_id, args))
}
Self::List(arity, patterns) => {
let elem_var = index_var(subs, real_var, IndexCtor::List, &RenderAs::Tag)?[0];
let patterns = patterns
.into_iter()
.map(|pat| pat.reify(subs, elem_var))
.collect::<Result<Vec<_>, _>>()?;
Ok(Pattern::List(arity, patterns))
}
}
}
@ -235,7 +288,11 @@ pub struct SketchedRows {
}
impl SketchedRows {
fn reify_to_non_redundant(self, subs: &Subs, real_var: Variable) -> NonRedundantSummary {
fn reify_to_non_redundant(
self,
subs: &Subs,
real_var: Variable,
) -> Result<NonRedundantSummary, TypeError> {
to_nonredundant_rows(subs, real_var, self)
}
}
@ -283,7 +340,23 @@ fn sketch_pattern(pattern: &crate::pattern::Pattern) -> SketchedPattern {
}],
};
SP::KnownCtor(union, IndexCtor::Record, tag_id, patterns)
SP::KnownCtor(union, tag_id, patterns)
}
List {
patterns,
list_var: _,
elem_var: _,
} => {
let arity = patterns.arity();
let sketched_elem_patterns = patterns
.patterns
.iter()
.map(|p| sketch_pattern(&p.value))
.collect();
SP::List(arity, sketched_elem_patterns)
}
AppliedTag {
@ -315,12 +388,7 @@ fn sketch_pattern(pattern: &crate::pattern::Pattern) -> SketchedPattern {
}],
};
SP::KnownCtor(
union,
IndexCtor::Opaque,
tag_id,
vec![sketch_pattern(&argument.value)],
)
SP::KnownCtor(union, tag_id, vec![sketch_pattern(&argument.value)])
}
// Treat this like a literal so we mark it as non-exhaustive
@ -390,7 +458,6 @@ pub fn sketch_when_branches(region: Region, patterns: &[expr::WhenBranch]) -> Sk
vec![SP::KnownCtor(
union,
IndexCtor::Guard,
tag_id,
// NB: ordering the guard pattern first seems to be better at catching
// non-exhaustive constructors in the second argument; see the paper to see if
@ -445,7 +512,7 @@ fn to_nonredundant_rows(
subs: &Subs,
real_var: Variable,
rows: SketchedRows,
) -> NonRedundantSummary {
) -> Result<NonRedundantSummary, TypeError> {
let SketchedRows {
rows,
overall_region,
@ -468,7 +535,7 @@ fn to_nonredundant_rows(
let next_row: Vec<Pattern> = patterns
.into_iter()
.map(|pattern| pattern.reify(subs, real_var))
.collect();
.collect::<Result<_, _>>()?;
let redundant_err = if !is_inhabited_row(&next_row) {
Some(Error::Unmatchable {
@ -499,11 +566,11 @@ fn to_nonredundant_rows(
}
}
NonRedundantSummary {
Ok(NonRedundantSummary {
non_redundant_rows: checked_rows,
redundancies,
errors,
}
})
}
fn is_inhabited_row(patterns: &[Pattern]) -> bool {
@ -518,10 +585,16 @@ fn is_inhabited_pattern(pat: &Pattern) -> bool {
Pattern::Literal(_) => {}
Pattern::Ctor(union, id, pats) => {
if !union.alternatives.iter().any(|alt| alt.tag_id == *id) {
// The tag ID was dropped from the union, which means that this tag ID is one
// that is not material to the union, and so is uninhabited!
return false;
}
stack.extend(pats);
}
Pattern::List(_, pats) => {
// List is uninhabited if any element is uninhabited.
stack.extend(pats);
}
}
}
true

View file

@ -38,6 +38,16 @@ impl<T> PExpected<T> {
}
}
#[inline(always)]
pub fn map<U>(self, f: impl FnOnce(T) -> U) -> PExpected<U> {
match self {
PExpected::NoExpectation(val) => PExpected::NoExpectation(f(val)),
PExpected::ForReason(reason, val, region) => {
PExpected::ForReason(reason, f(val), region)
}
}
}
pub fn replace<U>(self, new: U) -> PExpected<U> {
match self {
PExpected::NoExpectation(_val) => PExpected::NoExpectation(new),
@ -89,6 +99,17 @@ impl<T> Expected<T> {
}
}
#[inline(always)]
pub fn map<U>(self, f: impl FnOnce(T) -> U) -> Expected<U> {
match self {
Expected::NoExpectation(val) => Expected::NoExpectation(f(val)),
Expected::ForReason(reason, val, region) => Expected::ForReason(reason, f(val), region),
Expected::FromAnnotation(pattern, size, source, val) => {
Expected::FromAnnotation(pattern, size, source, f(val))
}
}
}
pub fn replace<U>(self, new: U) -> Expected<U> {
match self {
Expected::NoExpectation(_val) => Expected::NoExpectation(new),

View file

@ -65,7 +65,7 @@ impl Output {
}
}
#[derive(Clone, Debug, PartialEq, Copy)]
#[derive(Clone, Debug, PartialEq, Eq, Copy)]
pub enum IntValue {
I128([u8; 16]),
U128([u8; 16]),
@ -100,7 +100,7 @@ pub enum Expr {
},
// Lookups
Var(Symbol),
Var(Symbol, Variable),
AbilityMember(
/// Actual member name
Symbol,
@ -230,14 +230,14 @@ pub enum Expr {
Expect {
loc_condition: Box<Loc<Expr>>,
loc_continuation: Box<Loc<Expr>>,
lookups_in_cond: Vec<(Symbol, Variable)>,
lookups_in_cond: Vec<ExpectLookup>,
},
// not parsed, but is generated when lowering toplevel effectful expects
ExpectFx {
loc_condition: Box<Loc<Expr>>,
loc_continuation: Box<Loc<Expr>>,
lookups_in_cond: Vec<(Symbol, Variable)>,
lookups_in_cond: Vec<ExpectLookup>,
},
/// Rendered as empty box in editor
@ -247,6 +247,13 @@ pub enum Expr {
RuntimeError(RuntimeError),
}
#[derive(Clone, Copy, Debug)]
pub struct ExpectLookup {
pub symbol: Symbol,
pub var: Variable,
pub ability_info: Option<SpecializationId>,
}
impl Expr {
pub fn category(&self) -> Category {
match self {
@ -256,7 +263,7 @@ impl Expr {
Self::Str(..) => Category::Str,
Self::SingleQuote(..) => Category::Character,
Self::List { .. } => Category::List,
&Self::Var(sym) => Category::Lookup(sym),
&Self::Var(sym, _) => Category::Lookup(sym),
&Self::AbilityMember(sym, _, _) => Category::Lookup(sym),
Self::When { .. } => Category::When,
Self::If { .. } => Category::If,
@ -338,7 +345,7 @@ pub struct ClosureData {
///
/// We distinguish them from closures so we can have better error messages
/// during constraint generation.
#[derive(Clone, Debug, PartialEq)]
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct AccessorData {
pub name: Symbol,
pub function_var: Variable,
@ -372,7 +379,7 @@ impl AccessorData {
record_var,
ext_var,
field_var,
loc_expr: Box::new(Loc::at_zero(Expr::Var(record_symbol))),
loc_expr: Box::new(Loc::at_zero(Expr::Var(record_symbol, record_var))),
field,
};
@ -440,7 +447,10 @@ impl OpaqueWrapFunctionData {
let body = Expr::OpaqueRef {
opaque_var,
name: opaque_name,
argument: Box::new((argument_var, Loc::at_zero(Expr::Var(argument_symbol)))),
argument: Box::new((
argument_var,
Loc::at_zero(Expr::Var(argument_symbol, argument_var)),
)),
specialized_def_type: Box::new(specialized_def_type),
type_arguments,
lambda_set_variables,
@ -475,7 +485,7 @@ pub struct Field {
pub loc_expr: Box<Loc<Expr>>,
}
#[derive(Clone, Copy, Debug, PartialEq)]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum Recursive {
NotRecursive = 0,
Recursive = 1,
@ -592,7 +602,7 @@ pub fn canonicalize_expr<'a>(
} => {
let (can_update, update_out) =
canonicalize_expr(env, var_store, scope, loc_update.region, &loc_update.value);
if let Var(symbol) = &can_update.value {
if let Var(symbol, _) = &can_update.value {
match canonicalize_fields(env, var_store, scope, region, fields.items) {
Ok((can_fields, mut output)) => {
output.references.union_mut(&update_out.references);
@ -765,7 +775,7 @@ pub fn canonicalize_expr<'a>(
output.tail_call = None;
let expr = match fn_expr.value {
Var(symbol) => {
Var(symbol, _) => {
output.references.insert_call(symbol);
// we're tail-calling a symbol by name, check if it's the tail-callable symbol
@ -878,7 +888,7 @@ pub fn canonicalize_expr<'a>(
var_store,
inner_scope,
region,
*branch,
branch,
&mut output,
)
});
@ -994,7 +1004,7 @@ pub fn canonicalize_expr<'a>(
// Get all the lookups that were referenced in the condition,
// so we can print their values later.
let lookups_in_cond = get_lookup_symbols(&loc_condition.value, var_store);
let lookups_in_cond = get_lookup_symbols(&loc_condition.value);
let (loc_continuation, output2) = canonicalize_expr(
env,
@ -1483,7 +1493,7 @@ fn canonicalize_fields<'a>(
let mut output = Output::default();
for loc_field in fields.iter() {
match canonicalize_field(env, var_store, scope, &loc_field.value, loc_field.region) {
match canonicalize_field(env, var_store, scope, &loc_field.value) {
Ok((label, field_expr, field_out, field_var)) => {
let field = Field {
var: field_var,
@ -1536,7 +1546,6 @@ fn canonicalize_field<'a>(
var_store: &mut VarStore,
scope: &mut Scope,
field: &'a ast::AssignedField<'a, ast::Expr<'a>>,
region: Region,
) -> Result<(Lowercase, Loc<Expr>, Output, Variable), CanonicalizeFieldProblem> {
use roc_parse::ast::AssignedField::*;
@ -1566,7 +1575,7 @@ fn canonicalize_field<'a>(
}
SpaceBefore(sub_field, _) | SpaceAfter(sub_field, _) => {
canonicalize_field(env, var_store, scope, sub_field, region)
canonicalize_field(env, var_store, scope, sub_field)
}
Malformed(_string) => {
@ -1600,7 +1609,7 @@ fn canonicalize_var_lookup(
var_store.fresh(),
)
} else {
Var(symbol)
Var(symbol, var_store.fresh())
}
}
Err(problem) => {
@ -1623,7 +1632,7 @@ fn canonicalize_var_lookup(
var_store.fresh(),
)
} else {
Var(symbol)
Var(symbol, var_store.fresh())
}
}
Err(problem) => {
@ -1642,7 +1651,7 @@ fn canonicalize_var_lookup(
}
/// Currently uses the heuristic of "only inline if it's a builtin"
pub fn inline_calls(var_store: &mut VarStore, scope: &mut Scope, expr: Expr) -> Expr {
pub fn inline_calls(var_store: &mut VarStore, expr: Expr) -> Expr {
use Expr::*;
match expr {
@ -1657,7 +1666,7 @@ pub fn inline_calls(var_store: &mut VarStore, scope: &mut Scope, expr: Expr) ->
| other @ EmptyRecord
| other @ Accessor { .. }
| other @ Update { .. }
| other @ Var(_)
| other @ Var(..)
| other @ AbilityMember(..)
| other @ RunLowLevel { .. }
| other @ TypedHole { .. }
@ -1671,7 +1680,7 @@ pub fn inline_calls(var_store: &mut VarStore, scope: &mut Scope, expr: Expr) ->
let mut new_elems = Vec::with_capacity(loc_elems.len());
for loc_elem in loc_elems {
let value = inline_calls(var_store, scope, loc_elem.value);
let value = inline_calls(var_store, loc_elem.value);
new_elems.push(Loc {
value,
@ -1696,20 +1705,20 @@ pub fn inline_calls(var_store: &mut VarStore, scope: &mut Scope, expr: Expr) ->
} => {
let loc_cond = Box::new(Loc {
region: loc_cond.region,
value: inline_calls(var_store, scope, loc_cond.value),
value: inline_calls(var_store, loc_cond.value),
});
let mut new_branches = Vec::with_capacity(branches.len());
for branch in branches {
let value = Loc {
value: inline_calls(var_store, scope, branch.value.value),
value: inline_calls(var_store, branch.value.value),
region: branch.value.region,
};
let guard = match branch.guard {
Some(loc_expr) => Some(Loc {
region: loc_expr.region,
value: inline_calls(var_store, scope, loc_expr.value),
value: inline_calls(var_store, loc_expr.value),
}),
None => None,
};
@ -1743,12 +1752,12 @@ pub fn inline_calls(var_store: &mut VarStore, scope: &mut Scope, expr: Expr) ->
for (loc_cond, loc_expr) in branches {
let loc_cond = Loc {
value: inline_calls(var_store, scope, loc_cond.value),
value: inline_calls(var_store, loc_cond.value),
region: loc_cond.region,
};
let loc_expr = Loc {
value: inline_calls(var_store, scope, loc_expr.value),
value: inline_calls(var_store, loc_expr.value),
region: loc_expr.region,
};
@ -1757,7 +1766,7 @@ pub fn inline_calls(var_store: &mut VarStore, scope: &mut Scope, expr: Expr) ->
let final_else = Box::new(Loc {
region: final_else.region,
value: inline_calls(var_store, scope, final_else.value),
value: inline_calls(var_store, final_else.value),
});
If {
@ -1775,12 +1784,12 @@ pub fn inline_calls(var_store: &mut VarStore, scope: &mut Scope, expr: Expr) ->
} => {
let loc_condition = Loc {
region: loc_condition.region,
value: inline_calls(var_store, scope, loc_condition.value),
value: inline_calls(var_store, loc_condition.value),
};
let loc_continuation = Loc {
region: loc_continuation.region,
value: inline_calls(var_store, scope, loc_continuation.value),
value: inline_calls(var_store, loc_continuation.value),
};
Expect {
@ -1797,12 +1806,12 @@ pub fn inline_calls(var_store: &mut VarStore, scope: &mut Scope, expr: Expr) ->
} => {
let loc_condition = Loc {
region: loc_condition.region,
value: inline_calls(var_store, scope, loc_condition.value),
value: inline_calls(var_store, loc_condition.value),
};
let loc_continuation = Loc {
region: loc_continuation.region,
value: inline_calls(var_store, scope, loc_continuation.value),
value: inline_calls(var_store, loc_continuation.value),
};
ExpectFx {
@ -1820,7 +1829,7 @@ pub fn inline_calls(var_store: &mut VarStore, scope: &mut Scope, expr: Expr) ->
loc_pattern: def.loc_pattern,
loc_expr: Loc {
region: def.loc_expr.region,
value: inline_calls(var_store, scope, def.loc_expr.value),
value: inline_calls(var_store, def.loc_expr.value),
},
expr_var: def.expr_var,
pattern_vars: def.pattern_vars,
@ -1830,7 +1839,7 @@ pub fn inline_calls(var_store: &mut VarStore, scope: &mut Scope, expr: Expr) ->
let loc_expr = Loc {
region: loc_expr.region,
value: inline_calls(var_store, scope, loc_expr.value),
value: inline_calls(var_store, loc_expr.value),
};
LetRec(new_defs, Box::new(loc_expr), mark)
@ -1841,7 +1850,7 @@ pub fn inline_calls(var_store: &mut VarStore, scope: &mut Scope, expr: Expr) ->
loc_pattern: def.loc_pattern,
loc_expr: Loc {
region: def.loc_expr.region,
value: inline_calls(var_store, scope, def.loc_expr.value),
value: inline_calls(var_store, def.loc_expr.value),
},
expr_var: def.expr_var,
pattern_vars: def.pattern_vars,
@ -1850,7 +1859,7 @@ pub fn inline_calls(var_store: &mut VarStore, scope: &mut Scope, expr: Expr) ->
let loc_expr = Loc {
region: loc_expr.region,
value: inline_calls(var_store, scope, loc_expr.value),
value: inline_calls(var_store, loc_expr.value),
};
LetNonRec(Box::new(def), Box::new(loc_expr))
@ -1868,7 +1877,7 @@ pub fn inline_calls(var_store: &mut VarStore, scope: &mut Scope, expr: Expr) ->
}) => {
let loc_expr = *loc_body;
let loc_expr = Loc {
value: inline_calls(var_store, scope, loc_expr.value),
value: inline_calls(var_store, loc_expr.value),
region: loc_expr.region,
};
@ -1928,7 +1937,7 @@ pub fn inline_calls(var_store: &mut VarStore, scope: &mut Scope, expr: Expr) ->
let (var, loc_expr) = *argument;
let argument = Box::new((
var,
loc_expr.map_owned(|expr| inline_calls(var_store, scope, expr)),
loc_expr.map_owned(|expr| inline_calls(var_store, expr)),
));
OpaqueRef {
@ -1960,67 +1969,71 @@ pub fn inline_calls(var_store: &mut VarStore, scope: &mut Scope, expr: Expr) ->
let (fn_var, loc_expr, closure_var, expr_var) = *boxed_tuple;
match loc_expr.value {
Var(symbol) if symbol.is_builtin() => match builtin_defs_map(symbol, var_store) {
Some(Def {
loc_expr:
Loc {
value:
Closure(ClosureData {
recursive,
arguments: params,
loc_body: boxed_body,
..
}),
..
},
..
}) => {
debug_assert_eq!(recursive, Recursive::NotRecursive);
Var(symbol, _) if symbol.is_builtin() => {
match builtin_defs_map(symbol, var_store) {
Some(Def {
loc_expr:
Loc {
value:
Closure(ClosureData {
recursive,
arguments: params,
loc_body: boxed_body,
..
}),
..
},
..
}) => {
debug_assert_eq!(recursive, Recursive::NotRecursive);
// Since this is a canonicalized Expr, we should have
// already detected any arity mismatches and replaced this
// with a RuntimeError if there was a mismatch.
debug_assert_eq!(params.len(), args.len());
// Since this is a canonicalized Expr, we should have
// already detected any arity mismatches and replaced this
// with a RuntimeError if there was a mismatch.
debug_assert_eq!(params.len(), args.len());
// Start with the function's body as the answer.
let mut loc_answer = *boxed_body;
// Start with the function's body as the answer.
let mut loc_answer = *boxed_body;
// Wrap the body in one LetNonRec for each argument,
// such that at the end we have all the arguments in
// scope with the values the caller provided.
for ((_param_var, _exhaustive_mark, loc_pattern), (expr_var, loc_expr)) in
params.iter().cloned().zip(args.into_iter()).rev()
{
// TODO get the correct vars into here.
// Not sure if param_var should be involved.
let pattern_vars = SendMap::default();
// Wrap the body in one LetNonRec for each argument,
// such that at the end we have all the arguments in
// scope with the values the caller provided.
for (
(_param_var, _exhaustive_mark, loc_pattern),
(expr_var, loc_expr),
) in params.iter().cloned().zip(args.into_iter()).rev()
{
// TODO get the correct vars into here.
// Not sure if param_var should be involved.
let pattern_vars = SendMap::default();
let def = Def {
loc_pattern,
loc_expr,
expr_var,
pattern_vars,
annotation: None,
};
let def = Def {
loc_pattern,
loc_expr,
expr_var,
pattern_vars,
annotation: None,
};
loc_answer = Loc {
region: Region::zero(),
value: LetNonRec(Box::new(def), Box::new(loc_answer)),
};
loc_answer = Loc {
region: Region::zero(),
value: LetNonRec(Box::new(def), Box::new(loc_answer)),
};
}
loc_answer.value
}
Some(_) => {
unreachable!("Tried to inline a non-function");
}
None => {
unreachable!(
"Tried to inline a builtin that wasn't registered: {:?}",
symbol
);
}
loc_answer.value
}
Some(_) => {
unreachable!("Tried to inline a non-function");
}
None => {
unreachable!(
"Tried to inline a builtin that wasn't registered: {:?}",
symbol
);
}
},
}
_ => {
// For now, we only inline calls to builtins. Leave this alone!
Call(
@ -2172,7 +2185,10 @@ fn desugar_str_segments(var_store: &mut VarStore, segments: Vec<StrSegment>) ->
Interpolation(loc_interpolated_expr) => loc_interpolated_expr,
};
let fn_expr = Loc::at(Region::zero(), Expr::Var(Symbol::STR_CONCAT));
let fn_expr = Loc::at(
Region::zero(),
Expr::Var(Symbol::STR_CONCAT, var_store.fresh()),
);
let expr = Expr::Call(
Box::new((
var_store.fresh(),
@ -2531,7 +2547,7 @@ impl Declarations {
})
}
pub fn expects(&self) -> VecMap<Region, Vec<(Symbol, Variable)>> {
pub fn expects(&self) -> VecMap<Region, Vec<ExpectLookup>> {
let mut collector = ExpectCollector {
expects: VecMap::default(),
};
@ -2615,16 +2631,34 @@ pub struct DestructureDef {
pub pattern_vars: VecMap<Symbol, Variable>,
}
fn get_lookup_symbols(expr: &Expr, var_store: &mut VarStore) -> Vec<(Symbol, Variable)> {
fn get_lookup_symbols(expr: &Expr) -> Vec<ExpectLookup> {
let mut stack: Vec<&Expr> = vec![expr];
let mut symbols = Vec::new();
let mut lookups: Vec<ExpectLookup> = Vec::new();
while let Some(expr) = stack.pop() {
match expr {
Expr::Var(symbol) | Expr::Update { symbol, .. } | Expr::AbilityMember(symbol, _, _) => {
Expr::Var(symbol, var)
| Expr::Update {
symbol,
record_var: var,
..
} => {
// Don't introduce duplicates, or make unused variables
if !symbols.iter().any(|(sym, _)| sym == symbol) {
symbols.push((*symbol, var_store.fresh()));
if !lookups.iter().any(|l| l.symbol == *symbol) {
lookups.push(ExpectLookup {
symbol: *symbol,
var: *var,
ability_info: None,
});
}
}
Expr::AbilityMember(symbol, spec_id, var) => {
if !lookups.iter().any(|l| l.symbol == *symbol) {
lookups.push(ExpectLookup {
symbol: *symbol,
var: *var,
ability_info: *spec_id,
});
}
}
Expr::List { loc_elems, .. } => {
@ -2665,7 +2699,7 @@ fn get_lookup_symbols(expr: &Expr, var_store: &mut VarStore) -> Vec<(Symbol, Var
stack.reserve(1 + args.len());
match &boxed_expr.1.value {
Expr::Var(_) => {
Expr::Var(_, _) => {
// do nothing
}
function_expr => {
@ -2702,7 +2736,7 @@ fn get_lookup_symbols(expr: &Expr, var_store: &mut VarStore) -> Vec<(Symbol, Var
| Expr::ExpectFx {
loc_continuation, ..
} => {
stack.push(&(*loc_continuation).value);
stack.push(&loc_continuation.value);
// Intentionally ignore the lookups in the nested `expect` condition itself,
// because they couldn't possibly influence the outcome of this `expect`!
@ -2721,7 +2755,7 @@ fn get_lookup_symbols(expr: &Expr, var_store: &mut VarStore) -> Vec<(Symbol, Var
}
}
symbols
lookups
}
/// Here we transform
@ -2768,14 +2802,22 @@ fn toplevel_expect_to_inline_expect_help(mut loc_expr: Loc<Expr>, has_effects: b
loop {
match loc_expr.value {
Expr::LetNonRec(boxed_def, remainder) => {
lookups_in_cond.extend(boxed_def.pattern_vars.iter().map(|(a, b)| (*a, *b)));
lookups_in_cond.extend(boxed_def.pattern_vars.iter().map(|(a, b)| ExpectLookup {
symbol: *a,
var: *b,
ability_info: None,
}));
stack.push(StoredDef::NonRecursive(loc_expr.region, boxed_def));
loc_expr = *remainder;
}
Expr::LetRec(defs, remainder, mark) => {
for def in &defs {
lookups_in_cond.extend(def.pattern_vars.iter().map(|(a, b)| (*a, *b)));
lookups_in_cond.extend(def.pattern_vars.iter().map(|(a, b)| ExpectLookup {
symbol: *a,
var: *b,
ability_info: None,
}));
}
stack.push(StoredDef::Recursive(loc_expr.region, defs, mark));
@ -2818,7 +2860,7 @@ fn toplevel_expect_to_inline_expect_help(mut loc_expr: Loc<Expr>, has_effects: b
}
struct ExpectCollector {
expects: VecMap<Region, Vec<(Symbol, Variable)>>,
expects: VecMap<Region, Vec<ExpectLookup>>,
}
impl crate::traverse::Visitor for ExpectCollector {

View file

@ -7,6 +7,7 @@ pub mod builtins;
pub mod constraint;
pub mod copy;
pub mod def;
mod derive;
pub mod effect_module;
pub mod env;
pub mod exhaustive;
@ -20,3 +21,5 @@ pub mod procedure;
pub mod scope;
pub mod string;
pub mod traverse;
pub use derive::DERIVED_REGION;

View file

@ -1,9 +1,9 @@
use crate::abilities::{AbilitiesStore, ImplKey, PendingAbilitiesStore, ResolvedImpl};
use crate::annotation::canonicalize_annotation;
use crate::annotation::{canonicalize_annotation, AnnotationFor};
use crate::def::{canonicalize_defs, Def};
use crate::effect_module::HostedGeneratedFunctions;
use crate::env::Env;
use crate::expr::{ClosureData, Declarations, Expr, Output, PendingDerives};
use crate::expr::{ClosureData, Declarations, ExpectLookup, Expr, Output, PendingDerives};
use crate::pattern::{BindingsFromPattern, Pattern};
use crate::scope::Scope;
use bumpalo::Bump;
@ -18,7 +18,7 @@ use roc_parse::pattern::PatternType;
use roc_problem::can::{Problem, RuntimeError};
use roc_region::all::{Loc, Region};
use roc_types::subs::{ExposedTypesStorageSubs, Subs, VarStore, Variable};
use roc_types::types::{Alias, AliasKind, AliasVar, Type};
use roc_types::types::{AbilitySet, Alias, AliasKind, AliasVar, Type};
/// The types of all exposed values/functions of a collection of modules
#[derive(Clone, Debug, Default)]
@ -130,13 +130,13 @@ pub struct Module {
pub aliases: MutMap<Symbol, (bool, Alias)>,
pub rigid_variables: RigidVariables,
pub abilities_store: PendingAbilitiesStore,
pub loc_expects: VecMap<Region, Vec<(Symbol, Variable)>>,
pub loc_expects: VecMap<Region, Vec<ExpectLookup>>,
}
#[derive(Debug, Default)]
pub struct RigidVariables {
pub named: MutMap<Variable, Lowercase>,
pub able: MutMap<Variable, (Lowercase, Symbol)>,
pub able: MutMap<Variable, (Lowercase, AbilitySet)>,
pub wildcards: VecSet<Variable>,
}
@ -152,7 +152,7 @@ pub struct ModuleOutput {
pub symbols_from_requires: Vec<(Loc<Symbol>, Loc<Type>)>,
pub pending_derives: PendingDerives,
pub scope: Scope,
pub loc_expects: VecMap<Region, Vec<(Symbol, Variable)>>,
pub loc_expects: VecMap<Region, Vec<ExpectLookup>>,
}
fn validate_generate_with<'a>(
@ -221,6 +221,7 @@ impl GeneratedInfo {
effect_symbol,
Region::zero(),
vec![Loc::at_zero(AliasVar::unbound("a".into(), a_var))],
vec![],
actual,
AliasKind::Opaque,
);
@ -282,6 +283,7 @@ pub fn canonicalize_module_defs<'a>(
name,
alias.region,
alias.type_variables,
alias.infer_ext_in_output_variables,
alias.typ,
alias.kind,
);
@ -387,7 +389,7 @@ pub fn canonicalize_module_defs<'a>(
for able in output.introduced_variables.able {
rigid_variables
.able
.insert(able.variable, (able.name, able.ability));
.insert(able.variable, (able.name, able.abilities));
}
for var in output.introduced_variables.wildcards {
@ -422,7 +424,7 @@ pub fn canonicalize_module_defs<'a>(
};
let (mut declarations, mut output) =
crate::def::sort_can_defs_new(&mut env, &mut scope, var_store, defs, new_output);
crate::def::sort_can_defs_new(&mut scope, var_store, defs, new_output);
debug_assert!(
output.pending_derives.is_empty(),
@ -442,6 +444,7 @@ pub fn canonicalize_module_defs<'a>(
loc_ann.region,
var_store,
pending_abilities_in_scope,
AnnotationFor::Value,
);
ann.add_to(
@ -895,6 +898,15 @@ fn fix_values_captured_in_closure_pattern(
}
}
}
List { patterns, .. } => {
for loc_pat in patterns.patterns.iter_mut() {
fix_values_captured_in_closure_pattern(
&mut loc_pat.value,
no_capture_symbols,
closure_captures,
);
}
}
Identifier(_)
| NumLiteral(..)
| IntLiteral(..)
@ -1040,7 +1052,7 @@ fn fix_values_captured_in_closure_expr(
| Float(..)
| Str(_)
| SingleQuote(..)
| Var(_)
| Var(..)
| AbilityMember(..)
| EmptyRecord
| TypedHole { .. }

View file

@ -79,7 +79,7 @@ fn desugar_value_def<'a>(arena: &'a Bump, def: &'a ValueDef<'a>) -> ValueDef<'a>
ann_pattern,
ann_type,
comment: *comment,
body_pattern: *body_pattern,
body_pattern,
body_expr: desugar_expr(arena, body_expr),
},
Expect {

View file

@ -6,6 +6,7 @@ use crate::num::{
ParsedNumResult,
};
use crate::scope::{PendingAbilitiesInScope, Scope};
use roc_exhaustive::ListArity;
use roc_module::ident::{Ident, Lowercase, TagName};
use roc_module::symbol::Symbol;
use roc_parse::ast::{self, StrLiteral, StrSegment};
@ -56,6 +57,11 @@ pub enum Pattern {
ext_var: Variable,
destructs: Vec<Loc<RecordDestruct>>,
},
List {
list_var: Variable,
elem_var: Variable,
patterns: ListPatterns,
},
NumLiteral(Variable, Box<str>, IntValue, NumBound),
IntLiteral(Variable, Variable, Box<str>, IntValue, IntBound),
FloatLiteral(Variable, Variable, Box<str>, f64, FloatBound),
@ -92,6 +98,10 @@ impl Pattern {
AppliedTag { whole_var, .. } => Some(*whole_var),
UnwrappedOpaque { whole_var, .. } => Some(*whole_var),
RecordDestructure { whole_var, .. } => Some(*whole_var),
List {
list_var: whole_var,
..
} => Some(*whole_var),
NumLiteral(var, ..) => Some(*var),
IntLiteral(var, ..) => Some(*var),
FloatLiteral(var, ..) => Some(*var),
@ -119,6 +129,7 @@ impl Pattern {
| MalformedPattern(..)
| AbilityMemberSpecialization { .. } => true,
RecordDestructure { destructs, .. } => destructs.is_empty(),
List { patterns, .. } => patterns.surely_exhaustive(),
AppliedTag { .. }
| NumLiteral(..)
| IntLiteral(..)
@ -145,6 +156,7 @@ impl Pattern {
UnwrappedOpaque { opaque, .. } => C::Opaque(*opaque),
RecordDestructure { destructs, .. } if destructs.is_empty() => C::EmptyRecord,
RecordDestructure { .. } => C::Record,
List { .. } => C::List,
NumLiteral(..) => C::Num,
IntLiteral(..) => C::Int,
FloatLiteral(..) => C::Float,
@ -161,6 +173,36 @@ impl Pattern {
}
}
#[derive(Clone, Debug)]
pub struct ListPatterns {
pub patterns: Vec<Loc<Pattern>>,
/// Where a rest pattern splits patterns before and after it, if it does at all.
/// If present, patterns at index >= the rest index appear after the rest pattern.
/// For example:
/// [ .., A, B ] -> patterns = [A, B], rest = 0
/// [ A, .., B ] -> patterns = [A, B], rest = 1
/// [ A, B, .. ] -> patterns = [A, B], rest = 2
pub opt_rest: Option<usize>,
}
impl ListPatterns {
/// Is this list pattern the trivially-exhaustive pattern `[..]`?
fn surely_exhaustive(&self) -> bool {
self.patterns.is_empty() && matches!(self.opt_rest, Some(0))
}
pub fn arity(&self) -> ListArity {
match self.opt_rest {
Some(i) => {
let before = i;
let after = self.patterns.len() - before;
ListArity::Slice(before, after)
}
None => ListArity::Exact(self.patterns.len()),
}
}
}
#[derive(Clone, Debug)]
pub struct RecordDestruct {
pub var: Variable,
@ -621,6 +663,76 @@ pub fn canonicalize_pattern<'a>(
unreachable!("should have been handled in RecordDestructure");
}
List(patterns) => {
// We want to admit the following cases:
//
// []
// [..]
// [.., P_1,* P_n]
// [P_1,* P_n, ..]
// [P_1,* P_m, .., P_n,* P_q]
// [P_1,* P_n]
//
// So, a list-rest pattern can appear anywhere in a list pattern, but can appear at
// most once.
let elem_var = var_store.fresh();
let list_var = var_store.fresh();
let mut rest_index = None;
let mut can_pats = Vec::with_capacity(patterns.len());
let mut opt_erroneous = None;
for (i, loc_pattern) in patterns.iter().enumerate() {
match &loc_pattern.value {
ListRest => match rest_index {
None => {
rest_index = Some(i);
}
Some(_) => {
env.problem(Problem::MultipleListRestPattern {
region: loc_pattern.region,
});
opt_erroneous = Some(Pattern::MalformedPattern(
MalformedPatternProblem::DuplicateListRestPattern,
loc_pattern.region,
));
}
},
pattern => {
let pat = canonicalize_pattern(
env,
var_store,
scope,
output,
pattern_type,
pattern,
loc_pattern.region,
permit_shadows,
);
can_pats.push(pat);
}
}
}
// If we encountered an erroneous pattern (e.g. one with shadowing),
// use the resulting RuntimeError. Otherwise, return a successful record destructure.
opt_erroneous.unwrap_or(Pattern::List {
list_var,
elem_var,
patterns: ListPatterns {
patterns: can_pats,
opt_rest: rest_index,
},
})
}
ListRest => {
// Parsing should make sure these only appear in list patterns, where we will generate
// better contextual errors.
let problem = MalformedPatternProblem::Unknown;
malformed_pattern(env, problem, region)
}
Malformed(_str) => {
let problem = MalformedPatternProblem::Unknown;
malformed_pattern(env, problem, region)
@ -736,6 +848,9 @@ impl<'a> BindingsFromPattern<'a> {
| MalformedPattern(_, _)
| UnsupportedPattern(_)
| OpaqueNotInScope(..) => (),
List { patterns, .. } => {
stack.extend(patterns.patterns.iter().rev().map(Pattern));
}
}
}
BindingsFromPatternWork::Destruct(loc_destruct) => {

View file

@ -3,6 +3,7 @@ use roc_module::ident::Ident;
use roc_module::symbol::{IdentId, IdentIds, ModuleId, Symbol};
use roc_problem::can::RuntimeError;
use roc_region::all::{Loc, Region};
use roc_types::subs::Variable;
use roc_types::types::{Alias, AliasKind, AliasVar, Type};
use crate::abilities::PendingAbilitiesStore;
@ -388,10 +389,11 @@ impl Scope {
name: Symbol,
region: Region,
vars: Vec<Loc<AliasVar>>,
infer_ext_in_output_variables: Vec<Variable>,
typ: Type,
kind: AliasKind,
) {
let alias = create_alias(name, region, vars, typ, kind);
let alias = create_alias(name, region, vars, infer_ext_in_output_variables, typ, kind);
self.aliases.insert(name, alias);
}
@ -447,6 +449,7 @@ pub fn create_alias(
name: Symbol,
region: Region,
vars: Vec<Loc<AliasVar>>,
infer_ext_in_output_variables: Vec<Variable>,
typ: Type,
kind: AliasKind,
) -> Alias {
@ -459,14 +462,16 @@ pub fn create_alias(
debug_assert!({
let mut hidden = type_variables;
for loc_var in vars.iter() {
hidden.remove(&loc_var.value.var);
for var in (vars.iter().map(|lv| lv.value.var))
.chain(infer_ext_in_output_variables.iter().copied())
{
hidden.remove(&var);
}
if !hidden.is_empty() {
panic!(
"Found unbound type variables {:?} \n in type alias {:?} {:?} : {:?}",
hidden, name, &vars, &typ
"Found unbound type variables {:?} \n in type alias {:?} {:?} {:?} : {:?}",
hidden, name, &vars, &infer_ext_in_output_variables, &typ
)
}
@ -482,6 +487,7 @@ pub fn create_alias(
region,
type_variables: vars,
lambda_set_variables,
infer_ext_in_output_variables,
recursion_variables,
typ,
kind,

View file

@ -472,6 +472,12 @@ pub fn walk_pattern<V: Visitor>(visitor: &mut V, pattern: &Pattern) {
RecordDestructure { destructs, .. } => destructs
.iter()
.for_each(|d| visitor.visit_record_destruct(&d.value, d.region)),
List {
patterns, elem_var, ..
} => patterns
.patterns
.iter()
.for_each(|p| visitor.visit_pattern(&p.value, p.region, Some(*elem_var))),
NumLiteral(..) => { /* terminal */ }
IntLiteral(..) => { /* terminal */ }
FloatLiteral(..) => { /* terminal */ }

View file

@ -63,6 +63,7 @@ pub fn can_expr_with(arena: &Bump, home: ModuleId, expr_str: &str) -> CanExprOut
"a".into(),
Variable::EMPTY_RECORD,
))],
vec![],
Type::EmptyRec,
roc_types::types::AliasKind::Structural,
);

View file

@ -917,37 +917,6 @@ mod test_can {
assert_eq!(is_circular_def, false);
}
#[test]
fn invalid_self_recursion() {
let src = indoc!(
r#"
x = x
x
"#
);
let home = test_home();
let arena = Bump::new();
let CanExprOut {
loc_expr,
problems,
interns,
..
} = can_expr_with(&arena, home, src);
let is_circular_def = matches!(loc_expr.value, RuntimeError(RuntimeError::CircularDef(_)));
let problem = Problem::RuntimeError(RuntimeError::CircularDef(vec![CycleEntry {
symbol: interns.symbol(home, "x".into()),
symbol_region: Region::new(Position::new(0), Position::new(1)),
expr_region: Region::new(Position::new(4), Position::new(5)),
}]));
assert_eq!(is_circular_def, true);
assert_eq!(problems, vec![problem]);
}
#[test]
fn invalid_mutual_recursion() {
let src = indoc!(

View file

@ -1,6 +1,6 @@
use std::{borrow::Borrow, iter::FromIterator};
#[derive(Clone, Debug, PartialEq)]
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct VecSet<T> {
elements: Vec<T>,
}

View file

@ -1,5 +1,5 @@
use arrayvec::ArrayVec;
use roc_can::constraint::{Constraint, Constraints};
use roc_can::constraint::{Constraint, Constraints, TypeOrVar};
use roc_can::expected::Expected::{self, *};
use roc_can::num::{FloatBound, FloatWidth, IntBound, IntLitWidth, NumBound, SignDemand};
use roc_module::symbol::Symbol;
@ -30,29 +30,34 @@ pub fn add_numeric_bound_constr(
num_num(Variable(num_var))
}
NumericBound::FloatExact(width) => {
let actual_type = Variable(float_width_to_variable(width));
let actual_type = constraints.push_type(Variable(float_width_to_variable(width)));
let expected = Expected::ForReason(Reason::NumericLiteralSuffix, actual_type, region);
let type_index = constraints.push_type(Variable(num_var));
let expected_index = constraints.push_expected_type(expected);
let because_suffix =
constraints.equal_types(Variable(num_var), expected, category, region);
constraints.equal_types(type_index, expected_index, category, region);
num_constraints.extend([because_suffix]);
Variable(num_var)
}
NumericBound::IntExact(width) => {
let actual_type = Variable(int_lit_width_to_variable(width));
let actual_type = constraints.push_type(Variable(int_lit_width_to_variable(width)));
let expected = Expected::ForReason(Reason::NumericLiteralSuffix, actual_type, region);
let type_index = constraints.push_type(Variable(num_var));
let expected_index = constraints.push_expected_type(expected);
let because_suffix =
constraints.equal_types(Variable(num_var), expected, category, region);
constraints.equal_types(type_index, expected_index, category, region);
num_constraints.extend([because_suffix]);
Variable(num_var)
}
NumericBound::Range(range) => {
let actual_type = Variable(precision_var);
let expected = Expected::NoExpectation(RangedNumber(range));
let constr = constraints.equal_types(actual_type, expected, category, region);
let precision_type = constraints.push_type(Variable(precision_var));
let expected = Expected::NoExpectation(constraints.push_type(RangedNumber(range)));
let expected_index = constraints.push_expected_type(expected);
let constr = constraints.equal_types(precision_type, expected_index, category, region);
num_constraints.extend([constr]);
@ -66,7 +71,7 @@ pub fn int_literal(
constraints: &mut Constraints,
num_var: Variable,
precision_var: Variable,
expected: Expected<Type>,
expected: Expected<TypeOrVar>,
region: Region,
bound: IntBound,
) -> Constraint {
@ -84,14 +89,18 @@ pub fn int_literal(
Category::Num,
);
let num_type_index = constraints.push_type(num_type);
let int_precision_type = constraints.push_type(num_int(Type::Variable(precision_var)));
let expect_precision_var =
constraints.push_expected_type(ForReason(reason, int_precision_type, region));
constrs.extend([
constraints.equal_types(
num_type.clone(),
ForReason(reason, num_int(Type::Variable(precision_var)), region),
Category::Int,
region,
),
constraints.equal_types(num_type, expected, Category::Int, region),
constraints.equal_types(num_type_index, expect_precision_var, Category::Int, region),
{
let expected_index = constraints.push_expected_type(expected);
constraints.equal_types(num_type_index, expected_index, Category::Int, region)
},
]);
// TODO the precision_var is not part of the exists here; for float it is. Which is correct?
@ -103,7 +112,7 @@ pub fn single_quote_literal(
constraints: &mut Constraints,
num_var: Variable,
precision_var: Variable,
expected: Expected<Type>,
expected: Expected<TypeOrVar>,
region: Region,
bound: SingleQuoteBound,
) -> Constraint {
@ -121,14 +130,23 @@ pub fn single_quote_literal(
Category::Character,
);
let num_type_index = constraints.push_type(num_type);
let int_precision_type = constraints.push_type(num_int(Type::Variable(precision_var)));
let expect_precision_var =
constraints.push_expected_type(ForReason(reason, int_precision_type, region));
constrs.extend([
constraints.equal_types(
num_type.clone(),
ForReason(reason, num_int(Type::Variable(precision_var)), region),
num_type_index,
expect_precision_var,
Category::Character,
region,
),
constraints.equal_types(num_type, expected, Category::Character, region),
{
let expected_index = constraints.push_expected_type(expected);
constraints.equal_types(num_type_index, expected_index, Category::Character, region)
},
]);
let and_constraint = constraints.and_constraint(constrs);
@ -140,7 +158,7 @@ pub fn float_literal(
constraints: &mut Constraints,
num_var: Variable,
precision_var: Variable,
expected: Expected<Type>,
expected: Expected<TypeOrVar>,
region: Region,
bound: FloatBound,
) -> Constraint {
@ -157,14 +175,18 @@ pub fn float_literal(
Category::Frac,
);
let num_type_index = constraints.push_type(num_type);
let float_precision_type = constraints.push_type(num_float(Type::Variable(precision_var)));
let expect_precision_var =
constraints.push_expected_type(ForReason(reason, float_precision_type, region));
constrs.extend([
constraints.equal_types(
num_type.clone(),
ForReason(reason, num_float(Type::Variable(precision_var)), region),
Category::Frac,
region,
),
constraints.equal_types(num_type, expected, Category::Frac, region),
constraints.equal_types(num_type_index, expect_precision_var, Category::Frac, region),
{
let expected_index = constraints.push_expected_type(expected);
constraints.equal_types(num_type_index, expected_index, Category::Frac, region)
},
]);
let and_constraint = constraints.and_constraint(constrs);
@ -175,7 +197,7 @@ pub fn float_literal(
pub fn num_literal(
constraints: &mut Constraints,
num_var: Variable,
expected: Expected<Type>,
expected: Expected<TypeOrVar>,
region: Region,
bound: NumBound,
) -> Constraint {
@ -190,13 +212,18 @@ pub fn num_literal(
Category::Num,
);
constrs.extend([constraints.equal_types(num_type, expected, Category::Num, region)]);
let type_index = constraints.push_type(num_type);
let expected_index = constraints.push_expected_type(expected);
constrs.extend([constraints.equal_types(type_index, expected_index, Category::Num, region)]);
let and_constraint = constraints.and_constraint(constrs);
constraints.exists([num_var], and_constraint)
}
#[inline(always)]
// Try not to be too clever about inlining, at least in debug builds.
// Inlining these tiny leaf functions can lead to death by a thousand cuts,
// where we end up with huge stack frames in non-tail-recursive functions.
#[cfg_attr(not(debug_assertions), inline(always))]
pub fn builtin_type(symbol: Symbol, args: Vec<Type>) -> Type {
Type::Apply(
symbol,
@ -205,23 +232,23 @@ pub fn builtin_type(symbol: Symbol, args: Vec<Type>) -> Type {
)
}
#[inline(always)]
#[cfg_attr(not(debug_assertions), inline(always))]
pub fn empty_list_type(var: Variable) -> Type {
list_type(Type::Variable(var))
}
#[inline(always)]
#[cfg_attr(not(debug_assertions), inline(always))]
pub fn list_type(typ: Type) -> Type {
builtin_type(Symbol::LIST_LIST, vec![typ])
}
#[inline(always)]
#[cfg_attr(not(debug_assertions), inline(always))]
pub fn str_type() -> Type {
builtin_type(Symbol::STR_STR, Vec::new())
}
#[inline(always)]
fn builtin_alias(
#[cfg_attr(not(debug_assertions), inline(always))]
fn builtin_num_alias(
symbol: Symbol,
type_arguments: Vec<OptAbleType>,
actual: Box<Type>,
@ -232,13 +259,14 @@ fn builtin_alias(
type_arguments,
actual,
lambda_set_variables: vec![],
infer_ext_in_output_types: vec![],
kind,
}
}
#[inline(always)]
#[cfg_attr(not(debug_assertions), inline(always))]
pub fn num_float(range: Type) -> Type {
builtin_alias(
builtin_num_alias(
Symbol::NUM_FRAC,
vec![OptAbleType::unbound(range.clone())],
Box::new(num_num(num_floatingpoint(range))),
@ -246,9 +274,9 @@ pub fn num_float(range: Type) -> Type {
)
}
#[inline(always)]
#[cfg_attr(not(debug_assertions), inline(always))]
pub fn num_floatingpoint(range: Type) -> Type {
builtin_alias(
builtin_num_alias(
Symbol::NUM_FLOATINGPOINT,
vec![OptAbleType::unbound(range.clone())],
Box::new(range),
@ -256,9 +284,9 @@ pub fn num_floatingpoint(range: Type) -> Type {
)
}
#[inline(always)]
#[cfg_attr(not(debug_assertions), inline(always))]
pub fn num_u32() -> Type {
builtin_alias(
builtin_num_alias(
Symbol::NUM_U32,
vec![],
Box::new(num_int(num_unsigned32())),
@ -266,9 +294,9 @@ pub fn num_u32() -> Type {
)
}
#[inline(always)]
#[cfg_attr(not(debug_assertions), inline(always))]
fn num_unsigned32() -> Type {
builtin_alias(
builtin_num_alias(
Symbol::NUM_UNSIGNED32,
vec![],
Box::new(Type::EmptyTagUnion),
@ -276,9 +304,9 @@ fn num_unsigned32() -> Type {
)
}
#[inline(always)]
#[cfg_attr(not(debug_assertions), inline(always))]
pub fn num_binary64() -> Type {
builtin_alias(
builtin_num_alias(
Symbol::NUM_BINARY64,
vec![],
Box::new(Type::EmptyTagUnion),
@ -286,9 +314,9 @@ pub fn num_binary64() -> Type {
)
}
#[inline(always)]
#[cfg_attr(not(debug_assertions), inline(always))]
pub fn num_int(range: Type) -> Type {
builtin_alias(
builtin_num_alias(
Symbol::NUM_INT,
vec![OptAbleType::unbound(range.clone())],
Box::new(num_num(num_integer(range))),
@ -296,9 +324,9 @@ pub fn num_int(range: Type) -> Type {
)
}
#[inline(always)]
#[cfg_attr(not(debug_assertions), inline(always))]
pub fn num_signed64() -> Type {
builtin_alias(
builtin_num_alias(
Symbol::NUM_SIGNED64,
vec![],
Box::new(Type::EmptyTagUnion),
@ -306,9 +334,9 @@ pub fn num_signed64() -> Type {
)
}
#[inline(always)]
#[cfg_attr(not(debug_assertions), inline(always))]
pub fn num_integer(range: Type) -> Type {
builtin_alias(
builtin_num_alias(
Symbol::NUM_INTEGER,
vec![OptAbleType::unbound(range.clone())],
Box::new(range),
@ -316,9 +344,9 @@ pub fn num_integer(range: Type) -> Type {
)
}
#[inline(always)]
#[cfg_attr(not(debug_assertions), inline(always))]
pub fn num_num(typ: Type) -> Type {
builtin_alias(
builtin_num_alias(
Symbol::NUM_NUM,
vec![OptAbleType::unbound(typ.clone())],
Box::new(typ),

File diff suppressed because it is too large Load diff

View file

@ -50,8 +50,9 @@ fn constrain_symbols_from_requires(
};
let pattern = Loc::at_zero(roc_can::pattern::Pattern::Identifier(loc_symbol.value));
let type_index = constraints.push_type(loc_type.value);
let def_pattern_state =
constrain_def_pattern(constraints, &mut env, &pattern, loc_type.value);
constrain_def_pattern(constraints, &mut env, &pattern, type_index);
debug_assert!(env.resolutions_to_make.is_empty());
@ -69,18 +70,18 @@ fn constrain_symbols_from_requires(
// Otherwise, this symbol comes from an app module - we want to check that the type
// provided by the app is in fact what the package module requires.
let arity = loc_type.value.arity();
let provided_eq_requires_constr = constraints.lookup(
loc_symbol.value,
Expected::FromAnnotation(
loc_symbol.map(|&s| Pattern::Identifier(s)),
arity,
AnnotationSource::RequiredSymbol {
region: loc_type.region,
},
loc_type.value,
),
loc_type.region,
);
let typ = loc_type.value;
let type_index = constraints.push_type(typ);
let expected = constraints.push_expected_type(Expected::FromAnnotation(
loc_symbol.map(|&s| Pattern::Identifier(s)),
arity,
AnnotationSource::RequiredSymbol {
region: loc_type.region,
},
type_index,
));
let provided_eq_requires_constr =
constraints.lookup(loc_symbol.value, expected, loc_type.region);
constraints.and_constraint([provided_eq_requires_constr, constraint])
}
})
@ -108,12 +109,10 @@ pub fn frontload_ability_constraints(
};
let pattern = Loc::at_zero(roc_can::pattern::Pattern::Identifier(*member_name));
let mut def_pattern_state = constrain_def_pattern(
constraints,
&mut env,
&pattern,
Type::Variable(signature_var),
);
let signature_index = constraints.push_type(signature.clone());
let mut def_pattern_state =
constrain_def_pattern(constraints, &mut env, &pattern, signature_index);
debug_assert!(env.resolutions_to_make.is_empty());
@ -122,11 +121,14 @@ pub fn frontload_ability_constraints(
let rigid_variables = vars.rigid_vars.iter().chain(vars.able_vars.iter()).copied();
let infer_variables = vars.flex_vars.iter().copied();
let signature_expectation =
constraints.push_expected_type(Expected::NoExpectation(signature_index));
def_pattern_state
.constraints
.push(constraints.equal_types_var(
signature_var,
Expected::NoExpectation(signature.clone()),
signature_expectation,
Category::Storage(file!(), line!()),
Region::zero(),
));

View file

@ -1,9 +1,9 @@
use crate::builtins;
use crate::expr::{constrain_expr, Env};
use roc_can::constraint::{Constraint, Constraints};
use roc_can::constraint::{Constraint, Constraints, TypeOrVar};
use roc_can::expected::{Expected, PExpected};
use roc_can::pattern::Pattern::{self, *};
use roc_can::pattern::{DestructType, RecordDestruct};
use roc_can::pattern::{DestructType, ListPatterns, RecordDestruct};
use roc_collections::all::{HumanIndex, SendMap};
use roc_collections::VecMap;
use roc_module::ident::Lowercase;
@ -17,7 +17,7 @@ use roc_types::types::{
#[derive(Default, Debug)]
pub struct PatternState {
pub headers: VecMap<Symbol, Loc<Type>>,
pub headers: VecMap<Symbol, Loc<TypeOrVar>>,
pub vars: Vec<Variable>,
pub constraints: Vec<Constraint>,
pub delayed_is_open_constraints: Vec<Constraint>,
@ -31,14 +31,16 @@ pub struct PatternState {
/// Would add `x => <42>` to the headers (i.e., symbol points to a type variable). If the
/// definition has an annotation, we instead now add `x => Int`.
pub fn headers_from_annotation(
constraints: &mut Constraints,
pattern: &Pattern,
annotation: &Loc<&Type>,
) -> Option<VecMap<Symbol, Loc<Type>>> {
) -> Option<VecMap<Symbol, Loc<TypeOrVar>>> {
let mut headers = VecMap::default();
// Check that the annotation structurally agrees with the pattern, preventing e.g. `{ x, y } : Int`
// in such incorrect cases we don't put the full annotation in headers, just a variable, and let
// inference generate a proper error.
let is_structurally_valid = headers_from_annotation_help(pattern, annotation, &mut headers);
let is_structurally_valid =
headers_from_annotation_help(constraints, pattern, annotation, &mut headers);
if is_structurally_valid {
Some(headers)
@ -48,9 +50,10 @@ pub fn headers_from_annotation(
}
fn headers_from_annotation_help(
constraints: &mut Constraints,
pattern: &Pattern,
annotation: &Loc<&Type>,
headers: &mut VecMap<Symbol, Loc<Type>>,
headers: &mut VecMap<Symbol, Loc<TypeOrVar>>,
) -> bool {
match pattern {
Identifier(symbol)
@ -60,7 +63,8 @@ fn headers_from_annotation_help(
ident: symbol,
specializes: _,
} => {
let typ = Loc::at(annotation.region, annotation.value.clone());
let annotation_index = constraints.push_type(annotation.value.clone());
let typ = Loc::at(annotation.region, annotation_index);
headers.insert(*symbol, typ);
true
}
@ -87,9 +91,10 @@ fn headers_from_annotation_help(
// `{ x ? 0 } = rec` or `{ x: 5 } -> ...` in all cases
// the type of `x` within the binding itself is the same.
if let Some(field_type) = fields.get(&destruct.label) {
let field_type_index = constraints.push_type(field_type.as_inner().clone());
headers.insert(
destruct.symbol,
Loc::at(annotation.region, field_type.clone().into_inner()),
Loc::at(annotation.region, field_type_index),
);
} else {
return false;
@ -101,6 +106,14 @@ fn headers_from_annotation_help(
_ => false,
},
List { .. } => {
// There are no interesting headers to introduce for list patterns, since the only
// exhaustive list pattern is
// \[..] -> <body>
// which does not introduce any symbols.
false
},
AppliedTag {
tag_name,
arguments,
@ -117,6 +130,7 @@ fn headers_from_annotation_help(
.zip(arg_types.iter())
.all(|(arg_pattern, arg_type)| {
headers_from_annotation_help(
constraints,
&arg_pattern.1.value,
&Loc::at(annotation.region, arg_type),
headers,
@ -143,15 +157,18 @@ fn headers_from_annotation_help(
actual,
type_arguments,
lambda_set_variables,
infer_ext_in_output_types: _,
} if symbol == opaque
&& type_arguments.len() == pat_type_arguments.len()
&& lambda_set_variables.len() == pat_lambda_set_variables.len() =>
{
let typ = Loc::at(annotation.region, annotation.value.clone());
let annotation_index = constraints.push_type(annotation.value.clone());
let typ = Loc::at(annotation.region, annotation_index);
headers.insert(*opaque, typ);
let (_, argument_pat) = &**argument;
headers_from_annotation_help(
constraints,
&argument_pat.value,
&Loc::at(annotation.region, actual),
headers,
@ -170,7 +187,7 @@ pub fn constrain_pattern(
env: &mut Env,
pattern: &Pattern,
region: Region,
expected: PExpected<Type>,
expected: PExpected<TypeOrVar>,
state: &mut PatternState,
) {
match pattern {
@ -181,10 +198,12 @@ pub fn constrain_pattern(
// A -> ""
// _ -> ""
// so, we know that "x" (in this case, a tag union) must be open.
if could_be_a_tag_union(expected.get_type_ref()) {
if could_be_a_tag_union(constraints, *expected.get_type_ref()) {
let type_index = expected.get_type();
state
.delayed_is_open_constraints
.push(constraints.is_open_type(expected.get_type()));
.push(constraints.is_open_type(type_index));
}
}
UnsupportedPattern(_) | MalformedPattern(_, _) | OpaqueNotInScope(..) => {
@ -192,17 +211,19 @@ pub fn constrain_pattern(
}
Identifier(symbol) | Shadowed(_, _, symbol) => {
if could_be_a_tag_union(expected.get_type_ref()) {
let type_index = *expected.get_type_ref();
if could_be_a_tag_union(constraints, type_index) {
state
.delayed_is_open_constraints
.push(constraints.is_open_type(expected.get_type_ref().clone()));
.push(constraints.is_open_type(type_index));
}
state.headers.insert(
*symbol,
Loc {
region,
value: expected.get_type(),
value: type_index,
},
);
}
@ -211,17 +232,17 @@ pub fn constrain_pattern(
ident: symbol,
specializes: _,
} => {
if could_be_a_tag_union(expected.get_type_ref()) {
state
.constraints
.push(constraints.is_open_type(expected.get_type_ref().clone()));
let type_index = *expected.get_type_ref();
if could_be_a_tag_union(constraints, type_index) {
state.constraints.push(constraints.is_open_type(type_index));
}
state.headers.insert(
*symbol,
Loc {
region,
value: expected.get_type(),
value: type_index,
},
);
}
@ -238,6 +259,9 @@ pub fn constrain_pattern(
region,
Category::Num,
);
let num_type = constraints.push_type(num_type);
let expected = constraints.push_pat_expected_type(expected);
state.constraints.push(constraints.equal_pattern_types(
num_type,
@ -259,18 +283,19 @@ pub fn constrain_pattern(
region,
Category::Int,
);
let num_type = constraints.push_type(num_type);
// Link the free num var with the int var and our expectation.
let int_type = builtins::num_int(Type::Variable(precision_var));
let int_type = constraints.push_type(builtins::num_int(Type::Variable(precision_var)));
state.constraints.push(constraints.equal_types(
num_type.clone(), // TODO check me if something breaks!
Expected::NoExpectation(int_type),
Category::Int,
region,
));
state.constraints.push({
let expected_index =
constraints.push_expected_type(Expected::NoExpectation(int_type));
constraints.equal_types(num_type, expected_index, Category::Int, region)
});
// Also constrain the pattern against the num var, again to reuse aliases if they're present.
let expected = constraints.push_pat_expected_type(expected);
state.constraints.push(constraints.equal_pattern_types(
num_type,
expected,
@ -291,20 +316,22 @@ pub fn constrain_pattern(
region,
Category::Frac,
);
let num_type_index = constraints.push_type(num_type); // TODO check me if something breaks!
// Link the free num var with the float var and our expectation.
let float_type = builtins::num_float(Type::Variable(precision_var));
let float_type =
constraints.push_type(builtins::num_float(Type::Variable(precision_var)));
state.constraints.push(constraints.equal_types(
num_type.clone(), // TODO check me if something breaks!
Expected::NoExpectation(float_type),
Category::Frac,
region,
));
state.constraints.push({
let expected_index =
constraints.push_expected_type(Expected::NoExpectation(float_type));
constraints.equal_types(num_type_index, expected_index, Category::Frac, region)
});
// Also constrain the pattern against the num var, again to reuse aliases if they're present.
let expected = constraints.push_pat_expected_type(expected);
state.constraints.push(constraints.equal_pattern_types(
num_type, // TODO check me if something breaks!
num_type_index,
expected,
PatternCategory::Float,
region,
@ -312,8 +339,10 @@ pub fn constrain_pattern(
}
StrLiteral(_) => {
let str_type = constraints.push_type(builtins::str_type());
let expected = constraints.push_pat_expected_type(expected);
state.constraints.push(constraints.equal_pattern_types(
builtins::str_type(),
str_type,
expected,
PatternCategory::Str,
region,
@ -333,19 +362,26 @@ pub fn constrain_pattern(
Category::Int,
);
// Link the free num var with the int var and our expectation.
let int_type = builtins::num_int(Type::Variable(precision_var));
let num_type_index = constraints.push_type(num_type);
state.constraints.push(constraints.equal_types(
num_type.clone(), // TODO check me if something breaks!
Expected::NoExpectation(int_type),
Category::Int,
region,
));
// Link the free num var with the int var and our expectation.
let int_type = constraints.push_type(builtins::num_int(Type::Variable(precision_var)));
state.constraints.push({
let expected_index =
constraints.push_expected_type(Expected::NoExpectation(int_type));
constraints.equal_types(
num_type_index, // TODO check me if something breaks!
expected_index,
Category::Int,
region,
)
});
// Also constrain the pattern against the num var, again to reuse aliases if they're present.
let expected = constraints.push_pat_expected_type(expected);
state.constraints.push(constraints.equal_pattern_types(
num_type,
num_type_index,
expected,
PatternCategory::Character,
region,
@ -375,23 +411,28 @@ pub fn constrain_pattern(
} in destructs
{
let pat_type = Type::Variable(*var);
let expected = PExpected::NoExpectation(pat_type.clone());
let pat_type_index = constraints.push_type(pat_type.clone());
let expected = PExpected::NoExpectation(pat_type_index);
if !state.headers.contains_key(symbol) {
state
.headers
.insert(*symbol, Loc::at(region, pat_type.clone()));
.insert(*symbol, Loc::at(region, pat_type_index));
}
let field_type = match typ {
DestructType::Guard(guard_var, loc_guard) => {
state.constraints.push(constraints.pattern_presence(
Type::Variable(*guard_var),
PExpected::ForReason(
let guard_type = constraints.push_type(Type::Variable(*guard_var));
let expected_pat =
constraints.push_pat_expected_type(PExpected::ForReason(
PReason::PatternGuard,
pat_type.clone(),
pat_type_index,
loc_guard.region,
),
));
state.constraints.push(constraints.pattern_presence(
guard_type,
expected_pat,
PatternCategory::PatternGuard,
region,
));
@ -409,13 +450,17 @@ pub fn constrain_pattern(
RecordField::Demanded(pat_type)
}
DestructType::Optional(expr_var, loc_expr) => {
state.constraints.push(constraints.pattern_presence(
Type::Variable(*expr_var),
PExpected::ForReason(
let expr_type = constraints.push_type(Type::Variable(*expr_var));
let expected_pat =
constraints.push_pat_expected_type(PExpected::ForReason(
PReason::OptionalField,
pat_type.clone(),
pat_type_index,
loc_expr.region,
),
));
state.constraints.push(constraints.pattern_presence(
expr_type,
expected_pat,
PatternCategory::PatternDefault,
region,
));
@ -424,7 +469,7 @@ pub fn constrain_pattern(
let expr_expected = Expected::ForReason(
Reason::RecordDefaultField(label.clone()),
pat_type.clone(),
pat_type_index,
loc_expr.region,
);
@ -450,17 +495,25 @@ pub fn constrain_pattern(
state.vars.push(*var);
}
let record_type = Type::Record(field_types, TypeExtension::from_type(ext_type));
let record_type = constraints.push_type(Type::Record(
field_types,
TypeExtension::from_type(ext_type),
));
let whole_var_index = constraints.push_type(Type::Variable(*whole_var));
let expected_record =
constraints.push_expected_type(Expected::NoExpectation(record_type));
let whole_con = constraints.equal_types(
Type::Variable(*whole_var),
Expected::NoExpectation(record_type),
whole_var_index,
expected_record,
Category::Storage(std::file!(), std::line!()),
region,
);
let expected = constraints.push_pat_expected_type(expected);
let record_con = constraints.pattern_presence(
Type::Variable(*whole_var),
whole_var_index,
expected,
PatternCategory::Record,
region,
@ -469,18 +522,66 @@ pub fn constrain_pattern(
state.constraints.push(whole_con);
state.constraints.push(record_con);
}
List {
list_var,
elem_var,
patterns:
ListPatterns {
patterns,
opt_rest: _,
},
} => {
let elem_var_index = constraints.push_type(Type::Variable(*elem_var));
for loc_pat in patterns.iter() {
let expected =
PExpected::ForReason(PReason::ListElem, elem_var_index, loc_pat.region);
constrain_pattern(
constraints,
env,
&loc_pat.value,
loc_pat.region,
expected,
state,
);
}
let list_var_index = constraints.push_type(Type::Variable(*list_var));
let solved_list = constraints.push_type(Type::Apply(
Symbol::LIST_LIST,
vec![Loc::at(region, Type::Variable(*elem_var))],
region,
));
let store_solved_list = constraints.store(solved_list, *list_var, file!(), line!());
let expected = constraints.push_pat_expected_type(expected);
let expected_constraint = constraints.pattern_presence(
list_var_index,
expected,
PatternCategory::List,
region,
);
state.vars.push(*list_var);
state.vars.push(*elem_var);
state.constraints.push(store_solved_list);
state.constraints.push(expected_constraint);
}
AppliedTag {
whole_var,
ext_var,
tag_name,
arguments,
} => {
let mut argument_types = Vec::with_capacity(arguments.len());
let argument_types = constraints.variable_slice(arguments.iter().map(|(var, _)| *var));
for (index, (pattern_var, loc_pattern)) in arguments.iter().enumerate() {
state.vars.push(*pattern_var);
let pattern_type = Type::Variable(*pattern_var);
argument_types.push(pattern_type.clone());
let pattern_type = constraints.push_type(Type::Variable(*pattern_var));
let expected = PExpected::ForReason(
PReason::TagArg {
@ -501,21 +602,20 @@ pub fn constrain_pattern(
}
let pat_category = PatternCategory::Ctor(tag_name.clone());
let expected_type = *expected.get_type_ref();
let whole_con = constraints.includes_tag(
expected.clone().get_type(),
expected_type,
tag_name.clone(),
argument_types.clone(),
argument_types,
pat_category.clone(),
region,
);
let tag_con = constraints.pattern_presence(
Type::Variable(*whole_var),
expected,
pat_category,
region,
);
let whole_type = constraints.push_type(Type::Variable(*whole_var));
let expected = constraints.push_pat_expected_type(expected);
let tag_con = constraints.pattern_presence(whole_type, expected, pat_category, region);
state.vars.push(*whole_var);
state.vars.push(*ext_var);
@ -534,23 +634,25 @@ pub fn constrain_pattern(
// Suppose we are constraining the pattern \@Id who, where Id n := [Id U64 n]
let (arg_pattern_var, loc_arg_pattern) = &**argument;
let arg_pattern_type = Type::Variable(*arg_pattern_var);
let arg_pattern_type_index = constraints.push_type(Type::Variable(*arg_pattern_var));
let opaque_type = Type::Alias {
let opaque_type = constraints.push_type(Type::Alias {
symbol: *opaque,
type_arguments: type_arguments
.iter()
.map(|v| OptAbleType {
typ: Type::Variable(v.var),
opt_ability: v.opt_ability,
opt_abilities: v.opt_abilities.clone(),
})
.collect(),
lambda_set_variables: lambda_set_variables.clone(),
infer_ext_in_output_types: vec![],
actual: Box::new(arg_pattern_type.clone()),
kind: AliasKind::Opaque,
};
});
// First, add a constraint for the argument "who"
let arg_pattern_expected = PExpected::NoExpectation(arg_pattern_type.clone());
let arg_pattern_expected = PExpected::NoExpectation(arg_pattern_type_index);
constrain_pattern(
constraints,
env,
@ -561,9 +663,12 @@ pub fn constrain_pattern(
);
// Next, link `whole_var` to the opaque type of "@Id who"
let whole_var_index = constraints.push_type(Type::Variable(*whole_var));
let expected_opaque =
constraints.push_expected_type(Expected::NoExpectation(opaque_type));
let whole_con = constraints.equal_types(
Type::Variable(*whole_var),
Expected::NoExpectation(opaque_type),
whole_var_index,
expected_opaque,
Category::Storage(std::file!(), std::line!()),
region,
);
@ -581,16 +686,23 @@ pub fn constrain_pattern(
// This must **always** be a presence constraint, that is enforcing
// `[A k1, B k1] += typeof (A s)`, because we are in a destructure position and not
// all constructors are covered in this branch!
let arg_pattern_type = constraints.push_type(arg_pattern_type);
let specialized_type_index = constraints.push_type((**specialized_def_type).clone());
let specialized_type_expected = constraints
.push_pat_expected_type(PExpected::NoExpectation(specialized_type_index));
let link_type_variables_con = constraints.pattern_presence(
arg_pattern_type,
PExpected::NoExpectation((**specialized_def_type).clone()),
specialized_type_expected,
PatternCategory::Opaque(*opaque),
loc_arg_pattern.region,
);
// Next, link `whole_var` (the type of "@Id who") to the expected type
let whole_type = constraints.push_type(Type::Variable(*whole_var));
let expected = constraints.push_pat_expected_type(expected);
let opaque_pattern_con = constraints.pattern_presence(
Type::Variable(*whole_var),
whole_type,
expected,
PatternCategory::Opaque(*opaque),
region,
@ -614,6 +726,18 @@ pub fn constrain_pattern(
}
}
fn could_be_a_tag_union(typ: &Type) -> bool {
!matches!(typ, Type::Apply(..) | Type::Function(..) | Type::Record(..))
fn could_be_a_tag_union(constraints: &mut Constraints, typ: TypeOrVar) -> bool {
match typ.split() {
Ok(typ_index) => {
let typ_cell = &mut constraints.types[typ_index.index()];
!matches!(
typ_cell.get_mut(),
Type::Apply(..) | Type::Function(..) | Type::Record(..)
)
}
Err(_) => {
// Variables are opaque at this point, assume yes
true
}
}
}

View file

@ -357,7 +357,7 @@ fn decoder_record_step_field(
name: "Ok".into(),
arguments: vec![(
field_var,
Loc::at_zero(Expr::Var(ok_val_symbol)),
Loc::at_zero(Expr::Var(ok_val_symbol, field_var)),
)],
})),
},
@ -417,7 +417,7 @@ fn decoder_record_step_field(
name: "Err".into(),
arguments: vec![(
decode_err_var,
Loc::at_zero(Expr::Var(err_val_symbol)),
Loc::at_zero(Expr::Var(err_val_symbol, decode_err_var)),
)],
}),
guard: None,
@ -433,7 +433,7 @@ fn decoder_record_step_field(
record_var: rec_var,
ext_var: env.new_ext_var(ExtensionKind::Record),
field_var: rec_dot_result,
loc_expr: Box::new(Loc::at_zero(Expr::Var(rec_symbol))),
loc_expr: Box::new(Loc::at_zero(Expr::Var(rec_symbol, rec_var))),
field: "result".into(),
})),
cond_var: rec_dot_result,
@ -462,7 +462,7 @@ fn decoder_record_step_field(
record_var: rec_var,
ext_var: env.new_ext_var(ExtensionKind::Record),
field_var: Variable::LIST_U8,
loc_expr: Box::new(Loc::at_zero(Expr::Var(rec_symbol))),
loc_expr: Box::new(Loc::at_zero(Expr::Var(rec_symbol, rec_var))),
field: "rest".into(),
})),
},
@ -499,12 +499,15 @@ fn decoder_record_step_field(
let condition_expr = Expr::Call(
Box::new((
this_decode_with_var,
Loc::at_zero(Expr::Var(Symbol::DECODE_DECODE_WITH)),
Loc::at_zero(Expr::Var(Symbol::DECODE_DECODE_WITH, this_decode_with_var)),
lambda_set_var,
rec_var,
)),
vec![
(Variable::LIST_U8, Loc::at_zero(Expr::Var(bytes_arg_symbol))),
(
Variable::LIST_U8,
Loc::at_zero(Expr::Var(bytes_arg_symbol, Variable::LIST_U8)),
),
(
decoder_var,
Loc::at_zero(Expr::AbilityMember(
@ -513,7 +516,10 @@ fn decoder_record_step_field(
decoder_var,
)),
),
(fmt_arg_var, Loc::at_zero(Expr::Var(fmt_arg_symbol))),
(
fmt_arg_var,
Loc::at_zero(Expr::Var(fmt_arg_symbol, fmt_arg_var)),
),
],
CalledVia::Space,
);
@ -600,7 +606,7 @@ fn decoder_record_step_field(
Expr::Call(
Box::new((
this_decode_custom_var,
Loc::at_zero(Expr::Var(Symbol::DECODE_CUSTOM)),
Loc::at_zero(Expr::Var(Symbol::DECODE_CUSTOM, this_decode_custom_var)),
decode_custom_closure_var,
decode_custom_ret_var,
)),
@ -676,7 +682,7 @@ fn decoder_record_step_field(
// when field is
let body = Expr::When {
loc_cond: Box::new(Loc::at_zero(Expr::Var(field_arg_symbol))),
loc_cond: Box::new(Loc::at_zero(Expr::Var(field_arg_symbol, Variable::STR))),
cond_var: Variable::STR,
expr_var: keep_or_skip_var,
region: Region::zero(),
@ -764,7 +770,7 @@ fn decoder_record_finalizer(
pattern_symbols.push(symbol);
let field_expr = Expr::Var(symbol);
let field_expr = Expr::Var(symbol, field_var);
let field = Field {
var: field_var,
region: Region::zero(),
@ -827,7 +833,7 @@ fn decoder_record_finalizer(
record_var: state_record_var,
ext_var: env.new_ext_var(ExtensionKind::Record),
field_var: result_field_var,
loc_expr: Box::new(Loc::at_zero(Expr::Var(state_arg_symbol))),
loc_expr: Box::new(Loc::at_zero(Expr::Var(state_arg_symbol, state_record_var))),
field: field_name.clone(),
};
@ -1126,7 +1132,7 @@ fn wrap_in_decode_custom_decode_with(
// ~ bytes, Decoder (List elem) fmt, fmt -> DecoderResult (List val)
env.unify(decode_with_type, this_decode_with_fn_var);
let decode_with_var = Var(Symbol::DECODE_DECODE_WITH);
let decode_with_var = Var(Symbol::DECODE_DECODE_WITH, this_decode_with_fn_var);
let decode_with_fn = Box::new((
this_decode_with_fn_var,
Loc::at_zero(decode_with_var),
@ -1137,9 +1143,9 @@ fn wrap_in_decode_custom_decode_with(
decode_with_fn,
vec![
// bytes inner_decoder fmt
(bytes_var, Loc::at_zero(Var(bytes_sym))),
(bytes_var, Loc::at_zero(Var(bytes_sym, bytes_var))),
(inner_decoder_var, Loc::at_zero(inner_decoder)),
(fmt_var, Loc::at_zero(Var(fmt_sym))),
(fmt_var, Loc::at_zero(Var(fmt_sym, fmt_var))),
],
CalledVia::Space,
);
@ -1231,7 +1237,7 @@ fn wrap_in_decode_custom_decode_with(
// ~ (List U8, fmt -> DecodeResult (List elem)) -> Decoder (List elem) fmt
env.unify(decode_custom_type, this_decode_custom_fn_var);
let decode_custom_var = Var(Symbol::DECODE_CUSTOM);
let decode_custom_var = Var(Symbol::DECODE_CUSTOM, this_decode_custom_fn_var);
let decode_custom_fn = Box::new((
this_decode_custom_fn_var,
Loc::at_zero(decode_custom_var),

View file

@ -134,7 +134,7 @@ fn to_encoder_list(env: &mut Env<'_>, fn_name: Symbol) -> (Expr, Variable) {
// toEncoder elem
let to_encoder_call = Call(
to_encoder_fn,
vec![(elem_var, Loc::at_zero(Var(elem_sym)))],
vec![(elem_var, Loc::at_zero(Var(elem_sym, elem_var)))],
CalledVia::Space,
);
@ -217,7 +217,7 @@ fn to_encoder_list(env: &mut Env<'_>, fn_name: Symbol) -> (Expr, Variable) {
let encode_list_call = Call(
encode_list_fn,
vec![
(list_var, Loc::at_zero(Var(lst_sym))),
(list_var, Loc::at_zero(Var(lst_sym, list_var))),
(to_elem_encoder_fn_var, Loc::at_zero(to_elem_encoder)),
],
CalledVia::Space,
@ -314,7 +314,10 @@ fn to_encoder_record(
record_var,
ext_var: env.subs.fresh_unnamed_flex_var(),
field_var,
loc_expr: Box::new(Loc::at_zero(Var(rcd_sym))),
loc_expr: Box::new(Loc::at_zero(Var(
rcd_sym,
env.subs.fresh_unnamed_flex_var(),
))),
field: field_name,
};
@ -572,7 +575,7 @@ fn to_encoder_tag_union(
// toEncoder rcd.a
let to_encoder_call = Call(
to_encoder_fn,
vec![(sym_var, Loc::at_zero(Var(sym)))],
vec![(sym_var, Loc::at_zero(Var(sym, sym_var)))],
CalledVia::Space,
);
@ -662,7 +665,7 @@ fn to_encoder_tag_union(
// A v1 v2 -> Encode.tag "A" [ Encode.toEncoder v1, Encode.toEncoder v2 ]
// B v3 -> Encode.tag "B" [ Encode.toEncoder v3 ]
let when_branches = When {
loc_cond: Box::new(Loc::at_zero(Var(tag_sym))),
loc_cond: Box::new(Loc::at_zero(Var(tag_sym, tag_union_var))),
cond_var: tag_union_var,
expr_var: whole_tag_encoders_var,
region: Region::zero(),
@ -778,7 +781,7 @@ fn wrap_in_encode_custom(
// Encode.appendWith : List U8, encoder_var, fmt -[appendWith]-> List U8 | fmt has EncoderFormatting
let append_with_fn = Box::new((
this_append_with_fn_var,
Loc::at_zero(Var(Symbol::ENCODE_APPEND_WITH)),
Loc::at_zero(Var(Symbol::ENCODE_APPEND_WITH, this_append_with_fn_var)),
this_append_with_clos_var,
Variable::LIST_U8,
));
@ -788,11 +791,11 @@ fn wrap_in_encode_custom(
append_with_fn,
vec![
// (bytes_var, bytes)
(bytes_var, Loc::at_zero(Var(bytes_sym))),
(bytes_var, Loc::at_zero(Var(bytes_sym, bytes_var))),
// (encoder_var, encoder)
(encoder_var, Loc::at_zero(encoder)),
// (fmt, fmt_var)
(fmt_var, Loc::at_zero(Var(fmt_sym))),
(fmt_var, Loc::at_zero(Var(fmt_sym, fmt_var))),
],
CalledVia::Space,
);
@ -869,7 +872,7 @@ fn wrap_in_encode_custom(
// Encode.custom : (List U8, fmt -> List U8) -> Encoder fmt | fmt has EncoderFormatting
let custom_fn = Box::new((
this_custom_fn_var,
Loc::at_zero(Var(Symbol::ENCODE_CUSTOM)),
Loc::at_zero(Var(Symbol::ENCODE_CUSTOM, this_custom_fn_var)),
this_custom_clos_var, // -[clos]->
this_custom_encoder_var, // t' ~ Encoder fmt
));

View file

@ -19,7 +19,8 @@ use roc_types::{
num::int_lit_width_to_variable,
subs::{
Content, ExhaustiveMark, FlatType, GetSubsSlice, LambdaSet, OptVariable, RecordFields,
RedundantMark, SubsIndex, SubsSlice, UnionLambdas, UnionTags, Variable, VariableSubsSlice,
RedundantMark, Subs, SubsIndex, SubsSlice, UnionLambdas, UnionTags, Variable,
VariableSubsSlice,
},
types::RecordField,
};
@ -87,10 +88,10 @@ fn hash_record(env: &mut Env<'_>, fn_name: Symbol, fields: Vec<Lowercase>) -> (V
let rcd_sym = env.new_symbol("rcd");
let hasher_sym = env.new_symbol("hasher");
let hasher_var = synth_var(env.subs, Content::FlexAbleVar(None, Symbol::HASH_HASHER));
let hasher_var = synth_var(env.subs, Content::FlexAbleVar(None, Subs::AB_HASHER));
let (body_var, body) = record_fields.iter_all().fold(
(hasher_var, Expr::Var(hasher_sym)),
(hasher_var, Expr::Var(hasher_sym, hasher_var)),
|total_hasher, (field_name, field_var, _)| {
let field_name = env.subs[field_name].clone();
let field_var = env.subs[field_var];
@ -99,7 +100,10 @@ fn hash_record(env: &mut Env<'_>, fn_name: Symbol, fields: Vec<Lowercase>) -> (V
record_var,
field_var,
ext_var: env.subs.fresh_unnamed_flex_var(),
loc_expr: Box::new(Loc::at_zero(Expr::Var(rcd_sym))),
loc_expr: Box::new(Loc::at_zero(Expr::Var(
rcd_sym,
env.subs.fresh_unnamed_flex_var(),
))),
field: field_name,
};
@ -162,7 +166,7 @@ fn hash_tag_union(
let union_sym = env.new_symbol("union");
let hasher_sym = env.new_symbol("hasher");
let hasher_var = synth_var(env.subs, Content::FlexAbleVar(None, Symbol::HASH_HASHER));
let hasher_var = synth_var(env.subs, Content::FlexAbleVar(None, Subs::AB_HASHER));
let (discr_width, discr_precision_var, hash_discr_member) = if union_tags.len() > u64::MAX as _
{
@ -215,7 +219,7 @@ fn hash_tag_union(
let (discr_hasher_var, disc_hasher_expr) = call_hash_ability_member(
env,
hash_discr_member,
(hasher_var, Expr::Var(hasher_sym)),
(hasher_var, Expr::Var(hasher_sym, hasher_var)),
(
discr_num_var,
Expr::Int(
@ -232,7 +236,11 @@ fn hash_tag_union(
let (body_var, body_expr) = (payload_vars.into_iter()).zip(payload_syms).fold(
(discr_hasher_var, disc_hasher_expr),
|total_hasher, (payload_var, payload_sym)| {
call_hash_hash(env, total_hasher, (payload_var, Expr::Var(payload_sym)))
call_hash_hash(
env,
total_hasher,
(payload_var, Expr::Var(payload_sym, payload_var)),
)
},
);
@ -251,7 +259,7 @@ fn hash_tag_union(
// ...
let when_var = whole_hasher_var;
let when_expr = Expr::When {
loc_cond: Box::new(Loc::at_zero(Expr::Var(union_sym))),
loc_cond: Box::new(Loc::at_zero(Expr::Var(union_sym, union_var))),
cond_var: union_var,
expr_var: when_var,
region: Region::zero(),
@ -313,7 +321,7 @@ fn hash_newtype_tag_union(
// hash_union = \hasher, A x1 .. xn ->
// Hash.hash (... (Hash.hash discrHasher x1) ...) xn
let hasher_sym = env.new_symbol("hasher");
let hasher_var = synth_var(env.subs, Content::FlexAbleVar(None, Symbol::HASH_HASHER));
let hasher_var = synth_var(env.subs, Content::FlexAbleVar(None, Subs::AB_HASHER));
// A
let tag_name = tag_name;
@ -338,9 +346,13 @@ fn hash_newtype_tag_union(
// Fold up `Hash.hash (... (Hash.hash discrHasher x11) ...) x1n`
let (body_var, body_expr) = (payload_vars.into_iter()).zip(payload_syms).fold(
(hasher_var, Expr::Var(hasher_sym)),
(hasher_var, Expr::Var(hasher_sym, hasher_var)),
|total_hasher, (payload_var, payload_sym)| {
call_hash_hash(env, total_hasher, (payload_var, Expr::Var(payload_sym)))
call_hash_hash(
env,
total_hasher,
(payload_var, Expr::Var(payload_sym, payload_var)),
)
},
);

View file

@ -99,14 +99,11 @@ impl DerivedModule {
exposed_by_module: &ExposedByModule,
key: DeriveKey,
) -> &(Symbol, Def, SpecializationLambdaSets) {
match self.map.get(&key) {
Some(entry) => {
// rustc won't let us return an immutable reference *and* continue using
// `self.map` immutably below, but this is safe, because we are not returning
// an immutable reference to the entry.
return unsafe { std::mem::transmute(entry) };
}
None => {}
if let Some(entry) = self.map.get(&key) {
// rustc won't let us return an immutable reference *and* continue using
// `self.map` immutably below, but this is safe, because we are not returning
// an immutable reference to the entry.
return unsafe { std::mem::transmute(entry) };
}
let ident_id = if cfg!(debug_assertions) || cfg!(feature = "debug-derived-symbols") {

View file

@ -1,7 +1,10 @@
use roc_can::{abilities::SpecializationLambdaSets, module::ExposedByModule};
use roc_error_macros::internal_error;
use roc_module::symbol::{IdentIds, Symbol};
use roc_types::subs::{instantiate_rigids, Subs, Variable};
use roc_types::{
subs::{instantiate_rigids, Subs, Variable},
types::Polarity,
};
use crate::DERIVED_SYNTH;
@ -69,7 +72,13 @@ impl Env<'_> {
pub fn unify(&mut self, left: Variable, right: Variable) {
use roc_unify::unify::{unify, Env, Mode, Unified};
let unified = unify(&mut Env::new(self.subs), left, right, Mode::EQ);
let unified = unify(
&mut Env::new(self.subs),
left,
right,
Mode::EQ,
Polarity::OF_PATTERN,
);
match unified {
Unified::Success {

View file

@ -123,19 +123,19 @@ impl FlatHash {
Ok(SingleLambdaSetImmediate(Symbol::HASH_ADD_U128))
}
Symbol::NUM_I8 | Symbol::NUM_SIGNED8 => {
Ok(SingleLambdaSetImmediate(Symbol::HASH_ADD_I8))
Ok(SingleLambdaSetImmediate(Symbol::HASH_HASH_I8))
}
Symbol::NUM_I16 | Symbol::NUM_SIGNED16 => {
Ok(SingleLambdaSetImmediate(Symbol::HASH_ADD_I16))
Ok(SingleLambdaSetImmediate(Symbol::HASH_HASH_I16))
}
Symbol::NUM_I32 | Symbol::NUM_SIGNED32 => {
Ok(SingleLambdaSetImmediate(Symbol::HASH_ADD_I32))
Ok(SingleLambdaSetImmediate(Symbol::HASH_HASH_I32))
}
Symbol::NUM_I64 | Symbol::NUM_SIGNED64 => {
Ok(SingleLambdaSetImmediate(Symbol::HASH_ADD_I64))
Ok(SingleLambdaSetImmediate(Symbol::HASH_HASH_I64))
}
Symbol::NUM_I128 | Symbol::NUM_SIGNED128 => {
Ok(SingleLambdaSetImmediate(Symbol::HASH_ADD_I128))
Ok(SingleLambdaSetImmediate(Symbol::HASH_HASH_I128))
}
// NB: I believe it is okay to unwrap opaques here because derivers are only used
// by the backend, and the backend treats opaques like structural aliases.

View file

@ -25,7 +25,7 @@ use hash::{FlatHash, FlatHashKey};
use roc_module::symbol::Symbol;
use roc_types::subs::{Subs, Variable};
#[derive(Debug, PartialEq)]
#[derive(Debug, PartialEq, Eq)]
pub enum DeriveError {
/// Unbound variable present in the type-to-derive. It may be possible to derive for this type
/// once the unbound variable is resolved.

View file

@ -9,3 +9,4 @@ edition = "2021"
roc_collections = { path = "../collections" }
roc_region = { path = "../region" }
roc_module = { path = "../module" }
roc_error_macros = { path = "../../error_macros" }

View file

@ -2,6 +2,7 @@
//! http://moscova.inria.fr/~maranget/papers/warn/warn.pdf
use roc_collections::all::{HumanIndex, MutMap};
use roc_error_macros::internal_error;
use roc_module::{
ident::{Lowercase, TagIdIntType, TagName},
symbol::Symbol,
@ -69,6 +70,54 @@ pub enum Pattern {
Anything,
Literal(Literal),
Ctor(Union, TagId, std::vec::Vec<Pattern>),
List(ListArity, std::vec::Vec<Pattern>),
}
/// The arity of list pattern.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ListArity {
/// A list pattern of an exact size.
Exact(usize),
/// A list pattern matching a variable size, where `Slice(before, after)` refers to the number
/// of elements that must be present before and after the variable rest pattern, respectively.
///
/// For example,
/// [..] => Slice(0, 0)
/// [A, .., B] => Slice(1, 1)
/// [A, B, ..] => Slice(2, 0)
/// [.., A, B] => Slice(0, 2)
Slice(usize, usize),
}
impl ListArity {
/// The trivially-exhaustive list pattern `[..]`
const ANY: ListArity = ListArity::Slice(0, 0);
pub fn min_len(&self) -> usize {
match self {
ListArity::Exact(n) => *n,
ListArity::Slice(l, r) => l + r,
}
}
/// Could this list pattern include list pattern arity `other`?
fn covers_arities_of(&self, other: &Self) -> bool {
self.covers_length(other.min_len())
}
pub fn covers_length(&self, length: usize) -> bool {
match self {
ListArity::Exact(l) => {
// [_, _, _] can only cover [_, _, _]
*l == length
}
ListArity::Slice(head, tail) => {
// [_, _, .., _] can cover infinite arities >=3 , including
// [_, _, .., _], [_, .., _, _], [_, _, .., _, _], [_, _, _, .., _, _], and so on
head + tail <= length
}
}
}
}
#[derive(Clone, Debug, PartialEq, Eq)]
@ -141,15 +190,16 @@ pub fn check(
/// The initial count of items per row "n" is also 1
/// The resulting rows are examples of missing patterns
fn is_exhaustive(matrix: &RefPatternMatrix, n: usize) -> PatternMatrix {
if matrix.is_empty() {
vec![std::iter::repeat(Anything).take(n).collect()]
let ctors = if matrix.is_empty() {
return vec![std::iter::repeat(Anything).take(n).collect()];
} else if n == 0 {
vec![]
return vec![];
} else {
let ctors = collect_ctors(matrix);
let num_seen = ctors.len();
collect_ctors(matrix)
};
if num_seen == 0 {
match ctors {
CollectedCtors::NonExhaustiveAny => {
let new_matrix: Vec<_> = matrix
.iter()
.filter_map(|row| specialize_row_by_anything(row))
@ -161,7 +211,11 @@ fn is_exhaustive(matrix: &RefPatternMatrix, n: usize) -> PatternMatrix {
}
rest
} else {
}
CollectedCtors::Ctors(ctors) => {
debug_assert!(!ctors.is_empty());
let num_seen = ctors.len();
let alts = ctors.iter().next().unwrap().1;
let alt_list = &alts.alternatives;
@ -193,7 +247,7 @@ fn is_exhaustive(matrix: &RefPatternMatrix, n: usize) -> PatternMatrix {
let is_alt_exhaustive = |Ctor { arity, tag_id, .. }| {
let new_matrix: Vec<_> = matrix
.iter()
.filter_map(|r| specialize_row_by_ctor(tag_id, arity, r))
.filter_map(|r| specialize_row_by_ctor(tag_id, arity, r.to_owned()))
.collect();
let rest: Vec<Vec<Pattern>> = is_exhaustive(&new_matrix, arity + n - 1);
@ -212,6 +266,21 @@ fn is_exhaustive(matrix: &RefPatternMatrix, n: usize) -> PatternMatrix {
.collect()
}
}
CollectedCtors::NonExhaustiveList(alt_lists) => {
let is_alt_exhaustive = |arity: ListArity| {
let new_matrix: Vec<_> = matrix
.iter()
.filter_map(|row| specialize_row_by_list(arity, row.to_owned()))
.collect();
let rest = is_exhaustive(&new_matrix, arity.min_len() + n - 1);
rest.into_iter()
.map(move |row_not_covered| recover_list(arity, row_not_covered))
};
alt_lists.into_iter().flat_map(is_alt_exhaustive).collect()
}
}
}
@ -232,14 +301,23 @@ fn recover_ctor(
arity: usize,
mut patterns: Vec<Pattern>,
) -> Vec<Pattern> {
let mut rest = patterns.split_off(arity);
let args = patterns;
let args = patterns.split_off(patterns.len() - arity);
let mut rest = patterns;
rest.push(Ctor(union, tag_id, args));
rest
}
/// Undo list specialization: pop the trailing `arity.min_len()` element
/// patterns off the row and fold them back into a single `List` pattern.
fn recover_list(arity: ListArity, mut patterns: Vec<Pattern>) -> Vec<Pattern> {
    let elems_start = patterns.len() - arity.min_len();
    let list_elems = patterns.split_off(elems_start);
    patterns.push(List(arity, list_elems));
    patterns
}
/// Check if a new row "vector" is useful given previous rows "matrix"
pub fn is_useful(mut old_matrix: PatternMatrix, mut vector: Row) -> bool {
let mut matrix = Vec::with_capacity(old_matrix.len());
@ -262,13 +340,53 @@ pub fn is_useful(mut old_matrix: PatternMatrix, mut vector: Row) -> bool {
match first_pattern {
// keep checking rows that start with this Ctor or Anything
Ctor(_, id, args) => {
specialize_row_by_ctor2(id, args.len(), &mut old_matrix, &mut matrix);
specialize_matrix_by_ctor(id, args.len(), &mut old_matrix, &mut matrix);
std::mem::swap(&mut old_matrix, &mut matrix);
vector.extend(args);
}
List(arity, args) => {
// Check if there any specialized constructor of this list pattern
// that is useful.
let spec_list_ctors = build_list_ctors_covering_patterns(
arity,
filter_matrix_list_ctors(&old_matrix),
);
debug_assert!(!spec_list_ctors.is_empty());
if spec_list_ctors.len() == 1 {
specialize_matrix_by_list(
spec_list_ctors[0],
&mut old_matrix,
&mut matrix,
);
std::mem::swap(&mut old_matrix, &mut matrix);
vector.extend(args);
} else {
// TODO turn this into an iteration over the outer loop rather than bouncing
vector.extend(args);
for list_ctor in spec_list_ctors {
let mut old_matrix = old_matrix.clone();
let mut spec_matrix = Vec::with_capacity(old_matrix.len());
specialize_matrix_by_list(
list_ctor,
&mut old_matrix,
&mut spec_matrix,
);
if is_useful(spec_matrix, vector.clone()) {
return true;
}
}
return false;
}
}
Anything => {
// check if all alternatives appear in matrix
match is_complete(&old_matrix) {
@ -293,7 +411,7 @@ pub fn is_useful(mut old_matrix: PatternMatrix, mut vector: Row) -> bool {
let mut old_matrix = old_matrix.clone();
let mut matrix = vec![];
specialize_row_by_ctor2(
specialize_matrix_by_ctor(
tag_id,
arity,
&mut old_matrix,
@ -330,6 +448,8 @@ pub fn is_useful(mut old_matrix: PatternMatrix, mut vector: Row) -> bool {
}
Some(Anything) => matrix.push(patterns),
Some(List(..)) => internal_error!("After type checking, lists and literals should never align in exhaustiveness checking"),
Some(Ctor(_, _, _)) => panic!(
r#"Compiler bug! After type checking, constructors and literals should never align in pattern match exhaustiveness checks."#
),
@ -347,67 +467,118 @@ pub fn is_useful(mut old_matrix: PatternMatrix, mut vector: Row) -> bool {
}
}
/// INVARIANT: (length row == N) ==> (length result == arity + N - 1)
fn specialize_row_by_ctor2(
tag_id: TagId,
arity: usize,
// Specialize rows in the matrix that match a list's constructor(s).
//
// See the docs on [build_list_ctors_covering_patterns] for more information on how list
// constructors are built up.
fn specialize_matrix_by_list(
spec_arity: ListArity,
old_matrix: &mut PatternMatrix,
matrix: &mut PatternMatrix,
spec_matrix: &mut PatternMatrix,
) {
for mut row in old_matrix.drain(..) {
let head = row.pop();
let mut patterns = row;
match head {
Some(Ctor(_, id, args)) =>
if id == tag_id {
patterns.extend(args);
matrix.push(patterns);
} else {
// do nothing
}
Some(Anything) => {
// TODO order!
patterns.extend(std::iter::repeat(Anything).take(arity));
matrix.push(patterns);
}
Some(Literal(_)) => panic!( "Compiler bug! After type checking, constructors and literal should never align in pattern match exhaustiveness checks."),
None => panic!("Compiler error! Empty matrices should not get specialized."),
}
for row in old_matrix.drain(..) {
if let Some(spec_row) = specialize_row_by_list(spec_arity, row) {
spec_matrix.push(spec_row);
}
}
}
/// INVARIANT: (length row == N) ==> (length result == arity + N - 1)
fn specialize_row_by_ctor(tag_id: TagId, arity: usize, row: &RefRow) -> Option<Row> {
let mut row = row.to_vec();
// Specialize a row that matches a list's constructor(s).
//
// See the docs on [build_list_ctors_covering_patterns] for more information on how list
// constructors are built up.
fn specialize_row_by_list(spec_arity: ListArity, mut row: Row) -> Option<Row> {
let head = row.pop();
let patterns = row;
let mut spec_patterns = row;
match head {
Some(Ctor(_, id, args)) => {
if id == tag_id {
// TODO order!
let mut new_patterns = Vec::new();
new_patterns.extend(args);
new_patterns.extend(patterns);
Some(new_patterns)
Some(List(this_arity, args)) => {
if this_arity.covers_arities_of(&spec_arity) {
// This pattern covers the constructor we are specializing, so add on the
// specialized fields of this pattern relative to the given constructor.
if spec_arity.min_len() != this_arity.min_len() {
// This list pattern covers the list we are specializing, so it must be
// a variable-length slice, i.e. of the form `[before, .., after]`.
//
// Hence, the list we're specializing for must have at least a larger minimum length.
// So we fill the middle part with enough wildcards to reach the length of
// list constructor we're specializing for.
debug_assert!(spec_arity.min_len() > this_arity.min_len());
match this_arity {
ListArity::Exact(_) => internal_error!("exact-sized lists cannot cover lists of other minimum length"),
ListArity::Slice(before, after) => {
let before = &args[..before];
let after = &args[this_arity.min_len() - after..];
let num_extra_wildcards = spec_arity.min_len() - this_arity.min_len();
let extra_wildcards = std::iter::repeat(&Anything).take(num_extra_wildcards);
let new_pats = (before.iter().chain(extra_wildcards).chain(after)).cloned();
spec_patterns.extend(new_pats);
}
}
} else {
debug_assert_eq!(this_arity.min_len(), spec_arity.min_len());
spec_patterns.extend(args);
}
Some(spec_patterns)
} else {
None
}
}
Some(Anything) => {
// TODO order!
let new_patterns = std::iter::repeat(Anything)
.take(arity)
.chain(patterns)
.collect();
Some(new_patterns)
// The specialized fields for a `Anything` pattern with a list constructor is just
// `Anything` repeated for the number of times we want to see the list pattern.
spec_patterns.extend(std::iter::repeat(Anything).take(spec_arity.min_len()));
Some(spec_patterns)
}
Some(Literal(_)) => unreachable!(
r#"Compiler bug! After type checking, a constructor can never align with a literal: that should be a type error!"#
Some(Ctor(..)) => internal_error!("After type checking, lists and constructors should never align in exhaustiveness checking"),
Some(Literal(..)) => internal_error!("After type checking, lists and literals should never align in exhaustiveness checking"),
None => internal_error!("Empty matrices should not get specialized"),
}
}
/// INVARIANT: (length row == N) ==> (length result == arity + N - 1)
fn specialize_matrix_by_ctor(
    tag_id: TagId,
    arity: usize,
    old_matrix: &mut PatternMatrix,
    matrix: &mut PatternMatrix,
) {
    // Move every row out of `old_matrix`, keeping only those compatible with
    // this constructor, each specialized against it.
    matrix.extend(
        old_matrix
            .drain(..)
            .filter_map(|row| specialize_row_by_ctor(tag_id, arity, row)),
    );
}
/// INVARIANT: (length row == N) ==> (length result == arity + N - 1)
fn specialize_row_by_ctor(tag_id: TagId, arity: usize, mut row: Row) -> Option<Row> {
let head = row.pop();
let mut spec_patterns = row;
match head {
Some(Ctor(_, id, args)) => {
if id == tag_id {
spec_patterns.extend(args);
Some(spec_patterns)
} else {
None
}
}
Some(Anything) => {
spec_patterns.extend(std::iter::repeat(Anything).take(arity));
Some(spec_patterns)
}
Some(List(..)) => {
internal_error!(r#"After type checking, a constructor can never align with a list"#)
}
Some(Literal(_)) => internal_error!(
r#"After type checking, a constructor can never align with a literal: that should be a type error!"#
),
None => panic!("Compiler error! Empty matrices should not get specialized."),
None => internal_error!("Empty matrices should not get specialized."),
}
}
@ -430,16 +601,21 @@ pub enum Complete {
fn is_complete(matrix: &RefPatternMatrix) -> Complete {
let ctors = collect_ctors(matrix);
let length = ctors.len();
let mut it = ctors.into_iter();
match ctors {
CollectedCtors::NonExhaustiveAny | CollectedCtors::NonExhaustiveList(_) => Complete::No,
CollectedCtors::Ctors(ctors) => {
let length = ctors.len();
let mut it = ctors.into_iter();
match it.next() {
None => Complete::No,
Some((_, Union { alternatives, .. })) => {
if length == alternatives.len() {
Complete::Yes(alternatives)
} else {
Complete::No
match it.next() {
None => Complete::No,
Some((_, Union { alternatives, .. })) => {
if length == alternatives.len() {
Complete::Yes(alternatives)
} else {
Complete::No
}
}
}
}
}
@ -452,14 +628,219 @@ type PatternMatrix = Vec<Vec<Pattern>>;
type RefRow = [Pattern];
type Row = Vec<Pattern>;
fn collect_ctors(matrix: &RefPatternMatrix) -> MutMap<TagId, Union> {
let mut ctors = MutMap::default();
/// The outcome of scanning the first (rightmost) column of a pattern matrix
/// for constructors — see `collect_ctors`.
enum CollectedCtors {
    // The column starts with a wildcard or a literal, so no finite set of
    // constructors can be enumerated for it.
    NonExhaustiveAny,
    // The column is matched by list patterns; carries the list arities that
    // must be enumerated to cover all list constructors.
    NonExhaustiveList(Vec<ListArity>),
    // The column is matched by tag constructors, keyed by tag id with their
    // unions.
    Ctors(MutMap<TagId, Union>),
}
for row in matrix {
if let Some(Ctor(union, id, _)) = row.last() {
ctors.insert(*id, union.clone());
}
fn collect_ctors(matrix: &RefPatternMatrix) -> CollectedCtors {
if matrix.is_empty() {
return CollectedCtors::NonExhaustiveAny;
}
ctors
let first_row = &matrix[0];
if let Some(ctor) = first_row.last() {
match ctor {
Anything => CollectedCtors::NonExhaustiveAny,
Pattern::Literal(_) => CollectedCtors::NonExhaustiveAny,
List(_, _) => {
let list_ctors = build_list_ctors_covering_patterns(
ListArity::ANY,
filter_matrix_list_ctors(matrix),
);
CollectedCtors::NonExhaustiveList(list_ctors)
}
Pattern::Ctor(_, _, _) => {
let mut ctors = MutMap::default();
for row in matrix {
if let Some(Ctor(union, id, _)) = row.last() {
ctors.insert(*id, union.clone());
}
}
CollectedCtors::Ctors(ctors)
}
}
} else {
CollectedCtors::NonExhaustiveAny
}
}
/// Largely derived from Rust's list-pattern exhaustiveness checking algorithm: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_mir_build/thir/pattern/usefulness/index.html
/// Dual-licensed under MIT and Apache licenses.
/// Thank you, Rust contributors.
///
/// Calculates the list constructors that are covered by a given [slice constructor][ListArity::Slice],
/// relative to other list constructors matched by a series of patterns.
///
/// This is relevant for both exhaustiveness and redundancy checking; to understand the motivation,
/// let's start with the exhaustiveness checking case.
///
/// # Exhaustiveness Checking
///
/// All list constructors are exhausted by the pattern [..], which actually represents the infinite
/// series of constructors
/// []
/// [_]
/// [_, _]
/// ...
///
/// But we don't need to enumerate that infinite series to check if a series of list patterns is exhaustive -
/// we only need to enumerate a finite number of constructors, up to the largest exact-size list
/// pattern not covered by the patterns, or the largest slice pattern covered by the patterns.
///
/// ## Exact-sized patterns
///
/// Say we have patterns
/// [_] -> ..
/// [_, _] -> ..
/// To exhaustiveness-check these patterns, we only need to build the subset of `[..]` constructors
/// []
/// [_]
/// [_, _]
/// [_, _, _, ..]
/// to cover all list constructors that may or may not be matched by the patterns (in this case
/// not, because `[]` is not matched, and the last constructor `[_, _, _, ..]` is not matched).
///
/// We include `[_, _, _, ..]` here because during exhaustiveness checking, we specialize list
/// patterns **by exact size**, not by ranges. That means that if we stopped enumerating the
/// constructors needed at `[_, _, ..]`, when specializing the list patterns against `[_, _, ..]`,
/// we would see that the last pattern `[_, _] -> ..` exhausts it.
///
/// So, in the presence of exact-size constructors, we want to include a slice constructor that is
/// larger than all other exact-size list patterns.
///
/// ## Slice patterns
///
/// Say we have patterns
/// [1] -> ..
/// [2, ..] -> ..
/// now it's enough to just build
/// []
/// [_, ..]
/// as possible constructors, since the last constructor `[_, ..]` will specialize both patterns to
/// [1] -> ..
/// [2] -> ..
/// and if these patterns are exhaustive w.r.t. their arguments (`1` and `2`, which they are not,
/// since number literals are not exhaustive), then the whole pattern must be exhaustive, since the
/// largest slice constructor `[_, ..]` will cover the remaining infinite number of list constructors.
///
/// You can see that this holds with slice constructors that match elements at their head and tail
/// as well:
/// [{}, ..] -> ..
/// [.., {}] -> ..
/// Here again it's enough to just build the constructors [] and [_, ..] to match against -
/// notice that above slices of arity `1`, the patterns above do not provide any more information,
/// since they match any additional elements at the tail and head, respectively.
///
/// So, if they are exhaustive at arity `1`, they must be exhaustive at any higher arity.
///
/// In fact, in this case, if we are matching against `List {}`, the second pattern is redundant!
///
/// # Redundancy checking
///
/// Redundancy checking (in general, and for list patterns) is the same as exhaustiveness checking,
/// except that instead of checking whether `[..]` is covered by all patterns, we want to check if
/// the list constructor of a pattern introduces any more information than previous patterns we've
/// seen.
///
/// Let's say we're redundancy checking the pattern marked by `*`
/// [] -> ..
/// [_] -> ..
/// (*) [.., _] -> ..
///
/// The list constructors this pattern introduces are the infinite series [_], [_, _], ...
/// But the only ones that matter, relative to the patterns we've already seen, are
/// [_]
/// [_, _]
/// (Notice that the enumeration algorithm is the same as for `[..]` in the presence of exact-size
/// slices, just that the starting size differs - due to the tail matched by this pattern)
///
/// During checking we'll see that the `[_, _]` pattern is not already covered, so `[.., _]` is in
/// fact not redundant.
///
/// On the other hand, suppose we have
/// [] -> ..
/// [_, ..] -> ..
/// (*) [.., _] -> ..
///
/// Again enumerating the relevant constructors of `[.., _]` relative to the other patterns, we find
/// them to be
/// []
/// [.., _]
/// the first is already matched by the first pattern `[] -> ..`, and the latter specialized to
/// `[_]`, which in fact is covered by the second pattern `[_, ..] -> ..`. So the pattern marked by (*)
/// is indeed redundant.
///
/// # All together
///
/// So the idea to cover the infinite # of list constructors enumerated by a [slice][ListArity::Slice],
/// while specializing to the constructors that the user has provided, is as follows:
/// - Build [exact][ListArity::Exact] constructor variants for everything up to the max slice
/// constructor size, L.
/// - Then, the infinite # of list constructors is covered by the [0..L) exact-size constructors, and
/// the last slice constructor, that covers size [L..∞).
///
/// If we might only see [exact][ListArity::Exact] constructors along the way, we want to pick the
/// max slice size L that is larger than all of those exact size constructors.
///
/// But for slice constructors, we can just pick the largest slice, since that will cover slices of
/// that size, and any larger size.
///
/// Putting that together, we calculate L via
///
/// L = max(max_exact_len + 1, max_prefix_len + max_suffix_len)
fn build_list_ctors_covering_patterns(
    list_arity: ListArity,
    list_pattern_arities: impl IntoIterator<Item = ListArity>,
) -> std::vec::Vec<ListArity> {
    match list_arity {
        // Exact-size lists can only cover themselves.
        ListArity::Exact(_) => vec![list_arity],
        ListArity::Slice(prefix_len, suffix_len) => {
            let min_len = prefix_len + suffix_len;

            // Find the largest exact-size pattern, and the widest prefix and
            // suffix over all slice patterns (seeded with this slice's own).
            let mut max_exact_len = 0;
            let mut max_prefix_len = prefix_len;
            let mut max_suffix_len = suffix_len;
            for arity in list_pattern_arities {
                match arity {
                    ListArity::Exact(exact) => {
                        max_exact_len = max_exact_len.max(exact);
                    }
                    ListArity::Slice(prefix, suffix) => {
                        max_prefix_len = max_prefix_len.max(prefix);
                        max_suffix_len = max_suffix_len.max(suffix);
                    }
                }
            }

            // L = max(max_exact_len + 1, max_prefix_len + max_suffix_len):
            // widen the prefix when needed so the final slice constructor is
            // strictly larger than every exact-size pattern seen.
            if max_exact_len + 1 >= max_prefix_len + max_suffix_len {
                max_prefix_len = max_exact_len + 1 - max_suffix_len;
            }
            let inf_cover_prefix = max_prefix_len;
            let inf_cover_suffix = max_suffix_len;
            let l = inf_cover_prefix + inf_cover_suffix;

            // Exact constructors for sizes [min_len, L), then one slice
            // constructor covering sizes [L, ∞).
            let mut ctors: std::vec::Vec<ListArity> =
                (min_len..l).map(ListArity::Exact).collect();
            ctors.push(ListArity::Slice(inf_cover_prefix, inf_cover_suffix));
            ctors
        }
    }
}
/// Yields the arity of every row whose last (head) pattern is a list pattern.
fn filter_matrix_list_ctors(matrix: &RefPatternMatrix) -> impl Iterator<Item = ListArity> + '_ {
    matrix.iter().filter_map(|row| {
        if let Some(List(arity, _)) = row.last() {
            Some(*arity)
        } else {
            None
        }
    })
}

View file

@ -166,8 +166,8 @@ impl<'a> Formattable for TypeAnnotation<'a> {
Wildcard | Inferred | BoundVariable(_) | Malformed(_) => false,
Function(args, result) => {
(&result.value).is_multiline()
|| args.iter().any(|loc_arg| (&loc_arg.value).is_multiline())
result.value.is_multiline()
|| args.iter().any(|loc_arg| loc_arg.value.is_multiline())
}
Apply(_, _, args) => args.iter().any(|loc_arg| loc_arg.value.is_multiline()),
As(lhs, _, _) => lhs.value.is_multiline(),
@ -226,7 +226,7 @@ impl<'a> Formattable for TypeAnnotation<'a> {
buf.newline();
}
(&argument.value).format_with_options(
argument.value.format_with_options(
buf,
Parens::InFunctionType,
Newlines::No,
@ -251,7 +251,8 @@ impl<'a> Formattable for TypeAnnotation<'a> {
buf.push_str("->");
buf.spaces(1);
(&ret.value).format_with_options(buf, Parens::InFunctionType, Newlines::No, indent);
ret.value
.format_with_options(buf, Parens::InFunctionType, Newlines::No, indent);
if needs_parens {
buf.push(')')
@ -275,12 +276,9 @@ impl<'a> Formattable for TypeAnnotation<'a> {
for argument in *arguments {
buf.spaces(1);
(&argument.value).format_with_options(
buf,
Parens::InApply,
Newlines::No,
indent,
);
argument
.value
.format_with_options(buf, Parens::InApply, Newlines::No, indent);
}
if write_parens {
@ -371,12 +369,12 @@ impl<'a> Formattable for AssignedField<'a, TypeAnnotation<'a>> {
fn format_with_options<'buf>(
&self,
buf: &mut Buf<'buf>,
parens: Parens,
_parens: Parens,
newlines: Newlines,
indent: u16,
) {
// we abuse the `Newlines` type to decide between multiline or single-line layout
format_assigned_field_help(self, buf, parens, indent, 1, newlines == Newlines::Yes);
format_assigned_field_help(self, buf, indent, 1, newlines == Newlines::Yes);
}
}
@ -388,12 +386,12 @@ impl<'a> Formattable for AssignedField<'a, Expr<'a>> {
fn format_with_options<'buf>(
&self,
buf: &mut Buf<'buf>,
parens: Parens,
_parens: Parens,
newlines: Newlines,
indent: u16,
) {
// we abuse the `Newlines` type to decide between multiline or single-line layout
format_assigned_field_help(self, buf, parens, indent, 0, newlines == Newlines::Yes);
format_assigned_field_help(self, buf, indent, 0, newlines == Newlines::Yes);
}
}
@ -413,7 +411,6 @@ fn is_multiline_assigned_field_help<T: Formattable>(afield: &AssignedField<'_, T
fn format_assigned_field_help<'a, 'buf, T>(
zelf: &AssignedField<'a, T>,
buf: &mut Buf<'buf>,
parens: Parens,
indent: u16,
separator_spaces: usize,
is_multiline: bool,
@ -466,24 +463,10 @@ fn format_assigned_field_help<'a, 'buf, T>(
}
AssignedField::SpaceBefore(sub_field, spaces) => {
fmt_comments_only(buf, spaces.iter(), NewlineAt::Bottom, indent);
format_assigned_field_help(
sub_field,
buf,
parens,
indent,
separator_spaces,
is_multiline,
);
format_assigned_field_help(sub_field, buf, indent, separator_spaces, is_multiline);
}
AssignedField::SpaceAfter(sub_field, spaces) => {
format_assigned_field_help(
sub_field,
buf,
parens,
indent,
separator_spaces,
is_multiline,
);
format_assigned_field_help(sub_field, buf, indent, separator_spaces, is_multiline);
fmt_comments_only(buf, spaces.iter(), NewlineAt::Bottom, indent);
}
Malformed(raw) => {
@ -497,7 +480,7 @@ impl<'a> Formattable for Tag<'a> {
use self::Tag::*;
match self {
Apply { args, .. } => args.iter().any(|arg| (&arg.value).is_multiline()),
Apply { args, .. } => args.iter().any(|arg| arg.value.is_multiline()),
Tag::SpaceBefore(_, _) | Tag::SpaceAfter(_, _) => true,
Malformed(text) => text.chars().any(|c| c == '\n'),
}
@ -546,7 +529,8 @@ impl<'a> Formattable for Tag<'a> {
impl<'a> Formattable for HasClause<'a> {
fn is_multiline(&self) -> bool {
self.ability.is_multiline()
// No, always put abilities in a "has" clause on one line
false
}
fn format_with_options<'buf>(
@ -560,8 +544,15 @@ impl<'a> Formattable for HasClause<'a> {
buf.spaces(1);
buf.push_str("has");
buf.spaces(1);
self.ability
.format_with_options(buf, parens, newlines, indent);
for (i, ab) in self.abilities.iter().enumerate() {
if i > 0 {
buf.spaces(1);
buf.push('&');
buf.spaces(1);
}
ab.format_with_options(buf, parens, newlines, indent);
}
}
}

View file

@ -542,7 +542,7 @@ fn fmt_binops<'a, 'buf>(
indent: u16,
) {
let is_multiline = part_of_multi_line_binops
|| (&loc_right_side.value).is_multiline()
|| loc_right_side.value.is_multiline()
|| lefts.iter().any(|(expr, _)| expr.value.is_multiline());
for (loc_left_side, loc_binop) in lefts {
@ -1045,7 +1045,7 @@ fn fmt_closure<'a, 'buf>(
buf.push_str("->");
let is_multiline = (&loc_ret.value).is_multiline();
let is_multiline = loc_ret.value.is_multiline();
// If the body is multiline, go down a line and indent.
let body_indent = if is_multiline {
@ -1156,7 +1156,7 @@ fn fmt_backpassing<'a, 'buf>(
buf.push_str("<-");
let is_multiline = (&loc_ret.value).is_multiline();
let is_multiline = loc_ret.value.is_multiline();
// If the body is multiline, go down a line and indent.
let body_indent = if is_multiline {

View file

@ -40,7 +40,10 @@ impl<'a> Formattable for Pattern<'a> {
| Pattern::Underscore(_)
| Pattern::Malformed(_)
| Pattern::MalformedIdent(_, _)
| Pattern::QualifiedIdentifier { .. } => false,
| Pattern::QualifiedIdentifier { .. }
| Pattern::ListRest => false,
Pattern::List(patterns) => patterns.iter().any(|p| p.is_multiline()),
}
}
@ -149,6 +152,7 @@ impl<'a> Formattable for Pattern<'a> {
}
StrLiteral(literal) => fmt_str_literal(buf, *literal, indent),
SingleQuote(string) => {
buf.indent(indent);
buf.push('\'');
buf.push_str(string);
buf.push('\'');
@ -158,6 +162,26 @@ impl<'a> Formattable for Pattern<'a> {
buf.push('_');
buf.push_str(name);
}
List(loc_patterns) => {
buf.indent(indent);
buf.push_str("[");
let mut it = loc_patterns.iter().peekable();
while let Some(loc_pattern) = it.next() {
loc_pattern.format(buf, indent);
if it.peek().is_some() {
buf.push_str(",");
buf.spaces(1);
}
}
buf.push_str("]");
}
ListRest => {
buf.indent(indent);
buf.push_str("..");
}
// Space
SpaceBefore(sub_pattern, spaces) => {

Some files were not shown because too many files have changed in this diff Show more