mirror of
https://github.com/roc-lang/roc.git
synced 2025-07-24 06:55:15 +00:00
Merge remote-tracking branch 'origin/main' into repl
This commit is contained in:
commit
c03dc17ab4
197 changed files with 5171 additions and 2946 deletions
|
@ -1,4 +0,0 @@
|
|||
AUTHORS
|
||||
nix
|
||||
.envrc
|
||||
.gitignore
|
15
.github/workflows/benchmarks.yml
vendored
15
.github/workflows/benchmarks.yml
vendored
|
@ -23,25 +23,18 @@ jobs:
|
|||
ref: "main"
|
||||
clean: "true"
|
||||
|
||||
- name: Earthly version
|
||||
run: earthly --version
|
||||
|
||||
- name: on main; prepare a self-contained benchmark folder
|
||||
run: ./ci/safe-earthly.sh --build-arg BENCH_SUFFIX=main +prep-bench-folder
|
||||
run: nix develop -c ./ci/benchmarks/prep_folder.sh main
|
||||
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
clean: "false" # we want to keep the benchmark folder
|
||||
|
||||
- name: on current branch; prepare a self-contained benchmark folder
|
||||
run: ./ci/safe-earthly.sh +prep-bench-folder
|
||||
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: stable
|
||||
run: nix develop -c ./ci/benchmarks/prep_folder.sh branch
|
||||
|
||||
- name: build benchmark runner
|
||||
run: cd ci/bench-runner && cargo build --release && cd ../..
|
||||
run: nix develop -c bash -c "cd ci/benchmarks/bench-runner && cargo build --release && cd ../../.."
|
||||
|
||||
- name: run benchmarks with regression check
|
||||
run: ./ci/bench-runner/target/release/bench-runner --check-executables-changed
|
||||
run: nix develop -c ./ci/benchmarks/bench-runner/target/release/bench-runner --check-executables-changed
|
||||
|
|
4
.github/workflows/nix_linux_x86_64.yml
vendored
4
.github/workflows/nix_linux_x86_64.yml
vendored
|
@ -20,7 +20,7 @@ jobs:
|
|||
clean: "true"
|
||||
|
||||
- name: execute tests with --release
|
||||
run: /home/big-ci-user/.nix-profile/bin/nix develop -c cargo test --locked --release
|
||||
run: nix develop -c cargo test --locked --release
|
||||
|
||||
- name: test wasm32 cli_run
|
||||
run: /home/big-ci-user/.nix-profile/bin/nix develop -c cargo test --locked --release --features="wasm32-cli-run"
|
||||
run: nix develop -c cargo test --locked --release --features="wasm32-cli-run"
|
||||
|
|
20
.github/workflows/windows.yml
vendored
20
.github/workflows/windows.yml
vendored
|
@ -15,10 +15,17 @@ jobs:
|
|||
runs-on: windows-2022
|
||||
env:
|
||||
LLVM_SYS_130_PREFIX: C:\LLVM-13.0.1-win64
|
||||
timeout-minutes: 90
|
||||
|
||||
timeout-minutes: 150
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
- run: Add-Content -Path "$env:GITHUB_ENV" -Value "GITHUB_RUNNER_CPU=$((Get-CimInstance Win32_Processor).Name)"
|
||||
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
with:
|
||||
shared-key: "rust-cache-windows-${{env.GITHUB_RUNNER_CPU}}"
|
||||
|
||||
- name: download and install zig
|
||||
run: |
|
||||
curl.exe --output "C:\zig-windows-x86_64-0.9.1.zip" --url https://ziglang.org/download/0.9.1/zig-windows-x86_64-0.9.1.zip
|
||||
|
@ -34,5 +41,12 @@ jobs:
|
|||
curl.exe -L -O https://github.com/roc-lang/llvm-package-windows/releases/download/v13.0.1/LLVM-13.0.1-win64.7z
|
||||
7z x LLVM-13.0.1-win64.7z -oC:\LLVM-13.0.1-win64
|
||||
|
||||
- name: build
|
||||
run: cargo build
|
||||
- name: Build tests --release without running. Twice for zig lld-link error.
|
||||
run: cargo test --locked --release --no-run || cargo test --locked --release --no-run
|
||||
|
||||
# Why are these tests not build with previous command? => fingerprint error. Use `CARGO_LOG=cargo::core::compiler::fingerprint=info` to investigate
|
||||
- name: Build specific tests without running. Twice for zig lld-link error.
|
||||
run: cargo test --locked --release --no-run -p roc_ident -p roc_region -p roc_collections -p roc_can -p roc_types -p roc_solve -p roc_mono -p roc_gen_dev -p roc_gen_wasm -p roc_serialize -p roc_editor -p roc_linker || cargo test --locked --release --no-run -p roc_ident -p roc_region -p roc_collections -p roc_can -p roc_types -p roc_solve -p roc_mono -p roc_gen_dev -p roc_gen_wasm -p roc_serialize -p roc_editor -p roc_linker
|
||||
|
||||
- name: Actually run the tests.
|
||||
run: cargo test --locked --release -p roc_ident -p roc_region -p roc_collections -p roc_can -p roc_types -p roc_solve -p roc_mono -p roc_gen_dev -p roc_gen_wasm -p roc_serialize -p roc_editor -p roc_linker
|
||||
|
|
8
AUTHORS
8
AUTHORS
|
@ -113,3 +113,11 @@ João Mota <jackthemotorcycle@gmail.com>
|
|||
Marcos Prieto <marcospri@gmail.com>
|
||||
Prajwal S N <prajwalnadig21@gmail.com>
|
||||
Christopher Duncan <chris.duncan.arauz+git@protonmail.com>
|
||||
Luke Boswell <lukewilliamboswell@gmail.com>
|
||||
Luca Cervello <luca.cervello@gmail.com>
|
||||
Josh Mak <joshmak@berkeley.edu>
|
||||
Travis Staloch <twostepted@gmail.com>
|
||||
Nick Gravgaard <nick@nickgravgaard.com>
|
||||
Keerthana Kasthuril <76804118+keerthanak-tw@users.noreply.github.com>
|
||||
Salman Shaik <salmansiddiq.shaik@gmail.com>
|
||||
Austin Clements <austinclementsbass@gmail.com>
|
||||
|
|
8
Cargo.lock
generated
8
Cargo.lock
generated
|
@ -496,6 +496,7 @@ dependencies = [
|
|||
"roc_load",
|
||||
"roc_module",
|
||||
"roc_reporting",
|
||||
"roc_utils",
|
||||
"serde",
|
||||
"serde-xml-rs",
|
||||
"strip-ansi-escapes",
|
||||
|
@ -3573,6 +3574,7 @@ dependencies = [
|
|||
"roc_solve",
|
||||
"roc_types",
|
||||
"roc_unify",
|
||||
"roc_utils",
|
||||
"rodio",
|
||||
"serde",
|
||||
"snafu",
|
||||
|
@ -3594,6 +3596,7 @@ name = "roc_exhaustive"
|
|||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"roc_collections",
|
||||
"roc_error_macros",
|
||||
"roc_module",
|
||||
"roc_region",
|
||||
]
|
||||
|
@ -3993,6 +3996,7 @@ dependencies = [
|
|||
"roc_reporting",
|
||||
"roc_target",
|
||||
"roc_types",
|
||||
"roc_utils",
|
||||
"wasi_libc_sys",
|
||||
"wasm-bindgen",
|
||||
"wasm-bindgen-futures",
|
||||
|
@ -4860,6 +4864,7 @@ dependencies = [
|
|||
"roc_target",
|
||||
"roc_types",
|
||||
"roc_unify",
|
||||
"roc_utils",
|
||||
"target-lexicon",
|
||||
"tempfile",
|
||||
"wasi_libc_sys",
|
||||
|
@ -5262,6 +5267,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
|
|||
[[package]]
|
||||
name = "wasi_libc_sys"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"roc_utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen"
|
||||
|
|
|
@ -58,7 +58,7 @@ members = [
|
|||
"crates/wasi-libc-sys",
|
||||
]
|
||||
exclude = [
|
||||
"ci/bench-runner",
|
||||
"ci/benchmarks/bench-runner",
|
||||
# Examples sometimes have Rust hosts in their platforms. The compiler should ignore those.
|
||||
"crates/cli_testing_examples",
|
||||
"examples",
|
||||
|
|
72
Earthfile
72
Earthfile
|
@ -1,72 +0,0 @@
|
|||
FROM rust:1.61.0-slim-bullseye # make sure to update rust-toolchain.toml too so that everything uses the same rust version
|
||||
WORKDIR /earthbuild
|
||||
|
||||
prep-debian:
|
||||
RUN apt -y update
|
||||
|
||||
install-other-libs:
|
||||
FROM +prep-debian
|
||||
RUN apt -y install wget git
|
||||
RUN apt -y install libxcb-shape0-dev libxcb-xfixes0-dev # for editor clipboard
|
||||
RUN apt -y install libasound2-dev # for editor sounds
|
||||
RUN apt -y install libunwind-dev pkg-config libx11-dev zlib1g-dev
|
||||
RUN apt -y install unzip # for www/build.sh
|
||||
|
||||
install-zig-llvm-valgrind:
|
||||
FROM +install-other-libs
|
||||
# editor
|
||||
RUN apt -y install libxkbcommon-dev
|
||||
# zig
|
||||
RUN wget -c https://ziglang.org/download/0.9.1/zig-linux-x86_64-0.9.1.tar.xz --no-check-certificate
|
||||
RUN tar -xf zig-linux-x86_64-0.9.1.tar.xz
|
||||
RUN ln -s /earthbuild/zig-linux-x86_64-0.9.1/zig /bin/zig
|
||||
# zig builtins wasm tests
|
||||
RUN apt -y install build-essential
|
||||
RUN cargo install wasmer-cli --features "singlepass"
|
||||
RUN cargo install bindgen
|
||||
# llvm
|
||||
RUN apt -y install lsb-release software-properties-common gnupg
|
||||
RUN wget https://apt.llvm.org/llvm.sh
|
||||
RUN chmod +x llvm.sh
|
||||
RUN ./llvm.sh 13
|
||||
RUN ln -s /usr/bin/clang-13 /usr/bin/clang
|
||||
# use lld as linker
|
||||
RUN ln -s /usr/bin/lld-13 /usr/bin/ld.lld
|
||||
ENV RUSTFLAGS="-C link-arg=-fuse-ld=lld -C target-cpu=native"
|
||||
# valgrind
|
||||
RUN apt -y install valgrind
|
||||
# wasm repl & tests
|
||||
RUN rustup target add wasm32-unknown-unknown wasm32-wasi
|
||||
RUN apt -y install libssl-dev
|
||||
RUN OPENSSL_NO_VENDOR=1 cargo install wasm-pack
|
||||
# criterion
|
||||
RUN cargo install cargo-criterion
|
||||
# sccache
|
||||
RUN cargo install sccache
|
||||
RUN sccache -V
|
||||
ENV RUSTC_WRAPPER=/usr/local/cargo/bin/sccache
|
||||
ENV SCCACHE_DIR=/earthbuild/sccache_dir
|
||||
ENV CARGO_INCREMENTAL=0 # no need to recompile package when using new function
|
||||
|
||||
copy-dirs:
|
||||
FROM +install-zig-llvm-valgrind
|
||||
COPY --dir crates Cargo.toml Cargo.lock version.txt www ./
|
||||
|
||||
# compile everything needed for benchmarks and output a self-contained dir from which benchmarks can be run.
|
||||
prep-bench-folder:
|
||||
FROM +copy-dirs
|
||||
# to make use of avx, avx2, sse2, sse4.2... instructions
|
||||
ENV RUSTFLAGS="-C link-arg=-fuse-ld=lld -C target-cpu=native"
|
||||
ARG BENCH_SUFFIX=branch
|
||||
RUN cargo criterion -V
|
||||
RUN --mount=type=cache,target=$SCCACHE_DIR cd crates/cli && cargo criterion --no-run
|
||||
RUN mkdir -p bench-folder/crates/cli_testing_examples/benchmarks
|
||||
RUN mkdir -p bench-folder/crates/compiler/builtins/bitcode/src
|
||||
RUN mkdir -p bench-folder/target/release/deps
|
||||
RUN cp crates/cli_testing_examples/benchmarks/*.roc bench-folder/crates/cli_testing_examples/benchmarks/
|
||||
RUN cp -r crates/cli_testing_examples/benchmarks/platform bench-folder/crates/cli_testing_examples/benchmarks/
|
||||
RUN cp crates/compiler/builtins/bitcode/src/str.zig bench-folder/crates/compiler/builtins/bitcode/src
|
||||
RUN cp target/release/roc bench-folder/target/release
|
||||
# copy the most recent time bench to bench-folder
|
||||
RUN cp target/release/deps/`ls -t target/release/deps/ | grep time_bench | head -n 1` bench-folder/target/release/deps/time_bench
|
||||
SAVE ARTIFACT bench-folder AS LOCAL bench-folder-$BENCH_SUFFIX
|
304
TUTORIAL.md
304
TUTORIAL.md
|
@ -1,6 +1,6 @@
|
|||
# Tutorial
|
||||
|
||||
This is a tutorial to learn how to build Roc applications.
|
||||
This is a tutorial to learn how to build Roc applications.
|
||||
It covers the REPL, basic types like strings, lists, tags, and functions, syntax like `when` and `if then else`, and more!
|
||||
|
||||
Enjoy!
|
||||
|
@ -1091,6 +1091,63 @@ of the type annotation, or even the function's implementation! The only way to h
|
|||
Similarly, the only way to have a function whose type is `a -> a` is if the function's implementation returns
|
||||
its argument without modifying it in any way. This is known as [the identity function](https://en.wikipedia.org/wiki/Identity_function).
|
||||
|
||||
## Tag Unions
|
||||
|
||||
We can also annotate types that include tags:
|
||||
|
||||
```coffee
|
||||
colorFromStr : Str -> [Red, Green, Yellow]
|
||||
colorFromStr = \string ->
|
||||
when string is
|
||||
"red" -> Red
|
||||
"green" -> Green
|
||||
_ -> Yellow
|
||||
```
|
||||
|
||||
You can read the type `[Red, Green, Yellow]` as "a *tag union* of the tags `Red`, `Green`, and `Yellow`."
|
||||
|
||||
Some tag unions have only one tag in them. For example:
|
||||
|
||||
```coffee
|
||||
redTag : [Red]
|
||||
redTag = Red
|
||||
```
|
||||
|
||||
Tag unions can accumulate additional tags based on how they're used in the program. Consider this `if` expression:
|
||||
|
||||
```elm
|
||||
\str ->
|
||||
if Str.isEmpty str then
|
||||
Ok "it was empty"
|
||||
else
|
||||
Err ["it was not empty"]
|
||||
```
|
||||
|
||||
Here, Roc sees that the first branch has the type `[Ok Str]` and that the `else` branch has
|
||||
the type `[Err (List Str)]`, so it concludes that the whole `if` expression evaluates to the
|
||||
combination of those two tag unions: `[Ok Str, Err (List Str)]`.
|
||||
|
||||
This means the entire `\str -> …` funcion here has the type `Str -> [Ok Str, Err (List Str)]`.
|
||||
However, it would be most common to annotate it as `Result Str (List Str)` instead, because
|
||||
the `Result` type (for operations like `Result.withDefault`, which we saw earlier) is a type
|
||||
alias for a tag union with `Ok` and `Err` tags that each have one payload:
|
||||
|
||||
```haskell
|
||||
Result ok err : [Ok ok, Err err]
|
||||
```
|
||||
|
||||
We just saw how tag unions get combined when different branches of a conditional return different tags. Another way tag unions can get combined is through pattern matching. For example:
|
||||
|
||||
```coffeescript
|
||||
when color is
|
||||
Red -> "red"
|
||||
Yellow -> "yellow"
|
||||
Green -> "green"
|
||||
```
|
||||
|
||||
Here, Roc's compiler will infer that `color`'s type is `[Red, Yellow, Green]`, because
|
||||
those are the three possibilities this `when` handles.
|
||||
|
||||
## Numeric types
|
||||
|
||||
Roc has different numeric types that each have different tradeoffs.
|
||||
|
@ -1399,8 +1456,8 @@ this `imports` line tells the Roc compiler that when we call `Stdout.line`, it
|
|||
should look for that `line` function in the `Stdout` module of the
|
||||
`examples/cli/cli-platform/main.roc` package.
|
||||
|
||||
If we would like to include other modules in our application, say `AdditionalModule.roc`
|
||||
and `AnotherModule.roc`, then they can be imported directly in `imports` like this:
|
||||
If we would like to include other modules in our application, say `AdditionalModule.roc`
|
||||
and `AnotherModule.roc`, then they can be imported directly in `imports` like this:
|
||||
|
||||
```coffee
|
||||
packages { pf: "examples/cli/cli-platform/main.roc" }
|
||||
|
@ -1423,7 +1480,7 @@ interface Parser.Core
|
|||
```
|
||||
|
||||
This says that the current .roc file is an *interface module* because it begins with the `interface` keyword.
|
||||
We are naming this module when we write `interface Parser.Core`. It means that this file is in
|
||||
We are naming this module when we write `interface Parser.Core`. It means that this file is in
|
||||
a package `Parser` and the current module is named `core`.
|
||||
When we write `exposes [Parser, ParseResult, ...]`, it specifies the definitions we
|
||||
want to *expose*. Exposing makes them importable from other modules.
|
||||
|
@ -1469,7 +1526,7 @@ TODO
|
|||
|
||||
## Comments
|
||||
|
||||
Comments that begin with `##` will be included in generated documentation (```roc docs```). They require a single space after the `##`, and can include code blocks by adding five spaces after `##`.
|
||||
Comments that begin with `##` will be included in generated documentation (```roc docs```). They require a single space after the `##`, and can include code blocks by adding five spaces after `##`.
|
||||
|
||||
```coffee
|
||||
## This is a comment for documentation, and includes a code block.
|
||||
|
@ -1722,6 +1779,30 @@ Some important things to note about backpassing and `await`:
|
|||
- Backpassing syntax does not need to be used with `await` in particular. It can be used with any function.
|
||||
- Roc's compiler treats functions defined with backpassing exactly the same way as functions defined the other way. The only difference between `\text ->` and `text <-` is how they look, so feel free to use whichever looks nicer to you!
|
||||
|
||||
### Empty Tag Unions
|
||||
|
||||
If you look up the type of [`Program.exit`](https://www.roc-lang.org/examples/cli/Program#exit),
|
||||
you may notice that it takes a `Task` where the error type is `[]`. What does that mean?
|
||||
|
||||
Just like how `{}` is the type of an empty record, `[]` is the type of an empty tag union.
|
||||
There is no way to create an empty tag union at runtime, since creating a tag union requires
|
||||
making an actual tag, and an empty tag union has no tags in it!
|
||||
|
||||
This means if you have a function with the type `[] -> Str`, you can be sure that it will
|
||||
never execute. It requires an argument that can't be provided! Similarly, if you have a
|
||||
function with the type `Str -> []`, you can call it, but you can be sure it will not terminate
|
||||
normally. The only way to implement a function like that is using [infinite recursion](https://en.wikipedia.org/wiki/Infinite_loop#Infinite_recursion), which will either run indefinitely or else crash with a [stack overflow](https://en.wikipedia.org/wiki/Stack_overflow).
|
||||
|
||||
Empty tag unions can be useful as type parameters. For example, a function with the type
|
||||
`List [] -> Str` can be successfully called, but only if you pass it an empty list. That's because
|
||||
an empty list has the type `List *`, which means it can be used wherever any type of `List` is
|
||||
needed - even a `List []`!
|
||||
|
||||
Similarly, a function which accepts a `Result Str []` only accepts a "Result which is always `Ok`" - so you could call that function passing something like `Ok "hello"` with no problem,
|
||||
but if you tried to give it an `Err`, you'd get a type mismatch.
|
||||
|
||||
Applying this to `Task`, a task with `[]` for its error type is a "task which can never fail." The only way to obtain one is by obtaining a task with an error type of `*`, since that works with any task. You can get one of these "tasks that can never fail" by using [`Task.succeed`](https://www.roc-lang.org/examples/cli/Task#succeed) or, more commonly, by handling all possible errors using [`Task.attempt`](https://www.roc-lang.org/examples/cli/Task#attempt).
|
||||
|
||||
## What now?
|
||||
|
||||
That's it, you can start writing Roc apps now!
|
||||
|
@ -1950,200 +2031,65 @@ type that accumulates more and more fields as it progresses through a series of
|
|||
|
||||
### Open and Closed Tag Unions
|
||||
|
||||
Just like how Roc has open records and closed records, it also has open and closed tag unions.
|
||||
Just like how Roc has open records and closed records, it also has open and closed tag unions. Similarly to how an open record can have other fields besides the ones explicitly listed, an open tag union can have other tags beyond the ones explicitly listed.
|
||||
|
||||
The *open tag union* (or *open union* for short) `[Foo Str, Bar Bool]*` represents a tag that might
|
||||
be `Foo Str` and might be `Bar Bool`, but might also be some other tag whose type isn't known at compile time.
|
||||
|
||||
Because an open union represents possibilities that are impossible to know ahead of time, any `when` I use on a
|
||||
`[Foo Str, Bar Bool]*` value must include a catch-all `_ ->` branch. Otherwise, if one of those
|
||||
unknown tags were to come up, the `when` would not know what to do with it! For example:
|
||||
For example, here `[Red, Green]` is a closed union like the ones we saw earlier:
|
||||
|
||||
```coffee
|
||||
example : [Foo Str, Bar Bool]* -> Bool
|
||||
example = \tag ->
|
||||
when tag is
|
||||
Foo str -> Str.isEmpty str
|
||||
Bar bool -> bool
|
||||
_ -> Bool.false
|
||||
```
|
||||
colorToStr : [Red, Green] -> String
|
||||
colorToStr = \color ->
|
||||
when color is
|
||||
Red -> "red"
|
||||
Green -> "green"
|
||||
|
||||
In contrast, a *closed tag union* (or *closed union*) like `[Foo Str, Bar Bool]` (without the `*`)
|
||||
represents an exhaustive set of possible tags. If I use a `when` on one of these, I can match on `Foo`
|
||||
only and then on `Bar` only, with no need for a catch-all branch. For example:
|
||||
Now let's compare to an *open union* version:
|
||||
|
||||
```coffee
|
||||
example : [Foo Str, Bar Bool] -> Bool
|
||||
example = \tag ->
|
||||
when tag is
|
||||
Foo str -> Str.isEmpty str
|
||||
Bar bool -> bool
|
||||
colorOrOther : [Red, Green]* -> String
|
||||
colorOrOther = \color ->
|
||||
when color is
|
||||
Red -> "red"
|
||||
Green -> "green"
|
||||
_ -> "other"
|
||||
```
|
||||
|
||||
If we were to remove the type annotations from the previous two code examples, Roc would infer the same
|
||||
types for them anyway.
|
||||
Two things have changed compared to the first example.
|
||||
1. The `when color is` now has an extra branch: `_ -> "other"`
|
||||
2. Since this branch matches any tag, the type annotation for the `color` argument changed from the closed union `[Red, Green]` to the _open union_ `[Red, Green]*`.
|
||||
|
||||
It would infer `tag : [Foo Str, Bar Bool]` for the latter example because the `when tag is` expression
|
||||
only includes a `Foo Str` branch and a `Bar Bool` branch, and nothing else. Since the `when` doesn't handle
|
||||
any other possibilities, these two tags must be the only possible ones the `tag` argument could be.
|
||||
|
||||
It would infer `tag : [Foo Str, Bar Bool]*` for the former example because the `when tag is` expression
|
||||
includes a `Foo Str` branch and a `Bar Bool` branch - meaning we know about at least those two specific
|
||||
possibilities - but also a `_ ->` branch, indicating that there may be other tags we don't know about. Since
|
||||
the `when` is flexible enough to handle all possible tags, `tag` gets inferred as an open union.
|
||||
|
||||
Putting these together, whether a tag union is inferred to be open or closed depends on which possibilities
|
||||
the implementation actually handles.
|
||||
|
||||
> **Aside:** As with open and closed records, we can use type annotations to make tag union types less flexible
|
||||
> than what would be inferred. If we added a `_ ->` branch to the second example above, the compiler would still
|
||||
> accept `example : [Foo Str, Bar Bool] -> Bool` as the type annotation, even though the catch-all branch
|
||||
> would permit the more flexible `example : [Foo Str, Bar Bool]* -> Bool` annotation instead.
|
||||
|
||||
### Combining Open Unions
|
||||
|
||||
When we make a new record, it's inferred to be a closed record. For example, in `foo { a: "hi" }`,
|
||||
the type of `{ a: "hi" }` is inferred to be `{ a : Str }`. In contrast, when we make a new tag, it's inferred
|
||||
to be an open union. So in `foo (Bar "hi")`, the type of `Bar "hi"` is inferred to be `[Bar Str]*`.
|
||||
|
||||
This is because open unions can accumulate additional tags based on how they're used in the program,
|
||||
whereas closed unions cannot. For example, let's look at this conditional:
|
||||
|
||||
```elm
|
||||
if x > 5 then
|
||||
"foo"
|
||||
else
|
||||
7
|
||||
```
|
||||
|
||||
This will be a type mismatch because the two branches have incompatible types. Strings and numbers are not
|
||||
type-compatible! Now let's look at another example:
|
||||
|
||||
```elm
|
||||
if x > 5 then
|
||||
Ok "foo"
|
||||
else
|
||||
Err "bar"
|
||||
```
|
||||
|
||||
This shouldn't be a type mismatch, because we can see that the two branches are compatible; they are both
|
||||
tags that could easily coexist in the same tag union. But if the compiler inferred the type of `Ok "foo"` to be
|
||||
the closed union `[Ok Str]`, and likewise for `Err "bar"` and `[Err Str]`, then this would have to be
|
||||
a type mismatch - because those two closed unions are incompatible.
|
||||
|
||||
Instead, the compiler infers `Ok "foo"` to be the open union `[Ok Str]*`, and `Err "bar"` to be the open
|
||||
union `[Err Str]*`. Then, when using them together in this conditional, the inferred type of the conditional
|
||||
becomes `[Ok Str, Err Str]*` - that is, the combination of the unions in each of its branches. (Branches in
|
||||
a `when` work the same way with open unions.)
|
||||
|
||||
Earlier we saw how a function which accepts an open union must account for more possibilities, by including
|
||||
catch-all `_ ->` patterns in its `when` expressions. So *accepting* an open union means you have more requirements.
|
||||
In contrast, when you already *have* a value which is an open union, you have fewer requirements. A value
|
||||
which is an open union (like `Ok "foo"`, which has the type `[Ok Str]*`) can be provided to anything that's
|
||||
expecting a tag union (no matter whether it's open or closed), as long as the expected tag union includes at least
|
||||
the tags in the open union you're providing.
|
||||
|
||||
So if I have an `[Ok Str]*` value, I can pass it to functions with any of these types (among others):
|
||||
|
||||
- `[Ok Str]* -> Bool`
|
||||
- `[Ok Str] -> Bool`
|
||||
- `[Ok Str, Err Bool]* -> Bool`
|
||||
- `[Ok Str, Err Bool] -> Bool`
|
||||
- `[Ok Str, Err Bool, Whatever]* -> Bool`
|
||||
- `[Ok Str, Err Bool, Whatever] -> Bool`
|
||||
- `Result Str Bool -> Bool`
|
||||
- `[Err Bool, Whatever]* -> Bool`
|
||||
|
||||
That last one works because a function accepting an open union can accept any unrecognized tag, including
|
||||
`Ok Str` - even though it is not mentioned as one of the tags in `[Err Bool, Whatever]*`! Remember, when
|
||||
a function accepts an open tag union, any `when` branches on that union must include a catch-all `_ ->` branch,
|
||||
which is the branch that will end up handling the `Ok Str` value we pass in.
|
||||
|
||||
However, I could not pass an `[Ok Str]*` to a function with a *closed* tag union argument that did not
|
||||
mention `Ok Str` as one of its tags. So if I tried to pass `[Ok Str]*` to a function with the type
|
||||
`[Err Bool, Whatever] -> Str`, I would get a type mismatch - because a `when` in that function could
|
||||
be handling the `Err Bool` possibility and the `Whatever` possibility, and since it would not necessarily have
|
||||
a catch-all `_ ->` branch, it might not know what to do with an `Ok Str` if it received one.
|
||||
|
||||
> **Note:** It wouldn't be accurate to say that a function which accepts an open union handles
|
||||
> "all possible tags." For example, if I have a function `[Ok Str]* -> Bool` and I pass it
|
||||
> `Ok 5`, that will still be a type mismatch. If you think about it, a `when` in that function might
|
||||
> have the branch `Ok str ->` which assumes there's a string inside that `Ok`, and if `Ok 5` type-checked,
|
||||
> then that assumption would be false and things would break!
|
||||
>
|
||||
> So `[Ok Str]*` is more restrictive than `[]*`. It's basically saying "this may or may not be an `Ok` tag,
|
||||
> but if it is an `Ok` tag, then it's guaranteed to have a payload of exactly `Str`."
|
||||
|
||||
In summary, here's a way to think about the difference between open unions in a value you have, compared to a value you're accepting:
|
||||
|
||||
- If you *have* a closed union, that means it has all the tags it ever will, and can't accumulate more.
|
||||
- If you *have* an open union, that means it can accumulate more tags through conditional branches.
|
||||
- If you *accept* a closed union, that means you only have to handle the possibilities listed in the union.
|
||||
- If you *accept* an open union, that means you have to handle the possibility that it has a tag you can't know about.
|
||||
|
||||
### Type Variables in Tag Unions
|
||||
|
||||
Earlier we saw these two examples, one with an open tag union and the other with a closed one:
|
||||
Also like with open records, you can name the type variable in an open tag union. For example:
|
||||
|
||||
```coffee
|
||||
example : [Foo Str, Bar Bool]* -> Bool
|
||||
example = \tag ->
|
||||
when tag is
|
||||
Foo str -> Str.isEmpty str
|
||||
Bar bool -> bool
|
||||
_ -> Bool.false
|
||||
```
|
||||
|
||||
```coffee
|
||||
example : [Foo Str, Bar Bool] -> Bool
|
||||
example = \tag ->
|
||||
when tag is
|
||||
Foo str -> Str.isEmpty str
|
||||
Bar bool -> bool
|
||||
```
|
||||
|
||||
Similarly to how there are open records with a `*`, closed records with nothing,
|
||||
and constrained records with a named type variable, we can also have *constrained tag unions*
|
||||
with a named type variable. Here's an example:
|
||||
|
||||
```coffee
|
||||
example : [Foo Str, Bar Bool]a -> [Foo Str, Bar Bool]a
|
||||
example = \tag ->
|
||||
when tag is
|
||||
Foo str -> Bar (Str.isEmpty str)
|
||||
Bar _ -> Bar Bool.false
|
||||
stopGoOther : [Red, Green]a -> [Stop, Go]a
|
||||
stopGoOther = \color ->
|
||||
when color is
|
||||
Red -> Stop
|
||||
Green -> Go
|
||||
other -> other
|
||||
```
|
||||
|
||||
This type says that the `example` function will take either a `Foo Str` tag, or a `Bar Bool` tag,
|
||||
or possibly another tag we don't know about at compile time - and it also says that the function's
|
||||
return type is the same as the type of its argument.
|
||||
You can read this type annotation as "`stopGoOther` takes either a `Red` tag, a `Green` tag, or some other tag. It returns either a `Stop` tag, a `Go` tag, or any one of the tags it received in its argument."
|
||||
|
||||
So if we give this function a `[Foo Str, Bar Bool, Baz (List Str)]` argument, then it will be guaranteed
|
||||
to return a `[Foo Str, Bar Bool, Baz (List Str)]` value. This is more constrained than a function that
|
||||
returned `[Foo Str, Bar Bool]*` because that would say it could return *any* other tag (in addition to
|
||||
the `Foo Str` and `Bar Bool` we already know about).
|
||||
So let's say you called this `stopGoOther` function passing `Foo "hello"`. Then the `a` type variable would be the closed union `[Foo Str]`, and `stopGoOther` would return a union with the type `[Stop, Go][Foo Str]` - which is equivalent to `[Stop, Go, Foo Str]`.
|
||||
|
||||
If we removed the type annotation from `example` above, Roc's compiler would infer the same type anyway.
|
||||
This may be surprising if you look closely at the body of the function, because:
|
||||
Just like with records, you can replace the type variable in tag union types with a concrete type.
|
||||
For example, `[Foo Str][Bar Bool][Baz (List Str)]` is equivalent to `[Foo Str, Bar Bool, Baz (List Str)]`.
|
||||
|
||||
- The return type includes `Foo Str`, but no branch explicitly returns `Foo`. Couldn't the return type be `[Bar Bool]a` instead?
|
||||
- The argument type includes `Bar Bool` even though we never look at `Bar`'s payload. Couldn't the argument type be inferred to be `Bar *` instead of `Bar Bool`, since we never look at it?
|
||||
Also just like with records, you can use this to compose tag union type aliases. For example, you can write `NetworkError : [Timeout, Disconnected]` and then `Problem : [InvalidInput, UnknownFormat]NetworkError`.
|
||||
|
||||
The reason it has this type is the `other -> other` branch. Take a look at that branch, and ask this question:
|
||||
"What is the type of `other`?" There has to be exactly one answer! It can't be the case that `other` has one
|
||||
type before the `->` and another type after it; whenever you see a named value in Roc, it is guaranteed to have
|
||||
the same type everywhere it appears in that scope.
|
||||
Note that that a function which accepts an open union does not accept "all possible tags."
|
||||
For example, if I have a function `[Ok Str]* -> Bool` and I pass it
|
||||
`Ok 5`, that will still be a type mismatch. A `when` on that function's argument might
|
||||
have the branch `Ok str ->` which assumes there's a string inside that `Ok`,
|
||||
and if `Ok 5` type-checked, then that assumption would be false and things would break!
|
||||
|
||||
For this reason, any time you see a function that only runs a `when` on its only argument, and that `when`
|
||||
includes a branch like `x -> x` or `other -> other`, the function's argument type and return type must necessarily
|
||||
be equivalent.
|
||||
So `[Ok Str]*` is more restrictive than `[]*`. It's basically saying "this may or may not be an `Ok` tag, but if it _is_ an `Ok` tag, then it's guaranteed to have a payload of exactly `Str`."
|
||||
|
||||
> **Note:** Just like with records, you can also replace the type variable in tag union types with a concrete type.
|
||||
> For example, `[Foo Str][Bar Bool][Baz (List Str)]` is equivalent to `[Foo Str, Bar Bool, Baz (List Str)]`.
|
||||
>
|
||||
> Also just like with records, you can use this to compose tag union type aliases. For example, you can write
|
||||
> `NetworkError : [Timeout, Disconnected]` and then `Problem : [InvalidInput, UnknownFormat]NetworkError`
|
||||
> **Note:** As with open and closed records, we can use type annotations to make tag union types less flexible
|
||||
> than what the compiler would infer. For example, if we changed the type of the second
|
||||
> `colorOrOther` function from the open `[Red, Green]*` to the closed `[Red, Green]`, Roc's compiler
|
||||
> would accept it as a valid annotation, but it would give a warning that the `_ -> "other"`
|
||||
> branch had become unreachable.
|
||||
|
||||
### Phantom Types
|
||||
|
||||
|
|
|
@ -240,9 +240,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "regex"
|
||||
version = "1.5.4"
|
||||
version = "1.5.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461"
|
||||
checksum = "1a11647b6b25ff05a515cb92c365cec08801e83423a235b51e231e1808747286"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"memchr",
|
|
@ -7,7 +7,7 @@ edition = "2021"
|
|||
|
||||
[dependencies]
|
||||
clap = { version = "3.1.15", features = ["derive"] }
|
||||
regex = "1.5.4"
|
||||
regex = "1.5.5"
|
||||
is_executable = "1.0.1"
|
||||
ring = "0.16.20"
|
||||
data-encoding = "2.3.2"
|
|
@ -33,7 +33,7 @@ fn main() {
|
|||
|
||||
if check_if_bench_executables_changed() {
|
||||
println!(
|
||||
"Comparison of sha256 of executables reveals changes, doing full benchmarks..."
|
||||
"\n\nComparison of sha256 of executables reveals changes, doing full benchmarks...\n\n"
|
||||
);
|
||||
|
||||
let all_regressed_benches = do_all_benches(optional_args.nr_repeat_benchmarks);
|
||||
|
@ -51,8 +51,8 @@ fn main() {
|
|||
eprintln!(
|
||||
r#"I can't find bench-folder-main and bench-folder-branch from the current directory.
|
||||
I should be executed from the repo root.
|
||||
Use `./ci/safe-earthly.sh --build-arg BENCH_SUFFIX=main +prep-bench-folder` to generate bench-folder-main.
|
||||
Use `./ci/safe-earthly.sh +prep-bench-folder` to generate bench-folder-branch."#
|
||||
Use `./ci/benchmarks/prep_folder.sh main` to generate bench-folder-main.
|
||||
Use `./ci/benchmarks/prep_folder.sh branch` to generate bench-folder-branch."#
|
||||
);
|
||||
|
||||
process::exit(1)
|
||||
|
@ -85,6 +85,8 @@ fn do_all_benches(nr_repeat_benchmarks: usize) -> HashSet<String> {
|
|||
return HashSet::new();
|
||||
}
|
||||
|
||||
println!("\n\nDoing benchmarks {:?} times to reduce flukes.\n\n", nr_repeat_benchmarks);
|
||||
|
||||
for _ in 1..nr_repeat_benchmarks {
|
||||
delete_old_bench_results();
|
||||
do_benchmark("main");
|
||||
|
@ -112,7 +114,7 @@ fn do_benchmark(branch_name: &'static str) -> HashSet<String> {
|
|||
))
|
||||
.args(&["--bench", "--noplot"])
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::piped())
|
||||
.stderr(Stdio::inherit())
|
||||
.spawn()
|
||||
.unwrap_or_else(|_| panic!("Failed to benchmark {}.", branch_name));
|
||||
|
||||
|
@ -133,14 +135,14 @@ fn do_benchmark(branch_name: &'static str) -> HashSet<String> {
|
|||
"Failed to get line that contains benchmark name from last_three_lines_queue.",
|
||||
);
|
||||
|
||||
let regex_match = bench_name_regex.find(regressed_bench_name_line).expect("This line should hoave the benchmark name between double quotes but I could not match it");
|
||||
let regex_match = bench_name_regex.find(regressed_bench_name_line).expect("This line should have the benchmark name between double quotes but I could not match it");
|
||||
|
||||
regressed_benches.insert(regex_match.as_str().to_string().replace("\"", ""));
|
||||
}
|
||||
|
||||
last_three_lines_queue.push_front(line_str.clone());
|
||||
|
||||
println!("bench {:?}: {:?}", branch_name, line_str);
|
||||
println!(">>bench {:?}: {:?}", branch_name, line_str);
|
||||
}
|
||||
|
||||
regressed_benches
|
||||
|
@ -186,8 +188,20 @@ fn sha256_digest<R: Read>(mut reader: R) -> Result<Digest, io::Error> {
|
|||
}
|
||||
|
||||
fn sha_file(file_path: &Path) -> Result<String, io::Error> {
|
||||
let input = File::open(file_path)?;
|
||||
let reader = BufReader::new(input);
|
||||
// Debug info is dependent on the dir in which executable was created,
|
||||
// so we need to strip that to be able to compare binaries.
|
||||
let no_debug_info_file_path = file_path.to_str().unwrap().to_string() + ("_no_debug_info");
|
||||
std::fs::copy(file_path, &no_debug_info_file_path)?;
|
||||
|
||||
let strip_output = Command::new("strip")
|
||||
.args(["--strip-debug", &no_debug_info_file_path])
|
||||
.output()
|
||||
.expect("failed to execute process");
|
||||
|
||||
assert!(strip_output.status.success());
|
||||
|
||||
let no_debug_info_file = File::open(no_debug_info_file_path)?;
|
||||
let reader = BufReader::new(no_debug_info_file);
|
||||
let digest = sha256_digest(reader)?;
|
||||
|
||||
Ok(HEXUPPER.encode(digest.as_ref()))
|
26
ci/benchmarks/prep_folder.sh
Executable file
26
ci/benchmarks/prep_folder.sh
Executable file
|
@ -0,0 +1,26 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
# compile everything needed for benchmarks and output a self-contained dir from which benchmarks can be run.
|
||||
|
||||
# https://vaneyckt.io/posts/safer_bash_scripts_with_set_euxo_pipefail/
|
||||
set -euxo pipefail
|
||||
|
||||
# to make use of avx, avx2, sse2, sse4.2... instructions
|
||||
RUSTFLAGS="-C link-arg=-fuse-ld=lld -C target-cpu=native"
|
||||
BENCH_SUFFIX=$1
|
||||
|
||||
cargo criterion -V
|
||||
cd crates/cli && cargo criterion --no-run && cd ../..
|
||||
mkdir -p bench-folder/crates/cli_testing_examples/benchmarks
|
||||
mkdir -p bench-folder/crates/compiler/builtins/bitcode/src
|
||||
mkdir -p bench-folder/target/release/deps
|
||||
mkdir -p bench-folder/target/release/lib
|
||||
cp "crates/cli_testing_examples/benchmarks/"*".roc" bench-folder/crates/cli_testing_examples/benchmarks/
|
||||
cp -r crates/cli_testing_examples/benchmarks/platform bench-folder/crates/cli_testing_examples/benchmarks/
|
||||
cp crates/compiler/builtins/bitcode/src/str.zig bench-folder/crates/compiler/builtins/bitcode/src
|
||||
cp target/release/roc bench-folder/target/release
|
||||
cp -r target/release/lib bench-folder/target/release
|
||||
|
||||
# copy the most recent time bench to bench-folder
|
||||
cp target/release/deps/`ls -t target/release/deps/ | grep time_bench | head -n 1` bench-folder/target/release/deps/time_bench
|
||||
mv bench-folder bench-folder-$BENCH_SUFFIX
|
|
@ -1,5 +1,8 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
# https://vaneyckt.io/posts/safer_bash_scripts_with_set_euxo_pipefail/
|
||||
set -euxo pipefail
|
||||
|
||||
mkdir -p $HOME/.cargo
|
||||
echo -e "[build]\nrustflags = [\"-C\", \"link-arg=-fuse-ld=lld\", \"-C\", \"target-cpu=native\"]" > $HOME/.cargo/config
|
||||
|
||||
|
|
|
@ -2,6 +2,9 @@
|
|||
|
||||
# assumes roc_releases.json is present
|
||||
|
||||
# https://vaneyckt.io/posts/safer_bash_scripts_with_set_euxo_pipefail/
|
||||
set -euxo pipefail
|
||||
|
||||
LATEST_RELEASE_URL=`cat roc_releases.json | jq --arg arch $1 --arg today $(date +'%Y-%m-%d') '.[0] | .assets | map(.browser_download_url) | map(select(. | contains("\($arch)-\($today)"))) | .[0]'`
|
||||
|
||||
if [[ "$LATEST_RELEASE_URL" == "null" ]]
|
||||
|
|
|
@ -1,4 +1,8 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
# https://vaneyckt.io/posts/safer_bash_scripts_with_set_euxo_pipefail/
|
||||
set -euxo pipefail
|
||||
|
||||
cp target/release/roc ./roc # to be able to exclude "target" later in the tar command
|
||||
cp -r target/release/lib ./lib
|
||||
tar -czvf $1 --exclude="target" --exclude="zig-cache" roc lib LICENSE LEGAL_DETAILS examples/helloWorld.roc examples/platform-switching examples/cli crates/roc_std
|
||||
|
|
|
@ -1,23 +0,0 @@
|
|||
#!/usr/bin/env bash
|
||||
LOG_FILE="earthly_log.txt"
|
||||
touch $LOG_FILE
|
||||
|
||||
# first arg + everything after
|
||||
ARGS=${@:1}
|
||||
FULL_CMD="earthly --config ci/earthly-conf.yml $ARGS"
|
||||
echo $FULL_CMD
|
||||
script -efq $LOG_FILE -c "$FULL_CMD"
|
||||
EXIT_CODE=$?
|
||||
|
||||
if grep -q "failed to mount" "$LOG_FILE"; then
|
||||
echo ""
|
||||
echo ""
|
||||
echo "------<<<<<<!!!!!!>>>>>>------"
|
||||
echo "DETECTED FAILURE TO MOUNT ERROR: running without cache"
|
||||
echo "------<<<<<<!!!!!!>>>>>>------"
|
||||
echo ""
|
||||
echo ""
|
||||
earthly --config ci/earthly-conf.yml --no-cache $ARGS
|
||||
else
|
||||
exit $EXIT_CODE
|
||||
fi
|
|
@ -1,3 +1,7 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
# https://vaneyckt.io/posts/safer_bash_scripts_with_set_euxo_pipefail/
|
||||
set -euxo pipefail
|
||||
|
||||
# version.txt is used by the CLI: roc --version
|
||||
printf 'nightly pre-release, built from commit ' > version.txt && git log --pretty=format:'%h' -n 1 >> version.txt && printf ' on ' >> version.txt && date -u >> version.txt
|
|
@ -1,3 +1,6 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
# https://vaneyckt.io/posts/safer_bash_scripts_with_set_euxo_pipefail/
|
||||
set -euxo pipefail
|
||||
|
||||
crates/repl_wasm/build-www.sh `pwd`/roc_repl_wasm.tar.gz
|
||||
|
|
|
@ -352,7 +352,7 @@ pub fn expr_to_expr2<'a>(
|
|||
|
||||
for (node_id, branch) in can_branches.iter_node_ids().zip(branches.iter()) {
|
||||
let (can_when_branch, branch_references) =
|
||||
canonicalize_when_branch(env, scope, *branch, &mut output);
|
||||
canonicalize_when_branch(env, scope, branch, &mut output);
|
||||
|
||||
output.references.union_mut(branch_references);
|
||||
|
||||
|
|
|
@ -3,7 +3,7 @@ use roc_module::ident::Lowercase;
|
|||
use roc_module::symbol::Symbol;
|
||||
use roc_types::subs::Variable;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Default)]
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Default)]
|
||||
pub struct IntroducedVariables {
|
||||
// Rigids must be unique within a type annotation.
|
||||
// E.g. in `identity : a -> a`, there should only be one
|
||||
|
|
|
@ -252,7 +252,9 @@ pub fn build_file<'a>(
|
|||
}
|
||||
|
||||
let rebuild_timing = if linking_strategy == LinkingStrategy::Additive {
|
||||
let rebuild_duration = rebuild_thread.join().unwrap();
|
||||
let rebuild_duration = rebuild_thread
|
||||
.join()
|
||||
.expect("Failed to (re)build platform.");
|
||||
if emit_timings && !prebuilt {
|
||||
println!(
|
||||
"Finished rebuilding the platform in {} ms\n",
|
||||
|
@ -304,7 +306,7 @@ pub fn build_file<'a>(
|
|||
}
|
||||
|
||||
if let HostRebuildTiming::ConcurrentWithApp(thread) = rebuild_timing {
|
||||
let rebuild_duration = thread.join().unwrap();
|
||||
let rebuild_duration = thread.join().expect("Failed to (re)build platform.");
|
||||
if emit_timings && !prebuilt {
|
||||
println!(
|
||||
"Finished rebuilding the platform in {} ms\n",
|
||||
|
|
|
@ -85,6 +85,12 @@ mod cli_run {
|
|||
let (before_first_digit, _) = err.split_at(err.rfind("found in ").unwrap());
|
||||
let err = format!("{}found in <ignored for test> ms.", before_first_digit);
|
||||
|
||||
// make paths consistent
|
||||
let err = err.replace('\\', "/");
|
||||
|
||||
// consistency with typewriters, very important
|
||||
let err = err.replace('\r', "");
|
||||
|
||||
assert_multiline_str_eq!(err.as_str(), expected);
|
||||
}
|
||||
|
||||
|
@ -1148,7 +1154,7 @@ fn run_with_wasmer(wasm_path: &std::path::Path, stdin: &[&str]) -> String {
|
|||
// .unwrap();
|
||||
|
||||
let store = Store::default();
|
||||
let module = Module::from_file(&store, &wasm_path).unwrap();
|
||||
let module = Module::from_file(&store, wasm_path).unwrap();
|
||||
|
||||
let mut fake_stdin = wasmer_wasi::Pipe::new();
|
||||
let fake_stdout = wasmer_wasi::Pipe::new();
|
||||
|
|
|
@ -210,6 +210,7 @@ fn roc_fx_getInt_help() !i64 {
|
|||
const stdin = std.io.getStdIn().reader();
|
||||
var buf: [40]u8 = undefined;
|
||||
|
||||
// make sure to strip `\r` on windows
|
||||
const raw_line: []u8 = (try stdin.readUntilDelimiterOrEof(&buf, '\n')) orelse "";
|
||||
const line = std.mem.trimRight(u8, raw_line, &std.ascii.spaces);
|
||||
|
||||
|
|
|
@ -14,6 +14,7 @@ roc_collections = { path = "../compiler/collections" }
|
|||
roc_reporting = { path = "../reporting" }
|
||||
roc_load = { path = "../compiler/load" }
|
||||
roc_module = { path = "../compiler/module" }
|
||||
roc_utils = { path = "../utils" }
|
||||
bumpalo = { version = "3.8.0", features = ["collections"] }
|
||||
criterion = { git = "https://github.com/Anton-4/criterion.rs"}
|
||||
serde = { version = "1.0.130", features = ["derive"] }
|
||||
|
|
|
@ -4,6 +4,7 @@ extern crate roc_load;
|
|||
extern crate roc_module;
|
||||
extern crate tempfile;
|
||||
|
||||
use roc_utils::cargo;
|
||||
use serde::Deserialize;
|
||||
use serde_xml_rs::from_str;
|
||||
use std::env;
|
||||
|
@ -48,7 +49,7 @@ where
|
|||
vec!["build", "--release", "--bin", "roc"]
|
||||
};
|
||||
|
||||
let output = Command::new("cargo")
|
||||
let output = cargo()
|
||||
.current_dir(root_project_dir)
|
||||
.args(args)
|
||||
.output()
|
||||
|
|
|
@ -4,21 +4,15 @@ use roc_builtins::bitcode;
|
|||
use roc_error_macros::internal_error;
|
||||
use roc_mono::ir::OptLevel;
|
||||
use roc_utils::get_lib_path;
|
||||
use roc_utils::{cargo, clang, zig};
|
||||
use std::collections::HashMap;
|
||||
use std::env;
|
||||
use std::io;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process::{self, Child, Command, Output};
|
||||
use std::process::{self, Child, Command};
|
||||
use target_lexicon::{Architecture, OperatingSystem, Triple};
|
||||
use wasi_libc_sys::{WASI_COMPILER_RT_PATH, WASI_LIBC_PATH};
|
||||
|
||||
fn zig_executable() -> String {
|
||||
match std::env::var("ROC_ZIG") {
|
||||
Ok(path) => path,
|
||||
Err(_) => "zig".into(),
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
|
||||
pub enum LinkType {
|
||||
// These numbers correspond to the --lib and --no-link flags
|
||||
|
@ -113,9 +107,9 @@ pub fn build_zig_host_native(
|
|||
target: &str,
|
||||
opt_level: OptLevel,
|
||||
shared_lib_path: Option<&Path>,
|
||||
) -> Output {
|
||||
let mut command = Command::new(&zig_executable());
|
||||
command
|
||||
) -> Command {
|
||||
let mut zig_cmd = zig();
|
||||
zig_cmd
|
||||
.env_clear()
|
||||
.env("PATH", env_path)
|
||||
.env("HOME", env_home);
|
||||
|
@ -130,7 +124,7 @@ pub fn build_zig_host_native(
|
|||
bitcode::get_builtins_host_obj_path()
|
||||
};
|
||||
|
||||
command.args(&[
|
||||
zig_cmd.args([
|
||||
"build-exe",
|
||||
"-fPIE",
|
||||
"-rdynamic", // make sure roc_alloc and friends are exposed
|
||||
|
@ -138,12 +132,12 @@ pub fn build_zig_host_native(
|
|||
&builtins_obj,
|
||||
]);
|
||||
} else {
|
||||
command.args(&["build-obj", "-fPIC"]);
|
||||
zig_cmd.args(["build-obj", "-fPIC"]);
|
||||
}
|
||||
|
||||
command.args(&[
|
||||
zig_cmd.args([
|
||||
zig_host_src,
|
||||
emit_bin,
|
||||
&format!("-femit-bin={}", emit_bin),
|
||||
"--pkg-begin",
|
||||
"str",
|
||||
zig_str_path,
|
||||
|
@ -160,7 +154,7 @@ pub fn build_zig_host_native(
|
|||
// when we use zig 0.9. It looks like zig 0.10 is going to fix
|
||||
// this problem for us, so this is a temporary workaround
|
||||
if !target.contains("windows") {
|
||||
command.args(&[
|
||||
zig_cmd.args([
|
||||
// include the zig runtime
|
||||
"-fcompiler-rt",
|
||||
]);
|
||||
|
@ -168,16 +162,16 @@ pub fn build_zig_host_native(
|
|||
|
||||
// valgrind does not yet support avx512 instructions, see #1963.
|
||||
if env::var("NO_AVX512").is_ok() {
|
||||
command.args(&["-mcpu", "x86_64"]);
|
||||
zig_cmd.args(["-mcpu", "x86_64"]);
|
||||
}
|
||||
|
||||
if matches!(opt_level, OptLevel::Optimize) {
|
||||
command.args(&["-O", "ReleaseSafe"]);
|
||||
zig_cmd.args(["-O", "ReleaseSafe"]);
|
||||
} else if matches!(opt_level, OptLevel::Size) {
|
||||
command.args(&["-O", "ReleaseSmall"]);
|
||||
zig_cmd.args(["-O", "ReleaseSmall"]);
|
||||
}
|
||||
|
||||
command.output().unwrap()
|
||||
zig_cmd
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
|
@ -191,27 +185,27 @@ pub fn build_zig_host_native(
|
|||
target: &str,
|
||||
opt_level: OptLevel,
|
||||
shared_lib_path: Option<&Path>,
|
||||
) -> Output {
|
||||
let mut command = Command::new(&zig_executable());
|
||||
command
|
||||
) -> Command {
|
||||
let mut zig_cmd = zig();
|
||||
zig_cmd
|
||||
.env_clear()
|
||||
.env("PATH", env_path)
|
||||
.env("HOME", env_home);
|
||||
|
||||
if let Some(shared_lib_path) = shared_lib_path {
|
||||
command.args(&[
|
||||
zig_cmd.args(&[
|
||||
"build-exe",
|
||||
// "-fPIE", PIE seems to fail on windows
|
||||
shared_lib_path.to_str().unwrap(),
|
||||
&bitcode::get_builtins_windows_obj_path(),
|
||||
]);
|
||||
} else {
|
||||
command.args(&["build-obj", "-fPIC"]);
|
||||
zig_cmd.args(&["build-obj", "-fPIC"]);
|
||||
}
|
||||
|
||||
command.args(&[
|
||||
zig_cmd.args(&[
|
||||
zig_host_src,
|
||||
emit_bin,
|
||||
&format!("-femit-bin={}", emit_bin),
|
||||
"--pkg-begin",
|
||||
"str",
|
||||
zig_str_path,
|
||||
|
@ -227,12 +221,12 @@ pub fn build_zig_host_native(
|
|||
]);
|
||||
|
||||
if matches!(opt_level, OptLevel::Optimize) {
|
||||
command.args(&["-O", "ReleaseSafe"]);
|
||||
zig_cmd.args(&["-O", "ReleaseSafe"]);
|
||||
} else if matches!(opt_level, OptLevel::Size) {
|
||||
command.args(&["-O", "ReleaseSmall"]);
|
||||
zig_cmd.args(&["-O", "ReleaseSmall"]);
|
||||
}
|
||||
|
||||
command.output().unwrap()
|
||||
zig_cmd
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
|
@ -247,14 +241,11 @@ pub fn build_zig_host_native(
|
|||
opt_level: OptLevel,
|
||||
shared_lib_path: Option<&Path>,
|
||||
// For compatibility with the non-macOS def above. Keep these in sync.
|
||||
) -> Output {
|
||||
) -> Command {
|
||||
use serde_json::Value;
|
||||
|
||||
// Run `zig env` to find the location of zig's std/ directory
|
||||
let zig_env_output = Command::new(&zig_executable())
|
||||
.args(&["env"])
|
||||
.output()
|
||||
.unwrap();
|
||||
let zig_env_output = zig().args(&["env"]).output().unwrap();
|
||||
|
||||
let zig_env_json = if zig_env_output.status.success() {
|
||||
std::str::from_utf8(&zig_env_output.stdout).unwrap_or_else(|utf8_err| {
|
||||
|
@ -291,24 +282,24 @@ pub fn build_zig_host_native(
|
|||
zig_compiler_rt_path.push("special");
|
||||
zig_compiler_rt_path.push("compiler_rt.zig");
|
||||
|
||||
let mut command = Command::new(&zig_executable());
|
||||
command
|
||||
let mut zig_cmd = zig();
|
||||
zig_cmd
|
||||
.env_clear()
|
||||
.env("PATH", &env_path)
|
||||
.env("HOME", &env_home);
|
||||
if let Some(shared_lib_path) = shared_lib_path {
|
||||
command.args(&[
|
||||
zig_cmd.args(&[
|
||||
"build-exe",
|
||||
"-fPIE",
|
||||
shared_lib_path.to_str().unwrap(),
|
||||
&bitcode::get_builtins_host_obj_path(),
|
||||
]);
|
||||
} else {
|
||||
command.args(&["build-obj", "-fPIC"]);
|
||||
zig_cmd.args(&["build-obj", "-fPIC"]);
|
||||
}
|
||||
command.args(&[
|
||||
zig_cmd.args(&[
|
||||
zig_host_src,
|
||||
emit_bin,
|
||||
&format!("-femit-bin={}", emit_bin),
|
||||
"--pkg-begin",
|
||||
"str",
|
||||
zig_str_path,
|
||||
|
@ -323,11 +314,12 @@ pub fn build_zig_host_native(
|
|||
"c",
|
||||
]);
|
||||
if matches!(opt_level, OptLevel::Optimize) {
|
||||
command.args(&["-O", "ReleaseSafe"]);
|
||||
zig_cmd.args(&["-O", "ReleaseSafe"]);
|
||||
} else if matches!(opt_level, OptLevel::Size) {
|
||||
command.args(&["-O", "ReleaseSmall"]);
|
||||
zig_cmd.args(&["-O", "ReleaseSmall"]);
|
||||
}
|
||||
command.output().unwrap()
|
||||
|
||||
zig_cmd
|
||||
}
|
||||
|
||||
pub fn build_zig_host_wasm32(
|
||||
|
@ -338,7 +330,7 @@ pub fn build_zig_host_wasm32(
|
|||
zig_str_path: &str,
|
||||
opt_level: OptLevel,
|
||||
shared_lib_path: Option<&Path>,
|
||||
) -> Output {
|
||||
) -> Command {
|
||||
if shared_lib_path.is_some() {
|
||||
unimplemented!("Linking a shared library to wasm not yet implemented");
|
||||
}
|
||||
|
@ -358,7 +350,7 @@ pub fn build_zig_host_wasm32(
|
|||
// we'd like to compile with `-target wasm32-wasi` but that is blocked on
|
||||
//
|
||||
// https://github.com/ziglang/zig/issues/9414
|
||||
let mut command = Command::new(&zig_executable());
|
||||
let mut zig_cmd = zig();
|
||||
let args = &[
|
||||
"build-obj",
|
||||
zig_host_src,
|
||||
|
@ -379,18 +371,19 @@ pub fn build_zig_host_wasm32(
|
|||
"--strip",
|
||||
];
|
||||
|
||||
command
|
||||
zig_cmd
|
||||
.env_clear()
|
||||
.env("PATH", env_path)
|
||||
.env("HOME", env_home)
|
||||
.args(args);
|
||||
|
||||
if matches!(opt_level, OptLevel::Optimize) {
|
||||
command.args(&["-O", "ReleaseSafe"]);
|
||||
zig_cmd.args(["-O", "ReleaseSafe"]);
|
||||
} else if matches!(opt_level, OptLevel::Size) {
|
||||
command.args(&["-O", "ReleaseSmall"]);
|
||||
zig_cmd.args(["-O", "ReleaseSmall"]);
|
||||
}
|
||||
command.output().unwrap()
|
||||
|
||||
zig_cmd
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
|
@ -403,15 +396,15 @@ pub fn build_c_host_native(
|
|||
sources: &[&str],
|
||||
opt_level: OptLevel,
|
||||
shared_lib_path: Option<&Path>,
|
||||
) -> Output {
|
||||
let mut command = Command::new("clang");
|
||||
command
|
||||
) -> Command {
|
||||
let mut clang_cmd = clang();
|
||||
clang_cmd
|
||||
.env_clear()
|
||||
.env("PATH", &env_path)
|
||||
.env("CPATH", &env_cpath)
|
||||
.env("HOME", &env_home)
|
||||
.env("PATH", env_path)
|
||||
.env("CPATH", env_cpath)
|
||||
.env("HOME", env_home)
|
||||
.args(sources)
|
||||
.args(&["-o", dest]);
|
||||
.args(["-o", dest]);
|
||||
if let Some(shared_lib_path) = shared_lib_path {
|
||||
match target.operating_system {
|
||||
OperatingSystem::Windows => {
|
||||
|
@ -432,7 +425,7 @@ pub fn build_c_host_native(
|
|||
);
|
||||
}
|
||||
_ => {
|
||||
command.args(&[
|
||||
clang_cmd.args([
|
||||
shared_lib_path.to_str().unwrap(),
|
||||
// This line is commented out because
|
||||
// @bhansconnect: With the addition of Str.graphemes, always
|
||||
|
@ -451,14 +444,15 @@ pub fn build_c_host_native(
|
|||
}
|
||||
}
|
||||
} else {
|
||||
command.args(&["-fPIC", "-c"]);
|
||||
clang_cmd.args(["-fPIC", "-c"]);
|
||||
}
|
||||
if matches!(opt_level, OptLevel::Optimize) {
|
||||
command.arg("-O3");
|
||||
clang_cmd.arg("-O3");
|
||||
} else if matches!(opt_level, OptLevel::Size) {
|
||||
command.arg("-Os");
|
||||
clang_cmd.arg("-Os");
|
||||
}
|
||||
command.output().unwrap()
|
||||
|
||||
clang_cmd
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
|
@ -471,7 +465,7 @@ pub fn build_swift_host_native(
|
|||
shared_lib_path: Option<&Path>,
|
||||
objc_header_path: Option<&str>,
|
||||
arch: Architecture,
|
||||
) -> Output {
|
||||
) -> Command {
|
||||
if shared_lib_path.is_some() {
|
||||
unimplemented!("Linking a shared library to Swift not yet implemented");
|
||||
}
|
||||
|
@ -479,8 +473,8 @@ pub fn build_swift_host_native(
|
|||
let mut command = Command::new("arch");
|
||||
command
|
||||
.env_clear()
|
||||
.env("PATH", &env_path)
|
||||
.env("HOME", &env_home);
|
||||
.env("PATH", env_path)
|
||||
.env("HOME", env_home);
|
||||
|
||||
match arch {
|
||||
Architecture::Aarch64(_) => command.arg("-arm64"),
|
||||
|
@ -493,10 +487,10 @@ pub fn build_swift_host_native(
|
|||
.args(sources)
|
||||
.arg("-emit-object")
|
||||
.arg("-parse-as-library")
|
||||
.args(&["-o", dest]);
|
||||
.args(["-o", dest]);
|
||||
|
||||
if let Some(objc_header) = objc_header_path {
|
||||
command.args(&["-import-objc-header", objc_header]);
|
||||
command.args(["-import-objc-header", objc_header]);
|
||||
}
|
||||
|
||||
if matches!(opt_level, OptLevel::Optimize) {
|
||||
|
@ -505,7 +499,7 @@ pub fn build_swift_host_native(
|
|||
command.arg("-Osize");
|
||||
}
|
||||
|
||||
command.output().unwrap()
|
||||
command
|
||||
}
|
||||
|
||||
pub fn rebuild_host(
|
||||
|
@ -567,7 +561,7 @@ pub fn rebuild_host(
|
|||
&zig_str_path
|
||||
);
|
||||
|
||||
let output = match target.architecture {
|
||||
let zig_cmd = match target.architecture {
|
||||
Architecture::Wasm32 => {
|
||||
let emit_bin = if matches!(opt_level, OptLevel::Development) {
|
||||
format!("-femit-bin={}", host_dest.to_str().unwrap())
|
||||
|
@ -585,8 +579,6 @@ pub fn rebuild_host(
|
|||
)
|
||||
}
|
||||
Architecture::X86_64 => {
|
||||
let emit_bin = format!("-femit-bin={}", host_dest.to_str().unwrap());
|
||||
|
||||
let target = match target.operating_system {
|
||||
OperatingSystem::Windows => "x86_64-windows-gnu",
|
||||
_ => "native",
|
||||
|
@ -595,7 +587,7 @@ pub fn rebuild_host(
|
|||
build_zig_host_native(
|
||||
&env_path,
|
||||
&env_home,
|
||||
&emit_bin,
|
||||
host_dest.to_str().unwrap(),
|
||||
zig_host_src.to_str().unwrap(),
|
||||
zig_str_path.to_str().unwrap(),
|
||||
target,
|
||||
|
@ -603,37 +595,31 @@ pub fn rebuild_host(
|
|||
shared_lib_path,
|
||||
)
|
||||
}
|
||||
Architecture::X86_32(_) => {
|
||||
let emit_bin = format!("-femit-bin={}", host_dest.to_str().unwrap());
|
||||
build_zig_host_native(
|
||||
&env_path,
|
||||
&env_home,
|
||||
&emit_bin,
|
||||
zig_host_src.to_str().unwrap(),
|
||||
zig_str_path.to_str().unwrap(),
|
||||
"i386-linux-musl",
|
||||
opt_level,
|
||||
shared_lib_path,
|
||||
)
|
||||
}
|
||||
Architecture::X86_32(_) => build_zig_host_native(
|
||||
&env_path,
|
||||
&env_home,
|
||||
host_dest.to_str().unwrap(),
|
||||
zig_host_src.to_str().unwrap(),
|
||||
zig_str_path.to_str().unwrap(),
|
||||
"i386-linux-musl",
|
||||
opt_level,
|
||||
shared_lib_path,
|
||||
),
|
||||
|
||||
Architecture::Aarch64(_) => {
|
||||
let emit_bin = format!("-femit-bin={}", host_dest.to_str().unwrap());
|
||||
build_zig_host_native(
|
||||
&env_path,
|
||||
&env_home,
|
||||
&emit_bin,
|
||||
zig_host_src.to_str().unwrap(),
|
||||
zig_str_path.to_str().unwrap(),
|
||||
target_zig_str(target),
|
||||
opt_level,
|
||||
shared_lib_path,
|
||||
)
|
||||
}
|
||||
Architecture::Aarch64(_) => build_zig_host_native(
|
||||
&env_path,
|
||||
&env_home,
|
||||
host_dest.to_str().unwrap(),
|
||||
zig_host_src.to_str().unwrap(),
|
||||
zig_str_path.to_str().unwrap(),
|
||||
target_zig_str(target),
|
||||
opt_level,
|
||||
shared_lib_path,
|
||||
),
|
||||
_ => internal_error!("Unsupported architecture {:?}", target.architecture),
|
||||
};
|
||||
|
||||
validate_output("host.zig", &zig_executable(), output)
|
||||
run_build_command(zig_cmd, "host.zig")
|
||||
} else if cargo_host_src.exists() {
|
||||
// Compile and link Cargo.toml, if it exists
|
||||
let cargo_dir = host_input_path.parent().unwrap();
|
||||
|
@ -646,25 +632,23 @@ pub fn rebuild_host(
|
|||
},
|
||||
);
|
||||
|
||||
let mut command = Command::new("cargo");
|
||||
command.arg("build").current_dir(cargo_dir);
|
||||
let mut cargo_cmd = cargo();
|
||||
cargo_cmd.arg("build").current_dir(cargo_dir);
|
||||
// Rust doesn't expose size without editing the cargo.toml. Instead just use release.
|
||||
if matches!(opt_level, OptLevel::Optimize | OptLevel::Size) {
|
||||
command.arg("--release");
|
||||
cargo_cmd.arg("--release");
|
||||
}
|
||||
|
||||
let source_file = if shared_lib_path.is_some() {
|
||||
command.env("RUSTFLAGS", "-C link-dead-code");
|
||||
command.args(&["--bin", "host"]);
|
||||
cargo_cmd.env("RUSTFLAGS", "-C link-dead-code");
|
||||
cargo_cmd.args(["--bin", "host"]);
|
||||
"src/main.rs"
|
||||
} else {
|
||||
command.arg("--lib");
|
||||
cargo_cmd.arg("--lib");
|
||||
"src/lib.rs"
|
||||
};
|
||||
|
||||
let output = command.output().unwrap();
|
||||
|
||||
validate_output(source_file, "cargo build", output);
|
||||
run_build_command(cargo_cmd, source_file);
|
||||
|
||||
if shared_lib_path.is_some() {
|
||||
// For surgical linking, just copy the dynamically linked rust app.
|
||||
|
@ -674,7 +658,7 @@ pub fn rebuild_host(
|
|||
} else {
|
||||
// Cargo hosts depend on a c wrapper for the api. Compile host.c as well.
|
||||
|
||||
let output = build_c_host_native(
|
||||
let clang_cmd = build_c_host_native(
|
||||
target,
|
||||
&env_path,
|
||||
&env_home,
|
||||
|
@ -684,23 +668,22 @@ pub fn rebuild_host(
|
|||
opt_level,
|
||||
shared_lib_path,
|
||||
);
|
||||
validate_output("host.c", "clang", output);
|
||||
|
||||
let output = Command::new("ld")
|
||||
.env_clear()
|
||||
.env("PATH", &env_path)
|
||||
.args(&[
|
||||
"-r",
|
||||
"-L",
|
||||
cargo_out_dir.to_str().unwrap(),
|
||||
c_host_dest.to_str().unwrap(),
|
||||
"-lhost",
|
||||
"-o",
|
||||
host_dest.to_str().unwrap(),
|
||||
])
|
||||
.output()
|
||||
.unwrap();
|
||||
validate_output("c_host.o", "ld", output);
|
||||
run_build_command(clang_cmd, "host.c");
|
||||
|
||||
let mut ld_cmd = Command::new("ld");
|
||||
|
||||
ld_cmd.env_clear().env("PATH", &env_path).args([
|
||||
"-r",
|
||||
"-L",
|
||||
cargo_out_dir.to_str().unwrap(),
|
||||
c_host_dest.to_str().unwrap(),
|
||||
"-lhost",
|
||||
"-o",
|
||||
host_dest.to_str().unwrap(),
|
||||
]);
|
||||
|
||||
run_build_command(ld_cmd, "c_host.o");
|
||||
|
||||
// Clean up c_host.o
|
||||
if c_host_dest.exists() {
|
||||
|
@ -709,25 +692,24 @@ pub fn rebuild_host(
|
|||
}
|
||||
} else if rust_host_src.exists() {
|
||||
// Compile and link host.rs, if it exists
|
||||
let mut command = Command::new("rustc");
|
||||
command.args(&[
|
||||
let mut rustc_cmd = Command::new("rustc");
|
||||
rustc_cmd.args([
|
||||
rust_host_src.to_str().unwrap(),
|
||||
"-o",
|
||||
rust_host_dest.to_str().unwrap(),
|
||||
]);
|
||||
if matches!(opt_level, OptLevel::Optimize) {
|
||||
command.arg("-O");
|
||||
rustc_cmd.arg("-O");
|
||||
} else if matches!(opt_level, OptLevel::Size) {
|
||||
command.arg("-C opt-level=s");
|
||||
rustc_cmd.arg("-C opt-level=s");
|
||||
}
|
||||
let output = command.output().unwrap();
|
||||
|
||||
validate_output("host.rs", "rustc", output);
|
||||
run_build_command(rustc_cmd, "host.rs");
|
||||
|
||||
// Rust hosts depend on a c wrapper for the api. Compile host.c as well.
|
||||
if shared_lib_path.is_some() {
|
||||
// If compiling to executable, let c deal with linking as well.
|
||||
let output = build_c_host_native(
|
||||
let clang_cmd = build_c_host_native(
|
||||
target,
|
||||
&env_path,
|
||||
&env_home,
|
||||
|
@ -740,9 +722,9 @@ pub fn rebuild_host(
|
|||
opt_level,
|
||||
shared_lib_path,
|
||||
);
|
||||
validate_output("host.c", "clang", output);
|
||||
run_build_command(clang_cmd, "host.c");
|
||||
} else {
|
||||
let output = build_c_host_native(
|
||||
let clang_cmd = build_c_host_native(
|
||||
target,
|
||||
&env_path,
|
||||
&env_home,
|
||||
|
@ -753,21 +735,19 @@ pub fn rebuild_host(
|
|||
shared_lib_path,
|
||||
);
|
||||
|
||||
validate_output("host.c", "clang", output);
|
||||
let output = Command::new("ld")
|
||||
.env_clear()
|
||||
.env("PATH", &env_path)
|
||||
.args(&[
|
||||
"-r",
|
||||
c_host_dest.to_str().unwrap(),
|
||||
rust_host_dest.to_str().unwrap(),
|
||||
"-o",
|
||||
host_dest.to_str().unwrap(),
|
||||
])
|
||||
.output()
|
||||
.unwrap();
|
||||
run_build_command(clang_cmd, "host.c");
|
||||
|
||||
validate_output("rust_host.o", "ld", output);
|
||||
let mut ld_cmd = Command::new("ld");
|
||||
|
||||
ld_cmd.env_clear().env("PATH", &env_path).args([
|
||||
"-r",
|
||||
c_host_dest.to_str().unwrap(),
|
||||
rust_host_dest.to_str().unwrap(),
|
||||
"-o",
|
||||
host_dest.to_str().unwrap(),
|
||||
]);
|
||||
|
||||
run_build_command(ld_cmd, "rust_host.o");
|
||||
}
|
||||
|
||||
// Clean up rust_host.o and c_host.o
|
||||
|
@ -779,7 +759,7 @@ pub fn rebuild_host(
|
|||
}
|
||||
} else if c_host_src.exists() {
|
||||
// Compile host.c, if it exists
|
||||
let output = build_c_host_native(
|
||||
let clang_cmd = build_c_host_native(
|
||||
target,
|
||||
&env_path,
|
||||
&env_home,
|
||||
|
@ -789,10 +769,11 @@ pub fn rebuild_host(
|
|||
opt_level,
|
||||
shared_lib_path,
|
||||
);
|
||||
validate_output("host.c", "clang", output);
|
||||
|
||||
run_build_command(clang_cmd, "host.c");
|
||||
} else if swift_host_src.exists() {
|
||||
// Compile host.swift, if it exists
|
||||
let output = build_swift_host_native(
|
||||
let swiftc_cmd = build_swift_host_native(
|
||||
&env_path,
|
||||
&env_home,
|
||||
host_dest.to_str().unwrap(),
|
||||
|
@ -804,7 +785,8 @@ pub fn rebuild_host(
|
|||
.then(|| swift_host_header_src.to_str().unwrap()),
|
||||
target.architecture,
|
||||
);
|
||||
validate_output("host.swift", "swiftc", output);
|
||||
|
||||
run_build_command(swiftc_cmd, "host.swift");
|
||||
}
|
||||
|
||||
host_dest
|
||||
|
@ -873,10 +855,10 @@ fn link_linux(
|
|||
|
||||
if let Architecture::X86_32(_) = target.architecture {
|
||||
return Ok((
|
||||
Command::new(&zig_executable())
|
||||
.args(&["build-exe"])
|
||||
zig()
|
||||
.args(["build-exe"])
|
||||
.args(input_paths)
|
||||
.args(&[
|
||||
.args([
|
||||
"-target",
|
||||
"i386-linux-musl",
|
||||
"-lc",
|
||||
|
@ -1029,7 +1011,7 @@ fn link_linux(
|
|||
.filter(|&(ref k, _)| k.starts_with("NIX_"))
|
||||
.collect::<HashMap<String, String>>(),
|
||||
)
|
||||
.args(&[
|
||||
.args([
|
||||
"--gc-sections",
|
||||
"--eh-frame-hdr",
|
||||
"-A",
|
||||
|
@ -1039,11 +1021,11 @@ fn link_linux(
|
|||
&*crtn_path.to_string_lossy(),
|
||||
])
|
||||
.args(&base_args)
|
||||
.args(&["-dynamic-linker", ld_linux])
|
||||
.args(["-dynamic-linker", ld_linux])
|
||||
.args(input_paths)
|
||||
// ld.lld requires this argument, and does not accept --arch
|
||||
// .args(&["-L/usr/lib/x86_64-linux-gnu"])
|
||||
.args(&[
|
||||
.args([
|
||||
// Libraries - see https://github.com/roc-lang/roc/pull/554#discussion_r496365925
|
||||
// for discussion and further references
|
||||
"-lc",
|
||||
|
@ -1094,7 +1076,7 @@ fn link_macos(
|
|||
// The `-l` flags should go after the `.o` arguments
|
||||
// Don't allow LD_ env vars to affect this
|
||||
.env_clear()
|
||||
.args(&[
|
||||
.args([
|
||||
// NOTE: we don't do --gc-sections on macOS because the default
|
||||
// macOS linker doesn't support it, but it's a performance
|
||||
// optimization, so if we ever switch to a different linker,
|
||||
|
@ -1126,7 +1108,7 @@ fn link_macos(
|
|||
ld_command.arg(roc_link_flag);
|
||||
}
|
||||
|
||||
ld_command.args(&[
|
||||
ld_command.args([
|
||||
// Libraries - see https://github.com/roc-lang/roc/pull/554#discussion_r496392274
|
||||
// for discussion and further references
|
||||
"-lSystem",
|
||||
|
@ -1166,7 +1148,7 @@ fn link_macos(
|
|||
Architecture::Aarch64(_) => {
|
||||
ld_child.wait()?;
|
||||
let codesign_child = Command::new("codesign")
|
||||
.args(&["-s", "-", output_path.to_str().unwrap()])
|
||||
.args(["-s", "-", output_path.to_str().unwrap()])
|
||||
.spawn()?;
|
||||
|
||||
Ok((codesign_child, output_path))
|
||||
|
@ -1202,10 +1184,10 @@ fn link_wasm32(
|
|||
let zig_str_path = find_zig_str_path();
|
||||
let wasi_libc_path = find_wasi_libc_path();
|
||||
|
||||
let child = Command::new(&zig_executable())
|
||||
let child = zig()
|
||||
// .env_clear()
|
||||
// .env("PATH", &env_path)
|
||||
.args(&["build-exe"])
|
||||
.args(["build-exe"])
|
||||
.args(input_paths)
|
||||
.args([
|
||||
// include wasi libc
|
||||
|
@ -1239,8 +1221,8 @@ fn link_windows(
|
|||
|
||||
match link_type {
|
||||
LinkType::Dylib => {
|
||||
let child = Command::new(&zig_executable())
|
||||
.args(&["build-lib"])
|
||||
let child = zig()
|
||||
.args(["build-lib"])
|
||||
.args(input_paths)
|
||||
.args([
|
||||
"-lc",
|
||||
|
@ -1261,8 +1243,8 @@ fn link_windows(
|
|||
Ok((child, output_path))
|
||||
}
|
||||
LinkType::Executable => {
|
||||
let child = Command::new(&zig_executable())
|
||||
.args(&["build-exe"])
|
||||
let child = zig()
|
||||
.args(["build-exe"])
|
||||
.args(input_paths)
|
||||
.args([
|
||||
"-target",
|
||||
|
@ -1349,7 +1331,7 @@ pub fn preprocess_host_wasm32(host_input_path: &Path, preprocessed_host_path: &P
|
|||
(but seems to be an unofficial API)
|
||||
*/
|
||||
|
||||
let mut command = Command::new(&zig_executable());
|
||||
let mut zig_cmd = zig();
|
||||
let args = &[
|
||||
"wasm-ld",
|
||||
&bitcode::get_builtins_wasm32_obj_path(),
|
||||
|
@ -1364,28 +1346,30 @@ pub fn preprocess_host_wasm32(host_input_path: &Path, preprocessed_host_path: &P
|
|||
"--relocatable",
|
||||
];
|
||||
|
||||
command.args(args);
|
||||
zig_cmd.args(args);
|
||||
|
||||
// println!("\npreprocess_host_wasm32");
|
||||
// println!("zig {}\n", args.join(" "));
|
||||
|
||||
let output = command.output().unwrap();
|
||||
validate_output(output_file, "zig", output)
|
||||
run_build_command(zig_cmd, output_file)
|
||||
}
|
||||
|
||||
fn validate_output(file_name: &str, cmd_name: &str, output: Output) {
|
||||
if !output.status.success() {
|
||||
match std::str::from_utf8(&output.stderr) {
|
||||
fn run_build_command(mut command: Command, file_to_build: &str) {
|
||||
let cmd_str = format!("{:?}", &command);
|
||||
let cmd_output = command.output().unwrap();
|
||||
|
||||
if !cmd_output.status.success() {
|
||||
match std::str::from_utf8(&cmd_output.stderr) {
|
||||
Ok(stderr) => internal_error!(
|
||||
"Failed to rebuild {} - stderr of the `{}` command was:\n{}",
|
||||
file_name,
|
||||
cmd_name,
|
||||
"Error:\n Failed to rebuild {}:\n The executed command was:\n {}\n stderr of that command:\n {}",
|
||||
file_to_build,
|
||||
cmd_str,
|
||||
stderr
|
||||
),
|
||||
Err(utf8_err) => internal_error!(
|
||||
"Failed to rebuild {} - stderr of the `{}` command was invalid utf8 ({:?})",
|
||||
file_name,
|
||||
cmd_name,
|
||||
"Error:\n Failed to rebuild {}:\n The executed command was:\n {}\n stderr of that command could not be parsed as valid utf8:\n {}",
|
||||
file_to_build,
|
||||
cmd_str,
|
||||
utf8_err
|
||||
),
|
||||
}
|
||||
|
|
|
@ -347,7 +347,7 @@ fn gen_from_mono_module_llvm(
|
|||
|
||||
// run the debugir https://github.com/vaivaswatha/debugir tool
|
||||
match Command::new("debugir")
|
||||
.args(&["-instnamer", app_ll_file.to_str().unwrap()])
|
||||
.args(["-instnamer", app_ll_file.to_str().unwrap()])
|
||||
.output()
|
||||
{
|
||||
Ok(_) => {}
|
||||
|
@ -369,7 +369,7 @@ fn gen_from_mono_module_llvm(
|
|||
| Architecture::Aarch64(_)
|
||||
| Architecture::Wasm32 => {
|
||||
let ll_to_bc = Command::new("llvm-as")
|
||||
.args(&[
|
||||
.args([
|
||||
app_ll_dbg_file.to_str().unwrap(),
|
||||
"-o",
|
||||
app_bc_file.to_str().unwrap(),
|
||||
|
|
|
@ -16,6 +16,7 @@ lazy_static = "1.4.0"
|
|||
[build-dependencies]
|
||||
# dunce can be removed once ziglang/zig#5109 is fixed
|
||||
dunce = "1.0.3"
|
||||
roc_utils = { path = "../../utils" }
|
||||
|
||||
[target.'cfg(target_os = "macos")'.build-dependencies]
|
||||
tempfile = "3.2.0"
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
# https://vaneyckt.io/posts/safer_bash_scripts_with_set_euxo_pipefail/
|
||||
set -euxo pipefail
|
||||
|
||||
zig build-exe benchmark/dec.zig -O ReleaseFast --main-pkg-path .
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
use std::convert::AsRef;
|
||||
use roc_utils::zig;
|
||||
use std::env;
|
||||
use std::ffi::OsStr;
|
||||
use std::fs;
|
||||
use std::io;
|
||||
use std::path::Path;
|
||||
|
@ -14,13 +13,6 @@ use tempfile::tempdir;
|
|||
/// To debug the zig code with debug prints, we need to disable the wasm code gen
|
||||
const DEBUG: bool = false;
|
||||
|
||||
fn zig_executable() -> String {
|
||||
match std::env::var("ROC_ZIG") {
|
||||
Ok(path) => path,
|
||||
Err(_) => "zig".into(),
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
println!("cargo:rerun-if-changed=build.rs");
|
||||
|
||||
|
@ -95,12 +87,13 @@ fn generate_object_file(bitcode_path: &Path, zig_object: &str, object_file_name:
|
|||
println!("Compiling zig object `{}` to: {}", zig_object, src_obj);
|
||||
|
||||
if !DEBUG {
|
||||
run_command(
|
||||
&bitcode_path,
|
||||
&zig_executable(),
|
||||
&["build", zig_object, "-Drelease=true"],
|
||||
0,
|
||||
);
|
||||
let mut zig_cmd = zig();
|
||||
|
||||
zig_cmd
|
||||
.current_dir(bitcode_path)
|
||||
.args(["build", zig_object, "-Drelease=true"]);
|
||||
|
||||
run_command(zig_cmd, 0);
|
||||
|
||||
println!("Moving zig object `{}` to: {}", zig_object, dest_obj);
|
||||
|
||||
|
@ -130,12 +123,13 @@ fn generate_bc_file(bitcode_path: &Path, zig_object: &str, file_name: &str) {
|
|||
#[cfg(target_os = "macos")]
|
||||
let _ = fs::remove_dir_all("./bitcode/zig-cache");
|
||||
|
||||
run_command(
|
||||
&bitcode_path,
|
||||
&zig_executable(),
|
||||
&["build", zig_object, "-Drelease=true"],
|
||||
0,
|
||||
);
|
||||
let mut zig_cmd = zig();
|
||||
|
||||
zig_cmd
|
||||
.current_dir(bitcode_path)
|
||||
.args(["build", zig_object, "-Drelease=true"]);
|
||||
|
||||
run_command(zig_cmd, 0);
|
||||
}
|
||||
|
||||
pub fn get_lib_dir() -> PathBuf {
|
||||
|
@ -174,7 +168,7 @@ fn copy_zig_builtins_to_target_dir(bitcode_path: &Path) {
|
|||
// recursively copy all the .zig files from this directory, but do *not* recurse into zig-cache/
|
||||
fn cp_unless_zig_cache(src_dir: &Path, target_dir: &Path) -> io::Result<()> {
|
||||
// Make sure the destination directory exists before we try to copy anything into it.
|
||||
std::fs::create_dir_all(&target_dir).unwrap_or_else(|err| {
|
||||
std::fs::create_dir_all(target_dir).unwrap_or_else(|err| {
|
||||
panic!(
|
||||
"Failed to create output library directory for zig bitcode {:?}: {:?}",
|
||||
target_dir, err
|
||||
|
@ -204,19 +198,10 @@ fn cp_unless_zig_cache(src_dir: &Path, target_dir: &Path) -> io::Result<()> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
fn run_command<S, I: Copy, P: AsRef<Path> + Copy>(
|
||||
path: P,
|
||||
command_str: &str,
|
||||
args: I,
|
||||
flaky_fail_counter: usize,
|
||||
) where
|
||||
I: IntoIterator<Item = S>,
|
||||
S: AsRef<OsStr>,
|
||||
{
|
||||
let output_result = Command::new(OsStr::new(&command_str))
|
||||
.current_dir(path)
|
||||
.args(args)
|
||||
.output();
|
||||
fn run_command(mut command: Command, flaky_fail_counter: usize) {
|
||||
let command_str = format!("{:?}", &command);
|
||||
|
||||
let output_result = command.output();
|
||||
|
||||
match output_result {
|
||||
Ok(output) => match output.status.success() {
|
||||
|
@ -227,14 +212,14 @@ fn run_command<S, I: Copy, P: AsRef<Path> + Copy>(
|
|||
Err(_) => format!("Failed to run \"{}\"", command_str),
|
||||
};
|
||||
|
||||
// flaky test error that only occurs sometimes inside MacOS ci run
|
||||
// Flaky test errors that only occur sometimes on MacOS ci server.
|
||||
if error_str.contains("FileNotFound")
|
||||
|| error_str.contains("unable to save cached ZIR code")
|
||||
{
|
||||
if flaky_fail_counter == 10 {
|
||||
panic!("{} failed 10 times in a row. The following error is unlikely to be a flaky error: {}", command_str, error_str);
|
||||
} else {
|
||||
run_command(path, command_str, args, flaky_fail_counter + 1)
|
||||
run_command(command, flaky_fail_counter + 1)
|
||||
}
|
||||
} else {
|
||||
panic!("{} failed: {}", command_str, error_str);
|
||||
|
|
|
@ -223,18 +223,18 @@ expect update (single "a" Bool.true) "a" alterValue == empty
|
|||
## Dict.empty
|
||||
## |> Dict.insert 1234 "5678"
|
||||
## |> Dict.contains 1234
|
||||
## |> Bool.isEq Bool.true
|
||||
contains : Dict k v, k -> Bool | k has Eq
|
||||
contains = \@Dict list, needle ->
|
||||
step = \_, Pair key _val ->
|
||||
if key == needle then
|
||||
Break {}
|
||||
else
|
||||
Continue {}
|
||||
List.any list \Pair key _val -> key == needle
|
||||
|
||||
when List.iterate list {} step is
|
||||
Continue _ -> Bool.false
|
||||
Break _ -> Bool.true
|
||||
expect contains empty "a" == Bool.false
|
||||
expect contains (single "a" {}) "a" == Bool.true
|
||||
expect contains (single "b" {}) "a" == Bool.false
|
||||
expect
|
||||
Dict.empty
|
||||
|> Dict.insert 1234 "5678"
|
||||
|> Dict.contains 1234
|
||||
|> Bool.isEq Bool.true
|
||||
|
||||
## Returns a dictionary containing the key and value provided as input.
|
||||
##
|
||||
|
|
|
@ -8,7 +8,6 @@ interface List
|
|||
map,
|
||||
len,
|
||||
withCapacity,
|
||||
iterate,
|
||||
walkBackwards,
|
||||
concat,
|
||||
first,
|
||||
|
|
|
@ -28,7 +28,7 @@ pub struct MemberVariables {
|
|||
|
||||
/// The member and its signature is defined locally, in the module the store is created for.
|
||||
/// We need to instantiate and introduce this during solving.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub struct ResolvedMemberType(Variable);
|
||||
|
||||
/// Member type information that needs to be resolved from imports.
|
||||
|
@ -56,7 +56,7 @@ impl ResolvePhase for Pending {
|
|||
type MemberType = PendingMemberType;
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, Copy, PartialEq)]
|
||||
#[derive(Default, Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub struct Resolved;
|
||||
impl ResolvePhase for Resolved {
|
||||
type MemberType = ResolvedMemberType;
|
||||
|
|
|
@ -13,13 +13,14 @@ use roc_types::types::{Category, PatternCategory, Type};
|
|||
pub struct Constraints {
|
||||
pub constraints: Vec<Constraint>,
|
||||
pub types: Vec<Cell<Type>>,
|
||||
pub type_slices: Vec<TypeOrVar>,
|
||||
pub variables: Vec<Variable>,
|
||||
pub loc_symbols: Vec<(Symbol, Region)>,
|
||||
pub let_constraints: Vec<LetConstraint>,
|
||||
pub categories: Vec<Category>,
|
||||
pub pattern_categories: Vec<PatternCategory>,
|
||||
pub expectations: Vec<Expected<Cell<Type>>>,
|
||||
pub pattern_expectations: Vec<PExpected<Cell<Type>>>,
|
||||
pub expectations: Vec<Expected<TypeOrVar>>,
|
||||
pub pattern_expectations: Vec<PExpected<TypeOrVar>>,
|
||||
pub includes_tags: Vec<IncludesTag>,
|
||||
pub strings: Vec<&'static str>,
|
||||
pub sketched_rows: Vec<SketchedRows>,
|
||||
|
@ -33,6 +34,7 @@ impl std::fmt::Debug for Constraints {
|
|||
f.debug_struct("Constraints")
|
||||
.field("constraints", &self.constraints)
|
||||
.field("types", &"<types>")
|
||||
.field("type_slices", &self.type_slices)
|
||||
.field("variables", &self.variables)
|
||||
.field("loc_symbols", &self.loc_symbols)
|
||||
.field("let_constraints", &self.let_constraints)
|
||||
|
@ -56,15 +58,15 @@ impl Default for Constraints {
|
|||
}
|
||||
}
|
||||
|
||||
pub type TypeIndex = Index<Cell<Type>>;
|
||||
pub type ExpectedTypeIndex = Index<Expected<Cell<Type>>>;
|
||||
pub type PExpectedTypeIndex = Index<PExpected<Cell<Type>>>;
|
||||
pub type ExpectedTypeIndex = Index<Expected<TypeOrVar>>;
|
||||
pub type PExpectedTypeIndex = Index<PExpected<TypeOrVar>>;
|
||||
pub type TypeOrVar = EitherIndex<Cell<Type>, Variable>;
|
||||
|
||||
impl Constraints {
|
||||
pub fn new() -> Self {
|
||||
let constraints = Vec::new();
|
||||
let mut types = Vec::new();
|
||||
let type_slices = Vec::with_capacity(16);
|
||||
let variables = Vec::new();
|
||||
let loc_symbols = Vec::new();
|
||||
let let_constraints = Vec::new();
|
||||
|
@ -119,6 +121,7 @@ impl Constraints {
|
|||
Self {
|
||||
constraints,
|
||||
types,
|
||||
type_slices,
|
||||
variables,
|
||||
loc_symbols,
|
||||
let_constraints,
|
||||
|
@ -211,12 +214,12 @@ impl Constraints {
|
|||
EitherIndex::from_right(index)
|
||||
}
|
||||
|
||||
pub fn push_expected_type(&mut self, expected: Expected<Type>) -> ExpectedTypeIndex {
|
||||
Index::push_new(&mut self.expectations, expected.map(Cell::new))
|
||||
pub fn push_expected_type(&mut self, expected: Expected<TypeOrVar>) -> ExpectedTypeIndex {
|
||||
Index::push_new(&mut self.expectations, expected)
|
||||
}
|
||||
|
||||
pub fn push_pat_expected_type(&mut self, expected: PExpected<Type>) -> PExpectedTypeIndex {
|
||||
Index::push_new(&mut self.pattern_expectations, expected.map(Cell::new))
|
||||
pub fn push_pat_expected_type(&mut self, expected: PExpected<TypeOrVar>) -> PExpectedTypeIndex {
|
||||
Index::push_new(&mut self.pattern_expectations, expected)
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
|
@ -372,24 +375,24 @@ impl Constraints {
|
|||
|
||||
fn def_types_slice<I>(&mut self, it: I) -> DefTypes
|
||||
where
|
||||
I: IntoIterator<Item = (Symbol, Loc<Type>)>,
|
||||
I: IntoIterator<Item = (Symbol, Loc<TypeOrVar>)>,
|
||||
I::IntoIter: ExactSizeIterator,
|
||||
{
|
||||
let it = it.into_iter();
|
||||
|
||||
let types_start = self.types.len();
|
||||
let types_start = self.type_slices.len();
|
||||
let loc_symbols_start = self.loc_symbols.len();
|
||||
|
||||
// because we have an ExactSizeIterator, we can reserve space here
|
||||
let length = it.len();
|
||||
|
||||
self.types.reserve(length);
|
||||
self.type_slices.reserve(length);
|
||||
self.loc_symbols.reserve(length);
|
||||
|
||||
for (symbol, loc_type) in it {
|
||||
let Loc { region, value } = loc_type;
|
||||
|
||||
self.types.push(Cell::new(value));
|
||||
self.type_slices.push(value);
|
||||
self.loc_symbols.push((symbol, region));
|
||||
}
|
||||
|
||||
|
@ -460,7 +463,7 @@ impl Constraints {
|
|||
where
|
||||
I1: IntoIterator<Item = Variable>,
|
||||
I2: IntoIterator<Item = Variable>,
|
||||
I3: IntoIterator<Item = (Symbol, Loc<Type>)>,
|
||||
I3: IntoIterator<Item = (Symbol, Loc<TypeOrVar>)>,
|
||||
I3::IntoIter: ExactSizeIterator,
|
||||
{
|
||||
// defs and ret constraint are stored consequtively, so we only need to store one index
|
||||
|
@ -506,7 +509,7 @@ impl Constraints {
|
|||
) -> Constraint
|
||||
where
|
||||
I1: IntoIterator<Item = Variable>,
|
||||
I2: IntoIterator<Item = (Symbol, Loc<Type>)>,
|
||||
I2: IntoIterator<Item = (Symbol, Loc<TypeOrVar>)>,
|
||||
I2::IntoIter: ExactSizeIterator,
|
||||
{
|
||||
// defs and ret constraint are stored consequtively, so we only need to store one index
|
||||
|
@ -615,8 +618,8 @@ impl Constraints {
|
|||
real_var: Variable,
|
||||
real_region: Region,
|
||||
category_and_expectation: Result<
|
||||
(Category, Expected<Type>),
|
||||
(PatternCategory, PExpected<Type>),
|
||||
(Category, ExpectedTypeIndex),
|
||||
(PatternCategory, PExpectedTypeIndex),
|
||||
>,
|
||||
sketched_rows: SketchedRows,
|
||||
context: ExhaustiveContext,
|
||||
|
@ -628,15 +631,12 @@ impl Constraints {
|
|||
let equality = match category_and_expectation {
|
||||
Ok((category, expected)) => {
|
||||
let category = Index::push_new(&mut self.categories, category);
|
||||
let expected = Index::push_new(&mut self.expectations, expected.map(Cell::new));
|
||||
let equality = Eq(real_var, expected, category, real_region);
|
||||
let equality = Index::push_new(&mut self.eq, equality);
|
||||
Ok(equality)
|
||||
}
|
||||
Err((category, expected)) => {
|
||||
let category = Index::push_new(&mut self.pattern_categories, category);
|
||||
let expected =
|
||||
Index::push_new(&mut self.pattern_expectations, expected.map(Cell::new));
|
||||
let equality = PatternEq(real_var, expected, category, real_region);
|
||||
let equality = Index::push_new(&mut self.pattern_eq, equality);
|
||||
Err(equality)
|
||||
|
@ -679,7 +679,7 @@ roc_error_macros::assert_sizeof_aarch64!(Constraint, 3 * 8);
|
|||
#[derive(Clone, Copy, Debug)]
|
||||
pub struct Eq(
|
||||
pub TypeOrVar,
|
||||
pub Index<Expected<Cell<Type>>>,
|
||||
pub ExpectedTypeIndex,
|
||||
pub Index<Category>,
|
||||
pub Region,
|
||||
);
|
||||
|
@ -759,7 +759,7 @@ pub enum Constraint {
|
|||
|
||||
#[derive(Debug, Clone, Copy, Default)]
|
||||
pub struct DefTypes {
|
||||
pub types: Slice<Type>,
|
||||
pub types: Slice<TypeOrVar>,
|
||||
pub loc_symbols: Slice<(Symbol, Region)>,
|
||||
}
|
||||
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
use crate::{
|
||||
def::Def,
|
||||
expr::{AccessorData, ClosureData, Expr, Field, OpaqueWrapFunctionData, WhenBranchPattern},
|
||||
pattern::{DestructType, Pattern, RecordDestruct},
|
||||
pattern::{DestructType, ListPatterns, Pattern, RecordDestruct},
|
||||
};
|
||||
use roc_module::{
|
||||
ident::{Lowercase, TagName},
|
||||
|
@ -707,6 +707,18 @@ fn deep_copy_pattern_help<C: CopyEnv>(
|
|||
})
|
||||
.collect(),
|
||||
},
|
||||
List {
|
||||
list_var,
|
||||
elem_var,
|
||||
patterns: ListPatterns { patterns, opt_rest },
|
||||
} => List {
|
||||
list_var: sub!(*list_var),
|
||||
elem_var: sub!(*elem_var),
|
||||
patterns: ListPatterns {
|
||||
patterns: patterns.iter().map(|lp| lp.map(|p| go_help!(p))).collect(),
|
||||
opt_rest: *opt_rest,
|
||||
},
|
||||
},
|
||||
NumLiteral(var, s, n, bound) => NumLiteral(sub!(*var), s.clone(), *n, *bound),
|
||||
IntLiteral(v1, v2, s, n, bound) => IntLiteral(sub!(*v1), sub!(*v2), s.clone(), *n, *bound),
|
||||
FloatLiteral(v1, v2, s, n, bound) => {
|
||||
|
|
|
@ -1821,7 +1821,7 @@ pub(crate) fn sort_can_defs(
|
|||
.strongly_connected_components_subset(group);
|
||||
|
||||
debug_assert!(
|
||||
!group.iter_ones().any(|index| matches!((&defs[index]).as_ref().unwrap().loc_pattern.value, Pattern::AbilityMemberSpecialization{..})),
|
||||
!group.iter_ones().any(|index| matches!(defs[index].as_ref().unwrap().loc_pattern.value, Pattern::AbilityMemberSpecialization{..})),
|
||||
"A specialization is involved in a recursive cycle - this should not be knowable until solving");
|
||||
|
||||
let declaration = if direct_sccs.groups().count() == 1 {
|
||||
|
@ -1921,6 +1921,14 @@ fn pattern_to_vars_by_symbol(
|
|||
}
|
||||
}
|
||||
|
||||
List {
|
||||
patterns, elem_var, ..
|
||||
} => {
|
||||
for pat in patterns.patterns.iter() {
|
||||
pattern_to_vars_by_symbol(vars_by_symbol, &pat.value, *elem_var);
|
||||
}
|
||||
}
|
||||
|
||||
NumLiteral(..)
|
||||
| IntLiteral(..)
|
||||
| FloatLiteral(..)
|
||||
|
|
|
@ -4,11 +4,14 @@ use roc_collections::all::HumanIndex;
|
|||
use roc_collections::VecMap;
|
||||
use roc_error_macros::internal_error;
|
||||
use roc_exhaustive::{
|
||||
is_useful, Ctor, CtorName, Error, Guard, Literal, Pattern, RenderAs, TagId, Union,
|
||||
is_useful, Ctor, CtorName, Error, Guard, ListArity, Literal, Pattern, RenderAs, TagId, Union,
|
||||
};
|
||||
use roc_module::ident::{TagIdIntType, TagName};
|
||||
use roc_module::ident::{Lowercase, TagIdIntType, TagName};
|
||||
use roc_module::symbol::Symbol;
|
||||
use roc_region::all::{Loc, Region};
|
||||
use roc_types::subs::{Content, FlatType, RedundantMark, Subs, SubsFmtContent, Variable};
|
||||
use roc_types::subs::{
|
||||
Content, FlatType, GetSubsSlice, RedundantMark, Subs, SubsFmtContent, Variable,
|
||||
};
|
||||
use roc_types::types::AliasKind;
|
||||
|
||||
pub use roc_exhaustive::Context as ExhaustiveContext;
|
||||
|
@ -22,12 +25,19 @@ pub struct ExhaustiveSummary {
|
|||
pub redundancies: Vec<RedundantMark>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct TypeError;
|
||||
|
||||
/// Exhaustiveness-checks [sketched rows][SketchedRows] against an expected type.
|
||||
///
|
||||
/// Returns an error if the sketch has a type error, in which case exhautiveness checking will not
|
||||
/// have been performed.
|
||||
pub fn check(
|
||||
subs: &Subs,
|
||||
real_var: Variable,
|
||||
sketched_rows: SketchedRows,
|
||||
context: ExhaustiveContext,
|
||||
) -> ExhaustiveSummary {
|
||||
) -> Result<ExhaustiveSummary, TypeError> {
|
||||
let overall_region = sketched_rows.overall_region;
|
||||
let mut all_errors = Vec::with_capacity(1);
|
||||
|
||||
|
@ -35,7 +45,7 @@ pub fn check(
|
|||
non_redundant_rows,
|
||||
errors,
|
||||
redundancies,
|
||||
} = sketched_rows.reify_to_non_redundant(subs, real_var);
|
||||
} = sketched_rows.reify_to_non_redundant(subs, real_var)?;
|
||||
all_errors.extend(errors);
|
||||
|
||||
let exhaustive = match roc_exhaustive::check(overall_region, context, non_redundant_rows) {
|
||||
|
@ -46,11 +56,11 @@ pub fn check(
|
|||
}
|
||||
};
|
||||
|
||||
ExhaustiveSummary {
|
||||
Ok(ExhaustiveSummary {
|
||||
errors: all_errors,
|
||||
exhaustive,
|
||||
redundancies,
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
|
@ -60,7 +70,8 @@ enum SketchedPattern {
|
|||
/// A constructor whose expected union is not yet known.
|
||||
/// We'll know the whole union when reifying the sketched pattern against an expected case type.
|
||||
Ctor(TagName, Vec<SketchedPattern>),
|
||||
KnownCtor(Union, IndexCtor<'static>, TagId, Vec<SketchedPattern>),
|
||||
KnownCtor(Union, TagId, Vec<SketchedPattern>),
|
||||
List(ListArity, Vec<SketchedPattern>),
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
|
||||
|
@ -68,12 +79,42 @@ enum IndexCtor<'a> {
|
|||
/// Index an opaque type. There should be one argument.
|
||||
Opaque,
|
||||
/// Index a record type. The arguments are the types of the record fields.
|
||||
Record,
|
||||
Record(&'a [Lowercase]),
|
||||
/// Index a guard constructor. The arguments are a faux guard pattern, and then the real
|
||||
/// pattern being guarded. E.g. `A B if g` becomes Guard { [True, (A B)] }.
|
||||
Guard,
|
||||
/// Index a tag union with the given tag constructor.
|
||||
Tag(&'a TagName),
|
||||
/// Index a list type. The argument is the element type.
|
||||
List,
|
||||
}
|
||||
|
||||
impl<'a> IndexCtor<'a> {
|
||||
fn of_union(un: &'a Union, tag_id: TagId) -> Self {
|
||||
let Union {
|
||||
alternatives,
|
||||
render_as,
|
||||
} = un;
|
||||
|
||||
match render_as {
|
||||
RenderAs::Tag => {
|
||||
let tag_name = alternatives
|
||||
.iter()
|
||||
.find(|ctor| ctor.tag_id == tag_id)
|
||||
.map(|Ctor { name, .. }| match name {
|
||||
CtorName::Tag(tag) => tag,
|
||||
CtorName::Opaque(_) => {
|
||||
internal_error!("tag union should never have opaque alternative")
|
||||
}
|
||||
})
|
||||
.expect("indexable tag ID must be known to alternatives");
|
||||
Self::Tag(tag_name)
|
||||
}
|
||||
RenderAs::Opaque => Self::Opaque,
|
||||
RenderAs::Record(fields) => Self::Record(fields),
|
||||
RenderAs::Guard => Self::Guard,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Index a variable as a certain constructor, to get the expected argument types of that constructor.
|
||||
|
@ -82,11 +123,11 @@ fn index_var(
|
|||
mut var: Variable,
|
||||
ctor: IndexCtor,
|
||||
render_as: &RenderAs,
|
||||
) -> Vec<Variable> {
|
||||
) -> Result<Vec<Variable>, TypeError> {
|
||||
if matches!(ctor, IndexCtor::Guard) {
|
||||
// `A B if g` becomes Guard { [True, (A B)] }, so the arguments are a bool, and the type
|
||||
// of the pattern.
|
||||
return vec![Variable::BOOL, var];
|
||||
return Ok(vec![Variable::BOOL, var]);
|
||||
}
|
||||
loop {
|
||||
match subs.get_content_without_compacting(var) {
|
||||
|
@ -95,10 +136,8 @@ fn index_var(
|
|||
| Content::FlexAbleVar(_, _)
|
||||
| Content::RigidAbleVar(_, _)
|
||||
| Content::LambdaSet(_)
|
||||
| Content::RangedNumber(..) => internal_error!("not a indexable constructor"),
|
||||
Content::Error => {
|
||||
internal_error!("errors should not be reachable during exhautiveness checking")
|
||||
}
|
||||
| Content::RangedNumber(..) => return Err(TypeError),
|
||||
Content::Error => return Err(TypeError),
|
||||
Content::RecursionVar {
|
||||
structure,
|
||||
opt_name: _,
|
||||
|
@ -106,14 +145,19 @@ fn index_var(
|
|||
var = *structure;
|
||||
}
|
||||
Content::Structure(structure) => match structure {
|
||||
FlatType::Apply(_, _)
|
||||
| FlatType::Func(_, _, _)
|
||||
| FlatType::FunctionOrTagUnion(_, _, _) => {
|
||||
internal_error!("not an indexable constructor")
|
||||
FlatType::Func(_, _, _) | FlatType::FunctionOrTagUnion(_, _, _) => {
|
||||
return Err(TypeError)
|
||||
}
|
||||
FlatType::Erroneous(_) => {
|
||||
internal_error!("errors should not be reachable during exhautiveness checking")
|
||||
FlatType::Erroneous(_) => return Err(TypeError),
|
||||
FlatType::Apply(Symbol::LIST_LIST, args) => {
|
||||
match (subs.get_subs_slice(*args), ctor) {
|
||||
([elem_var], IndexCtor::List) => {
|
||||
return Ok(vec![*elem_var]);
|
||||
}
|
||||
_ => internal_error!("list types can only be indexed by list patterns"),
|
||||
}
|
||||
}
|
||||
FlatType::Apply(..) => internal_error!("not an indexable constructor"),
|
||||
FlatType::Record(fields, ext) => {
|
||||
let fields_order = match render_as {
|
||||
RenderAs::Record(fields) => fields,
|
||||
|
@ -137,7 +181,7 @@ fn index_var(
|
|||
})
|
||||
.collect();
|
||||
|
||||
return field_types;
|
||||
return Ok(field_types);
|
||||
}
|
||||
FlatType::TagUnion(tags, ext) | FlatType::RecursiveTagUnion(_, tags, ext) => {
|
||||
let tag_ctor = match ctor {
|
||||
|
@ -155,10 +199,10 @@ fn index_var(
|
|||
}
|
||||
});
|
||||
let vars = opt_vars.expect("constructor must be known in the indexable type if we are exhautiveness checking");
|
||||
return vars;
|
||||
return Ok(vars);
|
||||
}
|
||||
FlatType::EmptyRecord => {
|
||||
debug_assert!(matches!(ctor, IndexCtor::Record));
|
||||
debug_assert!(matches!(ctor, IndexCtor::Record(..)));
|
||||
// If there are optional record fields we don't unify them, but we need to
|
||||
// cover them. Since optional fields correspond to "any" patterns, we can pass
|
||||
// through arbitrary types.
|
||||
|
@ -168,7 +212,7 @@ fn index_var(
|
|||
"record constructors must always be rendered as records"
|
||||
),
|
||||
};
|
||||
return std::iter::repeat(Variable::NULL).take(num_fields).collect();
|
||||
return Ok(std::iter::repeat(Variable::NULL).take(num_fields).collect());
|
||||
}
|
||||
FlatType::EmptyTagUnion => {
|
||||
internal_error!("empty tag unions are not indexable")
|
||||
|
@ -176,7 +220,7 @@ fn index_var(
|
|||
},
|
||||
Content::Alias(_, _, var, AliasKind::Opaque) => {
|
||||
debug_assert!(matches!(ctor, IndexCtor::Opaque));
|
||||
return vec![*var];
|
||||
return Ok(vec![*var]);
|
||||
}
|
||||
Content::Alias(_, _, inner, AliasKind::Structural) => {
|
||||
var = *inner;
|
||||
|
@ -186,35 +230,44 @@ fn index_var(
|
|||
}
|
||||
|
||||
impl SketchedPattern {
|
||||
fn reify(self, subs: &Subs, real_var: Variable) -> Pattern {
|
||||
fn reify(self, subs: &Subs, real_var: Variable) -> Result<Pattern, TypeError> {
|
||||
match self {
|
||||
Self::Anything => Pattern::Anything,
|
||||
Self::Literal(lit) => Pattern::Literal(lit),
|
||||
Self::KnownCtor(union, index_ctor, tag_id, patterns) => {
|
||||
let arg_vars = index_var(subs, real_var, index_ctor, &union.render_as);
|
||||
Self::Anything => Ok(Pattern::Anything),
|
||||
Self::Literal(lit) => Ok(Pattern::Literal(lit)),
|
||||
Self::KnownCtor(union, tag_id, patterns) => {
|
||||
let index_ctor = IndexCtor::of_union(&union, tag_id);
|
||||
let arg_vars = index_var(subs, real_var, index_ctor, &union.render_as)?;
|
||||
|
||||
debug_assert!(arg_vars.len() == patterns.len());
|
||||
let args = (patterns.into_iter())
|
||||
.zip(arg_vars)
|
||||
.map(|(pat, var)| {
|
||||
// FIXME
|
||||
pat.reify(subs, var)
|
||||
})
|
||||
.collect();
|
||||
.map(|(pat, var)| pat.reify(subs, var))
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
|
||||
Pattern::Ctor(union, tag_id, args)
|
||||
Ok(Pattern::Ctor(union, tag_id, args))
|
||||
}
|
||||
Self::Ctor(tag_name, patterns) => {
|
||||
let arg_vars = index_var(subs, real_var, IndexCtor::Tag(&tag_name), &RenderAs::Tag);
|
||||
let arg_vars =
|
||||
index_var(subs, real_var, IndexCtor::Tag(&tag_name), &RenderAs::Tag)?;
|
||||
let (union, tag_id) = convert_tag(subs, real_var, &tag_name);
|
||||
|
||||
debug_assert!(arg_vars.len() == patterns.len());
|
||||
let args = (patterns.into_iter())
|
||||
.zip(arg_vars)
|
||||
.map(|(pat, var)| pat.reify(subs, var))
|
||||
.collect();
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
|
||||
Pattern::Ctor(union, tag_id, args)
|
||||
Ok(Pattern::Ctor(union, tag_id, args))
|
||||
}
|
||||
Self::List(arity, patterns) => {
|
||||
let elem_var = index_var(subs, real_var, IndexCtor::List, &RenderAs::Tag)?[0];
|
||||
|
||||
let patterns = patterns
|
||||
.into_iter()
|
||||
.map(|pat| pat.reify(subs, elem_var))
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
|
||||
Ok(Pattern::List(arity, patterns))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -235,7 +288,11 @@ pub struct SketchedRows {
|
|||
}
|
||||
|
||||
impl SketchedRows {
|
||||
fn reify_to_non_redundant(self, subs: &Subs, real_var: Variable) -> NonRedundantSummary {
|
||||
fn reify_to_non_redundant(
|
||||
self,
|
||||
subs: &Subs,
|
||||
real_var: Variable,
|
||||
) -> Result<NonRedundantSummary, TypeError> {
|
||||
to_nonredundant_rows(subs, real_var, self)
|
||||
}
|
||||
}
|
||||
|
@ -283,7 +340,23 @@ fn sketch_pattern(pattern: &crate::pattern::Pattern) -> SketchedPattern {
|
|||
}],
|
||||
};
|
||||
|
||||
SP::KnownCtor(union, IndexCtor::Record, tag_id, patterns)
|
||||
SP::KnownCtor(union, tag_id, patterns)
|
||||
}
|
||||
|
||||
List {
|
||||
patterns,
|
||||
list_var: _,
|
||||
elem_var: _,
|
||||
} => {
|
||||
let arity = patterns.arity();
|
||||
|
||||
let sketched_elem_patterns = patterns
|
||||
.patterns
|
||||
.iter()
|
||||
.map(|p| sketch_pattern(&p.value))
|
||||
.collect();
|
||||
|
||||
SP::List(arity, sketched_elem_patterns)
|
||||
}
|
||||
|
||||
AppliedTag {
|
||||
|
@ -315,12 +388,7 @@ fn sketch_pattern(pattern: &crate::pattern::Pattern) -> SketchedPattern {
|
|||
}],
|
||||
};
|
||||
|
||||
SP::KnownCtor(
|
||||
union,
|
||||
IndexCtor::Opaque,
|
||||
tag_id,
|
||||
vec![sketch_pattern(&argument.value)],
|
||||
)
|
||||
SP::KnownCtor(union, tag_id, vec![sketch_pattern(&argument.value)])
|
||||
}
|
||||
|
||||
// Treat this like a literal so we mark it as non-exhaustive
|
||||
|
@ -390,7 +458,6 @@ pub fn sketch_when_branches(region: Region, patterns: &[expr::WhenBranch]) -> Sk
|
|||
|
||||
vec![SP::KnownCtor(
|
||||
union,
|
||||
IndexCtor::Guard,
|
||||
tag_id,
|
||||
// NB: ordering the guard pattern first seems to be better at catching
|
||||
// non-exhaustive constructors in the second argument; see the paper to see if
|
||||
|
@ -445,7 +512,7 @@ fn to_nonredundant_rows(
|
|||
subs: &Subs,
|
||||
real_var: Variable,
|
||||
rows: SketchedRows,
|
||||
) -> NonRedundantSummary {
|
||||
) -> Result<NonRedundantSummary, TypeError> {
|
||||
let SketchedRows {
|
||||
rows,
|
||||
overall_region,
|
||||
|
@ -468,7 +535,7 @@ fn to_nonredundant_rows(
|
|||
let next_row: Vec<Pattern> = patterns
|
||||
.into_iter()
|
||||
.map(|pattern| pattern.reify(subs, real_var))
|
||||
.collect();
|
||||
.collect::<Result<_, _>>()?;
|
||||
|
||||
let redundant_err = if !is_inhabited_row(&next_row) {
|
||||
Some(Error::Unmatchable {
|
||||
|
@ -499,11 +566,11 @@ fn to_nonredundant_rows(
|
|||
}
|
||||
}
|
||||
|
||||
NonRedundantSummary {
|
||||
Ok(NonRedundantSummary {
|
||||
non_redundant_rows: checked_rows,
|
||||
redundancies,
|
||||
errors,
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn is_inhabited_row(patterns: &[Pattern]) -> bool {
|
||||
|
@ -518,10 +585,16 @@ fn is_inhabited_pattern(pat: &Pattern) -> bool {
|
|||
Pattern::Literal(_) => {}
|
||||
Pattern::Ctor(union, id, pats) => {
|
||||
if !union.alternatives.iter().any(|alt| alt.tag_id == *id) {
|
||||
// The tag ID was dropped from the union, which means that this tag ID is one
|
||||
// that is not material to the union, and so is uninhabited!
|
||||
return false;
|
||||
}
|
||||
stack.extend(pats);
|
||||
}
|
||||
Pattern::List(_, pats) => {
|
||||
// List is uninhabited if any element is uninhabited.
|
||||
stack.extend(pats);
|
||||
}
|
||||
}
|
||||
}
|
||||
true
|
||||
|
|
|
@ -65,7 +65,7 @@ impl Output {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Copy)]
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Copy)]
|
||||
pub enum IntValue {
|
||||
I128([u8; 16]),
|
||||
U128([u8; 16]),
|
||||
|
@ -345,7 +345,7 @@ pub struct ClosureData {
|
|||
///
|
||||
/// We distinguish them from closures so we can have better error messages
|
||||
/// during constraint generation.
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct AccessorData {
|
||||
pub name: Symbol,
|
||||
pub function_var: Variable,
|
||||
|
@ -485,7 +485,7 @@ pub struct Field {
|
|||
pub loc_expr: Box<Loc<Expr>>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq)]
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
pub enum Recursive {
|
||||
NotRecursive = 0,
|
||||
Recursive = 1,
|
||||
|
@ -888,7 +888,7 @@ pub fn canonicalize_expr<'a>(
|
|||
var_store,
|
||||
inner_scope,
|
||||
region,
|
||||
*branch,
|
||||
branch,
|
||||
&mut output,
|
||||
)
|
||||
});
|
||||
|
@ -1493,7 +1493,7 @@ fn canonicalize_fields<'a>(
|
|||
let mut output = Output::default();
|
||||
|
||||
for loc_field in fields.iter() {
|
||||
match canonicalize_field(env, var_store, scope, &loc_field.value, loc_field.region) {
|
||||
match canonicalize_field(env, var_store, scope, &loc_field.value) {
|
||||
Ok((label, field_expr, field_out, field_var)) => {
|
||||
let field = Field {
|
||||
var: field_var,
|
||||
|
@ -1546,7 +1546,6 @@ fn canonicalize_field<'a>(
|
|||
var_store: &mut VarStore,
|
||||
scope: &mut Scope,
|
||||
field: &'a ast::AssignedField<'a, ast::Expr<'a>>,
|
||||
region: Region,
|
||||
) -> Result<(Lowercase, Loc<Expr>, Output, Variable), CanonicalizeFieldProblem> {
|
||||
use roc_parse::ast::AssignedField::*;
|
||||
|
||||
|
@ -1576,7 +1575,7 @@ fn canonicalize_field<'a>(
|
|||
}
|
||||
|
||||
SpaceBefore(sub_field, _) | SpaceAfter(sub_field, _) => {
|
||||
canonicalize_field(env, var_store, scope, sub_field, region)
|
||||
canonicalize_field(env, var_store, scope, sub_field)
|
||||
}
|
||||
|
||||
Malformed(_string) => {
|
||||
|
@ -1652,7 +1651,7 @@ fn canonicalize_var_lookup(
|
|||
}
|
||||
|
||||
/// Currently uses the heuristic of "only inline if it's a builtin"
|
||||
pub fn inline_calls(var_store: &mut VarStore, scope: &mut Scope, expr: Expr) -> Expr {
|
||||
pub fn inline_calls(var_store: &mut VarStore, expr: Expr) -> Expr {
|
||||
use Expr::*;
|
||||
|
||||
match expr {
|
||||
|
@ -1681,7 +1680,7 @@ pub fn inline_calls(var_store: &mut VarStore, scope: &mut Scope, expr: Expr) ->
|
|||
let mut new_elems = Vec::with_capacity(loc_elems.len());
|
||||
|
||||
for loc_elem in loc_elems {
|
||||
let value = inline_calls(var_store, scope, loc_elem.value);
|
||||
let value = inline_calls(var_store, loc_elem.value);
|
||||
|
||||
new_elems.push(Loc {
|
||||
value,
|
||||
|
@ -1706,20 +1705,20 @@ pub fn inline_calls(var_store: &mut VarStore, scope: &mut Scope, expr: Expr) ->
|
|||
} => {
|
||||
let loc_cond = Box::new(Loc {
|
||||
region: loc_cond.region,
|
||||
value: inline_calls(var_store, scope, loc_cond.value),
|
||||
value: inline_calls(var_store, loc_cond.value),
|
||||
});
|
||||
|
||||
let mut new_branches = Vec::with_capacity(branches.len());
|
||||
|
||||
for branch in branches {
|
||||
let value = Loc {
|
||||
value: inline_calls(var_store, scope, branch.value.value),
|
||||
value: inline_calls(var_store, branch.value.value),
|
||||
region: branch.value.region,
|
||||
};
|
||||
let guard = match branch.guard {
|
||||
Some(loc_expr) => Some(Loc {
|
||||
region: loc_expr.region,
|
||||
value: inline_calls(var_store, scope, loc_expr.value),
|
||||
value: inline_calls(var_store, loc_expr.value),
|
||||
}),
|
||||
None => None,
|
||||
};
|
||||
|
@ -1753,12 +1752,12 @@ pub fn inline_calls(var_store: &mut VarStore, scope: &mut Scope, expr: Expr) ->
|
|||
|
||||
for (loc_cond, loc_expr) in branches {
|
||||
let loc_cond = Loc {
|
||||
value: inline_calls(var_store, scope, loc_cond.value),
|
||||
value: inline_calls(var_store, loc_cond.value),
|
||||
region: loc_cond.region,
|
||||
};
|
||||
|
||||
let loc_expr = Loc {
|
||||
value: inline_calls(var_store, scope, loc_expr.value),
|
||||
value: inline_calls(var_store, loc_expr.value),
|
||||
region: loc_expr.region,
|
||||
};
|
||||
|
||||
|
@ -1767,7 +1766,7 @@ pub fn inline_calls(var_store: &mut VarStore, scope: &mut Scope, expr: Expr) ->
|
|||
|
||||
let final_else = Box::new(Loc {
|
||||
region: final_else.region,
|
||||
value: inline_calls(var_store, scope, final_else.value),
|
||||
value: inline_calls(var_store, final_else.value),
|
||||
});
|
||||
|
||||
If {
|
||||
|
@ -1785,12 +1784,12 @@ pub fn inline_calls(var_store: &mut VarStore, scope: &mut Scope, expr: Expr) ->
|
|||
} => {
|
||||
let loc_condition = Loc {
|
||||
region: loc_condition.region,
|
||||
value: inline_calls(var_store, scope, loc_condition.value),
|
||||
value: inline_calls(var_store, loc_condition.value),
|
||||
};
|
||||
|
||||
let loc_continuation = Loc {
|
||||
region: loc_continuation.region,
|
||||
value: inline_calls(var_store, scope, loc_continuation.value),
|
||||
value: inline_calls(var_store, loc_continuation.value),
|
||||
};
|
||||
|
||||
Expect {
|
||||
|
@ -1807,12 +1806,12 @@ pub fn inline_calls(var_store: &mut VarStore, scope: &mut Scope, expr: Expr) ->
|
|||
} => {
|
||||
let loc_condition = Loc {
|
||||
region: loc_condition.region,
|
||||
value: inline_calls(var_store, scope, loc_condition.value),
|
||||
value: inline_calls(var_store, loc_condition.value),
|
||||
};
|
||||
|
||||
let loc_continuation = Loc {
|
||||
region: loc_continuation.region,
|
||||
value: inline_calls(var_store, scope, loc_continuation.value),
|
||||
value: inline_calls(var_store, loc_continuation.value),
|
||||
};
|
||||
|
||||
ExpectFx {
|
||||
|
@ -1830,7 +1829,7 @@ pub fn inline_calls(var_store: &mut VarStore, scope: &mut Scope, expr: Expr) ->
|
|||
loc_pattern: def.loc_pattern,
|
||||
loc_expr: Loc {
|
||||
region: def.loc_expr.region,
|
||||
value: inline_calls(var_store, scope, def.loc_expr.value),
|
||||
value: inline_calls(var_store, def.loc_expr.value),
|
||||
},
|
||||
expr_var: def.expr_var,
|
||||
pattern_vars: def.pattern_vars,
|
||||
|
@ -1840,7 +1839,7 @@ pub fn inline_calls(var_store: &mut VarStore, scope: &mut Scope, expr: Expr) ->
|
|||
|
||||
let loc_expr = Loc {
|
||||
region: loc_expr.region,
|
||||
value: inline_calls(var_store, scope, loc_expr.value),
|
||||
value: inline_calls(var_store, loc_expr.value),
|
||||
};
|
||||
|
||||
LetRec(new_defs, Box::new(loc_expr), mark)
|
||||
|
@ -1851,7 +1850,7 @@ pub fn inline_calls(var_store: &mut VarStore, scope: &mut Scope, expr: Expr) ->
|
|||
loc_pattern: def.loc_pattern,
|
||||
loc_expr: Loc {
|
||||
region: def.loc_expr.region,
|
||||
value: inline_calls(var_store, scope, def.loc_expr.value),
|
||||
value: inline_calls(var_store, def.loc_expr.value),
|
||||
},
|
||||
expr_var: def.expr_var,
|
||||
pattern_vars: def.pattern_vars,
|
||||
|
@ -1860,7 +1859,7 @@ pub fn inline_calls(var_store: &mut VarStore, scope: &mut Scope, expr: Expr) ->
|
|||
|
||||
let loc_expr = Loc {
|
||||
region: loc_expr.region,
|
||||
value: inline_calls(var_store, scope, loc_expr.value),
|
||||
value: inline_calls(var_store, loc_expr.value),
|
||||
};
|
||||
|
||||
LetNonRec(Box::new(def), Box::new(loc_expr))
|
||||
|
@ -1878,7 +1877,7 @@ pub fn inline_calls(var_store: &mut VarStore, scope: &mut Scope, expr: Expr) ->
|
|||
}) => {
|
||||
let loc_expr = *loc_body;
|
||||
let loc_expr = Loc {
|
||||
value: inline_calls(var_store, scope, loc_expr.value),
|
||||
value: inline_calls(var_store, loc_expr.value),
|
||||
region: loc_expr.region,
|
||||
};
|
||||
|
||||
|
@ -1938,7 +1937,7 @@ pub fn inline_calls(var_store: &mut VarStore, scope: &mut Scope, expr: Expr) ->
|
|||
let (var, loc_expr) = *argument;
|
||||
let argument = Box::new((
|
||||
var,
|
||||
loc_expr.map_owned(|expr| inline_calls(var_store, scope, expr)),
|
||||
loc_expr.map_owned(|expr| inline_calls(var_store, expr)),
|
||||
));
|
||||
|
||||
OpaqueRef {
|
||||
|
@ -2737,7 +2736,7 @@ fn get_lookup_symbols(expr: &Expr) -> Vec<ExpectLookup> {
|
|||
| Expr::ExpectFx {
|
||||
loc_continuation, ..
|
||||
} => {
|
||||
stack.push(&(*loc_continuation).value);
|
||||
stack.push(&loc_continuation.value);
|
||||
|
||||
// Intentionally ignore the lookups in the nested `expect` condition itself,
|
||||
// because they couldn't possibly influence the outcome of this `expect`!
|
||||
|
|
|
@ -898,6 +898,15 @@ fn fix_values_captured_in_closure_pattern(
|
|||
}
|
||||
}
|
||||
}
|
||||
List { patterns, .. } => {
|
||||
for loc_pat in patterns.patterns.iter_mut() {
|
||||
fix_values_captured_in_closure_pattern(
|
||||
&mut loc_pat.value,
|
||||
no_capture_symbols,
|
||||
closure_captures,
|
||||
);
|
||||
}
|
||||
}
|
||||
Identifier(_)
|
||||
| NumLiteral(..)
|
||||
| IntLiteral(..)
|
||||
|
|
|
@ -79,7 +79,7 @@ fn desugar_value_def<'a>(arena: &'a Bump, def: &'a ValueDef<'a>) -> ValueDef<'a>
|
|||
ann_pattern,
|
||||
ann_type,
|
||||
comment: *comment,
|
||||
body_pattern: *body_pattern,
|
||||
body_pattern,
|
||||
body_expr: desugar_expr(arena, body_expr),
|
||||
},
|
||||
Expect {
|
||||
|
|
|
@ -6,6 +6,7 @@ use crate::num::{
|
|||
ParsedNumResult,
|
||||
};
|
||||
use crate::scope::{PendingAbilitiesInScope, Scope};
|
||||
use roc_exhaustive::ListArity;
|
||||
use roc_module::ident::{Ident, Lowercase, TagName};
|
||||
use roc_module::symbol::Symbol;
|
||||
use roc_parse::ast::{self, StrLiteral, StrSegment};
|
||||
|
@ -56,6 +57,11 @@ pub enum Pattern {
|
|||
ext_var: Variable,
|
||||
destructs: Vec<Loc<RecordDestruct>>,
|
||||
},
|
||||
List {
|
||||
list_var: Variable,
|
||||
elem_var: Variable,
|
||||
patterns: ListPatterns,
|
||||
},
|
||||
NumLiteral(Variable, Box<str>, IntValue, NumBound),
|
||||
IntLiteral(Variable, Variable, Box<str>, IntValue, IntBound),
|
||||
FloatLiteral(Variable, Variable, Box<str>, f64, FloatBound),
|
||||
|
@ -92,6 +98,10 @@ impl Pattern {
|
|||
AppliedTag { whole_var, .. } => Some(*whole_var),
|
||||
UnwrappedOpaque { whole_var, .. } => Some(*whole_var),
|
||||
RecordDestructure { whole_var, .. } => Some(*whole_var),
|
||||
List {
|
||||
list_var: whole_var,
|
||||
..
|
||||
} => Some(*whole_var),
|
||||
NumLiteral(var, ..) => Some(*var),
|
||||
IntLiteral(var, ..) => Some(*var),
|
||||
FloatLiteral(var, ..) => Some(*var),
|
||||
|
@ -119,6 +129,7 @@ impl Pattern {
|
|||
| MalformedPattern(..)
|
||||
| AbilityMemberSpecialization { .. } => true,
|
||||
RecordDestructure { destructs, .. } => destructs.is_empty(),
|
||||
List { patterns, .. } => patterns.surely_exhaustive(),
|
||||
AppliedTag { .. }
|
||||
| NumLiteral(..)
|
||||
| IntLiteral(..)
|
||||
|
@ -145,6 +156,7 @@ impl Pattern {
|
|||
UnwrappedOpaque { opaque, .. } => C::Opaque(*opaque),
|
||||
RecordDestructure { destructs, .. } if destructs.is_empty() => C::EmptyRecord,
|
||||
RecordDestructure { .. } => C::Record,
|
||||
List { .. } => C::List,
|
||||
NumLiteral(..) => C::Num,
|
||||
IntLiteral(..) => C::Int,
|
||||
FloatLiteral(..) => C::Float,
|
||||
|
@ -161,6 +173,36 @@ impl Pattern {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct ListPatterns {
|
||||
pub patterns: Vec<Loc<Pattern>>,
|
||||
/// Where a rest pattern splits patterns before and after it, if it does at all.
|
||||
/// If present, patterns at index >= the rest index appear after the rest pattern.
|
||||
/// For example:
|
||||
/// [ .., A, B ] -> patterns = [A, B], rest = 0
|
||||
/// [ A, .., B ] -> patterns = [A, B], rest = 1
|
||||
/// [ A, B, .. ] -> patterns = [A, B], rest = 2
|
||||
pub opt_rest: Option<usize>,
|
||||
}
|
||||
|
||||
impl ListPatterns {
|
||||
/// Is this list pattern the trivially-exhaustive pattern `[..]`?
|
||||
fn surely_exhaustive(&self) -> bool {
|
||||
self.patterns.is_empty() && matches!(self.opt_rest, Some(0))
|
||||
}
|
||||
|
||||
pub fn arity(&self) -> ListArity {
|
||||
match self.opt_rest {
|
||||
Some(i) => {
|
||||
let before = i;
|
||||
let after = self.patterns.len() - before;
|
||||
ListArity::Slice(before, after)
|
||||
}
|
||||
None => ListArity::Exact(self.patterns.len()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct RecordDestruct {
|
||||
pub var: Variable,
|
||||
|
@ -621,8 +663,75 @@ pub fn canonicalize_pattern<'a>(
|
|||
unreachable!("should have been handled in RecordDestructure");
|
||||
}
|
||||
|
||||
List(..) => todo!(),
|
||||
ListRest => todo!(),
|
||||
List(patterns) => {
|
||||
// We want to admit the following cases:
|
||||
//
|
||||
// []
|
||||
// [..]
|
||||
// [.., P_1,* P_n]
|
||||
// [P_1,* P_n, ..]
|
||||
// [P_1,* P_m, .., P_n,* P_q]
|
||||
// [P_1,* P_n]
|
||||
//
|
||||
// So, a list-rest pattern can appear anywhere in a list pattern, but can appear at
|
||||
// most once.
|
||||
let elem_var = var_store.fresh();
|
||||
let list_var = var_store.fresh();
|
||||
|
||||
let mut rest_index = None;
|
||||
let mut can_pats = Vec::with_capacity(patterns.len());
|
||||
let mut opt_erroneous = None;
|
||||
|
||||
for (i, loc_pattern) in patterns.iter().enumerate() {
|
||||
match &loc_pattern.value {
|
||||
ListRest => match rest_index {
|
||||
None => {
|
||||
rest_index = Some(i);
|
||||
}
|
||||
Some(_) => {
|
||||
env.problem(Problem::MultipleListRestPattern {
|
||||
region: loc_pattern.region,
|
||||
});
|
||||
|
||||
opt_erroneous = Some(Pattern::MalformedPattern(
|
||||
MalformedPatternProblem::DuplicateListRestPattern,
|
||||
loc_pattern.region,
|
||||
));
|
||||
}
|
||||
},
|
||||
pattern => {
|
||||
let pat = canonicalize_pattern(
|
||||
env,
|
||||
var_store,
|
||||
scope,
|
||||
output,
|
||||
pattern_type,
|
||||
pattern,
|
||||
loc_pattern.region,
|
||||
permit_shadows,
|
||||
);
|
||||
can_pats.push(pat);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If we encountered an erroneous pattern (e.g. one with shadowing),
|
||||
// use the resulting RuntimeError. Otherwise, return a successful record destructure.
|
||||
opt_erroneous.unwrap_or(Pattern::List {
|
||||
list_var,
|
||||
elem_var,
|
||||
patterns: ListPatterns {
|
||||
patterns: can_pats,
|
||||
opt_rest: rest_index,
|
||||
},
|
||||
})
|
||||
}
|
||||
ListRest => {
|
||||
// Parsing should make sure these only appear in list patterns, where we will generate
|
||||
// better contextual errors.
|
||||
let problem = MalformedPatternProblem::Unknown;
|
||||
malformed_pattern(env, problem, region)
|
||||
}
|
||||
|
||||
Malformed(_str) => {
|
||||
let problem = MalformedPatternProblem::Unknown;
|
||||
|
@ -739,6 +848,9 @@ impl<'a> BindingsFromPattern<'a> {
|
|||
| MalformedPattern(_, _)
|
||||
| UnsupportedPattern(_)
|
||||
| OpaqueNotInScope(..) => (),
|
||||
List { patterns, .. } => {
|
||||
stack.extend(patterns.patterns.iter().rev().map(Pattern));
|
||||
}
|
||||
}
|
||||
}
|
||||
BindingsFromPatternWork::Destruct(loc_destruct) => {
|
||||
|
|
|
@ -472,6 +472,12 @@ pub fn walk_pattern<V: Visitor>(visitor: &mut V, pattern: &Pattern) {
|
|||
RecordDestructure { destructs, .. } => destructs
|
||||
.iter()
|
||||
.for_each(|d| visitor.visit_record_destruct(&d.value, d.region)),
|
||||
List {
|
||||
patterns, elem_var, ..
|
||||
} => patterns
|
||||
.patterns
|
||||
.iter()
|
||||
.for_each(|p| visitor.visit_pattern(&p.value, p.region, Some(*elem_var))),
|
||||
NumLiteral(..) => { /* terminal */ }
|
||||
IntLiteral(..) => { /* terminal */ }
|
||||
FloatLiteral(..) => { /* terminal */ }
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use std::{borrow::Borrow, iter::FromIterator};
|
||||
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct VecSet<T> {
|
||||
elements: Vec<T>,
|
||||
}
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
use arrayvec::ArrayVec;
|
||||
use roc_can::constraint::{Constraint, Constraints};
|
||||
use roc_can::constraint::{Constraint, Constraints, TypeOrVar};
|
||||
use roc_can::expected::Expected::{self, *};
|
||||
use roc_can::num::{FloatBound, FloatWidth, IntBound, IntLitWidth, NumBound, SignDemand};
|
||||
use roc_module::symbol::Symbol;
|
||||
|
@ -30,7 +30,7 @@ pub fn add_numeric_bound_constr(
|
|||
num_num(Variable(num_var))
|
||||
}
|
||||
NumericBound::FloatExact(width) => {
|
||||
let actual_type = Variable(float_width_to_variable(width));
|
||||
let actual_type = constraints.push_type(Variable(float_width_to_variable(width)));
|
||||
let expected = Expected::ForReason(Reason::NumericLiteralSuffix, actual_type, region);
|
||||
let type_index = constraints.push_type(Variable(num_var));
|
||||
let expected_index = constraints.push_expected_type(expected);
|
||||
|
@ -42,7 +42,7 @@ pub fn add_numeric_bound_constr(
|
|||
Variable(num_var)
|
||||
}
|
||||
NumericBound::IntExact(width) => {
|
||||
let actual_type = Variable(int_lit_width_to_variable(width));
|
||||
let actual_type = constraints.push_type(Variable(int_lit_width_to_variable(width)));
|
||||
let expected = Expected::ForReason(Reason::NumericLiteralSuffix, actual_type, region);
|
||||
let type_index = constraints.push_type(Variable(num_var));
|
||||
let expected_index = constraints.push_expected_type(expected);
|
||||
|
@ -54,11 +54,10 @@ pub fn add_numeric_bound_constr(
|
|||
Variable(num_var)
|
||||
}
|
||||
NumericBound::Range(range) => {
|
||||
let actual_type = Variable(precision_var);
|
||||
let expected = Expected::NoExpectation(RangedNumber(range));
|
||||
let type_index = constraints.push_type(actual_type);
|
||||
let precision_type = constraints.push_type(Variable(precision_var));
|
||||
let expected = Expected::NoExpectation(constraints.push_type(RangedNumber(range)));
|
||||
let expected_index = constraints.push_expected_type(expected);
|
||||
let constr = constraints.equal_types(type_index, expected_index, category, region);
|
||||
let constr = constraints.equal_types(precision_type, expected_index, category, region);
|
||||
|
||||
num_constraints.extend([constr]);
|
||||
|
||||
|
@ -72,7 +71,7 @@ pub fn int_literal(
|
|||
constraints: &mut Constraints,
|
||||
num_var: Variable,
|
||||
precision_var: Variable,
|
||||
expected: Expected<Type>,
|
||||
expected: Expected<TypeOrVar>,
|
||||
region: Region,
|
||||
bound: IntBound,
|
||||
) -> Constraint {
|
||||
|
@ -91,11 +90,10 @@ pub fn int_literal(
|
|||
);
|
||||
|
||||
let num_type_index = constraints.push_type(num_type);
|
||||
let expect_precision_var = constraints.push_expected_type(ForReason(
|
||||
reason,
|
||||
num_int(Type::Variable(precision_var)),
|
||||
region,
|
||||
));
|
||||
let int_precision_type = constraints.push_type(num_int(Type::Variable(precision_var)));
|
||||
|
||||
let expect_precision_var =
|
||||
constraints.push_expected_type(ForReason(reason, int_precision_type, region));
|
||||
|
||||
constrs.extend([
|
||||
constraints.equal_types(num_type_index, expect_precision_var, Category::Int, region),
|
||||
|
@ -114,7 +112,7 @@ pub fn single_quote_literal(
|
|||
constraints: &mut Constraints,
|
||||
num_var: Variable,
|
||||
precision_var: Variable,
|
||||
expected: Expected<Type>,
|
||||
expected: Expected<TypeOrVar>,
|
||||
region: Region,
|
||||
bound: SingleQuoteBound,
|
||||
) -> Constraint {
|
||||
|
@ -133,11 +131,10 @@ pub fn single_quote_literal(
|
|||
);
|
||||
|
||||
let num_type_index = constraints.push_type(num_type);
|
||||
let expect_precision_var = constraints.push_expected_type(ForReason(
|
||||
reason,
|
||||
num_int(Type::Variable(precision_var)),
|
||||
region,
|
||||
));
|
||||
let int_precision_type = constraints.push_type(num_int(Type::Variable(precision_var)));
|
||||
|
||||
let expect_precision_var =
|
||||
constraints.push_expected_type(ForReason(reason, int_precision_type, region));
|
||||
|
||||
constrs.extend([
|
||||
constraints.equal_types(
|
||||
|
@ -161,7 +158,7 @@ pub fn float_literal(
|
|||
constraints: &mut Constraints,
|
||||
num_var: Variable,
|
||||
precision_var: Variable,
|
||||
expected: Expected<Type>,
|
||||
expected: Expected<TypeOrVar>,
|
||||
region: Region,
|
||||
bound: FloatBound,
|
||||
) -> Constraint {
|
||||
|
@ -179,11 +176,10 @@ pub fn float_literal(
|
|||
);
|
||||
|
||||
let num_type_index = constraints.push_type(num_type);
|
||||
let expect_precision_var = constraints.push_expected_type(ForReason(
|
||||
reason,
|
||||
num_float(Type::Variable(precision_var)),
|
||||
region,
|
||||
));
|
||||
let float_precision_type = constraints.push_type(num_float(Type::Variable(precision_var)));
|
||||
|
||||
let expect_precision_var =
|
||||
constraints.push_expected_type(ForReason(reason, float_precision_type, region));
|
||||
|
||||
constrs.extend([
|
||||
constraints.equal_types(num_type_index, expect_precision_var, Category::Frac, region),
|
||||
|
@ -201,7 +197,7 @@ pub fn float_literal(
|
|||
pub fn num_literal(
|
||||
constraints: &mut Constraints,
|
||||
num_var: Variable,
|
||||
expected: Expected<Type>,
|
||||
expected: Expected<TypeOrVar>,
|
||||
region: Region,
|
||||
bound: NumBound,
|
||||
) -> Constraint {
|
||||
|
|
File diff suppressed because it is too large
Load diff
|
@ -50,8 +50,9 @@ fn constrain_symbols_from_requires(
|
|||
};
|
||||
let pattern = Loc::at_zero(roc_can::pattern::Pattern::Identifier(loc_symbol.value));
|
||||
|
||||
let type_index = constraints.push_type(loc_type.value);
|
||||
let def_pattern_state =
|
||||
constrain_def_pattern(constraints, &mut env, &pattern, loc_type.value);
|
||||
constrain_def_pattern(constraints, &mut env, &pattern, type_index);
|
||||
|
||||
debug_assert!(env.resolutions_to_make.is_empty());
|
||||
|
||||
|
@ -69,13 +70,15 @@ fn constrain_symbols_from_requires(
|
|||
// Otherwise, this symbol comes from an app module - we want to check that the type
|
||||
// provided by the app is in fact what the package module requires.
|
||||
let arity = loc_type.value.arity();
|
||||
let typ = loc_type.value;
|
||||
let type_index = constraints.push_type(typ);
|
||||
let expected = constraints.push_expected_type(Expected::FromAnnotation(
|
||||
loc_symbol.map(|&s| Pattern::Identifier(s)),
|
||||
arity,
|
||||
AnnotationSource::RequiredSymbol {
|
||||
region: loc_type.region,
|
||||
},
|
||||
loc_type.value,
|
||||
type_index,
|
||||
));
|
||||
let provided_eq_requires_constr =
|
||||
constraints.lookup(loc_symbol.value, expected, loc_type.region);
|
||||
|
@ -106,12 +109,10 @@ pub fn frontload_ability_constraints(
|
|||
};
|
||||
let pattern = Loc::at_zero(roc_can::pattern::Pattern::Identifier(*member_name));
|
||||
|
||||
let mut def_pattern_state = constrain_def_pattern(
|
||||
constraints,
|
||||
&mut env,
|
||||
&pattern,
|
||||
Type::Variable(signature_var),
|
||||
);
|
||||
let signature_index = constraints.push_type(signature.clone());
|
||||
|
||||
let mut def_pattern_state =
|
||||
constrain_def_pattern(constraints, &mut env, &pattern, signature_index);
|
||||
|
||||
debug_assert!(env.resolutions_to_make.is_empty());
|
||||
|
||||
|
@ -121,7 +122,7 @@ pub fn frontload_ability_constraints(
|
|||
let infer_variables = vars.flex_vars.iter().copied();
|
||||
|
||||
let signature_expectation =
|
||||
constraints.push_expected_type(Expected::NoExpectation(signature.clone()));
|
||||
constraints.push_expected_type(Expected::NoExpectation(signature_index));
|
||||
|
||||
def_pattern_state
|
||||
.constraints
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
use crate::builtins;
|
||||
use crate::expr::{constrain_expr, Env};
|
||||
use roc_can::constraint::{Constraint, Constraints};
|
||||
use roc_can::constraint::{Constraint, Constraints, TypeOrVar};
|
||||
use roc_can::expected::{Expected, PExpected};
|
||||
use roc_can::pattern::Pattern::{self, *};
|
||||
use roc_can::pattern::{DestructType, RecordDestruct};
|
||||
use roc_can::pattern::{DestructType, ListPatterns, RecordDestruct};
|
||||
use roc_collections::all::{HumanIndex, SendMap};
|
||||
use roc_collections::VecMap;
|
||||
use roc_module::ident::Lowercase;
|
||||
|
@ -17,7 +17,7 @@ use roc_types::types::{
|
|||
|
||||
#[derive(Default, Debug)]
|
||||
pub struct PatternState {
|
||||
pub headers: VecMap<Symbol, Loc<Type>>,
|
||||
pub headers: VecMap<Symbol, Loc<TypeOrVar>>,
|
||||
pub vars: Vec<Variable>,
|
||||
pub constraints: Vec<Constraint>,
|
||||
pub delayed_is_open_constraints: Vec<Constraint>,
|
||||
|
@ -31,14 +31,16 @@ pub struct PatternState {
|
|||
/// Would add `x => <42>` to the headers (i.e., symbol points to a type variable). If the
|
||||
/// definition has an annotation, we instead now add `x => Int`.
|
||||
pub fn headers_from_annotation(
|
||||
constraints: &mut Constraints,
|
||||
pattern: &Pattern,
|
||||
annotation: &Loc<&Type>,
|
||||
) -> Option<VecMap<Symbol, Loc<Type>>> {
|
||||
) -> Option<VecMap<Symbol, Loc<TypeOrVar>>> {
|
||||
let mut headers = VecMap::default();
|
||||
// Check that the annotation structurally agrees with the pattern, preventing e.g. `{ x, y } : Int`
|
||||
// in such incorrect cases we don't put the full annotation in headers, just a variable, and let
|
||||
// inference generate a proper error.
|
||||
let is_structurally_valid = headers_from_annotation_help(pattern, annotation, &mut headers);
|
||||
let is_structurally_valid =
|
||||
headers_from_annotation_help(constraints, pattern, annotation, &mut headers);
|
||||
|
||||
if is_structurally_valid {
|
||||
Some(headers)
|
||||
|
@ -48,9 +50,10 @@ pub fn headers_from_annotation(
|
|||
}
|
||||
|
||||
fn headers_from_annotation_help(
|
||||
constraints: &mut Constraints,
|
||||
pattern: &Pattern,
|
||||
annotation: &Loc<&Type>,
|
||||
headers: &mut VecMap<Symbol, Loc<Type>>,
|
||||
headers: &mut VecMap<Symbol, Loc<TypeOrVar>>,
|
||||
) -> bool {
|
||||
match pattern {
|
||||
Identifier(symbol)
|
||||
|
@ -60,7 +63,8 @@ fn headers_from_annotation_help(
|
|||
ident: symbol,
|
||||
specializes: _,
|
||||
} => {
|
||||
let typ = Loc::at(annotation.region, annotation.value.clone());
|
||||
let annotation_index = constraints.push_type(annotation.value.clone());
|
||||
let typ = Loc::at(annotation.region, annotation_index);
|
||||
headers.insert(*symbol, typ);
|
||||
true
|
||||
}
|
||||
|
@ -87,9 +91,10 @@ fn headers_from_annotation_help(
|
|||
// `{ x ? 0 } = rec` or `{ x: 5 } -> ...` in all cases
|
||||
// the type of `x` within the binding itself is the same.
|
||||
if let Some(field_type) = fields.get(&destruct.label) {
|
||||
let field_type_index = constraints.push_type(field_type.as_inner().clone());
|
||||
headers.insert(
|
||||
destruct.symbol,
|
||||
Loc::at(annotation.region, field_type.clone().into_inner()),
|
||||
Loc::at(annotation.region, field_type_index),
|
||||
);
|
||||
} else {
|
||||
return false;
|
||||
|
@ -101,6 +106,14 @@ fn headers_from_annotation_help(
|
|||
_ => false,
|
||||
},
|
||||
|
||||
List { .. } => {
|
||||
// There are no interesting headers to introduce for list patterns, since the only
|
||||
// exhaustive list pattern is
|
||||
// \[..] -> <body>
|
||||
// which does not introduce any symbols.
|
||||
false
|
||||
},
|
||||
|
||||
AppliedTag {
|
||||
tag_name,
|
||||
arguments,
|
||||
|
@ -117,6 +130,7 @@ fn headers_from_annotation_help(
|
|||
.zip(arg_types.iter())
|
||||
.all(|(arg_pattern, arg_type)| {
|
||||
headers_from_annotation_help(
|
||||
constraints,
|
||||
&arg_pattern.1.value,
|
||||
&Loc::at(annotation.region, arg_type),
|
||||
headers,
|
||||
|
@ -148,11 +162,13 @@ fn headers_from_annotation_help(
|
|||
&& type_arguments.len() == pat_type_arguments.len()
|
||||
&& lambda_set_variables.len() == pat_lambda_set_variables.len() =>
|
||||
{
|
||||
let typ = Loc::at(annotation.region, annotation.value.clone());
|
||||
let annotation_index = constraints.push_type(annotation.value.clone());
|
||||
let typ = Loc::at(annotation.region, annotation_index);
|
||||
headers.insert(*opaque, typ);
|
||||
|
||||
let (_, argument_pat) = &**argument;
|
||||
headers_from_annotation_help(
|
||||
constraints,
|
||||
&argument_pat.value,
|
||||
&Loc::at(annotation.region, actual),
|
||||
headers,
|
||||
|
@ -171,7 +187,7 @@ pub fn constrain_pattern(
|
|||
env: &mut Env,
|
||||
pattern: &Pattern,
|
||||
region: Region,
|
||||
expected: PExpected<Type>,
|
||||
expected: PExpected<TypeOrVar>,
|
||||
state: &mut PatternState,
|
||||
) {
|
||||
match pattern {
|
||||
|
@ -182,8 +198,8 @@ pub fn constrain_pattern(
|
|||
// A -> ""
|
||||
// _ -> ""
|
||||
// so, we know that "x" (in this case, a tag union) must be open.
|
||||
if could_be_a_tag_union(expected.get_type_ref()) {
|
||||
let type_index = constraints.push_type(expected.get_type());
|
||||
if could_be_a_tag_union(constraints, *expected.get_type_ref()) {
|
||||
let type_index = expected.get_type();
|
||||
|
||||
state
|
||||
.delayed_is_open_constraints
|
||||
|
@ -195,9 +211,9 @@ pub fn constrain_pattern(
|
|||
}
|
||||
|
||||
Identifier(symbol) | Shadowed(_, _, symbol) => {
|
||||
if could_be_a_tag_union(expected.get_type_ref()) {
|
||||
let type_index = constraints.push_type(expected.get_type_ref().clone());
|
||||
let type_index = *expected.get_type_ref();
|
||||
|
||||
if could_be_a_tag_union(constraints, type_index) {
|
||||
state
|
||||
.delayed_is_open_constraints
|
||||
.push(constraints.is_open_type(type_index));
|
||||
|
@ -207,7 +223,7 @@ pub fn constrain_pattern(
|
|||
*symbol,
|
||||
Loc {
|
||||
region,
|
||||
value: expected.get_type(),
|
||||
value: type_index,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
@ -216,9 +232,9 @@ pub fn constrain_pattern(
|
|||
ident: symbol,
|
||||
specializes: _,
|
||||
} => {
|
||||
if could_be_a_tag_union(expected.get_type_ref()) {
|
||||
let type_index = constraints.push_type(expected.get_type_ref().clone());
|
||||
let type_index = *expected.get_type_ref();
|
||||
|
||||
if could_be_a_tag_union(constraints, type_index) {
|
||||
state.constraints.push(constraints.is_open_type(type_index));
|
||||
}
|
||||
|
||||
|
@ -226,7 +242,7 @@ pub fn constrain_pattern(
|
|||
*symbol,
|
||||
Loc {
|
||||
region,
|
||||
value: expected.get_type(),
|
||||
value: type_index,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
@ -270,7 +286,7 @@ pub fn constrain_pattern(
|
|||
let num_type = constraints.push_type(num_type);
|
||||
|
||||
// Link the free num var with the int var and our expectation.
|
||||
let int_type = builtins::num_int(Type::Variable(precision_var));
|
||||
let int_type = constraints.push_type(builtins::num_int(Type::Variable(precision_var)));
|
||||
|
||||
state.constraints.push({
|
||||
let expected_index =
|
||||
|
@ -300,10 +316,11 @@ pub fn constrain_pattern(
|
|||
region,
|
||||
Category::Frac,
|
||||
);
|
||||
let num_type_index = constraints.push_type(num_type); // TODO check me if something breaks!
|
||||
|
||||
// Link the free num var with the float var and our expectation.
|
||||
let float_type = builtins::num_float(Type::Variable(precision_var));
|
||||
let num_type_index = constraints.push_type(num_type); // TODO check me if something breaks!
|
||||
let float_type =
|
||||
constraints.push_type(builtins::num_float(Type::Variable(precision_var)));
|
||||
|
||||
state.constraints.push({
|
||||
let expected_index =
|
||||
|
@ -345,11 +362,11 @@ pub fn constrain_pattern(
|
|||
Category::Int,
|
||||
);
|
||||
|
||||
// Link the free num var with the int var and our expectation.
|
||||
let int_type = builtins::num_int(Type::Variable(precision_var));
|
||||
|
||||
let num_type_index = constraints.push_type(num_type);
|
||||
|
||||
// Link the free num var with the int var and our expectation.
|
||||
let int_type = constraints.push_type(builtins::num_int(Type::Variable(precision_var)));
|
||||
|
||||
state.constraints.push({
|
||||
let expected_index =
|
||||
constraints.push_expected_type(Expected::NoExpectation(int_type));
|
||||
|
@ -394,12 +411,13 @@ pub fn constrain_pattern(
|
|||
} in destructs
|
||||
{
|
||||
let pat_type = Type::Variable(*var);
|
||||
let expected = PExpected::NoExpectation(pat_type.clone());
|
||||
let pat_type_index = constraints.push_type(pat_type.clone());
|
||||
let expected = PExpected::NoExpectation(pat_type_index);
|
||||
|
||||
if !state.headers.contains_key(symbol) {
|
||||
state
|
||||
.headers
|
||||
.insert(*symbol, Loc::at(region, pat_type.clone()));
|
||||
.insert(*symbol, Loc::at(region, pat_type_index));
|
||||
}
|
||||
|
||||
let field_type = match typ {
|
||||
|
@ -408,7 +426,7 @@ pub fn constrain_pattern(
|
|||
let expected_pat =
|
||||
constraints.push_pat_expected_type(PExpected::ForReason(
|
||||
PReason::PatternGuard,
|
||||
pat_type.clone(),
|
||||
pat_type_index,
|
||||
loc_guard.region,
|
||||
));
|
||||
|
||||
|
@ -436,7 +454,7 @@ pub fn constrain_pattern(
|
|||
let expected_pat =
|
||||
constraints.push_pat_expected_type(PExpected::ForReason(
|
||||
PReason::OptionalField,
|
||||
pat_type.clone(),
|
||||
pat_type_index,
|
||||
loc_expr.region,
|
||||
));
|
||||
|
||||
|
@ -451,7 +469,7 @@ pub fn constrain_pattern(
|
|||
|
||||
let expr_expected = Expected::ForReason(
|
||||
Reason::RecordDefaultField(label.clone()),
|
||||
pat_type.clone(),
|
||||
pat_type_index,
|
||||
loc_expr.region,
|
||||
);
|
||||
|
||||
|
@ -477,7 +495,10 @@ pub fn constrain_pattern(
|
|||
state.vars.push(*var);
|
||||
}
|
||||
|
||||
let record_type = Type::Record(field_types, TypeExtension::from_type(ext_type));
|
||||
let record_type = constraints.push_type(Type::Record(
|
||||
field_types,
|
||||
TypeExtension::from_type(ext_type),
|
||||
));
|
||||
|
||||
let whole_var_index = constraints.push_type(Type::Variable(*whole_var));
|
||||
let expected_record =
|
||||
|
@ -501,6 +522,54 @@ pub fn constrain_pattern(
|
|||
state.constraints.push(whole_con);
|
||||
state.constraints.push(record_con);
|
||||
}
|
||||
|
||||
List {
|
||||
list_var,
|
||||
elem_var,
|
||||
patterns:
|
||||
ListPatterns {
|
||||
patterns,
|
||||
opt_rest: _,
|
||||
},
|
||||
} => {
|
||||
let elem_var_index = constraints.push_type(Type::Variable(*elem_var));
|
||||
|
||||
for loc_pat in patterns.iter() {
|
||||
let expected =
|
||||
PExpected::ForReason(PReason::ListElem, elem_var_index, loc_pat.region);
|
||||
|
||||
constrain_pattern(
|
||||
constraints,
|
||||
env,
|
||||
&loc_pat.value,
|
||||
loc_pat.region,
|
||||
expected,
|
||||
state,
|
||||
);
|
||||
}
|
||||
|
||||
let list_var_index = constraints.push_type(Type::Variable(*list_var));
|
||||
let solved_list = constraints.push_type(Type::Apply(
|
||||
Symbol::LIST_LIST,
|
||||
vec![Loc::at(region, Type::Variable(*elem_var))],
|
||||
region,
|
||||
));
|
||||
let store_solved_list = constraints.store(solved_list, *list_var, file!(), line!());
|
||||
|
||||
let expected = constraints.push_pat_expected_type(expected);
|
||||
let expected_constraint = constraints.pattern_presence(
|
||||
list_var_index,
|
||||
expected,
|
||||
PatternCategory::List,
|
||||
region,
|
||||
);
|
||||
|
||||
state.vars.push(*list_var);
|
||||
state.vars.push(*elem_var);
|
||||
state.constraints.push(store_solved_list);
|
||||
state.constraints.push(expected_constraint);
|
||||
}
|
||||
|
||||
AppliedTag {
|
||||
whole_var,
|
||||
ext_var,
|
||||
|
@ -512,7 +581,7 @@ pub fn constrain_pattern(
|
|||
for (index, (pattern_var, loc_pattern)) in arguments.iter().enumerate() {
|
||||
state.vars.push(*pattern_var);
|
||||
|
||||
let pattern_type = Type::Variable(*pattern_var);
|
||||
let pattern_type = constraints.push_type(Type::Variable(*pattern_var));
|
||||
|
||||
let expected = PExpected::ForReason(
|
||||
PReason::TagArg {
|
||||
|
@ -533,7 +602,7 @@ pub fn constrain_pattern(
|
|||
}
|
||||
|
||||
let pat_category = PatternCategory::Ctor(tag_name.clone());
|
||||
let expected_type = constraints.push_type(expected.get_type_ref().clone());
|
||||
let expected_type = *expected.get_type_ref();
|
||||
|
||||
let whole_con = constraints.includes_tag(
|
||||
expected_type,
|
||||
|
@ -565,8 +634,9 @@ pub fn constrain_pattern(
|
|||
// Suppose we are constraining the pattern \@Id who, where Id n := [Id U64 n]
|
||||
let (arg_pattern_var, loc_arg_pattern) = &**argument;
|
||||
let arg_pattern_type = Type::Variable(*arg_pattern_var);
|
||||
let arg_pattern_type_index = constraints.push_type(Type::Variable(*arg_pattern_var));
|
||||
|
||||
let opaque_type = Type::Alias {
|
||||
let opaque_type = constraints.push_type(Type::Alias {
|
||||
symbol: *opaque,
|
||||
type_arguments: type_arguments
|
||||
.iter()
|
||||
|
@ -579,10 +649,10 @@ pub fn constrain_pattern(
|
|||
infer_ext_in_output_types: vec![],
|
||||
actual: Box::new(arg_pattern_type.clone()),
|
||||
kind: AliasKind::Opaque,
|
||||
};
|
||||
});
|
||||
|
||||
// First, add a constraint for the argument "who"
|
||||
let arg_pattern_expected = PExpected::NoExpectation(arg_pattern_type.clone());
|
||||
let arg_pattern_expected = PExpected::NoExpectation(arg_pattern_type_index);
|
||||
constrain_pattern(
|
||||
constraints,
|
||||
env,
|
||||
|
@ -617,11 +687,13 @@ pub fn constrain_pattern(
|
|||
// `[A k1, B k1] += typeof (A s)`, because we are in a destructure position and not
|
||||
// all constructors are covered in this branch!
|
||||
let arg_pattern_type = constraints.push_type(arg_pattern_type);
|
||||
let specialized_type = constraints
|
||||
.push_pat_expected_type(PExpected::NoExpectation((**specialized_def_type).clone()));
|
||||
let specialized_type_index = constraints.push_type((**specialized_def_type).clone());
|
||||
let specialized_type_expected = constraints
|
||||
.push_pat_expected_type(PExpected::NoExpectation(specialized_type_index));
|
||||
|
||||
let link_type_variables_con = constraints.pattern_presence(
|
||||
arg_pattern_type,
|
||||
specialized_type,
|
||||
specialized_type_expected,
|
||||
PatternCategory::Opaque(*opaque),
|
||||
loc_arg_pattern.region,
|
||||
);
|
||||
|
@ -654,6 +726,18 @@ pub fn constrain_pattern(
|
|||
}
|
||||
}
|
||||
|
||||
fn could_be_a_tag_union(typ: &Type) -> bool {
|
||||
!matches!(typ, Type::Apply(..) | Type::Function(..) | Type::Record(..))
|
||||
fn could_be_a_tag_union(constraints: &mut Constraints, typ: TypeOrVar) -> bool {
|
||||
match typ.split() {
|
||||
Ok(typ_index) => {
|
||||
let typ_cell = &mut constraints.types[typ_index.index()];
|
||||
!matches!(
|
||||
typ_cell.get_mut(),
|
||||
Type::Apply(..) | Type::Function(..) | Type::Record(..)
|
||||
)
|
||||
}
|
||||
Err(_) => {
|
||||
// Variables are opaque at this point, assume yes
|
||||
true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -99,14 +99,11 @@ impl DerivedModule {
|
|||
exposed_by_module: &ExposedByModule,
|
||||
key: DeriveKey,
|
||||
) -> &(Symbol, Def, SpecializationLambdaSets) {
|
||||
match self.map.get(&key) {
|
||||
Some(entry) => {
|
||||
// rustc won't let us return an immutable reference *and* continue using
|
||||
// `self.map` immutably below, but this is safe, because we are not returning
|
||||
// an immutable reference to the entry.
|
||||
return unsafe { std::mem::transmute(entry) };
|
||||
}
|
||||
None => {}
|
||||
if let Some(entry) = self.map.get(&key) {
|
||||
// rustc won't let us return an immutable reference *and* continue using
|
||||
// `self.map` immutably below, but this is safe, because we are not returning
|
||||
// an immutable reference to the entry.
|
||||
return unsafe { std::mem::transmute(entry) };
|
||||
}
|
||||
|
||||
let ident_id = if cfg!(debug_assertions) || cfg!(feature = "debug-derived-symbols") {
|
||||
|
|
|
@ -25,7 +25,7 @@ use hash::{FlatHash, FlatHashKey};
|
|||
use roc_module::symbol::Symbol;
|
||||
use roc_types::subs::{Subs, Variable};
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub enum DeriveError {
|
||||
/// Unbound variable present in the type-to-derive. It may be possible to derive for this type
|
||||
/// once the unbound variable is resolved.
|
||||
|
|
|
@ -9,3 +9,4 @@ edition = "2021"
|
|||
roc_collections = { path = "../collections" }
|
||||
roc_region = { path = "../region" }
|
||||
roc_module = { path = "../module" }
|
||||
roc_error_macros = { path = "../../error_macros" }
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
//! http://moscova.inria.fr/~maranget/papers/warn/warn.pdf
|
||||
|
||||
use roc_collections::all::{HumanIndex, MutMap};
|
||||
use roc_error_macros::internal_error;
|
||||
use roc_module::{
|
||||
ident::{Lowercase, TagIdIntType, TagName},
|
||||
symbol::Symbol,
|
||||
|
@ -69,6 +70,54 @@ pub enum Pattern {
|
|||
Anything,
|
||||
Literal(Literal),
|
||||
Ctor(Union, TagId, std::vec::Vec<Pattern>),
|
||||
List(ListArity, std::vec::Vec<Pattern>),
|
||||
}
|
||||
|
||||
/// The arity of list pattern.
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
pub enum ListArity {
|
||||
/// A list pattern of an exact size.
|
||||
Exact(usize),
|
||||
/// A list pattern matching a variable size, where `Slice(before, after)` refers to the number
|
||||
/// of elements that must be present before and after the variable rest pattern, respectively.
|
||||
///
|
||||
/// For example,
|
||||
/// [..] => Slice(0, 0)
|
||||
/// [A, .., B] => Slice(1, 1)
|
||||
/// [A, B, ..] => Slice(2, 0)
|
||||
/// [.., A, B] => Slice(0, 2)
|
||||
Slice(usize, usize),
|
||||
}
|
||||
|
||||
impl ListArity {
|
||||
/// The trivially-exhaustive list pattern `[..]`
|
||||
const ANY: ListArity = ListArity::Slice(0, 0);
|
||||
|
||||
pub fn min_len(&self) -> usize {
|
||||
match self {
|
||||
ListArity::Exact(n) => *n,
|
||||
ListArity::Slice(l, r) => l + r,
|
||||
}
|
||||
}
|
||||
|
||||
/// Could this list pattern include list pattern arity `other`?
|
||||
fn covers_arities_of(&self, other: &Self) -> bool {
|
||||
self.covers_length(other.min_len())
|
||||
}
|
||||
|
||||
pub fn covers_length(&self, length: usize) -> bool {
|
||||
match self {
|
||||
ListArity::Exact(l) => {
|
||||
// [_, _, _] can only cover [_, _, _]
|
||||
*l == length
|
||||
}
|
||||
ListArity::Slice(head, tail) => {
|
||||
// [_, _, .., _] can cover infinite arities >=3 , including
|
||||
// [_, _, .., _], [_, .., _, _], [_, _, .., _, _], [_, _, _, .., _, _], and so on
|
||||
head + tail <= length
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
|
@ -141,15 +190,16 @@ pub fn check(
|
|||
/// The initial count of items per row "n" is also 1
|
||||
/// The resulting rows are examples of missing patterns
|
||||
fn is_exhaustive(matrix: &RefPatternMatrix, n: usize) -> PatternMatrix {
|
||||
if matrix.is_empty() {
|
||||
vec![std::iter::repeat(Anything).take(n).collect()]
|
||||
let ctors = if matrix.is_empty() {
|
||||
return vec![std::iter::repeat(Anything).take(n).collect()];
|
||||
} else if n == 0 {
|
||||
vec![]
|
||||
return vec![];
|
||||
} else {
|
||||
let ctors = collect_ctors(matrix);
|
||||
let num_seen = ctors.len();
|
||||
collect_ctors(matrix)
|
||||
};
|
||||
|
||||
if num_seen == 0 {
|
||||
match ctors {
|
||||
CollectedCtors::NonExhaustiveAny => {
|
||||
let new_matrix: Vec<_> = matrix
|
||||
.iter()
|
||||
.filter_map(|row| specialize_row_by_anything(row))
|
||||
|
@ -161,7 +211,11 @@ fn is_exhaustive(matrix: &RefPatternMatrix, n: usize) -> PatternMatrix {
|
|||
}
|
||||
|
||||
rest
|
||||
} else {
|
||||
}
|
||||
CollectedCtors::Ctors(ctors) => {
|
||||
debug_assert!(!ctors.is_empty());
|
||||
|
||||
let num_seen = ctors.len();
|
||||
let alts = ctors.iter().next().unwrap().1;
|
||||
|
||||
let alt_list = &alts.alternatives;
|
||||
|
@ -193,7 +247,7 @@ fn is_exhaustive(matrix: &RefPatternMatrix, n: usize) -> PatternMatrix {
|
|||
let is_alt_exhaustive = |Ctor { arity, tag_id, .. }| {
|
||||
let new_matrix: Vec<_> = matrix
|
||||
.iter()
|
||||
.filter_map(|r| specialize_row_by_ctor(tag_id, arity, r))
|
||||
.filter_map(|r| specialize_row_by_ctor(tag_id, arity, r.to_owned()))
|
||||
.collect();
|
||||
let rest: Vec<Vec<Pattern>> = is_exhaustive(&new_matrix, arity + n - 1);
|
||||
|
||||
|
@ -212,6 +266,21 @@ fn is_exhaustive(matrix: &RefPatternMatrix, n: usize) -> PatternMatrix {
|
|||
.collect()
|
||||
}
|
||||
}
|
||||
CollectedCtors::NonExhaustiveList(alt_lists) => {
|
||||
let is_alt_exhaustive = |arity: ListArity| {
|
||||
let new_matrix: Vec<_> = matrix
|
||||
.iter()
|
||||
.filter_map(|row| specialize_row_by_list(arity, row.to_owned()))
|
||||
.collect();
|
||||
|
||||
let rest = is_exhaustive(&new_matrix, arity.min_len() + n - 1);
|
||||
|
||||
rest.into_iter()
|
||||
.map(move |row_not_covered| recover_list(arity, row_not_covered))
|
||||
};
|
||||
|
||||
alt_lists.into_iter().flat_map(is_alt_exhaustive).collect()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -232,14 +301,23 @@ fn recover_ctor(
|
|||
arity: usize,
|
||||
mut patterns: Vec<Pattern>,
|
||||
) -> Vec<Pattern> {
|
||||
let mut rest = patterns.split_off(arity);
|
||||
let args = patterns;
|
||||
let args = patterns.split_off(patterns.len() - arity);
|
||||
let mut rest = patterns;
|
||||
|
||||
rest.push(Ctor(union, tag_id, args));
|
||||
|
||||
rest
|
||||
}
|
||||
|
||||
fn recover_list(arity: ListArity, mut patterns: Vec<Pattern>) -> Vec<Pattern> {
|
||||
let list_elems = patterns.split_off(patterns.len() - arity.min_len());
|
||||
let mut rest = patterns;
|
||||
|
||||
rest.push(List(arity, list_elems));
|
||||
|
||||
rest
|
||||
}
|
||||
|
||||
/// Check if a new row "vector" is useful given previous rows "matrix"
|
||||
pub fn is_useful(mut old_matrix: PatternMatrix, mut vector: Row) -> bool {
|
||||
let mut matrix = Vec::with_capacity(old_matrix.len());
|
||||
|
@ -262,13 +340,53 @@ pub fn is_useful(mut old_matrix: PatternMatrix, mut vector: Row) -> bool {
|
|||
match first_pattern {
|
||||
// keep checking rows that start with this Ctor or Anything
|
||||
Ctor(_, id, args) => {
|
||||
specialize_row_by_ctor2(id, args.len(), &mut old_matrix, &mut matrix);
|
||||
specialize_matrix_by_ctor(id, args.len(), &mut old_matrix, &mut matrix);
|
||||
|
||||
std::mem::swap(&mut old_matrix, &mut matrix);
|
||||
|
||||
vector.extend(args);
|
||||
}
|
||||
|
||||
List(arity, args) => {
|
||||
// Check if there any specialized constructor of this list pattern
|
||||
// that is useful.
|
||||
let spec_list_ctors = build_list_ctors_covering_patterns(
|
||||
arity,
|
||||
filter_matrix_list_ctors(&old_matrix),
|
||||
);
|
||||
debug_assert!(!spec_list_ctors.is_empty());
|
||||
|
||||
if spec_list_ctors.len() == 1 {
|
||||
specialize_matrix_by_list(
|
||||
spec_list_ctors[0],
|
||||
&mut old_matrix,
|
||||
&mut matrix,
|
||||
);
|
||||
|
||||
std::mem::swap(&mut old_matrix, &mut matrix);
|
||||
|
||||
vector.extend(args);
|
||||
} else {
|
||||
// TODO turn this into an iteration over the outer loop rather than bouncing
|
||||
vector.extend(args);
|
||||
for list_ctor in spec_list_ctors {
|
||||
let mut old_matrix = old_matrix.clone();
|
||||
let mut spec_matrix = Vec::with_capacity(old_matrix.len());
|
||||
|
||||
specialize_matrix_by_list(
|
||||
list_ctor,
|
||||
&mut old_matrix,
|
||||
&mut spec_matrix,
|
||||
);
|
||||
|
||||
if is_useful(spec_matrix, vector.clone()) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
Anything => {
|
||||
// check if all alternatives appear in matrix
|
||||
match is_complete(&old_matrix) {
|
||||
|
@ -293,7 +411,7 @@ pub fn is_useful(mut old_matrix: PatternMatrix, mut vector: Row) -> bool {
|
|||
|
||||
let mut old_matrix = old_matrix.clone();
|
||||
let mut matrix = vec![];
|
||||
specialize_row_by_ctor2(
|
||||
specialize_matrix_by_ctor(
|
||||
tag_id,
|
||||
arity,
|
||||
&mut old_matrix,
|
||||
|
@ -330,6 +448,8 @@ pub fn is_useful(mut old_matrix: PatternMatrix, mut vector: Row) -> bool {
|
|||
}
|
||||
Some(Anything) => matrix.push(patterns),
|
||||
|
||||
Some(List(..)) => internal_error!("After type checking, lists and literals should never align in exhaustiveness checking"),
|
||||
|
||||
Some(Ctor(_, _, _)) => panic!(
|
||||
r#"Compiler bug! After type checking, constructors and literals should never align in pattern match exhaustiveness checks."#
|
||||
),
|
||||
|
@ -347,67 +467,118 @@ pub fn is_useful(mut old_matrix: PatternMatrix, mut vector: Row) -> bool {
|
|||
}
|
||||
}
|
||||
|
||||
/// INVARIANT: (length row == N) ==> (length result == arity + N - 1)
|
||||
fn specialize_row_by_ctor2(
|
||||
tag_id: TagId,
|
||||
arity: usize,
|
||||
// Specialize rows in the matrix that match a list's constructor(s).
|
||||
//
|
||||
// See the docs on [build_list_ctors_covering_patterns] for more information on how list
|
||||
// constructors are built up.
|
||||
fn specialize_matrix_by_list(
|
||||
spec_arity: ListArity,
|
||||
old_matrix: &mut PatternMatrix,
|
||||
matrix: &mut PatternMatrix,
|
||||
spec_matrix: &mut PatternMatrix,
|
||||
) {
|
||||
for mut row in old_matrix.drain(..) {
|
||||
let head = row.pop();
|
||||
let mut patterns = row;
|
||||
|
||||
match head {
|
||||
Some(Ctor(_, id, args)) =>
|
||||
if id == tag_id {
|
||||
patterns.extend(args);
|
||||
matrix.push(patterns);
|
||||
} else {
|
||||
// do nothing
|
||||
}
|
||||
Some(Anything) => {
|
||||
// TODO order!
|
||||
patterns.extend(std::iter::repeat(Anything).take(arity));
|
||||
matrix.push(patterns);
|
||||
}
|
||||
Some(Literal(_)) => panic!( "Compiler bug! After type checking, constructors and literal should never align in pattern match exhaustiveness checks."),
|
||||
None => panic!("Compiler error! Empty matrices should not get specialized."),
|
||||
}
|
||||
for row in old_matrix.drain(..) {
|
||||
if let Some(spec_row) = specialize_row_by_list(spec_arity, row) {
|
||||
spec_matrix.push(spec_row);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// INVARIANT: (length row == N) ==> (length result == arity + N - 1)
|
||||
fn specialize_row_by_ctor(tag_id: TagId, arity: usize, row: &RefRow) -> Option<Row> {
|
||||
let mut row = row.to_vec();
|
||||
|
||||
// Specialize a row that matches a list's constructor(s).
|
||||
//
|
||||
// See the docs on [build_list_ctors_covering_patterns] for more information on how list
|
||||
// constructors are built up.
|
||||
fn specialize_row_by_list(spec_arity: ListArity, mut row: Row) -> Option<Row> {
|
||||
let head = row.pop();
|
||||
let patterns = row;
|
||||
let mut spec_patterns = row;
|
||||
|
||||
match head {
|
||||
Some(Ctor(_, id, args)) => {
|
||||
if id == tag_id {
|
||||
// TODO order!
|
||||
let mut new_patterns = Vec::new();
|
||||
new_patterns.extend(args);
|
||||
new_patterns.extend(patterns);
|
||||
Some(new_patterns)
|
||||
Some(List(this_arity, args)) => {
|
||||
if this_arity.covers_arities_of(&spec_arity) {
|
||||
// This pattern covers the constructor we are specializing, so add on the
|
||||
// specialized fields of this pattern relative to the given constructor.
|
||||
if spec_arity.min_len() != this_arity.min_len() {
|
||||
// This list pattern covers the list we are specializing, so it must be
|
||||
// a variable-length slice, i.e. of the form `[before, .., after]`.
|
||||
//
|
||||
// Hence, the list we're specializing for must have at least a larger minimum length.
|
||||
// So we fill the middle part with enough wildcards to reach the length of
|
||||
// list constructor we're specializing for.
|
||||
debug_assert!(spec_arity.min_len() > this_arity.min_len());
|
||||
match this_arity {
|
||||
ListArity::Exact(_) => internal_error!("exact-sized lists cannot cover lists of other minimum length"),
|
||||
ListArity::Slice(before, after) => {
|
||||
let before = &args[..before];
|
||||
let after = &args[this_arity.min_len() - after..];
|
||||
let num_extra_wildcards = spec_arity.min_len() - this_arity.min_len();
|
||||
let extra_wildcards = std::iter::repeat(&Anything).take(num_extra_wildcards);
|
||||
|
||||
let new_pats = (before.iter().chain(extra_wildcards).chain(after)).cloned();
|
||||
|
||||
spec_patterns.extend(new_pats);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
debug_assert_eq!(this_arity.min_len(), spec_arity.min_len());
|
||||
|
||||
spec_patterns.extend(args);
|
||||
}
|
||||
|
||||
Some(spec_patterns)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
Some(Anything) => {
|
||||
// TODO order!
|
||||
let new_patterns = std::iter::repeat(Anything)
|
||||
.take(arity)
|
||||
.chain(patterns)
|
||||
.collect();
|
||||
Some(new_patterns)
|
||||
// The specialized fields for a `Anything` pattern with a list constructor is just
|
||||
// `Anything` repeated for the number of times we want to see the list pattern.
|
||||
spec_patterns.extend(std::iter::repeat(Anything).take(spec_arity.min_len()));
|
||||
Some(spec_patterns)
|
||||
}
|
||||
Some(Literal(_)) => unreachable!(
|
||||
r#"Compiler bug! After type checking, a constructor can never align with a literal: that should be a type error!"#
|
||||
Some(Ctor(..)) => internal_error!("After type checking, lists and constructors should never align in exhaustiveness checking"),
|
||||
Some(Literal(..)) => internal_error!("After type checking, lists and literals should never align in exhaustiveness checking"),
|
||||
None => internal_error!("Empty matrices should not get specialized"),
|
||||
}
|
||||
}
|
||||
|
||||
/// INVARIANT: (length row == N) ==> (length result == arity + N - 1)
|
||||
fn specialize_matrix_by_ctor(
|
||||
tag_id: TagId,
|
||||
arity: usize,
|
||||
old_matrix: &mut PatternMatrix,
|
||||
matrix: &mut PatternMatrix,
|
||||
) {
|
||||
for row in old_matrix.drain(..) {
|
||||
if let Some(spec_row) = specialize_row_by_ctor(tag_id, arity, row) {
|
||||
matrix.push(spec_row);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// INVARIANT: (length row == N) ==> (length result == arity + N - 1)
|
||||
fn specialize_row_by_ctor(tag_id: TagId, arity: usize, mut row: Row) -> Option<Row> {
|
||||
let head = row.pop();
|
||||
let mut spec_patterns = row;
|
||||
|
||||
match head {
|
||||
Some(Ctor(_, id, args)) => {
|
||||
if id == tag_id {
|
||||
spec_patterns.extend(args);
|
||||
Some(spec_patterns)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
Some(Anything) => {
|
||||
spec_patterns.extend(std::iter::repeat(Anything).take(arity));
|
||||
Some(spec_patterns)
|
||||
}
|
||||
Some(List(..)) => {
|
||||
internal_error!(r#"After type checking, a constructor can never align with a list"#)
|
||||
}
|
||||
Some(Literal(_)) => internal_error!(
|
||||
r#"After type checking, a constructor can never align with a literal: that should be a type error!"#
|
||||
),
|
||||
None => panic!("Compiler error! Empty matrices should not get specialized."),
|
||||
None => internal_error!("Empty matrices should not get specialized."),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -430,16 +601,21 @@ pub enum Complete {
|
|||
|
||||
fn is_complete(matrix: &RefPatternMatrix) -> Complete {
|
||||
let ctors = collect_ctors(matrix);
|
||||
let length = ctors.len();
|
||||
let mut it = ctors.into_iter();
|
||||
match ctors {
|
||||
CollectedCtors::NonExhaustiveAny | CollectedCtors::NonExhaustiveList(_) => Complete::No,
|
||||
CollectedCtors::Ctors(ctors) => {
|
||||
let length = ctors.len();
|
||||
let mut it = ctors.into_iter();
|
||||
|
||||
match it.next() {
|
||||
None => Complete::No,
|
||||
Some((_, Union { alternatives, .. })) => {
|
||||
if length == alternatives.len() {
|
||||
Complete::Yes(alternatives)
|
||||
} else {
|
||||
Complete::No
|
||||
match it.next() {
|
||||
None => Complete::No,
|
||||
Some((_, Union { alternatives, .. })) => {
|
||||
if length == alternatives.len() {
|
||||
Complete::Yes(alternatives)
|
||||
} else {
|
||||
Complete::No
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -452,14 +628,219 @@ type PatternMatrix = Vec<Vec<Pattern>>;
|
|||
type RefRow = [Pattern];
|
||||
type Row = Vec<Pattern>;
|
||||
|
||||
fn collect_ctors(matrix: &RefPatternMatrix) -> MutMap<TagId, Union> {
|
||||
let mut ctors = MutMap::default();
|
||||
enum CollectedCtors {
|
||||
NonExhaustiveAny,
|
||||
NonExhaustiveList(Vec<ListArity>),
|
||||
Ctors(MutMap<TagId, Union>),
|
||||
}
|
||||
|
||||
for row in matrix {
|
||||
if let Some(Ctor(union, id, _)) = row.last() {
|
||||
ctors.insert(*id, union.clone());
|
||||
}
|
||||
fn collect_ctors(matrix: &RefPatternMatrix) -> CollectedCtors {
|
||||
if matrix.is_empty() {
|
||||
return CollectedCtors::NonExhaustiveAny;
|
||||
}
|
||||
|
||||
ctors
|
||||
let first_row = &matrix[0];
|
||||
|
||||
if let Some(ctor) = first_row.last() {
|
||||
match ctor {
|
||||
Anything => CollectedCtors::NonExhaustiveAny,
|
||||
Pattern::Literal(_) => CollectedCtors::NonExhaustiveAny,
|
||||
List(_, _) => {
|
||||
let list_ctors = build_list_ctors_covering_patterns(
|
||||
ListArity::ANY,
|
||||
filter_matrix_list_ctors(matrix),
|
||||
);
|
||||
|
||||
CollectedCtors::NonExhaustiveList(list_ctors)
|
||||
}
|
||||
Pattern::Ctor(_, _, _) => {
|
||||
let mut ctors = MutMap::default();
|
||||
|
||||
for row in matrix {
|
||||
if let Some(Ctor(union, id, _)) = row.last() {
|
||||
ctors.insert(*id, union.clone());
|
||||
}
|
||||
}
|
||||
|
||||
CollectedCtors::Ctors(ctors)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
CollectedCtors::NonExhaustiveAny
|
||||
}
|
||||
}
|
||||
|
||||
/// Largely derived from Rust's list-pattern exhaustiveness checking algorithm: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_mir_build/thir/pattern/usefulness/index.html
|
||||
/// Dual-licensed under MIT and Apache licenses.
|
||||
/// Thank you, Rust contributors.
|
||||
///
|
||||
/// Calculates the list constructors that are covered by a given [slice constructor][ListArity::Slice],
|
||||
/// relative to other list constructors matched by a series of patterns.
|
||||
///
|
||||
/// This is relevant for both exhaustiveness and redundancy checking; to understand the motivation,
|
||||
/// let's start with the exhaustiveness checking case.
|
||||
///
|
||||
/// # Exhaustiveness Checking
|
||||
///
|
||||
/// All list constructors are exausted by the pattern [..], which actually represents the infinite
|
||||
/// series of constructors
|
||||
/// []
|
||||
/// [_]
|
||||
/// [_, _]
|
||||
/// ...
|
||||
///
|
||||
/// But we don't need to enumerate that infinite series to check if a series of list patterns is exhaustive -
|
||||
/// we only need to enumerate a finite number of constructors, up to the largest exact-size list
|
||||
/// pattern not covered by the patterns, or the largest slice pattern covered by the patterns.
|
||||
///
|
||||
/// ## Exact-sized patterns
|
||||
///
|
||||
/// Say we have patterns
|
||||
/// [_] -> ..
|
||||
/// [_, _] -> ..
|
||||
/// To exhaustiveness-check these patterns, we only need to build the subset of `[..]` constructors
|
||||
/// []
|
||||
/// [_]
|
||||
/// [_, _]
|
||||
/// [_, _, _, ..]
|
||||
/// to cover all list constructors that may or may not be matched by the patterns (in this case
|
||||
/// not, because `[]` is not matched, and the last constructor `[_, _, _, ..]` is not matched).
|
||||
///
|
||||
/// We include `[_, _, _, ..]` here because during exhaustiveness checking, we specialize list
|
||||
/// patterns **by exact size**, not by ranges. That means that is we stopped enumerating the
|
||||
/// constructors needed at `[_, _, ..]`, when specializing the list patterns against `[_, _, ..]`,
|
||||
/// we would see that the last pattern `[_, _] -> ..` exhausts it.
|
||||
///
|
||||
/// So, in the presence of exact-size constructors, we want to include a slice constructor that is
|
||||
/// larger than all other exact-size list pattern.
|
||||
///
|
||||
/// ## Slice patterns
|
||||
///
|
||||
/// Say we have patterns
|
||||
/// [1] -> ..
|
||||
/// [2, ..] -> ..
|
||||
/// now it's enough to just build
|
||||
/// []
|
||||
/// [_, ..]
|
||||
/// as possible constructors, since the last constructor `[_, ..]` will specialize both patterns to
|
||||
/// [1] -> ..
|
||||
/// [2] -> ..
|
||||
/// and if these patterns are exhaustive w.r.t. their arguments (`1` and `2`, which they are not,
|
||||
/// since number literals are not exhaustive), then the whole pattern must be exhaustive, since the
|
||||
/// largest slice constructor `[_, ..]` will cover the remaining infinite number of list constructors.
|
||||
///
|
||||
/// You can see that this holds with slice constructors that match elements at their head and tail
|
||||
/// as well:
|
||||
/// [{}, ..] -> ..
|
||||
/// [.., {}] -> ..
|
||||
/// Here again it's enough to just build the constructors [] and [_, ..] to match against -
/// notice that above slices of arity `1`, the patterns above do not provide any more information,
/// since they match any additional elements at the tail and head, respectively.
///
/// So, if they are exhaustive at arity `1`, they must be exhaustive at any higher arity.
///
/// In fact, in this case, if we are matching against `List {}`, the second pattern is redundant!
///
/// # Redundancy checking
///
/// Redundancy checking (in general, and for list patterns) is the same as exhaustiveness checking,
/// except that instead of checking whether `[..]` is covered by all patterns, we want to check if
/// the list constructor of a pattern introduces any more information than previous patterns we've
/// seen.
///
/// Let's say we're redundancy checking the pattern marked by `*`
/// [] -> ..
/// [_] -> ..
/// (*) [.., _] -> ..
///
/// The list constructors this pattern introduces are the infinite series [_], [_, _], ...
/// But the only ones relevant, relative to the patterns we've already seen, are
/// [_]
/// [_, _]
/// (Notice that the enumeration algorithm is the same as for `[..]` in the presence of exact-size
/// slices, just that the starting size differs - due to the tail matched by this pattern)
///
/// During checking we'll see that the `[_, _]` pattern is not already covered, so `[.., _]` is in
/// fact not redundant.
///
/// On the other hand, suppose we have
/// [] -> ..
/// [_, ..] -> ..
/// (*) [.., _] -> ..
///
/// Again enumerating the relevant constructors of `[.., _]` relative to the other patterns, we find
/// them to be
/// []
/// [.., _]
/// the first is already matched by the first pattern `[] -> ..`, and the latter specialized to
/// `[_]`, which in fact is covered by the second pattern `[_, ..] -> ..`. So the pattern marked by (*)
/// is indeed redundant.
///
/// # All together
///
/// So the idea to cover the infinite # of list constructors enumerated by a [slice][ListArity::Slice],
/// while specializing to the constructors that the user has provided, is as follows:
/// - Build [exact][ListArity::Exact] constructor variants for everything up to the max slice
/// constructor size, L.
/// - Then, the infinite # of list constructors is covered by the [0..L) exact-size constructors, and
/// the last slice constructor, that covers size [L..∞).
///
/// If we might only see [exact][ListArity::Exact] constructors along the way, we want to pick the
/// max slice size L that is larger than all of those exact size constructors.
///
/// But for slice constructors, we can just pick the largest slice, since that will cover slices of
/// that size, and any larger size.
///
/// Putting that together, we calculate L via
///
/// L = max(max_exact_len + 1, max_prefix_len + max_suffix_len)
fn build_list_ctors_covering_patterns(
|
||||
list_arity: ListArity,
|
||||
list_pattern_arities: impl IntoIterator<Item = ListArity>,
|
||||
) -> std::vec::Vec<ListArity> {
|
||||
match list_arity {
|
||||
ListArity::Exact(_) => {
|
||||
// Exact-size lists can only cover themselves..
|
||||
vec![list_arity]
|
||||
}
|
||||
ListArity::Slice(prefix_len, suffix_len) => {
|
||||
let min_len = prefix_len + suffix_len;
|
||||
|
||||
let mut max_exact_len = 0;
|
||||
let mut max_prefix_len = prefix_len;
|
||||
let mut max_suffix_len = suffix_len;
|
||||
|
||||
for arity in list_pattern_arities {
|
||||
match arity {
|
||||
ListArity::Exact(n) => max_exact_len = max_exact_len.max(n),
|
||||
ListArity::Slice(prefix, suffix) => {
|
||||
max_prefix_len = max_prefix_len.max(prefix);
|
||||
max_suffix_len = max_suffix_len.max(suffix);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let (inf_cover_prefix, inf_cover_suffix) = {
|
||||
if max_exact_len + 1 >= max_prefix_len + max_suffix_len {
|
||||
max_prefix_len = max_exact_len + 1 - max_suffix_len;
|
||||
}
|
||||
(max_prefix_len, max_suffix_len)
|
||||
};
|
||||
let l = inf_cover_prefix + inf_cover_suffix;
|
||||
|
||||
let exact_size_lists = (min_len..l) // exclusive
|
||||
.map(ListArity::Exact);
|
||||
|
||||
exact_size_lists
|
||||
.chain([ListArity::Slice(inf_cover_prefix, inf_cover_suffix)])
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn filter_matrix_list_ctors(matrix: &RefPatternMatrix) -> impl Iterator<Item = ListArity> + '_ {
|
||||
matrix.iter().filter_map(|ctor| match ctor.last() {
|
||||
Some(List(ar, _)) => Some(*ar),
|
||||
_ => None,
|
||||
})
|
||||
}
|
||||
|
|
|
@ -166,8 +166,8 @@ impl<'a> Formattable for TypeAnnotation<'a> {
|
|||
|
||||
Wildcard | Inferred | BoundVariable(_) | Malformed(_) => false,
|
||||
Function(args, result) => {
|
||||
(&result.value).is_multiline()
|
||||
|| args.iter().any(|loc_arg| (&loc_arg.value).is_multiline())
|
||||
result.value.is_multiline()
|
||||
|| args.iter().any(|loc_arg| loc_arg.value.is_multiline())
|
||||
}
|
||||
Apply(_, _, args) => args.iter().any(|loc_arg| loc_arg.value.is_multiline()),
|
||||
As(lhs, _, _) => lhs.value.is_multiline(),
|
||||
|
@ -226,7 +226,7 @@ impl<'a> Formattable for TypeAnnotation<'a> {
|
|||
buf.newline();
|
||||
}
|
||||
|
||||
(&argument.value).format_with_options(
|
||||
argument.value.format_with_options(
|
||||
buf,
|
||||
Parens::InFunctionType,
|
||||
Newlines::No,
|
||||
|
@ -251,7 +251,8 @@ impl<'a> Formattable for TypeAnnotation<'a> {
|
|||
buf.push_str("->");
|
||||
buf.spaces(1);
|
||||
|
||||
(&ret.value).format_with_options(buf, Parens::InFunctionType, Newlines::No, indent);
|
||||
ret.value
|
||||
.format_with_options(buf, Parens::InFunctionType, Newlines::No, indent);
|
||||
|
||||
if needs_parens {
|
||||
buf.push(')')
|
||||
|
@ -275,12 +276,9 @@ impl<'a> Formattable for TypeAnnotation<'a> {
|
|||
|
||||
for argument in *arguments {
|
||||
buf.spaces(1);
|
||||
(&argument.value).format_with_options(
|
||||
buf,
|
||||
Parens::InApply,
|
||||
Newlines::No,
|
||||
indent,
|
||||
);
|
||||
argument
|
||||
.value
|
||||
.format_with_options(buf, Parens::InApply, Newlines::No, indent);
|
||||
}
|
||||
|
||||
if write_parens {
|
||||
|
@ -371,12 +369,12 @@ impl<'a> Formattable for AssignedField<'a, TypeAnnotation<'a>> {
|
|||
fn format_with_options<'buf>(
|
||||
&self,
|
||||
buf: &mut Buf<'buf>,
|
||||
parens: Parens,
|
||||
_parens: Parens,
|
||||
newlines: Newlines,
|
||||
indent: u16,
|
||||
) {
|
||||
// we abuse the `Newlines` type to decide between multiline or single-line layout
|
||||
format_assigned_field_help(self, buf, parens, indent, 1, newlines == Newlines::Yes);
|
||||
format_assigned_field_help(self, buf, indent, 1, newlines == Newlines::Yes);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -388,12 +386,12 @@ impl<'a> Formattable for AssignedField<'a, Expr<'a>> {
|
|||
fn format_with_options<'buf>(
|
||||
&self,
|
||||
buf: &mut Buf<'buf>,
|
||||
parens: Parens,
|
||||
_parens: Parens,
|
||||
newlines: Newlines,
|
||||
indent: u16,
|
||||
) {
|
||||
// we abuse the `Newlines` type to decide between multiline or single-line layout
|
||||
format_assigned_field_help(self, buf, parens, indent, 0, newlines == Newlines::Yes);
|
||||
format_assigned_field_help(self, buf, indent, 0, newlines == Newlines::Yes);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -413,7 +411,6 @@ fn is_multiline_assigned_field_help<T: Formattable>(afield: &AssignedField<'_, T
|
|||
fn format_assigned_field_help<'a, 'buf, T>(
|
||||
zelf: &AssignedField<'a, T>,
|
||||
buf: &mut Buf<'buf>,
|
||||
parens: Parens,
|
||||
indent: u16,
|
||||
separator_spaces: usize,
|
||||
is_multiline: bool,
|
||||
|
@ -466,24 +463,10 @@ fn format_assigned_field_help<'a, 'buf, T>(
|
|||
}
|
||||
AssignedField::SpaceBefore(sub_field, spaces) => {
|
||||
fmt_comments_only(buf, spaces.iter(), NewlineAt::Bottom, indent);
|
||||
format_assigned_field_help(
|
||||
sub_field,
|
||||
buf,
|
||||
parens,
|
||||
indent,
|
||||
separator_spaces,
|
||||
is_multiline,
|
||||
);
|
||||
format_assigned_field_help(sub_field, buf, indent, separator_spaces, is_multiline);
|
||||
}
|
||||
AssignedField::SpaceAfter(sub_field, spaces) => {
|
||||
format_assigned_field_help(
|
||||
sub_field,
|
||||
buf,
|
||||
parens,
|
||||
indent,
|
||||
separator_spaces,
|
||||
is_multiline,
|
||||
);
|
||||
format_assigned_field_help(sub_field, buf, indent, separator_spaces, is_multiline);
|
||||
fmt_comments_only(buf, spaces.iter(), NewlineAt::Bottom, indent);
|
||||
}
|
||||
Malformed(raw) => {
|
||||
|
@ -497,7 +480,7 @@ impl<'a> Formattable for Tag<'a> {
|
|||
use self::Tag::*;
|
||||
|
||||
match self {
|
||||
Apply { args, .. } => args.iter().any(|arg| (&arg.value).is_multiline()),
|
||||
Apply { args, .. } => args.iter().any(|arg| arg.value.is_multiline()),
|
||||
Tag::SpaceBefore(_, _) | Tag::SpaceAfter(_, _) => true,
|
||||
Malformed(text) => text.chars().any(|c| c == '\n'),
|
||||
}
|
||||
|
|
|
@ -542,7 +542,7 @@ fn fmt_binops<'a, 'buf>(
|
|||
indent: u16,
|
||||
) {
|
||||
let is_multiline = part_of_multi_line_binops
|
||||
|| (&loc_right_side.value).is_multiline()
|
||||
|| loc_right_side.value.is_multiline()
|
||||
|| lefts.iter().any(|(expr, _)| expr.value.is_multiline());
|
||||
|
||||
for (loc_left_side, loc_binop) in lefts {
|
||||
|
@ -1045,7 +1045,7 @@ fn fmt_closure<'a, 'buf>(
|
|||
|
||||
buf.push_str("->");
|
||||
|
||||
let is_multiline = (&loc_ret.value).is_multiline();
|
||||
let is_multiline = loc_ret.value.is_multiline();
|
||||
|
||||
// If the body is multiline, go down a line and indent.
|
||||
let body_indent = if is_multiline {
|
||||
|
@ -1156,7 +1156,7 @@ fn fmt_backpassing<'a, 'buf>(
|
|||
|
||||
buf.push_str("<-");
|
||||
|
||||
let is_multiline = (&loc_ret.value).is_multiline();
|
||||
let is_multiline = loc_ret.value.is_multiline();
|
||||
|
||||
// If the body is multiline, go down a line and indent.
|
||||
let body_indent = if is_multiline {
|
||||
|
|
|
@ -152,6 +152,7 @@ impl<'a> Formattable for Pattern<'a> {
|
|||
}
|
||||
StrLiteral(literal) => fmt_str_literal(buf, *literal, indent),
|
||||
SingleQuote(string) => {
|
||||
buf.indent(indent);
|
||||
buf.push('\'');
|
||||
buf.push_str(string);
|
||||
buf.push('\'');
|
||||
|
|
|
@ -3864,6 +3864,17 @@ mod test_fmt {
|
|||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn when_with_single_quote_char() {
|
||||
expr_formats_same(indoc!(
|
||||
r#"
|
||||
when x is
|
||||
'0' -> 0
|
||||
'1' -> 1
|
||||
"#
|
||||
));
|
||||
}
|
||||
|
||||
// NEWLINES
|
||||
|
||||
#[test]
|
||||
|
|
|
@ -19,7 +19,7 @@ macro_rules! disassembler_test {
|
|||
// TODO: Not sure if there is a better way to merge these together,
|
||||
// but I like the end use of this a lot better than the old tests.
|
||||
($assemble_fn: expr, $format_fn: expr) => {{
|
||||
use crate::generic64::disassembler_test_macro::merge_instructions_without_line_numbers;
|
||||
use $crate::generic64::disassembler_test_macro::merge_instructions_without_line_numbers;
|
||||
let arena = bumpalo::Bump::new();
|
||||
let (mut buf, cs) = setup_capstone_and_arena(&arena);
|
||||
$assemble_fn(&mut buf);
|
||||
|
|
|
@ -1011,7 +1011,7 @@ impl Assembler<X86_64GeneralReg, X86_64FloatReg> for X86_64Assembler {
|
|||
|
||||
#[inline(always)]
|
||||
fn call(buf: &mut Vec<'_, u8>, relocs: &mut Vec<'_, Relocation>, fn_name: String) {
|
||||
buf.extend(&[0xE8, 0x00, 0x00, 0x00, 0x00]);
|
||||
buf.extend([0xE8, 0x00, 0x00, 0x00, 0x00]);
|
||||
relocs.push(Relocation::LinkedFunction {
|
||||
offset: buf.len() as u64 - 4,
|
||||
name: fn_name,
|
||||
|
@ -1478,7 +1478,7 @@ fn binop_reg64_reg64(
|
|||
let rex = add_reg_extension(src, rex);
|
||||
let dst_mod = dst as u8 % 8;
|
||||
let src_mod = (src as u8 % 8) << 3;
|
||||
buf.extend(&[rex, op_code, 0xC0 | dst_mod | src_mod]);
|
||||
buf.extend([rex, op_code, 0xC0 | dst_mod | src_mod]);
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
|
@ -1493,7 +1493,7 @@ fn extended_binop_reg64_reg64(
|
|||
let rex = add_reg_extension(src, rex);
|
||||
let dst_mod = dst as u8 % 8;
|
||||
let src_mod = (src as u8 % 8) << 3;
|
||||
buf.extend(&[rex, op_code1, op_code2, 0xC0 | dst_mod | src_mod]);
|
||||
buf.extend([rex, op_code1, op_code2, 0xC0 | dst_mod | src_mod]);
|
||||
}
|
||||
|
||||
// Below here are the functions for all of the assembly instructions.
|
||||
|
@ -1508,8 +1508,8 @@ fn add_reg64_imm32(buf: &mut Vec<'_, u8>, dst: X86_64GeneralReg, imm: i32) {
|
|||
let rex = add_rm_extension(dst, REX_W);
|
||||
let dst_mod = dst as u8 % 8;
|
||||
buf.reserve(7);
|
||||
buf.extend(&[rex, 0x81, 0xC0 | dst_mod]);
|
||||
buf.extend(&imm.to_le_bytes());
|
||||
buf.extend([rex, 0x81, 0xC0 | dst_mod]);
|
||||
buf.extend(imm.to_le_bytes());
|
||||
}
|
||||
|
||||
/// `ADD r/m64,r64` -> Add r64 to r/m64.
|
||||
|
@ -1547,7 +1547,7 @@ fn addsd_freg64_freg64(buf: &mut Vec<'_, u8>, dst: X86_64FloatReg, src: X86_64Fl
|
|||
let src_high = src as u8 > 7;
|
||||
let src_mod = src as u8 % 8;
|
||||
if dst_high || src_high {
|
||||
buf.extend(&[
|
||||
buf.extend([
|
||||
0xF2,
|
||||
0x40 | ((dst_high as u8) << 2) | (src_high as u8),
|
||||
0x0F,
|
||||
|
@ -1555,7 +1555,7 @@ fn addsd_freg64_freg64(buf: &mut Vec<'_, u8>, dst: X86_64FloatReg, src: X86_64Fl
|
|||
0xC0 | (dst_mod << 3) | (src_mod),
|
||||
])
|
||||
} else {
|
||||
buf.extend(&[0xF2, 0x0F, 0x58, 0xC0 | (dst_mod << 3) | (src_mod)])
|
||||
buf.extend([0xF2, 0x0F, 0x58, 0xC0 | (dst_mod << 3) | (src_mod)])
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1567,7 +1567,7 @@ fn addss_freg32_freg32(buf: &mut Vec<'_, u8>, dst: X86_64FloatReg, src: X86_64Fl
|
|||
let src_high = src as u8 > 7;
|
||||
let src_mod = src as u8 % 8;
|
||||
if dst_high || src_high {
|
||||
buf.extend(&[
|
||||
buf.extend([
|
||||
0xF3,
|
||||
0x40 | ((dst_high as u8) << 2) | (src_high as u8),
|
||||
0x0F,
|
||||
|
@ -1575,7 +1575,7 @@ fn addss_freg32_freg32(buf: &mut Vec<'_, u8>, dst: X86_64FloatReg, src: X86_64Fl
|
|||
0xC0 | (dst_mod << 3) | (src_mod),
|
||||
])
|
||||
} else {
|
||||
buf.extend(&[0xF3, 0x0F, 0x58, 0xC0 | (dst_mod << 3) | (src_mod)])
|
||||
buf.extend([0xF3, 0x0F, 0x58, 0xC0 | (dst_mod << 3) | (src_mod)])
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1587,7 +1587,7 @@ fn mulsd_freg64_freg64(buf: &mut Vec<'_, u8>, dst: X86_64FloatReg, src: X86_64Fl
|
|||
let src_high = src as u8 > 7;
|
||||
let src_mod = src as u8 % 8;
|
||||
if dst_high || src_high {
|
||||
buf.extend(&[
|
||||
buf.extend([
|
||||
0xF2,
|
||||
0x40 | ((dst_high as u8) << 2) | (src_high as u8),
|
||||
0x0F,
|
||||
|
@ -1595,7 +1595,7 @@ fn mulsd_freg64_freg64(buf: &mut Vec<'_, u8>, dst: X86_64FloatReg, src: X86_64Fl
|
|||
0xC0 | (dst_mod << 3) | (src_mod),
|
||||
])
|
||||
} else {
|
||||
buf.extend(&[0xF2, 0x0F, 0x59, 0xC0 | (dst_mod << 3) | (src_mod)])
|
||||
buf.extend([0xF2, 0x0F, 0x59, 0xC0 | (dst_mod << 3) | (src_mod)])
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1607,7 +1607,7 @@ fn divss_freg32_freg32(buf: &mut Vec<'_, u8>, dst: X86_64FloatReg, src: X86_64Fl
|
|||
let src_high = src as u8 > 7;
|
||||
let src_mod = src as u8 % 8;
|
||||
if dst_high || src_high {
|
||||
buf.extend(&[
|
||||
buf.extend([
|
||||
0xF3,
|
||||
0x40 | ((dst_high as u8) << 2) | (src_high as u8),
|
||||
0x0F,
|
||||
|
@ -1615,7 +1615,7 @@ fn divss_freg32_freg32(buf: &mut Vec<'_, u8>, dst: X86_64FloatReg, src: X86_64Fl
|
|||
0xC0 | (dst_mod << 3) | (src_mod),
|
||||
])
|
||||
} else {
|
||||
buf.extend(&[0xF3, 0x0F, 0x5E, 0xC0 | (dst_mod << 3) | (src_mod)])
|
||||
buf.extend([0xF3, 0x0F, 0x5E, 0xC0 | (dst_mod << 3) | (src_mod)])
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1627,7 +1627,7 @@ fn divsd_freg64_freg64(buf: &mut Vec<'_, u8>, dst: X86_64FloatReg, src: X86_64Fl
|
|||
let src_high = src as u8 > 7;
|
||||
let src_mod = src as u8 % 8;
|
||||
if dst_high || src_high {
|
||||
buf.extend(&[
|
||||
buf.extend([
|
||||
0xF2,
|
||||
0x40 | ((dst_high as u8) << 2) | (src_high as u8),
|
||||
0x0F,
|
||||
|
@ -1635,7 +1635,7 @@ fn divsd_freg64_freg64(buf: &mut Vec<'_, u8>, dst: X86_64FloatReg, src: X86_64Fl
|
|||
0xC0 | (dst_mod << 3) | (src_mod),
|
||||
])
|
||||
} else {
|
||||
buf.extend(&[0xF2, 0x0F, 0x5E, 0xC0 | (dst_mod << 3) | (src_mod)])
|
||||
buf.extend([0xF2, 0x0F, 0x5E, 0xC0 | (dst_mod << 3) | (src_mod)])
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1647,7 +1647,7 @@ fn mulss_freg32_freg32(buf: &mut Vec<'_, u8>, dst: X86_64FloatReg, src: X86_64Fl
|
|||
let src_high = src as u8 > 7;
|
||||
let src_mod = src as u8 % 8;
|
||||
if dst_high || src_high {
|
||||
buf.extend(&[
|
||||
buf.extend([
|
||||
0xF3,
|
||||
0x40 | ((dst_high as u8) << 2) | (src_high as u8),
|
||||
0x0F,
|
||||
|
@ -1655,7 +1655,7 @@ fn mulss_freg32_freg32(buf: &mut Vec<'_, u8>, dst: X86_64FloatReg, src: X86_64Fl
|
|||
0xC0 | (dst_mod << 3) | (src_mod),
|
||||
])
|
||||
} else {
|
||||
buf.extend(&[0xF3, 0x0F, 0x59, 0xC0 | (dst_mod << 3) | (src_mod)])
|
||||
buf.extend([0xF3, 0x0F, 0x59, 0xC0 | (dst_mod << 3) | (src_mod)])
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1667,7 +1667,7 @@ fn andpd_freg64_freg64(buf: &mut Vec<'_, u8>, dst: X86_64FloatReg, src: X86_64Fl
|
|||
let src_mod = src as u8 % 8;
|
||||
|
||||
if dst_high || src_high {
|
||||
buf.extend(&[
|
||||
buf.extend([
|
||||
0x66,
|
||||
0x40 | ((dst_high as u8) << 2) | (src_high as u8),
|
||||
0x0F,
|
||||
|
@ -1675,7 +1675,7 @@ fn andpd_freg64_freg64(buf: &mut Vec<'_, u8>, dst: X86_64FloatReg, src: X86_64Fl
|
|||
0xC0 | (dst_mod << 3) | (src_mod),
|
||||
])
|
||||
} else {
|
||||
buf.extend(&[0x66, 0x0F, 0x54, 0xC0 | (dst_mod << 3) | (src_mod)])
|
||||
buf.extend([0x66, 0x0F, 0x54, 0xC0 | (dst_mod << 3) | (src_mod)])
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1684,7 +1684,7 @@ fn andpd_freg64_freg64(buf: &mut Vec<'_, u8>, dst: X86_64FloatReg, src: X86_64Fl
|
|||
fn and_reg64_imm8(buf: &mut Vec<'_, u8>, dst: X86_64GeneralReg, imm: i8) {
|
||||
let rex = add_rm_extension(dst, REX_W);
|
||||
let dst_mod = dst as u8 % 8;
|
||||
buf.extend(&[rex, 0x83, 0xE0 | dst_mod, imm as u8]);
|
||||
buf.extend([rex, 0x83, 0xE0 | dst_mod, imm as u8]);
|
||||
}
|
||||
|
||||
/// `CMOVL r64,r/m64` -> Move if less (SF≠ OF).
|
||||
|
@ -1694,7 +1694,7 @@ fn cmovl_reg64_reg64(buf: &mut Vec<'_, u8>, dst: X86_64GeneralReg, src: X86_64Ge
|
|||
let rex = add_rm_extension(src, rex);
|
||||
let dst_mod = (dst as u8 % 8) << 3;
|
||||
let src_mod = src as u8 % 8;
|
||||
buf.extend(&[rex, 0x0F, 0x4C, 0xC0 | dst_mod | src_mod]);
|
||||
buf.extend([rex, 0x0F, 0x4C, 0xC0 | dst_mod | src_mod]);
|
||||
}
|
||||
|
||||
/// `CMP r/m64,i32` -> Compare i32 to r/m64.
|
||||
|
@ -1703,8 +1703,8 @@ fn cmp_reg64_imm32(buf: &mut Vec<'_, u8>, dst: X86_64GeneralReg, imm: i32) {
|
|||
let rex = add_rm_extension(dst, REX_W);
|
||||
let dst_mod = dst as u8 % 8;
|
||||
buf.reserve(7);
|
||||
buf.extend(&[rex, 0x81, 0xF8 | dst_mod]);
|
||||
buf.extend(&imm.to_le_bytes());
|
||||
buf.extend([rex, 0x81, 0xF8 | dst_mod]);
|
||||
buf.extend(imm.to_le_bytes());
|
||||
}
|
||||
|
||||
/// `CMP r/m64,r64` -> Compare r64 to r/m64.
|
||||
|
@ -1738,7 +1738,7 @@ fn mul_reg64_reg64(buf: &mut Vec<'_, u8>, src: X86_64GeneralReg) {
|
|||
rex |= REX_PREFIX_B;
|
||||
}
|
||||
|
||||
buf.extend(&[rex, 0xF7, 0b1110_0000 | (src as u8 % 8)]);
|
||||
buf.extend([rex, 0xF7, 0b1110_0000 | (src as u8 % 8)]);
|
||||
}
|
||||
|
||||
/// `IDIV r/m64` -> Signed divide RDX:RAX by r/m64, with result stored in RAX ← Quotient, RDX ← Remainder.
|
||||
|
@ -1756,9 +1756,9 @@ fn idiv_reg64_reg64(buf: &mut Vec<'_, u8>, src: X86_64GeneralReg) {
|
|||
//
|
||||
// The CQO instruction (available in 64-bit mode only) copies the sign (bit 63)
|
||||
// of the value in the RAX register into every bit position in the RDX register
|
||||
buf.extend(&[0x48, 0x99]);
|
||||
buf.extend([0x48, 0x99]);
|
||||
|
||||
buf.extend(&[rex, 0xF7, 0b1111_1000 | (src as u8 % 8)]);
|
||||
buf.extend([rex, 0xF7, 0b1111_1000 | (src as u8 % 8)]);
|
||||
}
|
||||
|
||||
/// `DIV r/m64` -> Unsigned divide RDX:RAX by r/m64, with result stored in RAX ← Quotient, RDX ← Remainder.
|
||||
|
@ -1776,10 +1776,10 @@ fn udiv_reg64_reg64(buf: &mut Vec<'_, u8>, src: X86_64GeneralReg) {
|
|||
//
|
||||
// The CQO instruction (available in 64-bit mode only) copies the sign (bit 63)
|
||||
// of the value in the RAX register into every bit position in the RDX register
|
||||
buf.extend(&[0x48, 0x99]);
|
||||
buf.extend([0x48, 0x99]);
|
||||
|
||||
// adds a cqo (convert doubleword to quadword)
|
||||
buf.extend(&[rex, 0xF7, 0b1111_0000 | (src as u8 % 8)]);
|
||||
buf.extend([rex, 0xF7, 0b1111_0000 | (src as u8 % 8)]);
|
||||
}
|
||||
|
||||
/// Jump near, relative, RIP = RIP + 32-bit displacement sign extended to 64-bits.
|
||||
|
@ -1787,7 +1787,7 @@ fn udiv_reg64_reg64(buf: &mut Vec<'_, u8>, src: X86_64GeneralReg) {
|
|||
fn jmp_imm32(buf: &mut Vec<'_, u8>, imm: i32) {
|
||||
buf.reserve(5);
|
||||
buf.push(0xE9);
|
||||
buf.extend(&imm.to_le_bytes());
|
||||
buf.extend(imm.to_le_bytes());
|
||||
}
|
||||
|
||||
/// Jump near if not equal (ZF=0).
|
||||
|
@ -1796,7 +1796,7 @@ fn jne_imm32(buf: &mut Vec<'_, u8>, imm: i32) {
|
|||
buf.reserve(6);
|
||||
buf.push(0x0F);
|
||||
buf.push(0x85);
|
||||
buf.extend(&imm.to_le_bytes());
|
||||
buf.extend(imm.to_le_bytes());
|
||||
}
|
||||
|
||||
/// `MOV r/m64, imm32` -> Move imm32 sign extended to 64-bits to r/m64.
|
||||
|
@ -1805,8 +1805,8 @@ fn mov_reg64_imm32(buf: &mut Vec<'_, u8>, dst: X86_64GeneralReg, imm: i32) {
|
|||
let rex = add_rm_extension(dst, REX_W);
|
||||
let dst_mod = dst as u8 % 8;
|
||||
buf.reserve(7);
|
||||
buf.extend(&[rex, 0xC7, 0xC0 | dst_mod]);
|
||||
buf.extend(&imm.to_le_bytes());
|
||||
buf.extend([rex, 0xC7, 0xC0 | dst_mod]);
|
||||
buf.extend(imm.to_le_bytes());
|
||||
}
|
||||
|
||||
/// `MOV r64, imm64` -> Move imm64 to r64.
|
||||
|
@ -1818,8 +1818,8 @@ fn mov_reg64_imm64(buf: &mut Vec<'_, u8>, dst: X86_64GeneralReg, imm: i64) {
|
|||
let rex = add_opcode_extension(dst, REX_W);
|
||||
let dst_mod = dst as u8 % 8;
|
||||
buf.reserve(10);
|
||||
buf.extend(&[rex, 0xB8 | dst_mod]);
|
||||
buf.extend(&imm.to_le_bytes());
|
||||
buf.extend([rex, 0xB8 | dst_mod]);
|
||||
buf.extend(imm.to_le_bytes());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1854,12 +1854,12 @@ fn mov_base64_offset32_reg64(
|
|||
let src_mod = (src as u8 % 8) << 3;
|
||||
let base_mod = base as u8 % 8;
|
||||
buf.reserve(8);
|
||||
buf.extend(&[rex, 0x89, 0x80 | src_mod | base_mod]);
|
||||
buf.extend([rex, 0x89, 0x80 | src_mod | base_mod]);
|
||||
// Using RSP or R12 requires a secondary index byte.
|
||||
if base == X86_64GeneralReg::RSP || base == X86_64GeneralReg::R12 {
|
||||
buf.push(0x24);
|
||||
}
|
||||
buf.extend(&offset.to_le_bytes());
|
||||
buf.extend(offset.to_le_bytes());
|
||||
}
|
||||
|
||||
/// `MOV r64,r/m64` -> Move r/m64 to r64, where m64 references a base + offset.
|
||||
|
@ -1875,12 +1875,12 @@ fn mov_reg64_base64_offset32(
|
|||
let dst_mod = (dst as u8 % 8) << 3;
|
||||
let base_mod = base as u8 % 8;
|
||||
buf.reserve(8);
|
||||
buf.extend(&[rex, 0x8B, 0x80 | dst_mod | base_mod]);
|
||||
buf.extend([rex, 0x8B, 0x80 | dst_mod | base_mod]);
|
||||
// Using RSP or R12 requires a secondary index byte.
|
||||
if base == X86_64GeneralReg::RSP || base == X86_64GeneralReg::R12 {
|
||||
buf.push(0x24);
|
||||
}
|
||||
buf.extend(&offset.to_le_bytes());
|
||||
buf.extend(offset.to_le_bytes());
|
||||
}
|
||||
|
||||
/// `MOVZX r64,r/m8` -> Move r/m8 with zero extention to r64, where m8 references a base + offset.
|
||||
|
@ -1896,12 +1896,12 @@ fn movzx_reg64_base8_offset32(
|
|||
let dst_mod = (dst as u8 % 8) << 3;
|
||||
let base_mod = base as u8 % 8;
|
||||
buf.reserve(9);
|
||||
buf.extend(&[rex, 0x0F, 0xB6, 0x80 | dst_mod | base_mod]);
|
||||
buf.extend([rex, 0x0F, 0xB6, 0x80 | dst_mod | base_mod]);
|
||||
// Using RSP or R12 requires a secondary index byte.
|
||||
if base == X86_64GeneralReg::RSP || base == X86_64GeneralReg::R12 {
|
||||
buf.push(0x24);
|
||||
}
|
||||
buf.extend(&offset.to_le_bytes());
|
||||
buf.extend(offset.to_le_bytes());
|
||||
}
|
||||
|
||||
/// `MOVSD xmm1,xmm2` -> Move scalar double-precision floating-point value from xmm2 to xmm1 register.
|
||||
|
@ -1922,7 +1922,7 @@ fn raw_movsd_freg64_freg64(buf: &mut Vec<'_, u8>, dst: X86_64FloatReg, src: X86_
|
|||
let src_high = src as u8 > 7;
|
||||
let src_mod = src as u8 % 8;
|
||||
if dst_high || src_high {
|
||||
buf.extend(&[
|
||||
buf.extend([
|
||||
0xF2,
|
||||
0x40 | ((dst_high as u8) << 2) | (src_high as u8),
|
||||
0x0F,
|
||||
|
@ -1930,7 +1930,7 @@ fn raw_movsd_freg64_freg64(buf: &mut Vec<'_, u8>, dst: X86_64FloatReg, src: X86_
|
|||
0xC0 | (dst_mod << 3) | (src_mod),
|
||||
])
|
||||
} else {
|
||||
buf.extend(&[0xF2, 0x0F, 0x10, 0xC0 | (dst_mod << 3) | (src_mod)])
|
||||
buf.extend([0xF2, 0x0F, 0x10, 0xC0 | (dst_mod << 3) | (src_mod)])
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1952,7 +1952,7 @@ fn raw_movss_freg32_freg32(buf: &mut Vec<'_, u8>, dst: X86_64FloatReg, src: X86_
|
|||
let src_high = src as u8 > 7;
|
||||
let src_mod = src as u8 % 8;
|
||||
if dst_high || src_high {
|
||||
buf.extend(&[
|
||||
buf.extend([
|
||||
0xF3,
|
||||
0x40 | ((dst_high as u8) << 2) | (src_high as u8),
|
||||
0x0F,
|
||||
|
@ -1960,7 +1960,7 @@ fn raw_movss_freg32_freg32(buf: &mut Vec<'_, u8>, dst: X86_64FloatReg, src: X86_
|
|||
0xC0 | (dst_mod << 3) | (src_mod),
|
||||
])
|
||||
} else {
|
||||
buf.extend(&[0xF3, 0x0F, 0x10, 0xC0 | (dst_mod << 3) | (src_mod)])
|
||||
buf.extend([0xF3, 0x0F, 0x10, 0xC0 | (dst_mod << 3) | (src_mod)])
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1970,12 +1970,12 @@ fn movss_freg32_rip_offset32(buf: &mut Vec<'_, u8>, dst: X86_64FloatReg, offset:
|
|||
let dst_mod = dst as u8 % 8;
|
||||
if dst as u8 > 7 {
|
||||
buf.reserve(9);
|
||||
buf.extend(&[0xF3, 0x44, 0x0F, 0x10, 0x05 | (dst_mod << 3)]);
|
||||
buf.extend([0xF3, 0x44, 0x0F, 0x10, 0x05 | (dst_mod << 3)]);
|
||||
} else {
|
||||
buf.reserve(8);
|
||||
buf.extend(&[0xF3, 0x0F, 0x10, 0x05 | (dst_mod << 3)]);
|
||||
buf.extend([0xF3, 0x0F, 0x10, 0x05 | (dst_mod << 3)]);
|
||||
}
|
||||
buf.extend(&offset.to_le_bytes());
|
||||
buf.extend(offset.to_le_bytes());
|
||||
}
|
||||
|
||||
// `MOVSD xmm, m64` -> Load scalar double-precision floating-point value from m64 to xmm register.
|
||||
|
@ -1984,12 +1984,12 @@ fn movsd_freg64_rip_offset32(buf: &mut Vec<'_, u8>, dst: X86_64FloatReg, offset:
|
|||
let dst_mod = dst as u8 % 8;
|
||||
if dst as u8 > 7 {
|
||||
buf.reserve(9);
|
||||
buf.extend(&[0xF2, 0x44, 0x0F, 0x10, 0x05 | (dst_mod << 3)]);
|
||||
buf.extend([0xF2, 0x44, 0x0F, 0x10, 0x05 | (dst_mod << 3)]);
|
||||
} else {
|
||||
buf.reserve(8);
|
||||
buf.extend(&[0xF2, 0x0F, 0x10, 0x05 | (dst_mod << 3)]);
|
||||
buf.extend([0xF2, 0x0F, 0x10, 0x05 | (dst_mod << 3)]);
|
||||
}
|
||||
buf.extend(&offset.to_le_bytes());
|
||||
buf.extend(offset.to_le_bytes());
|
||||
}
|
||||
|
||||
/// `MOVSD r/m64,xmm1` -> Move xmm1 to r/m64. where m64 references the base pointer.
|
||||
|
@ -2009,12 +2009,12 @@ fn movsd_base64_offset32_freg64(
|
|||
if src as u8 > 7 || base as u8 > 7 {
|
||||
buf.push(rex);
|
||||
}
|
||||
buf.extend(&[0x0F, 0x11, 0x80 | src_mod | base_mod]);
|
||||
buf.extend([0x0F, 0x11, 0x80 | src_mod | base_mod]);
|
||||
// Using RSP or R12 requires a secondary index byte.
|
||||
if base == X86_64GeneralReg::RSP || base == X86_64GeneralReg::R12 {
|
||||
buf.push(0x24);
|
||||
}
|
||||
buf.extend(&offset.to_le_bytes());
|
||||
buf.extend(offset.to_le_bytes());
|
||||
}
|
||||
|
||||
/// `MOVSD xmm1,r/m64` -> Move r/m64 to xmm1. where m64 references the base pointer.
|
||||
|
@ -2034,12 +2034,12 @@ fn movsd_freg64_base64_offset32(
|
|||
if dst as u8 > 7 || base as u8 > 7 {
|
||||
buf.push(rex);
|
||||
}
|
||||
buf.extend(&[0x0F, 0x10, 0x80 | dst_mod | base_mod]);
|
||||
buf.extend([0x0F, 0x10, 0x80 | dst_mod | base_mod]);
|
||||
// Using RSP or R12 requires a secondary index byte.
|
||||
if base == X86_64GeneralReg::RSP || base == X86_64GeneralReg::R12 {
|
||||
buf.push(0x24);
|
||||
}
|
||||
buf.extend(&offset.to_le_bytes());
|
||||
buf.extend(offset.to_le_bytes());
|
||||
}
|
||||
|
||||
/// `NEG r/m64` -> Two's complement negate r/m64.
|
||||
|
@ -2047,7 +2047,7 @@ fn movsd_freg64_base64_offset32(
|
|||
fn neg_reg64(buf: &mut Vec<'_, u8>, reg: X86_64GeneralReg) {
|
||||
let rex = add_rm_extension(reg, REX_W);
|
||||
let reg_mod = reg as u8 % 8;
|
||||
buf.extend(&[rex, 0xF7, 0xD8 | reg_mod]);
|
||||
buf.extend([rex, 0xF7, 0xD8 | reg_mod]);
|
||||
}
|
||||
|
||||
// helper function for `set*` instructions
|
||||
|
@ -2060,10 +2060,10 @@ fn set_reg64_help(op_code: u8, buf: &mut Vec<'_, u8>, reg: X86_64GeneralReg) {
|
|||
let reg_mod = reg as u8 % 8;
|
||||
use X86_64GeneralReg::*;
|
||||
match reg {
|
||||
RAX | RCX | RDX | RBX => buf.extend(&[0x0F, op_code, 0xC0 | reg_mod]),
|
||||
RSP | RBP | RSI | RDI => buf.extend(&[REX, 0x0F, op_code, 0xC0 | reg_mod]),
|
||||
RAX | RCX | RDX | RBX => buf.extend([0x0F, op_code, 0xC0 | reg_mod]),
|
||||
RSP | RBP | RSI | RDI => buf.extend([REX, 0x0F, op_code, 0xC0 | reg_mod]),
|
||||
R8 | R9 | R10 | R11 | R12 | R13 | R14 | R15 => {
|
||||
buf.extend(&[REX | 1, 0x0F, op_code, 0xC0 | reg_mod])
|
||||
buf.extend([REX | 1, 0x0F, op_code, 0xC0 | reg_mod])
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2085,7 +2085,7 @@ fn cvtsi2_help<T: RegTrait, U: RegTrait>(
|
|||
let mod1 = (dst.value() % 8) << 3;
|
||||
let mod2 = src.value() % 8;
|
||||
|
||||
buf.extend(&[op_code1, rex, 0x0F, op_code2, 0xC0 | mod1 | mod2])
|
||||
buf.extend([op_code1, rex, 0x0F, op_code2, 0xC0 | mod1 | mod2])
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
|
@ -2099,7 +2099,7 @@ fn cvtsx2_help<T: RegTrait, V: RegTrait>(
|
|||
let mod1 = (dst.value() % 8) << 3;
|
||||
let mod2 = src.value() % 8;
|
||||
|
||||
buf.extend(&[op_code1, 0x0F, op_code2, 0xC0 | mod1 | mod2])
|
||||
buf.extend([op_code1, 0x0F, op_code2, 0xC0 | mod1 | mod2])
|
||||
}
|
||||
|
||||
/// `SETE r/m64` -> Set Byte on Condition - zero/equal (ZF=1)
|
||||
|
@ -2183,8 +2183,8 @@ fn sub_reg64_imm32(buf: &mut Vec<'_, u8>, dst: X86_64GeneralReg, imm: i32) {
|
|||
let rex = add_rm_extension(dst, REX_W);
|
||||
let dst_mod = dst as u8 % 8;
|
||||
buf.reserve(7);
|
||||
buf.extend(&[rex, 0x81, 0xE8 | dst_mod]);
|
||||
buf.extend(&imm.to_le_bytes());
|
||||
buf.extend([rex, 0x81, 0xE8 | dst_mod]);
|
||||
buf.extend(imm.to_le_bytes());
|
||||
}
|
||||
|
||||
/// `SUB r/m64,r64` -> Sub r64 to r/m64.
|
||||
|
@ -2199,7 +2199,7 @@ fn pop_reg64(buf: &mut Vec<'_, u8>, reg: X86_64GeneralReg) {
|
|||
let reg_mod = reg as u8 % 8;
|
||||
if reg as u8 > 7 {
|
||||
let rex = add_opcode_extension(reg, REX);
|
||||
buf.extend(&[rex, 0x58 | reg_mod]);
|
||||
buf.extend([rex, 0x58 | reg_mod]);
|
||||
} else {
|
||||
buf.push(0x58 | reg_mod);
|
||||
}
|
||||
|
@ -2211,7 +2211,7 @@ fn push_reg64(buf: &mut Vec<'_, u8>, reg: X86_64GeneralReg) {
|
|||
let reg_mod = reg as u8 % 8;
|
||||
if reg as u8 > 7 {
|
||||
let rex = add_opcode_extension(reg, REX);
|
||||
buf.extend(&[rex, 0x50 | reg_mod]);
|
||||
buf.extend([rex, 0x50 | reg_mod]);
|
||||
} else {
|
||||
buf.push(0x50 | reg_mod);
|
||||
}
|
||||
|
|
|
@ -156,7 +156,7 @@ trait Backend<'a> {
|
|||
let module_id = env.module_id;
|
||||
let ident_ids = interns.all_ident_ids.get_mut(&module_id).unwrap();
|
||||
|
||||
rc_proc_gen.expand_refcount_stmt(ident_ids, layout, modify, *following)
|
||||
rc_proc_gen.expand_refcount_stmt(ident_ids, layout, modify, following)
|
||||
};
|
||||
|
||||
for spec in new_specializations.into_iter() {
|
||||
|
|
|
@ -421,7 +421,7 @@ fn build_proc<'a, B: Backend<'a>>(
|
|||
}
|
||||
Relocation::LinkedFunction { offset, name } => {
|
||||
// If the symbol is an undefined roc function, we need to add it here.
|
||||
if output.symbol_id(name.as_bytes()) == None && name.starts_with("roc_") {
|
||||
if output.symbol_id(name.as_bytes()).is_none() && name.starts_with("roc_") {
|
||||
let builtin_symbol = Symbol {
|
||||
name: name.as_bytes().to_vec(),
|
||||
value: 0,
|
||||
|
@ -435,7 +435,7 @@ fn build_proc<'a, B: Backend<'a>>(
|
|||
output.add_symbol(builtin_symbol);
|
||||
}
|
||||
// If the symbol is an undefined reference counting procedure, we need to add it here.
|
||||
if output.symbol_id(name.as_bytes()) == None {
|
||||
if output.symbol_id(name.as_bytes()).is_none() {
|
||||
for (sym, rc_name) in rc_proc_names.iter() {
|
||||
if name == rc_name {
|
||||
let section_id = output.add_section(
|
||||
|
|
|
@ -411,16 +411,16 @@ fn build_rc_wrapper<'a, 'ctx, 'env>(
|
|||
match rc_operation {
|
||||
Mode::Inc => {
|
||||
let n = 1;
|
||||
increment_refcount_layout(env, function_value, layout_ids, n, value, layout);
|
||||
increment_refcount_layout(env, layout_ids, n, value, layout);
|
||||
}
|
||||
Mode::IncN => {
|
||||
let n = it.next().unwrap().into_int_value();
|
||||
n.set_name(Symbol::ARG_2.as_str(&env.interns));
|
||||
|
||||
increment_n_refcount_layout(env, function_value, layout_ids, n, value, layout);
|
||||
increment_n_refcount_layout(env, layout_ids, n, value, layout);
|
||||
}
|
||||
Mode::Dec => {
|
||||
decrement_refcount_layout(env, function_value, layout_ids, value, layout);
|
||||
decrement_refcount_layout(env, layout_ids, value, layout);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -126,7 +126,7 @@ impl<'ctx> Iterator for FunctionIterator<'ctx> {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq)]
|
||||
#[derive(Default, Debug, Clone, PartialEq, Eq)]
|
||||
pub struct Scope<'a, 'ctx> {
|
||||
symbols: ImMap<Symbol, (Layout<'a>, BasicValueEnum<'ctx>)>,
|
||||
pub top_level_thunks: ImMap<Symbol, (ProcLayout<'a>, FunctionValue<'ctx>)>,
|
||||
|
@ -973,7 +973,7 @@ pub fn build_exp_literal<'a, 'ctx, 'env>(
|
|||
_ => unreachable!("incorrect small_str_bytes"),
|
||||
}
|
||||
} else {
|
||||
let ptr = define_global_str_literal_ptr(env, *str_literal);
|
||||
let ptr = define_global_str_literal_ptr(env, str_literal);
|
||||
let number_of_elements = env.ptr_int().const_int(str_literal.len() as u64, false);
|
||||
|
||||
let alloca =
|
||||
|
@ -2723,14 +2723,7 @@ pub fn build_exp_stmt<'a, 'ctx, 'env>(
|
|||
let layout = *layout;
|
||||
|
||||
if layout.contains_refcounted(env.layout_interner) {
|
||||
increment_refcount_layout(
|
||||
env,
|
||||
parent,
|
||||
layout_ids,
|
||||
*inc_amount,
|
||||
value,
|
||||
&layout,
|
||||
);
|
||||
increment_refcount_layout(env, layout_ids, *inc_amount, value, &layout);
|
||||
}
|
||||
|
||||
build_exp_stmt(env, layout_ids, func_spec_solutions, scope, parent, cont)
|
||||
|
@ -2739,7 +2732,7 @@ pub fn build_exp_stmt<'a, 'ctx, 'env>(
|
|||
let (value, layout) = load_symbol_and_layout(scope, symbol);
|
||||
|
||||
if layout.contains_refcounted(env.layout_interner) {
|
||||
decrement_refcount_layout(env, parent, layout_ids, value, layout);
|
||||
decrement_refcount_layout(env, layout_ids, value, layout);
|
||||
}
|
||||
|
||||
build_exp_stmt(env, layout_ids, func_spec_solutions, scope, parent, cont)
|
||||
|
@ -3847,12 +3840,9 @@ fn expose_function_to_host_help_c_abi_v2<'a, 'ctx, 'env>(
|
|||
arg_type.into_pointer_type().get_element_type(),
|
||||
);
|
||||
// C return pointer goes at the beginning of params, and we must skip it if it exists.
|
||||
let param_index = (i
|
||||
+ (if matches!(cc_return, CCReturn::ByPointer) {
|
||||
1
|
||||
} else {
|
||||
0
|
||||
})) as u32;
|
||||
let returns_pointer = matches!(cc_return, CCReturn::ByPointer);
|
||||
let param_index = i as u32 + returns_pointer as u32;
|
||||
|
||||
c_function.add_attribute(AttributeLoc::Param(param_index), byval);
|
||||
c_function.add_attribute(AttributeLoc::Param(param_index), nonnull);
|
||||
}
|
||||
|
@ -4554,7 +4544,7 @@ fn build_procedures_help<'a, 'ctx, 'env>(
|
|||
fn_val.print_to_stderr();
|
||||
|
||||
if let Some(app_ll_file) = debug_output_file {
|
||||
env.module.print_to_file(&app_ll_file).unwrap();
|
||||
env.module.print_to_file(app_ll_file).unwrap();
|
||||
|
||||
panic!(
|
||||
r"😱 LLVM errors when defining function {:?}; I wrote the full LLVM IR to {:?}",
|
||||
|
@ -6209,14 +6199,7 @@ fn run_low_level<'a, 'ctx, 'env>(
|
|||
|
||||
let element_layout = list_element_layout!(list_layout);
|
||||
|
||||
list_get_unsafe(
|
||||
env,
|
||||
layout_ids,
|
||||
parent,
|
||||
element_layout,
|
||||
elem_index,
|
||||
wrapper_struct,
|
||||
)
|
||||
list_get_unsafe(env, layout_ids, element_layout, elem_index, wrapper_struct)
|
||||
}
|
||||
ListReplaceUnsafe => {
|
||||
let list = load_symbol(scope, &args[0]);
|
||||
|
|
|
@ -121,7 +121,6 @@ pub(crate) fn list_with_capacity<'a, 'ctx, 'env>(
|
|||
pub(crate) fn list_get_unsafe<'a, 'ctx, 'env>(
|
||||
env: &Env<'a, 'ctx, 'env>,
|
||||
layout_ids: &mut LayoutIds<'a>,
|
||||
parent: FunctionValue<'ctx>,
|
||||
element_layout: &Layout<'a>,
|
||||
elem_index: IntValue<'ctx>,
|
||||
wrapper_struct: StructValue<'ctx>,
|
||||
|
@ -140,7 +139,7 @@ pub(crate) fn list_get_unsafe<'a, 'ctx, 'env>(
|
|||
|
||||
let result = load_roc_value(env, *element_layout, elem_ptr, "list_get_load_element");
|
||||
|
||||
increment_refcount_layout(env, parent, layout_ids, 1, result, element_layout);
|
||||
increment_refcount_layout(env, layout_ids, 1, result, element_layout);
|
||||
|
||||
result
|
||||
}
|
||||
|
|
|
@ -327,8 +327,6 @@ fn modify_refcount_struct_help<'a, 'ctx, 'env>(
|
|||
|
||||
arg_val.set_name(arg_symbol.as_str(&env.interns));
|
||||
|
||||
let parent = fn_val;
|
||||
|
||||
let wrapper_struct = arg_val.into_struct_value();
|
||||
|
||||
for (i, field_layout) in layouts.iter().enumerate() {
|
||||
|
@ -347,7 +345,6 @@ fn modify_refcount_struct_help<'a, 'ctx, 'env>(
|
|||
|
||||
modify_refcount_layout_help(
|
||||
env,
|
||||
parent,
|
||||
layout_ids,
|
||||
mode.to_call_mode(fn_val),
|
||||
when_recursive,
|
||||
|
@ -362,42 +359,32 @@ fn modify_refcount_struct_help<'a, 'ctx, 'env>(
|
|||
|
||||
pub fn increment_refcount_layout<'a, 'ctx, 'env>(
|
||||
env: &Env<'a, 'ctx, 'env>,
|
||||
parent: FunctionValue<'ctx>,
|
||||
layout_ids: &mut LayoutIds<'a>,
|
||||
inc_amount: u64,
|
||||
value: BasicValueEnum<'ctx>,
|
||||
layout: &Layout<'a>,
|
||||
) {
|
||||
let amount = env.ptr_int().const_int(inc_amount, false);
|
||||
increment_n_refcount_layout(env, parent, layout_ids, amount, value, layout);
|
||||
increment_n_refcount_layout(env, layout_ids, amount, value, layout);
|
||||
}
|
||||
|
||||
pub fn increment_n_refcount_layout<'a, 'ctx, 'env>(
|
||||
env: &Env<'a, 'ctx, 'env>,
|
||||
parent: FunctionValue<'ctx>,
|
||||
layout_ids: &mut LayoutIds<'a>,
|
||||
amount: IntValue<'ctx>,
|
||||
value: BasicValueEnum<'ctx>,
|
||||
layout: &Layout<'a>,
|
||||
) {
|
||||
modify_refcount_layout(
|
||||
env,
|
||||
parent,
|
||||
layout_ids,
|
||||
CallMode::Inc(amount),
|
||||
value,
|
||||
layout,
|
||||
);
|
||||
modify_refcount_layout(env, layout_ids, CallMode::Inc(amount), value, layout);
|
||||
}
|
||||
|
||||
pub fn decrement_refcount_layout<'a, 'ctx, 'env>(
|
||||
env: &Env<'a, 'ctx, 'env>,
|
||||
parent: FunctionValue<'ctx>,
|
||||
layout_ids: &mut LayoutIds<'a>,
|
||||
value: BasicValueEnum<'ctx>,
|
||||
layout: &Layout<'a>,
|
||||
) {
|
||||
modify_refcount_layout(env, parent, layout_ids, CallMode::Dec, value, layout);
|
||||
modify_refcount_layout(env, layout_ids, CallMode::Dec, value, layout);
|
||||
}
|
||||
|
||||
fn modify_refcount_builtin<'a, 'ctx, 'env>(
|
||||
|
@ -435,7 +422,6 @@ fn modify_refcount_builtin<'a, 'ctx, 'env>(
|
|||
|
||||
fn modify_refcount_layout<'a, 'ctx, 'env>(
|
||||
env: &Env<'a, 'ctx, 'env>,
|
||||
parent: FunctionValue<'ctx>,
|
||||
layout_ids: &mut LayoutIds<'a>,
|
||||
call_mode: CallMode<'ctx>,
|
||||
value: BasicValueEnum<'ctx>,
|
||||
|
@ -443,7 +429,6 @@ fn modify_refcount_layout<'a, 'ctx, 'env>(
|
|||
) {
|
||||
modify_refcount_layout_help(
|
||||
env,
|
||||
parent,
|
||||
layout_ids,
|
||||
call_mode,
|
||||
&WhenRecursive::Unreachable,
|
||||
|
@ -460,7 +445,6 @@ enum WhenRecursive<'a> {
|
|||
|
||||
fn modify_refcount_layout_help<'a, 'ctx, 'env>(
|
||||
env: &Env<'a, 'ctx, 'env>,
|
||||
parent: FunctionValue<'ctx>,
|
||||
layout_ids: &mut LayoutIds<'a>,
|
||||
call_mode: CallMode<'ctx>,
|
||||
when_recursive: &WhenRecursive<'a>,
|
||||
|
@ -474,7 +458,6 @@ fn modify_refcount_layout_help<'a, 'ctx, 'env>(
|
|||
|
||||
let function = match modify_refcount_layout_build_function(
|
||||
env,
|
||||
parent,
|
||||
layout_ids,
|
||||
mode,
|
||||
when_recursive,
|
||||
|
@ -538,7 +521,6 @@ fn call_help<'a, 'ctx, 'env>(
|
|||
|
||||
fn modify_refcount_layout_build_function<'a, 'ctx, 'env>(
|
||||
env: &Env<'a, 'ctx, 'env>,
|
||||
parent: FunctionValue<'ctx>,
|
||||
layout_ids: &mut LayoutIds<'a>,
|
||||
mode: Mode,
|
||||
when_recursive: &WhenRecursive<'a>,
|
||||
|
@ -603,7 +585,6 @@ fn modify_refcount_layout_build_function<'a, 'ctx, 'env>(
|
|||
|
||||
let function = modify_refcount_layout_build_function(
|
||||
env,
|
||||
parent,
|
||||
layout_ids,
|
||||
mode,
|
||||
when_recursive,
|
||||
|
@ -615,7 +596,6 @@ fn modify_refcount_layout_build_function<'a, 'ctx, 'env>(
|
|||
},
|
||||
LambdaSet(lambda_set) => modify_refcount_layout_build_function(
|
||||
env,
|
||||
parent,
|
||||
layout_ids,
|
||||
mode,
|
||||
when_recursive,
|
||||
|
@ -731,7 +711,6 @@ fn modify_refcount_list_help<'a, 'ctx, 'env>(
|
|||
let loop_fn = |_index, element| {
|
||||
modify_refcount_layout_help(
|
||||
env,
|
||||
parent,
|
||||
layout_ids,
|
||||
mode.to_call_mode(fn_val),
|
||||
when_recursive,
|
||||
|
@ -1302,7 +1281,6 @@ fn build_rec_union_recursive_decrement<'a, 'ctx, 'env>(
|
|||
for (field, field_layout) in deferred_nonrec {
|
||||
modify_refcount_layout_help(
|
||||
env,
|
||||
parent,
|
||||
layout_ids,
|
||||
mode.to_call_mode(decrement_fn),
|
||||
when_recursive,
|
||||
|
@ -1366,7 +1344,7 @@ fn union_layout_tags<'a>(
|
|||
match union_layout {
|
||||
NullableWrapped {
|
||||
other_tags: tags, ..
|
||||
} => *tags,
|
||||
} => tags,
|
||||
NullableUnwrapped { other_fields, .. } => arena.alloc([*other_fields]),
|
||||
NonNullableUnwrapped(fields) => arena.alloc([*fields]),
|
||||
Recursive(tags) => tags,
|
||||
|
@ -1687,7 +1665,6 @@ fn modify_refcount_union_help<'a, 'ctx, 'env>(
|
|||
|
||||
modify_refcount_layout_help(
|
||||
env,
|
||||
parent,
|
||||
layout_ids,
|
||||
mode.to_call_mode(fn_val),
|
||||
when_recursive,
|
||||
|
@ -1709,7 +1686,6 @@ fn modify_refcount_union_help<'a, 'ctx, 'env>(
|
|||
|
||||
modify_refcount_layout_help(
|
||||
env,
|
||||
parent,
|
||||
layout_ids,
|
||||
mode.to_call_mode(fn_val),
|
||||
when_recursive,
|
||||
|
|
|
@ -1237,7 +1237,7 @@ impl<'a> WasmBackend<'a> {
|
|||
}
|
||||
|
||||
CallType::HigherOrder(higher_order_lowlevel) => {
|
||||
call_higher_order_lowlevel(self, ret_sym, ret_layout, *higher_order_lowlevel)
|
||||
call_higher_order_lowlevel(self, ret_sym, ret_layout, higher_order_lowlevel)
|
||||
}
|
||||
|
||||
CallType::Foreign {
|
||||
|
|
|
@ -69,7 +69,7 @@ impl std::fmt::Debug for VmBlock<'_> {
|
|||
/// Rust representation matches Wasm encoding.
|
||||
/// It's an error to specify alignment higher than the "natural" alignment of the instruction
|
||||
#[repr(u8)]
|
||||
#[derive(Clone, Copy, Debug, PartialEq, PartialOrd)]
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd)]
|
||||
pub enum Align {
|
||||
Bytes1 = 0,
|
||||
Bytes2 = 1,
|
||||
|
@ -111,7 +111,7 @@ impl From<u32> for Align {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Copy)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Copy)]
|
||||
pub enum VmSymbolState {
|
||||
/// Value doesn't exist yet
|
||||
NotYetPushed,
|
||||
|
|
|
@ -303,7 +303,7 @@ impl<'a> Serialize for TypeSection<'a> {
|
|||
*
|
||||
*******************************************************************/
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub enum ImportDesc {
|
||||
Func { signature_index: u32 },
|
||||
Table { ty: TableType },
|
||||
|
@ -359,7 +359,7 @@ impl Serialize for ImportDesc {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub struct Import<'a> {
|
||||
pub module: &'a str,
|
||||
pub name: &'a str,
|
||||
|
@ -551,7 +551,7 @@ impl Parse<()> for RefType {
|
|||
}
|
||||
}
|
||||
}
|
||||
#[derive(Debug, PartialEq)]
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub struct TableType {
|
||||
pub ref_type: RefType,
|
||||
pub limits: Limits,
|
||||
|
@ -659,7 +659,7 @@ impl Serialize for TableSection {
|
|||
*
|
||||
*******************************************************************/
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub enum Limits {
|
||||
Min(u32),
|
||||
MinMax(u32, u32),
|
||||
|
@ -749,7 +749,7 @@ section_impl!(MemorySection, SectionId::Memory);
|
|||
*
|
||||
*******************************************************************/
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Copy)]
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
|
||||
pub struct GlobalType {
|
||||
pub value_type: ValueType,
|
||||
pub is_mutable: bool,
|
||||
|
|
|
@ -44,7 +44,7 @@ fn write_subs_for_module(module_id: ModuleId, filename: &str) {
|
|||
println!("cargo:rerun-if-changed={}", filepath.to_str().unwrap());
|
||||
|
||||
let mut output_path = PathBuf::from(std::env::var("OUT_DIR").unwrap());
|
||||
output_path.extend(&[filename]);
|
||||
output_path.extend([filename]);
|
||||
output_path.set_extension("dat");
|
||||
|
||||
#[cfg(not(windows))]
|
||||
|
@ -64,7 +64,7 @@ fn write_subs_for_module(module_id: ModuleId, filename: &str) {
|
|||
|
||||
fn write_types_for_module_dummy(output_path: &Path) {
|
||||
// write out a dummy file
|
||||
std::fs::write(output_path, &[]).unwrap();
|
||||
std::fs::write(output_path, []).unwrap();
|
||||
}
|
||||
|
||||
#[cfg(not(windows))]
|
||||
|
@ -107,7 +107,7 @@ fn write_types_for_module_real(module_id: ModuleId, filename: &str, output_path:
|
|||
let abilities = module.abilities_store;
|
||||
let solved_implementations = module.resolved_implementations;
|
||||
|
||||
let mut file = std::fs::File::create(&output_path).unwrap();
|
||||
let mut file = std::fs::File::create(output_path).unwrap();
|
||||
|
||||
let type_state = TypeState {
|
||||
subs,
|
||||
|
|
|
@ -6,7 +6,7 @@ use crossbeam::thread;
|
|||
use parking_lot::Mutex;
|
||||
use roc_builtins::roc::module_source;
|
||||
use roc_can::abilities::{AbilitiesStore, PendingAbilitiesStore, ResolvedImpl};
|
||||
use roc_can::constraint::{Constraint as ConstraintSoa, Constraints};
|
||||
use roc_can::constraint::{Constraint as ConstraintSoa, Constraints, TypeOrVar};
|
||||
use roc_can::expr::PendingDerives;
|
||||
use roc_can::expr::{Declarations, ExpectLookup};
|
||||
use roc_can::module::{
|
||||
|
@ -3100,7 +3100,7 @@ fn load_platform_module<'a>(
|
|||
) -> Result<Msg<'a>, LoadingProblem<'a>> {
|
||||
let module_start_time = Instant::now();
|
||||
let file_io_start = Instant::now();
|
||||
let file = fs::read(&filename);
|
||||
let file = fs::read(filename);
|
||||
let file_io_duration = file_io_start.elapsed();
|
||||
|
||||
match file {
|
||||
|
@ -4262,10 +4262,11 @@ fn synth_list_len_type(subs: &mut Subs) -> Variable {
|
|||
|
||||
pub fn add_imports(
|
||||
my_module: ModuleId,
|
||||
constraints: &mut Constraints,
|
||||
subs: &mut Subs,
|
||||
mut pending_abilities: PendingAbilitiesStore,
|
||||
exposed_for_module: &ExposedForModule,
|
||||
def_types: &mut Vec<(Symbol, Loc<roc_types::types::Type>)>,
|
||||
def_types: &mut Vec<(Symbol, Loc<TypeOrVar>)>,
|
||||
rigid_vars: &mut Vec<Variable>,
|
||||
) -> (Vec<Variable>, AbilitiesStore) {
|
||||
use roc_types::types::Type;
|
||||
|
@ -4294,10 +4295,11 @@ pub fn add_imports(
|
|||
};
|
||||
|
||||
let copied_import = exposed_types.storage_subs.export_variable_to($subs, variable);
|
||||
let copied_import_index = constraints.push_type(Type::Variable(copied_import.variable));
|
||||
|
||||
def_types.push((
|
||||
$symbol,
|
||||
Loc::at_zero(Type::Variable(copied_import.variable)),
|
||||
Loc::at_zero(copied_import_index),
|
||||
));
|
||||
|
||||
// not a typo; rigids are turned into flex during type inference, but when imported we must
|
||||
|
@ -4327,12 +4329,10 @@ pub fn add_imports(
|
|||
// Patch used symbols from circular dependencies.
|
||||
if my_module == ModuleId::NUM {
|
||||
// Num needs List.len, but List imports Num.
|
||||
let list_len_type = synth_list_len_type(subs);
|
||||
def_types.push((
|
||||
Symbol::LIST_LEN,
|
||||
Loc::at_zero(Type::Variable(list_len_type)),
|
||||
));
|
||||
import_variables.push(list_len_type);
|
||||
let list_len_type_var = synth_list_len_type(subs);
|
||||
let list_len_type_index = constraints.push_type(Type::Variable(list_len_type_var));
|
||||
def_types.push((Symbol::LIST_LEN, Loc::at_zero(list_len_type_index)));
|
||||
import_variables.push(list_len_type_var);
|
||||
}
|
||||
|
||||
// Fill in the implementation information of the abilities from the modules we import, which we
|
||||
|
@ -4418,12 +4418,13 @@ fn run_solve_solve(
|
|||
} = module;
|
||||
|
||||
let mut rigid_vars: Vec<Variable> = Vec::new();
|
||||
let mut def_types: Vec<(Symbol, Loc<roc_types::types::Type>)> = Vec::new();
|
||||
let mut def_types: Vec<(Symbol, Loc<TypeOrVar>)> = Vec::new();
|
||||
|
||||
let mut subs = Subs::new_from_varstore(var_store);
|
||||
|
||||
let (import_variables, abilities_store) = add_imports(
|
||||
module.module_id,
|
||||
&mut constraints,
|
||||
&mut subs,
|
||||
pending_abilities,
|
||||
&exposed_for_module,
|
||||
|
|
|
@ -70,7 +70,7 @@ pub enum ArgSide {
|
|||
Right,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq)]
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
|
||||
pub enum Associativity {
|
||||
/// left-associative operators:
|
||||
///
|
||||
|
|
|
@ -44,7 +44,7 @@ const SYMBOL_HAS_NICHE: () =
|
|||
// register_debug_idents calls (which should be made in debug mode).
|
||||
// Set it to false if you want to see the raw ModuleId and IdentId ints,
|
||||
// but please set it back to true before checking in the result!
|
||||
#[cfg(debug_assertions)]
|
||||
#[cfg(any(debug_assertions, feature = "debug-symbols"))]
|
||||
const PRETTY_PRINT_DEBUG_SYMBOLS: bool = true;
|
||||
|
||||
pub const DERIVABLE_ABILITIES: &[(Symbol, &[Symbol])] = &[
|
||||
|
@ -183,7 +183,7 @@ impl Symbol {
|
|||
///
|
||||
/// `Foo.bar`
|
||||
impl fmt::Debug for Symbol {
|
||||
#[cfg(debug_assertions)]
|
||||
#[cfg(any(debug_assertions, feature = "debug-symbols"))]
|
||||
#[allow(clippy::print_in_format_impl)]
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
if PRETTY_PRINT_DEBUG_SYMBOLS {
|
||||
|
@ -216,7 +216,7 @@ impl fmt::Debug for Symbol {
|
|||
}
|
||||
}
|
||||
|
||||
#[cfg(not(debug_assertions))]
|
||||
#[cfg(not(any(debug_assertions, feature = "debug-symbols")))]
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
fallback_debug_fmt(*self, f)
|
||||
}
|
||||
|
@ -256,7 +256,7 @@ fn fallback_debug_fmt(symbol: Symbol, f: &mut fmt::Formatter) -> fmt::Result {
|
|||
// end up using it in release builds anyway. Right? ...Right?
|
||||
lazy_static! {}
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
#[cfg(any(debug_assertions, feature = "debug-symbols"))]
|
||||
lazy_static! {
|
||||
/// This is used in Debug builds only, to let us have a Debug instance
|
||||
/// which displays not only the Module ID, but also the Module Name which
|
||||
|
@ -399,7 +399,7 @@ impl fmt::Debug for ModuleId {
|
|||
/// needs a global mutex, so we don't do this in release builds. This means
|
||||
/// the Debug impl in release builds only shows the number, not the name (which
|
||||
/// it does not have available, due to having never stored it in the mutexed intern table.)
|
||||
#[cfg(debug_assertions)]
|
||||
#[cfg(any(debug_assertions, feature = "debug-symbols"))]
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
// Originally, this printed both name and numeric ID, but the numeric ID
|
||||
// didn't seem to add anything useful. Feel free to temporarily re-add it
|
||||
|
@ -425,7 +425,7 @@ impl fmt::Debug for ModuleId {
|
|||
}
|
||||
|
||||
/// In release builds, all we have access to is the number, so only display that.
|
||||
#[cfg(not(debug_assertions))]
|
||||
#[cfg(not(any(debug_assertions, feature = "debug-symbols")))]
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
self.0.fmt(f)
|
||||
}
|
||||
|
@ -470,7 +470,7 @@ impl<'a> PackageModuleIds<'a> {
|
|||
// didn't find it, so we'll add it
|
||||
let module_id = ModuleId::from_zero_indexed(self.by_id.len());
|
||||
self.by_id.push(module_name.clone());
|
||||
if cfg!(debug_assertions) {
|
||||
if cfg!(any(debug_assertions, feature = "debug-symbols")) {
|
||||
Self::insert_debug_name(module_id, module_name);
|
||||
}
|
||||
|
||||
|
@ -487,7 +487,7 @@ impl<'a> PackageModuleIds<'a> {
|
|||
ModuleIds { by_id }
|
||||
}
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
#[cfg(any(debug_assertions, feature = "debug-symbols"))]
|
||||
fn insert_debug_name(module_id: ModuleId, module_name: &PQModuleName) {
|
||||
let mut names = DEBUG_MODULE_ID_NAMES.lock().expect("Failed to acquire lock for Debug interning into DEBUG_MODULE_ID_NAMES, presumably because a thread panicked.");
|
||||
|
||||
|
@ -503,7 +503,7 @@ impl<'a> PackageModuleIds<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
#[cfg(not(debug_assertions))]
|
||||
#[cfg(not(any(debug_assertions, feature = "debug-symbols")))]
|
||||
fn insert_debug_name(_module_id: ModuleId, _module_name: &PQModuleName) {
|
||||
// By design, this is a no-op in release builds!
|
||||
}
|
||||
|
@ -557,14 +557,14 @@ impl ModuleIds {
|
|||
// didn't find it, so we'll add it
|
||||
let module_id = ModuleId::from_zero_indexed(self.by_id.len());
|
||||
self.by_id.push(module_name.clone());
|
||||
if cfg!(debug_assertions) {
|
||||
if cfg!(any(debug_assertions, feature = "debug-symbols")) {
|
||||
Self::insert_debug_name(module_id, module_name);
|
||||
}
|
||||
|
||||
module_id
|
||||
}
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
#[cfg(any(debug_assertions, feature = "debug-symbols"))]
|
||||
fn insert_debug_name(module_id: ModuleId, module_name: &ModuleName) {
|
||||
let mut names = DEBUG_MODULE_ID_NAMES.lock().expect("Failed to acquire lock for Debug interning into DEBUG_MODULE_ID_NAMES, presumably because a thread panicked.");
|
||||
|
||||
|
@ -574,7 +574,7 @@ impl ModuleIds {
|
|||
}
|
||||
}
|
||||
|
||||
#[cfg(not(debug_assertions))]
|
||||
#[cfg(not(any(debug_assertions, feature = "debug-symbols")))]
|
||||
fn insert_debug_name(_module_id: ModuleId, _module_name: &ModuleName) {
|
||||
// By design, this is a no-op in release builds!
|
||||
}
|
||||
|
@ -868,7 +868,7 @@ macro_rules! define_builtins {
|
|||
IdentIds{ interner }
|
||||
};
|
||||
|
||||
if cfg!(debug_assertions) {
|
||||
if cfg!(any(debug_assertions, feature = "debug-symbols")) {
|
||||
let name = PQModuleName::Unqualified($module_name.into());
|
||||
PackageModuleIds::insert_debug_name(module_id, &name);
|
||||
module_id.register_debug_idents(&ident_ids);
|
||||
|
@ -910,7 +910,7 @@ macro_rules! define_builtins {
|
|||
let mut insert_both = |id: ModuleId, name_str: &'static str| {
|
||||
let name: ModuleName = name_str.into();
|
||||
|
||||
if cfg!(debug_assertions) {
|
||||
if cfg!(any(debug_assertions, feature = "debug-symbols")) {
|
||||
Self::insert_debug_name(id, &name);
|
||||
}
|
||||
|
||||
|
@ -936,7 +936,7 @@ macro_rules! define_builtins {
|
|||
let raw_name: IdentStr = name_str.into();
|
||||
let name = PQModuleName::Unqualified(raw_name.into());
|
||||
|
||||
if cfg!(debug_assertions) {
|
||||
if cfg!(any(debug_assertions, feature = "debug-symbols")) {
|
||||
Self::insert_debug_name(id, &name);
|
||||
}
|
||||
|
||||
|
|
|
@ -471,7 +471,7 @@ impl<'a> CodeGenHelp<'a> {
|
|||
) -> (bool, Vec<'a, Option<usize>>) {
|
||||
use UnionLayout::*;
|
||||
match union {
|
||||
NonRecursive(_) => return (false, bumpalo::vec![in self.arena]),
|
||||
NonRecursive(_) => (false, bumpalo::vec![in self.arena]),
|
||||
|
||||
Recursive(tags) => self.union_tail_recursion_fields_help(tags),
|
||||
|
||||
|
|
|
@ -1,11 +1,12 @@
|
|||
use crate::ir::{
|
||||
BranchInfo, DestructType, Env, Expr, JoinPointId, Literal, Param, Pattern, Procs, Stmt,
|
||||
build_list_index_probe, BranchInfo, Call, CallType, DestructType, Env, Expr, JoinPointId,
|
||||
ListIndex, Literal, Param, Pattern, Procs, Stmt,
|
||||
};
|
||||
use crate::layout::{Builtin, Layout, LayoutCache, TagIdIntType, UnionLayout};
|
||||
use roc_builtins::bitcode::{FloatWidth, IntWidth};
|
||||
use roc_collections::all::{MutMap, MutSet};
|
||||
use roc_error_macros::internal_error;
|
||||
use roc_exhaustive::{Ctor, CtorName, RenderAs, TagId, Union};
|
||||
use roc_exhaustive::{Ctor, CtorName, ListArity, RenderAs, TagId, Union};
|
||||
use roc_module::ident::TagName;
|
||||
use roc_module::low_level::LowLevel;
|
||||
use roc_module::symbol::Symbol;
|
||||
|
@ -77,6 +78,12 @@ enum GuardedTest<'a> {
|
|||
Placeholder,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Hash)]
|
||||
enum ListLenBound {
|
||||
Exact,
|
||||
AtLeast,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
#[allow(clippy::enum_variant_names)]
|
||||
enum Test<'a> {
|
||||
|
@ -95,6 +102,10 @@ enum Test<'a> {
|
|||
tag_id: TagIdIntType,
|
||||
num_alts: usize,
|
||||
},
|
||||
IsListLen {
|
||||
bound: ListLenBound,
|
||||
len: u64,
|
||||
},
|
||||
}
|
||||
|
||||
impl<'a> Test<'a> {
|
||||
|
@ -110,6 +121,10 @@ impl<'a> Test<'a> {
|
|||
Test::IsStr(_) => false,
|
||||
Test::IsBit(_) => true,
|
||||
Test::IsByte { .. } => true,
|
||||
Test::IsListLen { bound, .. } => match bound {
|
||||
ListLenBound::Exact => true,
|
||||
ListLenBound::AtLeast => false,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -149,10 +164,13 @@ impl<'a> Hash for Test<'a> {
|
|||
num_alts.hash(state);
|
||||
}
|
||||
IsDecimal(v) => {
|
||||
// TODO: Is this okay?
|
||||
state.write_u8(6);
|
||||
v.hash(state);
|
||||
}
|
||||
IsListLen { len, bound } => {
|
||||
state.write_u8(7);
|
||||
(len, bound).hash(state);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -331,6 +349,11 @@ fn tests_are_complete_help(last_test: &Test, number_of_tests: usize) -> bool {
|
|||
Test::IsFloat(_, _) => false,
|
||||
Test::IsDecimal(_) => false,
|
||||
Test::IsStr(_) => false,
|
||||
Test::IsListLen {
|
||||
bound: ListLenBound::AtLeast,
|
||||
len: 0,
|
||||
} => true, // [..] test
|
||||
Test::IsListLen { .. } => false,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -578,6 +601,18 @@ fn test_at_path<'a>(
|
|||
arguments: arguments.to_vec(),
|
||||
},
|
||||
|
||||
List {
|
||||
arity,
|
||||
element_layout: _,
|
||||
elements: _,
|
||||
} => IsListLen {
|
||||
bound: match arity {
|
||||
ListArity::Exact(_) => ListLenBound::Exact,
|
||||
ListArity::Slice(_, _) => ListLenBound::AtLeast,
|
||||
},
|
||||
len: arity.min_len() as _,
|
||||
},
|
||||
|
||||
Voided { .. } => internal_error!("unreachable"),
|
||||
|
||||
OpaqueUnwrap { opaque, argument } => {
|
||||
|
@ -755,6 +790,37 @@ fn to_relevant_branch_help<'a>(
|
|||
_ => None,
|
||||
},
|
||||
|
||||
List {
|
||||
arity: my_arity,
|
||||
elements,
|
||||
element_layout: _,
|
||||
} => match test {
|
||||
IsListLen { bound: _, len } if my_arity.covers_length(*len as _) => {
|
||||
let sub_positions = elements.into_iter().enumerate().map(|(index, elem_pat)| {
|
||||
let mut new_path = path.to_vec();
|
||||
|
||||
let probe_index = ListIndex::from_pattern_index(index, my_arity);
|
||||
|
||||
let next_instr = PathInstruction::ListIndex {
|
||||
index: probe_index as _,
|
||||
};
|
||||
new_path.push(next_instr);
|
||||
|
||||
(new_path, elem_pat)
|
||||
});
|
||||
start.extend(sub_positions);
|
||||
start.extend(end);
|
||||
|
||||
Some(Branch {
|
||||
goal: branch.goal,
|
||||
guard: branch.guard.clone(),
|
||||
patterns: start,
|
||||
})
|
||||
}
|
||||
|
||||
_ => None,
|
||||
},
|
||||
|
||||
NewtypeDestructure {
|
||||
tag_name,
|
||||
arguments,
|
||||
|
@ -1021,7 +1087,8 @@ fn needs_tests(pattern: &Pattern) -> bool {
|
|||
| IntLiteral(_, _)
|
||||
| FloatLiteral(_, _)
|
||||
| DecimalLiteral(_)
|
||||
| StrLiteral(_) => true,
|
||||
| StrLiteral(_)
|
||||
| List { .. } => true,
|
||||
|
||||
Voided { .. } => internal_error!("unreachable"),
|
||||
}
|
||||
|
@ -1268,6 +1335,7 @@ pub fn optimize_when<'a>(
|
|||
enum PathInstruction {
|
||||
NewType,
|
||||
TagIndex { index: u64, tag_id: TagIdIntType },
|
||||
ListIndex { index: ListIndex },
|
||||
}
|
||||
|
||||
fn path_to_expr_help<'a>(
|
||||
|
@ -1337,19 +1405,46 @@ fn path_to_expr_help<'a>(
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
PathInstruction::ListIndex { index } => {
|
||||
let list_sym = symbol;
|
||||
|
||||
match layout {
|
||||
Layout::Builtin(Builtin::List(elem_layout)) => {
|
||||
let (index_sym, new_stores) = build_list_index_probe(env, list_sym, index);
|
||||
|
||||
stores.extend(new_stores);
|
||||
|
||||
let load_sym = env.unique_symbol();
|
||||
let load_expr = Expr::Call(Call {
|
||||
call_type: CallType::LowLevel {
|
||||
op: LowLevel::ListGetUnsafe,
|
||||
update_mode: env.next_update_mode_id(),
|
||||
},
|
||||
arguments: env.arena.alloc([list_sym, index_sym]),
|
||||
});
|
||||
|
||||
stores.push((load_sym, *elem_layout, load_expr));
|
||||
|
||||
layout = *elem_layout;
|
||||
symbol = load_sym;
|
||||
}
|
||||
_ => internal_error!("not a list"),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
(symbol, stores, layout)
|
||||
}
|
||||
|
||||
fn test_to_equality<'a>(
|
||||
fn test_to_comparison<'a>(
|
||||
env: &mut Env<'a, '_>,
|
||||
cond_symbol: Symbol,
|
||||
cond_layout: &Layout<'a>,
|
||||
path: &[PathInstruction],
|
||||
test: Test<'a>,
|
||||
) -> (StoresVec<'a>, Symbol, Symbol, Option<ConstructorKnown<'a>>) {
|
||||
) -> (StoresVec<'a>, Comparison, Option<ConstructorKnown<'a>>) {
|
||||
let (rhs_symbol, mut stores, test_layout) =
|
||||
path_to_expr_help(env, cond_symbol, path, *cond_layout);
|
||||
|
||||
|
@ -1379,8 +1474,7 @@ fn test_to_equality<'a>(
|
|||
|
||||
(
|
||||
stores,
|
||||
lhs_symbol,
|
||||
rhs_symbol,
|
||||
(lhs_symbol, Comparator::Eq, rhs_symbol),
|
||||
Some(ConstructorKnown::OnlyPass {
|
||||
scrutinee: path_symbol,
|
||||
layout: *cond_layout,
|
||||
|
@ -1397,7 +1491,7 @@ fn test_to_equality<'a>(
|
|||
let lhs_symbol = env.unique_symbol();
|
||||
stores.push((lhs_symbol, Layout::int_width(precision), lhs));
|
||||
|
||||
(stores, lhs_symbol, rhs_symbol, None)
|
||||
(stores, (lhs_symbol, Comparator::Eq, rhs_symbol), None)
|
||||
}
|
||||
|
||||
Test::IsFloat(test_int, precision) => {
|
||||
|
@ -1407,7 +1501,7 @@ fn test_to_equality<'a>(
|
|||
let lhs_symbol = env.unique_symbol();
|
||||
stores.push((lhs_symbol, Layout::float_width(precision), lhs));
|
||||
|
||||
(stores, lhs_symbol, rhs_symbol, None)
|
||||
(stores, (lhs_symbol, Comparator::Eq, rhs_symbol), None)
|
||||
}
|
||||
|
||||
Test::IsDecimal(test_dec) => {
|
||||
|
@ -1415,7 +1509,7 @@ fn test_to_equality<'a>(
|
|||
let lhs_symbol = env.unique_symbol();
|
||||
stores.push((lhs_symbol, *cond_layout, lhs));
|
||||
|
||||
(stores, lhs_symbol, rhs_symbol, None)
|
||||
(stores, (lhs_symbol, Comparator::Eq, rhs_symbol), None)
|
||||
}
|
||||
|
||||
Test::IsByte {
|
||||
|
@ -1427,7 +1521,7 @@ fn test_to_equality<'a>(
|
|||
let lhs_symbol = env.unique_symbol();
|
||||
stores.push((lhs_symbol, Layout::u8(), lhs));
|
||||
|
||||
(stores, lhs_symbol, rhs_symbol, None)
|
||||
(stores, (lhs_symbol, Comparator::Eq, rhs_symbol), None)
|
||||
}
|
||||
|
||||
Test::IsBit(test_bit) => {
|
||||
|
@ -1435,7 +1529,7 @@ fn test_to_equality<'a>(
|
|||
let lhs_symbol = env.unique_symbol();
|
||||
stores.push((lhs_symbol, Layout::Builtin(Builtin::Bool), lhs));
|
||||
|
||||
(stores, lhs_symbol, rhs_symbol, None)
|
||||
(stores, (lhs_symbol, Comparator::Eq, rhs_symbol), None)
|
||||
}
|
||||
|
||||
Test::IsStr(test_str) => {
|
||||
|
@ -1444,15 +1538,58 @@ fn test_to_equality<'a>(
|
|||
|
||||
stores.push((lhs_symbol, Layout::Builtin(Builtin::Str), lhs));
|
||||
|
||||
(stores, lhs_symbol, rhs_symbol, None)
|
||||
(stores, (lhs_symbol, Comparator::Eq, rhs_symbol), None)
|
||||
}
|
||||
|
||||
Test::IsListLen { bound, len } => {
|
||||
let list_layout = test_layout;
|
||||
let list_sym = rhs_symbol;
|
||||
|
||||
match list_layout {
|
||||
Layout::Builtin(Builtin::List(_elem_layout)) => {
|
||||
let real_len_expr = Expr::Call(Call {
|
||||
call_type: CallType::LowLevel {
|
||||
op: LowLevel::ListLen,
|
||||
update_mode: env.next_update_mode_id(),
|
||||
},
|
||||
arguments: env.arena.alloc([list_sym]),
|
||||
});
|
||||
let test_len_expr = Expr::Literal(Literal::Int((len as i128).to_ne_bytes()));
|
||||
|
||||
let real_len = env.unique_symbol();
|
||||
let test_len = env.unique_symbol();
|
||||
|
||||
let usize_layout = Layout::usize(env.target_info);
|
||||
|
||||
stores.push((real_len, usize_layout, real_len_expr));
|
||||
stores.push((test_len, usize_layout, test_len_expr));
|
||||
|
||||
let comparison = match bound {
|
||||
ListLenBound::Exact => (real_len, Comparator::Eq, test_len),
|
||||
ListLenBound::AtLeast => (real_len, Comparator::Geq, test_len),
|
||||
};
|
||||
|
||||
(stores, comparison, None)
|
||||
}
|
||||
_ => internal_error!(
|
||||
"test path is not a list: {:#?}",
|
||||
(cond_layout, test_layout, path)
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
enum Comparator {
|
||||
Eq,
|
||||
Geq,
|
||||
}
|
||||
|
||||
type Comparison = (Symbol, Comparator, Symbol);
|
||||
|
||||
type Tests<'a> = std::vec::Vec<(
|
||||
bumpalo::collections::Vec<'a, (Symbol, Layout<'a>, Expr<'a>)>,
|
||||
Symbol,
|
||||
Symbol,
|
||||
Comparison,
|
||||
Option<ConstructorKnown<'a>>,
|
||||
)>;
|
||||
|
||||
|
@ -1466,17 +1603,25 @@ fn stores_and_condition<'a>(
|
|||
|
||||
// Assumption: there is at most 1 guard, and it is the outer layer.
|
||||
for (path, test) in test_chain {
|
||||
tests.push(test_to_equality(env, cond_symbol, cond_layout, &path, test))
|
||||
tests.push(test_to_comparison(
|
||||
env,
|
||||
cond_symbol,
|
||||
cond_layout,
|
||||
&path,
|
||||
test,
|
||||
))
|
||||
}
|
||||
|
||||
tests
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn compile_test<'a>(
|
||||
env: &mut Env<'a, '_>,
|
||||
ret_layout: Layout<'a>,
|
||||
stores: bumpalo::collections::Vec<'a, (Symbol, Layout<'a>, Expr<'a>)>,
|
||||
lhs: Symbol,
|
||||
cmp: Comparator,
|
||||
rhs: Symbol,
|
||||
fail: &'a Stmt<'a>,
|
||||
cond: Stmt<'a>,
|
||||
|
@ -1487,6 +1632,7 @@ fn compile_test<'a>(
|
|||
ret_layout,
|
||||
stores,
|
||||
lhs,
|
||||
cmp,
|
||||
rhs,
|
||||
fail,
|
||||
cond,
|
||||
|
@ -1500,6 +1646,7 @@ fn compile_test_help<'a>(
|
|||
ret_layout: Layout<'a>,
|
||||
stores: bumpalo::collections::Vec<'a, (Symbol, Layout<'a>, Expr<'a>)>,
|
||||
lhs: Symbol,
|
||||
cmp: Comparator,
|
||||
rhs: Symbol,
|
||||
fail: &'a Stmt<'a>,
|
||||
mut cond: Stmt<'a>,
|
||||
|
@ -1560,7 +1707,10 @@ fn compile_test_help<'a>(
|
|||
default_branch,
|
||||
};
|
||||
|
||||
let op = LowLevel::Eq;
|
||||
let op = match cmp {
|
||||
Comparator::Eq => LowLevel::Eq,
|
||||
Comparator::Geq => LowLevel::NumGte,
|
||||
};
|
||||
let test = Expr::Call(crate::ir::Call {
|
||||
call_type: crate::ir::CallType::LowLevel {
|
||||
op,
|
||||
|
@ -1592,13 +1742,15 @@ fn compile_tests<'a>(
|
|||
fail: &'a Stmt<'a>,
|
||||
mut cond: Stmt<'a>,
|
||||
) -> Stmt<'a> {
|
||||
for (new_stores, lhs, rhs, opt_constructor_info) in tests.into_iter() {
|
||||
for (new_stores, (lhs, cmp, rhs), opt_constructor_info) in tests.into_iter() {
|
||||
match opt_constructor_info {
|
||||
None => {
|
||||
cond = compile_test(env, ret_layout, new_stores, lhs, rhs, fail, cond);
|
||||
cond = compile_test(env, ret_layout, new_stores, lhs, cmp, rhs, fail, cond);
|
||||
}
|
||||
Some(cinfo) => {
|
||||
cond = compile_test_help(env, cinfo, ret_layout, new_stores, lhs, rhs, fail, cond);
|
||||
cond = compile_test_help(
|
||||
env, cinfo, ret_layout, new_stores, lhs, cmp, rhs, fail, cond,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1781,7 +1933,7 @@ fn decide_to_branching<'a>(
|
|||
if number_of_tests == 1 {
|
||||
// if there is just one test, compile to a simple if-then-else
|
||||
|
||||
let (new_stores, lhs, rhs, _cinfo) = tests.into_iter().next().unwrap();
|
||||
let (new_stores, (lhs, cmp, rhs), _cinfo) = tests.into_iter().next().unwrap();
|
||||
|
||||
compile_test_help(
|
||||
env,
|
||||
|
@ -1789,6 +1941,7 @@ fn decide_to_branching<'a>(
|
|||
ret_layout,
|
||||
new_stores,
|
||||
lhs,
|
||||
cmp,
|
||||
rhs,
|
||||
fail,
|
||||
pass_expr,
|
||||
|
@ -1854,6 +2007,12 @@ fn decide_to_branching<'a>(
|
|||
Test::IsBit(v) => v as u64,
|
||||
Test::IsByte { tag_id, .. } => tag_id as u64,
|
||||
Test::IsCtor { tag_id, .. } => tag_id as u64,
|
||||
Test::IsListLen { len, bound } => match bound {
|
||||
ListLenBound::Exact => len as _,
|
||||
ListLenBound::AtLeast => {
|
||||
unreachable!("at-least bounds cannot be switched on")
|
||||
}
|
||||
},
|
||||
Test::IsDecimal(_) => unreachable!("decimals cannot be switched on"),
|
||||
Test::IsStr(_) => unreachable!("strings cannot be switched on"),
|
||||
};
|
||||
|
@ -1911,6 +2070,31 @@ fn decide_to_branching<'a>(
|
|||
union_layout.tag_id_layout(),
|
||||
env.arena.alloc(temp),
|
||||
)
|
||||
} else if let Layout::Builtin(Builtin::List(_)) = inner_cond_layout {
|
||||
let len_symbol = env.unique_symbol();
|
||||
|
||||
let switch = Stmt::Switch {
|
||||
cond_layout: Layout::usize(env.target_info),
|
||||
cond_symbol: len_symbol,
|
||||
branches: branches.into_bump_slice(),
|
||||
default_branch: (default_branch_info, env.arena.alloc(default_branch)),
|
||||
ret_layout,
|
||||
};
|
||||
|
||||
let len_expr = Expr::Call(Call {
|
||||
call_type: CallType::LowLevel {
|
||||
op: LowLevel::ListLen,
|
||||
update_mode: env.next_update_mode_id(),
|
||||
},
|
||||
arguments: env.arena.alloc([inner_cond_symbol]),
|
||||
});
|
||||
|
||||
Stmt::Let(
|
||||
len_symbol,
|
||||
len_expr,
|
||||
Layout::usize(env.target_info),
|
||||
env.arena.alloc(switch),
|
||||
)
|
||||
} else {
|
||||
Stmt::Switch {
|
||||
cond_layout: inner_cond_layout,
|
||||
|
|
|
@ -21,7 +21,7 @@ use roc_debug_flags::{
|
|||
};
|
||||
use roc_derive::SharedDerivedModule;
|
||||
use roc_error_macros::{internal_error, todo_abilities};
|
||||
use roc_exhaustive::{Ctor, CtorName, RenderAs, TagId};
|
||||
use roc_exhaustive::{Ctor, CtorName, ListArity, RenderAs, TagId};
|
||||
use roc_intern::Interner;
|
||||
use roc_late_solve::storage::{ExternalModuleStorage, ExternalModuleStorageSnapshot};
|
||||
use roc_late_solve::{resolve_ability_specialization, AbilitiesView, Resolved, UnificationFailed};
|
||||
|
@ -284,7 +284,7 @@ impl AbilityAliases {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq)]
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
pub enum CapturedSymbols<'a> {
|
||||
None,
|
||||
Captured(&'a [(Symbol, Variable)]),
|
||||
|
@ -317,7 +317,7 @@ pub struct Proc<'a> {
|
|||
pub host_exposed_layouts: HostExposedLayouts<'a>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub enum HostExposedLayouts<'a> {
|
||||
NotHostExposed,
|
||||
HostExposed {
|
||||
|
@ -326,13 +326,13 @@ pub enum HostExposedLayouts<'a> {
|
|||
},
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub enum SelfRecursive {
|
||||
NotSelfRecursive,
|
||||
SelfRecursive(JoinPointId),
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq)]
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
pub enum Parens {
|
||||
NotNeeded,
|
||||
InTypeParam,
|
||||
|
@ -847,6 +847,38 @@ struct SpecializationMark<'a> {
|
|||
function_mark: Option<RawFunctionLayout<'a>>,
|
||||
}
|
||||
|
||||
/// The deepest closure in the current stack of procedures under specialization a symbol specialization
|
||||
/// was used in.
|
||||
///
|
||||
/// This is necessary to understand what symbol specializations are used in what capture sets. For
|
||||
/// example, consider
|
||||
///
|
||||
/// main =
|
||||
/// x = 1
|
||||
///
|
||||
/// y = \{} -> 1u8 + x
|
||||
/// z = \{} -> 1u16 + x
|
||||
///
|
||||
/// Here, we have a two specializations of `x` to U8 and U16 with deepest uses of
|
||||
/// (2, y) and (2, z), respectively. This tells us that both of those specializations must be
|
||||
/// preserved by `main` (which is at depth 1), but that `y` and `z` respectively only need to
|
||||
/// capture one particular specialization of `x` each.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
struct UseDepth {
|
||||
depth: usize,
|
||||
symbol: Symbol,
|
||||
}
|
||||
|
||||
impl UseDepth {
|
||||
fn is_nested_use_in(&self, outer: &Self) -> bool {
|
||||
if self.symbol == outer.symbol {
|
||||
debug_assert!(self.depth == outer.depth);
|
||||
return true;
|
||||
}
|
||||
self.depth > outer.depth
|
||||
}
|
||||
}
|
||||
|
||||
/// When walking a function body, we may encounter specialized usages of polymorphic symbols. For
|
||||
/// example
|
||||
///
|
||||
|
@ -865,73 +897,10 @@ struct SymbolSpecializations<'a>(
|
|||
// 2. the number of specializations of a symbol in a def is even smaller (almost always only one)
|
||||
// So, a linear VecMap is preferrable. Use a two-layered one to make (1) extraction of defs easy
|
||||
// and (2) reads of a certain symbol be determined by its first occurrence, not its last.
|
||||
VecMap<Symbol, VecMap<SpecializationMark<'a>, (Variable, Symbol)>>,
|
||||
VecMap<Symbol, VecMap<SpecializationMark<'a>, (Variable, Symbol, UseDepth)>>,
|
||||
);
|
||||
|
||||
impl<'a> SymbolSpecializations<'a> {
|
||||
/// Gets a specialization for a symbol, or creates a new one.
|
||||
#[inline(always)]
|
||||
fn get_or_insert(
|
||||
&mut self,
|
||||
env: &mut Env<'a, '_>,
|
||||
layout_cache: &mut LayoutCache<'a>,
|
||||
symbol: Symbol,
|
||||
specialization_var: Variable,
|
||||
) -> Symbol {
|
||||
let arena = env.arena;
|
||||
let subs: &Subs = env.subs;
|
||||
|
||||
let layout = match layout_cache.from_var(arena, specialization_var, subs) {
|
||||
Ok(layout) => layout,
|
||||
// This can happen when the def symbol has a type error. In such cases just use the
|
||||
// def symbol, which is erroring.
|
||||
Err(_) => return symbol,
|
||||
};
|
||||
|
||||
let is_closure = matches!(
|
||||
subs.get_content_without_compacting(specialization_var),
|
||||
Content::Structure(FlatType::Func(..))
|
||||
);
|
||||
let function_mark = if is_closure {
|
||||
let fn_layout = match layout_cache.raw_from_var(arena, specialization_var, subs) {
|
||||
Ok(layout) => layout,
|
||||
// This can happen when the def symbol has a type error. In such cases just use the
|
||||
// def symbol, which is erroring.
|
||||
Err(_) => return symbol,
|
||||
};
|
||||
Some(fn_layout)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let specialization_mark = SpecializationMark {
|
||||
layout,
|
||||
function_mark,
|
||||
};
|
||||
|
||||
let symbol_specializations = self.0.get_or_insert(symbol, Default::default);
|
||||
|
||||
// For the first specialization, always reuse the current symbol. The vast majority of defs
|
||||
// only have one instance type, so this preserves readability of the IR.
|
||||
// TODO: turn me off and see what breaks.
|
||||
let needs_fresh_symbol = !symbol_specializations.is_empty();
|
||||
|
||||
let mut make_specialized_symbol = || {
|
||||
if needs_fresh_symbol {
|
||||
env.unique_symbol()
|
||||
} else {
|
||||
symbol
|
||||
}
|
||||
};
|
||||
|
||||
let (_var, specialized_symbol) = symbol_specializations
|
||||
.get_or_insert(specialization_mark, || {
|
||||
(specialization_var, make_specialized_symbol())
|
||||
});
|
||||
|
||||
*specialized_symbol
|
||||
}
|
||||
|
||||
/// Inserts a known specialization for a symbol. Returns the overwritten specialization, if any.
|
||||
pub fn get_or_insert_known(
|
||||
&mut self,
|
||||
|
@ -939,17 +908,19 @@ impl<'a> SymbolSpecializations<'a> {
|
|||
mark: SpecializationMark<'a>,
|
||||
specialization_var: Variable,
|
||||
specialization_symbol: Symbol,
|
||||
) -> Option<(Variable, Symbol)> {
|
||||
self.0
|
||||
.get_or_insert(symbol, Default::default)
|
||||
.insert(mark, (specialization_var, specialization_symbol))
|
||||
deepest_use: UseDepth,
|
||||
) -> Option<(Variable, Symbol, UseDepth)> {
|
||||
self.0.get_or_insert(symbol, Default::default).insert(
|
||||
mark,
|
||||
(specialization_var, specialization_symbol, deepest_use),
|
||||
)
|
||||
}
|
||||
|
||||
/// Removes all specializations for a symbol, returning the type and symbol of each specialization.
|
||||
pub fn remove(
|
||||
&mut self,
|
||||
symbol: Symbol,
|
||||
) -> impl ExactSizeIterator<Item = (SpecializationMark<'a>, (Variable, Symbol))> {
|
||||
) -> impl ExactSizeIterator<Item = (SpecializationMark<'a>, (Variable, Symbol, UseDepth))> {
|
||||
self.0
|
||||
.remove(&symbol)
|
||||
.map(|(_, specializations)| specializations)
|
||||
|
@ -969,7 +940,7 @@ impl<'a> SymbolSpecializations<'a> {
|
|||
symbol
|
||||
);
|
||||
|
||||
specializations.next().map(|(_, (_, symbol))| symbol)
|
||||
specializations.next().map(|(_, (_, symbol, _))| symbol)
|
||||
}
|
||||
|
||||
pub fn is_empty(&self) -> bool {
|
||||
|
@ -987,6 +958,20 @@ pub struct ProcsBase<'a> {
|
|||
pub imported_module_thunks: &'a [Symbol],
|
||||
}
|
||||
|
||||
/// The current set of functions under specialization. They form a stack where the latest
|
||||
/// specialization to be seen is at the head of the stack.
|
||||
#[derive(Clone, Debug)]
|
||||
struct SpecializationStack<'a>(Vec<'a, Symbol>);
|
||||
|
||||
impl<'a> SpecializationStack<'a> {
|
||||
fn current_use_depth(&self) -> UseDepth {
|
||||
UseDepth {
|
||||
depth: self.0.len(),
|
||||
symbol: *self.0.last().unwrap(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct Procs<'a> {
|
||||
pub partial_procs: PartialProcs<'a>,
|
||||
|
@ -998,8 +983,7 @@ pub struct Procs<'a> {
|
|||
pub runtime_errors: BumpMap<Symbol, &'a str>,
|
||||
pub externals_we_need: BumpMap<ModuleId, ExternalSpecializations<'a>>,
|
||||
symbol_specializations: SymbolSpecializations<'a>,
|
||||
/// The current set of functions under specialization.
|
||||
pub specialization_stack: Vec<'a, Symbol>,
|
||||
specialization_stack: SpecializationStack<'a>,
|
||||
}
|
||||
|
||||
impl<'a> Procs<'a> {
|
||||
|
@ -1014,17 +998,18 @@ impl<'a> Procs<'a> {
|
|||
runtime_errors: BumpMap::new_in(arena),
|
||||
externals_we_need: BumpMap::new_in(arena),
|
||||
symbol_specializations: Default::default(),
|
||||
specialization_stack: Vec::with_capacity_in(16, arena),
|
||||
specialization_stack: SpecializationStack(Vec::with_capacity_in(16, arena)),
|
||||
}
|
||||
}
|
||||
|
||||
fn push_active_specialization(&mut self, specialization: Symbol) {
|
||||
self.specialization_stack.push(specialization);
|
||||
self.specialization_stack.0.push(specialization);
|
||||
}
|
||||
|
||||
fn pop_active_specialization(&mut self, specialization: Symbol) {
|
||||
let popped = self
|
||||
.specialization_stack
|
||||
.0
|
||||
.pop()
|
||||
.expect("specialization stack is empty");
|
||||
debug_assert_eq!(
|
||||
|
@ -1049,7 +1034,7 @@ impl<'a> Procs<'a> {
|
|||
/// specialize both `foo : Str False -> Str` and `foo : {} False -> Str` at the same time, so
|
||||
/// the latter specialization must be deferred.
|
||||
fn symbol_needs_suspended_specialization(&self, specialization: Symbol) -> bool {
|
||||
self.specialization_stack.contains(&specialization)
|
||||
self.specialization_stack.0.contains(&specialization)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1368,6 +1353,94 @@ impl<'a> Procs<'a> {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Gets a specialization for a symbol, or creates a new one.
|
||||
#[inline(always)]
|
||||
fn get_or_insert_symbol_specialization(
|
||||
&mut self,
|
||||
env: &mut Env<'a, '_>,
|
||||
layout_cache: &mut LayoutCache<'a>,
|
||||
symbol: Symbol,
|
||||
specialization_var: Variable,
|
||||
) -> Symbol {
|
||||
let arena = env.arena;
|
||||
let subs: &Subs = env.subs;
|
||||
|
||||
let layout = match layout_cache.from_var(arena, specialization_var, subs) {
|
||||
Ok(layout) => layout,
|
||||
// This can happen when the def symbol has a type error. In such cases just use the
|
||||
// def symbol, which is erroring.
|
||||
Err(_) => return symbol,
|
||||
};
|
||||
|
||||
let is_closure = matches!(
|
||||
subs.get_content_without_compacting(specialization_var),
|
||||
Content::Structure(FlatType::Func(..))
|
||||
);
|
||||
let function_mark = if is_closure {
|
||||
let fn_layout = match layout_cache.raw_from_var(arena, specialization_var, subs) {
|
||||
Ok(layout) => layout,
|
||||
// This can happen when the def symbol has a type error. In such cases just use the
|
||||
// def symbol, which is erroring.
|
||||
Err(_) => return symbol,
|
||||
};
|
||||
Some(fn_layout)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let specialization_mark = SpecializationMark {
|
||||
layout,
|
||||
function_mark,
|
||||
};
|
||||
|
||||
let symbol_specializations = self
|
||||
.symbol_specializations
|
||||
.0
|
||||
.get_or_insert(symbol, Default::default);
|
||||
|
||||
// For the first specialization, always reuse the current symbol. The vast majority of defs
|
||||
// only have one instance type, so this preserves readability of the IR.
|
||||
// TODO: turn me off and see what breaks.
|
||||
let needs_fresh_symbol = !symbol_specializations.is_empty();
|
||||
|
||||
let mut make_specialized_symbol = || {
|
||||
if needs_fresh_symbol {
|
||||
env.unique_symbol()
|
||||
} else {
|
||||
symbol
|
||||
}
|
||||
};
|
||||
|
||||
let current_use = self.specialization_stack.current_use_depth();
|
||||
let (_var, specialized_symbol, deepest_use) = symbol_specializations
|
||||
.get_or_insert(specialization_mark, || {
|
||||
(specialization_var, make_specialized_symbol(), current_use)
|
||||
});
|
||||
|
||||
if deepest_use.is_nested_use_in(¤t_use) {
|
||||
*deepest_use = current_use;
|
||||
}
|
||||
|
||||
*specialized_symbol
|
||||
}
|
||||
|
||||
/// Get the symbol specializations used in the active specialization's body.
|
||||
pub fn get_symbol_specializations_used_in_body(
|
||||
&self,
|
||||
symbol: Symbol,
|
||||
) -> Option<impl Iterator<Item = (Variable, Symbol)> + '_> {
|
||||
let this_use = self.specialization_stack.current_use_depth();
|
||||
self.symbol_specializations.0.get(&symbol).map(move |l| {
|
||||
l.iter().filter_map(move |(_, (var, sym, deepest_use))| {
|
||||
if deepest_use.is_nested_use_in(&this_use) {
|
||||
Some((*var, *sym))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
|
@ -1502,7 +1575,7 @@ impl<'a, 'i> Env<'a, 'i> {
|
|||
#[derive(Clone, Debug, PartialEq, Copy, Eq, Hash)]
|
||||
pub struct JoinPointId(pub Symbol);
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq)]
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
pub struct Param<'a> {
|
||||
pub symbol: Symbol,
|
||||
pub borrow: bool,
|
||||
|
@ -1586,7 +1659,7 @@ pub enum Stmt<'a> {
|
|||
}
|
||||
|
||||
/// in the block below, symbol `scrutinee` is assumed be be of shape `tag_id`
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub enum BranchInfo<'a> {
|
||||
None,
|
||||
Constructor {
|
||||
|
@ -1629,7 +1702,7 @@ impl<'a> BranchInfo<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq)]
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
pub enum ModifyRc {
|
||||
/// Increment a reference count
|
||||
Inc(Symbol, u64),
|
||||
|
@ -1775,7 +1848,7 @@ impl<'a> Call<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq)]
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
pub struct CallSpecId {
|
||||
id: u32,
|
||||
}
|
||||
|
@ -1790,7 +1863,7 @@ impl CallSpecId {
|
|||
pub const BACKEND_DUMMY: Self = Self { id: 0 };
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq)]
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
pub struct UpdateModeId {
|
||||
id: u32,
|
||||
}
|
||||
|
@ -1805,7 +1878,7 @@ impl UpdateModeId {
|
|||
pub const BACKEND_DUMMY: Self = Self { id: 0 };
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq)]
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
pub struct UpdateModeIds {
|
||||
next: u32,
|
||||
}
|
||||
|
@ -1822,7 +1895,7 @@ impl UpdateModeIds {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub enum CallType<'a> {
|
||||
ByName {
|
||||
name: LambdaName<'a>,
|
||||
|
@ -1841,7 +1914,7 @@ pub enum CallType<'a> {
|
|||
HigherOrder(&'a HigherOrderLowLevel<'a>),
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq)]
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
|
||||
pub struct PassedFunction<'a> {
|
||||
/// name of the top-level function that is passed as an argument
|
||||
/// e.g. in `List.map xs Num.abs` this would be `Num.abs`
|
||||
|
@ -1858,7 +1931,7 @@ pub struct PassedFunction<'a> {
|
|||
pub owns_captured_environment: bool,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct HigherOrderLowLevel<'a> {
|
||||
pub op: crate::low_level::HigherOrder,
|
||||
|
||||
|
@ -2503,7 +2576,7 @@ fn from_can_let<'a>(
|
|||
|
||||
// We do need specializations
|
||||
1 => {
|
||||
let (_specialization_mark, (var, specialized_symbol)) =
|
||||
let (_specialization_mark, (var, specialized_symbol, _deepest_use)) =
|
||||
needed_specializations.next().unwrap();
|
||||
|
||||
// Make sure rigid variables in the annotation are converted to flex variables.
|
||||
|
@ -2534,7 +2607,7 @@ fn from_can_let<'a>(
|
|||
|
||||
// Need to eat the cost and create a specialized version of the body for
|
||||
// each specialization.
|
||||
for (_specialization_mark, (var, specialized_symbol)) in
|
||||
for (_specialization_mark, (var, specialized_symbol, _deepest_use)) in
|
||||
needed_specializations
|
||||
{
|
||||
use roc_can::copy::deep_copy_type_vars_into_expr;
|
||||
|
@ -2703,7 +2776,7 @@ fn pattern_to_when<'a>(
|
|||
) -> (Symbol, Loc<roc_can::expr::Expr>) {
|
||||
use roc_can::expr::Expr::*;
|
||||
use roc_can::expr::{WhenBranch, WhenBranchPattern};
|
||||
use roc_can::pattern::Pattern::*;
|
||||
use roc_can::pattern::Pattern::{self, *};
|
||||
|
||||
match &pattern.value {
|
||||
Identifier(symbol) => (*symbol, body),
|
||||
|
@ -2766,6 +2839,8 @@ fn pattern_to_when<'a>(
|
|||
(symbol, Loc::at_zero(wrapped_body))
|
||||
}
|
||||
|
||||
Pattern::List { .. } => todo!(),
|
||||
|
||||
IntLiteral(..)
|
||||
| NumLiteral(..)
|
||||
| FloatLiteral(..)
|
||||
|
@ -3388,11 +3463,23 @@ fn specialize_proc_help<'a>(
|
|||
// An argument from the closure list may have taken on a specialized symbol
|
||||
// name during the evaluation of the def body. If this is the case, load the
|
||||
// specialized name rather than the original captured name!
|
||||
let mut get_specialized_name = |symbol| {
|
||||
procs
|
||||
.symbol_specializations
|
||||
.remove_single(symbol)
|
||||
.unwrap_or(symbol)
|
||||
let get_specialized_name = |symbol| {
|
||||
let specs_used_in_body =
|
||||
procs.get_symbol_specializations_used_in_body(symbol);
|
||||
|
||||
match specs_used_in_body {
|
||||
Some(mut specs) => {
|
||||
let spec_symbol =
|
||||
specs.next().map(|(_, sym)| sym).unwrap_or(symbol);
|
||||
if specs.next().is_some() {
|
||||
internal_error!(
|
||||
"polymorphic symbol captures not supported yet"
|
||||
);
|
||||
}
|
||||
spec_symbol
|
||||
}
|
||||
None => symbol,
|
||||
}
|
||||
};
|
||||
|
||||
match closure_layout
|
||||
|
@ -4083,9 +4170,7 @@ pub fn with_hole<'a>(
|
|||
variable,
|
||||
) {
|
||||
let real_symbol =
|
||||
procs
|
||||
.symbol_specializations
|
||||
.get_or_insert(env, layout_cache, symbol, variable);
|
||||
procs.get_or_insert_symbol_specialization(env, layout_cache, symbol, variable);
|
||||
symbol = real_symbol;
|
||||
}
|
||||
|
||||
|
@ -4182,7 +4267,7 @@ pub fn with_hole<'a>(
|
|||
match can_reuse_symbol(env, procs, &loc_arg_expr.value, arg_var) {
|
||||
// Opaques decay to their argument.
|
||||
ReuseSymbol::Value(symbol) => {
|
||||
let real_name = procs.symbol_specializations.get_or_insert(
|
||||
let real_name = procs.get_or_insert_symbol_specialization(
|
||||
env,
|
||||
layout_cache,
|
||||
symbol,
|
||||
|
@ -4247,7 +4332,7 @@ pub fn with_hole<'a>(
|
|||
can_fields.push(Field::FunctionOrUnspecialized(symbol, variable));
|
||||
}
|
||||
Value(symbol) => {
|
||||
let reusable = procs.symbol_specializations.get_or_insert(
|
||||
let reusable = procs.get_or_insert_symbol_specialization(
|
||||
env,
|
||||
layout_cache,
|
||||
symbol,
|
||||
|
@ -4722,6 +4807,7 @@ pub fn with_hole<'a>(
|
|||
find_lambda_name(env, layout_cache, lambda_set, name, &[]);
|
||||
construct_closure_data(
|
||||
env,
|
||||
procs,
|
||||
layout_cache,
|
||||
lambda_set,
|
||||
lambda_name,
|
||||
|
@ -4775,6 +4861,7 @@ pub fn with_hole<'a>(
|
|||
find_lambda_name(env, layout_cache, lambda_set, name, &[]);
|
||||
construct_closure_data(
|
||||
env,
|
||||
procs,
|
||||
layout_cache,
|
||||
lambda_set,
|
||||
lambda_name,
|
||||
|
@ -5023,6 +5110,7 @@ pub fn with_hole<'a>(
|
|||
|
||||
construct_closure_data(
|
||||
env,
|
||||
procs,
|
||||
layout_cache,
|
||||
lambda_set,
|
||||
lambda_name,
|
||||
|
@ -5169,7 +5257,7 @@ pub fn with_hole<'a>(
|
|||
}
|
||||
}
|
||||
Value(function_symbol) => {
|
||||
let function_symbol = procs.symbol_specializations.get_or_insert(
|
||||
let function_symbol = procs.get_or_insert_symbol_specialization(
|
||||
env,
|
||||
layout_cache,
|
||||
function_symbol,
|
||||
|
@ -5585,7 +5673,8 @@ where
|
|||
#[allow(clippy::too_many_arguments)]
|
||||
fn construct_closure_data<'a, I>(
|
||||
env: &mut Env<'a, '_>,
|
||||
layout_cache: &LayoutCache<'a>,
|
||||
procs: &mut Procs<'a>,
|
||||
layout_cache: &mut LayoutCache<'a>,
|
||||
lambda_set: LambdaSet<'a>,
|
||||
name: LambdaName<'a>,
|
||||
symbols: I,
|
||||
|
@ -5670,12 +5759,19 @@ where
|
|||
debug_assert_eq!(symbols.len(), 1);
|
||||
|
||||
let mut symbols = symbols;
|
||||
let (captured_symbol, _) = symbols.next().unwrap();
|
||||
let (captured_symbol, captured_var) = symbols.next().unwrap();
|
||||
|
||||
let captured_symbol = procs.get_or_insert_symbol_specialization(
|
||||
env,
|
||||
layout_cache,
|
||||
*captured_symbol,
|
||||
*captured_var,
|
||||
);
|
||||
|
||||
// The capture set is unwrapped, so just replaced the assigned capture symbol with the
|
||||
// only capture.
|
||||
let mut hole = hole.clone();
|
||||
substitute_in_exprs(env.arena, &mut hole, assigned, *captured_symbol);
|
||||
substitute_in_exprs(env.arena, &mut hole, assigned, captured_symbol);
|
||||
hole
|
||||
}
|
||||
ClosureRepresentation::EnumDispatch(repr) => match repr {
|
||||
|
@ -6056,6 +6152,7 @@ fn tag_union_to_function<'a>(
|
|||
debug_assert!(lambda_name.no_captures());
|
||||
construct_closure_data(
|
||||
env,
|
||||
procs,
|
||||
layout_cache,
|
||||
lambda_set,
|
||||
lambda_name,
|
||||
|
@ -6388,7 +6485,7 @@ pub fn from_can<'a>(
|
|||
stmt = with_hole(
|
||||
env,
|
||||
loc_condition.value,
|
||||
variable,
|
||||
Variable::BOOL,
|
||||
procs,
|
||||
layout_cache,
|
||||
cond_symbol,
|
||||
|
@ -6444,7 +6541,7 @@ pub fn from_can<'a>(
|
|||
stmt = with_hole(
|
||||
env,
|
||||
loc_condition.value,
|
||||
variable,
|
||||
Variable::BOOL,
|
||||
procs,
|
||||
layout_cache,
|
||||
cond_symbol,
|
||||
|
@ -6912,7 +7009,7 @@ fn substitute_in_call<'a>(
|
|||
} => substitute(subs, name.name()).map(|new| CallType::ByName {
|
||||
name: name.replace_name(new),
|
||||
arg_layouts,
|
||||
ret_layout: *ret_layout,
|
||||
ret_layout,
|
||||
specialization_id: *specialization_id,
|
||||
}),
|
||||
CallType::Foreign { .. } => None,
|
||||
|
@ -7061,7 +7158,7 @@ fn substitute_in_expr<'a>(
|
|||
} => match substitute(subs, *structure) {
|
||||
Some(structure) => Some(StructAtIndex {
|
||||
index: *index,
|
||||
field_layouts: *field_layouts,
|
||||
field_layouts,
|
||||
structure,
|
||||
}),
|
||||
None => None,
|
||||
|
@ -7192,6 +7289,24 @@ fn store_pattern_help<'a>(
|
|||
stmt,
|
||||
);
|
||||
}
|
||||
|
||||
List {
|
||||
arity,
|
||||
element_layout,
|
||||
elements,
|
||||
} => {
|
||||
return store_list_pattern(
|
||||
env,
|
||||
procs,
|
||||
layout_cache,
|
||||
outer_symbol,
|
||||
*arity,
|
||||
*element_layout,
|
||||
elements,
|
||||
stmt,
|
||||
)
|
||||
}
|
||||
|
||||
Voided { .. } => {
|
||||
return StorePattern::NotProductive(stmt);
|
||||
}
|
||||
|
@ -7264,6 +7379,190 @@ fn store_pattern_help<'a>(
|
|||
StorePattern::Productive(stmt)
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub(crate) struct ListIndex(
|
||||
/// Positive if we should index from the head, negative if we should index from the tail
|
||||
/// 0 is lst[0]
|
||||
/// -1 is lst[List.len lst - 1]
|
||||
i64,
|
||||
);
|
||||
|
||||
impl ListIndex {
|
||||
pub fn from_pattern_index(index: usize, arity: ListArity) -> Self {
|
||||
match arity {
|
||||
ListArity::Exact(_) => Self(index as _),
|
||||
ListArity::Slice(head, tail) => {
|
||||
if index < head {
|
||||
Self(index as _)
|
||||
} else {
|
||||
// Slice(head=2, tail=5)
|
||||
//
|
||||
// s t ... w y z x q
|
||||
// 0 1 2 3 4 5 6 index
|
||||
// 0 1 2 3 4 (index - head)
|
||||
// 5 4 3 2 1 (tail - (index - head))
|
||||
Self(-((tail - (index - head)) as i64))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) type Store<'a> = (Symbol, Layout<'a>, Expr<'a>);
|
||||
|
||||
/// Builds the list index we should index into
|
||||
#[must_use]
|
||||
pub(crate) fn build_list_index_probe<'a>(
|
||||
env: &mut Env<'a, '_>,
|
||||
list_sym: Symbol,
|
||||
list_index: &ListIndex,
|
||||
) -> (Symbol, impl DoubleEndedIterator<Item = Store<'a>>) {
|
||||
let usize_layout = Layout::usize(env.target_info);
|
||||
|
||||
let list_index = list_index.0;
|
||||
let index_sym = env.unique_symbol();
|
||||
|
||||
let (opt_len_store, opt_offset_store, index_store) = if list_index >= 0 {
|
||||
let index_expr = Expr::Literal(Literal::Int((list_index as i128).to_ne_bytes()));
|
||||
|
||||
let index_store = (index_sym, usize_layout, index_expr);
|
||||
|
||||
(None, None, index_store)
|
||||
} else {
|
||||
let len_sym = env.unique_symbol();
|
||||
let len_expr = Expr::Call(Call {
|
||||
call_type: CallType::LowLevel {
|
||||
op: LowLevel::ListLen,
|
||||
update_mode: env.next_update_mode_id(),
|
||||
},
|
||||
arguments: env.arena.alloc([list_sym]),
|
||||
});
|
||||
|
||||
let offset = list_index.abs();
|
||||
let offset_sym = env.unique_symbol();
|
||||
let offset_expr = Expr::Literal(Literal::Int((offset as i128).to_ne_bytes()));
|
||||
|
||||
let index_expr = Expr::Call(Call {
|
||||
call_type: CallType::LowLevel {
|
||||
op: LowLevel::NumSub,
|
||||
update_mode: env.next_update_mode_id(),
|
||||
},
|
||||
arguments: env.arena.alloc([len_sym, offset_sym]),
|
||||
});
|
||||
|
||||
let len_store = (len_sym, usize_layout, len_expr);
|
||||
let offset_store = (offset_sym, usize_layout, offset_expr);
|
||||
let index_store = (index_sym, usize_layout, index_expr);
|
||||
|
||||
(Some(len_store), Some(offset_store), index_store)
|
||||
};
|
||||
|
||||
let stores = (opt_len_store.into_iter())
|
||||
.chain(opt_offset_store)
|
||||
.chain([index_store]);
|
||||
|
||||
(index_sym, stores)
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn store_list_pattern<'a>(
|
||||
env: &mut Env<'a, '_>,
|
||||
procs: &mut Procs<'a>,
|
||||
layout_cache: &mut LayoutCache<'a>,
|
||||
list_sym: Symbol,
|
||||
list_arity: ListArity,
|
||||
element_layout: Layout<'a>,
|
||||
elements: &[Pattern<'a>],
|
||||
mut stmt: Stmt<'a>,
|
||||
) -> StorePattern<'a> {
|
||||
use Pattern::*;
|
||||
|
||||
let mut is_productive = false;
|
||||
|
||||
for (index, element) in elements.iter().enumerate().rev() {
|
||||
let compute_element_load = |env: &mut Env<'a, '_>| {
|
||||
let list_index = ListIndex::from_pattern_index(index, list_arity);
|
||||
|
||||
let (index_sym, needed_stores) = build_list_index_probe(env, list_sym, &list_index);
|
||||
|
||||
let load = Expr::Call(Call {
|
||||
call_type: CallType::LowLevel {
|
||||
op: LowLevel::ListGetUnsafe,
|
||||
update_mode: env.next_update_mode_id(),
|
||||
},
|
||||
arguments: env.arena.alloc([list_sym, index_sym]),
|
||||
});
|
||||
|
||||
(load, needed_stores)
|
||||
};
|
||||
|
||||
let (store_loaded, needed_stores) = match element {
|
||||
Identifier(symbol) => {
|
||||
let (load, needed_stores) = compute_element_load(env);
|
||||
|
||||
// Pattern can define only one specialization
|
||||
let symbol = procs
|
||||
.symbol_specializations
|
||||
.remove_single(*symbol)
|
||||
.unwrap_or(*symbol);
|
||||
|
||||
// store immediately in the given symbol
|
||||
(
|
||||
Stmt::Let(symbol, load, element_layout, env.arena.alloc(stmt)),
|
||||
needed_stores,
|
||||
)
|
||||
}
|
||||
Underscore
|
||||
| IntLiteral(_, _)
|
||||
| FloatLiteral(_, _)
|
||||
| DecimalLiteral(_)
|
||||
| EnumLiteral { .. }
|
||||
| BitLiteral { .. }
|
||||
| StrLiteral(_) => {
|
||||
// ignore
|
||||
continue;
|
||||
}
|
||||
_ => {
|
||||
// store the field in a symbol, and continue matching on it
|
||||
let symbol = env.unique_symbol();
|
||||
|
||||
// first recurse, continuing to unpack symbol
|
||||
match store_pattern_help(env, procs, layout_cache, element, symbol, stmt) {
|
||||
StorePattern::Productive(new) => {
|
||||
stmt = new;
|
||||
let (load, needed_stores) = compute_element_load(env);
|
||||
|
||||
// only if we bind one of its (sub)fields to a used name should we
|
||||
// extract the field
|
||||
(
|
||||
Stmt::Let(symbol, load, element_layout, env.arena.alloc(stmt)),
|
||||
needed_stores,
|
||||
)
|
||||
}
|
||||
StorePattern::NotProductive(new) => {
|
||||
// do nothing
|
||||
stmt = new;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
is_productive = true;
|
||||
|
||||
stmt = store_loaded;
|
||||
for (sym, lay, expr) in needed_stores.rev() {
|
||||
stmt = Stmt::Let(sym, expr, lay, env.arena.alloc(stmt));
|
||||
}
|
||||
}
|
||||
|
||||
if is_productive {
|
||||
StorePattern::Productive(stmt)
|
||||
} else {
|
||||
StorePattern::NotProductive(stmt)
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn store_tag_pattern<'a>(
|
||||
env: &mut Env<'a, '_>,
|
||||
|
@ -7586,9 +7885,7 @@ fn possible_reuse_symbol_or_specialize<'a>(
|
|||
) -> Symbol {
|
||||
match can_reuse_symbol(env, procs, expr, var) {
|
||||
ReuseSymbol::Value(symbol) => {
|
||||
procs
|
||||
.symbol_specializations
|
||||
.get_or_insert(env, layout_cache, symbol, var)
|
||||
procs.get_or_insert_symbol_specialization(env, layout_cache, symbol, var)
|
||||
}
|
||||
_ => env.unique_symbol(),
|
||||
}
|
||||
|
@ -7664,13 +7961,17 @@ where
|
|||
// captured symbols can only ever be specialized outside the closure.
|
||||
// After that is done, remove this hack.
|
||||
.chain(if no_specializations_needed {
|
||||
[Some((variable, left))]
|
||||
[Some((
|
||||
variable,
|
||||
left,
|
||||
procs.specialization_stack.current_use_depth(),
|
||||
))]
|
||||
} else {
|
||||
[None]
|
||||
})
|
||||
.flatten();
|
||||
|
||||
for (variable, left) in needed_specializations_of_left {
|
||||
for (variable, left, _deepest_use) in needed_specializations_of_left {
|
||||
add_needed_external(procs, env, variable, LambdaName::no_niche(right));
|
||||
|
||||
let res_layout = layout_cache.from_var(env.arena, variable, env.subs);
|
||||
|
@ -7694,7 +7995,7 @@ where
|
|||
|
||||
let left_had_specialization_symbols = needed_specializations_of_left.len() > 0;
|
||||
|
||||
for (specialization_mark, (specialized_var, specialized_sym)) in
|
||||
for (specialization_mark, (specialized_var, specialized_sym, deepest_use)) in
|
||||
needed_specializations_of_left
|
||||
{
|
||||
let old_specialized_sym = procs.symbol_specializations.get_or_insert_known(
|
||||
|
@ -7702,9 +8003,10 @@ where
|
|||
specialization_mark,
|
||||
specialized_var,
|
||||
specialized_sym,
|
||||
deepest_use,
|
||||
);
|
||||
|
||||
if let Some((_, old_specialized_sym)) = old_specialized_sym {
|
||||
if let Some((_, old_specialized_sym, _)) = old_specialized_sym {
|
||||
scratchpad_update_specializations.push((old_specialized_sym, specialized_sym));
|
||||
}
|
||||
}
|
||||
|
@ -7887,6 +8189,7 @@ fn specialize_symbol<'a>(
|
|||
|
||||
construct_closure_data(
|
||||
env,
|
||||
procs,
|
||||
layout_cache,
|
||||
lambda_set,
|
||||
lambda_name,
|
||||
|
@ -7938,6 +8241,7 @@ fn specialize_symbol<'a>(
|
|||
|
||||
construct_closure_data(
|
||||
env,
|
||||
procs,
|
||||
layout_cache,
|
||||
lambda_set,
|
||||
lambda_name,
|
||||
|
@ -8343,6 +8647,7 @@ fn call_by_name_help<'a>(
|
|||
|
||||
construct_closure_data(
|
||||
env,
|
||||
procs,
|
||||
layout_cache,
|
||||
lambda_set,
|
||||
proc_name,
|
||||
|
@ -8739,6 +9044,7 @@ fn call_specialized_proc<'a>(
|
|||
|
||||
let result = construct_closure_data(
|
||||
env,
|
||||
procs,
|
||||
layout_cache,
|
||||
lambda_set,
|
||||
proc_name,
|
||||
|
@ -8818,6 +9124,11 @@ pub enum Pattern<'a> {
|
|||
opaque: Symbol,
|
||||
argument: Box<(Pattern<'a>, Layout<'a>)>,
|
||||
},
|
||||
List {
|
||||
arity: ListArity,
|
||||
element_layout: Layout<'a>,
|
||||
elements: Vec<'a, Pattern<'a>>,
|
||||
},
|
||||
}
|
||||
|
||||
impl<'a> Pattern<'a> {
|
||||
|
@ -8854,6 +9165,7 @@ impl<'a> Pattern<'a> {
|
|||
stack.extend(arguments.iter().map(|(t, _)| t))
|
||||
}
|
||||
Pattern::OpaqueUnwrap { argument, .. } => stack.push(&argument.0),
|
||||
Pattern::List { elements, .. } => stack.extend(elements),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -9604,6 +9916,35 @@ fn from_can_pattern_help<'a>(
|
|||
field_layouts.into_bump_slice(),
|
||||
))
|
||||
}
|
||||
|
||||
List {
|
||||
list_var: _,
|
||||
elem_var,
|
||||
patterns,
|
||||
} => {
|
||||
let element_layout = match layout_cache.from_var(env.arena, *elem_var, env.subs) {
|
||||
Ok(lay) => lay,
|
||||
Err(LayoutProblem::UnresolvedTypeVar(_)) => {
|
||||
return Err(RuntimeError::UnresolvedTypeVar)
|
||||
}
|
||||
Err(LayoutProblem::Erroneous) => return Err(RuntimeError::ErroneousType),
|
||||
};
|
||||
|
||||
let arity = patterns.arity();
|
||||
|
||||
let mut mono_patterns = Vec::with_capacity_in(patterns.patterns.len(), env.arena);
|
||||
for loc_pat in patterns.patterns.iter() {
|
||||
let mono_pat =
|
||||
from_can_pattern_help(env, procs, layout_cache, &loc_pat.value, assignments)?;
|
||||
mono_patterns.push(mono_pat);
|
||||
}
|
||||
|
||||
Ok(Pattern::List {
|
||||
arity,
|
||||
element_layout,
|
||||
elements: mono_patterns,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use roc_module::symbol::Symbol;
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq)]
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
pub enum HigherOrder {
|
||||
ListMap {
|
||||
xs: Symbol,
|
||||
|
|
|
@ -128,7 +128,7 @@ fn function_s<'a, 'i>(
|
|||
remainder,
|
||||
} => {
|
||||
let id = *id;
|
||||
let body: &Stmt = *body;
|
||||
let body: &Stmt = body;
|
||||
let new_body = function_s(env, w, c, body);
|
||||
|
||||
let new_join = if std::ptr::eq(body, new_body) || body == new_body {
|
||||
|
@ -179,7 +179,7 @@ fn function_s<'a, 'i>(
|
|||
arena.alloc(new_switch)
|
||||
}
|
||||
Refcounting(op, continuation) => {
|
||||
let continuation: &Stmt = *continuation;
|
||||
let continuation: &Stmt = continuation;
|
||||
let new_continuation = function_s(env, w, c, continuation);
|
||||
|
||||
if std::ptr::eq(continuation, new_continuation) || continuation == new_continuation {
|
||||
|
@ -198,7 +198,7 @@ fn function_s<'a, 'i>(
|
|||
layouts,
|
||||
remainder,
|
||||
} => {
|
||||
let continuation: &Stmt = *remainder;
|
||||
let continuation: &Stmt = remainder;
|
||||
let new_continuation = function_s(env, w, c, continuation);
|
||||
|
||||
if std::ptr::eq(continuation, new_continuation) || continuation == new_continuation {
|
||||
|
@ -223,7 +223,7 @@ fn function_s<'a, 'i>(
|
|||
layouts,
|
||||
remainder,
|
||||
} => {
|
||||
let continuation: &Stmt = *remainder;
|
||||
let continuation: &Stmt = remainder;
|
||||
let new_continuation = function_s(env, w, c, continuation);
|
||||
|
||||
if std::ptr::eq(continuation, new_continuation) || continuation == new_continuation {
|
||||
|
|
|
@ -109,7 +109,7 @@ pub enum StrSegment<'a> {
|
|||
Interpolated(Loc<&'a Expr<'a>>), // e.g. (name) in "Hi, \(name)!"
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq)]
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
pub enum EscapedChar {
|
||||
Newline, // \n
|
||||
Tab, // \t
|
||||
|
@ -581,7 +581,7 @@ pub enum AssignedField<'a, Val> {
|
|||
Malformed(&'a str),
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq)]
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
pub enum CommentOrNewline<'a> {
|
||||
Newline,
|
||||
LineComment(&'a str),
|
||||
|
@ -888,7 +888,7 @@ impl<'a, T> Collection<'a, T> {
|
|||
|
||||
pub fn final_comments(&self) -> &'a [CommentOrNewline<'a>] {
|
||||
if let Some(final_comments) = self.final_comments {
|
||||
*final_comments
|
||||
final_comments
|
||||
} else {
|
||||
&[]
|
||||
}
|
||||
|
|
|
@ -346,6 +346,7 @@ fn parse_expr_start<'a>(
|
|||
loc!(move |a, s, m| parse_expr_operator_chain(m, options, a, s)),
|
||||
fail_expr_start_e()
|
||||
]
|
||||
.trace("expr_start")
|
||||
.parse(arena, state, min_indent)
|
||||
}
|
||||
|
||||
|
@ -546,7 +547,7 @@ fn numeric_negate_expression<'a, T>(
|
|||
expr: Loc<Expr<'a>>,
|
||||
spaces: &'a [CommentOrNewline<'a>],
|
||||
) -> Loc<Expr<'a>> {
|
||||
debug_assert_eq!(state.bytes().get(0), Some(&b'-'));
|
||||
debug_assert_eq!(state.bytes().first(), Some(&b'-'));
|
||||
// for overflow reasons, we must make the unary minus part of the number literal.
|
||||
let start = state.pos();
|
||||
let region = Region::new(start, expr.region.end());
|
||||
|
@ -1933,7 +1934,7 @@ fn expr_to_pattern_help<'a>(arena: &'a Bump, expr: &Expr<'a>) -> Result<Pattern<
|
|||
| Expr::UnaryOp(_, _) => Err(()),
|
||||
|
||||
Expr::Str(string) => Ok(Pattern::StrLiteral(*string)),
|
||||
Expr::SingleQuote(string) => Ok(Pattern::SingleQuote(*string)),
|
||||
Expr::SingleQuote(string) => Ok(Pattern::SingleQuote(string)),
|
||||
Expr::MalformedIdent(string, _problem) => Ok(Pattern::Malformed(string)),
|
||||
}
|
||||
}
|
||||
|
@ -2105,10 +2106,10 @@ mod when {
|
|||
parser::keyword_e(keyword::IS, EWhen::Is)
|
||||
)
|
||||
),
|
||||
move |arena, state, progress, (case_indent, loc_condition), min_indent| {
|
||||
move |arena, state, _progress, (case_indent, loc_condition), min_indent| {
|
||||
if case_indent < min_indent {
|
||||
return Err((
|
||||
progress,
|
||||
MadeProgress,
|
||||
// TODO maybe pass case_indent here?
|
||||
EWhen::PatternAlignment(5, state.pos()),
|
||||
state,
|
||||
|
@ -2118,15 +2119,18 @@ mod when {
|
|||
// Everything in the branches must be indented at least as much as the case itself.
|
||||
let min_indent = case_indent;
|
||||
|
||||
let (p1, branches, state) = branches(options).parse(arena, state, min_indent)?;
|
||||
let (_p1, branches, state) = branches(options)
|
||||
.parse(arena, state, min_indent)
|
||||
.map_err(|(_p, e, s)| (MadeProgress, e, s))?;
|
||||
|
||||
Ok((
|
||||
progress.or(p1),
|
||||
MadeProgress,
|
||||
Expr::When(arena.alloc(loc_condition), branches.into_bump_slice()),
|
||||
state,
|
||||
))
|
||||
},
|
||||
)
|
||||
.trace("when")
|
||||
}
|
||||
|
||||
/// Parsing when with indentation.
|
||||
|
|
|
@ -52,7 +52,7 @@ pub enum VersionComparison {
|
|||
DisallowsEqual,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Debug)]
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
|
||||
pub struct PackageName<'a>(&'a str);
|
||||
|
||||
impl<'a> PackageName<'a> {
|
||||
|
@ -160,7 +160,7 @@ pub struct HostedHeader<'a> {
|
|||
pub after_with: &'a [CommentOrNewline<'a>],
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq)]
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
|
||||
pub enum To<'a> {
|
||||
ExistingPackage(&'a str),
|
||||
NewPackage(PackageName<'a>),
|
||||
|
@ -262,7 +262,7 @@ pub struct TypedIdent<'a> {
|
|||
pub ann: Loc<TypeAnnotation<'a>>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq)]
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
|
||||
pub struct PackageEntry<'a> {
|
||||
pub shorthand: &'a str,
|
||||
pub spaces_after_shorthand: &'a [CommentOrNewline<'a>],
|
||||
|
|
|
@ -737,6 +737,7 @@ pub trait Parser<'a, Output, Error> {
|
|||
) -> ParseResult<'a, Output, Error>;
|
||||
|
||||
#[cfg(not(feature = "parse_debug_trace"))]
|
||||
#[inline(always)]
|
||||
fn trace(self, _message: &'static str) -> Self
|
||||
where
|
||||
Self: Sized,
|
||||
|
@ -789,7 +790,7 @@ impl<'a, O: std::fmt::Debug, E: std::fmt::Debug, P: Parser<'a, O, E>> Parser<'a,
|
|||
where
|
||||
E: 'a,
|
||||
{
|
||||
fn parse(&self, arena: &'a Bump, state: State<'a>) -> ParseResult<'a, O, E> {
|
||||
fn parse(&self, arena: &'a Bump, state: State<'a>, min_indent: u32) -> ParseResult<'a, O, E> {
|
||||
use std::cell::RefCell;
|
||||
|
||||
thread_local! {
|
||||
|
@ -803,7 +804,7 @@ where
|
|||
let cur_indent = INDENT.with(|i| *i.borrow());
|
||||
|
||||
println!(
|
||||
"{:>5?}: {}{:<50}",
|
||||
"{:<5?}: {}{:<50}",
|
||||
state.pos(),
|
||||
&indent_text[..cur_indent * 2],
|
||||
self.message
|
||||
|
@ -1379,11 +1380,12 @@ macro_rules! and {
|
|||
macro_rules! one_of {
|
||||
($p1:expr, $p2:expr) => {
|
||||
move |arena: &'a bumpalo::Bump, state: $crate::state::State<'a>, min_indent: u32| {
|
||||
let original_state = state.clone();
|
||||
|
||||
match $p1.parse(arena, state, min_indent) {
|
||||
valid @ Ok(_) => valid,
|
||||
Err((MadeProgress, fail, state)) => Err((MadeProgress, fail, state)),
|
||||
Err((NoProgress, _, state)) => $p2.parse(arena, state, min_indent),
|
||||
Err((NoProgress, _, _)) => $p2.parse(arena, original_state, min_indent),
|
||||
}
|
||||
}
|
||||
};
|
||||
|
|
|
@ -122,10 +122,7 @@ pub fn parse_single_quote<'a>() -> impl Parser<'a, &'a str, EString<'a>> {
|
|||
}
|
||||
}
|
||||
|
||||
fn consume_indent<'a>(
|
||||
mut state: State<'a>,
|
||||
mut indent: u32,
|
||||
) -> Result<State, (Progress, EString<'a>, State<'a>)> {
|
||||
fn consume_indent(mut state: State, mut indent: u32) -> Result<State, (Progress, EString, State)> {
|
||||
while indent > 0 {
|
||||
match state.bytes().first() {
|
||||
Some(b' ') => {
|
||||
|
|
|
@ -0,0 +1 @@
|
|||
Expr(When(Arrow(@24), @24), @0)
|
|
@ -0,0 +1,6 @@
|
|||
when Just 4 is
|
||||
Just when ->
|
||||
4
|
||||
|
||||
_ ->
|
||||
2
|
|
@ -0,0 +1 @@
|
|||
Expr(When(Arrow(@26), @20), @0)
|
|
@ -0,0 +1,3 @@
|
|||
when 5 is
|
||||
1 -> 2
|
||||
_
|
|
@ -124,6 +124,8 @@ mod test_parse {
|
|||
fail/lambda_missing_indent.expr,
|
||||
fail/type_argument_no_arrow.expr,
|
||||
fail/type_double_comma.expr,
|
||||
fail/when_missing_arrow.expr,
|
||||
fail/pattern_binds_keyword.expr,
|
||||
pass/ability_demand_signature_is_multiline.expr,
|
||||
pass/ability_multi_line.expr,
|
||||
pass/ability_single_line.expr,
|
||||
|
|
|
@ -7,19 +7,19 @@ use roc_parse::pattern::PatternType;
|
|||
use roc_region::all::{Loc, Region};
|
||||
use roc_types::types::AliasKind;
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq)]
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
pub struct CycleEntry {
|
||||
pub symbol: Symbol,
|
||||
pub symbol_region: Region,
|
||||
pub expr_region: Region,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub enum BadPattern {
|
||||
Unsupported(PatternType),
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq)]
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
pub enum ShadowKind {
|
||||
Variable,
|
||||
Alias(Symbol),
|
||||
|
@ -28,7 +28,7 @@ pub enum ShadowKind {
|
|||
}
|
||||
|
||||
/// Problems that can occur in the course of canonicalization.
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub enum Problem {
|
||||
UnusedDef(Symbol, Region),
|
||||
UnusedImport(Symbol, Region),
|
||||
|
@ -185,6 +185,9 @@ pub enum Problem {
|
|||
UnnecessaryOutputWildcard {
|
||||
region: Region,
|
||||
},
|
||||
MultipleListRestPattern {
|
||||
region: Region,
|
||||
},
|
||||
}
|
||||
|
||||
impl Problem {
|
||||
|
@ -313,6 +316,7 @@ impl Problem {
|
|||
def_pattern: region,
|
||||
..
|
||||
}
|
||||
| Problem::MultipleListRestPattern { region }
|
||||
| Problem::UnnecessaryOutputWildcard { region } => Some(*region),
|
||||
Problem::RuntimeError(RuntimeError::CircularDef(cycle_entries))
|
||||
| Problem::BadRecursion(cycle_entries) => {
|
||||
|
@ -330,13 +334,13 @@ impl Problem {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub enum ExtensionTypeKind {
|
||||
Record,
|
||||
TagUnion,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub enum PrecedenceProblem {
|
||||
BothNonAssociative(Region, Loc<BinOp>, Loc<BinOp>),
|
||||
}
|
||||
|
@ -385,7 +389,7 @@ pub enum FloatErrorKind {
|
|||
IntSuffix,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub enum RuntimeError {
|
||||
Shadowing {
|
||||
original_region: Region,
|
||||
|
@ -508,7 +512,7 @@ impl RuntimeError {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq)]
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
pub enum MalformedPatternProblem {
|
||||
MalformedInt,
|
||||
MalformedFloat,
|
||||
|
@ -518,4 +522,5 @@ pub enum MalformedPatternProblem {
|
|||
BadIdent(roc_parse::ident::BadIdent),
|
||||
EmptySingleQuote,
|
||||
MultipleCharsInSingleQuote,
|
||||
DuplicateListRestPattern,
|
||||
}
|
||||
|
|
|
@ -133,7 +133,8 @@ impl Position {
|
|||
|
||||
impl Debug for Position {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "@{}", self.offset)
|
||||
write!(f, "@")?;
|
||||
self.offset.fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -31,7 +31,7 @@ pub enum AbilityImplError {
|
|||
}
|
||||
|
||||
/// Indexes a requested deriving of an ability for an opaque type.
|
||||
#[derive(Debug, PartialEq, Clone, Copy)]
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
|
||||
pub struct RequestedDeriveKey {
|
||||
pub opaque: Symbol,
|
||||
pub ability: Symbol,
|
||||
|
|
|
@ -947,15 +947,16 @@ fn solve(
|
|||
);
|
||||
|
||||
let expectation = &constraints.expectations[expectation_index.index()];
|
||||
let expected = type_cell_to_var(
|
||||
let expected = either_type_index_to_var(
|
||||
constraints,
|
||||
subs,
|
||||
rank,
|
||||
pools,
|
||||
problems,
|
||||
abilities_store,
|
||||
obligation_cache,
|
||||
pools,
|
||||
aliases,
|
||||
expectation.get_type_ref(),
|
||||
*expectation.get_type_ref(),
|
||||
);
|
||||
|
||||
match unify(
|
||||
|
@ -1065,15 +1066,16 @@ fn solve(
|
|||
let actual = deep_copy_var_in(subs, rank, pools, var, arena);
|
||||
let expectation = &constraints.expectations[expectation_index.index()];
|
||||
|
||||
let expected = type_cell_to_var(
|
||||
let expected = either_type_index_to_var(
|
||||
constraints,
|
||||
subs,
|
||||
rank,
|
||||
pools,
|
||||
problems,
|
||||
abilities_store,
|
||||
obligation_cache,
|
||||
pools,
|
||||
aliases,
|
||||
expectation.get_type_ref(),
|
||||
*expectation.get_type_ref(),
|
||||
);
|
||||
|
||||
match unify(
|
||||
|
@ -1178,15 +1180,16 @@ fn solve(
|
|||
);
|
||||
|
||||
let expectation = &constraints.pattern_expectations[expectation_index.index()];
|
||||
let expected = type_cell_to_var(
|
||||
let expected = either_type_index_to_var(
|
||||
constraints,
|
||||
subs,
|
||||
rank,
|
||||
pools,
|
||||
problems,
|
||||
abilities_store,
|
||||
obligation_cache,
|
||||
pools,
|
||||
aliases,
|
||||
expectation.get_type_ref(),
|
||||
*expectation.get_type_ref(),
|
||||
);
|
||||
|
||||
let mode = match constraint {
|
||||
|
@ -1487,15 +1490,16 @@ fn solve(
|
|||
// 4. Condition and branch types aren't "almost equal", this is just a normal type
|
||||
// error.
|
||||
|
||||
let (real_var, real_region, expected_type, category_and_expected) = match eq {
|
||||
let (real_var, real_region, branches_var, category_and_expected) = match eq {
|
||||
Ok(eq) => {
|
||||
let roc_can::constraint::Eq(real_var, expected, category, real_region) =
|
||||
constraints.eq[eq.index()];
|
||||
let expected = &constraints.expectations[expected.index()];
|
||||
|
||||
(
|
||||
real_var,
|
||||
real_region,
|
||||
expected.get_type_ref(),
|
||||
*expected.get_type_ref(),
|
||||
Ok((category, expected)),
|
||||
)
|
||||
}
|
||||
|
@ -1507,10 +1511,11 @@ fn solve(
|
|||
real_region,
|
||||
) = constraints.pattern_eq[peq.index()];
|
||||
let expected = &constraints.pattern_expectations[expected.index()];
|
||||
|
||||
(
|
||||
real_var,
|
||||
real_region,
|
||||
expected.get_type_ref(),
|
||||
*expected.get_type_ref(),
|
||||
Err((category, expected)),
|
||||
)
|
||||
}
|
||||
|
@ -1528,15 +1533,16 @@ fn solve(
|
|||
real_var,
|
||||
);
|
||||
|
||||
let branches_var = type_cell_to_var(
|
||||
let branches_var = either_type_index_to_var(
|
||||
constraints,
|
||||
subs,
|
||||
rank,
|
||||
pools,
|
||||
problems,
|
||||
abilities_store,
|
||||
obligation_cache,
|
||||
pools,
|
||||
aliases,
|
||||
expected_type,
|
||||
branches_var,
|
||||
);
|
||||
|
||||
let cond_source_is_likely_positive_value = category_and_expected.is_ok();
|
||||
|
@ -1736,24 +1742,28 @@ fn solve(
|
|||
close_pattern_matched_tag_unions(subs, real_var);
|
||||
}
|
||||
|
||||
let ExhaustiveSummary {
|
||||
if let Ok(ExhaustiveSummary {
|
||||
errors,
|
||||
exhaustive,
|
||||
redundancies,
|
||||
} = check(subs, real_var, sketched_rows, context);
|
||||
}) = check(subs, real_var, sketched_rows, context)
|
||||
{
|
||||
// Store information about whether the "when" is exhaustive, and
|
||||
// which (if any) of its branches are redundant. Codegen may use
|
||||
// this for branch-fixing and redundant elimination.
|
||||
if !exhaustive {
|
||||
exhaustive_mark.set_non_exhaustive(subs);
|
||||
}
|
||||
for redundant_mark in redundancies {
|
||||
redundant_mark.set_redundant(subs);
|
||||
}
|
||||
|
||||
// Store information about whether the "when" is exhaustive, and
|
||||
// which (if any) of its branches are redundant. Codegen may use
|
||||
// this for branch-fixing and redundant elimination.
|
||||
if !exhaustive {
|
||||
exhaustive_mark.set_non_exhaustive(subs);
|
||||
// Store the errors.
|
||||
problems.extend(errors.into_iter().map(TypeError::Exhaustive));
|
||||
} else {
|
||||
// Otherwise there were type errors deeper in the pattern; we will have
|
||||
// already reported them.
|
||||
}
|
||||
for redundant_mark in redundancies {
|
||||
redundant_mark.set_redundant(subs);
|
||||
}
|
||||
|
||||
// Store the errors.
|
||||
problems.extend(errors.into_iter().map(TypeError::Exhaustive));
|
||||
}
|
||||
|
||||
state
|
||||
|
@ -1892,6 +1902,11 @@ fn open_tag_union(subs: &mut Subs, var: Variable) {
|
|||
stack.extend(subs.get_subs_slice(fields.variables()));
|
||||
}
|
||||
|
||||
Structure(Apply(Symbol::LIST_LIST, args)) => {
|
||||
// Open up nested tag unions.
|
||||
stack.extend(subs.get_subs_slice(args));
|
||||
}
|
||||
|
||||
_ => {
|
||||
// Everything else is not a structural type that can be opened
|
||||
// (i.e. cannot be matched in a pattern-match)
|
||||
|
@ -1952,10 +1967,15 @@ fn close_pattern_matched_tag_unions(subs: &mut Subs, var: Variable) {
|
|||
}
|
||||
|
||||
Structure(Record(fields, _)) => {
|
||||
// Open up all nested tag unions.
|
||||
// Close up all nested tag unions.
|
||||
stack.extend(subs.get_subs_slice(fields.variables()));
|
||||
}
|
||||
|
||||
Structure(Apply(Symbol::LIST_LIST, args)) => {
|
||||
// Close up nested tag unions.
|
||||
stack.extend(subs.get_subs_slice(args));
|
||||
}
|
||||
|
||||
Alias(_, _, real_var, _) => {
|
||||
stack.push(real_var);
|
||||
}
|
||||
|
@ -2233,21 +2253,22 @@ impl LocalDefVarsVec<(Symbol, Loc<Variable>)> {
|
|||
subs: &mut Subs,
|
||||
def_types_slice: roc_can::constraint::DefTypes,
|
||||
) -> Self {
|
||||
let types_slice = &constraints.types[def_types_slice.types.indices()];
|
||||
let type_indices_slice = &constraints.type_slices[def_types_slice.types.indices()];
|
||||
let loc_symbols_slice = &constraints.loc_symbols[def_types_slice.loc_symbols.indices()];
|
||||
|
||||
let mut local_def_vars = Self::with_length(types_slice.len());
|
||||
let mut local_def_vars = Self::with_length(type_indices_slice.len());
|
||||
|
||||
for (&(symbol, region), typ_cell) in (loc_symbols_slice.iter()).zip(types_slice) {
|
||||
let var = type_cell_to_var(
|
||||
for (&(symbol, region), typ_index) in (loc_symbols_slice.iter()).zip(type_indices_slice) {
|
||||
let var = either_type_index_to_var(
|
||||
constraints,
|
||||
subs,
|
||||
rank,
|
||||
pools,
|
||||
problems,
|
||||
abilities_store,
|
||||
obligation_cache,
|
||||
pools,
|
||||
aliases,
|
||||
typ_cell,
|
||||
*typ_index,
|
||||
);
|
||||
|
||||
local_def_vars.push((symbol, Loc { value: var, region }));
|
||||
|
|
|
@ -4590,7 +4590,7 @@ mod solve_expr {
|
|||
removeHelp 1i64 Empty
|
||||
"#
|
||||
),
|
||||
"RBTree I64 I64",
|
||||
"RBTree (Key (Integer Signed64)) I64",
|
||||
);
|
||||
}
|
||||
|
||||
|
|
|
@ -31,7 +31,7 @@ pub enum TypeError {
|
|||
},
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Debug, Clone)]
|
||||
#[derive(PartialEq, Eq, Debug, Clone)]
|
||||
pub enum Unfulfilled {
|
||||
/// No claimed implementation of an ability for an opaque type.
|
||||
OpaqueDoesNotImplement { typ: Symbol, ability: Symbol },
|
||||
|
@ -51,7 +51,7 @@ pub enum Unfulfilled {
|
|||
},
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Debug, Clone)]
|
||||
#[derive(PartialEq, Eq, Debug, Clone)]
|
||||
pub enum UnderivableReason {
|
||||
NotABuiltin,
|
||||
/// The surface type is not derivable
|
||||
|
@ -60,7 +60,7 @@ pub enum UnderivableReason {
|
|||
NestedNotDerivable(ErrorType, NotDerivableContext),
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Debug, Clone)]
|
||||
#[derive(PartialEq, Eq, Debug, Clone)]
|
||||
pub enum NotDerivableContext {
|
||||
NoContext,
|
||||
Function,
|
||||
|
@ -70,12 +70,12 @@ pub enum NotDerivableContext {
|
|||
Eq(NotDerivableEq),
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Debug, Clone)]
|
||||
#[derive(PartialEq, Eq, Debug, Clone)]
|
||||
pub enum NotDerivableDecode {
|
||||
OptionalRecordField(Lowercase),
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Debug, Clone)]
|
||||
#[derive(PartialEq, Eq, Debug, Clone)]
|
||||
pub enum NotDerivableEq {
|
||||
FloatingPoint,
|
||||
}
|
||||
|
|
|
@ -387,6 +387,7 @@ fn pattern<'a>(
|
|||
)
|
||||
.append(f.text("}"))
|
||||
.group(),
|
||||
List { .. } => todo!(),
|
||||
NumLiteral(_, n, _, _) | IntLiteral(_, _, n, _, _) | FloatLiteral(_, _, n, _, _) => {
|
||||
f.text(&**n)
|
||||
}
|
||||
|
|
|
@ -375,6 +375,7 @@ fn check_derived_typechecks_and_golden(
|
|||
let mut rigid_vars = Default::default();
|
||||
let (import_variables, abilities_store) = add_imports(
|
||||
test_module,
|
||||
&mut constraints,
|
||||
&mut test_subs,
|
||||
pending_abilities,
|
||||
&exposed_for_module,
|
||||
|
|
|
@ -11,6 +11,7 @@ path = "src/tests.rs"
|
|||
|
||||
[build-dependencies]
|
||||
roc_builtins = { path = "../builtins" }
|
||||
roc_utils = { path = "../../utils" }
|
||||
wasi_libc_sys = { path = "../../wasi-libc-sys" }
|
||||
|
||||
[dev-dependencies]
|
||||
|
@ -25,6 +26,7 @@ roc_types = { path = "../types" }
|
|||
roc_builtins = { path = "../builtins" }
|
||||
roc_constrain = { path = "../constrain" }
|
||||
roc_unify = { path = "../unify" }
|
||||
roc_utils = { path = "../../utils" }
|
||||
roc_solve = { path = "../solve" }
|
||||
roc_mono = { path = "../mono" }
|
||||
roc_reporting = { path = "../../reporting" }
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
use roc_builtins::bitcode;
|
||||
use roc_utils::zig;
|
||||
use std::env;
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::process::Command;
|
||||
|
||||
use wasi_libc_sys::{WASI_COMPILER_RT_PATH, WASI_LIBC_PATH};
|
||||
|
||||
|
@ -113,13 +113,6 @@ fn build_wasm_test_host() {
|
|||
]);
|
||||
}
|
||||
|
||||
fn zig_executable() -> String {
|
||||
match std::env::var("ROC_ZIG") {
|
||||
Ok(path) => path,
|
||||
Err(_) => "zig".into(),
|
||||
}
|
||||
}
|
||||
|
||||
fn build_wasm_platform(out_dir: &str, source_path: &str) -> PathBuf {
|
||||
let mut outfile = PathBuf::from(out_dir).join(PLATFORM_FILENAME);
|
||||
outfile.set_extension("o");
|
||||
|
@ -146,16 +139,25 @@ fn feature_is_enabled(feature_name: &str) -> bool {
|
|||
|
||||
// Run cargo with -vv to see commands printed out
|
||||
fn run_zig(args: &[&str]) {
|
||||
let zig = zig_executable();
|
||||
println!("{} {}", zig, args.join(" "));
|
||||
let output = Command::new(&zig).args(args).output().unwrap();
|
||||
let mut zig_cmd = zig();
|
||||
|
||||
if !output.status.success() {
|
||||
eprintln!("stdout:\n{}", String::from_utf8_lossy(&output.stdout));
|
||||
eprintln!("stderr:\n{}", String::from_utf8_lossy(&output.stderr));
|
||||
panic!("zig call failed with status {:?}", output.status);
|
||||
let full_zig_cmd = zig_cmd.args(args);
|
||||
println!("{:?}", full_zig_cmd);
|
||||
|
||||
let zig_cmd_output = full_zig_cmd.output().unwrap();
|
||||
|
||||
if !zig_cmd_output.status.success() {
|
||||
eprintln!(
|
||||
"stdout:\n{}",
|
||||
String::from_utf8_lossy(&zig_cmd_output.stdout)
|
||||
);
|
||||
eprintln!(
|
||||
"stderr:\n{}",
|
||||
String::from_utf8_lossy(&zig_cmd_output.stderr)
|
||||
);
|
||||
panic!("zig call failed with status {:?}", zig_cmd_output.status);
|
||||
}
|
||||
|
||||
assert!(output.stdout.is_empty(), "{:#?}", output);
|
||||
assert!(output.stderr.is_empty(), "{:#?}", output);
|
||||
assert!(zig_cmd_output.stdout.is_empty(), "{:#?}", zig_cmd_output);
|
||||
assert!(zig_cmd_output.stderr.is_empty(), "{:#?}", zig_cmd_output);
|
||||
}
|
||||
|
|
|
@ -3578,3 +3578,135 @@ fn list_walk_from_even_prefix_sum() {
|
|||
i64
|
||||
);
|
||||
}
|
||||
|
||||
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
|
||||
mod pattern_match {
|
||||
#[cfg(feature = "gen-llvm")]
|
||||
use crate::helpers::llvm::assert_evals_to;
|
||||
|
||||
#[cfg(feature = "gen-wasm")]
|
||||
use crate::helpers::wasm::assert_evals_to;
|
||||
|
||||
use super::RocList;
|
||||
|
||||
#[test]
|
||||
fn unary_exact_size_match() {
|
||||
assert_evals_to!(
|
||||
r#"
|
||||
helper = \l -> when l is
|
||||
[] -> 1u8
|
||||
_ -> 2u8
|
||||
|
||||
[ helper [], helper [{}] ]
|
||||
"#,
|
||||
RocList::from_slice(&[1, 2]),
|
||||
RocList<u8>
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn many_exact_size_match() {
|
||||
assert_evals_to!(
|
||||
r#"
|
||||
helper = \l -> when l is
|
||||
[] -> 1u8
|
||||
[_] -> 2u8
|
||||
[_, _] -> 3u8
|
||||
[_, _, _] -> 4u8
|
||||
_ -> 5u8
|
||||
|
||||
[ helper [], helper [{}], helper [{}, {}], helper [{}, {}, {}], helper [{}, {}, {}, {}] ]
|
||||
"#,
|
||||
RocList::from_slice(&[1, 2, 3, 4, 5]),
|
||||
RocList<u8>
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ranged_matches_head() {
|
||||
assert_evals_to!(
|
||||
r#"
|
||||
helper = \l -> when l is
|
||||
[] -> 1u8
|
||||
[A] -> 2u8
|
||||
[A, A, ..] -> 3u8
|
||||
[A, B, ..] -> 4u8
|
||||
[B, ..] -> 5u8
|
||||
|
||||
[
|
||||
helper [],
|
||||
helper [A],
|
||||
helper [A, A], helper [A, A, A], helper [A, A, B], helper [A, A, B, A],
|
||||
helper [A, B], helper [A, B, A], helper [A, B, B], helper [A, B, A, B],
|
||||
helper [B], helper [B, A], helper [B, B], helper [B, A, B, B],
|
||||
]
|
||||
"#,
|
||||
RocList::from_slice(&[
|
||||
1, //
|
||||
2, //
|
||||
3, 3, 3, 3, //
|
||||
4, 4, 4, 4, //
|
||||
5, 5, 5, 5, //
|
||||
]),
|
||||
RocList<u8>
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ranged_matches_tail() {
|
||||
assert_evals_to!(
|
||||
r#"
|
||||
helper = \l -> when l is
|
||||
[] -> 1u8
|
||||
[A] -> 2u8
|
||||
[.., A, A] -> 3u8
|
||||
[.., B, A] -> 4u8
|
||||
[.., B] -> 5u8
|
||||
|
||||
[
|
||||
helper [],
|
||||
helper [A],
|
||||
helper [A, A], helper [A, A, A], helper [B, A, A], helper [A, B, A, A],
|
||||
helper [B, A], helper [A, B, A], helper [B, B, A], helper [B, A, B, A],
|
||||
helper [B], helper [A, B], helper [B, B], helper [B, A, B, B],
|
||||
]
|
||||
"#,
|
||||
RocList::from_slice(&[
|
||||
1, //
|
||||
2, //
|
||||
3, 3, 3, 3, //
|
||||
4, 4, 4, 4, //
|
||||
5, 5, 5, 5, //
|
||||
]),
|
||||
RocList<u8>
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bind_variables() {
|
||||
assert_evals_to!(
|
||||
r#"
|
||||
helper : List U16 -> U16
|
||||
helper = \l -> when l is
|
||||
[] -> 1
|
||||
[x] -> x
|
||||
[.., w, x, y, z] -> w * x * y * z
|
||||
[x, y, ..] -> x * y
|
||||
|
||||
[
|
||||
helper [],
|
||||
helper [5],
|
||||
helper [3, 5], helper [3, 5, 7],
|
||||
helper [2, 3, 5, 7], helper [11, 2, 3, 5, 7], helper [13, 11, 2, 3, 5, 7],
|
||||
]
|
||||
"#,
|
||||
RocList::from_slice(&[
|
||||
1, //
|
||||
5, //
|
||||
15, 15, //
|
||||
210, 210, 210, //
|
||||
]),
|
||||
RocList<u16>
|
||||
)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -4104,3 +4104,25 @@ fn issue_4348() {
|
|||
RocStr
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
|
||||
fn issue_4349() {
|
||||
assert_evals_to!(
|
||||
indoc!(
|
||||
r#"
|
||||
ir = Ok ""
|
||||
res =
|
||||
Result.try ir \_ ->
|
||||
when ir is
|
||||
Ok "" -> Ok ""
|
||||
_ -> Err Bad
|
||||
when res is
|
||||
Ok _ -> "okay"
|
||||
_ -> "FAIL"
|
||||
"#
|
||||
),
|
||||
RocStr::from("okay"),
|
||||
RocStr
|
||||
);
|
||||
}
|
||||
|
|
|
@ -12,6 +12,7 @@ use roc_load::{EntryPoint, ExecutionMode, LoadConfig, Threading};
|
|||
use roc_mono::ir::OptLevel;
|
||||
use roc_region::all::LineInfo;
|
||||
use roc_reporting::report::RenderTarget;
|
||||
use roc_utils::zig;
|
||||
use target_lexicon::Triple;
|
||||
|
||||
#[cfg(feature = "gen-llvm-wasm")]
|
||||
|
@ -340,11 +341,11 @@ fn annotate_with_debug_info<'ctx>(
|
|||
let app_bc_file = "/tmp/roc-debugir.bc";
|
||||
|
||||
// write the ll code to a file, so we can modify it
|
||||
module.print_to_file(&app_ll_file).unwrap();
|
||||
module.print_to_file(app_ll_file).unwrap();
|
||||
|
||||
// run the debugir https://github.com/vaivaswatha/debugir tool
|
||||
match Command::new("debugir")
|
||||
.args(&["-instnamer", app_ll_file])
|
||||
.args(["-instnamer", app_ll_file])
|
||||
.output()
|
||||
{
|
||||
Ok(_) => {}
|
||||
|
@ -360,11 +361,11 @@ fn annotate_with_debug_info<'ctx>(
|
|||
}
|
||||
|
||||
Command::new("llvm-as")
|
||||
.args(&[app_dbg_ll_file, "-o", app_bc_file])
|
||||
.args([app_dbg_ll_file, "-o", app_bc_file])
|
||||
.output()
|
||||
.unwrap();
|
||||
|
||||
inkwell::module::Module::parse_bitcode_from_path(&app_bc_file, context).unwrap()
|
||||
inkwell::module::Module::parse_bitcode_from_path(app_bc_file, context).unwrap()
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
|
@ -456,11 +457,9 @@ fn llvm_module_to_wasm_file(
|
|||
.write_to_file(llvm_module, file_type, &test_a_path)
|
||||
.unwrap();
|
||||
|
||||
use std::process::Command;
|
||||
|
||||
let output = Command::new(&crate::helpers::zig_executable())
|
||||
let output = zig()
|
||||
.current_dir(dir_path)
|
||||
.args(&[
|
||||
.args([
|
||||
"wasm-ld",
|
||||
concat!(env!("OUT_DIR"), "/wasm_test_platform.wasm"),
|
||||
test_a_path.to_str().unwrap(),
|
||||
|
|
|
@ -10,14 +10,6 @@ pub mod llvm;
|
|||
#[cfg(any(feature = "gen-wasm", feature = "gen-llvm-wasm"))]
|
||||
pub mod wasm;
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn zig_executable() -> String {
|
||||
match std::env::var("ROC_ZIG") {
|
||||
Ok(path) => path,
|
||||
Err(_) => "zig".into(),
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub(crate) fn src_hash(src: &str) -> u64 {
|
||||
use std::collections::hash_map::DefaultHasher;
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue