Mirror of https://github.com/roc-lang/roc.git, synced 2025-09-26 13:29:12 +00:00

Commit 50326f7e0b
Merge branch 'trunk' of github.com:rtfeldman/roc into wasm-code-gen-fixes-i3448

184 changed files with 7004 additions and 3437 deletions

.github/workflows/nightly_macos_x86_64.yml vendored | 11
@@ -24,6 +24,12 @@ jobs:
run: zig version
- name: Install LLVM
run: brew install llvm@13
# build has to be done before tests #2572
- name: build release
uses: actions-rs/cargo@v1
with:
command: build
args: --release --locked
- name: execute rust tests
uses: actions-rs/cargo@v1
with:
@@ -31,11 +37,6 @@ jobs:
args: --locked # no --release yet until #3166 is fixed
- name: write version to file
run: ./ci/write_version.sh
- name: build release
uses: actions-rs/cargo@v1
with:
command: build
args: --release --locked
- name: package release
run: ./ci/package_release.sh roc_darwin_x86_64.tar.gz
- name: Create pre-release with test_archive.tar.gz
.gitignore vendored | 2
@@ -8,6 +8,8 @@ zig-cache
*.obj
*.tmp
*.wasm
*.exe
*.pdb

# llvm human-readable output
*.ll
AUTHORS | 3
@@ -86,3 +86,6 @@ Jared Forsyth <jared@jaredforsyth.com>
Patrick Kilgore <git@pck.email>
Marten/Qqwy <w-m@wmcode.nl>
Christoph Rüßler <christoph.ruessler@mailbox.org>
Ralf Engbers <raleng@users.noreply.github.com>
Mostly Void <7rat13@gmail.com>
Luis F. Gutierrez <luis@gutierrezhiller.com>
@@ -71,6 +71,8 @@ To build the compiler, you need these installed:
* `libxkbcommon` - macOS seems to have it already; on Ubuntu or Debian you can get it with `apt-get install libxkbcommon-dev`
* On Debian/Ubuntu `sudo apt-get install pkg-config`
* LLVM, see below for version
* [rust](https://rustup.rs/)
* Also run `cargo install bindgen` after installing rust. You may need to open a new terminal.

To run the test suite (via `cargo test`), you additionally need to install:
Cargo.lock generated | 104
@ -104,7 +104,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "arena-pool"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
|
||||
[[package]]
|
||||
name = "arrayvec"
|
||||
|
@ -3268,7 +3268,7 @@ checksum = "f1382d1f0a252c4bf97dc20d979a2fdd05b024acd7c2ed0f7595d7817666a157"
|
|||
|
||||
[[package]]
|
||||
name = "repl_test"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"indoc",
|
||||
"lazy_static",
|
||||
|
@ -3316,7 +3316,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_alias_analysis"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"morphic_lib",
|
||||
"roc_collections",
|
||||
|
@ -3355,7 +3355,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_build"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"bumpalo",
|
||||
"inkwell 0.1.0",
|
||||
|
@ -3380,6 +3380,7 @@ dependencies = [
|
|||
"roc_target",
|
||||
"roc_types",
|
||||
"roc_unify",
|
||||
"roc_utils",
|
||||
"serde_json",
|
||||
"target-lexicon",
|
||||
"tempfile",
|
||||
|
@ -3388,21 +3389,21 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_builtins"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"dunce",
|
||||
"fs_extra",
|
||||
"lazy_static",
|
||||
"roc_collections",
|
||||
"roc_module",
|
||||
"roc_region",
|
||||
"roc_target",
|
||||
"roc_utils",
|
||||
"tempfile",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "roc_can"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"bitvec 1.0.1",
|
||||
"bumpalo",
|
||||
|
@ -3483,7 +3484,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_collections"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"bitvec 1.0.1",
|
||||
"bumpalo",
|
||||
|
@ -3495,7 +3496,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_constrain"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"arrayvec 0.7.2",
|
||||
"roc_can",
|
||||
|
@ -3509,11 +3510,11 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_debug_flags"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
|
||||
[[package]]
|
||||
name = "roc_derive"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"bumpalo",
|
||||
"roc_can",
|
||||
|
@ -3528,7 +3529,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_derive_key"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"roc_can",
|
||||
"roc_collections",
|
||||
|
@ -3540,7 +3541,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_docs"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"bumpalo",
|
||||
"peg",
|
||||
|
@ -3564,7 +3565,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_docs_cli"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"clap 3.2.11",
|
||||
"roc_docs",
|
||||
|
@ -3572,7 +3573,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_editor"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"arrayvec 0.7.2",
|
||||
"bumpalo",
|
||||
|
@ -3621,11 +3622,11 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_error_macros"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
|
||||
[[package]]
|
||||
name = "roc_exhaustive"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"roc_collections",
|
||||
"roc_module",
|
||||
|
@ -3634,7 +3635,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_fmt"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"bumpalo",
|
||||
"indoc",
|
||||
|
@ -3649,7 +3650,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_gen_dev"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"bumpalo",
|
||||
"capstone",
|
||||
|
@ -3674,7 +3675,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_gen_llvm"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"bumpalo",
|
||||
"inkwell 0.1.0",
|
||||
|
@ -3694,7 +3695,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_gen_wasm"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"bitvec 1.0.1",
|
||||
"bumpalo",
|
||||
|
@ -3709,7 +3710,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_glue"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"bumpalo",
|
||||
"clap 3.2.11",
|
||||
|
@ -3739,7 +3740,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_highlight"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"peg",
|
||||
"roc_code_markup",
|
||||
|
@ -3747,11 +3748,11 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_ident"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
|
||||
[[package]]
|
||||
name = "roc_late_solve"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"bumpalo",
|
||||
"roc_can",
|
||||
|
@ -3766,7 +3767,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_linker"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"bincode",
|
||||
"bumpalo",
|
||||
|
@ -3786,7 +3787,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_load"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"bumpalo",
|
||||
"roc_builtins",
|
||||
|
@ -3801,7 +3802,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_load_internal"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"bumpalo",
|
||||
"crossbeam",
|
||||
|
@ -3835,7 +3836,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_module"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"bumpalo",
|
||||
"lazy_static",
|
||||
|
@ -3849,7 +3850,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_mono"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"bumpalo",
|
||||
"hashbrown 0.12.2",
|
||||
|
@ -3858,6 +3859,7 @@ dependencies = [
|
|||
"roc_collections",
|
||||
"roc_debug_flags",
|
||||
"roc_derive",
|
||||
"roc_derive_key",
|
||||
"roc_error_macros",
|
||||
"roc_exhaustive",
|
||||
"roc_late_solve",
|
||||
|
@ -3873,7 +3875,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_parse"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"bumpalo",
|
||||
"criterion",
|
||||
|
@ -3890,7 +3892,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_problem"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"roc_collections",
|
||||
"roc_module",
|
||||
|
@ -3901,14 +3903,14 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_region"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"static_assertions",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "roc_repl_cli"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"bumpalo",
|
||||
"const_format",
|
||||
|
@ -3935,7 +3937,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_repl_eval"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"bumpalo",
|
||||
"roc_builtins",
|
||||
|
@ -3955,7 +3957,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_repl_expect"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"bumpalo",
|
||||
"indoc",
|
||||
|
@ -3985,7 +3987,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_repl_wasm"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"bumpalo",
|
||||
"console_error_panic_hook",
|
||||
|
@ -4007,7 +4009,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_reporting"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"bumpalo",
|
||||
"distance",
|
||||
|
@ -4037,7 +4039,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_solve"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"arrayvec 0.7.2",
|
||||
"bumpalo",
|
||||
|
@ -4070,7 +4072,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_solve_problem"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"roc_can",
|
||||
"roc_collections",
|
||||
|
@ -4083,7 +4085,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_std"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"arrayvec 0.7.2",
|
||||
"static_assertions",
|
||||
|
@ -4091,7 +4093,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_target"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"strum",
|
||||
"strum_macros",
|
||||
|
@ -4100,7 +4102,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_test_utils"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"pretty_assertions",
|
||||
"remove_dir_all 0.7.0",
|
||||
|
@ -4108,7 +4110,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_types"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"bumpalo",
|
||||
"roc_collections",
|
||||
|
@ -4121,7 +4123,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_unify"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"bitflags",
|
||||
"roc_can",
|
||||
|
@ -4135,7 +4137,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "roc_utils"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"snafu",
|
||||
]
|
||||
|
@ -4784,7 +4786,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "test_derive"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"bumpalo",
|
||||
"indoc",
|
||||
|
@ -4810,7 +4812,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "test_gen"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"bumpalo",
|
||||
"criterion",
|
||||
|
@ -4850,7 +4852,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "test_mono"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"bumpalo",
|
||||
"indoc",
|
||||
|
@ -4867,7 +4869,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "test_mono_macros"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
|
@ -5186,7 +5188,7 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
|
|||
|
||||
[[package]]
|
||||
name = "wasi_libc_sys"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen"
|
||||
|
|
FAQ.md | 179
@@ -1,6 +1,36 @@
# Frequently Asked Questions
|
||||
|
||||
## Where did the name Roc come from?
|
||||
|
||||
<img width="128" alt="The Roc logo, an origami bird" src="https://user-images.githubusercontent.com/1094080/92188927-e61ebd00-ee2b-11ea-97ef-2fc88e0094b0.png">
|
||||
|
||||
The Roc programming language is named after [a mythical bird](<https://en.wikipedia.org/wiki/Roc_(mythology)>).
|
||||
|
||||
That’s why the logo is a bird. It’s specifically an [_origami_ bird](https://youtu.be/9gni1t1k1uY) as an homage
|
||||
to [Elm](https://elm-lang.org/)’s tangram logo.
|
||||
|
||||
Roc is a direct descendant of Elm. The languages are similar, but not the same.
|
||||
[Origami](https://en.wikipedia.org/wiki/Origami) likewise has similarities to [tangrams](https://en.wikipedia.org/wiki/Tangram), although they are not the same.
|
||||
Both involve making a surprising variety of things
|
||||
from simple primitives. [_Folds_](<https://en.wikipedia.org/wiki/Fold_(higher-order_function)>)
|
||||
are also common in functional programming.
|
||||
|
||||
The logo was made by tracing triangles onto a photo of a physical origami bird.
|
||||
It’s made of triangles because triangles are a foundational primitive in
|
||||
computer graphics.
|
||||
|
||||
The name was chosen because it makes for a three-letter file extension, it means something
|
||||
fantastical, and it has incredible potential for puns. Here are some different ways to spell it:
|
||||
|
||||
- **Roc** - traditional
|
||||
- **roc** - low-key
|
||||
- **ROC** - [YELLING](https://package.elm-lang.org/packages/elm/core/latest/String#toUpper)
|
||||
- **Röc** - [metal 🤘](https://en.wikipedia.org/wiki/Metal_umlaut)
|
||||
|
||||
Fun fact: "roc" translates to 鹏 in Chinese, [which means](https://www.mdbg.net/chinese/dictionary?page=worddict&wdrst=0&wdqb=%E9%B9%8F) "a large fabulous bird."
|
||||
|
||||
# Why make a new editor instead of making an LSP plugin for VSCode, Vim or Emacs?
|
||||
|
||||
The Roc editor is one of the key areas where we want to innovate. Constraining ourselves to a plugin for existing editors would severely limit our possibilities for innovation.
|
||||
|
||||
A key part of our editor will be the use of plugins that are shipped with libraries. Think of a regex visualizer, parser debugger, or color picker. For library authors, it would be most convenient to write these plugins in Roc. Trying to dynamically load library plugins (written in Roc) in for example VSCode seems very difficult.
|
||||
|
@@ -8,7 +38,7 @@ A key part of our editor will be the use of plugins that are shipped with librar
## Is there syntax highlighting for Vim/Emacs/VS Code or a LSP?
|
||||
|
||||
Not currently. Although they will presumably exist someday, while Roc is in the early days there's actually a conscious
|
||||
effort to focus on the Roc Editor *instead of* adding Roc support to other editors - specifically in order to give the Roc
|
||||
effort to focus on the Roc Editor _instead of_ adding Roc support to other editors - specifically in order to give the Roc
|
||||
Editor the best possible chance at kickstarting a virtuous cycle of plugin authorship.
|
||||
|
||||
This is an unusual approach, but there are more details in [this 2021 interview](https://youtu.be/ITrDd6-PbvY?t=212).
|
||||
|
@@ -68,16 +98,18 @@ Both of these would make revising code riskier across the entire language, which
Another option would be to define that function equality always returns `False`. So both of these would evaluate
|
||||
to `False`:
|
||||
|
||||
* `(\x -> x + 1) == (\x -> 1 + x)`
|
||||
* `(\x -> x + 1) == (\x -> x + 1)`
|
||||
- `(\x -> x + 1) == (\x -> 1 + x)`
|
||||
- `(\x -> x + 1) == (\x -> x + 1)`
|
||||
|
||||
This makes function equality effectively useless, while still technically allowing it. It has some other downsides:
|
||||
* Now if you put a function inside a record, using `==` on that record will still type-check, but it will then return `False`. This could lead to bugs if you didn't realize you had accidentally put a function in there - for example, because you were actually storing a different type (e.g. an opaque type) and didn't realize it had a function inside it.
|
||||
* If you put a function (or a value containing a function) into a `Dict` or `Set`, you'll never be able to get it out again. This is a common problem with [NaN](https://en.wikipedia.org/wiki/NaN), which is also defined not to be equal to itself.
|
||||
|
||||
- Now if you put a function inside a record, using `==` on that record will still type-check, but it will then return `False`. This could lead to bugs if you didn't realize you had accidentally put a function in there - for example, because you were actually storing a different type (e.g. an opaque type) and didn't realize it had a function inside it.
|
||||
- If you put a function (or a value containing a function) into a `Dict` or `Set`, you'll never be able to get it out again. This is a common problem with [NaN](https://en.wikipedia.org/wiki/NaN), which is also defined not to be equal to itself.
|
||||
|
||||
The first of these problems could be addressed by having function equality always return `True` instead of `False` (since that way it would not affect other fields' equality checks in a record), but that design has its own problems:
|
||||
* Although function equality is still useless, `(\x -> x + 1) == (\x -> x)` returns `True`. Even if it didn't lead to bugs in practice, this would certainly be surprising and confusing to beginners.
|
||||
* Now if you put several different functions into a `Dict` or `Set`, only one of them will be kept; the others will be discarded or overwritten. This could cause bugs if a value stored a function internally, and then other functions relied on that internal function for correctness.
|
||||
|
||||
- Although function equality is still useless, `(\x -> x + 1) == (\x -> x)` returns `True`. Even if it didn't lead to bugs in practice, this would certainly be surprising and confusing to beginners.
|
||||
- Now if you put several different functions into a `Dict` or `Set`, only one of them will be kept; the others will be discarded or overwritten. This could cause bugs if a value stored a function internally, and then other functions relied on that internal function for correctness.
|
||||
|
||||
Each of these designs makes Roc a language that's some combination of more error-prone, more confusing, and more
|
||||
brittle to change. Disallowing function equality at compile time eliminates all of these drawbacks.
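Concretely, a minimal sketch of what disallowing function equality looks like, reusing the expression above:

```
(\x -> x + 1) == (\x -> x + 1) # compile-time error in Roc: functions cannot be compared with ==
```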
|
||||
|
@@ -107,12 +139,12 @@ To describe something that's neither an optional field nor an operation that can
more descriptive than something like `Maybe`. For example, if a record type has an `artist` field, but the artist
|
||||
information may not be available, compare these three alternative ways to represent that:
|
||||
|
||||
* `artist : Maybe Artist`
|
||||
* `artist : [Loading, Loaded Artist]`
|
||||
* `artist : [Unspecified, Specified Artist]`
|
||||
- `artist : Maybe Artist`
|
||||
- `artist : [Loading, Loaded Artist]`
|
||||
- `artist : [Unspecified, Specified Artist]`
|
||||
|
||||
All three versions tell us that we might not have access to an `Artist`. However, the `Maybe` version doesn't
|
||||
tell us why that might be. The `Loading`/`Loaded` version tells us we don't have one *yet*, because we're
|
||||
tell us why that might be. The `Loading`/`Loaded` version tells us we don't have one _yet_, because we're
|
||||
still loading it, whereas the `Unspecified`/`Specified` version tells us we don't have one and shouldn't expect
|
||||
to have one later if we wait, because it wasn't specified.
|
||||
|
||||
|
@@ -135,8 +167,8 @@ _Since this is a FAQ answer, I'm going to assume familiarity with higher-kinded
A valuable aspect of Roc's type system is that it has decidable [principal](https://en.wikipedia.org/wiki/Principal_type)
|
||||
type inference. This means that:
|
||||
|
||||
* At compile time, Roc can correctly infer the types for every expression in a program, even if you don't annotate any of the types.
|
||||
* This inference always infers the most general type possible; you couldn't possibly add a valid type annotation that would make the type more flexible than the one that Roc would infer if you deleted the annotation.
|
||||
- At compile time, Roc can correctly infer the types for every expression in a program, even if you don't annotate any of the types.
|
||||
- This inference always infers the most general type possible; you couldn't possibly add a valid type annotation that would make the type more flexible than the one that Roc would infer if you deleted the annotation.
|
||||
|
||||
It's been proven that any type system which supports either [higher-kinded polymorphism](https://www.cl.cam.ac.uk/~jdy22/papers/lightweight-higher-kinded-polymorphism.pdf) or [arbitrary-rank types](https://www.microsoft.com/en-us/research/wp-content/uploads/2016/02/putting.pdf) cannot have decidable
|
||||
principal type inference. With either of those features in the language, there will be situations where the compiler
|
||||
|
@@ -152,9 +184,9 @@ sacrificing principal type inference to attain, so let's focus on the trade-offs
|
||||
Supporting Rank-2 types in Roc has been discussed before, but it has several important downsides:
|
||||
|
||||
* It would increase the complexity of the language.
|
||||
* It would make some compiler error messages more confusing (e.g. they might mention `forall` because that was the most general type that could be inferred, even if that wasn't helpful or related to the actual problem).
|
||||
* It would substantially increase the complexity of the type checker, which would necessarily slow it down.
|
||||
- It would increase the complexity of the language.
|
||||
- It would make some compiler error messages more confusing (e.g. they might mention `forall` because that was the most general type that could be inferred, even if that wasn't helpful or related to the actual problem).
|
||||
- It would substantially increase the complexity of the type checker, which would necessarily slow it down.
|
||||
|
||||
No implementation of Rank-2 types can remove any of these downsides. Thus far, we've been able to come up
|
||||
with sufficiently nice APIs that only require Rank-1 types, and we haven't seen a really compelling use case
|
||||
|
@@ -201,9 +233,9 @@ Culturally, to support HKP is to take a side, and to decline to support it is al
|
||||
Given this, language designers have three options:
|
||||
|
||||
* Have HKP and have Monad in the standard library. Embrace them and build a culture and ecosystem around them.
|
||||
* Have HKP and don't have Monad in the standard library. An alternate standard library built around monads will inevitably emerge, and both the community and ecosystem will divide themselves along pro-monad and anti-monad lines.
|
||||
* Don't have HKP; build a culture and ecosystem around other things.
|
||||
- Have HKP and have Monad in the standard library. Embrace them and build a culture and ecosystem around them.
|
||||
- Have HKP and don't have Monad in the standard library. An alternate standard library built around monads will inevitably emerge, and both the community and ecosystem will divide themselves along pro-monad and anti-monad lines.
|
||||
- Don't have HKP; build a culture and ecosystem around other things.
|
||||
|
||||
Considering that these are the only three options, I think the best choice for Roc—not only on a technical
|
||||
level, but on a cultural level as well—is to make it clear that the plan is for Roc never to support HKP.
|
||||
|
@@ -224,30 +256,30 @@ the result would be broken code and sadness.
|
||||
So why does Roc have the specific syntax changes it does? Here are some brief explanations:
|
||||
|
||||
* `#` instead of `--` for comments - this allows [hashbang](https://senthilnayagan.medium.com/shebang-hashbang-10966b8f28a8)s to work without needing special syntax. That isn't a use case Elm supports, but it is one Roc is designed to support.
|
||||
* `{}` instead of `()` for the unit type - Elm has both, and they can both be used as a unit type. Since `{}` has other uses in the type system, but `()` doesn't, I consider it redundant and took it out.
|
||||
* `when`...`is` instead of `case`...`of` - I predict it will be easier for beginners to pick up, because usually the way I explain `case`...`of` to beginners is by saying the words "when" and "is" out loud - e.g. "when `color` is `Red`, it runs this first branch; when `color` is `Blue`, it runs this other branch..."
|
||||
* `:` instead of `=` for record field definitions (e.g. `{ foo: bar }` where Elm syntax would be `{ foo = bar }`): I like `=` being reserved for definitions, and `:` is the most popular alternative.
|
||||
* Backpassing syntax - since Roc is designed to be used for use cases like command-line apps, shell scripts, and servers, I expect chained effects to come up a lot more often than they do in Elm. I think backpassing is nice for those use cases, similarly to how `do` notation is nice for them in Haskell.
|
||||
* Tag unions instead of Elm's custom types (aka algebraic data types). This isn't just a syntactic change; tag unions are mainly in Roc because they can facilitate errors being accumulated across chained effects, which (as noted a moment ago) I expect to be a lot more common in Roc than in Elm. If you have tag unions, you don't really need a separate language feature for algebraic data types, since closed tag unions essentially work the same way - aside from not giving you a way to selectively expose variants or define phantom types. Roc's opaque types language feature covers those use cases instead.
|
||||
* No `::` operator, or `::` pattern matching for lists. Both of these are for the same reason: an Elm `List` is a linked list, so both prepending to it and removing an element from the front are very cheap operations. In contrast, a Roc `List` is a flat array, so both prepending to it and removing an element from the front are among the most expensive operations you can possibly do with it! To get good performance, this usage pattern should be encouraged in Elm and discouraged in Roc. Since having special syntax would encourage it, it would not be good for Roc to have that syntax!
|
||||
* No `<|` operator. In Elm, I almost exclusively found myself wanting to use this in conjunction with anonymous functions (e.g. `foo <| \bar -> ...`) or conditionals (e.g. `foo <| if bar then ...`). In Roc you can do both of these without the `<|`. That means the main remaining use for `<|` is to reduce parentheses, but I tend to think `|>` is better at that (or else the parens are fine), so after the other syntactic changes, I considered `<|` an unnecessary stylistic alternative to `|>` or parens.
|
||||
* The `|>` operator passes the expression before the `|>` as the *first* argument to the function after the `|>` instead of as the last argument. See the section on currying for details on why this works this way.
|
||||
* `:` instead of `type alias` - I like to avoid reserved keywords for terms that are desirable in userspace, so that people don't have to name things `typ` because `type` is a reserved keyword, or `clazz` because `class` is reserved. (I couldn't think of satisfactory alternatives for `as`, `when`, `is`, or `if` other than different reserved keywords. I could see an argument for `then`—and maybe even `is`—being replaced with a `->` or `=>` or something, but I don't anticipate missing either of those words much in userspace. `then` is used in JavaScript promises, but I think there are several better names for that function.)
|
||||
* No underscores in variable names - I've seen Elm beginners reflexively use `snake_case` over `camelCase` and then need to un-learn the habit after the compiler accepted it. I'd rather have the compiler give feedback that this isn't the way to do it in Roc, and suggest a camelCase alternative. I've also seen underscores used for lazy naming, e.g. `foo` and then `foo_`. If lazy naming is the goal, `foo2` is just as concise as `foo_`, but `foo3` is more concise than `foo__`. So in a way, removing `_` is a forcing function for improved laziness. (Of course, more descriptive naming would be even better.)
|
||||
* Trailing commas - I've seen people walk away (in some cases physically!) from Elm as soon as they saw the leading commas in collection literals. While I think they've made a mistake by not pushing past this aesthetic preference to give the language a chance, I also would prefer not put them in a position to make such a mistake in the first place. Secondarily, while I'm personally fine with either style, between the two I prefer the look of trailing commas.
|
||||
* The `!` unary prefix operator. I didn't want to have a `Basics` module (more on that in a moment), and without `Basics`, this would either need to be called fully-qualified (`Bool.not`) or else a module import of `Bool.{ not }` would be necessary. Both seemed less nice than supporting the `!` prefix that's common to so many widely-used languages, especially when we already have a unary prefix operator of `-` for negation (e.g. `-x`).
|
||||
* `!=` for the inequality operator (instead of Elm's `/=`) - this one pairs more naturally with the `!` prefix operator and is also very common in other languages.
|
||||
- `#` instead of `--` for comments - this allows [hashbang](https://senthilnayagan.medium.com/shebang-hashbang-10966b8f28a8)s to work without needing special syntax. That isn't a use case Elm supports, but it is one Roc is designed to support.
|
||||
- `{}` instead of `()` for the unit type - Elm has both, and they can both be used as a unit type. Since `{}` has other uses in the type system, but `()` doesn't, I consider it redundant and took it out.
|
||||
- `when`...`is` instead of `case`...`of` - I predict it will be easier for beginners to pick up, because usually the way I explain `case`...`of` to beginners is by saying the words "when" and "is" out loud - e.g. "when `color` is `Red`, it runs this first branch; when `color` is `Blue`, it runs this other branch..."
|
||||
- `:` instead of `=` for record field definitions (e.g. `{ foo: bar }` where Elm syntax would be `{ foo = bar }`): I like `=` being reserved for definitions, and `:` is the most popular alternative.
|
||||
- Backpassing syntax - since Roc is designed to be used for use cases like command-line apps, shell scripts, and servers, I expect chained effects to come up a lot more often than they do in Elm. I think backpassing is nice for those use cases, similarly to how `do` notation is nice for them in Haskell.
|
||||
- Tag unions instead of Elm's custom types (aka algebraic data types). This isn't just a syntactic change; tag unions are mainly in Roc because they can facilitate errors being accumulated across chained effects, which (as noted a moment ago) I expect to be a lot more common in Roc than in Elm. If you have tag unions, you don't really need a separate language feature for algebraic data types, since closed tag unions essentially work the same way - aside from not giving you a way to selectively expose variants or define phantom types. Roc's opaque types language feature covers those use cases instead.
|
||||
- No `::` operator, or `::` pattern matching for lists. Both of these are for the same reason: an Elm `List` is a linked list, so both prepending to it and removing an element from the front are very cheap operations. In contrast, a Roc `List` is a flat array, so both prepending to it and removing an element from the front are among the most expensive operations you can possibly do with it! To get good performance, this usage pattern should be encouraged in Elm and discouraged in Roc. Since having special syntax would encourage it, it would not be good for Roc to have that syntax!
|
||||
- No `<|` operator. In Elm, I almost exclusively found myself wanting to use this in conjunction with anonymous functions (e.g. `foo <| \bar -> ...`) or conditionals (e.g. `foo <| if bar then ...`). In Roc you can do both of these without the `<|`. That means the main remaining use for `<|` is to reduce parentheses, but I tend to think `|>` is better at that (or else the parens are fine), so after the other syntactic changes, I considered `<|` an unnecessary stylistic alternative to `|>` or parens.
|
||||
- The `|>` operator passes the expression before the `|>` as the _first_ argument to the function after the `|>` instead of as the last argument. See the section on currying for details on why this works this way.
|
||||
- `:` instead of `type alias` - I like to avoid reserved keywords for terms that are desirable in userspace, so that people don't have to name things `typ` because `type` is a reserved keyword, or `clazz` because `class` is reserved. (I couldn't think of satisfactory alternatives for `as`, `when`, `is`, or `if` other than different reserved keywords. I could see an argument for `then`—and maybe even `is`—being replaced with a `->` or `=>` or something, but I don't anticipate missing either of those words much in userspace. `then` is used in JavaScript promises, but I think there are several better names for that function.)
|
||||
- No underscores in variable names - I've seen Elm beginners reflexively use `snake_case` over `camelCase` and then need to un-learn the habit after the compiler accepted it. I'd rather have the compiler give feedback that this isn't the way to do it in Roc, and suggest a camelCase alternative. I've also seen underscores used for lazy naming, e.g. `foo` and then `foo_`. If lazy naming is the goal, `foo2` is just as concise as `foo_`, but `foo3` is more concise than `foo__`. So in a way, removing `_` is a forcing function for improved laziness. (Of course, more descriptive naming would be even better.)
|
||||
- Trailing commas - I've seen people walk away (in some cases physically!) from Elm as soon as they saw the leading commas in collection literals. While I think they've made a mistake by not pushing past this aesthetic preference to give the language a chance, I also would prefer not put them in a position to make such a mistake in the first place. Secondarily, while I'm personally fine with either style, between the two I prefer the look of trailing commas.
|
||||
- The `!` unary prefix operator. I didn't want to have a `Basics` module (more on that in a moment), and without `Basics`, this would either need to be called fully-qualified (`Bool.not`) or else a module import of `Bool.{ not }` would be necessary. Both seemed less nice than supporting the `!` prefix that's common to so many widely-used languages, especially when we already have a unary prefix operator of `-` for negation (e.g. `-x`).
|
||||
- `!=` for the inequality operator (instead of Elm's `/=`) - this one pairs more naturally with the `!` prefix operator and is also very common in other languages.
|
||||
|
||||
Roc also has a different standard library from Elm. Some of the differences come down to platforms and applications (e.g. having `Task` in Roc's standard library wouldn't make sense), but others do not. Here are some brief explanations:
|
||||
|
||||
* No `Basics` module. I wanted to have a simple rule of "all modules in the standard library are imported by default, and so are their exposed types," and that's it. Given that I wanted the comparison operators (e.g. `<`) to work only on numbers, it ended up that having `Num` and `Bool` modules meant that almost nothing would be left for a `Basics` equivalent in Roc except `identity` and `Never`. The Roc type `[]` (empty tag union) is equivalent to `Never`, so that wasn't necessary, and I generally think that `identity` is a good concept but a sign of an incomplete API whenever its use comes up in practice. For example, instead of calling `|> List.filterMap identity` I'd rather have access to a more self-descriptive function like `|> List.dropNothings`. With `Num` and `Bool`, and without `identity` and `Never`, there was nothing left in `Basics`.
|
||||
* `Str` instead of `String` - after using the `str` type in Rust, I realized I had no issue whatsoever with the more concise name, especially since it was used in so many places (similar to `Msg` and `Cmd` in Elm) - so I decided to save a couple of letters.
|
||||
* No function composition operators - I stopped using these in Elm so long ago, at one point I forgot they were in the language! See the FAQ entry on currying for details about why.
|
||||
* No `Char`. What most people think of as a "character" is a rendered glyph. However, rendered glyphs are comprised of [grapheme clusters](https://stackoverflow.com/a/27331885), which are a variable number of Unicode code points - and there's no upper bound on how many code points there can be in a single cluster. In a world of emoji, I think this makes `Char` error-prone and it's better to have `Str` be the only first-class unit. For convenience when working with unicode code points (e.g. for performance-critical tasks like parsing), the single-quote syntax is sugar for the corresponding `U32` code point - for example, writing `'鹏'` is exactly the same as writing `40527`. Like Rust, you get a compiler error if you put something in single quotes that's not a valid [Unicode scalar value](http://www.unicode.org/glossary/#unicode_scalar_value).
|
||||
* No `Debug.log` - the editor can do a better job at this, or you can write `expect x != x` to see what `x` is when the expectation fails. Using the editor means your code doesn't change, and using `expect` gives a natural reminder to remove the debugging code before shipping: the build will fail.
|
||||
* No `Debug.todo` - instead you can write a type annotation with no implementation below it; the type checker will treat it normally, but attempting to use the value will cause a runtime exception. This is a feature I've often wanted in Elm, because I like prototyping APIs by writing out the types only, but then when I want the compiler to type-check them for me, I end up having to add `Debug.todo` in various places.
|
||||
* No `Maybe`. See the "Why doesn't Roc have a `Maybe`/`Option`/`Optional` type" FAQ question
|
||||
- No `Basics` module. I wanted to have a simple rule of "all modules in the standard library are imported by default, and so are their exposed types," and that's it. Given that I wanted the comparison operators (e.g. `<`) to work only on numbers, it ended up that having `Num` and `Bool` modules meant that almost nothing would be left for a `Basics` equivalent in Roc except `identity` and `Never`. The Roc type `[]` (empty tag union) is equivalent to `Never`, so that wasn't necessary, and I generally think that `identity` is a good concept but a sign of an incomplete API whenever its use comes up in practice. For example, instead of calling `|> List.filterMap identity` I'd rather have access to a more self-descriptive function like `|> List.dropNothings`. With `Num` and `Bool`, and without `identity` and `Never`, there was nothing left in `Basics`.
|
||||
- `Str` instead of `String` - after using the `str` type in Rust, I realized I had no issue whatsoever with the more concise name, especially since it was used in so many places (similar to `Msg` and `Cmd` in Elm) - so I decided to save a couple of letters.
|
||||
- No function composition operators - I stopped using these in Elm so long ago, at one point I forgot they were in the language! See the FAQ entry on currying for details about why.
|
||||
- No `Char`. What most people think of as a "character" is a rendered glyph. However, rendered glyphs are comprised of [grapheme clusters](https://stackoverflow.com/a/27331885), which are a variable number of Unicode code points - and there's no upper bound on how many code points there can be in a single cluster. In a world of emoji, I think this makes `Char` error-prone and it's better to have `Str` be the only first-class unit. For convenience when working with unicode code points (e.g. for performance-critical tasks like parsing), the single-quote syntax is sugar for the corresponding `U32` code point - for example, writing `'鹏'` is exactly the same as writing `40527`. Like Rust, you get a compiler error if you put something in single quotes that's not a valid [Unicode scalar value](http://www.unicode.org/glossary/#unicode_scalar_value).
|
||||
- No `Debug.log` - the editor can do a better job at this, or you can write `expect x != x` to see what `x` is when the expectation fails. Using the editor means your code doesn't change, and using `expect` gives a natural reminder to remove the debugging code before shipping: the build will fail.
|
||||
- No `Debug.todo` - instead you can write a type annotation with no implementation below it; the type checker will treat it normally, but attempting to use the value will cause a runtime exception. This is a feature I've often wanted in Elm, because I like prototyping APIs by writing out the types only, but then when I want the compiler to type-check them for me, I end up having to add `Debug.todo` in various places.
|
||||
- No `Maybe`. See the "Why doesn't Roc have a `Maybe`/`Option`/`Optional` type" FAQ question
|
||||
|
||||
## Why aren't Roc functions curried by default?
|
||||
|
||||
|
@@ -259,15 +291,15 @@ by default" for the sake of brevity.
|
||||
As I see it, currying has one major upside and several major downsides. The upside:
|
||||
|
||||
* It makes function calls more concise in some cases.
|
||||
- It makes function calls more concise in some cases.
|
||||
|
||||
The downsides:
|
||||
|
||||
* It lowers error message quality, because there can no longer be an error for "function called with too few arguments." (Calling a function with fewer arguments is always valid in curried functions; the error you get instead will unavoidably be some other sort of type mismatch, and it will be up to you to figure out that the real problem was that you forgot an argument.)
|
||||
* It makes the `|>` operator more error-prone in some cases.
|
||||
* It makes higher-order function calls need more parentheses in some cases.
|
||||
* It significantly increases the language's learning curve. (More on this later.)
|
||||
* It facilitates pointfree function composition. (More on why this is listed as a downside later.)
|
||||
- It lowers error message quality, because there can no longer be an error for "function called with too few arguments." (Calling a function with fewer arguments is always valid in curried functions; the error you get instead will unavoidably be some other sort of type mismatch, and it will be up to you to figure out that the real problem was that you forgot an argument.)
|
||||
- It makes the `|>` operator more error-prone in some cases.
|
||||
- It makes higher-order function calls need more parentheses in some cases.
|
||||
- It significantly increases the language's learning curve. (More on this later.)
|
||||
- It facilitates pointfree function composition. (More on why this is listed as a downside later.)
|
||||
|
||||
There's also a downside that it would make runtime performance of compiled programs worse by default,
|
||||
but I assume it would be possible to optimize that away at the cost of slightly longer compile times.
|
||||
|
@@ -284,7 +316,7 @@ In Roc, this code produces "Hello, World!"
|> Str.concat "!"
|
||||
```
|
||||
|
||||
This is because Roc's `|>` operator uses the expression before the `|>` as the *first* argument to the function
|
||||
This is because Roc's `|>` operator uses the expression before the `|>` as the _first_ argument to the function
|
||||
after it. For functions where both arguments have the same type, but it's obvious which argument goes where (e.g.
|
||||
`Str.concat "Hello, " "World!"`, `List.concat [1, 2] [3, 4]`), this works out well. Another example would
|
||||
be `|> Num.sub 1`, which subtracts 1 from whatever came before the `|>`.
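As a minimal sketch of that behavior, using only the functions mentioned above:

```
"Hello, World" |> Str.concat "!" # same as Str.concat "Hello, World" "!", which is "Hello, World!"
4 |> Num.sub 1 # same as Num.sub 4 1, which is 3
```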
|
||||
|
@@ -318,7 +350,7 @@ This is a fundamental design tension. One argument order works well with `|>` (a
today) and with passing anonymous functions to higher-order functions, and the other works well with currying.
|
||||
It's impossible to have both.
|
||||
|
||||
Of note, one possible design is to have currying while also having `|>` pass the *last* argument instead of the first.
|
||||
Of note, one possible design is to have currying while also having `|>` pass the _last_ argument instead of the first.
|
||||
This is what Elm does, and it makes pipeline-friendliness and curry-friendliness the same thing. However, it also
|
||||
means that either `|> Str.concat "!"` would add the `"!"` to the front of the string, or else `Str.concat`'s
|
||||
arguments would have to be flipped - meaning that `Str.concat "Hello, World" "!"` would evaluate to `"!Hello, World"`.
|
||||
|
@@ -338,9 +370,9 @@ first pure functional programming language.
|
||||
Here was my experience teaching currying:
|
||||
|
||||
* The only way to avoid teaching it is to refuse to explain why multi-argument functions have multiple `->`s in them. (If you don't explain it, at least one student will ask about it - and many if not all of the others will wonder.)
|
||||
* Teaching currying properly takes a solid chunk of time, because it requires explaining partial application, explaining how curried functions facilitate partial application, how function signatures accurately reflect that they're curried, and going through examples for all of these.
|
||||
* Even after doing all this, and iterating on my approach each time to try to explain it more effectively than I had the time before, I'd estimate that under 50% of the class ended up actually understanding currying. I consistently heard that in practice it only "clicked" for most people after spending significantly more time writing code with it.
|
||||
- The only way to avoid teaching it is to refuse to explain why multi-argument functions have multiple `->`s in them. (If you don't explain it, at least one student will ask about it - and many if not all of the others will wonder.)
|
||||
- Teaching currying properly takes a solid chunk of time, because it requires explaining partial application, explaining how curried functions facilitate partial application, how function signatures accurately reflect that they're curried, and going through examples for all of these.
|
||||
- Even after doing all this, and iterating on my approach each time to try to explain it more effectively than I had the time before, I'd estimate that under 50% of the class ended up actually understanding currying. I consistently heard that in practice it only "clicked" for most people after spending significantly more time writing code with it.
|
||||
|
||||
This is not the end of the world, especially because it's easy enough to think "okay, I still don't totally get this
|
||||
even after that explanation, but I can remember that function arguments are separated by `->` in this language
|
||||
|
@@ -396,10 +428,47 @@ Currying facilitates the antipattern of pointfree function composition, which I
Stacking up all these downsides of currying against the one upside of making certain function calls more concise,
|
||||
I concluded that it would be a mistake to have it in Roc.
|
||||
|
||||
## Why are both rust and zig used?
|
||||
## Will Roc ever have linear types, dependent types, refinement types, or uniqueness types?
|
||||
|
||||
At the start of the project, we did not know zig well and it was not production ready. The reason zig entered the project because it has many different backends (wasm, various assembly formats, llvm IR) and can create code with minimal dependencies
|
||||
The plan is for Roc to never have linear types, dependent types, refinement types, or uniqueness types.
|
||||
|
||||
Rust has much more overhead in terms of code size. It's objectively not a lot, but it's less with zig.
|
||||
Fast compile times are a primary goal for Roc, and a major downside of refinement types is an exponential increase in compile times. This rules out refinement types for Roc.
|
||||
|
||||
We think rust is a nicer language to work in for a project of this size. It has a type system that we're more familiar with, it has a package ecosystem and excellent tooling.
|
||||
If Roc were to have linear types or uniqueness types, they would move things that are currently behind-the-scenes performance optimizations into the type system. For them to be effective across the ecosystem, they couldn't really be opt-in; everyone would have to use them, even those for whom the current system of behind-the-scenes optimizations already met their performance needs without any added type system complexity. Since the overwhelming majority of use cases are expected to fall into that latter group, adding linear types or uniqueness types to Roc would be a net negative for the ecosystem.
|
||||
|
||||
Dependent types are too risky of a bet for Roc to take. They have been implemented in programming languages for three decades, and for at least half that time period, it has been easy to find predictions that dependent types will be the future of type systems. Much harder to find are success stories of complex applications built with dependent types, which realized benefits that significantly outweighed the substantial complexity of introducing value semantics to a type system.
|
||||
|
||||
Perhaps more success stories will emerge over time, but in the meantime it remains an open question whether dependent types are net beneficial in practice to application development. Further experimentation would be required to answer this question, and Roc is not the right language to do those experiments.
|
||||
|
||||
## Will Roc's compiler ever be self-hosted? (That is, will it ever be written in Roc?)
|
||||
|
||||
The plan is to never implement Roc's compiler in Roc.
|
||||
|
||||
The goal is for Roc's compiler to deliver the best user experience possible. Compiler performance is strongly influenced by how memory is used, and there are many performance benefits to be gained from using a systems language like Rust which offers more direct control over memory than Roc ever should.
|
||||
|
||||
Roc isn't trying to be the best possible language for high-performance compiler development, but it is trying to have a high-performance compiler. The best tool for that job is a language other than Roc, so that's what we're using!
|
||||
|
||||
## Why does Roc use the license it does?
|
||||
|
||||
The short explanation for why Roc is released under the [Universal Permissive License](https://opensource.org/licenses/UPL):
|
||||
|
||||
- Like [MIT](https://opensource.org/licenses/MIT), it's permissive and concise
|
||||
- Like [Apache2](https://opensource.org/licenses/Apache-2.0), it protects against contributors claiming software patents over contributed code after the fact (MIT and BSD do not include protections against this)
|
||||
- It's compatible with [GPLv2](https://opensource.org/licenses/GPL-2.0) (which [Apache2 is not](https://www.apache.org/licenses/GPL-compatibility.html))
|
||||
- It's one license, unlike "MIT or Apache2, at your choice" (which is how [Rust addressed the problem](https://internals.rust-lang.org/t/rationale-of-apache-dual-licensing/8952/4) of MIT not having patent protections but Apache2 not being GPLv2 compatible)
|
||||
- It's been approved by OSI, FSF, and Oracle's lawyers, so it has been not only vetted by three giants in the world of software licensing, but also three giants with competing interests - and they all approved it.
|
||||
|
||||
There's also [a longer explanation](https://github.com/rtfeldman/roc/issues/1199) with more detail about the motivation and thought process, if you're interested.
|
||||
|
||||
## Why does Roc use both Rust and Zig?
|
||||
|
||||
Roc's compiler has always been written in [Rust](https://www.rust-lang.org/). Roc's standard library was briefly written in Rust, but was soon rewritten in [Zig](https://ziglang.org/).
|
||||
|
||||
There were a few reasons for this rewrite.
|
||||
|
||||
1. We struggled to get Rust to emit LLVM bitcode in the format we needed, which is important so that LLVM can do whole-program optimizations across the standard library and compiled application.
|
||||
2. Since the standard library has to interact with raw generated machine code (or LLVM bitcode), the Rust code unavoidably needed `unsafe` annotations all over the place. This made one of Rust's biggest selling points inapplicable in this particular use case.
|
||||
3. Given that Rust's main selling points are inapplicable (its package ecosystem being another), Zig's much faster compile times are a welcome benefit.
|
||||
4. Zig has more tools for working in a memory-unsafe environment, such as reporting memory leaks in tests. These have been helpful in finding bugs that are out of scope for safe Rust.
|
||||
|
||||
The split of Rust for the compiler and Zig for the standard library has worked well so far, and there are no plans to change it.
|
||||
|
|
README.md | 117
@@ -1,118 +1,17 @@
# The Roc Programming Language
|
||||
# Work in progress!
|
||||
|
||||
Roc is a language for making delightful software.
|
||||
Roc is not ready for an 0.1 release yet, but we have a [Zulip chat](https://roc.zulipchat.com) where you can learn more about the project.
|
||||
|
||||
The [tutorial](TUTORIAL.md) is the best place to learn about how to use the language - it assumes no prior knowledge of Roc or similar languages. (If you already know [Elm](https://elm-lang.org/), then [Roc for Elm Programmers](https://github.com/rtfeldman/roc/blob/trunk/roc-for-elm-programmers.md) may be of interest.)
|
||||
If you'd like to get involved in contributing to the language, the Zulip chat is also the best place to get help with [good first issues](https://github.com/rtfeldman/roc/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22).
|
||||
|
||||
There's also a folder of [examples](https://github.com/rtfeldman/roc/tree/trunk/examples) - the [CLI form example](https://github.com/rtfeldman/roc/tree/trunk/examples/interactive/form.roc) in particular is a reasonable starting point to build on.
|
||||
# Sponsors
|
||||
|
||||
If you have a specific question, the [FAQ](FAQ.md) might have an answer, although [Roc Zulip chat](https://roc.zulipchat.com) is overall the best place to ask questions and get help! It's also where we discuss [ideas](https://roc.zulipchat.com/#narrow/stream/304641-ideas) for the language. If you want to get involved in contributing to the language, Zulip is also a great place to ask about good first projects.
|
||||
|
||||
## State of Roc
|
||||
|
||||
Roc is not ready for production yet. You are likely to encounter bugs. Publishing packages or documentation is not yet supported.
|
||||
Many programs can however be compiled correctly. Check out [examples](examples) and [examples/benchmarks](examples/benchmarks). There are minimal platforms for Rust, Zig, C, Swift and an HTTP server. We are hard at work to make programming in Roc a delightful experience!
|
||||
|
||||
## Getting started
|
||||
|
||||
- [Linux x86](getting_started/linux_x86.md)
|
||||
- [MacOS Apple Silicon](getting_started/macos_apple_silicon.md)
|
||||
- [MacOS x86](getting_started/macos_x86.md)
|
||||
- [Windows](getting_started/windows.md)
|
||||
- [Other](getting_started/other.md)
|
||||
|
||||
### Examples
|
||||
|
||||
Run examples as follows:
|
||||
```
cargo run examples/hello-world/main.roc
```
|
||||
Some examples like `examples/benchmarks/NQueens.roc` require input after running.
|
||||
For NQueens, input 10 in the terminal and press enter.
|
||||
|
||||
[examples/benchmarks](examples/benchmarks) contains larger examples.
|
||||
|
||||
**Tip:** when programming in Roc, we recommend executing `./roc check myproject/Foo.roc` before `./roc myproject/Foo.roc` or `./roc build myproject/Foo.roc`. `./roc check` can produce clear error messages in cases where building or running may panic.
|
||||
|
||||
## Sponsors
|
||||
|
||||
We are very grateful for our sponsors [NoRedInk](https://www.noredink.com/) and [rwx](https://www.rwx.com).
|
||||
We are very grateful to our sponsors [NoRedInk](https://www.noredink.com/), [rwx](https://www.rwx.com), and [Tweede golf](https://tweedegolf.nl/en).
|
||||
|
||||
[<img src="https://www.noredink.com/assets/logo-red-black-f6989d7567cf90b349409137595e99c52d036d755b4403d25528e0fd83a3b084.svg" height="60" alt="NoRedInk logo"/>](https://www.noredink.com/)
|
||||
|
||||
[<img src="https://www.rwx.com/build/_assets/rwx_banner_transparent_cropped-RYV7W2KL.svg" height="60" alt="rwx logo"/>](https://www.rwx.com)
|
||||
|
||||
[<img src="https://user-images.githubusercontent.com/1094080/183123052-856815b1-8cc9-410a-83b0-589f03613188.svg" height="60" alt="tweede golf logo"/>](https://tweedegolf.nl/en)
|
||||
|
||||
## Applications and Platforms
|
||||
|
||||
Applications are often built on a *framework.* Typically, both application and framework are written in the same language.
|
||||
* [Rails](https://rubyonrails.org/) applications are written in Ruby, and so is Rails.
|
||||
* [Angular](https://angularjs.org/) applications are written in TypeScript, and so is Angular.
|
||||
* [Phoenix](https://phoenixframework.org/) applications are written in Elixir, and so is Phoenix.
|
||||
|
||||
Some programs support plugins. Often the plugins are written in the same language as the underlying program.
|
||||
* [Webpack](https://webpack.js.org/) plugins are written in JavaScript, and so is Webpack.
|
||||
* [Eclipse](https://www.eclipse.org/ide/) plugins are written in Java, and so is Eclipse.
|
||||
* [Leiningen](https://leiningen.org/) plugins are written in Clojure, and so is Leiningen.
|
||||
|
||||
All of these can be considered examples of a platform/application relationship. There is an underlying platform, and many applications are built on top of it. (Plugins are a type of application in this sense.)
|
||||
|
||||
Sometimes, platforms and their applications are written in different languages.
|
||||
|
||||
* [Neovim](https://neovim.io/) is written in C for performance, and its plugins can be written in languages such as Python, JS, and Ruby.
|
||||
* [NGINX](https://www.nginx.com/) is written in C for performance, and its plugins can be written in a [subset of JavaScript](https://www.nginx.com/blog/introduction-nginscript/).
|
||||
* [Unity](https://unity.com/) is written in C++ for performance, and Unity applications (such as games) can be written in C#, Boo, or a JavaScript dialect called UnityScript.
|
||||
|
||||
Like in the previous examples, application authors building on these platforms get to use high-level languages with automatic memory management. They make no ergonomics sacrifices, and may not even be aware that the underlying platform is written in a lower-level language.
|
||||
|
||||
By using systems-level programming languages like C and C++, platform authors sacrifice development speed, but unlock the highest possible performance characteristics. This is a tradeoff many platform authors are happy to accept, for the sake of having applications built on their platforms run very fast.
|
||||
|
||||
## Roc's Design
|
||||
|
||||
Roc is designed to make the "systems-level platform, higher-level application" experience as nice as possible.
|
||||
|
||||
* **Application** authors code exclusively in Roc. It's a language designed for nice ergonomics. The syntax resembles Ruby or CoffeeScript, and it has a fast compiler with full type inference.
|
||||
* **Platform** authors code almost exclusively in a systems-level language like C, C++, Rust, Swift or [Zig](https://ziglang.org/), except for the thin Roc API they expose to application authors. Roc application code compiles to machine code, and production builds of Roc apps benefit from the same [LLVM](https://llvm.org/) optimizations that C++, Rust, Swift and Zig do. Roc application authors do not need to know this lower-level code exists; all they have to interact with is the platform's API, which is exposed as an ordinary Roc API.
|
||||
|
||||
Every Roc application is built on top of exactly one Roc platform. There is no such thing as a Roc application that runs without a platform, and there is no default platform. You must choose one!
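To make that concrete, here is a minimal sketch of an application that selects its platform in its module header. The platform path and the `Stdout.line` function are illustrative assumptions, not the exact contents of any example in this repository:

```
app "hello"
    packages { pf: "platform/main.roc" }
    imports [pf.Stdout]
    provides [main] to pf

# `Stdout.line` stands in for whatever effect the chosen platform exposes.
main = Stdout.line "Hello, World!"
```

Changing the `packages` entry is how an application switches to a different platform.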
|
||||
|
||||
The core Roc language and standard library include no I/O operations, which gives platform authors complete control over which effects they want to support. Some of the implications of this include:
|
||||
|
||||
* A high-performance build tool (or text editor) written in Rust can be a Roc platform with a strong plugin security model. For example, it could expose only operations allowing plugin authors to modify the contents of certain files, rather than allowing plugins arbitrary read/write access to the entire filesystem.
|
||||
* A VR or [Arduino](https://www.arduino.cc/) platform can expose uncommon I/O operations supported by that hardware, while omitting common I/O operations that are unsupported (such as reading keyboard input from a terminal that doesn't exist).
|
||||
* A high-performance Web server written in Rust can be a Roc platform where all I/O operations are implemented in terms of Streams or Observables rather than a more traditional asynchronous abstraction like Futures or Promises. This would mean all code in that platform's ecosystem would be necessarily built on a common streaming abstraction.
|
||||
|
||||
## Project Goals
|
||||
|
||||
Roc is in relatively early stages of development. It's currently possible to build both platforms and applications (see the [examples](https://github.com/rtfeldman/roc/tree/trunk/examples) folder for some examples that aren't particularly organized at the moment), although [documentation](https://github.com/rtfeldman/roc/tree/trunk/crates/compiler/builtins/roc) is in even earlier stages than the compiler itself.
|
||||
|
||||
Besides the above language design, a separate goal is for Roc to ship with an ambitiously boundary-pushing graphical editor. Not like "an IDE," but rather something that makes people say "I have never seen anything remotely like this outside of Bret Victor demos."
|
||||
|
||||
One of the reasons this editor is coupled with the language itself is to allow package authors to include custom editor tooling inside packages.
|
||||
|
||||
A trivial example: suppose I'm writing a Roc app for an Arduino platform. I install a platform-specific package for displaying text on a grid of LEDs. Because I've installed this package, at the call site where I call the function to specify the color of the text on the LEDs, my Roc editor displays an inline color picker. As I move a slider around to try out different colors, not only does my code change to reflect that value in realtime, but the physical LEDs in my room change color in realtime as well. As the application author, all I did to get that experience was to install the "text on an LED grid" package, nothing else.
|
||||
|
||||
The goal is for this to be one of the most trivial, bare minimum examples of what the editor experience would be like. Hopefully, people in the future will look back on this example and say "that's so embarrassingly basic; why didn't you talk about one of the *actually great* things in the seamless editor plugin ecosystem?"
|
||||
|
||||
Finally, some implementation goals:
|
||||
|
||||
* The web server for the package manager is written in Roc (with an underlying Rust platform for the web server, for example [warp](https://github.com/seanmonstar/warp)).
|
||||
* The editor plugins are written in Roc (with an underlying Rust platform for the editor itself, for example using [gfx-hal](https://github.com/gfx-rs/gfx)).
|
||||
* The CLI (for building Roc projects on CI platforms) has its user interface written in Roc (with an underlying Rust platform for fast compilation and basic CLI interactions).
|
||||
|
||||
It's an ambitious project! It'll take a long time to get where it's going, but hopefully it'll be worth the wait.
|
||||
|
||||
## Getting Involved
|
||||
|
||||
The number of people involved in Roc's development has been steadily increasing
|
||||
over time - which has been great, because it's meant we've been able to onboard
|
||||
people at a nice pace. (Most people who have contributed to Roc had previously
|
||||
never done anything with Rust and also never worked on a compiler, but we've
|
||||
been able to find beginner-friendly projects to get people up to speed gradually.)
|
||||
|
||||
If you're interested in getting involved, check out
|
||||
[CONTRIBUTING.md](https://github.com/rtfeldman/roc/blob/trunk/CONTRIBUTING.md)!
|
||||
|
||||
## Name and Logo
|
||||
|
||||
If you're curious about where the language's name and logo came from,
|
||||
[here's an explanation](https://github.com/rtfeldman/roc/blob/trunk/name-and-logo.md).
|
||||
If you or your employer would like to sponsor Roc's development, please [DM Richard Feldman on Zulip](https://roc.zulipchat.com/#narrow/pm-with/281383-user281383)!
|
||||
|
|
|
@ -2362,7 +2362,7 @@ pub mod test_constrain {
|
|||
\f -> (\a, b -> f b a)
|
||||
"#
|
||||
),
|
||||
"(a, b -> c) -> (b, a -> c)",
|
||||
"(a, b -> d) -> (b, a -> d)",
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -2400,7 +2400,7 @@ pub mod test_constrain {
|
|||
\{} -> x
|
||||
"#
|
||||
),
|
||||
"{}* -> Num *",
|
||||
"{}* -> Num a",
|
||||
)
|
||||
}
|
||||
|
||||
|
|
|
@ -13,7 +13,7 @@
|
|||
// use crate::pattern::{bindings_from_patterns, canonicalize_pattern, Pattern};
|
||||
// use crate::procedure::References;
|
||||
use roc_collections::all::{default_hasher, ImMap, MutMap, MutSet, SendMap};
|
||||
use roc_error_macros::{todo_abilities, todo_opaques};
|
||||
use roc_error_macros::{internal_error, todo_abilities};
|
||||
use roc_module::ident::Lowercase;
|
||||
use roc_module::symbol::Symbol;
|
||||
use roc_parse::ast::{self, CommentOrNewline, Defs, TypeDef, TypeHeader, ValueDef as AstValueDef};
|
||||
|
@ -21,6 +21,7 @@ use roc_parse::pattern::PatternType;
|
|||
use roc_problem::can::{Problem, RuntimeError, ShadowKind};
|
||||
use roc_region::all::{Loc, Region};
|
||||
use roc_types::subs::{VarStore, Variable};
|
||||
use roc_types::types::AliasKind;
|
||||
use std::collections::HashMap;
|
||||
use std::fmt::Debug;
|
||||
use ven_graph::{strongly_connected_components, topological_sort_into_groups};
|
||||
|
@ -274,7 +275,7 @@ fn to_pending_def<'a>(
|
|||
}
|
||||
}
|
||||
|
||||
Type(TypeDef::Opaque { .. }) => todo_opaques!(),
|
||||
Type(TypeDef::Opaque { .. }) => internal_error!("opaques not implemented"),
|
||||
Type(TypeDef::Ability { .. }) => todo_abilities!(),
|
||||
|
||||
Value(AstValueDef::Expect { .. }) => todo!(),
|
||||
|
@ -341,6 +342,7 @@ fn from_pending_alias<'a>(
|
|||
typ: symbol,
|
||||
variable_region: loc_lowercase.region,
|
||||
variable_name: loc_lowercase.value.clone(),
|
||||
alias_kind: AliasKind::Structural,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
@ -373,7 +375,12 @@ fn from_pending_alias<'a>(
|
|||
|
||||
scope.add_alias(env.pool, symbol, named, annotation_id);
|
||||
} else {
|
||||
env.problem(Problem::CyclicAlias(symbol, name.region, vec![]));
|
||||
env.problem(Problem::CyclicAlias(
|
||||
symbol,
|
||||
name.region,
|
||||
vec![],
|
||||
AliasKind::Structural,
|
||||
));
|
||||
return output;
|
||||
}
|
||||
} else {
|
||||
|
|
|
@ -8,7 +8,7 @@ use roc_can::num::{
|
|||
finish_parsing_base, finish_parsing_float, finish_parsing_num, ParsedNumResult,
|
||||
};
|
||||
use roc_collections::all::BumpMap;
|
||||
use roc_error_macros::todo_opaques;
|
||||
use roc_error_macros::internal_error;
|
||||
use roc_module::symbol::{Interns, Symbol};
|
||||
use roc_parse::ast::{StrLiteral, StrSegment};
|
||||
use roc_parse::pattern::PatternType;
|
||||
|
@ -272,7 +272,7 @@ pub fn to_pattern2<'a>(
|
|||
}
|
||||
}
|
||||
|
||||
OpaqueRef(..) => todo_opaques!(),
|
||||
OpaqueRef(..) => internal_error!("opaques not implemented"),
|
||||
|
||||
Apply(tag, patterns) => {
|
||||
let can_patterns = PoolVec::with_capacity(patterns.len() as u32, env.pool);
|
||||
|
|
|
@ -7,7 +7,7 @@ use roc_error_macros::todo_abilities;
|
|||
use roc_module::ident::{Ident, Lowercase, TagName, Uppercase};
|
||||
use roc_module::symbol::Symbol;
|
||||
use roc_region::all::{Loc, Region};
|
||||
use roc_types::types::{Problem, RecordField};
|
||||
use roc_types::types::{AliasKind, Problem, RecordField};
|
||||
use roc_types::{subs::Variable, types::ErrorType};
|
||||
|
||||
use crate::lang::env::Env;
|
||||
|
@ -793,6 +793,7 @@ fn to_type_apply<'a>(
|
|||
region,
|
||||
alias_needs: alias.targs.len() as u8,
|
||||
type_got: args.len() as u8,
|
||||
alias_kind: AliasKind::Structural,
|
||||
});
|
||||
return error;
|
||||
}
|
||||
|
|
|
@ -1,20 +1,21 @@
|
|||
use bumpalo::Bump;
|
||||
use roc_load::{LoadedModule, Threading};
|
||||
use roc_load::{ExecutionMode, LoadConfig, LoadedModule, Threading};
|
||||
use roc_target::TargetInfo;
|
||||
use std::path::Path;
|
||||
|
||||
pub fn load_module(src_file: &Path, threading: Threading) -> LoadedModule {
|
||||
let subs_by_module = Default::default();
|
||||
|
||||
let arena = Bump::new();
|
||||
let loaded = roc_load::load_and_typecheck(
|
||||
&arena,
|
||||
src_file.to_path_buf(),
|
||||
subs_by_module,
|
||||
TargetInfo::default_x86_64(),
|
||||
roc_reporting::report::RenderTarget::ColorTerminal,
|
||||
let load_config = LoadConfig {
|
||||
target_info: TargetInfo::default_x86_64(), // editor only needs type info, so this is unused
|
||||
render: roc_reporting::report::RenderTarget::ColorTerminal,
|
||||
threading,
|
||||
);
|
||||
exec_mode: ExecutionMode::Check,
|
||||
};
|
||||
|
||||
let arena = Bump::new();
|
||||
let loaded =
|
||||
roc_load::load_and_typecheck(&arena, src_file.to_path_buf(), subs_by_module, load_config);
|
||||
|
||||
match loaded {
|
||||
Ok(x) => x,
|
||||
|
|
|
@ -5,7 +5,7 @@ use roc_build::{
|
|||
};
|
||||
use roc_builtins::bitcode;
|
||||
use roc_collections::VecMap;
|
||||
use roc_load::{Expectations, LoadingProblem, Threading};
|
||||
use roc_load::{EntryPoint, ExecutionMode, Expectations, LoadConfig, LoadingProblem, Threading};
|
||||
use roc_module::symbol::{Interns, ModuleId};
|
||||
use roc_mono::ir::OptLevel;
|
||||
use roc_reporting::report::RenderTarget;
|
||||
|
@ -55,14 +55,18 @@ pub fn build_file<'a>(
|
|||
// Step 1: compile the app and generate the .o file
|
||||
let subs_by_module = Default::default();
|
||||
|
||||
let load_config = LoadConfig {
|
||||
target_info,
|
||||
// TODO: expose this from CLI?
|
||||
render: RenderTarget::ColorTerminal,
|
||||
threading,
|
||||
exec_mode: ExecutionMode::Executable,
|
||||
};
|
||||
let loaded = roc_load::load_and_monomorphize(
|
||||
arena,
|
||||
app_module_path.clone(),
|
||||
subs_by_module,
|
||||
target_info,
|
||||
// TODO: expose this from CLI?
|
||||
RenderTarget::ColorTerminal,
|
||||
threading,
|
||||
load_config,
|
||||
)?;
|
||||
|
||||
use target_lexicon::Architecture;
|
||||
|
@ -74,36 +78,37 @@ pub fn build_file<'a>(
|
|||
// > Non-Emscripten WebAssembly hasn't implemented __builtin_return_address
|
||||
//
|
||||
// and zig does not currently emit `.a` webassembly static libraries
|
||||
let host_extension = if emit_wasm {
|
||||
let (host_extension, app_extension, extension) = {
|
||||
use roc_target::OperatingSystem::*;
|
||||
|
||||
match roc_target::OperatingSystem::from(target.operating_system) {
|
||||
Wasi => {
|
||||
if matches!(opt_level, OptLevel::Development) {
|
||||
"wasm"
|
||||
("wasm", "wasm", Some("wasm"))
|
||||
} else {
|
||||
"zig"
|
||||
("zig", "bc", Some("wasm"))
|
||||
}
|
||||
} else {
|
||||
"o"
|
||||
};
|
||||
let app_extension = if emit_wasm {
|
||||
if matches!(opt_level, OptLevel::Development) {
|
||||
"wasm"
|
||||
} else {
|
||||
"bc"
|
||||
}
|
||||
} else {
|
||||
"o"
|
||||
Unix => ("o", "o", None),
|
||||
Windows => ("obj", "obj", Some("exe")),
|
||||
}
|
||||
};
|
||||
|
||||
let cwd = app_module_path.parent().unwrap();
|
||||
let mut binary_path = cwd.join(&*loaded.output_path); // TODO should join ".exe" on Windows
|
||||
let mut binary_path = cwd.join(&*loaded.output_path);
|
||||
|
||||
if emit_wasm {
|
||||
binary_path.set_extension("wasm");
|
||||
if let Some(extension) = extension {
|
||||
binary_path.set_extension(extension);
|
||||
}
|
||||
|
||||
let host_input_path = cwd
|
||||
.join(&*loaded.platform_path)
|
||||
let host_input_path = if let EntryPoint::Executable { platform_path, .. } = &loaded.entry_point
|
||||
{
|
||||
cwd.join(platform_path)
|
||||
.with_file_name("host")
|
||||
.with_extension(host_extension);
|
||||
.with_extension(host_extension)
|
||||
} else {
|
||||
unreachable!();
|
||||
};
|
||||
|
||||
// TODO this should probably be moved before load_and_monomorphize.
|
||||
// To do this we will need to preprocess files just for their exported symbols.
|
||||
|
@ -307,8 +312,11 @@ pub fn build_file<'a>(
|
|||
host_input_path.as_path().to_str().unwrap(),
|
||||
app_o_file.to_str().unwrap(),
|
||||
];
|
||||
|
||||
let str_host_obj_path = bitcode::get_builtins_host_obj_path();
|
||||
|
||||
if matches!(opt_level, OptLevel::Development) {
|
||||
inputs.push(bitcode::BUILTINS_HOST_OBJ_PATH);
|
||||
inputs.push(&str_host_obj_path);
|
||||
}
|
||||
|
||||
let (mut child, _) = // TODO use lld
|
||||
|
@ -438,15 +446,15 @@ pub fn check_file(
|
|||
// Step 1: compile the app and generate the .o file
|
||||
let subs_by_module = Default::default();
|
||||
|
||||
let mut loaded = roc_load::load_and_typecheck(
|
||||
arena,
|
||||
roc_file_path,
|
||||
subs_by_module,
|
||||
let load_config = LoadConfig {
|
||||
target_info,
|
||||
// TODO: expose this from CLI?
|
||||
RenderTarget::ColorTerminal,
|
||||
render: RenderTarget::ColorTerminal,
|
||||
threading,
|
||||
)?;
|
||||
exec_mode: ExecutionMode::Check,
|
||||
};
|
||||
let mut loaded =
|
||||
roc_load::load_and_typecheck(arena, roc_file_path, subs_by_module, load_config)?;
|
||||
|
||||
let buf = &mut String::with_capacity(1024);
|
||||
|
||||
|
|
|
@ -10,7 +10,7 @@ use roc_error_macros::{internal_error, user_error};
|
|||
use roc_gen_llvm::llvm::build::LlvmBackendMode;
|
||||
use roc_gen_llvm::run_roc::RocCallResult;
|
||||
use roc_gen_llvm::run_roc_dylib;
|
||||
use roc_load::{Expectations, LoadingProblem, Threading};
|
||||
use roc_load::{ExecutionMode, Expectations, LoadConfig, LoadingProblem, Threading};
|
||||
use roc_module::symbol::{Interns, ModuleId};
|
||||
use roc_mono::ir::OptLevel;
|
||||
use roc_repl_expect::run::{expect_mono_module_to_dylib, roc_dev_expect};
|
||||
|
@ -336,11 +336,7 @@ pub fn test(matches: &ArgMatches, triple: Triple) -> io::Result<i32> {
|
|||
let path = Path::new(filename);
|
||||
|
||||
// Spawn the root task
|
||||
let path = path.canonicalize().unwrap_or_else(|err| {
|
||||
use io::ErrorKind::*;
|
||||
|
||||
match err.kind() {
|
||||
NotFound => {
|
||||
if !path.exists() {
|
||||
let path_string = path.to_string_lossy();
|
||||
|
||||
// TODO these should use roc_reporting to display nicer error messages.
|
||||
|
@ -356,11 +352,6 @@ pub fn test(matches: &ArgMatches, triple: Triple) -> io::Result<i32> {
|
|||
|
||||
process::exit(1);
|
||||
}
|
||||
_ => {
|
||||
todo!("TODO Gracefully handle opening {:?} - {:?}", path, err);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
let arena = &arena;
|
||||
let target = &triple;
|
||||
|
@ -370,15 +361,15 @@ pub fn test(matches: &ArgMatches, triple: Triple) -> io::Result<i32> {
|
|||
// Step 1: compile the app and generate the .o file
|
||||
let subs_by_module = Default::default();
|
||||
|
||||
let loaded = roc_load::load_and_monomorphize(
|
||||
arena,
|
||||
path,
|
||||
subs_by_module,
|
||||
let load_config = LoadConfig {
|
||||
target_info,
|
||||
// TODO: expose this from CLI?
|
||||
roc_reporting::report::RenderTarget::ColorTerminal,
|
||||
render: roc_reporting::report::RenderTarget::ColorTerminal,
|
||||
threading,
|
||||
)
|
||||
exec_mode: ExecutionMode::Test,
|
||||
};
|
||||
let loaded =
|
||||
roc_load::load_and_monomorphize(arena, path.to_path_buf(), subs_by_module, load_config)
|
||||
.unwrap();
|
||||
|
||||
let mut loaded = loaded;
|
||||
|
@ -439,10 +430,8 @@ pub fn test(matches: &ArgMatches, triple: Triple) -> io::Result<i32> {
|
|||
31 // red
|
||||
};
|
||||
|
||||
println!();
|
||||
|
||||
println!(
|
||||
"\x1B[{failed_color}m{failed}\x1B[39m failed and \x1B[32m{passed}\x1B[39m passed in {} ms.\n",
|
||||
"\n\x1B[{failed_color}m{failed}\x1B[39m failed and \x1B[32m{passed}\x1B[39m passed in {} ms.\n",
|
||||
total_time.as_millis(),
|
||||
);
|
||||
|
||||
|
@ -509,11 +498,7 @@ pub fn build(
|
|||
let path = Path::new(filename);
|
||||
|
||||
// Spawn the root task
|
||||
let path = path.canonicalize().unwrap_or_else(|err| {
|
||||
use io::ErrorKind::*;
|
||||
|
||||
match err.kind() {
|
||||
NotFound => {
|
||||
if !path.exists() {
|
||||
let path_string = path.to_string_lossy();
|
||||
|
||||
// TODO these should use roc_reporting to display nicer error messages.
|
||||
|
@ -529,17 +514,12 @@ pub fn build(
|
|||
|
||||
process::exit(1);
|
||||
}
|
||||
_ => {
|
||||
todo!("TODO Gracefully handle opening {:?} - {:?}", path, err);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
let target_valgrind = matches.is_present(FLAG_VALGRIND);
|
||||
let res_binary_path = build_file(
|
||||
&arena,
|
||||
&triple,
|
||||
path,
|
||||
path.to_path_buf(),
|
||||
opt_level,
|
||||
emit_debug_info,
|
||||
emit_timings,
|
||||
|
@ -752,14 +732,18 @@ fn roc_run<'a, I: IntoIterator<Item = &'a OsStr>>(
|
|||
// since the process is about to exit anyway.
|
||||
std::mem::forget(arena);
|
||||
|
||||
if cfg!(target_family = "unix") {
|
||||
#[cfg(target_family = "unix")]
|
||||
{
|
||||
use std::os::unix::ffi::OsStrExt;
|
||||
|
||||
run_with_wasmer(
|
||||
generated_filename,
|
||||
args.into_iter().map(|os_str| os_str.as_bytes()),
|
||||
);
|
||||
} else {
|
||||
}
|
||||
|
||||
#[cfg(not(target_family = "unix"))]
|
||||
{
|
||||
run_with_wasmer(
|
||||
generated_filename,
|
||||
args.into_iter().map(|os_str| {
|
||||
|
@ -776,6 +760,7 @@ fn roc_run<'a, I: IntoIterator<Item = &'a OsStr>>(
|
|||
}
|
||||
}
|
||||
|
||||
#[cfg(target_family = "unix")]
|
||||
fn make_argv_envp<'a, I: IntoIterator<Item = S>, S: AsRef<OsStr>>(
|
||||
arena: &'a Bump,
|
||||
executable: &ExecutableFile,
|
||||
|
@ -909,11 +894,28 @@ impl ExecutableFile {
|
|||
let path_cstring = CString::new(path.as_os_str().as_bytes()).unwrap();
|
||||
libc::execve(path_cstring.as_ptr().cast(), argv.as_ptr(), envp.as_ptr())
|
||||
}
|
||||
|
||||
#[cfg(all(target_family = "windows"))]
|
||||
ExecutableFile::OnDisk(_, path) => {
|
||||
use std::process::Command;
|
||||
|
||||
let _ = argv;
|
||||
let _ = envp;
|
||||
|
||||
let mut command = Command::new(path);
|
||||
|
||||
let output = command.output().unwrap();
|
||||
|
||||
println!("{}", String::from_utf8_lossy(&output.stdout));
|
||||
|
||||
std::process::exit(0)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// with Expect
|
||||
#[cfg(target_family = "unix")]
|
||||
unsafe fn roc_run_native_debug(
|
||||
executable: ExecutableFile,
|
||||
argv: &[*const c_char],
|
||||
|
@ -1053,35 +1055,75 @@ fn roc_run_executable_file_path(binary_bytes: &mut [u8]) -> std::io::Result<Exec
|
|||
Ok(ExecutableFile::OnDisk(temp_dir, app_path_buf))
|
||||
}
|
||||
|
||||
#[cfg(all(target_family = "windows"))]
|
||||
fn roc_run_executable_file_path(binary_bytes: &mut [u8]) -> std::io::Result<ExecutableFile> {
|
||||
use std::fs::OpenOptions;
|
||||
use std::io::Write;
|
||||
|
||||
let temp_dir = tempfile::tempdir()?;
|
||||
|
||||
// We have not found a way to use a virtual file on non-Linux OSes.
|
||||
// Hence we fall back to just writing the file to the file system, and using that file.
|
||||
let app_path_buf = temp_dir.path().join("roc_app_binary.exe");
|
||||
let mut file = OpenOptions::new()
|
||||
.create(true)
|
||||
.write(true)
|
||||
//.mode(0o777) // create the file as executable
|
||||
.open(&app_path_buf)?;
|
||||
|
||||
file.write_all(binary_bytes)?;
|
||||
|
||||
// We store the TempDir in this variant alongside the path to the executable,
|
||||
// so that the TempDir doesn't get dropped until after we're done with the path.
|
||||
// If we didn't do that, then the tempdir would potentially get deleted by the
|
||||
// TempDir's Drop impl before the file had been executed.
|
||||
Ok(ExecutableFile::OnDisk(temp_dir, app_path_buf))
|
||||
}
|
||||
|
||||
/// Run on the native OS (not on wasm)
|
||||
#[cfg(not(target_family = "unix"))]
|
||||
fn roc_run_native<I: IntoIterator<Item = S>, S: AsRef<OsStr>>(
|
||||
_arena: Bump, // This should be passed an owned value, not a reference, so we can usefully mem::forget it!
|
||||
arena: Bump, // This should be passed an owned value, not a reference, so we can usefully mem::forget it!
|
||||
opt_level: OptLevel,
|
||||
_args: I,
|
||||
_binary_bytes: &mut [u8],
|
||||
binary_bytes: &mut [u8],
|
||||
_expectations: VecMap<ModuleId, Expectations>,
|
||||
_interns: Interns,
|
||||
) -> io::Result<i32> {
|
||||
todo!("TODO support running roc programs on non-UNIX targets");
|
||||
// let mut cmd = std::process::Command::new(&binary_path);
|
||||
use bumpalo::collections::CollectIn;
|
||||
|
||||
// // Run the compiled app
|
||||
// let exit_status = cmd
|
||||
// .spawn()
|
||||
// .unwrap_or_else(|err| panic!("Failed to run app after building it: {:?}", err))
|
||||
// .wait()
|
||||
// .expect("TODO gracefully handle block_on failing when `roc` spawns a subprocess for the compiled app");
|
||||
unsafe {
|
||||
let executable = roc_run_executable_file_path(binary_bytes)?;
|
||||
|
||||
// // `roc [FILE]` exits with the same status code as the app it ran.
|
||||
// //
|
||||
// // If you want to know whether there were compilation problems
|
||||
// // via status code, use either `roc build` or `roc check` instead!
|
||||
// match exit_status.code() {
|
||||
// Some(code) => Ok(code),
|
||||
// None => {
|
||||
// todo!("TODO gracefully handle the `roc [FILE]` subprocess terminating with a signal.");
|
||||
// }
|
||||
// }
|
||||
// TODO forward the arguments
|
||||
// let (argv_cstrings, envp_cstrings) = make_argv_envp(&arena, &executable, args);
|
||||
let argv_cstrings = bumpalo::vec![ in &arena; CString::default()];
|
||||
let envp_cstrings = bumpalo::vec![ in &arena; CString::default()];
|
||||
|
||||
let argv: bumpalo::collections::Vec<*const c_char> = argv_cstrings
|
||||
.iter()
|
||||
.map(|s| s.as_ptr())
|
||||
.chain([std::ptr::null()])
|
||||
.collect_in(&arena);
|
||||
|
||||
let envp: bumpalo::collections::Vec<*const c_char> = envp_cstrings
|
||||
.iter()
|
||||
.map(|s| s.as_ptr())
|
||||
.chain([std::ptr::null()])
|
||||
.collect_in(&arena);
|
||||
|
||||
match opt_level {
|
||||
OptLevel::Development => {
|
||||
// roc_run_native_debug(executable, &argv, &envp, expectations, interns)
|
||||
todo!()
|
||||
}
|
||||
OptLevel::Normal | OptLevel::Size | OptLevel::Optimize => {
|
||||
roc_run_native_fast(executable, &argv, &envp);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(1)
|
||||
}
|
||||
|
||||
#[cfg(feature = "run-wasm32")]
|
||||
|
|
|
@ -351,6 +351,11 @@ pub fn root_dir() -> PathBuf {
|
|||
path.pop();
|
||||
path.pop();
|
||||
|
||||
// running cargo with --target will put us in the target dir
|
||||
if path.ends_with("target") {
|
||||
path.pop();
|
||||
}
|
||||
|
||||
path
|
||||
}
|
||||
|
||||
|
|
|
@ -3,7 +3,7 @@ authors = ["The Roc Contributors"]
|
|||
edition = "2021"
|
||||
license = "UPL-1.0"
|
||||
name = "roc_alias_analysis"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
|
||||
[dependencies]
|
||||
morphic_lib = {path = "../../vendor/morphic_lib"}
|
||||
|
|
|
@ -131,7 +131,7 @@ fn bytes_as_ascii(bytes: &[u8]) -> String {
|
|||
|
||||
pub fn spec_program<'a, I>(
|
||||
opt_level: OptLevel,
|
||||
entry_point: roc_mono::ir::EntryPoint<'a>,
|
||||
opt_entry_point: Option<roc_mono::ir::EntryPoint<'a>>,
|
||||
procs: I,
|
||||
) -> Result<morphic_lib::Solutions>
|
||||
where
|
||||
|
@ -221,6 +221,7 @@ where
|
|||
m.add_func(func_name, spec)?;
|
||||
}
|
||||
|
||||
if let Some(entry_point) = opt_entry_point {
|
||||
// the entry point wrapper
|
||||
let roc_main_bytes = func_name_bytes_help(
|
||||
entry_point.symbol,
|
||||
|
@ -234,6 +235,7 @@ where
|
|||
build_entry_point(entry_point.layout, roc_main, &host_exposed_functions)?;
|
||||
let entry_point_name = FuncName(ENTRY_POINT_NAME);
|
||||
m.add_func(entry_point_name, entry_point_function)?;
|
||||
}
|
||||
|
||||
for union_layout in type_definitions {
|
||||
let type_name_bytes = recursive_tag_union_name_bytes(&union_layout).as_bytes();
|
||||
|
@ -264,8 +266,10 @@ where
|
|||
let mut p = ProgramBuilder::new();
|
||||
p.add_mod(MOD_APP, main_module)?;
|
||||
|
||||
if opt_entry_point.is_some() {
|
||||
let entry_point_name = FuncName(ENTRY_POINT_NAME);
|
||||
p.add_entry_point(EntryPointName(ENTRY_POINT_NAME), MOD_APP, entry_point_name)?;
|
||||
}
|
||||
|
||||
p.build()?
|
||||
};
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "arena-pool"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
authors = ["The Roc Contributors"]
|
||||
license = "UPL-1.0"
|
||||
repository = "https://github.com/rtfeldman/roc"
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "roc_build"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
authors = ["The Roc Contributors"]
|
||||
license = "UPL-1.0"
|
||||
edition = "2021"
|
||||
|
@ -26,6 +26,7 @@ roc_gen_dev = { path = "../gen_dev", default-features = false }
|
|||
roc_reporting = { path = "../../reporting" }
|
||||
roc_error_macros = { path = "../../error_macros" }
|
||||
roc_std = { path = "../../roc_std", default-features = false }
|
||||
roc_utils = { path = "../../utils" }
|
||||
bumpalo = { version = "3.8.0", features = ["collections"] }
|
||||
libloading = "0.7.1"
|
||||
tempfile = "3.2.0"
|
||||
|
|
|
@ -3,6 +3,7 @@ use libloading::{Error, Library};
|
|||
use roc_builtins::bitcode;
|
||||
use roc_error_macros::internal_error;
|
||||
use roc_mono::ir::OptLevel;
|
||||
use roc_utils::get_lib_path;
|
||||
use std::collections::HashMap;
|
||||
use std::env;
|
||||
use std::io;
|
||||
|
@ -66,12 +67,13 @@ pub fn link(
|
|||
|
||||
fn find_zig_str_path() -> PathBuf {
|
||||
// First try using the lib path relative to the executable location.
|
||||
let exe_relative_str_path = std::env::current_exe()
|
||||
.ok()
|
||||
.and_then(|path| Some(path.parent()?.join("lib").join("str.zig")));
|
||||
if let Some(exe_relative_str_path) = exe_relative_str_path {
|
||||
if std::path::Path::exists(&exe_relative_str_path) {
|
||||
return exe_relative_str_path;
|
||||
let lib_path_opt = get_lib_path();
|
||||
|
||||
if let Some(lib_path) = lib_path_opt {
|
||||
let zig_str_path = lib_path.join("str.zig");
|
||||
|
||||
if std::path::Path::exists(&zig_str_path) {
|
||||
return zig_str_path;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -87,7 +89,7 @@ fn find_zig_str_path() -> PathBuf {
|
|||
return zig_str_path;
|
||||
}
|
||||
|
||||
panic!("cannot find `str.zig`. Launch me from either the root of the roc repo or one level down(roc/examples, roc/cli...)")
|
||||
panic!("cannot find `str.zig`. Check the source code in find_zig_str_path() to show all the paths I tried.")
|
||||
}
|
||||
|
||||
fn find_wasi_libc_path() -> PathBuf {
|
||||
|
@ -118,16 +120,18 @@ pub fn build_zig_host_native(
|
|||
.env_clear()
|
||||
.env("PATH", env_path)
|
||||
.env("HOME", env_home);
|
||||
|
||||
if let Some(shared_lib_path) = shared_lib_path {
|
||||
command.args(&[
|
||||
"build-exe",
|
||||
"-fPIE",
|
||||
shared_lib_path.to_str().unwrap(),
|
||||
bitcode::BUILTINS_HOST_OBJ_PATH,
|
||||
&bitcode::get_builtins_host_obj_path(),
|
||||
]);
|
||||
} else {
|
||||
command.args(&["build-obj", "-fPIC"]);
|
||||
}
|
||||
|
||||
command.args(&[
|
||||
zig_host_src,
|
||||
emit_bin,
|
||||
|
@ -158,6 +162,7 @@ pub fn build_zig_host_native(
|
|||
} else if matches!(opt_level, OptLevel::Size) {
|
||||
command.args(&["-O", "ReleaseSmall"]);
|
||||
}
|
||||
|
||||
command.output().unwrap()
|
||||
}
|
||||
|
||||
|
@ -228,7 +233,7 @@ pub fn build_zig_host_native(
|
|||
"build-exe",
|
||||
"-fPIE",
|
||||
shared_lib_path.to_str().unwrap(),
|
||||
bitcode::BUILTINS_HOST_OBJ_PATH,
|
||||
&bitcode::get_builtins_host_obj_path(),
|
||||
]);
|
||||
} else {
|
||||
command.args(&["build-obj", "-fPIC"]);
|
||||
|
@ -340,7 +345,7 @@ pub fn build_c_host_native(
|
|||
if let Some(shared_lib_path) = shared_lib_path {
|
||||
command.args(&[
|
||||
shared_lib_path.to_str().unwrap(),
|
||||
bitcode::BUILTINS_HOST_OBJ_PATH,
|
||||
&bitcode::get_builtins_host_obj_path(),
|
||||
"-fPIE",
|
||||
"-pie",
|
||||
"-lm",
|
||||
|
@ -423,7 +428,11 @@ pub fn rebuild_host(
|
|||
host_input_path.with_file_name(if shared_lib_path.is_some() {
|
||||
"dynhost"
|
||||
} else {
|
||||
"host.o"
|
||||
match roc_target::OperatingSystem::from(target.operating_system) {
|
||||
roc_target::OperatingSystem::Windows => "host.obj",
|
||||
roc_target::OperatingSystem::Unix => "host.o",
|
||||
roc_target::OperatingSystem::Wasi => "host.o",
|
||||
}
|
||||
})
|
||||
};
|
||||
|
||||
|
@ -1093,11 +1102,58 @@ fn link_wasm32(
|
|||
|
||||
fn link_windows(
|
||||
_target: &Triple,
|
||||
_output_path: PathBuf,
|
||||
_input_paths: &[&str],
|
||||
_link_type: LinkType,
|
||||
output_path: PathBuf,
|
||||
input_paths: &[&str],
|
||||
link_type: LinkType,
|
||||
) -> io::Result<(Child, PathBuf)> {
|
||||
todo!("Add windows support to the surgical linker. See issue #2608.")
|
||||
let zig_str_path = find_zig_str_path();
|
||||
|
||||
match link_type {
|
||||
LinkType::Dylib => {
|
||||
let child = Command::new(&zig_executable())
|
||||
.args(&["build-lib"])
|
||||
.args(input_paths)
|
||||
.args([
|
||||
"-lc",
|
||||
&format!("-femit-bin={}", output_path.to_str().unwrap()),
|
||||
"-target",
|
||||
"native",
|
||||
"--pkg-begin",
|
||||
"str",
|
||||
zig_str_path.to_str().unwrap(),
|
||||
"--pkg-end",
|
||||
"--strip",
|
||||
"-O",
|
||||
"Debug",
|
||||
"-dynamic",
|
||||
])
|
||||
.spawn()?;
|
||||
|
||||
Ok((child, output_path))
|
||||
}
|
||||
LinkType::Executable => {
|
||||
let child = Command::new(&zig_executable())
|
||||
.args(&["build-exe"])
|
||||
.args(input_paths)
|
||||
.args([
|
||||
"-lc",
|
||||
&format!("-femit-bin={}", output_path.to_str().unwrap()),
|
||||
"-target",
|
||||
"native",
|
||||
"--pkg-begin",
|
||||
"str",
|
||||
zig_str_path.to_str().unwrap(),
|
||||
"--pkg-end",
|
||||
"--strip",
|
||||
"-O",
|
||||
"Debug",
|
||||
])
|
||||
.spawn()?;
|
||||
|
||||
Ok((child, output_path))
|
||||
}
|
||||
LinkType::None => todo!(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn llvm_module_to_dylib(
|
||||
|
@ -1172,7 +1228,7 @@ pub fn preprocess_host_wasm32(host_input_path: &Path, preprocessed_host_path: &P
|
|||
let mut command = Command::new(&zig_executable());
|
||||
let args = &[
|
||||
"wasm-ld",
|
||||
bitcode::BUILTINS_WASM32_OBJ_PATH,
|
||||
&bitcode::get_builtins_wasm32_obj_path(),
|
||||
host_input,
|
||||
WASI_LIBC_PATH,
|
||||
WASI_COMPILER_RT_PATH, // builtins need __multi3, __udivti3, __fixdfti
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
pub use roc_gen_llvm::llvm::build::FunctionIterator;
|
||||
use roc_gen_llvm::llvm::build::{module_from_builtins, LlvmBackendMode};
|
||||
use roc_gen_llvm::llvm::externs::add_default_roc_externs;
|
||||
use roc_load::{LoadedModule, MonomorphizedModule};
|
||||
use roc_load::{EntryPoint, LoadedModule, MonomorphizedModule};
|
||||
use roc_module::symbol::{Interns, ModuleId};
|
||||
use roc_mono::ir::OptLevel;
|
||||
use roc_region::all::LineInfo;
|
||||
|
@ -265,11 +265,18 @@ pub fn gen_from_mono_module_llvm(
|
|||
// expects that would confuse the surgical linker
|
||||
add_default_roc_externs(&env);
|
||||
|
||||
let opt_entry_point = match loaded.entry_point {
|
||||
EntryPoint::Executable { symbol, layout, .. } => {
|
||||
Some(roc_mono::ir::EntryPoint { symbol, layout })
|
||||
}
|
||||
EntryPoint::Test => None,
|
||||
};
|
||||
|
||||
roc_gen_llvm::llvm::build::build_procedures(
|
||||
&env,
|
||||
opt_level,
|
||||
loaded.procedures,
|
||||
loaded.entry_point,
|
||||
opt_entry_point,
|
||||
Some(&app_ll_file),
|
||||
);
|
||||
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "roc_builtins"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
authors = ["The Roc Contributors"]
|
||||
license = "UPL-1.0"
|
||||
edition = "2021"
|
||||
|
@ -10,12 +10,12 @@ roc_collections = { path = "../collections" }
|
|||
roc_region = { path = "../region" }
|
||||
roc_module = { path = "../module" }
|
||||
roc_target = { path = "../roc_target" }
|
||||
roc_utils = { path = "../../utils" }
|
||||
lazy_static = "1.4.0"
|
||||
|
||||
[build-dependencies]
|
||||
# dunce can be removed once ziglang/zig#5109 is fixed
|
||||
dunce = "1.0.2"
|
||||
fs_extra = "1.2.0"
|
||||
|
||||
[target.'cfg(target_os = "macos")'.build-dependencies]
|
||||
tempfile = "3.2.0"
|
||||
|
|
|
@ -2608,10 +2608,10 @@ test "getScalarUnsafe" {
|
|||
}
|
||||
|
||||
pub fn strCloneTo(
|
||||
string: RocStr,
|
||||
ptr: [*]u8,
|
||||
offset: usize,
|
||||
extra_offset: usize,
|
||||
string: RocStr,
|
||||
) callconv(.C) usize {
|
||||
const WIDTH: usize = @sizeOf(RocStr);
|
||||
if (string.isSmallStr()) {
|
||||
|
|
|
@ -131,18 +131,16 @@ const RC_TYPE = Refcount.normal;
|
|||
|
||||
pub fn increfC(ptr_to_refcount: *isize, amount: isize) callconv(.C) void {
|
||||
if (RC_TYPE == Refcount.none) return;
|
||||
var refcount = ptr_to_refcount.*;
|
||||
if (refcount < REFCOUNT_MAX_ISIZE) {
|
||||
// Ensure that the refcount does not indicate whole-program lifetime.
|
||||
if (ptr_to_refcount.* != REFCOUNT_MAX_ISIZE) {
|
||||
// Note: we assume that a refcount will never overflow.
|
||||
// As such, we do not need to cap incrementing.
|
||||
switch (RC_TYPE) {
|
||||
Refcount.normal => {
|
||||
ptr_to_refcount.* = std.math.min(refcount + amount, REFCOUNT_MAX_ISIZE);
|
||||
ptr_to_refcount.* += amount;
|
||||
},
|
||||
Refcount.atomic => {
|
||||
var next = std.math.min(refcount + amount, REFCOUNT_MAX_ISIZE);
|
||||
while (@cmpxchgWeak(isize, ptr_to_refcount, refcount, next, Monotonic, Monotonic)) |found| {
|
||||
refcount = found;
|
||||
next = std.math.min(refcount + amount, REFCOUNT_MAX_ISIZE);
|
||||
}
|
||||
_ = @atomicRmw(isize, ptr_to_refcount, std.builtin.AtomicRmwOp.Add, amount, Monotonic);
|
||||
},
|
||||
Refcount.none => unreachable,
|
||||
}
|
||||
|
@ -194,26 +192,26 @@ inline fn decref_ptr_to_refcount(
|
|||
) void {
|
||||
if (RC_TYPE == Refcount.none) return;
|
||||
const extra_bytes = std.math.max(alignment, @sizeOf(usize));
|
||||
// Ensure that the refcount does not indicate whole-program lifetime.
|
||||
const refcount: isize = refcount_ptr[0];
|
||||
if (refcount != REFCOUNT_MAX_ISIZE) {
|
||||
switch (RC_TYPE) {
|
||||
Refcount.normal => {
|
||||
const refcount: isize = refcount_ptr[0];
|
||||
refcount_ptr[0] = refcount -% 1;
|
||||
if (refcount == REFCOUNT_ONE_ISIZE) {
|
||||
dealloc(@ptrCast([*]u8, refcount_ptr) - (extra_bytes - @sizeOf(usize)), alignment);
|
||||
} else if (refcount < REFCOUNT_MAX_ISIZE) {
|
||||
refcount_ptr[0] = refcount - 1;
|
||||
}
|
||||
},
|
||||
Refcount.atomic => {
|
||||
if (refcount_ptr[0] < REFCOUNT_MAX_ISIZE) {
|
||||
var last = @atomicRmw(isize, &refcount_ptr[0], std.builtin.AtomicRmwOp.Sub, 1, Monotonic);
|
||||
if (last == REFCOUNT_ONE_ISIZE) {
|
||||
dealloc(@ptrCast([*]u8, refcount_ptr) - (extra_bytes - @sizeOf(usize)), alignment);
|
||||
}
|
||||
}
|
||||
},
|
||||
Refcount.none => unreachable,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn allocateWithRefcountC(
|
||||
data_bytes: usize,
|
||||
|
|
|
@ -4,6 +4,7 @@ use std::ffi::OsStr;
|
|||
use std::fs;
|
||||
use std::io;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::process::Command;
|
||||
use std::str;
|
||||
|
||||
|
@ -53,19 +54,9 @@ fn main() {
|
|||
#[cfg(not(windows))]
|
||||
const BUILTINS_HOST_FILE: &str = "builtins-host.o";
|
||||
|
||||
generate_object_file(
|
||||
&bitcode_path,
|
||||
"BUILTINS_HOST_O",
|
||||
"object",
|
||||
BUILTINS_HOST_FILE,
|
||||
);
|
||||
generate_object_file(&bitcode_path, "object", BUILTINS_HOST_FILE);
|
||||
|
||||
generate_object_file(
|
||||
&bitcode_path,
|
||||
"BUILTINS_WASM32_O",
|
||||
"wasm32-object",
|
||||
"builtins-wasm32.o",
|
||||
);
|
||||
generate_object_file(&bitcode_path, "wasm32-object", "builtins-wasm32.o");
|
||||
|
||||
copy_zig_builtins_to_target_dir(&bitcode_path);
|
||||
|
||||
|
@ -84,21 +75,10 @@ fn main() {
|
|||
.expect("Failed to delete temp dir zig_cache_dir.");
|
||||
}
|
||||
|
||||
fn generate_object_file(
|
||||
bitcode_path: &Path,
|
||||
env_var_name: &str,
|
||||
zig_object: &str,
|
||||
object_file_name: &str,
|
||||
) {
|
||||
let out_dir = env::var_os("OUT_DIR").unwrap();
|
||||
|
||||
let dest_obj_path = Path::new(&out_dir).join(object_file_name);
|
||||
fn generate_object_file(bitcode_path: &Path, zig_object: &str, object_file_name: &str) {
|
||||
let dest_obj_path = get_lib_dir().join(object_file_name);
|
||||
let dest_obj = dest_obj_path.to_str().expect("Invalid dest object path");
|
||||
|
||||
// set the variable (e.g. BUILTINS_HOST_O) that is later used in
|
||||
// `compiler/builtins/src/bitcode.rs` to load the object file
|
||||
println!("cargo:rustc-env={}={}", env_var_name, dest_obj);
|
||||
|
||||
let src_obj_path = bitcode_path.join(object_file_name);
|
||||
let src_obj = src_obj_path.to_str().expect("Invalid src object path");
|
||||
|
||||
|
@ -109,6 +89,7 @@ fn generate_object_file(
|
|||
&bitcode_path,
|
||||
&zig_executable(),
|
||||
&["build", zig_object, "-Drelease=true"],
|
||||
0,
|
||||
);
|
||||
|
||||
println!("Moving zig object `{}` to: {}", zig_object, dest_obj);
|
||||
|
@ -143,35 +124,36 @@ fn generate_bc_file(bitcode_path: &Path, zig_object: &str, file_name: &str) {
|
|||
&bitcode_path,
|
||||
&zig_executable(),
|
||||
&["build", zig_object, "-Drelease=true"],
|
||||
0,
|
||||
);
|
||||
}
|
||||
|
||||
fn copy_zig_builtins_to_target_dir(bitcode_path: &Path) {
|
||||
// To enable roc to find the zig builtins, we want them to be moved to a folder next to the roc executable.
|
||||
// So if <roc_folder>/roc is the executable, the zig files will be in <roc_folder>/lib/*.zig
|
||||
|
||||
pub fn get_lib_dir() -> PathBuf {
|
||||
// Currently we have the OUT_DIR variable which points to `/target/debug/build/roc_builtins-*/out/`.
|
||||
// So we just need to shed 3 of the outer layers to get `/target/debug/` and then add `lib`.
|
||||
let out_dir = env::var_os("OUT_DIR").unwrap();
|
||||
let target_profile_dir = Path::new(&out_dir)
|
||||
|
||||
let lib_path = Path::new(&out_dir)
|
||||
.parent()
|
||||
.and_then(|path| path.parent())
|
||||
.and_then(|path| path.parent())
|
||||
.unwrap()
|
||||
.join("lib");
|
||||
|
||||
// create the dir if it does not exist
|
||||
fs::create_dir_all(lib_path.clone()).expect("Failed to make lib dir.");
|
||||
|
||||
lib_path
|
||||
}
|
||||
|
||||
fn copy_zig_builtins_to_target_dir(bitcode_path: &Path) {
|
||||
// To enable roc to find the zig builtins, we want them to be moved to a folder next to the roc executable.
|
||||
// So if <roc_folder>/roc is the executable, the zig files will be in <roc_folder>/lib/*.zig
|
||||
let target_profile_dir = get_lib_dir();
|
||||
|
||||
let zig_src_dir = bitcode_path.join("src");
|
||||
|
||||
std::fs::create_dir_all(&target_profile_dir).unwrap_or_else(|err| {
|
||||
panic!(
|
||||
"Failed to create output library directory for zig bitcode {:?}: {:?}",
|
||||
target_profile_dir, err
|
||||
);
|
||||
});
|
||||
let mut options = fs_extra::dir::CopyOptions::new();
|
||||
options.content_only = true;
|
||||
options.overwrite = true;
|
||||
fs_extra::dir::copy(&zig_src_dir, &target_profile_dir, &options).unwrap_or_else(|err| {
|
||||
cp_unless_zig_cache(&zig_src_dir, &target_profile_dir).unwrap_or_else(|err| {
|
||||
panic!(
|
||||
"Failed to copy zig bitcode files {:?} to {:?}: {:?}",
|
||||
zig_src_dir, target_profile_dir, err
|
||||
|
@ -179,8 +161,45 @@ fn copy_zig_builtins_to_target_dir(bitcode_path: &Path) {
|
|||
});
|
||||
}
|
||||
|
||||
fn run_command<S, I: Copy, P: AsRef<Path> + Copy>(path: P, command_str: &str, args: I)
|
||||
where
|
||||
// recursively copy all the .zig files from this directory, but do *not* recurse into zig-cache/
|
||||
fn cp_unless_zig_cache(src_dir: &Path, target_dir: &Path) -> io::Result<()> {
|
||||
// Make sure the destination directory exists before we try to copy anything into it.
|
||||
std::fs::create_dir_all(&target_dir).unwrap_or_else(|err| {
|
||||
panic!(
|
||||
"Failed to create output library directory for zig bitcode {:?}: {:?}",
|
||||
target_dir, err
|
||||
);
|
||||
});
|
||||
|
||||
for entry in fs::read_dir(src_dir)? {
|
||||
let src_path = entry?.path();
|
||||
let src_filename = src_path.file_name().unwrap();
|
||||
|
||||
// Only copy individual files if they have the .zig extension
|
||||
if src_path.extension().unwrap_or_default() == "zig" {
|
||||
let dest = target_dir.join(src_filename);
|
||||
|
||||
fs::copy(&src_path, &dest).unwrap_or_else(|err| {
|
||||
panic!(
|
||||
"Failed to copy zig bitcode file {:?} to {:?}: {:?}",
|
||||
src_path, dest, err
|
||||
);
|
||||
});
|
||||
} else if src_path.is_dir() && src_filename != "zig-cache" {
|
||||
// Recursively copy all directories except zig-cache
|
||||
cp_unless_zig_cache(&src_path, &target_dir.join(src_filename))?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn run_command<S, I: Copy, P: AsRef<Path> + Copy>(
|
||||
path: P,
|
||||
command_str: &str,
|
||||
args: I,
|
||||
flaky_fail_counter: usize,
|
||||
) where
|
||||
I: IntoIterator<Item = S>,
|
||||
S: AsRef<OsStr>,
|
||||
{
|
||||
|
@ -199,10 +218,14 @@ where
|
|||
};
|
||||
|
||||
// flaky error that only occurs sometimes during macOS CI runs
|
||||
if error_str.contains("unable to build stage1 zig object: FileNotFound")
|
||||
if error_str.contains("FileNotFound")
|
||||
|| error_str.contains("unable to save cached ZIR code")
|
||||
{
|
||||
run_command(path, command_str, args)
|
||||
if flaky_fail_counter == 10 {
|
||||
panic!("{} failed 10 times in a row. The following error is unlikely to be a flaky error: {}", command_str, error_str);
|
||||
} else {
|
||||
run_command(path, command_str, args, flaky_fail_counter + 1)
|
||||
}
|
||||
} else {
|
||||
panic!("{} failed: {}", command_str, error_str);
|
||||
}
|
||||
|
|
79
crates/compiler/builtins/roc/Decode.roc
Normal file
|
@ -0,0 +1,79 @@
|
|||
interface Decode
|
||||
exposes [
|
||||
DecodeError,
|
||||
DecodeResult,
|
||||
Decoder,
|
||||
Decoding,
|
||||
DecoderFormatting,
|
||||
decoder,
|
||||
u8,
|
||||
u16,
|
||||
u32,
|
||||
u64,
|
||||
u128,
|
||||
i8,
|
||||
i16,
|
||||
i32,
|
||||
i64,
|
||||
i128,
|
||||
f32,
|
||||
f64,
|
||||
dec,
|
||||
bool,
|
||||
string,
|
||||
list,
|
||||
custom,
|
||||
decodeWith,
|
||||
fromBytesPartial,
|
||||
fromBytes,
|
||||
]
|
||||
imports [
|
||||
List,
|
||||
]
|
||||
|
||||
DecodeError : [TooShort]
|
||||
|
||||
DecodeResult val : { result : Result val DecodeError, rest : List U8 }
|
||||
|
||||
Decoder val fmt := List U8, fmt -> DecodeResult val | fmt has DecoderFormatting
|
||||
|
||||
Decoding has
|
||||
decoder : Decoder val fmt | val has Decoding, fmt has DecoderFormatting
|
||||
|
||||
DecoderFormatting has
|
||||
u8 : Decoder U8 fmt | fmt has DecoderFormatting
|
||||
u16 : Decoder U16 fmt | fmt has DecoderFormatting
|
||||
u32 : Decoder U32 fmt | fmt has DecoderFormatting
|
||||
u64 : Decoder U64 fmt | fmt has DecoderFormatting
|
||||
u128 : Decoder U128 fmt | fmt has DecoderFormatting
|
||||
i8 : Decoder I8 fmt | fmt has DecoderFormatting
|
||||
i16 : Decoder I16 fmt | fmt has DecoderFormatting
|
||||
i32 : Decoder I32 fmt | fmt has DecoderFormatting
|
||||
i64 : Decoder I64 fmt | fmt has DecoderFormatting
|
||||
i128 : Decoder I128 fmt | fmt has DecoderFormatting
|
||||
f32 : Decoder F32 fmt | fmt has DecoderFormatting
|
||||
f64 : Decoder F64 fmt | fmt has DecoderFormatting
|
||||
dec : Decoder Dec fmt | fmt has DecoderFormatting
|
||||
bool : Decoder Bool fmt | fmt has DecoderFormatting
|
||||
string : Decoder Str fmt | fmt has DecoderFormatting
|
||||
list : Decoder elem fmt -> Decoder (List elem) fmt | fmt has DecoderFormatting
|
||||
|
||||
custom : (List U8, fmt -> DecodeResult val) -> Decoder val fmt | fmt has DecoderFormatting
|
||||
custom = \decode -> @Decoder decode
|
||||
|
||||
decodeWith : List U8, Decoder val fmt, fmt -> DecodeResult val | fmt has DecoderFormatting
|
||||
decodeWith = \bytes, @Decoder decode, fmt -> decode bytes fmt
|
||||
|
||||
fromBytesPartial : List U8, fmt -> DecodeResult val | val has Decoding, fmt has DecoderFormatting
|
||||
fromBytesPartial = \bytes, fmt -> decodeWith bytes decoder fmt
|
||||
|
||||
fromBytes : List U8, fmt -> Result val [Leftover (List U8)]DecodeError | val has Decoding, fmt has DecoderFormatting
|
||||
fromBytes = \bytes, fmt ->
|
||||
when fromBytesPartial bytes fmt is
|
||||
{ result, rest } ->
|
||||
if List.isEmpty rest then
|
||||
when result is
|
||||
Ok val -> Ok val
|
||||
Err TooShort -> Err TooShort
|
||||
else
|
||||
Err (Leftover rest)
|
|
@ -2,92 +2,98 @@ interface Json
|
|||
exposes [
|
||||
Json,
|
||||
toUtf8,
|
||||
fromUtf8,
|
||||
]
|
||||
imports [
|
||||
List,
|
||||
Str,
|
||||
Encode,
|
||||
Encode.{
|
||||
Encoder,
|
||||
EncoderFormatting,
|
||||
custom,
|
||||
appendWith,
|
||||
u8,
|
||||
u16,
|
||||
u32,
|
||||
u64,
|
||||
u128,
|
||||
i8,
|
||||
i16,
|
||||
i32,
|
||||
i64,
|
||||
i128,
|
||||
f32,
|
||||
f64,
|
||||
dec,
|
||||
bool,
|
||||
string,
|
||||
list,
|
||||
record,
|
||||
tag,
|
||||
},
|
||||
Decode,
|
||||
Decode.{
|
||||
DecoderFormatting,
|
||||
},
|
||||
]
|
||||
|
||||
Json := {} has [
|
||||
EncoderFormatting {
|
||||
u8,
|
||||
u16,
|
||||
u32,
|
||||
u64,
|
||||
u128,
|
||||
i8,
|
||||
i16,
|
||||
i32,
|
||||
i64,
|
||||
i128,
|
||||
f32,
|
||||
f64,
|
||||
dec,
|
||||
bool,
|
||||
string,
|
||||
list,
|
||||
record,
|
||||
tag,
|
||||
u8: encodeU8,
|
||||
u16: encodeU16,
|
||||
u32: encodeU32,
|
||||
u64: encodeU64,
|
||||
u128: encodeU128,
|
||||
i8: encodeI8,
|
||||
i16: encodeI16,
|
||||
i32: encodeI32,
|
||||
i64: encodeI64,
|
||||
i128: encodeI128,
|
||||
f32: encodeF32,
|
||||
f64: encodeF64,
|
||||
dec: encodeDec,
|
||||
bool: encodeBool,
|
||||
string: encodeString,
|
||||
list: encodeList,
|
||||
record: encodeRecord,
|
||||
tag: encodeTag,
|
||||
},
|
||||
DecoderFormatting {
|
||||
u8: decodeU8,
|
||||
u16: decodeU16,
|
||||
u32: decodeU32,
|
||||
u64: decodeU64,
|
||||
u128: decodeU128,
|
||||
i8: decodeI8,
|
||||
i16: decodeI16,
|
||||
i32: decodeI32,
|
||||
i64: decodeI64,
|
||||
i128: decodeI128,
|
||||
f32: decodeF32,
|
||||
f64: decodeF64,
|
||||
dec: decodeDec,
|
||||
bool: decodeBool,
|
||||
string: decodeString,
|
||||
list: decodeList,
|
||||
},
|
||||
]
|
||||
|
||||
toUtf8 = @Json {}
|
||||
|
||||
fromUtf8 = @Json {}
|
||||
|
||||
numToBytes = \n ->
|
||||
n |> Num.toStr |> Str.toUtf8
|
||||
|
||||
# impl EncoderFormatting for Json
|
||||
u8 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
|
||||
encodeU8 = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
|
||||
|
||||
u16 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
|
||||
encodeU16 = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
|
||||
|
||||
u32 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
|
||||
encodeU32 = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
|
||||
|
||||
u64 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
|
||||
encodeU64 = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
|
||||
|
||||
u128 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
|
||||
encodeU128 = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
|
||||
|
||||
i8 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
|
||||
encodeI8 = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
|
||||
|
||||
i16 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
|
||||
encodeI16 = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
|
||||
|
||||
i32 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
|
||||
encodeI32 = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
|
||||
|
||||
i64 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
|
||||
encodeI64 = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
|
||||
|
||||
i128 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
|
||||
encodeI128 = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
|
||||
|
||||
f32 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
|
||||
encodeF32 = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
|
||||
|
||||
f64 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
|
||||
encodeF64 = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
|
||||
|
||||
dec = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
|
||||
encodeDec = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
|
||||
|
||||
bool = \b -> custom \bytes, @Json {} ->
|
||||
encodeBool = \b -> Encode.custom \bytes, @Json {} ->
|
||||
if
|
||||
b
|
||||
then
|
||||
|
@ -95,13 +101,13 @@ bool = \b -> custom \bytes, @Json {} ->
|
|||
else
|
||||
List.concat bytes (Str.toUtf8 "false")
|
||||
|
||||
string = \s -> custom \bytes, @Json {} ->
|
||||
encodeString = \s -> Encode.custom \bytes, @Json {} ->
|
||||
List.append bytes (Num.toU8 '"')
|
||||
|> List.concat (Str.toUtf8 s)
|
||||
|> List.append (Num.toU8 '"')
|
||||
|
||||
list = \lst, encodeElem ->
|
||||
custom \bytes, @Json {} ->
|
||||
encodeList = \lst, encodeElem ->
|
||||
Encode.custom \bytes, @Json {} ->
|
||||
writeList = \{ buffer, elemsLeft }, elem ->
|
||||
bufferWithElem = appendWith buffer (encodeElem elem) (@Json {})
|
||||
bufferWithSuffix =
|
||||
|
@ -117,8 +123,8 @@ list = \lst, encodeElem ->
|
|||
|
||||
List.append withList (Num.toU8 ']')
|
||||
|
||||
record = \fields ->
|
||||
custom \bytes, @Json {} ->
|
||||
encodeRecord = \fields ->
|
||||
Encode.custom \bytes, @Json {} ->
|
||||
writeRecord = \{ buffer, fieldsLeft }, { key, value } ->
|
||||
bufferWithKeyValue =
|
||||
List.append buffer (Num.toU8 '"')
|
||||
|
@ -140,8 +146,8 @@ record = \fields ->
|
|||
|
||||
List.append bytesWithRecord (Num.toU8 '}')
|
||||
|
||||
tag = \name, payload ->
|
||||
custom \bytes, @Json {} ->
|
||||
encodeTag = \name, payload ->
|
||||
Encode.custom \bytes, @Json {} ->
|
||||
# Idea: encode `A v1 v2` as `{"A": [v1, v2]}`
|
||||
writePayload = \{ buffer, itemsLeft }, encoder ->
|
||||
bufferWithValue = appendWith buffer encoder (@Json {})
|
||||
|
@ -165,3 +171,204 @@ tag = \name, payload ->
|
|||
|
||||
List.append bytesWithPayload (Num.toU8 ']')
|
||||
|> List.append (Num.toU8 '}')
|
||||
|
||||
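# Collect elements from the front of `list` while `predicate` holds, returning `{ taken, rest }`.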
takeWhile = \list, predicate ->
|
||||
helper = \{ taken, rest } ->
|
||||
when List.first rest is
|
||||
Ok elem ->
|
||||
if predicate elem then
|
||||
helper { taken: List.append taken elem, rest: List.split rest 1 |> .others }
|
||||
else
|
||||
{ taken, rest }
|
||||
|
||||
Err _ -> { taken, rest }
|
||||
|
||||
helper { taken: [], rest: list }
|
||||
|
||||
asciiByte = \b -> Num.toU8 b
|
||||
|
||||
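# The ASCII bytes for the digits '0' through '9' (the range's end is exclusive, hence the `+ 1`).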
digits = List.range (asciiByte '0') (asciiByte '9' + 1)
|
||||
|
||||
takeDigits = \bytes ->
|
||||
takeWhile bytes \n -> List.contains digits n
|
||||
|
||||
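# Take the leading digits, plus a fractional part if a '.' follows, returning the numeric literal bytes and the remaining input.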
takeFloat = \bytes ->
|
||||
{ taken: intPart, rest } = takeDigits bytes
|
||||
|
||||
when List.get rest 0 is
|
||||
Ok 46 -> # 46 = .
|
||||
{ taken: floatPart, rest: afterAll } = takeDigits (List.split rest 1).others
|
||||
builtFloat =
|
||||
List.concat (List.append intPart (asciiByte '.')) floatPart
|
||||
|
||||
{ taken: builtFloat, rest: afterAll }
|
||||
|
||||
_ ->
|
||||
{ taken: intPart, rest }
|
||||
|
||||
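takeDigits and takeFloat above form a tiny tokenizer: peel off a run of ASCII digits, and for floats also a '.' followed by more digits, returning the consumed prefix plus the remaining bytes. A rough Rust sketch of the same splitting (names are illustrative, not compiler code; no exponent handling, like the Roc version):

    // Split `bytes` into the leading run that satisfies `pred` and the rest.
    fn take_while(bytes: &[u8], pred: impl Fn(u8) -> bool) -> (&[u8], &[u8]) {
        let end = bytes.iter().position(|&b| !pred(b)).unwrap_or(bytes.len());
        bytes.split_at(end)
    }

    // Leading digits, then optionally '.' and more digits.
    fn take_float(bytes: &[u8]) -> (Vec<u8>, &[u8]) {
        let (int_part, rest) = take_while(bytes, |b| b.is_ascii_digit());
        if rest.first() == Some(&b'.') {
            let (frac, after) = take_while(&rest[1..], |b| b.is_ascii_digit());
            let mut taken = int_part.to_vec();
            taken.push(b'.');
            taken.extend_from_slice(frac);
            (taken, after)
        } else {
            (int_part.to_vec(), rest)
        }
    }

    fn main() {
        assert_eq!(take_float(b"12.5,true"), (b"12.5".to_vec(), &b",true"[..]));
    }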
decodeU8 = Decode.custom \bytes, @Json {} ->
|
||||
{ taken, rest } = takeDigits bytes
|
||||
|
||||
when Str.fromUtf8 taken |> Result.try Str.toU8 is
|
||||
Ok n -> { result: Ok n, rest }
|
||||
Err _ -> { result: Err TooShort, rest }
|
||||
|
||||
decodeU16 = Decode.custom \bytes, @Json {} ->
|
||||
{ taken, rest } = takeDigits bytes
|
||||
|
||||
when Str.fromUtf8 taken |> Result.try Str.toU16 is
|
||||
Ok n -> { result: Ok n, rest }
|
||||
Err _ -> { result: Err TooShort, rest }
|
||||
|
||||
decodeU32 = Decode.custom \bytes, @Json {} ->
|
||||
{ taken, rest } = takeDigits bytes
|
||||
|
||||
when Str.fromUtf8 taken |> Result.try Str.toU32 is
|
||||
Ok n -> { result: Ok n, rest }
|
||||
Err _ -> { result: Err TooShort, rest }
|
||||
|
||||
decodeU64 = Decode.custom \bytes, @Json {} ->
|
||||
{ taken, rest } = takeDigits bytes
|
||||
|
||||
when Str.fromUtf8 taken |> Result.try Str.toU64 is
|
||||
Ok n -> { result: Ok n, rest }
|
||||
Err _ -> { result: Err TooShort, rest }
|
||||
|
||||
decodeU128 = Decode.custom \bytes, @Json {} ->
|
||||
{ taken, rest } = takeDigits bytes
|
||||
|
||||
when Str.fromUtf8 taken |> Result.try Str.toU128 is
|
||||
Ok n -> { result: Ok n, rest }
|
||||
Err _ -> { result: Err TooShort, rest }
|
||||
|
||||
decodeI8 = Decode.custom \bytes, @Json {} ->
|
||||
{ taken, rest } = takeDigits bytes
|
||||
|
||||
when Str.fromUtf8 taken |> Result.try Str.toI8 is
|
||||
Ok n -> { result: Ok n, rest }
|
||||
Err _ -> { result: Err TooShort, rest }
|
||||
|
||||
decodeI16 = Decode.custom \bytes, @Json {} ->
|
||||
{ taken, rest } = takeDigits bytes
|
||||
|
||||
when Str.fromUtf8 taken |> Result.try Str.toI16 is
|
||||
Ok n -> { result: Ok n, rest }
|
||||
Err _ -> { result: Err TooShort, rest }
|
||||
|
||||
decodeI32 = Decode.custom \bytes, @Json {} ->
|
||||
{ taken, rest } = takeDigits bytes
|
||||
|
||||
when Str.fromUtf8 taken |> Result.try Str.toI32 is
|
||||
Ok n -> { result: Ok n, rest }
|
||||
Err _ -> { result: Err TooShort, rest }
|
||||
|
||||
decodeI64 = Decode.custom \bytes, @Json {} ->
|
||||
{ taken, rest } = takeDigits bytes
|
||||
|
||||
when Str.fromUtf8 taken |> Result.try Str.toI64 is
|
||||
Ok n -> { result: Ok n, rest }
|
||||
Err _ -> { result: Err TooShort, rest }
|
||||
|
||||
decodeI128 = Decode.custom \bytes, @Json {} ->
|
||||
{ taken, rest } = takeDigits bytes
|
||||
|
||||
when Str.fromUtf8 taken |> Result.try Str.toI128 is
|
||||
Ok n -> { result: Ok n, rest }
|
||||
Err _ -> { result: Err TooShort, rest }
|
||||
|
||||
decodeF32 = Decode.custom \bytes, @Json {} ->
|
||||
{ taken, rest } = takeFloat bytes
|
||||
|
||||
when Str.fromUtf8 taken |> Result.try Str.toF32 is
|
||||
Ok n -> { result: Ok n, rest }
|
||||
Err _ -> { result: Err TooShort, rest }
|
||||
|
||||
decodeF64 = Decode.custom \bytes, @Json {} ->
|
||||
{ taken, rest } = takeFloat bytes
|
||||
|
||||
when Str.fromUtf8 taken |> Result.try Str.toF64 is
|
||||
Ok n -> { result: Ok n, rest }
|
||||
Err _ -> { result: Err TooShort, rest }
|
||||
|
||||
decodeDec = Decode.custom \bytes, @Json {} ->
|
||||
{ taken, rest } = takeFloat bytes
|
||||
|
||||
when Str.fromUtf8 taken |> Result.try Str.toDec is
|
||||
Ok n -> { result: Ok n, rest }
|
||||
Err _ -> { result: Err TooShort, rest }
|
||||
|
||||
decodeBool = Decode.custom \bytes, @Json {} ->
|
||||
{ before: maybeFalse, others: afterFalse } = List.split bytes 5
|
||||
|
||||
# Note: this could be more performant by traversing both branches char-by-char.
|
||||
# Doing that would also make `rest` more correct in the erroring case.
|
||||
if
|
||||
maybeFalse == [asciiByte 'f', asciiByte 'a', asciiByte 'l', asciiByte 's', asciiByte 'e']
|
||||
then
|
||||
{ result: Ok False, rest: afterFalse }
|
||||
else
|
||||
{ before: maybeTrue, others: afterTrue } = List.split bytes 4
|
||||
|
||||
if
|
||||
maybeTrue == [asciiByte 't', asciiByte 'r', asciiByte 'u', asciiByte 'e']
|
||||
then
|
||||
{ result: Ok True, rest: afterTrue }
|
||||
else
|
||||
{ result: Err TooShort, rest: bytes }
|
||||
|
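decodeBool simply compares a fixed-length prefix of the input against the literal bytes of "false" and "true". The equivalent check in Rust, sketched with std's strip_prefix (illustrative only; the Roc version reports Err TooShort rather than None):

    fn decode_bool(bytes: &[u8]) -> Option<(bool, &[u8])> {
        if let Some(rest) = bytes.strip_prefix(b"false") {
            Some((false, rest))
        } else if let Some(rest) = bytes.strip_prefix(b"true") {
            Some((true, rest))
        } else {
            None
        }
    }

    fn main() {
        assert_eq!(decode_bool(b"true,1"), Some((true, &b",1"[..])));
        assert_eq!(decode_bool(b"nope"), None);
    }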
||||
decodeString = Decode.custom \bytes, @Json {} ->
|
||||
{ before, others: afterStartingQuote } = List.split bytes 1
|
||||
|
||||
if
|
||||
before == [asciiByte '"']
|
||||
then
|
||||
# TODO: handle escape sequences
|
||||
{ taken: strSequence, rest } = takeWhile afterStartingQuote \n -> n != asciiByte '"'
|
||||
|
||||
when Str.fromUtf8 strSequence is
|
||||
Ok s ->
|
||||
{ others: afterEndingQuote } = List.split rest 1
|
||||
|
||||
{ result: Ok s, rest: afterEndingQuote }
|
||||
|
||||
Err _ -> { result: Err TooShort, rest }
|
||||
else
|
||||
{ result: Err TooShort, rest: bytes }
|
||||
|
||||
decodeList = \decodeElem -> Decode.custom \bytes, @Json {} ->
|
||||
decodeElems = \chunk, accum ->
|
||||
when Decode.decodeWith chunk decodeElem (@Json {}) is
|
||||
{ result, rest } ->
|
||||
when result is
|
||||
Ok val ->
|
||||
# TODO: handle spaces before ','
|
||||
{ before: afterElem, others } = List.split rest 1
|
||||
|
||||
if
|
||||
afterElem == [asciiByte ',']
|
||||
then
|
||||
decodeElems others (List.append accum val)
|
||||
else
|
||||
Done (List.append accum val) rest
|
||||
|
||||
Err e -> Errored e rest
|
||||
|
||||
{ before, others: afterStartingBrace } = List.split bytes 1
|
||||
|
||||
if
|
||||
before == [asciiByte '[']
|
||||
then
|
||||
# TODO: empty lists
|
||||
when decodeElems afterStartingBrace [] is
|
||||
Errored e rest -> { result: Err e, rest }
|
||||
Done vals rest ->
|
||||
{ before: maybeEndingBrace, others: afterEndingBrace } = List.split rest 1
|
||||
|
||||
if
|
||||
maybeEndingBrace == [asciiByte ']']
|
||||
then
|
||||
{ result: Ok vals, rest: afterEndingBrace }
|
||||
else
|
||||
{ result: Err TooShort, rest }
|
||||
else
|
||||
{ result: Err TooShort, rest: bytes }
|
||||
|
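decodeList drives an element decoder in a loop: decode one element, and if the next byte is ',' keep going, otherwise expect the closing ']'. A simplified Rust sketch of that loop for a comma-separated list of unsigned integers (hypothetical helper; no whitespace handling and no empty lists, matching the TODOs above):

    // Parse "[1,2,3]..." into the numbers plus the remaining input.
    fn decode_u64_list(bytes: &[u8]) -> Option<(Vec<u64>, &[u8])> {
        let mut rest = bytes.strip_prefix(b"[")?;
        let mut vals = Vec::new();
        loop {
            let digits_end = rest.iter().position(|b| !b.is_ascii_digit())?;
            let n: u64 = std::str::from_utf8(&rest[..digits_end]).ok()?.parse().ok()?;
            vals.push(n);
            rest = &rest[digits_end..];
            match *rest.first()? {
                b',' => rest = &rest[1..],                // more elements follow
                b']' => return Some((vals, &rest[1..])),  // done
                _ => return None,
            }
        }
    }

    fn main() {
        assert_eq!(
            decode_u64_list(b"[1,2,3]rest"),
            Some((vec![1, 2, 3], &b"rest"[..]))
        );
    }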
|
|
@ -334,7 +334,7 @@ lastMatch : Str, Str -> [Some Nat, None]
|
|||
lastMatch = \haystack, needle ->
|
||||
haystackLength = Str.countUtf8Bytes haystack
|
||||
needleLength = Str.countUtf8Bytes needle
|
||||
lastPossibleIndex = Num.subSaturated haystackLength (needleLength + 1)
|
||||
lastPossibleIndex = Num.subSaturated haystackLength needleLength
|
||||
|
||||
lastMatchHelp haystack needle lastPossibleIndex
|
||||
|
||||
|
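The fix above removes an off-by-one: for a haystack of length H and a needle of length N, the last index a match can start at is H - N (saturating at zero), not H - (N + 1). For example, finding "bc" at the end of "abc" requires allowing start index 1 = 3 - 2. The same computation in Rust:

    fn main() {
        let haystack_len: usize = "abc".len(); // 3
        let needle_len: usize = "bc".len();    // 2
        // Saturating subtraction mirrors Num.subSaturated: it never underflows.
        let last_possible_index = haystack_len.saturating_sub(needle_len);
        assert_eq!(last_possible_index, 1); // "bc" starts at index 1 in "abc"
    }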
|
|
@ -1,16 +1,29 @@
|
|||
use roc_module::symbol::Symbol;
|
||||
use roc_target::TargetInfo;
|
||||
use roc_utils::get_lib_path;
|
||||
use std::ops::Index;
|
||||
|
||||
pub const BUILTINS_HOST_OBJ_PATH: &str = env!(
|
||||
"BUILTINS_HOST_O",
|
||||
"Env var BUILTINS_HOST_O not found. Is there a problem with the build script?"
|
||||
);
|
||||
pub fn get_builtins_host_obj_path() -> String {
|
||||
let builtins_host_path = get_lib_path()
|
||||
.expect("Failed to find lib dir.")
|
||||
.join("builtins-host.o");
|
||||
|
||||
pub const BUILTINS_WASM32_OBJ_PATH: &str = env!(
|
||||
"BUILTINS_WASM32_O",
|
||||
"Env var BUILTINS_WASM32_O not found. Is there a problem with the build script?"
|
||||
);
|
||||
builtins_host_path
|
||||
.into_os_string()
|
||||
.into_string()
|
||||
.expect("Failed to convert builtins_host_path to str")
|
||||
}
|
||||
|
||||
pub fn get_builtins_wasm32_obj_path() -> String {
|
||||
let builtins_wasm32_path = get_lib_path()
|
||||
.expect("Failed to find lib dir.")
|
||||
.join("builtins-wasm32.o");
|
||||
|
||||
builtins_wasm32_path
|
||||
.into_os_string()
|
||||
.into_string()
|
||||
.expect("Failed to convert builtins_wasm32_path to str")
|
||||
}
|
||||
|
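The change above swaps compile-time env! lookups for a runtime search: the object files are located relative to a lib directory (via roc_utils::get_lib_path, imported at the top of this file) and the resulting PathBuf is converted to a String. The PathBuf-to-String part is plain std; a minimal sketch with an assumed lib directory stand-in for get_lib_path:

    use std::path::PathBuf;

    fn main() {
        // Assumed lib dir purely for illustration; the real code calls get_lib_path().
        let lib_dir = PathBuf::from("/tmp/roc/lib");
        let builtins_host_path = lib_dir.join("builtins-host.o");

        // into_os_string().into_string() fails only if the path is not valid UTF-8.
        let as_string = builtins_host_path
            .into_os_string()
            .into_string()
            .expect("Failed to convert builtins_host_path to str");

        println!("{}", as_string); // e.g. /tmp/roc/lib/builtins-host.o
    }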
||||
#[derive(Debug, Default, Copy, Clone)]
|
||||
pub struct IntrinsicName {
|
||||
|
@ -51,7 +64,7 @@ impl FloatWidth {
|
|||
}
|
||||
|
||||
pub const fn alignment_bytes(&self, target_info: TargetInfo) -> u32 {
|
||||
use roc_target::Architecture;
|
||||
use roc_target::Architecture::*;
|
||||
use FloatWidth::*;
|
||||
|
||||
// NOTE: this must never use mem::align_of, because that returns the alignment
|
||||
|
@ -60,8 +73,8 @@ impl FloatWidth {
|
|||
match self {
|
||||
F32 => 4,
|
||||
F64 | F128 => match target_info.architecture {
|
||||
Architecture::X86_64 | Architecture::Aarch64 | Architecture::Wasm32 => 8,
|
||||
Architecture::X86_32 | Architecture::Aarch32 => 4,
|
||||
X86_64 | Aarch64 | Wasm32 => 8,
|
||||
X86_32 | Aarch32 => 4,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
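The NOTE in this function is the key point: mem::align_of reports alignment on the host the compiler runs on, while code generation needs the alignment of the target. F64 is 8-byte aligned on 64-bit targets and wasm32 but only 4-byte aligned in the 32-bit x86 and ARM ABIs, hence the hard-coded per-architecture table. A quick host-side check of what align_of would (misleadingly) return:

    use std::mem::align_of;

    fn main() {
        // On a typical x86_64 host this prints 8, regardless of which target
        // the compiler is generating code for.
        println!("host align_of::<f64>() = {}", align_of::<f64>());
        println!("host align_of::<f32>() = {}", align_of::<f32>());
    }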
|
|
@ -12,6 +12,7 @@ pub fn module_source(module_id: ModuleId) -> &'static str {
|
|||
ModuleId::BOX => BOX,
|
||||
ModuleId::BOOL => BOOL,
|
||||
ModuleId::ENCODE => ENCODE,
|
||||
ModuleId::DECODE => DECODE,
|
||||
ModuleId::JSON => JSON,
|
||||
_ => panic!(
|
||||
"ModuleId {:?} is not part of the standard library",
|
||||
|
@ -29,4 +30,5 @@ const SET: &str = include_str!("../roc/Set.roc");
|
|||
const BOX: &str = include_str!("../roc/Box.roc");
|
||||
const BOOL: &str = include_str!("../roc/Bool.roc");
|
||||
const ENCODE: &str = include_str!("../roc/Encode.roc");
|
||||
const DECODE: &str = include_str!("../roc/Decode.roc");
|
||||
const JSON: &str = include_str!("../roc/Json.roc");
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "roc_can"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
authors = ["The Roc Contributors"]
|
||||
license = "UPL-1.0"
|
||||
edition = "2021"
|
||||
|
|
|
@ -566,6 +566,7 @@ fn can_annotation_help(
|
|||
region,
|
||||
alias_needs: alias.type_variables.len() as u8,
|
||||
type_got: args.len() as u8,
|
||||
alias_kind: alias.kind,
|
||||
});
|
||||
return error;
|
||||
}
|
||||
|
|
|
@ -713,7 +713,6 @@ pub struct PatternEq(
|
|||
pub struct OpportunisticResolve {
|
||||
/// The specialized type of this lookup, to try to resolve.
|
||||
pub specialization_variable: Variable,
|
||||
pub specialization_expectation: Index<Expected<Type>>,
|
||||
|
||||
/// The ability member to try to resolve.
|
||||
pub member: Symbol,
|
||||
|
|
|
@ -369,6 +369,7 @@ fn canonicalize_alias<'a>(
|
|||
typ: symbol,
|
||||
variable_region: loc_lowercase.region,
|
||||
variable_name: loc_lowercase.value.clone(),
|
||||
alias_kind: AliasKind::Structural,
|
||||
});
|
||||
}
|
||||
AliasKind::Opaque => {
|
||||
|
@ -2688,6 +2689,7 @@ fn correct_mutual_recursive_type_alias<'a>(
|
|||
env,
|
||||
&mut alias.typ,
|
||||
alias_name,
|
||||
alias.kind,
|
||||
alias.region,
|
||||
rest,
|
||||
can_still_report_error,
|
||||
|
@ -2870,7 +2872,15 @@ fn make_tag_union_recursive_help<'a, 'b>(
|
|||
}
|
||||
_ => {
|
||||
// take care to report a cyclic alias only once (not once for each alias in the cycle)
|
||||
mark_cyclic_alias(env, typ, symbol, region, others, *can_report_cyclic_error);
|
||||
mark_cyclic_alias(
|
||||
env,
|
||||
typ,
|
||||
symbol,
|
||||
alias_kind,
|
||||
region,
|
||||
others,
|
||||
*can_report_cyclic_error,
|
||||
);
|
||||
*can_report_cyclic_error = false;
|
||||
|
||||
Cyclic
|
||||
|
@ -2882,6 +2892,7 @@ fn mark_cyclic_alias<'a>(
|
|||
env: &mut Env<'a>,
|
||||
typ: &mut Type,
|
||||
symbol: Symbol,
|
||||
alias_kind: AliasKind,
|
||||
region: Region,
|
||||
others: Vec<Symbol>,
|
||||
report: bool,
|
||||
|
@ -2890,7 +2901,7 @@ fn mark_cyclic_alias<'a>(
|
|||
*typ = Type::Erroneous(problem);
|
||||
|
||||
if report {
|
||||
let problem = Problem::CyclicAlias(symbol, region, others);
|
||||
let problem = Problem::CyclicAlias(symbol, region, others, alias_kind);
|
||||
env.problems.push(problem);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1568,13 +1568,6 @@ fn canonicalize_var_lookup(
|
|||
output.references.insert_value_lookup(symbol);
|
||||
|
||||
if scope.abilities_store.is_ability_member_name(symbol) {
|
||||
// Is there a shadow implementation with the same name? If so, we might be in
|
||||
// the def for that shadow. In that case add a value lookup of the shadow impl,
|
||||
// so that it's marked as possibly-recursive.
|
||||
if let Some(shadow) = scope.get_member_shadow(symbol) {
|
||||
output.references.insert_value_lookup(shadow.value);
|
||||
}
|
||||
|
||||
AbilityMember(
|
||||
symbol,
|
||||
Some(scope.abilities_store.fresh_specialization_id()),
|
||||
|
|
|
@ -206,7 +206,6 @@ pub fn canonicalize_def_header_pattern<'a>(
|
|||
// Likely a specialization of an ability.
|
||||
Some(ability_member_name) => {
|
||||
output.references.insert_bound(symbol);
|
||||
output.references.insert_value_lookup(ability_member_name);
|
||||
Pattern::AbilityMemberSpecialization {
|
||||
ident: symbol,
|
||||
specializes: ability_member_name,
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "roc_collections"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
authors = ["The Roc Contributors"]
|
||||
license = "UPL-1.0"
|
||||
edition = "2021"
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "roc_constrain"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
authors = ["The Roc Contributors"]
|
||||
license = "UPL-1.0"
|
||||
edition = "2021"
|
||||
|
|
|
@ -422,12 +422,13 @@ pub fn constrain_expr(
|
|||
constraints.lookup(*symbol, expected, region)
|
||||
}
|
||||
&AbilityMember(symbol, specialization_id, specialization_var) => {
|
||||
// make lookup constraint to lookup this symbol's type in the environment
|
||||
let store_expected = constraints.equal_types_var(
|
||||
// Save the expectation in the `specialization_var` so we know what to specialize, then
|
||||
// lookup the member in the environment.
|
||||
let store_expected = constraints.store(
|
||||
expected.get_type_ref().clone(),
|
||||
specialization_var,
|
||||
expected,
|
||||
Category::Storage(file!(), line!()),
|
||||
region,
|
||||
file!(),
|
||||
line!(),
|
||||
);
|
||||
let lookup_constr = constraints.lookup(
|
||||
symbol,
|
||||
|
@ -435,13 +436,10 @@ pub fn constrain_expr(
|
|||
region,
|
||||
);
|
||||
|
||||
// Make sure we attempt to resolve the specialization, if we need to.
|
||||
// Make sure we attempt to resolve the specialization, if we can.
|
||||
if let Some(specialization_id) = specialization_id {
|
||||
env.resolutions_to_make.push(OpportunisticResolve {
|
||||
specialization_variable: specialization_var,
|
||||
specialization_expectation: constraints.push_expected_type(
|
||||
Expected::NoExpectation(Type::Variable(specialization_var)),
|
||||
),
|
||||
member: symbol,
|
||||
specialization_id,
|
||||
});
|
||||
|
|
|
@ -1,6 +1,8 @@
|
|||
[package]
|
||||
name = "roc_debug_flags"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
edition = "2021"
|
||||
authors = ["The Roc Contributors"]
|
||||
license = "UPL-1.0"
|
||||
|
||||
[dependencies]
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "roc_derive"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
authors = ["The Roc Contributors"]
|
||||
license = "UPL-1.0"
|
||||
edition = "2021"
|
||||
|
|
258
crates/compiler/derive/src/decoding.rs
Normal file
|
@ -0,0 +1,258 @@
|
|||
//! Derivers for the `Decoding` ability.
|
||||
|
||||
use roc_can::expr::{AnnotatedMark, ClosureData, Expr, Recursive};
|
||||
use roc_can::pattern::Pattern;
|
||||
use roc_derive_key::decoding::FlatDecodableKey;
|
||||
use roc_error_macros::internal_error;
|
||||
use roc_module::called_via::CalledVia;
|
||||
use roc_module::symbol::Symbol;
|
||||
use roc_region::all::Loc;
|
||||
use roc_types::subs::{
|
||||
Content, FlatType, GetSubsSlice, LambdaSet, OptVariable, SubsSlice, UnionLambdas, Variable,
|
||||
};
|
||||
use roc_types::types::AliasKind;
|
||||
|
||||
use crate::util::Env;
|
||||
use crate::{synth_var, DerivedBody};
|
||||
|
||||
pub(crate) fn derive_decoder(
|
||||
env: &mut Env<'_>,
|
||||
key: FlatDecodableKey,
|
||||
def_symbol: Symbol,
|
||||
) -> DerivedBody {
|
||||
let (body, body_type) = match key {
|
||||
FlatDecodableKey::List() => decoder_list(env, def_symbol),
|
||||
};
|
||||
|
||||
let specialization_lambda_sets =
|
||||
env.get_specialization_lambda_sets(body_type, Symbol::DECODE_DECODER);
|
||||
|
||||
DerivedBody {
|
||||
body,
|
||||
body_type,
|
||||
specialization_lambda_sets,
|
||||
}
|
||||
}
|
||||
|
||||
fn decoder_list(env: &mut Env<'_>, _def_symbol: Symbol) -> (Expr, Variable) {
|
||||
// Build
|
||||
//
|
||||
// def_symbol : Decoder (List elem) fmt | elem has Decoding, fmt has DecoderFormatting
|
||||
// def_symbol = Decode.custom \bytes, fmt -> Decode.decodeWith bytes (Decode.list Decode.decoder) fmt
|
||||
//
|
||||
// TODO try to reduce to `Decode.list Decode.decoder`
|
||||
|
||||
use Expr::*;
|
||||
|
||||
// Decode.list Decode.decoder : Decoder (List elem) fmt
|
||||
let (decode_list_call, this_decode_list_ret_var) = {
|
||||
// List elem
|
||||
let elem_var = env.subs.fresh_unnamed_flex_var();
|
||||
|
||||
// Decode.decoder : Decoder elem fmt | elem has Decoding, fmt has DecoderFormatting
|
||||
let (elem_decoder, elem_decoder_var) = {
|
||||
// build `Decode.decoder : Decoder elem fmt` type
|
||||
// Decoder val fmt | val has Decoding, fmt has DecoderFormatting
|
||||
let elem_decoder_var = env.import_builtin_symbol_var(Symbol::DECODE_DECODER);
|
||||
|
||||
// set val ~ elem
|
||||
let val_var = match env.subs.get_content_without_compacting(elem_decoder_var) {
|
||||
Content::Alias(Symbol::DECODE_DECODER_OPAQUE, vars, _, AliasKind::Opaque)
|
||||
if vars.type_variables_len == 2 =>
|
||||
{
|
||||
env.subs.get_subs_slice(vars.type_variables())[0]
|
||||
}
|
||||
_ => internal_error!("Decode.decode not an opaque type"),
|
||||
};
|
||||
|
||||
env.unify(val_var, elem_var);
|
||||
|
||||
(
|
||||
AbilityMember(Symbol::DECODE_DECODER, None, elem_decoder_var),
|
||||
elem_decoder_var,
|
||||
)
|
||||
};
|
||||
|
||||
// Build `Decode.list Decode.decoder` type
|
||||
// Decoder val fmt -[uls]-> Decoder (List val) fmt | fmt has DecoderFormatting
|
||||
let decode_list_fn_var = env.import_builtin_symbol_var(Symbol::DECODE_LIST);
|
||||
|
||||
// Decoder elem fmt -a-> b
|
||||
let elem_decoder_var_slice = SubsSlice::insert_into_subs(env.subs, [elem_decoder_var]);
|
||||
let this_decode_list_clos_var = env.subs.fresh_unnamed_flex_var();
|
||||
let this_decode_list_ret_var = env.subs.fresh_unnamed_flex_var();
|
||||
let this_decode_list_fn_var = synth_var(
|
||||
env.subs,
|
||||
Content::Structure(FlatType::Func(
|
||||
elem_decoder_var_slice,
|
||||
this_decode_list_clos_var,
|
||||
this_decode_list_ret_var,
|
||||
)),
|
||||
);
|
||||
|
||||
// Decoder val fmt -[uls]-> Decoder (List val) fmt | fmt has DecoderFormatting
|
||||
// ~ Decoder elem fmt -a -> b
|
||||
env.unify(decode_list_fn_var, this_decode_list_fn_var);
|
||||
|
||||
let decode_list_member = AbilityMember(Symbol::DECODE_LIST, None, this_decode_list_fn_var);
|
||||
let decode_list_fn = Box::new((
|
||||
decode_list_fn_var,
|
||||
Loc::at_zero(decode_list_member),
|
||||
this_decode_list_clos_var,
|
||||
this_decode_list_ret_var,
|
||||
));
|
||||
|
||||
let decode_list_call = Call(
|
||||
decode_list_fn,
|
||||
vec![(elem_decoder_var, Loc::at_zero(elem_decoder))],
|
||||
CalledVia::Space,
|
||||
);
|
||||
|
||||
(decode_list_call, this_decode_list_ret_var)
|
||||
};
|
||||
|
||||
let bytes_sym = env.new_symbol("bytes");
|
||||
let bytes_var = env.subs.fresh_unnamed_flex_var();
|
||||
let fmt_sym = env.new_symbol("fmt");
|
||||
let fmt_var = env.subs.fresh_unnamed_flex_var();
|
||||
|
||||
// Decode.decodeWith bytes (Decode.list Decode.decoder) fmt : DecodeResult (List elem)
|
||||
let (decode_with_call, decode_result_list_elem_var) = {
|
||||
// Decode.decodeWith : List U8, Decoder val fmt, fmt -> DecodeResult val | fmt has DecoderFormatting
|
||||
let decode_with_type = env.import_builtin_symbol_var(Symbol::DECODE_DECODE_WITH);
|
||||
|
||||
// Decode.decodeWith : bytes, Decoder (List elem) fmt, fmt -> DecodeResult (List elem)
|
||||
let this_decode_with_var_slice =
|
||||
SubsSlice::insert_into_subs(env.subs, [bytes_var, this_decode_list_ret_var, fmt_var]);
|
||||
let this_decode_with_clos_var = env.subs.fresh_unnamed_flex_var();
|
||||
let this_decode_with_ret_var = env.subs.fresh_unnamed_flex_var();
|
||||
let this_decode_with_fn_var = synth_var(
|
||||
env.subs,
|
||||
Content::Structure(FlatType::Func(
|
||||
this_decode_with_var_slice,
|
||||
this_decode_with_clos_var,
|
||||
this_decode_with_ret_var,
|
||||
)),
|
||||
);
|
||||
|
||||
// List U8, Decoder val fmt, fmt -> DecodeResult val | fmt has DecoderFormatting
|
||||
// ~ bytes, Decoder (List elem) fmt, fmt -> DecodeResult (List elem)
|
||||
env.unify(decode_with_type, this_decode_with_fn_var);
|
||||
|
||||
let decode_with_var = Var(Symbol::DECODE_DECODE_WITH);
|
||||
let decode_with_fn = Box::new((
|
||||
this_decode_with_fn_var,
|
||||
Loc::at_zero(decode_with_var),
|
||||
this_decode_with_clos_var,
|
||||
this_decode_with_ret_var,
|
||||
));
|
||||
let decode_with_call = Call(
|
||||
decode_with_fn,
|
||||
vec![
|
||||
// bytes (Decode.list Decode.decoder) fmt
|
||||
(bytes_var, Loc::at_zero(Var(bytes_sym))),
|
||||
(this_decode_list_ret_var, Loc::at_zero(decode_list_call)),
|
||||
(fmt_var, Loc::at_zero(Var(fmt_sym))),
|
||||
],
|
||||
CalledVia::Space,
|
||||
);
|
||||
|
||||
(decode_with_call, this_decode_with_ret_var)
|
||||
};
|
||||
|
||||
// \bytes, fmt -> Decode.decodeWith bytes (Decode.list Decode.decoder) fmt
|
||||
let (custom_lambda, custom_var) = {
|
||||
let fn_name = env.new_symbol("custom");
|
||||
|
||||
// Create fn_var for ambient capture; we fix it up below.
|
||||
let fn_var = synth_var(env.subs, Content::Error);
|
||||
|
||||
// -[[fn_name]]->
|
||||
let fn_name_labels = UnionLambdas::insert_into_subs(env.subs, [(fn_name, vec![])]);
|
||||
let fn_clos_var = synth_var(
|
||||
env.subs,
|
||||
Content::LambdaSet(LambdaSet {
|
||||
solved: fn_name_labels,
|
||||
recursion_var: OptVariable::NONE,
|
||||
unspecialized: SubsSlice::default(),
|
||||
ambient_function: fn_var,
|
||||
}),
|
||||
);
|
||||
|
||||
// bytes, fmt -[[fn_name]]-> DecodeResult (List elem)
|
||||
let args_slice = SubsSlice::insert_into_subs(env.subs, vec![bytes_var, fmt_var]);
|
||||
env.subs.set_content(
|
||||
fn_var,
|
||||
Content::Structure(FlatType::Func(
|
||||
args_slice,
|
||||
fn_clos_var,
|
||||
decode_result_list_elem_var,
|
||||
)),
|
||||
);
|
||||
|
||||
// \bytes, fmt -[[fn_name]]-> Decode.decodeWith bytes (Decode.list Decode.decoder) fmt
|
||||
let clos = Closure(ClosureData {
|
||||
function_type: fn_var,
|
||||
closure_type: fn_clos_var,
|
||||
return_type: decode_result_list_elem_var,
|
||||
name: fn_name,
|
||||
captured_symbols: vec![],
|
||||
recursive: Recursive::NotRecursive,
|
||||
arguments: vec![
|
||||
(
|
||||
bytes_var,
|
||||
AnnotatedMark::known_exhaustive(),
|
||||
Loc::at_zero(Pattern::Identifier(bytes_sym)),
|
||||
),
|
||||
(
|
||||
fmt_var,
|
||||
AnnotatedMark::known_exhaustive(),
|
||||
Loc::at_zero(Pattern::Identifier(fmt_sym)),
|
||||
),
|
||||
],
|
||||
loc_body: Box::new(Loc::at_zero(decode_with_call)),
|
||||
});
|
||||
|
||||
(clos, fn_var)
|
||||
};
|
||||
|
||||
// Decode.custom \bytes, fmt -> Decode.decodeWith bytes (Decode.list Decode.decoder) fmt
|
||||
let (decode_custom_call, decoder_var) = {
|
||||
// (List U8, fmt -> DecodeResult val) -> Decoder val fmt | fmt has DecoderFormatting
|
||||
let decode_custom_type = env.import_builtin_symbol_var(Symbol::DECODE_CUSTOM);
|
||||
|
||||
// (List U8, fmt -> DecodeResult (List elem)) -> Decoder (List elem) fmt
|
||||
let this_decode_custom_args = SubsSlice::insert_into_subs(env.subs, [custom_var]);
|
||||
let this_decode_custom_clos_var = env.subs.fresh_unnamed_flex_var();
|
||||
let this_decode_custom_ret_var = env.subs.fresh_unnamed_flex_var();
|
||||
let this_decode_custom_fn_var = synth_var(
|
||||
env.subs,
|
||||
Content::Structure(FlatType::Func(
|
||||
this_decode_custom_args,
|
||||
this_decode_custom_clos_var,
|
||||
this_decode_custom_ret_var,
|
||||
)),
|
||||
);
|
||||
|
||||
// (List U8, fmt -> DecodeResult val) -> Decoder val fmt | fmt has DecoderFormatting
|
||||
// ~ (List U8, fmt -> DecodeResult (List elem)) -> Decoder (List elem) fmt
|
||||
env.unify(decode_custom_type, this_decode_custom_fn_var);
|
||||
|
||||
let decode_custom_var = Var(Symbol::DECODE_CUSTOM);
|
||||
let decode_custom_fn = Box::new((
|
||||
this_decode_custom_fn_var,
|
||||
Loc::at_zero(decode_custom_var),
|
||||
this_decode_custom_clos_var,
|
||||
this_decode_custom_ret_var,
|
||||
));
|
||||
let decode_custom_call = Call(
|
||||
decode_custom_fn,
|
||||
vec![(custom_var, Loc::at_zero(custom_lambda))],
|
||||
CalledVia::Space,
|
||||
);
|
||||
|
||||
(decode_custom_call, this_decode_custom_ret_var)
|
||||
};
|
||||
|
||||
(decode_custom_call, decoder_var)
|
||||
}
|
|
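At the type level, decoder_list builds the expression shown in its opening comment: Decode.custom \bytes, fmt -> Decode.decodeWith bytes (Decode.list Decode.decoder) fmt. Stripped of the subs and unification bookkeeping, the derived value behaves like a combinator that wraps an element decoder into a list decoder. A hypothetical Rust model of that shape (stand-in types, not compiler code):

    // A decoder reads a prefix of `bytes` and returns the value plus how many
    // bytes it consumed (no borrow in the return type keeps the sketch simple).
    type Decoder<T> = Box<dyn Fn(&[u8]) -> Option<(T, usize)>>;

    // Runtime analogue of `Decode.list Decode.decoder`: reuse the element
    // decoder for every ','-separated item until the closing ']'.
    fn list<T: 'static>(elem: Decoder<T>) -> Decoder<Vec<T>> {
        Box::new(move |bytes| {
            if bytes.first() != Some(&b'[') {
                return None;
            }
            let mut pos = 1;
            let mut out = Vec::new();
            loop {
                let (val, used) = elem(&bytes[pos..])?;
                out.push(val);
                pos += used;
                match *bytes.get(pos)? {
                    b',' => pos += 1,
                    b']' => return Some((out, pos + 1)),
                    _ => return None,
                }
            }
        })
    }

    fn u8_decoder() -> Decoder<u8> {
        Box::new(|bytes| {
            let end = bytes.iter().position(|b| !b.is_ascii_digit())?;
            let n: u8 = std::str::from_utf8(&bytes[..end]).ok()?.parse().ok()?;
            Some((n, end))
        })
    }

    fn main() {
        let dec = list(u8_decoder());
        assert_eq!(dec(b"[1,2]"), Some((vec![1u8, 2], 5)));
    }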
@ -2,173 +2,24 @@
|
|||
|
||||
use std::iter::once;
|
||||
|
||||
use roc_can::abilities::SpecializationLambdaSets;
|
||||
use roc_can::expr::{
|
||||
AnnotatedMark, ClosureData, Expr, Field, Recursive, WhenBranch, WhenBranchPattern,
|
||||
};
|
||||
use roc_can::module::ExposedByModule;
|
||||
use roc_can::pattern::Pattern;
|
||||
use roc_collections::SendMap;
|
||||
use roc_derive_key::encoding::FlatEncodableKey;
|
||||
use roc_error_macros::internal_error;
|
||||
use roc_module::called_via::CalledVia;
|
||||
use roc_module::ident::Lowercase;
|
||||
use roc_module::symbol::{IdentIds, ModuleId, Symbol};
|
||||
use roc_module::symbol::Symbol;
|
||||
use roc_region::all::{Loc, Region};
|
||||
use roc_types::subs::{
|
||||
instantiate_rigids, Content, ExhaustiveMark, FlatType, GetSubsSlice, LambdaSet, OptVariable,
|
||||
RecordFields, RedundantMark, Subs, SubsSlice, UnionLambdas, UnionTags, Variable,
|
||||
VariableSubsSlice,
|
||||
Content, ExhaustiveMark, FlatType, GetSubsSlice, LambdaSet, OptVariable, RecordFields,
|
||||
RedundantMark, SubsSlice, UnionLambdas, UnionTags, Variable, VariableSubsSlice,
|
||||
};
|
||||
use roc_types::types::RecordField;
|
||||
|
||||
use crate::{synth_var, DerivedBody, DERIVED_SYNTH};
|
||||
|
||||
pub(crate) struct Env<'a> {
|
||||
/// NB: This **must** be subs for the derive module!
|
||||
pub subs: &'a mut Subs,
|
||||
pub exposed_types: &'a ExposedByModule,
|
||||
pub derived_ident_ids: &'a mut IdentIds,
|
||||
}
|
||||
|
||||
impl Env<'_> {
|
||||
fn new_symbol(&mut self, name_hint: &str) -> Symbol {
|
||||
if cfg!(any(
|
||||
debug_assertions,
|
||||
test,
|
||||
feature = "debug-derived-symbols"
|
||||
)) {
|
||||
let mut i = 0;
|
||||
let debug_name = loop {
|
||||
i += 1;
|
||||
let name = if i == 1 {
|
||||
name_hint.to_owned()
|
||||
} else {
|
||||
format!("{}{}", name_hint, i)
|
||||
};
|
||||
if self.derived_ident_ids.get_id(&name).is_none() {
|
||||
break name;
|
||||
}
|
||||
};
|
||||
|
||||
let ident_id = self.derived_ident_ids.get_or_insert(&debug_name);
|
||||
|
||||
Symbol::new(DERIVED_SYNTH, ident_id)
|
||||
} else {
|
||||
self.unique_symbol()
|
||||
}
|
||||
}
|
||||
|
||||
fn unique_symbol(&mut self) -> Symbol {
|
||||
let ident_id = self.derived_ident_ids.gen_unique();
|
||||
Symbol::new(DERIVED_SYNTH, ident_id)
|
||||
}
|
||||
|
||||
fn import_encode_symbol(&mut self, symbol: Symbol) -> Variable {
|
||||
debug_assert_eq!(symbol.module_id(), ModuleId::ENCODE);
|
||||
|
||||
let encode_types = &self
|
||||
.exposed_types
|
||||
.get(&ModuleId::ENCODE)
|
||||
.unwrap()
|
||||
.exposed_types_storage_subs;
|
||||
let storage_var = encode_types.stored_vars_by_symbol.get(&symbol).unwrap();
|
||||
let imported = encode_types
|
||||
.storage_subs
|
||||
.export_variable_to_directly_to_use_site(self.subs, *storage_var);
|
||||
|
||||
instantiate_rigids(self.subs, imported.variable);
|
||||
|
||||
imported.variable
|
||||
}
|
||||
|
||||
fn unify(&mut self, left: Variable, right: Variable) {
|
||||
use roc_unify::unify::{unify, Env, Mode, Unified};
|
||||
|
||||
let unified = unify(&mut Env::new(self.subs), left, right, Mode::EQ);
|
||||
|
||||
match unified {
|
||||
Unified::Success {
|
||||
vars: _,
|
||||
must_implement_ability: _,
|
||||
lambda_sets_to_specialize,
|
||||
extra_metadata: _,
|
||||
} => {
|
||||
if !lambda_sets_to_specialize.is_empty() {
|
||||
internal_error!("Did not expect derivers to need to specialize unspecialized lambda sets, but we got some: {:?}", lambda_sets_to_specialize)
|
||||
}
|
||||
}
|
||||
Unified::Failure(..) | Unified::BadType(..) => {
|
||||
internal_error!("Unification failed in deriver - that's a deriver bug!")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn get_specialization_lambda_sets(
|
||||
&mut self,
|
||||
specialization_type: Variable,
|
||||
ability_member: Symbol,
|
||||
) -> SpecializationLambdaSets {
|
||||
use roc_unify::unify::{unify_introduced_ability_specialization, Env, Mode, Unified};
|
||||
|
||||
let member_signature = self.import_encode_symbol(ability_member);
|
||||
|
||||
let unified = unify_introduced_ability_specialization(
|
||||
&mut Env::new(self.subs),
|
||||
member_signature,
|
||||
specialization_type,
|
||||
Mode::EQ,
|
||||
);
|
||||
|
||||
match unified {
|
||||
Unified::Success {
|
||||
vars: _,
|
||||
must_implement_ability: _,
|
||||
lambda_sets_to_specialize: _lambda_sets_to_specialize,
|
||||
extra_metadata: specialization_lsets,
|
||||
} => {
|
||||
let specialization_lsets: SpecializationLambdaSets = specialization_lsets
|
||||
.0
|
||||
.into_iter()
|
||||
.map(|((spec_member, region), var)| {
|
||||
debug_assert_eq!(spec_member, ability_member);
|
||||
(region, var)
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Since we're doing `{foo} ~ a | a has Encoding`, we may see "lambda sets to
|
||||
// specialize" for e.g. `{foo}:toEncoder:1`, but these are actually just the
|
||||
// specialization lambda sets, so we don't need to do any extra work!
|
||||
//
|
||||
// If there are other lambda sets to specialize in here, that's unexpected, because
|
||||
// that means we would have been deriving something like `toEncoder {foo: bar}`,
|
||||
// and now seen that we needed `toEncoder bar` where `bar` is a concrete type. But
|
||||
// we only expect `bar` to polymorphic at this stage!
|
||||
//
|
||||
// TODO: it would be better if `unify` could prune these for us. See also
|
||||
// https://github.com/rtfeldman/roc/issues/3207; that is a blocker for this TODO.
|
||||
#[cfg(debug_assertions)]
|
||||
{
|
||||
for (spec_var, lambda_sets) in _lambda_sets_to_specialize.drain() {
|
||||
for lambda_set in lambda_sets {
|
||||
let belongs_to_specialized_lambda_sets =
|
||||
specialization_lsets.iter().any(|(_, var)| {
|
||||
self.subs.get_root_key_without_compacting(*var)
|
||||
== self.subs.get_root_key_without_compacting(lambda_set)
|
||||
});
|
||||
debug_assert!(belongs_to_specialized_lambda_sets,
|
||||
"Did not expect derivers to need to specialize unspecialized lambda sets, but we got one: {:?} for {:?}", lambda_set, spec_var)
|
||||
}
|
||||
}
|
||||
}
|
||||
specialization_lsets
|
||||
}
|
||||
Unified::Failure(..) | Unified::BadType(..) => {
|
||||
internal_error!("Unification failed in deriver - that's a deriver bug!")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
use crate::util::Env;
|
||||
use crate::{synth_var, DerivedBody};
|
||||
|
||||
pub(crate) fn derive_to_encoder(
|
||||
env: &mut Env<'_>,
|
||||
|
@ -253,7 +104,7 @@ fn to_encoder_list(env: &mut Env<'_>, fn_name: Symbol) -> (Expr, Variable) {
|
|||
|
||||
// build `toEncoder elem` type
|
||||
// val -[uls]-> Encoder fmt | fmt has EncoderFormatting
|
||||
let to_encoder_fn_var = env.import_encode_symbol(Symbol::ENCODE_TO_ENCODER);
|
||||
let to_encoder_fn_var = env.import_builtin_symbol_var(Symbol::ENCODE_TO_ENCODER);
|
||||
|
||||
// elem -[clos]-> t1
|
||||
let to_encoder_clos_var = env.subs.fresh_unnamed_flex_var(); // clos
|
||||
|
@ -333,7 +184,7 @@ fn to_encoder_list(env: &mut Env<'_>, fn_name: Symbol) -> (Expr, Variable) {
|
|||
|
||||
// build `Encode.list lst (\elem -> Encode.toEncoder elem)` type
|
||||
// List e, (e -> Encoder fmt) -[uls]-> Encoder fmt | fmt has EncoderFormatting
|
||||
let encode_list_fn_var = env.import_encode_symbol(Symbol::ENCODE_LIST);
|
||||
let encode_list_fn_var = env.import_builtin_symbol_var(Symbol::ENCODE_LIST);
|
||||
|
||||
// List elem, to_elem_encoder_fn_var -[clos]-> t1
|
||||
let this_encode_list_args_slice =
|
||||
|
@ -469,7 +320,7 @@ fn to_encoder_record(
|
|||
|
||||
// build `toEncoder rcd.a` type
|
||||
// val -[uls]-> Encoder fmt | fmt has EncoderFormatting
|
||||
let to_encoder_fn_var = env.import_encode_symbol(Symbol::ENCODE_TO_ENCODER);
|
||||
let to_encoder_fn_var = env.import_builtin_symbol_var(Symbol::ENCODE_TO_ENCODER);
|
||||
|
||||
// (typeof rcd.a) -[clos]-> t1
|
||||
let to_encoder_clos_var = env.subs.fresh_unnamed_flex_var(); // clos
|
||||
|
@ -549,7 +400,7 @@ fn to_encoder_record(
|
|||
|
||||
// build `Encode.record [ { key: .., value: ..}, .. ]` type
|
||||
// List { key : Str, value : Encoder fmt } -[uls]-> Encoder fmt | fmt has EncoderFormatting
|
||||
let encode_record_fn_var = env.import_encode_symbol(Symbol::ENCODE_RECORD);
|
||||
let encode_record_fn_var = env.import_builtin_symbol_var(Symbol::ENCODE_RECORD);
|
||||
|
||||
// fields_list_var -[clos]-> t1
|
||||
let fields_list_var_slice =
|
||||
|
@ -687,7 +538,8 @@ fn to_encoder_tag_union(
|
|||
.map(|(&sym, &sym_var)| {
|
||||
// build `toEncoder v1` type
|
||||
// expected: val -[uls]-> Encoder fmt | fmt has EncoderFormatting
|
||||
let to_encoder_fn_var = env.import_encode_symbol(Symbol::ENCODE_TO_ENCODER);
|
||||
let to_encoder_fn_var =
|
||||
env.import_builtin_symbol_var(Symbol::ENCODE_TO_ENCODER);
|
||||
|
||||
// wanted: t1 -[clos]-> t'
|
||||
let var_slice_of_sym_var =
|
||||
|
@ -747,7 +599,7 @@ fn to_encoder_tag_union(
|
|||
|
||||
// build `Encode.tag "A" [ ... ]` type
|
||||
// expected: Str, List (Encoder fmt) -[uls]-> Encoder fmt | fmt has EncoderFormatting
|
||||
let encode_tag_fn_var = env.import_encode_symbol(Symbol::ENCODE_TAG);
|
||||
let encode_tag_fn_var = env.import_builtin_symbol_var(Symbol::ENCODE_TAG);
|
||||
|
||||
// wanted: Str, List whole_encoders_var -[clos]-> t'
|
||||
let this_encode_tag_args_var_slice = VariableSubsSlice::insert_into_subs(
|
||||
|
@ -904,7 +756,7 @@ fn wrap_in_encode_custom(
|
|||
|
||||
// build `Encode.appendWith bytes encoder fmt` type
|
||||
// expected: Encode.appendWith : List U8, Encoder fmt, fmt -[appendWith]-> List U8 | fmt has EncoderFormatting
|
||||
let append_with_fn_var = env.import_encode_symbol(Symbol::ENCODE_APPEND_WITH);
|
||||
let append_with_fn_var = env.import_builtin_symbol_var(Symbol::ENCODE_APPEND_WITH);
|
||||
|
||||
// wanted: Encode.appendWith : List U8, encoder_var, fmt -[clos]-> List U8 | fmt has EncoderFormatting
|
||||
let this_append_with_args_var_slice =
|
||||
|
@ -995,7 +847,7 @@ fn wrap_in_encode_custom(
|
|||
// Encode.custom \bytes, fmt -> Encode.appendWith bytes encoder fmt
|
||||
//
|
||||
// expected: Encode.custom : (List U8, fmt -> List U8) -> Encoder fmt | fmt has EncoderFormatting
|
||||
let custom_fn_var = env.import_encode_symbol(Symbol::ENCODE_CUSTOM);
|
||||
let custom_fn_var = env.import_builtin_symbol_var(Symbol::ENCODE_CUSTOM);
|
||||
|
||||
// wanted: Encode.custom : fn_var -[clos]-> t'
|
||||
let this_custom_args_var_slice = VariableSubsSlice::insert_into_subs(env.subs, [fn_var]);
|
||||
|
|
|
@ -14,9 +14,13 @@ use roc_region::all::Loc;
|
|||
use roc_types::subs::{
|
||||
copy_import_to, Content, Descriptor, Mark, OptVariable, Rank, Subs, Variable,
|
||||
};
|
||||
use util::Env;
|
||||
|
||||
mod decoding;
|
||||
mod encoding;
|
||||
|
||||
mod util;
|
||||
|
||||
pub(crate) const DERIVED_SYNTH: ModuleId = ModuleId::DERIVED_SYNTH;
|
||||
|
||||
pub fn synth_var(subs: &mut Subs, content: Content) -> Variable {
|
||||
|
@ -56,20 +60,23 @@ fn build_derived_body(
|
|||
derived_symbol: Symbol,
|
||||
derive_key: DeriveKey,
|
||||
) -> (Def, SpecializationLambdaSets) {
|
||||
let mut env = Env {
|
||||
subs: derived_subs,
|
||||
exposed_types: exposed_by_module,
|
||||
derived_ident_ids,
|
||||
};
|
||||
|
||||
let DerivedBody {
|
||||
body,
|
||||
body_type,
|
||||
specialization_lambda_sets,
|
||||
} = match derive_key {
|
||||
DeriveKey::ToEncoder(to_encoder_key) => {
|
||||
let mut env = encoding::Env {
|
||||
subs: derived_subs,
|
||||
exposed_types: exposed_by_module,
|
||||
derived_ident_ids,
|
||||
};
|
||||
encoding::derive_to_encoder(&mut env, to_encoder_key, derived_symbol)
|
||||
}
|
||||
DeriveKey::Decoding => todo!(),
|
||||
DeriveKey::Decoder(decoder_key) => {
|
||||
decoding::derive_decoder(&mut env, decoder_key, derived_symbol)
|
||||
}
|
||||
};
|
||||
|
||||
let def = Def {
|
||||
|
@ -174,18 +181,18 @@ impl DerivedModule {
|
|||
&mut self,
|
||||
gen_subs: &mut Subs,
|
||||
should_load_def: impl Fn(Symbol) -> bool,
|
||||
) -> VecMap<Symbol, Expr> {
|
||||
) -> VecMap<Symbol, (Expr, Variable)> {
|
||||
self.map
|
||||
.values()
|
||||
.filter_map(|(symbol, def, _)| {
|
||||
if should_load_def(*symbol) {
|
||||
let (_new_expr_var, new_expr) = roc_can::copy::deep_copy_expr_across_subs(
|
||||
let (new_expr_var, new_expr) = roc_can::copy::deep_copy_expr_across_subs(
|
||||
&mut self.subs,
|
||||
gen_subs,
|
||||
def.expr_var,
|
||||
&def.loc_expr.value,
|
||||
);
|
||||
Some((*symbol, new_expr))
|
||||
Some((*symbol, (new_expr, new_expr_var)))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
|
|
155
crates/compiler/derive/src/util.rs
Normal file
|
@ -0,0 +1,155 @@
|
|||
use roc_can::{abilities::SpecializationLambdaSets, module::ExposedByModule};
|
||||
use roc_error_macros::internal_error;
|
||||
use roc_module::symbol::{IdentIds, Symbol};
|
||||
use roc_types::subs::{instantiate_rigids, Subs, Variable};
|
||||
|
||||
use crate::DERIVED_SYNTH;
|
||||
|
||||
/// An environment representing the Derived_synth module, for use in building derived
|
||||
/// implementations.
|
||||
pub(crate) struct Env<'a> {
|
||||
/// NB: This **must** be subs for the derive module!
|
||||
pub subs: &'a mut Subs,
|
||||
pub exposed_types: &'a ExposedByModule,
|
||||
pub derived_ident_ids: &'a mut IdentIds,
|
||||
}
|
||||
|
||||
impl Env<'_> {
|
||||
pub fn new_symbol(&mut self, name_hint: &str) -> Symbol {
|
||||
if cfg!(any(
|
||||
debug_assertions,
|
||||
test,
|
||||
feature = "debug-derived-symbols"
|
||||
)) {
|
||||
let mut i = 0;
|
||||
let debug_name = loop {
|
||||
i += 1;
|
||||
let name = if i == 1 {
|
||||
name_hint.to_owned()
|
||||
} else {
|
||||
format!("{}{}", name_hint, i)
|
||||
};
|
||||
if self.derived_ident_ids.get_id(&name).is_none() {
|
||||
break name;
|
||||
}
|
||||
};
|
||||
|
||||
let ident_id = self.derived_ident_ids.get_or_insert(&debug_name);
|
||||
|
||||
Symbol::new(DERIVED_SYNTH, ident_id)
|
||||
} else {
|
||||
self.unique_symbol()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn unique_symbol(&mut self) -> Symbol {
|
||||
let ident_id = self.derived_ident_ids.gen_unique();
|
||||
Symbol::new(DERIVED_SYNTH, ident_id)
|
||||
}
|
||||
|
||||
pub fn import_builtin_symbol_var(&mut self, symbol: Symbol) -> Variable {
|
||||
let module_id = symbol.module_id();
|
||||
debug_assert!(module_id.is_builtin());
|
||||
|
||||
let module_types = &self
|
||||
.exposed_types
|
||||
.get(&module_id)
|
||||
.unwrap()
|
||||
.exposed_types_storage_subs;
|
||||
let storage_var = module_types.stored_vars_by_symbol.get(&symbol).unwrap();
|
||||
let imported = module_types
|
||||
.storage_subs
|
||||
.export_variable_to_directly_to_use_site(self.subs, *storage_var);
|
||||
|
||||
instantiate_rigids(self.subs, imported.variable);
|
||||
|
||||
imported.variable
|
||||
}
|
||||
|
||||
pub fn unify(&mut self, left: Variable, right: Variable) {
|
||||
use roc_unify::unify::{unify, Env, Mode, Unified};
|
||||
|
||||
let unified = unify(&mut Env::new(self.subs), left, right, Mode::EQ);
|
||||
|
||||
match unified {
|
||||
Unified::Success {
|
||||
vars: _,
|
||||
must_implement_ability: _,
|
||||
lambda_sets_to_specialize,
|
||||
extra_metadata: _,
|
||||
} => {
|
||||
if !lambda_sets_to_specialize.is_empty() {
|
||||
internal_error!("Did not expect derivers to need to specialize unspecialized lambda sets, but we got some: {:?}", lambda_sets_to_specialize)
|
||||
}
|
||||
}
|
||||
Unified::Failure(..) | Unified::BadType(..) => {
|
||||
internal_error!("Unification failed in deriver - that's a deriver bug!")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_specialization_lambda_sets(
|
||||
&mut self,
|
||||
specialization_type: Variable,
|
||||
ability_member: Symbol,
|
||||
) -> SpecializationLambdaSets {
|
||||
use roc_unify::unify::{unify_introduced_ability_specialization, Env, Mode, Unified};
|
||||
|
||||
let member_signature = self.import_builtin_symbol_var(ability_member);
|
||||
|
||||
let unified = unify_introduced_ability_specialization(
|
||||
&mut Env::new(self.subs),
|
||||
member_signature,
|
||||
specialization_type,
|
||||
Mode::EQ,
|
||||
);
|
||||
|
||||
match unified {
|
||||
Unified::Success {
|
||||
vars: _,
|
||||
must_implement_ability: _,
|
||||
lambda_sets_to_specialize: _lambda_sets_to_specialize,
|
||||
extra_metadata: specialization_lsets,
|
||||
} => {
|
||||
let specialization_lsets: SpecializationLambdaSets = specialization_lsets
|
||||
.0
|
||||
.into_iter()
|
||||
.map(|((spec_member, region), var)| {
|
||||
debug_assert_eq!(spec_member, ability_member);
|
||||
(region, var)
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Since we're doing `{foo} ~ a | a has Encoding`, we may see "lambda sets to
|
||||
// specialize" for e.g. `{foo}:toEncoder:1`, but these are actually just the
|
||||
// specialization lambda sets, so we don't need to do any extra work!
|
||||
//
|
||||
// If there are other lambda sets to specialize in here, that's unexpected, because
|
||||
// that means we would have been deriving something like `toEncoder {foo: bar}`,
|
||||
// and now seen that we needed `toEncoder bar` where `bar` is a concrete type. But
|
||||
// we only expect `bar` to be polymorphic at this stage!
|
||||
//
|
||||
// TODO: it would be better if `unify` could prune these for us. See also
|
||||
// https://github.com/rtfeldman/roc/issues/3207; that is a blocker for this TODO.
|
||||
#[cfg(debug_assertions)]
|
||||
{
|
||||
for (spec_var, lambda_sets) in _lambda_sets_to_specialize.drain() {
|
||||
for lambda_set in lambda_sets {
|
||||
let belongs_to_specialized_lambda_sets =
|
||||
specialization_lsets.iter().any(|(_, var)| {
|
||||
self.subs.get_root_key_without_compacting(*var)
|
||||
== self.subs.get_root_key_without_compacting(lambda_set)
|
||||
});
|
||||
debug_assert!(belongs_to_specialized_lambda_sets,
|
||||
"Did not expect derivers to need to specialize unspecialized lambda sets, but we got one: {:?} for {:?}", lambda_set, spec_var)
|
||||
}
|
||||
}
|
||||
}
|
||||
specialization_lsets
|
||||
}
|
||||
Unified::Failure(..) | Unified::BadType(..) => {
|
||||
internal_error!("Unification failed in deriver - that's a deriver bug!")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
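new_symbol above keeps derived symbols readable in debug builds by probing name, name2, name3, ... until an ident id is free. The same probing loop as a self-contained sketch against a plain HashSet instead of IdentIds:

    use std::collections::HashSet;

    // Return `hint` if unused, otherwise the first `hint{i}` (i >= 2) that is.
    fn debug_name(used: &mut HashSet<String>, hint: &str) -> String {
        let mut i = 0;
        let name = loop {
            i += 1;
            let candidate = if i == 1 {
                hint.to_owned()
            } else {
                format!("{}{}", hint, i)
            };
            if !used.contains(&candidate) {
                break candidate;
            }
        };
        used.insert(name.clone());
        name
    }

    fn main() {
        let mut used = HashSet::new();
        assert_eq!(debug_name(&mut used, "custom"), "custom");
        assert_eq!(debug_name(&mut used, "custom"), "custom2");
    }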
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "roc_derive_key"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
authors = ["The Roc Contributors"]
|
||||
license = "UPL-1.0"
|
||||
edition = "2021"
|
||||
|
|
84
crates/compiler/derive_key/src/decoding.rs
Normal file
|
@ -0,0 +1,84 @@
|
|||
use roc_module::symbol::Symbol;
|
||||
use roc_types::subs::{Content, FlatType, Subs, Variable};
|
||||
|
||||
use crate::DeriveError;
|
||||
|
||||
#[derive(Hash)]
|
||||
pub enum FlatDecodable {
|
||||
Immediate(Symbol),
|
||||
Key(FlatDecodableKey),
|
||||
}
|
||||
|
||||
#[derive(Hash, PartialEq, Eq, Debug, Clone)]
|
||||
pub enum FlatDecodableKey {
|
||||
List(/* takes one variable */),
|
||||
}
|
||||
|
||||
impl FlatDecodableKey {
|
||||
pub(crate) fn debug_name(&self) -> String {
|
||||
match self {
|
||||
FlatDecodableKey::List() => "list".to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FlatDecodable {
|
||||
pub(crate) fn from_var(subs: &Subs, var: Variable) -> Result<FlatDecodable, DeriveError> {
|
||||
use DeriveError::*;
|
||||
use FlatDecodable::*;
|
||||
match *subs.get_content_without_compacting(var) {
|
||||
Content::Structure(flat_type) => match flat_type {
|
||||
FlatType::Apply(sym, _) => match sym {
|
||||
Symbol::LIST_LIST => Ok(Key(FlatDecodableKey::List())),
|
||||
Symbol::STR_STR => Ok(Immediate(Symbol::DECODE_STRING)),
|
||||
_ => Err(Underivable),
|
||||
},
|
||||
FlatType::Record(_fields, _ext) => {
|
||||
Err(Underivable) // yet
|
||||
}
|
||||
FlatType::TagUnion(_tags, _ext) | FlatType::RecursiveTagUnion(_, _tags, _ext) => {
|
||||
Err(Underivable) // yet
|
||||
}
|
||||
FlatType::FunctionOrTagUnion(_name_index, _, _) => {
|
||||
Err(Underivable) // yet
|
||||
}
|
||||
FlatType::EmptyRecord => {
|
||||
Err(Underivable) // yet
|
||||
}
|
||||
FlatType::EmptyTagUnion => {
|
||||
Err(Underivable) // yet
|
||||
}
|
||||
//
|
||||
FlatType::Erroneous(_) => Err(Underivable),
|
||||
FlatType::Func(..) => Err(Underivable),
|
||||
},
|
||||
Content::Alias(sym, _, real_var, _) => match sym {
|
||||
Symbol::NUM_U8 | Symbol::NUM_UNSIGNED8 => Ok(Immediate(Symbol::DECODE_U8)),
|
||||
Symbol::NUM_U16 | Symbol::NUM_UNSIGNED16 => Ok(Immediate(Symbol::DECODE_U16)),
|
||||
Symbol::NUM_U32 | Symbol::NUM_UNSIGNED32 => Ok(Immediate(Symbol::DECODE_U32)),
|
||||
Symbol::NUM_U64 | Symbol::NUM_UNSIGNED64 => Ok(Immediate(Symbol::DECODE_U64)),
|
||||
Symbol::NUM_U128 | Symbol::NUM_UNSIGNED128 => Ok(Immediate(Symbol::DECODE_U128)),
|
||||
Symbol::NUM_I8 | Symbol::NUM_SIGNED8 => Ok(Immediate(Symbol::DECODE_I8)),
|
||||
Symbol::NUM_I16 | Symbol::NUM_SIGNED16 => Ok(Immediate(Symbol::DECODE_I16)),
|
||||
Symbol::NUM_I32 | Symbol::NUM_SIGNED32 => Ok(Immediate(Symbol::DECODE_I32)),
|
||||
Symbol::NUM_I64 | Symbol::NUM_SIGNED64 => Ok(Immediate(Symbol::DECODE_I64)),
|
||||
Symbol::NUM_I128 | Symbol::NUM_SIGNED128 => Ok(Immediate(Symbol::DECODE_I128)),
|
||||
Symbol::NUM_DEC | Symbol::NUM_DECIMAL => Ok(Immediate(Symbol::DECODE_DEC)),
|
||||
Symbol::NUM_F32 | Symbol::NUM_BINARY32 => Ok(Immediate(Symbol::DECODE_F32)),
|
||||
Symbol::NUM_F64 | Symbol::NUM_BINARY64 => Ok(Immediate(Symbol::DECODE_F64)),
|
||||
// NB: I believe it is okay to unwrap opaques here because derivers are only used
|
||||
// by the backend, and the backend treats opaques like structural aliases.
|
||||
_ => Self::from_var(subs, real_var),
|
||||
},
|
||||
Content::RangedNumber(_) => Err(Underivable),
|
||||
//
|
||||
Content::RecursionVar { .. } => Err(Underivable),
|
||||
Content::Error => Err(Underivable),
|
||||
Content::FlexVar(_)
|
||||
| Content::RigidVar(_)
|
||||
| Content::FlexAbleVar(_, _)
|
||||
| Content::RigidAbleVar(_, _) => Err(UnboundVar),
|
||||
Content::LambdaSet(_) => Err(Underivable),
|
||||
}
|
||||
}
|
||||
}
|
|
@ -13,8 +13,10 @@
|
|||
//! For these reasons the content keying is based on a strategy as well, which are the variants of
|
||||
//! [`DeriveKey`].
|
||||
|
||||
pub mod decoding;
|
||||
pub mod encoding;
|
||||
|
||||
use decoding::{FlatDecodable, FlatDecodableKey};
|
||||
use encoding::{FlatEncodable, FlatEncodableKey};
|
||||
|
||||
use roc_module::symbol::Symbol;
|
||||
|
@ -33,15 +35,14 @@ pub enum DeriveError {
|
|||
#[repr(u8)]
|
||||
pub enum DeriveKey {
|
||||
ToEncoder(FlatEncodableKey),
|
||||
#[allow(unused)]
|
||||
Decoding,
|
||||
Decoder(FlatDecodableKey),
|
||||
}
|
||||
|
||||
impl DeriveKey {
|
||||
pub fn debug_name(&self) -> String {
|
||||
match self {
|
||||
DeriveKey::ToEncoder(key) => format!("toEncoder_{}", key.debug_name()),
|
||||
DeriveKey::Decoding => todo!(),
|
||||
DeriveKey::Decoder(key) => format!("decoder_{}", key.debug_name()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -57,11 +58,40 @@ pub enum Derived {
|
|||
Key(DeriveKey),
|
||||
}
|
||||
|
||||
/// The builtin ability member to derive.
|
||||
#[derive(Clone, Copy)]
|
||||
pub enum DeriveBuiltin {
|
||||
ToEncoder,
|
||||
Decoder,
|
||||
}
|
||||
|
||||
impl TryFrom<Symbol> for DeriveBuiltin {
|
||||
type Error = Symbol;
|
||||
|
||||
fn try_from(value: Symbol) -> Result<Self, Self::Error> {
|
||||
match value {
|
||||
Symbol::ENCODE_TO_ENCODER => Ok(DeriveBuiltin::ToEncoder),
|
||||
Symbol::DECODE_DECODER => Ok(DeriveBuiltin::Decoder),
|
||||
_ => Err(value),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Derived {
|
||||
pub fn encoding(subs: &Subs, var: Variable) -> Result<Self, DeriveError> {
|
||||
match encoding::FlatEncodable::from_var(subs, var)? {
|
||||
pub fn builtin(
|
||||
builtin: DeriveBuiltin,
|
||||
subs: &Subs,
|
||||
var: Variable,
|
||||
) -> Result<Self, DeriveError> {
|
||||
match builtin {
|
||||
DeriveBuiltin::ToEncoder => match encoding::FlatEncodable::from_var(subs, var)? {
|
||||
FlatEncodable::Immediate(imm) => Ok(Derived::Immediate(imm)),
|
||||
FlatEncodable::Key(repr) => Ok(Derived::Key(DeriveKey::ToEncoder(repr))),
|
||||
},
|
||||
DeriveBuiltin::Decoder => match decoding::FlatDecodable::from_var(subs, var)? {
|
||||
FlatDecodable::Immediate(imm) => Ok(Derived::Immediate(imm)),
|
||||
FlatDecodable::Key(repr) => Ok(Derived::Key(DeriveKey::Decoder(repr))),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
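Derived::builtin splits every request into two cases: an Immediate symbol that already exists in the builtins (for example Decode.u8 for a U8), or a DeriveKey describing an implementation that still has to be synthesized (such as the list decoder above). A toy model of that dispatch with stand-in types, nothing here is compiler API:

    #[derive(Debug, PartialEq)]
    enum Derived {
        Immediate(&'static str), // an existing builtin covers this type
        Key(String),             // a derived implementation must be generated
    }

    fn derive_decoder_for(type_name: &str) -> Derived {
        match type_name {
            "Str" => Derived::Immediate("Decode.string"),
            "U8" => Derived::Immediate("Decode.u8"),
            t if t.starts_with("List ") => Derived::Key("decoder_list".to_string()),
            other => Derived::Key(format!("decoder_{}", other.to_lowercase())),
        }
    }

    fn main() {
        assert_eq!(derive_decoder_for("U8"), Derived::Immediate("Decode.u8"));
        assert_eq!(
            derive_decoder_for("List U8"),
            Derived::Key("decoder_list".to_string())
        );
    }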
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "roc_exhaustive"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
authors = ["The Roc Contributors"]
|
||||
license = "UPL-1.0"
|
||||
edition = "2021"
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "roc_fmt"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
authors = ["The Roc Contributors"]
|
||||
license = "UPL-1.0"
|
||||
edition = "2021"
|
||||
|
|
|
@ -288,13 +288,18 @@ fn fmt_expect<'a, 'buf>(
|
|||
is_multiline: bool,
|
||||
indent: u16,
|
||||
) {
|
||||
buf.ensure_ends_with_newline();
|
||||
buf.indent(indent);
|
||||
buf.push_str("expect");
|
||||
|
||||
let return_indent = if is_multiline {
|
||||
buf.newline();
|
||||
indent + INDENT
|
||||
} else {
|
||||
buf.spaces(1);
|
||||
indent
|
||||
};
|
||||
|
||||
buf.push_str("expect");
|
||||
condition.format(buf, return_indent);
|
||||
}
|
||||
|
||||
|
|
|
@ -815,16 +815,24 @@ fn fmt_expect<'a, 'buf>(
|
|||
is_multiline: bool,
|
||||
indent: u16,
|
||||
) {
|
||||
buf.ensure_ends_with_newline();
|
||||
buf.indent(indent);
|
||||
buf.push_str("expect");
|
||||
|
||||
let return_indent = if is_multiline {
|
||||
buf.newline();
|
||||
indent + INDENT
|
||||
} else {
|
||||
buf.spaces(1);
|
||||
indent
|
||||
};
|
||||
|
||||
buf.push_str("expect");
|
||||
condition.format(buf, return_indent);
|
||||
buf.push('\n');
|
||||
continuation.format(buf, return_indent);
|
||||
|
||||
// Always put a blank line after the `expect` line(s)
|
||||
buf.ensure_ends_with_blank_line();
|
||||
|
||||
continuation.format(buf, indent);
|
||||
}
|
||||
|
||||
fn fmt_if<'a, 'buf>(
|
||||
|
|
|
@ -5399,6 +5399,64 @@ mod test_fmt {
|
|||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn expect_single_line() {
|
||||
expr_formats_same(indoc!(
|
||||
r#"
|
||||
x = 5
|
||||
|
||||
expect x == y
|
||||
|
||||
expect y == z
|
||||
|
||||
42
|
||||
"#
|
||||
));
|
||||
|
||||
module_formats_same(indoc!(
|
||||
r#"
|
||||
interface Foo exposes [] imports []
|
||||
|
||||
expect x == y
|
||||
|
||||
expect y == z
|
||||
|
||||
foo = bar
|
||||
"#
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn expect_multiline() {
|
||||
expr_formats_same(indoc!(
|
||||
r#"
|
||||
x = 5
|
||||
|
||||
expect
|
||||
foo bar
|
||||
|> baz
|
||||
|
||||
42
|
||||
"#
|
||||
));
|
||||
|
||||
module_formats_same(indoc!(
|
||||
r#"
|
||||
interface Foo exposes [] imports []
|
||||
|
||||
expect
|
||||
foo bar
|
||||
|> baz
|
||||
|
||||
expect
|
||||
blah
|
||||
etc
|
||||
|
||||
foo = bar
|
||||
"#
|
||||
));
|
||||
}
|
||||
|
||||
// this is a parse error atm
|
||||
// #[test]
|
||||
// fn multiline_apply() {
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
[package]
|
||||
name = "roc_gen_dev"
|
||||
description = "The development backend for the Roc compiler"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
authors = ["The Roc Contributors"]
|
||||
license = "UPL-1.0"
|
||||
edition = "2021"
|
||||
|
|
|
@ -1264,12 +1264,12 @@ impl X86_64Assembler {
|
|||
}
|
||||
}
|
||||
const REX: u8 = 0x40;
|
||||
const REX_W: u8 = REX + 0x8;
|
||||
const REX_W: u8 = REX | 0x8;
|
||||
|
||||
#[inline(always)]
|
||||
fn add_rm_extension<T: RegTrait>(reg: T, byte: u8) -> u8 {
|
||||
if reg.value() > 7 {
|
||||
byte + 1
|
||||
byte | 1
|
||||
} else {
|
||||
byte
|
||||
}
|
||||
|
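Replacing `+` with `|` throughout this file makes the intent explicit: REX prefixes and ModRM bytes are built from disjoint bit fields, so bitwise OR composes them with no risk of a carry spilling into a neighboring field. The REX prefix has the layout 0100WRXB, and a register-direct ModRM byte is mod=11, reg in bits 5..3, rm in bits 2..0. A small standalone check of the encoding used by binop_reg64_reg64 below (register numbers are illustrative):

    fn main() {
        // REX.W: 0b0100_0000 with the W bit set.
        const REX: u8 = 0x40;
        const REX_W: u8 = REX | 0x8;
        assert_eq!(REX_W, 0x48);

        // ModRM for an `add rax, rcx` style register-register op:
        // mod = 11 (0xC0), reg = src in bits 5..3, rm = dst in bits 2..0.
        let dst = 0u8; // rax
        let src = 1u8; // rcx
        let modrm = 0xC0 | (src << 3) | dst;
        assert_eq!(modrm, 0xC8);

        // Because the fields never overlap, `|` and `+` agree here,
        // but `|` can never carry into another field.
        assert_eq!(0xC0u8 | (src << 3) | dst, 0xC0u8 + (src << 3) + dst);
    }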
@ -1283,7 +1283,7 @@ fn add_opcode_extension(reg: X86_64GeneralReg, byte: u8) -> u8 {
|
|||
#[inline(always)]
|
||||
fn add_reg_extension<T: RegTrait>(reg: T, byte: u8) -> u8 {
|
||||
if reg.value() > 7 {
|
||||
byte + 4
|
||||
byte | 4
|
||||
} else {
|
||||
byte
|
||||
}
|
||||
|
@ -1300,7 +1300,7 @@ fn binop_reg64_reg64(
|
|||
let rex = add_reg_extension(src, rex);
|
||||
let dst_mod = dst as u8 % 8;
|
||||
let src_mod = (src as u8 % 8) << 3;
|
||||
buf.extend(&[rex, op_code, 0xC0 + dst_mod + src_mod]);
|
||||
buf.extend(&[rex, op_code, 0xC0 | dst_mod | src_mod]);
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
|
@ -1315,7 +1315,7 @@ fn extended_binop_reg64_reg64(
|
|||
let rex = add_reg_extension(src, rex);
|
||||
let dst_mod = dst as u8 % 8;
|
||||
let src_mod = (src as u8 % 8) << 3;
|
||||
buf.extend(&[rex, op_code1, op_code2, 0xC0 + dst_mod + src_mod]);
|
||||
buf.extend(&[rex, op_code1, op_code2, 0xC0 | dst_mod | src_mod]);
|
||||
}
|
||||
|
||||
// Below here are the functions for all of the assembly instructions.
|
||||
|
@ -1330,7 +1330,7 @@ fn add_reg64_imm32(buf: &mut Vec<'_, u8>, dst: X86_64GeneralReg, imm: i32) {
|
|||
let rex = add_rm_extension(dst, REX_W);
|
||||
let dst_mod = dst as u8 % 8;
|
||||
buf.reserve(7);
|
||||
buf.extend(&[rex, 0x81, 0xC0 + dst_mod]);
|
||||
buf.extend(&[rex, 0x81, 0xC0 | dst_mod]);
|
||||
buf.extend(&imm.to_le_bytes());
|
||||
}
|
||||
|
||||
|
@ -1350,13 +1350,13 @@ fn addsd_freg64_freg64(buf: &mut Vec<'_, u8>, dst: X86_64FloatReg, src: X86_64Fl
|
|||
if dst_high || src_high {
|
||||
buf.extend(&[
|
||||
0xF2,
|
||||
0x40 + ((dst_high as u8) << 2) + (src_high as u8),
|
||||
0x40 | ((dst_high as u8) << 2) | (src_high as u8),
|
||||
0x0F,
|
||||
0x58,
|
||||
0xC0 + (dst_mod << 3) + (src_mod),
|
||||
0xC0 | (dst_mod << 3) | (src_mod),
|
||||
])
|
||||
} else {
|
||||
buf.extend(&[0xF2, 0x0F, 0x58, 0xC0 + (dst_mod << 3) + (src_mod)])
|
||||
buf.extend(&[0xF2, 0x0F, 0x58, 0xC0 | (dst_mod << 3) | (src_mod)])
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1370,13 +1370,13 @@ fn andpd_freg64_freg64(buf: &mut Vec<'_, u8>, dst: X86_64FloatReg, src: X86_64Fl
|
|||
if dst_high || src_high {
|
||||
buf.extend(&[
|
||||
0x66,
|
||||
0x40 + ((dst_high as u8) << 2) + (src_high as u8),
|
||||
0x40 | ((dst_high as u8) << 2) | (src_high as u8),
|
||||
0x0F,
|
||||
0x54,
|
||||
0xC0 + (dst_mod << 3) + (src_mod),
|
||||
0xC0 | (dst_mod << 3) | (src_mod),
|
||||
])
|
||||
} else {
|
||||
buf.extend(&[0x66, 0x0F, 0x54, 0xC0 + (dst_mod << 3) + (src_mod)])
|
||||
buf.extend(&[0x66, 0x0F, 0x54, 0xC0 | (dst_mod << 3) | (src_mod)])
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1385,7 +1385,7 @@ fn andpd_freg64_freg64(buf: &mut Vec<'_, u8>, dst: X86_64FloatReg, src: X86_64Fl
|
|||
fn and_reg64_imm8(buf: &mut Vec<'_, u8>, dst: X86_64GeneralReg, imm: i8) {
|
||||
let rex = add_rm_extension(dst, REX_W);
|
||||
let dst_mod = dst as u8 % 8;
|
||||
buf.extend(&[rex, 0x83, 0xE0 + dst_mod, imm as u8]);
|
||||
buf.extend(&[rex, 0x83, 0xE0 | dst_mod, imm as u8]);
|
||||
}
|
||||
|
||||
/// `CMOVL r64,r/m64` -> Move if less (SF ≠ OF).
|
||||
|
@ -1395,7 +1395,7 @@ fn cmovl_reg64_reg64(buf: &mut Vec<'_, u8>, dst: X86_64GeneralReg, src: X86_64Ge
|
|||
let rex = add_rm_extension(src, rex);
|
||||
let dst_mod = (dst as u8 % 8) << 3;
|
||||
let src_mod = src as u8 % 8;
|
||||
buf.extend(&[rex, 0x0F, 0x4C, 0xC0 + dst_mod + src_mod]);
|
||||
buf.extend(&[rex, 0x0F, 0x4C, 0xC0 | dst_mod | src_mod]);
|
||||
}
|
||||
|
||||
/// `CMP r/m64,i32` -> Compare i32 to r/m64.
|
||||
|
@ -1404,7 +1404,7 @@ fn cmp_reg64_imm32(buf: &mut Vec<'_, u8>, dst: X86_64GeneralReg, imm: i32) {
|
|||
let rex = add_rm_extension(dst, REX_W);
|
||||
let dst_mod = dst as u8 % 8;
|
||||
buf.reserve(7);
|
||||
buf.extend(&[rex, 0x81, 0xF8 + dst_mod]);
|
||||
buf.extend(&[rex, 0x81, 0xF8 | dst_mod]);
|
||||
buf.extend(&imm.to_le_bytes());
|
||||
}
|
||||
|
||||
|
@ -1452,7 +1452,7 @@ fn mov_reg64_imm32(buf: &mut Vec<'_, u8>, dst: X86_64GeneralReg, imm: i32) {
|
|||
let rex = add_rm_extension(dst, REX_W);
|
||||
let dst_mod = dst as u8 % 8;
|
||||
buf.reserve(7);
|
||||
buf.extend(&[rex, 0xC7, 0xC0 + dst_mod]);
|
||||
buf.extend(&[rex, 0xC7, 0xC0 | dst_mod]);
|
||||
buf.extend(&imm.to_le_bytes());
|
||||
}
|
||||
|
||||
|
@ -1465,7 +1465,7 @@ fn mov_reg64_imm64(buf: &mut Vec<'_, u8>, dst: X86_64GeneralReg, imm: i64) {
|
|||
let rex = add_opcode_extension(dst, REX_W);
|
||||
let dst_mod = dst as u8 % 8;
|
||||
buf.reserve(10);
|
||||
buf.extend(&[rex, 0xB8 + dst_mod]);
|
||||
buf.extend(&[rex, 0xB8 | dst_mod]);
|
||||
buf.extend(&imm.to_le_bytes());
|
||||
}
|
||||
}
|
||||
|
@ -1501,7 +1501,7 @@ fn mov_base64_offset32_reg64(
|
|||
let src_mod = (src as u8 % 8) << 3;
|
||||
let base_mod = base as u8 % 8;
|
||||
buf.reserve(8);
|
||||
buf.extend(&[rex, 0x89, 0x80 + src_mod + base_mod]);
|
||||
buf.extend(&[rex, 0x89, 0x80 | src_mod | base_mod]);
|
||||
// Using RSP or R12 requires a secondary index byte.
|
||||
if base == X86_64GeneralReg::RSP || base == X86_64GeneralReg::R12 {
|
||||
buf.push(0x24);
|
||||
|
@ -1522,7 +1522,7 @@ fn mov_reg64_base64_offset32(
|
|||
let dst_mod = (dst as u8 % 8) << 3;
|
||||
let base_mod = base as u8 % 8;
|
||||
buf.reserve(8);
|
||||
buf.extend(&[rex, 0x8B, 0x80 + dst_mod + base_mod]);
|
||||
buf.extend(&[rex, 0x8B, 0x80 | dst_mod | base_mod]);
|
||||
// Using RSP or R12 requires a secondary index byte.
|
||||
if base == X86_64GeneralReg::RSP || base == X86_64GeneralReg::R12 {
|
||||
buf.push(0x24);
|
||||
|
@ -1543,7 +1543,7 @@ fn movzx_reg64_base8_offset32(
|
|||
let dst_mod = (dst as u8 % 8) << 3;
|
||||
let base_mod = base as u8 % 8;
|
||||
buf.reserve(9);
|
||||
buf.extend(&[rex, 0x0F, 0xB6, 0x80 + dst_mod + base_mod]);
|
||||
buf.extend(&[rex, 0x0F, 0xB6, 0x80 | dst_mod | base_mod]);
|
||||
// Using RSP or R12 requires a secondary index byte.
|
||||
if base == X86_64GeneralReg::RSP || base == X86_64GeneralReg::R12 {
|
||||
buf.push(0x24);
|
||||
|
@ -1571,13 +1571,13 @@ fn raw_movsd_freg64_freg64(buf: &mut Vec<'_, u8>, dst: X86_64FloatReg, src: X86_
|
|||
if dst_high || src_high {
|
||||
buf.extend(&[
|
||||
0xF2,
|
||||
0x40 + ((dst_high as u8) << 2) + (src_high as u8),
|
||||
0x40 | ((dst_high as u8) << 2) | (src_high as u8),
|
||||
0x0F,
|
||||
0x10,
|
||||
0xC0 + (dst_mod << 3) + (src_mod),
|
||||
0xC0 | (dst_mod << 3) | (src_mod),
|
||||
])
|
||||
} else {
|
||||
buf.extend(&[0xF2, 0x0F, 0x10, 0xC0 + (dst_mod << 3) + (src_mod)])
|
||||
buf.extend(&[0xF2, 0x0F, 0x10, 0xC0 | (dst_mod << 3) | (src_mod)])
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1587,10 +1587,10 @@ fn movss_freg32_rip_offset32(buf: &mut Vec<'_, u8>, dst: X86_64FloatReg, offset:
|
|||
let dst_mod = dst as u8 % 8;
|
||||
if dst as u8 > 7 {
|
||||
buf.reserve(9);
|
||||
buf.extend(&[0xF3, 0x44, 0x0F, 0x10, 0x05 + (dst_mod << 3)]);
|
||||
buf.extend(&[0xF3, 0x44, 0x0F, 0x10, 0x05 | (dst_mod << 3)]);
|
||||
} else {
|
||||
buf.reserve(8);
|
||||
buf.extend(&[0xF3, 0x0F, 0x10, 0x05 + (dst_mod << 3)]);
|
||||
buf.extend(&[0xF3, 0x0F, 0x10, 0x05 | (dst_mod << 3)]);
|
||||
}
|
||||
buf.extend(&offset.to_le_bytes());
|
||||
}
|
||||
|
@ -1601,10 +1601,10 @@ fn movsd_freg64_rip_offset32(buf: &mut Vec<'_, u8>, dst: X86_64FloatReg, offset:
|
|||
let dst_mod = dst as u8 % 8;
|
||||
if dst as u8 > 7 {
|
||||
buf.reserve(9);
|
||||
buf.extend(&[0xF2, 0x44, 0x0F, 0x10, 0x05 + (dst_mod << 3)]);
|
||||
buf.extend(&[0xF2, 0x44, 0x0F, 0x10, 0x05 | (dst_mod << 3)]);
|
||||
} else {
|
||||
buf.reserve(8);
|
||||
buf.extend(&[0xF2, 0x0F, 0x10, 0x05 + (dst_mod << 3)]);
|
||||
buf.extend(&[0xF2, 0x0F, 0x10, 0x05 | (dst_mod << 3)]);
|
||||
}
|
||||
buf.extend(&offset.to_le_bytes());
|
||||
}
|
||||
|
@ -1626,7 +1626,7 @@ fn movsd_base64_offset32_freg64(
|
|||
if src as u8 > 7 || base as u8 > 7 {
|
||||
buf.push(rex);
|
||||
}
|
||||
buf.extend(&[0x0F, 0x11, 0x80 + src_mod + base_mod]);
|
||||
buf.extend(&[0x0F, 0x11, 0x80 | src_mod | base_mod]);
|
||||
// Using RSP or R12 requires a secondary index byte.
|
||||
if base == X86_64GeneralReg::RSP || base == X86_64GeneralReg::R12 {
|
||||
buf.push(0x24);
|
||||
|
@ -1651,7 +1651,7 @@ fn movsd_freg64_base64_offset32(
|
|||
if dst as u8 > 7 || base as u8 > 7 {
|
||||
buf.push(rex);
|
||||
}
|
||||
buf.extend(&[0x0F, 0x10, 0x80 + dst_mod + base_mod]);
|
||||
buf.extend(&[0x0F, 0x10, 0x80 | dst_mod | base_mod]);
|
||||
// Using RSP or R12 requires a secondary index byte.
|
||||
if base == X86_64GeneralReg::RSP || base == X86_64GeneralReg::R12 {
|
||||
buf.push(0x24);
|
||||
|
@ -1664,7 +1664,7 @@ fn movsd_freg64_base64_offset32(
|
|||
fn neg_reg64(buf: &mut Vec<'_, u8>, reg: X86_64GeneralReg) {
|
||||
let rex = add_rm_extension(reg, REX_W);
|
||||
let reg_mod = reg as u8 % 8;
|
||||
buf.extend(&[rex, 0xF7, 0xD8 + reg_mod]);
|
||||
buf.extend(&[rex, 0xF7, 0xD8 | reg_mod]);
|
||||
}
|
||||
|
||||
// helper function for `set*` instructions
|
||||
|
@ -1677,10 +1677,10 @@ fn set_reg64_help(op_code: u8, buf: &mut Vec<'_, u8>, reg: X86_64GeneralReg) {
|
|||
let reg_mod = reg as u8 % 8;
|
||||
use X86_64GeneralReg::*;
|
||||
match reg {
|
||||
RAX | RCX | RDX | RBX => buf.extend(&[0x0F, op_code, 0xC0 + reg_mod]),
|
||||
RSP | RBP | RSI | RDI => buf.extend(&[REX, 0x0F, op_code, 0xC0 + reg_mod]),
|
||||
RAX | RCX | RDX | RBX => buf.extend(&[0x0F, op_code, 0xC0 | reg_mod]),
|
||||
RSP | RBP | RSI | RDI => buf.extend(&[REX, 0x0F, op_code, 0xC0 | reg_mod]),
|
||||
R8 | R9 | R10 | R11 | R12 | R13 | R14 | R15 => {
|
||||
buf.extend(&[REX + 1, 0x0F, op_code, 0xC0 + reg_mod])
|
||||
buf.extend(&[REX | 1, 0x0F, op_code, 0xC0 | reg_mod])
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1702,7 +1702,7 @@ fn cvtsi2_help<T: RegTrait, U: RegTrait>(
|
|||
let mod1 = (dst.value() % 8) << 3;
|
||||
let mod2 = src.value() % 8;
|
||||
|
||||
buf.extend(&[op_code1, rex, 0x0F, op_code2, 0xC0 + mod1 + mod2])
|
||||
buf.extend(&[op_code1, rex, 0x0F, op_code2, 0xC0 | mod1 | mod2])
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
|
@ -1716,7 +1716,7 @@ fn cvtsx2_help<T: RegTrait, V: RegTrait>(
|
|||
let mod1 = (dst.value() % 8) << 3;
|
||||
let mod2 = src.value() % 8;
|
||||
|
||||
buf.extend(&[op_code1, 0x0F, op_code2, 0xC0 + mod1 + mod2])
|
||||
buf.extend(&[op_code1, 0x0F, op_code2, 0xC0 | mod1 | mod2])
|
||||
}
|
||||
|
||||
/// `SETE r/m64` -> Set Byte on Condition - zero/equal (ZF=1)
|
||||
|
@ -1794,7 +1794,7 @@ fn sub_reg64_imm32(buf: &mut Vec<'_, u8>, dst: X86_64GeneralReg, imm: i32) {
|
|||
let rex = add_rm_extension(dst, REX_W);
|
||||
let dst_mod = dst as u8 % 8;
|
||||
buf.reserve(7);
|
||||
buf.extend(&[rex, 0x81, 0xE8 + dst_mod]);
|
||||
buf.extend(&[rex, 0x81, 0xE8 | dst_mod]);
|
||||
buf.extend(&imm.to_le_bytes());
|
||||
}
|
||||
|
||||
|
@ -1810,9 +1810,9 @@ fn pop_reg64(buf: &mut Vec<'_, u8>, reg: X86_64GeneralReg) {
|
|||
let reg_mod = reg as u8 % 8;
|
||||
if reg as u8 > 7 {
|
||||
let rex = add_opcode_extension(reg, REX);
|
||||
buf.extend(&[rex, 0x58 + reg_mod]);
|
||||
buf.extend(&[rex, 0x58 | reg_mod]);
|
||||
} else {
|
||||
buf.push(0x58 + reg_mod);
|
||||
buf.push(0x58 | reg_mod);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1822,9 +1822,9 @@ fn push_reg64(buf: &mut Vec<'_, u8>, reg: X86_64GeneralReg) {
|
|||
let reg_mod = reg as u8 % 8;
|
||||
if reg as u8 > 7 {
|
||||
let rex = add_opcode_extension(reg, REX);
|
||||
buf.extend(&[rex, 0x50 + reg_mod]);
|
||||
buf.extend(&[rex, 0x50 | reg_mod]);
|
||||
} else {
|
||||
buf.push(0x50 + reg_mod);
|
||||
buf.push(0x50 | reg_mod);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
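The recurring change in the hunks above swaps `+` for `|` when the backend packs ModRM/REX bytes out of pre-masked bit fields. A minimal sketch of the idea (hypothetical helper, not code from this diff): each operand occupies its own field of the byte, so bitwise OR composes them with no possibility of carrying into a neighbouring field, whereas `+` only happens to give the same result because every operand has already been masked into range.

/// Illustrative only: pack a register-direct ModRM byte from its three fields.
fn modrm_register_direct(reg: u8, rm: u8) -> u8 {
    let mod_bits = 0b11 << 6; // mod = 11: register-direct addressing, bits 7..6
    let reg_bits = (reg % 8) << 3; // reg field occupies bits 5..3
    let rm_bits = rm % 8; // r/m field occupies bits 2..0
    mod_bits | reg_bits | rm_bits
}

fn main() {
    // reg = RAX (0), r/m = RCX (1) -> 0xC1, matching the `0xC0 | ...` pattern above
    assert_eq!(modrm_register_direct(0, 1), 0xC1);
    println!("{:#04X}", modrm_register_direct(0, 1));
}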
@@ -1,7 +1,7 @@
[package]
name = "roc_gen_llvm"
description = "The LLVM backend for the Roc compiler"
version = "0.1.0"
version = "0.0.1"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
edition = "2021"
@@ -4220,10 +4220,16 @@ pub fn build_procedures<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
opt_level: OptLevel,
procedures: MutMap<(Symbol, ProcLayout<'a>), roc_mono::ir::Proc<'a>>,
entry_point: EntryPoint<'a>,
opt_entry_point: Option<EntryPoint<'a>>,
debug_output_file: Option<&Path>,
) {
build_procedures_help(env, opt_level, procedures, entry_point, debug_output_file);
build_procedures_help(
env,
opt_level,
procedures,
opt_entry_point,
debug_output_file,
);
}

pub fn build_wasm_test_wrapper<'a, 'ctx, 'env>(

@@ -4236,7 +4242,7 @@ pub fn build_wasm_test_wrapper<'a, 'ctx, 'env>(
env,
opt_level,
procedures,
entry_point,
Some(entry_point),
Some(Path::new("/tmp/test.ll")),
);

@@ -4253,7 +4259,7 @@ pub fn build_procedures_return_main<'a, 'ctx, 'env>(
env,
opt_level,
procedures,
entry_point,
Some(entry_point),
Some(Path::new("/tmp/test.ll")),
);

@@ -4265,13 +4271,13 @@ pub fn build_procedures_expose_expects<'a, 'ctx, 'env>(
opt_level: OptLevel,
expects: &[Symbol],
procedures: MutMap<(Symbol, ProcLayout<'a>), roc_mono::ir::Proc<'a>>,
entry_point: EntryPoint<'a>,
opt_entry_point: Option<EntryPoint<'a>>,
) -> Vec<'a, &'a str> {
let mod_solutions = build_procedures_help(
env,
opt_level,
procedures,
entry_point,
opt_entry_point,
Some(Path::new("/tmp/test.ll")),
);

@@ -4333,7 +4339,7 @@ fn build_procedures_help<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
opt_level: OptLevel,
procedures: MutMap<(Symbol, ProcLayout<'a>), roc_mono::ir::Proc<'a>>,
entry_point: EntryPoint<'a>,
opt_entry_point: Option<EntryPoint<'a>>,
debug_output_file: Option<&Path>,
) -> &'a ModSolutions {
let mut layout_ids = roc_mono::layout::LayoutIds::default();

@@ -4341,7 +4347,7 @@ fn build_procedures_help<'a, 'ctx, 'env>(

let it = procedures.iter().map(|x| x.1);

let solutions = match roc_alias_analysis::spec_program(opt_level, entry_point, it) {
let solutions = match roc_alias_analysis::spec_program(opt_level, opt_entry_point, it) {
Err(e) => panic!("Error in alias analysis: {}", e),
Ok(solutions) => solutions,
};

@@ -6951,21 +6957,30 @@ fn build_int_binop<'a, 'ctx, 'env>(
// but llvm normalizes to the above ordering in -O3
let zero = rhs.get_type().const_zero();
let neg_1 = rhs.get_type().const_int(-1i64 as u64, false);
let is_signed = int_width.is_signed();

let special_block = env.context.append_basic_block(parent, "special_block");
let default_block = env.context.append_basic_block(parent, "default_block");
let cont_block = env.context.append_basic_block(parent, "branchcont");

if is_signed {
bd.build_switch(
rhs,
default_block,
&[(zero, special_block), (neg_1, special_block)],
);
)
} else {
bd.build_switch(rhs, default_block, &[(zero, special_block)])
};

let condition_rem = {
bd.position_at_end(default_block);

let rem = bd.build_int_signed_rem(lhs, rhs, "int_rem");
let rem = if is_signed {
bd.build_int_signed_rem(lhs, rhs, "int_rem")
} else {
bd.build_int_unsigned_rem(lhs, rhs, "uint_rem")
};
let result = bd.build_int_compare(IntPredicate::EQ, rem, zero, "is_zero_rem");

bd.build_unconditional_branch(cont_block);

@@ -6976,10 +6991,15 @@ fn build_int_binop<'a, 'ctx, 'env>(
bd.position_at_end(special_block);

let is_zero = bd.build_int_compare(IntPredicate::EQ, lhs, zero, "is_zero_lhs");

let result = if is_signed {
let is_neg_one =
bd.build_int_compare(IntPredicate::EQ, rhs, neg_1, "is_neg_one_rhs");

let result = bd.build_or(is_neg_one, is_zero, "cond");
bd.build_or(is_neg_one, is_zero, "cond")
} else {
is_zero
};

bd.build_unconditional_branch(cont_block);
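The `build_int_binop` hunks above generalize what appears to be the is-multiple-of lowering from signed-only to both signednesses: a special block short-circuits divisors that would trap in the remainder instruction (zero, plus -1 for signed), and the default block checks whether the remainder is zero, using the signed or unsigned rem as appropriate. A plain-Rust restatement of that control flow, assuming this really is the isMultipleOf path (function names are illustrative, not from the compiler):

fn is_multiple_of_i64(lhs: i64, rhs: i64) -> bool {
    match rhs {
        // the "special block": `lhs % 0` and `i64::MIN % -1` would trap/overflow
        0 => lhs == 0,
        -1 => true,
        _ => lhs % rhs == 0,
    }
}

fn is_multiple_of_u64(lhs: u64, rhs: u64) -> bool {
    // unsigned values only need the zero-divisor special case
    if rhs == 0 {
        lhs == 0
    } else {
        lhs % rhs == 0
    }
}

fn main() {
    assert!(is_multiple_of_i64(i64::MIN, -1));
    assert!(!is_multiple_of_u64(7, 0));
    assert!(is_multiple_of_u64(12, 4));
}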
@ -1,17 +1,21 @@
|
|||
use crate::llvm::bitcode::call_bitcode_fn;
|
||||
use crate::llvm::build::{store_roc_value, Env};
|
||||
use crate::debug_info_init;
|
||||
use crate::llvm::bitcode::call_str_bitcode_fn;
|
||||
use crate::llvm::build::{get_tag_id, store_roc_value, Env};
|
||||
use crate::llvm::build_list::{self, incrementing_elem_loop};
|
||||
use crate::llvm::convert::basic_type_from_layout;
|
||||
use crate::llvm::convert::{basic_type_from_layout, RocUnion};
|
||||
use inkwell::builder::Builder;
|
||||
use inkwell::types::BasicType;
|
||||
use inkwell::values::{BasicValueEnum, IntValue, PointerValue};
|
||||
use inkwell::module::Linkage;
|
||||
use inkwell::types::{BasicMetadataTypeEnum, BasicType};
|
||||
use inkwell::values::{BasicValueEnum, FunctionValue, IntValue, PointerValue};
|
||||
use inkwell::AddressSpace;
|
||||
use roc_builtins::bitcode;
|
||||
use roc_module::symbol::Symbol;
|
||||
use roc_mono::layout::{Builtin, Layout, LayoutIds, UnionLayout};
|
||||
use roc_region::all::Region;
|
||||
|
||||
use super::build::{load_symbol_and_layout, Scope};
|
||||
use super::build::{
|
||||
add_func, load_roc_value, load_symbol_and_layout, use_roc_value, FunctionSpec, Scope,
|
||||
};
|
||||
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
struct Cursors<'ctx> {
|
||||
|
@ -204,19 +208,19 @@ fn build_clone<'a, 'ctx, 'env>(
|
|||
when_recursive,
|
||||
),
|
||||
|
||||
Layout::Struct {
|
||||
field_layouts: _, ..
|
||||
} => {
|
||||
if layout.safe_to_memcpy() {
|
||||
build_copy(env, ptr, cursors.offset, value)
|
||||
} else {
|
||||
todo!()
|
||||
}
|
||||
}
|
||||
Layout::Struct { field_layouts, .. } => build_clone_struct(
|
||||
env,
|
||||
layout_ids,
|
||||
ptr,
|
||||
cursors,
|
||||
value,
|
||||
field_layouts,
|
||||
when_recursive,
|
||||
),
|
||||
|
||||
Layout::LambdaSet(_) => unreachable!("cannot compare closures"),
|
||||
|
||||
Layout::Union(_union_layout) => {
|
||||
Layout::Union(union_layout) => {
|
||||
if layout.safe_to_memcpy() {
|
||||
let ptr = unsafe {
|
||||
env.builder
|
||||
|
@ -230,24 +234,50 @@ fn build_clone<'a, 'ctx, 'env>(
|
|||
|
||||
store_roc_value(env, layout, ptr, value);
|
||||
|
||||
let width = value.get_type().size_of().unwrap();
|
||||
env.builder
|
||||
.build_int_add(cursors.offset, width, "new_offset")
|
||||
cursors.extra_offset
|
||||
} else {
|
||||
todo!()
|
||||
build_clone_tag(
|
||||
env,
|
||||
layout_ids,
|
||||
ptr,
|
||||
cursors,
|
||||
value,
|
||||
union_layout,
|
||||
WhenRecursive::Loop(union_layout),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
Layout::Boxed(inner_layout) => build_box_eq(
|
||||
Layout::Boxed(inner_layout) => {
|
||||
// write the offset
|
||||
build_copy(env, ptr, cursors.offset, cursors.extra_offset.into());
|
||||
|
||||
let source = value.into_pointer_value();
|
||||
let value = load_roc_value(env, *inner_layout, source, "inner");
|
||||
|
||||
let inner_width = env
|
||||
.ptr_int()
|
||||
.const_int(inner_layout.stack_size(env.target_info) as u64, false);
|
||||
|
||||
let new_extra = env
|
||||
.builder
|
||||
.build_int_add(cursors.offset, inner_width, "new_extra");
|
||||
|
||||
let cursors = Cursors {
|
||||
offset: cursors.extra_offset,
|
||||
extra_offset: new_extra,
|
||||
};
|
||||
|
||||
build_clone(
|
||||
env,
|
||||
layout_ids,
|
||||
ptr,
|
||||
cursors,
|
||||
value,
|
||||
*inner_layout,
|
||||
when_recursive,
|
||||
lhs_layout,
|
||||
inner_layout,
|
||||
lhs_val,
|
||||
rhs_val,
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
Layout::RecursivePointer => match when_recursive {
|
||||
WhenRecursive::Unreachable => {
|
||||
|
@ -260,27 +290,249 @@ fn build_clone<'a, 'ctx, 'env>(
|
|||
let bt = basic_type_from_layout(env, &layout);
|
||||
|
||||
// cast the i64 pointer to a pointer to block of memory
|
||||
let field1_cast = env
|
||||
.builder
|
||||
.build_bitcast(lhs_val, bt, "i64_to_opaque")
|
||||
.into_pointer_value();
|
||||
let field1_cast = env.builder.build_bitcast(value, bt, "i64_to_opaque");
|
||||
|
||||
let field2_cast = env
|
||||
.builder
|
||||
.build_bitcast(rhs_val, bt, "i64_to_opaque")
|
||||
.into_pointer_value();
|
||||
|
||||
build_tag_eq(
|
||||
build_clone_tag(
|
||||
env,
|
||||
layout_ids,
|
||||
ptr,
|
||||
cursors,
|
||||
field1_cast,
|
||||
union_layout,
|
||||
WhenRecursive::Loop(union_layout),
|
||||
&union_layout,
|
||||
field1_cast.into(),
|
||||
field2_cast.into(),
|
||||
)
|
||||
}
|
||||
},
|
||||
*/
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn build_clone_struct<'a, 'ctx, 'env>(
|
||||
env: &Env<'a, 'ctx, 'env>,
|
||||
layout_ids: &mut LayoutIds<'a>,
|
||||
ptr: PointerValue<'ctx>,
|
||||
cursors: Cursors<'ctx>,
|
||||
value: BasicValueEnum<'ctx>,
|
||||
field_layouts: &[Layout<'a>],
|
||||
when_recursive: WhenRecursive<'a>,
|
||||
) -> IntValue<'ctx> {
|
||||
let layout = Layout::struct_no_name_order(field_layouts);
|
||||
|
||||
if layout.safe_to_memcpy() {
|
||||
build_copy(env, ptr, cursors.offset, value)
|
||||
} else {
|
||||
let mut cursors = cursors;
|
||||
|
||||
let structure = value.into_struct_value();
|
||||
|
||||
for (i, field_layout) in field_layouts.iter().enumerate() {
|
||||
let field = env
|
||||
.builder
|
||||
.build_extract_value(structure, i as _, "extract")
|
||||
.unwrap();
|
||||
|
||||
let field = use_roc_value(env, *field_layout, field, "field");
|
||||
|
||||
let new_extra = build_clone(
|
||||
env,
|
||||
layout_ids,
|
||||
ptr,
|
||||
cursors,
|
||||
field,
|
||||
*field_layout,
|
||||
when_recursive,
|
||||
);
|
||||
|
||||
let field_width = env
|
||||
.ptr_int()
|
||||
.const_int(field_layout.stack_size(env.target_info) as u64, false);
|
||||
|
||||
cursors.extra_offset = new_extra;
|
||||
cursors.offset = env
|
||||
.builder
|
||||
.build_int_add(cursors.offset, field_width, "offset");
|
||||
}
|
||||
|
||||
cursors.extra_offset
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn build_clone_tag<'a, 'ctx, 'env>(
|
||||
env: &Env<'a, 'ctx, 'env>,
|
||||
layout_ids: &mut LayoutIds<'a>,
|
||||
ptr: PointerValue<'ctx>,
|
||||
cursors: Cursors<'ctx>,
|
||||
value: BasicValueEnum<'ctx>,
|
||||
union_layout: UnionLayout<'a>,
|
||||
when_recursive: WhenRecursive<'a>,
|
||||
) -> IntValue<'ctx> {
|
||||
let layout = Layout::Union(union_layout);
|
||||
let layout_id = layout_ids.get(Symbol::CLONE, &layout);
|
||||
let fn_name = layout_id.to_symbol_string(Symbol::CLONE, &env.interns);
|
||||
|
||||
let function = match env.module.get_function(fn_name.as_str()) {
|
||||
Some(function_value) => function_value,
|
||||
None => {
|
||||
let block = env.builder.get_insert_block().expect("to be in a function");
|
||||
let di_location = env.builder.get_current_debug_location().unwrap();
|
||||
|
||||
let function_type = env.ptr_int().fn_type(
|
||||
&[
|
||||
env.context.i8_type().ptr_type(AddressSpace::Generic).into(),
|
||||
env.ptr_int().into(),
|
||||
env.ptr_int().into(),
|
||||
BasicMetadataTypeEnum::from(value.get_type()),
|
||||
],
|
||||
false,
|
||||
);
|
||||
|
||||
let function_value = add_func(
|
||||
env.context,
|
||||
env.module,
|
||||
&fn_name,
|
||||
FunctionSpec::known_fastcc(function_type),
|
||||
Linkage::Private,
|
||||
);
|
||||
|
||||
let subprogram = env.new_subprogram(&fn_name);
|
||||
function_value.set_subprogram(subprogram);
|
||||
|
||||
env.dibuilder.finalize();
|
||||
|
||||
build_clone_tag_help(
|
||||
env,
|
||||
layout_ids,
|
||||
union_layout,
|
||||
when_recursive,
|
||||
function_value,
|
||||
);
|
||||
|
||||
env.builder.position_at_end(block);
|
||||
env.builder
|
||||
.set_current_debug_location(env.context, di_location);
|
||||
|
||||
function_value
|
||||
}
|
||||
};
|
||||
|
||||
let call = env.builder.build_call(
|
||||
function,
|
||||
&[
|
||||
ptr.into(),
|
||||
cursors.offset.into(),
|
||||
cursors.extra_offset.into(),
|
||||
value.into(),
|
||||
],
|
||||
"build_clone_tag",
|
||||
);
|
||||
|
||||
call.set_call_convention(function.get_call_conventions());
|
||||
|
||||
let result = call.try_as_basic_value().left().unwrap();
|
||||
|
||||
result.into_int_value()
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn build_clone_tag_help<'a, 'ctx, 'env>(
|
||||
env: &Env<'a, 'ctx, 'env>,
|
||||
layout_ids: &mut LayoutIds<'a>,
|
||||
union_layout: UnionLayout<'a>,
|
||||
when_recursive: WhenRecursive<'a>,
|
||||
fn_val: FunctionValue<'ctx>,
|
||||
) {
|
||||
use bumpalo::collections::Vec;
|
||||
|
||||
let context = &env.context;
|
||||
let builder = env.builder;
|
||||
|
||||
// Add a basic block for the entry point
|
||||
let entry = context.append_basic_block(fn_val, "entry");
|
||||
|
||||
builder.position_at_end(entry);
|
||||
|
||||
debug_info_init!(env, fn_val);
|
||||
|
||||
// Add args to scope
|
||||
// let arg_symbol = Symbol::ARG_1;
|
||||
// tag_value.set_name(arg_symbol.as_str(&env.interns));
|
||||
|
||||
let mut it = fn_val.get_param_iter();
|
||||
|
||||
let ptr = it.next().unwrap().into_pointer_value();
|
||||
let offset = it.next().unwrap().into_int_value();
|
||||
let extra_offset = it.next().unwrap().into_int_value();
|
||||
let tag_value = it.next().unwrap();
|
||||
|
||||
let cursors = Cursors {
|
||||
offset,
|
||||
extra_offset,
|
||||
};
|
||||
|
||||
let parent = fn_val;
|
||||
|
||||
debug_assert!(tag_value.is_pointer_value());
|
||||
|
||||
use UnionLayout::*;
|
||||
|
||||
match union_layout {
|
||||
NonRecursive(&[]) => {
|
||||
// we're comparing empty tag unions; this code is effectively unreachable
|
||||
env.builder.build_unreachable();
|
||||
}
|
||||
NonRecursive(tags) => {
|
||||
let id = get_tag_id(env, parent, &union_layout, tag_value);
|
||||
|
||||
let switch_block = env.context.append_basic_block(parent, "switch_block");
|
||||
env.builder.build_unconditional_branch(switch_block);
|
||||
|
||||
let mut cases = Vec::with_capacity_in(tags.len(), env.arena);
|
||||
|
||||
for (tag_id, field_layouts) in tags.iter().enumerate() {
|
||||
let block = env.context.append_basic_block(parent, "tag_id_modify");
|
||||
env.builder.position_at_end(block);
|
||||
|
||||
let raw_data_ptr = env
|
||||
.builder
|
||||
.build_struct_gep(
|
||||
tag_value.into_pointer_value(),
|
||||
RocUnion::TAG_DATA_INDEX,
|
||||
"tag_data",
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let layout = Layout::struct_no_name_order(field_layouts);
|
||||
let basic_type = basic_type_from_layout(env, &layout);
|
||||
|
||||
let data_ptr = env.builder.build_pointer_cast(
|
||||
raw_data_ptr,
|
||||
basic_type.ptr_type(AddressSpace::Generic),
|
||||
"data_ptr",
|
||||
);
|
||||
|
||||
let data = env.builder.build_load(data_ptr, "load_data");
|
||||
|
||||
let answer =
|
||||
build_clone(env, layout_ids, ptr, cursors, data, layout, when_recursive);
|
||||
|
||||
env.builder.build_return(Some(&answer));
|
||||
|
||||
cases.push((id.get_type().const_int(tag_id as u64, false), block));
|
||||
}
|
||||
|
||||
env.builder.position_at_end(switch_block);
|
||||
|
||||
match cases.pop() {
|
||||
Some((_, default)) => {
|
||||
env.builder.build_switch(id, default, &cases);
|
||||
}
|
||||
None => {
|
||||
// we're serializing an empty tag union; this code is effectively unreachable
|
||||
env.builder.build_unreachable();
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => todo!(),
|
||||
}
|
||||
}
|
||||
|
@ -329,14 +581,15 @@ fn build_clone_builtin<'a, 'ctx, 'env>(
|
|||
Builtin::Str => {
|
||||
//
|
||||
|
||||
call_bitcode_fn(
|
||||
call_str_bitcode_fn(
|
||||
env,
|
||||
&[value],
|
||||
&[
|
||||
ptr.into(),
|
||||
cursors.offset.into(),
|
||||
cursors.extra_offset.into(),
|
||||
value,
|
||||
],
|
||||
crate::llvm::bitcode::BitcodeReturns::Basic,
|
||||
bitcode::STR_CLONE_TO,
|
||||
)
|
||||
.into_int_value()
|
||||
|
@ -380,10 +633,6 @@ fn build_clone_builtin<'a, 'ctx, 'env>(
|
|||
"elements",
|
||||
);
|
||||
|
||||
// where we write the elements' stack representation
|
||||
// let element_offset = bd.build_alloca(env.ptr_int(), "element_offset");
|
||||
// bd.build_store(element_offset, elements_start_offset);
|
||||
|
||||
// if the element has any pointers, we clone them to this offset
|
||||
let rest_offset = bd.build_alloca(env.ptr_int(), "rest_offset");
|
||||
|
||||
|
@ -404,26 +653,24 @@ fn build_clone_builtin<'a, 'ctx, 'env>(
|
|||
bd.build_int_add(elements_start_offset, current_offset, "current_offset");
|
||||
let current_extra_offset = bd.build_load(rest_offset, "element_offset");
|
||||
|
||||
let offset = current_offset; // env.ptr_int().const_int(60, false);
|
||||
let extra_offset = current_extra_offset.into_int_value(); // env.ptr_int().const_int(60 + 24, false);
|
||||
let offset = current_offset;
|
||||
let extra_offset = current_extra_offset.into_int_value();
|
||||
|
||||
let cursors = Cursors {
|
||||
offset,
|
||||
extra_offset,
|
||||
};
|
||||
|
||||
let new_offset = build_clone(
|
||||
env,
|
||||
layout_ids,
|
||||
ptr,
|
||||
Cursors {
|
||||
// offset: current_offset,
|
||||
// extra_offset: current_extra_offset.into_int_value(),
|
||||
offset,
|
||||
extra_offset,
|
||||
},
|
||||
cursors,
|
||||
element,
|
||||
*elem,
|
||||
when_recursive,
|
||||
);
|
||||
|
||||
// let new_offset = env.ptr_int().const_int(60 + 24 + 34, false);
|
||||
|
||||
bd.build_store(rest_offset, new_offset);
|
||||
};
|
||||
|
||||
|
|
|
@@ -1,9 +1,9 @@
[package]
name = "roc_gen_wasm"
version = "0.1.0"
version = "0.0.1"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
authors = ["The Roc Contributors"]
license = "UPL-1.0"

[dependencies]
bitvec = "1"
@@ -1612,13 +1612,46 @@ impl<'a> LowLevelCall<'a> {
}
}
NumShiftRightBy => {
backend.storage.load_symbols(
&mut backend.code_builder,
&[self.arguments[1], self.arguments[0]],
);
let bits = self.arguments[0];
let num = self.arguments[1];
match CodeGenNumType::from(self.ret_layout) {
I32 => backend.code_builder.i32_shr_s(),
I64 => backend.code_builder.i64_shr_s(),
I32 => {
// In most languages this operation is for signed numbers, but Roc defines it on all integers.
// So the argument is implicitly converted to signed before the shift operator.
// We need to make that conversion explicit for i8 and i16, which use Wasm's i32 type.
let bit_width = 8 * self.ret_layout.stack_size(TARGET_INFO) as i32;
if bit_width < 32 && !symbol_is_signed_int(backend, num) {
// Sign-extend the number by shifting left and right again
backend
.storage
.load_symbols(&mut backend.code_builder, &[num]);
backend.code_builder.i32_const(32 - bit_width);
backend.code_builder.i32_shl();
backend.code_builder.i32_const(32 - bit_width);
backend.code_builder.i32_shr_s();
backend
.storage
.load_symbols(&mut backend.code_builder, &[bits]);

// Do the actual bitshift operation
backend.code_builder.i32_shr_s();

// Restore to unsigned
backend.code_builder.i32_const((1 << bit_width) - 1);
backend.code_builder.i32_and();
} else {
backend
.storage
.load_symbols(&mut backend.code_builder, &[num, bits]);
backend.code_builder.i32_shr_s();
}
}
I64 => {
backend
.storage
.load_symbols(&mut backend.code_builder, &[num, bits]);
backend.code_builder.i64_shr_s();
}
I128 => todo!("{:?} for I128", self.lowlevel),
_ => panic_ret_type(),
}

@@ -1626,7 +1659,7 @@ impl<'a> LowLevelCall<'a> {
NumShiftRightZfBy => {
match CodeGenNumType::from(self.ret_layout) {
I32 => {
// This is normally an unsigned operation, but Roc defines it on all integer types.
// In most languages this operation is for unsigned numbers, but Roc defines it on all integers.
// So the argument is implicitly converted to unsigned before the shift operator.
// We need to make that conversion explicit for i8 and i16, which use Wasm's i32 type.
let bit_width = 8 * self.ret_layout.stack_size(TARGET_INFO);
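The comments in the NumShiftRightBy hunk describe the sign-extension dance needed when a small unsigned integer (u8/u16, held zero-extended in a Wasm i32) gets Roc's arithmetic right shift. A rough Rust analogue of that instruction sequence, purely for illustration (the function and its names are not part of the backend):

fn shift_right_by_u8_as_signed(num: u8, bits: u32) -> u8 {
    let width = 8;
    // Sign-extend: shift the value's sign bit up to the i32 sign position and back down
    let sign_extended = ((num as i32) << (32 - width)) >> (32 - width);
    // Arithmetic shift, then mask back to the original 8-bit width ("restore to unsigned")
    ((sign_extended >> bits) & ((1 << width) - 1)) as u8
}

fn main() {
    // 0b1000_0000 is -128 as an i8; shifting right by 1 keeps the sign bit set
    assert_eq!(shift_right_by_u8_as_signed(0b1000_0000, 1), 0b1100_0000);
}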
@@ -1,6 +1,6 @@
[package]
name = "roc_ident"
version = "0.1.0"
version = "0.0.1"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
edition = "2021"
@@ -1,6 +1,6 @@
[package]
name = "roc_late_solve"
version = "0.1.0"
version = "0.0.1"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
edition = "2021"
@ -10,12 +10,14 @@ use roc_collections::MutMap;
|
|||
use roc_derive::SharedDerivedModule;
|
||||
use roc_error_macros::internal_error;
|
||||
use roc_module::symbol::ModuleId;
|
||||
use roc_solve::solve::{compact_lambda_sets_of_vars, Phase, Pools};
|
||||
use roc_module::symbol::Symbol;
|
||||
use roc_solve::ability::AbilityResolver;
|
||||
use roc_solve::solve::Pools;
|
||||
use roc_solve::specialize::{compact_lambda_sets_of_vars, DerivedEnv, Phase};
|
||||
use roc_types::subs::{get_member_lambda_sets_at_region, Content, FlatType, LambdaSet};
|
||||
use roc_types::subs::{ExposedTypesStorageSubs, Subs, Variable};
|
||||
use roc_unify::unify::{unify as unify_unify, Env, Mode, Unified};
|
||||
|
||||
pub use roc_solve::ability::resolve_ability_specialization;
|
||||
pub use roc_solve::ability::Resolved;
|
||||
pub use roc_types::subs::instantiate_rigids;
|
||||
|
||||
|
@ -49,12 +51,12 @@ impl WorldAbilities {
|
|||
|
||||
#[inline(always)]
|
||||
pub fn with_module_exposed_type<T>(
|
||||
&mut self,
|
||||
&self,
|
||||
module: ModuleId,
|
||||
mut f: impl FnMut(&mut ExposedTypesStorageSubs) -> T,
|
||||
mut f: impl FnMut(&ExposedTypesStorageSubs) -> T,
|
||||
) -> T {
|
||||
let mut world = self.world.write().unwrap();
|
||||
let (_, exposed_types) = world.get_mut(&module).expect("module not in the world");
|
||||
let world = self.world.read().unwrap();
|
||||
let (_, exposed_types) = world.get(&module).expect("module not in the world");
|
||||
|
||||
f(exposed_types)
|
||||
}
|
||||
|
@ -96,6 +98,75 @@ impl AbilitiesView<'_> {
|
|||
}
|
||||
}
|
||||
|
||||
pub struct LateResolver<'a> {
|
||||
home: ModuleId,
|
||||
abilities: &'a AbilitiesView<'a>,
|
||||
}
|
||||
|
||||
impl<'a> AbilityResolver for LateResolver<'a> {
|
||||
fn member_parent_and_signature_var(
|
||||
&self,
|
||||
ability_member: roc_module::symbol::Symbol,
|
||||
home_subs: &mut Subs,
|
||||
) -> Option<(roc_module::symbol::Symbol, Variable)> {
|
||||
let (parent_ability, signature_var) =
|
||||
self.abilities
|
||||
.with_module_abilities_store(ability_member.module_id(), |store| {
|
||||
store
|
||||
.member_def(ability_member)
|
||||
.map(|def| (def.parent_ability, def.signature_var()))
|
||||
})?;
|
||||
|
||||
let parent_ability_module = parent_ability.module_id();
|
||||
debug_assert_eq!(parent_ability_module, ability_member.module_id());
|
||||
|
||||
let signature_var = match (parent_ability_module == self.home, self.abilities) {
|
||||
(false, AbilitiesView::World(world)) => {
|
||||
// Need to copy the type from an external module into our home subs
|
||||
world.with_module_exposed_type(parent_ability_module, |external_types| {
|
||||
let stored_signature_var =
|
||||
external_types.stored_ability_member_vars.get(&signature_var).expect("Ability member is in an external store, but its signature variables are not stored accordingly!");
|
||||
|
||||
let home_copy = external_types
|
||||
.storage_subs
|
||||
.export_variable_to(home_subs, *stored_signature_var);
|
||||
|
||||
home_copy.variable
|
||||
})
|
||||
}
|
||||
_ => signature_var,
|
||||
};
|
||||
|
||||
Some((parent_ability, signature_var))
|
||||
}
|
||||
|
||||
fn get_implementation(
|
||||
&self,
|
||||
impl_key: roc_can::abilities::ImplKey,
|
||||
) -> Option<roc_types::types::MemberImpl> {
|
||||
self.abilities
|
||||
.with_module_abilities_store(impl_key.opaque.module_id(), |store| {
|
||||
store.get_implementation(impl_key).copied()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub fn resolve_ability_specialization(
|
||||
home: ModuleId,
|
||||
subs: &mut Subs,
|
||||
abilities: &AbilitiesView,
|
||||
ability_member: Symbol,
|
||||
specialization_var: Variable,
|
||||
) -> Option<Resolved> {
|
||||
let late_resolver = LateResolver { home, abilities };
|
||||
roc_solve::ability::resolve_ability_specialization(
|
||||
subs,
|
||||
&late_resolver,
|
||||
ability_member,
|
||||
specialization_var,
|
||||
)
|
||||
}
|
||||
|
||||
pub struct LatePhase<'a> {
|
||||
home: ModuleId,
|
||||
abilities: &'a AbilitiesView<'a>,
|
||||
|
@ -272,15 +343,18 @@ pub fn unify(
|
|||
let mut pools = Pools::default();
|
||||
|
||||
let late_phase = LatePhase { home, abilities };
|
||||
let derived_env = DerivedEnv {
|
||||
derived_module,
|
||||
exposed_types: exposed_by_module,
|
||||
};
|
||||
|
||||
let must_implement_constraints = compact_lambda_sets_of_vars(
|
||||
subs,
|
||||
derived_module,
|
||||
&derived_env,
|
||||
arena,
|
||||
&mut pools,
|
||||
lambda_sets_to_specialize,
|
||||
&late_phase,
|
||||
exposed_by_module,
|
||||
);
|
||||
// At this point we can't do anything with must-implement constraints, since we're no
|
||||
// longer solving. We must assume that they were totally caught during solving.
|
||||
|
|
|
@@ -1,6 +1,6 @@
[package]
name = "roc_load"
version = "0.1.0"
version = "0.0.1"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
edition = "2021"
@@ -14,6 +14,7 @@ const MODULES: &[(ModuleId, &str)] = &[
(ModuleId::SET, "Set.roc"),
(ModuleId::BOX, "Box.roc"),
(ModuleId::ENCODE, "Encode.roc"),
(ModuleId::DECODE, "Decode.roc"),
(ModuleId::JSON, "Json.roc"),
];
@ -1,5 +1,3 @@
|
|||
pub use roc_load_internal::file::Threading;
|
||||
|
||||
use bumpalo::Bump;
|
||||
use roc_can::module::ExposedByModule;
|
||||
use roc_collections::all::MutMap;
|
||||
|
@ -11,7 +9,8 @@ use std::path::PathBuf;
|
|||
|
||||
pub use roc_load_internal::docs;
|
||||
pub use roc_load_internal::file::{
|
||||
Expectations, LoadResult, LoadStart, LoadedModule, LoadingProblem, MonomorphizedModule, Phase,
|
||||
EntryPoint, ExecutionMode, Expectations, LoadConfig, LoadResult, LoadStart, LoadedModule,
|
||||
LoadingProblem, MonomorphizedModule, Phase, Threading,
|
||||
};
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
|
@ -19,23 +18,11 @@ fn load<'a>(
|
|||
arena: &'a Bump,
|
||||
load_start: LoadStart<'a>,
|
||||
exposed_types: ExposedByModule,
|
||||
goal_phase: Phase,
|
||||
target_info: TargetInfo,
|
||||
render: RenderTarget,
|
||||
threading: Threading,
|
||||
load_config: LoadConfig,
|
||||
) -> Result<LoadResult<'a>, LoadingProblem<'a>> {
|
||||
let cached_subs = read_cached_subs();
|
||||
|
||||
roc_load_internal::file::load(
|
||||
arena,
|
||||
load_start,
|
||||
exposed_types,
|
||||
goal_phase,
|
||||
target_info,
|
||||
cached_subs,
|
||||
render,
|
||||
threading,
|
||||
)
|
||||
roc_load_internal::file::load(arena, load_start, exposed_types, cached_subs, load_config)
|
||||
}
|
||||
|
||||
/// Load using only a single thread; used when compiling to webassembly
|
||||
|
@ -43,9 +30,9 @@ pub fn load_single_threaded<'a>(
|
|||
arena: &'a Bump,
|
||||
load_start: LoadStart<'a>,
|
||||
exposed_types: ExposedByModule,
|
||||
goal_phase: Phase,
|
||||
target_info: TargetInfo,
|
||||
render: RenderTarget,
|
||||
exec_mode: ExecutionMode,
|
||||
) -> Result<LoadResult<'a>, LoadingProblem<'a>> {
|
||||
let cached_subs = read_cached_subs();
|
||||
|
||||
|
@ -53,10 +40,10 @@ pub fn load_single_threaded<'a>(
|
|||
arena,
|
||||
load_start,
|
||||
exposed_types,
|
||||
goal_phase,
|
||||
target_info,
|
||||
cached_subs,
|
||||
render,
|
||||
exec_mode,
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -67,23 +54,13 @@ pub fn load_and_monomorphize_from_str<'a>(
|
|||
src: &'a str,
|
||||
src_dir: PathBuf,
|
||||
exposed_types: ExposedByModule,
|
||||
target_info: TargetInfo,
|
||||
render: RenderTarget,
|
||||
threading: Threading,
|
||||
load_config: LoadConfig,
|
||||
) -> Result<MonomorphizedModule<'a>, LoadingProblem<'a>> {
|
||||
use LoadResult::*;
|
||||
|
||||
let load_start = LoadStart::from_str(arena, filename, src, src_dir)?;
|
||||
|
||||
match load(
|
||||
arena,
|
||||
load_start,
|
||||
exposed_types,
|
||||
Phase::MakeSpecializations,
|
||||
target_info,
|
||||
render,
|
||||
threading,
|
||||
)? {
|
||||
match load(arena, load_start, exposed_types, load_config)? {
|
||||
Monomorphized(module) => Ok(module),
|
||||
TypeChecked(_) => unreachable!(""),
|
||||
}
|
||||
|
@ -93,23 +70,13 @@ pub fn load_and_monomorphize(
|
|||
arena: &Bump,
|
||||
filename: PathBuf,
|
||||
exposed_types: ExposedByModule,
|
||||
target_info: TargetInfo,
|
||||
render: RenderTarget,
|
||||
threading: Threading,
|
||||
load_config: LoadConfig,
|
||||
) -> Result<MonomorphizedModule<'_>, LoadingProblem<'_>> {
|
||||
use LoadResult::*;
|
||||
|
||||
let load_start = LoadStart::from_path(arena, filename, render)?;
|
||||
let load_start = LoadStart::from_path(arena, filename, load_config.render)?;
|
||||
|
||||
match load(
|
||||
arena,
|
||||
load_start,
|
||||
exposed_types,
|
||||
Phase::MakeSpecializations,
|
||||
target_info,
|
||||
render,
|
||||
threading,
|
||||
)? {
|
||||
match load(arena, load_start, exposed_types, load_config)? {
|
||||
Monomorphized(module) => Ok(module),
|
||||
TypeChecked(_) => unreachable!(""),
|
||||
}
|
||||
|
@ -119,23 +86,13 @@ pub fn load_and_typecheck(
|
|||
arena: &Bump,
|
||||
filename: PathBuf,
|
||||
exposed_types: ExposedByModule,
|
||||
target_info: TargetInfo,
|
||||
render: RenderTarget,
|
||||
threading: Threading,
|
||||
load_config: LoadConfig,
|
||||
) -> Result<LoadedModule, LoadingProblem<'_>> {
|
||||
use LoadResult::*;
|
||||
|
||||
let load_start = LoadStart::from_path(arena, filename, render)?;
|
||||
let load_start = LoadStart::from_path(arena, filename, load_config.render)?;
|
||||
|
||||
match load(
|
||||
arena,
|
||||
load_start,
|
||||
exposed_types,
|
||||
Phase::SolveTypes,
|
||||
target_info,
|
||||
render,
|
||||
threading,
|
||||
)? {
|
||||
match load(arena, load_start, exposed_types, load_config)? {
|
||||
Monomorphized(_) => unreachable!(""),
|
||||
TypeChecked(module) => Ok(module),
|
||||
}
|
||||
|
@ -161,9 +118,9 @@ pub fn load_and_typecheck_str<'a>(
|
|||
arena,
|
||||
load_start,
|
||||
exposed_types,
|
||||
Phase::SolveTypes,
|
||||
target_info,
|
||||
render,
|
||||
ExecutionMode::Check,
|
||||
)? {
|
||||
Monomorphized(_) => unreachable!(""),
|
||||
TypeChecked(module) => Ok(module),
|
||||
|
|
|
@@ -1,6 +1,6 @@
[package]
name = "roc_load_internal"
version = "0.1.0"
version = "0.0.1"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
edition = "2021"
@ -30,8 +30,8 @@ use roc_module::symbol::{
|
|||
PackageQualified, Symbol,
|
||||
};
|
||||
use roc_mono::ir::{
|
||||
CapturedSymbols, EntryPoint, ExternalSpecializations, PartialProc, Proc, ProcLayout, Procs,
|
||||
ProcsBase, UpdateModeIds,
|
||||
CapturedSymbols, ExternalSpecializations, PartialProc, Proc, ProcLayout, Procs, ProcsBase,
|
||||
UpdateModeIds,
|
||||
};
|
||||
use roc_mono::layout::{CapturesNiche, LambdaName, Layout, LayoutCache, LayoutProblem};
|
||||
use roc_parse::ast::{self, Defs, ExtractSpaces, Spaced, StrLiteral, TypeAnnotation};
|
||||
|
@ -117,6 +117,30 @@ macro_rules! log {
|
|||
($($arg:tt)*) => (dbg_do!(ROC_PRINT_LOAD_LOG, println!($($arg)*)))
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct LoadConfig {
|
||||
pub target_info: TargetInfo,
|
||||
pub render: RenderTarget,
|
||||
pub threading: Threading,
|
||||
pub exec_mode: ExecutionMode,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub enum ExecutionMode {
|
||||
Test,
|
||||
Check,
|
||||
Executable,
|
||||
}
|
||||
|
||||
impl ExecutionMode {
|
||||
fn goal_phase(&self) -> Phase {
|
||||
match self {
|
||||
ExecutionMode::Test | ExecutionMode::Executable => Phase::MakeSpecializations,
|
||||
ExecutionMode::Check => Phase::SolveTypes,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Struct storing various intermediate stages by their ModuleId
|
||||
#[derive(Debug)]
|
||||
struct ModuleCache<'a> {
|
||||
|
@ -167,6 +191,7 @@ impl Default for ModuleCache<'_> {
|
|||
NUM,
|
||||
BOX,
|
||||
ENCODE,
|
||||
DECODE,
|
||||
JSON,
|
||||
}
|
||||
|
||||
|
@ -669,7 +694,6 @@ pub struct MonomorphizedModule<'a> {
|
|||
pub interns: Interns,
|
||||
pub subs: Subs,
|
||||
pub output_path: Box<Path>,
|
||||
pub platform_path: Box<Path>,
|
||||
pub can_problems: MutMap<ModuleId, Vec<roc_problem::can::Problem>>,
|
||||
pub type_problems: MutMap<ModuleId, Vec<TypeError>>,
|
||||
pub procedures: MutMap<(Symbol, ProcLayout<'a>), Proc<'a>>,
|
||||
|
@ -681,6 +705,16 @@ pub struct MonomorphizedModule<'a> {
|
|||
pub expectations: VecMap<ModuleId, Expectations>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum EntryPoint<'a> {
|
||||
Executable {
|
||||
symbol: Symbol,
|
||||
layout: ProcLayout<'a>,
|
||||
platform_path: Box<Path>,
|
||||
},
|
||||
Test,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Expectations {
|
||||
pub subs: roc_types::subs::Subs,
|
||||
|
@ -847,7 +881,6 @@ struct State<'a> {
|
|||
pub root_id: ModuleId,
|
||||
pub root_subs: Option<Subs>,
|
||||
pub platform_data: Option<PlatformData>,
|
||||
pub goal_phase: Phase,
|
||||
pub exposed_types: ExposedByModule,
|
||||
pub output_path: Option<&'a str>,
|
||||
pub platform_path: PlatformPath<'a>,
|
||||
|
@ -858,6 +891,7 @@ struct State<'a> {
|
|||
pub procedures: MutMap<(Symbol, ProcLayout<'a>), Proc<'a>>,
|
||||
pub toplevel_expects: VecMap<Symbol, Region>,
|
||||
pub exposed_to_host: ExposedToHost,
|
||||
pub goal_phase: Phase,
|
||||
|
||||
/// This is the "final" list of IdentIds, after canonicalization and constraint gen
|
||||
/// have completed for a given module.
|
||||
|
@ -885,6 +919,7 @@ struct State<'a> {
|
|||
pub layout_caches: std::vec::Vec<LayoutCache<'a>>,
|
||||
|
||||
pub render: RenderTarget,
|
||||
pub exec_mode: ExecutionMode,
|
||||
|
||||
/// All abilities across all modules.
|
||||
pub world_abilities: WorldAbilities,
|
||||
|
@ -902,16 +937,17 @@ impl<'a> State<'a> {
|
|||
fn new(
|
||||
root_id: ModuleId,
|
||||
target_info: TargetInfo,
|
||||
goal_phase: Phase,
|
||||
exposed_types: ExposedByModule,
|
||||
arc_modules: Arc<Mutex<PackageModuleIds<'a>>>,
|
||||
ident_ids_by_module: SharedIdentIdsByModule,
|
||||
cached_subs: MutMap<ModuleId, (Subs, Vec<(Symbol, Variable)>)>,
|
||||
render: RenderTarget,
|
||||
number_of_workers: usize,
|
||||
exec_mode: ExecutionMode,
|
||||
) -> Self {
|
||||
let arc_shorthands = Arc::new(Mutex::new(MutMap::default()));
|
||||
|
||||
let goal_phase = exec_mode.goal_phase();
|
||||
let dependencies = Dependencies::new(goal_phase);
|
||||
|
||||
Self {
|
||||
|
@ -939,6 +975,7 @@ impl<'a> State<'a> {
|
|||
layout_caches: std::vec::Vec::with_capacity(number_of_workers),
|
||||
cached_subs: Arc::new(Mutex::new(cached_subs)),
|
||||
render,
|
||||
exec_mode,
|
||||
make_specializations_pass: MakeSpecializationsPass::Pass(1),
|
||||
world_abilities: Default::default(),
|
||||
}
|
||||
|
@ -1145,16 +1182,14 @@ pub fn load_and_typecheck_str<'a>(
|
|||
// where we want to regenerate the cached data
|
||||
let cached_subs = MutMap::default();
|
||||
|
||||
match load(
|
||||
arena,
|
||||
load_start,
|
||||
exposed_types,
|
||||
Phase::SolveTypes,
|
||||
let load_config = LoadConfig {
|
||||
target_info,
|
||||
cached_subs,
|
||||
render,
|
||||
threading,
|
||||
)? {
|
||||
exec_mode: ExecutionMode::Check,
|
||||
};
|
||||
|
||||
match load(arena, load_start, exposed_types, cached_subs, load_config)? {
|
||||
Monomorphized(_) => unreachable!(""),
|
||||
TypeChecked(module) => Ok(module),
|
||||
}
|
||||
|
@ -1363,11 +1398,8 @@ pub fn load<'a>(
|
|||
arena: &'a Bump,
|
||||
load_start: LoadStart<'a>,
|
||||
exposed_types: ExposedByModule,
|
||||
goal_phase: Phase,
|
||||
target_info: TargetInfo,
|
||||
cached_subs: MutMap<ModuleId, (Subs, Vec<(Symbol, Variable)>)>,
|
||||
render: RenderTarget,
|
||||
threading: Threading,
|
||||
load_config: LoadConfig,
|
||||
) -> Result<LoadResult<'a>, LoadingProblem<'a>> {
|
||||
enum Threads {
|
||||
Single,
|
||||
|
@ -1384,7 +1416,7 @@ pub fn load<'a>(
|
|||
Err(_) => Threads::Single,
|
||||
Ok(0) => unreachable!("NonZeroUsize"),
|
||||
Ok(1) => Threads::Single,
|
||||
Ok(reported) => match threading {
|
||||
Ok(reported) => match load_config.threading {
|
||||
Threading::Single => Threads::Single,
|
||||
Threading::AllAvailable => Threads::Many(reported),
|
||||
Threading::AtMost(at_most) => Threads::Many(Ord::min(reported, at_most)),
|
||||
|
@ -1398,20 +1430,20 @@ pub fn load<'a>(
|
|||
arena,
|
||||
load_start,
|
||||
exposed_types,
|
||||
goal_phase,
|
||||
target_info,
|
||||
load_config.target_info,
|
||||
cached_subs,
|
||||
render,
|
||||
load_config.render,
|
||||
load_config.exec_mode,
|
||||
),
|
||||
Threads::Many(threads) => load_multi_threaded(
|
||||
arena,
|
||||
load_start,
|
||||
exposed_types,
|
||||
goal_phase,
|
||||
target_info,
|
||||
load_config.target_info,
|
||||
cached_subs,
|
||||
render,
|
||||
load_config.render,
|
||||
threads,
|
||||
load_config.exec_mode,
|
||||
),
|
||||
}
|
||||
}
|
||||
|
@ -1422,10 +1454,10 @@ pub fn load_single_threaded<'a>(
|
|||
arena: &'a Bump,
|
||||
load_start: LoadStart<'a>,
|
||||
exposed_types: ExposedByModule,
|
||||
goal_phase: Phase,
|
||||
target_info: TargetInfo,
|
||||
cached_subs: MutMap<ModuleId, (Subs, Vec<(Symbol, Variable)>)>,
|
||||
render: RenderTarget,
|
||||
exec_mode: ExecutionMode,
|
||||
) -> Result<LoadResult<'a>, LoadingProblem<'a>> {
|
||||
let LoadStart {
|
||||
arc_modules,
|
||||
|
@ -1446,13 +1478,13 @@ pub fn load_single_threaded<'a>(
|
|||
let mut state = State::new(
|
||||
root_id,
|
||||
target_info,
|
||||
goal_phase,
|
||||
exposed_types,
|
||||
arc_modules,
|
||||
ident_ids_by_module,
|
||||
cached_subs,
|
||||
render,
|
||||
number_of_workers,
|
||||
exec_mode,
|
||||
);
|
||||
|
||||
// We'll add tasks to this, and then worker threads will take tasks from it.
|
||||
|
@ -1623,11 +1655,11 @@ fn load_multi_threaded<'a>(
|
|||
arena: &'a Bump,
|
||||
load_start: LoadStart<'a>,
|
||||
exposed_types: ExposedByModule,
|
||||
goal_phase: Phase,
|
||||
target_info: TargetInfo,
|
||||
cached_subs: MutMap<ModuleId, (Subs, Vec<(Symbol, Variable)>)>,
|
||||
render: RenderTarget,
|
||||
available_threads: usize,
|
||||
exec_mode: ExecutionMode,
|
||||
) -> Result<LoadResult<'a>, LoadingProblem<'a>> {
|
||||
let LoadStart {
|
||||
arc_modules,
|
||||
|
@ -1663,13 +1695,13 @@ fn load_multi_threaded<'a>(
|
|||
let mut state = State::new(
|
||||
root_id,
|
||||
target_info,
|
||||
goal_phase,
|
||||
exposed_types,
|
||||
arc_modules,
|
||||
ident_ids_by_module,
|
||||
cached_subs,
|
||||
render,
|
||||
num_workers,
|
||||
exec_mode,
|
||||
);
|
||||
|
||||
// an arena for every worker, stored in an arena-allocated bumpalo vec to make the lifetimes work
|
||||
|
@ -2748,6 +2780,7 @@ fn finish_specialization(
|
|||
output_path,
|
||||
platform_path,
|
||||
platform_data,
|
||||
exec_mode,
|
||||
..
|
||||
} = state;
|
||||
|
||||
|
@ -2764,6 +2797,10 @@ fn finish_specialization(
|
|||
.map(|(id, (path, src))| (id, (path, src.into())))
|
||||
.collect();
|
||||
|
||||
let entry_point = {
|
||||
match exec_mode {
|
||||
ExecutionMode::Test => EntryPoint::Test,
|
||||
ExecutionMode::Executable => {
|
||||
let path_to_platform = {
|
||||
use PlatformPath::*;
|
||||
let package_name = match platform_path {
|
||||
|
@ -2784,8 +2821,6 @@ fn finish_specialization(
|
|||
};
|
||||
|
||||
let platform_path = Path::new(path_to_platform).into();
|
||||
|
||||
let entry_point = {
|
||||
let symbol = match platform_data {
|
||||
None => {
|
||||
debug_assert_eq!(exposed_to_host.values.len(), 1);
|
||||
|
@ -2795,23 +2830,28 @@ fn finish_specialization(
|
|||
};
|
||||
|
||||
match procedures.keys().find(|(s, _)| *s == symbol) {
|
||||
Some((_, layout)) => EntryPoint {
|
||||
Some((_, layout)) => EntryPoint::Executable {
|
||||
layout: *layout,
|
||||
symbol,
|
||||
platform_path,
|
||||
},
|
||||
None => {
|
||||
// the entry point is not specialized. This can happen if the repl output
|
||||
// is a function value
|
||||
EntryPoint {
|
||||
EntryPoint::Executable {
|
||||
layout: roc_mono::ir::ProcLayout {
|
||||
arguments: &[],
|
||||
result: Layout::struct_no_name_order(&[]),
|
||||
captures_niche: CapturesNiche::no_niche(),
|
||||
},
|
||||
symbol,
|
||||
platform_path,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
ExecutionMode::Check => unreachable!(),
|
||||
}
|
||||
};
|
||||
|
||||
let output_path = match output_path {
|
||||
|
@ -2823,7 +2863,7 @@ fn finish_specialization(
|
|||
can_problems,
|
||||
type_problems,
|
||||
output_path,
|
||||
platform_path,
|
||||
expectations,
|
||||
exposed_to_host,
|
||||
module_id: state.root_id,
|
||||
subs,
|
||||
|
@ -2833,7 +2873,6 @@ fn finish_specialization(
|
|||
sources,
|
||||
timings: state.timings,
|
||||
toplevel_expects,
|
||||
expectations,
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -3088,6 +3127,7 @@ fn load_module<'a>(
|
|||
"Bool", ModuleId::BOOL
|
||||
"Box", ModuleId::BOX
|
||||
"Encode", ModuleId::ENCODE
|
||||
"Decode", ModuleId::DECODE
|
||||
"Json", ModuleId::JSON
|
||||
}
|
||||
|
||||
|
@ -4477,7 +4517,7 @@ fn canonicalize_and_constrain<'a>(
|
|||
Vacant(vacant) => {
|
||||
let should_include_builtin = matches!(
|
||||
name.module_id(),
|
||||
ModuleId::ENCODE | ModuleId::DICT | ModuleId::SET
|
||||
ModuleId::ENCODE | ModuleId::DECODE | ModuleId::DICT | ModuleId::SET
|
||||
);
|
||||
|
||||
if !name.is_builtin() || should_include_builtin {
|
||||
|
@ -5076,7 +5116,7 @@ fn load_derived_partial_procs<'a>(
|
|||
|
||||
// TODO: we can be even lazier here if we move `add_def_to_module` to happen in mono. Also, the
|
||||
// timings would be more accurate.
|
||||
for (derived_symbol, derived_expr) in derives_to_add.into_iter() {
|
||||
for (derived_symbol, (derived_expr, derived_expr_var)) in derives_to_add.into_iter() {
|
||||
let mut mono_env = roc_mono::ir::Env {
|
||||
arena,
|
||||
subs,
|
||||
|
@ -5115,7 +5155,22 @@ fn load_derived_partial_procs<'a>(
|
|||
return_type,
|
||||
)
|
||||
}
|
||||
_ => internal_error!("Expected only functions to be derived"),
|
||||
_ => {
|
||||
// mark this symbols as a top-level thunk before any other work on the procs
|
||||
new_module_thunks.push(derived_symbol);
|
||||
|
||||
PartialProc {
|
||||
annotation: derived_expr_var,
|
||||
// This is a 0-arity thunk, so it has no arguments.
|
||||
pattern_symbols: &[],
|
||||
// This is a top-level definition, so it cannot capture anything
|
||||
captured_symbols: CapturedSymbols::None,
|
||||
body: derived_expr,
|
||||
body_var: derived_expr_var,
|
||||
// This is a 0-arity thunk, so it cannot be recursive
|
||||
is_self_recursive: false,
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
procs_base
|
||||
|
|
|
@ -17,8 +17,8 @@ mod helpers;
|
|||
use crate::helpers::fixtures_dir;
|
||||
use bumpalo::Bump;
|
||||
use roc_can::module::ExposedByModule;
|
||||
use roc_load_internal::file::Threading;
|
||||
use roc_load_internal::file::{LoadResult, LoadStart, LoadedModule, LoadingProblem, Phase};
|
||||
use roc_load_internal::file::{ExecutionMode, LoadConfig, Threading};
|
||||
use roc_load_internal::file::{LoadResult, LoadStart, LoadedModule, LoadingProblem};
|
||||
use roc_module::ident::ModuleName;
|
||||
use roc_module::symbol::{Interns, ModuleId};
|
||||
use roc_problem::can::Problem;
|
||||
|
@ -41,16 +41,19 @@ fn load_and_typecheck(
|
|||
use LoadResult::*;
|
||||
|
||||
let load_start = LoadStart::from_path(arena, filename, RenderTarget::Generic)?;
|
||||
let load_config = LoadConfig {
|
||||
target_info,
|
||||
render: RenderTarget::Generic,
|
||||
threading: Threading::Single,
|
||||
exec_mode: ExecutionMode::Check,
|
||||
};
|
||||
|
||||
match roc_load_internal::file::load(
|
||||
arena,
|
||||
load_start,
|
||||
exposed_types,
|
||||
Phase::SolveTypes,
|
||||
target_info,
|
||||
Default::default(), // these tests will re-compile the builtins
|
||||
RenderTarget::Generic,
|
||||
Threading::Single,
|
||||
load_config,
|
||||
)? {
|
||||
Monomorphized(_) => unreachable!(""),
|
||||
TypeChecked(module) => Ok(module),
|
||||
|
|
|
@@ -1,6 +1,6 @@
[package]
name = "roc_module"
version = "0.1.0"
version = "0.0.1"
authors = ["The Roc Contributors"]
edition = "2021"
license = "UPL-1.0"
@@ -78,6 +78,7 @@ impl ModuleName {
pub const RESULT: &'static str = "Result";
pub const BOX: &'static str = "Box";
pub const ENCODE: &'static str = "Encode";
pub const DECODE: &'static str = "Decode";
pub const JSON: &'static str = "Json";

pub fn as_str(&self) -> &str {
@ -47,8 +47,10 @@ const SYMBOL_HAS_NICHE: () =
|
|||
#[cfg(debug_assertions)]
|
||||
const PRETTY_PRINT_DEBUG_SYMBOLS: bool = true;
|
||||
|
||||
pub const DERIVABLE_ABILITIES: &[(Symbol, &[Symbol])] =
|
||||
&[(Symbol::ENCODE_ENCODING, &[Symbol::ENCODE_TO_ENCODER])];
|
||||
pub const DERIVABLE_ABILITIES: &[(Symbol, &[Symbol])] = &[
|
||||
(Symbol::ENCODE_ENCODING, &[Symbol::ENCODE_TO_ENCODER]),
|
||||
(Symbol::DECODE_DECODING, &[Symbol::DECODE_DECODER]),
|
||||
];
|
||||
|
||||
/// In Debug builds only, Symbol has a name() method that lets
|
||||
/// you look up its name in a global intern table. This table is
|
||||
|
@ -1005,6 +1007,8 @@ define_builtins! {
|
|||
30 DEV_TMP5: "#dev_tmp5"
|
||||
|
||||
31 ATTR_INVALID: "#attr_invalid"
|
||||
|
||||
32 CLONE: "#clone" // internal function that clones a value into a buffer
|
||||
}
|
||||
// Fake module for synthesizing and storing derived implementations
|
||||
1 DERIVED_SYNTH: "#Derived" => {
|
||||
|
@ -1389,9 +1393,37 @@ define_builtins! {
|
|||
24 ENCODE_APPEND: "append"
|
||||
25 ENCODE_TO_BYTES: "toBytes"
|
||||
}
|
||||
12 JSON: "Json" => {
|
||||
12 DECODE: "Decode" => {
|
||||
0 DECODE_DECODE_ERROR: "DecodeError"
|
||||
1 DECODE_DECODE_RESULT: "DecodeResult"
|
||||
2 DECODE_DECODER_OPAQUE: "Decoder"
|
||||
3 DECODE_DECODING: "Decoding"
|
||||
4 DECODE_DECODER: "decoder"
|
||||
5 DECODE_DECODERFORMATTING: "DecoderFormatting"
|
||||
6 DECODE_U8: "u8"
|
||||
7 DECODE_U16: "u16"
|
||||
8 DECODE_U32: "u32"
|
||||
9 DECODE_U64: "u64"
|
||||
10 DECODE_U128: "u128"
|
||||
11 DECODE_I8: "i8"
|
||||
12 DECODE_I16: "i16"
|
||||
13 DECODE_I32: "i32"
|
||||
14 DECODE_I64: "i64"
|
||||
15 DECODE_I128: "i128"
|
||||
16 DECODE_F32: "f32"
|
||||
17 DECODE_F64: "f64"
|
||||
18 DECODE_DEC: "dec"
|
||||
19 DECODE_BOOL: "bool"
|
||||
20 DECODE_STRING: "string"
|
||||
21 DECODE_LIST: "list"
|
||||
22 DECODE_CUSTOM: "custom"
|
||||
23 DECODE_DECODE_WITH: "decodeWith"
|
||||
24 DECODE_FROM_BYTES_PARTIAL: "fromBytesPartial"
|
||||
25 DECODE_FROM_BYTES: "fromBytes"
|
||||
}
|
||||
13 JSON: "Json" => {
|
||||
0 JSON_JSON: "Json"
|
||||
}
|
||||
|
||||
num_modules: 13 // Keep this count up to date by hand! (TODO: see the mut_map! macro for how we could determine this count correctly in the macro)
|
||||
num_modules: 14 // Keep this count up to date by hand! (TODO: see the mut_map! macro for how we could determine this count correctly in the macro)
|
||||
}
|
||||
|
|
|
@@ -1,6 +1,6 @@
[package]
name = "roc_mono"
version = "0.1.0"
version = "0.0.1"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
edition = "2021"

@@ -12,6 +12,7 @@ roc_region = { path = "../region" }
roc_module = { path = "../module" }
roc_types = { path = "../types" }
roc_can = { path = "../can" }
roc_derive_key = { path = "../derive_key" }
roc_derive = { path = "../derive" }
roc_late_solve = { path = "../late_solve" }
roc_std = { path = "../../roc_std", default-features = false }
@@ -144,7 +144,7 @@ pub fn refcount_reset_proc_body<'a>(
let rc = root.create_symbol(ident_ids, "rc");
let refcount_1 = root.create_symbol(ident_ids, "refcount_1");
let is_unique = root.create_symbol(ident_ids, "is_unique");
let masked = root.create_symbol(ident_ids, "masked");
let addr = root.create_symbol(ident_ids, "addr");

let union_layout = match layout {
Layout::Union(u) => u,

@@ -213,7 +213,7 @@ pub fn refcount_reset_proc_body<'a>(
op: LowLevel::NumSubWrap,
update_mode: UpdateModeId::BACKEND_DUMMY,
},
arguments: root.arena.alloc([masked, alignment]),
arguments: root.arena.alloc([addr, alignment]),
});

Stmt::Let(

@@ -341,7 +341,7 @@ pub fn refcount_reset_proc_body<'a>(
rc_ptr,
union_layout.stores_tag_id_in_pointer(root.target_info),
root.arena.alloc(rc_stmt),
masked,
addr,
)
};
@ -96,6 +96,23 @@ enum Test<'a> {
    },
}

impl<'a> Test<'a> {
    fn can_be_switch(&self) -> bool {
        match self {
            Test::IsCtor { .. } => true,
            Test::IsInt(_, int_width) => {
                // llvm does not like switching on 128-bit values
                !matches!(int_width, IntWidth::U128 | IntWidth::I128)
            }
            Test::IsFloat(_, _) => true,
            Test::IsDecimal(_) => false,
            Test::IsStr(_) => false,
            Test::IsBit(_) => true,
            Test::IsByte { .. } => true,
        }
    }
}

use std::hash::{Hash, Hasher};
impl<'a> Hash for Test<'a> {
    fn hash<H: Hasher>(&self, state: &mut H) {

@ -1370,8 +1387,6 @@ fn test_to_equality<'a>(
        }

        Test::IsInt(test_int, precision) => {
            // TODO don't downcast i128 here
            debug_assert!(i128::from_ne_bytes(test_int) <= i64::MAX as i128);
            let lhs = Expr::Literal(Literal::Int(test_int));
            let lhs_symbol = env.unique_symbol();
            stores.push((lhs_symbol, Layout::int_width(precision), lhs));

@ -1833,7 +1848,8 @@ fn decide_to_branching<'a>(
                Test::IsBit(v) => v as u64,
                Test::IsByte { tag_id, .. } => tag_id as u64,
                Test::IsCtor { tag_id, .. } => tag_id as u64,
                other => todo!("other {:?}", other),
                Test::IsDecimal(_) => unreachable!("decimals cannot be switched on"),
                Test::IsStr(_) => unreachable!("strings cannot be switched on"),
            };

            // branch info is only useful for refcounted values

@ -2004,16 +2020,31 @@ fn fanout_decider<'a>(
    edges: Vec<(GuardedTest<'a>, DecisionTree<'a>)>,
) -> Decider<'a, u64> {
    let fallback_decider = tree_to_decider(fallback);
    let necessary_tests = edges
    let necessary_tests: Vec<_> = edges
        .into_iter()
        .map(|(test, tree)| fanout_decider_help(tree, test))
        .collect();

    if necessary_tests.iter().all(|(t, _)| t.can_be_switch()) {
        Decider::FanOut {
            path,
            tests: necessary_tests,
            fallback: Box::new(fallback_decider),
        }
    } else {
        // in llvm, we cannot switch on strings so must chain
        let mut decider = fallback_decider;

        for (test, branch_decider) in necessary_tests.into_iter().rev() {
            decider = Decider::Chain {
                test_chain: vec![(path.clone(), test)],
                success: Box::new(branch_decider),
                failure: Box::new(decider),
            };
        }

        decider
    }
}

fn fanout_decider_help<'a>(

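The fanout_decider change above illustrates a general lowering pattern: a multi-way branch can only become a real switch when every test reduces to an integer tag; otherwise the tests are folded into a chain of if/else steps with the fallback decider as the final failure branch. Below is a minimal, self-contained Rust sketch of that idea. The Test and Decider types here are stand-ins invented for illustration, not the compiler's real roc_mono types.

// Sketch: choose a switch when all tests are switchable, else chain them.
#[derive(Debug, Clone)]
enum Test {
    IsByte(u8),
    IsStr(String),
}

impl Test {
    fn can_be_switch(&self) -> bool {
        match self {
            Test::IsByte(_) => true,
            // strings (and decimals) cannot be switched on in LLVM
            Test::IsStr(_) => false,
        }
    }
}

#[derive(Debug)]
enum Decider {
    FanOut { tests: Vec<Test> },
    Chain { test: Test, failure: Box<Decider> },
    Fallback,
}

fn fanout_or_chain(tests: Vec<Test>) -> Decider {
    if tests.iter().all(Test::can_be_switch) {
        Decider::FanOut { tests }
    } else {
        // fold the tests into a chain, ending in the fallback decider
        tests
            .into_iter()
            .rev()
            .fold(Decider::Fallback, |failure, test| Decider::Chain {
                test,
                failure: Box::new(failure),
            })
    }
}

fn main() {
    let switchable = vec![Test::IsByte(0), Test::IsByte(1)];
    let mixed = vec![Test::IsByte(0), Test::IsStr("hi".to_string())];
    println!("{:?}", fanout_or_chain(switchable)); // FanOut
    println!("{:?}", fanout_or_chain(mixed)); // Chain
}
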
@ -3999,9 +3999,10 @@ pub fn with_hole<'a>(
|
|||
}
|
||||
|
||||
// creating a record from the var will unpack it if it's just a single field.
|
||||
let layout = layout_cache
|
||||
.from_var(env.arena, record_var, env.subs)
|
||||
.unwrap_or_else(|err| panic!("TODO turn fn_var into a RuntimeError {:?}", err));
|
||||
let layout = match layout_cache.from_var(env.arena, record_var, env.subs) {
|
||||
Ok(layout) => layout,
|
||||
Err(_) => return Stmt::RuntimeError("Can't create record with improper layout"),
|
||||
};
|
||||
|
||||
let field_symbols = field_symbols.into_bump_slice();
|
||||
|
||||
|
@ -4915,14 +4916,12 @@ pub fn with_hole<'a>(
|
|||
UnspecializedExpr(symbol) => {
|
||||
match procs.ability_member_aliases.get(symbol).unwrap() {
|
||||
&self::AbilityMember(member) => {
|
||||
let resolved_proc = env.abilities.with_module_abilities_store(env.home, |store|
|
||||
resolve_ability_specialization(env.subs, store, member, fn_var)
|
||||
.expect("Recorded as an ability member, but it doesn't have a specialization")
|
||||
);
|
||||
let resolved_proc = resolve_ability_specialization(env.home, env.subs, &env.abilities, member, fn_var)
|
||||
.expect("Recorded as an ability member, but it doesn't have a specialization");
|
||||
|
||||
let resolved_proc = match resolved_proc {
|
||||
Resolved::Specialization(symbol) => symbol,
|
||||
Resolved::NeedsGenerated => {
|
||||
Resolved::NeedsGenerated(_) => {
|
||||
todo_abilities!("Generate impls for structural types")
|
||||
}
|
||||
};
|
||||
|
@ -5226,17 +5225,41 @@ fn late_resolve_ability_specialization<'a>(
|
|||
env.subs[spec_symbol_index]
|
||||
} else {
|
||||
// Otherwise, resolve by checking the able var.
|
||||
let specialization = env
|
||||
.abilities
|
||||
.with_module_abilities_store(env.home, |store| {
|
||||
resolve_ability_specialization(env.subs, store, member, specialization_var)
|
||||
.expect("Ability specialization is unknown - code generation cannot proceed!")
|
||||
});
|
||||
let specialization = resolve_ability_specialization(
|
||||
env.home,
|
||||
env.subs,
|
||||
&env.abilities,
|
||||
member,
|
||||
specialization_var,
|
||||
)
|
||||
.expect("Ability specialization is unknown - code generation cannot proceed!");
|
||||
|
||||
match specialization {
|
||||
Resolved::Specialization(symbol) => symbol,
|
||||
Resolved::NeedsGenerated => {
|
||||
todo_abilities!("Generate impls for structural types")
|
||||
Resolved::NeedsGenerated(var) => {
|
||||
let derive_key = roc_derive_key::Derived::builtin(
|
||||
member.try_into().expect("derived symbols must be builtins"),
|
||||
env.subs,
|
||||
var,
|
||||
)
|
||||
.expect("specialization var not derivable!");
|
||||
|
||||
match derive_key {
|
||||
roc_derive_key::Derived::Immediate(imm) => {
|
||||
// The immediate is an ability member itself, so it must be resolved!
|
||||
late_resolve_ability_specialization(env, imm, None, specialization_var)
|
||||
}
|
||||
roc_derive_key::Derived::Key(derive_key) => {
|
||||
let mut derived_module = env
|
||||
.derived_module
|
||||
.lock()
|
||||
.expect("derived module unavailable");
|
||||
|
||||
derived_module
|
||||
.get_or_insert(env.exposed_by_module, derive_key)
|
||||
.0
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -8957,7 +8980,7 @@ impl NumLiteral {
    fn to_pattern(&self) -> Pattern<'static> {
        match *self {
            NumLiteral::Int(n, w) => Pattern::IntLiteral(n, w),
            NumLiteral::U128(_) => todo!(),
            NumLiteral::U128(n) => Pattern::IntLiteral(n, IntWidth::U128),
            NumLiteral::Float(n, w) => Pattern::FloatLiteral(f64::to_bits(n), w),
            NumLiteral::Decimal(n) => Pattern::DecimalLiteral(n),
        }

@ -1,6 +1,6 @@
[package]
name = "roc_parse"
version = "0.1.0"
version = "0.0.1"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
edition = "2021"

@ -866,7 +866,7 @@ where
    // the next character should not be an identifier character
    // to prevent treating `whence` or `iffy` as keywords
    match state.bytes().get(width) {
        Some(next) if *next == b' ' || *next == b'#' || *next == b'\n' => {
        Some(next) if *next == b' ' || *next == b'#' || *next == b'\n' || *next == b'\r' => {
            state = state.advance(width);
            Ok((MadeProgress, (), state))
        }

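The one-character change above widens the set of bytes that may follow a keyword, so that a carriage return (from Windows CRLF line endings) also terminates it. A rough standalone sketch of that boundary check follows; is_keyword_at is a hypothetical helper written only for illustration, not the real roc_parse API, and the end-of-input handling here is an assumption.

// Sketch of a keyword-boundary check: after the keyword's bytes, the next byte
// must be a separator (space, comment start, LF, or now CR), so identifiers
// like `iffy` are not treated as the keyword `if`.
fn is_keyword_at(src: &[u8], keyword: &[u8]) -> bool {
    if !src.starts_with(keyword) {
        return false;
    }
    match src.get(keyword.len()) {
        // in this sketch, end of input also terminates a keyword
        None => true,
        Some(&next) => next == b' ' || next == b'#' || next == b'\n' || next == b'\r',
    }
}

fn main() {
    assert!(is_keyword_at(b"if x then", b"if"));
    assert!(is_keyword_at(b"if\r\n    x", b"if")); // CRLF line ending
    assert!(!is_keyword_at(b"iffy", b"if"));
    println!("keyword boundary checks passed");
}
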
@ -1,6 +1,6 @@
[package]
name = "roc_problem"
version = "0.1.0"
version = "0.0.1"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
edition = "2021"

@ -45,12 +45,13 @@ pub enum Problem {
        shadow: Loc<Ident>,
        kind: ShadowKind,
    },
    CyclicAlias(Symbol, Region, Vec<Symbol>),
    CyclicAlias(Symbol, Region, Vec<Symbol>, AliasKind),
    BadRecursion(Vec<CycleEntry>),
    PhantomTypeArgument {
        typ: Symbol,
        variable_region: Region,
        variable_name: Lowercase,
        alias_kind: AliasKind,
    },
    UnboundTypeVariable {
        typ: Symbol,

@ -1,6 +1,6 @@
[package]
name = "roc_region"
version = "0.1.0"
version = "0.0.1"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
edition = "2021"

@ -1,6 +1,6 @@
[package]
name = "roc_target"
version = "0.1.0"
version = "0.0.1"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
edition = "2021"

@ -4,9 +4,31 @@

use strum_macros::{EnumCount, EnumIter};

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum OperatingSystem {
    Windows,
    Unix,
    Wasi,
}

impl From<target_lexicon::OperatingSystem> for OperatingSystem {
    fn from(target: target_lexicon::OperatingSystem) -> Self {
        match target {
            target_lexicon::OperatingSystem::Windows => OperatingSystem::Windows,
            target_lexicon::OperatingSystem::Wasi => OperatingSystem::Wasi,
            target_lexicon::OperatingSystem::Linux => OperatingSystem::Unix,
            target_lexicon::OperatingSystem::MacOSX { .. } => OperatingSystem::Unix,
            target_lexicon::OperatingSystem::Darwin => OperatingSystem::Unix,
            target_lexicon::OperatingSystem::Unknown => OperatingSystem::Unix,
            other => unreachable!("unsupported operating system {:?}", other),
        }
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct TargetInfo {
    pub architecture: Architecture,
    pub operating_system: OperatingSystem,
}

impl TargetInfo {

@ -28,18 +50,21 @@ impl TargetInfo {
    pub const fn default_aarch64() -> Self {
        TargetInfo {
            architecture: Architecture::Aarch64,
            operating_system: OperatingSystem::Unix,
        }
    }

    pub const fn default_x86_64() -> Self {
        TargetInfo {
            architecture: Architecture::X86_64,
            operating_system: OperatingSystem::Unix,
        }
    }

    pub const fn default_wasm32() -> Self {
        TargetInfo {
            architecture: Architecture::Wasm32,
            operating_system: OperatingSystem::Wasi,
        }
    }
}

@ -47,15 +72,13 @@ impl TargetInfo {
impl From<&target_lexicon::Triple> for TargetInfo {
    fn from(triple: &target_lexicon::Triple) -> Self {
        let architecture = Architecture::from(triple.architecture);
        let operating_system = OperatingSystem::from(triple.operating_system);

        Self { architecture }
        Self {
            architecture,
            operating_system,
        }
    }

impl From<Architecture> for TargetInfo {
    fn from(architecture: Architecture) -> Self {
        Self { architecture }
    }
}

#[repr(u8)]

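The roc_target changes above thread an operating_system field through TargetInfo so the default target descriptions can differ per platform: Wasi for the wasm32 default and Unix for the native defaults. A small self-contained sketch follows; it re-declares trimmed-down mirror types locally so it runs on its own, rather than depending on the real roc_target or target_lexicon crates, so treat the names as illustrative only.

// Local mirror of the shapes added above, for illustration only.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum OperatingSystem {
    Unix,
    Wasi,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Architecture {
    X86_64,
    Wasm32,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct TargetInfo {
    architecture: Architecture,
    operating_system: OperatingSystem,
}

impl TargetInfo {
    // Native default: x86-64 running a Unix-like OS.
    const fn default_x86_64() -> Self {
        TargetInfo {
            architecture: Architecture::X86_64,
            operating_system: OperatingSystem::Unix,
        }
    }

    // WebAssembly default: wasm32 targeting WASI.
    const fn default_wasm32() -> Self {
        TargetInfo {
            architecture: Architecture::Wasm32,
            operating_system: OperatingSystem::Wasi,
        }
    }
}

fn main() {
    assert_eq!(TargetInfo::default_x86_64().operating_system, OperatingSystem::Unix);
    assert_eq!(TargetInfo::default_wasm32().operating_system, OperatingSystem::Wasi);
    assert_eq!(TargetInfo::default_wasm32().architecture, Architecture::Wasm32);
    println!("wasm32 default: {:?}", TargetInfo::default_wasm32());
}
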
@ -1,6 +1,6 @@
[package]
name = "roc_solve"
version = "0.1.0"
version = "0.0.1"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
edition = "2021"

@ -1,12 +1,13 @@
|
|||
use roc_can::abilities::AbilitiesStore;
|
||||
use roc_can::expr::PendingDerives;
|
||||
use roc_collections::{VecMap, VecSet};
|
||||
use roc_error_macros::internal_error;
|
||||
use roc_error_macros::{internal_error, todo_abilities};
|
||||
use roc_module::symbol::Symbol;
|
||||
use roc_region::all::{Loc, Region};
|
||||
use roc_solve_problem::{TypeError, UnderivableReason, Unfulfilled};
|
||||
use roc_types::num::NumericRange;
|
||||
use roc_types::subs::{instantiate_rigids, Content, FlatType, GetSubsSlice, Rank, Subs, Variable};
|
||||
use roc_types::types::{AliasKind, Category, PatternCategory};
|
||||
use roc_types::types::{AliasKind, Category, MemberImpl, PatternCategory};
|
||||
use roc_unify::unify::{Env, MustImplementConstraints};
|
||||
use roc_unify::unify::{MustImplementAbility, Obligated};
|
||||
|
||||
|
@ -253,7 +254,20 @@ impl ObligationCache {
|
|||
// independent queries.
|
||||
|
||||
let opt_can_derive_builtin = match ability {
|
||||
Symbol::ENCODE_ENCODING => Some(self.can_derive_encoding(subs, abilities_store, var)),
|
||||
Symbol::ENCODE_ENCODING => Some(DeriveEncoding::is_derivable(
|
||||
self,
|
||||
abilities_store,
|
||||
subs,
|
||||
var,
|
||||
)),
|
||||
|
||||
Symbol::DECODE_DECODING => Some(DeriveDecoding::is_derivable(
|
||||
self,
|
||||
abilities_store,
|
||||
subs,
|
||||
var,
|
||||
)),
|
||||
|
||||
_ => None,
|
||||
};
|
||||
|
||||
|
@ -262,7 +276,7 @@ impl ObligationCache {
|
|||
// can derive!
|
||||
None
|
||||
}
|
||||
Some(Err(failure_var)) => Some(if failure_var == var {
|
||||
Some(Err(DerivableError::NotDerivable(failure_var))) => Some(if failure_var == var {
|
||||
UnderivableReason::SurfaceNotDerivable
|
||||
} else {
|
||||
let (error_type, _skeletons) = subs.var_to_error_type(failure_var);
|
||||
|
@ -391,16 +405,133 @@ impl ObligationCache {
|
|||
let check_has_fake = self.derive_cache.insert(derive_key, root_result);
|
||||
debug_assert_eq!(check_has_fake, Some(fake_fulfilled));
|
||||
}
|
||||
}
|
||||
|
||||
// If we have a lot of these, consider using a visitor.
|
||||
// It will be very similar for most types (can't derive functions, can't derive unbound type
|
||||
// variables, can only derive opaques if they have an impl, etc).
|
||||
fn can_derive_encoding(
|
||||
&mut self,
|
||||
subs: &mut Subs,
|
||||
#[inline(always)]
|
||||
#[rustfmt::skip]
|
||||
fn is_builtin_number_alias(symbol: Symbol) -> bool {
|
||||
matches!(symbol,
|
||||
Symbol::NUM_U8 | Symbol::NUM_UNSIGNED8
|
||||
| Symbol::NUM_U16 | Symbol::NUM_UNSIGNED16
|
||||
| Symbol::NUM_U32 | Symbol::NUM_UNSIGNED32
|
||||
| Symbol::NUM_U64 | Symbol::NUM_UNSIGNED64
|
||||
| Symbol::NUM_U128 | Symbol::NUM_UNSIGNED128
|
||||
| Symbol::NUM_I8 | Symbol::NUM_SIGNED8
|
||||
| Symbol::NUM_I16 | Symbol::NUM_SIGNED16
|
||||
| Symbol::NUM_I32 | Symbol::NUM_SIGNED32
|
||||
| Symbol::NUM_I64 | Symbol::NUM_SIGNED64
|
||||
| Symbol::NUM_I128 | Symbol::NUM_SIGNED128
|
||||
| Symbol::NUM_NAT | Symbol::NUM_NATURAL
|
||||
| Symbol::NUM_F32 | Symbol::NUM_BINARY32
|
||||
| Symbol::NUM_F64 | Symbol::NUM_BINARY64
|
||||
| Symbol::NUM_DEC | Symbol::NUM_DECIMAL,
|
||||
)
|
||||
}
|
||||
|
||||
enum DerivableError {
|
||||
NotDerivable(Variable),
|
||||
}
|
||||
|
||||
struct Descend(bool);
|
||||
|
||||
trait DerivableVisitor {
|
||||
const ABILITY: Symbol;
|
||||
|
||||
#[inline(always)]
|
||||
fn is_derivable_builtin_opaque(_symbol: Symbol) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn visit_flex(var: Variable) -> Result<(), DerivableError> {
|
||||
Err(DerivableError::NotDerivable(var))
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn visit_rigid(var: Variable) -> Result<(), DerivableError> {
|
||||
Err(DerivableError::NotDerivable(var))
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn visit_flex_able(var: Variable, ability: Symbol) -> Result<(), DerivableError> {
|
||||
if ability != Self::ABILITY {
|
||||
Err(DerivableError::NotDerivable(var))
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn visit_rigid_able(var: Variable, ability: Symbol) -> Result<(), DerivableError> {
|
||||
if ability != Self::ABILITY {
|
||||
Err(DerivableError::NotDerivable(var))
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn visit_recursion(var: Variable) -> Result<Descend, DerivableError> {
|
||||
Err(DerivableError::NotDerivable(var))
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn visit_apply(var: Variable, _symbol: Symbol) -> Result<Descend, DerivableError> {
|
||||
Err(DerivableError::NotDerivable(var))
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn visit_func(var: Variable) -> Result<Descend, DerivableError> {
|
||||
Err(DerivableError::NotDerivable(var))
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn visit_record(var: Variable) -> Result<Descend, DerivableError> {
|
||||
Err(DerivableError::NotDerivable(var))
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn visit_tag_union(var: Variable) -> Result<Descend, DerivableError> {
|
||||
Err(DerivableError::NotDerivable(var))
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn visit_recursive_tag_union(var: Variable) -> Result<Descend, DerivableError> {
|
||||
Err(DerivableError::NotDerivable(var))
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn visit_function_or_tag_union(var: Variable) -> Result<Descend, DerivableError> {
|
||||
Err(DerivableError::NotDerivable(var))
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn visit_empty_record(var: Variable) -> Result<(), DerivableError> {
|
||||
Err(DerivableError::NotDerivable(var))
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn visit_empty_tag_union(var: Variable) -> Result<(), DerivableError> {
|
||||
Err(DerivableError::NotDerivable(var))
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn visit_alias(var: Variable, _symbol: Symbol) -> Result<Descend, DerivableError> {
|
||||
Err(DerivableError::NotDerivable(var))
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn visit_ranged_number(var: Variable, _range: NumericRange) -> Result<(), DerivableError> {
|
||||
Err(DerivableError::NotDerivable(var))
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn is_derivable(
|
||||
obligation_cache: &mut ObligationCache,
|
||||
abilities_store: &AbilitiesStore,
|
||||
subs: &Subs,
|
||||
var: Variable,
|
||||
) -> Result<(), Variable> {
|
||||
) -> Result<(), DerivableError> {
|
||||
let mut stack = vec![var];
|
||||
let mut seen_recursion_vars = vec![];
|
||||
|
||||
|
@ -418,102 +549,103 @@ impl ObligationCache {
|
|||
let content = subs.get_content_without_compacting(var);
|
||||
|
||||
use Content::*;
|
||||
use DerivableError::*;
|
||||
use FlatType::*;
|
||||
match content {
|
||||
FlexVar(_) | RigidVar(_) => return Err(var),
|
||||
FlexAbleVar(_, ability) | RigidAbleVar(_, ability) => {
|
||||
if *ability != Symbol::ENCODE_ENCODING {
|
||||
return Err(var);
|
||||
}
|
||||
// Any concrete type this variables is instantiated with will also gain a "does
|
||||
// implement" check so this is okay.
|
||||
}
|
||||
match *content {
|
||||
FlexVar(_) => Self::visit_flex(var)?,
|
||||
RigidVar(_) => Self::visit_rigid(var)?,
|
||||
FlexAbleVar(_, ability) => Self::visit_flex_able(var, ability)?,
|
||||
RigidAbleVar(_, ability) => Self::visit_rigid_able(var, ability)?,
|
||||
RecursionVar {
|
||||
structure,
|
||||
opt_name: _,
|
||||
} => {
|
||||
let descend = Self::visit_recursion(var)?;
|
||||
if descend.0 {
|
||||
seen_recursion_vars.push(var);
|
||||
stack.push(*structure);
|
||||
stack.push(structure);
|
||||
}
|
||||
}
|
||||
Structure(flat_type) => match flat_type {
|
||||
Apply(
|
||||
Symbol::LIST_LIST | Symbol::SET_SET | Symbol::DICT_DICT | Symbol::STR_STR,
|
||||
vars,
|
||||
) => push_var_slice!(*vars),
|
||||
Apply(..) => return Err(var),
|
||||
Func(..) => {
|
||||
return Err(var);
|
||||
Apply(symbol, vars) => {
|
||||
let descend = Self::visit_apply(var, symbol)?;
|
||||
if descend.0 {
|
||||
push_var_slice!(vars)
|
||||
}
|
||||
Record(fields, var) => {
|
||||
}
|
||||
Func(args, _clos, ret) => {
|
||||
let descend = Self::visit_func(var)?;
|
||||
if descend.0 {
|
||||
push_var_slice!(args);
|
||||
stack.push(ret);
|
||||
}
|
||||
}
|
||||
Record(fields, ext) => {
|
||||
let descend = Self::visit_record(var)?;
|
||||
if descend.0 {
|
||||
push_var_slice!(fields.variables());
|
||||
stack.push(*var);
|
||||
stack.push(ext);
|
||||
}
|
||||
TagUnion(tags, ext_var) => {
|
||||
}
|
||||
TagUnion(tags, ext) => {
|
||||
let descend = Self::visit_tag_union(var)?;
|
||||
if descend.0 {
|
||||
for i in tags.variables() {
|
||||
push_var_slice!(subs[i]);
|
||||
}
|
||||
stack.push(*ext_var);
|
||||
stack.push(ext);
|
||||
}
|
||||
FunctionOrTagUnion(_, _, var) => stack.push(*var),
|
||||
RecursiveTagUnion(rec_var, tags, ext_var) => {
|
||||
seen_recursion_vars.push(*rec_var);
|
||||
}
|
||||
FunctionOrTagUnion(_tag_name, _fn_name, ext) => {
|
||||
let descend = Self::visit_function_or_tag_union(var)?;
|
||||
if descend.0 {
|
||||
stack.push(ext);
|
||||
}
|
||||
}
|
||||
RecursiveTagUnion(rec, tags, ext) => {
|
||||
let descend = Self::visit_recursive_tag_union(var)?;
|
||||
if descend.0 {
|
||||
seen_recursion_vars.push(rec);
|
||||
for i in tags.variables() {
|
||||
push_var_slice!(subs[i]);
|
||||
}
|
||||
stack.push(*ext_var);
|
||||
stack.push(ext);
|
||||
}
|
||||
EmptyRecord | EmptyTagUnion => {
|
||||
// yes
|
||||
}
|
||||
Erroneous(_) => return Err(var),
|
||||
EmptyRecord => Self::visit_empty_record(var)?,
|
||||
EmptyTagUnion => Self::visit_empty_tag_union(var)?,
|
||||
|
||||
Erroneous(_) => return Err(NotDerivable(var)),
|
||||
},
|
||||
#[rustfmt::skip]
|
||||
Alias(
|
||||
Symbol::NUM_U8 | Symbol::NUM_UNSIGNED8
|
||||
| Symbol::NUM_U16 | Symbol::NUM_UNSIGNED16
|
||||
| Symbol::NUM_U32 | Symbol::NUM_UNSIGNED32
|
||||
| Symbol::NUM_U64 | Symbol::NUM_UNSIGNED64
|
||||
| Symbol::NUM_U128 | Symbol::NUM_UNSIGNED128
|
||||
| Symbol::NUM_I8 | Symbol::NUM_SIGNED8
|
||||
| Symbol::NUM_I16 | Symbol::NUM_SIGNED16
|
||||
| Symbol::NUM_I32 | Symbol::NUM_SIGNED32
|
||||
| Symbol::NUM_I64 | Symbol::NUM_SIGNED64
|
||||
| Symbol::NUM_I128 | Symbol::NUM_SIGNED128
|
||||
| Symbol::NUM_NAT | Symbol::NUM_NATURAL
|
||||
| Symbol::NUM_F32 | Symbol::NUM_BINARY32
|
||||
| Symbol::NUM_F64 | Symbol::NUM_BINARY64
|
||||
| Symbol::NUM_DEC | Symbol::NUM_DECIMAL,
|
||||
_,
|
||||
_,
|
||||
_,
|
||||
) => {
|
||||
// yes
|
||||
}
|
||||
Alias(
|
||||
Symbol::NUM_NUM | Symbol::NUM_INTEGER | Symbol::NUM_FLOATINGPOINT,
|
||||
_,
|
||||
_alias_variables,
|
||||
real_var,
|
||||
_,
|
||||
) => stack.push(*real_var),
|
||||
Alias(name, _, _, AliasKind::Opaque) => {
|
||||
let opaque = *name;
|
||||
if self
|
||||
.check_opaque_and_read(abilities_store, opaque, Symbol::ENCODE_ENCODING)
|
||||
AliasKind::Opaque,
|
||||
) => {
|
||||
// Numbers: always decay until a ground is hit.
|
||||
stack.push(real_var);
|
||||
}
|
||||
Alias(opaque, _alias_variables, _real_var, AliasKind::Opaque) => {
|
||||
if obligation_cache
|
||||
.check_opaque_and_read(abilities_store, opaque, Self::ABILITY)
|
||||
.is_err()
|
||||
&& !Self::is_derivable_builtin_opaque(opaque)
|
||||
{
|
||||
return Err(var);
|
||||
return Err(NotDerivable(var));
|
||||
}
|
||||
}
|
||||
Alias(_, arguments, real_type_var, _) => {
|
||||
push_var_slice!(arguments.all_variables());
|
||||
stack.push(*real_type_var);
|
||||
Alias(symbol, _alias_variables, real_var, AliasKind::Structural) => {
|
||||
let descend = Self::visit_alias(var, symbol)?;
|
||||
if descend.0 {
|
||||
stack.push(real_var);
|
||||
}
|
||||
RangedNumber(..) => {
|
||||
// yes, all numbers can
|
||||
}
|
||||
LambdaSet(..) => return Err(var),
|
||||
RangedNumber(range) => Self::visit_ranged_number(var, range)?,
|
||||
|
||||
LambdaSet(..) => return Err(NotDerivable(var)),
|
||||
Error => {
|
||||
return Err(var);
|
||||
return Err(NotDerivable(var));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -522,6 +654,148 @@ impl ObligationCache {
|
|||
}
|
||||
}
|
||||
|
||||
struct DeriveEncoding;
|
||||
impl DerivableVisitor for DeriveEncoding {
|
||||
const ABILITY: Symbol = Symbol::ENCODE_ENCODING;
|
||||
|
||||
#[inline(always)]
|
||||
fn is_derivable_builtin_opaque(symbol: Symbol) -> bool {
|
||||
is_builtin_number_alias(symbol)
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn visit_recursion(_var: Variable) -> Result<Descend, DerivableError> {
|
||||
Ok(Descend(true))
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn visit_apply(var: Variable, symbol: Symbol) -> Result<Descend, DerivableError> {
|
||||
if matches!(
|
||||
symbol,
|
||||
Symbol::LIST_LIST | Symbol::SET_SET | Symbol::DICT_DICT | Symbol::STR_STR,
|
||||
) {
|
||||
Ok(Descend(true))
|
||||
} else {
|
||||
Err(DerivableError::NotDerivable(var))
|
||||
}
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn visit_record(_var: Variable) -> Result<Descend, DerivableError> {
|
||||
Ok(Descend(true))
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn visit_tag_union(_var: Variable) -> Result<Descend, DerivableError> {
|
||||
Ok(Descend(true))
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn visit_recursive_tag_union(_var: Variable) -> Result<Descend, DerivableError> {
|
||||
Ok(Descend(true))
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn visit_function_or_tag_union(_var: Variable) -> Result<Descend, DerivableError> {
|
||||
Ok(Descend(true))
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn visit_empty_record(_var: Variable) -> Result<(), DerivableError> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn visit_empty_tag_union(_var: Variable) -> Result<(), DerivableError> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn visit_alias(_var: Variable, symbol: Symbol) -> Result<Descend, DerivableError> {
|
||||
if is_builtin_number_alias(symbol) {
|
||||
Ok(Descend(false))
|
||||
} else {
|
||||
Ok(Descend(true))
|
||||
}
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn visit_ranged_number(_var: Variable, _range: NumericRange) -> Result<(), DerivableError> {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
struct DeriveDecoding;
|
||||
impl DerivableVisitor for DeriveDecoding {
|
||||
const ABILITY: Symbol = Symbol::DECODE_DECODING;
|
||||
|
||||
#[inline(always)]
|
||||
fn is_derivable_builtin_opaque(symbol: Symbol) -> bool {
|
||||
is_builtin_number_alias(symbol)
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn visit_recursion(_var: Variable) -> Result<Descend, DerivableError> {
|
||||
Ok(Descend(true))
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn visit_apply(var: Variable, symbol: Symbol) -> Result<Descend, DerivableError> {
|
||||
if matches!(
|
||||
symbol,
|
||||
Symbol::LIST_LIST | Symbol::SET_SET | Symbol::DICT_DICT | Symbol::STR_STR,
|
||||
) {
|
||||
Ok(Descend(true))
|
||||
} else {
|
||||
Err(DerivableError::NotDerivable(var))
|
||||
}
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn visit_record(_var: Variable) -> Result<Descend, DerivableError> {
|
||||
Ok(Descend(true))
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn visit_tag_union(_var: Variable) -> Result<Descend, DerivableError> {
|
||||
Ok(Descend(true))
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn visit_recursive_tag_union(_var: Variable) -> Result<Descend, DerivableError> {
|
||||
Ok(Descend(true))
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn visit_function_or_tag_union(_var: Variable) -> Result<Descend, DerivableError> {
|
||||
Ok(Descend(true))
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn visit_empty_record(_var: Variable) -> Result<(), DerivableError> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn visit_empty_tag_union(_var: Variable) -> Result<(), DerivableError> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn visit_alias(_var: Variable, symbol: Symbol) -> Result<Descend, DerivableError> {
|
||||
if is_builtin_number_alias(symbol) {
|
||||
Ok(Descend(false))
|
||||
} else {
|
||||
Ok(Descend(true))
|
||||
}
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn visit_ranged_number(_var: Variable, _range: NumericRange) -> Result<(), DerivableError> {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Determines what type implements an ability member of a specialized signature, given the
|
||||
/// [MustImplementAbility] constraints of the signature.
|
||||
pub fn type_implementing_specialization(
|
||||
|
@ -547,31 +821,69 @@ pub fn type_implementing_specialization(
|
|||
}
|
||||
|
||||
/// Result of trying to resolve an ability specialization.
|
||||
#[derive(Clone, Copy)]
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
pub enum Resolved {
|
||||
/// A user-defined specialization should be used.
|
||||
Specialization(Symbol),
|
||||
/// A specialization must be generated.
|
||||
NeedsGenerated,
|
||||
/// A specialization must be generated for the given type variable.
|
||||
NeedsGenerated(Variable),
|
||||
}
|
||||
|
||||
pub fn resolve_ability_specialization(
|
||||
/// An [`AbilityResolver`] is a shell of an abilities store that answers questions needed for
|
||||
/// [resolving ability specializations][`resolve_ability_specialization`].
|
||||
///
|
||||
/// The trait is provided so you can implement your own resolver at other points in the compilation
|
||||
/// process, for example during monomorphization we have module-re-entrant ability stores that are
|
||||
/// not available during solving.
|
||||
pub trait AbilityResolver {
|
||||
/// Gets the parent ability and type of an ability member.
|
||||
///
|
||||
/// If needed, the type of the ability member will be imported into a local `subs` buffer; as
|
||||
/// such, subs must be provided.
|
||||
fn member_parent_and_signature_var(
|
||||
&self,
|
||||
ability_member: Symbol,
|
||||
home_subs: &mut Subs,
|
||||
) -> Option<(Symbol, Variable)>;
|
||||
|
||||
/// Finds the declared implementation of an [`ImplKey`][roc_can::abilities::ImplKey].
|
||||
fn get_implementation(&self, impl_key: roc_can::abilities::ImplKey) -> Option<MemberImpl>;
|
||||
}
|
||||
|
||||
/// Trivial implementation of a resolver for a module-local abilities store, that defers all
|
||||
/// queries to the module store.
|
||||
impl AbilityResolver for AbilitiesStore {
|
||||
#[inline(always)]
|
||||
fn member_parent_and_signature_var(
|
||||
&self,
|
||||
ability_member: Symbol,
|
||||
_home_subs: &mut Subs, // only have access to one abilities store, do nothing with subs
|
||||
) -> Option<(Symbol, Variable)> {
|
||||
self.member_def(ability_member)
|
||||
.map(|def| (def.parent_ability, def.signature_var()))
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn get_implementation(&self, impl_key: roc_can::abilities::ImplKey) -> Option<MemberImpl> {
|
||||
self.get_implementation(impl_key).copied()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn resolve_ability_specialization<R: AbilityResolver>(
|
||||
subs: &mut Subs,
|
||||
abilities_store: &AbilitiesStore,
|
||||
resolver: &R,
|
||||
ability_member: Symbol,
|
||||
specialization_var: Variable,
|
||||
) -> Option<Resolved> {
|
||||
use roc_unify::unify::{unify, Mode};
|
||||
|
||||
let member_def = abilities_store
|
||||
.member_def(ability_member)
|
||||
let (parent_ability, signature_var) = resolver
|
||||
.member_parent_and_signature_var(ability_member, subs)
|
||||
.expect("Not an ability member symbol");
|
||||
|
||||
// Figure out the ability we're resolving in a temporary subs snapshot.
|
||||
let snapshot = subs.snapshot();
|
||||
|
||||
let signature_var = member_def.signature_var();
|
||||
|
||||
instantiate_rigids(subs, signature_var);
|
||||
let (_vars, must_implement_ability, _lambda_sets_to_specialize, _meta) = unify(
|
||||
&mut Env::new(subs),
|
||||
|
@ -585,8 +897,7 @@ pub fn resolve_ability_specialization(
|
|||
|
||||
subs.rollback_to(snapshot);
|
||||
|
||||
let obligated =
|
||||
type_implementing_specialization(&must_implement_ability, member_def.parent_ability)?;
|
||||
let obligated = type_implementing_specialization(&must_implement_ability, parent_ability)?;
|
||||
|
||||
let resolved = match obligated {
|
||||
Obligated::Opaque(symbol) => {
|
||||
|
@ -595,19 +906,21 @@ pub fn resolve_ability_specialization(
|
|||
ability_member,
|
||||
};
|
||||
|
||||
match abilities_store.get_implementation(impl_key)? {
|
||||
match resolver.get_implementation(impl_key)? {
|
||||
roc_types::types::MemberImpl::Impl(spec_symbol) => {
|
||||
Resolved::Specialization(*spec_symbol)
|
||||
Resolved::Specialization(spec_symbol)
|
||||
}
|
||||
roc_types::types::MemberImpl::Derived => {
|
||||
todo_abilities!("get type from obligated opaque")
|
||||
}
|
||||
roc_types::types::MemberImpl::Derived => Resolved::NeedsGenerated,
|
||||
// TODO this is not correct. We can replace `Resolved` with `MemberImpl` entirely,
|
||||
// which will make this simpler.
|
||||
roc_types::types::MemberImpl::Error => Resolved::Specialization(Symbol::UNDERSCORE),
|
||||
}
|
||||
}
|
||||
Obligated::Adhoc(_) => {
|
||||
Obligated::Adhoc(variable) => {
|
||||
// TODO: more rules need to be validated here, like is this a builtin ability?
|
||||
Resolved::NeedsGenerated
|
||||
Resolved::NeedsGenerated(variable)
|
||||
}
|
||||
};
|
||||
|
||||
|
|
|
@ -5,3 +5,4 @@
|
|||
pub mod ability;
|
||||
pub mod module;
|
||||
pub mod solve;
|
||||
pub mod specialize;
|
||||
|

File diff suppressed because it is too large

789
crates/compiler/solve/src/specialize.rs
Normal file

@ -0,0 +1,789 @@
|
|||
//! Module [specialize] is resolves specialization lambda sets.
|
||||
|
||||
use std::collections::VecDeque;
|
||||
|
||||
use bumpalo::Bump;
|
||||
use roc_can::{
|
||||
abilities::{AbilitiesStore, ImplKey},
|
||||
module::ExposedByModule,
|
||||
};
|
||||
use roc_collections::{VecMap, VecSet};
|
||||
use roc_debug_flags::dbg_do;
|
||||
#[cfg(debug_assertions)]
|
||||
use roc_debug_flags::ROC_TRACE_COMPACTION;
|
||||
use roc_derive::SharedDerivedModule;
|
||||
use roc_derive_key::{DeriveError, DeriveKey};
|
||||
use roc_error_macros::{internal_error, todo_abilities};
|
||||
use roc_module::symbol::{ModuleId, Symbol};
|
||||
use roc_types::{
|
||||
subs::{
|
||||
get_member_lambda_sets_at_region, Content, Descriptor, GetSubsSlice, LambdaSet, Mark,
|
||||
OptVariable, Rank, Subs, SubsSlice, UlsOfVar, Variable,
|
||||
},
|
||||
types::{AliasKind, MemberImpl, Uls},
|
||||
};
|
||||
use roc_unify::unify::{unify, Env as UEnv, Mode, MustImplementConstraints};
|
||||
|
||||
use crate::solve::{deep_copy_var_in, introduce, Pools};
|
||||
|
||||
/// What phase in the compiler is reaching out to specialize lambda sets?
|
||||
/// This is important to distinguish subtle differences in the behavior of the solving algorithm.
|
||||
//
|
||||
// TODO the APIs of this trait suck, this needs a nice cleanup.
|
||||
pub trait Phase {
|
||||
/// The regular type-solving phase, or during some later phase of compilation.
|
||||
/// During the solving phase we must anticipate that some information is still unknown and react to
|
||||
/// that; during late phases, we expect that all information is resolved.
|
||||
const IS_LATE: bool;
|
||||
|
||||
fn with_module_abilities_store<T, F>(&self, module: ModuleId, f: F) -> T
|
||||
where
|
||||
F: FnMut(&AbilitiesStore) -> T;
|
||||
|
||||
/// Given a known lambda set's ambient function in an external module, copy that ambient
|
||||
/// function into the given subs.
|
||||
fn copy_lambda_set_ambient_function_to_home_subs(
|
||||
&self,
|
||||
external_lambda_set_var: Variable,
|
||||
external_module_id: ModuleId,
|
||||
home_subs: &mut Subs,
|
||||
) -> Variable;
|
||||
|
||||
/// Find the ambient function var at a given region for an ability member definition (not a
|
||||
/// specialization!), and copy that into the given subs.
|
||||
fn get_and_copy_ability_member_ambient_function(
|
||||
&self,
|
||||
ability_member: Symbol,
|
||||
region: u8,
|
||||
home_subs: &mut Subs,
|
||||
) -> Variable;
|
||||
}
|
||||
|
||||
pub(crate) struct SolvePhase<'a> {
|
||||
pub abilities_store: &'a AbilitiesStore,
|
||||
}
|
||||
impl Phase for SolvePhase<'_> {
|
||||
const IS_LATE: bool = false;
|
||||
|
||||
fn with_module_abilities_store<T, F>(&self, _module: ModuleId, mut f: F) -> T
|
||||
where
|
||||
F: FnMut(&AbilitiesStore) -> T,
|
||||
{
|
||||
// During solving we're only aware of our module's abilities store.
|
||||
f(self.abilities_store)
|
||||
}
|
||||
|
||||
fn copy_lambda_set_ambient_function_to_home_subs(
|
||||
&self,
|
||||
external_lambda_set_var: Variable,
|
||||
_external_module_id: ModuleId,
|
||||
home_subs: &mut Subs,
|
||||
) -> Variable {
|
||||
// During solving we're only aware of our module's abilities store, the var must
|
||||
// be in our module store. Even if the specialization lambda set comes from another
|
||||
// module, we should have taken care to import it before starting solving in this module.
|
||||
let LambdaSet {
|
||||
ambient_function, ..
|
||||
} = home_subs.get_lambda_set(external_lambda_set_var);
|
||||
ambient_function
|
||||
}
|
||||
|
||||
fn get_and_copy_ability_member_ambient_function(
|
||||
&self,
|
||||
ability_member: Symbol,
|
||||
region: u8,
|
||||
home_subs: &mut Subs,
|
||||
) -> Variable {
|
||||
// During solving we're only aware of our module's abilities store, the var must
|
||||
// be in our module store. Even if the specialization lambda set comes from another
|
||||
// module, we should have taken care to import it before starting solving in this module.
|
||||
let member_def = self
|
||||
.abilities_store
|
||||
.member_def(ability_member)
|
||||
.unwrap_or_else(|| {
|
||||
internal_error!(
|
||||
"{:?} is not resolved, or not an ability member!",
|
||||
ability_member
|
||||
)
|
||||
});
|
||||
let member_var = member_def.signature_var();
|
||||
|
||||
let region_lset = get_member_lambda_sets_at_region(home_subs, member_var, region);
|
||||
|
||||
let LambdaSet {
|
||||
ambient_function, ..
|
||||
} = home_subs.get_lambda_set(region_lset);
|
||||
|
||||
ambient_function
|
||||
}
|
||||
}
|
||||
|
||||
pub struct DerivedEnv<'a> {
|
||||
pub derived_module: &'a SharedDerivedModule,
|
||||
/// Exposed types needed by the derived module.
|
||||
pub exposed_types: &'a ExposedByModule,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct AwaitingSpecializations {
|
||||
// What variables' specialized lambda sets in `uls_of_var` will be unlocked for specialization
|
||||
// when an implementation key's specialization is resolved?
|
||||
waiting: VecMap<ImplKey, VecSet<Variable>>,
|
||||
uls_of_var: UlsOfVar,
|
||||
}
|
||||
|
||||
impl AwaitingSpecializations {
|
||||
pub fn remove_for_specialized(&mut self, subs: &Subs, impl_key: ImplKey) -> UlsOfVar {
|
||||
let spec_variables = self
|
||||
.waiting
|
||||
.remove(&impl_key)
|
||||
.map(|(_, set)| set)
|
||||
.unwrap_or_default();
|
||||
|
||||
let mut result = UlsOfVar::default();
|
||||
for var in spec_variables {
|
||||
let target_lambda_sets = self
|
||||
.uls_of_var
|
||||
.remove_dependent_unspecialized_lambda_sets(subs, var);
|
||||
|
||||
result.extend(var, target_lambda_sets);
|
||||
}
|
||||
result
|
||||
}
|
||||
|
||||
pub fn add(
|
||||
&mut self,
|
||||
impl_key: ImplKey,
|
||||
var: Variable,
|
||||
lambda_sets: impl IntoIterator<Item = Variable>,
|
||||
) {
|
||||
self.uls_of_var.extend(var, lambda_sets);
|
||||
let waiting = self.waiting.get_or_insert(impl_key, Default::default);
|
||||
waiting.insert(var);
|
||||
}
|
||||
|
||||
pub fn union(&mut self, other: Self) {
|
||||
for (impl_key, waiting_vars) in other.waiting {
|
||||
let waiting = self.waiting.get_or_insert(impl_key, Default::default);
|
||||
waiting.extend(waiting_vars);
|
||||
}
|
||||
self.uls_of_var.union(other.uls_of_var);
|
||||
}
|
||||
|
||||
pub fn waiting_for(&self, impl_key: ImplKey) -> bool {
|
||||
self.waiting.contains_key(&impl_key)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct CompactionResult {
|
||||
pub obligations: MustImplementConstraints,
|
||||
pub awaiting_specialization: AwaitingSpecializations,
|
||||
}
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
fn trace_compaction_step_1(subs: &Subs, c_a: Variable, uls_a: &[Variable]) {
|
||||
let c_a = roc_types::subs::SubsFmtContent(subs.get_content_without_compacting(c_a), subs);
|
||||
let uls_a = uls_a
|
||||
.iter()
|
||||
.map(|v| {
|
||||
format!(
|
||||
"{:?}",
|
||||
roc_types::subs::SubsFmtContent(subs.get_content_without_compacting(*v), subs)
|
||||
)
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
.join(",");
|
||||
eprintln!("===lambda set compaction===");
|
||||
eprintln!(" concrete type: {:?}", c_a);
|
||||
eprintln!(" step 1:");
|
||||
eprintln!(" uls_a = {{ {} }}", uls_a);
|
||||
}
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
fn trace_compaction_step_2(subs: &Subs, uls_a: &[Variable]) {
|
||||
let uls_a = uls_a
|
||||
.iter()
|
||||
.map(|v| {
|
||||
format!(
|
||||
"{:?}",
|
||||
roc_types::subs::SubsFmtContent(subs.get_content_without_compacting(*v), subs)
|
||||
)
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
.join(",");
|
||||
eprintln!(" step 2:");
|
||||
eprintln!(" uls_a' = {{ {} }}", uls_a);
|
||||
}
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
fn trace_compaction_step_3start() {
|
||||
eprintln!(" step 3:");
|
||||
}
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
fn trace_compaction_step_3iter_start(
|
||||
subs: &Subs,
|
||||
iteration_lambda_set: Variable,
|
||||
t_f1: Variable,
|
||||
t_f2: Variable,
|
||||
) {
|
||||
let iteration_lambda_set = roc_types::subs::SubsFmtContent(
|
||||
subs.get_content_without_compacting(iteration_lambda_set),
|
||||
subs,
|
||||
);
|
||||
let t_f1 = roc_types::subs::SubsFmtContent(subs.get_content_without_compacting(t_f1), subs);
|
||||
let t_f2 = roc_types::subs::SubsFmtContent(subs.get_content_without_compacting(t_f2), subs);
|
||||
eprintln!(" - iteration: {:?}", iteration_lambda_set);
|
||||
eprintln!(" {:?}", t_f1);
|
||||
eprintln!(" ~ {:?}", t_f2);
|
||||
}
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
#[rustfmt::skip]
|
||||
fn trace_compaction_step_3iter_end(subs: &Subs, t_f_result: Variable, skipped: bool) {
|
||||
let t_f_result =
|
||||
roc_types::subs::SubsFmtContent(subs.get_content_without_compacting(t_f_result), subs);
|
||||
if skipped {
|
||||
eprintln!(" SKIP");
|
||||
}
|
||||
eprintln!(" = {:?}\n", t_f_result);
|
||||
}
|
||||
|
||||
macro_rules! trace_compact {
|
||||
(1. $subs:expr, $c_a:expr, $uls_a:expr) => {{
|
||||
dbg_do!(ROC_TRACE_COMPACTION, {
|
||||
trace_compaction_step_1($subs, $c_a, $uls_a)
|
||||
})
|
||||
}};
|
||||
(2. $subs:expr, $uls_a:expr) => {{
|
||||
dbg_do!(ROC_TRACE_COMPACTION, {
|
||||
trace_compaction_step_2($subs, $uls_a)
|
||||
})
|
||||
}};
|
||||
(3start.) => {{
|
||||
dbg_do!(ROC_TRACE_COMPACTION, { trace_compaction_step_3start() })
|
||||
}};
|
||||
(3iter_start. $subs:expr, $iteration_lset:expr, $t_f1:expr, $t_f2:expr) => {{
|
||||
dbg_do!(ROC_TRACE_COMPACTION, {
|
||||
trace_compaction_step_3iter_start($subs, $iteration_lset, $t_f1, $t_f2)
|
||||
})
|
||||
}};
|
||||
(3iter_end. $subs:expr, $t_f_result:expr) => {{
|
||||
dbg_do!(ROC_TRACE_COMPACTION, {
|
||||
trace_compaction_step_3iter_end($subs, $t_f_result, false)
|
||||
})
|
||||
}};
|
||||
(3iter_end_skipped. $subs:expr, $t_f_result:expr) => {{
|
||||
dbg_do!(ROC_TRACE_COMPACTION, {
|
||||
trace_compaction_step_3iter_end($subs, $t_f_result, true)
|
||||
})
|
||||
}};
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn iter_concrete_of_unspecialized<'a>(
|
||||
subs: &'a Subs,
|
||||
c_a: Variable,
|
||||
uls: &'a [Uls],
|
||||
) -> impl Iterator<Item = &'a Uls> {
|
||||
uls.iter()
|
||||
.filter(move |Uls(var, _, _)| subs.equivalent_without_compacting(*var, c_a))
|
||||
}
|
||||
|
||||
/// Gets the unique unspecialized lambda resolving to concrete type `c_a` in a list of
|
||||
/// unspecialized lambda sets.
|
||||
#[inline(always)]
|
||||
fn unique_unspecialized_lambda(subs: &Subs, c_a: Variable, uls: &[Uls]) -> Option<Uls> {
|
||||
let mut iter_concrete = iter_concrete_of_unspecialized(subs, c_a, uls);
|
||||
let uls = iter_concrete.next()?;
|
||||
debug_assert!(iter_concrete.next().is_none(), "multiple concrete");
|
||||
Some(*uls)
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn compact_lambda_sets_of_vars<P: Phase>(
|
||||
subs: &mut Subs,
|
||||
derived_env: &DerivedEnv,
|
||||
arena: &Bump,
|
||||
pools: &mut Pools,
|
||||
uls_of_var: UlsOfVar,
|
||||
phase: &P,
|
||||
) -> CompactionResult {
|
||||
let mut must_implement = MustImplementConstraints::default();
|
||||
let mut awaiting_specialization = AwaitingSpecializations::default();
|
||||
|
||||
let mut uls_of_var_queue = VecDeque::with_capacity(uls_of_var.len());
|
||||
uls_of_var_queue.extend(uls_of_var.drain());
|
||||
|
||||
// Suppose a type variable `a` with `uls_of_var` mapping `uls_a = {l1, ... ln}` has been instantiated to a concrete type `C_a`.
|
||||
while let Some((c_a, uls_a)) = uls_of_var_queue.pop_front() {
|
||||
let c_a = subs.get_root_key_without_compacting(c_a);
|
||||
// 1. Let each `l` in `uls_a` be of form `[solved_lambdas + ... + C:f:r + ...]`.
|
||||
// NB: There may be multiple unspecialized lambdas of form `C:f:r, C:f1:r1, ..., C:fn:rn` in `l`.
|
||||
// In this case, let `t1, ... tm` be the other unspecialized lambdas not of form `C:_:_`,
|
||||
// that is, none of which are now specialized to the type `C`. Then, deconstruct
|
||||
// `l` such that `l' = [solved_lambdas + t1 + ... + tm + C:f:r]` and `l1 = [[] + C:f1:r1], ..., ln = [[] + C:fn:rn]`.
|
||||
// Replace `l` with `l', l1, ..., ln` in `uls_a`, flattened.
|
||||
// TODO: the flattening step described above
|
||||
let uls_a = {
|
||||
let mut uls = uls_a.into_vec();
|
||||
|
||||
// De-duplicate lambdas by root key.
|
||||
uls.iter_mut().for_each(|v| *v = subs.get_root_key(*v));
|
||||
uls.sort();
|
||||
uls.dedup();
|
||||
uls
|
||||
};
|
||||
|
||||
trace_compact!(1. subs, c_a, &uls_a);
|
||||
|
||||
// The flattening step - remove lambda sets that don't reference the concrete var, and for
|
||||
// flatten lambda sets that reference it more than once.
|
||||
let mut uls_a: Vec<_> = uls_a
|
||||
.into_iter()
|
||||
.flat_map(|lambda_set| {
|
||||
let LambdaSet {
|
||||
solved,
|
||||
recursion_var,
|
||||
unspecialized,
|
||||
ambient_function,
|
||||
} = subs.get_lambda_set(lambda_set);
|
||||
let lambda_set_rank = subs.get_rank(lambda_set);
|
||||
let unspecialized = subs.get_subs_slice(unspecialized);
|
||||
// TODO: is it faster to traverse once, see if we only have one concrete lambda, and
|
||||
// bail in that happy-path, rather than always splitting?
|
||||
let (concrete, mut not_concrete): (Vec<_>, Vec<_>) = unspecialized
|
||||
.iter()
|
||||
.copied()
|
||||
.partition(|Uls(var, _, _)| subs.equivalent_without_compacting(*var, c_a));
|
||||
if concrete.len() == 1 {
|
||||
// No flattening needs to be done, just return the lambda set as-is
|
||||
return vec![lambda_set];
|
||||
}
|
||||
// Must flatten
|
||||
concrete
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.map(|(i, concrete_lambda)| {
|
||||
let (var, unspecialized) = if i == 0 {
|
||||
// The first lambda set contains one concrete lambda, plus all solved
|
||||
// lambdas, plus all other unspecialized lambdas.
|
||||
// l' = [solved_lambdas + t1 + ... + tm + C:f:r]
|
||||
let unspecialized = SubsSlice::extend_new(
|
||||
&mut subs.unspecialized_lambda_sets,
|
||||
not_concrete
|
||||
.drain(..)
|
||||
.chain(std::iter::once(concrete_lambda)),
|
||||
);
|
||||
(lambda_set, unspecialized)
|
||||
} else {
|
||||
// All the other lambda sets consists only of their respective concrete
|
||||
// lambdas.
|
||||
// ln = [[] + C:fn:rn]
|
||||
let unspecialized = SubsSlice::extend_new(
|
||||
&mut subs.unspecialized_lambda_sets,
|
||||
[concrete_lambda],
|
||||
);
|
||||
let var = subs.fresh(Descriptor {
|
||||
content: Content::Error,
|
||||
rank: lambda_set_rank,
|
||||
mark: Mark::NONE,
|
||||
copy: OptVariable::NONE,
|
||||
});
|
||||
(var, unspecialized)
|
||||
};
|
||||
|
||||
subs.set_content(
|
||||
var,
|
||||
Content::LambdaSet(LambdaSet {
|
||||
solved,
|
||||
recursion_var,
|
||||
unspecialized,
|
||||
ambient_function,
|
||||
}),
|
||||
);
|
||||
var
|
||||
})
|
||||
.collect()
|
||||
})
|
||||
.collect();
|
||||
|
||||
// 2. Now, each `l` in `uls_a` has a unique unspecialized lambda of form `C:f:r`.
|
||||
// Sort `uls_a` primarily by `f` (arbitrary order), and secondarily by `r` in descending order.
|
||||
uls_a.sort_by(|v1, v2| {
|
||||
let unspec_1 = subs.get_subs_slice(subs.get_lambda_set(*v1).unspecialized);
|
||||
let unspec_2 = subs.get_subs_slice(subs.get_lambda_set(*v2).unspecialized);
|
||||
|
||||
let Uls(_, f1, r1) = unique_unspecialized_lambda(subs, c_a, unspec_1).unwrap();
|
||||
let Uls(_, f2, r2) = unique_unspecialized_lambda(subs, c_a, unspec_2).unwrap();
|
||||
|
||||
match f1.cmp(&f2) {
|
||||
std::cmp::Ordering::Equal => {
|
||||
// Order by descending order of region.
|
||||
r2.cmp(&r1)
|
||||
}
|
||||
ord => ord,
|
||||
}
|
||||
});
|
||||
|
||||
trace_compact!(2. subs, &uls_a);
|
||||
|
||||
// 3. For each `l` in `uls_a` with unique unspecialized lambda `C:f:r`:
|
||||
// 1. Let `t_f1` be the directly ambient function of the lambda set containing `C:f:r`. Remove `C:f:r` from `t_f1`'s lambda set.
|
||||
// - For example, `(b' -[[] + Fo:f:2]-> {})` if `C:f:r=Fo:f:2`. Removing `Fo:f:2`, we get `(b' -[[]]-> {})`.
|
||||
// 2. Let `t_f2` be the directly ambient function of the specialization lambda set resolved by `C:f:r`.
|
||||
// - For example, `(b -[[] + b:g:1]-> {})` if `C:f:r=Fo:f:2`, running on example from above.
|
||||
// 3. Unify `t_f1 ~ t_f2`.
|
||||
trace_compact!(3start.);
|
||||
for l in uls_a {
|
||||
let compaction_result =
|
||||
compact_lambda_set(subs, derived_env, arena, pools, c_a, l, phase);
|
||||
|
||||
match compaction_result {
|
||||
OneCompactionResult::Compacted {
|
||||
new_obligations,
|
||||
new_lambda_sets_to_specialize,
|
||||
} => {
|
||||
must_implement.extend(new_obligations);
|
||||
uls_of_var_queue.extend(new_lambda_sets_to_specialize.drain());
|
||||
}
|
||||
OneCompactionResult::MustWaitForSpecialization(impl_key) => {
|
||||
awaiting_specialization.add(impl_key, c_a, [l])
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
CompactionResult {
|
||||
obligations: must_implement,
|
||||
awaiting_specialization,
|
||||
}
|
||||
}
|
||||
|
||||
enum OneCompactionResult {
|
||||
Compacted {
|
||||
new_obligations: MustImplementConstraints,
|
||||
new_lambda_sets_to_specialize: UlsOfVar,
|
||||
},
|
||||
MustWaitForSpecialization(ImplKey),
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn compact_lambda_set<P: Phase>(
|
||||
subs: &mut Subs,
|
||||
derived_env: &DerivedEnv,
|
||||
arena: &Bump,
|
||||
pools: &mut Pools,
|
||||
resolved_concrete: Variable,
|
||||
this_lambda_set: Variable,
|
||||
phase: &P,
|
||||
) -> OneCompactionResult {
|
||||
// 3. For each `l` in `uls_a` with unique unspecialized lambda `C:f:r`:
|
||||
// 1. Let `t_f1` be the directly ambient function of the lambda set containing `C:f:r`. Remove `C:f:r` from `t_f1`'s lambda set.
|
||||
// - For example, `(b' -[[] + Fo:f:2]-> {})` if `C:f:r=Fo:f:2`. Removing `Fo:f:2`, we get `(b' -[[]]-> {})`.
|
||||
// 2. Let `t_f2` be the directly ambient function of the specialization lambda set resolved by `C:f:r`.
|
||||
// - For example, `(b -[[] + b:g:1]-> {})` if `C:f:r=Fo:f:2`, from the algorithm's running example.
|
||||
// 3. Unify `t_f1 ~ t_f2`.
|
||||
let LambdaSet {
|
||||
solved,
|
||||
recursion_var,
|
||||
unspecialized,
|
||||
ambient_function: t_f1,
|
||||
} = subs.get_lambda_set(this_lambda_set);
|
||||
let target_rank = subs.get_rank(this_lambda_set);
|
||||
|
||||
debug_assert!(!unspecialized.is_empty());
|
||||
|
||||
let unspecialized = subs.get_subs_slice(unspecialized);
|
||||
|
||||
// 1. Let `t_f1` be the directly ambient function of the lambda set containing `C:f:r`.
|
||||
let Uls(c, f, r) = unique_unspecialized_lambda(subs, resolved_concrete, unspecialized).unwrap();
|
||||
|
||||
debug_assert!(subs.equivalent_without_compacting(c, resolved_concrete));
|
||||
|
||||
// Now decide: do we
|
||||
// - proceed with specialization
|
||||
// - simply drop the specialization lambda set (due to an error)
|
||||
// - or do we need to wait, because we don't know enough information for the specialization yet?
|
||||
let specialization_decision = make_specialization_decision(subs, phase, c, f);
|
||||
let specialization_key_or_drop = match specialization_decision {
|
||||
SpecializeDecision::Specialize(key) => Ok(key),
|
||||
SpecializeDecision::Drop => Err(()),
|
||||
SpecializeDecision::PendingSpecialization(impl_key) => {
|
||||
// Bail, we need to wait for the specialization to be known.
|
||||
return OneCompactionResult::MustWaitForSpecialization(impl_key);
|
||||
}
|
||||
};
|
||||
|
||||
// 1b. Remove `C:f:r` from `t_f1`'s lambda set.
|
||||
let new_unspecialized: Vec<_> = unspecialized
|
||||
.iter()
|
||||
.filter(|Uls(v, _, _)| !subs.equivalent_without_compacting(*v, resolved_concrete))
|
||||
.copied()
|
||||
.collect();
|
||||
debug_assert_eq!(new_unspecialized.len(), unspecialized.len() - 1);
|
||||
let t_f1_lambda_set_without_concrete = LambdaSet {
|
||||
solved,
|
||||
recursion_var,
|
||||
unspecialized: SubsSlice::extend_new(
|
||||
&mut subs.unspecialized_lambda_sets,
|
||||
new_unspecialized,
|
||||
),
|
||||
ambient_function: t_f1,
|
||||
};
|
||||
subs.set_content(
|
||||
this_lambda_set,
|
||||
Content::LambdaSet(t_f1_lambda_set_without_concrete),
|
||||
);
|
||||
|
||||
let specialization_key = match specialization_key_or_drop {
|
||||
Ok(specialization_key) => specialization_key,
|
||||
Err(()) => {
|
||||
// Do nothing other than to remove the concrete lambda to drop from the lambda set,
|
||||
// which we already did in 1b above.
|
||||
trace_compact!(3iter_end_skipped. subs, t_f1);
|
||||
return OneCompactionResult::Compacted {
|
||||
new_obligations: Default::default(),
|
||||
new_lambda_sets_to_specialize: Default::default(),
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
let specialization_ambient_function_var = get_specialization_lambda_set_ambient_function(
|
||||
subs,
|
||||
derived_env,
|
||||
phase,
|
||||
f,
|
||||
r,
|
||||
specialization_key,
|
||||
target_rank,
|
||||
);
|
||||
|
||||
let t_f2 = match specialization_ambient_function_var {
|
||||
Ok(lset) => lset,
|
||||
Err(()) => {
|
||||
// Do nothing other than to remove the concrete lambda to drop from the lambda set,
|
||||
// which we already did in 1b above.
|
||||
trace_compact!(3iter_end_skipped. subs, t_f1);
|
||||
return OneCompactionResult::Compacted {
|
||||
new_obligations: Default::default(),
|
||||
new_lambda_sets_to_specialize: Default::default(),
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
// Ensure the specialized ambient function we'll unify with is not a generalized one, but one
|
||||
// at the rank of the lambda set being compacted.
|
||||
let t_f2 = deep_copy_var_in(subs, target_rank, pools, t_f2, arena);
|
||||
|
||||
// 3. Unify `t_f1 ~ t_f2`.
|
||||
trace_compact!(3iter_start. subs, this_lambda_set, t_f1, t_f2);
|
||||
let (vars, new_obligations, new_lambda_sets_to_specialize, _meta) = unify(
|
||||
&mut UEnv::new(subs),
|
||||
t_f1,
|
||||
t_f2,
|
||||
Mode::LAMBDA_SET_SPECIALIZATION,
|
||||
)
|
||||
.expect_success("ambient functions don't unify");
|
||||
trace_compact!(3iter_end. subs, t_f1);
|
||||
|
||||
introduce(subs, target_rank, pools, &vars);
|
||||
|
||||
OneCompactionResult::Compacted {
|
||||
new_obligations,
|
||||
new_lambda_sets_to_specialize,
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
enum SpecializationTypeKey {
|
||||
Opaque(Symbol),
|
||||
Derived(DeriveKey),
|
||||
Immediate(Symbol),
|
||||
}
|
||||
|
||||
enum SpecializeDecision {
|
||||
Specialize(SpecializationTypeKey),
|
||||
Drop,
|
||||
|
||||
/// Only relevant during module solving of recursive defs - we don't yet know the
|
||||
/// specialization type for a declared ability implementation, so we must hold off on
|
||||
/// specialization.
|
||||
PendingSpecialization(ImplKey),
|
||||
}
|
||||
|
||||
fn make_specialization_decision<P: Phase>(
|
||||
subs: &Subs,
|
||||
phase: &P,
|
||||
var: Variable,
|
||||
ability_member: Symbol,
|
||||
) -> SpecializeDecision {
|
||||
use Content::*;
|
||||
use SpecializationTypeKey::*;
|
||||
match subs.get_content_without_compacting(var) {
|
||||
Alias(opaque, _, _, AliasKind::Opaque) if opaque.module_id() != ModuleId::NUM => {
|
||||
if P::IS_LATE {
|
||||
SpecializeDecision::Specialize(Opaque(*opaque))
|
||||
} else {
|
||||
// Solving within a module.
|
||||
phase.with_module_abilities_store(opaque.module_id(), |abilities_store| {
|
||||
let impl_key = ImplKey {
|
||||
opaque: *opaque,
|
||||
ability_member,
|
||||
};
|
||||
match abilities_store.get_implementation(impl_key) {
|
||||
None => {
|
||||
// Doesn't specialize; an error will already be reported for this.
|
||||
SpecializeDecision::Drop
|
||||
}
|
||||
Some(MemberImpl::Error | MemberImpl::Derived) => {
|
||||
// TODO: probably not right, we may want to choose a derive decision!
|
||||
SpecializeDecision::Specialize(Opaque(*opaque))
|
||||
}
|
||||
Some(MemberImpl::Impl(specialization_symbol)) => {
|
||||
match abilities_store.specialization_info(*specialization_symbol) {
|
||||
Some(_) => SpecializeDecision::Specialize(Opaque(*opaque)),
|
||||
|
||||
// If we expect a specialization impl but don't yet know it, we must hold off
|
||||
// compacting the lambda set until the specialization is well-known.
|
||||
None => SpecializeDecision::PendingSpecialization(impl_key),
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
Structure(_) | Alias(_, _, _, _) => {
|
||||
let builtin = match ability_member.try_into() {
|
||||
Ok(builtin) => builtin,
|
||||
Err(_) => return SpecializeDecision::Drop,
|
||||
};
|
||||
|
||||
// This is a structural type, find the derived ability function it should use.
|
||||
match roc_derive_key::Derived::builtin(builtin, subs, var) {
|
||||
Ok(derived) => match derived {
|
||||
roc_derive_key::Derived::Immediate(imm) => {
|
||||
SpecializeDecision::Specialize(Immediate(imm))
|
||||
}
|
||||
roc_derive_key::Derived::Key(derive_key) => {
|
||||
SpecializeDecision::Specialize(Derived(derive_key))
|
||||
}
|
||||
},
|
||||
Err(DeriveError::UnboundVar) => {
|
||||
// not specialized yet, but that also means that it can't possibly be derivable
|
||||
// at this point?
|
||||
// TODO: is this right? Revisit if it causes us problems in the future.
|
||||
SpecializeDecision::Drop
|
||||
}
|
||||
Err(DeriveError::Underivable) => {
|
||||
// we should have reported an error for this; drop the lambda set.
|
||||
SpecializeDecision::Drop
|
||||
}
|
||||
}
|
||||
}
|
||||
Error => SpecializeDecision::Drop,
|
||||
FlexAbleVar(_, _)
|
||||
| RigidAbleVar(..)
|
||||
| FlexVar(..)
|
||||
| RigidVar(..)
|
||||
| RecursionVar { .. }
|
||||
| LambdaSet(..)
|
||||
| RangedNumber(..) => {
|
||||
internal_error!("unexpected")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn get_specialization_lambda_set_ambient_function<P: Phase>(
|
||||
subs: &mut Subs,
|
||||
derived_env: &DerivedEnv,
|
||||
phase: &P,
|
||||
ability_member: Symbol,
|
||||
lset_region: u8,
|
||||
specialization_key: SpecializationTypeKey,
|
||||
target_rank: Rank,
|
||||
) -> Result<Variable, ()> {
|
||||
match specialization_key {
|
||||
SpecializationTypeKey::Opaque(opaque) => {
|
||||
let opaque_home = opaque.module_id();
|
||||
let external_specialized_lset =
|
||||
phase.with_module_abilities_store(opaque_home, |abilities_store| {
|
||||
let impl_key = roc_can::abilities::ImplKey {
|
||||
opaque,
|
||||
ability_member,
|
||||
};
|
||||
let opt_specialization =
|
||||
abilities_store.get_implementation(impl_key);
|
||||
match opt_specialization {
|
||||
None => {
|
||||
if P::IS_LATE {
|
||||
internal_error!(
|
||||
"expected to know a specialization for {:?}#{:?}, but it wasn't found",
|
||||
opaque,
|
||||
ability_member
|
||||
);
|
||||
} else {
|
||||
// doesn't specialize; we'll have reported an error for this
|
||||
Err(())
|
||||
}
|
||||
}
|
||||
Some(member_impl) => match member_impl {
|
||||
MemberImpl::Impl(spec_symbol) => {
|
||||
let specialization =
|
||||
abilities_store.specialization_info(*spec_symbol).expect("expected custom implementations to always have complete specialization info by this point");
|
||||
|
||||
let specialized_lambda_set = *specialization
|
||||
.specialization_lambda_sets
|
||||
.get(&lset_region)
|
||||
.expect("lambda set region not resolved");
|
||||
Ok(specialized_lambda_set)
|
||||
}
|
||||
MemberImpl::Derived => todo_abilities!(),
|
||||
MemberImpl::Error => todo_abilities!(),
|
||||
},
|
||||
}
|
||||
})?;
|
||||
|
||||
let specialized_ambient = phase.copy_lambda_set_ambient_function_to_home_subs(
|
||||
external_specialized_lset,
|
||||
opaque_home,
|
||||
subs,
|
||||
);
|
||||
|
||||
Ok(specialized_ambient)
|
||||
}
|
||||
|
||||
SpecializationTypeKey::Derived(derive_key) => {
|
||||
let mut derived_module = derived_env.derived_module.lock().unwrap();
|
||||
|
||||
let (_, _, specialization_lambda_sets) =
|
||||
derived_module.get_or_insert(derived_env.exposed_types, derive_key);
|
||||
|
||||
let specialized_lambda_set = *specialization_lambda_sets
|
||||
.get(&lset_region)
|
||||
.expect("lambda set region not resolved");
|
||||
|
||||
let specialized_ambient = derived_module.copy_lambda_set_ambient_function_to_subs(
|
||||
specialized_lambda_set,
|
||||
subs,
|
||||
target_rank,
|
||||
);
|
||||
|
||||
Ok(specialized_ambient)
|
||||
}
|
||||
|
||||
SpecializationTypeKey::Immediate(imm) => {
|
||||
// Immediates are like opaques in that we can simply look up their type definition in
|
||||
// the ability store; there is nothing new to synthesize.
|
||||
//
|
||||
// THEORY: if something can become an immediate, it will always be available in the
|
||||
// local ability store, because the transformation is local (?)
|
||||
let immediate_lambda_set_at_region =
|
||||
phase.get_and_copy_ability_member_ambient_function(imm, lset_region, subs);
|
||||
|
||||
Ok(immediate_lambda_set_at_region)
|
||||
}
|
||||
}
|
||||
}
|
|
@ -3451,7 +3451,7 @@ mod solve_expr {
|
|||
{ id1, id2 }
|
||||
"#
|
||||
),
|
||||
"{ id1 : q -> q, id2 : a -> a }",
|
||||
"{ id1 : q -> q, id2 : q1 -> q1 }",
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -3966,7 +3966,7 @@ mod solve_expr {
|
|||
{ a, b }
|
||||
"#
|
||||
),
|
||||
"{ a : { x : I64, y : I64, z : Num c }, b : { blah : Str, x : I64, y : I64, z : Num a } }",
|
||||
"{ a : { x : I64, y : I64, z : Num c }, b : { blah : Str, x : I64, y : I64, z : Num c1 } }",
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -3997,7 +3997,7 @@ mod solve_expr {
|
|||
{ a, b }
|
||||
"#
|
||||
),
|
||||
"{ a : { x : Num *, y : Float *, z : c }, b : { blah : Str, x : Num *, y : Float *, z : a } }",
|
||||
"{ a : { x : Num *, y : Float *, z : c }, b : { blah : Str, x : Num *, y : Float *, z : c1 } }",
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -6157,7 +6157,7 @@ mod solve_expr {
|
|||
hashEq = \x, y -> hash x == hash y
|
||||
"#
|
||||
),
|
||||
"a, b -> Bool | a has Hash, b has Hash",
|
||||
"a, a1 -> Bool | a has Hash, a1 has Hash",
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -6510,7 +6510,6 @@ mod solve_expr {
|
|||
}
|
||||
|
||||
#[test]
|
||||
#[ignore = "TODO: fix unification of derived types"]
|
||||
fn encode_record() {
|
||||
infer_queries!(
|
||||
indoc!(
|
||||
|
@ -6523,14 +6522,11 @@ mod solve_expr {
|
|||
# ^^^^^^^^^
|
||||
"#
|
||||
),
|
||||
@r#"
|
||||
"Encoding#toEncoder(2) : { a : Str } -[[#Derived.toEncoder_{a}(0)]]-> Encoder fmt | fmt has EncoderFormatting",
|
||||
"#
|
||||
@"Encoding#toEncoder(2) : { a : Str } -[[#Derived.toEncoder_{a}(0)]]-> Encoder fmt | fmt has EncoderFormatting"
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[ignore = "TODO: fix unification of derived types"]
|
||||
fn encode_record_with_nested_custom_impl() {
|
||||
infer_queries!(
|
||||
indoc!(
|
||||
|
@ -6539,16 +6535,14 @@ mod solve_expr {
|
|||
imports [Encode.{ toEncoder, Encoding, custom }]
|
||||
provides [main] to "./platform"
|
||||
|
||||
A := {}
|
||||
A := {} has [Encoding {toEncoder}]
|
||||
toEncoder = \@A _ -> custom \b, _ -> b
|
||||
|
||||
main = toEncoder { a: @A {} }
|
||||
# ^^^^^^^^^
|
||||
"#
|
||||
),
|
||||
@r#"
|
||||
"Encoding#toEncoder(2) : { a : A } -[[#Derived.toEncoder_{a}(0)]]-> Encoder fmt | fmt has EncoderFormatting",
|
||||
"#
|
||||
@"Encoding#toEncoder(2) : { a : A } -[[#Derived.toEncoder_{a}(0)]]-> Encoder fmt | fmt has EncoderFormatting"
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -6721,7 +6715,7 @@ mod solve_expr {
|
|||
),
|
||||
@r#"
|
||||
A#id(5) : {} -[[id(5)]]-> ({} -[[8(8)]]-> {})
|
||||
Id#id(3) : {} -[[id(5)]]-> ({} -[[8(8)]]-> {})
|
||||
Id#id(3) : a -[[] + a:id(3):1]-> ({} -[[] + a:id(3):2]-> a) | a has Id
|
||||
alias : {} -[[id(5)]]-> ({} -[[8(8)]]-> {})
|
||||
"#
|
||||
print_only_under_alias: true
|
||||
|
@ -6831,15 +6825,13 @@ mod solve_expr {
|
|||
ping : a -> a | a has Bounce
|
||||
pong : a -> a | a has Bounce
|
||||
|
||||
A := {} has [Bounce {ping, pong}]
|
||||
A := {} has [Bounce {ping: pingA, pong: pongA}]
|
||||
|
||||
ping : A -> A
|
||||
ping = \@A {} -> pong (@A {})
|
||||
#^^^^{-1} ^^^^
|
||||
pingA = \@A {} -> pong (@A {})
|
||||
#^^^^^{-1} ^^^^
|
||||
|
||||
pong : A -> A
|
||||
pong = \@A {} -> ping (@A {})
|
||||
#^^^^{-1} ^^^^
|
||||
pongA = \@A {} -> ping (@A {})
|
||||
#^^^^^{-1} ^^^^
|
||||
|
||||
main =
|
||||
a : A
|
||||
|
@ -6850,17 +6842,16 @@ mod solve_expr {
|
|||
"#
|
||||
),
|
||||
@r###"
|
||||
A#ping(5) : A -[[ping(5)]]-> A
|
||||
A#pong(6) : A -[[pong(6)]]-> A
|
||||
A#pong(6) : A -[[pong(6)]]-> A
|
||||
A#ping(5) : A -[[ping(5)]]-> A
|
||||
A#ping(5) : A -[[ping(5)]]-> A
|
||||
pingA : A -[[pingA(5)]]-> A
|
||||
A#pong(6) : A -[[pongA(6)]]-> A
|
||||
pongA : A -[[pongA(6)]]-> A
|
||||
A#ping(5) : A -[[pingA(5)]]-> A
|
||||
A#ping(5) : A -[[pingA(5)]]-> A
|
||||
"###
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[ignore = "TODO: this currently runs into trouble with ping and pong first being inferred as overly-general before recursive constraining"]
|
||||
fn resolve_mutually_recursive_ability_lambda_sets_inferred() {
|
||||
infer_queries!(
|
||||
indoc!(
|
||||
|
@ -6889,7 +6880,7 @@ mod solve_expr {
|
|||
),
|
||||
@r###"
|
||||
A#ping(5) : A -[[ping(5)]]-> A
|
||||
Bounce#pong(3) : A -[[pong(6)]]-> A
|
||||
A#pong(6) : A -[[pong(6)]]-> A
|
||||
A#pong(6) : A -[[pong(6)]]-> A
|
||||
A#ping(5) : A -[[ping(5)]]-> A
|
||||
A#ping(5) : A -[[ping(5)]]-> A
|
||||
|
@ -7257,24 +7248,11 @@ mod solve_expr {
|
|||
# ^
|
||||
"#
|
||||
),
|
||||
// TODO SERIOUS: Let generalization is broken here, and this is NOT correct!!
|
||||
// Two problems:
|
||||
// - 1. `{}` always has its rank adjusted to the toplevel, which forces the rest
|
||||
// of the type to the toplevel, but that is NOT correct here!
|
||||
// - 2. During solving lambda set compaction cannot happen until an entire module
|
||||
// is solved, which forces resolved-but-not-yet-compacted lambdas in
|
||||
// unspecialized lambda sets to pull the rank into a lower, non-generalized
|
||||
// rank. Special-casing for that is a TERRIBLE HACK that interferes very
|
||||
// poorly with (1)
|
||||
//
|
||||
// We are BLOCKED on https://github.com/rtfeldman/roc/issues/3207 to make this work
|
||||
// correctly!
|
||||
// See also https://github.com/rtfeldman/roc/pull/3175, a separate, but similar problem.
|
||||
@r###"
|
||||
Fo#f(7) : Fo -[[f(7)]]-> (b -[[] + b:g(4):1]-> {}) | b has G
|
||||
Go#g(8) : Go -[[g(8)]]-> {}
|
||||
h : Go -[[g(8)]]-> {}
|
||||
Fo#f(7) : Fo -[[f(7)]]-> (Go -[[g(8)]]-> {})
|
||||
h : b -[[] + b:g(4):1]-> {} | b has G
|
||||
Fo#f(7) : Fo -[[f(7)]]-> (b -[[] + b:g(4):1]-> {}) | b has G
|
||||
h : Go -[[g(8)]]-> {}
|
||||
"###
|
||||
);
|
||||
|
@ -7346,6 +7324,169 @@ mod solve_expr {
|
|||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn polymorphic_lambda_set_specialization_varying_over_multiple_variables() {
|
||||
infer_queries!(
|
||||
indoc!(
|
||||
r#"
|
||||
app "test" provides [main] to "./platform"
|
||||
|
||||
J has j : j -> (k -> {}) | j has J, k has K
|
||||
K has k : k -> {} | k has K
|
||||
|
||||
C := {} has [J {j: jC}]
|
||||
jC = \@C _ -> k
|
||||
#^^{-1}
|
||||
|
||||
D := {} has [J {j: jD}]
|
||||
jD = \@D _ -> k
|
||||
#^^{-1}
|
||||
|
||||
E := {} has [K {k}]
|
||||
k = \@E _ -> {}
|
||||
#^{-1}
|
||||
|
||||
f = \flag, a, b ->
|
||||
# ^ ^
|
||||
it =
|
||||
# ^^
|
||||
when flag is
|
||||
A -> j a
|
||||
# ^
|
||||
B -> j b
|
||||
# ^
|
||||
it
|
||||
# ^^
|
||||
|
||||
main = (f A (@C {}) (@D {})) (@E {})
|
||||
# ^
|
||||
# ^^^^^^^^^^^^^^^^^^^
|
||||
#^^^^{-1}
|
||||
"#
|
||||
),
|
||||
@r###"
|
||||
jC : C -[[jC(8)]]-> (k -[[] + k:k(4):1]-> {}) | k has K
|
||||
jD : D -[[jD(9)]]-> (k -[[] + k:k(4):1]-> {}) | k has K
|
||||
E#k(10) : E -[[k(10)]]-> {}
|
||||
a : j | j has J
|
||||
b : j | j has J
|
||||
it : k -[[] + j:j(2):2 + j1:j(2):2]-> {} | j has J, j1 has J, k has K
|
||||
J#j(2) : j -[[] + j:j(2):1]-> (k -[[] + j:j(2):2 + j1:j(2):2]-> {}) | j has J, j1 has J, k has K
|
||||
J#j(2) : j -[[] + j:j(2):1]-> (k -[[] + j1:j(2):2 + j:j(2):2]-> {}) | j has J, j1 has J, k has K
|
||||
it : k -[[] + j:j(2):2 + j1:j(2):2]-> {} | j has J, j1 has J, k has K
|
||||
f : [A, B], C, D -[[f(11)]]-> (E -[[k(10)]]-> {})
|
||||
f A (@C {}) (@D {}) : E -[[k(10)]]-> {}
|
||||
main : {}
|
||||
"###
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn polymorphic_lambda_set_specialization_varying_over_multiple_variables_two_results() {
|
||||
infer_queries!(
|
||||
indoc!(
|
||||
r#"
|
||||
app "test" provides [main] to "./platform"
|
||||
|
||||
J has j : j -> (k -> {}) | j has J, k has K
|
||||
K has k : k -> {} | k has K
|
||||
|
||||
C := {} has [J {j: jC}]
|
||||
jC = \@C _ -> k
|
||||
#^^{-1}
|
||||
|
||||
D := {} has [J {j: jD}]
|
||||
jD = \@D _ -> k
|
||||
#^^{-1}
|
||||
|
||||
E := {} has [K {k: kE}]
|
||||
kE = \@E _ -> {}
|
||||
#^^{-1}
|
||||
|
||||
F := {} has [K {k: kF}]
|
||||
kF = \@F _ -> {}
|
||||
#^^{-1}
|
||||
|
||||
f = \flag, a, b ->
|
||||
# ^ ^
|
||||
it =
|
||||
# ^^
|
||||
when flag is
|
||||
A -> j a
|
||||
# ^
|
||||
B -> j b
|
||||
# ^
|
||||
it
|
||||
# ^^
|
||||
|
||||
main =
|
||||
#^^^^{-1}
|
||||
it =
|
||||
# ^^
|
||||
(f A (@C {}) (@D {}))
|
||||
# ^
|
||||
if True
|
||||
then it (@E {})
|
||||
# ^^
|
||||
else it (@F {})
|
||||
# ^^
|
||||
"#
|
||||
),
|
||||
@r###"
|
||||
jC : C -[[jC(9)]]-> (k -[[] + k:k(4):1]-> {}) | k has K
|
||||
jD : D -[[jD(10)]]-> (k -[[] + k:k(4):1]-> {}) | k has K
|
||||
kE : E -[[kE(11)]]-> {}
|
||||
kF : F -[[kF(12)]]-> {}
|
||||
a : j | j has J
|
||||
b : j | j has J
|
||||
it : k -[[] + j:j(2):2 + j1:j(2):2]-> {} | j has J, j1 has J, k has K
|
||||
J#j(2) : j -[[] + j:j(2):1]-> (k -[[] + j:j(2):2 + j1:j(2):2]-> {}) | j has J, j1 has J, k has K
|
||||
J#j(2) : j -[[] + j:j(2):1]-> (k -[[] + j1:j(2):2 + j:j(2):2]-> {}) | j has J, j1 has J, k has K
|
||||
it : k -[[] + j:j(2):2 + j1:j(2):2]-> {} | j has J, j1 has J, k has K
|
||||
main : {}
|
||||
it : k -[[] + k:k(4):1]-> {} | k has K
|
||||
f : [A, B], C, D -[[f(13)]]-> (k -[[] + k:k(4):1]-> {}) | k has K
|
||||
it : E -[[kE(11)]]-> {}
|
||||
it : F -[[kF(12)]]-> {}
|
||||
"###
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn polymorphic_lambda_set_specialization_branching_over_single_variable() {
|
||||
infer_queries!(
|
||||
indoc!(
|
||||
r#"
|
||||
app "test" provides [f] to "./platform"
|
||||
|
||||
J has j : j -> (k -> {}) | j has J, k has K
|
||||
K has k : k -> {} | k has K
|
||||
|
||||
C := {} has [J {j: jC}]
|
||||
jC = \@C _ -> k
|
||||
|
||||
D := {} has [J {j: jD}]
|
||||
jD = \@D _ -> k
|
||||
|
||||
E := {} has [K {k}]
|
||||
k = \@E _ -> {}
|
||||
|
||||
f = \flag, a, c ->
|
||||
it =
|
||||
when flag is
|
||||
A -> j a
|
||||
B -> j a
|
||||
it c
|
||||
# ^^ ^
|
||||
"#
|
||||
),
|
||||
@r###"
|
||||
it : k -[[] + j:j(2):2]-> {} | j has J, k has K
|
||||
c : k | k has K
|
||||
"###
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn wrap_recursive_opaque_negative_position() {
|
||||
infer_eq_without_problem(
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "roc_solve_problem"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
authors = ["The Roc Contributors"]
|
||||
license = "UPL-1.0"
|
||||
edition = "2021"
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "roc_str"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
authors = ["The Roc Contributors"]
|
||||
license = "UPL-1.0"
|
||||
edition = "2021"
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "test_derive"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
authors = ["The Roc Contributors"]
|
||||
license = "UPL-1.0"
|
||||
edition = "2021"
|
||||
|
|
51
crates/compiler/test_derive/src/decoding.rs
Normal file
|
@ -0,0 +1,51 @@
|
|||
#![cfg(test)]
|
||||
// Even with #[allow(non_snake_case)] on individual idents, rust-analyzer issues diagnostics.
|
||||
// See https://github.com/rust-lang/rust-analyzer/issues/6541.
|
||||
// For the `v!` macro we use uppercase variables when constructing tag unions.
|
||||
#![allow(non_snake_case)]
|
||||
|
||||
use crate::{
|
||||
util::{check_immediate, derive_test},
|
||||
v,
|
||||
};
|
||||
use insta::assert_snapshot;
|
||||
use roc_module::symbol::Symbol;
|
||||
use roc_types::subs::Variable;
|
||||
|
||||
use roc_derive_key::DeriveBuiltin::Decoder;
|
||||
|
||||
#[test]
|
||||
fn immediates() {
|
||||
check_immediate(Decoder, v!(U8), Symbol::DECODE_U8);
|
||||
check_immediate(Decoder, v!(U16), Symbol::DECODE_U16);
|
||||
check_immediate(Decoder, v!(U32), Symbol::DECODE_U32);
|
||||
check_immediate(Decoder, v!(U64), Symbol::DECODE_U64);
|
||||
check_immediate(Decoder, v!(U128), Symbol::DECODE_U128);
|
||||
check_immediate(Decoder, v!(I8), Symbol::DECODE_I8);
|
||||
check_immediate(Decoder, v!(I16), Symbol::DECODE_I16);
|
||||
check_immediate(Decoder, v!(I32), Symbol::DECODE_I32);
|
||||
check_immediate(Decoder, v!(I64), Symbol::DECODE_I64);
|
||||
check_immediate(Decoder, v!(I128), Symbol::DECODE_I128);
|
||||
check_immediate(Decoder, v!(DEC), Symbol::DECODE_DEC);
|
||||
check_immediate(Decoder, v!(F32), Symbol::DECODE_F32);
|
||||
check_immediate(Decoder, v!(F64), Symbol::DECODE_F64);
|
||||
check_immediate(Decoder, v!(STR), Symbol::DECODE_STRING);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn list() {
|
||||
derive_test(Decoder, v!(Symbol::LIST_LIST v!(STR)), |golden| {
|
||||
assert_snapshot!(golden, @r###"
|
||||
# derived for List Str
|
||||
# Decoder (List val) fmt | fmt has DecoderFormatting, val has Decoding
|
||||
# List U8, fmt -[[custom(3)]]-> { rest : List U8, result : [Err [TooShort], Ok (List val)] } | fmt has DecoderFormatting, val has Decoding
|
||||
# Specialization lambda sets:
|
||||
# @<1>: [[custom(3)]]
|
||||
#Derived.decoder_list =
|
||||
Decode.custom
|
||||
\#Derived.bytes, #Derived.fmt ->
|
||||
Decode.decodeWith #Derived.bytes (Decode.list Decode.decoder) #Derived.fmt
|
||||
"###
|
||||
)
|
||||
})
|
||||
}
|
|
@ -4,427 +4,22 @@
|
|||
// For the `v!` macro we use uppercase variables when constructing tag unions.
|
||||
#![allow(non_snake_case)]
|
||||
|
||||
use std::path::PathBuf;
|
||||
|
||||
use bumpalo::Bump;
|
||||
use insta::assert_snapshot;
|
||||
use pretty_assertions::assert_eq;
|
||||
use ven_pretty::DocAllocator;
|
||||
|
||||
use crate::pretty_print::{pretty_print_def, Ctx};
|
||||
use roc_can::{
|
||||
abilities::{AbilitiesStore, SpecializationLambdaSets},
|
||||
constraint::Constraints,
|
||||
def::Def,
|
||||
expr::Declarations,
|
||||
module::{
|
||||
ExposedByModule, ExposedForModule, ExposedModuleTypes, ResolvedImplementations,
|
||||
RigidVariables,
|
||||
},
|
||||
use crate::{
|
||||
test_hash_eq, test_hash_neq,
|
||||
util::{check_immediate, derive_test},
|
||||
v,
|
||||
};
|
||||
use roc_collections::VecSet;
|
||||
use roc_constrain::expr::constrain_decls;
|
||||
use roc_debug_flags::dbg_do;
|
||||
use roc_derive::{synth_var, DerivedModule};
|
||||
use roc_derive_key::{DeriveKey, Derived};
|
||||
use roc_load_internal::file::{add_imports, default_aliases, LoadedModule, Threading};
|
||||
use roc_module::{
|
||||
ident::TagName,
|
||||
symbol::{IdentIds, Interns, ModuleId, Symbol},
|
||||
};
|
||||
use roc_region::all::LineInfo;
|
||||
use roc_reporting::report::{type_problem, RocDocAllocator};
|
||||
use roc_types::{
|
||||
pretty_print::{name_and_print_var, DebugPrint},
|
||||
subs::{
|
||||
AliasVariables, Content, ExposedTypesStorageSubs, FlatType, RecordFields, Subs, SubsIndex,
|
||||
SubsSlice, UnionTags, Variable,
|
||||
},
|
||||
types::{AliasKind, RecordField},
|
||||
};
|
||||
|
||||
const DERIVED_MODULE: ModuleId = ModuleId::DERIVED_SYNTH;
|
||||
|
||||
fn encode_path() -> PathBuf {
|
||||
let repo_root = std::env::var("ROC_WORKSPACE_DIR").expect("are you running with `cargo test`?");
|
||||
PathBuf::from(repo_root)
|
||||
.join("compiler")
|
||||
.join("builtins")
|
||||
.join("roc")
|
||||
.join("Encode.roc")
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn assemble_derived_golden(
|
||||
subs: &mut Subs,
|
||||
test_module: ModuleId,
|
||||
interns: &Interns,
|
||||
source_var: Variable,
|
||||
derived_source: &str,
|
||||
typ: Variable,
|
||||
specialization_lsets: SpecializationLambdaSets,
|
||||
) -> String {
|
||||
let mut print_var = |var: Variable, print_only_under_alias| {
|
||||
let snapshot = subs.snapshot();
|
||||
let pretty_type = name_and_print_var(
|
||||
var,
|
||||
subs,
|
||||
test_module,
|
||||
interns,
|
||||
DebugPrint {
|
||||
print_lambda_sets: true,
|
||||
print_only_under_alias,
|
||||
},
|
||||
);
|
||||
subs.rollback_to(snapshot);
|
||||
pretty_type
|
||||
};
|
||||
|
||||
let mut pretty_buf = String::new();
|
||||
|
||||
pretty_buf.push_str(&format!("# derived for {}\n", print_var(source_var, false)));
|
||||
|
||||
let pretty_type = print_var(typ, false);
|
||||
pretty_buf.push_str(&format!("# {}\n", &pretty_type));
|
||||
|
||||
let pretty_type_under_aliases = print_var(typ, true);
|
||||
pretty_buf.push_str(&format!("# {}\n", &pretty_type_under_aliases));
|
||||
|
||||
pretty_buf.push_str("# Specialization lambda sets:\n");
|
||||
let mut specialization_lsets = specialization_lsets.into_iter().collect::<Vec<_>>();
|
||||
specialization_lsets.sort_by_key(|(region, _)| *region);
|
||||
for (region, var) in specialization_lsets {
|
||||
let pretty_lset = print_var(var, false);
|
||||
pretty_buf.push_str(&format!("# @<{}>: {}\n", region, pretty_lset));
|
||||
}
|
||||
|
||||
pretty_buf.push_str(derived_source);
|
||||
|
||||
pretty_buf
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn check_derived_typechecks_and_golden(
|
||||
derived_def: Def,
|
||||
test_module: ModuleId,
|
||||
mut test_subs: Subs,
|
||||
interns: &Interns,
|
||||
exposed_encode_types: ExposedTypesStorageSubs,
|
||||
encode_abilities_store: AbilitiesStore,
|
||||
source_var: Variable,
|
||||
derived_program: &str,
|
||||
specialization_lsets: SpecializationLambdaSets,
|
||||
check_golden: impl Fn(&str),
|
||||
) {
|
||||
// constrain the derived
|
||||
let mut constraints = Constraints::new();
|
||||
let def_var = derived_def.expr_var;
|
||||
let mut decls = Declarations::new();
|
||||
decls.push_def(derived_def);
|
||||
let constr = constrain_decls(&mut constraints, test_module, &decls);
|
||||
|
||||
// the derived depends on stuff from Encode, so
|
||||
// - we need to add those dependencies as imported on the constraint
|
||||
// - we need to add Encode ability info to a local abilities store
|
||||
let encode_values_to_import = exposed_encode_types
|
||||
.stored_vars_by_symbol
|
||||
.keys()
|
||||
.copied()
|
||||
.collect::<VecSet<_>>();
|
||||
let pending_abilities = encode_abilities_store.closure_from_imported(&encode_values_to_import);
|
||||
let mut exposed_by_module = ExposedByModule::default();
|
||||
exposed_by_module.insert(
|
||||
ModuleId::ENCODE,
|
||||
ExposedModuleTypes {
|
||||
exposed_types_storage_subs: exposed_encode_types,
|
||||
resolved_implementations: ResolvedImplementations::default(),
|
||||
},
|
||||
);
|
||||
let exposed_for_module =
|
||||
ExposedForModule::new(encode_values_to_import.iter(), exposed_by_module);
|
||||
let mut def_types = Default::default();
|
||||
let mut rigid_vars = Default::default();
|
||||
let (import_variables, abilities_store) = add_imports(
|
||||
test_module,
|
||||
&mut test_subs,
|
||||
pending_abilities,
|
||||
&exposed_for_module,
|
||||
&mut def_types,
|
||||
&mut rigid_vars,
|
||||
);
|
||||
let constr =
|
||||
constraints.let_import_constraint(rigid_vars, def_types, constr, &import_variables);
|
||||
|
||||
// run the solver, print and fail if we have errors
|
||||
dbg_do!(
|
||||
roc_debug_flags::ROC_PRINT_UNIFICATIONS_DERIVED,
|
||||
std::env::set_var(roc_debug_flags::ROC_PRINT_UNIFICATIONS_DERIVED, "1")
|
||||
);
|
||||
let (mut solved_subs, _, problems, _) = roc_solve::module::run_solve(
|
||||
test_module,
|
||||
&constraints,
|
||||
constr,
|
||||
RigidVariables::default(),
|
||||
test_subs,
|
||||
default_aliases(),
|
||||
abilities_store,
|
||||
Default::default(),
|
||||
&exposed_for_module.exposed_by_module,
|
||||
Default::default(),
|
||||
);
|
||||
let subs = solved_subs.inner_mut();
|
||||
|
||||
if !problems.is_empty() {
|
||||
let filename = PathBuf::from("Test.roc");
|
||||
let lines = LineInfo::new(" ");
|
||||
let src_lines = vec![" "];
|
||||
let mut reports = Vec::new();
|
||||
let alloc = RocDocAllocator::new(&src_lines, test_module, interns);
|
||||
|
||||
for problem in problems.into_iter() {
|
||||
if let Some(report) = type_problem(&alloc, &lines, filename.clone(), problem.clone()) {
|
||||
reports.push(report);
|
||||
}
|
||||
}
|
||||
|
||||
let has_reports = !reports.is_empty();
|
||||
|
||||
let doc = alloc
|
||||
.stack(reports.into_iter().map(|v| v.pretty(&alloc)))
|
||||
.append(if has_reports {
|
||||
alloc.line()
|
||||
} else {
|
||||
alloc.nil()
|
||||
});
|
||||
|
||||
let mut buf = String::new();
|
||||
doc.1
|
||||
.render_raw(80, &mut roc_reporting::report::CiWrite::new(&mut buf))
|
||||
.unwrap();
|
||||
|
||||
panic!(
|
||||
"Derived does not typecheck:\n{}\nDerived def:\n{}",
|
||||
buf, derived_program
|
||||
);
|
||||
}
|
||||
|
||||
let golden = assemble_derived_golden(
|
||||
subs,
|
||||
test_module,
|
||||
interns,
|
||||
source_var,
|
||||
derived_program,
|
||||
def_var,
|
||||
specialization_lsets,
|
||||
);
|
||||
|
||||
check_golden(&golden)
|
||||
}
|
||||
|
||||
fn derive_test<S>(synth_input: S, check_golden: impl Fn(&str))
|
||||
where
|
||||
S: FnOnce(&mut Subs) -> Variable,
|
||||
{
|
||||
let arena = Bump::new();
|
||||
let source = roc_builtins::roc::module_source(ModuleId::ENCODE);
|
||||
let target_info = roc_target::TargetInfo::default_x86_64();
|
||||
|
||||
let LoadedModule {
|
||||
mut interns,
|
||||
exposed_types_storage: exposed_encode_types,
|
||||
abilities_store,
|
||||
resolved_implementations,
|
||||
..
|
||||
} = roc_load_internal::file::load_and_typecheck_str(
|
||||
&arena,
|
||||
encode_path().file_name().unwrap().into(),
|
||||
source,
|
||||
encode_path().parent().unwrap().to_path_buf(),
|
||||
Default::default(),
|
||||
target_info,
|
||||
roc_reporting::report::RenderTarget::ColorTerminal,
|
||||
Threading::AllAvailable,
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let mut subs = Subs::new();
|
||||
let ident_ids = IdentIds::default();
|
||||
let source_var = synth_input(&mut subs);
|
||||
let key = get_key(&subs, source_var);
|
||||
|
||||
let mut derived_module = unsafe { DerivedModule::from_components(subs, ident_ids) };
|
||||
|
||||
let mut exposed_by_module = ExposedByModule::default();
|
||||
exposed_by_module.insert(
|
||||
ModuleId::ENCODE,
|
||||
ExposedModuleTypes {
|
||||
exposed_types_storage_subs: exposed_encode_types.clone(),
|
||||
resolved_implementations,
|
||||
},
|
||||
);
|
||||
|
||||
let (_derived_symbol, derived_def, specialization_lsets) =
|
||||
derived_module.get_or_insert(&exposed_by_module, key);
|
||||
let specialization_lsets = specialization_lsets.clone();
|
||||
let derived_def = derived_def.clone();
|
||||
|
||||
let (subs, ident_ids) = derived_module.decompose();
|
||||
|
||||
interns.all_ident_ids.insert(DERIVED_MODULE, ident_ids);
|
||||
DERIVED_MODULE.register_debug_idents(interns.all_ident_ids.get(&DERIVED_MODULE).unwrap());
|
||||
|
||||
let ctx = Ctx { interns: &interns };
|
||||
let derived_program = pretty_print_def(&ctx, &derived_def);
|
||||
|
||||
check_derived_typechecks_and_golden(
|
||||
derived_def,
|
||||
DERIVED_MODULE,
|
||||
subs,
|
||||
&interns,
|
||||
exposed_encode_types,
|
||||
abilities_store,
|
||||
source_var,
|
||||
&derived_program,
|
||||
specialization_lsets,
|
||||
check_golden,
|
||||
);
|
||||
}
|
||||
|
||||
fn get_key(subs: &Subs, var: Variable) -> DeriveKey {
|
||||
match Derived::encoding(subs, var) {
|
||||
Ok(Derived::Key(key)) => key,
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
fn check_key<S1, S2>(eq: bool, synth1: S1, synth2: S2)
|
||||
where
|
||||
S1: FnOnce(&mut Subs) -> Variable,
|
||||
S2: FnOnce(&mut Subs) -> Variable,
|
||||
{
|
||||
let mut subs = Subs::new();
|
||||
let var1 = synth1(&mut subs);
|
||||
let var2 = synth2(&mut subs);
|
||||
|
||||
let key1 = Derived::encoding(&subs, var1);
|
||||
let key2 = Derived::encoding(&subs, var2);
|
||||
|
||||
if eq {
|
||||
assert_eq!(key1, key2);
|
||||
} else {
|
||||
assert_ne!(key1, key2);
|
||||
}
|
||||
}
|
||||
|
||||
fn check_immediate<S>(synth: S, immediate: Symbol)
|
||||
where
|
||||
S: FnOnce(&mut Subs) -> Variable,
|
||||
{
|
||||
let mut subs = Subs::new();
|
||||
let var = synth(&mut subs);
|
||||
|
||||
let key = Derived::encoding(&subs, var);
|
||||
|
||||
assert_eq!(key, Ok(Derived::Immediate(immediate)));
|
||||
}
|
||||
|
||||
// Writing out the types into content is terrible, so let's use a DSL at least for testing
|
||||
macro_rules! v {
|
||||
({ $($field:ident: $make_v:expr,)* $(?$opt_field:ident : $make_opt_v:expr,)* }) => {
|
||||
|subs: &mut Subs| {
|
||||
$(let $field = $make_v(subs);)*
|
||||
$(let $opt_field = $make_opt_v(subs);)*
|
||||
let fields = vec![
|
||||
$( (stringify!($field).into(), RecordField::Required($field)) ,)*
|
||||
$( (stringify!($opt_field).into(), RecordField::Required($opt_field)) ,)*
|
||||
];
|
||||
let fields = RecordFields::insert_into_subs(subs, fields);
|
||||
synth_var(subs, Content::Structure(FlatType::Record(fields, Variable::EMPTY_RECORD)))
|
||||
}
|
||||
};
|
||||
([ $($tag:ident $($payload:expr)*),* ]) => {
|
||||
|subs: &mut Subs| {
|
||||
$(
|
||||
let $tag = vec![ $( $payload(subs), )* ];
|
||||
)*
|
||||
let tags = UnionTags::insert_into_subs::<_, Vec<Variable>>(subs, vec![ $( (TagName(stringify!($tag).into()), $tag) ,)* ]);
|
||||
synth_var(subs, Content::Structure(FlatType::TagUnion(tags, Variable::EMPTY_TAG_UNION)))
|
||||
}
|
||||
};
|
||||
([ $($tag:ident $($payload:expr)*),* ] as $rec_var:ident) => {
|
||||
|subs: &mut Subs| {
|
||||
let $rec_var = subs.fresh_unnamed_flex_var();
|
||||
let rec_name_index =
|
||||
SubsIndex::push_new(&mut subs.field_names, stringify!($rec).into());
|
||||
|
||||
$(
|
||||
let $tag = vec![ $( $payload(subs), )* ];
|
||||
)*
|
||||
let tags = UnionTags::insert_into_subs::<_, Vec<Variable>>(subs, vec![ $( (TagName(stringify!($tag).into()), $tag) ,)* ]);
|
||||
let tag_union_var = synth_var(subs, Content::Structure(FlatType::RecursiveTagUnion($rec_var, tags, Variable::EMPTY_TAG_UNION)));
|
||||
|
||||
subs.set_content(
|
||||
$rec_var,
|
||||
Content::RecursionVar {
|
||||
structure: tag_union_var,
|
||||
opt_name: Some(rec_name_index),
|
||||
},
|
||||
);
|
||||
tag_union_var
|
||||
}
|
||||
};
|
||||
(Symbol::$sym:ident $($arg:expr)*) => {
|
||||
|subs: &mut Subs| {
|
||||
let $sym = vec![ $( $arg(subs) ,)* ];
|
||||
let var_slice = SubsSlice::insert_into_subs(subs, $sym);
|
||||
synth_var(subs, Content::Structure(FlatType::Apply(Symbol::$sym, var_slice)))
|
||||
}
|
||||
};
|
||||
(Symbol::$alias:ident $($arg:expr)* => $real_var:expr) => {
|
||||
|subs: &mut Subs| {
|
||||
let args = vec![$( $arg(subs) )*];
|
||||
let alias_variables = AliasVariables::insert_into_subs::<Vec<_>, Vec<_>>(subs, args, vec![]);
|
||||
let real_var = $real_var(subs);
|
||||
synth_var(subs, Content::Alias(Symbol::$alias, alias_variables, real_var, AliasKind::Structural))
|
||||
}
|
||||
};
|
||||
(@Symbol::$alias:ident $($arg:expr)* => $real_var:expr) => {
|
||||
|subs: &mut Subs| {
|
||||
let args = vec![$( $arg(subs) )*];
|
||||
let alias_variables = AliasVariables::insert_into_subs::<Vec<_>, Vec<_>>(subs, args, vec![]);
|
||||
let real_var = $real_var(subs);
|
||||
synth_var(subs, Content::Alias(Symbol::$alias, alias_variables, real_var, AliasKind::Opaque))
|
||||
}
|
||||
};
|
||||
(*$rec_var:ident) => {
|
||||
|_: &mut Subs| { $rec_var }
|
||||
};
|
||||
($var:ident) => {
|
||||
|_: &mut Subs| { Variable::$var }
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! test_hash_eq {
|
||||
($($name:ident: $synth1:expr, $synth2:expr)*) => {$(
|
||||
#[test]
|
||||
fn $name() {
|
||||
check_key(true, $synth1, $synth2)
|
||||
}
|
||||
)*};
|
||||
}
|
||||
|
||||
macro_rules! test_hash_neq {
|
||||
($($name:ident: $synth1:expr, $synth2:expr)*) => {$(
|
||||
#[test]
|
||||
fn $name() {
|
||||
check_key(false, $synth1, $synth2)
|
||||
}
|
||||
)*};
|
||||
}
|
||||
use roc_derive_key::DeriveBuiltin::ToEncoder;
|
||||
use roc_module::symbol::Symbol;
|
||||
use roc_types::subs::Variable;
|
||||
|
||||
// {{{ hash tests
|
||||
|
||||
test_hash_eq! {
|
||||
ToEncoder,
|
||||
|
||||
same_record:
|
||||
v!({ a: v!(U8), }), v!({ a: v!(U8), })
|
||||
same_record_fields_diff_types:
|
||||
|
@ -448,9 +43,9 @@ test_hash_eq! {
|
|||
v!(EMPTY_TAG_UNION), v!([])
|
||||
|
||||
same_recursive_tag_union:
|
||||
v!([ Nil, Cons v!(*lst)] as lst), v!([ Nil, Cons v!(*lst)] as lst)
|
||||
v!([ Nil, Cons v!(^lst)] as lst), v!([ Nil, Cons v!(^lst)] as lst)
|
||||
same_tag_union_and_recursive_tag_union_fields:
|
||||
v!([ Nil, Cons v!(STR)]), v!([ Nil, Cons v!(*lst)] as lst)
|
||||
v!([ Nil, Cons v!(STR)]), v!([ Nil, Cons v!(^lst)] as lst)
|
||||
|
||||
list_list_diff_types:
|
||||
v!(Symbol::LIST_LIST v!(STR)), v!(Symbol::LIST_LIST v!(U8))
|
||||
|
@ -476,6 +71,8 @@ test_hash_eq! {
|
|||
}
|
||||
|
||||
test_hash_neq! {
|
||||
ToEncoder,
|
||||
|
||||
different_record_fields:
|
||||
v!({ a: v!(U8), }), v!({ b: v!(U8), })
|
||||
record_empty_vs_nonempty:
|
||||
|
@ -486,7 +83,7 @@ test_hash_neq! {
|
|||
tag_union_empty_vs_nonempty:
|
||||
v!(EMPTY_TAG_UNION), v!([ B v!(U8) ])
|
||||
different_recursive_tag_union_tags:
|
||||
v!([ Nil, Cons v!(*lst) ] as lst), v!([ Nil, Next v!(*lst) ] as lst)
|
||||
v!([ Nil, Cons v!(^lst) ] as lst), v!([ Nil, Next v!(^lst) ] as lst)
|
||||
|
||||
same_alias_diff_real_type:
|
||||
v!(Symbol::BOOL_BOOL => v!([ True, False ])), v!(Symbol::BOOL_BOOL => v!([ False, True, Maybe ]))
|
||||
|
@ -505,25 +102,25 @@ test_hash_neq! {
|
|||
|
||||
#[test]
|
||||
fn immediates() {
|
||||
check_immediate(v!(U8), Symbol::ENCODE_U8);
|
||||
check_immediate(v!(U16), Symbol::ENCODE_U16);
|
||||
check_immediate(v!(U32), Symbol::ENCODE_U32);
|
||||
check_immediate(v!(U64), Symbol::ENCODE_U64);
|
||||
check_immediate(v!(U128), Symbol::ENCODE_U128);
|
||||
check_immediate(v!(I8), Symbol::ENCODE_I8);
|
||||
check_immediate(v!(I16), Symbol::ENCODE_I16);
|
||||
check_immediate(v!(I32), Symbol::ENCODE_I32);
|
||||
check_immediate(v!(I64), Symbol::ENCODE_I64);
|
||||
check_immediate(v!(I128), Symbol::ENCODE_I128);
|
||||
check_immediate(v!(DEC), Symbol::ENCODE_DEC);
|
||||
check_immediate(v!(F32), Symbol::ENCODE_F32);
|
||||
check_immediate(v!(F64), Symbol::ENCODE_F64);
|
||||
check_immediate(v!(STR), Symbol::ENCODE_STRING);
|
||||
check_immediate(ToEncoder, v!(U8), Symbol::ENCODE_U8);
|
||||
check_immediate(ToEncoder, v!(U16), Symbol::ENCODE_U16);
|
||||
check_immediate(ToEncoder, v!(U32), Symbol::ENCODE_U32);
|
||||
check_immediate(ToEncoder, v!(U64), Symbol::ENCODE_U64);
|
||||
check_immediate(ToEncoder, v!(U128), Symbol::ENCODE_U128);
|
||||
check_immediate(ToEncoder, v!(I8), Symbol::ENCODE_I8);
|
||||
check_immediate(ToEncoder, v!(I16), Symbol::ENCODE_I16);
|
||||
check_immediate(ToEncoder, v!(I32), Symbol::ENCODE_I32);
|
||||
check_immediate(ToEncoder, v!(I64), Symbol::ENCODE_I64);
|
||||
check_immediate(ToEncoder, v!(I128), Symbol::ENCODE_I128);
|
||||
check_immediate(ToEncoder, v!(DEC), Symbol::ENCODE_DEC);
|
||||
check_immediate(ToEncoder, v!(F32), Symbol::ENCODE_F32);
|
||||
check_immediate(ToEncoder, v!(F64), Symbol::ENCODE_F64);
|
||||
check_immediate(ToEncoder, v!(STR), Symbol::ENCODE_STRING);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn empty_record() {
|
||||
derive_test(v!(EMPTY_RECORD), |golden| {
|
||||
derive_test(ToEncoder, v!(EMPTY_RECORD), |golden| {
|
||||
assert_snapshot!(golden, @r###"
|
||||
# derived for {}
|
||||
# {} -[[toEncoder_{}(0)]]-> Encoder fmt | fmt has EncoderFormatting
|
||||
|
@ -543,7 +140,7 @@ fn empty_record() {
|
|||
|
||||
#[test]
|
||||
fn zero_field_record() {
|
||||
derive_test(v!({}), |golden| {
|
||||
derive_test(ToEncoder, v!({}), |golden| {
|
||||
assert_snapshot!(golden, @r###"
|
||||
# derived for {}
|
||||
# {} -[[toEncoder_{}(0)]]-> Encoder fmt | fmt has EncoderFormatting
|
||||
|
@ -563,7 +160,7 @@ fn zero_field_record() {
|
|||
|
||||
#[test]
|
||||
fn one_field_record() {
|
||||
derive_test(v!({ a: v!(U8), }), |golden| {
|
||||
derive_test(ToEncoder, v!({ a: v!(U8), }), |golden| {
|
||||
assert_snapshot!(golden, @r###"
|
||||
# derived for { a : U8 }
|
||||
# { a : val } -[[toEncoder_{a}(0)]]-> Encoder fmt | fmt has EncoderFormatting, val has Encoding
|
||||
|
@ -588,23 +185,27 @@ fn one_field_record() {
|
|||
}
|
||||
|
||||
#[test]
|
||||
#[ignore = "TODO #3421 unification of unspecialized variables in lambda sets currently causes this to be derived incorrectly"]
|
||||
fn two_field_record() {
|
||||
derive_test(v!({ a: v!(U8), b: v!(STR), }), |golden| {
|
||||
derive_test(ToEncoder, v!({ a: v!(U8), b: v!(STR), }), |golden| {
|
||||
assert_snapshot!(golden, @r###"
|
||||
# derived for { a : U8, b : Str }
|
||||
# { a : val, b : a } -[[toEncoder_{a,b}(0)]]-> Encoder fmt | a has Encoding, fmt has EncoderFormatting, val has Encoding
|
||||
# { a : val, b : a } -[[toEncoder_{a,b}(0)]]-> (List U8, fmt -[[custom(2) { a : val, b : a }]]-> List U8) | a has Encoding, fmt has EncoderFormatting, val has Encoding
|
||||
# { a : val, b : val1 } -[[toEncoder_{a,b}(0)]]-> Encoder fmt | fmt has EncoderFormatting, val has Encoding, val1 has Encoding
|
||||
# { a : val, b : val1 } -[[toEncoder_{a,b}(0)]]-> (List U8, fmt -[[custom(2) { a : val, b : val1 }]]-> List U8) | fmt has EncoderFormatting, val has Encoding, val1 has Encoding
|
||||
# Specialization lambda sets:
|
||||
# @<1>: [[toEncoder_{a,b}(0)]]
|
||||
# @<2>: [[custom(2) { a : val, b : a }]] | a has Encoding, val has Encoding
|
||||
# @<2>: [[custom(2) { a : val, b : val1 }]] | val has Encoding, val1 has Encoding
|
||||
#Derived.toEncoder_{a,b} =
|
||||
\#Derived.rcd ->
|
||||
Encode.custom \#Derived.bytes, #Derived.fmt ->
|
||||
Encode.appendWith #Derived.bytes (Encode.record [
|
||||
Encode.custom
|
||||
\#Derived.bytes, #Derived.fmt ->
|
||||
Encode.appendWith
|
||||
#Derived.bytes
|
||||
(Encode.record
|
||||
[
|
||||
{ value: Encode.toEncoder #Derived.rcd.a, key: "a", },
|
||||
{ value: Encode.toEncoder #Derived.rcd.b, key: "b", },
|
||||
]) #Derived.fmt
|
||||
])
|
||||
#Derived.fmt
|
||||
"###
|
||||
)
|
||||
})
|
||||
|
@ -614,7 +215,7 @@ fn two_field_record() {
|
|||
#[ignore = "NOTE: this would never actually happen, because [] is uninhabited, and hence toEncoder can never be called with a value of []!
|
||||
Rightfully it induces broken assertions in other parts of the compiler, so we ignore it."]
|
||||
fn empty_tag_union() {
|
||||
derive_test(v!(EMPTY_TAG_UNION), |golden| {
|
||||
derive_test(ToEncoder, v!(EMPTY_TAG_UNION), |golden| {
|
||||
assert_snapshot!(
|
||||
golden,
|
||||
@r#"
|
||||
|
@ -625,7 +226,7 @@ fn empty_tag_union() {
|
|||
|
||||
#[test]
|
||||
fn tag_one_label_zero_args() {
|
||||
derive_test(v!([A]), |golden| {
|
||||
derive_test(ToEncoder, v!([A]), |golden| {
|
||||
assert_snapshot!(golden, @r###"
|
||||
# derived for [A]
|
||||
# [A] -[[toEncoder_[A 0](0)]]-> Encoder fmt | fmt has EncoderFormatting
|
||||
|
@ -647,47 +248,59 @@ fn tag_one_label_zero_args() {
|
|||
}
|
||||
|
||||
#[test]
|
||||
#[ignore = "TODO #3421 unification of unspecialized variables in lambda sets currently causes this to be derived incorrectly"]
|
||||
fn tag_one_label_two_args() {
|
||||
derive_test(v!([A v!(U8) v!(STR)]), |golden| {
|
||||
derive_test(ToEncoder, v!([A v!(U8) v!(STR)]), |golden| {
|
||||
assert_snapshot!(golden, @r###"
|
||||
# derived for [A U8 Str]
|
||||
# [A val a] -[[toEncoder_[A 2](0)]]-> Encoder fmt | a has Encoding, fmt has EncoderFormatting, val has Encoding
|
||||
# [A val a] -[[toEncoder_[A 2](0)]]-> (List U8, fmt -[[custom(4) [A val a]]]-> List U8) | a has Encoding, fmt has EncoderFormatting, val has Encoding
|
||||
# [A val val1] -[[toEncoder_[A 2](0)]]-> Encoder fmt | fmt has EncoderFormatting, val has Encoding, val1 has Encoding
|
||||
# [A val val1] -[[toEncoder_[A 2](0)]]-> (List U8, fmt -[[custom(4) [A val val1]]]-> List U8) | fmt has EncoderFormatting, val has Encoding, val1 has Encoding
|
||||
# Specialization lambda sets:
|
||||
# @<1>: [[toEncoder_[A 2](0)]]
|
||||
# @<2>: [[custom(4) [A val a]]] | a has Encoding, val has Encoding
|
||||
# @<2>: [[custom(4) [A val val1]]] | val has Encoding, val1 has Encoding
|
||||
#Derived.toEncoder_[A 2] =
|
||||
\#Derived.tag ->
|
||||
Encode.custom \#Derived.bytes, #Derived.fmt ->
|
||||
Encode.appendWith #Derived.bytes (when #Derived.tag is
|
||||
Encode.custom
|
||||
\#Derived.bytes, #Derived.fmt ->
|
||||
Encode.appendWith
|
||||
#Derived.bytes
|
||||
(when #Derived.tag is
|
||||
A #Derived.2 #Derived.3 ->
|
||||
Encode.tag "A" [
|
||||
Encode.tag
|
||||
"A"
|
||||
[
|
||||
Encode.toEncoder #Derived.2,
|
||||
Encode.toEncoder #Derived.3,
|
||||
]) #Derived.fmt
|
||||
])
|
||||
#Derived.fmt
|
||||
"###
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[ignore = "TODO #3421 unification of unspecialized variables in lambda sets currently causes this to be derived incorrectly"]
|
||||
fn tag_two_labels() {
|
||||
derive_test(v!([A v!(U8) v!(STR) v!(U16), B v!(STR)]), |golden| {
|
||||
derive_test(
|
||||
ToEncoder,
|
||||
v!([A v!(U8) v!(STR) v!(U16), B v!(STR)]),
|
||||
|golden| {
|
||||
assert_snapshot!(golden, @r###"
|
||||
# derived for [A U8 Str U16, B Str]
|
||||
# [A val a b, B c] -[[toEncoder_[A 3,B 1](0)]]-> Encoder fmt | a has Encoding, b has Encoding, c has Encoding, fmt has EncoderFormatting, val has Encoding
|
||||
# [A val a b, B c] -[[toEncoder_[A 3,B 1](0)]]-> (List U8, fmt -[[custom(6) [A val a b, B c]]]-> List U8) | a has Encoding, b has Encoding, c has Encoding, fmt has EncoderFormatting, val has Encoding
|
||||
# [A val val1 val1, B val1] -[[toEncoder_[A 3,B 1](0)]]-> Encoder fmt | fmt has EncoderFormatting, val has Encoding, val1 has Encoding
|
||||
# [A val val1 val1, B val1] -[[toEncoder_[A 3,B 1](0)]]-> (List U8, fmt -[[custom(6) [A val val1 val1, B val1]]]-> List U8) | fmt has EncoderFormatting, val has Encoding, val1 has Encoding
|
||||
# Specialization lambda sets:
|
||||
# @<1>: [[toEncoder_[A 3,B 1](0)]]
|
||||
# @<2>: [[custom(6) [A val a b, B c]]] | a has Encoding, b has Encoding, c has Encoding, val has Encoding
|
||||
# @<2>: [[custom(6) [A val val1 val1, B val1]]] | val has Encoding, val1 has Encoding
|
||||
#Derived.toEncoder_[A 3,B 1] =
|
||||
\#Derived.tag ->
|
||||
Encode.custom \#Derived.bytes, #Derived.fmt ->
|
||||
Encode.appendWith #Derived.bytes (when #Derived.tag is
|
||||
Encode.custom
|
||||
\#Derived.bytes, #Derived.fmt ->
|
||||
Encode.appendWith
|
||||
#Derived.bytes
|
||||
(when #Derived.tag is
|
||||
A #Derived.2 #Derived.3 #Derived.4 ->
|
||||
Encode.tag "A" [
|
||||
Encode.tag
|
||||
"A"
|
||||
[
|
||||
Encode.toEncoder #Derived.2,
|
||||
Encode.toEncoder #Derived.3,
|
||||
Encode.toEncoder #Derived.4,
|
||||
|
@ -696,38 +309,48 @@ fn tag_two_labels() {
|
|||
#Derived.fmt
|
||||
"###
|
||||
)
|
||||
})
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[ignore = "TODO #3421 unification of unspecialized variables in lambda sets currently causes this to be derived incorrectly"]
|
||||
fn recursive_tag_union() {
|
||||
derive_test(v!([Nil, Cons v!(U8) v!(*lst) ] as lst), |golden| {
|
||||
derive_test(
|
||||
ToEncoder,
|
||||
v!([Nil, Cons v!(U8) v!(^lst) ] as lst),
|
||||
|golden| {
|
||||
assert_snapshot!(golden, @r###"
|
||||
# derived for [Cons U8 $rec, Nil] as $rec
|
||||
# [Cons val a, Nil] -[[toEncoder_[Cons 2,Nil 0](0)]]-> Encoder fmt | a has Encoding, fmt has EncoderFormatting, val has Encoding
|
||||
# [Cons val a, Nil] -[[toEncoder_[Cons 2,Nil 0](0)]]-> (List U8, fmt -[[custom(4) [Cons val a, Nil]]]-> List U8) | a has Encoding, fmt has EncoderFormatting, val has Encoding
|
||||
# [Cons val val1, Nil] -[[toEncoder_[Cons 2,Nil 0](0)]]-> Encoder fmt | fmt has EncoderFormatting, val has Encoding, val1 has Encoding
|
||||
# [Cons val val1, Nil] -[[toEncoder_[Cons 2,Nil 0](0)]]-> (List U8, fmt -[[custom(4) [Cons val val1, Nil]]]-> List U8) | fmt has EncoderFormatting, val has Encoding, val1 has Encoding
|
||||
# Specialization lambda sets:
|
||||
# @<1>: [[toEncoder_[Cons 2,Nil 0](0)]]
|
||||
# @<2>: [[custom(4) [Cons val a, Nil]]] | a has Encoding, val has Encoding
|
||||
# @<2>: [[custom(4) [Cons val val1, Nil]]] | val has Encoding, val1 has Encoding
|
||||
#Derived.toEncoder_[Cons 2,Nil 0] =
|
||||
\#Derived.tag ->
|
||||
Encode.custom \#Derived.bytes, #Derived.fmt ->
|
||||
Encode.appendWith #Derived.bytes (when #Derived.tag is
|
||||
Encode.custom
|
||||
\#Derived.bytes, #Derived.fmt ->
|
||||
Encode.appendWith
|
||||
#Derived.bytes
|
||||
(when #Derived.tag is
|
||||
Cons #Derived.2 #Derived.3 ->
|
||||
Encode.tag "Cons" [
|
||||
Encode.tag
|
||||
"Cons"
|
||||
[
|
||||
Encode.toEncoder #Derived.2,
|
||||
Encode.toEncoder #Derived.3,
|
||||
]
|
||||
Nil -> Encode.tag "Nil" []) #Derived.fmt
|
||||
Nil -> Encode.tag "Nil" [])
|
||||
#Derived.fmt
|
||||
"###
|
||||
)
|
||||
})
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn list() {
|
||||
derive_test(v!(Symbol::LIST_LIST v!(STR)), |golden| {
|
||||
derive_test(ToEncoder, v!(Symbol::LIST_LIST v!(STR)), |golden| {
|
||||
assert_snapshot!(golden, @r###"
|
||||
# derived for List Str
|
||||
# List val -[[toEncoder_list(0)]]-> Encoder fmt | fmt has EncoderFormatting, val has Encoding
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
#![cfg(test)]
|
||||
|
||||
mod decoding;
|
||||
mod encoding;
|
||||
|
||||
mod pretty_print;
|
||||
mod util;
|
||||
|
|
467
crates/compiler/test_derive/src/util.rs
Normal file
|
@ -0,0 +1,467 @@
|
|||
use std::path::PathBuf;
|
||||
|
||||
use bumpalo::Bump;
|
||||
use ven_pretty::DocAllocator;
|
||||
|
||||
use crate::pretty_print::{pretty_print_def, Ctx};
|
||||
use roc_can::{
|
||||
abilities::{AbilitiesStore, SpecializationLambdaSets},
|
||||
constraint::Constraints,
|
||||
def::Def,
|
||||
expr::Declarations,
|
||||
module::{
|
||||
ExposedByModule, ExposedForModule, ExposedModuleTypes, ResolvedImplementations,
|
||||
RigidVariables,
|
||||
},
|
||||
};
|
||||
use roc_collections::VecSet;
|
||||
use roc_constrain::expr::constrain_decls;
|
||||
use roc_debug_flags::dbg_do;
|
||||
use roc_derive::DerivedModule;
|
||||
use roc_derive_key::{DeriveBuiltin, DeriveKey, Derived};
|
||||
use roc_load_internal::file::{add_imports, default_aliases, LoadedModule, Threading};
|
||||
use roc_module::symbol::{IdentIds, Interns, ModuleId, Symbol};
|
||||
use roc_region::all::LineInfo;
|
||||
use roc_reporting::report::{type_problem, RocDocAllocator};
|
||||
use roc_types::{
|
||||
pretty_print::{name_and_print_var, DebugPrint},
|
||||
subs::{ExposedTypesStorageSubs, Subs, Variable},
|
||||
};
|
||||
|
||||
const DERIVED_MODULE: ModuleId = ModuleId::DERIVED_SYNTH;
|
||||
|
||||
fn module_source_and_path(builtin: DeriveBuiltin) -> (ModuleId, &'static str, PathBuf) {
|
||||
use roc_builtins::roc::module_source;
|
||||
|
||||
let repo_root = std::env::var("ROC_WORKSPACE_DIR").expect("are you running with `cargo test`?");
|
||||
let builtins_path = PathBuf::from(repo_root)
|
||||
.join("compiler")
|
||||
.join("builtins")
|
||||
.join("roc");
|
||||
|
||||
match builtin {
|
||||
DeriveBuiltin::ToEncoder => (
|
||||
ModuleId::ENCODE,
|
||||
module_source(ModuleId::ENCODE),
|
||||
builtins_path.join("Encode.roc"),
|
||||
),
|
||||
DeriveBuiltin::Decoder => (
|
||||
ModuleId::DECODE,
|
||||
module_source(ModuleId::DECODE),
|
||||
builtins_path.join("Decode.roc"),
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
/// DSL for creating [`Content`][crate::subs::Content].
|
||||
#[macro_export]
|
||||
macro_rules! v {
|
||||
({ $($field:ident: $make_v:expr,)* $(?$opt_field:ident : $make_opt_v:expr,)* }) => {{
|
||||
#[allow(unused)]
|
||||
use roc_types::types::RecordField;
|
||||
use roc_types::subs::{Subs, RecordFields, Content, FlatType, Variable};
|
||||
|subs: &mut Subs| {
|
||||
$(let $field = $make_v(subs);)*
|
||||
$(let $opt_field = $make_opt_v(subs);)*
|
||||
let fields = vec![
|
||||
$( (stringify!($field).into(), RecordField::Required($field)) ,)*
|
||||
$( (stringify!($opt_field).into(), RecordField::Required($opt_field)) ,)*
|
||||
];
|
||||
let fields = RecordFields::insert_into_subs(subs, fields);
|
||||
roc_derive::synth_var(subs, Content::Structure(FlatType::Record(fields, Variable::EMPTY_RECORD)))
|
||||
}
|
||||
}};
|
||||
([ $($tag:ident $($payload:expr)*),* ]$( $ext:tt )?) => {{
|
||||
#[allow(unused)]
|
||||
use roc_types::subs::{Subs, UnionTags, Content, FlatType, Variable};
|
||||
#[allow(unused)]
|
||||
use roc_module::ident::TagName;
|
||||
|subs: &mut Subs| {
|
||||
$(
|
||||
let $tag = vec![ $( $payload(subs), )* ];
|
||||
)*
|
||||
let tags = UnionTags::insert_into_subs::<_, Vec<Variable>>(subs, vec![ $( (TagName(stringify!($tag).into()), $tag) ,)* ]);
|
||||
|
||||
#[allow(unused_mut)]
|
||||
let mut ext = Variable::EMPTY_TAG_UNION;
|
||||
$( ext = $crate::v!($ext)(subs); )?
|
||||
|
||||
roc_derive::synth_var(subs, Content::Structure(FlatType::TagUnion(tags, ext)))
|
||||
}
|
||||
}};
|
||||
([ $($tag:ident $($payload:expr)*),* ] as $rec_var:ident) => {{
|
||||
use roc_types::subs::{Subs, SubsIndex, Variable, Content, FlatType, UnionTags};
|
||||
use roc_module::ident::TagName;
|
||||
|subs: &mut Subs| {
|
||||
let $rec_var = subs.fresh_unnamed_flex_var();
|
||||
let rec_name_index =
|
||||
SubsIndex::push_new(&mut subs.field_names, stringify!($rec).into());
|
||||
|
||||
$(
|
||||
let $tag = vec![ $( $payload(subs), )* ];
|
||||
)*
|
||||
let tags = UnionTags::insert_into_subs::<_, Vec<Variable>>(subs, vec![ $( (TagName(stringify!($tag).into()), $tag) ,)* ]);
|
||||
let tag_union_var = roc_derive::synth_var(subs, Content::Structure(FlatType::RecursiveTagUnion($rec_var, tags, Variable::EMPTY_TAG_UNION)));
|
||||
|
||||
subs.set_content(
|
||||
$rec_var,
|
||||
Content::RecursionVar {
|
||||
structure: tag_union_var,
|
||||
opt_name: Some(rec_name_index),
|
||||
},
|
||||
);
|
||||
tag_union_var
|
||||
}
|
||||
}};
|
||||
(Symbol::$sym:ident $($arg:expr)*) => {{
|
||||
use roc_types::subs::{Subs, SubsSlice, Content, FlatType};
|
||||
use roc_module::symbol::Symbol;
|
||||
|subs: &mut Subs| {
|
||||
let $sym = vec![ $( $arg(subs) ,)* ];
|
||||
let var_slice = SubsSlice::insert_into_subs(subs, $sym);
|
||||
roc_derive::synth_var(subs, Content::Structure(FlatType::Apply(Symbol::$sym, var_slice)))
|
||||
}
|
||||
}};
|
||||
(Symbol::$alias:ident $($arg:expr)* => $real_var:expr) => {{
|
||||
use roc_types::subs::{Subs, AliasVariables, Content};
|
||||
use roc_types::types::AliasKind;
|
||||
use roc_module::symbol::Symbol;
|
||||
|subs: &mut Subs| {
|
||||
let args = vec![$( $arg(subs) )*];
|
||||
let alias_variables = AliasVariables::insert_into_subs::<Vec<_>, Vec<_>>(subs, args, vec![]);
|
||||
let real_var = $real_var(subs);
|
||||
roc_derive::synth_var(subs, Content::Alias(Symbol::$alias, alias_variables, real_var, AliasKind::Structural))
|
||||
}
|
||||
}};
|
||||
(@Symbol::$alias:ident $($arg:expr)* => $real_var:expr) => {{
|
||||
use roc_types::subs::{Subs, AliasVariables, Content};
|
||||
use roc_types::types::AliasKind;
|
||||
use roc_module::symbol::Symbol;
|
||||
|subs: &mut Subs| {
|
||||
let args = vec![$( $arg(subs) )*];
|
||||
let alias_variables = AliasVariables::insert_into_subs::<Vec<_>, Vec<_>>(subs, args, vec![]);
|
||||
let real_var = $real_var(subs);
|
||||
roc_derive::synth_var(subs, Content::Alias(Symbol::$alias, alias_variables, real_var, AliasKind::Opaque))
|
||||
}
|
||||
}};
|
||||
(*) => {{
|
||||
use roc_types::subs::{Subs, Content};
|
||||
|subs: &mut Subs| { roc_derive::synth_var(subs, Content::FlexVar(None)) }
|
||||
}};
|
||||
(^$rec_var:ident) => {{
|
||||
use roc_types::subs::{Subs};
|
||||
|_: &mut Subs| { $rec_var }
|
||||
}};
|
||||
($var:ident) => {{
|
||||
use roc_types::subs::{Subs};
|
||||
|_: &mut Subs| { Variable::$var }
|
||||
}};
|
||||
}
|
||||
|
||||
pub(crate) fn check_key<S1, S2>(builtin: DeriveBuiltin, eq: bool, synth1: S1, synth2: S2)
|
||||
where
|
||||
S1: FnOnce(&mut Subs) -> Variable,
|
||||
S2: FnOnce(&mut Subs) -> Variable,
|
||||
{
|
||||
let mut subs = Subs::new();
|
||||
let var1 = synth1(&mut subs);
|
||||
let var2 = synth2(&mut subs);
|
||||
|
||||
let key1 = Derived::builtin(builtin, &subs, var1);
|
||||
let key2 = Derived::builtin(builtin, &subs, var2);
|
||||
|
||||
if eq {
|
||||
assert_eq!(key1, key2);
|
||||
} else {
|
||||
assert_ne!(key1, key2);
|
||||
}
|
||||
}
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! test_hash_eq {
|
||||
($builtin:expr, $($name:ident: $synth1:expr, $synth2:expr)*) => {$(
|
||||
#[test]
|
||||
fn $name() {
|
||||
$crate::util::check_key($builtin, true, $synth1, $synth2)
|
||||
}
|
||||
)*};
|
||||
}
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! test_hash_neq {
|
||||
($builtin:expr, $($name:ident: $synth1:expr, $synth2:expr)*) => {$(
|
||||
#[test]
|
||||
fn $name() {
|
||||
$crate::util::check_key($builtin, false, $synth1, $synth2)
|
||||
}
|
||||
)*};
|
||||
}
|
||||
|
||||
pub(crate) fn check_immediate<S>(builtin: DeriveBuiltin, synth: S, immediate: Symbol)
|
||||
where
|
||||
S: FnOnce(&mut Subs) -> Variable,
|
||||
{
|
||||
let mut subs = Subs::new();
|
||||
let var = synth(&mut subs);
|
||||
|
||||
let key = Derived::builtin(builtin, &subs, var);
|
||||
|
||||
assert_eq!(key, Ok(Derived::Immediate(immediate)));
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn assemble_derived_golden(
|
||||
subs: &mut Subs,
|
||||
test_module: ModuleId,
|
||||
interns: &Interns,
|
||||
source_var: Variable,
|
||||
derived_source: &str,
|
||||
typ: Variable,
|
||||
specialization_lsets: SpecializationLambdaSets,
|
||||
) -> String {
|
||||
let mut print_var = |var: Variable, print_only_under_alias| {
|
||||
let snapshot = subs.snapshot();
|
||||
let pretty_type = name_and_print_var(
|
||||
var,
|
||||
subs,
|
||||
test_module,
|
||||
interns,
|
||||
DebugPrint {
|
||||
print_lambda_sets: true,
|
||||
print_only_under_alias,
|
||||
},
|
||||
);
|
||||
subs.rollback_to(snapshot);
|
||||
pretty_type
|
||||
};
|
||||
|
||||
let mut pretty_buf = String::new();
|
||||
|
||||
pretty_buf.push_str(&format!("# derived for {}\n", print_var(source_var, false)));
|
||||
|
||||
let pretty_type = print_var(typ, false);
|
||||
pretty_buf.push_str(&format!("# {}\n", &pretty_type));
|
||||
|
||||
let pretty_type_under_aliases = print_var(typ, true);
|
||||
pretty_buf.push_str(&format!("# {}\n", &pretty_type_under_aliases));
|
||||
|
||||
pretty_buf.push_str("# Specialization lambda sets:\n");
|
||||
let mut specialization_lsets = specialization_lsets.into_iter().collect::<Vec<_>>();
|
||||
specialization_lsets.sort_by_key(|(region, _)| *region);
|
||||
for (region, var) in specialization_lsets {
|
||||
let pretty_lset = print_var(var, false);
|
||||
pretty_buf.push_str(&format!("# @<{}>: {}\n", region, pretty_lset));
|
||||
}
|
||||
|
||||
pretty_buf.push_str(derived_source);
|
||||
|
||||
pretty_buf
|
||||
}
|
||||
|
||||
/// The environment of the module containing the builtin ability we're deriving for a type.
|
||||
struct DeriveBuiltinEnv {
|
||||
module_id: ModuleId,
|
||||
exposed_types: ExposedTypesStorageSubs,
|
||||
abilities_store: AbilitiesStore,
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn check_derived_typechecks_and_golden(
|
||||
derived_def: Def,
|
||||
test_module: ModuleId,
|
||||
mut test_subs: Subs,
|
||||
interns: &Interns,
|
||||
derive_builtin_env: DeriveBuiltinEnv,
|
||||
source_var: Variable,
|
||||
derived_program: &str,
|
||||
specialization_lsets: SpecializationLambdaSets,
|
||||
check_golden: impl Fn(&str),
|
||||
) {
|
||||
// constrain the derived
|
||||
let mut constraints = Constraints::new();
|
||||
let def_var = derived_def.expr_var;
|
||||
let mut decls = Declarations::new();
|
||||
decls.push_def(derived_def);
|
||||
let constr = constrain_decls(&mut constraints, test_module, &decls);
|
||||
|
||||
// the derived implementation depends on stuff from the builtin module, so
|
||||
// - we need to add those dependencies as imported on the constraint
|
||||
// - we need to add the builtin ability info to a local abilities store
|
||||
let values_to_import_from_builtin_module = derive_builtin_env
|
||||
.exposed_types
|
||||
.stored_vars_by_symbol
|
||||
.keys()
|
||||
.copied()
|
||||
.collect::<VecSet<_>>();
|
||||
let pending_abilities = derive_builtin_env
|
||||
.abilities_store
|
||||
.closure_from_imported(&values_to_import_from_builtin_module);
|
||||
let mut exposed_by_module = ExposedByModule::default();
|
||||
exposed_by_module.insert(
|
||||
derive_builtin_env.module_id,
|
||||
ExposedModuleTypes {
|
||||
exposed_types_storage_subs: derive_builtin_env.exposed_types,
|
||||
resolved_implementations: ResolvedImplementations::default(),
|
||||
},
|
||||
);
|
||||
let exposed_for_module = ExposedForModule::new(
|
||||
values_to_import_from_builtin_module.iter(),
|
||||
exposed_by_module,
|
||||
);
|
||||
let mut def_types = Default::default();
|
||||
let mut rigid_vars = Default::default();
|
||||
let (import_variables, abilities_store) = add_imports(
|
||||
test_module,
|
||||
&mut test_subs,
|
||||
pending_abilities,
|
||||
&exposed_for_module,
|
||||
&mut def_types,
|
||||
&mut rigid_vars,
|
||||
);
|
||||
let constr =
|
||||
constraints.let_import_constraint(rigid_vars, def_types, constr, &import_variables);
|
||||
|
||||
// run the solver, print and fail if we have errors
|
||||
dbg_do!(
|
||||
roc_debug_flags::ROC_PRINT_UNIFICATIONS_DERIVED,
|
||||
std::env::set_var(roc_debug_flags::ROC_PRINT_UNIFICATIONS_DERIVED, "1")
|
||||
);
|
||||
let (mut solved_subs, _, problems, _) = roc_solve::module::run_solve(
|
||||
test_module,
|
||||
&constraints,
|
||||
constr,
|
||||
RigidVariables::default(),
|
||||
test_subs,
|
||||
default_aliases(),
|
||||
abilities_store,
|
||||
Default::default(),
|
||||
&exposed_for_module.exposed_by_module,
|
||||
Default::default(),
|
||||
);
|
||||
let subs = solved_subs.inner_mut();
|
||||
|
||||
if !problems.is_empty() {
|
||||
let filename = PathBuf::from("Test.roc");
|
||||
let lines = LineInfo::new(" ");
|
||||
let src_lines = vec![" "];
|
||||
let mut reports = Vec::new();
|
||||
let alloc = RocDocAllocator::new(&src_lines, test_module, interns);
|
||||
|
||||
for problem in problems.into_iter() {
|
||||
if let Some(report) = type_problem(&alloc, &lines, filename.clone(), problem.clone()) {
|
||||
reports.push(report);
|
||||
}
|
||||
}
|
||||
|
||||
let has_reports = !reports.is_empty();
|
||||
|
||||
let doc = alloc
|
||||
.stack(reports.into_iter().map(|v| v.pretty(&alloc)))
|
||||
.append(if has_reports {
|
||||
alloc.line()
|
||||
} else {
|
||||
alloc.nil()
|
||||
});
|
||||
|
||||
let mut buf = String::new();
|
||||
doc.1
|
||||
.render_raw(80, &mut roc_reporting::report::CiWrite::new(&mut buf))
|
||||
.unwrap();
|
||||
|
||||
panic!(
|
||||
"Derived does not typecheck:\n{}\nDerived def:\n{}",
|
||||
buf, derived_program
|
||||
);
|
||||
}
|
||||
|
||||
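// Build the golden string: one line per specialization lambda set, followed by
// the pretty-printed derived program.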
let golden = assemble_derived_golden(
|
||||
subs,
|
||||
test_module,
|
||||
interns,
|
||||
source_var,
|
||||
derived_program,
|
||||
def_var,
|
||||
specialization_lsets,
|
||||
);
|
||||
|
||||
check_golden(&golden)
|
||||
}
|
||||
|
||||
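/// Computes the derive key for `var`, panicking if the builtin cannot be
/// derived for that type via a key.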
fn get_key(builtin: DeriveBuiltin, subs: &Subs, var: Variable) -> DeriveKey {
|
||||
match Derived::builtin(builtin, subs, var) {
|
||||
Ok(Derived::Key(key)) => key,
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
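/// Loads the builtin module for `builtin`, synthesizes an input type with
/// `synth_input`, derives the corresponding implementation, and passes its
/// pretty-printed source to `check_golden` after verifying it typechecks.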
pub(crate) fn derive_test<S>(builtin: DeriveBuiltin, synth_input: S, check_golden: impl Fn(&str))
|
||||
where
|
||||
S: FnOnce(&mut Subs) -> Variable,
|
||||
{
|
||||
let arena = Bump::new();
|
||||
let (builtin_module, source, path) = module_source_and_path(builtin);
|
||||
let target_info = roc_target::TargetInfo::default_x86_64();
|
||||
|
||||
let LoadedModule {
|
||||
mut interns,
|
||||
exposed_types_storage,
|
||||
abilities_store,
|
||||
resolved_implementations,
|
||||
..
|
||||
} = roc_load_internal::file::load_and_typecheck_str(
|
||||
&arena,
|
||||
path.file_name().unwrap().into(),
|
||||
source,
|
||||
path.parent().unwrap().to_path_buf(),
|
||||
Default::default(),
|
||||
target_info,
|
||||
roc_reporting::report::RenderTarget::ColorTerminal,
|
||||
Threading::AllAvailable,
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let mut subs = Subs::new();
|
||||
let ident_ids = IdentIds::default();
|
||||
let source_var = synth_input(&mut subs);
|
||||
let key = get_key(builtin, &subs, source_var);
|
||||
|
||||
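// Wrap the synthesized subs and fresh ident ids in a DerivedModule so we can
// ask it for the derived implementation behind `key`.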
let mut derived_module = unsafe { DerivedModule::from_components(subs, ident_ids) };
|
||||
|
||||
let mut exposed_by_module = ExposedByModule::default();
|
||||
exposed_by_module.insert(
|
||||
builtin_module,
|
||||
ExposedModuleTypes {
|
||||
exposed_types_storage_subs: exposed_types_storage.clone(),
|
||||
resolved_implementations,
|
||||
},
|
||||
);
|
||||
|
||||
let (_derived_symbol, derived_def, specialization_lsets) =
|
||||
derived_module.get_or_insert(&exposed_by_module, key);
|
||||
let specialization_lsets = specialization_lsets.clone();
|
||||
let derived_def = derived_def.clone();
|
||||
|
||||
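// Take the subs and ident ids back out of the derived module; they are needed
// to typecheck and pretty-print the derived def below.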
let (subs, ident_ids) = derived_module.decompose();
|
||||
|
||||
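// Register the derived module's idents so its symbols pretty-print with readable names.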
interns.all_ident_ids.insert(DERIVED_MODULE, ident_ids);
|
||||
DERIVED_MODULE.register_debug_idents(interns.all_ident_ids.get(&DERIVED_MODULE).unwrap());
|
||||
|
||||
let ctx = Ctx { interns: &interns };
|
||||
let derived_program = pretty_print_def(&ctx, &derived_def);
|
||||
|
||||
check_derived_typechecks_and_golden(
|
||||
derived_def,
|
||||
DERIVED_MODULE,
|
||||
subs,
|
||||
&interns,
|
||||
DeriveBuiltinEnv {
|
||||
module_id: builtin_module,
|
||||
exposed_types: exposed_types_storage,
|
||||
abilities_store,
|
||||
},
|
||||
source_var,
|
||||
&derived_program,
|
||||
specialization_lsets,
|
||||
check_golden,
|
||||
);
|
||||
}
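// Hedged usage sketch (illustrative only, not part of this diff): a golden test
// built on `derive_test` could look roughly like the following. `synth_str_var`
// and the `DeriveBuiltin::ToEncoder` variant name are assumptions made for the
// example, not taken from this change.
//
//     #[test]
//     fn to_encoder_for_str() {
//         derive_test(DeriveBuiltin::ToEncoder, synth_str_var, |golden| {
//             assert!(golden.contains("toEncoder"));
//         })
//     }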
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "test_gen"
|
||||
version = "0.1.0"
|
||||
version = "0.0.1"
|
||||
authors = ["The Roc Contributors"]
|
||||
license = "UPL-1.0"
|
||||
edition = "2021"
|
||||
|
|
|
@ -95,7 +95,7 @@ fn build_wasm_test_host() {
|
|||
|
||||
run_zig(&[
|
||||
"wasm-ld",
|
||||
bitcode::BUILTINS_WASM32_OBJ_PATH,
|
||||
&bitcode::get_builtins_wasm32_obj_path(),
|
||||
platform_path.to_str().unwrap(),
|
||||
WASI_COMPILER_RT_PATH,
|
||||
WASI_LIBC_PATH,
|
||||
|
|
|
@ -265,6 +265,7 @@ fn encode() {
|
|||
|
||||
#[test]
|
||||
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
|
||||
#[ignore = "running into weird let-generalization issue when a variable is only in output position, see #3660"]
|
||||
fn decode() {
|
||||
assert_evals_to!(
|
||||
indoc!(
|
||||
|
@ -349,7 +350,7 @@ fn encode_use_stdlib() {
|
|||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(any(feature = "gen-llvm"))]
|
||||
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
|
||||
fn encode_use_stdlib_without_wrapping_custom() {
|
||||
assert_evals_to!(
|
||||
indoc!(
|
||||
|
@ -374,7 +375,7 @@ fn encode_use_stdlib_without_wrapping_custom() {
|
|||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(any(feature = "gen-llvm"))]
|
||||
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
|
||||
fn to_encoder_encode_custom_has_capture() {
|
||||
assert_evals_to!(
|
||||
indoc!(
|
||||
|
@ -405,6 +406,9 @@ mod encode_immediate {
|
|||
#[cfg(feature = "gen-llvm")]
|
||||
use crate::helpers::llvm::assert_evals_to;
|
||||
|
||||
#[cfg(feature = "gen-wasm")]
|
||||
use crate::helpers::wasm::assert_evals_to;
|
||||
|
||||
#[cfg(all(test, any(feature = "gen-llvm", feature = "gen-wasm")))]
|
||||
use indoc::indoc;
|
||||
|
||||
|
@ -412,7 +416,7 @@ mod encode_immediate {
|
|||
use roc_std::RocStr;
|
||||
|
||||
#[test]
|
||||
#[cfg(any(feature = "gen-llvm"))]
|
||||
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
|
||||
fn string() {
|
||||
assert_evals_to!(
|
||||
indoc!(
|
||||
|
@ -433,7 +437,7 @@ mod encode_immediate {
|
|||
macro_rules! num_immediate {
|
||||
($($num:expr, $typ:ident)*) => {$(
|
||||
#[test]
|
||||
#[cfg(any(feature = "gen-llvm"))]
|
||||
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
|
||||
fn $typ() {
|
||||
assert_evals_to!(
|
||||
&format!(indoc!(
|
||||
|
@ -471,7 +475,7 @@ mod encode_immediate {
|
|||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(any(feature = "gen-llvm"))]
|
||||
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
|
||||
fn encode_derived_record_one_field_string() {
|
||||
assert_evals_to!(
|
||||
indoc!(
|
||||
|
@ -493,7 +497,7 @@ fn encode_derived_record_one_field_string() {
|
|||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(any(feature = "gen-llvm"))]
|
||||
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
|
||||
fn encode_derived_record_two_fields_strings() {
|
||||
assert_evals_to!(
|
||||
indoc!(
|
||||
|
@ -516,7 +520,7 @@ fn encode_derived_record_two_fields_strings() {
|
|||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(any(feature = "gen-llvm"))]
|
||||
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
|
||||
fn encode_derived_nested_record_string() {
|
||||
assert_evals_to!(
|
||||
indoc!(
|
||||
|
@ -540,7 +544,7 @@ fn encode_derived_nested_record_string() {
|
|||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(any(feature = "gen-llvm"))]
|
||||
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
|
||||
fn encode_derived_tag_one_payload_string() {
|
||||
assert_evals_to!(
|
||||
indoc!(
|
||||
|
@ -564,7 +568,7 @@ fn encode_derived_tag_one_payload_string() {
|
|||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(any(feature = "gen-llvm"))]
|
||||
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
|
||||
fn encode_derived_tag_two_payloads_string() {
|
||||
assert_evals_to!(
|
||||
indoc!(
|
||||
|
@ -588,7 +592,7 @@ fn encode_derived_tag_two_payloads_string() {
|
|||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(any(feature = "gen-llvm"))]
|
||||
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
|
||||
fn encode_derived_nested_tag_string() {
|
||||
assert_evals_to!(
|
||||
indoc!(
|
||||
|
@ -613,7 +617,7 @@ fn encode_derived_nested_tag_string() {
|
|||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(any(feature = "gen-llvm"))]
|
||||
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
|
||||
fn encode_derived_nested_record_tag_record() {
|
||||
assert_evals_to!(
|
||||
indoc!(
|
||||
|
@ -638,7 +642,7 @@ fn encode_derived_nested_record_tag_record() {
|
|||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(any(feature = "gen-llvm"))]
|
||||
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
|
||||
fn encode_derived_list_string() {
|
||||
assert_evals_to!(
|
||||
indoc!(
|
||||
|
@ -662,7 +666,7 @@ fn encode_derived_list_string() {
|
|||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(any(feature = "gen-llvm"))]
|
||||
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
|
||||
fn encode_derived_list_of_records() {
|
||||
assert_evals_to!(
|
||||
indoc!(
|
||||
|
@ -684,3 +688,273 @@ fn encode_derived_list_of_records() {
|
|||
RocStr
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
|
||||
#[ignore = "#3696: Currently hits some weird panic in borrow checking, not sure if it's directly related to abilities."]
|
||||
fn encode_derived_list_of_lists_of_strings() {
|
||||
assert_evals_to!(
|
||||
indoc!(
|
||||
r#"
|
||||
app "test"
|
||||
imports [Encode.{ toEncoder }, Json]
|
||||
provides [main] to "./platform"
|
||||
|
||||
main =
|
||||
lst = [["a", "b"], ["c", "d", "e"], ["f"]]
|
||||
encoded = Encode.toBytes lst Json.toUtf8
|
||||
result = Str.fromUtf8 encoded
|
||||
when result is
|
||||
Ok s -> s
|
||||
_ -> "<bad>"
|
||||
"#
|
||||
),
|
||||
RocStr::from(r#"[["a","b"],["c","d","e"],["f"]]"#),
|
||||
RocStr
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(all(
|
||||
any(feature = "gen-llvm", feature = "gen-wasm"),
|
||||
not(feature = "gen-llvm-wasm") // hits a stack limit in wasm3
|
||||
))]
|
||||
fn encode_derived_record_with_many_types() {
|
||||
assert_evals_to!(
|
||||
indoc!(
|
||||
r#"
|
||||
app "test"
|
||||
imports [Encode.{ toEncoder }, Json]
|
||||
provides [main] to "./platform"
|
||||
|
||||
main =
|
||||
fresh : [Fresh Str, Rotten Str]
|
||||
fresh = Fresh "tomatoes"
|
||||
rcd = {actors: ["Idris Elba", "Mila Kunis"], year: 2004u16, rating: {average: 7u8, min: 1u8, max: 10u8, sentiment: fresh}}
|
||||
result = Str.fromUtf8 (Encode.toBytes rcd Json.toUtf8)
|
||||
when result is
|
||||
Ok s -> s
|
||||
_ -> "<bad>"
|
||||
"#
|
||||
),
|
||||
RocStr::from(
|
||||
r#"{"actors":["Idris Elba","Mila Kunis"],"rating":{"average":7,"max":10,"min":1,"sentiment":{"Fresh":["tomatoes"]}},"year":2004}"#
|
||||
),
|
||||
RocStr
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
|
||||
fn decode_use_stdlib() {
|
||||
assert_evals_to!(
|
||||
indoc!(
|
||||
r#"
|
||||
app "test"
|
||||
imports [Decode.{ Decoding }, Json]
|
||||
provides [main] to "./platform"
|
||||
|
||||
MyNum := U8 has [Decoding {decoder: myDecoder}]
|
||||
|
||||
myDecoder =
|
||||
Decode.custom \bytes, fmt ->
|
||||
when Decode.decodeWith bytes Decode.u8 fmt is
|
||||
{result, rest} ->
|
||||
when result is
|
||||
Ok n -> {result: Ok (@MyNum n), rest}
|
||||
Err e -> {result: Err e, rest}
|
||||
|
||||
main =
|
||||
when Decode.fromBytes [49, 53] Json.fromUtf8 is
|
||||
Ok (@MyNum n) -> n
|
||||
_ -> 101
|
||||
"#
|
||||
),
|
||||
15,
|
||||
u8
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
|
||||
fn decode_use_stdlib_json_list() {
|
||||
assert_evals_to!(
|
||||
indoc!(
|
||||
r#"
|
||||
app "test"
|
||||
imports [Decode.{ Decoding }, Json]
|
||||
provides [main] to "./platform"
|
||||
|
||||
MyNumList := List U8 has [Decoding {decoder: myDecoder}]
|
||||
|
||||
myDecoder =
|
||||
Decode.custom \bytes, fmt ->
|
||||
when Decode.decodeWith bytes (Decode.list Decode.u8) fmt is
|
||||
{result, rest} ->
|
||||
when result is
|
||||
Ok lst -> {result: Ok (@MyNumList lst), rest}
|
||||
Err e -> {result: Err e, rest}
|
||||
|
||||
main =
|
||||
when Str.toUtf8 "[1,2,3]" |> Decode.fromBytes Json.fromUtf8 is
|
||||
Ok (@MyNumList lst) -> lst
|
||||
_ -> []
|
||||
"#
|
||||
),
|
||||
RocList::from_slice(&[1u8, 2u8, 3u8]),
|
||||
RocList<u8>
|
||||
)
|
||||
}
|
||||
|
||||
mod decode_immediate {
|
||||
#[cfg(feature = "gen-llvm")]
|
||||
use crate::helpers::llvm::assert_evals_to;
|
||||
|
||||
#[cfg(feature = "gen-wasm")]
|
||||
use crate::helpers::wasm::assert_evals_to;
|
||||
|
||||
#[cfg(all(test, any(feature = "gen-llvm", feature = "gen-wasm")))]
|
||||
use indoc::indoc;
|
||||
|
||||
#[cfg(all(test, any(feature = "gen-llvm", feature = "gen-wasm")))]
|
||||
use roc_std::RocStr;
|
||||
|
||||
#[test]
|
||||
#[cfg(any(feature = "gen-llvm"))]
|
||||
fn string() {
|
||||
assert_evals_to!(
|
||||
indoc!(
|
||||
r#"
|
||||
app "test" imports [Decode, Json] provides [main] to "./platform"
|
||||
|
||||
main =
|
||||
when Str.toUtf8 "\"foo\"" |> Decode.fromBytes Json.fromUtf8 is
|
||||
Ok s -> s
|
||||
_ -> "<bad>"
|
||||
"#
|
||||
),
|
||||
RocStr::from("foo"),
|
||||
RocStr
|
||||
)
|
||||
}
|
||||
|
||||
macro_rules! num_immediate {
|
||||
($($num:expr, $typ:ident)*) => {$(
|
||||
#[test]
|
||||
#[cfg(any(feature = "gen-llvm"))]
|
||||
fn $typ() {
|
||||
assert_evals_to!(
|
||||
&format!(indoc!(
|
||||
r#"
|
||||
app "test" imports [Decode, Json] provides [main] to "./platform"
|
||||
|
||||
main =
|
||||
when Num.toStr {}{} |> Str.toUtf8 |> Decode.fromBytes Json.fromUtf8 is
|
||||
Ok n -> n
|
||||
_ -> 101{}
|
||||
"#
|
||||
), $num, stringify!($typ), stringify!($typ)),
|
||||
$num,
|
||||
$typ
|
||||
)
|
||||
}
|
||||
)*}
|
||||
}
|
||||
|
||||
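// Each `value, type` pair below expands to a test named after the type that
// round-trips the value through Num.toStr and Decode.fromBytes.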
num_immediate! {
|
||||
17, i8
|
||||
17, i16
|
||||
17, i32
|
||||
17, i64
|
||||
17, i128
|
||||
17, u8
|
||||
17, u16
|
||||
17, u32
|
||||
17, u64
|
||||
17, u128
|
||||
17.23, f32
|
||||
17.23, f64
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(any(feature = "gen-llvm"))]
|
||||
fn dec() {
|
||||
use roc_std::RocDec;
|
||||
|
||||
assert_evals_to!(
|
||||
indoc!(
|
||||
r#"
|
||||
app "test" imports [Decode, Json] provides [main] to "./platform"
|
||||
|
||||
main =
|
||||
when Num.toStr 17.23dec |> Str.toUtf8 |> Decode.fromBytes Json.fromUtf8 is
|
||||
Ok n -> n
|
||||
_ -> 101dec
|
||||
"#
|
||||
),
|
||||
RocDec::from_str("17.23").unwrap(),
|
||||
RocDec
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(any(feature = "gen-llvm"))]
|
||||
fn decode_list_of_strings() {
|
||||
assert_evals_to!(
|
||||
indoc!(
|
||||
r#"
|
||||
app "test" imports [Decode, Json] provides [main] to "./platform"
|
||||
|
||||
main =
|
||||
when Str.toUtf8 "[\"a\",\"b\",\"c\"]" |> Decode.fromBytes Json.fromUtf8 is
|
||||
Ok l -> Str.joinWith l ","
|
||||
_ -> "<bad>"
|
||||
"#
|
||||
),
|
||||
RocStr::from("a,b,c"),
|
||||
RocStr
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(all(
|
||||
any(feature = "gen-llvm"), // currently fails on gen-wasm
|
||||
not(feature = "gen-llvm-wasm") // hits a stack limit in wasm3
|
||||
))]
|
||||
fn encode_then_decode_list_of_strings() {
|
||||
assert_evals_to!(
|
||||
indoc!(
|
||||
r#"
|
||||
app "test" imports [Encode, Decode, Json] provides [main] to "./platform"
|
||||
|
||||
main =
|
||||
when Encode.toBytes ["a", "b", "c"] Json.fromUtf8 |> Decode.fromBytes Json.fromUtf8 is
|
||||
Ok l -> Str.joinWith l ","
|
||||
_ -> "something went wrong"
|
||||
"#
|
||||
),
|
||||
RocStr::from("a,b,c"),
|
||||
RocStr
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(any(feature = "gen-llvm"))]
|
||||
#[ignore = "#3696: Currently hits some weird panic in borrow checking, not sure if it's directly related to abilities."]
|
||||
fn encode_then_decode_list_of_lists_of_strings() {
|
||||
assert_evals_to!(
|
||||
indoc!(
|
||||
r#"
|
||||
app "test" imports [Encode, Decode, Json] provides [main] to "./platform"
|
||||
|
||||
main =
|
||||
when Encode.toBytes [["a", "b"], ["c", "d", "e"], ["f"]] Json.fromUtf8 |> Decode.fromBytes Json.fromUtf8 is
|
||||
Ok list -> (List.map list \inner -> Str.joinWith inner ",") |> Str.joinWith ";"
|
||||
_ -> "something went wrong"
|
||||
"#
|
||||
),
|
||||
RocStr::from("a,b;c,d,e;f"),
|
||||
RocStr
|
||||
)
|
||||
}
|
||||
|
|
|
@ -15,8 +15,6 @@ use indoc::indoc;
|
|||
#[allow(unused_imports)]
|
||||
use roc_std::{RocList, RocResult, RocStr};
|
||||
|
||||
use core::convert::Infallible;
|
||||
|
||||
#[test]
|
||||
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm", feature = "gen-dev"))]
|
||||
fn roc_list_construction() {
|
||||
|
@ -287,6 +285,8 @@ fn list_map_try_ok() {
|
|||
#[test]
|
||||
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
|
||||
fn list_map_try_err() {
|
||||
use core::convert::Infallible;
|
||||
|
||||
assert_evals_to!(
|
||||
r#"
|
||||
List.mapTry [1, 2, 3] \_ -> Err -1
|
||||
|
|