diff --git a/.github/workflows/nightly_macos_x86_64.yml b/.github/workflows/nightly_macos_x86_64.yml index ac4c896a6b..577cf1d7e2 100644 --- a/.github/workflows/nightly_macos_x86_64.yml +++ b/.github/workflows/nightly_macos_x86_64.yml @@ -24,6 +24,12 @@ jobs: run: zig version - name: Install LLVM run: brew install llvm@13 + # build has to be done before tests #2572 + - name: build release + uses: actions-rs/cargo@v1 + with: + command: build + args: --release --locked - name: execute rust tests uses: actions-rs/cargo@v1 with: @@ -31,11 +37,6 @@ jobs: args: --locked # no --release yet until #3166 is fixed - name: write version to file run: ./ci/write_version.sh - - name: build release - uses: actions-rs/cargo@v1 - with: - command: build - args: --release --locked - name: package release run: ./ci/package_release.sh roc_darwin_x86_64.tar.gz - name: Create pre-release with test_archive.tar.gz diff --git a/.gitignore b/.gitignore index b841a5c80f..0a15f36169 100644 --- a/.gitignore +++ b/.gitignore @@ -8,6 +8,8 @@ zig-cache *.obj *.tmp *.wasm +*.exe +*.pdb # llvm human-readable output *.ll diff --git a/AUTHORS b/AUTHORS index a75abebe7e..1e4d2b8be0 100644 --- a/AUTHORS +++ b/AUTHORS @@ -86,3 +86,6 @@ Jared Forsyth Patrick Kilgore Marten/Qqwy Christoph Rüßler +Ralf Engbers +Mostly Void <7rat13@gmail.com> +Luis F. Gutierrez diff --git a/BUILDING_FROM_SOURCE.md b/BUILDING_FROM_SOURCE.md index 01a56bc87b..ced14f0806 100644 --- a/BUILDING_FROM_SOURCE.md +++ b/BUILDING_FROM_SOURCE.md @@ -71,6 +71,8 @@ To build the compiler, you need these installed: * `libxkbcommon` - macOS seems to have it already; on Ubuntu or Debian you can get it with `apt-get install libxkbcommon-dev` * On Debian/Ubuntu `sudo apt-get install pkg-config` * LLVM, see below for version +* [rust](https://rustup.rs/) +* Also run `cargo install bindgen` after installing rust. You may need to open a new terminal. 
To run the test suite (via `cargo test`), you additionally need to install: diff --git a/Cargo.lock b/Cargo.lock index 7f4c1a39fe..c6d1b6e6c4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -104,7 +104,7 @@ dependencies = [ [[package]] name = "arena-pool" -version = "0.1.0" +version = "0.0.1" [[package]] name = "arrayvec" @@ -3268,7 +3268,7 @@ checksum = "f1382d1f0a252c4bf97dc20d979a2fdd05b024acd7c2ed0f7595d7817666a157" [[package]] name = "repl_test" -version = "0.1.0" +version = "0.0.1" dependencies = [ "indoc", "lazy_static", @@ -3316,7 +3316,7 @@ dependencies = [ [[package]] name = "roc_alias_analysis" -version = "0.1.0" +version = "0.0.1" dependencies = [ "morphic_lib", "roc_collections", @@ -3355,7 +3355,7 @@ dependencies = [ [[package]] name = "roc_build" -version = "0.1.0" +version = "0.0.1" dependencies = [ "bumpalo", "inkwell 0.1.0", @@ -3380,6 +3380,7 @@ dependencies = [ "roc_target", "roc_types", "roc_unify", + "roc_utils", "serde_json", "target-lexicon", "tempfile", @@ -3388,21 +3389,21 @@ dependencies = [ [[package]] name = "roc_builtins" -version = "0.1.0" +version = "0.0.1" dependencies = [ "dunce", - "fs_extra", "lazy_static", "roc_collections", "roc_module", "roc_region", "roc_target", + "roc_utils", "tempfile", ] [[package]] name = "roc_can" -version = "0.1.0" +version = "0.0.1" dependencies = [ "bitvec 1.0.1", "bumpalo", @@ -3483,7 +3484,7 @@ dependencies = [ [[package]] name = "roc_collections" -version = "0.1.0" +version = "0.0.1" dependencies = [ "bitvec 1.0.1", "bumpalo", @@ -3495,7 +3496,7 @@ dependencies = [ [[package]] name = "roc_constrain" -version = "0.1.0" +version = "0.0.1" dependencies = [ "arrayvec 0.7.2", "roc_can", @@ -3509,11 +3510,11 @@ dependencies = [ [[package]] name = "roc_debug_flags" -version = "0.1.0" +version = "0.0.1" [[package]] name = "roc_derive" -version = "0.1.0" +version = "0.0.1" dependencies = [ "bumpalo", "roc_can", @@ -3528,7 +3529,7 @@ dependencies = [ [[package]] name = "roc_derive_key" -version = "0.1.0" +version = "0.0.1" dependencies = [ "roc_can", "roc_collections", @@ -3540,7 +3541,7 @@ dependencies = [ [[package]] name = "roc_docs" -version = "0.1.0" +version = "0.0.1" dependencies = [ "bumpalo", "peg", @@ -3564,7 +3565,7 @@ dependencies = [ [[package]] name = "roc_docs_cli" -version = "0.1.0" +version = "0.0.1" dependencies = [ "clap 3.2.11", "roc_docs", @@ -3572,7 +3573,7 @@ dependencies = [ [[package]] name = "roc_editor" -version = "0.1.0" +version = "0.0.1" dependencies = [ "arrayvec 0.7.2", "bumpalo", @@ -3621,11 +3622,11 @@ dependencies = [ [[package]] name = "roc_error_macros" -version = "0.1.0" +version = "0.0.1" [[package]] name = "roc_exhaustive" -version = "0.1.0" +version = "0.0.1" dependencies = [ "roc_collections", "roc_module", @@ -3634,7 +3635,7 @@ dependencies = [ [[package]] name = "roc_fmt" -version = "0.1.0" +version = "0.0.1" dependencies = [ "bumpalo", "indoc", @@ -3649,7 +3650,7 @@ dependencies = [ [[package]] name = "roc_gen_dev" -version = "0.1.0" +version = "0.0.1" dependencies = [ "bumpalo", "capstone", @@ -3674,7 +3675,7 @@ dependencies = [ [[package]] name = "roc_gen_llvm" -version = "0.1.0" +version = "0.0.1" dependencies = [ "bumpalo", "inkwell 0.1.0", @@ -3694,7 +3695,7 @@ dependencies = [ [[package]] name = "roc_gen_wasm" -version = "0.1.0" +version = "0.0.1" dependencies = [ "bitvec 1.0.1", "bumpalo", @@ -3709,7 +3710,7 @@ dependencies = [ [[package]] name = "roc_glue" -version = "0.1.0" +version = "0.0.1" dependencies = [ "bumpalo", "clap 3.2.11", @@ -3739,7 +3740,7 @@ dependencies = [ 
[[package]] name = "roc_highlight" -version = "0.1.0" +version = "0.0.1" dependencies = [ "peg", "roc_code_markup", @@ -3747,11 +3748,11 @@ dependencies = [ [[package]] name = "roc_ident" -version = "0.1.0" +version = "0.0.1" [[package]] name = "roc_late_solve" -version = "0.1.0" +version = "0.0.1" dependencies = [ "bumpalo", "roc_can", @@ -3766,7 +3767,7 @@ dependencies = [ [[package]] name = "roc_linker" -version = "0.1.0" +version = "0.0.1" dependencies = [ "bincode", "bumpalo", @@ -3786,7 +3787,7 @@ dependencies = [ [[package]] name = "roc_load" -version = "0.1.0" +version = "0.0.1" dependencies = [ "bumpalo", "roc_builtins", @@ -3801,7 +3802,7 @@ dependencies = [ [[package]] name = "roc_load_internal" -version = "0.1.0" +version = "0.0.1" dependencies = [ "bumpalo", "crossbeam", @@ -3835,7 +3836,7 @@ dependencies = [ [[package]] name = "roc_module" -version = "0.1.0" +version = "0.0.1" dependencies = [ "bumpalo", "lazy_static", @@ -3849,7 +3850,7 @@ dependencies = [ [[package]] name = "roc_mono" -version = "0.1.0" +version = "0.0.1" dependencies = [ "bumpalo", "hashbrown 0.12.2", @@ -3858,6 +3859,7 @@ dependencies = [ "roc_collections", "roc_debug_flags", "roc_derive", + "roc_derive_key", "roc_error_macros", "roc_exhaustive", "roc_late_solve", @@ -3873,7 +3875,7 @@ dependencies = [ [[package]] name = "roc_parse" -version = "0.1.0" +version = "0.0.1" dependencies = [ "bumpalo", "criterion", @@ -3890,7 +3892,7 @@ dependencies = [ [[package]] name = "roc_problem" -version = "0.1.0" +version = "0.0.1" dependencies = [ "roc_collections", "roc_module", @@ -3901,14 +3903,14 @@ dependencies = [ [[package]] name = "roc_region" -version = "0.1.0" +version = "0.0.1" dependencies = [ "static_assertions", ] [[package]] name = "roc_repl_cli" -version = "0.1.0" +version = "0.0.1" dependencies = [ "bumpalo", "const_format", @@ -3935,7 +3937,7 @@ dependencies = [ [[package]] name = "roc_repl_eval" -version = "0.1.0" +version = "0.0.1" dependencies = [ "bumpalo", "roc_builtins", @@ -3955,7 +3957,7 @@ dependencies = [ [[package]] name = "roc_repl_expect" -version = "0.1.0" +version = "0.0.1" dependencies = [ "bumpalo", "indoc", @@ -3985,7 +3987,7 @@ dependencies = [ [[package]] name = "roc_repl_wasm" -version = "0.1.0" +version = "0.0.1" dependencies = [ "bumpalo", "console_error_panic_hook", @@ -4007,7 +4009,7 @@ dependencies = [ [[package]] name = "roc_reporting" -version = "0.1.0" +version = "0.0.1" dependencies = [ "bumpalo", "distance", @@ -4037,7 +4039,7 @@ dependencies = [ [[package]] name = "roc_solve" -version = "0.1.0" +version = "0.0.1" dependencies = [ "arrayvec 0.7.2", "bumpalo", @@ -4070,7 +4072,7 @@ dependencies = [ [[package]] name = "roc_solve_problem" -version = "0.1.0" +version = "0.0.1" dependencies = [ "roc_can", "roc_collections", @@ -4083,7 +4085,7 @@ dependencies = [ [[package]] name = "roc_std" -version = "0.1.0" +version = "0.0.1" dependencies = [ "arrayvec 0.7.2", "static_assertions", @@ -4091,7 +4093,7 @@ dependencies = [ [[package]] name = "roc_target" -version = "0.1.0" +version = "0.0.1" dependencies = [ "strum", "strum_macros", @@ -4100,7 +4102,7 @@ dependencies = [ [[package]] name = "roc_test_utils" -version = "0.1.0" +version = "0.0.1" dependencies = [ "pretty_assertions", "remove_dir_all 0.7.0", @@ -4108,7 +4110,7 @@ dependencies = [ [[package]] name = "roc_types" -version = "0.1.0" +version = "0.0.1" dependencies = [ "bumpalo", "roc_collections", @@ -4121,7 +4123,7 @@ dependencies = [ [[package]] name = "roc_unify" -version = "0.1.0" +version = "0.0.1" dependencies = 
[ "bitflags", "roc_can", @@ -4135,7 +4137,7 @@ dependencies = [ [[package]] name = "roc_utils" -version = "0.1.0" +version = "0.0.1" dependencies = [ "snafu", ] @@ -4784,7 +4786,7 @@ dependencies = [ [[package]] name = "test_derive" -version = "0.1.0" +version = "0.0.1" dependencies = [ "bumpalo", "indoc", @@ -4810,7 +4812,7 @@ dependencies = [ [[package]] name = "test_gen" -version = "0.1.0" +version = "0.0.1" dependencies = [ "bumpalo", "criterion", @@ -4850,7 +4852,7 @@ dependencies = [ [[package]] name = "test_mono" -version = "0.1.0" +version = "0.0.1" dependencies = [ "bumpalo", "indoc", @@ -4867,7 +4869,7 @@ dependencies = [ [[package]] name = "test_mono_macros" -version = "0.1.0" +version = "0.0.1" dependencies = [ "proc-macro2", "quote", @@ -5186,7 +5188,7 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasi_libc_sys" -version = "0.1.0" +version = "0.0.1" [[package]] name = "wasm-bindgen" diff --git a/FAQ.md b/FAQ.md index 9a0290874b..607358d858 100644 --- a/FAQ.md +++ b/FAQ.md @@ -1,6 +1,36 @@ # Frequently Asked Questions +## Where did the name Roc come from? + +The Roc logo, an origami bird + +The Roc programming language is named after [a mythical bird](). + +That’s why the logo is a bird. It’s specifically an [_origami_ bird](https://youtu.be/9gni1t1k1uY) as an homage +to [Elm](https://elm-lang.org/)’s tangram logo. + +Roc is a direct descendant of Elm. The languages are similar, but not the same. +[Origami](https://en.wikipedia.org/wiki/Origami) likewise has similarities to [tangrams](https://en.wikipedia.org/wiki/Tangram), although they are not the same. +Both involve making a surprising variety of things +from simple primitives. [_Folds_]() +are also common in functional programming. + +The logo was made by tracing triangles onto a photo of a physical origami bird. +It’s made of triangles because triangles are a foundational primitive in +computer graphics. + +The name was chosen because it makes for a three-letter file extension, it means something +fantastical, and it has incredible potential for puns. Here are some different ways to spell it: + +- **Roc** - traditional +- **roc** - low-key +- **ROC** - [YELLING](https://package.elm-lang.org/packages/elm/core/latest/String#toUpper) +- **Röc** - [metal 🤘](https://en.wikipedia.org/wiki/Metal_umlaut) + +Fun fact: "roc" translates to 鹏 in Chinese, [which means](https://www.mdbg.net/chinese/dictionary?page=worddict&wdrst=0&wdqb=%E9%B9%8F) "a large fabulous bird." + # Why make a new editor instead of making an LSP plugin for VSCode, Vim or Emacs? + The Roc editor is one of the key areas where we want to innovate. Constraining ourselves to a plugin for existing editors would severely limit our possibilities for innovation. A key part of our editor will be the use of plugins that are shipped with libraries. Think of a regex visualizer, parser debugger, or color picker. For library authors, it would be most convenient to write these plugins in Roc. Trying to dynamically load library plugins (written in Roc) in for example VSCode seems very difficult. @@ -8,7 +38,7 @@ A key part of our editor will be the use of plugins that are shipped with librar ## Is there syntax highlighting for Vim/Emacs/VS Code or a LSP? Not currently. 
Although they will presumably exist someday, while Roc is in the early days there's actually a conscious -effort to focus on the Roc Editor *instead of* adding Roc support to other editors - specifically in order to give the Roc +effort to focus on the Roc Editor _instead of_ adding Roc support to other editors - specifically in order to give the Roc Editor the best possible chance at kickstarting a virtuous cycle of plugin authorship. This is an unusual approach, but there are more details in [this 2021 interview](https://youtu.be/ITrDd6-PbvY?t=212). @@ -68,16 +98,18 @@ Both of these would make revising code riskier across the entire language, which Another option would be to define that function equality always returns `False`. So both of these would evaluate to `False`: -* `(\x -> x + 1) == (\x -> 1 + x)` -* `(\x -> x + 1) == (\x -> x + 1)` +- `(\x -> x + 1) == (\x -> 1 + x)` +- `(\x -> x + 1) == (\x -> x + 1)` This makes function equality effectively useless, while still technically allowing it. It has some other downsides: -* Now if you put a function inside a record, using `==` on that record will still type-check, but it will then return `False`. This could lead to bugs if you didn't realize you had accidentally put a function in there - for example, because you were actually storing a different type (e.g. an opaque type) and didn't realize it had a function inside it. -* If you put a function (or a value containing a function) into a `Dict` or `Set`, you'll never be able to get it out again. This is a common problem with [NaN](https://en.wikipedia.org/wiki/NaN), which is also defined not to be equal to itself. + +- Now if you put a function inside a record, using `==` on that record will still type-check, but it will then return `False`. This could lead to bugs if you didn't realize you had accidentally put a function in there - for example, because you were actually storing a different type (e.g. an opaque type) and didn't realize it had a function inside it. +- If you put a function (or a value containing a function) into a `Dict` or `Set`, you'll never be able to get it out again. This is a common problem with [NaN](https://en.wikipedia.org/wiki/NaN), which is also defined not to be equal to itself. The first of these problems could be addressed by having function equality always return `True` instead of `False` (since that way it would not affect other fields' equality checks in a record), but that design has its own problems: -* Although function equality is still useless, `(\x -> x + 1) == (\x -> x)` returns `True`. Even if it didn't lead to bugs in practice, this would certainly be surprising and confusing to beginners. -* Now if you put several different functions into a `Dict` or `Set`, only one of them will be kept; the others will be discarded or overwritten. This could cause bugs if a value stored a function internally, and then other functions relied on that internal function for correctness. + +- Although function equality is still useless, `(\x -> x + 1) == (\x -> x)` returns `True`. Even if it didn't lead to bugs in practice, this would certainly be surprising and confusing to beginners. +- Now if you put several different functions into a `Dict` or `Set`, only one of them will be kept; the others will be discarded or overwritten. This could cause bugs if a value stored a function internally, and then other functions relied on that internal function for correctness. 
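For contrast with the hypothetical designs above, Roc's actual choice is to reject `==` on functions at compile time. Here is a minimal sketch of what that looks like in Roc source - the definitions are invented for illustration, and the exact compiler message is not reproduced here:

```
increment = \x -> x + 1
addOne = \x -> 1 + x

# Uncommenting the next line is a compile-time error in Roc,
# rather than something that evaluates to True or False:
# areSame = increment == addOne
```

Because the comparison is rejected outright, neither the always-`False` nor the always-`True` surprises described above can occur.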
Each of these designs makes Roc a language that's some combination of more error-prone, more confusing, and more brittle to change. Disallowing function equality at compile time eliminates all of these drawbacks. @@ -107,12 +139,12 @@ To describe something that's neither an optional field nor an operation that can more descriptive than something like `Maybe`. For example, if a record type has an `artist` field, but the artist information may not be available, compare these three alternative ways to represent that: -* `artist : Maybe Artist` -* `artist : [Loading, Loaded Artist]` -* `artist : [Unspecified, Specified Artist]` +- `artist : Maybe Artist` +- `artist : [Loading, Loaded Artist]` +- `artist : [Unspecified, Specified Artist]` All three versions tell us that we might not have access to an `Artist`. However, the `Maybe` version doesn't -tell us why that might be. The `Loading`/`Loaded` version tells us we don't have one *yet*, because we're +tell us why that might be. The `Loading`/`Loaded` version tells us we don't have one _yet_, because we're still loading it, whereas the `Unspecified`/`Specified` version tells us we don't have one and shouldn't expect to have one later if we wait, because it wasn't specified. @@ -135,8 +167,8 @@ _Since this is a FAQ answer, I'm going to assume familiarity with higher-kinded A valuable aspect of Roc's type system is that it has decidable [principal](https://en.wikipedia.org/wiki/Principal_type) type inference. This means that: -* At compile time, Roc can correctly infer the types for every expression in a program, even if you don't annotate any of the types. -* This inference always infers the most general type possible; you couldn't possibly add a valid type annotation that would make the type more flexible than the one that Roc would infer if you deleted the annotation. +- At compile time, Roc can correctly infer the types for every expression in a program, even if you don't annotate any of the types. +- This inference always infers the most general type possible; you couldn't possibly add a valid type annotation that would make the type more flexible than the one that Roc would infer if you deleted the annotation. It's been proven that any type system which supports either [higher-kinded polymorphism](https://www.cl.cam.ac.uk/~jdy22/papers/lightweight-higher-kinded-polymorphism.pdf) or [arbitrary-rank types](https://www.microsoft.com/en-us/research/wp-content/uploads/2016/02/putting.pdf) cannot have decidable principal type inference. With either of those features in the language, there will be situations where the compiler @@ -152,9 +184,9 @@ sacrificing principal type inference to attain, so let's focus on the trade-offs Supporting Rank-2 types in Roc has been discussed before, but it has several important downsides: -* It would increase the complexity of the language. -* It would make some compiler error messages more confusing (e.g. they might mention `forall` because that was the most general type that could be inferred, even if that wasn't helpful or related to the actual problem). -* It would substantially increase the complexity of the type checker, which would necessarily slow it down. +- It would increase the complexity of the language. +- It would make some compiler error messages more confusing (e.g. they might mention `forall` because that was the most general type that could be inferred, even if that wasn't helpful or related to the actual problem). 
+- It would substantially increase the complexity of the type checker, which would necessarily slow it down. No implementation of Rank-2 types can remove any of these downsides. Thus far, we've been able to come up with sufficiently nice APIs that only require Rank-1 types, and we haven't seen a really compelling use case @@ -201,9 +233,9 @@ Culturally, to support HKP is to take a side, and to decline to support it is al Given this, language designers have three options: -* Have HKP and have Monad in the standard library. Embrace them and build a culture and ecosystem around them. -* Have HKP and don't have Monad in the standard library. An alternate standard lbirary built around monads will inevitably emerge, and both the community and ecosystem will divide themselves along pro-monad and anti-monad lines. -* Don't have HKP; build a culture and ecosystem around other things. +- Have HKP and have Monad in the standard library. Embrace them and build a culture and ecosystem around them. +- Have HKP and don't have Monad in the standard library. An alternate standard library built around monads will inevitably emerge, and both the community and ecosystem will divide themselves along pro-monad and anti-monad lines. +- Don't have HKP; build a culture and ecosystem around other things. Considering that these are the only three options, I think the best choice for Roc—not only on a technical level, but on a cultural level as well—is to make it clear that the plan is for Roc never to support HKP. @@ -224,30 +256,30 @@ the result would be broken code and sadness. So why does Roc have the specific syntax changes it does? Here are some brief explanations: -* `#` instead of `--` for comments - this allows [hashbang](https://senthilnayagan.medium.com/shebang-hashbang-10966b8f28a8)s to work without needing special syntax. That isn't a use case Elm supports, but it is one Roc is designed to support. -* `{}` instead of `()` for the unit type - Elm has both, and they can both be used as a unit type. Since `{}` has other uses in the type system, but `()` doesn't, I consider it redundant and took it out. -* `when`...`is` instead of `case`...`of` - I predict it will be easier for beginners to pick up, because usually the way I explain `case`...`of` to beginners is by saying the words "when" and "is" out loud - e.g. "when `color` is `Red`, it runs this first branch; when `color` is `Blue`, it runs this other branch..." -* `:` instead of `=` for record field definitions (e.g. `{ foo: bar }` where Elm syntax would be `{ foo = bar }`): I like `=` being reserved for definitions, and `:` is the most popular alternative. -* Backpassing syntax - since Roc is designed to be used for use cases like command-line apps, shell scripts, and servers, I expect chained effects to come up a lot more often than they do in Elm. I think backpassing is nice for those use cases, similarly to how `do` notation is nice for them in Haskell. -* Tag unions instead of Elm's custom types (aka algebraic data types). This isn't just a syntactic change; tag unions are mainly in Roc because they can facilitate errors being accumulated across chained effects, which (as noted a moment ago) I expect to be a lot more common in Roc than in Elm. If you have tag unions, you don't really need a separate language feature for algebraic data types, since closed tag unions essentially work the same way - aside from not giving you a way to selectively expose variants or define phantom types.
Roc's opaque types language feature covers those use cases instead. -* No `::` operator, or `::` pattern matching for lists. Both of these are for the same reason: an Elm `List` is a linked list, so both prepending to it and removing an element from the front are very cheap operations. In contrast, a Roc `List` is a flat array, so both prepending to it and removing an element from the front are among the most expensive operations you can possibly do with it! To get good performance, this usage pattern should be encouraged in Elm and discouraged in Roc. Since having special syntax would encourage it, it would not be good for Roc to have that syntax! -* No `<|` operator. In Elm, I almost exclusively found myself wanting to use this in conjunction with anonymous functions (e.g. `foo <| \bar -> ...`) or conditionals (e.g. `foo <| if bar then ...`). In Roc you can do both of these without the `<|`. That means the main remaining use for `<|` is to reduce parentheses, but I tend to think `|>` is better at that (or else the parens are fine), so after the other syntactic changes, I considered `<|` an unnecessary stylistic alternative to `|>` or parens. -* The `|>` operator passes the expression before the `|>` as the *first* argument to the function after the `|>` instead of as the last argument. See the section on currying for details on why this works this way. -* `:` instead of `type alias` - I like to avoid reserved keywords for terms that are desirable in userspace, so that people don't have to name things `typ` because `type` is a reserved keyword, or `clazz` because `class` is reserved. (I couldn't think of satisfactory alternatives for `as`, `when`, `is`, or `if` other than different reserved keywords. I could see an argument for `then`—and maybe even `is`—being replaced with a `->` or `=>` or something, but I don't anticipate missing either of those words much in userspace. `then` is used in JavaScript promises, but I think there are several better names for that function.) -* No underscores in variable names - I've seen Elm beginners reflexively use `snake_case` over `camelCase` and then need to un-learn the habit after the compiler accepted it. I'd rather have the compiler give feedback that this isn't the way to do it in Roc, and suggest a camelCase alternative. I've also seen underscores used for lazy naming, e.g. `foo` and then `foo_`. If lazy naming is the goal, `foo2` is just as concise as `foo_`, but `foo3` is more concise than `foo__`. So in a way, removing `_` is a forcing function for improved laziness. (Of course, more descriptive naming would be even better.) -* Trailing commas - I've seen people walk away (in some cases physically!) from Elm as soon as they saw the leading commas in collection literals. While I think they've made a mistake by not pushing past this aesthetic preference to give the language a chance, I also would prefer not put them in a position to make such a mistake in the first place. Secondarily, while I'm personally fine with either style, between the two I prefer the look of trailing commas. -* The `!` unary prefix operator. I didn't want to have a `Basics` module (more on that in a moment), and without `Basics`, this would either need to be called fully-qualified (`Bool.not`) or else a module import of `Bool.{ not }` would be necessary. Both seemed less nice than supporting the `!` prefix that's common to so many widely-used languages, especially when we already have a unary prefix operator of `-` for negation (e.g. `-x`). 
-* `!=` for the inequality operator (instead of Elm's `/=`) - this one pairs more naturally with the `!` prefix operator and is also very common in other languages. +- `#` instead of `--` for comments - this allows [hashbang](https://senthilnayagan.medium.com/shebang-hashbang-10966b8f28a8)s to work without needing special syntax. That isn't a use case Elm supports, but it is one Roc is designed to support. +- `{}` instead of `()` for the unit type - Elm has both, and they can both be used as a unit type. Since `{}` has other uses in the type system, but `()` doesn't, I consider it redundant and took it out. +- `when`...`is` instead of `case`...`of` - I predict it will be easier for beginners to pick up, because usually the way I explain `case`...`of` to beginners is by saying the words "when" and "is" out loud - e.g. "when `color` is `Red`, it runs this first branch; when `color` is `Blue`, it runs this other branch..." +- `:` instead of `=` for record field definitions (e.g. `{ foo: bar }` where Elm syntax would be `{ foo = bar }`): I like `=` being reserved for definitions, and `:` is the most popular alternative. +- Backpassing syntax - since Roc is designed to be used for use cases like command-line apps, shell scripts, and servers, I expect chained effects to come up a lot more often than they do in Elm. I think backpassing is nice for those use cases, similarly to how `do` notation is nice for them in Haskell. +- Tag unions instead of Elm's custom types (aka algebraic data types). This isn't just a syntactic change; tag unions are mainly in Roc because they can facilitate errors being accumulated across chained effects, which (as noted a moment ago) I expect to be a lot more common in Roc than in Elm. If you have tag unions, you don't really need a separate language feature for algebraic data types, since closed tag unions essentially work the same way - aside from not giving you a way to selectively expose variants or define phantom types. Roc's opaque types language feature covers those use cases instead. +- No `::` operator, or `::` pattern matching for lists. Both of these are for the same reason: an Elm `List` is a linked list, so both prepending to it and removing an element from the front are very cheap operations. In contrast, a Roc `List` is a flat array, so both prepending to it and removing an element from the front are among the most expensive operations you can possibly do with it! To get good performance, this usage pattern should be encouraged in Elm and discouraged in Roc. Since having special syntax would encourage it, it would not be good for Roc to have that syntax! +- No `<|` operator. In Elm, I almost exclusively found myself wanting to use this in conjunction with anonymous functions (e.g. `foo <| \bar -> ...`) or conditionals (e.g. `foo <| if bar then ...`). In Roc you can do both of these without the `<|`. That means the main remaining use for `<|` is to reduce parentheses, but I tend to think `|>` is better at that (or else the parens are fine), so after the other syntactic changes, I considered `<|` an unnecessary stylistic alternative to `|>` or parens. +- The `|>` operator passes the expression before the `|>` as the _first_ argument to the function after the `|>` instead of as the last argument. See the section on currying for details on why this works this way. 
+- `:` instead of `type alias` - I like to avoid reserved keywords for terms that are desirable in userspace, so that people don't have to name things `typ` because `type` is a reserved keyword, or `clazz` because `class` is reserved. (I couldn't think of satisfactory alternatives for `as`, `when`, `is`, or `if` other than different reserved keywords. I could see an argument for `then`—and maybe even `is`—being replaced with a `->` or `=>` or something, but I don't anticipate missing either of those words much in userspace. `then` is used in JavaScript promises, but I think there are several better names for that function.) +- No underscores in variable names - I've seen Elm beginners reflexively use `snake_case` over `camelCase` and then need to un-learn the habit after the compiler accepted it. I'd rather have the compiler give feedback that this isn't the way to do it in Roc, and suggest a camelCase alternative. I've also seen underscores used for lazy naming, e.g. `foo` and then `foo_`. If lazy naming is the goal, `foo2` is just as concise as `foo_`, but `foo3` is more concise than `foo__`. So in a way, removing `_` is a forcing function for improved laziness. (Of course, more descriptive naming would be even better.) +- Trailing commas - I've seen people walk away (in some cases physically!) from Elm as soon as they saw the leading commas in collection literals. While I think they've made a mistake by not pushing past this aesthetic preference to give the language a chance, I also would prefer not to put them in a position to make such a mistake in the first place. Secondarily, while I'm personally fine with either style, between the two I prefer the look of trailing commas. +- The `!` unary prefix operator. I didn't want to have a `Basics` module (more on that in a moment), and without `Basics`, this would either need to be called fully-qualified (`Bool.not`) or else a module import of `Bool.{ not }` would be necessary. Both seemed less nice than supporting the `!` prefix that's common to so many widely-used languages, especially when we already have a unary prefix operator of `-` for negation (e.g. `-x`). +- `!=` for the inequality operator (instead of Elm's `/=`) - this one pairs more naturally with the `!` prefix operator and is also very common in other languages. Roc also has a different standard library from Elm. Some of the differences come down to platforms and applications (e.g. having `Task` in Roc's standard library wouldn't make sense), but others do not. Here are some brief explanations: -* No `Basics` module. I wanted to have a simple rule of "all modules in the standard library are imported by default, and so are their exposed types," and that's it. Given that I wanted the comparison operators (e.g. `<`) to work only on numbers, it ended up that having `Num` and `Bool` modules meant that almost nothing would be left for a `Basics` equivalent in Roc except `identity` and `Never`. The Roc type `[]` (empty tag union) is equivalent to `Never`, so that wasn't necessary, and I generally think that `identity` is a good concept but a sign of an incomplete API whenever its use comes up in practice. For example, instead of calling `|> List.filterMap identity` I'd rather have access to a more self-descriptive function like `|> List.dropNothings`. With `Num` and `Bool`, and without `identity` and `Never`, there was nothing left in `Basics`.
-* `Str` instead of `String` - after using the `str` type in Rust, I realized I had no issue whatsoever with the more concise name, especially since it was used in so many places (similar to `Msg` and `Cmd` in Elm) - so I decided to save a couple of letters. -* No function composition operators - I stopped using these in Elm so long ago, at one point I forgot they were in the language! See the FAQ entry on currying for details about why. -* No `Char`. What most people think of as a "character" is a rendered glyph. However, rendered glyphs are comprised of [grapheme clusters](https://stackoverflow.com/a/27331885), which are a variable number of Unicode code points - and there's no upper bound on how many code points there can be in a single cluster. In a world of emoji, I think this makes `Char` error-prone and it's better to have `Str` be the only first-class unit. For convenience when working with unicode code points (e.g. for performance-critical tasks like parsing), the single-quote syntax is sugar for the corresponding `U32` code point - for example, writing `'鹏'` is exactly the same as writing `40527`. Like Rust, you get a compiler error if you put something in single quotes that's not a valid [Unicode scalar value](http://www.unicode.org/glossary/#unicode_scalar_value). -* No `Debug.log` - the editor can do a better job at this, or you can write `expect x != x` to see what `x` is when the expectation fails. Using the editor means your code doesn't change, and using `expect` gives a natural reminder to remove the debugging code before shipping: the build will fail. -* No `Debug.todo` - instead you can write a type annotation with no implementation below it; the type checker will treat it normally, but attempting to use the value will cause a runtime exception. This is a feature I've often wanted in Elm, because I like prototyping APIs by writing out the types only, but then when I want the compiler to type-check them for me, I end up having to add `Debug.todo` in various places. -* No `Maybe`. See the "Why doesn't Roc have a `Maybe`/`Option`/`Optional` type" FAQ question +- No `Basics` module. I wanted to have a simple rule of "all modules in the standard library are imported by default, and so are their exposed types," and that's it. Given that I wanted the comparison operators (e.g. `<`) to work only on numbers, it ended up that having `Num` and `Bool` modules meant that almost nothing would be left for a `Basics` equivalent in Roc except `identity` and `Never`. The Roc type `[]` (empty tag union) is equivalent to `Never`, so that wasn't necessary, and I generally think that `identity` is a good concept but a sign of an incomplete API whenever its use comes up in practice. For example, instead of calling `|> List.filterMap identity` I'd rather have access to a more self-descriptive function like `|> List.dropNothings`. With `Num` and `Bool`, and without `identity` and `Never`, there was nothing left in `Basics`. +- `Str` instead of `String` - after using the `str` type in Rust, I realized I had no issue whatsoever with the more concise name, especially since it was used in so many places (similar to `Msg` and `Cmd` in Elm) - so I decided to save a couple of letters. +- No function composition operators - I stopped using these in Elm so long ago, at one point I forgot they were in the language! See the FAQ entry on currying for details about why. +- No `Char`. What most people think of as a "character" is a rendered glyph. 
However, rendered glyphs are comprised of [grapheme clusters](https://stackoverflow.com/a/27331885), which are a variable number of Unicode code points - and there's no upper bound on how many code points there can be in a single cluster. In a world of emoji, I think this makes `Char` error-prone and it's better to have `Str` be the only first-class unit. For convenience when working with unicode code points (e.g. for performance-critical tasks like parsing), the single-quote syntax is sugar for the corresponding `U32` code point - for example, writing `'鹏'` is exactly the same as writing `40527`. Like Rust, you get a compiler error if you put something in single quotes that's not a valid [Unicode scalar value](http://www.unicode.org/glossary/#unicode_scalar_value). +- No `Debug.log` - the editor can do a better job at this, or you can write `expect x != x` to see what `x` is when the expectation fails. Using the editor means your code doesn't change, and using `expect` gives a natural reminder to remove the debugging code before shipping: the build will fail. +- No `Debug.todo` - instead you can write a type annotation with no implementation below it; the type checker will treat it normally, but attempting to use the value will cause a runtime exception. This is a feature I've often wanted in Elm, because I like prototyping APIs by writing out the types only, but then when I want the compiler to type-check them for me, I end up having to add `Debug.todo` in various places. +- No `Maybe`. See the "Why doesn't Roc have a `Maybe`/`Option`/`Optional` type" FAQ question ## Why aren't Roc functions curried by default? @@ -259,15 +291,15 @@ by default" for the sake of brevity. As I see it, currying has one major upside and several major downsides. The upside: -* It makes function calls more concise in some cases. +- It makes function calls more concise in some cases. The downsides: -* It lowers error message quality, because there can no longer be an error for "function called with too few arguments." (Calling a function with fewer arguments is always valid in curried functions; the error you get instead will unavoidably be some other sort of type mismatch, and it will be up to you to figure out that the real problem was that you forgot an argument.) -* It makes the `|>` operator more error-prone in some cases. -* It makes higher-order function calls need more parentheses in some cases. -* It significantly increases the language's learning curve. (More on this later.) -* It facilitates pointfree function composition. (More on why this is listed as a downside later.) +- It lowers error message quality, because there can no longer be an error for "function called with too few arguments." (Calling a function with fewer arguments is always valid in curried functions; the error you get instead will unavoidably be some other sort of type mismatch, and it will be up to you to figure out that the real problem was that you forgot an argument.) +- It makes the `|>` operator more error-prone in some cases. +- It makes higher-order function calls need more parentheses in some cases. +- It significantly increases the language's learning curve. (More on this later.) +- It facilitates pointfree function composition. (More on why this is listed as a downside later.) There's also a downside that it would make runtime performance of compiled programs worse by default, but I assume it would be possible to optimize that away at the cost of slightly longer compile times. 
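To make the "too few arguments" point concrete, here is a minimal hypothetical sketch in Roc, using only `Str.concat` from the examples in this document (the definition names are invented). Because Roc functions are not curried, omitting an argument is reported as a call with too few arguments instead of silently producing a partially applied function:

```
greeting = Str.concat "Hello, " "World!"

# Uncommenting the next line does not create a partially applied function;
# it is reported as a call with too few arguments:
# broken = Str.concat "Hello, "
```

That dedicated arity error is exactly what a curried design would give up.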
@@ -284,8 +316,8 @@ In Roc, this code produces `"Hello, World!"` |> Str.concat "!" ``` -This is because Roc's `|>` operator uses the expression before the `|>` as the *first* argument to the function -after it. For functions where both arguments have the same type, but it's obvious which argument goes where (e.g. +This is because Roc's `|>` operator uses the expression before the `|>` as the _first_ argument to the function +after it. For functions where both arguments have the same type, but it's obvious which argument goes where (e.g. `Str.concat "Hello, " "World!"`, `List.concat [1, 2] [3, 4]`), this works out well. Another example would be `|> Num.sub 1`, which subtracts 1 from whatever came before the `|>`. @@ -318,7 +350,7 @@ This is a fundamental design tension. One argument order works well with `|>` (a today) and with passing anonymous functions to higher-order functions, and the other works well with currying. It's impossible to have both. -Of note, one possible design is to have currying while also having `|>` pass the *last* argument instead of the first. +Of note, one possible design is to have currying while also having `|>` pass the _last_ argument instead of the first. This is what Elm does, and it makes pipeline-friendliness and curry-friendliness the same thing. However, it also means that either `|> Str.concat "!"` would add the `"!"` to the front of the string, or else `Str.concat`'s arguments would have to be flipped - meaning that `Str.concat "Hello, World" "!"` would evaluate to `"!Hello, World"`. @@ -338,9 +370,9 @@ first pure functional programming language. Here was my experience teaching currying: -* The only way to avoid teaching it is to refuse to explain why multi-argument functions have multiple `->`s in them. (If you don't explain it, at least one student will ask about it - and many if not all of the others will wonder.) -* Teaching currying properly takes a solid chunk of time, because it requires explaining partial application, explaining how curried functions facilitate partial application, how function signatures accurately reflect that they're curried, and going through examples for all of these. -* Even after doing all this, and iterating on my approach each time to try to explain it more effectively than I had the time before, I'd estimate that under 50% of the class ended up actually understanding currying. I consistently heard that in practice it only "clicked" for most people after spending significantly more time writing code with it. +- The only way to avoid teaching it is to refuse to explain why multi-argument functions have multiple `->`s in them. (If you don't explain it, at least one student will ask about it - and many if not all of the others will wonder.) +- Teaching currying properly takes a solid chunk of time, because it requires explaining partial application, explaining how curried functions facilitate partial application, how function signatures accurately reflect that they're curried, and going through examples for all of these. +- Even after doing all this, and iterating on my approach each time to try to explain it more effectively than I had the time before, I'd estimate that under 50% of the class ended up actually understanding currying. I consistently heard that in practice it only "clicked" for most people after spending significantly more time writing code with it. 
This is not the end of the world, especially because it's easy enough to think "okay, I still don't totally get this even after that explanation, but I can remember that function arguments are separated by `->` in this language @@ -396,10 +428,47 @@ Currying facilitates the antipattern of pointfree function composition, which I Stacking up all these downsides of currying against the one upside of making certain function calls more concise, I concluded that it would be a mistake to have it in Roc. -## Why are both rust and zig used? +## Will Roc ever have linear types, dependent types, refinement types, or uniqueness types? -At the start of the project, we did not know zig well and it was not production ready. The reason zig entered the project because it has many different backends (wasm, various assembly formats, llvm IR) and can create code with minimal dependencies +The plan is for Roc to never have linear types, dependent types, refinement types, or uniqueness types. -Rust has much more overhead in terms of code size. It's objectively not a lot, but it's less with zig. +Fast compile times are a primary goal for Roc, and a major downside of refinement types is an exponential increase in compile times. This rules out refinement types for Roc. -We think rust is a nicer language to work in for a project of this size. It has a type system that we're more familiar with, it has a package ecosystem and excellent tooling. +If Roc were to have linear types or uniqueness types, they would move things that are currently behind-the-scenes performance optimizations into the type system. For them to be effective across the ecosystem, they couldn't really be opt-in; everyone would have to use them, even those for whom the current system of behind-the-scenes optimizations already met their performance needs without any added type system complexity. Since the overwhelming majority of use cases are expected to fall into that latter group, adding linear types or uniqueness types to Roc would be a net negative for the ecosystem. + +Dependent types are too risky of a bet for Roc to take. They have been implemented in programming languages for three decades, and for at least half that time period, it has been easy to find predictions that dependent types will be the future of type systems. Much harder to find are success stories of complex applications built with dependent types, which realized benefits that significantly outweighed the substantial complexity of introducing value semantics to a type system. + +Perhaps more success stories will emerge over time, but in the meantime it remains an open question whether dependent types are net beneficial in practice to application development. Further experimentation would be required to answer this question, and Roc is not the right language to do those experiments. + +## Will Roc's compiler ever be self-hosted? (That is, will it ever be written in Roc?) + +The plan is to never implement Roc's compiler in Roc. + +The goal is for Roc's compiler to deliver the best user experience possible. Compiler performance is strongly influenced by how memory is used, and there are many performance benefits to be gained from using a systems language like Rust which offers more direct control over memory than Roc ever should. + +Roc isn't trying to be the best possible language for high-performance compiler development, but it is trying to have a high-performance compiler. The best tool for that job is a language other than Roc, so that's what we're using! 
+ + ## Why does Roc use the license it does? + +The short explanation for why Roc is released under the [Universal Permissive License](https://opensource.org/licenses/UPL): + +- Like [MIT](https://opensource.org/licenses/MIT), it's permissive and concise +- Like [Apache2](https://opensource.org/licenses/Apache-2.0), it protects against contributors claiming software patents over contributed code after the fact (MIT and BSD do not include protections against this) +- It's compatible with [GPLv2](https://opensource.org/licenses/GPL-2.0) (which [Apache2 is not](https://www.apache.org/licenses/GPL-compatibility.html)) +- It's one license, unlike "MIT or Apache2, at your choice" (which is how [Rust addressed the problem](https://internals.rust-lang.org/t/rationale-of-apache-dual-licensing/8952/4) of MIT not having patent protections but Apache2 not being GPLv2 compatible) +- It's been approved by OSI, FSF, and Oracle's lawyers, so it has been not only vetted by three giants in the world of software licensing, but also three giants with competing interests - and they all approved it. + +There's also [a longer explanation](https://github.com/rtfeldman/roc/issues/1199) with more detail about the motivation and thought process, if you're interested. + +## Why does Roc use both Rust and Zig? + +Roc's compiler has always been written in [Rust](https://www.rust-lang.org/). Roc's standard library was briefly written in Rust, but was soon rewritten in [Zig](https://ziglang.org/). + +There were a few reasons for this rewrite. + +1. We struggled to get Rust to emit LLVM bitcode in the format we needed, which is important so that LLVM can do whole-program optimizations across the standard library and compiled application. +2. Since the standard library has to interact with raw generated machine code (or LLVM bitcode), the Rust code unavoidably needed `unsafe` annotations all over the place. This made one of Rust's biggest selling points inapplicable in this particular use case. +3. Given that Rust's main selling points are inapplicable (its package ecosystem being another selling point that doesn't apply here), Zig's much faster compile times are a welcome benefit. +4. Zig has more tools for working in a memory-unsafe environment, such as reporting memory leaks in tests. These have been helpful in finding bugs that are out of scope for safe Rust. + +The split of Rust for the compiler and Zig for the standard library has worked well so far, and there are no plans to change it. diff --git a/README.md b/README.md index 14bb9842ec..746539d72c 100644 --- a/README.md +++ b/README.md @@ -1,118 +1,17 @@ -# The Roc Programming Language +# Work in progress! -Roc is a language for making delightful software. +Roc is not ready for an 0.1 release yet, but we have a [Zulip chat](https://roc.zulipchat.com) where you can learn more about the project. -The [tutorial](TUTORIAL.md) is the best place to learn about how to use the language - it assumes no prior knowledge of Roc or similar languages. (If you already know [Elm](https://elm-lang.org/), then [Roc for Elm Programmers](https://github.com/rtfeldman/roc/blob/trunk/roc-for-elm-programmers.md) may be of interest.) +If you'd like to get involved in contributing to the language, the Zulip chat is also the best place to get help with [good first issues](https://github.com/rtfeldman/roc/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22).
-There's also a folder of [examples](https://github.com/rtfeldman/roc/tree/trunk/examples) - the [CLI form example](https://github.com/rtfeldman/roc/tree/trunk/examples/interactive/form.roc) in particular is a reasonable starting point to build on. +# Sponsors -If you have a specific question, the [FAQ](FAQ.md) might have an answer, although [Roc Zulip chat](https://roc.zulipchat.com) is overall the best place to ask questions and get help! It's also where we discuss [ideas](https://roc.zulipchat.com/#narrow/stream/304641-ideas) for the language. If you want to get involved in contributing to the language, Zulip is also a great place to ask about good first projects. - -## State of Roc - -Roc is not ready for production yet. You are likely to encounter bugs. Publishing packages or documentation is not yet supported. -Many programs can however be compiled correctly. Check out [examples](examples) and [examples/benchmarks](examples/benchmarks). There are minimal platforms for Rust, Zig, C, Swift and an HTTP server. We are hard at work to make programming in Roc a delightful experience! - -## Getting started - -- [Linux x86](getting_started/linux_x86.md) -- [MacOS Apple Silicon](getting_started/macos_apple_silicon.md) -- [MacOS x86](getting_started/macos_x86.md) -- [Windows](getting_started/windows.md) -- [Other](getting_started/other.md) - -### Examples - -Run examples as follows: -``` -cargo run examples/hello-world/main.roc -``` -Some examples like `examples/benchmarks/NQueens.roc` require input after running. -For NQueens, input 10 in the terminal and press enter. - -[examples/benchmarks](examples/benchmarks) contains larger examples. - -**Tip:** when programming in roc, we recommend to execute `./roc check myproject/Foo.roc` before `./roc myproject/Foo.roc` or `./roc build myproject/Foo.roc`. `./roc check` can produce clear error messages in cases where building/running may panic. - -## Sponsors - -We are very grateful for our sponsors [NoRedInk](https://www.noredink.com/) and [rwx](https://www.rwx.com). +We are very grateful to our sponsors [NoRedInk](https://www.noredink.com/), [rwx](https://www.rwx.com), and [Tweede golf](https://tweedegolf.nl/en). [NoRedInk logo](https://www.noredink.com/)      [rwx logo](https://www.rwx.com) +     +[tweede golf logo](https://tweedegolf.nl/en) -## Applications and Platforms - -Applications are often built on a *framework.* Typically, both application and framework are written in the same language. -* [Rails](https://rubyonrails.org/) applications are written in Ruby, and so is Rails. -* [Angular](https://angularjs.org/) applications are written in TypeScript, and so is Angular. -* [Phoenix](https://phoenixframework.org/) applications are written in Elixir, and so is Phoenix. - -Some programs support plugins. Often the plugins are written in the same language as the underlying program. -* [Webpack](https://webpack.js.org/) plugins are written in JavaScript, and so is Webpack. -* [Eclipse](https://www.eclipse.org/ide/) plugins are written in Java, and so is Eclipse. -* [Leiningen](https://leiningen.org/) plugins are written in Clojure, and so is Leiningen. - -All of these can be considered examples of a platform/application relationship. There is an underlying platform, and many applications are built on top of it. (Plugins are a type of application in this sense.) - -Sometimes, platforms and their applications are written in different languages. 
- -* [Neovim](https://neovim.io/) is written in C for performance, and its plugins can be written in languages such as Python, JS, and Ruby. -* [NGINX](https://www.nginx.com/) is written in C for performance, and its plugins can be written in a [subset of JavaScript](https://www.nginx.com/blog/introduction-nginscript/). -* [Unity](https://unity.com/) is written in C++ for performance, and Unity applications (such as games) can be written in C#, Boo, or a JavaScript dialect called UnityScript. - -Like in the previous examples, application authors building on these platforms get to use high-level languages with automatic memory management. They make no ergonomics sacrifices, and may not even be aware that the underlying platform is written in a lower-level language. - -By using systems-level programming languages like C and C++, platform authors sacrifice development speed, but unlock the highest possible performance characteristics. This is a tradeoff many platform authors are happy to accept, for the sake of having applications built on their platforms run very fast. - -## Roc's Design - -Roc is designed to make the "systems-level platform, higher-level application" experience as nice as possible. - -* **Application** authors code exclusively in Roc. It's a language designed for nice ergonomics. The syntax resembles Ruby or CoffeeScript, and it has a fast compiler with full type inference. -* **Platform** authors code almost exclusively in a systems-level language like C, C++, Rust, Swift or [Zig](https://ziglang.org/), except for the thin Roc API they expose to application authors. Roc application code compiles to machine code, and production builds of Roc apps benefit from the same [LLVM](https://llvm.org/) optimizations that C++, Rust, Swift and Zig do. Roc application authors do not need to know this lower-level code exists; all they have to interact with is the platform's API, which is exposed as an ordinary Roc API. - -Every Roc application is built on top of exactly one Roc platform. There is no such thing as a Roc application that runs without a platform, and there is no default platform. You must choose one! - -The core Roc language and standard library include no I/O operations, which gives platform authors complete control over which effects they want to support. Some of the implications of this include: - -* A high-performance build tool (or text editor) written in Rust can be a Roc platform with a strong plugin security model. For example, it could expose only operations allowing plugin authors to modify the contents of certain files, rather than allowing plugins arbitrary read/write access to the entire filesystem. -* A VR or [Arduino](https://www.arduino.cc/) platform can expose uncommon I/O operations supported by that hardware, while omitting common I/O operations that are unsupported (such as reading keyboard input from a terminal that doesn't exist). -* A high-performance Web server written in Rust can be a Roc platform where all I/O operations are implemented in terms of Streams or Observables rather than a more traditional asynchronous abstraction like Futures or Promises. This would mean all code in that platform's ecosystem would be necessarily built on a common streaming abstraction. - -## Project Goals - -Roc is in relatively early stages of development. 
It's currently possible to build both platforms and applications (see the [examples](https://github.com/rtfeldman/roc/tree/trunk/examples) folder for some examples that aren't particularly organized at the moment), although [documentation](https://github.com/rtfeldman/roc/tree/trunk/crates/compiler/builtins/roc) is in even earlier stages than the compiler itself. - -Besides the above language design, a separate goal is for Roc to ship with an ambitiously boundary-pushing graphical editor. Not like "an IDE," but rather something that makes people say "I have never seen anything remotely like this outside of Bret Victor demos." - -One of the reasons this editor is coupled with the language itself is to allow package authors to include custom editor tooling inside packages. - -A trivial example: suppose I'm writing a Roc app for an Arduino platform. I install a platform-specific package for displaying text on a grid of LEDs. Because I've installed this package, at the call site where I call the function to specify the color of the text on the LEDs, my Roc editor displays an inline color picker. As I move a slider around to try out different colors, not only does my code change to reflect that value in realtime, but the physical LEDs in my room change color in realtime as well. As the application author, all I did to get that experience was to install the "text on an LED grid" package, nothing else. - -The goal is for this to be one of the most trivial, bare minimum examples of what the editor experience would be like. Hopefully, people in the future will look back on this example and say "that's so embarrassingly basic; why didn't you talk about one of the *actually great* things in the seamless editor plugin ecosystem?" - -Finally, some implementation goals: - -* The web server for the package manager is written in Roc (with an underlying Rust platform for the web server, for example [warp](https://github.com/seanmonstar/warp)). -* The editor plugins are written in Roc (with an underlying Rust platform for the editor itself, for example using [gfx-hal](https://github.com/gfx-rs/gfx)). -* The CLI (for building Roc projects on CI platforms) has its user interface written in Roc (with an underlying Rust platform for fast compilation and basic CLI interactions). - -It's an ambitious project! It'll take a long time to get where it's going, but hopefully it'll be worth the wait. - -## Getting Involved - -The number of people involved in Roc's development has been steadily increasing -over time - which has been great, because it's meant we've been able to onboard -people at a nice pace. (Most people who have contributed to Roc had previously -never done anything with Rust and also never worked on a compiler, but we've -been able to find beginner-friendly projects to get people up to speed gradually.) - -If you're interested in getting involved, check out -[CONTRIBUTING.md](https://github.com/rtfeldman/roc/blob/trunk/CONTRIBUTING.md)! - -## Name and Logo - -If you're curious about where the language's name and logo came from, -[here's an explanation](https://github.com/rtfeldman/roc/blob/trunk/name-and-logo.md). +If you or your employer would like to sponsor Roc's development, please [DM Richard Feldman on Zulip](https://roc.zulipchat.com/#narrow/pm-with/281383-user281383)! 
diff --git a/crates/ast/src/constrain.rs b/crates/ast/src/constrain.rs index 6f52e9506c..5787c4726c 100644 --- a/crates/ast/src/constrain.rs +++ b/crates/ast/src/constrain.rs @@ -2362,7 +2362,7 @@ pub mod test_constrain { \f -> (\a, b -> f b a) "# ), - "(a, b -> c) -> (b, a -> c)", + "(a, b -> d) -> (b, a -> d)", ); } @@ -2400,7 +2400,7 @@ pub mod test_constrain { \{} -> x "# ), - "{}* -> Num *", + "{}* -> Num a", ) } diff --git a/crates/ast/src/lang/core/def/def.rs b/crates/ast/src/lang/core/def/def.rs index 3f0f51e5d0..00c4c0c7e3 100644 --- a/crates/ast/src/lang/core/def/def.rs +++ b/crates/ast/src/lang/core/def/def.rs @@ -13,7 +13,7 @@ // use crate::pattern::{bindings_from_patterns, canonicalize_pattern, Pattern}; // use crate::procedure::References; use roc_collections::all::{default_hasher, ImMap, MutMap, MutSet, SendMap}; -use roc_error_macros::{todo_abilities, todo_opaques}; +use roc_error_macros::{internal_error, todo_abilities}; use roc_module::ident::Lowercase; use roc_module::symbol::Symbol; use roc_parse::ast::{self, CommentOrNewline, Defs, TypeDef, TypeHeader, ValueDef as AstValueDef}; @@ -21,6 +21,7 @@ use roc_parse::pattern::PatternType; use roc_problem::can::{Problem, RuntimeError, ShadowKind}; use roc_region::all::{Loc, Region}; use roc_types::subs::{VarStore, Variable}; +use roc_types::types::AliasKind; use std::collections::HashMap; use std::fmt::Debug; use ven_graph::{strongly_connected_components, topological_sort_into_groups}; @@ -274,7 +275,7 @@ fn to_pending_def<'a>( } } - Type(TypeDef::Opaque { .. }) => todo_opaques!(), + Type(TypeDef::Opaque { .. }) => internal_error!("opaques not implemented"), Type(TypeDef::Ability { .. }) => todo_abilities!(), Value(AstValueDef::Expect { .. }) => todo!(), @@ -341,6 +342,7 @@ fn from_pending_alias<'a>( typ: symbol, variable_region: loc_lowercase.region, variable_name: loc_lowercase.value.clone(), + alias_kind: AliasKind::Structural, }); } } @@ -373,7 +375,12 @@ fn from_pending_alias<'a>( scope.add_alias(env.pool, symbol, named, annotation_id); } else { - env.problem(Problem::CyclicAlias(symbol, name.region, vec![])); + env.problem(Problem::CyclicAlias( + symbol, + name.region, + vec![], + AliasKind::Structural, + )); return output; } } else { diff --git a/crates/ast/src/lang/core/pattern.rs b/crates/ast/src/lang/core/pattern.rs index e80fcd9a69..ac0d258667 100644 --- a/crates/ast/src/lang/core/pattern.rs +++ b/crates/ast/src/lang/core/pattern.rs @@ -8,7 +8,7 @@ use roc_can::num::{ finish_parsing_base, finish_parsing_float, finish_parsing_num, ParsedNumResult, }; use roc_collections::all::BumpMap; -use roc_error_macros::todo_opaques; +use roc_error_macros::internal_error; use roc_module::symbol::{Interns, Symbol}; use roc_parse::ast::{StrLiteral, StrSegment}; use roc_parse::pattern::PatternType; @@ -272,7 +272,7 @@ pub fn to_pattern2<'a>( } } - OpaqueRef(..) => todo_opaques!(), + OpaqueRef(..) 
=> internal_error!("opaques not implemented"), Apply(tag, patterns) => { let can_patterns = PoolVec::with_capacity(patterns.len() as u32, env.pool); diff --git a/crates/ast/src/lang/core/types.rs b/crates/ast/src/lang/core/types.rs index b118c2e2df..c665f6c5a9 100644 --- a/crates/ast/src/lang/core/types.rs +++ b/crates/ast/src/lang/core/types.rs @@ -7,7 +7,7 @@ use roc_error_macros::todo_abilities; use roc_module::ident::{Ident, Lowercase, TagName, Uppercase}; use roc_module::symbol::Symbol; use roc_region::all::{Loc, Region}; -use roc_types::types::{Problem, RecordField}; +use roc_types::types::{AliasKind, Problem, RecordField}; use roc_types::{subs::Variable, types::ErrorType}; use crate::lang::env::Env; @@ -793,6 +793,7 @@ fn to_type_apply<'a>( region, alias_needs: alias.targs.len() as u8, type_got: args.len() as u8, + alias_kind: AliasKind::Structural, }); return error; } diff --git a/crates/ast/src/module.rs b/crates/ast/src/module.rs index ae9d265b57..227ac19f8d 100644 --- a/crates/ast/src/module.rs +++ b/crates/ast/src/module.rs @@ -1,20 +1,21 @@ use bumpalo::Bump; -use roc_load::{LoadedModule, Threading}; +use roc_load::{ExecutionMode, LoadConfig, LoadedModule, Threading}; use roc_target::TargetInfo; use std::path::Path; pub fn load_module(src_file: &Path, threading: Threading) -> LoadedModule { let subs_by_module = Default::default(); - let arena = Bump::new(); - let loaded = roc_load::load_and_typecheck( - &arena, - src_file.to_path_buf(), - subs_by_module, - TargetInfo::default_x86_64(), - roc_reporting::report::RenderTarget::ColorTerminal, + let load_config = LoadConfig { + target_info: TargetInfo::default_x86_64(), // editor only needs type info, so this is unused + render: roc_reporting::report::RenderTarget::ColorTerminal, threading, - ); + exec_mode: ExecutionMode::Check, + }; + + let arena = Bump::new(); + let loaded = + roc_load::load_and_typecheck(&arena, src_file.to_path_buf(), subs_by_module, load_config); match loaded { Ok(x) => x, diff --git a/crates/cli/src/build.rs b/crates/cli/src/build.rs index a113f6f0ce..0526d87d40 100644 --- a/crates/cli/src/build.rs +++ b/crates/cli/src/build.rs @@ -5,7 +5,7 @@ use roc_build::{ }; use roc_builtins::bitcode; use roc_collections::VecMap; -use roc_load::{Expectations, LoadingProblem, Threading}; +use roc_load::{EntryPoint, ExecutionMode, Expectations, LoadConfig, LoadingProblem, Threading}; use roc_module::symbol::{Interns, ModuleId}; use roc_mono::ir::OptLevel; use roc_reporting::report::RenderTarget; @@ -55,14 +55,18 @@ pub fn build_file<'a>( // Step 1: compile the app and generate the .o file let subs_by_module = Default::default(); + let load_config = LoadConfig { + target_info, + // TODO: expose this from CLI? + render: RenderTarget::ColorTerminal, + threading, + exec_mode: ExecutionMode::Executable, + }; let loaded = roc_load::load_and_monomorphize( arena, app_module_path.clone(), subs_by_module, - target_info, - // TODO: expose this from CLI? 
- RenderTarget::ColorTerminal, - threading, + load_config, )?; use target_lexicon::Architecture; @@ -74,36 +78,37 @@ pub fn build_file<'a>( // > Non-Emscripten WebAssembly hasn't implemented __builtin_return_address // // and zig does not currently emit `.a` webassembly static libraries - let host_extension = if emit_wasm { - if matches!(opt_level, OptLevel::Development) { - "wasm" - } else { - "zig" + let (host_extension, app_extension, extension) = { + use roc_target::OperatingSystem::*; + + match roc_target::OperatingSystem::from(target.operating_system) { + Wasi => { + if matches!(opt_level, OptLevel::Development) { + ("wasm", "wasm", Some("wasm")) + } else { + ("zig", "bc", Some("wasm")) + } + } + Unix => ("o", "o", None), + Windows => ("obj", "obj", Some("exe")), } - } else { - "o" - }; - let app_extension = if emit_wasm { - if matches!(opt_level, OptLevel::Development) { - "wasm" - } else { - "bc" - } - } else { - "o" }; let cwd = app_module_path.parent().unwrap(); - let mut binary_path = cwd.join(&*loaded.output_path); // TODO should join ".exe" on Windows + let mut binary_path = cwd.join(&*loaded.output_path); - if emit_wasm { - binary_path.set_extension("wasm"); + if let Some(extension) = extension { + binary_path.set_extension(extension); } - let host_input_path = cwd - .join(&*loaded.platform_path) - .with_file_name("host") - .with_extension(host_extension); + let host_input_path = if let EntryPoint::Executable { platform_path, .. } = &loaded.entry_point + { + cwd.join(platform_path) + .with_file_name("host") + .with_extension(host_extension) + } else { + unreachable!(); + }; // TODO this should probably be moved before load_and_monomorphize. // To do this we will need to preprocess files just for their exported symbols. @@ -307,8 +312,11 @@ pub fn build_file<'a>( host_input_path.as_path().to_str().unwrap(), app_o_file.to_str().unwrap(), ]; + + let str_host_obj_path = bitcode::get_builtins_host_obj_path(); + if matches!(opt_level, OptLevel::Development) { - inputs.push(bitcode::BUILTINS_HOST_OBJ_PATH); + inputs.push(&str_host_obj_path); } let (mut child, _) = // TODO use lld @@ -438,15 +446,15 @@ pub fn check_file( // Step 1: compile the app and generate the .o file let subs_by_module = Default::default(); - let mut loaded = roc_load::load_and_typecheck( - arena, - roc_file_path, - subs_by_module, + let load_config = LoadConfig { target_info, // TODO: expose this from CLI? 
- RenderTarget::ColorTerminal, + render: RenderTarget::ColorTerminal, threading, - )?; + exec_mode: ExecutionMode::Check, + }; + let mut loaded = + roc_load::load_and_typecheck(arena, roc_file_path, subs_by_module, load_config)?; let buf = &mut String::with_capacity(1024); diff --git a/crates/cli/src/lib.rs b/crates/cli/src/lib.rs index 56079db15f..1f387f861c 100644 --- a/crates/cli/src/lib.rs +++ b/crates/cli/src/lib.rs @@ -10,7 +10,7 @@ use roc_error_macros::{internal_error, user_error}; use roc_gen_llvm::llvm::build::LlvmBackendMode; use roc_gen_llvm::run_roc::RocCallResult; use roc_gen_llvm::run_roc_dylib; -use roc_load::{Expectations, LoadingProblem, Threading}; +use roc_load::{ExecutionMode, Expectations, LoadConfig, LoadingProblem, Threading}; use roc_module::symbol::{Interns, ModuleId}; use roc_mono::ir::OptLevel; use roc_repl_expect::run::{expect_mono_module_to_dylib, roc_dev_expect}; @@ -336,31 +336,22 @@ pub fn test(matches: &ArgMatches, triple: Triple) -> io::Result { let path = Path::new(filename); // Spawn the root task - let path = path.canonicalize().unwrap_or_else(|err| { - use io::ErrorKind::*; + if !path.exists() { + let path_string = path.to_string_lossy(); - match err.kind() { - NotFound => { - let path_string = path.to_string_lossy(); - - // TODO these should use roc_reporting to display nicer error messages. - match matches.value_source(ROC_FILE) { - Some(ValueSource::DefaultValue) => { - eprintln!( - "\nNo `.roc` file was specified, and the current directory does not contain a {} file to use as a default.\n\nYou can run `roc help` for more information on how to provide a .roc file.\n", - DEFAULT_ROC_FILENAME - ) - } - _ => eprintln!("\nThis file was not found: {}\n\nYou can run `roc help` for more information on how to provide a .roc file.\n", path_string), - } - - process::exit(1); - } - _ => { - todo!("TODO Gracefully handle opening {:?} - {:?}", path, err); + // TODO these should use roc_reporting to display nicer error messages. + match matches.value_source(ROC_FILE) { + Some(ValueSource::DefaultValue) => { + eprintln!( + "\nNo `.roc` file was specified, and the current directory does not contain a {} file to use as a default.\n\nYou can run `roc help` for more information on how to provide a .roc file.\n", + DEFAULT_ROC_FILENAME + ) } + _ => eprintln!("\nThis file was not found: {}\n\nYou can run `roc help` for more information on how to provide a .roc file.\n", path_string), } - }); + + process::exit(1); + } let arena = &arena; let target = &triple; @@ -370,16 +361,16 @@ pub fn test(matches: &ArgMatches, triple: Triple) -> io::Result { // Step 1: compile the app and generate the .o file let subs_by_module = Default::default(); - let loaded = roc_load::load_and_monomorphize( - arena, - path, - subs_by_module, + let load_config = LoadConfig { target_info, // TODO: expose this from CLI? 
- roc_reporting::report::RenderTarget::ColorTerminal, + render: roc_reporting::report::RenderTarget::ColorTerminal, threading, - ) - .unwrap(); + exec_mode: ExecutionMode::Test, + }; + let loaded = + roc_load::load_and_monomorphize(arena, path.to_path_buf(), subs_by_module, load_config) + .unwrap(); let mut loaded = loaded; let mut expectations = std::mem::take(&mut loaded.expectations); @@ -439,10 +430,8 @@ pub fn test(matches: &ArgMatches, triple: Triple) -> io::Result { 31 // red }; - println!(); - println!( - "\x1B[{failed_color}m{failed}\x1B[39m failed and \x1B[32m{passed}\x1B[39m passed in {} ms.\n", + "\n\x1B[{failed_color}m{failed}\x1B[39m failed and \x1B[32m{passed}\x1B[39m passed in {} ms.\n", total_time.as_millis(), ); @@ -509,15 +498,11 @@ pub fn build( let path = Path::new(filename); // Spawn the root task - let path = path.canonicalize().unwrap_or_else(|err| { - use io::ErrorKind::*; + if !path.exists() { + let path_string = path.to_string_lossy(); - match err.kind() { - NotFound => { - let path_string = path.to_string_lossy(); - - // TODO these should use roc_reporting to display nicer error messages. - match matches.value_source(ROC_FILE) { + // TODO these should use roc_reporting to display nicer error messages. + match matches.value_source(ROC_FILE) { Some(ValueSource::DefaultValue) => { eprintln!( "\nNo `.roc` file was specified, and the current directory does not contain a {} file to use as a default.\n\nYou can run `roc help` for more information on how to provide a .roc file.\n", @@ -527,19 +512,14 @@ pub fn build( _ => eprintln!("\nThis file was not found: {}\n\nYou can run `roc help` for more information on how to provide a .roc file.\n", path_string), } - process::exit(1); - } - _ => { - todo!("TODO Gracefully handle opening {:?} - {:?}", path, err); - } - } - }); + process::exit(1); + } let target_valgrind = matches.is_present(FLAG_VALGRIND); let res_binary_path = build_file( &arena, &triple, - path, + path.to_path_buf(), opt_level, emit_debug_info, emit_timings, @@ -752,14 +732,18 @@ fn roc_run<'a, I: IntoIterator>( // since the process is about to exit anyway. 
std::mem::forget(arena); - if cfg!(target_family = "unix") { + #[cfg(target_family = "unix")] + { use std::os::unix::ffi::OsStrExt; run_with_wasmer( generated_filename, args.into_iter().map(|os_str| os_str.as_bytes()), ); - } else { + } + + #[cfg(not(target_family = "unix"))] + { run_with_wasmer( generated_filename, args.into_iter().map(|os_str| { @@ -776,6 +760,7 @@ fn roc_run<'a, I: IntoIterator>( } } +#[cfg(target_family = "unix")] fn make_argv_envp<'a, I: IntoIterator, S: AsRef>( arena: &'a Bump, executable: &ExecutableFile, @@ -909,11 +894,28 @@ impl ExecutableFile { let path_cstring = CString::new(path.as_os_str().as_bytes()).unwrap(); libc::execve(path_cstring.as_ptr().cast(), argv.as_ptr(), envp.as_ptr()) } + + #[cfg(all(target_family = "windows"))] + ExecutableFile::OnDisk(_, path) => { + use std::process::Command; + + let _ = argv; + let _ = envp; + + let mut command = Command::new(path); + + let output = command.output().unwrap(); + + println!("{}", String::from_utf8_lossy(&output.stdout)); + + std::process::exit(0) + } } } } // with Expect +#[cfg(target_family = "unix")] unsafe fn roc_run_native_debug( executable: ExecutableFile, argv: &[*const c_char], @@ -1053,35 +1055,75 @@ fn roc_run_executable_file_path(binary_bytes: &mut [u8]) -> std::io::Result std::io::Result { + use std::fs::OpenOptions; + use std::io::Write; + + let temp_dir = tempfile::tempdir()?; + + // We have not found a way to use a virtual file on non-Linux OSes. + // Hence we fall back to just writing the file to the file system, and using that file. + let app_path_buf = temp_dir.path().join("roc_app_binary.exe"); + let mut file = OpenOptions::new() + .create(true) + .write(true) + //.mode(0o777) // create the file as executable + .open(&app_path_buf)?; + + file.write_all(binary_bytes)?; + + // We store the TempDir in this variant alongside the path to the executable, + // so that the TempDir doesn't get dropped until after we're done with the path. + // If we didn't do that, then the tempdir would potentially get deleted by the + // TempDir's Drop impl before the file had been executed. + Ok(ExecutableFile::OnDisk(temp_dir, app_path_buf)) +} + /// Run on the native OS (not on wasm) #[cfg(not(target_family = "unix"))] fn roc_run_native, S: AsRef>( - _arena: Bump, // This should be passed an owned value, not a reference, so we can usefully mem::forget it! + arena: Bump, // This should be passed an owned value, not a reference, so we can usefully mem::forget it! + opt_level: OptLevel, _args: I, - _binary_bytes: &mut [u8], + binary_bytes: &mut [u8], _expectations: VecMap, _interns: Interns, ) -> io::Result { - todo!("TODO support running roc programs on non-UNIX targets"); - // let mut cmd = std::process::Command::new(&binary_path); + use bumpalo::collections::CollectIn; - // // Run the compiled app - // let exit_status = cmd - // .spawn() - // .unwrap_or_else(|err| panic!("Failed to run app after building it: {:?}", err)) - // .wait() - // .expect("TODO gracefully handle block_on failing when `roc` spawns a subprocess for the compiled app"); + unsafe { + let executable = roc_run_executable_file_path(binary_bytes)?; - // // `roc [FILE]` exits with the same status code as the app it ran. - // // - // // If you want to know whether there were compilation problems - // // via status code, use either `roc build` or `roc check` instead! 
- // match exit_status.code() { - // Some(code) => Ok(code), - // None => { - // todo!("TODO gracefully handle the `roc [FILE]` subprocess terminating with a signal."); - // } - // } + // TODO forward the arguments + // let (argv_cstrings, envp_cstrings) = make_argv_envp(&arena, &executable, args); + let argv_cstrings = bumpalo::vec![ in &arena; CString::default()]; + let envp_cstrings = bumpalo::vec![ in &arena; CString::default()]; + + let argv: bumpalo::collections::Vec<*const c_char> = argv_cstrings + .iter() + .map(|s| s.as_ptr()) + .chain([std::ptr::null()]) + .collect_in(&arena); + + let envp: bumpalo::collections::Vec<*const c_char> = envp_cstrings + .iter() + .map(|s| s.as_ptr()) + .chain([std::ptr::null()]) + .collect_in(&arena); + + match opt_level { + OptLevel::Development => { + // roc_run_native_debug(executable, &argv, &envp, expectations, interns) + todo!() + } + OptLevel::Normal | OptLevel::Size | OptLevel::Optimize => { + roc_run_native_fast(executable, &argv, &envp); + } + } + } + + Ok(1) } #[cfg(feature = "run-wasm32")] diff --git a/crates/cli_utils/src/helpers.rs b/crates/cli_utils/src/helpers.rs index fb8c63b370..4719de2585 100644 --- a/crates/cli_utils/src/helpers.rs +++ b/crates/cli_utils/src/helpers.rs @@ -351,6 +351,11 @@ pub fn root_dir() -> PathBuf { path.pop(); path.pop(); + // running cargo with --target will put us in the target dir + if path.ends_with("target") { + path.pop(); + } + path } diff --git a/crates/compiler/alias_analysis/Cargo.toml b/crates/compiler/alias_analysis/Cargo.toml index 98fd17f6f8..621d0d6561 100644 --- a/crates/compiler/alias_analysis/Cargo.toml +++ b/crates/compiler/alias_analysis/Cargo.toml @@ -3,7 +3,7 @@ authors = ["The Roc Contributors"] edition = "2021" license = "UPL-1.0" name = "roc_alias_analysis" -version = "0.1.0" +version = "0.0.1" [dependencies] morphic_lib = {path = "../../vendor/morphic_lib"} diff --git a/crates/compiler/alias_analysis/src/lib.rs b/crates/compiler/alias_analysis/src/lib.rs index 723451da7c..d01e178d2e 100644 --- a/crates/compiler/alias_analysis/src/lib.rs +++ b/crates/compiler/alias_analysis/src/lib.rs @@ -131,7 +131,7 @@ fn bytes_as_ascii(bytes: &[u8]) -> String { pub fn spec_program<'a, I>( opt_level: OptLevel, - entry_point: roc_mono::ir::EntryPoint<'a>, + opt_entry_point: Option>, procs: I, ) -> Result where @@ -221,19 +221,21 @@ where m.add_func(func_name, spec)?; } - // the entry point wrapper - let roc_main_bytes = func_name_bytes_help( - entry_point.symbol, - entry_point.layout.arguments.iter().copied(), - CapturesNiche::no_niche(), - &entry_point.layout.result, - ); - let roc_main = FuncName(&roc_main_bytes); + if let Some(entry_point) = opt_entry_point { + // the entry point wrapper + let roc_main_bytes = func_name_bytes_help( + entry_point.symbol, + entry_point.layout.arguments.iter().copied(), + CapturesNiche::no_niche(), + &entry_point.layout.result, + ); + let roc_main = FuncName(&roc_main_bytes); - let entry_point_function = - build_entry_point(entry_point.layout, roc_main, &host_exposed_functions)?; - let entry_point_name = FuncName(ENTRY_POINT_NAME); - m.add_func(entry_point_name, entry_point_function)?; + let entry_point_function = + build_entry_point(entry_point.layout, roc_main, &host_exposed_functions)?; + let entry_point_name = FuncName(ENTRY_POINT_NAME); + m.add_func(entry_point_name, entry_point_function)?; + } for union_layout in type_definitions { let type_name_bytes = recursive_tag_union_name_bytes(&union_layout).as_bytes(); @@ -264,8 +266,10 @@ where let mut p = 
ProgramBuilder::new(); p.add_mod(MOD_APP, main_module)?; - let entry_point_name = FuncName(ENTRY_POINT_NAME); - p.add_entry_point(EntryPointName(ENTRY_POINT_NAME), MOD_APP, entry_point_name)?; + if opt_entry_point.is_some() { + let entry_point_name = FuncName(ENTRY_POINT_NAME); + p.add_entry_point(EntryPointName(ENTRY_POINT_NAME), MOD_APP, entry_point_name)?; + } p.build()? }; diff --git a/crates/compiler/arena_pool/Cargo.toml b/crates/compiler/arena_pool/Cargo.toml index f3d0aa6e08..708bc1160f 100644 --- a/crates/compiler/arena_pool/Cargo.toml +++ b/crates/compiler/arena_pool/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "arena-pool" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" repository = "https://github.com/rtfeldman/roc" diff --git a/crates/compiler/build/Cargo.toml b/crates/compiler/build/Cargo.toml index 4aa2da6f2a..97320d5462 100644 --- a/crates/compiler/build/Cargo.toml +++ b/crates/compiler/build/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "roc_build" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" @@ -26,6 +26,7 @@ roc_gen_dev = { path = "../gen_dev", default-features = false } roc_reporting = { path = "../../reporting" } roc_error_macros = { path = "../../error_macros" } roc_std = { path = "../../roc_std", default-features = false } +roc_utils = { path = "../../utils" } bumpalo = { version = "3.8.0", features = ["collections"] } libloading = "0.7.1" tempfile = "3.2.0" diff --git a/crates/compiler/build/src/link.rs b/crates/compiler/build/src/link.rs index a610abe32c..9e0cc84a67 100644 --- a/crates/compiler/build/src/link.rs +++ b/crates/compiler/build/src/link.rs @@ -3,6 +3,7 @@ use libloading::{Error, Library}; use roc_builtins::bitcode; use roc_error_macros::internal_error; use roc_mono::ir::OptLevel; +use roc_utils::get_lib_path; use std::collections::HashMap; use std::env; use std::io; @@ -66,12 +67,13 @@ pub fn link( fn find_zig_str_path() -> PathBuf { // First try using the lib path relative to the executable location. - let exe_relative_str_path = std::env::current_exe() - .ok() - .and_then(|path| Some(path.parent()?.join("lib").join("str.zig"))); - if let Some(exe_relative_str_path) = exe_relative_str_path { - if std::path::Path::exists(&exe_relative_str_path) { - return exe_relative_str_path; + let lib_path_opt = get_lib_path(); + + if let Some(lib_path) = lib_path_opt { + let zig_str_path = lib_path.join("str.zig"); + + if std::path::Path::exists(&zig_str_path) { + return zig_str_path; } } @@ -87,7 +89,7 @@ fn find_zig_str_path() -> PathBuf { return zig_str_path; } - panic!("cannot find `str.zig`. Launch me from either the root of the roc repo or one level down(roc/examples, roc/cli...)") + panic!("cannot find `str.zig`. 
Check the source code in find_zig_str_path() to show all the paths I tried.") } fn find_wasi_libc_path() -> PathBuf { @@ -118,16 +120,18 @@ pub fn build_zig_host_native( .env_clear() .env("PATH", env_path) .env("HOME", env_home); + if let Some(shared_lib_path) = shared_lib_path { command.args(&[ "build-exe", "-fPIE", shared_lib_path.to_str().unwrap(), - bitcode::BUILTINS_HOST_OBJ_PATH, + &bitcode::get_builtins_host_obj_path(), ]); } else { command.args(&["build-obj", "-fPIC"]); } + command.args(&[ zig_host_src, emit_bin, @@ -158,6 +162,7 @@ pub fn build_zig_host_native( } else if matches!(opt_level, OptLevel::Size) { command.args(&["-O", "ReleaseSmall"]); } + command.output().unwrap() } @@ -228,7 +233,7 @@ pub fn build_zig_host_native( "build-exe", "-fPIE", shared_lib_path.to_str().unwrap(), - bitcode::BUILTINS_HOST_OBJ_PATH, + &bitcode::get_builtins_host_obj_path(), ]); } else { command.args(&["build-obj", "-fPIC"]); @@ -340,7 +345,7 @@ pub fn build_c_host_native( if let Some(shared_lib_path) = shared_lib_path { command.args(&[ shared_lib_path.to_str().unwrap(), - bitcode::BUILTINS_HOST_OBJ_PATH, + &bitcode::get_builtins_host_obj_path(), "-fPIE", "-pie", "-lm", @@ -423,7 +428,11 @@ pub fn rebuild_host( host_input_path.with_file_name(if shared_lib_path.is_some() { "dynhost" } else { - "host.o" + match roc_target::OperatingSystem::from(target.operating_system) { + roc_target::OperatingSystem::Windows => "host.obj", + roc_target::OperatingSystem::Unix => "host.o", + roc_target::OperatingSystem::Wasi => "host.o", + } }) }; @@ -1093,11 +1102,58 @@ fn link_wasm32( fn link_windows( _target: &Triple, - _output_path: PathBuf, - _input_paths: &[&str], - _link_type: LinkType, + output_path: PathBuf, + input_paths: &[&str], + link_type: LinkType, ) -> io::Result<(Child, PathBuf)> { - todo!("Add windows support to the surgical linker. 
See issue #2608.") + let zig_str_path = find_zig_str_path(); + + match link_type { + LinkType::Dylib => { + let child = Command::new(&zig_executable()) + .args(&["build-lib"]) + .args(input_paths) + .args([ + "-lc", + &format!("-femit-bin={}", output_path.to_str().unwrap()), + "-target", + "native", + "--pkg-begin", + "str", + zig_str_path.to_str().unwrap(), + "--pkg-end", + "--strip", + "-O", + "Debug", + "-dynamic", + ]) + .spawn()?; + + Ok((child, output_path)) + } + LinkType::Executable => { + let child = Command::new(&zig_executable()) + .args(&["build-exe"]) + .args(input_paths) + .args([ + "-lc", + &format!("-femit-bin={}", output_path.to_str().unwrap()), + "-target", + "native", + "--pkg-begin", + "str", + zig_str_path.to_str().unwrap(), + "--pkg-end", + "--strip", + "-O", + "Debug", + ]) + .spawn()?; + + Ok((child, output_path)) + } + LinkType::None => todo!(), + } } pub fn llvm_module_to_dylib( @@ -1172,7 +1228,7 @@ pub fn preprocess_host_wasm32(host_input_path: &Path, preprocessed_host_path: &P let mut command = Command::new(&zig_executable()); let args = &[ "wasm-ld", - bitcode::BUILTINS_WASM32_OBJ_PATH, + &bitcode::get_builtins_wasm32_obj_path(), host_input, WASI_LIBC_PATH, WASI_COMPILER_RT_PATH, // builtins need __multi3, __udivti3, __fixdfti diff --git a/crates/compiler/build/src/program.rs b/crates/compiler/build/src/program.rs index 98ef9d36e1..f16a25029d 100644 --- a/crates/compiler/build/src/program.rs +++ b/crates/compiler/build/src/program.rs @@ -1,7 +1,7 @@ pub use roc_gen_llvm::llvm::build::FunctionIterator; use roc_gen_llvm::llvm::build::{module_from_builtins, LlvmBackendMode}; use roc_gen_llvm::llvm::externs::add_default_roc_externs; -use roc_load::{LoadedModule, MonomorphizedModule}; +use roc_load::{EntryPoint, LoadedModule, MonomorphizedModule}; use roc_module::symbol::{Interns, ModuleId}; use roc_mono::ir::OptLevel; use roc_region::all::LineInfo; @@ -265,11 +265,18 @@ pub fn gen_from_mono_module_llvm( // expects that would confuse the surgical linker add_default_roc_externs(&env); + let opt_entry_point = match loaded.entry_point { + EntryPoint::Executable { symbol, layout, .. 
} => { + Some(roc_mono::ir::EntryPoint { symbol, layout }) + } + EntryPoint::Test => None, + }; + roc_gen_llvm::llvm::build::build_procedures( &env, opt_level, loaded.procedures, - loaded.entry_point, + opt_entry_point, Some(&app_ll_file), ); diff --git a/crates/compiler/builtins/Cargo.toml b/crates/compiler/builtins/Cargo.toml index 6d3033797a..d824d44a64 100644 --- a/crates/compiler/builtins/Cargo.toml +++ b/crates/compiler/builtins/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "roc_builtins" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" @@ -10,12 +10,12 @@ roc_collections = { path = "../collections" } roc_region = { path = "../region" } roc_module = { path = "../module" } roc_target = { path = "../roc_target" } +roc_utils = { path = "../../utils" } lazy_static = "1.4.0" [build-dependencies] # dunce can be removed once ziglang/zig#5109 is fixed dunce = "1.0.2" -fs_extra = "1.2.0" [target.'cfg(target_os = "macos")'.build-dependencies] tempfile = "3.2.0" diff --git a/crates/compiler/builtins/bitcode/src/str.zig b/crates/compiler/builtins/bitcode/src/str.zig index 8f3b63f4a5..a8418a64e0 100644 --- a/crates/compiler/builtins/bitcode/src/str.zig +++ b/crates/compiler/builtins/bitcode/src/str.zig @@ -2608,10 +2608,10 @@ test "getScalarUnsafe" { } pub fn strCloneTo( + string: RocStr, ptr: [*]u8, offset: usize, extra_offset: usize, - string: RocStr, ) callconv(.C) usize { const WIDTH: usize = @sizeOf(RocStr); if (string.isSmallStr()) { diff --git a/crates/compiler/builtins/bitcode/src/utils.zig b/crates/compiler/builtins/bitcode/src/utils.zig index 388245aa06..3783df0c48 100644 --- a/crates/compiler/builtins/bitcode/src/utils.zig +++ b/crates/compiler/builtins/bitcode/src/utils.zig @@ -131,18 +131,16 @@ const RC_TYPE = Refcount.normal; pub fn increfC(ptr_to_refcount: *isize, amount: isize) callconv(.C) void { if (RC_TYPE == Refcount.none) return; - var refcount = ptr_to_refcount.*; - if (refcount < REFCOUNT_MAX_ISIZE) { + // Ensure that the refcount is not whole program lifetime. + if (ptr_to_refcount.* != REFCOUNT_MAX_ISIZE) { + // Note: we assume that a refcount will never overflow. + // As such, we do not need to cap incrementing. switch (RC_TYPE) { Refcount.normal => { - ptr_to_refcount.* = std.math.min(refcount + amount, REFCOUNT_MAX_ISIZE); + ptr_to_refcount.* += amount; }, Refcount.atomic => { - var next = std.math.min(refcount + amount, REFCOUNT_MAX_ISIZE); - while (@cmpxchgWeak(isize, ptr_to_refcount, refcount, next, Monotonic, Monotonic)) |found| { - refcount = found; - next = std.math.min(refcount + amount, REFCOUNT_MAX_ISIZE); - } + _ = @atomicRmw(isize, ptr_to_refcount, std.builtin.AtomicRmwOp.Add, amount, Monotonic); }, Refcount.none => unreachable, } @@ -194,24 +192,24 @@ inline fn decref_ptr_to_refcount( ) void { if (RC_TYPE == Refcount.none) return; const extra_bytes = std.math.max(alignment, @sizeOf(usize)); - switch (RC_TYPE) { - Refcount.normal => { - const refcount: isize = refcount_ptr[0]; - if (refcount == REFCOUNT_ONE_ISIZE) { - dealloc(@ptrCast([*]u8, refcount_ptr) - (extra_bytes - @sizeOf(usize)), alignment); - } else if (refcount < REFCOUNT_MAX_ISIZE) { - refcount_ptr[0] = refcount - 1; - } - }, - Refcount.atomic => { - if (refcount_ptr[0] < REFCOUNT_MAX_ISIZE) { + // Ensure that the refcount is not whole program lifetime. 
+ const refcount: isize = refcount_ptr[0]; + if (refcount != REFCOUNT_MAX_ISIZE) { + switch (RC_TYPE) { + Refcount.normal => { + refcount_ptr[0] = refcount -% 1; + if (refcount == REFCOUNT_ONE_ISIZE) { + dealloc(@ptrCast([*]u8, refcount_ptr) - (extra_bytes - @sizeOf(usize)), alignment); + } + }, + Refcount.atomic => { var last = @atomicRmw(isize, &refcount_ptr[0], std.builtin.AtomicRmwOp.Sub, 1, Monotonic); if (last == REFCOUNT_ONE_ISIZE) { dealloc(@ptrCast([*]u8, refcount_ptr) - (extra_bytes - @sizeOf(usize)), alignment); } - } - }, - Refcount.none => unreachable, + }, + Refcount.none => unreachable, + } } } diff --git a/crates/compiler/builtins/build.rs b/crates/compiler/builtins/build.rs index 67f4581667..5ee5848151 100644 --- a/crates/compiler/builtins/build.rs +++ b/crates/compiler/builtins/build.rs @@ -4,6 +4,7 @@ use std::ffi::OsStr; use std::fs; use std::io; use std::path::Path; +use std::path::PathBuf; use std::process::Command; use std::str; @@ -53,19 +54,9 @@ fn main() { #[cfg(not(windows))] const BUILTINS_HOST_FILE: &str = "builtins-host.o"; - generate_object_file( - &bitcode_path, - "BUILTINS_HOST_O", - "object", - BUILTINS_HOST_FILE, - ); + generate_object_file(&bitcode_path, "object", BUILTINS_HOST_FILE); - generate_object_file( - &bitcode_path, - "BUILTINS_WASM32_O", - "wasm32-object", - "builtins-wasm32.o", - ); + generate_object_file(&bitcode_path, "wasm32-object", "builtins-wasm32.o"); copy_zig_builtins_to_target_dir(&bitcode_path); @@ -84,21 +75,10 @@ fn main() { .expect("Failed to delete temp dir zig_cache_dir."); } -fn generate_object_file( - bitcode_path: &Path, - env_var_name: &str, - zig_object: &str, - object_file_name: &str, -) { - let out_dir = env::var_os("OUT_DIR").unwrap(); - - let dest_obj_path = Path::new(&out_dir).join(object_file_name); +fn generate_object_file(bitcode_path: &Path, zig_object: &str, object_file_name: &str) { + let dest_obj_path = get_lib_dir().join(object_file_name); let dest_obj = dest_obj_path.to_str().expect("Invalid dest object path"); - // set the variable (e.g. BUILTINS_HOST_O) that is later used in - // `compiler/builtins/src/bitcode.rs` to load the object file - println!("cargo:rustc-env={}={}", env_var_name, dest_obj); - let src_obj_path = bitcode_path.join(object_file_name); let src_obj = src_obj_path.to_str().expect("Invalid src object path"); @@ -109,6 +89,7 @@ fn generate_object_file( &bitcode_path, &zig_executable(), &["build", zig_object, "-Drelease=true"], + 0, ); println!("Moving zig object `{}` to: {}", zig_object, dest_obj); @@ -143,35 +124,36 @@ fn generate_bc_file(bitcode_path: &Path, zig_object: &str, file_name: &str) { &bitcode_path, &zig_executable(), &["build", zig_object, "-Drelease=true"], + 0, ); } -fn copy_zig_builtins_to_target_dir(bitcode_path: &Path) { - // To enable roc to find the zig biultins, we want them to be moved to a folder next to the roc executable. - // So if /roc is the executable. The zig files will be in /lib/*.zig - +pub fn get_lib_dir() -> PathBuf { // Currently we have the OUT_DIR variable which points to `/target/debug/build/roc_builtins-*/out/`. // So we just need to shed a 3 of the outer layers to get `/target/debug/` and then add `lib`. 
let out_dir = env::var_os("OUT_DIR").unwrap(); - let target_profile_dir = Path::new(&out_dir) + + let lib_path = Path::new(&out_dir) .parent() .and_then(|path| path.parent()) .and_then(|path| path.parent()) .unwrap() .join("lib"); + // create dir if it does not exist + fs::create_dir_all(lib_path.clone()).expect("Failed to make lib dir."); + + lib_path +} + +fn copy_zig_builtins_to_target_dir(bitcode_path: &Path) { + // To enable roc to find the zig builtins, we want them to be moved to a folder next to the roc executable. + // So if /roc is the executable, the zig files will be in /lib/*.zig + let target_profile_dir = get_lib_dir(); + let zig_src_dir = bitcode_path.join("src"); - std::fs::create_dir_all(&target_profile_dir).unwrap_or_else(|err| { - panic!( - "Failed to create output library directory for zig bitcode {:?}: {:?}", - target_profile_dir, err - ); - }); - let mut options = fs_extra::dir::CopyOptions::new(); - options.content_only = true; - options.overwrite = true; - fs_extra::dir::copy(&zig_src_dir, &target_profile_dir, &options).unwrap_or_else(|err| { + cp_unless_zig_cache(&zig_src_dir, &target_profile_dir).unwrap_or_else(|err| { panic!( "Failed to copy zig bitcode files {:?} to {:?}: {:?}", zig_src_dir, target_profile_dir, err @@ -179,8 +161,45 @@ fn copy_zig_builtins_to_target_dir(bitcode_path: &Path) { }); } -fn run_command + Copy>(path: P, command_str: &str, args: I) -where +// recursively copy all the .zig files from this directory, but do *not* recurse into zig-cache/ +fn cp_unless_zig_cache(src_dir: &Path, target_dir: &Path) -> io::Result<()> { + // Make sure the destination directory exists before we try to copy anything into it. + std::fs::create_dir_all(&target_dir).unwrap_or_else(|err| { + panic!( + "Failed to create output library directory for zig bitcode {:?}: {:?}", + target_dir, err + ); + }); + + for entry in fs::read_dir(src_dir)? { + let src_path = entry?.path(); + let src_filename = src_path.file_name().unwrap(); + + // Only copy individual files if they have the .zig extension + if src_path.extension().unwrap_or_default() == "zig" { + let dest = target_dir.join(src_filename); + + fs::copy(&src_path, &dest).unwrap_or_else(|err| { + panic!( + "Failed to copy zig bitcode file {:?} to {:?}: {:?}", + src_path, dest, err + ); + }); + } else if src_path.is_dir() && src_filename != "zig-cache" { + // Recursively copy all directories except zig-cache + cp_unless_zig_cache(&src_path, &target_dir.join(src_filename))?; + } + } + + Ok(()) +} + +fn run_command + Copy>( + path: P, + command_str: &str, + args: I, + flaky_fail_counter: usize, +) where I: IntoIterator, S: AsRef, { @@ -199,10 +218,14 @@ where }; // flaky test error that only occurs sometimes inside MacOS ci run - if error_str.contains("unable to build stage1 zig object: FileNotFound") + if error_str.contains("FileNotFound") || error_str.contains("unable to save cached ZIR code") { - run_command(path, command_str, args) + if flaky_fail_counter == 10 { + panic!("{} failed 10 times in a row. 
The following error is unlikely to be a flaky error: {}", command_str, error_str); + } else { + run_command(path, command_str, args, flaky_fail_counter + 1) + } } else { panic!("{} failed: {}", command_str, error_str); } diff --git a/crates/compiler/builtins/roc/Decode.roc b/crates/compiler/builtins/roc/Decode.roc new file mode 100644 index 0000000000..5f47c77021 --- /dev/null +++ b/crates/compiler/builtins/roc/Decode.roc @@ -0,0 +1,79 @@ +interface Decode + exposes [ + DecodeError, + DecodeResult, + Decoder, + Decoding, + DecoderFormatting, + decoder, + u8, + u16, + u32, + u64, + u128, + i8, + i16, + i32, + i64, + i128, + f32, + f64, + dec, + bool, + string, + list, + custom, + decodeWith, + fromBytesPartial, + fromBytes, + ] + imports [ + List, + ] + +DecodeError : [TooShort] + +DecodeResult val : { result : Result val DecodeError, rest : List U8 } + +Decoder val fmt := List U8, fmt -> DecodeResult val | fmt has DecoderFormatting + +Decoding has + decoder : Decoder val fmt | val has Decoding, fmt has DecoderFormatting + +DecoderFormatting has + u8 : Decoder U8 fmt | fmt has DecoderFormatting + u16 : Decoder U16 fmt | fmt has DecoderFormatting + u32 : Decoder U32 fmt | fmt has DecoderFormatting + u64 : Decoder U64 fmt | fmt has DecoderFormatting + u128 : Decoder U128 fmt | fmt has DecoderFormatting + i8 : Decoder I8 fmt | fmt has DecoderFormatting + i16 : Decoder I16 fmt | fmt has DecoderFormatting + i32 : Decoder I32 fmt | fmt has DecoderFormatting + i64 : Decoder I64 fmt | fmt has DecoderFormatting + i128 : Decoder I128 fmt | fmt has DecoderFormatting + f32 : Decoder F32 fmt | fmt has DecoderFormatting + f64 : Decoder F64 fmt | fmt has DecoderFormatting + dec : Decoder Dec fmt | fmt has DecoderFormatting + bool : Decoder Bool fmt | fmt has DecoderFormatting + string : Decoder Str fmt | fmt has DecoderFormatting + list : Decoder elem fmt -> Decoder (List elem) fmt | fmt has DecoderFormatting + +custom : (List U8, fmt -> DecodeResult val) -> Decoder val fmt | fmt has DecoderFormatting +custom = \decode -> @Decoder decode + +decodeWith : List U8, Decoder val fmt, fmt -> DecodeResult val | fmt has DecoderFormatting +decodeWith = \bytes, @Decoder decode, fmt -> decode bytes fmt + +fromBytesPartial : List U8, fmt -> DecodeResult val | val has Decoding, fmt has DecoderFormatting +fromBytesPartial = \bytes, fmt -> decodeWith bytes decoder fmt + +fromBytes : List U8, fmt -> Result val [Leftover (List U8)]DecodeError | val has Decoding, fmt has DecoderFormatting +fromBytes = \bytes, fmt -> + when fromBytesPartial bytes fmt is + { result, rest } -> + if List.isEmpty rest then + when result is + Ok val -> Ok val + Err TooShort -> Err TooShort + else + Err (Leftover rest) diff --git a/crates/compiler/builtins/roc/Json.roc b/crates/compiler/builtins/roc/Json.roc index fdb928feab..c7aca09cc6 100644 --- a/crates/compiler/builtins/roc/Json.roc +++ b/crates/compiler/builtins/roc/Json.roc @@ -2,92 +2,98 @@ interface Json exposes [ Json, toUtf8, + fromUtf8, ] imports [ List, Str, + Encode, Encode.{ Encoder, EncoderFormatting, - custom, appendWith, - u8, - u16, - u32, - u64, - u128, - i8, - i16, - i32, - i64, - i128, - f32, - f64, - dec, - bool, - string, - list, - record, - tag, + }, + Decode, + Decode.{ + DecoderFormatting, }, ] Json := {} has [ EncoderFormatting { - u8, - u16, - u32, - u64, - u128, - i8, - i16, - i32, - i64, - i128, - f32, - f64, - dec, - bool, - string, - list, - record, - tag, + u8: encodeU8, + u16: encodeU16, + u32: encodeU32, + u64: encodeU64, + u128: encodeU128, + i8: encodeI8, + 
i16: encodeI16, + i32: encodeI32, + i64: encodeI64, + i128: encodeI128, + f32: encodeF32, + f64: encodeF64, + dec: encodeDec, + bool: encodeBool, + string: encodeString, + list: encodeList, + record: encodeRecord, + tag: encodeTag, + }, + DecoderFormatting { + u8: decodeU8, + u16: decodeU16, + u32: decodeU32, + u64: decodeU64, + u128: decodeU128, + i8: decodeI8, + i16: decodeI16, + i32: decodeI32, + i64: decodeI64, + i128: decodeI128, + f32: decodeF32, + f64: decodeF64, + dec: decodeDec, + bool: decodeBool, + string: decodeString, + list: decodeList, }, ] toUtf8 = @Json {} +fromUtf8 = @Json {} + numToBytes = \n -> n |> Num.toStr |> Str.toUtf8 -# impl EncoderFormatting for Json -u8 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n) +encodeU8 = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n) -u16 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n) +encodeU16 = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n) -u32 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n) +encodeU32 = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n) -u64 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n) +encodeU64 = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n) -u128 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n) +encodeU128 = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n) -i8 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n) +encodeI8 = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n) -i16 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n) +encodeI16 = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n) -i32 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n) +encodeI32 = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n) -i64 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n) +encodeI64 = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n) -i128 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n) +encodeI128 = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n) -f32 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n) +encodeF32 = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n) -f64 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n) +encodeF64 = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n) -dec = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n) +encodeDec = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n) -bool = \b -> custom \bytes, @Json {} -> +encodeBool = \b -> Encode.custom \bytes, @Json {} -> if b then @@ -95,13 +101,13 @@ bool = \b -> custom \bytes, @Json {} -> else List.concat bytes (Str.toUtf8 "false") -string = \s -> custom \bytes, @Json {} -> +encodeString = \s -> Encode.custom \bytes, @Json {} -> List.append bytes (Num.toU8 '"') |> List.concat (Str.toUtf8 s) |> List.append (Num.toU8 '"') -list = \lst, encodeElem -> - custom \bytes, @Json {} -> +encodeList = \lst, encodeElem -> + Encode.custom \bytes, @Json {} -> writeList = \{ buffer, elemsLeft }, elem -> bufferWithElem = appendWith buffer (encodeElem elem) (@Json {}) bufferWithSuffix = @@ -117,8 +123,8 @@ list = \lst, encodeElem -> List.append withList (Num.toU8 ']') -record = \fields -> - custom \bytes, @Json {} -> 
+encodeRecord = \fields -> + Encode.custom \bytes, @Json {} -> writeRecord = \{ buffer, fieldsLeft }, { key, value } -> bufferWithKeyValue = List.append buffer (Num.toU8 '"') @@ -140,8 +146,8 @@ record = \fields -> List.append bytesWithRecord (Num.toU8 '}') -tag = \name, payload -> - custom \bytes, @Json {} -> +encodeTag = \name, payload -> + Encode.custom \bytes, @Json {} -> # Idea: encode `A v1 v2` as `{"A": [v1, v2]}` writePayload = \{ buffer, itemsLeft }, encoder -> bufferWithValue = appendWith buffer encoder (@Json {}) @@ -165,3 +171,204 @@ tag = \name, payload -> List.append bytesWithPayload (Num.toU8 ']') |> List.append (Num.toU8 '}') + +takeWhile = \list, predicate -> + helper = \{ taken, rest } -> + when List.first rest is + Ok elem -> + if predicate elem then + helper { taken: List.append taken elem, rest: List.split rest 1 |> .others } + else + { taken, rest } + + Err _ -> { taken, rest } + + helper { taken: [], rest: list } + +asciiByte = \b -> Num.toU8 b + +digits = List.range (asciiByte '0') (asciiByte '9' + 1) + +takeDigits = \bytes -> + takeWhile bytes \n -> List.contains digits n + +takeFloat = \bytes -> + { taken: intPart, rest } = takeDigits bytes + + when List.get rest 0 is + Ok 46 -> # 46 = . + { taken: floatPart, rest: afterAll } = takeDigits (List.split rest 1).others + builtFloat = + List.concat (List.append intPart (asciiByte '.')) floatPart + + { taken: builtFloat, rest: afterAll } + + _ -> + { taken: intPart, rest } + +decodeU8 = Decode.custom \bytes, @Json {} -> + { taken, rest } = takeDigits bytes + + when Str.fromUtf8 taken |> Result.try Str.toU8 is + Ok n -> { result: Ok n, rest } + Err _ -> { result: Err TooShort, rest } + +decodeU16 = Decode.custom \bytes, @Json {} -> + { taken, rest } = takeDigits bytes + + when Str.fromUtf8 taken |> Result.try Str.toU16 is + Ok n -> { result: Ok n, rest } + Err _ -> { result: Err TooShort, rest } + +decodeU32 = Decode.custom \bytes, @Json {} -> + { taken, rest } = takeDigits bytes + + when Str.fromUtf8 taken |> Result.try Str.toU32 is + Ok n -> { result: Ok n, rest } + Err _ -> { result: Err TooShort, rest } + +decodeU64 = Decode.custom \bytes, @Json {} -> + { taken, rest } = takeDigits bytes + + when Str.fromUtf8 taken |> Result.try Str.toU64 is + Ok n -> { result: Ok n, rest } + Err _ -> { result: Err TooShort, rest } + +decodeU128 = Decode.custom \bytes, @Json {} -> + { taken, rest } = takeDigits bytes + + when Str.fromUtf8 taken |> Result.try Str.toU128 is + Ok n -> { result: Ok n, rest } + Err _ -> { result: Err TooShort, rest } + +decodeI8 = Decode.custom \bytes, @Json {} -> + { taken, rest } = takeDigits bytes + + when Str.fromUtf8 taken |> Result.try Str.toI8 is + Ok n -> { result: Ok n, rest } + Err _ -> { result: Err TooShort, rest } + +decodeI16 = Decode.custom \bytes, @Json {} -> + { taken, rest } = takeDigits bytes + + when Str.fromUtf8 taken |> Result.try Str.toI16 is + Ok n -> { result: Ok n, rest } + Err _ -> { result: Err TooShort, rest } + +decodeI32 = Decode.custom \bytes, @Json {} -> + { taken, rest } = takeDigits bytes + + when Str.fromUtf8 taken |> Result.try Str.toI32 is + Ok n -> { result: Ok n, rest } + Err _ -> { result: Err TooShort, rest } + +decodeI64 = Decode.custom \bytes, @Json {} -> + { taken, rest } = takeDigits bytes + + when Str.fromUtf8 taken |> Result.try Str.toI64 is + Ok n -> { result: Ok n, rest } + Err _ -> { result: Err TooShort, rest } + +decodeI128 = Decode.custom \bytes, @Json {} -> + { taken, rest } = takeDigits bytes + + when Str.fromUtf8 taken |> Result.try Str.toI128 is + 
Ok n -> { result: Ok n, rest } + Err _ -> { result: Err TooShort, rest } + +decodeF32 = Decode.custom \bytes, @Json {} -> + { taken, rest } = takeFloat bytes + + when Str.fromUtf8 taken |> Result.try Str.toF32 is + Ok n -> { result: Ok n, rest } + Err _ -> { result: Err TooShort, rest } + +decodeF64 = Decode.custom \bytes, @Json {} -> + { taken, rest } = takeFloat bytes + + when Str.fromUtf8 taken |> Result.try Str.toF64 is + Ok n -> { result: Ok n, rest } + Err _ -> { result: Err TooShort, rest } + +decodeDec = Decode.custom \bytes, @Json {} -> + { taken, rest } = takeFloat bytes + + when Str.fromUtf8 taken |> Result.try Str.toDec is + Ok n -> { result: Ok n, rest } + Err _ -> { result: Err TooShort, rest } + +decodeBool = Decode.custom \bytes, @Json {} -> + { before: maybeFalse, others: afterFalse } = List.split bytes 5 + + # Note: this could be more performant by traversing both branches char-by-char. + # Doing that would also make `rest` more correct in the erroring case. + if + maybeFalse == [asciiByte 'f', asciiByte 'a', asciiByte 'l', asciiByte 's', asciiByte 'e'] + then + { result: Ok False, rest: afterFalse } + else + { before: maybeTrue, others: afterTrue } = List.split bytes 4 + + if + maybeTrue == [asciiByte 't', asciiByte 'r', asciiByte 'u', asciiByte 'e'] + then + { result: Ok True, rest: afterTrue } + else + { result: Err TooShort, rest: bytes } + +decodeString = Decode.custom \bytes, @Json {} -> + { before, others: afterStartingQuote } = List.split bytes 1 + + if + before == [asciiByte '"'] + then + # TODO: handle escape sequences + { taken: strSequence, rest } = takeWhile afterStartingQuote \n -> n != asciiByte '"' + + when Str.fromUtf8 strSequence is + Ok s -> + { others: afterEndingQuote } = List.split rest 1 + + { result: Ok s, rest: afterEndingQuote } + + Err _ -> { result: Err TooShort, rest } + else + { result: Err TooShort, rest: bytes } + +decodeList = \decodeElem -> Decode.custom \bytes, @Json {} -> + decodeElems = \chunk, accum -> + when Decode.decodeWith chunk decodeElem (@Json {}) is + { result, rest } -> + when result is + Ok val -> + # TODO: handle spaces before ',' + { before: afterElem, others } = List.split rest 1 + + if + afterElem == [asciiByte ','] + then + decodeElems others (List.append accum val) + else + Done (List.append accum val) rest + + Err e -> Errored e rest + + { before, others: afterStartingBrace } = List.split bytes 1 + + if + before == [asciiByte '['] + then + # TODO: empty lists + when decodeElems afterStartingBrace [] is + Errored e rest -> { result: Err e, rest } + Done vals rest -> + { before: maybeEndingBrace, others: afterEndingBrace } = List.split rest 1 + + if + maybeEndingBrace == [asciiByte ']'] + then + { result: Ok vals, rest: afterEndingBrace } + else + { result: Err TooShort, rest } + else + { result: Err TooShort, rest: bytes } diff --git a/crates/compiler/builtins/roc/Str.roc b/crates/compiler/builtins/roc/Str.roc index 1e3eecdb98..166f5f9a60 100644 --- a/crates/compiler/builtins/roc/Str.roc +++ b/crates/compiler/builtins/roc/Str.roc @@ -334,7 +334,7 @@ lastMatch : Str, Str -> [Some Nat, None] lastMatch = \haystack, needle -> haystackLength = Str.countUtf8Bytes haystack needleLength = Str.countUtf8Bytes needle - lastPossibleIndex = Num.subSaturated haystackLength (needleLength + 1) + lastPossibleIndex = Num.subSaturated haystackLength needleLength lastMatchHelp haystack needle lastPossibleIndex diff --git a/crates/compiler/builtins/src/bitcode.rs b/crates/compiler/builtins/src/bitcode.rs index 612960cd80..94ef5be80d 100644 
--- a/crates/compiler/builtins/src/bitcode.rs +++ b/crates/compiler/builtins/src/bitcode.rs @@ -1,16 +1,29 @@ use roc_module::symbol::Symbol; use roc_target::TargetInfo; +use roc_utils::get_lib_path; use std::ops::Index; -pub const BUILTINS_HOST_OBJ_PATH: &str = env!( - "BUILTINS_HOST_O", - "Env var BUILTINS_HOST_O not found. Is there a problem with the build script?" -); +pub fn get_builtins_host_obj_path() -> String { + let builtins_host_path = get_lib_path() + .expect("Failed to find lib dir.") + .join("builtins-host.o"); -pub const BUILTINS_WASM32_OBJ_PATH: &str = env!( - "BUILTINS_WASM32_O", - "Env var BUILTINS_WASM32_O not found. Is there a problem with the build script?" -); + builtins_host_path + .into_os_string() + .into_string() + .expect("Failed to convert builtins_host_path to str") +} + +pub fn get_builtins_wasm32_obj_path() -> String { + let builtins_wasm32_path = get_lib_path() + .expect("Failed to find lib dir.") + .join("builtins-wasm32.o"); + + builtins_wasm32_path + .into_os_string() + .into_string() + .expect("Failed to convert builtins_wasm32_path to str") +} #[derive(Debug, Default, Copy, Clone)] pub struct IntrinsicName { @@ -51,7 +64,7 @@ impl FloatWidth { } pub const fn alignment_bytes(&self, target_info: TargetInfo) -> u32 { - use roc_target::Architecture; + use roc_target::Architecture::*; use FloatWidth::*; // NOTE: this must never use mem::align_of, because that returns the alignment @@ -60,8 +73,8 @@ impl FloatWidth { match self { F32 => 4, F64 | F128 => match target_info.architecture { - Architecture::X86_64 | Architecture::Aarch64 | Architecture::Wasm32 => 8, - Architecture::X86_32 | Architecture::Aarch32 => 4, + X86_64 | Aarch64 | Wasm32 => 8, + X86_32 | Aarch32 => 4, }, } } diff --git a/crates/compiler/builtins/src/roc.rs b/crates/compiler/builtins/src/roc.rs index 430a9135ed..ae90b4b42a 100644 --- a/crates/compiler/builtins/src/roc.rs +++ b/crates/compiler/builtins/src/roc.rs @@ -12,6 +12,7 @@ pub fn module_source(module_id: ModuleId) -> &'static str { ModuleId::BOX => BOX, ModuleId::BOOL => BOOL, ModuleId::ENCODE => ENCODE, + ModuleId::DECODE => DECODE, ModuleId::JSON => JSON, _ => panic!( "ModuleId {:?} is not part of the standard library", @@ -29,4 +30,5 @@ const SET: &str = include_str!("../roc/Set.roc"); const BOX: &str = include_str!("../roc/Box.roc"); const BOOL: &str = include_str!("../roc/Bool.roc"); const ENCODE: &str = include_str!("../roc/Encode.roc"); +const DECODE: &str = include_str!("../roc/Decode.roc"); const JSON: &str = include_str!("../roc/Json.roc"); diff --git a/crates/compiler/can/Cargo.toml b/crates/compiler/can/Cargo.toml index 79d99d746e..4bdc118bb6 100644 --- a/crates/compiler/can/Cargo.toml +++ b/crates/compiler/can/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "roc_can" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" diff --git a/crates/compiler/can/src/annotation.rs b/crates/compiler/can/src/annotation.rs index 5d4123c4b1..e0cefdebb5 100644 --- a/crates/compiler/can/src/annotation.rs +++ b/crates/compiler/can/src/annotation.rs @@ -566,6 +566,7 @@ fn can_annotation_help( region, alias_needs: alias.type_variables.len() as u8, type_got: args.len() as u8, + alias_kind: alias.kind, }); return error; } diff --git a/crates/compiler/can/src/constraint.rs b/crates/compiler/can/src/constraint.rs index 6755a8b626..3fc45b5320 100644 --- a/crates/compiler/can/src/constraint.rs +++ b/crates/compiler/can/src/constraint.rs @@ -713,7 +713,6 @@ pub struct PatternEq( pub struct 
OpportunisticResolve { /// The specialized type of this lookup, to try to resolve. pub specialization_variable: Variable, - pub specialization_expectation: Index>, /// The ability member to try to resolve. pub member: Symbol, diff --git a/crates/compiler/can/src/def.rs b/crates/compiler/can/src/def.rs index 00a41d12fd..48a5d78e31 100644 --- a/crates/compiler/can/src/def.rs +++ b/crates/compiler/can/src/def.rs @@ -369,6 +369,7 @@ fn canonicalize_alias<'a>( typ: symbol, variable_region: loc_lowercase.region, variable_name: loc_lowercase.value.clone(), + alias_kind: AliasKind::Structural, }); } AliasKind::Opaque => { @@ -2688,6 +2689,7 @@ fn correct_mutual_recursive_type_alias<'a>( env, &mut alias.typ, alias_name, + alias.kind, alias.region, rest, can_still_report_error, @@ -2870,7 +2872,15 @@ fn make_tag_union_recursive_help<'a, 'b>( } _ => { // take care to report a cyclic alias only once (not once for each alias in the cycle) - mark_cyclic_alias(env, typ, symbol, region, others, *can_report_cyclic_error); + mark_cyclic_alias( + env, + typ, + symbol, + alias_kind, + region, + others, + *can_report_cyclic_error, + ); *can_report_cyclic_error = false; Cyclic @@ -2882,6 +2892,7 @@ fn mark_cyclic_alias<'a>( env: &mut Env<'a>, typ: &mut Type, symbol: Symbol, + alias_kind: AliasKind, region: Region, others: Vec, report: bool, @@ -2890,7 +2901,7 @@ fn mark_cyclic_alias<'a>( *typ = Type::Erroneous(problem); if report { - let problem = Problem::CyclicAlias(symbol, region, others); + let problem = Problem::CyclicAlias(symbol, region, others, alias_kind); env.problems.push(problem); } } diff --git a/crates/compiler/can/src/expr.rs b/crates/compiler/can/src/expr.rs index 79df139f5c..5d3ac7f037 100644 --- a/crates/compiler/can/src/expr.rs +++ b/crates/compiler/can/src/expr.rs @@ -1568,13 +1568,6 @@ fn canonicalize_var_lookup( output.references.insert_value_lookup(symbol); if scope.abilities_store.is_ability_member_name(symbol) { - // Is there a shadow implementation with the same name? If so, we might be in - // the def for that shadow. In that case add a value lookup of the shadow impl, - // so that it's marked as possibly-recursive. - if let Some(shadow) = scope.get_member_shadow(symbol) { - output.references.insert_value_lookup(shadow.value); - } - AbilityMember( symbol, Some(scope.abilities_store.fresh_specialization_id()), diff --git a/crates/compiler/can/src/pattern.rs b/crates/compiler/can/src/pattern.rs index 1ebfb7db7e..9fc18f35ed 100644 --- a/crates/compiler/can/src/pattern.rs +++ b/crates/compiler/can/src/pattern.rs @@ -206,7 +206,6 @@ pub fn canonicalize_def_header_pattern<'a>( // Likely a specialization of an ability. 
Some(ability_member_name) => { output.references.insert_bound(symbol); - output.references.insert_value_lookup(ability_member_name); Pattern::AbilityMemberSpecialization { ident: symbol, specializes: ability_member_name, diff --git a/crates/compiler/collections/Cargo.toml b/crates/compiler/collections/Cargo.toml index 8e6134d4a7..357dd18744 100644 --- a/crates/compiler/collections/Cargo.toml +++ b/crates/compiler/collections/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "roc_collections" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" diff --git a/crates/compiler/constrain/Cargo.toml b/crates/compiler/constrain/Cargo.toml index bff476cc8f..e820e4c816 100644 --- a/crates/compiler/constrain/Cargo.toml +++ b/crates/compiler/constrain/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "roc_constrain" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" diff --git a/crates/compiler/constrain/src/expr.rs b/crates/compiler/constrain/src/expr.rs index ad7028ce01..ee74285dec 100644 --- a/crates/compiler/constrain/src/expr.rs +++ b/crates/compiler/constrain/src/expr.rs @@ -422,12 +422,13 @@ pub fn constrain_expr( constraints.lookup(*symbol, expected, region) } &AbilityMember(symbol, specialization_id, specialization_var) => { - // make lookup constraint to lookup this symbol's type in the environment - let store_expected = constraints.equal_types_var( + // Save the expectation in the `specialization_var` so we know what to specialize, then + // lookup the member in the environment. + let store_expected = constraints.store( + expected.get_type_ref().clone(), specialization_var, - expected, - Category::Storage(file!(), line!()), - region, + file!(), + line!(), ); let lookup_constr = constraints.lookup( symbol, @@ -435,13 +436,10 @@ pub fn constrain_expr( region, ); - // Make sure we attempt to resolve the specialization, if we need to. + // Make sure we attempt to resolve the specialization, if we can. if let Some(specialization_id) = specialization_id { env.resolutions_to_make.push(OpportunisticResolve { specialization_variable: specialization_var, - specialization_expectation: constraints.push_expected_type( - Expected::NoExpectation(Type::Variable(specialization_var)), - ), member: symbol, specialization_id, }); diff --git a/crates/compiler/debug_flags/Cargo.toml b/crates/compiler/debug_flags/Cargo.toml index 8aecfb22ec..019e4c27d3 100644 --- a/crates/compiler/debug_flags/Cargo.toml +++ b/crates/compiler/debug_flags/Cargo.toml @@ -1,6 +1,8 @@ [package] name = "roc_debug_flags" -version = "0.1.0" +version = "0.0.1" edition = "2021" +authors = ["The Roc Contributors"] +license = "UPL-1.0" [dependencies] diff --git a/crates/compiler/derive/Cargo.toml b/crates/compiler/derive/Cargo.toml index e4eeba71dc..d0ca82a7a2 100644 --- a/crates/compiler/derive/Cargo.toml +++ b/crates/compiler/derive/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "roc_derive" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" diff --git a/crates/compiler/derive/src/decoding.rs b/crates/compiler/derive/src/decoding.rs new file mode 100644 index 0000000000..04b291cbd5 --- /dev/null +++ b/crates/compiler/derive/src/decoding.rs @@ -0,0 +1,258 @@ +//! Derivers for the `Decoding` ability. 
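This new module mirrors the `Encoding` deriver: a `FlatDecodableKey` names the shape being derived, and `derive_decoder` synthesizes a body for it. Per the comment in `decoder_list` below, the body derived for `List elem` is equivalent to the Roc expression `Decode.custom \bytes, fmt -> Decode.decodeWith bytes (Decode.list Decode.decoder) fmt`, so only the element decoder varies between list types. A rough Rust sketch of the key naming introduced alongside it (simplified; the real deriver returns canonical IR, not strings):

    // Simplified model of the decoder derive key (see derive_key/src/decoding.rs).
    #[derive(Hash, PartialEq, Eq, Debug, Clone)]
    enum FlatDecodableKey {
        List(), // carries one type variable in the real compiler
    }

    fn debug_name(key: &FlatDecodableKey) -> String {
        match key {
            FlatDecodableKey::List() => "list".to_string(),
        }
    }

    fn main() {
        // DeriveKey::Decoder(key) renders its debug name as "decoder_{key}".
        let name = format!("decoder_{}", debug_name(&FlatDecodableKey::List()));
        assert_eq!(name, "decoder_list");
    }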
+ +use roc_can::expr::{AnnotatedMark, ClosureData, Expr, Recursive}; +use roc_can::pattern::Pattern; +use roc_derive_key::decoding::FlatDecodableKey; +use roc_error_macros::internal_error; +use roc_module::called_via::CalledVia; +use roc_module::symbol::Symbol; +use roc_region::all::Loc; +use roc_types::subs::{ + Content, FlatType, GetSubsSlice, LambdaSet, OptVariable, SubsSlice, UnionLambdas, Variable, +}; +use roc_types::types::AliasKind; + +use crate::util::Env; +use crate::{synth_var, DerivedBody}; + +pub(crate) fn derive_decoder( + env: &mut Env<'_>, + key: FlatDecodableKey, + def_symbol: Symbol, +) -> DerivedBody { + let (body, body_type) = match key { + FlatDecodableKey::List() => decoder_list(env, def_symbol), + }; + + let specialization_lambda_sets = + env.get_specialization_lambda_sets(body_type, Symbol::DECODE_DECODER); + + DerivedBody { + body, + body_type, + specialization_lambda_sets, + } +} + +fn decoder_list(env: &mut Env<'_>, _def_symbol: Symbol) -> (Expr, Variable) { + // Build + // + // def_symbol : Decoder (List elem) fmt | elem has Decoding, fmt has DecoderFormatting + // def_symbol = Decode.custom \bytes, fmt -> Decode.decodeWith bytes (Decode.list Decode.decoder) fmt + // + // TODO try to reduce to `Decode.list Decode.decoder` + + use Expr::*; + + // Decode.list Decode.decoder : Decoder (List elem) fmt + let (decode_list_call, this_decode_list_ret_var) = { + // List elem + let elem_var = env.subs.fresh_unnamed_flex_var(); + + // Decode.decoder : Decoder elem fmt | elem has Decoding, fmt has EncoderFormatting + let (elem_decoder, elem_decoder_var) = { + // build `Decode.decoder : Decoder elem fmt` type + // Decoder val fmt | val has Decoding, fmt has EncoderFormatting + let elem_decoder_var = env.import_builtin_symbol_var(Symbol::DECODE_DECODER); + + // set val ~ elem + let val_var = match env.subs.get_content_without_compacting(elem_decoder_var) { + Content::Alias(Symbol::DECODE_DECODER_OPAQUE, vars, _, AliasKind::Opaque) + if vars.type_variables_len == 2 => + { + env.subs.get_subs_slice(vars.type_variables())[0] + } + _ => internal_error!("Decode.decode not an opaque type"), + }; + + env.unify(val_var, elem_var); + + ( + AbilityMember(Symbol::DECODE_DECODER, None, elem_decoder_var), + elem_decoder_var, + ) + }; + + // Build `Decode.list Decode.decoder` type + // Decoder val fmt -[uls]-> Decoder (List val) fmt | fmt has DecoderFormatting + let decode_list_fn_var = env.import_builtin_symbol_var(Symbol::DECODE_LIST); + + // Decoder elem fmt -a-> b + let elem_decoder_var_slice = SubsSlice::insert_into_subs(env.subs, [elem_decoder_var]); + let this_decode_list_clos_var = env.subs.fresh_unnamed_flex_var(); + let this_decode_list_ret_var = env.subs.fresh_unnamed_flex_var(); + let this_decode_list_fn_var = synth_var( + env.subs, + Content::Structure(FlatType::Func( + elem_decoder_var_slice, + this_decode_list_clos_var, + this_decode_list_ret_var, + )), + ); + + // Decoder val fmt -[uls]-> Decoder (List val) fmt | fmt has DecoderFormatting + // ~ Decoder elem fmt -a -> b + env.unify(decode_list_fn_var, this_decode_list_fn_var); + + let decode_list_member = AbilityMember(Symbol::DECODE_LIST, None, this_decode_list_fn_var); + let decode_list_fn = Box::new(( + decode_list_fn_var, + Loc::at_zero(decode_list_member), + this_decode_list_clos_var, + this_decode_list_ret_var, + )); + + let decode_list_call = Call( + decode_list_fn, + vec![(elem_decoder_var, Loc::at_zero(elem_decoder))], + CalledVia::Space, + ); + + (decode_list_call, this_decode_list_ret_var) + }; + + let bytes_sym 
= env.new_symbol("bytes"); + let bytes_var = env.subs.fresh_unnamed_flex_var(); + let fmt_sym = env.new_symbol("fmt"); + let fmt_var = env.subs.fresh_unnamed_flex_var(); + + // Decode.decodeWith bytes (Decode.list Decode.decoder) fmt : DecodeResult (List elem) + let (decode_with_call, decode_result_list_elem_var) = { + // Decode.decodeWith : List U8, Decoder val fmt, fmt -> DecodeResult val | fmt has DecoderFormatting + let decode_with_type = env.import_builtin_symbol_var(Symbol::DECODE_DECODE_WITH); + + // Decode.decodeWith : bytes, Decoder (List elem) fmt, fmt -> DecoderResult (List val) + let this_decode_with_var_slice = + SubsSlice::insert_into_subs(env.subs, [bytes_var, this_decode_list_ret_var, fmt_var]); + let this_decode_with_clos_var = env.subs.fresh_unnamed_flex_var(); + let this_decode_with_ret_var = env.subs.fresh_unnamed_flex_var(); + let this_decode_with_fn_var = synth_var( + env.subs, + Content::Structure(FlatType::Func( + this_decode_with_var_slice, + this_decode_with_clos_var, + this_decode_with_ret_var, + )), + ); + + // List U8, Decoder val fmt, fmt -> DecodeResult val | fmt has DecoderFormatting + // ~ bytes, Decoder (List elem) fmt, fmt -> DecoderResult (List val) + env.unify(decode_with_type, this_decode_with_fn_var); + + let decode_with_var = Var(Symbol::DECODE_DECODE_WITH); + let decode_with_fn = Box::new(( + this_decode_with_fn_var, + Loc::at_zero(decode_with_var), + this_decode_with_clos_var, + this_decode_with_ret_var, + )); + let decode_with_call = Call( + decode_with_fn, + vec![ + // bytes (Decode.list Decode.decoder) fmt + (bytes_var, Loc::at_zero(Var(bytes_sym))), + (this_decode_list_ret_var, Loc::at_zero(decode_list_call)), + (fmt_var, Loc::at_zero(Var(fmt_sym))), + ], + CalledVia::Space, + ); + + (decode_with_call, this_decode_with_ret_var) + }; + + // \bytes, fmt -> Decode.decodeWith bytes (Decode.list Decode.decoder) fmt + let (custom_lambda, custom_var) = { + let fn_name = env.new_symbol("custom"); + + // Create fn_var for ambient capture; we fix it up below. 
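// The function variable and its lambda set are mutually referential: the
// lambda set records `fn_var` as its `ambient_function`, while `fn_var`'s
// eventual `Func` content names that lambda set as its closure variable.
// So the code first allocates `fn_var` with throwaway `Content::Error`
// content, builds the lambda set pointing back at it, and only then
// overwrites `fn_var` with the real function type via `subs.set_content`.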
+ let fn_var = synth_var(env.subs, Content::Error); + + // -[[fn_name]]-> + let fn_name_labels = UnionLambdas::insert_into_subs(env.subs, [(fn_name, vec![])]); + let fn_clos_var = synth_var( + env.subs, + Content::LambdaSet(LambdaSet { + solved: fn_name_labels, + recursion_var: OptVariable::NONE, + unspecialized: SubsSlice::default(), + ambient_function: fn_var, + }), + ); + + // bytes, fmt -[[fn_name]]-> DecoderResult (List elem) + let args_slice = SubsSlice::insert_into_subs(env.subs, vec![bytes_var, fmt_var]); + env.subs.set_content( + fn_var, + Content::Structure(FlatType::Func( + args_slice, + fn_clos_var, + decode_result_list_elem_var, + )), + ); + + // \bytes, fmt -[[fn_name]]-> Decode.decodeWith bytes (Decode.list Decode.decoder) fmt + let clos = Closure(ClosureData { + function_type: fn_var, + closure_type: fn_clos_var, + return_type: decode_result_list_elem_var, + name: fn_name, + captured_symbols: vec![], + recursive: Recursive::NotRecursive, + arguments: vec![ + ( + bytes_var, + AnnotatedMark::known_exhaustive(), + Loc::at_zero(Pattern::Identifier(bytes_sym)), + ), + ( + fmt_var, + AnnotatedMark::known_exhaustive(), + Loc::at_zero(Pattern::Identifier(fmt_sym)), + ), + ], + loc_body: Box::new(Loc::at_zero(decode_with_call)), + }); + + (clos, fn_var) + }; + + // Decode.custom \bytes, fmt -> Decode.decodeWith bytes (Decode.list Decode.decoder) fmt + let (decode_custom_call, decoder_var) = { + // (List U8, fmt -> DecodeResult val) -> Decoder val fmt | fmt has DecoderFormatting + let decode_custom_type = env.import_builtin_symbol_var(Symbol::DECODE_CUSTOM); + + // (List U8, fmt -> DecodeResult (List elem)) -> Decoder (List elem) fmt + let this_decode_custom_args = SubsSlice::insert_into_subs(env.subs, [custom_var]); + let this_decode_custom_clos_var = env.subs.fresh_unnamed_flex_var(); + let this_decode_custom_ret_var = env.subs.fresh_unnamed_flex_var(); + let this_decode_custom_fn_var = synth_var( + env.subs, + Content::Structure(FlatType::Func( + this_decode_custom_args, + this_decode_custom_clos_var, + this_decode_custom_ret_var, + )), + ); + + // (List U8, fmt -> DecodeResult val) -> Decoder val fmt | fmt has DecoderFormatting + // ~ (List U8, fmt -> DecodeResult (List elem)) -> Decoder (List elem) fmt + env.unify(decode_custom_type, this_decode_custom_fn_var); + + let decode_custom_var = Var(Symbol::DECODE_CUSTOM); + let decode_custom_fn = Box::new(( + this_decode_custom_fn_var, + Loc::at_zero(decode_custom_var), + this_decode_custom_clos_var, + this_decode_custom_ret_var, + )); + let decode_custom_call = Call( + decode_custom_fn, + vec![(custom_var, Loc::at_zero(custom_lambda))], + CalledVia::Space, + ); + + (decode_custom_call, this_decode_custom_ret_var) + }; + + (decode_custom_call, decoder_var) +} diff --git a/crates/compiler/derive/src/encoding.rs b/crates/compiler/derive/src/encoding.rs index d5d03ac942..006b99e925 100644 --- a/crates/compiler/derive/src/encoding.rs +++ b/crates/compiler/derive/src/encoding.rs @@ -2,173 +2,24 @@ use std::iter::once; -use roc_can::abilities::SpecializationLambdaSets; use roc_can::expr::{ AnnotatedMark, ClosureData, Expr, Field, Recursive, WhenBranch, WhenBranchPattern, }; -use roc_can::module::ExposedByModule; use roc_can::pattern::Pattern; use roc_collections::SendMap; use roc_derive_key::encoding::FlatEncodableKey; -use roc_error_macros::internal_error; use roc_module::called_via::CalledVia; use roc_module::ident::Lowercase; -use roc_module::symbol::{IdentIds, ModuleId, Symbol}; +use roc_module::symbol::Symbol; use 
roc_region::all::{Loc, Region}; use roc_types::subs::{ - instantiate_rigids, Content, ExhaustiveMark, FlatType, GetSubsSlice, LambdaSet, OptVariable, - RecordFields, RedundantMark, Subs, SubsSlice, UnionLambdas, UnionTags, Variable, - VariableSubsSlice, + Content, ExhaustiveMark, FlatType, GetSubsSlice, LambdaSet, OptVariable, RecordFields, + RedundantMark, SubsSlice, UnionLambdas, UnionTags, Variable, VariableSubsSlice, }; use roc_types::types::RecordField; -use crate::{synth_var, DerivedBody, DERIVED_SYNTH}; - -pub(crate) struct Env<'a> { - /// NB: This **must** be subs for the derive module! - pub subs: &'a mut Subs, - pub exposed_types: &'a ExposedByModule, - pub derived_ident_ids: &'a mut IdentIds, -} - -impl Env<'_> { - fn new_symbol(&mut self, name_hint: &str) -> Symbol { - if cfg!(any( - debug_assertions, - test, - feature = "debug-derived-symbols" - )) { - let mut i = 0; - let debug_name = loop { - i += 1; - let name = if i == 1 { - name_hint.to_owned() - } else { - format!("{}{}", name_hint, i) - }; - if self.derived_ident_ids.get_id(&name).is_none() { - break name; - } - }; - - let ident_id = self.derived_ident_ids.get_or_insert(&debug_name); - - Symbol::new(DERIVED_SYNTH, ident_id) - } else { - self.unique_symbol() - } - } - - fn unique_symbol(&mut self) -> Symbol { - let ident_id = self.derived_ident_ids.gen_unique(); - Symbol::new(DERIVED_SYNTH, ident_id) - } - - fn import_encode_symbol(&mut self, symbol: Symbol) -> Variable { - debug_assert_eq!(symbol.module_id(), ModuleId::ENCODE); - - let encode_types = &self - .exposed_types - .get(&ModuleId::ENCODE) - .unwrap() - .exposed_types_storage_subs; - let storage_var = encode_types.stored_vars_by_symbol.get(&symbol).unwrap(); - let imported = encode_types - .storage_subs - .export_variable_to_directly_to_use_site(self.subs, *storage_var); - - instantiate_rigids(self.subs, imported.variable); - - imported.variable - } - - fn unify(&mut self, left: Variable, right: Variable) { - use roc_unify::unify::{unify, Env, Mode, Unified}; - - let unified = unify(&mut Env::new(self.subs), left, right, Mode::EQ); - - match unified { - Unified::Success { - vars: _, - must_implement_ability: _, - lambda_sets_to_specialize, - extra_metadata: _, - } => { - if !lambda_sets_to_specialize.is_empty() { - internal_error!("Did not expect derivers to need to specialize unspecialized lambda sets, but we got some: {:?}", lambda_sets_to_specialize) - } - } - Unified::Failure(..) | Unified::BadType(..) => { - internal_error!("Unification failed in deriver - that's a deriver bug!") - } - } - } - - fn get_specialization_lambda_sets( - &mut self, - specialization_type: Variable, - ability_member: Symbol, - ) -> SpecializationLambdaSets { - use roc_unify::unify::{unify_introduced_ability_specialization, Env, Mode, Unified}; - - let member_signature = self.import_encode_symbol(ability_member); - - let unified = unify_introduced_ability_specialization( - &mut Env::new(self.subs), - member_signature, - specialization_type, - Mode::EQ, - ); - - match unified { - Unified::Success { - vars: _, - must_implement_ability: _, - lambda_sets_to_specialize: _lambda_sets_to_specialize, - extra_metadata: specialization_lsets, - } => { - let specialization_lsets: SpecializationLambdaSets = specialization_lsets - .0 - .into_iter() - .map(|((spec_member, region), var)| { - debug_assert_eq!(spec_member, ability_member); - (region, var) - }) - .collect(); - - // Since we're doing `{foo} ~ a | a has Encoding`, we may see "lambda sets to - // specialize" for e.g. 
`{foo}:toEncoder:1`, but these are actually just the - // specialization lambda sets, so we don't need to do any extra work! - // - // If there are other lambda sets to specialize in here, that's unexpected, because - // that means we would have been deriving something like `toEncoder {foo: bar}`, - // and now seen that we needed `toEncoder bar` where `bar` is a concrete type. But - // we only expect `bar` to polymorphic at this stage! - // - // TODO: it would be better if `unify` could prune these for us. See also - // https://github.com/rtfeldman/roc/issues/3207; that is a blocker for this TODO. - #[cfg(debug_assertions)] - { - for (spec_var, lambda_sets) in _lambda_sets_to_specialize.drain() { - for lambda_set in lambda_sets { - let belongs_to_specialized_lambda_sets = - specialization_lsets.iter().any(|(_, var)| { - self.subs.get_root_key_without_compacting(*var) - == self.subs.get_root_key_without_compacting(lambda_set) - }); - debug_assert!(belongs_to_specialized_lambda_sets, - "Did not expect derivers to need to specialize unspecialized lambda sets, but we got one: {:?} for {:?}", lambda_set, spec_var) - } - } - } - specialization_lsets - } - Unified::Failure(..) | Unified::BadType(..) => { - internal_error!("Unification failed in deriver - that's a deriver bug!") - } - } - } -} +use crate::util::Env; +use crate::{synth_var, DerivedBody}; pub(crate) fn derive_to_encoder( env: &mut Env<'_>, @@ -253,7 +104,7 @@ fn to_encoder_list(env: &mut Env<'_>, fn_name: Symbol) -> (Expr, Variable) { // build `toEncoder elem` type // val -[uls]-> Encoder fmt | fmt has EncoderFormatting - let to_encoder_fn_var = env.import_encode_symbol(Symbol::ENCODE_TO_ENCODER); + let to_encoder_fn_var = env.import_builtin_symbol_var(Symbol::ENCODE_TO_ENCODER); // elem -[clos]-> t1 let to_encoder_clos_var = env.subs.fresh_unnamed_flex_var(); // clos @@ -333,7 +184,7 @@ fn to_encoder_list(env: &mut Env<'_>, fn_name: Symbol) -> (Expr, Variable) { // build `Encode.list lst (\elem -> Encode.toEncoder elem)` type // List e, (e -> Encoder fmt) -[uls]-> Encoder fmt | fmt has EncoderFormatting - let encode_list_fn_var = env.import_encode_symbol(Symbol::ENCODE_LIST); + let encode_list_fn_var = env.import_builtin_symbol_var(Symbol::ENCODE_LIST); // List elem, to_elem_encoder_fn_var -[clos]-> t1 let this_encode_list_args_slice = @@ -469,7 +320,7 @@ fn to_encoder_record( // build `toEncoder rcd.a` type // val -[uls]-> Encoder fmt | fmt has EncoderFormatting - let to_encoder_fn_var = env.import_encode_symbol(Symbol::ENCODE_TO_ENCODER); + let to_encoder_fn_var = env.import_builtin_symbol_var(Symbol::ENCODE_TO_ENCODER); // (typeof rcd.a) -[clos]-> t1 let to_encoder_clos_var = env.subs.fresh_unnamed_flex_var(); // clos @@ -549,7 +400,7 @@ fn to_encoder_record( // build `Encode.record [ { key: .., value: ..}, .. 
]` type // List { key : Str, value : Encoder fmt } -[uls]-> Encoder fmt | fmt has EncoderFormatting - let encode_record_fn_var = env.import_encode_symbol(Symbol::ENCODE_RECORD); + let encode_record_fn_var = env.import_builtin_symbol_var(Symbol::ENCODE_RECORD); // fields_list_var -[clos]-> t1 let fields_list_var_slice = @@ -687,7 +538,8 @@ fn to_encoder_tag_union( .map(|(&sym, &sym_var)| { // build `toEncoder v1` type // expected: val -[uls]-> Encoder fmt | fmt has EncoderFormatting - let to_encoder_fn_var = env.import_encode_symbol(Symbol::ENCODE_TO_ENCODER); + let to_encoder_fn_var = + env.import_builtin_symbol_var(Symbol::ENCODE_TO_ENCODER); // wanted: t1 -[clos]-> t' let var_slice_of_sym_var = @@ -747,7 +599,7 @@ fn to_encoder_tag_union( // build `Encode.tag "A" [ ... ]` type // expected: Str, List (Encoder fmt) -[uls]-> Encoder fmt | fmt has EncoderFormatting - let encode_tag_fn_var = env.import_encode_symbol(Symbol::ENCODE_TAG); + let encode_tag_fn_var = env.import_builtin_symbol_var(Symbol::ENCODE_TAG); // wanted: Str, List whole_encoders_var -[clos]-> t' let this_encode_tag_args_var_slice = VariableSubsSlice::insert_into_subs( @@ -904,7 +756,7 @@ fn wrap_in_encode_custom( // build `Encode.appendWith bytes encoder fmt` type // expected: Encode.appendWith : List U8, Encoder fmt, fmt -[appendWith]-> List U8 | fmt has EncoderFormatting - let append_with_fn_var = env.import_encode_symbol(Symbol::ENCODE_APPEND_WITH); + let append_with_fn_var = env.import_builtin_symbol_var(Symbol::ENCODE_APPEND_WITH); // wanted: Encode.appendWith : List U8, encoder_var, fmt -[clos]-> List U8 | fmt has EncoderFormatting let this_append_with_args_var_slice = @@ -995,7 +847,7 @@ fn wrap_in_encode_custom( // Encode.custom \bytes, fmt -> Encode.appendWith bytes encoder fmt // // expected: Encode.custom : (List U8, fmt -> List U8) -> Encoder fmt | fmt has EncoderFormatting - let custom_fn_var = env.import_encode_symbol(Symbol::ENCODE_CUSTOM); + let custom_fn_var = env.import_builtin_symbol_var(Symbol::ENCODE_CUSTOM); // wanted: Encode.custom : fn_var -[clos]-> t' let this_custom_args_var_slice = VariableSubsSlice::insert_into_subs(env.subs, [fn_var]); diff --git a/crates/compiler/derive/src/lib.rs b/crates/compiler/derive/src/lib.rs index 107525a14a..45b8263649 100644 --- a/crates/compiler/derive/src/lib.rs +++ b/crates/compiler/derive/src/lib.rs @@ -14,9 +14,13 @@ use roc_region::all::Loc; use roc_types::subs::{ copy_import_to, Content, Descriptor, Mark, OptVariable, Rank, Subs, Variable, }; +use util::Env; +mod decoding; mod encoding; +mod util; + pub(crate) const DERIVED_SYNTH: ModuleId = ModuleId::DERIVED_SYNTH; pub fn synth_var(subs: &mut Subs, content: Content) -> Variable { @@ -56,20 +60,23 @@ fn build_derived_body( derived_symbol: Symbol, derive_key: DeriveKey, ) -> (Def, SpecializationLambdaSets) { + let mut env = Env { + subs: derived_subs, + exposed_types: exposed_by_module, + derived_ident_ids, + }; + let DerivedBody { body, body_type, specialization_lambda_sets, } = match derive_key { DeriveKey::ToEncoder(to_encoder_key) => { - let mut env = encoding::Env { - subs: derived_subs, - exposed_types: exposed_by_module, - derived_ident_ids, - }; encoding::derive_to_encoder(&mut env, to_encoder_key, derived_symbol) } - DeriveKey::Decoding => todo!(), + DeriveKey::Decoder(decoder_key) => { + decoding::derive_decoder(&mut env, decoder_key, derived_symbol) + } }; let def = Def { @@ -174,18 +181,18 @@ impl DerivedModule { &mut self, gen_subs: &mut Subs, should_load_def: impl Fn(Symbol) -> bool, - ) -> VecMap 
{ + ) -> VecMap { self.map .values() .filter_map(|(symbol, def, _)| { if should_load_def(*symbol) { - let (_new_expr_var, new_expr) = roc_can::copy::deep_copy_expr_across_subs( + let (new_expr_var, new_expr) = roc_can::copy::deep_copy_expr_across_subs( &mut self.subs, gen_subs, def.expr_var, &def.loc_expr.value, ); - Some((*symbol, new_expr)) + Some((*symbol, (new_expr, new_expr_var))) } else { None } diff --git a/crates/compiler/derive/src/util.rs b/crates/compiler/derive/src/util.rs new file mode 100644 index 0000000000..e9661c8847 --- /dev/null +++ b/crates/compiler/derive/src/util.rs @@ -0,0 +1,155 @@ +use roc_can::{abilities::SpecializationLambdaSets, module::ExposedByModule}; +use roc_error_macros::internal_error; +use roc_module::symbol::{IdentIds, Symbol}; +use roc_types::subs::{instantiate_rigids, Subs, Variable}; + +use crate::DERIVED_SYNTH; + +/// An environment representing the Derived_synth module, for use in building derived +/// implementations. +pub(crate) struct Env<'a> { + /// NB: This **must** be subs for the derive module! + pub subs: &'a mut Subs, + pub exposed_types: &'a ExposedByModule, + pub derived_ident_ids: &'a mut IdentIds, +} + +impl Env<'_> { + pub fn new_symbol(&mut self, name_hint: &str) -> Symbol { + if cfg!(any( + debug_assertions, + test, + feature = "debug-derived-symbols" + )) { + let mut i = 0; + let debug_name = loop { + i += 1; + let name = if i == 1 { + name_hint.to_owned() + } else { + format!("{}{}", name_hint, i) + }; + if self.derived_ident_ids.get_id(&name).is_none() { + break name; + } + }; + + let ident_id = self.derived_ident_ids.get_or_insert(&debug_name); + + Symbol::new(DERIVED_SYNTH, ident_id) + } else { + self.unique_symbol() + } + } + + pub fn unique_symbol(&mut self) -> Symbol { + let ident_id = self.derived_ident_ids.gen_unique(); + Symbol::new(DERIVED_SYNTH, ident_id) + } + + pub fn import_builtin_symbol_var(&mut self, symbol: Symbol) -> Variable { + let module_id = symbol.module_id(); + debug_assert!(module_id.is_builtin()); + + let module_types = &self + .exposed_types + .get(&module_id) + .unwrap() + .exposed_types_storage_subs; + let storage_var = module_types.stored_vars_by_symbol.get(&symbol).unwrap(); + let imported = module_types + .storage_subs + .export_variable_to_directly_to_use_site(self.subs, *storage_var); + + instantiate_rigids(self.subs, imported.variable); + + imported.variable + } + + pub fn unify(&mut self, left: Variable, right: Variable) { + use roc_unify::unify::{unify, Env, Mode, Unified}; + + let unified = unify(&mut Env::new(self.subs), left, right, Mode::EQ); + + match unified { + Unified::Success { + vars: _, + must_implement_ability: _, + lambda_sets_to_specialize, + extra_metadata: _, + } => { + if !lambda_sets_to_specialize.is_empty() { + internal_error!("Did not expect derivers to need to specialize unspecialized lambda sets, but we got some: {:?}", lambda_sets_to_specialize) + } + } + Unified::Failure(..) | Unified::BadType(..) 
=> { + internal_error!("Unification failed in deriver - that's a deriver bug!") + } + } + } + + pub fn get_specialization_lambda_sets( + &mut self, + specialization_type: Variable, + ability_member: Symbol, + ) -> SpecializationLambdaSets { + use roc_unify::unify::{unify_introduced_ability_specialization, Env, Mode, Unified}; + + let member_signature = self.import_builtin_symbol_var(ability_member); + + let unified = unify_introduced_ability_specialization( + &mut Env::new(self.subs), + member_signature, + specialization_type, + Mode::EQ, + ); + + match unified { + Unified::Success { + vars: _, + must_implement_ability: _, + lambda_sets_to_specialize: _lambda_sets_to_specialize, + extra_metadata: specialization_lsets, + } => { + let specialization_lsets: SpecializationLambdaSets = specialization_lsets + .0 + .into_iter() + .map(|((spec_member, region), var)| { + debug_assert_eq!(spec_member, ability_member); + (region, var) + }) + .collect(); + + // Since we're doing `{foo} ~ a | a has Encoding`, we may see "lambda sets to + // specialize" for e.g. `{foo}:toEncoder:1`, but these are actually just the + // specialization lambda sets, so we don't need to do any extra work! + // + // If there are other lambda sets to specialize in here, that's unexpected, because + // that means we would have been deriving something like `toEncoder {foo: bar}`, + // and now seen that we needed `toEncoder bar` where `bar` is a concrete type. But + // we only expect `bar` to polymorphic at this stage! + // + // TODO: it would be better if `unify` could prune these for us. See also + // https://github.com/rtfeldman/roc/issues/3207; that is a blocker for this TODO. + #[cfg(debug_assertions)] + { + for (spec_var, lambda_sets) in _lambda_sets_to_specialize.drain() { + for lambda_set in lambda_sets { + let belongs_to_specialized_lambda_sets = + specialization_lsets.iter().any(|(_, var)| { + self.subs.get_root_key_without_compacting(*var) + == self.subs.get_root_key_without_compacting(lambda_set) + }); + debug_assert!(belongs_to_specialized_lambda_sets, + "Did not expect derivers to need to specialize unspecialized lambda sets, but we got one: {:?} for {:?}", lambda_set, spec_var) + } + } + } + specialization_lsets + } + Unified::Failure(..) | Unified::BadType(..) 
=> { + internal_error!("Unification failed in deriver - that's a deriver bug!") + } + } + } +} diff --git a/crates/compiler/derive_key/Cargo.toml b/crates/compiler/derive_key/Cargo.toml index e674089371..a69689fd32 100644 --- a/crates/compiler/derive_key/Cargo.toml +++ b/crates/compiler/derive_key/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "roc_derive_key" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" diff --git a/crates/compiler/derive_key/src/decoding.rs b/crates/compiler/derive_key/src/decoding.rs new file mode 100644 index 0000000000..73dfdee74d --- /dev/null +++ b/crates/compiler/derive_key/src/decoding.rs @@ -0,0 +1,84 @@ +use roc_module::symbol::Symbol; +use roc_types::subs::{Content, FlatType, Subs, Variable}; + +use crate::DeriveError; + +#[derive(Hash)] +pub enum FlatDecodable { + Immediate(Symbol), + Key(FlatDecodableKey), +} + +#[derive(Hash, PartialEq, Eq, Debug, Clone)] +pub enum FlatDecodableKey { + List(/* takes one variable */), +} + +impl FlatDecodableKey { + pub(crate) fn debug_name(&self) -> String { + match self { + FlatDecodableKey::List() => "list".to_string(), + } + } +} + +impl FlatDecodable { + pub(crate) fn from_var(subs: &Subs, var: Variable) -> Result { + use DeriveError::*; + use FlatDecodable::*; + match *subs.get_content_without_compacting(var) { + Content::Structure(flat_type) => match flat_type { + FlatType::Apply(sym, _) => match sym { + Symbol::LIST_LIST => Ok(Key(FlatDecodableKey::List())), + Symbol::STR_STR => Ok(Immediate(Symbol::DECODE_STRING)), + _ => Err(Underivable), + }, + FlatType::Record(_fields, _ext) => { + Err(Underivable) // yet + } + FlatType::TagUnion(_tags, _ext) | FlatType::RecursiveTagUnion(_, _tags, _ext) => { + Err(Underivable) // yet + } + FlatType::FunctionOrTagUnion(_name_index, _, _) => { + Err(Underivable) // yet + } + FlatType::EmptyRecord => { + Err(Underivable) // yet + } + FlatType::EmptyTagUnion => { + Err(Underivable) // yet + } + // + FlatType::Erroneous(_) => Err(Underivable), + FlatType::Func(..) => Err(Underivable), + }, + Content::Alias(sym, _, real_var, _) => match sym { + Symbol::NUM_U8 | Symbol::NUM_UNSIGNED8 => Ok(Immediate(Symbol::DECODE_U8)), + Symbol::NUM_U16 | Symbol::NUM_UNSIGNED16 => Ok(Immediate(Symbol::DECODE_U16)), + Symbol::NUM_U32 | Symbol::NUM_UNSIGNED32 => Ok(Immediate(Symbol::DECODE_U32)), + Symbol::NUM_U64 | Symbol::NUM_UNSIGNED64 => Ok(Immediate(Symbol::DECODE_U64)), + Symbol::NUM_U128 | Symbol::NUM_UNSIGNED128 => Ok(Immediate(Symbol::DECODE_U128)), + Symbol::NUM_I8 | Symbol::NUM_SIGNED8 => Ok(Immediate(Symbol::DECODE_I8)), + Symbol::NUM_I16 | Symbol::NUM_SIGNED16 => Ok(Immediate(Symbol::DECODE_I16)), + Symbol::NUM_I32 | Symbol::NUM_SIGNED32 => Ok(Immediate(Symbol::DECODE_I32)), + Symbol::NUM_I64 | Symbol::NUM_SIGNED64 => Ok(Immediate(Symbol::DECODE_I64)), + Symbol::NUM_I128 | Symbol::NUM_SIGNED128 => Ok(Immediate(Symbol::DECODE_I128)), + Symbol::NUM_DEC | Symbol::NUM_DECIMAL => Ok(Immediate(Symbol::DECODE_DEC)), + Symbol::NUM_F32 | Symbol::NUM_BINARY32 => Ok(Immediate(Symbol::DECODE_F32)), + Symbol::NUM_F64 | Symbol::NUM_BINARY64 => Ok(Immediate(Symbol::DECODE_F64)), + // NB: I believe it is okay to unwrap opaques here because derivers are only used + // by the backend, and the backend treats opaques like structural aliases. + _ => Self::from_var(subs, real_var), + }, + Content::RangedNumber(_) => Err(Underivable), + // + Content::RecursionVar { .. 
} => Err(Underivable), + Content::Error => Err(Underivable), + Content::FlexVar(_) + | Content::RigidVar(_) + | Content::FlexAbleVar(_, _) + | Content::RigidAbleVar(_, _) => Err(UnboundVar), + Content::LambdaSet(_) => Err(Underivable), + } + } +} diff --git a/crates/compiler/derive_key/src/lib.rs b/crates/compiler/derive_key/src/lib.rs index f2ebb3e806..8c6f97caaf 100644 --- a/crates/compiler/derive_key/src/lib.rs +++ b/crates/compiler/derive_key/src/lib.rs @@ -13,8 +13,10 @@ //! For these reasons the content keying is based on a strategy as well, which are the variants of //! [`DeriveKey`]. +pub mod decoding; pub mod encoding; +use decoding::{FlatDecodable, FlatDecodableKey}; use encoding::{FlatEncodable, FlatEncodableKey}; use roc_module::symbol::Symbol; @@ -33,15 +35,14 @@ pub enum DeriveError { #[repr(u8)] pub enum DeriveKey { ToEncoder(FlatEncodableKey), - #[allow(unused)] - Decoding, + Decoder(FlatDecodableKey), } impl DeriveKey { pub fn debug_name(&self) -> String { match self { DeriveKey::ToEncoder(key) => format!("toEncoder_{}", key.debug_name()), - DeriveKey::Decoding => todo!(), + DeriveKey::Decoder(key) => format!("decoder_{}", key.debug_name()), } } } @@ -57,11 +58,40 @@ pub enum Derived { Key(DeriveKey), } -impl Derived { - pub fn encoding(subs: &Subs, var: Variable) -> Result { - match encoding::FlatEncodable::from_var(subs, var)? { - FlatEncodable::Immediate(imm) => Ok(Derived::Immediate(imm)), - FlatEncodable::Key(repr) => Ok(Derived::Key(DeriveKey::ToEncoder(repr))), +/// The builtin ability member to derive. +#[derive(Clone, Copy)] +pub enum DeriveBuiltin { + ToEncoder, + Decoder, +} + +impl TryFrom for DeriveBuiltin { + type Error = Symbol; + + fn try_from(value: Symbol) -> Result { + match value { + Symbol::ENCODE_TO_ENCODER => Ok(DeriveBuiltin::ToEncoder), + Symbol::DECODE_DECODER => Ok(DeriveBuiltin::Decoder), + _ => Err(value), + } + } +} + +impl Derived { + pub fn builtin( + builtin: DeriveBuiltin, + subs: &Subs, + var: Variable, + ) -> Result { + match builtin { + DeriveBuiltin::ToEncoder => match encoding::FlatEncodable::from_var(subs, var)? { + FlatEncodable::Immediate(imm) => Ok(Derived::Immediate(imm)), + FlatEncodable::Key(repr) => Ok(Derived::Key(DeriveKey::ToEncoder(repr))), + }, + DeriveBuiltin::Decoder => match decoding::FlatDecodable::from_var(subs, var)? 
{ + FlatDecodable::Immediate(imm) => Ok(Derived::Immediate(imm)), + FlatDecodable::Key(repr) => Ok(Derived::Key(DeriveKey::Decoder(repr))), + }, } } } diff --git a/crates/compiler/exhaustive/Cargo.toml b/crates/compiler/exhaustive/Cargo.toml index 8d2aece6ed..5a1fa3a271 100644 --- a/crates/compiler/exhaustive/Cargo.toml +++ b/crates/compiler/exhaustive/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "roc_exhaustive" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" diff --git a/crates/compiler/fmt/Cargo.toml b/crates/compiler/fmt/Cargo.toml index a402a79d81..2659ae29a5 100644 --- a/crates/compiler/fmt/Cargo.toml +++ b/crates/compiler/fmt/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "roc_fmt" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" diff --git a/crates/compiler/fmt/src/def.rs b/crates/compiler/fmt/src/def.rs index 42f5f9792a..d5ba1a4f6f 100644 --- a/crates/compiler/fmt/src/def.rs +++ b/crates/compiler/fmt/src/def.rs @@ -288,13 +288,18 @@ fn fmt_expect<'a, 'buf>( is_multiline: bool, indent: u16, ) { + buf.ensure_ends_with_newline(); + buf.indent(indent); + buf.push_str("expect"); + let return_indent = if is_multiline { + buf.newline(); indent + INDENT } else { + buf.spaces(1); indent }; - buf.push_str("expect"); condition.format(buf, return_indent); } diff --git a/crates/compiler/fmt/src/expr.rs b/crates/compiler/fmt/src/expr.rs index 53867e929f..0ac5825f18 100644 --- a/crates/compiler/fmt/src/expr.rs +++ b/crates/compiler/fmt/src/expr.rs @@ -815,16 +815,24 @@ fn fmt_expect<'a, 'buf>( is_multiline: bool, indent: u16, ) { + buf.ensure_ends_with_newline(); + buf.indent(indent); + buf.push_str("expect"); + let return_indent = if is_multiline { + buf.newline(); indent + INDENT } else { + buf.spaces(1); indent }; - buf.push_str("expect"); condition.format(buf, return_indent); - buf.push('\n'); - continuation.format(buf, return_indent); + + // Always put a blank line after the `expect` line(s) + buf.ensure_ends_with_blank_line(); + + continuation.format(buf, indent); } fn fmt_if<'a, 'buf>( diff --git a/crates/compiler/fmt/tests/test_fmt.rs b/crates/compiler/fmt/tests/test_fmt.rs index 0844fe772f..dc8ca43b13 100644 --- a/crates/compiler/fmt/tests/test_fmt.rs +++ b/crates/compiler/fmt/tests/test_fmt.rs @@ -5287,7 +5287,7 @@ mod test_fmt { eq1, }, ] - + 0 "# ), @@ -5399,6 +5399,64 @@ mod test_fmt { ); } + #[test] + fn expect_single_line() { + expr_formats_same(indoc!( + r#" + x = 5 + + expect x == y + + expect y == z + + 42 + "# + )); + + module_formats_same(indoc!( + r#" + interface Foo exposes [] imports [] + + expect x == y + + expect y == z + + foo = bar + "# + )); + } + + #[test] + fn expect_multiline() { + expr_formats_same(indoc!( + r#" + x = 5 + + expect + foo bar + |> baz + + 42 + "# + )); + + module_formats_same(indoc!( + r#" + interface Foo exposes [] imports [] + + expect + foo bar + |> baz + + expect + blah + etc + + foo = bar + "# + )); + } + // this is a parse error atm // #[test] // fn multiline_apply() { diff --git a/crates/compiler/gen_dev/Cargo.toml b/crates/compiler/gen_dev/Cargo.toml index 3393d043ed..17e652c01b 100644 --- a/crates/compiler/gen_dev/Cargo.toml +++ b/crates/compiler/gen_dev/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "roc_gen_dev" description = "The development backend for the Roc compiler" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" diff --git 
a/crates/compiler/gen_dev/src/generic64/x86_64.rs b/crates/compiler/gen_dev/src/generic64/x86_64.rs index cdd515221a..666ce48be4 100644 --- a/crates/compiler/gen_dev/src/generic64/x86_64.rs +++ b/crates/compiler/gen_dev/src/generic64/x86_64.rs @@ -1264,12 +1264,12 @@ impl X86_64Assembler { } } const REX: u8 = 0x40; -const REX_W: u8 = REX + 0x8; +const REX_W: u8 = REX | 0x8; #[inline(always)] fn add_rm_extension(reg: T, byte: u8) -> u8 { if reg.value() > 7 { - byte + 1 + byte | 1 } else { byte } @@ -1283,7 +1283,7 @@ fn add_opcode_extension(reg: X86_64GeneralReg, byte: u8) -> u8 { #[inline(always)] fn add_reg_extension(reg: T, byte: u8) -> u8 { if reg.value() > 7 { - byte + 4 + byte | 4 } else { byte } @@ -1300,7 +1300,7 @@ fn binop_reg64_reg64( let rex = add_reg_extension(src, rex); let dst_mod = dst as u8 % 8; let src_mod = (src as u8 % 8) << 3; - buf.extend(&[rex, op_code, 0xC0 + dst_mod + src_mod]); + buf.extend(&[rex, op_code, 0xC0 | dst_mod | src_mod]); } #[inline(always)] @@ -1315,7 +1315,7 @@ fn extended_binop_reg64_reg64( let rex = add_reg_extension(src, rex); let dst_mod = dst as u8 % 8; let src_mod = (src as u8 % 8) << 3; - buf.extend(&[rex, op_code1, op_code2, 0xC0 + dst_mod + src_mod]); + buf.extend(&[rex, op_code1, op_code2, 0xC0 | dst_mod | src_mod]); } // Below here are the functions for all of the assembly instructions. @@ -1330,7 +1330,7 @@ fn add_reg64_imm32(buf: &mut Vec<'_, u8>, dst: X86_64GeneralReg, imm: i32) { let rex = add_rm_extension(dst, REX_W); let dst_mod = dst as u8 % 8; buf.reserve(7); - buf.extend(&[rex, 0x81, 0xC0 + dst_mod]); + buf.extend(&[rex, 0x81, 0xC0 | dst_mod]); buf.extend(&imm.to_le_bytes()); } @@ -1350,13 +1350,13 @@ fn addsd_freg64_freg64(buf: &mut Vec<'_, u8>, dst: X86_64FloatReg, src: X86_64Fl if dst_high || src_high { buf.extend(&[ 0xF2, - 0x40 + ((dst_high as u8) << 2) + (src_high as u8), + 0x40 | ((dst_high as u8) << 2) | (src_high as u8), 0x0F, 0x58, - 0xC0 + (dst_mod << 3) + (src_mod), + 0xC0 | (dst_mod << 3) | (src_mod), ]) } else { - buf.extend(&[0xF2, 0x0F, 0x58, 0xC0 + (dst_mod << 3) + (src_mod)]) + buf.extend(&[0xF2, 0x0F, 0x58, 0xC0 | (dst_mod << 3) | (src_mod)]) } } @@ -1370,13 +1370,13 @@ fn andpd_freg64_freg64(buf: &mut Vec<'_, u8>, dst: X86_64FloatReg, src: X86_64Fl if dst_high || src_high { buf.extend(&[ 0x66, - 0x40 + ((dst_high as u8) << 2) + (src_high as u8), + 0x40 | ((dst_high as u8) << 2) | (src_high as u8), 0x0F, 0x54, - 0xC0 + (dst_mod << 3) + (src_mod), + 0xC0 | (dst_mod << 3) | (src_mod), ]) } else { - buf.extend(&[0x66, 0x0F, 0x54, 0xC0 + (dst_mod << 3) + (src_mod)]) + buf.extend(&[0x66, 0x0F, 0x54, 0xC0 | (dst_mod << 3) | (src_mod)]) } } @@ -1385,7 +1385,7 @@ fn andpd_freg64_freg64(buf: &mut Vec<'_, u8>, dst: X86_64FloatReg, src: X86_64Fl fn and_reg64_imm8(buf: &mut Vec<'_, u8>, dst: X86_64GeneralReg, imm: i8) { let rex = add_rm_extension(dst, REX_W); let dst_mod = dst as u8 % 8; - buf.extend(&[rex, 0x83, 0xE0 + dst_mod, imm as u8]); + buf.extend(&[rex, 0x83, 0xE0 | dst_mod, imm as u8]); } /// `CMOVL r64,r/m64` -> Move if less (SF≠ OF). @@ -1395,7 +1395,7 @@ fn cmovl_reg64_reg64(buf: &mut Vec<'_, u8>, dst: X86_64GeneralReg, src: X86_64Ge let rex = add_rm_extension(src, rex); let dst_mod = (dst as u8 % 8) << 3; let src_mod = src as u8 % 8; - buf.extend(&[rex, 0x0F, 0x4C, 0xC0 + dst_mod + src_mod]); + buf.extend(&[rex, 0x0F, 0x4C, 0xC0 | dst_mod | src_mod]); } /// `CMP r/m64,i32` -> Compare i32 to r/m64. 
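In these x86-64 assembler hunks, `+` becomes `|` wherever REX prefixes and ModRM bytes are assembled. The fields being combined occupy disjoint bit positions, so OR states the intent directly and, unlike addition, cannot carry into a neighbouring field. A self-contained sketch of the same packing (illustrative only, not the backend's code):

    const REX: u8 = 0x40;
    const REX_W: u8 = REX | 0x08; // 0b0100_1000: W = 1 selects 64-bit operand size

    /// ModRM with mod = 11 (register-direct): 0b11_rrr_bbb.
    fn modrm_reg_rm(reg: u8, rm: u8) -> u8 {
        debug_assert!(reg < 8 && rm < 8);
        0xC0 | (reg << 3) | rm
    }

    fn main() {
        // `add rax, rdx` (opcode 0x01, ADD r/m64, r64): reg = rdx (2), rm = rax (0)
        let encoding = [REX_W, 0x01, modrm_reg_rm(2, 0)];
        assert_eq!(encoding, [0x48, 0x01, 0xD0]);
    }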
@@ -1404,7 +1404,7 @@ fn cmp_reg64_imm32(buf: &mut Vec<'_, u8>, dst: X86_64GeneralReg, imm: i32) { let rex = add_rm_extension(dst, REX_W); let dst_mod = dst as u8 % 8; buf.reserve(7); - buf.extend(&[rex, 0x81, 0xF8 + dst_mod]); + buf.extend(&[rex, 0x81, 0xF8 | dst_mod]); buf.extend(&imm.to_le_bytes()); } @@ -1452,7 +1452,7 @@ fn mov_reg64_imm32(buf: &mut Vec<'_, u8>, dst: X86_64GeneralReg, imm: i32) { let rex = add_rm_extension(dst, REX_W); let dst_mod = dst as u8 % 8; buf.reserve(7); - buf.extend(&[rex, 0xC7, 0xC0 + dst_mod]); + buf.extend(&[rex, 0xC7, 0xC0 | dst_mod]); buf.extend(&imm.to_le_bytes()); } @@ -1465,7 +1465,7 @@ fn mov_reg64_imm64(buf: &mut Vec<'_, u8>, dst: X86_64GeneralReg, imm: i64) { let rex = add_opcode_extension(dst, REX_W); let dst_mod = dst as u8 % 8; buf.reserve(10); - buf.extend(&[rex, 0xB8 + dst_mod]); + buf.extend(&[rex, 0xB8 | dst_mod]); buf.extend(&imm.to_le_bytes()); } } @@ -1501,7 +1501,7 @@ fn mov_base64_offset32_reg64( let src_mod = (src as u8 % 8) << 3; let base_mod = base as u8 % 8; buf.reserve(8); - buf.extend(&[rex, 0x89, 0x80 + src_mod + base_mod]); + buf.extend(&[rex, 0x89, 0x80 | src_mod | base_mod]); // Using RSP or R12 requires a secondary index byte. if base == X86_64GeneralReg::RSP || base == X86_64GeneralReg::R12 { buf.push(0x24); @@ -1522,7 +1522,7 @@ fn mov_reg64_base64_offset32( let dst_mod = (dst as u8 % 8) << 3; let base_mod = base as u8 % 8; buf.reserve(8); - buf.extend(&[rex, 0x8B, 0x80 + dst_mod + base_mod]); + buf.extend(&[rex, 0x8B, 0x80 | dst_mod | base_mod]); // Using RSP or R12 requires a secondary index byte. if base == X86_64GeneralReg::RSP || base == X86_64GeneralReg::R12 { buf.push(0x24); @@ -1543,7 +1543,7 @@ fn movzx_reg64_base8_offset32( let dst_mod = (dst as u8 % 8) << 3; let base_mod = base as u8 % 8; buf.reserve(9); - buf.extend(&[rex, 0x0F, 0xB6, 0x80 + dst_mod + base_mod]); + buf.extend(&[rex, 0x0F, 0xB6, 0x80 | dst_mod | base_mod]); // Using RSP or R12 requires a secondary index byte. 
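Several of these memory-operand encoders push an extra 0x24 byte when the base register is RSP or R12, as the recurring comment notes. A short sketch of why (illustrative only): with a memory operand, rm = 0b100 in ModRM does not select RSP directly; it signals that a SIB byte follows, and SIB 0x24 (scale 00, index 100 = none, base 100 = RSP, or R12 with REX.B) restores plain [base + disp32] addressing for those two registers.

    // SIB byte with no index register, for an RSP/R12 base (low three bits 0b100).
    fn sib_no_index(base_low3: u8) -> u8 {
        debug_assert_eq!(base_low3, 0b100); // only RSP/R12 need the extra byte here
        (0b00 << 6) | (0b100 << 3) | base_low3
    }

    fn main() {
        assert_eq!(sib_no_index(0b100), 0x24);
    }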
if base == X86_64GeneralReg::RSP || base == X86_64GeneralReg::R12 { buf.push(0x24); @@ -1571,13 +1571,13 @@ fn raw_movsd_freg64_freg64(buf: &mut Vec<'_, u8>, dst: X86_64FloatReg, src: X86_ if dst_high || src_high { buf.extend(&[ 0xF2, - 0x40 + ((dst_high as u8) << 2) + (src_high as u8), + 0x40 | ((dst_high as u8) << 2) | (src_high as u8), 0x0F, 0x10, - 0xC0 + (dst_mod << 3) + (src_mod), + 0xC0 | (dst_mod << 3) | (src_mod), ]) } else { - buf.extend(&[0xF2, 0x0F, 0x10, 0xC0 + (dst_mod << 3) + (src_mod)]) + buf.extend(&[0xF2, 0x0F, 0x10, 0xC0 | (dst_mod << 3) | (src_mod)]) } } @@ -1587,10 +1587,10 @@ fn movss_freg32_rip_offset32(buf: &mut Vec<'_, u8>, dst: X86_64FloatReg, offset: let dst_mod = dst as u8 % 8; if dst as u8 > 7 { buf.reserve(9); - buf.extend(&[0xF3, 0x44, 0x0F, 0x10, 0x05 + (dst_mod << 3)]); + buf.extend(&[0xF3, 0x44, 0x0F, 0x10, 0x05 | (dst_mod << 3)]); } else { buf.reserve(8); - buf.extend(&[0xF3, 0x0F, 0x10, 0x05 + (dst_mod << 3)]); + buf.extend(&[0xF3, 0x0F, 0x10, 0x05 | (dst_mod << 3)]); } buf.extend(&offset.to_le_bytes()); } @@ -1601,10 +1601,10 @@ fn movsd_freg64_rip_offset32(buf: &mut Vec<'_, u8>, dst: X86_64FloatReg, offset: let dst_mod = dst as u8 % 8; if dst as u8 > 7 { buf.reserve(9); - buf.extend(&[0xF2, 0x44, 0x0F, 0x10, 0x05 + (dst_mod << 3)]); + buf.extend(&[0xF2, 0x44, 0x0F, 0x10, 0x05 | (dst_mod << 3)]); } else { buf.reserve(8); - buf.extend(&[0xF2, 0x0F, 0x10, 0x05 + (dst_mod << 3)]); + buf.extend(&[0xF2, 0x0F, 0x10, 0x05 | (dst_mod << 3)]); } buf.extend(&offset.to_le_bytes()); } @@ -1626,7 +1626,7 @@ fn movsd_base64_offset32_freg64( if src as u8 > 7 || base as u8 > 7 { buf.push(rex); } - buf.extend(&[0x0F, 0x11, 0x80 + src_mod + base_mod]); + buf.extend(&[0x0F, 0x11, 0x80 | src_mod | base_mod]); // Using RSP or R12 requires a secondary index byte. if base == X86_64GeneralReg::RSP || base == X86_64GeneralReg::R12 { buf.push(0x24); @@ -1651,7 +1651,7 @@ fn movsd_freg64_base64_offset32( if dst as u8 > 7 || base as u8 > 7 { buf.push(rex); } - buf.extend(&[0x0F, 0x10, 0x80 + dst_mod + base_mod]); + buf.extend(&[0x0F, 0x10, 0x80 | dst_mod | base_mod]); // Using RSP or R12 requires a secondary index byte. 
if base == X86_64GeneralReg::RSP || base == X86_64GeneralReg::R12 { buf.push(0x24); @@ -1664,7 +1664,7 @@ fn movsd_freg64_base64_offset32( fn neg_reg64(buf: &mut Vec<'_, u8>, reg: X86_64GeneralReg) { let rex = add_rm_extension(reg, REX_W); let reg_mod = reg as u8 % 8; - buf.extend(&[rex, 0xF7, 0xD8 + reg_mod]); + buf.extend(&[rex, 0xF7, 0xD8 | reg_mod]); } // helper function for `set*` instructions @@ -1677,10 +1677,10 @@ fn set_reg64_help(op_code: u8, buf: &mut Vec<'_, u8>, reg: X86_64GeneralReg) { let reg_mod = reg as u8 % 8; use X86_64GeneralReg::*; match reg { - RAX | RCX | RDX | RBX => buf.extend(&[0x0F, op_code, 0xC0 + reg_mod]), - RSP | RBP | RSI | RDI => buf.extend(&[REX, 0x0F, op_code, 0xC0 + reg_mod]), + RAX | RCX | RDX | RBX => buf.extend(&[0x0F, op_code, 0xC0 | reg_mod]), + RSP | RBP | RSI | RDI => buf.extend(&[REX, 0x0F, op_code, 0xC0 | reg_mod]), R8 | R9 | R10 | R11 | R12 | R13 | R14 | R15 => { - buf.extend(&[REX + 1, 0x0F, op_code, 0xC0 + reg_mod]) + buf.extend(&[REX | 1, 0x0F, op_code, 0xC0 | reg_mod]) } } @@ -1702,7 +1702,7 @@ fn cvtsi2_help( let mod1 = (dst.value() % 8) << 3; let mod2 = src.value() % 8; - buf.extend(&[op_code1, rex, 0x0F, op_code2, 0xC0 + mod1 + mod2]) + buf.extend(&[op_code1, rex, 0x0F, op_code2, 0xC0 | mod1 | mod2]) } #[inline(always)] @@ -1716,7 +1716,7 @@ fn cvtsx2_help( let mod1 = (dst.value() % 8) << 3; let mod2 = src.value() % 8; - buf.extend(&[op_code1, 0x0F, op_code2, 0xC0 + mod1 + mod2]) + buf.extend(&[op_code1, 0x0F, op_code2, 0xC0 | mod1 | mod2]) } /// `SETE r/m64` -> Set Byte on Condition - zero/equal (ZF=1) @@ -1794,7 +1794,7 @@ fn sub_reg64_imm32(buf: &mut Vec<'_, u8>, dst: X86_64GeneralReg, imm: i32) { let rex = add_rm_extension(dst, REX_W); let dst_mod = dst as u8 % 8; buf.reserve(7); - buf.extend(&[rex, 0x81, 0xE8 + dst_mod]); + buf.extend(&[rex, 0x81, 0xE8 | dst_mod]); buf.extend(&imm.to_le_bytes()); } @@ -1810,9 +1810,9 @@ fn pop_reg64(buf: &mut Vec<'_, u8>, reg: X86_64GeneralReg) { let reg_mod = reg as u8 % 8; if reg as u8 > 7 { let rex = add_opcode_extension(reg, REX); - buf.extend(&[rex, 0x58 + reg_mod]); + buf.extend(&[rex, 0x58 | reg_mod]); } else { - buf.push(0x58 + reg_mod); + buf.push(0x58 | reg_mod); } } @@ -1822,9 +1822,9 @@ fn push_reg64(buf: &mut Vec<'_, u8>, reg: X86_64GeneralReg) { let reg_mod = reg as u8 % 8; if reg as u8 > 7 { let rex = add_opcode_extension(reg, REX); - buf.extend(&[rex, 0x50 + reg_mod]); + buf.extend(&[rex, 0x50 | reg_mod]); } else { - buf.push(0x50 + reg_mod); + buf.push(0x50 | reg_mod); } } diff --git a/crates/compiler/gen_llvm/Cargo.toml b/crates/compiler/gen_llvm/Cargo.toml index 3df6623306..a5da18532d 100644 --- a/crates/compiler/gen_llvm/Cargo.toml +++ b/crates/compiler/gen_llvm/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "roc_gen_llvm" description = "The LLVM backend for the Roc compiler" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" diff --git a/crates/compiler/gen_llvm/src/llvm/build.rs b/crates/compiler/gen_llvm/src/llvm/build.rs index fa8cbbd64b..383157a103 100644 --- a/crates/compiler/gen_llvm/src/llvm/build.rs +++ b/crates/compiler/gen_llvm/src/llvm/build.rs @@ -4220,10 +4220,16 @@ pub fn build_procedures<'a, 'ctx, 'env>( env: &Env<'a, 'ctx, 'env>, opt_level: OptLevel, procedures: MutMap<(Symbol, ProcLayout<'a>), roc_mono::ir::Proc<'a>>, - entry_point: EntryPoint<'a>, + opt_entry_point: Option>, debug_output_file: Option<&Path>, ) { - build_procedures_help(env, opt_level, procedures, entry_point, debug_output_file); + 
build_procedures_help( + env, + opt_level, + procedures, + opt_entry_point, + debug_output_file, + ); } pub fn build_wasm_test_wrapper<'a, 'ctx, 'env>( @@ -4236,7 +4242,7 @@ pub fn build_wasm_test_wrapper<'a, 'ctx, 'env>( env, opt_level, procedures, - entry_point, + Some(entry_point), Some(Path::new("/tmp/test.ll")), ); @@ -4253,7 +4259,7 @@ pub fn build_procedures_return_main<'a, 'ctx, 'env>( env, opt_level, procedures, - entry_point, + Some(entry_point), Some(Path::new("/tmp/test.ll")), ); @@ -4265,13 +4271,13 @@ pub fn build_procedures_expose_expects<'a, 'ctx, 'env>( opt_level: OptLevel, expects: &[Symbol], procedures: MutMap<(Symbol, ProcLayout<'a>), roc_mono::ir::Proc<'a>>, - entry_point: EntryPoint<'a>, + opt_entry_point: Option>, ) -> Vec<'a, &'a str> { let mod_solutions = build_procedures_help( env, opt_level, procedures, - entry_point, + opt_entry_point, Some(Path::new("/tmp/test.ll")), ); @@ -4333,7 +4339,7 @@ fn build_procedures_help<'a, 'ctx, 'env>( env: &Env<'a, 'ctx, 'env>, opt_level: OptLevel, procedures: MutMap<(Symbol, ProcLayout<'a>), roc_mono::ir::Proc<'a>>, - entry_point: EntryPoint<'a>, + opt_entry_point: Option>, debug_output_file: Option<&Path>, ) -> &'a ModSolutions { let mut layout_ids = roc_mono::layout::LayoutIds::default(); @@ -4341,7 +4347,7 @@ fn build_procedures_help<'a, 'ctx, 'env>( let it = procedures.iter().map(|x| x.1); - let solutions = match roc_alias_analysis::spec_program(opt_level, entry_point, it) { + let solutions = match roc_alias_analysis::spec_program(opt_level, opt_entry_point, it) { Err(e) => panic!("Error in alias analysis: {}", e), Ok(solutions) => solutions, }; @@ -6951,21 +6957,30 @@ fn build_int_binop<'a, 'ctx, 'env>( // but llvm normalizes to the above ordering in -O3 let zero = rhs.get_type().const_zero(); let neg_1 = rhs.get_type().const_int(-1i64 as u64, false); + let is_signed = int_width.is_signed(); let special_block = env.context.append_basic_block(parent, "special_block"); let default_block = env.context.append_basic_block(parent, "default_block"); let cont_block = env.context.append_basic_block(parent, "branchcont"); - bd.build_switch( - rhs, - default_block, - &[(zero, special_block), (neg_1, special_block)], - ); + if is_signed { + bd.build_switch( + rhs, + default_block, + &[(zero, special_block), (neg_1, special_block)], + ) + } else { + bd.build_switch(rhs, default_block, &[(zero, special_block)]) + }; let condition_rem = { bd.position_at_end(default_block); - let rem = bd.build_int_signed_rem(lhs, rhs, "int_rem"); + let rem = if is_signed { + bd.build_int_signed_rem(lhs, rhs, "int_rem") + } else { + bd.build_int_unsigned_rem(lhs, rhs, "uint_rem") + }; let result = bd.build_int_compare(IntPredicate::EQ, rem, zero, "is_zero_rem"); bd.build_unconditional_branch(cont_block); @@ -6976,10 +6991,15 @@ fn build_int_binop<'a, 'ctx, 'env>( bd.position_at_end(special_block); let is_zero = bd.build_int_compare(IntPredicate::EQ, lhs, zero, "is_zero_lhs"); - let is_neg_one = - bd.build_int_compare(IntPredicate::EQ, rhs, neg_1, "is_neg_one_rhs"); - let result = bd.build_or(is_neg_one, is_zero, "cond"); + let result = if is_signed { + let is_neg_one = + bd.build_int_compare(IntPredicate::EQ, rhs, neg_1, "is_neg_one_rhs"); + + bd.build_or(is_neg_one, is_zero, "cond") + } else { + is_zero + }; bd.build_unconditional_branch(cont_block); diff --git a/crates/compiler/gen_llvm/src/llvm/expect.rs b/crates/compiler/gen_llvm/src/llvm/expect.rs index 446637d996..087590ef58 100644 --- a/crates/compiler/gen_llvm/src/llvm/expect.rs +++ 
b/crates/compiler/gen_llvm/src/llvm/expect.rs @@ -1,17 +1,21 @@ -use crate::llvm::bitcode::call_bitcode_fn; -use crate::llvm::build::{store_roc_value, Env}; +use crate::debug_info_init; +use crate::llvm::bitcode::call_str_bitcode_fn; +use crate::llvm::build::{get_tag_id, store_roc_value, Env}; use crate::llvm::build_list::{self, incrementing_elem_loop}; -use crate::llvm::convert::basic_type_from_layout; +use crate::llvm::convert::{basic_type_from_layout, RocUnion}; use inkwell::builder::Builder; -use inkwell::types::BasicType; -use inkwell::values::{BasicValueEnum, IntValue, PointerValue}; +use inkwell::module::Linkage; +use inkwell::types::{BasicMetadataTypeEnum, BasicType}; +use inkwell::values::{BasicValueEnum, FunctionValue, IntValue, PointerValue}; use inkwell::AddressSpace; use roc_builtins::bitcode; use roc_module::symbol::Symbol; use roc_mono::layout::{Builtin, Layout, LayoutIds, UnionLayout}; use roc_region::all::Region; -use super::build::{load_symbol_and_layout, Scope}; +use super::build::{ + add_func, load_roc_value, load_symbol_and_layout, use_roc_value, FunctionSpec, Scope, +}; #[derive(Debug, Clone, Copy)] struct Cursors<'ctx> { @@ -204,19 +208,19 @@ fn build_clone<'a, 'ctx, 'env>( when_recursive, ), - Layout::Struct { - field_layouts: _, .. - } => { - if layout.safe_to_memcpy() { - build_copy(env, ptr, cursors.offset, value) - } else { - todo!() - } - } + Layout::Struct { field_layouts, .. } => build_clone_struct( + env, + layout_ids, + ptr, + cursors, + value, + field_layouts, + when_recursive, + ), Layout::LambdaSet(_) => unreachable!("cannot compare closures"), - Layout::Union(_union_layout) => { + Layout::Union(union_layout) => { if layout.safe_to_memcpy() { let ptr = unsafe { env.builder @@ -230,24 +234,50 @@ fn build_clone<'a, 'ctx, 'env>( store_roc_value(env, layout, ptr, value); - let width = value.get_type().size_of().unwrap(); - env.builder - .build_int_add(cursors.offset, width, "new_offset") + cursors.extra_offset } else { - todo!() + build_clone_tag( + env, + layout_ids, + ptr, + cursors, + value, + union_layout, + WhenRecursive::Loop(union_layout), + ) } } - /* - Layout::Boxed(inner_layout) => build_box_eq( - env, - layout_ids, - when_recursive, - lhs_layout, - inner_layout, - lhs_val, - rhs_val, - ), + Layout::Boxed(inner_layout) => { + // write the offset + build_copy(env, ptr, cursors.offset, cursors.extra_offset.into()); + + let source = value.into_pointer_value(); + let value = load_roc_value(env, *inner_layout, source, "inner"); + + let inner_width = env + .ptr_int() + .const_int(inner_layout.stack_size(env.target_info) as u64, false); + + let new_extra = env + .builder + .build_int_add(cursors.offset, inner_width, "new_extra"); + + let cursors = Cursors { + offset: cursors.extra_offset, + extra_offset: new_extra, + }; + + build_clone( + env, + layout_ids, + ptr, + cursors, + value, + *inner_layout, + when_recursive, + ) + } Layout::RecursivePointer => match when_recursive { WhenRecursive::Unreachable => { @@ -260,27 +290,249 @@ fn build_clone<'a, 'ctx, 'env>( let bt = basic_type_from_layout(env, &layout); // cast the i64 pointer to a pointer to block of memory - let field1_cast = env - .builder - .build_bitcast(lhs_val, bt, "i64_to_opaque") - .into_pointer_value(); + let field1_cast = env.builder.build_bitcast(value, bt, "i64_to_opaque"); - let field2_cast = env - .builder - .build_bitcast(rhs_val, bt, "i64_to_opaque") - .into_pointer_value(); - - build_tag_eq( + build_clone_tag( env, layout_ids, + ptr, + cursors, + field1_cast, + union_layout, 
WhenRecursive::Loop(union_layout), - &union_layout, - field1_cast.into(), - field2_cast.into(), ) } }, - */ + } +} + +#[allow(clippy::too_many_arguments)] +fn build_clone_struct<'a, 'ctx, 'env>( + env: &Env<'a, 'ctx, 'env>, + layout_ids: &mut LayoutIds<'a>, + ptr: PointerValue<'ctx>, + cursors: Cursors<'ctx>, + value: BasicValueEnum<'ctx>, + field_layouts: &[Layout<'a>], + when_recursive: WhenRecursive<'a>, +) -> IntValue<'ctx> { + let layout = Layout::struct_no_name_order(field_layouts); + + if layout.safe_to_memcpy() { + build_copy(env, ptr, cursors.offset, value) + } else { + let mut cursors = cursors; + + let structure = value.into_struct_value(); + + for (i, field_layout) in field_layouts.iter().enumerate() { + let field = env + .builder + .build_extract_value(structure, i as _, "extract") + .unwrap(); + + let field = use_roc_value(env, *field_layout, field, "field"); + + let new_extra = build_clone( + env, + layout_ids, + ptr, + cursors, + field, + *field_layout, + when_recursive, + ); + + let field_width = env + .ptr_int() + .const_int(field_layout.stack_size(env.target_info) as u64, false); + + cursors.extra_offset = new_extra; + cursors.offset = env + .builder + .build_int_add(cursors.offset, field_width, "offset"); + } + + cursors.extra_offset + } +} + +#[allow(clippy::too_many_arguments)] +fn build_clone_tag<'a, 'ctx, 'env>( + env: &Env<'a, 'ctx, 'env>, + layout_ids: &mut LayoutIds<'a>, + ptr: PointerValue<'ctx>, + cursors: Cursors<'ctx>, + value: BasicValueEnum<'ctx>, + union_layout: UnionLayout<'a>, + when_recursive: WhenRecursive<'a>, +) -> IntValue<'ctx> { + let layout = Layout::Union(union_layout); + let layout_id = layout_ids.get(Symbol::CLONE, &layout); + let fn_name = layout_id.to_symbol_string(Symbol::CLONE, &env.interns); + + let function = match env.module.get_function(fn_name.as_str()) { + Some(function_value) => function_value, + None => { + let block = env.builder.get_insert_block().expect("to be in a function"); + let di_location = env.builder.get_current_debug_location().unwrap(); + + let function_type = env.ptr_int().fn_type( + &[ + env.context.i8_type().ptr_type(AddressSpace::Generic).into(), + env.ptr_int().into(), + env.ptr_int().into(), + BasicMetadataTypeEnum::from(value.get_type()), + ], + false, + ); + + let function_value = add_func( + env.context, + env.module, + &fn_name, + FunctionSpec::known_fastcc(function_type), + Linkage::Private, + ); + + let subprogram = env.new_subprogram(&fn_name); + function_value.set_subprogram(subprogram); + + env.dibuilder.finalize(); + + build_clone_tag_help( + env, + layout_ids, + union_layout, + when_recursive, + function_value, + ); + + env.builder.position_at_end(block); + env.builder + .set_current_debug_location(env.context, di_location); + + function_value + } + }; + + let call = env.builder.build_call( + function, + &[ + ptr.into(), + cursors.offset.into(), + cursors.extra_offset.into(), + value.into(), + ], + "build_clone_tag", + ); + + call.set_call_convention(function.get_call_conventions()); + + let result = call.try_as_basic_value().left().unwrap(); + + result.into_int_value() +} + +#[allow(clippy::too_many_arguments)] +fn build_clone_tag_help<'a, 'ctx, 'env>( + env: &Env<'a, 'ctx, 'env>, + layout_ids: &mut LayoutIds<'a>, + union_layout: UnionLayout<'a>, + when_recursive: WhenRecursive<'a>, + fn_val: FunctionValue<'ctx>, +) { + use bumpalo::collections::Vec; + + let context = &env.context; + let builder = env.builder; + + // Add a basic block for the entry point + let entry = 
context.append_basic_block(fn_val, "entry"); + + builder.position_at_end(entry); + + debug_info_init!(env, fn_val); + + // Add args to scope + // let arg_symbol = Symbol::ARG_1; + // tag_value.set_name(arg_symbol.as_str(&env.interns)); + + let mut it = fn_val.get_param_iter(); + + let ptr = it.next().unwrap().into_pointer_value(); + let offset = it.next().unwrap().into_int_value(); + let extra_offset = it.next().unwrap().into_int_value(); + let tag_value = it.next().unwrap(); + + let cursors = Cursors { + offset, + extra_offset, + }; + + let parent = fn_val; + + debug_assert!(tag_value.is_pointer_value()); + + use UnionLayout::*; + + match union_layout { + NonRecursive(&[]) => { + // we're comparing empty tag unions; this code is effectively unreachable + env.builder.build_unreachable(); + } + NonRecursive(tags) => { + let id = get_tag_id(env, parent, &union_layout, tag_value); + + let switch_block = env.context.append_basic_block(parent, "switch_block"); + env.builder.build_unconditional_branch(switch_block); + + let mut cases = Vec::with_capacity_in(tags.len(), env.arena); + + for (tag_id, field_layouts) in tags.iter().enumerate() { + let block = env.context.append_basic_block(parent, "tag_id_modify"); + env.builder.position_at_end(block); + + let raw_data_ptr = env + .builder + .build_struct_gep( + tag_value.into_pointer_value(), + RocUnion::TAG_DATA_INDEX, + "tag_data", + ) + .unwrap(); + + let layout = Layout::struct_no_name_order(field_layouts); + let basic_type = basic_type_from_layout(env, &layout); + + let data_ptr = env.builder.build_pointer_cast( + raw_data_ptr, + basic_type.ptr_type(AddressSpace::Generic), + "data_ptr", + ); + + let data = env.builder.build_load(data_ptr, "load_data"); + + let answer = + build_clone(env, layout_ids, ptr, cursors, data, layout, when_recursive); + + env.builder.build_return(Some(&answer)); + + cases.push((id.get_type().const_int(tag_id as u64, false), block)); + } + + env.builder.position_at_end(switch_block); + + match cases.pop() { + Some((_, default)) => { + env.builder.build_switch(id, default, &cases); + } + None => { + // we're serializing an empty tag union; this code is effectively unreachable + env.builder.build_unreachable(); + } + } + } _ => todo!(), } } @@ -329,14 +581,15 @@ fn build_clone_builtin<'a, 'ctx, 'env>( Builtin::Str => { // - call_bitcode_fn( + call_str_bitcode_fn( env, + &[value], &[ ptr.into(), cursors.offset.into(), cursors.extra_offset.into(), - value, ], + crate::llvm::bitcode::BitcodeReturns::Basic, bitcode::STR_CLONE_TO, ) .into_int_value() @@ -380,10 +633,6 @@ fn build_clone_builtin<'a, 'ctx, 'env>( "elements", ); - // where we write the elements' stack representation - // let element_offset = bd.build_alloca(env.ptr_int(), "element_offset"); - // bd.build_store(element_offset, elements_start_offset); - // if the element has any pointers, we clone them to this offset let rest_offset = bd.build_alloca(env.ptr_int(), "rest_offset"); @@ -404,26 +653,24 @@ fn build_clone_builtin<'a, 'ctx, 'env>( bd.build_int_add(elements_start_offset, current_offset, "current_offset"); let current_extra_offset = bd.build_load(rest_offset, "element_offset"); - let offset = current_offset; // env.ptr_int().const_int(60, false); - let extra_offset = current_extra_offset.into_int_value(); // env.ptr_int().const_int(60 + 24, false); + let offset = current_offset; + let extra_offset = current_extra_offset.into_int_value(); + + let cursors = Cursors { + offset, + extra_offset, + }; let new_offset = build_clone( env, layout_ids, ptr, - Cursors 
{ - // offset: current_offset, - // extra_offset: current_extra_offset.into_int_value(), - offset, - extra_offset, - }, + cursors, element, *elem, when_recursive, ); - // let new_offset = env.ptr_int().const_int(60 + 24 + 34, false); - bd.build_store(rest_offset, new_offset); }; diff --git a/crates/compiler/gen_wasm/Cargo.toml b/crates/compiler/gen_wasm/Cargo.toml index 4cdc6e9108..4350739601 100644 --- a/crates/compiler/gen_wasm/Cargo.toml +++ b/crates/compiler/gen_wasm/Cargo.toml @@ -1,9 +1,9 @@ [package] name = "roc_gen_wasm" -version = "0.1.0" +version = "0.0.1" edition = "2021" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +authors = ["The Roc Contributors"] +license = "UPL-1.0" [dependencies] bitvec = "1" diff --git a/crates/compiler/gen_wasm/src/low_level.rs b/crates/compiler/gen_wasm/src/low_level.rs index 6c8c9afe05..6dadb056b4 100644 --- a/crates/compiler/gen_wasm/src/low_level.rs +++ b/crates/compiler/gen_wasm/src/low_level.rs @@ -1612,13 +1612,46 @@ impl<'a> LowLevelCall<'a> { } } NumShiftRightBy => { - backend.storage.load_symbols( - &mut backend.code_builder, - &[self.arguments[1], self.arguments[0]], - ); + let bits = self.arguments[0]; + let num = self.arguments[1]; match CodeGenNumType::from(self.ret_layout) { - I32 => backend.code_builder.i32_shr_s(), - I64 => backend.code_builder.i64_shr_s(), + I32 => { + // In most languages this operation is for signed numbers, but Roc defines it on all integers. + // So the argument is implicitly converted to signed before the shift operator. + // We need to make that conversion explicit for i8 and i16, which use Wasm's i32 type. + let bit_width = 8 * self.ret_layout.stack_size(TARGET_INFO) as i32; + if bit_width < 32 && !symbol_is_signed_int(backend, num) { + // Sign-extend the number by shifting left and right again + backend + .storage + .load_symbols(&mut backend.code_builder, &[num]); + backend.code_builder.i32_const(32 - bit_width); + backend.code_builder.i32_shl(); + backend.code_builder.i32_const(32 - bit_width); + backend.code_builder.i32_shr_s(); + backend + .storage + .load_symbols(&mut backend.code_builder, &[bits]); + + // Do the actual bitshift operation + backend.code_builder.i32_shr_s(); + + // Restore to unsigned + backend.code_builder.i32_const((1 << bit_width) - 1); + backend.code_builder.i32_and(); + } else { + backend + .storage + .load_symbols(&mut backend.code_builder, &[num, bits]); + backend.code_builder.i32_shr_s(); + } + } + I64 => { + backend + .storage + .load_symbols(&mut backend.code_builder, &[num, bits]); + backend.code_builder.i64_shr_s(); + } I128 => todo!("{:?} for I128", self.lowlevel), _ => panic_ret_type(), } @@ -1626,7 +1659,7 @@ impl<'a> LowLevelCall<'a> { NumShiftRightZfBy => { match CodeGenNumType::from(self.ret_layout) { I32 => { - // This is normally an unsigned operation, but Roc defines it on all integer types. + // In most languages this operation is for unsigned numbers, but Roc defines it on all integers. // So the argument is implicitly converted to unsigned before the shift operator. // We need to make that conversion explicit for i8 and i16, which use Wasm's i32 type. 
let bit_width = 8 * self.ret_layout.stack_size(TARGET_INFO); diff --git a/crates/compiler/ident/Cargo.toml b/crates/compiler/ident/Cargo.toml index 70987df20e..d942503fd0 100644 --- a/crates/compiler/ident/Cargo.toml +++ b/crates/compiler/ident/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "roc_ident" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" diff --git a/crates/compiler/late_solve/Cargo.toml b/crates/compiler/late_solve/Cargo.toml index f213049b40..f73c1ee04b 100644 --- a/crates/compiler/late_solve/Cargo.toml +++ b/crates/compiler/late_solve/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "roc_late_solve" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" diff --git a/crates/compiler/late_solve/src/lib.rs b/crates/compiler/late_solve/src/lib.rs index 34b3467dbc..76bab34e76 100644 --- a/crates/compiler/late_solve/src/lib.rs +++ b/crates/compiler/late_solve/src/lib.rs @@ -10,12 +10,14 @@ use roc_collections::MutMap; use roc_derive::SharedDerivedModule; use roc_error_macros::internal_error; use roc_module::symbol::ModuleId; -use roc_solve::solve::{compact_lambda_sets_of_vars, Phase, Pools}; +use roc_module::symbol::Symbol; +use roc_solve::ability::AbilityResolver; +use roc_solve::solve::Pools; +use roc_solve::specialize::{compact_lambda_sets_of_vars, DerivedEnv, Phase}; use roc_types::subs::{get_member_lambda_sets_at_region, Content, FlatType, LambdaSet}; use roc_types::subs::{ExposedTypesStorageSubs, Subs, Variable}; use roc_unify::unify::{unify as unify_unify, Env, Mode, Unified}; -pub use roc_solve::ability::resolve_ability_specialization; pub use roc_solve::ability::Resolved; pub use roc_types::subs::instantiate_rigids; @@ -49,12 +51,12 @@ impl WorldAbilities { #[inline(always)] pub fn with_module_exposed_type( - &mut self, + &self, module: ModuleId, - mut f: impl FnMut(&mut ExposedTypesStorageSubs) -> T, + mut f: impl FnMut(&ExposedTypesStorageSubs) -> T, ) -> T { - let mut world = self.world.write().unwrap(); - let (_, exposed_types) = world.get_mut(&module).expect("module not in the world"); + let world = self.world.read().unwrap(); + let (_, exposed_types) = world.get(&module).expect("module not in the world"); f(exposed_types) } @@ -96,6 +98,75 @@ impl AbilitiesView<'_> { } } +pub struct LateResolver<'a> { + home: ModuleId, + abilities: &'a AbilitiesView<'a>, +} + +impl<'a> AbilityResolver for LateResolver<'a> { + fn member_parent_and_signature_var( + &self, + ability_member: roc_module::symbol::Symbol, + home_subs: &mut Subs, + ) -> Option<(roc_module::symbol::Symbol, Variable)> { + let (parent_ability, signature_var) = + self.abilities + .with_module_abilities_store(ability_member.module_id(), |store| { + store + .member_def(ability_member) + .map(|def| (def.parent_ability, def.signature_var())) + })?; + + let parent_ability_module = parent_ability.module_id(); + debug_assert_eq!(parent_ability_module, ability_member.module_id()); + + let signature_var = match (parent_ability_module == self.home, self.abilities) { + (false, AbilitiesView::World(world)) => { + // Need to copy the type from an external module into our home subs + world.with_module_exposed_type(parent_ability_module, |external_types| { + let stored_signature_var = + external_types.stored_ability_member_vars.get(&signature_var).expect("Ability member is in an external store, but its signature variables are not stored accordingly!"); + + let home_copy = external_types + .storage_subs + 
.export_variable_to(home_subs, *stored_signature_var); + + home_copy.variable + }) + } + _ => signature_var, + }; + + Some((parent_ability, signature_var)) + } + + fn get_implementation( + &self, + impl_key: roc_can::abilities::ImplKey, + ) -> Option { + self.abilities + .with_module_abilities_store(impl_key.opaque.module_id(), |store| { + store.get_implementation(impl_key).copied() + }) + } +} + +pub fn resolve_ability_specialization( + home: ModuleId, + subs: &mut Subs, + abilities: &AbilitiesView, + ability_member: Symbol, + specialization_var: Variable, +) -> Option { + let late_resolver = LateResolver { home, abilities }; + roc_solve::ability::resolve_ability_specialization( + subs, + &late_resolver, + ability_member, + specialization_var, + ) +} + pub struct LatePhase<'a> { home: ModuleId, abilities: &'a AbilitiesView<'a>, @@ -272,15 +343,18 @@ pub fn unify( let mut pools = Pools::default(); let late_phase = LatePhase { home, abilities }; + let derived_env = DerivedEnv { + derived_module, + exposed_types: exposed_by_module, + }; let must_implement_constraints = compact_lambda_sets_of_vars( subs, - derived_module, + &derived_env, arena, &mut pools, lambda_sets_to_specialize, &late_phase, - exposed_by_module, ); // At this point we can't do anything with must-implement constraints, since we're no // longer solving. We must assume that they were totally caught during solving. diff --git a/crates/compiler/load/Cargo.toml b/crates/compiler/load/Cargo.toml index aab10ef774..330ee5d8a5 100644 --- a/crates/compiler/load/Cargo.toml +++ b/crates/compiler/load/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "roc_load" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" diff --git a/crates/compiler/load/build.rs b/crates/compiler/load/build.rs index 8f6ed834c4..0e7a3d4a92 100644 --- a/crates/compiler/load/build.rs +++ b/crates/compiler/load/build.rs @@ -14,6 +14,7 @@ const MODULES: &[(ModuleId, &str)] = &[ (ModuleId::SET, "Set.roc"), (ModuleId::BOX, "Box.roc"), (ModuleId::ENCODE, "Encode.roc"), + (ModuleId::DECODE, "Decode.roc"), (ModuleId::JSON, "Json.roc"), ]; diff --git a/crates/compiler/load/src/lib.rs b/crates/compiler/load/src/lib.rs index 126a039ab2..42c87be8e3 100644 --- a/crates/compiler/load/src/lib.rs +++ b/crates/compiler/load/src/lib.rs @@ -1,5 +1,3 @@ -pub use roc_load_internal::file::Threading; - use bumpalo::Bump; use roc_can::module::ExposedByModule; use roc_collections::all::MutMap; @@ -11,7 +9,8 @@ use std::path::PathBuf; pub use roc_load_internal::docs; pub use roc_load_internal::file::{ - Expectations, LoadResult, LoadStart, LoadedModule, LoadingProblem, MonomorphizedModule, Phase, + EntryPoint, ExecutionMode, Expectations, LoadConfig, LoadResult, LoadStart, LoadedModule, + LoadingProblem, MonomorphizedModule, Phase, Threading, }; #[allow(clippy::too_many_arguments)] @@ -19,23 +18,11 @@ fn load<'a>( arena: &'a Bump, load_start: LoadStart<'a>, exposed_types: ExposedByModule, - goal_phase: Phase, - target_info: TargetInfo, - render: RenderTarget, - threading: Threading, + load_config: LoadConfig, ) -> Result, LoadingProblem<'a>> { let cached_subs = read_cached_subs(); - roc_load_internal::file::load( - arena, - load_start, - exposed_types, - goal_phase, - target_info, - cached_subs, - render, - threading, - ) + roc_load_internal::file::load(arena, load_start, exposed_types, cached_subs, load_config) } /// Load using only a single thread; used when compiling to webassembly @@ -43,9 +30,9 @@ pub fn load_single_threaded<'a>( 
arena: &'a Bump, load_start: LoadStart<'a>, exposed_types: ExposedByModule, - goal_phase: Phase, target_info: TargetInfo, render: RenderTarget, + exec_mode: ExecutionMode, ) -> Result<LoadResult<'a>, LoadingProblem<'a>> { let cached_subs = read_cached_subs(); @@ -53,10 +40,10 @@ pub fn load_single_threaded<'a>( arena, load_start, exposed_types, - goal_phase, target_info, cached_subs, render, + exec_mode, ) } @@ -67,23 +54,13 @@ pub fn load_and_monomorphize_from_str<'a>( src: &'a str, src_dir: PathBuf, exposed_types: ExposedByModule, - target_info: TargetInfo, - render: RenderTarget, - threading: Threading, + load_config: LoadConfig, ) -> Result<MonomorphizedModule<'a>, LoadingProblem<'a>> { use LoadResult::*; let load_start = LoadStart::from_str(arena, filename, src, src_dir)?; - match load( - arena, - load_start, - exposed_types, - Phase::MakeSpecializations, - target_info, - render, - threading, - )? { + match load(arena, load_start, exposed_types, load_config)? { Monomorphized(module) => Ok(module), TypeChecked(_) => unreachable!(""), } @@ -93,23 +70,13 @@ pub fn load_and_monomorphize( arena: &Bump, filename: PathBuf, exposed_types: ExposedByModule, - target_info: TargetInfo, - render: RenderTarget, - threading: Threading, + load_config: LoadConfig, ) -> Result<MonomorphizedModule<'_>, LoadingProblem<'_>> { use LoadResult::*; - let load_start = LoadStart::from_path(arena, filename, render)?; + let load_start = LoadStart::from_path(arena, filename, load_config.render)?; - match load( - arena, - load_start, - exposed_types, - Phase::MakeSpecializations, - target_info, - render, - threading, - )? { + match load(arena, load_start, exposed_types, load_config)? { Monomorphized(module) => Ok(module), TypeChecked(_) => unreachable!(""), } @@ -119,23 +86,13 @@ pub fn load_and_typecheck( arena: &Bump, filename: PathBuf, exposed_types: ExposedByModule, - target_info: TargetInfo, - render: RenderTarget, - threading: Threading, + load_config: LoadConfig, ) -> Result<LoadedModule, LoadingProblem<'_>> { use LoadResult::*; - let load_start = LoadStart::from_path(arena, filename, render)?; + let load_start = LoadStart::from_path(arena, filename, load_config.render)?; - match load( - arena, - load_start, - exposed_types, - Phase::SolveTypes, - target_info, - render, - threading, - )? { + match load(arena, load_start, exposed_types, load_config)? { Monomorphized(_) => unreachable!(""), TypeChecked(module) => Ok(module), } @@ -161,9 +118,9 @@ pub fn load_and_typecheck_str<'a>( arena, load_start, exposed_types, - Phase::SolveTypes, target_info, render, + ExecutionMode::Check, )? 
{ Monomorphized(_) => unreachable!(""), TypeChecked(module) => Ok(module), diff --git a/crates/compiler/load_internal/Cargo.toml b/crates/compiler/load_internal/Cargo.toml index b6b9426edf..0967a2a2c2 100644 --- a/crates/compiler/load_internal/Cargo.toml +++ b/crates/compiler/load_internal/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "roc_load_internal" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" diff --git a/crates/compiler/load_internal/src/file.rs b/crates/compiler/load_internal/src/file.rs index a248882e0e..0151e3a130 100644 --- a/crates/compiler/load_internal/src/file.rs +++ b/crates/compiler/load_internal/src/file.rs @@ -30,8 +30,8 @@ use roc_module::symbol::{ PackageQualified, Symbol, }; use roc_mono::ir::{ - CapturedSymbols, EntryPoint, ExternalSpecializations, PartialProc, Proc, ProcLayout, Procs, - ProcsBase, UpdateModeIds, + CapturedSymbols, ExternalSpecializations, PartialProc, Proc, ProcLayout, Procs, ProcsBase, + UpdateModeIds, }; use roc_mono::layout::{CapturesNiche, LambdaName, Layout, LayoutCache, LayoutProblem}; use roc_parse::ast::{self, Defs, ExtractSpaces, Spaced, StrLiteral, TypeAnnotation}; @@ -117,6 +117,30 @@ macro_rules! log { ($($arg:tt)*) => (dbg_do!(ROC_PRINT_LOAD_LOG, println!($($arg)*))) } +#[derive(Debug)] +pub struct LoadConfig { + pub target_info: TargetInfo, + pub render: RenderTarget, + pub threading: Threading, + pub exec_mode: ExecutionMode, +} + +#[derive(Debug, Clone, Copy)] +pub enum ExecutionMode { + Test, + Check, + Executable, +} + +impl ExecutionMode { + fn goal_phase(&self) -> Phase { + match self { + ExecutionMode::Test | ExecutionMode::Executable => Phase::MakeSpecializations, + ExecutionMode::Check => Phase::SolveTypes, + } + } +} + /// Struct storing various intermediate stages by their ModuleId #[derive(Debug)] struct ModuleCache<'a> { @@ -167,6 +191,7 @@ impl Default for ModuleCache<'_> { NUM, BOX, ENCODE, + DECODE, JSON, } @@ -669,7 +694,6 @@ pub struct MonomorphizedModule<'a> { pub interns: Interns, pub subs: Subs, pub output_path: Box, - pub platform_path: Box, pub can_problems: MutMap>, pub type_problems: MutMap>, pub procedures: MutMap<(Symbol, ProcLayout<'a>), Proc<'a>>, @@ -681,6 +705,16 @@ pub struct MonomorphizedModule<'a> { pub expectations: VecMap, } +#[derive(Debug)] +pub enum EntryPoint<'a> { + Executable { + symbol: Symbol, + layout: ProcLayout<'a>, + platform_path: Box, + }, + Test, +} + #[derive(Debug)] pub struct Expectations { pub subs: roc_types::subs::Subs, @@ -847,7 +881,6 @@ struct State<'a> { pub root_id: ModuleId, pub root_subs: Option, pub platform_data: Option, - pub goal_phase: Phase, pub exposed_types: ExposedByModule, pub output_path: Option<&'a str>, pub platform_path: PlatformPath<'a>, @@ -858,6 +891,7 @@ struct State<'a> { pub procedures: MutMap<(Symbol, ProcLayout<'a>), Proc<'a>>, pub toplevel_expects: VecMap, pub exposed_to_host: ExposedToHost, + pub goal_phase: Phase, /// This is the "final" list of IdentIds, after canonicalization and constraint gen /// have completed for a given module. @@ -885,6 +919,7 @@ struct State<'a> { pub layout_caches: std::vec::Vec>, pub render: RenderTarget, + pub exec_mode: ExecutionMode, /// All abilities across all modules. 
pub world_abilities: WorldAbilities, @@ -902,16 +937,17 @@ impl<'a> State<'a> { fn new( root_id: ModuleId, target_info: TargetInfo, - goal_phase: Phase, exposed_types: ExposedByModule, arc_modules: Arc>>, ident_ids_by_module: SharedIdentIdsByModule, cached_subs: MutMap)>, render: RenderTarget, number_of_workers: usize, + exec_mode: ExecutionMode, ) -> Self { let arc_shorthands = Arc::new(Mutex::new(MutMap::default())); + let goal_phase = exec_mode.goal_phase(); let dependencies = Dependencies::new(goal_phase); Self { @@ -939,6 +975,7 @@ impl<'a> State<'a> { layout_caches: std::vec::Vec::with_capacity(number_of_workers), cached_subs: Arc::new(Mutex::new(cached_subs)), render, + exec_mode, make_specializations_pass: MakeSpecializationsPass::Pass(1), world_abilities: Default::default(), } @@ -1145,16 +1182,14 @@ pub fn load_and_typecheck_str<'a>( // where we want to regenerate the cached data let cached_subs = MutMap::default(); - match load( - arena, - load_start, - exposed_types, - Phase::SolveTypes, + let load_config = LoadConfig { target_info, - cached_subs, render, threading, - )? { + exec_mode: ExecutionMode::Check, + }; + + match load(arena, load_start, exposed_types, cached_subs, load_config)? { Monomorphized(_) => unreachable!(""), TypeChecked(module) => Ok(module), } @@ -1363,11 +1398,8 @@ pub fn load<'a>( arena: &'a Bump, load_start: LoadStart<'a>, exposed_types: ExposedByModule, - goal_phase: Phase, - target_info: TargetInfo, cached_subs: MutMap)>, - render: RenderTarget, - threading: Threading, + load_config: LoadConfig, ) -> Result, LoadingProblem<'a>> { enum Threads { Single, @@ -1384,7 +1416,7 @@ pub fn load<'a>( Err(_) => Threads::Single, Ok(0) => unreachable!("NonZeroUsize"), Ok(1) => Threads::Single, - Ok(reported) => match threading { + Ok(reported) => match load_config.threading { Threading::Single => Threads::Single, Threading::AllAvailable => Threads::Many(reported), Threading::AtMost(at_most) => Threads::Many(Ord::min(reported, at_most)), @@ -1398,20 +1430,20 @@ pub fn load<'a>( arena, load_start, exposed_types, - goal_phase, - target_info, + load_config.target_info, cached_subs, - render, + load_config.render, + load_config.exec_mode, ), Threads::Many(threads) => load_multi_threaded( arena, load_start, exposed_types, - goal_phase, - target_info, + load_config.target_info, cached_subs, - render, + load_config.render, threads, + load_config.exec_mode, ), } } @@ -1422,10 +1454,10 @@ pub fn load_single_threaded<'a>( arena: &'a Bump, load_start: LoadStart<'a>, exposed_types: ExposedByModule, - goal_phase: Phase, target_info: TargetInfo, cached_subs: MutMap)>, render: RenderTarget, + exec_mode: ExecutionMode, ) -> Result, LoadingProblem<'a>> { let LoadStart { arc_modules, @@ -1446,13 +1478,13 @@ pub fn load_single_threaded<'a>( let mut state = State::new( root_id, target_info, - goal_phase, exposed_types, arc_modules, ident_ids_by_module, cached_subs, render, number_of_workers, + exec_mode, ); // We'll add tasks to this, and then worker threads will take tasks from it. 
@@ -1623,11 +1655,11 @@ fn load_multi_threaded<'a>( arena: &'a Bump, load_start: LoadStart<'a>, exposed_types: ExposedByModule, - goal_phase: Phase, target_info: TargetInfo, cached_subs: MutMap)>, render: RenderTarget, available_threads: usize, + exec_mode: ExecutionMode, ) -> Result, LoadingProblem<'a>> { let LoadStart { arc_modules, @@ -1663,13 +1695,13 @@ fn load_multi_threaded<'a>( let mut state = State::new( root_id, target_info, - goal_phase, exposed_types, arc_modules, ident_ids_by_module, cached_subs, render, num_workers, + exec_mode, ); // an arena for every worker, stored in an arena-allocated bumpalo vec to make the lifetimes work @@ -2748,6 +2780,7 @@ fn finish_specialization( output_path, platform_path, platform_data, + exec_mode, .. } = state; @@ -2764,53 +2797,60 @@ fn finish_specialization( .map(|(id, (path, src))| (id, (path, src.into()))) .collect(); - let path_to_platform = { - use PlatformPath::*; - let package_name = match platform_path { - Valid(To::ExistingPackage(shorthand)) => { - match (*state.arc_shorthands).lock().get(shorthand) { - Some(p_or_p) => *p_or_p, - None => unreachable!(), - } - } - Valid(To::NewPackage(p_or_p)) => p_or_p, - other => { - let buf = to_missing_platform_report(state.root_id, other); - return Err(LoadingProblem::FormattedReport(buf)); - } - }; - - package_name.into() - }; - - let platform_path = Path::new(path_to_platform).into(); - let entry_point = { - let symbol = match platform_data { - None => { - debug_assert_eq!(exposed_to_host.values.len(), 1); - *exposed_to_host.values.iter().next().unwrap().0 - } - Some(PlatformData { provides, .. }) => provides, - }; + match exec_mode { + ExecutionMode::Test => EntryPoint::Test, + ExecutionMode::Executable => { + let path_to_platform = { + use PlatformPath::*; + let package_name = match platform_path { + Valid(To::ExistingPackage(shorthand)) => { + match (*state.arc_shorthands).lock().get(shorthand) { + Some(p_or_p) => *p_or_p, + None => unreachable!(), + } + } + Valid(To::NewPackage(p_or_p)) => p_or_p, + other => { + let buf = to_missing_platform_report(state.root_id, other); + return Err(LoadingProblem::FormattedReport(buf)); + } + }; - match procedures.keys().find(|(s, _)| *s == symbol) { - Some((_, layout)) => EntryPoint { - layout: *layout, - symbol, - }, - None => { - // the entry point is not specialized. This can happen if the repl output - // is a function value - EntryPoint { - layout: roc_mono::ir::ProcLayout { - arguments: &[], - result: Layout::struct_no_name_order(&[]), - captures_niche: CapturesNiche::no_niche(), + package_name.into() + }; + + let platform_path = Path::new(path_to_platform).into(); + let symbol = match platform_data { + None => { + debug_assert_eq!(exposed_to_host.values.len(), 1); + *exposed_to_host.values.iter().next().unwrap().0 + } + Some(PlatformData { provides, .. }) => provides, + }; + + match procedures.keys().find(|(s, _)| *s == symbol) { + Some((_, layout)) => EntryPoint::Executable { + layout: *layout, + symbol, + platform_path, }, - symbol, + None => { + // the entry point is not specialized. 
This can happen if the repl output + // is a function value + EntryPoint::Executable { + layout: roc_mono::ir::ProcLayout { + arguments: &[], + result: Layout::struct_no_name_order(&[]), + captures_niche: CapturesNiche::no_niche(), + }, + symbol, + platform_path, + } + } } } + ExecutionMode::Check => unreachable!(), } }; @@ -2823,7 +2863,7 @@ fn finish_specialization( can_problems, type_problems, output_path, - platform_path, + expectations, exposed_to_host, module_id: state.root_id, subs, @@ -2833,7 +2873,6 @@ fn finish_specialization( sources, timings: state.timings, toplevel_expects, - expectations, }) } @@ -3088,6 +3127,7 @@ fn load_module<'a>( "Bool", ModuleId::BOOL "Box", ModuleId::BOX "Encode", ModuleId::ENCODE + "Decode", ModuleId::DECODE "Json", ModuleId::JSON } @@ -4477,7 +4517,7 @@ fn canonicalize_and_constrain<'a>( Vacant(vacant) => { let should_include_builtin = matches!( name.module_id(), - ModuleId::ENCODE | ModuleId::DICT | ModuleId::SET + ModuleId::ENCODE | ModuleId::DECODE | ModuleId::DICT | ModuleId::SET ); if !name.is_builtin() || should_include_builtin { @@ -5076,7 +5116,7 @@ fn load_derived_partial_procs<'a>( // TODO: we can be even lazier here if we move `add_def_to_module` to happen in mono. Also, the // timings would be more accurate. - for (derived_symbol, derived_expr) in derives_to_add.into_iter() { + for (derived_symbol, (derived_expr, derived_expr_var)) in derives_to_add.into_iter() { let mut mono_env = roc_mono::ir::Env { arena, subs, @@ -5115,7 +5155,22 @@ fn load_derived_partial_procs<'a>( return_type, ) } - _ => internal_error!("Expected only functions to be derived"), + _ => { + // mark this symbols as a top-level thunk before any other work on the procs + new_module_thunks.push(derived_symbol); + + PartialProc { + annotation: derived_expr_var, + // This is a 0-arity thunk, so it has no arguments. + pattern_symbols: &[], + // This is a top-level definition, so it cannot capture anything + captured_symbols: CapturedSymbols::None, + body: derived_expr, + body_var: derived_expr_var, + // This is a 0-arity thunk, so it cannot be recursive + is_self_recursive: false, + } + } }; procs_base diff --git a/crates/compiler/load_internal/tests/test_load.rs b/crates/compiler/load_internal/tests/test_load.rs index 33079f3905..9d1aa79849 100644 --- a/crates/compiler/load_internal/tests/test_load.rs +++ b/crates/compiler/load_internal/tests/test_load.rs @@ -17,8 +17,8 @@ mod helpers; use crate::helpers::fixtures_dir; use bumpalo::Bump; use roc_can::module::ExposedByModule; -use roc_load_internal::file::Threading; -use roc_load_internal::file::{LoadResult, LoadStart, LoadedModule, LoadingProblem, Phase}; +use roc_load_internal::file::{ExecutionMode, LoadConfig, Threading}; +use roc_load_internal::file::{LoadResult, LoadStart, LoadedModule, LoadingProblem}; use roc_module::ident::ModuleName; use roc_module::symbol::{Interns, ModuleId}; use roc_problem::can::Problem; @@ -41,16 +41,19 @@ fn load_and_typecheck( use LoadResult::*; let load_start = LoadStart::from_path(arena, filename, RenderTarget::Generic)?; + let load_config = LoadConfig { + target_info, + render: RenderTarget::Generic, + threading: Threading::Single, + exec_mode: ExecutionMode::Check, + }; match roc_load_internal::file::load( arena, load_start, exposed_types, - Phase::SolveTypes, - target_info, Default::default(), // these tests will re-compile the builtins - RenderTarget::Generic, - Threading::Single, + load_config, )? 
{ Monomorphized(_) => unreachable!(""), TypeChecked(module) => Ok(module), diff --git a/crates/compiler/module/Cargo.toml b/crates/compiler/module/Cargo.toml index 3fd82fc109..bdb53a1179 100644 --- a/crates/compiler/module/Cargo.toml +++ b/crates/compiler/module/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "roc_module" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] edition = "2021" license = "UPL-1.0" diff --git a/crates/compiler/module/src/ident.rs b/crates/compiler/module/src/ident.rs index 934717c4bf..df8e90be0d 100644 --- a/crates/compiler/module/src/ident.rs +++ b/crates/compiler/module/src/ident.rs @@ -78,6 +78,7 @@ impl ModuleName { pub const RESULT: &'static str = "Result"; pub const BOX: &'static str = "Box"; pub const ENCODE: &'static str = "Encode"; + pub const DECODE: &'static str = "Decode"; pub const JSON: &'static str = "Json"; pub fn as_str(&self) -> &str { diff --git a/crates/compiler/module/src/symbol.rs b/crates/compiler/module/src/symbol.rs index 5f894d3123..455d439e2c 100644 --- a/crates/compiler/module/src/symbol.rs +++ b/crates/compiler/module/src/symbol.rs @@ -47,8 +47,10 @@ const SYMBOL_HAS_NICHE: () = #[cfg(debug_assertions)] const PRETTY_PRINT_DEBUG_SYMBOLS: bool = true; -pub const DERIVABLE_ABILITIES: &[(Symbol, &[Symbol])] = - &[(Symbol::ENCODE_ENCODING, &[Symbol::ENCODE_TO_ENCODER])]; +pub const DERIVABLE_ABILITIES: &[(Symbol, &[Symbol])] = &[ + (Symbol::ENCODE_ENCODING, &[Symbol::ENCODE_TO_ENCODER]), + (Symbol::DECODE_DECODING, &[Symbol::DECODE_DECODER]), +]; /// In Debug builds only, Symbol has a name() method that lets /// you look up its name in a global intern table. This table is @@ -1005,6 +1007,8 @@ define_builtins! { 30 DEV_TMP5: "#dev_tmp5" 31 ATTR_INVALID: "#attr_invalid" + + 32 CLONE: "#clone" // internal function that clones a value into a buffer } // Fake module for synthesizing and storing derived implementations 1 DERIVED_SYNTH: "#Derived" => { @@ -1389,9 +1393,37 @@ define_builtins! { 24 ENCODE_APPEND: "append" 25 ENCODE_TO_BYTES: "toBytes" } - 12 JSON: "Json" => { + 12 DECODE: "Decode" => { + 0 DECODE_DECODE_ERROR: "DecodeError" + 1 DECODE_DECODE_RESULT: "DecodeResult" + 2 DECODE_DECODER_OPAQUE: "Decoder" + 3 DECODE_DECODING: "Decoding" + 4 DECODE_DECODER: "decoder" + 5 DECODE_DECODERFORMATTING: "DecoderFormatting" + 6 DECODE_U8: "u8" + 7 DECODE_U16: "u16" + 8 DECODE_U32: "u32" + 9 DECODE_U64: "u64" + 10 DECODE_U128: "u128" + 11 DECODE_I8: "i8" + 12 DECODE_I16: "i16" + 13 DECODE_I32: "i32" + 14 DECODE_I64: "i64" + 15 DECODE_I128: "i128" + 16 DECODE_F32: "f32" + 17 DECODE_F64: "f64" + 18 DECODE_DEC: "dec" + 19 DECODE_BOOL: "bool" + 20 DECODE_STRING: "string" + 21 DECODE_LIST: "list" + 22 DECODE_CUSTOM: "custom" + 23 DECODE_DECODE_WITH: "decodeWith" + 24 DECODE_FROM_BYTES_PARTIAL: "fromBytesPartial" + 25 DECODE_FROM_BYTES: "fromBytes" + } + 13 JSON: "Json" => { 0 JSON_JSON: "Json" } - num_modules: 13 // Keep this count up to date by hand! (TODO: see the mut_map! macro for how we could determine this count correctly in the macro) + num_modules: 14 // Keep this count up to date by hand! (TODO: see the mut_map! 
macro for how we could determine this count correctly in the macro) } diff --git a/crates/compiler/mono/Cargo.toml b/crates/compiler/mono/Cargo.toml index c1fa81d366..f2ee5c0ba3 100644 --- a/crates/compiler/mono/Cargo.toml +++ b/crates/compiler/mono/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "roc_mono" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" @@ -12,6 +12,7 @@ roc_region = { path = "../region" } roc_module = { path = "../module" } roc_types = { path = "../types" } roc_can = { path = "../can" } +roc_derive_key = { path = "../derive_key" } roc_derive = { path = "../derive" } roc_late_solve = { path = "../late_solve" } roc_std = { path = "../../roc_std", default-features = false } diff --git a/crates/compiler/mono/src/code_gen_help/refcount.rs b/crates/compiler/mono/src/code_gen_help/refcount.rs index a36da6664e..9e46c1a208 100644 --- a/crates/compiler/mono/src/code_gen_help/refcount.rs +++ b/crates/compiler/mono/src/code_gen_help/refcount.rs @@ -144,7 +144,7 @@ pub fn refcount_reset_proc_body<'a>( let rc = root.create_symbol(ident_ids, "rc"); let refcount_1 = root.create_symbol(ident_ids, "refcount_1"); let is_unique = root.create_symbol(ident_ids, "is_unique"); - let masked = root.create_symbol(ident_ids, "masked"); + let addr = root.create_symbol(ident_ids, "addr"); let union_layout = match layout { Layout::Union(u) => u, @@ -213,7 +213,7 @@ pub fn refcount_reset_proc_body<'a>( op: LowLevel::NumSubWrap, update_mode: UpdateModeId::BACKEND_DUMMY, }, - arguments: root.arena.alloc([masked, alignment]), + arguments: root.arena.alloc([addr, alignment]), }); Stmt::Let( @@ -341,7 +341,7 @@ pub fn refcount_reset_proc_body<'a>( rc_ptr, union_layout.stores_tag_id_in_pointer(root.target_info), root.arena.alloc(rc_stmt), - masked, + addr, ) }; diff --git a/crates/compiler/mono/src/decision_tree.rs b/crates/compiler/mono/src/decision_tree.rs index 8335e4517f..12e9222d66 100644 --- a/crates/compiler/mono/src/decision_tree.rs +++ b/crates/compiler/mono/src/decision_tree.rs @@ -96,6 +96,23 @@ enum Test<'a> { }, } +impl<'a> Test<'a> { + fn can_be_switch(&self) -> bool { + match self { + Test::IsCtor { .. } => true, + Test::IsInt(_, int_width) => { + // llvm does not like switching on 128-bit values + !matches!(int_width, IntWidth::U128 | IntWidth::I128) + } + Test::IsFloat(_, _) => true, + Test::IsDecimal(_) => false, + Test::IsStr(_) => false, + Test::IsBit(_) => true, + Test::IsByte { .. } => true, + } + } +} + use std::hash::{Hash, Hasher}; impl<'a> Hash for Test<'a> { fn hash(&self, state: &mut H) { @@ -1370,8 +1387,6 @@ fn test_to_equality<'a>( } Test::IsInt(test_int, precision) => { - // TODO don't downcast i128 here - debug_assert!(i128::from_ne_bytes(test_int) <= i64::MAX as i128); let lhs = Expr::Literal(Literal::Int(test_int)); let lhs_symbol = env.unique_symbol(); stores.push((lhs_symbol, Layout::int_width(precision), lhs)); @@ -1833,7 +1848,8 @@ fn decide_to_branching<'a>( Test::IsBit(v) => v as u64, Test::IsByte { tag_id, .. } => tag_id as u64, Test::IsCtor { tag_id, .. 
} => tag_id as u64, - other => todo!("other {:?}", other), + Test::IsDecimal(_) => unreachable!("decimals cannot be switched on"), + Test::IsStr(_) => unreachable!("strings cannot be switched on"), }; // branch info is only useful for refcounted values @@ -2004,15 +2020,30 @@ fn fanout_decider<'a>( edges: Vec<(GuardedTest<'a>, DecisionTree<'a>)>, ) -> Decider<'a, u64> { let fallback_decider = tree_to_decider(fallback); - let necessary_tests = edges + let necessary_tests: Vec<_> = edges .into_iter() .map(|(test, tree)| fanout_decider_help(tree, test)) .collect(); - Decider::FanOut { - path, - tests: necessary_tests, - fallback: Box::new(fallback_decider), + if necessary_tests.iter().all(|(t, _)| t.can_be_switch()) { + Decider::FanOut { + path, + tests: necessary_tests, + fallback: Box::new(fallback_decider), + } + } else { + // in llvm, we cannot switch on strings so must chain + let mut decider = fallback_decider; + + for (test, branch_decider) in necessary_tests.into_iter().rev() { + decider = Decider::Chain { + test_chain: vec![(path.clone(), test)], + success: Box::new(branch_decider), + failure: Box::new(decider), + }; + } + + decider } } diff --git a/crates/compiler/mono/src/ir.rs b/crates/compiler/mono/src/ir.rs index 084f1fb36f..8812e9b897 100644 --- a/crates/compiler/mono/src/ir.rs +++ b/crates/compiler/mono/src/ir.rs @@ -3999,9 +3999,10 @@ pub fn with_hole<'a>( } // creating a record from the var will unpack it if it's just a single field. - let layout = layout_cache - .from_var(env.arena, record_var, env.subs) - .unwrap_or_else(|err| panic!("TODO turn fn_var into a RuntimeError {:?}", err)); + let layout = match layout_cache.from_var(env.arena, record_var, env.subs) { + Ok(layout) => layout, + Err(_) => return Stmt::RuntimeError("Can't create record with improper layout"), + }; let field_symbols = field_symbols.into_bump_slice(); @@ -4915,14 +4916,12 @@ pub fn with_hole<'a>( UnspecializedExpr(symbol) => { match procs.ability_member_aliases.get(symbol).unwrap() { &self::AbilityMember(member) => { - let resolved_proc = env.abilities.with_module_abilities_store(env.home, |store| - resolve_ability_specialization(env.subs, store, member, fn_var) - .expect("Recorded as an ability member, but it doesn't have a specialization") - ); + let resolved_proc = resolve_ability_specialization(env.home, env.subs, &env.abilities, member, fn_var) + .expect("Recorded as an ability member, but it doesn't have a specialization"); let resolved_proc = match resolved_proc { Resolved::Specialization(symbol) => symbol, - Resolved::NeedsGenerated => { + Resolved::NeedsGenerated(_) => { todo_abilities!("Generate impls for structural types") } }; @@ -5226,17 +5225,41 @@ fn late_resolve_ability_specialization<'a>( env.subs[spec_symbol_index] } else { // Otherwise, resolve by checking the able var. 
- let specialization = env - .abilities - .with_module_abilities_store(env.home, |store| { - resolve_ability_specialization(env.subs, store, member, specialization_var) - .expect("Ability specialization is unknown - code generation cannot proceed!") - }); + let specialization = resolve_ability_specialization( + env.home, + env.subs, + &env.abilities, + member, + specialization_var, + ) + .expect("Ability specialization is unknown - code generation cannot proceed!"); match specialization { Resolved::Specialization(symbol) => symbol, - Resolved::NeedsGenerated => { - todo_abilities!("Generate impls for structural types") + Resolved::NeedsGenerated(var) => { + let derive_key = roc_derive_key::Derived::builtin( + member.try_into().expect("derived symbols must be builtins"), + env.subs, + var, + ) + .expect("specialization var not derivable!"); + + match derive_key { + roc_derive_key::Derived::Immediate(imm) => { + // The immediate is an ability member itself, so it must be resolved! + late_resolve_ability_specialization(env, imm, None, specialization_var) + } + roc_derive_key::Derived::Key(derive_key) => { + let mut derived_module = env + .derived_module + .lock() + .expect("derived module unavailable"); + + derived_module + .get_or_insert(env.exposed_by_module, derive_key) + .0 + } + } } } } @@ -8957,7 +8980,7 @@ impl NumLiteral { fn to_pattern(&self) -> Pattern<'static> { match *self { NumLiteral::Int(n, w) => Pattern::IntLiteral(n, w), - NumLiteral::U128(_) => todo!(), + NumLiteral::U128(n) => Pattern::IntLiteral(n, IntWidth::U128), NumLiteral::Float(n, w) => Pattern::FloatLiteral(f64::to_bits(n), w), NumLiteral::Decimal(n) => Pattern::DecimalLiteral(n), } diff --git a/crates/compiler/parse/Cargo.toml b/crates/compiler/parse/Cargo.toml index 3b78b5f120..8663767852 100644 --- a/crates/compiler/parse/Cargo.toml +++ b/crates/compiler/parse/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "roc_parse" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" diff --git a/crates/compiler/parse/src/parser.rs b/crates/compiler/parse/src/parser.rs index e0f53fcc1e..af9174db33 100644 --- a/crates/compiler/parse/src/parser.rs +++ b/crates/compiler/parse/src/parser.rs @@ -866,7 +866,7 @@ where // the next character should not be an identifier character // to prevent treating `whence` or `iffy` as keywords match state.bytes().get(width) { - Some(next) if *next == b' ' || *next == b'#' || *next == b'\n' => { + Some(next) if *next == b' ' || *next == b'#' || *next == b'\n' || *next == b'\r' => { state = state.advance(width); Ok((MadeProgress, (), state)) } diff --git a/crates/compiler/problem/Cargo.toml b/crates/compiler/problem/Cargo.toml index 9aa102ff5f..bfbc1d5c7d 100644 --- a/crates/compiler/problem/Cargo.toml +++ b/crates/compiler/problem/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "roc_problem" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" diff --git a/crates/compiler/problem/src/can.rs b/crates/compiler/problem/src/can.rs index ba956a7f72..82f2a6f35d 100644 --- a/crates/compiler/problem/src/can.rs +++ b/crates/compiler/problem/src/can.rs @@ -45,12 +45,13 @@ pub enum Problem { shadow: Loc, kind: ShadowKind, }, - CyclicAlias(Symbol, Region, Vec), + CyclicAlias(Symbol, Region, Vec, AliasKind), BadRecursion(Vec), PhantomTypeArgument { typ: Symbol, variable_region: Region, variable_name: Lowercase, + alias_kind: AliasKind, }, UnboundTypeVariable { typ: Symbol, diff --git 
a/crates/compiler/region/Cargo.toml b/crates/compiler/region/Cargo.toml index d80e07b9a0..b4511db830 100644 --- a/crates/compiler/region/Cargo.toml +++ b/crates/compiler/region/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "roc_region" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" diff --git a/crates/compiler/roc_target/Cargo.toml b/crates/compiler/roc_target/Cargo.toml index cbef2efa5a..e25f84ffd8 100644 --- a/crates/compiler/roc_target/Cargo.toml +++ b/crates/compiler/roc_target/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "roc_target" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" diff --git a/crates/compiler/roc_target/src/lib.rs b/crates/compiler/roc_target/src/lib.rs index f4feac1dc5..96352c872b 100644 --- a/crates/compiler/roc_target/src/lib.rs +++ b/crates/compiler/roc_target/src/lib.rs @@ -4,9 +4,31 @@ use strum_macros::{EnumCount, EnumIter}; +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub enum OperatingSystem { + Windows, + Unix, + Wasi, +} + +impl From for OperatingSystem { + fn from(target: target_lexicon::OperatingSystem) -> Self { + match target { + target_lexicon::OperatingSystem::Windows => OperatingSystem::Windows, + target_lexicon::OperatingSystem::Wasi => OperatingSystem::Wasi, + target_lexicon::OperatingSystem::Linux => OperatingSystem::Unix, + target_lexicon::OperatingSystem::MacOSX { .. } => OperatingSystem::Unix, + target_lexicon::OperatingSystem::Darwin => OperatingSystem::Unix, + target_lexicon::OperatingSystem::Unknown => OperatingSystem::Unix, + other => unreachable!("unsupported operating system {:?}", other), + } + } +} + #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct TargetInfo { pub architecture: Architecture, + pub operating_system: OperatingSystem, } impl TargetInfo { @@ -28,18 +50,21 @@ impl TargetInfo { pub const fn default_aarch64() -> Self { TargetInfo { architecture: Architecture::Aarch64, + operating_system: OperatingSystem::Unix, } } pub const fn default_x86_64() -> Self { TargetInfo { architecture: Architecture::X86_64, + operating_system: OperatingSystem::Unix, } } pub const fn default_wasm32() -> Self { TargetInfo { architecture: Architecture::Wasm32, + operating_system: OperatingSystem::Wasi, } } } @@ -47,14 +72,12 @@ impl TargetInfo { impl From<&target_lexicon::Triple> for TargetInfo { fn from(triple: &target_lexicon::Triple) -> Self { let architecture = Architecture::from(triple.architecture); + let operating_system = OperatingSystem::from(triple.operating_system); - Self { architecture } - } -} - -impl From for TargetInfo { - fn from(architecture: Architecture) -> Self { - Self { architecture } + Self { + architecture, + operating_system, + } } } diff --git a/crates/compiler/solve/Cargo.toml b/crates/compiler/solve/Cargo.toml index f6c3ac556e..34611f4b78 100644 --- a/crates/compiler/solve/Cargo.toml +++ b/crates/compiler/solve/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "roc_solve" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" diff --git a/crates/compiler/solve/src/ability.rs b/crates/compiler/solve/src/ability.rs index c398fca961..dc88e6a877 100644 --- a/crates/compiler/solve/src/ability.rs +++ b/crates/compiler/solve/src/ability.rs @@ -1,12 +1,13 @@ use roc_can::abilities::AbilitiesStore; use roc_can::expr::PendingDerives; use roc_collections::{VecMap, VecSet}; -use roc_error_macros::internal_error; 
+use roc_error_macros::{internal_error, todo_abilities}; use roc_module::symbol::Symbol; use roc_region::all::{Loc, Region}; use roc_solve_problem::{TypeError, UnderivableReason, Unfulfilled}; +use roc_types::num::NumericRange; use roc_types::subs::{instantiate_rigids, Content, FlatType, GetSubsSlice, Rank, Subs, Variable}; -use roc_types::types::{AliasKind, Category, PatternCategory}; +use roc_types::types::{AliasKind, Category, MemberImpl, PatternCategory}; use roc_unify::unify::{Env, MustImplementConstraints}; use roc_unify::unify::{MustImplementAbility, Obligated}; @@ -253,7 +254,20 @@ impl ObligationCache { // independent queries. let opt_can_derive_builtin = match ability { - Symbol::ENCODE_ENCODING => Some(self.can_derive_encoding(subs, abilities_store, var)), + Symbol::ENCODE_ENCODING => Some(DeriveEncoding::is_derivable( + self, + abilities_store, + subs, + var, + )), + + Symbol::DECODE_DECODING => Some(DeriveDecoding::is_derivable( + self, + abilities_store, + subs, + var, + )), + _ => None, }; @@ -262,7 +276,7 @@ impl ObligationCache { // can derive! None } - Some(Err(failure_var)) => Some(if failure_var == var { + Some(Err(DerivableError::NotDerivable(failure_var))) => Some(if failure_var == var { UnderivableReason::SurfaceNotDerivable } else { let (error_type, _skeletons) = subs.var_to_error_type(failure_var); @@ -391,16 +405,133 @@ impl ObligationCache { let check_has_fake = self.derive_cache.insert(derive_key, root_result); debug_assert_eq!(check_has_fake, Some(fake_fulfilled)); } +} - // If we have a lot of these, consider using a visitor. - // It will be very similar for most types (can't derive functions, can't derive unbound type - // variables, can only derive opaques if they have an impl, etc). - fn can_derive_encoding( - &mut self, - subs: &mut Subs, +#[inline(always)] +#[rustfmt::skip] +fn is_builtin_number_alias(symbol: Symbol) -> bool { + matches!(symbol, + Symbol::NUM_U8 | Symbol::NUM_UNSIGNED8 + | Symbol::NUM_U16 | Symbol::NUM_UNSIGNED16 + | Symbol::NUM_U32 | Symbol::NUM_UNSIGNED32 + | Symbol::NUM_U64 | Symbol::NUM_UNSIGNED64 + | Symbol::NUM_U128 | Symbol::NUM_UNSIGNED128 + | Symbol::NUM_I8 | Symbol::NUM_SIGNED8 + | Symbol::NUM_I16 | Symbol::NUM_SIGNED16 + | Symbol::NUM_I32 | Symbol::NUM_SIGNED32 + | Symbol::NUM_I64 | Symbol::NUM_SIGNED64 + | Symbol::NUM_I128 | Symbol::NUM_SIGNED128 + | Symbol::NUM_NAT | Symbol::NUM_NATURAL + | Symbol::NUM_F32 | Symbol::NUM_BINARY32 + | Symbol::NUM_F64 | Symbol::NUM_BINARY64 + | Symbol::NUM_DEC | Symbol::NUM_DECIMAL, + ) +} + +enum DerivableError { + NotDerivable(Variable), +} + +struct Descend(bool); + +trait DerivableVisitor { + const ABILITY: Symbol; + + #[inline(always)] + fn is_derivable_builtin_opaque(_symbol: Symbol) -> bool { + false + } + + #[inline(always)] + fn visit_flex(var: Variable) -> Result<(), DerivableError> { + Err(DerivableError::NotDerivable(var)) + } + + #[inline(always)] + fn visit_rigid(var: Variable) -> Result<(), DerivableError> { + Err(DerivableError::NotDerivable(var)) + } + + #[inline(always)] + fn visit_flex_able(var: Variable, ability: Symbol) -> Result<(), DerivableError> { + if ability != Self::ABILITY { + Err(DerivableError::NotDerivable(var)) + } else { + Ok(()) + } + } + + #[inline(always)] + fn visit_rigid_able(var: Variable, ability: Symbol) -> Result<(), DerivableError> { + if ability != Self::ABILITY { + Err(DerivableError::NotDerivable(var)) + } else { + Ok(()) + } + } + + #[inline(always)] + fn visit_recursion(var: Variable) -> Result { + Err(DerivableError::NotDerivable(var)) + } + + 
#[inline(always)] + fn visit_apply(var: Variable, _symbol: Symbol) -> Result<Descend, DerivableError> { + Err(DerivableError::NotDerivable(var)) + } + + #[inline(always)] + fn visit_func(var: Variable) -> Result<Descend, DerivableError> { + Err(DerivableError::NotDerivable(var)) + } + + #[inline(always)] + fn visit_record(var: Variable) -> Result<Descend, DerivableError> { + Err(DerivableError::NotDerivable(var)) + } + + #[inline(always)] + fn visit_tag_union(var: Variable) -> Result<Descend, DerivableError> { + Err(DerivableError::NotDerivable(var)) + } + + #[inline(always)] + fn visit_recursive_tag_union(var: Variable) -> Result<Descend, DerivableError> { + Err(DerivableError::NotDerivable(var)) + } + + #[inline(always)] + fn visit_function_or_tag_union(var: Variable) -> Result<Descend, DerivableError> { + Err(DerivableError::NotDerivable(var)) + } + + #[inline(always)] + fn visit_empty_record(var: Variable) -> Result<(), DerivableError> { + Err(DerivableError::NotDerivable(var)) + } + + #[inline(always)] + fn visit_empty_tag_union(var: Variable) -> Result<(), DerivableError> { + Err(DerivableError::NotDerivable(var)) + } + + #[inline(always)] + fn visit_alias(var: Variable, _symbol: Symbol) -> Result<Descend, DerivableError> { + Err(DerivableError::NotDerivable(var)) + } + + #[inline(always)] + fn visit_ranged_number(var: Variable, _range: NumericRange) -> Result<(), DerivableError> { + Err(DerivableError::NotDerivable(var)) + } + + #[inline(always)] + fn is_derivable( + obligation_cache: &mut ObligationCache, abilities_store: &AbilitiesStore, + subs: &Subs, var: Variable, - ) -> Result<(), Variable> { + ) -> Result<(), DerivableError> { let mut stack = vec![var]; let mut seen_recursion_vars = vec![]; @@ -418,102 +549,103 @@ impl ObligationCache { let content = subs.get_content_without_compacting(var); use Content::*; + use DerivableError::*; use FlatType::*; - match content { - FlexVar(_) | RigidVar(_) => return Err(var), - FlexAbleVar(_, ability) | RigidAbleVar(_, ability) => { - if *ability != Symbol::ENCODE_ENCODING { - return Err(var); - } - // Any concrete type this variables is instantiated with will also gain a "does - // implement" check so this is okay. - } + match *content { + FlexVar(_) => Self::visit_flex(var)?, + RigidVar(_) => Self::visit_rigid(var)?, + FlexAbleVar(_, ability) => Self::visit_flex_able(var, ability)?, + RigidAbleVar(_, ability) => Self::visit_rigid_able(var, ability)?, RecursionVar { structure, opt_name: _, } => { - seen_recursion_vars.push(var); - stack.push(*structure); + let descend = Self::visit_recursion(var)?; + if descend.0 { + seen_recursion_vars.push(var); + stack.push(structure); + } } Structure(flat_type) => match flat_type { - Apply( - Symbol::LIST_LIST | Symbol::SET_SET | Symbol::DICT_DICT | Symbol::STR_STR, - vars, - ) => push_var_slice!(*vars), - Apply(..) => return Err(var), - Func(..) 
=> { - return Err(var); - } - Record(fields, var) => { - push_var_slice!(fields.variables()); - stack.push(*var); - } - TagUnion(tags, ext_var) => { - for i in tags.variables() { - push_var_slice!(subs[i]); + Apply(symbol, vars) => { + let descend = Self::visit_apply(var, symbol)?; + if descend.0 { + push_var_slice!(vars) } - stack.push(*ext_var); } - FunctionOrTagUnion(_, _, var) => stack.push(*var), - RecursiveTagUnion(rec_var, tags, ext_var) => { - seen_recursion_vars.push(*rec_var); - for i in tags.variables() { - push_var_slice!(subs[i]); + Func(args, _clos, ret) => { + let descend = Self::visit_func(var)?; + if descend.0 { + push_var_slice!(args); + stack.push(ret); } - stack.push(*ext_var); } - EmptyRecord | EmptyTagUnion => { - // yes + Record(fields, ext) => { + let descend = Self::visit_record(var)?; + if descend.0 { + push_var_slice!(fields.variables()); + stack.push(ext); + } } - Erroneous(_) => return Err(var), + TagUnion(tags, ext) => { + let descend = Self::visit_tag_union(var)?; + if descend.0 { + for i in tags.variables() { + push_var_slice!(subs[i]); + } + stack.push(ext); + } + } + FunctionOrTagUnion(_tag_name, _fn_name, ext) => { + let descend = Self::visit_function_or_tag_union(var)?; + if descend.0 { + stack.push(ext); + } + } + RecursiveTagUnion(rec, tags, ext) => { + let descend = Self::visit_recursive_tag_union(var)?; + if descend.0 { + seen_recursion_vars.push(rec); + for i in tags.variables() { + push_var_slice!(subs[i]); + } + stack.push(ext); + } + } + EmptyRecord => Self::visit_empty_record(var)?, + EmptyTagUnion => Self::visit_empty_tag_union(var)?, + + Erroneous(_) => return Err(NotDerivable(var)), }, - #[rustfmt::skip] - Alias( - Symbol::NUM_U8 | Symbol::NUM_UNSIGNED8 - | Symbol::NUM_U16 | Symbol::NUM_UNSIGNED16 - | Symbol::NUM_U32 | Symbol::NUM_UNSIGNED32 - | Symbol::NUM_U64 | Symbol::NUM_UNSIGNED64 - | Symbol::NUM_U128 | Symbol::NUM_UNSIGNED128 - | Symbol::NUM_I8 | Symbol::NUM_SIGNED8 - | Symbol::NUM_I16 | Symbol::NUM_SIGNED16 - | Symbol::NUM_I32 | Symbol::NUM_SIGNED32 - | Symbol::NUM_I64 | Symbol::NUM_SIGNED64 - | Symbol::NUM_I128 | Symbol::NUM_SIGNED128 - | Symbol::NUM_NAT | Symbol::NUM_NATURAL - | Symbol::NUM_F32 | Symbol::NUM_BINARY32 - | Symbol::NUM_F64 | Symbol::NUM_BINARY64 - | Symbol::NUM_DEC | Symbol::NUM_DECIMAL, - _, - _, - _, - ) => { - // yes - } Alias( Symbol::NUM_NUM | Symbol::NUM_INTEGER | Symbol::NUM_FLOATINGPOINT, - _, + _alias_variables, real_var, - _, - ) => stack.push(*real_var), - Alias(name, _, _, AliasKind::Opaque) => { - let opaque = *name; - if self - .check_opaque_and_read(abilities_store, opaque, Symbol::ENCODE_ENCODING) + AliasKind::Opaque, + ) => { + // Numbers: always decay until a ground is hit. + stack.push(real_var); + } + Alias(opaque, _alias_variables, _real_var, AliasKind::Opaque) => { + if obligation_cache + .check_opaque_and_read(abilities_store, opaque, Self::ABILITY) .is_err() + && !Self::is_derivable_builtin_opaque(opaque) { - return Err(var); + return Err(NotDerivable(var)); } } - Alias(_, arguments, real_type_var, _) => { - push_var_slice!(arguments.all_variables()); - stack.push(*real_type_var); + Alias(symbol, _alias_variables, real_var, AliasKind::Structural) => { + let descend = Self::visit_alias(var, symbol)?; + if descend.0 { + stack.push(real_var); + } } - RangedNumber(..) => { - // yes, all numbers can - } - LambdaSet(..) => return Err(var), + RangedNumber(range) => Self::visit_ranged_number(var, range)?, + + LambdaSet(..) 
=> return Err(NotDerivable(var)), Error => { - return Err(var); + return Err(NotDerivable(var)); } } } @@ -522,6 +654,148 @@ impl ObligationCache { } } +struct DeriveEncoding; +impl DerivableVisitor for DeriveEncoding { + const ABILITY: Symbol = Symbol::ENCODE_ENCODING; + + #[inline(always)] + fn is_derivable_builtin_opaque(symbol: Symbol) -> bool { + is_builtin_number_alias(symbol) + } + + #[inline(always)] + fn visit_recursion(_var: Variable) -> Result { + Ok(Descend(true)) + } + + #[inline(always)] + fn visit_apply(var: Variable, symbol: Symbol) -> Result { + if matches!( + symbol, + Symbol::LIST_LIST | Symbol::SET_SET | Symbol::DICT_DICT | Symbol::STR_STR, + ) { + Ok(Descend(true)) + } else { + Err(DerivableError::NotDerivable(var)) + } + } + + #[inline(always)] + fn visit_record(_var: Variable) -> Result { + Ok(Descend(true)) + } + + #[inline(always)] + fn visit_tag_union(_var: Variable) -> Result { + Ok(Descend(true)) + } + + #[inline(always)] + fn visit_recursive_tag_union(_var: Variable) -> Result { + Ok(Descend(true)) + } + + #[inline(always)] + fn visit_function_or_tag_union(_var: Variable) -> Result { + Ok(Descend(true)) + } + + #[inline(always)] + fn visit_empty_record(_var: Variable) -> Result<(), DerivableError> { + Ok(()) + } + + #[inline(always)] + fn visit_empty_tag_union(_var: Variable) -> Result<(), DerivableError> { + Ok(()) + } + + #[inline(always)] + fn visit_alias(_var: Variable, symbol: Symbol) -> Result { + if is_builtin_number_alias(symbol) { + Ok(Descend(false)) + } else { + Ok(Descend(true)) + } + } + + #[inline(always)] + fn visit_ranged_number(_var: Variable, _range: NumericRange) -> Result<(), DerivableError> { + Ok(()) + } +} + +struct DeriveDecoding; +impl DerivableVisitor for DeriveDecoding { + const ABILITY: Symbol = Symbol::DECODE_DECODING; + + #[inline(always)] + fn is_derivable_builtin_opaque(symbol: Symbol) -> bool { + is_builtin_number_alias(symbol) + } + + #[inline(always)] + fn visit_recursion(_var: Variable) -> Result { + Ok(Descend(true)) + } + + #[inline(always)] + fn visit_apply(var: Variable, symbol: Symbol) -> Result { + if matches!( + symbol, + Symbol::LIST_LIST | Symbol::SET_SET | Symbol::DICT_DICT | Symbol::STR_STR, + ) { + Ok(Descend(true)) + } else { + Err(DerivableError::NotDerivable(var)) + } + } + + #[inline(always)] + fn visit_record(_var: Variable) -> Result { + Ok(Descend(true)) + } + + #[inline(always)] + fn visit_tag_union(_var: Variable) -> Result { + Ok(Descend(true)) + } + + #[inline(always)] + fn visit_recursive_tag_union(_var: Variable) -> Result { + Ok(Descend(true)) + } + + #[inline(always)] + fn visit_function_or_tag_union(_var: Variable) -> Result { + Ok(Descend(true)) + } + + #[inline(always)] + fn visit_empty_record(_var: Variable) -> Result<(), DerivableError> { + Ok(()) + } + + #[inline(always)] + fn visit_empty_tag_union(_var: Variable) -> Result<(), DerivableError> { + Ok(()) + } + + #[inline(always)] + fn visit_alias(_var: Variable, symbol: Symbol) -> Result { + if is_builtin_number_alias(symbol) { + Ok(Descend(false)) + } else { + Ok(Descend(true)) + } + } + + #[inline(always)] + fn visit_ranged_number(_var: Variable, _range: NumericRange) -> Result<(), DerivableError> { + Ok(()) + } +} + /// Determines what type implements an ability member of a specialized signature, given the /// [MustImplementAbility] constraints of the signature. pub fn type_implementing_specialization( @@ -547,31 +821,69 @@ pub fn type_implementing_specialization( } /// Result of trying to resolve an ability specialization. 
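For orientation: the `DerivableVisitor` refactor above replaces a single hard-coded `Encoding` walk with a trait whose `visit_*` hooks default to "not derivable" and return a `Descend(bool)` to steer traversal, so each ability (`DeriveEncoding`, `DeriveDecoding`) only overrides the shapes it accepts. Below is a minimal, self-contained sketch of that default-deny pattern; `Shape`, `NotDerivable`, and `DeriveEncodingLike` are invented stand-ins, not the real `Subs`/`Variable` machinery.

// Toy stand-ins for the real compiler types; everything here is illustrative only.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Shape {
    Str,
    List,
    Function,
    Record,
}

struct Descend(bool);

#[derive(Debug, PartialEq)]
enum NotDerivable {
    Because(Shape),
}

// Every hook defaults to "not derivable"; implementors opt in per shape.
trait DerivableVisitor {
    fn visit_str(shape: Shape) -> Result<Descend, NotDerivable> {
        Err(NotDerivable::Because(shape))
    }
    fn visit_list(shape: Shape) -> Result<Descend, NotDerivable> {
        Err(NotDerivable::Because(shape))
    }
    fn visit_function(shape: Shape) -> Result<Descend, NotDerivable> {
        Err(NotDerivable::Because(shape))
    }
    fn visit_record(shape: Shape) -> Result<Descend, NotDerivable> {
        Err(NotDerivable::Because(shape))
    }

    fn is_derivable(shapes: &[Shape]) -> Result<(), NotDerivable> {
        for &shape in shapes {
            // In the real walker a `Descend(true)` pushes child variables onto a
            // stack; here we only check acceptance.
            let _descend = match shape {
                Shape::Str => Self::visit_str(shape)?,
                Shape::List => Self::visit_list(shape)?,
                Shape::Function => Self::visit_function(shape)?,
                Shape::Record => Self::visit_record(shape)?,
            };
        }
        Ok(())
    }
}

// An "Encoding-like" deriver accepts strings, lists and records, but never functions.
struct DeriveEncodingLike;
impl DerivableVisitor for DeriveEncodingLike {
    fn visit_str(_: Shape) -> Result<Descend, NotDerivable> {
        Ok(Descend(false))
    }
    fn visit_list(_: Shape) -> Result<Descend, NotDerivable> {
        Ok(Descend(true))
    }
    fn visit_record(_: Shape) -> Result<Descend, NotDerivable> {
        Ok(Descend(true))
    }
}

fn main() {
    assert!(DeriveEncodingLike::is_derivable(&[Shape::List, Shape::Str]).is_ok());
    assert!(DeriveEncodingLike::is_derivable(&[Shape::Function]).is_err());
}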
-#[derive(Clone, Copy)] +#[derive(Clone, Copy, Debug)] pub enum Resolved { /// A user-defined specialization should be used. Specialization(Symbol), - /// A specialization must be generated. - NeedsGenerated, + /// A specialization must be generated for the given type variable. + NeedsGenerated(Variable), } -pub fn resolve_ability_specialization( +/// An [`AbilityResolver`] is a shell of an abilities store that answers questions needed for +/// [resolving ability specializations][`resolve_ability_specialization`]. +/// +/// The trait is provided so you can implement your own resolver at other points in the compilation +/// process, for example during monomorphization we have module-re-entrant ability stores that are +/// not available during solving. +pub trait AbilityResolver { + /// Gets the parent ability and type of an ability member. + /// + /// If needed, the type of the ability member will be imported into a local `subs` buffer; as + /// such, subs must be provided. + fn member_parent_and_signature_var( + &self, + ability_member: Symbol, + home_subs: &mut Subs, + ) -> Option<(Symbol, Variable)>; + + /// Finds the declared implementation of an [`ImplKey`][roc_can::abilities::ImplKey]. + fn get_implementation(&self, impl_key: roc_can::abilities::ImplKey) -> Option; +} + +/// Trivial implementation of a resolver for a module-local abilities store, that defers all +/// queries to the module store. +impl AbilityResolver for AbilitiesStore { + #[inline(always)] + fn member_parent_and_signature_var( + &self, + ability_member: Symbol, + _home_subs: &mut Subs, // only have access to one abilities store, do nothing with subs + ) -> Option<(Symbol, Variable)> { + self.member_def(ability_member) + .map(|def| (def.parent_ability, def.signature_var())) + } + + #[inline(always)] + fn get_implementation(&self, impl_key: roc_can::abilities::ImplKey) -> Option { + self.get_implementation(impl_key).copied() + } +} + +pub fn resolve_ability_specialization( subs: &mut Subs, - abilities_store: &AbilitiesStore, + resolver: &R, ability_member: Symbol, specialization_var: Variable, ) -> Option { use roc_unify::unify::{unify, Mode}; - let member_def = abilities_store - .member_def(ability_member) + let (parent_ability, signature_var) = resolver + .member_parent_and_signature_var(ability_member, subs) .expect("Not an ability member symbol"); // Figure out the ability we're resolving in a temporary subs snapshot. let snapshot = subs.snapshot(); - let signature_var = member_def.signature_var(); - instantiate_rigids(subs, signature_var); let (_vars, must_implement_ability, _lambda_sets_to_specialize, _meta) = unify( &mut Env::new(subs), @@ -585,8 +897,7 @@ pub fn resolve_ability_specialization( subs.rollback_to(snapshot); - let obligated = - type_implementing_specialization(&must_implement_ability, member_def.parent_ability)?; + let obligated = type_implementing_specialization(&must_implement_ability, parent_ability)?; let resolved = match obligated { Obligated::Opaque(symbol) => { @@ -595,19 +906,21 @@ pub fn resolve_ability_specialization( ability_member, }; - match abilities_store.get_implementation(impl_key)? { + match resolver.get_implementation(impl_key)? { roc_types::types::MemberImpl::Impl(spec_symbol) => { - Resolved::Specialization(*spec_symbol) + Resolved::Specialization(spec_symbol) + } + roc_types::types::MemberImpl::Derived => { + todo_abilities!("get type from obligated opaque") } - roc_types::types::MemberImpl::Derived => Resolved::NeedsGenerated, // TODO this is not correct. 
We can replace `Resolved` with `MemberImpl` entirely, // which will make this simpler. roc_types::types::MemberImpl::Error => Resolved::Specialization(Symbol::UNDERSCORE), } } - Obligated::Adhoc(_) => { + Obligated::Adhoc(variable) => { // TODO: more rules need to be validated here, like is this a builtin ability? - Resolved::NeedsGenerated + Resolved::NeedsGenerated(variable) } }; diff --git a/crates/compiler/solve/src/lib.rs b/crates/compiler/solve/src/lib.rs index d0e42eaf42..8f6a0bcafb 100644 --- a/crates/compiler/solve/src/lib.rs +++ b/crates/compiler/solve/src/lib.rs @@ -5,3 +5,4 @@ pub mod ability; pub mod module; pub mod solve; +pub mod specialize; diff --git a/crates/compiler/solve/src/solve.rs b/crates/compiler/solve/src/solve.rs index c4b926ddcb..53c069a190 100644 --- a/crates/compiler/solve/src/solve.rs +++ b/crates/compiler/solve/src/solve.rs @@ -3,6 +3,9 @@ use crate::ability::{ CheckedDerives, ObligationCache, PendingDerivesTable, Resolved, }; use crate::module::Solved; +use crate::specialize::{ + compact_lambda_sets_of_vars, AwaitingSpecializations, CompactionResult, DerivedEnv, SolvePhase, +}; use bumpalo::Bump; use roc_can::abilities::{AbilitiesStore, MemberSpecializationInfo}; use roc_can::constraint::Constraint::{self, *}; @@ -13,28 +16,27 @@ use roc_can::module::ExposedByModule; use roc_collections::all::MutMap; use roc_debug_flags::dbg_do; #[cfg(debug_assertions)] -use roc_debug_flags::{ROC_TRACE_COMPACTION, ROC_VERIFY_RIGID_LET_GENERALIZED}; +use roc_debug_flags::ROC_VERIFY_RIGID_LET_GENERALIZED; use roc_derive::SharedDerivedModule; -use roc_derive_key::{DeriveError, DeriveKey}; -use roc_error_macros::{internal_error, todo_abilities}; +use roc_error_macros::internal_error; use roc_module::ident::TagName; use roc_module::symbol::{ModuleId, Symbol}; use roc_problem::can::CycleEntry; -use roc_region::all::{Loc, Region}; +use roc_region::all::Loc; use roc_solve_problem::TypeError; use roc_types::subs::{ - self, get_member_lambda_sets_at_region, AliasVariables, Content, Descriptor, FlatType, - GetSubsSlice, LambdaSet, Mark, OptVariable, Rank, RecordFields, Subs, SubsIndex, SubsSlice, - UlsOfVar, UnionLabels, UnionLambdas, UnionTags, Variable, VariableSubsSlice, + self, AliasVariables, Content, Descriptor, FlatType, GetSubsSlice, LambdaSet, Mark, + OptVariable, Rank, RecordFields, Subs, SubsIndex, SubsSlice, UlsOfVar, UnionLabels, + UnionLambdas, UnionTags, Variable, VariableSubsSlice, }; use roc_types::types::Type::{self, *}; use roc_types::types::{ - gather_fields_unsorted_iter, AliasCommon, AliasKind, Category, MemberImpl, OptAbleType, - OptAbleVar, Reason, TypeExtension, Uls, + gather_fields_unsorted_iter, AliasCommon, AliasKind, Category, OptAbleType, OptAbleVar, Reason, + TypeExtension, Uls, }; use roc_unify::unify::{ - unify, unify_introduced_ability_specialization, Env as UEnv, Mode, MustImplementConstraints, - Obligated, SpecializationLsetCollector, Unified::*, + unify, unify_introduced_ability_specialization, Env as UEnv, Mode, Obligated, + SpecializationLsetCollector, Unified::*, }; // Type checking system adapted from Elm by Evan Czaplicki, BSD-3-Clause Licensed @@ -499,98 +501,6 @@ impl Pools { } } -/// What phase in the compiler is reaching out to solve types. -/// This is important to distinguish subtle differences in the behavior of the solving algorithm. -// -// TODO the APIs of this trait suck, this needs a nice cleanup. -pub trait Phase { - /// The regular type-solving phase, or during some later phase of compilation. 
- /// During the solving phase we must anticipate that some information is still unknown and react to - /// that; during late phases, we expect that all information is resolved. - const IS_LATE: bool; - - fn with_module_abilities_store(&self, module: ModuleId, f: F) -> T - where - F: FnMut(&AbilitiesStore) -> T; - - /// Given a known lambda set's ambient function in an external module, copy that ambient - /// function into the given subs. - fn copy_lambda_set_ambient_function_to_home_subs( - &self, - external_lambda_set_var: Variable, - external_module_id: ModuleId, - home_subs: &mut Subs, - ) -> Variable; - - /// Find the ambient function var at a given region for an ability member definition (not a - /// specialization!), and copy that into the given subs. - fn get_and_copy_ability_member_ambient_function( - &self, - ability_member: Symbol, - region: u8, - home_subs: &mut Subs, - ) -> Variable; -} - -struct SolvePhase<'a> { - abilities_store: &'a AbilitiesStore, -} -impl Phase for SolvePhase<'_> { - const IS_LATE: bool = false; - - fn with_module_abilities_store(&self, _module: ModuleId, mut f: F) -> T - where - F: FnMut(&AbilitiesStore) -> T, - { - // During solving we're only aware of our module's abilities store. - f(self.abilities_store) - } - - fn copy_lambda_set_ambient_function_to_home_subs( - &self, - external_lambda_set_var: Variable, - _external_module_id: ModuleId, - home_subs: &mut Subs, - ) -> Variable { - // During solving we're only aware of our module's abilities store, the var must - // be in our module store. Even if the specialization lambda set comes from another - // module, we should have taken care to import it before starting solving in this module. - let LambdaSet { - ambient_function, .. - } = home_subs.get_lambda_set(external_lambda_set_var); - ambient_function - } - - fn get_and_copy_ability_member_ambient_function( - &self, - ability_member: Symbol, - region: u8, - home_subs: &mut Subs, - ) -> Variable { - // During solving we're only aware of our module's abilities store, the var must - // be in our module store. Even if the specialization lambda set comes from another - // module, we should have taken care to import it before starting solving in this module. - let member_def = self - .abilities_store - .member_def(ability_member) - .unwrap_or_else(|| { - internal_error!( - "{:?} is not resolved, or not an ability member!", - ability_member - ) - }); - let member_var = member_def.signature_var(); - - let region_lset = get_member_lambda_sets_at_region(home_subs, member_var, region); - - let LambdaSet { - ambient_function, .. - } = home_subs.get_lambda_set(region_lset); - - ambient_function - } -} - #[derive(Clone)] struct State { env: Env, @@ -650,6 +560,7 @@ fn run_in_place( let arena = Bump::new(); let mut obligation_cache = ObligationCache::default(); + let mut awaiting_specializations = AwaitingSpecializations::default(); let pending_derives = PendingDerivesTable::new(subs, aliases, pending_derives); let CheckedDerives { @@ -658,9 +569,10 @@ fn run_in_place( } = obligation_cache.check_derives(subs, abilities_store, pending_derives); problems.extend(derives_problems); - // Because we don't know what ability specializations are available until the entire module is - // solved, we must wait to solve unspecialized lambda sets then. 
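The `Phase` trait removed here (and re-landed in `specialize.rs` further below) lets lambda-set specialization run both during module solving and in later compilation phases, with `IS_LATE` switching between "some information may still be missing" and "everything must already be resolved". A rough standalone sketch of that shape follows; the lookup types and names (`SolveLike`, `LateLike`, `resolve`) are invented for illustration and stand in for the abilities store.

use std::collections::HashMap;

// Invented stand-ins: a "specialization" is just a string here.
type MemberName = &'static str;
type Specialization = &'static str;

trait Phase {
    /// `false` during module solving, `true` in later phases where every
    /// specialization must already be known.
    const IS_LATE: bool;

    fn lookup(&self, member: MemberName) -> Option<Specialization>;
}

struct SolveLike {
    known: HashMap<MemberName, Specialization>,
}

impl Phase for SolveLike {
    const IS_LATE: bool = false;

    fn lookup(&self, member: MemberName) -> Option<Specialization> {
        self.known.get(member).copied()
    }
}

struct LateLike {
    known: HashMap<MemberName, Specialization>,
}

impl Phase for LateLike {
    const IS_LATE: bool = true;

    fn lookup(&self, member: MemberName) -> Option<Specialization> {
        self.known.get(member).copied()
    }
}

// During solving a missing entry is tolerated (it may arrive later); in a late
// phase it is a compiler bug, mirroring the `(IS_LATE, None) => internal_error!` arm.
fn resolve<P: Phase>(phase: &P, member: MemberName) -> Result<Specialization, String> {
    match (P::IS_LATE, phase.lookup(member)) {
        (_, Some(spec)) => Ok(spec),
        (false, None) => Err(format!("{member}: not known yet, defer")),
        (true, None) => panic!("{member}: must be known by a late phase"),
    }
}

fn main() {
    let solve = SolveLike { known: HashMap::new() };
    assert!(resolve(&solve, "toEncoder").is_err());

    let mut known = HashMap::new();
    known.insert("toEncoder", "Str.toEncoder");
    let late = LateLike { known };
    assert_eq!(resolve(&late, "toEncoder").unwrap(), "Str.toEncoder");
}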
- let mut deferred_uls_to_resolve = UlsOfVar::default(); + let derived_env = DerivedEnv { + derived_module: &derived_module, + exposed_types: exposed_by_module, + }; let state = solve( &arena, @@ -674,28 +586,10 @@ fn run_in_place( constraint, abilities_store, &mut obligation_cache, - &mut deferred_uls_to_resolve, + &mut awaiting_specializations, + &derived_env, ); - // Now that the module has been solved, we can run through and check all - // types claimed to implement abilities. This will also tell us what derives - // are legal, which we need to register. - let new_must_implement = compact_lambda_sets_of_vars( - subs, - &derived_module, - &arena, - &mut pools, - deferred_uls_to_resolve, - &SolvePhase { abilities_store }, - exposed_by_module, - ); - problems.extend(obligation_cache.check_obligations( - subs, - abilities_store, - new_must_implement, - AbilityImplError::DoesNotImplement, - )); - state.env } @@ -748,7 +642,8 @@ fn solve( constraint: &Constraint, abilities_store: &mut AbilitiesStore, obligation_cache: &mut ObligationCache, - deferred_uls_to_resolve: &mut UlsOfVar, + awaiting_specializations: &mut AwaitingSpecializations, + derived_env: &DerivedEnv, ) -> State { let initial = Work::Constraint { env: &Env::default(), @@ -804,11 +699,13 @@ fn solve( check_ability_specialization( arena, subs, + derived_env, pools, rank, abilities_store, + obligation_cache, + awaiting_specializations, problems, - deferred_uls_to_resolve, *symbol, *loc_var, ); @@ -911,11 +808,13 @@ fn solve( check_ability_specialization( arena, subs, + derived_env, pools, rank, abilities_store, + obligation_cache, + awaiting_specializations, problems, - deferred_uls_to_resolve, *symbol, *loc_var, ); @@ -981,7 +880,17 @@ fn solve( ); problems.extend(new_problems); } - deferred_uls_to_resolve.union(lambda_sets_to_specialize); + compact_lambdas_and_check_obligations( + arena, + pools, + problems, + subs, + abilities_store, + obligation_cache, + awaiting_specializations, + derived_env, + lambda_sets_to_specialize, + ); state } @@ -1019,37 +928,10 @@ fn solve( aliases, *source_index, ); - let target = *target; - match unify(&mut UEnv::new(subs), actual, target, Mode::EQ) { - Success { - vars, - // ERROR NOT REPORTED - must_implement_ability: _, - lambda_sets_to_specialize, - extra_metadata: _, - } => { - introduce(subs, rank, pools, &vars); - - deferred_uls_to_resolve.union(lambda_sets_to_specialize); - - state - } - Failure(vars, _actual_type, _expected_type, _bad_impls) => { - introduce(subs, rank, pools, &vars); - - // ERROR NOT REPORTED - - state - } - BadType(vars, _) => { - introduce(subs, rank, pools, &vars); - - // ERROR NOT REPORTED - - state - } - } + let actual_desc = subs.get(actual); + subs.union(*target, actual, actual_desc); + state } Lookup(symbol, expectation_index, region) => { match env.get_var_by_symbol(symbol) { @@ -1103,7 +985,17 @@ fn solve( ); problems.extend(new_problems); } - deferred_uls_to_resolve.union(lambda_sets_to_specialize); + compact_lambdas_and_check_obligations( + arena, + pools, + problems, + subs, + abilities_store, + obligation_cache, + awaiting_specializations, + derived_env, + lambda_sets_to_specialize, + ); state } @@ -1183,7 +1075,17 @@ fn solve( ); problems.extend(new_problems); } - deferred_uls_to_resolve.union(lambda_sets_to_specialize); + compact_lambdas_and_check_obligations( + arena, + pools, + problems, + subs, + abilities_store, + obligation_cache, + awaiting_specializations, + derived_env, + lambda_sets_to_specialize, + ); state } @@ -1356,7 +1258,17 @@ fn solve( ); 
problems.extend(new_problems); } - deferred_uls_to_resolve.union(lambda_sets_to_specialize); + compact_lambdas_and_check_obligations( + arena, + pools, + problems, + subs, + abilities_store, + obligation_cache, + awaiting_specializations, + derived_env, + lambda_sets_to_specialize, + ); state } @@ -1462,7 +1374,17 @@ fn solve( must_implement_ability, AbilityImplError::DoesNotImplement, )); - deferred_uls_to_resolve.union(lambda_sets_to_specialize); + compact_lambdas_and_check_obligations( + arena, + pools, + problems, + subs, + abilities_store, + obligation_cache, + awaiting_specializations, + derived_env, + lambda_sets_to_specialize, + ); // Case 1: unify error types, but don't check exhaustiveness. // Case 2: run exhaustiveness to check for redundant branches. @@ -1565,13 +1487,8 @@ fn solve( state } - // TODO: turning off opportunistic specialization for now because it doesn't mesh well with - // the polymorphic lambda resolution algorithm. After - // https://github.com/rtfeldman/roc/issues/3207 is resolved, this may be redundant - // anyway. &Resolve(OpportunisticResolve { specialization_variable, - specialization_expectation, member, specialization_id, }) => { @@ -1584,18 +1501,6 @@ fn solve( ) { abilities_store.insert_resolved(specialization_id, specialization); - - // We must now refine the current type state to account for this specialization. - let lookup_constr = arena.alloc(Constraint::Lookup( - specialization, - specialization_expectation, - Region::zero(), - )); - stack.push(Work::Constraint { - env, - rank, - constraint: lookup_constr, - }); } state @@ -1647,6 +1552,38 @@ fn solve( state } +#[allow(clippy::too_many_arguments)] +fn compact_lambdas_and_check_obligations( + arena: &Bump, + pools: &mut Pools, + problems: &mut Vec, + subs: &mut Subs, + abilities_store: &mut AbilitiesStore, + obligation_cache: &mut ObligationCache, + awaiting_specialization: &mut AwaitingSpecializations, + derived_env: &DerivedEnv, + lambda_sets_to_specialize: UlsOfVar, +) { + let CompactionResult { + obligations, + awaiting_specialization: new_awaiting, + } = compact_lambda_sets_of_vars( + subs, + derived_env, + arena, + pools, + lambda_sets_to_specialize, + &SolvePhase { abilities_store }, + ); + problems.extend(obligation_cache.check_obligations( + subs, + abilities_store, + obligations, + AbilityImplError::DoesNotImplement, + )); + awaiting_specialization.union(new_awaiting); +} + fn open_tag_union(subs: &mut Subs, var: Variable) { let mut stack = vec![var]; while let Some(var) = stack.pop() { @@ -1693,11 +1630,13 @@ fn open_tag_union(subs: &mut Subs, var: Variable) { fn check_ability_specialization( arena: &Bump, subs: &mut Subs, + derived_env: &DerivedEnv, pools: &mut Pools, rank: Rank, abilities_store: &mut AbilitiesStore, + obligation_cache: &mut ObligationCache, + awaiting_specializations: &mut AwaitingSpecializations, problems: &mut Vec, - deferred_uls_to_resolve: &mut UlsOfVar, symbol: Symbol, symbol_loc_var: Loc, ) { @@ -1730,7 +1669,7 @@ fn check_ability_specialization( Success { vars, must_implement_ability, - lambda_sets_to_specialize: other_lambda_sets_to_specialize, + lambda_sets_to_specialize, extra_metadata: SpecializationLsetCollector(specialization_lambda_sets), } => { let specialization_type = @@ -1754,7 +1693,17 @@ fn check_ability_specialization( }) .collect(); - deferred_uls_to_resolve.union(other_lambda_sets_to_specialize); + compact_lambdas_and_check_obligations( + arena, + pools, + problems, + subs, + abilities_store, + obligation_cache, + awaiting_specializations, + 
derived_env, + lambda_sets_to_specialize, + ); let specialization = MemberSpecializationInfo::new(symbol, specialization_lambda_sets); @@ -1865,534 +1814,27 @@ fn check_ability_specialization( abilities_store .mark_implementation(impl_key, resolved_mark) .expect("marked as a custom implementation, but not recorded as such"); - } -} -#[cfg(debug_assertions)] -fn trace_compaction_step_1(subs: &Subs, c_a: Variable, uls_a: &[Variable]) { - let c_a = roc_types::subs::SubsFmtContent(subs.get_content_without_compacting(c_a), subs); - let uls_a = uls_a - .iter() - .map(|v| { - format!( - "{:?}", - roc_types::subs::SubsFmtContent(subs.get_content_without_compacting(*v), subs) - ) - }) - .collect::>() - .join(","); - eprintln!("===lambda set compaction==="); - eprintln!(" concrete type: {:?}", c_a); - eprintln!(" step 1:"); - eprintln!(" uls_a = {{ {} }}", uls_a); -} - -#[cfg(debug_assertions)] -fn trace_compaction_step_2(subs: &Subs, uls_a: &[Variable]) { - let uls_a = uls_a - .iter() - .map(|v| { - format!( - "{:?}", - roc_types::subs::SubsFmtContent(subs.get_content_without_compacting(*v), subs) - ) - }) - .collect::>() - .join(","); - eprintln!(" step 2:"); - eprintln!(" uls_a' = {{ {} }}", uls_a); -} - -#[cfg(debug_assertions)] -fn trace_compaction_step_3start() { - eprintln!(" step 3:"); -} - -#[cfg(debug_assertions)] -fn trace_compaction_step_3iter_start( - subs: &Subs, - iteration_lambda_set: Variable, - t_f1: Variable, - t_f2: Variable, -) { - let iteration_lambda_set = roc_types::subs::SubsFmtContent( - subs.get_content_without_compacting(iteration_lambda_set), - subs, - ); - let t_f1 = roc_types::subs::SubsFmtContent(subs.get_content_without_compacting(t_f1), subs); - let t_f2 = roc_types::subs::SubsFmtContent(subs.get_content_without_compacting(t_f2), subs); - eprintln!(" - iteration: {:?}", iteration_lambda_set); - eprintln!(" {:?}", t_f1); - eprintln!(" ~ {:?}", t_f2); -} - -#[cfg(debug_assertions)] -#[rustfmt::skip] -fn trace_compaction_step_3iter_end(subs: &Subs, t_f_result: Variable, skipped: bool) { - let t_f_result = - roc_types::subs::SubsFmtContent(subs.get_content_without_compacting(t_f_result), subs); - if skipped { - eprintln!(" SKIP"); - } - eprintln!(" = {:?}\n", t_f_result); -} - -macro_rules! trace_compact { - (1. $subs:expr, $c_a:expr, $uls_a:expr) => {{ - dbg_do!(ROC_TRACE_COMPACTION, { - trace_compaction_step_1($subs, $c_a, $uls_a) - }) - }}; - (2. $subs:expr, $uls_a:expr) => {{ - dbg_do!(ROC_TRACE_COMPACTION, { - trace_compaction_step_2($subs, $uls_a) - }) - }}; - (3start.) => {{ - dbg_do!(ROC_TRACE_COMPACTION, { trace_compaction_step_3start() }) - }}; - (3iter_start. $subs:expr, $iteration_lset:expr, $t_f1:expr, $t_f2:expr) => {{ - dbg_do!(ROC_TRACE_COMPACTION, { - trace_compaction_step_3iter_start($subs, $iteration_lset, $t_f1, $t_f2) - }) - }}; - (3iter_end. $subs:expr, $t_f_result:expr) => {{ - dbg_do!(ROC_TRACE_COMPACTION, { - trace_compaction_step_3iter_end($subs, $t_f_result, false) - }) - }}; - (3iter_end_skipped. $subs:expr, $t_f_result:expr) => {{ - dbg_do!(ROC_TRACE_COMPACTION, { - trace_compaction_step_3iter_end($subs, $t_f_result, true) - }) - }}; -} - -#[inline(always)] -fn iter_concrete_of_unspecialized<'a>( - subs: &'a Subs, - c_a: Variable, - uls: &'a [Uls], -) -> impl Iterator { - uls.iter() - .filter(move |Uls(var, _, _)| subs.equivalent_without_compacting(*var, c_a)) -} - -/// Gets the unique unspecialized lambda resolving to concrete type `c_a` in a list of -/// unspecialized lambda sets. 
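The `unique_unspecialized_lambda` helper whose removal continues just below (it reappears unchanged in `specialize.rs`) relies on the invariant that, after flattening, each lambda set carries exactly one unspecialized lambda whose variable is equivalent to the concrete type `c_a`. The same idea in isolation, over a plain slice with a hypothetical predicate rather than `Uls` entries pulled out of `Subs`:

/// Returns the unique element matching `pred`, if any, and debug-asserts that
/// no second match exists -- the same shape as `unique_unspecialized_lambda`.
fn unique_match<T: Copy>(items: &[T], pred: impl Fn(&T) -> bool) -> Option<T> {
    let mut matches = items.iter().copied().filter(|item| pred(item));
    let first = matches.next()?;
    debug_assert!(matches.next().is_none(), "multiple matches");
    Some(first)
}

fn main() {
    // (member, region) pairs standing in for `Uls(var, f, r)` entries.
    let uls = [("toEncoder", 2u8), ("custom", 1u8)];
    assert_eq!(
        unique_match(&uls, |(member, _)| *member == "toEncoder"),
        Some(("toEncoder", 2))
    );
    assert_eq!(unique_match(&uls, |(member, _)| *member == "decode"), None);
}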
-#[inline(always)] -fn unique_unspecialized_lambda(subs: &Subs, c_a: Variable, uls: &[Uls]) -> Option { - let mut iter_concrete = iter_concrete_of_unspecialized(subs, c_a, uls); - let uls = iter_concrete.next()?; - debug_assert!(iter_concrete.next().is_none(), "multiple concrete"); - Some(*uls) -} - -#[must_use] -pub fn compact_lambda_sets_of_vars( - subs: &mut Subs, - derived_module: &SharedDerivedModule, - arena: &Bump, - pools: &mut Pools, - uls_of_var: UlsOfVar, - phase: &P, - exposed_by_module: &ExposedByModule, -) -> MustImplementConstraints { - // let mut seen = VecSet::default(); - let mut must_implement = MustImplementConstraints::default(); - - let mut uls_of_var_queue = VecDeque::with_capacity(uls_of_var.len()); - uls_of_var_queue.extend(uls_of_var.drain()); - - // Suppose a type variable `a` with `uls_of_var` mapping `uls_a = {l1, ... ln}` has been instantiated to a concrete type `C_a`. - while let Some((c_a, uls_a)) = uls_of_var_queue.pop_front() { - let c_a = subs.get_root_key_without_compacting(c_a); - // 1. Let each `l` in `uls_a` be of form `[solved_lambdas + ... + C:f:r + ...]`. - // NB: There may be multiple unspecialized lambdas of form `C:f:r, C:f1:r1, ..., C:fn:rn` in `l`. - // In this case, let `t1, ... tm` be the other unspecialized lambdas not of form `C:_:_`, - // that is, none of which are now specialized to the type `C`. Then, deconstruct - // `l` such that `l' = [solved_lambdas + t1 + ... + tm + C:f:r]` and `l1 = [[] + C:f1:r1], ..., ln = [[] + C:fn:rn]`. - // Replace `l` with `l', l1, ..., ln` in `uls_a`, flattened. - // TODO: the flattening step described above - let uls_a = uls_a.into_vec(); - trace_compact!(1. subs, c_a, &uls_a); - - // The flattening step - remove lambda sets that don't reference the concrete var, and for - // flatten lambda sets that reference it more than once. - let mut uls_a: Vec<_> = uls_a - .into_iter() - .flat_map(|lambda_set| { - let LambdaSet { - solved, - recursion_var, - unspecialized, - ambient_function, - } = subs.get_lambda_set(lambda_set); - let lambda_set_rank = subs.get_rank(lambda_set); - let unspecialized = subs.get_subs_slice(unspecialized); - // TODO: is it faster to traverse once, see if we only have one concrete lambda, and - // bail in that happy-path, rather than always splitting? - let (concrete, mut not_concrete): (Vec<_>, Vec<_>) = unspecialized - .iter() - .copied() - .partition(|Uls(var, _, _)| subs.equivalent_without_compacting(*var, c_a)); - if concrete.len() == 1 { - // No flattening needs to be done, just return the lambda set as-is - return vec![lambda_set]; - } - // Must flatten - concrete - .into_iter() - .enumerate() - .map(|(i, concrete_lambda)| { - let (var, unspecialized) = if i == 0 { - // The first lambda set contains one concrete lambda, plus all solved - // lambdas, plus all other unspecialized lambdas. - // l' = [solved_lambdas + t1 + ... + tm + C:f:r] - let unspecialized = SubsSlice::extend_new( - &mut subs.unspecialized_lambda_sets, - not_concrete - .drain(..) - .chain(std::iter::once(concrete_lambda)), - ); - (lambda_set, unspecialized) - } else { - // All the other lambda sets consists only of their respective concrete - // lambdas. 
- // ln = [[] + C:fn:rn] - let unspecialized = SubsSlice::extend_new( - &mut subs.unspecialized_lambda_sets, - [concrete_lambda], - ); - let var = subs.fresh(Descriptor { - content: Content::Error, - rank: lambda_set_rank, - mark: Mark::NONE, - copy: OptVariable::NONE, - }); - (var, unspecialized) - }; - - subs.set_content( - var, - Content::LambdaSet(LambdaSet { - solved, - recursion_var, - unspecialized, - ambient_function, - }), - ); - var - }) - .collect() - }) - .collect(); - - // 2. Now, each `l` in `uls_a` has a unique unspecialized lambda of form `C:f:r`. - // Sort `uls_a` primarily by `f` (arbitrary order), and secondarily by `r` in descending order. - uls_a.sort_by(|v1, v2| { - let unspec_1 = subs.get_subs_slice(subs.get_lambda_set(*v1).unspecialized); - let unspec_2 = subs.get_subs_slice(subs.get_lambda_set(*v2).unspecialized); - - let Uls(_, f1, r1) = unique_unspecialized_lambda(subs, c_a, unspec_1).unwrap(); - let Uls(_, f2, r2) = unique_unspecialized_lambda(subs, c_a, unspec_2).unwrap(); - - match f1.cmp(&f2) { - std::cmp::Ordering::Equal => { - // Order by descending order of region. - r2.cmp(&r1) - } - ord => ord, - } - }); - - trace_compact!(2. subs, &uls_a); - - // 3. For each `l` in `uls_a` with unique unspecialized lambda `C:f:r`: - // 1. Let `t_f1` be the directly ambient function of the lambda set containing `C:f:r`. Remove `C:f:r` from `t_f1`'s lambda set. - // - For example, `(b' -[[] + Fo:f:2]-> {})` if `C:f:r=Fo:f:2`. Removing `Fo:f:2`, we get `(b' -[[]]-> {})`. - // 2. Let `t_f2` be the directly ambient function of the specialization lambda set resolved by `C:f:r`. - // - For example, `(b -[[] + b:g:1]-> {})` if `C:f:r=Fo:f:2`, running on example from above. - // 3. Unify `t_f1 ~ t_f2`. - trace_compact!(3start.); - for l in uls_a { - // let root_lset = subs.get_root_key_without_compacting(l); - // if seen.contains(&root_lset) { - // continue; - // } - - let (new_must_implement, new_uls_of_var) = compact_lambda_set( - subs, - derived_module, - arena, - pools, - c_a, - l, - phase, - exposed_by_module, - ); - - must_implement.extend(new_must_implement); - uls_of_var_queue.extend(new_uls_of_var.drain()); - - // seen.insert(root_lset); - } - } - - must_implement -} - -#[must_use] -#[allow(clippy::too_many_arguments)] -fn compact_lambda_set( - subs: &mut Subs, - derived_module: &SharedDerivedModule, - arena: &Bump, - pools: &mut Pools, - resolved_concrete: Variable, - this_lambda_set: Variable, - phase: &P, - exposed_by_module: &ExposedByModule, -) -> (MustImplementConstraints, UlsOfVar) { - // 3. For each `l` in `uls_a` with unique unspecialized lambda `C:f:r`: - // 1. Let `t_f1` be the directly ambient function of the lambda set containing `C:f:r`. Remove `C:f:r` from `t_f1`'s lambda set. - // - For example, `(b' -[[] + Fo:f:2]-> {})` if `C:f:r=Fo:f:2`. Removing `Fo:f:2`, we get `(b' -[[]]-> {})`. - // 2. Let `t_f2` be the directly ambient function of the specialization lambda set resolved by `C:f:r`. - // - For example, `(b -[[] + b:g:1]-> {})` if `C:f:r=Fo:f:2`, from the algorithm's running example. - // 3. Unify `t_f1 ~ t_f2`. - let LambdaSet { - solved, - recursion_var, - unspecialized, - ambient_function: t_f1, - } = subs.get_lambda_set(this_lambda_set); - let target_rank = subs.get_rank(this_lambda_set); - - debug_assert!(!unspecialized.is_empty()); - - let unspecialized = subs.get_subs_slice(unspecialized); - - // 1. Let `t_f1` be the directly ambient function of the lambda set containing `C:f:r`. 
- let Uls(c, f, r) = unique_unspecialized_lambda(subs, resolved_concrete, unspecialized).unwrap(); - - debug_assert!(subs.equivalent_without_compacting(c, resolved_concrete)); - - // 1b. Remove `C:f:r` from `t_f1`'s lambda set. - let new_unspecialized: Vec<_> = unspecialized - .iter() - .filter(|Uls(v, _, _)| !subs.equivalent_without_compacting(*v, resolved_concrete)) - .copied() - .collect(); - debug_assert_eq!(new_unspecialized.len(), unspecialized.len() - 1); - let t_f1_lambda_set_without_concrete = LambdaSet { - solved, - recursion_var, - unspecialized: SubsSlice::extend_new( - &mut subs.unspecialized_lambda_sets, - new_unspecialized, - ), - ambient_function: t_f1, - }; - subs.set_content( - this_lambda_set, - Content::LambdaSet(t_f1_lambda_set_without_concrete), - ); - - let specialization_decision = make_specialization_decision(subs, c); - - let specialization_key = match specialization_decision { - SpecializeDecision::Specialize(key) => key, - SpecializeDecision::Drop => { - // Do nothing other than to remove the concrete lambda to drop from the lambda set, - // which we already did in 1b above. - trace_compact!(3iter_end_skipped. subs, t_f1); - return (Default::default(), Default::default()); - } - }; - - let specialization_ambient_function_var = get_specialization_lambda_set_ambient_function( - subs, - derived_module, - phase, - f, - r, - specialization_key, - exposed_by_module, - target_rank, - ); - - let t_f2 = match specialization_ambient_function_var { - Ok(lset) => lset, - Err(()) => { - // Do nothing other than to remove the concrete lambda to drop from the lambda set, - // which we already did in 1b above. - trace_compact!(3iter_end_skipped. subs, t_f1); - return (Default::default(), Default::default()); - } - }; - - // Ensure the specialized ambient function we'll unify with is not a generalized one, but one - // at the rank of the lambda set being compacted. - let t_f2 = deep_copy_var_in(subs, target_rank, pools, t_f2, arena); - - // 3. Unify `t_f1 ~ t_f2`. - trace_compact!(3iter_start. subs, this_lambda_set, t_f1, t_f2); - let (vars, new_must_implement_ability, new_lambda_sets_to_specialize, _meta) = - unify(&mut UEnv::new(subs), t_f1, t_f2, Mode::EQ) - .expect_success("ambient functions don't unify"); - trace_compact!(3iter_end. subs, t_f1); - - introduce(subs, target_rank, pools, &vars); - - (new_must_implement_ability, new_lambda_sets_to_specialize) -} - -enum SpecializationTypeKey { - Opaque(Symbol), - Derived(DeriveKey), - Immediate(Symbol), -} - -enum SpecializeDecision { - Specialize(SpecializationTypeKey), - Drop, -} - -fn make_specialization_decision(subs: &Subs, var: Variable) -> SpecializeDecision { - use Content::*; - use SpecializationTypeKey::*; - match subs.get_content_without_compacting(var) { - Alias(opaque, _, _, AliasKind::Opaque) if opaque.module_id() != ModuleId::NUM => { - SpecializeDecision::Specialize(Opaque(*opaque)) - } - Structure(_) | Alias(_, _, _, _) => { - // This is a structural type, find the name of the derived ability function it - // should use. - match roc_derive_key::Derived::encoding(subs, var) { - Ok(derived) => match derived { - roc_derive_key::Derived::Immediate(imm) => { - SpecializeDecision::Specialize(Immediate(imm)) - // todo!("deal with lambda set extraction from immediates") - } - roc_derive_key::Derived::Key(derive_key) => { - SpecializeDecision::Specialize(Derived(derive_key)) - } - }, - Err(DeriveError::UnboundVar) => { - // not specialized yet, but that also means that it can't possibly be derivable - // at this point? 
- // TODO: is this right? Revisit if it causes us problems in the future. - SpecializeDecision::Drop - } - Err(DeriveError::Underivable) => { - // we should have reported an error for this; drop the lambda set. - SpecializeDecision::Drop - } - } - } - Error => SpecializeDecision::Drop, - FlexAbleVar(_, _) - | RigidAbleVar(..) - | FlexVar(..) - | RigidVar(..) - | RecursionVar { .. } - | LambdaSet(..) - | RangedNumber(..) => { - internal_error!("unexpected") - } - } -} - -#[allow(clippy::too_many_arguments)] -fn get_specialization_lambda_set_ambient_function( - subs: &mut Subs, - derived_module: &SharedDerivedModule, - phase: &P, - ability_member: Symbol, - lset_region: u8, - specialization_key: SpecializationTypeKey, - exposed_by_module: &ExposedByModule, - target_rank: Rank, -) -> Result { - match specialization_key { - SpecializationTypeKey::Opaque(opaque) => { - let opaque_home = opaque.module_id(); - let external_specialized_lset = - phase.with_module_abilities_store(opaque_home, |abilities_store| { - let impl_key = roc_can::abilities::ImplKey { - opaque, - ability_member, - }; - let opt_specialization = - abilities_store.get_implementation(impl_key); - match (P::IS_LATE, opt_specialization) { - (false, None) => { - // doesn't specialize, we'll have reported an error for this - Err(()) - } - (true, None) => { - internal_error!( - "expected to know a specialization for {:?}#{:?}, but it wasn't found", - opaque, - ability_member, - ); - } - (_, Some(member_impl)) => match member_impl { - MemberImpl::Impl(spec_symbol) => { - let specialization = - abilities_store.specialization_info(*spec_symbol).expect("expected custom implementations to always have complete specialization info by this point"); - - let specialized_lambda_set = *specialization - .specialization_lambda_sets - .get(&lset_region) - .expect("lambda set region not resolved"); - Ok(specialized_lambda_set) - } - MemberImpl::Derived => todo_abilities!(), - MemberImpl::Error => todo_abilities!(), - }, - } - })?; - - let specialized_ambient = phase.copy_lambda_set_ambient_function_to_home_subs( - external_specialized_lset, - opaque_home, - subs, - ); - - Ok(specialized_ambient) - } - - SpecializationTypeKey::Derived(derive_key) => { - let mut derived_module = derived_module.lock().unwrap(); - - let (_, _, specialization_lambda_sets) = - derived_module.get_or_insert(exposed_by_module, derive_key); - - let specialized_lambda_set = *specialization_lambda_sets - .get(&lset_region) - .expect("lambda set region not resolved"); - - let specialized_ambient = derived_module.copy_lambda_set_ambient_function_to_subs( - specialized_lambda_set, - subs, - target_rank, - ); - - Ok(specialized_ambient) - } - - SpecializationTypeKey::Immediate(imm) => { - // Immediates are like opaques in that we can simply look up their type definition in - // the ability store, there is nothing new to synthesize. - // - // THEORY: if something can become an immediate, it will always be available in the - // local ability store, because the transformation is local (?) - let immediate_lambda_set_at_region = - phase.get_and_copy_ability_member_ambient_function(imm, lset_region, subs); - - Ok(immediate_lambda_set_at_region) - } + // Get the lambda sets that are ready for specialization because this ability member + // specialization was resolved, and compact them. 
+ let new_lambda_sets_to_specialize = + awaiting_specializations.remove_for_specialized(subs, impl_key); + compact_lambdas_and_check_obligations( + arena, + pools, + problems, + subs, + abilities_store, + obligation_cache, + awaiting_specializations, + derived_env, + new_lambda_sets_to_specialize, + ); + debug_assert!( + !awaiting_specializations.waiting_for(impl_key), + "still have lambda sets waiting for {:?}, but it was just resolved", + impl_key + ); } } @@ -2452,7 +1894,6 @@ impl LocalDefVarsVec<(Symbol, Loc)> { } use std::cell::RefCell; -use std::collections::VecDeque; use std::ops::ControlFlow; std::thread_local! { /// Scratchpad arena so we don't need to allocate a new one all the time @@ -3658,9 +3099,17 @@ fn adjust_rank_content( EmptyRecord => { // from elm-compiler: THEORY: an empty record never needs to get generalized - Rank::toplevel() + // + // But for us, that theory does not hold, because there might be type variables hidden + // inside a lambda set but not on the left or right of an arrow, and records should not + // force de-generalization in such cases. + // + // See https://github.com/rtfeldman/roc/issues/3641 for a longer discussion and + // example. + group_rank } + // THEORY: an empty tag never needs to get generalized EmptyTagUnion => Rank::toplevel(), Record(fields, ext_var) => { @@ -3849,7 +3298,7 @@ fn adjust_rank_content( /// Introduce some variables to Pools at the given rank. /// Also, set each of their ranks in Subs to be the given rank. -fn introduce(subs: &mut Subs, rank: Rank, pools: &mut Pools, vars: &[Variable]) { +pub(crate) fn introduce(subs: &mut Subs, rank: Rank, pools: &mut Pools, vars: &[Variable]) { let pool: &mut Vec = pools.get_mut(rank); for &var in vars.iter() { @@ -3859,7 +3308,7 @@ fn introduce(subs: &mut Subs, rank: Rank, pools: &mut Pools, vars: &[Variable]) pool.extend(vars); } -fn deep_copy_var_in( +pub(crate) fn deep_copy_var_in( subs: &mut Subs, rank: Rank, pools: &mut Pools, diff --git a/crates/compiler/solve/src/specialize.rs b/crates/compiler/solve/src/specialize.rs new file mode 100644 index 0000000000..cfb7dc2b8c --- /dev/null +++ b/crates/compiler/solve/src/specialize.rs @@ -0,0 +1,789 @@ +//! Module [specialize] is resolves specialization lambda sets. + +use std::collections::VecDeque; + +use bumpalo::Bump; +use roc_can::{ + abilities::{AbilitiesStore, ImplKey}, + module::ExposedByModule, +}; +use roc_collections::{VecMap, VecSet}; +use roc_debug_flags::dbg_do; +#[cfg(debug_assertions)] +use roc_debug_flags::ROC_TRACE_COMPACTION; +use roc_derive::SharedDerivedModule; +use roc_derive_key::{DeriveError, DeriveKey}; +use roc_error_macros::{internal_error, todo_abilities}; +use roc_module::symbol::{ModuleId, Symbol}; +use roc_types::{ + subs::{ + get_member_lambda_sets_at_region, Content, Descriptor, GetSubsSlice, LambdaSet, Mark, + OptVariable, Rank, Subs, SubsSlice, UlsOfVar, Variable, + }, + types::{AliasKind, MemberImpl, Uls}, +}; +use roc_unify::unify::{unify, Env as UEnv, Mode, MustImplementConstraints}; + +use crate::solve::{deep_copy_var_in, introduce, Pools}; + +/// What phase in the compiler is reaching out to specialize lambda sets? +/// This is important to distinguish subtle differences in the behavior of the solving algorithm. +// +// TODO the APIs of this trait suck, this needs a nice cleanup. +pub trait Phase { + /// The regular type-solving phase, or during some later phase of compilation. 
+ /// During the solving phase we must anticipate that some information is still unknown and react to + /// that; during late phases, we expect that all information is resolved. + const IS_LATE: bool; + + fn with_module_abilities_store(&self, module: ModuleId, f: F) -> T + where + F: FnMut(&AbilitiesStore) -> T; + + /// Given a known lambda set's ambient function in an external module, copy that ambient + /// function into the given subs. + fn copy_lambda_set_ambient_function_to_home_subs( + &self, + external_lambda_set_var: Variable, + external_module_id: ModuleId, + home_subs: &mut Subs, + ) -> Variable; + + /// Find the ambient function var at a given region for an ability member definition (not a + /// specialization!), and copy that into the given subs. + fn get_and_copy_ability_member_ambient_function( + &self, + ability_member: Symbol, + region: u8, + home_subs: &mut Subs, + ) -> Variable; +} + +pub(crate) struct SolvePhase<'a> { + pub abilities_store: &'a AbilitiesStore, +} +impl Phase for SolvePhase<'_> { + const IS_LATE: bool = false; + + fn with_module_abilities_store(&self, _module: ModuleId, mut f: F) -> T + where + F: FnMut(&AbilitiesStore) -> T, + { + // During solving we're only aware of our module's abilities store. + f(self.abilities_store) + } + + fn copy_lambda_set_ambient_function_to_home_subs( + &self, + external_lambda_set_var: Variable, + _external_module_id: ModuleId, + home_subs: &mut Subs, + ) -> Variable { + // During solving we're only aware of our module's abilities store, the var must + // be in our module store. Even if the specialization lambda set comes from another + // module, we should have taken care to import it before starting solving in this module. + let LambdaSet { + ambient_function, .. + } = home_subs.get_lambda_set(external_lambda_set_var); + ambient_function + } + + fn get_and_copy_ability_member_ambient_function( + &self, + ability_member: Symbol, + region: u8, + home_subs: &mut Subs, + ) -> Variable { + // During solving we're only aware of our module's abilities store, the var must + // be in our module store. Even if the specialization lambda set comes from another + // module, we should have taken care to import it before starting solving in this module. + let member_def = self + .abilities_store + .member_def(ability_member) + .unwrap_or_else(|| { + internal_error!( + "{:?} is not resolved, or not an ability member!", + ability_member + ) + }); + let member_var = member_def.signature_var(); + + let region_lset = get_member_lambda_sets_at_region(home_subs, member_var, region); + + let LambdaSet { + ambient_function, .. + } = home_subs.get_lambda_set(region_lset); + + ambient_function + } +} + +pub struct DerivedEnv<'a> { + pub derived_module: &'a SharedDerivedModule, + /// Exposed types needed by the derived module. + pub exposed_types: &'a ExposedByModule, +} + +#[derive(Default)] +pub struct AwaitingSpecializations { + // What variables' specialized lambda sets in `uls_of_var` will be unlocked for specialization + // when an implementation key's specialization is resolved? 
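`AwaitingSpecializations`, whose fields follow, is essentially a waiting room: lambda sets that cannot be compacted yet are parked under the `ImplKey` they are blocked on and released in a batch once `check_ability_specialization` resolves that key. A simplified standalone analogue using std collections; the key and variable types here (`(&str, &str)`, `u32`) are invented stand-ins for `ImplKey`, `Variable`, and `UlsOfVar`.

use std::collections::{HashMap, HashSet};

// Invented stand-ins for `ImplKey` and `Variable`.
type ImplKey = (&'static str, &'static str); // (opaque, ability member)
type Var = u32;

#[derive(Default)]
struct Waiting {
    // Which variables become compactable once a given implementation is resolved,
    // and which lambda-set variables each of those variables is carrying.
    waiting: HashMap<ImplKey, HashSet<Var>>,
    parked_lambda_sets: HashMap<Var, Vec<Var>>,
}

impl Waiting {
    fn add(&mut self, key: ImplKey, var: Var, lambda_sets: impl IntoIterator<Item = Var>) {
        self.parked_lambda_sets.entry(var).or_default().extend(lambda_sets);
        self.waiting.entry(key).or_default().insert(var);
    }

    /// Once `key`'s specialization is known, everything parked on it is released
    /// for another round of compaction.
    fn remove_for_specialized(&mut self, key: ImplKey) -> Vec<(Var, Vec<Var>)> {
        let vars = self.waiting.remove(&key).unwrap_or_default();
        vars.into_iter()
            .map(|var| (var, self.parked_lambda_sets.remove(&var).unwrap_or_default()))
            .collect()
    }

    fn waiting_for(&self, key: ImplKey) -> bool {
        self.waiting.contains_key(&key)
    }
}

fn main() {
    let mut w = Waiting::default();
    let key = ("Json", "toEncoder");
    w.add(key, 7, [100, 101]);
    assert!(w.waiting_for(key));

    let released = w.remove_for_specialized(key);
    assert_eq!(released, vec![(7, vec![100, 101])]);
    assert!(!w.waiting_for(key));
}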
+ waiting: VecMap>, + uls_of_var: UlsOfVar, +} + +impl AwaitingSpecializations { + pub fn remove_for_specialized(&mut self, subs: &Subs, impl_key: ImplKey) -> UlsOfVar { + let spec_variables = self + .waiting + .remove(&impl_key) + .map(|(_, set)| set) + .unwrap_or_default(); + + let mut result = UlsOfVar::default(); + for var in spec_variables { + let target_lambda_sets = self + .uls_of_var + .remove_dependent_unspecialized_lambda_sets(subs, var); + + result.extend(var, target_lambda_sets); + } + result + } + + pub fn add( + &mut self, + impl_key: ImplKey, + var: Variable, + lambda_sets: impl IntoIterator, + ) { + self.uls_of_var.extend(var, lambda_sets); + let waiting = self.waiting.get_or_insert(impl_key, Default::default); + waiting.insert(var); + } + + pub fn union(&mut self, other: Self) { + for (impl_key, waiting_vars) in other.waiting { + let waiting = self.waiting.get_or_insert(impl_key, Default::default); + waiting.extend(waiting_vars); + } + self.uls_of_var.union(other.uls_of_var); + } + + pub fn waiting_for(&self, impl_key: ImplKey) -> bool { + self.waiting.contains_key(&impl_key) + } +} + +pub struct CompactionResult { + pub obligations: MustImplementConstraints, + pub awaiting_specialization: AwaitingSpecializations, +} + +#[cfg(debug_assertions)] +fn trace_compaction_step_1(subs: &Subs, c_a: Variable, uls_a: &[Variable]) { + let c_a = roc_types::subs::SubsFmtContent(subs.get_content_without_compacting(c_a), subs); + let uls_a = uls_a + .iter() + .map(|v| { + format!( + "{:?}", + roc_types::subs::SubsFmtContent(subs.get_content_without_compacting(*v), subs) + ) + }) + .collect::>() + .join(","); + eprintln!("===lambda set compaction==="); + eprintln!(" concrete type: {:?}", c_a); + eprintln!(" step 1:"); + eprintln!(" uls_a = {{ {} }}", uls_a); +} + +#[cfg(debug_assertions)] +fn trace_compaction_step_2(subs: &Subs, uls_a: &[Variable]) { + let uls_a = uls_a + .iter() + .map(|v| { + format!( + "{:?}", + roc_types::subs::SubsFmtContent(subs.get_content_without_compacting(*v), subs) + ) + }) + .collect::>() + .join(","); + eprintln!(" step 2:"); + eprintln!(" uls_a' = {{ {} }}", uls_a); +} + +#[cfg(debug_assertions)] +fn trace_compaction_step_3start() { + eprintln!(" step 3:"); +} + +#[cfg(debug_assertions)] +fn trace_compaction_step_3iter_start( + subs: &Subs, + iteration_lambda_set: Variable, + t_f1: Variable, + t_f2: Variable, +) { + let iteration_lambda_set = roc_types::subs::SubsFmtContent( + subs.get_content_without_compacting(iteration_lambda_set), + subs, + ); + let t_f1 = roc_types::subs::SubsFmtContent(subs.get_content_without_compacting(t_f1), subs); + let t_f2 = roc_types::subs::SubsFmtContent(subs.get_content_without_compacting(t_f2), subs); + eprintln!(" - iteration: {:?}", iteration_lambda_set); + eprintln!(" {:?}", t_f1); + eprintln!(" ~ {:?}", t_f2); +} + +#[cfg(debug_assertions)] +#[rustfmt::skip] +fn trace_compaction_step_3iter_end(subs: &Subs, t_f_result: Variable, skipped: bool) { + let t_f_result = + roc_types::subs::SubsFmtContent(subs.get_content_without_compacting(t_f_result), subs); + if skipped { + eprintln!(" SKIP"); + } + eprintln!(" = {:?}\n", t_f_result); +} + +macro_rules! trace_compact { + (1. $subs:expr, $c_a:expr, $uls_a:expr) => {{ + dbg_do!(ROC_TRACE_COMPACTION, { + trace_compaction_step_1($subs, $c_a, $uls_a) + }) + }}; + (2. $subs:expr, $uls_a:expr) => {{ + dbg_do!(ROC_TRACE_COMPACTION, { + trace_compaction_step_2($subs, $uls_a) + }) + }}; + (3start.) 
=> {{ + dbg_do!(ROC_TRACE_COMPACTION, { trace_compaction_step_3start() }) + }}; + (3iter_start. $subs:expr, $iteration_lset:expr, $t_f1:expr, $t_f2:expr) => {{ + dbg_do!(ROC_TRACE_COMPACTION, { + trace_compaction_step_3iter_start($subs, $iteration_lset, $t_f1, $t_f2) + }) + }}; + (3iter_end. $subs:expr, $t_f_result:expr) => {{ + dbg_do!(ROC_TRACE_COMPACTION, { + trace_compaction_step_3iter_end($subs, $t_f_result, false) + }) + }}; + (3iter_end_skipped. $subs:expr, $t_f_result:expr) => {{ + dbg_do!(ROC_TRACE_COMPACTION, { + trace_compaction_step_3iter_end($subs, $t_f_result, true) + }) + }}; +} + +#[inline(always)] +fn iter_concrete_of_unspecialized<'a>( + subs: &'a Subs, + c_a: Variable, + uls: &'a [Uls], +) -> impl Iterator { + uls.iter() + .filter(move |Uls(var, _, _)| subs.equivalent_without_compacting(*var, c_a)) +} + +/// Gets the unique unspecialized lambda resolving to concrete type `c_a` in a list of +/// unspecialized lambda sets. +#[inline(always)] +fn unique_unspecialized_lambda(subs: &Subs, c_a: Variable, uls: &[Uls]) -> Option { + let mut iter_concrete = iter_concrete_of_unspecialized(subs, c_a, uls); + let uls = iter_concrete.next()?; + debug_assert!(iter_concrete.next().is_none(), "multiple concrete"); + Some(*uls) +} + +#[must_use] +pub fn compact_lambda_sets_of_vars( + subs: &mut Subs, + derived_env: &DerivedEnv, + arena: &Bump, + pools: &mut Pools, + uls_of_var: UlsOfVar, + phase: &P, +) -> CompactionResult { + let mut must_implement = MustImplementConstraints::default(); + let mut awaiting_specialization = AwaitingSpecializations::default(); + + let mut uls_of_var_queue = VecDeque::with_capacity(uls_of_var.len()); + uls_of_var_queue.extend(uls_of_var.drain()); + + // Suppose a type variable `a` with `uls_of_var` mapping `uls_a = {l1, ... ln}` has been instantiated to a concrete type `C_a`. + while let Some((c_a, uls_a)) = uls_of_var_queue.pop_front() { + let c_a = subs.get_root_key_without_compacting(c_a); + // 1. Let each `l` in `uls_a` be of form `[solved_lambdas + ... + C:f:r + ...]`. + // NB: There may be multiple unspecialized lambdas of form `C:f:r, C:f1:r1, ..., C:fn:rn` in `l`. + // In this case, let `t1, ... tm` be the other unspecialized lambdas not of form `C:_:_`, + // that is, none of which are now specialized to the type `C`. Then, deconstruct + // `l` such that `l' = [solved_lambdas + t1 + ... + tm + C:f:r]` and `l1 = [[] + C:f1:r1], ..., ln = [[] + C:fn:rn]`. + // Replace `l` with `l', l1, ..., ln` in `uls_a`, flattened. + // TODO: the flattening step described above + let uls_a = { + let mut uls = uls_a.into_vec(); + + // De-duplicate lambdas by root key. + uls.iter_mut().for_each(|v| *v = subs.get_root_key(*v)); + uls.sort(); + uls.dedup(); + uls + }; + + trace_compact!(1. subs, c_a, &uls_a); + + // The flattening step - remove lambda sets that don't reference the concrete var, and for + // flatten lambda sets that reference it more than once. + let mut uls_a: Vec<_> = uls_a + .into_iter() + .flat_map(|lambda_set| { + let LambdaSet { + solved, + recursion_var, + unspecialized, + ambient_function, + } = subs.get_lambda_set(lambda_set); + let lambda_set_rank = subs.get_rank(lambda_set); + let unspecialized = subs.get_subs_slice(unspecialized); + // TODO: is it faster to traverse once, see if we only have one concrete lambda, and + // bail in that happy-path, rather than always splitting? 
+ let (concrete, mut not_concrete): (Vec<_>, Vec<_>) = unspecialized + .iter() + .copied() + .partition(|Uls(var, _, _)| subs.equivalent_without_compacting(*var, c_a)); + if concrete.len() == 1 { + // No flattening needs to be done, just return the lambda set as-is + return vec![lambda_set]; + } + // Must flatten + concrete + .into_iter() + .enumerate() + .map(|(i, concrete_lambda)| { + let (var, unspecialized) = if i == 0 { + // The first lambda set contains one concrete lambda, plus all solved + // lambdas, plus all other unspecialized lambdas. + // l' = [solved_lambdas + t1 + ... + tm + C:f:r] + let unspecialized = SubsSlice::extend_new( + &mut subs.unspecialized_lambda_sets, + not_concrete + .drain(..) + .chain(std::iter::once(concrete_lambda)), + ); + (lambda_set, unspecialized) + } else { + // All the other lambda sets consists only of their respective concrete + // lambdas. + // ln = [[] + C:fn:rn] + let unspecialized = SubsSlice::extend_new( + &mut subs.unspecialized_lambda_sets, + [concrete_lambda], + ); + let var = subs.fresh(Descriptor { + content: Content::Error, + rank: lambda_set_rank, + mark: Mark::NONE, + copy: OptVariable::NONE, + }); + (var, unspecialized) + }; + + subs.set_content( + var, + Content::LambdaSet(LambdaSet { + solved, + recursion_var, + unspecialized, + ambient_function, + }), + ); + var + }) + .collect() + }) + .collect(); + + // 2. Now, each `l` in `uls_a` has a unique unspecialized lambda of form `C:f:r`. + // Sort `uls_a` primarily by `f` (arbitrary order), and secondarily by `r` in descending order. + uls_a.sort_by(|v1, v2| { + let unspec_1 = subs.get_subs_slice(subs.get_lambda_set(*v1).unspecialized); + let unspec_2 = subs.get_subs_slice(subs.get_lambda_set(*v2).unspecialized); + + let Uls(_, f1, r1) = unique_unspecialized_lambda(subs, c_a, unspec_1).unwrap(); + let Uls(_, f2, r2) = unique_unspecialized_lambda(subs, c_a, unspec_2).unwrap(); + + match f1.cmp(&f2) { + std::cmp::Ordering::Equal => { + // Order by descending order of region. + r2.cmp(&r1) + } + ord => ord, + } + }); + + trace_compact!(2. subs, &uls_a); + + // 3. For each `l` in `uls_a` with unique unspecialized lambda `C:f:r`: + // 1. Let `t_f1` be the directly ambient function of the lambda set containing `C:f:r`. Remove `C:f:r` from `t_f1`'s lambda set. + // - For example, `(b' -[[] + Fo:f:2]-> {})` if `C:f:r=Fo:f:2`. Removing `Fo:f:2`, we get `(b' -[[]]-> {})`. + // 2. Let `t_f2` be the directly ambient function of the specialization lambda set resolved by `C:f:r`. + // - For example, `(b -[[] + b:g:1]-> {})` if `C:f:r=Fo:f:2`, running on example from above. + // 3. Unify `t_f1 ~ t_f2`. 
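Step 2 of the algorithm described above orders the flattened lambda sets primarily by the ability member `f` (any consistent order) and secondarily by the region `r` in descending order, so that for a given member the outermost region is specialized first. The comparator in isolation, over plain `(f, r)` pairs rather than the unique `C:f:r` of each lambda set:

use std::cmp::Ordering;

fn main() {
    // (member, region) pairs standing in for the unique `C:f:r` of each lambda set.
    let mut uls: Vec<(&str, u8)> = vec![("g", 1), ("f", 1), ("f", 3), ("f", 2)];

    uls.sort_by(|(f1, r1), (f2, r2)| match f1.cmp(f2) {
        // Same member: descending region, so region 3 is handled before region 1.
        Ordering::Equal => r2.cmp(r1),
        ord => ord,
    });

    assert_eq!(uls, vec![("f", 3), ("f", 2), ("f", 1), ("g", 1)]);
}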
+ trace_compact!(3start.); + for l in uls_a { + let compaction_result = + compact_lambda_set(subs, derived_env, arena, pools, c_a, l, phase); + + match compaction_result { + OneCompactionResult::Compacted { + new_obligations, + new_lambda_sets_to_specialize, + } => { + must_implement.extend(new_obligations); + uls_of_var_queue.extend(new_lambda_sets_to_specialize.drain()); + } + OneCompactionResult::MustWaitForSpecialization(impl_key) => { + awaiting_specialization.add(impl_key, c_a, [l]) + } + } + } + } + + CompactionResult { + obligations: must_implement, + awaiting_specialization, + } +} + +enum OneCompactionResult { + Compacted { + new_obligations: MustImplementConstraints, + new_lambda_sets_to_specialize: UlsOfVar, + }, + MustWaitForSpecialization(ImplKey), +} + +#[must_use] +#[allow(clippy::too_many_arguments)] +fn compact_lambda_set( + subs: &mut Subs, + derived_env: &DerivedEnv, + arena: &Bump, + pools: &mut Pools, + resolved_concrete: Variable, + this_lambda_set: Variable, + phase: &P, +) -> OneCompactionResult { + // 3. For each `l` in `uls_a` with unique unspecialized lambda `C:f:r`: + // 1. Let `t_f1` be the directly ambient function of the lambda set containing `C:f:r`. Remove `C:f:r` from `t_f1`'s lambda set. + // - For example, `(b' -[[] + Fo:f:2]-> {})` if `C:f:r=Fo:f:2`. Removing `Fo:f:2`, we get `(b' -[[]]-> {})`. + // 2. Let `t_f2` be the directly ambient function of the specialization lambda set resolved by `C:f:r`. + // - For example, `(b -[[] + b:g:1]-> {})` if `C:f:r=Fo:f:2`, from the algorithm's running example. + // 3. Unify `t_f1 ~ t_f2`. + let LambdaSet { + solved, + recursion_var, + unspecialized, + ambient_function: t_f1, + } = subs.get_lambda_set(this_lambda_set); + let target_rank = subs.get_rank(this_lambda_set); + + debug_assert!(!unspecialized.is_empty()); + + let unspecialized = subs.get_subs_slice(unspecialized); + + // 1. Let `t_f1` be the directly ambient function of the lambda set containing `C:f:r`. + let Uls(c, f, r) = unique_unspecialized_lambda(subs, resolved_concrete, unspecialized).unwrap(); + + debug_assert!(subs.equivalent_without_compacting(c, resolved_concrete)); + + // Now decide: do we + // - proceed with specialization + // - simply drop the specialization lambda set (due to an error) + // - or do we need to wait, because we don't know enough information for the specialization yet? + let specialization_decision = make_specialization_decision(subs, phase, c, f); + let specialization_key_or_drop = match specialization_decision { + SpecializeDecision::Specialize(key) => Ok(key), + SpecializeDecision::Drop => Err(()), + SpecializeDecision::PendingSpecialization(impl_key) => { + // Bail, we need to wait for the specialization to be known. + return OneCompactionResult::MustWaitForSpecialization(impl_key); + } + }; + + // 1b. Remove `C:f:r` from `t_f1`'s lambda set. 
+    let new_unspecialized: Vec<_> = unspecialized
+        .iter()
+        .filter(|Uls(v, _, _)| !subs.equivalent_without_compacting(*v, resolved_concrete))
+        .copied()
+        .collect();
+    debug_assert_eq!(new_unspecialized.len(), unspecialized.len() - 1);
+    let t_f1_lambda_set_without_concrete = LambdaSet {
+        solved,
+        recursion_var,
+        unspecialized: SubsSlice::extend_new(
+            &mut subs.unspecialized_lambda_sets,
+            new_unspecialized,
+        ),
+        ambient_function: t_f1,
+    };
+    subs.set_content(
+        this_lambda_set,
+        Content::LambdaSet(t_f1_lambda_set_without_concrete),
+    );
+
+    let specialization_key = match specialization_key_or_drop {
+        Ok(specialization_key) => specialization_key,
+        Err(()) => {
+            // Do nothing other than remove the concrete lambda to be dropped from the lambda set,
+            // which we already did in 1b above.
+            trace_compact!(3iter_end_skipped. subs, t_f1);
+            return OneCompactionResult::Compacted {
+                new_obligations: Default::default(),
+                new_lambda_sets_to_specialize: Default::default(),
+            };
+        }
+    };
+
+    let specialization_ambient_function_var = get_specialization_lambda_set_ambient_function(
+        subs,
+        derived_env,
+        phase,
+        f,
+        r,
+        specialization_key,
+        target_rank,
+    );
+
+    let t_f2 = match specialization_ambient_function_var {
+        Ok(lset) => lset,
+        Err(()) => {
+            // Do nothing other than remove the concrete lambda to be dropped from the lambda set,
+            // which we already did in 1b above.
+            trace_compact!(3iter_end_skipped. subs, t_f1);
+            return OneCompactionResult::Compacted {
+                new_obligations: Default::default(),
+                new_lambda_sets_to_specialize: Default::default(),
+            };
+        }
+    };
+
+    // Ensure the specialized ambient function we'll unify with is not a generalized one, but one
+    // at the rank of the lambda set being compacted.
+    let t_f2 = deep_copy_var_in(subs, target_rank, pools, t_f2, arena);
+
+    // 3. Unify `t_f1 ~ t_f2`.
+    trace_compact!(3iter_start. subs, this_lambda_set, t_f1, t_f2);
+    let (vars, new_obligations, new_lambda_sets_to_specialize, _meta) = unify(
+        &mut UEnv::new(subs),
+        t_f1,
+        t_f2,
+        Mode::LAMBDA_SET_SPECIALIZATION,
+    )
+    .expect_success("ambient functions don't unify");
+    trace_compact!(3iter_end. subs, t_f1);
+
+    introduce(subs, target_rank, pools, &vars);
+
+    OneCompactionResult::Compacted {
+        new_obligations,
+        new_lambda_sets_to_specialize,
+    }
+}
+
+#[derive(Debug)]
+enum SpecializationTypeKey {
+    Opaque(Symbol),
+    Derived(DeriveKey),
+    Immediate(Symbol),
+}
+
+enum SpecializeDecision {
+    Specialize(SpecializationTypeKey),
+    Drop,
+
+    /// Only relevant during module solving of recursive defs - we don't yet know the
+    /// specialization type for a declared ability implementation, so we must hold off on
+    /// specialization.
+    PendingSpecialization(ImplKey),
+}
+
+fn make_specialization_decision<P: Phase>(
+    subs: &Subs,
+    phase: &P,
+    var: Variable,
+    ability_member: Symbol,
+) -> SpecializeDecision {
+    use Content::*;
+    use SpecializationTypeKey::*;
+    match subs.get_content_without_compacting(var) {
+        Alias(opaque, _, _, AliasKind::Opaque) if opaque.module_id() != ModuleId::NUM => {
+            if P::IS_LATE {
+                SpecializeDecision::Specialize(Opaque(*opaque))
+            } else {
+                // Solving within a module.
+                phase.with_module_abilities_store(opaque.module_id(), |abilities_store| {
+                    let impl_key = ImplKey {
+                        opaque: *opaque,
+                        ability_member,
+                    };
+                    match abilities_store.get_implementation(impl_key) {
+                        None => {
+                            // Doesn't specialize; an error will already be reported for this.
+                            SpecializeDecision::Drop
+                        }
+                        Some(MemberImpl::Error | MemberImpl::Derived) => {
+                            // TODO: probably not right, we may want to choose a derive decision!
+                            SpecializeDecision::Specialize(Opaque(*opaque))
+                        }
+                        Some(MemberImpl::Impl(specialization_symbol)) => {
+                            match abilities_store.specialization_info(*specialization_symbol) {
+                                Some(_) => SpecializeDecision::Specialize(Opaque(*opaque)),
+
+                                // If we expect a specialization impl but don't yet know it, we must hold off
+                                // compacting the lambda set until the specialization is well-known.
+                                None => SpecializeDecision::PendingSpecialization(impl_key),
+                            }
+                        }
+                    }
+                })
+            }
+        }
+        Structure(_) | Alias(_, _, _, _) => {
+            let builtin = match ability_member.try_into() {
+                Ok(builtin) => builtin,
+                Err(_) => return SpecializeDecision::Drop,
+            };
+
+            // This is a structural type; find the derived ability function it should use.
+            match roc_derive_key::Derived::builtin(builtin, subs, var) {
+                Ok(derived) => match derived {
+                    roc_derive_key::Derived::Immediate(imm) => {
+                        SpecializeDecision::Specialize(Immediate(imm))
+                    }
+                    roc_derive_key::Derived::Key(derive_key) => {
+                        SpecializeDecision::Specialize(Derived(derive_key))
+                    }
+                },
+                Err(DeriveError::UnboundVar) => {
+                    // not specialized yet, but that also means that it can't possibly be derivable
+                    // at this point?
+                    // TODO: is this right? Revisit if it causes us problems in the future.
+                    SpecializeDecision::Drop
+                }
+                Err(DeriveError::Underivable) => {
+                    // we should have reported an error for this; drop the lambda set.
+                    SpecializeDecision::Drop
+                }
+            }
+        }
+        Error => SpecializeDecision::Drop,
+        FlexAbleVar(_, _)
+        | RigidAbleVar(..)
+        | FlexVar(..)
+        | RigidVar(..)
+        | RecursionVar { .. }
+        | LambdaSet(..)
+        | RangedNumber(..) => {
+            internal_error!("unexpected")
+        }
+    }
+}
+
+#[allow(clippy::too_many_arguments)]
+fn get_specialization_lambda_set_ambient_function<P: Phase>(
+    subs: &mut Subs,
+    derived_env: &DerivedEnv,
+    phase: &P,
+    ability_member: Symbol,
+    lset_region: u8,
+    specialization_key: SpecializationTypeKey,
+    target_rank: Rank,
+) -> Result<Variable, ()> {
+    match specialization_key {
+        SpecializationTypeKey::Opaque(opaque) => {
+            let opaque_home = opaque.module_id();
+            let external_specialized_lset =
+                phase.with_module_abilities_store(opaque_home, |abilities_store| {
+                    let impl_key = roc_can::abilities::ImplKey {
+                        opaque,
+                        ability_member,
+                    };
+                    let opt_specialization =
+                        abilities_store.get_implementation(impl_key);
+                    match opt_specialization {
+                        None => {
+                            if P::IS_LATE {
+                                internal_error!(
+                                    "expected to know a specialization for {:?}#{:?}, but it wasn't found",
+                                    opaque,
+                                    ability_member
+                                );
+                            } else {
+                                // doesn't specialize; we'll have reported an error for this
+                                Err(())
+                            }
+                        }
+                        Some(member_impl) => match member_impl {
+                            MemberImpl::Impl(spec_symbol) => {
+                                let specialization =
+                                    abilities_store.specialization_info(*spec_symbol).expect("expected custom implementations to always have complete specialization info by this point");
+
+                                let specialized_lambda_set = *specialization
+                                    .specialization_lambda_sets
+                                    .get(&lset_region)
+                                    .expect("lambda set region not resolved");
+                                Ok(specialized_lambda_set)
+                            }
+                            MemberImpl::Derived => todo_abilities!(),
+                            MemberImpl::Error => todo_abilities!(),
+                        },
+                    }
+                })?;
+
+            let specialized_ambient = phase.copy_lambda_set_ambient_function_to_home_subs(
+                external_specialized_lset,
+                opaque_home,
+                subs,
+            );
+
+            Ok(specialized_ambient)
+        }
+
+        SpecializationTypeKey::Derived(derive_key) => {
+            let mut derived_module =
derived_env.derived_module.lock().unwrap(); + + let (_, _, specialization_lambda_sets) = + derived_module.get_or_insert(derived_env.exposed_types, derive_key); + + let specialized_lambda_set = *specialization_lambda_sets + .get(&lset_region) + .expect("lambda set region not resolved"); + + let specialized_ambient = derived_module.copy_lambda_set_ambient_function_to_subs( + specialized_lambda_set, + subs, + target_rank, + ); + + Ok(specialized_ambient) + } + + SpecializationTypeKey::Immediate(imm) => { + // Immediates are like opaques in that we can simply look up their type definition in + // the ability store, there is nothing new to synthesize. + // + // THEORY: if something can become an immediate, it will always be available in the + // local ability store, because the transformation is local (?) + let immediate_lambda_set_at_region = + phase.get_and_copy_ability_member_ambient_function(imm, lset_region, subs); + + Ok(immediate_lambda_set_at_region) + } + } +} diff --git a/crates/compiler/solve/tests/solve_expr.rs b/crates/compiler/solve/tests/solve_expr.rs index afc080ca17..6bacc74603 100644 --- a/crates/compiler/solve/tests/solve_expr.rs +++ b/crates/compiler/solve/tests/solve_expr.rs @@ -3451,7 +3451,7 @@ mod solve_expr { { id1, id2 } "# ), - "{ id1 : q -> q, id2 : a -> a }", + "{ id1 : q -> q, id2 : q1 -> q1 }", ); } @@ -3966,7 +3966,7 @@ mod solve_expr { { a, b } "# ), - "{ a : { x : I64, y : I64, z : Num c }, b : { blah : Str, x : I64, y : I64, z : Num a } }", + "{ a : { x : I64, y : I64, z : Num c }, b : { blah : Str, x : I64, y : I64, z : Num c1 } }", ); } @@ -3997,7 +3997,7 @@ mod solve_expr { { a, b } "# ), - "{ a : { x : Num *, y : Float *, z : c }, b : { blah : Str, x : Num *, y : Float *, z : a } }", + "{ a : { x : Num *, y : Float *, z : c }, b : { blah : Str, x : Num *, y : Float *, z : c1 } }", ); } @@ -6157,7 +6157,7 @@ mod solve_expr { hashEq = \x, y -> hash x == hash y "# ), - "a, b -> Bool | a has Hash, b has Hash", + "a, a1 -> Bool | a has Hash, a1 has Hash", ) } @@ -6510,7 +6510,6 @@ mod solve_expr { } #[test] - #[ignore = "TODO: fix unification of derived types"] fn encode_record() { infer_queries!( indoc!( @@ -6523,14 +6522,11 @@ mod solve_expr { # ^^^^^^^^^ "# ), - @r#" - "Encoding#toEncoder(2) : { a : Str } -[[#Derived.toEncoder_{a}(0)]]-> Encoder fmt | fmt has EncoderFormatting", - "# + @"Encoding#toEncoder(2) : { a : Str } -[[#Derived.toEncoder_{a}(0)]]-> Encoder fmt | fmt has EncoderFormatting" ) } #[test] - #[ignore = "TODO: fix unification of derived types"] fn encode_record_with_nested_custom_impl() { infer_queries!( indoc!( @@ -6539,16 +6535,14 @@ mod solve_expr { imports [Encode.{ toEncoder, Encoding, custom }] provides [main] to "./platform" - A := {} + A := {} has [Encoding {toEncoder}] toEncoder = \@A _ -> custom \b, _ -> b main = toEncoder { a: @A {} } # ^^^^^^^^^ "# ), - @r#" - "Encoding#toEncoder(2) : { a : A } -[[#Derived.toEncoder_{a}(0)]]-> Encoder fmt | fmt has EncoderFormatting", - "# + @"Encoding#toEncoder(2) : { a : A } -[[#Derived.toEncoder_{a}(0)]]-> Encoder fmt | fmt has EncoderFormatting" ) } @@ -6721,7 +6715,7 @@ mod solve_expr { ), @r#" A#id(5) : {} -[[id(5)]]-> ({} -[[8(8)]]-> {}) - Id#id(3) : {} -[[id(5)]]-> ({} -[[8(8)]]-> {}) + Id#id(3) : a -[[] + a:id(3):1]-> ({} -[[] + a:id(3):2]-> a) | a has Id alias : {} -[[id(5)]]-> ({} -[[8(8)]]-> {}) "# print_only_under_alias: true @@ -6831,15 +6825,13 @@ mod solve_expr { ping : a -> a | a has Bounce pong : a -> a | a has Bounce - A := {} has [Bounce {ping, pong}] + A := {} has [Bounce 
{ping: pingA, pong: pongA}] - ping : A -> A - ping = \@A {} -> pong (@A {}) - #^^^^{-1} ^^^^ + pingA = \@A {} -> pong (@A {}) + #^^^^^{-1} ^^^^ - pong : A -> A - pong = \@A {} -> ping (@A {}) - #^^^^{-1} ^^^^ + pongA = \@A {} -> ping (@A {}) + #^^^^^{-1} ^^^^ main = a : A @@ -6850,17 +6842,16 @@ mod solve_expr { "# ), @r###" - A#ping(5) : A -[[ping(5)]]-> A - A#pong(6) : A -[[pong(6)]]-> A - A#pong(6) : A -[[pong(6)]]-> A - A#ping(5) : A -[[ping(5)]]-> A - A#ping(5) : A -[[ping(5)]]-> A + pingA : A -[[pingA(5)]]-> A + A#pong(6) : A -[[pongA(6)]]-> A + pongA : A -[[pongA(6)]]-> A + A#ping(5) : A -[[pingA(5)]]-> A + A#ping(5) : A -[[pingA(5)]]-> A "### ) } #[test] - #[ignore = "TODO: this currently runs into trouble with ping and pong first being inferred as overly-general before recursive constraining"] fn resolve_mutually_recursive_ability_lambda_sets_inferred() { infer_queries!( indoc!( @@ -6889,7 +6880,7 @@ mod solve_expr { ), @r###" A#ping(5) : A -[[ping(5)]]-> A - Bounce#pong(3) : A -[[pong(6)]]-> A + A#pong(6) : A -[[pong(6)]]-> A A#pong(6) : A -[[pong(6)]]-> A A#ping(5) : A -[[ping(5)]]-> A A#ping(5) : A -[[ping(5)]]-> A @@ -7257,24 +7248,11 @@ mod solve_expr { # ^ "# ), - // TODO SERIOUS: Let generalization is broken here, and this is NOT correct!! - // Two problems: - // - 1. `{}` always has its rank adjusted to the toplevel, which forces the rest - // of the type to the toplevel, but that is NOT correct here! - // - 2. During solving lambda set compaction cannot happen until an entire module - // is solved, which forces resolved-but-not-yet-compacted lambdas in - // unspecialized lambda sets to pull the rank into a lower, non-generalized - // rank. Special-casing for that is a TERRIBLE HACK that interferes very - // poorly with (1) - // - // We are BLOCKED on https://github.com/rtfeldman/roc/issues/3207 to make this work - // correctly! - // See also https://github.com/rtfeldman/roc/pull/3175, a separate, but similar problem. 
@r###" Fo#f(7) : Fo -[[f(7)]]-> (b -[[] + b:g(4):1]-> {}) | b has G Go#g(8) : Go -[[g(8)]]-> {} - h : Go -[[g(8)]]-> {} - Fo#f(7) : Fo -[[f(7)]]-> (Go -[[g(8)]]-> {}) + h : b -[[] + b:g(4):1]-> {} | b has G + Fo#f(7) : Fo -[[f(7)]]-> (b -[[] + b:g(4):1]-> {}) | b has G h : Go -[[g(8)]]-> {} "### ); @@ -7346,6 +7324,169 @@ mod solve_expr { ); } + #[test] + fn polymorphic_lambda_set_specialization_varying_over_multiple_variables() { + infer_queries!( + indoc!( + r#" + app "test" provides [main] to "./platform" + + J has j : j -> (k -> {}) | j has J, k has K + K has k : k -> {} | k has K + + C := {} has [J {j: jC}] + jC = \@C _ -> k + #^^{-1} + + D := {} has [J {j: jD}] + jD = \@D _ -> k + #^^{-1} + + E := {} has [K {k}] + k = \@E _ -> {} + #^{-1} + + f = \flag, a, b -> + # ^ ^ + it = + # ^^ + when flag is + A -> j a + # ^ + B -> j b + # ^ + it + # ^^ + + main = (f A (@C {}) (@D {})) (@E {}) + # ^ + # ^^^^^^^^^^^^^^^^^^^ + #^^^^{-1} + "# + ), + @r###" + jC : C -[[jC(8)]]-> (k -[[] + k:k(4):1]-> {}) | k has K + jD : D -[[jD(9)]]-> (k -[[] + k:k(4):1]-> {}) | k has K + E#k(10) : E -[[k(10)]]-> {} + a : j | j has J + b : j | j has J + it : k -[[] + j:j(2):2 + j1:j(2):2]-> {} | j has J, j1 has J, k has K + J#j(2) : j -[[] + j:j(2):1]-> (k -[[] + j:j(2):2 + j1:j(2):2]-> {}) | j has J, j1 has J, k has K + J#j(2) : j -[[] + j:j(2):1]-> (k -[[] + j1:j(2):2 + j:j(2):2]-> {}) | j has J, j1 has J, k has K + it : k -[[] + j:j(2):2 + j1:j(2):2]-> {} | j has J, j1 has J, k has K + f : [A, B], C, D -[[f(11)]]-> (E -[[k(10)]]-> {}) + f A (@C {}) (@D {}) : E -[[k(10)]]-> {} + main : {} + "### + ); + } + + #[test] + fn polymorphic_lambda_set_specialization_varying_over_multiple_variables_two_results() { + infer_queries!( + indoc!( + r#" + app "test" provides [main] to "./platform" + + J has j : j -> (k -> {}) | j has J, k has K + K has k : k -> {} | k has K + + C := {} has [J {j: jC}] + jC = \@C _ -> k + #^^{-1} + + D := {} has [J {j: jD}] + jD = \@D _ -> k + #^^{-1} + + E := {} has [K {k: kE}] + kE = \@E _ -> {} + #^^{-1} + + F := {} has [K {k: kF}] + kF = \@F _ -> {} + #^^{-1} + + f = \flag, a, b -> + # ^ ^ + it = + # ^^ + when flag is + A -> j a + # ^ + B -> j b + # ^ + it + # ^^ + + main = + #^^^^{-1} + it = + # ^^ + (f A (@C {}) (@D {})) + # ^ + if True + then it (@E {}) + # ^^ + else it (@F {}) + # ^^ + "# + ), + @r###" + jC : C -[[jC(9)]]-> (k -[[] + k:k(4):1]-> {}) | k has K + jD : D -[[jD(10)]]-> (k -[[] + k:k(4):1]-> {}) | k has K + kE : E -[[kE(11)]]-> {} + kF : F -[[kF(12)]]-> {} + a : j | j has J + b : j | j has J + it : k -[[] + j:j(2):2 + j1:j(2):2]-> {} | j has J, j1 has J, k has K + J#j(2) : j -[[] + j:j(2):1]-> (k -[[] + j:j(2):2 + j1:j(2):2]-> {}) | j has J, j1 has J, k has K + J#j(2) : j -[[] + j:j(2):1]-> (k -[[] + j1:j(2):2 + j:j(2):2]-> {}) | j has J, j1 has J, k has K + it : k -[[] + j:j(2):2 + j1:j(2):2]-> {} | j has J, j1 has J, k has K + main : {} + it : k -[[] + k:k(4):1]-> {} | k has K + f : [A, B], C, D -[[f(13)]]-> (k -[[] + k:k(4):1]-> {}) | k has K + it : E -[[kE(11)]]-> {} + it : F -[[kF(12)]]-> {} + "### + ); + } + + #[test] + fn polymorphic_lambda_set_specialization_branching_over_single_variable() { + infer_queries!( + indoc!( + r#" + app "test" provides [f] to "./platform" + + J has j : j -> (k -> {}) | j has J, k has K + K has k : k -> {} | k has K + + C := {} has [J {j: jC}] + jC = \@C _ -> k + + D := {} has [J {j: jD}] + jD = \@D _ -> k + + E := {} has [K {k}] + k = \@E _ -> {} + + f = \flag, a, c -> + it = + when flag is + A -> j a + B -> j a + it c + # ^^ ^ + "# + ), 
+ @r###" + it : k -[[] + j:j(2):2]-> {} | j has J, k has K + c : k | k has K + "### + ); + } + #[test] fn wrap_recursive_opaque_negative_position() { infer_eq_without_problem( diff --git a/crates/compiler/solve_problem/Cargo.toml b/crates/compiler/solve_problem/Cargo.toml index 96915936eb..96f92b7175 100644 --- a/crates/compiler/solve_problem/Cargo.toml +++ b/crates/compiler/solve_problem/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "roc_solve_problem" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" diff --git a/crates/compiler/str/Cargo.toml b/crates/compiler/str/Cargo.toml index a59efae3c4..14cda3c90b 100644 --- a/crates/compiler/str/Cargo.toml +++ b/crates/compiler/str/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "roc_str" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" diff --git a/crates/compiler/test_derive/Cargo.toml b/crates/compiler/test_derive/Cargo.toml index 0d6b79aba8..a3b1647548 100644 --- a/crates/compiler/test_derive/Cargo.toml +++ b/crates/compiler/test_derive/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "test_derive" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" diff --git a/crates/compiler/test_derive/src/decoding.rs b/crates/compiler/test_derive/src/decoding.rs new file mode 100644 index 0000000000..9477710305 --- /dev/null +++ b/crates/compiler/test_derive/src/decoding.rs @@ -0,0 +1,51 @@ +#![cfg(test)] +// Even with #[allow(non_snake_case)] on individual idents, rust-analyzer issues diagnostics. +// See https://github.com/rust-lang/rust-analyzer/issues/6541. +// For the `v!` macro we use uppercase variables when constructing tag unions. +#![allow(non_snake_case)] + +use crate::{ + util::{check_immediate, derive_test}, + v, +}; +use insta::assert_snapshot; +use roc_module::symbol::Symbol; +use roc_types::subs::Variable; + +use roc_derive_key::DeriveBuiltin::Decoder; + +#[test] +fn immediates() { + check_immediate(Decoder, v!(U8), Symbol::DECODE_U8); + check_immediate(Decoder, v!(U16), Symbol::DECODE_U16); + check_immediate(Decoder, v!(U32), Symbol::DECODE_U32); + check_immediate(Decoder, v!(U64), Symbol::DECODE_U64); + check_immediate(Decoder, v!(U128), Symbol::DECODE_U128); + check_immediate(Decoder, v!(I8), Symbol::DECODE_I8); + check_immediate(Decoder, v!(I16), Symbol::DECODE_I16); + check_immediate(Decoder, v!(I32), Symbol::DECODE_I32); + check_immediate(Decoder, v!(I64), Symbol::DECODE_I64); + check_immediate(Decoder, v!(I128), Symbol::DECODE_I128); + check_immediate(Decoder, v!(DEC), Symbol::DECODE_DEC); + check_immediate(Decoder, v!(F32), Symbol::DECODE_F32); + check_immediate(Decoder, v!(F64), Symbol::DECODE_F64); + check_immediate(Decoder, v!(STR), Symbol::DECODE_STRING); +} + +#[test] +fn list() { + derive_test(Decoder, v!(Symbol::LIST_LIST v!(STR)), |golden| { + assert_snapshot!(golden, @r###" + # derived for List Str + # Decoder (List val) fmt | fmt has DecoderFormatting, val has Decoding + # List U8, fmt -[[custom(3)]]-> { rest : List U8, result : [Err [TooShort], Ok (List val)] } | fmt has DecoderFormatting, val has Decoding + # Specialization lambda sets: + # @<1>: [[custom(3)]] + #Derived.decoder_list = + Decode.custom + \#Derived.bytes, #Derived.fmt -> + Decode.decodeWith #Derived.bytes (Decode.list Decode.decoder) #Derived.fmt + "### + ) + }) +} diff --git a/crates/compiler/test_derive/src/encoding.rs b/crates/compiler/test_derive/src/encoding.rs index 
023a996ac0..1c72e631af 100644 --- a/crates/compiler/test_derive/src/encoding.rs +++ b/crates/compiler/test_derive/src/encoding.rs @@ -4,427 +4,22 @@ // For the `v!` macro we use uppercase variables when constructing tag unions. #![allow(non_snake_case)] -use std::path::PathBuf; - -use bumpalo::Bump; use insta::assert_snapshot; -use pretty_assertions::assert_eq; -use ven_pretty::DocAllocator; -use crate::pretty_print::{pretty_print_def, Ctx}; -use roc_can::{ - abilities::{AbilitiesStore, SpecializationLambdaSets}, - constraint::Constraints, - def::Def, - expr::Declarations, - module::{ - ExposedByModule, ExposedForModule, ExposedModuleTypes, ResolvedImplementations, - RigidVariables, - }, +use crate::{ + test_hash_eq, test_hash_neq, + util::{check_immediate, derive_test}, + v, }; -use roc_collections::VecSet; -use roc_constrain::expr::constrain_decls; -use roc_debug_flags::dbg_do; -use roc_derive::{synth_var, DerivedModule}; -use roc_derive_key::{DeriveKey, Derived}; -use roc_load_internal::file::{add_imports, default_aliases, LoadedModule, Threading}; -use roc_module::{ - ident::TagName, - symbol::{IdentIds, Interns, ModuleId, Symbol}, -}; -use roc_region::all::LineInfo; -use roc_reporting::report::{type_problem, RocDocAllocator}; -use roc_types::{ - pretty_print::{name_and_print_var, DebugPrint}, - subs::{ - AliasVariables, Content, ExposedTypesStorageSubs, FlatType, RecordFields, Subs, SubsIndex, - SubsSlice, UnionTags, Variable, - }, - types::{AliasKind, RecordField}, -}; - -const DERIVED_MODULE: ModuleId = ModuleId::DERIVED_SYNTH; - -fn encode_path() -> PathBuf { - let repo_root = std::env::var("ROC_WORKSPACE_DIR").expect("are you running with `cargo test`?"); - PathBuf::from(repo_root) - .join("compiler") - .join("builtins") - .join("roc") - .join("Encode.roc") -} - -#[allow(clippy::too_many_arguments)] -fn assemble_derived_golden( - subs: &mut Subs, - test_module: ModuleId, - interns: &Interns, - source_var: Variable, - derived_source: &str, - typ: Variable, - specialization_lsets: SpecializationLambdaSets, -) -> String { - let mut print_var = |var: Variable, print_only_under_alias| { - let snapshot = subs.snapshot(); - let pretty_type = name_and_print_var( - var, - subs, - test_module, - interns, - DebugPrint { - print_lambda_sets: true, - print_only_under_alias, - }, - ); - subs.rollback_to(snapshot); - pretty_type - }; - - let mut pretty_buf = String::new(); - - pretty_buf.push_str(&format!("# derived for {}\n", print_var(source_var, false))); - - let pretty_type = print_var(typ, false); - pretty_buf.push_str(&format!("# {}\n", &pretty_type)); - - let pretty_type_under_aliases = print_var(typ, true); - pretty_buf.push_str(&format!("# {}\n", &pretty_type_under_aliases)); - - pretty_buf.push_str("# Specialization lambda sets:\n"); - let mut specialization_lsets = specialization_lsets.into_iter().collect::>(); - specialization_lsets.sort_by_key(|(region, _)| *region); - for (region, var) in specialization_lsets { - let pretty_lset = print_var(var, false); - pretty_buf.push_str(&format!("# @<{}>: {}\n", region, pretty_lset)); - } - - pretty_buf.push_str(derived_source); - - pretty_buf -} - -#[allow(clippy::too_many_arguments)] -fn check_derived_typechecks_and_golden( - derived_def: Def, - test_module: ModuleId, - mut test_subs: Subs, - interns: &Interns, - exposed_encode_types: ExposedTypesStorageSubs, - encode_abilities_store: AbilitiesStore, - source_var: Variable, - derived_program: &str, - specialization_lsets: SpecializationLambdaSets, - check_golden: impl Fn(&str), -) { - // 
constrain the derived - let mut constraints = Constraints::new(); - let def_var = derived_def.expr_var; - let mut decls = Declarations::new(); - decls.push_def(derived_def); - let constr = constrain_decls(&mut constraints, test_module, &decls); - - // the derived depends on stuff from Encode, so - // - we need to add those dependencies as imported on the constraint - // - we need to add Encode ability info to a local abilities store - let encode_values_to_import = exposed_encode_types - .stored_vars_by_symbol - .keys() - .copied() - .collect::>(); - let pending_abilities = encode_abilities_store.closure_from_imported(&encode_values_to_import); - let mut exposed_by_module = ExposedByModule::default(); - exposed_by_module.insert( - ModuleId::ENCODE, - ExposedModuleTypes { - exposed_types_storage_subs: exposed_encode_types, - resolved_implementations: ResolvedImplementations::default(), - }, - ); - let exposed_for_module = - ExposedForModule::new(encode_values_to_import.iter(), exposed_by_module); - let mut def_types = Default::default(); - let mut rigid_vars = Default::default(); - let (import_variables, abilities_store) = add_imports( - test_module, - &mut test_subs, - pending_abilities, - &exposed_for_module, - &mut def_types, - &mut rigid_vars, - ); - let constr = - constraints.let_import_constraint(rigid_vars, def_types, constr, &import_variables); - - // run the solver, print and fail if we have errors - dbg_do!( - roc_debug_flags::ROC_PRINT_UNIFICATIONS_DERIVED, - std::env::set_var(roc_debug_flags::ROC_PRINT_UNIFICATIONS_DERIVED, "1") - ); - let (mut solved_subs, _, problems, _) = roc_solve::module::run_solve( - test_module, - &constraints, - constr, - RigidVariables::default(), - test_subs, - default_aliases(), - abilities_store, - Default::default(), - &exposed_for_module.exposed_by_module, - Default::default(), - ); - let subs = solved_subs.inner_mut(); - - if !problems.is_empty() { - let filename = PathBuf::from("Test.roc"); - let lines = LineInfo::new(" "); - let src_lines = vec![" "]; - let mut reports = Vec::new(); - let alloc = RocDocAllocator::new(&src_lines, test_module, interns); - - for problem in problems.into_iter() { - if let Some(report) = type_problem(&alloc, &lines, filename.clone(), problem.clone()) { - reports.push(report); - } - } - - let has_reports = !reports.is_empty(); - - let doc = alloc - .stack(reports.into_iter().map(|v| v.pretty(&alloc))) - .append(if has_reports { - alloc.line() - } else { - alloc.nil() - }); - - let mut buf = String::new(); - doc.1 - .render_raw(80, &mut roc_reporting::report::CiWrite::new(&mut buf)) - .unwrap(); - - panic!( - "Derived does not typecheck:\n{}\nDerived def:\n{}", - buf, derived_program - ); - } - - let golden = assemble_derived_golden( - subs, - test_module, - interns, - source_var, - derived_program, - def_var, - specialization_lsets, - ); - - check_golden(&golden) -} - -fn derive_test(synth_input: S, check_golden: impl Fn(&str)) -where - S: FnOnce(&mut Subs) -> Variable, -{ - let arena = Bump::new(); - let source = roc_builtins::roc::module_source(ModuleId::ENCODE); - let target_info = roc_target::TargetInfo::default_x86_64(); - - let LoadedModule { - mut interns, - exposed_types_storage: exposed_encode_types, - abilities_store, - resolved_implementations, - .. 
- } = roc_load_internal::file::load_and_typecheck_str( - &arena, - encode_path().file_name().unwrap().into(), - source, - encode_path().parent().unwrap().to_path_buf(), - Default::default(), - target_info, - roc_reporting::report::RenderTarget::ColorTerminal, - Threading::AllAvailable, - ) - .unwrap(); - - let mut subs = Subs::new(); - let ident_ids = IdentIds::default(); - let source_var = synth_input(&mut subs); - let key = get_key(&subs, source_var); - - let mut derived_module = unsafe { DerivedModule::from_components(subs, ident_ids) }; - - let mut exposed_by_module = ExposedByModule::default(); - exposed_by_module.insert( - ModuleId::ENCODE, - ExposedModuleTypes { - exposed_types_storage_subs: exposed_encode_types.clone(), - resolved_implementations, - }, - ); - - let (_derived_symbol, derived_def, specialization_lsets) = - derived_module.get_or_insert(&exposed_by_module, key); - let specialization_lsets = specialization_lsets.clone(); - let derived_def = derived_def.clone(); - - let (subs, ident_ids) = derived_module.decompose(); - - interns.all_ident_ids.insert(DERIVED_MODULE, ident_ids); - DERIVED_MODULE.register_debug_idents(interns.all_ident_ids.get(&DERIVED_MODULE).unwrap()); - - let ctx = Ctx { interns: &interns }; - let derived_program = pretty_print_def(&ctx, &derived_def); - - check_derived_typechecks_and_golden( - derived_def, - DERIVED_MODULE, - subs, - &interns, - exposed_encode_types, - abilities_store, - source_var, - &derived_program, - specialization_lsets, - check_golden, - ); -} - -fn get_key(subs: &Subs, var: Variable) -> DeriveKey { - match Derived::encoding(subs, var) { - Ok(Derived::Key(key)) => key, - _ => unreachable!(), - } -} - -fn check_key(eq: bool, synth1: S1, synth2: S2) -where - S1: FnOnce(&mut Subs) -> Variable, - S2: FnOnce(&mut Subs) -> Variable, -{ - let mut subs = Subs::new(); - let var1 = synth1(&mut subs); - let var2 = synth2(&mut subs); - - let key1 = Derived::encoding(&subs, var1); - let key2 = Derived::encoding(&subs, var2); - - if eq { - assert_eq!(key1, key2); - } else { - assert_ne!(key1, key2); - } -} - -fn check_immediate(synth: S, immediate: Symbol) -where - S: FnOnce(&mut Subs) -> Variable, -{ - let mut subs = Subs::new(); - let var = synth(&mut subs); - - let key = Derived::encoding(&subs, var); - - assert_eq!(key, Ok(Derived::Immediate(immediate))); -} - -// Writing out the types into content is terrible, so let's use a DSL at least for testing -macro_rules! 
v { - ({ $($field:ident: $make_v:expr,)* $(?$opt_field:ident : $make_opt_v:expr,)* }) => { - |subs: &mut Subs| { - $(let $field = $make_v(subs);)* - $(let $opt_field = $make_opt_v(subs);)* - let fields = vec![ - $( (stringify!($field).into(), RecordField::Required($field)) ,)* - $( (stringify!($opt_field).into(), RecordField::Required($opt_field)) ,)* - ]; - let fields = RecordFields::insert_into_subs(subs, fields); - synth_var(subs, Content::Structure(FlatType::Record(fields, Variable::EMPTY_RECORD))) - } - }; - ([ $($tag:ident $($payload:expr)*),* ]) => { - |subs: &mut Subs| { - $( - let $tag = vec![ $( $payload(subs), )* ]; - )* - let tags = UnionTags::insert_into_subs::<_, Vec>(subs, vec![ $( (TagName(stringify!($tag).into()), $tag) ,)* ]); - synth_var(subs, Content::Structure(FlatType::TagUnion(tags, Variable::EMPTY_TAG_UNION))) - } - }; - ([ $($tag:ident $($payload:expr)*),* ] as $rec_var:ident) => { - |subs: &mut Subs| { - let $rec_var = subs.fresh_unnamed_flex_var(); - let rec_name_index = - SubsIndex::push_new(&mut subs.field_names, stringify!($rec).into()); - - $( - let $tag = vec![ $( $payload(subs), )* ]; - )* - let tags = UnionTags::insert_into_subs::<_, Vec>(subs, vec![ $( (TagName(stringify!($tag).into()), $tag) ,)* ]); - let tag_union_var = synth_var(subs, Content::Structure(FlatType::RecursiveTagUnion($rec_var, tags, Variable::EMPTY_TAG_UNION))); - - subs.set_content( - $rec_var, - Content::RecursionVar { - structure: tag_union_var, - opt_name: Some(rec_name_index), - }, - ); - tag_union_var - } - }; - (Symbol::$sym:ident $($arg:expr)*) => { - |subs: &mut Subs| { - let $sym = vec![ $( $arg(subs) ,)* ]; - let var_slice = SubsSlice::insert_into_subs(subs, $sym); - synth_var(subs, Content::Structure(FlatType::Apply(Symbol::$sym, var_slice))) - } - }; - (Symbol::$alias:ident $($arg:expr)* => $real_var:expr) => { - |subs: &mut Subs| { - let args = vec![$( $arg(subs) )*]; - let alias_variables = AliasVariables::insert_into_subs::, Vec<_>>(subs, args, vec![]); - let real_var = $real_var(subs); - synth_var(subs, Content::Alias(Symbol::$alias, alias_variables, real_var, AliasKind::Structural)) - } - }; - (@Symbol::$alias:ident $($arg:expr)* => $real_var:expr) => { - |subs: &mut Subs| { - let args = vec![$( $arg(subs) )*]; - let alias_variables = AliasVariables::insert_into_subs::, Vec<_>>(subs, args, vec![]); - let real_var = $real_var(subs); - synth_var(subs, Content::Alias(Symbol::$alias, alias_variables, real_var, AliasKind::Opaque)) - } - }; - (*$rec_var:ident) => { - |_: &mut Subs| { $rec_var } - }; - ($var:ident) => { - |_: &mut Subs| { Variable::$var } - }; -} - -macro_rules! test_hash_eq { - ($($name:ident: $synth1:expr, $synth2:expr)*) => {$( - #[test] - fn $name() { - check_key(true, $synth1, $synth2) - } - )*}; -} - -macro_rules! test_hash_neq { - ($($name:ident: $synth1:expr, $synth2:expr)*) => {$( - #[test] - fn $name() { - check_key(false, $synth1, $synth2) - } - )*}; -} +use roc_derive_key::DeriveBuiltin::ToEncoder; +use roc_module::symbol::Symbol; +use roc_types::subs::Variable; // {{{ hash tests test_hash_eq! { + ToEncoder, + same_record: v!({ a: v!(U8), }), v!({ a: v!(U8), }) same_record_fields_diff_types: @@ -448,9 +43,9 @@ test_hash_eq! 
{ v!(EMPTY_TAG_UNION), v!([]) same_recursive_tag_union: - v!([ Nil, Cons v!(*lst)] as lst), v!([ Nil, Cons v!(*lst)] as lst) + v!([ Nil, Cons v!(^lst)] as lst), v!([ Nil, Cons v!(^lst)] as lst) same_tag_union_and_recursive_tag_union_fields: - v!([ Nil, Cons v!(STR)]), v!([ Nil, Cons v!(*lst)] as lst) + v!([ Nil, Cons v!(STR)]), v!([ Nil, Cons v!(^lst)] as lst) list_list_diff_types: v!(Symbol::LIST_LIST v!(STR)), v!(Symbol::LIST_LIST v!(U8)) @@ -476,6 +71,8 @@ test_hash_eq! { } test_hash_neq! { + ToEncoder, + different_record_fields: v!({ a: v!(U8), }), v!({ b: v!(U8), }) record_empty_vs_nonempty: @@ -486,7 +83,7 @@ test_hash_neq! { tag_union_empty_vs_nonempty: v!(EMPTY_TAG_UNION), v!([ B v!(U8) ]) different_recursive_tag_union_tags: - v!([ Nil, Cons v!(*lst) ] as lst), v!([ Nil, Next v!(*lst) ] as lst) + v!([ Nil, Cons v!(^lst) ] as lst), v!([ Nil, Next v!(^lst) ] as lst) same_alias_diff_real_type: v!(Symbol::BOOL_BOOL => v!([ True, False ])), v!(Symbol::BOOL_BOOL => v!([ False, True, Maybe ])) @@ -505,25 +102,25 @@ test_hash_neq! { #[test] fn immediates() { - check_immediate(v!(U8), Symbol::ENCODE_U8); - check_immediate(v!(U16), Symbol::ENCODE_U16); - check_immediate(v!(U32), Symbol::ENCODE_U32); - check_immediate(v!(U64), Symbol::ENCODE_U64); - check_immediate(v!(U128), Symbol::ENCODE_U128); - check_immediate(v!(I8), Symbol::ENCODE_I8); - check_immediate(v!(I16), Symbol::ENCODE_I16); - check_immediate(v!(I32), Symbol::ENCODE_I32); - check_immediate(v!(I64), Symbol::ENCODE_I64); - check_immediate(v!(I128), Symbol::ENCODE_I128); - check_immediate(v!(DEC), Symbol::ENCODE_DEC); - check_immediate(v!(F32), Symbol::ENCODE_F32); - check_immediate(v!(F64), Symbol::ENCODE_F64); - check_immediate(v!(STR), Symbol::ENCODE_STRING); + check_immediate(ToEncoder, v!(U8), Symbol::ENCODE_U8); + check_immediate(ToEncoder, v!(U16), Symbol::ENCODE_U16); + check_immediate(ToEncoder, v!(U32), Symbol::ENCODE_U32); + check_immediate(ToEncoder, v!(U64), Symbol::ENCODE_U64); + check_immediate(ToEncoder, v!(U128), Symbol::ENCODE_U128); + check_immediate(ToEncoder, v!(I8), Symbol::ENCODE_I8); + check_immediate(ToEncoder, v!(I16), Symbol::ENCODE_I16); + check_immediate(ToEncoder, v!(I32), Symbol::ENCODE_I32); + check_immediate(ToEncoder, v!(I64), Symbol::ENCODE_I64); + check_immediate(ToEncoder, v!(I128), Symbol::ENCODE_I128); + check_immediate(ToEncoder, v!(DEC), Symbol::ENCODE_DEC); + check_immediate(ToEncoder, v!(F32), Symbol::ENCODE_F32); + check_immediate(ToEncoder, v!(F64), Symbol::ENCODE_F64); + check_immediate(ToEncoder, v!(STR), Symbol::ENCODE_STRING); } #[test] fn empty_record() { - derive_test(v!(EMPTY_RECORD), |golden| { + derive_test(ToEncoder, v!(EMPTY_RECORD), |golden| { assert_snapshot!(golden, @r###" # derived for {} # {} -[[toEncoder_{}(0)]]-> Encoder fmt | fmt has EncoderFormatting @@ -543,7 +140,7 @@ fn empty_record() { #[test] fn zero_field_record() { - derive_test(v!({}), |golden| { + derive_test(ToEncoder, v!({}), |golden| { assert_snapshot!(golden, @r###" # derived for {} # {} -[[toEncoder_{}(0)]]-> Encoder fmt | fmt has EncoderFormatting @@ -563,7 +160,7 @@ fn zero_field_record() { #[test] fn one_field_record() { - derive_test(v!({ a: v!(U8), }), |golden| { + derive_test(ToEncoder, v!({ a: v!(U8), }), |golden| { assert_snapshot!(golden, @r###" # derived for { a : U8 } # { a : val } -[[toEncoder_{a}(0)]]-> Encoder fmt | fmt has EncoderFormatting, val has Encoding @@ -588,23 +185,27 @@ fn one_field_record() { } #[test] -#[ignore = "TODO #3421 unification of unspecialized variables in lambda sets 
currently causes this to be derived incorrectly"] fn two_field_record() { - derive_test(v!({ a: v!(U8), b: v!(STR), }), |golden| { + derive_test(ToEncoder, v!({ a: v!(U8), b: v!(STR), }), |golden| { assert_snapshot!(golden, @r###" # derived for { a : U8, b : Str } - # { a : val, b : a } -[[toEncoder_{a,b}(0)]]-> Encoder fmt | a has Encoding, fmt has EncoderFormatting, val has Encoding - # { a : val, b : a } -[[toEncoder_{a,b}(0)]]-> (List U8, fmt -[[custom(2) { a : val, b : a }]]-> List U8) | a has Encoding, fmt has EncoderFormatting, val has Encoding + # { a : val, b : val1 } -[[toEncoder_{a,b}(0)]]-> Encoder fmt | fmt has EncoderFormatting, val has Encoding, val1 has Encoding + # { a : val, b : val1 } -[[toEncoder_{a,b}(0)]]-> (List U8, fmt -[[custom(2) { a : val, b : val1 }]]-> List U8) | fmt has EncoderFormatting, val has Encoding, val1 has Encoding # Specialization lambda sets: # @<1>: [[toEncoder_{a,b}(0)]] - # @<2>: [[custom(2) { a : val, b : a }]] | a has Encoding, val has Encoding + # @<2>: [[custom(2) { a : val, b : val1 }]] | val has Encoding, val1 has Encoding #Derived.toEncoder_{a,b} = \#Derived.rcd -> - Encode.custom \#Derived.bytes, #Derived.fmt -> - Encode.appendWith #Derived.bytes (Encode.record [ - { value: Encode.toEncoder #Derived.rcd.a, key: "a", }, - { value: Encode.toEncoder #Derived.rcd.b, key: "b", }, - ]) #Derived.fmt + Encode.custom + \#Derived.bytes, #Derived.fmt -> + Encode.appendWith + #Derived.bytes + (Encode.record + [ + { value: Encode.toEncoder #Derived.rcd.a, key: "a", }, + { value: Encode.toEncoder #Derived.rcd.b, key: "b", }, + ]) + #Derived.fmt "### ) }) @@ -614,7 +215,7 @@ fn two_field_record() { #[ignore = "NOTE: this would never actually happen, because [] is uninhabited, and hence toEncoder can never be called with a value of []! 
Rightfully it induces broken assertions in other parts of the compiler, so we ignore it."] fn empty_tag_union() { - derive_test(v!(EMPTY_TAG_UNION), |golden| { + derive_test(ToEncoder, v!(EMPTY_TAG_UNION), |golden| { assert_snapshot!( golden, @r#" @@ -625,7 +226,7 @@ fn empty_tag_union() { #[test] fn tag_one_label_zero_args() { - derive_test(v!([A]), |golden| { + derive_test(ToEncoder, v!([A]), |golden| { assert_snapshot!(golden, @r###" # derived for [A] # [A] -[[toEncoder_[A 0](0)]]-> Encoder fmt | fmt has EncoderFormatting @@ -647,87 +248,109 @@ fn tag_one_label_zero_args() { } #[test] -#[ignore = "TODO #3421 unification of unspecialized variables in lambda sets currently causes this to be derived incorrectly"] fn tag_one_label_two_args() { - derive_test(v!([A v!(U8) v!(STR)]), |golden| { + derive_test(ToEncoder, v!([A v!(U8) v!(STR)]), |golden| { assert_snapshot!(golden, @r###" # derived for [A U8 Str] - # [A val a] -[[toEncoder_[A 2](0)]]-> Encoder fmt | a has Encoding, fmt has EncoderFormatting, val has Encoding - # [A val a] -[[toEncoder_[A 2](0)]]-> (List U8, fmt -[[custom(4) [A val a]]]-> List U8) | a has Encoding, fmt has EncoderFormatting, val has Encoding + # [A val val1] -[[toEncoder_[A 2](0)]]-> Encoder fmt | fmt has EncoderFormatting, val has Encoding, val1 has Encoding + # [A val val1] -[[toEncoder_[A 2](0)]]-> (List U8, fmt -[[custom(4) [A val val1]]]-> List U8) | fmt has EncoderFormatting, val has Encoding, val1 has Encoding # Specialization lambda sets: # @<1>: [[toEncoder_[A 2](0)]] - # @<2>: [[custom(4) [A val a]]] | a has Encoding, val has Encoding + # @<2>: [[custom(4) [A val val1]]] | val has Encoding, val1 has Encoding #Derived.toEncoder_[A 2] = \#Derived.tag -> - Encode.custom \#Derived.bytes, #Derived.fmt -> - Encode.appendWith #Derived.bytes (when #Derived.tag is - A #Derived.2 #Derived.3 -> - Encode.tag "A" [ - Encode.toEncoder #Derived.2, - Encode.toEncoder #Derived.3, - ]) #Derived.fmt + Encode.custom + \#Derived.bytes, #Derived.fmt -> + Encode.appendWith + #Derived.bytes + (when #Derived.tag is + A #Derived.2 #Derived.3 -> + Encode.tag + "A" + [ + Encode.toEncoder #Derived.2, + Encode.toEncoder #Derived.3, + ]) + #Derived.fmt "### ) }) } #[test] -#[ignore = "TODO #3421 unification of unspecialized variables in lambda sets currently causes this to be derived incorrectly"] fn tag_two_labels() { - derive_test(v!([A v!(U8) v!(STR) v!(U16), B v!(STR)]), |golden| { - assert_snapshot!(golden, @r###" + derive_test( + ToEncoder, + v!([A v!(U8) v!(STR) v!(U16), B v!(STR)]), + |golden| { + assert_snapshot!(golden, @r###" # derived for [A U8 Str U16, B Str] - # [A val a b, B c] -[[toEncoder_[A 3,B 1](0)]]-> Encoder fmt | a has Encoding, b has Encoding, c has Encoding, fmt has EncoderFormatting, val has Encoding - # [A val a b, B c] -[[toEncoder_[A 3,B 1](0)]]-> (List U8, fmt -[[custom(6) [A val a b, B c]]]-> List U8) | a has Encoding, b has Encoding, c has Encoding, fmt has EncoderFormatting, val has Encoding + # [A val val1 val1, B val1] -[[toEncoder_[A 3,B 1](0)]]-> Encoder fmt | fmt has EncoderFormatting, val has Encoding, val1 has Encoding + # [A val val1 val1, B val1] -[[toEncoder_[A 3,B 1](0)]]-> (List U8, fmt -[[custom(6) [A val val1 val1, B val1]]]-> List U8) | fmt has EncoderFormatting, val has Encoding, val1 has Encoding # Specialization lambda sets: # @<1>: [[toEncoder_[A 3,B 1](0)]] - # @<2>: [[custom(6) [A val a b, B c]]] | a has Encoding, b has Encoding, c has Encoding, val has Encoding + # @<2>: [[custom(6) [A val val1 val1, B val1]]] | val has Encoding, 
val1 has Encoding #Derived.toEncoder_[A 3,B 1] = \#Derived.tag -> - Encode.custom \#Derived.bytes, #Derived.fmt -> - Encode.appendWith #Derived.bytes (when #Derived.tag is - A #Derived.2 #Derived.3 #Derived.4 -> - Encode.tag "A" [ - Encode.toEncoder #Derived.2, - Encode.toEncoder #Derived.3, - Encode.toEncoder #Derived.4, - ] - B #Derived.5 -> Encode.tag "B" [Encode.toEncoder #Derived.5]) - #Derived.fmt + Encode.custom + \#Derived.bytes, #Derived.fmt -> + Encode.appendWith + #Derived.bytes + (when #Derived.tag is + A #Derived.2 #Derived.3 #Derived.4 -> + Encode.tag + "A" + [ + Encode.toEncoder #Derived.2, + Encode.toEncoder #Derived.3, + Encode.toEncoder #Derived.4, + ] + B #Derived.5 -> Encode.tag "B" [Encode.toEncoder #Derived.5]) + #Derived.fmt "### - ) - }) + ) + }, + ) } #[test] -#[ignore = "TODO #3421 unification of unspecialized variables in lambda sets currently causes this to be derived incorrectly"] fn recursive_tag_union() { - derive_test(v!([Nil, Cons v!(U8) v!(*lst) ] as lst), |golden| { - assert_snapshot!(golden, @r###" + derive_test( + ToEncoder, + v!([Nil, Cons v!(U8) v!(^lst) ] as lst), + |golden| { + assert_snapshot!(golden, @r###" # derived for [Cons U8 $rec, Nil] as $rec - # [Cons val a, Nil] -[[toEncoder_[Cons 2,Nil 0](0)]]-> Encoder fmt | a has Encoding, fmt has EncoderFormatting, val has Encoding - # [Cons val a, Nil] -[[toEncoder_[Cons 2,Nil 0](0)]]-> (List U8, fmt -[[custom(4) [Cons val a, Nil]]]-> List U8) | a has Encoding, fmt has EncoderFormatting, val has Encoding + # [Cons val val1, Nil] -[[toEncoder_[Cons 2,Nil 0](0)]]-> Encoder fmt | fmt has EncoderFormatting, val has Encoding, val1 has Encoding + # [Cons val val1, Nil] -[[toEncoder_[Cons 2,Nil 0](0)]]-> (List U8, fmt -[[custom(4) [Cons val val1, Nil]]]-> List U8) | fmt has EncoderFormatting, val has Encoding, val1 has Encoding # Specialization lambda sets: # @<1>: [[toEncoder_[Cons 2,Nil 0](0)]] - # @<2>: [[custom(4) [Cons val a, Nil]]] | a has Encoding, val has Encoding + # @<2>: [[custom(4) [Cons val val1, Nil]]] | val has Encoding, val1 has Encoding #Derived.toEncoder_[Cons 2,Nil 0] = \#Derived.tag -> - Encode.custom \#Derived.bytes, #Derived.fmt -> - Encode.appendWith #Derived.bytes (when #Derived.tag is - Cons #Derived.2 #Derived.3 -> - Encode.tag "Cons" [ - Encode.toEncoder #Derived.2, - Encode.toEncoder #Derived.3, - ] - Nil -> Encode.tag "Nil" []) #Derived.fmt + Encode.custom + \#Derived.bytes, #Derived.fmt -> + Encode.appendWith + #Derived.bytes + (when #Derived.tag is + Cons #Derived.2 #Derived.3 -> + Encode.tag + "Cons" + [ + Encode.toEncoder #Derived.2, + Encode.toEncoder #Derived.3, + ] + Nil -> Encode.tag "Nil" []) + #Derived.fmt "### - ) - }) + ) + }, + ) } #[test] fn list() { - derive_test(v!(Symbol::LIST_LIST v!(STR)), |golden| { + derive_test(ToEncoder, v!(Symbol::LIST_LIST v!(STR)), |golden| { assert_snapshot!(golden, @r###" # derived for List Str # List val -[[toEncoder_list(0)]]-> Encoder fmt | fmt has EncoderFormatting, val has Encoding diff --git a/crates/compiler/test_derive/src/tests.rs b/crates/compiler/test_derive/src/tests.rs index 3fdefff22f..89a3af76d2 100644 --- a/crates/compiler/test_derive/src/tests.rs +++ b/crates/compiler/test_derive/src/tests.rs @@ -1,5 +1,7 @@ #![cfg(test)] +mod decoding; mod encoding; mod pretty_print; +mod util; diff --git a/crates/compiler/test_derive/src/util.rs b/crates/compiler/test_derive/src/util.rs new file mode 100644 index 0000000000..228c6490b7 --- /dev/null +++ b/crates/compiler/test_derive/src/util.rs @@ -0,0 +1,467 @@ +use 
std::path::PathBuf; + +use bumpalo::Bump; +use ven_pretty::DocAllocator; + +use crate::pretty_print::{pretty_print_def, Ctx}; +use roc_can::{ + abilities::{AbilitiesStore, SpecializationLambdaSets}, + constraint::Constraints, + def::Def, + expr::Declarations, + module::{ + ExposedByModule, ExposedForModule, ExposedModuleTypes, ResolvedImplementations, + RigidVariables, + }, +}; +use roc_collections::VecSet; +use roc_constrain::expr::constrain_decls; +use roc_debug_flags::dbg_do; +use roc_derive::DerivedModule; +use roc_derive_key::{DeriveBuiltin, DeriveKey, Derived}; +use roc_load_internal::file::{add_imports, default_aliases, LoadedModule, Threading}; +use roc_module::symbol::{IdentIds, Interns, ModuleId, Symbol}; +use roc_region::all::LineInfo; +use roc_reporting::report::{type_problem, RocDocAllocator}; +use roc_types::{ + pretty_print::{name_and_print_var, DebugPrint}, + subs::{ExposedTypesStorageSubs, Subs, Variable}, +}; + +const DERIVED_MODULE: ModuleId = ModuleId::DERIVED_SYNTH; + +fn module_source_and_path(builtin: DeriveBuiltin) -> (ModuleId, &'static str, PathBuf) { + use roc_builtins::roc::module_source; + + let repo_root = std::env::var("ROC_WORKSPACE_DIR").expect("are you running with `cargo test`?"); + let builtins_path = PathBuf::from(repo_root) + .join("compiler") + .join("builtins") + .join("roc"); + + match builtin { + DeriveBuiltin::ToEncoder => ( + ModuleId::ENCODE, + module_source(ModuleId::ENCODE), + builtins_path.join("Encode.roc"), + ), + DeriveBuiltin::Decoder => ( + ModuleId::DECODE, + module_source(ModuleId::DECODE), + builtins_path.join("Decode.roc"), + ), + } +} + +/// DSL for creating [`Content`][crate::subs::Content]. +#[macro_export] +macro_rules! v { + ({ $($field:ident: $make_v:expr,)* $(?$opt_field:ident : $make_opt_v:expr,)* }) => {{ + #[allow(unused)] + use roc_types::types::RecordField; + use roc_types::subs::{Subs, RecordFields, Content, FlatType, Variable}; + |subs: &mut Subs| { + $(let $field = $make_v(subs);)* + $(let $opt_field = $make_opt_v(subs);)* + let fields = vec![ + $( (stringify!($field).into(), RecordField::Required($field)) ,)* + $( (stringify!($opt_field).into(), RecordField::Required($opt_field)) ,)* + ]; + let fields = RecordFields::insert_into_subs(subs, fields); + roc_derive::synth_var(subs, Content::Structure(FlatType::Record(fields, Variable::EMPTY_RECORD))) + } + }}; + ([ $($tag:ident $($payload:expr)*),* ]$( $ext:tt )?) => {{ + #[allow(unused)] + use roc_types::subs::{Subs, UnionTags, Content, FlatType, Variable}; + #[allow(unused)] + use roc_module::ident::TagName; + |subs: &mut Subs| { + $( + let $tag = vec![ $( $payload(subs), )* ]; + )* + let tags = UnionTags::insert_into_subs::<_, Vec>(subs, vec![ $( (TagName(stringify!($tag).into()), $tag) ,)* ]); + + #[allow(unused_mut)] + let mut ext = Variable::EMPTY_TAG_UNION; + $( ext = $crate::v!($ext)(subs); )? 
+ + roc_derive::synth_var(subs, Content::Structure(FlatType::TagUnion(tags, ext))) + } + }}; + ([ $($tag:ident $($payload:expr)*),* ] as $rec_var:ident) => {{ + use roc_types::subs::{Subs, SubsIndex, Variable, Content, FlatType, UnionTags}; + use roc_module::ident::TagName; + |subs: &mut Subs| { + let $rec_var = subs.fresh_unnamed_flex_var(); + let rec_name_index = + SubsIndex::push_new(&mut subs.field_names, stringify!($rec).into()); + + $( + let $tag = vec![ $( $payload(subs), )* ]; + )* + let tags = UnionTags::insert_into_subs::<_, Vec>(subs, vec![ $( (TagName(stringify!($tag).into()), $tag) ,)* ]); + let tag_union_var = roc_derive::synth_var(subs, Content::Structure(FlatType::RecursiveTagUnion($rec_var, tags, Variable::EMPTY_TAG_UNION))); + + subs.set_content( + $rec_var, + Content::RecursionVar { + structure: tag_union_var, + opt_name: Some(rec_name_index), + }, + ); + tag_union_var + } + }}; + (Symbol::$sym:ident $($arg:expr)*) => {{ + use roc_types::subs::{Subs, SubsSlice, Content, FlatType}; + use roc_module::symbol::Symbol; + |subs: &mut Subs| { + let $sym = vec![ $( $arg(subs) ,)* ]; + let var_slice = SubsSlice::insert_into_subs(subs, $sym); + roc_derive::synth_var(subs, Content::Structure(FlatType::Apply(Symbol::$sym, var_slice))) + } + }}; + (Symbol::$alias:ident $($arg:expr)* => $real_var:expr) => {{ + use roc_types::subs::{Subs, AliasVariables, Content}; + use roc_types::types::AliasKind; + use roc_module::symbol::Symbol; + |subs: &mut Subs| { + let args = vec![$( $arg(subs) )*]; + let alias_variables = AliasVariables::insert_into_subs::, Vec<_>>(subs, args, vec![]); + let real_var = $real_var(subs); + roc_derive::synth_var(subs, Content::Alias(Symbol::$alias, alias_variables, real_var, AliasKind::Structural)) + } + }}; + (@Symbol::$alias:ident $($arg:expr)* => $real_var:expr) => {{ + use roc_types::subs::{Subs, AliasVariables, Content}; + use roc_types::types::AliasKind; + use roc_module::symbol::Symbol; + |subs: &mut Subs| { + let args = vec![$( $arg(subs) )*]; + let alias_variables = AliasVariables::insert_into_subs::, Vec<_>>(subs, args, vec![]); + let real_var = $real_var(subs); + roc_derive::synth_var(subs, Content::Alias(Symbol::$alias, alias_variables, real_var, AliasKind::Opaque)) + } + }}; + (*) => {{ + use roc_types::subs::{Subs, Content}; + |subs: &mut Subs| { roc_derive::synth_var(subs, Content::FlexVar(None)) } + }}; + (^$rec_var:ident) => {{ + use roc_types::subs::{Subs}; + |_: &mut Subs| { $rec_var } + }}; + ($var:ident) => {{ + use roc_types::subs::{Subs}; + |_: &mut Subs| { Variable::$var } + }}; + } + +pub(crate) fn check_key(builtin: DeriveBuiltin, eq: bool, synth1: S1, synth2: S2) +where + S1: FnOnce(&mut Subs) -> Variable, + S2: FnOnce(&mut Subs) -> Variable, +{ + let mut subs = Subs::new(); + let var1 = synth1(&mut subs); + let var2 = synth2(&mut subs); + + let key1 = Derived::builtin(builtin, &subs, var1); + let key2 = Derived::builtin(builtin, &subs, var2); + + if eq { + assert_eq!(key1, key2); + } else { + assert_ne!(key1, key2); + } +} + +#[macro_export] +macro_rules! test_hash_eq { + ($builtin:expr, $($name:ident: $synth1:expr, $synth2:expr)*) => {$( + #[test] + fn $name() { + $crate::util::check_key($builtin,true, $synth1, $synth2) + } + )*}; +} + +#[macro_export] +macro_rules! 
test_hash_neq { + ($builtin:expr, $($name:ident: $synth1:expr, $synth2:expr)*) => {$( + #[test] + fn $name() { + $crate::util::check_key($builtin, false, $synth1, $synth2) + } + )*}; +} + +pub(crate) fn check_immediate(builtin: DeriveBuiltin, synth: S, immediate: Symbol) +where + S: FnOnce(&mut Subs) -> Variable, +{ + let mut subs = Subs::new(); + let var = synth(&mut subs); + + let key = Derived::builtin(builtin, &subs, var); + + assert_eq!(key, Ok(Derived::Immediate(immediate))); +} + +#[allow(clippy::too_many_arguments)] +fn assemble_derived_golden( + subs: &mut Subs, + test_module: ModuleId, + interns: &Interns, + source_var: Variable, + derived_source: &str, + typ: Variable, + specialization_lsets: SpecializationLambdaSets, +) -> String { + let mut print_var = |var: Variable, print_only_under_alias| { + let snapshot = subs.snapshot(); + let pretty_type = name_and_print_var( + var, + subs, + test_module, + interns, + DebugPrint { + print_lambda_sets: true, + print_only_under_alias, + }, + ); + subs.rollback_to(snapshot); + pretty_type + }; + + let mut pretty_buf = String::new(); + + pretty_buf.push_str(&format!("# derived for {}\n", print_var(source_var, false))); + + let pretty_type = print_var(typ, false); + pretty_buf.push_str(&format!("# {}\n", &pretty_type)); + + let pretty_type_under_aliases = print_var(typ, true); + pretty_buf.push_str(&format!("# {}\n", &pretty_type_under_aliases)); + + pretty_buf.push_str("# Specialization lambda sets:\n"); + let mut specialization_lsets = specialization_lsets.into_iter().collect::>(); + specialization_lsets.sort_by_key(|(region, _)| *region); + for (region, var) in specialization_lsets { + let pretty_lset = print_var(var, false); + pretty_buf.push_str(&format!("# @<{}>: {}\n", region, pretty_lset)); + } + + pretty_buf.push_str(derived_source); + + pretty_buf +} + +/// The environment of the module containing the builtin ability we're deriving for a type. 
+struct DeriveBuiltinEnv { + module_id: ModuleId, + exposed_types: ExposedTypesStorageSubs, + abilities_store: AbilitiesStore, +} + +#[allow(clippy::too_many_arguments)] +fn check_derived_typechecks_and_golden( + derived_def: Def, + test_module: ModuleId, + mut test_subs: Subs, + interns: &Interns, + derive_builtin_env: DeriveBuiltinEnv, + source_var: Variable, + derived_program: &str, + specialization_lsets: SpecializationLambdaSets, + check_golden: impl Fn(&str), +) { + // constrain the derived + let mut constraints = Constraints::new(); + let def_var = derived_def.expr_var; + let mut decls = Declarations::new(); + decls.push_def(derived_def); + let constr = constrain_decls(&mut constraints, test_module, &decls); + + // the derived implementation on stuff from the builtin module, so + // - we need to add those dependencies as imported on the constraint + // - we need to add the builtin ability info to a local abilities store + let values_to_import_from_builtin_module = derive_builtin_env + .exposed_types + .stored_vars_by_symbol + .keys() + .copied() + .collect::>(); + let pending_abilities = derive_builtin_env + .abilities_store + .closure_from_imported(&values_to_import_from_builtin_module); + let mut exposed_by_module = ExposedByModule::default(); + exposed_by_module.insert( + derive_builtin_env.module_id, + ExposedModuleTypes { + exposed_types_storage_subs: derive_builtin_env.exposed_types, + resolved_implementations: ResolvedImplementations::default(), + }, + ); + let exposed_for_module = ExposedForModule::new( + values_to_import_from_builtin_module.iter(), + exposed_by_module, + ); + let mut def_types = Default::default(); + let mut rigid_vars = Default::default(); + let (import_variables, abilities_store) = add_imports( + test_module, + &mut test_subs, + pending_abilities, + &exposed_for_module, + &mut def_types, + &mut rigid_vars, + ); + let constr = + constraints.let_import_constraint(rigid_vars, def_types, constr, &import_variables); + + // run the solver, print and fail if we have errors + dbg_do!( + roc_debug_flags::ROC_PRINT_UNIFICATIONS_DERIVED, + std::env::set_var(roc_debug_flags::ROC_PRINT_UNIFICATIONS_DERIVED, "1") + ); + let (mut solved_subs, _, problems, _) = roc_solve::module::run_solve( + test_module, + &constraints, + constr, + RigidVariables::default(), + test_subs, + default_aliases(), + abilities_store, + Default::default(), + &exposed_for_module.exposed_by_module, + Default::default(), + ); + let subs = solved_subs.inner_mut(); + + if !problems.is_empty() { + let filename = PathBuf::from("Test.roc"); + let lines = LineInfo::new(" "); + let src_lines = vec![" "]; + let mut reports = Vec::new(); + let alloc = RocDocAllocator::new(&src_lines, test_module, interns); + + for problem in problems.into_iter() { + if let Some(report) = type_problem(&alloc, &lines, filename.clone(), problem.clone()) { + reports.push(report); + } + } + + let has_reports = !reports.is_empty(); + + let doc = alloc + .stack(reports.into_iter().map(|v| v.pretty(&alloc))) + .append(if has_reports { + alloc.line() + } else { + alloc.nil() + }); + + let mut buf = String::new(); + doc.1 + .render_raw(80, &mut roc_reporting::report::CiWrite::new(&mut buf)) + .unwrap(); + + panic!( + "Derived does not typecheck:\n{}\nDerived def:\n{}", + buf, derived_program + ); + } + + let golden = assemble_derived_golden( + subs, + test_module, + interns, + source_var, + derived_program, + def_var, + specialization_lsets, + ); + + check_golden(&golden) +} + +fn get_key(builtin: DeriveBuiltin, subs: &Subs, var: 
Variable) -> DeriveKey { + match Derived::builtin(builtin, subs, var) { + Ok(Derived::Key(key)) => key, + _ => unreachable!(), + } +} + +pub(crate) fn derive_test(builtin: DeriveBuiltin, synth_input: S, check_golden: impl Fn(&str)) +where + S: FnOnce(&mut Subs) -> Variable, +{ + let arena = Bump::new(); + let (builtin_module, source, path) = module_source_and_path(builtin); + let target_info = roc_target::TargetInfo::default_x86_64(); + + let LoadedModule { + mut interns, + exposed_types_storage, + abilities_store, + resolved_implementations, + .. + } = roc_load_internal::file::load_and_typecheck_str( + &arena, + path.file_name().unwrap().into(), + source, + path.parent().unwrap().to_path_buf(), + Default::default(), + target_info, + roc_reporting::report::RenderTarget::ColorTerminal, + Threading::AllAvailable, + ) + .unwrap(); + + let mut subs = Subs::new(); + let ident_ids = IdentIds::default(); + let source_var = synth_input(&mut subs); + let key = get_key(builtin, &subs, source_var); + + let mut derived_module = unsafe { DerivedModule::from_components(subs, ident_ids) }; + + let mut exposed_by_module = ExposedByModule::default(); + exposed_by_module.insert( + builtin_module, + ExposedModuleTypes { + exposed_types_storage_subs: exposed_types_storage.clone(), + resolved_implementations, + }, + ); + + let (_derived_symbol, derived_def, specialization_lsets) = + derived_module.get_or_insert(&exposed_by_module, key); + let specialization_lsets = specialization_lsets.clone(); + let derived_def = derived_def.clone(); + + let (subs, ident_ids) = derived_module.decompose(); + + interns.all_ident_ids.insert(DERIVED_MODULE, ident_ids); + DERIVED_MODULE.register_debug_idents(interns.all_ident_ids.get(&DERIVED_MODULE).unwrap()); + + let ctx = Ctx { interns: &interns }; + let derived_program = pretty_print_def(&ctx, &derived_def); + + check_derived_typechecks_and_golden( + derived_def, + DERIVED_MODULE, + subs, + &interns, + DeriveBuiltinEnv { + module_id: builtin_module, + exposed_types: exposed_types_storage, + abilities_store, + }, + source_var, + &derived_program, + specialization_lsets, + check_golden, + ); +} diff --git a/crates/compiler/test_gen/Cargo.toml b/crates/compiler/test_gen/Cargo.toml index d5179aa817..fd3d97d1c0 100644 --- a/crates/compiler/test_gen/Cargo.toml +++ b/crates/compiler/test_gen/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "test_gen" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" diff --git a/crates/compiler/test_gen/build.rs b/crates/compiler/test_gen/build.rs index ae3e1856f2..66879ab115 100644 --- a/crates/compiler/test_gen/build.rs +++ b/crates/compiler/test_gen/build.rs @@ -95,7 +95,7 @@ fn build_wasm_test_host() { run_zig(&[ "wasm-ld", - bitcode::BUILTINS_WASM32_OBJ_PATH, + &bitcode::get_builtins_wasm32_obj_path(), platform_path.to_str().unwrap(), WASI_COMPILER_RT_PATH, WASI_LIBC_PATH, diff --git a/crates/compiler/test_gen/src/gen_abilities.rs b/crates/compiler/test_gen/src/gen_abilities.rs index ed16447514..059fdc275e 100644 --- a/crates/compiler/test_gen/src/gen_abilities.rs +++ b/crates/compiler/test_gen/src/gen_abilities.rs @@ -265,6 +265,7 @@ fn encode() { #[test] #[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))] +#[ignore = "running into weird let-generalization issue when a variable is only in output position, see #3660"] fn decode() { assert_evals_to!( indoc!( @@ -349,7 +350,7 @@ fn encode_use_stdlib() { } #[test] -#[cfg(any(feature = "gen-llvm"))] +#[cfg(any(feature = "gen-llvm", feature = 
"gen-wasm"))] fn encode_use_stdlib_without_wrapping_custom() { assert_evals_to!( indoc!( @@ -374,7 +375,7 @@ fn encode_use_stdlib_without_wrapping_custom() { } #[test] -#[cfg(any(feature = "gen-llvm"))] +#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))] fn to_encoder_encode_custom_has_capture() { assert_evals_to!( indoc!( @@ -405,6 +406,9 @@ mod encode_immediate { #[cfg(feature = "gen-llvm")] use crate::helpers::llvm::assert_evals_to; + #[cfg(feature = "gen-wasm")] + use crate::helpers::wasm::assert_evals_to; + #[cfg(all(test, any(feature = "gen-llvm", feature = "gen-wasm")))] use indoc::indoc; @@ -412,7 +416,7 @@ mod encode_immediate { use roc_std::RocStr; #[test] - #[cfg(any(feature = "gen-llvm"))] + #[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))] fn string() { assert_evals_to!( indoc!( @@ -433,7 +437,7 @@ mod encode_immediate { macro_rules! num_immediate { ($($num:expr, $typ:ident)*) => {$( #[test] - #[cfg(any(feature = "gen-llvm"))] + #[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))] fn $typ() { assert_evals_to!( &format!(indoc!( @@ -471,7 +475,7 @@ mod encode_immediate { } #[test] -#[cfg(any(feature = "gen-llvm"))] +#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))] fn encode_derived_record_one_field_string() { assert_evals_to!( indoc!( @@ -493,7 +497,7 @@ fn encode_derived_record_one_field_string() { } #[test] -#[cfg(any(feature = "gen-llvm"))] +#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))] fn encode_derived_record_two_fields_strings() { assert_evals_to!( indoc!( @@ -516,7 +520,7 @@ fn encode_derived_record_two_fields_strings() { } #[test] -#[cfg(any(feature = "gen-llvm"))] +#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))] fn encode_derived_nested_record_string() { assert_evals_to!( indoc!( @@ -540,7 +544,7 @@ fn encode_derived_nested_record_string() { } #[test] -#[cfg(any(feature = "gen-llvm"))] +#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))] fn encode_derived_tag_one_payload_string() { assert_evals_to!( indoc!( @@ -564,7 +568,7 @@ fn encode_derived_tag_one_payload_string() { } #[test] -#[cfg(any(feature = "gen-llvm"))] +#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))] fn encode_derived_tag_two_payloads_string() { assert_evals_to!( indoc!( @@ -588,7 +592,7 @@ fn encode_derived_tag_two_payloads_string() { } #[test] -#[cfg(any(feature = "gen-llvm"))] +#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))] fn encode_derived_nested_tag_string() { assert_evals_to!( indoc!( @@ -613,7 +617,7 @@ fn encode_derived_nested_tag_string() { } #[test] -#[cfg(any(feature = "gen-llvm"))] +#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))] fn encode_derived_nested_record_tag_record() { assert_evals_to!( indoc!( @@ -638,7 +642,7 @@ fn encode_derived_nested_record_tag_record() { } #[test] -#[cfg(any(feature = "gen-llvm"))] +#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))] fn encode_derived_list_string() { assert_evals_to!( indoc!( @@ -662,7 +666,7 @@ fn encode_derived_list_string() { } #[test] -#[cfg(any(feature = "gen-llvm"))] +#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))] fn encode_derived_list_of_records() { assert_evals_to!( indoc!( @@ -684,3 +688,273 @@ fn encode_derived_list_of_records() { RocStr ) } + +#[test] +#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))] +#[ignore = "#3696: Currently hits some weird panic in borrow checking, not sure if it's directly related to abilities."] +fn encode_derived_list_of_lists_of_strings() { + assert_evals_to!( + indoc!( + r#" + app "test" + imports [Encode.{ toEncoder }, 
Json] + provides [main] to "./platform" + + main = + lst = [["a", "b"], ["c", "d", "e"], ["f"]] + encoded = Encode.toBytes lst Json.toUtf8 + result = Str.fromUtf8 encoded + when result is + Ok s -> s + _ -> "" + "# + ), + RocStr::from(r#"[["a","b"],["c","d","e"],["f"]]"#), + RocStr + ) +} + +#[test] +#[cfg(all( + any(feature = "gen-llvm", feature = "gen-wasm"), + not(feature = "gen-llvm-wasm") // hits a stack limit in wasm3 +))] +fn encode_derived_record_with_many_types() { + assert_evals_to!( + indoc!( + r#" + app "test" + imports [Encode.{ toEncoder }, Json] + provides [main] to "./platform" + + main = + fresh : [Fresh Str, Rotten Str] + fresh = Fresh "tomatoes" + rcd = {actors: ["Idris Elba", "Mila Kunis"], year: 2004u16, rating: {average: 7u8, min: 1u8, max: 10u8, sentiment: fresh}} + result = Str.fromUtf8 (Encode.toBytes rcd Json.toUtf8) + when result is + Ok s -> s + _ -> "" + "# + ), + RocStr::from( + r#"{"actors":["Idris Elba","Mila Kunis"],"rating":{"average":7,"max":10,"min":1,"sentiment":{"Fresh":["tomatoes"]}},"year":2004}"# + ), + RocStr + ) +} + +#[test] +#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))] +fn decode_use_stdlib() { + assert_evals_to!( + indoc!( + r#" + app "test" + imports [Decode.{ Decoding }, Json] + provides [main] to "./platform" + + MyNum := U8 has [Decoding {decoder: myDecoder}] + + myDecoder = + Decode.custom \bytes, fmt -> + when Decode.decodeWith bytes Decode.u8 fmt is + {result, rest} -> + when result is + Ok n -> {result: Ok (@MyNum n), rest} + Err e -> {result: Err e, rest} + + main = + when Decode.fromBytes [49, 53] Json.fromUtf8 is + Ok (@MyNum n) -> n + _ -> 101 + "# + ), + 15, + u8 + ) +} + +#[test] +#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))] +fn decode_use_stdlib_json_list() { + assert_evals_to!( + indoc!( + r#" + app "test" + imports [Decode.{ Decoding }, Json] + provides [main] to "./platform" + + MyNumList := List U8 has [Decoding {decoder: myDecoder}] + + myDecoder = + Decode.custom \bytes, fmt -> + when Decode.decodeWith bytes (Decode.list Decode.u8) fmt is + {result, rest} -> + when result is + Ok lst -> {result: Ok (@MyNumList lst), rest} + Err e -> {result: Err e, rest} + + main = + when Str.toUtf8 "[1,2,3]" |> Decode.fromBytes Json.fromUtf8 is + Ok (@MyNumList lst) -> lst + _ -> [] + "# + ), + RocList::from_slice(&[1u8, 2u8, 3u8]), + RocList + ) +} + +mod decode_immediate { + #[cfg(feature = "gen-llvm")] + use crate::helpers::llvm::assert_evals_to; + + #[cfg(feature = "gen-wasm")] + use crate::helpers::wasm::assert_evals_to; + + #[cfg(all(test, any(feature = "gen-llvm", feature = "gen-wasm")))] + use indoc::indoc; + + #[cfg(all(test, any(feature = "gen-llvm", feature = "gen-wasm")))] + use roc_std::RocStr; + + #[test] + #[cfg(any(feature = "gen-llvm"))] + fn string() { + assert_evals_to!( + indoc!( + r#" + app "test" imports [Decode, Json] provides [main] to "./platform" + + main = + when Str.toUtf8 "\"foo\"" |> Decode.fromBytes Json.fromUtf8 is + Ok s -> s + _ -> "" + "# + ), + RocStr::from("foo"), + RocStr + ) + } + + macro_rules! num_immediate { + ($($num:expr, $typ:ident)*) => {$( + #[test] + #[cfg(any(feature = "gen-llvm"))] + fn $typ() { + assert_evals_to!( + &format!(indoc!( + r#" + app "test" imports [Decode, Json] provides [main] to "./platform" + + main = + when Num.toStr {}{} |> Str.toUtf8 |> Decode.fromBytes Json.fromUtf8 is + Ok n -> n + _ -> 101{} + "# + ), $num, stringify!($typ), stringify!($typ)), + $num, + $typ + ) + } + )*} + } + + num_immediate! 
{ + 17, i8 + 17, i16 + 17, i32 + 17, i64 + 17, i128 + 17, u8 + 17, u16 + 17, u32 + 17, u64 + 17, u128 + 17.23, f32 + 17.23, f64 + } + + #[test] + #[cfg(any(feature = "gen-llvm"))] + fn dec() { + use roc_std::RocDec; + + assert_evals_to!( + indoc!( + r#" + app "test" imports [Decode, Json] provides [main] to "./platform" + + main = + when Num.toStr 17.23dec |> Str.toUtf8 |> Decode.fromBytes Json.fromUtf8 is + Ok n -> n + _ -> 101dec + "# + ), + RocDec::from_str("17.23").unwrap(), + RocDec + ) + } +} + +#[test] +#[cfg(any(feature = "gen-llvm"))] +fn decode_list_of_strings() { + assert_evals_to!( + indoc!( + r#" + app "test" imports [Decode, Json] provides [main] to "./platform" + + main = + when Str.toUtf8 "[\"a\",\"b\",\"c\"]" |> Decode.fromBytes Json.fromUtf8 is + Ok l -> Str.joinWith l "," + _ -> "" + "# + ), + RocStr::from("a,b,c"), + RocStr + ) +} + +#[test] +#[cfg(all( + any(feature = "gen-llvm"), // currently fails on gen-wasm + not(feature = "gen-llvm-wasm") // hits a stack limit in wasm3 +))] +fn encode_then_decode_list_of_strings() { + assert_evals_to!( + indoc!( + r#" + app "test" imports [Encode, Decode, Json] provides [main] to "./platform" + + main = + when Encode.toBytes ["a", "b", "c"] Json.fromUtf8 |> Decode.fromBytes Json.fromUtf8 is + Ok l -> Str.joinWith l "," + _ -> "something went wrong" + "# + ), + RocStr::from("a,b,c"), + RocStr + ) +} + +#[test] +#[cfg(any(feature = "gen-llvm"))] +#[ignore = "#3696: Currently hits some weird panic in borrow checking, not sure if it's directly related to abilities."] +fn encode_then_decode_list_of_lists_of_strings() { + assert_evals_to!( + indoc!( + r#" + app "test" imports [Encode, Decode, Json] provides [main] to "./platform" + + main = + when Encode.toBytes [["a", "b"], ["c", "d", "e"], ["f"]] Json.fromUtf8 |> Decode.fromBytes Json.fromUtf8 is + Ok list -> (List.map list \inner -> Str.joinWith inner ",") |> Str.joinWith ";" + _ -> "something went wrong" + "# + ), + RocStr::from("a,b;c,d,e;f"), + RocStr + ) +} diff --git a/crates/compiler/test_gen/src/gen_list.rs b/crates/compiler/test_gen/src/gen_list.rs index 20d4f57491..d2685dda74 100644 --- a/crates/compiler/test_gen/src/gen_list.rs +++ b/crates/compiler/test_gen/src/gen_list.rs @@ -15,8 +15,6 @@ use indoc::indoc; #[allow(unused_imports)] use roc_std::{RocList, RocResult, RocStr}; -use core::convert::Infallible; - #[test] #[cfg(any(feature = "gen-llvm", feature = "gen-wasm", feature = "gen-dev"))] fn roc_list_construction() { @@ -287,6 +285,8 @@ fn list_map_try_ok() { #[test] #[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))] fn list_map_try_err() { + use core::convert::Infallible; + assert_evals_to!( r#" List.mapTry [1, 2, 3] \_ -> Err -1 diff --git a/crates/compiler/test_gen/src/gen_num.rs b/crates/compiler/test_gen/src/gen_num.rs index b5b42aad4e..776f4203e0 100644 --- a/crates/compiler/test_gen/src/gen_num.rs +++ b/crates/compiler/test_gen/src/gen_num.rs @@ -2004,25 +2004,22 @@ fn shift_left_by() { fn shift_right_by() { // Sign Extended Right Shift - let is_wasm = cfg!(feature = "gen-wasm"); let is_llvm_release_mode = cfg!(feature = "gen-llvm") && !cfg!(debug_assertions); // FIXME (Brian) Something funny happening with 8-bit binary literals in tests - if !is_wasm { - assert_evals_to!( - "Num.shiftRightBy 2 (Num.toI8 0b1100_0000u8)", - 0b1111_0000u8 as i8, - i8 - ); - assert_evals_to!("Num.shiftRightBy 2 0b0100_0000i8", 0b0001_0000i8, i8); - assert_evals_to!("Num.shiftRightBy 1 0b1110_0000u8", 0b1111_0000u8, u8); - assert_evals_to!("Num.shiftRightBy 2 0b1100_0000u8", 
0b1111_0000u8, u8); - assert_evals_to!("Num.shiftRightBy 12 0b0100_0000u8", 0b0000_0000u8, u8); + assert_evals_to!( + "Num.shiftRightBy 2 (Num.toI8 0b1100_0000u8)", + 0b1111_0000u8 as i8, + i8 + ); + assert_evals_to!("Num.shiftRightBy 2 0b0100_0000i8", 0b0001_0000i8, i8); + assert_evals_to!("Num.shiftRightBy 1 0b1110_0000u8", 0b1111_0000u8, u8); + assert_evals_to!("Num.shiftRightBy 2 0b1100_0000u8", 0b1111_0000u8, u8); + assert_evals_to!("Num.shiftRightBy 12 0b0100_0000u8", 0b0000_0000u8, u8); - // LLVM in release mode returns 0 instead of -1 for some reason - if !is_llvm_release_mode { - assert_evals_to!("Num.shiftRightBy 12 0b1000_0000u8", 0b1111_1111u8, u8); - } + // LLVM in release mode returns 0 instead of -1 for some reason + if !is_llvm_release_mode { + assert_evals_to!("Num.shiftRightBy 12 0b1000_0000u8", 0b1111_1111u8, u8); } assert_evals_to!("Num.shiftRightBy 0 12", 12, i64); assert_evals_to!("Num.shiftRightBy 1 12", 6, i64); @@ -2647,7 +2644,7 @@ fn is_multiple_of_signed() { } #[test] -#[cfg(any(feature = "gen-wasm"))] +#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))] fn is_multiple_of_unsigned() { // true assert_evals_to!("Num.isMultipleOf 5u8 1", true, bool); @@ -3651,3 +3648,70 @@ fn promote_u128_number_layout() { u128 ); } + +#[test] +#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))] +fn when_on_decimals() { + assert_evals_to!( + indoc!( + r#" + when 42.42dec is + 42.42 -> 42 + 0.05 -> 1 + 3.14 -> 2 + _ -> 4 + "# + ), + 42, + i64 + ); + + assert_evals_to!( + indoc!( + r#" + when 42.42dec is + 0.05 -> 1 + 3.14 -> 2 + _ -> 4 + "# + ), + 4, + i64 + ); +} + +#[test] +#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))] +fn when_on_i128() { + assert_evals_to!( + indoc!( + r#" + when 1701411834604692317316873037158841057i128 is + 1701411834604692317316873037158841057 -> 42 + 32 -> 1 + 64 -> 2 + _ -> 4 + "# + ), + 42, + i64 + ); +} + +#[test] +#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))] +fn when_on_u128() { + assert_evals_to!( + indoc!( + r#" + when 170141183460469231731687303715884105728u128 is + 170141183460469231731687303715884105728u128 -> 42 + 32 -> 1 + 64 -> 2 + _ -> 4 + "# + ), + 42, + i64 + ); +} diff --git a/crates/compiler/test_gen/src/gen_records.rs b/crates/compiler/test_gen/src/gen_records.rs index 1c58b83c55..66c085dd08 100644 --- a/crates/compiler/test_gen/src/gen_records.rs +++ b/crates/compiler/test_gen/src/gen_records.rs @@ -102,30 +102,6 @@ fn fn_record() { i64 ); - assert_evals_to!( - indoc!( - r#" - rec = { x: 15, y: 17, z: 19 } - - rec.y - "# - ), - 17, - i64 - ); - - assert_evals_to!( - indoc!( - r#" - rec = { x: 15, y: 17, z: 19 } - - rec.z - "# - ), - 19, - i64 - ); - assert_evals_to!( indoc!( r#" diff --git a/crates/compiler/test_gen/src/gen_str.rs b/crates/compiler/test_gen/src/gen_str.rs index 9deeccd137..e9482229b9 100644 --- a/crates/compiler/test_gen/src/gen_str.rs +++ b/crates/compiler/test_gen/src/gen_str.rs @@ -1813,3 +1813,36 @@ fn llvm_wasm_str_layout_small() { [i32; 3] ); } + +#[test] +#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))] +fn when_on_strings() { + assert_evals_to!( + indoc!( + r#" + when "Deyr fé, deyja frændr" is + "Deyr fé, deyja frændr" -> 42 + "deyr sjalfr it sama" -> 1 + "en orðstírr deyr aldregi" -> 2 + "hveim er sér góðan getr" -> 3 + _ -> 4 + "# + ), + 42, + i64 + ); + + assert_evals_to!( + indoc!( + r#" + when "Deyr fé, deyja frændr" is + "deyr sjalfr it sama" -> 1 + "en orðstírr deyr aldregi" -> 2 + "hveim er sér góðan getr" -> 3 + _ -> 4 + "# + ), + 4, + i64 + ); +} diff --git 
a/crates/compiler/test_gen/src/gen_tags.rs b/crates/compiler/test_gen/src/gen_tags.rs index abe5337afc..8d784a13e0 100644 --- a/crates/compiler/test_gen/src/gen_tags.rs +++ b/crates/compiler/test_gen/src/gen_tags.rs @@ -1863,3 +1863,53 @@ fn error_type_in_tag_union_payload() { true // ignore type errors ) } + +#[test] +#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))] +fn issue_3653_recursion_pointer_in_naked_opaque() { + assert_evals_to!( + indoc!( + r#" + app "test" provides [main] to "./platform" + + Peano := [ Zero, Succ Peano ] + + recurse = \@Peano peano -> + when peano is + Succ inner -> recurse inner + _ -> {} + + main = + when recurse (@Peano Zero) is + _ -> "we're back" + "# + ), + RocStr::from("we're back"), + RocStr + ) +} + +#[test] +#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))] +fn issue_3653_recursion_pointer_in_naked_opaque_localized() { + assert_evals_to!( + indoc!( + r#" + app "test" provides [main] to "./platform" + + Peano := [ Zero, Succ Peano ] + + recurse = \peano -> + when peano is + @Peano (Succ inner) -> recurse inner + @Peano Zero -> {} + + main = + when recurse (@Peano Zero) is + _ -> "we're back" + "# + ), + RocStr::from("we're back"), + RocStr + ) +} diff --git a/crates/compiler/test_gen/src/helpers/dev.rs b/crates/compiler/test_gen/src/helpers/dev.rs index 38ddd2fea8..37e3ecc981 100644 --- a/crates/compiler/test_gen/src/helpers/dev.rs +++ b/crates/compiler/test_gen/src/helpers/dev.rs @@ -1,7 +1,7 @@ use libloading::Library; use roc_build::link::{link, LinkType}; use roc_builtins::bitcode; -use roc_load::Threading; +use roc_load::{EntryPoint, ExecutionMode, LoadConfig, Threading}; use roc_region::all::LineInfo; use tempfile::tempdir; @@ -50,15 +50,19 @@ pub fn helper( module_src = &temp; } + let load_config = LoadConfig { + target_info: roc_target::TargetInfo::default_x86_64(), + render: roc_reporting::report::RenderTarget::ColorTerminal, + threading: Threading::Single, + exec_mode: ExecutionMode::Executable, + }; let loaded = roc_load::load_and_monomorphize_from_str( arena, filename, module_src, src_dir, Default::default(), - roc_target::TargetInfo::default_x86_64(), - roc_reporting::report::RenderTarget::ColorTerminal, - Threading::Single, + load_config, ); let mut loaded = loaded.expect("failed to load module"); @@ -96,8 +100,16 @@ pub fn helper( } debug_assert_eq!(exposed_to_host.values.len(), 1); - let main_fn_symbol = loaded.entry_point.symbol; - let main_fn_layout = loaded.entry_point.layout; + let entry_point = match loaded.entry_point { + EntryPoint::Executable { symbol, layout, .. } => { + roc_mono::ir::EntryPoint { symbol, layout } + } + EntryPoint::Test => { + unreachable!() + } + }; + let main_fn_symbol = entry_point.symbol; + let main_fn_layout = entry_point.layout; let mut layout_ids = roc_mono::layout::LayoutIds::default(); let main_fn_name = layout_ids @@ -187,7 +199,7 @@ pub fn helper( // With the current method all methods are kept and it adds about 100k to all outputs. 
&[ app_o_file.to_str().unwrap(), - bitcode::BUILTINS_HOST_OBJ_PATH, + &bitcode::get_builtins_host_obj_path(), ], LinkType::Dylib, ) diff --git a/crates/compiler/test_gen/src/helpers/llvm.rs b/crates/compiler/test_gen/src/helpers/llvm.rs index b50fdf053f..ea37fd6507 100644 --- a/crates/compiler/test_gen/src/helpers/llvm.rs +++ b/crates/compiler/test_gen/src/helpers/llvm.rs @@ -7,7 +7,7 @@ use roc_build::program::FunctionIterator; use roc_collections::all::MutSet; use roc_gen_llvm::llvm::build::LlvmBackendMode; use roc_gen_llvm::llvm::externs::add_default_roc_externs; -use roc_load::Threading; +use roc_load::{EntryPoint, ExecutionMode, LoadConfig, Threading}; use roc_mono::ir::OptLevel; use roc_region::all::LineInfo; use roc_reporting::report::RenderTarget; @@ -66,15 +66,19 @@ fn create_llvm_module<'a>( module_src = &temp; } + let load_config = LoadConfig { + target_info, + render: RenderTarget::ColorTerminal, + threading: Threading::Single, + exec_mode: ExecutionMode::Executable, + }; let loaded = roc_load::load_and_monomorphize_from_str( arena, filename, module_src, src_dir, Default::default(), - target_info, - RenderTarget::ColorTerminal, - Threading::Single, + load_config, ); let mut loaded = match loaded { @@ -226,6 +230,14 @@ fn create_llvm_module<'a>( // platform to provide them. add_default_roc_externs(&env); + let entry_point = match entry_point { + EntryPoint::Executable { symbol, layout, .. } => { + roc_mono::ir::EntryPoint { symbol, layout } + } + EntryPoint::Test => { + unreachable!() + } + }; let (main_fn_name, main_fn) = match config.mode { LlvmBackendMode::Binary => unreachable!(), LlvmBackendMode::CliTest => unreachable!(), diff --git a/crates/compiler/test_gen/src/helpers/wasm.rs b/crates/compiler/test_gen/src/helpers/wasm.rs index 0d1b9a137a..75985fe4b2 100644 --- a/crates/compiler/test_gen/src/helpers/wasm.rs +++ b/crates/compiler/test_gen/src/helpers/wasm.rs @@ -4,7 +4,7 @@ use roc_collections::all::MutSet; use roc_gen_wasm::wasm32_result::Wasm32Result; use roc_gen_wasm::wasm_module::{Export, ExportType}; use roc_gen_wasm::DEBUG_SETTINGS; -use roc_load::Threading; +use roc_load::{ExecutionMode, LoadConfig, Threading}; use std::marker::PhantomData; use std::path::PathBuf; use std::rc::Rc; @@ -35,9 +35,9 @@ fn promote_expr_to_module(src: &str) -> String { } fn write_final_wasm() -> bool { - use roc_debug_flags::{dbg_do, ROC_WRITE_FINAL_WASM}; + use roc_debug_flags::dbg_do; - dbg_do!(ROC_WRITE_FINAL_WASM, { + dbg_do!(roc_debug_flags::ROC_WRITE_FINAL_WASM, { return true; }); @@ -84,15 +84,19 @@ fn compile_roc_to_wasm_bytes<'a, T: Wasm32Result>( module_src = &temp; } + let load_config = LoadConfig { + target_info: roc_target::TargetInfo::default_wasm32(), + render: roc_reporting::report::RenderTarget::ColorTerminal, + threading: Threading::Single, + exec_mode: ExecutionMode::Executable, + }; let loaded = roc_load::load_and_monomorphize_from_str( arena, filename, module_src, src_dir, Default::default(), - roc_target::TargetInfo::default_wasm32(), - roc_reporting::report::RenderTarget::ColorTerminal, - Threading::Single, + load_config, ); let loaded = loaded.expect("failed to load module"); diff --git a/crates/compiler/test_mono/Cargo.toml b/crates/compiler/test_mono/Cargo.toml index eb87b5efa8..377c57ebbe 100644 --- a/crates/compiler/test_mono/Cargo.toml +++ b/crates/compiler/test_mono/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "test_mono" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" diff --git 
a/crates/compiler/test_mono/generated/call_function_in_empty_list.txt b/crates/compiler/test_mono/generated/call_function_in_empty_list.txt index 2c04254ade..ac96381481 100644 --- a/crates/compiler/test_mono/generated/call_function_in_empty_list.txt +++ b/crates/compiler/test_mono/generated/call_function_in_empty_list.txt @@ -1,7 +1,7 @@ procedure List.5 (#Attr.2, #Attr.3): - let List.317 : List {} = lowlevel ListMap { xs: `#Attr.#arg1` } #Attr.2 Test.2 #Attr.3; + let List.385 : List {} = lowlevel ListMap { xs: `#Attr.#arg1` } #Attr.2 Test.2 #Attr.3; decref #Attr.2; - ret List.317; + ret List.385; procedure Test.2 (Test.3): let Test.7 : {} = Struct {}; diff --git a/crates/compiler/test_mono/generated/call_function_in_empty_list_unbound.txt b/crates/compiler/test_mono/generated/call_function_in_empty_list_unbound.txt index 2145a8f0db..8c4c9cb47d 100644 --- a/crates/compiler/test_mono/generated/call_function_in_empty_list_unbound.txt +++ b/crates/compiler/test_mono/generated/call_function_in_empty_list_unbound.txt @@ -1,7 +1,7 @@ procedure List.5 (#Attr.2, #Attr.3): - let List.317 : List [] = lowlevel ListMap { xs: `#Attr.#arg1` } #Attr.2 Test.2 #Attr.3; + let List.385 : List [] = lowlevel ListMap { xs: `#Attr.#arg1` } #Attr.2 Test.2 #Attr.3; decref #Attr.2; - ret List.317; + ret List.385; procedure Test.2 (Test.3): let Test.7 : {} = Struct {}; diff --git a/crates/compiler/test_mono/generated/closure_in_list.txt b/crates/compiler/test_mono/generated/closure_in_list.txt index 9ad991a987..520a8a1808 100644 --- a/crates/compiler/test_mono/generated/closure_in_list.txt +++ b/crates/compiler/test_mono/generated/closure_in_list.txt @@ -1,6 +1,6 @@ procedure List.6 (#Attr.2): - let List.317 : U64 = lowlevel ListLen #Attr.2; - ret List.317; + let List.385 : U64 = lowlevel ListLen #Attr.2; + ret List.385; procedure Test.1 (Test.5): let Test.2 : I64 = 41i64; diff --git a/crates/compiler/test_mono/generated/dict.txt b/crates/compiler/test_mono/generated/dict.txt index 1fcb74a01d..703c5f429d 100644 --- a/crates/compiler/test_mono/generated/dict.txt +++ b/crates/compiler/test_mono/generated/dict.txt @@ -7,8 +7,8 @@ procedure Dict.7 (Dict.96): ret Dict.101; procedure List.6 (#Attr.2): - let List.317 : U64 = lowlevel ListLen #Attr.2; - ret List.317; + let List.385 : U64 = lowlevel ListLen #Attr.2; + ret List.385; procedure Test.0 (): let Test.2 : List {[], []} = CallByName Dict.1; diff --git a/crates/compiler/test_mono/generated/empty_list_of_function_type.txt b/crates/compiler/test_mono/generated/empty_list_of_function_type.txt index 6fb9fbfeaf..70263729b3 100644 --- a/crates/compiler/test_mono/generated/empty_list_of_function_type.txt +++ b/crates/compiler/test_mono/generated/empty_list_of_function_type.txt @@ -1,22 +1,22 @@ -procedure List.2 (List.82, List.83): - let List.322 : U64 = CallByName List.6 List.82; - let List.319 : Int1 = CallByName Num.22 List.83 List.322; - if List.319 then - let List.321 : {} = CallByName List.60 List.82 List.83; - let List.320 : [C {}, C {}] = TagId(1) List.321; - ret List.320; +procedure List.2 (List.90, List.91): + let List.390 : U64 = CallByName List.6 List.90; + let List.387 : Int1 = CallByName Num.22 List.91 List.390; + if List.387 then + let List.389 : {} = CallByName List.66 List.90 List.91; + let List.388 : [C {}, C {}] = TagId(1) List.389; + ret List.388; else - let List.318 : {} = Struct {}; - let List.317 : [C {}, C {}] = TagId(0) List.318; - ret List.317; + let List.386 : {} = Struct {}; + let List.385 : [C {}, C {}] = TagId(0) List.386; + ret List.385; 
procedure List.6 (#Attr.2): - let List.324 : U64 = lowlevel ListLen #Attr.2; - ret List.324; + let List.392 : U64 = lowlevel ListLen #Attr.2; + ret List.392; -procedure List.60 (#Attr.2, #Attr.3): - let List.323 : {} = lowlevel ListGetUnsafe #Attr.2 #Attr.3; - ret List.323; +procedure List.66 (#Attr.2, #Attr.3): + let List.391 : {} = lowlevel ListGetUnsafe #Attr.2 #Attr.3; + ret List.391; procedure Num.22 (#Attr.2, #Attr.3): let Num.257 : Int1 = lowlevel NumLt #Attr.2 #Attr.3; diff --git a/crates/compiler/test_mono/generated/encode.txt b/crates/compiler/test_mono/generated/encode.txt index 9964316f71..f7bc46f32d 100644 --- a/crates/compiler/test_mono/generated/encode.txt +++ b/crates/compiler/test_mono/generated/encode.txt @@ -1,16 +1,16 @@ -procedure List.4 (List.93, List.94): - let List.319 : U64 = 1i64; - let List.318 : List U8 = CallByName List.65 List.93 List.319; - let List.317 : List U8 = CallByName List.66 List.318 List.94; - ret List.317; +procedure List.4 (List.101, List.102): + let List.387 : U64 = 1i64; + let List.386 : List U8 = CallByName List.70 List.101 List.387; + let List.385 : List U8 = CallByName List.71 List.386 List.102; + ret List.385; -procedure List.65 (#Attr.2, #Attr.3): - let List.321 : List U8 = lowlevel ListReserve #Attr.2 #Attr.3; - ret List.321; +procedure List.70 (#Attr.2, #Attr.3): + let List.389 : List U8 = lowlevel ListReserve #Attr.2 #Attr.3; + ret List.389; -procedure List.66 (#Attr.2, #Attr.3): - let List.320 : List U8 = lowlevel ListAppendUnsafe #Attr.2 #Attr.3; - ret List.320; +procedure List.71 (#Attr.2, #Attr.3): + let List.388 : List U8 = lowlevel ListAppendUnsafe #Attr.2 #Attr.3; + ret List.388; procedure Test.23 (Test.24, Test.35, #Attr.12): let Test.22 : U8 = StructAtIndex 0 #Attr.12; diff --git a/crates/compiler/test_mono/generated/encode_derived_nested_record_string.txt b/crates/compiler/test_mono/generated/encode_derived_nested_record_string.txt index 35dc1a839e..995f6b5fe6 100644 --- a/crates/compiler/test_mono/generated/encode_derived_nested_record_string.txt +++ b/crates/compiler/test_mono/generated/encode_derived_nested_record_string.txt @@ -224,159 +224,159 @@ procedure Json.83 (Json.111, Json.112): else jump Json.166 Json.91; -procedure List.125 (List.126, List.127, #Attr.12): - let List.124 : {} = StructAtIndex 0 #Attr.12; - let List.366 : {List U8, U64} = CallByName Json.83 List.126 List.127; - let List.365 : [C [], C {List U8, U64}] = TagId(1) List.366; - ret List.365; +procedure List.133 (List.134, List.135, #Attr.12): + let List.132 : {} = StructAtIndex 0 #Attr.12; + let List.434 : {List U8, U64} = CallByName Json.83 List.134 List.135; + let List.433 : [C [], C {List U8, U64}] = TagId(1) List.434; + ret List.433; -procedure List.125 (List.126, List.127, #Attr.12): - let List.124 : {} = StructAtIndex 0 #Attr.12; - let List.447 : {List U8, U64} = CallByName Json.83 List.126 List.127; - let List.446 : [C [], C {List U8, U64}] = TagId(1) List.447; - ret List.446; +procedure List.133 (List.134, List.135, #Attr.12): + let List.132 : {} = StructAtIndex 0 #Attr.12; + let List.515 : {List U8, U64} = CallByName Json.83 List.134 List.135; + let List.514 : [C [], C {List U8, U64}] = TagId(1) List.515; + ret List.514; -procedure List.18 (List.122, List.123, List.124): - let List.343 : {{}} = Struct {List.124}; - let List.337 : [C [], C {List U8, U64}] = CallByName List.63 List.122 List.123 List.343; - let List.340 : U8 = 1i64; - let List.341 : U8 = GetTagId List.337; - let List.342 : Int1 = lowlevel Eq List.340 List.341; - if List.342 then - 
let List.129 : {List U8, U64} = UnionAtIndex (Id 1) (Index 0) List.337; - inc List.129; - dec List.337; - ret List.129; +procedure List.18 (List.130, List.131, List.132): + let List.411 : {{}} = Struct {List.132}; + let List.405 : [C [], C {List U8, U64}] = CallByName List.75 List.130 List.131 List.411; + let List.408 : U8 = 1i64; + let List.409 : U8 = GetTagId List.405; + let List.410 : Int1 = lowlevel Eq List.408 List.409; + if List.410 then + let List.137 : {List U8, U64} = UnionAtIndex (Id 1) (Index 0) List.405; + inc List.137; + dec List.405; + ret List.137; else - let List.130 : [] = UnionAtIndex (Id 0) (Index 0) List.337; - dec List.337; - let List.339 : {List U8, U64} = CallByName List.64 List.130; - ret List.339; + let List.138 : [] = UnionAtIndex (Id 0) (Index 0) List.405; + dec List.405; + let List.407 : {List U8, U64} = CallByName List.69 List.138; + ret List.407; -procedure List.18 (List.122, List.123, List.124): - let List.423 : {{}} = Struct {List.124}; - let List.417 : [C [], C {List U8, U64}] = CallByName List.63 List.122 List.123 List.423; - let List.420 : U8 = 1i64; - let List.421 : U8 = GetTagId List.417; - let List.422 : Int1 = lowlevel Eq List.420 List.421; - if List.422 then - let List.129 : {List U8, U64} = UnionAtIndex (Id 1) (Index 0) List.417; - inc List.129; - dec List.417; - ret List.129; +procedure List.18 (List.130, List.131, List.132): + let List.491 : {{}} = Struct {List.132}; + let List.485 : [C [], C {List U8, U64}] = CallByName List.75 List.130 List.131 List.491; + let List.488 : U8 = 1i64; + let List.489 : U8 = GetTagId List.485; + let List.490 : Int1 = lowlevel Eq List.488 List.489; + if List.490 then + let List.137 : {List U8, U64} = UnionAtIndex (Id 1) (Index 0) List.485; + inc List.137; + dec List.485; + ret List.137; else - let List.130 : [] = UnionAtIndex (Id 0) (Index 0) List.417; - dec List.417; - let List.419 : {List U8, U64} = CallByName List.64 List.130; - ret List.419; + let List.138 : [] = UnionAtIndex (Id 0) (Index 0) List.485; + dec List.485; + let List.487 : {List U8, U64} = CallByName List.69 List.138; + ret List.487; -procedure List.4 (List.93, List.94): - let List.416 : U64 = 1i64; - let List.415 : List U8 = CallByName List.65 List.93 List.416; - let List.414 : List U8 = CallByName List.66 List.415 List.94; - ret List.414; +procedure List.4 (List.101, List.102): + let List.484 : U64 = 1i64; + let List.483 : List U8 = CallByName List.70 List.101 List.484; + let List.482 : List U8 = CallByName List.71 List.483 List.102; + ret List.482; procedure List.6 (#Attr.2): - let List.317 : U64 = lowlevel ListLen #Attr.2; - ret List.317; + let List.385 : U64 = lowlevel ListLen #Attr.2; + ret List.385; procedure List.6 (#Attr.2): - let List.345 : U64 = lowlevel ListLen #Attr.2; - ret List.345; + let List.413 : U64 = lowlevel ListLen #Attr.2; + ret List.413; procedure List.6 (#Attr.2): - let List.426 : U64 = lowlevel ListLen #Attr.2; - ret List.426; - -procedure List.60 (#Attr.2, #Attr.3): - let List.364 : {Str, {Str}} = lowlevel ListGetUnsafe #Attr.2 #Attr.3; - ret List.364; - -procedure List.60 (#Attr.2, #Attr.3): - let List.445 : {Str, {Str}} = lowlevel ListGetUnsafe #Attr.2 #Attr.3; - ret List.445; - -procedure List.63 (List.305, List.306, List.307): - let List.350 : U64 = 0i64; - let List.351 : U64 = CallByName List.6 List.305; - let List.349 : [C [], C {List U8, U64}] = CallByName List.80 List.305 List.306 List.307 List.350 List.351; - ret List.349; - -procedure List.63 (List.305, List.306, List.307): - let List.431 : U64 = 0i64; - let 
List.432 : U64 = CallByName List.6 List.305; - let List.430 : [C [], C {List U8, U64}] = CallByName List.80 List.305 List.306 List.307 List.431 List.432; - ret List.430; - -procedure List.64 (#Attr.2): - let List.429 : {List U8, U64} = lowlevel Unreachable #Attr.2; - ret List.429; - -procedure List.65 (#Attr.2, #Attr.3): - let List.428 : List U8 = lowlevel ListReserve #Attr.2 #Attr.3; - ret List.428; + let List.494 : U64 = lowlevel ListLen #Attr.2; + ret List.494; procedure List.66 (#Attr.2, #Attr.3): - let List.427 : List U8 = lowlevel ListAppendUnsafe #Attr.2 #Attr.3; - ret List.427; + let List.432 : {Str, {Str}} = lowlevel ListGetUnsafe #Attr.2 #Attr.3; + ret List.432; + +procedure List.66 (#Attr.2, #Attr.3): + let List.513 : {Str, {Str}} = lowlevel ListGetUnsafe #Attr.2 #Attr.3; + ret List.513; + +procedure List.69 (#Attr.2): + let List.497 : {List U8, U64} = lowlevel Unreachable #Attr.2; + ret List.497; + +procedure List.70 (#Attr.2, #Attr.3): + let List.496 : List U8 = lowlevel ListReserve #Attr.2 #Attr.3; + ret List.496; + +procedure List.71 (#Attr.2, #Attr.3): + let List.495 : List U8 = lowlevel ListAppendUnsafe #Attr.2 #Attr.3; + ret List.495; + +procedure List.75 (List.361, List.362, List.363): + let List.418 : U64 = 0i64; + let List.419 : U64 = CallByName List.6 List.361; + let List.417 : [C [], C {List U8, U64}] = CallByName List.86 List.361 List.362 List.363 List.418 List.419; + ret List.417; + +procedure List.75 (List.361, List.362, List.363): + let List.499 : U64 = 0i64; + let List.500 : U64 = CallByName List.6 List.361; + let List.498 : [C [], C {List U8, U64}] = CallByName List.86 List.361 List.362 List.363 List.499 List.500; + ret List.498; procedure List.8 (#Attr.2, #Attr.3): - let List.425 : List U8 = lowlevel ListConcat #Attr.2 #Attr.3; - ret List.425; + let List.493 : List U8 = lowlevel ListConcat #Attr.2 #Attr.3; + ret List.493; -procedure List.80 (List.380, List.381, List.382, List.383, List.384): - joinpoint List.352 List.308 List.309 List.310 List.311 List.312: - let List.354 : Int1 = CallByName Num.22 List.311 List.312; - if List.354 then - let List.363 : {Str, {Str}} = CallByName List.60 List.308 List.311; - let List.355 : [C [], C {List U8, U64}] = CallByName List.125 List.309 List.363 List.310; - let List.360 : U8 = 1i64; - let List.361 : U8 = GetTagId List.355; - let List.362 : Int1 = lowlevel Eq List.360 List.361; - if List.362 then - let List.313 : {List U8, U64} = UnionAtIndex (Id 1) (Index 0) List.355; - inc List.313; - dec List.355; - let List.358 : U64 = 1i64; - let List.357 : U64 = CallByName Num.19 List.311 List.358; - jump List.352 List.308 List.313 List.310 List.357 List.312; +procedure List.86 (List.448, List.449, List.450, List.451, List.452): + joinpoint List.420 List.364 List.365 List.366 List.367 List.368: + let List.422 : Int1 = CallByName Num.22 List.367 List.368; + if List.422 then + let List.431 : {Str, {Str}} = CallByName List.66 List.364 List.367; + let List.423 : [C [], C {List U8, U64}] = CallByName List.133 List.365 List.431 List.366; + let List.428 : U8 = 1i64; + let List.429 : U8 = GetTagId List.423; + let List.430 : Int1 = lowlevel Eq List.428 List.429; + if List.430 then + let List.369 : {List U8, U64} = UnionAtIndex (Id 1) (Index 0) List.423; + inc List.369; + dec List.423; + let List.426 : U64 = 1i64; + let List.425 : U64 = CallByName Num.19 List.367 List.426; + jump List.420 List.364 List.369 List.366 List.425 List.368; else - let List.314 : [] = UnionAtIndex (Id 0) (Index 0) List.355; - dec List.355; - let List.359 : [C [], C 
{List U8, U64}] = TagId(0) List.314; - ret List.359; + let List.370 : [] = UnionAtIndex (Id 0) (Index 0) List.423; + dec List.423; + let List.427 : [C [], C {List U8, U64}] = TagId(0) List.370; + ret List.427; else - let List.353 : [C [], C {List U8, U64}] = TagId(1) List.309; - ret List.353; + let List.421 : [C [], C {List U8, U64}] = TagId(1) List.365; + ret List.421; in - jump List.352 List.380 List.381 List.382 List.383 List.384; + jump List.420 List.448 List.449 List.450 List.451 List.452; -procedure List.80 (List.461, List.462, List.463, List.464, List.465): - joinpoint List.433 List.308 List.309 List.310 List.311 List.312: - let List.435 : Int1 = CallByName Num.22 List.311 List.312; - if List.435 then - let List.444 : {Str, {Str}} = CallByName List.60 List.308 List.311; - let List.436 : [C [], C {List U8, U64}] = CallByName List.125 List.309 List.444 List.310; - let List.441 : U8 = 1i64; - let List.442 : U8 = GetTagId List.436; - let List.443 : Int1 = lowlevel Eq List.441 List.442; - if List.443 then - let List.313 : {List U8, U64} = UnionAtIndex (Id 1) (Index 0) List.436; - inc List.313; - dec List.436; - let List.439 : U64 = 1i64; - let List.438 : U64 = CallByName Num.19 List.311 List.439; - jump List.433 List.308 List.313 List.310 List.438 List.312; +procedure List.86 (List.529, List.530, List.531, List.532, List.533): + joinpoint List.501 List.364 List.365 List.366 List.367 List.368: + let List.503 : Int1 = CallByName Num.22 List.367 List.368; + if List.503 then + let List.512 : {Str, {Str}} = CallByName List.66 List.364 List.367; + let List.504 : [C [], C {List U8, U64}] = CallByName List.133 List.365 List.512 List.366; + let List.509 : U8 = 1i64; + let List.510 : U8 = GetTagId List.504; + let List.511 : Int1 = lowlevel Eq List.509 List.510; + if List.511 then + let List.369 : {List U8, U64} = UnionAtIndex (Id 1) (Index 0) List.504; + inc List.369; + dec List.504; + let List.507 : U64 = 1i64; + let List.506 : U64 = CallByName Num.19 List.367 List.507; + jump List.501 List.364 List.369 List.366 List.506 List.368; else - let List.314 : [] = UnionAtIndex (Id 0) (Index 0) List.436; - dec List.436; - let List.440 : [C [], C {List U8, U64}] = TagId(0) List.314; - ret List.440; + let List.370 : [] = UnionAtIndex (Id 0) (Index 0) List.504; + dec List.504; + let List.508 : [C [], C {List U8, U64}] = TagId(0) List.370; + ret List.508; else - let List.434 : [C [], C {List U8, U64}] = TagId(1) List.309; - ret List.434; + let List.502 : [C [], C {List U8, U64}] = TagId(1) List.365; + ret List.502; in - jump List.433 List.461 List.462 List.463 List.464 List.465; + jump List.501 List.529 List.530 List.531 List.532 List.533; procedure Num.123 (#Attr.2): let Num.283 : U8 = lowlevel NumIntCast #Attr.2; diff --git a/crates/compiler/test_mono/generated/encode_derived_record_one_field_string.txt b/crates/compiler/test_mono/generated/encode_derived_record_one_field_string.txt index 4a7047fb37..8563241976 100644 --- a/crates/compiler/test_mono/generated/encode_derived_record_one_field_string.txt +++ b/crates/compiler/test_mono/generated/encode_derived_record_one_field_string.txt @@ -129,95 +129,95 @@ procedure Json.83 (Json.114, Json.115): else jump Json.126 Json.91; -procedure List.125 (List.126, List.127, #Attr.12): - let List.124 : {} = StructAtIndex 0 #Attr.12; - let List.373 : {List U8, U64} = CallByName Json.83 List.126 List.127; - let List.372 : [C [], C {List U8, U64}] = TagId(1) List.373; - ret List.372; +procedure List.133 (List.134, List.135, #Attr.12): + let List.132 : {} = StructAtIndex 
0 #Attr.12; + let List.441 : {List U8, U64} = CallByName Json.83 List.134 List.135; + let List.440 : [C [], C {List U8, U64}] = TagId(1) List.441; + ret List.440; -procedure List.18 (List.122, List.123, List.124): - let List.349 : {{}} = Struct {List.124}; - let List.343 : [C [], C {List U8, U64}] = CallByName List.63 List.122 List.123 List.349; - let List.346 : U8 = 1i64; - let List.347 : U8 = GetTagId List.343; - let List.348 : Int1 = lowlevel Eq List.346 List.347; - if List.348 then - let List.129 : {List U8, U64} = UnionAtIndex (Id 1) (Index 0) List.343; - inc List.129; - dec List.343; - ret List.129; +procedure List.18 (List.130, List.131, List.132): + let List.417 : {{}} = Struct {List.132}; + let List.411 : [C [], C {List U8, U64}] = CallByName List.75 List.130 List.131 List.417; + let List.414 : U8 = 1i64; + let List.415 : U8 = GetTagId List.411; + let List.416 : Int1 = lowlevel Eq List.414 List.415; + if List.416 then + let List.137 : {List U8, U64} = UnionAtIndex (Id 1) (Index 0) List.411; + inc List.137; + dec List.411; + ret List.137; else - let List.130 : [] = UnionAtIndex (Id 0) (Index 0) List.343; - dec List.343; - let List.345 : {List U8, U64} = CallByName List.64 List.130; - ret List.345; + let List.138 : [] = UnionAtIndex (Id 0) (Index 0) List.411; + dec List.411; + let List.413 : {List U8, U64} = CallByName List.69 List.138; + ret List.413; -procedure List.4 (List.93, List.94): - let List.342 : U64 = 1i64; - let List.341 : List U8 = CallByName List.65 List.93 List.342; - let List.340 : List U8 = CallByName List.66 List.341 List.94; - ret List.340; +procedure List.4 (List.101, List.102): + let List.410 : U64 = 1i64; + let List.409 : List U8 = CallByName List.70 List.101 List.410; + let List.408 : List U8 = CallByName List.71 List.409 List.102; + ret List.408; procedure List.6 (#Attr.2): - let List.317 : U64 = lowlevel ListLen #Attr.2; - ret List.317; + let List.385 : U64 = lowlevel ListLen #Attr.2; + ret List.385; procedure List.6 (#Attr.2): - let List.352 : U64 = lowlevel ListLen #Attr.2; - ret List.352; - -procedure List.60 (#Attr.2, #Attr.3): - let List.371 : {Str, {Str}} = lowlevel ListGetUnsafe #Attr.2 #Attr.3; - ret List.371; - -procedure List.63 (List.305, List.306, List.307): - let List.357 : U64 = 0i64; - let List.358 : U64 = CallByName List.6 List.305; - let List.356 : [C [], C {List U8, U64}] = CallByName List.80 List.305 List.306 List.307 List.357 List.358; - ret List.356; - -procedure List.64 (#Attr.2): - let List.355 : {List U8, U64} = lowlevel Unreachable #Attr.2; - ret List.355; - -procedure List.65 (#Attr.2, #Attr.3): - let List.354 : List U8 = lowlevel ListReserve #Attr.2 #Attr.3; - ret List.354; + let List.420 : U64 = lowlevel ListLen #Attr.2; + ret List.420; procedure List.66 (#Attr.2, #Attr.3): - let List.353 : List U8 = lowlevel ListAppendUnsafe #Attr.2 #Attr.3; - ret List.353; + let List.439 : {Str, {Str}} = lowlevel ListGetUnsafe #Attr.2 #Attr.3; + ret List.439; + +procedure List.69 (#Attr.2): + let List.423 : {List U8, U64} = lowlevel Unreachable #Attr.2; + ret List.423; + +procedure List.70 (#Attr.2, #Attr.3): + let List.422 : List U8 = lowlevel ListReserve #Attr.2 #Attr.3; + ret List.422; + +procedure List.71 (#Attr.2, #Attr.3): + let List.421 : List U8 = lowlevel ListAppendUnsafe #Attr.2 #Attr.3; + ret List.421; + +procedure List.75 (List.361, List.362, List.363): + let List.425 : U64 = 0i64; + let List.426 : U64 = CallByName List.6 List.361; + let List.424 : [C [], C {List U8, U64}] = CallByName List.86 List.361 List.362 List.363 List.425 
List.426; + ret List.424; procedure List.8 (#Attr.2, #Attr.3): - let List.351 : List U8 = lowlevel ListConcat #Attr.2 #Attr.3; - ret List.351; + let List.419 : List U8 = lowlevel ListConcat #Attr.2 #Attr.3; + ret List.419; -procedure List.80 (List.387, List.388, List.389, List.390, List.391): - joinpoint List.359 List.308 List.309 List.310 List.311 List.312: - let List.361 : Int1 = CallByName Num.22 List.311 List.312; - if List.361 then - let List.370 : {Str, {Str}} = CallByName List.60 List.308 List.311; - let List.362 : [C [], C {List U8, U64}] = CallByName List.125 List.309 List.370 List.310; - let List.367 : U8 = 1i64; - let List.368 : U8 = GetTagId List.362; - let List.369 : Int1 = lowlevel Eq List.367 List.368; - if List.369 then - let List.313 : {List U8, U64} = UnionAtIndex (Id 1) (Index 0) List.362; - inc List.313; - dec List.362; - let List.365 : U64 = 1i64; - let List.364 : U64 = CallByName Num.19 List.311 List.365; - jump List.359 List.308 List.313 List.310 List.364 List.312; +procedure List.86 (List.455, List.456, List.457, List.458, List.459): + joinpoint List.427 List.364 List.365 List.366 List.367 List.368: + let List.429 : Int1 = CallByName Num.22 List.367 List.368; + if List.429 then + let List.438 : {Str, {Str}} = CallByName List.66 List.364 List.367; + let List.430 : [C [], C {List U8, U64}] = CallByName List.133 List.365 List.438 List.366; + let List.435 : U8 = 1i64; + let List.436 : U8 = GetTagId List.430; + let List.437 : Int1 = lowlevel Eq List.435 List.436; + if List.437 then + let List.369 : {List U8, U64} = UnionAtIndex (Id 1) (Index 0) List.430; + inc List.369; + dec List.430; + let List.433 : U64 = 1i64; + let List.432 : U64 = CallByName Num.19 List.367 List.433; + jump List.427 List.364 List.369 List.366 List.432 List.368; else - let List.314 : [] = UnionAtIndex (Id 0) (Index 0) List.362; - dec List.362; - let List.366 : [C [], C {List U8, U64}] = TagId(0) List.314; - ret List.366; + let List.370 : [] = UnionAtIndex (Id 0) (Index 0) List.430; + dec List.430; + let List.434 : [C [], C {List U8, U64}] = TagId(0) List.370; + ret List.434; else - let List.360 : [C [], C {List U8, U64}] = TagId(1) List.309; - ret List.360; + let List.428 : [C [], C {List U8, U64}] = TagId(1) List.365; + ret List.428; in - jump List.359 List.387 List.388 List.389 List.390 List.391; + jump List.427 List.455 List.456 List.457 List.458 List.459; procedure Num.123 (#Attr.2): let Num.264 : U8 = lowlevel NumIntCast #Attr.2; diff --git a/crates/compiler/test_mono/generated/encode_derived_record_two_field_strings.txt b/crates/compiler/test_mono/generated/encode_derived_record_two_field_strings.txt index 47f38234c3..2d895e110b 100644 --- a/crates/compiler/test_mono/generated/encode_derived_record_two_field_strings.txt +++ b/crates/compiler/test_mono/generated/encode_derived_record_two_field_strings.txt @@ -137,95 +137,95 @@ procedure Json.83 (Json.117, Json.118): else jump Json.129 Json.91; -procedure List.125 (List.126, List.127, #Attr.12): - let List.124 : {} = StructAtIndex 0 #Attr.12; - let List.373 : {List U8, U64} = CallByName Json.83 List.126 List.127; - let List.372 : [C [], C {List U8, U64}] = TagId(1) List.373; - ret List.372; +procedure List.133 (List.134, List.135, #Attr.12): + let List.132 : {} = StructAtIndex 0 #Attr.12; + let List.441 : {List U8, U64} = CallByName Json.83 List.134 List.135; + let List.440 : [C [], C {List U8, U64}] = TagId(1) List.441; + ret List.440; -procedure List.18 (List.122, List.123, List.124): - let List.349 : {{}} = Struct {List.124}; - let List.343 : 
[C [], C {List U8, U64}] = CallByName List.63 List.122 List.123 List.349; - let List.346 : U8 = 1i64; - let List.347 : U8 = GetTagId List.343; - let List.348 : Int1 = lowlevel Eq List.346 List.347; - if List.348 then - let List.129 : {List U8, U64} = UnionAtIndex (Id 1) (Index 0) List.343; - inc List.129; - dec List.343; - ret List.129; +procedure List.18 (List.130, List.131, List.132): + let List.417 : {{}} = Struct {List.132}; + let List.411 : [C [], C {List U8, U64}] = CallByName List.75 List.130 List.131 List.417; + let List.414 : U8 = 1i64; + let List.415 : U8 = GetTagId List.411; + let List.416 : Int1 = lowlevel Eq List.414 List.415; + if List.416 then + let List.137 : {List U8, U64} = UnionAtIndex (Id 1) (Index 0) List.411; + inc List.137; + dec List.411; + ret List.137; else - let List.130 : [] = UnionAtIndex (Id 0) (Index 0) List.343; - dec List.343; - let List.345 : {List U8, U64} = CallByName List.64 List.130; - ret List.345; + let List.138 : [] = UnionAtIndex (Id 0) (Index 0) List.411; + dec List.411; + let List.413 : {List U8, U64} = CallByName List.69 List.138; + ret List.413; -procedure List.4 (List.93, List.94): - let List.342 : U64 = 1i64; - let List.341 : List U8 = CallByName List.65 List.93 List.342; - let List.340 : List U8 = CallByName List.66 List.341 List.94; - ret List.340; +procedure List.4 (List.101, List.102): + let List.410 : U64 = 1i64; + let List.409 : List U8 = CallByName List.70 List.101 List.410; + let List.408 : List U8 = CallByName List.71 List.409 List.102; + ret List.408; procedure List.6 (#Attr.2): - let List.317 : U64 = lowlevel ListLen #Attr.2; - ret List.317; + let List.385 : U64 = lowlevel ListLen #Attr.2; + ret List.385; procedure List.6 (#Attr.2): - let List.352 : U64 = lowlevel ListLen #Attr.2; - ret List.352; - -procedure List.60 (#Attr.2, #Attr.3): - let List.371 : {Str, {Str}} = lowlevel ListGetUnsafe #Attr.2 #Attr.3; - ret List.371; - -procedure List.63 (List.305, List.306, List.307): - let List.357 : U64 = 0i64; - let List.358 : U64 = CallByName List.6 List.305; - let List.356 : [C [], C {List U8, U64}] = CallByName List.80 List.305 List.306 List.307 List.357 List.358; - ret List.356; - -procedure List.64 (#Attr.2): - let List.355 : {List U8, U64} = lowlevel Unreachable #Attr.2; - ret List.355; - -procedure List.65 (#Attr.2, #Attr.3): - let List.354 : List U8 = lowlevel ListReserve #Attr.2 #Attr.3; - ret List.354; + let List.420 : U64 = lowlevel ListLen #Attr.2; + ret List.420; procedure List.66 (#Attr.2, #Attr.3): - let List.353 : List U8 = lowlevel ListAppendUnsafe #Attr.2 #Attr.3; - ret List.353; + let List.439 : {Str, {Str}} = lowlevel ListGetUnsafe #Attr.2 #Attr.3; + ret List.439; + +procedure List.69 (#Attr.2): + let List.423 : {List U8, U64} = lowlevel Unreachable #Attr.2; + ret List.423; + +procedure List.70 (#Attr.2, #Attr.3): + let List.422 : List U8 = lowlevel ListReserve #Attr.2 #Attr.3; + ret List.422; + +procedure List.71 (#Attr.2, #Attr.3): + let List.421 : List U8 = lowlevel ListAppendUnsafe #Attr.2 #Attr.3; + ret List.421; + +procedure List.75 (List.361, List.362, List.363): + let List.425 : U64 = 0i64; + let List.426 : U64 = CallByName List.6 List.361; + let List.424 : [C [], C {List U8, U64}] = CallByName List.86 List.361 List.362 List.363 List.425 List.426; + ret List.424; procedure List.8 (#Attr.2, #Attr.3): - let List.351 : List U8 = lowlevel ListConcat #Attr.2 #Attr.3; - ret List.351; + let List.419 : List U8 = lowlevel ListConcat #Attr.2 #Attr.3; + ret List.419; -procedure List.80 (List.387, List.388, List.389, 
List.390, List.391): - joinpoint List.359 List.308 List.309 List.310 List.311 List.312: - let List.361 : Int1 = CallByName Num.22 List.311 List.312; - if List.361 then - let List.370 : {Str, {Str}} = CallByName List.60 List.308 List.311; - let List.362 : [C [], C {List U8, U64}] = CallByName List.125 List.309 List.370 List.310; - let List.367 : U8 = 1i64; - let List.368 : U8 = GetTagId List.362; - let List.369 : Int1 = lowlevel Eq List.367 List.368; - if List.369 then - let List.313 : {List U8, U64} = UnionAtIndex (Id 1) (Index 0) List.362; - inc List.313; - dec List.362; - let List.365 : U64 = 1i64; - let List.364 : U64 = CallByName Num.19 List.311 List.365; - jump List.359 List.308 List.313 List.310 List.364 List.312; +procedure List.86 (List.455, List.456, List.457, List.458, List.459): + joinpoint List.427 List.364 List.365 List.366 List.367 List.368: + let List.429 : Int1 = CallByName Num.22 List.367 List.368; + if List.429 then + let List.438 : {Str, {Str}} = CallByName List.66 List.364 List.367; + let List.430 : [C [], C {List U8, U64}] = CallByName List.133 List.365 List.438 List.366; + let List.435 : U8 = 1i64; + let List.436 : U8 = GetTagId List.430; + let List.437 : Int1 = lowlevel Eq List.435 List.436; + if List.437 then + let List.369 : {List U8, U64} = UnionAtIndex (Id 1) (Index 0) List.430; + inc List.369; + dec List.430; + let List.433 : U64 = 1i64; + let List.432 : U64 = CallByName Num.19 List.367 List.433; + jump List.427 List.364 List.369 List.366 List.432 List.368; else - let List.314 : [] = UnionAtIndex (Id 0) (Index 0) List.362; - dec List.362; - let List.366 : [C [], C {List U8, U64}] = TagId(0) List.314; - ret List.366; + let List.370 : [] = UnionAtIndex (Id 0) (Index 0) List.430; + dec List.430; + let List.434 : [C [], C {List U8, U64}] = TagId(0) List.370; + ret List.434; else - let List.360 : [C [], C {List U8, U64}] = TagId(1) List.309; - ret List.360; + let List.428 : [C [], C {List U8, U64}] = TagId(1) List.365; + ret List.428; in - jump List.359 List.387 List.388 List.389 List.390 List.391; + jump List.427 List.455 List.456 List.457 List.458 List.459; procedure Num.123 (#Attr.2): let Num.264 : U8 = lowlevel NumIntCast #Attr.2; diff --git a/crates/compiler/test_mono/generated/encode_derived_string.txt b/crates/compiler/test_mono/generated/encode_derived_string.txt index 3f908525f0..0348ec3e3f 100644 --- a/crates/compiler/test_mono/generated/encode_derived_string.txt +++ b/crates/compiler/test_mono/generated/encode_derived_string.txt @@ -34,27 +34,27 @@ procedure Json.65 (Json.66, Json.109, #Attr.12): let Json.111 : List U8 = CallByName List.4 Json.112 Json.113; ret Json.111; -procedure List.4 (List.93, List.94): - let List.324 : U64 = 1i64; - let List.323 : List U8 = CallByName List.65 List.93 List.324; - let List.322 : List U8 = CallByName List.66 List.323 List.94; - ret List.322; +procedure List.4 (List.101, List.102): + let List.392 : U64 = 1i64; + let List.391 : List U8 = CallByName List.70 List.101 List.392; + let List.390 : List U8 = CallByName List.71 List.391 List.102; + ret List.390; procedure List.6 (#Attr.2): - let List.317 : U64 = lowlevel ListLen #Attr.2; - ret List.317; + let List.385 : U64 = lowlevel ListLen #Attr.2; + ret List.385; -procedure List.65 (#Attr.2, #Attr.3): - let List.327 : List U8 = lowlevel ListReserve #Attr.2 #Attr.3; - ret List.327; +procedure List.70 (#Attr.2, #Attr.3): + let List.395 : List U8 = lowlevel ListReserve #Attr.2 #Attr.3; + ret List.395; -procedure List.66 (#Attr.2, #Attr.3): - let List.326 : List U8 = lowlevel 
ListAppendUnsafe #Attr.2 #Attr.3; - ret List.326; +procedure List.71 (#Attr.2, #Attr.3): + let List.394 : List U8 = lowlevel ListAppendUnsafe #Attr.2 #Attr.3; + ret List.394; procedure List.8 (#Attr.2, #Attr.3): - let List.325 : List U8 = lowlevel ListConcat #Attr.2 #Attr.3; - ret List.325; + let List.393 : List U8 = lowlevel ListConcat #Attr.2 #Attr.3; + ret List.393; procedure Num.123 (#Attr.2): let Num.258 : U8 = lowlevel NumIntCast #Attr.2; diff --git a/crates/compiler/test_mono/generated/encode_derived_tag_one_field_string.txt b/crates/compiler/test_mono/generated/encode_derived_tag_one_field_string.txt index 100272a0df..4aaea1d783 100644 --- a/crates/compiler/test_mono/generated/encode_derived_tag_one_field_string.txt +++ b/crates/compiler/test_mono/generated/encode_derived_tag_one_field_string.txt @@ -134,95 +134,95 @@ procedure Json.97 (Json.114, Json.103): else jump Json.128 Json.104; -procedure List.125 (List.126, List.127, #Attr.12): - let List.124 : {} = StructAtIndex 0 #Attr.12; - let List.379 : {List U8, U64} = CallByName Json.97 List.126 List.127; - let List.378 : [C [], C {List U8, U64}] = TagId(1) List.379; - ret List.378; +procedure List.133 (List.134, List.135, #Attr.12): + let List.132 : {} = StructAtIndex 0 #Attr.12; + let List.447 : {List U8, U64} = CallByName Json.97 List.134 List.135; + let List.446 : [C [], C {List U8, U64}] = TagId(1) List.447; + ret List.446; -procedure List.18 (List.122, List.123, List.124): - let List.355 : {{}} = Struct {List.124}; - let List.349 : [C [], C {List U8, U64}] = CallByName List.63 List.122 List.123 List.355; - let List.352 : U8 = 1i64; - let List.353 : U8 = GetTagId List.349; - let List.354 : Int1 = lowlevel Eq List.352 List.353; - if List.354 then - let List.129 : {List U8, U64} = UnionAtIndex (Id 1) (Index 0) List.349; - inc List.129; - dec List.349; - ret List.129; +procedure List.18 (List.130, List.131, List.132): + let List.423 : {{}} = Struct {List.132}; + let List.417 : [C [], C {List U8, U64}] = CallByName List.75 List.130 List.131 List.423; + let List.420 : U8 = 1i64; + let List.421 : U8 = GetTagId List.417; + let List.422 : Int1 = lowlevel Eq List.420 List.421; + if List.422 then + let List.137 : {List U8, U64} = UnionAtIndex (Id 1) (Index 0) List.417; + inc List.137; + dec List.417; + ret List.137; else - let List.130 : [] = UnionAtIndex (Id 0) (Index 0) List.349; - dec List.349; - let List.351 : {List U8, U64} = CallByName List.64 List.130; - ret List.351; + let List.138 : [] = UnionAtIndex (Id 0) (Index 0) List.417; + dec List.417; + let List.419 : {List U8, U64} = CallByName List.69 List.138; + ret List.419; -procedure List.4 (List.93, List.94): - let List.348 : U64 = 1i64; - let List.347 : List U8 = CallByName List.65 List.93 List.348; - let List.346 : List U8 = CallByName List.66 List.347 List.94; - ret List.346; +procedure List.4 (List.101, List.102): + let List.416 : U64 = 1i64; + let List.415 : List U8 = CallByName List.70 List.101 List.416; + let List.414 : List U8 = CallByName List.71 List.415 List.102; + ret List.414; procedure List.6 (#Attr.2): - let List.317 : U64 = lowlevel ListLen #Attr.2; - ret List.317; + let List.385 : U64 = lowlevel ListLen #Attr.2; + ret List.385; procedure List.6 (#Attr.2): - let List.356 : U64 = lowlevel ListLen #Attr.2; - ret List.356; - -procedure List.60 (#Attr.2, #Attr.3): - let List.377 : {Str} = lowlevel ListGetUnsafe #Attr.2 #Attr.3; - ret List.377; - -procedure List.63 (List.305, List.306, List.307): - let List.363 : U64 = 0i64; - let List.364 : U64 = CallByName List.6 
List.305; - let List.362 : [C [], C {List U8, U64}] = CallByName List.80 List.305 List.306 List.307 List.363 List.364; - ret List.362; - -procedure List.64 (#Attr.2): - let List.361 : {List U8, U64} = lowlevel Unreachable #Attr.2; - ret List.361; - -procedure List.65 (#Attr.2, #Attr.3): - let List.360 : List U8 = lowlevel ListReserve #Attr.2 #Attr.3; - ret List.360; + let List.424 : U64 = lowlevel ListLen #Attr.2; + ret List.424; procedure List.66 (#Attr.2, #Attr.3): - let List.359 : List U8 = lowlevel ListAppendUnsafe #Attr.2 #Attr.3; - ret List.359; + let List.445 : {Str} = lowlevel ListGetUnsafe #Attr.2 #Attr.3; + ret List.445; + +procedure List.69 (#Attr.2): + let List.429 : {List U8, U64} = lowlevel Unreachable #Attr.2; + ret List.429; + +procedure List.70 (#Attr.2, #Attr.3): + let List.428 : List U8 = lowlevel ListReserve #Attr.2 #Attr.3; + ret List.428; + +procedure List.71 (#Attr.2, #Attr.3): + let List.427 : List U8 = lowlevel ListAppendUnsafe #Attr.2 #Attr.3; + ret List.427; + +procedure List.75 (List.361, List.362, List.363): + let List.431 : U64 = 0i64; + let List.432 : U64 = CallByName List.6 List.361; + let List.430 : [C [], C {List U8, U64}] = CallByName List.86 List.361 List.362 List.363 List.431 List.432; + ret List.430; procedure List.8 (#Attr.2, #Attr.3): - let List.358 : List U8 = lowlevel ListConcat #Attr.2 #Attr.3; - ret List.358; + let List.426 : List U8 = lowlevel ListConcat #Attr.2 #Attr.3; + ret List.426; -procedure List.80 (List.393, List.394, List.395, List.396, List.397): - joinpoint List.365 List.308 List.309 List.310 List.311 List.312: - let List.367 : Int1 = CallByName Num.22 List.311 List.312; - if List.367 then - let List.376 : {Str} = CallByName List.60 List.308 List.311; - let List.368 : [C [], C {List U8, U64}] = CallByName List.125 List.309 List.376 List.310; - let List.373 : U8 = 1i64; - let List.374 : U8 = GetTagId List.368; - let List.375 : Int1 = lowlevel Eq List.373 List.374; - if List.375 then - let List.313 : {List U8, U64} = UnionAtIndex (Id 1) (Index 0) List.368; - inc List.313; - dec List.368; - let List.371 : U64 = 1i64; - let List.370 : U64 = CallByName Num.19 List.311 List.371; - jump List.365 List.308 List.313 List.310 List.370 List.312; +procedure List.86 (List.461, List.462, List.463, List.464, List.465): + joinpoint List.433 List.364 List.365 List.366 List.367 List.368: + let List.435 : Int1 = CallByName Num.22 List.367 List.368; + if List.435 then + let List.444 : {Str} = CallByName List.66 List.364 List.367; + let List.436 : [C [], C {List U8, U64}] = CallByName List.133 List.365 List.444 List.366; + let List.441 : U8 = 1i64; + let List.442 : U8 = GetTagId List.436; + let List.443 : Int1 = lowlevel Eq List.441 List.442; + if List.443 then + let List.369 : {List U8, U64} = UnionAtIndex (Id 1) (Index 0) List.436; + inc List.369; + dec List.436; + let List.439 : U64 = 1i64; + let List.438 : U64 = CallByName Num.19 List.367 List.439; + jump List.433 List.364 List.369 List.366 List.438 List.368; else - let List.314 : [] = UnionAtIndex (Id 0) (Index 0) List.368; - dec List.368; - let List.372 : [C [], C {List U8, U64}] = TagId(0) List.314; - ret List.372; + let List.370 : [] = UnionAtIndex (Id 0) (Index 0) List.436; + dec List.436; + let List.440 : [C [], C {List U8, U64}] = TagId(0) List.370; + ret List.440; else - let List.366 : [C [], C {List U8, U64}] = TagId(1) List.309; - ret List.366; + let List.434 : [C [], C {List U8, U64}] = TagId(1) List.365; + ret List.434; in - jump List.365 List.393 List.394 List.395 List.396 List.397; + jump 
List.433 List.461 List.462 List.463 List.464 List.465; procedure Num.123 (#Attr.2): let Num.266 : U8 = lowlevel NumIntCast #Attr.2; diff --git a/crates/compiler/test_mono/generated/encode_derived_tag_two_payloads_string.txt b/crates/compiler/test_mono/generated/encode_derived_tag_two_payloads_string.txt index 4c8ea40bdc..1329435b75 100644 --- a/crates/compiler/test_mono/generated/encode_derived_tag_two_payloads_string.txt +++ b/crates/compiler/test_mono/generated/encode_derived_tag_two_payloads_string.txt @@ -140,95 +140,95 @@ procedure Json.97 (Json.117, Json.103): else jump Json.131 Json.104; -procedure List.125 (List.126, List.127, #Attr.12): - let List.124 : {} = StructAtIndex 0 #Attr.12; - let List.379 : {List U8, U64} = CallByName Json.97 List.126 List.127; - let List.378 : [C [], C {List U8, U64}] = TagId(1) List.379; - ret List.378; +procedure List.133 (List.134, List.135, #Attr.12): + let List.132 : {} = StructAtIndex 0 #Attr.12; + let List.447 : {List U8, U64} = CallByName Json.97 List.134 List.135; + let List.446 : [C [], C {List U8, U64}] = TagId(1) List.447; + ret List.446; -procedure List.18 (List.122, List.123, List.124): - let List.355 : {{}} = Struct {List.124}; - let List.349 : [C [], C {List U8, U64}] = CallByName List.63 List.122 List.123 List.355; - let List.352 : U8 = 1i64; - let List.353 : U8 = GetTagId List.349; - let List.354 : Int1 = lowlevel Eq List.352 List.353; - if List.354 then - let List.129 : {List U8, U64} = UnionAtIndex (Id 1) (Index 0) List.349; - inc List.129; - dec List.349; - ret List.129; +procedure List.18 (List.130, List.131, List.132): + let List.423 : {{}} = Struct {List.132}; + let List.417 : [C [], C {List U8, U64}] = CallByName List.75 List.130 List.131 List.423; + let List.420 : U8 = 1i64; + let List.421 : U8 = GetTagId List.417; + let List.422 : Int1 = lowlevel Eq List.420 List.421; + if List.422 then + let List.137 : {List U8, U64} = UnionAtIndex (Id 1) (Index 0) List.417; + inc List.137; + dec List.417; + ret List.137; else - let List.130 : [] = UnionAtIndex (Id 0) (Index 0) List.349; - dec List.349; - let List.351 : {List U8, U64} = CallByName List.64 List.130; - ret List.351; + let List.138 : [] = UnionAtIndex (Id 0) (Index 0) List.417; + dec List.417; + let List.419 : {List U8, U64} = CallByName List.69 List.138; + ret List.419; -procedure List.4 (List.93, List.94): - let List.348 : U64 = 1i64; - let List.347 : List U8 = CallByName List.65 List.93 List.348; - let List.346 : List U8 = CallByName List.66 List.347 List.94; - ret List.346; +procedure List.4 (List.101, List.102): + let List.416 : U64 = 1i64; + let List.415 : List U8 = CallByName List.70 List.101 List.416; + let List.414 : List U8 = CallByName List.71 List.415 List.102; + ret List.414; procedure List.6 (#Attr.2): - let List.317 : U64 = lowlevel ListLen #Attr.2; - ret List.317; + let List.385 : U64 = lowlevel ListLen #Attr.2; + ret List.385; procedure List.6 (#Attr.2): - let List.356 : U64 = lowlevel ListLen #Attr.2; - ret List.356; - -procedure List.60 (#Attr.2, #Attr.3): - let List.377 : {Str} = lowlevel ListGetUnsafe #Attr.2 #Attr.3; - ret List.377; - -procedure List.63 (List.305, List.306, List.307): - let List.363 : U64 = 0i64; - let List.364 : U64 = CallByName List.6 List.305; - let List.362 : [C [], C {List U8, U64}] = CallByName List.80 List.305 List.306 List.307 List.363 List.364; - ret List.362; - -procedure List.64 (#Attr.2): - let List.361 : {List U8, U64} = lowlevel Unreachable #Attr.2; - ret List.361; - -procedure List.65 (#Attr.2, #Attr.3): - let List.360 : List 
U8 = lowlevel ListReserve #Attr.2 #Attr.3; - ret List.360; + let List.424 : U64 = lowlevel ListLen #Attr.2; + ret List.424; procedure List.66 (#Attr.2, #Attr.3): - let List.359 : List U8 = lowlevel ListAppendUnsafe #Attr.2 #Attr.3; - ret List.359; + let List.445 : {Str} = lowlevel ListGetUnsafe #Attr.2 #Attr.3; + ret List.445; + +procedure List.69 (#Attr.2): + let List.429 : {List U8, U64} = lowlevel Unreachable #Attr.2; + ret List.429; + +procedure List.70 (#Attr.2, #Attr.3): + let List.428 : List U8 = lowlevel ListReserve #Attr.2 #Attr.3; + ret List.428; + +procedure List.71 (#Attr.2, #Attr.3): + let List.427 : List U8 = lowlevel ListAppendUnsafe #Attr.2 #Attr.3; + ret List.427; + +procedure List.75 (List.361, List.362, List.363): + let List.431 : U64 = 0i64; + let List.432 : U64 = CallByName List.6 List.361; + let List.430 : [C [], C {List U8, U64}] = CallByName List.86 List.361 List.362 List.363 List.431 List.432; + ret List.430; procedure List.8 (#Attr.2, #Attr.3): - let List.358 : List U8 = lowlevel ListConcat #Attr.2 #Attr.3; - ret List.358; + let List.426 : List U8 = lowlevel ListConcat #Attr.2 #Attr.3; + ret List.426; -procedure List.80 (List.393, List.394, List.395, List.396, List.397): - joinpoint List.365 List.308 List.309 List.310 List.311 List.312: - let List.367 : Int1 = CallByName Num.22 List.311 List.312; - if List.367 then - let List.376 : {Str} = CallByName List.60 List.308 List.311; - let List.368 : [C [], C {List U8, U64}] = CallByName List.125 List.309 List.376 List.310; - let List.373 : U8 = 1i64; - let List.374 : U8 = GetTagId List.368; - let List.375 : Int1 = lowlevel Eq List.373 List.374; - if List.375 then - let List.313 : {List U8, U64} = UnionAtIndex (Id 1) (Index 0) List.368; - inc List.313; - dec List.368; - let List.371 : U64 = 1i64; - let List.370 : U64 = CallByName Num.19 List.311 List.371; - jump List.365 List.308 List.313 List.310 List.370 List.312; +procedure List.86 (List.461, List.462, List.463, List.464, List.465): + joinpoint List.433 List.364 List.365 List.366 List.367 List.368: + let List.435 : Int1 = CallByName Num.22 List.367 List.368; + if List.435 then + let List.444 : {Str} = CallByName List.66 List.364 List.367; + let List.436 : [C [], C {List U8, U64}] = CallByName List.133 List.365 List.444 List.366; + let List.441 : U8 = 1i64; + let List.442 : U8 = GetTagId List.436; + let List.443 : Int1 = lowlevel Eq List.441 List.442; + if List.443 then + let List.369 : {List U8, U64} = UnionAtIndex (Id 1) (Index 0) List.436; + inc List.369; + dec List.436; + let List.439 : U64 = 1i64; + let List.438 : U64 = CallByName Num.19 List.367 List.439; + jump List.433 List.364 List.369 List.366 List.438 List.368; else - let List.314 : [] = UnionAtIndex (Id 0) (Index 0) List.368; - dec List.368; - let List.372 : [C [], C {List U8, U64}] = TagId(0) List.314; - ret List.372; + let List.370 : [] = UnionAtIndex (Id 0) (Index 0) List.436; + dec List.436; + let List.440 : [C [], C {List U8, U64}] = TagId(0) List.370; + ret List.440; else - let List.366 : [C [], C {List U8, U64}] = TagId(1) List.309; - ret List.366; + let List.434 : [C [], C {List U8, U64}] = TagId(1) List.365; + ret List.434; in - jump List.365 List.393 List.394 List.395 List.396 List.397; + jump List.433 List.461 List.462 List.463 List.464 List.465; procedure Num.123 (#Attr.2): let Num.266 : U8 = lowlevel NumIntCast #Attr.2; diff --git a/crates/compiler/test_mono/generated/ir_int_add.txt b/crates/compiler/test_mono/generated/ir_int_add.txt index 632b8b3783..6b56056268 100644 --- 
a/crates/compiler/test_mono/generated/ir_int_add.txt +++ b/crates/compiler/test_mono/generated/ir_int_add.txt @@ -1,6 +1,6 @@ procedure List.6 (#Attr.2): - let List.317 : U64 = lowlevel ListLen #Attr.2; - ret List.317; + let List.385 : U64 = lowlevel ListLen #Attr.2; + ret List.385; procedure Num.19 (#Attr.2, #Attr.3): let Num.259 : U64 = lowlevel NumAdd #Attr.2 #Attr.3; diff --git a/crates/compiler/test_mono/generated/issue_2583_specialize_errors_behind_unified_branches.txt b/crates/compiler/test_mono/generated/issue_2583_specialize_errors_behind_unified_branches.txt index 4e003b4439..793db11a85 100644 --- a/crates/compiler/test_mono/generated/issue_2583_specialize_errors_behind_unified_branches.txt +++ b/crates/compiler/test_mono/generated/issue_2583_specialize_errors_behind_unified_branches.txt @@ -2,40 +2,40 @@ procedure Bool.7 (#Attr.2, #Attr.3): let Bool.9 : Int1 = lowlevel Eq #Attr.2 #Attr.3; ret Bool.9; -procedure List.2 (List.82, List.83): - let List.331 : U64 = CallByName List.6 List.82; - let List.327 : Int1 = CallByName Num.22 List.83 List.331; - if List.327 then - let List.329 : I64 = CallByName List.60 List.82 List.83; - let List.328 : [C {}, C I64] = TagId(1) List.329; - ret List.328; +procedure List.2 (List.90, List.91): + let List.399 : U64 = CallByName List.6 List.90; + let List.395 : Int1 = CallByName Num.22 List.91 List.399; + if List.395 then + let List.397 : I64 = CallByName List.66 List.90 List.91; + let List.396 : [C {}, C I64] = TagId(1) List.397; + ret List.396; else - let List.326 : {} = Struct {}; - let List.325 : [C {}, C I64] = TagId(0) List.326; - ret List.325; + let List.394 : {} = Struct {}; + let List.393 : [C {}, C I64] = TagId(0) List.394; + ret List.393; procedure List.6 (#Attr.2): - let List.332 : U64 = lowlevel ListLen #Attr.2; - ret List.332; + let List.400 : U64 = lowlevel ListLen #Attr.2; + ret List.400; -procedure List.60 (#Attr.2, #Attr.3): - let List.330 : I64 = lowlevel ListGetUnsafe #Attr.2 #Attr.3; - ret List.330; +procedure List.66 (#Attr.2, #Attr.3): + let List.398 : I64 = lowlevel ListGetUnsafe #Attr.2 #Attr.3; + ret List.398; -procedure List.9 (List.210): - let List.324 : U64 = 0i64; - let List.317 : [C {}, C I64] = CallByName List.2 List.210 List.324; - let List.321 : U8 = 1i64; - let List.322 : U8 = GetTagId List.317; - let List.323 : Int1 = lowlevel Eq List.321 List.322; - if List.323 then - let List.211 : I64 = UnionAtIndex (Id 1) (Index 0) List.317; - let List.318 : [C Int1, C I64] = TagId(1) List.211; - ret List.318; +procedure List.9 (List.218): + let List.392 : U64 = 0i64; + let List.385 : [C {}, C I64] = CallByName List.2 List.218 List.392; + let List.389 : U8 = 1i64; + let List.390 : U8 = GetTagId List.385; + let List.391 : Int1 = lowlevel Eq List.389 List.390; + if List.391 then + let List.219 : I64 = UnionAtIndex (Id 1) (Index 0) List.385; + let List.386 : [C Int1, C I64] = TagId(1) List.219; + ret List.386; else - let List.320 : Int1 = true; - let List.319 : [C Int1, C I64] = TagId(0) List.320; - ret List.319; + let List.388 : Int1 = true; + let List.387 : [C Int1, C I64] = TagId(0) List.388; + ret List.387; procedure Num.22 (#Attr.2, #Attr.3): let Num.257 : Int1 = lowlevel NumLt #Attr.2 #Attr.3; diff --git a/crates/compiler/test_mono/generated/issue_3669.txt b/crates/compiler/test_mono/generated/issue_3669.txt new file mode 100644 index 0000000000..5025608363 --- /dev/null +++ b/crates/compiler/test_mono/generated/issue_3669.txt @@ -0,0 +1,26 @@ +procedure Bool.7 (#Attr.2, #Attr.3): + let Bool.9 : Int1 = lowlevel Eq #Attr.2 
#Attr.3; + ret Bool.9; + +procedure Test.2 (Test.19): + joinpoint Test.13 Test.7: + let Test.16 : U8 = 1i64; + let Test.17 : U8 = GetTagId Test.7; + let Test.18 : Int1 = lowlevel Eq Test.16 Test.17; + if Test.18 then + let Test.14 : {} = Struct {}; + ret Test.14; + else + let Test.5 : [, C *self] = UnionAtIndex (Id 0) (Index 0) Test.7; + jump Test.13 Test.5; + in + jump Test.13 Test.19; + +procedure Test.0 (): + let Test.12 : [, C *self] = TagId(1) ; + let Test.10 : {} = CallByName Test.2 Test.12; + dec Test.12; + let Test.11 : {} = Struct {}; + let Test.8 : Int1 = CallByName Bool.7 Test.10 Test.11; + let Test.9 : Str = ""; + ret Test.9; diff --git a/crates/compiler/test_mono/generated/list_append.txt b/crates/compiler/test_mono/generated/list_append.txt index 5c4b4ef245..77dd229693 100644 --- a/crates/compiler/test_mono/generated/list_append.txt +++ b/crates/compiler/test_mono/generated/list_append.txt @@ -1,16 +1,16 @@ -procedure List.4 (List.93, List.94): - let List.319 : U64 = 1i64; - let List.318 : List I64 = CallByName List.65 List.93 List.319; - let List.317 : List I64 = CallByName List.66 List.318 List.94; - ret List.317; +procedure List.4 (List.101, List.102): + let List.387 : U64 = 1i64; + let List.386 : List I64 = CallByName List.70 List.101 List.387; + let List.385 : List I64 = CallByName List.71 List.386 List.102; + ret List.385; -procedure List.65 (#Attr.2, #Attr.3): - let List.321 : List I64 = lowlevel ListReserve #Attr.2 #Attr.3; - ret List.321; +procedure List.70 (#Attr.2, #Attr.3): + let List.389 : List I64 = lowlevel ListReserve #Attr.2 #Attr.3; + ret List.389; -procedure List.66 (#Attr.2, #Attr.3): - let List.320 : List I64 = lowlevel ListAppendUnsafe #Attr.2 #Attr.3; - ret List.320; +procedure List.71 (#Attr.2, #Attr.3): + let List.388 : List I64 = lowlevel ListAppendUnsafe #Attr.2 #Attr.3; + ret List.388; procedure Test.0 (): let Test.2 : List I64 = Array [1i64]; diff --git a/crates/compiler/test_mono/generated/list_append_closure.txt b/crates/compiler/test_mono/generated/list_append_closure.txt index 22943210ca..c2718f77a5 100644 --- a/crates/compiler/test_mono/generated/list_append_closure.txt +++ b/crates/compiler/test_mono/generated/list_append_closure.txt @@ -1,16 +1,16 @@ -procedure List.4 (List.93, List.94): - let List.319 : U64 = 1i64; - let List.318 : List I64 = CallByName List.65 List.93 List.319; - let List.317 : List I64 = CallByName List.66 List.318 List.94; - ret List.317; +procedure List.4 (List.101, List.102): + let List.387 : U64 = 1i64; + let List.386 : List I64 = CallByName List.70 List.101 List.387; + let List.385 : List I64 = CallByName List.71 List.386 List.102; + ret List.385; -procedure List.65 (#Attr.2, #Attr.3): - let List.321 : List I64 = lowlevel ListReserve #Attr.2 #Attr.3; - ret List.321; +procedure List.70 (#Attr.2, #Attr.3): + let List.389 : List I64 = lowlevel ListReserve #Attr.2 #Attr.3; + ret List.389; -procedure List.66 (#Attr.2, #Attr.3): - let List.320 : List I64 = lowlevel ListAppendUnsafe #Attr.2 #Attr.3; - ret List.320; +procedure List.71 (#Attr.2, #Attr.3): + let List.388 : List I64 = lowlevel ListAppendUnsafe #Attr.2 #Attr.3; + ret List.388; procedure Test.1 (Test.2): let Test.6 : I64 = 42i64; diff --git a/crates/compiler/test_mono/generated/list_cannot_update_inplace.txt b/crates/compiler/test_mono/generated/list_cannot_update_inplace.txt index e27a1377a2..fb0edd59df 100644 --- a/crates/compiler/test_mono/generated/list_cannot_update_inplace.txt +++ b/crates/compiler/test_mono/generated/list_cannot_update_inplace.txt @@ 
-1,27 +1,27 @@ -procedure List.3 (List.90, List.91, List.92): - let List.320 : {List I64, I64} = CallByName List.57 List.90 List.91 List.92; - let List.319 : List I64 = StructAtIndex 0 List.320; - inc List.319; - dec List.320; - ret List.319; - -procedure List.57 (List.87, List.88, List.89): - let List.325 : U64 = CallByName List.6 List.87; - let List.322 : Int1 = CallByName Num.22 List.88 List.325; - if List.322 then - let List.323 : {List I64, I64} = CallByName List.61 List.87 List.88 List.89; - ret List.323; - else - let List.321 : {List I64, I64} = Struct {List.87, List.89}; - ret List.321; +procedure List.3 (List.98, List.99, List.100): + let List.388 : {List I64, I64} = CallByName List.64 List.98 List.99 List.100; + let List.387 : List I64 = StructAtIndex 0 List.388; + inc List.387; + dec List.388; + ret List.387; procedure List.6 (#Attr.2): - let List.318 : U64 = lowlevel ListLen #Attr.2; - ret List.318; + let List.386 : U64 = lowlevel ListLen #Attr.2; + ret List.386; -procedure List.61 (#Attr.2, #Attr.3, #Attr.4): - let List.324 : {List I64, I64} = lowlevel ListReplaceUnsafe #Attr.2 #Attr.3 #Attr.4; - ret List.324; +procedure List.64 (List.95, List.96, List.97): + let List.393 : U64 = CallByName List.6 List.95; + let List.390 : Int1 = CallByName Num.22 List.96 List.393; + if List.390 then + let List.391 : {List I64, I64} = CallByName List.67 List.95 List.96 List.97; + ret List.391; + else + let List.389 : {List I64, I64} = Struct {List.95, List.97}; + ret List.389; + +procedure List.67 (#Attr.2, #Attr.3, #Attr.4): + let List.392 : {List I64, I64} = lowlevel ListReplaceUnsafe #Attr.2 #Attr.3 #Attr.4; + ret List.392; procedure Num.19 (#Attr.2, #Attr.3): let Num.257 : U64 = lowlevel NumAdd #Attr.2 #Attr.3; diff --git a/crates/compiler/test_mono/generated/list_get.txt b/crates/compiler/test_mono/generated/list_get.txt index 76d194510a..a6d352d43e 100644 --- a/crates/compiler/test_mono/generated/list_get.txt +++ b/crates/compiler/test_mono/generated/list_get.txt @@ -1,22 +1,22 @@ -procedure List.2 (List.82, List.83): - let List.322 : U64 = CallByName List.6 List.82; - let List.319 : Int1 = CallByName Num.22 List.83 List.322; - if List.319 then - let List.321 : I64 = CallByName List.60 List.82 List.83; - let List.320 : [C {}, C I64] = TagId(1) List.321; - ret List.320; +procedure List.2 (List.90, List.91): + let List.390 : U64 = CallByName List.6 List.90; + let List.387 : Int1 = CallByName Num.22 List.91 List.390; + if List.387 then + let List.389 : I64 = CallByName List.66 List.90 List.91; + let List.388 : [C {}, C I64] = TagId(1) List.389; + ret List.388; else - let List.318 : {} = Struct {}; - let List.317 : [C {}, C I64] = TagId(0) List.318; - ret List.317; + let List.386 : {} = Struct {}; + let List.385 : [C {}, C I64] = TagId(0) List.386; + ret List.385; procedure List.6 (#Attr.2): - let List.324 : U64 = lowlevel ListLen #Attr.2; - ret List.324; + let List.392 : U64 = lowlevel ListLen #Attr.2; + ret List.392; -procedure List.60 (#Attr.2, #Attr.3): - let List.323 : I64 = lowlevel ListGetUnsafe #Attr.2 #Attr.3; - ret List.323; +procedure List.66 (#Attr.2, #Attr.3): + let List.391 : I64 = lowlevel ListGetUnsafe #Attr.2 #Attr.3; + ret List.391; procedure Num.22 (#Attr.2, #Attr.3): let Num.257 : Int1 = lowlevel NumLt #Attr.2 #Attr.3; diff --git a/crates/compiler/test_mono/generated/list_len.txt b/crates/compiler/test_mono/generated/list_len.txt index 8a7573f008..6401dd667d 100644 --- a/crates/compiler/test_mono/generated/list_len.txt +++ 
b/crates/compiler/test_mono/generated/list_len.txt @@ -1,10 +1,10 @@ procedure List.6 (#Attr.2): - let List.317 : U64 = lowlevel ListLen #Attr.2; - ret List.317; + let List.385 : U64 = lowlevel ListLen #Attr.2; + ret List.385; procedure List.6 (#Attr.2): - let List.318 : U64 = lowlevel ListLen #Attr.2; - ret List.318; + let List.386 : U64 = lowlevel ListLen #Attr.2; + ret List.386; procedure Num.19 (#Attr.2, #Attr.3): let Num.257 : U64 = lowlevel NumAdd #Attr.2 #Attr.3; diff --git a/crates/compiler/test_mono/generated/list_map_closure_borrows.txt b/crates/compiler/test_mono/generated/list_map_closure_borrows.txt index 3871bcd628..881b2ed236 100644 --- a/crates/compiler/test_mono/generated/list_map_closure_borrows.txt +++ b/crates/compiler/test_mono/generated/list_map_closure_borrows.txt @@ -1,26 +1,26 @@ -procedure List.2 (List.82, List.83): - let List.322 : U64 = CallByName List.6 List.82; - let List.319 : Int1 = CallByName Num.22 List.83 List.322; - if List.319 then - let List.321 : Str = CallByName List.60 List.82 List.83; - let List.320 : [C {}, C Str] = TagId(1) List.321; - ret List.320; +procedure List.2 (List.90, List.91): + let List.390 : U64 = CallByName List.6 List.90; + let List.387 : Int1 = CallByName Num.22 List.91 List.390; + if List.387 then + let List.389 : Str = CallByName List.66 List.90 List.91; + let List.388 : [C {}, C Str] = TagId(1) List.389; + ret List.388; else - let List.318 : {} = Struct {}; - let List.317 : [C {}, C Str] = TagId(0) List.318; - ret List.317; + let List.386 : {} = Struct {}; + let List.385 : [C {}, C Str] = TagId(0) List.386; + ret List.385; procedure List.5 (#Attr.2, #Attr.3): - let List.323 : List Str = lowlevel ListMap { xs: `#Attr.#arg1` } #Attr.2 Test.3 #Attr.3; - ret List.323; + let List.391 : List Str = lowlevel ListMap { xs: `#Attr.#arg1` } #Attr.2 Test.3 #Attr.3; + ret List.391; procedure List.6 (#Attr.2): - let List.325 : U64 = lowlevel ListLen #Attr.2; - ret List.325; + let List.393 : U64 = lowlevel ListLen #Attr.2; + ret List.393; -procedure List.60 (#Attr.2, #Attr.3): - let List.324 : Str = lowlevel ListGetUnsafe #Attr.2 #Attr.3; - ret List.324; +procedure List.66 (#Attr.2, #Attr.3): + let List.392 : Str = lowlevel ListGetUnsafe #Attr.2 #Attr.3; + ret List.392; procedure Num.22 (#Attr.2, #Attr.3): let Num.257 : Int1 = lowlevel NumLt #Attr.2 #Attr.3; diff --git a/crates/compiler/test_mono/generated/list_map_closure_owns.txt b/crates/compiler/test_mono/generated/list_map_closure_owns.txt index 12773e8a59..a78d58d911 100644 --- a/crates/compiler/test_mono/generated/list_map_closure_owns.txt +++ b/crates/compiler/test_mono/generated/list_map_closure_owns.txt @@ -1,28 +1,28 @@ -procedure List.2 (List.82, List.83): - let List.322 : U64 = CallByName List.6 List.82; - let List.319 : Int1 = CallByName Num.22 List.83 List.322; - if List.319 then - let List.321 : Str = CallByName List.60 List.82 List.83; - let List.320 : [C {}, C Str] = TagId(1) List.321; - ret List.320; +procedure List.2 (List.90, List.91): + let List.390 : U64 = CallByName List.6 List.90; + let List.387 : Int1 = CallByName Num.22 List.91 List.390; + if List.387 then + let List.389 : Str = CallByName List.66 List.90 List.91; + let List.388 : [C {}, C Str] = TagId(1) List.389; + ret List.388; else - let List.318 : {} = Struct {}; - let List.317 : [C {}, C Str] = TagId(0) List.318; - ret List.317; + let List.386 : {} = Struct {}; + let List.385 : [C {}, C Str] = TagId(0) List.386; + ret List.385; procedure List.5 (#Attr.2, #Attr.3): inc #Attr.2; - let List.323 : List Str = 
lowlevel ListMap { xs: `#Attr.#arg1` } #Attr.2 Test.3 #Attr.3; + let List.391 : List Str = lowlevel ListMap { xs: `#Attr.#arg1` } #Attr.2 Test.3 #Attr.3; decref #Attr.2; - ret List.323; + ret List.391; procedure List.6 (#Attr.2): - let List.325 : U64 = lowlevel ListLen #Attr.2; - ret List.325; + let List.393 : U64 = lowlevel ListLen #Attr.2; + ret List.393; -procedure List.60 (#Attr.2, #Attr.3): - let List.324 : Str = lowlevel ListGetUnsafe #Attr.2 #Attr.3; - ret List.324; +procedure List.66 (#Attr.2, #Attr.3): + let List.392 : Str = lowlevel ListGetUnsafe #Attr.2 #Attr.3; + ret List.392; procedure Num.22 (#Attr.2, #Attr.3): let Num.257 : Int1 = lowlevel NumLt #Attr.2 #Attr.3; diff --git a/crates/compiler/test_mono/generated/list_pass_to_function.txt b/crates/compiler/test_mono/generated/list_pass_to_function.txt index d6a8009ed5..b49095993b 100644 --- a/crates/compiler/test_mono/generated/list_pass_to_function.txt +++ b/crates/compiler/test_mono/generated/list_pass_to_function.txt @@ -1,27 +1,27 @@ -procedure List.3 (List.90, List.91, List.92): - let List.318 : {List I64, I64} = CallByName List.57 List.90 List.91 List.92; - let List.317 : List I64 = StructAtIndex 0 List.318; - inc List.317; - dec List.318; - ret List.317; - -procedure List.57 (List.87, List.88, List.89): - let List.323 : U64 = CallByName List.6 List.87; - let List.320 : Int1 = CallByName Num.22 List.88 List.323; - if List.320 then - let List.321 : {List I64, I64} = CallByName List.61 List.87 List.88 List.89; - ret List.321; - else - let List.319 : {List I64, I64} = Struct {List.87, List.89}; - ret List.319; +procedure List.3 (List.98, List.99, List.100): + let List.386 : {List I64, I64} = CallByName List.64 List.98 List.99 List.100; + let List.385 : List I64 = StructAtIndex 0 List.386; + inc List.385; + dec List.386; + ret List.385; procedure List.6 (#Attr.2): - let List.324 : U64 = lowlevel ListLen #Attr.2; - ret List.324; + let List.392 : U64 = lowlevel ListLen #Attr.2; + ret List.392; -procedure List.61 (#Attr.2, #Attr.3, #Attr.4): - let List.322 : {List I64, I64} = lowlevel ListReplaceUnsafe #Attr.2 #Attr.3 #Attr.4; - ret List.322; +procedure List.64 (List.95, List.96, List.97): + let List.391 : U64 = CallByName List.6 List.95; + let List.388 : Int1 = CallByName Num.22 List.96 List.391; + if List.388 then + let List.389 : {List I64, I64} = CallByName List.67 List.95 List.96 List.97; + ret List.389; + else + let List.387 : {List I64, I64} = Struct {List.95, List.97}; + ret List.387; + +procedure List.67 (#Attr.2, #Attr.3, #Attr.4): + let List.390 : {List I64, I64} = lowlevel ListReplaceUnsafe #Attr.2 #Attr.3 #Attr.4; + ret List.390; procedure Num.22 (#Attr.2, #Attr.3): let Num.257 : Int1 = lowlevel NumLt #Attr.2 #Attr.3; diff --git a/crates/compiler/test_mono/generated/list_sort_asc.txt b/crates/compiler/test_mono/generated/list_sort_asc.txt index 6ebd6fcaa7..a824deb62c 100644 --- a/crates/compiler/test_mono/generated/list_sort_asc.txt +++ b/crates/compiler/test_mono/generated/list_sort_asc.txt @@ -1,16 +1,16 @@ procedure List.28 (#Attr.2, #Attr.3): - let List.319 : List I64 = lowlevel ListSortWith { xs: `#Attr.#arg1` } #Attr.2 Num.46 #Attr.3; + let List.387 : List I64 = lowlevel ListSortWith { xs: `#Attr.#arg1` } #Attr.2 Num.46 #Attr.3; let #Derived_gen.0 : Int1 = lowlevel ListIsUnique #Attr.2; if #Derived_gen.0 then - ret List.319; + ret List.387; else decref #Attr.2; - ret List.319; + ret List.387; -procedure List.54 (List.205): - let List.318 : {} = Struct {}; - let List.317 : List I64 = CallByName List.28 List.205 
List.318; - ret List.317; +procedure List.59 (List.213): + let List.386 : {} = Struct {}; + let List.385 : List I64 = CallByName List.28 List.213 List.386; + ret List.385; procedure Num.46 (#Attr.2, #Attr.3): let Num.257 : U8 = lowlevel NumCompare #Attr.2 #Attr.3; @@ -18,5 +18,5 @@ procedure Num.46 (#Attr.2, #Attr.3): procedure Test.0 (): let Test.2 : List I64 = Array [4i64, 3i64, 2i64, 1i64]; - let Test.1 : List I64 = CallByName List.54 Test.2; + let Test.1 : List I64 = CallByName List.59 Test.2; ret Test.1; diff --git a/crates/compiler/test_mono/generated/quicksort_swap.txt b/crates/compiler/test_mono/generated/quicksort_swap.txt index 27d5db2025..475a7f21d4 100644 --- a/crates/compiler/test_mono/generated/quicksort_swap.txt +++ b/crates/compiler/test_mono/generated/quicksort_swap.txt @@ -1,43 +1,43 @@ -procedure List.2 (List.82, List.83): - let List.332 : U64 = CallByName List.6 List.82; - let List.329 : Int1 = CallByName Num.22 List.83 List.332; - if List.329 then - let List.331 : I64 = CallByName List.60 List.82 List.83; - let List.330 : [C {}, C I64] = TagId(1) List.331; - ret List.330; +procedure List.2 (List.90, List.91): + let List.400 : U64 = CallByName List.6 List.90; + let List.397 : Int1 = CallByName Num.22 List.91 List.400; + if List.397 then + let List.399 : I64 = CallByName List.66 List.90 List.91; + let List.398 : [C {}, C I64] = TagId(1) List.399; + ret List.398; else - let List.328 : {} = Struct {}; - let List.327 : [C {}, C I64] = TagId(0) List.328; - ret List.327; + let List.396 : {} = Struct {}; + let List.395 : [C {}, C I64] = TagId(0) List.396; + ret List.395; -procedure List.3 (List.90, List.91, List.92): - let List.320 : {List I64, I64} = CallByName List.57 List.90 List.91 List.92; - let List.319 : List I64 = StructAtIndex 0 List.320; - inc List.319; - dec List.320; - ret List.319; - -procedure List.57 (List.87, List.88, List.89): - let List.337 : U64 = CallByName List.6 List.87; - let List.334 : Int1 = CallByName Num.22 List.88 List.337; - if List.334 then - let List.335 : {List I64, I64} = CallByName List.61 List.87 List.88 List.89; - ret List.335; - else - let List.333 : {List I64, I64} = Struct {List.87, List.89}; - ret List.333; +procedure List.3 (List.98, List.99, List.100): + let List.388 : {List I64, I64} = CallByName List.64 List.98 List.99 List.100; + let List.387 : List I64 = StructAtIndex 0 List.388; + inc List.387; + dec List.388; + ret List.387; procedure List.6 (#Attr.2): - let List.338 : U64 = lowlevel ListLen #Attr.2; - ret List.338; + let List.406 : U64 = lowlevel ListLen #Attr.2; + ret List.406; -procedure List.60 (#Attr.2, #Attr.3): - let List.339 : I64 = lowlevel ListGetUnsafe #Attr.2 #Attr.3; - ret List.339; +procedure List.64 (List.95, List.96, List.97): + let List.405 : U64 = CallByName List.6 List.95; + let List.402 : Int1 = CallByName Num.22 List.96 List.405; + if List.402 then + let List.403 : {List I64, I64} = CallByName List.67 List.95 List.96 List.97; + ret List.403; + else + let List.401 : {List I64, I64} = Struct {List.95, List.97}; + ret List.401; -procedure List.61 (#Attr.2, #Attr.3, #Attr.4): - let List.336 : {List I64, I64} = lowlevel ListReplaceUnsafe #Attr.2 #Attr.3 #Attr.4; - ret List.336; +procedure List.66 (#Attr.2, #Attr.3): + let List.407 : I64 = lowlevel ListGetUnsafe #Attr.2 #Attr.3; + ret List.407; + +procedure List.67 (#Attr.2, #Attr.3, #Attr.4): + let List.404 : {List I64, I64} = lowlevel ListReplaceUnsafe #Attr.2 #Attr.3 #Attr.4; + ret List.404; procedure Num.22 (#Attr.2, #Attr.3): let Num.259 : Int1 = lowlevel 
NumLt #Attr.2 #Attr.3; diff --git a/crates/compiler/test_mono/generated/rigids.txt b/crates/compiler/test_mono/generated/rigids.txt index 6b37dbf456..95cb00abaa 100644 --- a/crates/compiler/test_mono/generated/rigids.txt +++ b/crates/compiler/test_mono/generated/rigids.txt @@ -1,43 +1,43 @@ -procedure List.2 (List.82, List.83): - let List.332 : U64 = CallByName List.6 List.82; - let List.329 : Int1 = CallByName Num.22 List.83 List.332; - if List.329 then - let List.331 : I64 = CallByName List.60 List.82 List.83; - let List.330 : [C {}, C I64] = TagId(1) List.331; - ret List.330; +procedure List.2 (List.90, List.91): + let List.400 : U64 = CallByName List.6 List.90; + let List.397 : Int1 = CallByName Num.22 List.91 List.400; + if List.397 then + let List.399 : I64 = CallByName List.66 List.90 List.91; + let List.398 : [C {}, C I64] = TagId(1) List.399; + ret List.398; else - let List.328 : {} = Struct {}; - let List.327 : [C {}, C I64] = TagId(0) List.328; - ret List.327; + let List.396 : {} = Struct {}; + let List.395 : [C {}, C I64] = TagId(0) List.396; + ret List.395; -procedure List.3 (List.90, List.91, List.92): - let List.320 : {List I64, I64} = CallByName List.57 List.90 List.91 List.92; - let List.319 : List I64 = StructAtIndex 0 List.320; - inc List.319; - dec List.320; - ret List.319; - -procedure List.57 (List.87, List.88, List.89): - let List.337 : U64 = CallByName List.6 List.87; - let List.334 : Int1 = CallByName Num.22 List.88 List.337; - if List.334 then - let List.335 : {List I64, I64} = CallByName List.61 List.87 List.88 List.89; - ret List.335; - else - let List.333 : {List I64, I64} = Struct {List.87, List.89}; - ret List.333; +procedure List.3 (List.98, List.99, List.100): + let List.388 : {List I64, I64} = CallByName List.64 List.98 List.99 List.100; + let List.387 : List I64 = StructAtIndex 0 List.388; + inc List.387; + dec List.388; + ret List.387; procedure List.6 (#Attr.2): - let List.338 : U64 = lowlevel ListLen #Attr.2; - ret List.338; + let List.406 : U64 = lowlevel ListLen #Attr.2; + ret List.406; -procedure List.60 (#Attr.2, #Attr.3): - let List.339 : I64 = lowlevel ListGetUnsafe #Attr.2 #Attr.3; - ret List.339; +procedure List.64 (List.95, List.96, List.97): + let List.405 : U64 = CallByName List.6 List.95; + let List.402 : Int1 = CallByName Num.22 List.96 List.405; + if List.402 then + let List.403 : {List I64, I64} = CallByName List.67 List.95 List.96 List.97; + ret List.403; + else + let List.401 : {List I64, I64} = Struct {List.95, List.97}; + ret List.401; -procedure List.61 (#Attr.2, #Attr.3, #Attr.4): - let List.336 : {List I64, I64} = lowlevel ListReplaceUnsafe #Attr.2 #Attr.3 #Attr.4; - ret List.336; +procedure List.66 (#Attr.2, #Attr.3): + let List.407 : I64 = lowlevel ListGetUnsafe #Attr.2 #Attr.3; + ret List.407; + +procedure List.67 (#Attr.2, #Attr.3, #Attr.4): + let List.404 : {List I64, I64} = lowlevel ListReplaceUnsafe #Attr.2 #Attr.3 #Attr.4; + ret List.404; procedure Num.22 (#Attr.2, #Attr.3): let Num.259 : Int1 = lowlevel NumLt #Attr.2 #Attr.3; diff --git a/crates/compiler/test_mono/src/tests.rs b/crates/compiler/test_mono/src/tests.rs index ac925f1023..c4a68e4000 100644 --- a/crates/compiler/test_mono/src/tests.rs +++ b/crates/compiler/test_mono/src/tests.rs @@ -13,6 +13,8 @@ extern crate indoc; #[allow(dead_code)] const EXPANDED_STACK_SIZE: usize = 8 * 1024 * 1024; +use roc_load::ExecutionMode; +use roc_load::LoadConfig; use test_mono_macros::*; use roc_collections::all::MutMap; @@ -91,15 +93,19 @@ fn compiles_to_ir(test_name: &str, src: 
&str) { module_src = &temp; } + let load_config = LoadConfig { + target_info: TARGET_INFO, + threading: Threading::Single, + render: roc_reporting::report::RenderTarget::Generic, + exec_mode: ExecutionMode::Executable, + }; let loaded = roc_load::load_and_monomorphize_from_str( arena, filename, module_src, src_dir, Default::default(), - TARGET_INFO, - roc_reporting::report::RenderTarget::Generic, - Threading::Single, + load_config, ); let mut loaded = match loaded { @@ -1895,3 +1901,24 @@ fn issue_3560_nested_tag_constructor_is_newtype() { "# ) } + +#[mono_test] +fn issue_3669() { + indoc!( + r#" + Peano a := [ + Zero, + Successor (Peano a) + ] + + unwrap : Peano a -> {} + unwrap = \@Peano p -> + when p is + Zero -> {} + Successor inner -> unwrap inner + + when unwrap (@Peano Zero) == {} is + _ -> "" + "# + ) +} diff --git a/crates/compiler/test_mono_macros/Cargo.toml b/crates/compiler/test_mono_macros/Cargo.toml index c5889844c5..80dd69472c 100644 --- a/crates/compiler/test_mono_macros/Cargo.toml +++ b/crates/compiler/test_mono_macros/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "test_mono_macros" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" diff --git a/crates/compiler/types/Cargo.toml b/crates/compiler/types/Cargo.toml index 266d118d6c..63c9a31fd6 100644 --- a/crates/compiler/types/Cargo.toml +++ b/crates/compiler/types/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "roc_types" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" diff --git a/crates/compiler/types/src/pretty_print.rs b/crates/compiler/types/src/pretty_print.rs index 81793a7088..714fdfd647 100644 --- a/crates/compiler/types/src/pretty_print.rs +++ b/crates/compiler/types/src/pretty_print.rs @@ -2,7 +2,7 @@ use crate::subs::{ self, AliasVariables, Content, FlatType, GetSubsSlice, Label, Subs, SubsIndex, UnionLabels, UnionTags, UnsortedUnionLabels, Variable, }; -use crate::types::{name_type_var, RecordField, Uls}; +use crate::types::{name_type_var, name_type_var_with_hint, RecordField, Uls}; use roc_collections::all::MutMap; use roc_module::ident::{Lowercase, TagName}; use roc_module::symbol::{Interns, ModuleId, Symbol}; @@ -202,7 +202,7 @@ fn find_names_needed( ); } } - Structure(Func(arg_vars, _closure_var, ret_var)) => { + Structure(Func(arg_vars, closure_var, ret_var)) => { for index in arg_vars.into_iter() { let var = subs[index]; find_names_needed( @@ -215,6 +215,15 @@ fn find_names_needed( ); } + find_names_needed( + *closure_var, + subs, + roots, + root_appearances, + names_taken, + find_under_alias, + ); + find_names_needed( *ret_var, subs, @@ -455,10 +464,34 @@ fn name_root( subs: &mut Subs, taken: &mut MutMap, ) -> u32 { - let (generated_name, new_letters_used) = - name_type_var(letters_used, &mut taken.keys(), |var, str| { + let (generated_name, new_letters_used) = match subs.get_content_unchecked(root) { + Content::FlexVar(Some(name)) + | Content::RigidVar(name) + | Content::FlexAbleVar(Some(name), _) + | Content::RigidAbleVar(name, _) + | Content::RecursionVar { + opt_name: Some(name), + .. + } => { + let name_hint = &subs[*name]; + if name_hint.as_str() == "*" { + // Give a proper name to named wildcards! 
+ name_type_var(letters_used, &mut taken.keys(), |var, str| { + var.as_str() == str + }) + } else { + let generated = + name_type_var_with_hint(name_hint.as_str(), &mut taken.keys(), |var, str| { + var.as_str() == str + }); + + (generated, letters_used) + } + } + _ => name_type_var(letters_used, &mut taken.keys(), |var, str| { var.as_str() == str - }); + }), + }; taken.insert(generated_name.clone(), root); diff --git a/crates/compiler/types/src/subs.rs b/crates/compiler/types/src/subs.rs index 5fbcc6b3db..5a91563459 100644 --- a/crates/compiler/types/src/subs.rs +++ b/crates/compiler/types/src/subs.rs @@ -361,6 +361,21 @@ impl UlsOfVar { fn rollback_to(&mut self, snapshot: UlsOfVarSnapshot) { *self = snapshot.0; } + + pub fn remove_dependent_unspecialized_lambda_sets<'a>( + &'a mut self, + subs: &'a Subs, + var: Variable, + ) -> impl Iterator + 'a { + let utable = &subs.utable; + let root_var = utable.root_key_without_compacting(var); + + self.0 + .drain_filter(move |cand_var, _| { + utable.root_key_without_compacting(*cand_var) == root_var + }) + .flat_map(|(_, lambda_set_vars)| lambda_set_vars.into_iter()) + } } #[derive(Clone)] @@ -910,7 +925,16 @@ fn subs_fmt_flat_type(this: &FlatType, subs: &Subs, f: &mut fmt::Formatter) -> f RecordField::Required(_) => ':', RecordField::Demanded(_) => ':', }; - write!(f, "{:?} {} {:?}, ", name, separator, content)?; + write!( + f, + "{:?} {} {:?}, ", + name, + separator, + SubsFmtContent( + subs.get_content_without_compacting(*content.as_inner()), + subs + ) + )?; } write!(f, "}}<{:?}>", new_ext) @@ -2342,10 +2366,6 @@ impl AliasVariables { let all_variables_len = (subs.variables.len() as u32 - variables_start) as u16; - if type_variables_len == 3 { - panic!(); - } - Self { variables_start, type_variables_len, diff --git a/crates/compiler/types/src/types.rs b/crates/compiler/types/src/types.rs index 4bdca144aa..c0fe6c0739 100644 --- a/crates/compiler/types/src/types.rs +++ b/crates/compiler/types/src/types.rs @@ -1322,6 +1322,7 @@ impl Type { region, type_got: args.len() as u8, alias_needs: alias.type_variables.len() as u8, + alias_kind: AliasKind::Structural, }); return; } @@ -2028,6 +2029,15 @@ pub enum AliasKind { Opaque, } +impl AliasKind { + pub fn as_str(&self) -> &'static str { + match self { + AliasKind::Structural => "alias", + AliasKind::Opaque => "opaque", + } + } +} + #[derive(Clone, Debug, PartialEq)] pub struct AliasVar { pub name: Lowercase, @@ -2104,6 +2114,7 @@ pub enum Problem { region: Region, type_got: u8, alias_needs: u8, + alias_kind: AliasKind, }, InvalidModule, SolvedTypeError, @@ -2566,6 +2577,7 @@ fn write_type_ext(ext: TypeExt, buf: &mut String) { static THE_LETTER_A: u32 = 'a' as u32; +/// Generates a fresh type variable name, composed of lowercase alphabetic characters in sequence. pub fn name_type_var bool>( letters_used: u32, taken: &mut impl Iterator, @@ -2596,6 +2608,28 @@ pub fn name_type_var bool>( } } +/// Generates a fresh type variable name given a hint, composed of the hint as a prefix and a +/// number as a suffix. For example, given hint `a` we'll name the variable `a`, `a1`, or `a27`. 
+pub fn name_type_var_with_hint bool>( + hint: &str, + taken: &mut impl Iterator, + mut predicate: F, +) -> Lowercase { + if !taken.any(|item| predicate(&item, hint)) { + return hint.into(); + } + + let mut i = 0; + loop { + i += 1; + let cand = format!("{}{}", hint, i); + + if !taken.any(|item| predicate(&item, &cand)) { + return cand.into(); + } + } +} + #[derive(Debug, Copy, Clone)] pub struct RecordFieldsError; @@ -2638,6 +2672,9 @@ pub fn gather_fields_unsorted_iter( // TODO investigate apparently this one pops up in the reporting tests! RigidVar(_) => break, + // Stop on errors in the record + Error => break, + _ => return Err(RecordFieldsError), } } diff --git a/crates/compiler/unify/Cargo.toml b/crates/compiler/unify/Cargo.toml index 653eece5c0..3252648967 100644 --- a/crates/compiler/unify/Cargo.toml +++ b/crates/compiler/unify/Cargo.toml @@ -3,7 +3,7 @@ authors = ["The Roc Contributors"] edition = "2021" license = "UPL-1.0" name = "roc_unify" -version = "0.1.0" +version = "0.0.1" [dependencies] bitflags = "1.3.2" diff --git a/crates/compiler/unify/src/unify.rs b/crates/compiler/unify/src/unify.rs index 0f974a8a64..556d6ea297 100644 --- a/crates/compiler/unify/src/unify.rs +++ b/crates/compiler/unify/src/unify.rs @@ -100,6 +100,10 @@ bitflags! { /// /// For example, t1 += [A Str] says we should "add" the tag "A Str" to the type of "t1". const PRESENT = 1 << 1; + /// Like [`Mode::EQ`], but also instructs the unifier that the ambient lambda set + /// specialization algorithm is running. This has implications for the unification of + /// unspecialized lambda sets; see [`unify_unspecialized_lambdas`]. + const LAMBDA_SET_SPECIALIZATION = Mode::EQ.bits | (1 << 2); } } @@ -114,6 +118,11 @@ impl Mode { self.contains(Mode::PRESENT) } + fn is_lambda_set_specialization(&self) -> bool { + debug_assert!(!self.contains(Mode::EQ | Mode::PRESENT)); + self.contains(Mode::LAMBDA_SET_SPECIALIZATION) + } + fn as_eq(self) -> Self { (self - Mode::PRESENT) | Mode::EQ } @@ -671,46 +680,95 @@ fn unify_two_aliases( env: &mut Env, pool: &mut Pool, ctx: &Context, - // _symbol has an underscore because it's unused in --release builds - _symbol: Symbol, + kind: AliasKind, + symbol: Symbol, args: AliasVariables, real_var: Variable, other_args: AliasVariables, other_real_var: Variable, - other_content: &Content, ) -> Outcome { if args.len() == other_args.len() { let mut outcome = Outcome::default(); - let it = args - .all_variables() + + let args_it = args + .type_variables() .into_iter() - .zip(other_args.all_variables().into_iter()); + .zip(other_args.type_variables().into_iter()); - let length_before = env.subs.len(); + let lambda_set_it = args + .lambda_set_variables() + .into_iter() + .zip(other_args.lambda_set_variables().into_iter()); - for (l, r) in it { + let mut merged_args = Vec::with_capacity(args.type_variables().len()); + let mut merged_lambda_set_args = Vec::with_capacity(args.lambda_set_variables().len()); + debug_assert_eq!( + merged_args.capacity() + merged_lambda_set_args.capacity(), + args.all_variables_len as _ + ); + + for (l, r) in args_it { let l_var = env.subs[l]; let r_var = env.subs[r]; outcome.union(unify_pool(env, pool, l_var, r_var, ctx.mode)); + + let merged_var = choose_merged_var(env.subs, l_var, r_var); + merged_args.push(merged_var); + } + + for (l, r) in lambda_set_it { + let l_var = env.subs[l]; + let r_var = env.subs[r]; + outcome.union(unify_pool(env, pool, l_var, r_var, ctx.mode)); + + let merged_var = choose_merged_var(env.subs, l_var, r_var); + 
merged_lambda_set_args.push(merged_var); } if outcome.mismatches.is_empty() { - outcome.union(merge(env, ctx, *other_content)); - } + // Even if there are no changes to alias arguments, and no new variables were + // introduced, we may still need to unify the "actual types" of the alias or opaque! + // + // The unification is not necessary from a types perspective (and in fact, we may want + // to disable it for `roc check` later on), but it is necessary for the monomorphizer, + // which expects identical types to be reflected in the same variable. + // + // As a concrete example, consider the unification of two opaques + // + // P := [Zero, Succ P] + // + // (@P (Succ n)) ~ (@P (Succ o)) + // + // `P` has no arguments, and unification of the surface of `P` introduces nothing new. + // But if we do not unify the types of `n` and `o`, which are recursion variables, they + // will remain disjoint! Currently, the implication of this is that they will be seen + // to have separate recursive memory layouts in the monomorphizer - which is no good + // for our compilation model. + // + // As such, always unify the real vars. - let length_after = env.subs.len(); + // Don't report real_var mismatches, because they must always be surfaced higher, from + // the argument types. + let mut real_var_outcome = + unify_pool::(env, pool, real_var, other_real_var, ctx.mode); + let _ = real_var_outcome.mismatches.drain(..); + outcome.union(real_var_outcome); - let args_unification_had_changes = length_after != length_before; + let merged_real_var = choose_merged_var(env.subs, real_var, other_real_var); - if !args.is_empty() && args_unification_had_changes && outcome.mismatches.is_empty() { - // We need to unify the real vars because unification of type variables - // may have made them larger, which then needs to be reflected in the `real_var`. - outcome.union(unify_pool(env, pool, real_var, other_real_var, ctx.mode)); + // POSSIBLE OPT: choose_merged_var chooses the left when the choice is arbitrary. If + // the merged vars are all left, avoid re-insertion. Is checking for argument slice + // equality faster than re-inserting? 
+ let merged_variables = + AliasVariables::insert_into_subs(env.subs, merged_args, merged_lambda_set_args); + let merged_content = Content::Alias(symbol, merged_variables, merged_real_var, kind); + + outcome.union(merge(env, ctx, merged_content)); } outcome } else { - mismatch!("{:?}", _symbol) + mismatch!("{:?}", symbol) } } @@ -744,12 +802,12 @@ fn unify_alias( env, pool, ctx, + AliasKind::Structural, symbol, args, real_var, *other_args, *other_real_var, - other_content, ) } else { unify_pool(env, pool, real_var, *other_real_var, ctx.mode) @@ -813,12 +871,12 @@ fn unify_opaque( env, pool, ctx, + AliasKind::Opaque, symbol, args, real_var, *other_args, *other_real_var, - other_content, ) } else { mismatch!("{:?}", symbol) @@ -1049,6 +1107,7 @@ struct SeparatedUnionLambdas { fn separate_union_lambdas( env: &mut Env, pool: &mut Pool, + mode: Mode, fields1: UnionLambdas, fields2: UnionLambdas, ) -> (Outcome, SeparatedUnionLambdas) { @@ -1157,7 +1216,7 @@ fn separate_union_lambdas( maybe_mark_union_recursive(env, var1); maybe_mark_union_recursive(env, var2); - let outcome = unify_pool(env, pool, var1, var2, Mode::EQ); + let outcome = unify_pool(env, pool, var1, var2, mode); if !outcome.mismatches.is_empty() { env.subs.rollback_to(snapshot); @@ -1198,64 +1257,292 @@ fn separate_union_lambdas( ) } +/// ULS-SORT-ORDER: +/// - Arrange into partitions of (_, member, region), in ascending order of (member, region). +/// - Within each partition, place flex-able vars at the end of the partition. +/// - Amongst all flex-able vars, sort by their root key, so that identical vars are next to each other. +#[inline(always)] +fn unspecialized_lambda_set_sorter(subs: &Subs, uls1: Uls, uls2: Uls) -> std::cmp::Ordering { + let Uls(var1, sym1, region1) = uls1; + let Uls(var2, sym2, region2) = uls2; + + use std::cmp::Ordering::*; + use Content::*; + match (sym1, region1).cmp(&(sym2, region2)) { + Equal => { + match ( + subs.get_content_without_compacting(var1), + subs.get_content_without_compacting(var2), + ) { + (FlexAbleVar(..) | RigidAbleVar(..), FlexAbleVar(..) | RigidAbleVar(..)) => subs + .get_root_key_without_compacting(var1) + .cmp(&subs.get_root_key_without_compacting(var2)), + (FlexVar(..) | RigidVar(..), _) | (_, FlexVar(..) | RigidVar(..)) => { + internal_error!("unexpected variable type in unspecialized lambda set!") + } + (FlexAbleVar(..), _) => Greater, + (_, FlexAbleVar(..)) => Less, + // For everything else, the order is irrelevant + (_, _) => Less, + } + } + ord => ord, + } +} + +#[inline(always)] +fn sort_unspecialized_lambda_sets(subs: &Subs, mut uls: Vec) -> Vec { + uls.sort_by(|&uls1, &uls2| unspecialized_lambda_set_sorter(subs, uls1, uls2)); + uls +} + +#[inline(always)] +fn is_sorted_unspecialized_lamba_set_list(subs: &Subs, uls: &[Uls]) -> bool { + uls == sort_unspecialized_lambda_sets(subs, uls.to_vec()) +} + fn unify_unspecialized_lambdas( env: &mut Env, pool: &mut Pool, - uls1: SubsSlice, - uls2: SubsSlice, + mode: Mode, + uls_left: SubsSlice, + uls_right: SubsSlice, ) -> Result<(SubsSlice, Outcome), Outcome> { - // For now we merge all variables of unspecialized lambdas in a lambda set that share the same - // ability member/region. - // See the section "A property that's lost, and how we can hold on to it" of - // solve/docs/ambient_lambda_set_specialization.md to see how we can loosen this restriction. 
- // Note that we don't need to update the bookkeeping of variable -> lambda set to be resolved, // because if we had v1 -> lset1, and now lset1 ~ lset2, then afterward either lset1 still // resolves to itself or re-points to lset2. // In either case the merged unspecialized lambda sets will be there. - match (uls1.is_empty(), uls2.is_empty()) { - (true, true) => Ok((SubsSlice::default(), Default::default())), - (false, true) => Ok((uls1, Default::default())), - (true, false) => Ok((uls2, Default::default())), - (false, false) => { - let mut all_uls = (env.subs.get_subs_slice(uls1).iter()) - .chain(env.subs.get_subs_slice(uls2)) - .map(|&Uls(var, sym, region)| { - // Take the root key to deduplicate - Uls(env.subs.get_root_key_without_compacting(var), sym, region) - }) - .collect::>(); - // Arrange into partitions of (_, member, region). - all_uls.sort_by_key(|&Uls(_, sym, region)| (sym, region)); + let (uls_left, uls_right) = match (uls_left.is_empty(), uls_right.is_empty()) { + (true, true) => return Ok((SubsSlice::default(), Default::default())), + (false, true) => return Ok((uls_left, Default::default())), + (true, false) => return Ok((uls_right, Default::default())), + (false, false) => ( + env.subs.get_subs_slice(uls_left).to_vec(), + env.subs.get_subs_slice(uls_right).to_vec(), + ), + }; - // Now merge the variables of unspecialized lambdas pointing to the same - // member/region. - let mut whole_outcome = Outcome::default(); - let mut j = 1; - while j < all_uls.len() { - let i = j - 1; - let Uls(var_i, sym_i, region_i) = all_uls[i]; - let Uls(var_j, sym_j, region_j) = all_uls[j]; - if sym_i == sym_j && region_i == region_j { - let outcome = unify_pool(env, pool, var_i, var_j, Mode::EQ); - if !outcome.mismatches.is_empty() { - return Err(outcome); + // Unfortunately, it is not an invariant that `uls_left` and `uls_right` obey ULS-SORT-ORDER before + // merging. + // + // That's because flex-able variables in unspecialized lambda sets may be unified at any time, + // and unification of flex-able variables may change their root keys, which ULS-SORT-ORDER + // considers. + // + // As such, we must sort beforehand. In practice these sets are very, very small (<5 elements). + let uls_left = sort_unspecialized_lambda_sets(env.subs, uls_left); + let uls_right = sort_unspecialized_lambda_sets(env.subs, uls_right); + + let (mut uls_left, mut uls_right) = (uls_left.iter().peekable(), uls_right.iter().peekable()); + let mut merged_uls = Vec::with_capacity(uls_left.len() + uls_right.len()); + let mut whole_outcome = Outcome::default(); + + loop { + let (uls_l, uls_r) = match (uls_left.peek(), uls_right.peek()) { + (Some(uls_l), Some(uls_r)) => (**uls_l, **uls_r), + (Some(_), None) => { + merged_uls.push(*uls_left.next().unwrap()); + continue; + } + (None, Some(_)) => { + merged_uls.push(*uls_right.next().unwrap()); + continue; + } + (None, None) => break, + }; + + let Uls(var_l, sym_l, region_l) = uls_l; + let Uls(var_r, sym_r, region_r) = uls_r; + + use std::cmp::Ordering::*; + match (sym_l, region_l).cmp(&(sym_r, region_r)) { + Less => { + // Left needs to catch up to right, add it to the merged lambdas. + merged_uls.push(*uls_left.next().unwrap()); + } + Greater => { + // Right needs to catch up to left, add it to the merged lambdas. + merged_uls.push(*uls_right.next().unwrap()); + } + Equal => { + // The interesting case - both point to the same specialization. 
+ use Content::*; + match ( + env.subs.get_content_without_compacting(var_l), + env.subs.get_content_without_compacting(var_r), + ) { + (FlexAbleVar(..) | RigidAbleVar(..), FlexAbleVar(..) | RigidAbleVar(..)) => { + // If the types are root-equivalent, de-duplicate them. + // + // Otherwise, the type variables are disjoint, and we want to keep both + // of them, for purposes of disjoint variable lambda specialization. + // + // For more information, see "A Property that’s lost, and how we can hold on to it" + // in solve/docs/ambient_lambda_set_specialization.md. + + if env.subs.equivalent_without_compacting(var_l, var_r) { + // ... a1 ... + // ... b1=a1 ... + // => ... a1 ... + // + // Keep the one on the left, drop the one on the right. + // + // Then progress both, because the invariant tells us they must be + // disjoint, and if there were any concrete variables, they would have + // appeared earlier. + let _dropped = uls_right.next().unwrap(); + let kept = uls_left.next().unwrap(); + merged_uls.push(*kept); + } else if mode.is_lambda_set_specialization() { + // ... a1 ... + // ... b1 ... + // => ... a1=b1 ... + // + // If we're in the process of running the ambient lambda set + // specialization procedure, disjoint type variables being merged from + // the left and right lists are treated specially! + // + // In particular, we are unifying a local list of lambda sets, for + // which the specialization is for (on the left), with specialization + // lambda sets, which have just been freshened (on the right). + // + // [ .. a:lam:1 ] (local, undergoing specialization) + // [ .. a':lam:1 ] (specialization lambda sets, just freshened) + // + // Because the specialization lambdas are freshened, they certainly are + // disjoint from the local lambdas - but they may be equivalent in + // principle, from the perspective of a human looking at the + // unification! + // + // Running with the example above, the specialization lambda set has an + // unspecialized lambda `a':lam:1`. Now, this is disjoint from + // `a:lam:1` in the local lambda set, from the purely technical + // perspective that `a' != a`. + // + // But, in expected function, they **should not** be treated as disjoint! + // In this case, the specialization lambda is not introducing any new + // information, and is targeting exactly the local lambda `a:lam:1`. + // + // So, to avoid introducing superfluous variables, we unify these disjoint + // variables once, and then progress on both sides. We progress on both + // sides to avoid unifying more than what we should in our principle. + // + // It doesn't matter which side we choose to progress on, since after + // unification of flex vars roots are equivalent. So, choose the left + // side. + // + // See the ambient lambda set specialization document for more details. + let outcome = unify_pool(env, pool, var_l, var_r, mode); + if !outcome.mismatches.is_empty() { + return Err(outcome); + } + whole_outcome.union(outcome); + + debug_assert!(env.subs.equivalent_without_compacting(var_l, var_r)); + + let _dropped = uls_right.next().unwrap(); + let kept = uls_left.next().unwrap(); + merged_uls.push(*kept); + } else { + // ... a1 ... + // ... b1 ... + // => ... a1, b1 ... + // + // Keep both. But, we have to be careful about how we do this - + // immediately add the one with the lower root, and advance that side; + // keep the other as-is, because the next variable on the advanced side + // might be lower than the current non-advanced variable. For example: + // + // ... 640 645 ... 
+ // ... 670 ... + // + // we want to add `640` to the merged list and advance to + // + // ... 645 ... + // ... 670 ... + // + // rather than adding both `640` and `670`, and skipping the comparison + // of `645` with `670`. + // + // An important thing to notice is that we *don't* want to advance + // both sides, because if these two variables are disjoint, then + // advancing one side *might* make the next comparison be between + // equivalent variables, for example in a case like + // + // ... 640 670 ... + // ... 670 ... + // + // In the above case, we certainly only want to advance the left side! + if env.subs.get_root_key(var_l) < env.subs.get_root_key(var_r) { + let kept = uls_left.next().unwrap(); + merged_uls.push(*kept); + } else { + let kept = uls_right.next().unwrap(); + merged_uls.push(*kept); + } + } + } + (FlexAbleVar(..) | RigidAbleVar(..), _) => { + // ... a1 ... + // ... {foo: _} ... + // => ... {foo: _} ... + // + // Unify them, then advance the merged flex var. + + let outcome = unify_pool(env, pool, var_l, var_r, mode); + if !outcome.mismatches.is_empty() { + return Err(outcome); + } + whole_outcome.union(outcome); + + let _dropped = uls_right.next().unwrap(); + } + (_, FlexAbleVar(..) | RigidAbleVar(..)) => { + // ... {foo: _} ... + // ... a1 ... + // => ... {foo: _} ... + // + // Unify them, then advance the merged flex var. + + let outcome = unify_pool(env, pool, var_l, var_r, mode); + if !outcome.mismatches.is_empty() { + return Err(outcome); + } + whole_outcome.union(outcome); + + let _dropped = uls_left.next().unwrap(); + } + (_, _) => { + // ... {foo: _} ... + // ... {foo: _} ... + // => ... {foo: _} ... + // + // Unify them, then advance one. + // (the choice is arbitrary, so we choose the left) + + let outcome = unify_pool(env, pool, var_l, var_r, mode); + if !outcome.mismatches.is_empty() { + return Err(outcome); + } + whole_outcome.union(outcome); + + let _dropped = uls_left.next().unwrap(); } - whole_outcome.union(outcome); - // Keep the Uls in position `i` and remove the one in position `j`. - all_uls.remove(j); - } else { - // Keep both Uls, look at the next one. - j += 1; } } - - Ok(( - SubsSlice::extend_new(&mut env.subs.unspecialized_lambda_sets, all_uls), - whole_outcome, - )) } } + + debug_assert!( + is_sorted_unspecialized_lamba_set_list(env.subs, &merged_uls), + "merging of unspecialized lambda sets does not preserve sort! {:?}", + merged_uls + ); + + Ok(( + SubsSlice::extend_new(&mut env.subs.unspecialized_lambda_sets, merged_uls), + whole_outcome, + )) } fn unify_lambda_set_help( @@ -1300,7 +1587,7 @@ fn unify_lambda_set_help( only_in_right, joined, }, - ) = separate_union_lambdas(env, pool, solved1, solved2); + ) = separate_union_lambdas(env, pool, ctx.mode, solved1, solved2); let all_lambdas = joined .into_iter() @@ -1327,7 +1614,7 @@ fn unify_lambda_set_help( (None, None) => OptVariable::NONE, }; - let merged_unspecialized = match unify_unspecialized_lambdas(env, pool, uls1, uls2) { + let merged_unspecialized = match unify_unspecialized_lambdas(env, pool, ctx.mode, uls1, uls2) { Ok((merged, outcome)) => { whole_outcome.union(outcome); merged @@ -1954,6 +2241,46 @@ fn maybe_mark_union_recursive(env: &mut Env, union_var: Variable) { } } +fn choose_merged_var(subs: &Subs, var1: Variable, var2: Variable) -> Variable { + // If one of the variables is a recursion var, keep that one, so that we avoid inlining + // a recursive tag union type content where we should have a recursion var instead. + // + // When might this happen? 
For example, in the code + // + // Indirect : [Indirect ConsList] + // + // ConsList : [Nil, Cons Indirect] + // + // l : ConsList + // l = Cons (Indirect (Cons (Indirect Nil))) + // # ^^^^^^^^^^^^^^^~~~~~~~~~~~~~~~~~~~~~^ region-a + // # ~~~~~~~~~~~~~~~~~~~~~ region-b + // l + // + // Suppose `ConsList` has the expanded type `[Nil, Cons [Indirect ]] as `. + // After unifying the tag application annotated "region-b" with the recursion variable ``, + // we might have that e.g. `actual` is `` and `expected` is `[Cons (Indirect ...)]`. + // + // Now, we need to be careful to set the type we choose to represent the merged type + // here to be ``, not the tag union content of `expected`! Otherwise, we will + // have lost a recursion variable in the recursive tag union. + // + // This would not be incorrect from a type perspective, but causes problems later on for e.g. + // layout generation, which expects recursion variables to be placed correctly. Attempting to detect + // this during layout generation does not work so well because it may be that there *are* recursive + // tag unions that should be inlined, and not pass through recursion variables. So instead, resolve + // these cases here. + // + // See tests labeled "issue_2810" for more examples. + match ( + (var1, subs.get_content_unchecked(var1)), + (var2, subs.get_content_unchecked(var2)), + ) { + ((var, Content::RecursionVar { .. }), _) | (_, (var, Content::RecursionVar { .. })) => var, + _ => var1, + } +} + fn unify_shared_tags_new( env: &mut Env, pool: &mut Pool, @@ -2014,44 +2341,7 @@ fn unify_shared_tags_new( outcome.union(unify_pool(env, pool, actual, expected, ctx.mode)); if outcome.mismatches.is_empty() { - // If one of the variables is a recursion var, keep that one, so that we avoid inlining - // a recursive tag union type content where we should have a recursion var instead. - // - // When might this happen? For example, in the code - // - // Indirect : [Indirect ConsList] - // - // ConsList : [Nil, Cons Indirect] - // - // l : ConsList - // l = Cons (Indirect (Cons (Indirect Nil))) - // # ^^^^^^^^^^^^^^^~~~~~~~~~~~~~~~~~~~~~^ region-a - // # ~~~~~~~~~~~~~~~~~~~~~ region-b - // l - // - // Suppose `ConsList` has the expanded type `[Nil, Cons [Indirect ]] as `. - // After unifying the tag application annotated "region-b" with the recursion variable ``, - // we might have that e.g. `actual` is `` and `expected` is `[Cons (Indirect ...)]`. - // - // Now, we need to be careful to set the type we choose to represent the merged type - // here to be ``, not the tag union content of `expected`! Otherwise, we will - // have lost a recursion variable in the recursive tag union. - // - // This would not be incorrect from a type perspective, but causes problems later on for e.g. - // layout generation, which expects recursion variables to be placed correctly. Attempting to detect - // this during layout generation does not work so well because it may be that there *are* recursive - // tag unions that should be inlined, and not pass through recursion variables. So instead, resolve - // these cases here. - // - // See tests labeled "issue_2810" for more examples. - let merged_var = match ( - (actual, env.subs.get_content_unchecked(actual)), - (expected, env.subs.get_content_unchecked(expected)), - ) { - ((var, Content::RecursionVar { .. }), _) - | (_, (var, Content::RecursionVar { .. 
})) => var, - _ => actual, - }; + let merged_var = choose_merged_var(env.subs, actual, expected); matching_vars.push(merged_var); } diff --git a/crates/docs/Cargo.toml b/crates/docs/Cargo.toml index 4a7b6b54ec..15a5b33a6d 100644 --- a/crates/docs/Cargo.toml +++ b/crates/docs/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "roc_docs" -version = "0.1.0" +version = "0.0.1" license = "UPL-1.0" authors = ["The Roc Contributors"] edition = "2021" diff --git a/crates/docs/src/lib.rs b/crates/docs/src/lib.rs index 266aa8ef7c..b69418ad33 100644 --- a/crates/docs/src/lib.rs +++ b/crates/docs/src/lib.rs @@ -10,7 +10,7 @@ use roc_highlight::highlight_parser::{highlight_defs, highlight_expr}; use roc_load::docs::DocEntry::DocDef; use roc_load::docs::{DocEntry, TypeAnnotation}; use roc_load::docs::{ModuleDocumentation, RecordField}; -use roc_load::{LoadedModule, LoadingProblem, Threading}; +use roc_load::{ExecutionMode, LoadConfig, LoadedModule, LoadingProblem, Threading}; use roc_module::symbol::{IdentIdsByModule, Interns, ModuleId}; use roc_parse::ident::{parse_ident, Ident}; use roc_parse::state::State; @@ -432,14 +432,13 @@ pub fn load_modules_for_files(filenames: Vec) -> Vec { let mut modules = Vec::with_capacity(filenames.len()); for filename in filenames { - match roc_load::load_and_typecheck( - &arena, - filename, - Default::default(), - roc_target::TargetInfo::default_x86_64(), // This is just type-checking for docs, so "target" doesn't matter - roc_reporting::report::RenderTarget::ColorTerminal, - Threading::AllAvailable, - ) { + let load_config = LoadConfig { + target_info: roc_target::TargetInfo::default_x86_64(), // This is just type-checking for docs, so "target" doesn't matter + render: roc_reporting::report::RenderTarget::ColorTerminal, + threading: Threading::AllAvailable, + exec_mode: ExecutionMode::Check, + }; + match roc_load::load_and_typecheck(&arena, filename, Default::default(), load_config) { Ok(loaded) => modules.push(loaded), Err(LoadingProblem::FormattedReport(report)) => { eprintln!("{}", report); diff --git a/crates/docs_cli/Cargo.toml b/crates/docs_cli/Cargo.toml index 96600f1f17..043b3b91d8 100644 --- a/crates/docs_cli/Cargo.toml +++ b/crates/docs_cli/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "roc_docs_cli" -version = "0.1.0" +version = "0.0.1" license = "UPL-1.0" authors = ["The Roc Contributors"] edition = "2021" diff --git a/crates/editor/Cargo.toml b/crates/editor/Cargo.toml index a471c929cc..e58a35a3e0 100644 --- a/crates/editor/Cargo.toml +++ b/crates/editor/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "roc_editor" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" diff --git a/crates/error_macros/Cargo.toml b/crates/error_macros/Cargo.toml index 83744ab39d..b7db7fa4b5 100644 --- a/crates/error_macros/Cargo.toml +++ b/crates/error_macros/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "roc_error_macros" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" diff --git a/crates/error_macros/src/lib.rs b/crates/error_macros/src/lib.rs index 679af370b9..7cc51fa78e 100644 --- a/crates/error_macros/src/lib.rs +++ b/crates/error_macros/src/lib.rs @@ -115,14 +115,4 @@ macro_rules! todo_abilities { }; } -#[macro_export] -macro_rules! 
todo_opaques { - () => { - $crate::_incomplete_project!("Abilities (opaques)", 2463) - }; - ($($arg:tt)+) => { - $crate::_incomplete_project!("Abilities (opaques)", 2463, $($arg)+) - }; -} - // END LARGE SCALE PROJECTS diff --git a/crates/glue/Cargo.toml b/crates/glue/Cargo.toml index b7e08114fd..83edbdf567 100644 --- a/crates/glue/Cargo.toml +++ b/crates/glue/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "roc_glue" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" diff --git a/crates/glue/src/load.rs b/crates/glue/src/load.rs index 0b942325b2..ae4f4d5622 100644 --- a/crates/glue/src/load.rs +++ b/crates/glue/src/load.rs @@ -1,9 +1,9 @@ use crate::rust_glue; use crate::types::{Env, Types}; use bumpalo::Bump; -use roc_load::{LoadedModule, LoadingProblem, Threading}; +use roc_load::{ExecutionMode, LoadConfig, LoadedModule, LoadingProblem, Threading}; use roc_reporting::report::RenderTarget; -use roc_target::{Architecture, TargetInfo}; +use roc_target::{Architecture, OperatingSystem, TargetInfo}; use std::fs::File; use std::io::{self, ErrorKind, Write}; use std::path::{Path, PathBuf}; @@ -83,9 +83,12 @@ pub fn load_types( arena, full_file_path, subs_by_module, - target_info, - RenderTarget::Generic, - threading, + LoadConfig { + target_info, + render: RenderTarget::Generic, + threading, + exec_mode: ExecutionMode::Check, + }, ) .unwrap_or_else(|problem| match problem { LoadingProblem::FormattedReport(report) => { @@ -135,7 +138,10 @@ pub fn load_types( let types_and_targets = Architecture::iter() .map(|arch| { - let target_info = arch.into(); + let target_info = TargetInfo { + architecture: arch, + operating_system: OperatingSystem::Unix, + }; let mut env = Env::new(arena, subs, &mut interns, target_info); (env.vars_to_types(variables.clone()), target_info) diff --git a/crates/glue/src/rust_glue.rs b/crates/glue/src/rust_glue.rs index f40896b929..b31d5f1c8a 100644 --- a/crates/glue/src/rust_glue.rs +++ b/crates/glue/src/rust_glue.rs @@ -931,7 +931,7 @@ pub struct {name} {{ } // The Ord impl for the tag union - { + if !has_float(typ, types) { let opt_impl = Some(format!("impl Ord for {name}")); let mut buf = r#"fn cmp(&self, other: &Self) -> core::cmp::Ordering { match self.discriminant().cmp(&other.discriminant()) { @@ -1041,7 +1041,7 @@ pub struct {name} {{ } // The Hash impl for the tag union - { + if !has_float(typ, types) { let opt_impl = Some(format!("impl core::hash::Hash for {name}")); let mut buf = r#"fn hash(&self, state: &mut H) {"#.to_string(); diff --git a/crates/highlight/Cargo.toml b/crates/highlight/Cargo.toml index 4cd885c52f..5c4f498091 100644 --- a/crates/highlight/Cargo.toml +++ b/crates/highlight/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "roc_highlight" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" diff --git a/crates/linker/Cargo.toml b/crates/linker/Cargo.toml index 31240e194b..445db1405e 100644 --- a/crates/linker/Cargo.toml +++ b/crates/linker/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "roc_linker" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" repository = "https://github.com/rtfeldman/roc" diff --git a/crates/repl_cli/Cargo.toml b/crates/repl_cli/Cargo.toml index e027eddd43..a3fb73fc02 100644 --- a/crates/repl_cli/Cargo.toml +++ b/crates/repl_cli/Cargo.toml @@ -1,9 +1,10 @@ [package] edition = "2021" name = "roc_repl_cli" -version = "0.1.0" +version = "0.0.1" +authors = ["The Roc Contributors"] 
+license = "UPL-1.0" -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [features] # pipe target to roc_build diff --git a/crates/repl_cli/src/lib.rs b/crates/repl_cli/src/lib.rs index 2ab12bd113..4cbfd6d09d 100644 --- a/crates/repl_cli/src/lib.rs +++ b/crates/repl_cli/src/lib.rs @@ -15,7 +15,7 @@ use roc_build::link::llvm_module_to_dylib; use roc_collections::all::MutSet; use roc_gen_llvm::llvm::externs::add_default_roc_externs; use roc_gen_llvm::{run_jit_function, run_jit_function_dynamic_type}; -use roc_load::MonomorphizedModule; +use roc_load::{EntryPoint, MonomorphizedModule}; use roc_mono::ir::OptLevel; use roc_parse::ast::Expr; use roc_parse::parser::{EExpr, ELambda, SyntaxError}; @@ -239,6 +239,15 @@ pub fn mono_module_to_dylib<'a>( // platform to provide them. add_default_roc_externs(&env); + let entry_point = match entry_point { + EntryPoint::Executable { symbol, layout, .. } => { + roc_mono::ir::EntryPoint { symbol, layout } + } + EntryPoint::Test => { + unreachable!() + } + }; + let (main_fn_name, main_fn) = roc_gen_llvm::llvm::build::build_procedures_return_main( &env, opt_level, diff --git a/crates/repl_eval/Cargo.toml b/crates/repl_eval/Cargo.toml index 37e58c2f50..3f21f92a0f 100644 --- a/crates/repl_eval/Cargo.toml +++ b/crates/repl_eval/Cargo.toml @@ -1,7 +1,9 @@ [package] edition = "2021" name = "roc_repl_eval" -version = "0.1.0" +version = "0.0.1" +authors = ["The Roc Contributors"] +license = "UPL-1.0" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html diff --git a/crates/repl_eval/src/gen.rs b/crates/repl_eval/src/gen.rs index 07ba4e644e..0becb9fb10 100644 --- a/crates/repl_eval/src/gen.rs +++ b/crates/repl_eval/src/gen.rs @@ -1,5 +1,5 @@ use bumpalo::Bump; -use roc_load::Threading; +use roc_load::{ExecutionMode, LoadConfig, Threading}; use roc_reporting::report::Palette; use std::path::PathBuf; @@ -60,9 +60,12 @@ pub fn compile_to_mono<'a>( module_src, src_dir, exposed_types, - target_info, - roc_reporting::report::RenderTarget::ColorTerminal, - Threading::Single, + LoadConfig { + target_info, + render: roc_reporting::report::RenderTarget::ColorTerminal, + threading: Threading::Single, + exec_mode: ExecutionMode::Executable, + }, ); let mut loaded = match loaded { diff --git a/crates/repl_expect/Cargo.toml b/crates/repl_expect/Cargo.toml index 8d07e35df6..922f2a30e4 100644 --- a/crates/repl_expect/Cargo.toml +++ b/crates/repl_expect/Cargo.toml @@ -1,9 +1,9 @@ [package] name = "roc_repl_expect" -version = "0.1.0" +version = "0.0.1" edition = "2021" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +authors = ["The Roc Contributors"] +license = "UPL-1.0" [dependencies] bumpalo = {version = "3.8.0", features = ["collections"]} diff --git a/crates/repl_expect/src/lib.rs b/crates/repl_expect/src/lib.rs index 8b499ff11d..f3be3326ab 100644 --- a/crates/repl_expect/src/lib.rs +++ b/crates/repl_expect/src/lib.rs @@ -80,7 +80,7 @@ mod test { use indoc::indoc; use pretty_assertions::assert_eq; use roc_gen_llvm::{llvm::build::LlvmBackendMode, run_roc::RocCallResult, run_roc_dylib}; - use roc_load::Threading; + use roc_load::{ExecutionMode, LoadConfig, Threading}; use roc_reporting::report::RenderTarget; use target_lexicon::Triple; @@ -104,15 +104,19 @@ mod test { std::fs::write(&filename, source).unwrap(); + let load_config = LoadConfig { + target_info, + render: RenderTarget::ColorTerminal, + threading: Threading::Single, + exec_mode: 
ExecutionMode::Executable, + }; let loaded = roc_load::load_and_monomorphize_from_str( arena, filename, source, src_dir.path().to_path_buf(), Default::default(), - target_info, - RenderTarget::ColorTerminal, - Threading::Single, + load_config, ) .unwrap(); @@ -479,4 +483,119 @@ mod test { ), ); } + + #[test] + fn struct_with_strings() { + run_expect_test( + indoc!( + r#" + app "test" provides [main] to "./platform" + + main = 0 + + expect + a = { + utopia: "Astra mortemque praestare gradatim", + brillist: "Profundum et fundamentum", + } + + a != a + "# + ), + indoc!( + r#" + This expectation failed: + + 5│> expect + 6│> a = { + 7│> utopia: "Astra mortemque praestare gradatim", + 8│> brillist: "Profundum et fundamentum", + 9│> } + 10│> + 11│> a != a + + When it failed, these variables had these values: + + a : { brillist : Str, utopia : Str } + a = { brillist: "Profundum et fundamentum", utopia: "Astra mortemque praestare gradatim" } + "# + ), + ); + } + + #[test] + fn box_with_strings() { + run_expect_test( + indoc!( + r#" + app "test" provides [main] to "./platform" + + main = 0 + + expect + a = Box.box "Astra mortemque praestare gradatim" + b = Box.box "Profundum et fundamentum" + + a == b + "# + ), + indoc!( + r#" + This expectation failed: + + 5│> expect + 6│> a = Box.box "Astra mortemque praestare gradatim" + 7│> b = Box.box "Profundum et fundamentum" + 8│> + 9│> a == b + + When it failed, these variables had these values: + + a : Box Str + a = Box.box "Astra mortemque praestare gradatim" + + b : Box Str + b = Box.box "Profundum et fundamentum" + "# + ), + ); + } + + #[test] + fn result_with_strings() { + run_expect_test( + indoc!( + r#" + app "test" provides [main] to "./platform" + + main = 0 + + expect + a = Ok "Astra mortemque praestare gradatim" + b = Err "Profundum et fundamentum" + + a == b + "# + ), + indoc!( + r#" + This expectation failed: + + 5│> expect + 6│> a = Ok "Astra mortemque praestare gradatim" + 7│> b = Err "Profundum et fundamentum" + 8│> + 9│> a == b + + When it failed, these variables had these values: + + a : [Ok Str]a + a = Ok "Astra mortemque praestare gradatim" + + b : [Err Str]a + b = Err "Profundum et fundamentum" + "# + ), + ); + } } diff --git a/crates/repl_expect/src/run.rs b/crates/repl_expect/src/run.rs index 707bc5d658..fef58f5117 100644 --- a/crates/repl_expect/src/run.rs +++ b/crates/repl_expect/src/run.rs @@ -4,7 +4,7 @@ use inkwell::context::Context; use roc_build::link::llvm_module_to_dylib; use roc_collections::{MutSet, VecMap}; use roc_gen_llvm::llvm::{build::LlvmBackendMode, externs::add_default_roc_externs}; -use roc_load::{Expectations, MonomorphizedModule}; +use roc_load::{EntryPoint, Expectations, MonomorphizedModule}; use roc_module::symbol::{Interns, ModuleId, Symbol}; use roc_mono::ir::OptLevel; use roc_region::all::Region; @@ -299,12 +299,19 @@ pub fn expect_mono_module_to_dylib<'a>( // platform to provide them. add_default_roc_externs(&env); + let opt_entry_point = match entry_point { + EntryPoint::Executable { symbol, layout, .. 
} => { + Some(roc_mono::ir::EntryPoint { symbol, layout }) + } + EntryPoint::Test => None, + }; + let expect_names = roc_gen_llvm::llvm::build::build_procedures_expose_expects( &env, opt_level, toplevel_expects.unzip_slices().0, procedures, - entry_point, + opt_entry_point, ); let expects = bumpalo::collections::Vec::from_iter_in( @@ -341,7 +348,5 @@ pub fn expect_mono_module_to_dylib<'a>( ); } - env.module.print_to_file("/tmp/test.ll").unwrap(); - llvm_module_to_dylib(env.module, &target, opt_level).map(|lib| (lib, expects)) } diff --git a/crates/repl_test/Cargo.toml b/crates/repl_test/Cargo.toml index 4ab8c31292..ca1a83d1a8 100644 --- a/crates/repl_test/Cargo.toml +++ b/crates/repl_test/Cargo.toml @@ -1,7 +1,9 @@ [package] edition = "2021" name = "repl_test" -version = "0.1.0" +version = "0.0.1" +authors = ["The Roc Contributors"] +license = "UPL-1.0" [build-dependencies] roc_cli = {path = "../cli"} diff --git a/crates/repl_wasm/Cargo.toml b/crates/repl_wasm/Cargo.toml index 4a859465e9..1a13904eca 100644 --- a/crates/repl_wasm/Cargo.toml +++ b/crates/repl_wasm/Cargo.toml @@ -1,7 +1,9 @@ [package] edition = "2021" name = "roc_repl_wasm" -version = "0.1.0" +version = "0.0.1" +authors = ["The Roc Contributors"] +license = "UPL-1.0" [lib] crate-type = ["cdylib"] diff --git a/crates/repl_wasm/build.rs b/crates/repl_wasm/build.rs index 93af44fe29..93f869ef43 100644 --- a/crates/repl_wasm/build.rs +++ b/crates/repl_wasm/build.rs @@ -26,7 +26,7 @@ fn main() { let output = Command::new(&zig_executable()) .args([ "wasm-ld", - bitcode::BUILTINS_WASM32_OBJ_PATH, + &bitcode::get_builtins_wasm32_obj_path(), platform_obj.to_str().unwrap(), WASI_COMPILER_RT_PATH, WASI_LIBC_PATH, diff --git a/crates/reporting/Cargo.toml b/crates/reporting/Cargo.toml index 98f796366b..b63395a313 100644 --- a/crates/reporting/Cargo.toml +++ b/crates/reporting/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "roc_reporting" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" diff --git a/crates/reporting/src/error/canonicalize.rs b/crates/reporting/src/error/canonicalize.rs index b6393f3ed6..8d4725e3af 100644 --- a/crates/reporting/src/error/canonicalize.rs +++ b/crates/reporting/src/error/canonicalize.rs @@ -233,8 +233,10 @@ pub fn can_problem<'b>( title = DUPLICATE_NAME.to_string(); severity = Severity::RuntimeError; } - Problem::CyclicAlias(symbol, region, others) => { - let answer = crate::error::r#type::cyclic_alias(alloc, lines, symbol, region, others); + Problem::CyclicAlias(symbol, region, others, alias_kind) => { + let answer = crate::error::r#type::cyclic_alias( + alloc, lines, symbol, region, others, alias_kind, + ); doc = answer.0; title = answer.1; @@ -244,6 +246,7 @@ pub fn can_problem<'b>( typ: alias, variable_region, variable_name, + alias_kind, } => { doc = alloc.stack([ alloc.concat([ @@ -251,10 +254,12 @@ pub fn can_problem<'b>( alloc.type_variable(variable_name), alloc.reflow(" type parameter is not used in the "), alloc.symbol_unqualified(alias), - alloc.reflow(" alias definition:"), + alloc.reflow(" "), + alloc.reflow(alias_kind.as_str()), + alloc.reflow(" definition:"), ]), alloc.region(lines.convert_region(variable_region)), - alloc.reflow("Roc does not allow unused type alias parameters!"), + alloc.reflow("Roc does not allow unused type parameters!"), // TODO add link to this guide section alloc.tip().append(alloc.reflow( "If you want an unused type parameter (a so-called \"phantom type\"), \ @@ -954,8 +959,10 @@ pub fn can_problem<'b>( } fn 
list_builtin_abilities<'a>(alloc: &'a RocDocAllocator<'a>) -> RocDocBuilder<'a> { - debug_assert!(DERIVABLE_ABILITIES.len() == 1); - alloc.concat([alloc.symbol_qualified(DERIVABLE_ABILITIES[0].0)]) + alloc.intersperse( + [alloc.symbol_qualified(DERIVABLE_ABILITIES[0].0)], + alloc.reflow(", "), + ) } fn to_invalid_optional_value_report<'b>( diff --git a/crates/reporting/src/error/type.rs b/crates/reporting/src/error/type.rs index 88a4ae1862..0801fa26d3 100644 --- a/crates/reporting/src/error/type.rs +++ b/crates/reporting/src/error/type.rs @@ -77,6 +77,7 @@ pub fn type_problem<'b>( region, type_got, alias_needs, + alias_kind, } => { let needed_arguments = if alias_needs == 1 { alloc.reflow("1 type argument") @@ -92,7 +93,9 @@ pub fn type_problem<'b>( alloc.concat([ alloc.reflow("The "), alloc.symbol_unqualified(symbol), - alloc.reflow(" alias expects "), + alloc.reflow(" "), + alloc.reflow(alias_kind.as_str()), + alloc.reflow(" expects "), needed_arguments, alloc.reflow(", but it got "), found_arguments, @@ -433,16 +436,21 @@ pub fn cyclic_alias<'b>( symbol: Symbol, region: roc_region::all::Region, others: Vec, + alias_kind: AliasKind, ) -> (RocDocBuilder<'b>, String) { let when_is_recursion_legal = - alloc.reflow("Recursion in aliases is only allowed if recursion happens behind a tagged union, at least one variant of which is not recursive."); + alloc.reflow("Recursion in ") + .append(alloc.reflow(alias_kind.as_str())) + .append(alloc.reflow("es is only allowed if recursion happens behind a tagged union, at least one variant of which is not recursive.")); let doc = if others.is_empty() { alloc.stack([ alloc .reflow("The ") .append(alloc.symbol_unqualified(symbol)) - .append(alloc.reflow(" alias is self-recursive in an invalid way:")), + .append(alloc.reflow(" ")) + .append(alloc.reflow(alias_kind.as_str())) + .append(alloc.reflow(" is self-recursive in an invalid way:")), alloc.region(lines.convert_region(region)), when_is_recursion_legal, ]) @@ -451,14 +459,18 @@ pub fn cyclic_alias<'b>( alloc .reflow("The ") .append(alloc.symbol_unqualified(symbol)) - .append(alloc.reflow(" alias is recursive in an invalid way:")), + .append(alloc.reflow(" ")) + .append(alloc.reflow(alias_kind.as_str())) + .append(alloc.reflow(" is recursive in an invalid way:")), alloc.region(lines.convert_region(region)), alloc .reflow("The ") .append(alloc.symbol_unqualified(symbol)) - .append(alloc.reflow( - " alias depends on itself through the following chain of definitions:", - )), + .append(alloc.reflow(" ")) + .append(alloc.reflow(alias_kind.as_str())) + .append( + alloc.reflow(" depends on itself through the following chain of definitions:"), + ), crate::report::cycle( alloc, 4, diff --git a/crates/reporting/tests/test_reporting.rs b/crates/reporting/tests/test_reporting.rs index 67d2f5c512..cba4c05385 100644 --- a/crates/reporting/tests/test_reporting.rs +++ b/crates/reporting/tests/test_reporting.rs @@ -13,7 +13,7 @@ mod test_reporting { use indoc::indoc; use roc_can::abilities::AbilitiesStore; use roc_can::expr::PendingDerives; - use roc_load::{self, LoadedModule, LoadingProblem, Threading}; + use roc_load::{self, ExecutionMode, LoadConfig, LoadedModule, LoadingProblem, Threading}; use roc_module::symbol::{Interns, ModuleId}; use roc_region::all::LineInfo; use roc_reporting::report::{ @@ -83,14 +83,14 @@ mod test_reporting { let full_file_path = file_path.clone(); let mut file = File::create(file_path).unwrap(); writeln!(file, "{}", module_src).unwrap(); - let result = roc_load::load_and_typecheck( - arena, 
- full_file_path, - exposed_types, - roc_target::TargetInfo::default_x86_64(), - RenderTarget::Generic, - Threading::Single, - ); + let load_config = LoadConfig { + target_info: roc_target::TargetInfo::default_x86_64(), + render: RenderTarget::Generic, + threading: Threading::Single, + exec_mode: ExecutionMode::Check, + }; + let result = + roc_load::load_and_typecheck(arena, full_file_path, exposed_types, load_config); drop(file); result @@ -1226,7 +1226,7 @@ mod test_reporting { // variables they can put themselves in, and to run the constraint algorithm // against that extra variable, rather than possibly having to translate a `Type` // again. - @r#" + @r###" ── CIRCULAR TYPE ───────────────────────────────────────── /code/proj/Main.roc ─ I'm inferring a weird self-referential type for `f`: @@ -1265,7 +1265,20 @@ mod test_reporting { infinitely. List ∞ -> List a - "# + + ── CIRCULAR TYPE ───────────────────────────────────────── /code/proj/Main.roc ─ + + I'm inferring a weird self-referential type for `main`: + + 3│ main = + ^^^^ + + Here is my best effort at writing down the type. You will see ∞ for + parts of the type that repeat something already printed out + infinitely. + + List ∞ -> List a + "### ); test_report!( @@ -3112,7 +3125,7 @@ mod test_reporting { @r###" ── TOO MANY TYPE ARGUMENTS ─────────────────────────────── /code/proj/Main.roc ─ - The `Num` alias expects 1 type argument, but it got 2 instead: + The `Num` opaque expects 1 type argument, but it got 2 instead: 4│ a : Num.Num Num.I64 Num.F64 ^^^^^^^^^^^^^^^^^^^^^^^ @@ -3134,7 +3147,7 @@ mod test_reporting { @r###" ── TOO MANY TYPE ARGUMENTS ─────────────────────────────── /code/proj/Main.roc ─ - The `Num` alias expects 1 type argument, but it got 2 instead: + The `Num` opaque expects 1 type argument, but it got 2 instead: 4│ f : Str -> Num.Num Num.I64 Num.F64 ^^^^^^^^^^^^^^^^^^^^^^^ @@ -3210,7 +3223,7 @@ mod test_reporting { 4│ Foo a : [Foo] ^ - Roc does not allow unused type alias parameters! + Roc does not allow unused type parameters! Tip: If you want an unused type parameter (a so-called "phantom type"), read the guide section on phantom values. @@ -3613,8 +3626,8 @@ mod test_reporting { Is there an import missing? Perhaps there is a typo. Did you mean one of these? - List Set + List Dict Result @@ -10117,4 +10130,181 @@ All branches in an `if` must have the same type! determined to actually specialize `Id2`! "### ); + + test_report!( + mismatched_record_annotation, + indoc!( + r#" + x : { y : Str } + x = {} + + x + "# + ), + @r###" + ── TYPE MISMATCH ───────────────────────────────────────── /code/proj/Main.roc ─ + + Something is off with the body of the `x` definition: + + 4│ x : { y : Str } + 5│ x = {} + ^^ + + The body is a record of type: + + {} + + But the type annotation on `x` says it should be: + + { y : Str } + + Tip: Looks like the y field is missing. + "### + ); + + test_report!( + cyclic_opaque, + indoc!( + r#" + Recursive := [Infinitely Recursive] + + 0 + "# + ), + @r###" + ── CYCLIC ALIAS ────────────────────────────────────────── /code/proj/Main.roc ─ + + The `Recursive` opaque is self-recursive in an invalid way: + + 4│ Recursive := [Infinitely Recursive] + ^^^^^^^^^ + + Recursion in opaquees is only allowed if recursion happens behind a + tagged union, at least one variant of which is not recursive. 
+ "### + ); + + test_report!( + derive_decoding_for_function, + indoc!( + r#" + app "test" imports [Decode] provides [A] to "./platform" + + A a := a -> a has [Decode.Decoding] + "# + ), + @r###" + ── INCOMPLETE ABILITY IMPLEMENTATION ───────────────────── /code/proj/Main.roc ─ + + Roc can't derive an implementation of the `Decode.Decoding` for `A`: + + 3│ A a := a -> a has [Decode.Decoding] + ^^^^^^^^^^^^^^^ + + Note: `Decoding` cannot be generated for functions. + + Tip: You can define a custom implementation of `Decode.Decoding` for `A`. + "### + ); + + test_report!( + derive_decoding_for_non_decoding_opaque, + indoc!( + r#" + app "test" imports [Decode] provides [A] to "./platform" + + A := B has [Decode.Decoding] + + B := {} + "# + ), + @r###" + ── INCOMPLETE ABILITY IMPLEMENTATION ───────────────────── /code/proj/Main.roc ─ + + Roc can't derive an implementation of the `Decode.Decoding` for `A`: + + 3│ A := B has [Decode.Decoding] + ^^^^^^^^^^^^^^^ + + Tip: `B` does not implement `Decoding`. Consider adding a custom + implementation or `has Decode.Decoding` to the definition of `B`. + + Tip: You can define a custom implementation of `Decode.Decoding` for `A`. + "### + ); + + test_report!( + derive_decoding_for_other_has_decoding, + indoc!( + r#" + app "test" imports [Decode] provides [A] to "./platform" + + A := B has [Decode.Decoding] + + B := {} has [Decode.Decoding] + "# + ), + @"" // no error + ); + + test_report!( + derive_decoding_for_recursive_deriving, + indoc!( + r#" + app "test" imports [Decode] provides [MyNat] to "./platform" + + MyNat := [S MyNat, Z] has [Decode.Decoding] + "# + ), + @"" // no error + ); + + test_report!( + function_cannot_derive_encoding, + indoc!( + r#" + app "test" imports [Decode.{Decoder, DecoderFormatting, decoder}] provides [main] to "./platform" + + main = + myDecoder : Decoder (a -> a) fmt | fmt has DecoderFormatting + myDecoder = decoder + + myDecoder + "# + ), + @r###" + ── TYPE MISMATCH ───────────────────────────────────────── /code/proj/Main.roc ─ + + This expression has a type that does not implement the abilities it's expected to: + + 5│ myDecoder = decoder + ^^^^^^^ + + Roc can't generate an implementation of the `Decode.Decoding` ability + for + + a -> a + + Note: `Decoding` cannot be generated for functions. 
+ "### + ); + + test_report!( + #[ignore = "needs structural deriving to be turned on first"] + nested_opaque_cannot_derive_encoding, + indoc!( + r#" + app "test" imports [Decode.{Decoder, DecoderFormatting, decoder}] provides [main] to "./platform" + + A : {} + main = + myDecoder : Decoder {x : A} fmt | fmt has DecoderFormatting + myDecoder = decoder + + myDecoder + "# + ), + @r###" + "### + ); } diff --git a/crates/roc_std/Cargo.toml b/crates/roc_std/Cargo.toml index d819057fa3..37a35c6672 100644 --- a/crates/roc_std/Cargo.toml +++ b/crates/roc_std/Cargo.toml @@ -6,7 +6,7 @@ license = "UPL-1.0" name = "roc_std" readme = "README.md" repository = "https://github.com/rtfeldman/roc" -version = "0.1.0" +version = "0.0.1" [dependencies] static_assertions = "1.1.0" diff --git a/crates/test_utils/Cargo.toml b/crates/test_utils/Cargo.toml index c053106f67..0488acf3ee 100644 --- a/crates/test_utils/Cargo.toml +++ b/crates/test_utils/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "roc_test_utils" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" diff --git a/crates/utils/Cargo.toml b/crates/utils/Cargo.toml index 147b8bba11..02445e2963 100644 --- a/crates/utils/Cargo.toml +++ b/crates/utils/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "roc_utils" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" diff --git a/crates/utils/src/lib.rs b/crates/utils/src/lib.rs index 8ebfae2069..cdc5d3fb0e 100644 --- a/crates/utils/src/lib.rs +++ b/crates/utils/src/lib.rs @@ -1,5 +1,5 @@ use snafu::OptionExt; -use std::{collections::HashMap, slice::SliceIndex}; +use std::{collections::HashMap, path::PathBuf, slice::SliceIndex}; use util_error::{IndexOfFailedSnafu, KeyNotFoundSnafu, OutOfBoundsSnafu, UtilResult}; pub mod util_error; @@ -93,3 +93,30 @@ pub fn first_last_index_of( .fail() } } + +// get the path of the lib folder +// runtime dependencies like zig files, builtin_host.o are put in the lib folder +pub fn get_lib_path() -> Option { + let exe_relative_str_path_opt = std::env::current_exe().ok(); + + if let Some(exe_relative_str_path) = exe_relative_str_path_opt { + let mut curr_parent_opt = exe_relative_str_path.parent(); + + // this differs for regular build and nix releases, so we check in multiple spots. 
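+    // For a plain `cargo build --release`, the `lib` folder sits next to the
+    // roc binary (e.g. `target/release/lib`); for the nix package in
+    // `default.nix`, the binary is installed into `bin/` and `lib` ends up one
+    // level higher. So check the binary's directory and up to two ancestors
+    // above it, returning the first location that contains a `lib` folder.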
+ for _ in 0..3 { + if let Some(curr_parent) = curr_parent_opt { + let lib_path = curr_parent.join("lib"); + + if std::path::Path::exists(&lib_path) { + return Some(lib_path); + } else { + curr_parent_opt = curr_parent.parent(); + } + } else { + break; + } + } + } + + None +} diff --git a/crates/wasi-libc-sys/Cargo.toml b/crates/wasi-libc-sys/Cargo.toml index 259a38b2cf..959bcdf00a 100644 --- a/crates/wasi-libc-sys/Cargo.toml +++ b/crates/wasi-libc-sys/Cargo.toml @@ -5,4 +5,4 @@ edition = "2021" license = "UPL-1.0" name = "wasi_libc_sys" repository = "https://github.com/rtfeldman/roc" -version = "0.1.0" +version = "0.0.1" diff --git a/default.nix b/default.nix new file mode 100644 index 0000000000..ec783d393c --- /dev/null +++ b/default.nix @@ -0,0 +1,86 @@ +{ }: +# we only this file to release a nix package, use flake.nix for development +let + rev = "f6342b8b9e7a4177c7e775cdbf38e1c1b43e7ab3"; # nixpkgs master + nixpkgs = builtins.fetchTarball { + url = "https://github.com/nixos/nixpkgs/tarball/${rev}"; + sha256 = "JTiKsBT1BwMbtSUsvtSl8ffkiirby8FaujJVGV766Q8="; + }; + pkgs = import nixpkgs { }; + rustPlatform = pkgs.rustPlatform; + llvmPkgs = pkgs.llvmPackages_13; + # nix does not store libs in /usr/lib or /lib + nixGlibcPath = if pkgs.stdenv.isLinux then "${pkgs.glibc.out}/lib" else ""; +in +rustPlatform.buildRustPackage { + pname = "roc"; + version = "0.0.1"; + + src = pkgs.nix-gitignore.gitignoreSource [] ./.; + + cargoSha256 = "sha256-cFzOcU982kANsZjx4YoLQOZSOYN3loj+5zowhWoBWM8="; + + LLVM_SYS_130_PREFIX = "${llvmPkgs.llvm.dev}"; + + # required for zig + XDG_CACHE_HOME = "xdg_cache"; # prevents zig AccessDenied error github.com/ziglang/zig/issues/6810 + # want to see backtrace in case of failure + RUST_BACKTRACE = 1; + + # skip running rust tests, problems: + # building of example platforms requires network: Could not resolve host + # zig AccessDenied error github.com/ziglang/zig/issues/6810 + # Once instance has previously been poisoned ?? + doCheck = false; + + nativeBuildInputs = (with pkgs; [ + cmake + git + pkg-config + python3 + llvmPkgs.clang + llvmPkgs.llvm.dev + zig + rust-bindgen + ]); + + buildInputs = (with pkgs; [ + libffi + libiconv + libxkbcommon + libxml2 + ncurses + zlib + cargo + makeWrapper # necessary for postBuild wrapProgram + ] + ++ lib.optionals pkgs.stdenv.isLinux [ + alsa-lib + valgrind + vulkan-headers + vulkan-loader + vulkan-tools + vulkan-validation-layers + xorg.libX11 + xorg.libXcursor + xorg.libXi + xorg.libXrandr + xorg.libxcb + ] + ++ lib.optionals pkgs.stdenv.isDarwin [ + AppKit + CoreFoundation + CoreServices + CoreVideo + Foundation + Metal + Security + ]); + + # cp: to copy str.zig,list.zig... + # wrapProgram pkgs.stdenv.cc: to make ld available for compiler/build/src/link.rs + postInstall = '' + cp -r target/x86_64-unknown-linux-gnu/release/lib/. 
$out/lib + wrapProgram $out/bin/roc --set NIX_GLIBC_PATH ${nixGlibcPath} --prefix PATH : ${pkgs.lib.makeBinPath [ pkgs.stdenv.cc ]} + ''; +} diff --git a/examples/breakout/platform/Cargo.toml b/examples/breakout/platform/Cargo.toml index 82343f1def..544db04357 100644 --- a/examples/breakout/platform/Cargo.toml +++ b/examples/breakout/platform/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "host" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" diff --git a/examples/false-interpreter/platform/Cargo.toml b/examples/false-interpreter/platform/Cargo.toml index 85cdec4c34..9d98cfe0e8 100644 --- a/examples/false-interpreter/platform/Cargo.toml +++ b/examples/false-interpreter/platform/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "host" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" diff --git a/examples/gui/platform/Cargo.toml b/examples/gui/platform/Cargo.toml index 21c09521f8..3dfd8330e1 100644 --- a/examples/gui/platform/Cargo.toml +++ b/examples/gui/platform/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "host" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" diff --git a/examples/interactive/cli-platform/Cargo.toml b/examples/interactive/cli-platform/Cargo.toml index 582ed117ff..250b0419de 100644 --- a/examples/interactive/cli-platform/Cargo.toml +++ b/examples/interactive/cli-platform/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "host" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" diff --git a/examples/platform-switching/rust-platform/Cargo.toml b/examples/platform-switching/rust-platform/Cargo.toml index 596682c0da..8d10ce6eea 100644 --- a/examples/platform-switching/rust-platform/Cargo.toml +++ b/examples/platform-switching/rust-platform/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "host" -version = "0.1.0" +version = "0.0.1" authors = ["The Roc Contributors"] license = "UPL-1.0" edition = "2021" diff --git a/flake.nix b/flake.nix index 60a9389254..95c009be70 100644 --- a/flake.nix +++ b/flake.nix @@ -3,6 +3,7 @@ inputs = { nixpkgs.url = "github:nixos/nixpkgs/nixos-22.05"; + # rust from nixpkgs has some libc problems, this is patched in the rust-overlay rust-overlay = { url = "github:oxalica/rust-overlay"; @@ -24,7 +25,7 @@ outputs = { self, nixpkgs, rust-overlay, zig, flake-utils, nixgl }: let - supportedSystems = [ "x86_64-linux" "x86_64-darwin" "aarch64-linux" "aarch64-darwin" ]; + supportedSystems = [ "x86_64-linux" "x86_64-darwin" "aarch64-darwin" ]; in flake-utils.lib.eachSystem supportedSystems (system: let @@ -120,6 +121,7 @@ buildInputs = sharedInputs ++ darwinInputs ++ linuxInputs ++ (if system == "x86_64-linux" then [ pkgs.nixgl.nixVulkanIntel ] else []); LLVM_SYS_130_PREFIX = "${llvmPkgs.llvm.dev}"; + # nix does not store libs in /usr/lib or /lib NIX_GLIBC_PATH = if pkgs.stdenv.isLinux then "${pkgs.glibc.out}/lib" else ""; LD_LIBRARY_PATH = with pkgs; lib.makeLibraryPath diff --git a/getting_started/README.md b/getting_started/README.md new file mode 100644 index 0000000000..0cc286314f --- /dev/null +++ b/getting_started/README.md @@ -0,0 +1,46 @@ +# Getting Started + +Roc is a language for making delightful software. It does not have an 0.1 release yet, and we +certainly don't recommend using it in production in its current state! 
However, it can be fun to +play around with as long as you have a high tolerance for missing features and compiler bugs. :) + +The [tutorial](TUTORIAL.md) is the best place to learn about how to use the language - it assumes no prior knowledge of Roc or similar languages. (If you already know [Elm](https://elm-lang.org/), then [Roc for Elm Programmers](https://github.com/rtfeldman/roc/blob/trunk/roc-for-elm-programmers.md) may be of interest.) + +There's also a folder of [examples](https://github.com/rtfeldman/roc/tree/trunk/examples) - the [CLI form example](https://github.com/rtfeldman/roc/tree/trunk/examples/interactive/form.roc) in particular is a reasonable starting point to build on. + +If you have a specific question, the [FAQ](FAQ.md) might have an answer, although [Roc Zulip chat](https://roc.zulipchat.com) is overall the best place to ask questions and get help! It's also where we discuss [ideas](https://roc.zulipchat.com/#narrow/stream/304641-ideas) for the language. If you want to get involved in contributing to the language, Zulip is also a great place to ask about good first projects. + +## Installation + +- [Linux x86](getting_started/linux_x86.md) +- [MacOS Apple Silicon](getting_started/macos_apple_silicon.md) +- [MacOS x86](getting_started/macos_x86.md) +- [Windows](getting_started/windows.md) +- [Other](getting_started/other.md) + +## Running Examples + +You can run examples as follows: + +``` +$ cd examples/hello-world +$ roc run +``` + +Some examples like `examples/benchmarks/NQueens.roc` require input after running. +For NQueens, input 10 in the terminal and press enter. + +[examples/benchmarks](examples/benchmarks) contains larger examples. + +**Tip:** when programming in roc, we recommend to execute `./roc check myproject/Foo.roc` before `./roc myproject/Foo.roc` or `./roc build myproject/Foo.roc`. `./roc check` can produce clear error messages in cases where building/running may panic. + +## Getting Involved + +The number of people involved in Roc's development has been steadily increasing +over time - which has been great, because it's meant we've been able to onboard +people at a nice pace. (Most people who have contributed to Roc had previously +never done anything with Rust and also never worked on a compiler, but we've +been able to find beginner-friendly projects to get people up to speed gradually.) + +If you're interested in getting involved, check out +[CONTRIBUTING.md](https://github.com/rtfeldman/roc/blob/trunk/CONTRIBUTING.md)! diff --git a/getting_started/windows.md b/getting_started/windows.md index de3a7e1cbc..da5eab6bc1 100644 --- a/getting_started/windows.md +++ b/getting_started/windows.md @@ -1,2 +1,2 @@ -Windows is not yet supported, we have a big project in the works (see issue #2608) that will allow this. -Until then we recommend using Ubuntu through the "Windows Subsystem for Linux". \ No newline at end of file +Windows is not yet supported, we have a big project in the works (see [issue #2608](https://github.com/rtfeldman/roc/issues/2608)) that will allow this. +Until then we recommend using Ubuntu through the "Windows Subsystem for Linux". diff --git a/name-and-logo.md b/name-and-logo.md deleted file mode 100644 index f8eb46f3d4..0000000000 --- a/name-and-logo.md +++ /dev/null @@ -1,32 +0,0 @@ -The Roc logo, an origami bird - -# Name and Logo - -The Roc programming language is named after [a mythical bird](https://en.wikipedia.org/wiki/Roc_(mythology)). - -That’s why the logo is a bird. 
It’s specifically an [*origami* bird](https://youtu.be/9gni1t1k1uY) as an homage -to [Elm](https://elm-lang.org/)’s tangram logo. - -Roc is a direct descendant of Elm. The languages are similar, but not the same. -[Origami](https://en.wikipedia.org/wiki/Origami) likewise has similarities to [tangrams](https://en.wikipedia.org/wiki/Tangram), although they are not the same. -Both involve making a surprising variety of things -from simple primitives. [*Folds*](https://en.wikipedia.org/wiki/Fold_(higher-order_function)) -are also common in functional programming. - -The logo was made by tracing triangles onto a photo of a physical origami bird. -It’s made of triangles because triangles are a foundational primitive in -computer graphics. - -The name was chosen because it makes for a three-letter file extension, it means -something fantastical, and it has incredible potential for puns. - -# Different Ways to Spell Roc - -* **Roc** - traditional -* **roc** - low-key -* **ROC** - [YELLING](https://package.elm-lang.org/packages/elm/core/latest/String#toUpper) -* **Röc** - [metal 🤘](https://en.wikipedia.org/wiki/Metal_umlaut) - -# Fun Facts - -Roc translates to 鹏 in Chinese, [which means](https://www.mdbg.net/chinese/dictionary?page=worddict&wdrst=0&wdqb=%E9%B9%8F) "a large fabulous bird." diff --git a/nightly_benches/Cargo.toml b/nightly_benches/Cargo.toml index f37a09f96a..a79266821d 100644 --- a/nightly_benches/Cargo.toml +++ b/nightly_benches/Cargo.toml @@ -1,7 +1,9 @@ [package] name = "nightly_benches" -version = "0.1.0" +version = "0.0.1" edition = "2021" +authors = ["The Roc Contributors"] +license = "UPL-1.0" [dependencies] diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 5debbf317f..681b87bf62 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -5,3 +5,4 @@ components = [ # for usages of rust-analyzer or similar tools inside `nix develop` "rust-src" ] +targets = [ "x86_64-unknown-linux-gnu" ] \ No newline at end of file
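A closing note on the `unify_unspecialized_lambdas` rewrite earlier in this diff: the new code merges two already-sorted unspecialized-lambda-set lists, keeping one copy of equivalent entries and, when the heads are disjoint, keeping the smaller head while advancing only that side (the `640 645` / `670` example in the comments). The sketch below shows just that traversal discipline over plain integers, under the simplifying assumption that "equivalent" means "equal"; it is not the compiler's unification logic, which also unifies variables and accumulates outcomes.

```
// Sketch of the merge discipline described in unify_unspecialized_lambdas:
// both inputs are sorted; equal heads are de-duplicated (keep the left one),
// and when the heads differ only the side with the smaller head advances, so
// its next entry still gets compared against the other side's current head.
// Plain u32s stand in for unspecialized lambda-set variables.
fn merge_sorted_dedup(left: &[u32], right: &[u32]) -> Vec<u32> {
    let mut merged = Vec::with_capacity(left.len() + right.len());
    let (mut i, mut j) = (0, 0);

    while i < left.len() && j < right.len() {
        if left[i] == right[j] {
            // Equivalent: keep the left entry, drop the right one, advance both.
            merged.push(left[i]);
            i += 1;
            j += 1;
        } else if left[i] < right[j] {
            // Disjoint: keep the smaller head and advance only its side.
            merged.push(left[i]);
            i += 1;
        } else {
            merged.push(right[j]);
            j += 1;
        }
    }
    // One side is exhausted; the rest of the other side is already in order.
    merged.extend_from_slice(&left[i..]);
    merged.extend_from_slice(&right[j..]);

    debug_assert!(merged.windows(2).all(|w| w[0] <= w[1]), "merge must stay sorted");
    merged
}

fn main() {
    // Mirrors the comment's example: 640 is emitted first, then 645 is still
    // compared against 670 rather than being skipped.
    assert_eq!(
        merge_sorted_dedup(&[640, 645, 700], &[670, 700]),
        vec![640, 645, 670, 700]
    );
}
```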