Merge remote-tracking branch 'origin/main' into roc-dev-inline-expects

This commit is contained in:
Folkert 2022-10-16 13:56:30 +02:00
commit 323210c547
No known key found for this signature in database
GPG key ID: 1F17F6FFD112B97C
124 changed files with 5594 additions and 3837 deletions

3
.github/FUNDING.yml vendored Normal file
View file

@ -0,0 +1,3 @@
# These are supported funding model platforms
github: roc-lang

View file

@ -7,3 +7,5 @@ updates:
day: "monday"
time: "07:00"
timezone: "Europe/Brussels"
# Disable all version updates; only critical security updates will be submitted
open-pull-requests-limit: 0

View file

@ -0,0 +1,18 @@
on: [pull_request]
name: Check Markdown links
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
markdown-link-check:
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v3
- uses: gaurav-nelson/github-action-markdown-link-check@v1
with:
use-quiet-mode: 'yes'
use-verbose-mode: 'yes'
base-branch: 'main'

View file

@ -11,7 +11,10 @@ env:
jobs:
test-build-upload:
name: build, test, package and upload nightly release
runs-on: [macos-12]
strategy:
matrix:
os: [ macos-11, macos-12 ]
runs-on: ${{ matrix.os }}
timeout-minutes: 120
steps:
- uses: actions/checkout@v3
@ -35,23 +38,41 @@ jobs:
command: build
args: --release --locked
- name: execute rust tests
- name: execute rust tests if macos 12
if: endsWith(matrix.os, '12')
uses: actions-rs/cargo@v1
with:
command: test
args: --release --locked -- --skip opaque_wrap_function --skip bool_list_literal
- name: execute rust tests if macos 11
if: endsWith(matrix.os, '11')
uses: actions-rs/cargo@v1
with:
command: test
args: --release --locked -- --skip opaque_wrap_function --skip bool_list_literal --skip platform_switching_swift --skip swift_ui
# swift tests are skipped because of "Could not find or use auto-linked library 'swiftCompatibilityConcurrency'" on the macos-11 x86_64 CI machine
# this issue may be caused by using an older version of Xcode
- name: get commit SHA
run: echo "SHA=$(git rev-parse --short "$GITHUB_SHA")" >> $GITHUB_ENV
- name: get date
run: echo "DATE=$(date "+%Y-%m-%d")" >> $GITHUB_ENV
- name: get macos version if 11
if: endsWith(matrix.os, '11')
run: echo "MACOSVERSION=11" >> $GITHUB_ENV
- name: get macos version if 12
if: endsWith(matrix.os, '12')
run: echo "MACOSVERSION=12" >> $GITHUB_ENV
- name: build file name
env:
DATE: ${{ env.DATE }}
SHA: ${{ env.SHA }}
run: echo "RELEASE_TAR_FILENAME=roc_nightly-macos_x86_64-$DATE-$SHA.tar.gz" >> $GITHUB_ENV
run: echo "RELEASE_TAR_FILENAME=roc_nightly-macos_${MACOSVERSION}_x86_64-$DATE-$SHA.tar.gz" >> $GITHUB_ENV
- name: package release
run: ./ci/package_release.sh ${{ env.RELEASE_TAR_FILENAME }}

View file

@ -0,0 +1,16 @@
on:
schedule:
- cron: '0 9 * * *'
name: Nightly netlify build and deploy
jobs:
build:
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- name: trigger netlify build and deploy
env:
HOOK: ${{ secrets.NETLIFY_BUILD_HOOK }}
run: |
curl -X POST -d {} https://api.netlify.com/build_hooks/${HOOK}

View file

@ -21,3 +21,6 @@ jobs:
- name: execute tests with --release
run: /home/big-ci-user/.nix-profile/bin/nix develop -c cargo test --locked --release
- name: test wasm32 cli_run
run: /home/big-ci-user/.nix-profile/bin/nix develop -c cargo test --locked --release --features="wasm32-cli-run"

View file

@ -6,10 +6,10 @@ name: Test latest nightly release for macOS, ubu 20.04, ubu 22.04 x86_64
jobs:
test-nightly:
name: test nightly macos, ubu 20.04, ubu 22.04
name: test nightly macos 11, macos 12, ubu 20.04, ubu 22.04
strategy:
matrix:
os: [ macos-12, ubuntu-20.04, ubuntu-22.04 ]
os: [ macos-11, macos-12, ubuntu-20.04, ubuntu-22.04 ]
runs-on: ${{ matrix.os }}
timeout-minutes: 90
steps:
@ -23,13 +23,17 @@ jobs:
--header 'content-type: application/json' \
--output roc_releases.json
- name: get the url of today`s release for linux x86_64
- name: get the url of today`s release for linux (x86_64)
if: startsWith(matrix.os, 'ubuntu')
run: echo "RELEASE_URL=$(./ci/get_latest_release_url.sh linux_x86_64)" >> $GITHUB_ENV
- name: get the url of today`s release for macos x86_64
if: startsWith(matrix.os, 'macos')
run: echo "RELEASE_URL=$(./ci/get_latest_release_url.sh macos_x86_64)" >> $GITHUB_ENV
- name: get the url of today`s release for macos 11 (x86_64)
if: startsWith(matrix.os, 'macos-11')
run: echo "RELEASE_URL=$(./ci/get_latest_release_url.sh macos_11_x86_64)" >> $GITHUB_ENV
- name: get the url of today`s release for macos 12 (x86_64)
if: startsWith(matrix.os, 'macos-12')
run: echo "RELEASE_URL=$(./ci/get_latest_release_url.sh macos_12_x86_64)" >> $GITHUB_ENV
- name: get the archive from the url
run: curl -OL ${{ env.RELEASE_URL }}

View file

@ -26,6 +26,9 @@ jobs:
- name: zig fmt check, zig tests
run: cd crates/compiler/builtins/bitcode && ./run-tests.sh
- name: roc format check on builtins
run: cargo run --locked --release format --check crates/compiler/builtins/roc
- name: zig wasm tests
run: cd crates/compiler/builtins/bitcode && ./run-wasm-tests.sh
@ -42,6 +45,9 @@ jobs:
- name: run `roc test` on Str builtins
run: cargo run --locked --release -- test crates/compiler/builtins/roc/Str.roc && sccache --show-stats
- name: run `roc test` on Dict builtins
run: cargo run --locked --release -- test crates/compiler/builtins/roc/Dict.roc && sccache --show-stats
# TODO: pass --locked into the script here as well; this avoids rebuilding dependencies unnecessarily
- name: wasm repl test
run: crates/repl_test/test_wasm.sh && sccache --show-stats

18
AUTHORS
View file

@ -65,6 +65,7 @@ Mats Sigge <<mats.sigge@gmail.com>>
Drew Lazzeri <dlazzeri1@gmail.com>
Tom Dohrmann <erbse.13@gmx.de>
Elijah Schow <elijah.schow@gmail.com>
Emi Simpson <emi@alchemi.dev>
Derek Gustafson <degustaf@gmail.com>
Philippe Vinchon <p.vinchon@gmail.com>
Pierre-Henri Trivier <phtrivier@yahoo.fr>
@ -74,17 +75,21 @@ Ananda Umamil <zweimach@zweimach.org>
SylvanSign <jake.d.bray@gmail.com>
Nikita Mounier <36044205+nikitamounier@users.noreply.github.com>
Cai Bingjun <62678643+C-BJ@users.noreply.github.com>
Kevin Gillette <kgillette628@gmail.com>
Jared Cone <jared.cone@gmail.com>
Sean Hagstrom <sean@seanhagstrom.com>
Kas Buunk <kasbuunk@icloud.com>
Kas Buunk <kasbuunk@icloud.com>
Tommy Graves <tommy@rwx.com>
Oskar Hahn <mail@oshahn.de>
Nuno Ferreira <nunogcferreira@gmail.com>
Jonas Schell <jonasschell@ocupe.org>
Mfon Eti-mfon <mfonetimfon@gmail.com>
Drake Bennion <drake.bennion@gmail.com>
Hashi364 <49736221+Kiyoshi364@users.noreply.github.com>
Jared Forsyth <jared@jaredforsyth.com>
Patrick Kilgore <git@pck.email>
Marten/Qqwy <w-m@wmcode.nl>
Tobias Steckenborn <tobias.steckenborn@consolvis.de>
Christoph Rüßler <christoph.ruessler@mailbox.org>
Ralf Engbers <raleng@users.noreply.github.com>
Mostly Void <7rat13@gmail.com>
@ -97,3 +102,14 @@ Marko Vujanic <crashxx@gmail.com>
KilianVounckx <kilianvounckx@hotmail.be>
David Dunn <26876072+doubledup@users.noreply.github.com>
Jelle Besseling <jelle@pingiun.com>
isaacthefallenapple <isaacthefallenapple@gmail.com>
Bryce Miller <sandprickle@users.noreply.github.com>
Bjørn Madsen <bm@aeons.dk>
Vilem <17603372+buggymcbugfix@users.noreply.github.com>
J Teeuwissen <jelleteeuwissen@hotmail.nl>
Matthieu Pizenberg <matthieu.pizenberg@gmail.com>
rezzaghi <lbrezzaghi@gmail.com>
João Mota <jackthemotorcycle@gmail.com>
Marcos Prieto <marcospri@gmail.com>
Prajwal S N <prajwalnadig21@gmail.com>
Christopher Duncan <chris.duncan.arauz+git@protonmail.com>

View file

@ -2,7 +2,7 @@
## Code of Conduct
We are committed to providing a friendly, safe and welcoming environment for all. Make sure to take a look at the [Code of Conduct](CodeOfConduct.md)!
We are committed to providing a friendly, safe and welcoming environment for all. Make sure to take a look at the [Code of Conduct](CODE_OF_CONDUCT.md)!
## How to contribute
@ -23,7 +23,7 @@ Check [Building from source](BUILDING_FROM_SOURCE.md) for instructions.
## Running Tests
Most contributors execute the following commands befor pushing their code:
Most contributors execute the following commands before pushing their code:
```sh
cargo test

31
Cargo.lock generated
View file

@ -96,6 +96,9 @@ dependencies = [
[[package]]
name = "arena-pool"
version = "0.0.1"
dependencies = [
"roc_error_macros",
]
[[package]]
name = "arrayvec"
@ -484,7 +487,7 @@ checksum = "4bfbf56724aa9eca8afa4fcfadeb479e722935bb2a0900c2d37e0cc477af0688"
[[package]]
name = "cli_utils"
version = "0.1.0"
version = "0.0.1"
dependencies = [
"bumpalo",
"criterion",
@ -1957,9 +1960,9 @@ dependencies = [
[[package]]
name = "libc"
version = "0.2.133"
version = "0.2.135"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c0f80d65747a3e43d1596c7c5492d95d5edddaabd45a7fcdb02b95f644164966"
checksum = "68783febc7782c6c5cb401fbda4de5a9898be1762314da0bb2c10ced61f18b0c"
[[package]]
name = "libloading"
@ -2557,9 +2560,9 @@ dependencies = [
[[package]]
name = "once_cell"
version = "1.14.0"
version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2f7254b99e31cad77da24b08ebf628882739a608578bb1bcdfc1f9c21260d7c0"
checksum = "e82dad04139b71a90c080c8463fe0dc7902db5192d939bd0950f074d014339e1"
[[package]]
name = "oorandom"
@ -3285,13 +3288,14 @@ dependencies = [
"morphic_lib",
"roc_collections",
"roc_debug_flags",
"roc_error_macros",
"roc_module",
"roc_mono",
]
[[package]]
name = "roc_ast"
version = "0.1.0"
version = "0.0.1"
dependencies = [
"arrayvec 0.7.2",
"bumpalo",
@ -3380,13 +3384,14 @@ dependencies = [
"roc_parse",
"roc_problem",
"roc_region",
"roc_serialize",
"roc_types",
"static_assertions",
]
[[package]]
name = "roc_cli"
version = "0.1.0"
version = "0.0.1"
dependencies = [
"bumpalo",
"clap 3.2.20",
@ -3437,7 +3442,7 @@ dependencies = [
[[package]]
name = "roc_code_markup"
version = "0.1.0"
version = "0.0.1"
dependencies = [
"bumpalo",
"palette",
@ -4028,6 +4033,13 @@ dependencies = [
"ven_pretty",
]
[[package]]
name = "roc_serialize"
version = "0.0.1"
dependencies = [
"roc_collections",
]
[[package]]
name = "roc_solve"
version = "0.0.1"
@ -4118,6 +4130,7 @@ dependencies = [
"roc_error_macros",
"roc_module",
"roc_region",
"roc_serialize",
"static_assertions",
]
@ -5113,7 +5126,7 @@ version = "1.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675"
dependencies = [
"cfg-if 0.1.10",
"cfg-if 1.0.0",
"rand",
"static_assertions",
]

View file

@ -32,6 +32,7 @@ members = [
"crates/compiler/test_gen",
"crates/compiler/roc_target",
"crates/compiler/debug_flags",
"crates/compiler/serialize",
"crates/vendor/inkwell",
"crates/vendor/pathfinding",
"crates/vendor/pretty",

View file

@ -1239,6 +1239,77 @@ each bit. `0b0000_1000` evaluates to decimal `8`
The integer type can be specified as a suffix to the binary literal,
so `0b0100u8` evaluates to decimal `4` as an unsigned 8-bit integer.
## Tests and expectations
You can write automated tests for your Roc code like so:
```swift
pluralize = \singular, plural, count ->
countStr = Num.toStr count
if count == 1 then
"\(countStr) \(singular)"
else
"\(countStr) \(plural)"
expect pluralize "cactus" "cacti" 1 == "1 cactus"
expect pluralize "cactus" "cacti" 2 == "2 cacti"
```
If you put this in a file named `main.roc` and run `roc test`, Roc will execute the two `expect`
expressions (that is, the two `pluralize` calls) and report any that returned `false`.
### Inline `expect`s
`expect`s can also be written inline, in the middle of a function's body. For example:
```swift
if count == 1 then
"\(countStr) \(singular)"
else
expect count > 0
"\(countStr) \(plural)"
```
This `expect` will fail if you call `pluralize` passing a count of 0. It will fail
whether the inline `expect` is reached while running your program via `roc dev`
or in the course of running a test (with `roc test`).
So for example, if we added this top-level `expect`...
```swift
expect pluralize "cactus" "cacti" 0 == "0 cacti"
```
...it would hit the inline `expect count > 0`, which would then fail the test.
Note that inline `expect`s do not halt the program! They are designed to inform, not to affect
control flow. In fact, if you do `roc build`, they are not even included in the final binary.
If you try this code out, you may note that when an `expect` fails (either a top-level or inline
one), the failure message includes the values of any named variables - such as `count` here.
This leads to a useful technique, which we will see next.
### Quick debugging with inline `expect`s
An age-old debugging technique is printing out a variable to the terminal. In Roc you can use
`expect` to do this. Here's an example:
```elm
\arg ->
x = arg - 1
# Reports the value of `x` without stopping the program
expect x != x
Num.abs x
```
The failure output will include both the value of `x` as well as the comment immediately above it,
which lets you use that comment for extra context in your output.
## Interface modules
[This part of the tutorial has not been written yet. Coming soon!]
@ -1326,6 +1397,14 @@ this `imports` line tells the Roc compiler that when we call `Stdout.line`, it
should look for that `line` function in the `Stdout` module of the
`examples/cli/cli-platform/main.roc` package.
If we would like to include other modules in our application, say `AdditionalModule.roc` and `AnotherModule.roc`, then they can be imported directly in `imports` like this:
```coffee
packages { pf: "examples/cli/cli-platform/main.roc" }
imports [pf.Stdout, pf.Program, AdditionalModule, AnotherModule]
provides main to pf
```
## Tasks
Tasks are technically not part of the Roc language, but they're very common in

View file

@ -30,7 +30,7 @@ checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a"
[[package]]
name = "bench-runner"
version = "0.1.0"
version = "0.0.1"
dependencies = [
"clap",
"data-encoding",

View file

@ -1,6 +1,6 @@
[package]
name = "bench-runner"
version = "0.1.0"
version = "0.0.1"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

View file

@ -1,6 +1,6 @@
[package]
name = "roc_ast"
version = "0.1.0"
version = "0.0.1"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
edition = "2021"
@ -26,7 +26,7 @@ bumpalo = { version = "3.11.0", features = ["collections"] }
page_size = "0.4.2"
snafu = { version = "0.7.1", features = ["backtraces"] }
ven_graph = { path = "../vendor/pathfinding" }
libc = "0.2.133"
libc = "0.2.135"
[dev-dependencies]
indoc = "1.0.7"

View file

@ -1,6 +1,6 @@
[package]
name = "roc_cli"
version = "0.1.0"
version = "0.0.1"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
repository = "https://github.com/roc-lang/roc"
@ -65,7 +65,7 @@ clap = { version = "3.2.20", default-features = false, features = ["std", "color
const_format = { version = "0.2.23", features = ["const_generics"] }
bumpalo = { version = "3.11.0", features = ["collections"] }
mimalloc = { version = "0.1.26", default-features = false }
libc = "0.2.133"
libc = "0.2.135"
errno = "0.2.8"
ven_pretty = { path = "../vendor/pretty" }
@ -100,7 +100,7 @@ indoc = "1.0.7"
serial_test = "0.9.0"
criterion = { git = "https://github.com/Anton-4/criterion.rs"}
cli_utils = { path = "../cli_utils" }
once_cell = "1.14.0"
once_cell = "1.15.0"
parking_lot = "0.12"
# Wasmer singlepass compiler only works on x86_64.

View file

@ -1192,7 +1192,7 @@ impl Target {
Wasm32 => Triple {
architecture: Architecture::Wasm32,
vendor: Vendor::Unknown,
operating_system: OperatingSystem::Unknown,
operating_system: OperatingSystem::Wasi,
environment: Environment::Unknown,
binary_format: BinaryFormat::Wasm,
},

View file

@ -763,6 +763,8 @@ mod cli_run {
flags: &[&str],
expected_ending: &str,
) {
use super::{concatcp, run_roc, CMD_BUILD, TARGET_FLAG};
let mut flags = flags.to_vec();
flags.push(concatcp!(TARGET_FLAG, "=wasm32"));
@ -771,14 +773,14 @@ mod cli_run {
.iter()
.chain(flags.as_slice()),
&[],
&[],
);
if !compile_out.stderr.is_empty() {
panic!("{}", compile_out.stderr);
}
assert!(compile_out.status.success(), "bad status {:?}", compile_out);
let path = file.with_file_name(executable_filename);
let mut path = file.with_file_name(executable_filename);
path.set_extension("wasm");
let stdout = crate::run_with_wasmer(&path, stdin);
if !stdout.ends_with(expected_ending) {

View file

@ -20,7 +20,7 @@ initialModel = \start -> {
cameFrom: Dict.empty,
}
cheapestOpen : (position -> F64), Model position -> Result position {}
cheapestOpen : (position -> F64), Model position -> Result position {} | position has Eq
cheapestOpen = \costFn, model ->
model.openSet
|> Set.toList
@ -35,13 +35,13 @@ cheapestOpen = \costFn, model ->
|> Result.map .position
|> Result.mapErr (\_ -> {})
reconstructPath : Dict position position, position -> List position
reconstructPath : Dict position position, position -> List position | position has Eq
reconstructPath = \cameFrom, goal ->
when Dict.get cameFrom goal is
Err _ -> []
Ok next -> List.append (reconstructPath cameFrom next) goal
updateCost : position, position, Model position -> Model position
updateCost : position, position, Model position -> Model position | position has Eq
updateCost = \current, neighbor, model ->
newCameFrom =
Dict.insert model.cameFrom neighbor current
@ -70,7 +70,7 @@ updateCost = \current, neighbor, model ->
else
model
astar : (position, position -> F64), (position -> Set position), position, Model position -> Result (List position) {}
astar : (position, position -> F64), (position -> Set position), position, Model position -> Result (List position) {} | position has Eq
astar = \costFn, moveFn, goal, model ->
when cheapestOpen (\source -> costFn source goal) model is
Err {} -> Err {}

View file

@ -218,14 +218,14 @@ fn roc_fx_getInt_64bit() callconv(.C) GetInt {
fn roc_fx_getInt_32bit(output: *GetInt) callconv(.C) void {
if (roc_fx_getInt_help()) |value| {
const get_int = GetInt{ .is_error = false, .value = value, .error_code = false };
const get_int = GetInt{ .is_error = false, .value = value };
output.* = get_int;
} else |err| switch (err) {
error.InvalidCharacter => {
output.* = GetInt{ .is_error = true, .value = 0, .error_code = false };
output.* = GetInt{ .is_error = true, .value = 0 };
},
else => {
output.* = GetInt{ .is_error = true, .value = 0, .error_code = true };
output.* = GetInt{ .is_error = true, .value = 0 };
},
}

File diff suppressed because it is too large Load diff

View file

@ -1,6 +1,6 @@
[package]
name = "cli_utils"
version = "0.1.0"
version = "0.0.1"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
repository = "https://github.com/roc-lang/roc"

View file

@ -1,6 +1,6 @@
[package]
name = "roc_code_markup"
version = "0.1.0"
version = "0.0.1"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
edition = "2021"

View file

@ -11,3 +11,4 @@ roc_collections = {path = "../collections"}
roc_module = {path = "../module"}
roc_mono = {path = "../mono"}
roc_debug_flags = {path = "../debug_flags"}
roc_error_macros = { path = "../../error_macros" }

View file

@ -16,6 +16,8 @@ use roc_mono::layout::{
Builtin, CapturesNiche, Layout, RawFunctionLayout, STLayoutInterner, UnionLayout,
};
use roc_error_macros::internal_error;
// just using one module for now
pub const MOD_APP: ModName = ModName(b"UserApp");
@ -603,9 +605,10 @@ fn build_tuple_value(
for field in symbols.iter() {
let value_id = match env.symbols.get(field) {
None => panic!(
None => internal_error!(
"Symbol {:?} is not defined in environment {:?}",
field, &env.symbols
field,
&env.symbols
),
Some(x) => *x,
};

View file

@ -6,3 +6,6 @@ license = "UPL-1.0"
repository = "https://github.com/roc-lang/roc"
edition = "2021"
description = "A CLI for Roc"
[dependencies]
roc_error_macros = { path = "../../error_macros" }

View file

@ -1,3 +1,4 @@
use roc_error_macros::internal_error;
use std::marker::PhantomPinned;
use std::ptr::{copy_nonoverlapping, NonNull};
@ -391,6 +392,6 @@ fn verify_ownership<T>(
// The address wasn't within any of our chunks' bounds.
// Panic to avoid use-after-free errors!
panic!("Pointer ownership verification failed.");
internal_error!("Pointer ownership verification failed.");
}
}

View file

@ -61,7 +61,7 @@ pub fn link(
operating_system: OperatingSystem::Windows,
..
} => link_windows(target, output_path, input_paths, link_type),
_ => panic!("TODO gracefully handle unsupported target: {:?}", target),
_ => internal_error!("TODO gracefully handle unsupported target: {:?}", target),
}
}
@ -89,7 +89,7 @@ fn find_zig_str_path() -> PathBuf {
return zig_str_path;
}
panic!("cannot find `str.zig`. Check the source code in find_zig_str_path() to show all the paths I tried.")
internal_error!("cannot find `str.zig`. Check the source code in find_zig_str_path() to show all the paths I tried.")
}
fn find_wasi_libc_path() -> PathBuf {
@ -99,7 +99,7 @@ fn find_wasi_libc_path() -> PathBuf {
return wasi_libc_pathbuf;
}
panic!("cannot find `wasi-libc.a`")
internal_error!("cannot find `wasi-libc.a`")
}
#[cfg(all(unix, not(target_os = "macos")))]
@ -258,15 +258,15 @@ pub fn build_zig_host_native(
let zig_env_json = if zig_env_output.status.success() {
std::str::from_utf8(&zig_env_output.stdout).unwrap_or_else(|utf8_err| {
panic!(
internal_error!(
"`zig env` failed; its stderr output was invalid utf8 ({:?})",
utf8_err
);
})
} else {
match std::str::from_utf8(&zig_env_output.stderr) {
Ok(stderr) => panic!("`zig env` failed - stderr output was: {:?}", stderr),
Err(utf8_err) => panic!(
Ok(stderr) => internal_error!("`zig env` failed - stderr output was: {:?}", stderr),
Err(utf8_err) => internal_error!(
"`zig env` failed; its stderr output was invalid utf8 ({:?})",
utf8_err
),
@ -277,11 +277,11 @@ pub fn build_zig_host_native(
Ok(Value::Object(map)) => match map.get("std_dir") {
Some(Value::String(std_dir)) => PathBuf::from(Path::new(std_dir)),
_ => {
panic!("Expected JSON containing a `std_dir` String field from `zig env`, but got: {:?}", zig_env_json);
internal_error!("Expected JSON containing a `std_dir` String field from `zig env`, but got: {:?}", zig_env_json);
}
},
_ => {
panic!(
internal_error!(
"Expected JSON containing a `std_dir` field from `zig env`, but got: {:?}",
zig_env_json
);
@ -628,7 +628,7 @@ pub fn rebuild_host(
shared_lib_path,
)
}
_ => panic!("Unsupported architecture {:?}", target.architecture),
_ => internal_error!("Unsupported architecture {:?}", target.architecture),
};
validate_output("host.zig", &zig_executable(), output)
@ -962,7 +962,7 @@ fn link_linux(
}
}
Architecture::Aarch64(_) => library_path(["/lib", "ld-linux-aarch64.so.1"]),
_ => panic!(
_ => internal_error!(
"TODO gracefully handle unsupported linux architecture: {:?}",
target.architecture
),
@ -1370,13 +1370,17 @@ pub fn preprocess_host_wasm32(host_input_path: &Path, preprocessed_host_path: &P
fn validate_output(file_name: &str, cmd_name: &str, output: Output) {
if !output.status.success() {
match std::str::from_utf8(&output.stderr) {
Ok(stderr) => panic!(
Ok(stderr) => internal_error!(
"Failed to rebuild {} - stderr of the `{}` command was:\n{}",
file_name, cmd_name, stderr
file_name,
cmd_name,
stderr
),
Err(utf8_err) => panic!(
Err(utf8_err) => internal_error!(
"Failed to rebuild {} - stderr of the `{}` command was invalid utf8 ({:?})",
file_name, cmd_name, utf8_err
file_name,
cmd_name,
utf8_err
),
}
}

View file

@ -1,4 +1,5 @@
use inkwell::memory_buffer::MemoryBuffer;
use roc_error_macros::internal_error;
pub use roc_gen_llvm::llvm::build::FunctionIterator;
use roc_gen_llvm::llvm::build::{module_from_builtins, LlvmBackendMode};
use roc_gen_llvm::llvm::externs::add_default_roc_externs;
@ -335,7 +336,7 @@ fn gen_from_mono_module_llvm<'a>(
// write the ll code to a file, so we can modify it
env.module.print_to_file(&app_ll_file).unwrap();
panic!(
internal_error!(
"😱 LLVM errors when defining module; I wrote the full LLVM IR to {:?}\n\n {}",
app_ll_file,
errors.to_string(),
@ -373,10 +374,10 @@ fn gen_from_mono_module_llvm<'a>(
Err(error) => {
use std::io::ErrorKind;
match error.kind() {
ErrorKind::NotFound => panic!(
ErrorKind::NotFound => internal_error!(
r"I could not find the `debugir` tool on the PATH, install it from https://github.com/vaivaswatha/debugir"
),
_ => panic!("{:?}", error),
_ => internal_error!("{:?}", error),
}
}
}
@ -437,7 +438,7 @@ fn gen_from_mono_module_llvm<'a>(
// module.print_to_file(app_ll_file);
module.write_bitcode_to_memory()
}
_ => panic!(
_ => internal_error!(
"TODO gracefully handle unsupported architecture: {:?}",
target.architecture
),
@ -531,14 +532,14 @@ fn gen_from_mono_module_dev_wasm32<'a>(
};
let host_bytes = std::fs::read(preprocessed_host_path).unwrap_or_else(|_| {
panic!(
internal_error!(
"Failed to read host object file {}! Try setting --prebuilt-platform=false",
preprocessed_host_path.display()
)
});
let host_module = roc_gen_wasm::parse_host(arena, &host_bytes).unwrap_or_else(|e| {
panic!(
internal_error!(
"I ran into a problem with the host object file, {} at offset 0x{:x}:\n{}",
preprocessed_host_path.display(),
e.offset,

View file

@ -2,6 +2,7 @@ use inkwell::{
targets::{CodeModel, InitializationConfig, RelocMode, Target, TargetMachine, TargetTriple},
OptimizationLevel,
};
use roc_error_macros::internal_error;
use roc_mono::ir::OptLevel;
use target_lexicon::{Architecture, Environment, OperatingSystem, Triple};
@ -44,7 +45,7 @@ pub fn target_triple_str(target: &Triple) -> &'static str {
operating_system: OperatingSystem::Windows,
..
} => "x86_64-pc-windows-gnu",
_ => panic!("TODO gracefully handle unsupported target: {:?}", target),
_ => internal_error!("TODO gracefully handle unsupported target: {:?}", target),
}
}
@ -92,7 +93,7 @@ pub fn target_zig_str(target: &Triple) -> &'static str {
operating_system: OperatingSystem::Darwin,
..
} => "aarch64-apple-darwin",
_ => panic!("TODO gracefully handle unsupported target: {:?}", target),
_ => internal_error!("TODO gracefully handle unsupported target: {:?}", target),
}
}
@ -112,7 +113,7 @@ pub fn init_arch(target: &Triple) {
Architecture::Wasm32 if cfg!(feature = "target-wasm32") => {
Target::initialize_webassembly(&InitializationConfig::default());
}
_ => panic!(
_ => internal_error!(
"TODO gracefully handle unsupported target architecture: {:?}",
target.architecture
),
@ -132,7 +133,7 @@ pub fn arch_str(target: &Triple) -> &'static str {
Architecture::Aarch64(_) if cfg!(feature = "target-aarch64") => "aarch64",
Architecture::Arm(_) if cfg!(feature = "target-arm") => "arm",
Architecture::Wasm32 if cfg!(feature = "target-webassembly") => "wasm32",
_ => panic!(
_ => internal_error!(
"TODO gracefully handle unsupported target architecture: {:?}",
target.architecture
),

View file

@ -1,6 +1,5 @@
const std = @import("std");
const utils = @import("utils.zig");
const RocResult = utils.RocResult;
const UpdateMode = utils.UpdateMode;
const mem = std.mem;
const math = std.math;
@ -126,53 +125,6 @@ pub const RocList = extern struct {
return new_list;
}
// We follow roughly the [fbvector](https://github.com/facebook/folly/blob/main/folly/docs/FBVector.md) when it comes to growing a RocList.
// Here is [their growth strategy](https://github.com/facebook/folly/blob/3e0525988fd444201b19b76b390a5927c15cb697/folly/FBVector.h#L1128) for push_back:
//
// (1) initial size
// Instead of growing to size 1 from empty, fbvector allocates at least
// 64 bytes. You may still use reserve to reserve a lesser amount of
// memory.
// (2) 1.5x
// For medium-sized vectors, the growth strategy is 1.5x. See the docs
// for details.
// This does not apply to very small or very large fbvectors. This is a
// heuristic.
//
// In our case, we exposed allocate and reallocate, which will use a smart growth stategy.
// We also expose allocateExact and reallocateExact for case where a specific number of elements is requested.
// calculateCapacity should only be called in cases the list will be growing.
// requested_length should always be greater than old_capacity.
inline fn calculateCapacity(
old_capacity: usize,
requested_length: usize,
element_width: usize,
) usize {
// TODO: there are two adjustments that would likely lead to better results for Roc.
// 1. Deal with the fact we allocate an extra u64 for refcount.
// This may lead to allocating page size + 8 bytes.
// That could mean allocating an entire page for 8 bytes of data which isn't great.
// 2. Deal with the fact that we can request more than 1 element at a time.
// fbvector assumes just appending 1 element at a time when using this algorithm.
// As such, they will generally grow in a way that should better match certain memory multiple.
// This is also the normal case for roc, but we could also grow by a much larger amount.
// We may want to round to multiples of 2 or something similar.
var new_capacity: usize = 0;
if (element_width == 0) {
return requested_length;
} else if (old_capacity == 0) {
new_capacity = 64 / element_width;
} else if (old_capacity < 4096 / element_width) {
new_capacity = old_capacity * 2;
} else if (old_capacity > 4096 * 32 / element_width) {
new_capacity = old_capacity * 2;
} else {
new_capacity = (old_capacity * 3 + 1) / 2;
}
return @maximum(new_capacity, requested_length);
}
pub fn allocate(
alignment: u32,
length: usize,
@ -182,7 +134,7 @@ pub const RocList = extern struct {
return empty();
}
const capacity = calculateCapacity(0, length, element_width);
const capacity = utils.calculateCapacity(0, length, element_width);
const data_bytes = capacity * element_width;
return RocList{
.bytes = utils.allocateWithRefcount(data_bytes, alignment),
@ -191,23 +143,6 @@ pub const RocList = extern struct {
};
}
pub fn allocateExact(
alignment: u32,
length: usize,
element_width: usize,
) RocList {
if (length == 0) {
return empty();
}
const data_bytes = length * element_width;
return RocList{
.bytes = utils.allocateWithRefcount(data_bytes, alignment),
.length = length,
.capacity = length,
};
}
pub fn reallocate(
self: RocList,
alignment: u32,
@ -219,67 +154,36 @@ pub const RocList = extern struct {
if (self.capacity >= new_length) {
return RocList{ .bytes = self.bytes, .length = new_length, .capacity = self.capacity };
} else {
const new_capacity = calculateCapacity(self.capacity, new_length, element_width);
const new_capacity = utils.calculateCapacity(self.capacity, new_length, element_width);
const new_source = utils.unsafeReallocate(source_ptr, alignment, self.len(), new_capacity, element_width);
return RocList{ .bytes = new_source, .length = new_length, .capacity = new_capacity };
}
}
// TODO: Investigate the performance of this.
// Maybe we should just always reallocate to the new_length instead of expanding capacity?
const new_capacity = if (self.capacity >= new_length) self.capacity else calculateCapacity(self.capacity, new_length, element_width);
return self.reallocateFresh(alignment, new_length, new_capacity, element_width);
return self.reallocateFresh(alignment, new_length, element_width);
}
return RocList.allocate(alignment, new_length, element_width);
}
pub fn reallocateExact(
self: RocList,
alignment: u32,
new_length: usize,
element_width: usize,
) RocList {
if (self.bytes) |source_ptr| {
if (self.isUnique()) {
if (self.capacity >= new_length) {
return RocList{ .bytes = self.bytes, .length = new_length, .capacity = self.capacity };
} else {
const new_source = utils.unsafeReallocate(source_ptr, alignment, self.len(), new_length, element_width);
return RocList{ .bytes = new_source, .length = new_length, .capacity = new_length };
}
}
return self.reallocateFresh(alignment, new_length, new_length, element_width);
}
return RocList.allocateExact(alignment, new_length, element_width);
}
/// reallocate by explicitly making a new allocation and copying elements over
fn reallocateFresh(
self: RocList,
alignment: u32,
new_length: usize,
new_capacity: usize,
element_width: usize,
) RocList {
const old_length = self.length;
const delta_length = new_length - old_length;
const data_bytes = new_capacity * element_width;
const first_slot = utils.allocateWithRefcount(data_bytes, alignment);
const result = RocList.allocate(alignment, new_length, element_width);
// transfer the memory
if (self.bytes) |source_ptr| {
const dest_ptr = first_slot;
const dest_ptr = result.bytes orelse unreachable;
@memcpy(dest_ptr, source_ptr, old_length * element_width);
@memset(dest_ptr + old_length * element_width, 0, delta_length * element_width);
}
const result = RocList{
.bytes = first_slot,
.length = new_length,
.capacity = new_capacity,
};
utils.decref(self.bytes, old_length * element_width, alignment);
return result;
@ -504,7 +408,7 @@ pub fn listWithCapacity(
alignment: u32,
element_width: usize,
) callconv(.C) RocList {
var output = RocList.allocateExact(alignment, capacity, element_width);
var output = RocList.allocate(alignment, capacity, element_width);
output.length = 0;
return output;
}

View file

@ -50,7 +50,7 @@ pub const RocStr = extern struct {
// This clones the pointed-to bytes if they won't fit in a
// small string, and returns a (pointer, len) tuple which points to them.
pub fn init(bytes_ptr: [*]const u8, length: usize) RocStr {
var result = RocStr.allocate(length, length);
var result = RocStr.allocate(length);
@memcpy(result.asU8ptr(), bytes_ptr, length);
return result;
@ -70,11 +70,14 @@ pub const RocStr = extern struct {
};
}
// allocate space for a (big or small) RocStr, but put nothing in it yet
pub fn allocate(length: usize, capacity: usize) RocStr {
const result_is_big = capacity >= SMALL_STRING_SIZE;
// allocate space for a (big or small) RocStr, but put nothing in it yet.
// May have a larger capacity than the length.
pub fn allocate(length: usize) RocStr {
const element_width = 1;
const result_is_big = length >= SMALL_STRING_SIZE;
if (result_is_big) {
const capacity = utils.calculateCapacity(0, length, element_width);
return RocStr.allocateBig(length, capacity);
} else {
var string = RocStr.empty();
@ -91,25 +94,6 @@ pub const RocStr = extern struct {
}
}
// This takes ownership of the pointed-to bytes if they won't fit in a
// small string, and returns a (pointer, len) tuple which points to them.
pub fn withCapacity(length: usize) RocStr {
const roc_str_size = @sizeOf(RocStr);
if (length < roc_str_size) {
return RocStr.empty();
} else {
var new_bytes = utils.alloc(length, RocStr.alignment) catch unreachable;
var new_bytes_ptr: [*]u8 = @ptrCast([*]u8, &new_bytes);
return RocStr{
.str_bytes = new_bytes_ptr,
.str_len = length,
};
}
}
pub fn eq(self: RocStr, other: RocStr) bool {
// If they are byte-for-byte equal, they're definitely equal!
if (self.str_bytes == other.str_bytes and self.str_len == other.str_len and self.str_capacity == other.str_capacity) {
@ -169,38 +153,43 @@ pub const RocStr = extern struct {
pub fn reallocate(
self: RocStr,
new_length: usize,
new_capacity: usize,
) RocStr {
const element_width = 1;
const old_capacity = self.getCapacity();
if (self.str_bytes) |source_ptr| {
if (self.isUnique() and !self.isSmallStr()) {
const new_source = utils.unsafeReallocate(
source_ptr,
RocStr.alignment,
old_capacity,
new_capacity,
element_width,
);
return RocStr{ .str_bytes = new_source, .str_len = new_length, .str_capacity = new_capacity };
}
if (self.isSmallStr() or !self.isUnique()) {
return self.reallocateFresh(new_length);
}
return self.reallocateFresh(new_length, new_capacity);
if (self.str_bytes) |source_ptr| {
if (old_capacity > new_length) {
var output = self;
output.setLen(new_length);
return output;
}
const new_capacity = utils.calculateCapacity(old_capacity, new_length, element_width);
const new_source = utils.unsafeReallocate(
source_ptr,
RocStr.alignment,
old_capacity,
new_capacity,
element_width,
);
return RocStr{ .str_bytes = new_source, .str_len = new_length, .str_capacity = new_capacity };
}
return self.reallocateFresh(new_length);
}
/// reallocate by explicitly making a new allocation and copying elements over
pub fn reallocateFresh(
fn reallocateFresh(
self: RocStr,
new_length: usize,
new_capacity: usize,
) RocStr {
const old_length = self.len();
const delta_length = new_length - old_length;
const result = RocStr.allocate(new_length, new_capacity);
const result = RocStr.allocate(new_length);
// transfer the memory
@ -238,6 +227,14 @@ pub const RocStr = extern struct {
}
}
pub fn setLen(self: *RocStr, length: usize) void {
if (self.isSmallStr()) {
self.asU8ptr()[@sizeOf(RocStr) - 1] = @intCast(u8, length) | 0b1000_0000;
} else {
self.str_len = length;
}
}
pub fn getCapacity(self: RocStr) usize {
if (self.isSmallStr()) {
return SMALL_STR_MAX_LENGTH;
@ -1387,7 +1384,7 @@ pub fn repeat(string: RocStr, count: usize) callconv(.C) RocStr {
const bytes_len = string.len();
const bytes_ptr = string.asU8ptr();
var ret_string = RocStr.allocate(count * bytes_len, count * bytes_len);
var ret_string = RocStr.allocate(count * bytes_len);
var ret_string_ptr = ret_string.asU8ptr();
var i: usize = 0;
@ -1528,7 +1525,7 @@ fn strConcat(arg1: RocStr, arg2: RocStr) RocStr {
} else {
const combined_length = arg1.len() + arg2.len();
const result = arg1.reallocate(combined_length, combined_length);
const result = arg1.reallocate(combined_length);
@memcpy(result.asU8ptr() + arg1.len(), arg2.asU8ptr(), arg2.len());
@ -1600,7 +1597,7 @@ fn strJoinWith(list: RocListStr, separator: RocStr) RocStr {
// include size of the separator
total_size += separator.len() * (len - 1);
var result = RocStr.allocate(total_size, total_size);
var result = RocStr.allocate(total_size);
var result_ptr = result.asU8ptr();
var offset: usize = 0;
@ -2512,14 +2509,14 @@ test "capacity: big string" {
var data = RocStr.init(data_bytes, data_bytes.len);
defer data.deinit();
try expectEqual(data.getCapacity(), data_bytes.len);
try expect(data.getCapacity() >= data_bytes.len);
}
pub fn appendScalar(string: RocStr, scalar_u32: u32) callconv(.C) RocStr {
const scalar = @intCast(u21, scalar_u32);
const width = std.unicode.utf8CodepointSequenceLength(scalar) catch unreachable;
var output = string.reallocate(string.len() + width, string.len() + width);
var output = string.reallocate(string.len() + width);
var slice = output.asSliceWithCapacity();
_ = std.unicode.utf8Encode(scalar, slice[string.len() .. string.len() + width]) catch unreachable;
@ -2587,17 +2584,21 @@ test "appendScalar: big 😀" {
try expect(actual.eq(expected));
}
pub fn reserve(string: RocStr, capacity: usize) callconv(.C) RocStr {
if (capacity > string.getCapacity()) {
// expand allocation but keep string length the same
return string.reallocate(string.len(), capacity);
} else {
pub fn reserve(string: RocStr, spare: usize) callconv(.C) RocStr {
const old_length = string.len();
if (string.getCapacity() >= old_length + spare) {
return string;
} else {
var output = string.reallocate(old_length + spare);
output.setLen(old_length);
return output;
}
}
pub fn withCapacity(capacity: usize) callconv(.C) RocStr {
return RocStr.allocate(0, capacity);
var str = RocStr.allocate(capacity);
str.setLen(0);
return str;
}
pub fn getScalarUnsafe(string: RocStr, index: usize) callconv(.C) extern struct { bytesParsed: usize, scalar: u32 } {

View file

@ -238,6 +238,53 @@ inline fn decref_ptr_to_refcount(
}
}
// We follow roughly the [fbvector](https://github.com/facebook/folly/blob/main/folly/docs/FBVector.md) when it comes to growing a RocList.
// Here is [their growth strategy](https://github.com/facebook/folly/blob/3e0525988fd444201b19b76b390a5927c15cb697/folly/FBVector.h#L1128) for push_back:
//
// (1) initial size
// Instead of growing to size 1 from empty, fbvector allocates at least
// 64 bytes. You may still use reserve to reserve a lesser amount of
// memory.
// (2) 1.5x
// For medium-sized vectors, the growth strategy is 1.5x. See the docs
// for details.
// This does not apply to very small or very large fbvectors. This is a
// heuristic.
//
// In our case, we exposed allocate and reallocate, which will use a smart growth stategy.
// We also expose allocateExact and reallocateExact for case where a specific number of elements is requested.
// calculateCapacity should only be called in cases the list will be growing.
// requested_length should always be greater than old_capacity.
pub inline fn calculateCapacity(
old_capacity: usize,
requested_length: usize,
element_width: usize,
) usize {
// TODO: there are two adjustments that would likely lead to better results for Roc.
// 1. Deal with the fact we allocate an extra u64 for refcount.
// This may lead to allocating page size + 8 bytes.
// That could mean allocating an entire page for 8 bytes of data which isn't great.
// 2. Deal with the fact that we can request more than 1 element at a time.
// fbvector assumes just appending 1 element at a time when using this algorithm.
// As such, they will generally grow in a way that should better match certain memory multiple.
// This is also the normal case for roc, but we could also grow by a much larger amount.
// We may want to round to multiples of 2 or something similar.
var new_capacity: usize = 0;
if (element_width == 0) {
return requested_length;
} else if (old_capacity == 0) {
new_capacity = 64 / element_width;
} else if (old_capacity < 4096 / element_width) {
new_capacity = old_capacity * 2;
} else if (old_capacity > 4096 * 32 / element_width) {
new_capacity = old_capacity * 2;
} else {
new_capacity = (old_capacity * 3 + 1) / 2;
}
return @maximum(new_capacity, requested_length);
}
pub fn allocateWithRefcountC(
data_bytes: usize,
element_alignment: u32,
@ -292,25 +339,6 @@ pub fn unsafeReallocate(
return new_source;
}
pub const RocResult = extern struct {
bytes: ?[*]u8,
pub fn isOk(self: RocResult) bool {
// assumptions
//
// - the tag is the first field
// - the tag is usize bytes wide
// - Ok has tag_id 1, because Err < Ok
const usizes: [*]usize = @ptrCast([*]usize, @alignCast(@alignOf(usize), self.bytes));
return usizes[0] == 1;
}
pub fn isErr(self: RocResult) bool {
return !self.isOk();
}
};
pub const Ordering = enum(u8) {
EQ = 0,
GT = 1,

View file

@ -1,8 +1,34 @@
interface Bool
exposes [Bool, true, false, and, or, not, isEq, isNotEq]
exposes [Bool, Eq, true, false, and, or, not, isEq, isNotEq]
imports []
Bool := [True, False]
## A type that can be compared for total equality.
##
## Total equality means that all values of the type can be compared to each
## other, and two values `a`, `b` are identical if and only if `isEq a b` is
## `Bool.true`.
##
## Not all types support total equality. For example, an [F32] or [F64] can
## be a `NaN` ([not a number](https://en.wikipedia.org/wiki/NaN)), and the [IEEE-754](https://en.wikipedia.org/wiki/IEEE_754)
## floating point standard specifies that two `NaN`s are never equal to each other.
Eq has
## Returns `Bool.true` if the two values are equal, and `Bool.false` otherwise.
##
## `a == b` is shorthand for `Bool.isEq a b`.
##
## When `isEq` is derived by the Roc compiler, values are compared via
## structural equality. Structural equality works as follows:
##
## 1. Tags are equal if they have the same tag name, and also their contents (if any) are equal.
## 2. Records are equal if all their fields are equal.
## 3. Collections ([Str], [List], [Dict], and [Set]) are equal if they are the same length, and also all their corresponding elements are equal.
## 4. [Num](Num#Num) values are equal if their numbers are equal, with one exception: if both arguments to `isEq` are *NaN*, then `isEq` returns `Bool.false`. See `Num.isNaN` for more about *NaN*.
## 5. Functions can never be compared for structural equality. Roc cannot derive `isEq` for types that contain functions!
isEq : a, a -> Bool | a has Eq
Bool := [True, False] has [Eq { isEq: boolIsEq }]
boolIsEq = \@Bool b1, @Bool b2 -> structuralEq b1 b2
## The boolean true value.
true : Bool
@ -67,25 +93,19 @@ or : Bool, Bool -> Bool
## Returns `Bool.false` when given `Bool.true`, and vice versa.
not : Bool -> Bool
## Returns `Bool.true` if the two values are *structurally equal*, and `Bool.false` otherwise.
##
## `a == b` is shorthand for `Bool.isEq a b`
##
## Structural equality works as follows:
##
## 1. Tags are equal if they have the same tag name, and also their contents (if any) are equal.
## 2. Records are equal if all their fields are equal.
## 3. Collections ([Str], [List], [Dict], and [Set]) are equal if they are the same length, and also all their corresponding elements are equal.
## 4. [Num](Num#Num) values are equal if their numbers are equal, with one exception: if both arguments to `isEq` are *NaN*, then `isEq` returns `Bool.false`. See `Num.isNaN` for more about *NaN*.
##
## Note that `isEq` takes `'val` instead of `val`, which means `isEq` does not
## accept arguments whose types contain functions.
isEq : a, a -> Bool
## Calls [isEq] on the given values, then calls [not] on the result.
##
## `a != b` is shorthand for `Bool.isNotEq a b`
##
## Note that `isNotEq` takes `'val` instead of `val`, which means `isNotEq` does not
## accept arguments whose types contain functions.
isNotEq : a, a -> Bool
isNotEq : a, a -> Bool | a has Eq
isNotEq = \a, b -> structuralNotEq a b
# INTERNAL COMPILER USE ONLY: used to lower calls to `isEq` to structural
# equality via the `Eq` low-level for derived types.
structuralEq : a, a -> Bool
# INTERNAL COMPILER USE ONLY: used to lower calls to `isNotEq` to structural
# inequality via the `NotEq` low-level for derived types.
structuralNotEq : a, a -> Bool

View file

@ -9,6 +9,7 @@ interface Dict
insert,
len,
remove,
update,
contains,
keys,
values,
@ -17,10 +18,12 @@ interface Dict
removeAll,
]
imports [
Bool.{ Bool },
Bool.{ Bool, Eq },
Result.{ Result },
List,
Num.{ Nat },
Str,
Num.{ Nat, U64, U8 },
Hash.{ Hasher },
]
## A [dictionary](https://en.wikipedia.org/wiki/Associative_array) that lets you can associate keys with values.
@ -71,7 +74,9 @@ interface Dict
## When comparing two dictionaries for equality, they are `==` only if their both their contents and their
## orderings match. This preserves the property that if `dict1 == dict2`, you should be able to rely on
## `fn dict1 == fn dict2` also being `Bool.true`, even if `fn` relies on the dictionary's ordering.
Dict k v := List [Pair k v]
Dict k v := List [Pair k v] has [Eq { isEq: dictEq }]
dictEq = \@Dict l1, @Dict l2 -> l1 == l2
## An empty dictionary.
empty : Dict k v
@ -80,7 +85,7 @@ empty = @Dict []
withCapacity : Nat -> Dict k v
withCapacity = \n -> @Dict (List.withCapacity n)
get : Dict k v, k -> Result v [KeyNotFound]*
get : Dict k v, k -> Result v [KeyNotFound]* | k has Eq
get = \@Dict list, needle ->
when List.findFirst list (\Pair key _ -> key == needle) is
Ok (Pair _ v) ->
@ -93,7 +98,7 @@ walk : Dict k v, state, (state, k, v -> state) -> state
walk = \@Dict list, initialState, transform ->
List.walk list initialState (\state, Pair k v -> transform state k v)
insert : Dict k v, k, v -> Dict k v
insert : Dict k v, k, v -> Dict k v | k has Eq
insert = \@Dict list, k, v ->
when List.findFirstIndex list (\Pair key _ -> key == k) is
Err NotFound ->
@ -108,7 +113,7 @@ len : Dict k v -> Nat
len = \@Dict list ->
List.len list
remove : Dict k v, k -> Dict k v
remove : Dict k v, k -> Dict k v | k has Eq
remove = \@Dict list, key ->
when List.findFirstIndex list (\Pair k _ -> k == key) is
Err NotFound ->
@ -122,7 +127,31 @@ remove = \@Dict list, key ->
|> List.dropLast
|> @Dict
contains : Dict k v, k -> Bool
## Insert or remove a value in a Dict based on its presence
update : Dict k v, k, ([Present v, Missing] -> [Present v, Missing]) -> Dict k v | k has Eq
update = \dict, key, alter ->
possibleValue =
get dict key
|> Result.map Present
|> Result.withDefault Missing
when alter possibleValue is
Present value -> insert dict key value
Missing -> remove dict key
## Internal for testing only
alterValue : [Present Bool, Missing] -> [Present Bool, Missing]
alterValue = \possibleValue ->
when possibleValue is
Missing -> Present Bool.false
Present value if Bool.not value -> Present Bool.true
Present _ -> Missing
expect update empty "a" alterValue == single "a" Bool.false
expect update (single "a" Bool.false) "a" alterValue == single "a" Bool.true
expect update (single "a" Bool.true) "a" alterValue == empty
contains : Dict k v, k -> Bool | k has Eq
contains = \@Dict list, needle ->
step = \_, Pair key _val ->
if key == needle then
@ -149,18 +178,18 @@ values = \@Dict list ->
List.map list (\Pair _ v -> v)
# union : Dict k v, Dict k v -> Dict k v
insertAll : Dict k v, Dict k v -> Dict k v
insertAll : Dict k v, Dict k v -> Dict k v | k has Eq
insertAll = \xs, @Dict ys ->
List.walk ys xs (\state, Pair k v -> Dict.insertIfVacant state k v)
# intersection : Dict k v, Dict k v -> Dict k v
keepShared : Dict k v, Dict k v -> Dict k v
keepShared : Dict k v, Dict k v -> Dict k v | k has Eq
keepShared = \@Dict xs, ys ->
List.keepIf xs (\Pair k _ -> Dict.contains ys k)
|> @Dict
# difference : Dict k v, Dict k v -> Dict k v
removeAll : Dict k v, Dict k v -> Dict k v
removeAll : Dict k v, Dict k v -> Dict k v | k has Eq
removeAll = \xs, @Dict ys ->
List.walk ys xs (\state, Pair k _ -> Dict.remove state k)
@ -173,9 +202,418 @@ insertFresh = \@Dict list, k, v ->
|> List.append (Pair k v)
|> @Dict
insertIfVacant : Dict k v, k, v -> Dict k v
insertIfVacant : Dict k v, k, v -> Dict k v | k has Eq
insertIfVacant = \dict, key, value ->
if Dict.contains dict key then
dict
else
Dict.insert dict key value
# We have decided not to expose the standard roc hashing algorithm.
# This is to avoid external dependence and the need for versioning.
# The current implementation is a form of [Wyhash final3](https://github.com/wangyi-fudan/wyhash/blob/a5995b98ebfa7bd38bfadc0919326d2e7aabb805/wyhash.h).
# It is 64bit and little endian specific currently.
# TODO: wyhash is slow for large keys, use something like cityhash if the keys are too long.
# TODO: Add a builtin to distinguish big endian systems and change loading orders.
# TODO: Switch out Wymum on systems with slow 128bit multiplication.
LowLevelHasher := { originalSeed : U64, state : U64 } has [
Hasher {
addBytes,
addU8,
addU16,
addU32,
addU64,
addU128,
addI8,
addI16,
addI32,
addI64,
addI128,
complete,
},
]
# unsafe primitive that does not perform a bounds check
# TODO hide behind an InternalList.roc module
listGetUnsafe : List a, Nat -> a
createLowLevelHasher : { seed ?U64 } -> LowLevelHasher
createLowLevelHasher = \{ seed ? 0x526F_6352_616E_643F } ->
@LowLevelHasher { originalSeed: seed, state: seed }
combineState : LowLevelHasher, { a : U64, b : U64, seed : U64, length : U64 } -> LowLevelHasher
combineState = \@LowLevelHasher { originalSeed, state }, { a, b, seed, length } ->
tmp = wymix (Num.bitwiseXor wyp1 a) (Num.bitwiseXor seed b)
hash = wymix (Num.bitwiseXor wyp1 length) tmp
@LowLevelHasher { originalSeed, state: wymix state hash }
complete = \@LowLevelHasher { state } -> state
addI8 = \hasher, i8 ->
addU8 hasher (Num.toU8 i8)
addI16 = \hasher, i16 ->
addU16 hasher (Num.toU16 i16)
addI32 = \hasher, i32 ->
addU32 hasher (Num.toU32 i32)
addI64 = \hasher, i64 ->
addU64 hasher (Num.toU64 i64)
addI128 = \hasher, i128 ->
addU128 hasher (Num.toU128 i128)
# These implementations hash each value individually with the seed and then mix
# the resulting hash with the state. There are other options that may be faster
# like using the output of the last hash as the seed to the current hash.
# I am simply not sure the tradeoffs here. Theoretically this method is more sound.
# Either way, the performance will be similar and we can change this later.
addU8 = \@LowLevelHasher { originalSeed, state }, u8 ->
seed = Num.bitwiseXor originalSeed wyp0
p0 = Num.toU64 u8
a =
Num.shiftLeftBy p0 16
|> Num.bitwiseOr (Num.shiftLeftBy p0 8)
|> Num.bitwiseOr p0
b = 0
combineState (@LowLevelHasher { originalSeed, state }) { a, b, seed, length: 1 }
addU16 = \@LowLevelHasher { originalSeed, state }, u16 ->
seed = Num.bitwiseXor originalSeed wyp0
p0 = Num.bitwiseAnd u16 0xFF |> Num.toU64
p1 = Num.shiftRightZfBy u16 8 |> Num.toU64
a =
Num.shiftLeftBy p0 16
|> Num.bitwiseOr (Num.shiftLeftBy p1 8)
|> Num.bitwiseOr p1
b = 0
combineState (@LowLevelHasher { originalSeed, state }) { a, b, seed, length: 2 }
addU32 = \@LowLevelHasher { originalSeed, state }, u32 ->
seed = Num.bitwiseXor originalSeed wyp0
p0 = Num.toU64 u32
a = Num.shiftLeftBy p0 32 |> Num.bitwiseOr p0
combineState (@LowLevelHasher { originalSeed, state }) { a, b: a, seed, length: 4 }
addU64 = \@LowLevelHasher { originalSeed, state }, u64 ->
seed = Num.bitwiseXor originalSeed wyp0
p0 = Num.bitwiseAnd 0xFFFF_FFFF u64
p1 = Num.shiftRightZfBy u64 32
a = Num.shiftLeftBy p0 32 |> Num.bitwiseOr p1
b = Num.shiftLeftBy p1 32 |> Num.bitwiseOr p0
combineState (@LowLevelHasher { originalSeed, state }) { a, b, seed, length: 8 }
addU128 = \@LowLevelHasher { originalSeed, state }, u128 ->
seed = Num.bitwiseXor originalSeed wyp0
lower = u128 |> Num.toU64
upper = Num.shiftRightZfBy u128 64 |> Num.toU64
p0 = Num.bitwiseAnd 0xFFFF_FFFF lower
p1 = Num.shiftRightZfBy lower 32 |> Num.bitwiseAnd 0xFFFF_FFFF
p2 = Num.bitwiseAnd 0xFFFF_FFFF upper
p3 = Num.shiftRightZfBy upper 32 |> Num.bitwiseAnd 0xFFFF_FFFF
a = Num.shiftLeftBy p0 32 |> Num.bitwiseOr p2
b = Num.shiftLeftBy p3 32 |> Num.bitwiseOr p1
combineState (@LowLevelHasher { originalSeed, state }) { a, b, seed, length: 16 }
addBytes : LowLevelHasher, List U8 -> LowLevelHasher
addBytes = \@LowLevelHasher { originalSeed, state }, list ->
length = List.len list
seed = Num.bitwiseXor originalSeed wyp0
abs =
if length <= 16 then
if length >= 4 then
x = Num.shiftRightZfBy length 3 |> Num.shiftLeftBy 2
a = Num.bitwiseOr (wyr4 list 0 |> Num.shiftLeftBy 32) (wyr4 list x)
b =
(wyr4 list (Num.subWrap length 4) |> Num.shiftLeftBy 32)
|> Num.bitwiseOr (wyr4 list (Num.subWrap length 4 |> Num.subWrap x))
{ a, b, seed }
else if length > 0 then
{ a: wyr3 list 0 length, b: 0, seed }
else
{ a: 0, b: 0, seed }
else if length <= 48 then
hashBytesHelper16 seed list 0 length
else
hashBytesHelper48 seed seed seed list 0 length
combineState (@LowLevelHasher { originalSeed, state }) { a: abs.a, b: abs.b, seed: abs.seed, length: Num.toU64 length }
hashBytesHelper48 : U64, U64, U64, List U8, Nat, Nat -> { a : U64, b : U64, seed : U64 }
hashBytesHelper48 = \seed, see1, see2, list, index, remaining ->
newSeed = wymix (Num.bitwiseXor (wyr8 list index) wyp1) (Num.bitwiseXor (wyr8 list (Num.addWrap index 8)) seed)
newSee1 = wymix (Num.bitwiseXor (wyr8 list (Num.addWrap index 16)) wyp2) (Num.bitwiseXor (wyr8 list (Num.addWrap index 24)) see1)
newSee2 = wymix (Num.bitwiseXor (wyr8 list (Num.addWrap index 32)) wyp3) (Num.bitwiseXor (wyr8 list (Num.addWrap index 40)) see2)
newRemaining = Num.subWrap remaining 48
newIndex = Num.addWrap index 48
if newRemaining > 48 then
hashBytesHelper48 newSeed newSee1 newSee2 list newIndex newRemaining
else if newRemaining > 16 then
finalSeed = Num.bitwiseXor newSee2 (Num.bitwiseXor newSee1 newSeed)
hashBytesHelper16 finalSeed list newIndex newRemaining
else
finalSeed = Num.bitwiseXor newSee2 (Num.bitwiseXor newSee1 newSeed)
{ a: wyr8 list (Num.subWrap newRemaining 16 |> Num.addWrap newIndex), b: wyr8 list (Num.subWrap newRemaining 8 |> Num.addWrap newIndex), seed: finalSeed }
hashBytesHelper16 : U64, List U8, Nat, Nat -> { a : U64, b : U64, seed : U64 }
hashBytesHelper16 = \seed, list, index, remaining ->
newSeed = wymix (Num.bitwiseXor (wyr8 list index) wyp1) (Num.bitwiseXor (wyr8 list (Num.addWrap index 8)) seed)
newRemaining = Num.subWrap remaining 16
newIndex = Num.addWrap index 16
if newRemaining <= 16 then
{ a: wyr8 list (Num.subWrap newRemaining 16 |> Num.addWrap newIndex), b: wyr8 list (Num.subWrap newRemaining 8 |> Num.addWrap newIndex), seed: newSeed }
else
hashBytesHelper16 newSeed list newIndex newRemaining
wyp0 : U64
wyp0 = 0xa0761d6478bd642f
wyp1 : U64
wyp1 = 0xe7037ed1a0b428db
wyp2 : U64
wyp2 = 0x8ebc6af09c88c6e3
wyp3 : U64
wyp3 = 0x589965cc75374cc3
wymix : U64, U64 -> U64
wymix = \a, b ->
{ lower, upper } = wymum a b
Num.bitwiseXor lower upper
wymum : U64, U64 -> { lower : U64, upper : U64 }
wymum = \a, b ->
r = Num.toU128 a * Num.toU128 b
lower = Num.toU64 r
upper = Num.shiftRightZfBy r 64 |> Num.toU64
# This is the more robust form.
# { lower: Num.bitwiseXor a lower, upper: Num.bitwiseXor b upper }
{ lower, upper }
# Get the next 8 bytes as a U64
wyr8 : List U8, Nat -> U64
wyr8 = \list, index ->
# With seamless slices and Num.fromBytes, this should be possible to make faster and nicer.
# It would also deal with the fact that on big endian systems we want to invert the order here.
# Without seamless slices, we would need fromBytes to take an index.
p1 = listGetUnsafe list index |> Num.toU64
p2 = listGetUnsafe list (Num.addWrap index 1) |> Num.toU64
p3 = listGetUnsafe list (Num.addWrap index 2) |> Num.toU64
p4 = listGetUnsafe list (Num.addWrap index 3) |> Num.toU64
p5 = listGetUnsafe list (Num.addWrap index 4) |> Num.toU64
p6 = listGetUnsafe list (Num.addWrap index 5) |> Num.toU64
p7 = listGetUnsafe list (Num.addWrap index 6) |> Num.toU64
p8 = listGetUnsafe list (Num.addWrap index 7) |> Num.toU64
a = Num.bitwiseOr p1 (Num.shiftLeftBy p2 8)
b = Num.bitwiseOr (Num.shiftLeftBy p3 16) (Num.shiftLeftBy p4 24)
c = Num.bitwiseOr (Num.shiftLeftBy p5 32) (Num.shiftLeftBy p6 40)
d = Num.bitwiseOr (Num.shiftLeftBy p7 48) (Num.shiftLeftBy p8 56)
Num.bitwiseOr (Num.bitwiseOr a b) (Num.bitwiseOr c d)
# Get the next 4 bytes as a U64 with some shifting.
wyr4 : List U8, Nat -> U64
wyr4 = \list, index ->
p1 = listGetUnsafe list index |> Num.toU64
p2 = listGetUnsafe list (Num.addWrap index 1) |> Num.toU64
p3 = listGetUnsafe list (Num.addWrap index 2) |> Num.toU64
p4 = listGetUnsafe list (Num.addWrap index 3) |> Num.toU64
a = Num.bitwiseOr p1 (Num.shiftLeftBy p2 8)
b = Num.bitwiseOr (Num.shiftLeftBy p3 16) (Num.shiftLeftBy p4 24)
Num.bitwiseOr a b
# Get the next K bytes with some shifting.
# K must be 3 or less.
wyr3 : List U8, Nat, Nat -> U64
wyr3 = \list, index, k ->
# ((uint64_t)p[0])<<16)|(((uint64_t)p[k>>1])<<8)|p[k-1]
p1 = listGetUnsafe list index |> Num.toU64
p2 = listGetUnsafe list (Num.shiftRightZfBy k 1 |> Num.addWrap index) |> Num.toU64
p3 = listGetUnsafe list (Num.subWrap k 1 |> Num.addWrap index) |> Num.toU64
a = Num.bitwiseOr (Num.shiftLeftBy p1 16) (Num.shiftLeftBy p2 8)
Num.bitwiseOr a p3
# TODO: would be great to have table driven expects for this.
# Would also be great to have some sort of property based hasher
# where we can compare `addU*` functions to the `addBytes` function.
expect
hash =
createLowLevelHasher {}
|> addBytes []
|> complete
hash == 0x1C3F_F8BF_07F9_B0B3
expect
hash =
createLowLevelHasher {}
|> addBytes [0x42]
|> complete
hash == 0x8F9F_0A1E_E06F_0D52
expect
hash =
createLowLevelHasher {}
|> addU8 0x42
|> complete
hash == 0x8F9F_0A1E_E06F_0D52
expect
hash =
createLowLevelHasher {}
|> addBytes [0xFF, 0xFF]
|> complete
hash == 0x86CC_8B71_563F_F084
expect
hash =
createLowLevelHasher {}
|> addU16 0xFFFF
|> complete
hash == 0x86CC_8B71_563F_F084
expect
hash =
createLowLevelHasher {}
|> addBytes [0x36, 0xA7]
|> complete
hash == 0xD1A5_0F24_2536_84F8
expect
hash =
createLowLevelHasher {}
|> addU16 0xA736
|> complete
hash == 0xD1A5_0F24_2536_84F8
expect
hash =
createLowLevelHasher {}
|> addBytes [0x00, 0x00, 0x00, 0x00]
|> complete
hash == 0x3762_ACB1_7604_B541
expect
hash =
createLowLevelHasher {}
|> addU32 0x0000_0000
|> complete
hash == 0x3762_ACB1_7604_B541
expect
hash =
createLowLevelHasher {}
|> addBytes [0xA9, 0x2F, 0xEE, 0x21]
|> complete
hash == 0x20F3_3FD7_D32E_C7A9
expect
hash =
createLowLevelHasher {}
|> addU32 0x21EE_2FA9
|> complete
hash == 0x20F3_3FD7_D32E_C7A9
expect
hash =
createLowLevelHasher {}
|> addBytes [0x5D, 0x66, 0xB1, 0x8F, 0x68, 0x44, 0xC7, 0x03, 0xE1, 0xDD, 0x23, 0x34, 0xBB, 0x9A, 0x42, 0xA7]
|> complete
hash == 0xA16F_DDAA_C167_74C7
expect
hash =
createLowLevelHasher {}
|> addU128 0xA742_9ABB_3423_DDE1_03C7_4468_8FB1_665D
|> complete
hash == 0xA16F_DDAA_C167_74C7
expect
hash =
createLowLevelHasher {}
|> Hash.hashStrBytes "abcdefghijklmnopqrstuvwxyz"
|> complete
hash == 0xBEE0_A8FD_E990_D285
expect
hash =
createLowLevelHasher {}
|> Hash.hashStrBytes "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"
|> complete
hash == 0xB3C5_8528_9D82_A6EF
expect
hash =
createLowLevelHasher {}
|> Hash.hashStrBytes "1234567890123456789012345678901234567890123456789012345678901234567890"
|> complete
hash == 0xDB6B_7997_7A55_BA03
expect
hash =
createLowLevelHasher {}
|> addBytes (List.repeat 0x77 100)
|> complete
hash == 0x171F_EEE2_B764_8E5E
# Note, had to specify u8 in the lists below to avoid ability type resolution error.
# Apparently it won't pick the default integer.
expect
hash =
createLowLevelHasher {}
|> Hash.hashUnordered [8u8, 82u8, 3u8, 8u8, 24u8] List.walk
|> complete
hash == 0x999F_B530_3529_F17D
expect
hash1 =
createLowLevelHasher {}
|> Hash.hashUnordered ([0u8, 1u8, 2u8, 3u8, 4u8]) List.walk
|> complete
hash2 =
createLowLevelHasher {}
|> Hash.hashUnordered [4u8, 3u8, 2u8, 1u8, 0u8] List.walk
|> complete
hash1 == hash2
expect
hash1 =
createLowLevelHasher {}
|> Hash.hashUnordered [0u8, 1u8, 2u8, 3u8, 4u8] List.walk
|> complete
hash2 =
createLowLevelHasher {}
|> Hash.hashUnordered [4u8, 3u8, 2u8, 1u8, 0u8, 0u8] List.walk
|> complete
hash1 != hash2

View file

@ -17,6 +17,7 @@ interface Hash
complete,
hashStrBytes,
hashList,
hashUnordered,
] imports [
List,
Str,
@ -75,10 +76,33 @@ Hasher has
## Adds a string into a [Hasher] by hashing its UTF-8 bytes.
hashStrBytes = \hasher, s ->
Str.walkUtf8WithIndex s hasher \accumHasher, byte, _ ->
addU8 accumHasher byte
addBytes hasher (Str.toUtf8 s)
## Adds a list of [Hash]able elements to a [Hasher] by hashing each element.
hashList = \hasher, lst ->
List.walk lst hasher \accumHasher, elem ->
hash accumHasher elem
## Adds a container of [Hash]able elements to a [Hasher] by hashing each element.
## The container is iterated using the walk method passed in.
## The order of the elements does not affect the final hash.
hashUnordered = \hasher, container, walk ->
walk
container
0
(\accum, elem ->
x =
# Note, we intentionally copy the hasher in every iteration.
# Having the same base state is required for unordered hashing.
hasher
|> hash elem
|> complete
nextAccum = Num.addWrap accum x
if nextAccum < accum then
# we don't want to lose a bit of entropy on overflow, so add it back in.
Num.addWrap nextAccum 1
else
nextAccum
)
|> \accum -> addU64 hasher accum

View file

@ -33,7 +33,7 @@ interface Json
F64,
Dec,
},
Bool.{ Bool },
Bool.{ Bool, Eq },
Result,
]

View file

@ -62,9 +62,10 @@ interface List
sortDesc,
reserve,
walkBackwardsUntil,
countIf,
]
imports [
Bool.{ Bool },
Bool.{ Bool, Eq },
Result.{ Result },
Num.{ Nat, Num, Int },
]
@ -353,7 +354,7 @@ join = \lists ->
List.walk lists (List.withCapacity totalLength) (\state, list -> List.concat state list)
contains : List a, a -> Bool
contains : List a, a -> Bool | a has Eq
contains = \list, needle ->
List.any list (\x -> x == needle)
@ -528,6 +529,18 @@ dropIf : List a, (a -> Bool) -> List a
dropIf = \list, predicate ->
List.keepIf list (\e -> Bool.not (predicate e))
## Run the given function on each element of a list, and return the
## number of elements for which the function returned `Bool.true`.
countIf : List a, (a -> Bool) -> Nat
countIf = \list, predicate ->
walkState = \state, elem ->
if predicate elem then
state + 1
else
state
List.walk list 0 walkState
## This works like [List.map], except only the transformed values that are
## wrapped in `Ok` are kept. Any that are wrapped in `Err` are dropped.
##
@ -890,7 +903,7 @@ intersperse = \list, sep ->
## is considered to "start with" an empty list.
##
## If the first list is empty, this only returns `Bool.true` if the second list is empty.
startsWith : List elem, List elem -> Bool
startsWith : List elem, List elem -> Bool | elem has Eq
startsWith = \list, prefix ->
# TODO once we have seamless slices, verify that this wouldn't
# have better performance with a function like List.compareSublists
@ -902,7 +915,7 @@ startsWith = \list, prefix ->
## is considered to "end with" an empty list.
##
## If the first list is empty, this only returns `Bool.true` if the second list is empty.
endsWith : List elem, List elem -> Bool
endsWith : List elem, List elem -> Bool | elem has Eq
endsWith = \list, suffix ->
# TODO once we have seamless slices, verify that this wouldn't
# have better performance with a function like List.compareSublists
@ -931,7 +944,7 @@ split = \elements, userSplitIndex ->
## remaining elements after that occurrence. If the delimiter is not found, returns `Err`.
##
## List.splitFirst [Foo, Z, Bar, Z, Baz] Z == Ok { before: [Foo], after: [Bar, Baz] }
splitFirst : List elem, elem -> Result { before : List elem, after : List elem } [NotFound]*
splitFirst : List elem, elem -> Result { before : List elem, after : List elem } [NotFound]* | elem has Eq
splitFirst = \list, delimiter ->
when List.findFirstIndex list (\elem -> elem == delimiter) is
Ok index ->
@ -946,7 +959,7 @@ splitFirst = \list, delimiter ->
## remaining elements after that occurrence. If the delimiter is not found, returns `Err`.
##
## List.splitLast [Foo, Z, Bar, Z, Baz] Z == Ok { before: [Foo, Bar], after: [Baz] }
splitLast : List elem, elem -> Result { before : List elem, after : List elem } [NotFound]*
splitLast : List elem, elem -> Result { before : List elem, after : List elem } [NotFound]* | elem has Eq
splitLast = \list, delimiter ->
when List.findLastIndex list (\elem -> elem == delimiter) is
Ok index ->

View file

@ -793,7 +793,7 @@ div : Frac a, Frac a -> Frac a
divChecked : Frac a, Frac a -> Result (Frac a) [DivByZero]*
divChecked = \a, b ->
if b == 0 then
if Num.isZero b then
Err DivByZero
else
Ok (Num.div a b)
@ -802,7 +802,7 @@ divCeil : Int a, Int a -> Int a
divCeilChecked : Int a, Int a -> Result (Int a) [DivByZero]*
divCeilChecked = \a, b ->
if b == 0 then
if Num.isZero b then
Err DivByZero
else
Ok (Num.divCeil a b)
@ -827,7 +827,7 @@ divTrunc : Int a, Int a -> Int a
divTruncChecked : Int a, Int a -> Result (Int a) [DivByZero]*
divTruncChecked = \a, b ->
if b == 0 then
if Num.isZero b then
Err DivByZero
else
Ok (Num.divTrunc a b)
@ -847,7 +847,7 @@ rem : Int a, Int a -> Int a
remChecked : Int a, Int a -> Result (Int a) [DivByZero]*
remChecked = \a, b ->
if b == 0 then
if Num.isZero b then
Err DivByZero
else
Ok (Num.rem a b)
@ -1223,19 +1223,19 @@ toU64 : Int * -> U64
toU128 : Int * -> U128
## Convert an [Int] to a [Nat]. If the given number doesn't fit in [Nat], it will be truncated.
## Since #Nat has a different maximum number depending on the system you're building
## Since [Nat] has a different maximum number depending on the system you're building
## for, this may give a different answer on different systems.
##
## For example, on a 32-bit system, #Num.maxNat will return the same answer as
## [Num.maxU32]. This means that calling `Num.toNat 9_000_000_000` on a 32-bit
## system will return [Num.maxU32] instead of 9 billion, because 9 billion is
## higher than [Num.maxU32] and will not fit in a [Nat] on a 32-bit system.
## For example, on a 32-bit system, `Num.maxNat` will return the same answer as
## `Num.maxU32`. This means that calling `Num.toNat 9_000_000_000` on a 32-bit
## system will return `Num.maxU32` instead of 9 billion, because 9 billion is
## higher than `Num.maxU32` and will not fit in a [Nat] on a 32-bit system.
##
## However, calling `Num.toNat 9_000_000_000` on a 64-bit system will return
## the #Nat value of 9_000_000_000. This is because on a 64-bit system, [Nat] can
## hold up to [Num.maxU64], and 9_000_000_000 is lower than [Num.maxU64].
## the [Nat] value of 9_000_000_000. This is because on a 64-bit system, [Nat] can
## hold up to `Num.maxU64`, and 9_000_000_000 is lower than `Num.maxU64`.
##
## To convert a [Frac] to a [Nat], first call either #Num.round, #Num.ceil, or [Num.floor]
## To convert a [Frac] to a [Nat], first call either `Num.round`, `Num.ceil`, or `Num.floor`
## on it, then call this on the resulting [Int].
toNat : Int * -> Nat

View file

@ -14,9 +14,11 @@ interface Set
intersection,
difference,
]
imports [List, Bool.{ Bool }, Dict.{ Dict }, Num.{ Nat }]
imports [List, Bool.{ Bool, Eq }, Dict.{ Dict }, Num.{ Nat }]
Set k := Dict.Dict k {}
Set k := Dict.Dict k {} has [Eq { isEq: setEq }]
setEq = \@Set d1, @Set d2 -> d1 == d2
fromDict : Dict k {} -> Set k
fromDict = \dict -> @Set dict
@ -35,7 +37,7 @@ single = \key ->
## Make sure never to insert a *NaN* to a [Set]! Because *NaN* is defined to be
## unequal to *NaN*, adding a *NaN* results in an entry that can never be
## retrieved or removed from the [Set].
insert : Set k, k -> Set k
insert : Set k, k -> Set k | k has Eq
insert = \@Set dict, key ->
dict
|> Dict.insert key {}
@ -75,11 +77,11 @@ expect
actual == 3
## Drops the given element from the set.
remove : Set k, k -> Set k
remove : Set k, k -> Set k | k has Eq
remove = \@Set dict, key ->
@Set (Dict.remove dict key)
contains : Set k, k -> Bool
contains : Set k, k -> Bool | k has Eq
contains = \set, key ->
set
|> Set.toDict
@ -89,21 +91,21 @@ toList : Set k -> List k
toList = \@Set dict ->
Dict.keys dict
fromList : List k -> Set k
fromList : List k -> Set k | k has Eq
fromList = \list ->
initial = @Set (Dict.withCapacity (List.len list))
List.walk list initial \set, key -> Set.insert set key
union : Set k, Set k -> Set k
union : Set k, Set k -> Set k | k has Eq
union = \@Set dict1, @Set dict2 ->
@Set (Dict.insertAll dict1 dict2)
intersection : Set k, Set k -> Set k
intersection : Set k, Set k -> Set k | k has Eq
intersection = \@Set dict1, @Set dict2 ->
@Set (Dict.keepShared dict1 dict2)
difference : Set k, Set k -> Set k
difference : Set k, Set k -> Set k | k has Eq
difference = \@Set dict1, @Set dict2 ->
@Set (Dict.removeAll dict1 dict2)

View file

@ -44,9 +44,10 @@ interface Str
walkScalars,
walkScalarsUntil,
withCapacity,
withPrefix,
]
imports [
Bool.{ Bool },
Bool.{ Bool, Eq },
Result.{ Result },
List,
Num.{ Nat, Num, U8, U16, U32, U64, U128, I8, I16, I32, I64, I128, F32, F64, Dec },
@ -73,7 +74,7 @@ interface Str
## programming, and "extended grapheme cluster" is a mouthful, in Roc we use the
## term "grapheme" as a shorthand for the more precise "extended grapheme cluster."
##
## You can get the number of graphemes in a string by calling [Str.countGraphemes] on it:
## You can get the number of graphemes in a string by calling `Str.countGraphemes` on it:
##
## Str.countGraphemes "Roc!"
## Str.countGraphemes "折り紙"
@ -139,40 +140,44 @@ Utf8Problem : { byteIndex : Nat, problem : Utf8ByteProblem }
## Returns `Bool.true` if the string is empty, and `Bool.false` otherwise.
##
## >>> Str.isEmpty "hi!"
##
## >>> Str.isEmpty ""
## expect Str.isEmpty "hi!" == Bool.false
## expect Str.isEmpty "" == Bool.true
isEmpty : Str -> Bool
## Concatenate two [Str] values together.
##
## expect Str.concat "Hello" "World" == "HelloWorld"
concat : Str, Str -> Str
## Returns a string of the specified capacity without any content
## Returns a [Str] with the specified capacity and no content.
withCapacity : Nat -> Str
## Combine a list of strings into a single string, with a separator
## string in between each.
## Combine a [List] of [Str] into a single [Str], with a separator
## [Str] in between each.
##
## >>> Str.joinWith ["one", "two", "three"] ", "
## expect Str.joinWith ["one", "two", "three"] ", " == "one, two, three"
## expect Str.joinWith ["1", "2", "3", "4"] "." == "1.2.3.4"
joinWith : List Str, Str -> Str
## Split a string around a separator.
## Split a [Str] around a separator. Passing `""` for the separator is not
## useful; it returns the original string wrapped in a list. To split a string
## into its individual [graphemes](https://stackoverflow.com/a/27331885/4200103), use `Str.graphemes`
##
## >>> Str.split "1,2,3" ","
##
## Passing `""` for the separator is not useful; it returns the original string
## wrapped in a list.
##
## >>> Str.split "1,2,3" ""
##
## To split a string into its individual graphemes, use `Str.graphemes`
## expect Str.split "1,2,3" "," == ["1","2","3"]
## expect Str.split "1,2,3" "" == ["1,2,3"]
split : Str, Str -> List Str
## Repeat a given [Str] value [Nat] times.
##
## expect Str.repeat ">" 3 == ">>>"
repeat : Str, Nat -> Str
## Count the number of [extended grapheme clusters](http://www.unicode.org/glossary/#extended_grapheme_cluster)
## in the string.
##
## Str.countGraphemes "Roc!" # 4
## Str.countGraphemes "七巧板" # 3
## Str.countGraphemes "üïä" # 1
## expect Str.countGraphemes "Roc!" == 4
## expect Str.countGraphemes "七巧板" == 3
## expect Str.countGraphemes "üïä" == 4
countGraphemes : Str -> Nat
## If the string begins with a [Unicode code point](http://www.unicode.org/glossary/#code_point)
@ -181,7 +186,7 @@ countGraphemes : Str -> Nat
## If the given [Str] is empty, or if the given [U32] is not a valid
## code point, this will return `Bool.false`.
##
## **Performance Note:** This runs slightly faster than [Str.startsWith], so
## **Performance Note:** This runs slightly faster than `Str.startsWith`, so
## if you want to check whether a string begins with something that's representable
## in a single code point, you can use (for example) `Str.startsWithScalar '鹏'`
## instead of `Str.startsWith "鹏"`. ('鹏' evaluates to the [U32] value `40527`.)
@ -192,25 +197,25 @@ countGraphemes : Str -> Nat
startsWithScalar : Str, U32 -> Bool
## Return a [List] of the [unicode scalar values](https://unicode.org/glossary/#unicode_scalar_value)
## in the given string.
## in the given string. Strings contain only scalar values, not [surrogate code points](https://unicode.org/glossary/#surrogate_code_point),
## so this is equivalent to returning a list of the string's [code points](https://unicode.org/glossary/#code_point).
##
## (Strings contain only scalar values, not [surrogate code points](https://unicode.org/glossary/#surrogate_code_point),
## so this is equivalent to returning a list of the string's [code points](https://unicode.org/glossary/#code_point).)
## expect Str.toScalars "I ♥ Roc" == [73, 32, 9829, 32, 82, 111, 99]
toScalars : Str -> List U32
## Return a [List] of the string's [U8] UTF-8 [code units](https://unicode.org/glossary/#code_unit).
## (To split the string into a [List] of smaller [Str] values instead of [U8] values,
## see [Str.split].)
## To split the string into a [List] of smaller [Str] values instead of [U8] values,
## see `Str.split`.
##
## >>> Str.toUtf8 "👩‍👩‍👦‍👦"
##
## >>> Str.toUtf8 "Roc"
##
## >>> Str.toUtf8 "鹏"
##
## >>> Str.toUtf8 "🐦"
## expect Str.toUtf8 "鹏" == [233, 185, 143]
## expect Str.toUtf8 "🐦" == [240, 159, 144, 166]
toUtf8 : Str -> List U8
## Encode a [List] of [U8] UTF-8 [code units](https://unicode.org/glossary/#code_unit)
## into a [Str]
##
## expect Str.fromUtf8 [233, 185, 143] == Ok "鹏"
## expect Str.fromUtf8 [0xb0] == Err (BadUtf8 InvalidStartByte 0)
fromUtf8 : List U8 -> Result Str [BadUtf8 Utf8ByteProblem Nat]*
fromUtf8 = \bytes ->
result = fromUtf8RangeLowlevel bytes 0 (List.len bytes)
@ -220,6 +225,10 @@ fromUtf8 = \bytes ->
else
Err (BadUtf8 result.dProblemCode result.aByteIndex)
## Encode part of a [List] of [U8] UTF-8 [code units](https://unicode.org/glossary/#code_unit)
## into a [Str]
##
## expect Str.fromUtf8Range [72, 105, 80, 103] { start : 0, count : 2 } == Ok "Hi"
fromUtf8Range : List U8, { start : Nat, count : Nat } -> Result Str [BadUtf8 Utf8ByteProblem Nat, OutOfBounds]*
fromUtf8Range = \bytes, config ->
if config.start + config.count <= List.len bytes then
@ -241,64 +250,210 @@ FromUtf8Result : {
fromUtf8RangeLowlevel : List U8, Nat, Nat -> FromUtf8Result
## Check if the given [Str] starts with a value.
##
## expect Str.startsWith "ABC" "A" == Bool.true
## expect Str.startsWith "ABC" "X" == Bool.false
startsWith : Str, Str -> Bool
## Check if the given [Str] ends with a value.
##
## expect Str.endsWith "ABC" "C" == Bool.true
## expect Str.endsWith "ABC" "X" == Bool.false
endsWith : Str, Str -> Bool
## Return the string with any blank spaces removed from both the beginning
## Return the [Str] with all whitespace removed from both the beginning
## as well as the end.
##
## expect Str.trim " Hello \n\n" == "Hello"
trim : Str -> Str
## Return the [Str] with all whitespace removed from the beginning.
##
## expect Str.trimLeft " Hello \n\n" == "Hello \n\n"
trimLeft : Str -> Str
## Return the [Str] with all whitespace removed from the end.
##
## expect Str.trimRight " Hello \n\n" == " Hello"
trimRight : Str -> Str
## Encode a [Str] to a [Dec]. A [Dec] value is a 128-bit decimal
## [fixed-point number](https://en.wikipedia.org/wiki/Fixed-point_arithmetic).
##
## expect Str.toDec "10" == Ok 10dec
## expect Str.toDec "-0.25" == Ok -0.25dec
## expect Str.toDec "not a number" == Err InvalidNumStr
toDec : Str -> Result Dec [InvalidNumStr]*
toDec = \string -> strToNumHelp string
## Encode a [Str] to a [F64]. A [F64] value is a 64-bit
## [floating-point number](https://en.wikipedia.org/wiki/IEEE_754) and can be
## specified with a `f64` suffix.
##
## expect Str.toF64 "0.10" == Ok 0.10f64
## expect Str.toF64 "not a number" == Err InvalidNumStr
toF64 : Str -> Result F64 [InvalidNumStr]*
toF64 = \string -> strToNumHelp string
## Encode a [Str] to a [F32]. A [F32] value is a 32-bit
## [floating-point number](https://en.wikipedia.org/wiki/IEEE_754) and can be
## specified with a `f32` suffix.
##
## expect Str.toF32 "0.10" == Ok 0.10f32
## expect Str.toF32 "not a number" == Err InvalidNumStr
toF32 : Str -> Result F32 [InvalidNumStr]*
toF32 = \string -> strToNumHelp string
## Convert a [Str] to a [Nat]. If the given number doesn't fit in [Nat], it will be [truncated](https://www.ualberta.ca/computing-science/media-library/teaching-resources/java/truncation-rounding.html).
## [Nat] has a different maximum number depending on the system you're building
## for, so this may give a different answer on different systems.
##
## For example, on a 32-bit system, `Num.maxNat` will return the same answer as
## `Num.maxU32`. This means that calling `Str.toNat "9_000_000_000"` on a 32-bit
## system will return `Num.maxU32` instead of 9 billion, because 9 billion is
## larger than `Num.maxU32` and will not fit in a [Nat] on a 32-bit system.
##
## Calling `Str.toNat "9_000_000_000"` on a 64-bit system will return
## the [Nat] value of 9_000_000_000. This is because on a 64-bit system, [Nat] can
## hold up to `Num.maxU64`, and 9_000_000_000 is smaller than `Num.maxU64`.
##
## expect Str.toNat "9_000_000_000" == Ok 9000000000
## expect Str.toNat "not a number" == Err InvalidNumStr
toNat : Str -> Result Nat [InvalidNumStr]*
toNat = \string -> strToNumHelp string
## Encode a [Str] to an unsigned [U128] integer. A [U128] value can hold numbers
## from `0` to `340_282_366_920_938_463_463_374_607_431_768_211_455` (over
## 340 undecillion). It can be specified with a u128 suffix.
##
## expect Str.toU128 "1500" == Ok 1500u128
## expect Str.toU128 "0.1" == Err InvalidNumStr
## expect Str.toU128 "-1" == Err InvalidNumStr
## expect Str.toU128 "not a number" == Err InvalidNumStr
toU128 : Str -> Result U128 [InvalidNumStr]*
toU128 = \string -> strToNumHelp string
## Encode a [Str] to a signed [I128] integer. A [I128] value can hold numbers
## from `-170_141_183_460_469_231_731_687_303_715_884_105_728` to
## `170_141_183_460_469_231_731_687_303_715_884_105_727`. It can be specified
## with a i128 suffix.
##
## expect Str.toI128 "1500" == Ok 1500i128
## expect Str.toI128 "-1" == Ok -1i128
## expect Str.toI128 "0.1" == Err InvalidNumStr
## expect Str.toI128 "not a number" == Err InvalidNumStr
toI128 : Str -> Result I128 [InvalidNumStr]*
toI128 = \string -> strToNumHelp string
## Encode a [Str] to an unsigned [U64] integer. A [U64] value can hold numbers
## from `0` to `18_446_744_073_709_551_615` (over 18 quintillion). It
## can be specified with a u64 suffix.
##
## expect Str.toU64 "1500" == Ok 1500u64
## expect Str.toU64 "0.1" == Err InvalidNumStr
## expect Str.toU64 "-1" == Err InvalidNumStr
## expect Str.toU64 "not a number" == Err InvalidNumStr
toU64 : Str -> Result U64 [InvalidNumStr]*
toU64 = \string -> strToNumHelp string
## Encode a [Str] to a signed [I64] integer. A [I64] value can hold numbers
## from `-9_223_372_036_854_775_808` to `9_223_372_036_854_775_807`. It can be
## specified with a i64 suffix.
##
## expect Str.toI64 "1500" == Ok 1500i64
## expect Str.toI64 "-1" == Ok -1i64
## expect Str.toI64 "0.1" == Err InvalidNumStr
## expect Str.toI64 "not a number" == Err InvalidNumStr
toI64 : Str -> Result I64 [InvalidNumStr]*
toI64 = \string -> strToNumHelp string
## Encode a [Str] to an unsigned [U32] integer. A [U32] value can hold numbers
## from `0` to `4_294_967_295` (over 4 billion). It can be specified with
## a u32 suffix.
##
## expect Str.toU32 "1500" == Ok 1500u32
## expect Str.toU32 "0.1" == Err InvalidNumStr
## expect Str.toU32 "-1" == Err InvalidNumStr
## expect Str.toU32 "not a number" == Err InvalidNumStr
toU32 : Str -> Result U32 [InvalidNumStr]*
toU32 = \string -> strToNumHelp string
## Encode a [Str] to a signed [I32] integer. A [I32] value can hold numbers
## from `-2_147_483_648` to `2_147_483_647`. It can be
## specified with a i32 suffix.
##
## expect Str.toI32 "1500" == Ok 1500i32
## expect Str.toI32 "-1" == Ok -1i32
## expect Str.toI32 "0.1" == Err InvalidNumStr
## expect Str.toI32 "not a number" == Err InvalidNumStr
toI32 : Str -> Result I32 [InvalidNumStr]*
toI32 = \string -> strToNumHelp string
## Encode a [Str] to an unsigned [U16] integer. A [U16] value can hold numbers
## from `0` to `65_535`. It can be specified with a u16 suffix.
##
## expect Str.toU16 "1500" == Ok 1500u16
## expect Str.toU16 "0.1" == Err InvalidNumStr
## expect Str.toU16 "-1" == Err InvalidNumStr
## expect Str.toU16 "not a number" == Err InvalidNumStr
toU16 : Str -> Result U16 [InvalidNumStr]*
toU16 = \string -> strToNumHelp string
## Encode a [Str] to a signed [I16] integer. A [I16] value can hold numbers
## from `-32_768` to `32_767`. It can be
## specified with a i16 suffix.
##
## expect Str.toI16 "1500" == Ok 1500i16
## expect Str.toI16 "-1" == Ok -1i16
## expect Str.toI16 "0.1" == Err InvalidNumStr
## expect Str.toI16 "not a number" == Err InvalidNumStr
toI16 : Str -> Result I16 [InvalidNumStr]*
toI16 = \string -> strToNumHelp string
## Encode a [Str] to an unsigned [U8] integer. A [U8] value can hold numbers
## from `0` to `255`. It can be specified with a u8 suffix.
##
## expect Str.toU8 "250" == Ok 250u8
## expect Str.toU8 "-0.1" == Err InvalidNumStr
## expect Str.toU8 "not a number" == Err InvalidNumStr
## expect Str.toU8 "1500" == Err InvalidNumStr
toU8 : Str -> Result U8 [InvalidNumStr]*
toU8 = \string -> strToNumHelp string
## Encode a [Str] to a signed [I8] integer. A [I8] value can hold numbers
## from `-128` to `127`. It can be
## specified with a i8 suffix.
##
## expect Str.toI8 "-15" == Ok -15i8
## expect Str.toI8 "150.00" == Err InvalidNumStr
## expect Str.toI8 "not a number" == Err InvalidNumStr
toI8 : Str -> Result I8 [InvalidNumStr]*
toI8 = \string -> strToNumHelp string
## Gets the byte at the given index, without performing a bounds check
## Get the byte at the given index, without performing a bounds check.
getUnsafe : Str, Nat -> U8
## gives the number of string bytes
## Gives the number of bytes in a [Str] value.
##
## expect Str.countUtf8Bytes "Hello World" == 11
countUtf8Bytes : Str -> Nat
## string slice that does not do bounds checking or utf-8 verification
substringUnsafe : Str, Nat, Nat -> Str
## Returns the string with each occurrence of a substring replaced with a replacement.
## If the substring is not found, returns `Err NotFound`.
## Returns the given [Str] with each occurrence of a substring replaced.
## Returns [Err NotFound] if the substring is not found.
##
## Str.replaceEach "foo/bar/baz" "/" "_" == Ok "foo_bar_baz"
## expect Str.replaceEach "foo/bar/baz" "/" "_" == Ok "foo_bar_baz"
## expect Str.replaceEach "not here" "/" "_" == Err NotFound
replaceEach : Str, Str, Str -> Result Str [NotFound]*
replaceEach = \haystack, needle, flower ->
when splitFirst haystack needle is
Ok { before, after } ->
# We found at least one needle, so start the buffer off with
# `before` followed by the first replacement flower.
Str.reserve "" (Str.countUtf8Bytes haystack)
Str.withCapacity (Str.countUtf8Bytes haystack)
|> Str.concat before
|> Str.concat flower
|> replaceEachHelp after needle flower
@ -319,10 +474,11 @@ replaceEachHelp = \buf, haystack, needle, flower ->
expect Str.replaceEach "abXdeXghi" "X" "_" == Ok "ab_de_ghi"
## Returns the string with the first occurrence of a substring replaced with a replacement.
## If the substring is not found, returns `Err NotFound`.
## Returns the given [Str] with the first occurrence of a substring replaced.
## Returns [Err NotFound] if the substring is not found.
##
## Str.replaceFirst "foo/bar/baz" "/" "_" == Ok "foo_bar/baz"
## expect Str.replaceFirst "foo/bar/baz" "/" "_" == Ok "foo_bar/baz"
## expect Str.replaceFirst "no slashes here" "/" "_" == Err NotFound
replaceFirst : Str, Str, Str -> Result Str [NotFound]*
replaceFirst = \haystack, needle, flower ->
when splitFirst haystack needle is
@ -333,10 +489,11 @@ replaceFirst = \haystack, needle, flower ->
expect Str.replaceFirst "abXdeXghi" "X" "_" == Ok "ab_deXghi"
## Returns the string with the last occurrence of a substring replaced with a replacement.
## If the substring is not found, returns `Err NotFound`.
## Returns the given [Str] with the last occurrence of a substring replaced.
## Returns [Err NotFound] if the substring is not found.
##
## Str.replaceLast "foo/bar/baz" "/" "_" == Ok "foo/bar_baz"
## expect Str.replaceLast "foo/bar/baz" "/" "_" == Ok "foo/bar_baz"
## expect Str.replaceLast "no slashes here" "/" "_" == Err NotFound
replaceLast : Str, Str, Str -> Result Str [NotFound]*
replaceLast = \haystack, needle, flower ->
when splitLast haystack needle is
@ -347,10 +504,12 @@ replaceLast = \haystack, needle, flower ->
expect Str.replaceLast "abXdeXghi" "X" "_" == Ok "abXde_ghi"
## Returns the string before the first occurrence of a delimiter, as well as the
## rest of the string after that occurrence. If the delimiter is not found, returns `Err`.
## Returns the given [Str] before the first occurrence of a [delimiter](https://www.computerhope.com/jargon/d/delimite.htm), as well
## as the rest of the string after that occurrence.
## Returns [Err NotFound] if the delimiter is not found.
##
## Str.splitFirst "foo/bar/baz" "/" == Ok { before: "foo", after: "bar/baz" }
## expect Str.splitFirst "foo/bar/baz" "/" == Ok { before: "foo", after: "bar/baz" }
## expect Str.splitFirst "no slashes here" "/" == Err NotFound
splitFirst : Str, Str -> Result { before : Str, after : Str } [NotFound]*
splitFirst = \haystack, needle ->
when firstMatch haystack needle is
@ -398,10 +557,12 @@ firstMatchHelp = \haystack, needle, index, lastPossible ->
else
None
## Returns the string before the last occurrence of a delimiter, as well as the
## rest of the string after that occurrence. If the delimiter is not found, returns `Err`.
## Returns the given [Str] before the last occurrence of a delimiter, as well as
## the rest of the string after that occurrence.
## Returns [Err NotFound] if the delimiter is not found.
##
## Str.splitLast "foo/bar/baz" "/" == Ok { before: "foo/bar", after: "baz" }
## expect Str.splitLast "foo/bar/baz" "/" == Ok { before: "foo/bar", after: "baz" }
## expect Str.splitLast "no slashes here" "/" == Err NotFound
splitLast : Str, Str -> Result { before : Str, after : Str } [NotFound]*
splitLast = \haystack, needle ->
when lastMatch haystack needle is
@ -487,8 +648,13 @@ matchesAtHelp = \state ->
doesThisMatch && doesRestMatch
## Walks over the string's UTF-8 bytes, calling a function which updates a state using each
## UTF-8 `U8` byte as well as the index of that byte within the string.
## Walks over the `UTF-8` bytes of the given [Str] and calls a function to update
## state for each byte. The index for that byte in the string is provided
## to the update function.
##
## f : List U8, U8, Nat -> List U8
## f = \state, byte, _ -> List.append state byte
## expect Str.walkUtf8WithIndex "ABC" [] f == [65, 66, 67]
walkUtf8WithIndex : Str, state, (state, U8, Nat -> state) -> state
walkUtf8WithIndex = \string, state, step ->
walkUtf8WithIndexHelp string state step 0 (Str.countUtf8Bytes string)
@ -503,12 +669,17 @@ walkUtf8WithIndexHelp = \string, state, step, index, length ->
else
state
## Make sure at least some number of bytes fit in this string without reallocating
## Enlarge the given [Str] so that at least the given number of additional bytes fit without reallocating.
reserve : Str, Nat -> Str
## is UB when the scalar is invalid
appendScalarUnsafe : Str, U32 -> Str
## Append a [U32] scalar to the given [Str]. If the given scalar is not a valid
## unicode value, it will return [Err InvalidScalar].
##
## expect Str.appendScalar "H" 105 == Ok "Hi"
## expect Str.appendScalar "😢" 0xabcdef == Err InvalidScalar
appendScalar : Str, U32 -> Result Str [InvalidScalar]*
appendScalar = \string, scalar ->
if isValidScalar scalar then
@ -522,6 +693,12 @@ isValidScalar = \scalar ->
getScalarUnsafe : Str, Nat -> { scalar : U32, bytesParsed : Nat }
## Walks over the unicode [U32] values for the given [Str] and calls a function
## to update state for each.
##
## f : List U32, U32 -> List U32
## f = \state, scalar -> List.append state scalar
## expect Str.walkScalars "ABC" [] f == [65, 66, 67]
walkScalars : Str, state, (state, U32 -> state) -> state
walkScalars = \string, init, step ->
walkScalarsHelp string init step 0 (Str.countUtf8Bytes string)
@ -536,6 +713,18 @@ walkScalarsHelp = \string, state, step, index, length ->
else
state
## Walks over the unicode [U32] values for the given [Str] and calls a function
## to update state for each.
##
## f : List U32, U32 -> [Break (List U32), Continue (List U32)]
## f = \state, scalar ->
## check = 66
## if scalar == check then
## Break [check]
## else
## Continue (List.append state scalar)
## expect Str.walkScalarsUntil "ABC" [] f == [66]
## expect Str.walkScalarsUntil "AxC" [] f == [65, 120, 67]
walkScalarsUntil : Str, state, (state, U32 -> [Break state, Continue state]) -> state
walkScalarsUntil = \string, init, step ->
walkScalarsUntilHelp string init step 0 (Str.countUtf8Bytes string)
@ -565,3 +754,9 @@ strToNumHelp = \string ->
Ok result.aresult
else
Err InvalidNumStr
## Adds a prefix to the given [Str]. Note that the prefix is the *second*
## argument: the result is the prefix followed by the original string.
##
## expect Str.withPrefix "Awesome" "Roc" == "RocAwesome"
withPrefix : Str, Str -> Str
withPrefix = \str, prefix -> Str.concat prefix str

View file

@ -14,6 +14,7 @@ roc_module = { path = "../module" }
roc_parse = { path = "../parse" }
roc_problem = { path = "../problem" }
roc_types = { path = "../types" }
roc_serialize = { path = "../serialize" }
bumpalo = { version = "3.11.0", features = ["collections"] }
static_assertions = "1.1.0"
bitvec = "1"

View file

@ -9,6 +9,14 @@ use roc_types::{
types::{MemberImpl, Type},
};
/// During type solving and monomorphization, a module must know how its imported ability
/// implementations are resolved - are they derived, or have a concrete implementation?
///
/// Unfortunately we cannot keep this information opaque, as it's important for properly
/// restoring specialization lambda sets. As such, we need to export implementation information,
/// which is the job of this structure.
pub type ResolvedImplementations = VecMap<ImplKey, ResolvedImpl>;
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct MemberVariables {
pub able_vars: Vec<Variable>,
@ -20,7 +28,7 @@ pub struct MemberVariables {
/// The member and its signature is defined locally, in the module the store is created for.
/// We need to instantiate and introduce this during solving.
#[derive(Debug, Clone)]
#[derive(Debug, Clone, PartialEq)]
pub struct ResolvedMemberType(Variable);
/// Member type information that needs to be resolved from imports.
@ -48,7 +56,7 @@ impl ResolvePhase for Pending {
type MemberType = PendingMemberType;
}
#[derive(Default, Debug, Clone, Copy)]
#[derive(Default, Debug, Clone, Copy, PartialEq)]
pub struct Resolved;
impl ResolvePhase for Resolved {
type MemberType = ResolvedMemberType;
@ -57,7 +65,7 @@ impl ResolvePhase for Resolved {
/// Stores information about an ability member definition, including the parent ability, the
/// defining type, and what type variables need to be instantiated with instances of the ability.
// TODO: SoA and put me in an arena
#[derive(Debug, Clone)]
#[derive(Debug, Clone, PartialEq)]
pub struct AbilityMemberData<Phase: ResolvePhase> {
pub parent_ability: Symbol,
pub region: Region,
@ -82,7 +90,7 @@ impl AbilityMemberData<Resolved> {
pub type SpecializationLambdaSets = VecMap<u8, Variable>;
/// A particular specialization of an ability member.
#[derive(Debug, Clone)]
#[derive(Debug, Clone, PartialEq)]
pub struct MemberSpecializationInfo<Phase: ResolvePhase> {
_phase: std::marker::PhantomData<Phase>,
pub symbol: Symbol,
@ -100,6 +108,7 @@ impl MemberSpecializationInfo<Resolved> {
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[repr(transparent)]
pub struct SpecializationId(NonZeroU32);
static_assertions::assert_eq_size!(SpecializationId, Option<SpecializationId>);
@ -469,8 +478,19 @@ impl IAbilitiesStore<Resolved> {
pub fn get_resolved(&self, id: SpecializationId) -> Option<Symbol> {
self.resolved_specializations.get(&id).copied()
}
pub fn serialize(&self, writer: &mut impl std::io::Write) -> std::io::Result<usize> {
serialize::serialize(self, writer)
}
pub fn deserialize(bytes: &[u8]) -> (Self, usize) {
serialize::deserialize(bytes)
}
}
pub use serialize::deserialize_solved_implementations;
pub use serialize::serialize_solved_implementations;
impl IAbilitiesStore<Pending> {
pub fn import_implementation(&mut self, impl_key: ImplKey, resolved_impl: &ResolvedImpl) {
let member_impl = match resolved_impl {
@ -484,8 +504,12 @@ impl IAbilitiesStore<Pending> {
let old_declared_impl = self.declared_implementations.insert(impl_key, member_impl);
debug_assert!(
old_declared_impl.is_none(),
"Replacing existing declared impl!"
old_declared_impl.is_none() ||
// Can happen because we import declared implementations during canonicalization, but
// receive implementation information only after solving
old_declared_impl.unwrap() == member_impl,
"Replacing existing declared impl: {:?}",
(impl_key, old_declared_impl)
);
}
@ -660,3 +684,700 @@ impl IAbilitiesStore<Pending> {
}
}
}
mod serialize {
use roc_collections::{MutMap, VecMap};
use roc_module::symbol::Symbol;
use roc_region::all::Region;
use roc_serialize::bytes;
use roc_types::{
subs::{SubsSlice, Variable},
types::MemberImpl,
};
use super::{
AbilitiesStore, AbilityMemberData, ImplKey, MemberSpecializationInfo, Resolved,
ResolvedImpl, ResolvedImplementations, ResolvedMemberType, SpecializationId,
};
use std::io::{self, Write};
/// Fixed-size, `repr(C)` preamble of a serialized `AbilitiesStore`.
///
/// Each field records the number of entries in the matching section that
/// follows the header in the byte stream — except `next_specialization_id`,
/// which stores the counter's value itself (see `Header::from_store`).
#[repr(C)]
#[derive(Clone, Copy, Debug)]
struct Header {
    members_of_ability: u64,
    specialization_to_root: u64,
    ability_members: u64,
    declared_implementations: u64,
    specializations: u64,
    next_specialization_id: u64,
    resolved_specializations: u64,
}
impl Header {
    /// Build a header by recording the entry count of every section of `store`
    /// (and the raw value of the specialization-id counter).
    fn from_store(store: &AbilitiesStore) -> Self {
        // Exhaustive destructuring so adding a field to AbilitiesStore forces
        // this serialization code to be updated.
        let AbilitiesStore {
            members_of_ability,
            specialization_to_root,
            ability_members,
            declared_implementations,
            specializations,
            next_specialization_id,
            resolved_specializations,
        } = store;

        Self {
            members_of_ability: members_of_ability.len() as _,
            specialization_to_root: specialization_to_root.len() as _,
            ability_members: ability_members.len() as _,
            declared_implementations: declared_implementations.len() as _,
            specializations: specializations.len() as _,
            next_specialization_id: next_specialization_id.get() as _,
            resolved_specializations: resolved_specializations.len() as _,
        }
    }

    /// Reinterpret the header as raw bytes, for writing to the stream.
    fn to_array(self) -> [u8; std::mem::size_of::<Self>()] {
        // Safety: With repr(c) all fields are in order and properly aligned without padding.
        unsafe { std::mem::transmute(self) }
    }

    /// Rebuild a header from the raw bytes produced by `to_array`.
    fn from_array(array: [u8; std::mem::size_of::<Self>()]) -> Self {
        // Safety: With repr(c) all fields are in order and properly aligned without padding.
        unsafe { std::mem::transmute(array) }
    }
}
/// Write `store` to `writer` as a fixed-size `Header` followed by one section
/// per field, in the order the header declares them.
///
/// Returns the total number of bytes written.
pub(super) fn serialize(store: &AbilitiesStore, writer: &mut impl Write) -> io::Result<usize> {
    // The header goes first so `deserialize` can learn every section's length
    // before reading the sections themselves.
    let header_bytes = Header::from_store(store).to_array();
    writer.write_all(&header_bytes)?;
    let mut written = header_bytes.len();

    // Exhaustive destructuring: a new AbilitiesStore field will not compile
    // until it is handled here.
    let AbilitiesStore {
        members_of_ability,
        specialization_to_root,
        ability_members,
        declared_implementations,
        specializations,
        next_specialization_id: _, // written in the header
        resolved_specializations,
    } = store;

    // Append each section, threading the running byte count through.
    written = serialize_members_of_ability(members_of_ability, writer, written)?;
    written = serialize_specializations_to_root(specialization_to_root, writer, written)?;
    written = serialize_ability_members(ability_members, writer, written)?;
    written = serialize_declared_implementations(declared_implementations, writer, written)?;
    written = serialize_specializations(specializations, writer, written)?;
    written = serialize_resolved_specializations(resolved_specializations, writer, written)?;

    Ok(written)
}
/// Read an `AbilitiesStore` back out of `bytes`, returning the store and the
/// number of bytes consumed. Must mirror `serialize` exactly.
pub(super) fn deserialize(bytes: &[u8]) -> (AbilitiesStore, usize) {
    // The fixed-size header comes first and tells us how long each section is.
    let header_len = std::mem::size_of::<Header>();
    let header = Header::from_array(bytes[..header_len].try_into().unwrap());
    let offset = header_len;

    // Read the sections back in the same order `serialize` wrote them.
    let (members_of_ability, offset) =
        deserialize_members_of_ability(bytes, header.members_of_ability as _, offset);
    let (specialization_to_root, offset) =
        deserialize_specialization_to_root(bytes, header.specialization_to_root as _, offset);
    let (ability_members, offset) =
        deserialize_ability_members(bytes, header.ability_members as _, offset);
    let (declared_implementations, offset) =
        deserialize_declared_implementations(bytes, header.declared_implementations as _, offset);
    let (specializations, offset) =
        deserialize_specializations(bytes, header.specializations as _, offset);
    let (resolved_specializations, offset) =
        deserialize_resolved_specializations(bytes, header.resolved_specializations as _, offset);

    let store = AbilitiesStore {
        members_of_ability,
        specialization_to_root,
        ability_members,
        declared_implementations,
        specializations,
        next_specialization_id: (header.next_specialization_id as u32).try_into().unwrap(),
        resolved_specializations,
    };
    (store, offset)
}
/// Serialize the ability -> member-symbols map: keys as a flat slice, values
/// as a slice of slices.
fn serialize_members_of_ability(
    members_of_ability: &MutMap<Symbol, Vec<Symbol>>,
    writer: &mut impl Write,
    written: usize,
) -> io::Result<usize> {
    bytes::serialize_map(
        members_of_ability,
        |keys, writer, written| bytes::serialize_slice(keys, writer, written),
        |values, writer, written| bytes::serialize_slice_of_slices(values, writer, written),
        writer,
        written,
    )
}
/// Inverse of `serialize_members_of_ability`: read `length` entries starting
/// at `offset`, returning the map and the new offset.
fn deserialize_members_of_ability(
    bytes: &[u8],
    length: usize,
    offset: usize,
) -> (MutMap<Symbol, Vec<Symbol>>, usize) {
    bytes::deserialize_map(
        bytes,
        |bytes, length, offset| bytes::deserialize_vec(bytes, length, offset),
        |bytes, length, offset| bytes::deserialize_slice_of_slices(bytes, length, offset),
        length,
        offset,
    )
}
#[derive(Clone, Copy)]
#[repr(C)]
struct SerImplKey(Symbol, Symbol);
impl From<&ImplKey> for SerImplKey {
fn from(k: &ImplKey) -> Self {
Self(k.opaque, k.ability_member)
}
}
impl From<&SerImplKey> for ImplKey {
fn from(k: &SerImplKey) -> Self {
Self {
opaque: k.0,
ability_member: k.1,
}
}
}
fn serialize_specializations_to_root(
specialization_to_root: &MutMap<Symbol, ImplKey>,
writer: &mut impl Write,
written: usize,
) -> io::Result<usize> {
bytes::serialize_map(
specialization_to_root,
bytes::serialize_slice,
|keys, writer, written| {
bytes::serialize_slice(
&keys.iter().map(SerImplKey::from).collect::<Vec<_>>(),
writer,
written,
)
},
writer,
written,
)
}
/// Inverse of `serialize_specializations_to_root`.
fn deserialize_specialization_to_root(
    bytes: &[u8],
    length: usize,
    offset: usize,
) -> (MutMap<Symbol, ImplKey>, usize) {
    bytes::deserialize_map(
        bytes,
        bytes::deserialize_vec,
        |bytes, length, offset| {
            let (ser_keys, offset) = bytes::deserialize_slice::<SerImplKey>(bytes, length, offset);
            let impl_keys = ser_keys.iter().map(ImplKey::from).collect();
            (impl_keys, offset)
        },
        length,
        offset,
    )
}
#[derive(Clone, Copy)]
#[repr(C)]
struct SerMemberData(Symbol, Region, Variable);
impl From<&AbilityMemberData<Resolved>> for SerMemberData {
fn from(k: &AbilityMemberData<Resolved>) -> Self {
Self(k.parent_ability, k.region, k.typ.0)
}
}
impl From<&SerMemberData> for AbilityMemberData<Resolved> {
fn from(k: &SerMemberData) -> Self {
Self {
parent_ability: k.0,
region: k.1,
typ: ResolvedMemberType(k.2),
}
}
}
fn serialize_ability_members(
ability_members: &MutMap<Symbol, AbilityMemberData<Resolved>>,
writer: &mut impl Write,
written: usize,
) -> io::Result<usize> {
bytes::serialize_map(
ability_members,
bytes::serialize_slice,
|keys, writer, written| {
bytes::serialize_slice(
&keys.iter().map(SerMemberData::from).collect::<Vec<_>>(),
writer,
written,
)
},
writer,
written,
)
}
/// Inverse of `serialize_ability_members`.
fn deserialize_ability_members(
    bytes: &[u8],
    length: usize,
    offset: usize,
) -> (MutMap<Symbol, AbilityMemberData<Resolved>>, usize) {
    bytes::deserialize_map(
        bytes,
        bytes::deserialize_vec,
        |bytes, length, offset| {
            let (ser_data, offset) =
                bytes::deserialize_slice::<SerMemberData>(bytes, length, offset);
            let member_data = ser_data.iter().map(AbilityMemberData::from).collect();
            (member_data, offset)
        },
        length,
        offset,
    )
}
#[derive(Clone, Copy)]
#[repr(C)]
enum SerMemberImpl {
Impl(Symbol),
Derived,
Error,
}
impl From<&MemberImpl> for SerMemberImpl {
fn from(k: &MemberImpl) -> Self {
match k {
MemberImpl::Impl(s) => Self::Impl(*s),
MemberImpl::Derived => Self::Derived,
MemberImpl::Error => Self::Error,
}
}
}
impl From<&SerMemberImpl> for MemberImpl {
fn from(k: &SerMemberImpl) -> Self {
match k {
SerMemberImpl::Impl(s) => Self::Impl(*s),
SerMemberImpl::Derived => Self::Derived,
SerMemberImpl::Error => Self::Error,
}
}
}
fn serialize_declared_implementations(
declared_implementations: &MutMap<ImplKey, MemberImpl>,
writer: &mut impl Write,
written: usize,
) -> io::Result<usize> {
bytes::serialize_map(
declared_implementations,
bytes::serialize_slice,
|keys, writer, written| {
bytes::serialize_slice(
&keys.iter().map(SerMemberImpl::from).collect::<Vec<_>>(),
writer,
written,
)
},
writer,
written,
)
}
/// Inverse of `serialize_declared_implementations`.
fn deserialize_declared_implementations(
    bytes: &[u8],
    length: usize,
    offset: usize,
) -> (MutMap<ImplKey, MemberImpl>, usize) {
    bytes::deserialize_map(
        bytes,
        bytes::deserialize_vec,
        |bytes, length, offset| {
            let (ser_impls, offset) =
                bytes::deserialize_slice::<SerMemberImpl>(bytes, length, offset);
            let member_impls = ser_impls.iter().map(MemberImpl::from).collect();
            (member_impls, offset)
        },
        length,
        offset,
    )
}
/// C-layout mirror of `MemberSpecializationInfo`: the specialization symbol,
/// plus index slices pointing into side buffers (serialized alongside) that
/// hold the specialization lambda sets' region keys and variables.
#[derive(Clone, Copy)]
#[repr(C)]
struct SerMemberSpecInfo(Symbol, SubsSlice<u8>, SubsSlice<Variable>);
/// Serialize the member-symbol -> specialization-info map.
///
/// Each value's lambda-set regions/variables are appended to shared side
/// buffers; the per-value `SerMemberSpecInfo` stores index slices into those
/// buffers. The buffers are written after the infos slice, in a fixed order
/// that `deserialize_specializations` mirrors.
fn serialize_specializations(
    specializations: &MutMap<Symbol, MemberSpecializationInfo<Resolved>>,
    writer: &mut impl Write,
    written: usize,
) -> io::Result<usize> {
    bytes::serialize_map(
        specializations,
        bytes::serialize_slice,
        |spec_info, writer, written| {
            // Side buffers shared by all values in this map.
            let mut spec_lambda_sets_regions: Vec<u8> = Vec::new();
            let mut spec_lambda_sets_vars: Vec<Variable> = Vec::new();
            let mut ser_member_spec_infos: Vec<SerMemberSpecInfo> = Vec::new();
            for MemberSpecializationInfo {
                _phase: _,
                symbol,
                specialization_lambda_sets,
            } in spec_info
            {
                // Append this specialization's lambda-set keys/values to the
                // side buffers, remembering where they landed.
                let regions = SubsSlice::extend_new(
                    &mut spec_lambda_sets_regions,
                    specialization_lambda_sets.keys().copied(),
                );
                let vars = SubsSlice::extend_new(
                    &mut spec_lambda_sets_vars,
                    specialization_lambda_sets.values().copied(),
                );
                ser_member_spec_infos.push(SerMemberSpecInfo(*symbol, regions, vars));
            }
            // Order matters: infos, then regions, then vars.
            let written = bytes::serialize_slice(&ser_member_spec_infos, writer, written)?;
            let written = bytes::serialize_slice(&spec_lambda_sets_regions, writer, written)?;
            let written = bytes::serialize_slice(&spec_lambda_sets_vars, writer, written)?;
            Ok(written)
        },
        writer,
        written,
    )
}
/// Inverse of `serialize_specializations`: reads the infos slice, then the two
/// side buffers, then rebuilds each `MemberSpecializationInfo` by slicing its
/// regions/variables back out of the buffers.
fn deserialize_specializations(
    bytes: &[u8],
    length: usize,
    offset: usize,
) -> (MutMap<Symbol, MemberSpecializationInfo<Resolved>>, usize) {
    bytes::deserialize_map(
        bytes,
        bytes::deserialize_vec,
        |bytes, length, offset| {
            let (serialized_slices, offset) =
                bytes::deserialize_slice::<SerMemberSpecInfo>(bytes, length, offset);
            // Buffer sizes are the sums of the index-slice lengths recorded in
            // the infos; regions and vars were zipped from the same VecMap, so
            // the two sums are equal.
            let (regions_slice, offset) = {
                let total_items = serialized_slices.iter().map(|s| s.1.len()).sum();
                bytes::deserialize_slice::<u8>(bytes, total_items, offset)
            };
            let (vars_slice, offset) = {
                let total_items = serialized_slices.iter().map(|s| s.2.len()).sum();
                bytes::deserialize_slice::<Variable>(bytes, total_items, offset)
            };
            let mut spec_infos: Vec<MemberSpecializationInfo<Resolved>> =
                Vec::with_capacity(length);
            for SerMemberSpecInfo(symbol, regions, vars) in serialized_slices {
                let regions = regions_slice[regions.indices()].to_vec();
                let lset_vars = vars_slice[vars.indices()].to_vec();
                // Safety: `regions` and `lset_vars` came from the keys/values
                // of one serialized VecMap, so they have equal length and the
                // keys are unique — NOTE(review): confirm this matches
                // `VecMap::zip`'s documented contract.
                let spec_info = MemberSpecializationInfo::new(*symbol, unsafe {
                    VecMap::zip(regions, lset_vars)
                });
                spec_infos.push(spec_info)
            }
            (spec_infos, offset)
        },
        length,
        offset,
    )
}
/// Serialize the specialization-id -> symbol map; both sides are written as
/// flat slices.
fn serialize_resolved_specializations(
    resolved_specializations: &MutMap<SpecializationId, Symbol>,
    writer: &mut impl Write,
    written: usize,
) -> io::Result<usize> {
    bytes::serialize_map(
        resolved_specializations,
        |keys, writer, written| bytes::serialize_slice(keys, writer, written),
        |values, writer, written| bytes::serialize_slice(values, writer, written),
        writer,
        written,
    )
}
/// Inverse of `serialize_resolved_specializations`.
fn deserialize_resolved_specializations(
    bytes: &[u8],
    length: usize,
    offset: usize,
) -> (MutMap<SpecializationId, Symbol>, usize) {
    bytes::deserialize_map(
        bytes,
        |bytes, length, offset| bytes::deserialize_vec(bytes, length, offset),
        |bytes, length, offset| bytes::deserialize_vec(bytes, length, offset),
        length,
        offset,
    )
}
/// C-layout mirror of `ResolvedImpl` for byte-level serialization.
#[derive(Copy, Clone)]
#[repr(C)]
enum SerResolvedImpl {
    Impl(SerMemberSpecInfo),
    Derived,
    Error,
}
impl SerResolvedImpl {
    /// How many lambda-set regions this impl contributes to the shared side
    /// buffers; `Derived` and `Error` carry no lambda-set data.
    fn num_regions(&self) -> usize {
        if let SerResolvedImpl::Impl(spec_info) = self {
            spec_info.1.len()
        } else {
            0
        }
    }
}
/// Serialize the resolved implementations produced by solving, returning the
/// number of bytes written.
///
/// Layout: the map length as one `u64`, then the map itself. Each value is
/// lowered to the C-layout `SerResolvedImpl`; all lambda-set region keys and
/// variables are appended to shared side buffers written after the impls
/// slice, mirrored by `deserialize_solved_implementations`.
pub fn serialize_solved_implementations(
    solved_impls: &ResolvedImplementations,
    writer: &mut impl std::io::Write,
) -> std::io::Result<usize> {
    // Unlike the store sections, this map's length has no header — write it
    // inline first.
    let len = solved_impls.len() as u64;
    let written = bytes::serialize_slice(&[len], writer, 0)?;
    bytes::serialize_vec_map(
        solved_impls,
        |keys, writer, written| {
            bytes::serialize_slice(
                &keys.iter().map(SerImplKey::from).collect::<Vec<_>>(),
                writer,
                written,
            )
        },
        |resolved_impls, writer, written| {
            // Side buffers shared by every `Impl` value in this map.
            let mut spec_lambda_sets_regions: Vec<u8> = Vec::new();
            let mut spec_lambda_sets_vars: Vec<Variable> = Vec::new();
            let mut ser_resolved_impls: Vec<SerResolvedImpl> = Vec::new();
            for resolved_impl in resolved_impls {
                let ser_resolved_impl = match resolved_impl {
                    ResolvedImpl::Impl(MemberSpecializationInfo {
                        _phase: _,
                        symbol,
                        specialization_lambda_sets,
                    }) => {
                        // Record where this impl's lambda-set data lands in the
                        // side buffers.
                        let regions = SubsSlice::extend_new(
                            &mut spec_lambda_sets_regions,
                            specialization_lambda_sets.keys().copied(),
                        );
                        let vars = SubsSlice::extend_new(
                            &mut spec_lambda_sets_vars,
                            specialization_lambda_sets.values().copied(),
                        );
                        SerResolvedImpl::Impl(SerMemberSpecInfo(*symbol, regions, vars))
                    }
                    ResolvedImpl::Derived => SerResolvedImpl::Derived,
                    ResolvedImpl::Error => SerResolvedImpl::Error,
                };
                ser_resolved_impls.push(ser_resolved_impl);
            }
            // Order matters: impls, then regions, then vars.
            let written = bytes::serialize_slice(&ser_resolved_impls, writer, written)?;
            let written = bytes::serialize_slice(&spec_lambda_sets_regions, writer, written)?;
            let written = bytes::serialize_slice(&spec_lambda_sets_vars, writer, written)?;
            Ok(written)
        },
        writer,
        written,
    )
}
/// Inverse of `serialize_solved_implementations`: reads the inline length,
/// then the map, rebuilding each `ResolvedImpl` from the impls slice and the
/// two side buffers.
pub fn deserialize_solved_implementations(bytes: &[u8]) -> (ResolvedImplementations, usize) {
    // The map length was written inline as a single u64.
    let (len_slice, offset) = bytes::deserialize_slice::<u64>(bytes, 1, 0);
    let length = len_slice[0];
    bytes::deserialize_vec_map(
        bytes,
        |bytes, length, offset| {
            let (slice, offset) = bytes::deserialize_slice::<SerImplKey>(bytes, length, offset);
            (slice.iter().map(ImplKey::from).collect(), offset)
        },
        |bytes, length, offset| {
            let (serialized_slices, offset) =
                bytes::deserialize_slice::<SerResolvedImpl>(bytes, length, offset);
            // Both side buffers have the same element count: regions and vars
            // were zipped from the same VecMap per impl.
            let total_num_regions = serialized_slices.iter().map(|s| s.num_regions()).sum();
            let (regions_slice, offset) =
                bytes::deserialize_slice::<u8>(bytes, total_num_regions, offset);
            let (vars_slice, offset) =
                { bytes::deserialize_slice::<Variable>(bytes, total_num_regions, offset) };
            let mut all_resolved: Vec<ResolvedImpl> = Vec::with_capacity(length);
            for ser_resolved in serialized_slices {
                let resolved = match ser_resolved {
                    SerResolvedImpl::Impl(SerMemberSpecInfo(symbol, regions, vars)) => {
                        let regions = regions_slice[regions.indices()].to_vec();
                        let lset_vars = vars_slice[vars.indices()].to_vec();
                        // Safety: equal lengths and unique keys, since both
                        // come from one serialized VecMap — NOTE(review):
                        // confirm against `VecMap::zip`'s contract.
                        let spec_info = MemberSpecializationInfo::new(*symbol, unsafe {
                            VecMap::zip(regions, lset_vars)
                        });
                        ResolvedImpl::Impl(spec_info)
                    }
                    SerResolvedImpl::Derived => ResolvedImpl::Derived,
                    SerResolvedImpl::Error => ResolvedImpl::Error,
                };
                all_resolved.push(resolved);
            }
            (all_resolved, offset)
        },
        length as _,
        offset,
    )
}
}
#[cfg(test)]
mod test {
    use roc_collections::VecMap;
    use roc_module::symbol::Symbol;
    use roc_region::all::Region;
    use roc_types::{subs::Variable, types::MemberImpl};
    use super::{
        AbilitiesStore, AbilityMemberData, ImplKey, MemberSpecializationInfo, ResolvedMemberType,
    };
    /// Round-trip test: build a store exercising every serialized section
    /// (abilities, members, declared impls, specializations, resolved
    /// specializations), serialize it, deserialize it, and check that every
    /// field survives unchanged and that the reported byte counts match.
    #[test]
    fn serde_abilities_store() {
        let store = {
            let mut store = AbilitiesStore::default();
            // One ability with two members, a second with one member.
            store.register_ability(
                Symbol::ARG_1,
                [
                    (
                        Symbol::ARG_2,
                        AbilityMemberData {
                            parent_ability: Symbol::ARG_1,
                            region: Region::zero(),
                            typ: ResolvedMemberType(Variable::BOOL),
                        },
                    ),
                    (
                        Symbol::ARG_3,
                        AbilityMemberData {
                            parent_ability: Symbol::ARG_1,
                            region: Region::zero(),
                            typ: ResolvedMemberType(Variable::BOOL),
                        },
                    ),
                ],
            );
            store.register_ability(
                Symbol::ARG_4,
                [(
                    Symbol::ARG_5,
                    AbilityMemberData {
                        parent_ability: Symbol::ARG_4,
                        region: Region::zero(),
                        typ: ResolvedMemberType(Variable::BOOL),
                    },
                )],
            );
            // Declared implementations: two concrete impls and one derived.
            store.register_declared_implementations(
                Symbol::ATTR_ATTR,
                [
                    (Symbol::ARG_2, MemberImpl::Impl(Symbol::ATTR_INVALID)),
                    (Symbol::ARG_3, MemberImpl::Impl(Symbol::ARG_CLOSURE)),
                ],
            );
            store.register_declared_implementations(
                Symbol::ATTR_ATTR,
                [(Symbol::ARG_5, MemberImpl::Derived)],
            );
            // Specializations with differently-sized lambda-set maps, so the
            // side-buffer slicing is exercised.
            store
                .mark_implementation(
                    ImplKey {
                        opaque: Symbol::ATTR_ATTR,
                        ability_member: Symbol::ARG_2,
                    },
                    Ok(MemberSpecializationInfo::new(Symbol::UNDERSCORE, {
                        let mut map = VecMap::default();
                        map.insert(1, Variable::BOOL);
                        map.insert(2, Variable::U8);
                        map
                    })),
                )
                .unwrap();
            store
                .mark_implementation(
                    ImplKey {
                        opaque: Symbol::ATTR_ATTR,
                        ability_member: Symbol::ARG_3,
                    },
                    Ok(MemberSpecializationInfo::new(Symbol::UNDERSCORE, {
                        let mut map = VecMap::default();
                        map.insert(1, Variable::BOOL);
                        map.insert(2, Variable::U8);
                        map.insert(3, Variable::U32);
                        map.insert(4, Variable::U64);
                        map
                    })),
                )
                .unwrap();
            // Resolved specializations, which also advance the id counter
            // recorded in the header.
            let spec_id1 = store.fresh_specialization_id();
            let spec_id2 = store.fresh_specialization_id();
            store.insert_resolved(spec_id1, Symbol::ARG_2);
            store.insert_resolved(spec_id2, Symbol::ARG_3);
            store
        };
        let mut bytes = Vec::new();
        let written = store.serialize(&mut bytes).unwrap();
        // serialize must account for every byte it wrote.
        assert_eq!(bytes.len(), written);
        let AbilitiesStore {
            members_of_ability,
            specialization_to_root,
            ability_members,
            declared_implementations,
            specializations,
            next_specialization_id,
            resolved_specializations,
        } = store;
        let (de_store, offset) = AbilitiesStore::deserialize(&bytes);
        // deserialize must consume exactly what serialize produced.
        assert_eq!(bytes.len(), offset);
        assert_eq!(members_of_ability, de_store.members_of_ability);
        assert_eq!(specialization_to_root, de_store.specialization_to_root);
        assert_eq!(ability_members, de_store.ability_members);
        assert_eq!(declared_implementations, de_store.declared_implementations);
        assert_eq!(specializations, de_store.specializations);
        assert_eq!(next_specialization_id, de_store.next_specialization_id);
        assert_eq!(resolved_specializations, de_store.resolved_specializations);
    }
}

View file

@ -553,7 +553,7 @@ fn can_annotation_help(
references,
);
args.push(arg_ann);
args.push(Loc::at(arg.region, arg_ann));
}
match scope.lookup_alias(symbol) {
@ -573,8 +573,14 @@ fn can_annotation_help(
let mut type_var_to_arg = Vec::new();
for (_, arg_ann) in alias.type_variables.iter().zip(args) {
type_var_to_arg.push(arg_ann);
for (alias_arg, arg_ann) in alias.type_variables.iter().zip(args) {
type_var_to_arg.push(Loc::at(
arg_ann.region,
OptAbleType {
typ: arg_ann.value,
opt_ability: alias_arg.value.opt_bound_ability,
},
));
}
let mut lambda_set_variables =

View file

@ -70,6 +70,7 @@ macro_rules! map_symbol_to_lowlevel_and_arity {
// Below, we explicitly handle some exceptions to the pattern where a lowlevel maps
// directly to a symbol. If you are unsure if your lowlevel is an exception, assume
// that it isn't and just see if that works.
#[allow(unreachable_patterns)] // multiple symbols can map to one low-level
match lowlevel {
$(
LowLevel::$lowlevel => Symbol::$symbol,
@ -144,6 +145,8 @@ map_symbol_to_lowlevel_and_arity! {
ListSwap; LIST_SWAP; 3,
ListGetCapacity; LIST_CAPACITY; 1,
ListGetUnsafe; DICT_LIST_GET_UNSAFE; 2,
NumAdd; NUM_ADD; 2,
NumAddWrap; NUM_ADD_WRAP; 2,
NumAddChecked; NUM_ADD_CHECKED_LOWLEVEL; 2,
@ -191,8 +194,8 @@ map_symbol_to_lowlevel_and_arity! {
NumShiftRightZfBy; NUM_SHIFT_RIGHT_ZERO_FILL; 2,
NumToStr; NUM_TO_STR; 1,
Eq; BOOL_EQ; 2,
NotEq; BOOL_NEQ; 2,
Eq; BOOL_STRUCTURAL_EQ; 2,
NotEq; BOOL_STRUCTURAL_NOT_EQ; 2,
And; BOOL_AND; 2,
Or; BOOL_OR; 2,
Not; BOOL_NOT; 1,

View file

@ -829,7 +829,15 @@ fn canonicalize_opaque<'a>(
type_arguments: alias
.type_variables
.iter()
.map(|_| Type::Variable(var_store.fresh()))
.map(|alias_var| {
Loc::at(
alias_var.region,
OptAbleType {
typ: Type::Variable(var_store.fresh()),
opt_ability: alias_var.value.opt_bound_ability,
},
)
})
.collect(),
lambda_set_variables: alias
.lambda_set_variables

View file

@ -1,4 +1,4 @@
use crate::abilities::{ImplKey, PendingAbilitiesStore, ResolvedImpl};
use crate::abilities::{AbilitiesStore, ImplKey, PendingAbilitiesStore, ResolvedImpl};
use crate::annotation::canonicalize_annotation;
use crate::def::{canonicalize_defs, Def};
use crate::effect_module::HostedGeneratedFunctions;
@ -17,7 +17,7 @@ use roc_parse::header::HeaderFor;
use roc_parse::pattern::PatternType;
use roc_problem::can::{Problem, RuntimeError};
use roc_region::all::{Loc, Region};
use roc_types::subs::{ExposedTypesStorageSubs, VarStore, Variable};
use roc_types::subs::{ExposedTypesStorageSubs, Subs, VarStore, Variable};
use roc_types::types::{Alias, AliasKind, AliasVar, Type};
/// The types of all exposed values/functions of a collection of modules
@ -374,6 +374,7 @@ pub fn canonicalize_module_defs<'a>(
if !output.references.has_type_or_value_lookup(symbol)
&& !exposed_symbols.contains(&symbol)
&& !scope.abilities_store.is_specialization_name(symbol)
&& !symbol.is_exposed_for_builtin_derivers()
{
env.problem(Problem::UnusedDef(symbol, region));
}
@ -1179,3 +1180,53 @@ fn fix_values_captured_in_closure_expr(
OpaqueWrapFunction(_) => {}
}
}
/// Type state for a single module.
#[derive(Debug)]
pub struct TypeState {
pub subs: Subs,
pub exposed_vars_by_symbol: Vec<(Symbol, Variable)>,
pub abilities: AbilitiesStore,
pub solved_implementations: ResolvedImplementations,
}
impl TypeState {
pub fn serialize(&self, writer: &mut impl std::io::Write) -> std::io::Result<usize> {
let Self {
subs,
exposed_vars_by_symbol,
abilities,
solved_implementations,
} = self;
let written_subs = subs.serialize(exposed_vars_by_symbol, writer)?;
let written_ab = abilities.serialize(writer)?;
let written_solved_impls =
crate::abilities::serialize_solved_implementations(solved_implementations, writer)?;
Ok(written_subs + written_ab + written_solved_impls)
}
pub fn deserialize(bytes: &[u8]) -> (Self, usize) {
let ((subs, exposed_vars_by_symbol), len_subs) = Subs::deserialize(bytes);
let bytes = &bytes[len_subs..];
let (abilities, len_abilities) = AbilitiesStore::deserialize(bytes);
let bytes = &bytes[len_abilities..];
let (solved_implementations, len_solved_impls) =
crate::abilities::deserialize_solved_implementations(bytes);
let total_offset = len_subs + len_abilities + len_solved_impls;
(
Self {
subs,
exposed_vars_by_symbol: exposed_vars_by_symbol.to_vec(),
abilities,
solved_implementations,
},
total_offset,
)
}
}

View file

@ -25,6 +25,14 @@ impl<K, V> VecMap<K, V> {
(k, v)
}
pub fn unzip(self) -> (Vec<K>, Vec<V>) {
(self.keys, self.values)
}
pub fn unzip_slices(&self) -> (&[K], &[V]) {
(&self.keys, &self.values)
}
}
impl<K: PartialEq, V> VecMap<K, V> {
@ -114,14 +122,6 @@ impl<K: PartialEq, V> VecMap<K, V> {
self.values.truncate(len);
}
pub fn unzip(self) -> (Vec<K>, Vec<V>) {
(self.keys, self.values)
}
pub fn unzip_slices(&self) -> (&[K], &[V]) {
(&self.keys, &self.values)
}
/// # Safety
///
/// keys and values must have the same length, and there must not
@ -250,6 +250,31 @@ where
}
}
impl<K, V> PartialEq for VecMap<K, V>
where
K: PartialEq,
V: PartialEq,
{
fn eq(&self, other: &Self) -> bool {
if self.len() != other.len() {
return false;
}
for (k, v) in self.iter() {
match other.get(k) {
Some(v1) => {
if v != v1 {
return false;
}
}
None => return false,
}
}
true
}
}
#[cfg(test)]
mod test_drain_filter {
use crate::VecMap;

View file

@ -1,4 +1,4 @@
use std::iter::FromIterator;
use std::{borrow::Borrow, iter::FromIterator};
#[derive(Clone, Debug, PartialEq)]
pub struct VecSet<T> {
@ -131,3 +131,17 @@ impl<T> IntoIterator for VecSet<T> {
self.elements.into_iter()
}
}
impl<T> Borrow<[T]> for VecSet<T> {
fn borrow(&self) -> &[T] {
&self.elements
}
}
impl<T> From<Vec<T>> for VecSet<T> {
fn from(elements: Vec<T>) -> Self {
// Not totally safe, but good enough for our purposes - also, duplicates in the VecSet are
// fine, just inefficient.
Self { elements }
}
}

View file

@ -3,7 +3,7 @@ use roc_can::constraint::{Constraint, Constraints};
use roc_can::expected::Expected::{self, *};
use roc_can::num::{FloatBound, FloatWidth, IntBound, IntLitWidth, NumBound, SignDemand};
use roc_module::symbol::Symbol;
use roc_region::all::Region;
use roc_region::all::{Loc, Region};
use roc_types::num::{NumericRange, SingleQuoteBound};
use roc_types::subs::Variable;
use roc_types::types::Type::{self, *};
@ -198,7 +198,11 @@ pub fn num_literal(
#[inline(always)]
pub fn builtin_type(symbol: Symbol, args: Vec<Type>) -> Type {
Type::Apply(symbol, args, Region::zero())
Type::Apply(
symbol,
args.into_iter().map(Loc::at_zero).collect(),
Region::zero(),
)
}
#[inline(always)]

View file

@ -1510,8 +1510,12 @@ fn constrain_function_def(
loc_symbol.value,
Loc {
region: loc_function_def.region,
// todo can we use Type::Variable(expr_var) here?
value: signature.clone(),
// NOTE: we MUST use `expr_var` here so that the correct type variable is
// associated with the function. We prefer this to the annotation type, because the
// annotation type may be instantiated into a fresh type variable that is
// disassociated fromt the rest of the program.
// Below, we'll check that the function actually matches the annotation.
value: Type::Variable(expr_var),
},
);

View file

@ -3,16 +3,23 @@
use std::iter::once;
use roc_can::{
expr::{AnnotatedMark, ClosureData, Expr, Recursive},
expr::{AnnotatedMark, ClosureData, Expr, IntValue, Recursive, WhenBranch, WhenBranchPattern},
num::{IntBound, IntLitWidth},
pattern::Pattern,
};
use roc_derive_key::hash::FlatHashKey;
use roc_module::{called_via::CalledVia, ident::Lowercase, symbol::Symbol};
use roc_region::all::Loc;
use roc_error_macros::internal_error;
use roc_module::{
called_via::CalledVia,
ident::{Lowercase, TagName},
symbol::Symbol,
};
use roc_region::all::{Loc, Region};
use roc_types::{
num::int_lit_width_to_variable,
subs::{
Content, FlatType, LambdaSet, OptVariable, RecordFields, SubsSlice, UnionLambdas, Variable,
VariableSubsSlice,
Content, ExhaustiveMark, FlatType, GetSubsSlice, LambdaSet, OptVariable, RecordFields,
RedundantMark, SubsIndex, SubsSlice, UnionLambdas, UnionTags, Variable, VariableSubsSlice,
},
types::RecordField,
};
@ -20,8 +27,15 @@ use roc_types::{
use crate::{synth_var, util::Env, DerivedBody};
pub(crate) fn derive_hash(env: &mut Env<'_>, key: FlatHashKey, def_symbol: Symbol) -> DerivedBody {
let (body, body_type) = match key {
let (body_type, body) = match key {
FlatHashKey::Record(fields) => hash_record(env, def_symbol, fields),
FlatHashKey::TagUnion(tags) => {
if tags.len() == 1 {
hash_newtype_tag_union(env, def_symbol, tags.into_iter().next().unwrap())
} else {
hash_tag_union(env, def_symbol, tags)
}
}
};
let specialization_lambda_sets =
@ -34,7 +48,7 @@ pub(crate) fn derive_hash(env: &mut Env<'_>, key: FlatHashKey, def_symbol: Symbo
}
}
fn hash_record(env: &mut Env<'_>, fn_name: Symbol, fields: Vec<Lowercase>) -> (Expr, Variable) {
fn hash_record(env: &mut Env<'_>, fn_name: Symbol, fields: Vec<Lowercase>) -> (Variable, Expr) {
// Suppose rcd = { f1, ..., fn }.
// Build a generalized type t_rcd = { f1: t1, ..., fn: tn }, with fresh t1, ..., tn,
// so that we can re-use the derived impl for many records of the same fields.
@ -75,9 +89,9 @@ fn hash_record(env: &mut Env<'_>, fn_name: Symbol, fields: Vec<Lowercase>) -> (E
let hasher_sym = env.new_symbol("hasher");
let hasher_var = synth_var(env.subs, Content::FlexAbleVar(None, Symbol::HASH_HASHER));
let (body, body_var) = record_fields.iter_all().fold(
(Expr::Var(hasher_sym), hasher_var),
|(body, body_var), (field_name, field_var, _)| {
let (body_var, body) = record_fields.iter_all().fold(
(hasher_var, Expr::Var(hasher_sym)),
|total_hasher, (field_name, field_var, _)| {
let field_name = env.subs[field_name].clone();
let field_var = env.subs[field_var];
@ -89,54 +103,326 @@ fn hash_record(env: &mut Env<'_>, fn_name: Symbol, fields: Vec<Lowercase>) -> (E
field: field_name,
};
let (hash_fn_data, returned_hasher_var) = {
// build `Hash.hash ...` function type
//
// hasher, val -[uls]-> hasher | hasher has Hasher, val has Hash
let exposed_hash_fn_var = env.import_builtin_symbol_var(Symbol::HASH_HASH);
// (typeof body), (typeof field) -[clos]-> hasher_result
let this_arguments_slice =
VariableSubsSlice::insert_into_subs(env.subs, [body_var, field_var]);
let this_hash_clos_var = env.subs.fresh_unnamed_flex_var();
let this_hasher_result_var = env.subs.fresh_unnamed_flex_var();
let this_hash_fn_var = synth_var(
env.subs,
Content::Structure(FlatType::Func(
this_arguments_slice,
this_hash_clos_var,
this_hasher_result_var,
)),
);
// hasher, val -[uls]-> hasher | hasher has Hasher, val has Hash
// ~ (typeof body), (typeof field) -[clos]-> hasher_result
env.unify(exposed_hash_fn_var, this_hash_fn_var);
// Hash.hash : hasher, (typeof field) -[clos]-> hasher | hasher has Hasher, (typeof field) has Hash
let hash_fn_head = Expr::AbilityMember(Symbol::HASH_HASH, None, this_hash_fn_var);
let hash_fn_data = Box::new((
this_hash_fn_var,
Loc::at_zero(hash_fn_head),
this_hash_clos_var,
this_hasher_result_var,
));
(hash_fn_data, this_hasher_result_var)
};
let hash_arguments = vec![
(body_var, Loc::at_zero(body)),
(field_var, Loc::at_zero(field_access)),
];
let call_hash = Expr::Call(hash_fn_data, hash_arguments, CalledVia::Space);
(call_hash, returned_hasher_var)
call_hash_hash(env, total_hasher, (field_var, field_access))
},
);
// Finally, build the closure
// \hasher, rcd -> body
build_outer_derived_closure(
env,
fn_name,
(hasher_var, hasher_sym),
(record_var, Pattern::Identifier(rcd_sym)),
(body_var, body),
)
}
/// Build a `hash` implementation for a non-singleton tag union.
fn hash_tag_union(
env: &mut Env<'_>,
fn_name: Symbol,
tags: Vec<(TagName, u16)>,
) -> (Variable, Expr) {
// Suppose tags = [ A p11 .. p1n, ..., Q pq1 .. pqm ]
// Build a generalized type t_tags = [ A t11 .. t1n, ..., Q tq1 .. tqm ],
// with fresh t1, ..., tqm, so that we can re-use the derived impl for many
// unions of the same tags and payloads.
let (union_var, union_tags) = {
let flex_tag_labels = tags
.into_iter()
.map(|(label, arity)| {
let variables_slice = VariableSubsSlice::reserve_into_subs(env.subs, arity.into());
for var_index in variables_slice {
env.subs[var_index] = env.subs.fresh_unnamed_flex_var();
}
(label, variables_slice)
})
.collect::<Vec<_>>();
let union_tags = UnionTags::insert_slices_into_subs(env.subs, flex_tag_labels);
let tag_union_var = synth_var(
env.subs,
Content::Structure(FlatType::TagUnion(union_tags, Variable::EMPTY_TAG_UNION)),
);
(tag_union_var, union_tags)
};
// Now, a hasher for this tag union is
//
// hash_union : hasher, [ A t11 .. t1n, ..., Q tq1 .. tqm ] -> hasher | hasher has Hasher
// hash_union = \hasher, union ->
// when union is
// A x11 .. x1n -> Hash.hash (... (Hash.hash (Hash.uN hasher 0) x11) ...) x1n
// ...
// Q xq1 .. xqm -> Hash.hash (... (Hash.hash (Hash.uN hasher (q - 1)) xq1) ...) xqm
//
// where `Hash.uN` is the appropriate hasher for the discriminant value - typically a `u8`, but
// if there are more than `u8::MAX` tags, we use `u16`, and so on.
let union_sym = env.new_symbol("union");
let hasher_sym = env.new_symbol("hasher");
let hasher_var = synth_var(env.subs, Content::FlexAbleVar(None, Symbol::HASH_HASHER));
let (discr_width, discr_precision_var, hash_discr_member) = if union_tags.len() > u64::MAX as _
{
// Should never happen, `usize` isn't more than 64 bits on most machines, but who knows?
// Maybe someday soon someone will try to compile a huge Roc program on a 128-bit one.
internal_error!("new record unlocked: you fit more than 18 billion, billion tags in a Roc program, and the compiler didn't fall over! But it will now. 🤯")
} else if union_tags.len() > u32::MAX as _ {
(IntLitWidth::U64, Variable::UNSIGNED64, Symbol::HASH_ADD_U64)
} else if union_tags.len() > u16::MAX as _ {
(IntLitWidth::U32, Variable::UNSIGNED32, Symbol::HASH_ADD_U32)
} else if union_tags.len() > u8::MAX as _ {
(IntLitWidth::U16, Variable::UNSIGNED16, Symbol::HASH_ADD_U16)
} else {
(IntLitWidth::U8, Variable::UNSIGNED8, Symbol::HASH_ADD_U8)
};
let discr_num_var = int_lit_width_to_variable(discr_width);
// Build the branches of the body
let whole_hasher_var = env.subs.fresh_unnamed_flex_var();
let branches = union_tags
.iter_all()
.enumerate()
.map(|(discr_n, (tag, payloads))| {
// A
let tag_name = env.subs[tag].clone();
// t11 .. t1n
let payload_vars = env.subs.get_subs_slice(env.subs[payloads]).to_vec();
// x11 .. x1n
let payload_syms: Vec<_> = std::iter::repeat_with(|| env.unique_symbol())
.take(payload_vars.len())
.collect();
// `A x1 .. x1n` pattern
let pattern = Pattern::AppliedTag {
whole_var: union_var,
tag_name,
ext_var: Variable::EMPTY_TAG_UNION,
// (t1, v1) (t2, v2)
arguments: (payload_vars.iter())
.zip(payload_syms.iter())
.map(|(var, sym)| (*var, Loc::at_zero(Pattern::Identifier(*sym))))
.collect(),
};
let branch_pattern = WhenBranchPattern {
pattern: Loc::at_zero(pattern),
degenerate: false,
};
// discrHasher = (Hash.uN hasher n)
let (discr_hasher_var, disc_hasher_expr) = call_hash_ability_member(
env,
hash_discr_member,
(hasher_var, Expr::Var(hasher_sym)),
(
discr_num_var,
Expr::Int(
discr_num_var,
discr_precision_var,
format!("{}", discr_n).into_boxed_str(),
IntValue::I128((discr_n as i128).to_ne_bytes()),
IntBound::Exact(discr_width),
),
),
);
// Fold up `Hash.hash (... (Hash.hash discrHasher x11) ...) x1n`
let (body_var, body_expr) = (payload_vars.into_iter()).zip(payload_syms).fold(
(discr_hasher_var, disc_hasher_expr),
|total_hasher, (payload_var, payload_sym)| {
call_hash_hash(env, total_hasher, (payload_var, Expr::Var(payload_sym)))
},
);
env.unify(whole_hasher_var, body_var);
WhenBranch {
patterns: vec![branch_pattern],
value: Loc::at_zero(body_expr),
guard: None,
redundant: RedundantMark::known_non_redundant(),
}
})
.collect();
// when union is
// ...
let when_var = whole_hasher_var;
let when_expr = Expr::When {
loc_cond: Box::new(Loc::at_zero(Expr::Var(union_sym))),
cond_var: union_var,
expr_var: when_var,
region: Region::zero(),
branches,
branches_cond_var: union_var,
exhaustive: ExhaustiveMark::known_exhaustive(),
};
// Finally, build the closure
// \hasher, rcd -> body
build_outer_derived_closure(
env,
fn_name,
(hasher_var, hasher_sym),
(union_var, Pattern::Identifier(union_sym)),
(when_var, when_expr),
)
}
/// Build a `hash` implementation for a newtype (singleton) tag union.
/// If a tag union is a newtype, we do not need to hash its discriminant.
fn hash_newtype_tag_union(
    env: &mut Env<'_>,
    fn_name: Symbol,
    tag: (TagName, u16),
) -> (Variable, Expr) {
    // Suppose tags = [ A p1 .. pn ]
    // Build a generalized type t_tags = [ A t1 .. tn ],
    // with fresh t1, ..., tn, so that we can re-use the derived impl for many
    // unions of the same tag and payload arity.
    let (union_var, tag_name, payload_vars) = {
        let (label, arity) = tag;

        // Reserve `arity` fresh unbound payload type variables t1 .. tn.
        let variables_slice = VariableSubsSlice::reserve_into_subs(env.subs, arity.into());
        for var_index in variables_slice {
            env.subs[var_index] = env.subs.fresh_unnamed_flex_var();
        }

        let variables_slices_slice =
            SubsSlice::extend_new(&mut env.subs.variable_slices, [variables_slice]);
        let tag_name_index = SubsIndex::push_new(&mut env.subs.tag_names, label.clone());
        let union_tags = UnionTags::from_slices(tag_name_index.as_slice(), variables_slices_slice);
        let tag_union_var = synth_var(
            env.subs,
            Content::Structure(FlatType::TagUnion(union_tags, Variable::EMPTY_TAG_UNION)),
        );

        (
            tag_union_var,
            label,
            env.subs.get_subs_slice(variables_slice).to_vec(),
        )
    };

    // Now, a hasher for this tag union is
    //
    // hash_union : hasher, [ A t1 .. tn ] -> hasher | hasher has Hasher
    // hash_union = \hasher, A x1 .. xn ->
    //   Hash.hash (... (Hash.hash hasher x1) ...) xn
    //
    // Note: no discriminant is hashed, because a newtype union has only one tag.
    let hasher_sym = env.new_symbol("hasher");
    let hasher_var = synth_var(env.subs, Content::FlexAbleVar(None, Symbol::HASH_HASHER));

    // x1 .. xn: one fresh symbol per payload argument
    let payload_syms: Vec<_> = std::iter::repeat_with(|| env.unique_symbol())
        .take(payload_vars.len())
        .collect();

    // `A x1 .. xn` pattern
    let pattern = Pattern::AppliedTag {
        whole_var: union_var,
        tag_name,
        ext_var: Variable::EMPTY_TAG_UNION,
        // (t1, x1) .. (tn, xn)
        arguments: (payload_vars.iter())
            .zip(payload_syms.iter())
            .map(|(var, sym)| (*var, Loc::at_zero(Pattern::Identifier(*sym))))
            .collect(),
    };

    // Fold up `Hash.hash (... (Hash.hash hasher x1) ...) xn`, threading the
    // hasher through each payload in order.
    let (body_var, body_expr) = (payload_vars.into_iter()).zip(payload_syms).fold(
        (hasher_var, Expr::Var(hasher_sym)),
        |total_hasher, (payload_var, payload_sym)| {
            call_hash_hash(env, total_hasher, (payload_var, Expr::Var(payload_sym)))
        },
    );

    // Finally, build the closure
    // \hasher, A x1 .. xn -> body
    build_outer_derived_closure(
        env,
        fn_name,
        (hasher_var, hasher_sym),
        (union_var, pattern),
        (body_var, body_expr),
    )
}
/// Shorthand for building a call to the `Hash.hash` ability member,
/// applying `hasher` to `val`.
fn call_hash_hash(
    env: &mut Env<'_>,
    hasher: (Variable, Expr),
    val: (Variable, Expr),
) -> (Variable, Expr) {
    let member = Symbol::HASH_HASH;
    call_hash_ability_member(env, member, hasher, val)
}
/// Builds a call to the given `Hash` ability member (`Hash.hash`, `Hash.addU16`,
/// etc.), applied to `hasher` and `val`.
///
/// Returns the type variable of the resulting hasher together with the call
/// expression producing it.
fn call_hash_ability_member(
    env: &mut Env<'_>,
    member: Symbol,
    hasher: (Variable, Expr),
    val: (Variable, Expr),
) -> (Variable, Expr) {
    let (hasher_var, hasher_expr) = hasher;
    let (val_var, val_expr) = val;

    // The generic type of the ability member, imported from the builtin module:
    //
    // hasher, val -[uls]-> hasher | hasher has Hasher, val has Hash
    let member_fn_var = env.import_builtin_symbol_var(member);

    // Synthesize the concrete function type of this particular call site:
    // (typeof body), (typeof field) -[clos]-> hasher_result
    let arg_vars = VariableSubsSlice::insert_into_subs(env.subs, [hasher_var, val_var]);
    let clos_var = env.subs.fresh_unnamed_flex_var();
    let out_hasher_var = env.subs.fresh_unnamed_flex_var();
    let call_fn_var = synth_var(
        env.subs,
        Content::Structure(FlatType::Func(arg_vars, clos_var, out_hasher_var)),
    );

    // Unify the generic member type against the concrete call type:
    // hasher, val -[uls]-> hasher | hasher has Hasher, val has Hash
    // ~ (typeof body), (typeof field) -[clos]-> hasher_result
    env.unify(member_fn_var, call_fn_var);

    // The callee is the ability member itself, now specialized to this call:
    // Hash.hash : hasher, (typeof field) -[clos]-> hasher | hasher has Hasher, (typeof field) has Hash
    let fn_head = Expr::AbilityMember(member, None, call_fn_var);
    let fn_data = Box::new((
        call_fn_var,
        Loc::at_zero(fn_head),
        clos_var,
        out_hasher_var,
    ));

    let arguments = vec![
        (hasher_var, Loc::at_zero(hasher_expr)),
        (val_var, Loc::at_zero(val_expr)),
    ];

    (
        out_hasher_var,
        Expr::Call(fn_data, arguments, CalledVia::Space),
    )
}
fn build_outer_derived_closure(
env: &mut Env<'_>,
fn_name: Symbol,
hasher: (Variable, Symbol),
val: (Variable, Pattern),
body: (Variable, Expr),
) -> (Variable, Expr) {
let (hasher_var, hasher_sym) = hasher;
let (val_var, val_pattern) = val;
let (body_var, body_expr) = body;
let (fn_var, fn_clos_var) = {
// Create fn_var for ambient capture; we fix it up below.
@ -156,7 +442,7 @@ fn hash_record(env: &mut Env<'_>, fn_name: Symbol, fields: Vec<Lowercase>) -> (E
);
// hasher, rcd_var -[fn_name]-> (hasher = body_var)
let args_slice = SubsSlice::insert_into_subs(env.subs, [hasher_var, record_var]);
let args_slice = SubsSlice::insert_into_subs(env.subs, [hasher_var, val_var]);
env.subs.set_content(
fn_var,
Content::Structure(FlatType::Func(args_slice, fn_clos_var, body_var)),
@ -179,13 +465,13 @@ fn hash_record(env: &mut Env<'_>, fn_name: Symbol, fields: Vec<Lowercase>) -> (E
Loc::at_zero(Pattern::Identifier(hasher_sym)),
),
(
record_var,
val_var,
AnnotatedMark::known_exhaustive(),
Loc::at_zero(Pattern::Identifier(rcd_sym)),
Loc::at_zero(val_pattern),
),
],
loc_body: Box::new(Loc::at_zero(body)),
loc_body: Box::new(Loc::at_zero(body_expr)),
});
(clos_expr, fn_var)
(fn_var, clos_expr)
}

View file

@ -5,7 +5,7 @@ use roc_module::{
use roc_types::subs::{Content, FlatType, GetSubsSlice, Subs, Variable};
use crate::{
util::{check_derivable_ext_var, debug_name_record},
util::{check_derivable_ext_var, debug_name_record, debug_name_tag},
DeriveError,
};
@ -32,19 +32,7 @@ impl FlatEncodableKey {
FlatEncodableKey::Set() => "set".to_string(),
FlatEncodableKey::Dict() => "dict".to_string(),
FlatEncodableKey::Record(fields) => debug_name_record(fields),
FlatEncodableKey::TagUnion(tags) => {
let mut str = String::from('[');
tags.iter().enumerate().for_each(|(i, (tag, arity))| {
if i > 0 {
str.push(',');
}
str.push_str(tag.0.as_str());
str.push(' ');
str.push_str(&arity.to_string());
});
str.push(']');
str
}
FlatEncodableKey::TagUnion(tags) => debug_name_tag(tags),
}
}
}

View file

@ -1,8 +1,11 @@
use roc_module::{ident::Lowercase, symbol::Symbol};
use roc_types::subs::{Content, FlatType, Subs, Variable};
use roc_module::{
ident::{Lowercase, TagName},
symbol::Symbol,
};
use roc_types::subs::{Content, FlatType, GetSubsSlice, Subs, Variable};
use crate::{
util::{check_derivable_ext_var, debug_name_record},
util::{check_derivable_ext_var, debug_name_record, debug_name_tag},
DeriveError,
};
@ -18,12 +21,14 @@ pub enum FlatHash {
pub enum FlatHashKey {
// Unfortunate that we must allocate here, c'est la vie
Record(Vec<Lowercase>),
TagUnion(Vec<(TagName, u16)>),
}
impl FlatHashKey {
pub(crate) fn debug_name(&self) -> String {
match self {
FlatHashKey::Record(fields) => debug_name_record(fields),
FlatHashKey::TagUnion(tags) => debug_name_tag(tags),
}
}
}
@ -60,16 +65,43 @@ impl FlatHash {
Ok(Key(FlatHashKey::Record(field_names)))
}
FlatType::TagUnion(_tags, _ext) | FlatType::RecursiveTagUnion(_, _tags, _ext) => {
Err(Underivable) // yet
}
FlatType::FunctionOrTagUnion(_name_index, _, _) => {
Err(Underivable) // yet
FlatType::TagUnion(tags, ext) | FlatType::RecursiveTagUnion(_, tags, ext) => {
// The recursion var doesn't matter, because the derived implementation will only
// look on the surface of the tag union type, and more over the payloads of the
// arguments will be left generic for the monomorphizer to fill in with the
// appropriate type. That is,
// [ A t1, B t1 t2 ]
// and
// [ A t1, B t1 t2 ] as R
// look the same on the surface, because `R` is only somewhere inside of the
// `t`-prefixed payload types.
let (tags_iter, ext) = tags.unsorted_tags_and_ext(subs, ext);
check_derivable_ext_var(subs, ext, |ext| {
matches!(ext, Content::Structure(FlatType::EmptyTagUnion))
})?;
let mut tag_names_and_payload_sizes: Vec<_> = tags_iter
.tags
.into_iter()
.map(|(name, payload_slice)| {
let payload_size = payload_slice.len();
(name.clone(), payload_size as _)
})
.collect();
tag_names_and_payload_sizes.sort_by(|(t1, _), (t2, _)| t1.cmp(t2));
Ok(Key(FlatHashKey::TagUnion(tag_names_and_payload_sizes)))
}
FlatType::FunctionOrTagUnion(names_index, _, _) => Ok(Key(FlatHashKey::TagUnion(
subs.get_subs_slice(names_index)
.iter()
.map(|t| (t.clone(), 0))
.collect(),
))),
FlatType::EmptyRecord => Ok(Key(FlatHashKey::Record(vec![]))),
FlatType::EmptyTagUnion => {
Err(Underivable) // yet
}
FlatType::EmptyTagUnion => Ok(Key(FlatHashKey::TagUnion(vec![]))),
//
FlatType::Erroneous(_) => Err(Underivable),
FlatType::Func(..) => Err(Underivable),
@ -111,7 +143,8 @@ impl FlatHash {
},
Content::RangedNumber(_) => Err(Underivable),
//
Content::RecursionVar { .. } => Err(Underivable),
Content::RecursionVar { structure, .. } => Self::from_var(subs, structure),
//
Content::Error => Err(Underivable),
Content::FlexVar(_)
| Content::RigidVar(_)

View file

@ -76,6 +76,7 @@ pub enum DeriveBuiltin {
ToEncoder,
Decoder,
Hash,
IsEq,
}
impl TryFrom<Symbol> for DeriveBuiltin {
@ -86,6 +87,7 @@ impl TryFrom<Symbol> for DeriveBuiltin {
Symbol::ENCODE_TO_ENCODER => Ok(DeriveBuiltin::ToEncoder),
Symbol::DECODE_DECODER => Ok(DeriveBuiltin::Decoder),
Symbol::HASH_HASH => Ok(DeriveBuiltin::Hash),
Symbol::BOOL_IS_EQ => Ok(DeriveBuiltin::IsEq),
_ => Err(value),
}
}
@ -112,6 +114,13 @@ impl Derived {
}
FlatHash::Key(repr) => Ok(Derived::Key(DeriveKey::Hash(repr))),
},
DeriveBuiltin::IsEq => {
// If obligation checking passes, we always lower derived implementations of `isEq`
// to the `Eq` low-level, to be fulfilled by the backends.
Ok(Derived::SingleLambdaSetImmediate(
Symbol::BOOL_STRUCTURAL_EQ,
))
}
}
}
}

View file

@ -1,4 +1,4 @@
use roc_module::ident::Lowercase;
use roc_module::ident::{Lowercase, TagName};
use roc_types::subs::{Content, Subs, Variable};
use crate::DeriveError;
@ -42,3 +42,17 @@ pub(crate) fn debug_name_record(fields: &[Lowercase]) -> String {
str.push('}');
str
}
/// Renders a debug name for a tag union from its `(tag, payload arity)` pairs,
/// e.g. `[A 2,B 0]`.
pub(crate) fn debug_name_tag(tags: &[(TagName, u16)]) -> String {
    let mut buf = String::from('[');
    for (i, (tag, arity)) in tags.iter().enumerate() {
        // Comma-separate every entry after the first.
        if i > 0 {
            buf.push(',');
        }
        buf.push_str(tag.0.as_str());
        buf.push(' ');
        buf.push_str(&arity.to_string());
    }
    buf.push(']');
    buf
}

View file

@ -88,7 +88,7 @@ This is the general procedure I follow with some helpful links:
## Helpful Resources
- [Compiler Explorer](godbolt.org) -
- [Compiler Explorer](https://godbolt.org/) -
Generates assembly from most languages.
Really good for getting a reference for what is required to do something.
Can answer questions like "how would x be implemented in arm assembly?"

View file

@ -6864,7 +6864,7 @@ pub fn to_cc_return<'a, 'ctx, 'env>(env: &Env<'a, 'ctx, 'env>, layout: &Layout<'
return_size >= 2 * env.target_info.ptr_width() as u32
}
roc_target::OperatingSystem::Unix => return_size > 2 * env.target_info.ptr_width() as u32,
roc_target::OperatingSystem::Wasi => unreachable!(),
roc_target::OperatingSystem::Wasi => return_size > 2 * env.target_info.ptr_width() as u32,
};
if return_size == 0 {

View file

@ -50,6 +50,7 @@ impl From<Layout<'_>> for CodeGenNumType {
|| internal_error!("Tried to perform a Num low-level operation on {:?}", layout);
match layout {
Layout::Builtin(builtin) => match builtin {
Builtin::Bool => I32,
Builtin::Int(int_width) => match int_width {
IntWidth::U8 => I32,
IntWidth::U16 => I32,
@ -1724,6 +1725,7 @@ impl<'a> LowLevelCall<'a> {
let arg_type = CodeGenNumType::from(arg_layout);
let arg_width = match arg_layout {
Layout::Builtin(Builtin::Int(w)) => w,
Layout::Builtin(Builtin::Bool) => IntWidth::U8,
x => internal_error!("Num.intCast is not defined for {:?}", x),
};

View file

@ -20,6 +20,7 @@ roc_builtins = { path = "../builtins" }
roc_module = { path = "../module" }
roc_reporting = { path = "../../reporting" }
roc_target = { path = "../roc_target" }
roc_can = { path = "../can" }
bumpalo = { version = "3.11.0", features = ["collections"] }
[target.'cfg(not(windows))'.build-dependencies]

View file

@ -11,14 +11,15 @@ const SKIP_SUBS_CACHE: bool = {
}
};
// IFTTT: crates/compiler/load/src/lib.rs
const MODULES: &[(ModuleId, &str)] = &[
(ModuleId::BOOL, "Bool.roc"),
(ModuleId::DICT, "Dict.roc"),
(ModuleId::SET, "Set.roc"),
(ModuleId::RESULT, "Result.roc"),
(ModuleId::NUM, "Num.roc"),
(ModuleId::LIST, "List.roc"),
(ModuleId::STR, "Str.roc"),
(ModuleId::DICT, "Dict.roc"),
(ModuleId::SET, "Set.roc"),
(ModuleId::BOX, "Box.roc"),
(ModuleId::ENCODE, "Encode.roc"),
(ModuleId::DECODE, "Decode.roc"),
@ -46,26 +47,27 @@ fn write_subs_for_module(module_id: ModuleId, filename: &str) {
#[cfg(not(windows))]
if SKIP_SUBS_CACHE {
write_subs_for_module_dummy(&output_path)
write_types_for_module_dummy(&output_path)
} else {
write_subs_for_module_real(module_id, filename, &output_path)
write_types_for_module_real(module_id, filename, &output_path)
}
#[cfg(windows)]
{
let _ = SKIP_SUBS_CACHE;
let _ = module_id;
write_subs_for_module_dummy(&output_path)
write_types_for_module_dummy(&output_path)
}
}
fn write_subs_for_module_dummy(output_path: &Path) {
fn write_types_for_module_dummy(output_path: &Path) {
// write out a dummy file
std::fs::write(output_path, &[]).unwrap();
}
#[cfg(not(windows))]
fn write_subs_for_module_real(module_id: ModuleId, filename: &str, output_path: &Path) {
fn write_types_for_module_real(module_id: ModuleId, filename: &str, output_path: &Path) {
use roc_can::module::TypeState;
use roc_load_internal::file::{LoadingProblem, Threading};
let arena = Bump::new();
@ -94,9 +96,19 @@ fn write_subs_for_module_real(module_id: ModuleId, filename: &str, output_path:
}
};
let subs = module.solved.inner();
let subs = module.solved.into_inner();
let exposed_vars_by_symbol: Vec<_> = module.exposed_to_host.into_iter().collect();
let abilities = module.abilities_store;
let solved_implementations = module.resolved_implementations;
let mut file = std::fs::File::create(&output_path).unwrap();
subs.serialize(&exposed_vars_by_symbol, &mut file).unwrap();
let type_state = TypeState {
subs,
exposed_vars_by_symbol,
abilities,
solved_implementations,
};
type_state.serialize(&mut file).unwrap();
}

View file

@ -1,10 +1,9 @@
use bumpalo::Bump;
use roc_can::module::ExposedByModule;
use roc_can::module::{ExposedByModule, TypeState};
use roc_collections::all::MutMap;
use roc_module::symbol::{ModuleId, Symbol};
use roc_module::symbol::ModuleId;
use roc_reporting::report::RenderTarget;
use roc_target::TargetInfo;
use roc_types::subs::{Subs, Variable};
use std::path::PathBuf;
const SKIP_SUBS_CACHE: bool = {
@ -27,9 +26,9 @@ fn load<'a>(
exposed_types: ExposedByModule,
load_config: LoadConfig,
) -> Result<LoadResult<'a>, LoadingProblem<'a>> {
let cached_subs = read_cached_subs();
let cached_types = read_cached_types();
roc_load_internal::file::load(arena, load_start, exposed_types, cached_subs, load_config)
roc_load_internal::file::load(arena, load_start, exposed_types, cached_types, load_config)
}
/// Load using only a single thread; used when compiling to webassembly
@ -41,7 +40,7 @@ pub fn load_single_threaded<'a>(
render: RenderTarget,
exec_mode: ExecutionMode,
) -> Result<LoadResult<'a>, LoadingProblem<'a>> {
let cached_subs = read_cached_subs();
let cached_subs = read_cached_types();
roc_load_internal::file::load_single_threaded(
arena,
@ -157,37 +156,48 @@ pub fn load_and_typecheck_str<'a>(
}
}
// IFTTT: crates/compiler/load/build.rs
const BOOL: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/Bool.dat")) as &[_];
const RESULT: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/Result.dat")) as &[_];
const LIST: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/List.dat")) as &[_];
const STR: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/Str.dat")) as &[_];
const DICT: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/Dict.dat")) as &[_];
const SET: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/Set.dat")) as &[_];
const BOX: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/Box.dat")) as &[_];
const RESULT: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/Result.dat")) as &[_];
const NUM: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/Num.dat")) as &[_];
const LIST: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/List.dat")) as &[_];
const STR: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/Str.dat")) as &[_];
const BOX: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/Box.dat")) as &[_];
const ENCODE: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/Encode.dat")) as &[_];
const DECODE: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/Decode.dat")) as &[_];
const HASH: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/Hash.dat")) as &[_];
fn deserialize_help(bytes: &[u8]) -> (Subs, Vec<(Symbol, Variable)>) {
let (subs, slice) = Subs::deserialize(bytes);
fn deserialize_help(bytes: &[u8]) -> TypeState {
let (state, _offset) = TypeState::deserialize(bytes);
debug_assert_eq!(bytes.len(), _offset);
(subs, slice.to_vec())
state
}
fn read_cached_subs() -> MutMap<ModuleId, (Subs, Vec<(Symbol, Variable)>)> {
fn read_cached_types() -> MutMap<ModuleId, TypeState> {
let mut output = MutMap::default();
// Wasm seems to re-order definitions between build time and runtime, but only in release mode.
// That is very strange, but we can solve it separately
if !cfg!(target_family = "wasm") && !cfg!(windows) && !SKIP_SUBS_CACHE {
output.insert(ModuleId::BOOL, deserialize_help(BOOL));
output.insert(ModuleId::RESULT, deserialize_help(RESULT));
output.insert(ModuleId::NUM, deserialize_help(NUM));
output.insert(ModuleId::LIST, deserialize_help(LIST));
output.insert(ModuleId::STR, deserialize_help(STR));
output.insert(ModuleId::DICT, deserialize_help(DICT));
output.insert(ModuleId::SET, deserialize_help(SET));
output.insert(ModuleId::BOX, deserialize_help(BOX));
output.insert(ModuleId::DICT, deserialize_help(DICT));
output.insert(ModuleId::SET, deserialize_help(SET));
output.insert(ModuleId::ENCODE, deserialize_help(ENCODE));
output.insert(ModuleId::DECODE, deserialize_help(DECODE));
output.insert(ModuleId::HASH, deserialize_help(HASH));
}
output

View file

@ -11,7 +11,7 @@ use roc_can::expr::Declarations;
use roc_can::expr::PendingDerives;
use roc_can::module::{
canonicalize_module_defs, ExposedByModule, ExposedForModule, ExposedModuleTypes, Module,
ResolvedImplementations,
ResolvedImplementations, TypeState,
};
use roc_collections::{default_hasher, BumpMap, MutMap, MutSet, VecMap, VecSet};
use roc_constrain::module::constrain_module;
@ -344,6 +344,9 @@ fn start_phase<'a>(
)
});
// Add the declared abilities from the modules we import;
// we may not know all their types yet since type-solving happens in
// parallel, but we'll fill that in during type-checking our module.
abilities_store
.union(import_store.closure_from_imported(exposed_symbols));
}
@ -353,7 +356,7 @@ fn start_phase<'a>(
let skip_constraint_gen = {
// Give this its own scope to make sure that the Guard from the lock() is dropped
// immediately after contains_key returns
state.cached_subs.lock().contains_key(&module_id)
state.cached_types.lock().contains_key(&module_id)
};
BuildTask::CanonicalizeAndConstrain {
@ -398,7 +401,7 @@ fn start_phase<'a>(
&state.exposed_types,
dep_idents,
declarations,
state.cached_subs.clone(),
state.cached_types.clone(),
derived_module,
)
}
@ -934,13 +937,13 @@ struct State<'a> {
make_specializations_pass: MakeSpecializationsPass,
// cached subs (used for builtin modules, could include packages in the future too)
cached_subs: CachedSubs,
// cached types (used for builtin modules, could include packages in the future too)
cached_types: CachedTypeState,
layout_interner: Arc<GlobalInterner<'a, Layout<'a>>>,
}
type CachedSubs = Arc<Mutex<MutMap<ModuleId, (Subs, Vec<(Symbol, Variable)>)>>>;
type CachedTypeState = Arc<Mutex<MutMap<ModuleId, TypeState>>>;
impl<'a> State<'a> {
fn goal_phase(&self) -> Phase {
@ -954,7 +957,7 @@ impl<'a> State<'a> {
exposed_types: ExposedByModule,
arc_modules: Arc<Mutex<PackageModuleIds<'a>>>,
ident_ids_by_module: SharedIdentIdsByModule,
cached_subs: MutMap<ModuleId, (Subs, Vec<(Symbol, Variable)>)>,
cached_types: MutMap<ModuleId, TypeState>,
render: RenderTarget,
number_of_workers: usize,
exec_mode: ExecutionMode,
@ -985,7 +988,7 @@ impl<'a> State<'a> {
exposed_symbols_by_module: MutMap::default(),
timings: MutMap::default(),
layout_caches: std::vec::Vec::with_capacity(number_of_workers),
cached_subs: Arc::new(Mutex::new(cached_subs)),
cached_types: Arc::new(Mutex::new(cached_types)),
render,
exec_mode,
make_specializations_pass: MakeSpecializationsPass::Pass(1),
@ -1098,7 +1101,7 @@ enum BuildTask<'a> {
var_store: VarStore,
declarations: Declarations,
dep_idents: IdentIdsByModule,
cached_subs: CachedSubs,
cached_subs: CachedTypeState,
derived_module: SharedDerivedModule,
},
BuildPendingSpecializations {
@ -1463,7 +1466,7 @@ pub fn load<'a>(
arena: &'a Bump,
load_start: LoadStart<'a>,
exposed_types: ExposedByModule,
cached_subs: MutMap<ModuleId, (Subs, Vec<(Symbol, Variable)>)>,
cached_types: MutMap<ModuleId, TypeState>,
load_config: LoadConfig,
) -> Result<LoadResult<'a>, LoadingProblem<'a>> {
enum Threads {
@ -1496,7 +1499,7 @@ pub fn load<'a>(
load_start,
exposed_types,
load_config.target_info,
cached_subs,
cached_types,
load_config.render,
load_config.exec_mode,
),
@ -1505,7 +1508,7 @@ pub fn load<'a>(
load_start,
exposed_types,
load_config.target_info,
cached_subs,
cached_types,
load_config.render,
threads,
load_config.exec_mode,
@ -1520,7 +1523,7 @@ pub fn load_single_threaded<'a>(
load_start: LoadStart<'a>,
exposed_types: ExposedByModule,
target_info: TargetInfo,
cached_subs: MutMap<ModuleId, (Subs, Vec<(Symbol, Variable)>)>,
cached_types: MutMap<ModuleId, TypeState>,
render: RenderTarget,
exec_mode: ExecutionMode,
) -> Result<LoadResult<'a>, LoadingProblem<'a>> {
@ -1546,7 +1549,7 @@ pub fn load_single_threaded<'a>(
exposed_types,
arc_modules,
ident_ids_by_module,
cached_subs,
cached_types,
render,
number_of_workers,
exec_mode,
@ -1768,7 +1771,7 @@ fn load_multi_threaded<'a>(
load_start: LoadStart<'a>,
exposed_types: ExposedByModule,
target_info: TargetInfo,
cached_subs: MutMap<ModuleId, (Subs, Vec<(Symbol, Variable)>)>,
cached_types: MutMap<ModuleId, TypeState>,
render: RenderTarget,
available_threads: usize,
exec_mode: ExecutionMode,
@ -1810,7 +1813,7 @@ fn load_multi_threaded<'a>(
exposed_types,
arc_modules,
ident_ids_by_module,
cached_subs,
cached_types,
render,
num_workers,
exec_mode,
@ -4199,7 +4202,7 @@ impl<'a> BuildTask<'a> {
exposed_types: &ExposedByModule,
dep_idents: IdentIdsByModule,
declarations: Declarations,
cached_subs: CachedSubs,
cached_subs: CachedTypeState,
derived_module: SharedDerivedModule,
) -> Self {
let exposed_by_module = exposed_types.retain_modules(imported_modules.keys());
@ -4339,6 +4342,9 @@ pub fn add_imports(
import_variables.push(list_len_type);
}
// Fill in the implementation information of the abilities from the modules we import, which we
// now know because all imported modules should be solved by now.
//
// TODO: see if we can reduce the amount of specializations we need to import.
// One idea is to just always assume external modules fulfill their specialization obligations
// and save lambda set resolution for mono.
@ -4473,7 +4479,11 @@ fn run_solve_solve(
// ability.
let exposed_vars_by_symbol: Vec<_> = solved_env
.vars_by_symbol()
.filter(|(k, _)| exposed_symbols.contains(k) || is_specialization_symbol(*k))
.filter(|(k, _)| {
exposed_symbols.contains(k)
|| is_specialization_symbol(*k)
|| k.is_exposed_for_builtin_derivers()
})
.collect();
(
@ -4506,7 +4516,7 @@ fn run_solve<'a>(
var_store: VarStore,
decls: Declarations,
dep_idents: IdentIdsByModule,
cached_subs: CachedSubs,
cached_types: CachedTypeState,
derived_module: SharedDerivedModule,
) -> Msg<'a> {
let solve_start = Instant::now();
@ -4522,7 +4532,7 @@ fn run_solve<'a>(
let (solved_subs, solved_implementations, exposed_vars_by_symbol, problems, abilities_store) = {
if module_id.is_builtin() {
match cached_subs.lock().remove(&module_id) {
match cached_types.lock().remove(&module_id) {
None => run_solve_solve(
exposed_for_module,
constraints,
@ -4532,17 +4542,18 @@ fn run_solve<'a>(
module,
derived_module,
),
Some((subs, exposed_vars_by_symbol)) => {
(
Solved(subs),
// TODO(abilities) cache abilities for builtins
VecMap::default(),
exposed_vars_by_symbol.to_vec(),
vec![],
// TODO(abilities) cache abilities for builtins
AbilitiesStore::default(),
)
}
Some(TypeState {
subs,
exposed_vars_by_symbol,
abilities,
solved_implementations,
}) => (
Solved(subs),
solved_implementations,
exposed_vars_by_symbol,
vec![],
abilities,
),
}
} else {
run_solve_solve(

View file

@ -1,9 +1,26 @@
#![warn(clippy::dbg_macro)]
// See github.com/roc-lang/roc/issues/800 for discussion of the large_enum_variant check.
#![allow(clippy::large_enum_variant)]
use roc_module::symbol::ModuleId;
pub mod docs;
pub mod file;
mod work;
#[cfg(target_family = "wasm")]
mod wasm_instant;
/// The builtin modules, paired with their user-facing module names.
// NOTE(review): presumably this listing must stay in sync with the builtin
// module IDs declared elsewhere — confirm against the ModuleId definitions
// before adding or reordering entries.
pub const BUILTIN_MODULES: &[(ModuleId, &str)] = &[
    (ModuleId::BOOL, "Bool"),
    (ModuleId::RESULT, "Result"),
    (ModuleId::NUM, "Num"),
    (ModuleId::LIST, "List"),
    (ModuleId::STR, "Str"),
    (ModuleId::DICT, "Dict"),
    (ModuleId::SET, "Set"),
    (ModuleId::BOX, "Box"),
    (ModuleId::ENCODE, "Encode"),
    (ModuleId::DECODE, "Decode"),
    (ModuleId::HASH, "Hash"),
    (ModuleId::JSON, "Json"),
];

View file

@ -22,7 +22,7 @@ initialModel = \start ->
}
cheapestOpen : (position -> F64), Model position -> Result position [KeyNotFound]*
cheapestOpen : (position -> F64), Model position -> Result position [KeyNotFound]* | position has Eq
cheapestOpen = \costFunction, model ->
folder = \resSmallestSoFar, position ->
@ -47,7 +47,7 @@ cheapestOpen = \costFunction, model ->
reconstructPath : Dict position position, position -> List position
reconstructPath : Dict position position, position -> List position | position has Eq
reconstructPath = \cameFrom, goal ->
when Dict.get cameFrom goal is
Err KeyNotFound ->
@ -56,7 +56,7 @@ reconstructPath = \cameFrom, goal ->
Ok next ->
List.append (reconstructPath cameFrom next) goal
updateCost : position, position, Model position -> Model position
updateCost : position, position, Model position -> Model position | position has Eq
updateCost = \current, neighbour, model ->
newCameFrom = Dict.insert model.cameFrom neighbour current
@ -80,12 +80,12 @@ updateCost = \current, neighbour, model ->
model
findPath : { costFunction: (position, position -> F64), moveFunction: (position -> Set position), start : position, end : position } -> Result (List position) [KeyNotFound]*
findPath : { costFunction: (position, position -> F64), moveFunction: (position -> Set position), start : position, end : position } -> Result (List position) [KeyNotFound]* | position has Eq
findPath = \{ costFunction, moveFunction, start, end } ->
astar costFunction moveFunction end (initialModel start)
astar : (position, position -> F64), (position -> Set position), position, Model position -> [Err [KeyNotFound]*, Ok (List position)]*
astar : (position, position -> F64), (position -> Set position), position, Model position -> [Err [KeyNotFound]*, Ok (List position)]* | position has Eq
astar = \costFn, moveFn, goal, model ->
when cheapestOpen (\position -> costFn goal position) model is
Err _ ->

View file

@ -481,12 +481,12 @@ fn load_astar() {
expect_types(
loaded_module,
hashmap! {
"findPath" => "{ costFunction : position, position -> F64, end : position, moveFunction : position -> Set position, start : position } -> Result (List position) [KeyNotFound]*",
"findPath" => "{ costFunction : position, position -> F64, end : position, moveFunction : position -> Set position, start : position } -> Result (List position) [KeyNotFound]* | position has Eq",
"initialModel" => "position -> Model position",
"reconstructPath" => "Dict position position, position -> List position",
"updateCost" => "position, position, Model position -> Model position",
"cheapestOpen" => "(position -> F64), Model position -> Result position [KeyNotFound]*",
"astar" => "(position, position -> F64), (position -> Set position), position, Model position -> [Err [KeyNotFound]*, Ok (List position)]*",
"reconstructPath" => "Dict position position, position -> List position | position has Eq",
"updateCost" => "position, position, Model position -> Model position | position has Eq",
"cheapestOpen" => "(position -> F64), Model position -> Result position [KeyNotFound]* | position has Eq",
"astar" => "(position, position -> F64), (position -> Set position), position, Model position -> [Err [KeyNotFound]*, Ok (List position)]* | position has Eq",
},
);
}

View file

@ -217,7 +217,6 @@ macro_rules! map_symbol_to_lowlevel {
LowLevel::StrFromInt => unimplemented!(),
LowLevel::StrFromFloat => unimplemented!(),
LowLevel::NumIsFinite => unimplemented!(),
}
}
};
@ -310,8 +309,8 @@ map_symbol_to_lowlevel! {
NumShiftRightBy <= NUM_SHIFT_RIGHT,
NumShiftRightZfBy <= NUM_SHIFT_RIGHT_ZERO_FILL,
NumToStr <= NUM_TO_STR,
Eq <= BOOL_EQ,
NotEq <= BOOL_NEQ,
Eq <= BOOL_STRUCTURAL_EQ,
NotEq <= BOOL_STRUCTURAL_NOT_EQ,
And <= BOOL_AND,
Or <= BOOL_OR,
Not <= BOOL_NOT,

View file

@ -51,6 +51,7 @@ pub const DERIVABLE_ABILITIES: &[(Symbol, &[Symbol])] = &[
(Symbol::ENCODE_ENCODING, &[Symbol::ENCODE_TO_ENCODER]),
(Symbol::DECODE_DECODING, &[Symbol::DECODE_DECODER]),
(Symbol::HASH_HASH_ABILITY, &[Symbol::HASH_HASH]),
(Symbol::BOOL_EQ, &[Symbol::BOOL_IS_EQ]),
];
/// In Debug builds only, Symbol has a name() method that lets
@ -98,6 +99,17 @@ impl Symbol {
DERIVABLE_ABILITIES.iter().find(|(name, _)| *name == self)
}
/// A symbol that should never be exposed to userspace, but needs to be exposed
/// to compiled modules for deriving abilities for structural types.
pub fn is_exposed_for_builtin_derivers(&self) -> bool {
matches!(
self,
// The `structuralEq` call used deriving structural equality, which will wrap the `Eq`
// low-level implementation.
&Self::BOOL_STRUCTURAL_EQ
)
}
pub fn module_string<'a>(&self, interns: &'a Interns) -> &'a ModuleName {
interns
.module_ids
@ -796,6 +808,7 @@ macro_rules! define_builtins {
$(exposed_type=$exposed_type:literal)?
$(in_scope_for_hints=$in_scope_for_hints:literal)?
)*
$(unexposed $u_ident_id:literal $u_ident_const:ident: $u_ident_name:literal)*
}
)+
num_modules: $total:literal
@ -943,6 +956,9 @@ macro_rules! define_builtins {
$(
pub const $ident_const: Symbol = Symbol::new(ModuleId::$module_const, IdentId($ident_id));
)*
$(
pub const $u_ident_const: Symbol = Symbol::new(ModuleId::$module_const, IdentId($u_ident_id));
)*
)+
/// The default `Apply` types that should be in scope,
@ -1239,8 +1255,12 @@ define_builtins! {
4 BOOL_OR: "or"
5 BOOL_NOT: "not"
6 BOOL_XOR: "xor"
7 BOOL_EQ: "isEq"
8 BOOL_NEQ: "isNotEq"
7 BOOL_NEQ: "isNotEq"
8 BOOL_EQ: "Eq" exposed_type=true
9 BOOL_IS_EQ: "isEq"
10 BOOL_IS_EQ_IMPL: "boolIsEq"
unexposed 11 BOOL_STRUCTURAL_EQ: "structuralEq"
unexposed 12 BOOL_STRUCTURAL_NOT_EQ: "structuralNotEq"
}
5 STR: "Str" => {
0 STR_STR: "Str" exposed_apply_type=true // the Str.Str type alias
@ -1297,6 +1317,7 @@ define_builtins! {
51 STR_REPLACE_FIRST: "replaceFirst"
52 STR_REPLACE_LAST: "replaceLast"
53 STR_WITH_CAPACITY: "withCapacity"
54 STR_WITH_PREFIX: "withPrefix"
}
6 LIST: "List" => {
0 LIST_LIST: "List" exposed_apply_type=true // the List.List type alias
@ -1376,6 +1397,7 @@ define_builtins! {
74 LIST_MAP_TRY: "mapTry"
75 LIST_WALK_TRY: "walkTry"
76 LIST_WALK_BACKWARDS_UNTIL: "walkBackwardsUntil"
77 LIST_COUNT_IF: "countIf"
}
7 RESULT: "Result" => {
0 RESULT_RESULT: "Result" exposed_type=true // the Result.Result type alias
@ -1410,6 +1432,9 @@ define_builtins! {
15 DICT_WITH_CAPACITY: "withCapacity"
16 DICT_CAPACITY: "capacity"
17 DICT_UPDATE: "update"
18 DICT_LIST_GET_UNSAFE: "listGetUnsafe"
}
9 SET: "Set" => {
0 SET_SET: "Set" exposed_type=true // the Set.Set type alias
@ -1509,6 +1534,7 @@ define_builtins! {
14 HASH_COMPLETE: "complete"
15 HASH_HASH_STR_BYTES: "hashStrBytes"
16 HASH_HASH_LIST: "hashList"
17 HASH_HASH_UNORDERED: "hashUnordered"
}
14 JSON: "Json" => {
0 JSON_JSON: "Json"

View file

@ -2604,22 +2604,25 @@ fn from_can_let<'a>(
);
}
if let roc_can::expr::Expr::Var(outer_symbol) = def.loc_expr.value {
store_pattern(env, procs, layout_cache, &mono_pattern, outer_symbol, stmt)
} else {
let outer_symbol = env.unique_symbol();
stmt = store_pattern(env, procs, layout_cache, &mono_pattern, outer_symbol, stmt);
match def.loc_expr.value {
roc_can::expr::Expr::Var(outer_symbol) if !procs.is_module_thunk(outer_symbol) => {
store_pattern(env, procs, layout_cache, &mono_pattern, outer_symbol, stmt)
}
_ => {
let outer_symbol = env.unique_symbol();
stmt = store_pattern(env, procs, layout_cache, &mono_pattern, outer_symbol, stmt);
// convert the def body, store in outer_symbol
with_hole(
env,
def.loc_expr.value,
def.expr_var,
procs,
layout_cache,
outer_symbol,
env.arena.alloc(stmt),
)
// convert the def body, store in outer_symbol
with_hole(
env,
def.loc_expr.value,
def.expr_var,
procs,
layout_cache,
outer_symbol,
env.arena.alloc(stmt),
)
}
}
}
@ -3902,7 +3905,7 @@ fn specialize_naked_symbol<'a>(
std::vec::Vec::new(),
layout_cache,
assigned,
env.arena.alloc(Stmt::Ret(assigned)),
hole,
);
return result;

View file

@ -879,15 +879,8 @@ impl<'a> UnionLayout<'a> {
}
}
pub fn tag_id_layout(&self) -> Layout<'a> {
// TODO is it beneficial to return a more specific layout?
// e.g. Layout::bool() and Layout::VOID
match self.discriminant() {
Discriminant::U0 => Layout::u8(),
Discriminant::U1 => Layout::u8(),
Discriminant::U8 => Layout::u8(),
Discriminant::U16 => Layout::u16(),
}
pub fn tag_id_layout(&self) -> Layout<'static> {
self.discriminant().layout()
}
fn stores_tag_id_in_pointer_bits(tags: &[&[Layout<'a>]], target_info: TargetInfo) -> bool {
@ -1138,6 +1131,17 @@ impl Discriminant {
pub const fn alignment_bytes(&self) -> u32 {
self.stack_size()
}
pub const fn layout(&self) -> Layout<'static> {
// TODO is it beneficial to return a more specific layout?
// e.g. Layout::bool() and Layout::VOID
match self {
Discriminant::U0 => Layout::u8(),
Discriminant::U1 => Layout::u8(),
Discriminant::U8 => Layout::u8(),
Discriminant::U16 => Layout::u16(),
}
}
}
/// Custom type so we can get the numeric representation of a symbol in tests (so `#UserApp.3`
@ -2210,7 +2214,16 @@ impl<'a> Layout<'a> {
// completely, but for now we represent it with the empty tag union
cacheable(Ok(Layout::VOID))
}
FlexAbleVar(_, _) | RigidAbleVar(_, _) => todo_abilities!("Not reachable yet"),
FlexAbleVar(_, _) | RigidAbleVar(_, _) => {
roc_debug_flags::dbg_do!(roc_debug_flags::ROC_NO_UNBOUND_LAYOUT, {
todo_abilities!("Able var is unbound!");
});
// If we encounter an unbound type var (e.g. `*` or `a`)
// then it's zero-sized; In the future we may drop this argument
// completely, but for now we represent it with the empty tag union
cacheable(Ok(Layout::VOID))
}
RecursionVar { structure, .. } => {
let structure_content = env.subs.get_content_without_compacting(structure);
Self::new_help(env, structure, *structure_content)
@ -2713,7 +2726,7 @@ impl<'a> Layout<'a> {
Layout::Builtin(Builtin::Int(IntWidth::U8))
}
pub fn u16() -> Layout<'a> {
pub const fn u16() -> Layout<'a> {
Layout::Builtin(Builtin::Int(IntWidth::U16))
}

View file

@ -0,0 +1,9 @@
[package]
name = "roc_serialize"
version = "0.0.1"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
edition = "2021"
[dependencies]
roc_collections = { path = "../collections" }

View file

@ -0,0 +1,385 @@
use std::{
borrow::Borrow,
io::{self, Write},
};
use roc_collections::{MutMap, VecMap};
/// Writes `slice` to `writer`, preceded by zero padding so that the payload
/// begins at a multiple of `T`'s alignment (measured from the start of the
/// stream, i.e. after the `written` bytes already emitted).
///
/// Returns the total number of bytes written to the stream so far.
pub fn serialize_slice<T: Copy>(
    slice: &[T],
    writer: &mut impl Write,
    written: usize,
) -> io::Result<usize> {
    let aligned_offset = next_multiple_of(written, std::mem::align_of::<T>());
    let padding = aligned_offset - written;

    // Emit the alignment padding as zero bytes.
    writer.write_all(&vec![0u8; padding])?;

    // The raw bytes of the `Copy` elements are written verbatim; the reader
    // reinterprets them in place via `deserialize_slice`.
    let payload = unsafe { slice_as_bytes(slice) };
    writer.write_all(payload)?;

    Ok(aligned_offset + payload.len())
}
/// Reads back a `&[T]` written by [`serialize_slice`].
///
/// Skips any alignment padding, then reinterprets the next
/// `length * size_of::<T>()` bytes as a `&[T]`. Returns the borrowed slice
/// and the offset just past it.
pub fn deserialize_slice<T: Copy>(bytes: &[u8], length: usize, offset: usize) -> (&[T], usize) {
    // Skip the zero padding inserted by the serializer.
    let start = next_multiple_of(offset, std::mem::align_of::<T>());
    let byte_length = length * std::mem::size_of::<T>();
    let raw = &bytes[start..start + byte_length];

    // SAFETY: assumes `bytes[start..]` really holds `length` values of `T`
    // produced by `serialize_slice`, and that the base pointer of `bytes`
    // is itself sufficiently aligned for `T` — confirm at the call sites.
    let slice = unsafe { std::slice::from_raw_parts(raw.as_ptr() as *const T, length) };

    (slice, start + byte_length)
}
/// Like [`deserialize_slice`], but copies the data out into an owned
/// `Vec<T>` instead of borrowing from `bytes`.
pub fn deserialize_vec<T: Clone + Copy>(
    bytes: &[u8],
    length: usize,
    offset: usize,
) -> (Vec<T>, usize) {
    let (borrowed, next_offset) = deserialize_slice::<T>(bytes, length, offset);
    (borrowed.to_vec(), next_offset)
}
/// A compact (start, length) header describing a run of elements inside a
/// shared backing vector.
///
/// Used by `serialize_slice_of_slices` to flatten a slice-of-slices into one
/// item buffer plus a list of these headers. `start` is stored as `u32` and
/// `length` as `u16` to keep the serialized header small.
#[derive(Copy, Clone)]
struct VecSlice<T> {
    /// Index of the first element in the backing vector.
    pub start: u32,
    /// Number of elements in this run.
    pub length: u16,
    /// Ties the header to its element type without storing a value.
    _marker: std::marker::PhantomData<T>,
}

impl<T> VecSlice<T> {
    /// Number of elements referenced by this header.
    const fn len(&self) -> usize {
        self.length as usize
    }

    /// Range of indices into the backing vector covered by this header.
    fn indices(&self) -> std::ops::Range<usize> {
        let start = self.start as usize;
        start..start + self.len()
    }

    /// Appends `it` to `vec` and returns a header describing the appended run.
    ///
    /// Panics if the start index exceeds `u32::MAX` or the run length exceeds
    /// `u16::MAX` — a plain `as` cast here would silently truncate and produce
    /// a corrupt serialized header instead.
    fn extend_new(vec: &mut Vec<T>, it: impl IntoIterator<Item = T>) -> Self {
        let start = vec.len();
        vec.extend(it);
        let end = vec.len();

        Self {
            start: u32::try_from(start).expect("VecSlice start overflows u32"),
            length: u16::try_from(end - start).expect("VecSlice length overflows u16"),
            _marker: std::marker::PhantomData,
        }
    }
}
/// Serializes a slice of slices as two consecutive sections: a list of
/// `VecSlice` headers (start/length pairs), followed by every item flattened
/// into one contiguous buffer.
pub fn serialize_slice_of_slices<'a, T, U>(
    slice_of_slices: &[U],
    writer: &mut impl Write,
    written: usize,
) -> io::Result<usize>
where
    T: 'a + Copy,
    U: 'a + Borrow<[T]> + Sized,
{
    let mut flattened: Vec<T> = Vec::new();

    // Build one header per inner slice while copying its items into the
    // shared `flattened` buffer.
    let headers: Vec<VecSlice<T>> = slice_of_slices
        .iter()
        .map(|inner| VecSlice::extend_new(&mut flattened, inner.borrow().iter().copied()))
        .collect();

    let written = serialize_slice(&headers, writer, written)?;
    serialize_slice(&flattened, writer, written)
}
pub fn deserialize_slice_of_slices<T, Container>(
bytes: &[u8],
length: usize,
offset: usize,
) -> (Vec<Container>, usize)
where
T: Copy,
Container: From<Vec<T>>,
{
let (serialized_slices, offset) = deserialize_slice::<VecSlice<T>>(bytes, length, offset);
let (vars_slice, offset) = {
let total_items = serialized_slices.iter().map(|s| s.len()).sum();
deserialize_slice::<T>(bytes, total_items, offset)
};
let mut slice_of_slices = Vec::with_capacity(length);
for slice in serialized_slices {
let deserialized_slice = &vars_slice[slice.indices()];
slice_of_slices.push(deserialized_slice.to_vec().into())
}
(slice_of_slices, offset)
}
/// Serializes a `MutMap` as two sections — all keys, then all values — using
/// the caller-provided section serializers.
///
/// Keys and values are snapshotted into vectors first so each section can be
/// written contiguously.
// NOTE(review): relies on `keys()` and `values()` iterating an unmodified map
// in the same order — true for hash maps in practice; confirm for MutMap.
pub fn serialize_map<K: Clone, V: Clone, W: Write>(
    map: &MutMap<K, V>,
    ser_keys: fn(&[K], &mut W, usize) -> io::Result<usize>,
    ser_values: fn(&[V], &mut W, usize) -> io::Result<usize>,
    writer: &mut W,
    written: usize,
) -> io::Result<usize> {
    let keys: Vec<K> = map.keys().cloned().collect();
    let values: Vec<V> = map.values().cloned().collect();

    let written = ser_keys(&keys, writer, written)?;
    ser_values(&values, writer, written)
}
#[allow(clippy::type_complexity)]
pub fn deserialize_map<K, V>(
bytes: &[u8],
deser_keys: fn(&[u8], usize, usize) -> (Vec<K>, usize),
deser_values: fn(&[u8], usize, usize) -> (Vec<V>, usize),
length: usize,
offset: usize,
) -> (MutMap<K, V>, usize)
where
K: Clone + std::hash::Hash + Eq,
V: Clone,
{
let (keys, offset) = deser_keys(bytes, length, offset);
let (values, offset) = deser_values(bytes, length, offset);
(
MutMap::from_iter((keys.iter().cloned()).zip(values.iter().cloned())),
offset,
)
}
/// Serializes a `VecMap` as two sections — keys, then values — reading the
/// map's internal parallel slices directly (no cloning required).
pub fn serialize_vec_map<K, V, W: Write>(
    map: &VecMap<K, V>,
    ser_keys: fn(&[K], &mut W, usize) -> io::Result<usize>,
    ser_values: fn(&[V], &mut W, usize) -> io::Result<usize>,
    writer: &mut W,
    written: usize,
) -> io::Result<usize> {
    let (keys, values) = map.unzip_slices();

    let after_keys = ser_keys(keys, writer, written)?;
    ser_values(values, writer, after_keys)
}
/// Reads back a `VecMap` written by [`serialize_vec_map`]: `length` keys
/// followed by `length` values.
#[allow(clippy::type_complexity)]
pub fn deserialize_vec_map<K, V>(
    bytes: &[u8],
    deser_keys: fn(&[u8], usize, usize) -> (Vec<K>, usize),
    deser_values: fn(&[u8], usize, usize) -> (Vec<V>, usize),
    length: usize,
    offset: usize,
) -> (VecMap<K, V>, usize)
where
    K: PartialEq,
{
    let (keys, offset_after_keys) = deser_keys(bytes, length, offset);
    let (values, final_offset) = deser_values(bytes, length, offset_after_keys);

    // Presumably `VecMap::zip` requires distinct keys and equal-length
    // vectors, which holds for data produced by `serialize_vec_map` —
    // confirm against VecMap's documented contract.
    let map = unsafe { VecMap::zip(keys, values) };

    (map, final_offset)
}
/// Reinterprets `slice`'s memory as raw bytes.
///
/// # Safety
/// The caller must ensure that reading every byte of `T` is sound; the
/// serializers in this module only use this with `Copy` header/item types.
unsafe fn slice_as_bytes<T>(slice: &[T]) -> &[u8] {
    let byte_length = slice.len() * std::mem::size_of::<T>();
    std::slice::from_raw_parts(slice.as_ptr().cast::<u8>(), byte_length)
}
// TODO check on https://github.com/rust-lang/rust/issues/88581 some time in the future
/// Rounds `lhs` up to the nearest multiple of `rhs`, returning `lhs` itself
/// when it is already a multiple. Panics if `rhs` is zero.
pub const fn next_multiple_of(lhs: usize, rhs: usize) -> usize {
    let remainder = lhs % rhs;
    if remainder == 0 {
        lhs
    } else {
        lhs + (rhs - remainder)
    }
}
// Round-trip tests: each case serializes a value into a byte buffer and then
// deserializes it back, checking both the recovered value and that the
// reported size matches the bytes consumed.
#[cfg(test)]
mod test {
    use roc_collections::{MutMap, VecMap, VecSet};

    use super::{
        deserialize_map, deserialize_slice, deserialize_slice_of_slices, deserialize_vec,
        deserialize_vec_map, serialize_map, serialize_slice, serialize_slice_of_slices,
        serialize_vec_map,
    };

    /// An empty slice serializes to zero bytes and deserializes to empty.
    #[test]
    fn serde_empty_slice() {
        let mut buf = vec![];
        serialize_slice(&[] as &[u8], &mut buf, 0).unwrap();
        assert!(buf.is_empty());

        let (out, size) = deserialize_slice::<u8>(&buf, 0, 0);

        assert!(out.is_empty());
        assert_eq!(size, 0);
    }

    /// Borrowed round-trip; size is 4 elements * 8 bytes each.
    #[test]
    fn serde_slice() {
        let input: &[u64] = &[15u64, 23, 37, 89];

        let mut buf = vec![];
        serialize_slice(input, &mut buf, 0).unwrap();
        assert!(!buf.is_empty());

        let (out, size) = deserialize_slice::<u64>(&buf, 4, 0);

        assert_eq!(out, input);
        assert_eq!(size, 4 * 8);
    }

    /// Owned round-trip via `deserialize_vec`; consumes the whole buffer.
    #[test]
    fn serde_vec() {
        let input: &[u64] = &[15u64, 23, 37, 89];

        let mut buf = vec![];
        serialize_slice(input, &mut buf, 0).unwrap();
        assert!(!buf.is_empty());

        let (out, size) = deserialize_vec::<u64>(&buf, 4, 0);

        assert_eq!(out, input);
        assert_eq!(size, buf.len());
    }

    /// Zero inner slices means zero headers and zero items on the wire.
    #[test]
    fn serde_empty_slice_of_slices() {
        let input: &[&[u64]] = &[];

        let mut buf = vec![];
        serialize_slice_of_slices(input, &mut buf, 0).unwrap();
        assert!(buf.is_empty());

        let (out, size) = deserialize_slice_of_slices::<u64, Vec<_>>(&buf, 0, 0);

        assert!(out.is_empty());
        assert_eq!(size, 0);
    }

    /// Inner slices of different lengths survive the flatten/unflatten.
    #[test]
    fn serde_slice_of_slices() {
        let input: &[&[u64]] = &[&[15, 23, 47], &[61, 72], &[85, 91]];

        let mut buf = vec![];
        serialize_slice_of_slices(input, &mut buf, 0).unwrap();
        assert!(!buf.is_empty());

        let (out, size) = deserialize_slice_of_slices::<u64, Vec<_>>(&buf, 3, 0);

        assert_eq!(out, input);
        assert_eq!(size, buf.len());
    }

    /// The container parameter lets each inner slice land in a `VecSet`
    /// (exercises the `Container: From<Vec<T>>` bound).
    #[test]
    fn serde_slice_of_slices_into_vec_set() {
        let input: &[&[u64]] = &[&[15, 23, 47], &[61, 72], &[85, 91]];

        let mut buf = vec![];
        serialize_slice_of_slices(input, &mut buf, 0).unwrap();
        assert!(!buf.is_empty());

        let (out, size) = deserialize_slice_of_slices::<u64, VecSet<_>>(&buf, 3, 0);
        assert_eq!(size, buf.len());

        let mut out = out.into_iter();
        assert_eq!(out.next().unwrap().into_vec(), &[15, 23, 47]);
        assert_eq!(out.next().unwrap().into_vec(), &[61, 72]);
        assert_eq!(out.next().unwrap().into_vec(), &[85, 91]);
        assert!(out.next().is_none());
    }

    /// An empty map writes nothing and reads back empty.
    #[test]
    fn serde_empty_map() {
        let input: MutMap<u64, u64> = Default::default();

        let mut buf = vec![];
        serialize_map(&input, serialize_slice, serialize_slice, &mut buf, 0).unwrap();
        assert!(buf.is_empty());

        let (out, size) = deserialize_map::<u64, u64>(&buf, deserialize_vec, deserialize_vec, 0, 0);

        assert!(out.is_empty());
        assert_eq!(size, 0);
    }

    /// Map with `Vec` values: keys section uses `serialize_slice`, values
    /// section uses `serialize_slice_of_slices`.
    #[test]
    fn serde_map() {
        let mut input: MutMap<u64, Vec<u64>> = Default::default();
        input.insert(51, vec![15, 23, 37]);
        input.insert(39, vec![17, 91, 43]);
        input.insert(82, vec![90, 35, 76]);

        let mut buf = vec![];
        serialize_map(
            &input,
            serialize_slice,
            serialize_slice_of_slices,
            &mut buf,
            0,
        )
        .unwrap();
        assert!(!buf.is_empty());

        let (out, size) = deserialize_map::<u64, Vec<u64>>(
            &buf,
            deserialize_vec,
            deserialize_slice_of_slices,
            3,
            0,
        );

        assert_eq!(out, input);
        assert_eq!(size, buf.len());
    }

    /// An empty `VecMap` writes nothing and reads back empty.
    #[test]
    fn serde_empty_vec_map() {
        let input: VecMap<u64, u64> = Default::default();

        let mut buf = vec![];
        serialize_vec_map(&input, serialize_slice, serialize_slice, &mut buf, 0).unwrap();
        assert!(buf.is_empty());

        let (out, size) =
            deserialize_vec_map::<u64, u64>(&buf, deserialize_vec, deserialize_vec, 0, 0);

        assert!(out.is_empty());
        assert_eq!(size, 0);
    }

    /// `VecMap` round-trip; compared via `unzip_slices` since `VecMap`
    /// preserves insertion order rather than implementing map equality here.
    #[test]
    fn serde_vec_map() {
        let mut input: VecMap<u64, Vec<u64>> = Default::default();
        input.insert(51, vec![15, 23, 37]);
        input.insert(39, vec![17, 91, 43]);
        input.insert(82, vec![90, 35, 76]);

        let mut buf = vec![];
        serialize_vec_map(
            &input,
            serialize_slice,
            serialize_slice_of_slices,
            &mut buf,
            0,
        )
        .unwrap();
        assert!(!buf.is_empty());

        let (out, size) = deserialize_vec_map::<u64, Vec<u64>>(
            &buf,
            deserialize_vec,
            deserialize_slice_of_slices,
            3,
            0,
        );

        assert_eq!(out.unzip_slices(), input.unzip_slices());
        assert_eq!(size, buf.len());
    }
}

View file

@ -0,0 +1 @@
pub mod bytes;

View file

@ -5,7 +5,8 @@ use roc_error_macros::{internal_error, todo_abilities};
use roc_module::symbol::Symbol;
use roc_region::all::{Loc, Region};
use roc_solve_problem::{
NotDerivableContext, NotDerivableDecode, TypeError, UnderivableReason, Unfulfilled,
NotDerivableContext, NotDerivableDecode, NotDerivableEq, TypeError, UnderivableReason,
Unfulfilled,
};
use roc_types::num::NumericRange;
use roc_types::subs::{
@ -49,7 +50,14 @@ pub struct PendingDerivesTable(
);
impl PendingDerivesTable {
pub fn new(subs: &mut Subs, aliases: &mut Aliases, pending_derives: PendingDerives) -> Self {
pub fn new(
subs: &mut Subs,
aliases: &mut Aliases,
pending_derives: PendingDerives,
problems: &mut Vec<TypeError>,
abilities_store: &mut AbilitiesStore,
obligation_cache: &mut ObligationCache,
) -> Self {
let mut table = VecMap::with_capacity(pending_derives.len());
for (opaque, (typ, derives)) in pending_derives.into_iter() {
@ -65,8 +73,16 @@ impl PendingDerivesTable {
let derive_key = RequestedDeriveKey { opaque, ability };
// Neither rank nor pools should matter here.
let opaque_var =
type_to_var(subs, Rank::toplevel(), &mut Pools::default(), aliases, &typ);
let opaque_var = type_to_var(
subs,
Rank::toplevel(),
problems,
abilities_store,
obligation_cache,
&mut Pools::default(),
aliases,
&typ,
);
let real_var = match subs.get_content_without_compacting(opaque_var) {
Content::Alias(_, _, real_var, AliasKind::Opaque) => real_var,
_ => internal_error!("Non-opaque in derives table"),
@ -276,6 +292,8 @@ impl ObligationCache {
Some(DeriveHash::is_derivable(self, abilities_store, subs, var))
}
Symbol::BOOL_EQ => Some(DeriveEq::is_derivable(self, abilities_store, subs, var)),
_ => None,
};
@ -420,7 +438,7 @@ impl ObligationCache {
#[inline(always)]
#[rustfmt::skip]
fn is_builtin_number_alias(symbol: Symbol) -> bool {
fn is_builtin_int_alias(symbol: Symbol) -> bool {
matches!(symbol,
Symbol::NUM_U8 | Symbol::NUM_UNSIGNED8
| Symbol::NUM_U16 | Symbol::NUM_UNSIGNED16
@ -433,12 +451,32 @@ fn is_builtin_number_alias(symbol: Symbol) -> bool {
| Symbol::NUM_I64 | Symbol::NUM_SIGNED64
| Symbol::NUM_I128 | Symbol::NUM_SIGNED128
| Symbol::NUM_NAT | Symbol::NUM_NATURAL
)
}
#[inline(always)]
#[rustfmt::skip]
fn is_builtin_float_alias(symbol: Symbol) -> bool {
matches!(symbol,
| Symbol::NUM_F32 | Symbol::NUM_BINARY32
| Symbol::NUM_F64 | Symbol::NUM_BINARY64
)
}
#[inline(always)]
#[rustfmt::skip]
fn is_builtin_dec_alias(symbol: Symbol) -> bool {
matches!(symbol,
| Symbol::NUM_DEC | Symbol::NUM_DECIMAL,
)
}
#[inline(always)]
#[rustfmt::skip]
fn is_builtin_number_alias(symbol: Symbol) -> bool {
is_builtin_int_alias(symbol) || is_builtin_float_alias(symbol) || is_builtin_dec_alias(symbol)
}
struct NotDerivable {
var: Variable,
context: NotDerivableContext,
@ -986,6 +1024,106 @@ impl DerivableVisitor for DeriveHash {
}
}
/// Derivability checker for the builtin `Eq` ability: walks a type and
/// decides whether structural equality can be auto-derived for it.
struct DeriveEq;
impl DerivableVisitor for DeriveEq {
    const ABILITY: Symbol = Symbol::BOOL_EQ;

    /// Integer and decimal builtins are derivable; floats are rejected in
    /// `visit_alias` below.
    #[inline(always)]
    fn is_derivable_builtin_opaque(symbol: Symbol) -> bool {
        is_builtin_int_alias(symbol) || is_builtin_dec_alias(symbol)
    }

    #[inline(always)]
    fn visit_recursion(_var: Variable) -> Result<Descend, NotDerivable> {
        Ok(Descend(true))
    }

    /// Only these builtin containers get derived equality; descend into
    /// their type arguments. Anything else is not derivable.
    #[inline(always)]
    fn visit_apply(var: Variable, symbol: Symbol) -> Result<Descend, NotDerivable> {
        if matches!(
            symbol,
            Symbol::LIST_LIST
                | Symbol::SET_SET
                | Symbol::DICT_DICT
                | Symbol::STR_STR
                | Symbol::BOX_BOX_TYPE,
        ) {
            Ok(Descend(true))
        } else {
            Err(NotDerivable {
                var,
                context: NotDerivableContext::NoContext,
            })
        }
    }

    /// Records are derivable field-by-field, but optional fields are
    /// rejected.
    // NOTE(review): reporting a `Decode(...)` context from the `Eq` deriver
    // looks copy-pasted from the Decode visitor — confirm this is the
    // intended error context for optional fields under `Eq`.
    #[inline(always)]
    fn visit_record(
        subs: &Subs,
        var: Variable,
        fields: RecordFields,
    ) -> Result<Descend, NotDerivable> {
        for (field_name, _, field) in fields.iter_all() {
            if subs[field].is_optional() {
                return Err(NotDerivable {
                    var,
                    context: NotDerivableContext::Decode(NotDerivableDecode::OptionalRecordField(
                        subs[field_name].clone(),
                    )),
                });
            }
        }
        Ok(Descend(true))
    }

    #[inline(always)]
    fn visit_tag_union(_var: Variable) -> Result<Descend, NotDerivable> {
        Ok(Descend(true))
    }

    #[inline(always)]
    fn visit_recursive_tag_union(_var: Variable) -> Result<Descend, NotDerivable> {
        Ok(Descend(true))
    }

    #[inline(always)]
    fn visit_function_or_tag_union(_var: Variable) -> Result<Descend, NotDerivable> {
        Ok(Descend(true))
    }

    #[inline(always)]
    fn visit_empty_record(_var: Variable) -> Result<(), NotDerivable> {
        Ok(())
    }

    #[inline(always)]
    fn visit_empty_tag_union(_var: Variable) -> Result<(), NotDerivable> {
        Ok(())
    }

    /// Floating-point aliases are explicitly not derivable; other builtin
    /// number aliases are derivable without descending further; any other
    /// alias is inspected structurally.
    #[inline(always)]
    fn visit_alias(var: Variable, symbol: Symbol) -> Result<Descend, NotDerivable> {
        if is_builtin_float_alias(symbol) {
            Err(NotDerivable {
                var,
                context: NotDerivableContext::Eq(NotDerivableEq::FloatingPoint),
            })
        } else if is_builtin_number_alias(symbol) {
            Ok(Descend(false))
        } else {
            Ok(Descend(true))
        }
    }

    #[inline(always)]
    fn visit_ranged_number(_var: Variable, _range: NumericRange) -> Result<(), NotDerivable> {
        // Ranged numbers are allowed, because they are always possibly ints - floats can not have
        // `isEq` derived, but if something were to be a float, we'd see it exactly as a float.
        Ok(())
    }
}
/// Determines what type implements an ability member of a specialized signature, given the
/// [MustImplementAbility] constraints of the signature.
pub fn type_implementing_specialization(

View file

@ -1,3 +1,5 @@
#![allow(clippy::too_many_arguments)]
use crate::ability::{
resolve_ability_specialization, type_implementing_specialization, AbilityImplError,
CheckedDerives, ObligationCache, PendingDerivesTable, Resolved,
@ -274,6 +276,9 @@ impl Aliases {
subs: &mut Subs,
rank: Rank,
pools: &mut Pools,
problems: &mut Vec<TypeError>,
abilities_store: &AbilitiesStore,
obligation_cache: &mut ObligationCache,
arena: &bumpalo::Bump,
symbol: Symbol,
alias_variables: AliasVariables,
@ -375,7 +380,18 @@ impl Aliases {
if !can_reuse_old_definition {
let mut typ = typ.clone();
typ.substitute_variables(&substitutions);
let alias_variable = type_to_variable(subs, rank, pools, arena, self, &typ, false);
let alias_variable = type_to_variable(
subs,
rank,
pools,
problems,
abilities_store,
obligation_cache,
arena,
self,
&typ,
false,
);
(alias_variable, kind)
} else {
if !substitutions.is_empty() {
@ -389,7 +405,18 @@ impl Aliases {
// assumption: an alias does not (transitively) syntactically contain itself
// (if it did it would have to be a recursive tag union, which we should have fixed up
// during canonicalization)
let alias_variable = type_to_variable(subs, rank, pools, arena, self, &t, false);
let alias_variable = type_to_variable(
subs,
rank,
pools,
problems,
abilities_store,
obligation_cache,
arena,
self,
&t,
false,
);
{
match self.aliases.iter_mut().find(|(s, _, _, _)| *s == symbol) {
@ -562,7 +589,14 @@ fn run_in_place(
let mut obligation_cache = ObligationCache::default();
let mut awaiting_specializations = AwaitingSpecializations::default();
let pending_derives = PendingDerivesTable::new(subs, aliases, pending_derives);
let pending_derives = PendingDerivesTable::new(
subs,
aliases,
pending_derives,
problems,
abilities_store,
&mut obligation_cache,
);
let CheckedDerives {
legal_derives: _,
problems: derives_problems,
@ -687,6 +721,9 @@ fn solve(
constraints,
rank,
pools,
problems,
abilities_store,
obligation_cache,
aliases,
subs,
let_con.def_types,
@ -747,6 +784,9 @@ fn solve(
constraints,
next_rank,
pools,
problems,
abilities_store,
obligation_cache,
aliases,
subs,
let_con.def_types,
@ -858,11 +898,29 @@ fn solve(
Eq(roc_can::constraint::Eq(type_index, expectation_index, category_index, region)) => {
let category = &constraints.categories[category_index.index()];
let actual =
either_type_index_to_var(constraints, subs, rank, pools, aliases, *type_index);
let actual = either_type_index_to_var(
constraints,
subs,
rank,
pools,
problems,
abilities_store,
obligation_cache,
aliases,
*type_index,
);
let expectation = &constraints.expectations[expectation_index.index()];
let expected = type_to_var(subs, rank, pools, aliases, expectation.get_type_ref());
let expected = type_to_var(
subs,
rank,
problems,
abilities_store,
obligation_cache,
pools,
aliases,
expectation.get_type_ref(),
);
match unify(&mut UEnv::new(subs), actual, expected, Mode::EQ) {
Success {
@ -927,6 +985,9 @@ fn solve(
subs,
rank,
pools,
&mut vec![], // don't report any extra errors
abilities_store,
obligation_cache,
aliases,
*source_index,
);
@ -962,8 +1023,16 @@ fn solve(
let actual = deep_copy_var_in(subs, rank, pools, var, arena);
let expectation = &constraints.expectations[expectation_index.index()];
let expected =
type_to_var(subs, rank, pools, aliases, expectation.get_type_ref());
let expected = type_to_var(
subs,
rank,
problems,
abilities_store,
obligation_cache,
pools,
aliases,
expectation.get_type_ref(),
);
match unify(&mut UEnv::new(subs), actual, expected, Mode::EQ) {
Success {
@ -1048,11 +1117,29 @@ fn solve(
| PatternPresence(type_index, expectation_index, category_index, region) => {
let category = &constraints.pattern_categories[category_index.index()];
let actual =
either_type_index_to_var(constraints, subs, rank, pools, aliases, *type_index);
let actual = either_type_index_to_var(
constraints,
subs,
rank,
pools,
problems,
abilities_store,
obligation_cache,
aliases,
*type_index,
);
let expectation = &constraints.pattern_expectations[expectation_index.index()];
let expected = type_to_var(subs, rank, pools, aliases, expectation.get_type_ref());
let expected = type_to_var(
subs,
rank,
problems,
abilities_store,
obligation_cache,
pools,
aliases,
expectation.get_type_ref(),
);
let mode = match constraint {
PatternPresence(..) => Mode::PRESENT,
@ -1209,8 +1296,17 @@ fn solve(
}
}
IsOpenType(type_index) => {
let actual =
either_type_index_to_var(constraints, subs, rank, pools, aliases, *type_index);
let actual = either_type_index_to_var(
constraints,
subs,
rank,
pools,
problems,
abilities_store,
obligation_cache,
aliases,
*type_index,
);
open_tag_union(subs, actual);
@ -1231,12 +1327,30 @@ fn solve(
let tys = &constraints.types[types.indices()];
let pattern_category = &constraints.pattern_categories[pattern_category.index()];
let actual = type_to_var(subs, rank, pools, aliases, typ);
let actual = type_to_var(
subs,
rank,
problems,
abilities_store,
obligation_cache,
pools,
aliases,
typ,
);
let tag_ty = Type::TagUnion(
vec![(tag_name.clone(), tys.to_vec())],
TypeExtension::Closed,
);
let includes = type_to_var(subs, rank, pools, aliases, &tag_ty);
let includes = type_to_var(
subs,
rank,
problems,
abilities_store,
obligation_cache,
pools,
aliases,
&tag_ty,
);
match unify(&mut UEnv::new(subs), actual, includes, Mode::PRESENT) {
Success {
@ -1337,10 +1451,28 @@ fn solve(
}
};
let real_var =
either_type_index_to_var(constraints, subs, rank, pools, aliases, real_var);
let real_var = either_type_index_to_var(
constraints,
subs,
rank,
pools,
problems,
abilities_store,
obligation_cache,
aliases,
real_var,
);
let branches_var = type_to_var(subs, rank, pools, aliases, expected_type);
let branches_var = type_to_var(
subs,
rank,
problems,
abilities_store,
obligation_cache,
pools,
aliases,
expected_type,
);
let real_content = subs.get_content_without_compacting(real_var);
let branches_content = subs.get_content_without_compacting(branches_var);
@ -1889,6 +2021,9 @@ impl LocalDefVarsVec<(Symbol, Loc<Variable>)> {
constraints: &Constraints,
rank: Rank,
pools: &mut Pools,
problems: &mut Vec<TypeError>,
abilities_store: &mut AbilitiesStore,
obligation_cache: &mut ObligationCache,
aliases: &mut Aliases,
subs: &mut Subs,
def_types_slice: roc_can::constraint::DefTypes,
@ -1899,7 +2034,16 @@ impl LocalDefVarsVec<(Symbol, Loc<Variable>)> {
let mut local_def_vars = Self::with_length(types_slice.len());
for (&(symbol, region), typ) in (loc_symbols_slice.iter()).zip(types_slice) {
let var = type_to_var(subs, rank, pools, aliases, typ);
let var = type_to_var(
subs,
rank,
problems,
abilities_store,
obligation_cache,
pools,
aliases,
typ,
);
local_def_vars.push((symbol, Loc { value: var, region }));
}
@ -1930,6 +2074,9 @@ fn either_type_index_to_var(
subs: &mut Subs,
rank: Rank,
pools: &mut Pools,
problems: &mut Vec<TypeError>,
abilities_store: &mut AbilitiesStore,
obligation_cache: &mut ObligationCache,
aliases: &mut Aliases,
either_type_index: roc_collections::soa::EitherIndex<Type, Variable>,
) -> Variable {
@ -1937,7 +2084,16 @@ fn either_type_index_to_var(
Ok(type_index) => {
let typ = &constraints.types[type_index.index()];
type_to_var(subs, rank, pools, aliases, typ)
type_to_var(
subs,
rank,
problems,
abilities_store,
obligation_cache,
pools,
aliases,
typ,
)
}
Err(var_index) => {
// we cheat, and store the variable directly in the index
@ -1949,6 +2105,9 @@ fn either_type_index_to_var(
pub(crate) fn type_to_var(
subs: &mut Subs,
rank: Rank,
problems: &mut Vec<TypeError>,
abilities_store: &mut AbilitiesStore,
obligation_cache: &mut ObligationCache,
pools: &mut Pools,
aliases: &mut Aliases,
typ: &Type,
@ -1958,7 +2117,18 @@ pub(crate) fn type_to_var(
} else {
let mut arena = take_scratchpad();
let var = type_to_variable(subs, rank, pools, &arena, aliases, typ, false);
let var = type_to_variable(
subs,
rank,
pools,
problems,
abilities_store,
obligation_cache,
&arena,
aliases,
typ,
false,
);
arena.reset();
put_scratchpad(arena);
@ -2109,6 +2279,9 @@ fn type_to_variable<'a>(
subs: &mut Subs,
rank: Rank,
pools: &mut Pools,
problems: &mut Vec<TypeError>,
abilities_store: &AbilitiesStore,
obligation_cache: &mut ObligationCache,
arena: &'a bumpalo::Bump,
aliases: &mut Aliases,
typ: &Type,
@ -2118,6 +2291,7 @@ fn type_to_variable<'a>(
use bumpalo::collections::Vec;
let mut stack = Vec::with_capacity_in(8, arena);
let mut bind_to_ability = Vec::new_in(arena);
macro_rules! helper {
($typ:expr, $ambient_function_policy:expr) => {{
@ -2165,7 +2339,7 @@ fn type_to_variable<'a>(
Apply(symbol, arguments, _) => {
let new_arguments = VariableSubsSlice::reserve_into_subs(subs, arguments.len());
for (target_index, var_index) in (new_arguments.indices()).zip(arguments) {
let var = helper!(var_index);
let var = helper!(&var_index.value);
subs.variables[target_index] = var;
}
@ -2356,8 +2530,11 @@ fn type_to_variable<'a>(
let new_variables = VariableSubsSlice::reserve_into_subs(subs, length);
for (target_index, arg_type) in (new_variables.indices()).zip(type_arguments) {
let copy_var = helper!(arg_type);
let copy_var = helper!(&arg_type.value.typ);
subs.variables[target_index] = copy_var;
if let Some(ability) = arg_type.value.opt_ability {
bind_to_ability.push((Loc::at(arg_type.region, copy_var), ability));
}
}
let it = (new_variables.indices().skip(type_arguments.len()))
@ -2365,8 +2542,18 @@ fn type_to_variable<'a>(
for (target_index, ls) in it {
// We MUST do this now, otherwise when linking the ambient function during
// instantiation of the real var, there will be nothing to link against.
let copy_var =
type_to_variable(subs, rank, pools, arena, aliases, &ls.0, true);
let copy_var = type_to_variable(
subs,
rank,
pools,
problems,
abilities_store,
obligation_cache,
arena,
aliases,
&ls.0,
true,
);
subs.variables[target_index] = copy_var;
}
@ -2381,6 +2568,9 @@ fn type_to_variable<'a>(
subs,
rank,
pools,
problems,
abilities_store,
obligation_cache,
arena,
*symbol,
alias_variables,
@ -2488,8 +2678,18 @@ fn type_to_variable<'a>(
for (target_index, ls) in it {
// We MUST do this now, otherwise when linking the ambient function during
// instantiation of the real var, there will be nothing to link against.
let copy_var =
type_to_variable(subs, rank, pools, arena, aliases, &ls.0, true);
let copy_var = type_to_variable(
subs,
rank,
pools,
problems,
abilities_store,
obligation_cache,
arena,
aliases,
&ls.0,
true,
);
subs.variables[target_index] = copy_var;
}
@ -2501,8 +2701,18 @@ fn type_to_variable<'a>(
};
// cannot use helper! here because this variable may be involved in unification below
let alias_variable =
type_to_variable(subs, rank, pools, arena, aliases, alias_type, false);
let alias_variable = type_to_variable(
subs,
rank,
pools,
problems,
abilities_store,
obligation_cache,
arena,
aliases,
alias_type,
false,
);
// TODO(opaques): I think host-exposed aliases should always be structural
// (when does it make sense to give a host an opaque type?)
let content = Content::Alias(
@ -2533,6 +2743,67 @@ fn type_to_variable<'a>(
};
}
for (Loc { value: var, region }, ability) in bind_to_ability {
match *subs.get_content_unchecked(var) {
Content::RigidVar(a) => {
subs.set_content(var, Content::RigidAbleVar(a, ability));
}
Content::RigidAbleVar(_, ab) if ab == ability => {
// pass, already bound
}
_ => {
let flex_ability = subs.fresh(Descriptor {
content: Content::FlexAbleVar(None, ability),
rank,
mark: Mark::NONE,
copy: OptVariable::NONE,
});
let category = Category::OpaqueArg;
match unify(&mut UEnv::new(subs), var, flex_ability, Mode::EQ) {
Success {
vars: _,
must_implement_ability,
lambda_sets_to_specialize,
extra_metadata: _,
} => {
// No introduction needed
if !must_implement_ability.is_empty() {
let new_problems = obligation_cache.check_obligations(
subs,
abilities_store,
must_implement_ability,
AbilityImplError::BadExpr(region, category, flex_ability),
);
problems.extend(new_problems);
}
debug_assert!(lambda_sets_to_specialize
.drain()
.all(|(_, vals)| vals.is_empty()));
}
Failure(_vars, actual_type, expected_type, _bad_impls) => {
// No introduction needed
let problem = TypeError::BadExpr(
region,
category,
actual_type,
Expected::NoExpectation(expected_type),
);
problems.push(problem);
}
BadType(_vars, problem) => {
// No introduction needed
problems.push(TypeError::BadType(problem));
}
}
}
}
}
result
}

View file

@ -655,7 +655,7 @@ fn make_specialization_decision<P: Phase>(
})
}
}
Structure(_) | Alias(_, _, _, _) => {
Structure(_) | Alias(_, _, _, _) | RecursionVar { .. } => {
let builtin = match ability_member.try_into() {
Ok(builtin) => builtin,
Err(_) => return SpecializeDecision::Drop,
@ -691,7 +691,6 @@ fn make_specialization_decision<P: Phase>(
| RigidAbleVar(..)
| FlexVar(..)
| RigidVar(..)
| RecursionVar { .. }
| LambdaSet(..)
| RangedNumber(..) => {
internal_error!("unexpected")

View file

@ -378,6 +378,7 @@ mod solve_expr {
let known_specializations = abilities_store.iter_declared_implementations().filter_map(
|(impl_key, member_impl)| match member_impl {
MemberImpl::Impl(impl_symbol) => {
dbg!(impl_symbol);
let specialization = abilities_store.specialization_info(*impl_symbol).expect(
"declared implementations should be resolved conclusively after solving",
);
@ -3469,7 +3470,7 @@ mod solve_expr {
Dict.insert
"#
),
"Dict k v, k, v -> Dict k v",
"Dict k v, k, v -> Dict k v | k has Eq",
);
}
@ -3730,7 +3731,7 @@ mod solve_expr {
infer_eq_without_problem(
indoc!(
r#"
reconstructPath : Dict position position, position -> List position
reconstructPath : Dict position position, position -> List position | position has Eq
reconstructPath = \cameFrom, goal ->
when Dict.get cameFrom goal is
Err KeyNotFound ->
@ -3742,7 +3743,7 @@ mod solve_expr {
reconstructPath
"#
),
"Dict position position, position -> List position",
"Dict position position, position -> List position | position has Eq",
);
}
@ -3777,7 +3778,7 @@ mod solve_expr {
Model position : { openSet : Set position }
cheapestOpen : Model position -> Result position [KeyNotFound]*
cheapestOpen : Model position -> Result position [KeyNotFound]* | position has Eq
cheapestOpen = \model ->
folder = \resSmallestSoFar, position ->
@ -3792,14 +3793,14 @@ mod solve_expr {
Set.walk model.openSet (Ok { position: boom {}, cost: 0.0 }) folder
|> Result.map (\x -> x.position)
astar : Model position -> Result position [KeyNotFound]*
astar : Model position -> Result position [KeyNotFound]* | position has Eq
astar = \model -> cheapestOpen model
main =
astar
"#
),
"Model position -> Result position [KeyNotFound]*",
"Model position -> Result position [KeyNotFound]* | position has Eq",
);
}
@ -4441,7 +4442,7 @@ mod solve_expr {
Key k : Num k
removeHelpEQGT : Key k, RBTree (Key k) v -> RBTree (Key k) v
removeHelpEQGT : Key k, RBTree (Key k) v -> RBTree (Key k) v | k has Eq
removeHelpEQGT = \targetKey, dict ->
when dict is
Node color key value left right ->
@ -4555,7 +4556,7 @@ mod solve_expr {
_ ->
Empty
removeHelp : Key k, RBTree (Key k) v -> RBTree (Key k) v
removeHelp : Key k, RBTree (Key k) v -> RBTree (Key k) v | k has Eq
removeHelp = \targetKey, dict ->
when dict is
Empty ->
@ -4585,7 +4586,7 @@ mod solve_expr {
main : RBTree I64 I64
main =
removeHelp 1 Empty
removeHelp 1i64 Empty
"#
),
"RBTree I64 I64",
@ -4643,7 +4644,7 @@ mod solve_expr {
RBTree k v : [Node NodeColor k v (RBTree k v) (RBTree k v), Empty]
removeHelp : Num k, RBTree (Num k) v -> RBTree (Num k) v
removeHelp : Num k, RBTree (Num k) v -> RBTree (Num k) v | k has Eq
removeHelp = \targetKey, dict ->
when dict is
Empty ->
@ -4678,7 +4679,7 @@ mod solve_expr {
removeHelpPrepEQGT : Key k, RBTree (Key k) v, NodeColor, (Key k), v, RBTree (Key k) v, RBTree (Key k) v -> RBTree (Key k) v
removeHelpEQGT : Key k, RBTree (Key k) v -> RBTree (Key k) v
removeHelpEQGT : Key k, RBTree (Key k) v -> RBTree (Key k) v | k has Eq
removeHelpEQGT = \targetKey, dict ->
when dict is
Node color key value left right ->
@ -4701,7 +4702,7 @@ mod solve_expr {
main : RBTree I64 I64
main =
removeHelp 1 Empty
removeHelp 1i64 Empty
"#
),
"RBTree I64 I64",
@ -7972,4 +7973,40 @@ mod solve_expr {
"O",
);
}
#[test]
// An opaque type can opt into the `Eq` ability with a custom `isEq`
// implementation; `Bool.isEq` applied to two opaque values then infers `Bool`.
fn custom_implement_eq() {
infer_eq_without_problem(
indoc!(
r#"
app "test" provides [main] to "./platform"
Trivial := {} has [Eq {isEq}]
isEq = \@Trivial {}, @Trivial {} -> Bool.true
main = Bool.isEq (@Trivial {}) (@Trivial {})
"#
),
// the expected inferred type of `main`
"Bool",
);
}
#[test]
// When an alias (`F a : a | a has Hash`) constrains its parameter with an
// ability, expanding the alias must carry the `has Hash` bound through to the
// underlying type variable in the inferred signature.
fn expand_able_variables_in_type_alias() {
infer_queries!(
indoc!(
r#"
app "test" provides [main] to "./platform"
F a : a | a has Hash
main : F a -> F a
#^^^^{-1}
"#
),
// expected query result: alias expanded, ability bound preserved
@"main : a -[[main(0)]]-> a | a has Hash"
print_only_under_alias: true
);
}
}

View file

@ -67,9 +67,15 @@ pub enum NotDerivableContext {
UnboundVar,
Opaque(Symbol),
Decode(NotDerivableDecode),
Eq(NotDerivableEq),
}
#[derive(PartialEq, Debug, Clone)]
/// Reasons the `Decode` ability cannot be auto-derived for a type.
pub enum NotDerivableDecode {
/// Derived decoding does not support records with optional fields;
/// carries the offending field name.
OptionalRecordField(Lowercase),
}
#[derive(PartialEq, Debug, Clone)]
/// Reasons the `Eq` ability cannot be auto-derived for a type.
pub enum NotDerivableEq {
/// Floating-point types are not permitted to use structural `isEq`;
/// obligation checking rejects them.
FloatingPoint,
}

View file

@ -294,7 +294,8 @@ fn tag_one_label_zero_args() {
\#Derived.bytes, #Derived.fmt ->
Encode.appendWith
#Derived.bytes
(when #Derived.tag is A -> Encode.tag "A" [])
(when #Derived.tag is
A -> Encode.tag "A" [])
#Derived.fmt
"###
)

View file

@ -0,0 +1,58 @@
#![cfg(test)]
// Even with #[allow(non_snake_case)] on individual idents, rust-analyzer issues diagnostics.
// See https://github.com/rust-lang/rust-analyzer/issues/6541.
// For the `v!` macro we use uppercase variables when constructing tag unions.
#![allow(non_snake_case)]

use crate::{util::check_single_lset_immediate, v};
use roc_module::symbol::Symbol;
use roc_types::subs::Variable;

use roc_derive_key::DeriveBuiltin::IsEq;

/// Every layout resolves `Eq` to the structural-equality immediate,
/// `Symbol::BOOL_STRUCTURAL_EQ`, rather than to a derived implementation:
/// integers, strings, lists, records, tag unions, and recursive tag unions.
#[test]
fn immediates() {
    // Everything is an immediate for `Eq`.
    check_single_lset_immediate(IsEq, v!(U8), Symbol::BOOL_STRUCTURAL_EQ);
    check_single_lset_immediate(IsEq, v!(U16), Symbol::BOOL_STRUCTURAL_EQ);
    check_single_lset_immediate(IsEq, v!(U32), Symbol::BOOL_STRUCTURAL_EQ);
    check_single_lset_immediate(IsEq, v!(U64), Symbol::BOOL_STRUCTURAL_EQ);
    check_single_lset_immediate(IsEq, v!(U128), Symbol::BOOL_STRUCTURAL_EQ);
    check_single_lset_immediate(IsEq, v!(I8), Symbol::BOOL_STRUCTURAL_EQ);
    check_single_lset_immediate(IsEq, v!(I16), Symbol::BOOL_STRUCTURAL_EQ);
    check_single_lset_immediate(IsEq, v!(I32), Symbol::BOOL_STRUCTURAL_EQ);
    check_single_lset_immediate(IsEq, v!(I64), Symbol::BOOL_STRUCTURAL_EQ);
    check_single_lset_immediate(IsEq, v!(I128), Symbol::BOOL_STRUCTURAL_EQ);
    check_single_lset_immediate(IsEq, v!(STR), Symbol::BOOL_STRUCTURAL_EQ);
    check_single_lset_immediate(
        IsEq,
        v!(Symbol::LIST_LIST v!(U8)),
        Symbol::BOOL_STRUCTURAL_EQ,
    );
    check_single_lset_immediate(
        IsEq,
        v!(Symbol::LIST_LIST v!(STR)),
        Symbol::BOOL_STRUCTURAL_EQ,
    );
    check_single_lset_immediate(IsEq, v!({ a: v!(U8), }), Symbol::BOOL_STRUCTURAL_EQ);
    check_single_lset_immediate(IsEq, v!(EMPTY_RECORD), Symbol::BOOL_STRUCTURAL_EQ);
    // NOTE(review): this tag-union assertion previously appeared twice
    // verbatim; the redundant duplicate was removed.
    check_single_lset_immediate(
        IsEq,
        v!([ A v!(U8) v!(STR), B v!(STR) ]),
        Symbol::BOOL_STRUCTURAL_EQ,
    );
    check_single_lset_immediate(
        IsEq,
        v!([ Nil, Cons v!(^lst)] as lst),
        Symbol::BOOL_STRUCTURAL_EQ,
    );

    // NOTE: despite this reaching an immediate, `F64`s will never actually be allowed to be
    // compared, because obligation checking will rule them out from `isEq`!
    check_single_lset_immediate(IsEq, v!(F64), Symbol::BOOL_STRUCTURAL_EQ);
}

View file

@ -27,6 +27,20 @@ test_key_eq! {
v!({ c: v!(U8), a: v!(U8), b: v!(U8), })
explicit_empty_record_and_implicit_empty_record:
v!(EMPTY_RECORD), v!({})
same_tag_union:
v!([ A v!(U8) v!(STR), B v!(STR) ]), v!([ A v!(U8) v!(STR), B v!(STR) ])
same_tag_union_tags_diff_types:
v!([ A v!(U8) v!(U8), B v!(U8) ]), v!([ A v!(STR) v!(STR), B v!(STR) ])
same_tag_union_tags_any_order:
v!([ A v!(U8) v!(U8), B v!(U8), C ]), v!([ C, B v!(STR), A v!(STR) v!(STR) ])
explicit_empty_tag_union_and_implicit_empty_tag_union:
v!(EMPTY_TAG_UNION), v!([])
same_recursive_tag_union:
v!([ Nil, Cons v!(^lst)] as lst), v!([ Nil, Cons v!(^lst)] as lst)
same_tag_union_and_recursive_tag_union_fields:
v!([ Nil, Cons v!(STR)]), v!([ Nil, Cons v!(^lst)] as lst)
}
test_key_neq! {
@ -36,6 +50,13 @@ test_key_neq! {
v!({ a: v!(U8), }), v!({ b: v!(U8), })
record_empty_vs_nonempty:
v!(EMPTY_RECORD), v!({ a: v!(U8), })
different_tag_union_tags:
v!([ A v!(U8) ]), v!([ B v!(U8) ])
tag_union_empty_vs_nonempty:
v!(EMPTY_TAG_UNION), v!([ B v!(U8) ])
different_recursive_tag_union_tags:
v!([ Nil, Cons v!(^lst) ] as lst), v!([ Nil, Next v!(^lst) ] as lst)
}
#[test]
@ -87,6 +108,36 @@ fn derivable_record_with_record_ext() {
);
}
#[test]
// A tag union with a flex extension variable (`[A Str]*`) is still derivable
// for Hash; the derive key records only tag names and payload arities.
fn derivable_tag_ext_flex_var() {
check_derivable(
Hash,
v!([ A v!(STR) ]* ),
DeriveKey::Hash(FlatHashKey::TagUnion(vec![("A".into(), 1)])),
);
}
#[test]
// A tag union whose extension is a flex *able* variable is also derivable.
// NOTE(review): the bound here is `Symbol::ENCODE_TO_ENCODER` in a Hash
// derivability test — presumably any ability bound works for this check, but
// confirm it wasn't meant to be the Hash ability member.
fn derivable_tag_ext_flex_able_var() {
check_derivable(
Hash,
v!([ A v!(STR) ]a has Symbol::ENCODE_TO_ENCODER),
DeriveKey::Hash(FlatHashKey::TagUnion(vec![("A".into(), 1)])),
);
}
#[test]
// A tag union whose extension is itself a tag union: the derive key contains
// the tags from both the main union and the extension (here A from the ext,
// B from the payload).
fn derivable_tag_with_tag_ext() {
check_derivable(
Hash,
v!([ B v!(STR) v!(U8) ][ A v!(STR) ]),
DeriveKey::Hash(FlatHashKey::TagUnion(vec![
("A".into(), 1),
("B".into(), 2),
])),
);
}
#[test]
fn empty_record() {
derive_test(Hash, v!(EMPTY_RECORD), |golden| {
@ -149,3 +200,100 @@ fn two_field_record() {
)
})
}
#[test]
fn tag_one_label_no_payloads() {
derive_test(Hash, v!([A]), |golden| {
assert_snapshot!(golden, @r###"
# derived for [A]
# hasher, [A] -[[hash_[A 0](0)]]-> hasher | hasher has Hasher
# hasher, [A] -[[hash_[A 0](0)]]-> hasher | hasher has Hasher
# Specialization lambda sets:
# @<1>: [[hash_[A 0](0)]]
#Derived.hash_[A 0] = \#Derived.hasher, A -> #Derived.hasher
"###
)
})
}
#[test]
fn tag_one_label_newtype() {
derive_test(Hash, v!([A v!(U8) v!(STR)]), |golden| {
assert_snapshot!(golden, @r###"
# derived for [A U8 Str]
# hasher, [A a a1] -[[hash_[A 2](0)]]-> hasher | a has Hash, a1 has Hash, hasher has Hasher
# hasher, [A a a1] -[[hash_[A 2](0)]]-> hasher | a has Hash, a1 has Hash, hasher has Hasher
# Specialization lambda sets:
# @<1>: [[hash_[A 2](0)]]
#Derived.hash_[A 2] =
\#Derived.hasher, A #Derived.2 #Derived.3 ->
Hash.hash (Hash.hash #Derived.hasher #Derived.2) #Derived.3
"###
)
})
}
#[test]
fn tag_two_labels() {
derive_test(Hash, v!([A v!(U8) v!(STR) v!(U16), B v!(STR)]), |golden| {
assert_snapshot!(golden, @r###"
# derived for [A U8 Str U16, B Str]
# a, [A a1 a2 a3, B a3] -[[hash_[A 3,B 1](0)]]-> a | a has Hasher, a1 has Hash, a2 has Hash, a3 has Hash
# a, [A a1 a2 a3, B a3] -[[hash_[A 3,B 1](0)]]-> a | a has Hasher, a1 has Hash, a2 has Hash, a3 has Hash
# Specialization lambda sets:
# @<1>: [[hash_[A 3,B 1](0)]]
#Derived.hash_[A 3,B 1] =
\#Derived.hasher, #Derived.union ->
when #Derived.union is
A #Derived.3 #Derived.4 #Derived.5 ->
Hash.hash
(Hash.hash
(Hash.hash (Hash.addU8 #Derived.hasher 0) #Derived.3)
#Derived.4)
#Derived.5
B #Derived.6 -> Hash.hash (Hash.addU8 #Derived.hasher 1) #Derived.6
"###
)
})
}
#[test]
// Derived Hash for a payload-free two-tag union: each branch hashes just the
// discriminant via `Hash.addU8` (A -> 0, B -> 1), per the golden output below.
fn tag_two_labels_no_payloads() {
derive_test(Hash, v!([A, B]), |golden| {
assert_snapshot!(golden, @r###"
# derived for [A, B]
# a, [A, B] -[[hash_[A 0,B 0](0)]]-> a | a has Hasher
# a, [A, B] -[[hash_[A 0,B 0](0)]]-> a | a has Hasher
# Specialization lambda sets:
# @<1>: [[hash_[A 0,B 0](0)]]
#Derived.hash_[A 0,B 0] =
\#Derived.hasher, #Derived.union ->
when #Derived.union is
A -> Hash.addU8 #Derived.hasher 0
B -> Hash.addU8 #Derived.hasher 1
"###
)
})
}
#[test]
fn recursive_tag_union() {
derive_test(Hash, v!([Nil, Cons v!(U8) v!(^lst) ] as lst), |golden| {
assert_snapshot!(golden, @r###"
# derived for [Cons U8 $rec, Nil] as $rec
# a, [Cons a1 a2, Nil] -[[hash_[Cons 2,Nil 0](0)]]-> a | a has Hasher, a1 has Hash, a2 has Hash
# a, [Cons a1 a2, Nil] -[[hash_[Cons 2,Nil 0](0)]]-> a | a has Hasher, a1 has Hash, a2 has Hash
# Specialization lambda sets:
# @<1>: [[hash_[Cons 2,Nil 0](0)]]
#Derived.hash_[Cons 2,Nil 0] =
\#Derived.hasher, #Derived.union ->
when #Derived.union is
Cons #Derived.3 #Derived.4 ->
Hash.hash
(Hash.hash (Hash.addU8 #Derived.hasher 0) #Derived.3)
#Derived.4
Nil -> Hash.addU8 #Derived.hasher 1
"###
)
})
}

View file

@ -6,6 +6,7 @@ use roc_can::expr::{ClosureData, OpaqueWrapFunctionData, WhenBranch};
use roc_can::pattern::{Pattern, RecordDestruct};
use roc_module::symbol::Interns;
use ven_pretty::{Arena, DocAllocator, DocBuilder};
pub struct Ctx<'a> {
@ -100,8 +101,12 @@ fn expr<'a>(c: &Ctx, p: EPrec, f: &'a Arena<'a>, e: &'a Expr) -> DocBuilder<'a,
.append(expr(c, Free, f, &loc_cond.value))
.append(f.text(" is"))
.append(
f.concat(branches.iter().map(|b| f.line().append(branch(c, f, b))))
.group(),
f.concat(
branches
.iter()
.map(|b| f.hardline().append(branch(c, f, b)))
)
.group(),
)
.nest(2)
.group()
@ -134,7 +139,10 @@ fn expr<'a>(c: &Ctx, p: EPrec, f: &'a Arena<'a>, e: &'a Expr) -> DocBuilder<'a,
)
.group(),
LetRec(_, _, _) => todo!(),
LetNonRec(_, _) => todo!(),
LetNonRec(loc_def, body) => def(c, f, loc_def)
.append(f.hardline())
.append(expr(c, Free, f, &body.value))
.group(),
Call(fun, args, _) => {
let (_, fun, _, _) = &**fun;
maybe_paren!(
@ -154,7 +162,24 @@ fn expr<'a>(c: &Ctx, p: EPrec, f: &'a Arena<'a>, e: &'a Expr) -> DocBuilder<'a,
.nest(2)
)
}
RunLowLevel { .. } => todo!(),
RunLowLevel { args, .. } => {
let op = "LowLevel";
maybe_paren!(
Free,
p,
f.reflow(op)
.append(
f.concat(
args.iter()
.map(|le| f.line().append(expr(c, AppArg, f, &le.1)))
)
.group()
)
.group()
.nest(2)
)
}
ForeignCall { .. } => todo!(),
Closure(ClosureData {
arguments,

View file

@ -2,6 +2,7 @@
mod decoding;
mod encoding;
mod eq;
mod hash;
mod pretty_print;

View file

@ -55,6 +55,11 @@ fn module_source_and_path(builtin: DeriveBuiltin) -> (ModuleId, &'static str, Pa
module_source(ModuleId::HASH),
builtins_path.join("Hash.roc"),
),
DeriveBuiltin::IsEq => (
ModuleId::BOOL,
module_source(ModuleId::BOOL),
builtins_path.join("Bool.roc"),
),
}
}

View file

@ -37,7 +37,7 @@ roc_error_macros = { path = "../../error_macros" }
roc_std = { path = "../../roc_std" }
roc_debug_flags = {path="../debug_flags"}
bumpalo = { version = "3.11.0", features = ["collections"] }
libc = "0.2.133"
libc = "0.2.135"
inkwell = { path = "../../vendor/inkwell" }
target-lexicon = "0.12.3"
libloading = "0.7.1"

View file

@ -1332,7 +1332,7 @@ mod hash {
}
mod derived {
use super::{assert_evals_to, build_test};
use super::{assert_evals_to, build_test, indoc, TEST_HASHER};
use roc_std::RocList;
#[test]
@ -1372,5 +1372,286 @@ mod hash {
RocList<u8>
)
}
#[test]
// A single-tag union is a newtype over a unit layout, so hashing it
// contributes no bytes to the hasher (expected output is empty).
fn hash_singleton_union() {
assert_evals_to!(
&format!(
indoc!(
r#"
app "test" provides [main] to "./platform"
{}
a : [A]
a = A
main =
@THasher []
|> Hash.hash a
|> tRead
"#
),
// splice in the shared test-hasher definition
TEST_HASHER,
),
RocList::from_slice(&[
// hash nothing because this is a newtype of a unit layout.
] as &[u8]),
RocList<u8>
)
}
#[test]
fn hash_bool_tag_union() {
assert_evals_to!(
&format!(
indoc!(
r#"
app "test" provides [main] to "./platform"
{}
a : [A, B]
a = A
b : [A, B]
b = B
main =
@THasher []
|> Hash.hash a
|> Hash.hash b
|> tRead
"#
),
TEST_HASHER,
),
RocList::from_slice(&[
0, // A
1, // B
]),
RocList<u8>
)
}
#[test]
fn hash_byte_tag_union() {
assert_evals_to!(
&format!(
indoc!(
r#"
app "test" provides [main] to "./platform"
{}
l : List [A, B, C, D, E, F, G, H]
l = [A, B, C, D, E, F, G, H]
main =
@THasher []
|> Hash.hash l
|> tRead
"#
),
TEST_HASHER,
),
RocList::from_slice(&[
0, // A
1, // B
2, // C
3, // D
4, // E
5, // F
6, // G
7, // H
]),
RocList<u8>
)
}
#[test]
fn hash_newtype_tag_union() {
assert_evals_to!(
&format!(
indoc!(
r#"
app "test" provides [main] to "./platform"
{}
a : [A U8 U8 U8]
a = A 15 23 47
main =
@THasher []
|> Hash.hash a
|> tRead
"#
),
TEST_HASHER,
),
RocList::from_slice(&[
// discriminant is skipped because it's a newtype
15, 23, 47
]),
RocList<u8>
)
}
#[test]
fn hash_newtype_by_void_tag_union() {
assert_evals_to!(
&format!(
indoc!(
r#"
app "test" provides [main] to "./platform"
{}
a : Result [A U8 U8 U8] []
a = Ok (A 15 23 47)
main =
@THasher []
|> Hash.hash a
|> tRead
"#
),
TEST_HASHER,
),
RocList::from_slice(&[
1, // Ok
// A is skipped because it is a newtype
15, 23, 47
]),
RocList<u8>
)
}
#[test]
fn hash_heterogenous_tags() {
assert_evals_to!(
&format!(
indoc!(
r#"
app "test" provides [main] to "./platform"
{}
a : [A U8 U8, B {{ a: U8 }}, C Str]
a = A 15 23
b : [A U8 U8, B {{ a: U8 }}, C Str]
b = B {{ a: 37 }}
c : [A U8 U8, B {{ a: U8 }}, C Str]
c = C "abc"
main =
@THasher []
|> Hash.hash a
|> Hash.hash b
|> Hash.hash c
|> tRead
"#
),
TEST_HASHER,
),
RocList::from_slice(&[
0, // dicsr A
15, 23, // payloads A
1, // discr B
37, // payloads B
2, // discr C
97, 98, 99 // payloads C
]),
RocList<u8>
)
}
#[test]
fn hash_recursive_tag_union() {
assert_evals_to!(
&format!(
indoc!(
r#"
app "test" provides [main] to "./platform"
{}
ConsList : [Cons U8 ConsList, Nil]
c : ConsList
c = Cons 1 (Cons 2 Nil)
main =
@THasher []
|> Hash.hash c
|> tRead
"#
),
TEST_HASHER,
),
RocList::from_slice(&[
0, 1, // Cons 1
0, 2, // Cons 2
1, // Nil
]),
RocList<u8>
)
}
}
}
#[cfg(all(test, any(feature = "gen-llvm", feature = "gen-wasm")))]
mod eq {
#[cfg(feature = "gen-llvm")]
use crate::helpers::llvm::assert_evals_to;
#[cfg(feature = "gen-wasm")]
use crate::helpers::wasm::assert_evals_to;
use indoc::indoc;
use roc_std::RocStr;
#[test]
// The custom `isEq` of `LyingEq` deliberately returns the *opposite* of
// structural equality (`m != n`), so the assertions below only pass if the
// custom implementation — not the derived one — is actually used.
fn custom_eq_impl() {
assert_evals_to!(
indoc!(
r#"
app "test" provides [main] to "./platform"
LyingEq := U8 has [Eq {isEq}]
isEq = \@LyingEq m, @LyingEq n -> m != n
main =
a = @LyingEq 10
b = @LyingEq 5
c = @LyingEq 5
if Bool.isEq a b && !(Bool.isEq b c) then
"okay"
else
"fail"
"#
),
RocStr::from("okay"),
RocStr
)
}
#[test]
// `Bool.isEq` on builtin numbers falls back to the derived structural
// equality implementation.
fn derive_structural_eq() {
assert_evals_to!(
indoc!(
r#"
app "test" provides [main] to "./platform"
main = Bool.isEq 10u8 10u8
"#
),
true,
bool
)
}
}

View file

@ -77,40 +77,6 @@ fn neq_u64() {
);
}
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn eq_f64() {
assert_evals_to!(
indoc!(
r#"
i : F64
i = 1
i == i
"#
),
true,
bool
);
}
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn neq_f64() {
assert_evals_to!(
indoc!(
r#"
i : F64
i = 1
i != i
"#
),
false,
bool
);
}
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn eq_bool_tag() {
@ -673,8 +639,8 @@ fn compare_nullable_recursive_union_same_content() {
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn boxed_eq_int() {
assert_evals_to!("Box.box 1 == Box.box 1", true, bool);
assert_evals_to!("Box.box 2 == Box.box 1", false, bool);
assert_evals_to!("Box.box 1i64 == Box.box 1", true, bool);
assert_evals_to!("Box.box 2i64 == Box.box 1", false, bool);
}
#[test]

View file

@ -917,7 +917,7 @@ fn list_walk_implements_position() {
r#"
Option a : [Some a, None]
find : List a, a -> Option Nat
find : List a, a -> Option Nat | a has Eq
find = \list, needle ->
findHelp list needle
|> .v
@ -1065,6 +1065,92 @@ fn list_keep_if_str_is_hello() {
);
}
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn list_count_if_empty_list() {
assert_evals_to!(
indoc!(
r#"
List.countIf [] \_ -> Bool.true
"#
),
0,
usize
);
}
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn list_count_if_always_true_for_non_empty_list() {
assert_evals_to!(
indoc!(
r#"
alwaysTrue : I64 -> Bool
alwaysTrue = \_ ->
Bool.true
oneThroughEight : List I64
oneThroughEight =
[1,2,3,4,5,6,7,8]
List.countIf oneThroughEight alwaysTrue
"#
),
8,
usize
);
}
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn list_count_if_always_false_for_non_empty_list() {
assert_evals_to!(
indoc!(
r#"
alwaysFalse : I64 -> Bool
alwaysFalse = \_ ->
Bool.false
List.countIf [1,2,3,4,5,6,7,8] alwaysFalse
"#
),
0,
usize
);
}
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn list_count_if_condition() {
assert_evals_to!(
indoc!(
r#"
intIsLessThanThree : I64 -> Bool
intIsLessThanThree = \i ->
i < 3
List.countIf [1,2,3,4,5,6,7,8] intIsLessThanThree
"#
),
2,
usize
);
}
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn list_count_if_str() {
assert_evals_to!(
indoc!(
r#"
List.countIf ["x", "y", "x"] (\x -> x == "x")
"#
),
2,
usize
);
}
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn list_map_on_empty_list_with_int_layout() {
@ -3413,7 +3499,7 @@ fn list_walk_backwards_implements_position() {
r#"
Option a : [Some a, None]
find : List a, a -> Option Nat
find : List a, a -> Option Nat | a has Eq
find = \list, needle ->
findHelp list needle
|> .v

View file

@ -875,7 +875,7 @@ fn gen_int_eq() {
}
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm", feature = "gen-dev"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn gen_int_neq() {
assert_evals_to!(
indoc!(
@ -948,7 +948,7 @@ fn gen_wrap_int_neq() {
assert_evals_to!(
indoc!(
r#"
wrappedNotEq : a, a -> Bool
wrappedNotEq : a, a -> Bool | a has Eq
wrappedNotEq = \num1, num2 ->
num1 != num2

View file

@ -2595,7 +2595,7 @@ fn pass_through_unresolved_type_variable() {
main : Str
main =
(accept Bool.isEq) "B"
(accept \x -> x) "B"
accept : * -> (b -> b)
@ -2878,10 +2878,10 @@ fn unresolved_tvar_when_capture_is_unused() {
main : I64
main =
r : Bool
r = Bool.false
r : U8
r = 1
p1 = (\_ -> r == (1 == 1))
p1 = (\_ -> r == 1)
oneOfResult = List.map [p1] (\p -> p Green)
when oneOfResult is

View file

@ -1876,9 +1876,9 @@ fn llvm_wasm_str_layout() {
|> Str.reserve 42
"#
),
[0, 5, 42],
[0, 5, 1],
[u32; 3],
|[_ptr, len, cap]: [u32; 3]| [0, len, cap]
|[_ptr, len, cap]: [u32; 3]| [0, len, if cap >= 42 { 1 } else { 0 }]
)
}
@ -1958,3 +1958,70 @@ fn with_capacity_concat() {
RocStr
);
}
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn str_with_prefix() {
assert_evals_to!(
indoc!(
r#"
Str.withPrefix "world!" "Hello "
"#
),
RocStr::from("Hello world!"),
RocStr
);
assert_evals_to!(
indoc!(
r#"
"two" |> Str.withPrefix "Forty "
"#
),
RocStr::from("Forty two"),
RocStr
);
}
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn destructure_pattern_assigned_from_thunk_opaque() {
assert_evals_to!(
indoc!(
r#"
app "test" provides [main] to "./platform"
MyCustomType := Str
myMsg = @MyCustomType "Hello"
main =
@MyCustomType msg = myMsg
msg
"#
),
RocStr::from("Hello"),
RocStr
);
}
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn destructure_pattern_assigned_from_thunk_tag() {
assert_evals_to!(
indoc!(
r#"
app "test" provides [main] to "./platform"
myMsg = A "hello " "world"
main =
A m1 m2 = myMsg
Str.concat m1 m2
"#
),
RocStr::from("hello world"),
RocStr
);
}

View file

@ -33,7 +33,7 @@ dependencies = [
[[package]]
name = "test_mono_macros"
version = "0.1.0"
version = "0.0.1"
dependencies = [
"proc-macro2",
"quote",

View file

@ -11,5 +11,6 @@ roc_region = { path = "../region" }
roc_module = { path = "../module" }
roc_error_macros = {path="../../error_macros"}
roc_debug_flags = {path="../debug_flags"}
roc_serialize = {path="../serialize"}
bumpalo = { version = "3.11.0", features = ["collections"] }
static_assertions = "1.1.0"

View file

@ -77,6 +77,7 @@ struct SubsHeader {
record_fields: u64,
variable_slices: u64,
unspecialized_lambda_sets: u64,
uls_of_var: u64,
exposed_vars_by_symbol: u64,
}
@ -95,6 +96,7 @@ impl SubsHeader {
record_fields: subs.record_fields.len() as u64,
variable_slices: subs.variable_slices.len() as u64,
unspecialized_lambda_sets: subs.unspecialized_lambda_sets.len() as u64,
uls_of_var: subs.uls_of_var.len() as u64,
exposed_vars_by_symbol: exposed_vars_by_symbol as u64,
}
}
@ -110,19 +112,11 @@ impl SubsHeader {
}
}
/// Reinterpret a slice's backing memory as raw bytes.
///
/// # Safety
/// The byte view covers `size_of::<T>() * len` bytes starting at the slice's
/// data pointer. NOTE(review): this includes any padding bytes inside `T` —
/// presumably `T` is only ever a plain-old-data type here; confirm at call
/// sites before reading the returned bytes.
unsafe fn slice_as_bytes<T>(slice: &[T]) -> &[u8] {
let ptr = slice.as_ptr();
let byte_length = std::mem::size_of::<T>() * slice.len();
unsafe { std::slice::from_raw_parts(ptr as *const u8, byte_length) }
}
/// Round `value` up to the nearest multiple of `base`, returning `value`
/// unchanged when it is already a multiple. `base` must be non-zero.
fn round_to_multiple_of(value: usize, base: usize) -> usize {
    match value % base {
        0 => value,
        remainder => value + (base - remainder),
    }
}
#[derive(Clone, Copy)]
struct SerializedTagName(SubsSlice<u8>);
use roc_serialize::bytes;
impl Subs {
pub fn serialize(
&self,
@ -137,14 +131,15 @@ impl Subs {
written = self.utable.serialize(writer, written)?;
written = Self::serialize_slice(&self.variables, writer, written)?;
written = bytes::serialize_slice(&self.variables, writer, written)?;
written = Self::serialize_tag_names(&self.tag_names, writer, written)?;
written = Self::serialize_slice(&self.closure_names, writer, written)?;
written = bytes::serialize_slice(&self.closure_names, writer, written)?;
written = Self::serialize_field_names(&self.field_names, writer, written)?;
written = Self::serialize_slice(&self.record_fields, writer, written)?;
written = Self::serialize_slice(&self.variable_slices, writer, written)?;
written = Self::serialize_slice(&self.unspecialized_lambda_sets, writer, written)?;
written = Self::serialize_slice(exposed_vars_by_symbol, writer, written)?;
written = bytes::serialize_slice(&self.record_fields, writer, written)?;
written = bytes::serialize_slice(&self.variable_slices, writer, written)?;
written = bytes::serialize_slice(&self.unspecialized_lambda_sets, writer, written)?;
written = Self::serialize_uls_of_var(&self.uls_of_var, writer, written)?;
written = bytes::serialize_slice(exposed_vars_by_symbol, writer, written)?;
Ok(written)
}
@ -164,9 +159,9 @@ impl Subs {
slices.push(slice);
}
let written = Self::serialize_slice(&slices, writer, written)?;
let written = bytes::serialize_slice(&slices, writer, written)?;
Self::serialize_slice(&buf, writer, written)
bytes::serialize_slice(&buf, writer, written)
}
/// Global tag names can be heap-allocated
@ -185,30 +180,39 @@ impl Subs {
slices.push(serialized);
}
let written = Self::serialize_slice(&slices, writer, written)?;
let written = bytes::serialize_slice(&slices, writer, written)?;
Self::serialize_slice(&buf, writer, written)
bytes::serialize_slice(&buf, writer, written)
}
pub(crate) fn serialize_slice<T>(
slice: &[T],
fn serialize_uls_of_var(
uls_of_vars: &UlsOfVar,
writer: &mut impl std::io::Write,
written: usize,
) -> std::io::Result<usize> {
let alignment = std::mem::align_of::<T>();
let padding_bytes = round_to_multiple_of(written, alignment) - written;
for _ in 0..padding_bytes {
writer.write_all(&[0])?;
}
let bytes_slice = unsafe { slice_as_bytes(slice) };
writer.write_all(bytes_slice)?;
Ok(written + padding_bytes + bytes_slice.len())
bytes::serialize_vec_map(
&uls_of_vars.0,
bytes::serialize_slice,
bytes::serialize_slice_of_slices,
writer,
written,
)
}
pub fn deserialize(bytes: &[u8]) -> (Self, &[(Symbol, Variable)]) {
fn deserialize_uls_of_var(bytes: &[u8], length: usize, offset: usize) -> (UlsOfVar, usize) {
let (vec_map, offset) = bytes::deserialize_vec_map(
bytes,
bytes::deserialize_vec,
bytes::deserialize_slice_of_slices,
length,
offset,
);
(UlsOfVar(vec_map), offset)
}
#[allow(clippy::type_complexity)]
pub fn deserialize(bytes: &[u8]) -> ((Self, &[(Symbol, Variable)]), usize) {
let mut offset = 0;
let header_slice = &bytes[..std::mem::size_of::<SubsHeader>()];
offset += header_slice.len();
@ -216,37 +220,43 @@ impl Subs {
let (utable, offset) = UnificationTable::deserialize(bytes, header.utable as usize, offset);
let (variables, offset) = Self::deserialize_slice(bytes, header.variables as usize, offset);
let (variables, offset) =
bytes::deserialize_slice(bytes, header.variables as usize, offset);
let (tag_names, offset) =
Self::deserialize_tag_names(bytes, header.tag_names as usize, offset);
let (closure_names, offset) =
Self::deserialize_slice(bytes, header.closure_names as usize, offset);
bytes::deserialize_slice(bytes, header.closure_names as usize, offset);
let (field_names, offset) =
Self::deserialize_field_names(bytes, header.field_names as usize, offset);
let (record_fields, offset) =
Self::deserialize_slice(bytes, header.record_fields as usize, offset);
bytes::deserialize_slice(bytes, header.record_fields as usize, offset);
let (variable_slices, offset) =
Self::deserialize_slice(bytes, header.variable_slices as usize, offset);
bytes::deserialize_slice(bytes, header.variable_slices as usize, offset);
let (unspecialized_lambda_sets, offset) =
Self::deserialize_slice(bytes, header.unspecialized_lambda_sets as usize, offset);
let (exposed_vars_by_symbol, _) =
Self::deserialize_slice(bytes, header.exposed_vars_by_symbol as usize, offset);
bytes::deserialize_slice(bytes, header.unspecialized_lambda_sets as usize, offset);
let (uls_of_var, offset) =
Self::deserialize_uls_of_var(bytes, header.uls_of_var as usize, offset);
let (exposed_vars_by_symbol, offset) =
bytes::deserialize_slice(bytes, header.exposed_vars_by_symbol as usize, offset);
(
Self {
utable,
variables: variables.to_vec(),
tag_names: tag_names.to_vec(),
closure_names: closure_names.to_vec(),
field_names,
record_fields: record_fields.to_vec(),
variable_slices: variable_slices.to_vec(),
unspecialized_lambda_sets: unspecialized_lambda_sets.to_vec(),
tag_name_cache: Default::default(),
problems: Default::default(),
uls_of_var: Default::default(),
},
exposed_vars_by_symbol,
(
Self {
utable,
variables: variables.to_vec(),
tag_names: tag_names.to_vec(),
closure_names: closure_names.to_vec(),
field_names,
record_fields: record_fields.to_vec(),
variable_slices: variable_slices.to_vec(),
unspecialized_lambda_sets: unspecialized_lambda_sets.to_vec(),
tag_name_cache: Default::default(),
problems: Default::default(),
uls_of_var,
},
exposed_vars_by_symbol,
),
offset,
)
}
@ -255,7 +265,7 @@ impl Subs {
length: usize,
offset: usize,
) -> (Vec<Lowercase>, usize) {
let (slices, mut offset) = Self::deserialize_slice::<SubsSlice<u8>>(bytes, length, offset);
let (slices, mut offset) = bytes::deserialize_slice::<SubsSlice<u8>>(bytes, length, offset);
let string_slice = &bytes[offset..];
@ -273,7 +283,7 @@ impl Subs {
fn deserialize_tag_names(bytes: &[u8], length: usize, offset: usize) -> (Vec<TagName>, usize) {
let (slices, mut offset) =
Self::deserialize_slice::<SerializedTagName>(bytes, length, offset);
bytes::deserialize_slice::<SerializedTagName>(bytes, length, offset);
let string_slice = &bytes[offset..];
@ -290,24 +300,6 @@ impl Subs {
(tag_names, offset)
}
/// Deserialize `length` values of type `T` from `bytes`, starting at an
/// alignment-adjusted `offset`. Returns the borrowed slice and the offset
/// just past the consumed bytes.
pub(crate) fn deserialize_slice<T>(
bytes: &[u8],
length: usize,
mut offset: usize,
) -> (&[T], usize) {
let alignment = std::mem::align_of::<T>();
let size = std::mem::size_of::<T>();
// Serialization padded up to T's alignment, so skip the same padding here.
offset = round_to_multiple_of(offset, alignment);
let byte_length = length * size;
let byte_slice = &bytes[offset..][..byte_length];
// SAFETY(review): reinterprets raw bytes as `T`s. Sound only if the buffer
// was produced by the matching serializer for the same `T` and target
// layout — TODO confirm callers uphold this.
let slice = unsafe { std::slice::from_raw_parts(byte_slice.as_ptr() as *const T, length) };
(slice, offset + byte_length)
}
}
/// Mapping of variables to [Content::LambdaSet]s containing unspecialized lambda sets depending on

View file

@ -209,7 +209,7 @@ impl LambdaSet {
#[derive(PartialEq, Eq, Clone)]
pub struct AliasCommon {
pub symbol: Symbol,
pub type_arguments: Vec<Type>,
pub type_arguments: Vec<Loc<OptAbleType>>,
pub lambda_set_variables: Vec<LambdaSet>,
}
@ -282,7 +282,7 @@ pub enum Type {
},
RecursiveTagUnion(Variable, Vec<(TagName, Vec<Type>)>, TypeExtension),
/// Applying a type to some arguments (e.g. Dict.Dict String Int)
Apply(Symbol, Vec<Type>, Region),
Apply(Symbol, Vec<Loc<Type>>, Region),
Variable(Variable),
RangedNumber(NumericRange),
/// A type error, which will code gen to a runtime error
@ -793,7 +793,7 @@ impl Type {
..
}) => {
for value in type_arguments.iter_mut() {
stack.push(value);
stack.push(&mut value.value.typ);
}
for lambda_set in lambda_set_variables.iter_mut() {
@ -833,7 +833,7 @@ impl Type {
stack.push(actual_type);
}
Apply(_, args, _) => {
stack.extend(args);
stack.extend(args.iter_mut().map(|t| &mut t.value));
}
RangedNumber(_) => {}
UnspecializedLambdaSet {
@ -915,7 +915,7 @@ impl Type {
..
}) => {
for value in type_arguments.iter_mut() {
stack.push(value);
stack.push(&mut value.value.typ);
}
for lambda_set in lambda_set_variables.iter_mut() {
@ -954,7 +954,7 @@ impl Type {
stack.push(actual_type);
}
Apply(_, args, _) => {
stack.extend(args);
stack.extend(args.iter_mut().map(|t| &mut t.value));
}
RangedNumber(_) => {}
UnspecializedLambdaSet {
@ -1021,7 +1021,9 @@ impl Type {
..
}) => {
for ta in type_arguments {
ta.substitute_alias(rep_symbol, rep_args, actual)?;
ta.value
.typ
.substitute_alias(rep_symbol, rep_args, actual)?;
}
Ok(())
@ -1042,13 +1044,16 @@ impl Type {
} => actual_type.substitute_alias(rep_symbol, rep_args, actual),
Apply(symbol, args, region) if *symbol == rep_symbol => {
if args.len() == rep_args.len()
&& args.iter().zip(rep_args.iter()).all(|(t1, t2)| t1 == t2)
&& args
.iter()
.zip(rep_args.iter())
.all(|(t1, t2)| &t1.value == t2)
{
*self = actual.clone();
if let Apply(_, args, _) = self {
for arg in args {
arg.substitute_alias(rep_symbol, rep_args, actual)?;
arg.value.substitute_alias(rep_symbol, rep_args, actual)?;
}
}
return Ok(());
@ -1057,7 +1062,7 @@ impl Type {
}
Apply(_, args, _) => {
for arg in args {
arg.substitute_alias(rep_symbol, rep_args, actual)?;
arg.value.substitute_alias(rep_symbol, rep_args, actual)?;
}
Ok(())
}
@ -1103,7 +1108,9 @@ impl Type {
..
}) => {
symbol == &rep_symbol
|| type_arguments.iter().any(|v| v.contains_symbol(rep_symbol))
|| type_arguments
.iter()
.any(|v| v.value.typ.contains_symbol(rep_symbol))
|| lambda_set_variables
.iter()
.any(|v| v.0.contains_symbol(rep_symbol))
@ -1117,7 +1124,7 @@ impl Type {
name == &rep_symbol || actual.contains_symbol(rep_symbol)
}
Apply(symbol, _, _) if *symbol == rep_symbol => true,
Apply(_, args, _) => args.iter().any(|arg| arg.contains_symbol(rep_symbol)),
Apply(_, args, _) => args.iter().any(|arg| arg.value.contains_symbol(rep_symbol)),
RangedNumber(_) => false,
UnspecializedLambdaSet {
unspecialized: Uls(_, sym, _),
@ -1174,7 +1181,9 @@ impl Type {
..
} => actual_type.contains_variable(rep_variable),
HostExposedAlias { actual, .. } => actual.contains_variable(rep_variable),
Apply(_, args, _) => args.iter().any(|arg| arg.contains_variable(rep_variable)),
Apply(_, args, _) => args
.iter()
.any(|arg| arg.value.contains_variable(rep_variable)),
RangedNumber(_) => false,
EmptyRec | EmptyTagUnion | Erroneous(_) => false,
}
@ -1259,7 +1268,12 @@ impl Type {
.iter()
.all(|lambda_set| matches!(lambda_set.0, Type::Variable(..))));
type_arguments.iter_mut().for_each(|t| {
t.instantiate_aliases(region, aliases, var_store, new_lambda_set_variables)
t.value.typ.instantiate_aliases(
region,
aliases,
var_store,
new_lambda_set_variables,
)
});
}
HostExposedAlias {
@ -1316,8 +1330,14 @@ impl Type {
if false {
let mut type_var_to_arg = Vec::new();
for (_, arg_ann) in alias.type_variables.iter().zip(args) {
type_var_to_arg.push(arg_ann.clone());
for (alias_var, arg_ann) in alias.type_variables.iter().zip(args) {
type_var_to_arg.push(Loc::at(
arg_ann.region,
OptAbleType {
typ: arg_ann.value.clone(),
opt_ability: alias_var.value.opt_bound_ability,
},
));
}
let mut lambda_set_variables =
@ -1370,17 +1390,17 @@ impl Type {
) in alias.type_variables.iter().zip(args.iter())
{
let mut filler = filler.clone();
filler.instantiate_aliases(
filler.value.instantiate_aliases(
region,
aliases,
var_store,
new_lambda_set_variables,
);
named_args.push(OptAbleType {
typ: filler.clone(),
typ: filler.value.clone(),
opt_ability: *opt_bound_ability,
});
substitution.insert(*placeholder, filler);
substitution.insert(*placeholder, filler.value);
}
// make sure hidden variables are freshly instantiated
@ -1435,7 +1455,12 @@ impl Type {
} else {
// one of the special-cased Apply types.
for x in args {
x.instantiate_aliases(region, aliases, var_store, new_lambda_set_variables);
x.value.instantiate_aliases(
region,
aliases,
var_store,
new_lambda_set_variables,
);
}
}
}
@ -1552,7 +1577,7 @@ fn symbols_help(initial: &Type) -> Vec<Symbol> {
..
}) => {
output.push(*symbol);
stack.extend(type_arguments);
stack.extend(type_arguments.iter().map(|ta| &ta.value.typ));
}
Alias {
symbol: alias_symbol,
@ -1572,7 +1597,7 @@ fn symbols_help(initial: &Type) -> Vec<Symbol> {
}
Apply(symbol, args, _) => {
output.push(*symbol);
stack.extend(args);
stack.extend(args.iter().map(|t| &t.value));
}
Erroneous(Problem::CyclicAlias(alias, _, _)) => {
output.push(*alias);
@ -1679,7 +1704,7 @@ fn variables_help(tipe: &Type, accum: &mut ImSet<Variable>) {
..
}) => {
for arg in type_arguments {
variables_help(arg, accum);
variables_help(&arg.value.typ, accum);
}
for lambda_set in lambda_set_variables {
@ -1709,7 +1734,7 @@ fn variables_help(tipe: &Type, accum: &mut ImSet<Variable>) {
RangedNumber(_) => {}
Apply(_, args, _) => {
for x in args {
variables_help(x, accum);
variables_help(&x.value, accum);
}
}
}
@ -1823,7 +1848,7 @@ fn variables_help_detailed(tipe: &Type, accum: &mut VariableDetail) {
..
}) => {
for arg in type_arguments {
variables_help_detailed(arg, accum);
variables_help_detailed(&arg.value.typ, accum);
}
for lambda_set in lambda_set_variables {
@ -1857,7 +1882,7 @@ fn variables_help_detailed(tipe: &Type, accum: &mut VariableDetail) {
RangedNumber(_) => {}
Apply(_, args, _) => {
for x in args {
variables_help_detailed(x, accum);
variables_help_detailed(&x.value, accum);
}
}
}
@ -2928,7 +2953,7 @@ fn instantiate_lambda_sets_as_unspecialized(
debug_assert!(matches!(lambda_set.0, Type::Variable(_)));
lambda_set.0 = new_uls();
}
stack.extend(type_arguments.iter_mut().rev());
stack.extend(type_arguments.iter_mut().rev().map(|ta| &mut ta.value.typ));
}
Type::Alias {
symbol: _,
@ -2959,7 +2984,7 @@ fn instantiate_lambda_sets_as_unspecialized(
stack.extend(type_arguments.iter_mut().rev());
}
Type::Apply(_sym, args, _region) => {
stack.extend(args.iter_mut().rev());
stack.extend(args.iter_mut().rev().map(|t| &mut t.value));
}
Type::Variable(_) => {}
Type::RangedNumber(_) => {}

View file

@ -1,6 +1,7 @@
use std::hint::unreachable_unchecked;
use crate::subs::{Content, Descriptor, Mark, OptVariable, Rank, Variable, VariableSubsSlice};
use roc_serialize::bytes;
#[derive(Clone, Default)]
pub struct UnificationTable {
@ -376,9 +377,7 @@ impl UnificationTable {
writer: &mut impl std::io::Write,
mut written: usize,
) -> std::io::Result<usize> {
use crate::subs::Subs;
written = Subs::serialize_slice(&self.contents, writer, written)?;
written = bytes::serialize_slice(&self.contents, writer, written)?;
let mut ranks = Vec::new();
let mut marks = Vec::new();
@ -406,22 +405,20 @@ impl UnificationTable {
}
}
written = Subs::serialize_slice(&ranks, writer, written)?;
written = Subs::serialize_slice(&marks, writer, written)?;
written = Subs::serialize_slice(&copies, writer, written)?;
written = Subs::serialize_slice(&redirects, writer, written)?;
written = bytes::serialize_slice(&ranks, writer, written)?;
written = bytes::serialize_slice(&marks, writer, written)?;
written = bytes::serialize_slice(&copies, writer, written)?;
written = bytes::serialize_slice(&redirects, writer, written)?;
Ok(written)
}
pub(crate) fn deserialize(bytes: &[u8], length: usize, offset: usize) -> (Self, usize) {
use crate::subs::Subs;
let (contents, offset) = Subs::deserialize_slice::<Content>(bytes, length, offset);
let (ranks, offset) = Subs::deserialize_slice::<Rank>(bytes, length, offset);
let (marks, offset) = Subs::deserialize_slice::<Mark>(bytes, length, offset);
let (copies, offset) = Subs::deserialize_slice::<OptVariable>(bytes, length, offset);
let (redirects, offset) = Subs::deserialize_slice::<OptVariable>(bytes, length, offset);
let (contents, offset) = bytes::deserialize_slice::<Content>(bytes, length, offset);
let (ranks, offset) = bytes::deserialize_slice::<Rank>(bytes, length, offset);
let (marks, offset) = bytes::deserialize_slice::<Mark>(bytes, length, offset);
let (copies, offset) = bytes::deserialize_slice::<OptVariable>(bytes, length, offset);
let (redirects, offset) = bytes::deserialize_slice::<OptVariable>(bytes, length, offset);
let mut metadata = Vec::with_capacity(ranks.len());

View file

@ -340,6 +340,7 @@ pub fn unify(env: &mut Env, var1: Variable, var2: Variable, mode: Mode) -> Unifi
}
#[inline(always)]
#[must_use]
pub fn unify_introduced_ability_specialization(
env: &mut Env,
ability_member_signature: Variable,
@ -350,6 +351,7 @@ pub fn unify_introduced_ability_specialization(
}
#[inline(always)]
#[must_use]
pub fn unify_with_collector<M: MetaCollector>(
env: &mut Env,
var1: Variable,
@ -360,6 +362,7 @@ pub fn unify_with_collector<M: MetaCollector>(
}
#[inline(always)]
#[must_use]
fn unify_help<M: MetaCollector>(
env: &mut Env,
var1: Variable,
@ -416,6 +419,7 @@ fn unify_help<M: MetaCollector>(
}
#[inline(always)]
#[must_use]
pub fn unify_pool<M: MetaCollector>(
env: &mut Env,
pool: &mut Pool,
@ -496,6 +500,7 @@ fn debug_print_unified_types<M: MetaCollector>(
})
}
#[must_use]
fn unify_context<M: MetaCollector>(env: &mut Env, pool: &mut Pool, ctx: Context) -> Outcome<M> {
#[cfg(debug_assertions)]
debug_print_unified_types::<M>(env, &ctx, None);
@ -555,6 +560,7 @@ fn not_in_range_mismatch<M: MetaCollector>() -> Outcome<M> {
}
#[inline(always)]
#[must_use]
fn unify_ranged_number<M: MetaCollector>(
env: &mut Env,
pool: &mut Pool,
@ -714,6 +720,7 @@ fn wrap_range_var(
#[inline(always)]
#[allow(clippy::too_many_arguments)]
#[must_use]
fn unify_two_aliases<M: MetaCollector>(
env: &mut Env,
pool: &mut Pool,
@ -812,6 +819,7 @@ fn unify_two_aliases<M: MetaCollector>(
// Unifies a structural alias
#[inline(always)]
#[must_use]
fn unify_alias<M: MetaCollector>(
env: &mut Env,
pool: &mut Pool,
@ -870,6 +878,7 @@ fn opaque_obligation(opaque: Symbol, opaque_var: Variable) -> Obligated {
}
#[inline(always)]
#[must_use]
fn unify_opaque<M: MetaCollector>(
env: &mut Env,
pool: &mut Pool,
@ -934,6 +943,7 @@ fn unify_opaque<M: MetaCollector>(
}
#[inline(always)]
#[must_use]
fn unify_structure<M: MetaCollector>(
env: &mut Env,
pool: &mut Pool,
@ -1032,6 +1042,7 @@ fn unify_structure<M: MetaCollector>(
}
#[inline(always)]
#[must_use]
fn unify_lambda_set<M: MetaCollector>(
env: &mut Env,
pool: &mut Pool,
@ -1358,6 +1369,7 @@ fn is_sorted_unspecialized_lamba_set_list(subs: &Subs, uls: &[Uls]) -> bool {
uls == sort_unspecialized_lambda_sets(subs, uls.to_vec())
}
#[must_use = "must use outcomes!"]
fn unify_unspecialized_lambdas<M: MetaCollector>(
env: &mut Env,
pool: &mut Pool,
@ -1602,6 +1614,7 @@ fn unify_unspecialized_lambdas<M: MetaCollector>(
))
}
#[must_use]
fn unify_lambda_set_help<M: MetaCollector>(
env: &mut Env,
pool: &mut Pool,
@ -1695,6 +1708,7 @@ fn unify_lambda_set_help<M: MetaCollector>(
whole_outcome
}
#[must_use]
fn unify_record<M: MetaCollector>(
env: &mut Env,
pool: &mut Pool,
@ -1801,6 +1815,7 @@ enum OtherFields {
type SharedFields = Vec<(Lowercase, (RecordField<Variable>, RecordField<Variable>))>;
#[must_use]
fn unify_shared_fields<M: MetaCollector>(
env: &mut Env,
pool: &mut Pool,
@ -2109,6 +2124,7 @@ fn should_extend_ext_with_uninhabited_type(
}
#[allow(clippy::too_many_arguments)]
#[must_use]
fn unify_tag_unions<M: MetaCollector>(
env: &mut Env,
pool: &mut Pool,
@ -2409,6 +2425,7 @@ fn choose_merged_var(subs: &Subs, var1: Variable, var2: Variable) -> Variable {
}
}
#[must_use]
fn unify_shared_tags_new<M: MetaCollector>(
env: &mut Env,
pool: &mut Pool,
@ -2539,6 +2556,7 @@ fn unify_shared_tags_new<M: MetaCollector>(
}
}
#[must_use]
fn unify_shared_tags_merge_new<M: MetaCollector>(
env: &mut Env,
ctx: &Context,
@ -2558,6 +2576,7 @@ fn unify_shared_tags_merge_new<M: MetaCollector>(
}
#[inline(always)]
#[must_use]
fn unify_flat_type<M: MetaCollector>(
env: &mut Env,
pool: &mut Pool,
@ -2757,6 +2776,7 @@ fn unify_flat_type<M: MetaCollector>(
}
}
#[must_use]
fn unify_zip_slices<M: MetaCollector>(
env: &mut Env,
pool: &mut Pool,
@ -2778,6 +2798,7 @@ fn unify_zip_slices<M: MetaCollector>(
}
#[inline(always)]
#[must_use]
fn unify_rigid<M: MetaCollector>(
env: &mut Env,
ctx: &Context,
@ -2821,6 +2842,7 @@ fn unify_rigid<M: MetaCollector>(
}
#[inline(always)]
#[must_use]
fn unify_rigid_able<M: MetaCollector>(
env: &mut Env,
ctx: &Context,
@ -2870,6 +2892,7 @@ fn unify_rigid_able<M: MetaCollector>(
}
#[inline(always)]
#[must_use]
fn unify_flex<M: MetaCollector>(
env: &mut Env,
ctx: &Context,
@ -2906,6 +2929,7 @@ fn unify_flex<M: MetaCollector>(
}
#[inline(always)]
#[must_use]
fn unify_flex_able<M: MetaCollector>(
env: &mut Env,
ctx: &Context,
@ -2948,7 +2972,6 @@ fn unify_flex_able<M: MetaCollector>(
}
RigidVar(_) => mismatch!("FlexAble can never unify with non-able Rigid"),
RecursionVar { .. } => mismatch!("FlexAble with RecursionVar"),
LambdaSet(..) => mismatch!("FlexAble with LambdaSet"),
Alias(name, _args, _real_var, AliasKind::Opaque) => {
@ -2963,7 +2986,10 @@ fn unify_flex_able<M: MetaCollector>(
)
}
Structure(_) | Alias(_, _, _, AliasKind::Structural) | RangedNumber(..) => {
RecursionVar { .. }
| Structure(_)
| Alias(_, _, _, AliasKind::Structural)
| RangedNumber(..) => {
// Structural type wins.
merge_flex_able_with_concrete(
env,
@ -2979,6 +3005,7 @@ fn unify_flex_able<M: MetaCollector>(
}
}
#[must_use]
fn merge_flex_able_with_concrete<M: MetaCollector>(
env: &mut Env,
ctx: &Context,
@ -3012,6 +3039,7 @@ fn merge_flex_able_with_concrete<M: MetaCollector>(
}
#[inline(always)]
#[must_use]
fn unify_recursion<M: MetaCollector>(
env: &mut Env,
pool: &mut Pool,
@ -3046,10 +3074,22 @@ fn unify_recursion<M: MetaCollector>(
mismatch!("RecursionVar {:?} with rigid {:?}", ctx.first, &other)
}
FlexAbleVar(..) | RigidAbleVar(..) => {
RigidAbleVar(..) => {
mismatch!("RecursionVar {:?} with able var {:?}", ctx.first, &other)
}
FlexAbleVar(_, ability) => merge_flex_able_with_concrete(
env,
ctx,
ctx.second,
*ability,
RecursionVar {
structure,
opt_name: *opt_name,
},
Obligated::Adhoc(ctx.first),
),
FlexVar(_) => merge(
env,
ctx,
@ -3086,6 +3126,7 @@ fn unify_recursion<M: MetaCollector>(
}
}
#[must_use]
pub fn merge<M: MetaCollector>(env: &mut Env, ctx: &Context, content: Content) -> Outcome<M> {
let mut outcome: Outcome<M> = Outcome::default();
@ -3140,6 +3181,7 @@ fn is_recursion_var(subs: &Subs, var: Variable) -> bool {
}
#[allow(clippy::too_many_arguments)]
#[must_use]
fn unify_function_or_tag_union_and_func<M: MetaCollector>(
env: &mut Env,
pool: &mut Pool,

View file

@ -134,10 +134,8 @@ pub fn generate_docs_html(filenames: Vec<PathBuf>) {
}
fn sidebar_link_url(module: &ModuleDocumentation) -> String {
let mut href_buf = base_url();
href_buf.push_str(module.name.as_str());
href_buf
let url = format!("{}{}/", base_url(), module.name.as_str());
url
}
// converts plain-text code to highlighted html

View file

@ -15,7 +15,7 @@
<body>
<nav id="sidebar-nav">
<input id="module-search" aria-labelledby="search-link" type="text" placeholder="Search" />
<label for="module-search" id="search-link">Search</label>
<label for="module-search" id="search-link"><span id="search-link-text">Search</span> <span id="search-link-hint">(shortcut: S)</span></label>
<div class="module-links">
<!-- Module links -->
</div>

Some files were not shown because too many files have changed in this diff Show more