Merge branch 'main' into Frame-Limited

This commit is contained in:
J.Teeuwissen 2023-04-02 19:53:23 +02:00
commit 09ee92fce3
No known key found for this signature in database
GPG key ID: DB5F7A1ED8D478AD
96 changed files with 3223 additions and 1895 deletions

View file

@ -3,6 +3,7 @@ test-gen-llvm = "test -p test_gen"
test-gen-dev = "test -p roc_gen_dev -p test_gen --no-default-features --features gen-dev"
test-gen-wasm = "test -p roc_gen_wasm -p test_gen --no-default-features --features gen-wasm"
test-gen-llvm-wasm = "test -p roc_gen_wasm -p test_gen --no-default-features --features gen-llvm-wasm"
uitest = "test -p uitest"
[target.wasm32-unknown-unknown]
# Rust compiler flags for minimum-sized .wasm binary in the web REPL

Cargo.lock generated
View file

@ -232,6 +232,12 @@ version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "bitflags"
version = "2.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "487f1e0fcbe47deb8b0574e646def1c903389d95241dd1bbcc6ce4a715dfc0c1"
[[package]]
name = "bitmaps"
version = "2.1.0"
@ -420,7 +426,7 @@ version = "2.34.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c"
dependencies = [
"bitflags",
"bitflags 1.3.2",
"textwrap 0.11.0",
"unicode-width",
]
@ -432,14 +438,42 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "71655c45cb9845d3270c9d6df84ebe72b4dad3c2ba3f7023ad47c144e4e473a5"
dependencies = [
"atty",
"bitflags",
"clap_lex",
"bitflags 1.3.2",
"clap_lex 0.2.4",
"indexmap",
"strsim",
"termcolor",
"textwrap 0.16.0",
]
[[package]]
name = "clap"
version = "4.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42dfd32784433290c51d92c438bb72ea5063797fc3cc9a21a8c4346bebbb2098"
dependencies = [
"bitflags 2.0.2",
"clap_derive",
"clap_lex 0.3.3",
"is-terminal",
"once_cell",
"strsim",
"termcolor",
]
[[package]]
name = "clap_derive"
version = "4.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fddf67631444a3a3e3e5ac51c36a5e01335302de677bd78759eaa90ab1f46644"
dependencies = [
"heck",
"proc-macro-error",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "clap_lex"
version = "0.2.4"
@ -449,6 +483,15 @@ dependencies = [
"os_str_bytes",
]
[[package]]
name = "clap_lex"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "033f6b7a4acb1f358c742aaca805c939ee73b4c6209ae4318ec7aca81c42e646"
dependencies = [
"os_str_bytes",
]
[[package]]
name = "cli_utils"
version = "0.0.1"
@ -493,7 +536,7 @@ version = "0.24.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f425db7937052c684daec3bd6375c8abe2d146dca4b8b143d6db777c39138f3a"
dependencies = [
"bitflags",
"bitflags 1.3.2",
"block",
"cocoa-foundation",
"core-foundation 0.9.3",
@ -509,7 +552,7 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ade49b65d560ca58c403a479bb396592b155c0185eada742ee323d1d68d6318"
dependencies = [
"bitflags",
"bitflags 1.3.2",
"block",
"core-foundation 0.9.3",
"core-graphics-types",
@ -656,7 +699,7 @@ version = "0.19.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b3889374e6ea6ab25dba90bb5d96202f61108058361f6dc72e8b03e6f8bbe923"
dependencies = [
"bitflags",
"bitflags 1.3.2",
"core-foundation 0.7.0",
"foreign-types",
"libc",
@ -668,7 +711,7 @@ version = "0.22.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2581bbab3b8ffc6fcbd550bf46c355135d16e9ff2a6ea032ad6b9bf1d7efe4fb"
dependencies = [
"bitflags",
"bitflags 1.3.2",
"core-foundation 0.9.3",
"core-graphics-types",
"foreign-types",
@ -681,7 +724,7 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3a68b68b3446082644c91ac778bf50cd4104bfb002b5a6a7c44cca5a2c70788b"
dependencies = [
"bitflags",
"bitflags 1.3.2",
"core-foundation 0.9.3",
"foreign-types",
"libc",
@ -881,7 +924,7 @@ version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2daefd788d1e96e0a9d66dee4b828b883509bc3ea9ce30665f04c3246372690c"
dependencies = [
"bitflags",
"bitflags 1.3.2",
"libloading",
"winapi",
]
@ -1134,7 +1177,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ef1a30ae415c3a691a4f41afddc2dbcd6d70baf338368d85ebc1e8ed92cedb9"
dependencies = [
"cfg-if 1.0.0",
"rustix",
"rustix 0.36.9",
"windows-sys 0.45.0",
]
@ -1419,7 +1462,7 @@ version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fc59e5f710e310e76e6707f86c561dd646f69a8876da9131703b2f717de818d"
dependencies = [
"bitflags",
"bitflags 1.3.2",
"gpu-alloc-types",
]
@ -1429,7 +1472,7 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "54804d0d6bc9d7f26db4eaec1ad10def69b599315f487d32c334a80d1efe67a5"
dependencies = [
"bitflags",
"bitflags 1.3.2",
]
[[package]]
@ -1438,7 +1481,7 @@ version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b0c02e1ba0bdb14e965058ca34e09c020f8e507a760df1121728e0aef68d57a"
dependencies = [
"bitflags",
"bitflags 1.3.2",
"gpu-descriptor-types",
"hashbrown 0.12.3",
]
@ -1449,7 +1492,7 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "363e3677e55ad168fef68cf9de3a4a310b53124c5e784c53a1d70e92d23f2126"
dependencies = [
"bitflags",
"bitflags 1.3.2",
]
[[package]]
@ -1520,6 +1563,12 @@ dependencies = [
"libc",
]
[[package]]
name = "hermit-abi"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fed44880c466736ef9a5c5b5facefb5ed0785676d0c02d612db14e54f0d84286"
[[package]]
name = "hexf-parse"
version = "0.2.1"
@ -1745,6 +1794,18 @@ version = "2.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "30e22bd8629359895450b59ea7a776c850561b96a3b1d31321c1949d9e6c9146"
[[package]]
name = "is-terminal"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "256017f749ab3117e93acb91063009e1f1bb56d03965b14c2c8df4eb02c524d8"
dependencies = [
"hermit-abi 0.3.1",
"io-lifetimes",
"rustix 0.37.3",
"windows-sys 0.45.0",
]
[[package]]
name = "itertools"
version = "0.9.0"
@ -1848,6 +1909,17 @@ dependencies = [
"libc",
]
[[package]]
name = "libtest-mimic"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d7b603516767d1ab23d0de09d023e62966c3322f7148297c35cf3d97aa8b37fa"
dependencies = [
"clap 4.1.11",
"termcolor",
"threadpool",
]
[[package]]
name = "linked-hash-map"
version = "0.5.6"
@ -1860,6 +1932,12 @@ version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f051f77a7c8e6957c0696eac88f26b0117e54f52d3fc682ab19397a8812846a4"
[[package]]
name = "linux-raw-sys"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d59d8c75012853d2e872fb56bc8a2e53718e2cafe1a4c823143141c6d90c322f"
[[package]]
name = "llvm-sys"
version = "130.0.7"
@ -1898,7 +1976,7 @@ version = "0.1.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b6f2d7176b94027af58085a2c9d27c4e416586caba409c314569213901d6068"
dependencies = [
"bitflags",
"bitflags 1.3.2",
"byteorder",
"lazy_static",
"libc",
@ -1980,7 +2058,7 @@ version = "0.23.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e0514f491f4cc03632ab399ee01e2c1c1b12d3e1cf2d667c1ff5f87d6dcd2084"
dependencies = [
"bitflags",
"bitflags 1.3.2",
"block",
"core-graphics-types",
"foreign-types",
@ -2048,7 +2126,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3012f2dbcc79e8e0b5825a4836a7106a75dd9b2fe42c528163be0f572538c705"
dependencies = [
"bit-set",
"bitflags",
"bitflags 1.3.2",
"codespan-reporting",
"hexf-parse",
"indexmap",
@ -2065,7 +2143,7 @@ version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96d868f654c72e75f8687572699cdabe755f03effbb62542768e995d5b8d699d"
dependencies = [
"bitflags",
"bitflags 1.3.2",
"jni-sys",
"ndk-sys",
"num_enum",
@ -2127,7 +2205,7 @@ version = "0.22.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e4916f159ed8e5de0082076562152a76b7a1f64a01fd9d1e0fea002c37624faf"
dependencies = [
"bitflags",
"bitflags 1.3.2",
"cc",
"cfg-if 1.0.0",
"libc",
@ -2140,7 +2218,7 @@ version = "0.23.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f3790c00a0150112de0f4cd161e3d7fc4b2d8a5542ffc35f099a2562aecb35c"
dependencies = [
"bitflags",
"bitflags 1.3.2",
"cc",
"cfg-if 1.0.0",
"libc",
@ -2153,7 +2231,7 @@ version = "0.24.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fa52e972a9a719cecb6864fb88568781eb706bac2cd1d4f04a648542dbf78069"
dependencies = [
"bitflags",
"bitflags 1.3.2",
"cfg-if 1.0.0",
"libc",
"memoffset 0.6.5",
@ -2165,7 +2243,7 @@ version = "0.26.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfdda3d196821d6af13126e40375cdf7da646a96114af134d5f417a9a1dc8e1a"
dependencies = [
"bitflags",
"bitflags 1.3.2",
"cfg-if 1.0.0",
"libc",
"static_assertions",
@ -2646,6 +2724,30 @@ dependencies = [
"toml_edit",
]
[[package]]
name = "proc-macro-error"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
dependencies = [
"proc-macro-error-attr",
"proc-macro2",
"quote",
"syn",
"version_check",
]
[[package]]
name = "proc-macro-error-attr"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
dependencies = [
"proc-macro2",
"quote",
"version_check",
]
[[package]]
name = "proc-macro2"
version = "1.0.51"
@ -2668,7 +2770,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "29f1b898011ce9595050a68e60f90bad083ff2987a695a42357134c8381fba70"
dependencies = [
"bit-set",
"bitflags",
"bitflags 1.3.2",
"byteorder",
"lazy_static",
"num-traits",
@ -2688,7 +2790,7 @@ version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2d9cc634bc78768157b5cbfe988ffcd1dcba95cd2b2f03a88316c08c6d00ed63"
dependencies = [
"bitflags",
"bitflags 1.3.2",
"memchr",
"unicase",
]
@ -2843,7 +2945,7 @@ version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a"
dependencies = [
"bitflags",
"bitflags 1.3.2",
]
[[package]]
@ -3818,6 +3920,7 @@ dependencies = [
"indoc",
"insta",
"lazy_static",
"libtest-mimic",
"pretty_assertions",
"regex",
"roc_builtins",
@ -3841,6 +3944,7 @@ dependencies = [
"roc_types",
"roc_unify",
"tempfile",
"test_solve_helpers",
]
[[package]]
@ -3915,7 +4019,7 @@ dependencies = [
name = "roc_unify"
version = "0.0.1"
dependencies = [
"bitflags",
"bitflags 1.3.2",
"roc_collections",
"roc_debug_flags",
"roc_error_macros",
@ -3971,11 +4075,25 @@ version = "0.36.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd5c6ff11fecd55b40746d1995a02f2eb375bf8c00d192d521ee09f42bef37bc"
dependencies = [
"bitflags",
"bitflags 1.3.2",
"errno 0.2.8",
"io-lifetimes",
"libc",
"linux-raw-sys",
"linux-raw-sys 0.1.4",
"windows-sys 0.45.0",
]
[[package]]
name = "rustix"
version = "0.37.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62b24138615de35e32031d041a09032ef3487a616d901ca4db224e7d557efae2"
dependencies = [
"bitflags 1.3.2",
"errno 0.3.0",
"io-lifetimes",
"libc",
"linux-raw-sys 0.3.1",
"windows-sys 0.45.0",
]
@ -4023,7 +4141,7 @@ name = "rustyline"
version = "9.1.1"
source = "git+https://github.com/roc-lang/rustyline?rev=e74333c#e74333c0d618896b88175bf06645108f996fe6d0"
dependencies = [
"bitflags",
"bitflags 1.3.2",
"cfg-if 1.0.0",
"clipboard-win 4.5.0",
"dirs-next",
@ -4315,7 +4433,7 @@ version = "0.15.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a28f16a97fa0e8ce563b2774d1e732dd5d4025d2772c5dba0a41a0f90a29da3"
dependencies = [
"bitflags",
"bitflags 1.3.2",
"calloop",
"dlib",
"lazy_static",
@ -4334,7 +4452,7 @@ version = "0.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f307c47d32d2715eb2e0ece5589057820e0e5e70d07c247d1063e844e107f454"
dependencies = [
"bitflags",
"bitflags 1.3.2",
"dlib",
"lazy_static",
"log",
@ -4401,7 +4519,7 @@ version = "0.2.0+1.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "246bfa38fe3db3f1dfc8ca5a2cdeb7348c78be2112740cc0ec8ef18b6d94f830"
dependencies = [
"bitflags",
"bitflags 1.3.2",
"num-traits",
]
@ -4618,6 +4736,32 @@ dependencies = [
"syn",
]
[[package]]
name = "test_solve_helpers"
version = "0.0.1"
dependencies = [
"bumpalo",
"indoc",
"insta",
"lazy_static",
"pretty_assertions",
"regex",
"roc_can",
"roc_derive",
"roc_late_solve",
"roc_load",
"roc_module",
"roc_packaging",
"roc_problem",
"roc_region",
"roc_reporting",
"roc_solve",
"roc_solve_problem",
"roc_target",
"roc_types",
"tempfile",
]
[[package]]
name = "test_syntax"
version = "0.0.1"
@ -4928,6 +5072,29 @@ version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e79c4d996edb816c91e4308506774452e55e95c3c9de07b6729e17e15a5ef81"
[[package]]
name = "uitest"
version = "0.0.1"
dependencies = [
"bumpalo",
"indoc",
"insta",
"lazy_static",
"libtest-mimic",
"pretty_assertions",
"regex",
"roc_builtins",
"roc_derive",
"roc_load",
"roc_parse",
"roc_problem",
"roc_reporting",
"roc_solve",
"roc_target",
"tempfile",
"test_solve_helpers",
]
[[package]]
name = "unarray"
version = "0.1.4"
@ -5201,7 +5368,7 @@ version = "0.29.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f3b068c05a039c9f755f881dc50f01732214f5685e379829759088967c46715"
dependencies = [
"bitflags",
"bitflags 1.3.2",
"downcast-rs",
"libc",
"nix 0.24.3",
@ -5240,7 +5407,7 @@ version = "0.29.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b950621f9354b322ee817a23474e479b34be96c2e909c14f7bc0100e9a970bc6"
dependencies = [
"bitflags",
"bitflags 1.3.2",
"wayland-client",
"wayland-commons",
"wayland-scanner",
@ -5325,7 +5492,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c4688c000eb841ca55f7b35db659b78d6e1cd77d7caf8fb929f4e181f754047d"
dependencies = [
"arrayvec 0.7.2",
"bitflags",
"bitflags 1.3.2",
"cfg_aliases",
"codespan-reporting",
"copyless",
@ -5350,7 +5517,7 @@ dependencies = [
"arrayvec 0.7.2",
"ash",
"bit-set",
"bitflags",
"bitflags 1.3.2",
"block",
"core-graphics-types",
"d3d12",
@ -5385,7 +5552,7 @@ version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "549533d9e1cdd4b4cda7718d33ff500fc4c34b5467b71d76b547ae0324f3b2a2"
dependencies = [
"bitflags",
"bitflags 1.3.2",
]
[[package]]
@ -5527,7 +5694,7 @@ version = "0.26.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b43cc931d58b99461188607efd7acb2a093e65fc621f54cad78517a6063e73a"
dependencies = [
"bitflags",
"bitflags 1.3.2",
"cocoa",
"core-foundation 0.9.3",
"core-graphics 0.22.3",

View file

@ -112,6 +112,7 @@ lazy_static = "1.4.0"
libc = "0.2.139" # update roc_std/Cargo.toml on change
libfuzzer-sys = "0.4"
libloading = "0.7.4"
libtest-mimic = "0.6.0"
log = "0.4.17"
mach_object = "0.1"
maplit = "1.0.2"

View file

@ -1,3 +1,21 @@
## Roc strings are sequences of text values. This module includes functions for combining strings,
## as well as breaking them up into smaller units—most commonly [extended grapheme clusters](http://www.unicode.org/glossary/#extended_grapheme_cluster)
## (referred to in this module's documentation as "graphemes" rather than "characters" for clarity;
## "characters" can mean very different things in different languages).
##
## This module focuses on graphemes (as opposed to, say, Unicode code points or LATIN-1 bytes)
## because graphemes avoid common classes of bugs. Breaking strings up using code points often
## leads to bugs around things like emoji, where multiple code points combine to form a
## single rendered glyph. Graphemes avoid these bugs by treating multi-code-point things like
## emojis as indivisible units.
##
## Because graphemes can have variable length (there's no upper limit on how many code points one
## grapheme can represent), it takes linear time to count the number of graphemes in a string,
## and also linear time to find an individual grapheme within a string by its position (or "index")
## among the string's other graphemes. The only way to get constant-time access to these is in a way
## that can result in bugs if the string contains multi-code-point things like emojis, which is why
## this module does not offer those.
##
##
## ## Working with Unicode strings in Roc
##
@ -107,6 +125,7 @@ interface Str
replaceLast,
splitFirst,
splitLast,
walkUtf8,
walkUtf8WithIndex,
reserve,
releaseExcessCapacity,
@ -151,8 +170,89 @@ isEmpty : Str -> Bool
concat : Str, Str -> Str
## Returns a string of the specified capacity without any content.
##
## This is a performance optimization tool that's like calling [Str.reserve] on an empty string.
## It's useful when you plan to build up a string incrementally, for example by calling [Str.concat] on it:
##
## ```
## greeting = "Hello and welcome to Roc"
## subject = "Awesome Programmer"
##
## # Evaluates to "Hello and welcome to Roc, Awesome Programmer!"
## helloWorld =
## Str.withCapacity 45
## |> Str.concat greeting
## |> Str.concat ", "
## |> Str.concat subject
## |> Str.concat "!"
## ```
##
## In general, if you plan to use [Str.concat] on an empty string, it will be faster to start with
## [Str.withCapacity] than with `""`. Even if you don't know the exact capacity of the string, giving [Str.withCapacity]
## a higher value than what ends up being necessary can help prevent reallocation and copying, at
## the cost of using more memory than is necessary.
##
## For more details on how the performance optimization works, see [Str.reserve].
withCapacity : Nat -> Str
## Increase a string's capacity by at least the given number of additional bytes.
##
## This can improve the performance of string concatenation operations like [Str.concat] by
## allocating extra capacity up front, which can prevent the need for reallocations and copies.
## Consider the following example which does not use [Str.reserve]:
##
## ```
## greeting = "Hello and welcome to Roc"
## subject = "Awesome Programmer"
##
## # Evaluates to "Hello and welcome to Roc, Awesome Programmer!"
## helloWorld =
## greeting
## |> Str.concat ", "
## |> Str.concat subject
## |> Str.concat "!"
## ```
##
## In this example:
## 1. We start with `greeting`, which has both a length and capacity of 24 (bytes).
## 2. `|> Str.concat ", "` will see that there isn't enough capacity to add 2 more bytes for the `", "`, so it will create a new heap allocation with enough bytes to hold both. (This will probably be more than the 26 bytes strictly required, because when [Str] functions reallocate, they apply a multiplier to the exact capacity required. This makes it less likely that future reallocations will be needed. The multiplier amount is not specified, because it may change in future releases of Roc, but it will likely be around 1.5 to 2 times the exact capacity required.) Then it will copy the current bytes (`"Hello and welcome to Roc"`) into the new allocation, and finally concatenate the `", "` into the new allocation. The old allocation will then be deallocated because it's no longer referenced anywhere in the program.
## 3. `|> Str.concat subject` will again check if there is enough capacity in the string. If it doesn't find enough capacity once again, it will make a third allocation, copy the existing bytes (`"Hello and welcome to Roc, "`) into that third allocation, and then deallocate the second allocation because it's already no longer being referenced anywhere else in the program. (It may find enough capacity in this particular case, because the previous [Str.concat] allocated something like 1.5 to 2 times the necessary capacity in order to anticipate future concatenations like this...but if something longer than `"Awesome Programmer"` were being concatenated here, it might still require further reallocation and copying.)
## 4. `|> Str.concat "!"` will repeat this process once more.
##
## This process can have significant performance costs due to multiple reallocations, copying between old and new strings, and deallocation of immediately obsolete strings.
##
## Here's a modified example which uses [Str.reserve] to eliminate the need for all that reallocation, copying, and deallocation.
##
## ```
## helloWorld =
## greeting
## |> Str.reserve 21
## |> Str.concat ", "
## |> Str.concat subject
## |> Str.concat "!"
## ```
##
## In this example:
## 1. We again start with `greeting`, which has both a length and capacity of 24 bytes.
## 2. `|> Str.reserve 21` will ensure that there is enough capacity in the string for an additional 21 bytes (to make room for `", "`, `"Awesome Programmer"`, and `"!"`). Since the current capacity is only 24, it will create a new 45-byte (24 + 21) heap allocation and copy the contents of the existing allocation (`greeting`) into it.
## 3. `|> Str.concat ", "` will concatenate `", "` to the string. No reallocation, copying, or deallocation will be necessary, because the string already has a capacity of 45 bytes, and `greeting` will only use 24 of them.
## 4. `|> Str.concat subject` will concatenate `subject` (`"Awesome Programmer"`) to the string. Again, no reallocation, copying, or deallocation will be necessary.
## 5. `|> Str.concat "!"` will concatenate `"!"` to the string, still without any reallocation, copying, or deallocation.
##
## Here, [Str.reserve] prevented multiple reallocations, copies, and deallocations during the
## [Str.concat] calls. Notice that it did perform a heap allocation before any [Str.concat] calls
## were made, which means that using [Str.reserve] is not free! You should only use it if you actually
## expect to make use of the extra capacity.
##
## Ideally, you'd be able to predict exactly how many extra bytes of capacity will be needed, but this
## may not always be knowable. When you don't know exactly how many bytes to reserve, you can often get better
## performance by choosing a number of bytes that's too high, because a number that's too low could lead to reallocations. There's a limit to
## this, of course; if you always give it ten times what it turns out to need, that could prevent
## reallocations but will also waste a lot of memory!
##
## If you plan to use [Str.reserve] on an empty string, it's generally better to use [Str.withCapacity] instead.
reserve : Str, Nat -> Str
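# An illustrative sketch (not part of this commit; `joinParts` and `parts` are hypothetical
# names) of the pattern described above: reserve the total size once so a fold can
# concatenate many pieces without repeated reallocation and copying.
joinParts : List Str -> Str
joinParts = \parts ->
    totalBytes = List.walk parts 0 (\sum, part -> sum + Str.countUtf8Bytes part)

    List.walk parts (Str.withCapacity totalBytes) Str.concat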
## Combines a [List] of strings into a single string, with a separator
## string in between each.
## ```
@ -200,7 +300,21 @@ repeat : Str, Nat -> Str
## using a single Unicode code point.
countGraphemes : Str -> Nat
## Split a string into its constituent grapheme clusters
## Split a string into its constituent graphemes.
##
## This function breaks a string into its individual [graphemes](https://stackoverflow.com/a/27331885/4200103),
## returning them as a list of strings. This is useful for working with text that
## contains complex characters, such as emojis.
##
## Examples:
## ```
## expect Str.graphemes "Roc" == ["R", "o", "c"]
## expect Str.graphemes "नमस्ते" == ["न", "म", "स्", "ते"]
## expect Str.graphemes "👩‍👩‍👦‍👦" == ["👩‍", "👩‍", "👦‍", "👦"]
## ```
##
## Note that the "👩‍👩‍👦‍👦" example consists of 4 grapheme clusters, although it visually
## appears as a single glyph. This is because it is an emoji ZWJ (zero-width joiner) sequence.
graphemes : Str -> List Str
## If the string begins with a [Unicode code point](http://www.unicode.org/glossary/#code_point)
@ -273,6 +387,12 @@ fromUtf8 = \bytes ->
else
Err (BadUtf8 result.dProblemCode result.aByteIndex)
expect (Str.fromUtf8 [82, 111, 99]) == Ok "Roc"
expect (Str.fromUtf8 [224, 174, 154, 224, 174, 191]) == Ok "சி"
expect (Str.fromUtf8 [240, 159, 144, 166]) == Ok "🐦"
expect (Str.fromUtf8 []) == Ok ""
expect (Str.fromUtf8 [255]) |> Result.isErr
## Encode part of a [List] of [U8] UTF-8 [code units](https://unicode.org/glossary/#code_unit)
## into a [Str]
## ```
@ -290,6 +410,12 @@ fromUtf8Range = \bytes, config ->
else
Err OutOfBounds
expect (Str.fromUtf8Range [72, 105, 80, 103] { start: 0, count: 2 }) == Ok "Hi"
expect (Str.fromUtf8Range [233, 185, 143, 224, 174, 154, 224, 174, 191] { start: 3, count: 3 }) == Ok "ச"
expect (Str.fromUtf8Range [240, 159, 144, 166] { start: 0, count: 4 }) == Ok "🐦"
expect (Str.fromUtf8Range [] { start: 0, count: 0 }) == Ok ""
expect (Str.fromUtf8Range [72, 105, 80, 103] { start: 2, count: 3 }) |> Result.isErr
FromUtf8Result : {
aByteIndex : Nat,
bString : Str,
@ -744,8 +870,29 @@ walkUtf8WithIndexHelp = \string, state, step, index, length ->
else
state
## Enlarge a string for at least the given number of additional bytes.
reserve : Str, Nat -> Str
## Walks over the `UTF-8` bytes of the given [Str] and calls a function to update
## state for each byte.
##
## ```
## bytes = Str.walkUtf8 "abc" [] List.append
## expect bytes == [97, 98, 99]
## ```
walkUtf8 : Str, state, (state, U8 -> state) -> state
walkUtf8 = \str, initial, step ->
walkUtf8Help str initial step 0 (Str.countUtf8Bytes str)
walkUtf8Help : Str, state, (state, U8 -> state), Nat, Nat -> state
walkUtf8Help = \str, state, step, index, length ->
if index < length then
byte = Str.getUnsafe str index
newState = step state byte
walkUtf8Help str newState step (index + 1) length
else
state
expect (walkUtf8 "ABC" [] List.append) == [65, 66, 67]
expect (walkUtf8 "鹏" [] List.append) == [233, 185, 143]
## Shrink the memory footprint of a [Str] such that its capacity and length are equal.
## Note: This will also convert seamless slices to regular strings.

View file

@ -2,4 +2,5 @@ mod pretty_print;
pub use pretty_print::pretty_print_declarations;
pub use pretty_print::pretty_print_def;
pub use pretty_print::pretty_write_declarations;
pub use pretty_print::Ctx as PPCtx;

View file

@ -19,18 +19,45 @@ pub struct Ctx<'a> {
pub fn pretty_print_declarations(c: &Ctx, declarations: &Declarations) -> String {
let f = Arena::new();
print_declarations_help(c, &f, declarations)
.1
.pretty(80)
.to_string()
}
pub fn pretty_write_declarations(
writer: &mut impl std::io::Write,
c: &Ctx,
declarations: &Declarations,
) -> std::io::Result<()> {
let f = Arena::new();
print_declarations_help(c, &f, declarations)
.1
.render(80, writer)
}
pub fn pretty_print_def(c: &Ctx, d: &Def) -> String {
let f = Arena::new();
def(c, &f, d).append(f.hardline()).1.pretty(80).to_string()
}
fn print_declarations_help<'a>(
c: &Ctx,
f: &'a Arena<'a>,
declarations: &'a Declarations,
) -> DocBuilder<'a, Arena<'a>> {
let mut defs = Vec::with_capacity(declarations.len());
for (index, tag) in declarations.iter_bottom_up() {
let symbol = declarations.symbols[index].value;
let body = &declarations.expressions[index];
let def = match tag {
DeclarationTag::Value => def_symbol_help(c, &f, symbol, &body.value),
DeclarationTag::Value => def_symbol_help(c, f, symbol, &body.value),
DeclarationTag::Function(f_index)
| DeclarationTag::Recursive(f_index)
| DeclarationTag::TailRecursive(f_index) => {
let function_def = &declarations.function_bodies[f_index.index()].value;
toplevel_function(c, &f, symbol, function_def, &body.value)
toplevel_function(c, f, symbol, function_def, &body.value)
}
DeclarationTag::Expectation => todo!(),
DeclarationTag::ExpectationFx => todo!(),
@ -45,14 +72,6 @@ pub fn pretty_print_declarations(c: &Ctx, declarations: &Declarations) -> String
}
f.intersperse(defs, f.hardline().append(f.hardline()))
.1
.pretty(80)
.to_string()
}
pub fn pretty_print_def(c: &Ctx, d: &Def) -> String {
let f = Arena::new();
def(c, &f, d).append(f.hardline()).1.pretty(80).to_string()
}
macro_rules! maybe_paren {

View file

@ -6,7 +6,7 @@ use roc_types::{subs::Variable, types::MemberImpl};
use crate::{
abilities::AbilitiesStore,
def::{Annotation, Declaration, Def},
def::{Annotation, Def},
expr::{
self, AnnotatedMark, ClosureData, Declarations, Expr, Field, OpaqueWrapFunctionData,
StructAccessorData,
@ -14,19 +14,71 @@ use crate::{
pattern::{DestructType, Pattern, RecordDestruct, TupleDestruct},
};
macro_rules! visit_list {
($visitor:ident, $walk:ident, $list:expr) => {
for elem in $list {
$visitor.$walk(elem)
pub enum DeclarationInfo<'a> {
Value {
loc_symbol: Loc<Symbol>,
loc_expr: &'a Loc<Expr>,
expr_var: Variable,
pattern: Pattern,
annotation: Option<&'a Annotation>,
},
Expectation {
loc_condition: &'a Loc<Expr>,
},
Function {
loc_symbol: Loc<Symbol>,
loc_body: &'a Loc<Expr>,
expr_var: Variable,
pattern: Pattern,
function: &'a Loc<expr::FunctionDef>,
},
Destructure {
loc_pattern: &'a Loc<Pattern>,
opt_pattern_var: Option<Variable>,
loc_expr: &'a Loc<Expr>,
expr_var: Variable,
annotation: Option<&'a Annotation>,
},
}
impl<'a> DeclarationInfo<'a> {
pub fn region(&self) -> Region {
use DeclarationInfo::*;
match self {
Value {
loc_symbol,
loc_expr,
..
} => Region::span_across(&loc_symbol.region, &loc_expr.region),
Expectation { loc_condition } => loc_condition.region,
Function {
loc_symbol,
function,
..
} => Region::span_across(&loc_symbol.region, &function.region),
Destructure {
loc_pattern,
loc_expr,
..
} => Region::span_across(&loc_pattern.region, &loc_expr.region),
}
};
}
fn var(&self) -> Variable {
match self {
DeclarationInfo::Value { expr_var, .. } => *expr_var,
DeclarationInfo::Expectation { .. } => Variable::BOOL,
DeclarationInfo::Function { expr_var, .. } => *expr_var,
DeclarationInfo::Destructure { expr_var, .. } => *expr_var,
}
}
}
pub fn walk_decls<V: Visitor>(visitor: &mut V, decls: &Declarations) {
use crate::expr::DeclarationTag::*;
for (index, tag) in decls.declarations.iter().enumerate() {
match tag {
let info = match tag {
Value => {
let loc_expr = &decls.expressions[index];
@ -40,22 +92,19 @@ pub fn walk_decls<V: Visitor>(visitor: &mut V, decls: &Declarations) {
},
None => Pattern::Identifier(loc_symbol.value),
};
visitor.visit_pattern(&pattern, loc_symbol.region, Some(expr_var));
visitor.visit_expr(&loc_expr.value, loc_expr.region, expr_var);
if let Some(annot) = &decls.annotations[index] {
visitor.visit_annotation(annot);
DeclarationInfo::Value {
loc_symbol,
loc_expr,
expr_var,
pattern,
annotation: decls.annotations[index].as_ref(),
}
}
Expectation => {
Expectation | ExpectationFx => {
let loc_condition = &decls.expressions[index];
visitor.visit_expr(&loc_condition.value, loc_condition.region, Variable::BOOL);
}
ExpectationFx => {
let loc_condition = &decls.expressions[index];
visitor.visit_expr(&loc_condition.value, loc_condition.region, Variable::BOOL);
DeclarationInfo::Expectation { loc_condition }
}
Function(function_index)
| Recursive(function_index)
@ -72,16 +121,16 @@ pub fn walk_decls<V: Visitor>(visitor: &mut V, decls: &Declarations) {
},
None => Pattern::Identifier(loc_symbol.value),
};
visitor.visit_pattern(&pattern, loc_symbol.region, Some(expr_var));
let function_def = &decls.function_bodies[function_index.index() as usize];
walk_closure_help(
visitor,
&function_def.value.arguments,
DeclarationInfo::Function {
loc_symbol,
loc_body,
function_def.value.return_type,
)
expr_var,
pattern,
function: function_def,
}
}
Destructure(destructure_index) => {
let destructure = &decls.destructs[destructure_index.index() as usize];
@ -90,51 +139,83 @@ pub fn walk_decls<V: Visitor>(visitor: &mut V, decls: &Declarations) {
let loc_expr = &decls.expressions[index];
let expr_var = decls.variables[index];
let opt_var = match loc_pattern.value {
let opt_pattern_var = match loc_pattern.value {
Pattern::Identifier(..) | Pattern::AbilityMemberSpecialization { .. } => {
Some(expr_var)
}
_ => loc_pattern.value.opt_var(),
};
visitor.visit_pattern(&loc_pattern.value, loc_pattern.region, opt_var);
visitor.visit_expr(&loc_expr.value, loc_expr.region, expr_var);
if let Some(annot) = &decls.annotations[index] {
visitor.visit_annotation(annot);
DeclarationInfo::Destructure {
loc_pattern,
opt_pattern_var,
loc_expr,
expr_var,
annotation: decls.annotations[index].as_ref(),
}
}
MutualRecursion { .. } => { /* ignore */ }
}
MutualRecursion { .. } => {
// The actual declarations involved in the mutual recursion will come next.
continue;
}
};
visitor.visit_decl(info);
}
}
fn walk_decl<V: Visitor>(visitor: &mut V, decl: &Declaration) {
match decl {
Declaration::Declare(def) => {
visitor.visit_def(def);
}
Declaration::DeclareRec(defs, _cycle_mark) => {
visit_list!(visitor, visit_def, defs)
}
fn walk_decl<V: Visitor>(visitor: &mut V, decl: DeclarationInfo<'_>) {
use DeclarationInfo::*;
Declaration::Expects(expects) => {
let it = expects.regions.iter().zip(expects.conditions.iter());
for (region, condition) in it {
visitor.visit_expr(condition, *region, Variable::BOOL);
match decl {
Value {
loc_symbol,
loc_expr,
expr_var,
pattern,
annotation,
} => {
visitor.visit_pattern(&pattern, loc_symbol.region, Some(expr_var));
visitor.visit_expr(&loc_expr.value, loc_expr.region, expr_var);
if let Some(annot) = annotation {
visitor.visit_annotation(annot);
}
}
Declaration::ExpectsFx(expects) => {
let it = expects.regions.iter().zip(expects.conditions.iter());
for (region, condition) in it {
visitor.visit_expr(condition, *region, Variable::BOOL);
Expectation { loc_condition } => {
visitor.visit_expr(&loc_condition.value, loc_condition.region, Variable::BOOL);
}
Function {
loc_symbol,
loc_body,
expr_var,
pattern,
function,
} => {
visitor.visit_pattern(&pattern, loc_symbol.region, Some(expr_var));
walk_closure_help(
visitor,
&function.value.arguments,
loc_body,
function.value.return_type,
)
}
Destructure {
loc_pattern,
opt_pattern_var,
loc_expr,
expr_var,
annotation,
} => {
visitor.visit_pattern(&loc_pattern.value, loc_pattern.region, opt_pattern_var);
visitor.visit_expr(&loc_expr.value, loc_expr.region, expr_var);
if let Some(annot) = annotation {
visitor.visit_annotation(annot);
}
}
Declaration::Builtin(def) => visitor.visit_def(def),
Declaration::InvalidCycle(_cycles) => {
// ignore
}
}
};
}
pub fn walk_def<V: Visitor>(visitor: &mut V, def: &Def) {
@ -449,7 +530,7 @@ pub trait Visitor: Sized {
walk_decls(self, decls);
}
fn visit_decl(&mut self, decl: &Declaration) {
fn visit_decl(&mut self, decl: DeclarationInfo<'_>) {
if self.should_visit(decl.region()) {
walk_decl(self, decl);
}
@ -582,14 +663,24 @@ pub fn find_type_at(region: Region, decls: &Declarations) -> Option<Variable> {
visitor.typ
}
#[derive(Debug)]
pub enum FoundSymbol {
/// Specialization(T, foo1) is the specialization of foo for T.
Specialization(Symbol, Symbol),
/// AbilityMember(Foo, foo) is the ability member foo of Foo.
AbilityMember(Symbol, Symbol),
/// Raw symbol, not specialized to anything.
Symbol(Symbol),
}
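// A hypothetical caller sketch (not part of this commit): shows how the `FoundSymbol`
// variants returned by `find_symbol_at` (defined just below) might be consumed.
// The function name and message strings are illustrative only.
fn describe_symbol_at(
    region: Region,
    decls: &Declarations,
    abilities_store: &AbilitiesStore,
) -> Option<String> {
    Some(match find_symbol_at(region, decls, abilities_store)? {
        FoundSymbol::Specialization(typ, spec) => {
            format!("{spec:?} specializes an ability member for {typ:?}")
        }
        FoundSymbol::AbilityMember(ability, member) => {
            format!("{member:?} is a member of the ability {ability:?}")
        }
        FoundSymbol::Symbol(symbol) => format!("{symbol:?} is a plain symbol"),
    })
}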
/// Given an ability Foo has foo : ..., returns (T, foo1) if the symbol at the given region is a
/// symbol foo1 that specializes foo for T. Otherwise if the symbol is foo but the specialization
/// is unknown, (Foo, foo) is returned. Otherwise [None] is returned.
pub fn find_ability_member_and_owning_type_at(
pub fn find_symbol_at(
region: Region,
decls: &Declarations,
abilities_store: &AbilitiesStore,
) -> Option<(Symbol, Symbol)> {
) -> Option<FoundSymbol> {
let mut visitor = Finder {
region,
found: None,
@ -601,7 +692,7 @@ pub fn find_ability_member_and_owning_type_at(
struct Finder<'a> {
region: Region,
abilities_store: &'a AbilitiesStore,
found: Option<(Symbol, Symbol)>,
found: Option<FoundSymbol>,
}
impl Visitor for Finder<'_> {
@ -611,16 +702,19 @@ pub fn find_ability_member_and_owning_type_at(
fn visit_pattern(&mut self, pattern: &Pattern, region: Region, _opt_var: Option<Variable>) {
if region == self.region {
if let Pattern::AbilityMemberSpecialization {
ident: spec_symbol,
specializes: _,
} = pattern
{
debug_assert!(self.found.is_none());
let spec_type =
find_specialization_type_of_symbol(*spec_symbol, self.abilities_store)
.unwrap();
self.found = Some((spec_type, *spec_symbol))
match pattern {
Pattern::AbilityMemberSpecialization {
ident: spec_symbol,
specializes: _,
} => {
debug_assert!(self.found.is_none());
let spec_type =
find_specialization_type_of_symbol(*spec_symbol, self.abilities_store)
.unwrap();
self.found = Some(FoundSymbol::Specialization(spec_type, *spec_symbol))
}
Pattern::Identifier(symbol) => self.found = Some(FoundSymbol::Symbol(*symbol)),
_ => {}
}
}
@ -629,29 +723,33 @@ pub fn find_ability_member_and_owning_type_at(
fn visit_expr(&mut self, expr: &Expr, region: Region, var: Variable) {
if region == self.region {
if let &Expr::AbilityMember(member_symbol, specialization_id, _var) = expr {
debug_assert!(self.found.is_none());
self.found = match specialization_id
.and_then(|id| self.abilities_store.get_resolved(id))
{
Some(spec_symbol) => {
let spec_type = find_specialization_type_of_symbol(
spec_symbol,
self.abilities_store,
)
.unwrap();
Some((spec_type, spec_symbol))
}
None => {
let parent_ability = self
.abilities_store
.member_def(member_symbol)
.unwrap()
.parent_ability;
Some((parent_ability, member_symbol))
}
};
return;
match expr {
&Expr::AbilityMember(member_symbol, specialization_id, _var) => {
debug_assert!(self.found.is_none());
self.found = match specialization_id
.and_then(|id| self.abilities_store.get_resolved(id))
{
Some(spec_symbol) => {
let spec_type = find_specialization_type_of_symbol(
spec_symbol,
self.abilities_store,
)
.unwrap();
Some(FoundSymbol::Specialization(spec_type, spec_symbol))
}
None => {
let parent_ability = self
.abilities_store
.member_def(member_symbol)
.unwrap()
.parent_ability;
Some(FoundSymbol::AbilityMember(parent_ability, member_symbol))
}
};
return;
}
Expr::Var(symbol, _var) => self.found = Some(FoundSymbol::Symbol(*symbol)),
_ => {}
}
}
@ -705,3 +803,75 @@ pub fn symbols_introduced_from_pattern(
}
}
}
pub enum FoundDeclaration<'a> {
Decl(DeclarationInfo<'a>),
Def(&'a Def),
}
impl<'a> FoundDeclaration<'a> {
pub fn region(&self) -> Region {
match self {
FoundDeclaration::Decl(decl) => decl.region(),
FoundDeclaration::Def(def) => def.region(),
}
}
pub fn var(&self) -> Variable {
match self {
FoundDeclaration::Decl(decl) => decl.var(),
FoundDeclaration::Def(def) => def.expr_var,
}
}
}
/// Finds the declaration of `symbol`.
pub fn find_declaration(symbol: Symbol, decls: &'_ Declarations) -> Option<FoundDeclaration<'_>> {
let mut visitor = Finder {
symbol,
found: None,
};
visitor.visit_decls(decls);
return visitor.found;
struct Finder<'a> {
symbol: Symbol,
found: Option<FoundDeclaration<'a>>,
}
impl Visitor for Finder<'_> {
fn should_visit(&mut self, _region: Region) -> bool {
true
}
fn visit_decl(&mut self, decl: DeclarationInfo<'_>) {
match decl {
DeclarationInfo::Value { loc_symbol, .. }
| DeclarationInfo::Function { loc_symbol, .. }
if loc_symbol.value == self.symbol =>
{
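// Safety: as in the `Def` case below, `decl` only borrows from the passed-in
// `decls`, and the visitor does not synthesize declarations, so widening its
// lifetime here is sound.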
self.found = Some(FoundDeclaration::Decl(unsafe { std::mem::transmute(decl) }));
}
DeclarationInfo::Destructure { .. } => {
// TODO destructures
walk_decl(self, decl);
}
_ => {
walk_decl(self, decl);
}
}
}
fn visit_def(&mut self, def: &Def) {
if matches!(def.loc_pattern.value, Pattern::Identifier(s) if s == self.symbol) {
debug_assert!(self.found.is_none());
// Safety: the def can't escape the passed in `decls`, and the visitor does not
// synthesize defs.
self.found = Some(FoundDeclaration::Def(unsafe { std::mem::transmute(def) }));
return;
}
walk_def(self, def)
}
}
}

View file

@ -994,7 +994,6 @@ struct State<'a> {
/// From now on, these will be used by multiple threads; time to make an Arc<Mutex<_>>!
pub arc_modules: Arc<Mutex<PackageModuleIds<'a>>>,
pub arc_shorthands: Arc<Mutex<MutMap<&'a str, ShorthandPath>>>,
#[allow(unused)]
pub derived_module: SharedDerivedModule,
pub ident_ids_by_module: SharedIdentIdsByModule,

View file

@ -1,7 +1,79 @@
use self::Associativity::*;
use self::BinOp::*;
use std::cmp::Ordering;
use std::fmt;
const PRECEDENCES: [(BinOp, u8); 20] = [
(Caret, 7),
(Star, 6),
(Slash, 6),
(DoubleSlash, 5),
(Percent, 5),
(Plus, 4),
(Minus, 4),
(Pizza, 3),
(Equals, 2),
(NotEquals, 2),
(LessThan, 1),
(GreaterThan, 1),
(LessThanOrEq, 1),
(GreaterThanOrEq, 1),
(And, 0),
(Or, 0),
// These should never come up
(Assignment, 255),
(IsAliasType, 255),
(IsOpaqueType, 255),
(Backpassing, 255),
];
const ASSOCIATIVITIES: [(BinOp, Associativity); 20] = [
(Caret, RightAssociative),
(Star, LeftAssociative),
(Slash, LeftAssociative),
(DoubleSlash, LeftAssociative),
(Percent, LeftAssociative),
(Plus, LeftAssociative),
(Minus, LeftAssociative),
(Pizza, LeftAssociative),
(Equals, NonAssociative),
(NotEquals, NonAssociative),
(LessThan, NonAssociative),
(GreaterThan, NonAssociative),
(LessThanOrEq, NonAssociative),
(GreaterThanOrEq, NonAssociative),
(And, RightAssociative),
(Or, RightAssociative),
// These should never come up
(Assignment, LeftAssociative),
(IsAliasType, LeftAssociative),
(IsOpaqueType, LeftAssociative),
(Backpassing, LeftAssociative),
];
const DISPLAY_STRINGS: [(BinOp, &str); 20] = [
(Caret, "^"),
(Star, "*"),
(Slash, "/"),
(DoubleSlash, "//"),
(Percent, "%"),
(Plus, "+"),
(Minus, "-"),
(Pizza, "|>"),
(Equals, "=="),
(NotEquals, "!="),
(LessThan, "<"),
(GreaterThan, ">"),
(LessThanOrEq, "<="),
(GreaterThanOrEq, ">="),
(And, "&&"),
(Or, "||"),
(Assignment, "="),
(IsAliasType, ":"),
(IsOpaqueType, ":="),
(Backpassing, "<-"),
];
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum CalledVia {
/// Calling with space, e.g. (foo bar)
@ -36,6 +108,7 @@ pub enum BinOp {
Percent,
Plus,
Minus,
Pizza,
Equals,
NotEquals,
LessThan,
@ -44,7 +117,6 @@ pub enum BinOp {
GreaterThanOrEq,
And,
Or,
Pizza,
Assignment,
IsAliasType,
IsOpaqueType,
@ -93,29 +165,27 @@ pub enum Associativity {
impl BinOp {
pub fn associativity(self) -> Associativity {
use self::Associativity::*;
// The compiler should never pass any of these to this function!
debug_assert_ne!(self, Assignment);
debug_assert_ne!(self, IsAliasType);
debug_assert_ne!(self, IsOpaqueType);
debug_assert_ne!(self, Backpassing);
match self {
Pizza | Star | Slash | DoubleSlash | Percent | Plus | Minus => LeftAssociative,
And | Or | Caret => RightAssociative,
Equals | NotEquals | LessThan | GreaterThan | LessThanOrEq | GreaterThanOrEq => {
NonAssociative
}
Assignment | IsAliasType | IsOpaqueType | Backpassing => unreachable!(),
}
const ASSOCIATIVITY_TABLE: [Associativity; 20] = generate_associativity_table();
ASSOCIATIVITY_TABLE[self as usize]
}
fn precedence(self) -> u8 {
match self {
Caret => 7,
Star | Slash | DoubleSlash | Percent => 6,
Plus | Minus => 5,
Equals | NotEquals | LessThan | GreaterThan | LessThanOrEq | GreaterThanOrEq => 4,
And => 3,
Or => 2,
Pizza => 1,
Assignment | IsAliasType | IsOpaqueType | Backpassing => unreachable!(),
}
// The compiler should never pass any of these to this function!
debug_assert_ne!(self, Assignment);
debug_assert_ne!(self, IsAliasType);
debug_assert_ne!(self, IsOpaqueType);
debug_assert_ne!(self, Backpassing);
const PRECEDENCE_TABLE: [u8; 20] = generate_precedence_table();
PRECEDENCE_TABLE[self as usize]
}
}
@ -133,29 +203,75 @@ impl Ord for BinOp {
impl std::fmt::Display for BinOp {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let as_str = match self {
Caret => "^",
Star => "*",
Slash => "/",
DoubleSlash => "//",
Percent => "%",
Plus => "+",
Minus => "-",
Equals => "==",
NotEquals => "!=",
LessThan => "<",
GreaterThan => ">",
LessThanOrEq => "<=",
GreaterThanOrEq => ">=",
And => "&&",
Or => "||",
Pizza => "|>",
Assignment => "=",
IsAliasType => ":",
IsOpaqueType => ":=",
Backpassing => "<-",
};
debug_assert_ne!(*self, Assignment);
debug_assert_ne!(*self, IsAliasType);
debug_assert_ne!(*self, IsOpaqueType);
debug_assert_ne!(*self, Backpassing);
write!(f, "{}", as_str)
const DISPLAY_TABLE: [&str; 20] = generate_display_table();
write!(f, "{}", DISPLAY_TABLE[*self as usize])
}
}
const fn generate_precedence_table() -> [u8; 20] {
let mut table = [0u8; 20];
let mut i = 0;
while i < PRECEDENCES.len() {
table[(PRECEDENCES[i].0) as usize] = PRECEDENCES[i].1;
i += 1;
}
table
}
const fn generate_associativity_table() -> [Associativity; 20] {
let mut table = [NonAssociative; 20];
let mut i = 0;
while i < ASSOCIATIVITIES.len() {
table[(ASSOCIATIVITIES[i].0) as usize] = ASSOCIATIVITIES[i].1;
i += 1;
}
table
}
const fn generate_display_table() -> [&'static str; 20] {
let mut table = [""; 20];
let mut i = 0;
while i < DISPLAY_STRINGS.len() {
table[(DISPLAY_STRINGS[i].0) as usize] = DISPLAY_STRINGS[i].1;
i += 1;
}
table
}
#[cfg(test)]
mod tests {
use super::{BinOp, ASSOCIATIVITIES, DISPLAY_STRINGS, PRECEDENCES};
fn index_is_binop_u8(iter: impl Iterator<Item = BinOp>, table_name: &'static str) {
for (index, op) in iter.enumerate() {
assert_eq!(op as usize, index, "{op} was found at index {index} in {table_name}, but it should have been at index {} instead.", op as usize);
}
}
#[test]
fn indices_are_correct_in_precedences() {
index_is_binop_u8(PRECEDENCES.iter().map(|(op, _)| *op), "PRECEDENCES")
}
#[test]
fn indices_are_correct_in_associativities() {
index_is_binop_u8(ASSOCIATIVITIES.iter().map(|(op, _)| *op), "ASSOCIATIVITIES")
}
#[test]
fn indices_are_correct_in_display_string() {
index_is_binop_u8(DISPLAY_STRINGS.iter().map(|(op, _)| *op), "DISPLAY_STRINGS")
}
}

View file

@ -1328,6 +1328,7 @@ define_builtins! {
55 STR_GRAPHEMES: "graphemes"
56 STR_IS_VALID_SCALAR: "isValidScalar"
57 STR_RELEASE_EXCESS_CAPACITY: "releaseExcessCapacity"
58 STR_WALK_UTF8: "walkUtf8"
}
6 LIST: "List" => {
0 LIST_LIST: "List" exposed_apply_type=true // the List.List type alias

View file

@ -199,6 +199,10 @@ impl LineColumnRegion {
}
}
pub fn includes(&self, lc: LineColumn) -> bool {
self.contains(&Self::from_pos(lc))
}
pub const fn from_pos(pos: LineColumn) -> Self {
Self {
start: pos,

View file

@ -35,6 +35,7 @@ roc_problem = { path = "../problem" }
roc_reporting = { path = "../../reporting" }
roc_solve = { path = "../solve" }
roc_target = { path = "../roc_target" }
test_solve_helpers = { path = "../test_solve_helpers" }
bumpalo.workspace = true
indoc.workspace = true
@ -43,3 +44,4 @@ lazy_static.workspace = true
pretty_assertions.workspace = true
regex.workspace = true
tempfile.workspace = true
libtest-mimic.workspace = true

View file

@ -1,41 +0,0 @@
extern crate bumpalo;
/// Used in the with_larger_debug_stack() function, for tests that otherwise
/// run out of stack space in debug builds (but don't in --release builds)
#[allow(dead_code)]
const EXPANDED_STACK_SIZE: usize = 8 * 1024 * 1024;
/// Without this, some tests pass in `cargo test --release` but fail without
/// the --release flag because they run out of stack space. This increases
/// stack size for debug builds only, while leaving the stack space at the default
/// amount for release builds.
#[allow(dead_code)]
#[cfg(debug_assertions)]
pub fn with_larger_debug_stack<F>(run_test: F)
where
F: FnOnce(),
F: Send,
F: 'static,
{
std::thread::Builder::new()
.stack_size(EXPANDED_STACK_SIZE)
.spawn(run_test)
.expect("Error while spawning expanded dev stack size thread")
.join()
.expect("Error while joining expanded dev stack size thread")
}
/// In --release builds, don't increase the stack size. Run the test normally.
/// This way, we find out if any of our tests are blowing the stack even after
/// optimizations in release builds.
#[allow(dead_code)]
#[cfg(not(debug_assertions))]
#[inline(always)]
pub fn with_larger_debug_stack<F>(run_test: F)
where
F: FnOnce(),
F: Send,
F: 'static,
{
run_test()
}

File diff suppressed because it is too large

View file

@ -1822,6 +1822,33 @@ fn str_split_overlapping_substring_2() {
);
}
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-dev"))]
fn str_walk_utf8() {
#[cfg(not(feature = "gen-llvm-wasm"))]
assert_evals_to!(
// Reverse the bytes
indoc!(
r#"
Str.walkUtf8 "abcd" [] (\list, byte -> List.prepend list byte)
"#
),
RocList::from_slice(&[b'd', b'c', b'b', b'a']),
RocList<u8>
);
#[cfg(feature = "gen-llvm-wasm")]
assert_evals_to!(
indoc!(
r#"
Str.walkUtf8WithIndex "abcd" [] (\list, byte, index -> List.append list (Pair index byte))
"#
),
RocList::from_slice(&[(0, 'a'), (1, 'b'), (2, 'c'), (3, 'd')]),
RocList<(u32, char)>
);
}
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-dev"))]
fn str_walk_utf8_with_index() {

View file

@ -47,8 +47,8 @@ procedure Num.22 (#Attr.2, #Attr.3):
ret Num.276;
procedure Str.3 (#Attr.2, #Attr.3):
let Str.268 : Str = lowlevel StrConcat #Attr.2 #Attr.3;
ret Str.268;
let Str.300 : Str = lowlevel StrConcat #Attr.2 #Attr.3;
ret Str.300;
procedure Test.1 (Test.5):
ret Test.5;

View file

@ -319,31 +319,31 @@ procedure Num.24 (#Attr.2, #Attr.3):
ret Num.303;
procedure Str.12 (#Attr.2):
let Str.283 : List U8 = lowlevel StrToUtf8 #Attr.2;
ret Str.283;
let Str.315 : List U8 = lowlevel StrToUtf8 #Attr.2;
ret Str.315;
procedure Str.48 (#Attr.2, #Attr.3, #Attr.4):
let Str.275 : {U64, Str, Int1, U8} = lowlevel StrFromUtf8Range #Attr.2 #Attr.3 #Attr.4;
ret Str.275;
let Str.307 : {U64, Str, Int1, U8} = lowlevel StrFromUtf8Range #Attr.2 #Attr.3 #Attr.4;
ret Str.307;
procedure Str.9 (Str.77):
let Str.273 : U64 = 0i64;
let Str.274 : U64 = CallByName List.6 Str.77;
let Str.78 : {U64, Str, Int1, U8} = CallByName Str.48 Str.77 Str.273 Str.274;
let Str.270 : Int1 = StructAtIndex 2 Str.78;
if Str.270 then
let Str.272 : Str = StructAtIndex 1 Str.78;
inc Str.272;
dec Str.78;
let Str.271 : [C {U64, U8}, C Str] = TagId(1) Str.272;
ret Str.271;
procedure Str.9 (Str.79):
let Str.305 : U64 = 0i64;
let Str.306 : U64 = CallByName List.6 Str.79;
let Str.80 : {U64, Str, Int1, U8} = CallByName Str.48 Str.79 Str.305 Str.306;
let Str.302 : Int1 = StructAtIndex 2 Str.80;
if Str.302 then
let Str.304 : Str = StructAtIndex 1 Str.80;
inc Str.304;
dec Str.80;
let Str.303 : [C {U64, U8}, C Str] = TagId(1) Str.304;
ret Str.303;
else
let Str.268 : U8 = StructAtIndex 3 Str.78;
let Str.269 : U64 = StructAtIndex 0 Str.78;
dec Str.78;
let Str.267 : {U64, U8} = Struct {Str.269, Str.268};
let Str.266 : [C {U64, U8}, C Str] = TagId(0) Str.267;
ret Str.266;
let Str.300 : U8 = StructAtIndex 3 Str.80;
let Str.301 : U64 = StructAtIndex 0 Str.80;
dec Str.80;
let Str.299 : {U64, U8} = Struct {Str.301, Str.300};
let Str.298 : [C {U64, U8}, C Str] = TagId(0) Str.299;
ret Str.298;
procedure Test.0 ():
let Test.12 : Str = "bar";

View file

@ -196,31 +196,31 @@ procedure Num.24 (#Attr.2, #Attr.3):
ret Num.284;
procedure Str.12 (#Attr.2):
let Str.281 : List U8 = lowlevel StrToUtf8 #Attr.2;
ret Str.281;
let Str.313 : List U8 = lowlevel StrToUtf8 #Attr.2;
ret Str.313;
procedure Str.48 (#Attr.2, #Attr.3, #Attr.4):
let Str.275 : {U64, Str, Int1, U8} = lowlevel StrFromUtf8Range #Attr.2 #Attr.3 #Attr.4;
ret Str.275;
let Str.307 : {U64, Str, Int1, U8} = lowlevel StrFromUtf8Range #Attr.2 #Attr.3 #Attr.4;
ret Str.307;
procedure Str.9 (Str.77):
let Str.273 : U64 = 0i64;
let Str.274 : U64 = CallByName List.6 Str.77;
let Str.78 : {U64, Str, Int1, U8} = CallByName Str.48 Str.77 Str.273 Str.274;
let Str.270 : Int1 = StructAtIndex 2 Str.78;
if Str.270 then
let Str.272 : Str = StructAtIndex 1 Str.78;
inc Str.272;
dec Str.78;
let Str.271 : [C {U64, U8}, C Str] = TagId(1) Str.272;
ret Str.271;
procedure Str.9 (Str.79):
let Str.305 : U64 = 0i64;
let Str.306 : U64 = CallByName List.6 Str.79;
let Str.80 : {U64, Str, Int1, U8} = CallByName Str.48 Str.79 Str.305 Str.306;
let Str.302 : Int1 = StructAtIndex 2 Str.80;
if Str.302 then
let Str.304 : Str = StructAtIndex 1 Str.80;
inc Str.304;
dec Str.80;
let Str.303 : [C {U64, U8}, C Str] = TagId(1) Str.304;
ret Str.303;
else
let Str.268 : U8 = StructAtIndex 3 Str.78;
let Str.269 : U64 = StructAtIndex 0 Str.78;
dec Str.78;
let Str.267 : {U64, U8} = Struct {Str.269, Str.268};
let Str.266 : [C {U64, U8}, C Str] = TagId(0) Str.267;
ret Str.266;
let Str.300 : U8 = StructAtIndex 3 Str.80;
let Str.301 : U64 = StructAtIndex 0 Str.80;
dec Str.80;
let Str.299 : {U64, U8} = Struct {Str.301, Str.300};
let Str.298 : [C {U64, U8}, C Str] = TagId(0) Str.299;
ret Str.298;
procedure Test.0 ():
let Test.11 : Str = "foo";

View file

@ -204,31 +204,31 @@ procedure Num.24 (#Attr.2, #Attr.3):
ret Num.284;
procedure Str.12 (#Attr.2):
let Str.281 : List U8 = lowlevel StrToUtf8 #Attr.2;
ret Str.281;
let Str.313 : List U8 = lowlevel StrToUtf8 #Attr.2;
ret Str.313;
procedure Str.48 (#Attr.2, #Attr.3, #Attr.4):
let Str.275 : {U64, Str, Int1, U8} = lowlevel StrFromUtf8Range #Attr.2 #Attr.3 #Attr.4;
ret Str.275;
let Str.307 : {U64, Str, Int1, U8} = lowlevel StrFromUtf8Range #Attr.2 #Attr.3 #Attr.4;
ret Str.307;
procedure Str.9 (Str.77):
let Str.273 : U64 = 0i64;
let Str.274 : U64 = CallByName List.6 Str.77;
let Str.78 : {U64, Str, Int1, U8} = CallByName Str.48 Str.77 Str.273 Str.274;
let Str.270 : Int1 = StructAtIndex 2 Str.78;
if Str.270 then
let Str.272 : Str = StructAtIndex 1 Str.78;
inc Str.272;
dec Str.78;
let Str.271 : [C {U64, U8}, C Str] = TagId(1) Str.272;
ret Str.271;
procedure Str.9 (Str.79):
let Str.305 : U64 = 0i64;
let Str.306 : U64 = CallByName List.6 Str.79;
let Str.80 : {U64, Str, Int1, U8} = CallByName Str.48 Str.79 Str.305 Str.306;
let Str.302 : Int1 = StructAtIndex 2 Str.80;
if Str.302 then
let Str.304 : Str = StructAtIndex 1 Str.80;
inc Str.304;
dec Str.80;
let Str.303 : [C {U64, U8}, C Str] = TagId(1) Str.304;
ret Str.303;
else
let Str.268 : U8 = StructAtIndex 3 Str.78;
let Str.269 : U64 = StructAtIndex 0 Str.78;
dec Str.78;
let Str.267 : {U64, U8} = Struct {Str.269, Str.268};
let Str.266 : [C {U64, U8}, C Str] = TagId(0) Str.267;
ret Str.266;
let Str.300 : U8 = StructAtIndex 3 Str.80;
let Str.301 : U64 = StructAtIndex 0 Str.80;
dec Str.80;
let Str.299 : {U64, U8} = Struct {Str.301, Str.300};
let Str.298 : [C {U64, U8}, C Str] = TagId(0) Str.299;
ret Str.298;
procedure Test.0 ():
let Test.11 : Str = "foo";

View file

@ -57,31 +57,31 @@ procedure Num.127 (#Attr.2):
ret Num.276;
procedure Str.12 (#Attr.2):
let Str.280 : List U8 = lowlevel StrToUtf8 #Attr.2;
ret Str.280;
let Str.312 : List U8 = lowlevel StrToUtf8 #Attr.2;
ret Str.312;
procedure Str.48 (#Attr.2, #Attr.3, #Attr.4):
let Str.275 : {U64, Str, Int1, U8} = lowlevel StrFromUtf8Range #Attr.2 #Attr.3 #Attr.4;
ret Str.275;
let Str.307 : {U64, Str, Int1, U8} = lowlevel StrFromUtf8Range #Attr.2 #Attr.3 #Attr.4;
ret Str.307;
procedure Str.9 (Str.77):
let Str.273 : U64 = 0i64;
let Str.274 : U64 = CallByName List.6 Str.77;
let Str.78 : {U64, Str, Int1, U8} = CallByName Str.48 Str.77 Str.273 Str.274;
let Str.270 : Int1 = StructAtIndex 2 Str.78;
if Str.270 then
let Str.272 : Str = StructAtIndex 1 Str.78;
inc Str.272;
dec Str.78;
let Str.271 : [C {U64, U8}, C Str] = TagId(1) Str.272;
ret Str.271;
procedure Str.9 (Str.79):
let Str.305 : U64 = 0i64;
let Str.306 : U64 = CallByName List.6 Str.79;
let Str.80 : {U64, Str, Int1, U8} = CallByName Str.48 Str.79 Str.305 Str.306;
let Str.302 : Int1 = StructAtIndex 2 Str.80;
if Str.302 then
let Str.304 : Str = StructAtIndex 1 Str.80;
inc Str.304;
dec Str.80;
let Str.303 : [C {U64, U8}, C Str] = TagId(1) Str.304;
ret Str.303;
else
let Str.268 : U8 = StructAtIndex 3 Str.78;
let Str.269 : U64 = StructAtIndex 0 Str.78;
dec Str.78;
let Str.267 : {U64, U8} = Struct {Str.269, Str.268};
let Str.266 : [C {U64, U8}, C Str] = TagId(0) Str.267;
ret Str.266;
let Str.300 : U8 = StructAtIndex 3 Str.80;
let Str.301 : U64 = StructAtIndex 0 Str.80;
dec Str.80;
let Str.299 : {U64, U8} = Struct {Str.301, Str.300};
let Str.298 : [C {U64, U8}, C Str] = TagId(0) Str.299;
ret Str.298;
procedure Test.0 ():
let Test.9 : Str = "abc";

View file

@ -205,31 +205,31 @@ procedure Num.24 (#Attr.2, #Attr.3):
ret Num.286;
procedure Str.12 (#Attr.2):
let Str.281 : List U8 = lowlevel StrToUtf8 #Attr.2;
ret Str.281;
let Str.313 : List U8 = lowlevel StrToUtf8 #Attr.2;
ret Str.313;
procedure Str.48 (#Attr.2, #Attr.3, #Attr.4):
let Str.275 : {U64, Str, Int1, U8} = lowlevel StrFromUtf8Range #Attr.2 #Attr.3 #Attr.4;
ret Str.275;
let Str.307 : {U64, Str, Int1, U8} = lowlevel StrFromUtf8Range #Attr.2 #Attr.3 #Attr.4;
ret Str.307;
procedure Str.9 (Str.77):
let Str.273 : U64 = 0i64;
let Str.274 : U64 = CallByName List.6 Str.77;
let Str.78 : {U64, Str, Int1, U8} = CallByName Str.48 Str.77 Str.273 Str.274;
let Str.270 : Int1 = StructAtIndex 2 Str.78;
if Str.270 then
let Str.272 : Str = StructAtIndex 1 Str.78;
inc Str.272;
dec Str.78;
let Str.271 : [C {U64, U8}, C Str] = TagId(1) Str.272;
ret Str.271;
procedure Str.9 (Str.79):
let Str.305 : U64 = 0i64;
let Str.306 : U64 = CallByName List.6 Str.79;
let Str.80 : {U64, Str, Int1, U8} = CallByName Str.48 Str.79 Str.305 Str.306;
let Str.302 : Int1 = StructAtIndex 2 Str.80;
if Str.302 then
let Str.304 : Str = StructAtIndex 1 Str.80;
inc Str.304;
dec Str.80;
let Str.303 : [C {U64, U8}, C Str] = TagId(1) Str.304;
ret Str.303;
else
let Str.268 : U8 = StructAtIndex 3 Str.78;
let Str.269 : U64 = StructAtIndex 0 Str.78;
dec Str.78;
let Str.267 : {U64, U8} = Struct {Str.269, Str.268};
let Str.266 : [C {U64, U8}, C Str] = TagId(0) Str.267;
ret Str.266;
let Str.300 : U8 = StructAtIndex 3 Str.80;
let Str.301 : U64 = StructAtIndex 0 Str.80;
dec Str.80;
let Str.299 : {U64, U8} = Struct {Str.301, Str.300};
let Str.298 : [C {U64, U8}, C Str] = TagId(0) Str.299;
ret Str.298;
procedure Test.0 ():
let Test.12 : Str = "foo";

View file

@ -211,31 +211,31 @@ procedure Num.24 (#Attr.2, #Attr.3):
ret Num.286;
procedure Str.12 (#Attr.2):
let Str.281 : List U8 = lowlevel StrToUtf8 #Attr.2;
ret Str.281;
let Str.313 : List U8 = lowlevel StrToUtf8 #Attr.2;
ret Str.313;
procedure Str.48 (#Attr.2, #Attr.3, #Attr.4):
let Str.275 : {U64, Str, Int1, U8} = lowlevel StrFromUtf8Range #Attr.2 #Attr.3 #Attr.4;
ret Str.275;
let Str.307 : {U64, Str, Int1, U8} = lowlevel StrFromUtf8Range #Attr.2 #Attr.3 #Attr.4;
ret Str.307;
procedure Str.9 (Str.77):
let Str.273 : U64 = 0i64;
let Str.274 : U64 = CallByName List.6 Str.77;
let Str.78 : {U64, Str, Int1, U8} = CallByName Str.48 Str.77 Str.273 Str.274;
let Str.270 : Int1 = StructAtIndex 2 Str.78;
if Str.270 then
let Str.272 : Str = StructAtIndex 1 Str.78;
inc Str.272;
dec Str.78;
let Str.271 : [C {U64, U8}, C Str] = TagId(1) Str.272;
ret Str.271;
procedure Str.9 (Str.79):
let Str.305 : U64 = 0i64;
let Str.306 : U64 = CallByName List.6 Str.79;
let Str.80 : {U64, Str, Int1, U8} = CallByName Str.48 Str.79 Str.305 Str.306;
let Str.302 : Int1 = StructAtIndex 2 Str.80;
if Str.302 then
let Str.304 : Str = StructAtIndex 1 Str.80;
inc Str.304;
dec Str.80;
let Str.303 : [C {U64, U8}, C Str] = TagId(1) Str.304;
ret Str.303;
else
let Str.268 : U8 = StructAtIndex 3 Str.78;
let Str.269 : U64 = StructAtIndex 0 Str.78;
dec Str.78;
let Str.267 : {U64, U8} = Struct {Str.269, Str.268};
let Str.266 : [C {U64, U8}, C Str] = TagId(0) Str.267;
ret Str.266;
let Str.300 : U8 = StructAtIndex 3 Str.80;
let Str.301 : U64 = StructAtIndex 0 Str.80;
dec Str.80;
let Str.299 : {U64, U8} = Struct {Str.301, Str.300};
let Str.298 : [C {U64, U8}, C Str] = TagId(0) Str.299;
ret Str.298;
procedure Test.0 ():
let Test.13 : Str = "foo";

View file

@ -45,27 +45,27 @@ procedure Num.22 (#Attr.2, #Attr.3):
let Num.275 : Int1 = lowlevel NumLt #Attr.2 #Attr.3;
ret Num.275;
procedure Str.27 (Str.97):
let Str.266 : [C Int1, C I64] = CallByName Str.70 Str.97;
ret Str.266;
procedure Str.27 (Str.99):
let Str.298 : [C Int1, C I64] = CallByName Str.72 Str.99;
ret Str.298;
procedure Str.47 (#Attr.2):
let Str.274 : {I64, U8} = lowlevel StrToNum #Attr.2;
ret Str.274;
let Str.306 : {I64, U8} = lowlevel StrToNum #Attr.2;
ret Str.306;
procedure Str.70 (Str.232):
let Str.233 : {I64, U8} = CallByName Str.47 Str.232;
let Str.272 : U8 = StructAtIndex 1 Str.233;
let Str.273 : U8 = 0i64;
let Str.269 : Int1 = CallByName Bool.11 Str.272 Str.273;
if Str.269 then
let Str.271 : I64 = StructAtIndex 0 Str.233;
let Str.270 : [C Int1, C I64] = TagId(1) Str.271;
ret Str.270;
procedure Str.72 (Str.244):
let Str.245 : {I64, U8} = CallByName Str.47 Str.244;
let Str.304 : U8 = StructAtIndex 1 Str.245;
let Str.305 : U8 = 0i64;
let Str.301 : Int1 = CallByName Bool.11 Str.304 Str.305;
if Str.301 then
let Str.303 : I64 = StructAtIndex 0 Str.245;
let Str.302 : [C Int1, C I64] = TagId(1) Str.303;
ret Str.302;
else
let Str.268 : Int1 = false;
let Str.267 : [C Int1, C I64] = TagId(0) Str.268;
ret Str.267;
let Str.300 : Int1 = false;
let Str.299 : [C Int1, C I64] = TagId(0) Str.300;
ret Str.299;
procedure Test.0 ():
let Test.3 : Int1 = CallByName Bool.2;

View file

@ -348,27 +348,27 @@ procedure Num.77 (#Attr.2, #Attr.3):
ret Num.280;
procedure Str.48 (#Attr.2, #Attr.3, #Attr.4):
let Str.275 : {U64, Str, Int1, U8} = lowlevel StrFromUtf8Range #Attr.2 #Attr.3 #Attr.4;
ret Str.275;
let Str.307 : {U64, Str, Int1, U8} = lowlevel StrFromUtf8Range #Attr.2 #Attr.3 #Attr.4;
ret Str.307;
procedure Str.9 (Str.77):
let Str.273 : U64 = 0i64;
let Str.274 : U64 = CallByName List.6 Str.77;
let Str.78 : {U64, Str, Int1, U8} = CallByName Str.48 Str.77 Str.273 Str.274;
let Str.270 : Int1 = StructAtIndex 2 Str.78;
if Str.270 then
let Str.272 : Str = StructAtIndex 1 Str.78;
inc Str.272;
dec Str.78;
let Str.271 : [C {U64, U8}, C Str] = TagId(1) Str.272;
ret Str.271;
procedure Str.9 (Str.79):
let Str.305 : U64 = 0i64;
let Str.306 : U64 = CallByName List.6 Str.79;
let Str.80 : {U64, Str, Int1, U8} = CallByName Str.48 Str.79 Str.305 Str.306;
let Str.302 : Int1 = StructAtIndex 2 Str.80;
if Str.302 then
let Str.304 : Str = StructAtIndex 1 Str.80;
inc Str.304;
dec Str.80;
let Str.303 : [C {U64, U8}, C Str] = TagId(1) Str.304;
ret Str.303;
else
let Str.268 : U8 = StructAtIndex 3 Str.78;
let Str.269 : U64 = StructAtIndex 0 Str.78;
dec Str.78;
let Str.267 : {U64, U8} = Struct {Str.269, Str.268};
let Str.266 : [C {U64, U8}, C Str] = TagId(0) Str.267;
ret Str.266;
let Str.300 : U8 = StructAtIndex 3 Str.80;
let Str.301 : U64 = StructAtIndex 0 Str.80;
dec Str.80;
let Str.299 : {U64, U8} = Struct {Str.301, Str.300};
let Str.298 : [C {U64, U8}, C Str] = TagId(0) Str.299;
ret Str.298;
procedure Test.3 ():
let Test.0 : List U8 = Array [82i64, 111i64, 99i64];

View file

@ -316,53 +316,53 @@ procedure Num.77 (#Attr.2, #Attr.3):
ret Num.280;
procedure Str.12 (#Attr.2):
let Str.275 : List U8 = lowlevel StrToUtf8 #Attr.2;
ret Str.275;
let Str.307 : List U8 = lowlevel StrToUtf8 #Attr.2;
ret Str.307;
procedure Str.27 (Str.97):
let Str.266 : [C {}, C I64] = CallByName Str.70 Str.97;
ret Str.266;
procedure Str.27 (Str.99):
let Str.298 : [C {}, C I64] = CallByName Str.72 Str.99;
ret Str.298;
procedure Str.47 (#Attr.2):
let Str.274 : {I64, U8} = lowlevel StrToNum #Attr.2;
ret Str.274;
let Str.306 : {I64, U8} = lowlevel StrToNum #Attr.2;
ret Str.306;
procedure Str.48 (#Attr.2, #Attr.3, #Attr.4):
let Str.289 : {U64, Str, Int1, U8} = lowlevel StrFromUtf8Range #Attr.2 #Attr.3 #Attr.4;
ret Str.289;
let Str.321 : {U64, Str, Int1, U8} = lowlevel StrFromUtf8Range #Attr.2 #Attr.3 #Attr.4;
ret Str.321;
procedure Str.70 (Str.232):
let Str.233 : {I64, U8} = CallByName Str.47 Str.232;
let Str.272 : U8 = StructAtIndex 1 Str.233;
let Str.273 : U8 = 0i64;
let Str.269 : Int1 = CallByName Bool.11 Str.272 Str.273;
if Str.269 then
let Str.271 : I64 = StructAtIndex 0 Str.233;
let Str.270 : [C {}, C I64] = TagId(1) Str.271;
ret Str.270;
procedure Str.72 (Str.244):
let Str.245 : {I64, U8} = CallByName Str.47 Str.244;
let Str.304 : U8 = StructAtIndex 1 Str.245;
let Str.305 : U8 = 0i64;
let Str.301 : Int1 = CallByName Bool.11 Str.304 Str.305;
if Str.301 then
let Str.303 : I64 = StructAtIndex 0 Str.245;
let Str.302 : [C {}, C I64] = TagId(1) Str.303;
ret Str.302;
else
let Str.268 : {} = Struct {};
let Str.267 : [C {}, C I64] = TagId(0) Str.268;
ret Str.267;
let Str.300 : {} = Struct {};
let Str.299 : [C {}, C I64] = TagId(0) Str.300;
ret Str.299;
procedure Str.9 (Str.77):
let Str.287 : U64 = 0i64;
let Str.288 : U64 = CallByName List.6 Str.77;
let Str.78 : {U64, Str, Int1, U8} = CallByName Str.48 Str.77 Str.287 Str.288;
let Str.284 : Int1 = StructAtIndex 2 Str.78;
if Str.284 then
let Str.286 : Str = StructAtIndex 1 Str.78;
inc Str.286;
dec Str.78;
let Str.285 : [C {U64, U8}, C Str] = TagId(1) Str.286;
ret Str.285;
procedure Str.9 (Str.79):
let Str.319 : U64 = 0i64;
let Str.320 : U64 = CallByName List.6 Str.79;
let Str.80 : {U64, Str, Int1, U8} = CallByName Str.48 Str.79 Str.319 Str.320;
let Str.316 : Int1 = StructAtIndex 2 Str.80;
if Str.316 then
let Str.318 : Str = StructAtIndex 1 Str.80;
inc Str.318;
dec Str.80;
let Str.317 : [C {U64, U8}, C Str] = TagId(1) Str.318;
ret Str.317;
else
let Str.282 : U8 = StructAtIndex 3 Str.78;
let Str.283 : U64 = StructAtIndex 0 Str.78;
dec Str.78;
let Str.281 : {U64, U8} = Struct {Str.283, Str.282};
let Str.280 : [C {U64, U8}, C Str] = TagId(0) Str.281;
ret Str.280;
let Str.314 : U8 = StructAtIndex 3 Str.80;
let Str.315 : U64 = StructAtIndex 0 Str.80;
dec Str.80;
let Str.313 : {U64, U8} = Struct {Str.315, Str.314};
let Str.312 : [C {U64, U8}, C Str] = TagId(0) Str.313;
ret Str.312;
procedure Test.0 ():
let Test.37 : Str = "-1234";

View file

@ -27,12 +27,12 @@ procedure Num.22 (#Attr.2, #Attr.3):
ret Num.275;
procedure Str.16 (#Attr.2, #Attr.3):
let Str.266 : Str = lowlevel StrRepeat #Attr.2 #Attr.3;
ret Str.266;
let Str.298 : Str = lowlevel StrRepeat #Attr.2 #Attr.3;
ret Str.298;
procedure Str.3 (#Attr.2, #Attr.3):
let Str.267 : Str = lowlevel StrConcat #Attr.2 #Attr.3;
ret Str.267;
let Str.299 : Str = lowlevel StrConcat #Attr.2 #Attr.3;
ret Str.299;
procedure Test.1 ():
let Test.21 : Str = "lllllllllllllllllllllooooooooooong";

View file

@ -28,8 +28,8 @@ procedure Num.22 (#Attr.2, #Attr.3):
ret Num.275;
procedure Str.3 (#Attr.2, #Attr.3):
let Str.267 : Str = lowlevel StrConcat #Attr.2 #Attr.3;
ret Str.267;
let Str.299 : Str = lowlevel StrConcat #Attr.2 #Attr.3;
ret Str.299;
procedure Test.1 ():
let Test.21 : Str = "lllllllllllllllllllllooooooooooong";

View file

@ -1,6 +1,6 @@
procedure Str.3 (#Attr.2, #Attr.3):
let Str.267 : Str = lowlevel StrConcat #Attr.2 #Attr.3;
ret Str.267;
let Str.299 : Str = lowlevel StrConcat #Attr.2 #Attr.3;
ret Str.299;
procedure Test.2 (Test.4):
let Test.16 : U8 = GetTagId Test.4;

View file

@ -3,8 +3,8 @@ procedure Bool.11 (#Attr.2, #Attr.3):
ret Bool.23;
procedure Str.3 (#Attr.2, #Attr.3):
let Str.267 : Str = lowlevel StrConcat #Attr.2 #Attr.3;
ret Str.267;
let Str.299 : Str = lowlevel StrConcat #Attr.2 #Attr.3;
ret Str.299;
procedure Test.2 (Test.7):
let Test.24 : Str = ".trace(\"";

View file

@ -3,8 +3,8 @@ procedure Num.20 (#Attr.2, #Attr.3):
ret Num.275;
procedure Str.3 (#Attr.2, #Attr.3):
let Str.268 : Str = lowlevel StrConcat #Attr.2 #Attr.3;
ret Str.268;
let Str.300 : Str = lowlevel StrConcat #Attr.2 #Attr.3;
ret Str.300;
procedure Test.11 (Test.29, #Attr.12):
let Test.10 : {} = UnionAtIndex (Id 0) (Index 0) #Attr.12;

View file

@ -190,8 +190,8 @@ procedure Num.24 (#Attr.2, #Attr.3):
ret Num.286;
procedure Str.12 (#Attr.2):
let Str.267 : List U8 = lowlevel StrToUtf8 #Attr.2;
ret Str.267;
let Str.299 : List U8 = lowlevel StrToUtf8 #Attr.2;
ret Str.299;
procedure Test.2 (Test.10):
let Test.15 : {Str, Str} = CallByName Encode.23 Test.10;

View file

@ -330,8 +330,8 @@ procedure Num.24 (#Attr.2, #Attr.3):
ret Num.305;
procedure Str.12 (#Attr.2):
let Str.268 : List U8 = lowlevel StrToUtf8 #Attr.2;
ret Str.268;
let Str.300 : List U8 = lowlevel StrToUtf8 #Attr.2;
ret Str.300;
procedure Test.2 (Test.11):
let Test.18 : {{}, {}} = CallByName Encode.23 Test.11;

View file

@ -0,0 +1,31 @@
[package]
name = "test_solve_helpers"
description = "Utilities for testing the solver. This should eventually made into the roc_solve crate, but currently, solve tests have multiple harnesses."
authors.workspace = true
edition.workspace = true
license.workspace = true
version.workspace = true
[dependencies]
roc_can = { path = "../can" }
roc_derive = { path = "../derive" }
roc_load = { path = "../load" }
roc_module = { path = "../module" }
roc_packaging = { path = "../../packaging" }
roc_problem = { path = "../problem" }
roc_region = { path = "../region" }
roc_reporting = { path = "../../reporting" }
roc_solve = { path = "../solve" }
roc_late_solve = { path = "../late_solve" }
roc_solve_problem = { path = "../solve_problem" }
roc_target = { path = "../roc_target" }
roc_types = { path = "../types" }
bumpalo.workspace = true
indoc.workspace = true
insta.workspace = true
lazy_static.workspace = true
pretty_assertions.workspace = true
regex.workspace = true
tempfile.workspace = true

View file

@ -0,0 +1,599 @@
use std::{error::Error, io, path::PathBuf};
use bumpalo::Bump;
use lazy_static::lazy_static;
use regex::Regex;
use roc_can::{
abilities::AbilitiesStore,
expr::Declarations,
module::ExposedByModule,
traverse::{find_declaration, find_symbol_at, find_type_at, FoundSymbol},
};
use roc_derive::SharedDerivedModule;
use roc_late_solve::AbilitiesView;
use roc_load::LoadedModule;
use roc_module::symbol::{Interns, ModuleId};
use roc_packaging::cache::RocCacheDir;
use roc_problem::can::Problem;
use roc_region::all::{LineColumn, LineColumnRegion, LineInfo, Region};
use roc_reporting::report::{can_problem, type_problem, RocDocAllocator};
use roc_solve_problem::TypeError;
use roc_types::{
pretty_print::{name_and_print_var, DebugPrint},
subs::{Subs, Variable},
};
fn promote_expr_to_module(src: &str) -> String {
let mut buffer = String::from(indoc::indoc!(
r#"
app "test"
imports []
provides [main] to "./platform"
main =
"#
));
for line in src.lines() {
// indent the body!
buffer.push_str(" ");
buffer.push_str(line);
buffer.push('\n');
}
buffer
}
pub fn run_load_and_infer(
src: &str,
no_promote: bool,
) -> Result<(LoadedModule, String), std::io::Error> {
use tempfile::tempdir;
let arena = &Bump::new();
let module_src;
let temp;
if src.starts_with("app") || no_promote {
// this is already a module
module_src = src;
} else {
// this is an expression, promote it to a module
temp = promote_expr_to_module(src);
module_src = &temp;
}
let loaded = {
let dir = tempdir()?;
let filename = PathBuf::from("Test.roc");
let file_path = dir.path().join(filename);
let result = roc_load::load_and_typecheck_str(
arena,
file_path,
module_src,
dir.path().to_path_buf(),
roc_target::TargetInfo::default_x86_64(),
roc_reporting::report::RenderTarget::Generic,
RocCacheDir::Disallowed,
roc_reporting::report::DEFAULT_PALETTE,
);
dir.close()?;
result
};
let loaded = loaded.expect("failed to load module");
Ok((loaded, module_src.to_string()))
}
pub fn format_problems(
src: &str,
home: ModuleId,
interns: &Interns,
can_problems: Vec<Problem>,
type_problems: Vec<TypeError>,
) -> (String, String) {
let filename = PathBuf::from("test.roc");
let src_lines: Vec<&str> = src.split('\n').collect();
let lines = LineInfo::new(src);
let alloc = RocDocAllocator::new(&src_lines, home, interns);
let mut can_reports = vec![];
let mut type_reports = vec![];
for problem in can_problems {
let report = can_problem(&alloc, &lines, filename.clone(), problem.clone());
can_reports.push(report.pretty(&alloc));
}
for problem in type_problems {
if let Some(report) = type_problem(&alloc, &lines, filename.clone(), problem.clone()) {
type_reports.push(report.pretty(&alloc));
}
}
let mut can_reports_buf = String::new();
let mut type_reports_buf = String::new();
use roc_reporting::report::CiWrite;
alloc
.stack(can_reports)
.1
.render_raw(70, &mut CiWrite::new(&mut can_reports_buf))
.unwrap();
alloc
.stack(type_reports)
.1
.render_raw(70, &mut CiWrite::new(&mut type_reports_buf))
.unwrap();
(can_reports_buf, type_reports_buf)
}
lazy_static! {
/// Queries of the form
///
/// ```text
/// ^^^{(directive),*}?
///
/// directive :=
/// -\d+ # shift the query left by N columns
/// inst # instantiate the given generic instance
/// ```
static ref RE_TYPE_QUERY: Regex =
Regex::new(r#"(?P<where>\^+)(?:\{(?P<directives>.*?)\})?"#).unwrap();
static ref RE_DIRECTIVE : Regex =
Regex::new(r#"(?:-(?P<sub>\d+))|(?P<inst>inst)"#).unwrap();
}
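// A minimal sketch, assuming the regexes above, of how a query annotation such as
// `#^^^^{-1,inst}` decomposes; the input string here is hypothetical and illustrative only.
#[cfg(test)]
mod query_syntax_example {
    use super::{RE_DIRECTIVE, RE_TYPE_QUERY};

    #[test]
    fn parse_where_and_directives() {
        let caps = RE_TYPE_QUERY.captures("#^^^^{-1,inst}").unwrap();
        // The carets mark the queried span...
        assert_eq!(caps.name("where").unwrap().as_str(), "^^^^");

        // ...and the directives shift it one column left and request instantiation.
        let mut shift = 0u32;
        let mut instantiate = false;
        for directive in caps.name("directives").unwrap().as_str().split(',') {
            let d = RE_DIRECTIVE.captures(directive).unwrap();
            if let Some(sub) = d.name("sub") {
                shift += sub.as_str().parse::<u32>().unwrap();
            }
            instantiate |= d.name("inst").is_some();
        }
        assert_eq!(shift, 1);
        assert!(instantiate);
    }
}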
/// Marker for nested query lines, which should be skipped.
pub const MUTLILINE_MARKER: &str = "│";
#[derive(Debug, Clone)]
pub struct TypeQuery {
query_region: Region,
/// If true, the query is under a function call, which should be instantiated with the present
/// value and have its nested queries printed.
instantiate: bool,
source: String,
comment_column: u32,
source_line_column: LineColumn,
}
/// Parse inference queries in a Roc program.
/// See [RE_TYPE_QUERY].
fn parse_queries(src: &str, line_info: &LineInfo) -> Vec<TypeQuery> {
let mut queries = vec![];
let mut consecutive_query_lines = 0;
for (i, line) in src.lines().enumerate() {
// If this is a query line, it must contain a comment marker ('#') somewhere before
// the query carets.
let comment_column = match line.find('#') {
Some(i) => i as _,
None => {
consecutive_query_lines = 0;
continue;
}
};
let mut queries_on_line = RE_TYPE_QUERY.captures_iter(line).into_iter().peekable();
if queries_on_line.peek().is_none() || line.contains(MUTLILINE_MARKER) {
consecutive_query_lines = 0;
continue;
} else {
consecutive_query_lines += 1;
}
for capture in queries_on_line {
let source = capture
.get(0)
.expect("full capture must always exist")
.as_str()
.to_string();
let wher = capture.name("where").unwrap();
let mut subtract_col = 0u32;
let mut instantiate = false;
if let Some(directives) = capture.name("directives") {
for directive in directives.as_str().split(',') {
let directive = RE_DIRECTIVE
.captures(directive)
.unwrap_or_else(|| panic!("directive {directive} must match RE_DIRECTIVE"));
if let Some(sub) = directive.name("sub") {
subtract_col += sub.as_str().parse::<u32>().expect("must be a number");
}
if directive.name("inst").is_some() {
instantiate = true;
}
}
}
let (source_start, source_end) = (wher.start() as u32, wher.end() as u32);
let (query_start, query_end) = (source_start - subtract_col, source_end - subtract_col);
let source_line_column = LineColumn {
line: i as u32,
column: source_start,
};
let query_region = {
let last_line = i as u32 - consecutive_query_lines;
let query_start_lc = LineColumn {
line: last_line,
column: query_start,
};
let query_end_lc = LineColumn {
line: last_line,
column: query_end,
};
let query_lc_region = LineColumnRegion::new(query_start_lc, query_end_lc);
line_info.convert_line_column_region(query_lc_region)
};
queries.push(TypeQuery {
query_region,
source,
comment_column,
source_line_column,
instantiate,
});
}
}
queries
}
#[derive(Default, Clone, Copy)]
pub struct InferOptions {
pub print_can_decls: bool,
pub print_only_under_alias: bool,
pub allow_errors: bool,
pub no_promote: bool,
}
#[derive(Debug)]
pub enum Elaboration {
Specialization {
specialized_name: String,
typ: String,
},
Source {
source: String,
typ: String,
},
Instantiation {
typ: String,
source: String,
offset_line: u32,
queries_in_instantiation: InferredQueries,
},
}
#[derive(Debug)]
pub struct InferredQuery {
pub elaboration: Elaboration,
/// Where the comment before the query string was written in the source.
pub comment_column: u32,
/// Where the query string "^^^" itself was written in the source.
pub source_line_column: LineColumn,
/// The content of the query string.
pub source: String,
}
pub struct Program {
home: ModuleId,
interns: Interns,
declarations: Declarations,
}
impl Program {
pub fn write_can_decls(&self, writer: &mut impl io::Write) -> io::Result<()> {
use roc_can::debug::{pretty_write_declarations, PPCtx};
let ctx = PPCtx {
home: self.home,
interns: &self.interns,
print_lambda_names: true,
};
pretty_write_declarations(writer, &ctx, &self.declarations)
}
}
#[derive(Debug)]
pub struct InferredQueries(Vec<InferredQuery>);
impl InferredQueries {
/// Returns all inferred queries, sorted by
/// - increasing source line
/// - on ties, decreasing source column
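/// For example, queries at (line, column) (3, 5), (3, 9), and (4, 1) come back as (3, 9), (3, 5), (4, 1).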
pub fn into_sorted(self) -> Vec<InferredQuery> {
let mut queries = self.0;
queries.sort_by(|lc1, lc2| {
let line1 = lc1.source_line_column.line;
let line2 = lc2.source_line_column.line;
let col1 = lc1.source_line_column.column;
let col2 = lc2.source_line_column.column;
line1.cmp(&line2).then(col2.cmp(&col1))
});
queries
}
}
pub struct InferredProgram {
program: Program,
inferred_queries: Vec<InferredQuery>,
}
impl InferredProgram {
/// Decomposes the program and inferred queries.
pub fn decompose(self) -> (InferredQueries, Program) {
let Self {
program,
inferred_queries,
} = self;
(InferredQueries(inferred_queries), program)
}
}
pub fn infer_queries(src: &str, options: InferOptions) -> Result<InferredProgram, Box<dyn Error>> {
let (
LoadedModule {
module_id: home,
mut can_problems,
mut type_problems,
mut declarations_by_id,
mut solved,
interns,
abilities_store,
..
},
src,
) = run_load_and_infer(src, options.no_promote)?;
let declarations = declarations_by_id.remove(&home).unwrap();
let subs = solved.inner_mut();
let can_problems = can_problems.remove(&home).unwrap_or_default();
let type_problems = type_problems.remove(&home).unwrap_or_default();
if !options.allow_errors {
let (can_problems, type_problems) =
format_problems(&src, home, &interns, can_problems, type_problems);
if !can_problems.is_empty() {
return Err(format!("Canonicalization problems: {can_problems}",).into());
}
if !type_problems.is_empty() {
return Err(format!("Type problems: {type_problems}",).into());
}
}
let line_info = LineInfo::new(&src);
let queries = parse_queries(&src, &line_info);
if queries.is_empty() {
return Err("No queries provided!".into());
}
let mut inferred_queries = Vec::with_capacity(queries.len());
let exposed_by_module = ExposedByModule::default();
let arena = Bump::new();
let mut ctx = QueryCtx {
all_queries: &queries,
arena: &arena,
source: &src,
declarations: &declarations,
subs,
abilities_store: &abilities_store,
home,
interns: &interns,
line_info,
derived_module: Default::default(),
exposed_by_module,
options,
};
for query in queries.iter() {
let answer = ctx.answer(query)?;
inferred_queries.push(answer);
}
Ok(InferredProgram {
program: Program {
home,
interns,
declarations,
},
inferred_queries,
})
}
struct QueryCtx<'a> {
all_queries: &'a [TypeQuery],
arena: &'a Bump,
source: &'a str,
declarations: &'a Declarations,
subs: &'a mut Subs,
abilities_store: &'a AbilitiesStore,
home: ModuleId,
interns: &'a Interns,
line_info: LineInfo,
derived_module: SharedDerivedModule,
exposed_by_module: ExposedByModule,
options: InferOptions,
}
impl<'a> QueryCtx<'a> {
fn answer(&mut self, query: &TypeQuery) -> Result<InferredQuery, Box<dyn Error>> {
let TypeQuery {
query_region,
source,
comment_column,
source_line_column,
instantiate,
} = query;
let start = query_region.start().offset;
let end = query_region.end().offset;
let text = &self.source[start as usize..end as usize];
let var = find_type_at(*query_region, self.declarations).ok_or_else(|| {
format!(
"No type for {:?} ({:?})!",
&text,
self.line_info.convert_region(*query_region)
)
})?;
let snapshot = self.subs.snapshot();
let type_string = name_and_print_var(
var,
self.subs,
self.home,
self.interns,
DebugPrint {
print_lambda_sets: true,
print_only_under_alias: self.options.print_only_under_alias,
ignore_polarity: true,
print_weakened_vars: true,
},
);
let elaboration = if *instantiate {
self.infer_instantiated(var, type_string, *query_region, text)?
} else {
self.infer_direct(type_string, *query_region, text)
};
self.subs.rollback_to(snapshot);
Ok(InferredQuery {
elaboration,
comment_column: *comment_column,
source_line_column: *source_line_column,
source: source.to_string(),
})
}
fn infer_direct(&mut self, typ: String, query_region: Region, text: &str) -> Elaboration {
match find_symbol_at(query_region, self.declarations, self.abilities_store) {
Some(found_symbol) => match found_symbol {
FoundSymbol::Specialization(spec_type, spec_symbol)
| FoundSymbol::AbilityMember(spec_type, spec_symbol) => {
Elaboration::Specialization {
specialized_name: format!(
"{}#{}({})",
spec_type.as_str(self.interns),
text,
spec_symbol.ident_id().index(),
),
typ,
}
}
FoundSymbol::Symbol(symbol) => Elaboration::Source {
source: symbol.as_str(self.interns).to_owned(),
typ,
},
},
None => Elaboration::Source {
source: text.to_owned(),
typ,
},
}
}
fn infer_instantiated(
&mut self,
var: Variable,
typ: String,
query_region: Region,
text: &str,
) -> Result<Elaboration, Box<dyn Error>> {
let symbol = match find_symbol_at(query_region, self.declarations, self.abilities_store) {
Some(FoundSymbol::Symbol(symbol) | FoundSymbol::Specialization(_, symbol)) => symbol,
_ => return Err(format!("No symbol under {text:?}",).into()),
};
let def = find_declaration(symbol, self.declarations)
.ok_or_else(|| format!("No def found for {text:?}"))?;
let region = def.region();
let LineColumnRegion { start, end } = self.line_info.convert_region(region);
let start_pos = self.line_info.convert_line_column(LineColumn {
line: start.line,
column: 0,
});
let end_pos = self.line_info.convert_line_column(LineColumn {
line: end.line + 1,
column: 0,
});
let def_region = Region::new(start_pos, end_pos);
let def_source = &self.source[start_pos.offset as usize..end_pos.offset as usize];
roc_late_solve::unify(
self.home,
self.arena,
self.subs,
&AbilitiesView::Module(self.abilities_store),
&self.derived_module,
&self.exposed_by_module,
var,
def.var(),
)
.map_err(|_| "does not unify")?;
let queries_in_instantiation = self
.all_queries
.iter()
.filter(|query| def_region.contains(&query.query_region))
.map(|query| self.answer(query))
.collect::<Result<Vec<_>, _>>()?;
Ok(Elaboration::Instantiation {
typ,
source: def_source.to_owned(),
offset_line: start.line,
queries_in_instantiation: InferredQueries(queries_in_instantiation),
})
}
}
pub fn infer_queries_help(src: &str, expected: impl FnOnce(&str), options: InferOptions) {
let InferredProgram {
program,
inferred_queries,
} = infer_queries(src, options).unwrap();
let mut output_parts = Vec::with_capacity(inferred_queries.len() + 2);
if options.print_can_decls {
use roc_can::debug::{pretty_print_declarations, PPCtx};
let ctx = PPCtx {
home: program.home,
interns: &program.interns,
print_lambda_names: true,
};
let pretty_decls = pretty_print_declarations(&ctx, &program.declarations);
output_parts.push(pretty_decls);
output_parts.push("\n".to_owned());
}
for InferredQuery { elaboration, .. } in inferred_queries {
let output_part = match elaboration {
Elaboration::Specialization {
specialized_name,
typ,
} => format!("{specialized_name} : {typ}"),
Elaboration::Source { source, typ } => format!("{source} : {typ}"),
Elaboration::Instantiation { .. } => panic!("Use uitest instead"),
};
output_parts.push(output_part);
}
let pretty_output = output_parts.join("\n");
expected(&pretty_output);
}

View file

@ -0,0 +1,33 @@
[package]
name = "uitest"
description = "Integration tests for the solver."
authors.workspace = true
edition.workspace = true
license.workspace = true
version.workspace = true
[[test]]
name = "uitest"
path = "tests/uitest.rs"
harness = false
[dev-dependencies]
roc_builtins = { path = "../builtins" }
roc_derive = { path = "../derive", features = ["debug-derived-symbols"] }
roc_load = { path = "../load" }
roc_parse = { path = "../parse" }
roc_problem = { path = "../problem" }
roc_reporting = { path = "../../reporting" }
roc_solve = { path = "../solve" }
roc_target = { path = "../roc_target" }
test_solve_helpers = { path = "../test_solve_helpers" }
bumpalo.workspace = true
indoc.workspace = true
insta.workspace = true
lazy_static.workspace = true
pretty_assertions.workspace = true
regex.workspace = true
tempfile.workspace = true
libtest-mimic.workspace = true

View file

@ -0,0 +1,448 @@
use std::{
error::Error,
ffi::OsStr,
fs, io,
path::{Path, PathBuf},
process::Command,
};
use lazy_static::lazy_static;
use libtest_mimic::{run, Arguments, Failed, Trial};
use regex::Regex;
use test_solve_helpers::{
infer_queries, Elaboration, InferOptions, InferredProgram, InferredQuery, MUTLILINE_MARKER,
};
fn main() -> Result<(), Box<dyn Error>> {
let args = Arguments::from_args();
let test_files = collect_uitest_files()?;
let tests = test_files
.into_iter()
.map(into_test)
.collect::<Result<_, _>>()?;
run(&args, tests).exit()
}
lazy_static! {
static ref UITEST_PATH: PathBuf = PathBuf::from(std::env!("ROC_WORKSPACE_DIR"))
.join("crates")
.join("compiler")
.join("uitest")
.join("tests");
/// # +opt infer:<opt>
static ref RE_OPT_INFER: Regex =
Regex::new(r#"# \+opt infer:(?P<opt>.*)"#).unwrap();
/// # +opt print:<opt>
static ref RE_OPT_PRINT: Regex =
Regex::new(r#"# \+opt print:(?P<opt>.*)"#).unwrap();
}
fn collect_uitest_files() -> io::Result<Vec<PathBuf>> {
let mut tests = Vec::with_capacity(200);
let mut dirs_to_visit = vec![UITEST_PATH.clone()];
while let Some(dir) = dirs_to_visit.pop() {
for entry in fs::read_dir(dir)? {
let entry = entry?;
let path = entry.path();
let entry_type = entry.file_type()?;
if entry_type.is_dir() {
dirs_to_visit.push(path);
continue;
}
if path.extension() == Some(OsStr::new("txt")) {
tests.push(path);
}
}
}
Ok(tests)
}
fn into_test(path: PathBuf) -> io::Result<Trial> {
let name = path
.strip_prefix(UITEST_PATH.as_path().parent().unwrap())
.expect("collected path does not have uitest prefix")
.display()
.to_string();
let trial = Trial::test(name, move || run_test(path));
Ok(trial)
}
fn run_test(path: PathBuf) -> Result<(), Failed> {
let data = std::fs::read_to_string(&path)?;
let TestCase {
infer_options,
print_options,
source,
} = TestCase::parse(data)?;
let inferred_program = infer_queries(&source, infer_options)?;
{
let mut fd = fs::OpenOptions::new()
.write(true)
.truncate(true)
.open(&path)?;
assemble_query_output(&mut fd, &source, inferred_program, print_options)?;
}
check_for_changes(&path)?;
Ok(())
}
const EMIT_HEADER: &str = "# -emit:";
struct TestCase {
infer_options: InferOptions,
print_options: PrintOptions,
source: String,
}
#[derive(Default)]
struct PrintOptions {
can_decls: bool,
}
impl TestCase {
fn parse(mut data: String) -> Result<Self, Failed> {
// Drop anything following `# -emit:` header lines; that's the output.
if let Some(drop_at) = data.find(EMIT_HEADER) {
data.truncate(drop_at);
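// Also drop any trailing whitespace that preceded the emit header.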
data.truncate(data.trim_end().len());
}
Ok(TestCase {
infer_options: Self::parse_infer_options(&data)?,
print_options: Self::parse_print_options(&data)?,
source: data,
})
}
fn parse_infer_options(data: &str) -> Result<InferOptions, Failed> {
let mut infer_opts = InferOptions {
no_promote: true,
..Default::default()
};
let found_infer_opts = RE_OPT_INFER.captures_iter(data);
for infer_opt in found_infer_opts {
let opt = infer_opt.name("opt").unwrap().as_str();
match opt.trim() {
"allow_errors" => infer_opts.allow_errors = true,
"print_only_under_alias" => infer_opts.print_only_under_alias = true,
other => return Err(format!("unknown infer option: {other:?}").into()),
}
}
Ok(infer_opts)
}
fn parse_print_options(data: &str) -> Result<PrintOptions, Failed> {
let mut print_opts = PrintOptions::default();
let found_infer_opts = RE_OPT_PRINT.captures_iter(data);
for infer_opt in found_infer_opts {
let opt = infer_opt.name("opt").unwrap().as_str();
match opt.trim() {
"can_decls" => print_opts.can_decls = true,
other => return Err(format!("unknown print option: {other:?}").into()),
}
}
Ok(print_opts)
}
}
fn check_for_changes(path: &Path) -> Result<(), Failed> {
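// `git add -N` records an intent-to-add, so brand-new fixture files also show up in `git diff` below.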
Command::new("git").args(["add", "-N"]).arg(path).output()?;
let has_changes = Command::new("git")
.args(["diff", "--color=always"])
.arg(path)
.output()?;
if !has_changes.stdout.is_empty() {
return Err(format!(
"{}\nOutput has changed. If it looks okay, `git` add the file.",
std::str::from_utf8(&has_changes.stdout)?
)
.into());
}
Ok(())
}
/// Assemble the output for a test, with queries elaborated in-line.
fn assemble_query_output(
writer: &mut impl io::Write,
source: &str,
inferred_program: InferredProgram,
print_options: PrintOptions,
) -> io::Result<()> {
// Reverse the queries so that we can pop them off the end as we pass through the lines.
let (queries, program) = inferred_program.decompose();
let mut sorted_queries = queries.into_sorted();
sorted_queries.reverse();
let mut reflow = Reflow::new_unindented(writer);
write_source_with_answers(&mut reflow, source, sorted_queries, 0)?;
// Finish up with any remaining print options we were asked to provide.
let PrintOptions { can_decls } = print_options;
if can_decls {
writeln!(writer, "\n{EMIT_HEADER}can_decls")?;
program.write_can_decls(writer)?;
}
Ok(())
}
fn write_source_with_answers<W: io::Write>(
reflow: &mut Reflow<'_, W>,
source: &str,
mut sorted_queries: Vec<InferredQuery>,
offset_line: usize,
) -> io::Result<()> {
for (i, line) in source.lines().enumerate() {
let i = i + offset_line;
let mut is_query_line = false;
// Write all elaborated query lines if applicable.
while matches!(
sorted_queries.last(),
Some(InferredQuery {
source_line_column,
..
}) if source_line_column.line == i as _
) {
let inferred = sorted_queries.pop().unwrap();
reflow.scoped(|reflow| reconstruct_comment_line(reflow, inferred))?;
reflow.write("\n")?;
is_query_line = true;
}
// If this was previously a multi-line query output line, skip it, since we already wrote
// the new output above.
if line.contains(MUTLILINE_MARKER) {
continue;
}
// Otherwise, write the Roc source line.
if !is_query_line {
reflow.write(line.trim_end())?;
reflow.write("\n")?;
}
}
let mut sorted_queries = sorted_queries.into_iter().peekable();
while let Some(sorted_query) = sorted_queries.next() {
reflow.scoped(|reflow| reconstruct_comment_line(reflow, sorted_query))?;
// Only write a newline if we're not yet at the end of the source.
// Otherwise, a newline will be written for us after exiting the reconstruction of the
// comment line, since this must happen in the reconstruction of a multi-line query.
if sorted_queries.peek().is_some() {
reflow.write("\n")?;
}
}
Ok(())
}
fn reconstruct_comment_line<W: io::Write>(
reflow: &mut Reflow<'_, W>,
inferred: InferredQuery,
) -> io::Result<()> {
let InferredQuery {
comment_column,
source_line_column,
source,
elaboration,
} = inferred;
reflow.add_layer(comment_column as _, source_line_column.column as _);
reflow.write_and_bump(&format!("{source} "))?;
match elaboration {
Elaboration::Specialization {
specialized_name,
typ,
} => {
reflow.write_and_bump(&format!("{specialized_name}: "))?;
reflow.write(&typ)
}
Elaboration::Source { source: _, typ } => reflow.write(&typ),
Elaboration::Instantiation {
typ,
source,
offset_line,
queries_in_instantiation,
} => {
reflow.write(&typ)?;
// Write the source on a new line, but at the reflow column the comment is aligned at.
reflow.set_content(source_line_column.column as _);
reflow.write("\n")?;
let queries = queries_in_instantiation.into_sorted();
write_source_with_answers(reflow, source.trim_end(), queries, offset_line as _)
}
}
}
struct Reflow<'a, W: io::Write> {
writer: &'a mut W,
state: ReflowState,
}
#[derive(Clone, Debug)]
struct ReflowState {
/// true if the first line of the elaboration comment has been written.
top_line_written: bool,
/// Number of `content columns` prefixes written.
/// If this equals the number of content columns, the whole prefix for a line has been written.
content_prefixes_written: usize,
/// The column at which to insert the comment prefix "#".
comment_column: usize,
/// The columns at which content occurs.
/// If the stack is >1, then
/// - at the first content column, the [MUTLILINE_MARKER] may be written as appropriate
/// - for subsequent columns, spaces are inserted until the column is reached.
content_columns: Vec<usize>,
}
impl<'a, W: io::Write> std::ops::Deref for Reflow<'a, W> {
type Target = ReflowState;
fn deref(&self) -> &Self::Target {
&self.state
}
}
impl<'a, W: io::Write> std::ops::DerefMut for Reflow<'a, W> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.state
}
}
impl<'a, W: io::Write> Reflow<'a, W> {
fn new_unindented(writer: &'a mut W) -> Self {
Self {
writer,
state: ReflowState {
top_line_written: false,
content_prefixes_written: 0,
comment_column: 0,
content_columns: vec![],
},
}
}
fn scoped<T>(&mut self, f: impl FnOnce(&mut Self) -> T) -> T {
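// Snapshot the reflow state, run `f`, then restore the snapshot so nested layers don't leak out.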
let state = self.state.clone();
let result = f(self);
self.state = state;
result
}
fn add_layer(&mut self, comment_column: usize, content_column: usize) {
if self.comment_column == 0 {
// If the comment column is not yet set, this is the top level and we should update the
// state; otherwise, we already have a comment column, so only push the new
// content column layer.
self.comment_column = comment_column;
}
self.content_columns.push(content_column);
}
fn set_content(&mut self, content_column: usize) {
let latest_column = self
.content_columns
.last_mut()
.expect("cannot set content before adding a layer");
*latest_column = content_column;
}
fn write(&mut self, content: &str) -> io::Result<()> {
for (i, content_line) in content.split('\n').enumerate() {
if i > 0 {
// new line
writeln!(self.writer)?;
self.content_prefixes_written = 0;
}
// If the content columns are empty, this is top-level and we
// have no prefix to write.
if self.content_prefixes_written != self.content_columns.len() {
if self.content_prefixes_written == 0 {
self.write_n_spaces(self.comment_column)?;
write!(self.writer, "#")?;
// For the first content column, write spaces up to the column, and then, if we are
// in a multiline context, add the multi-line marker.
{
self.write_n_spaces(self.content_columns[0] - self.comment_column - 1)?;
if self.top_line_written {
write!(self.writer, "{MUTLILINE_MARKER} ")?;
}
}
self.content_prefixes_written = 1;
}
// For all remaining content columns, fill them in with spaces.
let remaining_content_columns = self
.content_columns
.iter()
.skip(self.content_prefixes_written);
self.write_n_spaces(remaining_content_columns.sum())?;
self.content_prefixes_written = self.content_columns.len();
self.top_line_written = true;
}
write!(self.writer, "{content_line}")?;
}
Ok(())
}
fn write_and_bump(&mut self, content: &str) -> io::Result<()> {
assert!(
content.lines().count() == 1,
"cannot bump with multi-line content"
);
self.write(content)?;
let column = self
.content_columns
.last_mut()
.expect("cannot write_and_bump before adding layer");
*column += content.len();
Ok(())
}
fn write_n_spaces(&mut self, n: usize) -> io::Result<()> {
for _ in 0..n {
write!(self.writer, " ")?;
}
Ok(())
}
}

View file

@ -0,0 +1,7 @@
# +opt infer:print_only_under_alias
app "test" provides [main] to "./platform"
F a : a | a has Hash
main : F a -> F a
#^^^^{-1} a -[[main(0)]]-> a | a has Hash

View file

@ -0,0 +1,7 @@
# +opt infer:print_only_under_alias
app "test" provides [main] to "./platform"
F a : a | a has Hash & Eq & Decoding
main : F a -> F a
#^^^^{-1} a -[[main(0)]]-> a | a has Hash & Decoding & Eq

View file

@ -0,0 +1,8 @@
app "test" provides [main] to "./platform"
f : x -> x | x has Hash
g : x -> x | x has Decoding & Encoding
main : x -> x | x has Hash & Decoding & Encoding
main = \x -> x |> f |> g
#^^^^{-1} x -[[main(0)]]-> x | x has Hash & Encoding & Decoding

View file

@ -0,0 +1,12 @@
app "test" provides [top] to "./platform"
MDict u := (List u) | u has Hash & Eq
bot : MDict k -> MDict k
bot = \@MDict data ->
when {} is
{} -> @MDict data
top : MDict v -> MDict v
top = \x -> bot x
#^^^{-1} MDict v -[[top(0)]]-> MDict v | v has Hash & Eq

View file

@ -0,0 +1,9 @@
app "test" provides [isEqQ] to "./platform"
Q := [ F (Str -> Str), G ] has [Eq { isEq: isEqQ }]
isEqQ = \@Q q1, @Q q2 -> when T q1 q2 is
#^^^^^{-1} Q, Q -[[isEqQ(0)]]-> Bool
T (F _) (F _) -> Bool.true
T G G -> Bool.true
_ -> Bool.false

View file

@ -0,0 +1,10 @@
# +opt infer:print_only_under_alias
app "test" provides [main] to "./platform"
Q := ({} -> Str) has [Eq {isEq: isEqQ}]
isEqQ = \@Q f1, @Q f2 -> (f1 {} == f2 {})
#^^^^^{-1} ({} -[[]]-> Str), ({} -[[]]-> Str) -[[isEqQ(2)]]-> [False, True]
main = isEqQ (@Q \{} -> "a") (@Q \{} -> "a")
# ^^^^^ ({} -[[6, 7]]-> Str), ({} -[[6, 7]]-> Str) -[[isEqQ(2)]]-> [False, True]

View file

@ -0,0 +1,6 @@
app "test" provides [main] to "./platform"
main : Decoder Bool _
main = Decode.custom \bytes, fmt ->
Decode.decodeWith bytes Decode.decoder fmt
# ^^^^^^^^^^^^^^ Decoding#Decode.decoder(4): Decoder Bool fmt | fmt has DecoderFormatting

View file

@ -0,0 +1,4 @@
app "test" provides [main] to "./platform"
main = Bool.isEq Bool.true Bool.false
# ^^^^^^^^^ Eq#Bool.isEq(9): Bool, Bool -[[Bool.structuralEq(11)]]-> Bool

View file

@ -0,0 +1,5 @@
app "test" provides [main] to "./platform"
main =
\h -> Hash.hash h Bool.true
# ^^^^^^^^^ Hash#Hash.hash(1): a, Bool -[[Hash.hashBool(9)]]-> a | a has Hasher

View file

@ -0,0 +1,4 @@
app "test" provides [main] to "./platform"
main = Encode.toEncoder Bool.true
# ^^^^^^^^^^^^^^^^ Encoding#Encode.toEncoder(2): Bool -[[] + fmt:Encode.bool(17):1]-> Encoder fmt | fmt has EncoderFormatting

View file

@ -0,0 +1,6 @@
app "test" provides [main] to "./platform"
n : Num *
main = n == 1.
# ^ Dec

View file

@ -0,0 +1,9 @@
# +opt infer:print_only_under_alias
app "test" provides [main] to "./platform"
N := U8 has [Decoding]
main : Decoder N _
main = Decode.custom \bytes, fmt ->
Decode.decodeWith bytes Decode.decoder fmt
# ^^^^^^^^^^^^^^ N#Decode.decoder(3): List U8, fmt -[[7]]-> { rest : List U8, result : [Err [TooShort], Ok U8] } | fmt has DecoderFormatting

View file

@ -0,0 +1,6 @@
app "test" provides [main] to "./platform"
N := U8 has [Encoding]
main = Encode.toEncoder (@N 15)
# ^^^^^^^^^^^^^^^^ N#Encode.toEncoder(3): N -[[#N_toEncoder(3)]]-> Encoder fmt | fmt has EncoderFormatting

View file

@ -0,0 +1,8 @@
app "test" provides [main] to "./platform"
Trivial := {} has [Eq {isEq}]
isEq = \@Trivial {}, @Trivial {} -> Bool.true
main = Bool.isEq (@Trivial {}) (@Trivial {})
# ^^^^^^^^^ Trivial#Bool.isEq(2): Trivial, Trivial -[[isEq(2)]]-> Bool

View file

@ -0,0 +1,6 @@
app "test" provides [main] to "./platform"
N := U8 has [Eq]
main = Bool.isEq (@N 15) (@N 23)
# ^^^^^^^^^ N#Bool.isEq(3): N, N -[[#N_isEq(3)]]-> Bool

View file

@ -0,0 +1,8 @@
app "test" provides [main] to "./platform"
Noop := {} has [Hash {hash}]
hash = \hasher, @Noop {} -> hasher
main = \hasher -> hash hasher (@Noop {})
#^^^^{-1} hasher -[[main(0)]]-> hasher | hasher has Hasher

View file

@ -0,0 +1,6 @@
app "test" provides [main] to "./platform"
N := U8 has [Hash]
main = \hasher, @N n -> Hash.hash hasher (@N n)
# ^^^^^^^^^ N#Hash.hash(3): a, N -[[#N_hash(3)]]-> a | a has Hasher

View file

@ -0,0 +1,5 @@
app "test" provides [main] to "./platform"
main =
\h -> Hash.hash h 7
# ^^^^^^^^^ Hash#Hash.hash(1): a, I64 -[[Hash.hashI64(13)]]-> a | a has Hasher

View file

@ -0,0 +1,12 @@
app "test" provides [main] to "./platform"
main =
s1 : Set U8
s1 = Set.empty {}
s2 : Set Str
s2 = Set.empty {}
Bool.isEq s1 s1 && Bool.isEq s2 s2
# ^^^^^^^^^ Set#Bool.isEq(17): Set Str, Set Str -[[Set.isEq(17)]]-> Bool
# ^^^^^^^^^ Set#Bool.isEq(17): Set U8, Set U8 -[[Set.isEq(17)]]-> Bool

View file

@ -0,0 +1,8 @@
app "test" provides [main] to "./platform"
polyDbg = \x ->
#^^^^^^^{-1} a -[[polyDbg(1)]]-> a
dbg x
x
main = polyDbg ""

View file

@ -0,0 +1,8 @@
# +opt infer:allow_errors
app "test" provides [main] to "./platform"
main =
\x -> when x is
#^ [A [B]w_a [C]w_b]
A B _ -> ""
A _ C -> ""

View file

@ -0,0 +1,7 @@
app "test" provides [main] to "./platform"
main =
\x -> when x is
#^ { a : [A { b : [B]w_a }*]w_b }*
{ a: A { b: B } } -> ""
_ -> ""

View file

@ -0,0 +1,7 @@
app "test" provides [main] to "./platform"
walkHelp : {} -> [Continue {}, Break []]
main = when walkHelp {} is
# ^^^^^^^^^^^ [Break [], Continue {}]
Continue {} -> {}

View file

@ -0,0 +1,8 @@
app "test" provides [main] to "./platform"
x : Result Str []
x = Ok "abc"
main = when x is
#^^^^{-1} Str
Ok s -> s

View file

@ -0,0 +1,9 @@
app "test" provides [main] to "./platform"
x : Result Str []
x = Ok "abc"
Ok str = x
main = str
# ^^^ Str

View file

@ -0,0 +1,5 @@
app "test" provides [x] to "./platform"
x : Result Str [] -> Str
x = \Ok s -> s
#^{-1} Result Str [] -[[x(0)]]-> Str

View file

@ -0,0 +1,5 @@
app "test" provides [f] to "./platform"
f : _ -> {}
f = \_ -> f {}
#^{-1} {} -[[f(0)]]-> {}

View file

@ -0,0 +1,6 @@
app "test" provides [getInfallible] to "./platform"
getInfallible = \result -> when result is
#^^^^^^^^^^^^^{-1} [Ok a]w_b -[[getInfallible(0)]]-> a
Ok x -> x
_ -> crash "turns out this was fallible"

View file

@ -0,0 +1,30 @@
app "test" provides [main] to "./platform"
X has
consume : a -> {} | a has X
O := {} has [X {consume: consumeO}]
consumeO = \@O {} -> {}
P := {} has [X {consume: consumeP}]
consumeP = \@P {} -> {}
caller = \x -> consume x
# ^ a | a has X
# ^^^^^^^ X#consume(2): a -[[] + a:consume(2):1]-> {} | a has X
main = {
a: caller (@O {}),
# ^^^^^^{inst} O -[[caller(7)]]-> {}
# │ caller = \x -> consume x
# │ ^ O
# │ ^^^^^^^ X#consume(2): O -[[consumeO(5)]]-> {}
b: caller (@P {}),
# ^^^^^^{inst} P -[[caller(7)]]-> {}
# │ caller = \x -> consume x
# │ ^ P
# │ ^^^^^^^ X#consume(2): P -[[consumeP(6)]]-> {}
}

View file

@ -0,0 +1,45 @@
# +opt print:can_decls
# +opt infer:print_only_under_alias
app "test" provides [main] to "./platform"
Parser a : {} -> a
v1 : {}
v1 = {}
v2 : Str
v2 = ""
apply : Parser (a -> Str), a -> Parser Str
apply = \fnParser, valParser ->
\{} ->
(fnParser {}) (valParser)
map : a, (a -> Str) -> Parser Str
map = \simpleParser, transform ->
apply (\{} -> transform) simpleParser
parseInput = \{} ->
when [ map v1 (\{} -> ""), map v2 (\s -> s) ] is
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ List (({} -[[9 (({} -[[12 (Str -[[14]]-> Str)]]-> (Str -[[14]]-> Str))) Str, 9 (({} -[[12 ({} -[[13]]-> Str)]]-> ({} -[[13]]-> Str))) {}]]-> Str))
_ -> ""
main = parseInput {} == ""
# -emit:can_decls
v1 = {}
v2 = ""
apply = \fnParser, valParser -> \{} -[9]-> (fnParser {}) valParser
map = \simpleParser, transform -> apply \{} -[12]-> transform simpleParser
parseInput = \{} ->
when [
map v1 \{} -[13]-> "",
map v2 \s -[14]-> s,
] is
_ -> ""
main = Bool.isEq (parseInput {}) ""

View file

@ -0,0 +1,6 @@
app "test" provides [main] to "./platform"
g = if Bool.true then A else B
main = g ""
# ^ Str -[[2, 3]]-> [A Str, B Str]w_a

View file

@ -0,0 +1,16 @@
app "test" provides [main] to "./platform"
main =
x = Bool.true
y = Bool.false
a = "foo"
b = "bar"
foo = \{} -> if x then a else bar {}
#^^^{-1} {} -[[foo(5) Bool Bool Str Str]]-> Str
bar = \{} -> if y then b else foo {}
#^^^{-1} {} -[[bar(6) Bool Bool Str Str]]-> Str
bar {}

View file

@ -0,0 +1,22 @@
app "test" provides [main] to "./platform"
main =
compose = \f, g ->
closCompose = \x -> g (f x)
closCompose
const = \x ->
closConst = \_ -> x
closConst
list = []
res : Str -> Str
res = List.walk list (const "z") (\c1, c2 -> compose c1 c2)
# ^^^^^^^ (Str -[[closCompose(7) (Str -a-> Str) (Str -[[]]-> Str), closConst(10) Str] as a]-> Str), (Str -[[]]-> Str) -[[compose(1)]]-> (Str -[[closCompose(7) (Str -a-> Str) (Str -[[]]-> Str), closConst(10) Str] as a]-> Str)
# ^^^^^ Str -[[const(2)]]-> (Str -[[closCompose(7) (Str -a-> Str) (Str -[[]]-> Str), closConst(10) Str] as a]-> Str)
# ^^^^^^^^^^^^^^^^^^^^^^^^ (Str -[[closCompose(7) (Str -a-> Str) (Str -[[]]-> Str), closConst(10) Str] as a]-> Str), (Str -[[]]-> Str) -[[11]]-> (Str -[[closCompose(7) (Str -a-> Str) (Str -[[]]-> Str), closConst(10) Str] as a]-> Str)
#^^^{-1} Str -[[closCompose(7) (Str -a-> Str) (Str -[[]]-> Str), closConst(10) Str] as a]-> Str
res "hello"
#^^^{-1} Str -[[closCompose(7) (Str -a-> Str) (Str -[[]]-> Str), closConst(10) Str] as a]-> Str

View file

@ -0,0 +1,14 @@
app "test" provides [f] to "./platform"
thenDo = \x, callback ->
callback x
f = \{} ->
code = 10u16
bf = \{} ->
#^^{-1} {} -[[bf(5) U16]]-> *
thenDo code \_ -> bf {}
# ^^^^^^^^^^^ U16 -[[6 U16]]-> *
bf {}

View file

@ -0,0 +1,11 @@
app "test" provides [main] to "./platform"
main =
x = "abc"
getX = \{} -> x
h = \{} -> (getX {})
#^{-1} {}* -[[h(3) Str]]-> Str
h {}

View file

@ -0,0 +1,11 @@
app "test" provides [main] to "./platform"
main =
h = \{} -> (getX {})
#^{-1} {}* -[[h(1) Str]]-> Str
getX = \{} -> x
x = "abc"
h {}

View file

@ -0,0 +1 @@
Tests for open-in-output-position semantics.

View file

@ -0,0 +1,15 @@
app "test" provides [accum] to "./platform"
Q : [Green, Blue]
f : Q -> Q
f = \q -> when q is
#^{-1} Q -[[f(2)]]-> Q
Green -> Green
Blue -> Blue
accum = \q -> when q is
#^^^^^{-1} [A, B, C] -[[accum(0)]]-> [Blue, Green, Orange, Yellow]*
A -> f Green
B -> Yellow
C -> Orange

View file

@ -0,0 +1,14 @@
app "test" provides [main] to "./platform"
input : [A Str, B Str]
input = A "foo"
drop : a -> {}
drop = \_ -> {}
main = when input is
# ^^^^^ [A Str, B Str]
A _ as a -> drop a
# ^ [A Str, B Str]
B _ as b -> drop b
# ^ [A Str, B Str]

View file

@ -0,0 +1,10 @@
app "test" provides [main] to "./platform"
input : List Str
input = [ "foo", "bar" ]
main = when input is
# ^^^^^ List Str
[ _first, .. as rest ] -> 1 + List.len rest
# ^^^^ List Str
[] -> 0

View file

@ -0,0 +1,7 @@
app "test" provides [main] to "./platform"
main = when A "foo" is
A _ as a -> a
# ^ [A Str]w_a
b -> b
# ^ [A Str]w_a

View file

@ -0,0 +1,6 @@
app "test" provides [main] to "./platform"
Pair x _ = Pair 0 1
main = x
# ^ Num w_a

View file

@ -0,0 +1,5 @@
app "test" provides [x] to "./platform"
x : U16
x = '.'
#^{-1} U16

View file

@ -0,0 +1,5 @@
app "test" provides [x] to "./platform"
x : U32
x = '.'
#^{-1} U32

View file

@ -0,0 +1,5 @@
app "test" provides [x] to "./platform"
x : U8
x = '.'
#^{-1} U8

View file

@ -0,0 +1,10 @@
app "test" provides [main] to "./platform"
f : U16 -> _
f = \c ->
when c is
'.' -> 'A'
c1 -> c1
main = f
# ^ U16 -[[f(1)]]-> U16

View file

@ -0,0 +1,10 @@
app "test" provides [main] to "./platform"
f : U32 -> _
f = \c ->
when c is
'.' -> 'A'
c1 -> c1
main = f
# ^ U32 -[[f(1)]]-> U32

View file

@ -0,0 +1,10 @@
app "test" provides [main] to "./platform"
f : U8 -> _
f = \c ->
when c is
'.' -> 'A'
c1 -> c1
main = f
# ^ U8 -[[f(1)]]-> U8

View file

@ -0,0 +1,6 @@
app "test" provides [main] to "./platform"
f : { x ? Str, y ? Str } -> {}
main = f {x : ""}
# ^ { x : Str, y ? Str } -[[f(1)]]-> {}

View file

@ -0,0 +1,8 @@
app "test" provides [b] to "./platform"
O := {} -> {}
a = @O \{} -> ((\@O f -> f {}) b)
b = a
#^{-1} O

View file

@ -0,0 +1,6 @@
app "test" provides [fx] to "./platform"
after : ({} -> a), ({} -> b) -> ({} -> b)
fx = after (\{} -> {}) \{} -> if Bool.true then fx {} else {}
#^^{-1} {} -[[]]-> {}

View file

@ -0,0 +1,9 @@
app "test" provides [main] to "./platform"
main =
after : ({} -> a), ({} -> b) -> ({} -> b)
fx = after (\{} -> {}) \{} -> if Bool.true then fx {} else {}
fx
# ^^ {} -[[]]-> {}

View file

@ -0,0 +1,13 @@
app "test" provides [doIt] to "./platform"
Effect : [
DoIt {} ({} -> Effect),
]
Task := ({} -> Effect) -> Effect
doIt : {} -> Task
doIt = \{} ->
#^^^^{-1} {} -[[doIt(0)]]-> Task
@Task \toNext ->
DoIt {} \{} -> (toNext {})

View file

@ -0,0 +1,11 @@
# +opt infer:print_only_under_alias
app "test" provides [job] to "./platform"
F : [Bar, FromG G]
G : [G {lst : List F}]
job : { lst : List F } -> G
job = \config -> G config
#^^^{-1} { lst : List [Bar, FromG ([G { lst : List [Bar, FromG a] }] as a)] } -[[job(0)]]-> [G { lst : List [Bar, FromG a] }] as a
# ^^^^^^^^ [G { lst : List [Bar, FromG a] }] as a
# ^^^^^^ { lst : List [Bar, FromG ([G { lst : List [Bar, FromG a] }] as a)] }

View file

@ -0,0 +1,6 @@
app "test" provides [translateStatic] to "./platform"
translateStatic : _ -> _
translateStatic = \Element c ->
#^^^^^^^^^^^^^^^{-1} [Element (List a)] as a -[[translateStatic(0)]]-> [Element (List b)]* as b
Element (List.map c translateStatic)

View file

@ -0,0 +1,25 @@
# +opt infer:print_only_under_alias
app "test" provides [main] to "./platform"
Input := [
FromJob Job
]
Job := [
Job (List Input)
]
job : List Input -> Job
job = \inputs ->
@Job (Job inputs)
helloWorld : Job
helloWorld =
@Job ( Job [ @Input (FromJob greeting) ] )
# ^^^^^^^^^^^^^^^^^^^^^^^^^ [FromJob ([Job (List [FromJob a])] as a)]
greeting : Job
greeting =
job []
main = (\_ -> "Which platform am I running on now?\n") helloWorld

View file

@ -0,0 +1,448 @@
use std::{
error::Error,
ffi::OsStr,
fs, io,
path::{Path, PathBuf},
process::Command,
};
use lazy_static::lazy_static;
use libtest_mimic::{run, Arguments, Failed, Trial};
use regex::Regex;
use test_solve_helpers::{
infer_queries, Elaboration, InferOptions, InferredProgram, InferredQuery, MUTLILINE_MARKER,
};
fn main() -> Result<(), Box<dyn Error>> {
let args = Arguments::from_args();
let test_files = collect_uitest_files()?;
let tests = test_files
.into_iter()
.map(into_test)
.collect::<Result<_, _>>()?;
run(&args, tests).exit()
}
lazy_static! {
static ref UITEST_PATH: PathBuf = PathBuf::from(std::env!("ROC_WORKSPACE_DIR"))
.join("crates")
.join("compiler")
.join("uitest")
.join("tests");
/// # +opt infer:<opt>
static ref RE_OPT_INFER: Regex =
Regex::new(r#"# \+opt infer:(?P<opt>.*)"#).unwrap();
/// # +opt print:<opt>
static ref RE_OPT_PRINT: Regex =
Regex::new(r#"# \+opt print:(?P<opt>.*)"#).unwrap();
}
fn collect_uitest_files() -> io::Result<Vec<PathBuf>> {
let mut tests = Vec::with_capacity(200);
let mut dirs_to_visit = vec![UITEST_PATH.clone()];
while let Some(dir) = dirs_to_visit.pop() {
for entry in fs::read_dir(dir)? {
let entry = entry?;
let path = entry.path();
let entry_type = entry.file_type()?;
if entry_type.is_dir() {
dirs_to_visit.push(path);
continue;
}
if path.extension() == Some(OsStr::new("txt")) {
tests.push(path);
}
}
}
Ok(tests)
}
fn into_test(path: PathBuf) -> io::Result<Trial> {
let name = path
.strip_prefix(UITEST_PATH.as_path())
.expect("collected path does not have uitest prefix")
.display()
.to_string();
let trial = Trial::test(name, move || run_test(path));
Ok(trial)
}
fn run_test(path: PathBuf) -> Result<(), Failed> {
let data = std::fs::read_to_string(&path)?;
let TestCase {
infer_options,
print_options,
source,
} = TestCase::parse(data)?;
let inferred_program = infer_queries(&source, infer_options)?;
{
let mut fd = fs::OpenOptions::new()
.write(true)
.truncate(true)
.open(&path)?;
assemble_query_output(&mut fd, &source, inferred_program, print_options)?;
}
check_for_changes(&path)?;
Ok(())
}
const EMIT_HEADER: &str = "# -emit:";
struct TestCase {
infer_options: InferOptions,
print_options: PrintOptions,
source: String,
}
#[derive(Default)]
struct PrintOptions {
can_decls: bool,
}
impl TestCase {
fn parse(mut data: String) -> Result<Self, Failed> {
// Drop anything following `# -emit:` header lines; that's the output.
if let Some(drop_at) = data.find(EMIT_HEADER) {
data.truncate(drop_at);
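// Also drop any trailing whitespace that preceded the emit header.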
data.truncate(data.trim_end().len());
}
Ok(TestCase {
infer_options: Self::parse_infer_options(&data)?,
print_options: Self::parse_print_options(&data)?,
source: data,
})
}
fn parse_infer_options(data: &str) -> Result<InferOptions, Failed> {
let mut infer_opts = InferOptions {
no_promote: true,
..Default::default()
};
let found_infer_opts = RE_OPT_INFER.captures_iter(data);
for infer_opt in found_infer_opts {
let opt = infer_opt.name("opt").unwrap().as_str();
match opt {
"allow_errors" => infer_opts.allow_errors = true,
"print_only_under_alias" => infer_opts.print_only_under_alias = true,
other => return Err(format!("unknown infer option: {other}").into()),
}
}
Ok(infer_opts)
}
fn parse_print_options(data: &str) -> Result<PrintOptions, Failed> {
let mut print_opts = PrintOptions::default();
let found_infer_opts = RE_OPT_PRINT.captures_iter(data);
for infer_opt in found_infer_opts {
let opt = infer_opt.name("opt").unwrap().as_str();
match opt {
"can_decls" => print_opts.can_decls = true,
other => return Err(format!("unknown print option: {other}").into()),
}
}
Ok(print_opts)
}
}
fn check_for_changes(path: &Path) -> Result<(), Failed> {
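// `git add -N` records an intent-to-add, so brand-new fixture files also show up in `git diff` below.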
Command::new("git").args(["add", "-N"]).arg(path).output()?;
let has_changes = Command::new("git")
.args(["diff", "--color=always"])
.arg(path)
.output()?;
if !has_changes.stdout.is_empty() {
return Err(format!(
"{}\nOutput has changed. If it looks okay, `git` add the file.",
std::str::from_utf8(&has_changes.stdout)?
)
.into());
}
Ok(())
}
/// Assemble the output for a test, with queries elaborated in-line.
fn assemble_query_output(
writer: &mut impl io::Write,
source: &str,
inferred_program: InferredProgram,
print_options: PrintOptions,
) -> io::Result<()> {
// Reverse the queries so that we can pop them off the end as we pass through the lines.
let (queries, program) = inferred_program.decompose();
let mut sorted_queries = queries.into_sorted();
sorted_queries.reverse();
let mut reflow = Reflow::new_unindented(writer);
write_source_with_answers(&mut reflow, source, sorted_queries, 0)?;
// Finish up with any remaining print options we were asked to provide.
let PrintOptions { can_decls } = print_options;
if can_decls {
writeln!(writer, "\n{EMIT_HEADER}can_decls")?;
program.write_can_decls(writer)?;
}
Ok(())
}
fn write_source_with_answers<W: io::Write>(
reflow: &mut Reflow<'_, W>,
source: &str,
mut sorted_queries: Vec<InferredQuery>,
offset_line: usize,
) -> io::Result<()> {
for (i, line) in source.lines().enumerate() {
let i = i + offset_line;
let mut is_query_line = false;
// Write all elaborated query lines if applicable.
while matches!(
sorted_queries.last(),
Some(InferredQuery {
source_line_column,
..
}) if source_line_column.line == i as _
) {
let inferred = sorted_queries.pop().unwrap();
reflow.scoped(|reflow| reconstruct_comment_line(reflow, inferred))?;
reflow.write("\n")?;
is_query_line = true;
}
// If this was previously a multi-line query output line, skip it, since we already wrote
// the new output above.
if line.contains(MUTLILINE_MARKER) {
continue;
}
// Otherwise, write the Roc source line.
if !is_query_line {
reflow.write(line.trim_end())?;
reflow.write("\n")?;
}
}
let mut sorted_queries = sorted_queries.into_iter().peekable();
while let Some(sorted_query) = sorted_queries.next() {
reflow.scoped(|reflow| reconstruct_comment_line(reflow, sorted_query))?;
// Only write a newline if we're not yet at the end of the source.
// Otherwise, a newline will be written for us after exiting the reconstruction of the
// comment line, since this must happen in the reconstruction of a multi-line query.
if sorted_queries.peek().is_some() {
reflow.write("\n")?;
}
}
Ok(())
}
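/// Re-emit a query comment line together with the inferred answer for its query,
/// recursing into any instantiated queries the elaboration contains.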
fn reconstruct_comment_line<W: io::Write>(
reflow: &mut Reflow<'_, W>,
inferred: InferredQuery,
) -> io::Result<()> {
let InferredQuery {
comment_column,
source_line_column,
source,
elaboration,
} = inferred;
reflow.add_layer(comment_column as _, source_line_column.column as _);
reflow.write_and_bump(&format!("{source} "))?;
match elaboration {
Elaboration::Specialization {
specialized_name,
typ,
} => {
reflow.write_and_bump(&format!("{specialized_name}: "))?;
reflow.write(&typ)
}
Elaboration::Source { source: _, typ } => reflow.write(&typ),
Elaboration::Instantiation {
typ,
source,
offset_line,
queries_in_instantiation,
} => {
reflow.write(&typ)?;
// Write the source on a new line, but at the reflow column the comment is aligned at.
reflow.set_content(source_line_column.column as _);
reflow.write("\n")?;
let queries = queries_in_instantiation.into_sorted();
write_source_with_answers(reflow, source.trim_end(), queries, offset_line as _)
}
}
}
struct Reflow<'a, W: io::Write> {
writer: &'a mut W,
state: ReflowState,
}
#[derive(Clone, Debug)]
struct ReflowState {
/// True if the first line of the elaboration comment has been written.
top_line_written: bool,
/// Number of content-column prefixes written for the current line.
/// When this equals the number of content columns, the whole prefix for the line has been written.
content_prefixes_written: usize,
/// The column at which to insert the comment prefix "#".
comment_column: usize,
/// The columns at which content occurs.
/// If the stack holds more than one column, then
/// - at the first content column, the [MUTLILINE_MARKER] may be written as appropriate
/// - for each subsequent column, spaces are inserted until that column is reached.
content_columns: Vec<usize>,
}
impl<'a, W: io::Write> std::ops::Deref for Reflow<'a, W> {
type Target = ReflowState;
fn deref(&self) -> &Self::Target {
&self.state
}
}
impl<'a, W: io::Write> std::ops::DerefMut for Reflow<'a, W> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.state
}
}
impl<'a, W: io::Write> Reflow<'a, W> {
fn new_unindented(writer: &'a mut W) -> Self {
Self {
writer,
state: ReflowState {
top_line_written: false,
content_prefixes_written: 0,
comment_column: 0,
content_columns: vec![],
},
}
}
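/// Run `f` against the current reflow state, then restore the state that was in
/// effect before the call, so layers added inside `f` do not leak into later writes.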
fn scoped<T>(&mut self, f: impl FnOnce(&mut Self) -> T) -> T {
let state = self.state.clone();
let result = f(self);
self.state = state;
result
}
fn add_layer(&mut self, comment_column: usize, content_column: usize) {
if self.comment_column == 0 {
// If the comment column is not yet set, this is the top-level and we should update the
// state; otherwise we already have a comment column, and only the new content column
// needs to be pushed.
self.comment_column = comment_column;
}
self.content_columns.push(content_column);
}
fn set_content(&mut self, content_column: usize) {
let latest_column = self
.content_columns
.last_mut()
.expect("cannot set content before adding a layer");
*latest_column = content_column;
}
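/// Write `content`, splitting on newlines; at the start of each line the comment
/// prefix `#`, the content-column padding, and (once the top line has been written)
/// the multi-line marker are emitted before the content itself.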
fn write(&mut self, content: &str) -> io::Result<()> {
for (i, content_line) in content.split('\n').enumerate() {
if i > 0 {
// new line
writeln!(self.writer)?;
self.content_prefixes_written = 0;
}
// If the content columns are empty, this is top-level and we
// have no prefix to write.
if self.content_prefixes_written != self.content_columns.len() {
if self.content_prefixes_written == 0 {
self.write_n_spaces(self.comment_column)?;
write!(self.writer, "#")?;
// For the first content column, write spaces up to the column, and then, if we are
// in a multi-line context, add the multi-line marker.
{
self.write_n_spaces(self.content_columns[0] - self.comment_column - 1)?;
if self.top_line_written {
write!(self.writer, "{MUTLILINE_MARKER} ")?;
}
}
self.content_prefixes_written = 1;
}
// For all remaining content columns, fill them in with spaces.
let remaining_content_columns = self
.content_columns
.iter()
.skip(self.content_prefixes_written);
self.write_n_spaces(remaining_content_columns.sum())?;
self.content_prefixes_written = self.content_columns.len();
self.top_line_written = true;
}
write!(self.writer, "{content_line}")?;
}
Ok(())
}
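/// Write a single line of `content` and advance the innermost content column past it,
/// so that subsequent writes start where this content ended.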
fn write_and_bump(&mut self, content: &str) -> io::Result<()> {
assert!(
content.lines().count() == 1,
"cannot bump with multi-line content"
);
self.write(content)?;
let column = self
.content_columns
.last_mut()
.expect("cannot write_and_bump before adding layer");
*column += content.len();
Ok(())
}
fn write_n_spaces(&mut self, n: usize) -> io::Result<()> {
for _ in 0..n {
write!(self.writer, " ")?;
}
Ok(())
}
}
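// A minimal usage sketch of `Reflow` (not part of the original change): it assumes a
// single elaboration layer with the comment prefix at column 4 and content at column 6,
// and only checks that a bumped prefix followed by plain content lands where expected.
#[cfg(test)]
mod reflow_sketch {
    use super::Reflow;
    use std::io;

    #[test]
    fn writes_a_single_elaboration_line() -> io::Result<()> {
        let mut out: Vec<u8> = Vec::new();
        let mut reflow = Reflow::new_unindented(&mut out);
        reflow.scoped(|reflow| {
            reflow.add_layer(4, 6);
            reflow.write_and_bump("main ")?;
            reflow.write("List w_a")
        })?;
        // Four spaces up to the comment column, `#`, padding to the content column,
        // then the bumped prefix and the remaining content.
        assert_eq!(std::str::from_utf8(&out).unwrap(), "    # main List w_a");
        Ok(())
    }
}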

View file

@@ -0,0 +1,7 @@
app "test" provides [main] to "./platform"
main =
#^^^^{-1} (a -[[]]-> b) -[[main(0)]]-> (a -[[y(2) (a -[[]]-> b)]]-> b)
\x ->
y = \z -> x z
y

View file

@@ -0,0 +1,4 @@
app "test" provides [main] to "./platform"
main = []
#^^^^{-1} List w_a

View file

@@ -0,0 +1,12 @@
app "test" provides [main] to "./platform"
main = \{} -> when Red is
#^^^^{-1} {}* -[[main(0)]]-> { y : [Green, Red]a, z : [Green, Red]a }
x ->
y : [Red]_
y = x
z : [Red, Green]_
z = x
{y, z}