Merge branch 'trunk' into cohere-comment-formatting

commit dd525d3a13 by Chadtech, 2020-01-09 00:35:32 -05:00, committed by GitHub
39 changed files with 2917 additions and 778 deletions

Cargo.lock (generated), 214 changed lines
View file

@ -39,6 +39,11 @@ name = "bumpalo"
version = "2.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "byteorder"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "bytes"
version = "0.5.3"
@ -62,6 +67,105 @@ dependencies = [
"bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "cranelift"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cranelift-codegen 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
"cranelift-frontend 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "cranelift-bforest"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cranelift-entity 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "cranelift-codegen"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
"cranelift-bforest 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
"cranelift-codegen-meta 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
"cranelift-codegen-shared 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
"cranelift-entity 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
"smallvec 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"target-lexicon 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
"thiserror 1.0.9 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "cranelift-codegen-meta"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cranelift-codegen-shared 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
"cranelift-entity 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "cranelift-codegen-shared"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "cranelift-entity"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "cranelift-frontend"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cranelift-codegen 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
"smallvec 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"target-lexicon 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "cranelift-module"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cranelift-codegen 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
"cranelift-entity 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
"thiserror 1.0.9 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "cranelift-native"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cranelift-codegen 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
"raw-cpuid 7.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
"target-lexicon 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "cranelift-simplejit"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cranelift-codegen 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
"cranelift-module 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
"cranelift-native 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
"errno 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
"region 2.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"target-lexicon 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "difference"
version = "2.0.0"
@ -81,6 +185,25 @@ dependencies = [
"regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "errno"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"errno-dragonfly 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "errno-dragonfly"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"gcc 0.3.55 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "fixedbitset"
version = "0.1.9"
@ -178,6 +301,11 @@ dependencies = [
"slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "gcc"
version = "0.3.55"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "hermit-abi"
version = "0.1.5"
@ -300,6 +428,14 @@ dependencies = [
"cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "mach"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "maplit"
version = "1.0.2"
@ -556,6 +692,16 @@ dependencies = [
"rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "raw-cpuid"
version = "7.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"cc 1.0.48 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rdrand"
version = "0.4.0"
@ -585,11 +731,26 @@ name = "regex-syntax"
version = "0.6.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "region"
version = "2.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
"mach 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "roc"
version = "0.1.0"
dependencies = [
"bumpalo 2.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
"cranelift 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
"cranelift-codegen 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
"cranelift-module 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
"cranelift-simplejit 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
"futures 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"im 14.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"im-rc 14.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -603,10 +764,19 @@ dependencies = [
"pretty_assertions 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
"quickcheck 0.8.5 (registry+https://github.com/rust-lang/crates.io-index)",
"quickcheck_macros 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
"target-lexicon 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
"wyhash 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rustc_version"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "scopeguard"
version = "1.0.0"
@ -664,6 +834,29 @@ dependencies = [
"unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "target-lexicon"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "thiserror"
version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"thiserror-impl 1.0.9 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "thiserror-impl"
version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 1.0.12 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "thread_local"
version = "0.3.6"
@ -742,13 +935,26 @@ dependencies = [
"checksum bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693"
"checksum bitmaps 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "81e039a80914325b37fde728ef7693c212f0ac913d5599607d7b95a9484aae0b"
"checksum bumpalo 2.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ad807f2fc2bf185eeb98ff3a901bd46dc5ad58163d0fa4577ba0d25674d71708"
"checksum byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a7c3dd8985a7111efc5c80b44e23ecdd8c007de8ade3b96595387e812b957cf5"
"checksum bytes 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "10004c15deb332055f7a4a208190aed362cf9a7c2f6ab70a305fba50e1105f38"
"checksum cc 1.0.48 (registry+https://github.com/rust-lang/crates.io-index)" = "f52a465a666ca3d838ebbf08b241383421412fe7ebb463527bba275526d89f76"
"checksum cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
"checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f"
"checksum cranelift 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f0f3b64a6eff63e7ea2dc39ecd36fa43f9f790c8c81ad802748bf51981409701"
"checksum cranelift-bforest 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)" = "56aa72ef104c5d634f2f9e84ef2c47e116c1d185fae13f196b97ca84b0a514f1"
"checksum cranelift-codegen 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)" = "460b9d20793543599308d22f5a1172c196e63a780c4e9aacb0b3f4f63d63ffe1"
"checksum cranelift-codegen-meta 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)" = "cc70e4e8ccebd53a4f925147def857c9e9f7fe0fdbef4bb645a420473e012f50"
"checksum cranelift-codegen-shared 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3992000be4d18df0fe332b7c42c120de896e8ec54cd7b6cfa050910a8c9f6e2f"
"checksum cranelift-entity 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)" = "722957e05064d97a3157bf0976deed0f3e8ee4f8a4ce167a7c724ca63a4e8bd9"
"checksum cranelift-frontend 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)" = "13051964302dc7948e8869735de42591559ea55e319b9b92da5b38f8e6a75cb7"
"checksum cranelift-module 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f1632d5670f0b02ce967a385b859b7af9593485f6e198ebf0970fc8b0f8f9841"
"checksum cranelift-native 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)" = "21398a0bc6ba389ea86964ac4a495426dd61080f2ddd306184777a8560fe9976"
"checksum cranelift-simplejit 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1236096fd657486d6ef35c8958a681f3f1377d42d7dc371ef9c7193e65c07521"
"checksum difference 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "524cbf6897b527295dff137cec09ecf3a05f4fddffd7dfcd1585403449e74198"
"checksum either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "bb1f6b1ce1c140482ea30ddd3335fc0024ac7ee112895426e0a629a6c20adfe3"
"checksum env_logger 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "aafcde04e90a5226a6443b7aabdb016ba2f8307c847d524724bd9b346dd1a2d3"
"checksum errno 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)" = "c2a071601ed01b988f896ab14b95e67335d1eeb50190932a1320f7fe3cadc84e"
"checksum errno-dragonfly 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "14ca354e36190500e1e1fb267c647932382b54053c50b14970856c0b00a35067"
"checksum fixedbitset 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "86d4de0081402f5e88cdac65c8dcdcc73118c1a7a465e2a05f0da05843a8ea33"
"checksum fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2fad85553e09a6f881f739c29f0b00b0f01357c743266d478b68951ce23285f3"
"checksum fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba"
@ -761,6 +967,7 @@ dependencies = [
"checksum futures-sink 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "171be33efae63c2d59e6dbba34186fe0d6394fb378069a76dfd80fdcffd43c16"
"checksum futures-task 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0bae52d6b29cf440e298856fec3965ee6fa71b06aa7495178615953fd669e5f9"
"checksum futures-util 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c0d66274fb76985d3c62c886d1da7ac4c0903a8c9f754e8fe0f35a6a6cc39e76"
"checksum gcc 0.3.55 (registry+https://github.com/rust-lang/crates.io-index)" = "8f5f3913fa0bfe7ee1fd8248b6b9f42a5af4b9d65ec2dd2c3c26132b950ecfc2"
"checksum hermit-abi 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "f629dc602392d3ec14bfc8a09b5e644d7ffd725102b48b81e59f90f2633621d7"
"checksum im 14.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b047368c60cde48aa2b2e2fa4794eec965526749b60b55f8cec1bad926e7f6e9"
"checksum im-rc 14.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ed5135086ffe74654d797c02fd673c4046cdb7f552c98f1b1aa6851d6572f84f"
@ -774,6 +981,7 @@ dependencies = [
"checksum llvm-sys 80.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "cf2969773884a5701f0c255e2a14d48d4522a66db898ec1088cb21879a228377"
"checksum lock_api 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e57b3997725d2b60dbec1297f6c2e2957cc383db1cebd6be812163f969c7d586"
"checksum log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)" = "14b6052be84e6b71ab17edffc2eeabf5c2c3ae1fdb464aae35ac50c67a44e1f7"
"checksum mach 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "86dd2487cdfea56def77b88438a2c915fb45113c5319bfe7e14306ca4cd0b0e1"
"checksum maplit 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d"
"checksum memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "88579771288728879b57485cc7d6b07d648c9f0141eb955f8ab7f9d45394468e"
"checksum num_cpus 1.11.1 (registry+https://github.com/rust-lang/crates.io-index)" = "76dac5ed2a876980778b8b85f75a71b6cbf0db0b1232ee12f826bccb00d09d72"
@ -805,10 +1013,13 @@ dependencies = [
"checksum rand_pcg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "abf9b09b01790cfe0364f52bf32995ea3c39f4d2dd011eac241d2914146d0b44"
"checksum rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cbf7e9e623549b0e21f6e97cf8ecf247c1a8fd2e8a992ae265314300b2455d5c"
"checksum rand_xoshiro 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a9fcdd2e881d02f1d9390ae47ad8e5696a9e4be7b547a1da2afbc61973217004"
"checksum raw-cpuid 7.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "b4a349ca83373cfa5d6dbb66fd76e58b2cca08da71a5f6400de0a0a6a9bceeaf"
"checksum rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2"
"checksum redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)" = "2439c63f3f6139d1b57529d16bc3b8bb855230c8efcc5d3a896c8bea7c3b1e84"
"checksum regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "dc220bd33bdce8f093101afe22a037b8eb0e5af33592e6a9caafff0d4cb81cbd"
"checksum regex-syntax 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)" = "11a7e20d1cce64ef2fed88b66d347f88bd9babb82845b2b858f3edbf59a4f716"
"checksum region 2.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "448e868c6e4cfddfa49b6a72c95906c04e8547465e9536575b95c70a4044f856"
"checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"
"checksum scopeguard 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b42e15e59b18a828bbf5c58ea01debb36b9b096346de35d941dcb89009f24a0d"
"checksum semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403"
"checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
@ -817,6 +1028,9 @@ dependencies = [
"checksum smallvec 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "44e59e0c9fa00817912ae6e4e6e3c4fe04455e75699d06eedc7d85917ed8e8f4"
"checksum syn 0.15.44 (registry+https://github.com/rust-lang/crates.io-index)" = "9ca4b3b69a77cbe1ffc9e198781b7acb0c7365a883670e8f1c1bc66fba79a5c5"
"checksum syn 1.0.12 (registry+https://github.com/rust-lang/crates.io-index)" = "ddc157159e2a7df58cd67b1cace10b8ed256a404fb0070593f137d8ba6bef4de"
"checksum target-lexicon 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6f4c118a7a38378f305a9e111fcb2f7f838c0be324bfb31a77ea04f7f6e684b4"
"checksum thiserror 1.0.9 (registry+https://github.com/rust-lang/crates.io-index)" = "6f357d1814b33bc2dc221243f8424104bfe72dbe911d5b71b3816a2dff1c977e"
"checksum thiserror-impl 1.0.9 (registry+https://github.com/rust-lang/crates.io-index)" = "eb2e25d25307eb8436894f727aba8f65d07adf02e5b35a13cebed48bd282bfef"
"checksum thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c6b53e329000edc2b34dbe8545fd20e55a333362d0a321909685a19bd28c3f1b"
"checksum tokio 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0e1bef565a52394086ecac0a6fa3b8ace4cb3a138ee1d96bd2b93283b56824e3"
"checksum typenum 1.11.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6d2783fe2d6b8c1101136184eb41be8b1ad379e4657050b8aaff0c79ee7575f9"

View file

@ -22,9 +22,14 @@ inlinable_string = "0.1.0"
inkwell = { git = "https://github.com/TheDan64/inkwell", branch = "llvm8-0" }
futures = "0.3"
lazy_static = "1.4"
target-lexicon = "0.9" # NOTE: we must use the same version of target-lexicon as cranelift!
cranelift = "0.52" # All cranelift crates should have the same version!
cranelift-simplejit = "0.52" # All cranelift crates should have the same version!
cranelift-module = "0.52" # All cranelift crates should have the same version!
cranelift-codegen = "0.52" # All cranelift crates should have the same version!
[dev-dependencies]
pretty_assertions = "0.5.1"
pretty_assertions = "0.5.1 "
maplit = "1.0.1"
indoc = "0.3.3"
quickcheck = "0.8"

View file

@ -73,26 +73,25 @@ fn can_annotation_help(
Type::Variable(var)
}
}
Record(fields) => {
Record { fields, ext } => {
let mut field_types = SendMap::default();
for field in fields {
for field in fields.iter() {
can_assigned_field(&field.value, var_store, rigids, &mut field_types);
}
// This is a closed record, so the fragment must be {}
let fragment_type = Type::EmptyRec;
let ext_type = match ext {
Some(loc_ann) => can_annotation_help(&loc_ann.value, var_store, rigids),
None => Type::EmptyRec,
};
Type::Record(field_types, Box::new(fragment_type))
Type::Record(field_types, Box::new(ext_type))
}
RecordFragment(fields, fragment) => {
let mut field_types = SendMap::default();
for field in fields {
can_assigned_field(&field.value, var_store, rigids, &mut field_types);
}
let fragment_type = can_annotation_help(&fragment.value, var_store, rigids);
Type::Record(field_types, Box::new(fragment_type))
TagUnion { tags, ext } => {
panic!(
"TODO canonicalize tag union annotation: {:?} {:?}",
tags, ext
);
}
SpaceBefore(nested, _) | SpaceAfter(nested, _) => {
can_annotation_help(nested, var_store, rigids)
@ -118,11 +117,6 @@ fn can_assigned_field<'a>(
let label = Lowercase::from(field_name.value);
field_types.insert(label, field_type);
}
OptionalField(field_name, _, annotation) => {
let field_type = can_annotation_help(&annotation.value, var_store, rigids);
let label = Lowercase::from(field_name.value);
field_types.insert(label, field_type);
}
LabelOnly(loc_field_name) => {
// Interpret { a, b } as { a : a, b : b }
let field_name = Lowercase::from(loc_field_name.value);
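To make the new ext handling above concrete, here is a short illustration of my own (not part of the commit) of what can_annotation_help now produces for closed and open record annotations; the `{ name : Str }r` syntax for an open record is the one described in the parser's doc comments further down.

// Closed record: no row variable, so the extension type is EmptyRec.
//   { name : Str }    ->  Type::Record({ name: Str }, Box::new(Type::EmptyRec))
// Open record: the row variable is canonicalized and used as the extension.
//   { name : Str }r   ->  Type::Record({ name: Str }, Box::new(Type::Variable(r)))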

View file

@ -1011,8 +1011,6 @@ fn canonicalize_field<'a>(
)
}
OptionalField(_, _, _) => panic!("invalid in expressions"),
// A label with no value, e.g. `{ name }` (this is sugar for { name: name })
LabelOnly(_) => {
panic!("Somehow a LabelOnly record field was not desugared!");

View file

@ -298,7 +298,6 @@ fn desugar_field<'a>(
spaces,
desugar_expr(arena, loc_expr),
),
OptionalField(_, _, _) => panic!("invalid in expressions"),
LabelOnly(loc_str) => {
// Desugar { x } into { x: x }
let loc_expr = Located {

src/crane/build.rs (new file), 428 lines
View file

@ -0,0 +1,428 @@
use bumpalo::Bump;
use cranelift::prelude::{
AbiParam, ExternalName, FunctionBuilder, FunctionBuilderContext, MemFlags,
};
use cranelift_codegen::ir::entities::{StackSlot, Value};
use cranelift_codegen::ir::stackslot::{StackSlotData, StackSlotKind};
use cranelift_codegen::ir::{immediates::Offset32, types, InstBuilder, Signature, Type};
use cranelift_codegen::isa::TargetFrontendConfig;
use cranelift_codegen::Context;
use cranelift_module::{Backend, FuncId, Linkage, Module};
use inlinable_string::InlinableString;
use crate::collections::ImMap;
use crate::crane::convert::{content_to_crane_type, sig_from_layout, type_from_layout};
use crate::mono::expr::{Expr, Proc, Procs};
use crate::mono::layout::Layout;
use crate::subs::Subs;
type Scope = ImMap<InlinableString, ScopeEntry>;
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum ScopeEntry {
Stack { expr_type: Type, slot: StackSlot },
Heap { expr_type: Type, ptr: Value },
Arg { expr_type: Type, param: Value },
Func { sig: Signature, func_id: FuncId },
}
pub struct Env<'a> {
pub arena: &'a Bump,
pub cfg: TargetFrontendConfig,
pub subs: Subs,
}
pub fn build_expr<'a, B: Backend>(
env: &Env<'a>,
scope: &Scope,
module: &mut Module<B>,
builder: &mut FunctionBuilder,
expr: &Expr<'a>,
procs: &Procs<'a>,
) -> Value {
use crate::mono::expr::Expr::*;
match expr {
Int(num) => builder.ins().iconst(types::I64, *num),
Float(num) => builder.ins().f64const(*num),
Bool(val) => builder.ins().bconst(types::B1, *val),
Byte(val) => builder.ins().iconst(types::I8, *val as i64),
// Cond {
// cond_lhs,
// cond_rhs,
// pass,
// fail,
// ret_var,
// } => {
// let cond = Cond2 {
// cond_lhs,
// cond_rhs,
// pass,
// fail,
// ret_var: *ret_var,
// };
// build_cond(env, scope, cond, procs)
// }
Branches { .. } => {
panic!("TODO build_branches(env, scope, cond_lhs, branches, procs)");
}
Store(ref stores, ref ret) => {
let mut scope = im_rc::HashMap::clone(scope);
let arena = &env.arena;
let subs = &env.subs;
let cfg = env.cfg;
for (name, var, expr) in stores.iter() {
let val = build_expr(env, &scope, module, builder, &expr, procs);
let content = subs.get_without_compacting(*var).content;
let layout = Layout::from_content(arena, content, subs)
.unwrap_or_else(|()| panic!("TODO generate a runtime error here!"));
let expr_type = type_from_layout(cfg, &layout, subs);
let slot = builder.create_stack_slot(StackSlotData::new(
StackSlotKind::ExplicitSlot,
layout.stack_size(cfg),
));
builder.ins().stack_store(val, slot, Offset32::new(0));
// Make a new scope which includes the binding we just encountered.
// This should be done *after* compiling the bound expr, since any
// recursive (in the LetRec sense) bindings should already have
// been extracted as procedures. Nothing in here should need to
// access itself!
scope = im_rc::HashMap::clone(&scope);
scope.insert(name.clone(), ScopeEntry::Stack { expr_type, slot });
}
build_expr(env, &scope, module, builder, ret, procs)
}
CallByName(ref name, ref args) => {
// TODO try one of these alternative strategies (preferably the latter):
//
// 1. use SIMD string comparison to compare these strings faster
// 2. pre-register Bool.or using module.add_function, and see if LLVM inlines it
// 3. intern all these strings
if name == "Bool.or" {
panic!("TODO create a branch for ||");
} else if name == "Bool.and" {
panic!("TODO create a branch for &&");
} else {
let mut arg_vals = Vec::with_capacity(args.len());
for arg in args.iter() {
arg_vals.push(build_expr(env, scope, module, builder, arg, procs));
}
let fn_id = match scope.get(name) {
Some(ScopeEntry::Func{ func_id, .. }) => *func_id,
other => panic!(
"CallByName could not find function named {:?} in scope; instead, found {:?} in scope {:?}",
name, other, scope
),
};
let local_func = module.declare_func_in_func(fn_id, &mut builder.func);
let call = builder.ins().call(local_func, &arg_vals);
let results = builder.inst_results(call);
debug_assert!(results.len() == 1);
results[0]
}
}
FunctionPointer(ref name) => {
let fn_id = match scope.get(name) {
Some(ScopeEntry::Func{ func_id, .. }) => *func_id,
other => panic!(
"FunctionPointer could not find function named {:?} in scope; instead, found {:?} in scope {:?}",
name, other, scope
),
};
let func_ref = module.declare_func_in_func(fn_id, &mut builder.func);
builder.ins().func_addr(env.cfg.pointer_type(), func_ref)
}
CallByPointer(ref sub_expr, ref args, ref fn_var) => {
let subs = &env.subs;
let mut arg_vals = Vec::with_capacity(args.len());
for arg in args.iter() {
arg_vals.push(build_expr(env, scope, module, builder, arg, procs));
}
let content = subs.get_without_compacting(*fn_var).content;
let layout = Layout::from_content(env.arena, content, &subs)
.unwrap_or_else(|()| panic!("TODO generate a runtime error here!"));
let sig = sig_from_layout(env.cfg, module, layout, &subs);
let callee = build_expr(env, scope, module, builder, sub_expr, procs);
let sig_ref = builder.import_signature(sig);
let call = builder.ins().call_indirect(sig_ref, callee, &arg_vals);
let results = builder.inst_results(call);
debug_assert!(results.len() == 1);
results[0]
}
Load(name) => match scope.get(name) {
Some(ScopeEntry::Stack { expr_type, slot }) => {
builder
.ins()
.stack_load(*expr_type, *slot, Offset32::new(0))
}
Some(ScopeEntry::Arg { param, .. }) => *param,
Some(ScopeEntry::Heap { expr_type, ptr }) => {
builder
.ins()
.load(*expr_type, MemFlags::new(), *ptr, Offset32::new(0))
}
Some(ScopeEntry::Func { .. }) => {
panic!("TODO I don't yet know how to return fn pointers")
}
None => panic!("Could not find a var for {:?} in scope {:?}", name, scope),
},
_ => {
panic!("I don't yet know how to crane build {:?}", expr);
}
}
}
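As a concrete trace of the Store/Load path above, here is a sketch of my own (using only builder calls that already appear in this function) of what build_expr emits for Store([("x", var, Int(5))], Load("x")), assuming the layout of "x" is Builtin(Int64) so its stack slot is 8 bytes:

// let val  = builder.ins().iconst(types::I64, 5);               // the Int(5) branch
// let slot = builder.create_stack_slot(StackSlotData::new(
//     StackSlotKind::ExplicitSlot,
//     8,                                                         // layout.stack_size(cfg) for Int64
// ));
// builder.ins().stack_store(val, slot, Offset32::new(0));        // bind "x" in the new scope
// builder.ins().stack_load(types::I64, slot, Offset32::new(0))   // Load("x") reads it back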
// struct Cond2<'a> {
// cond_lhs: &'a Expr<'a>,
// cond_rhs: &'a Expr<'a>,
// pass: &'a Expr<'a>,
// fail: &'a Expr<'a>,
// ret_var: Variable,
// }
// fn build_cond<'a, 'ctx, 'env>(
// env: &Env<'ctx, 'env>,
// scope: &Scope<'ctx>,
// parent: FunctionValue<'ctx>,
// cond: Cond2<'a>,
// procs: &Procs<'a>,
// ) -> BasicValueEnum<'ctx> {
// let builder = env.builder;
// let context = env.context;
// let subs = &env.subs;
// let content = subs.get_without_compacting(cond.ret_var).content;
// let ret_type = content_to_crane_type(&content, subs, context).unwrap_or_else(|err| {
// panic!(
// "Error converting cond branch ret_type content {:?} to basic type: {:?}",
// cond.pass, err
// )
// });
// let lhs = build_expr(env, scope, cond.cond_lhs, procs);
// let rhs = build_expr(env, scope, cond.cond_rhs, procs);
// match (lhs, rhs) {
// (FloatValue(lhs_float), FloatValue(rhs_float)) => {
// let comparison =
// builder.build_float_compare(FloatPredicate::OEQ, lhs_float, rhs_float, "cond");
// build_phi2(
// env, scope, comparison, cond.pass, cond.fail, ret_type, procs,
// )
// }
// (IntValue(lhs_int), IntValue(rhs_int)) => {
// let comparison = builder.build_int_compare(IntPredicate::EQ, lhs_int, rhs_int, "cond");
// build_phi2(
// env, scope, comparison, cond.pass, cond.fail, ret_type, procs,
// )
// }
// _ => panic!(
// "Tried to make a branch out of incompatible conditions: lhs = {:?} and rhs = {:?}",
// cond.cond_lhs, cond.cond_rhs
// ),
// }
// }
// fn build_branches<'a, 'ctx, 'env>(
// env: &Env<'ctx, 'env>,
// scope: &Scope<'ctx>,
// parent: FunctionValue<'ctx>,
// cond_lhs: &'a Expr<'a>,
// branches: &'a [(Expr<'a>, Expr<'a>, Expr<'a>)],
// ret_type: BasicValueEnum<'ctx>,
// procs: &Procs<'a, 'ctx>,
// ) -> BasicValueEnum<'ctx> {
// let builder = env.builder;
// let context = env.context;
// let lhs = build_expr(env, scope, cond_lhs, procs);
// let mut branch_iter = branches.into_iter();
// let content = subs.get_without_compacting(cond.ret_var).content;
// let ret_type = content_to_crane_type(&content, subs, context).unwrap_or_else(|err| {
// panic!(
// "Error converting cond branch ret_type content {:?} to basic type: {:?}",
// cond.pass, err
// )
// });
// for (cond_rhs, cond_pass, cond_else) in branches {
// let rhs = build_expr(env, scope, cond_rhs, procs);
// let pass = build_expr(env, scope, cond_pass, procs);
// let fail = build_expr(env, scope, cond_else, procs);
// let cond = Cond {
// lhs,
// rhs,
// pass,
// fail,
// ret_type,
// };
// build_cond(env, scope, cond, procs)
// }
// }
// TODO trim down these arguments
// #[allow(clippy::too_many_arguments)]
// fn build_phi2<'a, 'ctx, 'env>(
// env: &Env<'ctx, 'env>,
// scope: &Scope<'ctx>,
// parent: FunctionValue<'ctx>,
// comparison: IntValue<'ctx>,
// pass: &'a Expr<'a>,
// fail: &'a Expr<'a>,
// ret_type: BasicTypeEnum<'ctx>,
// procs: &Procs<'a>,
// ) -> BasicValueEnum<'ctx> {
// let builder = env.builder;
// let context = env.context;
// // build branch
// let then_bb = context.append_basic_block("then");
// let else_bb = context.append_basic_block("else");
// let cont_bb = context.append_basic_block("branchcont");
// builder.build_conditional_branch(comparison, &then_bb, &else_bb);
// // build then block
// builder.position_at_end(&then_bb);
// let then_val = build_expr(env, scope, pass, procs);
// builder.build_unconditional_branch(&cont_bb);
// let then_bb = builder.get_insert_block().unwrap();
// // build else block
// builder.position_at_end(&else_bb);
// let else_val = build_expr(env, scope, fail, procs);
// builder.build_unconditional_branch(&cont_bb);
// let else_bb = builder.get_insert_block().unwrap();
// // emit merge block
// builder.position_at_end(&cont_bb);
// let phi = builder.build_phi(ret_type, "branch");
// phi.add_incoming(&[
// (&Into::<BasicValueEnum>::into(then_val), &then_bb),
// (&Into::<BasicValueEnum>::into(else_val), &else_bb),
// ]);
// phi.as_basic_value()
// }
pub fn declare_proc<'a, B: Backend>(
env: &Env<'a>,
module: &mut Module<B>,
name: InlinableString,
proc: &Proc<'a>,
) -> (FuncId, Signature) {
let args = proc.args;
let subs = &env.subs;
let cfg = env.cfg;
let ret_content = subs.get_without_compacting(proc.ret_var).content;
// TODO this content_to_crane_type is duplicated when building this Proc
let ret_type = content_to_crane_type(&ret_content, subs, env.cfg).unwrap_or_else(|err| {
panic!(
"Error converting function return value content to basic type: {:?}",
err
)
});
// Create a signature for the function
let mut sig = module.make_signature();
// Add return type to the signature
sig.returns.push(AbiParam::new(ret_type));
// Add params to the signature
for (layout, _name, _var) in args.iter() {
let arg_type = type_from_layout(cfg, &layout, subs);
sig.params.push(AbiParam::new(arg_type));
}
// Declare the function in the module
let fn_id = module
.declare_function(&name, Linkage::Local, &sig)
.unwrap_or_else(|err| panic!("Error when building function {:?} - {:?}", name, err));
(fn_id, sig)
}
// TODO trim down these arguments
#[allow(clippy::too_many_arguments)]
pub fn define_proc_body<'a, B: Backend>(
env: &Env<'a>,
ctx: &mut Context,
module: &mut Module<B>,
fn_id: FuncId,
scope: &Scope,
sig: Signature,
proc: Proc<'a>,
procs: &Procs<'a>,
) {
let args = proc.args;
let subs = &env.subs;
let cfg = env.cfg;
// Build the body of the function
{
let mut scope = scope.clone();
let arena = env.arena;
ctx.func.signature = sig;
ctx.func.name = ExternalName::user(0, fn_id.as_u32());
let mut func_ctx = FunctionBuilderContext::new();
let mut builder: FunctionBuilder = FunctionBuilder::new(&mut ctx.func, &mut func_ctx);
let ebb = builder.create_ebb();
builder.switch_to_block(ebb);
builder.append_ebb_params_for_function_params(ebb);
// Add args to scope
for (&param, (_, arg_name, var)) in builder.ebb_params(ebb).iter().zip(args) {
let content = subs.get_without_compacting(*var).content;
// TODO this content_to_crane_type is duplicated when building this Proc
//
let layout = Layout::from_content(arena, content, subs)
.unwrap_or_else(|()| panic!("TODO generate a runtime error here!"));
let expr_type = type_from_layout(cfg, &layout, subs);
scope.insert(arg_name.clone(), ScopeEntry::Arg { expr_type, param });
}
let body = build_expr(env, &scope, module, &mut builder, &proc.body, procs);
builder.ins().return_(&[body]);
builder.seal_all_blocks();
builder.finalize();
}
module
.define_function(fn_id, ctx)
.expect("Defining Cranelift function failed");
module.clear_context(ctx);
}
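The declare_proc / define_proc_body split implies a two-pass driver: declare every procedure first so CallByName and FunctionPointer can find a ScopeEntry::Func, then define the bodies. The commit does not include that driver in this file, so the following is only a sketch of mine; it assumes env, module, and a populated procs map already exist, and that Scope (an im-rc map) implements Default.

pub fn build_procs<'a, B: Backend>(
    env: &Env<'a>,
    module: &mut Module<B>,
    procs: &Procs<'a>,
) -> Scope {
    let mut scope = Scope::default();
    let mut declared = Vec::new();

    // Pass 1: declare every proc, so bodies can refer to each other by name.
    for (name, opt_proc) in procs.iter() {
        if let Some(proc) = opt_proc {
            let (func_id, sig) = declare_proc(env, module, name.clone(), proc);

            scope.insert(
                name.clone(),
                ScopeEntry::Func { sig: sig.clone(), func_id },
            );
            declared.push((func_id, sig, proc.clone()));
        }
    }

    // Pass 2: now that every name resolves to a FuncId, define the bodies.
    // define_proc_body clears the context after each definition, so one
    // Context can be reused for all of them.
    let mut ctx = module.make_context();

    for (func_id, sig, proc) in declared {
        define_proc_body(env, &mut ctx, module, func_id, &scope, sig, proc, procs);
    }

    scope
}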

src/crane/convert.rs (new file), 133 lines
View file

@ -0,0 +1,133 @@
use cranelift::prelude::AbiParam;
use cranelift_codegen::ir::{types, Signature, Type};
use cranelift_codegen::isa::TargetFrontendConfig;
use crate::mono::layout::Layout;
use crate::subs::FlatType::*;
use crate::subs::{Content, Subs};
use cranelift_module::{Backend, Module};
pub fn content_to_crane_type(
content: &Content,
subs: &Subs,
cfg: TargetFrontendConfig,
) -> Result<Type, String> {
match content {
Content::Structure(flat_type) => match flat_type {
Apply {
module_name,
name,
args,
} => {
let module_name = module_name.as_str();
let name = name.as_str();
if module_name == crate::types::MOD_NUM && name == crate::types::TYPE_NUM {
let arg = *args.iter().next().unwrap();
let arg_content = subs.get_without_compacting(arg).content;
num_to_crane_type(arg_content)
} else {
panic!(
"TODO handle content_to_crane_type for FlatType::Apply of {}.{} with args {:?}",
module_name, name, args
);
}
}
Func(_, _) => Ok(cfg.pointer_type()),
other => panic!("TODO handle content_to_crane_type for {:?}", other),
},
other => Err(format!("Cannot convert {:?} to BasicTypeEnum", other)),
}
}
fn num_to_crane_type(content: Content) -> Result<Type, String> {
match content {
Content::Structure(flat_type) => match flat_type {
Apply {
module_name,
name,
args,
} => {
let module_name = module_name.as_str();
let name = name.as_str();
if module_name == crate::types::MOD_FLOAT
&& name == crate::types::TYPE_FLOATINGPOINT
&& args.is_empty()
{
debug_assert!(args.is_empty());
Ok(types::F64)
} else if module_name == crate::types::MOD_INT
&& name == crate::types::TYPE_INTEGER
&& args.is_empty()
{
debug_assert!(args.is_empty());
Ok(types::I64)
} else {
Err(format!(
"Unrecognized numeric type: {}.{} with args {:?}",
module_name, name, args
))
}
}
other => panic!(
"TODO handle num_to_crane_type (branch 0) for {:?} which is NESTED inside Num.Num",
other
),
},
other => panic!(
"TODO handle num_to_crane_type (branch 1) for {:?} which is NESTED inside Num.Num",
other
),
}
}
pub fn type_from_layout(cfg: TargetFrontendConfig, layout: &Layout<'_>, _subs: &Subs) -> Type {
use crate::mono::layout::Builtin::*;
use crate::mono::layout::Layout::*;
match layout {
Pointer(_) | FunctionPointer(_, _) => cfg.pointer_type(),
Struct(_fields) => {
panic!("TODO layout_to_crane_type for Struct");
}
Builtin(builtin) => match builtin {
Int64 => types::I64,
Float64 => types::F64,
Str => panic!("TODO layout_to_crane_type for Builtin::Str"),
Map(_, _) => panic!("TODO layout_to_crane_type for Builtin::Map"),
Set(_) => panic!("TODO layout_to_crane_type for Builtin::Set"),
},
}
}
pub fn sig_from_layout<B: Backend>(
cfg: TargetFrontendConfig,
module: &mut Module<B>,
layout: Layout,
subs: &Subs,
) -> Signature {
match layout {
Layout::FunctionPointer(args, ret) => {
let ret_type = type_from_layout(cfg, &ret, subs);
let mut sig = module.make_signature();
// Add return type to the signature
sig.returns.push(AbiParam::new(ret_type));
// Add params to the signature
for layout in args.iter() {
let arg_type = type_from_layout(cfg, &layout, subs);
sig.params.push(AbiParam::new(arg_type));
}
sig
}
_ => {
panic!("Could not make Signature from Layout {:?}", layout);
}
}
}
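A small worked example of my own (not from the diff) showing how these two functions compose for a two-argument function layout:

// Layout::FunctionPointer(&[Builtin(Int64), Builtin(Float64)], &Builtin(Int64))
//   type_from_layout maps Builtin::Int64 -> types::I64 and Builtin::Float64 -> types::F64,
//   so sig_from_layout builds a Signature with
//     params:  [AbiParam::new(types::I64), AbiParam::new(types::F64)]
//     returns: [AbiParam::new(types::I64)]
//   Any Pointer(_) or FunctionPointer(_, _) argument would instead map to cfg.pointer_type().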

View file

@ -1,3 +1,2 @@
pub mod build;
pub mod convert;
pub mod env;

View file

@ -74,8 +74,9 @@ pub fn fmt_expr<'a>(
}
buf.push_str("\"\"\"");
}
Int(string) => buf.push_str(string),
Float(string) => buf.push_str(string),
Int(string) | Float(string) | GlobalTag(string) | PrivateTag(string) => {
buf.push_str(string)
}
NonBase10Int {
base,
string,
@ -247,22 +248,6 @@ pub fn fmt_field<'a>(
buf.push(' ');
fmt_expr(buf, &value.value, indent, apply_needs_parens, true);
}
OptionalField(name, spaces, value) => {
if is_multiline {
newline(buf, indent);
}
buf.push_str(name.value);
buf.push('?');
if !spaces.is_empty() {
fmt_spaces(buf, spaces.iter(), indent);
}
buf.push(':');
buf.push(' ');
fmt_expr(buf, &value.value, indent, apply_needs_parens, true);
}
LabelOnly(name) => {
if is_multiline {
newline(buf, indent);
@ -407,7 +392,6 @@ pub fn is_multiline_field<'a, Val>(field: &'a AssignedField<'a, Val>) -> bool {
match field {
LabeledValue(_, spaces, _) => !spaces.is_empty(),
OptionalField(_, spaces, _) => !spaces.is_empty(),
LabelOnly(_) => false,
AssignedField::SpaceBefore(_, _) | AssignedField::SpaceAfter(_, _) => true,
Malformed(text) => text.chars().any(|c| c == '\n'),
@ -551,7 +535,7 @@ pub fn fmt_closure<'a>(
any_args_printed = true;
}
fmt_pattern(buf, &loc_pattern.value, indent, true);
fmt_pattern(buf, &loc_pattern.value, indent, false);
}
if !arguments_are_multiline {
@ -591,6 +575,19 @@ pub fn fmt_record<'a>(
) {
buf.push('{');
match _update {
None => {}
// We are presuming this to be a Var()
// If it wasn't a Var() we would not have made
// it this far. For example "{ 4 & hello = 9 }"
// doesn't make sense.
Some(record_var) => {
buf.push(' ');
fmt_expr(buf, &record_var.value, indent, false, false);
buf.push_str(" &");
}
}
let is_multiline = loc_fields
.iter()
.any(|loc_field| is_multiline_field(&loc_field.value));
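As a quick illustration of the new update branch (my own example, not one taken from the commit's tests): for a record update whose target expression is the variable rec and whose only field is x: 5, the formatter now emits the opening brace, then the target and an ampersand, then the fields, producing roughly:

{ rec & x: 5 }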

View file

@ -1,11 +0,0 @@
use crate::subs::Subs;
use inkwell::builder::Builder;
use inkwell::context::Context;
use inkwell::module::Module;
pub struct Env<'ctx, 'env> {
pub context: &'ctx Context,
pub builder: &'env Builder<'ctx>,
pub module: &'ctx Module<'ctx>,
pub subs: Subs,
}

View file

@ -1,87 +0,0 @@
use inkwell::basic_block::BasicBlock;
use inkwell::module::Linkage;
use inkwell::types::BasicType;
use inkwell::types::BasicTypeEnum;
use inkwell::values::BasicValueEnum::{self, *};
use inkwell::values::{BasicValue, FunctionValue, IntValue, PointerValue};
use inkwell::{FloatPredicate, IntPredicate};
use crate::can::expr::Expr;
use crate::can::ident::Lowercase;
use crate::can::pattern::Pattern::{self, *};
use crate::can::symbol::Symbol;
use crate::collections::ImMap;
use crate::collections::MutMap;
use crate::gen::convert::content_to_basic_type;
use crate::gen::env::Env;
use crate::subs::{Content, FlatType, Subs};
fn extract_procs(loc_expr: Located<Expr>, module: &Module<'ctx>, name: Option<Lowercase>, procs: &mut Procs<'ctx>) -> Located<Expr> {
let mut procs = Vec::new();
match expr {
LetNonRec(def, ret_expr, var) => {
let loc_pattern = def.loc_pattern;
let loc_expr = def.loc_expr;
// If we're defining a named closure, insert it into Procs and then
// remove the Let. When code later goes to look it up, it'll be in Procs!
//
// Before:
//
// identity = \a -> a
//
// identity 5
//
// After: (`identity` is now in Procs)
//
// identity 5
//
let pattern = match loc_pattern.value {
Identifier(name) => {
match &loc_expr.value {
Closure(_, _, _, _, _) => {
// Extract Procs, but discard the resulting Expr::Var.
// That Var looks up the pointer, which we won't use here!
extract_procs(loc_expr, Some(name), procs);
// Discard this LetNonRec by replacing it with its ret_expr.
return ret_expr;
}
_ => {
// If this isn't a Closure, proceed as normal.
Identifier(name)
}
}
}
pat => pat
}
// At this point, it's safe to assume we aren't assigning a Closure to a def.
// Extract Procs from the def body and the ret expression, and return the result!
let ret_expr = extract_procs(ret_expr, None, procs);
let loc_expr = extract_procs(def.loc_expr, None, procs);
let loc_pattern = Located { region: def.loc_pattern.region, value: pattern };
let def = Def { loc_pattern, loc_expr, ..def };
LetNonRec(def, ret_expr, var)
}
Closure(var, symbol, recursive, loc_args, boxed_ret) => {
let (loc_ret, var) = boxed_ret;
let name = match name {
Some(name) => name.as_str(),
None => {
// Give the closure a name like "_0" or "_1".
// We know procs.len() will be unique!
format!("_{}", procs.len()).as_str();
}
};
let fn_val = module.add_function(name, fn_type, linkage);
panic!("push to procs");
}
};
}

View file

@ -23,13 +23,14 @@ pub mod uniqueness;
pub mod string;
pub mod constrain;
pub mod crane;
pub mod ena;
pub mod fmt;
pub mod gen;
pub mod infer;
pub mod ll;
pub mod llvm;
pub mod load;
pub mod module;
pub mod mono;
pub mod pretty_print_types;
pub mod solve;
pub mod subs;

View file

@ -1,3 +1,6 @@
use inkwell::builder::Builder;
use inkwell::context::Context;
use inkwell::module::{Linkage, Module};
use inkwell::types::BasicTypeEnum;
use inkwell::values::BasicValueEnum::{self, *};
use inkwell::values::{FunctionValue, IntValue, PointerValue};
@ -5,10 +8,9 @@ use inkwell::{FloatPredicate, IntPredicate};
use inlinable_string::InlinableString;
use crate::collections::ImMap;
use crate::gen::convert::{content_to_basic_type, layout_to_basic_type};
use crate::gen::env::Env;
use crate::ll::expr::{Expr, Proc, Procs};
use crate::subs::Variable;
use crate::llvm::convert::{content_to_basic_type, get_fn_type, layout_to_basic_type};
use crate::mono::expr::{Expr, Proc, Procs};
use crate::subs::{Subs, Variable};
/// This is for Inkwell's FunctionValue::verify - we want to know the verification
/// output in debug builds, but we don't want it to print to stdout in release builds!
@ -20,14 +22,21 @@ const PRINT_FN_VERIFICATION_OUTPUT: bool = false;
type Scope<'ctx> = ImMap<InlinableString, (Variable, PointerValue<'ctx>)>;
pub struct Env<'ctx, 'env> {
pub context: &'ctx Context,
pub builder: &'env Builder<'ctx>,
pub module: &'ctx Module<'ctx>,
pub subs: Subs,
}
pub fn build_expr<'a, 'ctx, 'env>(
env: &Env<'ctx, 'env>,
scope: &Scope<'ctx>,
parent: FunctionValue<'ctx>,
expr: &Expr<'a>,
procs: &Procs<'a, 'ctx>,
procs: &Procs<'a>,
) -> BasicValueEnum<'ctx> {
use crate::ll::expr::Expr::*;
use crate::mono::expr::Expr::*;
match expr {
Int(num) => env.context.i64_type().const_int(*num as u64, false).into(),
@ -88,6 +97,7 @@ pub fn build_expr<'a, 'ctx, 'env>(
//
// 1. use SIMD string comparison to compare these strings faster
// 2. pre-register Bool.or using module.add_function, and see if LLVM inlines it
// 3. intern all these strings
if name == "Bool.or" {
panic!("TODO create a phi node for ||");
} else if name == "Bool.and" {
@ -106,35 +116,45 @@ pub fn build_expr<'a, 'ctx, 'env>(
let call = env.builder.build_call(fn_val, arg_vals.as_slice(), "tmp");
call.try_as_basic_value()
.left()
.unwrap_or_else(|| panic!("LLVM error: Invalid call by name."))
call.try_as_basic_value().left().unwrap_or_else(|| {
panic!("LLVM error: Invalid call by name for name {:?}", name)
})
}
}
CallByPointer(ref _ptr, ref args) => {
FunctionPointer(ref fn_name) => {
let ptr = env
.module
.get_function(fn_name)
.unwrap_or_else(|| {
panic!("Could not get pointer to unknown function {:?}", fn_name)
})
.as_global_value()
.as_pointer_value();
BasicValueEnum::PointerValue(ptr)
}
CallByPointer(ref sub_expr, ref args, _var) => {
let mut arg_vals: Vec<BasicValueEnum> = Vec::with_capacity(args.len());
for arg in args.iter() {
arg_vals.push(build_expr(env, scope, parent, arg, procs));
}
panic!("TODO do a load(ptr) to get back the pointer, then pass *that* in here!");
let call = match build_expr(env, scope, parent, sub_expr, procs) {
BasicValueEnum::PointerValue(ptr) => {
env.builder.build_call(ptr, arg_vals.as_slice(), "tmp")
}
non_ptr => {
panic!(
"Tried to call by pointer, but encountered a non-pointer: {:?}",
non_ptr
);
}
};
// let call = match build_expr(env, scope, parent, expr, procs) {
// BasicValueEnum::PointerValue(ptr) => {
// env.builder.build_call(ptr, arg_vals.as_slice(), "tmp")
// }
// non_ptr => {
// panic!(
// "Tried to call by pointer, but encountered a non-pointer: {:?}",
// non_ptr
// );
// }
// };
// call.try_as_basic_value()
// .left()
// .unwrap_or_else(|| panic!("LLVM error: Invalid call by pointer."))
call.try_as_basic_value()
.left()
.unwrap_or_else(|| panic!("LLVM error: Invalid call by pointer."))
}
Load(name) => match scope.get(name) {
@ -142,7 +162,7 @@ pub fn build_expr<'a, 'ctx, 'env>(
None => panic!("Could not find a var for {:?} in scope {:?}", name, scope),
},
_ => {
panic!("I don't yet know how to build {:?}", expr);
panic!("I don't yet know how to LLVM build {:?}", expr);
}
}
}
@ -160,7 +180,7 @@ fn build_cond<'a, 'ctx, 'env>(
scope: &Scope<'ctx>,
parent: FunctionValue<'ctx>,
cond: Cond2<'a>,
procs: &Procs<'a, 'ctx>,
procs: &Procs<'a>,
) -> BasicValueEnum<'ctx> {
let builder = env.builder;
let context = env.context;
@ -249,7 +269,7 @@ fn build_phi2<'a, 'ctx, 'env>(
pass: &'a Expr<'a>,
fail: &'a Expr<'a>,
ret_type: BasicTypeEnum<'ctx>,
procs: &Procs<'a, 'ctx>,
procs: &Procs<'a>,
) -> BasicValueEnum<'ctx> {
let builder = env.builder;
let context = env.context;
@ -320,37 +340,44 @@ pub fn create_entry_block_alloca<'ctx>(
pub fn build_proc<'a, 'ctx, 'env>(
env: &Env<'ctx, 'env>,
scope: &Scope<'ctx>,
name: InlinableString,
proc: Proc<'a>,
procs: &Procs<'a, 'ctx>,
) {
procs: &Procs<'a>,
) -> FunctionValue<'ctx> {
let args = proc.args;
let mut arg_names = Vec::new();
let subs = &env.subs;
let context = &env.context;
let ret_content = subs.get_without_compacting(proc.ret_var).content;
// TODO this content_to_basic_type is duplicated when building this Proc
let ret_type = content_to_basic_type(&ret_content, subs, context).unwrap_or_else(|err| {
panic!(
"Error converting function return value content to basic type: {:?}",
err
)
});
let mut arg_basic_types = Vec::with_capacity(args.len());
let mut arg_names = Vec::new();
for (layout, name, _var) in args.iter() {
let arg_type = layout_to_basic_type(&layout, &env.subs, env.context);
let arg_type = layout_to_basic_type(&layout, subs, env.context);
arg_basic_types.push(arg_type);
arg_names.push(name);
}
// Retrieve the function value from the module
let fn_val = env.module.get_function(&name).unwrap_or_else(|| {
panic!(
"Function {:?} should have been registered in the LLVM module, but it was not!",
name
)
});
let fn_type = get_fn_type(&ret_type, &arg_basic_types);
let fn_val = env
.module
.add_function(&name, fn_type, Some(Linkage::Private));
// Add a basic block for the entry point
let entry = env.context.append_basic_block(fn_val, "entry");
let entry = context.append_basic_block(fn_val, "entry");
let builder = env.builder;
builder.position_at_end(&entry);
let mut scope = scope.clone();
let mut scope = ImMap::default();
// Add args to scope
for ((arg_val, arg_type), (_, arg_name, var)) in
@ -369,9 +396,11 @@ pub fn build_proc<'a, 'ctx, 'env>(
builder.build_return(Some(&body));
if fn_val.verify(PRINT_FN_VERIFICATION_OUTPUT) {
// TODO call pass_manager.run_on(&fn_val) to optimize it!
} else {
fn_val
}
pub fn verify_fn(fn_val: FunctionValue<'_>) {
if !fn_val.verify(PRINT_FN_VERIFICATION_OUTPUT) {
unsafe {
fn_val.delete();
}

View file

@ -3,7 +3,7 @@ use inkwell::types::BasicTypeEnum::{self, *};
use inkwell::types::{BasicType, FunctionType};
use inkwell::AddressSpace;
use crate::ll::layout::Layout;
use crate::mono::layout::Layout;
use crate::subs::FlatType::*;
use crate::subs::{Content, Subs};
use crate::types;
@ -46,8 +46,9 @@ pub fn content_to_basic_type<'ctx>(
arg_basic_types.push(content_to_basic_type(&arg_content, subs, context)?);
}
let fn_type = get_fn_type(&ret_type, arg_basic_types.as_slice());
let ptr_type = fn_type.ptr_type(AddressSpace::Generic);
Ok(fn_type.ptr_type(AddressSpace::Global).as_basic_type_enum())
Ok(ptr_type.as_basic_type_enum())
}
other => panic!("TODO handle content_to_basic_type for {:?}", other),
},
@ -99,7 +100,7 @@ fn num_to_basic_type(content: Content, context: &Context) -> Result<BasicTypeEnu
}
/// TODO could this be added to Inkwell itself as a method on BasicValueEnum?
fn get_fn_type<'ctx>(
pub fn get_fn_type<'ctx>(
bt_enum: &BasicTypeEnum<'ctx>,
arg_types: &[BasicTypeEnum<'ctx>],
) -> FunctionType<'ctx> {
@ -118,8 +119,8 @@ pub fn layout_to_basic_type<'ctx>(
_subs: &Subs,
context: &'ctx Context,
) -> BasicTypeEnum<'ctx> {
use crate::ll::layout::Builtin::*;
use crate::ll::layout::Layout::*;
use crate::mono::layout::Builtin::*;
use crate::mono::layout::Layout::*;
match layout {
FunctionPointer(_arg_layouts, _ret_layout) => {

src/llvm/mod.rs (new file), 2 lines
View file

@ -0,0 +1,2 @@
pub mod build;
pub mod convert;

View file

@ -1,19 +1,14 @@
use crate::can;
use crate::can::pattern::Pattern;
use crate::collections::MutMap;
use crate::gen::convert::content_to_basic_type;
use crate::ll::layout::Layout;
use crate::mono::layout::Layout;
use crate::region::Located;
use crate::subs::{Subs, Variable};
use bumpalo::collections::Vec;
use bumpalo::Bump;
use inkwell::context::Context;
use inkwell::module::Module;
use inkwell::types::{BasicType, BasicTypeEnum};
use inkwell::values::FunctionValue;
use inlinable_string::InlinableString;
pub type Procs<'a, 'ctx> = MutMap<InlinableString, (Option<Proc<'a>>, FunctionValue<'ctx>)>;
pub type Procs<'a> = MutMap<InlinableString, Option<Proc<'a>>>;
#[derive(Clone, Debug, PartialEq)]
pub struct Proc<'a> {
@ -23,11 +18,9 @@ pub struct Proc<'a> {
pub ret_var: Variable,
}
struct Env<'a, 'ctx> {
arena: &'a Bump,
subs: &'a Subs,
module: &'ctx Module<'ctx>,
context: &'ctx Context,
struct Env<'a> {
pub arena: &'a Bump,
pub subs: &'a Subs,
}
#[derive(Clone, Debug, PartialEq)]
@ -36,6 +29,15 @@ pub enum Expr<'a> {
Int(i64),
Float(f64),
Str(&'a str),
/// Closed tag unions containing exactly two (0-arity) tags compile to Expr::Bool,
/// so they can (at least potentially) be emitted as 1-bit machine bools.
///
/// So [ True, False ] compiles to this, and so do [ A, B ] and [ Foo, Bar ].
/// However, a union like [ True, False, Other Int ] would not.
Bool(bool),
/// Closed tag unions containing between 3 and 256 tags (all of 0 arity)
/// compile to bytes, e.g. [ Blue, Black, Red, Green, White ]
Byte(u8),
// Load/Store
Load(InlinableString),
@ -43,8 +45,8 @@ pub enum Expr<'a> {
// Functions
FunctionPointer(InlinableString),
CallByPointer(InlinableString, &'a [Expr<'a>]),
CallByName(InlinableString, &'a [Expr<'a>]),
CallByPointer(&'a Expr<'a>, &'a [Expr<'a>], Variable),
// Exactly two conditional branches, e.g. if/else
Cond {
@ -67,6 +69,12 @@ pub enum Expr<'a> {
branches: &'a [(Expr<'a>, Expr<'a>, Expr<'a>)],
ret_var: Variable,
},
Tag {
variant_var: Variable,
ext_var: Variable,
name: InlinableString,
arguments: &'a [Expr<'a>],
},
Struct(&'a [(InlinableString, Expr<'a>)]),
@ -74,29 +82,22 @@ pub enum Expr<'a> {
}
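The Bool/Byte doc comments above describe a representation rule for closed unions made only of 0-arity tags. A tiny self-contained sketch of that rule, written by me rather than taken from this commit:

// How many 0-arity tags a closed union has decides its runtime representation.
fn unit_union_repr(tag_count: usize) -> &'static str {
    match tag_count {
        2 => "Expr::Bool (potentially a 1-bit machine bool)", // e.g. [ True, False ], [ A, B ]
        3..=256 => "Expr::Byte (u8)",                         // e.g. [ Blue, Black, Red, Green, White ]
        _ => "not covered by the Bool/Byte special cases",
    }
}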
impl<'a> Expr<'a> {
pub fn new<'ctx>(
pub fn new(
arena: &'a Bump,
subs: &'a Subs,
module: &'ctx Module<'ctx>,
context: &'ctx Context,
can_expr: can::expr::Expr,
procs: &mut Procs<'a, 'ctx>,
procs: &mut Procs<'a>,
) -> Self {
let env = Env {
arena,
subs,
module,
context,
};
let env = Env { arena, subs };
from_can(&env, can_expr, procs, None)
}
}
fn from_can<'a, 'ctx>(
env: &Env<'a, 'ctx>,
fn from_can<'a>(
env: &Env<'a>,
can_expr: can::expr::Expr,
procs: &mut Procs<'a, 'ctx>,
procs: &mut Procs<'a>,
name: Option<InlinableString>,
) -> Expr<'a> {
use crate::can::expr::Expr::*;
@ -158,16 +159,13 @@ fn from_can<'a, 'ctx>(
Closure(_, _symbol, _, loc_args, boxed_body) => {
let (loc_body, ret_var) = *boxed_body;
let name = name.unwrap_or_else(||
// Give the closure a name like "_0" or "_1".
// We know procs.len() will be unique!
format!("_{}", procs.len()).into());
let name = name.unwrap_or_else(|| gen_closure_name(procs));
add_closure(env, name, loc_body.value, ret_var, &loc_args, procs)
}
Call(boxed, loc_args, _) => {
let (_, loc_expr, _) = *boxed;
let (fn_var, loc_expr, _) = *boxed;
let mut args = Vec::with_capacity_in(loc_args.len(), env.arena);
for (_, loc_arg) in loc_args {
@ -176,7 +174,7 @@ fn from_can<'a, 'ctx>(
match from_can(env, loc_expr.value, procs, None) {
Expr::Load(proc_name) => Expr::CallByName(proc_name, args.into_bump_slice()),
Expr::FunctionPointer(proc_name) => {
ptr => {
// Call by pointer - the closure was anonymous, e.g.
//
// ((\a -> a) 5)
@ -184,13 +182,7 @@ fn from_can<'a, 'ctx>(
// It might even be the anonymous result of a conditional:
//
// ((if x > 0 then \a -> a else \_ -> 0) 5)
Expr::CallByPointer(proc_name, args.into_bump_slice())
}
non_ptr => {
panic!(
"Tried to call by pointer, but encountered a non-pointer: {:?}",
non_ptr
);
Expr::CallByPointer(&*env.arena.alloc(ptr), args.into_bump_slice(), fn_var)
}
}
}
@ -279,45 +271,28 @@ fn from_can<'a, 'ctx>(
}
}
fn add_closure<'a, 'ctx>(
env: &Env<'a, 'ctx>,
fn add_closure<'a>(
env: &Env<'a>,
name: InlinableString,
can_body: can::expr::Expr,
ret_var: Variable,
loc_args: &[(Variable, Located<Pattern>)],
procs: &mut Procs<'a, 'ctx>,
procs: &mut Procs<'a>,
) -> Expr<'a> {
let subs = &env.subs;
let context = env.context;
let arena = env.arena;
let ret_content = subs.get_without_compacting(ret_var).content;
let ret_type = content_to_basic_type(&ret_content, subs, context).unwrap_or_else(|err| {
panic!(
"Error converting function return value content to basic type: {:?}",
err
)
});
let mut arg_names = Vec::with_capacity_in(loc_args.len(), arena);
let mut arg_basic_types = Vec::with_capacity_in(loc_args.len(), arena);
let mut proc_args = Vec::with_capacity_in(loc_args.len(), arena);
for (arg_var, loc_arg) in loc_args.iter() {
let content = subs.get_without_compacting(*arg_var).content;
arg_basic_types.push(
content_to_basic_type(&content, subs, context).unwrap_or_else(|err| {
panic!(
"Error converting function arg content to basic type: {:?}",
err
)
}),
);
let layout = match Layout::from_content(arena, content, subs) {
Ok(layout) => layout,
Err(()) => {
return invalid_closure(env, name, ret_type, procs);
// Invalid closure!
procs.insert(name.clone(), None);
return Expr::FunctionPointer(name);
}
};
@ -328,12 +303,9 @@ fn add_closure<'a, 'ctx>(
}
};
arg_names.push(arg_name.clone());
proc_args.push((layout, arg_name, *arg_var));
}
let fn_type = ret_type.fn_type(arg_basic_types.into_bump_slice(), false);
let fn_val = env.module.add_function(&name, fn_type, None);
let proc = Proc {
args: proc_args.into_bump_slice(),
body: from_can(env, can_body, procs, None),
@ -341,31 +313,17 @@ fn add_closure<'a, 'ctx>(
ret_var,
};
procs.insert(name.clone(), (Some(proc), fn_val));
procs.insert(name.clone(), Some(proc));
Expr::FunctionPointer(name)
}
fn invalid_closure<'a, 'ctx>(
env: &Env<'a, 'ctx>,
name: InlinableString,
ret_type: BasicTypeEnum<'ctx>,
procs: &mut Procs<'a, 'ctx>,
) -> Expr<'a> {
let fn_type = ret_type.fn_type(&[], false);
let fn_val = env.module.add_function(&name, fn_type, None);
procs.insert(name.clone(), (None, fn_val));
Expr::FunctionPointer(name)
}
fn store_pattern<'a, 'ctx>(
env: &Env<'a, 'ctx>,
fn store_pattern<'a>(
env: &Env<'a>,
can_pat: Pattern,
can_expr: can::expr::Expr,
var: Variable,
procs: &mut Procs<'a, 'ctx>,
procs: &mut Procs<'a>,
stored: &mut Vec<'a, (InlinableString, Variable, Expr<'a>)>,
) {
use crate::can::pattern::Pattern::*;
@ -394,3 +352,9 @@ fn store_pattern<'a, 'ctx>(
}
}
}
fn gen_closure_name(procs: &Procs<'_>) -> InlinableString {
// Give the closure a name like "_0" or "_1".
// We know procs.len() will be unique!
format!("_{}", procs.len()).into()
}
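Tying the Call branch and gen_closure_name together, an illustration of my own (assuming a plain variable reference lowers to Expr::Load, which is what the CallByName arm matches on):

//   identity 5      ->  Expr::CallByName("identity", &[Int(5)])
//   ((\a -> a) 5)   ->  Expr::CallByPointer(&FunctionPointer("_0"), &[Int(5)], fn_var)
//                       (the anonymous closure is first extracted as proc "_0")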

View file

@ -2,6 +2,7 @@ use crate::subs::{Content, FlatType, Subs};
use crate::types;
use bumpalo::collections::Vec;
use bumpalo::Bump;
use cranelift_codegen::isa::TargetFrontendConfig;
use inlinable_string::InlinableString;
/// Types for code gen must be monomorphic. No type variables allowed!
@ -41,6 +42,50 @@ impl<'a> Layout<'a> {
Error => Err(()),
}
}
pub fn stack_size(&self, cfg: TargetFrontendConfig) -> u32 {
use Layout::*;
match self {
Builtin(builtin) => builtin.stack_size(cfg),
Struct(fields) => {
let mut sum = 0;
for (_, field_layout) in *fields {
sum += field_layout.stack_size(cfg);
}
sum
}
Pointer(_) | FunctionPointer(_, _) => pointer_size(cfg),
}
}
}
fn pointer_size(cfg: TargetFrontendConfig) -> u32 {
cfg.pointer_bytes() as u32
}
impl<'a> Builtin<'a> {
const I64_SIZE: u32 = std::mem::size_of::<i64>() as u32;
const F64_SIZE: u32 = std::mem::size_of::<f64>() as u32;
/// Number of machine words in an empty one of these
const STR_WORDS: u32 = 3;
const MAP_WORDS: u32 = 6;
const SET_WORDS: u32 = Builtin::MAP_WORDS; // Set is an alias for Map with {} for value
pub fn stack_size(&self, cfg: TargetFrontendConfig) -> u32 {
use Builtin::*;
match self {
Int64 => Builtin::I64_SIZE,
Float64 => Builtin::F64_SIZE,
Str => Builtin::STR_WORDS * pointer_size(cfg),
Map(_, _) => Builtin::MAP_WORDS * pointer_size(cfg),
Set(_) => Builtin::SET_WORDS * pointer_size(cfg),
}
}
}
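For reference, a quick sketch of the sizes this computes, written in the style of the crane test harness further down this diff; the roc::mono::layout path is an assumption, so adjust it to wherever Layout and Builtin actually live.
use cranelift_codegen::isa;
use cranelift_codegen::settings;
use roc::mono::layout::Builtin; // assumed module path for the Builtin enum above
use target_lexicon::HOST;

fn builtin_stack_sizes() {
    // Build a TargetFrontendConfig the same way the crane tests below do.
    let flags = settings::Flags::new(settings::builder());
    let isa = isa::lookup(HOST).expect("unsupported host ISA").finish(flags);
    let cfg = isa.frontend_config();

    // Ints and floats have fixed sizes; Str is STR_WORDS (3) machine words,
    // so 24 bytes on a 64-bit target.
    assert_eq!(Builtin::Int64.stack_size(cfg), 8);
    assert_eq!(Builtin::Float64.stack_size(cfg), 8);
    assert_eq!(Builtin::Str.stack_size(cfg), 3 * cfg.pointer_bytes() as u32);
}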
fn layout_from_flat_type<'a>(
@ -102,6 +147,9 @@ fn layout_from_flat_type<'a>(
EmptyTagUnion => {
panic!("TODO make Layout for empty Tag Union");
}
Boolean(_) => {
panic!("TODO make Layout for Boolean");
}
Erroneous(_) => Err(()),
EmptyRecord => Ok(Layout::Struct(&[])),
}

View file

@ -217,15 +217,20 @@ pub enum TypeAnnotation<'a> {
/// A bound type variable, e.g. `a` in `(a -> a)`
BoundVariable(&'a str),
/// A plain record, e.g. `{ name: String, email: Email }`
Record(Vec<'a, Loc<AssignedField<'a, TypeAnnotation<'a>>>>),
Record {
fields: &'a [Loc<AssignedField<'a, TypeAnnotation<'a>>>],
/// The row type variable in an open record, e.g. the `r` in `{ name: Str }r`.
/// This is None if it's a closed record annotation like `{ name: Str }`.
ext: Option<&'a Loc<TypeAnnotation<'a>>>,
},
/// A record fragment, e.g. `{ name: String, email: Email }...r`
RecordFragment(
Vec<'a, Loc<AssignedField<'a, TypeAnnotation<'a>>>>,
// the fragment type variable, e.g. the `r` in `{ name: String }...r`
&'a Loc<TypeAnnotation<'a>>,
),
    /// A tag union, e.g. `[ Foo, Bar ]`
TagUnion {
tags: &'a [Loc<Tag<'a>>],
/// The row type variable in an open tag union, e.g. the `a` in `[ Foo, Bar ]a`.
        /// This is None if it's a closed tag union like `[ Foo, Bar ]`.
ext: Option<&'a Loc<TypeAnnotation<'a>>>,
},
/// The `*` type variable, e.g. in (List *)
Wildcard,
@ -238,14 +243,31 @@ pub enum TypeAnnotation<'a> {
Malformed(&'a str),
}
#[derive(Debug, Clone, PartialEq)]
pub enum Tag<'a> {
Global {
name: Loc<&'a str>,
args: &'a [Loc<TypeAnnotation<'a>>],
},
Private {
name: Loc<&'a str>,
args: &'a [Loc<TypeAnnotation<'a>>],
},
// We preserve this for the formatter; canonicalization ignores it.
SpaceBefore(&'a Tag<'a>, &'a [CommentOrNewline<'a>]),
SpaceAfter(&'a Tag<'a>, &'a [CommentOrNewline<'a>]),
/// A malformed tag, which will code gen to a runtime error
Malformed(&'a str),
}
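As an illustration of the new struct-style variants (not part of the change itself): a consumer can now tell open and closed annotations apart just by checking ext. The use path mirrors the crate::parse::ast imports further down and is assumed to be public.
use roc::parse::ast::TypeAnnotation; // assumed public path, matching crate::parse::ast above

fn describe<'a>(ann: &TypeAnnotation<'a>) -> &'static str {
    match ann {
        // `{ name : Str }` parses with ext == None; `{ name : Str }r` fills ext in.
        TypeAnnotation::Record { ext: None, .. } => "closed record",
        TypeAnnotation::Record { ext: Some(_), .. } => "open record",
        // Likewise `[ Foo, Bar ]` vs `[ Foo, Bar ]a`.
        TypeAnnotation::TagUnion { ext: None, .. } => "closed tag union",
        TypeAnnotation::TagUnion { ext: Some(_), .. } => "open tag union",
        _ => "some other annotation",
    }
}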
#[derive(Debug, Clone, PartialEq)]
pub enum AssignedField<'a, Val> {
// Both a label and a value, e.g. `{ name: "blah" }`
LabeledValue(Loc<&'a str>, &'a [CommentOrNewline<'a>], &'a Loc<Val>),
// An optional field, e.g. `{ name? : String }`. Only for types
OptionalField(Loc<&'a str>, &'a [CommentOrNewline<'a>], &'a Loc<Val>),
// A label with no value, e.g. `{ name }` (this is sugar for { name: name })
LabelOnly(Loc<&'a str>),
@ -497,6 +519,15 @@ impl<'a, Val> Spaceable<'a> for AssignedField<'a, Val> {
}
}
impl<'a> Spaceable<'a> for Tag<'a> {
fn before(&'a self, spaces: &'a [CommentOrNewline<'a>]) -> Self {
Tag::SpaceBefore(self, spaces)
}
fn after(&'a self, spaces: &'a [CommentOrNewline<'a>]) -> Self {
Tag::SpaceAfter(self, spaces)
}
}
impl<'a> Spaceable<'a> for Def<'a> {
fn before(&'a self, spaces: &'a [CommentOrNewline<'a>]) -> Self {
Def::SpaceBefore(self, spaces)

View file

@ -49,7 +49,7 @@ impl<'a> Ident<'a> {
/// Sometimes we may want to check for those later in the process, and give
/// more contextually-aware error messages than "unexpected `if`" or the like.
#[inline(always)]
pub fn parse_into<'a, I>(
pub fn parse_ident<'a, I>(
arena: &'a Bump,
chars: &mut I,
state: State<'a>,
@ -103,7 +103,7 @@ where
}
};
let mut chars_parsed = 1;
let mut chars_parsed = part_buf.len();
let mut next_char = None;
while let Some(ch) = chars.next() {
@ -310,7 +310,7 @@ where
pub fn ident<'a>() -> impl Parser<'a, Ident<'a>> {
move |arena: &'a Bump, state: State<'a>| {
// Discard next_char; we don't need it.
let ((string, _), state) = parse_into(arena, &mut state.input.chars(), state)?;
let ((string, _), state) = parse_ident(arena, &mut state.input.chars(), state)?;
Ok((string, state))
}

View file

@ -359,7 +359,6 @@ pub fn assigned_expr_field_to_pattern<'a>(
)
}
}
AssignedField::OptionalField(_, _, _) => panic!("invalid in literals"),
AssignedField::LabelOnly(name) => Pattern::Identifier(name.value),
AssignedField::SpaceBefore(nested, spaces) => Pattern::SpaceBefore(
arena.alloc(assigned_expr_field_to_pattern(arena, nested)?),
@ -400,9 +399,6 @@ pub fn assigned_pattern_field_to_pattern<'a>(
)
}
}
AssignedField::OptionalField(_, _, _) => {
panic!("invalid as a pattern");
}
AssignedField::LabelOnly(name) => Located::at(name.region, Pattern::Identifier(name.value)),
AssignedField::SpaceBefore(nested, spaces) => {
let can_nested = assigned_pattern_field_to_pattern(arena, nested, backup_region)?;
@ -434,7 +430,7 @@ pub fn loc_parenthetical_def<'a>(min_indent: u16) -> impl Parser<'a, Located<Exp
space0_after(
between!(
char('('),
space0_around(loc!(pattern(min_indent)), min_indent),
space0_around(loc_pattern(min_indent), min_indent),
char(')')
),
min_indent,
@ -756,23 +752,32 @@ fn parse_closure_param<'a>(
// e.g. \User.UserId userId -> ...
between!(
char('('),
space0_around(loc!(pattern(min_indent)), min_indent),
space0_around(loc_pattern(min_indent), min_indent),
char(')')
),
// The least common, but still allowed, e.g. \Foo -> ...
loc!(tag_pattern())
loc_tag_pattern(min_indent)
)
.parse(arena, state)
}
fn pattern<'a>(min_indent: u16) -> impl Parser<'a, Pattern<'a>> {
fn loc_pattern<'a>(min_indent: u16) -> impl Parser<'a, Located<Pattern<'a>>> {
one_of!(
underscore_pattern(),
tag_pattern(),
ident_pattern(),
record_destructure(min_indent),
string_pattern(),
int_pattern()
loc_parenthetical_pattern(min_indent),
loc!(underscore_pattern()),
loc_tag_pattern(min_indent),
loc!(ident_pattern()),
loc!(record_destructure(min_indent)),
loc!(string_pattern()),
loc!(int_pattern())
)
}
fn loc_parenthetical_pattern<'a>(min_indent: u16) -> impl Parser<'a, Located<Pattern<'a>>> {
between!(
char('('),
move |arena, state| loc_pattern(min_indent).parse(arena, state),
char(')')
)
}
@ -800,7 +805,7 @@ fn underscore_pattern<'a>() -> impl Parser<'a, Pattern<'a>> {
fn record_destructure<'a>(min_indent: u16) -> impl Parser<'a, Pattern<'a>> {
then(
record_without_update!(loc!(pattern(min_indent)), min_indent),
record_without_update!(loc_pattern(min_indent), min_indent),
move |arena, state, assigned_fields| {
let mut patterns = Vec::with_capacity_in(assigned_fields.len(), arena);
for assigned_field in assigned_fields {
@ -819,10 +824,29 @@ fn record_destructure<'a>(min_indent: u16) -> impl Parser<'a, Pattern<'a>> {
)
}
fn tag_pattern<'a>() -> impl Parser<'a, Pattern<'a>> {
one_of!(
map!(private_tag(), Pattern::PrivateTag),
map!(global_tag(), Pattern::GlobalTag)
fn loc_tag_pattern<'a>(min_indent: u16) -> impl Parser<'a, Located<Pattern<'a>>> {
map_with_arena!(
and!(
loc!(one_of!(
map!(private_tag(), Pattern::PrivateTag),
map!(global_tag(), Pattern::GlobalTag)
)),
// This can optionally be an applied pattern, e.g. (Foo bar) instead of (Foo)
zero_or_more!(space1_before(loc_pattern(min_indent), min_indent))
),
|arena: &'a Bump,
(loc_tag, loc_args): (Located<Pattern<'a>>, Vec<'a, Located<Pattern<'a>>>)| {
if loc_args.is_empty() {
loc_tag
} else {
                // TODO FIXME this region doesn't cover the tag's
// arguments; need to add them to the region!
let region = loc_tag.region;
let value = Pattern::Apply(&*arena.alloc(loc_tag), loc_args.into_bump_slice());
Located { region, value }
}
}
)
}
@ -872,7 +896,7 @@ pub fn case_branches<'a>(
// 2. Parse the other branches. Their indentation levels must be == the first branch's.
let (mut loc_first_pattern, state) =
space1_before(loc!(pattern(min_indent)), min_indent).parse(arena, state)?;
space1_before(loc_pattern(min_indent), min_indent).parse(arena, state)?;
let original_indent = state.indent_col;
let indented_more = original_indent + 1;
let (spaces_before_arrow, state) = space0(min_indent).parse(arena, state)?;
@ -902,7 +926,7 @@ pub fn case_branches<'a>(
let branch_parser = and!(
then(
space1_around(loc!(pattern(min_indent)), min_indent),
space1_around(loc_pattern(min_indent), min_indent),
move |_arena, state, loc_pattern| {
if state.indent_col == original_indent {
Ok((loc_pattern, state))

View file

@ -905,9 +905,6 @@ macro_rules! record_field {
// You must have a field name, e.g. "email"
let (loc_label, state) = loc!(lowercase_ident()).parse(arena, state)?;
let (opt_field, state) =
$crate::parse::parser::optional(char('?')).parse(arena, state)?;
let (spaces, state) = space0($min_indent).parse(arena, state)?;
// Having a value is optional; both `{ email }` and `{ email: blah }` work.
// (This is true in both literals and types.)
@ -917,27 +914,24 @@ macro_rules! record_field {
))
.parse(arena, state)?;
let answer = match (opt_loc_val, opt_field) {
(Some(loc_val), None) => LabeledValue(loc_label, spaces, arena.alloc(loc_val)),
(Some(loc_val), Some(_)) => OptionalField(loc_label, spaces, arena.alloc(loc_val)),
let answer = match opt_loc_val {
Some(loc_val) => LabeledValue(loc_label, spaces, arena.alloc(loc_val)),
// If no value was provided, record it as a Var.
// Canonicalize will know what to do with a Var later.
(None, None) => {
None => {
if !spaces.is_empty() {
SpaceAfter(arena.alloc(LabelOnly(loc_label)), spaces)
} else {
LabelOnly(loc_label)
}
}
(None, Some(_)) => {
                panic!("TODO should `{ x? }` be valid? realistically, how often does `{ a : a }` occur in a type?");
}
};
Ok((answer, state))
}
};
}
#[macro_export]
macro_rules! record_without_update {
($val_parser:expr, $min_indent:expr) => {

View file

@ -1,9 +1,11 @@
use crate::collections::arena_join;
use crate::parse::ast::{Attempting, TypeAnnotation};
use crate::parse::ast::{AssignedField, Attempting, Tag, TypeAnnotation};
use crate::parse::blankspace::{space0_around, space0_before, space1_before};
use crate::parse::parser::{
char, optional, string, unexpected, unexpected_eof, ParseResult, Parser, State,
allocated, char, optional, string, unexpected, unexpected_eof, Either, ParseResult, Parser,
State,
};
use crate::parse::{global_tag, private_tag};
use crate::region::Located;
use bumpalo::collections::string::String;
use bumpalo::collections::vec::Vec;
@ -13,6 +15,33 @@ pub fn located<'a>(min_indent: u16) -> impl Parser<'a, Located<TypeAnnotation<'a
expression(min_indent)
}
macro_rules! tag_union {
($min_indent:expr) => {
map!(
and!(
collection!(
char('['),
loc!(tag_type($min_indent)),
char(','),
char(']'),
$min_indent
),
optional(
// This could be an open tag union, e.g. `[ Foo, Bar ]a`
move |arena, state| allocated(term($min_indent)).parse(arena, state)
)
),
|(tags, ext): (
Vec<'a, Located<Tag<'a>>>,
Option<&'a Located<TypeAnnotation<'a>>>,
)| TypeAnnotation::TagUnion {
tags: tags.into_bump_slice(),
ext
}
)
};
}
pub fn term<'a>(min_indent: u16) -> impl Parser<'a, Located<TypeAnnotation<'a>>> {
one_of!(
// The `*` type variable, e.g. in (List *) Wildcard,
@ -21,6 +50,7 @@ pub fn term<'a>(min_indent: u16) -> impl Parser<'a, Located<TypeAnnotation<'a>>>
}),
loc_parenthetical_type(min_indent),
loc!(record_type(min_indent)),
loc!(tag_union!(min_indent)),
loc!(applied_type(min_indent)),
loc!(parse_type_variable)
)
@ -38,25 +68,56 @@ fn loc_parenthetical_type<'a>(min_indent: u16) -> impl Parser<'a, Located<TypeAn
)
}
#[inline(always)]
#[allow(clippy::type_complexity)]
fn tag_type<'a>(min_indent: u16) -> impl Parser<'a, Tag<'a>> {
map!(
and!(
either!(loc!(private_tag()), loc!(global_tag())),
// Optionally parse space-separated arguments for the constructor,
// e.g. `ok err` in `Result ok err`
zero_or_more!(space1_before(
move |arena, state| term(min_indent).parse(arena, state),
min_indent,
))
),
|(either_name, args): (
Either<Located<&'a str>, Located<&'a str>>,
Vec<'a, Located<TypeAnnotation<'a>>>
)| match either_name {
Either::First(name) => Tag::Private {
name,
args: args.into_bump_slice()
},
Either::Second(name) => Tag::Global {
name,
args: args.into_bump_slice()
},
}
)
}
#[inline(always)]
fn record_type<'a>(min_indent: u16) -> impl Parser<'a, TypeAnnotation<'a>> {
use crate::parse::type_annotation::TypeAnnotation::*;
map_with_arena!(
map!(
and!(
record_without_update!(
move |arena, state| term(min_indent).parse(arena, state),
min_indent
),
optional(skip_first!(
// This could be a record fragment, e.g. `{ name: String }...r`
string("..."),
move |arena, state| term(min_indent).parse(arena, state)
))
optional(
// This could be an open record, e.g. `{ name: Str }r`
move |arena, state| allocated(term(min_indent)).parse(arena, state)
)
),
|arena: &'a Bump, (rec, opt_bound_var)| match opt_bound_var {
None => Record(rec),
Some(loc_bound_var) => RecordFragment(rec, arena.alloc(loc_bound_var)),
|(fields, ext): (
Vec<'a, Located<AssignedField<'a, TypeAnnotation<'a>>>>,
Option<&'a Located<TypeAnnotation<'a>>>,
)| Record {
fields: fields.into_bump_slice(),
ext
}
)
}

View file

@ -2,6 +2,7 @@ use crate::can::ident::{Lowercase, ModuleName, Uppercase};
use crate::collections::{MutMap, MutSet};
use crate::subs::{Content, FlatType, Subs, Variable};
use crate::types::{self, name_type_var};
use crate::uniqueness::boolean_algebra::Bool;
static WILDCARD: &str = "*";
static EMPTY_RECORD: &str = "{}";
@ -104,6 +105,11 @@ fn find_names_needed(
find_names_needed(ext_var, subs, roots, root_appearances, names_taken);
}
Structure(Boolean(b)) => {
for var in b.variables() {
find_names_needed(var, subs, roots, root_appearances, names_taken);
}
}
RigidVar(name) => {
// User-defined names are already taken.
// We must not accidentally generate names that collide with them!
@ -202,6 +208,13 @@ fn write_flat_type(flat_type: FlatType, subs: &mut Subs, buf: &mut String, paren
EmptyTagUnion => buf.push_str(EMPTY_TAG_UNION),
Func(args, ret) => write_fn(args, ret, subs, buf, parens),
Record(fields, ext_var) => {
use crate::unify::gather_fields;
use crate::unify::RecordStructure;
            // If the `ext` has concrete fields (e.g. { foo : Int }{ bar : Bool }), merge them
let RecordStructure { fields, ext } = gather_fields(subs, fields, ext_var);
let ext_var = ext;
if fields.is_empty() {
buf.push_str(EMPTY_RECORD)
} else {
@ -295,6 +308,10 @@ fn write_flat_type(flat_type: FlatType, subs: &mut Subs, buf: &mut String, paren
}
}
}
Boolean(Bool::Variable(var)) => write_content(subs.get(var).content, subs, buf, parens),
Boolean(b) => {
buf.push_str(&format!("{:?}", b));
}
Erroneous(problem) => {
buf.push_str(&format!("<Type Mismatch: {:?}>", problem));
}

View file

@ -401,6 +401,11 @@ fn type_to_variable(
register(subs, rank, pools, content)
}
Boolean(b) => {
let content = Content::Structure(FlatType::Boolean(b.clone()));
register(subs, rank, pools, content)
}
Function(args, ret_type) => {
let mut arg_vars = Vec::with_capacity(args.len());
@ -673,6 +678,15 @@ fn adjust_rank_content(
rank
}
Boolean(b) => {
let mut rank = Rank::toplevel();
for var in b.variables() {
rank = rank.max(adjust_rank(subs, young_mark, visit_mark, group_rank, var));
}
rank
}
Erroneous(_) => group_rank,
}
}
@ -806,6 +820,12 @@ fn deep_copy_var_help(
TagUnion(new_tags, deep_copy_var_help(subs, max_rank, pools, ext_var))
}
Boolean(b) => {
let mut mapper = |var| deep_copy_var_help(subs, max_rank, pools, var);
Boolean(b.map_variables(&mut mapper))
}
};
subs.set(copy, make_descriptor(Structure(new_flat_type)));

View file

@ -3,6 +3,7 @@ use crate::can::symbol::Symbol;
use crate::collections::{ImMap, ImSet, MutSet, SendMap};
use crate::ena::unify::{InPlace, UnificationTable, UnifyKey};
use crate::types::{name_type_var, ErrorType, Problem, RecordFieldLabel, TypeExt};
use crate::uniqueness::boolean_algebra;
use std::fmt;
use std::sync::atomic::{AtomicUsize, Ordering};
@ -134,7 +135,7 @@ impl Into<Option<Variable>> for OptVariable {
}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct Variable(usize);
impl Variable {
@ -145,6 +146,10 @@ impl Variable {
const NULL: Variable = Variable(0);
const FIRST_USER_SPACE_VAR: Variable = Variable(1);
pub fn unsafe_debug_variable(v: usize) -> Self {
Variable(v)
}
}
impl Into<OptVariable> for Variable {
@ -435,6 +440,7 @@ pub enum FlatType {
Erroneous(Problem),
EmptyRecord,
EmptyTagUnion,
Boolean(boolean_algebra::Bool),
}
#[derive(PartialEq, Eq, Debug, Clone, Copy)]
@ -478,6 +484,10 @@ fn occurs(subs: &mut Subs, seen: &ImSet<Variable>, var: Variable) -> bool {
.values()
.any(|vars| vars.iter().any(|var| occurs(subs, &new_seen, *var)))
}
Boolean(b) => b
.variables()
.iter()
.any(|var| occurs(subs, &new_seen, *var)),
EmptyRecord | EmptyTagUnion | Erroneous(_) => false,
}
}
@ -556,6 +566,12 @@ fn get_var_names(
taken_names
}
FlatType::Boolean(b) => b
.variables()
.into_iter()
.fold(taken_names, |answer, arg_var| {
get_var_names(subs, arg_var, answer)
}),
},
}
}
@ -755,6 +771,8 @@ fn flat_type_to_err_type(subs: &mut Subs, state: &mut NameState, flat_type: Flat
}
}
Boolean(b) => ErrorType::Boolean(b),
Erroneous(_) => ErrorType::Error,
}
}
@ -806,6 +824,11 @@ fn restore_content(subs: &mut Subs, content: &Content) {
subs.restore(*ext_var);
}
Boolean(b) => {
for var in b.variables() {
subs.restore(var);
}
}
Erroneous(_) => (),
},
Alias(_, _, args, var) => {

View file

@ -7,6 +7,7 @@ use crate::operator::{ArgSide, BinOp};
use crate::region::Located;
use crate::region::Region;
use crate::subs::Variable;
use crate::uniqueness::boolean_algebra;
use std::fmt;
// The standard modules
@ -39,6 +40,8 @@ pub enum Type {
name: Uppercase,
args: Vec<Type>,
},
/// Boolean type used in uniqueness inference
Boolean(boolean_algebra::Bool),
Variable(Variable),
/// A type error, which will code gen to a runtime error
Erroneous(Problem),
@ -185,6 +188,7 @@ impl fmt::Debug for Type {
}
}
}
Type::Boolean(b) => write!(f, "{:?}", b),
}
}
}
@ -391,6 +395,7 @@ pub enum ErrorType {
Vec<(Lowercase, ErrorType)>,
Box<ErrorType>,
),
Boolean(boolean_algebra::Bool),
Error,
}

View file

@ -5,6 +5,7 @@ use crate::subs::Content::{self, *};
use crate::subs::{Descriptor, FlatType, Mark, OptVariable, Subs, Variable};
use crate::types::RecordFieldLabel;
use crate::types::{Mismatch, Problem};
use crate::uniqueness::boolean_algebra;
type Pool = Vec<Variable>;
@ -15,9 +16,9 @@ struct Context {
second_desc: Descriptor,
}
struct RecordStructure {
fields: ImMap<RecordFieldLabel, Variable>,
ext: Variable,
pub struct RecordStructure {
pub fields: ImMap<RecordFieldLabel, Variable>,
pub ext: Variable,
}
struct TagUnionStructure {
@ -404,6 +405,18 @@ fn unify_flat_type(
unify_tag_union(subs, pool, ctx, union1, union2)
}
(Boolean(b1), Boolean(b2)) => {
if let Some(substitution) = boolean_algebra::try_unify(b1.clone(), b2.clone()) {
for (var, replacement) in substitution {
subs.set_content(var, Structure(FlatType::Boolean(replacement)));
}
vec![]
} else {
mismatch()
}
}
(
Apply {
module_name: l_module_name,
@ -506,7 +519,7 @@ fn unify_flex(
}
}
fn gather_fields(
pub fn gather_fields(
subs: &mut Subs,
fields: ImMap<RecordFieldLabel, Variable>,
var: Variable,

View file

@ -1,168 +1,573 @@
use crate::collections::ImMap;
// Based on work by Edsko de Vries for TFP 2007
//
// http://www.edsko.net/tcd/pub/tfp07-prototype-snapshot.tar.gz
//
// Thank you Edsko!
//
// quoting from that work:
//
// > Main reference for this module is "Boolean Reasoning: The Logic of
// > Boolean Equations" by Frank Markham Brown.
use crate::collections::{ImMap, ImSet};
use crate::subs::Variable;
pub fn unify(typ: &BooleanAlgebra, _expected: &BooleanAlgebra) -> Option<Substitution> {
// find the most general unifier.
let mut val = typ.clone();
let fv = val.variables();
let (mgu, consistency_condition) = boolean_unification(&mut val, &fv);
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Bool {
Zero,
One,
And(Box<Bool>, Box<Bool>),
Or(Box<Bool>, Box<Bool>),
Not(Box<Bool>),
Variable(Variable),
}
// the consistency_condition must be a base term, and must evaluate to False
if !consistency_condition.evaluate() {
Some(mgu)
use self::Bool::*;
#[inline(always)]
pub fn not(nested: Bool) -> Bool {
Not(Box::new(nested))
}
#[inline(always)]
pub fn and(left: Bool, right: Bool) -> Bool {
And(Box::new(left), Box::new(right))
}
#[inline(always)]
pub fn or(left: Bool, right: Bool) -> Bool {
Or(Box::new(left), Box::new(right))
}
pub fn any<I>(mut it: I) -> Bool
where
I: Iterator<Item = Bool>,
{
if let Some(first) = it.next() {
it.fold(first, or)
} else {
Zero
}
}
pub fn all(terms: Product<Bool>) -> Bool {
let mut it = terms.into_iter();
if let Some(first) = it.next() {
it.fold(first, and)
} else {
One
}
}
type Substitution = ImMap<Variable, Bool>;
type Product<A> = ImSet<A>;
type Sum<A> = ImSet<A>;
#[allow(clippy::should_implement_trait)]
impl Bool {
pub fn variables(&self) -> ImSet<Variable> {
let mut result = ImSet::default();
self.variables_help(&mut result);
result
}
fn variables_help(&self, vars: &mut ImSet<Variable>) {
match self {
Zero => (),
One => (),
And(left, right) => {
left.variables_help(vars);
right.variables_help(vars)
}
Or(left, right) => {
left.variables_help(vars);
right.variables_help(vars)
}
Not(nested) => nested.variables_help(vars),
Variable(var) => {
vars.insert(*var);
}
};
}
pub fn map_variables<F>(&self, f: &mut F) -> Self
where
F: FnMut(Variable) -> Variable,
{
match self {
Zero => Zero,
One => One,
And(left, right) => and(left.map_variables(f), right.map_variables(f)),
Or(left, right) => or(left.map_variables(f), right.map_variables(f)),
Not(nested) => not(nested.map_variables(f)),
Variable(current) => Variable(f(*current)),
}
}
pub fn substitute(&self, substitutions: &Substitution) -> Self {
match self {
Zero => Zero,
One => One,
And(left, right) => and(
left.substitute(substitutions),
right.substitute(substitutions),
),
Or(left, right) => or(
left.substitute(substitutions),
right.substitute(substitutions),
),
Not(nested) => not(nested.substitute(substitutions)),
Variable(current) => match substitutions.get(current) {
Some(new) => new.clone(),
None => Variable(*current),
},
}
}
#[inline(always)]
pub fn is_var(&self) -> bool {
match self {
Variable(_) => true,
_ => false,
}
}
#[inline(always)]
pub fn not(nested: Bool) -> Bool {
not(nested)
}
#[inline(always)]
pub fn and(left: Bool, right: Bool) -> Bool {
and(left, right)
}
#[inline(always)]
pub fn or(left: Bool, right: Bool) -> Bool {
or(left, right)
}
}
pub fn simplify(term: Bool) -> Bool {
let normalized = normalize_term(term);
let a = term_to_sop(normalized);
let b = normalize_sop(a);
let after_bcf = bcf(b);
sop_to_term(simplify_sop(after_bcf))
}
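A few identities the pipeline above should preserve, sketched in the style of the boolean_algebra tests near the end of this diff (which already import roc::uniqueness::boolean_algebra):
use roc::subs::Variable;
use roc::uniqueness::boolean_algebra::{self, Bool};

fn simplify_examples() {
    // An arbitrary variable id, built with the debug constructor added to subs.rs above.
    let a = Bool::Variable(Variable::unsafe_debug_variable(1));

    // x + 0 = x and 1 * x = x, so both collapse to the bare variable.
    assert_eq!(boolean_algebra::simplify(Bool::or(Bool::Zero, a.clone())), a);
    assert_eq!(boolean_algebra::simplify(Bool::and(Bool::One, a.clone())), a);

    // x * 0 = 0, no matter what x is.
    assert_eq!(boolean_algebra::simplify(Bool::and(a, Bool::Zero)), Bool::Zero);
}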
#[inline(always)]
pub fn sop_to_term(sop: Sop) -> Bool {
any(sop.into_iter().map(all))
}
pub fn simplify_sop(sop: Sop) -> Sop {
// sort by length longest to shortest (proxy for how many variables there are)
let mut sorted: Vec<ImSet<Bool>> = sop.clone().into_iter().collect();
sorted.sort_by(|x, y| y.len().cmp(&x.len()));
// filter out anything that is included in the remaining elements
let mut active = sop;
let mut result = ImSet::default();
for t in sorted {
if !(active.remove(&t).is_some() && included(all(t.clone()), sop_to_term(active.clone()))) {
result.insert(t);
}
}
result
}
/// Blake canonical form
fn bcf(sop: Sop) -> Sop {
absorptive(syllogistic(sop))
}
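/// Iterated consensus: keep adding consensus terms for pairs of products until no new ones appear.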
fn syllogistic(terms: Sop) -> Sop {
let mut cs_prime = ImSet::default();
for c in cartesian_product(terms.clone())
.iter()
.filter_map(|(x, y)| consensus(x, y))
{
if !terms
.clone()
.into_iter()
.any(|x| included_term(c.clone(), x))
{
cs_prime.insert(c);
}
}
if cs_prime.is_empty() {
terms
} else {
syllogistic(terms.union(cs_prime))
}
}
/// Absorption (apply the identity p + pq = p)
fn absorptive(sop: Sop) -> Sop {
// TODO this is extremely inefficient!
let mut accum: Vec<Product<Bool>> = Vec::new();
for product in sop {
accum = accum
.into_iter()
.filter(|v| !absorbs(&product, v))
.collect();
accum.push(product);
}
accum.into()
}
/// Does p absorb q? (can we replace p + q by p?)
/// TODO investigate: I think either the comment above or the implementation is wrong.
fn absorbs(p: &Product<Bool>, q: &Product<Bool>) -> bool {
p.iter().all(|x| q.contains(x))
}
fn consensus(p: &Product<Bool>, q: &Product<Bool>) -> Option<Product<Bool>> {
let mut it = oppositions(p, q).into_iter();
// oppositions must have exactly one element
if let Some(x) = it.next() {
if it.next().is_none() {
let compx = not(x.clone());
return Some(
p.clone()
.into_iter()
.chain(q.clone().into_iter())
.filter(|y| *y != x && *y != compx)
.collect(),
);
}
}
None
}
fn oppositions(ps: &Product<Bool>, qs: &Product<Bool>) -> Product<Bool> {
let it1 = ps
.clone()
.into_iter()
.filter(|p| qs.contains(&not(p.clone())));
let it2 = qs
.clone()
.into_iter()
.filter(|q| ps.contains(&not(q.clone())));
it1.chain(it2).collect()
}
pub fn try_unify(p: Bool, q: Bool) -> Option<Substitution> {
let (sub, consistency) = unify(p, q);
let substitution = sub
.into_iter()
.filter(|(x, p)| *p != Variable(*x))
.collect();
if consistency == Zero {
Some(substitution)
} else {
// the unification has no solution
None
}
}
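And a sketch of how try_unify is meant to behave, under the same module-path assumptions as the tests below: two distinct variables always unify (the lower-numbered one is eliminated first and mapped to the other), while the constants 1 and 0 never do.
use roc::subs::Variable;
use roc::uniqueness::boolean_algebra::{self, Bool};

fn try_unify_examples() {
    let a = Variable::unsafe_debug_variable(1);
    let b = Variable::unsafe_debug_variable(2);

    // The consistency condition simplifies to 0, so we get a substitution;
    // identity mappings are filtered out, leaving only a -> b.
    let sub = boolean_algebra::try_unify(Bool::Variable(a), Bool::Variable(b))
        .expect("two distinct variables should always unify");
    assert_eq!(sub.get(&a), Some(&Bool::Variable(b)));

    // 1 and 0 can never be made equal: the consistency condition stays 1.
    assert!(boolean_algebra::try_unify(Bool::One, Bool::Zero).is_none());
}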
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum BooleanAlgebra {
Ground(bool),
Disjunction(Box<BooleanAlgebra>, Box<BooleanAlgebra>),
Conjunction(Box<BooleanAlgebra>, Box<BooleanAlgebra>),
Negation(Box<BooleanAlgebra>),
Variable(Variable),
fn unify(p: Bool, q: Bool) -> (Substitution, Bool) {
let condition = q.is_var() && !p.is_var();
let t = if condition {
or(and(q.clone(), not(p.clone())), and(not(q), p))
} else {
or(and(p.clone(), not(q.clone())), and(not(p), q))
};
unify0(t.variables(), t)
}
impl BooleanAlgebra {
pub fn simplify(&mut self) {
*self = simplify(self.clone());
fn unify0(names: ImSet<Variable>, mut term: Bool) -> (Substitution, Bool) {
// NOTE sort is required for stable test order that is the same as the Haskell ref. impl.
let mut substitution: Substitution = ImMap::default();
let mut sorted_names: Vec<Variable> = names.into_iter().collect();
sorted_names.sort();
for x in sorted_names.into_iter() {
let mut sub_zero = ImMap::default();
sub_zero.insert(x, Zero);
let mut sub_one = ImMap::default();
sub_one.insert(x, One);
let subbed_zero = term.substitute(&sub_zero);
let subbed_one = term.substitute(&sub_one);
term = and(subbed_zero.clone(), subbed_one.clone());
let replacement = simplify(or(
subbed_zero.substitute(&substitution),
and(Variable(x), not(subbed_one.substitute(&substitution))),
));
substitution.insert(x, replacement);
}
pub fn substitute(&self, var: Variable, expanded: &BooleanAlgebra) -> Self {
use BooleanAlgebra::*;
match self {
Variable(v) if v == &var => expanded.clone(),
Variable(_) | Ground(_) => self.clone(),
(substitution, simplify(term))
}
Negation(t) => Negation(Box::new(t.substitute(var, expanded))),
// --- Simplification ---
Disjunction(l, r) => Disjunction(
Box::new(l.substitute(var, expanded)),
Box::new(r.substitute(var, expanded)),
),
/// Normalization of terms. Applies (in bottom-up fashion) the identities
///
/// x * 1 = x
/// x * 0 = 0
/// x + 1 = 1
/// x + 0 = x
/// !1 = 0
/// !0 = 1
pub fn normalize_term(term: Bool) -> Bool {
match term {
And(left, right) => {
let p = normalize_term(*left);
let q = normalize_term(*right);
Conjunction(l, r) => Conjunction(
Box::new(l.substitute(var, expanded)),
Box::new(r.substitute(var, expanded)),
),
match (p == One, p == Zero, q == One, q == Zero) {
(true, _, _, _) => q,
(_, true, _, _) => Zero,
(_, _, true, _) => p,
(_, _, _, true) => Zero,
_ => and(p, q),
}
}
Or(left, right) => {
let p = normalize_term(*left);
let q = normalize_term(*right);
match (p == One, p == Zero, q == One, q == Zero) {
(true, _, _, _) => One,
(_, true, _, _) => q,
(_, _, true, _) => One,
(_, _, _, true) => p,
_ => or(p, q),
}
}
Not(nested) => {
let p = normalize_term(*nested);
match (p == One, p == Zero) {
(true, _) => Zero,
(_, true) => One,
_ => not(p),
}
}
_ => term,
}
}
// --- Inclusion ---
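/// g is included in h (g implies h, i.e. g <= h) exactly when g * !h is a contradiction.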
pub fn included(g: Bool, h: Bool) -> bool {
contradiction(and(g, not(h)))
}
fn included_term(g: Product<Bool>, h: Product<Bool>) -> bool {
included(all(g), all(h))
}
// --- Tautology / Contradiction ---
fn tautology(term: Bool) -> bool {
normalize_pos(term_to_pos(normalize_term(term))).is_empty()
}
pub fn contradiction(term: Bool) -> bool {
tautology(not(term))
}
// --- Normalization of POS / SOP ---
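// Pos: a CNF term as a set of clauses, each clause a set of literals.
// Sop: a DNF term as a set of products, each product a set of literals.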
type Pos = Product<Sum<Bool>>;
type Sop = Sum<Product<Bool>>;
fn term_to_pos(term: Bool) -> Pos {
conj_to_list(cnf(term))
.into_iter()
.map(disj_to_list)
.collect()
}
pub fn term_to_sop(term: Bool) -> Sop {
disj_to_list(dnf(term))
.into_iter()
.map(conj_to_list)
.collect()
}
fn conj_to_list(term: Bool) -> Product<Bool> {
match term {
And(left, right) => {
let p = conj_to_list(*left);
let q = conj_to_list(*right);
p.union(q)
}
_ => unit(term),
}
}
fn disj_to_list(term: Bool) -> Sum<Bool> {
match term {
Or(left, right) => {
let p = disj_to_list(*left);
let q = disj_to_list(*right);
p.union(q)
}
_ => unit(term),
}
}
fn normalize_pos(pos: Pos) -> Pos {
let singleton_one = unit(One);
pos.into_iter()
.map(normalize_disj)
.filter(|normalized| *normalized != singleton_one)
.collect()
}
pub fn normalize_sop(sop: Sop) -> Sop {
let singleton_zero = unit(Zero);
sop.into_iter()
.map(normalize_conj)
.filter(|normalized| *normalized != singleton_zero)
.collect()
}
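/// Every unordered pair of distinct elements, produced once (as (later, earlier) in iteration order).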
fn cartesian_product<A>(set: ImSet<A>) -> ImSet<(A, A)>
where
A: Eq + Clone + core::hash::Hash,
{
let mut result = ImSet::default();
for x in set.clone().into_iter() {
for y in set.clone() {
if x == y {
break;
}
result.insert((x.clone(), y));
}
}
pub fn evaluate(&self) -> bool {
use BooleanAlgebra::*;
match self {
Variable(v) => panic!(
"Cannot evaluate boolean expression with unbound variable {:?}",
v
),
Ground(b) => *b,
Negation(t) => !(t.evaluate()),
Disjunction(l, r) => l.evaluate() || r.evaluate(),
Conjunction(l, r) => l.evaluate() && r.evaluate(),
}
}
result
}
pub fn variables(&self) -> Vec<Variable> {
let mut vars = Vec::new();
variables_help(self, &mut vars);
vars
fn unit<A>(a: A) -> ImSet<A>
where
A: Clone + Eq + core::hash::Hash,
{
let mut result = ImSet::default();
result.insert(a);
result
}
fn normalize_disj(mut sum: Sum<Bool>) -> Sum<Bool> {
let is_always_true =
sum.clone().into_iter().any(|x| sum.contains(&not(x))) || sum.contains(&One);
if is_always_true {
unit(One)
} else {
sum.remove(&Zero);
sum
}
}
fn variables_help(bconstraint: &BooleanAlgebra, variables: &mut Vec<Variable>) {
use BooleanAlgebra::*;
fn normalize_conj(mut product: Product<Bool>) -> Product<Bool> {
let is_always_false = product
.clone()
.into_iter()
.any(|x| product.contains(&not(x)))
|| product.contains(&Zero);
match bconstraint {
Variable(v) => variables.push(v.clone()),
Ground(_) => {}
Negation(t) => variables_help(t, variables),
Disjunction(l, r) => {
variables_help(l, variables);
variables_help(r, variables);
}
Conjunction(l, r) => {
variables_help(l, variables);
variables_help(r, variables);
}
if is_always_false {
unit(Zero)
} else {
product.remove(&One);
product
}
}
fn simplify(bconstraint: BooleanAlgebra) -> BooleanAlgebra {
use BooleanAlgebra::*;
match bconstraint {
Variable(_) | Ground(_) => bconstraint,
Negation(nested) => match simplify(*nested) {
Ground(t) => Ground(!t),
other => Negation(Box::new(other)),
},
Disjunction(l, r) => match (simplify(*l), simplify(*r)) {
(Ground(true), _) => Ground(true),
(_, Ground(true)) => Ground(true),
(Ground(false), rr) => rr,
(ll, Ground(false)) => ll,
(ll, rr) => Disjunction(Box::new(ll), Box::new(rr)),
},
Conjunction(l, r) => match (simplify(*l), simplify(*r)) {
(Ground(true), rr) => rr,
(ll, Ground(true)) => ll,
(Ground(false), _) => Ground(false),
(_, Ground(false)) => Ground(false),
(ll, rr) => Conjunction(Box::new(ll), Box::new(rr)),
},
/// Conjunctive Normal Form (CNF)
fn cnf(term: Bool) -> Bool {
match nnf(term) {
And(p, q) => and(cnf(*p), cnf(*q)),
Or(p, q) => distr_cnf(cnf(*p), cnf(*q)),
other => other,
}
}
#[derive(Debug, Clone, PartialEq)]
pub struct Substitution {
pairs: ImMap<Variable, BooleanAlgebra>,
}
impl Substitution {
pub fn empty() -> Self {
Substitution {
pairs: ImMap::default(),
}
}
pub fn insert(&mut self, var: Variable, term: BooleanAlgebra) {
self.pairs.insert(var, term);
}
pub fn get(&self, var: Variable) -> Option<&BooleanAlgebra> {
self.pairs.get(&var)
fn distr_cnf(p: Bool, q: Bool) -> Bool {
match p {
And(p1, p2) => and(distr_cnf(*p1, q.clone()), distr_cnf(*p2, q)),
_ => distr_cnf_help(p, q),
}
}
fn boolean_unification(
term: &mut BooleanAlgebra,
variables: &[Variable],
) -> (Substitution, BooleanAlgebra) {
use BooleanAlgebra::*;
let mut substitution = Substitution::empty();
for var in variables {
let t0 = term.clone().substitute(*var, &Ground(false));
let t1 = term.clone().substitute(*var, &Ground(true));
*term = Conjunction(Box::new(t1.clone()), Box::new(t0.clone()));
term.simplify();
let mut sub = Disjunction(
Box::new(t0),
Box::new(Conjunction(
Box::new(Variable(*var)),
Box::new(Negation(Box::new(t1))),
)),
);
sub.simplify();
substitution.insert(var.clone(), sub);
fn distr_cnf_help(p: Bool, q: Bool) -> Bool {
match q {
And(q1, q2) => and(distr_cnf(p.clone(), *q1), distr_cnf(p, *q2)),
_ => or(p, q),
}
}
/// Disjunctive Normal Form (DNF)
pub fn dnf(term: Bool) -> Bool {
match nnf(term) {
And(p, q) => distr_dnf(dnf(*p), dnf(*q)),
Or(p, q) => or(dnf(*p), dnf(*q)),
other => other,
}
}
fn distr_dnf(p: Bool, q: Bool) -> Bool {
match p {
Or(p1, p2) => or(distr_dnf(*p1, q.clone()), distr_dnf(*p2, q)),
_ => distr_dnf_help(p, q),
}
}
fn distr_dnf_help(p: Bool, q: Bool) -> Bool {
match q {
Or(q1, q2) => or(distr_dnf(p.clone(), *q1), distr_dnf(p, *q2)),
_ => and(p, q),
}
}
/// Negation Normal Form
pub fn nnf(term: Bool) -> Bool {
match term {
Not(n) => nnf_help(*n),
_ => term,
}
}
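/// NNF of the negation: nnf_help(t) computes nnf(!t), pushing the negation
/// inward via De Morgan's laws.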
pub fn nnf_help(term: Bool) -> Bool {
match term {
Zero => One,
One => Zero,
And(p, q) => or(nnf_help(*p), nnf_help(*q)),
Or(p, q) => and(nnf_help(*p), nnf_help(*q)),
// double negation
Not(nested) => nnf(*nested),
Variable(_) => not(term),
}
(substitution, term.clone())
}

View file

@ -5,6 +5,7 @@ use crate::types::Constraint::{self, *};
use crate::types::Expected::{self, *};
use crate::types::Type::{self, *};
use crate::types::{self, LetConstraint, Reason};
use crate::uniqueness::boolean_algebra::Bool;
pub fn exists(flex_vars: Vec<Variable>, constraint: Constraint) -> Constraint {
Constraint::Let(Box::new(LetConstraint {
@ -18,7 +19,7 @@ pub fn exists(flex_vars: Vec<Variable>, constraint: Constraint) -> Constraint {
pub fn lift(var_store: &VarStore, typ: Type) -> Type {
let uniq_var = var_store.fresh();
let uniq_type = Variable(uniq_var);
let uniq_type = Type::Boolean(Bool::Variable(uniq_var));
attr_type(uniq_type, typ)
}

View file

@ -1,11 +1,10 @@
use crate::can::def::Def;
use crate::can::expr::Expr;
use crate::can::expr::Field;
use crate::can::expr::Output;
use crate::can::ident::Lowercase;
use crate::can::pattern;
use crate::can::pattern::{Pattern, RecordDestruct};
use crate::can::procedure::{Procedure, References};
use crate::can::procedure::Procedure;
use crate::can::symbol::Symbol;
use crate::collections::{ImMap, SendMap};
use crate::constrain::expr::{Info, Rigids};
@ -35,17 +34,17 @@ pub struct Env {
pub procedures: ImMap<Symbol, Procedure>,
}
pub fn canonicalize_declaration(
pub fn constrain_declaration(
var_store: &VarStore,
region: Region,
loc_expr: Located<Expr>,
_declared_idents: &ImMap<Ident, (Symbol, Region)>,
expected: Expected<Type>,
) -> (Output, Constraint) {
) -> Constraint {
let rigids = ImMap::default();
let mut var_usage = VarUsage::default();
canonicalize_expr(
constrain_expr(
&rigids,
var_store,
&mut var_usage,
@ -61,7 +60,7 @@ pub struct PatternState {
pub constraints: Vec<Constraint>,
}
fn canonicalize_pattern(
fn constrain_pattern(
var_store: &VarStore,
state: &mut PatternState,
pattern: &Located<Pattern>,
@ -120,27 +119,23 @@ fn canonicalize_pattern(
} in patterns
{
let pat_type = Type::Variable(*var);
let pattern_expected = PExpected::NoExpectation(pat_type.clone());
let expected = PExpected::NoExpectation(pat_type.clone());
match guard {
Some((_guard_var, loc_guard)) => {
state.headers.insert(
symbol.clone(),
Located {
region: pattern.region,
value: pat_type.clone(),
},
);
if !state.headers.contains_key(&symbol) {
state.headers.insert(
symbol.clone(),
Located::at(pattern.region, pat_type.clone()),
);
}
canonicalize_pattern(var_store, state, loc_guard, pattern_expected);
}
None => {
canonicalize_pattern(var_store, state, pattern, pattern_expected);
}
field_types.insert(label.clone(), pat_type.clone());
// TODO investigate: shouldn't guard_var be constrained somewhere?
if let Some((_guard_var, loc_guard)) = guard {
constrain_pattern(var_store, state, loc_guard, expected);
}
state.vars.push(*var);
field_types.insert(label.clone(), pat_type);
}
let record_type =
@ -165,51 +160,39 @@ fn canonicalize_pattern(
}
}
pub fn canonicalize_expr(
pub fn constrain_expr(
rigids: &Rigids,
var_store: &VarStore,
var_usage: &mut VarUsage,
region: Region,
expr: &Expr,
expected: Expected<Type>,
) -> (Output, Constraint) {
) -> Constraint {
pub use crate::can::expr::Expr::*;
match expr {
Int(_, _) => {
let constraint = constrain::int_literal(var_store, expected, region);
(Output::default(), constraint)
}
Float(_, _) => {
let constraint = constrain::float_literal(var_store, expected, region);
(Output::default(), constraint)
}
Int(_, _) => constrain::int_literal(var_store, expected, region),
Float(_, _) => constrain::float_literal(var_store, expected, region),
BlockStr(_) | Str(_) => {
let inferred = constrain::lift(var_store, constrain::str_type());
let constraint = Eq(inferred, expected, region);
(Output::default(), constraint)
}
EmptyRecord => {
let constraint = Eq(constrain::lift(var_store, EmptyRec), expected, region);
(Output::default(), constraint)
Eq(inferred, expected, region)
}
EmptyRecord => Eq(constrain::lift(var_store, EmptyRec), expected, region),
Record(variable, fields) => {
// NOTE: canonicalization guarantees at least one field
// zero fields generates an EmptyRecord
let mut field_types = SendMap::default();
let mut field_vars = Vec::with_capacity(fields.len());
// Constraints need capacity for each field + 1 for the record itself.
let mut constraints = Vec::with_capacity(1 + fields.len());
let mut output = Output::default();
// Constraints need capacity for each field + 1 for the record itself + 1 for ext
let mut constraints = Vec::with_capacity(2 + fields.len());
for (label, ref field) in fields.iter() {
let field_var = var_store.fresh();
let field_type = Variable(field_var);
let field_expected = Expected::NoExpectation(field_type.clone());
let loc_expr = &*field.loc_expr;
let (field_out, field_con) = canonicalize_expr(
let field_con = constrain_expr(
rigids,
var_store,
var_usage,
@ -222,7 +205,6 @@ pub fn canonicalize_expr(
field_types.insert(label.clone(), field_type);
constraints.push(field_con);
output.references = output.references.union(field_out.references);
}
let record_type = constrain::lift(
@ -243,7 +225,7 @@ pub fn canonicalize_expr(
let constraint = exists(field_vars, And(constraints));
(output, constraint)
(constraint)
}
Tag { .. } => {
panic!("TODO implement tag");
@ -252,14 +234,12 @@ pub fn canonicalize_expr(
if loc_elems.is_empty() {
let list_var = *variable;
let inferred = constrain::lift(var_store, constrain::empty_list_type(list_var));
let constraint = Eq(inferred, expected, region);
(Output::default(), constraint)
Eq(inferred, expected, region)
} else {
// constrain `expected ~ List a` and that all elements `~ a`.
let list_var = *variable; // `v` in the type (List v)
let list_type = Type::Variable(list_var);
let mut constraints = Vec::with_capacity(1 + (loc_elems.len() * 2));
let mut references = References::new();
for (elem_var, loc_elem) in loc_elems.iter() {
let elem_type = Variable(*elem_var);
@ -269,7 +249,7 @@ pub fn canonicalize_expr(
Expected::ForReason(Reason::ElemInList, elem_type, region),
region,
);
let (elem_out, constraint) = canonicalize_expr(
let constraint = constrain_expr(
rigids,
var_store,
var_usage,
@ -280,27 +260,20 @@ pub fn canonicalize_expr(
constraints.push(list_elem_constraint);
constraints.push(constraint);
references = references.union(elem_out.references);
}
let inferred = constrain::lift(var_store, constrain::list_type(list_type));
constraints.push(Eq(inferred, expected, region));
let mut output = Output::default();
output.references = references;
// A list literal is never a tail call!
output.tail_call = None;
(output, And(constraints))
And(constraints)
}
}
Var {
symbol_for_lookup, ..
} => {
var_usage.register(symbol_for_lookup);
match var_usage.get_usage(symbol_for_lookup) {
let usage = var_usage.get_usage(symbol_for_lookup);
match usage {
Some(sharing::ReferenceCount::Shared) => {
// the variable is used/consumed more than once, so it must be Shared
let val_var = var_store.fresh();
@ -311,30 +284,24 @@ pub fn canonicalize_expr(
let attr_type = constrain::attr_type(uniq_type.clone(), val_type);
(
Output::default(),
And(vec![
Lookup(symbol_for_lookup.clone(), expected.clone(), region),
Eq(attr_type, expected, region),
Eq(
uniq_type,
Expected::NoExpectation(constrain::shared_type()),
region,
),
]),
)
And(vec![
Lookup(symbol_for_lookup.clone(), expected.clone(), region),
Eq(attr_type, expected, region),
Eq(
uniq_type,
Expected::NoExpectation(constrain::shared_type()),
region,
),
])
}
Some(sharing::ReferenceCount::Unique) => {
// no additional constraints, keep uniqueness unbound
(
Output::default(),
Lookup(symbol_for_lookup.clone(), expected.clone(), region),
)
Lookup(symbol_for_lookup.clone(), expected.clone(), region)
}
None => panic!("symbol not analyzed"),
}
}
Closure(_fn_var, _symbol, _recursion, args, boxed_body) => {
Closure(fn_var, _symbol, _recursion, args, boxed_body) => {
let (body, ret_var) = &**boxed_body;
// first, generate constraints for the arguments
@ -354,7 +321,7 @@ pub fn canonicalize_expr(
for (arg_var, pattern) in args {
let arg_typ = Variable(*arg_var);
canonicalize_pattern(
constrain_pattern(
var_store,
&mut state,
&pattern,
@ -371,7 +338,7 @@ pub fn canonicalize_expr(
Type::Function(arg_types, Box::new(ret_type.clone())),
);
let (output, ret_constraint) = canonicalize_expr(
let ret_constraint = constrain_expr(
rigids,
var_store,
var_usage,
@ -388,7 +355,7 @@ pub fn canonicalize_expr(
}
let defs_constraint = And(state.constraints);
let constraint = exists(
exists(
vars,
And(vec![
Let(Box::new(LetConstraint {
@ -399,11 +366,10 @@ pub fn canonicalize_expr(
ret_constraint,
})),
// "the closure's type is equal to expected type"
Eq(fn_typ, expected, region),
Eq(fn_typ, expected.clone(), region),
Eq(Type::Variable(*fn_var), expected, region),
]),
);
(output, constraint)
)
}
Call(boxed, loc_args, _) => {
@ -416,7 +382,7 @@ pub fn canonicalize_expr(
let mut vars = Vec::with_capacity(2 + loc_args.len());
// Canonicalize the function expression and its arguments
let (_, fn_con) = canonicalize_expr(
let fn_con = constrain_expr(
rigids,
var_store,
var_usage,
@ -442,7 +408,7 @@ pub fn canonicalize_expr(
};
let expected_arg = Expected::ForReason(reason, arg_type.clone(), region);
let (_, arg_con) = canonicalize_expr(
let arg_con = constrain_expr(
rigids,
var_store,
var_usage,
@ -462,23 +428,20 @@ pub fn canonicalize_expr(
region,
);
(
Output::default(),
exists(
vars,
And(vec![
fn_con,
Eq(fn_type, expected_fn_type, fn_region),
And(arg_cons),
Eq(ret_type, expected, region),
]),
),
exists(
vars,
And(vec![
fn_con,
Eq(fn_type, expected_fn_type, fn_region),
And(arg_cons),
Eq(ret_type, expected, region),
]),
)
}
LetRec(defs, loc_ret, _) => {
// NOTE doesn't currently unregister bound symbols
// may be a problem when symbols are not globally unique
let (_, body_con) = canonicalize_expr(
let body_con = constrain_expr(
rigids,
var_store,
var_usage,
@ -486,15 +449,12 @@ pub fn canonicalize_expr(
&loc_ret.value,
expected,
);
(
Output::default(),
constrain_recursive_defs(rigids, var_store, var_usage, defs, body_con),
)
constrain_recursive_defs(rigids, var_store, var_usage, defs, body_con)
}
LetNonRec(def, loc_ret, _) => {
// NOTE doesn't currently unregister bound symbols
// may be a problem when symbols are not globally unique
let (_, body_con) = canonicalize_expr(
let body_con = constrain_expr(
rigids,
var_store,
var_usage,
@ -503,10 +463,7 @@ pub fn canonicalize_expr(
expected,
);
(
Output::default(),
constrain_def(rigids, var_store, var_usage, def, body_con),
)
constrain_def(rigids, var_store, var_usage, def, body_con)
}
If { .. } => panic!("TODO constrain uniq if"),
When {
@ -517,7 +474,7 @@ pub fn canonicalize_expr(
} => {
let cond_var = *cond_var;
let cond_type = Variable(cond_var);
let (mut output, expr_con) = canonicalize_expr(
let expr_con = constrain_expr(
rigids,
var_store,
var_usage,
@ -534,7 +491,7 @@ pub fn canonicalize_expr(
Expected::FromAnnotation(name, arity, _, typ) => {
for (index, (loc_pattern, loc_expr)) in branches.iter().enumerate() {
let mut branch_var_usage = old_var_usage.clone();
let branch_con = canonicalize_when_branch(
let branch_con = constrain_when_branch(
var_store,
&mut branch_var_usage,
rigids,
@ -552,7 +509,6 @@ pub fn canonicalize_expr(
TypedWhenBranch(index),
typ.clone(),
),
&mut output,
);
// required for a case like
@ -584,7 +540,7 @@ pub fn canonicalize_expr(
for (index, (loc_pattern, loc_expr)) in branches.iter().enumerate() {
let mut branch_var_usage = old_var_usage.clone();
let branch_con = canonicalize_when_branch(
let branch_con = constrain_when_branch(
var_store,
&mut branch_var_usage,
rigids,
@ -601,7 +557,6 @@ pub fn canonicalize_expr(
branch_type.clone(),
region,
),
&mut output,
);
// required for a case like
@ -636,7 +591,7 @@ pub fn canonicalize_expr(
}
}
(output, And(constraints))
And(constraints)
}
Update {
@ -648,7 +603,7 @@ pub fn canonicalize_expr(
} => {
let mut fields: SendMap<Lowercase, Type> = SendMap::default();
let mut vars = Vec::with_capacity(updates.len() + 2);
let mut cons = Vec::with_capacity(updates.len() + 1);
let mut cons = Vec::with_capacity(updates.len() + 3);
for (field_name, Field { var, loc_expr, .. }) in updates.clone() {
let (var, tipe, con) = constrain_field_update(
rigids,
@ -695,7 +650,7 @@ pub fn canonicalize_expr(
cons.push(fields_con);
cons.push(record_con);
(Output::default(), exists(vars, And(cons)))
exists(vars, And(cons))
}
Access {
@ -715,7 +670,7 @@ pub fn canonicalize_expr(
constrain::lift(var_store, Type::Record(rec_field_types, Box::new(ext_type)));
let record_expected = Expected::NoExpectation(record_type);
let (output, mut constraint) = canonicalize_expr(
let mut constraint = constrain_expr(
rigids,
var_store,
var_usage,
@ -729,7 +684,7 @@ pub fn canonicalize_expr(
And(vec![constraint, Eq(field_type, expected, region)]),
);
(output, constraint)
constraint
}
Accessor {
@ -746,27 +701,23 @@ pub fn canonicalize_expr(
let record_type =
constrain::lift(var_store, Type::Record(field_types, Box::new(ext_type)));
(
Output::default(),
exists(
vec![*field_var, *ext_var],
Eq(
Type::Function(vec![record_type], Box::new(field_type)),
expected,
region,
),
exists(
vec![*field_var, *ext_var],
Eq(
Type::Function(vec![record_type], Box::new(field_type)),
expected,
region,
),
)
}
RuntimeError(_) => (Output::default(), True),
// _ => panic!("{:?}", expr),
RuntimeError(_) => True,
}
}
// TODO trim down these arguments
#[allow(clippy::too_many_arguments)]
#[inline(always)]
fn canonicalize_when_branch(
fn constrain_when_branch(
var_store: &VarStore,
var_usage: &mut VarUsage,
rigids: &Rigids,
@ -775,9 +726,8 @@ fn canonicalize_when_branch(
loc_expr: &Located<Expr>,
pattern_expected: PExpected<Type>,
expr_expected: Expected<Type>,
_output: &mut Output,
) -> Constraint {
let (_, ret_constraint) = canonicalize_expr(
let ret_constraint = constrain_expr(
rigids,
var_store,
var_usage,
@ -793,7 +743,7 @@ fn canonicalize_when_branch(
};
// mutates the state, so return value is not used
canonicalize_pattern(var_store, &mut state, &loc_pattern, pattern_expected);
constrain_pattern(var_store, &mut state, &loc_pattern, pattern_expected);
Constraint::Let(Box::new(LetConstraint {
rigid_vars: Vec::new(),
@ -819,7 +769,7 @@ fn constrain_def_pattern(
constraints: Vec::with_capacity(1),
};
canonicalize_pattern(var_store, &mut state, loc_pattern, pattern_expected);
constrain_pattern(var_store, &mut state, loc_pattern, pattern_expected);
state
}
@ -870,7 +820,7 @@ pub fn constrain_def(
Region::zero(),
));
canonicalize_expr(
constrain_expr(
&ftv,
var_store,
var_usage,
@ -878,19 +828,15 @@ pub fn constrain_def(
&def.loc_expr.value,
annotation_expected,
)
.1
}
None => {
canonicalize_expr(
rigids,
var_store,
var_usage,
def.loc_expr.region,
&def.loc_expr.value,
Expected::NoExpectation(expr_type),
)
.1
}
None => constrain_expr(
rigids,
var_store,
var_usage,
def.loc_expr.region,
&def.loc_expr.value,
Expected::NoExpectation(expr_type),
),
};
Let(Box::new(LetConstraint {
@ -948,7 +894,7 @@ pub fn rec_defs_help(
constraints: Vec::with_capacity(1),
};
canonicalize_pattern(
constrain_pattern(
var_store,
&mut pattern_state,
&def.loc_pattern,
@ -960,7 +906,7 @@ pub fn rec_defs_help(
let mut new_rigids = Vec::new();
match &def.annotation {
None => {
let (_, expr_con) = canonicalize_expr(
let expr_con = constrain_expr(
rigids,
var_store,
var_usage,
@ -1003,7 +949,7 @@ pub fn rec_defs_help(
AnnotationSource::TypedBody,
annotation.clone(),
);
let (_, expr_con) = canonicalize_expr(
let expr_con = constrain_expr(
&ftv,
var_store,
var_usage,
@ -1075,7 +1021,7 @@ fn constrain_field_update(
let field_type = Type::Variable(var);
let reason = Reason::RecordUpdateValue(field);
let expected = Expected::ForReason(reason, field_type.clone(), region);
let (_, con) = canonicalize_expr(
let con = constrain_expr(
rigids,
var_store,
var_usage,

View file

@ -89,7 +89,6 @@ pub fn can_expr(expr_str: &str) -> (Expr, Output, Vec<Problem>, VarStore, Variab
pub fn uniq_expr(
expr_str: &str,
) -> (
Output,
Output,
Vec<Problem>,
Subs,
@ -108,7 +107,6 @@ pub fn uniq_expr_with(
expr_str: &str,
declared_idents: &ImMap<Ident, (Symbol, Region)>,
) -> (
Output,
Output,
Vec<Problem>,
Subs,
@ -124,12 +122,13 @@ pub fn uniq_expr_with(
let next_var = var_store1.into();
let subs1 = Subs::new(next_var);
// double check
let var_store2 = VarStore::new(next_var);
let variable2 = var_store2.fresh();
let expected2 = Expected::NoExpectation(Type::Variable(variable2));
let (output2, constraint2) = roc::uniqueness::canonicalize_declaration(
let constraint2 = roc::uniqueness::constrain_declaration(
&var_store2,
Region::zero(),
loc_expr,
@ -140,7 +139,6 @@ pub fn uniq_expr_with(
let subs2 = Subs::new(var_store2.into());
(
output2,
output,
problems,
subs1,

View file

@ -1,3 +1,6 @@
#[macro_use]
extern crate maplit;
#[macro_use]
extern crate pretty_assertions;
@ -8,13 +11,24 @@ mod helpers;
#[cfg(test)]
mod test_boolean_algebra {
use roc::subs;
use roc::subs::VarStore;
use roc::uniqueness::boolean_algebra;
use roc::uniqueness::boolean_algebra::BooleanAlgebra::{self, *};
use roc::uniqueness::boolean_algebra::Bool::{self, *};
// HELPERS
fn simplify_eq(mut a: BooleanAlgebra, mut b: BooleanAlgebra) {
assert_eq!(a.simplify(), b.simplify());
fn to_var(v: usize) -> subs::Variable {
subs::Variable::unsafe_debug_variable(v)
}
fn simplify_eq(a: Bool, b: Bool) {
assert_eq!(boolean_algebra::simplify(a), boolean_algebra::simplify(b));
}
fn unify_eq(a: Bool, b: Bool, expected: std::collections::HashMap<roc::subs::Variable, Bool>) {
let result = boolean_algebra::try_unify(a, b);
assert_eq!(result, Some(expected.into()));
}
#[test]
@ -22,10 +36,7 @@ mod test_boolean_algebra {
let var_store = VarStore::default();
let var = var_store.fresh();
simplify_eq(
Disjunction(Box::new(Ground(true)), Box::new(Variable(var))),
Ground(true),
);
simplify_eq(Bool::or(One, Variable(var)), One);
}
#[test]
@ -33,10 +44,7 @@ mod test_boolean_algebra {
let var_store = VarStore::default();
let var = var_store.fresh();
simplify_eq(
Disjunction(Box::new(Ground(false)), Box::new(Variable(var))),
Variable(var),
);
simplify_eq(Bool::or(Zero, Variable(var)), Variable(var));
}
#[test]
@ -44,42 +52,127 @@ mod test_boolean_algebra {
let var_store = VarStore::default();
let var = var_store.fresh();
simplify_eq(
Conjunction(Box::new(Ground(false)), Box::new(Variable(var))),
Ground(false),
simplify_eq(Bool::and(Zero, Variable(var)), Zero);
}
#[test]
fn unify_example_const() {
unify_eq(
Variable(to_var(1)),
Variable(to_var(4)),
hashmap![to_var(1) => Variable(to_var(4))].into(),
);
unify_eq(
Variable(to_var(5)),
Variable(to_var(2)),
hashmap![to_var(2) => Variable(to_var(5))].into(),
);
unify_eq(
Variable(to_var(6)),
Variable(to_var(0)),
hashmap![to_var(0) => Variable(to_var(6))].into(),
);
}
#[test]
fn unify_single_var() {
let var_store = VarStore::default();
let var = var_store.fresh();
let result = boolean_algebra::unify(&Variable(var), &Ground(true));
if let Some(sub) = result {
assert_eq!(Some(&Ground(false)), sub.get(var));
} else {
panic!("result is None");
}
fn unify_example_apply() {
unify_eq(
Variable(to_var(1)),
Variable(to_var(6)),
hashmap![to_var(1) => Variable(to_var(6))].into(),
);
unify_eq(
Variable(to_var(5)),
Variable(to_var(6)),
hashmap![to_var(5) => Variable(to_var(6))].into(),
);
unify_eq(
Variable(to_var(3)),
Variable(to_var(7)),
hashmap![to_var(3) => Variable(to_var(7))].into(),
);
unify_eq(
Variable(to_var(8)),
Variable(to_var(4)),
hashmap![to_var(4) => Variable(to_var(8))].into(),
);
unify_eq(
Variable(to_var(9)),
Variable(to_var(2)),
hashmap![to_var(2) => Variable(to_var(9))].into(),
);
unify_eq(
Variable(to_var(10)),
Variable(to_var(0)),
hashmap![to_var(0) => Variable(to_var(10))].into(),
);
}
#[test]
fn unify_or() {
let var_store = VarStore::default();
let a = var_store.fresh();
let b = var_store.fresh();
let result = boolean_algebra::unify(
&Disjunction(Box::new(Variable(a)), Box::new(Variable(b))),
&Ground(true),
fn unify_example_fst() {
unify_eq(
Variable(to_var(1)),
Variable(to_var(5)),
hashmap![to_var(1) => Variable(to_var(5))].into(),
);
unify_eq(
Variable(to_var(3)),
Variable(to_var(2)),
hashmap![to_var(2) => Variable(to_var(3))].into(),
);
unify_eq(
Variable(to_var(5)),
Bool::or(Bool::or(Variable(to_var(3)), Zero), Zero),
hashmap![to_var(3) => Variable(to_var(5))].into(),
);
unify_eq(
Variable(to_var(6)),
Variable(to_var(0)),
hashmap![to_var(0) => Variable(to_var(6))].into(),
);
}
if let Some(sub) = result {
assert_eq!(Some(&Variable(b)), sub.get(a));
assert_eq!(Some(&Ground(false)), sub.get(b));
} else {
panic!("result is None");
}
#[test]
fn unify_example_idid() {
unify_eq(
Variable(to_var(3)),
Variable(to_var(4)),
hashmap![to_var(3) => Variable(to_var(4))].into(),
);
unify_eq(
Variable(to_var(5)),
Variable(to_var(2)),
hashmap![to_var(2) => Variable(to_var(5))].into(),
);
unify_eq(
Variable(to_var(5)),
Variable(to_var(1)),
hashmap![to_var(1) => Variable(to_var(5))].into(),
);
unify_eq(
Zero,
Variable(to_var(5)),
hashmap![to_var(5) => Zero].into(),
);
unify_eq(
Variable(to_var(7)),
Variable(to_var(4)),
hashmap![to_var(4) => Variable(to_var(7))].into(),
);
unify_eq(
Variable(to_var(8)),
Variable(to_var(9)),
hashmap![to_var(8) => Variable(to_var(9))].into(),
);
unify_eq(
Variable(to_var(7)),
Zero,
hashmap![to_var(7) => Zero].into(),
);
unify_eq(
Zero,
Variable(to_var(0)),
hashmap![to_var(0) => Zero].into(),
);
}
}

378
tests/test_crane.rs Normal file
View file

@ -0,0 +1,378 @@
#[macro_use]
extern crate pretty_assertions;
#[macro_use]
extern crate indoc;
extern crate bumpalo;
extern crate inkwell;
extern crate roc;
mod helpers;
#[cfg(test)]
mod test_crane {
use crate::helpers::can_expr;
use bumpalo::Bump;
use cranelift::prelude::*;
use cranelift_codegen::isa;
use cranelift_codegen::settings::{self};
use cranelift_module::{default_libcall_names, Linkage, Module};
use cranelift_simplejit::{SimpleJITBackend, SimpleJITBuilder};
use roc::collections::{ImMap, MutMap};
use roc::crane::build::{build_expr, declare_proc, define_proc_body, Env, ScopeEntry};
use roc::crane::convert::content_to_crane_type;
use roc::infer::infer_expr;
use roc::mono::expr::Expr;
use roc::subs::Subs;
use std::mem;
use target_lexicon::HOST;
macro_rules! assert_evals_to {
($src:expr, $expected:expr, $ty:ty) => {
let arena = Bump::new();
let mut module: Module<SimpleJITBackend> =
Module::new(SimpleJITBuilder::new(default_libcall_names()));
let mut ctx = module.make_context();
let mut func_ctx = FunctionBuilderContext::new();
let (expr, _output, _problems, var_store, variable, constraint) = can_expr($src);
let mut subs = Subs::new(var_store.into());
let mut unify_problems = Vec::new();
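// Infer the type of the canonicalized expression; its Content determines
// the Cranelift return type of the generated main function below.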
let content = infer_expr(&mut subs, &mut unify_problems, &constraint, variable);
let shared_builder = settings::builder();
let shared_flags = settings::Flags::new(shared_builder);
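// Look up a code-generation target for the host machine so the JIT emits
// native code; bail out if the host ISA is unsupported.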
let cfg = match isa::lookup(HOST) {
Err(err) => {
panic!(
"Unsupported target ISA for test runner {:?} - error: {:?}",
HOST, err
);
}
Ok(isa_builder) => {
let isa = isa_builder.finish(shared_flags);
isa.frontend_config()
}
};
let main_fn_name = "$Test.main";
// Compute main_fn_ret_type before moving subs to Env
let main_ret_type = content_to_crane_type(&content, &mut subs, cfg)
.expect("Unable to infer type for test expr");
// Compile and add all the Procs before adding main
let mut procs = MutMap::default();
let env = Env {
arena: &arena,
subs,
cfg,
};
// Populate Procs and Subs, and get the low-level Expr from the canonical Expr
let mono_expr = Expr::new(&arena, &env.subs, expr, &mut procs);
let mut scope = ImMap::default();
let mut declared = Vec::with_capacity(procs.len());
// Declare all the Procs, then insert them into scope so their bodies
// can look up their Funcs in scope later when calling each other by value.
for (name, opt_proc) in procs.iter() {
if let Some(proc) = opt_proc {
let (func_id, sig) = declare_proc(&env, &mut module, name.clone(), proc);
declared.push((proc.clone(), sig.clone(), func_id));
scope.insert(name.clone(), ScopeEntry::Func { func_id, sig });
}
}
// Now that scope includes all the Procs, we can build their bodies.
for (proc, sig, fn_id) in declared {
define_proc_body(
&env,
&mut ctx,
&mut module,
fn_id,
&scope,
sig,
proc,
&procs,
);
}
// Add main itself
let mut sig = module.make_signature();
sig.returns.push(AbiParam::new(main_ret_type));
let main_fn = module
.declare_function(main_fn_name, Linkage::Local, &sig)
.unwrap();
ctx.func.signature = sig;
ctx.func.name = ExternalName::user(0, main_fn.as_u32());
{
let mut builder: FunctionBuilder =
FunctionBuilder::new(&mut ctx.func, &mut func_ctx);
let ebb = builder.create_ebb();
builder.switch_to_block(ebb);
// TODO try deleting this line and seeing if everything still works.
builder.append_ebb_params_for_function_params(ebb);
let main_body =
build_expr(&env, &scope, &mut module, &mut builder, &mono_expr, &procs);
builder.ins().return_(&[main_body]);
builder.seal_all_blocks();
builder.finalize();
}
module.define_function(main_fn, &mut ctx).unwrap();
module.clear_context(&mut ctx);
// Perform linking
module.finalize_definitions();
let main_ptr = module.get_finalized_function(main_fn);
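// The finalized function is a raw pointer to JIT-compiled machine code;
// reinterpret it as a zero-argument fn returning $ty so the test can call it.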
let run_main = unsafe { mem::transmute::<_, fn() -> $ty>(main_ptr) };
assert_eq!(run_main(), $expected);
};
}
#[test]
fn basic_int() {
assert_evals_to!("123", 123, i64);
}
#[test]
fn basic_float() {
assert_evals_to!("1234.0", 1234.0, f64);
}
// #[test]
// fn gen_when_take_first_branch() {
// assert_evals_to!(
// indoc!(
// r#"
// when 1 is
// 1 -> 12
// _ -> 34
// "#
// ),
// 12,
// i64
// );
// }
// #[test]
// fn gen_when_take_second_branch() {
// assert_evals_to!(
// indoc!(
// r#"
// when 2 is
// 1 -> 63
// _ -> 48
// "#
// ),
// 48,
// i64
// );
// }
// #[test]
// fn gen_when_one_branch() {
// assert_evals_to!(
// indoc!(
// r#"
// when 3.14 is
// _ -> 23
// "#
// ),
// 23,
// i64
// );
// }
#[test]
fn gen_basic_def() {
assert_evals_to!(
indoc!(
r#"
answer = 42
answer
"#
),
42,
i64
);
assert_evals_to!(
indoc!(
r#"
pi = 3.14
pi
"#
),
3.14,
f64
);
}
#[test]
fn gen_multiple_defs() {
assert_evals_to!(
indoc!(
r#"
answer = 42
pi = 3.14
answer
"#
),
42,
i64
);
assert_evals_to!(
indoc!(
r#"
answer = 42
pi = 3.14
pi
"#
),
3.14,
f64
);
}
#[test]
fn gen_chained_defs() {
assert_evals_to!(
indoc!(
r#"
x = i1
i3 = i2
i1 = 1337
i2 = i1
y = 12.4
i3
"#
),
1337,
i64
);
}
#[test]
fn gen_nested_defs() {
assert_evals_to!(
indoc!(
r#"
x = 5
answer =
i3 = i2
nested =
a = 1.0
b = 5
i1
i1 = 1337
i2 = i1
nested
# None of this should affect anything, even though names
# overlap with the previous nested defs
unused =
nested = 17
i1 = 84.2
nested
y = 12.4
answer
"#
),
1337,
i64
);
}
#[test]
fn gen_basic_fn() {
assert_evals_to!(
indoc!(
r#"
always42 : Num.Num Int.Integer -> Num.Num Int.Integer
always42 = \num -> 42
always42 5
"#
),
42,
i64
);
}
// #[test]
// fn gen_when_fn() {
// assert_evals_to!(
// indoc!(
// r#"
// limitedNegate = \num ->
// when num is
// 1 -> -1
// _ -> num
// limitedNegate 1
// "#
// ),
// -1,
// i64
// );
// }
#[test]
fn apply_unnamed_fn() {
assert_evals_to!(
indoc!(
r#"
(\a -> a) 5
"#
),
5,
i64
);
}
#[test]
fn return_unnamed_fn() {
assert_evals_to!(
indoc!(
r#"
alwaysIdentity : Num.Num Int.Integer -> (Num.Num Float.FloatingPoint -> Num.Num Float.FloatingPoint)
alwaysIdentity = \num ->
(\a -> a)
(alwaysIdentity 2) 3.14
"#
),
3.14,
f64
);
}
}

View file

@ -240,7 +240,7 @@ mod test_format {
expr_formats_same(indoc!(
r#"
"""
"" \""" ""\"
"""
@ -293,6 +293,24 @@ mod test_format {
));
}
#[test]
fn destructure_tag_closure() {
expr_formats_same(indoc!(
r#"
\Foo a -> Foo a
"#
));
}
#[test]
fn destructure_nested_tag_closure() {
expr_formats_same(indoc!(
r#"
\Foo (Bar a) -> Foo (Bar a)
"#
));
}
// DEFS
#[test]
@ -523,6 +541,68 @@ mod test_format {
));
}
#[test]
fn record_updating() {
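// expr_formats_same asserts the formatter leaves the input untouched;
// expr_formats_to asserts the first form is rewritten into the second.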
expr_formats_same(indoc!(
r#"
{ shoes & leftShoe: nothing }
"#
));
expr_formats_to(
indoc!(
r#"
{ shoes & rightShoe : nothing }
"#
),
indoc!(
r#"
{ shoes & rightShoe: nothing }
"#
),
);
expr_formats_same(indoc!(
r#"
{ shoes &
rightShoe: newRightShoe,
leftShoe: newLeftShoe
}
"#
));
expr_formats_to(
indoc!(
r#"
{ shoes
& rightShoe: bareFoot
, leftShoe: bareFoot }
"#
),
indoc!(
r#"
{ shoes &
rightShoe: bareFoot,
leftShoe: bareFoot
}
"#
),
);
}
// #[test]
// fn record_field_destructuring() {
// expr_formats_same(indoc!(
@ -728,6 +808,36 @@ mod test_format {
));
}
// #[test]
// fn multi_line_list_def() {
// expr_formats_same(indoc!(
// r#"
// scores =
// [
// 5,
// 10
// ]
//
// scores
// "#
// ));
// }
//
// #[test]
// fn multi_line_record_def() {
// expr_formats_same(indoc!(
// r#"
// pos =
// {
// x: 5,
// x: 10
// }
//
// pos
// "#
// ));
// }
#[test]
fn two_fields_center_newline() {
expr_formats_to(

View file

@ -1006,7 +1006,7 @@ mod test_infer {
{ user & year: "foo" }
"#
),
"{ year : Str }{ name : Str }",
"{ name : Str, year : Str }",
);
}
@ -1068,19 +1068,16 @@ mod test_infer {
);
}
// currently doesn't work because of a parsing issue
// @Foo x y isn't turned into Apply(@Foo, [x,y]) currently
// #[test]
// fn private_tag_application() {
// infer_eq(
// indoc!(
// r#"@Foo "happy" 2020
// "#
// ),
// "[ Test.Foo Str Int ]*",
// );
// }
//
#[test]
fn private_tag_application() {
infer_eq(
indoc!(
r#"@Foo "happy" 2020
"#
),
"[ Test.@Foo Str Int ]*",
);
}
#[test]
fn if_then_else() {
@ -1092,4 +1089,22 @@ mod test_infer {
"Int",
);
}
#[test]
fn record_extraction() {
with_larger_debug_stack(|| {
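// with_larger_debug_stack runs the closure on a thread with extra stack;
// inference on this expression appears to need the extra room in debug builds.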
infer_eq(
indoc!(
r#"
f = \x ->
when x is
{ a, b } -> a
f
"#
),
"{ a : a, b : * }* -> a",
);
});
}
}

View file

@ -10,19 +10,19 @@ extern crate roc;
mod helpers;
#[cfg(test)]
mod test_gen {
mod test_llvm {
use crate::helpers::can_expr;
use bumpalo::Bump;
use inkwell::context::Context;
use inkwell::execution_engine::JitFunction;
use inkwell::passes::PassManager;
use inkwell::types::BasicType;
use inkwell::OptimizationLevel;
use roc::collections::{ImMap, MutMap};
use roc::gen::build::{build_expr, build_proc};
use roc::gen::convert::content_to_basic_type;
use roc::gen::env::Env;
use roc::infer::infer_expr;
use roc::ll::expr::Expr;
use roc::llvm::build::{build_expr, build_proc, Env};
use roc::llvm::convert::content_to_basic_type;
use roc::mono::expr::Expr;
use roc::subs::Subs;
macro_rules! assert_evals_to {
@ -34,8 +34,25 @@ mod test_gen {
let content = infer_expr(&mut subs, &mut unify_problems, &constraint, variable);
let context = Context::create();
let builder = context.create_builder();
let module = context.create_module("app");
let builder = context.create_builder();
let fpm = PassManager::create(&module);
// Enable optimizations when running cargo test --release
if !cfg!(debug_assertions) {
fpm.add_instruction_combining_pass();
fpm.add_reassociate_pass();
fpm.add_basic_alias_analysis_pass();
fpm.add_promote_memory_to_register_pass();
fpm.add_cfg_simplification_pass();
fpm.add_gvn_pass();
// TODO figure out why enabling any of these (even alone) causes LLVM to segfault
// fpm.add_strip_dead_prototypes_pass();
// fpm.add_dead_arg_elimination_pass();
// fpm.add_function_inlining_pass();
}
fpm.initialize();
// Compute main_fn_type before moving subs to Env
let main_fn_type = content_to_basic_type(&content, &mut subs, &context)
@ -53,12 +70,23 @@ mod test_gen {
};
// Populate Procs and get the low-level Expr from the canonical Expr
let main_body = Expr::new(&arena, &env.subs, &env.module, &context, expr, &mut procs);
let main_body = Expr::new(&arena, &env.subs, expr, &mut procs);
// Add all the Procs to the module
for (name, (opt_proc, _fn_val)) in procs.clone() {
for (name, opt_proc) in procs.clone() {
if let Some(proc) = opt_proc {
build_proc(&env, &ImMap::default(), name, proc, &procs);
// NOTE: This is here to be uncommented in case verification fails.
// (This approach means we don't have to defensively clone name here.)
//
// println!("\n\nBuilding and then verifying function {}\n\n", name);
let fn_val = build_proc(&env, name, proc, &procs);
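// Verify the generated function before running the optimization passes
// over it; failing verification means codegen emitted invalid IR.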
if fn_val.verify(true) {
fpm.run_on(&fn_val);
} else {
// NOTE: If this fails, uncomment the above println to debug
panic!("Non-main function failed LLVM verification.");
}
}
}
@ -84,15 +112,15 @@ mod test_gen {
panic!("Function {} failed LLVM verification.", main_fn_name);
}
// Uncomment this to see the module's LLVM instruction output:
// env.module.print_to_stderr();
let execution_engine = env
.module
.create_jit_execution_engine(OptimizationLevel::None)
.expect("errored");
.expect("Error creating JIT execution engine for test");
unsafe {
// Uncomment this to see the module's LLVM instruction output:
// env.module.print_to_stderr();
let main: JitFunction<unsafe extern "C" fn() -> $ty> = execution_engine
.get_function(main_fn_name)
.ok()
@ -299,7 +327,7 @@ mod test_gen {
limitedNegate = \num ->
when num is
1 -> -1
_ -> 0
_ -> num
limitedNegate 1
"#
@ -308,4 +336,34 @@ mod test_gen {
i64
);
}
#[test]
fn apply_unnamed_fn() {
assert_evals_to!(
indoc!(
r#"
(\a -> a) 5
"#
),
5,
i64
);
}
#[test]
fn return_unnamed_fn() {
assert_evals_to!(
indoc!(
r#"
alwaysIdentity : Num.Num Int.Integer -> (Num.Num Float.FloatingPoint -> Num.Num Float.FloatingPoint)
alwaysIdentity = \num ->
(\a -> a)
(alwaysIdentity 2) 3.14
"#
),
3.14,
f64
);
}
}

View file

@ -25,7 +25,7 @@ mod test_parse {
use roc::parse::ast::CommentOrNewline::*;
use roc::parse::ast::Expr::{self, *};
use roc::parse::ast::Pattern::{self, *};
use roc::parse::ast::{Attempting, Def, InterfaceHeader, Spaceable};
use roc::parse::ast::{Attempting, Def, InterfaceHeader, Spaceable, Tag, TypeAnnotation};
use roc::parse::module::{interface_header, module_defs};
use roc::parse::parser::{Fail, FailReason, Parser, State};
use roc::region::{Located, Region};
@ -641,7 +641,7 @@ mod test_parse {
assert_eq!(Ok(expected), actual);
}
// VARIANT
// TAG
#[test]
fn basic_global_tag() {
@ -661,6 +661,38 @@ mod test_parse {
assert_eq!(Ok(expected), actual);
}
#[test]
fn apply_private_tag() {
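// "@Whee 12 34" should parse as the private tag @Whee applied to two Int
// arguments, with CalledVia::Space recording that the call used whitespace.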
let arena = Bump::new();
let arg1 = arena.alloc(Located::new(0, 0, 6, 8, Int("12")));
let arg2 = arena.alloc(Located::new(0, 0, 9, 11, Int("34")));
let args = bumpalo::vec![in &arena; &*arg1, &*arg2];
let expected = Expr::Apply(
arena.alloc(Located::new(0, 0, 0, 5, Expr::PrivateTag("@Whee"))),
args,
CalledVia::Space,
);
let actual = parse_with(&arena, "@Whee 12 34");
assert_eq!(Ok(expected), actual);
}
#[test]
fn apply_global_tag() {
let arena = Bump::new();
let arg1 = arena.alloc(Located::new(0, 0, 5, 7, Int("12")));
let arg2 = arena.alloc(Located::new(0, 0, 8, 10, Int("34")));
let args = bumpalo::vec![in &arena; &*arg1, &*arg2];
let expected = Expr::Apply(
arena.alloc(Located::new(0, 0, 0, 4, Expr::GlobalTag("Whee"))),
args,
CalledVia::Space,
);
let actual = parse_with(&arena, "Whee 12 34");
assert_eq!(Ok(expected), actual);
}
#[test]
fn qualified_global_tag() {
let arena = Bump::new();
@ -1245,27 +1277,17 @@ mod test_parse {
let arena = Bump::new();
let newline = bumpalo::vec![in &arena; Newline];
let newlines = bumpalo::vec![in &arena; Newline, Newline];
let applied_ann = TypeAnnotation::Apply(&[], "Int", &[]);
let signature = Def::Annotation(
Located::new(0, 0, 0, 3, Identifier("foo")),
Located::new(
0,
0,
6,
9,
roc::parse::ast::TypeAnnotation::Apply(&[], "Int", &[]),
),
Located::new(0, 0, 6, 9, applied_ann),
);
let def = Def::Body(
arena.alloc(Located::new(1, 1, 0, 3, Identifier("foo"))),
arena.alloc(Located::new(1, 1, 6, 7, Int("4"))),
);
let loc_def = &*arena.alloc(Located::new(
1,
1,
0,
7,
Def::SpaceBefore(arena.alloc(def), newline.into_bump_slice()),
));
let spaced_def = Def::SpaceBefore(arena.alloc(def), newline.into_bump_slice());
let loc_def = &*arena.alloc(Located::new(1, 1, 0, 7, spaced_def));
let loc_ann = &*arena.alloc(Located::new(0, 0, 0, 3, signature));
let defs = bumpalo::vec![in &arena; loc_ann, loc_def];
@ -1288,7 +1310,7 @@ mod test_parse {
#[test]
fn type_signature_function_def() {
use roc::parse::ast::TypeAnnotation;
use TypeAnnotation;
let arena = Bump::new();
let newline = bumpalo::vec![in &arena; Newline];
let newlines = bumpalo::vec![in &arena; Newline, Newline];
@ -1302,16 +1324,10 @@ mod test_parse {
Located::new(0, 0, 11, 16, float_type)
];
let return_type = Located::new(0, 0, 20, 24, bool_type);
let fn_ann = TypeAnnotation::Function(&arguments, &return_type);
let signature = Def::Annotation(
Located::new(0, 0, 0, 3, Identifier("foo")),
Located::new(
0,
0,
20,
24,
TypeAnnotation::Function(&arguments, &return_type),
),
Located::new(0, 0, 20, 24, fn_ann),
);
let args = bumpalo::vec![in &arena;
@ -1326,13 +1342,8 @@ mod test_parse {
arena.alloc(Located::new(1, 1, 0, 3, Identifier("foo"))),
arena.alloc(Located::new(1, 1, 6, 17, closure)),
);
let loc_def = &*arena.alloc(Located::new(
1,
1,
0,
17,
Def::SpaceBefore(arena.alloc(def), newline.into_bump_slice()),
));
let spaced = Def::SpaceBefore(arena.alloc(def), newline.into_bump_slice());
let loc_def = &*arena.alloc(Located::new(1, 1, 0, 17, spaced));
let loc_ann = &*arena.alloc(Located::new(0, 0, 0, 3, signature));
let defs = bumpalo::vec![in &arena; loc_ann, loc_def];
@ -1353,6 +1364,220 @@ mod test_parse {
);
}
#[test]
fn ann_private_open_union() {
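// Expected AST for `foo : [ @True, @Perhaps Thing ]*`: a tag union of two
// private tags with a wildcard extension, followed by `foo = True` and a
// trailing `42` expression.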
let arena = Bump::new();
let newline = bumpalo::vec![in &arena; Newline];
let newlines = bumpalo::vec![in &arena; Newline, Newline];
let tag1 = Tag::Private {
name: Located::new(0, 0, 8, 13, "True"),
args: &[],
};
let tag2arg = Located::new(0, 0, 24, 29, TypeAnnotation::Apply(&[], "Thing", &[]));
let tag2args = bumpalo::vec![in &arena; tag2arg];
let tag2 = Tag::Private {
name: Located::new(0, 0, 15, 23, "Perhaps"),
args: tag2args.into_bump_slice(),
};
let tags = bumpalo::vec![in &arena;
Located::new(0, 0, 8, 13, tag1),
Located::new(0, 0, 15, 29, tag2)
];
let loc_wildcard = Located::new(0, 0, 31, 32, TypeAnnotation::Wildcard);
let applied_ann = TypeAnnotation::TagUnion {
tags: tags.into_bump_slice(),
ext: Some(arena.alloc(loc_wildcard)),
};
let signature = Def::Annotation(
Located::new(0, 0, 0, 3, Identifier("foo")),
Located::new(0, 0, 6, 32, applied_ann),
);
let def = Def::Body(
arena.alloc(Located::new(1, 1, 0, 3, Identifier("foo"))),
arena.alloc(Located::new(1, 1, 6, 10, Expr::GlobalTag("True"))),
);
let spaced_def = Def::SpaceBefore(arena.alloc(def), newline.into_bump_slice());
let loc_def = &*arena.alloc(Located::new(1, 1, 0, 10, spaced_def));
let loc_ann = &*arena.alloc(Located::new(0, 0, 0, 3, signature));
let defs = bumpalo::vec![in &arena; loc_ann, loc_def];
let ret = Expr::SpaceBefore(arena.alloc(Int("42")), newlines.into_bump_slice());
let loc_ret = Located::new(3, 3, 0, 2, ret);
let expected = Defs(defs, arena.alloc(loc_ret));
assert_parses_to(
indoc!(
r#"
foo : [ @True, @Perhaps Thing ]*
foo = True
42
"#
),
expected,
);
}
#[test]
fn ann_private_closed_union() {
let arena = Bump::new();
let newline = bumpalo::vec![in &arena; Newline];
let newlines = bumpalo::vec![in &arena; Newline, Newline];
let tag1 = Tag::Private {
name: Located::new(0, 0, 8, 13, "True"),
args: &[],
};
let tag2arg = Located::new(0, 0, 24, 29, TypeAnnotation::Apply(&[], "Thing", &[]));
let tag2args = bumpalo::vec![in &arena; tag2arg];
let tag2 = Tag::Private {
name: Located::new(0, 0, 15, 23, "Perhaps"),
args: tag2args.into_bump_slice(),
};
let tags = bumpalo::vec![in &arena;
Located::new(0, 0, 8, 13, tag1),
Located::new(0, 0, 15, 29, tag2)
];
let applied_ann = TypeAnnotation::TagUnion {
tags: tags.into_bump_slice(),
ext: None,
};
let signature = Def::Annotation(
Located::new(0, 0, 0, 3, Identifier("foo")),
Located::new(0, 0, 6, 31, applied_ann),
);
let def = Def::Body(
arena.alloc(Located::new(1, 1, 0, 3, Identifier("foo"))),
arena.alloc(Located::new(1, 1, 6, 10, Expr::GlobalTag("True"))),
);
let spaced_def = Def::SpaceBefore(arena.alloc(def), newline.into_bump_slice());
let loc_def = &*arena.alloc(Located::new(1, 1, 0, 10, spaced_def));
let loc_ann = &*arena.alloc(Located::new(0, 0, 0, 3, signature));
let defs = bumpalo::vec![in &arena; loc_ann, loc_def];
let ret = Expr::SpaceBefore(arena.alloc(Int("42")), newlines.into_bump_slice());
let loc_ret = Located::new(3, 3, 0, 2, ret);
let expected = Defs(defs, arena.alloc(loc_ret));
assert_parses_to(
indoc!(
r#"
foo : [ @True, @Perhaps Thing ]
foo = True
42
"#
),
expected,
);
}
#[test]
fn ann_global_open_union() {
let arena = Bump::new();
let newline = bumpalo::vec![in &arena; Newline];
let newlines = bumpalo::vec![in &arena; Newline, Newline];
let tag1 = Tag::Global {
name: Located::new(0, 0, 8, 12, "True"),
args: &[],
};
let tag2arg = Located::new(0, 0, 22, 27, TypeAnnotation::Apply(&[], "Thing", &[]));
let tag2args = bumpalo::vec![in &arena; tag2arg];
let tag2 = Tag::Global {
name: Located::new(0, 0, 14, 21, "Perhaps"),
args: tag2args.into_bump_slice(),
};
let tags = bumpalo::vec![in &arena;
Located::new(0, 0, 8, 12, tag1),
Located::new(0, 0, 14, 27, tag2)
];
let loc_wildcard = Located::new(0, 0, 29, 30, TypeAnnotation::Wildcard);
let applied_ann = TypeAnnotation::TagUnion {
tags: tags.into_bump_slice(),
ext: Some(arena.alloc(loc_wildcard)),
};
let signature = Def::Annotation(
Located::new(0, 0, 0, 3, Identifier("foo")),
Located::new(0, 0, 6, 30, applied_ann),
);
let def = Def::Body(
arena.alloc(Located::new(1, 1, 0, 3, Identifier("foo"))),
arena.alloc(Located::new(1, 1, 6, 10, Expr::GlobalTag("True"))),
);
let spaced_def = Def::SpaceBefore(arena.alloc(def), newline.into_bump_slice());
let loc_def = &*arena.alloc(Located::new(1, 1, 0, 10, spaced_def));
let loc_ann = &*arena.alloc(Located::new(0, 0, 0, 3, signature));
let defs = bumpalo::vec![in &arena; loc_ann, loc_def];
let ret = Expr::SpaceBefore(arena.alloc(Int("42")), newlines.into_bump_slice());
let loc_ret = Located::new(3, 3, 0, 2, ret);
let expected = Defs(defs, arena.alloc(loc_ret));
assert_parses_to(
indoc!(
r#"
foo : [ True, Perhaps Thing ]*
foo = True
42
"#
),
expected,
);
}
#[test]
fn ann_global_closed_union() {
let arena = Bump::new();
let newline = bumpalo::vec![in &arena; Newline];
let newlines = bumpalo::vec![in &arena; Newline, Newline];
let tag1 = Tag::Global {
name: Located::new(0, 0, 8, 12, "True"),
args: &[],
};
let tag2arg = Located::new(0, 0, 22, 27, TypeAnnotation::Apply(&[], "Thing", &[]));
let tag2args = bumpalo::vec![in &arena; tag2arg];
let tag2 = Tag::Global {
name: Located::new(0, 0, 14, 21, "Perhaps"),
args: tag2args.into_bump_slice(),
};
let tags = bumpalo::vec![in &arena;
Located::new(0, 0, 8, 12, tag1),
Located::new(0, 0, 14, 27, tag2)
];
let applied_ann = TypeAnnotation::TagUnion {
tags: tags.into_bump_slice(),
ext: None,
};
let signature = Def::Annotation(
Located::new(0, 0, 0, 3, Identifier("foo")),
Located::new(0, 0, 6, 29, applied_ann),
);
let def = Def::Body(
arena.alloc(Located::new(1, 1, 0, 3, Identifier("foo"))),
arena.alloc(Located::new(1, 1, 6, 10, Expr::GlobalTag("True"))),
);
let spaced_def = Def::SpaceBefore(arena.alloc(def), newline.into_bump_slice());
let loc_def = &*arena.alloc(Located::new(1, 1, 0, 10, spaced_def));
let loc_ann = &*arena.alloc(Located::new(0, 0, 0, 3, signature));
let defs = bumpalo::vec![in &arena; loc_ann, loc_def];
let ret = Expr::SpaceBefore(arena.alloc(Int("42")), newlines.into_bump_slice());
let loc_ret = Located::new(3, 3, 0, 2, ret);
let expected = Defs(defs, arena.alloc(loc_ret));
assert_parses_to(
indoc!(
r#"
foo : [ True, Perhaps Thing ]
foo = True
42
"#
),
expected,
);
}
// WHEN
#[test]

View file

@ -10,24 +10,15 @@ mod helpers;
#[cfg(test)]
mod test_infer_uniq {
use crate::helpers::uniq_expr;
use crate::helpers::{uniq_expr, with_larger_debug_stack};
use roc::infer::infer_expr;
use roc::pretty_print_types::{content_to_string, name_all_type_vars};
// HELPERS
fn infer_eq(src: &str, expected: &str) {
let (
_output2,
_output1,
_,
mut subs1,
variable1,
mut subs2,
variable2,
constraint1,
constraint2,
) = uniq_expr(src);
let (_output1, _, mut subs1, variable1, mut subs2, variable2, constraint1, constraint2) =
uniq_expr(src);
let mut unify_problems = Vec::new();
let content1 = infer_expr(&mut subs1, &mut unify_problems, &constraint1, variable1);
@ -509,42 +500,41 @@ mod test_infer_uniq {
);
}
#[test]
fn identity_infers_principal_type() {
infer_eq(
indoc!(
r#"
identity = \a -> a
x = identity 5
identity
"#
),
// TODO investigate why not shared
// perhaps because `x` is DCE'd?
"Attr.Attr * (a -> a)",
);
}
#[test]
fn identity_works_on_incompatible_types() {
infer_eq(
indoc!(
r#"
identity = \a -> a
x = identity 5
y = identity "hi"
x
"#
),
// TODO investigate why this is not shared
// maybe because y is unused and therefore dropped?
"Attr.Attr * Int",
);
}
// #[test]
// fn identity_infers_principal_type() {
// infer_eq(
// indoc!(
// r#"
// identity = \a -> a
//
// x = identity 5
//
// identity
// "#
// ),
// // TODO this is wrong!
// "Attr.Attr Attr.Shared (Attr.Attr a Int -> Attr.Attr a Int)",
// );
// }
//
// #[test]
// fn identity_works_on_incompatible_types() {
// infer_eq(
// indoc!(
// r#"
// identity = \a -> a
//
// x = identity 5
// y = identity "hi"
//
// x
// "#
// ),
// // TODO investigate why this is not shared
// // maybe because y is unused and therefore dropped?
// "Attr.Attr * Int",
// );
// }
#[test]
fn call_returns_list() {
@ -927,7 +917,25 @@ mod test_infer_uniq {
{ user & year: "foo" }
"#
),
"Attr.Attr * { year : (Attr.Attr * Str) }{ name : (Attr.Attr * Str) }",
"Attr.Attr * { name : (Attr.Attr * Str), year : (Attr.Attr * Str) }",
);
}
// #[test]
// fn record_extraction() {
// with_larger_debug_stack(|| {
// infer_eq(
// indoc!(
// r#"
// f = \x ->
// when x is
// { a, b } -> a
//
// f
// "#
// ),
// "Attr.Attr * (Attr.Attr u { a : Attr u a, b : * }* -> Attr u a)",
// );
// });
// }
}