Merge remote-tracking branch 'origin/main' into roc-dev-inline-expects

Folkert 2022-10-09 18:46:57 +02:00
commit e62ab00c65
No known key found for this signature in database
GPG key ID: 1F17F6FFD112B97C
275 changed files with 4038 additions and 2432 deletions

View file

@ -374,7 +374,7 @@ pub fn build_zig_host_wasm32(
"c",
"-target",
zig_target,
// "-femit-llvm-ir=/home/folkertdev/roc/roc/examples/benchmarks/platform/host.ll",
// "-femit-llvm-ir=/home/folkertdev/roc/roc/crates/cli_testing_examples/benchmarks/platform/host.ll",
"-fPIC",
"--strip",
];
@ -635,6 +635,7 @@ pub fn rebuild_host(
} else if cargo_host_src.exists() {
// Compile and link Cargo.toml, if it exists
let cargo_dir = host_input_path.parent().unwrap();
let cargo_out_dir = cargo_dir.join("target").join(
if matches!(opt_level, OptLevel::Optimize | OptLevel::Size) {
"release"
@ -1215,7 +1216,7 @@ fn link_wasm32(
"-O",
"ReleaseSmall",
// useful for debugging
// "-femit-llvm-ir=/home/folkertdev/roc/roc/examples/benchmarks/platform/host.ll",
// "-femit-llvm-ir=/home/folkertdev/roc/roc/crates/cli_testing_examples/benchmarks/platform/host.ll",
])
.spawn()?;

View file

@ -15,7 +15,7 @@ lazy_static = "1.4.0"
[build-dependencies]
# dunce can be removed once ziglang/zig#5109 is fixed
dunce = "1.0.2"
dunce = "1.0.3"
[target.'cfg(target_os = "macos")'.build-dependencies]
tempfile = "3.2.0"

View file

@ -93,20 +93,6 @@ pub const RocList = extern struct {
return (ptr - 1)[0] == utils.REFCOUNT_ONE;
}
pub fn allocate(
alignment: u32,
length: usize,
element_size: usize,
) RocList {
const data_bytes = length * element_size;
return RocList{
.bytes = utils.allocateWithRefcount(data_bytes, alignment),
.length = length,
.capacity = length,
};
}
pub fn makeUniqueExtra(self: RocList, alignment: u32, element_width: usize, update_mode: UpdateMode) RocList {
if (update_mode == .InPlace) {
return self;
@ -140,11 +126,117 @@ pub const RocList = extern struct {
return new_list;
}
// We follow roughly the [fbvector](https://github.com/facebook/folly/blob/main/folly/docs/FBVector.md) when it comes to growing a RocList.
// Here is [their growth strategy](https://github.com/facebook/folly/blob/3e0525988fd444201b19b76b390a5927c15cb697/folly/FBVector.h#L1128) for push_back:
//
// (1) initial size
// Instead of growing to size 1 from empty, fbvector allocates at least
// 64 bytes. You may still use reserve to reserve a lesser amount of
// memory.
// (2) 1.5x
// For medium-sized vectors, the growth strategy is 1.5x. See the docs
// for details.
// This does not apply to very small or very large fbvectors. This is a
// heuristic.
//
// In our case, we exposed allocate and reallocate, which will use a smart growth strategy.
// We also expose allocateExact and reallocateExact for cases where a specific number of elements is requested.
// calculateCapacity should only be called in cases where the list will be growing.
// requested_length should always be greater than old_capacity.
inline fn calculateCapacity(
old_capacity: usize,
requested_length: usize,
element_width: usize,
) usize {
// TODO: there are two adjustments that would likely lead to better results for Roc.
// 1. Deal with the fact we allocate an extra u64 for refcount.
// This may lead to allocating page size + 8 bytes.
// That could mean allocating an entire page for 8 bytes of data which isn't great.
// 2. Deal with the fact that we can request more than 1 element at a time.
// fbvector assumes just appending 1 element at a time when using this algorithm.
// As such, they will generally grow in a way that should better match certain memory multiples.
// This is also the normal case for roc, but we could also grow by a much larger amount.
// We may want to round to multiples of 2 or something similar.
var new_capacity: usize = 0;
if (element_width == 0) {
return requested_length;
} else if (old_capacity == 0) {
new_capacity = 64 / element_width;
} else if (old_capacity < 4096 / element_width) {
new_capacity = old_capacity * 2;
} else if (old_capacity > 4096 * 32 / element_width) {
new_capacity = old_capacity * 2;
} else {
new_capacity = (old_capacity * 3 + 1) / 2;
}
return @maximum(new_capacity, requested_length);
}
pub fn allocate(
alignment: u32,
length: usize,
element_width: usize,
) RocList {
if (length == 0) {
return empty();
}
const capacity = calculateCapacity(0, length, element_width);
const data_bytes = capacity * element_width;
return RocList{
.bytes = utils.allocateWithRefcount(data_bytes, alignment),
.length = length,
.capacity = capacity,
};
}
pub fn allocateExact(
alignment: u32,
length: usize,
element_width: usize,
) RocList {
if (length == 0) {
return empty();
}
const data_bytes = length * element_width;
return RocList{
.bytes = utils.allocateWithRefcount(data_bytes, alignment),
.length = length,
.capacity = length,
};
}
pub fn reallocate(
self: RocList,
alignment: u32,
new_length: usize,
element_width: usize,
) RocList {
if (self.bytes) |source_ptr| {
if (self.isUnique()) {
if (self.capacity >= new_length) {
return RocList{ .bytes = self.bytes, .length = new_length, .capacity = self.capacity };
} else {
const new_capacity = calculateCapacity(self.capacity, new_length, element_width);
const new_source = utils.unsafeReallocate(source_ptr, alignment, self.len(), new_capacity, element_width);
return RocList{ .bytes = new_source, .length = new_length, .capacity = new_capacity };
}
}
// TODO: Investigate the performance of this.
// Maybe we should just always reallocate to the new_length instead of expanding capacity?
const new_capacity = if (self.capacity >= new_length) self.capacity else calculateCapacity(self.capacity, new_length, element_width);
return self.reallocateFresh(alignment, new_length, new_capacity, element_width);
}
return RocList.allocate(alignment, new_length, element_width);
}
pub fn reallocateExact(
self: RocList,
alignment: u32,
new_length: usize,
element_width: usize,
) RocList {
if (self.bytes) |source_ptr| {
if (self.isUnique()) {
@ -155,9 +247,9 @@ pub const RocList = extern struct {
return RocList{ .bytes = new_source, .length = new_length, .capacity = new_length };
}
}
return self.reallocateFresh(alignment, new_length, new_length, element_width);
}
return self.reallocateFresh(alignment, new_length, element_width);
return RocList.allocateExact(alignment, new_length, element_width);
}
/// reallocate by explicitly making a new allocation and copying elements over
@ -165,16 +257,16 @@ pub const RocList = extern struct {
self: RocList,
alignment: u32,
new_length: usize,
new_capacity: usize,
element_width: usize,
) RocList {
const old_length = self.length;
const delta_length = new_length - old_length;
const data_bytes = new_length * element_width;
const data_bytes = new_capacity * element_width;
const first_slot = utils.allocateWithRefcount(data_bytes, alignment);
// transfer the memory
if (self.bytes) |source_ptr| {
const dest_ptr = first_slot;
@ -185,7 +277,7 @@ pub const RocList = extern struct {
const result = RocList{
.bytes = first_slot,
.length = new_length,
.capacity = new_length,
.capacity = new_capacity,
};
utils.decref(self.bytes, old_length * element_width, alignment);
@ -412,7 +504,7 @@ pub fn listWithCapacity(
alignment: u32,
element_width: usize,
) callconv(.C) RocList {
var output = RocList.allocate(alignment, capacity, element_width);
var output = RocList.allocateExact(alignment, capacity, element_width);
output.length = 0;
return output;
}
@ -517,17 +609,25 @@ pub fn listSublist(
len: usize,
dec: Dec,
) callconv(.C) RocList {
if (len == 0) {
const size = list.len();
if (len == 0 or start >= size) {
if (list.isUnique()) {
// Decrement the reference counts of all elements.
if (list.bytes) |source_ptr| {
var i: usize = 0;
while (i < size) : (i += 1) {
const element = source_ptr + i * element_width;
dec(element);
}
var output = list;
output.length = 0;
return output;
}
}
return RocList.empty();
}
if (list.bytes) |source_ptr| {
const size = list.len();
if (start >= size) {
return RocList.empty();
}
const keep_len = std.math.min(len, size - start);
const drop_start_len = start;
const drop_end_len = size - (start + keep_len);
@ -546,10 +646,17 @@ pub fn listSublist(
dec(element);
}
if (start == 0 and list.isUnique()) {
if (list.isUnique()) {
var output = list;
output.length = keep_len;
return output;
if (start == 0) {
return output;
} else {
// We want memmove due to aliasing. Zig does not expose it directly.
// Instead, use copy, which can write to aliases as long as the dest is before the source.
mem.copy(u8, source_ptr[0 .. keep_len * element_width], source_ptr[start * element_width .. (start + keep_len) * element_width]);
return output;
}
} else {
const output = RocList.allocate(alignment, keep_len, element_width);
const target_ptr = output.bytes orelse unreachable;
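The calculateCapacity function added above is the heart of the new growth strategy: lists start from at least 64 bytes, small lists double, and medium-sized lists grow by roughly 1.5x, following the fbvector scheme referenced in the comment. A minimal Rust sketch of that rule, written here only to illustrate the numbers (it mirrors the Zig code above and is not part of the commit):

fn calculate_capacity(old_capacity: usize, requested_length: usize, element_width: usize) -> usize {
    // Zero-sized elements never need extra room.
    if element_width == 0 {
        return requested_length;
    }
    let new_capacity = if old_capacity == 0 {
        64 / element_width // start from at least 64 bytes
    } else if old_capacity < 4096 / element_width || old_capacity > 4096 * 32 / element_width {
        old_capacity * 2 // small and very large lists double
    } else {
        (old_capacity * 3 + 1) / 2 // medium lists grow by roughly 1.5x
    };
    // Never return less than what was actually requested.
    new_capacity.max(requested_length)
}

fn main() {
    // Appending one element at a time to a list of 8-byte elements:
    // capacity goes 8, 16, 32, ... up to 512, then switches to ~1.5x growth.
    let mut cap = 0;
    for len in 1..=1_000 {
        if len > cap {
            cap = calculate_capacity(cap, len, 8);
        }
    }
    println!("final capacity: {cap}");
}

The final max against requested_length is what lets reallocate jump straight to a large requested size instead of growing in several steps.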

View file

@ -144,6 +144,7 @@ comptime {
exportStrFn(str.strTrimLeft, "trim_left");
exportStrFn(str.strTrimRight, "trim_right");
exportStrFn(str.strCloneTo, "clone_to");
exportStrFn(str.withCapacity, "with_capacity");
inline for (INTEGERS) |T| {
str.exportFromInt(T, ROC_BUILTINS ++ "." ++ STR ++ ".from_int.");

View file

@ -2596,6 +2596,10 @@ pub fn reserve(string: RocStr, capacity: usize) callconv(.C) RocStr {
}
}
pub fn withCapacity(capacity: usize) callconv(.C) RocStr {
return RocStr.allocate(0, capacity);
}
pub fn getScalarUnsafe(string: RocStr, index: usize) callconv(.C) extern struct { bytesParsed: usize, scalar: u32 } {
const slice = string.asSlice();
const bytesParsed = @intCast(usize, std.unicode.utf8ByteSequenceLength(slice[index]) catch unreachable);

View file

@ -30,6 +30,23 @@ interface Decode
]
imports [
List,
Result.{ Result },
Num.{
U8,
U16,
U32,
U64,
U128,
I8,
I16,
I32,
I64,
I128,
F32,
F64,
Dec,
},
Bool.{ Bool },
]
DecodeError : [TooShort]

View file

@ -20,6 +20,7 @@ interface Dict
Bool.{ Bool },
Result.{ Result },
List,
Num.{ Nat },
]
## A [dictionary](https://en.wikipedia.org/wiki/Associative_array) that lets you associate keys with values.

View file

@ -27,7 +27,24 @@ interface Encode
append,
toBytes,
]
imports []
imports [
Num.{
U8,
U16,
U32,
U64,
U128,
I8,
I16,
I32,
I64,
I128,
F32,
F64,
Dec,
},
Bool.{ Bool },
]
Encoder fmt := List U8, fmt -> List U8 | fmt has EncoderFormatting

View file

@ -20,6 +20,7 @@ interface Hash
] imports [
List,
Str,
Num.{ U8, U16, U32, U64, U128, I8, I16, I32, I64, I128 },
]
## A value that can be hashed.

View file

@ -18,6 +18,23 @@ interface Json
DecoderFormatting,
DecodeResult,
},
Num.{
U8,
U16,
U32,
U64,
U128,
I8,
I16,
I32,
I64,
I128,
F32,
F64,
Dec,
},
Bool.{ Bool },
Result,
]
Json := {} has [
@ -187,9 +204,8 @@ takeWhile = \list, predicate ->
helper { taken: [], rest: list }
asciiByte = \b -> Num.toU8 b
digits = List.range (asciiByte '0') (asciiByte '9' + 1)
digits : List U8
digits = List.range '0' ('9' + 1)
takeDigits = \bytes ->
takeWhile bytes \n -> List.contains digits n
@ -198,10 +214,10 @@ takeFloat = \bytes ->
{ taken: intPart, rest } = takeDigits bytes
when List.get rest 0 is
Ok 46 -> # 46 = .
Ok '.' ->
{ taken: floatPart, rest: afterAll } = takeDigits (List.split rest 1).others
builtFloat =
List.concat (List.append intPart (asciiByte '.')) floatPart
List.concat (List.append intPart '.') floatPart
{ taken: builtFloat, rest: afterAll }
@ -305,14 +321,14 @@ decodeBool = Decode.custom \bytes, @Json {} ->
# Note: this could be more performant by traversing both branches char-by-char.
# Doing that would also make `rest` more correct in the erroring case.
if
maybeFalse == [asciiByte 'f', asciiByte 'a', asciiByte 'l', asciiByte 's', asciiByte 'e']
maybeFalse == ['f', 'a', 'l', 's', 'e']
then
{ result: Ok Bool.false, rest: afterFalse }
else
{ before: maybeTrue, others: afterTrue } = List.split bytes 4
if
maybeTrue == [asciiByte 't', asciiByte 'r', asciiByte 'u', asciiByte 'e']
maybeTrue == ['t', 'r', 'u', 'e']
then
{ result: Ok Bool.true, rest: afterTrue }
else
@ -323,10 +339,10 @@ jsonString = \bytes ->
{ before, others: afterStartingQuote } = List.split bytes 1
if
before == [asciiByte '"']
before == ['"']
then
# TODO: handle escape sequences
{ taken: strSequence, rest } = takeWhile afterStartingQuote \n -> n != asciiByte '"'
{ taken: strSequence, rest } = takeWhile afterStartingQuote \n -> n != '"'
when Str.fromUtf8 strSequence is
Ok s ->
@ -351,7 +367,7 @@ decodeList = \decodeElem -> Decode.custom \bytes, @Json {} ->
{ before: afterElem, others } = List.split rest 1
if
afterElem == [asciiByte ',']
afterElem == [',']
then
decodeElems others (List.append accum val)
else
@ -362,7 +378,7 @@ decodeList = \decodeElem -> Decode.custom \bytes, @Json {} ->
{ before, others: afterStartingBrace } = List.split bytes 1
if
before == [asciiByte '[']
before == ['[']
then
# TODO: empty lists
when decodeElems afterStartingBrace [] is
@ -371,7 +387,7 @@ decodeList = \decodeElem -> Decode.custom \bytes, @Json {} ->
{ before: maybeEndingBrace, others: afterEndingBrace } = List.split rest 1
if
maybeEndingBrace == [asciiByte ']']
maybeEndingBrace == [']']
then
{ result: Ok vals, rest: afterEndingBrace }
else
@ -393,10 +409,10 @@ parseExactChar = \bytes, char ->
Err _ -> { result: Err TooShort, rest: bytes }
openBrace : List U8 -> DecodeResult {}
openBrace = \bytes -> parseExactChar bytes (asciiByte '{')
openBrace = \bytes -> parseExactChar bytes '{'
closingBrace : List U8 -> DecodeResult {}
closingBrace = \bytes -> parseExactChar bytes (asciiByte '}')
closingBrace = \bytes -> parseExactChar bytes '}'
recordKey : List U8 -> DecodeResult Str
recordKey = \bytes -> jsonString bytes
@ -405,10 +421,10 @@ anything : List U8 -> DecodeResult {}
anything = \bytes -> { result: Err TooShort, rest: bytes }
colon : List U8 -> DecodeResult {}
colon = \bytes -> parseExactChar bytes (asciiByte ':')
colon = \bytes -> parseExactChar bytes ':'
comma : List U8 -> DecodeResult {}
comma = \bytes -> parseExactChar bytes (asciiByte ',')
comma = \bytes -> parseExactChar bytes ','
tryDecode : DecodeResult a, ({ val : a, rest : List U8 } -> DecodeResult b) -> DecodeResult b
tryDecode = \{ result, rest }, mapper ->
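The swaps from (asciiByte 'x') to plain 'x' above rely on the single-quote literal changes elsewhere in this commit: a character literal is now a polymorphic number, so it can be used directly where a U8 is expected. A rough Rust analogy, purely illustrative and not Roc code, is using byte literals instead of a conversion helper:

fn main() {
    // b'0'..=b'9' are already u8 values, so no asciiByte-style helper is needed.
    let digits: Vec<u8> = (b'0'..=b'9').collect();
    assert!(digits.contains(&b'7'));

    // Comparing against a literal byte sequence reads much like the new Roc code.
    let maybe_false: Vec<u8> = b"false".to_vec();
    assert_eq!(maybe_false, [b'f', b'a', b'l', b's', b'e']);
}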

View file

@ -39,6 +39,7 @@ interface List
max,
map4,
mapTry,
walkTry,
dropFirst,
joinMap,
any,
@ -60,9 +61,12 @@ interface List
sortAsc,
sortDesc,
reserve,
walkBackwardsUntil,
]
imports [
Bool.{ Bool },
Result.{ Result },
Num.{ Nat, Num, Int },
]
## Types
@ -85,9 +89,8 @@ interface List
##
## ## Performance Details
##
## Under the hood, a list is a record containing a `len : Nat` field as well
## as a pointer to a reference count and a flat array of bytes. Unique lists
## store a capacity #Nat instead of a reference count.
## Under the hood, a list is a record containing a `len : Nat` field, a `capacity : Nat`
## field, and a pointer to a reference count and a flat array of bytes.
##
## ## Shared Lists
##
@ -109,9 +112,8 @@ interface List
## begins with a refcount of 1, because so far only `ratings` is referencing it.
##
## The second line alters this refcount. `{ foo: ratings` references
## the `ratings` list, which will result in its refcount getting incremented
## from 0 to 1. Similarly, `bar: ratings }` also references the `ratings` list,
## which will result in its refcount getting incremented from 1 to 2.
## the `ratings` list, and so does `bar: ratings }`. This will result in its
## refcount getting incremented from 1 to 3.
##
## Let's turn this example into a function.
##
@ -129,11 +131,11 @@ interface List
##
## Since `ratings` represented a way to reference the list, and that way is no
## longer accessible, the list's refcount gets decremented when `ratings` goes
## out of scope. It will decrease from 2 back down to 1.
## out of scope. It will decrease from 3 back down to 2.
##
## Putting these together, when we call `getRatings 5`, what we get back is
## a record with two fields, `foo`, and `bar`, each of which refers to the same
## list, and that list has a refcount of 1.
## list, and that list has a refcount of 2.
##
## Let's change the last line to be `(getRatings 5).bar` instead of `getRatings 5`:
##
@ -433,6 +435,13 @@ walkUntil = \list, initial, step ->
Continue new -> new
Break new -> new
## Same as [List.walkUntil], but does it from the end of the list instead.
walkBackwardsUntil : List elem, state, (state, elem -> [Continue state, Break state]) -> state
walkBackwardsUntil = \list, initial, func ->
when List.iterateBackwards list initial func is
Continue new -> new
Break new -> new
sum : List (Num a) -> Num a
sum = \list ->
List.walk list 0 Num.add
@ -957,9 +966,8 @@ mapTry = \list, toResult ->
Result.map (toResult elem) \ok ->
List.append state ok
## This is the same as `iterate` but with Result instead of [Continue, Break].
## This is the same as `iterate` but with [Result] instead of `[Continue, Break]`.
## Using `Result` saves a conditional in `mapTry`.
## It might be useful to expose this in userspace?
walkTry : List elem, state, (state, elem -> Result state err) -> Result state err
walkTry = \list, init, func ->
walkTryHelp list init func 0 (List.len list)
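walkBackwardsUntil, added above, folds from the end of the list and stops as soon as the step function breaks. A rough Rust analog of that behaviour, as an illustrative sketch rather than the Roc implementation:

use std::ops::ControlFlow;

// Walk the slice from the back, threading the state through each step,
// and stop early when the step function breaks.
fn walk_backwards_until<T, S>(
    list: &[T],
    initial: S,
    mut step: impl FnMut(S, &T) -> ControlFlow<S, S>,
) -> S {
    let mut state = initial;
    for elem in list.iter().rev() {
        match step(state, elem) {
            ControlFlow::Continue(next) => state = next,
            ControlFlow::Break(last) => return last,
        }
    }
    state
}

fn main() {
    // Sum from the end until a zero is reached: 5 + 4, then stop at the 0.
    let total = walk_backwards_until(&[3, 0, 4, 5], 0, |acc, &n| {
        if n == 0 { ControlFlow::Break(acc) } else { ControlFlow::Continue(acc + n) }
    });
    assert_eq!(total, 9);
}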

View file

@ -145,6 +145,7 @@ interface Num
]
imports [
Bool.{ Bool },
Result.{ Result },
]
## Represents a number that could be either an [Int] or a [Frac].
@ -574,7 +575,6 @@ isGte : Num a, Num a -> Bool
## Returns `Bool.true` if the number is `0`, and `Bool.false` otherwise.
isZero : Num a -> Bool
isZero = \x -> x == 0
## A number is even if dividing it by 2 gives a remainder of 0.
##

View file

@ -14,7 +14,7 @@ interface Set
intersection,
difference,
]
imports [List, Bool.{ Bool }, Dict.{ Dict }]
imports [List, Bool.{ Bool }, Dict.{ Dict }, Num.{ Nat }]
Set k := Dict.Dict k {}

View file

@ -43,8 +43,14 @@ interface Str
appendScalar,
walkScalars,
walkScalarsUntil,
withCapacity,
]
imports [
Bool.{ Bool },
Result.{ Result },
List,
Num.{ Nat, Num, U8, U16, U32, U64, U128, I8, I16, I32, I64, I128, F32, F64, Dec },
]
imports [Bool.{ Bool }, Result.{ Result }, List]
## # Types
##
@ -139,6 +145,9 @@ Utf8Problem : { byteIndex : Nat, problem : Utf8ByteProblem }
isEmpty : Str -> Bool
concat : Str, Str -> Str
## Returns a string of the specified capacity without any content
withCapacity : Nat -> Str
## Combine a list of strings into a single string, with a separator
## string in between each.
##
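Str.withCapacity, exposed above, follows the usual pre-allocation pattern: reserve room once so a series of appends does not keep reallocating. The closest Rust analogy, shown only as an illustration of the pattern rather than the Roc API:

fn main() {
    // Reserve space up front, then append without reallocating.
    let mut s = String::with_capacity(64);
    for word in ["hello", " ", "world"] {
        s.push_str(word);
    }
    assert!(s.capacity() >= 64);
    assert_eq!(s, "hello world");
}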

View file

@ -361,6 +361,7 @@ pub const STR_RESERVE: &str = "roc_builtins.str.reserve";
pub const STR_APPEND_SCALAR: &str = "roc_builtins.str.append_scalar";
pub const STR_GET_SCALAR_UNSAFE: &str = "roc_builtins.str.get_scalar_unsafe";
pub const STR_CLONE_TO: &str = "roc_builtins.str.clone_to";
pub const STR_WITH_CAPACITY: &str = "roc_builtins.str.with_capacity";
pub const LIST_MAP: &str = "roc_builtins.list.map";
pub const LIST_MAP2: &str = "roc_builtins.list.map2";

View file

@ -55,6 +55,8 @@ macro_rules! map_symbol_to_lowlevel_and_arity {
Symbol::NUM_TO_F32_CHECKED => Some(to_num_checked(Symbol::NUM_TO_F32_CHECKED, var_store, LowLevel::NumToFloatChecked)),
Symbol::NUM_TO_F64_CHECKED => Some(to_num_checked(Symbol::NUM_TO_F64_CHECKED, var_store, LowLevel::NumToFloatChecked)),
Symbol::NUM_IS_ZERO => Some(to_num_is_zero(Symbol::NUM_IS_ZERO, var_store)),
_ => None,
}
}
@ -121,6 +123,7 @@ map_symbol_to_lowlevel_and_arity! {
StrGetScalarUnsafe; STR_GET_SCALAR_UNSAFE; 2,
StrToNum; STR_TO_NUM; 1,
StrGetCapacity; STR_CAPACITY; 1,
StrWithCapacity; STR_WITH_CAPACITY; 1,
ListLen; LIST_LEN; 1,
ListWithCapacity; LIST_WITH_CAPACITY; 1,
@ -535,3 +538,33 @@ fn to_num_checked(symbol: Symbol, var_store: &mut VarStore, lowlevel: LowLevel)
ret_var,
)
}
fn to_num_is_zero(symbol: Symbol, var_store: &mut VarStore) -> Def {
let bool_var = var_store.fresh();
let num_var = var_store.fresh();
let body = Expr::RunLowLevel {
op: LowLevel::Eq,
args: vec![
(num_var, Var(Symbol::ARG_1)),
(
num_var,
Num(
var_store.fresh(),
"0".to_string().into_boxed_str(),
crate::expr::IntValue::I128(0i128.to_ne_bytes()),
roc_types::num::NumBound::None,
),
),
],
ret_var: bool_var,
};
defn(
symbol,
vec![(num_var, Symbol::ARG_1)],
var_store,
body,
bool_var,
)
}

View file

@ -60,8 +60,6 @@ trait CopyEnv {
fn clone_name(&mut self, name: SubsIndex<Lowercase>) -> SubsIndex<Lowercase>;
fn clone_tag_name(&mut self, tag_name: SubsIndex<TagName>) -> SubsIndex<TagName>;
fn clone_field_names(&mut self, field_names: SubsSlice<Lowercase>) -> SubsSlice<Lowercase>;
fn clone_tag_names(&mut self, tag_names: SubsSlice<TagName>) -> SubsSlice<TagName>;
@ -95,11 +93,6 @@ impl CopyEnv for Subs {
name
}
#[inline(always)]
fn clone_tag_name(&mut self, tag_name: SubsIndex<TagName>) -> SubsIndex<TagName> {
tag_name
}
#[inline(always)]
fn clone_field_names(&mut self, field_names: SubsSlice<Lowercase>) -> SubsSlice<Lowercase> {
field_names
@ -150,11 +143,6 @@ impl<'a> CopyEnv for AcrossSubs<'a> {
SubsIndex::push_new(&mut self.target.field_names, self.source[name].clone())
}
#[inline(always)]
fn clone_tag_name(&mut self, tag_name: SubsIndex<TagName>) -> SubsIndex<TagName> {
SubsIndex::push_new(&mut self.target.tag_names, self.source[tag_name].clone())
}
#[inline(always)]
fn clone_field_names(&mut self, field_names: SubsSlice<Lowercase>) -> SubsSlice<Lowercase> {
SubsSlice::extend_new(
@ -259,7 +247,7 @@ fn deep_copy_expr_help<C: CopyEnv>(env: &mut C, copied: &mut Vec<Variable>, expr
Int(v1, v2, str, val, bound) => Int(sub!(*v1), sub!(*v2), str.clone(), *val, *bound),
Float(v1, v2, str, val, bound) => Float(sub!(*v1), sub!(*v2), str.clone(), *val, *bound),
Str(str) => Str(str.clone()),
SingleQuote(char) => SingleQuote(*char),
SingleQuote(v1, v2, char, bound) => SingleQuote(sub!(*v1), sub!(*v2), *char, *bound),
List {
elem_var,
loc_elems,
@ -725,7 +713,7 @@ fn deep_copy_pattern_help<C: CopyEnv>(
FloatLiteral(sub!(*v1), sub!(*v2), s.clone(), *n, *bound)
}
StrLiteral(s) => StrLiteral(s.clone()),
SingleQuote(c) => SingleQuote(*c),
SingleQuote(v1, v2, c, bound) => SingleQuote(sub!(*v1), sub!(*v2), *c, *bound),
Underscore => Underscore,
AbilityMemberSpecialization { ident, specializes } => AbilityMemberSpecialization {
ident: *ident,
@ -935,12 +923,13 @@ fn deep_copy_type_vars<C: CopyEnv>(
Structure(RecursiveTagUnion(new_rec_var, new_union_tags, new_ext_var))
})
}
FunctionOrTagUnion(tag_name, symbol, ext_var) => {
FunctionOrTagUnion(tag_names, symbols, ext_var) => {
let new_ext_var = descend_var!(ext_var);
let new_tag_name = env.clone_tag_name(tag_name);
let new_tag_names = env.clone_tag_names(tag_names);
let new_symbols = env.clone_lambda_names(symbols);
perform_clone!(Structure(FunctionOrTagUnion(
new_tag_name,
symbol,
new_tag_names,
new_symbols,
new_ext_var
)))
}

View file

@ -1884,7 +1884,7 @@ fn pattern_to_vars_by_symbol(
| IntLiteral(..)
| FloatLiteral(..)
| StrLiteral(_)
| SingleQuote(_)
| SingleQuote(..)
| Underscore
| MalformedPattern(_, _)
| UnsupportedPattern(_)

View file

@ -253,7 +253,7 @@ fn sketch_pattern(pattern: &crate::pattern::Pattern) -> SketchedPattern {
}
&FloatLiteral(_, _, _, f, _) => SP::Literal(Literal::Float(f64::to_bits(f))),
StrLiteral(v) => SP::Literal(Literal::Str(v.clone())),
&SingleQuote(c) => SP::Literal(Literal::Byte(c as u8)),
&SingleQuote(_, _, c, _) => SP::Literal(Literal::Byte(c as u8)),
RecordDestructure { destructs, .. } => {
let tag_id = TagId(0);
let mut patterns = std::vec::Vec::with_capacity(destructs.len());

View file

@ -22,6 +22,7 @@ use roc_parse::ast::{self, Defs, EscapedChar, StrLiteral};
use roc_parse::pattern::PatternType::*;
use roc_problem::can::{PrecedenceProblem, Problem, RuntimeError};
use roc_region::all::{Loc, Region};
use roc_types::num::SingleQuoteBound;
use roc_types::subs::{ExhaustiveMark, IllegalCycleMark, RedundantMark, VarStore, Variable};
use roc_types::types::{Alias, Category, LambdaSet, OptAbleVar, Type};
use std::fmt::{Debug, Display};
@ -91,7 +92,8 @@ pub enum Expr {
Int(Variable, Variable, Box<str>, IntValue, IntBound),
Float(Variable, Variable, Box<str>, f64, FloatBound),
Str(Box<str>),
SingleQuote(char),
// Number variable, precision variable, value, bound
SingleQuote(Variable, Variable, char, SingleQuoteBound),
List {
elem_var: Variable,
loc_elems: Vec<Loc<Expr>>,
@ -637,7 +639,15 @@ pub fn canonicalize_expr<'a>(
let mut it = string.chars().peekable();
if let Some(char) = it.next() {
if it.peek().is_none() {
(Expr::SingleQuote(char), Output::default())
(
Expr::SingleQuote(
var_store.fresh(),
var_store.fresh(),
char,
SingleQuoteBound::from_char(char),
),
Output::default(),
)
} else {
// multiple chars were found
let error = roc_problem::can::RuntimeError::MultipleCharsInSingleQuote(region);
@ -1642,7 +1652,7 @@ pub fn inline_calls(var_store: &mut VarStore, scope: &mut Scope, expr: Expr) ->
| other @ Int(..)
| other @ Float(..)
| other @ Str { .. }
| other @ SingleQuote(_)
| other @ SingleQuote(..)
| other @ RuntimeError(_)
| other @ EmptyRecord
| other @ Accessor { .. }
@ -2703,7 +2713,7 @@ fn get_lookup_symbols(expr: &Expr, var_store: &mut VarStore) -> Vec<(Symbol, Var
| Expr::Str(_)
| Expr::ZeroArgumentTag { .. }
| Expr::Accessor(_)
| Expr::SingleQuote(_)
| Expr::SingleQuote(..)
| Expr::EmptyRecord
| Expr::TypedHole(_)
| Expr::RuntimeError(_)

View file

@ -899,7 +899,7 @@ fn fix_values_captured_in_closure_pattern(
| IntLiteral(..)
| FloatLiteral(..)
| StrLiteral(_)
| SingleQuote(_)
| SingleQuote(..)
| Underscore
| Shadowed(..)
| MalformedPattern(_, _)
@ -1038,7 +1038,7 @@ fn fix_values_captured_in_closure_expr(
| Int(..)
| Float(..)
| Str(_)
| SingleQuote(_)
| SingleQuote(..)
| Var(_)
| AbilityMember(..)
| EmptyRecord

View file

@ -12,6 +12,7 @@ use roc_parse::ast::{self, StrLiteral, StrSegment};
use roc_parse::pattern::PatternType;
use roc_problem::can::{MalformedPatternProblem, Problem, RuntimeError, ShadowKind};
use roc_region::all::{Loc, Region};
use roc_types::num::SingleQuoteBound;
use roc_types::subs::{VarStore, Variable};
use roc_types::types::{LambdaSet, OptAbleVar, PatternCategory, Type};
@ -59,7 +60,7 @@ pub enum Pattern {
IntLiteral(Variable, Variable, Box<str>, IntValue, IntBound),
FloatLiteral(Variable, Variable, Box<str>, f64, FloatBound),
StrLiteral(Box<str>),
SingleQuote(char),
SingleQuote(Variable, Variable, char, SingleQuoteBound),
Underscore,
/// An identifier that marks a specialization of an ability member.
@ -95,7 +96,7 @@ impl Pattern {
IntLiteral(var, ..) => Some(*var),
FloatLiteral(var, ..) => Some(*var),
StrLiteral(_) => None,
SingleQuote(_) => None,
SingleQuote(..) => None,
Underscore => None,
AbilityMemberSpecialization { .. } => None,
@ -148,7 +149,7 @@ impl Pattern {
IntLiteral(..) => C::Int,
FloatLiteral(..) => C::Float,
StrLiteral(_) => C::Str,
SingleQuote(_) => C::Character,
SingleQuote(..) => C::Character,
Underscore => C::PatternDefault,
AbilityMemberSpecialization { .. } => C::PatternDefault,
@ -456,7 +457,12 @@ pub fn canonicalize_pattern<'a>(
let mut it = string.chars().peekable();
if let Some(char) = it.next() {
if it.peek().is_none() {
Pattern::SingleQuote(char)
Pattern::SingleQuote(
var_store.fresh(),
var_store.fresh(),
char,
SingleQuoteBound::from_char(char),
)
} else {
// multiple chars were found
let problem = MalformedPatternProblem::MultipleCharsInSingleQuote;
@ -724,7 +730,7 @@ impl<'a> BindingsFromPattern<'a> {
| IntLiteral(..)
| FloatLiteral(..)
| StrLiteral(_)
| SingleQuote(_)
| SingleQuote(..)
| Underscore
| Shadowed(_, _, _)
| MalformedPattern(_, _)

View file

@ -4,7 +4,7 @@ use roc_can::expected::Expected::{self, *};
use roc_can::num::{FloatBound, FloatWidth, IntBound, IntLitWidth, NumBound, SignDemand};
use roc_module::symbol::Symbol;
use roc_region::all::Region;
use roc_types::num::NumericRange;
use roc_types::num::{NumericRange, SingleQuoteBound};
use roc_types::subs::Variable;
use roc_types::types::Type::{self, *};
use roc_types::types::{AliasKind, Category};
@ -99,6 +99,42 @@ pub fn int_literal(
constraints.exists([num_var], and_constraint)
}
pub fn single_quote_literal(
constraints: &mut Constraints,
num_var: Variable,
precision_var: Variable,
expected: Expected<Type>,
region: Region,
bound: SingleQuoteBound,
) -> Constraint {
let reason = Reason::IntLiteral;
// Always add the bound first; this improves the resolved type quality in case it's an alias like "U8".
let mut constrs = ArrayVec::<_, 3>::new();
let num_type = add_numeric_bound_constr(
constraints,
&mut constrs,
num_var,
precision_var,
bound,
region,
Category::Character,
);
constrs.extend([
constraints.equal_types(
num_type.clone(),
ForReason(reason, num_int(Type::Variable(precision_var)), region),
Category::Character,
region,
),
constraints.equal_types(num_type, expected, Category::Character, region),
]);
let and_constraint = constraints.and_constraint(constrs);
constraints.exists([num_var], and_constraint)
}
#[inline(always)]
pub fn float_literal(
constraints: &mut Constraints,
@ -332,6 +368,16 @@ impl TypedNumericBound for NumBound {
}
}
impl TypedNumericBound for SingleQuoteBound {
fn numeric_bound(&self) -> NumericBound {
match self {
&SingleQuoteBound::AtLeast { width } => {
NumericBound::Range(NumericRange::IntAtLeastEitherSign(width))
}
}
}
}
/// A bound placed on a number because of its literal value.
/// e.g. `-5` cannot be unsigned, and 300 does not fit in a U8
#[derive(Debug, Clone, Copy, PartialEq, Eq)]

View file

@ -1,7 +1,8 @@
use std::ops::Range;
use crate::builtins::{
empty_list_type, float_literal, int_literal, list_type, num_literal, num_u32, str_type,
empty_list_type, float_literal, int_literal, list_type, num_literal, single_quote_literal,
str_type,
};
use crate::pattern::{constrain_pattern, PatternState};
use roc_can::annotation::IntroducedVariables;
@ -292,7 +293,14 @@ pub fn constrain_expr(
constraints.exists(vars, and_constraint)
}
Str(_) => constraints.equal_types(str_type(), expected, Category::Str, region),
SingleQuote(_) => constraints.equal_types(num_u32(), expected, Category::Character, region),
SingleQuote(num_var, precision_var, _, bound) => single_quote_literal(
constraints,
*num_var,
*precision_var,
expected,
region,
*bound,
),
List {
elem_var,
loc_elems,

View file

@ -71,7 +71,7 @@ fn headers_from_annotation_help(
| NumLiteral(..)
| IntLiteral(..)
| FloatLiteral(..)
| SingleQuote(_)
| SingleQuote(..)
| StrLiteral(_) => true,
RecordDestructure { destructs, .. } => match annotation.value.shallow_dealias() {
@ -320,9 +320,32 @@ pub fn constrain_pattern(
));
}
SingleQuote(_) => {
&SingleQuote(num_var, precision_var, _, bound) => {
// First constraint on the free num var; this improves the resolved type quality in
// case the bound is an alias.
let num_type = builtins::add_numeric_bound_constr(
constraints,
&mut state.constraints,
num_var,
num_var,
bound,
region,
Category::Int,
);
// Link the free num var with the int var and our expectation.
let int_type = builtins::num_int(Type::Variable(precision_var));
state.constraints.push(constraints.equal_types(
num_type.clone(), // TODO check me if something breaks!
Expected::NoExpectation(int_type),
Category::Int,
region,
));
// Also constrain the pattern against the num var, again to reuse aliases if they're present.
state.constraints.push(constraints.equal_pattern_types(
builtins::num_u32(),
num_type,
expected,
PatternCategory::Character,
region,

View file

@ -1,14 +1,191 @@
//! Derivers for the `Hash` ability.
use std::iter::once;
use roc_can::{
expr::{AnnotatedMark, ClosureData, Expr, Recursive},
pattern::Pattern,
};
use roc_derive_key::hash::FlatHashKey;
use roc_module::symbol::Symbol;
use roc_module::{called_via::CalledVia, ident::Lowercase, symbol::Symbol};
use roc_region::all::Loc;
use roc_types::{
subs::{
Content, FlatType, LambdaSet, OptVariable, RecordFields, SubsSlice, UnionLambdas, Variable,
VariableSubsSlice,
},
types::RecordField,
};
use crate::{util::Env, DerivedBody};
use crate::{synth_var, util::Env, DerivedBody};
pub(crate) fn derive_hash(
_env: &mut Env<'_>,
key: FlatHashKey,
_def_symbol: Symbol,
) -> DerivedBody {
match key {}
pub(crate) fn derive_hash(env: &mut Env<'_>, key: FlatHashKey, def_symbol: Symbol) -> DerivedBody {
let (body, body_type) = match key {
FlatHashKey::Record(fields) => hash_record(env, def_symbol, fields),
};
let specialization_lambda_sets =
env.get_specialization_lambda_sets(body_type, Symbol::HASH_HASH);
DerivedBody {
body,
body_type,
specialization_lambda_sets,
}
}
fn hash_record(env: &mut Env<'_>, fn_name: Symbol, fields: Vec<Lowercase>) -> (Expr, Variable) {
// Suppose rcd = { f1, ..., fn }.
// Build a generalized type t_rcd = { f1: t1, ..., fn: tn }, with fresh t1, ..., tn,
// so that we can re-use the derived impl for many records of the same fields.
let (record_var, record_fields) = {
let flex_fields = fields
.into_iter()
.map(|name| {
(
name,
RecordField::Required(env.subs.fresh_unnamed_flex_var()),
)
})
.collect::<Vec<(Lowercase, _)>>();
let fields = RecordFields::insert_into_subs(env.subs, flex_fields);
let record_var = synth_var(
env.subs,
Content::Structure(FlatType::Record(fields, Variable::EMPTY_RECORD)),
);
(record_var, fields)
};
// Now, a hasher for this record is
//
// hash_rcd : hasher, { f1: t1, ..., fn: tn } -> hasher | hasher has Hasher
// hash_rcd = \hasher, rcd ->
// Hash.hash (
// Hash.hash
// ...
// (Hash.hash hasher rcd.f1)
// ...
// rcd.f_n1)
// rcd.fn
//
// So, just build a fold travelling up the fields.
let rcd_sym = env.new_symbol("rcd");
let hasher_sym = env.new_symbol("hasher");
let hasher_var = synth_var(env.subs, Content::FlexAbleVar(None, Symbol::HASH_HASHER));
let (body, body_var) = record_fields.iter_all().fold(
(Expr::Var(hasher_sym), hasher_var),
|(body, body_var), (field_name, field_var, _)| {
let field_name = env.subs[field_name].clone();
let field_var = env.subs[field_var];
let field_access = Expr::Access {
record_var,
field_var,
ext_var: env.subs.fresh_unnamed_flex_var(),
loc_expr: Box::new(Loc::at_zero(Expr::Var(rcd_sym))),
field: field_name,
};
let (hash_fn_data, returned_hasher_var) = {
// build `Hash.hash ...` function type
//
// hasher, val -[uls]-> hasher | hasher has Hasher, val has Hash
let exposed_hash_fn_var = env.import_builtin_symbol_var(Symbol::HASH_HASH);
// (typeof body), (typeof field) -[clos]-> hasher_result
let this_arguments_slice =
VariableSubsSlice::insert_into_subs(env.subs, [body_var, field_var]);
let this_hash_clos_var = env.subs.fresh_unnamed_flex_var();
let this_hasher_result_var = env.subs.fresh_unnamed_flex_var();
let this_hash_fn_var = synth_var(
env.subs,
Content::Structure(FlatType::Func(
this_arguments_slice,
this_hash_clos_var,
this_hasher_result_var,
)),
);
// hasher, val -[uls]-> hasher | hasher has Hasher, val has Hash
// ~ (typeof body), (typeof field) -[clos]-> hasher_result
env.unify(exposed_hash_fn_var, this_hash_fn_var);
// Hash.hash : hasher, (typeof field) -[clos]-> hasher | hasher has Hasher, (typeof field) has Hash
let hash_fn_head = Expr::AbilityMember(Symbol::HASH_HASH, None, this_hash_fn_var);
let hash_fn_data = Box::new((
this_hash_fn_var,
Loc::at_zero(hash_fn_head),
this_hash_clos_var,
this_hasher_result_var,
));
(hash_fn_data, this_hasher_result_var)
};
let hash_arguments = vec![
(body_var, Loc::at_zero(body)),
(field_var, Loc::at_zero(field_access)),
];
let call_hash = Expr::Call(hash_fn_data, hash_arguments, CalledVia::Space);
(call_hash, returned_hasher_var)
},
);
// Finally, build the closure
// \hasher, rcd -> body
let (fn_var, fn_clos_var) = {
// Create fn_var for ambient capture; we fix it up below.
let fn_var = synth_var(env.subs, Content::Error);
// -[fn_name]->
let fn_captures = vec![];
let fn_name_labels = UnionLambdas::insert_into_subs(env.subs, once((fn_name, fn_captures)));
let fn_clos_var = synth_var(
env.subs,
Content::LambdaSet(LambdaSet {
solved: fn_name_labels,
recursion_var: OptVariable::NONE,
unspecialized: SubsSlice::default(),
ambient_function: fn_var,
}),
);
// hasher, rcd_var -[fn_name]-> (hasher = body_var)
let args_slice = SubsSlice::insert_into_subs(env.subs, [hasher_var, record_var]);
env.subs.set_content(
fn_var,
Content::Structure(FlatType::Func(args_slice, fn_clos_var, body_var)),
);
(fn_var, fn_clos_var)
};
let clos_expr = Expr::Closure(ClosureData {
function_type: fn_var,
closure_type: fn_clos_var,
return_type: body_var,
name: fn_name,
captured_symbols: vec![],
recursive: Recursive::NotRecursive,
arguments: vec![
(
hasher_var,
AnnotatedMark::known_exhaustive(),
Loc::at_zero(Pattern::Identifier(hasher_sym)),
),
(
record_var,
AnnotatedMark::known_exhaustive(),
Loc::at_zero(Pattern::Identifier(rcd_sym)),
),
],
loc_body: Box::new(Loc::at_zero(body)),
});
(clos_expr, fn_var)
}
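hash_record above builds a nested fold over the record's fields, feeding the hasher produced by one Hash.hash call into the next. The same shape in plain Rust, as a hand-written sketch of what the generated code computes (Rust's Hasher mutates in place rather than returning a new hasher, so the threading is explicit here):

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Hash a two-field "record" by folding the hasher over the fields in order,
// which is the shape of the expression the deriver constructs.
fn hash_two_field_record<H: Hasher>(mut hasher: H, f1: &impl Hash, f2: &impl Hash) -> H {
    f1.hash(&mut hasher);
    f2.hash(&mut hasher);
    hasher
}

fn main() {
    let hasher = DefaultHasher::new();
    let hasher = hash_two_field_record(hasher, &"bar", &42u32);
    println!("{:x}", hasher.finish());
}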

View file

@ -2,7 +2,7 @@ use roc_module::{
ident::{Lowercase, TagName},
symbol::Symbol,
};
use roc_types::subs::{Content, FlatType, Subs, Variable};
use roc_types::subs::{Content, FlatType, GetSubsSlice, Subs, Variable};
use crate::{
util::{check_derivable_ext_var, debug_name_record},
@ -107,9 +107,14 @@ impl FlatEncodable {
Ok(Key(FlatEncodableKey::TagUnion(tag_names_and_payload_sizes)))
}
FlatType::FunctionOrTagUnion(name_index, _, _) => Ok(Key(
FlatEncodableKey::TagUnion(vec![(subs[name_index].clone(), 0)]),
)),
FlatType::FunctionOrTagUnion(names_index, _, _) => {
Ok(Key(FlatEncodableKey::TagUnion(
subs.get_subs_slice(names_index)
.iter()
.map(|t| (t.clone(), 0))
.collect(),
)))
}
FlatType::EmptyRecord => Ok(Key(FlatEncodableKey::Record(vec![]))),
FlatType::EmptyTagUnion => Ok(Key(FlatEncodableKey::TagUnion(vec![]))),
//

View file

@ -1,7 +1,10 @@
use roc_module::symbol::Symbol;
use roc_module::{ident::Lowercase, symbol::Symbol};
use roc_types::subs::{Content, FlatType, Subs, Variable};
use crate::DeriveError;
use crate::{
util::{check_derivable_ext_var, debug_name_record},
DeriveError,
};
#[derive(Hash)]
pub enum FlatHash {
@ -12,11 +15,16 @@ pub enum FlatHash {
}
#[derive(Hash, PartialEq, Eq, Debug, Clone)]
pub enum FlatHashKey {}
pub enum FlatHashKey {
// Unfortunate that we must allocate here, c'est la vie
Record(Vec<Lowercase>),
}
impl FlatHashKey {
pub(crate) fn debug_name(&self) -> String {
unreachable!() // yet
match self {
FlatHashKey::Record(fields) => debug_name_record(fields),
}
}
}
@ -31,8 +39,26 @@ impl FlatHash {
Symbol::STR_STR => Ok(SingleLambdaSetImmediate(Symbol::HASH_HASH_STR_BYTES)),
_ => Err(Underivable),
},
FlatType::Record(_fields, _ext) => {
Err(Underivable) // yet
FlatType::Record(fields, ext) => {
let (fields_iter, ext) = fields.unsorted_iterator_and_ext(subs, ext);
check_derivable_ext_var(subs, ext, |ext| {
matches!(ext, Content::Structure(FlatType::EmptyRecord))
})?;
let mut field_names = Vec::with_capacity(fields.len());
for (field_name, record_field) in fields_iter {
if record_field.is_optional() {
// Can't derive a concrete decoder for optional fields, since those are
// compile-time-polymorphic
return Err(Underivable);
}
field_names.push(field_name.clone());
}
field_names.sort();
Ok(Key(FlatHashKey::Record(field_names)))
}
FlatType::TagUnion(_tags, _ext) | FlatType::RecursiveTagUnion(_, _tags, _ext) => {
Err(Underivable) // yet
@ -40,7 +66,7 @@ impl FlatHash {
FlatType::FunctionOrTagUnion(_name_index, _, _) => {
Err(Underivable) // yet
}
FlatType::EmptyRecord => Err(Underivable), // yet
FlatType::EmptyRecord => Ok(Key(FlatHashKey::Record(vec![]))),
FlatType::EmptyTagUnion => {
Err(Underivable) // yet
}

View file

@ -718,7 +718,7 @@ pub fn construct_optimization_passes<'a>(
OptLevel::Optimize => {
pmb.set_optimization_level(OptimizationLevel::Aggressive);
// this threshold seems to do what we want
pmb.set_inliner_with_threshold(275);
pmb.set_inliner_with_threshold(750);
}
}
@ -6049,6 +6049,20 @@ fn run_low_level<'a, 'ctx, 'env>(
bitcode::STR_TRIM_RIGHT,
)
}
StrWithCapacity => {
// Str.withCapacity : Nat -> Str
debug_assert_eq!(args.len(), 1);
let str_len = load_symbol(scope, &args[0]);
call_str_bitcode_fn(
env,
&[],
&[str_len],
BitcodeReturns::Str,
bitcode::STR_WITH_CAPACITY,
)
}
ListLen => {
// List.len : List * -> Nat
debug_assert_eq!(args.len(), 1);
@ -6171,7 +6185,7 @@ fn run_low_level<'a, 'ctx, 'env>(
list_prepend(env, original_wrapper, elem, elem_layout)
}
StrGetUnsafe => {
// List.getUnsafe : Str, Nat -> u8
// Str.getUnsafe : Str, Nat -> u8
debug_assert_eq!(args.len(), 2);
let wrapper_struct = load_symbol(scope, &args[0]);

View file

@ -302,6 +302,7 @@ impl<'a> LowLevelCall<'a> {
StrSubstringUnsafe => {
self.load_args_and_call_zig(backend, bitcode::STR_SUBSTRING_UNSAFE)
}
StrWithCapacity => self.load_args_and_call_zig(backend, bitcode::STR_WITH_CAPACITY),
// List
ListLen => match backend.storage.get(&self.arguments[0]) {

View file

@ -2239,24 +2239,15 @@ fn update<'a>(
// add the prelude
let mut header = header;
if ![ModuleId::RESULT, ModuleId::BOOL].contains(&header.module_id) {
extend_header_with_builtin(&mut header, ModuleId::RESULT);
}
if ![ModuleId::NUM, ModuleId::BOOL, ModuleId::RESULT].contains(&header.module_id) {
extend_header_with_builtin(&mut header, ModuleId::NUM);
}
if ![ModuleId::BOOL].contains(&header.module_id) {
extend_header_with_builtin(&mut header, ModuleId::BOOL);
}
if !header.module_id.is_builtin() {
extend_header_with_builtin(&mut header, ModuleId::BOX);
extend_header_with_builtin(&mut header, ModuleId::NUM);
extend_header_with_builtin(&mut header, ModuleId::BOOL);
extend_header_with_builtin(&mut header, ModuleId::STR);
extend_header_with_builtin(&mut header, ModuleId::LIST);
extend_header_with_builtin(&mut header, ModuleId::RESULT);
extend_header_with_builtin(&mut header, ModuleId::DICT);
extend_header_with_builtin(&mut header, ModuleId::SET);
extend_header_with_builtin(&mut header, ModuleId::LIST);
extend_header_with_builtin(&mut header, ModuleId::BOX);
extend_header_with_builtin(&mut header, ModuleId::ENCODE);
extend_header_with_builtin(&mut header, ModuleId::DECODE);
extend_header_with_builtin(&mut header, ModuleId::HASH);

View file

@ -30,6 +30,7 @@ pub enum LowLevel {
StrAppendScalar,
StrGetScalarUnsafe,
StrGetCapacity,
StrWithCapacity,
ListLen,
ListWithCapacity,
ListReserve,
@ -249,6 +250,7 @@ map_symbol_to_lowlevel! {
StrGetScalarUnsafe <= STR_GET_SCALAR_UNSAFE,
StrToNum <= STR_TO_NUM,
StrGetCapacity <= STR_CAPACITY,
StrWithCapacity <= STR_WITH_CAPACITY,
ListLen <= LIST_LEN,
ListGetCapacity <= LIST_CAPACITY,
ListWithCapacity <= LIST_WITH_CAPACITY,

View file

@ -1296,6 +1296,7 @@ define_builtins! {
50 STR_REPLACE_EACH: "replaceEach"
51 STR_REPLACE_FIRST: "replaceFirst"
52 STR_REPLACE_LAST: "replaceLast"
53 STR_WITH_CAPACITY: "withCapacity"
}
6 LIST: "List" => {
0 LIST_LIST: "List" exposed_apply_type=true // the List.List type alias
@ -1373,6 +1374,8 @@ define_builtins! {
72 LIST_SUBLIST_LOWLEVEL: "sublistLowlevel"
73 LIST_CAPACITY: "capacity"
74 LIST_MAP_TRY: "mapTry"
75 LIST_WALK_TRY: "walkTry"
76 LIST_WALK_BACKWARDS_UNTIL: "walkBackwardsUntil"
}
7 RESULT: "Result" => {
0 RESULT_RESULT: "Result" exposed_type=true // the Result.Result type alias

View file

@ -881,7 +881,7 @@ pub fn lowlevel_borrow_signature(arena: &Bump, op: LowLevel) -> &[bool] {
Unreachable => arena.alloc_slice_copy(&[irrelevant]),
ListLen | StrIsEmpty | StrToScalars | StrCountGraphemes | StrCountUtf8Bytes
| StrGetCapacity | ListGetCapacity => arena.alloc_slice_copy(&[borrowed]),
ListWithCapacity => arena.alloc_slice_copy(&[irrelevant]),
ListWithCapacity | StrWithCapacity => arena.alloc_slice_copy(&[irrelevant]),
ListReplaceUnsafe => arena.alloc_slice_copy(&[owned, irrelevant, irrelevant]),
StrGetUnsafe | ListGetUnsafe => arena.alloc_slice_copy(&[borrowed, irrelevant]),
ListConcat => arena.alloc_slice_copy(&[owned, owned]),

View file

@ -4027,12 +4027,18 @@ pub fn with_hole<'a>(
hole,
),
SingleQuote(character) => Stmt::Let(
assigned,
Expr::Literal(Literal::Int((character as i128).to_ne_bytes())),
Layout::int_width(IntWidth::I32),
hole,
),
SingleQuote(_, _, character, _) => {
let layout = layout_cache
.from_var(env.arena, variable, env.subs)
.unwrap();
Stmt::Let(
assigned,
Expr::Literal(Literal::Int((character as i128).to_ne_bytes())),
layout,
hole,
)
}
LetNonRec(def, cont) => from_can_let(
env,
procs,
@ -8883,10 +8889,12 @@ fn from_can_pattern_help<'a>(
IntOrFloatValue::Float(*float),
)),
StrLiteral(v) => Ok(Pattern::StrLiteral(v.clone())),
SingleQuote(c) => Ok(Pattern::IntLiteral(
(*c as i128).to_ne_bytes(),
IntWidth::I32,
)),
SingleQuote(var, _, c, _) => match layout_cache.from_var(env.arena, *var, env.subs) {
Ok(Layout::Builtin(Builtin::Int(width))) => {
Ok(Pattern::IntLiteral((*c as i128).to_ne_bytes(), width))
}
o => internal_error!("an integer width was expected, but we found {:?}", o),
},
Shadowed(region, ident, _new_symbol) => Err(RuntimeError::Shadowing {
original_region: *region,
shadow: ident.clone(),

View file

@ -12,7 +12,7 @@ use roc_problem::can::RuntimeError;
use roc_target::{PtrWidth, TargetInfo};
use roc_types::num::NumericRange;
use roc_types::subs::{
self, Content, FlatType, GetSubsSlice, Label, OptVariable, RecordFields, Subs, UnionTags,
self, Content, FlatType, GetSubsSlice, Label, OptVariable, RecordFields, Subs,
UnsortedUnionLabels, Variable,
};
use roc_types::types::{gather_fields_unsorted_iter, RecordField, RecordFieldsError};
@ -3152,16 +3152,18 @@ fn layout_from_flat_type<'a>(
layout_from_non_recursive_union(env, &tags).map(Ok)
}
FunctionOrTagUnion(tag_name, _, ext_var) => {
FunctionOrTagUnion(tag_names, _, ext_var) => {
debug_assert!(
ext_var_is_empty_tag_union(subs, ext_var),
"If ext_var wasn't empty, this wouldn't be a FunctionOrTagUnion!"
);
let union_tags = UnionTags::from_tag_name_index(tag_name);
let (tags, _) = union_tags.unsorted_tags_and_ext(subs, ext_var);
let tag_names = subs.get_subs_slice(tag_names);
let unsorted_tags = UnsortedUnionLabels {
tags: tag_names.iter().map(|t| (t, &[] as &[Variable])).collect(),
};
layout_from_non_recursive_union(env, &tags).map(Ok)
layout_from_non_recursive_union(env, &unsorted_tags).map(Ok)
}
RecursiveTagUnion(rec_var, tags, ext_var) => {
let (tags, ext_var) = tags.unsorted_tags_and_ext(subs, ext_var);

View file

@ -345,7 +345,12 @@ fn eat_spaces<'a>(
state = state.advance(1);
return eat_line_comment(state, comments_and_newlines);
}
_ => break,
_ => {
if !comments_and_newlines.is_empty() {
state = state.mark_current_indent();
}
break;
}
}
}
@ -398,7 +403,10 @@ fn eat_line_comment<'a>(
index += 1;
continue 'outer;
}
_ => break,
_ => {
state = state.mark_current_indent();
break;
}
}
index += 1;
@ -490,7 +498,10 @@ fn eat_line_comment<'a>(
index += 1;
continue 'outer;
}
_ => break,
_ => {
state = state.mark_current_indent();
break;
}
}
index += 1;
@ -554,7 +565,10 @@ fn eat_line_comment<'a>(
index += 1;
continue 'outer;
}
_ => break,
_ => {
state = state.mark_current_indent();
break;
}
}
index += 1;

View file

@ -8,8 +8,8 @@ use crate::blankspace::{
use crate::ident::{lowercase_ident, parse_ident, Ident};
use crate::keyword;
use crate::parser::{
self, backtrackable, optional, sep_by1, sep_by1_e, specialize, specialize_ref, then,
trailing_sep_by0, word1, word2, EExpect, EExpr, EIf, EInParens, ELambda, EList, ENumber,
self, backtrackable, optional, parse_word1, sep_by1, sep_by1_e, specialize, specialize_ref,
then, trailing_sep_by0, word1, word2, EClosure, EExpect, EExpr, EIf, EInParens, EList, ENumber,
EPattern, ERecord, EString, EType, EWhen, Either, ParseResult, Parser,
};
use crate::pattern::{loc_closure_param, loc_has_parser};
@ -205,7 +205,10 @@ fn parse_loc_term_or_underscore_or_conditional<'a>(
loc!(specialize(EExpr::Str, string_literal_help())),
loc!(specialize(EExpr::SingleQuote, single_quote_literal_help())),
loc!(specialize(EExpr::Number, positive_number_literal_help())),
loc!(specialize(EExpr::Lambda, closure_help(min_indent, options))),
loc!(specialize(
EExpr::Closure,
closure_help(min_indent, options)
)),
loc!(underscore_expression()),
loc!(record_literal_help(min_indent)),
loc!(specialize(EExpr::List, list_literal_help(min_indent))),
@ -230,7 +233,10 @@ fn parse_loc_term_or_underscore<'a>(
loc!(specialize(EExpr::Str, string_literal_help())),
loc!(specialize(EExpr::SingleQuote, single_quote_literal_help())),
loc!(specialize(EExpr::Number, positive_number_literal_help())),
loc!(specialize(EExpr::Lambda, closure_help(min_indent, options))),
loc!(specialize(
EExpr::Closure,
closure_help(min_indent, options)
)),
loc!(underscore_expression()),
loc!(record_literal_help(min_indent)),
loc!(specialize(EExpr::List, list_literal_help(min_indent))),
@ -253,7 +259,10 @@ fn parse_loc_term<'a>(
loc!(specialize(EExpr::Str, string_literal_help())),
loc!(specialize(EExpr::SingleQuote, single_quote_literal_help())),
loc!(specialize(EExpr::Number, positive_number_literal_help())),
loc!(specialize(EExpr::Lambda, closure_help(min_indent, options))),
loc!(specialize(
EExpr::Closure,
closure_help(min_indent, options)
)),
loc!(record_literal_help(min_indent)),
loc!(specialize(EExpr::List, list_literal_help(min_indent))),
loc!(map_with_arena!(
@ -344,7 +353,6 @@ fn unary_negate<'a>() -> impl Parser<'a, (), EExpr<'a>> {
fn parse_expr_start<'a>(
min_indent: u32,
options: ExprParseOptions,
start_column: u32,
arena: &'a Bump,
state: State<'a>,
) -> ParseResult<'a, Loc<Expr<'a>>, EExpr<'a>> {
@ -355,8 +363,11 @@ fn parse_expr_start<'a>(
when::expr_help(min_indent, options)
)),
loc!(specialize(EExpr::Expect, expect_help(min_indent, options))),
loc!(specialize(EExpr::Lambda, closure_help(min_indent, options))),
loc!(move |a, s| parse_expr_operator_chain(min_indent, options, start_column, a, s)),
loc!(specialize(
EExpr::Closure,
closure_help(min_indent, options)
)),
loc!(move |a, s| parse_expr_operator_chain(min_indent, options, a, s)),
fail_expr_start_e()
]
.parse(arena, state)
@ -365,10 +376,11 @@ fn parse_expr_start<'a>(
fn parse_expr_operator_chain<'a>(
min_indent: u32,
options: ExprParseOptions,
start_column: u32,
arena: &'a Bump,
state: State<'a>,
) -> ParseResult<'a, Expr<'a>, EExpr<'a>> {
let min_indent = state.check_indent(min_indent, EExpr::IndentStart)?;
let (_, expr, state) =
loc_possibly_negative_or_negated_term(min_indent, options).parse(arena, state)?;
@ -387,7 +399,7 @@ fn parse_expr_operator_chain<'a>(
end,
};
parse_expr_end(min_indent, options, start_column, expr_state, arena, state)
parse_expr_end(min_indent, options, expr_state, arena, state)
}
}
}
@ -604,13 +616,11 @@ fn numeric_negate_expression<'a, T>(
fn parse_defs_end<'a>(
_options: ExprParseOptions,
start_column: u32,
min_indent: u32,
mut defs: Defs<'a>,
arena: &'a Bump,
state: State<'a>,
) -> ParseResult<'a, Defs<'a>, EExpr<'a>> {
let min_indent = start_column;
let mut global_state = state;
loop {
@ -723,7 +733,7 @@ fn parse_defs_end<'a>(
loc_has_parser(min_indent).parse(arena, state.clone())
{
let (_, (type_def, def_region), state) = finish_parsing_ability_def_help(
start_column,
min_indent,
Loc::at(name_region, name),
args,
loc_has,
@ -967,14 +977,12 @@ fn parse_defs_end<'a>(
fn parse_defs_expr<'a>(
options: ExprParseOptions,
start_column: u32,
min_indent: u32,
defs: Defs<'a>,
arena: &'a Bump,
state: State<'a>,
) -> ParseResult<'a, Expr<'a>, EExpr<'a>> {
let min_indent = start_column;
match parse_defs_end(options, start_column, defs, arena, state) {
match parse_defs_end(options, min_indent, defs, arena, state) {
Err(bad) => Err(bad),
Ok((_, def_state, state)) => {
// this is no def, because there is no `=` or `:`; parse as an expr
@ -1050,7 +1058,6 @@ enum AliasOrOpaque {
fn finish_parsing_alias_or_opaque<'a>(
min_indent: u32,
options: ExprParseOptions,
start_column: u32,
expr_state: ExprState<'a>,
loc_op: Loc<BinOp>,
arena: &'a Bump,
@ -1059,7 +1066,7 @@ fn finish_parsing_alias_or_opaque<'a>(
kind: AliasOrOpaque,
) -> ParseResult<'a, Expr<'a>, EExpr<'a>> {
let expr_region = expr_state.expr.region;
let indented_more = start_column + 1;
let indented_more = min_indent + 1;
let (expr, arguments) = expr_state
.validate_is_type_def(arena, loc_op, kind)
@ -1175,7 +1182,7 @@ fn finish_parsing_alias_or_opaque<'a>(
}
};
parse_defs_expr(options, start_column, defs, arena, state)
parse_defs_expr(options, min_indent, defs, arena, state)
}
mod ability {
@ -1364,7 +1371,6 @@ fn finish_parsing_ability_def_help<'a>(
fn parse_expr_operator<'a>(
min_indent: u32,
options: ExprParseOptions,
start_column: u32,
mut expr_state: ExprState<'a>,
loc_op: Loc<BinOp>,
arena: &'a Bump,
@ -1405,11 +1411,11 @@ fn parse_expr_operator<'a>(
expr_state.spaces_after = spaces;
expr_state.end = new_end;
parse_expr_end(min_indent, options, start_column, expr_state, arena, state)
parse_expr_end(min_indent, options, expr_state, arena, state)
}
BinOp::Assignment => {
let expr_region = expr_state.expr.region;
let indented_more = start_column + 1;
let indented_more = min_indent + 1;
let call = expr_state
.validate_assignment_or_backpassing(arena, loc_op, EExpr::ElmStyleFunction)
@ -1448,11 +1454,11 @@ fn parse_expr_operator<'a>(
let mut defs = Defs::default();
defs.push_value_def(value_def, def_region, &[], &[]);
parse_defs_expr(options, start_column, defs, arena, state)
parse_defs_expr(options, min_indent, defs, arena, state)
}
BinOp::Backpassing => {
let expr_region = expr_state.expr.region;
let indented_more = start_column + 1;
let indented_more = min_indent + 1;
let call = expr_state
.validate_assignment_or_backpassing(arena, loc_op, |_, pos| {
@ -1502,7 +1508,6 @@ fn parse_expr_operator<'a>(
BinOp::IsAliasType | BinOp::IsOpaqueType => finish_parsing_alias_or_opaque(
min_indent,
options,
start_column,
expr_state,
loc_op,
arena,
@ -1552,7 +1557,7 @@ fn parse_expr_operator<'a>(
expr_state.spaces_after = spaces;
// TODO new start?
parse_expr_end(min_indent, options, start_column, expr_state, arena, state)
parse_expr_end(min_indent, options, expr_state, arena, state)
}
}
}
@ -1566,7 +1571,6 @@ fn parse_expr_operator<'a>(
fn parse_expr_end<'a>(
min_indent: u32,
options: ExprParseOptions,
start_column: u32,
mut expr_state: ExprState<'a>,
arena: &'a Bump,
state: State<'a>,
@ -1626,13 +1630,13 @@ fn parse_expr_end<'a>(
let args = arguments.into_bump_slice();
let (_, (type_def, def_region), state) =
finish_parsing_ability_def_help(start_column, name, args, has, arena, state)?;
finish_parsing_ability_def_help(min_indent, name, args, has, arena, state)?;
let mut defs = Defs::default();
defs.push_type_def(type_def, def_region, &[], &[]);
parse_defs_expr(options, start_column, defs, arena, state)
parse_defs_expr(options, min_indent, defs, arena, state)
}
Ok((_, mut arg, state)) => {
let new_end = state.pos();
@ -1660,7 +1664,7 @@ fn parse_expr_end<'a>(
expr_state.end = new_end;
expr_state.spaces_after = new_spaces;
parse_expr_end(min_indent, options, start_column, expr_state, arena, state)
parse_expr_end(min_indent, options, expr_state, arena, state)
}
}
}
@ -1672,15 +1676,7 @@ fn parse_expr_end<'a>(
Ok((_, loc_op, state)) => {
expr_state.consume_spaces(arena);
expr_state.initial = before_op;
parse_expr_operator(
min_indent,
options,
start_column,
expr_state,
loc_op,
arena,
state,
)
parse_expr_operator(min_indent, options, expr_state, loc_op, arena, state)
}
Err((NoProgress, _, mut state)) => {
// try multi-backpassing
@ -1714,8 +1710,6 @@ fn parse_expr_end<'a>(
match word2(b'<', b'-', EExpr::BackpassArrow).parse(arena, state) {
Err((_, fail, state)) => Err((MadeProgress, fail, state)),
Ok((_, _, state)) => {
let min_indent = start_column;
let parse_body = space0_before_e(
move |a, s| parse_loc_expr(min_indent + 1, a, s),
min_indent,
@ -1793,8 +1787,7 @@ fn parse_loc_expr_with_options<'a>(
arena: &'a Bump,
state: State<'a>,
) -> ParseResult<'a, Loc<Expr<'a>>, EExpr<'a>> {
let column = state.column();
parse_expr_start(min_indent, options, column, arena, state)
parse_expr_start(min_indent, options, arena, state)
}
/// If the given Expr would parse the same way as a valid Pattern, convert it.
@ -1973,47 +1966,67 @@ pub fn toplevel_defs<'a>(min_indent: u32) -> impl Parser<'a, Defs<'a>, EExpr<'a>
fn closure_help<'a>(
min_indent: u32,
options: ExprParseOptions,
) -> impl Parser<'a, Expr<'a>, ELambda<'a>> {
map_with_arena!(
skip_first!(
// All closures start with a '\' - e.g. (\x -> x + 1)
word1(b'\\', ELambda::Start),
// Once we see the '\', we're committed to parsing this as a closure.
// It may turn out to be malformed, but it is definitely a closure.
and!(
// Parse the params
// Params are comma-separated
sep_by1_e(
word1(b',', ELambda::Comma),
space0_around_ee(
specialize(ELambda::Pattern, loc_closure_param(min_indent)),
min_indent,
ELambda::IndentArg,
ELambda::IndentArrow
),
ELambda::Arg,
),
skip_first!(
// Parse the -> which separates params from body
word2(b'-', b'>', ELambda::Arrow),
// Parse the body
space0_before_e(
specialize_ref(ELambda::Body, move |arena, state| {
parse_loc_expr_with_options(min_indent, options, arena, state)
}),
min_indent,
ELambda::IndentBody
)
)
)
),
|arena: &'a Bump, (params, loc_body)| {
let params: Vec<'a, Loc<Pattern<'a>>> = params;
let params: &'a [Loc<Pattern<'a>>] = params.into_bump_slice();
) -> impl Parser<'a, Expr<'a>, EClosure<'a>> {
move |arena, state| parse_closure_help(arena, state, min_indent, options)
}
Expr::Closure(params, arena.alloc(loc_body))
}
fn parse_closure_help<'a>(
arena: &'a Bump,
state: State<'a>,
_min_indent: u32,
options: ExprParseOptions,
) -> ParseResult<'a, Expr<'a>, EClosure<'a>> {
let start_indent = state.line_indent();
let min_indent = start_indent;
// All closures start with a '\' - e.g. (\x -> x + 1)
let (_, (), state) = parse_word1(state, min_indent, b'\\', EClosure::Start)?;
// After the first token, all other tokens must be indented past the start of the line
let min_indent = min_indent + 1;
// Once we see the '\', we're committed to parsing this as a closure.
// It may turn out to be malformed, but it is definitely a closure.
// Parse the params
// Params are comma-separated
let (_, params, state) = sep_by1_e(
word1(b',', EClosure::Comma),
space0_around_ee(
specialize(EClosure::Pattern, loc_closure_param(min_indent)),
min_indent,
EClosure::IndentArg,
EClosure::IndentArrow,
),
EClosure::Arg,
)
.parse(arena, state)
.map_err(|(_p, e, s)| (MadeProgress, e, s))?;
let (_, loc_body, state) = skip_first!(
// Parse the -> which separates params from body
word2(b'-', b'>', EClosure::Arrow),
// Parse the body
space0_before_e(
specialize_ref(EClosure::Body, move |arena, state| {
parse_loc_expr_with_options(min_indent, options, arena, state)
}),
min_indent,
EClosure::IndentBody
)
)
.parse(arena, state)
.map_err(|(_p, e, s)| (MadeProgress, e, s))?;
let params: Vec<'a, Loc<Pattern<'a>>> = params;
let params: &'a [Loc<Pattern<'a>>] = params.into_bump_slice();
Ok((
MadeProgress,
Expr::Closure(params, arena.alloc(loc_body)),
state,
))
}
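For readers skimming this hunk, a minimal standalone sketch of the indent rule that the new parse_closure_help encodes; the helpers indent_of and body_indent_ok are invented for this example and are not part of the commit. The closure's min_indent starts at the indent of the line containing the backslash, and everything after the backslash must sit at least one column past it, which is why the new pass/lambda_indent snapshot parses while fail/lambda_missing_indent is rejected with IndentBody.

// Sketch only; helper names are made up for illustration.
fn indent_of(line: &str) -> u32 {
    // Count leading spaces: the column of the first non-whitespace byte.
    line.bytes().take_while(|b| *b == b' ').count() as u32
}

// A closure body starting on a new line must be indented strictly past the
// indent of the line that introduced the `\` (min_indent + 1 in the parser).
fn body_indent_ok(lambda_line: &str, body_line: &str) -> bool {
    indent_of(body_line) > indent_of(lambda_line)
}

fn main() {
    assert!(body_indent_ok("\\x ->", "  1")); // like pass/lambda_indent.expr
    assert!(!body_indent_ok("\\x ->", "1")); // like fail/lambda_missing_indent.expr
}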
mod when {

View file

@@ -89,7 +89,7 @@ impl_space_problem! {
EIf<'a>,
EImports,
EInParens<'a>,
ELambda<'a>,
EClosure<'a>,
EList<'a>,
EPackageEntry<'a>,
EPackages<'a>,
@@ -354,7 +354,7 @@ pub enum EExpr<'a> {
Expect(EExpect<'a>, Position),
Lambda(ELambda<'a>, Position),
Closure(EClosure<'a>, Position),
Underscore(Position),
InParens(EInParens<'a>, Position),
@@ -428,7 +428,7 @@ pub enum EInParens<'a> {
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ELambda<'a> {
pub enum EClosure<'a> {
Space(BadInputError, Position),
Start(Position),
Arrow(Position),
@@ -1452,6 +1452,31 @@ where
}
}
pub fn parse_word1<'a, ToError, E>(
state: State<'a>,
min_indent: u32,
word: u8,
to_error: ToError,
) -> ParseResult<'a, (), E>
where
ToError: Fn(Position) -> E,
E: 'a,
{
debug_assert_ne!(word, b'\n');
if min_indent > state.column() {
return Err((NoProgress, to_error(state.pos()), state));
}
match state.bytes().first() {
Some(x) if *x == word => {
let state = state.advance(1);
Ok((MadeProgress, (), state))
}
_ => Err((NoProgress, to_error(state.pos()), state)),
}
}
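A toy model of the indent gate in parse_word1 above, using made-up types rather than the real State and ParseResult: when the cursor sits left of min_indent, the parser declines with NoProgress so the caller can backtrack instead of committing to this branch.

// Sketch only; Progress and word1 here are simplified stand-ins.
#[derive(Debug, PartialEq)]
enum Progress {
    NoProgress,
    MadeProgress,
}

fn word1(bytes: &[u8], column: u32, min_indent: u32, word: u8) -> (Progress, usize) {
    // Refuse to match anything that starts left of the required indent.
    if min_indent > column {
        return (Progress::NoProgress, 0);
    }
    match bytes.first() {
        Some(b) if *b == word => (Progress::MadeProgress, 1),
        _ => (Progress::NoProgress, 0),
    }
}

fn main() {
    assert_eq!(word1(b"\\x", 4, 2, b'\\').0, Progress::MadeProgress);
    assert_eq!(word1(b"\\x", 1, 2, b'\\').0, Progress::NoProgress); // outdented
}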
pub fn word2<'a, ToError, E>(word_1: u8, word_2: u8, to_error: ToError) -> impl Parser<'a, (), E>
where
ToError: Fn(Position) -> E,

View file

@@ -1,13 +1,15 @@
use roc_region::all::{Position, Region};
use std::fmt;
use crate::parser::Progress;
/// A position in a source file.
// NB: [Copy] is explicitly NOT derived to reduce the chance of bugs due to accidentally re-using
// parser state.
#[derive(Clone)]
pub struct State<'a> {
/// The raw input bytes from the file.
/// Beware: original_bytes[0] always points the the start of the file.
/// Beware: original_bytes[0] always points at the start of the file.
/// Use bytes()[0] to access the current byte the parser is inspecting
original_bytes: &'a [u8],
@@ -16,6 +18,9 @@ pub struct State<'a> {
/// Position of the start of the current line
pub(crate) line_start: Position,
/// Position of the first non-whitespace character on the current line
pub(crate) line_start_after_whitespace: Position,
}
impl<'a> State<'a> {
@@ -24,6 +29,10 @@ impl<'a> State<'a> {
original_bytes: bytes,
offset: 0,
line_start: Position::zero(),
// Technically not correct.
// We don't know the position of the first non-whitespace character yet.
line_start_after_whitespace: Position::zero(),
}
}
@@ -39,6 +48,24 @@ impl<'a> State<'a> {
self.pos().offset - self.line_start.offset
}
pub fn line_indent(&self) -> u32 {
self.line_start_after_whitespace.offset - self.line_start.offset
}
/// Check that the indent is at least `indent` spaces.
/// Return a new indent if the current indent is greater than `indent`.
pub fn check_indent<E>(
&self,
indent: u32,
e: impl Fn(Position) -> E,
) -> Result<u32, (Progress, E, State<'a>)> {
if self.column() < indent {
Err((Progress::NoProgress, e(self.pos()), self.clone()))
} else {
Ok(std::cmp::max(indent, self.line_indent()))
}
}
/// Mutably advance the state by a given offset
#[inline(always)]
pub(crate) fn advance_mut(&mut self, offset: usize) {
@@ -70,6 +97,18 @@ impl<'a> State<'a> {
pub(crate) const fn advance_newline(mut self) -> State<'a> {
self.offset += 1;
self.line_start = self.pos();
// WARNING! COULD CAUSE BUGS IF WE FORGET TO CALL mark_current_indent LATER!
// We really need to be stricter about this.
self.line_start_after_whitespace = self.line_start;
self
}
#[must_use]
#[inline(always)]
pub(crate) const fn mark_current_indent(mut self) -> State<'a> {
self.line_start_after_whitespace = self.pos();
self
}
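A minimal sketch (a toy MiniState, not the real parser State) of how the two fields added in this file cooperate: advance_newline provisionally resets line_start_after_whitespace along with line_start, exactly like the warning above describes, and a later mark_current_indent pins it to the first non-whitespace position so line_indent() can be answered.

// Sketch only; field and method names mirror the real State but the type is invented.
#[derive(Clone, Copy)]
struct MiniState {
    offset: u32,
    line_start: u32,
    line_start_after_ws: u32,
}

impl MiniState {
    fn advance_newline(mut self) -> Self {
        self.offset += 1;
        self.line_start = self.offset;
        self.line_start_after_ws = self.offset; // provisional, fixed up later
        self
    }
    fn mark_current_indent(mut self) -> Self {
        self.line_start_after_ws = self.offset;
        self
    }
    fn line_indent(&self) -> u32 {
        self.line_start_after_ws - self.line_start
    }
}

fn main() {
    // Input shaped like "\n    x": cross the newline, skip four spaces, mark the indent.
    let mut s = MiniState { offset: 0, line_start: 0, line_start_after_ws: 0 };
    s = s.advance_newline();
    s.offset += 4; // the whitespace parser consumed four spaces
    s = s.mark_current_indent();
    assert_eq!(s.line_indent(), 4);
}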

View file

@@ -0,0 +1 @@
Expr(Closure(Arg(@1), @1), @0)

View file

@@ -0,0 +1 @@
\,x -> 1

View file

@@ -0,0 +1 @@
Expr(Closure(IndentBody(@5), @3), @0)

View file

@@ -0,0 +1,2 @@
\x ->
1

View file

@@ -0,0 +1,71 @@
BinOps(
[
(
@0-10 SpaceAfter(
Str(
PlainLine(
"a string",
),
),
[
Newline,
],
),
@11-13 Pizza,
),
(
@14-24 SpaceAfter(
Var {
module_name: "Str",
ident: "toUtf8",
},
[
Newline,
],
),
@25-27 Pizza,
),
(
@28-54 SpaceAfter(
Apply(
@28-36 Var {
module_name: "List",
ident: "map",
},
[
@37-54 Closure(
[
@38-42 Identifier(
"byte",
),
],
@46-54 BinOps(
[
(
@46-50 Var {
module_name: "",
ident: "byte",
},
@51-52 Plus,
),
],
@53-54 Num(
"1",
),
),
),
],
Space,
),
[
Newline,
],
),
@55-57 Pizza,
),
],
@58-70 Var {
module_name: "List",
ident: "reverse",
},
)

View file

@@ -0,0 +1,4 @@
"a string"
|> Str.toUtf8
|> List.map \byte -> byte + 1
|> List.reverse

View file

@@ -0,0 +1,15 @@
Closure(
[
@1-2 Identifier(
"x",
),
],
@8-9 SpaceBefore(
Num(
"1",
),
[
Newline,
],
),
)

View file

@@ -0,0 +1,2 @@
\x ->
1

View file

@@ -120,6 +120,8 @@ mod test_parse {
// see tests/snapshots to see test input(.roc) and expected output(.result-ast)
snapshot_tests! {
fail/lambda_extra_comma.expr,
fail/lambda_missing_indent.expr,
fail/type_argument_no_arrow.expr,
fail/type_double_comma.expr,
pass/ability_demand_signature_is_multiline.expr,
@@ -157,6 +159,7 @@ mod test_parse {
pass/empty_string.expr,
pass/equals_with_spaces.expr,
pass/equals.expr,
pass/expect_fx.module,
pass/expect.expr,
pass/float_with_underscores.expr,
pass/full_app_header_trailing_commas.header,
@@ -167,6 +170,8 @@ mod test_parse {
pass/if_def.expr,
pass/int_with_underscore.expr,
pass/interface_with_newline.header,
pass/lambda_in_chain.expr,
pass/lambda_indent.expr,
pass/list_closing_indent_not_enough.expr,
pass/list_closing_same_indent_no_trailing_comma.expr,
pass/list_closing_same_indent_with_trailing_comma.expr,
@@ -181,6 +186,7 @@ mod test_parse {
pass/module_def_newline.module,
pass/multi_backpassing.expr,
pass/multi_char_string.expr,
pass/multiline_string.expr,
pass/multiline_type_signature_with_comment.expr,
pass/multiline_type_signature.expr,
pass/multiple_fields.expr,
@@ -204,7 +210,6 @@ mod test_parse {
pass/not_docs.expr,
pass/number_literal_suffixes.expr,
pass/one_backpassing.expr,
pass/multiline_string.expr,
pass/one_char_string.expr,
pass/one_def.expr,
pass/one_minus_two.expr,
@@ -251,7 +256,6 @@ mod test_parse {
pass/spaced_singleton_list.expr,
pass/spaces_inside_empty_list.expr,
pass/standalone_module_defs.module,
pass/expect_fx.module,
pass/string_without_escape.expr,
pass/sub_var_with_spaces.expr,
pass/sub_with_spaces.expr,
@@ -277,9 +281,9 @@ mod test_parse {
pass/var_minus_two.expr,
pass/var_then.expr,
pass/var_when.expr,
pass/when_if_guard.expr,
pass/when_in_assignment.expr,
pass/when_in_function.expr,
pass/when_if_guard.expr,
pass/when_in_parens_indented.expr,
pass/when_in_parens.expr,
pass/when_with_alternative_patterns.expr,

View file

@@ -1522,8 +1522,9 @@ fn solve(
symbols.iter().any(|(s, _)| {
let var = env.get_var_by_symbol(s).expect("Symbol not solved!");
let content = subs.get_content_without_compacting(var);
!matches!(content, Error | Structure(FlatType::Func(..)))
let (_, underlying_content) = chase_alias_content(subs, var);
!matches!(underlying_content, Error | Structure(FlatType::Func(..)))
})
};
@@ -1555,6 +1556,17 @@ fn solve(
state
}
fn chase_alias_content(subs: &Subs, mut var: Variable) -> (Variable, &Content) {
loop {
match subs.get_content_without_compacting(var) {
Content::Alias(_, _, real_var, _) => {
var = *real_var;
}
content => return (var, content),
}
}
}
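A toy model (invented enum, not the real Subs/Content types) of what chase_alias_content does for the symbol check above: peel off alias layers until the underlying content is reached, so a symbol whose type is an alias or opaque wrapper around a function is still recognized as a function.

// Sketch only; ToyContent and chase are illustrative stand-ins.
#[derive(Debug)]
enum ToyContent {
    Alias(Box<ToyContent>),
    Func,
    Other,
}

fn chase(c: &ToyContent) -> &ToyContent {
    match c {
        // Follow the alias to its real content, however many layers deep.
        ToyContent::Alias(real) => chase(real),
        other => other,
    }
}

fn main() {
    let aliased_fn = ToyContent::Alias(Box::new(ToyContent::Alias(Box::new(ToyContent::Func))));
    assert!(matches!(chase(&aliased_fn), ToyContent::Func));
    assert!(matches!(chase(&ToyContent::Other), ToyContent::Other));
}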
#[allow(clippy::too_many_arguments)]
fn compact_lambdas_and_check_obligations(
arena: &Bump,
@@ -2299,10 +2311,11 @@ fn type_to_variable<'a>(
unreachable!("we assert that the ext var is empty; otherwise we'd already know it was a tag union!");
}
let slice = SubsIndex::new(subs.tag_names.len() as u32);
subs.tag_names.push(tag_name.clone());
let tag_names = SubsSlice::extend_new(&mut subs.tag_names, [tag_name.clone()]);
let symbols = SubsSlice::extend_new(&mut subs.closure_names, [*symbol]);
let content = Content::Structure(FlatType::FunctionOrTagUnion(slice, *symbol, ext));
let content =
Content::Structure(FlatType::FunctionOrTagUnion(tag_names, symbols, ext));
register_with_known_var(subs, destination, rank, pools, content)
}

View file

@@ -7841,4 +7841,135 @@ mod solve_expr {
"hasher -> hasher | hasher has Hasher",
);
}
#[test]
fn dispatch_tag_union_function_inferred() {
infer_eq_without_problem(
indoc!(
r#"
g = if Bool.true then A else B
g ""
"#
),
"[A Str, B Str]*",
);
}
#[test]
fn check_char_as_u8() {
infer_eq_without_problem(
indoc!(
r#"
x : U8
x = '.'
x
"#
),
"U8",
);
}
#[test]
fn check_char_as_u16() {
infer_eq_without_problem(
indoc!(
r#"
x : U16
x = '.'
x
"#
),
"U16",
);
}
#[test]
fn check_char_as_u32() {
infer_eq_without_problem(
indoc!(
r#"
x : U32
x = '.'
x
"#
),
"U32",
);
}
#[test]
fn check_char_pattern_as_u8() {
infer_eq_without_problem(
indoc!(
r#"
f : U8 -> _
f = \c ->
when c is
'.' -> 'A'
c1 -> c1
f
"#
),
"U8 -> U8",
);
}
#[test]
fn check_char_pattern_as_u16() {
infer_eq_without_problem(
indoc!(
r#"
f : U16 -> _
f = \c ->
when c is
'.' -> 'A'
c1 -> c1
f
"#
),
"U16 -> U16",
);
}
#[test]
fn check_char_pattern_as_u32() {
infer_eq_without_problem(
indoc!(
r#"
f : U32 -> _
f = \c ->
when c is
'.' -> 'A'
c1 -> c1
f
"#
),
"U32 -> U32",
);
}
#[test]
fn issue_4246_admit_recursion_between_opaque_functions() {
infer_eq_without_problem(
indoc!(
r#"
app "test" provides [b] to "./platform"
O := {} -> {}
a = @O \{} -> ((\@O f -> f {}) b)
b = a
"#
),
"O",
);
}
}

View file

@@ -4,11 +4,39 @@
// For the `v!` macro we use uppercase variables when constructing tag unions.
#![allow(non_snake_case)]
use crate::{util::check_single_lset_immediate, v};
use crate::{
test_key_eq, test_key_neq,
util::{check_derivable, check_single_lset_immediate, check_underivable, derive_test},
v,
};
use insta::assert_snapshot;
use roc_module::symbol::Symbol;
use roc_types::subs::Variable;
use roc_derive_key::DeriveBuiltin::Hash;
use roc_derive_key::{hash::FlatHashKey, DeriveBuiltin::Hash, DeriveError, DeriveKey};
test_key_eq! {
Hash,
same_record:
v!({ a: v!(U8), }), v!({ a: v!(U8), })
same_record_fields_diff_types:
v!({ a: v!(U8), }), v!({ a: v!(STR), })
same_record_fields_any_order:
v!({ a: v!(U8), b: v!(U8), c: v!(U8), }),
v!({ c: v!(U8), a: v!(U8), b: v!(U8), })
explicit_empty_record_and_implicit_empty_record:
v!(EMPTY_RECORD), v!({})
}
test_key_neq! {
Hash,
different_record_fields:
v!({ a: v!(U8), }), v!({ b: v!(U8), })
record_empty_vs_nonempty:
v!(EMPTY_RECORD), v!({ a: v!(U8), })
}
#[test]
fn immediates() {
@@ -26,3 +54,98 @@ fn immediates() {
check_single_lset_immediate(Hash, v!(Symbol::LIST_LIST v!(U8)), Symbol::HASH_HASH_LIST);
check_single_lset_immediate(Hash, v!(Symbol::LIST_LIST v!(STR)), Symbol::HASH_HASH_LIST);
}
#[test]
fn optional_record_field_derive_error() {
check_underivable(Hash, v!({ ?a: v!(U8), }), DeriveError::Underivable);
}
#[test]
fn derivable_record_ext_flex_var() {
check_derivable(
Hash,
v!({ a: v!(STR), }* ),
DeriveKey::Hash(FlatHashKey::Record(vec!["a".into()])),
);
}
#[test]
fn derivable_record_ext_flex_able_var() {
check_derivable(
Hash,
v!({ a: v!(STR), }a has Symbol::DECODE_DECODER ),
DeriveKey::Hash(FlatHashKey::Record(vec!["a".into()])),
);
}
#[test]
fn derivable_record_with_record_ext() {
check_derivable(
Hash,
v!({ b: v!(STR), }{ a: v!(STR), } ),
DeriveKey::Hash(FlatHashKey::Record(vec!["a".into(), "b".into()])),
);
}
#[test]
fn empty_record() {
derive_test(Hash, v!(EMPTY_RECORD), |golden| {
assert_snapshot!(golden, @r###"
# derived for {}
# hasher, {} -[[hash_{}(0)]]-> hasher | hasher has Hasher
# hasher, {} -[[hash_{}(0)]]-> hasher | hasher has Hasher
# Specialization lambda sets:
# @<1>: [[hash_{}(0)]]
#Derived.hash_{} = \#Derived.hasher, #Derived.rcd -> #Derived.hasher
"###
)
})
}
#[test]
fn zero_field_record() {
derive_test(Hash, v!({}), |golden| {
assert_snapshot!(golden, @r###"
# derived for {}
# hasher, {} -[[hash_{}(0)]]-> hasher | hasher has Hasher
# hasher, {} -[[hash_{}(0)]]-> hasher | hasher has Hasher
# Specialization lambda sets:
# @<1>: [[hash_{}(0)]]
#Derived.hash_{} = \#Derived.hasher, #Derived.rcd -> #Derived.hasher
"###
)
})
}
#[test]
fn one_field_record() {
derive_test(Hash, v!({ a: v!(U8), }), |golden| {
assert_snapshot!(golden, @r###"
# derived for { a : U8 }
# hasher, { a : a } -[[hash_{a}(0)]]-> hasher | a has Hash, hasher has Hasher
# hasher, { a : a } -[[hash_{a}(0)]]-> hasher | a has Hash, hasher has Hasher
# Specialization lambda sets:
# @<1>: [[hash_{a}(0)]]
#Derived.hash_{a} =
\#Derived.hasher, #Derived.rcd -> Hash.hash #Derived.hasher #Derived.rcd.a
"###
)
})
}
#[test]
fn two_field_record() {
derive_test(Hash, v!({ a: v!(U8), b: v!(STR), }), |golden| {
assert_snapshot!(golden, @r###"
# derived for { a : U8, b : Str }
# hasher, { a : a, b : a1 } -[[hash_{a,b}(0)]]-> hasher | a has Hash, a1 has Hash, hasher has Hasher
# hasher, { a : a, b : a1 } -[[hash_{a,b}(0)]]-> hasher | a has Hash, a1 has Hash, hasher has Hasher
# Specialization lambda sets:
# @<1>: [[hash_{a,b}(0)]]
#Derived.hash_{a,b} =
\#Derived.hasher, #Derived.rcd ->
Hash.hash (Hash.hash #Derived.hasher #Derived.rcd.a) #Derived.rcd.b
"###
)
})
}
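As a rough analogue of the golden output above, a sketch using std's DefaultHasher rather than Roc's Hasher ability: the derived implementation for a record is just a fold of hash over the fields in order, e.g. hash_{a,b} hasher rcd behaves like Hash.hash (Hash.hash hasher rcd.a) rcd.b.

// Sketch only; std's Hasher stands in for Roc's Hasher ability.
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

fn main() {
    let (a, b) = (15u8, "bc");
    let mut hasher = DefaultHasher::new();
    a.hash(&mut hasher); // Hash.hash hasher rcd.a
    b.hash(&mut hasher); // Hash.hash (...) rcd.b
    println!("combined: {:x}", hasher.finish());
}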

View file

@@ -58,7 +58,7 @@ fn expr<'a>(c: &Ctx, p: EPrec, f: &'a Arena<'a>, e: &'a Expr) -> DocBuilder<'a,
match e {
Num(_, n, _, _) | Int(_, _, n, _, _) | Float(_, _, n, _, _) => f.text(&**n),
Str(s) => f.text(format!(r#""{}""#, s)),
SingleQuote(c) => f.text(format!("'{}'", c)),
SingleQuote(_, _, c, _) => f.text(format!("'{}'", c)),
List {
elem_var: _,
loc_elems,
@@ -366,7 +366,7 @@ fn pattern<'a>(
f.text(&**n)
}
StrLiteral(s) => f.text(format!(r#""{}""#, s)),
SingleQuote(c) => f.text(format!("'{}'", c)),
SingleQuote(_, _, c, _) => f.text(format!("'{}'", c)),
Underscore => f.text("_"),
Shadowed(_, _, _) => todo!(),

View file

@@ -1330,4 +1330,47 @@ mod hash {
)
}
}
mod derived {
use super::{assert_evals_to, build_test};
use roc_std::RocList;
#[test]
fn empty_record() {
assert_evals_to!(
&build_test(r#"{}"#),
RocList::from_slice(&[] as &[u8]),
RocList<u8>
)
}
#[test]
fn record_of_u8_and_str() {
assert_evals_to!(
&build_test(r#"{ a: 15u8, b: "bc" }"#),
RocList::from_slice(&[15, 98, 99]),
RocList<u8>
)
}
#[test]
fn record_of_records() {
assert_evals_to!(
&build_test(r#"{ a: { b: 15u8, c: "bc" }, d: { b: 23u8, e: "ef" } }"#),
RocList::from_slice(&[15, 98, 99, 23, 101, 102]),
RocList<u8>
)
}
#[test]
fn record_of_list_of_records() {
assert_evals_to!(
&build_test(
r#"{ a: [ { b: 15u8 }, { b: 23u8 } ], b: [ { c: 45u8 }, { c: 73u8 } ] }"#
),
RocList::from_slice(&[15, 23, 45, 73]),
RocList<u8>
)
}
}
}

View file

@@ -3395,3 +3395,59 @@ fn list_let_generalization() {
usize
);
}
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn list_walk_backwards_until_sum() {
assert_evals_to!(
r#"List.walkBackwardsUntil [1, 2] 0 \a,b -> Continue (a + b)"#,
3,
i64
);
}
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn list_walk_backwards_implements_position() {
assert_evals_to!(
r#"
Option a : [Some a, None]
find : List a, a -> Option Nat
find = \list, needle ->
findHelp list needle
|> .v
findHelp = \list, needle ->
List.walkBackwardsUntil list { n: 0, v: None } \{ n, v }, element ->
if element == needle then
Break { n, v: Some n }
else
Continue { n: n + 1, v }
when find [1, 2, 3] 3 is
None -> 0
Some v -> v
"#,
0,
usize
);
}
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn list_walk_backwards_until_even_prefix_sum() {
assert_evals_to!(
r#"
helper = \a, b ->
if Num.isEven b then
Continue (a + b)
else
Break a
List.walkBackwardsUntil [9, 8, 4, 2] 0 helper"#,
2 + 4 + 8,
i64
);
}

View file

@@ -4067,3 +4067,21 @@ fn int_let_generalization() {
RocStr
);
}
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn pattern_match_char() {
assert_evals_to!(
indoc!(
r#"
c = 'A'
when c is
'A' -> "okay"
_ -> "FAIL"
"#
),
RocStr::from("okay"),
RocStr
);
}

View file

@@ -1930,3 +1930,31 @@ fn when_on_strings() {
i64
);
}
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn with_capacity() {
assert_evals_to!(
indoc!(
r#"
Str.withCapacity 10
"#
),
RocStr::from(""),
RocStr
);
}
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn with_capacity_concat() {
assert_evals_to!(
indoc!(
r#"
Str.withCapacity 10 |> Str.concat "Forty-two"
"#
),
RocStr::from("Forty-two"),
RocStr
);
}

View file

@@ -2004,3 +2004,21 @@ fn match_on_result_with_uninhabited_error_branch() {
RocStr
);
}
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn dispatch_tag_union_function_inferred() {
assert_evals_to!(
indoc!(
r#"
g = \b -> if b then H else J
when P ((g Bool.true) "") ((g Bool.false) "") is
P (H _) (J _) -> "okay"
_ -> "FAIL"
"#
),
RocStr::from("okay"),
RocStr
);
}

View file

@@ -328,6 +328,26 @@ pub enum NumBound {
},
}
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum SingleQuoteBound {
AtLeast { width: IntLitWidth },
}
impl SingleQuoteBound {
pub fn from_char(c: char) -> Self {
let n = c as u32;
let width = if n > u16::MAX as _ {
IntLitWidth::U32
} else if n > u8::MAX as _ {
IntLitWidth::U16
} else {
IntLitWidth::U8
};
Self::AtLeast { width }
}
}
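A quick standalone check (not part of the commit) of the buckets chosen by SingleQuoteBound::from_char above: code points up to 0xFF need at least U8, up to 0xFFFF at least U16, and anything larger at least U32. Since the bound is an "AtLeast" lower bound, a literal like '.' can still check as U16 or U32, which is what the check_char_as_* tests earlier in this diff exercise.

// Sketch only; width_for is an invented helper mirroring the branch above.
fn width_for(c: char) -> &'static str {
    let n = c as u32;
    if n > u16::MAX as u32 {
        "U32"
    } else if n > u8::MAX as u32 {
        "U16"
    } else {
        "U8"
    }
}

fn main() {
    assert_eq!(width_for('.'), "U8"); // U+002E = 46
    assert_eq!(width_for('λ'), "U16"); // U+03BB = 955
    assert_eq!(width_for('🦀'), "U32"); // U+1F980 = 129408
}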
pub const fn int_lit_width_to_variable(w: IntLitWidth) -> Variable {
match w {
IntLitWidth::U8 => Variable::U8,

View file

@@ -1156,11 +1156,15 @@ fn write_flat_type<'a>(
)
}
FunctionOrTagUnion(tag_name, _, ext_var) => {
FunctionOrTagUnion(tag_names, _, ext_var) => {
buf.push('[');
let mut tags: MutMap<TagName, _> = MutMap::default();
tags.insert(subs[*tag_name].clone(), vec![]);
tags.extend(
subs.get_subs_slice(*tag_names)
.iter()
.map(|t| (t.clone(), vec![])),
);
let ext_content = write_sorted_tags(env, ctx, subs, buf, &tags, *ext_var);
buf.push(']');
@@ -1241,8 +1245,12 @@ pub fn chase_ext_tag_union(
push_union(subs, tags, fields);
chase_ext_tag_union(subs, *ext_var, fields)
}
Content::Structure(FunctionOrTagUnion(tag_name, _, ext_var)) => {
fields.push((subs[*tag_name].clone(), vec![]));
Content::Structure(FunctionOrTagUnion(tag_names, _, ext_var)) => {
fields.extend(
subs.get_subs_slice(*tag_names)
.iter()
.map(|t| (t.clone(), vec![])),
);
chase_ext_tag_union(subs, *ext_var, fields)
}

View file

@@ -962,13 +962,13 @@ fn subs_fmt_flat_type(this: &FlatType, subs: &Subs, f: &mut fmt::Formatter) -> f
write!(f, "]<{:?}>", new_ext)
}
FlatType::FunctionOrTagUnion(tagname_index, symbol, ext) => {
let tagname: &TagName = &subs[*tagname_index];
FlatType::FunctionOrTagUnion(tagnames, symbol, ext) => {
let tagnames: &[TagName] = subs.get_subs_slice(*tagnames);
write!(
f,
"FunctionOrTagUnion({:?}, {:?}, {:?})",
tagname, symbol, ext
tagnames, symbol, ext
)
}
FlatType::RecursiveTagUnion(rec, tags, ext) => {
@@ -2424,7 +2424,12 @@ pub enum FlatType {
Func(VariableSubsSlice, Variable, Variable),
Record(RecordFields, Variable),
TagUnion(UnionTags, Variable),
FunctionOrTagUnion(SubsIndex<TagName>, Symbol, Variable),
/// `A` might either be a function
/// x -> A x : a -> [A a, B a, C a]
/// or a tag `[A, B, C]`
FunctionOrTagUnion(SubsSlice<TagName>, SubsSlice<Symbol>, Variable),
RecursiveTagUnion(Variable, UnionTags, Variable),
Erroneous(SubsIndex<Problem>),
EmptyRecord,
@@ -3881,12 +3886,12 @@ fn flat_type_to_err_type(
}
}
FunctionOrTagUnion(tag_name, _, ext_var) => {
let tag_name = subs[tag_name].clone();
FunctionOrTagUnion(tag_names, _, ext_var) => {
let tag_names = subs.get_subs_slice(tag_names);
let mut err_tags = SendMap::default();
let mut err_tags: SendMap<TagName, Vec<_>> = SendMap::default();
err_tags.insert(tag_name, vec![]);
err_tags.extend(tag_names.iter().map(|t| (t.clone(), vec![])));
match var_to_err_type(subs, state, ext_var).unwrap_structural_alias() {
ErrorType::TagUnion(sub_tags, sub_ext) => {
@@ -4202,8 +4207,8 @@ impl StorageSubs {
Self::offset_tag_union(offsets, *union_tags),
Self::offset_variable(offsets, *ext),
),
FlatType::FunctionOrTagUnion(tag_name, symbol, ext) => FlatType::FunctionOrTagUnion(
Self::offset_tag_name_index(offsets, *tag_name),
FlatType::FunctionOrTagUnion(tag_names, symbol, ext) => FlatType::FunctionOrTagUnion(
Self::offset_tag_name_slice(offsets, *tag_names),
*symbol,
Self::offset_variable(offsets, *ext),
),
@@ -4295,13 +4300,13 @@ impl StorageSubs {
record_fields
}
fn offset_tag_name_index(
fn offset_tag_name_slice(
offsets: &StorageSubsOffsets,
mut tag_name: SubsIndex<TagName>,
) -> SubsIndex<TagName> {
tag_name.index += offsets.tag_names;
mut tag_names: SubsSlice<TagName>,
) -> SubsSlice<TagName> {
tag_names.start += offsets.tag_names;
tag_name
tag_names
}
fn offset_variable(offsets: &StorageSubsOffsets, variable: Variable) -> Variable {
@@ -4542,12 +4547,22 @@ fn storage_copy_var_to_help(env: &mut StorageCopyVarToEnv<'_>, var: Variable) ->
TagUnion(union_tags, new_ext)
}
FunctionOrTagUnion(tag_name, symbol, ext_var) => {
let new_tag_name = SubsIndex::new(env.target.tag_names.len() as u32);
FunctionOrTagUnion(tag_names, symbols, ext_var) => {
let new_tag_names = SubsSlice::extend_new(
&mut env.target.tag_names,
env.source.get_subs_slice(tag_names).iter().cloned(),
);
env.target.tag_names.push(env.source[tag_name].clone());
let new_symbols = SubsSlice::extend_new(
&mut env.target.closure_names,
env.source.get_subs_slice(symbols).iter().cloned(),
);
FunctionOrTagUnion(new_tag_name, symbol, storage_copy_var_to_help(env, ext_var))
FunctionOrTagUnion(
new_tag_names,
new_symbols,
storage_copy_var_to_help(env, ext_var),
)
}
RecursiveTagUnion(rec_var, tags, ext_var) => {
@@ -4981,14 +4996,20 @@ fn copy_import_to_help(env: &mut CopyImportEnv<'_>, max_rank: Rank, var: Variabl
TagUnion(union_tags, new_ext)
}
FunctionOrTagUnion(tag_name, symbol, ext_var) => {
let new_tag_name = SubsIndex::new(env.target.tag_names.len() as u32);
FunctionOrTagUnion(tag_names, symbols, ext_var) => {
let new_tag_names = SubsSlice::extend_new(
&mut env.target.tag_names,
env.source.get_subs_slice(tag_names).iter().cloned(),
);
env.target.tag_names.push(env.source[tag_name].clone());
let new_symbols = SubsSlice::extend_new(
&mut env.target.closure_names,
env.source.get_subs_slice(symbols).iter().cloned(),
);
FunctionOrTagUnion(
new_tag_name,
symbol,
new_tag_names,
new_symbols,
copy_import_to_help(env, max_rank, ext_var),
)
}

View file

@@ -2657,13 +2657,13 @@ fn unify_flat_type<M: MetaCollector>(
outcome
}
(FunctionOrTagUnion(tag_name, tag_symbol, ext), Func(args, closure, ret)) => {
(FunctionOrTagUnion(tag_names, tag_symbols, ext), Func(args, closure, ret)) => {
unify_function_or_tag_union_and_func(
env,
pool,
ctx,
tag_name,
*tag_symbol,
*tag_names,
*tag_symbols,
*ext,
*args,
*ret,
@@ -2671,13 +2671,13 @@ fn unify_flat_type<M: MetaCollector>(
true,
)
}
(Func(args, closure, ret), FunctionOrTagUnion(tag_name, tag_symbol, ext)) => {
(Func(args, closure, ret), FunctionOrTagUnion(tag_names, tag_symbols, ext)) => {
unify_function_or_tag_union_and_func(
env,
pool,
ctx,
tag_name,
*tag_symbol,
*tag_names,
*tag_symbols,
*ext,
*args,
*ret,
@@ -2685,50 +2685,61 @@ fn unify_flat_type<M: MetaCollector>(
false,
)
}
(FunctionOrTagUnion(tag_name_1, _, ext1), FunctionOrTagUnion(tag_name_2, _, ext2)) => {
let tag_name_1_ref = &env.subs[*tag_name_1];
let tag_name_2_ref = &env.subs[*tag_name_2];
if tag_name_1_ref == tag_name_2_ref {
let outcome = unify_pool(env, pool, *ext1, *ext2, ctx.mode);
if outcome.mismatches.is_empty() {
let content = *env.subs.get_content_without_compacting(ctx.second);
merge(env, ctx, content)
} else {
outcome
}
} else {
let tags1 = UnionTags::from_tag_name_index(*tag_name_1);
let tags2 = UnionTags::from_tag_name_index(*tag_name_2);
unify_tag_unions(env, pool, ctx, tags1, *ext1, tags2, *ext2, Rec::None)
}
}
(TagUnion(tags1, ext1), FunctionOrTagUnion(tag_name, _, ext2)) => {
let tags2 = UnionTags::from_tag_name_index(*tag_name);
(
FunctionOrTagUnion(tag_names_1, tag_symbols_1, ext1),
FunctionOrTagUnion(tag_names_2, tag_symbols_2, ext2),
) => unify_two_function_or_tag_unions(
env,
pool,
ctx,
*tag_names_1,
*tag_symbols_1,
*ext1,
*tag_names_2,
*tag_symbols_2,
*ext2,
),
(TagUnion(tags1, ext1), FunctionOrTagUnion(tag_names, _, ext2)) => {
let empty_tag_var_slices = SubsSlice::extend_new(
&mut env.subs.variable_slices,
std::iter::repeat(Default::default()).take(tag_names.len()),
);
let tags2 = UnionTags::from_slices(*tag_names, empty_tag_var_slices);
unify_tag_unions(env, pool, ctx, *tags1, *ext1, tags2, *ext2, Rec::None)
}
(FunctionOrTagUnion(tag_name, _, ext1), TagUnion(tags2, ext2)) => {
let tags1 = UnionTags::from_tag_name_index(*tag_name);
(FunctionOrTagUnion(tag_names, _, ext1), TagUnion(tags2, ext2)) => {
let empty_tag_var_slices = SubsSlice::extend_new(
&mut env.subs.variable_slices,
std::iter::repeat(Default::default()).take(tag_names.len()),
);
let tags1 = UnionTags::from_slices(*tag_names, empty_tag_var_slices);
unify_tag_unions(env, pool, ctx, tags1, *ext1, *tags2, *ext2, Rec::None)
}
(RecursiveTagUnion(recursion_var, tags1, ext1), FunctionOrTagUnion(tag_name, _, ext2)) => {
(RecursiveTagUnion(recursion_var, tags1, ext1), FunctionOrTagUnion(tag_names, _, ext2)) => {
// this never happens in type-correct programs, but may happen if there is a type error
debug_assert!(is_recursion_var(env.subs, *recursion_var));
let tags2 = UnionTags::from_tag_name_index(*tag_name);
let empty_tag_var_slices = SubsSlice::extend_new(
&mut env.subs.variable_slices,
std::iter::repeat(Default::default()).take(tag_names.len()),
);
let tags2 = UnionTags::from_slices(*tag_names, empty_tag_var_slices);
let rec = Rec::Left(*recursion_var);
unify_tag_unions(env, pool, ctx, *tags1, *ext1, tags2, *ext2, rec)
}
(FunctionOrTagUnion(tag_name, _, ext1), RecursiveTagUnion(recursion_var, tags2, ext2)) => {
(FunctionOrTagUnion(tag_names, _, ext1), RecursiveTagUnion(recursion_var, tags2, ext2)) => {
debug_assert!(is_recursion_var(env.subs, *recursion_var));
let tags1 = UnionTags::from_tag_name_index(*tag_name);
let empty_tag_var_slices = SubsSlice::extend_new(
&mut env.subs.variable_slices,
std::iter::repeat(Default::default()).take(tag_names.len()),
);
let tags1 = UnionTags::from_slices(*tag_names, empty_tag_var_slices);
let rec = Rec::Right(*recursion_var);
unify_tag_unions(env, pool, ctx, tags1, *ext1, *tags2, *ext2, rec)
@@ -3133,17 +3144,20 @@ fn unify_function_or_tag_union_and_func<M: MetaCollector>(
env: &mut Env,
pool: &mut Pool,
ctx: &Context,
tag_name_index: &SubsIndex<TagName>,
tag_symbol: Symbol,
tag_names_slice: SubsSlice<TagName>,
tag_fn_lambdas: SubsSlice<Symbol>,
tag_ext: Variable,
function_arguments: VariableSubsSlice,
function_return: Variable,
function_lambda_set: Variable,
left: bool,
) -> Outcome<M> {
let tag_name = env.subs[*tag_name_index].clone();
let tag_names = env.subs.get_subs_slice(tag_names_slice).to_vec();
let union_tags = UnionTags::insert_slices_into_subs(env.subs, [(tag_name, function_arguments)]);
let union_tags = UnionTags::insert_slices_into_subs(
env.subs,
tag_names.into_iter().map(|tag| (tag, function_arguments)),
);
let content = Content::Structure(FlatType::TagUnion(union_tags, tag_ext));
let new_tag_union_var = fresh(env, pool, ctx, content);
@@ -3155,7 +3169,14 @@ fn unify_function_or_tag_union_and_func<M: MetaCollector>(
};
{
let union_tags = UnionLambdas::tag_without_arguments(env.subs, tag_symbol);
let lambda_names = env.subs.get_subs_slice(tag_fn_lambdas).to_vec();
let new_lambda_names = SubsSlice::extend_new(&mut env.subs.closure_names, lambda_names);
let empty_captures_slices = SubsSlice::extend_new(
&mut env.subs.variable_slices,
std::iter::repeat(Default::default()).take(new_lambda_names.len()),
);
let union_tags = UnionLambdas::from_slices(new_lambda_names, empty_captures_slices);
let ambient_function_var = if left { ctx.first } else { ctx.second };
let lambda_set_content = LambdaSet(self::LambdaSet {
solved: union_tags,
@ -3196,3 +3217,53 @@ fn unify_function_or_tag_union_and_func<M: MetaCollector>(
outcome
}
#[allow(clippy::too_many_arguments)]
fn unify_two_function_or_tag_unions<M: MetaCollector>(
env: &mut Env,
pool: &mut Pool,
ctx: &Context,
tag_names_1: SubsSlice<TagName>,
tag_symbols_1: SubsSlice<Symbol>,
ext1: Variable,
tag_names_2: SubsSlice<TagName>,
tag_symbols_2: SubsSlice<Symbol>,
ext2: Variable,
) -> Outcome<M> {
let merged_tags = {
let mut all_tags: Vec<_> = (env.subs.get_subs_slice(tag_names_1).iter())
.chain(env.subs.get_subs_slice(tag_names_2))
.cloned()
.collect();
all_tags.sort();
all_tags.dedup();
SubsSlice::extend_new(&mut env.subs.tag_names, all_tags)
};
let merged_lambdas = {
let mut all_lambdas: Vec<_> = (env.subs.get_subs_slice(tag_symbols_1).iter())
.chain(env.subs.get_subs_slice(tag_symbols_2))
.cloned()
.collect();
all_lambdas.sort();
all_lambdas.dedup();
SubsSlice::extend_new(&mut env.subs.closure_names, all_lambdas)
};
let mut outcome = unify_pool(env, pool, ext1, ext2, ctx.mode);
if !outcome.mismatches.is_empty() {
return outcome;
}
let merge_outcome = merge(
env,
ctx,
Content::Structure(FlatType::FunctionOrTagUnion(
merged_tags,
merged_lambdas,
ext1,
)),
);
outcome.union(merge_outcome);
outcome
}
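A sketch of the merge performed above, with plain Vec<String> standing in for SubsSlice<TagName> and SubsSlice<Symbol>: both the tag names and the lambda symbols from the two sides are concatenated, sorted, and deduplicated before the merged FunctionOrTagUnion is written back.

// Sketch only; merge_names is an invented helper over owned strings.
fn merge_names(xs: &[&str], ys: &[&str]) -> Vec<String> {
    let mut all: Vec<String> = xs.iter().chain(ys).map(|s| s.to_string()).collect();
    all.sort();
    all.dedup();
    all
}

fn main() {
    // Tags from two FunctionOrTagUnions with an overlap merge into one sorted set.
    let merged = merge_names(&["A", "C"], &["B", "C"]);
    assert_eq!(merged, vec!["A".to_string(), "B".to_string(), "C".to_string()]);
}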