Merge remote-tracking branch 'origin/trunk' into windows-linking

This commit is contained in:
Folkert de Vries 2022-08-02 21:43:46 +02:00
commit 1dffa4a734
22 changed files with 1152 additions and 235 deletions

View file

@ -24,6 +24,12 @@ jobs:
run: zig version
- name: Install LLVM
run: brew install llvm@13
# build has to be done before tests #2572
- name: build release
uses: actions-rs/cargo@v1
with:
command: build
args: --release --locked
- name: execute rust tests
uses: actions-rs/cargo@v1
with:
@ -31,11 +37,6 @@ jobs:
args: --locked # no --release yet until #3166 is fixed
- name: write version to file
run: ./ci/write_version.sh
- name: build release
uses: actions-rs/cargo@v1
with:
command: build
args: --release --locked
- name: package release
run: ./ci/package_release.sh roc_darwin_x86_64.tar.gz
- name: Create pre-release with test_archive.tar.gz

View file

@ -2362,7 +2362,7 @@ pub mod test_constrain {
\f -> (\a, b -> f b a)
"#
),
"(a, b -> c) -> (b, a -> c)",
"(a, b -> d) -> (b, a -> d)",
);
}
@ -2400,7 +2400,7 @@ pub mod test_constrain {
\{} -> x
"#
),
"{}* -> Num *",
"{}* -> Num a",
)
}

View file

@ -131,18 +131,16 @@ const RC_TYPE = Refcount.normal;
pub fn increfC(ptr_to_refcount: *isize, amount: isize) callconv(.C) void {
if (RC_TYPE == Refcount.none) return;
var refcount = ptr_to_refcount.*;
if (refcount < REFCOUNT_MAX_ISIZE) {
// Ensure that the refcount is not whole program lifetime.
if (ptr_to_refcount.* != REFCOUNT_MAX_ISIZE) {
// Note: we assume that a refcount will never overflow.
// As such, we do not need to cap incrementing.
switch (RC_TYPE) {
Refcount.normal => {
ptr_to_refcount.* = std.math.min(refcount + amount, REFCOUNT_MAX_ISIZE);
ptr_to_refcount.* += amount;
},
Refcount.atomic => {
var next = std.math.min(refcount + amount, REFCOUNT_MAX_ISIZE);
while (@cmpxchgWeak(isize, ptr_to_refcount, refcount, next, Monotonic, Monotonic)) |found| {
refcount = found;
next = std.math.min(refcount + amount, REFCOUNT_MAX_ISIZE);
}
_ = @atomicRmw(isize, ptr_to_refcount, std.builtin.AtomicRmwOp.Add, amount, Monotonic);
},
Refcount.none => unreachable,
}
@ -194,24 +192,24 @@ inline fn decref_ptr_to_refcount(
) void {
if (RC_TYPE == Refcount.none) return;
const extra_bytes = std.math.max(alignment, @sizeOf(usize));
switch (RC_TYPE) {
Refcount.normal => {
const refcount: isize = refcount_ptr[0];
if (refcount == REFCOUNT_ONE_ISIZE) {
dealloc(@ptrCast([*]u8, refcount_ptr) - (extra_bytes - @sizeOf(usize)), alignment);
} else if (refcount < REFCOUNT_MAX_ISIZE) {
refcount_ptr[0] = refcount - 1;
}
},
Refcount.atomic => {
if (refcount_ptr[0] < REFCOUNT_MAX_ISIZE) {
// Ensure that the refcount is not whole program lifetime.
const refcount: isize = refcount_ptr[0];
if (refcount != REFCOUNT_MAX_ISIZE) {
switch (RC_TYPE) {
Refcount.normal => {
refcount_ptr[0] = refcount -% 1;
if (refcount == REFCOUNT_ONE_ISIZE) {
dealloc(@ptrCast([*]u8, refcount_ptr) - (extra_bytes - @sizeOf(usize)), alignment);
}
},
Refcount.atomic => {
var last = @atomicRmw(isize, &refcount_ptr[0], std.builtin.AtomicRmwOp.Sub, 1, Monotonic);
if (last == REFCOUNT_ONE_ISIZE) {
dealloc(@ptrCast([*]u8, refcount_ptr) - (extra_bytes - @sizeOf(usize)), alignment);
}
}
},
Refcount.none => unreachable,
},
Refcount.none => unreachable,
}
}
}

View file

@ -0,0 +1,79 @@
# Decode — ability-based deserialization for the builtins.
# A `Decoder val fmt` wraps a function that consumes a prefix of a byte list
# under a format `fmt` (e.g. Json) and yields a `DecodeResult val`.
interface Decode
exposes [
DecodeError,
DecodeResult,
Decoder,
Decoding,
DecoderFormatting,
decoder,
u8,
u16,
u32,
u64,
u128,
i8,
i16,
i32,
i64,
i128,
f32,
f64,
dec,
bool,
string,
list,
custom,
decodeWith,
fromBytesPartial,
fromBytes,
]
imports [
List,
]
# The only decode failure currently modeled: the input ended (or mismatched)
# before a complete value was parsed.
DecodeError : [TooShort]
# Outcome of one decoding step: the parsed value (or error) plus the bytes
# left unconsumed after it.
DecodeResult val : { result : Result val DecodeError, rest : List U8 }
# Opaque wrapper around a decoding function for a given format.
Decoder val fmt := List U8, fmt -> DecodeResult val | fmt has DecoderFormatting
# Ability for types that can produce a decoder for themselves.
Decoding has
decoder : Decoder val fmt | val has Decoding, fmt has DecoderFormatting
# Ability a format implements: one decoder per builtin/primitive type.
DecoderFormatting has
u8 : Decoder U8 fmt | fmt has DecoderFormatting
u16 : Decoder U16 fmt | fmt has DecoderFormatting
u32 : Decoder U32 fmt | fmt has DecoderFormatting
u64 : Decoder U64 fmt | fmt has DecoderFormatting
u128 : Decoder U128 fmt | fmt has DecoderFormatting
i8 : Decoder I8 fmt | fmt has DecoderFormatting
i16 : Decoder I16 fmt | fmt has DecoderFormatting
i32 : Decoder I32 fmt | fmt has DecoderFormatting
i64 : Decoder I64 fmt | fmt has DecoderFormatting
i128 : Decoder I128 fmt | fmt has DecoderFormatting
f32 : Decoder F32 fmt | fmt has DecoderFormatting
f64 : Decoder F64 fmt | fmt has DecoderFormatting
dec : Decoder Dec fmt | fmt has DecoderFormatting
bool : Decoder Bool fmt | fmt has DecoderFormatting
string : Decoder Str fmt | fmt has DecoderFormatting
list : Decoder elem fmt -> Decoder (List elem) fmt | fmt has DecoderFormatting
# Wrap a raw decoding function as an opaque Decoder.
custom : (List U8, fmt -> DecodeResult val) -> Decoder val fmt | fmt has DecoderFormatting
custom = \decode -> @Decoder decode
# Run a decoder on the given bytes with the given format value.
decodeWith : List U8, Decoder val fmt, fmt -> DecodeResult val | fmt has DecoderFormatting
decodeWith = \bytes, @Decoder decode, fmt -> decode bytes fmt
# Decode a value, also returning any unconsumed trailing bytes.
fromBytesPartial : List U8, fmt -> DecodeResult val | val has Decoding, fmt has DecoderFormatting
fromBytesPartial = \bytes, fmt -> decodeWith bytes decoder fmt
# Decode a value, erroring with `Leftover` if any bytes remain unconsumed.
fromBytes : List U8, fmt -> Result val [Leftover (List U8)]DecodeError | val has Decoding, fmt has DecoderFormatting
fromBytes = \bytes, fmt ->
when fromBytesPartial bytes fmt is
{ result, rest } ->
if List.isEmpty rest then
when result is
Ok val -> Ok val
Err TooShort -> Err TooShort
else
Err (Leftover rest)

View file

@ -2,92 +2,98 @@ interface Json
exposes [
Json,
toUtf8,
fromUtf8,
]
imports [
List,
Str,
Encode,
Encode.{
Encoder,
EncoderFormatting,
custom,
appendWith,
u8,
u16,
u32,
u64,
u128,
i8,
i16,
i32,
i64,
i128,
f32,
f64,
dec,
bool,
string,
list,
record,
tag,
},
Decode,
Decode.{
DecoderFormatting,
},
]
Json := {} has [
EncoderFormatting {
u8,
u16,
u32,
u64,
u128,
i8,
i16,
i32,
i64,
i128,
f32,
f64,
dec,
bool,
string,
list,
record,
tag,
u8: encodeU8,
u16: encodeU16,
u32: encodeU32,
u64: encodeU64,
u128: encodeU128,
i8: encodeI8,
i16: encodeI16,
i32: encodeI32,
i64: encodeI64,
i128: encodeI128,
f32: encodeF32,
f64: encodeF64,
dec: encodeDec,
bool: encodeBool,
string: encodeString,
list: encodeList,
record: encodeRecord,
tag: encodeTag,
},
DecoderFormatting {
u8: decodeU8,
u16: decodeU16,
u32: decodeU32,
u64: decodeU64,
u128: decodeU128,
i8: decodeI8,
i16: decodeI16,
i32: decodeI32,
i64: decodeI64,
i128: decodeI128,
f32: decodeF32,
f64: decodeF64,
dec: decodeDec,
bool: decodeBool,
string: decodeString,
list: decodeList,
},
]
toUtf8 = @Json {}
fromUtf8 = @Json {}
numToBytes = \n ->
n |> Num.toStr |> Str.toUtf8
# impl EncoderFormatting for Json
u8 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
encodeU8 = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
u16 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
encodeU16 = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
u32 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
encodeU32 = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
u64 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
encodeU64 = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
u128 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
encodeU128 = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
i8 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
encodeI8 = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
i16 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
encodeI16 = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
i32 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
encodeI32 = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
i64 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
encodeI64 = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
i128 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
encodeI128 = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
f32 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
encodeF32 = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
f64 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
encodeF64 = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
dec = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
encodeDec = \n -> Encode.custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
bool = \b -> custom \bytes, @Json {} ->
encodeBool = \b -> Encode.custom \bytes, @Json {} ->
if
b
then
@ -95,13 +101,13 @@ bool = \b -> custom \bytes, @Json {} ->
else
List.concat bytes (Str.toUtf8 "false")
string = \s -> custom \bytes, @Json {} ->
encodeString = \s -> Encode.custom \bytes, @Json {} ->
List.append bytes (Num.toU8 '"')
|> List.concat (Str.toUtf8 s)
|> List.append (Num.toU8 '"')
list = \lst, encodeElem ->
custom \bytes, @Json {} ->
encodeList = \lst, encodeElem ->
Encode.custom \bytes, @Json {} ->
writeList = \{ buffer, elemsLeft }, elem ->
bufferWithElem = appendWith buffer (encodeElem elem) (@Json {})
bufferWithSuffix =
@ -117,8 +123,8 @@ list = \lst, encodeElem ->
List.append withList (Num.toU8 ']')
record = \fields ->
custom \bytes, @Json {} ->
encodeRecord = \fields ->
Encode.custom \bytes, @Json {} ->
writeRecord = \{ buffer, fieldsLeft }, { key, value } ->
bufferWithKeyValue =
List.append buffer (Num.toU8 '"')
@ -140,8 +146,8 @@ record = \fields ->
List.append bytesWithRecord (Num.toU8 '}')
tag = \name, payload ->
custom \bytes, @Json {} ->
encodeTag = \name, payload ->
Encode.custom \bytes, @Json {} ->
# Idea: encode `A v1 v2` as `{"A": [v1, v2]}`
writePayload = \{ buffer, itemsLeft }, encoder ->
bufferWithValue = appendWith buffer encoder (@Json {})
@ -165,3 +171,204 @@ tag = \name, payload ->
List.append bytesWithPayload (Num.toU8 ']')
|> List.append (Num.toU8 '}')
# Collect the longest prefix of `list` whose elements satisfy `predicate`.
# Returns the taken prefix together with the remaining (untaken) elements.
takeWhile = \list, predicate ->
helper = \{ taken, rest } ->
when List.first rest is
Ok elem ->
if predicate elem then
# Drop the first element of `rest` (via split at 1) and keep going.
helper { taken: List.append taken elem, rest: List.split rest 1 |> .others }
else
{ taken, rest }
Err _ -> { taken, rest }
helper { taken: [], rest: list }
# Convert a character literal to its ASCII byte value.
asciiByte = \b -> Num.toU8 b
# All ASCII digit bytes, '0'..'9' inclusive (range upper bound is exclusive).
digits = List.range (asciiByte '0') (asciiByte '9' + 1)
# Take the leading run of ASCII digits from `bytes`.
# NOTE(review): no '-' sign handling here — negative numbers presumably fail; confirm intended.
takeDigits = \bytes ->
takeWhile bytes \n -> List.contains digits n
# Take the leading digits, plus an optional '.' and fractional digits,
# yielding the textual float and the remaining bytes.
takeFloat = \bytes ->
{ taken: intPart, rest } = takeDigits bytes
when List.get rest 0 is
Ok 46 -> # 46 = .
{ taken: floatPart, rest: afterAll } = takeDigits rest
builtFloat =
List.concat (List.append intPart (asciiByte '.')) floatPart
{ taken: builtFloat, rest: afterAll }
_ ->
{ taken: intPart, rest }
# Numeric decoders for the Json format. Each one takes the leading run of
# ASCII digits (or digits-dot-digits for floats/Dec), parses it via the
# corresponding `Str.to*`, and reports `TooShort` when parsing fails.
decodeU8 = Decode.custom \bytes, @Json {} ->
{ taken, rest } = takeDigits bytes
when Str.fromUtf8 taken |> Result.try Str.toU8 is
Ok n -> { result: Ok n, rest }
Err _ -> { result: Err TooShort, rest }
decodeU16 = Decode.custom \bytes, @Json {} ->
{ taken, rest } = takeDigits bytes
when Str.fromUtf8 taken |> Result.try Str.toU16 is
Ok n -> { result: Ok n, rest }
Err _ -> { result: Err TooShort, rest }
decodeU32 = Decode.custom \bytes, @Json {} ->
{ taken, rest } = takeDigits bytes
when Str.fromUtf8 taken |> Result.try Str.toU32 is
Ok n -> { result: Ok n, rest }
Err _ -> { result: Err TooShort, rest }
decodeU64 = Decode.custom \bytes, @Json {} ->
{ taken, rest } = takeDigits bytes
when Str.fromUtf8 taken |> Result.try Str.toU64 is
Ok n -> { result: Ok n, rest }
Err _ -> { result: Err TooShort, rest }
decodeU128 = Decode.custom \bytes, @Json {} ->
{ taken, rest } = takeDigits bytes
when Str.fromUtf8 taken |> Result.try Str.toU128 is
Ok n -> { result: Ok n, rest }
Err _ -> { result: Err TooShort, rest }
decodeI8 = Decode.custom \bytes, @Json {} ->
{ taken, rest } = takeDigits bytes
when Str.fromUtf8 taken |> Result.try Str.toI8 is
Ok n -> { result: Ok n, rest }
Err _ -> { result: Err TooShort, rest }
decodeI16 = Decode.custom \bytes, @Json {} ->
{ taken, rest } = takeDigits bytes
when Str.fromUtf8 taken |> Result.try Str.toI16 is
Ok n -> { result: Ok n, rest }
Err _ -> { result: Err TooShort, rest }
decodeI32 = Decode.custom \bytes, @Json {} ->
{ taken, rest } = takeDigits bytes
when Str.fromUtf8 taken |> Result.try Str.toI32 is
Ok n -> { result: Ok n, rest }
Err _ -> { result: Err TooShort, rest }
decodeI64 = Decode.custom \bytes, @Json {} ->
{ taken, rest } = takeDigits bytes
when Str.fromUtf8 taken |> Result.try Str.toI64 is
Ok n -> { result: Ok n, rest }
Err _ -> { result: Err TooShort, rest }
decodeI128 = Decode.custom \bytes, @Json {} ->
{ taken, rest } = takeDigits bytes
when Str.fromUtf8 taken |> Result.try Str.toI128 is
Ok n -> { result: Ok n, rest }
Err _ -> { result: Err TooShort, rest }
# Floating-point / decimal decoders use takeFloat to also accept a fraction part.
decodeF32 = Decode.custom \bytes, @Json {} ->
{ taken, rest } = takeFloat bytes
when Str.fromUtf8 taken |> Result.try Str.toF32 is
Ok n -> { result: Ok n, rest }
Err _ -> { result: Err TooShort, rest }
decodeF64 = Decode.custom \bytes, @Json {} ->
{ taken, rest } = takeFloat bytes
when Str.fromUtf8 taken |> Result.try Str.toF64 is
Ok n -> { result: Ok n, rest }
Err _ -> { result: Err TooShort, rest }
decodeDec = Decode.custom \bytes, @Json {} ->
{ taken, rest } = takeFloat bytes
when Str.fromUtf8 taken |> Result.try Str.toDec is
Ok n -> { result: Ok n, rest }
Err _ -> { result: Err TooShort, rest }
# Decode a JSON boolean: compare the leading 5 bytes against "false",
# then the leading 4 against "true"; anything else is TooShort.
decodeBool = Decode.custom \bytes, @Json {} ->
{ before: maybeFalse, others: afterFalse } = List.split bytes 5
# Note: this could be more performant by traversing both branches char-by-char.
# Doing that would also make `rest` more correct in the erroring case.
if
maybeFalse == [asciiByte 'f', asciiByte 'a', asciiByte 'l', asciiByte 's', asciiByte 'e']
then
{ result: Ok False, rest: afterFalse }
else
{ before: maybeTrue, others: afterTrue } = List.split bytes 4
if
maybeTrue == [asciiByte 't', asciiByte 'r', asciiByte 'u', asciiByte 'e']
then
{ result: Ok True, rest: afterTrue }
else
{ result: Err TooShort, rest: bytes }
# Decode a JSON string: require an opening '"', take bytes up to the next '"',
# validate them as UTF-8, then skip the closing quote.
decodeString = Decode.custom \bytes, @Json {} ->
{ before, others: afterStartingQuote } = List.split bytes 1
if
before == [asciiByte '"']
then
# TODO: handle escape sequences
{ taken: strSequence, rest } = takeWhile afterStartingQuote \n -> n != asciiByte '"'
when Str.fromUtf8 strSequence is
Ok s ->
# Drop the closing '"' before returning the rest of the input.
{ others: afterEndingQuote } = List.split rest 1
{ result: Ok s, rest: afterEndingQuote }
Err _ -> { result: Err TooShort, rest }
else
{ result: Err TooShort, rest: bytes }
# Decode a JSON array of homogeneous elements: '[' then comma-separated
# elements decoded by `decodeElem`, then ']'.
decodeList = \decodeElem -> Decode.custom \bytes, @Json {} ->
# Tail-recursively decode elements; yields `Done vals rest` after the last
# element (no trailing comma) or `Errored e rest` on the first failure.
decodeElems = \chunk, accum ->
when Decode.decodeWith chunk decodeElem (@Json {}) is
{ result, rest } ->
when result is
Ok val ->
# TODO: handle spaces before ','
{ before: afterElem, others } = List.split rest 1
if
afterElem == [asciiByte ',']
then
decodeElems others (List.append accum val)
else
Done (List.append accum val) rest
Err e -> Errored e rest
{ before, others: afterStartingBrace } = List.split bytes 1
if
before == [asciiByte '[']
then
# TODO: empty lists
when decodeElems afterStartingBrace [] is
Errored e rest -> { result: Err e, rest }
Done vals rest ->
# The element loop stops before ']'; verify and consume it here.
{ before: maybeEndingBrace, others: afterEndingBrace } = List.split rest 1
if
maybeEndingBrace == [asciiByte ']']
then
{ result: Ok vals, rest: afterEndingBrace }
else
{ result: Err TooShort, rest }
else
{ result: Err TooShort, rest: bytes }

View file

@ -12,6 +12,7 @@ pub fn module_source(module_id: ModuleId) -> &'static str {
ModuleId::BOX => BOX,
ModuleId::BOOL => BOOL,
ModuleId::ENCODE => ENCODE,
ModuleId::DECODE => DECODE,
ModuleId::JSON => JSON,
_ => panic!(
"ModuleId {:?} is not part of the standard library",
@ -29,4 +30,5 @@ const SET: &str = include_str!("../roc/Set.roc");
const BOX: &str = include_str!("../roc/Box.roc");
const BOOL: &str = include_str!("../roc/Bool.roc");
const ENCODE: &str = include_str!("../roc/Encode.roc");
const DECODE: &str = include_str!("../roc/Decode.roc");
const JSON: &str = include_str!("../roc/Json.roc");

View file

@ -422,12 +422,13 @@ pub fn constrain_expr(
constraints.lookup(*symbol, expected, region)
}
&AbilityMember(symbol, specialization_id, specialization_var) => {
// make lookup constraint to lookup this symbol's type in the environment
let store_expected = constraints.equal_types_var(
// Save the expectation in the `specialization_var` so we know what to specialize, then
// lookup the member in the environment.
let store_expected = constraints.store(
expected.get_type_ref().clone(),
specialization_var,
expected,
Category::Storage(file!(), line!()),
region,
file!(),
line!(),
);
let lookup_constr = constraints.lookup(
symbol,
@ -435,7 +436,7 @@ pub fn constrain_expr(
region,
);
// Make sure we attempt to resolve the specialization, if we need to.
// Make sure we attempt to resolve the specialization, if we can.
if let Some(specialization_id) = specialization_id {
env.resolutions_to_make.push(OpportunisticResolve {
specialization_variable: specialization_var,

View file

@ -10,13 +10,14 @@ use roc_collections::MutMap;
use roc_derive::SharedDerivedModule;
use roc_error_macros::internal_error;
use roc_module::symbol::ModuleId;
use roc_module::symbol::Symbol;
use roc_solve::ability::AbilityResolver;
use roc_solve::solve::Pools;
use roc_solve::specialize::{compact_lambda_sets_of_vars, DerivedEnv, Phase};
use roc_types::subs::{get_member_lambda_sets_at_region, Content, FlatType, LambdaSet};
use roc_types::subs::{ExposedTypesStorageSubs, Subs, Variable};
use roc_unify::unify::{unify as unify_unify, Env, Mode, Unified};
pub use roc_solve::ability::resolve_ability_specialization;
pub use roc_solve::ability::Resolved;
pub use roc_types::subs::instantiate_rigids;
@ -50,12 +51,12 @@ impl WorldAbilities {
#[inline(always)]
pub fn with_module_exposed_type<T>(
&mut self,
&self,
module: ModuleId,
mut f: impl FnMut(&mut ExposedTypesStorageSubs) -> T,
mut f: impl FnMut(&ExposedTypesStorageSubs) -> T,
) -> T {
let mut world = self.world.write().unwrap();
let (_, exposed_types) = world.get_mut(&module).expect("module not in the world");
let world = self.world.read().unwrap();
let (_, exposed_types) = world.get(&module).expect("module not in the world");
f(exposed_types)
}
@ -97,6 +98,75 @@ impl AbilitiesView<'_> {
}
}
/// Resolver that answers ability-member queries from an [`AbilitiesView`],
/// so specializations can be resolved "late" (after module solving), when
/// stores for several modules may be involved.
pub struct LateResolver<'a> {
// Module we are resolving from; external signatures get copied into its subs.
home: ModuleId,
abilities: &'a AbilitiesView<'a>,
}
impl<'a> AbilityResolver for LateResolver<'a> {
// Look up the member's parent ability and signature variable in whichever
// module's store defines the member; if that module is not `home`, import
// the signature type into `home_subs` via the world's exposed-types storage.
fn member_parent_and_signature_var(
&self,
ability_member: roc_module::symbol::Symbol,
home_subs: &mut Subs,
) -> Option<(roc_module::symbol::Symbol, Variable)> {
let (parent_ability, signature_var) =
self.abilities
.with_module_abilities_store(ability_member.module_id(), |store| {
store
.member_def(ability_member)
.map(|def| (def.parent_ability, def.signature_var()))
})?;
let parent_ability_module = parent_ability.module_id();
// A member is always defined in the same module as its parent ability.
debug_assert_eq!(parent_ability_module, ability_member.module_id());
let signature_var = match (parent_ability_module == self.home, self.abilities) {
(false, AbilitiesView::World(world)) => {
// Need to copy the type from an external module into our home subs
world.with_module_exposed_type(parent_ability_module, |external_types| {
let stored_signature_var =
external_types.stored_ability_member_vars.get(&signature_var).expect("Ability member is in an external store, but its signature variables are not stored accordingly!");
let home_copy = external_types
.storage_subs
.export_variable_to(home_subs, *stored_signature_var);
home_copy.variable
})
}
// Home module (or module-local view): the variable is already usable as-is.
_ => signature_var,
};
Some((parent_ability, signature_var))
}
// Fetch the declared implementation for an impl key from the store of the
// module that defines the implementing opaque type.
fn get_implementation(
&self,
impl_key: roc_can::abilities::ImplKey,
) -> Option<roc_types::types::MemberImpl> {
self.abilities
.with_module_abilities_store(impl_key.opaque.module_id(), |store| {
store.get_implementation(impl_key).copied()
})
}
}
/// Resolve which implementation a call to `ability_member` should use, given
/// `specialization_var` (the variable holding the call's expected type).
/// Thin wrapper that builds a [`LateResolver`] over the abilities view and
/// defers to `roc_solve::ability::resolve_ability_specialization`.
/// Returns `None` when no specialization can be determined.
pub fn resolve_ability_specialization(
home: ModuleId,
subs: &mut Subs,
abilities: &AbilitiesView,
ability_member: Symbol,
specialization_var: Variable,
) -> Option<Resolved> {
let late_resolver = LateResolver { home, abilities };
roc_solve::ability::resolve_ability_specialization(
subs,
&late_resolver,
ability_member,
specialization_var,
)
}
pub struct LatePhase<'a> {
home: ModuleId,
abilities: &'a AbilitiesView<'a>,

View file

@ -14,6 +14,7 @@ const MODULES: &[(ModuleId, &str)] = &[
(ModuleId::SET, "Set.roc"),
(ModuleId::BOX, "Box.roc"),
(ModuleId::ENCODE, "Encode.roc"),
(ModuleId::DECODE, "Decode.roc"),
(ModuleId::JSON, "Json.roc"),
];

View file

@ -167,6 +167,7 @@ impl Default for ModuleCache<'_> {
NUM,
BOX,
ENCODE,
DECODE,
JSON,
}
@ -3088,6 +3089,7 @@ fn load_module<'a>(
"Bool", ModuleId::BOOL
"Box", ModuleId::BOX
"Encode", ModuleId::ENCODE
"Decode", ModuleId::DECODE
"Json", ModuleId::JSON
}

View file

@ -78,6 +78,7 @@ impl ModuleName {
pub const RESULT: &'static str = "Result";
pub const BOX: &'static str = "Box";
pub const ENCODE: &'static str = "Encode";
pub const DECODE: &'static str = "Decode";
pub const JSON: &'static str = "Json";
pub fn as_str(&self) -> &str {

View file

@ -1391,9 +1391,37 @@ define_builtins! {
24 ENCODE_APPEND: "append"
25 ENCODE_TO_BYTES: "toBytes"
}
12 JSON: "Json" => {
12 DECODE: "Decode" => {
0 DECODE_DECODE_ERROR: "DecodeError"
1 DECODE_DECODE_RESULT: "DecodeResult"
2 DECODE_DECODER_OPAQUE: "Decoder"
3 DECODE_DECODING: "Decoding"
4 DECODE_DECODER: "decoder"
5 DECODE_DECODERFORMATTING: "DecoderFormatting"
6 DECODE_U8: "u8"
7 DECODE_U16: "u16"
8 DECODE_U32: "u32"
9 DECODE_U64: "u64"
10 DECODE_U128: "u128"
11 DECODE_I8: "i8"
12 DECODE_I16: "i16"
13 DECODE_I32: "i32"
14 DECODE_I64: "i64"
15 DECODE_I128: "i128"
16 DECODE_F32: "f32"
17 DECODE_F64: "f64"
18 DECODE_DEC: "dec"
19 DECODE_BOOL: "bool"
20 DECODE_STRING: "string"
21 DECODE_LIST: "list"
22 DECODE_CUSTOM: "custom"
23 DECODE_DECODE_WITH: "decodeWith"
24 DECODE_FROM_BYTES_PARTIAL: "fromBytesPartial"
25 DECODE_FROM_BYTES: "fromBytes"
}
13 JSON: "Json" => {
0 JSON_JSON: "Json"
}
num_modules: 13 // Keep this count up to date by hand! (TODO: see the mut_map! macro for how we could determine this count correctly in the macro)
num_modules: 14 // Keep this count up to date by hand! (TODO: see the mut_map! macro for how we could determine this count correctly in the macro)
}

View file

@ -4916,10 +4916,8 @@ pub fn with_hole<'a>(
UnspecializedExpr(symbol) => {
match procs.ability_member_aliases.get(symbol).unwrap() {
&self::AbilityMember(member) => {
let resolved_proc = env.abilities.with_module_abilities_store(env.home, |store|
resolve_ability_specialization(env.subs, store, member, fn_var)
.expect("Recorded as an ability member, but it doesn't have a specialization")
);
let resolved_proc = resolve_ability_specialization(env.home, env.subs, &env.abilities, member, fn_var)
.expect("Recorded as an ability member, but it doesn't have a specialization");
let resolved_proc = match resolved_proc {
Resolved::Specialization(symbol) => symbol,
@ -5227,12 +5225,14 @@ fn late_resolve_ability_specialization<'a>(
env.subs[spec_symbol_index]
} else {
// Otherwise, resolve by checking the able var.
let specialization = env
.abilities
.with_module_abilities_store(env.home, |store| {
resolve_ability_specialization(env.subs, store, member, specialization_var)
.expect("Ability specialization is unknown - code generation cannot proceed!")
});
let specialization = resolve_ability_specialization(
env.home,
env.subs,
&env.abilities,
member,
specialization_var,
)
.expect("Ability specialization is unknown - code generation cannot proceed!");
match specialization {
Resolved::Specialization(symbol) => symbol,

View file

@ -6,7 +6,7 @@ use roc_module::symbol::Symbol;
use roc_region::all::{Loc, Region};
use roc_solve_problem::{TypeError, UnderivableReason, Unfulfilled};
use roc_types::subs::{instantiate_rigids, Content, FlatType, GetSubsSlice, Rank, Subs, Variable};
use roc_types::types::{AliasKind, Category, PatternCategory};
use roc_types::types::{AliasKind, Category, MemberImpl, PatternCategory};
use roc_unify::unify::{Env, MustImplementConstraints};
use roc_unify::unify::{MustImplementAbility, Obligated};
@ -547,7 +547,7 @@ pub fn type_implementing_specialization(
}
/// Result of trying to resolve an ability specialization.
#[derive(Clone, Copy)]
#[derive(Clone, Copy, Debug)]
pub enum Resolved {
/// A user-defined specialization should be used.
Specialization(Symbol),
@ -555,23 +555,61 @@ pub enum Resolved {
NeedsGenerated,
}
pub fn resolve_ability_specialization(
/// An [`AbilityResolver`] is a shell of an abilities store that answers questions needed for
/// [resolving ability specializations][`resolve_ability_specialization`].
///
/// The trait is provided so you can implement your own resolver at other points in the compilation
/// process, for example during monomorphization we have module-re-entrant ability stores that are
/// not available during solving.
pub trait AbilityResolver {
/// Gets the parent ability and type of an ability member.
///
/// If needed, the type of the ability member will be imported into a local `subs` buffer; as
/// such, subs must be provided.
fn member_parent_and_signature_var(
&self,
ability_member: Symbol,
home_subs: &mut Subs,
) -> Option<(Symbol, Variable)>;
/// Finds the declared implementation of an [`ImplKey`][roc_can::abilities::ImplKey].
fn get_implementation(&self, impl_key: roc_can::abilities::ImplKey) -> Option<MemberImpl>;
}
/// Trivial implementation of a resolver for a module-local abilities store, that defers all
/// queries to the module store.
impl AbilityResolver for AbilitiesStore {
#[inline(always)]
fn member_parent_and_signature_var(
&self,
ability_member: Symbol,
_home_subs: &mut Subs, // only have access to one abilities store, do nothing with subs
) -> Option<(Symbol, Variable)> {
self.member_def(ability_member)
.map(|def| (def.parent_ability, def.signature_var()))
}
#[inline(always)]
fn get_implementation(&self, impl_key: roc_can::abilities::ImplKey) -> Option<MemberImpl> {
self.get_implementation(impl_key).copied()
}
}
pub fn resolve_ability_specialization<R: AbilityResolver>(
subs: &mut Subs,
abilities_store: &AbilitiesStore,
resolver: &R,
ability_member: Symbol,
specialization_var: Variable,
) -> Option<Resolved> {
use roc_unify::unify::{unify, Mode};
let member_def = abilities_store
.member_def(ability_member)
let (parent_ability, signature_var) = resolver
.member_parent_and_signature_var(ability_member, subs)
.expect("Not an ability member symbol");
// Figure out the ability we're resolving in a temporary subs snapshot.
let snapshot = subs.snapshot();
let signature_var = member_def.signature_var();
instantiate_rigids(subs, signature_var);
let (_vars, must_implement_ability, _lambda_sets_to_specialize, _meta) = unify(
&mut Env::new(subs),
@ -585,8 +623,7 @@ pub fn resolve_ability_specialization(
subs.rollback_to(snapshot);
let obligated =
type_implementing_specialization(&must_implement_ability, member_def.parent_ability)?;
let obligated = type_implementing_specialization(&must_implement_ability, parent_ability)?;
let resolved = match obligated {
Obligated::Opaque(symbol) => {
@ -595,9 +632,9 @@ pub fn resolve_ability_specialization(
ability_member,
};
match abilities_store.get_implementation(impl_key)? {
match resolver.get_implementation(impl_key)? {
roc_types::types::MemberImpl::Impl(spec_symbol) => {
Resolved::Specialization(*spec_symbol)
Resolved::Specialization(spec_symbol)
}
roc_types::types::MemberImpl::Derived => Resolved::NeedsGenerated,
// TODO this is not correct. We can replace `Resolved` with `MemberImpl` entirely,

View file

@ -928,51 +928,10 @@ fn solve(
aliases,
*source_index,
);
let target = *target;
match unify(&mut UEnv::new(subs), actual, target, Mode::EQ) {
Success {
vars,
// ERROR NOT REPORTED
must_implement_ability: _,
lambda_sets_to_specialize,
extra_metadata: _,
} => {
introduce(subs, rank, pools, &vars);
let CompactionResult {
obligations,
awaiting_specialization,
} = compact_lambda_sets_of_vars(
subs,
derived_env,
arena,
pools,
lambda_sets_to_specialize,
&SolvePhase { abilities_store },
);
// implement obligations not reported
_ = obligations;
// but awaited specializations must be recorded
awaiting_specializations.union(awaiting_specialization);
state
}
Failure(vars, _actual_type, _expected_type, _bad_impls) => {
introduce(subs, rank, pools, &vars);
// ERROR NOT REPORTED
state
}
BadType(vars, _) => {
introduce(subs, rank, pools, &vars);
// ERROR NOT REPORTED
state
}
}
let actual_desc = subs.get(actual);
subs.union(*target, actual, actual_desc);
state
}
Lookup(symbol, expectation_index, region) => {
match env.get_var_by_symbol(symbol) {

View file

@ -579,9 +579,13 @@ fn compact_lambda_set<P: Phase>(
// 3. Unify `t_f1 ~ t_f2`.
trace_compact!(3iter_start. subs, this_lambda_set, t_f1, t_f2);
let (vars, new_obligations, new_lambda_sets_to_specialize, _meta) =
unify(&mut UEnv::new(subs), t_f1, t_f2, Mode::EQ)
.expect_success("ambient functions don't unify");
let (vars, new_obligations, new_lambda_sets_to_specialize, _meta) = unify(
&mut UEnv::new(subs),
t_f1,
t_f2,
Mode::LAMBDA_SET_SPECIALIZATION,
)
.expect_success("ambient functions don't unify");
trace_compact!(3iter_end. subs, t_f1);
introduce(subs, target_rank, pools, &vars);

View file

@ -6715,7 +6715,7 @@ mod solve_expr {
),
@r#"
A#id(5) : {} -[[id(5)]]-> ({} -[[8(8)]]-> {})
Id#id(3) : {} -[[id(5)]]-> ({} -[[8(8)]]-> {})
Id#id(3) : a -[[] + a:id(3):1]-> ({} -[[] + a:id(3):2]-> a) | a has Id
alias : {} -[[id(5)]]-> ({} -[[8(8)]]-> {})
"#
print_only_under_alias: true
@ -7324,6 +7324,169 @@ mod solve_expr {
);
}
#[test]
fn polymorphic_lambda_set_specialization_varying_over_multiple_variables() {
infer_queries!(
indoc!(
r#"
app "test" provides [main] to "./platform"
J has j : j -> (k -> {}) | j has J, k has K
K has k : k -> {} | k has K
C := {} has [J {j: jC}]
jC = \@C _ -> k
#^^{-1}
D := {} has [J {j: jD}]
jD = \@D _ -> k
#^^{-1}
E := {} has [K {k}]
k = \@E _ -> {}
#^{-1}
f = \flag, a, b ->
# ^ ^
it =
# ^^
when flag is
A -> j a
# ^
B -> j b
# ^
it
# ^^
main = (f A (@C {}) (@D {})) (@E {})
# ^
# ^^^^^^^^^^^^^^^^^^^
#^^^^{-1}
"#
),
@r###"
jC : C -[[jC(8)]]-> (k -[[] + k:k(4):1]-> {}) | k has K
jD : D -[[jD(9)]]-> (k -[[] + k:k(4):1]-> {}) | k has K
E#k(10) : E -[[k(10)]]-> {}
a : j | j has J
b : j | j has J
it : k -[[] + j:j(2):2 + a:j(2):2]-> {} | a has J, j has J, k has K
J#j(2) : j -[[] + j:j(2):1]-> (k -[[] + j:j(2):2 + a:j(2):2]-> {}) | a has J, j has J, k has K
J#j(2) : j -[[] + j:j(2):1]-> (k -[[] + a:j(2):2 + j:j(2):2]-> {}) | a has J, j has J, k has K
it : k -[[] + j:j(2):2 + a:j(2):2]-> {} | a has J, j has J, k has K
f : [A, B], C, D -[[f(11)]]-> (E -[[k(10)]]-> {})
f A (@C {}) (@D {}) : E -[[k(10)]]-> {}
main : {}
"###
);
}
#[test]
fn polymorphic_lambda_set_specialization_varying_over_multiple_variables_two_results() {
infer_queries!(
indoc!(
r#"
app "test" provides [main] to "./platform"
J has j : j -> (k -> {}) | j has J, k has K
K has k : k -> {} | k has K
C := {} has [J {j: jC}]
jC = \@C _ -> k
#^^{-1}
D := {} has [J {j: jD}]
jD = \@D _ -> k
#^^{-1}
E := {} has [K {k: kE}]
kE = \@E _ -> {}
#^^{-1}
F := {} has [K {k: kF}]
kF = \@F _ -> {}
#^^{-1}
f = \flag, a, b ->
# ^ ^
it =
# ^^
when flag is
A -> j a
# ^
B -> j b
# ^
it
# ^^
main =
#^^^^{-1}
it =
# ^^
(f A (@C {}) (@D {}))
# ^
if True
then it (@E {})
# ^^
else it (@F {})
# ^^
"#
),
@r###"
jC : C -[[jC(9)]]-> (k -[[] + k:k(4):1]-> {}) | k has K
jD : D -[[jD(10)]]-> (k -[[] + k:k(4):1]-> {}) | k has K
kE : E -[[kE(11)]]-> {}
kF : F -[[kF(12)]]-> {}
a : j | j has J
b : j | j has J
it : k -[[] + j:j(2):2 + a:j(2):2]-> {} | a has J, j has J, k has K
J#j(2) : j -[[] + j:j(2):1]-> (k -[[] + j:j(2):2 + a:j(2):2]-> {}) | a has J, j has J, k has K
J#j(2) : j -[[] + j:j(2):1]-> (k -[[] + a:j(2):2 + j:j(2):2]-> {}) | a has J, j has J, k has K
it : k -[[] + j:j(2):2 + a:j(2):2]-> {} | a has J, j has J, k has K
main : {}
it : k -[[] + k:k(4):1]-> {} | k has K
f : [A, B], C, D -[[f(13)]]-> (k -[[] + k:k(4):1]-> {}) | k has K
it : E -[[kE(11)]]-> {}
it : F -[[kF(12)]]-> {}
"###
);
}
// Inference snapshot test: both `when` branches call `j a` on the *same*
// variable, so the unspecialized lambda set of `it` should mention only a
// single type variable (contrast with the multi-variable tests above this one
// in the file). The `# ^^ ^` markers query the types of `it` and `c`; the
// inline snapshot records the expected inference output verbatim.
#[test]
fn polymorphic_lambda_set_specialization_branching_over_single_variable() {
// NOTE(review): the r#"..."# program and @r###"..."### snapshot are compared
// byte-for-byte — only comments outside them may change.
infer_queries!(
indoc!(
r#"
app "test" provides [f] to "./platform"
J has j : j -> (k -> {}) | j has J, k has K
K has k : k -> {} | k has K
C := {} has [J {j: jC}]
jC = \@C _ -> k
D := {} has [J {j: jD}]
jD = \@D _ -> k
E := {} has [K {k}]
k = \@E _ -> {}
f = \flag, a, c ->
it =
when flag is
A -> j a
B -> j a
it c
# ^^ ^
"#
),
@r###"
it : k -[[] + j:j(2):2]-> {} | j has J, k has K
c : k | k has K
"###
);
}
#[test]
fn wrap_recursive_opaque_negative_position() {
infer_eq_without_problem(

View file

@ -350,7 +350,7 @@ fn encode_use_stdlib() {
}
#[test]
#[cfg(any(feature = "gen-llvm"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn encode_use_stdlib_without_wrapping_custom() {
assert_evals_to!(
indoc!(
@ -375,7 +375,7 @@ fn encode_use_stdlib_without_wrapping_custom() {
}
#[test]
#[cfg(any(feature = "gen-llvm"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn to_encoder_encode_custom_has_capture() {
assert_evals_to!(
indoc!(
@ -406,6 +406,9 @@ mod encode_immediate {
#[cfg(feature = "gen-llvm")]
use crate::helpers::llvm::assert_evals_to;
#[cfg(feature = "gen-wasm")]
use crate::helpers::wasm::assert_evals_to;
#[cfg(all(test, any(feature = "gen-llvm", feature = "gen-wasm")))]
use indoc::indoc;
@ -413,7 +416,7 @@ mod encode_immediate {
use roc_std::RocStr;
#[test]
#[cfg(any(feature = "gen-llvm"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn string() {
assert_evals_to!(
indoc!(
@ -472,7 +475,7 @@ mod encode_immediate {
}
#[test]
#[cfg(any(feature = "gen-llvm"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn encode_derived_record_one_field_string() {
assert_evals_to!(
indoc!(
@ -494,7 +497,7 @@ fn encode_derived_record_one_field_string() {
}
#[test]
#[cfg(any(feature = "gen-llvm"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn encode_derived_record_two_fields_strings() {
assert_evals_to!(
indoc!(
@ -517,7 +520,7 @@ fn encode_derived_record_two_fields_strings() {
}
#[test]
#[cfg(any(feature = "gen-llvm"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn encode_derived_nested_record_string() {
assert_evals_to!(
indoc!(
@ -541,7 +544,7 @@ fn encode_derived_nested_record_string() {
}
#[test]
#[cfg(any(feature = "gen-llvm"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn encode_derived_tag_one_payload_string() {
assert_evals_to!(
indoc!(
@ -565,7 +568,7 @@ fn encode_derived_tag_one_payload_string() {
}
#[test]
#[cfg(any(feature = "gen-llvm"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn encode_derived_tag_two_payloads_string() {
assert_evals_to!(
indoc!(
@ -589,7 +592,7 @@ fn encode_derived_tag_two_payloads_string() {
}
#[test]
#[cfg(any(feature = "gen-llvm"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn encode_derived_nested_tag_string() {
assert_evals_to!(
indoc!(
@ -614,7 +617,7 @@ fn encode_derived_nested_tag_string() {
}
#[test]
#[cfg(any(feature = "gen-llvm"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn encode_derived_nested_record_tag_record() {
assert_evals_to!(
indoc!(
@ -639,7 +642,7 @@ fn encode_derived_nested_record_tag_record() {
}
#[test]
#[cfg(any(feature = "gen-llvm"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn encode_derived_list_string() {
assert_evals_to!(
indoc!(
@ -663,7 +666,7 @@ fn encode_derived_list_string() {
}
#[test]
#[cfg(any(feature = "gen-llvm"))]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn encode_derived_list_of_records() {
assert_evals_to!(
indoc!(
@ -685,3 +688,95 @@ fn encode_derived_list_of_records() {
RocStr
)
}
// Derived-Encode integration test: a record mixing a list of strings, a u16,
// and a nested record containing u8s plus a tag-union payload is serialized
// with the Json encoder, and the exact JSON text is asserted. Note the fields
// in the expected output appear in alphabetical order (actors/rating/year,
// average/max/min/sentiment) — the assertion depends on that exact layout.
#[test]
#[cfg(all(
any(feature = "gen-llvm", feature = "gen-wasm"),
not(feature = "gen-llvm-wasm") // hits a stack limit in wasm3
))]
fn encode_derived_record_with_many_types() {
assert_evals_to!(
indoc!(
r#"
app "test"
imports [Encode.{ toEncoder }, Json]
provides [main] to "./platform"
main =
fresh : [Fresh Str, Rotten Str]
fresh = Fresh "tomatoes"
rcd = {actors: ["Idris Elba", "Mila Kunis"], year: 2004u16, rating: {average: 7u8, min: 1u8, max: 10u8, sentiment: fresh}}
result = Str.fromUtf8 (Encode.toBytes rcd Json.toUtf8)
when result is
Ok s -> s
_ -> "<bad>"
"#
),
RocStr::from(
r#"{"actors":["Idris Elba","Mila Kunis"],"rating":{"average":7,"max":10,"min":1,"sentiment":{"Fresh":["tomatoes"]}},"year":2004}"#
),
RocStr
)
}
// Decode integration test: `MyNum` is an opaque wrapper around U8 with a custom
// decoder that delegates to the stdlib `Decode.u8` and re-wraps the result.
// The input bytes [49, 53] are the ASCII text "15", so decoding through the
// Json format should yield 15; the 101 fallback branch marks a decode failure.
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn decode_use_stdlib() {
assert_evals_to!(
indoc!(
r#"
app "test"
imports [Decode.{ Decoding }, Json]
provides [main] to "./platform"
MyNum := U8 has [Decoding {decoder: myDecoder}]
myDecoder =
Decode.custom \bytes, fmt ->
when Decode.decodeWith bytes Decode.u8 fmt is
{result, rest} ->
when result is
Ok n -> {result: Ok (@MyNum n), rest}
Err e -> {result: Err e, rest}
main =
when Decode.fromBytes [49, 53] Json.fromUtf8 is
Ok (@MyNum n) -> n
_ -> 101
"#
),
15,
u8
)
}
// Decode integration test for lists: `MyNumList` opaquely wraps `List U8` with
// a custom decoder built from the stdlib combinator `Decode.list Decode.u8`.
// Decoding the JSON text "[1,2,3]" should produce the wrapped list [1, 2, 3];
// the `_ -> []` branch marks a decode failure.
#[test]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn decode_use_stdlib_json_list() {
assert_evals_to!(
indoc!(
r#"
app "test"
imports [Decode.{ Decoding }, Json]
provides [main] to "./platform"
MyNumList := List U8 has [Decoding {decoder: myDecoder}]
myDecoder =
Decode.custom \bytes, fmt ->
when Decode.decodeWith bytes (Decode.list Decode.u8) fmt is
{result, rest} ->
when result is
Ok lst -> {result: Ok (@MyNumList lst), rest}
Err e -> {result: Err e, rest}
main =
when Str.toUtf8 "[1,2,3]" |> Decode.fromBytes Json.fromUtf8 is
Ok (@MyNumList lst) -> lst
_ -> []
"#
),
RocList::from_slice(&[1u8, 2u8, 3u8]),
RocList<u8>
)
}

View file

@ -202,7 +202,7 @@ fn find_names_needed(
);
}
}
Structure(Func(arg_vars, _closure_var, ret_var)) => {
Structure(Func(arg_vars, closure_var, ret_var)) => {
for index in arg_vars.into_iter() {
let var = subs[index];
find_names_needed(
@ -215,6 +215,15 @@ fn find_names_needed(
);
}
find_names_needed(
*closure_var,
subs,
roots,
root_appearances,
names_taken,
find_under_alias,
);
find_names_needed(
*ret_var,
subs,

View file

@ -925,7 +925,16 @@ fn subs_fmt_flat_type(this: &FlatType, subs: &Subs, f: &mut fmt::Formatter) -> f
RecordField::Required(_) => ':',
RecordField::Demanded(_) => ':',
};
write!(f, "{:?} {} {:?}, ", name, separator, content)?;
write!(
f,
"{:?} {} {:?}, ",
name,
separator,
SubsFmtContent(
subs.get_content_without_compacting(*content.as_inner()),
subs
)
)?;
}
write!(f, "}}<{:?}>", new_ext)

View file

@ -100,6 +100,10 @@ bitflags! {
///
/// For example, t1 += [A Str] says we should "add" the tag "A Str" to the type of "t1".
const PRESENT = 1 << 1;
/// Like [`Mode::EQ`], but also instructs the unifier that the ambient lambda set
/// specialization algorithm is running. This has implications for the unification of
/// unspecialized lambda sets; see [`unify_unspecialized_lambdas`].
const LAMBDA_SET_SPECIALIZATION = Mode::EQ.bits | (1 << 2);
}
}
@ -114,6 +118,11 @@ impl Mode {
self.contains(Mode::PRESENT)
}
/// True when the unifier is running as part of the ambient lambda set
/// specialization procedure (`Mode::LAMBDA_SET_SPECIALIZATION`, which by
/// construction also carries the `EQ` bits).
fn is_lambda_set_specialization(&self) -> bool {
// Debug-check: `contains(Mode::EQ | Mode::PRESENT)` is true only when *both*
// flags are set, so this asserts EQ and PRESENT are never combined.
// NOTE(review): confirm mutual exclusion is the intended invariant here, as
// the assert does not fire when either flag is set alone.
debug_assert!(!self.contains(Mode::EQ | Mode::PRESENT));
self.contains(Mode::LAMBDA_SET_SPECIALIZATION)
}
/// Converts this mode into a plain equality mode: the `PRESENT` flag (if any)
/// is removed and the `EQ` flag is ensured.
fn as_eq(self) -> Self {
(self - Mode::PRESENT) | Mode::EQ
}
@ -1065,6 +1074,7 @@ struct SeparatedUnionLambdas {
fn separate_union_lambdas<M: MetaCollector>(
env: &mut Env,
pool: &mut Pool,
mode: Mode,
fields1: UnionLambdas,
fields2: UnionLambdas,
) -> (Outcome<M>, SeparatedUnionLambdas) {
@ -1173,7 +1183,7 @@ fn separate_union_lambdas<M: MetaCollector>(
maybe_mark_union_recursive(env, var1);
maybe_mark_union_recursive(env, var2);
let outcome = unify_pool(env, pool, var1, var2, Mode::EQ);
let outcome = unify_pool(env, pool, var1, var2, mode);
if !outcome.mismatches.is_empty() {
env.subs.rollback_to(snapshot);
@ -1214,64 +1224,292 @@ fn separate_union_lambdas<M: MetaCollector>(
)
}
/// ULS-SORT-ORDER:
/// - Arrange into partitions of (_, member, region), in ascending order of (member, region).
/// - Within each partition, place flex-able vars at the end of the partition.
/// - Amongst all flex-able vars, sort by their root key, so that identical vars are next to each other.
///
/// This comparator is handed to `sort_by`, which requires a *total order*: in
/// particular `cmp(a, b)` must be the reverse of `cmp(b, a)` for every pair.
#[inline(always)]
fn unspecialized_lambda_set_sorter(subs: &Subs, uls1: Uls, uls2: Uls) -> std::cmp::Ordering {
    let Uls(var1, sym1, region1) = uls1;
    let Uls(var2, sym2, region2) = uls2;

    use std::cmp::Ordering::*;
    use Content::*;
    // Primary key: (member symbol, region) partitions.
    match (sym1, region1).cmp(&(sym2, region2)) {
        Equal => {
            match (
                subs.get_content_without_compacting(var1),
                subs.get_content_without_compacting(var2),
            ) {
                // Two able vars: order by root key so identical vars land adjacent.
                (FlexAbleVar(..) | RigidAbleVar(..), FlexAbleVar(..) | RigidAbleVar(..)) => subs
                    .get_root_key_without_compacting(var1)
                    .cmp(&subs.get_root_key_without_compacting(var2)),
                // Plain (non-able) flex/rigid vars must never appear in an
                // unspecialized lambda set.
                (FlexVar(..) | RigidVar(..), _) | (_, FlexVar(..) | RigidVar(..)) => {
                    internal_error!("unexpected variable type in unspecialized lambda set!")
                }
                // Flex-able vars sort to the end of their partition.
                (FlexAbleVar(..), _) => Greater,
                (_, FlexAbleVar(..)) => Less,
                // For everything else, the order is irrelevant.
                //
                // BUGFIX: this arm previously returned `Less` unconditionally, which made
                // the comparator claim `Less` for *both* orderings of the same pair —
                // violating the antisymmetry `slice::sort_by` requires of its comparator
                // (unspecified order per the docs, and a runtime panic on newer rustc).
                // Returning `Equal` is consistent, and under the stable sort yields the
                // exact same element order (`Less` and `Equal` both mean "don't swap").
                (_, _) => Equal,
            }
        }
        ord => ord,
    }
}
/// Arranges a list of unspecialized lambda sets into ULS-SORT-ORDER
/// (see [`unspecialized_lambda_set_sorter`] for the ordering rules).
#[inline(always)]
fn sort_unspecialized_lambda_sets(subs: &Subs, mut sets: Vec<Uls>) -> Vec<Uls> {
    sets.sort_by(|&left, &right| unspecialized_lambda_set_sorter(subs, left, right));
    sets
}
/// Debug helper: reports whether `uls` is already arranged in ULS-SORT-ORDER.
#[inline(always)]
fn is_sorted_unspecialized_lamba_set_list(subs: &Subs, uls: &[Uls]) -> bool {
    // Re-sort a copy and check that nothing moved.
    let resorted = sort_unspecialized_lambda_sets(subs, uls.to_vec());
    resorted == uls
}
fn unify_unspecialized_lambdas<M: MetaCollector>(
env: &mut Env,
pool: &mut Pool,
uls1: SubsSlice<Uls>,
uls2: SubsSlice<Uls>,
mode: Mode,
uls_left: SubsSlice<Uls>,
uls_right: SubsSlice<Uls>,
) -> Result<(SubsSlice<Uls>, Outcome<M>), Outcome<M>> {
// For now we merge all variables of unspecialized lambdas in a lambda set that share the same
// ability member/region.
// See the section "A property that's lost, and how we can hold on to it" of
// solve/docs/ambient_lambda_set_specialization.md to see how we can loosen this restriction.
// Note that we don't need to update the bookkeeping of variable -> lambda set to be resolved,
// because if we had v1 -> lset1, and now lset1 ~ lset2, then afterward either lset1 still
// resolves to itself or re-points to lset2.
// In either case the merged unspecialized lambda sets will be there.
match (uls1.is_empty(), uls2.is_empty()) {
(true, true) => Ok((SubsSlice::default(), Default::default())),
(false, true) => Ok((uls1, Default::default())),
(true, false) => Ok((uls2, Default::default())),
(false, false) => {
let mut all_uls = (env.subs.get_subs_slice(uls1).iter())
.chain(env.subs.get_subs_slice(uls2))
.map(|&Uls(var, sym, region)| {
// Take the root key to deduplicate
Uls(env.subs.get_root_key_without_compacting(var), sym, region)
})
.collect::<Vec<_>>();
// Arrange into partitions of (_, member, region).
all_uls.sort_by_key(|&Uls(_, sym, region)| (sym, region));
let (uls_left, uls_right) = match (uls_left.is_empty(), uls_right.is_empty()) {
(true, true) => return Ok((SubsSlice::default(), Default::default())),
(false, true) => return Ok((uls_left, Default::default())),
(true, false) => return Ok((uls_right, Default::default())),
(false, false) => (
env.subs.get_subs_slice(uls_left).to_vec(),
env.subs.get_subs_slice(uls_right).to_vec(),
),
};
// Now merge the variables of unspecialized lambdas pointing to the same
// member/region.
let mut whole_outcome = Outcome::default();
let mut j = 1;
while j < all_uls.len() {
let i = j - 1;
let Uls(var_i, sym_i, region_i) = all_uls[i];
let Uls(var_j, sym_j, region_j) = all_uls[j];
if sym_i == sym_j && region_i == region_j {
let outcome = unify_pool(env, pool, var_i, var_j, Mode::EQ);
if !outcome.mismatches.is_empty() {
return Err(outcome);
// Unfortunately, it is not an invariant that `uls_left` and `uls_right` obey ULS-SORT-ORDER before
// merging.
//
// That's because flex-able variables in unspecialized lambda sets may be unified at any time,
// and unification of flex-able variables may change their root keys, which ULS-SORT-ORDER
// considers.
//
// As such, we must sort beforehand. In practice these sets are very, very small (<5 elements).
let uls_left = sort_unspecialized_lambda_sets(env.subs, uls_left);
let uls_right = sort_unspecialized_lambda_sets(env.subs, uls_right);
let (mut uls_left, mut uls_right) = (uls_left.iter().peekable(), uls_right.iter().peekable());
let mut merged_uls = Vec::with_capacity(uls_left.len() + uls_right.len());
let mut whole_outcome = Outcome::default();
loop {
let (uls_l, uls_r) = match (uls_left.peek(), uls_right.peek()) {
(Some(uls_l), Some(uls_r)) => (**uls_l, **uls_r),
(Some(_), None) => {
merged_uls.push(*uls_left.next().unwrap());
continue;
}
(None, Some(_)) => {
merged_uls.push(*uls_right.next().unwrap());
continue;
}
(None, None) => break,
};
let Uls(var_l, sym_l, region_l) = uls_l;
let Uls(var_r, sym_r, region_r) = uls_r;
use std::cmp::Ordering::*;
match (sym_l, region_l).cmp(&(sym_r, region_r)) {
Less => {
// Left needs to catch up to right, add it to the merged lambdas.
merged_uls.push(*uls_left.next().unwrap());
}
Greater => {
// Right needs to catch up to left, add it to the merged lambdas.
merged_uls.push(*uls_right.next().unwrap());
}
Equal => {
// The interesting case - both point to the same specialization.
use Content::*;
match (
env.subs.get_content_without_compacting(var_l),
env.subs.get_content_without_compacting(var_r),
) {
(FlexAbleVar(..) | RigidAbleVar(..), FlexAbleVar(..) | RigidAbleVar(..)) => {
// If the types are root-equivalent, de-duplicate them.
//
// Otherwise, the type variables are disjoint, and we want to keep both
// of them, for purposes of disjoint variable lambda specialization.
//
// For more information, see "A Property thats lost, and how we can hold on to it"
// in solve/docs/ambient_lambda_set_specialization.md.
if env.subs.equivalent_without_compacting(var_l, var_r) {
// ... a1 ...
// ... b1=a1 ...
// => ... a1 ...
//
// Keep the one on the left, drop the one on the right.
//
// Then progress both, because the invariant tells us they must be
// disjoint, and if there were any concrete variables, they would have
// appeared earlier.
let _dropped = uls_right.next().unwrap();
let kept = uls_left.next().unwrap();
merged_uls.push(*kept);
} else if mode.is_lambda_set_specialization() {
// ... a1 ...
// ... b1 ...
// => ... a1=b1 ...
//
// If we're in the process of running the ambient lambda set
// specialization procedure, disjoint type variables being merged from
// the left and right lists are treated specially!
//
// In particular, we are unifying a local list of lambda sets, for
// which the specialization is for (on the left), with specialization
// lambda sets, which have just been freshened (on the right).
//
// [ .. a:lam:1 ] (local, undergoing specialization)
// [ .. a':lam:1 ] (specialization lambda sets, just freshened)
//
// Because the specialization lambdas are freshened, they certainly are
// disjoint from the local lambdas - but they may be equivalent in
// principle, from the perspective of a human looking at the
// unification!
//
// Running with the example above, the specialization lambda set has an
// unspecialized lambda `a':lam:1`. Now, this is disjoint from
// `a:lam:1` in the local lambda set, from the purely technical
// perspective that `a' != a`.
//
// But, in expected function, they **should not** be treated as disjoint!
// In this case, the specialization lambda is not introducing any new
// information, and is targeting exactly the local lambda `a:lam:1`.
//
// So, to avoid introducing superfluous variables, we unify these disjoint
// variables once, and then progress on both sides. We progress on both
// sides to avoid unifying more than what we should in our principle.
//
// It doesn't matter which side we choose to progress on, since after
// unification of flex vars roots are equivalent. So, choose the left
// side.
//
// See the ambient lambda set specialization document for more details.
let outcome = unify_pool(env, pool, var_l, var_r, mode);
if !outcome.mismatches.is_empty() {
return Err(outcome);
}
whole_outcome.union(outcome);
debug_assert!(env.subs.equivalent_without_compacting(var_l, var_r));
let _dropped = uls_right.next().unwrap();
let kept = uls_left.next().unwrap();
merged_uls.push(*kept);
} else {
// ... a1 ...
// ... b1 ...
// => ... a1, b1 ...
//
// Keep both. But, we have to be careful about how we do this -
// immediately add the one with the lower root, and advance that side;
// keep the other as-is, because the next variable on the advanced side
// might be lower than the current non-advanced variable. For example:
//
// ... 640 645 ...
// ... 670 ...
//
// we want to add `640` to the merged list and advance to
//
// ... 645 ...
// ... 670 ...
//
// rather than adding both `640` and `670`, and skipping the comparison
// of `645` with `670`.
//
// An important thing to notice is that we *don't* want to advance
// both sides, because if these two variables are disjoint, then
// advancing one side *might* make the next comparison be between
// equivalent variables, for example in a case like
//
// ... 640 670 ...
// ... 670 ...
//
// In the above case, we certainly only want to advance the left side!
if env.subs.get_root_key(var_l) < env.subs.get_root_key(var_r) {
let kept = uls_left.next().unwrap();
merged_uls.push(*kept);
} else {
let kept = uls_right.next().unwrap();
merged_uls.push(*kept);
}
}
}
(FlexAbleVar(..) | RigidAbleVar(..), _) => {
// ... a1 ...
// ... {foo: _} ...
// => ... {foo: _} ...
//
// Unify them, then advance the merged flex var.
let outcome = unify_pool(env, pool, var_l, var_r, mode);
if !outcome.mismatches.is_empty() {
return Err(outcome);
}
whole_outcome.union(outcome);
let _dropped = uls_right.next().unwrap();
}
(_, FlexAbleVar(..) | RigidAbleVar(..)) => {
// ... {foo: _} ...
// ... a1 ...
// => ... {foo: _} ...
//
// Unify them, then advance the merged flex var.
let outcome = unify_pool(env, pool, var_l, var_r, mode);
if !outcome.mismatches.is_empty() {
return Err(outcome);
}
whole_outcome.union(outcome);
let _dropped = uls_left.next().unwrap();
}
(_, _) => {
// ... {foo: _} ...
// ... {foo: _} ...
// => ... {foo: _} ...
//
// Unify them, then advance one.
// (the choice is arbitrary, so we choose the left)
let outcome = unify_pool(env, pool, var_l, var_r, mode);
if !outcome.mismatches.is_empty() {
return Err(outcome);
}
whole_outcome.union(outcome);
let _dropped = uls_left.next().unwrap();
}
whole_outcome.union(outcome);
// Keep the Uls in position `i` and remove the one in position `j`.
all_uls.remove(j);
} else {
// Keep both Uls, look at the next one.
j += 1;
}
}
Ok((
SubsSlice::extend_new(&mut env.subs.unspecialized_lambda_sets, all_uls),
whole_outcome,
))
}
}
debug_assert!(
is_sorted_unspecialized_lamba_set_list(env.subs, &merged_uls),
"merging of unspecialized lambda sets does not preserve sort! {:?}",
merged_uls
);
Ok((
SubsSlice::extend_new(&mut env.subs.unspecialized_lambda_sets, merged_uls),
whole_outcome,
))
}
fn unify_lambda_set_help<M: MetaCollector>(
@ -1316,7 +1554,7 @@ fn unify_lambda_set_help<M: MetaCollector>(
only_in_right,
joined,
},
) = separate_union_lambdas(env, pool, solved1, solved2);
) = separate_union_lambdas(env, pool, ctx.mode, solved1, solved2);
let all_lambdas = joined
.into_iter()
@ -1343,7 +1581,7 @@ fn unify_lambda_set_help<M: MetaCollector>(
(None, None) => OptVariable::NONE,
};
let merged_unspecialized = match unify_unspecialized_lambdas(env, pool, uls1, uls2) {
let merged_unspecialized = match unify_unspecialized_lambdas(env, pool, ctx.mode, uls1, uls2) {
Ok((merged, outcome)) => {
whole_outcome.union(outcome);
merged

View file

@ -1226,7 +1226,7 @@ mod test_reporting {
// variables they can put themselves in, and to run the constraint algorithm
// against that extra variable, rather than possibly having to translate a `Type`
// again.
@r#"
@r###"
CIRCULAR TYPE /code/proj/Main.roc
I'm inferring a weird self-referential type for `f`:
@ -1265,7 +1265,20 @@ mod test_reporting {
infinitely.
List -> List a
"#
CIRCULAR TYPE /code/proj/Main.roc
I'm inferring a weird self-referential type for `main`:
3 main =
^^^^
Here is my best effort at writing down the type. You will see for
parts of the type that repeat something already printed out
infinitely.
List -> List a
"###
);
test_report!(
@ -3613,8 +3626,8 @@ mod test_reporting {
Is there an import missing? Perhaps there is a typo. Did you mean one
of these?
List
Set
List
Dict
Result