Merge pull request #7480 from gamebox/new-pnc-expr-node

Move PNC apply to separate Expr/Pattern variant
Authored by Luke Boswell on 2025-01-09 10:17:19 +11:00, committed by GitHub
commit fbf448cac8
76 changed files with 1173 additions and 701 deletions
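
Roc has two application styles: whitespace application (`f a b`, `Ok x`) and parens-and-commas application (`f(a, b)`, `Ok(x)`), abbreviated "PNC". Until now the parser represented the PNC style as an ordinary `Apply` node carrying a style flag (`CalledVia::ParensAndCommas` for expressions, `PatternApplyStyle::ParensAndCommas` for patterns). As the diffs below show, this commit gives the style its own `Expr::PncApply` and `Pattern::PncApply` variants, removes `PatternApplyStyle`, and threads the new variants through desugaring, canonicalization, and the formatter. A rough sketch of the resulting shape, using toy types rather than the real arena-allocated `roc_parse` definitions:

```rust
// Toy sketch only — not the actual roc_parse types, which are arena-allocated,
// carry `Loc` regions, and store PNC arguments in a `Collection`.
#[allow(dead_code)]
#[derive(Debug)]
enum Expr {
    Var(&'static str),
    /// Whitespace application: `f a b`
    Apply(Box<Expr>, Vec<Expr>),
    /// Parens-and-commas ("PNC") application: `f(a, b)`
    PncApply(Box<Expr>, Vec<Expr>),
}

#[allow(dead_code)]
#[derive(Debug)]
enum Pattern {
    Tag(&'static str),
    Identifier(&'static str),
    /// `Ok payload` — whitespace style (no more `PatternApplyStyle` field)
    Apply(Box<Pattern>, Vec<Pattern>),
    /// `Ok(payload)` — parens-and-commas style
    PncApply(Box<Pattern>, Vec<Pattern>),
}

fn main() {
    // `f(a, b)` in the sketch representation:
    let call = Expr::PncApply(
        Box::new(Expr::Var("f")),
        vec![Expr::Var("a"), Expr::Var("b")],
    );
    // `Ok(x)` as a pattern:
    let pat = Pattern::PncApply(
        Box::new(Pattern::Tag("Ok")),
        vec![Pattern::Identifier("x")],
    );
    println!("{call:?}\n{pat:?}");
}
```

In the real AST the arguments of a `PncApply` are a `Collection` of located nodes, which is how the formatter can keep trailing comments inside the parentheses.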


@ -133,8 +133,10 @@ hash_dict = \hasher, dict -> Hash.hash_unordered(hasher, to_list(dict), List.wal
to_inspector_dict : Dict k v -> Inspector f where k implements Inspect & Hash & Eq, v implements Inspect, f implements InspectFormatter to_inspector_dict : Dict k v -> Inspector f where k implements Inspect & Hash & Eq, v implements Inspect, f implements InspectFormatter
to_inspector_dict = \dict -> to_inspector_dict = \dict ->
Inspect.custom(\fmt -> Inspect.custom(
Inspect.apply(Inspect.dict(dict, walk, Inspect.to_inspector, Inspect.to_inspector), fmt)) \fmt ->
Inspect.apply(Inspect.dict(dict, walk, Inspect.to_inspector, Inspect.to_inspector), fmt),
)
## Return an empty dictionary. ## Return an empty dictionary.
## ```roc ## ```roc
@ -142,13 +144,15 @@ to_inspector_dict = \dict ->
## ``` ## ```
empty : {} -> Dict * * empty : {} -> Dict * *
empty = \{} -> empty = \{} ->
@Dict({ @Dict(
{
buckets: [], buckets: [],
data: [], data: [],
max_bucket_capacity: 0, max_bucket_capacity: 0,
max_load_factor: default_max_load_factor, max_load_factor: default_max_load_factor,
shifts: initial_shifts, shifts: initial_shifts,
}) },
)
## Return a dictionary with space allocated for a number of entries. This ## Return a dictionary with space allocated for a number of entries. This
## may provide a performance optimization if you know how many entries will be ## may provide a performance optimization if you know how many entries will be
@ -169,13 +173,15 @@ reserve = \@Dict({ buckets, data, max_bucket_capacity: original_max_bucket_capac
if List.is_empty(buckets) || requested_shifts > shifts then if List.is_empty(buckets) || requested_shifts > shifts then
(buckets0, max_bucket_capacity) = alloc_buckets_from_shift(requested_shifts, max_load_factor) (buckets0, max_bucket_capacity) = alloc_buckets_from_shift(requested_shifts, max_load_factor)
buckets1 = fill_buckets_from_data(buckets0, data, requested_shifts) buckets1 = fill_buckets_from_data(buckets0, data, requested_shifts)
@Dict({ @Dict(
{
buckets: buckets1, buckets: buckets1,
data: List.reserve(data, Num.sub_saturated(size, current_size)), data: List.reserve(data, Num.sub_saturated(size, current_size)),
max_bucket_capacity, max_bucket_capacity,
max_load_factor, max_load_factor,
shifts: requested_shifts, shifts: requested_shifts,
}) },
)
else else
@Dict({ buckets, data, max_bucket_capacity: original_max_bucket_capacity, max_load_factor, shifts }) @Dict({ buckets, data, max_bucket_capacity: original_max_bucket_capacity, max_load_factor, shifts })
@ -191,13 +197,15 @@ release_excess_capacity = \@Dict({ buckets, data, max_bucket_capacity: original_
if min_shifts < shifts then if min_shifts < shifts then
(buckets0, max_bucket_capacity) = alloc_buckets_from_shift(min_shifts, max_load_factor) (buckets0, max_bucket_capacity) = alloc_buckets_from_shift(min_shifts, max_load_factor)
buckets1 = fill_buckets_from_data(buckets0, data, min_shifts) buckets1 = fill_buckets_from_data(buckets0, data, min_shifts)
@Dict({ @Dict(
{
buckets: buckets1, buckets: buckets1,
data: List.release_excess_capacity(data), data: List.release_excess_capacity(data),
max_bucket_capacity, max_bucket_capacity,
max_load_factor, max_load_factor,
shifts: min_shifts, shifts: min_shifts,
}) },
)
else else
@Dict({ buckets, data, max_bucket_capacity: original_max_bucket_capacity, max_load_factor, shifts }) @Dict({ buckets, data, max_bucket_capacity: original_max_bucket_capacity, max_load_factor, shifts })
@ -280,14 +288,16 @@ is_empty = \@Dict({ data }) ->
## ``` ## ```
clear : Dict k v -> Dict k v clear : Dict k v -> Dict k v
clear = \@Dict({ buckets, data, max_bucket_capacity, max_load_factor, shifts }) -> clear = \@Dict({ buckets, data, max_bucket_capacity, max_load_factor, shifts }) ->
@Dict({ @Dict(
{
buckets: List.map(buckets, \_ -> empty_bucket), buckets: List.map(buckets, \_ -> empty_bucket),
# use take_first to keep around the capacity # use take_first to keep around the capacity
data: List.take_first(data, 0), data: List.take_first(data, 0),
max_bucket_capacity, max_bucket_capacity,
max_load_factor, max_load_factor,
shifts, shifts,
}) },
)
## Convert each value in the dictionary to something new, by calling a conversion ## Convert each value in the dictionary to something new, by calling a conversion
## function on each of them which receives both the key and the old value. Then return a ## function on each of them which receives both the key and the old value. Then return a
@ -822,21 +832,25 @@ remove_bucket = \@Dict({ buckets: buckets0, data: data0, max_bucket_capacity, ma
bucket_index3 = scan_for_index(buckets2, bucket_index2, Num.to_u32(last_data_index)) bucket_index3 = scan_for_index(buckets2, bucket_index2, Num.to_u32(last_data_index))
swap_bucket = list_get_unsafe(buckets2, bucket_index3) swap_bucket = list_get_unsafe(buckets2, bucket_index3)
@Dict({ @Dict(
{
buckets: List.set(buckets2, bucket_index3, { swap_bucket & data_index: data_index_to_remove }), buckets: List.set(buckets2, bucket_index3, { swap_bucket & data_index: data_index_to_remove }),
data: List.drop_last(data1, 1), data: List.drop_last(data1, 1),
max_bucket_capacity, max_bucket_capacity,
max_load_factor, max_load_factor,
shifts, shifts,
}) },
)
else else
@Dict({ @Dict(
{
buckets: buckets2, buckets: buckets2,
data: List.drop_last(data0, 1), data: List.drop_last(data0, 1),
max_bucket_capacity, max_bucket_capacity,
max_load_factor, max_load_factor,
shifts, shifts,
}) },
)
scan_for_index : List Bucket, U64, U32 -> U64 scan_for_index : List Bucket, U64, U32 -> U64
scan_for_index = \buckets, bucket_index, data_index -> scan_for_index = \buckets, bucket_index, data_index ->
@ -863,13 +877,15 @@ increase_size = \@Dict({ data, max_bucket_capacity, max_load_factor, shifts }) -
new_shifts = shifts |> Num.sub_wrap(1) new_shifts = shifts |> Num.sub_wrap(1)
(buckets0, new_max_bucket_capacity) = alloc_buckets_from_shift(new_shifts, max_load_factor) (buckets0, new_max_bucket_capacity) = alloc_buckets_from_shift(new_shifts, max_load_factor)
buckets1 = fill_buckets_from_data(buckets0, data, new_shifts) buckets1 = fill_buckets_from_data(buckets0, data, new_shifts)
@Dict({ @Dict(
{
buckets: buckets1, buckets: buckets1,
data, data,
max_bucket_capacity: new_max_bucket_capacity, max_bucket_capacity: new_max_bucket_capacity,
max_load_factor, max_load_factor,
shifts: new_shifts, shifts: new_shifts,
}) },
)
else else
crash("Dict hit limit of $(Num.to_str(max_bucket_count)) elements. Unable to grow more.") crash("Dict hit limit of $(Num.to_str(max_bucket_count)) elements. Unable to grow more.")


@ -1092,11 +1092,15 @@ min = \list ->
min_help : List (Num a), Num a -> Num a min_help : List (Num a), Num a -> Num a
min_help = \list, initial -> min_help = \list, initial ->
List.walk(list, initial, \best_so_far, current -> List.walk(
list,
initial,
\best_so_far, current ->
if current < best_so_far then if current < best_so_far then
current current
else else
best_so_far) best_so_far,
)
max : List (Num a) -> Result (Num a) [ListWasEmpty] max : List (Num a) -> Result (Num a) [ListWasEmpty]
max = \list -> max = \list ->
@ -1109,11 +1113,15 @@ max = \list ->
max_help : List (Num a), Num a -> Num a max_help : List (Num a), Num a -> Num a
max_help = \list, initial -> max_help = \list, initial ->
List.walk(list, initial, \best_so_far, current -> List.walk(
list,
initial,
\best_so_far, current ->
if current > best_so_far then if current > best_so_far then
current current
else else
best_so_far) best_so_far,
)
## Like [List.map], except the transformation function wraps the return value ## Like [List.map], except the transformation function wraps the return value
## in a list. At the end, all the lists get joined together into one list. ## in a list. At the end, all the lists get joined together into one list.
@ -1156,11 +1164,15 @@ find_last = \list, pred ->
## If no satisfying element is found, an `Err NotFound` is returned. ## If no satisfying element is found, an `Err NotFound` is returned.
find_first_index : List elem, (elem -> Bool) -> Result U64 [NotFound] find_first_index : List elem, (elem -> Bool) -> Result U64 [NotFound]
find_first_index = \list, matcher -> find_first_index = \list, matcher ->
found_index = List.iterate(list, 0, \index, elem -> found_index = List.iterate(
list,
0,
\index, elem ->
if matcher(elem) then if matcher(elem) then
Break(index) Break(index)
else else
Continue(Num.add_wrap(index, 1))) Continue(Num.add_wrap(index, 1)),
)
when found_index is when found_index is
Break(index) -> Ok(index) Break(index) -> Ok(index)
@ -1171,13 +1183,17 @@ find_first_index = \list, matcher ->
## If no satisfying element is found, an `Err NotFound` is returned. ## If no satisfying element is found, an `Err NotFound` is returned.
find_last_index : List elem, (elem -> Bool) -> Result U64 [NotFound] find_last_index : List elem, (elem -> Bool) -> Result U64 [NotFound]
find_last_index = \list, matches -> find_last_index = \list, matches ->
found_index = List.iterate_backwards(list, List.len(list), \prev_index, elem -> found_index = List.iterate_backwards(
list,
List.len(list),
\prev_index, elem ->
answer = Num.sub_wrap(prev_index, 1) answer = Num.sub_wrap(prev_index, 1)
if matches(elem) then if matches(elem) then
Break(answer) Break(answer)
else else
Continue(answer)) Continue(answer),
)
when found_index is when found_index is
Break(index) -> Ok(index) Break(index) -> Ok(index)
@ -1214,10 +1230,14 @@ intersperse = \list, sep ->
capacity = 2 * List.len(list) capacity = 2 * List.len(list)
init = List.with_capacity(capacity) init = List.with_capacity(capacity)
new_list = new_list =
List.walk(list, init, \acc, elem -> List.walk(
list,
init,
\acc, elem ->
acc acc
|> List.append_unsafe(elem) |> List.append_unsafe(elem)
|> List.append_unsafe(sep)) |> List.append_unsafe(sep),
)
List.drop_last(new_list, 1) List.drop_last(new_list, 1)
@ -1359,9 +1379,16 @@ chunks_of_help = \list_rest, chunk_size, chunks ->
## If it returns `Ok` for every element, [map_try] returns `Ok` with the transformed list. ## If it returns `Ok` for every element, [map_try] returns `Ok` with the transformed list.
map_try : List elem, (elem -> Result ok err) -> Result (List ok) err map_try : List elem, (elem -> Result ok err) -> Result (List ok) err
map_try = \list, to_result -> map_try = \list, to_result ->
walk_try(list, [], \state, elem -> walk_try(
Result.map(to_result(elem), \ok -> list,
List.append(state, ok))) [],
\state, elem ->
Result.map(
to_result(elem),
\ok ->
List.append(state, ok),
),
)
## Same as [List.walk], except you can stop walking early by returning `Err`. ## Same as [List.walk], except you can stop walking early by returning `Err`.
## ##


@ -51,19 +51,25 @@ is_eq = \xs, ys ->
if len(xs) != len(ys) then if len(xs) != len(ys) then
Bool.false Bool.false
else else
walk_until(xs, Bool.true, \_, elem -> walk_until(
xs,
Bool.true,
\_, elem ->
if contains(ys, elem) then if contains(ys, elem) then
Continue(Bool.true) Continue(Bool.true)
else else
Break(Bool.false)) Break(Bool.false),
)
hash_set : hasher, Set k -> hasher where hasher implements Hasher hash_set : hasher, Set k -> hasher where hasher implements Hasher
hash_set = \hasher, @Set(inner) -> Hash.hash(hasher, inner) hash_set = \hasher, @Set(inner) -> Hash.hash(hasher, inner)
to_inspector_set : Set k -> Inspector f where k implements Inspect & Hash & Eq, f implements InspectFormatter to_inspector_set : Set k -> Inspector f where k implements Inspect & Hash & Eq, f implements InspectFormatter
to_inspector_set = \set -> to_inspector_set = \set ->
Inspect.custom(\fmt -> Inspect.custom(
Inspect.apply(Inspect.set(set, walk, Inspect.to_inspector), fmt)) \fmt ->
Inspect.apply(Inspect.set(set, walk, Inspect.to_inspector), fmt),
)
## Creates a new empty `Set`. ## Creates a new empty `Set`.
## ```roc ## ```roc
@ -326,8 +332,12 @@ map : Set a, (a -> b) -> Set b
map = \set, transform -> map = \set, transform ->
init = with_capacity(capacity(set)) init = with_capacity(capacity(set))
walk(set, init, \answer, k -> walk(
insert(answer, transform(k))) set,
init,
\answer, k ->
insert(answer, transform(k)),
)
## Like [Set.map], except the transformation function wraps the return value ## Like [Set.map], except the transformation function wraps the return value
## in a set. At the end, all the sets get joined together ## in a set. At the end, all the sets get joined together
@ -338,8 +348,12 @@ join_map : Set a, (a -> Set b) -> Set b
join_map = \set, transform -> join_map = \set, transform ->
init = with_capacity(capacity(set)) # Might be a pessimization init = with_capacity(capacity(set)) # Might be a pessimization
walk(set, init, \answer, k -> walk(
union(answer, transform(k))) set,
init,
\answer, k ->
union(answer, transform(k)),
)
## Iterate through the values of a given `Set` and build a value, can stop ## Iterate through the values of a given `Set` and build a value, can stop
## iterating part way through the collection. ## iterating part way through the collection.


@ -940,14 +940,16 @@ matches_at = \haystack, haystack_index, needle ->
needle_length = Str.count_utf8_bytes(needle) needle_length = Str.count_utf8_bytes(needle)
end_index = min(Num.add_saturated(haystack_index, needle_length), haystack_length) end_index = min(Num.add_saturated(haystack_index, needle_length), haystack_length)
matches_at_help({ matches_at_help(
{
haystack, haystack,
haystack_index, haystack_index,
needle, needle,
needle_index: 0, needle_index: 0,
needle_length, needle_length,
end_index, end_index,
}) },
)
matches_at_help = \state -> matches_at_help = \state ->
{ haystack, haystack_index, needle, needle_index, needle_length, end_index } = state { haystack, haystack_index, needle, needle_index, needle_length, end_index } = state


@ -108,10 +108,12 @@ err = \a -> @Task(\{} -> Err(a))
## matching to handle the success and possible failure cases. ## matching to handle the success and possible failure cases.
attempt : Task a b, (Result a b -> Task c d) -> Task c d attempt : Task a b, (Result a b -> Task c d) -> Task c d
attempt = \@Task(task), transform -> attempt = \@Task(task), transform ->
@Task(\{} -> @Task(
\{} ->
@Task(transformed) = transform(task({})) @Task(transformed) = transform(task({}))
transformed({})) transformed({}),
)
## Take the success value from a given [Task] and use that to generate a new [Task]. ## Take the success value from a given [Task] and use that to generate a new [Task].
## ##
@ -131,14 +133,16 @@ attempt = \@Task(task), transform ->
## ``` ## ```
await : Task a b, (a -> Task c b) -> Task c b await : Task a b, (a -> Task c b) -> Task c b
await = \@Task(task), transform -> await = \@Task(task), transform ->
@Task(\{} -> @Task(
\{} ->
when task({}) is when task({}) is
Ok(a) -> Ok(a) ->
@Task(transformed) = transform(a) @Task(transformed) = transform(a)
transformed({}) transformed({})
Err(b) -> Err(b) ->
Err(b)) Err(b),
)
## Take the error value from a given [Task] and use that to generate a new [Task]. ## Take the error value from a given [Task] and use that to generate a new [Task].
## ##
@ -149,14 +153,16 @@ await = \@Task(task), transform ->
## ``` ## ```
on_err : Task a b, (b -> Task a c) -> Task a c on_err : Task a b, (b -> Task a c) -> Task a c
on_err = \@Task(task), transform -> on_err = \@Task(task), transform ->
@Task(\{} -> @Task(
\{} ->
when task({}) is when task({}) is
Ok(a) -> Ok(a) ->
Ok(a) Ok(a)
Err(b) -> Err(b) ->
@Task(transformed) = transform(b) @Task(transformed) = transform(b)
transformed({})) transformed({}),
)
## Transform the success value of a given [Task] with a given function. ## Transform the success value of a given [Task] with a given function.
## ##
@ -167,10 +173,12 @@ on_err = \@Task(task), transform ->
## ``` ## ```
map : Task a c, (a -> b) -> Task b c map : Task a c, (a -> b) -> Task b c
map = \@Task(task), transform -> map = \@Task(task), transform ->
@Task(\{} -> @Task(
\{} ->
when task({}) is when task({}) is
Ok(a) -> Ok(transform(a)) Ok(a) -> Ok(transform(a))
Err(b) -> Err(b)) Err(b) -> Err(b),
)
## Transform the error value of a given [Task] with a given function. ## Transform the error value of a given [Task] with a given function.
## ##
@ -181,10 +189,12 @@ map = \@Task(task), transform ->
## ``` ## ```
map_err : Task c a, (a -> b) -> Task c b map_err : Task c a, (a -> b) -> Task c b
map_err = \@Task(task), transform -> map_err = \@Task(task), transform ->
@Task(\{} -> @Task(
\{} ->
when task({}) is when task({}) is
Ok(a) -> Ok(a) Ok(a) -> Ok(a)
Err(b) -> Err(transform(b))) Err(b) -> Err(transform(b)),
)
## Use a Result among other Tasks by converting it into a [Task]. ## Use a Result among other Tasks by converting it into a [Task].
from_result : Result a b -> Task a b from_result : Result a b -> Task a b
@ -197,8 +207,11 @@ from_result = \res ->
batch : Task a c -> (Task (a -> b) c -> Task b c) batch : Task a c -> (Task (a -> b) c -> Task b c)
batch = \current -> batch = \current ->
\next -> \next ->
await(next, \f -> await(
map(current, f)) next,
\f ->
map(current, f),
)
## Combine the values of two tasks with a custom combining function. ## Combine the values of two tasks with a custom combining function.
## ##
@ -214,11 +227,13 @@ batch = \current ->
## ``` ## ```
combine : Task a err, Task b err, (a, b -> c) -> Task c err combine : Task a err, Task b err, (a, b -> c) -> Task c err
combine = \@Task(left_task), @Task(right_task), combiner -> combine = \@Task(left_task), @Task(right_task), combiner ->
@Task(\{} -> @Task(
\{} ->
left = try(left_task, {}) left = try(left_task, {})
right = try(right_task, {}) right = try(right_task, {})
Ok(combiner(left, right))) Ok(combiner(left, right)),
)
## Apply each task in a list sequentially, and return a list of the resulting values. ## Apply each task in a list sequentially, and return a list of the resulting values.
## Each task will be awaited before beginning the next task. ## Each task will be awaited before beginning the next task.
@ -232,14 +247,20 @@ combine = \@Task(left_task), @Task(right_task), combiner ->
## ##
sequence : List (Task ok err) -> Task (List ok) err sequence : List (Task ok err) -> Task (List ok) err
sequence = \task_list -> sequence = \task_list ->
Task.loop((task_list, List.with_capacity(List.len(task_list))), \(tasks, values) -> Task.loop(
(task_list, List.with_capacity(List.len(task_list))),
\(tasks, values) ->
when tasks is when tasks is
[task, .. as rest] -> [task, .. as rest] ->
Task.map(task, \value -> Task.map(
Step((rest, List.append(values, value)))) task,
\value ->
Step((rest, List.append(values, value))),
)
[] -> [] ->
Task.ok(Done(values))) Task.ok(Done(values)),
)
## Apply a task repeatedly for each item in a list ## Apply a task repeatedly for each item in a list
## ##
@ -253,8 +274,12 @@ sequence = \task_list ->
## ##
for_each : List a, (a -> Task {} b) -> Task {} b for_each : List a, (a -> Task {} b) -> Task {} b
for_each = \items, fn -> for_each = \items, fn ->
List.walk(items, ok({}), \state, item -> List.walk(
state |> await(\_ -> fn(item))) items,
ok({}),
\state, item ->
state |> await(\_ -> fn(item)),
)
## Transform a task that can either succeed with `ok`, or fail with `err`, into ## Transform a task that can either succeed with `ok`, or fail with `err`, into
## a task that succeeds with `Result ok err`. ## a task that succeeds with `Result ok err`.
@ -274,5 +299,7 @@ for_each = \items, fn ->
## ##
result : Task ok err -> Task (Result ok err) * result : Task ok err -> Task (Result ok err) *
result = \@Task(task) -> result = \@Task(task) ->
@Task(\{} -> @Task(
Ok(task({}))) \{} ->
Ok(task({})),
)
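
The standard-library diffs above (Dict, List, Set, Str, and Task) look like pure reformatting: the same expressions, re-laid-out so that a call which no longer fits on one line gets one argument per line, a trailing comma, and the closing parenthesis on its own line. A minimal sketch of that layout rule under an assumed simple width cutoff — illustrative only, not the actual `roc_fmt` logic, which also tracks comments and indentation state:

```rust
// Illustrative sketch: choose between a one-line call and the
// "one argument per line, trailing comma" layout seen in the diffs above.
fn render_call(func: &str, args: &[&str], max_width: usize, indent: usize) -> String {
    let one_line = format!("{}({})", func, args.join(", "));
    if indent + one_line.len() <= max_width {
        return one_line;
    }
    let arg_pad = " ".repeat(indent + 4);
    let mut out = format!("{}(\n", func);
    for arg in args {
        out.push_str(&arg_pad);
        out.push_str(arg);
        out.push_str(",\n");
    }
    out.push_str(&" ".repeat(indent));
    out.push(')');
    out
}

fn main() {
    let rendered = render_call(
        "List.walk",
        &["list", "init", "\\acc, elem -> List.append(acc, elem)"],
        40,
        0,
    );
    println!("{rendered}");
}
```

Calls that already fit on one line (for example the single-line `@Dict({ buckets, data, ... })` constructions) are left untouched in the diffs above.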


@ -8,7 +8,7 @@
use roc_error_macros::internal_error; use roc_error_macros::internal_error;
use roc_module::{called_via::CalledVia, symbol::Symbol}; use roc_module::{called_via::CalledVia, symbol::Symbol};
use roc_parse::ast::{self, Collection, PatternApplyStyle}; use roc_parse::ast::{self, Collection};
use roc_region::all::{Loc, Region}; use roc_region::all::{Loc, Region};
use crate::{env::Env, pattern::Pattern, scope::Scope}; use crate::{env::Env, pattern::Pattern, scope::Scope};
@ -27,7 +27,6 @@ fn to_encoder<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
DERIVED_REGION, DERIVED_REGION,
ast::Pattern::Identifier { ident: payload }, ast::Pattern::Identifier { ident: payload },
)]), )]),
ast::PatternApplyStyle::Whitespace,
); );
// Encode.to_encoder(payload) // Encode.to_encoder(payload)
@ -40,7 +39,7 @@ fn to_encoder<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
module_name: "", module_name: "",
ident: payload, ident: payload,
})]), })]),
roc_module::called_via::CalledVia::Space, CalledVia::Space,
)); ));
// \@Opaq payload -> Encode.to_encoder(payload) // \@Opaq payload -> Encode.to_encoder(payload)
@ -133,7 +132,6 @@ fn hash<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
DERIVED_REGION, DERIVED_REGION,
ast::Pattern::Identifier { ident: payload }, ast::Pattern::Identifier { ident: payload },
)]), )]),
PatternApplyStyle::Whitespace,
); );
// Hash.hash(hasher, payload) // Hash.hash(hasher, payload)
@ -152,7 +150,7 @@ fn hash<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
ident: payload, ident: payload,
}), }),
]), ]),
roc_module::called_via::CalledVia::Space, CalledVia::Space,
)); ));
// \hasher, @Opaq payload -> Hash.hash(hasher, payload) // \hasher, @Opaq payload -> Hash.hash(hasher, payload)
@ -180,7 +178,6 @@ fn is_eq<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
DERIVED_REGION, DERIVED_REGION,
ast::Pattern::Identifier { ident: payload1 }, ast::Pattern::Identifier { ident: payload1 },
)]), )]),
PatternApplyStyle::Whitespace,
); );
// \@Opaq payload2 // \@Opaq payload2
let opaque2 = ast::Pattern::Apply( let opaque2 = ast::Pattern::Apply(
@ -189,7 +186,6 @@ fn is_eq<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
DERIVED_REGION, DERIVED_REGION,
ast::Pattern::Identifier { ident: payload2 }, ast::Pattern::Identifier { ident: payload2 },
)]), )]),
PatternApplyStyle::Whitespace,
); );
// Bool.is_eq(payload1, payload2) // Bool.is_eq(payload1, payload2)
@ -208,7 +204,7 @@ fn is_eq<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
ident: payload2, ident: payload2,
}), }),
]), ]),
roc_module::called_via::CalledVia::Space, CalledVia::Space,
)); ));
// \@Opaq payload1, @Opaq payload2 -> Bool.is_eq(payload1, payload2) // \@Opaq payload1, @Opaq payload2 -> Bool.is_eq(payload1, payload2)
@ -236,7 +232,6 @@ fn to_inspector<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
DERIVED_REGION, DERIVED_REGION,
ast::Pattern::Identifier { ident: payload }, ast::Pattern::Identifier { ident: payload },
)]), )]),
PatternApplyStyle::Whitespace,
); );
// Inspect.to_inspector(payload) // Inspect.to_inspector(payload)
@ -249,7 +244,7 @@ fn to_inspector<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
module_name: "", module_name: "",
ident: payload, ident: payload,
})]), })]),
roc_module::called_via::CalledVia::Space, CalledVia::Space,
)); ));
// Inspect.tag("@opaque", [Inspect.to_inspector(payload)]) // Inspect.tag("@opaque", [Inspect.to_inspector(payload)])
@ -264,7 +259,7 @@ fn to_inspector<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
ident: "tag", ident: "tag",
}), }),
&*env.arena.alloc([&*opaque_name, &*to_inspector_list]), &*env.arena.alloc([&*opaque_name, &*to_inspector_list]),
roc_module::called_via::CalledVia::Space, CalledVia::Space,
)); ));
let fmt = "#fmt"; let fmt = "#fmt";
@ -282,7 +277,7 @@ fn to_inspector<'a>(env: &mut Env<'a>, at_opaque: &'a str) -> ast::Expr<'a> {
ident: fmt, ident: fmt,
}), }),
]), ]),
roc_module::called_via::CalledVia::Space, CalledVia::Space,
)); ));
let custom_closure = alloc_expr(ast::Expr::Closure( let custom_closure = alloc_expr(ast::Expr::Closure(


@ -11,9 +11,8 @@ use roc_module::called_via::{BinOp, CalledVia};
use roc_module::ident::ModuleName; use roc_module::ident::ModuleName;
use roc_parse::ast::Expr::{self, *}; use roc_parse::ast::Expr::{self, *};
use roc_parse::ast::{ use roc_parse::ast::{
is_expr_suffixed, AssignedField, Collection, Defs, ModuleImportParams, Pattern, is_expr_suffixed, AssignedField, Collection, Defs, ModuleImportParams, Pattern, ResultTryKind,
PatternApplyStyle, ResultTryKind, StrLiteral, StrSegment, TryTarget, TypeAnnotation, ValueDef, StrLiteral, StrSegment, TryTarget, TypeAnnotation, ValueDef, WhenBranch,
WhenBranch,
}; };
use roc_problem::can::Problem; use roc_problem::can::Problem;
use roc_region::all::{Loc, Region}; use roc_region::all::{Loc, Region};
@ -50,6 +49,29 @@ fn new_op_call_expr<'a>(
Expr::LowLevelTry(desugared_left, ResultTryKind::KeywordPrefix), Expr::LowLevelTry(desugared_left, ResultTryKind::KeywordPrefix),
); );
} }
PncApply(&Loc { value: Try, .. }, arguments) => {
let try_fn = desugar_expr(env, scope, arguments.items.first().unwrap());
let mut args = Vec::with_capacity_in(arguments.len(), env.arena);
args.push(desugar_expr(env, scope, left));
args.extend(
arguments
.iter()
.skip(1)
.map(|a| desugar_expr(env, scope, a)),
);
return Loc::at(
region,
Expr::LowLevelTry(
env.arena.alloc(Loc::at(
region,
Expr::Apply(try_fn, args.into_bump_slice(), CalledVia::Try),
)),
ResultTryKind::KeywordPrefix,
),
);
}
Apply(&Loc { value: Try, .. }, arguments, _called_via) => { Apply(&Loc { value: Try, .. }, arguments, _called_via) => {
let try_fn = desugar_expr(env, scope, arguments.first().unwrap()); let try_fn = desugar_expr(env, scope, arguments.first().unwrap());
@ -95,6 +117,41 @@ fn new_op_call_expr<'a>(
), ),
); );
} }
PncApply(
&Loc {
value:
TrySuffix {
target: TryTarget::Result,
expr: fn_expr,
},
region: fn_region,
},
loc_args,
) => {
let loc_fn = env.arena.alloc(Loc::at(fn_region, *fn_expr));
let function = desugar_expr(env, scope, loc_fn);
let mut desugared_args = Vec::with_capacity_in(loc_args.len() + 1, env.arena);
desugared_args.push(desugar_expr(env, scope, left));
for loc_arg in loc_args.items {
desugared_args.push(desugar_expr(env, scope, loc_arg));
}
return Loc::at(
region,
LowLevelTry(
env.arena.alloc(Loc::at(
region,
Expr::Apply(
function,
desugared_args.into_bump_slice(),
CalledVia::Try,
),
)),
ResultTryKind::OperatorSuffix,
),
);
}
Apply( Apply(
&Loc { &Loc {
value: value:
@ -148,6 +205,16 @@ fn new_op_call_expr<'a>(
Apply(function, args, CalledVia::BinOp(Pizza)) Apply(function, args, CalledVia::BinOp(Pizza))
} }
PncApply(function, arguments) => {
let mut args = Vec::with_capacity_in(1 + arguments.len(), env.arena);
args.push(left);
args.extend(arguments.iter());
let args = args.into_bump_slice();
Apply(function, args, CalledVia::BinOp(Pizza))
}
Dbg => *desugar_dbg_expr(env, scope, left, region), Dbg => *desugar_dbg_expr(env, scope, left, region),
_ => { _ => {
// e.g. `1 |> (if b then (\a -> a) else (\c -> c))` // e.g. `1 |> (if b then (\a -> a) else (\c -> c))`
@ -178,10 +245,9 @@ fn new_op_call_expr<'a>(
env.arena.alloc(Loc::at(left.region, Pattern::Tag("Ok"))); env.arena.alloc(Loc::at(left.region, Pattern::Tag("Ok")));
branch_1_patts.push(Loc::at( branch_1_patts.push(Loc::at(
left.region, left.region,
Pattern::Apply( Pattern::PncApply(
branch_1_tag, branch_1_tag,
branch_1_patts_args.into_bump_slice(), Collection::with_items(branch_1_patts_args.into_bump_slice()),
PatternApplyStyle::ParensAndCommas,
), ),
)); ));
let branch_one: &WhenBranch<'_> = env.arena.alloc(WhenBranch { let branch_one: &WhenBranch<'_> = env.arena.alloc(WhenBranch {
@ -203,10 +269,9 @@ fn new_op_call_expr<'a>(
env.arena.alloc(Loc::at(left.region, Pattern::Tag("Err"))); env.arena.alloc(Loc::at(left.region, Pattern::Tag("Err")));
branch_2_patts.push(Loc::at( branch_2_patts.push(Loc::at(
right.region, right.region,
Pattern::Apply( Pattern::PncApply(
branch_2_tag, branch_2_tag,
branch_2_patts_args.into_bump_slice(), Collection::with_items(branch_2_patts_args.into_bump_slice()),
PatternApplyStyle::ParensAndCommas,
), ),
)); ));
let branch_two: &WhenBranch<'_> = env.arena.alloc(WhenBranch { let branch_two: &WhenBranch<'_> = env.arena.alloc(WhenBranch {
@ -1126,6 +1191,94 @@ pub fn desugar_expr<'a>(
region: loc_expr.region, region: loc_expr.region,
}) })
} }
PncApply(Loc { value: Dbg, .. }, loc_args) => {
if loc_args.is_empty() {
env.problem(Problem::UnappliedDbg {
region: loc_expr.region,
});
env.arena.alloc(Loc {
value: *desugar_invalid_dbg_expr(env, scope, loc_expr.region),
region: loc_expr.region,
})
} else if loc_args.len() > 1 {
let args_region = Region::span_across(
&loc_args.items.first().unwrap().region,
&loc_args.items.last().unwrap().region,
);
env.problem(Problem::OverAppliedDbg {
region: args_region,
});
env.arena.alloc(Loc {
value: *desugar_invalid_dbg_expr(env, scope, loc_expr.region),
region: loc_expr.region,
})
} else {
let desugared_arg = desugar_expr(env, scope, loc_args.items.first().unwrap());
env.arena.alloc(Loc {
value: *desugar_dbg_expr(env, scope, desugared_arg, loc_expr.region),
region: loc_expr.region,
})
}
}
PncApply(
Loc {
value: Try,
region: _,
},
loc_args,
) => {
let result_expr = if loc_args.len() == 1 {
desugar_expr(env, scope, loc_args.items[0])
} else {
let function = desugar_expr(env, scope, loc_args.items.first().unwrap());
let mut desugared_args = Vec::with_capacity_in(loc_args.len() - 1, env.arena);
for loc_arg in &loc_args.items[1..] {
desugared_args.push(desugar_expr(env, scope, loc_arg));
}
let args_region = Region::span_across(
&loc_args.items[0].region,
&loc_args.items[loc_args.items.len() - 1].region,
);
env.arena.alloc(Loc::at(
args_region,
Expr::Apply(function, desugared_args.into_bump_slice(), CalledVia::Try),
))
};
env.arena.alloc(Loc::at(
loc_expr.region,
Expr::LowLevelTry(result_expr, ResultTryKind::KeywordPrefix),
))
}
PncApply(loc_fn, loc_args) => {
let mut desugared_args = Vec::with_capacity_in(loc_args.len(), env.arena);
for loc_arg in loc_args.iter() {
let mut current = loc_arg.value;
let arg = loop {
match current {
SpaceBefore(expr, _) | SpaceAfter(expr, _) => {
current = *expr;
}
_ => break loc_arg,
}
};
desugared_args.push(desugar_expr(env, scope, arg));
}
let desugared_args = Collection::with_items(desugared_args.into_bump_slice());
env.arena.alloc(Loc {
value: PncApply(desugar_expr(env, scope, loc_fn), desugared_args),
region: loc_expr.region,
})
}
When(loc_cond_expr, branches) => { When(loc_cond_expr, branches) => {
let loc_desugared_cond = &*env.arena.alloc(desugar_expr(env, scope, loc_cond_expr)); let loc_desugared_cond = &*env.arena.alloc(desugar_expr(env, scope, loc_cond_expr));
let mut desugared_branches = Vec::with_capacity_in(branches.len(), env.arena); let mut desugared_branches = Vec::with_capacity_in(branches.len(), env.arena);
@ -1422,7 +1575,7 @@ fn desugar_pattern<'a>(env: &mut Env<'a>, scope: &mut Scope, pattern: Pattern<'a
| MalformedIdent(_, _) | MalformedIdent(_, _)
| QualifiedIdentifier { .. } => pattern, | QualifiedIdentifier { .. } => pattern,
Apply(tag, arg_patterns, style) => { Apply(tag, arg_patterns) => {
// Skip desugaring the tag, it should either be a Tag or OpaqueRef // Skip desugaring the tag, it should either be a Tag or OpaqueRef
let mut desugared_arg_patterns = Vec::with_capacity_in(arg_patterns.len(), env.arena); let mut desugared_arg_patterns = Vec::with_capacity_in(arg_patterns.len(), env.arena);
for arg_pattern in arg_patterns.iter() { for arg_pattern in arg_patterns.iter() {
@ -1432,7 +1585,22 @@ fn desugar_pattern<'a>(env: &mut Env<'a>, scope: &mut Scope, pattern: Pattern<'a
}); });
} }
Apply(tag, desugared_arg_patterns.into_bump_slice(), style) Apply(tag, desugared_arg_patterns.into_bump_slice())
}
PncApply(tag, arg_patterns) => {
// Skip desugaring the tag, it should either be a Tag or OpaqueRef
let mut desugared_arg_patterns = Vec::with_capacity_in(arg_patterns.len(), env.arena);
for arg_pattern in arg_patterns.iter() {
desugared_arg_patterns.push(Loc {
region: arg_pattern.region,
value: desugar_pattern(env, scope, arg_pattern.value),
});
}
PncApply(
tag,
Collection::with_items(desugared_arg_patterns.into_bump_slice()),
)
} }
RecordDestructure(field_patterns) => { RecordDestructure(field_patterns) => {
RecordDestructure(desugar_record_destructures(env, scope, field_patterns)) RecordDestructure(desugar_record_destructures(env, scope, field_patterns))
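
In the `desugar.rs` changes above, the new `PncApply` arms mirror the existing whitespace `Apply` arms: for the `|>` (pizza) operator the piped-in value is prepended as the first argument, and for `try` / the `?` suffix the call is rebuilt around the desugared function and wrapped in `LowLevelTry`. A toy sketch of the prepend rule, with invented types rather than the arena-allocated `roc_parse` AST:

```rust
// Toy AST sketch of the `|>` desugaring rule the new PncApply arm reuses:
// `left |> f(a, b)` becomes a call of `f` with `left` prepended to the args.
#[derive(Debug)]
enum Expr {
    Var(&'static str),
    Call { func: Box<Expr>, args: Vec<Expr> },
}

fn desugar_pizza(left: Expr, right: Expr) -> Expr {
    match right {
        // Right side is already a call: prepend `left` to its arguments.
        Expr::Call { func, mut args } => {
            args.insert(0, left);
            Expr::Call { func, args }
        }
        // Right side is a bare function value: call it with `left` alone.
        func => Expr::Call { func: Box::new(func), args: vec![left] },
    }
}

fn main() {
    let rhs = Expr::Call {
        func: Box::new(Expr::Var("f")),
        args: vec![Expr::Var("a"), Expr::Var("b")],
    };
    // `x |> f(a, b)`  ==>  `f(x, a, b)`
    println!("{:?}", desugar_pizza(Expr::Var("x"), rhs));
}
```

So `x |> f(a, b)` desugars to a call of `f` with arguments `x, a, b`, exactly as the whitespace form `x |> f a b` already did.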


@ -11,6 +11,7 @@ use crate::pattern::{canonicalize_pattern, BindingsFromPattern, Pattern, PermitS
use crate::procedure::{QualifiedReference, References}; use crate::procedure::{QualifiedReference, References};
use crate::scope::{Scope, SymbolLookup}; use crate::scope::{Scope, SymbolLookup};
use crate::traverse::{walk_expr, Visitor}; use crate::traverse::{walk_expr, Visitor};
use bumpalo::collections::Vec as BumpVec;
use roc_collections::soa::index_push_new; use roc_collections::soa::index_push_new;
use roc_collections::{SendMap, VecMap, VecSet}; use roc_collections::{SendMap, VecMap, VecSet};
use roc_error_macros::internal_error; use roc_error_macros::internal_error;
@ -760,6 +761,166 @@ pub struct WhenBranch {
pub redundant: RedundantMark, pub redundant: RedundantMark,
} }
#[allow(clippy::too_many_arguments)]
fn canonicalize_expr_apply<'a>(
env: &mut Env<'a>,
var_store: &mut VarStore,
scope: &mut Scope,
loc_fn: &&'a Loc<ast::Expr>,
args: BumpVec<'a, (Variable, Loc<Expr>)>,
output: &mut Output,
region: Region,
application_style: CalledVia,
) -> Expr {
use Expr::*;
let fn_region = loc_fn.region;
if let ast::Expr::OpaqueRef(name) = loc_fn.value {
// We treat opaques specially, since an opaque can wrap exactly one argument.
if args.is_empty() {
let loc_name = Loc::at(region, (*name).into());
let problem = roc_problem::can::RuntimeError::OpaqueNotApplied(loc_name);
env.problem(Problem::RuntimeError(problem.clone()));
RuntimeError(problem)
} else if args.len() > 1 {
let problem = roc_problem::can::RuntimeError::OpaqueAppliedToMultipleArgs(region);
env.problem(Problem::RuntimeError(problem.clone()));
RuntimeError(problem)
} else {
match scope.lookup_opaque_ref(name, loc_fn.region) {
Err(runtime_error) => {
env.problem(Problem::RuntimeError(runtime_error.clone()));
RuntimeError(runtime_error)
}
Ok((name, opaque_def)) => {
let argument = Box::new(args.first().unwrap().clone());
output
.references
.insert_type_lookup(name, QualifiedReference::Unqualified);
let (type_arguments, lambda_set_variables, specialized_def_type) =
freshen_opaque_def(var_store, opaque_def);
OpaqueRef {
opaque_var: var_store.fresh(),
name,
argument,
specialized_def_type: Box::new(specialized_def_type),
type_arguments,
lambda_set_variables,
}
}
}
}
} else if let ast::Expr::Crash = loc_fn.value {
// We treat crash specially, since crashing must be applied with one argument.
debug_assert!(!args.is_empty());
let crash = if args.len() > 1 {
let args_region = Region::span_across(
&args.first().unwrap().1.region,
&args.last().unwrap().1.region,
);
env.problem(Problem::OverAppliedCrash {
region: args_region,
});
// Still crash, just with our own message, and drop the references.
Crash {
msg: Box::new(Loc::at(
region,
Expr::Str(String::from("hit a crash!").into_boxed_str()),
)),
ret_var: var_store.fresh(),
}
} else {
let msg = args.first().unwrap();
Crash {
msg: Box::new(msg.1.clone()),
ret_var: var_store.fresh(),
}
};
crash
} else {
// Canonicalize the function expression and its arguments
let (fn_expr, fn_expr_output) =
canonicalize_expr(env, var_store, scope, fn_region, &loc_fn.value);
output.union(fn_expr_output);
// Default: We're not tail-calling a symbol (by name), we're tail-calling a function value.
output.tail_calls = vec![];
match fn_expr.value {
Var(symbol, _) => {
output.references.insert_call(symbol);
// we're tail-calling a symbol by name, check if it's the tail-callable symbol
if env
.tailcallable_symbol
.is_some_and(|tc_sym| tc_sym == symbol)
{
output.tail_calls.push(symbol);
}
Call(
Box::new((
var_store.fresh(),
fn_expr,
var_store.fresh(),
var_store.fresh(),
var_store.fresh(),
)),
args.to_vec(),
application_style,
)
}
RuntimeError(_) => {
// We can't call a runtime error; bail out by propagating it!
return fn_expr.value;
}
Tag {
tag_union_var: variant_var,
ext_var,
name,
..
} => Tag {
tag_union_var: variant_var,
ext_var,
name,
arguments: args.to_vec(),
},
ZeroArgumentTag {
variant_var,
ext_var,
name,
..
} => Tag {
tag_union_var: variant_var,
ext_var,
name,
arguments: args.to_vec(),
},
_ => {
// This could be something like ((if True then fn1 else fn2) arg1 arg2).
Call(
Box::new((
var_store.fresh(),
fn_expr,
var_store.fresh(),
var_store.fresh(),
var_store.fresh(),
)),
args.to_vec(),
application_style,
)
}
}
}
}
pub fn canonicalize_expr<'a>( pub fn canonicalize_expr<'a>(
env: &mut Env<'a>, env: &mut Env<'a>,
var_store: &mut VarStore, var_store: &mut VarStore,
@ -923,13 +1084,36 @@ pub fn canonicalize_expr<'a>(
) )
} }
} }
ast::Expr::PncApply(loc_fn, loc_args) => {
// The function's return type
let mut args = BumpVec::with_capacity_in(loc_args.items.len(), env.arena);
let mut output = Output::default();
for loc_arg in loc_args.items.iter() {
let (arg_expr, arg_out) =
canonicalize_expr(env, var_store, scope, loc_arg.region, &loc_arg.value);
args.push((var_store.fresh(), arg_expr));
output.references.union_mut(&arg_out.references);
}
let value = canonicalize_expr_apply(
env,
var_store,
scope,
loc_fn,
args,
&mut output,
region,
CalledVia::Space,
);
(value, output)
}
ast::Expr::Apply(loc_fn, loc_args, application_style) => { ast::Expr::Apply(loc_fn, loc_args, application_style) => {
// The expression that evaluates to the function being called, e.g. `foo` in // The expression that evaluates to the function being called, e.g. `foo` in
// (foo) bar baz // (foo) bar baz
let fn_region = loc_fn.region;
// The function's return type // The function's return type
let mut args = Vec::new(); let mut args = BumpVec::with_capacity_in(loc_args.len(), env.arena);
let mut output = Output::default(); let mut output = Output::default();
for loc_arg in loc_args.iter() { for loc_arg in loc_args.iter() {
@ -939,164 +1123,17 @@ pub fn canonicalize_expr<'a>(
args.push((var_store.fresh(), arg_expr)); args.push((var_store.fresh(), arg_expr));
output.references.union_mut(&arg_out.references); output.references.union_mut(&arg_out.references);
} }
let value = canonicalize_expr_apply(
if let ast::Expr::OpaqueRef(name) = loc_fn.value { env,
// We treat opaques specially, since an opaque can wrap exactly one argument. var_store,
scope,
debug_assert!(!args.is_empty()); loc_fn,
args,
if args.len() > 1 { &mut output,
let problem =
roc_problem::can::RuntimeError::OpaqueAppliedToMultipleArgs(region);
env.problem(Problem::RuntimeError(problem.clone()));
(RuntimeError(problem), output)
} else {
match scope.lookup_opaque_ref(name, loc_fn.region) {
Err(runtime_error) => {
env.problem(Problem::RuntimeError(runtime_error.clone()));
(RuntimeError(runtime_error), output)
}
Ok((name, opaque_def)) => {
let argument = Box::new(args.pop().unwrap());
output
.references
.insert_type_lookup(name, QualifiedReference::Unqualified);
let (type_arguments, lambda_set_variables, specialized_def_type) =
freshen_opaque_def(var_store, opaque_def);
let opaque_ref = OpaqueRef {
opaque_var: var_store.fresh(),
name,
argument,
specialized_def_type: Box::new(specialized_def_type),
type_arguments,
lambda_set_variables,
};
(opaque_ref, output)
}
}
}
} else if let ast::Expr::Crash = loc_fn.value {
// We treat crash specially, since crashing must be applied with one argument.
debug_assert!(!args.is_empty());
let mut args = Vec::new();
let mut output = Output::default();
for loc_arg in loc_args.iter() {
let (arg_expr, arg_out) =
canonicalize_expr(env, var_store, scope, loc_arg.region, &loc_arg.value);
args.push(arg_expr);
output.references.union_mut(&arg_out.references);
}
let crash = if args.len() > 1 {
let args_region = Region::span_across(
&loc_args.first().unwrap().region,
&loc_args.last().unwrap().region,
);
env.problem(Problem::OverAppliedCrash {
region: args_region,
});
// Still crash, just with our own message, and drop the references.
Crash {
msg: Box::new(Loc::at(
region, region,
Expr::Str(String::from("hit a crash!").into_boxed_str()),
)),
ret_var: var_store.fresh(),
}
} else {
let msg = args.pop().unwrap();
Crash {
msg: Box::new(msg),
ret_var: var_store.fresh(),
}
};
(crash, output)
} else {
// Canonicalize the function expression and its arguments
let (fn_expr, fn_expr_output) =
canonicalize_expr(env, var_store, scope, fn_region, &loc_fn.value);
output.union(fn_expr_output);
// Default: We're not tail-calling a symbol (by name), we're tail-calling a function value.
output.tail_calls = vec![];
let expr = match fn_expr.value {
Var(symbol, _) => {
output.references.insert_call(symbol);
// we're tail-calling a symbol by name, check if it's the tail-callable symbol
if env
.tailcallable_symbol
.is_some_and(|tc_sym| tc_sym == symbol)
{
output.tail_calls.push(symbol);
}
Call(
Box::new((
var_store.fresh(),
fn_expr,
var_store.fresh(),
var_store.fresh(),
var_store.fresh(),
)),
args,
*application_style, *application_style,
) );
} (value, output)
RuntimeError(_) => {
// We can't call a runtime error; bail out by propagating it!
return (fn_expr, output);
}
Tag {
tag_union_var: variant_var,
ext_var,
name,
..
} => Tag {
tag_union_var: variant_var,
ext_var,
name,
arguments: args,
},
ZeroArgumentTag {
variant_var,
ext_var,
name,
..
} => Tag {
tag_union_var: variant_var,
ext_var,
name,
arguments: args,
},
_ => {
// This could be something like ((if True then fn1 else fn2) arg1 arg2).
Call(
Box::new((
var_store.fresh(),
fn_expr,
var_store.fresh(),
var_store.fresh(),
var_store.fresh(),
)),
args,
*application_style,
)
}
};
(expr, output)
}
} }
ast::Expr::Var { module_name, ident } => { ast::Expr::Var { module_name, ident } => {
canonicalize_var_lookup(env, var_store, scope, module_name, ident, region) canonicalize_var_lookup(env, var_store, scope, module_name, ident, region)
@ -2266,6 +2303,12 @@ pub fn is_valid_interpolation(expr: &ast::Expr<'_>) -> bool {
.iter() .iter()
.all(|loc_arg| is_valid_interpolation(&loc_arg.value)) .all(|loc_arg| is_valid_interpolation(&loc_arg.value))
} }
ast::Expr::PncApply(loc_expr, args) => {
is_valid_interpolation(&loc_expr.value)
&& args
.iter()
.all(|loc_arg| is_valid_interpolation(&loc_arg.value))
}
ast::Expr::BinOps(loc_exprs, loc_expr) => { ast::Expr::BinOps(loc_exprs, loc_expr) => {
is_valid_interpolation(&loc_expr.value) is_valid_interpolation(&loc_expr.value)
&& loc_exprs && loc_exprs
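
The largest change in this file is a pure extraction: the body of the old `Apply` arm now lives in `canonicalize_expr_apply`, and both the whitespace `Apply` arm and the new `PncApply` arm canonicalize their arguments and delegate to it (the helper still special-cases opaque references, which take exactly one argument, and `crash`). A toy sketch of that sharing pattern, with made-up types in place of `roc_can`'s:

```rust
// Toy sketch of the sharing pattern: both application styles canonicalize
// their arguments, then hand everything to one helper.
#[allow(dead_code)]
#[derive(Debug)]
enum AstExpr {
    Var(&'static str),
    Apply(Box<AstExpr>, Vec<AstExpr>),    // `f a b`
    PncApply(Box<AstExpr>, Vec<AstExpr>), // `f(a, b)`
}

#[derive(Debug)]
enum CanExpr {
    Var(&'static str),
    Call(Box<CanExpr>, Vec<CanExpr>),
}

fn canonicalize(expr: &AstExpr) -> CanExpr {
    match expr {
        AstExpr::Var(name) => CanExpr::Var(*name),
        // Both styles funnel into the same helper, just as the real `Apply`
        // and `PncApply` arms now both call `canonicalize_expr_apply`.
        AstExpr::Apply(func, args) | AstExpr::PncApply(func, args) => {
            let can_args: Vec<CanExpr> = args.iter().map(canonicalize).collect();
            canonicalize_apply(func, can_args)
        }
    }
}

// Stand-in for `canonicalize_expr_apply`: the real helper also handles opaque
// references, `crash`, tags, and tail-call bookkeeping before building a call.
fn canonicalize_apply(func: &AstExpr, args: Vec<CanExpr>) -> CanExpr {
    CanExpr::Call(Box::new(canonicalize(func)), args)
}

fn main() {
    let expr = AstExpr::PncApply(
        Box::new(AstExpr::Var("f")),
        vec![AstExpr::Var("x"), AstExpr::Var("y")],
    );
    println!("{:?}", canonicalize(&expr));
}
```

Keeping one helper means the two surface syntaxes cannot drift apart in how they handle tags, opaques, tail calls, or `crash`.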


@ -6,6 +6,7 @@ use crate::num::{
ParsedNumResult, ParsedNumResult,
}; };
use crate::scope::{PendingAbilitiesInScope, Scope}; use crate::scope::{PendingAbilitiesInScope, Scope};
use bumpalo::collections::Vec as BumpVec;
use roc_exhaustive::ListArity; use roc_exhaustive::ListArity;
use roc_module::ident::{Ident, Lowercase, TagName}; use roc_module::ident::{Ident, Lowercase, TagName};
use roc_module::symbol::Symbol; use roc_module::symbol::Symbol;
@ -367,6 +368,75 @@ fn canonicalize_pattern_symbol(
} }
} }
pub fn canonicalize_apply_tag<'a>(
tag: Loc<ast::Pattern<'a>>,
env: &mut Env<'a>,
var_store: &mut VarStore,
scope: &mut Scope,
output: &mut Output,
region: Region,
can_patterns: BumpVec<(Variable, Loc<Pattern>)>,
) -> Pattern {
use ast::Pattern::*;
match tag.value {
Tag(name) => {
let tag_name = TagName(name.into());
Pattern::AppliedTag {
whole_var: var_store.fresh(),
ext_var: var_store.fresh(),
tag_name,
arguments: can_patterns.to_vec(),
}
}
OpaqueRef(name) => match scope.lookup_opaque_ref(name, tag.region) {
Ok((opaque, opaque_def)) => {
debug_assert!(!can_patterns.is_empty());
if can_patterns.len() > 1 {
env.problem(Problem::RuntimeError(
RuntimeError::OpaqueAppliedToMultipleArgs(region),
));
Pattern::UnsupportedPattern(region)
} else {
let argument = Box::new(can_patterns[0].clone());
let (type_arguments, lambda_set_variables, specialized_def_type) =
freshen_opaque_def(var_store, opaque_def);
output.references.insert_type_lookup(
opaque,
crate::procedure::QualifiedReference::Unqualified,
);
Pattern::UnwrappedOpaque {
whole_var: var_store.fresh(),
opaque,
argument,
specialized_def_type: Box::new(specialized_def_type),
type_arguments,
lambda_set_variables,
}
}
}
Err(runtime_error) => {
env.problem(Problem::RuntimeError(runtime_error));
Pattern::OpaqueNotInScope(Loc::at(tag.region, name.into()))
}
},
_ => {
env.problem(Problem::RuntimeError(RuntimeError::MalformedPattern(
MalformedPatternProblem::CantApplyPattern,
tag.region,
)));
Pattern::UnsupportedPattern(region)
}
}
}
#[allow(clippy::too_many_arguments)] #[allow(clippy::too_many_arguments)]
pub fn canonicalize_pattern<'a>( pub fn canonicalize_pattern<'a>(
env: &mut Env<'a>, env: &mut Env<'a>,
@ -411,9 +481,9 @@ pub fn canonicalize_pattern<'a>(
))); )));
Pattern::UnsupportedPattern(region) Pattern::UnsupportedPattern(region)
} }
Apply(tag, patterns, _) => { PncApply(tag, patterns) => {
let mut can_patterns = Vec::with_capacity(patterns.len()); let mut can_patterns = BumpVec::with_capacity_in(patterns.len(), env.arena);
for loc_pattern in *patterns { for loc_pattern in patterns.items.iter() {
let can_pattern = canonicalize_pattern( let can_pattern = canonicalize_pattern(
env, env,
var_store, var_store,
@ -428,63 +498,25 @@ pub fn canonicalize_pattern<'a>(
can_patterns.push((var_store.fresh(), can_pattern)); can_patterns.push((var_store.fresh(), can_pattern));
} }
match tag.value { canonicalize_apply_tag(**tag, env, var_store, scope, output, region, can_patterns)
Tag(name) => {
let tag_name = TagName(name.into());
Pattern::AppliedTag {
whole_var: var_store.fresh(),
ext_var: var_store.fresh(),
tag_name,
arguments: can_patterns,
} }
} Apply(tag, patterns) => {
let mut can_patterns = BumpVec::with_capacity_in(patterns.len(), env.arena);
OpaqueRef(name) => match scope.lookup_opaque_ref(name, tag.region) { for loc_pattern in *patterns {
Ok((opaque, opaque_def)) => { let can_pattern = canonicalize_pattern(
debug_assert!(!can_patterns.is_empty()); env,
var_store,
if can_patterns.len() > 1 { scope,
env.problem(Problem::RuntimeError( output,
RuntimeError::OpaqueAppliedToMultipleArgs(region), pattern_type,
)); &loc_pattern.value,
loc_pattern.region,
Pattern::UnsupportedPattern(region) permit_shadows,
} else {
let argument = Box::new(can_patterns.pop().unwrap());
let (type_arguments, lambda_set_variables, specialized_def_type) =
freshen_opaque_def(var_store, opaque_def);
output.references.insert_type_lookup(
opaque,
crate::procedure::QualifiedReference::Unqualified,
); );
Pattern::UnwrappedOpaque { can_patterns.push((var_store.fresh(), can_pattern));
whole_var: var_store.fresh(),
opaque,
argument,
specialized_def_type: Box::new(specialized_def_type),
type_arguments,
lambda_set_variables,
}
}
}
Err(runtime_error) => {
env.problem(Problem::RuntimeError(runtime_error));
Pattern::OpaqueNotInScope(Loc::at(tag.region, name.into()))
}
},
_ => {
env.problem(Problem::RuntimeError(RuntimeError::MalformedPattern(
MalformedPatternProblem::CantApplyPattern,
tag.region,
)));
Pattern::UnsupportedPattern(region)
}
} }
canonicalize_apply_tag(**tag, env, var_store, scope, output, region, can_patterns)
} }
&FloatLiteral(str) => match pattern_type { &FloatLiteral(str) => match pattern_type {


@ -88,7 +88,7 @@ Defs {
[ [
WhenBranch { WhenBranch {
patterns: [ patterns: [
@24-32 Apply( @24-32 PncApply(
@24-32 Tag( @24-32 Tag(
"Ok", "Ok",
), ),
@ -97,7 +97,6 @@ Defs {
ident: "success_BRANCH1_24_32", ident: "success_BRANCH1_24_32",
}, },
], ],
ParensAndCommas,
), ),
], ],
value: @24-32 Var { value: @24-32 Var {
@ -108,7 +107,7 @@ Defs {
}, },
WhenBranch { WhenBranch {
patterns: [ patterns: [
@36-39 Apply( @36-39 PncApply(
@24-32 Tag( @24-32 Tag(
"Err", "Err",
), ),
@ -117,7 +116,6 @@ Defs {
"", "",
), ),
], ],
ParensAndCommas,
), ),
], ],
value: @36-39 Num( value: @36-39 Num(


@ -78,7 +78,6 @@ Defs {
ident: "name", ident: "name",
}, },
], ],
Whitespace,
), ),
], ],
value: @125-154 Apply( value: @125-154 Apply(


@ -19,8 +19,8 @@ use roc_error_macros::internal_error;
use roc_parse::ast::{ use roc_parse::ast::{
AbilityMember, Defs, Expr, ExtractSpaces, ImportAlias, ImportAsKeyword, ImportExposingKeyword, AbilityMember, Defs, Expr, ExtractSpaces, ImportAlias, ImportAsKeyword, ImportExposingKeyword,
ImportedModuleName, IngestedFileAnnotation, IngestedFileImport, ModuleImport, ImportedModuleName, IngestedFileAnnotation, IngestedFileImport, ModuleImport,
ModuleImportParams, Pattern, PatternApplyStyle, Spaces, SpacesBefore, StrLiteral, ModuleImportParams, Pattern, Spaces, SpacesBefore, StrLiteral, TypeAnnotation, TypeDef,
TypeAnnotation, TypeDef, TypeHeader, ValueDef, TypeHeader, ValueDef,
}; };
use roc_parse::expr::merge_spaces; use roc_parse::expr::merge_spaces;
use roc_parse::header::Keyword; use roc_parse::header::Keyword;
@ -556,7 +556,8 @@ impl<'a> Formattable for TypeHeader<'a> {
Parens::NotNeeded, Parens::NotNeeded,
indent, indent,
self.vars.iter().any(|v| v.is_multiline()), self.vars.iter().any(|v| v.is_multiline()),
PatternApplyStyle::Whitespace, false,
None,
); );
buf.flags = old_flags; buf.flags = old_flags;
} }
@ -569,7 +570,6 @@ fn type_head_lift_spaces<'a, 'b: 'a>(
let pat = Pattern::Apply( let pat = Pattern::Apply(
arena.alloc(Loc::at(head.name.region, Pattern::Tag(head.name.value))), arena.alloc(Loc::at(head.name.region, Pattern::Tag(head.name.value))),
head.vars, head.vars,
PatternApplyStyle::Whitespace,
); );
pattern_lift_spaces(arena, &pat) pattern_lift_spaces(arena, &pat)
@ -891,9 +891,7 @@ impl<'a> Formattable for ValueDef<'a> {
fn ann_pattern_needs_parens(value: &Pattern<'_>) -> bool { fn ann_pattern_needs_parens(value: &Pattern<'_>) -> bool {
match value.extract_spaces().item { match value.extract_spaces().item {
Pattern::Tag(_) => true, Pattern::Tag(_) => true,
Pattern::Apply(func, _args, _style) Pattern::Apply(func, _args) if matches!(func.extract_spaces().item, Pattern::Tag(..)) => {
if matches!(func.extract_spaces().item, Pattern::Tag(..)) =>
{
true true
} }
_ => false, _ => false,
@ -1085,6 +1083,13 @@ pub fn fmt_body<'a>(
.. ..
}, },
.., ..,
)
| Expr::PncApply(
Loc {
value: Expr::Str(StrLiteral::Block(..)),
..
},
..,
) => { ) => {
buf.spaces(1); buf.spaces(1);
body.format_with_options(buf, Parens::NotNeeded, Newlines::Yes, indent + INDENT); body.format_with_options(buf, Parens::NotNeeded, Newlines::Yes, indent + INDENT);
@ -1148,6 +1153,7 @@ fn starts_with_expect_ident(expr: &Expr<'_>) -> bool {
// If we removed the `{}=` in this case, that would change the meaning // If we removed the `{}=` in this case, that would change the meaning
match expr { match expr {
Expr::Apply(inner, _, _) => starts_with_expect_ident(&inner.value), Expr::Apply(inner, _, _) => starts_with_expect_ident(&inner.value),
Expr::PncApply(inner, _) => starts_with_expect_ident(&inner.value),
Expr::Var { module_name, ident } => { Expr::Var { module_name, ident } => {
module_name.is_empty() && (*ident == "expect" || *ident == "expect!") module_name.is_empty() && (*ident == "expect" || *ident == "expect!")
} }
@ -1162,6 +1168,7 @@ pub fn starts_with_block_string_literal(expr: &Expr<'_>) -> bool {
starts_with_block_string_literal(inner) starts_with_block_string_literal(inner)
} }
Expr::Apply(inner, _, _) => starts_with_block_string_literal(&inner.value), Expr::Apply(inner, _, _) => starts_with_block_string_literal(&inner.value),
Expr::PncApply(inner, _) => starts_with_block_string_literal(&inner.value),
Expr::TrySuffix { target: _, expr } => starts_with_block_string_literal(expr), Expr::TrySuffix { target: _, expr } => starts_with_block_string_literal(expr),
_ => false, _ => false,
} }


@ -11,7 +11,7 @@ use crate::spaces::{
use crate::Buf; use crate::Buf;
use bumpalo::collections::Vec; use bumpalo::collections::Vec;
use bumpalo::Bump; use bumpalo::Bump;
use roc_module::called_via::{self, BinOp, CalledVia, UnaryOp}; use roc_module::called_via::{self, BinOp, UnaryOp};
use roc_parse::ast::{ use roc_parse::ast::{
AssignedField, Base, Collection, CommentOrNewline, Expr, ExtractSpaces, Pattern, Spaceable, AssignedField, Base, Collection, CommentOrNewline, Expr, ExtractSpaces, Pattern, Spaceable,
Spaces, SpacesAfter, SpacesBefore, TryTarget, WhenBranch, Spaces, SpacesAfter, SpacesBefore, TryTarget, WhenBranch,
@ -91,13 +91,23 @@ fn format_expr_only(
buf.indent(indent); buf.indent(indent);
buf.push_str("try"); buf.push_str("try");
} }
Expr::Apply(loc_expr, loc_args, called_via::CalledVia::ParensAndCommas) => { Expr::PncApply(
fmt_apply(loc_expr, loc_args, indent, buf, true); loc_expr @ Loc {
value: Expr::Dbg, ..
},
loc_args,
) => {
fmt_apply(loc_expr, loc_args.items, indent, buf);
}
Expr::PncApply(loc_expr, loc_args) => {
fmt_pnc_apply(loc_expr, loc_args, indent, buf);
} }
Expr::Apply(loc_expr, loc_args, _) => { Expr::Apply(loc_expr, loc_args, _) => {
let apply_needs_parens = parens == Parens::InApply || parens == Parens::InApplyLastArg; let apply_needs_parens = parens == Parens::InApply || parens == Parens::InApplyLastArg;
if buf.flags().parens_and_commas || !apply_needs_parens || loc_args.is_empty() { if buf.flags().parens_and_commas {
fmt_apply(loc_expr, loc_args, indent, buf, false); fmt_pnc_apply(loc_expr, &Collection::with_items(loc_args), indent, buf);
} else if !apply_needs_parens || loc_args.is_empty() {
fmt_apply(loc_expr, loc_args, indent, buf);
} else { } else {
fmt_parens(item, buf, indent); fmt_parens(item, buf, indent);
} }
@ -519,6 +529,12 @@ pub fn expr_is_multiline(me: &Expr<'_>, comments_only: bool) -> bool {
.iter() .iter()
.any(|loc_arg| expr_is_multiline(&loc_arg.value, comments_only)) .any(|loc_arg| expr_is_multiline(&loc_arg.value, comments_only))
} }
Expr::PncApply(loc_expr, args) => {
expr_is_multiline(&loc_expr.value, comments_only)
|| args
.iter()
.any(|loc_arg| expr_is_multiline(&loc_arg.value, comments_only))
}
Expr::DbgStmt { .. } => true, Expr::DbgStmt { .. } => true,
Expr::LowLevelDbg(_, _, _) => { Expr::LowLevelDbg(_, _, _) => {
@ -639,13 +655,28 @@ fn requires_space_after_unary(item: &Expr<'_>) -> bool {
} }
} }
fn fmt_pnc_apply(
loc_expr: &Loc<Expr<'_>>,
loc_args: &Collection<'_, &Loc<Expr<'_>>>,
indent: u16,
buf: &mut Buf<'_>,
) {
let expr = expr_lift_spaces(Parens::InApply, buf.text.bump(), &loc_expr.value);
if !expr.before.is_empty() {
format_spaces(buf, expr.before, Newlines::Yes, indent);
}
expr.item
.format_with_options(buf, Parens::InApply, Newlines::Yes, indent);
fmt_expr_collection(buf, indent, Braces::Round, *loc_args, Newlines::No);
}
fn fmt_apply( fn fmt_apply(
loc_expr: &Loc<Expr<'_>>, loc_expr: &Loc<Expr<'_>>,
loc_args: &[&Loc<Expr<'_>>], loc_args: &[&Loc<Expr<'_>>],
indent: u16, indent: u16,
buf: &mut Buf<'_>, buf: &mut Buf<'_>,
expr_used_commas_and_parens: bool,
) { ) {
// should_reflow_outdentable, aka should we transform this: // should_reflow_outdentable, aka should we transform this:
// //
@ -665,7 +696,6 @@ fn fmt_apply(
// 2, // 2,
// ] // ]
// ``` // ```
let use_commas_and_parens = expr_used_commas_and_parens || buf.flags().parens_and_commas;
let should_reflow_outdentable = loc_expr.extract_spaces().after.is_empty() let should_reflow_outdentable = loc_expr.extract_spaces().after.is_empty()
&& except_last(loc_args).all(|a| !a.is_multiline()) && except_last(loc_args).all(|a| !a.is_multiline())
&& loc_args && loc_args
@ -701,23 +731,17 @@ fn fmt_apply(
if !expr.before.is_empty() { if !expr.before.is_empty() {
format_spaces(buf, expr.before, Newlines::Yes, indent); format_spaces(buf, expr.before, Newlines::Yes, indent);
} }
expr.item expr.item
.format_with_options(buf, Parens::InApply, Newlines::Yes, indent); .format_with_options(buf, Parens::InApply, Newlines::Yes, indent);
if use_commas_and_parens {
buf.push('(');
}
let mut last_after = expr.after; let mut last_after = expr.after;
for (i, loc_arg) in loc_args.iter().enumerate() { for (i, loc_arg) in loc_args.iter().enumerate() {
let is_last_arg = i == loc_args.len() - 1; let is_last_arg = i == loc_args.len() - 1;
let is_first_arg = i == 0;
let arg = expr_lift_spaces( let arg = expr_lift_spaces(
if use_commas_and_parens { if is_last_arg {
Parens::NotNeeded
} else if is_last_arg {
Parens::InApplyLastArg Parens::InApplyLastArg
} else { } else {
Parens::InApply Parens::InApply
@ -738,7 +762,7 @@ fn fmt_apply(
last_after = arg.after; last_after = arg.after;
if needs_indent { if needs_indent {
buf.ensure_ends_with_newline(); buf.ensure_ends_with_newline();
} else if !(is_first_arg && use_commas_and_parens) { } else {
buf.spaces(1); buf.spaces(1);
} }
@ -746,34 +770,13 @@ fn fmt_apply(
{ {
fmt_parens(&arg.item, buf, arg_indent); fmt_parens(&arg.item, buf, arg_indent);
} else { } else {
format_expr_only( format_expr_only(&arg.item, buf, Parens::InApply, Newlines::Yes, arg_indent);
&arg.item,
buf,
if use_commas_and_parens {
Parens::NotNeeded
} else {
Parens::InApply
},
Newlines::Yes,
arg_indent,
);
}
if use_commas_and_parens && (!is_last_arg || needs_indent) {
buf.push(',');
} }
} }
if !last_after.is_empty() { if !last_after.is_empty() {
format_spaces(buf, last_after, Newlines::Yes, arg_indent); format_spaces(buf, last_after, Newlines::Yes, arg_indent);
} }
if use_commas_and_parens {
if needs_indent {
buf.ensure_ends_with_newline();
buf.indent(indent);
}
buf.push(')');
}
} }
fn is_outdentable_collection(expr: &Expr<'_>) -> bool { fn is_outdentable_collection(expr: &Expr<'_>) -> bool {
@ -1038,16 +1041,12 @@ pub fn expr_lift_spaces<'a, 'b: 'a>(
expr: &Expr<'b>, expr: &Expr<'b>,
) -> Spaces<'a, Expr<'a>> { ) -> Spaces<'a, Expr<'a>> {
match expr { match expr {
Expr::Apply(func, args, CalledVia::ParensAndCommas) => { Expr::PncApply(func, args) => {
let lifted = expr_lift_spaces_before(Parens::NotNeeded, arena, &func.value); let lifted = expr_lift_spaces_before(Parens::InApply, arena, &func.value);
Spaces { Spaces {
before: lifted.before, before: lifted.before,
item: Expr::Apply( item: Expr::PncApply(arena.alloc(Loc::at(func.region, lifted.item)), *args),
arena.alloc(Loc::at(func.region, lifted.item)),
args,
CalledVia::ParensAndCommas,
),
after: arena.alloc([]), after: arena.alloc([]),
} }
} }

View file

@ -7,8 +7,7 @@ use crate::spaces::{fmt_comments_only, fmt_spaces, NewlineAt, INDENT};
use crate::Buf; use crate::Buf;
use bumpalo::Bump; use bumpalo::Bump;
use roc_parse::ast::{ use roc_parse::ast::{
Base, CommentOrNewline, Pattern, PatternApplyStyle, PatternAs, Spaceable, Spaces, SpacesAfter, Base, CommentOrNewline, Pattern, PatternAs, Spaceable, Spaces, SpacesAfter, SpacesBefore,
SpacesBefore,
}; };
use roc_parse::expr::merge_spaces; use roc_parse::expr::merge_spaces;
use roc_region::all::Loc; use roc_region::all::Loc;
@ -73,9 +72,14 @@ impl<'a> Formattable for Pattern<'a> {
} }
}, },
Pattern::StrLiteral(literal) => is_str_multiline(literal), Pattern::StrLiteral(literal) => is_str_multiline(literal),
Pattern::Apply(pat, args, _) => { Pattern::Apply(pat, args) => {
pat.is_multiline() || args.iter().any(|a| a.is_multiline()) pat.is_multiline() || args.iter().any(|a| a.is_multiline())
} }
Pattern::PncApply(pat, args) => {
pat.is_multiline()
|| args.iter().any(|a| a.is_multiline())
|| !args.final_comments().is_empty()
}
Pattern::Identifier { .. } Pattern::Identifier { .. }
| Pattern::Tag(_) | Pattern::Tag(_)
@ -163,22 +167,19 @@ fn fmt_pattern_only(
buf.indent(indent); buf.indent(indent);
buf.push_str(name); buf.push_str(name);
} }
Pattern::Apply( Pattern::PncApply(loc_pattern, loc_arg_patterns) => {
loc_pattern,
loc_arg_patterns,
style @ PatternApplyStyle::ParensAndCommas,
) => {
pattern_fmt_apply( pattern_fmt_apply(
buf, buf,
loc_pattern.value, loc_pattern.value,
loc_arg_patterns, loc_arg_patterns.items,
Parens::NotNeeded, Parens::NotNeeded,
indent, indent,
is_multiline, is_multiline,
*style, true,
Some(loc_arg_patterns.final_comments()),
); );
} }
Pattern::Apply(loc_pattern, loc_arg_patterns, style) => { Pattern::Apply(loc_pattern, loc_arg_patterns) => {
pattern_fmt_apply( pattern_fmt_apply(
buf, buf,
loc_pattern.value, loc_pattern.value,
@ -186,7 +187,8 @@ fn fmt_pattern_only(
parens, parens,
indent, indent,
is_multiline, is_multiline,
*style, false,
None,
); );
} }
Pattern::RecordDestructure(loc_patterns) => { Pattern::RecordDestructure(loc_patterns) => {
@ -455,6 +457,7 @@ fn fmt_pattern_only(
} }
} }
#[allow(clippy::too_many_arguments)]
pub fn pattern_fmt_apply( pub fn pattern_fmt_apply(
buf: &mut Buf<'_>, buf: &mut Buf<'_>,
func: Pattern<'_>, func: Pattern<'_>,
@ -462,10 +465,10 @@ pub fn pattern_fmt_apply(
parens: Parens, parens: Parens,
indent: u16, indent: u16,
is_multiline: bool, is_multiline: bool,
style: PatternApplyStyle, is_pnc: bool,
final_comments: Option<&[CommentOrNewline]>,
) { ) {
let use_commas_and_parens = let use_commas_and_parens = is_pnc || buf.flags().parens_and_commas;
matches!(style, PatternApplyStyle::ParensAndCommas) || buf.flags().parens_and_commas;
buf.indent(indent); buf.indent(indent);
// Sometimes, an Apply pattern needs parens around it. // Sometimes, an Apply pattern needs parens around it.
// In particular when an Apply's argument is itself an Apply (> 0) arguments // In particular when an Apply's argument is itself an Apply (> 0) arguments
@ -560,7 +563,7 @@ pub fn pattern_fmt_apply(
buf.push_str("(implements)"); buf.push_str("(implements)");
} else { } else {
fmt_pattern_only(&arg.item, buf, parens, indent_more, arg.item.is_multiline()); fmt_pattern_only(&arg.item, buf, parens, indent_more, arg.item.is_multiline());
if use_commas_and_parens && (!is_last_arg || add_newlines) { if use_commas_and_parens && (!is_last_arg || is_multiline) {
buf.push(','); buf.push(',');
} }
} }
@ -570,6 +573,13 @@ pub fn pattern_fmt_apply(
add_newlines |= was_multiline; add_newlines |= was_multiline;
} }
if let Some(comments) = final_comments {
if !is_multiline {
fmt_comments_only(buf, comments.iter(), NewlineAt::Bottom, indent_more);
} else {
fmt_spaces(buf, comments.iter(), indent_more);
}
}
if !last_after.is_empty() { if !last_after.is_empty() {
if !is_multiline { if !is_multiline {
fmt_comments_only(buf, last_after.iter(), NewlineAt::Bottom, indent_more) fmt_comments_only(buf, last_after.iter(), NewlineAt::Bottom, indent_more)
@ -578,7 +588,21 @@ pub fn pattern_fmt_apply(
} }
} }
if parens || use_commas_and_parens { if use_commas_and_parens {
if is_multiline {
buf.ensure_ends_with_newline();
buf.indent(indent);
}
if buf.ends_with_newline() {
buf.indent(indent);
}
if buf.ends_with_newline() {
buf.indent(indent);
}
buf.push(')');
}
if parens {
buf.push(')'); buf.push(')');
} }
} }
@ -632,8 +656,9 @@ fn pattern_prec(pat: Pattern<'_>) -> Prec {
| Pattern::SingleQuote(_) | Pattern::SingleQuote(_)
| Pattern::Tuple(..) | Pattern::Tuple(..)
| Pattern::List(..) | Pattern::List(..)
| Pattern::ListRest(_) => Prec::Term, | Pattern::ListRest(_)
Pattern::Apply(_, _, _) | Pattern::As(_, _) => Prec::Apply, | Pattern::PncApply(_, _) => Prec::Term,
Pattern::Apply(_, _) | Pattern::As(_, _) => Prec::Apply,
Pattern::SpaceBefore(inner, _) | Pattern::SpaceAfter(inner, _) => pattern_prec(*inner), Pattern::SpaceBefore(inner, _) | Pattern::SpaceAfter(inner, _) => pattern_prec(*inner),
Pattern::Malformed(_) | Pattern::MalformedIdent(..) => Prec::Term, Pattern::Malformed(_) | Pattern::MalformedIdent(..) => Prec::Term,
} }
@ -653,7 +678,7 @@ pub fn pattern_lift_spaces<'a, 'b: 'a>(
pat: &Pattern<'b>, pat: &Pattern<'b>,
) -> Spaces<'a, Pattern<'a>> { ) -> Spaces<'a, Pattern<'a>> {
match pat { match pat {
Pattern::Apply(func, args, style) => { Pattern::Apply(func, args) => {
let func_lifted = pattern_lift_spaces(arena, &func.value); let func_lifted = pattern_lift_spaces(arena, &func.value);
let args = arena.alloc_slice_copy(args); let args = arena.alloc_slice_copy(args);
@ -688,10 +713,19 @@ pub fn pattern_lift_spaces<'a, 'b: 'a>(
}; };
Spaces { Spaces {
before, before,
item: Pattern::Apply(arena.alloc(func), args, *style), item: Pattern::Apply(arena.alloc(func), args),
after, after,
} }
} }
Pattern::PncApply(func, args) => {
let func_lifted = pattern_lift_spaces_before(arena, &func.value);
Spaces {
before: func_lifted.before,
item: Pattern::PncApply(arena.alloc(func), *args),
after: &[],
}
}
Pattern::OptionalField(name, expr) => { Pattern::OptionalField(name, expr) => {
let lifted = expr_lift_spaces_after(Parens::NotNeeded, arena, &expr.value); let lifted = expr_lift_spaces_after(Parens::NotNeeded, arena, &expr.value);
Spaces { Spaces {
@ -748,7 +782,9 @@ fn handle_multiline_str_spaces<'a>(pat: &Pattern<'_>, before: &mut &'a [CommentO
fn starts_with_block_str(item: &Pattern<'_>) -> bool { fn starts_with_block_str(item: &Pattern<'_>) -> bool {
match item { match item {
Pattern::As(inner, _) | Pattern::Apply(inner, _, _) => starts_with_block_str(&inner.value), Pattern::As(inner, _) | Pattern::Apply(inner, _) | Pattern::PncApply(inner, _) => {
starts_with_block_str(&inner.value)
}
Pattern::SpaceBefore(inner, _) | Pattern::SpaceAfter(inner, _) => { Pattern::SpaceBefore(inner, _) | Pattern::SpaceAfter(inner, _) => {
starts_with_block_str(inner) starts_with_block_str(inner)
} }

View file

@ -68,9 +68,6 @@ pub enum CalledVia {
/// Calling with space, e.g. (foo bar) /// Calling with space, e.g. (foo bar)
Space, Space,
/// Calling parens-and-commas style, e.g. foo(bar)
ParensAndCommas,
/// Calling with an operator, e.g. (bar |> foo) or (1 + 2) /// Calling with an operator, e.g. (bar |> foo) or (1 + 2)
BinOp(BinOp), BinOp(BinOp),

View file

@ -554,6 +554,7 @@ pub enum Expr<'a> {
/// To apply by name, do Apply(Var(...), ...) /// To apply by name, do Apply(Var(...), ...)
/// To apply a tag by name, do Apply(Tag(...), ...) /// To apply a tag by name, do Apply(Tag(...), ...)
Apply(&'a Loc<Expr<'a>>, &'a [&'a Loc<Expr<'a>>], CalledVia), Apply(&'a Loc<Expr<'a>>, &'a [&'a Loc<Expr<'a>>], CalledVia),
PncApply(&'a Loc<Expr<'a>>, Collection<'a, &'a Loc<Expr<'a>>>),
BinOps(&'a [(Loc<Expr<'a>>, Loc<BinOp>)], &'a Loc<Expr<'a>>), BinOps(&'a [(Loc<Expr<'a>>, Loc<BinOp>)], &'a Loc<Expr<'a>>),
UnaryOp(&'a Loc<Expr<'a>>, Loc<UnaryOp>), UnaryOp(&'a Loc<Expr<'a>>, Loc<UnaryOp>),
@ -631,6 +632,7 @@ pub fn is_top_level_suffixed(expr: &Expr) -> bool {
match expr { match expr {
Expr::TrySuffix { .. } => true, Expr::TrySuffix { .. } => true,
Expr::Apply(a, _, _) => is_top_level_suffixed(&a.value), Expr::Apply(a, _, _) => is_top_level_suffixed(&a.value),
Expr::PncApply(a, _) => is_top_level_suffixed(&a.value),
Expr::SpaceBefore(a, _) => is_top_level_suffixed(a), Expr::SpaceBefore(a, _) => is_top_level_suffixed(a),
Expr::SpaceAfter(a, _) => is_top_level_suffixed(a), Expr::SpaceAfter(a, _) => is_top_level_suffixed(a),
_ => false, _ => false,
@ -653,6 +655,15 @@ pub fn is_expr_suffixed(expr: &Expr) -> bool {
any_args_suffixed || is_function_suffixed any_args_suffixed || is_function_suffixed
} }
Expr::PncApply(sub_loc_expr, apply_arg_collection) => {
let is_function_suffixed = is_expr_suffixed(&sub_loc_expr.value);
let any_args_suffixed = apply_arg_collection
.iter()
.any(|arg| is_expr_suffixed(&arg.value));
any_args_suffixed || is_function_suffixed
}
// expression in a pipeline, `"hi" |> say!` // expression in a pipeline, `"hi" |> say!`
Expr::BinOps(firsts, last) => { Expr::BinOps(firsts, last) => {
firsts firsts
@ -1021,6 +1032,14 @@ impl<'a, 'b> RecursiveValueDefIter<'a, 'b> {
expr_stack.push(&loc_expr.value); expr_stack.push(&loc_expr.value);
} }
} }
PncApply(fun, args) => {
expr_stack.reserve(args.len() + 1);
expr_stack.push(&fun.value);
for loc_expr in args.iter() {
expr_stack.push(&loc_expr.value);
}
}
BinOps(ops, expr) => { BinOps(ops, expr) => {
expr_stack.reserve(ops.len() + 1); expr_stack.reserve(ops.len() + 1);
@ -1725,12 +1744,6 @@ impl<'a> PatternAs<'a> {
} }
} }
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PatternApplyStyle {
Whitespace,
ParensAndCommas,
}
#[derive(Clone, Copy, Debug, PartialEq)] #[derive(Clone, Copy, Debug, PartialEq)]
pub enum Pattern<'a> { pub enum Pattern<'a> {
// Identifier // Identifier
@ -1746,11 +1759,9 @@ pub enum Pattern<'a> {
OpaqueRef(&'a str), OpaqueRef(&'a str),
Apply( Apply(&'a Loc<Pattern<'a>>, &'a [Loc<Pattern<'a>>]),
&'a Loc<Pattern<'a>>,
&'a [Loc<Pattern<'a>>], PncApply(&'a Loc<Pattern<'a>>, Collection<'a, Loc<Pattern<'a>>>),
PatternApplyStyle,
),
/// This is Located<Pattern> rather than Located<str> so we can record comments /// This is Located<Pattern> rather than Located<str> so we can record comments
/// around the destructured names, e.g. { x ### x does stuff ###, y } /// around the destructured names, e.g. { x ### x does stuff ###, y }
@ -1831,8 +1842,34 @@ impl<'a> Pattern<'a> {
false false
} }
} }
Apply(constructor_x, args_x, _) => { Apply(constructor_x, args_x) => {
if let Apply(constructor_y, args_y, _) = other { if let Apply(constructor_y, args_y) = other {
let equivalent_args = args_x
.iter()
.zip(args_y.iter())
.all(|(p, q)| p.value.equivalent(&q.value));
constructor_x.value.equivalent(&constructor_y.value) && equivalent_args
} else if let PncApply(constructor_y, args_y) = other {
let equivalent_args = args_x
.iter()
.zip(args_y.iter())
.all(|(p, q)| p.value.equivalent(&q.value));
constructor_x.value.equivalent(&constructor_y.value) && equivalent_args
} else {
false
}
}
PncApply(constructor_x, args_x) => {
if let PncApply(constructor_y, args_y) = other {
let equivalent_args = args_x
.iter()
.zip(args_y.iter())
.all(|(p, q)| p.value.equivalent(&q.value));
constructor_x.value.equivalent(&constructor_y.value) && equivalent_args
} else if let Apply(constructor_y, args_y) = other {
let equivalent_args = args_x let equivalent_args = args_x
.iter() .iter()
.zip(args_y.iter()) .zip(args_y.iter())
@ -2554,6 +2591,7 @@ impl<'a> Malformed for Expr<'a> {
LowLevelTry(loc_expr, _) => loc_expr.is_malformed(), LowLevelTry(loc_expr, _) => loc_expr.is_malformed(),
Return(return_value, after_return) => return_value.is_malformed() || after_return.is_some_and(|ar| ar.is_malformed()), Return(return_value, after_return) => return_value.is_malformed() || after_return.is_some_and(|ar| ar.is_malformed()),
Apply(func, args, _) => func.is_malformed() || args.iter().any(|arg| arg.is_malformed()), Apply(func, args, _) => func.is_malformed() || args.iter().any(|arg| arg.is_malformed()),
PncApply(func, args) => func.is_malformed() || args.iter().any(|arg| arg.is_malformed()),
BinOps(firsts, last) => firsts.iter().any(|(expr, _)| expr.is_malformed()) || last.is_malformed(), BinOps(firsts, last) => firsts.iter().any(|(expr, _)| expr.is_malformed()) || last.is_malformed(),
UnaryOp(expr, _) => expr.is_malformed(), UnaryOp(expr, _) => expr.is_malformed(),
If { if_thens, final_else, ..} => if_thens.iter().any(|(cond, body)| cond.is_malformed() || body.is_malformed()) || final_else.is_malformed(), If { if_thens, final_else, ..} => if_thens.iter().any(|(cond, body)| cond.is_malformed() || body.is_malformed()) || final_else.is_malformed(),
@ -2650,7 +2688,8 @@ impl<'a> Malformed for Pattern<'a> {
Identifier{ .. } | Identifier{ .. } |
Tag(_) | Tag(_) |
OpaqueRef(_) => false, OpaqueRef(_) => false,
Apply(func, args, _) => func.is_malformed() || args.iter().any(|arg| arg.is_malformed()), Apply(func, args) => func.is_malformed() || args.iter().any(|arg| arg.is_malformed()),
PncApply(func, args) => func.is_malformed() || args.iter().any(|arg| arg.is_malformed()),
RecordDestructure(items) => items.iter().any(|item| item.is_malformed()), RecordDestructure(items) => items.iter().any(|item| item.is_malformed()),
RequiredField(_, pat) => pat.is_malformed(), RequiredField(_, pat) => pat.is_malformed(),
OptionalField(_, expr) => expr.is_malformed(), OptionalField(_, expr) => expr.is_malformed(),
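Taken together, the AST changes above split whitespace application from parens-and-commas (PNC) application into separate variants and make pattern equivalence ignore the calling style, so `Ok a` and `Ok(a)` match the same values. A rough standalone model of that idea, using hypothetical simplified types rather than the real arena-allocated Pattern<'a>:

    // Hypothetical stand-in for the real Pattern type.
    #[derive(Debug, PartialEq)]
    enum Pat {
        Tag(String),
        Ident(String),
        // Whitespace application, e.g. `Foo a b`
        Apply(Box<Pat>, Vec<Pat>),
        // Parens-and-commas application, e.g. `Foo(a, b)`
        PncApply(Box<Pat>, Vec<Pat>),
    }

    // Mirrors the cross-variant equivalence in the diff: the calling style
    // does not matter, only the constructor and the arguments.
    fn equivalent(a: &Pat, b: &Pat) -> bool {
        use Pat::*;
        match (a, b) {
            (Apply(fa, xa), Apply(fb, xb))
            | (Apply(fa, xa), PncApply(fb, xb))
            | (PncApply(fa, xa), Apply(fb, xb))
            | (PncApply(fa, xa), PncApply(fb, xb)) => {
                equivalent(fa, fb)
                    && xa.len() == xb.len()
                    && xa.iter().zip(xb).all(|(p, q)| equivalent(p, q))
            }
            _ => a == b,
        }
    }

    fn main() {
        let ws = Pat::Apply(
            Box::new(Pat::Tag("Ok".into())),
            vec![Pat::Ident("a".into())],
        );
        let pnc = Pat::PncApply(
            Box::new(Pat::Tag("Ok".into())),
            vec![Pat::Ident("a".into())],
        );
        assert!(equivalent(&ws, &pnc)); // `Ok a` and `Ok(a)` are treated as the same shape
    }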

View file

@ -2,8 +2,8 @@ use crate::ast::{
is_expr_suffixed, AssignedField, Collection, CommentOrNewline, Defs, Expr, ExtractSpaces, is_expr_suffixed, AssignedField, Collection, CommentOrNewline, Defs, Expr, ExtractSpaces,
Implements, ImplementsAbilities, ImportAlias, ImportAsKeyword, ImportExposingKeyword, Implements, ImplementsAbilities, ImportAlias, ImportAsKeyword, ImportExposingKeyword,
ImportedModuleName, IngestedFileAnnotation, IngestedFileImport, ModuleImport, ImportedModuleName, IngestedFileAnnotation, IngestedFileImport, ModuleImport,
ModuleImportParams, Pattern, PatternApplyStyle, Spaceable, Spaced, Spaces, SpacesBefore, ModuleImportParams, Pattern, Spaceable, Spaced, Spaces, SpacesBefore, TryTarget,
TryTarget, TypeAnnotation, TypeDef, TypeHeader, ValueDef, TypeAnnotation, TypeDef, TypeHeader, ValueDef,
}; };
use crate::blankspace::{ use crate::blankspace::{
loc_space0_e, require_newline_or_eof, space0_after_e, space0_around_ee, space0_before_e, loc_space0_e, require_newline_or_eof, space0_after_e, space0_around_ee, space0_before_e,
@ -233,12 +233,41 @@ fn loc_term<'a>() -> impl Parser<'a, Loc<Expr<'a>>, EExpr<'a>> {
), ),
zero_or_more(pnc_args()), zero_or_more(pnc_args()),
), ),
|arena, (expr, arg_locs_vec)| { #[allow(clippy::type_complexity)]
|arena,
(expr, arg_locs_with_suffixes_vec): (
Loc<Expr<'a>>,
bumpalo::collections::Vec<
'a,
(
Loc<Collection<'a, &'a Loc<Expr>>>,
Option<Vec<'a, Suffix<'a>>>,
),
>,
)| {
let mut e = expr; let mut e = expr;
for args_loc in arg_locs_vec.iter() { let orig_region = e.region;
for (args_loc, maybe_suffixes) in arg_locs_with_suffixes_vec.iter() {
let value = if matches!(
e,
Loc {
value: Expr::Dbg,
..
}
) {
Expr::Apply(arena.alloc(e), args_loc.value.items, CalledVia::Space)
} else if let Some(suffixes) = maybe_suffixes {
apply_expr_access_chain(
arena,
Expr::PncApply(arena.alloc(e), args_loc.value),
suffixes.clone(),
)
} else {
Expr::PncApply(arena.alloc(e), args_loc.value)
};
e = Loc { e = Loc {
value: Expr::Apply(arena.alloc(e), args_loc.value, CalledVia::ParensAndCommas), value,
region: Region::span_across(&expr.region, &args_loc.region), region: Region::span_across(&orig_region, &args_loc.region),
}; };
} }
e e
@ -247,9 +276,16 @@ fn loc_term<'a>() -> impl Parser<'a, Loc<Expr<'a>>, EExpr<'a>> {
.trace("term") .trace("term")
} }
fn pnc_args<'a>() -> impl Parser<'a, Loc<&'a [&'a Loc<Expr<'a>>]>, EExpr<'a>> { fn pnc_args<'a>() -> impl Parser<
'a,
(
Loc<Collection<'a, &'a Loc<Expr<'a>>>>,
Option<Vec<'a, Suffix<'a>>>,
),
EExpr<'a>,
> {
|arena: &'a Bump, state: State<'a>, min_indent: u32| { |arena: &'a Bump, state: State<'a>, min_indent: u32| {
map_with_arena( let args_then_suffixes = and(
specialize_err( specialize_err(
EExpr::InParens, EExpr::InParens,
loc(collection_trailing_sep_e( loc(collection_trailing_sep_e(
@ -260,32 +296,23 @@ fn pnc_args<'a>() -> impl Parser<'a, Loc<&'a [&'a Loc<Expr<'a>>]>, EExpr<'a>> {
Expr::SpaceBefore, Expr::SpaceBefore,
)), )),
), ),
|arena, arg_loc: Loc<Collection<'a, Loc<Expr<'a>>>>| { optional(record_field_access_chain()),
let mut args_vec = Vec::new_in(arena); );
let args_len = arg_loc.value.items.len(); map_with_arena(
for (i, arg) in arg_loc.value.items.iter().enumerate() { args_then_suffixes,
if i == (args_len - 1) { |arena: &'a Bump,
let last_comments = arg_loc.value.final_comments(); (loc_args_coll, maybe_suffixes): (
if !last_comments.is_empty() { Loc<Collection<'a, Loc<Expr<'a>>>>,
let sa = Expr::SpaceAfter(arena.alloc(arg.value), last_comments); Option<Vec<'a, Suffix<'a>>>,
let arg_with_spaces: &Loc<Expr<'a>> = arena.alloc(Loc { )| {
value: sa, let args = loc_args_coll.value.ptrify_items(arena);
region: arg.region, (
});
args_vec.push(arg_with_spaces);
} else {
let a: &Loc<Expr<'a>> = arena.alloc(arg);
args_vec.push(a);
}
} else {
let a: &Loc<Expr<'a>> = arena.alloc(arg);
args_vec.push(a);
}
}
Loc { Loc {
value: args_vec.into_bump_slice(), region: loc_args_coll.region,
region: arg_loc.region, value: args,
} },
maybe_suffixes,
)
}, },
) )
.parse(arena, state, min_indent) .parse(arena, state, min_indent)
@ -2117,7 +2144,7 @@ fn expr_to_pattern_help<'a>(arena: &'a Bump, expr: &Expr<'a>) -> Result<Pattern<
Expr::Underscore(opt_name) => Pattern::Underscore(opt_name), Expr::Underscore(opt_name) => Pattern::Underscore(opt_name),
Expr::Tag(value) => Pattern::Tag(value), Expr::Tag(value) => Pattern::Tag(value),
Expr::OpaqueRef(value) => Pattern::OpaqueRef(value), Expr::OpaqueRef(value) => Pattern::OpaqueRef(value),
Expr::Apply(loc_val, loc_args, called_via) => { Expr::Apply(loc_val, loc_args, _) => {
let region = loc_val.region; let region = loc_val.region;
let value = expr_to_pattern_help(arena, &loc_val.value)?; let value = expr_to_pattern_help(arena, &loc_val.value)?;
let val_pattern = arena.alloc(Loc { region, value }); let val_pattern = arena.alloc(Loc { region, value });
@ -2131,18 +2158,22 @@ fn expr_to_pattern_help<'a>(arena: &'a Bump, expr: &Expr<'a>) -> Result<Pattern<
arg_patterns.push(Loc { region, value }); arg_patterns.push(Loc { region, value });
} }
let pattern = Pattern::Apply( let pattern = Pattern::Apply(val_pattern, arg_patterns.into_bump_slice());
val_pattern,
arg_patterns.into_bump_slice(),
if matches!(called_via, CalledVia::ParensAndCommas) {
PatternApplyStyle::ParensAndCommas
} else {
PatternApplyStyle::Whitespace
},
);
pattern pattern
} }
Expr::PncApply(loc_val, args) => {
let region = loc_val.region;
let value = expr_to_pattern_help(arena, &loc_val.value)?;
let val_pattern = arena.alloc(Loc { region, value });
let pattern_args = args.map_items_result(arena, |arg| {
let region = arg.region;
let value = expr_to_pattern_help(arena, &arg.value)?;
Ok(Loc { region, value })
})?;
Pattern::PncApply(val_pattern, pattern_args)
}
Expr::Try => Pattern::Identifier { ident: "try" }, Expr::Try => Pattern::Identifier { ident: "try" },
@ -3143,6 +3174,31 @@ fn stmts_to_defs<'a>(
last_expr = Some(Loc::at(sp_stmt.item.region, e)); last_expr = Some(Loc::at(sp_stmt.item.region, e));
// don't re-process the rest of the statements; they got consumed by the dbg expr
break;
} else if let Expr::PncApply(
Loc {
value: Expr::Dbg, ..
},
args,
) = e
{
let condition = &args.items[0];
let rest = stmts_to_expr(&stmts[i + 1..], arena)?;
let e = Expr::DbgStmt {
first: condition,
extra_args: &args.items[1..],
continuation: arena.alloc(rest),
};
let e = if sp_stmt.before.is_empty() {
e
} else {
arena.alloc(e).before(sp_stmt.before)
};
last_expr = Some(Loc::at(sp_stmt.item.region, e));
// don't re-process the rest of the statements; they got consumed by the dbg expr // don't re-process the rest of the statements; they got consumed by the dbg expr
break; break;
} else { } else {
@ -3313,7 +3369,8 @@ fn starts_with_spaces_conservative(value: &Pattern<'_>) -> bool {
| Pattern::ListRest(_) | Pattern::ListRest(_)
| Pattern::OpaqueRef(_) => false, | Pattern::OpaqueRef(_) => false,
Pattern::As(left, _) => starts_with_spaces_conservative(&left.value), Pattern::As(left, _) => starts_with_spaces_conservative(&left.value),
Pattern::Apply(left, _, _) => starts_with_spaces_conservative(&left.value), Pattern::Apply(left, _) => starts_with_spaces_conservative(&left.value),
Pattern::PncApply(left, _) => starts_with_spaces_conservative(&left.value),
Pattern::RecordDestructure(_) => false, Pattern::RecordDestructure(_) => false,
Pattern::RequiredField(_, _) | Pattern::OptionalField(_, _) => false, Pattern::RequiredField(_, _) | Pattern::OptionalField(_, _) => false,
Pattern::SpaceBefore(_, _) => true, Pattern::SpaceBefore(_, _) => true,
@ -3329,7 +3386,6 @@ fn header_to_pat<'a>(arena: &'a Bump, header: TypeHeader<'a>) -> Pattern<'a> {
Pattern::Apply( Pattern::Apply(
arena.alloc(Loc::at(header.name.region, Pattern::Tag(header.name.value))), arena.alloc(Loc::at(header.name.region, Pattern::Tag(header.name.value))),
header.vars, header.vars,
PatternApplyStyle::Whitespace,
) )
} }
} }
@ -3380,8 +3436,8 @@ fn pat_ends_with_spaces_conservative(pat: &Pattern<'_>) -> bool {
| Pattern::ListRest(_) | Pattern::ListRest(_)
| Pattern::As(_, _) | Pattern::As(_, _)
| Pattern::OpaqueRef(_) | Pattern::OpaqueRef(_)
| Pattern::Apply(_, _, PatternApplyStyle::ParensAndCommas) => false, | Pattern::PncApply(_, _) => false,
Pattern::Apply(_, args, _) => args Pattern::Apply(_, args) => args
.last() .last()
.map_or(false, |a| pat_ends_with_spaces_conservative(&a.value)), .map_or(false, |a| pat_ends_with_spaces_conservative(&a.value)),
Pattern::RecordDestructure(_) => false, Pattern::RecordDestructure(_) => false,
@ -3403,7 +3459,7 @@ pub fn join_alias_to_body<'a>(
body_expr: &'a Loc<Expr<'a>>, body_expr: &'a Loc<Expr<'a>>,
) -> ValueDef<'a> { ) -> ValueDef<'a> {
let loc_name = arena.alloc(header.name.map(|x| Pattern::Tag(x))); let loc_name = arena.alloc(header.name.map(|x| Pattern::Tag(x)));
let ann_pattern = Pattern::Apply(loc_name, header.vars, PatternApplyStyle::Whitespace); let ann_pattern = Pattern::Apply(loc_name, header.vars);
let vars_region = Region::across_all(header.vars.iter().map(|v| &v.region)); let vars_region = Region::across_all(header.vars.iter().map(|v| &v.region));
let region_ann_pattern = Region::span_across(&loc_name.region, &vars_region); let region_ann_pattern = Region::span_across(&loc_name.region, &vars_region);
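In the expression parser above, each trailing parenthesized argument group now wraps the expression parsed so far in another PncApply, so chained calls like `f(1)(2, 3)` nest from the left; only a `dbg` callee or a trailing access/suffix chain takes a different path. A simplified sketch of that folding step, using a hypothetical owned Expr in place of the real arena-allocated Loc<Expr>:

    // Hypothetical simplified expression type.
    #[derive(Debug)]
    enum Expr {
        Var(&'static str),
        Num(i64),
        PncApply(Box<Expr>, Vec<Expr>),
    }

    // Mirrors the loop in loc_term: start from the callee and wrap it once per
    // parenthesized argument group, left to right.
    fn apply_pnc_groups(callee: Expr, groups: Vec<Vec<Expr>>) -> Expr {
        groups
            .into_iter()
            .fold(callee, |e, args| Expr::PncApply(Box::new(e), args))
    }

    fn main() {
        // f(1)(2, 3)  ==>  PncApply(PncApply(f, [1]), [2, 3])
        let parsed = apply_pnc_groups(
            Expr::Var("f"),
            vec![vec![Expr::Num(1)], vec![Expr::Num(2), Expr::Num(3)]],
        );
        println!("{parsed:?}");
    }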

View file

@ -737,8 +737,13 @@ impl<'a> Normalize<'a> for Expr<'a> {
arena.alloc(a.normalize(arena)), arena.alloc(a.normalize(arena)),
b.map(|loc_b| &*arena.alloc(loc_b.normalize(arena))), b.map(|loc_b| &*arena.alloc(loc_b.normalize(arena))),
), ),
Expr::Apply(a, b, c) => { Expr::Apply(a, b, called_via) => Expr::Apply(
Expr::Apply(arena.alloc(a.normalize(arena)), b.normalize(arena), c) arena.alloc(a.normalize(arena)),
b.normalize(arena),
called_via,
),
Expr::PncApply(a, b) => {
Expr::PncApply(arena.alloc(a.normalize(arena)), b.normalize(arena))
} }
Expr::BinOps(a, b) => Expr::BinOps(a.normalize(arena), arena.alloc(b.normalize(arena))), Expr::BinOps(a, b) => Expr::BinOps(a.normalize(arena), arena.alloc(b.normalize(arena))),
Expr::UnaryOp(a, b) => { Expr::UnaryOp(a, b) => {
@ -835,6 +840,30 @@ fn fold_defs<'a>(
arena.alloc(Loc::at_zero(new_final)), arena.alloc(Loc::at_zero(new_final)),
); );
} }
ValueDef::Stmt(&Loc {
value:
Expr::PncApply(
&Loc {
value: Expr::Dbg, ..
},
args,
),
..
}) => {
let rest = fold_defs(arena, defs, final_expr);
let new_final = Expr::DbgStmt {
first: args.items[0],
extra_args: &args.items[1..],
continuation: arena.alloc(Loc::at_zero(rest)),
};
if new_defs.is_empty() {
return new_final;
}
return Expr::Defs(
arena.alloc(new_defs),
arena.alloc(Loc::at_zero(new_final)),
);
}
_ => { _ => {
new_defs.push_value_def(vd, Region::zero(), &[], &[]); new_defs.push_value_def(vd, Region::zero(), &[], &[]);
} }
@ -879,11 +908,13 @@ impl<'a> Normalize<'a> for Pattern<'a> {
Pattern::Identifier { ident } => Pattern::Identifier { ident }, Pattern::Identifier { ident } => Pattern::Identifier { ident },
Pattern::Tag(a) => Pattern::Tag(a), Pattern::Tag(a) => Pattern::Tag(a),
Pattern::OpaqueRef(a) => Pattern::OpaqueRef(a), Pattern::OpaqueRef(a) => Pattern::OpaqueRef(a),
Pattern::Apply(a, b, c) => Pattern::Apply( Pattern::Apply(a, b) => Pattern::Apply(
arena.alloc(a.normalize(arena)), arena.alloc(a.normalize(arena)),
arena.alloc(b.normalize(arena)), arena.alloc(b.normalize(arena)),
c,
), ),
Pattern::PncApply(a, b) => {
Pattern::PncApply(arena.alloc(a.normalize(arena)), b.normalize(arena))
}
Pattern::RecordDestructure(a) => Pattern::RecordDestructure(a.normalize(arena)), Pattern::RecordDestructure(a) => Pattern::RecordDestructure(a.normalize(arena)),
Pattern::RequiredField(a, b) => { Pattern::RequiredField(a, b) => {
Pattern::RequiredField(a, arena.alloc(b.normalize(arena))) Pattern::RequiredField(a, arena.alloc(b.normalize(arena)))

View file

@ -1,6 +1,4 @@
use crate::ast::{ use crate::ast::{Collection, ExtractSpaces, Implements, Pattern, PatternAs, Spaceable};
Collection, ExtractSpaces, Implements, Pattern, PatternApplyStyle, PatternAs, Spaceable,
};
use crate::blankspace::{space0_before_optional_after, space0_e, spaces, spaces_before}; use crate::blankspace::{space0_before_optional_after, space0_e, spaces, spaces_before};
use crate::ident::{lowercase_ident, parse_ident, Accessor, Ident}; use crate::ident::{lowercase_ident, parse_ident, Accessor, Ident};
use crate::keyword; use crate::keyword;
@ -337,35 +335,30 @@ fn loc_ident_pattern_help<'a>(
let (_, loc_ident, state) = specialize_err(|_, pos| EPattern::Start(pos), loc(parse_ident)) let (_, loc_ident, state) = specialize_err(|_, pos| EPattern::Start(pos), loc(parse_ident))
.parse(arena, state, min_indent)?; .parse(arena, state, min_indent)?;
let commas_and_paren_args_help = map_with_arena( enum ArgType<'a> {
collection_trailing_sep_e( PncArgs(Loc<Collection<'a, Loc<Pattern<'a>>>>),
WhitespaceArgs(&'a [Loc<Pattern<'a>>]),
}
let commas_and_paren_args_help = map(
loc(collection_trailing_sep_e(
byte(b'(', EPattern::ParenStart), byte(b'(', EPattern::ParenStart),
loc_tag_pattern_arg(false), loc_tag_pattern_arg(false),
byte(b',', EPattern::NotAPattern), byte(b',', EPattern::NotAPattern),
byte(b')', EPattern::ParenEnd), byte(b')', EPattern::ParenEnd),
Pattern::SpaceBefore, Pattern::SpaceBefore,
), )),
|arena, args| { ArgType::PncArgs,
let mut args_vec = Vec::new_in(arena);
for arg in args.iter() {
let a: &Loc<Pattern<'a>> = arena.alloc(arg);
args_vec.push(*a);
}
(
args_vec.into_bump_slice(),
PatternApplyStyle::ParensAndCommas,
)
},
); );
let whitespace_args = let whitespace_args =
map_with_arena(loc_type_def_tag_pattern_args_help(), |arena, args| { map_with_arena(loc_type_def_tag_pattern_args_help(), |arena, args| {
let mut args_vec = Vec::new_in(arena); let mut args_vec = Vec::with_capacity_in(args.len(), arena);
for arg in args.iter() { for arg in args.iter() {
let a: &Loc<Pattern<'a>> = arena.alloc(arg); let a: &Loc<Pattern<'a>> = arena.alloc(arg);
args_vec.push(*a); args_vec.push(*a);
} }
(args_vec.into_bump_slice(), PatternApplyStyle::Whitespace) ArgType::WhitespaceArgs(args_vec.into_bump_slice())
}); });
match loc_ident.value { match loc_ident.value {
@ -376,7 +369,7 @@ fn loc_ident_pattern_help<'a>(
}; };
// Make sure `Foo Bar 1` is parsed as `Foo (Bar) 1`, and not `Foo (Bar 1)` // Make sure `Foo Bar 1` is parsed as `Foo (Bar) 1`, and not `Foo (Bar 1)`
let (_, (args, style), state) = if can_have_arguments { let (_, arg_type, state) = if can_have_arguments {
one_of!(commas_and_paren_args_help, whitespace_args) one_of!(commas_and_paren_args_help, whitespace_args)
.parse(arena, state, min_indent)? .parse(arena, state, min_indent)?
} else { } else {
@ -386,9 +379,15 @@ fn loc_ident_pattern_help<'a>(
Err((MadeProgress, e)) => return Err((MadeProgress, e)), Err((MadeProgress, e)) => return Err((MadeProgress, e)),
} }
}; };
match arg_type {
ArgType::PncArgs(args) => {
let pnc_args = args.value;
let value = Pattern::PncApply(&*arena.alloc(loc_tag), pnc_args);
let region = Region::span_across(&loc_ident.region, &args.region);
Ok((MadeProgress, Loc { region, value }, state))
}
ArgType::WhitespaceArgs(args) => {
let loc_args: &[Loc<Pattern<'_>>] = { args }; let loc_args: &[Loc<Pattern<'_>>] = { args };
if loc_args.is_empty() { if loc_args.is_empty() {
Ok((MadeProgress, loc_tag, state)) Ok((MadeProgress, loc_tag, state))
} else { } else {
@ -396,11 +395,13 @@ fn loc_ident_pattern_help<'a>(
std::iter::once(&loc_ident.region) std::iter::once(&loc_ident.region)
.chain(loc_args.iter().map(|loc_arg| &loc_arg.region)), .chain(loc_args.iter().map(|loc_arg| &loc_arg.region)),
); );
let value = Pattern::Apply(&*arena.alloc(loc_tag), loc_args, style); let value = Pattern::Apply(&*arena.alloc(loc_tag), loc_args);
Ok((MadeProgress, Loc { region, value }, state)) Ok((MadeProgress, Loc { region, value }, state))
} }
} }
}
}
Ident::OpaqueRef(name) => { Ident::OpaqueRef(name) => {
let loc_pat = Loc { let loc_pat = Loc {
region: loc_ident.region, region: loc_ident.region,
@ -408,15 +409,25 @@ fn loc_ident_pattern_help<'a>(
}; };
// Make sure `@Foo Bar 1` is parsed as `@Foo (Bar) 1`, and not `@Foo (Bar 1)` // Make sure `@Foo Bar 1` is parsed as `@Foo (Bar) 1`, and not `@Foo (Bar 1)`
let (_, (args, style), state) = if can_have_arguments { let (_, arg_type, state) = if can_have_arguments {
one_of!(commas_and_paren_args_help, whitespace_args) one_of!(commas_and_paren_args_help, whitespace_args)
.parse(arena, state, min_indent)? .parse(arena, state, min_indent)?
} else { } else {
commas_and_paren_args_help.parse(arena, state, min_indent)? match commas_and_paren_args_help.parse(arena, state.clone(), min_indent) {
Ok((_, res, new_state)) => (MadeProgress, res, new_state),
Err((NoProgress, _)) => return Ok((MadeProgress, loc_pat, state)),
Err((MadeProgress, e)) => return Err((MadeProgress, e)),
}
}; };
match arg_type {
ArgType::PncArgs(args) => {
let pnc_args = args.value;
let value = Pattern::PncApply(&*arena.alloc(loc_pat), pnc_args);
let region = Region::span_across(&loc_ident.region, &args.region);
Ok((MadeProgress, Loc { region, value }, state))
}
ArgType::WhitespaceArgs(args) => {
let loc_args: &[Loc<Pattern<'_>>] = { args }; let loc_args: &[Loc<Pattern<'_>>] = { args };
if loc_args.is_empty() { if loc_args.is_empty() {
Ok((MadeProgress, loc_pat, state)) Ok((MadeProgress, loc_pat, state))
} else { } else {
@ -424,11 +435,13 @@ fn loc_ident_pattern_help<'a>(
std::iter::once(&loc_ident.region) std::iter::once(&loc_ident.region)
.chain(loc_args.iter().map(|loc_arg| &loc_arg.region)), .chain(loc_args.iter().map(|loc_arg| &loc_arg.region)),
); );
let value = Pattern::Apply(&*arena.alloc(loc_pat), loc_args, style); let value = Pattern::Apply(&*arena.alloc(loc_pat), loc_args);
Ok((MadeProgress, Loc { region, value }, state)) Ok((MadeProgress, Loc { region, value }, state))
} }
} }
}
}
Ident::Access { Ident::Access {
module_name, parts, .. module_name, parts, ..
} => { } => {
@ -683,7 +696,7 @@ mod test_parse_pattern {
region: new_region(3, 4), region: new_region(3, 4),
}]; }];
let expected = Loc { let expected = Loc {
value: Pattern::Apply(&expected_tag, &expected_args, PatternApplyStyle::Whitespace), value: Pattern::Apply(&expected_tag, &expected_args),
region: new_region(0, 4), region: new_region(0, 4),
}; };
assert_eq!(format!("{res:#?}"), format!("{expected:#?}")); assert_eq!(format!("{res:#?}"), format!("{expected:#?}"));
@ -699,17 +712,13 @@ mod test_parse_pattern {
value: Pattern::Tag("Ok"), value: Pattern::Tag("Ok"),
region: new_region(0, 2), region: new_region(0, 2),
}; };
let expected_args = [Loc { let expected_args = Collection::with_items(arena.alloc([Loc {
value: Pattern::Identifier { ident: "a" }, value: Pattern::Identifier { ident: "a" },
region: new_region(3, 4), region: new_region(3, 4),
}]; }]));
let expected = Loc { let expected = Loc {
value: Pattern::Apply( value: Pattern::PncApply(&expected_tag, expected_args),
&expected_tag, region: new_region(0, 5),
&expected_args,
PatternApplyStyle::ParensAndCommas,
),
region: new_region(0, 4),
}; };
assert_eq!(format!("{res:#?}"), format!("{expected:#?}")); assert_eq!(format!("{res:#?}"), format!("{expected:#?}"));
} }
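The pattern parser mirrors the expression side: after a tag or opaque reference it now distinguishes a parenthesized, comma-separated argument list from whitespace-separated arguments and builds Pattern::PncApply or Pattern::Apply accordingly. A self-contained sketch of that dispatch, with hypothetical simplified types standing in for the real ArgType and Loc<Pattern>:

    #[derive(Debug)]
    enum Pattern {
        Tag(&'static str),
        Ident(&'static str),
        Apply(Box<Pattern>, Vec<Pattern>),    // whitespace style: Ok a
        PncApply(Box<Pattern>, Vec<Pattern>), // parens-and-commas style: Ok(a)
    }

    // Hypothetical stand-in for the ArgType enum introduced above.
    enum ArgType {
        PncArgs(Vec<Pattern>),        // parsed from `( a, b, ... )`
        WhitespaceArgs(Vec<Pattern>), // parsed from `a b ...`
    }

    fn finish_tag_pattern(tag: Pattern, args: ArgType) -> Pattern {
        match args {
            // `Ok(a)` becomes PncApply, even with a single argument.
            ArgType::PncArgs(args) => Pattern::PncApply(Box::new(tag), args),
            // A bare tag with no whitespace arguments stays as-is.
            ArgType::WhitespaceArgs(args) if args.is_empty() => tag,
            ArgType::WhitespaceArgs(args) => Pattern::Apply(Box::new(tag), args),
        }
    }

    fn main() {
        let pnc = finish_tag_pattern(Pattern::Tag("Ok"), ArgType::PncArgs(vec![Pattern::Ident("a")]));
        let ws = finish_tag_pattern(Pattern::Tag("Ok"), ArgType::WhitespaceArgs(vec![Pattern::Ident("a")]));
        println!("{pnc:?}");
        println!("{ws:?}");
    }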

View file

@ -512,13 +512,14 @@ impl Problem {
| Problem::ReturnAtEndOfFunction { region } | Problem::ReturnAtEndOfFunction { region }
| Problem::UnboundTypeVarsInAs(region) | Problem::UnboundTypeVarsInAs(region)
| Problem::UnsuffixedEffectfulRecordField(region) | Problem::UnsuffixedEffectfulRecordField(region)
| Problem::SuffixedPureRecordField(region) => Some(*region), | Problem::SuffixedPureRecordField(region)
| Problem::StmtAfterExpr(region) => Some(*region),
Problem::RuntimeError(RuntimeError::CircularDef(cycle_entries)) Problem::RuntimeError(RuntimeError::CircularDef(cycle_entries))
| Problem::BadRecursion(cycle_entries) => { | Problem::BadRecursion(cycle_entries) => {
cycle_entries.first().map(|entry| entry.expr_region) cycle_entries.first().map(|entry| entry.expr_region)
} }
Problem::StmtAfterExpr(region) => Some(*region),
Problem::RuntimeError(RuntimeError::UnresolvedTypeVar) Problem::RuntimeError(RuntimeError::UnresolvedTypeVar)
| Problem::RuntimeError(RuntimeError::ErroneousType) | Problem::RuntimeError(RuntimeError::ErroneousType)
| Problem::RuntimeError(RuntimeError::NonExhaustivePattern) | Problem::RuntimeError(RuntimeError::NonExhaustivePattern)

View file

@ -23,7 +23,7 @@ use roc_parse::{
}; };
use roc_region::all::Loc; use roc_region::all::Loc;
use roc_region::all::Region; use roc_region::all::Region;
use roc_test_utils::assert_multiline_str_eq; use roc_test_utils::{assert_multiline_str_eq, pretty_compare_string};
use roc_types::{ use roc_types::{
subs::{VarStore, Variable}, subs::{VarStore, Variable},
types::{AliasVar, Type}, types::{AliasVar, Type},
@ -392,6 +392,10 @@ impl<'a> Input<'a> {
// I don't have the patience to debug this right now, so let's leave it for another day... // I don't have the patience to debug this right now, so let's leave it for another day...
// TODO: fix PartialEq impl on ast types // TODO: fix PartialEq impl on ast types
if format!("{ast_normalized:?}") != format!("{reparsed_ast_normalized:?}") { if format!("{ast_normalized:?}") != format!("{reparsed_ast_normalized:?}") {
pretty_compare_string(
format!("{ast_normalized:#?}").as_str(),
format!("{reparsed_ast_normalized:#?}").as_str(),
);
panic!( panic!(
"Formatting bug; formatting didn't reparse to the same AST (after removing spaces)\n\n\ "Formatting bug; formatting didn't reparse to the same AST (after removing spaces)\n\n\
* * * Source code before formatting:\n{}\n\n\ * * * Source code before formatting:\n{}\n\n\
@ -411,6 +415,10 @@ impl<'a> Input<'a> {
let reformatted = reparsed_ast.format(flags); let reformatted = reparsed_ast.format(flags);
if output != reformatted { if output != reformatted {
pretty_compare_string(
format!("{actual:#?}").as_str(),
format!("{reparsed_ast:#?}").as_str(),
);
eprintln!("Formatting bug; formatting is not stable.\nOriginal code:\n{}\n\nFormatted code:\n{}\n\nAST:\n{:#?}\n\nReparsed AST:\n{:#?}\n\n", eprintln!("Formatting bug; formatting is not stable.\nOriginal code:\n{}\n\nFormatted code:\n{}\n\nAST:\n{:#?}\n\nReparsed AST:\n{:#?}\n\n",
self.as_str(), self.as_str(),
output.as_ref().as_str(), output.as_ref().as_str(),

View file

@ -21,7 +21,6 @@
"E", "E",
), ),
[], [],
Whitespace,
), ),
ann_type: @2-3 Apply( ann_type: @2-3 Apply(
"", "",

View file

@ -26,7 +26,6 @@
ident: "x", ident: "x",
}, },
], ],
Whitespace,
), ),
ann_type: @11-25 TagUnion { ann_type: @11-25 TagUnion {
ext: None, ext: None,
@ -55,7 +54,6 @@
ident: "x", ident: "x",
}, },
], ],
Whitespace,
), ),
body_expr: @37-46 Apply( body_expr: @37-46 Apply(
@37-43 Tag( @37-43 Tag(

View file

@ -1,5 +1,5 @@
@0-12 SpaceAfter( @0-12 SpaceAfter(
Apply( PncApply(
@0-4 Tag( @0-4 Tag(
"Whee", "Whee",
), ),
@ -11,7 +11,6 @@
"34", "34",
), ),
], ],
ParensAndCommas,
), ),
[ [
Newline, Newline,

View file

@ -1,5 +1,5 @@
@0-4 SpaceAfter( @0-5 SpaceAfter(
Apply( PncApply(
@0-2 Tag( @0-2 Tag(
"Ok", "Ok",
), ),
@ -8,7 +8,6 @@
ident: "a", ident: "a",
}, },
], ],
ParensAndCommas,
), ),
[ [
Newline, Newline,

View file

@ -8,7 +8,6 @@
ident: "a", ident: "a",
}, },
], ],
Whitespace,
), ),
[ [
Newline, Newline,

View file

@ -1,5 +1,5 @@
@0-13 SpaceAfter( @0-13 SpaceAfter(
Apply( PncApply(
@0-4 Var { @0-4 Var {
module_name: "", module_name: "",
ident: "whee", ident: "whee",
@ -12,7 +12,6 @@
"34", "34",
), ),
], ],
ParensAndCommas,
), ),
[ [
Newline, Newline,

View file

@ -27,7 +27,6 @@
ident: "inner", ident: "inner",
}, },
], ],
Whitespace,
), ),
PatternAs { PatternAs {
spaces_before: [], spaces_before: [],

View file

@ -38,7 +38,6 @@
}, },
), ),
], ],
Whitespace,
), ),
], ],
@17-18 SpaceBefore( @17-18 SpaceBefore(

View file

@ -1,7 +1,7 @@
@0-13 SpaceAfter( @0-13 SpaceAfter(
Closure( Closure(
[ [
@1-5 Apply( @1-9 PncApply(
@1-2 Tag( @1-2 Tag(
"L", "L",
), ),
@ -20,7 +20,6 @@
], ],
), ),
], ],
ParensAndCommas,
), ),
], ],
@11-13 Num( @11-13 Num(

View file

@ -1,5 +1,5 @@
@0-44 SpaceAfter( @0-44 SpaceAfter(
Apply( PncApply(
@0-2 Tag( @0-2 Tag(
"PP", "PP",
), ),
@ -10,7 +10,7 @@
"P", "P",
), ),
[ [
@4-42 Apply( @4-42 PncApply(
@4-6 OpaqueRef( @4-6 OpaqueRef(
"@P", "@P",
), ),
@ -22,13 +22,13 @@
), ),
[ [
@9-24 SpaceBefore( @9-24 SpaceBefore(
Apply( PncApply(
@9-10 Tag( @9-10 Tag(
"P", "P",
), ),
[ [
@11-22 SpaceAfter( @11-22 SpaceAfter(
Apply( PncApply(
@11-14 Tag( @11-14 Tag(
"PPP", "PPP",
), ),
@ -55,21 +55,19 @@
], ],
), ),
], ],
ParensAndCommas,
), ),
[ [
Newline, Newline,
], ],
), ),
], ],
ParensAndCommas,
), ),
[ [
Newline, Newline,
], ],
), ),
@25-40 SpaceBefore( @25-40 SpaceBefore(
Apply( PncApply(
@25-27 Tag( @25-27 Tag(
"PP", "PP",
), ),
@ -86,7 +84,6 @@
], ],
), ),
], ],
ParensAndCommas,
), ),
[ [
Newline, Newline,
@ -100,7 +97,6 @@
], ],
), ),
], ],
ParensAndCommas,
), ),
], ],
Space, Space,
@ -110,7 +106,6 @@
], ],
), ),
], ],
ParensAndCommas,
), ),
[ [
Newline, Newline,

View file

@ -16,7 +16,6 @@
], ],
), ),
], ],
Whitespace,
), ),
], ],
@8-10 UnaryOp( @8-10 UnaryOp(

View file

@ -19,7 +19,6 @@
], ],
), ),
], ],
Whitespace,
), ),
], ],
@10-12 Var { @10-12 Var {

View file

@ -1,4 +1,5 @@
1(0) # 1(
0, #
# #
: gi ) : gi
M M

View file

@ -16,7 +16,7 @@
type_defs: [], type_defs: [],
value_defs: [ value_defs: [
Annotation( Annotation(
@0-10 Apply( @0-10 PncApply(
@0-1 NumLiteral( @0-1 NumLiteral(
"1", "1",
), ),
@ -35,7 +35,6 @@
], ],
), ),
], ],
ParensAndCommas,
), ),
@11-13 BoundVariable( @11-13 BoundVariable(
"gi", "gi",

View file

@ -1,5 +1,7 @@
1( 1(
0( # 0( #
0,),) 0,
),
)
f : f f : f
t t

View file

@ -18,13 +18,13 @@
value_defs: [ value_defs: [
Annotation( Annotation(
@0-12 Apply( @0-12 Apply(
@0-12 Apply( @0-12 PncApply(
@0-1 NumLiteral( @0-1 NumLiteral(
"1", "1",
), ),
[ [
@2-11 SpaceAfter( @2-11 SpaceAfter(
Apply( PncApply(
@3-4 NumLiteral( @3-4 NumLiteral(
"0", "0",
), ),
@ -40,21 +40,18 @@
], ],
), ),
], ],
ParensAndCommas,
), ),
[ [
Newline, Newline,
], ],
), ),
], ],
ParensAndCommas,
), ),
[ [
@12-13 Identifier { @12-13 Identifier {
ident: "f", ident: "f",
}, },
], ],
Whitespace,
), ),
@14-15 BoundVariable( @14-15 BoundVariable(
"f", "f",

View file

@ -1,7 +1,9 @@
1( 1(
ts(0),) ts(
0,
# #
),
)
f : i7f f : i7f
e e

View file

@ -17,13 +17,13 @@
value_defs: [ value_defs: [
Annotation( Annotation(
@0-15 Apply( @0-15 Apply(
@0-15 Apply( @0-15 PncApply(
@0-1 NumLiteral( @0-1 NumLiteral(
"1", "1",
), ),
[ [
@2-13 SpaceAfter( @2-13 SpaceAfter(
Apply( PncApply(
@2-4 Identifier { @2-4 Identifier {
ident: "ts", ident: "ts",
}, },
@ -41,21 +41,18 @@
], ],
), ),
], ],
ParensAndCommas,
), ),
[ [
Newline, Newline,
], ],
), ),
], ],
ParensAndCommas,
), ),
[ [
@15-16 Identifier { @15-16 Identifier {
ident: "f", ident: "f",
}, },
], ],
Whitespace,
), ),
@17-20 BoundVariable( @17-20 BoundVariable(
"i7f", "i7f",

View file

@ -34,7 +34,6 @@
ident: "f", ident: "f",
}, },
], ],
Whitespace,
), ),
@14-15 BoundVariable( @14-15 BoundVariable(
"f", "f",

View file

@ -26,7 +26,6 @@
ident: "str", ident: "str",
}, },
], ],
Whitespace,
), ),
@12-36 Apply( @12-36 Apply(
@12-17 Tag( @12-17 Tag(

View file

@ -35,7 +35,6 @@
ident: "e", ident: "e",
}, },
], ],
Whitespace,
), ),
@8-14 Apply( @8-14 Apply(
@9-10 SpaceAfter( @9-10 SpaceAfter(
@ -53,7 +52,6 @@
ident: "p", ident: "p",
}, },
], ],
Whitespace,
), ),
], ],
), ),

View file

@ -33,7 +33,6 @@
], ],
), ),
], ],
Whitespace,
), ),
@15-16 BoundVariable( @15-16 BoundVariable(
"s", "s",

View file

@ -1,8 +1,10 @@
combine( combine(
mix(vodka, gin), mix(vodka, gin),
Juices({ Juices(
{
color: Colors.orange, color: Colors.orange,
flavor: Flavors.orange, flavor: Flavors.orange,
amount: 1 + 2, amount: 1 + 2,
}), },
),
) )

View file

@ -1,11 +1,11 @@
@0-109 SpaceAfter( @0-109 SpaceAfter(
Apply( PncApply(
@0-7 Var { @0-7 Var {
module_name: "", module_name: "",
ident: "combine", ident: "combine",
}, },
[ [
@8-23 Apply( @8-23 PncApply(
@8-11 Var { @8-11 Var {
module_name: "", module_name: "",
ident: "mix", ident: "mix",
@ -20,9 +20,8 @@
ident: "gin", ident: "gin",
}, },
], ],
ParensAndCommas,
), ),
@25-108 Apply( @25-108 PncApply(
@25-31 Tag( @25-31 Tag(
"Juices", "Juices",
), ),
@ -85,10 +84,8 @@
], ],
), ),
], ],
ParensAndCommas,
), ),
], ],
ParensAndCommas,
), ),
[ [
Newline, Newline,

View file

@ -1,6 +1,8 @@
1( 1(
0 # 0 #
f,)( f,
)(
0 # 0 #
f,) : f f,
) : f
e e

View file

@ -17,8 +17,8 @@
type_defs: [], type_defs: [],
value_defs: [ value_defs: [
Annotation( Annotation(
@0-17 Apply( @0-17 PncApply(
@0-9 Apply( @0-9 PncApply(
@0-1 NumLiteral( @0-1 NumLiteral(
"1", "1",
), ),
@ -39,10 +39,8 @@
ident: "f", ident: "f",
}, },
], ],
Whitespace,
), ),
], ],
ParensAndCommas,
), ),
[ [
@10-16 Apply( @10-16 Apply(
@ -61,10 +59,8 @@
ident: "f", ident: "f",
}, },
], ],
Whitespace,
), ),
], ],
ParensAndCommas,
), ),
@18-19 BoundVariable( @18-19 BoundVariable(
"f", "f",

View file

@ -32,7 +32,6 @@
], ],
), ),
], ],
Whitespace,
), ),
[ [
Newline, Newline,

View file

@ -34,7 +34,6 @@
), ),
), ),
], ],
Whitespace,
), ),
@12-13 Apply( @12-13 Apply(
"", "",

View file

@ -42,14 +42,12 @@
), ),
), ),
], ],
Whitespace,
), ),
[ [
@12-13 Identifier { @12-13 Identifier {
ident: "f", ident: "f",
}, },
], ],
Whitespace,
), ),
@14-15 Apply( @14-15 Apply(
"", "",

View file

@ -33,10 +33,8 @@
"0", "0",
), ),
], ],
Whitespace,
), ),
], ],
Whitespace,
), ),
@15-16 BoundVariable( @15-16 BoundVariable(
"f", "f",

View file

@ -36,7 +36,6 @@
"2", "2",
), ),
], ],
Whitespace,
), ),
@12-13 Apply( @12-13 Apply(
"", "",

View file

@ -1,6 +1,6 @@
@0-9 UnaryOp( @0-9 UnaryOp(
@2-8 ParensAround( @2-8 ParensAround(
Apply( PncApply(
@2-3 Num( @2-3 Num(
"0", "0",
), ),
@ -23,7 +23,6 @@
Space, Space,
), ),
], ],
ParensAndCommas,
), ),
), ),
@0-1 Negate, @0-1 Negate,

View file

@ -32,7 +32,6 @@
], ],
), ),
], ],
Whitespace,
), ),
], ],
@14-15 Var { @14-15 Var {

View file

@ -25,7 +25,6 @@
ident: "x", ident: "x",
}, },
], ],
Whitespace,
), ),
@7-8 BoundVariable( @7-8 BoundVariable(
"i", "i",

View file

@ -23,7 +23,6 @@
], ],
), ),
], ],
Whitespace,
), ),
[ [
Newline, Newline,

View file

@ -26,7 +26,6 @@
ident: "it", ident: "it",
}, },
], ],
Whitespace,
), ),
@12-22 Apply( @12-22 Apply(
@12-14 Var { @12-14 Var {

View file

@ -20,7 +20,6 @@
ident: "m", ident: "m",
}, },
], ],
Whitespace,
), ),
[ [
Newline, Newline,

View file

@ -28,7 +28,6 @@
ident: "a", ident: "a",
}, },
], ],
Whitespace,
), ),
], ],
}, },

View file

@ -1,3 +1,5 @@
1(i, p) # 1(
: f i,
p, #
) : f
n n

View file

@ -16,7 +16,7 @@
type_defs: [], type_defs: [],
value_defs: [ value_defs: [
Annotation( Annotation(
@0-8 Apply( @0-8 PncApply(
@0-1 NumLiteral( @0-1 NumLiteral(
"1", "1",
), ),
@ -35,7 +35,6 @@
], ],
), ),
], ],
ParensAndCommas,
), ),
@9-10 BoundVariable( @9-10 BoundVariable(
"f", "f",

View file

@ -37,7 +37,6 @@
], ],
), ),
], ],
Whitespace,
), ),
@7-8 BoundVariable( @7-8 BoundVariable(
"s", "s",

View file

@ -23,7 +23,6 @@
}, },
), ),
], ],
Whitespace,
), ),
[ [
Newline, Newline,

View file

@ -43,13 +43,11 @@
ident: "ry", ident: "ry",
}, },
], ],
Whitespace,
), ),
@47-48 Underscore( @47-48 Underscore(
"", "",
), ),
], ],
Whitespace,
), ),
[ [
Newline, Newline,

View file

@ -32,7 +32,6 @@
), ),
), ),
], ],
Whitespace,
), ),
], ],
}, },

View file

@ -37,7 +37,6 @@
ident: "a", ident: "a",
}, },
], ],
Whitespace,
), ),
@10-11 BoundVariable( @10-11 BoundVariable(
"t", "t",

View file

@ -45,7 +45,6 @@
"K", "K",
), ),
], ],
Whitespace,
), ),
[ [
LineComment( LineComment(

View file

@ -29,7 +29,6 @@
ident: "code", ident: "code",
}, },
], ],
Whitespace,
), ),
@30-33 Var { @30-33 Var {
module_name: "", module_name: "",

View file

@ -29,7 +29,6 @@
ident: "code", ident: "code",
}, },
], ],
Whitespace,
), ),
@29-32 Var { @29-32 Var {
module_name: "", module_name: "",

View file

@ -18,12 +18,12 @@
value_defs: [ value_defs: [
Annotation( Annotation(
@0-10 Apply( @0-10 Apply(
@0-10 Apply( @0-10 PncApply(
@0-1 NumLiteral( @0-1 NumLiteral(
"1", "1",
), ),
[ [
@2-9 Apply( @2-9 PncApply(
@3-4 NumLiteral( @3-4 NumLiteral(
"0", "0",
), ),
@ -37,17 +37,14 @@
], ],
), ),
], ],
ParensAndCommas,
), ),
], ],
ParensAndCommas,
), ),
[ [
@10-11 Identifier { @10-11 Identifier {
ident: "f", ident: "f",
}, },
], ],
Whitespace,
), ),
@12-13 BoundVariable( @12-13 BoundVariable(
"f", "f",

View file

@ -28,7 +28,6 @@
), ),
), ),
], ],
Whitespace,
), ),
@8-15 Apply( @8-15 Apply(
@8-9 Var { @8-9 Var {

View file

@ -50,7 +50,6 @@
"", "",
), ),
], ],
Whitespace,
), ),
@11-19 Apply( @11-19 Apply(
@11-15 Tag( @11-15 Tag(
@ -80,7 +79,6 @@
ident: "y", ident: "y",
}, },
], ],
Whitespace,
), ),
@31-39 Apply( @31-39 Apply(
@31-35 Tag( @31-35 Tag(
@ -110,7 +108,6 @@
"", "",
), ),
], ],
Whitespace,
), ),
@51-59 Apply( @51-59 Apply(
@51-55 Tag( @51-55 Tag(
@ -164,7 +161,6 @@
"", "",
), ),
], ],
Whitespace,
), ),
@90-98 Apply( @90-98 Apply(
@90-94 Tag( @90-94 Tag(
@ -178,10 +174,8 @@
ident: "y", ident: "y",
}, },
], ],
Whitespace,
), ),
], ],
Whitespace,
), ),
@102-128 Apply( @102-128 Apply(
@102-106 Tag( @102-106 Tag(

View file

@ -23,7 +23,6 @@
[], [],
), ),
], ],
Whitespace,
), ),
[ [
Newline, Newline,

View file

@ -3810,6 +3810,16 @@ mod test_fmt {
)); ));
} }
#[test]
fn pipe_pnc_application_with_comment_no_args() {
expr_formats_same(indoc!(
r"
combine( # This is a comment
)
"
));
}
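A sketch of one more case in the same style, not part of this commit: it assumes (as the combine/Juices snapshot above suggests) that a hand-written multi-line PNC call is already in its stable formatting, with one argument per line and trailing commas.

    // Sketch only, mirroring the surrounding expr_formats_same tests.
    #[test]
    fn pnc_application_multiline_args() {
        expr_formats_same(indoc!(
            r"
            combine(
                mix(vodka, gin),
                juice,
            )
            "
        ));
    }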
#[test] #[test]
fn single_line_nested_application_with_parens() { fn single_line_nested_application_with_parens() {
expr_formats_same(indoc!( expr_formats_same(indoc!(

View file

@ -694,6 +694,9 @@ impl IterTokens for Loc<Expr<'_>> {
Expr::Apply(e1, e2, _called_via) => (e1.iter_tokens(arena).into_iter()) Expr::Apply(e1, e2, _called_via) => (e1.iter_tokens(arena).into_iter())
.chain(e2.iter_tokens(arena)) .chain(e2.iter_tokens(arena))
.collect_in(arena), .collect_in(arena),
Expr::PncApply(e1, e2) => (e1.iter_tokens(arena).into_iter())
.chain(e2.iter_tokens(arena))
.collect_in(arena),
Expr::BinOps(e1, e2) => (e1.iter_tokens(arena).into_iter()) Expr::BinOps(e1, e2) => (e1.iter_tokens(arena).into_iter())
.chain(e2.iter_tokens(arena)) .chain(e2.iter_tokens(arena))
.collect_in(arena), .collect_in(arena),
@ -762,7 +765,10 @@ impl IterTokens for Loc<Pattern<'_>> {
Pattern::Identifier { .. } => onetoken(Token::Variable, region, arena), Pattern::Identifier { .. } => onetoken(Token::Variable, region, arena),
Pattern::Tag(_) => onetoken(Token::Tag, region, arena), Pattern::Tag(_) => onetoken(Token::Tag, region, arena),
Pattern::OpaqueRef(_) => onetoken(Token::Type, region, arena), Pattern::OpaqueRef(_) => onetoken(Token::Type, region, arena),
Pattern::Apply(p1, p2, _) => (p1.iter_tokens(arena).into_iter()) Pattern::Apply(p1, p2) => (p1.iter_tokens(arena).into_iter())
.chain(p2.iter_tokens(arena))
.collect_in(arena),
Pattern::PncApply(p1, p2) => (p1.iter_tokens(arena).into_iter())
.chain(p2.iter_tokens(arena)) .chain(p2.iter_tokens(arena))
.collect_in(arena), .collect_in(arena),
Pattern::RecordDestructure(ps) => ps.iter_tokens(arena), Pattern::RecordDestructure(ps) => ps.iter_tokens(arena),

View file

@ -2,6 +2,7 @@
#[doc(hidden)] #[doc(hidden)]
pub use pretty_assertions::assert_eq as _pretty_assert_eq; pub use pretty_assertions::assert_eq as _pretty_assert_eq;
use pretty_assertions::StrComparison;
#[derive(PartialEq, Eq)] #[derive(PartialEq, Eq)]
pub struct DebugAsDisplay<T>(pub T); pub struct DebugAsDisplay<T>(pub T);
@ -19,6 +20,17 @@ macro_rules! assert_multiline_str_eq {
}; };
} }
pub fn pretty_compare_string(a: &str, b: &str) {
println!("{}", StrComparison::new(a, b));
}
#[macro_export]
macro_rules! print_pretty_string_comparison {
($a:expr, $b:expr) => {
$crate::pretty_compare_string($a, $b)
};
}
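For reference, the new helper can also be called on its own; it simply prints pretty_assertions' StrComparison of the two strings, which is the line diff the formatter tests above now emit when the reparsed AST differs. A minimal usage sketch, assuming roc_test_utils is available as a dependency:

    use roc_test_utils::pretty_compare_string;

    fn main() {
        // Prints a line-by-line comparison of the two strings.
        pretty_compare_string("Apply(Tag(\"Ok\"))", "PncApply(Tag(\"Ok\"))");
    }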
/// a very simple implementation of En/DecoderFormatting to be embedded in roc source under test /// a very simple implementation of En/DecoderFormatting to be embedded in roc source under test
/// ///
/// - numbers and bools are encoded as 'n' <num> ' ' /// - numbers and bools are encoded as 'n' <num> ' '