Merge branch 'trunk' of github.com:rtfeldman/roc into builtin-maxI128

commit 66f07d984e
Author: Eric Henry
Date: 2021-03-12 17:40:57 -05:00
41 changed files with 3892 additions and 3648 deletions

Cargo.lock (generated, 1338 lines changed): file diff suppressed because it is too large.


@@ -8,7 +8,7 @@ install-other-libs:
FROM +prep-debian
RUN apt -y install wget git
RUN apt -y install libxcb-shape0-dev libxcb-xfixes0-dev # for editor clipboard
-RUN apt -y install libc++-dev libc++abi-dev libunwind-dev pkg-config libx11-dev zlib1g-dev
+RUN apt -y install libc++-dev libc++abi-dev g++ libunwind-dev pkg-config libx11-dev zlib1g-dev

install-zig-llvm-valgrind-clippy-rustfmt:
FROM +install-other-libs
@@ -30,10 +30,10 @@ install-zig-llvm-valgrind-clippy-rustfmt:
RUN wget https://sourceware.org/pub/valgrind/valgrind-3.16.1.tar.bz2
RUN tar -xf valgrind-3.16.1.tar.bz2
# need to cd every time, every command starts at WORKDIR
-RUN cd valgrind-3.16.1; ./autogen.sh
-RUN cd valgrind-3.16.1; ./configure --disable-dependency-tracking
-RUN cd valgrind-3.16.1; make -j`nproc`
-RUN cd valgrind-3.16.1; make install
+RUN cd valgrind-3.16.1 && ./autogen.sh
+RUN cd valgrind-3.16.1 && ./configure --disable-dependency-tracking
+RUN cd valgrind-3.16.1 && make -j`nproc`
+RUN cd valgrind-3.16.1 && make install
# clippy
RUN rustup component add clippy
# rustfmt
@@ -75,16 +75,16 @@ save-cache:
FROM +install-zig-llvm-valgrind-clippy-rustfmt
COPY +prepare-cache/recipe.json ./
RUN --mount=type=cache,target=$SCCACHE_DIR \
-cargo chef cook; sccache --show-stats # for clippy
+cargo chef cook && sccache --show-stats # for clippy
RUN --mount=type=cache,target=$SCCACHE_DIR \
-cargo chef cook --release --tests; sccache --show-stats
+cargo chef cook --release --tests && sccache --show-stats
SAVE ARTIFACT target
SAVE ARTIFACT $CARGO_HOME cargo_home

test-zig:
FROM +install-zig-llvm-valgrind-clippy-rustfmt
COPY --dir compiler/builtins/bitcode ./
-RUN cd bitcode; ./run-tests.sh;
+RUN cd bitcode && ./run-tests.sh

check-clippy:
FROM +copy-dirs-and-cache
@@ -101,7 +101,7 @@ test-rust:
FROM +copy-dirs-and-cache
ENV RUST_BACKTRACE=1
RUN --mount=type=cache,target=$SCCACHE_DIR \
-cargo test --release; sccache --show-stats
+cargo test --release && sccache --show-stats

test-all:
BUILD +test-zig


@@ -114,6 +114,7 @@ pub const RocList = extern struct {
const Caller1 = fn (?[*]u8, ?[*]u8, ?[*]u8) callconv(.C) void;
const Caller2 = fn (?[*]u8, ?[*]u8, ?[*]u8, ?[*]u8) callconv(.C) void;
+const Caller3 = fn (?[*]u8, ?[*]u8, ?[*]u8, ?[*]u8, ?[*]u8) callconv(.C) void;

pub fn listMap(list: RocList, transform: Opaque, caller: Caller1, alignment: usize, old_element_width: usize, new_element_width: usize) callconv(.C) RocList {
if (list.bytes) |source_ptr| {
@@ -213,6 +214,129 @@ pub fn listMap2(list1: RocList, list2: RocList, transform: Opaque, caller: Calle
}
}
pub fn listMap3(list1: RocList, list2: RocList, list3: RocList, transform: Opaque, caller: Caller3, alignment: usize, a_width: usize, b_width: usize, c_width: usize, d_width: usize, dec_a: Dec, dec_b: Dec, dec_c: Dec) callconv(.C) RocList {
const smaller_length = std.math.min(list1.len(), list2.len());
const output_length = std.math.min(smaller_length, list3.len());
if (list1.bytes) |source_a| {
if (list2.bytes) |source_b| {
if (list3.bytes) |source_c| {
const output = RocList.allocate(std.heap.c_allocator, alignment, output_length, d_width);
const target_ptr = output.bytes orelse unreachable;
var i: usize = 0;
while (i < output_length) : (i += 1) {
const element_a = source_a + i * a_width;
const element_b = source_b + i * b_width;
const element_c = source_c + i * c_width;
const target = target_ptr + i * d_width;
caller(transform, element_a, element_b, element_c, target);
}
// if the lists don't have equal length, we must consume the remaining elements
// In this case we consume by (recursively) decrementing the elements
if (list1.len() > output_length) {
i = output_length;
while (i < list1.len()) : (i += 1) {
const element_a = source_a + i * a_width;
dec_a(element_a);
}
}
if (list2.len() > output_length) {
i = output_length;
while (i < list2.len()) : (i += 1) {
const element_b = source_b + i * b_width;
dec_b(element_b);
}
}
if (list3.len() > output_length) {
i = output_length;
while (i < list3.len()) : (i += 1) {
const element_c = source_c + i * c_width;
dec_c(element_c);
}
}
utils.decref(std.heap.c_allocator, alignment, list1.bytes, list1.len() * a_width);
utils.decref(std.heap.c_allocator, alignment, list2.bytes, list2.len() * b_width);
utils.decref(std.heap.c_allocator, alignment, list3.bytes, list3.len() * c_width);
return output;
} else {
// consume list1 elements (we know there is at least one because the list1.bytes pointer is non-null)
var i: usize = 0;
while (i < list1.len()) : (i += 1) {
const element_a = source_a + i * a_width;
dec_a(element_a);
}
utils.decref(std.heap.c_allocator, alignment, list1.bytes, list1.len() * a_width);
// consume list2 elements (we know there is at least one because the list2.bytes pointer is non-null)
i = 0;
while (i < list2.len()) : (i += 1) {
const element_b = source_b + i * b_width;
dec_b(element_b);
}
utils.decref(std.heap.c_allocator, alignment, list2.bytes, list2.len() * b_width);
return RocList.empty();
}
} else {
// consume list1 elements (we know there is at least one because the list1.bytes pointer is non-null)
var i: usize = 0;
while (i < list1.len()) : (i += 1) {
const element_a = source_a + i * a_width;
dec_a(element_a);
}
utils.decref(std.heap.c_allocator, alignment, list1.bytes, list1.len() * a_width);
// consume list3 elements (if any)
if (list3.bytes) |source_c| {
i = 0;
while (i < list3.len()) : (i += 1) {
const element_c = source_c + i * c_width;
dec_c(element_c);
}
utils.decref(std.heap.c_allocator, alignment, list3.bytes, list3.len() * c_width);
}
return RocList.empty();
}
} else {
// consume list2 elements (if any)
if (list2.bytes) |source_b| {
var i: usize = 0;
while (i < list2.len()) : (i += 1) {
const element_b = source_b + i * b_width;
dec_b(element_b);
}
utils.decref(std.heap.c_allocator, alignment, list2.bytes, list2.len() * b_width);
}
// consume list3 elements (if any)
if (list3.bytes) |source_c| {
var i: usize = 0;
while (i < list3.len()) : (i += 1) {
const element_c = source_c + i * c_width;
dec_c(element_c);
}
utils.decref(std.heap.c_allocator, alignment, list3.bytes, list3.len() * c_width);
}
return RocList.empty();
}
}
pub fn listKeepIf(list: RocList, transform: Opaque, caller: Caller1, alignment: usize, element_width: usize, inc: Inc, dec: Dec) callconv(.C) RocList {
if (list.bytes) |source_ptr| {
const size = list.len();
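Editor's note: the new listMap3 builtin above zips three lists. The output is as long as the shortest input, and leftover elements of the longer lists are never passed to the callback; they are only released via the dec_a/dec_b/dec_c callbacks. Below is a minimal Rust sketch of that behaviour, for illustration only; the hypothetical map3 helper is not part of this commit and ignores reference counting.

use std::cmp::min;

// Hypothetical model of List.map3: zip three slices down to the shortest length.
fn map3<A: Clone, B: Clone, C: Clone, D>(
    xs: &[A],
    ys: &[B],
    zs: &[C],
    f: impl Fn(A, B, C) -> D,
) -> Vec<D> {
    // Output length is the minimum of the three input lengths.
    let len = min(xs.len(), min(ys.len(), zs.len()));
    (0..len)
        .map(|i| f(xs[i].clone(), ys[i].clone(), zs[i].clone()))
        .collect()
    // Elements beyond `len` are never visited here; the real builtin still has
    // to release them, which is what the dec_a/dec_b/dec_c callbacks do.
}

fn main() {
    let out = map3(&[1, 2, 3], &[10, 20, 30, 40], &[100, 200], |a, b, c| a + b + c);
    assert_eq!(out, vec![111, 222]);
}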


@@ -8,6 +8,7 @@ const list = @import("list.zig");
comptime {
exportListFn(list.listMap, "map");
exportListFn(list.listMap2, "map2");
+exportListFn(list.listMap3, "map3");
exportListFn(list.listMapWithIndex, "map_with_index");
exportListFn(list.listKeepIf, "keep_if");
exportListFn(list.listWalk, "walk");


@@ -64,6 +64,7 @@ pub const SET_FROM_LIST: &str = "roc_builtins.dict.set_from_list";
pub const LIST_MAP: &str = "roc_builtins.list.map";
pub const LIST_MAP2: &str = "roc_builtins.list.map2";
+pub const LIST_MAP3: &str = "roc_builtins.list.map3";
pub const LIST_MAP_WITH_INDEX: &str = "roc_builtins.list.map_with_index";
pub const LIST_KEEP_IF: &str = "roc_builtins.list.keep_if";
pub const LIST_KEEP_OKS: &str = "roc_builtins.list.keep_oks";


@@ -821,6 +821,21 @@ pub fn types() -> MutMap<Symbol, (SolvedType, Region)> {
)
});
// map3 : List a, List b, List c, (a, b, c -> d) -> List d
add_type(Symbol::LIST_MAP3, {
let_tvars! {a, b, c, d, cvar};
top_level_function(
vec![
list_type(flex(a)),
list_type(flex(b)),
list_type(flex(c)),
closure(vec![flex(a), flex(b), flex(c)], cvar, Box::new(flex(d))),
],
Box::new(list_type(flex(d))),
)
});
// append : List elem, elem -> List elem
add_type(
Symbol::LIST_APPEND,


@@ -81,6 +81,7 @@ pub fn builtin_defs_map(symbol: Symbol, var_store: &mut VarStore) -> Option<Def>
LIST_JOIN => list_join,
LIST_MAP => list_map,
LIST_MAP2 => list_map2,
+LIST_MAP3 => list_map3,
LIST_MAP_WITH_INDEX => list_map_with_index,
LIST_KEEP_IF => list_keep_if,
LIST_KEEP_OKS => list_keep_oks,
@@ -219,6 +220,7 @@ pub fn builtin_defs(var_store: &mut VarStore) -> MutMap<Symbol, Def> {
Symbol::LIST_JOIN => list_join,
Symbol::LIST_MAP => list_map,
Symbol::LIST_MAP2 => list_map2,
+Symbol::LIST_MAP3 => list_map3,
Symbol::LIST_MAP_WITH_INDEX => list_map_with_index,
Symbol::LIST_KEEP_IF => list_keep_if,
Symbol::LIST_KEEP_OKS => list_keep_oks,
@@ -372,6 +374,38 @@ fn lowlevel_3(symbol: Symbol, op: LowLevel, var_store: &mut VarStore) -> Def {
)
}
fn lowlevel_4(symbol: Symbol, op: LowLevel, var_store: &mut VarStore) -> Def {
let arg1_var = var_store.fresh();
let arg2_var = var_store.fresh();
let arg3_var = var_store.fresh();
let arg4_var = var_store.fresh();
let ret_var = var_store.fresh();
let body = RunLowLevel {
op,
args: vec![
(arg1_var, Var(Symbol::ARG_1)),
(arg2_var, Var(Symbol::ARG_2)),
(arg3_var, Var(Symbol::ARG_3)),
(arg4_var, Var(Symbol::ARG_4)),
],
ret_var,
};
defn(
symbol,
vec![
(arg1_var, Symbol::ARG_1),
(arg2_var, Symbol::ARG_2),
(arg3_var, Symbol::ARG_3),
(arg4_var, Symbol::ARG_4),
],
var_store,
body,
ret_var,
)
}
/// Num.maxInt : Int
fn num_max_int(symbol: Symbol, var_store: &mut VarStore) -> Def {
let int_var = var_store.fresh();
@@ -2151,6 +2185,11 @@ fn list_map2(symbol: Symbol, var_store: &mut VarStore) -> Def {
lowlevel_3(symbol, LowLevel::ListMap2, var_store)
}
/// List.map3 : List a, List b, List c, (a, b, c -> d) -> List d
fn list_map3(symbol: Symbol, var_store: &mut VarStore) -> Def {
lowlevel_4(symbol, LowLevel::ListMap3, var_store)
}
/// Dict.hashTestOnly : k, v -> Nat
pub fn dict_hash_test_only(symbol: Symbol, var_store: &mut VarStore) -> Def {
lowlevel_2(symbol, LowLevel::Hash, var_store)


@@ -8,9 +8,6 @@ use roc_can::operator;
use roc_can::scope::Scope;
use roc_collections::all::MutMap;
use roc_module::symbol::{IdentIds, Interns, ModuleId, ModuleIds};
-use roc_parse::ast::{self, Attempting};
-use roc_parse::blankspace::space0_before;
-use roc_parse::parser::{loc, Parser, State, SyntaxError};
use roc_problem::can::Problem;
use roc_region::all::{Located, Region};
use roc_types::subs::{VarStore, Variable};
@@ -20,25 +17,6 @@ pub fn test_home() -> ModuleId {
ModuleIds::default().get_or_insert(&"Test".into())
}
#[allow(dead_code)]
pub fn parse_with<'a>(arena: &'a Bump, input: &'a str) -> Result<ast::Expr<'a>, SyntaxError<'a>> {
parse_loc_with(arena, input).map(|loc_expr| loc_expr.value)
}
#[allow(dead_code)]
pub fn parse_loc_with<'a>(
arena: &'a Bump,
input: &'a str,
) -> Result<Located<ast::Expr<'a>>, SyntaxError<'a>> {
let state = State::new_in(arena, input.trim().as_bytes(), Attempting::Module);
let parser = space0_before(loc(roc_parse::expr::expr(0)), 0);
let answer = parser.parse(&arena, state);
answer
.map(|(_, loc_expr, _)| loc_expr)
.map_err(|(_, fail, _)| fail)
}
#[allow(dead_code)]
pub fn can_expr(expr_str: &str) -> CanExprOut {
can_expr_with(&Bump::new(), test_home(), expr_str)
@@ -56,7 +34,7 @@ pub struct CanExprOut {
#[allow(dead_code)]
pub fn can_expr_with(arena: &Bump, home: ModuleId, expr_str: &str) -> CanExprOut {
-let loc_expr = parse_loc_with(&arena, expr_str).unwrap_or_else(|e| {
+let loc_expr = roc_parse::test_helpers::parse_loc_with(&arena, expr_str).unwrap_or_else(|e| {
panic!(
"can_expr_with() got a parse error when attempting to canonicalize:\n\n{:?} {:?}",
expr_str, e


@@ -4,8 +4,6 @@ extern crate pretty_assertions;
extern crate indoc;
extern crate bumpalo;
extern crate roc_fmt;
-#[macro_use]
-extern crate roc_parse;
#[cfg(test)]
mod test_fmt {
@@ -14,27 +12,15 @@ mod test_fmt {
use roc_fmt::annotation::{Formattable, Newlines, Parens};
use roc_fmt::def::fmt_def;
use roc_fmt::module::fmt_module;
-use roc_parse::ast::{Attempting, Expr};
-use roc_parse::blankspace::space0_before;
use roc_parse::module::{self, module_defs};
-use roc_parse::parser::{Parser, State, SyntaxError};
+use roc_parse::parser::{Parser, State};
-fn parse_with<'a>(arena: &'a Bump, input: &'a str) -> Result<Expr<'a>, SyntaxError<'a>> {
-let state = State::new_in(arena, input.trim().as_bytes(), Attempting::Module);
-let parser = space0_before(loc!(roc_parse::expr::expr(0)), 0);
-let answer = parser.parse(&arena, state);
-answer
-.map(|(_, loc_expr, _)| loc_expr.value)
-.map_err(|(_, fail, _)| fail)
-}
fn expr_formats_to(input: &str, expected: &str) {
let arena = Bump::new();
let input = input.trim_end();
let expected = expected.trim_end();
-match parse_with(&arena, input) {
+match roc_parse::test_helpers::parse_expr_with(&arena, input.trim()) {
Ok(actual) => {
let mut buf = String::new_in(&arena);
@ -55,8 +41,8 @@ mod test_fmt {
let src = src.trim_end();
let expected = expected.trim_end();
-match module::header().parse(&arena, State::new_in(&arena, src.as_bytes(), Attempting::Module)) {
+match module::parse_header(&arena, State::new(src.as_bytes())) {
-Ok((_, actual, state)) => {
+Ok((actual, state)) => {
let mut buf = String::new_in(&arena);
fmt_module(&mut buf, &actual);


@@ -7,8 +7,8 @@ use crate::llvm::build_hash::generic_hash;
use crate::llvm::build_list::{
allocate_list, empty_list, empty_polymorphic_list, list_append, list_concat, list_contains,
list_get_unsafe, list_join, list_keep_errs, list_keep_if, list_keep_oks, list_len, list_map,
-list_map2, list_map_with_index, list_prepend, list_repeat, list_reverse, list_set, list_single,
-list_sum, list_walk, list_walk_backwards,
+list_map2, list_map3, list_map_with_index, list_prepend, list_repeat, list_reverse, list_set,
+list_single, list_sum, list_walk, list_walk_backwards,
};
use crate::llvm::build_str::{
str_concat, str_count_graphemes, str_ends_with, str_from_float, str_from_int, str_from_utf8,
@@ -3743,6 +3743,38 @@ fn run_low_level<'a, 'ctx, 'env>(
_ => unreachable!("invalid list layout"),
}
}
ListMap3 => {
debug_assert_eq!(args.len(), 4);
let (list1, list1_layout) = load_symbol_and_layout(scope, &args[0]);
let (list2, list2_layout) = load_symbol_and_layout(scope, &args[1]);
let (list3, list3_layout) = load_symbol_and_layout(scope, &args[2]);
let (func, func_layout) = load_symbol_and_layout(scope, &args[3]);
match (list1_layout, list2_layout, list3_layout) {
(
Layout::Builtin(Builtin::List(_, element1_layout)),
Layout::Builtin(Builtin::List(_, element2_layout)),
Layout::Builtin(Builtin::List(_, element3_layout)),
) => list_map3(
env,
layout_ids,
func,
func_layout,
list1,
list2,
list3,
element1_layout,
element2_layout,
element3_layout,
),
(Layout::Builtin(Builtin::EmptyList), _, _)
| (_, Layout::Builtin(Builtin::EmptyList), _)
| (_, _, Layout::Builtin(Builtin::EmptyList)) => empty_list(env),
_ => unreachable!("invalid list layout"),
}
}
ListMapWithIndex => {
// List.map : List before, (before -> after) -> List after
debug_assert_eq!(args.len(), 2);


@@ -1305,6 +1305,114 @@ pub fn list_map2<'a, 'ctx, 'env>(
)
}
pub fn list_map3<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
layout_ids: &mut LayoutIds<'a>,
transform: BasicValueEnum<'ctx>,
transform_layout: &Layout<'a>,
list1: BasicValueEnum<'ctx>,
list2: BasicValueEnum<'ctx>,
list3: BasicValueEnum<'ctx>,
element1_layout: &Layout<'a>,
element2_layout: &Layout<'a>,
element3_layout: &Layout<'a>,
) -> BasicValueEnum<'ctx> {
let builder = env.builder;
let return_layout = match transform_layout {
Layout::FunctionPointer(_, ret) => ret,
Layout::Closure(_, _, ret) => ret,
_ => unreachable!("not a callable layout"),
};
let u8_ptr = env.context.i8_type().ptr_type(AddressSpace::Generic);
let list1_i128 = complex_bitcast(
env.builder,
list1,
env.context.i128_type().into(),
"to_i128",
);
let list2_i128 = complex_bitcast(
env.builder,
list2,
env.context.i128_type().into(),
"to_i128",
);
let list3_i128 = complex_bitcast(
env.builder,
list3,
env.context.i128_type().into(),
"to_i128",
);
let transform_ptr = builder.build_alloca(transform.get_type(), "transform_ptr");
env.builder.build_store(transform_ptr, transform);
let argument_layouts = [
element1_layout.clone(),
element2_layout.clone(),
element3_layout.clone(),
];
let stepper_caller =
build_transform_caller(env, layout_ids, transform_layout, &argument_layouts)
.as_global_value()
.as_pointer_value();
let a_width = env
.ptr_int()
.const_int(element1_layout.stack_size(env.ptr_bytes) as u64, false);
let b_width = env
.ptr_int()
.const_int(element2_layout.stack_size(env.ptr_bytes) as u64, false);
let c_width = env
.ptr_int()
.const_int(element3_layout.stack_size(env.ptr_bytes) as u64, false);
let d_width = env
.ptr_int()
.const_int(return_layout.stack_size(env.ptr_bytes) as u64, false);
let alignment = return_layout.alignment_bytes(env.ptr_bytes);
let alignment_iv = env.ptr_int().const_int(alignment as u64, false);
let dec_a = build_dec_wrapper(env, layout_ids, element1_layout);
let dec_b = build_dec_wrapper(env, layout_ids, element2_layout);
let dec_c = build_dec_wrapper(env, layout_ids, element3_layout);
let output = call_bitcode_fn(
env,
&[
list1_i128,
list2_i128,
list3_i128,
env.builder
.build_bitcast(transform_ptr, u8_ptr, "to_opaque"),
stepper_caller.into(),
alignment_iv.into(),
a_width.into(),
b_width.into(),
c_width.into(),
d_width.into(),
dec_a.as_global_value().as_pointer_value().into(),
dec_b.as_global_value().as_pointer_value().into(),
dec_c.as_global_value().as_pointer_value().into(),
],
bitcode::LIST_MAP3,
);
complex_bitcast(
env.builder,
output,
collection(env.context, env.ptr_bytes).into(),
"from_i128",
)
}
/// List.concat : List elem, List elem -> List elem
pub fn list_concat<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,


@@ -22,7 +22,7 @@ use roc_mono::ir::{
CapturedSymbols, ExternalSpecializations, PartialProc, PendingSpecialization, Proc, Procs,
};
use roc_mono::layout::{Layout, LayoutCache, LayoutProblem};
-use roc_parse::ast::{self, Attempting, StrLiteral, TypeAnnotation};
+use roc_parse::ast::{self, StrLiteral, TypeAnnotation};
use roc_parse::header::{
ExposesEntry, ImportsEntry, PackageEntry, PackageOrPath, PlatformHeader, To, TypedIdent,
};
@@ -2304,8 +2304,8 @@ fn load_pkg_config<'a>(
Ok(bytes_vec) => {
let parse_start = SystemTime::now();
let bytes = arena.alloc(bytes_vec);
-let parse_state = parser::State::new_in(arena, bytes, Attempting::Module);
-let parsed = roc_parse::module::header().parse(&arena, parse_state);
+let parse_state = parser::State::new(bytes);
+let parsed = roc_parse::module::parse_header(&arena, parse_state);
let parse_header_duration = parse_start.elapsed().unwrap();
// Insert the first entries for this module's timings
@@ -2319,19 +2319,19 @@ fn load_pkg_config<'a>(
effect_module_timing.parse_header = parse_header_duration;
match parsed {
-Ok((_, ast::Module::Interface { header }, _parse_state)) => {
+Ok((ast::Module::Interface { header }, _parse_state)) => {
Err(LoadingProblem::UnexpectedHeader(format!(
"expected platform/package module, got Interface with header\n{:?}",
header
)))
}
-Ok((_, ast::Module::App { header }, _parse_state)) => {
+Ok((ast::Module::App { header }, _parse_state)) => {
Err(LoadingProblem::UnexpectedHeader(format!(
"expected platform/package module, got App with header\n{:?}",
header
)))
}
-Ok((_, ast::Module::Platform { header }, parser_state)) => {
+Ok((ast::Module::Platform { header }, parser_state)) => {
// make a Pkg-Config module that ultimately exposes `main` to the host
let pkg_config_module_msg = fabricate_pkg_config_module(
arena,
@@ -2359,8 +2359,8 @@ fn load_pkg_config<'a>(
Ok(Msg::Many(vec![effects_module_msg, pkg_config_module_msg]))
}
-Err((_, fail, _)) => Err(LoadingProblem::ParsingFailed(
-fail.into_parse_problem(filename, bytes),
+Err(fail) => Err(LoadingProblem::ParsingFailed(
+SyntaxError::Header(fail).into_parse_problem(filename, bytes),
)),
}
}
@@ -2474,8 +2474,8 @@ fn parse_header<'a>(
start_time: SystemTime,
) -> Result<(ModuleId, Msg<'a>), LoadingProblem<'a>> {
let parse_start = SystemTime::now();
-let parse_state = parser::State::new_in(arena, src_bytes, Attempting::Module);
-let parsed = roc_parse::module::header().parse(&arena, parse_state);
+let parse_state = parser::State::new(src_bytes);
+let parsed = roc_parse::module::parse_header(&arena, parse_state);
let parse_header_duration = parse_start.elapsed().unwrap();
// Insert the first entries for this module's timings
@@ -2485,7 +2485,7 @@ fn parse_header<'a>(
module_timing.parse_header = parse_header_duration;
match parsed {
-Ok((_, ast::Module::Interface { header }, parse_state)) => {
+Ok((ast::Module::Interface { header }, parse_state)) => {
let header_src = unsafe {
let chomped = src_bytes.len() - parse_state.bytes.len();
std::str::from_utf8_unchecked(&src_bytes[..chomped])
@@ -2514,7 +2514,7 @@ fn parse_header<'a>(
module_timing,
))
}
-Ok((_, ast::Module::App { header }, parse_state)) => {
+Ok((ast::Module::App { header }, parse_state)) => {
let mut pkg_config_dir = filename.clone();
pkg_config_dir.pop();
@@ -2623,7 +2623,7 @@ fn parse_header<'a>(
},
}
}
-Ok((_, ast::Module::Platform { header }, _parse_state)) => Ok(fabricate_effects_module(
+Ok((ast::Module::Platform { header }, _parse_state)) => Ok(fabricate_effects_module(
arena,
&"",
module_ids,
@@ -2632,8 +2632,8 @@ fn parse_header<'a>(
header,
module_timing,
)),
-Err((_, fail, _)) => Err(LoadingProblem::ParsingFailed(
-fail.into_parse_problem(filename, src_bytes),
+Err(fail) => Err(LoadingProblem::ParsingFailed(
+SyntaxError::Header(fail).into_parse_problem(filename, src_bytes),
)),
}
}


@@ -28,6 +28,7 @@ pub enum LowLevel {
ListJoin,
ListMap,
ListMap2,
+ListMap3,
ListMapWithIndex,
ListKeepIf,
ListWalk,


@@ -911,6 +911,7 @@ define_builtins! {
22 LIST_KEEP_ERRS: "keepErrs"
23 LIST_MAP_WITH_INDEX: "mapWithIndex"
24 LIST_MAP2: "map2"
+25 LIST_MAP3: "map3"
}
5 RESULT: "Result" => {
0 RESULT_RESULT: "Result" imported // the Result.Result type alias


@@ -652,6 +652,7 @@ pub fn lowlevel_borrow_signature(arena: &Bump, op: LowLevel) -> &[bool] {
ListJoin => arena.alloc_slice_copy(&[irrelevant]),
ListMap | ListMapWithIndex => arena.alloc_slice_copy(&[owned, irrelevant]),
ListMap2 => arena.alloc_slice_copy(&[owned, owned, irrelevant]),
+ListMap3 => arena.alloc_slice_copy(&[owned, owned, owned, irrelevant]),
ListKeepIf | ListKeepOks | ListKeepErrs => arena.alloc_slice_copy(&[owned, borrowed]),
ListContains => arena.alloc_slice_copy(&[borrowed, irrelevant]),
ListWalk => arena.alloc_slice_copy(&[owned, irrelevant, owned]),


@@ -589,33 +589,6 @@ impl<'a> Spaceable<'a> for Def<'a> {
}
}
/// What we're currently attempting to parse, e.g.
/// "currently attempting to parse a list." This helps error messages!
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Attempting {
LineComment,
List,
Keyword,
StrLiteral,
RecordLiteral,
RecordFieldLabel,
InterpolatedString,
NumberLiteral,
UnicodeEscape,
ClosureParams,
ClosureBody,
Def,
Module,
Record,
Identifier,
HexDigit,
ConcreteType,
TypeVariable,
WhenCondition,
WhenBranch,
TODO,
}
impl<'a> Expr<'a> {
pub fn loc_ref(&'a self, region: Region) -> Loc<&'a Self> {
Loc {

File diff suppressed because it is too large.


@@ -1,15 +1,14 @@
use crate::ast::{AssignedField, CommentOrNewline, Def, Expr, Pattern, Spaceable, TypeAnnotation};
use crate::blankspace::{
-line_comment, space0_after_e, space0_around_ee, space0_before_e, space0_e, space1_e,
-spaces_exactly_e,
+space0_after_e, space0_around_ee, space0_before_e, space0_e, space1_e, spaces_exactly_e,
};
-use crate::ident::{ident, lowercase_ident, Ident};
+use crate::ident::{lowercase_ident, parse_ident_help, Ident};
use crate::keyword;
use crate::parser::{
-self, allocated, and_then_with_indent_level, ascii_char, backtrackable, map, newline_char,
-optional, sep_by1, sep_by1_e, specialize, specialize_ref, then, trailing_sep_by0, word1, word2,
-EExpr, EInParens, ELambda, EPattern, ERecord, EString, Either, If, List, Number, ParseResult,
-Parser, State, SyntaxError, Type, When,
+self, allocated, and_then_with_indent_level, backtrackable, map, optional, sep_by1, sep_by1_e,
+specialize, specialize_ref, then, trailing_sep_by0, word1, word2, EExpr, EInParens, ELambda,
+EPattern, ERecord, EString, Either, If, List, Number, ParseResult, Parser, State, SyntaxError,
+Type, When,
};
use crate::pattern::loc_closure_param;
use crate::type_annotation;
@@ -20,6 +19,25 @@ use roc_region::all::{Located, Region};
use crate::parser::Progress::{self, *};
pub fn test_parse_expr<'a>(
min_indent: u16,
arena: &'a bumpalo::Bump,
state: State<'a>,
) -> Result<Located<Expr<'a>>, EExpr<'a>> {
let parser = space0_before_e(
loc!(|a, s| parse_expr_help(min_indent, a, s)),
min_indent,
EExpr::Space,
EExpr::IndentStart,
);
match parser.parse(arena, state) {
Ok((_, expression, _)) => Ok(expression),
Err((_, fail, _)) => Err(fail),
}
}
// public for testing purposes
pub fn expr<'a>(min_indent: u16) -> impl Parser<'a, Expr<'a>, SyntaxError<'a>> {
// Recursive parsers must not directly invoke functions which return (impl Parser),
// as this causes rustc to stack overflow. Thus, parse_expr must be a
@@ -30,6 +48,10 @@ pub fn expr<'a>(min_indent: u16) -> impl Parser<'a, Expr<'a>, SyntaxError<'a>> {
)
}
pub fn expr_help<'a>(min_indent: u16) -> impl Parser<'a, Expr<'a>, EExpr<'a>> {
move |arena, state: State<'a>| parse_expr_help(min_indent, arena, state)
}
fn loc_expr_in_parens_help<'a>(
min_indent: u16,
) -> impl Parser<'a, Located<Expr<'a>>, EInParens<'a>> {
@@ -155,9 +177,9 @@ fn record_field_access_chain<'a>() -> impl Parser<'a, Vec<'a, &'a str>, EExpr<'a
}
fn record_field_access<'a>() -> impl Parser<'a, &'a str, EExpr<'a>> {
-specialize(
-|_, r, c| EExpr::Access(r, c),
-skip_first!(ascii_char(b'.'), lowercase_ident()),
+skip_first!(
+word1(b'.', EExpr::Access),
+specialize(|_, r, c| EExpr::Access(r, c), lowercase_ident())
)
}
@@ -487,7 +509,7 @@ fn parse_expr_help<'a>(
]
.parse(arena, state)?;
-let initial = state.clone();
+let initial = state;
match space0_e(min_indent, EExpr::Space, EExpr::IndentEnd).parse(arena, state) {
Err((_, _, state)) => Ok((MadeProgress, loc_expr1.value, state)),
@@ -717,47 +739,6 @@ fn assigned_expr_field_to_pattern_help<'a>(
})
}
/// A def beginning with a parenthetical pattern, for example:
///
/// (UserId userId) = ...
///
/// Note: Parenthetical patterns are a shorthand convenience, and may not have type annotations.
/// It would be too weird to parse; imagine `(UserId userId) : ...` above `(UserId userId) = ...`
/// !!!! THIS IS NOT USED !!!!
// fn loc_parenthetical_def<'a>(min_indent: u16) -> impl Parser<'a, Located<Expr<'a>>> {
// move |arena, state| {
// let (loc_tuple, state) = loc!(and!(
// space0_after(
// between!(
// ascii_char(b'('),
// space0_around(loc_pattern(min_indent), min_indent),
// ascii_char(b')')
// ),
// min_indent,
// ),
// equals_with_indent()
// ))
// .parse(arena, state)?;
// let region = loc_tuple.region;
// let (loc_first_pattern, equals_sign_indent) = loc_tuple.value;
// // Continue parsing the expression as a Def.
// let (spaces_after_equals, state) = space0(min_indent).parse(arena, state)?;
// let (value, state) = parse_def_expr(
// region.start_col,
// min_indent,
// equals_sign_indent,
// arena,
// state,
// loc_first_pattern,
// spaces_after_equals,
// )?;
// Ok((Located { value, region }, state))
// }
// }
fn parse_defs_help<'a>(
min_indent: u16,
) -> impl Parser<'a, Vec<'a, &'a Located<Def<'a>>>, EExpr<'a>> {
@@ -794,7 +775,7 @@ pub fn def<'a>(min_indent: u16) -> impl Parser<'a, Def<'a>, SyntaxError<'a>> {
specialize(|e, _, _| SyntaxError::Expr(e), def_help(min_indent))
}
-fn def_help<'a>(min_indent: u16) -> impl Parser<'a, Def<'a>, EExpr<'a>> {
+pub fn def_help<'a>(min_indent: u16) -> impl Parser<'a, Def<'a>, EExpr<'a>> {
let indented_more = min_indent + 1;
enum DefKind {
@@ -834,7 +815,7 @@ fn def_help<'a>(min_indent: u16) -> impl Parser<'a, Def<'a>, EExpr<'a>> {
// see if there is a definition (assuming the preceding characters were a type
// annotation
let (_, opt_rest, state) = optional(and!(
-spaces_then_comment_or_newline_help(),
+spaces_till_end_of_line(),
body_at_indent_help(min_indent)
))
.parse(arena, state)?;
@@ -889,20 +870,10 @@ fn pattern_help<'a>(min_indent: u16) -> impl Parser<'a, Located<Pattern<'a>>, EE
)
}
-fn spaces_then_comment_or_newline_help<'a>() -> impl Parser<'a, Option<&'a str>, EExpr<'a>> {
-specialize_ref(
-EExpr::Syntax,
-skip_first!(
-zero_or_more!(ascii_char(b' ')),
-map!(
-either!(newline_char(), line_comment()),
-|either_comment_or_newline| match either_comment_or_newline {
-Either::First(_) => None,
-Either::Second(comment) => Some(comment),
-}
-)
-),
-)
+fn spaces_till_end_of_line<'a>() -> impl Parser<'a, Option<&'a str>, EExpr<'a>> {
+crate::blankspace::spaces_till_end_of_line(|r, c| {
+EExpr::Space(parser::BadInputError::HasTab, r, c)
+})
}
type Body<'a> = (Located<Pattern<'a>>, Located<Expr<'a>>);
@ -1193,7 +1164,7 @@ fn parse_def_signature_help<'a>(
// Indented more beyond the original indent. // Indented more beyond the original indent.
let indented_more = original_indent + 1; let indented_more = original_indent + 1;
and!( let parser1 = {
// Parse the first annotation. It doesn't need any spaces // Parse the first annotation. It doesn't need any spaces
// around it parsed, because both the subsquent defs and the // around it parsed, because both the subsquent defs and the
// final body will have space1_before on them. // final body will have space1_before on them.
@ -1205,23 +1176,28 @@ fn parse_def_signature_help<'a>(
specialize(EExpr::Type, type_annotation::located_help(indented_more)), specialize(EExpr::Type, type_annotation::located_help(indented_more)),
min_indent, min_indent,
EExpr::Space, EExpr::Space,
EExpr::IndentAnnotation EExpr::IndentAnnotation,
), ),
// The first annotation may be immediately (spaces_then_comment_or_newline()) // The first annotation may be immediately (spaces_then_comment_or_newline())
// followed by a body at the exact same indent_level // followed by a body at the exact same indent_level
// leading to an AnnotatedBody in this case // leading to an AnnotatedBody in this case
|_progress, type_ann, indent_level| map( |_progress, type_ann, indent_level| {
map(
optional(and!( optional(and!(
backtrackable(spaces_then_comment_or_newline_help()), backtrackable(spaces_till_end_of_line()),
body_at_indent_help(indent_level) body_at_indent_help(indent_level)
)), )),
move |opt_body| (type_ann.clone(), opt_body) move |opt_body| (type_ann.clone(), opt_body),
) )
), },
)
};
let parser2 = {
and!( and!(
// Optionally parse additional defs. // Optionally parse additional defs.
zero_or_more!(backtrackable(allocated(space0_before_e( zero_or_more!(backtrackable(allocated(space0_before_e(
loc!(specialize_ref(EExpr::Syntax, def(original_indent))), loc!(def_help(original_indent)),
original_indent, original_indent,
EExpr::Space, EExpr::Space,
EExpr::IndentStart, EExpr::IndentStart,
@ -1229,15 +1205,22 @@ fn parse_def_signature_help<'a>(
// Parse the final expression that will be returned. // Parse the final expression that will be returned.
// It should be indented the same amount as the original. // It should be indented the same amount as the original.
space0_before_e( space0_before_e(
loc!(|arena, state| parse_expr_help(original_indent, arena, state)), loc!(one_of![
|arena, state| parse_expr_help(original_indent, arena, state),
|_, state: State<'a>| Err((
MadeProgress,
EExpr::DefMissingFinalExpr(state.line, state.column),
state
)),
]),
original_indent, original_indent,
EExpr::Space, EExpr::Space,
EExpr::IndentEnd, EExpr::IndentEnd,
) )
) )
) };
.parse(arena, state)
.map( and!(parser1, parser2).parse(arena, state).map(
move |(progress, ((loc_first_annotation, opt_body), (mut defs, loc_ret)), state)| { move |(progress, ((loc_first_annotation, opt_body), (mut defs, loc_ret)), state)| {
let loc_first_def: Located<Def<'a>> = match opt_body { let loc_first_def: Located<Def<'a>> = match opt_body {
None => { None => {
@@ -1713,7 +1696,7 @@ fn unary_negate_function_arg_help<'a>(
fn loc_function_args_help<'a>(
min_indent: u16,
) -> impl Parser<'a, Vec<'a, Located<Expr<'a>>>, EExpr<'a>> {
-one_or_more_e!(
+one_or_more!(
move |arena: &'a Bump, s| {
map!(
and!(
@@ -1966,11 +1949,11 @@ fn ident_then_args<'a>(
}
fn ident_without_apply_help<'a>() -> impl Parser<'a, Expr<'a>, EExpr<'a>> {
-specialize_ref(
-EExpr::Syntax,
-then(loc!(ident()), move |arena, state, progress, loc_ident| {
+then(
+loc!(parse_ident_help),
+move |arena, state, progress, loc_ident| {
Ok((progress, ident_to_expr(arena, loc_ident.value), state))
-}),
+},
)
}
@@ -2079,7 +2062,7 @@ fn list_literal_help<'a>(min_indent: u16) -> impl Parser<'a, Expr<'a>, List<'a>>
move |arena, state| {
let (_, (parsed_elems, final_comments), state) = collection_trailing_sep_e!(
word1(b'[', List::Open),
-specialize_ref(List::Syntax, loc!(expr(min_indent))),
+specialize_ref(List::Expr, loc!(expr_help(min_indent))),
word1(b',', List::End),
word1(b']', List::End),
min_indent,
@@ -2127,7 +2110,7 @@ fn record_field_help<'a>(
word1(b'?', ERecord::QuestionMark)
),
space0_before_e(
-specialize_ref(ERecord::Syntax, loc!(expr(min_indent))),
+specialize_ref(ERecord::Expr, loc!(expr_help(min_indent))),
min_indent,
ERecord::Space,
ERecord::IndentEnd,
@@ -2162,7 +2145,7 @@ fn record_field_help<'a>(
fn record_updateable_identifier<'a>() -> impl Parser<'a, Expr<'a>, ERecord<'a>> {
specialize(
|_, r, c| ERecord::Updateable(r, c),
-map_with_arena!(ident(), ident_to_expr),
+map_with_arena!(parse_ident_help, ident_to_expr),
)
}


@ -1,12 +1,11 @@
-use crate::blankspace::space0;
-use crate::ident::lowercase_ident;
-use crate::module::package_name;
-use crate::parser::{ascii_char, optional, Either, Parser, Progress::*, State, SyntaxError};
-use crate::string_literal;
-use crate::{
-ast::{CommentOrNewline, Spaceable, StrLiteral, TypeAnnotation},
-parser::specialize,
-};
+use crate::ast::{CommentOrNewline, Spaceable, StrLiteral, TypeAnnotation};
+use crate::blankspace::space0_e;
+use crate::ident::lowercase_ident;
+use crate::parser::Progress::{self, *};
+use crate::parser::{
+specialize, word1, EPackageEntry, EPackageName, EPackageOrPath, Parser, State,
+};
+use crate::string_literal;
use bumpalo::collections::Vec;
use inlinable_string::InlinableString;
use roc_region::all::Loc;
@ -242,18 +241,32 @@ impl<'a> Spaceable<'a> for PackageEntry<'a> {
}
}
-pub fn package_entry<'a>() -> impl Parser<'a, PackageEntry<'a>, SyntaxError<'a>> {
+pub fn package_entry<'a>() -> impl Parser<'a, PackageEntry<'a>, EPackageEntry<'a>> {
move |arena, state| {
// You may optionally have a package shorthand,
// e.g. "uc" in `uc: roc/unicode 1.0.0`
//
// (Indirect dependencies don't have a shorthand.)
let (_, opt_shorthand, state) = optional(and!( let min_indent = 1;
skip_second!(lowercase_ident(), ascii_char(b':')),
space0(1) let (_, opt_shorthand, state) = maybe!(and!(
skip_second!(
specialize(|_, r, c| EPackageEntry::Shorthand(r, c), lowercase_ident()),
word1(b':', EPackageEntry::Colon)
),
space0_e(
min_indent,
EPackageEntry::Space,
EPackageEntry::IndentPackageOrPath
)
))
.parse(arena, state)?;
let (_, package_or_path, state) = loc!(specialize(
EPackageEntry::BadPackageOrPath,
package_or_path()
)) ))
.parse(arena, state)?; .parse(arena, state)?;
let (_, package_or_path, state) = loc!(package_or_path()).parse(arena, state)?;
let entry = match opt_shorthand {
Some((shorthand, spaces_after_shorthand)) => PackageEntry::Entry {
@ -272,27 +285,117 @@ pub fn package_entry<'a>() -> impl Parser<'a, PackageEntry<'a>, SyntaxError<'a>>
}
}
-pub fn package_or_path<'a>() -> impl Parser<'a, PackageOrPath<'a>, SyntaxError<'a>> {
+pub fn package_or_path<'a>() -> impl Parser<'a, PackageOrPath<'a>, EPackageOrPath<'a>> {
+one_of![
map!(
either!( specialize(EPackageOrPath::BadPath, string_literal::parse()),
specialize( PackageOrPath::Path
|e, r, c| SyntaxError::Expr(crate::parser::EExpr::Str(e, r, c)),
string_literal::parse()
), ),
map!(
and!( and!(
package_name(), specialize(EPackageOrPath::BadPackage, package_name()),
skip_first!(one_or_more!(ascii_char(b' ')), package_version()) skip_first!(skip_spaces(), package_version())
)
), ),
|answer| { |(name, version)| { PackageOrPath::Package(name, version) }
match answer {
Either::First(str_literal) => PackageOrPath::Path(str_literal),
Either::Second((name, version)) => PackageOrPath::Package(name, version),
}
}
) )
]
} }
-fn package_version<'a>() -> impl Parser<'a, Version<'a>, SyntaxError<'a>> {
+fn skip_spaces<'a, T>() -> impl Parser<'a, (), T>
where
T: 'a,
{
|_, mut state: State<'a>| {
let mut chomped = 0;
let mut it = state.bytes.iter();
while let Some(b' ') = it.next() {
chomped += 1;
}
if chomped == 0 {
Ok((NoProgress, (), state))
} else {
state.column += chomped;
state.bytes = it.as_slice();
Ok((MadeProgress, (), state))
}
}
}
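For intuition, a tiny stand-alone Rust model of what skip_spaces does (illustration only, not part of the commit): consume leading ASCII spaces and report whether any progress was made; the real parser additionally advances state.column by the number of bytes chomped.

// Hypothetical sketch of skip_spaces' effect on an input buffer.
fn skip_spaces(input: &[u8]) -> (bool, &[u8]) {
    let chomped = input.iter().take_while(|&&b| b == b' ').count();
    (chomped > 0, &input[chomped..])
}

fn main() {
    assert_eq!(skip_spaces(b"   1.0.0"), (true, &b"1.0.0"[..])); // MadeProgress
    assert_eq!(skip_spaces(b"1.0.0"), (false, &b"1.0.0"[..]));   // NoProgress
}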
fn package_version<'a, T>() -> impl Parser<'a, Version<'a>, T>
where
T: 'a,
{
move |_, _| todo!("TODO parse package version")
}
#[inline(always)]
pub fn package_name<'a>() -> impl Parser<'a, PackageName<'a>, EPackageName> {
use encode_unicode::CharExt;
// e.g. rtfeldman/blah
//
// Package names and accounts can be capitalized and can contain dashes.
// They cannot contain underscores or other special characters.
// They must be ASCII.
|_, mut state: State<'a>| match chomp_package_part(state.bytes) {
Err(progress) => Err((
progress,
EPackageName::Account(state.line, state.column),
state,
)),
Ok(account) => {
let mut chomped = account.len();
if let Ok(('/', width)) = char::from_utf8_slice_start(&state.bytes[chomped..]) {
chomped += width;
match chomp_package_part(&state.bytes[chomped..]) {
Err(progress) => Err((
progress,
EPackageName::Pkg(state.line, state.column + chomped as u16),
state,
)),
Ok(pkg) => {
chomped += pkg.len();
state.column += chomped as u16;
state.bytes = &state.bytes[chomped..];
let value = PackageName { account, pkg };
Ok((MadeProgress, value, state))
}
}
} else {
Err((
MadeProgress,
EPackageName::MissingSlash(state.line, state.column + chomped as u16),
state,
))
}
}
}
}
fn chomp_package_part(buffer: &[u8]) -> Result<&str, Progress> {
use encode_unicode::CharExt;
let mut chomped = 0;
while let Ok((ch, width)) = char::from_utf8_slice_start(&buffer[chomped..]) {
if ch == '-' || ch.is_ascii_alphanumeric() {
chomped += width;
} else {
// we're done
break;
}
}
if chomped == 0 {
Err(Progress::NoProgress)
} else {
let name = unsafe { std::str::from_utf8_unchecked(&buffer[..chomped]) };
Ok(name)
}
}
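As a usage sketch (not part of the commit): chomp_package_part accepts ASCII alphanumerics and dashes and stops at anything else, so package_name splits an input like rtfeldman/blah into an account part and a package part. A simplified, byte-level Rust model (the real function decodes UTF-8 characters via encode_unicode and reports Progress instead of ()):

// Hypothetical, simplified model of chomp_package_part.
fn chomp_package_part(buffer: &[u8]) -> Result<&str, ()> {
    let end = buffer
        .iter()
        .position(|&b| !(b.is_ascii_alphanumeric() || b == b'-'))
        .unwrap_or(buffer.len());
    if end == 0 {
        Err(()) // NoProgress
    } else {
        Ok(std::str::from_utf8(&buffer[..end]).unwrap())
    }
}

fn main() {
    // package_name() chomps the account, expects '/', then chomps the package.
    assert_eq!(chomp_package_part(b"rtfeldman/blah"), Ok("rtfeldman"));
    assert_eq!(chomp_package_part(b"blah 1.0.0"), Ok("blah"));
    assert_eq!(chomp_package_part(b"/missing-account"), Err(()));
}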


@ -1,14 +1,7 @@
-use crate::ast::Attempting;
-use crate::keyword;
use crate::parser::Progress::{self, *};
-use crate::parser::{
-peek_utf8_char, unexpected, BadInputError, Col, EExpr, ParseResult, Parser, Row, State,
-SyntaxError,
-};
-use bumpalo::collections::string::String;
+use crate::parser::{BadInputError, Col, EExpr, ParseResult, Parser, Row, State};
use bumpalo::collections::vec::Vec;
use bumpalo::Bump;
-use roc_region::all::Region;
/// The parser accepts all of these in any position where any one of them could
/// appear. This way, canonicalization can give more helpful error messages like
@ -61,82 +54,43 @@ impl<'a> Ident<'a> {
}
}
pub fn ident<'a>() -> impl Parser<'a, Ident<'a>, SyntaxError<'a>> {
crate::parser::specialize(|e, _, _| SyntaxError::Expr(e), parse_ident_help)
}
pub fn global_tag_or_ident<'a, F>(pred: F) -> impl Parser<'a, &'a str, SyntaxError<'a>>
where
F: Fn(char) -> bool,
{
move |arena, mut state: State<'a>| {
// pred will determine if this is a tag or ident (based on capitalization)
let (first_letter, bytes_parsed) = match peek_utf8_char(&state) {
Ok((first_letter, bytes_parsed)) => {
if !pred(first_letter) {
return Err(unexpected(0, Attempting::RecordFieldLabel, state));
}
(first_letter, bytes_parsed)
}
Err(reason) => return state.fail(arena, NoProgress, reason),
};
let mut buf = String::with_capacity_in(1, arena);
buf.push(first_letter);
state = state.advance_without_indenting(bytes_parsed)?;
while !state.bytes.is_empty() {
match peek_utf8_char(&state) {
Ok((ch, bytes_parsed)) => {
// After the first character, only these are allowed:
//
// * Unicode alphabetic chars - you might include `鹏` if that's clear to your readers
// * ASCII digits - e.g. `1` but not `¾`, both of which pass .is_numeric()
// * A ':' indicating the end of the field
if ch.is_alphabetic() || ch.is_ascii_digit() {
buf.push(ch);
state = state.advance_without_indenting(bytes_parsed)?;
} else {
// This is the end of the field. We're done!
break;
}
}
Err(reason) => return state.fail(arena, MadeProgress, reason),
};
}
Ok((MadeProgress, buf.into_bump_str(), state))
}
}
/// This could be:
///
/// * A record field, e.g. "email" in `.email` or in `email:`
/// * A named pattern match, e.g. "foo" in `foo =` or `foo ->` or `\foo ->`
pub fn lowercase_ident<'a>() -> impl Parser<'a, &'a str, SyntaxError<'a>> { pub fn lowercase_ident<'a>() -> impl Parser<'a, &'a str, ()> {
move |arena, state: State<'a>| { move |_, state: State<'a>| match chomp_lowercase_part(state.bytes) {
let (progress, ident, state) = Err(progress) => Err((progress, (), state)),
global_tag_or_ident(|first_char| first_char.is_lowercase()).parse(arena, state)?; Ok(ident) => {
if crate::keyword::KEYWORDS.iter().any(|kw| &ident == kw) {
// to parse a valid ident, progress must be made Err((NoProgress, (), state))
debug_assert_eq!(progress, MadeProgress);
if (ident == keyword::IF)
|| (ident == keyword::THEN)
|| (ident == keyword::ELSE)
|| (ident == keyword::WHEN)
|| (ident == keyword::IS)
|| (ident == keyword::AS)
{
// TODO Calculate the correct region based on state
let region = Region::zero();
Err((MadeProgress, SyntaxError::ReservedKeyword(region), state))
} else { } else {
Ok((MadeProgress, ident, state)) let width = ident.len();
match state.advance_without_indenting_ee(width, |_, _| ()) {
Ok(state) => Ok((MadeProgress, ident, state)),
Err(bad) => Err(bad),
}
}
}
}
}
pub fn tag_name<'a>() -> impl Parser<'a, &'a str, ()> {
move |arena, state: State<'a>| {
if state.bytes.starts_with(b"@") {
match chomp_private_tag(state.bytes, state.line, state.column) {
Err(BadIdent::Start(_, _)) => Err((NoProgress, (), state)),
Err(_) => Err((MadeProgress, (), state)),
Ok(ident) => {
let width = ident.len();
match state.advance_without_indenting_ee(width, |_, _| ()) {
Ok(state) => Ok((MadeProgress, ident, state)),
Err(bad) => Err(bad),
}
}
}
} else {
uppercase_ident().parse(arena, state)
}
}
}
@ -146,30 +100,34 @@ pub fn lowercase_ident<'a>() -> impl Parser<'a, &'a str, SyntaxError<'a>> {
/// * A module name
/// * A type name
/// * A global tag
pub fn uppercase_ident<'a>() -> impl Parser<'a, &'a str, SyntaxError<'a>> { pub fn uppercase_ident<'a>() -> impl Parser<'a, &'a str, ()> {
global_tag_or_ident(|first_char| first_char.is_uppercase()) move |_, state: State<'a>| match chomp_uppercase_part(state.bytes) {
Err(progress) => Err((progress, (), state)),
Ok(ident) => {
let width = ident.len();
match state.advance_without_indenting_ee(width, |_, _| ()) {
Ok(state) => Ok((MadeProgress, ident, state)),
Err(bad) => Err(bad),
}
}
}
} }
pub fn unqualified_ident<'a>() -> impl Parser<'a, &'a str, SyntaxError<'a>> { pub fn unqualified_ident<'a>() -> impl Parser<'a, &'a str, ()> {
global_tag_or_ident(|first_char| first_char.is_alphabetic()) move |_, state: State<'a>| match chomp_part(|c| c.is_alphabetic(), state.bytes) {
} Err(progress) => Err((progress, (), state)),
Ok(ident) => {
pub fn join_module_parts<'a>(arena: &'a Bump, module_parts: &[&str]) -> &'a str { if crate::keyword::KEYWORDS.iter().any(|kw| &ident == kw) {
let capacity = module_parts.len() * 3; // Module parts tend to be 3+ characters. Err((MadeProgress, (), state))
let mut buf = String::with_capacity_in(capacity, arena);
let mut any_parts_added = false;
for part in module_parts {
if any_parts_added {
buf.push('.');
} else { } else {
any_parts_added = true; let width = ident.len();
match state.advance_without_indenting_ee(width, |_, _| ()) {
Ok(state) => Ok((MadeProgress, ident, state)),
Err(bad) => Err(bad),
}
}
} }
buf.push_str(part);
} }
buf.into_bump_str()
} }
macro_rules! advance_state {
@ -184,10 +142,10 @@ pub fn parse_ident_help<'a>(
arena: &'a Bump,
state: State<'a>,
) -> ParseResult<'a, Ident<'a>, EExpr<'a>> {
-let initial = state.clone();
+let initial = state;
match parse_ident_help_help(arena, state) {
-Ok((progress, (ident, _), state)) => {
+Ok((progress, ident, state)) => {
if let Ident::Access { module_name, parts } = ident {
if module_name.is_empty() {
if let Some(first) = parts.first() {
@ -212,7 +170,7 @@ pub fn parse_ident_help<'a>(
Err((MadeProgress, fail, state)) => match fail {
BadIdent::Start(r, c) => Err((NoProgress, EExpr::Start(r, c), state)),
BadIdent::Space(e, r, c) => Err((NoProgress, EExpr::Space(e, r, c), state)),
-_ => malformed_identifier(initial.bytes, fail, arena, state),
+_ => malformed_identifier(initial.bytes, fail, state),
},
}
}
@ -220,294 +178,367 @@ pub fn parse_ident_help<'a>(
fn malformed_identifier<'a>( fn malformed_identifier<'a>(
initial_bytes: &'a [u8], initial_bytes: &'a [u8],
problem: BadIdent, problem: BadIdent,
_arena: &'a Bump,
mut state: State<'a>, mut state: State<'a>,
) -> ParseResult<'a, Ident<'a>, EExpr<'a>> { ) -> ParseResult<'a, Ident<'a>, EExpr<'a>> {
// skip forward to the next non-identifier character let chomped = chomp_malformed(state.bytes);
while !state.bytes.is_empty() { let delta = initial_bytes.len() - state.bytes.len();
match peek_utf8_char(&state) { let parsed_str = unsafe { std::str::from_utf8_unchecked(&initial_bytes[..chomped + delta]) };
Ok((ch, bytes_parsed)) => {
state = state.advance_without_indenting_ee(chomped, |r, c| {
EExpr::Space(crate::parser::BadInputError::LineTooLong, r, c)
})?;
Ok((MadeProgress, Ident::Malformed(parsed_str, problem), state))
}
/// skip forward to the next non-identifier character
pub fn chomp_malformed(bytes: &[u8]) -> usize {
use encode_unicode::CharExt;
let mut chomped = 0;
while let Ok((ch, width)) = char::from_utf8_slice_start(&bytes[chomped..]) {
// We can't use ch.is_alphanumeric() here because that passes for // We can't use ch.is_alphanumeric() here because that passes for
// things that are "numeric" but not ASCII digits, like `¾` // things that are "numeric" but not ASCII digits, like `¾`
if ch == '.' || ch == '_' || ch.is_alphabetic() || ch.is_ascii_digit() { if ch == '.' || ch == '_' || ch.is_alphabetic() || ch.is_ascii_digit() {
state = state.advance_without_indenting_ee(bytes_parsed, |r, c| { chomped += width;
EExpr::Space(crate::parser::BadInputError::LineTooLong, r, c)
})?;
continue; continue;
} else { } else {
break; break;
} }
} }
Err(_reason) => {
break;
}
}
}
let parsed = &initial_bytes[..(initial_bytes.len() - state.bytes.len())]; chomped
let parsed_str = unsafe { std::str::from_utf8_unchecked(parsed) };
Ok((MadeProgress, Ident::Malformed(parsed_str, problem), state))
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum BadIdent {
Start(Row, Col),
Space(BadInputError, Row, Col),
Underscore(Row, Col),
QualifiedTag(Row, Col),
-PrivateTagNotUppercase(Row, Col),
-PartStartsWithNumber(Row, Col),
WeirdAccessor(Row, Col),
-PrivateTagFieldAccess(Row, Col),
WeirdDotAccess(Row, Col),
WeirdDotQualified(Row, Col),
-DoubleDot(Row, Col),
StrayDot(Row, Col),
+BadPrivateTag(Row, Col),
}
/// Parse an identifier into a string. fn chomp_lowercase_part(buffer: &[u8]) -> Result<&str, Progress> {
/// chomp_part(|c: char| c.is_lowercase(), buffer)
/// This is separate from the `ident` Parser because string interpolation }
/// wants to use it this way.
pub fn parse_ident_help_help<'a>( fn chomp_uppercase_part(buffer: &[u8]) -> Result<&str, Progress> {
chomp_part(|c: char| c.is_uppercase(), buffer)
}
#[inline(always)]
fn chomp_part<F>(leading_is_good: F, buffer: &[u8]) -> Result<&str, Progress>
where
F: Fn(char) -> bool,
{
use encode_unicode::CharExt;
let mut chomped = 0;
if let Ok((ch, width)) = char::from_utf8_slice_start(&buffer[chomped..]) {
if leading_is_good(ch) {
chomped += width;
} else {
return Err(NoProgress);
}
}
while let Ok((ch, width)) = char::from_utf8_slice_start(&buffer[chomped..]) {
if ch.is_alphabetic() || ch.is_ascii_digit() {
chomped += width;
} else {
// we're done
break;
}
}
if chomped == 0 {
Err(NoProgress)
} else {
let name = unsafe { std::str::from_utf8_unchecked(&buffer[..chomped]) };
Ok(name)
}
}
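// The same "predicate on the first char, then letters/ASCII digits" shape,
// sketched over &str with a hypothetical helper (the real functions above
// work on byte slices and report Progress instead of Option):

fn chomp_part_sketch<F: Fn(char) -> bool>(leading_is_good: F, buffer: &str) -> Option<&str> {
    let mut chars = buffer.char_indices();
    match chars.next() {
        Some((_, ch)) if leading_is_good(ch) => {}
        _ => return None, // corresponds to NoProgress
    }
    let mut end = buffer.len();
    for (i, ch) in chars {
        if !(ch.is_alphabetic() || ch.is_ascii_digit()) {
            end = i;
            break;
        }
    }
    Some(&buffer[..end])
}

fn main() {
    assert_eq!(chomp_part_sketch(char::is_lowercase, "fooBar12!x"), Some("fooBar12"));
    assert_eq!(chomp_part_sketch(char::is_uppercase, "fooBar"), None);
}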
/// a `.foo` accessor function
fn chomp_accessor(buffer: &[u8], row: Row, col: Col) -> Result<&str, BadIdent> {
// assumes the leading `.` has been chomped already
use encode_unicode::CharExt;
match chomp_lowercase_part(buffer) {
Ok(name) => {
let chomped = name.len();
if let Ok(('.', _)) = char::from_utf8_slice_start(&buffer[chomped..]) {
Err(BadIdent::WeirdAccessor(row, col))
} else {
Ok(name)
}
}
Err(_) => {
// we've already made progress with the initial `.`
Err(BadIdent::StrayDot(row, col + 1))
}
}
}
/// a `@Token` private tag
fn chomp_private_tag(buffer: &[u8], row: Row, col: Col) -> Result<&str, BadIdent> {
// assumes the leading `@` has NOT been chomped already
debug_assert_eq!(buffer.get(0), Some(&b'@'));
use encode_unicode::CharExt;
match chomp_uppercase_part(&buffer[1..]) {
Ok(name) => {
let width = 1 + name.len();
if let Ok(('.', _)) = char::from_utf8_slice_start(&buffer[width..]) {
Err(BadIdent::BadPrivateTag(row, col + width as u16))
} else {
let value = unsafe { std::str::from_utf8_unchecked(&buffer[..width]) };
Ok(value)
}
}
Err(_) => Err(BadIdent::BadPrivateTag(row, col + 1)),
}
}
fn chomp_identifier_chain<'a>(
arena: &'a Bump, arena: &'a Bump,
mut state: State<'a>, buffer: &'a [u8],
) -> ParseResult<'a, (Ident<'a>, Option<char>), BadIdent> { row: Row,
let mut part_buf = String::new_in(arena); // The current "part" (parts are dot-separated.) col: Col,
let mut capitalized_parts: Vec<&'a str> = Vec::new_in(arena); ) -> Result<(u16, Ident<'a>), (u16, BadIdent)> {
let mut noncapitalized_parts: Vec<&'a str> = Vec::new_in(arena); use encode_unicode::CharExt;
let mut is_capitalized;
let is_accessor_fn;
let mut is_private_tag = false;
// Identifiers and accessor functions must start with either a letter or a dot. let first_is_uppercase;
// If this starts with neither, it must be something else! let mut chomped = 0;
match peek_utf8_char(&state) {
Ok((first_ch, bytes_parsed)) => {
if first_ch.is_alphabetic() {
part_buf.push(first_ch);
is_capitalized = first_ch.is_uppercase(); match char::from_utf8_slice_start(&buffer[chomped..]) {
is_accessor_fn = false; Ok((ch, width)) => match ch {
'.' => match chomp_accessor(&buffer[1..], row, col) {
Ok(accessor) => {
let bytes_parsed = 1 + accessor.len();
state = advance_state!(state, bytes_parsed)?; return Ok((bytes_parsed as u16, Ident::AccessorFunction(accessor)));
} else if first_ch == '.' { }
is_capitalized = false; Err(fail) => return Err((1, fail)),
is_accessor_fn = true; },
'@' => match chomp_private_tag(buffer, row, col) {
Ok(tagname) => {
let bytes_parsed = tagname.len();
state = advance_state!(state, bytes_parsed)?; return Ok((bytes_parsed as u16, Ident::PrivateTag(tagname)));
} else if first_ch == '@' { }
state = advance_state!(state, bytes_parsed)?; Err(fail) => return Err((1, fail)),
},
c if c.is_alphabetic() => {
// fall through
chomped += width;
first_is_uppercase = c.is_uppercase();
}
_ => {
return Err((0, BadIdent::Start(row, col)));
}
},
Err(_) => return Err((0, BadIdent::Start(row, col))),
}
// '@' must always be followed by a capital letter! while let Ok((ch, width)) = char::from_utf8_slice_start(&buffer[chomped..]) {
match peek_utf8_char(&state) { if ch.is_alphabetic() || ch.is_ascii_digit() {
Ok((next_ch, next_bytes_parsed)) => { chomped += width;
if next_ch.is_uppercase() {
state = advance_state!(state, next_bytes_parsed)?;
part_buf.push('@');
part_buf.push(next_ch);
is_private_tag = true;
is_capitalized = true;
is_accessor_fn = false;
} else { } else {
return Err(( // we're done
MadeProgress, break;
BadIdent::PrivateTagNotUppercase(state.line, state.column),
state,
));
} }
} }
Err(_reason) => {
return Err(( if let Ok(('.', _)) = char::from_utf8_slice_start(&buffer[chomped..]) {
MadeProgress, let module_name = if first_is_uppercase {
BadIdent::PrivateTagNotUppercase(state.line, state.column), match chomp_module_chain(&buffer[chomped..]) {
state, Ok(width) => {
)); chomped += width as usize;
unsafe { std::str::from_utf8_unchecked(&buffer[..chomped]) }
} }
Err(MadeProgress) => todo!(),
Err(NoProgress) => unsafe { std::str::from_utf8_unchecked(&buffer[..chomped]) },
} }
} else { } else {
return Err((NoProgress, BadIdent::Start(state.line, state.column), state)); ""
} };
}
Err(_reason) => { let mut parts = Vec::with_capacity_in(4, arena);
return Err((NoProgress, BadIdent::Start(state.line, state.column), state));
} if !first_is_uppercase {
let first_part = unsafe { std::str::from_utf8_unchecked(&buffer[..chomped]) };
parts.push(first_part);
} }
while !state.bytes.is_empty() { match chomp_access_chain(&buffer[chomped..], &mut parts) {
match peek_utf8_char(&state) { Ok(width) => {
Ok((ch, bytes_parsed)) => { chomped += width as usize;
// After the first character, only these are allowed:
//
// * Unicode alphabetic chars - you might name a variable `鹏` if that's clear to your readers
// * ASCII digits - e.g. `1` but not `¾`, both of which pass .is_numeric()
// * A dot ('.')
if ch.is_alphabetic() {
if part_buf.is_empty() {
// Capitalization is determined by the first character in the part.
is_capitalized = ch.is_uppercase();
}
part_buf.push(ch); let ident = Ident::Access {
} else if ch.is_ascii_digit() { module_name,
// Parts may not start with numbers! parts: parts.into_bump_slice(),
if part_buf.is_empty() { };
return Err((
MadeProgress,
BadIdent::PartStartsWithNumber(state.line, state.column),
state,
));
}
part_buf.push(ch); Ok((chomped as u16, ident))
} else if ch == '.' {
// There are two possible errors here:
//
// 1. Having two consecutive dots is an error.
// 2. Having capitalized parts after noncapitalized (e.g. `foo.Bar`) is an error.
if part_buf.is_empty() {
return Err((
MadeProgress,
BadIdent::DoubleDot(state.line, state.column),
state,
));
} }
Err(0) if !module_name.is_empty() => Err((
if is_capitalized && !noncapitalized_parts.is_empty() { chomped as u16,
return Err(( BadIdent::QualifiedTag(row, chomped as u16 + col),
MadeProgress, )),
BadIdent::WeirdDotQualified(state.line, state.column), Err(1) if parts.is_empty() => Err((
state, chomped as u16 + 1,
)); BadIdent::WeirdDotQualified(row, chomped as u16 + col + 1),
)),
Err(width) => Err((
chomped as u16 + width,
BadIdent::WeirdDotAccess(row, chomped as u16 + col + width),
)),
} }
} else if let Ok(('_', _)) = char::from_utf8_slice_start(&buffer[chomped..]) {
if is_capitalized {
capitalized_parts.push(part_buf.into_bump_str());
} else {
noncapitalized_parts.push(part_buf.into_bump_str());
}
// Now that we've recorded the contents of the current buffer, reset it.
part_buf = String::new_in(arena);
} else if ch == '_' {
// we don't allow underscores in the middle of an identifier // we don't allow underscores in the middle of an identifier
// but still parse them (and generate a malformed identifier) // but still parse them (and generate a malformed identifier)
// to give good error messages for this case // to give good error messages for this case
state = advance_state!(state, bytes_parsed)?; Err((
return Err(( chomped as u16 + 1,
MadeProgress, BadIdent::Underscore(row, col + chomped as u16 + 1),
BadIdent::Underscore(state.line, state.column), ))
state, } else if first_is_uppercase {
)); // just one segment, starting with an uppercase letter; that's a global tag
let value = unsafe { std::str::from_utf8_unchecked(&buffer[..chomped]) };
Ok((chomped as u16, Ident::GlobalTag(value)))
} else { } else {
// This must be the end of the identifier. We're done! // just one segment, starting with a lowercase letter; that's a normal identifier
let value = unsafe { std::str::from_utf8_unchecked(&buffer[..chomped]) };
break; let ident = Ident::Access {
} module_name: "",
parts: arena.alloc([value]),
state = advance_state!(state, bytes_parsed)?;
}
Err(_reason) => {
//
return Err((
MadeProgress,
BadIdent::Start(state.line, state.column),
state,
));
}
}
}
if part_buf.is_empty() {
// We probably had a trailing dot, e.g. `Foo.bar.` - this is malformed!
//
// This condition might also occur if we encounter a malformed accessor like `.|`
//
// If we made it this far and don't have a next_char, then necessarily
// we have consumed a '.' char previously.
let fail = if noncapitalized_parts.is_empty() {
if capitalized_parts.is_empty() {
BadIdent::StrayDot(state.line, state.column)
} else {
BadIdent::WeirdDotQualified(state.line, state.column)
}
} else {
BadIdent::WeirdDotAccess(state.line, state.column)
}; };
Ok((chomped as u16, ident))
return Err((MadeProgress, fail, state)); }
}
fn chomp_module_chain(buffer: &[u8]) -> Result<u16, Progress> {
let mut chomped = 0;
while let Some(b'.') = buffer.get(chomped) {
match &buffer.get(chomped + 1..) {
Some(slice) => match chomp_uppercase_part(slice) {
Ok(name) => {
chomped += name.len() + 1;
}
Err(MadeProgress) => return Err(MadeProgress),
Err(NoProgress) => break,
},
None => return Err(MadeProgress),
}
}
if chomped == 0 {
Err(NoProgress)
} else {
Ok(chomped as u16)
}
}
pub fn concrete_type<'a>() -> impl Parser<'a, (&'a str, &'a str), ()> {
move |_, state: State<'a>| match chomp_concrete_type(state.bytes) {
Err(progress) => Err((progress, (), state)),
Ok((module_name, type_name, width)) => {
match state.advance_without_indenting_ee(width, |_, _| ()) {
Ok(state) => Ok((MadeProgress, (module_name, type_name), state)),
Err(bad) => Err(bad),
}
}
}
}
// parse a type name like `Result` or `Result.Result`
fn chomp_concrete_type(buffer: &[u8]) -> Result<(&str, &str, usize), Progress> {
let first = crate::ident::chomp_uppercase_part(buffer)?;
if let Some(b'.') = buffer.get(first.len()) {
match crate::ident::chomp_module_chain(&buffer[first.len()..]) {
Err(_) => Err(MadeProgress),
Ok(rest) => {
let width = first.len() + rest as usize;
// we must explicitly check here for a trailing `.`
if let Some(b'.') = buffer.get(width) {
return Err(MadeProgress);
}
let slice = &buffer[..width];
match slice.iter().rev().position(|c| *c == b'.') {
None => Ok(("", first, first.len())),
Some(rev_index) => {
let index = slice.len() - rev_index;
let module_name =
unsafe { std::str::from_utf8_unchecked(&slice[..index - 1]) };
let type_name = unsafe { std::str::from_utf8_unchecked(&slice[index..]) };
Ok((module_name, type_name, width))
}
}
}
}
} else {
Ok(("", first, first.len()))
}
}
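// chomp_concrete_type splits on the *last* dot, so `Result.Result` becomes a
// module name plus a type name. A rough standalone equivalent using
// rsplit_once (hypothetical helper; the width bookkeeping above is omitted):

fn split_concrete_type(s: &str) -> (&str, &str) {
    match s.rsplit_once('.') {
        Some((module_name, type_name)) => (module_name, type_name),
        None => ("", s),
    }
}

fn main() {
    assert_eq!(split_concrete_type("Result.Result"), ("Result", "Result"));
    assert_eq!(split_concrete_type("Str"), ("", "Str"));
}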
fn chomp_access_chain<'a>(buffer: &'a [u8], parts: &mut Vec<'a, &'a str>) -> Result<u16, u16> {
let mut chomped = 0;
while let Some(b'.') = buffer.get(chomped) {
match &buffer.get(chomped + 1..) {
Some(slice) => match chomp_lowercase_part(slice) {
Ok(name) => {
let value = unsafe {
std::str::from_utf8_unchecked(
&buffer[chomped + 1..chomped + 1 + name.len()],
)
};
parts.push(value);
chomped += name.len() + 1;
}
Err(_) => return Err(chomped as u16 + 1),
},
None => return Err(chomped as u16 + 1),
}
}
if chomped == 0 {
Err(0)
} else {
Ok(chomped as u16)
}
}
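// Taken together, chomp_identifier_chain, chomp_module_chain and
// chomp_access_chain split a dotted chain into an uppercase module prefix and
// lowercase access parts. A rough standalone equivalent (hypothetical helper;
// error cases such as QualifiedTag and WeirdDotAccess are omitted):

fn split_chain(chain: &str) -> (String, Vec<&str>) {
    let mut module_segments = Vec::new();
    let mut parts = Vec::new();
    for segment in chain.split('.') {
        let starts_upper = segment.chars().next().map_or(false, |c| c.is_uppercase());
        if starts_upper && parts.is_empty() {
            module_segments.push(segment); // still inside the module prefix
        } else {
            parts.push(segment); // field-access parts
        }
    }
    (module_segments.join("."), parts)
}

fn main() {
    let (module_name, parts) = split_chain("Json.Decode.field.name");
    assert_eq!(module_name, "Json.Decode");
    assert_eq!(parts, vec!["field", "name"]);
}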
fn parse_ident_help_help<'a>(
arena: &'a Bump,
mut state: State<'a>,
) -> ParseResult<'a, Ident<'a>, BadIdent> {
match chomp_identifier_chain(arena, state.bytes, state.line, state.column) {
Ok((width, ident)) => {
state = advance_state!(state, width as usize)?;
Ok((MadeProgress, ident, state))
}
Err((0, fail)) => Err((NoProgress, fail, state)),
Err((width, fail)) => {
state = advance_state!(state, width as usize)?;
Err((MadeProgress, fail, state))
}
} }
// Record the final parts.
if is_capitalized {
capitalized_parts.push(part_buf.into_bump_str());
} else {
noncapitalized_parts.push(part_buf.into_bump_str());
}
let answer = if is_accessor_fn {
// Handle accessor functions first because they have the strictest requirements.
// Accessor functions may have exactly 1 noncapitalized part, and no capitalized parts.
if capitalized_parts.is_empty() && noncapitalized_parts.len() == 1 && !is_private_tag {
let value = noncapitalized_parts.iter().next().unwrap();
Ident::AccessorFunction(value)
} else {
return Err((
MadeProgress,
BadIdent::WeirdAccessor(state.line, state.column),
state,
));
}
} else if noncapitalized_parts.is_empty() {
// We have capitalized parts only, so this must be a tag.
match capitalized_parts.first() {
Some(value) => {
if capitalized_parts.len() == 1 {
if is_private_tag {
Ident::PrivateTag(value)
} else {
Ident::GlobalTag(value)
}
} else {
// This is a qualified tag, which is not allowed!
return Err((
MadeProgress,
BadIdent::QualifiedTag(state.line, state.column),
state,
));
}
}
None => {
// We had neither capitalized nor noncapitalized parts,
// yet we made it this far. The only explanation is that this was
// a stray '.' drifting through the cosmos.
return Err((
MadeProgress,
BadIdent::StrayDot(state.line, state.column),
state,
));
}
}
} else if is_private_tag {
// This is qualified field access with an '@' in front, which does not make sense!
return Err((
MadeProgress,
BadIdent::PrivateTagFieldAccess(state.line, state.column),
state,
));
} else {
// We have multiple noncapitalized parts, so this must be field access.
Ident::Access {
module_name: join_module_parts(arena, capitalized_parts.into_bump_slice()),
parts: noncapitalized_parts.into_bump_slice(),
}
};
Ok((Progress::MadeProgress, (answer, None), state))
} }
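// A recurring pattern in this refactor: the chomp_* functions are pure
// functions over byte slices that return how many bytes they consumed, and
// only the caller advances the parser state. A minimal sketch with a
// hypothetical state type:

struct MiniState<'a> {
    bytes: &'a [u8],
    column: u16,
}

impl<'a> MiniState<'a> {
    fn advance(&mut self, width: usize) {
        self.bytes = &self.bytes[width..];
        self.column += width as u16;
    }
}

fn main() {
    let mut state = MiniState { bytes: &b"hello world"[..], column: 0 };
    // a chomp-style helper: count leading ASCII letters, touch no state
    let width = state.bytes.iter().take_while(|b| b.is_ascii_alphabetic()).count();
    state.advance(width); // the caller owns all position bookkeeping
    assert_eq!(state.column, 5);
    assert_eq!(state.bytes, &b" world"[..]);
}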

View file

@ -1,62 +1,64 @@
use crate::ast::{Attempting, CommentOrNewline, Def, Module}; use crate::ast::{CommentOrNewline, Def, Module};
use crate::blankspace::{space0, space0_around, space0_before, space1}; use crate::blankspace::{space0_before_e, space0_e};
use crate::expr::def;
use crate::header::{ use crate::header::{
package_entry, package_or_path, AppHeader, Effects, ExposesEntry, ImportsEntry, package_entry, package_name, package_or_path, AppHeader, Effects, ExposesEntry, ImportsEntry,
InterfaceHeader, ModuleName, PackageEntry, PackageName, PackageOrPath, PlatformHeader, To, InterfaceHeader, ModuleName, PackageEntry, PlatformHeader, To, TypedIdent,
TypedIdent,
}; };
use crate::ident::{lowercase_ident, unqualified_ident, uppercase_ident}; use crate::ident::{lowercase_ident, unqualified_ident, uppercase_ident};
use crate::parser::Progress::{self, *}; use crate::parser::Progress::{self, *};
use crate::parser::{ use crate::parser::{
self, ascii_char, ascii_string, backtrackable, end_of_file, loc, optional, peek_utf8_char, backtrackable, specialize, word1, Col, EEffects, EExposes, EHeader, EImports, EPackages,
peek_utf8_char_at, unexpected, unexpected_eof, Either, ParseResult, Parser, State, SyntaxError, EProvides, ERequires, ETypedIdent, Parser, Row, State, SyntaxError,
}; };
use crate::string_literal; use crate::string_literal;
use crate::type_annotation; use crate::type_annotation;
use bumpalo::collections::{String, Vec}; use bumpalo::collections::Vec;
use bumpalo::Bump;
use roc_region::all::Located; use roc_region::all::Located;
pub fn header<'a>() -> impl Parser<'a, Module<'a>, SyntaxError<'a>> { pub fn parse_header<'a>(
one_of!(interface_module(), app_module(), platform_module()) arena: &'a bumpalo::Bump,
state: State<'a>,
) -> Result<(Module<'a>, State<'a>), EHeader<'a>> {
match header().parse(arena, state) {
Ok((_, module, state)) => Ok((module, state)),
Err((_, fail, _)) => Err(fail),
}
} }
#[inline(always)] fn header<'a>() -> impl Parser<'a, Module<'a>, EHeader<'a>> {
fn app_module<'a>() -> impl Parser<'a, Module<'a>, SyntaxError<'a>> { use crate::parser::keyword_e;
map!(app_header(), |header| { Module::App { header } })
}
#[inline(always)] one_of![
fn platform_module<'a>() -> impl Parser<'a, Module<'a>, SyntaxError<'a>> { map!(
map!(platform_header(), |header| { Module::Platform { header } }) skip_first!(keyword_e("app", EHeader::Start), app_header()),
} |header| { Module::App { header } }
#[inline(always)]
fn interface_module<'a>() -> impl Parser<'a, Module<'a>, SyntaxError<'a>> {
map!(interface_header(), |header| {
Module::Interface { header }
})
}
#[inline(always)]
pub fn interface_header<'a>() -> impl Parser<'a, InterfaceHeader<'a>, SyntaxError<'a>> {
parser::map(
and!(
skip_first!(
ascii_string("interface"),
and!(space1(1), loc!(module_name()))
), ),
and!(exposes_values(), imports()) map!(
skip_first!(keyword_e("platform", EHeader::Start), platform_header()),
|header| { Module::Platform { header } }
), ),
|( map!(
(after_interface_keyword, name), skip_first!(keyword_e("interface", EHeader::Start), interface_header()),
( |header| { Module::Interface { header } }
((before_exposes, after_exposes), exposes), )
((before_imports, after_imports), imports), ]
), }
)| {
InterfaceHeader { #[inline(always)]
fn interface_header<'a>() -> impl Parser<'a, InterfaceHeader<'a>, EHeader<'a>> {
|arena, state| {
let min_indent = 1;
let (_, after_interface_keyword, state) =
space0_e(min_indent, EHeader::Space, EHeader::IndentStart).parse(arena, state)?;
let (_, name, state) = loc!(module_name_help(EHeader::ModuleName)).parse(arena, state)?;
let (_, ((before_exposes, after_exposes), exposes), state) =
specialize(EHeader::Exposes, exposes_values()).parse(arena, state)?;
let (_, ((before_imports, after_imports), imports), state) =
specialize(EHeader::Imports, imports()).parse(arena, state)?;
let header = InterfaceHeader {
name, name,
exposes, exposes,
imports, imports,
@ -65,144 +67,92 @@ pub fn interface_header<'a>() -> impl Parser<'a, InterfaceHeader<'a>, SyntaxErro
after_exposes, after_exposes,
before_imports, before_imports,
after_imports, after_imports,
}
},
)
}
#[inline(always)]
pub fn package_name<'a>() -> impl Parser<'a, PackageName<'a>, SyntaxError<'a>> {
// e.g. rtfeldman/blah
//
// Package names and accounts can be capitalized and can contain dashes.
// They cannot contain underscores or other special characters.
// They must be ASCII.
map!(
and!(
parse_package_part,
skip_first!(ascii_char(b'/'), parse_package_part)
),
|(account, pkg)| { PackageName { account, pkg } }
)
}
pub fn parse_package_part<'a>(
arena: &'a Bump,
mut state: State<'a>,
) -> ParseResult<'a, &'a str, SyntaxError<'a>> {
let mut part_buf = String::new_in(arena); // The current "part" (parts are dot-separated.)
while !state.bytes.is_empty() {
match peek_utf8_char(&state) {
Ok((ch, bytes_parsed)) => {
if ch == '-' || ch.is_ascii_alphanumeric() {
part_buf.push(ch);
state = state.advance_without_indenting(bytes_parsed)?;
} else {
let progress = Progress::progress_when(!part_buf.is_empty());
return Ok((progress, part_buf.into_bump_str(), state));
}
}
Err(reason) => {
let progress = Progress::progress_when(!part_buf.is_empty());
return state.fail(arena, progress, reason);
}
}
}
Err(unexpected_eof(arena, state, 0))
}
#[inline(always)]
pub fn module_name<'a>() -> impl Parser<'a, ModuleName<'a>, SyntaxError<'a>> {
move |arena, mut state: State<'a>| {
match peek_utf8_char(&state) {
Ok((first_letter, bytes_parsed)) => {
if !first_letter.is_uppercase() {
return Err(unexpected(0, Attempting::Module, state));
}; };
let mut buf = String::with_capacity_in(4, arena); Ok((MadeProgress, header, state))
}
}
buf.push(first_letter); fn chomp_module_name(buffer: &[u8]) -> Result<&str, Progress> {
use encode_unicode::CharExt;
state = state.advance_without_indenting(bytes_parsed)?; let mut chomped = 0;
while !state.bytes.is_empty() { if let Ok((first_letter, width)) = char::from_utf8_slice_start(&buffer[chomped..]) {
match peek_utf8_char(&state) { if first_letter.is_uppercase() {
Ok((ch, bytes_parsed)) => { chomped += width;
} else {
return Err(Progress::NoProgress);
}
}
while let Ok((ch, width)) = char::from_utf8_slice_start(&buffer[chomped..]) {
// After the first character, only these are allowed: // After the first character, only these are allowed:
// //
// * Unicode alphabetic chars - you might include `鹏` if that's clear to your readers // * Unicode alphabetic chars - you might include `鹏` if that's clear to your readers
// * ASCII digits - e.g. `1` but not `¾`, both of which pass .is_numeric() // * ASCII digits - e.g. `1` but not `¾`, both of which pass .is_numeric()
// * A '.' separating module parts // * A '.' separating module parts
if ch.is_alphabetic() || ch.is_ascii_digit() { if ch.is_alphabetic() || ch.is_ascii_digit() {
state = state.advance_without_indenting(bytes_parsed)?; chomped += width;
buf.push(ch);
} else if ch == '.' { } else if ch == '.' {
match peek_utf8_char_at(&state, 1) { chomped += width;
Ok((next, next_bytes_parsed)) => {
if next.is_uppercase() {
// If we hit another uppercase letter, keep going!
buf.push('.');
buf.push(next);
state = state.advance_without_indenting( if let Ok((first_letter, width)) = char::from_utf8_slice_start(&buffer[chomped..]) {
bytes_parsed + next_bytes_parsed, if first_letter.is_uppercase() {
)?; chomped += width;
} else if first_letter == '{' {
// the `.{` starting an importing clause like `Foo.{ bar, baz }`
chomped -= width;
break;
} else { } else {
// We have finished parsing the module name. return Err(Progress::MadeProgress);
//
// There may be an identifier after this '.',
// e.g. "baz" in `Foo.Bar.baz`
return Ok((
MadeProgress,
ModuleName::new(buf.into_bump_str()),
state,
));
} }
} }
Err(reason) => return state.fail(arena, MadeProgress, reason),
}
} else { } else {
// This is the end of the module name. We're done! // we're done
break; break;
} }
} }
Err(reason) => return state.fail(arena, MadeProgress, reason),
}
}
Ok((MadeProgress, ModuleName::new(buf.into_bump_str()), state)) let name = unsafe { std::str::from_utf8_unchecked(&buffer[..chomped]) };
}
Err(reason) => state.fail(arena, MadeProgress, reason), Ok(name)
}
#[inline(always)]
fn module_name<'a>() -> impl Parser<'a, ModuleName<'a>, ()> {
|_, mut state: State<'a>| match chomp_module_name(state.bytes) {
Ok(name) => {
let width = name.len();
state.column += width as u16;
state.bytes = &state.bytes[width..];
Ok((MadeProgress, ModuleName::new(name), state))
} }
Err(progress) => Err((progress, (), state)),
} }
} }
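// The module-name chomper above follows the same style: a standalone sketch
// (hypothetical helper) that takes the longest run of dot-separated,
// uppercase-leading segments and stops before a `.{` exposing clause
// (error handling for malformed segments is omitted):

fn chomp_module_name_sketch(s: &str) -> &str {
    let mut end = 0;
    for segment in s.split('.') {
        let ok = segment.chars().next().map_or(false, |c| c.is_uppercase())
            && segment.chars().all(|c| c.is_alphanumeric());
        if !ok {
            break;
        }
        // include the '.' separator for every segment after the first
        end = if end == 0 { segment.len() } else { end + 1 + segment.len() };
    }
    &s[..end]
}

fn main() {
    assert_eq!(chomp_module_name_sketch("Foo.Bar.{ baz }"), "Foo.Bar");
    assert_eq!(chomp_module_name_sketch("Task"), "Task");
}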
#[inline(always)] #[inline(always)]
pub fn app_header<'a>() -> impl Parser<'a, AppHeader<'a>, SyntaxError<'a>> { fn app_header<'a>() -> impl Parser<'a, AppHeader<'a>, EHeader<'a>> {
map_with_arena!( |arena, state| {
and!( let min_indent = 1;
skip_first!(
ascii_string("app"), let (_, after_app_keyword, state) =
and!( space0_e(min_indent, EHeader::Space, EHeader::IndentStart).parse(arena, state)?;
space1(1), let (_, name, state) = loc!(crate::parser::specialize(
loc!(crate::parser::specialize( EHeader::AppName,
|e, r, c| SyntaxError::Expr(crate::parser::EExpr::Str(e, r, c)),
string_literal::parse() string_literal::parse()
)) ))
) .parse(arena, state)?;
),
and!( let (_, opt_pkgs, state) =
optional(packages()), maybe!(specialize(EHeader::Packages, packages())).parse(arena, state)?;
and!(optional(imports()), provides_to()) let (_, opt_imports, state) =
) maybe!(specialize(EHeader::Imports, imports())).parse(arena, state)?;
), let (_, provides, state) =
|arena, ((after_app_keyword, name), (opt_pkgs, (opt_imports, provides)))| { specialize(EHeader::Provides, provides_to()).parse(arena, state)?;
let (before_packages, after_packages, package_entries) = match opt_pkgs { let (before_packages, after_packages, package_entries) = match opt_pkgs {
Some(pkgs) => { Some(pkgs) => {
let pkgs: Packages<'a> = pkgs; // rustc must be told the type here let pkgs: Packages<'a> = pkgs; // rustc must be told the type here
@ -227,7 +177,7 @@ pub fn app_header<'a>() -> impl Parser<'a, AppHeader<'a>, SyntaxError<'a>> {
opt_imports.unwrap_or_else(|| ((&[] as _, &[] as _), Vec::new_in(arena))); opt_imports.unwrap_or_else(|| ((&[] as _, &[] as _), Vec::new_in(arena)));
let provides: ProvidesTo<'a> = provides; // rustc must be told the type here let provides: ProvidesTo<'a> = provides; // rustc must be told the type here
AppHeader { let header = AppHeader {
name, name,
packages: package_entries, packages: package_entries,
imports, imports,
@ -242,44 +192,39 @@ pub fn app_header<'a>() -> impl Parser<'a, AppHeader<'a>, SyntaxError<'a>> {
after_provides: provides.after_provides_keyword, after_provides: provides.after_provides_keyword,
before_to: provides.before_to_keyword, before_to: provides.before_to_keyword,
after_to: provides.after_to_keyword, after_to: provides.after_to_keyword,
};
Ok((MadeProgress, header, state))
} }
}
)
} }
#[inline(always)] #[inline(always)]
pub fn platform_header<'a>() -> impl Parser<'a, PlatformHeader<'a>, SyntaxError<'a>> { fn platform_header<'a>() -> impl Parser<'a, PlatformHeader<'a>, EHeader<'a>> {
parser::map( |arena, state| {
and!( let min_indent = 1;
skip_first!(
ascii_string("platform"), let (_, after_platform_keyword, state) =
and!(space1(1), loc!(package_name())) space0_e(min_indent, EHeader::Space, EHeader::IndentStart).parse(arena, state)?;
), let (_, name, state) =
and!( loc!(specialize(EHeader::PlatformName, package_name())).parse(arena, state)?;
and!(
and!(requires(), and!(exposes_modules(), packages())), let (_, ((before_requires, after_requires), requires), state) =
and!(imports(), provides_without_to()) specialize(EHeader::Requires, requires()).parse(arena, state)?;
),
effects() let (_, ((before_exposes, after_exposes), exposes), state) =
) specialize(EHeader::Exposes, exposes_modules()).parse(arena, state)?;
),
|( let (_, packages, state) = specialize(EHeader::Packages, packages()).parse(arena, state)?;
(after_platform_keyword, name),
( let (_, ((before_imports, after_imports), imports), state) =
( specialize(EHeader::Imports, imports()).parse(arena, state)?;
(
((before_requires, after_requires), requires), let (_, ((before_provides, after_provides), provides), state) =
(((before_exposes, after_exposes), exposes), packages), specialize(EHeader::Provides, provides_without_to()).parse(arena, state)?;
),
( let (_, effects, state) = specialize(EHeader::Effects, effects()).parse(arena, state)?;
((before_imports, after_imports), imports),
((before_provides, after_provides), provides), let header = PlatformHeader {
),
),
effects,
),
)| {
PlatformHeader {
name, name,
requires, requires,
exposes, exposes,
@ -298,17 +243,42 @@ pub fn platform_header<'a>() -> impl Parser<'a, PlatformHeader<'a>, SyntaxError<
after_imports, after_imports,
before_provides, before_provides,
after_provides, after_provides,
};
Ok((MadeProgress, header, state))
}
}
fn end_of_file<'a>() -> impl Parser<'a, (), SyntaxError<'a>> {
|_arena, state: State<'a>| {
if state.has_reached_end() {
Ok((NoProgress, (), state))
} else {
Err((NoProgress, SyntaxError::ConditionFailed, state))
}
} }
},
)
} }
#[inline(always)] #[inline(always)]
pub fn module_defs<'a>() -> impl Parser<'a, Vec<'a, Located<Def<'a>>>, SyntaxError<'a>> { pub fn module_defs<'a>() -> impl Parser<'a, Vec<'a, Located<Def<'a>>>, SyntaxError<'a>> {
use crate::parser::EExpr;
// force that we parse until the end of the input // force that we parse until the end of the input
skip_second!(zero_or_more!(space0_around(loc(def(0)), 0)), end_of_file()) let min_indent = 0;
skip_second!(
specialize(
|e, _, _| SyntaxError::Expr(e),
zero_or_more!(crate::blankspace::space0_around_ee(
loc!(crate::expr::def_help(min_indent)),
min_indent,
EExpr::Space,
EExpr::IndentStart,
EExpr::IndentEnd,
))
),
end_of_file()
)
} }
#[derive(Debug)]
struct ProvidesTo<'a> { struct ProvidesTo<'a> {
entries: Vec<'a, Located<ExposesEntry<'a, &'a str>>>, entries: Vec<'a, Located<ExposesEntry<'a, &'a str>>>,
to: Located<To<'a>>, to: Located<To<'a>>,
@ -319,48 +289,39 @@ struct ProvidesTo<'a> {
after_to_keyword: &'a [CommentOrNewline<'a>], after_to_keyword: &'a [CommentOrNewline<'a>],
} }
fn provides_to_package<'a>() -> impl Parser<'a, To<'a>, EProvides<'a>> {
one_of![
specialize(
|_, r, c| EProvides::Identifier(r, c),
map!(lowercase_ident(), To::ExistingPackage)
),
specialize(EProvides::Package, map!(package_or_path(), To::NewPackage))
]
}
#[inline(always)] #[inline(always)]
fn provides_to<'a>() -> impl Parser<'a, ProvidesTo<'a>, SyntaxError<'a>> { fn provides_to<'a>() -> impl Parser<'a, ProvidesTo<'a>, EProvides<'a>> {
let min_indent = 1;
map!( map!(
and!( and!(
provides_without_to(),
and!( and!(
skip_second!(backtrackable(space1(1)), ascii_string("provides")), spaces_around_keyword(
space1(1) min_indent,
"to",
EProvides::To,
EProvides::Space,
EProvides::IndentTo,
EProvides::IndentListStart
), ),
and!( loc!(provides_to_package())
collection!(
ascii_char(b'['),
loc!(map!(unqualified_ident(), ExposesEntry::Exposed)),
ascii_char(b','),
ascii_char(b']'),
1
),
and!(
space1(1),
skip_first!(
ascii_string("to"),
and!(
space1(1),
loc!(either!(lowercase_ident(), package_or_path()))
)
)
)
) )
), ),
|( |(
(before_provides_keyword, after_provides_keyword), ((before_provides_keyword, after_provides_keyword), entries),
(entries, (before_to_keyword, (after_to_keyword, loc_to))), ((before_to_keyword, after_to_keyword), to),
)| { )| {
let loc_to: Located<Either<&'a str, PackageOrPath<'a>>> = loc_to;
let to_val = match loc_to.value {
Either::First(pkg) => To::ExistingPackage(pkg),
Either::Second(pkg) => To::NewPackage(pkg),
};
let to = Located {
value: to_val,
region: loc_to.region,
};
ProvidesTo { ProvidesTo {
entries, entries,
to, to,
@ -380,20 +341,44 @@ fn provides_without_to<'a>() -> impl Parser<
(&'a [CommentOrNewline<'a>], &'a [CommentOrNewline<'a>]), (&'a [CommentOrNewline<'a>], &'a [CommentOrNewline<'a>]),
Vec<'a, Located<ExposesEntry<'a, &'a str>>>, Vec<'a, Located<ExposesEntry<'a, &'a str>>>,
), ),
SyntaxError<'a>, EProvides<'a>,
> { > {
let min_indent = 1;
and!( and!(
and!(skip_second!(space1(1), ascii_string("provides")), space1(1)), spaces_around_keyword(
collection!( min_indent,
ascii_char(b'['), "provides",
loc!(map!(unqualified_ident(), ExposesEntry::Exposed)), EProvides::Provides,
ascii_char(b','), EProvides::Space,
ascii_char(b']'), EProvides::IndentProvides,
1 EProvides::IndentListStart
),
collection_e!(
word1(b'[', EProvides::ListStart),
exposes_entry(EProvides::Identifier),
word1(b',', EProvides::ListEnd),
word1(b']', EProvides::ListEnd),
min_indent,
EProvides::Space,
EProvides::IndentListEnd
) )
) )
} }
fn exposes_entry<'a, F, E>(
to_expectation: F,
) -> impl Parser<'a, Located<ExposesEntry<'a, &'a str>>, E>
where
F: Fn(crate::parser::Row, crate::parser::Col) -> E,
F: Copy,
E: 'a,
{
loc!(map!(
specialize(|_, r, c| to_expectation(r, c), unqualified_ident()),
ExposesEntry::Exposed
))
}
#[inline(always)] #[inline(always)]
fn requires<'a>() -> impl Parser< fn requires<'a>() -> impl Parser<
'a, 'a,
@ -401,16 +386,26 @@ fn requires<'a>() -> impl Parser<
(&'a [CommentOrNewline<'a>], &'a [CommentOrNewline<'a>]), (&'a [CommentOrNewline<'a>], &'a [CommentOrNewline<'a>]),
Vec<'a, Located<TypedIdent<'a>>>, Vec<'a, Located<TypedIdent<'a>>>,
), ),
SyntaxError<'a>, ERequires<'a>,
> { > {
let min_indent = 1;
and!( and!(
and!(skip_second!(space1(1), ascii_string("requires")), space1(1)), spaces_around_keyword(
collection!( min_indent,
ascii_char(b'{'), "requires",
loc!(typed_ident()), ERequires::Requires,
ascii_char(b','), ERequires::Space,
ascii_char(b'}'), ERequires::IndentRequires,
1 ERequires::IndentListStart
),
collection_e!(
word1(b'{', ERequires::ListStart),
specialize(ERequires::TypedIdent, loc!(typed_ident())),
word1(b',', ERequires::ListEnd),
word1(b'}', ERequires::ListEnd),
min_indent,
ERequires::Space,
ERequires::IndentListEnd
) )
) )
} }
@ -422,20 +417,51 @@ fn exposes_values<'a>() -> impl Parser<
(&'a [CommentOrNewline<'a>], &'a [CommentOrNewline<'a>]), (&'a [CommentOrNewline<'a>], &'a [CommentOrNewline<'a>]),
Vec<'a, Located<ExposesEntry<'a, &'a str>>>, Vec<'a, Located<ExposesEntry<'a, &'a str>>>,
), ),
SyntaxError<'a>, EExposes,
> { > {
let min_indent = 1;
and!( and!(
and!(skip_second!(space1(1), ascii_string("exposes")), space1(1)), spaces_around_keyword(
collection!( min_indent,
ascii_char(b'['), "exposes",
loc!(map!(unqualified_ident(), ExposesEntry::Exposed)), EExposes::Exposes,
ascii_char(b','), EExposes::Space,
ascii_char(b']'), EExposes::IndentExposes,
1 EExposes::IndentListStart
),
collection_e!(
word1(b'[', EExposes::ListStart),
exposes_entry(EExposes::Identifier),
word1(b',', EExposes::ListEnd),
word1(b']', EExposes::ListEnd),
min_indent,
EExposes::Space,
EExposes::IndentListEnd
) )
) )
} }
fn spaces_around_keyword<'a, E>(
min_indent: u16,
keyword: &'static str,
expectation: fn(Row, Col) -> E,
space_problem: fn(crate::parser::BadInputError, Row, Col) -> E,
indent_problem1: fn(Row, Col) -> E,
indent_problem2: fn(Row, Col) -> E,
) -> impl Parser<'a, (&'a [CommentOrNewline<'a>], &'a [CommentOrNewline<'a>]), E>
where
E: 'a,
{
and!(
skip_second!(
backtrackable(space0_e(min_indent, space_problem, indent_problem1)),
crate::parser::keyword_e(keyword, expectation)
),
space0_e(min_indent, space_problem, indent_problem2)
)
}
#[inline(always)] #[inline(always)]
fn exposes_modules<'a>() -> impl Parser< fn exposes_modules<'a>() -> impl Parser<
'a, 'a,
@ -443,20 +469,45 @@ fn exposes_modules<'a>() -> impl Parser<
(&'a [CommentOrNewline<'a>], &'a [CommentOrNewline<'a>]), (&'a [CommentOrNewline<'a>], &'a [CommentOrNewline<'a>]),
Vec<'a, Located<ExposesEntry<'a, ModuleName<'a>>>>, Vec<'a, Located<ExposesEntry<'a, ModuleName<'a>>>>,
), ),
SyntaxError<'a>, EExposes,
> { > {
let min_indent = 1;
and!( and!(
and!(skip_second!(space1(1), ascii_string("exposes")), space1(1)), spaces_around_keyword(
collection!( min_indent,
ascii_char(b'['), "exposes",
loc!(map!(module_name(), ExposesEntry::Exposed)), EExposes::Exposes,
ascii_char(b','), EExposes::Space,
ascii_char(b']'), EExposes::IndentExposes,
1 EExposes::IndentListStart
),
collection_e!(
word1(b'[', EExposes::ListStart),
exposes_module(EExposes::Identifier),
word1(b',', EExposes::ListEnd),
word1(b']', EExposes::ListEnd),
min_indent,
EExposes::Space,
EExposes::IndentListEnd
) )
) )
} }
fn exposes_module<'a, F, E>(
to_expectation: F,
) -> impl Parser<'a, Located<ExposesEntry<'a, ModuleName<'a>>>, E>
where
F: Fn(crate::parser::Row, crate::parser::Col) -> E,
F: Copy,
E: 'a,
{
loc!(map!(
specialize(|_, r, c| to_expectation(r, c), module_name()),
ExposesEntry::Exposed
))
}
#[derive(Debug)] #[derive(Debug)]
struct Packages<'a> { struct Packages<'a> {
entries: Vec<'a, Located<PackageEntry<'a>>>, entries: Vec<'a, Located<PackageEntry<'a>>>,
@ -466,19 +517,27 @@ struct Packages<'a> {
} }
#[inline(always)] #[inline(always)]
fn packages<'a>() -> impl Parser<'a, Packages<'a>, SyntaxError<'a>> { fn packages<'a>() -> impl Parser<'a, Packages<'a>, EPackages<'a>> {
let min_indent = 1;
map!( map!(
and!( and!(
and!( spaces_around_keyword(
skip_second!(backtrackable(space1(1)), ascii_string("packages")), min_indent,
space1(1) "packages",
EPackages::Packages,
EPackages::Space,
EPackages::IndentPackages,
EPackages::IndentListStart
), ),
collection!( collection_e!(
ascii_char(b'{'), word1(b'{', EPackages::ListStart),
loc!(package_entry()), specialize(EPackages::PackageEntry, loc!(package_entry())),
ascii_char(b','), word1(b',', EPackages::ListEnd),
ascii_char(b'}'), word1(b'}', EPackages::ListEnd),
1 min_indent,
EPackages::Space,
EPackages::IndentListEnd
) )
), ),
|((before_packages_keyword, after_packages_keyword), entries)| { |((before_packages_keyword, after_packages_keyword), entries)| {
@ -498,42 +557,68 @@ fn imports<'a>() -> impl Parser<
(&'a [CommentOrNewline<'a>], &'a [CommentOrNewline<'a>]), (&'a [CommentOrNewline<'a>], &'a [CommentOrNewline<'a>]),
Vec<'a, Located<ImportsEntry<'a>>>, Vec<'a, Located<ImportsEntry<'a>>>,
), ),
SyntaxError<'a>, EImports,
> { > {
let min_indent = 1;
and!( and!(
and!( spaces_around_keyword(
skip_second!(backtrackable(space1(1)), ascii_string("imports")), min_indent,
space1(1) "imports",
EImports::Imports,
EImports::Space,
EImports::IndentImports,
EImports::IndentListStart
), ),
collection!( collection_e!(
ascii_char(b'['), word1(b'[', EImports::ListStart),
loc!(imports_entry()), loc!(imports_entry()),
ascii_char(b','), word1(b',', EImports::ListEnd),
ascii_char(b']'), word1(b']', EImports::ListEnd),
1 min_indent,
EImports::Space,
EImports::IndentListEnd
) )
) )
} }
#[inline(always)] #[inline(always)]
fn effects<'a>() -> impl Parser<'a, Effects<'a>, SyntaxError<'a>> { fn effects<'a>() -> impl Parser<'a, Effects<'a>, EEffects<'a>> {
move |arena, state| { move |arena, state| {
let (_, spaces_before_effects_keyword, state) = let min_indent = 1;
skip_second!(space1(0), ascii_string("effects")).parse(arena, state)?;
let (_, spaces_after_effects_keyword, state) = space1(0).parse(arena, state)?; let (_, (spaces_before_effects_keyword, spaces_after_effects_keyword), state) =
spaces_around_keyword(
min_indent,
"effects",
EEffects::Effects,
EEffects::Space,
EEffects::IndentEffects,
EEffects::IndentListStart,
)
.parse(arena, state)?;
// e.g. `fx.` // e.g. `fx.`
let (_, type_shortname, state) = let (_, type_shortname, state) = skip_second!(
skip_second!(lowercase_ident(), ascii_char(b'.')).parse(arena, state)?; specialize(|_, r, c| EEffects::Shorthand(r, c), lowercase_ident()),
word1(b'.', EEffects::ShorthandDot)
)
.parse(arena, state)?;
let (_, (type_name, spaces_after_type_name), state) = // the type name, e.g. Effects
and!(uppercase_ident(), space1(0)).parse(arena, state)?; let (_, (type_name, spaces_after_type_name), state) = and!(
let (_, entries, state) = collection!( specialize(|_, r, c| EEffects::TypeName(r, c), uppercase_ident()),
ascii_char(b'{'), space0_e(min_indent, EEffects::Space, EEffects::IndentListStart)
loc!(typed_ident()), )
ascii_char(b','), .parse(arena, state)?;
ascii_char(b'}'), let (_, entries, state) = collection_e!(
1 word1(b'{', EEffects::ListStart),
specialize(EEffects::TypedIdent, loc!(typed_ident())),
word1(b',', EEffects::ListEnd),
word1(b'}', EEffects::ListEnd),
min_indent,
EEffects::Space,
EEffects::IndentListEnd
) )
.parse(arena, state)?; .parse(arena, state)?;
@ -553,63 +638,89 @@ fn effects<'a>() -> impl Parser<'a, Effects<'a>, SyntaxError<'a>> {
} }
#[inline(always)] #[inline(always)]
fn typed_ident<'a>() -> impl Parser<'a, TypedIdent<'a>, SyntaxError<'a>> { fn typed_ident<'a>() -> impl Parser<'a, TypedIdent<'a>, ETypedIdent<'a>> {
move |arena, state| {
// You must have a field name, e.g. "email"
let (_, ident, state) = loc!(lowercase_ident()).parse(arena, state)?;
let (_, spaces_before_colon, state) = space0(0).parse(arena, state)?;
let (_, ann, state) = skip_first!(
ascii_char(b':'),
space0_before(type_annotation::located(0), 0)
)
.parse(arena, state)?;
// e.g. // e.g.
// //
// printLine : Str -> Effect {} // printLine : Str -> Effect {}
let min_indent = 0;
Ok(( map!(
MadeProgress, and!(
and!(
loc!(specialize(
|_, r, c| ETypedIdent::Identifier(r, c),
lowercase_ident()
)),
space0_e(min_indent, ETypedIdent::Space, ETypedIdent::IndentHasType)
),
skip_first!(
word1(b':', ETypedIdent::HasType),
space0_before_e(
specialize(ETypedIdent::Type, type_annotation::located_help(min_indent)),
min_indent,
ETypedIdent::Space,
ETypedIdent::IndentType,
)
)
),
|((ident, spaces_before_colon), ann)| {
TypedIdent::Entry { TypedIdent::Entry {
ident, ident,
spaces_before_colon, spaces_before_colon,
ann, ann,
},
state,
))
} }
}
)
}
fn shortname<'a>() -> impl Parser<'a, &'a str, EImports> {
specialize(|_, r, c| EImports::Shorthand(r, c), lowercase_ident())
}
fn module_name_help<'a, F, E>(to_expectation: F) -> impl Parser<'a, ModuleName<'a>, E>
where
F: Fn(crate::parser::Row, crate::parser::Col) -> E,
E: 'a,
F: 'a,
{
specialize(move |_, r, c| to_expectation(r, c), module_name())
} }
#[inline(always)] #[inline(always)]
#[allow(clippy::type_complexity)] fn imports_entry<'a>() -> impl Parser<'a, ImportsEntry<'a>, EImports> {
fn imports_entry<'a>() -> impl Parser<'a, ImportsEntry<'a>, SyntaxError<'a>> { let min_indent = 1;
type Temp<'a> = (
(Option<&'a str>, ModuleName<'a>),
Option<Vec<'a, Located<ExposesEntry<'a, &'a str>>>>,
);
map_with_arena!( map_with_arena!(
and!( and!(
and!( and!(
// e.g. `base.` // e.g. `base.`
optional(skip_second!(lowercase_ident(), ascii_char(b'.'))), maybe!(skip_second!(
shortname(),
word1(b'.', EImports::ShorthandDot)
)),
// e.g. `Task` // e.g. `Task`
module_name() module_name_help(EImports::ModuleName)
), ),
// e.g. `.{ Task, after}` // e.g. `.{ Task, after}`
optional(skip_first!( maybe!(skip_first!(
ascii_char(b'.'), word1(b'.', EImports::ExposingDot),
collection!( collection_e!(
ascii_char(b'{'), word1(b'{', EImports::SetStart),
loc!(map!(unqualified_ident(), ExposesEntry::Exposed)), exposes_entry(EImports::Identifier),
ascii_char(b','), word1(b',', EImports::SetEnd),
ascii_char(b'}'), word1(b'}', EImports::SetEnd),
1 min_indent,
EImports::Space,
EImports::IndentSetEnd
) )
)) ))
), ),
|arena, |arena, ((opt_shortname, module_name), opt_values): Temp<'a>| {
((opt_shortname, module_name), opt_values): (
(Option<&'a str>, ModuleName<'a>),
Option<Vec<'a, Located<ExposesEntry<'a, &'a str>>>>
)| {
let exposed_values = opt_values.unwrap_or_else(|| Vec::new_in(arena)); let exposed_values = opt_values.unwrap_or_else(|| Vec::new_in(arena));
match opt_shortname { match opt_shortname {

View file

@ -1,7 +1,5 @@
use crate::ast::Base; use crate::ast::Base;
use crate::parser::{parse_utf8, Number, ParseResult, Parser, Progress, State, SyntaxError}; use crate::parser::{Number, ParseResult, Parser, Progress, State};
use std::char;
use std::str::from_utf8_unchecked;
pub enum NumLiteral<'a> { pub enum NumLiteral<'a> {
Float(&'a str), Float(&'a str),
@ -52,10 +50,12 @@ fn chomp_number_base<'a>(
) -> ParseResult<'a, NumLiteral<'a>, Number> { ) -> ParseResult<'a, NumLiteral<'a>, Number> {
let (_is_float, chomped) = chomp_number(bytes); let (_is_float, chomped) = chomp_number(bytes);
match parse_utf8(&bytes[0..chomped]) { let string = unsafe { std::str::from_utf8_unchecked(&bytes[..chomped]) };
Ok(string) => match state.advance_without_indenting(chomped + 2 + is_negative as usize) {
Ok(new) => { let new = state.advance_without_indenting_ee(chomped + 2 + is_negative as usize, |_, _| {
// all is well Number::LineTooLong
})?;
Ok(( Ok((
Progress::MadeProgress, Progress::MadeProgress,
NumLiteral::NonBase10Int { NumLiteral::NonBase10Int {
@ -65,16 +65,6 @@ fn chomp_number_base<'a>(
}, },
new, new,
)) ))
}
Err((_, SyntaxError::LineTooLong(_), new)) => {
// the only error we care about in this context
Err((Progress::MadeProgress, Number::LineTooLong, new))
}
Err(_) => unreachable!("we know advancing will succeed if there is space on the line"),
},
Err(_) => unreachable!("no invalid utf8 could have been chomped"),
}
} }
fn chomp_number_dec<'a>( fn chomp_number_dec<'a>(
@ -94,11 +84,12 @@ fn chomp_number_dec<'a>(
return Err((Progress::NoProgress, Number::End, state)); return Err((Progress::NoProgress, Number::End, state));
} }
let string = unsafe { from_utf8_unchecked(&state.bytes[0..chomped + is_negative as usize]) }; let string =
unsafe { std::str::from_utf8_unchecked(&state.bytes[0..chomped + is_negative as usize]) };
let new = state
.advance_without_indenting_ee(chomped + is_negative as usize, |_, _| Number::LineTooLong)?;
match state.advance_without_indenting(chomped + is_negative as usize) {
Ok(new) => {
// all is well
Ok(( Ok((
Progress::MadeProgress, Progress::MadeProgress,
if is_float { if is_float {
@ -108,13 +99,6 @@ fn chomp_number_dec<'a>(
}, },
new, new,
)) ))
}
Err((_, SyntaxError::LineTooLong(_), new)) => {
// the only error we care about in this context
Err((Progress::MadeProgress, Number::LineTooLong, new))
}
Err(_) => unreachable!("we know advancing will succeed if there is space on the line"),
}
} }
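// Why the from_utf8_unchecked calls above are sound: the number chomper only
// accepts ASCII bytes, so the chomped prefix is guaranteed to be valid UTF-8.
// A standalone sketch with a hypothetical helper:

fn chomp_digits(bytes: &[u8]) -> &str {
    let len = bytes
        .iter()
        .take_while(|&&b| b.is_ascii_digit() || b == b'.')
        .count();
    // SAFETY: every accepted byte is ASCII, hence valid UTF-8.
    unsafe { std::str::from_utf8_unchecked(&bytes[..len]) }
}

fn main() {
    assert_eq!(chomp_digits(b"3.14 remaining"), "3.14");
}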
fn chomp_number(mut bytes: &[u8]) -> (bool, usize) { fn chomp_number(mut bytes: &[u8]) -> (bool, usize) {

File diff suppressed because it is too large Load diff

View file

@ -1,10 +1,10 @@
use crate::ast::Pattern; use crate::ast::Pattern;
use crate::blankspace::{space0_around_ee, space0_before_e, space0_e}; use crate::blankspace::{space0_around_ee, space0_before_e, space0_e};
use crate::ident::{ident, lowercase_ident, Ident}; use crate::ident::{lowercase_ident, parse_ident_help, Ident};
use crate::parser::Progress::{self, *}; use crate::parser::Progress::{self, *};
use crate::parser::{ use crate::parser::{
backtrackable, optional, specialize, specialize_ref, word1, EPattern, PInParens, PRecord, backtrackable, optional, specialize, specialize_ref, word1, EPattern, PInParens, PRecord,
ParseResult, Parser, State, SyntaxError, ParseResult, Parser, State,
}; };
use bumpalo::collections::string::String; use bumpalo::collections::string::String;
use bumpalo::collections::Vec; use bumpalo::collections::Vec;
@ -51,13 +51,6 @@ fn parse_closure_param<'a>(
.parse(arena, state) .parse(arena, state)
} }
pub fn loc_pattern<'a>(min_indent: u16) -> impl Parser<'a, Located<Pattern<'a>>, SyntaxError<'a>> {
specialize(
|e, _, _| SyntaxError::Pattern(e),
loc_pattern_help(min_indent),
)
}
pub fn loc_pattern_help<'a>( pub fn loc_pattern_help<'a>(
min_indent: u16, min_indent: u16,
) -> impl Parser<'a, Located<Pattern<'a>>, EPattern<'a>> { ) -> impl Parser<'a, Located<Pattern<'a>>, EPattern<'a>> {
@ -130,7 +123,7 @@ fn loc_pattern_in_parens_help<'a>(
between!( between!(
word1(b'(', PInParens::Open), word1(b'(', PInParens::Open),
space0_around_ee( space0_around_ee(
move |arena, state| specialize_ref(PInParens::Syntax, loc_pattern(min_indent)) move |arena, state| specialize_ref(PInParens::Pattern, loc_pattern_help(min_indent))
.parse(arena, state), .parse(arena, state),
min_indent, min_indent,
PInParens::Space, PInParens::Space,
@ -176,10 +169,11 @@ fn loc_ident_pattern_help<'a>(
can_have_arguments: bool, can_have_arguments: bool,
) -> impl Parser<'a, Located<Pattern<'a>>, EPattern<'a>> { ) -> impl Parser<'a, Located<Pattern<'a>>, EPattern<'a>> {
move |arena: &'a Bump, state: State<'a>| { move |arena: &'a Bump, state: State<'a>| {
let original_state = state.clone(); let original_state = state;
let (_, loc_ident, state) = let (_, loc_ident, state) =
specialize(|_, r, c| EPattern::Start(r, c), loc!(ident())).parse(arena, state)?; specialize(|_, r, c| EPattern::Start(r, c), loc!(parse_ident_help))
.parse(arena, state)?;
match loc_ident.value { match loc_ident.value {
Ident::GlobalTag(tag) => { Ident::GlobalTag(tag) => {
@ -296,10 +290,6 @@ fn loc_ident_pattern_help<'a>(
} }
} }
pub fn underscore_pattern<'a>() -> impl Parser<'a, Pattern<'a>, SyntaxError<'a>> {
specialize(|e, _, _| SyntaxError::Pattern(e), underscore_pattern_help())
}
fn underscore_pattern_help<'a>() -> impl Parser<'a, Pattern<'a>, EPattern<'a>> { fn underscore_pattern_help<'a>() -> impl Parser<'a, Pattern<'a>, EPattern<'a>> {
move |arena: &'a Bump, state: State<'a>| { move |arena: &'a Bump, state: State<'a>| {
let (_, _, next_state) = word1(b'_', EPattern::Underscore).parse(arena, state)?; let (_, _, next_state) = word1(b'_', EPattern::Underscore).parse(arena, state)?;
@ -324,13 +314,6 @@ fn lowercase_ident_pattern<'a>(
specialize(move |_, _, _| EPattern::End(row, col), lowercase_ident()).parse(arena, state) specialize(move |_, _, _| EPattern::End(row, col), lowercase_ident()).parse(arena, state)
} }
pub fn record_pattern<'a>(min_indent: u16) -> impl Parser<'a, Pattern<'a>, SyntaxError<'a>> {
specialize(
|e, r, c| SyntaxError::Pattern(EPattern::Record(e, r, c)),
record_pattern_help(min_indent),
)
}
#[inline(always)] #[inline(always)]
fn record_pattern_help<'a>(min_indent: u16) -> impl Parser<'a, Pattern<'a>, PRecord<'a>> { fn record_pattern_help<'a>(min_indent: u16) -> impl Parser<'a, Pattern<'a>, PRecord<'a>> {
move |arena, state| { move |arena, state| {
@ -385,7 +368,7 @@ fn record_pattern_field<'a>(min_indent: u16) -> impl Parser<'a, Located<Pattern<
match opt_loc_val { match opt_loc_val {
Some(First(_)) => { Some(First(_)) => {
let val_parser = specialize_ref(PRecord::Syntax, loc_pattern(min_indent)); let val_parser = specialize_ref(PRecord::Pattern, loc_pattern_help(min_indent));
let (_, loc_val, state) = let (_, loc_val, state) =
space0_before_e(val_parser, min_indent, PRecord::Space, PRecord::IndentColon) space0_before_e(val_parser, min_indent, PRecord::Space, PRecord::IndentColon)
.parse(arena, state)?; .parse(arena, state)?;
@ -413,7 +396,7 @@ fn record_pattern_field<'a>(min_indent: u16) -> impl Parser<'a, Located<Pattern<
} }
Some(Second(_)) => { Some(Second(_)) => {
let val_parser = let val_parser =
specialize_ref(PRecord::Syntax, loc!(crate::expr::expr(min_indent))); specialize_ref(PRecord::Expr, loc!(crate::expr::expr_help(min_indent)));
let (_, loc_val, state) = let (_, loc_val, state) =
space0_before_e(val_parser, min_indent, PRecord::Space, PRecord::IndentColon) space0_before_e(val_parser, min_indent, PRecord::Space, PRecord::IndentColon)

View file

@ -1,10 +1,7 @@
use crate::ast::{EscapedChar, StrLiteral, StrSegment}; use crate::ast::{EscapedChar, StrLiteral, StrSegment};
use crate::expr; use crate::expr;
use crate::parser::Progress::*; use crate::parser::Progress::*;
use crate::parser::{ use crate::parser::{allocated, loc, specialize_ref, word1, BadInputError, EString, Parser, State};
allocated, ascii_char, loc, parse_utf8, specialize_ref, word1, BadInputError, EString, Parser,
State,
};
use bumpalo::collections::vec::Vec; use bumpalo::collections::vec::Vec;
use bumpalo::Bump; use bumpalo::Bump;
@ -102,7 +99,7 @@ pub fn parse<'a>() -> impl Parser<'a, StrLiteral<'a>, EString<'a>> {
// to exclude that char we just parsed. // to exclude that char we just parsed.
let string_bytes = &state.bytes[0..(segment_parsed_bytes - 1)]; let string_bytes = &state.bytes[0..(segment_parsed_bytes - 1)];
match parse_utf8(string_bytes) { match std::str::from_utf8(string_bytes) {
Ok(string) => { Ok(string) => {
state = advance_state!(state, string.len())?; state = advance_state!(state, string.len())?;
@ -233,9 +230,9 @@ pub fn parse<'a>() -> impl Parser<'a, StrLiteral<'a>, EString<'a>> {
// Parse an arbitrary expression, then give a // Parse an arbitrary expression, then give a
// canonicalization error if that expression variant // canonicalization error if that expression variant
// is not allowed inside a string interpolation. // is not allowed inside a string interpolation.
let (_progress, loc_expr, new_state) = specialize_ref( let (_progress, loc_expr, new_state) = skip_second!(
EString::Format, specialize_ref(EString::Format, loc(allocated(expr::expr_help(0)))),
skip_second!(loc(allocated(expr::expr(0))), ascii_char(b')')), word1(b')', EString::FormatEnd)
) )
.parse(arena, state)?; .parse(arena, state)?;

View file

@ -1,13 +1,10 @@
use crate::ast::{self, Attempting}; use crate::ast;
use crate::blankspace::space0_before; use crate::module::module_defs;
use crate::expr::expr; use crate::parser::{Parser, State, SyntaxError};
use crate::module::{header, module_defs};
use crate::parser::{loc, Parser, State, SyntaxError};
use bumpalo::collections::Vec; use bumpalo::collections::Vec;
use bumpalo::Bump; use bumpalo::Bump;
use roc_region::all::Located; use roc_region::all::Located;
#[allow(dead_code)]
pub fn parse_expr_with<'a>( pub fn parse_expr_with<'a>(
arena: &'a Bump, arena: &'a Bump,
input: &'a str, input: &'a str,
@ -15,24 +12,12 @@ pub fn parse_expr_with<'a>(
parse_loc_with(arena, input).map(|loc_expr| loc_expr.value) parse_loc_with(arena, input).map(|loc_expr| loc_expr.value)
} }
pub fn parse_header_with<'a>(
arena: &'a Bump,
input: &'a str,
) -> Result<ast::Module<'a>, SyntaxError<'a>> {
let state = State::new_in(arena, input.trim().as_bytes(), Attempting::Module);
let answer = header().parse(arena, state);
answer
.map(|(_, loc_expr, _)| loc_expr)
.map_err(|(_, fail, _)| fail)
}
#[allow(dead_code)] #[allow(dead_code)]
pub fn parse_defs_with<'a>( pub fn parse_defs_with<'a>(
arena: &'a Bump, arena: &'a Bump,
input: &'a str, input: &'a str,
) -> Result<Vec<'a, Located<ast::Def<'a>>>, SyntaxError<'a>> { ) -> Result<Vec<'a, Located<ast::Def<'a>>>, SyntaxError<'a>> {
let state = State::new_in(arena, input.trim().as_bytes(), Attempting::Module); let state = State::new(input.trim().as_bytes());
let answer = module_defs().parse(arena, state); let answer = module_defs().parse(arena, state);
answer answer
.map(|(_, loc_expr, _)| loc_expr) .map(|(_, loc_expr, _)| loc_expr)
@ -44,11 +29,10 @@ pub fn parse_loc_with<'a>(
arena: &'a Bump, arena: &'a Bump,
input: &'a str, input: &'a str,
) -> Result<Located<ast::Expr<'a>>, SyntaxError<'a>> { ) -> Result<Located<ast::Expr<'a>>, SyntaxError<'a>> {
let state = State::new_in(arena, input.trim().as_bytes(), Attempting::Module); let state = State::new(input.trim().as_bytes());
let parser = space0_before(loc(expr(0)), 0);
let answer = parser.parse(&arena, state);
answer match crate::expr::test_parse_expr(0, arena, state) {
.map(|(_, loc_expr, _)| loc_expr) Ok(loc_expr) => Ok(loc_expr),
.map_err(|(_, fail, _)| fail) Err(fail) => Err(SyntaxError::Expr(fail)),
}
} }

View file

@ -1,24 +1,16 @@
use crate::ast::{AssignedField, Tag, TypeAnnotation}; use crate::ast::{AssignedField, Tag, TypeAnnotation};
use crate::blankspace::{space0_around_ee, space0_before_e, space0_e}; use crate::blankspace::{space0_around_ee, space0_before_e, space0_e};
use crate::ident::join_module_parts;
use crate::keyword; use crate::keyword;
use crate::parser::{ use crate::parser::{
allocated, backtrackable, not_e, optional, peek_utf8_char_e, specialize, specialize_ref, word1, allocated, backtrackable, optional, specialize, specialize_ref, word1, word2, ParseResult,
word2, ParseResult, Parser, Parser,
Progress::{self, *}, Progress::{self, *},
State, SyntaxError, TApply, TInParens, TRecord, TTagUnion, TVariable, Type, State, TApply, TInParens, TRecord, TTagUnion, Type,
}; };
use bumpalo::collections::string::String;
use bumpalo::collections::vec::Vec; use bumpalo::collections::vec::Vec;
use bumpalo::Bump; use bumpalo::Bump;
use roc_region::all::{Located, Region}; use roc_region::all::{Located, Region};
pub fn located<'a>(
min_indent: u16,
) -> impl Parser<'a, Located<TypeAnnotation<'a>>, SyntaxError<'a>> {
specialize(|x, _, _| SyntaxError::Type(x), expression(min_indent))
}
pub fn located_help<'a>(min_indent: u16) -> impl Parser<'a, Located<TypeAnnotation<'a>>, Type<'a>> { pub fn located_help<'a>(min_indent: u16) -> impl Parser<'a, Located<TypeAnnotation<'a>>, Type<'a>> {
expression(min_indent) expression(min_indent)
} }
@ -62,7 +54,7 @@ fn term<'a>(min_indent: u16) -> impl Parser<'a, Located<TypeAnnotation<'a>>, Typ
loc!(specialize(Type::TRecord, record_type(min_indent))), loc!(specialize(Type::TRecord, record_type(min_indent))),
loc!(specialize(Type::TTagUnion, tag_union_type(min_indent))), loc!(specialize(Type::TTagUnion, tag_union_type(min_indent))),
loc!(applied_type(min_indent)), loc!(applied_type(min_indent)),
loc!(specialize(Type::TVariable, parse_type_variable)) loc!(parse_type_variable)
), ),
// Inline alias notation, e.g. [ Nil, Cons a (List a) ] as List a // Inline alias notation, e.g. [ Nil, Cons a (List a) ] as List a
one_of![ one_of![
@ -117,21 +109,13 @@ fn loc_applied_arg<'a>(min_indent: u16) -> impl Parser<'a, Located<TypeAnnotatio
map_with_arena!( map_with_arena!(
and!( and!(
backtrackable(space0_e(min_indent, Type::TSpace, Type::TIndentStart)), backtrackable(space0_e(min_indent, Type::TSpace, Type::TIndentStart)),
skip_first!(
// Once we hit an "as", stop parsing args
// and roll back parsing of preceding spaces
not_e(
crate::parser::keyword(keyword::AS, min_indent),
Type::TStart
),
one_of!( one_of!(
loc_wildcard(), loc_wildcard(),
specialize(Type::TInParens, loc_type_in_parens(min_indent)), specialize(Type::TInParens, loc_type_in_parens(min_indent)),
loc!(specialize(Type::TRecord, record_type(min_indent))), loc!(specialize(Type::TRecord, record_type(min_indent))),
loc!(specialize(Type::TTagUnion, tag_union_type(min_indent))), loc!(specialize(Type::TTagUnion, tag_union_type(min_indent))),
loc!(specialize(Type::TApply, parse_concrete_type)), loc!(specialize(Type::TApply, parse_concrete_type)),
loc!(specialize(Type::TVariable, parse_type_variable)) loc!(parse_type_variable)
)
) )
), ),
|arena: &'a Bump, (spaces, argument): (&'a [_], Located<TypeAnnotation<'a>>)| { |arena: &'a Bump, (spaces, argument): (&'a [_], Located<TypeAnnotation<'a>>)| {
@ -192,102 +176,9 @@ where
F: Fn(Row, Col) -> E, F: Fn(Row, Col) -> E,
E: 'a, E: 'a,
{ {
use encode_unicode::CharExt; move |arena, state: State<'a>| match crate::ident::tag_name().parse(arena, state) {
Ok(good) => Ok(good),
move |arena, mut state: State<'a>| { Err((progress, _, state)) => Err((progress, to_problem(state.line, state.column), state)),
let mut buf;
match char::from_utf8_slice_start(state.bytes) {
Ok((first_letter, bytes_parsed)) => match first_letter {
'@' => {
debug_assert_eq!(bytes_parsed, 1);
// parsing a private tag name
match char::from_utf8_slice_start(&state.bytes[1..]) {
Ok((second_letter, bytes_parsed_2)) if second_letter.is_uppercase() => {
let total_parsed = bytes_parsed + bytes_parsed_2;
buf = String::with_capacity_in(total_parsed, arena);
buf.push('@');
buf.push(second_letter);
state = state.advance_without_indenting(total_parsed).map_err(
|(progress, _, state)| {
(progress, to_problem(state.line, state.column), state)
},
)?;
}
_ => {
// important for error messages
state = state.advance_without_indenting(bytes_parsed).map_err(
|(progress, _, state)| {
(progress, to_problem(state.line, state.column), state)
},
)?;
let row = state.line;
let col = state.column;
return state.fail(arena, MadeProgress, to_problem(row, col));
}
}
}
_ if first_letter.is_uppercase() => {
buf = String::with_capacity_in(1, arena);
buf.push(first_letter);
state = state.advance_without_indenting(bytes_parsed).map_err(
|(progress, _, state)| {
(progress, to_problem(state.line, state.column), state)
},
)?;
}
_ => {
let row = state.line;
let col = state.column;
return state.fail(arena, NoProgress, to_problem(row, col));
}
},
Err(_) => {
let row = state.line;
let col = state.column;
return state.fail(arena, NoProgress, to_problem(row, col));
}
};
while !state.bytes.is_empty() {
match char::from_utf8_slice_start(state.bytes) {
Ok((ch, bytes_parsed)) => {
// After the first character, only these are allowed:
//
// * Unicode alphabetic chars - you might include `鹏` if that's clear to your readers
// * ASCII digits - e.g. `1` but not `¾`, both of which pass .is_numeric()
// * A ':' indicating the end of the field
if ch.is_alphabetic() || ch.is_ascii_digit() {
buf.push(ch);
state = state.advance_without_indenting(bytes_parsed).map_err(
|(progress, _, state)| {
(progress, to_problem(state.line, state.column), state)
},
)?;
} else {
// This is the end of the field. We're done!
break;
}
}
Err(_) => {
let row = state.line;
let col = state.column;
return state.fail(arena, MadeProgress, to_problem(row, col));
}
};
}
Ok((MadeProgress, buf.into_bump_str(), state))
} }
} }
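The hand-rolled loop removed above spelled out the tag-name rules in its comments: a global tag starts with an uppercase letter, a private tag starts with '@' followed by an uppercase letter, and later characters may be Unicode alphabetic characters or ASCII digits. A standalone sketch of just that character discipline (not the real parser, which now lives in crate::ident::tag_name and also tracks progress and positions):

    // Rough validity check mirroring the removed comments; illustrative only.
    fn looks_like_tag_name(s: &str) -> bool {
        let mut chars = s.chars();
        let first_ok = match chars.next() {
            // A private tag: '@' followed by an uppercase letter.
            Some('@') => matches!(chars.next(), Some(c) if c.is_uppercase()),
            // A global tag: starts with an uppercase letter.
            Some(c) => c.is_uppercase(),
            None => false,
        };
        // Remaining characters: alphabetic or ASCII digits.
        first_ok && chars.all(|c| c.is_alphabetic() || c.is_ascii_digit())
    }

    fn main() {
        assert!(looks_like_tag_name("Ok"));
        assert!(looks_like_tag_name("@UUID"));
        assert!(!looks_like_tag_name("@uuid"));
        assert!(!looks_like_tag_name("list1"));
    }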
@ -517,156 +408,52 @@ fn expression<'a>(min_indent: u16) -> impl Parser<'a, Located<TypeAnnotation<'a>
fn parse_concrete_type<'a>( fn parse_concrete_type<'a>(
arena: &'a Bump, arena: &'a Bump,
mut state: State<'a>, state: State<'a>,
) -> ParseResult<'a, TypeAnnotation<'a>, TApply> { ) -> ParseResult<'a, TypeAnnotation<'a>, TApply> {
let mut part_buf = String::new_in(arena); // The current "part" (parts are dot-separated.) let initial_bytes = state.bytes;
let mut parts: Vec<&'a str> = Vec::new_in(arena);
// Qualified types must start with a capitalized letter. match crate::ident::concrete_type().parse(arena, state) {
match peek_utf8_char_e(&state, TApply::StartNotUppercase, TApply::Space) { Ok((_, (module_name, type_name), state)) => {
Ok((first_letter, bytes_parsed)) => { let answer = TypeAnnotation::Apply(module_name, type_name, &[]);
if first_letter.is_alphabetic() && first_letter.is_uppercase() {
part_buf.push(first_letter);
} else {
let problem = TApply::StartNotUppercase(state.line, state.column + 1);
return Err((NoProgress, problem, state));
}
state = state.advance_without_indenting_e(bytes_parsed, TApply::Space)?;
}
Err(reason) => return Err((NoProgress, reason, state)),
}
while !state.bytes.is_empty() {
match peek_utf8_char_e(&state, TApply::End, TApply::Space) {
Ok((ch, bytes_parsed)) => {
// After the first character, only these are allowed:
//
// * Unicode alphabetic chars - you might name a variable `鹏` if that's clear to your readers
// * ASCII digits - e.g. `1` but not `¾`, both of which pass .is_numeric()
// * A dot ('.')
if ch.is_alphabetic() {
if part_buf.is_empty() && !ch.is_uppercase() {
// Each part must begin with a capital letter.
return Err((
MadeProgress,
TApply::StartNotUppercase(state.line, state.column),
state,
));
}
part_buf.push(ch);
} else if ch.is_ascii_digit() {
// Parts may not start with numbers!
if part_buf.is_empty() {
return Err((
MadeProgress,
TApply::StartIsNumber(state.line, state.column),
state,
));
}
part_buf.push(ch);
} else if ch == '.' {
// Having two consecutive dots is an error.
if part_buf.is_empty() {
return Err((
MadeProgress,
TApply::DoubleDot(state.line, state.column),
state,
));
}
parts.push(part_buf.into_bump_str());
// Now that we've recorded the contents of the current buffer, reset it.
part_buf = String::new_in(arena);
} else {
// This must be the end of the type. We're done!
break;
}
state = state.advance_without_indenting_e(bytes_parsed, TApply::Space)?;
}
Err(reason) => {
return Err((MadeProgress, reason, state));
}
}
}
if part_buf.is_empty() {
// We probably had a trailing dot, e.g. `Foo.bar.` - this is malformed!
//
// This condition might also occur if we encounter a malformed accessor like `.|`
//
// If we made it this far and don't have a next_char, then necessarily
// we have consumed a '.' char previously.
return Err((
MadeProgress,
TApply::TrailingDot(state.line, state.column),
state,
));
}
let answer = TypeAnnotation::Apply(
join_module_parts(arena, parts.into_bump_slice()),
part_buf.into_bump_str(),
&[],
);
Ok((MadeProgress, answer, state)) Ok((MadeProgress, answer, state))
}
Err((NoProgress, _, state)) => {
Err((NoProgress, TApply::End(state.line, state.column), state))
}
Err((MadeProgress, _, mut state)) => {
// we made some progress, but ultimately failed.
// that means a malformed type name
let chomped = crate::ident::chomp_malformed(state.bytes);
let delta = initial_bytes.len() - state.bytes.len();
let parsed_str =
unsafe { std::str::from_utf8_unchecked(&initial_bytes[..chomped + delta]) };
state = state.advance_without_indenting_ee(chomped, |r, c| {
TApply::Space(crate::parser::BadInputError::LineTooLong, r, c)
})?;
dbg!(&state);
Ok((MadeProgress, TypeAnnotation::Malformed(parsed_str), state))
}
}
} }
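parse_concrete_type now leans on crate::ident::concrete_type(), which the destructuring above assumes yields a (module_name, type_name) pair. A sketch of that split on a dotted name, for intuition only (the real parser also enforces capitalization and produces TApply errors):

    // Illustrative split of a qualified type name at its last dot.
    fn split_concrete_type(input: &str) -> (&str, &str) {
        match input.rfind('.') {
            Some(idx) => (&input[..idx], &input[idx + 1..]),
            None => ("", input),
        }
    }

    fn main() {
        assert_eq!(split_concrete_type("Json.Decode.Value"), ("Json.Decode", "Value"));
        assert_eq!(split_concrete_type("Bool"), ("", "Bool"));
    }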
fn parse_type_variable<'a>( fn parse_type_variable<'a>(
arena: &'a Bump, arena: &'a Bump,
mut state: State<'a>, state: State<'a>,
) -> ParseResult<'a, TypeAnnotation<'a>, TVariable> { ) -> ParseResult<'a, TypeAnnotation<'a>, Type<'a>> {
let mut buf = String::new_in(arena); match crate::ident::lowercase_ident().parse(arena, state) {
Ok((_, name, state)) => {
let answer = TypeAnnotation::BoundVariable(name);
let start_bytes_len = state.bytes.len(); Ok((MadeProgress, answer, state))
}
match peek_utf8_char_e(&state, TVariable::StartNotLowercase, TVariable::Space) { Err((progress, _, state)) => Err((
Ok((first_letter, bytes_parsed)) => { progress,
// Type variables must start with a lowercase letter. Type::TBadTypeVariable(state.line, state.column),
if first_letter.is_alphabetic() && first_letter.is_lowercase() {
buf.push(first_letter);
} else {
return Err((
NoProgress,
TVariable::StartNotLowercase(state.line, state.column),
state, state,
)); )),
} }
state = state.advance_without_indenting_e(bytes_parsed, TVariable::Space)?;
}
Err(reason) => return Err((NoProgress, reason, state)),
}
while !state.bytes.is_empty() {
match peek_utf8_char_e(&state, TVariable::End, TVariable::Space) {
Ok((ch, bytes_parsed)) => {
// After the first character, only these are allowed:
//
// * Unicode alphabetic chars - you might name a variable `鹏` if that's clear to your readers
// * ASCII digits - e.g. `1` but not `¾`, both of which pass .is_numeric()
if ch.is_alphabetic() || ch.is_ascii_digit() {
buf.push(ch);
} else {
// This must be the end of the type. We're done!
break;
}
state = state.advance_without_indenting_e(bytes_parsed, TVariable::Space)?;
}
Err(reason) => {
return state.fail(arena, MadeProgress, reason);
}
}
}
let answer = TypeAnnotation::BoundVariable(buf.into_bump_str());
let progress = Progress::from_lengths(start_bytes_len, state.bytes.len());
Ok((progress, answer, state))
} }

View file

@ -23,14 +23,12 @@ mod test_parse {
use roc_parse::ast::Pattern::{self, *}; use roc_parse::ast::Pattern::{self, *};
use roc_parse::ast::StrLiteral::{self, *}; use roc_parse::ast::StrLiteral::{self, *};
use roc_parse::ast::StrSegment::*; use roc_parse::ast::StrSegment::*;
use roc_parse::ast::{ use roc_parse::ast::{self, Def, EscapedChar, Spaceable, TypeAnnotation, WhenBranch};
self, Attempting, Def, EscapedChar, Spaceable, TypeAnnotation, WhenBranch,
};
use roc_parse::header::{ use roc_parse::header::{
AppHeader, Effects, ExposesEntry, ImportsEntry, InterfaceHeader, ModuleName, PackageEntry, AppHeader, Effects, ExposesEntry, ImportsEntry, InterfaceHeader, ModuleName, PackageEntry,
PackageName, PackageOrPath, PlatformHeader, To, PackageName, PackageOrPath, PlatformHeader, To,
}; };
use roc_parse::module::{app_header, interface_header, module_defs, platform_header}; use roc_parse::module::module_defs;
use roc_parse::parser::{Parser, State, SyntaxError}; use roc_parse::parser::{Parser, State, SyntaxError};
use roc_parse::test_helpers::parse_expr_with; use roc_parse::test_helpers::parse_expr_with;
use roc_region::all::{Located, Region}; use roc_region::all::{Located, Region};
@ -43,10 +41,9 @@ mod test_parse {
assert_eq!(Ok(expected_expr), actual); assert_eq!(Ok(expected_expr), actual);
} }
fn assert_parsing_fails<'a>(input: &'a str, _reason: SyntaxError, _attempting: Attempting) { fn assert_parsing_fails<'a>(input: &'a str, _reason: SyntaxError) {
let arena = Bump::new(); let arena = Bump::new();
let actual = parse_expr_with(&arena, input); let actual = parse_expr_with(&arena, input);
// let expected_fail = Fail { reason, attempting };
assert!(actual.is_err()); assert!(actual.is_err());
} }
@ -291,7 +288,7 @@ mod test_parse {
#[test] #[test]
fn empty_source_file() { fn empty_source_file() {
assert_parsing_fails("", SyntaxError::Eof(Region::zero()), Attempting::Module); assert_parsing_fails("", SyntaxError::Eof(Region::zero()));
} }
#[test] #[test]
@ -308,11 +305,7 @@ mod test_parse {
// Make sure it's longer than our maximum line length // Make sure it's longer than our maximum line length
assert_eq!(too_long_str.len(), max_line_length + 1); assert_eq!(too_long_str.len(), max_line_length + 1);
assert_parsing_fails( assert_parsing_fails(&too_long_str, SyntaxError::LineTooLong(0));
&too_long_str,
SyntaxError::LineTooLong(0),
Attempting::Module,
);
} }
// INT LITERALS // INT LITERALS
@ -1012,7 +1005,7 @@ mod test_parse {
use roc_parse::ident::BadIdent; use roc_parse::ident::BadIdent;
let arena = Bump::new(); let arena = Bump::new();
let expected = Expr::MalformedIdent("@One.Two.Whee", BadIdent::QualifiedTag(0, 13)); let expected = Expr::MalformedIdent("@One.Two.Whee", BadIdent::BadPrivateTag(0, 4));
let actual = parse_expr_with(&arena, "@One.Two.Whee"); let actual = parse_expr_with(&arena, "@One.Two.Whee");
assert_eq!(Ok(expected), actual); assert_eq!(Ok(expected), actual);
@ -2416,7 +2409,7 @@ mod test_parse {
let imports = Vec::new_in(&arena); let imports = Vec::new_in(&arena);
let provides = Vec::new_in(&arena); let provides = Vec::new_in(&arena);
let module_name = StrLiteral::PlainLine("test-app"); let module_name = StrLiteral::PlainLine("test-app");
let expected = AppHeader { let header = AppHeader {
name: Located::new(0, 0, 4, 14, module_name), name: Located::new(0, 0, 4, 14, module_name),
packages, packages,
imports, imports,
@ -2433,17 +2426,15 @@ mod test_parse {
after_to: &[], after_to: &[],
}; };
let expected = roc_parse::ast::Module::App { header };
let src = indoc!( let src = indoc!(
r#" r#"
app "test-app" packages {} imports [] provides [] to blah app "test-app" packages {} imports [] provides [] to blah
"# "#
); );
let actual = app_header() let actual = roc_parse::module::parse_header(&arena, State::new(src.as_bytes()))
.parse( .map(|tuple| tuple.0);
&arena,
State::new_in(&arena, src.as_bytes(), Attempting::Module),
)
.map(|tuple| tuple.1);
assert_eq!(Ok(expected), actual); assert_eq!(Ok(expected), actual);
} }
@ -2457,7 +2448,7 @@ mod test_parse {
let imports = Vec::new_in(&arena); let imports = Vec::new_in(&arena);
let provides = Vec::new_in(&arena); let provides = Vec::new_in(&arena);
let module_name = StrLiteral::PlainLine("test-app"); let module_name = StrLiteral::PlainLine("test-app");
let expected = AppHeader { let header = AppHeader {
name: Located::new(0, 0, 4, 14, module_name), name: Located::new(0, 0, 4, 14, module_name),
packages, packages,
imports, imports,
@ -2474,17 +2465,16 @@ mod test_parse {
after_to: &[], after_to: &[],
}; };
let expected = roc_parse::ast::Module::App { header };
let src = indoc!( let src = indoc!(
r#" r#"
app "test-app" provides [] to "./blah" app "test-app" provides [] to "./blah"
"# "#
); );
let actual = app_header()
.parse( let actual = roc_parse::module::parse_header(&arena, State::new(src.as_bytes()))
&arena, .map(|tuple| tuple.0);
State::new_in(&arena, src.as_bytes(), Attempting::Module),
)
.map(|tuple| tuple.1);
assert_eq!(Ok(expected), actual); assert_eq!(Ok(expected), actual);
} }
@ -2509,7 +2499,8 @@ mod test_parse {
let provide_entry = Located::new(3, 3, 15, 24, Exposed("quicksort")); let provide_entry = Located::new(3, 3, 15, 24, Exposed("quicksort"));
let provides = bumpalo::vec![in &arena; provide_entry]; let provides = bumpalo::vec![in &arena; provide_entry];
let module_name = StrLiteral::PlainLine("quicksort"); let module_name = StrLiteral::PlainLine("quicksort");
let expected = AppHeader {
let header = AppHeader {
name: Located::new(0, 0, 4, 15, module_name), name: Located::new(0, 0, 4, 15, module_name),
packages, packages,
imports, imports,
@ -2526,6 +2517,8 @@ mod test_parse {
after_to: &[], after_to: &[],
}; };
let expected = roc_parse::ast::Module::App { header };
let src = indoc!( let src = indoc!(
r#" r#"
app "quicksort" app "quicksort"
@ -2535,12 +2528,8 @@ mod test_parse {
"# "#
); );
let actual = app_header() let actual = roc_parse::module::parse_header(&arena, State::new(src.as_bytes()))
.parse( .map(|tuple| tuple.0);
&arena,
State::new_in(&arena, src.as_bytes(), Attempting::Module),
)
.map(|tuple| tuple.1);
assert_eq!(Ok(expected), actual); assert_eq!(Ok(expected), actual);
} }
@ -2560,7 +2549,7 @@ mod test_parse {
spaces_after_effects_keyword: &[], spaces_after_effects_keyword: &[],
spaces_after_type_name: &[], spaces_after_type_name: &[],
}; };
let expected = PlatformHeader { let header = PlatformHeader {
name: Located::new(0, 0, 9, 23, pkg_name), name: Located::new(0, 0, 9, 23, pkg_name),
requires: Vec::new_in(&arena), requires: Vec::new_in(&arena),
exposes: Vec::new_in(&arena), exposes: Vec::new_in(&arena),
@ -2581,13 +2570,11 @@ mod test_parse {
after_provides: &[], after_provides: &[],
}; };
let expected = roc_parse::ast::Module::Platform { header };
let src = "platform rtfeldman/blah requires {} exposes [] packages {} imports [] provides [] effects fx.Blah {}"; let src = "platform rtfeldman/blah requires {} exposes [] packages {} imports [] provides [] effects fx.Blah {}";
let actual = platform_header() let actual = roc_parse::module::parse_header(&arena, State::new(src.as_bytes()))
.parse( .map(|tuple| tuple.0);
&arena,
State::new_in(&arena, src.as_bytes(), Attempting::Module),
)
.map(|tuple| tuple.1);
assert_eq!(Ok(expected), actual); assert_eq!(Ok(expected), actual);
} }
@ -2621,7 +2608,7 @@ mod test_parse {
spaces_after_effects_keyword: &[], spaces_after_effects_keyword: &[],
spaces_after_type_name: &[], spaces_after_type_name: &[],
}; };
let expected = PlatformHeader { let header = PlatformHeader {
name: Located::new(0, 0, 9, 19, pkg_name), name: Located::new(0, 0, 9, 19, pkg_name),
requires: Vec::new_in(&arena), requires: Vec::new_in(&arena),
exposes: Vec::new_in(&arena), exposes: Vec::new_in(&arena),
@ -2642,6 +2629,8 @@ mod test_parse {
after_provides: &[], after_provides: &[],
}; };
let expected = roc_parse::ast::Module::Platform { header };
let src = indoc!( let src = indoc!(
r#" r#"
platform foo/barbaz platform foo/barbaz
@ -2653,12 +2642,8 @@ mod test_parse {
effects fx.Effect {} effects fx.Effect {}
"# "#
); );
let actual = platform_header() let actual = roc_parse::module::parse_header(&arena, State::new(src.as_bytes()))
.parse( .map(|tuple| tuple.0);
&arena,
State::new_in(&arena, src.as_bytes(), Attempting::Module),
)
.map(|tuple| tuple.1);
assert_eq!(Ok(expected), actual); assert_eq!(Ok(expected), actual);
} }
@ -2669,7 +2654,7 @@ mod test_parse {
let exposes = Vec::new_in(&arena); let exposes = Vec::new_in(&arena);
let imports = Vec::new_in(&arena); let imports = Vec::new_in(&arena);
let module_name = ModuleName::new("Foo"); let module_name = ModuleName::new("Foo");
let expected = InterfaceHeader { let header = InterfaceHeader {
name: Located::new(0, 0, 10, 13, module_name), name: Located::new(0, 0, 10, 13, module_name),
exposes, exposes,
imports, imports,
@ -2680,17 +2665,16 @@ mod test_parse {
before_imports: &[], before_imports: &[],
after_imports: &[], after_imports: &[],
}; };
let expected = roc_parse::ast::Module::Interface { header };
let src = indoc!( let src = indoc!(
r#" r#"
interface Foo exposes [] imports [] interface Foo exposes [] imports []
"# "#
); );
let actual = interface_header() let actual = roc_parse::module::parse_header(&arena, State::new(src.as_bytes()))
.parse( .map(|tuple| tuple.0);
&arena,
State::new_in(&arena, src.as_bytes(), Attempting::Module),
)
.map(|tuple| tuple.1);
assert_eq!(Ok(expected), actual); assert_eq!(Ok(expected), actual);
} }
@ -2701,7 +2685,7 @@ mod test_parse {
let exposes = Vec::new_in(&arena); let exposes = Vec::new_in(&arena);
let imports = Vec::new_in(&arena); let imports = Vec::new_in(&arena);
let module_name = ModuleName::new("Foo.Bar.Baz"); let module_name = ModuleName::new("Foo.Bar.Baz");
let expected = InterfaceHeader { let header = InterfaceHeader {
name: Located::new(0, 0, 10, 21, module_name), name: Located::new(0, 0, 10, 21, module_name),
exposes, exposes,
imports, imports,
@ -2712,17 +2696,16 @@ mod test_parse {
before_imports: &[], before_imports: &[],
after_imports: &[], after_imports: &[],
}; };
let expected = roc_parse::ast::Module::Interface { header };
let src = indoc!( let src = indoc!(
r#" r#"
interface Foo.Bar.Baz exposes [] imports [] interface Foo.Bar.Baz exposes [] imports []
"# "#
); );
let actual = interface_header() let actual = roc_parse::module::parse_header(&arena, State::new(src.as_bytes()))
.parse( .map(|tuple| tuple.0);
&arena,
State::new_in(&arena, src.as_bytes(), Attempting::Module),
)
.map(|tuple| tuple.1);
assert_eq!(Ok(expected), actual); assert_eq!(Ok(expected), actual);
} }
@ -2748,10 +2731,7 @@ mod test_parse {
"# "#
); );
let actual = module_defs() let actual = module_defs()
.parse( .parse(&arena, State::new(src.as_bytes()))
&arena,
State::new_in(&arena, src.as_bytes(), Attempting::Module),
)
.map(|tuple| tuple.1); .map(|tuple| tuple.1);
// It should occur twice in the debug output - once for the pattern, // It should occur twice in the debug output - once for the pattern,
@ -2810,10 +2790,7 @@ mod test_parse {
); );
let actual = module_defs() let actual = module_defs()
.parse( .parse(&arena, State::new(src.as_bytes()))
&arena,
State::new_in(&arena, src.as_bytes(), Attempting::Module),
)
.map(|tuple| tuple.1); .map(|tuple| tuple.1);
assert_eq!(Ok(expected), actual); assert_eq!(Ok(expected), actual);
@ -2833,11 +2810,8 @@ mod test_parse {
); );
let actual = module_defs() let actual = module_defs()
.parse( .parse(&arena, State::new(src.as_bytes()))
&arena, .map(|tuple| tuple.0);
State::new_in(&arena, src.as_bytes(), Attempting::Module),
)
.map(|tuple| tuple.1);
assert!(actual.is_ok()); assert!(actual.is_ok());
} }
@ -2858,18 +2832,15 @@ mod test_parse {
); );
let actual = module_defs() let actual = module_defs()
.parse( .parse(&arena, State::new(src.as_bytes()))
&arena, .map(|tuple| tuple.0);
State::new_in(&arena, src.as_bytes(), Attempting::Module),
)
.map(|tuple| tuple.1);
assert!(actual.is_ok()); assert!(actual.is_ok());
} }
#[test] #[test]
fn outdenting_newline_after_else() { fn outdenting_newline_after_else() {
let arena = Bump::new(); let arena = &Bump::new();
// highlights a problem with the else branch demanding a newline after its expression // highlights a problem with the else branch demanding a newline after its expression
let src = indoc!( let src = indoc!(
@ -2881,16 +2852,19 @@ mod test_parse {
"# "#
); );
let actual = module_defs() let state = State::new(src.as_bytes());
.parse( let parser = module_defs();
&arena, let parsed = parser.parse(arena, state);
State::new_in(&arena, src.as_bytes(), Attempting::Module), match parsed {
) Ok((_, _, state)) => {
.map(|tuple| tuple.1); dbg!(state);
return;
dbg!(&actual); }
Err((_, _fail, _state)) => {
assert!(actual.is_ok()); dbg!(_fail, _state);
assert!(false);
}
}
} }
#[test] #[test]

View file

@ -1,4 +1,5 @@
use roc_collections::all::MutSet; use roc_collections::all::MutSet;
use roc_parse::parser::{Col, Row};
use roc_problem::can::PrecedenceProblem::BothNonAssociative; use roc_problem::can::PrecedenceProblem::BothNonAssociative;
use roc_problem::can::{FloatErrorKind, IntErrorKind, Problem, RuntimeError}; use roc_problem::can::{FloatErrorKind, IntErrorKind, Problem, RuntimeError};
use roc_region::all::Region; use roc_region::all::Region;
@ -357,23 +358,7 @@ fn to_bad_ident_expr_report<'b>(
let region = Region::from_row_col(row, col); let region = Region::from_row_col(row, col);
alloc.stack(vec![ alloc.stack(vec![
alloc.reflow(r"I trying to parse a record field accessor here:"), alloc.reflow(r"I trying to parse a record field access here:"),
alloc.region_with_subregion(surroundings, region),
alloc.concat(vec![
alloc.reflow("Something like "),
alloc.parser_suggestion(".name"),
alloc.reflow(" or "),
alloc.parser_suggestion(".height"),
alloc.reflow(" that accesses a value from a record."),
]),
])
}
PartStartsWithNumber(row, col) => {
let region = Region::from_row_col(row, col);
alloc.stack(vec![
alloc.reflow("I trying to parse a record field access here:"),
alloc.region_with_subregion(surroundings, region), alloc.region_with_subregion(surroundings, region),
alloc.concat(vec![ alloc.concat(vec![
alloc.reflow("So I expect to see a lowercase letter next, like "), alloc.reflow("So I expect to see a lowercase letter next, like "),
@ -430,9 +415,54 @@ fn to_bad_ident_expr_report<'b>(
]), ]),
]) ])
} }
PrivateTagNotUppercase(row, col) => {
let region = Region::from_row_col(row, col);
Underscore(row, col) => {
let region =
Region::from_rows_cols(surroundings.start_line, surroundings.start_col, row, col);
alloc.stack(vec![
alloc.reflow("Underscores are not allowed in identifier names:"),
alloc.region_with_subregion(surroundings, region),
alloc.concat(vec![alloc.reflow(
r"I recommend using camelCase, it is the standard in the Roc ecosystem.",
)]),
])
}
BadPrivateTag(row, col) => {
use BadIdentNext::*;
match what_is_next(alloc.src_lines, row, col) {
LowercaseAccess(width) => {
let region = Region::from_rows_cols(row, col, row, col + width);
alloc.stack(vec![
alloc.reflow("I am very confused by this field access:"),
alloc.region_with_subregion(surroundings, region),
alloc.concat(vec![
alloc.reflow(r"It looks like a record field access on a private tag.")
]),
])
}
UppercaseAccess(width) => {
let region = Region::from_rows_cols(row, col, row, col + width);
alloc.stack(vec![
alloc.reflow("I am very confused by this expression:"),
alloc.region_with_subregion(surroundings, region),
alloc.concat(vec![
alloc.reflow(
r"Looks like a private tag is treated like a module name. ",
),
alloc.reflow(r"Maybe you wanted a qualified name, like "),
alloc.parser_suggestion("Json.Decode.string"),
alloc.text("?"),
]),
])
}
Other(Some(c)) if c.is_lowercase() => {
let region = Region::from_rows_cols(
surroundings.start_line,
surroundings.start_col + 1,
row,
col + 1,
);
alloc.stack(vec![ alloc.stack(vec![
alloc.reflow("I am trying to parse a private tag here:"), alloc.reflow("I am trying to parse a private tag here:"),
alloc.region_with_subregion(surroundings, region), alloc.region_with_subregion(surroundings, region),
@ -449,15 +479,9 @@ fn to_bad_ident_expr_report<'b>(
]), ]),
]) ])
} }
other => todo!("{:?}", other),
PrivateTagFieldAccess(_row, _col) => alloc.stack(vec![ }
alloc.reflow("I am very confused by this field access:"), }
alloc.region(surroundings),
alloc.concat(vec![
alloc.reflow(r"It looks like a record field access on a private tag.")
]),
]),
_ => todo!(),
} }
} }
@ -486,22 +510,6 @@ fn to_bad_ident_pattern_report<'b>(
]) ])
} }
PartStartsWithNumber(row, col) => {
let region = Region::from_row_col(row, col);
alloc.stack(vec![
alloc.reflow("I trying to parse a record field access here:"),
alloc.region_with_subregion(surroundings, region),
alloc.concat(vec![
alloc.reflow("So I expect to see a lowercase letter next, like "),
alloc.parser_suggestion(".name"),
alloc.reflow(" or "),
alloc.parser_suggestion(".height"),
alloc.reflow("."),
]),
])
}
WeirdAccessor(_row, _col) => alloc.stack(vec![ WeirdAccessor(_row, _col) => alloc.stack(vec![
alloc.reflow("I am very confused by this field access"), alloc.reflow("I am very confused by this field access"),
alloc.region(surroundings), alloc.region(surroundings),
@ -547,33 +555,6 @@ fn to_bad_ident_pattern_report<'b>(
]), ]),
]) ])
} }
PrivateTagNotUppercase(row, col) => {
let region = Region::from_row_col(row, col);
alloc.stack(vec![
alloc.reflow("I am trying to parse a private tag here:"),
alloc.region_with_subregion(surroundings, region),
alloc.concat(vec![
alloc.reflow(r"But after the "),
alloc.keyword("@"),
alloc.reflow(r" symbol I found a lowercase letter. "),
alloc.reflow(r"All tag names (global and private)"),
alloc.reflow(r" must start with an uppercase letter, like "),
alloc.parser_suggestion("@UUID"),
alloc.reflow(" or "),
alloc.parser_suggestion("@Secrets"),
alloc.reflow("."),
]),
])
}
PrivateTagFieldAccess(_row, _col) => alloc.stack(vec![
alloc.reflow("I am very confused by this field access:"),
alloc.region(surroundings),
alloc.concat(vec![
alloc.reflow(r"It looks like a record field access on a private tag.")
]),
]),
Underscore(row, col) => { Underscore(row, col) => {
let region = Region::from_row_col(row, col - 1); let region = Region::from_row_col(row, col - 1);
@ -591,6 +572,69 @@ fn to_bad_ident_pattern_report<'b>(
} }
} }
#[derive(Debug)]
enum BadIdentNext<'a> {
LowercaseAccess(u16),
UppercaseAccess(u16),
NumberAccess(u16),
Keyword(&'a str),
DanglingDot,
Other(Option<char>),
}
fn what_is_next<'a>(source_lines: &'a [&'a str], row: Row, col: Col) -> BadIdentNext<'a> {
let row_index = row as usize;
let col_index = col as usize;
match source_lines.get(row_index) {
None => BadIdentNext::Other(None),
Some(line) => {
let chars = &line[col_index..];
let mut it = chars.chars();
match roc_parse::keyword::KEYWORDS
.iter()
.find(|keyword| crate::error::parse::starts_with_keyword(chars, keyword))
{
Some(keyword) => BadIdentNext::Keyword(keyword),
None => match it.next() {
None => BadIdentNext::Other(None),
Some('.') => match it.next() {
Some(c) if c.is_lowercase() => {
BadIdentNext::LowercaseAccess(2 + till_whitespace(it) as u16)
}
Some(c) if c.is_uppercase() => {
BadIdentNext::UppercaseAccess(2 + till_whitespace(it) as u16)
}
Some(c) if c.is_ascii_digit() => {
BadIdentNext::NumberAccess(2 + till_whitespace(it) as u16)
}
_ => BadIdentNext::DanglingDot,
},
Some(c) => BadIdentNext::Other(Some(c)),
},
}
}
}
}
fn till_whitespace<I>(it: I) -> usize
where
I: Iterator<Item = char>,
{
let mut chomped = 0;
for c in it {
if c.is_ascii_whitespace() || c == '#' {
break;
} else {
chomped += 1;
continue;
}
}
chomped
}
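what_is_next uses till_whitespace to size the highlighted region (the 2 + ... accounts for the dot and the first character after it). A quick standalone check of the helper's behavior, restated with take_while so the snippet compiles on its own:

    // Counts characters up to the first ASCII whitespace or '#' (comment start).
    fn till_whitespace<I>(it: I) -> usize
    where
        I: Iterator<Item = char>,
    {
        it.take_while(|c| !c.is_ascii_whitespace() && *c != '#').count()
    }

    fn main() {
        assert_eq!(till_whitespace("Decode.string ".chars()), 13);
        assert_eq!(till_whitespace("tame#comment".chars()), 4);
    }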
fn pretty_runtime_error<'b>( fn pretty_runtime_error<'b>(
alloc: &'b RocDocAllocator<'b>, alloc: &'b RocDocAllocator<'b>,
runtime_error: RuntimeError, runtime_error: RuntimeError,

View file

@ -153,6 +153,7 @@ fn to_syntax_report<'a>(
0, 0,
0, 0,
), ),
Header(header) => to_header_report(alloc, filename, &header, 0, 0),
_ => todo!("unhandled parse error: {:?}", parse_problem), _ => todo!("unhandled parse error: {:?}", parse_problem),
} }
} }
@ -171,6 +172,8 @@ enum Node {
IfElseBranch, IfElseBranch,
ListElement, ListElement,
InsideParens, InsideParens,
RecordConditionalDefault,
StringFormat,
} }
fn to_expr_report<'a>( fn to_expr_report<'a>(
@ -197,14 +200,12 @@ fn to_expr_report<'a>(
to_expr_in_parens_report(alloc, filename, context, &expr, *row, *col) to_expr_in_parens_report(alloc, filename, context, &expr, *row, *col)
} }
EExpr::Type(tipe, row, col) => to_type_report(alloc, filename, &tipe, *row, *col), EExpr::Type(tipe, row, col) => to_type_report(alloc, filename, &tipe, *row, *col),
EExpr::Def(syntax, row, col) => to_syntax_report(alloc, filename, syntax, *row, *col),
EExpr::ElmStyleFunction(region, row, col) => { EExpr::ElmStyleFunction(region, row, col) => {
let surroundings = Region::from_rows_cols(start_row, start_col, *row, *col); let surroundings = Region::from_rows_cols(start_row, start_col, *row, *col);
let region = *region; let region = *region;
let doc = alloc.stack(vec![ let doc = alloc.stack(vec![
alloc.reflow(r"I am in the middle of parsing a definition, but I got stuck here:"), alloc.reflow(r"I am partway through parsing a definition, but I got stuck here:"),
alloc.region_with_subregion(surroundings, region), alloc.region_with_subregion(surroundings, region),
alloc.concat(vec![ alloc.concat(vec![
alloc.reflow("Looks like you are trying to define a function. "), alloc.reflow("Looks like you are trying to define a function. "),
@ -344,6 +345,8 @@ fn to_expr_report<'a>(
]), ]),
), ),
Node::ListElement => (r, c, alloc.text("a list")), Node::ListElement => (r, c, alloc.text("a list")),
Node::RecordConditionalDefault => (r, c, alloc.text("record field default")),
Node::StringFormat => (r, c, alloc.text("a string format")),
Node::InsideParens => (r, c, alloc.text("some parentheses")), Node::InsideParens => (r, c, alloc.text("some parentheses")),
}, },
Context::InDef(r, c) => (r, c, alloc.text("a definition")), Context::InDef(r, c) => (r, c, alloc.text("a definition")),
@ -375,12 +378,39 @@ fn to_expr_report<'a>(
} }
} }
EExpr::DefMissingFinalExpr(row, col) => {
let surroundings = Region::from_rows_cols(start_row, start_col, *row, *col);
let region = Region::from_row_col(*row, *col);
let doc = alloc.stack(vec![
alloc.reflow(r"I am partway through parsing a definition, but I got stuck here:"),
alloc.region_with_subregion(surroundings, region),
alloc.concat(vec![
alloc.reflow("This definition is missing a final expression."),
alloc.reflow(" A nested definition must be followed by"),
alloc.reflow(" either another definition, or an expression"),
]),
alloc.vcat(vec![
alloc.text("x = 4").indent(4),
alloc.text("y = 2").indent(4),
alloc.text("").indent(4),
alloc.text("x + y").indent(4),
]),
]);
Report {
filename,
doc,
title: "MISSING FINAL EXPRESSION".to_string(),
}
}
EExpr::Colon(row, col) => { EExpr::Colon(row, col) => {
let surroundings = Region::from_rows_cols(start_row, start_col, *row, *col); let surroundings = Region::from_rows_cols(start_row, start_col, *row, *col);
let region = Region::from_row_col(*row, *col); let region = Region::from_row_col(*row, *col);
let doc = alloc.stack(vec![ let doc = alloc.stack(vec![
alloc.reflow(r"I am in the middle of parsing a definition, but I got stuck here:"), alloc.reflow(r"I am partway through parsing a definition, but I got stuck here:"),
alloc.region_with_subregion(surroundings, region), alloc.region_with_subregion(surroundings, region),
alloc.concat(vec![ alloc.concat(vec![
alloc.reflow("Looks like you are trying to define a function. "), alloc.reflow("Looks like you are trying to define a function. "),
@ -419,7 +449,7 @@ fn to_lambda_report<'a>(
let doc = alloc.stack(vec![ let doc = alloc.stack(vec![
alloc alloc
.reflow(r"I am in the middle of parsing a function argument list, but I got stuck here:"), .reflow(r"I am partway through parsing a function argument list, but I got stuck here:"),
alloc.region_with_subregion(surroundings, region), alloc.region_with_subregion(surroundings, region),
alloc.concat(vec![ alloc.concat(vec![
alloc.reflow("I was expecting a "), alloc.reflow("I was expecting a "),
@ -440,7 +470,7 @@ fn to_lambda_report<'a>(
let doc = alloc.stack(vec![ let doc = alloc.stack(vec![
alloc alloc
.reflow(r"I am in the middle of parsing a function argument list, but I got stuck here:"), .reflow(r"I am partway through parsing a function argument list, but I got stuck here:"),
alloc.region_with_subregion(surroundings, region), alloc.region_with_subregion(surroundings, region),
alloc.concat(vec![ alloc.concat(vec![
alloc.reflow("I was expecting a "), alloc.reflow("I was expecting a "),
@ -464,7 +494,7 @@ fn to_lambda_report<'a>(
let doc = alloc.stack(vec![ let doc = alloc.stack(vec![
alloc alloc
.reflow(r"I am in the middle of parsing a function argument list, but I got stuck here:"), .reflow(r"I am partway through parsing a function argument list, but I got stuck here:"),
alloc.region_with_subregion(surroundings, region), alloc.region_with_subregion(surroundings, region),
alloc.concat(vec![ alloc.concat(vec![
alloc.reflow("I was expecting a "), alloc.reflow("I was expecting a "),
@ -485,7 +515,7 @@ fn to_lambda_report<'a>(
let doc = alloc.stack(vec![ let doc = alloc.stack(vec![
alloc alloc
.reflow(r"I am in the middle of parsing a function argument list, but I got stuck here:"), .reflow(r"I am partway through parsing a function argument list, but I got stuck here:"),
alloc.region_with_subregion(surroundings, region), alloc.region_with_subregion(surroundings, region),
alloc.concat(vec![ alloc.concat(vec![
alloc.reflow("I was expecting a "), alloc.reflow("I was expecting a "),
@ -509,7 +539,7 @@ fn to_lambda_report<'a>(
let doc = alloc.stack(vec![ let doc = alloc.stack(vec![
alloc alloc
.reflow(r"I am in the middle of parsing a function argument list, but I got stuck at this comma:"), .reflow(r"I am partway through parsing a function argument list, but I got stuck at this comma:"),
alloc.region_with_subregion(surroundings, region), alloc.region_with_subregion(surroundings, region),
alloc.concat(vec![ alloc.concat(vec![
alloc.reflow("I was expecting an argument pattern before this, "), alloc.reflow("I was expecting an argument pattern before this, "),
@ -529,7 +559,7 @@ fn to_lambda_report<'a>(
let doc = alloc.stack(vec![ let doc = alloc.stack(vec![
alloc alloc
.reflow(r"I am in the middle of parsing a function argument list, but I got stuck here:"), .reflow(r"I am partway through parsing a function argument list, but I got stuck here:"),
alloc.region_with_subregion(surroundings, region), alloc.region_with_subregion(surroundings, region),
alloc.concat(vec![ alloc.concat(vec![
alloc.reflow("I was expecting an argument pattern before this, "), alloc.reflow("I was expecting an argument pattern before this, "),
@ -636,7 +666,7 @@ fn to_unfinished_lambda_report<'a>(
fn to_str_report<'a>( fn to_str_report<'a>(
alloc: &'a RocDocAllocator<'a>, alloc: &'a RocDocAllocator<'a>,
filename: PathBuf, filename: PathBuf,
_context: Context, context: Context,
parse_problem: &roc_parse::parser::EString<'a>, parse_problem: &roc_parse::parser::EString<'a>,
start_row: Row, start_row: Row,
start_col: Col, start_col: Col,
@ -645,7 +675,14 @@ fn to_str_report<'a>(
match *parse_problem { match *parse_problem {
EString::Open(_row, _col) => unreachable!("another branch would be taken"), EString::Open(_row, _col) => unreachable!("another branch would be taken"),
EString::Format(syntax, row, col) => to_syntax_report(alloc, filename, syntax, row, col), EString::Format(expr, row, col) => to_expr_report(
alloc,
filename,
Context::InNode(Node::StringFormat, start_row, start_col, Box::new(context)),
expr,
row,
col,
),
EString::Space(error, row, col) => to_space_report(alloc, filename, &error, row, col), EString::Space(error, row, col) => to_space_report(alloc, filename, &error, row, col),
EString::UnknownEscape(row, col) => { EString::UnknownEscape(row, col) => {
let surroundings = Region::from_rows_cols(start_row, start_col, row, col); let surroundings = Region::from_rows_cols(start_row, start_col, row, col);
@ -712,6 +749,26 @@ fn to_str_report<'a>(
title: "WEIRD CODE POINT".to_string(), title: "WEIRD CODE POINT".to_string(),
} }
} }
EString::FormatEnd(row, col) => {
let surroundings = Region::from_rows_cols(start_row, start_col, row, col);
let region = Region::from_row_col(row, col);
let doc = alloc.stack(vec![
alloc.reflow(r"I cannot find the end of this format expression:"),
alloc.region_with_subregion(surroundings, region),
alloc.concat(vec![
alloc.reflow(r"You could change it to something like "),
alloc.parser_suggestion("\"The count is \\(count\\)\""),
alloc.reflow("."),
]),
]);
Report {
filename,
doc,
title: "ENDLESS FORMAT".to_string(),
}
}
EString::EndlessSingle(row, col) => { EString::EndlessSingle(row, col) => {
let surroundings = Region::from_rows_cols(start_row, start_col, row, col); let surroundings = Region::from_rows_cols(start_row, start_col, row, col);
let region = Region::from_row_col(row, col); let region = Region::from_row_col(row, col);
@ -839,7 +896,6 @@ fn to_list_report<'a>(
use roc_parse::parser::List; use roc_parse::parser::List;
match *parse_problem { match *parse_problem {
List::Syntax(syntax, row, col) => to_syntax_report(alloc, filename, syntax, row, col),
List::Space(error, row, col) => to_space_report(alloc, filename, &error, row, col), List::Space(error, row, col) => to_space_report(alloc, filename, &error, row, col),
List::Expr(expr, row, col) => to_expr_report( List::Expr(expr, row, col) => to_expr_report(
@ -948,7 +1004,6 @@ fn to_if_report<'a>(
use roc_parse::parser::If; use roc_parse::parser::If;
match *parse_problem { match *parse_problem {
If::Syntax(syntax, row, col) => to_syntax_report(alloc, filename, syntax, row, col),
If::Space(error, row, col) => to_space_report(alloc, filename, &error, row, col), If::Space(error, row, col) => to_space_report(alloc, filename, &error, row, col),
If::Condition(expr, row, col) => to_expr_report( If::Condition(expr, row, col) => to_expr_report(
@ -1119,7 +1174,6 @@ fn to_when_report<'a>(
} }
} }
When::Syntax(syntax, row, col) => to_syntax_report(alloc, filename, syntax, row, col),
When::Space(error, row, col) => to_space_report(alloc, filename, &error, row, col), When::Space(error, row, col) => to_space_report(alloc, filename, &error, row, col),
When::Branch(expr, row, col) => to_expr_report( When::Branch(expr, row, col) => to_expr_report(
@ -1512,7 +1566,20 @@ fn to_precord_report<'a>(
PRecord::Pattern(pattern, row, col) => { PRecord::Pattern(pattern, row, col) => {
to_pattern_report(alloc, filename, pattern, row, col) to_pattern_report(alloc, filename, pattern, row, col)
} }
PRecord::Syntax(syntax, row, col) => to_syntax_report(alloc, filename, syntax, row, col),
PRecord::Expr(expr, row, col) => to_expr_report(
alloc,
filename,
Context::InNode(
Node::RecordConditionalDefault,
start_row,
start_col,
Box::new(Context::InDef(row, col)),
),
expr,
row,
col,
),
PRecord::IndentOpen(row, col) => { PRecord::IndentOpen(row, col) => {
let surroundings = Region::from_rows_cols(start_row, start_col, row, col); let surroundings = Region::from_rows_cols(start_row, start_col, row, col);
@ -2469,6 +2536,479 @@ fn to_tapply_report<'a>(
} }
} }
fn to_header_report<'a>(
alloc: &'a RocDocAllocator<'a>,
filename: PathBuf,
parse_problem: &roc_parse::parser::EHeader<'a>,
start_row: Row,
start_col: Col,
) -> Report<'a> {
use roc_parse::parser::EHeader;
match parse_problem {
EHeader::Provides(provides, row, col) => {
to_provides_report(alloc, filename, &provides, *row, *col)
}
EHeader::Exposes(exposes, row, col) => {
to_exposes_report(alloc, filename, &exposes, *row, *col)
}
EHeader::Imports(imports, row, col) => {
to_imports_report(alloc, filename, &imports, *row, *col)
}
EHeader::Requires(requires, row, col) => {
to_requires_report(alloc, filename, &requires, *row, *col)
}
EHeader::Packages(packages, row, col) => {
to_packages_report(alloc, filename, &packages, *row, *col)
}
EHeader::Effects(effects, row, col) => {
to_effects_report(alloc, filename, &effects, *row, *col)
}
EHeader::IndentStart(row, col) => {
let surroundings = Region::from_rows_cols(start_row, start_col, *row, *col);
let region = Region::from_row_col(*row, *col);
let doc = alloc.stack(vec![
alloc.reflow(r"I am partway through parsing a header, but got stuck here:"),
alloc.region_with_subregion(surroundings, region),
alloc.concat(vec![alloc.reflow("I may be confused by indentation.")]),
]);
Report {
filename,
doc,
title: "INCOMPLETE HEADER".to_string(),
}
}
EHeader::Start(row, col) => {
let surroundings = Region::from_rows_cols(start_row, start_col, *row, *col);
let region = Region::from_row_col(*row, *col);
let doc = alloc.stack(vec![
alloc.reflow(r"I am expecting a header, but got stuck here:"),
alloc.region_with_subregion(surroundings, region),
alloc.concat(vec![
alloc.reflow("I am expecting a module keyword next, one of "),
alloc.keyword("interface"),
alloc.reflow(", "),
alloc.keyword("app"),
alloc.reflow(" or "),
alloc.keyword("platform"),
alloc.reflow("."),
]),
]);
Report {
filename,
doc,
title: "MISSING HEADER".to_string(),
}
}
EHeader::ModuleName(row, col) => {
let surroundings = Region::from_rows_cols(start_row, start_col, *row, *col);
let region = Region::from_row_col(*row, *col);
let doc = alloc.stack(vec![
alloc.reflow(r"I am partway through parsing a header, but got stuck here:"),
alloc.region_with_subregion(surroundings, region),
alloc.concat(vec![
alloc.reflow("I am expecting a module name next, like "),
alloc.parser_suggestion("BigNum"),
alloc.reflow(" or "),
alloc.parser_suggestion("Main"),
alloc.reflow(". Module names must start with an uppercase letter."),
]),
]);
Report {
filename,
doc,
title: "WEIRD MODULE NAME".to_string(),
}
}
EHeader::AppName(_, row, col) => {
let surroundings = Region::from_rows_cols(start_row, start_col, *row, *col);
let region = Region::from_row_col(*row, *col);
let doc = alloc.stack(vec![
alloc.reflow(r"I am partway through parsing a header, but got stuck here:"),
alloc.region_with_subregion(surroundings, region),
alloc.concat(vec![
alloc.reflow("I am expecting an application name next, like "),
alloc.parser_suggestion("app \"main\""),
alloc.reflow(" or "),
alloc.parser_suggestion("app \"editor\""),
alloc.reflow(". App names are surrounded by quotation marks."),
]),
]);
Report {
filename,
doc,
title: "WEIRD APP NAME".to_string(),
}
}
EHeader::PlatformName(_, row, col) => {
let surroundings = Region::from_rows_cols(start_row, start_col, *row, *col);
let region = Region::from_row_col(*row, *col);
let doc = alloc.stack(vec![
alloc.reflow(r"I am partway through parsing a header, but got stuck here:"),
alloc.region_with_subregion(surroundings, region),
alloc.concat(vec![
alloc.reflow("I am expecting a platform name next, like "),
alloc.parser_suggestion("roc/core"),
alloc.reflow("."),
]),
]);
Report {
filename,
doc,
title: "WEIRD MODULE NAME".to_string(),
}
}
EHeader::Space(error, row, col) => to_space_report(alloc, filename, &error, *row, *col),
}
}
fn to_provides_report<'a>(
alloc: &'a RocDocAllocator<'a>,
filename: PathBuf,
parse_problem: &roc_parse::parser::EProvides,
start_row: Row,
start_col: Col,
) -> Report<'a> {
use roc_parse::parser::EProvides;
match *parse_problem {
EProvides::Identifier(row, col) => {
let surroundings = Region::from_rows_cols(start_row, start_col, row, col);
let region = Region::from_row_col(row, col);
let doc = alloc.stack(vec![
alloc
.reflow(r"I am partway through parsing a provides list, but I got stuck here:"),
alloc.region_with_subregion(surroundings, region),
alloc.concat(vec![alloc.reflow(
"I was expecting a type name, value name or function name next, like ",
)]),
alloc
.parser_suggestion("provides [ Animal, default, tame ]")
.indent(4),
]);
Report {
filename,
doc,
title: "WEIRD PROVIDES".to_string(),
}
}
EProvides::Provides(row, col) => {
let surroundings = Region::from_rows_cols(start_row, start_col, row, col);
let region = Region::from_row_col(row, col);
let doc = alloc.stack(vec![
alloc.reflow(r"I am partway through parsing a header, but I got stuck here:"),
alloc.region_with_subregion(surroundings, region),
alloc.concat(vec![
alloc.reflow("I am expecting the "),
alloc.keyword("provides"),
alloc.reflow(" keyword next, like "),
]),
alloc
.parser_suggestion("provides [ Animal, default, tame ]")
.indent(4),
]);
Report {
filename,
doc,
title: "WEIRD PROVIDES".to_string(),
}
}
EProvides::Space(error, row, col) => to_space_report(alloc, filename, &error, row, col),
_ => todo!("unhandled parse error {:?}", parse_problem),
}
}
fn to_exposes_report<'a>(
alloc: &'a RocDocAllocator<'a>,
filename: PathBuf,
parse_problem: &roc_parse::parser::EExposes,
start_row: Row,
start_col: Col,
) -> Report<'a> {
use roc_parse::parser::EExposes;
match *parse_problem {
EExposes::Identifier(row, col) => {
let surroundings = Region::from_rows_cols(start_row, start_col, row, col);
let region = Region::from_row_col(row, col);
let doc = alloc.stack(vec![
alloc.reflow(r"I am partway through parsing a exposes list, but I got stuck here:"),
alloc.region_with_subregion(surroundings, region),
alloc.concat(vec![alloc.reflow(
"I was expecting a type name, value name or function name next, like ",
)]),
alloc
.parser_suggestion("exposes [ Animal, default, tame ]")
.indent(4),
]);
Report {
filename,
doc,
title: "WEIRD EXPOSES".to_string(),
}
}
EExposes::Exposes(row, col) => {
let surroundings = Region::from_rows_cols(start_row, start_col, row, col);
let region = Region::from_row_col(row, col);
let doc = alloc.stack(vec![
alloc.reflow(r"I am partway through parsing a header, but I got stuck here:"),
alloc.region_with_subregion(surroundings, region),
alloc.concat(vec![
alloc.reflow("I am expecting the "),
alloc.keyword("exposes"),
alloc.reflow(" keyword next, like "),
]),
alloc
.parser_suggestion("exposes [ Animal, default, tame ]")
.indent(4),
]);
Report {
filename,
doc,
title: "WEIRD EXPOSES".to_string(),
}
}
EExposes::Space(error, row, col) => to_space_report(alloc, filename, &error, row, col),
_ => todo!("unhandled parse error {:?}", parse_problem),
}
}
fn to_imports_report<'a>(
alloc: &'a RocDocAllocator<'a>,
filename: PathBuf,
parse_problem: &roc_parse::parser::EImports,
start_row: Row,
start_col: Col,
) -> Report<'a> {
use roc_parse::parser::EImports;
match *parse_problem {
EImports::Identifier(row, col) => {
let surroundings = Region::from_rows_cols(start_row, start_col, row, col);
let region = Region::from_row_col(row, col);
let doc = alloc.stack(vec![
alloc.reflow(r"I am partway through parsing a imports list, but I got stuck here:"),
alloc.region_with_subregion(surroundings, region),
alloc.concat(vec![alloc.reflow(
"I was expecting a type name, value name or function name next, like ",
)]),
alloc
.parser_suggestion("imports [ Animal, default, tame ]")
.indent(4),
]);
Report {
filename,
doc,
title: "WEIRD EXPOSES".to_string(),
}
}
EImports::Imports(row, col) => {
let surroundings = Region::from_rows_cols(start_row, start_col, row, col);
let region = Region::from_row_col(row, col);
let doc = alloc.stack(vec![
alloc.reflow(r"I am partway through parsing a header, but I got stuck here:"),
alloc.region_with_subregion(surroundings, region),
alloc.concat(vec![
alloc.reflow("I am expecting the "),
alloc.keyword("imports"),
alloc.reflow(" keyword next, like "),
]),
alloc
.parser_suggestion("imports [ Animal, default, tame ]")
.indent(4),
]);
Report {
filename,
doc,
title: "WEIRD IMPORTS".to_string(),
}
}
EImports::Space(error, row, col) => to_space_report(alloc, filename, &error, row, col),
EImports::ModuleName(row, col) => {
let surroundings = Region::from_rows_cols(start_row, start_col, row, col);
let region = Region::from_row_col(row, col);
let doc = alloc.stack(vec![
alloc.reflow(r"I am partway through parsing a header, but got stuck here:"),
alloc.region_with_subregion(surroundings, region),
alloc.concat(vec![
alloc.reflow("I am expecting a module name next, like "),
alloc.parser_suggestion("BigNum"),
alloc.reflow(" or "),
alloc.parser_suggestion("Main"),
alloc.reflow(". Module names must start with an uppercase letter."),
]),
]);
Report {
filename,
doc,
title: "WEIRD MODULE NAME".to_string(),
}
}
_ => todo!("unhandled parse error {:?}", parse_problem),
}
}
fn to_requires_report<'a>(
alloc: &'a RocDocAllocator<'a>,
filename: PathBuf,
parse_problem: &roc_parse::parser::ERequires<'a>,
start_row: Row,
start_col: Col,
) -> Report<'a> {
use roc_parse::parser::ERequires;
match *parse_problem {
ERequires::Requires(row, col) => {
let surroundings = Region::from_rows_cols(start_row, start_col, row, col);
let region = Region::from_row_col(row, col);
let doc = alloc.stack(vec![
alloc.reflow(r"I am partway through parsing a header, but I got stuck here:"),
alloc.region_with_subregion(surroundings, region),
alloc.concat(vec![
alloc.reflow("I am expecting the "),
alloc.keyword("requires"),
alloc.reflow(" keyword next, like "),
]),
alloc
.parser_suggestion("requires { main : Task I64 Str }")
.indent(4),
]);
Report {
filename,
doc,
title: "MISSING REQUIRES".to_string(),
}
}
ERequires::Space(error, row, col) => to_space_report(alloc, filename, &error, row, col),
_ => todo!("unhandled parse error {:?}", parse_problem),
}
}
fn to_packages_report<'a>(
alloc: &'a RocDocAllocator<'a>,
filename: PathBuf,
parse_problem: &roc_parse::parser::EPackages,
start_row: Row,
start_col: Col,
) -> Report<'a> {
use roc_parse::parser::EPackages;
match *parse_problem {
EPackages::Packages(row, col) => {
let surroundings = Region::from_rows_cols(start_row, start_col, row, col);
let region = Region::from_row_col(row, col);
let doc = alloc.stack(vec![
alloc.reflow(r"I am partway through parsing a header, but I got stuck here:"),
alloc.region_with_subregion(surroundings, region),
alloc.concat(vec![
alloc.reflow("I am expecting the "),
alloc.keyword("packages"),
alloc.reflow(" keyword next, like "),
]),
alloc.parser_suggestion("packages {}").indent(4),
]);
Report {
filename,
doc,
title: "MISSING PACKAGES".to_string(),
}
}
EPackages::Space(error, row, col) => to_space_report(alloc, filename, &error, row, col),
_ => todo!("unhandled parse error {:?}", parse_problem),
}
}
fn to_effects_report<'a>(
alloc: &'a RocDocAllocator<'a>,
filename: PathBuf,
parse_problem: &roc_parse::parser::EEffects,
start_row: Row,
start_col: Col,
) -> Report<'a> {
use roc_parse::parser::EEffects;
match *parse_problem {
EEffects::Effects(row, col) => {
let surroundings = Region::from_rows_cols(start_row, start_col, row, col);
let region = Region::from_row_col(row, col);
let doc = alloc.stack(vec![
alloc.reflow(r"I am partway through parsing a header, but I got stuck here:"),
alloc.region_with_subregion(surroundings, region),
alloc.concat(vec![
alloc.reflow("I am expecting the "),
alloc.keyword("effects"),
alloc.reflow(" keyword next, like "),
]),
alloc.parser_suggestion("effects {}").indent(4),
]);
Report {
filename,
doc,
title: "MISSING PACKAGES".to_string(),
}
}
EEffects::Space(error, row, col) => to_space_report(alloc, filename, &error, row, col),
_ => todo!("unhandled parse error {:?}", parse_problem),
}
}
fn to_space_report<'a>( fn to_space_report<'a>(
alloc: &'a RocDocAllocator<'a>, alloc: &'a RocDocAllocator<'a>,
filename: PathBuf, filename: PathBuf,
@ -2538,7 +3078,7 @@ fn what_is_next<'a>(source_lines: &'a [&'a str], row: Row, col: Col) -> Next<'a>
} }
} }
fn starts_with_keyword(rest_of_line: &str, keyword: &str) -> bool { pub fn starts_with_keyword(rest_of_line: &str, keyword: &str) -> bool {
if let Some(stripped) = rest_of_line.strip_prefix(keyword) { if let Some(stripped) = rest_of_line.strip_prefix(keyword) {
match stripped.chars().next() { match stripped.chars().next() {
None => true, None => true,

View file

@ -11,9 +11,6 @@ use roc_collections::all::{ImMap, MutMap, SendSet};
use roc_constrain::expr::constrain_expr; use roc_constrain::expr::constrain_expr;
use roc_constrain::module::{constrain_imported_values, Import}; use roc_constrain::module::{constrain_imported_values, Import};
use roc_module::symbol::{IdentIds, Interns, ModuleId, ModuleIds}; use roc_module::symbol::{IdentIds, Interns, ModuleId, ModuleIds};
use roc_parse::ast::{self, Attempting};
use roc_parse::blankspace::space0_before;
use roc_parse::parser::{loc, Parser, State, SyntaxError};
use roc_problem::can::Problem; use roc_problem::can::Problem;
use roc_region::all::Located; use roc_region::all::Located;
use roc_solve::solve; use roc_solve::solve;
@ -100,28 +97,9 @@ pub struct CanExprOut {
pub constraint: Constraint, pub constraint: Constraint,
} }
#[allow(dead_code)]
pub fn parse_with<'a>(arena: &'a Bump, input: &'a str) -> Result<ast::Expr<'a>, SyntaxError<'a>> {
parse_loc_with(arena, input).map(|loc_expr| loc_expr.value)
}
#[allow(dead_code)]
pub fn parse_loc_with<'a>(
arena: &'a Bump,
input: &'a str,
) -> Result<Located<ast::Expr<'a>>, SyntaxError<'a>> {
let state = State::new_in(arena, input.trim().as_bytes(), Attempting::Module);
let parser = space0_before(loc(roc_parse::expr::expr(0)), 0);
let answer = parser.parse(&arena, state);
answer
.map(|(_, loc_expr, _)| loc_expr)
.map_err(|(_, fail, _)| fail)
}
#[derive(Debug)] #[derive(Debug)]
pub struct ParseErrOut<'a> { pub struct ParseErrOut<'a> {
pub fail: SyntaxError<'a>, pub fail: roc_parse::parser::SyntaxError<'a>,
pub home: ModuleId, pub home: ModuleId,
pub interns: Interns, pub interns: Interns,
} }
@ -132,7 +110,7 @@ pub fn can_expr_with<'a>(
home: ModuleId, home: ModuleId,
expr_str: &'a str, expr_str: &'a str,
) -> Result<CanExprOut, ParseErrOut<'a>> { ) -> Result<CanExprOut, ParseErrOut<'a>> {
let loc_expr = match parse_loc_with(&arena, expr_str) { let loc_expr = match roc_parse::test_helpers::parse_loc_with(&arena, expr_str) {
Ok(e) => e, Ok(e) => e,
Err(fail) => { Err(fail) => {
let interns = Interns::default(); let interns = Interns::default();

View file

@ -169,6 +169,37 @@ mod test_reporting {
} }
} }
fn list_header_reports<F>(arena: &Bump, src: &str, buf: &mut String, callback: F)
where
F: FnOnce(RocDocBuilder<'_>, &mut String),
{
use ven_pretty::DocAllocator;
use roc_parse::parser::State;
let state = State::new(src.as_bytes());
let filename = filename_from_string(r"\code\proj\Main.roc");
let src_lines: Vec<&str> = src.split('\n').collect();
match roc_parse::module::parse_header(arena, state) {
Err(fail) => {
let interns = Interns::default();
let home = crate::helpers::test_home();
let alloc = RocDocAllocator::new(&src_lines, home, &interns);
use roc_parse::parser::SyntaxError;
let problem =
SyntaxError::Header(fail).into_parse_problem(filename.clone(), src.as_bytes());
let doc = parse_problem(&alloc, filename, 0, problem);
callback(doc.pretty(&alloc).append(alloc.line()), buf)
}
Ok(_) => todo!(),
}
}
fn report_problem_as(src: &str, expected_rendering: &str) { fn report_problem_as(src: &str, expected_rendering: &str) {
let mut buf: String = String::new(); let mut buf: String = String::new();
let arena = Bump::new(); let arena = Bump::new();
@ -193,6 +224,30 @@ mod test_reporting {
assert_eq!(buf, expected_rendering); assert_eq!(buf, expected_rendering);
} }
fn report_header_problem_as(src: &str, expected_rendering: &str) {
let mut buf: String = String::new();
let arena = Bump::new();
let callback = |doc: RocDocBuilder<'_>, buf: &mut String| {
doc.1
.render_raw(70, &mut roc_reporting::report::CiWrite::new(buf))
.expect("list_reports")
};
list_header_reports(&arena, src, &mut buf, callback);
// convenient to copy-paste the generated message
if true {
if buf != expected_rendering {
for line in buf.split("\n") {
println!(" {}", line);
}
}
}
assert_eq!(buf, expected_rendering);
}
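report_header_problem_as asserts against an exact rendering; when drafting a new expected string it can help to print the report instead. A small sketch in the same vein, reusing list_header_reports and the callback shape above (assumed to sit in the same test module):

    // Prints the rendered header report for ad-hoc inspection.
    fn print_header_report(src: &str) {
        let mut buf = String::new();
        let arena = Bump::new();
        let callback = |doc: RocDocBuilder<'_>, buf: &mut String| {
            doc.1
                .render_raw(70, &mut roc_reporting::report::CiWrite::new(buf))
                .expect("render header report")
        };
        list_header_reports(&arena, src, &mut buf, callback);
        println!("{}", buf);
    }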
fn color_report_problem_as(src: &str, expected_rendering: &str) { fn color_report_problem_as(src: &str, expected_rendering: &str) {
let mut buf: String = String::new(); let mut buf: String = String::new();
let arena = Bump::new(); let arena = Bump::new();
@ -3160,7 +3215,7 @@ mod test_reporting {
r#" r#"
ARGUMENTS BEFORE EQUALS ARGUMENTS BEFORE EQUALS
I am in the middle of parsing a definition, but I got stuck here: I am partway through parsing a definition, but I got stuck here:
1 f x y = x 1 f x y = x
^^^ ^^^
@ -4070,12 +4125,12 @@ mod test_reporting {
r#" r#"
SYNTAX PROBLEM SYNTAX PROBLEM
I am trying to parse a record field accessor here: I am trying to parse a record field access here:
1 foo.bar. 1 foo.bar.
^ ^
Something like .name or .height that accesses a value from a record. So I expect to see a lowercase letter next, like .name or .height.
"# "#
), ),
) )
@ -4093,14 +4148,13 @@ mod test_reporting {
r#" r#"
SYNTAX PROBLEM SYNTAX PROBLEM
I am trying to parse a qualified name here: I am very confused by this expression:
1 @Foo.Bar 1 @Foo.Bar
^ ^^^^
This looks like a qualified tag name to me, but tags cannot be Looks like a private tag is treated like a module name. Maybe you
qualified! Maybe you wanted a qualified name, something like wanted a qualified name, like Json.Decode.string?
Json.Decode.string?
"# "#
), ),
) )
@ -4508,21 +4562,21 @@ mod test_reporting {
indoc!( indoc!(
r#" r#"
f : Foo..Bar f : Foo..Bar
f
"# "#
), ),
indoc!( indoc!(r#""#),
r#"
DOUBLE DOT
I encountered two dots in a row:
1 f : Foo..Bar
^
Try removing one of them.
"#
),
) )
// ── DOUBLE DOT ──────────────────────────────────────────────────────────────────
//
// I encountered two dots in a row:
//
// 1│ f : Foo..Bar
// ^
//
// Try removing one of them.
} }
#[test] #[test]
@ -4531,22 +4585,22 @@ mod test_reporting {
indoc!( indoc!(
r#" r#"
f : Foo.Bar. f : Foo.Bar.
f
"# "#
), ),
indoc!( indoc!(r#""#),
r#"
TRAILING DOT
I encountered a dot with nothing after it:
1 f : Foo.Bar.
^
Dots are used to refer to a type in a qualified way, like
Num.I64 or List.List a. Try adding a type name next.
"#
),
) )
// ── TRAILING DOT ────────────────────────────────────────────────────────────────
//
// I encountered a dot with nothing after it:
//
// 1│ f : Foo.Bar.
// ^
//
// Dots are used to refer to a type in a qualified way, like
// Num.I64 or List.List a. Try adding a type name next.
} }
#[test] #[test]
@ -4582,26 +4636,40 @@ mod test_reporting {
indoc!( indoc!(
r#" r#"
f : Foo.1 f : Foo.1
f
"# "#
), ),
indoc!( indoc!(r#""#),
r#"
WEIRD QUALIFIED NAME
I encountered a number at the start of a qualified name segment:
1 f : Foo.1
^
All parts of a qualified type name must start with an uppercase
letter, like Num.I64 or List.List a.
"#
),
) )
// ── WEIRD QUALIFIED NAME ────────────────────────────────────────────────────────
//
// I encountered a number at the start of a qualified name segment:
//
// 1│ f : Foo.1
// ^
//
// All parts of a qualified type name must start with an uppercase
// letter, like Num.I64 or List.List a.
} }
#[test] #[test]
fn type_apply_start_with_lowercase() { fn type_apply_start_with_lowercase() {
report_problem_as(
indoc!(
r#"
f : Foo.foo
f
"#
),
indoc!(r#""#),
)
}
#[test]
fn def_missing_final_expression() {
report_problem_as( report_problem_as(
indoc!( indoc!(
r#" r#"
@ -4610,16 +4678,20 @@ mod test_reporting {
), ),
indoc!( indoc!(
r#" r#"
WEIRD QUALIFIED NAME MISSING FINAL EXPRESSION
I encountered a lowercase letter at the start of a qualified name I am partway through parsing a definition, but I got stuck here:
segment:
1 f : Foo.foo 1 f : Foo.foo
^ ^
All parts of a qualified type name must start with an uppercase This definition is missing a final expression. A nested definition
letter, like Num.I64 or List.List a. must be followed by either another definition, or an expression
x = 4
y = 2
x + y
"# "#
), ),
) )
@ -4984,8 +5056,8 @@ mod test_reporting {
r#" r#"
UNFINISHED ARGUMENT LIST UNFINISHED ARGUMENT LIST
I am in the middle of parsing a function argument list, but I got I am partway through parsing a function argument list, but I got stuck
stuck at this comma: at this comma:
1 \a,,b -> 1 1 \a,,b -> 1
^ ^
@ -5009,8 +5081,8 @@ mod test_reporting {
r#" r#"
UNFINISHED ARGUMENT LIST UNFINISHED ARGUMENT LIST
I am in the middle of parsing a function argument list, but I got I am partway through parsing a function argument list, but I got stuck
stuck at this comma: at this comma:
1 \,b -> 1 1 \,b -> 1
^ ^
@ -5417,12 +5489,12 @@ mod test_reporting {
r#" r#"
SYNTAX PROBLEM SYNTAX PROBLEM
I trying to parse a record field accessor here: I trying to parse a record field access here:
1 Num.add . 23 1 Num.add . 23
^ ^
Something like .name or .height that accesses a value from a record. So I expect to see a lowercase letter next, like .name or .height.
"# "#
), ),
) )
@ -5468,7 +5540,7 @@ mod test_reporting {
I am very confused by this field access: I am very confused by this field access:
1 @UUID.bar 1 @UUID.bar
^^^^^^^^^ ^^^^
It looks like a record field access on a private tag. It looks like a record field access on a private tag.
"# "#
@ -5702,4 +5774,113 @@ mod test_reporting {
), ),
) )
} }
#[test]
fn provides_to_identifier() {
report_header_problem_as(
indoc!(
r#"
app "test-base64"
packages { base: "platform" }
imports [base.Task, Base64 ]
provides [ main, @Foo ] to base
"#
),
indoc!(
r#"
WEIRD PROVIDES
I am partway through parsing a provides list, but I got stuck here:
3 imports [base.Task, Base64 ]
4 provides [ main, @Foo ] to base
^
I was expecting a type name, value name or function name next, like
provides [ Animal, default, tame ]
"#
),
)
}
#[test]
fn exposes_identifier() {
report_header_problem_as(
indoc!(
r#"
interface Foobar
exposes [ main, @Foo ]
imports [base.Task, Base64 ]
"#
),
indoc!(
r#"
WEIRD EXPOSES
I am partway through parsing a exposes list, but I got stuck here:
1 interface Foobar
2 exposes [ main, @Foo ]
^
I was expecting a type name, value name or function name next, like
exposes [ Animal, default, tame ]
"#
),
)
}
#[test]
fn invalid_module_name() {
report_header_problem_as(
indoc!(
r#"
interface foobar
exposes [ main, @Foo ]
imports [base.Task, Base64 ]
"#
),
indoc!(
r#"
WEIRD MODULE NAME
I am partway through parsing a header, but got stuck here:
1 interface foobar
^
I am expecting a module name next, like BigNum or Main. Module names
must start with an uppercase letter.
"#
),
)
}
#[test]
fn invalid_app_name() {
report_header_problem_as(
indoc!(
r#"
app foobar
exposes [ main, @Foo ]
imports [base.Task, Base64 ]
"#
),
indoc!(
r#"
WEIRD APP NAME
I am partway through parsing a header, but got stuck here:
1 app foobar
^
I am expecting an application name next, like app "main" or
app "editor". App names are surrounded by quotation marks.
"#
),
)
}
} }

View file

@ -568,6 +568,36 @@ fn list_map_closure() {
); );
} }
#[test]
fn list_map3_group() {
assert_evals_to!(
indoc!(
r#"
List.map3 [1,2,3] [3,2,1] [2,1,3] (\a, b, c -> Group a b c)
"#
),
RocList::from_slice(&[(1, 3, 2), (2, 2, 1), (3, 1, 3)]),
RocList<(i64, i64, i64)>
);
}
#[test]
fn list_map3_different_length() {
assert_evals_to!(
indoc!(
r#"
List.map3
["a", "b", "d"]
["b", "x"]
["c"]
(\a, b, c -> Str.concat a (Str.concat b c))
"#
),
RocList::from_slice(&[RocStr::from_slice("abc".as_bytes()),]),
RocList<RocStr>
);
}
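The two tests above pin down the zipping behavior these changes rely on: walk the three lists in lockstep, stop at the shortest, and combine each triple. As a rough illustration only (plain Rust, not Roc's builtin implementation), that semantics looks like this:

// Illustration only: mirrors the semantics the tests above expect from
// List.map3 (truncate to the shortest list, then combine element-wise).
fn map3<A, B, C, D>(
    xs: Vec<A>,
    ys: Vec<B>,
    zs: Vec<C>,
    f: impl Fn(A, B, C) -> D,
) -> Vec<D> {
    xs.into_iter()
        .zip(ys) // zip stops at the shorter of the two iterators
        .zip(zs)
        .map(|((a, b), c)| f(a, b, c))
        .collect()
}

// map3(vec!["a", "b", "d"], vec!["b", "x"], vec!["c"], |a, b, c| format!("{}{}{}", a, b, c))
// evaluates to vec!["abc"], matching list_map3_different_length above.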
#[test] #[test]
fn list_map2_pair() { fn list_map2_pair() {
assert_evals_to!( assert_evals_to!(

View file

@ -19,29 +19,29 @@ roc_fmt = { path = "../compiler/fmt" }
roc_reporting = { path = "../compiler/reporting" } roc_reporting = { path = "../compiler/reporting" }
# TODO switch to clap 3.0.0 once it's out. Tried adding clap = "~3.0.0-beta.1" and cargo wouldn't accept it # TODO switch to clap 3.0.0 once it's out. Tried adding clap = "~3.0.0-beta.1" and cargo wouldn't accept it
ven_graph = { path = "../vendor/pathfinding" } ven_graph = { path = "../vendor/pathfinding" }
im = "14" # im and im-rc should always have the same version! im = "15" # im and im-rc should always have the same version!
im-rc = "14" # im and im-rc should always have the same version! im-rc = "15" # im and im-rc should always have the same version!
bumpalo = { version = "3.2", features = ["collections"] } bumpalo = { version = "3.2", features = ["collections"] }
inlinable_string = "0.1" inlinable_string = "0.1"
arraystring = "0.3.0" arraystring = "0.3.0"
libc = "0.2" libc = "0.2"
page_size = "0.4" page_size = "0.4"
winit = "0.22" winit = "0.24"
wgpu = "0.6" wgpu = "0.7"
glyph_brush = "0.7" glyph_brush = "0.7"
log = "0.4" log = "0.4"
zerocopy = "0.3" zerocopy = "0.3"
env_logger = "0.7" env_logger = "0.8"
futures = "0.3" futures = "0.3"
wgpu_glyph = "0.10" wgpu_glyph = "0.11"
cgmath = "0.17.0" cgmath = "0.18.0"
snafu = { version = "0.6", features = ["backtraces"] } snafu = { version = "0.6", features = ["backtraces"] }
colored = "2" colored = "2"
pest = "2.1" pest = "2.1"
pest_derive = "2.1" pest_derive = "2.1"
ropey = "1.2.0" ropey = "1.2.0"
copypasta = "0.7.1" copypasta = "0.7.1"
indoc = "0.3.3" indoc = "1.0"
palette = "0.5" palette = "0.5"
confy = { git = 'https://github.com/rust-cli/confy', features = [ confy = { git = 'https://github.com/rust-cli/confy', features = [
"yaml_conf" "yaml_conf"
@ -53,10 +53,10 @@ version = "1.4"
features = ["derive"] features = ["derive"]
[dev-dependencies] [dev-dependencies]
pretty_assertions = "0.5.1" pretty_assertions = "0.6"
maplit = "1.0.1" maplit = "1.0.1"
quickcheck = "0.8" quickcheck = "1.0"
quickcheck_macros = "0.8" quickcheck_macros = "1.0"
criterion = "0.3" criterion = "0.3"
rand = "0.8.2" rand = "0.8.2"
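The quickcheck/quickcheck_macros bump from 0.8 to 1.0 is an API-breaking upgrade (for example, `Gen` becomes a struct in 1.0). A minimal sketch of a property test under the 1.0 attribute macro, with a purely hypothetical property, assuming the dev-dependencies declared above:

// Hypothetical property test; assumes quickcheck = "1.0" and
// quickcheck_macros = "1.0" as declared in [dev-dependencies].
#[cfg(test)]
mod quickcheck_example {
    use quickcheck_macros::quickcheck;

    // The #[quickcheck] attribute runs this function against many randomly
    // generated inputs; returning false fails the test.
    #[quickcheck]
    fn concat_len_is_sum(a: Vec<u8>, b: Vec<u8>) -> bool {
        let mut joined = a.clone();
        joined.extend_from_slice(&b);
        joined.len() == a.len() + b.len()
    }
}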

View file

@ -66,6 +66,7 @@ e.g. you have a test `calculate_sum_test` that only uses the function `add`, whe
* When refactoring; * When refactoring;
- Cutting and pasting code to a new file should automatically add imports to the new file and delete them from the old file. - Cutting and pasting code to a new file should automatically add imports to the new file and delete them from the old file.
- Ability to link e.g. variable name in comments to actual variable name. Comment is automatically updated when variable name is changed. - Ability to link e.g. variable name in comments to actual variable name. Comment is automatically updated when variable name is changed.
- When updating dependencies with breaking changes, show similar diffs from GitHub projects that have successfully updated that dependency.
- AST backed renaming, changing variable/function/type name should change it all over the codebase. - AST backed renaming, changing variable/function/type name should change it all over the codebase.
* Automatically create all "arms" when pattern matching after entering `when var is` based on the type. * Automatically create all "arms" when pattern matching after entering `when var is` based on the type.
- All `when ... is` should be updated if the type is changed, e.g. adding Indigo to the Color type should add an arm everywhere where `when color is` is used. - All `when ... is` should be updated if the type is changed, e.g. adding Indigo to the Color type should add an arm everywhere where `when color is` is used.

View file

@ -88,9 +88,9 @@ fn run_event_loop(file_path_opt: Option<&Path>) -> Result<(), Box<dyn Error>> {
adapter adapter
.request_device( .request_device(
&wgpu::DeviceDescriptor { &wgpu::DeviceDescriptor {
label: None,
features: wgpu::Features::empty(), features: wgpu::Features::empty(),
limits: wgpu::Limits::default(), limits: wgpu::Limits::default(),
shader_validation: false,
}, },
None, None,
) )
@ -108,12 +108,12 @@ fn run_event_loop(file_path_opt: Option<&Path>) -> Result<(), Box<dyn Error>> {
let mut size = window.inner_size(); let mut size = window.inner_size();
let swap_chain_descr = wgpu::SwapChainDescriptor { let swap_chain_descr = wgpu::SwapChainDescriptor {
usage: wgpu::TextureUsage::OUTPUT_ATTACHMENT, usage: wgpu::TextureUsage::RENDER_ATTACHMENT,
format: render_format, format: render_format,
width: size.width, width: size.width,
height: size.height, height: size.height,
//Immediate may cause tearing, change present_mode if this becomes a problem // TODO go back to Immediate
present_mode: wgpu::PresentMode::Immediate, present_mode: wgpu::PresentMode::Fifo,
}; };
let mut swap_chain = gpu_device.create_swap_chain(&surface, &swap_chain_descr); let mut swap_chain = gpu_device.create_swap_chain(&surface, &swap_chain_descr);
@ -182,12 +182,12 @@ fn run_event_loop(file_path_opt: Option<&Path>) -> Result<(), Box<dyn Error>> {
swap_chain = gpu_device.create_swap_chain( swap_chain = gpu_device.create_swap_chain(
&surface, &surface,
&wgpu::SwapChainDescriptor { &wgpu::SwapChainDescriptor {
usage: wgpu::TextureUsage::OUTPUT_ATTACHMENT, usage: wgpu::TextureUsage::RENDER_ATTACHMENT,
format: render_format, format: render_format,
width: size.width, width: size.width,
height: size.height, height: size.height,
//Immediate may cause tearing, change present_mode if this becomes a problem // TODO go back to Immediate
present_mode: wgpu::PresentMode::Immediate, present_mode: wgpu::PresentMode::Fifo,
}, },
); );
@ -376,7 +376,10 @@ fn draw_all_rects(
render_pass.set_pipeline(&rect_resources.pipeline); render_pass.set_pipeline(&rect_resources.pipeline);
render_pass.set_bind_group(0, &rect_resources.ortho.bind_group, &[]); render_pass.set_bind_group(0, &rect_resources.ortho.bind_group, &[]);
render_pass.set_vertex_buffer(0, rect_buffers.vertex_buffer.slice(..)); render_pass.set_vertex_buffer(0, rect_buffers.vertex_buffer.slice(..));
render_pass.set_index_buffer(rect_buffers.index_buffer.slice(..)); render_pass.set_index_buffer(
rect_buffers.index_buffer.slice(..),
wgpu::IndexFormat::Uint32,
);
render_pass.draw_indexed(0..rect_buffers.num_rects, 0, 0..1); render_pass.draw_indexed(0..rect_buffers.num_rects, 0, 0..1);
} else { } else {
// need to begin render pass to clear screen // need to begin render pass to clear screen
@ -403,6 +406,7 @@ fn begin_render_pass<'a>(
}, },
}], }],
depth_stencil_attachment: None, depth_stencil_attachment: None,
label: None,
}) })
} }

View file

@ -91,8 +91,9 @@ pub fn init_ortho(
entries: &[BindGroupLayoutEntry { entries: &[BindGroupLayoutEntry {
binding: 0, binding: 0,
visibility: ShaderStage::VERTEX, visibility: ShaderStage::VERTEX,
ty: wgpu::BindingType::UniformBuffer { ty: wgpu::BindingType::Buffer {
dynamic: false, ty: wgpu::BufferBindingType::Uniform,
has_dynamic_offset: false,
min_binding_size: None, min_binding_size: None,
}, },
count: None, count: None,
@ -104,7 +105,7 @@ pub fn init_ortho(
layout: &ortho_bind_group_layout, layout: &ortho_bind_group_layout,
entries: &[wgpu::BindGroupEntry { entries: &[wgpu::BindGroupEntry {
binding: 0, binding: 0,
resource: wgpu::BindingResource::Buffer(ortho_buffer.slice(..)), resource: ortho_buffer.as_entire_binding(),
}], }],
label: Some("Ortho bind group"), label: Some("Ortho bind group"),
}); });

View file

@ -21,9 +21,8 @@ pub fn make_rect_pipeline(
&gpu_device, &gpu_device,
&pipeline_layout, &pipeline_layout,
swap_chain_descr.format, swap_chain_descr.format,
&[Vertex::DESC], &wgpu::include_spirv!("../shaders/rect.vert.spv"),
wgpu::include_spirv!("../shaders/rect.vert.spv"), &wgpu::include_spirv!("../shaders/rect.frag.spv"),
wgpu::include_spirv!("../shaders/rect.frag.spv"),
); );
RectResources { pipeline, ortho } RectResources { pipeline, ortho }
@ -33,9 +32,8 @@ pub fn create_render_pipeline(
device: &wgpu::Device, device: &wgpu::Device,
layout: &wgpu::PipelineLayout, layout: &wgpu::PipelineLayout,
color_format: wgpu::TextureFormat, color_format: wgpu::TextureFormat,
vertex_descs: &[wgpu::VertexBufferDescriptor], vs_src: &wgpu::ShaderModuleDescriptor,
vs_src: wgpu::ShaderModuleSource, fs_src: &wgpu::ShaderModuleDescriptor,
fs_src: wgpu::ShaderModuleSource,
) -> wgpu::RenderPipeline { ) -> wgpu::RenderPipeline {
let vs_module = device.create_shader_module(vs_src); let vs_module = device.create_shader_module(vs_src);
let fs_module = device.create_shader_module(fs_src); let fs_module = device.create_shader_module(fs_src);
@ -43,29 +41,27 @@ pub fn create_render_pipeline(
device.create_render_pipeline(&wgpu::RenderPipelineDescriptor { device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
label: Some("Render pipeline"), label: Some("Render pipeline"),
layout: Some(&layout), layout: Some(&layout),
vertex_stage: wgpu::ProgrammableStageDescriptor { vertex: wgpu::VertexState {
module: &vs_module, module: &vs_module,
entry_point: "main", entry_point: "main",
buffers: &[Vertex::DESC],
}, },
fragment_stage: Some(wgpu::ProgrammableStageDescriptor { fragment: Some(wgpu::FragmentState {
module: &fs_module, module: &fs_module,
entry_point: "main", entry_point: "main",
}), targets: &[wgpu::ColorTargetState {
rasterization_state: None,
primitive_topology: wgpu::PrimitiveTopology::TriangleList,
color_states: &[wgpu::ColorStateDescriptor {
format: color_format, format: color_format,
color_blend: wgpu::BlendDescriptor::REPLACE, color_blend: wgpu::BlendState::REPLACE,
alpha_blend: wgpu::BlendDescriptor::REPLACE, alpha_blend: wgpu::BlendState::REPLACE,
write_mask: wgpu::ColorWrite::ALL, write_mask: wgpu::ColorWrite::ALL,
}], }],
depth_stencil_state: None, }),
sample_count: 1, primitive: wgpu::PrimitiveState::default(),
sample_mask: !0, depth_stencil: None,
multisample: wgpu::MultisampleState {
count: 1,
mask: !0,
alpha_to_coverage_enabled: false, alpha_to_coverage_enabled: false,
vertex_state: wgpu::VertexStateDescriptor {
index_format: wgpu::IndexFormat::Uint32,
vertex_buffers: vertex_descs,
}, },
}) })
} }

View file

@ -13,18 +13,18 @@ unsafe impl bytemuck::Zeroable for Vertex {}
impl Vertex { impl Vertex {
pub const SIZE: wgpu::BufferAddress = std::mem::size_of::<Self>() as wgpu::BufferAddress; pub const SIZE: wgpu::BufferAddress = std::mem::size_of::<Self>() as wgpu::BufferAddress;
pub const DESC: wgpu::VertexBufferDescriptor<'static> = wgpu::VertexBufferDescriptor { pub const DESC: wgpu::VertexBufferLayout<'static> = wgpu::VertexBufferLayout {
stride: Self::SIZE, array_stride: Self::SIZE,
step_mode: wgpu::InputStepMode::Vertex, step_mode: wgpu::InputStepMode::Vertex,
attributes: &[ attributes: &[
// position // position
wgpu::VertexAttributeDescriptor { wgpu::VertexAttribute {
offset: 0, offset: 0,
shader_location: 0, shader_location: 0,
format: wgpu::VertexFormat::Float2, format: wgpu::VertexFormat::Float2,
}, },
// color // color
wgpu::VertexAttributeDescriptor { wgpu::VertexAttribute {
offset: std::mem::size_of::<[f32; 2]>() as wgpu::BufferAddress, offset: std::mem::size_of::<[f32; 2]>() as wgpu::BufferAddress,
shader_location: 1, shader_location: 1,
format: wgpu::VertexFormat::Float4, format: wgpu::VertexFormat::Float4,

View file

@ -16,9 +16,8 @@ use roc_module::ident::ModuleName;
use roc_module::low_level::LowLevel; use roc_module::low_level::LowLevel;
use roc_module::operator::CalledVia; use roc_module::operator::CalledVia;
use roc_module::symbol::{IdentIds, ModuleId, ModuleIds, Symbol}; use roc_module::symbol::{IdentIds, ModuleId, ModuleIds, Symbol};
use roc_parse::ast;
use roc_parse::ast::StrLiteral; use roc_parse::ast::StrLiteral;
use roc_parse::ast::{self, Attempting};
use roc_parse::blankspace::space0_before;
use roc_parse::expr::expr; use roc_parse::expr::expr;
use roc_parse::parser::{loc, Parser, State, SyntaxError}; use roc_parse::parser::{loc, Parser, State, SyntaxError};
use roc_problem::can::{Problem, RuntimeError}; use roc_problem::can::{Problem, RuntimeError};
@ -235,14 +234,10 @@ pub fn str_to_expr2<'a>(
scope: &mut Scope, scope: &mut Scope,
region: Region, region: Region,
) -> Result<(Expr2, self::Output), SyntaxError<'a>> { ) -> Result<(Expr2, self::Output), SyntaxError<'a>> {
let state = State::new_in(arena, input.trim().as_bytes(), Attempting::Module); match roc_parse::test_helpers::parse_loc_with(arena, input.trim()) {
let parser = space0_before(loc(expr(0)), 0); Ok(loc_expr) => Ok(to_expr2(env, scope, arena.alloc(loc_expr.value), region)),
let parse_res = parser.parse(&arena, state); Err(fail) => Err(fail),
}
parse_res
.map(|(_, loc_expr, _)| arena.alloc(loc_expr.value))
.map(|loc_expr_val_ref| to_expr2(env, scope, loc_expr_val_ref, region))
.map_err(|(_, fail, _)| fail)
} }
pub fn to_expr2<'a>( pub fn to_expr2<'a>(

View file

@ -2,7 +2,7 @@ use bumpalo::collections::Vec;
use bumpalo::Bump; use bumpalo::Bump;
use roc_fmt::def::fmt_def; use roc_fmt::def::fmt_def;
use roc_fmt::module::fmt_module; use roc_fmt::module::fmt_module;
use roc_parse::ast::{Attempting, Def, Module}; use roc_parse::ast::{Def, Module};
use roc_parse::module::module_defs; use roc_parse::module::module_defs;
use roc_parse::parser; use roc_parse::parser;
use roc_parse::parser::{Parser, SyntaxError}; use roc_parse::parser::{Parser, SyntaxError};
@ -36,11 +36,11 @@ impl<'a> File<'a> {
let allocation = arena.alloc(bytes); let allocation = arena.alloc(bytes);
let module_parse_state = parser::State::new_in(arena, allocation, Attempting::Module); let module_parse_state = parser::State::new(allocation);
let parsed_module = roc_parse::module::header().parse(&arena, module_parse_state); let parsed_module = roc_parse::module::parse_header(&arena, module_parse_state);
match parsed_module { match parsed_module {
Ok((_, module, state)) => { Ok((module, state)) => {
let parsed_defs = module_defs().parse(&arena, state); let parsed_defs = module_defs().parse(&arena, state);
match parsed_defs { match parsed_defs {
@ -52,7 +52,7 @@ impl<'a> File<'a> {
Err((_, error, _)) => Err(ReadError::ParseDefs(error)), Err((_, error, _)) => Err(ReadError::ParseDefs(error)),
} }
} }
Err((_, error, _)) => Err(ReadError::ParseHeader(error)), Err(error) => Err(ReadError::ParseHeader(SyntaxError::Header(error))),
} }
} }
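For orientation, a rough sketch of the updated two-step flow above: build a State, parse the header with the new standalone function, then parse the remaining defs. The signatures and the ReadError generics are assumed from the call shapes visible in this diff, not checked against the crates.

// Sketch only; mirrors the match structure above. `ReadError` is this
// module's own error enum, and its lifetime parameter is assumed here.
fn parse_module<'a>(arena: &'a Bump, bytes: &'a [u8]) -> Result<(), ReadError<'a>> {
    // Header parsing is now a plain function rather than a parser combinator.
    let state = parser::State::new(bytes);

    match roc_parse::module::parse_header(arena, state) {
        Ok((_module, state)) => {
            // The remaining defs still go through the combinator-style Parser API.
            match module_defs().parse(arena, state) {
                Ok((_, _defs, _)) => Ok(()),
                Err((_, error, _)) => Err(ReadError::ParseDefs(error)),
            }
        }
        // Header failures are wrapped back into SyntaxError for callers.
        Err(error) => Err(ReadError::ParseHeader(SyntaxError::Header(error))),
    }
}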