Mirror of https://github.com/roc-lang/roc.git, synced 2025-09-28 14:24:45 +00:00
Merge branch 'trunk' of github.com:rtfeldman/roc into builtin-maxI128
Commit 66f07d984e
41 changed files with 3892 additions and 3648 deletions
1338 Cargo.lock (generated)
File diff suppressed because it is too large
18 Earthfile
@@ -8,7 +8,7 @@ install-other-libs:
    FROM +prep-debian
    RUN apt -y install wget git
    RUN apt -y install libxcb-shape0-dev libxcb-xfixes0-dev # for editor clipboard
    RUN apt -y install libc++-dev libc++abi-dev libunwind-dev pkg-config libx11-dev zlib1g-dev
    RUN apt -y install libc++-dev libc++abi-dev g++ libunwind-dev pkg-config libx11-dev zlib1g-dev

install-zig-llvm-valgrind-clippy-rustfmt:
    FROM +install-other-libs
@@ -30,10 +30,10 @@ install-zig-llvm-valgrind-clippy-rustfmt:
    RUN wget https://sourceware.org/pub/valgrind/valgrind-3.16.1.tar.bz2
    RUN tar -xf valgrind-3.16.1.tar.bz2
    # need to cd every time, every command starts at WORKDIR
    RUN cd valgrind-3.16.1; ./autogen.sh
    RUN cd valgrind-3.16.1; ./configure --disable-dependency-tracking
    RUN cd valgrind-3.16.1; make -j`nproc`
    RUN cd valgrind-3.16.1; make install
    RUN cd valgrind-3.16.1 && ./autogen.sh
    RUN cd valgrind-3.16.1 && ./configure --disable-dependency-tracking
    RUN cd valgrind-3.16.1 && make -j`nproc`
    RUN cd valgrind-3.16.1 && make install
    # clippy
    RUN rustup component add clippy
    # rustfmt
@@ -75,16 +75,16 @@ save-cache:
    FROM +install-zig-llvm-valgrind-clippy-rustfmt
    COPY +prepare-cache/recipe.json ./
    RUN --mount=type=cache,target=$SCCACHE_DIR \
        cargo chef cook; sccache --show-stats # for clippy
        cargo chef cook && sccache --show-stats # for clippy
    RUN --mount=type=cache,target=$SCCACHE_DIR \
        cargo chef cook --release --tests; sccache --show-stats
        cargo chef cook --release --tests && sccache --show-stats
    SAVE ARTIFACT target
    SAVE ARTIFACT $CARGO_HOME cargo_home

test-zig:
    FROM +install-zig-llvm-valgrind-clippy-rustfmt
    COPY --dir compiler/builtins/bitcode ./
    RUN cd bitcode; ./run-tests.sh;
    RUN cd bitcode && ./run-tests.sh

check-clippy:
    FROM +copy-dirs-and-cache
@@ -101,7 +101,7 @@ test-rust:
    FROM +copy-dirs-and-cache
    ENV RUST_BACKTRACE=1
    RUN --mount=type=cache,target=$SCCACHE_DIR \
        cargo test --release; sccache --show-stats
        cargo test --release && sccache --show-stats

test-all:
    BUILD +test-zig
@@ -114,6 +114,7 @@ pub const RocList = extern struct {

const Caller1 = fn (?[*]u8, ?[*]u8, ?[*]u8) callconv(.C) void;
const Caller2 = fn (?[*]u8, ?[*]u8, ?[*]u8, ?[*]u8) callconv(.C) void;
const Caller3 = fn (?[*]u8, ?[*]u8, ?[*]u8, ?[*]u8, ?[*]u8) callconv(.C) void;

pub fn listMap(list: RocList, transform: Opaque, caller: Caller1, alignment: usize, old_element_width: usize, new_element_width: usize) callconv(.C) RocList {
    if (list.bytes) |source_ptr| {
@@ -213,6 +214,129 @@ pub fn listMap2(list1: RocList, list2: RocList, transform: Opaque, caller: Calle
    }
}

pub fn listMap3(list1: RocList, list2: RocList, list3: RocList, transform: Opaque, caller: Caller3, alignment: usize, a_width: usize, b_width: usize, c_width: usize, d_width: usize, dec_a: Dec, dec_b: Dec, dec_c: Dec) callconv(.C) RocList {
    const smaller_length = std.math.min(list1.len(), list2.len());
    const output_length = std.math.min(smaller_length, list3.len());

    if (list1.bytes) |source_a| {
        if (list2.bytes) |source_b| {
            if (list3.bytes) |source_c| {
                const output = RocList.allocate(std.heap.c_allocator, alignment, output_length, d_width);
                const target_ptr = output.bytes orelse unreachable;

                var i: usize = 0;
                while (i < output_length) : (i += 1) {
                    const element_a = source_a + i * a_width;
                    const element_b = source_b + i * b_width;
                    const element_c = source_c + i * c_width;
                    const target = target_ptr + i * d_width;

                    caller(transform, element_a, element_b, element_c, target);
                }

                // if the lists don't have equal length, we must consume the remaining elements
                // In this case we consume by (recursively) decrementing the elements
                if (list1.len() > output_length) {
                    i = output_length;
                    while (i < list1.len()) : (i += 1) {
                        const element_a = source_a + i * a_width;
                        dec_a(element_a);
                    }
                }

                if (list2.len() > output_length) {
                    i = output_length;
                    while (i < list2.len()) : (i += 1) {
                        const element_b = source_b + i * b_width;
                        dec_b(element_b);
                    }
                }

                if (list3.len() > output_length) {
                    i = output_length;
                    while (i < list3.len()) : (i += 1) {
                        const element_c = source_c + i * c_width;
                        dec_c(element_c);
                    }
                }

                utils.decref(std.heap.c_allocator, alignment, list1.bytes, list1.len() * a_width);
                utils.decref(std.heap.c_allocator, alignment, list2.bytes, list2.len() * b_width);
                utils.decref(std.heap.c_allocator, alignment, list3.bytes, list3.len() * c_width);

                return output;
            } else {
                // consume list1 elements (we know there is at least one because the list1.bytes pointer is non-null)
                var i: usize = 0;
                while (i < list1.len()) : (i += 1) {
                    const element_a = source_a + i * a_width;
                    dec_a(element_a);
                }
                utils.decref(std.heap.c_allocator, alignment, list1.bytes, list1.len() * a_width);

                // consume list2 elements (we know there is at least one because the list2.bytes pointer is non-null)
                i = 0;
                while (i < list2.len()) : (i += 1) {
                    const element_b = source_b + i * b_width;
                    dec_b(element_b);
                }
                utils.decref(std.heap.c_allocator, alignment, list2.bytes, list2.len() * b_width);

                return RocList.empty();
            }
        } else {
            // consume list1 elements (we know there is at least one because the list1.bytes pointer is non-null)
            var i: usize = 0;
            while (i < list1.len()) : (i += 1) {
                const element_a = source_a + i * a_width;
                dec_a(element_a);
            }

            utils.decref(std.heap.c_allocator, alignment, list1.bytes, list1.len() * a_width);

            // consume list3 elements (if any)
            if (list3.bytes) |source_c| {
                i = 0;

                while (i < list3.len()) : (i += 1) {
                    const element_c = source_c + i * c_width;
                    dec_c(element_c);
                }

                utils.decref(std.heap.c_allocator, alignment, list3.bytes, list3.len() * c_width);
            }

            return RocList.empty();
        }
    } else {
        // consume list2 elements (if any)
        if (list2.bytes) |source_b| {
            var i: usize = 0;

            while (i < list2.len()) : (i += 1) {
                const element_b = source_b + i * b_width;
                dec_b(element_b);
            }

            utils.decref(std.heap.c_allocator, alignment, list2.bytes, list2.len() * b_width);
        }

        // consume list3 elements (if any)
        if (list3.bytes) |source_c| {
            var i: usize = 0;

            while (i < list3.len()) : (i += 1) {
                const element_c = source_c + i * c_width;
                dec_c(element_c);
            }

            utils.decref(std.heap.c_allocator, alignment, list3.bytes, list3.len() * c_width);
        }

        return RocList.empty();
    }
}

pub fn listKeepIf(list: RocList, transform: Opaque, caller: Caller1, alignment: usize, element_width: usize, inc: Inc, dec: Dec) callconv(.C) RocList {
    if (list.bytes) |source_ptr| {
        const size = list.len();
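Aside, not part of the diff: at the Roc level the builtin above produces an output as long as the shortest of the three inputs, and surplus elements of the longer inputs are only reference-count decremented, never passed to the callback. A hypothetical usage sketch (the surface syntax is an assumption, not taken from this commit):

    # Hypothetical example: output length is the minimum of the three input lengths.
    List.map3 [ 1, 2, 3 ] [ 10, 20 ] [ 100, 200 ] (\a, b, c -> a + b + c)
    # evaluates to [ 111, 222 ]; the trailing 3 has no partners, so it is only decremented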
@@ -8,6 +8,7 @@ const list = @import("list.zig");
comptime {
    exportListFn(list.listMap, "map");
    exportListFn(list.listMap2, "map2");
    exportListFn(list.listMap3, "map3");
    exportListFn(list.listMapWithIndex, "map_with_index");
    exportListFn(list.listKeepIf, "keep_if");
    exportListFn(list.listWalk, "walk");
@@ -64,6 +64,7 @@ pub const SET_FROM_LIST: &str = "roc_builtins.dict.set_from_list";

pub const LIST_MAP: &str = "roc_builtins.list.map";
pub const LIST_MAP2: &str = "roc_builtins.list.map2";
pub const LIST_MAP3: &str = "roc_builtins.list.map3";
pub const LIST_MAP_WITH_INDEX: &str = "roc_builtins.list.map_with_index";
pub const LIST_KEEP_IF: &str = "roc_builtins.list.keep_if";
pub const LIST_KEEP_OKS: &str = "roc_builtins.list.keep_oks";
@@ -821,6 +821,21 @@ pub fn types() -> MutMap<Symbol, (SolvedType, Region)> {
        )
    });

    // map3 : List a, List b, List c, (a, b, c -> d) -> List d
    add_type(Symbol::LIST_MAP3, {
        let_tvars! {a, b, c, d, cvar};

        top_level_function(
            vec![
                list_type(flex(a)),
                list_type(flex(b)),
                list_type(flex(c)),
                closure(vec![flex(a), flex(b), flex(c)], cvar, Box::new(flex(d))),
            ],
            Box::new(list_type(flex(d))),
        )
    });

    // append : List elem, elem -> List elem
    add_type(
        Symbol::LIST_APPEND,
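To make the type variables above concrete: a, b, and c are the element types of the three input lists, cvar is the closure variable, and d is the element type of the result. A hypothetical call where the element types differ (illustrative only; the syntax is an assumption, not taken from this commit):

    # Each input list may have a different element type; the closure picks the output type d.
    List.map3 [ "x", "y" ] [ 1, 2 ] [ True, False ] (\s, n, t -> { name: s, rank: n, keep: t })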
@@ -81,6 +81,7 @@ pub fn builtin_defs_map(symbol: Symbol, var_store: &mut VarStore) -> Option<Def>
        LIST_JOIN => list_join,
        LIST_MAP => list_map,
        LIST_MAP2 => list_map2,
        LIST_MAP3 => list_map3,
        LIST_MAP_WITH_INDEX => list_map_with_index,
        LIST_KEEP_IF => list_keep_if,
        LIST_KEEP_OKS => list_keep_oks,
@@ -219,6 +220,7 @@ pub fn builtin_defs(var_store: &mut VarStore) -> MutMap<Symbol, Def> {
        Symbol::LIST_JOIN => list_join,
        Symbol::LIST_MAP => list_map,
        Symbol::LIST_MAP2 => list_map2,
        Symbol::LIST_MAP3 => list_map3,
        Symbol::LIST_MAP_WITH_INDEX => list_map_with_index,
        Symbol::LIST_KEEP_IF => list_keep_if,
        Symbol::LIST_KEEP_OKS => list_keep_oks,
@@ -372,6 +374,38 @@ fn lowlevel_3(symbol: Symbol, op: LowLevel, var_store: &mut VarStore) -> Def {
    )
}

fn lowlevel_4(symbol: Symbol, op: LowLevel, var_store: &mut VarStore) -> Def {
    let arg1_var = var_store.fresh();
    let arg2_var = var_store.fresh();
    let arg3_var = var_store.fresh();
    let arg4_var = var_store.fresh();
    let ret_var = var_store.fresh();

    let body = RunLowLevel {
        op,
        args: vec![
            (arg1_var, Var(Symbol::ARG_1)),
            (arg2_var, Var(Symbol::ARG_2)),
            (arg3_var, Var(Symbol::ARG_3)),
            (arg4_var, Var(Symbol::ARG_4)),
        ],
        ret_var,
    };

    defn(
        symbol,
        vec![
            (arg1_var, Symbol::ARG_1),
            (arg2_var, Symbol::ARG_2),
            (arg3_var, Symbol::ARG_3),
            (arg4_var, Symbol::ARG_4),
        ],
        var_store,
        body,
        ret_var,
    )
}

/// Num.maxInt : Int
fn num_max_int(symbol: Symbol, var_store: &mut VarStore) -> Def {
    let int_var = var_store.fresh();
@@ -2151,6 +2185,11 @@ fn list_map2(symbol: Symbol, var_store: &mut VarStore) -> Def {
    lowlevel_3(symbol, LowLevel::ListMap2, var_store)
}

/// List.map3 : List a, List b, List c, (a, b, c -> d) -> List d
fn list_map3(symbol: Symbol, var_store: &mut VarStore) -> Def {
    lowlevel_4(symbol, LowLevel::ListMap3, var_store)
}

/// Dict.hashTestOnly : k, v -> Nat
pub fn dict_hash_test_only(symbol: Symbol, var_store: &mut VarStore) -> Def {
    lowlevel_2(symbol, LowLevel::Hash, var_store)
@ -8,9 +8,6 @@ use roc_can::operator;
|
|||
use roc_can::scope::Scope;
|
||||
use roc_collections::all::MutMap;
|
||||
use roc_module::symbol::{IdentIds, Interns, ModuleId, ModuleIds};
|
||||
use roc_parse::ast::{self, Attempting};
|
||||
use roc_parse::blankspace::space0_before;
|
||||
use roc_parse::parser::{loc, Parser, State, SyntaxError};
|
||||
use roc_problem::can::Problem;
|
||||
use roc_region::all::{Located, Region};
|
||||
use roc_types::subs::{VarStore, Variable};
|
||||
|
@ -20,25 +17,6 @@ pub fn test_home() -> ModuleId {
|
|||
ModuleIds::default().get_or_insert(&"Test".into())
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn parse_with<'a>(arena: &'a Bump, input: &'a str) -> Result<ast::Expr<'a>, SyntaxError<'a>> {
|
||||
parse_loc_with(arena, input).map(|loc_expr| loc_expr.value)
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn parse_loc_with<'a>(
|
||||
arena: &'a Bump,
|
||||
input: &'a str,
|
||||
) -> Result<Located<ast::Expr<'a>>, SyntaxError<'a>> {
|
||||
let state = State::new_in(arena, input.trim().as_bytes(), Attempting::Module);
|
||||
let parser = space0_before(loc(roc_parse::expr::expr(0)), 0);
|
||||
let answer = parser.parse(&arena, state);
|
||||
|
||||
answer
|
||||
.map(|(_, loc_expr, _)| loc_expr)
|
||||
.map_err(|(_, fail, _)| fail)
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn can_expr(expr_str: &str) -> CanExprOut {
|
||||
can_expr_with(&Bump::new(), test_home(), expr_str)
|
||||
|
@ -56,7 +34,7 @@ pub struct CanExprOut {
|
|||
|
||||
#[allow(dead_code)]
|
||||
pub fn can_expr_with(arena: &Bump, home: ModuleId, expr_str: &str) -> CanExprOut {
|
||||
let loc_expr = parse_loc_with(&arena, expr_str).unwrap_or_else(|e| {
|
||||
let loc_expr = roc_parse::test_helpers::parse_loc_with(&arena, expr_str).unwrap_or_else(|e| {
|
||||
panic!(
|
||||
"can_expr_with() got a parse error when attempting to canonicalize:\n\n{:?} {:?}",
|
||||
expr_str, e
|
||||
|
|
|
@ -4,8 +4,6 @@ extern crate pretty_assertions;
|
|||
extern crate indoc;
|
||||
extern crate bumpalo;
|
||||
extern crate roc_fmt;
|
||||
#[macro_use]
|
||||
extern crate roc_parse;
|
||||
|
||||
#[cfg(test)]
|
||||
mod test_fmt {
|
||||
|
@ -14,27 +12,15 @@ mod test_fmt {
|
|||
use roc_fmt::annotation::{Formattable, Newlines, Parens};
|
||||
use roc_fmt::def::fmt_def;
|
||||
use roc_fmt::module::fmt_module;
|
||||
use roc_parse::ast::{Attempting, Expr};
|
||||
use roc_parse::blankspace::space0_before;
|
||||
use roc_parse::module::{self, module_defs};
|
||||
use roc_parse::parser::{Parser, State, SyntaxError};
|
||||
|
||||
fn parse_with<'a>(arena: &'a Bump, input: &'a str) -> Result<Expr<'a>, SyntaxError<'a>> {
|
||||
let state = State::new_in(arena, input.trim().as_bytes(), Attempting::Module);
|
||||
let parser = space0_before(loc!(roc_parse::expr::expr(0)), 0);
|
||||
let answer = parser.parse(&arena, state);
|
||||
|
||||
answer
|
||||
.map(|(_, loc_expr, _)| loc_expr.value)
|
||||
.map_err(|(_, fail, _)| fail)
|
||||
}
|
||||
use roc_parse::parser::{Parser, State};
|
||||
|
||||
fn expr_formats_to(input: &str, expected: &str) {
|
||||
let arena = Bump::new();
|
||||
let input = input.trim_end();
|
||||
let expected = expected.trim_end();
|
||||
|
||||
match parse_with(&arena, input) {
|
||||
match roc_parse::test_helpers::parse_expr_with(&arena, input.trim()) {
|
||||
Ok(actual) => {
|
||||
let mut buf = String::new_in(&arena);
|
||||
|
||||
|
@ -55,8 +41,8 @@ mod test_fmt {
|
|||
let src = src.trim_end();
|
||||
let expected = expected.trim_end();
|
||||
|
||||
match module::header().parse(&arena, State::new_in(&arena, src.as_bytes(), Attempting::Module)) {
|
||||
Ok((_, actual, state)) => {
|
||||
match module::parse_header(&arena, State::new(src.as_bytes())) {
|
||||
Ok((actual, state)) => {
|
||||
let mut buf = String::new_in(&arena);
|
||||
|
||||
fmt_module(&mut buf, &actual);
|
||||
|
|
|
@@ -7,8 +7,8 @@ use crate::llvm::build_hash::generic_hash;
use crate::llvm::build_list::{
    allocate_list, empty_list, empty_polymorphic_list, list_append, list_concat, list_contains,
    list_get_unsafe, list_join, list_keep_errs, list_keep_if, list_keep_oks, list_len, list_map,
    list_map2, list_map_with_index, list_prepend, list_repeat, list_reverse, list_set, list_single,
    list_sum, list_walk, list_walk_backwards,
    list_map2, list_map3, list_map_with_index, list_prepend, list_repeat, list_reverse, list_set,
    list_single, list_sum, list_walk, list_walk_backwards,
};
use crate::llvm::build_str::{
    str_concat, str_count_graphemes, str_ends_with, str_from_float, str_from_int, str_from_utf8,
@@ -3743,6 +3743,38 @@ fn run_low_level<'a, 'ctx, 'env>(
                _ => unreachable!("invalid list layout"),
            }
        }
        ListMap3 => {
            debug_assert_eq!(args.len(), 4);

            let (list1, list1_layout) = load_symbol_and_layout(scope, &args[0]);
            let (list2, list2_layout) = load_symbol_and_layout(scope, &args[1]);
            let (list3, list3_layout) = load_symbol_and_layout(scope, &args[2]);

            let (func, func_layout) = load_symbol_and_layout(scope, &args[3]);

            match (list1_layout, list2_layout, list3_layout) {
                (
                    Layout::Builtin(Builtin::List(_, element1_layout)),
                    Layout::Builtin(Builtin::List(_, element2_layout)),
                    Layout::Builtin(Builtin::List(_, element3_layout)),
                ) => list_map3(
                    env,
                    layout_ids,
                    func,
                    func_layout,
                    list1,
                    list2,
                    list3,
                    element1_layout,
                    element2_layout,
                    element3_layout,
                ),
                (Layout::Builtin(Builtin::EmptyList), _, _)
                | (_, Layout::Builtin(Builtin::EmptyList), _)
                | (_, _, Layout::Builtin(Builtin::EmptyList)) => empty_list(env),
                _ => unreachable!("invalid list layout"),
            }
        }
        ListMapWithIndex => {
            // List.map : List before, (before -> after) -> List after
            debug_assert_eq!(args.len(), 2);
@@ -1305,6 +1305,114 @@ pub fn list_map2<'a, 'ctx, 'env>(
    )
}

pub fn list_map3<'a, 'ctx, 'env>(
    env: &Env<'a, 'ctx, 'env>,
    layout_ids: &mut LayoutIds<'a>,
    transform: BasicValueEnum<'ctx>,
    transform_layout: &Layout<'a>,
    list1: BasicValueEnum<'ctx>,
    list2: BasicValueEnum<'ctx>,
    list3: BasicValueEnum<'ctx>,
    element1_layout: &Layout<'a>,
    element2_layout: &Layout<'a>,
    element3_layout: &Layout<'a>,
) -> BasicValueEnum<'ctx> {
    let builder = env.builder;

    let return_layout = match transform_layout {
        Layout::FunctionPointer(_, ret) => ret,
        Layout::Closure(_, _, ret) => ret,
        _ => unreachable!("not a callable layout"),
    };

    let u8_ptr = env.context.i8_type().ptr_type(AddressSpace::Generic);

    let list1_i128 = complex_bitcast(
        env.builder,
        list1,
        env.context.i128_type().into(),
        "to_i128",
    );

    let list2_i128 = complex_bitcast(
        env.builder,
        list2,
        env.context.i128_type().into(),
        "to_i128",
    );

    let list3_i128 = complex_bitcast(
        env.builder,
        list3,
        env.context.i128_type().into(),
        "to_i128",
    );

    let transform_ptr = builder.build_alloca(transform.get_type(), "transform_ptr");
    env.builder.build_store(transform_ptr, transform);

    let argument_layouts = [
        element1_layout.clone(),
        element2_layout.clone(),
        element3_layout.clone(),
    ];
    let stepper_caller =
        build_transform_caller(env, layout_ids, transform_layout, &argument_layouts)
            .as_global_value()
            .as_pointer_value();

    let a_width = env
        .ptr_int()
        .const_int(element1_layout.stack_size(env.ptr_bytes) as u64, false);

    let b_width = env
        .ptr_int()
        .const_int(element2_layout.stack_size(env.ptr_bytes) as u64, false);

    let c_width = env
        .ptr_int()
        .const_int(element3_layout.stack_size(env.ptr_bytes) as u64, false);

    let d_width = env
        .ptr_int()
        .const_int(return_layout.stack_size(env.ptr_bytes) as u64, false);

    let alignment = return_layout.alignment_bytes(env.ptr_bytes);
    let alignment_iv = env.ptr_int().const_int(alignment as u64, false);

    let dec_a = build_dec_wrapper(env, layout_ids, element1_layout);
    let dec_b = build_dec_wrapper(env, layout_ids, element2_layout);
    let dec_c = build_dec_wrapper(env, layout_ids, element3_layout);

    let output = call_bitcode_fn(
        env,
        &[
            list1_i128,
            list2_i128,
            list3_i128,
            env.builder
                .build_bitcast(transform_ptr, u8_ptr, "to_opaque"),
            stepper_caller.into(),
            alignment_iv.into(),
            a_width.into(),
            b_width.into(),
            c_width.into(),
            d_width.into(),
            dec_a.as_global_value().as_pointer_value().into(),
            dec_b.as_global_value().as_pointer_value().into(),
            dec_c.as_global_value().as_pointer_value().into(),
        ],
        bitcode::LIST_MAP3,
    );

    complex_bitcast(
        env.builder,
        output,
        collection(env.context, env.ptr_bytes).into(),
        "from_i128",
    )
}

/// List.concat : List elem, List elem -> List elem
pub fn list_concat<'a, 'ctx, 'env>(
    env: &Env<'a, 'ctx, 'env>,
@ -22,7 +22,7 @@ use roc_mono::ir::{
|
|||
CapturedSymbols, ExternalSpecializations, PartialProc, PendingSpecialization, Proc, Procs,
|
||||
};
|
||||
use roc_mono::layout::{Layout, LayoutCache, LayoutProblem};
|
||||
use roc_parse::ast::{self, Attempting, StrLiteral, TypeAnnotation};
|
||||
use roc_parse::ast::{self, StrLiteral, TypeAnnotation};
|
||||
use roc_parse::header::{
|
||||
ExposesEntry, ImportsEntry, PackageEntry, PackageOrPath, PlatformHeader, To, TypedIdent,
|
||||
};
|
||||
|
@ -2304,8 +2304,8 @@ fn load_pkg_config<'a>(
|
|||
Ok(bytes_vec) => {
|
||||
let parse_start = SystemTime::now();
|
||||
let bytes = arena.alloc(bytes_vec);
|
||||
let parse_state = parser::State::new_in(arena, bytes, Attempting::Module);
|
||||
let parsed = roc_parse::module::header().parse(&arena, parse_state);
|
||||
let parse_state = parser::State::new(bytes);
|
||||
let parsed = roc_parse::module::parse_header(&arena, parse_state);
|
||||
let parse_header_duration = parse_start.elapsed().unwrap();
|
||||
|
||||
// Insert the first entries for this module's timings
|
||||
|
@ -2319,19 +2319,19 @@ fn load_pkg_config<'a>(
|
|||
effect_module_timing.parse_header = parse_header_duration;
|
||||
|
||||
match parsed {
|
||||
Ok((_, ast::Module::Interface { header }, _parse_state)) => {
|
||||
Ok((ast::Module::Interface { header }, _parse_state)) => {
|
||||
Err(LoadingProblem::UnexpectedHeader(format!(
|
||||
"expected platform/package module, got Interface with header\n{:?}",
|
||||
header
|
||||
)))
|
||||
}
|
||||
Ok((_, ast::Module::App { header }, _parse_state)) => {
|
||||
Ok((ast::Module::App { header }, _parse_state)) => {
|
||||
Err(LoadingProblem::UnexpectedHeader(format!(
|
||||
"expected platform/package module, got App with header\n{:?}",
|
||||
header
|
||||
)))
|
||||
}
|
||||
Ok((_, ast::Module::Platform { header }, parser_state)) => {
|
||||
Ok((ast::Module::Platform { header }, parser_state)) => {
|
||||
// make a Pkg-Config module that ultimately exposes `main` to the host
|
||||
let pkg_config_module_msg = fabricate_pkg_config_module(
|
||||
arena,
|
||||
|
@ -2359,8 +2359,8 @@ fn load_pkg_config<'a>(
|
|||
|
||||
Ok(Msg::Many(vec![effects_module_msg, pkg_config_module_msg]))
|
||||
}
|
||||
Err((_, fail, _)) => Err(LoadingProblem::ParsingFailed(
|
||||
fail.into_parse_problem(filename, bytes),
|
||||
Err(fail) => Err(LoadingProblem::ParsingFailed(
|
||||
SyntaxError::Header(fail).into_parse_problem(filename, bytes),
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
@ -2474,8 +2474,8 @@ fn parse_header<'a>(
|
|||
start_time: SystemTime,
|
||||
) -> Result<(ModuleId, Msg<'a>), LoadingProblem<'a>> {
|
||||
let parse_start = SystemTime::now();
|
||||
let parse_state = parser::State::new_in(arena, src_bytes, Attempting::Module);
|
||||
let parsed = roc_parse::module::header().parse(&arena, parse_state);
|
||||
let parse_state = parser::State::new(src_bytes);
|
||||
let parsed = roc_parse::module::parse_header(&arena, parse_state);
|
||||
let parse_header_duration = parse_start.elapsed().unwrap();
|
||||
|
||||
// Insert the first entries for this module's timings
|
||||
|
@ -2485,7 +2485,7 @@ fn parse_header<'a>(
|
|||
module_timing.parse_header = parse_header_duration;
|
||||
|
||||
match parsed {
|
||||
Ok((_, ast::Module::Interface { header }, parse_state)) => {
|
||||
Ok((ast::Module::Interface { header }, parse_state)) => {
|
||||
let header_src = unsafe {
|
||||
let chomped = src_bytes.len() - parse_state.bytes.len();
|
||||
std::str::from_utf8_unchecked(&src_bytes[..chomped])
|
||||
|
@ -2514,7 +2514,7 @@ fn parse_header<'a>(
|
|||
module_timing,
|
||||
))
|
||||
}
|
||||
Ok((_, ast::Module::App { header }, parse_state)) => {
|
||||
Ok((ast::Module::App { header }, parse_state)) => {
|
||||
let mut pkg_config_dir = filename.clone();
|
||||
pkg_config_dir.pop();
|
||||
|
||||
|
@ -2623,7 +2623,7 @@ fn parse_header<'a>(
|
|||
},
|
||||
}
|
||||
}
|
||||
Ok((_, ast::Module::Platform { header }, _parse_state)) => Ok(fabricate_effects_module(
|
||||
Ok((ast::Module::Platform { header }, _parse_state)) => Ok(fabricate_effects_module(
|
||||
arena,
|
||||
&"",
|
||||
module_ids,
|
||||
|
@ -2632,8 +2632,8 @@ fn parse_header<'a>(
|
|||
header,
|
||||
module_timing,
|
||||
)),
|
||||
Err((_, fail, _)) => Err(LoadingProblem::ParsingFailed(
|
||||
fail.into_parse_problem(filename, src_bytes),
|
||||
Err(fail) => Err(LoadingProblem::ParsingFailed(
|
||||
SyntaxError::Header(fail).into_parse_problem(filename, src_bytes),
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -28,6 +28,7 @@ pub enum LowLevel {
    ListJoin,
    ListMap,
    ListMap2,
    ListMap3,
    ListMapWithIndex,
    ListKeepIf,
    ListWalk,
@@ -911,6 +911,7 @@ define_builtins! {
        22 LIST_KEEP_ERRS: "keepErrs"
        23 LIST_MAP_WITH_INDEX: "mapWithIndex"
        24 LIST_MAP2: "map2"
        25 LIST_MAP3: "map3"
    }
    5 RESULT: "Result" => {
        0 RESULT_RESULT: "Result" imported // the Result.Result type alias
@@ -652,6 +652,7 @@ pub fn lowlevel_borrow_signature(arena: &Bump, op: LowLevel) -> &[bool] {
        ListJoin => arena.alloc_slice_copy(&[irrelevant]),
        ListMap | ListMapWithIndex => arena.alloc_slice_copy(&[owned, irrelevant]),
        ListMap2 => arena.alloc_slice_copy(&[owned, owned, irrelevant]),
        ListMap3 => arena.alloc_slice_copy(&[owned, owned, owned, irrelevant]),
        ListKeepIf | ListKeepOks | ListKeepErrs => arena.alloc_slice_copy(&[owned, borrowed]),
        ListContains => arena.alloc_slice_copy(&[borrowed, irrelevant]),
        ListWalk => arena.alloc_slice_copy(&[owned, irrelevant, owned]),
@ -589,33 +589,6 @@ impl<'a> Spaceable<'a> for Def<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
/// What we're currently attempting to parse, e.g.
|
||||
/// "currently attempting to parse a list." This helps error messages!
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub enum Attempting {
|
||||
LineComment,
|
||||
List,
|
||||
Keyword,
|
||||
StrLiteral,
|
||||
RecordLiteral,
|
||||
RecordFieldLabel,
|
||||
InterpolatedString,
|
||||
NumberLiteral,
|
||||
UnicodeEscape,
|
||||
ClosureParams,
|
||||
ClosureBody,
|
||||
Def,
|
||||
Module,
|
||||
Record,
|
||||
Identifier,
|
||||
HexDigit,
|
||||
ConcreteType,
|
||||
TypeVariable,
|
||||
WhenCondition,
|
||||
WhenBranch,
|
||||
TODO,
|
||||
}
|
||||
|
||||
impl<'a> Expr<'a> {
|
||||
pub fn loc_ref(&'a self, region: Region) -> Loc<&'a Self> {
|
||||
Loc {
|
||||
|
|
File diff suppressed because it is too large
|
@ -1,15 +1,14 @@
|
|||
use crate::ast::{AssignedField, CommentOrNewline, Def, Expr, Pattern, Spaceable, TypeAnnotation};
|
||||
use crate::blankspace::{
|
||||
line_comment, space0_after_e, space0_around_ee, space0_before_e, space0_e, space1_e,
|
||||
spaces_exactly_e,
|
||||
space0_after_e, space0_around_ee, space0_before_e, space0_e, space1_e, spaces_exactly_e,
|
||||
};
|
||||
use crate::ident::{ident, lowercase_ident, Ident};
|
||||
use crate::ident::{lowercase_ident, parse_ident_help, Ident};
|
||||
use crate::keyword;
|
||||
use crate::parser::{
|
||||
self, allocated, and_then_with_indent_level, ascii_char, backtrackable, map, newline_char,
|
||||
optional, sep_by1, sep_by1_e, specialize, specialize_ref, then, trailing_sep_by0, word1, word2,
|
||||
EExpr, EInParens, ELambda, EPattern, ERecord, EString, Either, If, List, Number, ParseResult,
|
||||
Parser, State, SyntaxError, Type, When,
|
||||
self, allocated, and_then_with_indent_level, backtrackable, map, optional, sep_by1, sep_by1_e,
|
||||
specialize, specialize_ref, then, trailing_sep_by0, word1, word2, EExpr, EInParens, ELambda,
|
||||
EPattern, ERecord, EString, Either, If, List, Number, ParseResult, Parser, State, SyntaxError,
|
||||
Type, When,
|
||||
};
|
||||
use crate::pattern::loc_closure_param;
|
||||
use crate::type_annotation;
|
||||
|
@ -20,6 +19,25 @@ use roc_region::all::{Located, Region};
|
|||
|
||||
use crate::parser::Progress::{self, *};
|
||||
|
||||
pub fn test_parse_expr<'a>(
|
||||
min_indent: u16,
|
||||
arena: &'a bumpalo::Bump,
|
||||
state: State<'a>,
|
||||
) -> Result<Located<Expr<'a>>, EExpr<'a>> {
|
||||
let parser = space0_before_e(
|
||||
loc!(|a, s| parse_expr_help(min_indent, a, s)),
|
||||
min_indent,
|
||||
EExpr::Space,
|
||||
EExpr::IndentStart,
|
||||
);
|
||||
|
||||
match parser.parse(arena, state) {
|
||||
Ok((_, expression, _)) => Ok(expression),
|
||||
Err((_, fail, _)) => Err(fail),
|
||||
}
|
||||
}
|
||||
|
||||
// public for testing purposes
|
||||
pub fn expr<'a>(min_indent: u16) -> impl Parser<'a, Expr<'a>, SyntaxError<'a>> {
|
||||
// Recursive parsers must not directly invoke functions which return (impl Parser),
|
||||
// as this causes rustc to stack overflow. Thus, parse_expr must be a
|
||||
|
@ -30,6 +48,10 @@ pub fn expr<'a>(min_indent: u16) -> impl Parser<'a, Expr<'a>, SyntaxError<'a>> {
|
|||
)
|
||||
}
|
||||
|
||||
pub fn expr_help<'a>(min_indent: u16) -> impl Parser<'a, Expr<'a>, EExpr<'a>> {
|
||||
move |arena, state: State<'a>| parse_expr_help(min_indent, arena, state)
|
||||
}
|
||||
|
||||
fn loc_expr_in_parens_help<'a>(
|
||||
min_indent: u16,
|
||||
) -> impl Parser<'a, Located<Expr<'a>>, EInParens<'a>> {
|
||||
|
@ -155,9 +177,9 @@ fn record_field_access_chain<'a>() -> impl Parser<'a, Vec<'a, &'a str>, EExpr<'a
|
|||
}
|
||||
|
||||
fn record_field_access<'a>() -> impl Parser<'a, &'a str, EExpr<'a>> {
|
||||
specialize(
|
||||
|_, r, c| EExpr::Access(r, c),
|
||||
skip_first!(ascii_char(b'.'), lowercase_ident()),
|
||||
skip_first!(
|
||||
word1(b'.', EExpr::Access),
|
||||
specialize(|_, r, c| EExpr::Access(r, c), lowercase_ident())
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -487,7 +509,7 @@ fn parse_expr_help<'a>(
|
|||
]
|
||||
.parse(arena, state)?;
|
||||
|
||||
let initial = state.clone();
|
||||
let initial = state;
|
||||
|
||||
match space0_e(min_indent, EExpr::Space, EExpr::IndentEnd).parse(arena, state) {
|
||||
Err((_, _, state)) => Ok((MadeProgress, loc_expr1.value, state)),
|
||||
|
@ -717,47 +739,6 @@ fn assigned_expr_field_to_pattern_help<'a>(
|
|||
})
|
||||
}
|
||||
|
||||
/// A def beginning with a parenthetical pattern, for example:
|
||||
///
|
||||
/// (UserId userId) = ...
|
||||
///
|
||||
/// Note: Parenthetical patterns are a shorthand convenience, and may not have type annotations.
|
||||
/// It would be too weird to parse; imagine `(UserId userId) : ...` above `(UserId userId) = ...`
|
||||
/// !!!! THIS IS NOT USED !!!!
|
||||
// fn loc_parenthetical_def<'a>(min_indent: u16) -> impl Parser<'a, Located<Expr<'a>>> {
|
||||
// move |arena, state| {
|
||||
// let (loc_tuple, state) = loc!(and!(
|
||||
// space0_after(
|
||||
// between!(
|
||||
// ascii_char(b'('),
|
||||
// space0_around(loc_pattern(min_indent), min_indent),
|
||||
// ascii_char(b')')
|
||||
// ),
|
||||
// min_indent,
|
||||
// ),
|
||||
// equals_with_indent()
|
||||
// ))
|
||||
// .parse(arena, state)?;
|
||||
|
||||
// let region = loc_tuple.region;
|
||||
// let (loc_first_pattern, equals_sign_indent) = loc_tuple.value;
|
||||
|
||||
// // Continue parsing the expression as a Def.
|
||||
// let (spaces_after_equals, state) = space0(min_indent).parse(arena, state)?;
|
||||
// let (value, state) = parse_def_expr(
|
||||
// region.start_col,
|
||||
// min_indent,
|
||||
// equals_sign_indent,
|
||||
// arena,
|
||||
// state,
|
||||
// loc_first_pattern,
|
||||
// spaces_after_equals,
|
||||
// )?;
|
||||
|
||||
// Ok((Located { value, region }, state))
|
||||
// }
|
||||
// }
|
||||
|
||||
fn parse_defs_help<'a>(
|
||||
min_indent: u16,
|
||||
) -> impl Parser<'a, Vec<'a, &'a Located<Def<'a>>>, EExpr<'a>> {
|
||||
|
@ -794,7 +775,7 @@ pub fn def<'a>(min_indent: u16) -> impl Parser<'a, Def<'a>, SyntaxError<'a>> {
|
|||
specialize(|e, _, _| SyntaxError::Expr(e), def_help(min_indent))
|
||||
}
|
||||
|
||||
fn def_help<'a>(min_indent: u16) -> impl Parser<'a, Def<'a>, EExpr<'a>> {
|
||||
pub fn def_help<'a>(min_indent: u16) -> impl Parser<'a, Def<'a>, EExpr<'a>> {
|
||||
let indented_more = min_indent + 1;
|
||||
|
||||
enum DefKind {
|
||||
|
@ -834,7 +815,7 @@ fn def_help<'a>(min_indent: u16) -> impl Parser<'a, Def<'a>, EExpr<'a>> {
|
|||
// see if there is a definition (assuming the preceding characters were a type
|
||||
// annotation
|
||||
let (_, opt_rest, state) = optional(and!(
|
||||
spaces_then_comment_or_newline_help(),
|
||||
spaces_till_end_of_line(),
|
||||
body_at_indent_help(min_indent)
|
||||
))
|
||||
.parse(arena, state)?;
|
||||
|
@ -889,20 +870,10 @@ fn pattern_help<'a>(min_indent: u16) -> impl Parser<'a, Located<Pattern<'a>>, EE
|
|||
)
|
||||
}
|
||||
|
||||
fn spaces_then_comment_or_newline_help<'a>() -> impl Parser<'a, Option<&'a str>, EExpr<'a>> {
|
||||
specialize_ref(
|
||||
EExpr::Syntax,
|
||||
skip_first!(
|
||||
zero_or_more!(ascii_char(b' ')),
|
||||
map!(
|
||||
either!(newline_char(), line_comment()),
|
||||
|either_comment_or_newline| match either_comment_or_newline {
|
||||
Either::First(_) => None,
|
||||
Either::Second(comment) => Some(comment),
|
||||
}
|
||||
)
|
||||
),
|
||||
)
|
||||
fn spaces_till_end_of_line<'a>() -> impl Parser<'a, Option<&'a str>, EExpr<'a>> {
|
||||
crate::blankspace::spaces_till_end_of_line(|r, c| {
|
||||
EExpr::Space(parser::BadInputError::HasTab, r, c)
|
||||
})
|
||||
}
|
||||
|
||||
type Body<'a> = (Located<Pattern<'a>>, Located<Expr<'a>>);
|
||||
|
@ -1193,7 +1164,7 @@ fn parse_def_signature_help<'a>(
|
|||
// Indented more beyond the original indent.
|
||||
let indented_more = original_indent + 1;
|
||||
|
||||
and!(
|
||||
let parser1 = {
|
||||
// Parse the first annotation. It doesn't need any spaces
|
||||
// around it parsed, because both the subsquent defs and the
|
||||
// final body will have space1_before on them.
|
||||
|
@ -1205,23 +1176,28 @@ fn parse_def_signature_help<'a>(
|
|||
specialize(EExpr::Type, type_annotation::located_help(indented_more)),
|
||||
min_indent,
|
||||
EExpr::Space,
|
||||
EExpr::IndentAnnotation
|
||||
EExpr::IndentAnnotation,
|
||||
),
|
||||
// The first annotation may be immediately (spaces_then_comment_or_newline())
|
||||
// followed by a body at the exact same indent_level
|
||||
// leading to an AnnotatedBody in this case
|
||||
|_progress, type_ann, indent_level| map(
|
||||
optional(and!(
|
||||
backtrackable(spaces_then_comment_or_newline_help()),
|
||||
body_at_indent_help(indent_level)
|
||||
)),
|
||||
move |opt_body| (type_ann.clone(), opt_body)
|
||||
)
|
||||
),
|
||||
|_progress, type_ann, indent_level| {
|
||||
map(
|
||||
optional(and!(
|
||||
backtrackable(spaces_till_end_of_line()),
|
||||
body_at_indent_help(indent_level)
|
||||
)),
|
||||
move |opt_body| (type_ann.clone(), opt_body),
|
||||
)
|
||||
},
|
||||
)
|
||||
};
|
||||
|
||||
let parser2 = {
|
||||
and!(
|
||||
// Optionally parse additional defs.
|
||||
zero_or_more!(backtrackable(allocated(space0_before_e(
|
||||
loc!(specialize_ref(EExpr::Syntax, def(original_indent))),
|
||||
loc!(def_help(original_indent)),
|
||||
original_indent,
|
||||
EExpr::Space,
|
||||
EExpr::IndentStart,
|
||||
|
@ -1229,15 +1205,22 @@ fn parse_def_signature_help<'a>(
|
|||
// Parse the final expression that will be returned.
|
||||
// It should be indented the same amount as the original.
|
||||
space0_before_e(
|
||||
loc!(|arena, state| parse_expr_help(original_indent, arena, state)),
|
||||
loc!(one_of![
|
||||
|arena, state| parse_expr_help(original_indent, arena, state),
|
||||
|_, state: State<'a>| Err((
|
||||
MadeProgress,
|
||||
EExpr::DefMissingFinalExpr(state.line, state.column),
|
||||
state
|
||||
)),
|
||||
]),
|
||||
original_indent,
|
||||
EExpr::Space,
|
||||
EExpr::IndentEnd,
|
||||
)
|
||||
)
|
||||
)
|
||||
.parse(arena, state)
|
||||
.map(
|
||||
};
|
||||
|
||||
and!(parser1, parser2).parse(arena, state).map(
|
||||
move |(progress, ((loc_first_annotation, opt_body), (mut defs, loc_ret)), state)| {
|
||||
let loc_first_def: Located<Def<'a>> = match opt_body {
|
||||
None => {
|
||||
|
@ -1713,7 +1696,7 @@ fn unary_negate_function_arg_help<'a>(
|
|||
fn loc_function_args_help<'a>(
|
||||
min_indent: u16,
|
||||
) -> impl Parser<'a, Vec<'a, Located<Expr<'a>>>, EExpr<'a>> {
|
||||
one_or_more_e!(
|
||||
one_or_more!(
|
||||
move |arena: &'a Bump, s| {
|
||||
map!(
|
||||
and!(
|
||||
|
@ -1966,11 +1949,11 @@ fn ident_then_args<'a>(
|
|||
}
|
||||
|
||||
fn ident_without_apply_help<'a>() -> impl Parser<'a, Expr<'a>, EExpr<'a>> {
|
||||
specialize_ref(
|
||||
EExpr::Syntax,
|
||||
then(loc!(ident()), move |arena, state, progress, loc_ident| {
|
||||
then(
|
||||
loc!(parse_ident_help),
|
||||
move |arena, state, progress, loc_ident| {
|
||||
Ok((progress, ident_to_expr(arena, loc_ident.value), state))
|
||||
}),
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -2079,7 +2062,7 @@ fn list_literal_help<'a>(min_indent: u16) -> impl Parser<'a, Expr<'a>, List<'a>>
|
|||
move |arena, state| {
|
||||
let (_, (parsed_elems, final_comments), state) = collection_trailing_sep_e!(
|
||||
word1(b'[', List::Open),
|
||||
specialize_ref(List::Syntax, loc!(expr(min_indent))),
|
||||
specialize_ref(List::Expr, loc!(expr_help(min_indent))),
|
||||
word1(b',', List::End),
|
||||
word1(b']', List::End),
|
||||
min_indent,
|
||||
|
@ -2127,7 +2110,7 @@ fn record_field_help<'a>(
|
|||
word1(b'?', ERecord::QuestionMark)
|
||||
),
|
||||
space0_before_e(
|
||||
specialize_ref(ERecord::Syntax, loc!(expr(min_indent))),
|
||||
specialize_ref(ERecord::Expr, loc!(expr_help(min_indent))),
|
||||
min_indent,
|
||||
ERecord::Space,
|
||||
ERecord::IndentEnd,
|
||||
|
@ -2162,7 +2145,7 @@ fn record_field_help<'a>(
|
|||
fn record_updateable_identifier<'a>() -> impl Parser<'a, Expr<'a>, ERecord<'a>> {
|
||||
specialize(
|
||||
|_, r, c| ERecord::Updateable(r, c),
|
||||
map_with_arena!(ident(), ident_to_expr),
|
||||
map_with_arena!(parse_ident_help, ident_to_expr),
|
||||
)
|
||||
}
|
||||
|
||||
|
|
|
@ -1,12 +1,11 @@
|
|||
use crate::blankspace::space0;
|
||||
use crate::ast::{CommentOrNewline, Spaceable, StrLiteral, TypeAnnotation};
|
||||
use crate::blankspace::space0_e;
|
||||
use crate::ident::lowercase_ident;
|
||||
use crate::module::package_name;
|
||||
use crate::parser::{ascii_char, optional, Either, Parser, Progress::*, State, SyntaxError};
|
||||
use crate::string_literal;
|
||||
use crate::{
|
||||
ast::{CommentOrNewline, Spaceable, StrLiteral, TypeAnnotation},
|
||||
parser::specialize,
|
||||
use crate::parser::Progress::{self, *};
|
||||
use crate::parser::{
|
||||
specialize, word1, EPackageEntry, EPackageName, EPackageOrPath, Parser, State,
|
||||
};
|
||||
use crate::string_literal;
|
||||
use bumpalo::collections::Vec;
|
||||
use inlinable_string::InlinableString;
|
||||
use roc_region::all::Loc;
|
||||
|
@ -242,18 +241,32 @@ impl<'a> Spaceable<'a> for PackageEntry<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn package_entry<'a>() -> impl Parser<'a, PackageEntry<'a>, SyntaxError<'a>> {
|
||||
pub fn package_entry<'a>() -> impl Parser<'a, PackageEntry<'a>, EPackageEntry<'a>> {
|
||||
move |arena, state| {
|
||||
// You may optionally have a package shorthand,
|
||||
// e.g. "uc" in `uc: roc/unicode 1.0.0`
|
||||
//
|
||||
// (Indirect dependencies don't have a shorthand.)
|
||||
let (_, opt_shorthand, state) = optional(and!(
|
||||
skip_second!(lowercase_ident(), ascii_char(b':')),
|
||||
space0(1)
|
||||
let min_indent = 1;
|
||||
|
||||
let (_, opt_shorthand, state) = maybe!(and!(
|
||||
skip_second!(
|
||||
specialize(|_, r, c| EPackageEntry::Shorthand(r, c), lowercase_ident()),
|
||||
word1(b':', EPackageEntry::Colon)
|
||||
),
|
||||
space0_e(
|
||||
min_indent,
|
||||
EPackageEntry::Space,
|
||||
EPackageEntry::IndentPackageOrPath
|
||||
)
|
||||
))
|
||||
.parse(arena, state)?;
|
||||
|
||||
let (_, package_or_path, state) = loc!(specialize(
|
||||
EPackageEntry::BadPackageOrPath,
|
||||
package_or_path()
|
||||
))
|
||||
.parse(arena, state)?;
|
||||
let (_, package_or_path, state) = loc!(package_or_path()).parse(arena, state)?;
|
||||
|
||||
let entry = match opt_shorthand {
|
||||
Some((shorthand, spaces_after_shorthand)) => PackageEntry::Entry {
|
||||
|
@ -272,27 +285,117 @@ pub fn package_entry<'a>() -> impl Parser<'a, PackageEntry<'a>, SyntaxError<'a>>
|
|||
}
|
||||
}
|
||||
|
||||
pub fn package_or_path<'a>() -> impl Parser<'a, PackageOrPath<'a>, SyntaxError<'a>> {
|
||||
map!(
|
||||
either!(
|
||||
specialize(
|
||||
|e, r, c| SyntaxError::Expr(crate::parser::EExpr::Str(e, r, c)),
|
||||
string_literal::parse()
|
||||
),
|
||||
and!(
|
||||
package_name(),
|
||||
skip_first!(one_or_more!(ascii_char(b' ')), package_version())
|
||||
)
|
||||
pub fn package_or_path<'a>() -> impl Parser<'a, PackageOrPath<'a>, EPackageOrPath<'a>> {
|
||||
one_of![
|
||||
map!(
|
||||
specialize(EPackageOrPath::BadPath, string_literal::parse()),
|
||||
PackageOrPath::Path
|
||||
),
|
||||
|answer| {
|
||||
match answer {
|
||||
Either::First(str_literal) => PackageOrPath::Path(str_literal),
|
||||
Either::Second((name, version)) => PackageOrPath::Package(name, version),
|
||||
}
|
||||
}
|
||||
)
|
||||
map!(
|
||||
and!(
|
||||
specialize(EPackageOrPath::BadPackage, package_name()),
|
||||
skip_first!(skip_spaces(), package_version())
|
||||
),
|
||||
|(name, version)| { PackageOrPath::Package(name, version) }
|
||||
)
|
||||
]
|
||||
}
|
||||
|
||||
fn package_version<'a>() -> impl Parser<'a, Version<'a>, SyntaxError<'a>> {
|
||||
fn skip_spaces<'a, T>() -> impl Parser<'a, (), T>
|
||||
where
|
||||
T: 'a,
|
||||
{
|
||||
|_, mut state: State<'a>| {
|
||||
let mut chomped = 0;
|
||||
let mut it = state.bytes.iter();
|
||||
|
||||
while let Some(b' ') = it.next() {
|
||||
chomped += 1;
|
||||
}
|
||||
|
||||
if chomped == 0 {
|
||||
Ok((NoProgress, (), state))
|
||||
} else {
|
||||
state.column += chomped;
|
||||
state.bytes = it.as_slice();
|
||||
|
||||
Ok((MadeProgress, (), state))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn package_version<'a, T>() -> impl Parser<'a, Version<'a>, T>
|
||||
where
|
||||
T: 'a,
|
||||
{
|
||||
move |_, _| todo!("TODO parse package version")
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
pub fn package_name<'a>() -> impl Parser<'a, PackageName<'a>, EPackageName> {
|
||||
use encode_unicode::CharExt;
|
||||
// e.g. rtfeldman/blah
|
||||
//
|
||||
// Package names and accounts can be capitalized and can contain dashes.
|
||||
// They cannot contain underscores or other special characters.
|
||||
// They must be ASCII.
|
||||
|
||||
|_, mut state: State<'a>| match chomp_package_part(state.bytes) {
|
||||
Err(progress) => Err((
|
||||
progress,
|
||||
EPackageName::Account(state.line, state.column),
|
||||
state,
|
||||
)),
|
||||
Ok(account) => {
|
||||
let mut chomped = account.len();
|
||||
if let Ok(('/', width)) = char::from_utf8_slice_start(&state.bytes[chomped..]) {
|
||||
chomped += width;
|
||||
match chomp_package_part(&state.bytes[chomped..]) {
|
||||
Err(progress) => Err((
|
||||
progress,
|
||||
EPackageName::Pkg(state.line, state.column + chomped as u16),
|
||||
state,
|
||||
)),
|
||||
Ok(pkg) => {
|
||||
chomped += pkg.len();
|
||||
|
||||
state.column += chomped as u16;
|
||||
state.bytes = &state.bytes[chomped..];
|
||||
|
||||
let value = PackageName { account, pkg };
|
||||
Ok((MadeProgress, value, state))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
Err((
|
||||
MadeProgress,
|
||||
EPackageName::MissingSlash(state.line, state.column + chomped as u16),
|
||||
state,
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn chomp_package_part(buffer: &[u8]) -> Result<&str, Progress> {
|
||||
use encode_unicode::CharExt;
|
||||
|
||||
let mut chomped = 0;
|
||||
|
||||
while let Ok((ch, width)) = char::from_utf8_slice_start(&buffer[chomped..]) {
|
||||
if ch == '-' || ch.is_ascii_alphanumeric() {
|
||||
chomped += width;
|
||||
} else {
|
||||
// we're done
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if chomped == 0 {
|
||||
Err(Progress::NoProgress)
|
||||
} else {
|
||||
let name = unsafe { std::str::from_utf8_unchecked(&buffer[..chomped]) };
|
||||
|
||||
Ok(name)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,14 +1,7 @@
|
|||
use crate::ast::Attempting;
|
||||
use crate::keyword;
|
||||
use crate::parser::Progress::{self, *};
|
||||
use crate::parser::{
|
||||
peek_utf8_char, unexpected, BadInputError, Col, EExpr, ParseResult, Parser, Row, State,
|
||||
SyntaxError,
|
||||
};
|
||||
use bumpalo::collections::string::String;
|
||||
use crate::parser::{BadInputError, Col, EExpr, ParseResult, Parser, Row, State};
|
||||
use bumpalo::collections::vec::Vec;
|
||||
use bumpalo::Bump;
|
||||
use roc_region::all::Region;
|
||||
|
||||
/// The parser accepts all of these in any position where any one of them could
|
||||
/// appear. This way, canonicalization can give more helpful error messages like
|
||||
|
@ -61,82 +54,43 @@ impl<'a> Ident<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn ident<'a>() -> impl Parser<'a, Ident<'a>, SyntaxError<'a>> {
|
||||
crate::parser::specialize(|e, _, _| SyntaxError::Expr(e), parse_ident_help)
|
||||
}
|
||||
|
||||
pub fn global_tag_or_ident<'a, F>(pred: F) -> impl Parser<'a, &'a str, SyntaxError<'a>>
|
||||
where
|
||||
F: Fn(char) -> bool,
|
||||
{
|
||||
move |arena, mut state: State<'a>| {
|
||||
// pred will determine if this is a tag or ident (based on capitalization)
|
||||
let (first_letter, bytes_parsed) = match peek_utf8_char(&state) {
|
||||
Ok((first_letter, bytes_parsed)) => {
|
||||
if !pred(first_letter) {
|
||||
return Err(unexpected(0, Attempting::RecordFieldLabel, state));
|
||||
}
|
||||
|
||||
(first_letter, bytes_parsed)
|
||||
}
|
||||
Err(reason) => return state.fail(arena, NoProgress, reason),
|
||||
};
|
||||
|
||||
let mut buf = String::with_capacity_in(1, arena);
|
||||
|
||||
buf.push(first_letter);
|
||||
|
||||
state = state.advance_without_indenting(bytes_parsed)?;
|
||||
|
||||
while !state.bytes.is_empty() {
|
||||
match peek_utf8_char(&state) {
|
||||
Ok((ch, bytes_parsed)) => {
|
||||
// After the first character, only these are allowed:
|
||||
//
|
||||
// * Unicode alphabetic chars - you might include `鹏` if that's clear to your readers
|
||||
// * ASCII digits - e.g. `1` but not `¾`, both of which pass .is_numeric()
|
||||
// * A ':' indicating the end of the field
|
||||
if ch.is_alphabetic() || ch.is_ascii_digit() {
|
||||
buf.push(ch);
|
||||
|
||||
state = state.advance_without_indenting(bytes_parsed)?;
|
||||
} else {
|
||||
// This is the end of the field. We're done!
|
||||
break;
|
||||
}
|
||||
}
|
||||
Err(reason) => return state.fail(arena, MadeProgress, reason),
|
||||
};
|
||||
}
|
||||
|
||||
Ok((MadeProgress, buf.into_bump_str(), state))
|
||||
}
|
||||
}
|
||||
|
||||
/// This could be:
|
||||
///
|
||||
/// * A record field, e.g. "email" in `.email` or in `email:`
|
||||
/// * A named pattern match, e.g. "foo" in `foo =` or `foo ->` or `\foo ->`
|
||||
pub fn lowercase_ident<'a>() -> impl Parser<'a, &'a str, SyntaxError<'a>> {
|
||||
pub fn lowercase_ident<'a>() -> impl Parser<'a, &'a str, ()> {
|
||||
move |_, state: State<'a>| match chomp_lowercase_part(state.bytes) {
|
||||
Err(progress) => Err((progress, (), state)),
|
||||
Ok(ident) => {
|
||||
if crate::keyword::KEYWORDS.iter().any(|kw| &ident == kw) {
|
||||
Err((NoProgress, (), state))
|
||||
} else {
|
||||
let width = ident.len();
|
||||
match state.advance_without_indenting_ee(width, |_, _| ()) {
|
||||
Ok(state) => Ok((MadeProgress, ident, state)),
|
||||
Err(bad) => Err(bad),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn tag_name<'a>() -> impl Parser<'a, &'a str, ()> {
|
||||
move |arena, state: State<'a>| {
|
||||
let (progress, ident, state) =
|
||||
global_tag_or_ident(|first_char| first_char.is_lowercase()).parse(arena, state)?;
|
||||
|
||||
// to parse a valid ident, progress must be made
|
||||
debug_assert_eq!(progress, MadeProgress);
|
||||
|
||||
if (ident == keyword::IF)
|
||||
|| (ident == keyword::THEN)
|
||||
|| (ident == keyword::ELSE)
|
||||
|| (ident == keyword::WHEN)
|
||||
|| (ident == keyword::IS)
|
||||
|| (ident == keyword::AS)
|
||||
{
|
||||
// TODO Calculate the correct region based on state
|
||||
let region = Region::zero();
|
||||
Err((MadeProgress, SyntaxError::ReservedKeyword(region), state))
|
||||
if state.bytes.starts_with(b"@") {
|
||||
match chomp_private_tag(state.bytes, state.line, state.column) {
|
||||
Err(BadIdent::Start(_, _)) => Err((NoProgress, (), state)),
|
||||
Err(_) => Err((MadeProgress, (), state)),
|
||||
Ok(ident) => {
|
||||
let width = ident.len();
|
||||
match state.advance_without_indenting_ee(width, |_, _| ()) {
|
||||
Ok(state) => Ok((MadeProgress, ident, state)),
|
||||
Err(bad) => Err(bad),
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
Ok((MadeProgress, ident, state))
|
||||
uppercase_ident().parse(arena, state)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -146,30 +100,34 @@ pub fn lowercase_ident<'a>() -> impl Parser<'a, &'a str, SyntaxError<'a>> {
|
|||
/// * A module name
|
||||
/// * A type name
|
||||
/// * A global tag
|
||||
pub fn uppercase_ident<'a>() -> impl Parser<'a, &'a str, SyntaxError<'a>> {
|
||||
global_tag_or_ident(|first_char| first_char.is_uppercase())
|
||||
}
|
||||
|
||||
pub fn unqualified_ident<'a>() -> impl Parser<'a, &'a str, SyntaxError<'a>> {
|
||||
global_tag_or_ident(|first_char| first_char.is_alphabetic())
|
||||
}
|
||||
|
||||
pub fn join_module_parts<'a>(arena: &'a Bump, module_parts: &[&str]) -> &'a str {
|
||||
let capacity = module_parts.len() * 3; // Module parts tend to be 3+ characters.
|
||||
let mut buf = String::with_capacity_in(capacity, arena);
|
||||
let mut any_parts_added = false;
|
||||
|
||||
for part in module_parts {
|
||||
if any_parts_added {
|
||||
buf.push('.');
|
||||
} else {
|
||||
any_parts_added = true;
|
||||
pub fn uppercase_ident<'a>() -> impl Parser<'a, &'a str, ()> {
|
||||
move |_, state: State<'a>| match chomp_uppercase_part(state.bytes) {
|
||||
Err(progress) => Err((progress, (), state)),
|
||||
Ok(ident) => {
|
||||
let width = ident.len();
|
||||
match state.advance_without_indenting_ee(width, |_, _| ()) {
|
||||
Ok(state) => Ok((MadeProgress, ident, state)),
|
||||
Err(bad) => Err(bad),
|
||||
}
|
||||
}
|
||||
|
||||
buf.push_str(part);
|
||||
}
|
||||
}
|
||||
|
||||
buf.into_bump_str()
|
||||
pub fn unqualified_ident<'a>() -> impl Parser<'a, &'a str, ()> {
|
||||
move |_, state: State<'a>| match chomp_part(|c| c.is_alphabetic(), state.bytes) {
|
||||
Err(progress) => Err((progress, (), state)),
|
||||
Ok(ident) => {
|
||||
if crate::keyword::KEYWORDS.iter().any(|kw| &ident == kw) {
|
||||
Err((MadeProgress, (), state))
|
||||
} else {
|
||||
let width = ident.len();
|
||||
match state.advance_without_indenting_ee(width, |_, _| ()) {
|
||||
Ok(state) => Ok((MadeProgress, ident, state)),
|
||||
Err(bad) => Err(bad),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! advance_state {
|
||||
|
@ -184,10 +142,10 @@ pub fn parse_ident_help<'a>(
|
|||
arena: &'a Bump,
|
||||
state: State<'a>,
|
||||
) -> ParseResult<'a, Ident<'a>, EExpr<'a>> {
|
||||
let initial = state.clone();
|
||||
let initial = state;
|
||||
|
||||
match parse_ident_help_help(arena, state) {
|
||||
Ok((progress, (ident, _), state)) => {
|
||||
Ok((progress, ident, state)) => {
|
||||
if let Ident::Access { module_name, parts } = ident {
|
||||
if module_name.is_empty() {
|
||||
if let Some(first) = parts.first() {
|
||||
|
@ -212,7 +170,7 @@ pub fn parse_ident_help<'a>(
|
|||
Err((MadeProgress, fail, state)) => match fail {
|
||||
BadIdent::Start(r, c) => Err((NoProgress, EExpr::Start(r, c), state)),
|
||||
BadIdent::Space(e, r, c) => Err((NoProgress, EExpr::Space(e, r, c), state)),
|
||||
_ => malformed_identifier(initial.bytes, fail, arena, state),
|
||||
_ => malformed_identifier(initial.bytes, fail, state),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
@ -220,294 +178,367 @@ pub fn parse_ident_help<'a>(
|
|||
fn malformed_identifier<'a>(
|
||||
initial_bytes: &'a [u8],
|
||||
problem: BadIdent,
|
||||
_arena: &'a Bump,
|
||||
mut state: State<'a>,
|
||||
) -> ParseResult<'a, Ident<'a>, EExpr<'a>> {
|
||||
// skip forward to the next non-identifier character
|
||||
while !state.bytes.is_empty() {
|
||||
match peek_utf8_char(&state) {
|
||||
Ok((ch, bytes_parsed)) => {
|
||||
// We can't use ch.is_alphanumeric() here because that passes for
|
||||
// things that are "numeric" but not ASCII digits, like `¾`
|
||||
if ch == '.' || ch == '_' || ch.is_alphabetic() || ch.is_ascii_digit() {
|
||||
state = state.advance_without_indenting_ee(bytes_parsed, |r, c| {
|
||||
EExpr::Space(crate::parser::BadInputError::LineTooLong, r, c)
|
||||
})?;
|
||||
continue;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
Err(_reason) => {
|
||||
break;
|
||||
}
|
||||
let chomped = chomp_malformed(state.bytes);
|
||||
let delta = initial_bytes.len() - state.bytes.len();
|
||||
let parsed_str = unsafe { std::str::from_utf8_unchecked(&initial_bytes[..chomped + delta]) };
|
||||
|
||||
state = state.advance_without_indenting_ee(chomped, |r, c| {
|
||||
EExpr::Space(crate::parser::BadInputError::LineTooLong, r, c)
|
||||
})?;
|
||||
|
||||
Ok((MadeProgress, Ident::Malformed(parsed_str, problem), state))
|
||||
}
|
||||
|
||||
/// skip forward to the next non-identifier character
|
||||
pub fn chomp_malformed(bytes: &[u8]) -> usize {
|
||||
use encode_unicode::CharExt;
|
||||
let mut chomped = 0;
|
||||
while let Ok((ch, width)) = char::from_utf8_slice_start(&bytes[chomped..]) {
|
||||
// We can't use ch.is_alphanumeric() here because that passes for
|
||||
// things that are "numeric" but not ASCII digits, like `¾`
|
||||
if ch == '.' || ch == '_' || ch.is_alphabetic() || ch.is_ascii_digit() {
|
||||
chomped += width;
|
||||
continue;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
let parsed = &initial_bytes[..(initial_bytes.len() - state.bytes.len())];
|
||||
|
||||
let parsed_str = unsafe { std::str::from_utf8_unchecked(parsed) };
|
||||
|
||||
Ok((MadeProgress, Ident::Malformed(parsed_str, problem), state))
|
||||
chomped
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub enum BadIdent {
|
||||
Start(Row, Col),
|
||||
Space(BadInputError, Row, Col),
|
||||
|
||||
Underscore(Row, Col),
|
||||
QualifiedTag(Row, Col),
|
||||
PrivateTagNotUppercase(Row, Col),
|
||||
PartStartsWithNumber(Row, Col),
|
||||
WeirdAccessor(Row, Col),
|
||||
PrivateTagFieldAccess(Row, Col),
|
||||
|
||||
WeirdDotAccess(Row, Col),
|
||||
WeirdDotQualified(Row, Col),
|
||||
DoubleDot(Row, Col),
|
||||
StrayDot(Row, Col),
|
||||
BadPrivateTag(Row, Col),
|
||||
}
|
||||
|
||||
/// Parse an identifier into a string.
///
/// This is separate from the `ident` Parser because string interpolation
/// wants to use it this way.
pub fn parse_ident_help_help<'a>(
fn chomp_lowercase_part(buffer: &[u8]) -> Result<&str, Progress> {
    chomp_part(|c: char| c.is_lowercase(), buffer)
}

fn chomp_uppercase_part(buffer: &[u8]) -> Result<&str, Progress> {
    chomp_part(|c: char| c.is_uppercase(), buffer)
}

#[inline(always)]
fn chomp_part<F>(leading_is_good: F, buffer: &[u8]) -> Result<&str, Progress>
where
    F: Fn(char) -> bool,
{
    use encode_unicode::CharExt;

    let mut chomped = 0;

    if let Ok((ch, width)) = char::from_utf8_slice_start(&buffer[chomped..]) {
        if leading_is_good(ch) {
            chomped += width;
        } else {
            return Err(NoProgress);
        }
    }

    while let Ok((ch, width)) = char::from_utf8_slice_start(&buffer[chomped..]) {
        if ch.is_alphabetic() || ch.is_ascii_digit() {
            chomped += width;
        } else {
            // we're done
            break;
        }
    }

    if chomped == 0 {
        Err(NoProgress)
    } else {
        let name = unsafe { std::str::from_utf8_unchecked(&buffer[..chomped]) };

        Ok(name)
    }
}

/// a `.foo` accessor function
fn chomp_accessor(buffer: &[u8], row: Row, col: Col) -> Result<&str, BadIdent> {
    // assumes the leading `.` has been chomped already
    use encode_unicode::CharExt;

    match chomp_lowercase_part(buffer) {
        Ok(name) => {
            let chomped = name.len();

            if let Ok(('.', _)) = char::from_utf8_slice_start(&buffer[chomped..]) {
                Err(BadIdent::WeirdAccessor(row, col))
            } else {
                Ok(name)
            }
        }
        Err(_) => {
            // we've already made progress with the initial `.`
            Err(BadIdent::StrayDot(row, col + 1))
        }
    }
}

/// a `@Token` private tag
fn chomp_private_tag(buffer: &[u8], row: Row, col: Col) -> Result<&str, BadIdent> {
    // assumes the leading `@` has NOT been chomped already
    debug_assert_eq!(buffer.get(0), Some(&b'@'));
    use encode_unicode::CharExt;

    match chomp_uppercase_part(&buffer[1..]) {
        Ok(name) => {
            let width = 1 + name.len();

            if let Ok(('.', _)) = char::from_utf8_slice_start(&buffer[width..]) {
                Err(BadIdent::BadPrivateTag(row, col + width as u16))
            } else {
                let value = unsafe { std::str::from_utf8_unchecked(&buffer[..width]) };
                Ok(value)
            }
        }
        Err(_) => Err(BadIdent::BadPrivateTag(row, col + 1)),
    }
}
|
||||
|
||||
fn chomp_identifier_chain<'a>(
|
||||
arena: &'a Bump,
|
||||
mut state: State<'a>,
|
||||
) -> ParseResult<'a, (Ident<'a>, Option<char>), BadIdent> {
|
||||
let mut part_buf = String::new_in(arena); // The current "part" (parts are dot-separated.)
|
||||
let mut capitalized_parts: Vec<&'a str> = Vec::new_in(arena);
|
||||
let mut noncapitalized_parts: Vec<&'a str> = Vec::new_in(arena);
|
||||
let mut is_capitalized;
|
||||
let is_accessor_fn;
|
||||
let mut is_private_tag = false;
|
||||
buffer: &'a [u8],
|
||||
row: Row,
|
||||
col: Col,
|
||||
) -> Result<(u16, Ident<'a>), (u16, BadIdent)> {
|
||||
use encode_unicode::CharExt;
|
||||
|
||||
// Identifiers and accessor functions must start with either a letter or a dot.
|
||||
// If this starts with neither, it must be something else!
|
||||
match peek_utf8_char(&state) {
|
||||
Ok((first_ch, bytes_parsed)) => {
|
||||
if first_ch.is_alphabetic() {
|
||||
part_buf.push(first_ch);
|
||||
let first_is_uppercase;
|
||||
let mut chomped = 0;
|
||||
|
||||
is_capitalized = first_ch.is_uppercase();
|
||||
is_accessor_fn = false;
|
||||
match char::from_utf8_slice_start(&buffer[chomped..]) {
|
||||
Ok((ch, width)) => match ch {
|
||||
'.' => match chomp_accessor(&buffer[1..], row, col) {
|
||||
Ok(accessor) => {
|
||||
let bytes_parsed = 1 + accessor.len();
|
||||
|
||||
state = advance_state!(state, bytes_parsed)?;
|
||||
} else if first_ch == '.' {
|
||||
is_capitalized = false;
|
||||
is_accessor_fn = true;
|
||||
|
||||
state = advance_state!(state, bytes_parsed)?;
|
||||
} else if first_ch == '@' {
|
||||
state = advance_state!(state, bytes_parsed)?;
|
||||
|
||||
// '@' must always be followed by a capital letter!
|
||||
match peek_utf8_char(&state) {
|
||||
Ok((next_ch, next_bytes_parsed)) => {
|
||||
if next_ch.is_uppercase() {
|
||||
state = advance_state!(state, next_bytes_parsed)?;
|
||||
|
||||
part_buf.push('@');
|
||||
part_buf.push(next_ch);
|
||||
|
||||
is_private_tag = true;
|
||||
is_capitalized = true;
|
||||
is_accessor_fn = false;
|
||||
} else {
|
||||
return Err((
|
||||
MadeProgress,
|
||||
BadIdent::PrivateTagNotUppercase(state.line, state.column),
|
||||
state,
|
||||
));
|
||||
}
|
||||
}
|
||||
Err(_reason) => {
|
||||
return Err((
|
||||
MadeProgress,
|
||||
BadIdent::PrivateTagNotUppercase(state.line, state.column),
|
||||
state,
|
||||
));
|
||||
}
|
||||
return Ok((bytes_parsed as u16, Ident::AccessorFunction(accessor)));
|
||||
}
|
||||
} else {
|
||||
return Err((NoProgress, BadIdent::Start(state.line, state.column), state));
|
||||
Err(fail) => return Err((1, fail)),
|
||||
},
|
||||
'@' => match chomp_private_tag(buffer, row, col) {
|
||||
Ok(tagname) => {
|
||||
let bytes_parsed = tagname.len();
|
||||
|
||||
return Ok((bytes_parsed as u16, Ident::PrivateTag(tagname)));
|
||||
}
|
||||
Err(fail) => return Err((1, fail)),
|
||||
},
|
||||
c if c.is_alphabetic() => {
|
||||
// fall through
|
||||
chomped += width;
|
||||
first_is_uppercase = c.is_uppercase();
|
||||
}
|
||||
}
|
||||
Err(_reason) => {
|
||||
return Err((NoProgress, BadIdent::Start(state.line, state.column), state));
|
||||
_ => {
|
||||
return Err((0, BadIdent::Start(row, col)));
|
||||
}
|
||||
},
|
||||
Err(_) => return Err((0, BadIdent::Start(row, col))),
|
||||
}
|
||||
|
||||
while let Ok((ch, width)) = char::from_utf8_slice_start(&buffer[chomped..]) {
|
||||
if ch.is_alphabetic() || ch.is_ascii_digit() {
|
||||
chomped += width;
|
||||
} else {
|
||||
// we're done
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
while !state.bytes.is_empty() {
|
||||
match peek_utf8_char(&state) {
|
||||
Ok((ch, bytes_parsed)) => {
|
||||
// After the first character, only these are allowed:
|
||||
//
|
||||
// * Unicode alphabetic chars - you might name a variable `鹏` if that's clear to your readers
|
||||
// * ASCII digits - e.g. `1` but not `¾`, both of which pass .is_numeric()
|
||||
// * A dot ('.')
|
||||
if ch.is_alphabetic() {
|
||||
if part_buf.is_empty() {
|
||||
// Capitalization is determined by the first character in the part.
|
||||
is_capitalized = ch.is_uppercase();
|
||||
}
|
||||
|
||||
part_buf.push(ch);
|
||||
} else if ch.is_ascii_digit() {
|
||||
// Parts may not start with numbers!
|
||||
if part_buf.is_empty() {
|
||||
return Err((
|
||||
MadeProgress,
|
||||
BadIdent::PartStartsWithNumber(state.line, state.column),
|
||||
state,
|
||||
));
|
||||
}
|
||||
|
||||
part_buf.push(ch);
|
||||
} else if ch == '.' {
|
||||
// There are two posssible errors here:
|
||||
//
|
||||
// 1. Having two consecutive dots is an error.
|
||||
// 2. Having capitalized parts after noncapitalized (e.g. `foo.Bar`) is an error.
|
||||
if part_buf.is_empty() {
|
||||
return Err((
|
||||
MadeProgress,
|
||||
BadIdent::DoubleDot(state.line, state.column),
|
||||
state,
|
||||
));
|
||||
}
|
||||
|
||||
if is_capitalized && !noncapitalized_parts.is_empty() {
|
||||
return Err((
|
||||
MadeProgress,
|
||||
BadIdent::WeirdDotQualified(state.line, state.column),
|
||||
state,
|
||||
));
|
||||
}
|
||||
|
||||
if is_capitalized {
|
||||
capitalized_parts.push(part_buf.into_bump_str());
|
||||
} else {
|
||||
noncapitalized_parts.push(part_buf.into_bump_str());
|
||||
}
|
||||
|
||||
// Now that we've recorded the contents of the current buffer, reset it.
|
||||
part_buf = String::new_in(arena);
|
||||
} else if ch == '_' {
|
||||
// we don't allow underscores in the middle of an identifier
|
||||
// but still parse them (and generate a malformed identifier)
|
||||
// to give good error messages for this case
|
||||
state = advance_state!(state, bytes_parsed)?;
|
||||
return Err((
|
||||
MadeProgress,
|
||||
BadIdent::Underscore(state.line, state.column),
|
||||
state,
|
||||
));
|
||||
} else {
|
||||
// This must be the end of the identifier. We're done!
|
||||
|
||||
break;
|
||||
if let Ok(('.', _)) = char::from_utf8_slice_start(&buffer[chomped..]) {
|
||||
let module_name = if first_is_uppercase {
|
||||
match chomp_module_chain(&buffer[chomped..]) {
|
||||
Ok(width) => {
|
||||
chomped += width as usize;
|
||||
unsafe { std::str::from_utf8_unchecked(&buffer[..chomped]) }
|
||||
}
|
||||
|
||||
state = advance_state!(state, bytes_parsed)?;
|
||||
}
|
||||
Err(_reason) => {
|
||||
//
|
||||
return Err((
|
||||
MadeProgress,
|
||||
BadIdent::Start(state.line, state.column),
|
||||
state,
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if part_buf.is_empty() {
|
||||
// We probably had a trailing dot, e.g. `Foo.bar.` - this is malformed!
|
||||
//
|
||||
// This condition might also occur if we encounter a malformed accessor like `.|`
|
||||
//
|
||||
// If we made it this far and don't have a next_char, then necessarily
|
||||
// we have consumed a '.' char previously.
|
||||
let fail = if noncapitalized_parts.is_empty() {
|
||||
if capitalized_parts.is_empty() {
|
||||
BadIdent::StrayDot(state.line, state.column)
|
||||
} else {
|
||||
BadIdent::WeirdDotQualified(state.line, state.column)
|
||||
Err(MadeProgress) => todo!(),
|
||||
Err(NoProgress) => unsafe { std::str::from_utf8_unchecked(&buffer[..chomped]) },
|
||||
}
|
||||
} else {
|
||||
BadIdent::WeirdDotAccess(state.line, state.column)
|
||||
""
|
||||
};
|
||||
|
||||
return Err((MadeProgress, fail, state));
|
||||
}
|
||||
let mut parts = Vec::with_capacity_in(4, arena);
|
||||
|
||||
// Record the final parts.
|
||||
if is_capitalized {
|
||||
capitalized_parts.push(part_buf.into_bump_str());
|
||||
} else {
|
||||
noncapitalized_parts.push(part_buf.into_bump_str());
|
||||
}
|
||||
|
||||
let answer = if is_accessor_fn {
|
||||
// Handle accessor functions first because they have the strictest requirements.
|
||||
// Accessor functions may have exactly 1 noncapitalized part, and no capitalzed parts.
|
||||
if capitalized_parts.is_empty() && noncapitalized_parts.len() == 1 && !is_private_tag {
|
||||
let value = noncapitalized_parts.iter().next().unwrap();
|
||||
|
||||
Ident::AccessorFunction(value)
|
||||
} else {
|
||||
return Err((
|
||||
MadeProgress,
|
||||
BadIdent::WeirdAccessor(state.line, state.column),
|
||||
state,
|
||||
));
|
||||
if !first_is_uppercase {
|
||||
let first_part = unsafe { std::str::from_utf8_unchecked(&buffer[..chomped]) };
|
||||
parts.push(first_part);
|
||||
}
|
||||
} else if noncapitalized_parts.is_empty() {
|
||||
// We have capitalized parts only, so this must be a tag.
|
||||
match capitalized_parts.first() {
|
||||
Some(value) => {
|
||||
if capitalized_parts.len() == 1 {
|
||||
if is_private_tag {
|
||||
Ident::PrivateTag(value)
|
||||
} else {
|
||||
Ident::GlobalTag(value)
|
||||
|
||||
match chomp_access_chain(&buffer[chomped..], &mut parts) {
|
||||
Ok(width) => {
|
||||
chomped += width as usize;
|
||||
|
||||
let ident = Ident::Access {
|
||||
module_name,
|
||||
parts: parts.into_bump_slice(),
|
||||
};
|
||||
|
||||
Ok((chomped as u16, ident))
|
||||
}
|
||||
Err(0) if !module_name.is_empty() => Err((
|
||||
chomped as u16,
|
||||
BadIdent::QualifiedTag(row, chomped as u16 + col),
|
||||
)),
|
||||
Err(1) if parts.is_empty() => Err((
|
||||
chomped as u16 + 1,
|
||||
BadIdent::WeirdDotQualified(row, chomped as u16 + col + 1),
|
||||
)),
|
||||
Err(width) => Err((
|
||||
chomped as u16 + width,
|
||||
BadIdent::WeirdDotAccess(row, chomped as u16 + col + width),
|
||||
)),
|
||||
}
|
||||
} else if let Ok(('_', _)) = char::from_utf8_slice_start(&buffer[chomped..]) {
|
||||
// we don't allow underscores in the middle of an identifier
|
||||
// but still parse them (and generate a malformed identifier)
|
||||
// to give good error messages for this case
|
||||
Err((
|
||||
chomped as u16 + 1,
|
||||
BadIdent::Underscore(row, col + chomped as u16 + 1),
|
||||
))
|
||||
} else if first_is_uppercase {
|
||||
// just one segment, starting with an uppercase letter; that's a global tag
|
||||
let value = unsafe { std::str::from_utf8_unchecked(&buffer[..chomped]) };
|
||||
Ok((chomped as u16, Ident::GlobalTag(value)))
|
||||
} else {
|
||||
// just one segment, starting with a lowercase letter; that's a normal identifier
|
||||
let value = unsafe { std::str::from_utf8_unchecked(&buffer[..chomped]) };
|
||||
let ident = Ident::Access {
|
||||
module_name: "",
|
||||
parts: arena.alloc([value]),
|
||||
};
|
||||
Ok((chomped as u16, ident))
|
||||
}
|
||||
}
|
||||
|
||||
fn chomp_module_chain(buffer: &[u8]) -> Result<u16, Progress> {
    let mut chomped = 0;

    while let Some(b'.') = buffer.get(chomped) {
        match &buffer.get(chomped + 1..) {
            Some(slice) => match chomp_uppercase_part(slice) {
                Ok(name) => {
                    chomped += name.len() + 1;
                }
                Err(MadeProgress) => return Err(MadeProgress),
                Err(NoProgress) => break,
            },
            None => return Err(MadeProgress),
        }
    }

    if chomped == 0 {
        Err(NoProgress)
    } else {
        Ok(chomped as u16)
    }
}

pub fn concrete_type<'a>() -> impl Parser<'a, (&'a str, &'a str), ()> {
    move |_, state: State<'a>| match chomp_concrete_type(state.bytes) {
        Err(progress) => Err((progress, (), state)),
        Ok((module_name, type_name, width)) => {
            match state.advance_without_indenting_ee(width, |_, _| ()) {
                Ok(state) => Ok((MadeProgress, (module_name, type_name), state)),
                Err(bad) => Err(bad),
            }
        }
    }
}
|
||||
|
||||
// parse a type name like `Result` or `Result.Result`
|
||||
fn chomp_concrete_type(buffer: &[u8]) -> Result<(&str, &str, usize), Progress> {
|
||||
let first = crate::ident::chomp_uppercase_part(buffer)?;
|
||||
|
||||
if let Some(b'.') = buffer.get(first.len()) {
|
||||
match crate::ident::chomp_module_chain(&buffer[first.len()..]) {
|
||||
Err(_) => Err(MadeProgress),
|
||||
Ok(rest) => {
|
||||
let width = first.len() + rest as usize;
|
||||
|
||||
// we must explicitly check here for a trailing `.`
|
||||
if let Some(b'.') = buffer.get(width) {
|
||||
return Err(MadeProgress);
|
||||
}
|
||||
|
||||
let slice = &buffer[..width];
|
||||
|
||||
match slice.iter().rev().position(|c| *c == b'.') {
|
||||
None => Ok(("", first, first.len())),
|
||||
Some(rev_index) => {
|
||||
let index = slice.len() - rev_index;
|
||||
let module_name =
|
||||
unsafe { std::str::from_utf8_unchecked(&slice[..index - 1]) };
|
||||
let type_name = unsafe { std::str::from_utf8_unchecked(&slice[index..]) };
|
||||
|
||||
Ok((module_name, type_name, width))
|
||||
}
|
||||
} else {
|
||||
// This is a qualified tag, which is not allowed!
|
||||
return Err((
|
||||
MadeProgress,
|
||||
BadIdent::QualifiedTag(state.line, state.column),
|
||||
state,
|
||||
));
|
||||
}
|
||||
}
|
||||
None => {
|
||||
// We had neither capitalized nor noncapitalized parts,
|
||||
// yet we made it this far. The only explanation is that this was
|
||||
// a stray '.' drifting through the cosmos.
|
||||
return Err((
|
||||
MadeProgress,
|
||||
BadIdent::StrayDot(state.line, state.column),
|
||||
state,
|
||||
));
|
||||
}
|
||||
}
|
||||
} else if is_private_tag {
|
||||
// This is qualified field access with an '@' in front, which does not make sense!
|
||||
return Err((
|
||||
MadeProgress,
|
||||
BadIdent::PrivateTagFieldAccess(state.line, state.column),
|
||||
state,
|
||||
));
|
||||
} else {
|
||||
// We have multiple noncapitalized parts, so this must be field access.
|
||||
Ident::Access {
|
||||
module_name: join_module_parts(arena, capitalized_parts.into_bump_slice()),
|
||||
parts: noncapitalized_parts.into_bump_slice(),
|
||||
}
|
||||
};
|
||||
|
||||
Ok((Progress::MadeProgress, (answer, None), state))
|
||||
Ok(("", first, first.len()))
|
||||
}
|
||||
}
|
||||
|
||||
fn chomp_access_chain<'a>(buffer: &'a [u8], parts: &mut Vec<'a, &'a str>) -> Result<u16, u16> {
    let mut chomped = 0;

    while let Some(b'.') = buffer.get(chomped) {
        match &buffer.get(chomped + 1..) {
            Some(slice) => match chomp_lowercase_part(slice) {
                Ok(name) => {
                    let value = unsafe {
                        std::str::from_utf8_unchecked(
                            &buffer[chomped + 1..chomped + 1 + name.len()],
                        )
                    };
                    parts.push(value);

                    chomped += name.len() + 1;
                }
                Err(_) => return Err(chomped as u16 + 1),
            },
            None => return Err(chomped as u16 + 1),
        }
    }

    if chomped == 0 {
        Err(0)
    } else {
        Ok(chomped as u16)
    }
}

fn parse_ident_help_help<'a>(
    arena: &'a Bump,
    mut state: State<'a>,
) -> ParseResult<'a, Ident<'a>, BadIdent> {
    match chomp_identifier_chain(arena, state.bytes, state.line, state.column) {
        Ok((width, ident)) => {
            state = advance_state!(state, width as usize)?;
            Ok((MadeProgress, ident, state))
        }
        Err((0, fail)) => Err((NoProgress, fail, state)),
        Err((width, fail)) => {
            state = advance_state!(state, width as usize)?;
            Err((MadeProgress, fail, state))
        }
    }
}
File diff suppressed because it is too large
@@ -1,7 +1,5 @@
use crate::ast::Base;
use crate::parser::{parse_utf8, Number, ParseResult, Parser, Progress, State, SyntaxError};
use std::char;
use std::str::from_utf8_unchecked;
use crate::parser::{Number, ParseResult, Parser, Progress, State};

pub enum NumLiteral<'a> {
    Float(&'a str),
@ -52,29 +50,21 @@ fn chomp_number_base<'a>(
|
|||
) -> ParseResult<'a, NumLiteral<'a>, Number> {
|
||||
let (_is_float, chomped) = chomp_number(bytes);
|
||||
|
||||
match parse_utf8(&bytes[0..chomped]) {
|
||||
Ok(string) => match state.advance_without_indenting(chomped + 2 + is_negative as usize) {
|
||||
Ok(new) => {
|
||||
// all is well
|
||||
Ok((
|
||||
Progress::MadeProgress,
|
||||
NumLiteral::NonBase10Int {
|
||||
is_negative,
|
||||
string,
|
||||
base,
|
||||
},
|
||||
new,
|
||||
))
|
||||
}
|
||||
Err((_, SyntaxError::LineTooLong(_), new)) => {
|
||||
// the only error we care about in this context
|
||||
Err((Progress::MadeProgress, Number::LineTooLong, new))
|
||||
}
|
||||
Err(_) => unreachable!("we know advancing will succeed if there is space on the line"),
|
||||
},
|
||||
let string = unsafe { std::str::from_utf8_unchecked(&bytes[..chomped]) };
|
||||
|
||||
Err(_) => unreachable!("no invalid utf8 could have been chomped"),
|
||||
}
|
||||
let new = state.advance_without_indenting_ee(chomped + 2 + is_negative as usize, |_, _| {
|
||||
Number::LineTooLong
|
||||
})?;
|
||||
|
||||
Ok((
|
||||
Progress::MadeProgress,
|
||||
NumLiteral::NonBase10Int {
|
||||
is_negative,
|
||||
string,
|
||||
base,
|
||||
},
|
||||
new,
|
||||
))
|
||||
}
|
||||
|
||||
fn chomp_number_dec<'a>(
|
||||
|
@ -94,27 +84,21 @@ fn chomp_number_dec<'a>(
|
|||
return Err((Progress::NoProgress, Number::End, state));
|
||||
}
|
||||
|
||||
let string = unsafe { from_utf8_unchecked(&state.bytes[0..chomped + is_negative as usize]) };
|
||||
let string =
|
||||
unsafe { std::str::from_utf8_unchecked(&state.bytes[0..chomped + is_negative as usize]) };
|
||||
|
||||
match state.advance_without_indenting(chomped + is_negative as usize) {
|
||||
Ok(new) => {
|
||||
// all is well
|
||||
Ok((
|
||||
Progress::MadeProgress,
|
||||
if is_float {
|
||||
NumLiteral::Float(string)
|
||||
} else {
|
||||
NumLiteral::Num(string)
|
||||
},
|
||||
new,
|
||||
))
|
||||
}
|
||||
Err((_, SyntaxError::LineTooLong(_), new)) => {
|
||||
// the only error we care about in this context
|
||||
Err((Progress::MadeProgress, Number::LineTooLong, new))
|
||||
}
|
||||
Err(_) => unreachable!("we know advancing will succeed if there is space on the line"),
|
||||
}
|
||||
let new = state
|
||||
.advance_without_indenting_ee(chomped + is_negative as usize, |_, _| Number::LineTooLong)?;
|
||||
|
||||
Ok((
|
||||
Progress::MadeProgress,
|
||||
if is_float {
|
||||
NumLiteral::Float(string)
|
||||
} else {
|
||||
NumLiteral::Num(string)
|
||||
},
|
||||
new,
|
||||
))
|
||||
}
|
||||
|
||||
fn chomp_number(mut bytes: &[u8]) -> (bool, usize) {
|
||||
File diff suppressed because it is too large
@@ -1,10 +1,10 @@
use crate::ast::Pattern;
use crate::blankspace::{space0_around_ee, space0_before_e, space0_e};
use crate::ident::{ident, lowercase_ident, Ident};
use crate::ident::{lowercase_ident, parse_ident_help, Ident};
use crate::parser::Progress::{self, *};
use crate::parser::{
    backtrackable, optional, specialize, specialize_ref, word1, EPattern, PInParens, PRecord,
    ParseResult, Parser, State, SyntaxError,
    ParseResult, Parser, State,
};
use bumpalo::collections::string::String;
use bumpalo::collections::Vec;
@@ -51,13 +51,6 @@ fn parse_closure_param<'a>(
    .parse(arena, state)
}

pub fn loc_pattern<'a>(min_indent: u16) -> impl Parser<'a, Located<Pattern<'a>>, SyntaxError<'a>> {
    specialize(
        |e, _, _| SyntaxError::Pattern(e),
        loc_pattern_help(min_indent),
    )
}

pub fn loc_pattern_help<'a>(
    min_indent: u16,
) -> impl Parser<'a, Located<Pattern<'a>>, EPattern<'a>> {
@@ -130,7 +123,7 @@ fn loc_pattern_in_parens_help<'a>(
    between!(
        word1(b'(', PInParens::Open),
        space0_around_ee(
            move |arena, state| specialize_ref(PInParens::Syntax, loc_pattern(min_indent))
            move |arena, state| specialize_ref(PInParens::Pattern, loc_pattern_help(min_indent))
                .parse(arena, state),
            min_indent,
            PInParens::Space,
@@ -176,10 +169,11 @@ fn loc_ident_pattern_help<'a>(
    can_have_arguments: bool,
) -> impl Parser<'a, Located<Pattern<'a>>, EPattern<'a>> {
    move |arena: &'a Bump, state: State<'a>| {
        let original_state = state.clone();
        let original_state = state;

        let (_, loc_ident, state) =
            specialize(|_, r, c| EPattern::Start(r, c), loc!(ident())).parse(arena, state)?;
            specialize(|_, r, c| EPattern::Start(r, c), loc!(parse_ident_help))
                .parse(arena, state)?;

        match loc_ident.value {
            Ident::GlobalTag(tag) => {
@@ -296,10 +290,6 @@ fn loc_ident_pattern_help<'a>(
    }
}

pub fn underscore_pattern<'a>() -> impl Parser<'a, Pattern<'a>, SyntaxError<'a>> {
    specialize(|e, _, _| SyntaxError::Pattern(e), underscore_pattern_help())
}

fn underscore_pattern_help<'a>() -> impl Parser<'a, Pattern<'a>, EPattern<'a>> {
    move |arena: &'a Bump, state: State<'a>| {
        let (_, _, next_state) = word1(b'_', EPattern::Underscore).parse(arena, state)?;
@@ -324,13 +314,6 @@ fn lowercase_ident_pattern<'a>(
    specialize(move |_, _, _| EPattern::End(row, col), lowercase_ident()).parse(arena, state)
}

pub fn record_pattern<'a>(min_indent: u16) -> impl Parser<'a, Pattern<'a>, SyntaxError<'a>> {
    specialize(
        |e, r, c| SyntaxError::Pattern(EPattern::Record(e, r, c)),
        record_pattern_help(min_indent),
    )
}

#[inline(always)]
fn record_pattern_help<'a>(min_indent: u16) -> impl Parser<'a, Pattern<'a>, PRecord<'a>> {
    move |arena, state| {
@@ -385,7 +368,7 @@ fn record_pattern_field<'a>(min_indent: u16) -> impl Parser<'a, Located<Pattern<

    match opt_loc_val {
        Some(First(_)) => {
            let val_parser = specialize_ref(PRecord::Syntax, loc_pattern(min_indent));
            let val_parser = specialize_ref(PRecord::Pattern, loc_pattern_help(min_indent));
            let (_, loc_val, state) =
                space0_before_e(val_parser, min_indent, PRecord::Space, PRecord::IndentColon)
                    .parse(arena, state)?;
@@ -413,7 +396,7 @@ fn record_pattern_field<'a>(min_indent: u16) -> impl Parser<'a, Located<Pattern<
        }
        Some(Second(_)) => {
            let val_parser =
                specialize_ref(PRecord::Syntax, loc!(crate::expr::expr(min_indent)));
                specialize_ref(PRecord::Expr, loc!(crate::expr::expr_help(min_indent)));

            let (_, loc_val, state) =
                space0_before_e(val_parser, min_indent, PRecord::Space, PRecord::IndentColon)
@@ -1,10 +1,7 @@
use crate::ast::{EscapedChar, StrLiteral, StrSegment};
use crate::expr;
use crate::parser::Progress::*;
use crate::parser::{
    allocated, ascii_char, loc, parse_utf8, specialize_ref, word1, BadInputError, EString, Parser,
    State,
};
use crate::parser::{allocated, loc, specialize_ref, word1, BadInputError, EString, Parser, State};
use bumpalo::collections::vec::Vec;
use bumpalo::Bump;

@@ -102,7 +99,7 @@ pub fn parse<'a>() -> impl Parser<'a, StrLiteral<'a>, EString<'a>> {
    // to exclude that char we just parsed.
    let string_bytes = &state.bytes[0..(segment_parsed_bytes - 1)];

    match parse_utf8(string_bytes) {
    match std::str::from_utf8(string_bytes) {
        Ok(string) => {
            state = advance_state!(state, string.len())?;

@@ -233,9 +230,9 @@ pub fn parse<'a>() -> impl Parser<'a, StrLiteral<'a>, EString<'a>> {
    // Parse an arbitrary expression, then give a
    // canonicalization error if that expression variant
    // is not allowed inside a string interpolation.
    let (_progress, loc_expr, new_state) = specialize_ref(
        EString::Format,
        skip_second!(loc(allocated(expr::expr(0))), ascii_char(b')')),
    let (_progress, loc_expr, new_state) = skip_second!(
        specialize_ref(EString::Format, loc(allocated(expr::expr_help(0)))),
        word1(b')', EString::FormatEnd)
    )
    .parse(arena, state)?;

@@ -1,13 +1,10 @@
use crate::ast::{self, Attempting};
use crate::blankspace::space0_before;
use crate::expr::expr;
use crate::module::{header, module_defs};
use crate::parser::{loc, Parser, State, SyntaxError};
use crate::ast;
use crate::module::module_defs;
use crate::parser::{Parser, State, SyntaxError};
use bumpalo::collections::Vec;
use bumpalo::Bump;
use roc_region::all::Located;

#[allow(dead_code)]
pub fn parse_expr_with<'a>(
    arena: &'a Bump,
    input: &'a str,
@@ -15,24 +12,12 @@ pub fn parse_expr_with<'a>(
    parse_loc_with(arena, input).map(|loc_expr| loc_expr.value)
}

pub fn parse_header_with<'a>(
    arena: &'a Bump,
    input: &'a str,
) -> Result<ast::Module<'a>, SyntaxError<'a>> {
    let state = State::new_in(arena, input.trim().as_bytes(), Attempting::Module);
    let answer = header().parse(arena, state);

    answer
        .map(|(_, loc_expr, _)| loc_expr)
        .map_err(|(_, fail, _)| fail)
}

#[allow(dead_code)]
pub fn parse_defs_with<'a>(
    arena: &'a Bump,
    input: &'a str,
) -> Result<Vec<'a, Located<ast::Def<'a>>>, SyntaxError<'a>> {
    let state = State::new_in(arena, input.trim().as_bytes(), Attempting::Module);
    let state = State::new(input.trim().as_bytes());
    let answer = module_defs().parse(arena, state);
    answer
        .map(|(_, loc_expr, _)| loc_expr)
@@ -44,11 +29,10 @@ pub fn parse_loc_with<'a>(
    arena: &'a Bump,
    input: &'a str,
) -> Result<Located<ast::Expr<'a>>, SyntaxError<'a>> {
    let state = State::new_in(arena, input.trim().as_bytes(), Attempting::Module);
    let parser = space0_before(loc(expr(0)), 0);
    let answer = parser.parse(&arena, state);
    let state = State::new(input.trim().as_bytes());

    answer
        .map(|(_, loc_expr, _)| loc_expr)
        .map_err(|(_, fail, _)| fail)
    match crate::expr::test_parse_expr(0, arena, state) {
        Ok(loc_expr) => Ok(loc_expr),
        Err(fail) => Err(SyntaxError::Expr(fail)),
    }
}
@@ -1,24 +1,16 @@
use crate::ast::{AssignedField, Tag, TypeAnnotation};
use crate::blankspace::{space0_around_ee, space0_before_e, space0_e};
use crate::ident::join_module_parts;
use crate::keyword;
use crate::parser::{
    allocated, backtrackable, not_e, optional, peek_utf8_char_e, specialize, specialize_ref, word1,
    word2, ParseResult, Parser,
    allocated, backtrackable, optional, specialize, specialize_ref, word1, word2, ParseResult,
    Parser,
    Progress::{self, *},
    State, SyntaxError, TApply, TInParens, TRecord, TTagUnion, TVariable, Type,
    State, TApply, TInParens, TRecord, TTagUnion, Type,
};
use bumpalo::collections::string::String;
use bumpalo::collections::vec::Vec;
use bumpalo::Bump;
use roc_region::all::{Located, Region};

pub fn located<'a>(
    min_indent: u16,
) -> impl Parser<'a, Located<TypeAnnotation<'a>>, SyntaxError<'a>> {
    specialize(|x, _, _| SyntaxError::Type(x), expression(min_indent))
}

pub fn located_help<'a>(min_indent: u16) -> impl Parser<'a, Located<TypeAnnotation<'a>>, Type<'a>> {
    expression(min_indent)
}
@@ -62,7 +54,7 @@ fn term<'a>(min_indent: u16) -> impl Parser<'a, Located<TypeAnnotation<'a>>, Typ
        loc!(specialize(Type::TRecord, record_type(min_indent))),
        loc!(specialize(Type::TTagUnion, tag_union_type(min_indent))),
        loc!(applied_type(min_indent)),
        loc!(specialize(Type::TVariable, parse_type_variable))
        loc!(parse_type_variable)
    ),
    // Inline alias notation, e.g. [ Nil, Cons a (List a) ] as List a
    one_of![
|
@ -117,21 +109,13 @@ fn loc_applied_arg<'a>(min_indent: u16) -> impl Parser<'a, Located<TypeAnnotatio
|
|||
map_with_arena!(
|
||||
and!(
|
||||
backtrackable(space0_e(min_indent, Type::TSpace, Type::TIndentStart)),
|
||||
skip_first!(
|
||||
// Once we hit an "as", stop parsing args
|
||||
// and roll back parsing of preceding spaces
|
||||
not_e(
|
||||
crate::parser::keyword(keyword::AS, min_indent),
|
||||
Type::TStart
|
||||
),
|
||||
one_of!(
|
||||
loc_wildcard(),
|
||||
specialize(Type::TInParens, loc_type_in_parens(min_indent)),
|
||||
loc!(specialize(Type::TRecord, record_type(min_indent))),
|
||||
loc!(specialize(Type::TTagUnion, tag_union_type(min_indent))),
|
||||
loc!(specialize(Type::TApply, parse_concrete_type)),
|
||||
loc!(specialize(Type::TVariable, parse_type_variable))
|
||||
)
|
||||
one_of!(
|
||||
loc_wildcard(),
|
||||
specialize(Type::TInParens, loc_type_in_parens(min_indent)),
|
||||
loc!(specialize(Type::TRecord, record_type(min_indent))),
|
||||
loc!(specialize(Type::TTagUnion, tag_union_type(min_indent))),
|
||||
loc!(specialize(Type::TApply, parse_concrete_type)),
|
||||
loc!(parse_type_variable)
|
||||
)
|
||||
),
|
||||
|arena: &'a Bump, (spaces, argument): (&'a [_], Located<TypeAnnotation<'a>>)| {
|
||||
|
@ -192,102 +176,9 @@ where
|
|||
F: Fn(Row, Col) -> E,
|
||||
E: 'a,
|
||||
{
|
||||
use encode_unicode::CharExt;
|
||||
|
||||
move |arena, mut state: State<'a>| {
|
||||
let mut buf;
|
||||
|
||||
match char::from_utf8_slice_start(state.bytes) {
|
||||
Ok((first_letter, bytes_parsed)) => match first_letter {
|
||||
'@' => {
|
||||
debug_assert_eq!(bytes_parsed, 1);
|
||||
|
||||
// parsing a private tag name
|
||||
match char::from_utf8_slice_start(&state.bytes[1..]) {
|
||||
Ok((second_letter, bytes_parsed_2)) if second_letter.is_uppercase() => {
|
||||
let total_parsed = bytes_parsed + bytes_parsed_2;
|
||||
|
||||
buf = String::with_capacity_in(total_parsed, arena);
|
||||
|
||||
buf.push('@');
|
||||
buf.push(second_letter);
|
||||
|
||||
state = state.advance_without_indenting(total_parsed).map_err(
|
||||
|(progress, _, state)| {
|
||||
(progress, to_problem(state.line, state.column), state)
|
||||
},
|
||||
)?;
|
||||
}
|
||||
_ => {
|
||||
// important for error messages
|
||||
state = state.advance_without_indenting(bytes_parsed).map_err(
|
||||
|(progress, _, state)| {
|
||||
(progress, to_problem(state.line, state.column), state)
|
||||
},
|
||||
)?;
|
||||
|
||||
let row = state.line;
|
||||
let col = state.column;
|
||||
return state.fail(arena, MadeProgress, to_problem(row, col));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
_ if first_letter.is_uppercase() => {
|
||||
buf = String::with_capacity_in(1, arena);
|
||||
|
||||
buf.push(first_letter);
|
||||
|
||||
state = state.advance_without_indenting(bytes_parsed).map_err(
|
||||
|(progress, _, state)| {
|
||||
(progress, to_problem(state.line, state.column), state)
|
||||
},
|
||||
)?;
|
||||
}
|
||||
|
||||
_ => {
|
||||
let row = state.line;
|
||||
let col = state.column;
|
||||
return state.fail(arena, NoProgress, to_problem(row, col));
|
||||
}
|
||||
},
|
||||
Err(_) => {
|
||||
let row = state.line;
|
||||
let col = state.column;
|
||||
return state.fail(arena, NoProgress, to_problem(row, col));
|
||||
}
|
||||
};
|
||||
|
||||
while !state.bytes.is_empty() {
|
||||
match char::from_utf8_slice_start(state.bytes) {
|
||||
Ok((ch, bytes_parsed)) => {
|
||||
// After the first character, only these are allowed:
|
||||
//
|
||||
// * Unicode alphabetic chars - you might include `鹏` if that's clear to your readers
|
||||
// * ASCII digits - e.g. `1` but not `¾`, both of which pass .is_numeric()
|
||||
// * A ':' indicating the end of the field
|
||||
if ch.is_alphabetic() || ch.is_ascii_digit() {
|
||||
buf.push(ch);
|
||||
|
||||
state = state.advance_without_indenting(bytes_parsed).map_err(
|
||||
|(progress, _, state)| {
|
||||
(progress, to_problem(state.line, state.column), state)
|
||||
},
|
||||
)?;
|
||||
} else {
|
||||
// This is the end of the field. We're done!
|
||||
break;
|
||||
}
|
||||
}
|
||||
Err(_) => {
|
||||
let row = state.line;
|
||||
let col = state.column;
|
||||
return state.fail(arena, MadeProgress, to_problem(row, col));
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
Ok((MadeProgress, buf.into_bump_str(), state))
|
||||
move |arena, state: State<'a>| match crate::ident::tag_name().parse(arena, state) {
|
||||
Ok(good) => Ok(good),
|
||||
Err((progress, _, state)) => Err((progress, to_problem(state.line, state.column), state)),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -517,156 +408,52 @@ fn expression<'a>(min_indent: u16) -> impl Parser<'a, Located<TypeAnnotation<'a>
|
|||
|
||||
fn parse_concrete_type<'a>(
|
||||
arena: &'a Bump,
|
||||
mut state: State<'a>,
|
||||
state: State<'a>,
|
||||
) -> ParseResult<'a, TypeAnnotation<'a>, TApply> {
|
||||
let mut part_buf = String::new_in(arena); // The current "part" (parts are dot-separated.)
|
||||
let mut parts: Vec<&'a str> = Vec::new_in(arena);
|
||||
let initial_bytes = state.bytes;
|
||||
|
||||
// Qualified types must start with a capitalized letter.
|
||||
match peek_utf8_char_e(&state, TApply::StartNotUppercase, TApply::Space) {
|
||||
Ok((first_letter, bytes_parsed)) => {
|
||||
if first_letter.is_alphabetic() && first_letter.is_uppercase() {
|
||||
part_buf.push(first_letter);
|
||||
} else {
|
||||
let problem = TApply::StartNotUppercase(state.line, state.column + 1);
|
||||
return Err((NoProgress, problem, state));
|
||||
}
|
||||
match crate::ident::concrete_type().parse(arena, state) {
|
||||
Ok((_, (module_name, type_name), state)) => {
|
||||
let answer = TypeAnnotation::Apply(module_name, type_name, &[]);
|
||||
|
||||
state = state.advance_without_indenting_e(bytes_parsed, TApply::Space)?;
|
||||
Ok((MadeProgress, answer, state))
|
||||
}
|
||||
Err(reason) => return Err((NoProgress, reason, state)),
|
||||
}
|
||||
Err((NoProgress, _, state)) => {
|
||||
Err((NoProgress, TApply::End(state.line, state.column), state))
|
||||
}
|
||||
Err((MadeProgress, _, mut state)) => {
|
||||
// we made some progress, but ultimately failed.
|
||||
// that means a malformed type name
|
||||
let chomped = crate::ident::chomp_malformed(state.bytes);
|
||||
let delta = initial_bytes.len() - state.bytes.len();
|
||||
let parsed_str =
|
||||
unsafe { std::str::from_utf8_unchecked(&initial_bytes[..chomped + delta]) };
|
||||
|
||||
while !state.bytes.is_empty() {
|
||||
match peek_utf8_char_e(&state, TApply::End, TApply::Space) {
|
||||
Ok((ch, bytes_parsed)) => {
|
||||
// After the first character, only these are allowed:
|
||||
//
|
||||
// * Unicode alphabetic chars - you might name a variable `鹏` if that's clear to your readers
|
||||
// * ASCII digits - e.g. `1` but not `¾`, both of which pass .is_numeric()
|
||||
// * A dot ('.')
|
||||
if ch.is_alphabetic() {
|
||||
if part_buf.is_empty() && !ch.is_uppercase() {
|
||||
// Each part must begin with a capital letter.
|
||||
return Err((
|
||||
MadeProgress,
|
||||
TApply::StartNotUppercase(state.line, state.column),
|
||||
state,
|
||||
));
|
||||
}
|
||||
state = state.advance_without_indenting_ee(chomped, |r, c| {
|
||||
TApply::Space(crate::parser::BadInputError::LineTooLong, r, c)
|
||||
})?;
|
||||
|
||||
part_buf.push(ch);
|
||||
} else if ch.is_ascii_digit() {
|
||||
// Parts may not start with numbers!
|
||||
if part_buf.is_empty() {
|
||||
return Err((
|
||||
MadeProgress,
|
||||
TApply::StartIsNumber(state.line, state.column),
|
||||
state,
|
||||
));
|
||||
}
|
||||
dbg!(&state);
|
||||
|
||||
part_buf.push(ch);
|
||||
} else if ch == '.' {
|
||||
// Having two consecutive dots is an error.
|
||||
if part_buf.is_empty() {
|
||||
return Err((
|
||||
MadeProgress,
|
||||
TApply::DoubleDot(state.line, state.column),
|
||||
state,
|
||||
));
|
||||
}
|
||||
|
||||
parts.push(part_buf.into_bump_str());
|
||||
|
||||
// Now that we've recorded the contents of the current buffer, reset it.
|
||||
part_buf = String::new_in(arena);
|
||||
} else {
|
||||
// This must be the end of the type. We're done!
|
||||
break;
|
||||
}
|
||||
|
||||
state = state.advance_without_indenting_e(bytes_parsed, TApply::Space)?;
|
||||
}
|
||||
Err(reason) => {
|
||||
return Err((MadeProgress, reason, state));
|
||||
}
|
||||
Ok((MadeProgress, TypeAnnotation::Malformed(parsed_str), state))
|
||||
}
|
||||
}
|
||||
|
||||
if part_buf.is_empty() {
|
||||
// We probably had a trailing dot, e.g. `Foo.bar.` - this is malformed!
|
||||
//
|
||||
// This condition might also occur if we encounter a malformed accessor like `.|`
|
||||
//
|
||||
// If we made it this far and don't have a next_char, then necessarily
|
||||
// we have consumed a '.' char previously.
|
||||
return Err((
|
||||
MadeProgress,
|
||||
TApply::TrailingDot(state.line, state.column),
|
||||
state,
|
||||
));
|
||||
}
|
||||
|
||||
let answer = TypeAnnotation::Apply(
|
||||
join_module_parts(arena, parts.into_bump_slice()),
|
||||
part_buf.into_bump_str(),
|
||||
&[],
|
||||
);
|
||||
|
||||
Ok((MadeProgress, answer, state))
|
||||
}
|
||||
|
||||
fn parse_type_variable<'a>(
|
||||
arena: &'a Bump,
|
||||
mut state: State<'a>,
|
||||
) -> ParseResult<'a, TypeAnnotation<'a>, TVariable> {
|
||||
let mut buf = String::new_in(arena);
|
||||
state: State<'a>,
|
||||
) -> ParseResult<'a, TypeAnnotation<'a>, Type<'a>> {
|
||||
match crate::ident::lowercase_ident().parse(arena, state) {
|
||||
Ok((_, name, state)) => {
|
||||
let answer = TypeAnnotation::BoundVariable(name);
|
||||
|
||||
let start_bytes_len = state.bytes.len();
|
||||
|
||||
match peek_utf8_char_e(&state, TVariable::StartNotLowercase, TVariable::Space) {
|
||||
Ok((first_letter, bytes_parsed)) => {
|
||||
// Type variables must start with a lowercase letter.
|
||||
if first_letter.is_alphabetic() && first_letter.is_lowercase() {
|
||||
buf.push(first_letter);
|
||||
} else {
|
||||
return Err((
|
||||
NoProgress,
|
||||
TVariable::StartNotLowercase(state.line, state.column),
|
||||
state,
|
||||
));
|
||||
}
|
||||
|
||||
state = state.advance_without_indenting_e(bytes_parsed, TVariable::Space)?;
|
||||
Ok((MadeProgress, answer, state))
|
||||
}
|
||||
Err(reason) => return Err((NoProgress, reason, state)),
|
||||
Err((progress, _, state)) => Err((
|
||||
progress,
|
||||
Type::TBadTypeVariable(state.line, state.column),
|
||||
state,
|
||||
)),
|
||||
}
|
||||
|
||||
while !state.bytes.is_empty() {
|
||||
match peek_utf8_char_e(&state, TVariable::End, TVariable::Space) {
|
||||
Ok((ch, bytes_parsed)) => {
|
||||
// After the first character, only these are allowed:
|
||||
//
|
||||
// * Unicode alphabetic chars - you might name a variable `鹏` if that's clear to your readers
|
||||
// * ASCII digits - e.g. `1` but not `¾`, both of which pass .is_numeric()
|
||||
if ch.is_alphabetic() || ch.is_ascii_digit() {
|
||||
buf.push(ch);
|
||||
} else {
|
||||
// This must be the end of the type. We're done!
|
||||
break;
|
||||
}
|
||||
|
||||
state = state.advance_without_indenting_e(bytes_parsed, TVariable::Space)?;
|
||||
}
|
||||
Err(reason) => {
|
||||
return state.fail(arena, MadeProgress, reason);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let answer = TypeAnnotation::BoundVariable(buf.into_bump_str());
|
||||
|
||||
let progress = Progress::from_lengths(start_bytes_len, state.bytes.len());
|
||||
Ok((progress, answer, state))
|
||||
}
|
||||
|
|
|
@@ -23,14 +23,12 @@ mod test_parse {
    use roc_parse::ast::Pattern::{self, *};
    use roc_parse::ast::StrLiteral::{self, *};
    use roc_parse::ast::StrSegment::*;
    use roc_parse::ast::{
        self, Attempting, Def, EscapedChar, Spaceable, TypeAnnotation, WhenBranch,
    };
    use roc_parse::ast::{self, Def, EscapedChar, Spaceable, TypeAnnotation, WhenBranch};
    use roc_parse::header::{
        AppHeader, Effects, ExposesEntry, ImportsEntry, InterfaceHeader, ModuleName, PackageEntry,
        PackageName, PackageOrPath, PlatformHeader, To,
    };
    use roc_parse::module::{app_header, interface_header, module_defs, platform_header};
    use roc_parse::module::module_defs;
    use roc_parse::parser::{Parser, State, SyntaxError};
    use roc_parse::test_helpers::parse_expr_with;
    use roc_region::all::{Located, Region};
@@ -43,10 +41,9 @@ mod test_parse {
        assert_eq!(Ok(expected_expr), actual);
    }

    fn assert_parsing_fails<'a>(input: &'a str, _reason: SyntaxError, _attempting: Attempting) {
    fn assert_parsing_fails<'a>(input: &'a str, _reason: SyntaxError) {
        let arena = Bump::new();
        let actual = parse_expr_with(&arena, input);
        // let expected_fail = Fail { reason, attempting };

        assert!(actual.is_err());
    }
@@ -291,7 +288,7 @@ mod test_parse {

    #[test]
    fn empty_source_file() {
        assert_parsing_fails("", SyntaxError::Eof(Region::zero()), Attempting::Module);
        assert_parsing_fails("", SyntaxError::Eof(Region::zero()));
    }

    #[test]
@@ -308,11 +305,7 @@ mod test_parse {
        // Make sure it's longer than our maximum line length
        assert_eq!(too_long_str.len(), max_line_length + 1);

        assert_parsing_fails(
            &too_long_str,
            SyntaxError::LineTooLong(0),
            Attempting::Module,
        );
        assert_parsing_fails(&too_long_str, SyntaxError::LineTooLong(0));
    }

    // INT LITERALS
@@ -1012,7 +1005,7 @@ mod test_parse {
        use roc_parse::ident::BadIdent;

        let arena = Bump::new();
        let expected = Expr::MalformedIdent("@One.Two.Whee", BadIdent::QualifiedTag(0, 13));
        let expected = Expr::MalformedIdent("@One.Two.Whee", BadIdent::BadPrivateTag(0, 4));
        let actual = parse_expr_with(&arena, "@One.Two.Whee");

        assert_eq!(Ok(expected), actual);
|
@ -2416,7 +2409,7 @@ mod test_parse {
|
|||
let imports = Vec::new_in(&arena);
|
||||
let provides = Vec::new_in(&arena);
|
||||
let module_name = StrLiteral::PlainLine("test-app");
|
||||
let expected = AppHeader {
|
||||
let header = AppHeader {
|
||||
name: Located::new(0, 0, 4, 14, module_name),
|
||||
packages,
|
||||
imports,
|
||||
|
@ -2433,17 +2426,15 @@ mod test_parse {
|
|||
after_to: &[],
|
||||
};
|
||||
|
||||
let expected = roc_parse::ast::Module::App { header };
|
||||
|
||||
let src = indoc!(
|
||||
r#"
|
||||
app "test-app" packages {} imports [] provides [] to blah
|
||||
"#
|
||||
);
|
||||
let actual = app_header()
|
||||
.parse(
|
||||
&arena,
|
||||
State::new_in(&arena, src.as_bytes(), Attempting::Module),
|
||||
)
|
||||
.map(|tuple| tuple.1);
|
||||
let actual = roc_parse::module::parse_header(&arena, State::new(src.as_bytes()))
|
||||
.map(|tuple| tuple.0);
|
||||
|
||||
assert_eq!(Ok(expected), actual);
|
||||
}
|
||||
|
@ -2457,7 +2448,7 @@ mod test_parse {
|
|||
let imports = Vec::new_in(&arena);
|
||||
let provides = Vec::new_in(&arena);
|
||||
let module_name = StrLiteral::PlainLine("test-app");
|
||||
let expected = AppHeader {
|
||||
let header = AppHeader {
|
||||
name: Located::new(0, 0, 4, 14, module_name),
|
||||
packages,
|
||||
imports,
|
||||
|
@ -2474,17 +2465,16 @@ mod test_parse {
|
|||
after_to: &[],
|
||||
};
|
||||
|
||||
let expected = roc_parse::ast::Module::App { header };
|
||||
|
||||
let src = indoc!(
|
||||
r#"
|
||||
app "test-app" provides [] to "./blah"
|
||||
"#
|
||||
);
|
||||
let actual = app_header()
|
||||
.parse(
|
||||
&arena,
|
||||
State::new_in(&arena, src.as_bytes(), Attempting::Module),
|
||||
)
|
||||
.map(|tuple| tuple.1);
|
||||
|
||||
let actual = roc_parse::module::parse_header(&arena, State::new(src.as_bytes()))
|
||||
.map(|tuple| tuple.0);
|
||||
|
||||
assert_eq!(Ok(expected), actual);
|
||||
}
|
||||
|
@ -2509,7 +2499,8 @@ mod test_parse {
|
|||
let provide_entry = Located::new(3, 3, 15, 24, Exposed("quicksort"));
|
||||
let provides = bumpalo::vec![in &arena; provide_entry];
|
||||
let module_name = StrLiteral::PlainLine("quicksort");
|
||||
let expected = AppHeader {
|
||||
|
||||
let header = AppHeader {
|
||||
name: Located::new(0, 0, 4, 15, module_name),
|
||||
packages,
|
||||
imports,
|
||||
|
@ -2526,6 +2517,8 @@ mod test_parse {
|
|||
after_to: &[],
|
||||
};
|
||||
|
||||
let expected = roc_parse::ast::Module::App { header };
|
||||
|
||||
let src = indoc!(
|
||||
r#"
|
||||
app "quicksort"
|
||||
|
@ -2535,12 +2528,8 @@ mod test_parse {
|
|||
"#
|
||||
);
|
||||
|
||||
let actual = app_header()
|
||||
.parse(
|
||||
&arena,
|
||||
State::new_in(&arena, src.as_bytes(), Attempting::Module),
|
||||
)
|
||||
.map(|tuple| tuple.1);
|
||||
let actual = roc_parse::module::parse_header(&arena, State::new(src.as_bytes()))
|
||||
.map(|tuple| tuple.0);
|
||||
|
||||
assert_eq!(Ok(expected), actual);
|
||||
}
|
||||
|
@ -2560,7 +2549,7 @@ mod test_parse {
|
|||
spaces_after_effects_keyword: &[],
|
||||
spaces_after_type_name: &[],
|
||||
};
|
||||
let expected = PlatformHeader {
|
||||
let header = PlatformHeader {
|
||||
name: Located::new(0, 0, 9, 23, pkg_name),
|
||||
requires: Vec::new_in(&arena),
|
||||
exposes: Vec::new_in(&arena),
|
||||
|
@ -2581,13 +2570,11 @@ mod test_parse {
|
|||
after_provides: &[],
|
||||
};
|
||||
|
||||
let expected = roc_parse::ast::Module::Platform { header };
|
||||
|
||||
let src = "platform rtfeldman/blah requires {} exposes [] packages {} imports [] provides [] effects fx.Blah {}";
|
||||
let actual = platform_header()
|
||||
.parse(
|
||||
&arena,
|
||||
State::new_in(&arena, src.as_bytes(), Attempting::Module),
|
||||
)
|
||||
.map(|tuple| tuple.1);
|
||||
let actual = roc_parse::module::parse_header(&arena, State::new(src.as_bytes()))
|
||||
.map(|tuple| tuple.0);
|
||||
|
||||
assert_eq!(Ok(expected), actual);
|
||||
}
|
||||
|
@ -2621,7 +2608,7 @@ mod test_parse {
|
|||
spaces_after_effects_keyword: &[],
|
||||
spaces_after_type_name: &[],
|
||||
};
|
||||
let expected = PlatformHeader {
|
||||
let header = PlatformHeader {
|
||||
name: Located::new(0, 0, 9, 19, pkg_name),
|
||||
requires: Vec::new_in(&arena),
|
||||
exposes: Vec::new_in(&arena),
|
||||
|
@ -2642,6 +2629,8 @@ mod test_parse {
|
|||
after_provides: &[],
|
||||
};
|
||||
|
||||
let expected = roc_parse::ast::Module::Platform { header };
|
||||
|
||||
let src = indoc!(
|
||||
r#"
|
||||
platform foo/barbaz
|
||||
|
@ -2653,12 +2642,8 @@ mod test_parse {
|
|||
effects fx.Effect {}
|
||||
"#
|
||||
);
|
||||
let actual = platform_header()
|
||||
.parse(
|
||||
&arena,
|
||||
State::new_in(&arena, src.as_bytes(), Attempting::Module),
|
||||
)
|
||||
.map(|tuple| tuple.1);
|
||||
let actual = roc_parse::module::parse_header(&arena, State::new(src.as_bytes()))
|
||||
.map(|tuple| tuple.0);
|
||||
|
||||
assert_eq!(Ok(expected), actual);
|
||||
}
|
||||
|
@ -2669,7 +2654,7 @@ mod test_parse {
|
|||
let exposes = Vec::new_in(&arena);
|
||||
let imports = Vec::new_in(&arena);
|
||||
let module_name = ModuleName::new("Foo");
|
||||
let expected = InterfaceHeader {
|
||||
let header = InterfaceHeader {
|
||||
name: Located::new(0, 0, 10, 13, module_name),
|
||||
exposes,
|
||||
imports,
|
||||
|
@ -2680,17 +2665,16 @@ mod test_parse {
|
|||
before_imports: &[],
|
||||
after_imports: &[],
|
||||
};
|
||||
|
||||
let expected = roc_parse::ast::Module::Interface { header };
|
||||
|
||||
let src = indoc!(
|
||||
r#"
|
||||
interface Foo exposes [] imports []
|
||||
"#
|
||||
);
|
||||
let actual = interface_header()
|
||||
.parse(
|
||||
&arena,
|
||||
State::new_in(&arena, src.as_bytes(), Attempting::Module),
|
||||
)
|
||||
.map(|tuple| tuple.1);
|
||||
let actual = roc_parse::module::parse_header(&arena, State::new(src.as_bytes()))
|
||||
.map(|tuple| tuple.0);
|
||||
|
||||
assert_eq!(Ok(expected), actual);
|
||||
}
|
||||
|
@ -2701,7 +2685,7 @@ mod test_parse {
|
|||
let exposes = Vec::new_in(&arena);
|
||||
let imports = Vec::new_in(&arena);
|
||||
let module_name = ModuleName::new("Foo.Bar.Baz");
|
||||
let expected = InterfaceHeader {
|
||||
let header = InterfaceHeader {
|
||||
name: Located::new(0, 0, 10, 21, module_name),
|
||||
exposes,
|
||||
imports,
|
||||
|
@ -2712,17 +2696,16 @@ mod test_parse {
|
|||
before_imports: &[],
|
||||
after_imports: &[],
|
||||
};
|
||||
|
||||
let expected = roc_parse::ast::Module::Interface { header };
|
||||
|
||||
let src = indoc!(
|
||||
r#"
|
||||
interface Foo.Bar.Baz exposes [] imports []
|
||||
"#
|
||||
);
|
||||
let actual = interface_header()
|
||||
.parse(
|
||||
&arena,
|
||||
State::new_in(&arena, src.as_bytes(), Attempting::Module),
|
||||
)
|
||||
.map(|tuple| tuple.1);
|
||||
let actual = roc_parse::module::parse_header(&arena, State::new(src.as_bytes()))
|
||||
.map(|tuple| tuple.0);
|
||||
|
||||
assert_eq!(Ok(expected), actual);
|
||||
}
|
||||
|
@ -2748,10 +2731,7 @@ mod test_parse {
|
|||
"#
|
||||
);
|
||||
let actual = module_defs()
|
||||
.parse(
|
||||
&arena,
|
||||
State::new_in(&arena, src.as_bytes(), Attempting::Module),
|
||||
)
|
||||
.parse(&arena, State::new(src.as_bytes()))
|
||||
.map(|tuple| tuple.1);
|
||||
|
||||
// It should occur twice in the debug output - once for the pattern,
|
||||
|
@ -2810,10 +2790,7 @@ mod test_parse {
|
|||
);
|
||||
|
||||
let actual = module_defs()
|
||||
.parse(
|
||||
&arena,
|
||||
State::new_in(&arena, src.as_bytes(), Attempting::Module),
|
||||
)
|
||||
.parse(&arena, State::new(src.as_bytes()))
|
||||
.map(|tuple| tuple.1);
|
||||
|
||||
assert_eq!(Ok(expected), actual);
|
||||
|
@ -2833,11 +2810,8 @@ mod test_parse {
|
|||
);
|
||||
|
||||
let actual = module_defs()
|
||||
.parse(
|
||||
&arena,
|
||||
State::new_in(&arena, src.as_bytes(), Attempting::Module),
|
||||
)
|
||||
.map(|tuple| tuple.1);
|
||||
.parse(&arena, State::new(src.as_bytes()))
|
||||
.map(|tuple| tuple.0);
|
||||
|
||||
assert!(actual.is_ok());
|
||||
}
|
||||
|
@ -2858,18 +2832,15 @@ mod test_parse {
|
|||
);
|
||||
|
||||
let actual = module_defs()
|
||||
.parse(
|
||||
&arena,
|
||||
State::new_in(&arena, src.as_bytes(), Attempting::Module),
|
||||
)
|
||||
.map(|tuple| tuple.1);
|
||||
.parse(&arena, State::new(src.as_bytes()))
|
||||
.map(|tuple| tuple.0);
|
||||
|
||||
assert!(actual.is_ok());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn outdenting_newline_after_else() {
|
||||
let arena = Bump::new();
|
||||
let arena = &Bump::new();
|
||||
|
||||
// highlights a problem with the else branch demanding a newline after its expression
|
||||
let src = indoc!(
|
||||
|
@ -2881,16 +2852,19 @@ mod test_parse {
|
|||
"#
|
||||
);
|
||||
|
||||
let actual = module_defs()
|
||||
.parse(
|
||||
&arena,
|
||||
State::new_in(&arena, src.as_bytes(), Attempting::Module),
|
||||
)
|
||||
.map(|tuple| tuple.1);
|
||||
|
||||
dbg!(&actual);
|
||||
|
||||
assert!(actual.is_ok());
|
||||
let state = State::new(src.as_bytes());
|
||||
let parser = module_defs();
|
||||
let parsed = parser.parse(arena, state);
|
||||
match parsed {
|
||||
Ok((_, _, state)) => {
|
||||
dbg!(state);
|
||||
return;
|
||||
}
|
||||
Err((_, _fail, _state)) => {
|
||||
dbg!(_fail, _state);
|
||||
assert!(false);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
|
@@ -1,4 +1,5 @@
use roc_collections::all::MutSet;
use roc_parse::parser::{Col, Row};
use roc_problem::can::PrecedenceProblem::BothNonAssociative;
use roc_problem::can::{FloatErrorKind, IntErrorKind, Problem, RuntimeError};
use roc_region::all::Region;
|
@ -357,23 +358,7 @@ fn to_bad_ident_expr_report<'b>(
|
|||
let region = Region::from_row_col(row, col);
|
||||
|
||||
alloc.stack(vec![
|
||||
alloc.reflow(r"I trying to parse a record field accessor here:"),
|
||||
alloc.region_with_subregion(surroundings, region),
|
||||
alloc.concat(vec![
|
||||
alloc.reflow("Something like "),
|
||||
alloc.parser_suggestion(".name"),
|
||||
alloc.reflow(" or "),
|
||||
alloc.parser_suggestion(".height"),
|
||||
alloc.reflow(" that accesses a value from a record."),
|
||||
]),
|
||||
])
|
||||
}
|
||||
|
||||
PartStartsWithNumber(row, col) => {
|
||||
let region = Region::from_row_col(row, col);
|
||||
|
||||
alloc.stack(vec![
|
||||
alloc.reflow("I trying to parse a record field access here:"),
|
||||
alloc.reflow(r"I trying to parse a record field access here:"),
|
||||
alloc.region_with_subregion(surroundings, region),
|
||||
alloc.concat(vec![
|
||||
alloc.reflow("So I expect to see a lowercase letter next, like "),
|
||||
|
@ -430,34 +415,73 @@ fn to_bad_ident_expr_report<'b>(
|
|||
]),
|
||||
])
|
||||
}
|
||||
PrivateTagNotUppercase(row, col) => {
|
||||
let region = Region::from_row_col(row, col);
|
||||
|
||||
Underscore(row, col) => {
|
||||
let region =
|
||||
Region::from_rows_cols(surroundings.start_line, surroundings.start_col, row, col);
|
||||
alloc.stack(vec![
|
||||
alloc.reflow("I am trying to parse a private tag here:"),
|
||||
alloc.reflow("Underscores are not allowed in identifier names:"),
|
||||
alloc.region_with_subregion(surroundings, region),
|
||||
alloc.concat(vec![
|
||||
alloc.reflow(r"But after the "),
|
||||
alloc.keyword("@"),
|
||||
alloc.reflow(r" symbol I found a lowercase letter. "),
|
||||
alloc.reflow(r"All tag names (global and private)"),
|
||||
alloc.reflow(r" must start with an uppercase letter, like "),
|
||||
alloc.parser_suggestion("@UUID"),
|
||||
alloc.reflow(" or "),
|
||||
alloc.parser_suggestion("@Secrets"),
|
||||
alloc.reflow("."),
|
||||
]),
|
||||
alloc.concat(vec![alloc.reflow(
|
||||
r"I recommend using camelCase, it is the standard in the Roc ecosystem.",
|
||||
)]),
|
||||
])
|
||||
}
|
||||
|
||||
PrivateTagFieldAccess(_row, _col) => alloc.stack(vec![
|
||||
alloc.reflow("I am very confused by this field access:"),
|
||||
alloc.region(surroundings),
|
||||
alloc.concat(vec![
|
||||
alloc.reflow(r"It looks like a record field access on a private tag.")
|
||||
]),
|
||||
]),
|
||||
_ => todo!(),
|
||||
BadPrivateTag(row, col) => {
|
||||
use BadIdentNext::*;
|
||||
match what_is_next(alloc.src_lines, row, col) {
|
||||
LowercaseAccess(width) => {
|
||||
let region = Region::from_rows_cols(row, col, row, col + width);
|
||||
alloc.stack(vec![
|
||||
alloc.reflow("I am very confused by this field access:"),
|
||||
alloc.region_with_subregion(surroundings, region),
|
||||
alloc.concat(vec![
|
||||
alloc.reflow(r"It looks like a record field access on a private tag.")
|
||||
]),
|
||||
])
|
||||
}
|
||||
UppercaseAccess(width) => {
|
||||
let region = Region::from_rows_cols(row, col, row, col + width);
|
||||
alloc.stack(vec![
|
||||
alloc.reflow("I am very confused by this expression:"),
|
||||
alloc.region_with_subregion(surroundings, region),
|
||||
alloc.concat(vec![
|
||||
alloc.reflow(
|
||||
r"Looks like a private tag is treated like a module name. ",
|
||||
),
|
||||
alloc.reflow(r"Maybe you wanted a qualified name, like "),
|
||||
alloc.parser_suggestion("Json.Decode.string"),
|
||||
alloc.text("?"),
|
||||
]),
|
||||
])
|
||||
}
|
||||
Other(Some(c)) if c.is_lowercase() => {
|
||||
let region = Region::from_rows_cols(
|
||||
surroundings.start_line,
|
||||
surroundings.start_col + 1,
|
||||
row,
|
||||
col + 1,
|
||||
);
|
||||
alloc.stack(vec![
|
||||
alloc.reflow("I am trying to parse a private tag here:"),
|
||||
alloc.region_with_subregion(surroundings, region),
|
||||
alloc.concat(vec![
|
||||
alloc.reflow(r"But after the "),
|
||||
alloc.keyword("@"),
|
||||
alloc.reflow(r" symbol I found a lowercase letter. "),
|
||||
alloc.reflow(r"All tag names (global and private)"),
|
||||
alloc.reflow(r" must start with an uppercase letter, like "),
|
||||
alloc.parser_suggestion("@UUID"),
|
||||
alloc.reflow(" or "),
|
||||
alloc.parser_suggestion("@Secrets"),
|
||||
alloc.reflow("."),
|
||||
]),
|
||||
])
|
||||
}
|
||||
other => todo!("{:?}", other),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -486,22 +510,6 @@ fn to_bad_ident_pattern_report<'b>(
|
|||
])
|
||||
}
|
||||
|
||||
PartStartsWithNumber(row, col) => {
|
||||
let region = Region::from_row_col(row, col);
|
||||
|
||||
alloc.stack(vec![
|
||||
alloc.reflow("I trying to parse a record field access here:"),
|
||||
alloc.region_with_subregion(surroundings, region),
|
||||
alloc.concat(vec![
|
||||
alloc.reflow("So I expect to see a lowercase letter next, like "),
|
||||
alloc.parser_suggestion(".name"),
|
||||
alloc.reflow(" or "),
|
||||
alloc.parser_suggestion(".height"),
|
||||
alloc.reflow("."),
|
||||
]),
|
||||
])
|
||||
}
|
||||
|
||||
WeirdAccessor(_row, _col) => alloc.stack(vec![
|
||||
alloc.reflow("I am very confused by this field access"),
|
||||
alloc.region(surroundings),
|
||||
|
@ -547,33 +555,6 @@ fn to_bad_ident_pattern_report<'b>(
|
|||
]),
|
||||
])
|
||||
}
|
||||
PrivateTagNotUppercase(row, col) => {
|
||||
let region = Region::from_row_col(row, col);
|
||||
|
||||
alloc.stack(vec![
|
||||
alloc.reflow("I am trying to parse a private tag here:"),
|
||||
alloc.region_with_subregion(surroundings, region),
|
||||
alloc.concat(vec![
|
||||
alloc.reflow(r"But after the "),
|
||||
alloc.keyword("@"),
|
||||
alloc.reflow(r" symbol I found a lowercase letter. "),
|
||||
alloc.reflow(r"All tag names (global and private)"),
|
||||
alloc.reflow(r" must start with an uppercase letter, like "),
|
||||
alloc.parser_suggestion("@UUID"),
|
||||
alloc.reflow(" or "),
|
||||
alloc.parser_suggestion("@Secrets"),
|
||||
alloc.reflow("."),
|
||||
]),
|
||||
])
|
||||
}
|
||||
|
||||
PrivateTagFieldAccess(_row, _col) => alloc.stack(vec![
|
||||
alloc.reflow("I am very confused by this field access:"),
|
||||
alloc.region(surroundings),
|
||||
alloc.concat(vec![
|
||||
alloc.reflow(r"It looks like a record field access on a private tag.")
|
||||
]),
|
||||
]),
|
||||
|
||||
Underscore(row, col) => {
|
||||
let region = Region::from_row_col(row, col - 1);
|
||||
|
@ -591,6 +572,69 @@ fn to_bad_ident_pattern_report<'b>(
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
enum BadIdentNext<'a> {
    LowercaseAccess(u16),
    UppercaseAccess(u16),
    NumberAccess(u16),
    Keyword(&'a str),
    DanglingDot,
    Other(Option<char>),
}

fn what_is_next<'a>(source_lines: &'a [&'a str], row: Row, col: Col) -> BadIdentNext<'a> {
    let row_index = row as usize;
    let col_index = col as usize;
    match source_lines.get(row_index) {
        None => BadIdentNext::Other(None),
        Some(line) => {
            let chars = &line[col_index..];
            let mut it = chars.chars();

            match roc_parse::keyword::KEYWORDS
                .iter()
                .find(|keyword| crate::error::parse::starts_with_keyword(chars, keyword))
            {
                Some(keyword) => BadIdentNext::Keyword(keyword),
                None => match it.next() {
                    None => BadIdentNext::Other(None),
                    Some('.') => match it.next() {
                        Some(c) if c.is_lowercase() => {
                            BadIdentNext::LowercaseAccess(2 + till_whitespace(it) as u16)
                        }
                        Some(c) if c.is_uppercase() => {
                            BadIdentNext::UppercaseAccess(2 + till_whitespace(it) as u16)
                        }
                        Some(c) if c.is_ascii_digit() => {
                            BadIdentNext::NumberAccess(2 + till_whitespace(it) as u16)
                        }
                        _ => BadIdentNext::DanglingDot,
                    },
                    Some(c) => BadIdentNext::Other(Some(c)),
                },
            }
        }
    }
}

fn till_whitespace<I>(it: I) -> usize
where
    I: Iterator<Item = char>,
{
    let mut chomped = 0;

    for c in it {
        if c.is_ascii_whitespace() || c == '#' {
            break;
        } else {
            chomped += 1;
            continue;
        }
    }

    chomped
}

fn pretty_runtime_error<'b>(
|
||||
alloc: &'b RocDocAllocator<'b>,
|
||||
runtime_error: RuntimeError,
|
||||
|
|
|
@ -153,6 +153,7 @@ fn to_syntax_report<'a>(
|
|||
0,
|
||||
0,
|
||||
),
|
||||
Header(header) => to_header_report(alloc, filename, &header, 0, 0),
|
||||
_ => todo!("unhandled parse error: {:?}", parse_problem),
|
||||
}
|
||||
}
|
||||
|
@ -171,6 +172,8 @@ enum Node {
|
|||
IfElseBranch,
|
||||
ListElement,
|
||||
InsideParens,
|
||||
RecordConditionalDefault,
|
||||
StringFormat,
|
||||
}
|
||||
|
||||
fn to_expr_report<'a>(
|
||||
|
@ -197,14 +200,12 @@ fn to_expr_report<'a>(
|
|||
to_expr_in_parens_report(alloc, filename, context, &expr, *row, *col)
|
||||
}
|
||||
EExpr::Type(tipe, row, col) => to_type_report(alloc, filename, &tipe, *row, *col),
|
||||
EExpr::Def(syntax, row, col) => to_syntax_report(alloc, filename, syntax, *row, *col),
|
||||
|
||||
EExpr::ElmStyleFunction(region, row, col) => {
|
||||
let surroundings = Region::from_rows_cols(start_row, start_col, *row, *col);
|
||||
let region = *region;
|
||||
|
||||
let doc = alloc.stack(vec![
|
||||
alloc.reflow(r"I am in the middle of parsing a definition, but I got stuck here:"),
|
||||
alloc.reflow(r"I am partway through parsing a definition, but I got stuck here:"),
|
||||
alloc.region_with_subregion(surroundings, region),
|
||||
alloc.concat(vec![
|
||||
alloc.reflow("Looks like you are trying to define a function. "),
|
||||
|
@ -344,6 +345,8 @@ fn to_expr_report<'a>(
|
|||
]),
|
||||
),
|
||||
Node::ListElement => (r, c, alloc.text("a list")),
|
||||
Node::RecordConditionalDefault => (r, c, alloc.text("record field default")),
|
||||
Node::StringFormat => (r, c, alloc.text("a string format")),
|
||||
Node::InsideParens => (r, c, alloc.text("some parentheses")),
|
||||
},
|
||||
Context::InDef(r, c) => (r, c, alloc.text("a definition")),
|
||||
|
@ -375,12 +378,39 @@ fn to_expr_report<'a>(
|
|||
}
|
||||
}
|
||||
|
||||
EExpr::DefMissingFinalExpr(row, col) => {
|
||||
let surroundings = Region::from_rows_cols(start_row, start_col, *row, *col);
|
||||
let region = Region::from_row_col(*row, *col);
|
||||
|
||||
let doc = alloc.stack(vec![
|
||||
alloc.reflow(r"I am partway through parsing a definition, but I got stuck here:"),
|
||||
alloc.region_with_subregion(surroundings, region),
|
||||
alloc.concat(vec![
|
||||
alloc.reflow("This definition is missing a final expression."),
|
||||
alloc.reflow(" A nested definition must be followed by"),
|
||||
alloc.reflow(" either another definition, or an expression"),
|
||||
]),
|
||||
alloc.vcat(vec![
|
||||
alloc.text("x = 4").indent(4),
|
||||
alloc.text("y = 2").indent(4),
|
||||
alloc.text("").indent(4),
|
||||
alloc.text("x + y").indent(4),
|
||||
]),
|
||||
]);
|
||||
|
||||
Report {
|
||||
filename,
|
||||
doc,
|
||||
title: "MISSING FINAL EXPRESSION".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
EExpr::Colon(row, col) => {
|
||||
let surroundings = Region::from_rows_cols(start_row, start_col, *row, *col);
|
||||
let region = Region::from_row_col(*row, *col);
|
||||
|
||||
let doc = alloc.stack(vec![
|
||||
alloc.reflow(r"I am in the middle of parsing a definition, but I got stuck here:"),
|
||||
alloc.reflow(r"I am partway through parsing a definition, but I got stuck here:"),
|
||||
alloc.region_with_subregion(surroundings, region),
|
||||
alloc.concat(vec![
|
||||
alloc.reflow("Looks like you are trying to define a function. "),
|
||||
|
@ -419,7 +449,7 @@ fn to_lambda_report<'a>(
|
|||
|
||||
let doc = alloc.stack(vec![
|
||||
alloc
|
||||
.reflow(r"I am in the middle of parsing a function argument list, but I got stuck here:"),
|
||||
.reflow(r"I am partway through parsing a function argument list, but I got stuck here:"),
|
||||
alloc.region_with_subregion(surroundings, region),
|
||||
alloc.concat(vec![
|
||||
alloc.reflow("I was expecting a "),
|
||||
|
@ -440,7 +470,7 @@ fn to_lambda_report<'a>(
|
|||
|
||||
let doc = alloc.stack(vec![
|
||||
alloc
|
||||
.reflow(r"I am in the middle of parsing a function argument list, but I got stuck here:"),
|
||||
.reflow(r"I am partway through parsing a function argument list, but I got stuck here:"),
|
||||
alloc.region_with_subregion(surroundings, region),
|
||||
alloc.concat(vec![
|
||||
alloc.reflow("I was expecting a "),
|
||||
|
@ -464,7 +494,7 @@ fn to_lambda_report<'a>(
|
|||
|
||||
let doc = alloc.stack(vec![
|
||||
alloc
|
||||
.reflow(r"I am in the middle of parsing a function argument list, but I got stuck here:"),
|
||||
.reflow(r"I am partway through parsing a function argument list, but I got stuck here:"),
|
||||
alloc.region_with_subregion(surroundings, region),
|
||||
alloc.concat(vec![
|
||||
alloc.reflow("I was expecting a "),
|
||||
|
@ -485,7 +515,7 @@ fn to_lambda_report<'a>(
|
|||
|
||||
let doc = alloc.stack(vec![
|
||||
alloc
|
||||
.reflow(r"I am in the middle of parsing a function argument list, but I got stuck here:"),
|
||||
.reflow(r"I am partway through parsing a function argument list, but I got stuck here:"),
|
||||
alloc.region_with_subregion(surroundings, region),
|
||||
alloc.concat(vec![
|
||||
alloc.reflow("I was expecting a "),
|
||||
|
@ -509,7 +539,7 @@ fn to_lambda_report<'a>(
|
|||
|
||||
let doc = alloc.stack(vec![
|
||||
alloc
|
||||
.reflow(r"I am in the middle of parsing a function argument list, but I got stuck at this comma:"),
|
||||
.reflow(r"I am partway through parsing a function argument list, but I got stuck at this comma:"),
|
||||
alloc.region_with_subregion(surroundings, region),
|
||||
alloc.concat(vec![
|
||||
alloc.reflow("I was expecting an argument pattern before this, "),
|
||||
|
@ -529,7 +559,7 @@ fn to_lambda_report<'a>(
|
|||
|
||||
let doc = alloc.stack(vec![
|
||||
alloc
|
||||
.reflow(r"I am in the middle of parsing a function argument list, but I got stuck here:"),
|
||||
.reflow(r"I am partway through parsing a function argument list, but I got stuck here:"),
|
||||
alloc.region_with_subregion(surroundings, region),
|
||||
alloc.concat(vec![
|
||||
alloc.reflow("I was expecting an argument pattern before this, "),
|
||||
|
@ -636,7 +666,7 @@ fn to_unfinished_lambda_report<'a>(
|
|||
fn to_str_report<'a>(
|
||||
alloc: &'a RocDocAllocator<'a>,
|
||||
filename: PathBuf,
|
||||
_context: Context,
|
||||
context: Context,
|
||||
parse_problem: &roc_parse::parser::EString<'a>,
|
||||
start_row: Row,
|
||||
start_col: Col,
|
||||
|
@ -645,7 +675,14 @@ fn to_str_report<'a>(
|
|||
|
||||
match *parse_problem {
|
||||
EString::Open(_row, _col) => unreachable!("another branch would be taken"),
|
||||
EString::Format(syntax, row, col) => to_syntax_report(alloc, filename, syntax, row, col),
|
||||
EString::Format(expr, row, col) => to_expr_report(
|
||||
alloc,
|
||||
filename,
|
||||
Context::InNode(Node::StringFormat, start_row, start_col, Box::new(context)),
|
||||
expr,
|
||||
row,
|
||||
col,
|
||||
),
|
||||
EString::Space(error, row, col) => to_space_report(alloc, filename, &error, row, col),
|
||||
EString::UnknownEscape(row, col) => {
|
||||
let surroundings = Region::from_rows_cols(start_row, start_col, row, col);
|
||||
|
@ -712,6 +749,26 @@ fn to_str_report<'a>(
|
|||
title: "WEIRD CODE POINT".to_string(),
|
||||
}
|
||||
}
|
||||
EString::FormatEnd(row, col) => {
|
||||
let surroundings = Region::from_rows_cols(start_row, start_col, row, col);
|
||||
let region = Region::from_row_col(row, col);
|
||||
|
||||
let doc = alloc.stack(vec![
|
||||
alloc.reflow(r"I cannot find the end of this format expression:"),
|
||||
alloc.region_with_subregion(surroundings, region),
|
||||
alloc.concat(vec![
|
||||
alloc.reflow(r"You could change it to something like "),
|
||||
alloc.parser_suggestion("\"The count is \\(count\\)\""),
|
||||
alloc.reflow("."),
|
||||
]),
|
||||
]);
|
||||
|
||||
Report {
|
||||
filename,
|
||||
doc,
|
||||
title: "ENDLESS FORMAT".to_string(),
|
||||
}
|
||||
}
|
||||
EString::EndlessSingle(row, col) => {
|
||||
let surroundings = Region::from_rows_cols(start_row, start_col, row, col);
|
||||
let region = Region::from_row_col(row, col);
|
||||
|
@ -839,7 +896,6 @@ fn to_list_report<'a>(
|
|||
use roc_parse::parser::List;
|
||||
|
||||
match *parse_problem {
|
||||
List::Syntax(syntax, row, col) => to_syntax_report(alloc, filename, syntax, row, col),
|
||||
List::Space(error, row, col) => to_space_report(alloc, filename, &error, row, col),
|
||||
|
||||
List::Expr(expr, row, col) => to_expr_report(
|
||||
|
@ -948,7 +1004,6 @@ fn to_if_report<'a>(
|
|||
use roc_parse::parser::If;
|
||||
|
||||
match *parse_problem {
|
||||
If::Syntax(syntax, row, col) => to_syntax_report(alloc, filename, syntax, row, col),
|
||||
If::Space(error, row, col) => to_space_report(alloc, filename, &error, row, col),
|
||||
|
||||
If::Condition(expr, row, col) => to_expr_report(
|
||||
|
@ -1119,7 +1174,6 @@ fn to_when_report<'a>(
|
|||
}
|
||||
}
|
||||
|
||||
When::Syntax(syntax, row, col) => to_syntax_report(alloc, filename, syntax, row, col),
|
||||
When::Space(error, row, col) => to_space_report(alloc, filename, &error, row, col),
|
||||
|
||||
When::Branch(expr, row, col) => to_expr_report(
|
||||
|
@ -1512,7 +1566,20 @@ fn to_precord_report<'a>(
|
|||
PRecord::Pattern(pattern, row, col) => {
|
||||
to_pattern_report(alloc, filename, pattern, row, col)
|
||||
}
|
||||
PRecord::Syntax(syntax, row, col) => to_syntax_report(alloc, filename, syntax, row, col),
|
||||
|
||||
PRecord::Expr(expr, row, col) => to_expr_report(
|
||||
alloc,
|
||||
filename,
|
||||
Context::InNode(
|
||||
Node::RecordConditionalDefault,
|
||||
start_row,
|
||||
start_col,
|
||||
Box::new(Context::InDef(row, col)),
|
||||
),
|
||||
expr,
|
||||
row,
|
||||
col,
|
||||
),
|
||||
|
||||
PRecord::IndentOpen(row, col) => {
|
||||
let surroundings = Region::from_rows_cols(start_row, start_col, row, col);
|
||||
|
@ -2469,6 +2536,479 @@ fn to_tapply_report<'a>(
|
|||
}
|
||||
}
|
||||
|
||||
fn to_header_report<'a>(
|
||||
alloc: &'a RocDocAllocator<'a>,
|
||||
filename: PathBuf,
|
||||
parse_problem: &roc_parse::parser::EHeader<'a>,
|
||||
start_row: Row,
|
||||
start_col: Col,
|
||||
) -> Report<'a> {
|
||||
use roc_parse::parser::EHeader;
|
||||
|
||||
match parse_problem {
|
||||
EHeader::Provides(provides, row, col) => {
|
||||
to_provides_report(alloc, filename, &provides, *row, *col)
|
||||
}
|
||||
|
||||
EHeader::Exposes(exposes, row, col) => {
|
||||
to_exposes_report(alloc, filename, &exposes, *row, *col)
|
||||
}
|
||||
|
||||
EHeader::Imports(imports, row, col) => {
|
||||
to_imports_report(alloc, filename, &imports, *row, *col)
|
||||
}
|
||||
|
||||
EHeader::Requires(requires, row, col) => {
|
||||
to_requires_report(alloc, filename, &requires, *row, *col)
|
||||
}
|
||||
|
||||
EHeader::Packages(packages, row, col) => {
|
||||
to_packages_report(alloc, filename, &packages, *row, *col)
|
||||
}
|
||||
|
||||
EHeader::Effects(effects, row, col) => {
|
||||
to_effects_report(alloc, filename, &effects, *row, *col)
|
||||
}
|
||||
|
||||
EHeader::IndentStart(row, col) => {
|
||||
let surroundings = Region::from_rows_cols(start_row, start_col, *row, *col);
|
||||
let region = Region::from_row_col(*row, *col);
|
||||
|
||||
let doc = alloc.stack(vec![
|
||||
alloc.reflow(r"I am partway through parsing a header, but got stuck here:"),
|
||||
alloc.region_with_subregion(surroundings, region),
|
||||
alloc.concat(vec![alloc.reflow("I may be confused by indentation.")]),
|
||||
]);
|
||||
|
||||
Report {
|
||||
filename,
|
||||
doc,
|
||||
title: "INCOMPLETE HEADER".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
EHeader::Start(row, col) => {
|
||||
let surroundings = Region::from_rows_cols(start_row, start_col, *row, *col);
|
||||
let region = Region::from_row_col(*row, *col);
|
||||
|
||||
let doc = alloc.stack(vec![
|
||||
alloc.reflow(r"I am expecting a header, but got stuck here:"),
|
||||
alloc.region_with_subregion(surroundings, region),
|
||||
alloc.concat(vec![
|
||||
alloc.reflow("I am expecting a module keyword next, one of "),
|
||||
alloc.keyword("interface"),
|
||||
alloc.reflow(", "),
|
||||
alloc.keyword("app"),
|
||||
alloc.reflow(" or "),
|
||||
alloc.keyword("platform"),
|
||||
alloc.reflow("."),
|
||||
]),
|
||||
]);
|
||||
|
||||
Report {
|
||||
filename,
|
||||
doc,
|
||||
title: "MISSING HEADER".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
EHeader::ModuleName(row, col) => {
|
||||
let surroundings = Region::from_rows_cols(start_row, start_col, *row, *col);
|
||||
let region = Region::from_row_col(*row, *col);
|
||||
|
||||
let doc = alloc.stack(vec![
|
||||
alloc.reflow(r"I am partway through parsing a header, but got stuck here:"),
|
||||
alloc.region_with_subregion(surroundings, region),
|
||||
alloc.concat(vec![
|
||||
alloc.reflow("I am expecting a module name next, like "),
|
||||
alloc.parser_suggestion("BigNum"),
|
||||
alloc.reflow(" or "),
|
||||
alloc.parser_suggestion("Main"),
|
||||
alloc.reflow(". Module names must start with an uppercase letter."),
|
||||
]),
|
||||
]);
|
||||
|
||||
Report {
|
||||
filename,
|
||||
doc,
|
||||
title: "WEIRD MODULE NAME".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
EHeader::AppName(_, row, col) => {
|
||||
let surroundings = Region::from_rows_cols(start_row, start_col, *row, *col);
|
||||
let region = Region::from_row_col(*row, *col);
|
||||
|
||||
let doc = alloc.stack(vec![
|
||||
alloc.reflow(r"I am partway through parsing a header, but got stuck here:"),
|
||||
alloc.region_with_subregion(surroundings, region),
|
||||
alloc.concat(vec![
|
||||
alloc.reflow("I am expecting an application name next, like "),
|
||||
alloc.parser_suggestion("app \"main\""),
|
||||
alloc.reflow(" or "),
|
||||
alloc.parser_suggestion("app \"editor\""),
|
||||
alloc.reflow(". App names are surrounded by quotation marks."),
|
||||
]),
|
||||
]);
|
||||
|
||||
Report {
|
||||
filename,
|
||||
doc,
|
||||
title: "WEIRD APP NAME".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
EHeader::PlatformName(_, row, col) => {
|
||||
let surroundings = Region::from_rows_cols(start_row, start_col, *row, *col);
|
||||
let region = Region::from_row_col(*row, *col);
|
||||
|
||||
let doc = alloc.stack(vec![
|
||||
alloc.reflow(r"I am partway through parsing a header, but got stuck here:"),
|
||||
alloc.region_with_subregion(surroundings, region),
|
||||
alloc.concat(vec![
|
||||
alloc.reflow("I am expecting a platform name next, like "),
|
||||
alloc.parser_suggestion("roc/core"),
|
||||
alloc.reflow("."),
|
||||
]),
|
||||
]);
|
||||
|
||||
Report {
|
||||
filename,
|
||||
doc,
|
||||
title: "WEIRD MODULE NAME".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
EHeader::Space(error, row, col) => to_space_report(alloc, filename, &error, *row, *col),
|
||||
}
|
||||
}
|
||||
|
||||
fn to_provides_report<'a>(
|
||||
alloc: &'a RocDocAllocator<'a>,
|
||||
filename: PathBuf,
|
||||
parse_problem: &roc_parse::parser::EProvides,
|
||||
start_row: Row,
|
||||
start_col: Col,
|
||||
) -> Report<'a> {
|
||||
use roc_parse::parser::EProvides;
|
||||
|
||||
match *parse_problem {
|
||||
EProvides::Identifier(row, col) => {
|
||||
let surroundings = Region::from_rows_cols(start_row, start_col, row, col);
|
||||
let region = Region::from_row_col(row, col);
|
||||
|
||||
let doc = alloc.stack(vec![
|
||||
alloc
|
||||
.reflow(r"I am partway through parsing a provides list, but I got stuck here:"),
|
||||
alloc.region_with_subregion(surroundings, region),
|
||||
alloc.concat(vec![alloc.reflow(
|
||||
"I was expecting a type name, value name or function name next, like ",
|
||||
)]),
|
||||
alloc
|
||||
.parser_suggestion("provides [ Animal, default, tame ]")
|
||||
.indent(4),
|
||||
]);
|
||||
|
||||
Report {
|
||||
filename,
|
||||
doc,
|
||||
title: "WEIRD PROVIDES".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
EProvides::Provides(row, col) => {
|
||||
let surroundings = Region::from_rows_cols(start_row, start_col, row, col);
|
||||
let region = Region::from_row_col(row, col);
|
||||
|
||||
let doc = alloc.stack(vec![
|
||||
alloc.reflow(r"I am partway through parsing a header, but I got stuck here:"),
|
||||
alloc.region_with_subregion(surroundings, region),
|
||||
alloc.concat(vec![
|
||||
alloc.reflow("I am expecting the "),
|
||||
alloc.keyword("provides"),
|
||||
alloc.reflow(" keyword next, like "),
|
||||
]),
|
||||
alloc
|
||||
.parser_suggestion("provides [ Animal, default, tame ]")
|
||||
.indent(4),
|
||||
]);
|
||||
|
||||
Report {
|
||||
filename,
|
||||
doc,
|
||||
title: "WEIRD PROVIDES".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
EProvides::Space(error, row, col) => to_space_report(alloc, filename, &error, row, col),
|
||||
|
||||
_ => todo!("unhandled parse error {:?}", parse_problem),
|
||||
}
|
||||
}
|
||||
|
||||
fn to_exposes_report<'a>(
|
||||
alloc: &'a RocDocAllocator<'a>,
|
||||
filename: PathBuf,
|
||||
parse_problem: &roc_parse::parser::EExposes,
|
||||
start_row: Row,
|
||||
start_col: Col,
|
||||
) -> Report<'a> {
|
||||
use roc_parse::parser::EExposes;
|
||||
|
||||
match *parse_problem {
|
||||
EExposes::Identifier(row, col) => {
|
||||
let surroundings = Region::from_rows_cols(start_row, start_col, row, col);
|
||||
let region = Region::from_row_col(row, col);
|
||||
|
||||
let doc = alloc.stack(vec![
|
||||
alloc.reflow(r"I am partway through parsing a exposes list, but I got stuck here:"),
|
||||
alloc.region_with_subregion(surroundings, region),
|
||||
alloc.concat(vec![alloc.reflow(
|
||||
"I was expecting a type name, value name or function name next, like ",
|
||||
)]),
|
||||
alloc
|
||||
.parser_suggestion("exposes [ Animal, default, tame ]")
|
||||
.indent(4),
|
||||
]);
|
||||
|
||||
Report {
|
||||
filename,
|
||||
doc,
|
||||
title: "WEIRD EXPOSES".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
EExposes::Exposes(row, col) => {
|
||||
let surroundings = Region::from_rows_cols(start_row, start_col, row, col);
|
||||
let region = Region::from_row_col(row, col);
|
||||
|
||||
let doc = alloc.stack(vec![
|
||||
alloc.reflow(r"I am partway through parsing a header, but I got stuck here:"),
|
||||
alloc.region_with_subregion(surroundings, region),
|
||||
alloc.concat(vec![
|
||||
alloc.reflow("I am expecting the "),
|
||||
alloc.keyword("exposes"),
|
||||
alloc.reflow(" keyword next, like "),
|
||||
]),
|
||||
alloc
|
||||
.parser_suggestion("exposes [ Animal, default, tame ]")
|
||||
.indent(4),
|
||||
]);
|
||||
|
||||
Report {
|
||||
filename,
|
||||
doc,
|
||||
title: "WEIRD EXPOSES".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
EExposes::Space(error, row, col) => to_space_report(alloc, filename, &error, row, col),
|
||||
|
||||
_ => todo!("unhandled parse error {:?}", parse_problem),
|
||||
}
|
||||
}
|
||||
|
||||
fn to_imports_report<'a>(
|
||||
alloc: &'a RocDocAllocator<'a>,
|
||||
filename: PathBuf,
|
||||
parse_problem: &roc_parse::parser::EImports,
|
||||
start_row: Row,
|
||||
start_col: Col,
|
||||
) -> Report<'a> {
|
||||
use roc_parse::parser::EImports;
|
||||
|
||||
match *parse_problem {
|
||||
EImports::Identifier(row, col) => {
|
||||
let surroundings = Region::from_rows_cols(start_row, start_col, row, col);
|
||||
let region = Region::from_row_col(row, col);
|
||||
|
||||
let doc = alloc.stack(vec![
|
||||
alloc.reflow(r"I am partway through parsing a imports list, but I got stuck here:"),
|
||||
alloc.region_with_subregion(surroundings, region),
|
||||
alloc.concat(vec![alloc.reflow(
|
||||
"I was expecting a type name, value name or function name next, like ",
|
||||
)]),
|
||||
alloc
|
||||
.parser_suggestion("imports [ Animal, default, tame ]")
|
||||
.indent(4),
|
||||
]);
|
||||
|
||||
Report {
|
||||
filename,
|
||||
doc,
|
||||
title: "WEIRD EXPOSES".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
EImports::Imports(row, col) => {
|
||||
let surroundings = Region::from_rows_cols(start_row, start_col, row, col);
|
||||
let region = Region::from_row_col(row, col);
|
||||
|
||||
let doc = alloc.stack(vec![
|
||||
alloc.reflow(r"I am partway through parsing a header, but I got stuck here:"),
|
||||
alloc.region_with_subregion(surroundings, region),
|
||||
alloc.concat(vec![
|
||||
alloc.reflow("I am expecting the "),
|
||||
alloc.keyword("imports"),
|
||||
alloc.reflow(" keyword next, like "),
|
||||
]),
|
||||
alloc
|
||||
.parser_suggestion("imports [ Animal, default, tame ]")
|
||||
.indent(4),
|
||||
]);
|
||||
|
||||
Report {
|
||||
filename,
|
||||
doc,
|
||||
title: "WEIRD IMPORTS".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
EImports::Space(error, row, col) => to_space_report(alloc, filename, &error, row, col),
|
||||
|
||||
EImports::ModuleName(row, col) => {
|
||||
let surroundings = Region::from_rows_cols(start_row, start_col, row, col);
|
||||
let region = Region::from_row_col(row, col);
|
||||
|
||||
let doc = alloc.stack(vec![
|
||||
alloc.reflow(r"I am partway through parsing a header, but got stuck here:"),
|
||||
alloc.region_with_subregion(surroundings, region),
|
||||
alloc.concat(vec![
|
||||
alloc.reflow("I am expecting a module name next, like "),
|
||||
alloc.parser_suggestion("BigNum"),
|
||||
alloc.reflow(" or "),
|
||||
alloc.parser_suggestion("Main"),
|
||||
alloc.reflow(". Module names must start with an uppercase letter."),
|
||||
]),
|
||||
]);
|
||||
|
||||
Report {
|
||||
filename,
|
||||
doc,
|
||||
title: "WEIRD MODULE NAME".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
_ => todo!("unhandled parse error {:?}", parse_problem),
|
||||
}
|
||||
}
|
||||
|
||||
fn to_requires_report<'a>(
|
||||
alloc: &'a RocDocAllocator<'a>,
|
||||
filename: PathBuf,
|
||||
parse_problem: &roc_parse::parser::ERequires<'a>,
|
||||
start_row: Row,
|
||||
start_col: Col,
|
||||
) -> Report<'a> {
|
||||
use roc_parse::parser::ERequires;
|
||||
|
||||
match *parse_problem {
|
||||
ERequires::Requires(row, col) => {
|
||||
let surroundings = Region::from_rows_cols(start_row, start_col, row, col);
|
||||
let region = Region::from_row_col(row, col);
|
||||
|
||||
let doc = alloc.stack(vec![
|
||||
alloc.reflow(r"I am partway through parsing a header, but I got stuck here:"),
|
||||
alloc.region_with_subregion(surroundings, region),
|
||||
alloc.concat(vec![
|
||||
alloc.reflow("I am expecting the "),
|
||||
alloc.keyword("requires"),
|
||||
alloc.reflow(" keyword next, like "),
|
||||
]),
|
||||
alloc
|
||||
.parser_suggestion("requires { main : Task I64 Str }")
|
||||
.indent(4),
|
||||
]);
|
||||
|
||||
Report {
|
||||
filename,
|
||||
doc,
|
||||
title: "MISSING REQUIRES".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
ERequires::Space(error, row, col) => to_space_report(alloc, filename, &error, row, col),
|
||||
|
||||
_ => todo!("unhandled parse error {:?}", parse_problem),
|
||||
}
|
||||
}
|
||||
|
||||
fn to_packages_report<'a>(
|
||||
alloc: &'a RocDocAllocator<'a>,
|
||||
filename: PathBuf,
|
||||
parse_problem: &roc_parse::parser::EPackages,
|
||||
start_row: Row,
|
||||
start_col: Col,
|
||||
) -> Report<'a> {
|
||||
use roc_parse::parser::EPackages;
|
||||
|
||||
match *parse_problem {
|
||||
EPackages::Packages(row, col) => {
|
||||
let surroundings = Region::from_rows_cols(start_row, start_col, row, col);
|
||||
let region = Region::from_row_col(row, col);
|
||||
|
||||
let doc = alloc.stack(vec![
|
||||
alloc.reflow(r"I am partway through parsing a header, but I got stuck here:"),
|
||||
alloc.region_with_subregion(surroundings, region),
|
||||
alloc.concat(vec![
|
||||
alloc.reflow("I am expecting the "),
|
||||
alloc.keyword("packages"),
|
||||
alloc.reflow(" keyword next, like "),
|
||||
]),
|
||||
alloc.parser_suggestion("packages {}").indent(4),
|
||||
]);
|
||||
|
||||
Report {
|
||||
filename,
|
||||
doc,
|
||||
title: "MISSING PACKAGES".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
EPackages::Space(error, row, col) => to_space_report(alloc, filename, &error, row, col),
|
||||
|
||||
_ => todo!("unhandled parse error {:?}", parse_problem),
|
||||
}
|
||||
}
|
||||
|
||||
fn to_effects_report<'a>(
|
||||
alloc: &'a RocDocAllocator<'a>,
|
||||
filename: PathBuf,
|
||||
parse_problem: &roc_parse::parser::EEffects,
|
||||
start_row: Row,
|
||||
start_col: Col,
|
||||
) -> Report<'a> {
|
||||
use roc_parse::parser::EEffects;
|
||||
|
||||
match *parse_problem {
|
||||
EEffects::Effects(row, col) => {
|
||||
let surroundings = Region::from_rows_cols(start_row, start_col, row, col);
|
||||
let region = Region::from_row_col(row, col);
|
||||
|
||||
let doc = alloc.stack(vec![
|
||||
alloc.reflow(r"I am partway through parsing a header, but I got stuck here:"),
|
||||
alloc.region_with_subregion(surroundings, region),
|
||||
alloc.concat(vec![
|
||||
alloc.reflow("I am expecting the "),
|
||||
alloc.keyword("effects"),
|
||||
alloc.reflow(" keyword next, like "),
|
||||
]),
|
||||
alloc.parser_suggestion("effects {}").indent(4),
|
||||
]);
|
||||
|
||||
Report {
|
||||
filename,
|
||||
doc,
|
||||
title: "MISSING PACKAGES".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
EEffects::Space(error, row, col) => to_space_report(alloc, filename, &error, row, col),
|
||||
|
||||
_ => todo!("unhandled parse error {:?}", parse_problem),
|
||||
}
|
||||
}
|
||||
|
||||
fn to_space_report<'a>(
|
||||
alloc: &'a RocDocAllocator<'a>,
|
||||
filename: PathBuf,
|
||||
|
@ -2538,7 +3078,7 @@ fn what_is_next<'a>(source_lines: &'a [&'a str], row: Row, col: Col) -> Next<'a>
|
|||
}
|
||||
}
|
||||
|
||||
fn starts_with_keyword(rest_of_line: &str, keyword: &str) -> bool {
|
||||
pub fn starts_with_keyword(rest_of_line: &str, keyword: &str) -> bool {
|
||||
if let Some(stripped) = rest_of_line.strip_prefix(keyword) {
|
||||
match stripped.chars().next() {
|
||||
None => true,
|
||||
|
|
|
@ -11,9 +11,6 @@ use roc_collections::all::{ImMap, MutMap, SendSet};
|
|||
use roc_constrain::expr::constrain_expr;
|
||||
use roc_constrain::module::{constrain_imported_values, Import};
|
||||
use roc_module::symbol::{IdentIds, Interns, ModuleId, ModuleIds};
|
||||
use roc_parse::ast::{self, Attempting};
|
||||
use roc_parse::blankspace::space0_before;
|
||||
use roc_parse::parser::{loc, Parser, State, SyntaxError};
|
||||
use roc_problem::can::Problem;
|
||||
use roc_region::all::Located;
|
||||
use roc_solve::solve;
|
||||
|
@ -100,28 +97,9 @@ pub struct CanExprOut {
|
|||
pub constraint: Constraint,
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn parse_with<'a>(arena: &'a Bump, input: &'a str) -> Result<ast::Expr<'a>, SyntaxError<'a>> {
|
||||
parse_loc_with(arena, input).map(|loc_expr| loc_expr.value)
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn parse_loc_with<'a>(
|
||||
arena: &'a Bump,
|
||||
input: &'a str,
|
||||
) -> Result<Located<ast::Expr<'a>>, SyntaxError<'a>> {
|
||||
let state = State::new_in(arena, input.trim().as_bytes(), Attempting::Module);
|
||||
let parser = space0_before(loc(roc_parse::expr::expr(0)), 0);
|
||||
let answer = parser.parse(&arena, state);
|
||||
|
||||
answer
|
||||
.map(|(_, loc_expr, _)| loc_expr)
|
||||
.map_err(|(_, fail, _)| fail)
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ParseErrOut<'a> {
|
||||
pub fail: SyntaxError<'a>,
|
||||
pub fail: roc_parse::parser::SyntaxError<'a>,
|
||||
pub home: ModuleId,
|
||||
pub interns: Interns,
|
||||
}
|
||||
|
@ -132,7 +110,7 @@ pub fn can_expr_with<'a>(
|
|||
home: ModuleId,
|
||||
expr_str: &'a str,
|
||||
) -> Result<CanExprOut, ParseErrOut<'a>> {
|
||||
let loc_expr = match parse_loc_with(&arena, expr_str) {
|
||||
let loc_expr = match roc_parse::test_helpers::parse_loc_with(&arena, expr_str) {
|
||||
Ok(e) => e,
|
||||
Err(fail) => {
|
||||
let interns = Interns::default();
|
||||
|
|
|
@ -169,6 +169,37 @@ mod test_reporting {
|
|||
}
|
||||
}
|
||||
|
||||
fn list_header_reports<F>(arena: &Bump, src: &str, buf: &mut String, callback: F)
where
    F: FnOnce(RocDocBuilder<'_>, &mut String),
{
    use ven_pretty::DocAllocator;

    use roc_parse::parser::State;

    let state = State::new(src.as_bytes());

    let filename = filename_from_string(r"\code\proj\Main.roc");
    let src_lines: Vec<&str> = src.split('\n').collect();

    match roc_parse::module::parse_header(arena, state) {
        Err(fail) => {
            let interns = Interns::default();
            let home = crate::helpers::test_home();

            let alloc = RocDocAllocator::new(&src_lines, home, &interns);

            use roc_parse::parser::SyntaxError;
            let problem =
                SyntaxError::Header(fail).into_parse_problem(filename.clone(), src.as_bytes());
            let doc = parse_problem(&alloc, filename, 0, problem);

            callback(doc.pretty(&alloc).append(alloc.line()), buf)
        }
        Ok(_) => todo!(),
    }
}

fn report_problem_as(src: &str, expected_rendering: &str) {
|
||||
let mut buf: String = String::new();
|
||||
let arena = Bump::new();
|
||||
|
@ -193,6 +224,30 @@ mod test_reporting {
|
|||
assert_eq!(buf, expected_rendering);
|
||||
}
|
||||
|
||||
fn report_header_problem_as(src: &str, expected_rendering: &str) {
|
||||
let mut buf: String = String::new();
|
||||
let arena = Bump::new();
|
||||
|
||||
let callback = |doc: RocDocBuilder<'_>, buf: &mut String| {
|
||||
doc.1
|
||||
.render_raw(70, &mut roc_reporting::report::CiWrite::new(buf))
|
||||
.expect("list_reports")
|
||||
};
|
||||
|
||||
list_header_reports(&arena, src, &mut buf, callback);
|
||||
|
||||
// convenient to copy-paste the generated message
|
||||
if true {
|
||||
if buf != expected_rendering {
|
||||
for line in buf.split("\n") {
|
||||
println!(" {}", line);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
assert_eq!(buf, expected_rendering);
|
||||
}
|
||||
|
||||
fn color_report_problem_as(src: &str, expected_rendering: &str) {
|
||||
let mut buf: String = String::new();
|
||||
let arena = Bump::new();
|
||||
|
@ -3160,7 +3215,7 @@ mod test_reporting {
|
|||
r#"
|
||||
── ARGUMENTS BEFORE EQUALS ─────────────────────────────────────────────────────
|
||||
|
||||
I am in the middle of parsing a definition, but I got stuck here:
|
||||
I am partway through parsing a definition, but I got stuck here:
|
||||
|
||||
1│ f x y = x
|
||||
^^^
|
||||
|
@ -4070,12 +4125,12 @@ mod test_reporting {
|
|||
r#"
|
||||
── SYNTAX PROBLEM ──────────────────────────────────────────────────────────────
|
||||
|
||||
I trying to parse a record field accessor here:
|
||||
I trying to parse a record field access here:
|
||||
|
||||
1│ foo.bar.
|
||||
^
|
||||
|
||||
Something like .name or .height that accesses a value from a record.
|
||||
So I expect to see a lowercase letter next, like .name or .height.
|
||||
"#
|
||||
),
|
||||
)
|
||||
|
@ -4093,14 +4148,13 @@ mod test_reporting {
|
|||
r#"
|
||||
── SYNTAX PROBLEM ──────────────────────────────────────────────────────────────
|
||||
|
||||
I am trying to parse a qualified name here:
|
||||
I am very confused by this expression:
|
||||
|
||||
1│ @Foo.Bar
|
||||
^
|
||||
^^^^
|
||||
|
||||
This looks like a qualified tag name to me, but tags cannot be
|
||||
qualified! Maybe you wanted a qualified name, something like
|
||||
Json.Decode.string?
|
||||
Looks like a private tag is treated like a module name. Maybe you
|
||||
wanted a qualified name, like Json.Decode.string?
|
||||
"#
|
||||
),
|
||||
)
|
||||
|
@ -4508,21 +4562,21 @@ mod test_reporting {
|
|||
indoc!(
|
||||
r#"
|
||||
f : Foo..Bar
|
||||
|
||||
f
|
||||
"#
|
||||
),
|
||||
indoc!(
|
||||
r#"
|
||||
── DOUBLE DOT ──────────────────────────────────────────────────────────────────
|
||||
|
||||
I encountered two dots in a row:
|
||||
|
||||
1│ f : Foo..Bar
|
||||
^
|
||||
|
||||
Try removing one of them.
|
||||
"#
|
||||
),
|
||||
indoc!(r#""#),
|
||||
)
|
||||
|
||||
// ── DOUBLE DOT ──────────────────────────────────────────────────────────────────
|
||||
//
|
||||
// I encountered two dots in a row:
|
||||
//
|
||||
// 1│ f : Foo..Bar
|
||||
// ^
|
||||
//
|
||||
// Try removing one of them.
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -4531,22 +4585,22 @@ mod test_reporting {
|
|||
indoc!(
|
||||
r#"
|
||||
f : Foo.Bar.
|
||||
|
||||
f
|
||||
"#
|
||||
),
|
||||
indoc!(
|
||||
r#"
|
||||
── TRAILING DOT ────────────────────────────────────────────────────────────────
|
||||
|
||||
I encountered a dot with nothing after it:
|
||||
|
||||
1│ f : Foo.Bar.
|
||||
^
|
||||
|
||||
Dots are used to refer to a type in a qualified way, like
|
||||
Num.I64 or List.List a. Try adding a type name next.
|
||||
"#
|
||||
),
|
||||
indoc!(r#""#),
|
||||
)
|
||||
|
||||
// ── TRAILING DOT ────────────────────────────────────────────────────────────────
|
||||
//
|
||||
// I encountered a dot with nothing after it:
|
||||
//
|
||||
// 1│ f : Foo.Bar.
|
||||
// ^
|
||||
//
|
||||
// Dots are used to refer to a type in a qualified way, like
|
||||
// Num.I64 or List.List a. Try adding a type name next.
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -4582,26 +4636,40 @@ mod test_reporting {
|
|||
indoc!(
|
||||
r#"
|
||||
f : Foo.1
|
||||
|
||||
f
|
||||
"#
|
||||
),
|
||||
indoc!(
|
||||
r#"
|
||||
── WEIRD QUALIFIED NAME ────────────────────────────────────────────────────────
|
||||
|
||||
I encountered a number at the start of a qualified name segment:
|
||||
|
||||
1│ f : Foo.1
|
||||
^
|
||||
|
||||
All parts of a qualified type name must start with an uppercase
|
||||
letter, like Num.I64 or List.List a.
|
||||
"#
|
||||
),
|
||||
indoc!(r#""#),
|
||||
)
|
||||
|
||||
// ── WEIRD QUALIFIED NAME ────────────────────────────────────────────────────────
|
||||
//
|
||||
// I encountered a number at the start of a qualified name segment:
|
||||
//
|
||||
// 1│ f : Foo.1
|
||||
// ^
|
||||
//
|
||||
// All parts of a qualified type name must start with an uppercase
|
||||
// letter, like Num.I64 or List.List a.
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn type_apply_start_with_lowercase() {
|
||||
report_problem_as(
|
||||
indoc!(
|
||||
r#"
|
||||
f : Foo.foo
|
||||
|
||||
f
|
||||
"#
|
||||
),
|
||||
indoc!(r#""#),
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn def_missing_final_expression() {
|
||||
report_problem_as(
|
||||
indoc!(
|
||||
r#"
|
||||
|
@ -4610,16 +4678,20 @@ mod test_reporting {
|
|||
),
|
||||
indoc!(
|
||||
r#"
|
||||
── WEIRD QUALIFIED NAME ────────────────────────────────────────────────────────
|
||||
── MISSING FINAL EXPRESSION ────────────────────────────────────────────────────
|
||||
|
||||
I encountered a lowercase letter at the start of a qualified name
|
||||
segment:
|
||||
I am partway through parsing a definition, but I got stuck here:
|
||||
|
||||
1│ f : Foo.foo
|
||||
^
|
||||
^
|
||||
|
||||
All parts of a qualified type name must start with an uppercase
|
||||
letter, like Num.I64 or List.List a.
|
||||
This definition is missing a final expression. A nested definition
|
||||
must be followed by either another definition, or an expression
|
||||
|
||||
x = 4
|
||||
y = 2
|
||||
|
||||
x + y
|
||||
"#
|
||||
),
|
||||
)
|
||||
|
@ -4984,8 +5056,8 @@ mod test_reporting {
|
|||
r#"
|
||||
── UNFINISHED ARGUMENT LIST ────────────────────────────────────────────────────
|
||||
|
||||
I am in the middle of parsing a function argument list, but I got
|
||||
stuck at this comma:
|
||||
I am partway through parsing a function argument list, but I got stuck
|
||||
at this comma:
|
||||
|
||||
1│ \a,,b -> 1
|
||||
^
|
||||
|
@ -5009,8 +5081,8 @@ mod test_reporting {
|
|||
r#"
|
||||
── UNFINISHED ARGUMENT LIST ────────────────────────────────────────────────────
|
||||
|
||||
I am in the middle of parsing a function argument list, but I got
|
||||
stuck at this comma:
|
||||
I am partway through parsing a function argument list, but I got stuck
|
||||
at this comma:
|
||||
|
||||
1│ \,b -> 1
|
||||
^
|
||||
|
@ -5417,12 +5489,12 @@ mod test_reporting {
|
|||
r#"
|
||||
── SYNTAX PROBLEM ──────────────────────────────────────────────────────────────
|
||||
|
||||
I trying to parse a record field accessor here:
|
||||
I trying to parse a record field access here:
|
||||
|
||||
1│ Num.add . 23
|
||||
^
|
||||
|
||||
Something like .name or .height that accesses a value from a record.
|
||||
So I expect to see a lowercase letter next, like .name or .height.
|
||||
"#
|
||||
),
|
||||
)
|
||||
|
@ -5468,7 +5540,7 @@ mod test_reporting {
|
|||
I am very confused by this field access:
|
||||
|
||||
1│ @UUID.bar
|
||||
^^^^^^^^^
|
||||
^^^^
|
||||
|
||||
It looks like a record field access on a private tag.
|
||||
"#
|
||||
|
@ -5702,4 +5774,113 @@ mod test_reporting {
|
|||
),
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn provides_to_identifier() {
|
||||
report_header_problem_as(
|
||||
indoc!(
|
||||
r#"
|
||||
app "test-base64"
|
||||
packages { base: "platform" }
|
||||
imports [base.Task, Base64 ]
|
||||
provides [ main, @Foo ] to base
|
||||
"#
|
||||
),
|
||||
indoc!(
|
||||
r#"
|
||||
── WEIRD PROVIDES ──────────────────────────────────────────────────────────────
|
||||
|
||||
I am partway through parsing a provides list, but I got stuck here:
|
||||
|
||||
3│ imports [base.Task, Base64 ]
|
||||
4│ provides [ main, @Foo ] to base
|
||||
^
|
||||
|
||||
I was expecting a type name, value name or function name next, like
|
||||
|
||||
provides [ Animal, default, tame ]
|
||||
"#
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn exposes_identifier() {
|
||||
report_header_problem_as(
|
||||
indoc!(
|
||||
r#"
|
||||
interface Foobar
|
||||
exposes [ main, @Foo ]
|
||||
imports [base.Task, Base64 ]
|
||||
"#
|
||||
),
|
||||
indoc!(
|
||||
r#"
|
||||
── WEIRD EXPOSES ───────────────────────────────────────────────────────────────
|
||||
|
||||
I am partway through parsing a exposes list, but I got stuck here:
|
||||
|
||||
1│ interface Foobar
|
||||
2│ exposes [ main, @Foo ]
|
||||
^
|
||||
|
||||
I was expecting a type name, value name or function name next, like
|
||||
|
||||
exposes [ Animal, default, tame ]
|
||||
"#
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_module_name() {
|
||||
report_header_problem_as(
|
||||
indoc!(
|
||||
r#"
|
||||
interface foobar
|
||||
exposes [ main, @Foo ]
|
||||
imports [base.Task, Base64 ]
|
||||
"#
|
||||
),
|
||||
indoc!(
|
||||
r#"
|
||||
── WEIRD MODULE NAME ───────────────────────────────────────────────────────────
|
||||
|
||||
I am partway through parsing a header, but got stuck here:
|
||||
|
||||
1│ interface foobar
|
||||
^
|
||||
|
||||
I am expecting a module name next, like BigNum or Main. Module names
|
||||
must start with an uppercase letter.
|
||||
"#
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_app_name() {
|
||||
report_header_problem_as(
|
||||
indoc!(
|
||||
r#"
|
||||
app foobar
|
||||
exposes [ main, @Foo ]
|
||||
imports [base.Task, Base64 ]
|
||||
"#
|
||||
),
|
||||
indoc!(
|
||||
r#"
|
||||
── WEIRD APP NAME ──────────────────────────────────────────────────────────────
|
||||
|
||||
I am partway through parsing a header, but got stuck here:
|
||||
|
||||
1│ app foobar
|
||||
^
|
||||
|
||||
I am expecting an application name next, like app "main" or
|
||||
app "editor". App names are surrounded by quotation marks.
|
||||
"#
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -568,6 +568,36 @@ fn list_map_closure() {
|
|||
);
|
||||
}
|
||||
|
||||
#[test]
fn list_map3_group() {
    assert_evals_to!(
        indoc!(
            r#"
            List.map3 [1,2,3] [3,2,1] [2,1,3] (\a, b, c -> Group a b c)
            "#
        ),
        RocList::from_slice(&[(1, 3, 2), (2, 2, 1), (3, 1, 3)]),
        RocList<(i64, i64, i64)>
    );
}

#[test]
fn list_map3_different_length() {
    assert_evals_to!(
        indoc!(
            r#"
            List.map3
                ["a", "b", "d"]
                ["b", "x"]
                ["c"]
                (\a, b, c -> Str.concat a (Str.concat b c))
            "#
        ),
        RocList::from_slice(&[RocStr::from_slice("abc".as_bytes()),]),
        RocList<RocStr>
    );
}

#[test]
|
||||
fn list_map2_pair() {
|
||||
assert_evals_to!(
|
||||
|
|
|
@ -19,29 +19,29 @@ roc_fmt = { path = "../compiler/fmt" }
|
|||
roc_reporting = { path = "../compiler/reporting" }
|
||||
# TODO switch to clap 3.0.0 once it's out. Tried adding clap = "~3.0.0-beta.1" and cargo wouldn't accept it
|
||||
ven_graph = { path = "../vendor/pathfinding" }
|
||||
im = "14" # im and im-rc should always have the same version!
|
||||
im-rc = "14" # im and im-rc should always have the same version!
|
||||
im = "15" # im and im-rc should always have the same version!
|
||||
im-rc = "15" # im and im-rc should always have the same version!
|
||||
bumpalo = { version = "3.2", features = ["collections"] }
|
||||
inlinable_string = "0.1"
|
||||
arraystring = "0.3.0"
|
||||
libc = "0.2"
|
||||
page_size = "0.4"
|
||||
winit = "0.22"
|
||||
wgpu = "0.6"
|
||||
winit = "0.24"
|
||||
wgpu = "0.7"
|
||||
glyph_brush = "0.7"
|
||||
log = "0.4"
|
||||
zerocopy = "0.3"
|
||||
env_logger = "0.7"
|
||||
env_logger = "0.8"
|
||||
futures = "0.3"
|
||||
wgpu_glyph = "0.10"
|
||||
cgmath = "0.17.0"
|
||||
wgpu_glyph = "0.11"
|
||||
cgmath = "0.18.0"
|
||||
snafu = { version = "0.6", features = ["backtraces"] }
|
||||
colored = "2"
|
||||
pest = "2.1"
|
||||
pest_derive = "2.1"
|
||||
ropey = "1.2.0"
|
||||
copypasta = "0.7.1"
|
||||
indoc = "0.3.3"
|
||||
indoc = "1.0"
|
||||
palette = "0.5"
|
||||
confy = { git = 'https://github.com/rust-cli/confy', features = [
|
||||
"yaml_conf"
|
||||
|
@ -53,10 +53,10 @@ version = "1.4"
|
|||
features = ["derive"]
|
||||
|
||||
[dev-dependencies]
|
||||
pretty_assertions = "0.5.1"
|
||||
pretty_assertions = "0.6"
|
||||
maplit = "1.0.1"
|
||||
quickcheck = "0.8"
|
||||
quickcheck_macros = "0.8"
|
||||
quickcheck = "1.0"
|
||||
quickcheck_macros = "1.0"
|
||||
criterion = "0.3"
|
||||
rand = "0.8.2"
|
||||
|
||||
|
|
|
@ -66,6 +66,7 @@ e.g. you have a test `calculate_sum_test` that only uses the function `add`, whe
|
|||
* When refactoring;
|
||||
- Cutting and pasting code to a new file should automatically add imports to the new file and delete them from the old file.
|
||||
- Ability to link e.g. variable name in comments to actual variable name. Comment is automatically updated when variable name is changed.
|
||||
- When updating dependencies with breaking changes; show similar diffs from github projects that have succesfully updated that dependency.
|
||||
- AST backed renaming, changing variable/function/type name should change it all over the codebase.
|
||||
* Automatically create all "arms" when pattern matching after entering `when var is` based on the type.
|
||||
- All `when ... is` should be updated if the type is changed, e.g. adding Indigo to the Color type should add an arm everywhere where `when color is` is used.
|
||||
|
|
|
@ -88,9 +88,9 @@ fn run_event_loop(file_path_opt: Option<&Path>) -> Result<(), Box<dyn Error>> {
|
|||
adapter
|
||||
.request_device(
|
||||
&wgpu::DeviceDescriptor {
|
||||
label: None,
|
||||
features: wgpu::Features::empty(),
|
||||
limits: wgpu::Limits::default(),
|
||||
shader_validation: false,
|
||||
},
|
||||
None,
|
||||
)
|
||||
|
@ -108,12 +108,12 @@ fn run_event_loop(file_path_opt: Option<&Path>) -> Result<(), Box<dyn Error>> {
|
|||
let mut size = window.inner_size();
|
||||
|
||||
let swap_chain_descr = wgpu::SwapChainDescriptor {
|
||||
usage: wgpu::TextureUsage::OUTPUT_ATTACHMENT,
|
||||
usage: wgpu::TextureUsage::RENDER_ATTACHMENT,
|
||||
format: render_format,
|
||||
width: size.width,
|
||||
height: size.height,
|
||||
//Immediate may cause tearing, change present_mode if this becomes a problem
|
||||
present_mode: wgpu::PresentMode::Immediate,
|
||||
// TODO go back to Immediate
|
||||
present_mode: wgpu::PresentMode::Fifo,
|
||||
};
|
||||
|
||||
let mut swap_chain = gpu_device.create_swap_chain(&surface, &swap_chain_descr);
|
||||
|
@ -182,12 +182,12 @@ fn run_event_loop(file_path_opt: Option<&Path>) -> Result<(), Box<dyn Error>> {
|
|||
swap_chain = gpu_device.create_swap_chain(
|
||||
&surface,
|
||||
&wgpu::SwapChainDescriptor {
|
||||
usage: wgpu::TextureUsage::OUTPUT_ATTACHMENT,
|
||||
usage: wgpu::TextureUsage::RENDER_ATTACHMENT,
|
||||
format: render_format,
|
||||
width: size.width,
|
||||
height: size.height,
|
||||
//Immediate may cause tearing, change present_mode if this becomes a problem
|
||||
present_mode: wgpu::PresentMode::Immediate,
|
||||
// TODO go back to Immediate
|
||||
present_mode: wgpu::PresentMode::Fifo,
|
||||
},
|
||||
);
|
||||
|
||||
|
@ -376,7 +376,10 @@ fn draw_all_rects(
|
|||
render_pass.set_pipeline(&rect_resources.pipeline);
|
||||
render_pass.set_bind_group(0, &rect_resources.ortho.bind_group, &[]);
|
||||
render_pass.set_vertex_buffer(0, rect_buffers.vertex_buffer.slice(..));
|
||||
render_pass.set_index_buffer(rect_buffers.index_buffer.slice(..));
|
||||
render_pass.set_index_buffer(
|
||||
rect_buffers.index_buffer.slice(..),
|
||||
wgpu::IndexFormat::Uint32,
|
||||
);
|
||||
render_pass.draw_indexed(0..rect_buffers.num_rects, 0, 0..1);
|
||||
} else {
|
||||
// need to begin render pass to clear screen
|
||||
|
@ -403,6 +406,7 @@ fn begin_render_pass<'a>(
|
|||
},
|
||||
}],
|
||||
depth_stencil_attachment: None,
|
||||
label: None,
|
||||
})
|
||||
}
|
||||
|
||||
|
|
|
@ -91,8 +91,9 @@ pub fn init_ortho(
|
|||
entries: &[BindGroupLayoutEntry {
|
||||
binding: 0,
|
||||
visibility: ShaderStage::VERTEX,
|
||||
ty: wgpu::BindingType::UniformBuffer {
|
||||
dynamic: false,
|
||||
ty: wgpu::BindingType::Buffer {
|
||||
ty: wgpu::BufferBindingType::Uniform,
|
||||
has_dynamic_offset: false,
|
||||
min_binding_size: None,
|
||||
},
|
||||
count: None,
|
||||
|
@ -104,7 +105,7 @@ pub fn init_ortho(
|
|||
layout: &ortho_bind_group_layout,
|
||||
entries: &[wgpu::BindGroupEntry {
|
||||
binding: 0,
|
||||
resource: wgpu::BindingResource::Buffer(ortho_buffer.slice(..)),
|
||||
resource: ortho_buffer.as_entire_binding(),
|
||||
}],
|
||||
label: Some("Ortho bind group"),
|
||||
});
|
||||
|
|
|
@ -21,9 +21,8 @@ pub fn make_rect_pipeline(
|
|||
&gpu_device,
|
||||
&pipeline_layout,
|
||||
swap_chain_descr.format,
|
||||
&[Vertex::DESC],
|
||||
wgpu::include_spirv!("../shaders/rect.vert.spv"),
|
||||
wgpu::include_spirv!("../shaders/rect.frag.spv"),
|
||||
&wgpu::include_spirv!("../shaders/rect.vert.spv"),
|
||||
&wgpu::include_spirv!("../shaders/rect.frag.spv"),
|
||||
);
|
||||
|
||||
RectResources { pipeline, ortho }
|
||||
|
@ -33,9 +32,8 @@ pub fn create_render_pipeline(
|
|||
device: &wgpu::Device,
|
||||
layout: &wgpu::PipelineLayout,
|
||||
color_format: wgpu::TextureFormat,
|
||||
vertex_descs: &[wgpu::VertexBufferDescriptor],
|
||||
vs_src: wgpu::ShaderModuleSource,
|
||||
fs_src: wgpu::ShaderModuleSource,
|
||||
vs_src: &wgpu::ShaderModuleDescriptor,
|
||||
fs_src: &wgpu::ShaderModuleDescriptor,
|
||||
) -> wgpu::RenderPipeline {
|
||||
let vs_module = device.create_shader_module(vs_src);
|
||||
let fs_module = device.create_shader_module(fs_src);
|
||||
|
@ -43,29 +41,27 @@ pub fn create_render_pipeline(
|
|||
device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
|
||||
label: Some("Render pipeline"),
|
||||
layout: Some(&layout),
|
||||
vertex_stage: wgpu::ProgrammableStageDescriptor {
|
||||
vertex: wgpu::VertexState {
|
||||
module: &vs_module,
|
||||
entry_point: "main",
|
||||
buffers: &[Vertex::DESC],
|
||||
},
|
||||
fragment_stage: Some(wgpu::ProgrammableStageDescriptor {
|
||||
fragment: Some(wgpu::FragmentState {
|
||||
module: &fs_module,
|
||||
entry_point: "main",
|
||||
targets: &[wgpu::ColorTargetState {
|
||||
format: color_format,
|
||||
color_blend: wgpu::BlendState::REPLACE,
|
||||
alpha_blend: wgpu::BlendState::REPLACE,
|
||||
write_mask: wgpu::ColorWrite::ALL,
|
||||
}],
|
||||
}),
|
||||
rasterization_state: None,
|
||||
primitive_topology: wgpu::PrimitiveTopology::TriangleList,
|
||||
color_states: &[wgpu::ColorStateDescriptor {
|
||||
format: color_format,
|
||||
color_blend: wgpu::BlendDescriptor::REPLACE,
|
||||
alpha_blend: wgpu::BlendDescriptor::REPLACE,
|
||||
write_mask: wgpu::ColorWrite::ALL,
|
||||
}],
|
||||
depth_stencil_state: None,
|
||||
sample_count: 1,
|
||||
sample_mask: !0,
|
||||
alpha_to_coverage_enabled: false,
|
||||
vertex_state: wgpu::VertexStateDescriptor {
|
||||
index_format: wgpu::IndexFormat::Uint32,
|
||||
vertex_buffers: vertex_descs,
|
||||
primitive: wgpu::PrimitiveState::default(),
|
||||
depth_stencil: None,
|
||||
multisample: wgpu::MultisampleState {
|
||||
count: 1,
|
||||
mask: !0,
|
||||
alpha_to_coverage_enabled: false,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
|
|
@ -13,18 +13,18 @@ unsafe impl bytemuck::Zeroable for Vertex {}
|
|||
|
||||
impl Vertex {
|
||||
pub const SIZE: wgpu::BufferAddress = std::mem::size_of::<Self>() as wgpu::BufferAddress;
|
||||
pub const DESC: wgpu::VertexBufferDescriptor<'static> = wgpu::VertexBufferDescriptor {
|
||||
stride: Self::SIZE,
|
||||
pub const DESC: wgpu::VertexBufferLayout<'static> = wgpu::VertexBufferLayout {
|
||||
array_stride: Self::SIZE,
|
||||
step_mode: wgpu::InputStepMode::Vertex,
|
||||
attributes: &[
|
||||
// position
|
||||
wgpu::VertexAttributeDescriptor {
|
||||
wgpu::VertexAttribute {
|
||||
offset: 0,
|
||||
shader_location: 0,
|
||||
format: wgpu::VertexFormat::Float2,
|
||||
},
|
||||
// color
|
||||
wgpu::VertexAttributeDescriptor {
|
||||
wgpu::VertexAttribute {
|
||||
offset: std::mem::size_of::<[f32; 2]>() as wgpu::BufferAddress,
|
||||
shader_location: 1,
|
||||
format: wgpu::VertexFormat::Float4,
|
||||
|
|
|
@ -16,9 +16,8 @@ use roc_module::ident::ModuleName;
|
|||
use roc_module::low_level::LowLevel;
|
||||
use roc_module::operator::CalledVia;
|
||||
use roc_module::symbol::{IdentIds, ModuleId, ModuleIds, Symbol};
|
||||
use roc_parse::ast;
|
||||
use roc_parse::ast::StrLiteral;
|
||||
use roc_parse::ast::{self, Attempting};
|
||||
use roc_parse::blankspace::space0_before;
|
||||
use roc_parse::expr::expr;
|
||||
use roc_parse::parser::{loc, Parser, State, SyntaxError};
|
||||
use roc_problem::can::{Problem, RuntimeError};
|
||||
|
@ -235,14 +234,10 @@ pub fn str_to_expr2<'a>(
|
|||
scope: &mut Scope,
|
||||
region: Region,
|
||||
) -> Result<(Expr2, self::Output), SyntaxError<'a>> {
|
||||
let state = State::new_in(arena, input.trim().as_bytes(), Attempting::Module);
|
||||
let parser = space0_before(loc(expr(0)), 0);
|
||||
let parse_res = parser.parse(&arena, state);
|
||||
|
||||
parse_res
|
||||
.map(|(_, loc_expr, _)| arena.alloc(loc_expr.value))
|
||||
.map(|loc_expr_val_ref| to_expr2(env, scope, loc_expr_val_ref, region))
|
||||
.map_err(|(_, fail, _)| fail)
|
||||
match roc_parse::test_helpers::parse_loc_with(arena, input.trim()) {
|
||||
Ok(loc_expr) => Ok(to_expr2(env, scope, arena.alloc(loc_expr.value), region)),
|
||||
Err(fail) => Err(fail),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn to_expr2<'a>(
|
||||
|
|
|
@ -2,7 +2,7 @@ use bumpalo::collections::Vec;
|
|||
use bumpalo::Bump;
|
||||
use roc_fmt::def::fmt_def;
|
||||
use roc_fmt::module::fmt_module;
|
||||
use roc_parse::ast::{Attempting, Def, Module};
|
||||
use roc_parse::ast::{Def, Module};
|
||||
use roc_parse::module::module_defs;
|
||||
use roc_parse::parser;
|
||||
use roc_parse::parser::{Parser, SyntaxError};
|
||||
|
@ -36,11 +36,11 @@ impl<'a> File<'a> {
|
|||
|
||||
let allocation = arena.alloc(bytes);
|
||||
|
||||
let module_parse_state = parser::State::new_in(arena, allocation, Attempting::Module);
|
||||
let parsed_module = roc_parse::module::header().parse(&arena, module_parse_state);
|
||||
let module_parse_state = parser::State::new(allocation);
|
||||
let parsed_module = roc_parse::module::parse_header(&arena, module_parse_state);
|
||||
|
||||
match parsed_module {
|
||||
Ok((_, module, state)) => {
|
||||
Ok((module, state)) => {
|
||||
let parsed_defs = module_defs().parse(&arena, state);
|
||||
|
||||
match parsed_defs {
|
||||
|
@ -52,7 +52,7 @@ impl<'a> File<'a> {
|
|||
Err((_, error, _)) => Err(ReadError::ParseDefs(error)),
|
||||
}
|
||||
}
|
||||
Err((_, error, _)) => Err(ReadError::ParseHeader(error)),
|
||||
Err(error) => Err(ReadError::ParseHeader(SyntaxError::Header(error))),
|
||||
}
|
||||
}
|
||||
|
||||
|
|