Merge remote-tracking branch 'origin/trunk' into reset-reuse

This commit is contained in:
Folkert 2021-07-13 00:44:05 +02:00
commit e02d950c7c
50 changed files with 2387 additions and 992 deletions

View file

@ -1,4 +1,7 @@
on: [pull_request] on:
pull_request:
paths-ignore:
- '**.md'
name: Benchmarks name: Benchmarks
@ -31,13 +34,5 @@ jobs:
- name: on current branch; prepare a self-contained benchmark folder - name: on current branch; prepare a self-contained benchmark folder
run: ./ci/safe-earthly.sh +prep-bench-folder run: ./ci/safe-earthly.sh +prep-bench-folder
- name: benchmark trunk - name: execute benchmarks with regression check
run: ulimit -s unlimited && cd bench-folder-trunk && ./target/release/deps/time_bench --bench run: ./ci/bench-runner.sh
# ulimit to prevent stack overflow on cfold
- name: move benchmark results so they can be compared later
run: cp -r bench-folder-trunk/target/criterion bench-folder-branch/target/
- name: benchmark current branch
run: ulimit -s unlimited && cd bench-folder-branch && ./target/release/deps/time_bench --bench
# ulimit to prevent stack overflow on cfold

View file

@ -1,4 +1,7 @@
on: [pull_request] on:
pull_request:
paths-ignore:
- '**.md'
name: CI name: CI

24
.github/workflows/spellcheck.yml vendored Normal file
View file

@ -0,0 +1,24 @@
on: [pull_request]
name: SpellCheck
env:
RUST_BACKTRACE: 1
jobs:
spell-check:
name: spell check
runs-on: [self-hosted]
timeout-minutes: 10
env:
FORCE_COLOR: 1
steps:
- uses: actions/checkout@v2
with:
clean: "true"
- name: Earthly version
run: earthly --version
- name: install spell checker, do spell check
run: ./ci/safe-earthly.sh +check-typos

1
Cargo.lock generated
View file

@ -3139,6 +3139,7 @@ dependencies = [
"roc_load", "roc_load",
"roc_module", "roc_module",
"roc_region", "roc_region",
"roc_types",
] ]
[[package]] [[package]]

View file

@ -101,7 +101,7 @@ check-rustfmt:
RUN cargo fmt --all -- --check RUN cargo fmt --all -- --check
check-typos: check-typos:
RUN cargo install typos-cli --version 1.0.4 # use latest version on resolution of issue crate-ci/typos#277 RUN cargo install typos-cli
COPY --dir .github ci cli compiler docs editor examples nightly_benches packages roc_std www *.md LEGAL_DETAILS shell.nix ./ COPY --dir .github ci cli compiler docs editor examples nightly_benches packages roc_std www *.md LEGAL_DETAILS shell.nix ./
RUN typos RUN typos

67
ci/bench-runner.sh Executable file
View file

@ -0,0 +1,67 @@
#!/usr/bin/env bash
# script to return exit code 1 if benchmarks have regressed
# benchmark trunk
ulimit -s unlimited
cd bench-folder-trunk
./target/release/deps/time_bench --bench
cd ..
# move benchmark results so they can be compared later
cp -r bench-folder-trunk/target/criterion bench-folder-branch/target/
cd bench-folder-branch
LOG_FILE="bench_log.txt"
touch $LOG_FILE
FULL_CMD=" ./target/release/deps/time_bench --bench"
echo $FULL_CMD
script -efq $LOG_FILE -c "$FULL_CMD"
EXIT_CODE=$?
if grep -q "regressed" "$LOG_FILE"; then
echo ""
echo ""
echo "------<<<<<<>>>>>>------"
echo "Benchmark detected regression. Running benchmark again to confirm..."
echo "------<<<<<<>>>>>>------"
echo ""
echo ""
# delete criterion folder to compare to trunk only
rm -rf ./target/criterion
# copy benchmark data from trunk again
cp -r ../bench-folder-trunk/target/criterion ./target
rm $LOG_FILE
touch $LOG_FILE
script -efq $LOG_FILE -c "$FULL_CMD"
EXIT_CODE=$?
if grep -q "regressed" "$LOG_FILE"; then
echo ""
echo ""
echo "------<<<<<<!!!!!!>>>>>>------"
echo "Benchmarks were run twice and a regression was detected both times."
echo "------<<<<<<!!!!!!>>>>>>------"
echo ""
echo ""
exit 1
else
echo "Benchmarks were run twice and a regression was detected on one run. We assume this was a fluke."
exit 0
fi
else
echo ""
echo ""
echo "------<<<<<<!!!!!!>>>>>>------"
echo "Benchmark execution failed with exit code: $EXIT_CODE."
echo "------<<<<<<!!!!!!>>>>>>------"
echo ""
echo ""
exit $EXIT_CODE
fi

View file

@ -26,6 +26,16 @@ pub enum BuildOutcome {
Errors, Errors,
} }
impl BuildOutcome {
pub fn status_code(&self) -> i32 {
match self {
Self::NoProblems => 0,
Self::OnlyWarnings => 1,
Self::Errors => 2,
}
}
}
pub struct BuiltFile { pub struct BuiltFile {
pub binary_path: PathBuf, pub binary_path: PathBuf,
pub outcome: BuildOutcome, pub outcome: BuildOutcome,
@ -205,10 +215,14 @@ pub fn build_file<'a>(
let total_time = compilation_start.elapsed().unwrap(); let total_time = compilation_start.elapsed().unwrap();
// If the cmd errored out, return the Err. // If the cmd errored out, return the Err.
cmd_result?; let exit_status = cmd_result?;
// TODO change this to report whether there were errors or warnings! // TODO change this to report whether there were errors or warnings!
let outcome = BuildOutcome::NoProblems; let outcome = if exit_status.success() {
BuildOutcome::NoProblems
} else {
BuildOutcome::Errors
};
Ok(BuiltFile { Ok(BuiltFile {
binary_path, binary_path,

View file

@ -94,7 +94,7 @@ pub fn build_app<'a>() -> App<'a> {
.arg(Arg::with_name(DIRECTORY_OR_FILES) .arg(Arg::with_name(DIRECTORY_OR_FILES)
.index(1) .index(1)
.multiple(true) .multiple(true)
.required(true) .required(false)
.help("The directory or files to build documentation for") .help("The directory or files to build documentation for")
) )
@ -196,13 +196,6 @@ pub fn build(target: &Triple, matches: &ArgMatches, config: BuildConfig) -> io::
.strip_prefix(env::current_dir().unwrap()) .strip_prefix(env::current_dir().unwrap())
.unwrap_or(&binary_path); .unwrap_or(&binary_path);
// Return a nonzero exit code if there were problems
let status_code = match outcome {
BuildOutcome::NoProblems => 0,
BuildOutcome::OnlyWarnings => 1,
BuildOutcome::Errors => 2,
};
// No need to waste time freeing this memory, // No need to waste time freeing this memory,
// since the process is about to exit anyway. // since the process is about to exit anyway.
std::mem::forget(arena); std::mem::forget(arena);
@ -213,7 +206,8 @@ pub fn build(target: &Triple, matches: &ArgMatches, config: BuildConfig) -> io::
total_time.as_millis() total_time.as_millis()
); );
Ok(status_code) // Return a nonzero exit code if there were problems
Ok(outcome.status_code())
} }
BuildAndRun { roc_file_arg_index } => { BuildAndRun { roc_file_arg_index } => {
let mut cmd = Command::new(binary_path); let mut cmd = Command::new(binary_path);
@ -231,7 +225,10 @@ pub fn build(target: &Triple, matches: &ArgMatches, config: BuildConfig) -> io::
} }
} }
roc_run(cmd.current_dir(original_cwd)) match outcome {
BuildOutcome::Errors => Ok(outcome.status_code()),
_ => roc_run(cmd.current_dir(original_cwd)),
}
} }
} }
} }

View file

@ -9,6 +9,7 @@ use target_lexicon::Triple;
#[cfg(feature = "llvm")] #[cfg(feature = "llvm")]
use roc_cli::build; use roc_cli::build;
use std::ffi::{OsStr, OsString};
#[cfg(not(feature = "llvm"))] #[cfg(not(feature = "llvm"))]
fn build(_target: &Triple, _matches: &clap::ArgMatches, _config: BuildConfig) -> io::Result<i32> { fn build(_target: &Triple, _matches: &clap::ArgMatches, _config: BuildConfig) -> io::Result<i32> {
@ -68,19 +69,34 @@ fn main() -> io::Result<()> {
Ok(0) Ok(0)
} }
Some(CMD_DOCS) => { Some(CMD_DOCS) => {
let values = matches let maybe_values = matches
.subcommand_matches(CMD_DOCS) .subcommand_matches(CMD_DOCS)
.unwrap() .unwrap()
.values_of_os(DIRECTORY_OR_FILES) .values_of_os(DIRECTORY_OR_FILES);
.unwrap();
let mut values: Vec<OsString> = Vec::new();
match maybe_values {
None => {
let mut os_string_values: Vec<OsString> = Vec::new();
read_all_roc_files(&OsStr::new("./").to_os_string(), &mut os_string_values)?;
for os_string in os_string_values {
values.push(os_string);
}
}
Some(os_values) => {
for os_str in os_values {
values.push(os_str.to_os_string());
}
}
}
let mut roc_files = Vec::new(); let mut roc_files = Vec::new();
// Populate roc_files // Populate roc_files
for os_str in values { for os_str in values {
let metadata = fs::metadata(os_str)?; let metadata = fs::metadata(os_str.clone())?;
roc_files_recursive(os_str.as_os_str(), metadata.file_type(), &mut roc_files)?;
roc_files_recursive(os_str, metadata.file_type(), &mut roc_files)?;
} }
docs(roc_files); docs(roc_files);
@ -93,6 +109,26 @@ fn main() -> io::Result<()> {
std::process::exit(exit_code); std::process::exit(exit_code);
} }
fn read_all_roc_files(
dir: &OsString,
mut roc_file_paths: &mut Vec<OsString>,
) -> Result<(), std::io::Error> {
let entries = fs::read_dir(dir)?;
for entry in entries {
let path = entry?.path();
if path.is_dir() {
read_all_roc_files(&path.into_os_string(), &mut roc_file_paths)?;
} else if path.extension().and_then(OsStr::to_str) == Some("roc") {
let file_path = path.into_os_string();
roc_file_paths.push(file_path);
}
}
Ok(())
}
fn roc_files_recursive<P: AsRef<Path>>( fn roc_files_recursive<P: AsRef<Path>>(
path: P, path: P,
file_type: FileType, file_type: FileType,

View file

@ -7,16 +7,16 @@ const RocStr = str.RocStr;
pub const RocDec = struct { pub const RocDec = struct {
num: i128, num: i128,
pub const decimal_places: comptime u5 = 18; pub const decimal_places: u5 = 18;
pub const whole_number_places: comptime u5 = 21; pub const whole_number_places: u5 = 21;
const max_digits: comptime u6 = 39; const max_digits: u6 = 39;
const leading_zeros: comptime [17]u8 = "00000000000000000".*; const leading_zeros: [17]u8 = "00000000000000000".*;
pub const min: comptime RocDec = .{ .num = math.minInt(i128) }; pub const min: RocDec = .{ .num = math.minInt(i128) };
pub const max: comptime RocDec = .{ .num = math.maxInt(i128) }; pub const max: RocDec = .{ .num = math.maxInt(i128) };
pub const one_point_zero_i128: comptime i128 = comptime math.pow(i128, 10, RocDec.decimal_places); pub const one_point_zero_i128: i128 = math.pow(i128, 10, RocDec.decimal_places);
pub const one_point_zero: comptime RocDec = .{ .num = one_point_zero_i128 }; pub const one_point_zero: RocDec = .{ .num = one_point_zero_i128 };
pub fn fromU64(num: u64) RocDec { pub fn fromU64(num: u64) RocDec {
return .{ .num = num * one_point_zero_i128 }; return .{ .num = num * one_point_zero_i128 };
@ -362,7 +362,7 @@ fn mul_and_decimalize(a: u128, b: u128) i128 {
const lk = mul_u128(lhs_hi, rhs_hi); const lk = mul_u128(lhs_hi, rhs_hi);
const e = ea.hi; const e = ea.hi;
const _a = ea.lo; // const _a = ea.lo;
const g = gf.hi; const g = gf.hi;
const f = gf.lo; const f = gf.lo;

View file

@ -568,14 +568,6 @@ pub fn dictKeys(dict: RocDict, alignment: Alignment, key_width: usize, value_wid
const data_bytes = length * key_width; const data_bytes = length * key_width;
var ptr = allocateWithRefcount(data_bytes, alignment); var ptr = allocateWithRefcount(data_bytes, alignment);
var offset = blk: {
if (alignment.keyFirst()) {
break :blk 0;
} else {
break :blk (dict.capacity() * value_width);
}
};
i = 0; i = 0;
var copied: usize = 0; var copied: usize = 0;
while (i < size) : (i += 1) { while (i < size) : (i += 1) {
@ -617,14 +609,6 @@ pub fn dictValues(dict: RocDict, alignment: Alignment, key_width: usize, value_w
const data_bytes = length * value_width; const data_bytes = length * value_width;
var ptr = allocateWithRefcount(data_bytes, alignment); var ptr = allocateWithRefcount(data_bytes, alignment);
var offset = blk: {
if (alignment.keyFirst()) {
break :blk (dict.capacity() * key_width);
} else {
break :blk 0;
}
};
i = 0; i = 0;
var copied: usize = 0; var copied: usize = 0;
while (i < size) : (i += 1) { while (i < size) : (i += 1) {
@ -644,7 +628,7 @@ pub fn dictValues(dict: RocDict, alignment: Alignment, key_width: usize, value_w
output.* = RocList{ .bytes = ptr, .length = length }; output.* = RocList{ .bytes = ptr, .length = length };
} }
fn doNothing(ptr: Opaque) callconv(.C) void { fn doNothing(_: Opaque) callconv(.C) void {
return; return;
} }
@ -764,8 +748,6 @@ pub fn dictWalk(
key_width: usize, key_width: usize,
value_width: usize, value_width: usize,
accum_width: usize, accum_width: usize,
inc_key: Inc,
inc_value: Inc,
output: Opaque, output: Opaque,
) callconv(.C) void { ) callconv(.C) void {
const alignment_u32 = alignment.toU32(); const alignment_u32 = alignment.toU32();

View file

@ -626,7 +626,6 @@ pub fn listWalkUntil(
output: Opaque, output: Opaque,
) callconv(.C) void { ) callconv(.C) void {
// [ Continue a, Stop a ] // [ Continue a, Stop a ]
const CONTINUE: usize = 0;
if (accum_width == 0) { if (accum_width == 0) {
return; return;
@ -922,7 +921,7 @@ inline fn swapHelp(width: usize, temporary: [*]u8, ptr1: [*]u8, ptr2: [*]u8) voi
} }
fn swap(width_initial: usize, p1: [*]u8, p2: [*]u8) void { fn swap(width_initial: usize, p1: [*]u8, p2: [*]u8) void {
const threshold: comptime usize = 64; const threshold: usize = 64;
var width = width_initial; var width = width_initial;
@ -948,11 +947,6 @@ fn swap(width_initial: usize, p1: [*]u8, p2: [*]u8) void {
} }
fn swapElements(source_ptr: [*]u8, element_width: usize, index_1: usize, index_2: usize) void { fn swapElements(source_ptr: [*]u8, element_width: usize, index_1: usize, index_2: usize) void {
const threshold: comptime usize = 64;
var buffer_actual: [threshold]u8 = undefined;
var buffer: [*]u8 = buffer_actual[0..];
var element_at_i = source_ptr + (index_1 * element_width); var element_at_i = source_ptr + (index_1 * element_width);
var element_at_j = source_ptr + (index_2 * element_width); var element_at_j = source_ptr + (index_2 * element_width);
@ -1029,8 +1023,6 @@ pub fn listConcat(list_a: RocList, list_b: RocList, alignment: u32, element_widt
pub fn listSetInPlace( pub fn listSetInPlace(
bytes: ?[*]u8, bytes: ?[*]u8,
length: usize,
alignment: u32,
index: usize, index: usize,
element: Opaque, element: Opaque,
element_width: usize, element_width: usize,
@ -1043,7 +1035,7 @@ pub fn listSetInPlace(
// so we don't do a bounds check here. Hence, the list is also non-empty, // so we don't do a bounds check here. Hence, the list is also non-empty,
// because inserting into an empty list is always out of bounds // because inserting into an empty list is always out of bounds
return listSetInPlaceHelp(bytes, length, alignment, index, element, element_width, dec); return listSetInPlaceHelp(bytes, index, element, element_width, dec);
} }
pub fn listSet( pub fn listSet(
@ -1064,7 +1056,7 @@ pub fn listSet(
const ptr: [*]usize = @ptrCast([*]usize, @alignCast(8, bytes)); const ptr: [*]usize = @ptrCast([*]usize, @alignCast(8, bytes));
if ((ptr - 1)[0] == utils.REFCOUNT_ONE) { if ((ptr - 1)[0] == utils.REFCOUNT_ONE) {
return listSetInPlaceHelp(bytes, length, alignment, index, element, element_width, dec); return listSetInPlaceHelp(bytes, index, element, element_width, dec);
} else { } else {
return listSetImmutable(bytes, length, alignment, index, element, element_width, dec); return listSetImmutable(bytes, length, alignment, index, element, element_width, dec);
} }
@ -1072,8 +1064,6 @@ pub fn listSet(
inline fn listSetInPlaceHelp( inline fn listSetInPlaceHelp(
bytes: ?[*]u8, bytes: ?[*]u8,
length: usize,
alignment: u32,
index: usize, index: usize,
element: Opaque, element: Opaque,
element_width: usize, element_width: usize,

View file

@ -20,18 +20,18 @@ comptime {
} }
} }
fn testing_roc_alloc(size: usize, alignment: u32) callconv(.C) ?*c_void { fn testing_roc_alloc(size: usize, _: u32) callconv(.C) ?*c_void {
return @ptrCast(?*c_void, std.testing.allocator.alloc(u8, size) catch unreachable); return @ptrCast(?*c_void, std.testing.allocator.alloc(u8, size) catch unreachable);
} }
fn testing_roc_realloc(c_ptr: *c_void, new_size: usize, old_size: usize, alignment: u32) callconv(.C) ?*c_void { fn testing_roc_realloc(c_ptr: *c_void, new_size: usize, old_size: usize, _: u32) callconv(.C) ?*c_void {
const ptr = @ptrCast([*]u8, @alignCast(16, c_ptr)); const ptr = @ptrCast([*]u8, @alignCast(16, c_ptr));
const slice = ptr[0..old_size]; const slice = ptr[0..old_size];
return @ptrCast(?*c_void, std.testing.allocator.realloc(slice, new_size) catch unreachable); return @ptrCast(?*c_void, std.testing.allocator.realloc(slice, new_size) catch unreachable);
} }
fn testing_roc_dealloc(c_ptr: *c_void, alignment: u32) callconv(.C) void { fn testing_roc_dealloc(c_ptr: *c_void, _: u32) callconv(.C) void {
const ptr = @ptrCast([*]u8, @alignCast(16, c_ptr)); const ptr = @ptrCast([*]u8, @alignCast(16, c_ptr));
std.testing.allocator.destroy(ptr); std.testing.allocator.destroy(ptr);
@ -53,8 +53,8 @@ pub const Inc = fn (?[*]u8) callconv(.C) void;
pub const IncN = fn (?[*]u8, u64) callconv(.C) void; pub const IncN = fn (?[*]u8, u64) callconv(.C) void;
pub const Dec = fn (?[*]u8) callconv(.C) void; pub const Dec = fn (?[*]u8) callconv(.C) void;
const REFCOUNT_MAX_ISIZE: comptime isize = 0; const REFCOUNT_MAX_ISIZE: isize = 0;
pub const REFCOUNT_ONE_ISIZE: comptime isize = std.math.minInt(isize); pub const REFCOUNT_ONE_ISIZE: isize = std.math.minInt(isize);
pub const REFCOUNT_ONE: usize = @bitCast(usize, REFCOUNT_ONE_ISIZE); pub const REFCOUNT_ONE: usize = @bitCast(usize, REFCOUNT_ONE_ISIZE);
pub const IntWidth = enum(u8) { pub const IntWidth = enum(u8) {
@ -110,7 +110,7 @@ pub fn allocateWithRefcount(
data_bytes: usize, data_bytes: usize,
alignment: u32, alignment: u32,
) [*]u8 { ) [*]u8 {
comptime const result_in_place = false; const result_in_place = false;
switch (alignment) { switch (alignment) {
16 => { 16 => {

View file

@ -4344,7 +4344,6 @@ fn run_higher_order_low_level<'a, 'ctx, 'env>(
dict_walk( dict_walk(
env, env,
layout_ids,
roc_function_call, roc_function_call,
dict, dict,
default, default,

View file

@ -635,7 +635,6 @@ fn dict_intersect_or_difference<'a, 'ctx, 'env>(
#[allow(clippy::too_many_arguments)] #[allow(clippy::too_many_arguments)]
pub fn dict_walk<'a, 'ctx, 'env>( pub fn dict_walk<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>, env: &Env<'a, 'ctx, 'env>,
layout_ids: &mut LayoutIds<'a>,
roc_function_call: RocFunctionCall<'ctx>, roc_function_call: RocFunctionCall<'ctx>,
dict: BasicValueEnum<'ctx>, dict: BasicValueEnum<'ctx>,
accum: BasicValueEnum<'ctx>, accum: BasicValueEnum<'ctx>,
@ -660,9 +659,6 @@ pub fn dict_walk<'a, 'ctx, 'env>(
let output_ptr = builder.build_alloca(accum_bt, "output_ptr"); let output_ptr = builder.build_alloca(accum_bt, "output_ptr");
let inc_key_fn = build_inc_wrapper(env, layout_ids, key_layout);
let inc_value_fn = build_inc_wrapper(env, layout_ids, value_layout);
call_void_bitcode_fn( call_void_bitcode_fn(
env, env,
&[ &[
@ -676,8 +672,6 @@ pub fn dict_walk<'a, 'ctx, 'env>(
layout_width(env, key_layout), layout_width(env, key_layout),
layout_width(env, value_layout), layout_width(env, value_layout),
layout_width(env, accum_layout), layout_width(env, accum_layout),
inc_key_fn.as_global_value().as_pointer_value().into(),
inc_value_fn.as_global_value().as_pointer_value().into(),
env.builder.build_bitcast(output_ptr, u8_ptr, "to_opaque"), env.builder.build_bitcast(output_ptr, u8_ptr, "to_opaque"),
], ],
&bitcode::DICT_WALK, &bitcode::DICT_WALK,

View file

@ -361,12 +361,19 @@ pub fn list_set<'a, 'ctx, 'env>(
env.context.i8_type().ptr_type(AddressSpace::Generic), env.context.i8_type().ptr_type(AddressSpace::Generic),
); );
let symbol = match update_mode { let new_bytes = match update_mode {
UpdateMode::InPlace => bitcode::LIST_SET_IN_PLACE, UpdateMode::InPlace => call_bitcode_fn(
UpdateMode::Immutable => bitcode::LIST_SET, env,
}; &[
bytes.into(),
let new_bytes = call_bitcode_fn( index.into(),
pass_element_as_opaque(env, element),
layout_width(env, element_layout),
dec_element_fn.as_global_value().as_pointer_value().into(),
],
bitcode::LIST_SET_IN_PLACE,
),
UpdateMode::Immutable => call_bitcode_fn(
env, env,
&[ &[
bytes.into(), bytes.into(),
@ -377,8 +384,9 @@ pub fn list_set<'a, 'ctx, 'env>(
layout_width(env, element_layout), layout_width(env, element_layout),
dec_element_fn.as_global_value().as_pointer_value().into(), dec_element_fn.as_global_value().as_pointer_value().into(),
], ],
&symbol, bitcode::LIST_SET,
); ),
};
store_list(env, new_bytes.into_pointer_value(), length) store_list(env, new_bytes.into_pointer_value(), length)
} }

View file

@ -123,7 +123,10 @@ fn detached_docs_from_comments_and_new_lines<'a>(
} }
CommentOrNewline::LineComment(_) | CommentOrNewline::Newline => { CommentOrNewline::LineComment(_) | CommentOrNewline::Newline => {
if !docs.is_empty() {
detached_docs.push(docs.clone()); detached_docs.push(docs.clone());
}
docs = String::new(); docs = String::new();
} }
} }

View file

@ -622,6 +622,7 @@ pub struct LoadedModule {
pub declarations_by_id: MutMap<ModuleId, Vec<Declaration>>, pub declarations_by_id: MutMap<ModuleId, Vec<Declaration>>,
pub exposed_to_host: MutMap<Symbol, Variable>, pub exposed_to_host: MutMap<Symbol, Variable>,
pub exposed_aliases: MutMap<Symbol, Alias>, pub exposed_aliases: MutMap<Symbol, Alias>,
pub exposed_values: Vec<Symbol>,
pub header_sources: MutMap<ModuleId, (PathBuf, Box<str>)>, pub header_sources: MutMap<ModuleId, (PathBuf, Box<str>)>,
pub sources: MutMap<ModuleId, (PathBuf, Box<str>)>, pub sources: MutMap<ModuleId, (PathBuf, Box<str>)>,
pub timings: MutMap<ModuleId, ModuleTiming>, pub timings: MutMap<ModuleId, ModuleTiming>,
@ -765,6 +766,7 @@ enum Msg<'a> {
solved_subs: Solved<Subs>, solved_subs: Solved<Subs>,
exposed_vars_by_symbol: MutMap<Symbol, Variable>, exposed_vars_by_symbol: MutMap<Symbol, Variable>,
exposed_aliases_by_symbol: MutMap<Symbol, Alias>, exposed_aliases_by_symbol: MutMap<Symbol, Alias>,
exposed_values: Vec<Symbol>,
documentation: MutMap<ModuleId, ModuleDocumentation>, documentation: MutMap<ModuleId, ModuleDocumentation>,
}, },
FoundSpecializations { FoundSpecializations {
@ -1513,6 +1515,7 @@ where
solved_subs, solved_subs,
exposed_vars_by_symbol, exposed_vars_by_symbol,
exposed_aliases_by_symbol, exposed_aliases_by_symbol,
exposed_values,
documentation, documentation,
} => { } => {
// We're done! There should be no more messages pending. // We're done! There should be no more messages pending.
@ -1528,6 +1531,7 @@ where
return Ok(LoadResult::TypeChecked(finish( return Ok(LoadResult::TypeChecked(finish(
state, state,
solved_subs, solved_subs,
exposed_values,
exposed_aliases_by_symbol, exposed_aliases_by_symbol,
exposed_vars_by_symbol, exposed_vars_by_symbol,
documentation, documentation,
@ -1943,6 +1947,7 @@ fn update<'a>(
.send(Msg::FinishedAllTypeChecking { .send(Msg::FinishedAllTypeChecking {
solved_subs, solved_subs,
exposed_vars_by_symbol: solved_module.exposed_vars_by_symbol, exposed_vars_by_symbol: solved_module.exposed_vars_by_symbol,
exposed_values: solved_module.exposed_symbols,
exposed_aliases_by_symbol: solved_module.aliases, exposed_aliases_by_symbol: solved_module.aliases,
documentation, documentation,
}) })
@ -2275,6 +2280,7 @@ fn finish_specialization(
fn finish( fn finish(
state: State, state: State,
solved: Solved<Subs>, solved: Solved<Subs>,
exposed_values: Vec<Symbol>,
exposed_aliases_by_symbol: MutMap<Symbol, Alias>, exposed_aliases_by_symbol: MutMap<Symbol, Alias>,
exposed_vars_by_symbol: MutMap<Symbol, Variable>, exposed_vars_by_symbol: MutMap<Symbol, Variable>,
documentation: MutMap<ModuleId, ModuleDocumentation>, documentation: MutMap<ModuleId, ModuleDocumentation>,
@ -2310,8 +2316,9 @@ fn finish(
can_problems: state.module_cache.can_problems, can_problems: state.module_cache.can_problems,
type_problems: state.module_cache.type_problems, type_problems: state.module_cache.type_problems,
declarations_by_id: state.declarations_by_id, declarations_by_id: state.declarations_by_id,
exposed_to_host: exposed_vars_by_symbol.into_iter().collect(),
exposed_aliases: exposed_aliases_by_symbol, exposed_aliases: exposed_aliases_by_symbol,
exposed_values,
exposed_to_host: exposed_vars_by_symbol.into_iter().collect(),
header_sources, header_sources,
sources, sources,
timings: state.timings, timings: state.timings,
@ -3304,6 +3311,7 @@ fn run_solve<'a>(
let solved_module = SolvedModule { let solved_module = SolvedModule {
exposed_vars_by_symbol, exposed_vars_by_symbol,
exposed_symbols: exposed_symbols.into_iter().collect::<Vec<_>>(),
solved_types, solved_types,
problems, problems,
aliases: solved_env.aliases, aliases: solved_env.aliases,
@ -3540,7 +3548,6 @@ fn fabricate_effects_module<'a>(
&mut var_store, &mut var_store,
annotation, annotation,
); );
exposed_symbols.insert(symbol); exposed_symbols.insert(symbol);
declarations.push(Declaration::Declare(def)); declarations.push(Declaration::Declare(def));

View file

@ -196,7 +196,11 @@ impl<'a> ParamMap<'a> {
let already_in_there = self let already_in_there = self
.items .items
.insert(Key::JoinPoint(*j), Self::init_borrow_params(arena, xs)); .insert(Key::JoinPoint(*j), Self::init_borrow_params(arena, xs));
debug_assert!(already_in_there.is_none()); debug_assert!(
already_in_there.is_none(),
"join point {:?} is already defined!",
j
);
stack.push(v); stack.push(v);
stack.push(b); stack.push(b);

File diff suppressed because it is too large Load diff

View file

@ -3166,13 +3166,11 @@ pub fn with_hole<'a>(
branches, branches,
final_else, final_else,
} => { } => {
let ret_layout = layout_cache match (
.from_var(env.arena, branch_var, env.subs) layout_cache.from_var(env.arena, branch_var, env.subs),
.expect("invalid ret_layout"); layout_cache.from_var(env.arena, cond_var, env.subs),
let cond_layout = layout_cache ) {
.from_var(env.arena, cond_var, env.subs) (Ok(ret_layout), Ok(cond_layout)) => {
.expect("invalid cond_layout");
// if the hole is a return, then we don't need to merge the two // if the hole is a return, then we don't need to merge the two
// branches together again, we can just immediately return // branches together again, we can just immediately return
let is_terminated = matches!(hole, Stmt::Ret(_)); let is_terminated = matches!(hole, Stmt::Ret(_));
@ -3236,7 +3234,8 @@ pub fn with_hole<'a>(
); );
for (loc_cond, loc_then) in branches.into_iter().rev() { for (loc_cond, loc_then) in branches.into_iter().rev() {
let branching_symbol = possible_reuse_symbol(env, procs, &loc_cond.value); let branching_symbol =
possible_reuse_symbol(env, procs, &loc_cond.value);
let then = with_hole( let then = with_hole(
env, env,
@ -3264,7 +3263,9 @@ pub fn with_hole<'a>(
let layout = layout_cache let layout = layout_cache
.from_var(env.arena, branch_var, env.subs) .from_var(env.arena, branch_var, env.subs)
.unwrap_or_else(|err| panic!("TODO turn fn_var into a RuntimeError {:?}", err)); .unwrap_or_else(|err| {
panic!("TODO turn fn_var into a RuntimeError {:?}", err)
});
let param = Param { let param = Param {
symbol: assigned, symbol: assigned,
@ -3280,6 +3281,10 @@ pub fn with_hole<'a>(
} }
} }
} }
(Err(_), _) => Stmt::RuntimeError("invalid ret_layout"),
(_, Err(_)) => Stmt::RuntimeError("invalid cond_layout"),
}
}
When { When {
cond_var, cond_var,
@ -4140,17 +4145,17 @@ fn convert_tag_union<'a>(
hole, hole,
), ),
ByteUnion(tag_names) => { ByteUnion(tag_names) => {
let tag_id = tag_names let opt_tag_id = tag_names.iter().position(|key| key == &tag_name);
.iter()
.position(|key| key == &tag_name)
.expect("tag must be in its own type");
Stmt::Let( match opt_tag_id {
Some(tag_id) => Stmt::Let(
assigned, assigned,
Expr::Literal(Literal::Byte(tag_id as u8)), Expr::Literal(Literal::Byte(tag_id as u8)),
Layout::Builtin(Builtin::Int8), Layout::Builtin(Builtin::Int8),
hole, hole,
) ),
None => Stmt::RuntimeError("tag must be in its own type"),
}
} }
Newtype { Newtype {
@ -5099,21 +5104,17 @@ fn from_can_when<'a>(
jump, jump,
); );
let new_guard_stmt =
store_pattern(env, procs, layout_cache, &pattern, cond_symbol, guard_stmt);
( (
pattern, pattern.clone(),
Guard::Guard { Guard::Guard {
id, id,
symbol, pattern,
stmt: new_guard_stmt, stmt: guard_stmt,
}, },
branch_stmt, branch_stmt,
) )
} else { } else {
let new_branch_stmt = (pattern, Guard::NoGuard, branch_stmt)
store_pattern(env, procs, layout_cache, &pattern, cond_symbol, branch_stmt);
(pattern, Guard::NoGuard, new_branch_stmt)
} }
}); });
let mono_branches = Vec::from_iter_in(it, arena); let mono_branches = Vec::from_iter_in(it, arena);
@ -5511,7 +5512,7 @@ fn substitute_in_expr<'a>(
} }
#[allow(clippy::too_many_arguments)] #[allow(clippy::too_many_arguments)]
fn store_pattern<'a>( pub fn store_pattern<'a>(
env: &mut Env<'a, '_>, env: &mut Env<'a, '_>,
procs: &mut Procs<'a>, procs: &mut Procs<'a>,
layout_cache: &mut LayoutCache<'a>, layout_cache: &mut LayoutCache<'a>,

View file

@ -208,6 +208,15 @@ impl<'a> UnionLayout<'a> {
UnionLayout::NullableUnwrapped { nullable_id, .. } => *nullable_id == (tag_id != 0), UnionLayout::NullableUnwrapped { nullable_id, .. } => *nullable_id == (tag_id != 0),
} }
} }
pub fn is_nullable(&self) -> bool {
match self {
UnionLayout::NonRecursive(_)
| UnionLayout::Recursive(_)
| UnionLayout::NonNullableUnwrapped { .. } => false,
UnionLayout::NullableWrapped { .. } | UnionLayout::NullableUnwrapped { .. } => true,
}
}
} }
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]

View file

@ -11,6 +11,7 @@ use roc_types::types::Alias;
pub struct SolvedModule { pub struct SolvedModule {
pub solved_types: MutMap<Symbol, SolvedType>, pub solved_types: MutMap<Symbol, SolvedType>,
pub aliases: MutMap<Symbol, Alias>, pub aliases: MutMap<Symbol, Alias>,
pub exposed_symbols: Vec<Symbol>,
pub exposed_vars_by_symbol: MutMap<Symbol, Variable>, pub exposed_vars_by_symbol: MutMap<Symbol, Variable>,
pub problems: Vec<solve::TypeError>, pub problems: Vec<solve::TypeError>,
} }

View file

@ -1939,3 +1939,23 @@ fn list_sort_with() {
RocList<i64> RocList<i64>
); );
} }
#[test]
#[should_panic(expected = r#"Roc failed with message: "invalid ret_layout""#)]
fn lists_with_incompatible_type_param_in_if() {
assert_evals_to!(
indoc!(
r#"
list1 = [ {} ]
list2 = [ "" ]
x = if True then list1 else list2
""
"#
),
RocStr::empty(),
RocStr
);
}

View file

@ -468,6 +468,119 @@ fn nested_pattern_match() {
i64 i64
); );
} }
#[test]
fn if_guard_vanilla() {
assert_evals_to!(
indoc!(
r#"
when "fooz" is
s if s == "foo" -> 0
s -> List.len (Str.toBytes s)
"#
),
4,
i64
);
}
#[test]
#[ignore]
fn when_on_single_value_tag() {
// this fails because the switched-on symbol is not defined
assert_evals_to!(
indoc!(
r#"
when Identity 0 is
Identity 0 -> 0
Identity s -> s
"#
),
6,
i64
);
}
#[test]
#[ignore]
fn if_guard_multiple() {
assert_evals_to!(
indoc!(
r#"
f = \n ->
when Identity n 0 is
Identity x _ if x == 0 -> x + 0
Identity x _ if x == 1 -> x + 0
Identity x _ if x == 2 -> x + 0
Identity x _ -> x - x
{ a: f 0, b: f 1, c: f 2, d: f 4 }
"#
),
(0, 1, 2, 0),
(i64, i64, i64, i64)
);
}
#[test]
fn if_guard_constructor_switch() {
assert_evals_to!(
indoc!(
r#"
when Identity 32 0 is
Identity 41 _ -> 0
Identity s 0 if s == 32 -> 3
# Identity s 0 -> s
Identity z _ -> z
"#
),
3,
i64
);
assert_evals_to!(
indoc!(
r#"
when Identity 42 "" is
Identity 41 _ -> 0
Identity 42 _ if 3 == 3 -> 1
Identity z _ -> z
"#
),
1,
i64
);
assert_evals_to!(
indoc!(
r#"
when Identity 42 "" is
Identity 41 _ -> 0
Identity 42 _ if 3 != 3 -> 1
Identity z _ -> z
"#
),
42,
i64
);
}
#[test]
fn if_guard_constructor_chain() {
assert_evals_to!(
indoc!(
r#"
when Identity 43 0 is
Identity 42 _ if 3 == 3 -> 43
# Identity 42 _ -> 1
Identity z _ -> z
"#
),
43,
i64
);
}
#[test] #[test]
fn if_guard_pattern_false() { fn if_guard_pattern_false() {
assert_evals_to!( assert_evals_to!(
@ -486,6 +599,24 @@ fn if_guard_pattern_false() {
); );
} }
#[test]
fn if_guard_switch() {
assert_evals_to!(
indoc!(
r#"
wrapper = \{} ->
when 2 is
2 | 3 if False -> 0
_ -> 42
wrapper {}
"#
),
42,
i64
);
}
#[test] #[test]
fn if_guard_pattern_true() { fn if_guard_pattern_true() {
assert_evals_to!( assert_evals_to!(
@ -1033,3 +1164,20 @@ fn applied_tag_function_linked_list() {
i64 i64
); );
} }
#[test]
#[should_panic(expected = "")]
fn tag_must_be_its_own_type() {
assert_evals_to!(
indoc!(
r#"
z : [ A, B, C ]
z = Z
z
"#
),
1,
i64
);
}

View file

@ -1,23 +1,23 @@
procedure Test.1 (Test.3): procedure Test.1 (Test.3):
let Test.6 = 2i64; let Test.6 = 2i64;
joinpoint Test.12: joinpoint Test.11:
let Test.10 = 0i64; let Test.10 = 0i64;
ret Test.10; ret Test.10;
in in
let Test.11 = 2i64; let Test.13 = 2i64;
let Test.14 = lowlevel Eq Test.11 Test.6; let Test.14 = lowlevel Eq Test.13 Test.6;
if Test.14 then if Test.14 then
joinpoint Test.8 Test.13: joinpoint Test.8 Test.12:
if Test.13 then if Test.12 then
let Test.7 = 42i64; let Test.7 = 42i64;
ret Test.7; ret Test.7;
else else
jump Test.12; jump Test.11;
in in
let Test.9 = false; let Test.9 = false;
jump Test.8 Test.9; jump Test.8 Test.9;
else else
jump Test.12; jump Test.11;
procedure Test.0 (): procedure Test.0 ():
let Test.5 = Struct {}; let Test.5 = Struct {};

View file

@ -7,8 +7,8 @@ procedure Test.0 ():
let Test.20 = Just Test.21; let Test.20 = Just Test.21;
let Test.2 = Just Test.20; let Test.2 = Just Test.20;
joinpoint Test.17: joinpoint Test.17:
let Test.11 = 1i64; let Test.10 = 1i64;
ret Test.11; ret Test.10;
in in
let Test.15 = 0i64; let Test.15 = 0i64;
let Test.16 = GetTagId Test.2; let Test.16 = GetTagId Test.2;
@ -19,8 +19,8 @@ procedure Test.0 ():
let Test.14 = GetTagId Test.12; let Test.14 = GetTagId Test.12;
let Test.18 = lowlevel Eq Test.13 Test.14; let Test.18 = lowlevel Eq Test.13 Test.14;
if Test.18 then if Test.18 then
let Test.10 = UnionAtIndex (Id 0) (Index 0) Test.2; let Test.11 = UnionAtIndex (Id 0) (Index 0) Test.2;
let Test.5 = UnionAtIndex (Id 0) (Index 0) Test.10; let Test.5 = UnionAtIndex (Id 0) (Index 0) Test.11;
let Test.7 = 1i64; let Test.7 = 1i64;
let Test.6 = CallByName Num.24 Test.5 Test.7; let Test.6 = CallByName Num.24 Test.5 Test.7;
ret Test.6; ret Test.6;

View file

@ -22,9 +22,9 @@ procedure Test.0 ():
let Test.7 = 1i64; let Test.7 = 1i64;
ret Test.7; ret Test.7;
else else
let Test.9 = 0i64; let Test.8 = 0i64;
ret Test.9; ret Test.8;
else else
dec Test.2; dec Test.2;
let Test.10 = 0i64; let Test.9 = 0i64;
ret Test.10; ret Test.9;

View file

@ -26,8 +26,8 @@ procedure Test.1 (Test.2):
let Test.29 = CallByName List.3 Test.2 Test.31; let Test.29 = CallByName List.3 Test.2 Test.31;
let Test.8 = Struct {Test.29, Test.30}; let Test.8 = Struct {Test.29, Test.30};
joinpoint Test.26: joinpoint Test.26:
let Test.19 = Array []; let Test.17 = Array [];
ret Test.19; ret Test.17;
in in
let Test.23 = StructAtIndex 1 Test.8; let Test.23 = StructAtIndex 1 Test.8;
let Test.24 = 1i64; let Test.24 = 1i64;
@ -39,10 +39,10 @@ procedure Test.1 (Test.2):
let Test.22 = GetTagId Test.20; let Test.22 = GetTagId Test.20;
let Test.27 = lowlevel Eq Test.21 Test.22; let Test.27 = lowlevel Eq Test.21 Test.22;
if Test.27 then if Test.27 then
let Test.18 = StructAtIndex 0 Test.8; let Test.19 = StructAtIndex 0 Test.8;
let Test.4 = UnionAtIndex (Id 1) (Index 0) Test.18; let Test.4 = UnionAtIndex (Id 1) (Index 0) Test.19;
let Test.17 = StructAtIndex 1 Test.8; let Test.18 = StructAtIndex 1 Test.8;
let Test.5 = UnionAtIndex (Id 1) (Index 0) Test.17; let Test.5 = UnionAtIndex (Id 1) (Index 0) Test.18;
let Test.16 = 0i64; let Test.16 = 0i64;
let Test.10 = CallByName List.4 Test.2 Test.16 Test.5; let Test.10 = CallByName List.4 Test.2 Test.16 Test.5;
let Test.11 = 0i64; let Test.11 = 0i64;

View file

@ -24,8 +24,8 @@ procedure Test.1 (Test.2, Test.3, Test.4):
let Test.32 = CallByName List.3 Test.4 Test.2; let Test.32 = CallByName List.3 Test.4 Test.2;
let Test.13 = Struct {Test.32, Test.33}; let Test.13 = Struct {Test.32, Test.33};
joinpoint Test.29: joinpoint Test.29:
let Test.22 = Array []; let Test.20 = Array [];
ret Test.22; ret Test.20;
in in
let Test.26 = StructAtIndex 1 Test.13; let Test.26 = StructAtIndex 1 Test.13;
let Test.27 = 1i64; let Test.27 = 1i64;
@ -37,10 +37,10 @@ procedure Test.1 (Test.2, Test.3, Test.4):
let Test.25 = GetTagId Test.23; let Test.25 = GetTagId Test.23;
let Test.30 = lowlevel Eq Test.24 Test.25; let Test.30 = lowlevel Eq Test.24 Test.25;
if Test.30 then if Test.30 then
let Test.21 = StructAtIndex 0 Test.13; let Test.22 = StructAtIndex 0 Test.13;
let Test.6 = UnionAtIndex (Id 1) (Index 0) Test.21; let Test.6 = UnionAtIndex (Id 1) (Index 0) Test.22;
let Test.20 = StructAtIndex 1 Test.13; let Test.21 = StructAtIndex 1 Test.13;
let Test.7 = UnionAtIndex (Id 1) (Index 0) Test.20; let Test.7 = UnionAtIndex (Id 1) (Index 0) Test.21;
let Test.15 = CallByName List.4 Test.4 Test.2 Test.7; let Test.15 = CallByName List.4 Test.4 Test.2 Test.7;
let Test.14 = CallByName List.4 Test.15 Test.3 Test.6; let Test.14 = CallByName List.4 Test.15 Test.3 Test.6;
ret Test.14; ret Test.14;

View file

@ -7,8 +7,8 @@ procedure Test.0 ():
let Test.20 = Just Test.21; let Test.20 = Just Test.21;
let Test.2 = Just Test.20; let Test.2 = Just Test.20;
joinpoint Test.17: joinpoint Test.17:
let Test.11 = 1i64; let Test.10 = 1i64;
ret Test.11; ret Test.10;
in in
let Test.15 = 0i64; let Test.15 = 0i64;
let Test.16 = GetTagId Test.2; let Test.16 = GetTagId Test.2;
@ -19,8 +19,8 @@ procedure Test.0 ():
let Test.14 = GetTagId Test.12; let Test.14 = GetTagId Test.12;
let Test.18 = lowlevel Eq Test.13 Test.14; let Test.18 = lowlevel Eq Test.13 Test.14;
if Test.18 then if Test.18 then
let Test.10 = UnionAtIndex (Id 0) (Index 0) Test.2; let Test.11 = UnionAtIndex (Id 0) (Index 0) Test.2;
let Test.5 = UnionAtIndex (Id 0) (Index 0) Test.10; let Test.5 = UnionAtIndex (Id 0) (Index 0) Test.11;
let Test.7 = 1i64; let Test.7 = 1i64;
let Test.6 = CallByName Num.24 Test.5 Test.7; let Test.6 = CallByName Num.24 Test.5 Test.7;
ret Test.6; ret Test.6;

View file

@ -14,6 +14,7 @@ roc_builtins = { path = "../compiler/builtins" }
roc_can = { path = "../compiler/can" } roc_can = { path = "../compiler/can" }
roc_module = { path = "../compiler/module" } roc_module = { path = "../compiler/module" }
roc_region = { path = "../compiler/region" } roc_region = { path = "../compiler/region" }
roc_types = { path = "../compiler/types" }
roc_collections = { path = "../compiler/collections" } roc_collections = { path = "../compiler/collections" }
bumpalo = { version = "3.2", features = ["collections"] } bumpalo = { version = "3.2", features = ["collections"] }

View file

@ -63,23 +63,12 @@ pub fn generate(filenames: Vec<PathBuf>, std_lib: StdLib, build_dir: &Path) {
// Write each package's module docs html file // Write each package's module docs html file
for loaded_module in package.modules.iter_mut() { for loaded_module in package.modules.iter_mut() {
let mut exposed_values = loaded_module let exports = loaded_module
.exposed_to_host .exposed_values
.iter() .iter()
.map(|(symbol, _)| symbol.ident_string(&loaded_module.interns).to_string()) .map(|symbol| symbol.ident_string(&loaded_module.interns).to_string())
.collect::<Vec<String>>(); .collect::<Vec<String>>();
let mut exposed_aliases = loaded_module
.exposed_aliases
.iter()
.map(|(symbol, _)| symbol.ident_string(&loaded_module.interns).to_string())
.collect::<Vec<String>>();
let mut exports = Vec::new();
exports.append(&mut exposed_values);
exports.append(&mut exposed_aliases);
for module in loaded_module.documentation.values_mut() { for module in loaded_module.documentation.values_mut() {
let module_dir = build_dir.join(module.name.replace(".", "/").as_str()); let module_dir = build_dir.join(module.name.replace(".", "/").as_str());
@ -122,12 +111,14 @@ fn render_main_content(
); );
for entry in &module.entries { for entry in &module.entries {
let mut should_render_entry = true;
if let DocDef(def) = entry { if let DocDef(def) = entry {
if !exposed_values.contains(&def.name) { // We dont want to render entries that arent exposed
break; should_render_entry = exposed_values.contains(&def.name);
}
} }
if should_render_entry {
match entry { match entry {
DocEntry::DocDef(doc_def) => { DocEntry::DocDef(doc_def) => {
let mut href = String::new(); let mut href = String::new();
@ -172,12 +163,12 @@ fn render_main_content(
} }
} }
DocEntry::DetachedDoc(docs) => { DocEntry::DetachedDoc(docs) => {
buf.push_str( let markdown = markdown_to_html(&mut module.scope, interns, docs.to_string());
markdown_to_html(&mut module.scope, interns, docs.to_string()).as_str(), buf.push_str(markdown.as_str());
);
} }
}; };
} }
}
buf buf
} }
@ -256,6 +247,7 @@ fn render_sidebar<'a, I: Iterator<Item = &'a ModuleDocumentation>>(modules: I) -
let href = { let href = {
let mut href_buf = String::new(); let mut href_buf = String::new();
href_buf.push('/');
href_buf.push_str(name); href_buf.push_str(name);
href_buf href_buf
}; };
@ -435,10 +427,9 @@ fn type_annotation_to_html(indent_level: usize, buf: &mut String, type_ann: &Typ
let more_than_one_field = fields_len > 1; let more_than_one_field = fields_len > 1;
let record_indent = indent_level + 1; let record_indent = indent_level + 1;
if more_than_one_field {
new_line(buf);
indent(buf, record_indent); if more_than_one_field {
indent(buf, indent_level);
} }
buf.push('{'); buf.push('{');
@ -534,7 +525,7 @@ fn type_annotation_to_html(indent_level: usize, buf: &mut String, type_ann: &Typ
} }
} }
fn insert_doc_links(scope: &mut Scope, interns: &Interns, markdown: String) -> String { pub fn insert_doc_links(scope: &mut Scope, interns: &Interns, markdown: String) -> String {
let buf = &markdown; let buf = &markdown;
let mut result = String::new(); let mut result = String::new();
@ -565,7 +556,7 @@ fn insert_doc_links(scope: &mut Scope, interns: &Interns, markdown: String) -> S
interns, interns,
&buf.chars() &buf.chars()
.skip(from + 1) .skip(from + 1)
.take(index - from) .take(index - from - 1)
.collect::<String>(), .collect::<String>(),
); );
@ -580,17 +571,23 @@ fn insert_doc_links(scope: &mut Scope, interns: &Interns, markdown: String) -> S
} }
} }
if chomping_from == None {
markdown
} else {
result result
} }
}
fn make_doc_link(scope: &mut Scope, interns: &Interns, doc_item: &str) -> String { fn make_doc_link(scope: &mut Scope, interns: &Interns, doc_item: &str) -> String {
match scope.lookup(&doc_item.into(), Region::zero()) { match scope.lookup(&doc_item.into(), Region::zero()) {
Ok(symbol) => { Ok(symbol) => {
let module_str = symbol.module_string(interns); let module_str = symbol.module_string(interns);
let ident_str = symbol.ident_string(interns); let ident_str = symbol.ident_string(interns);
let mut link = String::new(); let mut link = String::new();
link.push('/');
link.push_str(module_str); link.push_str(module_str);
link.push('#'); link.push('#');
link.push_str(ident_str); link.push_str(ident_str);
@ -627,7 +624,8 @@ fn markdown_to_html(scope: &mut Scope, interns: &Interns, markdown: String) -> S
let mut docs_parser = vec![]; let mut docs_parser = vec![];
let (_, _) = pulldown_cmark::Parser::new_ext(&markdown_with_links, markdown_options).fold( let (_, _) = pulldown_cmark::Parser::new_ext(&markdown_with_links, markdown_options).fold(
(0, 0), (0, 0),
|(start_quote_count, end_quote_count), event| match event { |(start_quote_count, end_quote_count), event| {
match event {
// Replace this sequence (`>>>` syntax): // Replace this sequence (`>>>` syntax):
// Start(BlockQuote) // Start(BlockQuote)
// Start(BlockQuote) // Start(BlockQuote)
@ -679,10 +677,12 @@ fn markdown_to_html(scope: &mut Scope, interns: &Interns, markdown: String) -> S
docs_parser.push(event); docs_parser.push(event);
(0, 0) (0, 0)
} }
}
}, },
); );
let mut docs_html = String::new(); let mut docs_html = String::new();
pulldown_cmark::html::push_html(&mut docs_html, docs_parser.into_iter()); pulldown_cmark::html::push_html(&mut docs_html, docs_parser.into_iter());
docs_html docs_html

View file

@ -0,0 +1,43 @@
#[macro_use]
extern crate pretty_assertions;
#[cfg(test)]
mod insert_doc_links {
use roc_can::env::Env;
use roc_can::scope::Scope;
use roc_collections::all::MutMap;
use roc_docs::insert_doc_links;
use roc_module::symbol::{IdentIds, Interns, ModuleIds};
use roc_types::subs::VarStore;
#[test]
fn no_doc_links() {
let home = ModuleIds::default().get_or_insert(&"Test".into());
let module_ids = ModuleIds::default();
let dep_idents = IdentIds::exposed_builtins(0);
let env = Env::new(home, dep_idents, &module_ids, IdentIds::default());
let all_ident_ids = MutMap::default();
let interns = Interns {
module_ids: env.module_ids.clone(),
all_ident_ids,
};
let var_store = &mut VarStore::default();
let scope = &mut Scope::new(home, var_store);
let markdown = r#"
# Hello
Hello thanks for using my package
"#;
assert_eq!(
markdown,
insert_doc_links(scope, &interns, markdown.to_string()),
);
}
}

View file

@ -32,6 +32,8 @@ Nice collection of research on innovative editors, [link](https://futureofcoding
* [Primitive](https://primitive.io/) code exploration in Virtual Reality * [Primitive](https://primitive.io/) code exploration in Virtual Reality
* [Luna](https://www.luna-lang.org/) language for interactive data processing and visualization * [Luna](https://www.luna-lang.org/) language for interactive data processing and visualization
* [Hazel Livelits](https://hazel.org/papers/livelits-paper.pdf) interactive plugins, see GIF's [here](https://twitter.com/disconcision/status/1408155781120376833). * [Hazel Livelits](https://hazel.org/papers/livelits-paper.pdf) interactive plugins, see GIF's [here](https://twitter.com/disconcision/status/1408155781120376833).
* [Thorough review](https://drossbucket.com/2021/06/30/hacker-news-folk-wisdom-on-visual-programming/) of pros and cons of text versus visual programming.
### Debugging ### Debugging
* [VS code debug visualization](https://marketplace.visualstudio.com/items?itemName=hediet.debug-visualizer) * [VS code debug visualization](https://marketplace.visualstudio.com/items?itemName=hediet.debug-visualizer)
@ -44,6 +46,8 @@ Nice collection of research on innovative editors, [link](https://futureofcoding
e.g. you have a test `calculate_sum_test` that only uses the function `add`, when the test fails you should be able to see a diff showing only what changed for the function `add`. It would also be great to have a diff of [expression values](https://homepages.cwi.nl/~storm/livelit/images/bret.png) Bret Victor style. An ambitious project would be to suggest or automatically try fixes based on these diffs. e.g. you have a test `calculate_sum_test` that only uses the function `add`, when the test fails you should be able to see a diff showing only what changed for the function `add`. It would also be great to have a diff of [expression values](https://homepages.cwi.nl/~storm/livelit/images/bret.png) Bret Victor style. An ambitious project would be to suggest or automatically try fixes based on these diffs.
* I think it could be possible to create a minimal reproduction of a program / block of code / code used by a single test. So for a failing unit test I would expect it to extract imports, the platform, types and functions that are necessary to run only that unit test and put them in a standalone roc project. This would be useful for sharing bugs with library+application authors and colleagues, for profiling or debugging with all "clutter" removed. * I think it could be possible to create a minimal reproduction of a program / block of code / code used by a single test. So for a failing unit test I would expect it to extract imports, the platform, types and functions that are necessary to run only that unit test and put them in a standalone roc project. This would be useful for sharing bugs with library+application authors and colleagues, for profiling or debugging with all "clutter" removed.
* Ability to share program state at a breakpoint with someone else. * Ability to share program state at a breakpoint with someone else.
* For debugging we should aim for maximal useful observability. For example Rust's enum values can not be easily viewed in the CodeLLDB debugger, you actually need to call a print method that does pattern matching to be able to view useful information.
* We previously discussed recording full traces of programs so they do not have to be re-run multiple times in the debugging process. We should encourage roc developers to experiment with creating debugging representations of this AST+"execution trace", it could lead to some cool stuff.
### Cool regular editors ### Cool regular editors
@ -71,6 +75,7 @@ e.g. you have a test `calculate_sum_test` that only uses the function `add`, whe
* [Sourcetrail](https://www.sourcetrail.com/) nice tree-like source explorer. * [Sourcetrail](https://www.sourcetrail.com/) nice tree-like source explorer.
* [Unisonweb](https://www.unisonweb.org), definition based [editor](https://twitter.com/shojberg/status/1364666092598288385) as opposed to file based. * [Unisonweb](https://www.unisonweb.org), definition based [editor](https://twitter.com/shojberg/status/1364666092598288385) as opposed to file based.
* [Utopia](https://utopia.app/) integrated design and development environment for React. Design and code update each other, in real time. * [Utopia](https://utopia.app/) integrated design and development environment for React. Design and code update each other, in real time.
* [Paredit](https://calva.io/paredit/) structural clojure editing, navigation and selection. [Another overview](http://danmidwood.com/content/2014/11/21/animated-paredit.html)
### Voice Interaction Related ### Voice Interaction Related
@ -84,6 +89,26 @@ e.g. you have a test `calculate_sum_test` that only uses the function `add`, whe
* Adjusting settings: switch to light theme, increase font size... * Adjusting settings: switch to light theme, increase font size...
* Use (context specific) voice command state machine to assist Machine Learning voice recognition model. * Use (context specific) voice command state machine to assist Machine Learning voice recognition model.
* Nice special use case: using voice to code while on treadmill desk. * Nice special use case: using voice to code while on treadmill desk.
* Use word embeddings to find most similar voice command to recorded input in vector space.
#### Useful voice commands
* clear all breakpoints
* increase/decrease font size
* switch to dark/light/high-contrast mode
* open/go to file "Main" (fuzzy matching)
* go to function "foo"
* go to definition
* show all references(uses) of this function/type/...
* show history timeline of this function/file
* show recent projects
* generate unit test for this function
* generate unit test for this function based on debug trace (input and output is recorded and used in test)
* who wrote this line (git blame integration)
* search documentation of library X for Foo
* show example of how to use library function Foo
* open google/github/duckduckgo search for error...
* show editor plugins for library X
#### Inspiration #### Inspiration
@ -116,7 +141,7 @@ e.g. you have a test `calculate_sum_test` that only uses the function `add`, whe
* Show productivity/feature tips on startup. Show link to page with all tips. Allow not seeing tips next time. * Show productivity/feature tips on startup. Show link to page with all tips. Allow not seeing tips next time.
* Search friendly editor docs inside the editor. Offer to send search string to Roc maintainers when no results, or if no results were clicked. * Search friendly editor docs inside the editor. Offer to send search string to Roc maintainers when no results, or if no results were clicked.
* File history timeline view. Show timeline with commits that changed this file, the number of lines added and deleted as well as which user made the changes. Arrow navigation should allow you to quickly view different versions of the file. * File history timeline view. Show timeline with commits that changed this file, the number of lines added and deleted as well as which user made the changes. Arrow navigation should allow you to quickly view different versions of the file.
* Suggested quick fixes should be directly visible and clickable. Not like in vs code where you put the caret on an error until a lightbulb appears in the margin which you have to click for the fixes to apppear, after which you click to apply the fix you want :( . * Suggested quick fixes should be directly visible and clickable. Not like in vs code where you put the caret on an error until a lightbulb appears in the margin which you have to click for the fixes to apppear, after which you click to apply the fix you want :( . You should be able to apply suggestions in rapid succession. e.g. if you copy some roc code from the internet you should be able to apply 5 import suggestions quickly.
* Regex-like find and substitution based on plain english description and example (replacement). i.e. replace all `[` between double quotes with `{`. [Inspiration](https://alexmoltzau.medium.com/english-to-regex-thanks-to-gpt-3-13f03b68236e). * Regex-like find and substitution based on plain english description and example (replacement). i.e. replace all `[` between double quotes with `{`. [Inspiration](https://alexmoltzau.medium.com/english-to-regex-thanks-to-gpt-3-13f03b68236e).
* Show productivity tips based on behavior. i.e. if the user is scrolling through the error bar and clicking on the next error several times, show a tip with "go to next error" shortcut. * Show productivity tips based on behavior. i.e. if the user is scrolling through the error bar and clicking on the next error several times, show a tip with "go to next error" shortcut.
* Command to "benchmark this function" or "benchmark this test" with flamegraph and execution time per line. * Command to "benchmark this function" or "benchmark this test" with flamegraph and execution time per line.
@ -127,6 +152,8 @@ e.g. you have a test `calculate_sum_test` that only uses the function `add`, whe
* search a local history of previously encountered errors and fixes * search a local history of previously encountered errors and fixes
* search through a database of our zullip questions * search through a database of our zullip questions
* ... * ...
* smart insert: press a shortcut and enter a plain english description of a code snippet you need. Examples: "convert string to list of chars", "sort list of records by field foo descending", "plot this list with date on x-axis"...
* After the user has refactored code to be simpler, try finding other places in the code base where the same simplification can be made.
#### Autocomplete #### Autocomplete
@ -198,6 +225,12 @@ e.g. you have a test `calculate_sum_test` that only uses the function `add`, whe
The API and documentation are meant to interface with humans. The API and documentation are meant to interface with humans.
* [DocC](https://developer.apple.com/videos/play/wwdc2021/10166/) neat documentation approach for swift. * [DocC](https://developer.apple.com/videos/play/wwdc2021/10166/) neat documentation approach for swift.
## General Plugin Ideas
### Inspiration
- [Boop](https://github.com/IvanMathy/Boop) scriptable scratchpad for developers. Contains collection of useful conversions: json formatting, url encoding, encode to base64...
## General Thoughts/Ideas ## General Thoughts/Ideas
Thoughts and ideas possibly taken from above inspirations or separate. Thoughts and ideas possibly taken from above inspirations or separate.

View file

@ -1,5 +0,0 @@
#!/bin/bash
sed -i -e 's/\/\/pub mod mvc/pub mod mvc/g' src/lib.rs
sed -i -e 's/\/\/pub mod text_buffer/pub mod text_buffer/g' src/lib.rs
sed -i -e 's/^mod mvc/\/\/mod mvc/g' src/lib.rs
sed -i -e 's/^mod text_buffer/\/\/mod text_buffer/g' src/lib.rs

View file

@ -1,5 +1,5 @@
use crate::editor::slow_pool::MarkNodeId;
use crate::ui::ui_error::UIResult; use crate::ui::ui_error::UIResult;
use crate::{editor::slow_pool::MarkNodeId, ui::text::text_pos::TextPos};
use colored::*; use colored::*;
use snafu::{Backtrace, ErrorCompat, NoneError, ResultExt, Snafu}; use snafu::{Backtrace, ErrorCompat, NoneError, ResultExt, Snafu};
@ -111,6 +111,15 @@ pub enum EdError {
backtrace: Backtrace, backtrace: Backtrace,
}, },
#[snafu(display(
"NoNodeAtCaretPosition: there was no node at the current caret position {:?}.",
caret_pos,
))]
NoNodeAtCaretPosition {
caret_pos: TextPos,
backtrace: Backtrace,
},
#[snafu(display( #[snafu(display(
"UnexpectedASTNode: required a {} at this position, node was a {}.", "UnexpectedASTNode: required a {} at this position, node was a {}.",
required_node_type, required_node_type,

View file

@ -2,16 +2,19 @@ use super::attribute::Attributes;
use crate::editor::ed_error::EdResult; use crate::editor::ed_error::EdResult;
use crate::editor::ed_error::ExpectedTextNode; use crate::editor::ed_error::ExpectedTextNode;
use crate::editor::ed_error::GetContentOnNestedNode; use crate::editor::ed_error::GetContentOnNestedNode;
use crate::editor::ed_error::NestedNodeRequired; use crate::editor::ed_error::{NestedNodeMissingChild, NestedNodeRequired};
use crate::editor::slow_pool::MarkNodeId; use crate::editor::slow_pool::MarkNodeId;
use crate::editor::slow_pool::SlowPool; use crate::editor::slow_pool::SlowPool;
use crate::editor::syntax_highlight::HighlightStyle; use crate::editor::syntax_highlight::HighlightStyle;
use crate::editor::util::index_of;
use crate::lang::ast::ExprId;
use crate::lang::ast::RecordField; use crate::lang::ast::RecordField;
use crate::lang::{ use crate::lang::{
ast::Expr2, ast::Expr2,
expr::Env, expr::Env,
pool::{NodeId, PoolStr}, pool::{NodeId, PoolStr},
}; };
use crate::ui::util::slice_get;
use bumpalo::Bump; use bumpalo::Bump;
use std::fmt; use std::fmt;
@ -72,6 +75,83 @@ impl MarkupNode {
} }
} }
// return (index of child in list of children, closest ast index of child corresponding to ast node)
pub fn get_child_indices(
&self,
child_id: MarkNodeId,
markup_node_pool: &SlowPool,
) -> EdResult<(usize, usize)> {
match self {
MarkupNode::Nested { children_ids, .. } => {
let mut mark_child_index_opt: Option<usize> = None;
let mut child_ids_with_ast: Vec<MarkNodeId> = Vec::new();
let self_ast_id = self.get_ast_node_id();
for (indx, &mark_child_id) in children_ids.iter().enumerate() {
if mark_child_id == child_id {
mark_child_index_opt = Some(indx);
}
let child_mark_node = markup_node_pool.get(mark_child_id);
// a node that points to the same ast_node as the parent is a ',', '[', ']'
// those are not "real" ast children
if child_mark_node.get_ast_node_id() != self_ast_id {
child_ids_with_ast.push(mark_child_id)
}
}
if let Some(child_index) = mark_child_index_opt {
if child_index == (children_ids.len() - 1) {
let ast_child_index = child_ids_with_ast.len();
Ok((child_index, ast_child_index))
} else {
// we want to find the index of the closest ast mark node to child_index
let indices_in_mark_res: EdResult<Vec<usize>> = child_ids_with_ast
.iter()
.map(|c_id| index_of(*c_id, children_ids))
.collect();
let indices_in_mark = indices_in_mark_res?;
let mut last_diff = usize::MAX;
let mut best_index = 0;
for index in indices_in_mark.iter() {
let curr_diff =
isize::abs((*index as isize) - (child_index as isize)) as usize;
if curr_diff >= last_diff {
break;
} else {
last_diff = curr_diff;
best_index = *index;
}
}
let closest_ast_child = slice_get(best_index, &children_ids)?;
let closest_ast_child_index =
index_of(*closest_ast_child, &child_ids_with_ast)?;
// +1 because we want to insert after ast_child
Ok((child_index, closest_ast_child_index + 1))
}
} else {
NestedNodeMissingChild {
node_id: child_id,
children_ids: children_ids.clone(),
}
.fail()
}
}
_ => NestedNodeRequired {
node_type: self.node_type_as_string(),
}
.fail(),
}
}
// can't be &str, this creates borrowing issues // can't be &str, this creates borrowing issues
pub fn get_content(&self) -> EdResult<String> { pub fn get_content(&self) -> EdResult<String> {
match self { match self {
@ -146,6 +226,7 @@ pub const RIGHT_ACCOLADE: &str = " }";
pub const LEFT_SQUARE_BR: &str = "[ "; pub const LEFT_SQUARE_BR: &str = "[ ";
pub const RIGHT_SQUARE_BR: &str = " ]"; pub const RIGHT_SQUARE_BR: &str = " ]";
pub const COLON: &str = ": "; pub const COLON: &str = ": ";
pub const COMMA: &str = ", ";
pub const STRING_QUOTES: &str = "\"\""; pub const STRING_QUOTES: &str = "\"\"";
fn new_markup_node( fn new_markup_node(
@ -177,12 +258,16 @@ pub fn expr2_to_markup<'a, 'b>(
Expr2::SmallInt { text, .. } Expr2::SmallInt { text, .. }
| Expr2::I128 { text, .. } | Expr2::I128 { text, .. }
| Expr2::U128 { text, .. } | Expr2::U128 { text, .. }
| Expr2::Float { text, .. } => new_markup_node( | Expr2::Float { text, .. } => {
get_string(env, &text), let num_str = get_string(env, &text);
new_markup_node(
num_str,
expr2_node_id, expr2_node_id,
HighlightStyle::Number, HighlightStyle::Number,
markup_node_pool, markup_node_pool,
), )
}
Expr2::Str(text) => new_markup_node( Expr2::Str(text) => new_markup_node(
"\"".to_owned() + text.as_str(env.pool) + "\"", "\"".to_owned() + text.as_str(env.pool) + "\"",
expr2_node_id, expr2_node_id,
@ -217,21 +302,24 @@ pub fn expr2_to_markup<'a, 'b>(
markup_node_pool, markup_node_pool,
)]; )];
for (idx, node_id) in elems.iter_node_ids().enumerate() { let indexed_node_ids: Vec<(usize, ExprId)> =
let sub_expr2 = env.pool.get(node_id); elems.iter(env.pool).copied().enumerate().collect();
for (idx, node_id) in indexed_node_ids.iter() {
let sub_expr2 = env.pool.get(*node_id);
children_ids.push(expr2_to_markup( children_ids.push(expr2_to_markup(
arena, arena,
env, env,
sub_expr2, sub_expr2,
node_id, *node_id,
markup_node_pool, markup_node_pool,
)); ));
if idx + 1 < elems.len() { if idx + 1 < elems.len() {
children_ids.push(new_markup_node( children_ids.push(new_markup_node(
", ".to_string(), ", ".to_string(),
node_id, expr2_node_id,
HighlightStyle::Operator, HighlightStyle::Operator,
markup_node_pool, markup_node_pool,
)); ));
@ -349,6 +437,12 @@ pub fn expr2_to_markup<'a, 'b>(
syn_high_style: HighlightStyle::Blank, syn_high_style: HighlightStyle::Blank,
parent_id_opt: None, parent_id_opt: None,
}), }),
Expr2::RuntimeError() => new_markup_node(
"RunTimeError".to_string(),
expr2_node_id,
HighlightStyle::Blank,
markup_node_pool,
),
rest => todo!("implement expr2_to_markup for {:?}", rest), rest => todo!("implement expr2_to_markup for {:?}", rest),
} }
} }

View file

@ -4,7 +4,7 @@ use crate::editor::slow_pool::{MarkNodeId, SlowPool};
use crate::editor::syntax_highlight::HighlightStyle; use crate::editor::syntax_highlight::HighlightStyle;
use crate::editor::{ use crate::editor::{
ed_error::EdError::ParseError, ed_error::EdError::ParseError,
ed_error::EdResult, ed_error::{EdResult, MissingParent, NoNodeAtCaretPosition},
markup::attribute::Attributes, markup::attribute::Attributes,
markup::nodes::{expr2_to_markup, set_parent_for_all, MarkupNode}, markup::nodes::{expr2_to_markup, set_parent_for_all, MarkupNode},
}; };
@ -134,6 +134,29 @@ impl<'a> EdModel<'a> {
pub fn node_exists_at_caret(&self) -> bool { pub fn node_exists_at_caret(&self) -> bool {
self.grid_node_map.node_exists_at_pos(self.get_caret()) self.grid_node_map.node_exists_at_pos(self.get_caret())
} }
// return (index of child in list of children, closest ast index of child corresponding to ast node) of MarkupNode at current caret position
pub fn get_curr_child_indices(&self) -> EdResult<(usize, usize)> {
if self.node_exists_at_caret() {
let curr_mark_node_id = self.get_curr_mark_node_id()?;
let curr_mark_node = self.markup_node_pool.get(curr_mark_node_id);
if let Some(parent_id) = curr_mark_node.get_parent_id_opt() {
let parent = self.markup_node_pool.get(parent_id);
parent.get_child_indices(curr_mark_node_id, &self.markup_node_pool)
} else {
MissingParent {
node_id: curr_mark_node_id,
}
.fail()
}
} else {
NoNodeAtCaretPosition {
caret_pos: self.get_caret(),
}
.fail()
}
}
} }
#[derive(Debug)] #[derive(Debug)]
@ -159,8 +182,7 @@ impl<'a> EdModule<'a> {
let ast_root_id = env.pool.add(expr2); let ast_root_id = env.pool.add(expr2);
// for debugging // for debugging
// let expr2_str = expr2_to_string(ast_root_id, env.pool); // dbg!(expr2_to_string(ast_root_id, env.pool));
// println!("expr2_string: {}", expr2_str);
Ok(EdModule { env, ast_root_id }) Ok(EdModule { env, ast_root_id })
} }

View file

@ -13,7 +13,7 @@ use crate::editor::mvc::ed_model::EdModel;
use crate::editor::mvc::ed_model::SelectedExpression; use crate::editor::mvc::ed_model::SelectedExpression;
use crate::editor::mvc::int_update::start_new_int; use crate::editor::mvc::int_update::start_new_int;
use crate::editor::mvc::int_update::update_int; use crate::editor::mvc::int_update::update_int;
use crate::editor::mvc::list_update::{prep_empty_list, start_new_list}; use crate::editor::mvc::list_update::{add_blank_child, start_new_list};
use crate::editor::mvc::lookup_update::update_invalid_lookup; use crate::editor::mvc::lookup_update::update_invalid_lookup;
use crate::editor::mvc::record_update::start_new_record; use crate::editor::mvc::record_update::start_new_record;
use crate::editor::mvc::record_update::update_empty_record; use crate::editor::mvc::record_update::update_empty_record;
@ -80,6 +80,15 @@ impl<'a> EdModel<'a> {
} }
} }
// disregards EdModel.code_lines because the caller knows the resulting caret position will be valid.
// allows us to prevent multiple updates to EdModel.code_lines
pub fn simple_move_carets_left(&mut self, repeat: usize) {
for caret_tup in self.caret_w_select_vec.iter_mut() {
caret_tup.0.caret_pos.column -= repeat;
caret_tup.1 = None;
}
}
pub fn build_node_map_from_markup( pub fn build_node_map_from_markup(
markup_root_id: MarkNodeId, markup_root_id: MarkNodeId,
markup_node_pool: &SlowPool, markup_node_pool: &SlowPool,
@ -687,16 +696,16 @@ pub fn handle_new_char(received_char: &char, ed_model: &mut EdModel) -> EdResult
Expr2::List{ elem_var: _, elems: _} => { Expr2::List{ elem_var: _, elems: _} => {
let prev_mark_node = ed_model.markup_node_pool.get(prev_mark_node_id); let prev_mark_node = ed_model.markup_node_pool.get(prev_mark_node_id);
if prev_mark_node.get_content()? == nodes::LEFT_SQUARE_BR { if prev_mark_node.get_content()? == nodes::LEFT_SQUARE_BR && curr_mark_node.get_content()? == nodes::RIGHT_SQUARE_BR {
if curr_mark_node.get_content()? == nodes::RIGHT_SQUARE_BR { // based on if, we are at the start of the list
prep_empty_list(ed_model)?; // insert a Blank first, this results in cleaner code let new_child_index = 1;
let new_ast_child_index = 0;
// insert a Blank first, this results in cleaner code
add_blank_child(new_child_index, new_ast_child_index, ed_model)?;
handle_new_char(received_char, ed_model)? handle_new_char(received_char, ed_model)?
} else { } else {
InputOutcome::Ignored InputOutcome::Ignored
} }
} else {
InputOutcome::Ignored
}
} }
_ => { _ => {
match ast_node_ref { match ast_node_ref {
@ -726,19 +735,54 @@ pub fn handle_new_char(received_char: &char, ed_model: &mut EdModel) -> EdResult
} else { } else {
InputOutcome::Ignored InputOutcome::Ignored
} }
} else if *ch == ',' {
if curr_mark_node.get_content()? == nodes::LEFT_SQUARE_BR {
InputOutcome::Ignored
} else {
let mark_parent_id_opt = curr_mark_node.get_parent_id_opt();
if let Some(mark_parent_id) = mark_parent_id_opt {
let parent_ast_id = ed_model.markup_node_pool.get(mark_parent_id).get_ast_node_id();
let parent_expr2 = ed_model.module.env.pool.get(parent_ast_id);
match parent_expr2 {
Expr2::List { elem_var:_, elems:_} => {
let (new_child_index, new_ast_child_index) = ed_model.get_curr_child_indices()?;
// insert a Blank first, this results in cleaner code
add_blank_child(
new_child_index,
new_ast_child_index,
ed_model
)?
}
Expr2::Record { record_var:_, fields:_ } => {
todo!("multiple record fields")
}
_ => {
InputOutcome::Ignored
}
}
} else {
InputOutcome::Ignored
}
}
} else if "\"{[".contains(*ch) { } else if "\"{[".contains(*ch) {
let prev_mark_node = ed_model.markup_node_pool.get(prev_mark_node_id); let prev_mark_node = ed_model.markup_node_pool.get(prev_mark_node_id);
if prev_mark_node.get_content()? == nodes::LEFT_SQUARE_BR { if prev_mark_node.get_content()? == nodes::LEFT_SQUARE_BR && curr_mark_node.get_content()? == nodes::RIGHT_SQUARE_BR {
if curr_mark_node.get_content()? == nodes::RIGHT_SQUARE_BR { let (new_child_index, new_ast_child_index) = ed_model.get_curr_child_indices()?;
prep_empty_list(ed_model)?; // insert a Blank first, this results in cleaner code // insert a Blank first, this results in cleaner code
add_blank_child(
new_child_index,
new_ast_child_index,
ed_model
)?;
handle_new_char(received_char, ed_model)? handle_new_char(received_char, ed_model)?
} else { } else {
InputOutcome::Ignored InputOutcome::Ignored
} }
} else {
InputOutcome::Ignored
}
} else { } else {
InputOutcome::Ignored InputOutcome::Ignored
} }
@ -849,8 +893,14 @@ pub mod test_ed_update {
let mut ed_model = ed_model_from_dsl(&code_str, pre_lines, &mut model_refs)?; let mut ed_model = ed_model_from_dsl(&code_str, pre_lines, &mut model_refs)?;
for input_char in new_char_seq.chars() { for input_char in new_char_seq.chars() {
if input_char == '🡲' {
ed_model.simple_move_carets_right(1);
} else if input_char == '🡰' {
ed_model.simple_move_carets_left(1);
} else {
ed_res_to_res(handle_new_char(&input_char, &mut ed_model))?; ed_res_to_res(handle_new_char(&input_char, &mut ed_model))?;
} }
}
let post_lines = ui_res_to_res(ed_model_to_dsl(&ed_model))?; let post_lines = ui_res_to_res(ed_model_to_dsl(&ed_model))?;
@ -1048,27 +1098,27 @@ pub mod test_ed_update {
fn test_record() -> Result<(), String> { fn test_record() -> Result<(), String> {
assert_insert(&[""], &["{ ┃ }"], '{')?; assert_insert(&[""], &["{ ┃ }"], '{')?;
assert_insert(&["{ ┃ }"], &["{ a┃ }"], 'a')?; assert_insert(&["{ ┃ }"], &["{ a┃ }"], 'a')?;
assert_insert(&["{ a┃ }"], &["{ ab┃ }"], 'b')?; assert_insert(&["{ a┃ }"], &["{ ab┃: RunTimeError }"], 'b')?;
assert_insert(&["{ a┃ }"], &["{ a1┃ }"], '1')?; assert_insert(&["{ a┃ }"], &["{ a1┃: RunTimeError }"], '1')?;
assert_insert(&["{ a1┃ }"], &["{ a1z┃ }"], 'z')?; assert_insert(&["{ a1┃ }"], &["{ a1z┃: RunTimeError }"], 'z')?;
assert_insert(&["{ a1┃ }"], &["{ a15┃ }"], '5')?; assert_insert(&["{ a1┃ }"], &["{ a15┃: RunTimeError }"], '5')?;
assert_insert(&["{ ab┃ }"], &["{ abc┃ }"], 'c')?; assert_insert(&["{ ab┃ }"], &["{ abc┃: RunTimeError }"], 'c')?;
assert_insert(&["{ ┃abc }"], &["{ z┃abc }"], 'z')?; assert_insert(&["{ ┃abc }"], &["{ z┃abc: RunTimeError }"], 'z')?;
assert_insert(&["{ a┃b }"], &["{ az┃b }"], 'z')?; assert_insert(&["{ a┃b }"], &["{ az┃b: RunTimeError }"], 'z')?;
assert_insert(&["{ a┃b }"], &["{ a9┃b }"], '9')?; assert_insert(&["{ a┃b }"], &["{ a9┃b: RunTimeError }"], '9')?;
// extra space for Blank node // extra space for Blank node
assert_insert(&["{ a┃ }"], &["{ a: ┃ }"], ':')?; assert_insert(&["{ a┃ }"], &["{ a┃: RunTimeError }"], ':')?;
assert_insert(&["{ abc┃ }"], &["{ abc: ┃ }"], ':')?; assert_insert(&["{ abc┃ }"], &["{ abc┃: RunTimeError }"], ':')?;
assert_insert(&["{ aBc┃ }"], &["{ aBc: ┃ }"], ':')?; assert_insert(&["{ aBc┃ }"], &["{ aBc┃: RunTimeError }"], ':')?;
assert_insert_seq(&["{ a┃ }"], &["{ a: \"\" }"], ":\"")?; assert_insert_seq(&["{ a┃ }"], &["{ a┃: RunTimeError }"], ":\"")?;
assert_insert_seq(&["{ abc┃ }"], &["{ abc: \"\" }"], ":\"")?; assert_insert_seq(&["{ abc┃ }"], &["{ abc┃: RunTimeError }"], ":\"")?;
assert_insert_seq(&["{ a┃ }"], &["{ a: 0┃ }"], ":0")?; assert_insert_seq(&["{ a┃ }"], &["{ a0┃: RunTimeError }"], ":0")?;
assert_insert_seq(&["{ abc┃ }"], &["{ abc: 9┃ }"], ":9")?; assert_insert_seq(&["{ abc┃ }"], &["{ abc9┃: RunTimeError }"], ":9")?;
assert_insert_seq(&["{ a┃ }"], &["{ a: 1000┃ }"], ":1000")?; assert_insert_seq(&["{ a┃ }"], &["{ a1000┃: RunTimeError }"], ":1000")?;
assert_insert_seq(&["{ abc┃ }"], &["{ abc: 98761┃ }"], ":98761")?; assert_insert_seq(&["{ abc┃ }"], &["{ abc98761┃: RunTimeError }"], ":98761")?;
assert_insert(&["{ a: \"\" }"], &["{ a: \"a┃\" }"], 'a')?; assert_insert(&["{ a: \"\" }"], &["{ a: \"a┃\" }"], 'a')?;
assert_insert(&["{ a: \"a┃\" }"], &["{ a: \"ab┃\" }"], 'b')?; assert_insert(&["{ a: \"a┃\" }"], &["{ a: \"ab┃\" }"], 'b')?;
@ -1124,9 +1174,9 @@ pub mod test_ed_update {
#[test] #[test]
fn test_nested_record() -> Result<(), String> { fn test_nested_record() -> Result<(), String> {
assert_insert_seq(&["{ a┃ }"], &["{ a: { ┃ } }"], ":{")?; assert_insert_seq(&["{ a┃ }"], &["{ a┃: RunTimeError }"], ":{")?;
assert_insert_seq(&["{ abc┃ }"], &["{ abc: { ┃ } }"], ":{")?; assert_insert_seq(&["{ abc┃ }"], &["{ abc┃: RunTimeError }"], ":{")?;
assert_insert_seq(&["{ camelCase┃ }"], &["{ camelCase: { ┃ } }"], ":{")?; assert_insert_seq(&["{ camelCase┃ }"], &["{ camelCase┃: RunTimeError }"], ":{")?;
assert_insert_seq(&["{ a: { ┃ } }"], &["{ a: { zulu┃ } }"], "zulu")?; assert_insert_seq(&["{ a: { ┃ } }"], &["{ a: { zulu┃ } }"], "zulu")?;
assert_insert_seq( assert_insert_seq(
@ -1136,35 +1186,51 @@ pub mod test_ed_update {
)?; )?;
assert_insert_seq(&["{ camelCase: { ┃ } }"], &["{ camelCase: { z┃ } }"], "z")?; assert_insert_seq(&["{ camelCase: { ┃ } }"], &["{ camelCase: { z┃ } }"], "z")?;
assert_insert_seq(&["{ a: { zulu┃ } }"], &["{ a: { zulu: ┃ } }"], ":")?; assert_insert_seq(
&["{ a: { zulu┃ } }"],
&["{ a: { zulu┃: RunTimeError } }"],
":",
)?;
assert_insert_seq( assert_insert_seq(
&["{ abc: { camelCase┃ } }"], &["{ abc: { camelCase┃ } }"],
&["{ abc: { camelCase: ┃ } }"], &["{ abc: { camelCase┃: RunTimeError } }"],
":", ":",
)?; )?;
assert_insert_seq( assert_insert_seq(
&["{ camelCase: { z┃ } }"], &["{ camelCase: { z┃ } }"],
&["{ camelCase: { z: ┃ } }"], &["{ camelCase: { z┃: RunTimeError } }"],
":", ":",
)?; )?;
assert_insert_seq(&["{ a┃: { zulu } }"], &["{ a0┃: { zulu } }"], "0")?; assert_insert_seq(
&["{ a┃: { zulu } }"],
&["{ a0┃: { zulu: RunTimeError } }"],
"0",
)?;
assert_insert_seq( assert_insert_seq(
&["{ ab┃c: { camelCase } }"], &["{ ab┃c: { camelCase } }"],
&["{ abz┃c: { camelCase } }"], &["{ abz┃c: { camelCase: RunTimeError } }"],
"z", "z",
)?; )?;
assert_insert_seq(&["{ ┃camelCase: { z } }"], &["{ x┃camelCase: { z } }"], "x")?; assert_insert_seq(
&["{ ┃camelCase: { z } }"],
&["{ x┃camelCase: { z: RunTimeError } }"],
"x",
)?;
assert_insert_seq(&["{ a: { zulu┃ } }"], &["{ a: { zulu: \"\" } }"], ":\"")?; assert_insert_seq(
&["{ a: { zulu┃ } }"],
&["{ a: { zulu┃: RunTimeError } }"],
":\"",
)?;
assert_insert_seq( assert_insert_seq(
&["{ abc: { camelCase┃ } }"], &["{ abc: { camelCase┃ } }"],
&["{ abc: { camelCase: \"\" } }"], &["{ abc: { camelCase┃: RunTimeError } }"],
":\"", ":\"",
)?; )?;
assert_insert_seq( assert_insert_seq(
&["{ camelCase: { z┃ } }"], &["{ camelCase: { z┃ } }"],
&["{ camelCase: { z: \"\" } }"], &["{ camelCase: { z┃: RunTimeError } }"],
":\"", ":\"",
)?; )?;
@ -1179,15 +1245,19 @@ pub mod test_ed_update {
"ul", "ul",
)?; )?;
assert_insert_seq(&["{ a: { zulu┃ } }"], &["{ a: { zulu: 1┃ } }"], ":1")?; assert_insert_seq(
&["{ a: { zulu┃ } }"],
&["{ a: { zulu1┃: RunTimeError } }"],
":1",
)?;
assert_insert_seq( assert_insert_seq(
&["{ abc: { camelCase┃ } }"], &["{ abc: { camelCase┃ } }"],
&["{ abc: { camelCase: 0┃ } }"], &["{ abc: { camelCase0┃: RunTimeError } }"],
":0", ":0",
)?; )?;
assert_insert_seq( assert_insert_seq(
&["{ camelCase: { z┃ } }"], &["{ camelCase: { z┃ } }"],
&["{ camelCase: { z: 45┃ } }"], &["{ camelCase: { z45┃: RunTimeError } }"],
":45", ":45",
)?; )?;
@ -1198,15 +1268,19 @@ pub mod test_ed_update {
"77", "77",
)?; )?;
assert_insert_seq(&["{ a: { zulu┃ } }"], &["{ a: { zulu: { ┃ } } }"], ":{")?; assert_insert_seq(
&["{ a: { zulu┃ } }"],
&["{ a: { zulu┃: RunTimeError } }"],
":{",
)?;
assert_insert_seq( assert_insert_seq(
&["{ abc: { camelCase┃ } }"], &["{ abc: { camelCase┃ } }"],
&["{ abc: { camelCase: { ┃ } } }"], &["{ abc: { camelCase┃: RunTimeError } }"],
":{", ":{",
)?; )?;
assert_insert_seq( assert_insert_seq(
&["{ camelCase: { z┃ } }"], &["{ camelCase: { z┃ } }"],
&["{ camelCase: { z: { ┃ } } }"], &["{ camelCase: { z┃: RunTimeError } }"],
":{", ":{",
)?; )?;
@ -1233,17 +1307,17 @@ pub mod test_ed_update {
assert_insert_seq( assert_insert_seq(
&["{ a┃: { bcD: { eFgHij: { k15 } } } }"], &["{ a┃: { bcD: { eFgHij: { k15 } } } }"],
&["{ a4┃: { bcD: { eFgHij: { k15 } } } }"], &["{ a4┃: { bcD: { eFgHij: { k15: RunTimeError } } } }"],
"4", "4",
)?; )?;
assert_insert_seq( assert_insert_seq(
&["{ ┃a: { bcD: { eFgHij: { k15 } } } }"], &["{ ┃a: { bcD: { eFgHij: { k15 } } } }"],
&["{ y┃a: { bcD: { eFgHij: { k15 } } } }"], &["{ y┃a: { bcD: { eFgHij: { k15: RunTimeError } } } }"],
"y", "y",
)?; )?;
assert_insert_seq( assert_insert_seq(
&["{ a: { bcD: { eF┃gHij: { k15 } } } }"], &["{ a: { bcD: { eF┃gHij: { k15 } } } }"],
&["{ a: { bcD: { eFxyz┃gHij: { k15 } } } }"], &["{ a: { bcD: { eFxyz┃gHij: { k15: RunTimeError } } } }"],
"xyz", "xyz",
)?; )?;
@ -1268,23 +1342,23 @@ pub mod test_ed_update {
assert_insert_seq_ignore(&["{ ┃}"], IGNORE_CHARS)?; assert_insert_seq_ignore(&["{ ┃}"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["{ ┃ }"], IGNORE_NO_LTR)?; assert_insert_seq_ignore(&["{ ┃ }"], IGNORE_NO_LTR)?;
assert_insert_seq_ignore(&["{ ┃a }"], IGNORE_NO_LTR)?; assert_insert_seq_ignore(&["{ ┃a: RunTimeError }"], IGNORE_NO_LTR)?;
assert_insert_seq_ignore(&["{ ┃abc }"], IGNORE_NO_LTR)?; assert_insert_seq_ignore(&["{ ┃abc: RunTimeError }"], IGNORE_NO_LTR)?;
assert_insert_seq_ignore(&["┃{ a }"], IGNORE_CHARS)?; assert_insert_seq_ignore(&["┃{ a: RunTimeError }"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["{ a }"], IGNORE_CHARS)?; assert_insert_seq_ignore(&["{ a: ┃RunTimeError }"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["{┃ a }"], IGNORE_CHARS)?; assert_insert_seq_ignore(&["{┃ a: RunTimeError }"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["{ a }"], IGNORE_CHARS)?; assert_insert_seq_ignore(&["{ a:┃ RunTimeError }"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["┃{ a15 }"], IGNORE_CHARS)?; assert_insert_seq_ignore(&["┃{ a15: RunTimeError }"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["{ a15 }"], IGNORE_CHARS)?; assert_insert_seq_ignore(&["{ a15: ┃RunTimeError }"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["{┃ a15 }"], IGNORE_CHARS)?; assert_insert_seq_ignore(&["{┃ a15: RunTimeError }"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["{ a15 }"], IGNORE_CHARS)?; assert_insert_seq_ignore(&["{ a15:┃ RunTimeError }"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["┃{ camelCase }"], IGNORE_CHARS)?; assert_insert_seq_ignore(&["┃{ camelCase: RunTimeError }"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["{ camelCase }"], IGNORE_CHARS)?; assert_insert_seq_ignore(&["{ camelCase: ┃RunTimeError }"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["{┃ camelCase }"], IGNORE_CHARS)?; assert_insert_seq_ignore(&["{┃ camelCase: RunTimeError }"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["{ camelCase }"], IGNORE_CHARS)?; assert_insert_seq_ignore(&["{ camelCase:┃ RunTimeError }"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["┃{ a: \"\" }"], IGNORE_CHARS)?; assert_insert_seq_ignore(&["┃{ a: \"\" }"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["{┃ a: \"\" }"], IGNORE_CHARS)?; assert_insert_seq_ignore(&["{┃ a: \"\" }"], IGNORE_CHARS)?;
@ -1360,17 +1434,17 @@ pub mod test_ed_update {
assert_insert_seq_ignore(&["┃{ a: { } }"], IGNORE_NO_LTR)?; assert_insert_seq_ignore(&["┃{ a: { } }"], IGNORE_NO_LTR)?;
assert_insert_seq_ignore(&["{ ┃a: { } }"], "1")?; assert_insert_seq_ignore(&["{ ┃a: { } }"], "1")?;
assert_insert_seq_ignore(&["{ camelCaseB1: { z15a } }"], IGNORE_NO_LTR)?; assert_insert_seq_ignore(&["{ camelCaseB1: { z15a:┃ RunTimeError } }"], IGNORE_NO_LTR)?;
assert_insert_seq_ignore(&["{ camelCaseB1: {┃ z15a } }"], IGNORE_NO_LTR)?; assert_insert_seq_ignore(&["{ camelCaseB1: {┃ z15a: RunTimeError } }"], IGNORE_NO_LTR)?;
assert_insert_seq_ignore(&["{ camelCaseB1: ┃{ z15a } }"], IGNORE_NO_LTR)?; assert_insert_seq_ignore(&["{ camelCaseB1: ┃{ z15a: RunTimeError } }"], IGNORE_NO_LTR)?;
assert_insert_seq_ignore(&["{ camelCaseB1: { z15a } }"], IGNORE_NO_LTR)?; assert_insert_seq_ignore(&["{ camelCaseB1: { z15a: ┃RunTimeError } }"], IGNORE_NO_LTR)?;
assert_insert_seq_ignore(&["{ camelCaseB1: { z15a } }"], IGNORE_NO_LTR)?; assert_insert_seq_ignore(&["{ camelCaseB1: { z15a: R┃unTimeError } }"], IGNORE_NO_LTR)?;
assert_insert_seq_ignore(&["{ camelCaseB1: { z15a } }"], IGNORE_NO_LTR)?; assert_insert_seq_ignore(&["{ camelCaseB1: { z15a: Ru┃nTimeError } }"], IGNORE_NO_LTR)?;
assert_insert_seq_ignore(&["{ camelCaseB1:┃ { z15a } }"], IGNORE_NO_LTR)?; assert_insert_seq_ignore(&["{ camelCaseB1:┃ { z15a: RunTimeError } }"], IGNORE_NO_LTR)?;
assert_insert_seq_ignore(&["{┃ camelCaseB1: { z15a } }"], IGNORE_NO_LTR)?; assert_insert_seq_ignore(&["{┃ camelCaseB1: { z15a: RunTimeError } }"], IGNORE_NO_LTR)?;
assert_insert_seq_ignore(&["┃{ camelCaseB1: { z15a } }"], IGNORE_NO_LTR)?; assert_insert_seq_ignore(&["┃{ camelCaseB1: { z15a: RunTimeError } }"], IGNORE_NO_LTR)?;
assert_insert_seq_ignore(&["{ ┃camelCaseB1: { z15a } }"], "1")?; assert_insert_seq_ignore(&["{ ┃camelCaseB1: { z15a: RunTimeError } }"], "1")?;
assert_insert_seq_ignore(&["{ camelCaseB1: { ┃z15a } }"], "1")?; assert_insert_seq_ignore(&["{ camelCaseB1: { ┃z15a: RunTimeError } }"], "1")?;
assert_insert_seq_ignore(&["{ camelCaseB1: { z15a: \"\"┃ } }"], IGNORE_NO_LTR)?; assert_insert_seq_ignore(&["{ camelCaseB1: { z15a: \"\"┃ } }"], IGNORE_NO_LTR)?;
assert_insert_seq_ignore(&["{ camelCaseB1: { z15a: ┃\"\" } }"], IGNORE_NO_LTR)?; assert_insert_seq_ignore(&["{ camelCaseB1: { z15a: ┃\"\" } }"], IGNORE_NO_LTR)?;
@ -1460,46 +1534,46 @@ pub mod test_ed_update {
)?; )?;
assert_insert_seq_ignore( assert_insert_seq_ignore(
&["{ g: { oi: { ng: { d: { e: { e: { p: { camelCase } } } } } } } }"], &["{ g: { oi: { ng: { d: { e: { e: { p: { camelCase:┃ RunTimeError } } } } } } } }"],
IGNORE_NO_LTR, IGNORE_NO_LTR,
)?; )?;
assert_insert_seq_ignore( assert_insert_seq_ignore(
&["{ g: { oi: { ng: { d: { e: { e: { p: { camelCase } } } } } } } }"], &["{ g: { oi: { ng: { d: { e: { e: { p: { camelCase: R┃unTimeError } } } } } } } }"],
IGNORE_NO_LTR, IGNORE_NO_LTR,
)?; )?;
assert_insert_seq_ignore( assert_insert_seq_ignore(
&["{ g: { oi: { ng: { d: { e: { e: { p: { camelCase } } } } } } } }┃"], &["{ g: { oi: { ng: { d: { e: { e: { p: { camelCase: RunTimeError } } } } } } } }┃"],
IGNORE_NO_LTR, IGNORE_NO_LTR,
)?; )?;
assert_insert_seq_ignore( assert_insert_seq_ignore(
&["{ g: { oi: { ng: { d: { e: { e: { p: { camelCase } } } } } } } }"], &["{ g: { oi: { ng: { d: { e: { e: { p: { camelCase: RunTimeEr┃ror } } } } } } } }"],
IGNORE_NO_LTR, IGNORE_NO_LTR,
)?; )?;
assert_insert_seq_ignore( assert_insert_seq_ignore(
&["{ g: { oi: { ng: { d: { e: {┃ e: { p: { camelCase } } } } } } } }"], &["{ g: { oi: { ng: { d: { e: {┃ e: { p: { camelCase: RunTimeError } } } } } } } }"],
IGNORE_NO_LTR, IGNORE_NO_LTR,
)?; )?;
assert_insert_seq_ignore( assert_insert_seq_ignore(
&["{ g: { oi: { ng: { d: { e: { e:┃ { p: { camelCase } } } } } } } }"], &["{ g: { oi: { ng: { d: { e: { e:┃ { p: { camelCase: RunTimeError } } } } } } } }"],
IGNORE_NO_LTR, IGNORE_NO_LTR,
)?; )?;
assert_insert_seq_ignore( assert_insert_seq_ignore(
&["{┃ g: { oi: { ng: { d: { e: { e: { p: { camelCase } } } } } } } }"], &["{┃ g: { oi: { ng: { d: { e: { e: { p: { camelCase: RunTimeError } } } } } } } }"],
IGNORE_NO_LTR, IGNORE_NO_LTR,
)?; )?;
assert_insert_seq_ignore( assert_insert_seq_ignore(
&["┃{ g: { oi: { ng: { d: { e: { e: { p: { camelCase } } } } } } } }"], &["┃{ g: { oi: { ng: { d: { e: { e: { p: { camelCase: RunTimeError } } } } } } } }"],
IGNORE_NO_LTR, IGNORE_NO_LTR,
)?; )?;
assert_insert_seq_ignore( assert_insert_seq_ignore(
&["{ ┃g: { oi: { ng: { d: { e: { e: { p: { camelCase } } } } } } } }"], &["{ ┃g: { oi: { ng: { d: { e: { e: { p: { camelCase: RunTimeError } } } } } } } }"],
"2", "2",
)?; )?;
Ok(()) Ok(())
} }
#[test] #[test]
fn test_list() -> Result<(), String> { fn test_single_elt_list() -> Result<(), String> {
assert_insert(&[""], &["[ ┃ ]"], '[')?; assert_insert(&[""], &["[ ┃ ]"], '[')?;
assert_insert_seq(&[""], &["[ 0┃ ]"], "[0")?; assert_insert_seq(&[""], &["[ 0┃ ]"], "[0")?;
@ -1535,7 +1609,48 @@ pub mod test_ed_update {
} }
#[test] #[test]
fn test_ignore_list() -> Result<(), String> { fn test_multi_elt_list() -> Result<(), String> {
assert_insert_seq(&[""], &["[ 0, 1┃ ]"], "[0,1")?;
assert_insert_seq(&[""], &["[ 987, 6543, 210┃ ]"], "[987,6543,210")?;
assert_insert_seq(
&[""],
&["[ \"a\", \"bcd\", \"EFGH┃\" ]"],
"[\"a🡲,\"bcd🡲,\"EFGH",
)?;
assert_insert_seq(
&[""],
&["[ { a: 1 }, { b: 23 }, { c: 456┃ } ]"],
"[{a:1🡲🡲,{b:23🡲🡲,{c:456",
)?;
assert_insert_seq(&[""], &["[ [ 1 ], [ 23 ], [ 456┃ ] ]"], "[[1🡲🡲,[23🡲🡲,[456")?;
// insert element in between
assert_insert_seq(&[""], &["[ 0, 2┃, 1 ]"], "[0,1🡰🡰🡰,2")?;
assert_insert_seq(&[""], &["[ 0, 2, 3┃, 1 ]"], "[0,1🡰🡰🡰,2,3")?;
assert_insert_seq(&[""], &["[ 0, 3┃, 2, 1 ]"], "[0,1🡰🡰🡰,2🡰🡰🡰,3")?;
assert_insert_seq(
&[""],
&["[ \"abc\", \"f┃\", \"de\" ]"],
"[\"abc🡲,\"de🡰🡰🡰🡰🡰,\"f",
)?;
assert_insert_seq(&[""], &["[ [ 0 ], [ 2┃ ], [ 1 ] ]"], "[[0🡲🡲,[1🡰🡰🡰🡰🡰,[2")?;
assert_insert_seq(
&[""],
&["[ { a: 0 }, { a: 2┃ }, { a: 1 } ]"],
"[{a:0🡲🡲,{a:1🡰🡰🡰🡰🡰🡰🡰🡰,{a:2",
)?;
Ok(())
}
#[test]
fn test_ignore_single_elt_list() -> Result<(), String> {
assert_insert_seq_ignore(&["┃[ ]"], IGNORE_CHARS)?; assert_insert_seq_ignore(&["┃[ ]"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["[ ]┃"], IGNORE_CHARS)?; assert_insert_seq_ignore(&["[ ]┃"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["[┃ ]"], IGNORE_CHARS)?; assert_insert_seq_ignore(&["[┃ ]"], IGNORE_CHARS)?;
@ -1582,6 +1697,51 @@ pub mod test_ed_update {
Ok(()) Ok(())
} }
#[test]
fn test_ignore_multi_elt_list() -> Result<(), String> {
assert_insert_seq_ignore(&["┃[ 0, 1 ]"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["[ 0, 1 ]┃"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["[┃ 0, 1 ]"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["[ 0, 1 ┃]"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["[ 0,┃ 1 ]"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["┃[ 123, 56, 7 ]"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["[ 123, 56, 7 ]┃"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["[┃ 123, 56, 7 ]"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["[ 123, 56, 7 ┃]"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["[ 123,┃ 56, 7 ]"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["[ 123, 56,┃ 7 ]"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["┃[ \"123\", \"56\", \"7\" ]"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["[ \"123\", \"56\", \"7\" ]┃"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["[┃ \"123\", \"56\", \"7\" ]"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["[ \"123\", \"56\", \"7\" ┃]"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["[ \"123\",┃ \"56\", \"7\" ]"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["[ \"123\", \"56\",┃ \"7\" ]"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["┃[ { a: 0 }, { a: 1 } ]"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["[ { a: 0 }, { a: 1 } ]┃"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["[┃ { a: 0 }, { a: 1 } ]"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["[ { a: 0 }, { a: 1 } ┃]"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["[ { a: 0 },┃ { a: 1 } ]"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["┃[ [ 0 ], [ 1 ] ]"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["[ [ 0 ], [ 1 ] ]┃"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["[┃ [ 0 ], [ 1 ] ]"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["[ [ 0 ], [ 1 ] ┃]"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["[ [ 0 ],┃ [ 1 ] ]"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["[ ┃[ 0 ], [ 1 ] ]"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["[ [ 0 ]┃, [ 1 ] ]"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["[ [┃ 0 ], [ 1 ] ]"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["[ [ 0 ┃], [ 1 ] ]"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["[ [ 0 ], ┃[ 1 ] ]"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["[ [ 0 ], [┃ 1 ] ]"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["[ [ 0 ], [ 1 ]┃ ]"], IGNORE_CHARS)?;
assert_insert_seq_ignore(&["[ [ 0 ], [ 1 ┃] ]"], IGNORE_CHARS)?;
Ok(())
}
// Create ed_model from pre_lines DSL, do ctrl+shift+up as many times as repeat. // Create ed_model from pre_lines DSL, do ctrl+shift+up as many times as repeat.
// check if modified ed_model has expected string representation of code, caret position and active selection. // check if modified ed_model has expected string representation of code, caret position and active selection.
pub fn assert_ctrl_shift_up_repeat( pub fn assert_ctrl_shift_up_repeat(
@ -1855,8 +2015,12 @@ pub mod test_ed_update {
let mut ed_model = ed_model_from_dsl(&code_str, pre_lines, &mut model_refs)?; let mut ed_model = ed_model_from_dsl(&code_str, pre_lines, &mut model_refs)?;
for input_char in new_char_seq.chars() { for input_char in new_char_seq.chars() {
if input_char == '🡲' {
ed_model.simple_move_carets_right(1);
} else {
ed_res_to_res(handle_new_char(&input_char, &mut ed_model))?; ed_res_to_res(handle_new_char(&input_char, &mut ed_model))?;
} }
}
for expected_tooltip in expected_tooltips.iter() { for expected_tooltip in expected_tooltips.iter() {
ed_model.select_expr()?; ed_model.select_expr()?;
@ -1912,16 +2076,13 @@ pub mod test_ed_update {
assert_type_tooltip_clean(&["{ ┃z: { } }"], "{ z : {} }")?; assert_type_tooltip_clean(&["{ ┃z: { } }"], "{ z : {} }")?;
assert_type_tooltip_clean(&["{ camelCase: ┃0 }"], "Num *")?; assert_type_tooltip_clean(&["{ camelCase: ┃0 }"], "Num *")?;
assert_type_tooltips_seq(&[""], &vec!["*"], "")?; assert_type_tooltips_seq(&[""], &["*"], "")?;
assert_type_tooltips_seq(&[""], &vec!["*", "{ a : * }"], "{a:")?; assert_type_tooltips_seq(&[""], &["*", "{ a : * }"], "{a:")?;
assert_type_tooltips_clean( assert_type_tooltips_clean(&["{ camelCase: ┃0 }"], &["Num *", "{ camelCase : Num * }"])?;
&["{ camelCase: ┃0 }"],
&vec!["Num *", "{ camelCase : Num * }"],
)?;
assert_type_tooltips_clean( assert_type_tooltips_clean(
&["{ a: { b: { c: \"hello┃, hello.0123456789ZXY{}[]-><-\" } } }"], &["{ a: { b: { c: \"hello┃, hello.0123456789ZXY{}[]-><-\" } } }"],
&vec![ &[
"Str", "Str",
"{ c : Str }", "{ c : Str }",
"{ b : { c : Str } }", "{ b : { c : Str } }",
@ -1929,13 +2090,18 @@ pub mod test_ed_update {
], ],
)?; )?;
Ok(())
}
#[test]
fn test_type_tooltip_list() -> Result<(), String> {
assert_type_tooltip(&[""], "List *", '[')?; assert_type_tooltip(&[""], "List *", '[')?;
assert_type_tooltips_seq(&[""], &vec!["List (Num *)"], "[0")?; assert_type_tooltips_seq(&[""], &["List (Num *)"], "[0")?;
assert_type_tooltips_seq(&[""], &vec!["List (Num *)", "List (List (Num *))"], "[[0")?; assert_type_tooltips_seq(&[""], &["List (Num *)", "List (List (Num *))"], "[[0")?;
assert_type_tooltips_seq(&[""], &vec!["Str", "List Str"], "[\"a")?; assert_type_tooltips_seq(&[""], &["Str", "List Str"], "[\"a")?;
assert_type_tooltips_seq( assert_type_tooltips_seq(
&[""], &[""],
&vec![ &[
"Str", "Str",
"List Str", "List Str",
"List (List Str)", "List (List Str)",
@ -1945,7 +2111,7 @@ pub mod test_ed_update {
)?; )?;
assert_type_tooltips_seq( assert_type_tooltips_seq(
&[""], &[""],
&vec![ &[
"{ a : Num * }", "{ a : Num * }",
"List { a : Num * }", "List { a : Num * }",
"List (List { a : Num * })", "List (List { a : Num * })",
@ -1953,6 +2119,34 @@ pub mod test_ed_update {
"[[{a:1", "[[{a:1",
)?; )?;
// multi element lists
assert_type_tooltips_seq(&[""], &["List (Num *)"], "[1,2,3")?;
assert_type_tooltips_seq(&[""], &["Str", "List Str"], "[\"abc🡲,\"de🡲,\"f")?;
assert_type_tooltips_seq(
&[""],
&["{ a : Num * }", "List { a : Num * }"],
"[{a:0🡲🡲,{a:12🡲🡲,{a:444",
)?;
Ok(())
}
#[test]
fn test_type_tooltip_mismatch() -> Result<(), String> {
assert_type_tooltips_seq(&[""], &["Str", "List <type mismatch>"], "[1,\"abc")?;
assert_type_tooltips_seq(&[""], &["List <type mismatch>"], "[\"abc🡲,50")?;
assert_type_tooltips_seq(
&[""],
&["Str", "{ a : Str }", "List <type mismatch>"],
"[{a:0🡲🡲,{a:\"0",
)?;
assert_type_tooltips_seq(
&[""],
&["List (Num *)", "List (List <type mismatch>)"],
"[[0,1,\"2🡲🡲🡲,[3, 4, 5",
)?;
Ok(()) Ok(())
} }

View file

@ -1,5 +1,5 @@
use crate::editor::ed_error::EdResult; use crate::editor::ed_error::EdResult;
use crate::editor::ed_error::{MissingParent, UnexpectedASTNode, UnexpectedEmptyPoolVec}; use crate::editor::ed_error::{MissingParent, UnexpectedASTNode};
use crate::editor::markup::attribute::Attributes; use crate::editor::markup::attribute::Attributes;
use crate::editor::markup::nodes; use crate::editor::markup::nodes;
use crate::editor::markup::nodes::MarkupNode; use crate::editor::markup::nodes::MarkupNode;
@ -7,11 +7,12 @@ use crate::editor::mvc::app_update::InputOutcome;
use crate::editor::mvc::ed_model::EdModel; use crate::editor::mvc::ed_model::EdModel;
use crate::editor::mvc::ed_update::get_node_context; use crate::editor::mvc::ed_update::get_node_context;
use crate::editor::mvc::ed_update::NodeContext; use crate::editor::mvc::ed_update::NodeContext;
use crate::editor::slow_pool::MarkNodeId;
use crate::editor::syntax_highlight::HighlightStyle; use crate::editor::syntax_highlight::HighlightStyle;
use crate::lang::ast::expr2_to_string;
use crate::lang::ast::Expr2; use crate::lang::ast::Expr2;
use crate::lang::ast::{expr2_to_string, ExprId};
use crate::lang::pool::PoolVec; use crate::lang::pool::PoolVec;
use snafu::OptionExt; use crate::ui::text::text_pos::TextPos;
pub fn start_new_list(ed_model: &mut EdModel) -> EdResult<InputOutcome> { pub fn start_new_list(ed_model: &mut EdModel) -> EdResult<InputOutcome> {
let NodeContext { let NodeContext {
@ -89,7 +90,11 @@ pub fn start_new_list(ed_model: &mut EdModel) -> EdResult<InputOutcome> {
} }
// insert Blank at current position for easy code reuse // insert Blank at current position for easy code reuse
pub fn prep_empty_list(ed_model: &mut EdModel) -> EdResult<InputOutcome> { pub fn add_blank_child(
new_child_index: usize,
new_ast_child_index: usize,
ed_model: &mut EdModel,
) -> EdResult<InputOutcome> {
let NodeContext { let NodeContext {
old_caret_pos, old_caret_pos,
curr_mark_node_id, curr_mark_node_id,
@ -98,30 +103,94 @@ pub fn prep_empty_list(ed_model: &mut EdModel) -> EdResult<InputOutcome> {
ast_node_id, ast_node_id,
} = get_node_context(&ed_model)?; } = get_node_context(&ed_model)?;
let blank_elt = Expr2::Blank; let trip_result: EdResult<(ExprId, ExprId, MarkNodeId)> = if let Some(parent_id) = parent_id_opt
{
let parent = ed_model.markup_node_pool.get(parent_id);
let list_ast_node = ed_model.module.env.pool.get(ast_node_id); let list_ast_node_id = parent.get_ast_node_id();
let list_ast_node = ed_model.module.env.pool.get(list_ast_node_id);
match list_ast_node { match list_ast_node {
Expr2::List { elem_var, elems: _ } => { Expr2::List {
let children: Vec<Expr2> = vec![blank_elt]; elem_var: _,
let children_pool_vec = PoolVec::new(children.into_iter(), ed_model.module.env.pool); elems: _,
} => {
let blank_elt = Expr2::Blank;
let blank_elt_id = ed_model.module.env.pool.add(blank_elt);
let blank_elt_id = Ok((blank_elt_id, list_ast_node_id, parent_id))
children_pool_vec }
.iter_node_ids() _ => UnexpectedASTNode {
.next() required_node_type: "List".to_string(),
.context(UnexpectedEmptyPoolVec { encountered_node_type: expr2_to_string(ast_node_id, ed_model.module.env.pool),
descriptive_vec_name: "\"children of List AST node\"", }
})?; .fail(),
}
} else {
MissingParent {
node_id: curr_mark_node_id,
}
.fail()
};
let (blank_elt_id, list_ast_node_id, parent_id) = trip_result?;
let list_ast_node = ed_model.module.env.pool.get(list_ast_node_id);
match list_ast_node {
Expr2::List { elem_var, elems } => {
let mut new_elems: Vec<ExprId> =
elems.iter(ed_model.module.env.pool).copied().collect();
new_elems.insert(new_ast_child_index, blank_elt_id);
let new_list_node = Expr2::List { let new_list_node = Expr2::List {
elem_var: *elem_var, elem_var: *elem_var,
elems: children_pool_vec, elems: PoolVec::new(new_elems.into_iter(), ed_model.module.env.pool),
}; };
ed_model.module.env.pool.set(ast_node_id, new_list_node); ed_model
.module
.env
.pool
.set(list_ast_node_id, new_list_node);
Ok(())
}
_ => UnexpectedASTNode {
required_node_type: "List".to_string(),
encountered_node_type: expr2_to_string(ast_node_id, ed_model.module.env.pool),
}
.fail(),
}?;
let new_mark_children = update_mark_children(
new_child_index,
blank_elt_id,
list_ast_node_id,
old_caret_pos,
parent_id_opt,
ed_model,
)?;
let parent = ed_model.markup_node_pool.get_mut(parent_id);
for (indx, child) in new_mark_children.iter().enumerate() {
parent.add_child_at_index(new_child_index + indx, *child)?;
}
Ok(InputOutcome::Accepted)
}
// add a Blank child to the Nested mark node and update the caret
pub fn update_mark_children(
new_child_index: usize,
blank_elt_id: ExprId,
list_ast_node_id: ExprId,
old_caret_pos: TextPos,
parent_id_opt: Option<MarkNodeId>,
ed_model: &mut EdModel,
) -> EdResult<Vec<MarkNodeId>> {
let blank_mark_node = MarkupNode::Blank { let blank_mark_node = MarkupNode::Blank {
ast_node_id: blank_elt_id, ast_node_id: blank_elt_id,
syn_high_style: HighlightStyle::Blank, syn_high_style: HighlightStyle::Blank,
@ -131,34 +200,46 @@ pub fn prep_empty_list(ed_model: &mut EdModel) -> EdResult<InputOutcome> {
let blank_mark_node_id = ed_model.markup_node_pool.add(blank_mark_node); let blank_mark_node_id = ed_model.markup_node_pool.add(blank_mark_node);
// add blank mark node to nested mark node from list let mut children: Vec<MarkNodeId> = vec![];
if let Some(parent_id) = parent_id_opt {
let parent = ed_model.markup_node_pool.get_mut(parent_id);
let new_child_index = 1; // 1 because left bracket is first element if new_child_index > 1 {
let comma_mark_node = MarkupNode::Text {
content: nodes::COMMA.to_owned(),
ast_node_id: list_ast_node_id,
syn_high_style: HighlightStyle::Blank,
attributes: Attributes::new(),
parent_id_opt,
};
parent.add_child_at_index(new_child_index, blank_mark_node_id)?; let comma_mark_node_id = ed_model.markup_node_pool.add(comma_mark_node);
ed_model.simple_move_carets_right(nodes::COMMA.len());
ed_model.insert_between_line(
old_caret_pos.line,
old_caret_pos.column,
nodes::COMMA,
comma_mark_node_id,
)?;
children.push(comma_mark_node_id);
}
children.push(blank_mark_node_id);
let comma_shift = if new_child_index == 1 {
0
} else { } else {
MissingParent { nodes::COMMA.len()
node_id: curr_mark_node_id, };
}
.fail()?
}
// update GridNodeMap and CodeLines // update GridNodeMap and CodeLines
ed_model.insert_between_line( ed_model.insert_between_line(
old_caret_pos.line, old_caret_pos.line,
old_caret_pos.column, old_caret_pos.column + comma_shift,
nodes::BLANK_PLACEHOLDER, nodes::BLANK_PLACEHOLDER,
blank_mark_node_id, blank_mark_node_id,
)?; )?;
Ok(InputOutcome::Accepted) Ok(children)
}
_ => UnexpectedASTNode {
required_node_type: "List".to_string(),
encountered_node_type: expr2_to_string(ast_node_id, ed_model.module.env.pool),
}
.fail()?,
}
} }

View file

@ -19,7 +19,7 @@ pub fn build_debug_graphics(
let area_bounds = (size.width as f32, size.height as f32); let area_bounds = (size.width as f32, size.height as f32);
let layout = wgpu_glyph::Layout::default().h_align(wgpu_glyph::HorizontalAlign::Left); let layout = wgpu_glyph::Layout::default().h_align(wgpu_glyph::HorizontalAlign::Left);
let debug_txt_coords: Vector2<f32> = (txt_coords.x, txt_coords.y * 6.0).into(); let debug_txt_coords: Vector2<f32> = (txt_coords.x, txt_coords.y * 3.0).into();
let grid_node_map_text = glyph_brush::OwnedText::new(format!("{}", ed_model.grid_node_map)) let grid_node_map_text = glyph_brush::OwnedText::new(format!("{}", ed_model.grid_node_map))
.with_color(colors::to_slice(from_hsb(20, 41, 100))) .with_color(colors::to_slice(from_hsb(20, 41, 100)))

View file

@ -44,12 +44,28 @@ impl fmt::Display for SlowPool {
write!(f, "\n\n(mark_node_pool)\n")?; write!(f, "\n\n(mark_node_pool)\n")?;
for (index, node) in self.nodes.iter().enumerate() { for (index, node) in self.nodes.iter().enumerate() {
let ast_node_id_str = format!("{:?}", node.get_ast_node_id());
let ast_node_id: String = ast_node_id_str
.chars()
.filter(|c| c.is_ascii_digit())
.collect();
let mut child_str = String::new();
let node_children = node.get_children_ids();
if !node_children.is_empty() {
child_str = format!("children: {:?}", node_children);
}
writeln!( writeln!(
f, f,
"{}: {} ({})", "{}: {} ({}) ast_id {:?} {}",
index, index,
node.node_type_as_string(), node.node_type_as_string(),
node.get_content().unwrap_or_else(|_| "".to_string()), node.get_content().unwrap_or_else(|_| "".to_string()),
ast_node_id.parse::<usize>().unwrap(),
child_str
)?; )?;
} }

View file

@ -1,11 +1,15 @@
#![allow(clippy::manual_map)] #![allow(clippy::manual_map)]
use std::collections::{HashMap, HashSet};
use std::hash::BuildHasherDefault;
use crate::lang::pattern::{Pattern2, PatternId}; use crate::lang::pattern::{Pattern2, PatternId};
use crate::lang::pool::Pool; use crate::lang::pool::Pool;
use crate::lang::pool::{NodeId, PoolStr, PoolVec, ShallowClone}; use crate::lang::pool::{NodeId, PoolStr, PoolVec, ShallowClone};
use crate::lang::types::{Type2, TypeId}; use crate::lang::types::{Type2, TypeId};
use arraystring::{typenum::U30, ArrayString}; use arraystring::{typenum::U30, ArrayString};
use roc_can::expr::Recursive; use roc_can::expr::Recursive;
use roc_collections::all::WyHash;
use roc_module::low_level::LowLevel; use roc_module::low_level::LowLevel;
use roc_module::operator::CalledVia; use roc_module::operator::CalledVia;
use roc_module::symbol::Symbol; use roc_module::symbol::Symbol;
@ -115,7 +119,7 @@ pub enum Expr2 {
List { List {
elem_var: Variable, // 4B elem_var: Variable, // 4B
elems: PoolVec<Expr2>, // 8B elems: PoolVec<ExprId>, // 8B
}, },
If { If {
cond_var: Variable, // 4B cond_var: Variable, // 4B
@ -135,7 +139,7 @@ pub enum Expr2 {
body_id: NodeId<Expr2>, // 4B body_id: NodeId<Expr2>, // 4B
}, },
LetFunction { LetFunction {
def: NodeId<FunctionDef>, // 4B def_id: NodeId<FunctionDef>, // 4B
body_var: Variable, // 8B body_var: Variable, // 8B
body_id: NodeId<Expr2>, // 4B body_id: NodeId<Expr2>, // 4B
}, },
@ -217,21 +221,46 @@ pub enum Expr2 {
} }
#[derive(Debug)] #[derive(Debug)]
pub struct ValueDef { pub enum ValueDef {
pub pattern: PatternId, // 4B WithAnnotation {
pub expr_type: Option<(TypeId, Rigids)>, // ? pattern_id: PatternId, // 4B
pub expr_var: Variable, // 4B expr_id: ExprId, // 4B
type_id: TypeId,
rigids: Rigids,
expr_var: Variable, // 4B
},
NoAnnotation {
pattern_id: PatternId, // 4B
expr_id: ExprId, // 4B
expr_var: Variable, // 4B
},
} }
impl ShallowClone for ValueDef { impl ShallowClone for ValueDef {
fn shallow_clone(&self) -> Self { fn shallow_clone(&self) -> Self {
Self { match self {
pattern: self.pattern, Self::WithAnnotation {
expr_type: match &self.expr_type { pattern_id,
Some((id, rigids)) => Some((*id, rigids.shallow_clone())), expr_id,
None => None, type_id,
rigids,
expr_var,
} => Self::WithAnnotation {
pattern_id: *pattern_id,
expr_id: *expr_id,
type_id: *type_id,
rigids: rigids.shallow_clone(),
expr_var: *expr_var,
},
Self::NoAnnotation {
pattern_id,
expr_id,
expr_var,
} => Self::NoAnnotation {
pattern_id: *pattern_id,
expr_id: *expr_id,
expr_var: *expr_var,
}, },
expr_var: self.expr_var,
} }
} }
} }
@ -287,8 +316,68 @@ impl ShallowClone for FunctionDef {
#[derive(Debug)] #[derive(Debug)]
pub struct Rigids { pub struct Rigids {
pub named: PoolVec<(PoolStr, Variable)>, // 8B pub names: PoolVec<(Option<PoolStr>, Variable)>, // 8B
pub unnamed: PoolVec<Variable>, // 8B padding: [u8; 1],
}
#[allow(clippy::needless_collect)]
impl Rigids {
pub fn new(
named: HashMap<&str, Variable, BuildHasherDefault<WyHash>>,
unnamed: HashSet<Variable, BuildHasherDefault<WyHash>>,
pool: &mut Pool,
) -> Self {
let names = PoolVec::with_capacity((named.len() + unnamed.len()) as u32, pool);
let mut temp_names = Vec::new();
temp_names.extend(named.iter().map(|(name, var)| (Some(*name), *var)));
temp_names.extend(unnamed.iter().map(|var| (None, *var)));
for (node_id, (opt_name, variable)) in names.iter_node_ids().zip(temp_names) {
let poolstr = opt_name.map(|name| PoolStr::new(name, pool));
pool[node_id] = (poolstr, variable);
}
Self {
names,
padding: Default::default(),
}
}
pub fn named(&self, pool: &mut Pool) -> PoolVec<(PoolStr, Variable)> {
let named = self
.names
.iter(pool)
.filter_map(|(opt_pool_str, var)| {
if let Some(pool_str) = opt_pool_str {
Some((*pool_str, *var))
} else {
None
}
})
.collect::<Vec<(PoolStr, Variable)>>();
PoolVec::new(named.into_iter(), pool)
}
pub fn unnamed(&self, pool: &mut Pool) -> PoolVec<Variable> {
let unnamed = self
.names
.iter(pool)
.filter_map(|(opt_pool_str, var)| {
if opt_pool_str.is_none() {
Some(*var)
} else {
None
}
})
.collect::<Vec<Variable>>();
PoolVec::new(unnamed.into_iter(), pool)
}
} }
/// This is overflow data from a Closure variant, which needs to store /// This is overflow data from a Closure variant, which needs to store
@ -438,13 +527,15 @@ fn expr2_to_string_helper(
let mut first_elt = true; let mut first_elt = true;
for elem_expr2 in elems.iter(pool) { for elem_expr2_id in elems.iter(pool) {
if !first_elt { if !first_elt {
out_string.push_str(", ") out_string.push_str(", ")
} else { } else {
first_elt = false; first_elt = false;
} }
let elem_expr2 = pool.get(*elem_expr2_id);
expr2_to_string_helper(elem_expr2, indent_level + 2, pool, out_string) expr2_to_string_helper(elem_expr2, indent_level + 2, pool, out_string)
} }
@ -474,8 +565,8 @@ fn size_of_expr() {
impl ShallowClone for Rigids { impl ShallowClone for Rigids {
fn shallow_clone(&self) -> Self { fn shallow_clone(&self) -> Self {
Self { Self {
named: self.named.shallow_clone(), names: self.names.shallow_clone(),
unnamed: self.unnamed.shallow_clone(), padding: self.padding,
} }
} }
} }

View file

@ -1,7 +1,7 @@
use bumpalo::{collections::Vec as BumpVec, Bump}; use bumpalo::{collections::Vec as BumpVec, Bump};
use crate::lang::{ use crate::lang::{
ast::{Expr2, RecordField, WhenBranch}, ast::{Expr2, ExprId, RecordField, ValueDef, WhenBranch},
expr::Env, expr::Env,
pattern::{DestructType, Pattern2, PatternState2, RecordDestruct}, pattern::{DestructType, Pattern2, PatternState2, RecordDestruct},
pool::{Pool, PoolStr, PoolVec, ShallowClone}, pool::{Pool, PoolStr, PoolVec, ShallowClone},
@ -9,8 +9,11 @@ use crate::lang::{
}; };
use roc_can::expected::{Expected, PExpected}; use roc_can::expected::{Expected, PExpected};
use roc_collections::all::{BumpMap, BumpMapDefault, Index}; use roc_collections::all::{BumpMap, BumpMapDefault, Index, SendMap};
use roc_module::{ident::TagName, symbol::Symbol}; use roc_module::{
ident::{Lowercase, TagName},
symbol::Symbol,
};
use roc_region::all::Region; use roc_region::all::Region;
use roc_types::{ use roc_types::{
subs::Variable, subs::Variable,
@ -131,7 +134,10 @@ pub fn constrain_expr<'a>(
let list_elem_type = Type2::Variable(*elem_var); let list_elem_type = Type2::Variable(*elem_var);
for (index, elem_node_id) in elems.iter_node_ids().enumerate() { let indexed_node_ids: Vec<(usize, ExprId)> =
elems.iter(env.pool).copied().enumerate().collect();
for (index, elem_node_id) in indexed_node_ids {
let elem_expr = env.pool.get(elem_node_id); let elem_expr = env.pool.get(elem_node_id);
let elem_expected = Expected::ForReason( let elem_expected = Expected::ForReason(
@ -754,6 +760,156 @@ pub fn constrain_expr<'a>(
// exhautiveness checking happens when converting to mono::Expr // exhautiveness checking happens when converting to mono::Expr
exists(arena, flex_vars, And(constraints)) exists(arena, flex_vars, And(constraints))
} }
Expr2::LetValue {
def_id,
body_id,
body_var,
} => {
let value_def = env.pool.get(*def_id);
let body = env.pool.get(*body_id);
let body_con = constrain_expr(arena, env, body, expected.shallow_clone(), region);
match value_def {
ValueDef::WithAnnotation { .. } => todo!("implement {:?}", value_def),
ValueDef::NoAnnotation {
pattern_id,
expr_id,
expr_var,
} => {
let pattern = env.pool.get(*pattern_id);
let mut flex_vars = BumpVec::with_capacity_in(1, arena);
flex_vars.push(*body_var);
let expr_type = Type2::Variable(*expr_var);
let pattern_expected = PExpected::NoExpectation(expr_type.shallow_clone());
let mut state = PatternState2 {
headers: BumpMap::new_in(arena),
vars: BumpVec::with_capacity_in(1, arena),
constraints: BumpVec::with_capacity_in(1, arena),
};
constrain_pattern(arena, env, pattern, region, pattern_expected, &mut state);
state.vars.push(*expr_var);
let def_expr = env.pool.get(*expr_id);
let constrained_def = Let(arena.alloc(LetConstraint {
rigid_vars: BumpVec::new_in(arena),
flex_vars: state.vars,
def_types: state.headers,
defs_constraint: Let(arena.alloc(LetConstraint {
rigid_vars: BumpVec::new_in(arena), // always empty
flex_vars: BumpVec::new_in(arena), // empty, because our functions have no arguments
def_types: BumpMap::new_in(arena), // empty, because our functions have no arguments!
defs_constraint: And(state.constraints),
ret_constraint: constrain_expr(
arena,
env,
def_expr,
Expected::NoExpectation(expr_type),
region,
),
})),
ret_constraint: body_con,
}));
let mut and_constraints = BumpVec::with_capacity_in(2, arena);
and_constraints.push(constrained_def);
and_constraints.push(Eq(
Type2::Variable(*body_var),
expected,
Category::Storage(std::file!(), std::line!()),
// TODO: needs to be ret region
region,
));
exists(arena, flex_vars, And(and_constraints))
}
}
}
Expr2::Update {
symbol,
updates,
ext_var,
record_var,
} => {
let field_types = PoolVec::with_capacity(updates.len() as u32, env.pool);
let mut flex_vars = BumpVec::with_capacity_in(updates.len() + 2, arena);
let mut cons = BumpVec::with_capacity_in(updates.len() + 1, arena);
let mut record_key_updates = SendMap::default();
for (record_field_id, field_type_node_id) in
updates.iter_node_ids().zip(field_types.iter_node_ids())
{
let record_field = env.pool.get(record_field_id);
match record_field {
RecordField::LabeledValue(pool_str, var, node_id) => {
let expr = env.pool.get(*node_id);
let (field_type, field_con) = constrain_field_update(
arena,
env,
*var,
pool_str.as_str(env.pool).into(),
expr,
);
let field_type_id = env.pool.add(field_type);
env.pool[field_type_node_id] =
(*pool_str, types::RecordField::Required(field_type_id));
record_key_updates.insert(pool_str.as_str(env.pool).into(), Region::zero());
flex_vars.push(*var);
cons.push(field_con);
}
e => todo!("{:?}", e),
}
}
let fields_type = Type2::Record(field_types, env.pool.add(Type2::Variable(*ext_var)));
let record_type = Type2::Variable(*record_var);
// NOTE from elm compiler: fields_type is separate so that Error propagates better
let fields_con = Eq(
record_type.shallow_clone(),
Expected::NoExpectation(fields_type),
Category::Record,
region,
);
let record_con = Eq(
record_type.shallow_clone(),
expected,
Category::Record,
region,
);
flex_vars.push(*record_var);
flex_vars.push(*ext_var);
let con = Lookup(
*symbol,
Expected::ForReason(
Reason::RecordUpdateKeys(*symbol, record_key_updates),
record_type,
region,
),
region,
);
// ensure constraints are solved in this order, gives better errors
cons.insert(0, fields_con);
cons.insert(1, con);
cons.insert(2, record_con);
exists(arena, flex_vars, And(cons))
}
_ => todo!("implement constraints for {:?}", expr), _ => todo!("implement constraints for {:?}", expr),
} }
} }
@ -785,6 +941,22 @@ fn constrain_field<'a>(
(field_type, constraint) (field_type, constraint)
} }
#[inline(always)]
fn constrain_field_update<'a>(
arena: &'a Bump,
env: &mut Env,
field_var: Variable,
field: Lowercase,
expr: &Expr2,
) -> (Type2, Constraint<'a>) {
let field_type = Type2::Variable(field_var);
let reason = Reason::RecordUpdateValue(field);
let field_expected = Expected::ForReason(reason, field_type.shallow_clone(), Region::zero());
let con = constrain_expr(arena, env, expr, field_expected, Region::zero());
(field_type, con)
}
fn constrain_empty_record<'a>(expected: Expected<Type2>, region: Region) -> Constraint<'a> { fn constrain_empty_record<'a>(expected: Expected<Type2>, region: Region) -> Constraint<'a> {
Constraint::Eq(Type2::EmptyRec, expected, Category::Record, region) Constraint::Eq(Type2::EmptyRec, expected, Category::Record, region)
} }

View file

@ -46,10 +46,13 @@ impl Def {
match self { match self {
Def::AnnotationOnly { .. } => todo!("lost pattern information here ... "), Def::AnnotationOnly { .. } => todo!("lost pattern information here ... "),
Def::Value(ValueDef { pattern, .. }) => { Def::Value(value_def) => match value_def {
let pattern2 = &pool[*pattern]; ValueDef::WithAnnotation { pattern_id, .. }
| ValueDef::NoAnnotation { pattern_id, .. } => {
let pattern2 = &pool[*pattern_id];
output.extend(symbols_from_pattern(pool, pattern2)); output.extend(symbols_from_pattern(pool, pattern2));
} }
},
Def::Function(function_def) => match function_def { Def::Function(function_def) => match function_def {
FunctionDef::NoAnnotation { name, .. } FunctionDef::NoAnnotation { name, .. }
| FunctionDef::WithAnnotation { name, .. } => { | FunctionDef::WithAnnotation { name, .. } => {
@ -79,7 +82,7 @@ impl ShallowClone for Def {
/// but no Expr canonicalization has happened yet. Also, it has had spaces /// but no Expr canonicalization has happened yet. Also, it has had spaces
/// and nesting resolved, and knows whether annotations are standalone or not. /// and nesting resolved, and knows whether annotations are standalone or not.
#[derive(Debug)] #[derive(Debug)]
enum PendingDef<'a> { pub enum PendingDef<'a> {
/// A standalone annotation with no body /// A standalone annotation with no body
AnnotationOnly( AnnotationOnly(
&'a Located<ast::Pattern<'a>>, &'a Located<ast::Pattern<'a>>,
@ -315,23 +318,7 @@ fn from_pending_alias<'a>(
} }
} }
let named = PoolVec::with_capacity(named_rigids.len() as u32, env.pool); let rigids = Rigids::new(named_rigids, unnamed_rigids, env.pool);
let unnamed = PoolVec::with_capacity(unnamed_rigids.len() as u32, env.pool);
for (node_id, (name, variable)) in named.iter_node_ids().zip(named_rigids) {
let poolstr = PoolStr::new(name, env.pool);
env.pool[node_id] = (poolstr, variable);
}
for (node_id, rigid) in unnamed.iter_node_ids().zip(unnamed_rigids) {
env.pool[node_id] = rigid;
}
let rigids = Rigids {
named: named.shallow_clone(),
unnamed,
};
let annotation = match signature { let annotation = match signature {
Signature::Value { annotation } => annotation, Signature::Value { annotation } => annotation,
@ -355,6 +342,8 @@ fn from_pending_alias<'a>(
rec_type_union.substitute_alias(env.pool, symbol, Type2::Variable(rec_var)); rec_type_union.substitute_alias(env.pool, symbol, Type2::Variable(rec_var));
let annotation_id = env.add(rec_type_union, ann.region); let annotation_id = env.add(rec_type_union, ann.region);
let named = rigids.named(env.pool);
scope.add_alias(env.pool, symbol, named, annotation_id); scope.add_alias(env.pool, symbol, named, annotation_id);
} else { } else {
env.problem(Problem::CyclicAlias(symbol, name.region, vec![])); env.problem(Problem::CyclicAlias(symbol, name.region, vec![]));
@ -362,6 +351,8 @@ fn from_pending_alias<'a>(
} }
} else { } else {
let annotation_id = env.add(annotation, ann.region); let annotation_id = env.add(annotation, ann.region);
let named = rigids.named(env.pool);
scope.add_alias(env.pool, symbol, named, annotation_id); scope.add_alias(env.pool, symbol, named, annotation_id);
} }
@ -407,21 +398,7 @@ fn canonicalize_pending_def<'a>(
output.references.referenced_aliases.insert(symbol); output.references.referenced_aliases.insert(symbol);
} }
let rigids = { let rigids = Rigids::new(named_rigids, unnamed_rigids, env.pool);
let named = PoolVec::with_capacity(named_rigids.len() as u32, env.pool);
let unnamed = PoolVec::with_capacity(unnamed_rigids.len() as u32, env.pool);
for (node_id, (name, variable)) in named.iter_node_ids().zip(named_rigids) {
let poolstr = PoolStr::new(name, env.pool);
env.pool[node_id] = (poolstr, variable);
}
for (node_id, rigid) in unnamed.iter_node_ids().zip(unnamed_rigids) {
env.pool[node_id] = rigid;
}
Rigids { named, unnamed }
};
let annotation = match signature { let annotation = match signature {
Signature::Value { annotation } => annotation, Signature::Value { annotation } => annotation,
@ -470,21 +447,7 @@ fn canonicalize_pending_def<'a>(
output.references.referenced_aliases.insert(symbol); output.references.referenced_aliases.insert(symbol);
} }
let rigids = { let rigids = Rigids::new(named_rigids, unnamed_rigids, env.pool);
let named = PoolVec::with_capacity(named_rigids.len() as u32, env.pool);
let unnamed = PoolVec::with_capacity(unnamed_rigids.len() as u32, env.pool);
for (node_id, (name, variable)) in named.iter_node_ids().zip(named_rigids) {
let poolstr = PoolStr::new(name, env.pool);
env.pool[node_id] = (poolstr, variable);
}
for (node_id, rigid) in unnamed.iter_node_ids().zip(unnamed_rigids) {
env.pool[node_id] = rigid;
}
Rigids { named, unnamed }
};
// bookkeeping for tail-call detection. If we're assigning to an // bookkeeping for tail-call detection. If we're assigning to an
// identifier (e.g. `f = \x -> ...`), then this symbol can be tail-called. // identifier (e.g. `f = \x -> ...`), then this symbol can be tail-called.
@ -624,9 +587,11 @@ fn canonicalize_pending_def<'a>(
}; };
let annotation = env.add(annotation, loc_ann.region); let annotation = env.add(annotation, loc_ann.region);
let value_def = ValueDef { let value_def = ValueDef::WithAnnotation {
pattern: loc_can_pattern, pattern_id: loc_can_pattern,
expr_type: Some((annotation, rigids)), expr_id: env.pool.add(loc_can_expr),
type_id: annotation,
rigids: rigids,
expr_var: env.var_store.fresh(), expr_var: env.var_store.fresh(),
}; };
@ -745,9 +710,9 @@ fn canonicalize_pending_def<'a>(
} }
_ => { _ => {
let value_def = ValueDef { let value_def = ValueDef::NoAnnotation {
pattern: loc_can_pattern, pattern_id: loc_can_pattern,
expr_type: None, expr_id: env.pool.add(loc_can_expr),
expr_var: env.var_store.fresh(), expr_var: env.var_store.fresh(),
}; };

View file

@ -1,34 +1,93 @@
#![allow(clippy::all)] #![allow(clippy::all)]
#![allow(dead_code)] #![allow(dead_code)]
#![allow(unused_imports)] #![allow(unused_imports)]
use crate::lang::ast::expr2_to_string; use bumpalo::{collections::Vec as BumpVec, Bump};
use crate::lang::ast::RecordField; use inlinable_string::InlinableString;
use crate::lang::ast::{ClosureExtra, Expr2, ExprId, FloatVal, IntStyle, IntVal, WhenBranch}; use std::collections::HashMap;
use crate::lang::def::References;
use crate::lang::pattern::to_pattern2; use crate::lang::ast::{
expr2_to_string, ClosureExtra, Expr2, ExprId, FloatVal, IntStyle, IntVal, RecordField,
WhenBranch,
};
use crate::lang::def::{
canonicalize_defs, sort_can_defs, CanDefs, Declaration, Def, PendingDef, References,
};
use crate::lang::pattern::{to_pattern2, Pattern2, PatternId};
use crate::lang::pool::{NodeId, Pool, PoolStr, PoolVec, ShallowClone}; use crate::lang::pool::{NodeId, Pool, PoolStr, PoolVec, ShallowClone};
use crate::lang::scope::Scope; use crate::lang::scope::Scope;
use crate::lang::types::{Alias, Type2, TypeId}; use crate::lang::types::{Alias, Annotation2, Type2, TypeId};
use bumpalo::Bump;
use inlinable_string::InlinableString;
use roc_can::expr::Recursive; use roc_can::expr::Recursive;
use roc_can::num::{finish_parsing_base, finish_parsing_float, finish_parsing_int}; use roc_can::num::{finish_parsing_base, finish_parsing_float, finish_parsing_int};
use roc_can::operator::desugar_expr;
use roc_collections::all::default_hasher;
use roc_collections::all::{MutMap, MutSet}; use roc_collections::all::{MutMap, MutSet};
use roc_module::ident::Lowercase;
use roc_module::ident::ModuleName; use roc_module::ident::ModuleName;
use roc_module::low_level::LowLevel; use roc_module::low_level::LowLevel;
use roc_module::operator::CalledVia; use roc_module::operator::CalledVia;
use roc_module::symbol::{IdentIds, ModuleId, ModuleIds, Symbol}; use roc_module::symbol::{IdentIds, ModuleId, ModuleIds, Symbol};
use roc_parse::ast; use roc_parse::ast;
use roc_parse::ast::Expr;
use roc_parse::ast::StrLiteral; use roc_parse::ast::StrLiteral;
use roc_parse::parser::{loc, Parser, State, SyntaxError}; use roc_parse::parser::{loc, Parser, State, SyntaxError};
use roc_parse::pattern::PatternType;
use roc_problem::can::{Problem, RuntimeError}; use roc_problem::can::{Problem, RuntimeError};
use roc_region::all::{Located, Region}; use roc_region::all::{Located, Region};
use roc_types::subs::{VarStore, Variable}; use roc_types::subs::{VarStore, Variable};
#[derive(Clone, Debug, PartialEq, Default)]
pub struct IntroducedVariables {
// Rigids must be unique within a type annoation.
// E.g. in `identity : a -> a`, there should only be one
// variable (a rigid one, with name "a").
// Hence `rigids : Map<Lowercase, Variable>`
//
// But then between annotations, the same name can occur multiple times,
// but a variable can only have one name. Therefore
// `ftv : Map<Variable, Lowercase>`.
pub wildcards: Vec<Variable>,
pub var_by_name: MutMap<Lowercase, Variable>,
pub name_by_var: MutMap<Variable, Lowercase>,
pub host_exposed_aliases: MutMap<Symbol, Variable>,
}
impl IntroducedVariables {
pub fn insert_named(&mut self, name: Lowercase, var: Variable) {
self.var_by_name.insert(name.clone(), var);
self.name_by_var.insert(var, name);
}
pub fn insert_wildcard(&mut self, var: Variable) {
self.wildcards.push(var);
}
pub fn insert_host_exposed_alias(&mut self, symbol: Symbol, var: Variable) {
self.host_exposed_aliases.insert(symbol, var);
}
pub fn union(&mut self, other: &Self) {
self.wildcards.extend(other.wildcards.iter().cloned());
self.var_by_name.extend(other.var_by_name.clone());
self.name_by_var.extend(other.name_by_var.clone());
self.host_exposed_aliases
.extend(other.host_exposed_aliases.clone());
}
pub fn var_by_name(&self, name: &Lowercase) -> Option<&Variable> {
self.var_by_name.get(name)
}
pub fn name_by_var(&self, var: Variable) -> Option<&Lowercase> {
self.name_by_var.get(&var)
}
}
#[derive(Clone, Default, Debug, PartialEq)] #[derive(Clone, Default, Debug, PartialEq)]
pub struct Output { pub struct Output {
pub references: References, pub references: References,
pub tail_call: Option<Symbol>, pub tail_call: Option<Symbol>,
pub introduced_variables: IntroducedVariables,
pub aliases: MutMap<Symbol, NodeId<Alias>>, pub aliases: MutMap<Symbol, NodeId<Alias>>,
pub non_closures: MutSet<Symbol>, pub non_closures: MutSet<Symbol>,
} }
@ -53,6 +112,8 @@ pub struct Env<'a> {
pub pool: &'a mut Pool, pub pool: &'a mut Pool,
pub arena: &'a Bump, pub arena: &'a Bump,
pub problems: BumpVec<'a, Problem>,
pub dep_idents: MutMap<ModuleId, IdentIds>, pub dep_idents: MutMap<ModuleId, IdentIds>,
pub module_ids: &'a ModuleIds, pub module_ids: &'a ModuleIds,
pub ident_ids: IdentIds, pub ident_ids: IdentIds,
@ -82,6 +143,7 @@ impl<'a> Env<'a> {
home, home,
arena, arena,
pool, pool,
problems: BumpVec::new_in(arena),
var_store, var_store,
dep_idents, dep_idents,
module_ids, module_ids,
@ -102,8 +164,8 @@ impl<'a> Env<'a> {
id id
} }
pub fn problem(&mut self, _problem: Problem) { pub fn problem(&mut self, problem: Problem) {
todo!(); self.problems.push(problem);
} }
pub fn set_region<T>(&mut self, _node_id: NodeId<T>, _region: Region) { pub fn set_region<T>(&mut self, _node_id: NodeId<T>, _region: Region) {
@ -236,7 +298,16 @@ pub fn str_to_expr2<'a>(
region: Region, region: Region,
) -> Result<(Expr2, self::Output), SyntaxError<'a>> { ) -> Result<(Expr2, self::Output), SyntaxError<'a>> {
match roc_parse::test_helpers::parse_loc_with(arena, input.trim()) { match roc_parse::test_helpers::parse_loc_with(arena, input.trim()) {
Ok(loc_expr) => Ok(to_expr2(env, scope, arena.alloc(loc_expr.value), region)), Ok(loc_expr) => {
let desugared_loc_expr = desugar_expr(arena, arena.alloc(loc_expr));
Ok(to_expr2(
env,
scope,
arena.alloc(desugared_loc_expr.value),
region,
))
}
Err(fail) => Err(fail), Err(fail) => Err(fail),
} }
} }
@ -248,6 +319,7 @@ pub fn to_expr2<'a>(
region: Region, region: Region,
) -> (Expr2, self::Output) { ) -> (Expr2, self::Output) {
use roc_parse::ast::Expr::*; use roc_parse::ast::Expr::*;
match parse_expr { match parse_expr {
Float(string) => { Float(string) => {
match finish_parsing_float(string) { match finish_parsing_float(string) {
@ -335,14 +407,16 @@ pub fn to_expr2<'a>(
let mut output = Output::default(); let mut output = Output::default();
let output_ref = &mut output; let output_ref = &mut output;
let elems = PoolVec::with_capacity(items.len() as u32, env.pool); let elems: PoolVec<NodeId<Expr2>> =
PoolVec::with_capacity(items.len() as u32, env.pool);
for (node_id, item) in elems.iter_node_ids().zip(items.iter()) { for (node_id, item) in elems.iter_node_ids().zip(items.iter()) {
let (expr, sub_output) = to_expr2(env, scope, &item.value, item.region); let (expr, sub_output) = to_expr2(env, scope, &item.value, item.region);
output_ref.union(sub_output); output_ref.union(sub_output);
env.pool[node_id] = expr; let expr_id = env.pool.add(expr);
env.pool[node_id] = expr_id;
} }
let expr = Expr2::List { let expr = Expr2::List {
@ -780,7 +854,50 @@ pub fn to_expr2<'a>(
} }
Defs(loc_defs, loc_ret) => { Defs(loc_defs, loc_ret) => {
todo!("{:?} {:?}", loc_defs, loc_ret) let (unsorted, mut scope, defs_output, symbols_introduced) = canonicalize_defs(
env,
Output::default(),
&scope,
loc_defs,
PatternType::DefExpr,
);
// The def as a whole is a tail call iff its return expression is a tail call.
// Use its output as a starting point because its tail_call already has the right answer!
let (ret_expr, mut output) = to_expr2(env, &mut scope, &loc_ret.value, loc_ret.region);
output
.introduced_variables
.union(&defs_output.introduced_variables);
output.references.union_mut(defs_output.references);
// Now that we've collected all the references, check to see if any of the new idents
// we defined went unused by the return expression. If any were unused, report it.
for (symbol, region) in symbols_introduced {
if !output.references.has_lookup(symbol) {
env.problem(Problem::UnusedDef(symbol, region));
}
}
let (can_defs, output) = sort_can_defs(env, unsorted, output);
match can_defs {
Ok(decls) => {
let mut expr = ret_expr;
for declaration in decls.into_iter().rev() {
expr = decl_to_let(env.pool, env.var_store, declaration, expr);
}
(expr, output)
}
Err(_err) => {
// TODO: fix this to be something from Expr2
// (RuntimeError(err), output)
todo!()
}
}
} }
PrecedenceConflict { .. } => { PrecedenceConflict { .. } => {
@ -1262,11 +1379,10 @@ fn canonicalize_lookup(
Var(symbol) Var(symbol)
} }
Err(_problem) => { Err(problem) => {
// env.problem(Problem::RuntimeError(problem.clone())); env.problem(Problem::RuntimeError(problem.clone()));
// RuntimeError(problem) RuntimeError()
todo!()
} }
} }
} else { } else {
@ -1278,14 +1394,12 @@ fn canonicalize_lookup(
Var(symbol) Var(symbol)
} }
Err(_problem) => { Err(problem) => {
// Either the module wasn't imported, or // Either the module wasn't imported, or
// it was imported but it doesn't expose this ident. // it was imported but it doesn't expose this ident.
// env.problem(Problem::RuntimeError(problem.clone())); env.problem(Problem::RuntimeError(problem.clone()));
// RuntimeError(problem) RuntimeError()
todo!()
} }
} }
}; };
@ -1294,3 +1408,59 @@ fn canonicalize_lookup(
(can_expr, output) (can_expr, output)
} }
fn decl_to_let(pool: &mut Pool, var_store: &mut VarStore, decl: Declaration, ret: Expr2) -> Expr2 {
match decl {
Declaration::Declare(def) => match def {
Def::AnnotationOnly { .. } => todo!(),
Def::Value(value_def) => {
let def_id = pool.add(value_def);
let body_id = pool.add(ret);
Expr2::LetValue {
def_id,
body_id,
body_var: var_store.fresh(),
}
}
Def::Function(function_def) => {
let def_id = pool.add(function_def);
let body_id = pool.add(ret);
Expr2::LetFunction {
def_id,
body_id,
body_var: var_store.fresh(),
}
}
},
Declaration::DeclareRec(defs) => {
let mut function_defs = vec![];
for def in defs {
match def {
Def::AnnotationOnly { .. } => todo!(),
Def::Function(function_def) => function_defs.push(function_def),
Def::Value(_) => unreachable!(),
}
}
let body_id = pool.add(ret);
Expr2::LetRec {
defs: PoolVec::new(function_defs.into_iter(), pool),
body_var: var_store.fresh(),
body_id,
}
}
Declaration::InvalidCycle(_entries, _) => {
// TODO: replace with something from Expr2
// Expr::RuntimeError(RuntimeError::CircularDef(entries))
todo!()
}
Declaration::Builtin(_) => {
// Builtins should only be added to top-level decls, not to let-exprs!
unreachable!()
}
}
}

View file

@ -2,7 +2,7 @@
#![allow(dead_code)] #![allow(dead_code)]
#![allow(unused_imports)] #![allow(unused_imports)]
#![allow(unused_variables)] #![allow(unused_variables)]
use crate::lang::ast::{FunctionDef, ValueDef}; use crate::lang::ast::{Expr2, FunctionDef, ValueDef};
use crate::lang::def::{canonicalize_defs, sort_can_defs, Declaration, Def}; use crate::lang::def::{canonicalize_defs, sort_can_defs, Declaration, Def};
use crate::lang::expr::Env; use crate::lang::expr::Env;
use crate::lang::expr::Output; use crate::lang::expr::Output;
@ -249,10 +249,11 @@ pub fn canonicalize_module_defs<'a>(
let runtime_error = RuntimeError::ExposedButNotDefined(symbol); let runtime_error = RuntimeError::ExposedButNotDefined(symbol);
let value_def = { let value_def = {
let pattern = env.pool.add(Pattern2::Identifier(symbol)); let pattern_id = env.pool.add(Pattern2::Identifier(symbol));
ValueDef { let expr_id = env.pool.add(Expr2::RuntimeError());
pattern, ValueDef::NoAnnotation {
expr_type: None, pattern_id,
expr_id,
expr_var: env.var_store.fresh(), expr_var: env.var_store.fresh(),
} }
}; };

View file

@ -352,11 +352,11 @@ impl<'a, T: 'a + Sized> PoolVec<T> {
let index = first_node_id.index as isize; let index = first_node_id.index as isize;
let mut next_node_ptr = unsafe { pool.nodes.offset(index) } as *mut T; let mut next_node_ptr = unsafe { pool.nodes.offset(index) } as *mut T;
for node in nodes { for (indx_inc, node) in nodes.enumerate() {
unsafe { unsafe {
*next_node_ptr = node; *next_node_ptr = node;
next_node_ptr = next_node_ptr.offset(1); next_node_ptr = pool.nodes.offset(index + (indx_inc as isize) + 1) as *mut T;
} }
} }
@ -603,6 +603,17 @@ impl<T> Iterator for PoolVecIterNodeIds<T> {
} }
} }
#[test]
fn pool_vec_iter_test() {
    // Round-trip a Vec through a PoolVec and check iteration yields it back
    // in the same order.
    let source: Vec<usize> = vec![2, 4, 8, 16];
    let mut pool = Pool::with_capacity(1024);

    let stored = PoolVec::new(source.clone().into_iter(), &mut pool);
    let round_tripped: Vec<usize> = stored.iter(&pool).copied().collect();

    assert_eq!(round_tripped, source);
}
/// Clones the outer node, but does not clone any nodeids /// Clones the outer node, but does not clone any nodeids
pub trait ShallowClone { pub trait ShallowClone {
fn shallow_clone(&self) -> Self; fn shallow_clone(&self) -> Self;

View file

@ -117,7 +117,7 @@ fn infer_eq(actual: &str, expected_str: &str) {
let content = subs.get(var).content; let content = subs.get(var).content;
let interns = Interns { let interns = Interns {
module_ids, module_ids: env.module_ids.clone(),
all_ident_ids: dep_idents, all_ident_ids: dep_idents,
}; };
@ -300,3 +300,31 @@ fn constrain_when() {
"[ Blue, Purple ]*", "[ Blue, Purple ]*",
) )
} }
#[test]
fn constrain_let_value() {
    // A let-bound record should infer the record's type for the body.
    let program = indoc!(
        r#"
        person = { name: "roc" }
        person
        "#
    );

    infer_eq(program, "{ name : Str }")
}
#[test]
fn constrain_update() {
    // Record update (`&`) should preserve the record's inferred type.
    let program = indoc!(
        r#"
        person = { name: "roc" }
        { person & name: "bird" }
        "#
    );

    infer_eq(program, "{ name : Str }")
}