Merge remote-tracking branch 'origin/main' into builtin-str2

This commit is contained in:
Richard Feldman 2025-10-24 11:57:12 -04:00
commit 4462c6e16d
No known key found for this signature in database
75 changed files with 1960 additions and 1014 deletions

View file

@ -631,24 +631,24 @@ fn discoverBuiltinRocFiles(b: *std.Build) ![]const []const u8 {
var builtin_roc_dir = try std.fs.openDirAbsolute(builtin_roc_path, .{ .iterate = true });
defer builtin_roc_dir.close();
var roc_files = std.array_list.Managed([]const u8).init(b.allocator);
errdefer roc_files.deinit();
var roc_files = std.ArrayList([]const u8).empty;
errdefer roc_files.deinit(b.allocator);
var iter = builtin_roc_dir.iterate();
while (try iter.next()) |entry| {
if (entry.kind == .file and std.mem.endsWith(u8, entry.name, ".roc")) {
const full_path = b.fmt("src/build/roc/{s}", .{entry.name});
try roc_files.append(full_path);
try roc_files.append(b.allocator, full_path);
}
}
return roc_files.toOwnedSlice();
return roc_files.toOwnedSlice(b.allocator);
}
fn generateCompiledBuiltinsSource(b: *std.Build, roc_files: []const []const u8) ![]const u8 {
var builtins_source = std.array_list.Managed(u8).init(b.allocator);
errdefer builtins_source.deinit();
const writer = builtins_source.writer();
var builtins_source = std.ArrayList(u8).empty;
errdefer builtins_source.deinit(b.allocator);
const writer = builtins_source.writer(b.allocator);
for (roc_files) |roc_path| {
const roc_basename = std.fs.path.basename(roc_path);
@ -671,7 +671,7 @@ fn generateCompiledBuiltinsSource(b: *std.Build, roc_files: []const []const u8)
// Also embed builtin_indices.bin
try writer.writeAll("pub const builtin_indices_bin = @embedFile(\"builtin_indices.bin\");\n");
return builtins_source.toOwnedSlice();
return builtins_source.toOwnedSlice(b.allocator);
}
fn add_fuzz_target(
@ -770,7 +770,7 @@ fn addMainExe(
.root_source_file = b.path("test/str/platform/host.zig"),
.target = target,
.optimize = optimize,
.strip = true,
.strip = optimize != .Debug,
.pic = true, // Enable Position Independent Code for PIE compatibility
}),
});
@ -794,7 +794,7 @@ fn addMainExe(
.root_source_file = b.path("test/int/platform/host.zig"),
.target = target,
.optimize = optimize,
.strip = true,
.strip = optimize != .Debug,
.pic = true, // Enable Position Independent Code for PIE compatibility
}),
});
@ -827,7 +827,7 @@ fn addMainExe(
.root_source_file = b.path("test/int/platform/host.zig"),
.target = cross_resolved_target,
.optimize = optimize,
.strip = true,
.strip = optimize != .Debug,
.pic = true,
}),
.linkage = .static,
@ -872,7 +872,7 @@ fn addMainExe(
.root_source_file = b.path("src/interpreter_shim/main.zig"),
.target = target,
.optimize = optimize,
.strip = true,
.strip = optimize != .Debug,
.pic = true, // Enable Position Independent Code for PIE compatibility
}),
.linkage = .static,
@ -953,13 +953,13 @@ const ParsedBuildArgs = struct {
};
fn appendFilter(
list: *std.array_list.Managed([]const u8),
list: *std.ArrayList([]const u8),
b: *std.Build,
value: []const u8,
) void {
const trimmed = std.mem.trim(u8, value, " \t\n\r");
if (trimmed.len == 0) return;
list.append(b.dupe(trimmed)) catch @panic("OOM while parsing --test-filter value");
list.append(b.allocator, b.dupe(trimmed)) catch @panic("OOM while parsing --test-filter value");
}
fn parseBuildArgs(b: *std.Build) ParsedBuildArgs {
@ -968,8 +968,8 @@ fn parseBuildArgs(b: *std.Build) ParsedBuildArgs {
.test_filters = &.{},
};
var run_args_list = std.array_list.Managed([]const u8).init(b.allocator);
var filter_list = std.array_list.Managed([]const u8).init(b.allocator);
var run_args_list = std.ArrayList([]const u8).empty;
var filter_list = std.ArrayList([]const u8).empty;
var i: usize = 0;
while (i < raw_args.len) {
@ -994,12 +994,12 @@ fn parseBuildArgs(b: *std.Build) ParsedBuildArgs {
continue;
}
run_args_list.append(arg) catch @panic("OOM while recording build arguments");
run_args_list.append(b.allocator, arg) catch @panic("OOM while recording build arguments");
i += 1;
}
const run_args = run_args_list.toOwnedSlice() catch @panic("OOM while finalizing build arguments");
const test_filters = filter_list.toOwnedSlice() catch @panic("OOM while finalizing test filters");
const run_args = run_args_list.toOwnedSlice(b.allocator) catch @panic("OOM while finalizing build arguments");
const test_filters = filter_list.toOwnedSlice(b.allocator) catch @panic("OOM while finalizing test filters");
return .{ .run_args = run_args, .test_filters = test_filters };
}
@ -1423,10 +1423,10 @@ fn getCompilerVersion(b: *std.Build, optimize: OptimizeMode) []const u8 {
fn generateGlibcStub(b: *std.Build, target: ResolvedTarget, target_name: []const u8) ?*Step.UpdateSourceFiles {
// Generate assembly stub with comprehensive symbols using the new build module
var assembly_buf = std.array_list.Managed(u8).init(b.allocator);
defer assembly_buf.deinit();
var assembly_buf = std.ArrayList(u8).empty;
defer assembly_buf.deinit(b.allocator);
const writer = assembly_buf.writer();
const writer = assembly_buf.writer(b.allocator);
const target_arch = target.result.cpu.arch;
const target_abi = target.result.abi;

View file

@ -602,13 +602,13 @@ test "Ident.Store comprehensive CompactWriter roundtrip" {
.{ .text = "hello", .expected_idx = 1 }, // duplicate, should reuse
};
var indices = std.array_list.Managed(Ident.Idx).init(gpa);
defer indices.deinit();
var indices = std.ArrayList(Ident.Idx).empty;
defer indices.deinit(gpa);
for (test_idents) |test_ident| {
const ident = Ident.for_text(test_ident.text);
const idx = try original.insert(gpa, ident);
try indices.append(idx);
try indices.append(gpa, idx);
// Verify the index matches expectation
try std.testing.expectEqual(test_ident.expected_idx, idx.idx);
}

View file

@ -24,10 +24,20 @@ pub fn Scratch(comptime T: type) type {
}
/// Returns the start position for a new Span of indexes in scratch
pub fn top(self: *Self) u32 {
pub fn top(self: *const Self) u32 {
return @as(u32, @intCast(self.items.items.len));
}
/// Check if a value is in the array.
/// Only usable for element types `T` that support `==` (ints, enums, etc.).
pub fn contains(self: *const Self, val: T) bool {
    // Prefer the stdlib search over a hand-rolled loop; semantics are identical.
    return std.mem.indexOfScalar(T, self.items.items, val) != null;
}
/// Places a new index of type `T` in the scratch
pub fn append(self: *Self, idx: T) std.mem.Allocator.Error!void {
try self.items.append(idx);
@ -48,6 +58,25 @@ pub fn Scratch(comptime T: type) type {
return self.items.items[@intCast(start)..];
}
/// Creates a slice covering exactly the items referenced by the provided span.
/// Asserts that the span lies fully within the current items.
pub fn sliceFromSpan(self: *Self, span: DataSpan) []T {
    const start: usize = @intCast(span.start);
    // Widen before adding: `span.start + span.len` in u32 could overflow
    // (panic in safe modes) even when the span itself is valid in usize.
    const end: usize = start + @as(usize, @intCast(span.len));
    std.debug.assert(end <= self.items.items.len);
    return self.items.items[start..end];
}
/// Creates a span from the provided start index to the end of the list.
/// `start` must come from a prior call to `top()` (i.e. start <= current length).
pub fn spanFrom(self: *Self, start: u32) DataSpan {
    const total: u32 = @intCast(self.items.items.len);
    // Make the precondition explicit rather than relying on the implicit
    // u32 subtraction-underflow panic below.
    std.debug.assert(start <= total);
    return DataSpan{
        .start = start,
        .len = total - start,
    };
}
/// Creates a new span starting at start. Moves the items from scratch
/// to extra_data as appropriate.
pub fn spanFromStart(self: *Self, start: u32, data: *std.array_list.Managed(u32)) std.mem.Allocator.Error!DataSpan {

View file

@ -305,12 +305,12 @@ test "SmallStringInterner basic CompactWriter roundtrip" {
"duplicate", // Should reuse the same index
};
var indices = std.array_list.Managed(SmallStringInterner.Idx).init(gpa);
defer indices.deinit();
var indices = std.ArrayList(SmallStringInterner.Idx).empty;
defer indices.deinit(gpa);
for (test_strings) |str| {
const idx = try original.insert(gpa, str);
try indices.append(idx);
try indices.append(gpa, idx);
}
// Verify duplicate detection worked
@ -507,12 +507,12 @@ test "SmallStringInterner edge cases CompactWriter roundtrip" {
" start_with_space",
};
var indices = std.array_list.Managed(SmallStringInterner.Idx).init(gpa);
defer indices.deinit();
var indices = std.ArrayList(SmallStringInterner.Idx).empty;
defer indices.deinit(gpa);
for (edge_cases) |str| {
const idx = try original.insert(gpa, str);
try indices.append(idx);
try indices.append(gpa, idx);
}
// Create a temp file

View file

@ -257,12 +257,12 @@ test "Store comprehensive CompactWriter roundtrip" {
"very long string " ** 50, // long string
};
var indices = std.array_list.Managed(Idx).init(gpa);
defer indices.deinit();
var indices = std.ArrayList(Idx).empty;
defer indices.deinit(gpa);
for (test_strings) |str| {
const idx = try original.insert(gpa, str);
try indices.append(idx);
try indices.append(gpa, idx);
}
// Create a temp file

View file

@ -336,12 +336,12 @@ fn compileModule(
// 6. Type check
// Build the list of other modules for type checking
var imported_envs = std.array_list.Managed(*const ModuleEnv).init(gpa);
defer imported_envs.deinit();
var imported_envs = std.ArrayList(*const ModuleEnv).empty;
defer imported_envs.deinit(gpa);
// Add dependencies
for (deps) |dep| {
try imported_envs.append(dep.env);
try imported_envs.append(gpa, dep.env);
}
var checker = try Check.init(

View file

@ -41,16 +41,16 @@ fn extendWithAggregatorFilters(
const extras = aggregatorFilters(module_type);
if (extras.len == 0) return base;
var list = std.array_list.Managed([]const u8).init(b.allocator);
list.ensureTotalCapacity(base.len + extras.len) catch @panic("OOM while extending module test filters");
list.appendSlice(base) catch @panic("OOM while extending module test filters");
var list = std.ArrayList([]const u8).empty;
list.ensureTotalCapacity(b.allocator, base.len + extras.len) catch @panic("OOM while extending module test filters");
list.appendSlice(b.allocator, base) catch @panic("OOM while extending module test filters");
for (extras) |extra| {
if (filtersContain(base, extra)) continue;
list.append(b.dupe(extra)) catch @panic("OOM while extending module test filters");
list.append(b.allocator, b.dupe(extra)) catch @panic("OOM while extending module test filters");
}
return list.toOwnedSlice() catch @panic("OOM while finalizing module test filters");
return list.toOwnedSlice(b.allocator) catch @panic("OOM while finalizing module test filters");
}
/// Represents a test module with its compilation and execution steps.

View file

@ -1067,10 +1067,10 @@ test "double roundtrip bundle -> unbundle -> bundle -> unbundle" {
var first_bundle_writer: std.Io.Writer.Allocating = .init(allocator);
defer first_bundle_writer.deinit();
var paths1 = std.array_list.Managed([]const u8).init(allocator);
defer paths1.deinit();
var paths1 = std.ArrayList([]const u8).empty;
defer paths1.deinit(allocator);
for (test_files) |test_file| {
try paths1.append(test_file.path);
try paths1.append(allocator, test_file.path);
}
var iter1 = FilePathIterator{ .paths = paths1.items };
@ -1106,10 +1106,10 @@ test "double roundtrip bundle -> unbundle -> bundle -> unbundle" {
var second_bundle_writer: std.Io.Writer.Allocating = .init(allocator);
defer second_bundle_writer.deinit();
var paths2 = std.array_list.Managed([]const u8).init(allocator);
defer paths2.deinit();
var paths2 = std.ArrayList([]const u8).empty;
defer paths2.deinit(allocator);
for (test_files) |test_file| {
try paths2.append(test_file.path);
try paths2.append(allocator, test_file.path);
}
var iter2 = FilePathIterator{ .paths = paths2.items };
@ -1216,13 +1216,13 @@ test "CLI unbundle with no args defaults to all .tar.zst files" {
var cwd = try tmp_dir.openDir(".", .{ .iterate = true });
defer cwd.close();
var found_archives = std.array_list.Managed([]const u8).init(allocator);
defer found_archives.deinit();
var found_archives = std.ArrayList([]const u8).empty;
defer found_archives.deinit(allocator);
var iter = cwd.iterate();
while (try iter.next()) |entry| {
if (entry.kind == .file and std.mem.endsWith(u8, entry.name, ".tar.zst")) {
try found_archives.append(entry.name);
try found_archives.append(allocator, entry.name);
}
}

View file

@ -218,14 +218,14 @@ test "different compression levels" {
);
defer reader.deinit();
var decompressed = std.array_list.Managed(u8).init(allocator);
defer decompressed.deinit();
var decompressed = std.ArrayList(u8).empty;
defer decompressed.deinit(allocator);
var buffer: [1024]u8 = undefined;
while (true) {
const n = try reader.read(&buffer);
if (n == 0) break;
try decompressed.appendSlice(buffer[0..n]);
try decompressed.appendSlice(allocator, buffer[0..n]);
}
try std.testing.expectEqualStrings(test_data, decompressed.items);

View file

@ -24,39 +24,6 @@ const DataSpan = base.DataSpan;
const ModuleEnv = @import("ModuleEnv.zig");
const Node = @import("Node.zig");
/// Both the canonicalized expression and any free variables
///
/// We keep track of the free variables as we go so we can union these
/// in our Lambda's in a single forward pass during canonicalization.
pub const CanonicalizedExpr = struct {
idx: Expr.Idx,
free_vars: ?[]Pattern.Idx,
pub fn get_idx(self: @This()) Expr.Idx {
return self.idx;
}
pub fn maybe_expr_get_idx(self: ?@This()) ?Expr.Idx {
if (self != null) {
return self.?.idx;
} else {
return null;
}
}
};
const TypeVarProblemKind = enum {
unused_type_var,
type_var_marked_unused,
type_var_ending_in_underscore,
};
const TypeVarProblem = struct {
ident: Ident.Idx,
problem: TypeVarProblemKind,
ast_anno: AST.TypeAnno.Idx,
};
/// Information about an auto-imported module type
pub const AutoImportedType = struct {
env: *const ModuleEnv,
@ -64,7 +31,7 @@ pub const AutoImportedType = struct {
env: *ModuleEnv,
parse_ir: *AST,
scopes: std.ArrayListUnmanaged(Scope) = .{},
scopes: std.ArrayList(Scope) = .{},
/// Special scope for rigid type variables in annotations
type_vars_scope: base.Scratch(TypeVarScope),
/// Special scope for tracking exposed items from module header
@ -103,6 +70,8 @@ scratch_seen_record_fields: base.Scratch(SeenRecordField),
scratch_tags: base.Scratch(types.Tag),
/// Scratch free variables
scratch_free_vars: base.Scratch(Pattern.Idx),
/// Scratch captures (free variables captured by the enclosing closure/block)
scratch_captures: base.Scratch(Pattern.Idx),
const Ident = base.Ident;
const Region = base.Region;
@ -137,6 +106,39 @@ const RecordField = CIR.RecordField;
/// Struct to track fields that have been seen before during canonicalization
const SeenRecordField = struct { ident: base.Ident.Idx, region: base.Region };
/// Both the canonicalized expression and any free variables
///
/// We keep track of the free variables as we go so we can union these
/// in our Lambda's in a single forward pass during canonicalization.
pub const CanonicalizedExpr = struct {
    idx: Expr.Idx,
    free_vars: ?DataSpan, // This is a span into scratch_free_vars

    /// Returns the canonicalized expression's index.
    pub fn get_idx(self: @This()) Expr.Idx {
        return self.idx;
    }

    /// Returns the expression index of an optional CanonicalizedExpr,
    /// or null when there is no expression.
    pub fn maybe_expr_get_idx(self: ?@This()) ?Expr.Idx {
        // `orelse` expresses the null-check-then-unwrap in one step.
        const can_expr = self orelse return null;
        return can_expr.idx;
    }
};
const TypeVarProblemKind = enum {
unused_type_var,
type_var_marked_unused,
type_var_ending_in_underscore,
};
const TypeVarProblem = struct {
ident: Ident.Idx,
problem: TypeVarProblemKind,
ast_anno: AST.TypeAnno.Idx,
};
/// Deinitialize canonicalizer resources
pub fn deinit(
self: *Self,
@ -169,6 +171,7 @@ pub fn deinit(
self.import_indices.deinit(gpa);
self.scratch_tags.deinit();
self.scratch_free_vars.deinit();
self.scratch_captures.deinit();
}
/// Options for initializing the canonicalizer.
@ -201,6 +204,7 @@ pub fn init(
.scratch_tags = try base.Scratch(types.Tag).init(gpa),
.unqualified_nominal_tags = std.StringHashMapUnmanaged(Statement.Idx){},
.scratch_free_vars = try base.Scratch(Pattern.Idx).init(gpa),
.scratch_captures = try base.Scratch(Pattern.Idx).init(gpa),
};
// Top-level scope is not a function boundary
@ -1520,7 +1524,7 @@ fn bringImportIntoScope(
// const res = self.env.imports.getOrInsert(gpa, import_name, shorthand);
// if (res.was_present) {
// _ = self.env.problems.append(gpa, Problem.Canonicalize.make(.{ .DuplicateImport = .{
// _ = self.env.problems.append(Problem.Canonicalize.make(.{ .DuplicateImport = .{
// .duplicate_import_region = region,
// } }));
// }
@ -1947,19 +1951,7 @@ fn canonicalizeDeclWithAnnotation(
const trace = tracy.trace(@src());
defer trace.end();
const pattern_region = self.parse_ir.tokenizedRegionToRegion(self.parse_ir.store.getPattern(decl.pattern).to_tokenized_region());
const pattern_idx = blk: {
if (try self.canonicalizePattern(decl.pattern)) |idx| {
break :blk idx;
} else {
const malformed_idx = try self.env.pushMalformed(Pattern.Idx, Diagnostic{ .pattern_not_canonicalized = .{
.region = pattern_region,
} });
break :blk malformed_idx;
}
};
const pattern_idx = try self.canonicalizePatternOrMalformed(decl.pattern);
const can_expr = try self.canonicalizeExprOrMalformed(decl.body);
// Create the def entry and set def type variable to a flex var
@ -2066,8 +2058,8 @@ fn canonicalizeStringLike(
.span = can_str_span,
} }, region);
const free_vars_slice = self.scratch_free_vars.slice(free_vars_start, self.scratch_free_vars.top());
return CanonicalizedExpr{ .idx = expr_idx, .free_vars = if (free_vars_slice.len > 0) free_vars_slice else null };
const free_vars_span = self.scratch_free_vars.spanFrom(free_vars_start);
return CanonicalizedExpr{ .idx = expr_idx, .free_vars = if (free_vars_span.len > 0) free_vars_span else null };
}
fn canonicalizeSingleQuote(
@ -2218,8 +2210,8 @@ pub fn canonicalizeExpr(
},
}, region);
const free_vars_slice = self.scratch_free_vars.slice(free_vars_start, self.scratch_free_vars.top());
return CanonicalizedExpr{ .idx = expr_idx, .free_vars = if (free_vars_slice.len > 0) free_vars_slice else null };
const free_vars_span = self.scratch_free_vars.spanFrom(free_vars_start);
return CanonicalizedExpr{ .idx = expr_idx, .free_vars = if (free_vars_span.len > 0) free_vars_span else null };
},
.ident => |e| {
const region = self.parse_ir.tokenizedRegionToRegion(e.region);
@ -2250,8 +2242,7 @@ pub fn canonicalizeExpr(
const free_vars_start = self.scratch_free_vars.top();
try self.scratch_free_vars.append(found_pattern_idx);
const free_vars_slice = self.scratch_free_vars.slice(free_vars_start, self.scratch_free_vars.top());
return CanonicalizedExpr{ .idx = expr_idx, .free_vars = if (free_vars_slice.len > 0) free_vars_slice else null };
return CanonicalizedExpr{ .idx = expr_idx, .free_vars = DataSpan.init(free_vars_start, 1) };
},
.not_found => {
// Not a local qualified identifier, try module-qualified lookup
@ -2387,8 +2378,8 @@ pub fn canonicalizeExpr(
const free_vars_start = self.scratch_free_vars.top();
try self.scratch_free_vars.append(found_pattern_idx);
const free_vars_slice = self.scratch_free_vars.slice(free_vars_start, self.scratch_free_vars.top());
return CanonicalizedExpr{ .idx = expr_idx, .free_vars = if (free_vars_slice.len > 0) free_vars_slice else null };
const free_vars_span = self.scratch_free_vars.spanFrom(free_vars_start);
return CanonicalizedExpr{ .idx = expr_idx, .free_vars = if (free_vars_span.len > 0) free_vars_span else null };
},
.not_found => {
// Check if this identifier is an exposed item from an import
@ -2781,8 +2772,8 @@ pub fn canonicalizeExpr(
.e_list = .{ .elems = elems_span },
}, region);
const free_vars_slice = self.scratch_free_vars.slice(free_vars_start, self.scratch_free_vars.top());
return CanonicalizedExpr{ .idx = expr_idx, .free_vars = if (free_vars_slice.len > 0) free_vars_slice else null };
const free_vars_span = self.scratch_free_vars.spanFrom(free_vars_start);
return CanonicalizedExpr{ .idx = expr_idx, .free_vars = if (free_vars_span.len > 0) free_vars_span else null };
},
.tag => |e| {
const region = self.parse_ir.tokenizedRegionToRegion(e.region);
@ -2853,8 +2844,8 @@ pub fn canonicalizeExpr(
},
}, region);
const free_vars_slice = self.scratch_free_vars.slice(free_vars_start, self.scratch_free_vars.top());
return CanonicalizedExpr{ .idx = expr_idx, .free_vars = if (free_vars_slice.len > 0) free_vars_slice else null };
const free_vars_span = self.scratch_free_vars.spanFrom(free_vars_start);
return CanonicalizedExpr{ .idx = expr_idx, .free_vars = if (free_vars_span.len > 0) free_vars_span else null };
}
},
.record => |e| {
@ -2946,8 +2937,8 @@ pub fn canonicalizeExpr(
},
}, region);
const free_vars_slice = self.scratch_free_vars.slice(free_vars_start, self.scratch_free_vars.top());
return CanonicalizedExpr{ .idx = expr_idx, .free_vars = if (free_vars_slice.len > 0) free_vars_slice else null };
const free_vars_span = self.scratch_free_vars.spanFrom(free_vars_start);
return CanonicalizedExpr{ .idx = expr_idx, .free_vars = if (free_vars_span.len > 0) free_vars_span else null };
},
.lambda => |e| {
const region = self.parse_ir.tokenizedRegionToRegion(e.region);
@ -2960,7 +2951,7 @@ pub fn canonicalizeExpr(
try self.scopeEnter(self.env.gpa, true); // true = is_function_boundary
defer self.scopeExit(self.env.gpa) catch {};
// args
// Canonicalize the lambda args
const args_start = self.env.store.scratch.?.patterns.top();
for (self.parse_ir.store.patternSlice(e.args)) |arg_pattern_idx| {
if (try self.canonicalizePattern(arg_pattern_idx)) |pattern_idx| {
@ -2976,61 +2967,67 @@ pub fn canonicalizeExpr(
}
const args_span = try self.env.store.patternSpanFrom(args_start);
// body (this will detect and record captures)
const body_free_vars_start = self.scratch_free_vars.top();
const can_body = try self.canonicalizeExpr(e.body) orelse {
self.scratch_free_vars.clearFrom(body_free_vars_start);
const ast_body = self.parse_ir.store.getExpr(e.body);
const body_region = self.parse_ir.tokenizedRegionToRegion(ast_body.to_tokenized_region());
const malformed_idx = try self.env.pushMalformed(Expr.Idx, Diagnostic{
.lambda_body_not_canonicalized = .{ .region = body_region },
});
return CanonicalizedExpr{ .idx = malformed_idx, .free_vars = null };
};
// Define the set of captures
const captures_top = self.scratch_captures.top();
defer self.scratch_captures.clearFrom(captures_top);
// Determine captures: free variables in body minus variables bound by args
var bound_vars = std.AutoHashMapUnmanaged(Pattern.Idx, void){};
defer bound_vars.deinit(self.env.gpa);
for (self.env.store.slicePatterns(args_span)) |arg_pat_idx| {
try self.collectBoundVars(arg_pat_idx, &bound_vars);
}
// Canonicalize the lambda body
const body_idx = blk: {
const body_free_vars_start = self.scratch_free_vars.top();
defer self.scratch_free_vars.clearFrom(body_free_vars_start);
var captures_set = std.AutoHashMapUnmanaged(Pattern.Idx, void){};
defer captures_set.deinit(self.env.gpa);
const can_body = try self.canonicalizeExpr(e.body) orelse {
const ast_body = self.parse_ir.store.getExpr(e.body);
const body_region = self.parse_ir.tokenizedRegionToRegion(ast_body.to_tokenized_region());
const malformed_idx = try self.env.pushMalformed(Expr.Idx, Diagnostic{
.lambda_body_not_canonicalized = .{ .region = body_region },
});
return CanonicalizedExpr{ .idx = malformed_idx, .free_vars = null };
};
const body_free_vars_slice = can_body.free_vars orelse &.{};
for (body_free_vars_slice) |fv| {
if (!bound_vars.contains(fv)) {
try captures_set.put(self.env.gpa, fv, {});
// Determine captures: free variables in body minus variables bound by args
var bound_vars = std.AutoHashMapUnmanaged(Pattern.Idx, void){};
defer bound_vars.deinit(self.env.gpa);
for (self.env.store.slicePatterns(args_span)) |arg_pat_idx| {
try self.collectBoundVars(arg_pat_idx, &bound_vars);
}
}
// Now that we have the captures, we can clear the free variables from the body
// from the scratch buffer.
self.scratch_free_vars.clearFrom(body_free_vars_start);
const body_free_vars_slice = self.scratch_free_vars.sliceFromSpan(can_body.free_vars orelse DataSpan.empty());
for (body_free_vars_slice) |fv| {
if (!self.scratch_captures.contains(fv) and !bound_vars.contains(fv)) {
try self.scratch_captures.append(fv);
}
}
break :blk can_body.idx;
};
// Create the pure lambda expression first
const lambda_expr = Expr{
.e_lambda = .{
.args = args_span,
.body = can_body.idx,
.body = body_idx,
},
};
const lambda_idx = try self.env.addExpr(lambda_expr, region);
// Get a slice of the captured vars in the body
const captures_slice = self.scratch_captures.sliceFromStart(captures_top);
// If there are no captures, this is a pure lambda.
// Otherwise, it's a closure.
if (captures_set.count() == 0) {
// A pure lambda has no free variables.
// A pure lambda has no free variables.
if (captures_slice.len == 0) {
return CanonicalizedExpr{ .idx = lambda_idx, .free_vars = null };
}
// Otherwise, it's a closure.
// Copy the captures into the store
const capture_info: Expr.Capture.Span = blk: {
const scratch_start = self.env.store.scratch.?.captures.top();
var cap_it = captures_set.iterator();
while (cap_it.next()) |entry| {
const pattern_idx = entry.key_ptr.*;
for (captures_slice) |pattern_idx| {
const pattern = self.env.store.getPattern(pattern_idx);
const name = switch (pattern) {
.assign => |a| a.ident,
@ -3055,19 +3052,17 @@ pub fn canonicalizeExpr(
.captures = capture_info,
},
};
// The type of the closure is the same as the type of the pure lambda
const expr_idx = try self.env.addExpr(closure_expr, region);
// The free variables of the lambda are its captures.
// I need to add them to the global list and return a span.
// Copy the contiguous list to the backing array
const lambda_free_vars_start = self.scratch_free_vars.top();
var cap_it = captures_set.iterator();
while (cap_it.next()) |entry| {
try self.scratch_free_vars.append(entry.key_ptr.*);
for (captures_slice) |pattern_idx| {
try self.scratch_free_vars.append(pattern_idx);
}
const free_vars_slice = self.scratch_free_vars.slice(lambda_free_vars_start, self.scratch_free_vars.top());
return CanonicalizedExpr{ .idx = expr_idx, .free_vars = if (free_vars_slice.len > 0) free_vars_slice else null };
const free_vars_span = self.scratch_free_vars.spanFrom(lambda_free_vars_start);
return CanonicalizedExpr{ .idx = expr_idx, .free_vars = if (free_vars_span.len > 0) free_vars_span else null };
},
.record_updater => |_| {
const feature = try self.env.insertString("canonicalize record_updater expression");
@ -3141,8 +3136,8 @@ pub fn canonicalizeExpr(
.e_binop = Expr.Binop.init(op, can_lhs.idx, can_rhs.idx),
}, region);
const free_vars_slice = self.scratch_free_vars.slice(free_vars_start, self.scratch_free_vars.top());
return CanonicalizedExpr{ .idx = expr_idx, .free_vars = if (free_vars_slice.len > 0) free_vars_slice else null };
const free_vars_span = self.scratch_free_vars.spanFrom(free_vars_start);
return CanonicalizedExpr{ .idx = expr_idx, .free_vars = if (free_vars_span.len > 0) free_vars_span else null };
},
.suffix_single_question => |_| {
const feature = try self.env.insertString("canonicalize suffix_single_question expression");
@ -3262,8 +3257,8 @@ pub fn canonicalizeExpr(
},
}, region);
const free_vars_slice = self.scratch_free_vars.slice(free_vars_start, self.scratch_free_vars.top());
return CanonicalizedExpr{ .idx = expr_idx, .free_vars = if (free_vars_slice.len > 0) free_vars_slice else null };
const free_vars_span = self.scratch_free_vars.spanFrom(free_vars_start);
return CanonicalizedExpr{ .idx = expr_idx, .free_vars = if (free_vars_span.len > 0) free_vars_span else null };
},
.match => |m| {
const region = self.parse_ir.tokenizedRegionToRegion(m.region);
@ -3384,8 +3379,8 @@ pub fn canonicalizeExpr(
};
const expr_idx = try self.env.addExpr(CIR.Expr{ .e_match = match_expr }, region);
const free_vars_slice = self.scratch_free_vars.slice(free_vars_start, self.scratch_free_vars.top());
return CanonicalizedExpr{ .idx = expr_idx, .free_vars = if (free_vars_slice.len > 0) free_vars_slice else null };
const free_vars_span = self.scratch_free_vars.spanFrom(free_vars_start);
return CanonicalizedExpr{ .idx = expr_idx, .free_vars = if (free_vars_span.len > 0) free_vars_span else null };
},
.dbg => |d| {
// Debug expression - canonicalize the inner expression
@ -3513,10 +3508,10 @@ fn canonicalizeTagExpr(self: *Self, e: AST.TagExpr, mb_args: ?AST.Expr.Span, reg
},
}, region);
const free_vars_slice = self.scratch_free_vars.slice(free_vars_start, self.scratch_free_vars.top());
const free_vars_span = self.scratch_free_vars.spanFrom(free_vars_start);
return CanonicalizedExpr{
.idx = expr_idx,
.free_vars = if (free_vars_slice.len > 0) free_vars_slice else null,
.free_vars = free_vars_span,
};
},
.s_alias_decl => {
@ -3563,10 +3558,10 @@ fn canonicalizeTagExpr(self: *Self, e: AST.TagExpr, mb_args: ?AST.Expr.Span, reg
},
}, region);
const free_vars_slice = self.scratch_free_vars.slice(free_vars_start, self.scratch_free_vars.top());
const free_vars_span = self.scratch_free_vars.spanFrom(free_vars_start);
return CanonicalizedExpr{
.idx = expr_idx,
.free_vars = if (free_vars_slice.len > 0) free_vars_slice else null,
.free_vars = if (free_vars_span.len > 0) free_vars_span else null,
};
}
}
@ -3635,10 +3630,10 @@ fn canonicalizeTagExpr(self: *Self, e: AST.TagExpr, mb_args: ?AST.Expr.Span, reg
},
}, region);
const free_vars_slice = self.scratch_free_vars.slice(free_vars_start, self.scratch_free_vars.top());
const free_vars_span = self.scratch_free_vars.spanFrom(free_vars_start);
return CanonicalizedExpr{
.idx = expr_idx,
.free_vars = if (free_vars_slice.len > 0) free_vars_slice else null,
.free_vars = free_vars_span,
};
}
@ -3703,10 +3698,10 @@ fn canonicalizeTagExpr(self: *Self, e: AST.TagExpr, mb_args: ?AST.Expr.Span, reg
},
}, region);
const free_vars_slice = self.scratch_free_vars.slice(free_vars_start, self.scratch_free_vars.top());
const free_vars_span = self.scratch_free_vars.spanFrom(free_vars_start);
return CanonicalizedExpr{
.idx = expr_idx,
.free_vars = if (free_vars_slice.len > 0) free_vars_slice else null,
.free_vars = free_vars_span,
};
},
.s_alias_decl => {
@ -3806,10 +3801,10 @@ fn canonicalizeTagExpr(self: *Self, e: AST.TagExpr, mb_args: ?AST.Expr.Span, reg
},
}, region);
const free_vars_slice = self.scratch_free_vars.slice(free_vars_start, self.scratch_free_vars.top());
const free_vars_span = self.scratch_free_vars.spanFrom(free_vars_start);
return CanonicalizedExpr{
.idx = expr_idx,
.free_vars = if (free_vars_slice.len > 0) free_vars_slice else null,
.free_vars = if (free_vars_span.len > 0) free_vars_span else null,
};
}
}
@ -3895,6 +3890,21 @@ fn extractMultilineStringSegments(self: *Self, parts: []const AST.Expr.Idx) std.
return try self.env.store.exprSpanFrom(start);
}
/// Canonicalizes a pattern, falling back to a malformed-pattern node
/// (recording a `pattern_not_canonicalized` diagnostic) when
/// canonicalization fails.
fn canonicalizePatternOrMalformed(
    self: *Self,
    ast_pattern_idx: AST.Pattern.Idx,
) std.mem.Allocator.Error!Pattern.Idx {
    // Happy path: the pattern canonicalized cleanly.
    if (try self.canonicalizePattern(ast_pattern_idx)) |canonical_idx| return canonical_idx;

    // Failure path: push a diagnostic and return a malformed node in its place.
    const pattern_region = self.parse_ir.tokenizedRegionToRegion(self.parse_ir.store.getPattern(ast_pattern_idx).to_tokenized_region());
    return try self.env.pushMalformed(Pattern.Idx, Diagnostic{ .pattern_not_canonicalized = .{
        .region = pattern_region,
    } });
}
fn canonicalizePattern(
self: *Self,
ast_pattern_idx: AST.Pattern.Idx,
@ -6009,8 +6019,8 @@ fn canonicalizeBlock(self: *Self, e: AST.Block) std.mem.Allocator.Error!Canonica
var bound_vars = std.AutoHashMapUnmanaged(Pattern.Idx, void){};
defer bound_vars.deinit(self.env.gpa);
var captures = std.AutoHashMapUnmanaged(Pattern.Idx, void){};
defer captures.deinit(self.env.gpa);
const captures_top = self.scratch_captures.top();
defer self.scratch_captures.clearFrom(captures_top);
// Canonicalize all statements in the block
const ast_stmt_idxs = self.parse_ir.store.statementSlice(e.statements);
@ -6105,11 +6115,10 @@ fn canonicalizeBlock(self: *Self, e: AST.Block) std.mem.Allocator.Error!Canonica
}
// Collect free vars from the statement into the block's scratch space
if (canonicailzed_stmt.free_vars) |fvs| {
for (fvs) |fv| {
if (!bound_vars.contains(fv)) {
try captures.put(self.env.gpa, fv, {});
}
const stmt_free_vars_slice = self.scratch_free_vars.sliceFromSpan(canonicailzed_stmt.free_vars orelse DataSpan.empty());
for (stmt_free_vars_slice) |fv| {
if (!self.scratch_captures.contains(fv) and !bound_vars.contains(fv)) {
try self.scratch_captures.append(fv);
}
}
}
@ -6136,21 +6145,22 @@ fn canonicalizeBlock(self: *Self, e: AST.Block) std.mem.Allocator.Error!Canonica
};
// Add free vars from the final expression to the block's scratch space
if (final_expr.free_vars) |fvs| {
for (fvs) |fv| {
if (!bound_vars.contains(fv)) {
try captures.put(self.env.gpa, fv, {});
}
const final_expr_free_vars_slice = self.scratch_free_vars.sliceFromSpan(final_expr.free_vars orelse DataSpan.empty());
for (final_expr_free_vars_slice) |fv| {
if (!self.scratch_captures.contains(fv) and !bound_vars.contains(fv)) {
try self.scratch_captures.append(fv);
}
}
// Get a slice of the captured vars in the block
const captures_slice = self.scratch_captures.sliceFromStart(captures_top);
// Add the actual free variables (captures) to the parent's scratch space
const captures_start = self.scratch_free_vars.top();
var cap_it = captures.iterator();
while (cap_it.next()) |entry| {
try self.scratch_free_vars.append(entry.key_ptr.*);
const block_captures_start = self.scratch_free_vars.top();
for (captures_slice) |ptrn_idx| {
try self.scratch_free_vars.append(ptrn_idx);
}
const captures_slice = self.scratch_free_vars.slice(captures_start, self.scratch_free_vars.top());
const block_free_vars = self.scratch_free_vars.spanFrom(block_captures_start);
// Create statement span
const stmt_span = try self.env.store.statementSpanFrom(stmt_start);
@ -6164,7 +6174,7 @@ fn canonicalizeBlock(self: *Self, e: AST.Block) std.mem.Allocator.Error!Canonica
};
const block_idx = try self.env.addExpr(block_expr, block_region);
return CanonicalizedExpr{ .idx = block_idx, .free_vars = if (captures_slice.len > 0) captures_slice else null };
return CanonicalizedExpr{ .idx = block_idx, .free_vars = if (block_free_vars.len > 0) block_free_vars else null };
}
const StatementResult = struct {
@ -6313,6 +6323,13 @@ pub fn canonicalizeBlockStatement(self: *Self, ast_stmt: AST.Statement, ast_stmt
mb_canonicailzed_stmt = CanonicalizedStatement{ .idx = stmt_idx, .free_vars = expr.free_vars };
},
.@"return" => |r| {
// To implement early returns and make them usable, we need to:
// 1. Update the parse to allow for if statements (as opposed to if expressions)
// 2. Track function scope in czer and capture the function for this return in `s_return`
// 3. When type checking a lambda, capture all early returns
// a. Unify all early returns together
// b. Unify early returns with func return type
const region = self.parse_ir.tokenizedRegionToRegion(r.region);
// Canonicalize the return expression
@ -6433,14 +6450,90 @@ pub fn canonicalizeBlockStatement(self: *Self, ast_stmt: AST.Statement, ast_stmt
// then in the canonicalize IR
_ = try self.canonicalizeImportStatement(import_stmt);
},
else => {
// Other statement types not yet implemented
const feature = try self.env.insertString("statement type in block");
const malformed_idx = try self.env.pushMalformed(Statement.Idx, Diagnostic{ .not_implemented = .{
.feature = feature,
.region = Region.zero(),
} });
mb_canonicailzed_stmt = CanonicalizedStatement{ .idx = malformed_idx, .free_vars = null };
.@"for" => |for_stmt| {
// Tmp state to capture free vars from both expr & body
//
// This is stored as a map, so we can avoid adding duplicate captures
// if both the expr and the body reference the same var
var captures = std.AutoHashMapUnmanaged(Pattern.Idx, void){};
defer captures.deinit(self.env.gpa);
// Canoncalize the list expr
// for item in [1,2,3] {
// ^^^^^^^
const expr = blk: {
const body_free_vars_start = self.scratch_free_vars.top();
defer self.scratch_free_vars.clearFrom(body_free_vars_start);
const czerd_expr = try self.canonicalizeExprOrMalformed(for_stmt.expr);
// Copy free vars into scratch array
const free_vars_slice = self.scratch_free_vars.sliceFromSpan(czerd_expr.free_vars orelse DataSpan.empty());
for (free_vars_slice) |fv| {
try captures.put(self.env.gpa, fv, {});
}
break :blk czerd_expr;
};
// Canoncalize the pattern
// for item in [1,2,3] {
// ^^^^
const ptrn = try self.canonicalizePatternOrMalformed(for_stmt.patt);
// Collect bound vars from pattern
var for_bound_vars = std.AutoHashMapUnmanaged(Pattern.Idx, void){};
defer for_bound_vars.deinit(self.env.gpa);
try self.collectBoundVars(ptrn, &for_bound_vars);
// Canoncalize the body
// for item in [1,2,3] {
// print!(item.toStr()) <<<<
// }
// Canonicalize body with scoping
const body = blk: {
const body_free_vars_start = self.scratch_free_vars.top();
defer self.scratch_free_vars.clearFrom(body_free_vars_start);
const body_expr = try self.canonicalizeExprOrMalformed(for_stmt.body);
// Copy free vars into scratch array
const body_free_vars_slice = self.scratch_free_vars.sliceFromSpan(body_expr.free_vars orelse DataSpan.empty());
for (body_free_vars_slice) |fv| {
if (!for_bound_vars.contains(fv)) {
try captures.put(self.env.gpa, fv, {});
}
}
break :blk body_expr;
};
// Get captures and copy to free_vars for parent
const free_vars_start = self.scratch_free_vars.top();
var captures_iter = captures.keyIterator();
while (captures_iter.next()) |capture| {
try self.scratch_free_vars.append(capture.*);
}
const free_vars = if (self.scratch_free_vars.top() > free_vars_start)
self.scratch_free_vars.spanFrom(free_vars_start)
else
null;
// Insert into store
const region = self.parse_ir.tokenizedRegionToRegion(for_stmt.region);
const stmt_idx = try self.env.addStatement(Statement{
.s_for = .{
.patt = ptrn,
.expr = expr.idx,
.body = body.idx,
},
}, region);
mb_canonicailzed_stmt = CanonicalizedStatement{ .idx = stmt_idx, .free_vars = free_vars };
},
.malformed => |_| {
// Stmt was malformed, parse reports this error, so do nothing here
mb_canonicailzed_stmt = null;
},
}
@ -6544,7 +6637,7 @@ pub fn canonicalizeBlockDecl(self: *Self, d: AST.Statement.Decl, mb_last_anno: ?
// A canonicalized statement
const CanonicalizedStatement = struct {
idx: Statement.Idx,
free_vars: ?[]Pattern.Idx,
free_vars: ?DataSpan, // This is a span into scratch_free_vars
};
// special type var scope //
@ -6964,8 +7057,8 @@ fn checkScopeForUnusedVariables(self: *Self, scope: *const Scope) std.mem.Alloca
const UnusedVar = struct { ident: base.Ident.Idx, region: Region };
// Collect all unused variables first so we can sort them
var unused_vars = std.array_list.Managed(UnusedVar).init(self.env.gpa);
defer unused_vars.deinit();
var unused_vars = std.ArrayList(UnusedVar).empty;
defer unused_vars.deinit(self.env.gpa);
// Iterate through all identifiers in this scope
var iterator = scope.idents.iterator();
@ -7010,7 +7103,7 @@ fn checkScopeForUnusedVariables(self: *Self, scope: *const Scope) std.mem.Alloca
const region = self.env.store.getPatternRegion(pattern_idx);
// Collect unused variable for sorting
try unused_vars.append(.{
try unused_vars.append(self.env.gpa, .{
.ident = ident_idx,
.region = region,
});

View file

@ -20,7 +20,7 @@ const ModuleEnv = @import("ModuleEnv.zig");
/// Edges point from dependent to dependency (A -> B means A depends on B).
pub const DependencyGraph = struct {
/// Map from def_idx to list of def_idx it depends on
edges: std.AutoHashMapUnmanaged(CIR.Def.Idx, std.ArrayListUnmanaged(CIR.Def.Idx)),
edges: std.AutoHashMapUnmanaged(CIR.Def.Idx, std.ArrayList(CIR.Def.Idx)),
/// All defs in the graph
nodes: []const CIR.Def.Idx,
@ -354,13 +354,13 @@ const TarjanState = struct {
visited: std.AutoHashMapUnmanaged(CIR.Def.Idx, void),
/// Stack for Tarjan's algorithm
stack: std.ArrayListUnmanaged(CIR.Def.Idx),
stack: std.ArrayList(CIR.Def.Idx),
/// Set of nodes currently on stack
on_stack: std.AutoHashMapUnmanaged(CIR.Def.Idx, void),
/// Resulting SCCs (in reverse topological order during construction)
sccs: std.ArrayListUnmanaged(SCC),
sccs: std.ArrayList(SCC),
allocator: std.mem.Allocator,
@ -422,7 +422,7 @@ const TarjanState = struct {
const v_lowlink = self.lowlinks.get(v).?;
const v_index = self.indices.get(v).?;
if (v_lowlink == v_index) {
var scc_defs = std.ArrayListUnmanaged(CIR.Def.Idx){};
var scc_defs = std.ArrayList(CIR.Def.Idx){};
while (true) {
const w = self.stack.pop() orelse unreachable; // Stack should not be empty

File diff suppressed because it is too large Load diff

View file

@ -1589,6 +1589,13 @@ fn generateBuiltinTypeInstance(
}
}
// types //
/// The type expectation that an expression (or pattern) is checked against.
const Expected = union(enum) {
    /// No expected type: the node's type is inferred freely.
    no_expectation,
    /// Check against `var_`. `from_annotation` records whether the expectation
    /// originated from a user-written type annotation (it is set to `true`
    /// where declaration annotations are generated).
    expected: struct { var_: Var, from_annotation: bool },
};
// pattern //
/// Check the types for the provided pattern, saving the type in-place
@ -2036,11 +2043,6 @@ fn checkPatternHelp(
// expr //
const Expected = union(enum) {
no_expectation,
expected: struct { var_: Var, from_annotation: bool },
};
fn checkExpr(self: *Self, expr_idx: CIR.Expr.Idx, rank: types_mod.Rank, expected: Expected) std.mem.Allocator.Error!bool {
const trace = tracy.trace(@src());
defer trace.end();
@ -2474,110 +2476,7 @@ fn checkExpr(self: *Self, expr_idx: CIR.Expr.Idx, rank: types_mod.Rank, expected
// Check all statements in the block
const statements = self.cir.store.sliceStatements(block.stmts);
for (statements) |stmt_idx| {
const stmt = self.cir.store.getStatement(stmt_idx);
switch (stmt) {
.s_decl => |decl_stmt| {
// Check the pattern
try self.checkPattern(decl_stmt.pattern, rank, .no_expectation);
const decl_pattern_var: Var = ModuleEnv.varFrom(decl_stmt.pattern);
// Check the annotation, if it exists
const check_mode = blk: {
if (decl_stmt.anno) |annotation_idx| {
// Generate the annotation type var in-place
try self.generateAnnotationType(annotation_idx);
const annotation_var = ModuleEnv.varFrom(annotation_idx);
// Return the expectation
break :blk Expected{
.expected = .{ .var_ = annotation_var, .from_annotation = true },
};
} else {
break :blk Expected.no_expectation;
}
};
{
// Enter a new rank
try self.var_pool.pushRank();
defer self.var_pool.popRank();
const next_rank = rank.next();
std.debug.assert(next_rank == self.var_pool.current_rank);
does_fx = try self.checkExpr(decl_stmt.expr, next_rank, check_mode) or does_fx;
// Now that we are existing the scope, we must generalize then pop this rank
try self.generalizer.generalize(&self.var_pool, next_rank);
}
// Unify the pattern with the expression
const decl_expr_var: Var = ModuleEnv.varFrom(decl_stmt.expr);
_ = try self.unify(decl_pattern_var, decl_expr_var, rank);
},
.s_reassign => |reassign| {
// Check the pattern
try self.checkPattern(reassign.pattern_idx, rank, .no_expectation);
const reassign_pattern_var: Var = ModuleEnv.varFrom(reassign.pattern_idx);
{
// Enter a new rank
try self.var_pool.pushRank();
defer self.var_pool.popRank();
const next_rank = rank.next();
std.debug.assert(next_rank == self.var_pool.current_rank);
does_fx = try self.checkExpr(reassign.expr, next_rank, .no_expectation) or does_fx;
// Now that we are existing the scope, we must generalize then pop this rank
try self.generalizer.generalize(&self.var_pool, next_rank);
}
// Unify the pattern with the expression
const reassign_expr_var: Var = ModuleEnv.varFrom(reassign.expr);
_ = try self.unify(reassign_pattern_var, reassign_expr_var, rank);
},
.s_expr => |expr_stmt| {
does_fx = try self.checkExpr(expr_stmt.expr, rank, .no_expectation) or does_fx;
},
.s_expect => |expr_stmt| {
does_fx = try self.checkExpr(expr_stmt.body, rank, .no_expectation) or does_fx;
const stmt_expr: Var = ModuleEnv.varFrom(expr_stmt.body);
const bool_var = try self.freshBool(rank, expr_region);
_ = try self.unify(bool_var, stmt_expr, rank);
},
.s_var => |var_stmt| {
// Check the pattern
try self.checkPattern(var_stmt.pattern_idx, rank, .no_expectation);
const var_pattern_var: Var = ModuleEnv.varFrom(var_stmt.pattern_idx);
{
// Enter a new rank
try self.var_pool.pushRank();
defer self.var_pool.popRank();
const next_rank = rank.next();
std.debug.assert(next_rank == self.var_pool.current_rank);
does_fx = try self.checkExpr(var_stmt.expr, next_rank, Expected.no_expectation) or does_fx;
// Now that we are existing the scope, we must generalize then pop this rank
try self.generalizer.generalize(&self.var_pool, next_rank);
}
// Unify the pattern with the expression
const var_expr_var: Var = ModuleEnv.varFrom(var_stmt.expr);
_ = try self.unify(var_pattern_var, var_expr_var, rank);
},
else => {
// TODO
},
}
}
does_fx = try self.checkBlockStatements(statements, rank, expr_region) or does_fx;
// Check the final expression
does_fx = try self.checkExpr(block.final_expr, rank, expected) or does_fx;
@ -3093,6 +2992,199 @@ fn checkExpr(self: *Self, expr_idx: CIR.Expr.Idx, rank: types_mod.Rank, expected
return does_fx;
}
// stmts //
/// Given a slice of stmts, type check each one.
///
/// Returns `true` if any statement (transitively) performs an effect.
/// Each statement's own type variable is redirected (or updated) in-place so
/// that later lookups of the statement node resolve to a concrete type.
fn checkBlockStatements(self: *Self, statements: []const CIR.Statement.Idx, rank: types_mod.Rank, _: Region) std.mem.Allocator.Error!bool {
    var does_fx = false;
    for (statements) |stmt_idx| {
        const stmt = self.cir.store.getStatement(stmt_idx);
        const stmt_var = ModuleEnv.varFrom(stmt_idx);
        const stmt_region = self.cir.store.getNodeRegion(ModuleEnv.nodeIdxFrom(stmt_idx));
        switch (stmt) {
            .s_decl => |decl_stmt| {
                // Check the pattern
                try self.checkPattern(decl_stmt.pattern, rank, .no_expectation);
                const decl_pattern_var: Var = ModuleEnv.varFrom(decl_stmt.pattern);

                // Check the annotation, if it exists
                const expectation = blk: {
                    if (decl_stmt.anno) |annotation_idx| {
                        // Generate the annotation type var in-place
                        try self.generateAnnotationType(annotation_idx);
                        const annotation_var = ModuleEnv.varFrom(annotation_idx);

                        // Return the expectation
                        break :blk Expected{
                            .expected = .{ .var_ = annotation_var, .from_annotation = true },
                        };
                    } else {
                        break :blk Expected.no_expectation;
                    }
                };

                // Evaluate the rhs of the expression
                {
                    // Enter a new rank
                    try self.var_pool.pushRank();
                    defer self.var_pool.popRank();
                    const next_rank = rank.next();
                    std.debug.assert(next_rank == self.var_pool.current_rank);

                    does_fx = try self.checkExpr(decl_stmt.expr, next_rank, expectation) or does_fx;

                    // Now that we are exiting the scope, we must generalize then pop this rank
                    try self.generalizer.generalize(&self.var_pool, next_rank);
                }

                // Unify the pattern with the expression
                const decl_expr_var: Var = ModuleEnv.varFrom(decl_stmt.expr);
                _ = try self.unify(decl_pattern_var, decl_expr_var, rank);

                // The statement's type is the declared expression's type.
                try self.types.setVarRedirect(stmt_var, decl_expr_var);
            },
            .s_var => |var_stmt| {
                // Check the pattern
                try self.checkPattern(var_stmt.pattern_idx, rank, .no_expectation);
                const reassign_pattern_var: Var = ModuleEnv.varFrom(var_stmt.pattern_idx);

                // Evaluate the rhs of the expression
                {
                    // Enter a new rank
                    try self.var_pool.pushRank();
                    defer self.var_pool.popRank();
                    const next_rank = rank.next();
                    std.debug.assert(next_rank == self.var_pool.current_rank);

                    does_fx = try self.checkExpr(var_stmt.expr, next_rank, .no_expectation) or does_fx;

                    // Now that we are exiting the scope, we must generalize then pop this rank
                    try self.generalizer.generalize(&self.var_pool, next_rank);
                }

                // Unify the pattern with the expression
                const var_expr: Var = ModuleEnv.varFrom(var_stmt.expr);
                _ = try self.unify(reassign_pattern_var, var_expr, rank);

                try self.types.setVarRedirect(stmt_var, var_expr);
            },
            .s_reassign => |reassign| {
                // Check the pattern
                try self.checkPattern(reassign.pattern_idx, rank, .no_expectation);
                const reassign_pattern_var: Var = ModuleEnv.varFrom(reassign.pattern_idx);

                // Evaluate the rhs of the expression
                {
                    // Enter a new rank
                    try self.var_pool.pushRank();
                    defer self.var_pool.popRank();
                    const next_rank = rank.next();
                    std.debug.assert(next_rank == self.var_pool.current_rank);

                    does_fx = try self.checkExpr(reassign.expr, next_rank, .no_expectation) or does_fx;

                    // Now that we are exiting the scope, we must generalize then pop this rank
                    try self.generalizer.generalize(&self.var_pool, next_rank);
                }

                // Unify the pattern with the expression
                const reassign_expr_var: Var = ModuleEnv.varFrom(reassign.expr);
                _ = try self.unify(reassign_pattern_var, reassign_expr_var, rank);

                try self.types.setVarRedirect(stmt_var, reassign_expr_var);
            },
            .s_for => |for_stmt| {
                // Check the pattern
                // for item in [1,2,3] {
                //     ^^^^
                try self.checkPattern(for_stmt.patt, rank, .no_expectation);
                const for_ptrn_var: Var = ModuleEnv.varFrom(for_stmt.patt);

                // Check the expr
                // for item in [1,2,3] {
                //             ^^^^^^^
                does_fx = try self.checkExpr(for_stmt.expr, rank, .no_expectation) or does_fx;
                const for_expr_region = self.cir.store.getNodeRegion(ModuleEnv.nodeIdxFrom(for_stmt.expr));
                const for_expr_var: Var = ModuleEnv.varFrom(for_stmt.expr);

                // Check that the expr is list of the ptrn
                const list_var = try self.freshFromContent(.{ .structure = .{ .list = for_ptrn_var } }, rank, for_expr_region);
                _ = try self.unify(list_var, for_expr_var, rank);

                // Check the body
                // for item in [1,2,3] {
                //    print!(item.toStr())   <<<<
                // }
                does_fx = try self.checkExpr(for_stmt.body, rank, .no_expectation) or does_fx;
                const for_body_var: Var = ModuleEnv.varFrom(for_stmt.body);

                // Check that the for body evaluates to {}
                // NOTE(review): this reuses the *expr*'s region for the body's
                // `{}` expectation — confirm the body's region isn't intended here.
                const body_ret = try self.freshFromContent(.{ .structure = .empty_record }, rank, for_expr_region);
                _ = try self.unify(body_ret, for_body_var, rank);

                try self.types.setVarRedirect(stmt_var, for_body_var);
            },
            .s_expr => |expr| {
                does_fx = try self.checkExpr(expr.expr, rank, .no_expectation) or does_fx;

                // A bare statement expression must evaluate to `{}` (or already
                // be an error); anything else is reported as an unused value.
                const expr_var: Var = ModuleEnv.varFrom(expr.expr);
                const resolved = self.types.resolveVar(expr_var).desc.content;
                if (resolved == .err or (resolved == .structure and resolved.structure == .empty_record)) {
                    // If this type resolves to an empty record, then we are good!
                } else {
                    const snapshot = try self.snapshots.deepCopyVar(self.types, expr_var);
                    _ = try self.problems.appendProblem(self.cir.gpa, .{ .unused_value = .{
                        .var_ = expr_var,
                        .snapshot = snapshot,
                    } });
                }

                try self.types.setVarRedirect(stmt_var, expr_var);
            },
            .s_dbg => |expr| {
                // `dbg` passes its value through; the statement takes the expr's type.
                does_fx = try self.checkExpr(expr.expr, rank, .no_expectation) or does_fx;
                const expr_var: Var = ModuleEnv.varFrom(expr.expr);
                try self.types.setVarRedirect(stmt_var, expr_var);
            },
            .s_expect => |expr_stmt| {
                // The body of an `expect` must unify with Bool.
                does_fx = try self.checkExpr(expr_stmt.body, rank, .no_expectation) or does_fx;
                const body_var: Var = ModuleEnv.varFrom(expr_stmt.body);
                const bool_var = try self.freshBool(rank, stmt_region);
                _ = try self.unify(bool_var, body_var, rank);
                try self.types.setVarRedirect(stmt_var, body_var);
            },
            .s_crash => |_| {
                // `crash` diverges, so its type is a fresh flex var that
                // unifies with anything.
                try self.updateVar(stmt_var, .{ .flex = Flex.init() }, rank);
            },
            .s_return => |_| {
                // To implement early returns and make them usable, we need to:
                // 1. Update the parse to allow for if statements (as opposed to if expressions)
                // 2. Track function scope in czer and capture the function for this return in `s_return`
                // 3. When type checking a lambda, capture all early returns
                //    a. Unify all early returns together
                //    b. Unify early returns with func return type
                try self.updateVar(stmt_var, .{ .structure = .empty_record }, rank);
            },
            .s_import, .s_alias_decl, .s_nominal_decl, .s_type_anno => {
                // These are only valid at the top level, czer reports error
                try self.updateVar(stmt_var, .err, rank);
            },
            .s_runtime_error => {
                try self.updateVar(stmt_var, .err, rank);
            },
        }
    }
    return does_fx;
}
// if-else //
/// Check the types for an if-else expr

View file

@ -165,17 +165,17 @@ fn copyAlias(
const type_name_str = source_idents.getText(source_alias.ident.ident_idx);
const translated_ident = try dest_idents.insert(allocator, base.Ident.for_text(type_name_str));
var dest_args = std.array_list.Managed(Var).init(dest_store.gpa);
defer dest_args.deinit();
var dest_args = std.ArrayList(Var).empty;
defer dest_args.deinit(dest_store.gpa);
const origin_backing = source_store.getAliasBackingVar(source_alias);
const dest_backing = try copyVar(source_store, dest_store, origin_backing, var_mapping, source_idents, dest_idents, allocator);
try dest_args.append(dest_backing);
try dest_args.append(dest_store.gpa, dest_backing);
const origin_args = source_store.sliceAliasArgs(source_alias);
for (origin_args) |arg_var| {
const dest_arg = try copyVar(source_store, dest_store, arg_var, var_mapping, source_idents, dest_idents, allocator);
try dest_args.append(dest_arg);
try dest_args.append(dest_store.gpa, dest_arg);
}
const dest_vars_span = try dest_store.appendVars(dest_args.items);
@ -225,12 +225,12 @@ fn copyTuple(
) std.mem.Allocator.Error!types_mod.Tuple {
const elems_slice = source_store.sliceVars(tuple.elems);
var dest_elems = std.array_list.Managed(Var).init(dest_store.gpa);
defer dest_elems.deinit();
var dest_elems = std.ArrayList(Var).empty;
defer dest_elems.deinit(dest_store.gpa);
for (elems_slice) |elem_var| {
const dest_elem = try copyVar(source_store, dest_store, elem_var, var_mapping, source_idents, dest_idents, allocator);
try dest_elems.append(dest_elem);
try dest_elems.append(dest_store.gpa, dest_elem);
}
const dest_range = try dest_store.appendVars(dest_elems.items);
@ -270,12 +270,12 @@ fn copyFunc(
) std.mem.Allocator.Error!Func {
const args_slice = source_store.sliceVars(func.args);
var dest_args = std.array_list.Managed(Var).init(dest_store.gpa);
defer dest_args.deinit();
var dest_args = std.ArrayList(Var).empty;
defer dest_args.deinit(dest_store.gpa);
for (args_slice) |arg_var| {
const dest_arg = try copyVar(source_store, dest_store, arg_var, var_mapping, source_idents, dest_idents, allocator);
try dest_args.append(dest_arg);
try dest_args.append(dest_store.gpa, dest_arg);
}
const dest_ret = try copyVar(source_store, dest_store, func.ret, var_mapping, source_idents, dest_idents, allocator);
@ -299,13 +299,13 @@ fn copyRecordFields(
) std.mem.Allocator.Error!types_mod.RecordField.SafeMultiList.Range {
const source_fields = source_store.getRecordFieldsSlice(fields_range);
var fresh_fields = std.array_list.Managed(RecordField).init(allocator);
defer fresh_fields.deinit();
var fresh_fields = std.ArrayList(RecordField).empty;
defer fresh_fields.deinit(allocator);
for (source_fields.items(.name), source_fields.items(.var_)) |name, var_| {
const name_str = source_idents.getText(name);
const translated_name = try dest_idents.insert(allocator, base.Ident.for_text(name_str));
_ = try fresh_fields.append(.{
_ = try fresh_fields.append(allocator, .{
.name = translated_name, // Field names are local to the record type
.var_ = try copyVar(source_store, dest_store, var_, var_mapping, source_idents, dest_idents, allocator),
});
@ -350,18 +350,18 @@ fn copyTagUnion(
) std.mem.Allocator.Error!TagUnion {
const tags_slice = source_store.getTagsSlice(tag_union.tags);
var fresh_tags = std.array_list.Managed(Tag).init(allocator);
defer fresh_tags.deinit();
var fresh_tags = std.ArrayList(Tag).empty;
defer fresh_tags.deinit(allocator);
for (tags_slice.items(.name), tags_slice.items(.args)) |name, args_range| {
const args_slice = source_store.sliceVars(args_range);
var dest_args = std.array_list.Managed(Var).init(dest_store.gpa);
defer dest_args.deinit();
var dest_args = std.ArrayList(Var).empty;
defer dest_args.deinit(dest_store.gpa);
for (args_slice) |arg_var| {
const dest_arg = try copyVar(source_store, dest_store, arg_var, var_mapping, source_idents, dest_idents, allocator);
try dest_args.append(dest_arg);
try dest_args.append(dest_store.gpa, dest_arg);
}
const dest_args_range = try dest_store.appendVars(dest_args.items);
@ -369,7 +369,7 @@ fn copyTagUnion(
const name_str = source_idents.getText(name);
const translated_name = try dest_idents.insert(allocator, base.Ident.for_text(name_str));
_ = try fresh_tags.append(.{
_ = try fresh_tags.append(allocator, .{
.name = translated_name, // Tag names are local to the union type
.args = dest_args_range,
});
@ -400,17 +400,17 @@ fn copyNominalType(
const origin_str = source_idents.getText(source_nominal.origin_module);
const translated_origin = try dest_idents.insert(allocator, base.Ident.for_text(origin_str));
var dest_args = std.array_list.Managed(Var).init(dest_store.gpa);
defer dest_args.deinit();
var dest_args = std.ArrayList(Var).empty;
defer dest_args.deinit(dest_store.gpa);
const origin_backing = source_store.getNominalBackingVar(source_nominal);
const dest_backing = try copyVar(source_store, dest_store, origin_backing, var_mapping, source_idents, dest_idents, allocator);
try dest_args.append(dest_backing);
try dest_args.append(dest_store.gpa, dest_backing);
const origin_args = source_store.sliceNominalArgs(source_nominal);
for (origin_args) |arg_var| {
const dest_arg = try copyVar(source_store, dest_store, arg_var, var_mapping, source_idents, dest_idents, allocator);
try dest_args.append(dest_arg);
try dest_args.append(dest_store.gpa, dest_arg);
}
const dest_vars_span = try dest_store.appendVars(dest_args.items);

View file

@ -39,6 +39,7 @@ pub const Problem = union(enum) {
static_dispach: StaticDispatch,
number_does_not_fit: NumberDoesNotFit,
negative_unsigned_int: NegativeUnsignedInt,
unused_value: UnusedValue,
infinite_recursion: struct { var_: Var },
anonymous_recursion: struct { var_: Var },
invalid_number_type: VarProblem1,
@ -91,6 +92,12 @@ pub const NegativeUnsignedInt = struct {
expected_type: SnapshotContentIdx,
};
/// Error when a statement expression produces a value that is discarded —
/// i.e. its type resolves to something other than an error or the empty
/// record `{}`.
pub const UnusedValue = struct {
    /// The type variable of the unused expression (also used to recover its region).
    var_: Var,
    /// Snapshot of the expression's type, for rendering in the report.
    snapshot: SnapshotContentIdx,
};
// type mismatch //
/// These two variables mismatch. This should usually be cast into a more
@ -351,6 +358,9 @@ pub const ReportBuilder = struct {
.negative_unsigned_int => |data| {
return self.buildNegativeUnsignedIntReport(data);
},
.unused_value => |data| {
return self.buildUnusedValueReport(data);
},
.infinite_recursion => |_| return self.buildUnimplementedReport("infinite_recursion"),
.anonymous_recursion => |_| return self.buildUnimplementedReport("anonymous_recursion"),
.invalid_number_type => |_| return self.buildUnimplementedReport("invalid_number_type"),
@ -1778,6 +1788,38 @@ pub const ReportBuilder = struct {
return report;
}
/// Build a report for an "unused value" diagnostic: a statement expression
/// produced a value that was not consumed.
/// (Previous comment said "negative unsigned integer" — copy-paste error.)
fn buildUnusedValueReport(self: *Self, data: UnusedValue) !Report {
    var report = Report.init(self.gpa, "UNUSED VALUE", .runtime_error);
    errdefer report.deinit();

    // Render the offending expression's type from its snapshot.
    self.snapshot_writer.resetContext();
    try self.snapshot_writer.write(data.snapshot);
    const owned_expected = try report.addOwnedString(self.snapshot_writer.get());

    // Recover the source region via the expression's type-variable index,
    // which doubles as a node index into the region store.
    const region = self.can_ir.store.regions.get(@enumFromInt(@intFromEnum(data.var_)));
    const region_info = self.module_env.calcRegionInfo(region.*);

    try report.document.addReflowingText("This expression produces a value, but it's not being used:");
    try report.document.addLineBreak();
    try report.document.addSourceRegion(
        region_info,
        .error_highlight,
        self.filename,
        self.source,
        self.module_env.getLineStarts(),
    );
    try report.document.addLineBreak();
    try report.document.addReflowingText("It has the type:");
    try report.document.addLineBreak();
    try report.document.addText("    ");
    try report.document.addAnnotated(owned_expected, .type_variable);

    return report;
}
// cross-module import //
/// Build a report for cross-module import type mismatch
@ -2054,11 +2096,11 @@ pub const Store = struct {
const Self = @This();
const ALIGNMENT = std.mem.Alignment.@"16";
problems: std.ArrayListAlignedUnmanaged(Problem, ALIGNMENT) = .{},
problems: std.ArrayListAligned(Problem, ALIGNMENT) = .{},
pub fn initCapacity(gpa: Allocator, capacity: usize) std.mem.Allocator.Error!Self {
return .{
.problems = try std.ArrayListAlignedUnmanaged(Problem, ALIGNMENT).initCapacity(gpa, capacity),
.problems = try std.ArrayListAligned(Problem, ALIGNMENT).initCapacity(gpa, capacity),
};
}

View file

@ -227,10 +227,10 @@ test "compiled builtins - use Set and Dict together" {
try can_result.validateForChecking();
// Type check
var imported_envs = std.array_list.Managed(*const ModuleEnv).init(gpa);
defer imported_envs.deinit();
try imported_envs.append(set_loaded.env);
try imported_envs.append(dict_loaded.env);
var imported_envs = std.ArrayList(*const ModuleEnv).empty;
defer imported_envs.deinit(gpa);
try imported_envs.append(gpa, set_loaded.env);
try imported_envs.append(gpa, dict_loaded.env);
var checker = try Check.init(
gpa,

View file

@ -83,8 +83,8 @@ test "nominal type origin - displays origin in snapshot writer" {
// Test 3: Origin shown with type arguments
{
var buf = std.array_list.Managed(u8).init(test_allocator);
defer buf.deinit();
var buf = std.ArrayList(u8).empty;
defer buf.deinit(test_allocator);
// Create type arguments
const str_content = snapshot.SnapshotContent{ .structure = .{ .str = {} } };

View file

@ -947,7 +947,7 @@ test "check type - patterns record field mismatch" {
try checkTypesExpr(source, .fail, "INCOMPATIBLE MATCH PATTERNS");
}
// vars
// vars + reassignment //
test "check type - var ressignment" {
const source =
@ -960,7 +960,7 @@ test "check type - var ressignment" {
try checkTypesModule(source, .{ .pass = .last_def }, "Num(_size)");
}
// expect
// expect //
test "check type - expect" {
const source =
@ -984,6 +984,92 @@ test "check type - expect not bool" {
try checkTypesModule(source, .fail, "TYPE MISMATCH");
}
// crash //
// `crash` must unify with any expected type, so a block whose only statement
// is `crash` satisfies the `U64` annotation on `y`, and `main` still checks
// as U64 arithmetic.
test "check type - crash" {
    const source =
        \\module []
        \\
        \\y : U64
        \\y = {
        \\    crash "bug"
        \\}
        \\
        \\main = {
        \\    x = 1
        \\    x + y
        \\}
    ;
    try checkTypesModule(
        source,
        .{ .pass = .{ .def = "main" } },
        "Num(Int(Unsigned64))",
    );
}
// debug //
// A `debug` statement passes its value's type through, so the block's final
// type matches the `U64` annotation on `y`.
test "check type - debug" {
    const source =
        \\module []
        \\
        \\y : U64
        \\y = {
        \\    debug 2
        \\}
        \\
        \\main = {
        \\    x = 1
        \\    x + y
        \\}
    ;
    try checkTypesModule(
        source,
        .{ .pass = .{ .def = "main" } },
        "Num(Int(Unsigned64))",
    );
}
// for //
// A `for` over a numeric list with a reassigning body type-checks, and the
// accumulator keeps a polymorphic numeric type.
test "check type - for" {
    const source =
        \\module []
        \\
        \\main = {
        \\    var result = 0
        \\    for x in [1, 2, 3] {
        \\        result = result + x
        \\    }
        \\    result
        \\}
    ;
    try checkTypesModule(
        source,
        .{ .pass = .{ .def = "main" } },
        "Num(_size)",
    );
}
// Iterating a list of strings while adding the element to a numeric
// accumulator must be rejected with a type mismatch.
test "check type - for mismatch" {
    const source =
        \\module []
        \\
        \\main = {
        \\    var result = 0
        \\    for x in ["a", "b", "c"] {
        \\        result = result + x
        \\    }
        \\    result
        \\}
    ;
    try checkTypesModule(
        source,
        .fail,
        "TYPE MISMATCH",
    );
}
// static dispatch //
test "check type - static dispatch - polymorphic - annotation" {

View file

@ -1886,8 +1886,8 @@ pub fn rocBundle(allocs: *Allocators, args: cli_args.BundleArgs) !void {
}
// Collect all files to bundle
var file_paths = std.array_list.Managed([]const u8).init(allocs.arena);
defer file_paths.deinit();
var file_paths = std.ArrayList([]const u8).empty;
defer file_paths.deinit(allocs.arena);
var uncompressed_size: u64 = 0;
@ -1908,7 +1908,7 @@ pub fn rocBundle(allocs: *Allocators, args: cli_args.BundleArgs) !void {
const stat = try file.stat();
uncompressed_size += stat.size;
try file_paths.append(path);
try file_paths.append(allocs.arena, path);
}
// Sort and deduplicate paths
@ -2623,14 +2623,14 @@ fn rocFormat(allocs: *Allocators, args: cli_args.FormatArgs) !void {
var exit_code: u8 = 0;
if (args.check) {
var unformatted_files = std.array_list.Managed([]const u8).init(allocs.gpa);
defer unformatted_files.deinit();
var unformatted_files = std.ArrayList([]const u8).empty;
defer unformatted_files.deinit(allocs.gpa);
for (args.paths) |path| {
var result = try fmt.formatPath(allocs.gpa, allocs.arena, std.fs.cwd(), path, true);
defer result.deinit();
if (result.unformatted_files) |files| {
try unformatted_files.appendSlice(files.items);
try unformatted_files.appendSlice(allocs.gpa, files.items);
}
failure_count += result.failure;
}
@ -2695,6 +2695,10 @@ fn handleProcessFileError(err: anytype, stderr: anytype, path: []const u8) noret
// Catch-all for any other errors
else => stderr.print("{s}", .{@errorName(err)}) catch {},
}
stderr.print("\n", .{}) catch {};
// Flush stderr before exit to ensure error message is visible
stderr_writer.interface.flush() catch {};
std.process.exit(1);
}
@ -3695,11 +3699,11 @@ fn generateAppDocs(
}
// Convert map to sorted list
var modules_list = std.array_list.Managed(ModuleInfo).init(allocs.gpa);
defer modules_list.deinit();
var modules_list = std.ArrayList(ModuleInfo).empty;
defer modules_list.deinit(allocs.gpa);
var map_iter = modules_map.iterator();
while (map_iter.next()) |entry| {
try modules_list.append(entry.value_ptr.*);
try modules_list.append(allocs.gpa, entry.value_ptr.*);
}
// Collect package shorthands

View file

@ -14,10 +14,10 @@ test "bundle paths - empty list defaults to main.roc" {
test "bundle paths - single file unchanged" {
const allocator = testing.allocator;
var file_paths = std.array_list.Managed([]const u8).init(allocator);
defer file_paths.deinit();
var file_paths = std.ArrayList([]const u8).empty;
defer file_paths.deinit(allocator);
try file_paths.append("app.roc");
try file_paths.append(allocator, "app.roc");
try testing.expectEqual(@as(usize, 1), file_paths.items.len);
try testing.expectEqualStrings("app.roc", file_paths.items[0]);
@ -26,14 +26,14 @@ test "bundle paths - single file unchanged" {
test "bundle paths - sorting and deduplication" {
const allocator = testing.allocator;
var file_paths = std.array_list.Managed([]const u8).init(allocator);
defer file_paths.deinit();
var file_paths = std.ArrayList([]const u8).empty;
defer file_paths.deinit(allocator);
// Add paths in non-sorted order with duplicates
try file_paths.append("zebra.roc");
try file_paths.append("apple.roc");
try file_paths.append("banana.roc");
try file_paths.append("apple.roc");
try file_paths.append(allocator, "zebra.roc");
try file_paths.append(allocator, "apple.roc");
try file_paths.append(allocator, "banana.roc");
try file_paths.append(allocator, "apple.roc");
const first_cli_path = file_paths.items[0]; // "zebra.roc"
@ -85,18 +85,18 @@ test "bundle paths - sorting and deduplication" {
test "bundle paths - preserves first CLI arg with many files" {
const allocator = testing.allocator;
var file_paths = std.array_list.Managed([]const u8).init(allocator);
defer file_paths.deinit();
var file_paths = std.ArrayList([]const u8).empty;
defer file_paths.deinit(allocator);
// Add 8 paths with specific first
try file_paths.append("tests/test2.roc");
try file_paths.append("main.roc");
try file_paths.append("src/app.roc");
try file_paths.append("src/lib.roc");
try file_paths.append("src/utils/helper.roc");
try file_paths.append("tests/test1.roc");
try file_paths.append("docs/readme.md");
try file_paths.append("config.roc");
try file_paths.append(allocator, "tests/test2.roc");
try file_paths.append(allocator, "main.roc");
try file_paths.append(allocator, "src/app.roc");
try file_paths.append(allocator, "src/lib.roc");
try file_paths.append(allocator, "src/utils/helper.roc");
try file_paths.append(allocator, "tests/test1.roc");
try file_paths.append(allocator, "docs/readme.md");
try file_paths.append(allocator, "config.roc");
const first_cli_path = file_paths.items[0]; // "tests/test2.roc"

View file

@ -1218,8 +1218,8 @@ test "SafeList CompactWriter interleaved pattern with alignment tracking" {
defer writer.deinit(gpa);
// Track offsets as we go
var offsets = std.array_list.Managed(usize).init(gpa);
defer offsets.deinit();
var offsets = std.ArrayList(usize).empty;
defer offsets.deinit(gpa);
// Create temp file
var tmp_dir = testing.tmpDir(.{});
@ -1238,7 +1238,7 @@ test "SafeList CompactWriter interleaved pattern with alignment tracking" {
_ = try list1.append(gpa, 3);
const start1 = writer.total_bytes;
try offsets.append(start1); // Serialized struct is placed at current position
try offsets.append(gpa, start1); // Serialized struct is placed at current position
const serialized1 = try writer.appendAlloc(gpa, SafeList(u8).Serialized);
try serialized1.serialize(&list1, gpa, &writer);
@ -1249,7 +1249,7 @@ test "SafeList CompactWriter interleaved pattern with alignment tracking" {
_ = try list2.append(gpa, 2_000_000);
const start2 = writer.total_bytes;
try offsets.append(start2); // Serialized struct is placed at current position
try offsets.append(gpa, start2); // Serialized struct is placed at current position
const serialized2 = try writer.appendAlloc(gpa, SafeList(u64).Serialized);
try serialized2.serialize(&list2, gpa, &writer);
@ -1266,7 +1266,7 @@ test "SafeList CompactWriter interleaved pattern with alignment tracking" {
_ = try list3.append(gpa, 400);
const start3 = writer.total_bytes;
try offsets.append(start3); // Serialized struct is placed at current position
try offsets.append(gpa, start3); // Serialized struct is placed at current position
const serialized3 = try writer.appendAlloc(gpa, SafeList(u16).Serialized);
try serialized3.serialize(&list3, gpa, &writer);
@ -1276,7 +1276,7 @@ test "SafeList CompactWriter interleaved pattern with alignment tracking" {
_ = try list4.append(gpa, 42);
const start4 = writer.total_bytes;
try offsets.append(start4); // Serialized struct is placed at current position
try offsets.append(gpa, start4); // Serialized struct is placed at current position
const serialized4 = try writer.appendAlloc(gpa, SafeList(u32).Serialized);
try serialized4.serialize(&list4, gpa, &writer);

View file

@ -715,9 +715,9 @@ pub const BuildEnv = struct {
// Simple DFS walk on shorthand edges to detect if to_pkg reaches from_pkg
if (std.mem.eql(u8, from_pkg, to_pkg)) return true;
var stack = std.array_list.Managed([]const u8).init(self.gpa);
defer stack.deinit();
stack.append(to_pkg) catch {
var stack = std.ArrayList([]const u8).empty;
defer stack.deinit(self.gpa);
stack.append(self.gpa, to_pkg) catch {
return false;
};
@ -739,7 +739,7 @@ pub const BuildEnv = struct {
var it = pkg.shorthands.iterator();
while (it.next()) |e| {
const next = e.value_ptr.name;
stack.append(next) catch {
stack.append(self.gpa, next) catch {
return false;
};
}
@ -906,8 +906,8 @@ pub const BuildEnv = struct {
const e = ast.store.getExpr(expr_idx);
return switch (e) {
.string => |s| blk: {
var buf = std.array_list.Managed(u8).init(self.gpa);
errdefer buf.deinit();
var buf = std.ArrayList(u8).empty;
errdefer buf.deinit(self.gpa);
// Use exprSlice to properly iterate through string parts
for (ast.store.exprSlice(s.parts)) |part_idx| {
@ -915,11 +915,11 @@ pub const BuildEnv = struct {
if (part == .string_part) {
const tok = part.string_part.token;
const slice = ast.resolve(tok);
try buf.appendSlice(slice);
try buf.appendSlice(self.gpa, slice);
}
}
const result = try buf.toOwnedSlice();
const result = try buf.toOwnedSlice(self.gpa);
// Check for null bytes in the string, which are invalid in file paths
if (std.mem.indexOfScalar(u8, result, 0) != null) {
@ -944,13 +944,13 @@ pub const BuildEnv = struct {
fn dottedToPath(self: *BuildEnv, root_dir: []const u8, dotted: []const u8) ![]const u8 {
var parts = std.mem.splitScalar(u8, dotted, '.');
var segs = std.array_list.Managed([]const u8).init(self.gpa);
defer segs.deinit();
var segs = std.ArrayList([]const u8).empty;
defer segs.deinit(self.gpa);
try segs.append(root_dir);
try segs.append(self.gpa, root_dir);
while (parts.next()) |p| {
if (p.len == 0) continue;
try segs.append(p);
try segs.append(self.gpa, p);
}
const joined = try std.fs.path.join(self.gpa, segs.items);
@ -1154,12 +1154,12 @@ pub const BuildEnv = struct {
// sort by (min dependency depth from root app, then package and module names).
fn emitDeterministic(self: *BuildEnv) !void {
// Build arrays of package names, module names, and depths
var pkg_names = std.array_list.Managed([]const u8).init(self.gpa);
defer pkg_names.deinit();
var module_names = std.array_list.Managed([]const u8).init(self.gpa);
defer module_names.deinit();
var depths = std.array_list.Managed(u32).init(self.gpa);
defer depths.deinit();
var pkg_names = std.ArrayList([]const u8).empty;
defer pkg_names.deinit(self.gpa);
var module_names = std.ArrayList([]const u8).empty;
defer module_names.deinit(self.gpa);
var depths = std.ArrayList(u32).empty;
defer depths.deinit(self.gpa);
var it = self.schedulers.iterator();
while (it.next()) |e| {
@ -1171,9 +1171,9 @@ pub const BuildEnv = struct {
const mod = me.key_ptr.*;
const depth = sched.getModuleDepth(mod) orelse @as(u32, std.math.maxInt(u32));
try pkg_names.append(pkg_name);
try module_names.append(mod);
try depths.append(depth);
try pkg_names.append(self.gpa, pkg_name);
try module_names.append(self.gpa, mod);
try depths.append(self.gpa, depth);
}
}

View file

@ -100,11 +100,11 @@ const ModuleState = struct {
path: []const u8,
env: ?ModuleEnv = null,
phase: Phase = .Parse,
imports: std.array_list.Managed(ModuleId),
imports: std.ArrayList(ModuleId),
/// External imports qualified via package shorthand (e.g. "cli.Stdout") - still strings as they reference other packages
external_imports: std.array_list.Managed([]const u8),
dependents: std.array_list.Managed(ModuleId),
reports: std.array_list.Managed(Report),
external_imports: std.ArrayList([]const u8),
dependents: std.ArrayList(ModuleId),
reports: std.ArrayList(Report),
depth: u32 = std.math.maxInt(u32), // min depth from root
/// DFS visitation color for cycle detection: 0=white (unvisited), 1=gray (visiting), 2=black (finished)
visit_color: u8 = 0,
@ -116,24 +116,24 @@ const ModuleState = struct {
const source = if (self.env) |*e| e.common.source else null;
if (self.env) |*e| e.deinit();
if (source) |s| gpa.free(s);
self.imports.deinit();
self.external_imports.deinit();
self.dependents.deinit();
self.imports.deinit(gpa);
self.external_imports.deinit(gpa);
self.dependents.deinit(gpa);
// NOTE: Do NOT deinit reports here! Ownership has been transferred to OrderedSink
// when reports were emitted via sink.emitFn. The OrderedSink is responsible for
// deinitiating the reports after they've been drained and rendered.
self.reports.deinit();
self.reports.deinit(gpa);
gpa.free(self.path);
}
fn init(gpa: Allocator, name: []const u8, path: []const u8) ModuleState {
fn init(name: []const u8, path: []const u8) ModuleState {
return .{
.name = name,
.path = path,
.imports = std.array_list.Managed(ModuleId).init(gpa),
.external_imports = std.array_list.Managed([]const u8).init(gpa),
.dependents = std.array_list.Managed(ModuleId).init(gpa),
.reports = std.array_list.Managed(Report).init(gpa),
.imports = std.ArrayList(ModuleId).empty,
.external_imports = std.ArrayList([]const u8).empty,
.dependents = std.ArrayList(ModuleId).empty,
.reports = std.ArrayList(Report).empty,
};
}
};
@ -160,10 +160,10 @@ pub const PackageEnv = struct {
cond: Condition = .{},
// Work queue
injector: std.array_list.Managed(Task),
injector: std.ArrayList(Task),
// Module storage
modules: std.array_list.Managed(ModuleState),
modules: std.ArrayList(ModuleState),
// String intern table: module name -> module ID
module_names: std.StringHashMapUnmanaged(ModuleId) = .{},
@ -171,7 +171,7 @@ pub const PackageEnv = struct {
remaining_modules: usize = 0,
// Track module discovery order and which modules have had their reports emitted
discovered: std.array_list.Managed(ModuleId),
discovered: std.ArrayList(ModuleId),
emitted: std.bit_set.DynamicBitSetUnmanaged = .{},
// Timing collection (accumulated across all modules)
@ -192,9 +192,9 @@ pub const PackageEnv = struct {
.schedule_hook = schedule_hook,
.compiler_version = compiler_version,
.builtin_modules = builtin_modules,
.injector = std.array_list.Managed(Task).init(gpa),
.modules = std.array_list.Managed(ModuleState).init(gpa),
.discovered = std.array_list.Managed(ModuleId).init(gpa),
.injector = std.ArrayList(Task).empty,
.modules = std.ArrayList(ModuleState).empty,
.discovered = std.ArrayList(ModuleId).empty,
};
}
@ -221,9 +221,9 @@ pub const PackageEnv = struct {
.schedule_hook = schedule_hook,
.compiler_version = compiler_version,
.builtin_modules = builtin_modules,
.injector = std.array_list.Managed(Task).init(gpa),
.modules = std.array_list.Managed(ModuleState).init(gpa),
.discovered = std.array_list.Managed(ModuleId).init(gpa),
.injector = std.ArrayList(Task).empty,
.modules = std.ArrayList(ModuleState).empty,
.discovered = std.ArrayList(ModuleId).empty,
};
}
@ -234,7 +234,7 @@ pub const PackageEnv = struct {
for (self.modules.items) |*ms| {
ms.deinit(self.gpa);
}
self.modules.deinit();
self.modules.deinit(self.gpa);
// Free interned strings
var it = self.module_names.iterator();
@ -243,8 +243,8 @@ pub const PackageEnv = struct {
}
self.module_names.deinit(self.gpa);
self.injector.deinit();
self.discovered.deinit();
self.injector.deinit(self.gpa);
self.discovered.deinit(self.gpa);
self.emitted.deinit(self.gpa);
}
@ -357,8 +357,8 @@ pub const PackageEnv = struct {
// This is a new module
const owned_path = try self.gpa.dupe(u8, path);
const owned_name = self.module_names.getKey(name).?; // We just interned it
try self.modules.append(ModuleState.init(self.gpa, owned_name, owned_path));
try self.discovered.append(module_id);
try self.modules.append(self.gpa, ModuleState.init(owned_name, owned_path));
try self.discovered.append(self.gpa, module_id);
// Invoke scheduling hook for new module discovery/scheduling
self.schedule_hook.onSchedule(self.schedule_hook.ctx, self.package_name, owned_name, owned_path, 0);
@ -403,7 +403,7 @@ pub const PackageEnv = struct {
self.schedule_hook.onSchedule(self.schedule_hook.ctx, self.package_name, st.name, st.path, st.depth);
} else {
// Default behavior: use internal injector
try self.injector.append(.{ .module_id = module_id });
try self.injector.append(self.gpa, .{ .module_id = module_id });
if (@import("builtin").target.cpu.arch != .wasm32) self.cond.signal();
}
}
@ -571,11 +571,11 @@ pub const PackageEnv = struct {
// Convert parse diagnostics to reports
for (parse_ast.tokenize_diagnostics.items) |diagnostic| {
const report = try parse_ast.tokenizeDiagnosticToReport(diagnostic, self.gpa);
try st.reports.append(report);
try st.reports.append(self.gpa, report);
}
for (parse_ast.parse_diagnostics.items) |diagnostic| {
const report = try parse_ast.parseDiagnosticToReport(&env.common, diagnostic, self.gpa, st.path);
try st.reports.append(report);
try st.reports.append(self.gpa, report);
}
// canonicalize using the AST
@ -609,7 +609,7 @@ pub const PackageEnv = struct {
defer self.gpa.free(diags);
for (diags) |d| {
const report = try env.diagnosticToReport(d, self.gpa, st.path);
try st.reports.append(report);
try st.reports.append(self.gpa, report);
}
const canon_diag_end = if (@import("builtin").target.cpu.arch != .wasm32) std.time.nanoTimestamp() else 0;
if (@import("builtin").target.cpu.arch != .wasm32) {
@ -629,7 +629,7 @@ pub const PackageEnv = struct {
if (qualified) {
// Qualified imports refer to external packages; track and schedule externally
try st.external_imports.append(mod_name);
try st.external_imports.append(self.gpa, mod_name);
if (self.resolver) |r| r.scheduleExternal(r.ctx, self.package_name, mod_name);
// External dependencies are resolved by the workspace; skip local scheduling/cycle detection
continue;
@ -642,13 +642,13 @@ pub const PackageEnv = struct {
st = &self.modules.items[module_id];
env = &st.env.?;
const existed = child_id < self.modules.items.len - 1;
try st.imports.append(child_id);
try st.imports.append(self.gpa, child_id);
// parent depth + 1
try self.setDepthIfSmaller(child_id, st.depth + 1);
// Cycle detection for local deps
var child = &self.modules.items[child_id];
try child.dependents.append(module_id);
try child.dependents.append(self.gpa, module_id);
if (child.visit_color == 1 or child_id == module_id) {
// Build a report on the current module describing the cycle
@ -680,7 +680,7 @@ pub const PackageEnv = struct {
}
// Store the report on both modules for clarity
try st.reports.append(rep);
try st.reports.append(self.gpa, rep);
// Duplicate for child as well so it gets emitted too
var rep_child = Report.init(self.gpa, "Import cycle detected", .runtime_error);
const child_msg = try rep_child.addOwnedString("This module participates in an import cycle. Cycles between modules are not allowed.");
@ -691,7 +691,7 @@ pub const PackageEnv = struct {
try rep_child.document.addText(" -> ");
try rep_child.document.addAnnotated(mod_name, .emphasized);
try rep_child.document.addLineBreak();
try child.reports.append(rep_child);
try child.reports.append(self.gpa, rep_child);
// Mark both Done and adjust counters
if (st.phase != .Done) {
@ -780,7 +780,7 @@ pub const PackageEnv = struct {
// Build other_modules array according to env.imports order
const import_count = env.imports.imports.items.items.len;
var imported_envs = try std.array_list.Managed(*ModuleEnv).initCapacity(self.gpa, import_count);
var imported_envs = try std.ArrayList(*ModuleEnv).initCapacity(self.gpa, import_count);
// NOTE: Don't deinit 'imported_envs' yet - comptime_evaluator holds a reference to imported_envs.items
for (env.imports.imports.items.items[0..import_count]) |str_idx| {
const import_name = env.getString(str_idx);
@ -791,7 +791,7 @@ pub const PackageEnv = struct {
if (self.resolver) |r| {
if (r.getEnv(r.ctx, self.package_name, import_name)) |ext_env_ptr| {
// External env is already a pointer, use it directly
try imported_envs.append(ext_env_ptr);
try imported_envs.append(self.gpa, ext_env_ptr);
} else {
// External env not ready; skip (tryUnblock should have prevented this)
}
@ -802,7 +802,7 @@ pub const PackageEnv = struct {
// Get a pointer to the child's env (stored in the modules ArrayList)
// This is safe because we don't modify the modules ArrayList during type checking
const child_env_ptr = &child.env.?;
try imported_envs.append(child_env_ptr);
try imported_envs.append(self.gpa, child_env_ptr);
}
}
@ -846,7 +846,7 @@ pub const PackageEnv = struct {
defer rb.deinit();
for (checker.problems.problems.items) |prob| {
const rep = rb.build(prob) catch continue;
try st.reports.append(rep);
try st.reports.append(self.gpa, rep);
}
const check_diag_end = if (@import("builtin").target.cpu.arch != .wasm32) std.time.nanoTimestamp() else 0;
if (@import("builtin").target.cpu.arch != .wasm32) {
@ -857,7 +857,7 @@ pub const PackageEnv = struct {
comptime_evaluator.deinit();
// Now we can safely deinit the 'imported_envs' ArrayList
imported_envs.deinit();
imported_envs.deinit(self.gpa);
// Note: We no longer need to free the 'imported_envs' items because they now point directly
// to ModuleEnv instances stored in the modules ArrayList, not to heap-allocated copies.
@ -878,16 +878,16 @@ pub const PackageEnv = struct {
}
// Default: convert dotted module name to path under root_dir
var buffer = std.array_list.Managed(u8).init(self.gpa);
defer buffer.deinit();
var buffer = std.ArrayList(u8).empty;
defer buffer.deinit(self.gpa);
var it = std.mem.splitScalar(u8, mod_name, '.');
var first = true;
while (it.next()) |part| {
if (!first) try buffer.appendSlice(std.fs.path.sep_str) else first = false;
try buffer.appendSlice(part);
if (!first) try buffer.appendSlice(self.gpa, std.fs.path.sep_str) else first = false;
try buffer.appendSlice(self.gpa, part);
}
try buffer.appendSlice(".roc");
const rel = try buffer.toOwnedSlice();
try buffer.appendSlice(self.gpa, ".roc");
const rel = try buffer.toOwnedSlice(self.gpa);
const full = try std.fs.path.join(self.gpa, &.{ self.root_dir, rel });
self.gpa.free(rel);
return full;
@ -920,15 +920,15 @@ pub const PackageEnv = struct {
try visited.resize(self.gpa, self.modules.items.len, false);
const Frame = struct { id: ModuleId, next_idx: usize };
var frames = std.array_list.Managed(Frame).init(self.gpa);
defer frames.deinit();
var frames = std.ArrayList(Frame).empty;
defer frames.deinit(self.gpa);
var stack_ids = std.array_list.Managed(ModuleId).init(self.gpa);
defer stack_ids.deinit();
var stack_ids = std.ArrayList(ModuleId).empty;
defer stack_ids.deinit(self.gpa);
visited.set(start);
try frames.append(.{ .id = start, .next_idx = 0 });
try stack_ids.append(start);
try frames.append(self.gpa, .{ .id = start, .next_idx = 0 });
try stack_ids.append(self.gpa, start);
while (frames.items.len > 0) {
var top = &frames.items[frames.items.len - 1];
@ -951,8 +951,8 @@ pub const PackageEnv = struct {
if (!visited.isSet(child)) {
visited.set(child);
try frames.append(.{ .id = child, .next_idx = 0 });
try stack_ids.append(child);
try frames.append(self.gpa, .{ .id = child, .next_idx = 0 });
try stack_ids.append(self.gpa, child);
}
}
return null;

View file

@ -344,9 +344,9 @@
// defer original_tree.deinit();
// try module_env.pushToSExprTree(null, &original_tree);
// var original_sexpr = std.array_list.Managed(u8).init(gpa);
// defer original_sexpr.deinit();
// try original_tree.toStringPretty(original_sexpr.writer().any());
// var original_sexpr = std.ArrayList(u8).empty;
// defer original_sexpr.deinit(gpa);
// try original_tree.toStringPretty(original_sexpr.writer(gpa).any());
// // Create arena for serialization
// var arena = std.heap.ArenaAllocator.init(gpa);
@ -373,8 +373,8 @@
// try restored_env.pushToSExprTree(null, &restored_tree);
// var restored_sexpr = std.array_list.Managed(u8).init(gpa);
// defer restored_sexpr.deinit();
// var restored_sexpr = std.ArrayList(u8).empty;
// defer restored_sexpr.deinit(gpa);
// try restored_tree.toStringPretty(restored_sexpr.writer().any());
@ -420,9 +420,9 @@
// // defer original_tree.deinit();
// // try module_env.pushToSExprTree(null, &original_tree);
// // var original_sexpr = std.array_list.Managed(u8).init(gpa);
// // defer original_sexpr.deinit();
// // try original_tree.toStringPretty(original_sexpr.writer().any());
// // var original_sexpr = std.ArrayList(u8).empty;
// // defer original_sexpr.deinit(gpa);
// // try original_tree.toStringPretty(original_sexpr.writer(gpa).any());
// // // Create arena for serialization
// // var arena = std.heap.ArenaAllocator.init(gpa);
@ -515,8 +515,8 @@
// // try restored_env.pushToSExprTree(null, &restored_tree);
// // var restored_sexpr = std.array_list.Managed(u8).init(gpa);
// // defer restored_sexpr.deinit();
// // var restored_sexpr = std.ArrayList(u8).empty;
// // defer restored_sexpr.deinit(gpa);
// // try restored_tree.toStringPretty(restored_sexpr.writer().any());

View file

@ -410,9 +410,9 @@ test "ModuleEnv pushExprTypesToSExprTree extracts and formats types" {
try env.pushTypesToSExprTree(expr_idx, &tree);
// Convert tree to string
var result = std.array_list.Managed(u8).init(gpa);
defer result.deinit();
try tree.toStringPretty(result.writer().any(), .include_linecol);
var result = std.ArrayList(u8).empty;
defer result.deinit(gpa);
try tree.toStringPretty(result.writer(gpa).any(), .include_linecol);
// Verify the output contains the type information
const result_str = result.items;

View file

@ -19,8 +19,8 @@ test "roc check writes parse errors to stderr" {
return error.SkipZigTest;
};
// Use the existing test/str/app.roc file which has type errors
const test_file = try std.fs.path.join(gpa, &.{ cwd_path, "test", "str", "app.roc" });
// Use a test file that intentionally has a parse error
const test_file = try std.fs.path.join(gpa, &.{ cwd_path, "test", "compile", "has_parse_error.roc" });
defer gpa.free(test_file);
// Run roc check and capture stderr
@ -40,9 +40,9 @@ test "roc check writes parse errors to stderr" {
// 2. Stderr contains error information (THIS IS THE KEY TEST - without flush, this will be empty)
try testing.expect(result.stderr.len > 0);
// 3. Stderr contains error reporting (look for UNDECLARED TYPE error)
const has_error = std.mem.indexOf(u8, result.stderr, "UNDECLARED TYPE") != null or
// 3. Stderr contains error reporting
const has_error = std.mem.indexOf(u8, result.stderr, "Failed to check") != null or
std.mem.indexOf(u8, result.stderr, "error") != null or
std.mem.indexOf(u8, result.stderr, "Found") != null;
std.mem.indexOf(u8, result.stderr, "Unsupported") != null;
try testing.expect(has_error);
}

View file

@ -516,16 +516,16 @@
// const mod_name = try std.fmt.allocPrint(gpa, "Mod{}", .{i});
// defer gpa.free(mod_name);
// var imports = std.array_list.Managed(u8).init(gpa);
// defer imports.deinit();
// var imports = std.ArrayList(u8).empty;
// defer imports.deinit(gpa);
// // Each module imports the next two modules (circular style)
// var j: usize = 1;
// while (j <= 2) : (j += 1) {
// const import_idx = (i + j) % module_count;
// try imports.appendSlice("import Mod");
// try imports.appendSlice(gpa, "import Mod");
// try std.fmt.formatInt(import_idx, 10, .lower, .{}, imports.writer());
// try imports.append('\n');
// try imports.append(gpa, '\n');
// }
// const content = try std.fmt.allocPrint(gpa,

View file

@ -206,11 +206,11 @@
// const TestSink = struct {
// gpa: std.mem.Allocator,
// reports: std.array_list.Managed(Report),
// modules: std.array_list.Managed([]const u8),
// reports: std.ArrayList(Report),
// modules: std.ArrayList([]const u8),
// fn init(gpa: std.mem.Allocator) TestSink {
// return .{ .gpa = gpa, .reports = std.array_list.Managed(Report).init(gpa), .modules = std.array_list.Managed([]const u8).init(gpa) };
// return .{ .gpa = gpa, .reports = std.ArrayList.empty };
// }
// fn deinit(self: *TestSink) void {

View file

@ -137,8 +137,8 @@ pub const Interpreter = struct {
pub fn init(allocator: std.mem.Allocator, env: *can.ModuleEnv, builtin_types: BuiltinTypes, imported_modules_map: ?*const std.AutoHashMap(base_pkg.Ident.Idx, can.Can.AutoImportedType)) !Interpreter {
// Convert imported modules map to other_envs slice
// IMPORTANT: The order must match Import.Idx order (not hash map iteration order!)
var other_envs_list = std.array_list.Managed(*const can.ModuleEnv).init(allocator);
errdefer other_envs_list.deinit();
var other_envs_list = std.ArrayList(*const can.ModuleEnv).empty;
errdefer other_envs_list.deinit(allocator);
if (imported_modules_map) |modules_map| {
// Iterate through imports in Import.Idx order (0, 1, 2, ...)
@ -156,7 +156,7 @@ pub const Interpreter = struct {
// Look up the module env in the map
if (modules_map.get(module_ident)) |auto_imported| {
try other_envs_list.append(auto_imported.env);
try other_envs_list.append(allocator, auto_imported.env);
} else {
// Import exists in env.imports but not in the map we were given.
// This shouldn't happen in practice if the map is properly constructed.
@ -167,7 +167,7 @@ pub const Interpreter = struct {
// Transfer ownership of the slice to the Interpreter
// Note: The caller is responsible for freeing this via deinitAndFreeOtherEnvs()
const other_envs = try other_envs_list.toOwnedSlice();
const other_envs = try other_envs_list.toOwnedSlice(allocator);
return initWithOtherEnvs(allocator, env, other_envs, builtin_types);
}
@ -3566,7 +3566,7 @@ pub const Interpreter = struct {
break :blk try self.runtime_types.freshFromContent(.{ .structure = .{ .num = .{ .num_compact = compact_num } } });
},
.tag_union => |tu| {
var rt_tag_args = try std.ArrayListUnmanaged(types.Var).initCapacity(self.allocator, 8);
var rt_tag_args = try std.ArrayList(types.Var).initCapacity(self.allocator, 8);
defer rt_tag_args.deinit(self.allocator);
var rt_tags = try self.gatherTags(module, tu);
@ -3746,8 +3746,8 @@ pub const Interpreter = struct {
ctx: *const Interpreter,
module: *can.ModuleEnv,
tag_union: types.TagUnion,
) std.mem.Allocator.Error!std.ArrayListUnmanaged(types.Tag) {
var scratch_tags = try std.ArrayListUnmanaged(types.Tag).initCapacity(ctx.allocator, 8);
) std.mem.Allocator.Error!std.ArrayList(types.Tag) {
var scratch_tags = try std.ArrayList(types.Tag).initCapacity(ctx.allocator, 8);
const tag_slice = module.types.getTagsSlice(tag_union.tags);
for (tag_slice.items(.name), tag_slice.items(.args)) |name, args| {

View file

@ -170,12 +170,12 @@ fn parseCheckAndEvalModuleWithImport(src: []const u8, import_name: []const u8, i
try czer.canonicalizeFile();
// Set up other_envs for type checking (include Bool, Result, and Str modules)
var imported_envs = std.array_list.Managed(*const ModuleEnv).init(gpa);
defer imported_envs.deinit();
try imported_envs.append(imported_module);
try imported_envs.append(bool_module.env);
try imported_envs.append(result_module.env);
try imported_envs.append(str_module.env);
var imported_envs = std.ArrayList(*const ModuleEnv).empty;
defer imported_envs.deinit(gpa);
try imported_envs.append(gpa, imported_module);
try imported_envs.append(gpa, bool_module.env);
try imported_envs.append(gpa, result_module.env);
try imported_envs.append(gpa, str_module.env);
// Type check the module
var checker = try Check.init(gpa, &module_env.types, module_env, imported_envs.items, &module_envs, &module_env.store.regions, common_idents);

View file

@ -155,14 +155,14 @@ pub const Dir = struct {
/// Find all filepaths in this directory recursively.
///
/// The text of the relative paths are stored in the `string_arena`
/// and the slices over said paths are returned in an `ArrayListUnmanaged`
/// and the slices over said paths are returned in an `ArrayList`
/// that must be `deinit`ed by the caller.
pub fn findAllFilesRecursively(
dir: *Dir,
gpa: std.mem.Allocator,
string_arena: *std.heap.ArenaAllocator,
) !std.array_list.Managed([]const u8) {
var files = std.array_list.Managed([]const u8){};
) !std.ArrayList([]const u8) {
var files = std.ArrayList([]const u8){};
errdefer files.deinit(gpa);
var walker = try dir.dir.walk(gpa);

View file

@ -189,12 +189,12 @@ pub const Store = struct {
field_layouts: []const Layout,
field_names: []const Ident.Idx,
) std.mem.Allocator.Error!Idx {
var temp_fields = std.array_list.Managed(RecordField).init(self.env.gpa);
defer temp_fields.deinit();
var temp_fields = std.ArrayList(RecordField).empty;
defer temp_fields.deinit(self.env.gpa);
for (field_layouts, field_names) |field_layout, field_name| {
const field_layout_idx = try self.insertLayout(field_layout);
try temp_fields.append(.{
try temp_fields.append(self.env.gpa, .{
.name = field_name,
.layout = field_layout_idx,
});
@ -263,12 +263,12 @@ pub const Store = struct {
/// Insert a tuple layout from concrete element layouts
pub fn putTuple(self: *Self, element_layouts: []const Layout) std.mem.Allocator.Error!Idx {
// Collect fields
var temp_fields = std.array_list.Managed(TupleField).init(self.env.gpa);
defer temp_fields.deinit();
var temp_fields = std.ArrayList(TupleField).empty;
defer temp_fields.deinit(self.env.gpa);
for (element_layouts, 0..) |elem_layout, i| {
const elem_idx = try self.insertLayout(elem_layout);
try temp_fields.append(.{ .index = @intCast(i), .layout = elem_idx });
try temp_fields.append(self.env.gpa, .{ .index = @intCast(i), .layout = elem_idx });
}
// Sort by alignment desc, then by original index asc
@ -620,11 +620,11 @@ pub const Store = struct {
const field_idxs = self.work.resolved_record_fields.items(.field_idx);
// First, collect the fields into a temporary array so we can sort them
var temp_fields = std.array_list.Managed(RecordField).init(self.env.gpa);
defer temp_fields.deinit();
var temp_fields = std.ArrayList(RecordField).empty;
defer temp_fields.deinit(self.env.gpa);
for (updated_record.resolved_fields_start..resolved_fields_end) |i| {
try temp_fields.append(.{
try temp_fields.append(self.env.gpa, .{
.name = field_names[i],
.layout = field_idxs[i],
});
@ -721,11 +721,11 @@ pub const Store = struct {
const field_idxs = self.work.resolved_tuple_fields.items(.field_idx);
// First, collect the fields into a temporary array so we can sort them
var temp_fields = std.array_list.Managed(TupleField).init(self.env.gpa);
defer temp_fields.deinit();
var temp_fields = std.ArrayList(TupleField).empty;
defer temp_fields.deinit(self.env.gpa);
for (updated_tuple.resolved_fields_start..resolved_fields_end) |i| {
try temp_fields.append(.{
try temp_fields.append(self.env.gpa, .{
.index = field_indices[i],
.layout = field_idxs[i],
});

View file

@ -35,8 +35,8 @@ env: *CommonEnv,
tokens: TokenizedBuffer,
store: NodeStore,
root_node_idx: u32 = 0,
tokenize_diagnostics: std.ArrayListUnmanaged(tokenize.Diagnostic),
parse_diagnostics: std.ArrayListUnmanaged(AST.Diagnostic),
tokenize_diagnostics: std.ArrayList(tokenize.Diagnostic),
parse_diagnostics: std.ArrayList(AST.Diagnostic),
/// Calculate whether this region is - or will be - multiline
pub fn regionIsMultiline(self: *AST, region: TokenizedRegion) bool {

View file

@ -20,7 +20,7 @@ const NodeStore = @This();
gpa: std.mem.Allocator,
nodes: Node.List,
extra_data: std.array_list.Managed(u32),
extra_data: std.ArrayList(u32),
scratch_statements: base.Scratch(AST.Statement.Idx),
scratch_tokens: base.Scratch(Token.Idx),
scratch_exprs: base.Scratch(AST.Expr.Idx),
@ -54,7 +54,7 @@ pub fn initCapacity(gpa: std.mem.Allocator, capacity: usize) std.mem.Allocator.E
var store: NodeStore = .{
.gpa = gpa,
.nodes = try Node.List.initCapacity(gpa, capacity),
.extra_data = try std.array_list.Managed(u32).initCapacity(gpa, capacity / 2),
.extra_data = try std.ArrayList(u32).initCapacity(gpa, capacity / 2),
.scratch_statements = try base.Scratch(AST.Statement.Idx).init(gpa),
.scratch_tokens = try base.Scratch(Token.Idx).init(gpa),
.scratch_exprs = try base.Scratch(AST.Expr.Idx).init(gpa),
@ -88,7 +88,7 @@ const scratch_90th_percentile_capacity = std.math.ceilPowerOfTwoAssert(usize, 64
/// method.
pub fn deinit(store: *NodeStore) void {
store.nodes.deinit(store.gpa);
store.extra_data.deinit();
store.extra_data.deinit(store.gpa);
store.scratch_statements.deinit();
store.scratch_tokens.deinit();
store.scratch_exprs.deinit();
@ -160,7 +160,7 @@ pub fn addMalformed(store: *NodeStore, comptime T: type, reason: Diagnostic.Tag,
/// Adds a file node to the store.
pub fn addFile(store: *NodeStore, file: AST.File) std.mem.Allocator.Error!void {
try store.extra_data.append(@intFromEnum(file.header));
try store.extra_data.append(store.gpa, @intFromEnum(file.header));
store.nodes.set(@enumFromInt(0), .{
.tag = .root,
.main_token = 0,
@ -204,7 +204,7 @@ pub fn addHeader(store: *NodeStore, header: AST.Header) std.mem.Allocator.Error!
node.data.rhs = @intFromEnum(app.packages);
node.region = app.region;
try store.extra_data.append(@intFromEnum(app.platform_idx));
try store.extra_data.append(store.gpa, @intFromEnum(app.platform_idx));
},
.module => |mod| {
node.tag = .module_header;
@ -227,11 +227,11 @@ pub fn addHeader(store: *NodeStore, header: AST.Header) std.mem.Allocator.Error!
node.main_token = platform.name;
const ed_start = store.extra_data.items.len;
try store.extra_data.append(@intFromEnum(platform.requires_rigids));
try store.extra_data.append(@intFromEnum(platform.requires_signatures));
try store.extra_data.append(@intFromEnum(platform.exposes));
try store.extra_data.append(@intFromEnum(platform.packages));
try store.extra_data.append(@intFromEnum(platform.provides));
try store.extra_data.append(store.gpa, @intFromEnum(platform.requires_rigids));
try store.extra_data.append(store.gpa, @intFromEnum(platform.requires_signatures));
try store.extra_data.append(store.gpa, @intFromEnum(platform.exposes));
try store.extra_data.append(store.gpa, @intFromEnum(platform.packages));
try store.extra_data.append(store.gpa, @intFromEnum(platform.provides));
const ed_len = store.extra_data.items.len - ed_start;
node.data.lhs = @intCast(ed_start);
@ -376,16 +376,16 @@ pub fn addStatement(store: *NodeStore, statement: AST.Statement) std.mem.Allocat
// Store all import data in a flat format:
// [exposes.span.start, exposes.span.len, qualifier_tok?, alias_tok?]
const data_start = @as(u32, @intCast(store.extra_data.items.len));
try store.extra_data.append(i.exposes.span.start);
try store.extra_data.append(i.exposes.span.len);
try store.extra_data.append(store.gpa, i.exposes.span.start);
try store.extra_data.append(store.gpa, i.exposes.span.len);
if (i.qualifier_tok) |tok| {
rhs.qualified = 1;
try store.extra_data.append(tok);
try store.extra_data.append(store.gpa, tok);
}
if (i.alias_tok) |tok| {
rhs.aliased = 1;
try store.extra_data.append(tok);
try store.extra_data.append(store.gpa, tok);
}
node.data.rhs = @as(u32, @bitCast(rhs));
@ -410,17 +410,17 @@ pub fn addStatement(store: *NodeStore, statement: AST.Statement) std.mem.Allocat
// Store where clause index (0 if null)
const where_idx = if (d.where) |w| @intFromEnum(w) else 0;
try store.extra_data.append(where_idx);
try store.extra_data.append(store.gpa, where_idx);
// Store associated data if present
if (d.associated) |assoc| {
try store.extra_data.append(1); // has_associated = 1
try store.extra_data.append(assoc.statements.span.start);
try store.extra_data.append(assoc.statements.span.len);
try store.extra_data.append(assoc.region.start);
try store.extra_data.append(assoc.region.end);
try store.extra_data.append(store.gpa, 1); // has_associated = 1
try store.extra_data.append(store.gpa, assoc.statements.span.start);
try store.extra_data.append(store.gpa, assoc.statements.span.len);
try store.extra_data.append(store.gpa, assoc.region.start);
try store.extra_data.append(store.gpa, assoc.region.end);
} else {
try store.extra_data.append(0); // has_associated = 0
try store.extra_data.append(store.gpa, 0); // has_associated = 0
}
node.main_token = extra_start;
@ -464,9 +464,9 @@ pub fn addPattern(store: *NodeStore, pattern: AST.Pattern) std.mem.Allocator.Err
},
.tag => |t| {
const data_start = @as(u32, @intCast(store.extra_data.items.len));
try store.extra_data.append(t.args.span.len);
try store.extra_data.append(t.qualifiers.span.start);
try store.extra_data.append(t.qualifiers.span.len);
try store.extra_data.append(store.gpa, t.args.span.len);
try store.extra_data.append(store.gpa, t.qualifiers.span.start);
try store.extra_data.append(store.gpa, t.qualifiers.span.len);
node.tag = .tag_patt;
node.region = t.region;
@ -617,12 +617,12 @@ pub fn addExpr(store: *NodeStore, expr: AST.Expr) std.mem.Allocator.Error!AST.Ex
// Store all record data in flat format:
// [fields.span.start, fields.span.len, ext_or_zero]
const data_start = @as(u32, @intCast(store.extra_data.items.len));
try store.extra_data.append(r.fields.span.start);
try store.extra_data.append(r.fields.span.len);
try store.extra_data.append(store.gpa, r.fields.span.start);
try store.extra_data.append(store.gpa, r.fields.span.len);
// Store ext value or 0 for null
const ext_value = if (r.ext) |ext| @intFromEnum(ext) else 0;
try store.extra_data.append(ext_value);
try store.extra_data.append(store.gpa, ext_value);
node.data.lhs = data_start;
node.data.rhs = 0; // Not used
@ -633,7 +633,7 @@ pub fn addExpr(store: *NodeStore, expr: AST.Expr) std.mem.Allocator.Error!AST.Ex
node.data.lhs = l.args.span.start;
node.data.rhs = l.args.span.len;
const body_idx = store.extra_data.items.len;
try store.extra_data.append(@intFromEnum(l.body));
try store.extra_data.append(store.gpa, @intFromEnum(l.body));
node.main_token = @as(u32, @intCast(body_idx));
},
.apply => |app| {
@ -642,7 +642,7 @@ pub fn addExpr(store: *NodeStore, expr: AST.Expr) std.mem.Allocator.Error!AST.Ex
node.data.lhs = app.args.span.start;
node.data.rhs = app.args.span.len;
const fn_ed_idx = store.extra_data.items.len;
try store.extra_data.append(@intFromEnum(app.@"fn"));
try store.extra_data.append(store.gpa, @intFromEnum(app.@"fn"));
node.main_token = @as(u32, @intCast(fn_ed_idx));
},
.record_updater => |_| {},
@ -684,8 +684,8 @@ pub fn addExpr(store: *NodeStore, expr: AST.Expr) std.mem.Allocator.Error!AST.Ex
node.region = i.region;
node.data.lhs = @intFromEnum(i.condition);
node.data.rhs = @as(u32, @intCast(store.extra_data.items.len));
try store.extra_data.append(@intFromEnum(i.then));
try store.extra_data.append(@intFromEnum(i.@"else"));
try store.extra_data.append(store.gpa, @intFromEnum(i.then));
try store.extra_data.append(store.gpa, @intFromEnum(i.@"else"));
},
.match => |m| {
node.tag = .match;
@ -693,7 +693,7 @@ pub fn addExpr(store: *NodeStore, expr: AST.Expr) std.mem.Allocator.Error!AST.Ex
node.data.lhs = m.branches.span.start;
node.data.rhs = m.branches.span.len;
const expr_idx = store.extra_data.items.len;
try store.extra_data.append(@intFromEnum(m.expr));
try store.extra_data.append(store.gpa, @intFromEnum(m.expr));
node.main_token = @as(u32, @intCast(expr_idx));
},
.ident => |id| {
@ -854,9 +854,9 @@ pub fn addWhereClause(store: *NodeStore, clause: AST.WhereClause) std.mem.Alloca
node.region = c.region;
node.main_token = c.var_tok;
const ed_start = store.extra_data.items.len;
try store.extra_data.append(c.name_tok);
try store.extra_data.append(@intFromEnum(c.args));
try store.extra_data.append(@intFromEnum(c.ret_anno));
try store.extra_data.append(store.gpa, c.name_tok);
try store.extra_data.append(store.gpa, @intFromEnum(c.args));
try store.extra_data.append(store.gpa, @intFromEnum(c.ret_anno));
node.data.lhs = @intCast(ed_start);
},
.mod_alias => |c| {
@ -921,8 +921,8 @@ pub fn addTypeAnno(store: *NodeStore, anno: AST.TypeAnno) std.mem.Allocator.Erro
// Store all tag_union data in flat format:
// [tags.span.start, tags.span.len, open_anno?]
const data_start = @as(u32, @intCast(store.extra_data.items.len));
try store.extra_data.append(tu.tags.span.start);
try store.extra_data.append(tu.tags.span.len);
try store.extra_data.append(store.gpa, tu.tags.span.start);
try store.extra_data.append(store.gpa, tu.tags.span.len);
var rhs = AST.TypeAnno.TagUnionRhs{
.open = 0,
@ -930,7 +930,7 @@ pub fn addTypeAnno(store: *NodeStore, anno: AST.TypeAnno) std.mem.Allocator.Erro
};
if (tu.open_anno) |a| {
rhs.open = 1;
try store.extra_data.append(@intFromEnum(a));
try store.extra_data.append(store.gpa, @intFromEnum(a));
}
node.data.lhs = data_start;
@ -957,7 +957,7 @@ pub fn addTypeAnno(store: *NodeStore, anno: AST.TypeAnno) std.mem.Allocator.Erro
.args_len = @intCast(f.args.span.len), // We hope a function has less than 2.147b args
});
const ret_idx = store.extra_data.items.len;
try store.extra_data.append(@intFromEnum(f.ret));
try store.extra_data.append(store.gpa, @intFromEnum(f.ret));
node.main_token = @intCast(ret_idx);
},
.parens => |p| {
@ -1874,7 +1874,7 @@ pub fn exprSpanFrom(store: *NodeStore, start: u32) std.mem.Allocator.Error!AST.E
const ed_start = @as(u32, @intCast(store.extra_data.items.len));
std.debug.assert(end >= i);
while (i < end) {
try store.extra_data.append(@intFromEnum(store.scratch_exprs.items.items[i]));
try store.extra_data.append(store.gpa, @intFromEnum(store.scratch_exprs.items.items[i]));
i += 1;
}
return .{ .span = .{ .start = ed_start, .len = @as(u32, @intCast(end)) - start } };
@ -1912,7 +1912,7 @@ pub fn statementSpanFrom(store: *NodeStore, start: u32) std.mem.Allocator.Error!
const ed_start = @as(u32, @intCast(store.extra_data.items.len));
std.debug.assert(end >= i);
while (i < end) {
try store.extra_data.append(@intFromEnum(store.scratch_statements.items.items[i]));
try store.extra_data.append(store.gpa, @intFromEnum(store.scratch_statements.items.items[i]));
i += 1;
}
return .{ .span = .{ .start = ed_start, .len = @as(u32, @intCast(end)) - start } };
@ -1950,7 +1950,7 @@ pub fn patternSpanFrom(store: *NodeStore, start: u32) std.mem.Allocator.Error!AS
const ed_start = @as(u32, @intCast(store.extra_data.items.len));
std.debug.assert(end >= i);
while (i < end) {
try store.extra_data.append(@intFromEnum(store.scratch_patterns.items.items[i]));
try store.extra_data.append(store.gpa, @intFromEnum(store.scratch_patterns.items.items[i]));
i += 1;
}
return .{ .span = .{ .start = ed_start, .len = @as(u32, @intCast(end)) - start } };
@ -1997,7 +1997,7 @@ pub fn patternRecordFieldSpanFrom(store: *NodeStore, start: u32) std.mem.Allocat
var i = @as(usize, @intCast(start));
const ed_start = @as(u32, @intCast(store.extra_data.items.len));
while (i < end) {
try store.extra_data.append(@intFromEnum(store.scratch_pattern_record_fields.items.items[i]));
try store.extra_data.append(store.gpa, @intFromEnum(store.scratch_pattern_record_fields.items.items[i]));
i += 1;
}
return .{ .span = .{ .start = ed_start, .len = @as(u32, @intCast(end)) - start } };
@ -2033,7 +2033,7 @@ pub fn recordFieldSpanFrom(store: *NodeStore, start: u32) std.mem.Allocator.Erro
var i = @as(usize, @intCast(start));
const ed_start = @as(u32, @intCast(store.extra_data.items.len));
while (i < end) {
try store.extra_data.append(@intFromEnum(store.scratch_record_fields.items.items[i]));
try store.extra_data.append(store.gpa, @intFromEnum(store.scratch_record_fields.items.items[i]));
i += 1;
}
return .{ .span = .{ .start = ed_start, .len = @as(u32, @intCast(end)) - start } };
@ -2064,7 +2064,7 @@ pub fn matchBranchSpanFrom(store: *NodeStore, start: u32) std.mem.Allocator.Erro
var i = @as(usize, @intCast(start));
const ed_start = @as(u32, @intCast(store.extra_data.items.len));
while (i < end) {
try store.extra_data.append(@intFromEnum(store.scratch_match_branches.items.items[i]));
try store.extra_data.append(store.gpa, @intFromEnum(store.scratch_match_branches.items.items[i]));
i += 1;
}
return .{ .span = .{ .start = ed_start, .len = @as(u32, @intCast(end)) - start } };
@ -2101,7 +2101,7 @@ pub fn typeAnnoSpanFrom(store: *NodeStore, start: u32) std.mem.Allocator.Error!A
var i = @as(usize, @intCast(start));
const ed_start = @as(u32, @intCast(store.extra_data.items.len));
while (i < end) {
try store.extra_data.append(@intFromEnum(store.scratch_type_annos.items.items[i]));
try store.extra_data.append(store.gpa, @intFromEnum(store.scratch_type_annos.items.items[i]));
i += 1;
}
return .{ .span = .{ .start = ed_start, .len = @as(u32, @intCast(end)) - start } };
@ -2138,7 +2138,7 @@ pub fn annoRecordFieldSpanFrom(store: *NodeStore, start: u32) std.mem.Allocator.
var i = @as(usize, @intCast(start));
const ed_start = @as(u32, @intCast(store.extra_data.items.len));
while (i < end) {
try store.extra_data.append(@intFromEnum(store.scratch_anno_record_fields.items.items[i]));
try store.extra_data.append(store.gpa, @intFromEnum(store.scratch_anno_record_fields.items.items[i]));
i += 1;
}
return .{ .span = .{ .start = ed_start, .len = @as(u32, @intCast(end)) - start } };
@ -2175,7 +2175,7 @@ pub fn tokenSpanFrom(store: *NodeStore, start: u32) std.mem.Allocator.Error!Toke
var i = @as(usize, @intCast(start));
const ed_start = @as(u32, @intCast(store.extra_data.items.len));
while (i < end) {
try store.extra_data.append(store.scratch_tokens.items.items[i]);
try store.extra_data.append(store.gpa, store.scratch_tokens.items.items[i]);
i += 1;
}
return .{ .span = .{ .start = ed_start, .len = @as(u32, @intCast(end)) - start } };
@ -2212,7 +2212,7 @@ pub fn exposedItemSpanFrom(store: *NodeStore, start: u32) std.mem.Allocator.Erro
var i = @as(usize, @intCast(start));
const ed_start = @as(u32, @intCast(store.extra_data.items.len));
while (i < end) {
try store.extra_data.append(@intFromEnum(store.scratch_exposed_items.items.items[i]));
try store.extra_data.append(store.gpa, @intFromEnum(store.scratch_exposed_items.items.items[i]));
i += 1;
}
return .{ .span = .{ .start = ed_start, .len = @as(u32, @intCast(end)) - start } };
@ -2249,7 +2249,7 @@ pub fn whereClauseSpanFrom(store: *NodeStore, start: u32) std.mem.Allocator.Erro
var i = @as(usize, @intCast(start));
const ed_start = @as(u32, @intCast(store.extra_data.items.len));
while (i < end) {
try store.extra_data.append(@intFromEnum(store.scratch_where_clauses.items.items[i]));
try store.extra_data.append(store.gpa, @intFromEnum(store.scratch_where_clauses.items.items[i]));
i += 1;
}
return .{ .span = .{ .start = ed_start, .len = @as(u32, @intCast(end)) - start } };

View file

@ -29,8 +29,8 @@ gpa: std.mem.Allocator,
pos: TokenIdx,
tok_buf: TokenizedBuffer,
store: NodeStore,
scratch_nodes: std.ArrayListUnmanaged(Node.Idx),
diagnostics: std.ArrayListUnmanaged(AST.Diagnostic),
scratch_nodes: std.ArrayList(Node.Idx),
diagnostics: std.ArrayList(AST.Diagnostic),
cached_malformed_node: ?Node.Idx,
nesting_counter: u8,

View file

@ -52,7 +52,7 @@ fn runParse(env: *CommonEnv, gpa: std.mem.Allocator, parserCall: *const fn (*Par
const idx = try parserCall(&parser);
const tokenize_diagnostics_slice = try gpa.dupe(tokenize.Diagnostic, result.messages);
const tokenize_diagnostics = std.ArrayListUnmanaged(tokenize.Diagnostic).fromOwnedSlice(tokenize_diagnostics_slice);
const tokenize_diagnostics = std.ArrayList(tokenize.Diagnostic).fromOwnedSlice(tokenize_diagnostics_slice);
return .{
.env = env,

View file

@ -54,10 +54,10 @@ test "NodeStore round trip - Headers" {
var store = try NodeStore.initCapacity(gpa, NodeStore.AST_HEADER_NODE_COUNT);
defer store.deinit();
var headers = std.array_list.Managed(AST.Header).init(gpa);
defer headers.deinit();
var headers = std.ArrayList(AST.Header).empty;
defer headers.deinit(gpa);
try headers.append(AST.Header{
try headers.append(gpa, AST.Header{
.app = .{
.packages = rand_idx(AST.Collection.Idx),
.platform_idx = rand_idx(AST.RecordField.Idx),
@ -66,14 +66,14 @@ test "NodeStore round trip - Headers" {
},
});
try headers.append(AST.Header{
try headers.append(gpa, AST.Header{
.module = .{
.exposes = rand_idx(AST.Collection.Idx),
.region = rand_region(),
},
});
try headers.append(AST.Header{
try headers.append(gpa, AST.Header{
.package = .{
.exposes = rand_idx(AST.Collection.Idx),
.packages = rand_idx(AST.Collection.Idx),
@ -81,7 +81,7 @@ test "NodeStore round trip - Headers" {
},
});
try headers.append(AST.Header{
try headers.append(gpa, AST.Header{
.platform = .{
.exposes = rand_idx(AST.Collection.Idx),
.name = rand_token_idx(),
@ -93,7 +93,7 @@ test "NodeStore round trip - Headers" {
},
});
try headers.append(AST.Header{
try headers.append(gpa, AST.Header{
.hosted = .{
.exposes = rand_idx(AST.Collection.Idx),
.region = rand_region(),
@ -126,48 +126,48 @@ test "NodeStore round trip - Statement" {
var store = try NodeStore.initCapacity(gpa, NodeStore.AST_STATEMENT_NODE_COUNT);
defer store.deinit();
var statements = std.array_list.Managed(AST.Statement).init(gpa);
defer statements.deinit();
var statements = std.ArrayList(AST.Statement).empty;
defer statements.deinit(gpa);
try statements.append(AST.Statement{
try statements.append(gpa, AST.Statement{
.decl = .{
.body = rand_idx(AST.Expr.Idx),
.pattern = rand_idx(AST.Pattern.Idx),
.region = rand_region(),
},
});
try statements.append(AST.Statement{
try statements.append(gpa, AST.Statement{
.@"var" = .{
.name = rand_token_idx(),
.body = rand_idx(AST.Expr.Idx),
.region = rand_region(),
},
});
try statements.append(AST.Statement{
try statements.append(gpa, AST.Statement{
.expr = .{
.expr = rand_idx(AST.Expr.Idx),
.region = rand_region(),
},
});
try statements.append(AST.Statement{
try statements.append(gpa, AST.Statement{
.crash = .{
.expr = rand_idx(AST.Expr.Idx),
.region = rand_region(),
},
});
try statements.append(AST.Statement{
try statements.append(gpa, AST.Statement{
.dbg = .{
.expr = rand_idx(AST.Expr.Idx),
.region = rand_region(),
},
});
try statements.append(AST.Statement{
try statements.append(gpa, AST.Statement{
.expect = .{
.body = rand_idx(AST.Expr.Idx),
.region = rand_region(),
},
});
try statements.append(AST.Statement{
try statements.append(gpa, AST.Statement{
.@"for" = .{
.patt = rand_idx(AST.Pattern.Idx),
.expr = rand_idx(AST.Expr.Idx),
@ -175,14 +175,14 @@ test "NodeStore round trip - Statement" {
.region = rand_region(),
},
});
try statements.append(AST.Statement{
try statements.append(gpa, AST.Statement{
.@"return" = .{
.expr = rand_idx(AST.Expr.Idx),
.region = rand_region(),
},
});
// Simple import with no tokens
try statements.append(AST.Statement{
try statements.append(gpa, AST.Statement{
.import = .{
.alias_tok = null,
.module_name_tok = rand_token_idx(),
@ -192,7 +192,7 @@ test "NodeStore round trip - Statement" {
},
});
// Import with alias
try statements.append(AST.Statement{
try statements.append(gpa, AST.Statement{
.import = .{
.alias_tok = rand_token_idx(),
.module_name_tok = rand_token_idx(),
@ -202,7 +202,7 @@ test "NodeStore round trip - Statement" {
},
});
// Import with qualifier but no alias
try statements.append(AST.Statement{
try statements.append(gpa, AST.Statement{
.import = .{
.alias_tok = null,
.module_name_tok = rand_token_idx(),
@ -212,7 +212,7 @@ test "NodeStore round trip - Statement" {
},
});
// Import with both qualifier and alias
try statements.append(AST.Statement{
try statements.append(gpa, AST.Statement{
.import = .{
.alias_tok = rand_token_idx(),
.module_name_tok = rand_token_idx(),
@ -221,7 +221,7 @@ test "NodeStore round trip - Statement" {
.exposes = AST.ExposedItem.Span{ .span = rand_span() },
},
});
try statements.append(AST.Statement{
try statements.append(gpa, AST.Statement{
.type_decl = .{
.anno = rand_idx(AST.TypeAnno.Idx),
.header = rand_idx(AST.TypeHeader.Idx),
@ -231,7 +231,7 @@ test "NodeStore round trip - Statement" {
.associated = null,
},
});
try statements.append(AST.Statement{
try statements.append(gpa, AST.Statement{
.type_anno = .{
.name = rand_token_idx(),
.anno = rand_idx(AST.TypeAnno.Idx),
@ -268,16 +268,16 @@ test "NodeStore round trip - Pattern" {
var expected_test_count: usize = NodeStore.AST_PATTERN_NODE_COUNT;
var patterns = std.array_list.Managed(AST.Pattern).init(gpa);
defer patterns.deinit();
var patterns = std.ArrayList(AST.Pattern).empty;
defer patterns.deinit(gpa);
try patterns.append(AST.Pattern{
try patterns.append(gpa, AST.Pattern{
.ident = .{
.ident_tok = rand_token_idx(),
.region = rand_region(),
},
});
try patterns.append(AST.Pattern{
try patterns.append(gpa, AST.Pattern{
.tag = .{
.args = AST.Pattern.Span{ .span = rand_span() },
.qualifiers = AST.Token.Span{ .span = rand_span() },
@ -285,67 +285,67 @@ test "NodeStore round trip - Pattern" {
.tag_tok = rand_token_idx(),
},
});
try patterns.append(AST.Pattern{
try patterns.append(gpa, AST.Pattern{
.int = .{
.number_tok = rand_token_idx(),
.region = rand_region(),
},
});
try patterns.append(AST.Pattern{
try patterns.append(gpa, AST.Pattern{
.frac = .{
.number_tok = rand_token_idx(),
.region = rand_region(),
},
});
try patterns.append(AST.Pattern{
try patterns.append(gpa, AST.Pattern{
.string = .{
.expr = rand_idx(AST.Expr.Idx),
.region = rand_region(),
.string_tok = rand_token_idx(),
},
});
try patterns.append(AST.Pattern{
try patterns.append(gpa, AST.Pattern{
.single_quote = .{
.region = rand_region(),
.token = rand_token_idx(),
},
});
try patterns.append(AST.Pattern{
try patterns.append(gpa, AST.Pattern{
.record = .{
.fields = AST.PatternRecordField.Span{ .span = rand_span() },
.region = rand_region(),
},
});
try patterns.append(AST.Pattern{
try patterns.append(gpa, AST.Pattern{
.list = .{
.patterns = AST.Pattern.Span{ .span = rand_span() },
.region = rand_region(),
},
});
try patterns.append(AST.Pattern{
try patterns.append(gpa, AST.Pattern{
.list_rest = .{
.name = rand_token_idx(),
.region = rand_region(),
},
});
try patterns.append(AST.Pattern{
try patterns.append(gpa, AST.Pattern{
.tuple = .{
.patterns = AST.Pattern.Span{ .span = rand_span() },
.region = rand_region(),
},
});
try patterns.append(AST.Pattern{
try patterns.append(gpa, AST.Pattern{
.underscore = .{
.region = rand_region(),
},
});
try patterns.append(AST.Pattern{
try patterns.append(gpa, AST.Pattern{
.alternatives = .{
.patterns = AST.Pattern.Span{ .span = rand_span() },
.region = rand_region(),
},
});
try patterns.append(AST.Pattern{
try patterns.append(gpa, AST.Pattern{
.as = .{ .name = rand_token_idx(), .region = rand_region(), .pattern = rand_idx(AST.Pattern.Idx) },
});
@ -378,53 +378,53 @@ test "NodeStore round trip - TypeAnno" {
var expected_test_count: usize = NodeStore.AST_TYPE_ANNO_NODE_COUNT;
var ty_annos = std.array_list.Managed(AST.TypeAnno).init(gpa);
defer ty_annos.deinit();
var ty_annos = std.ArrayList(AST.TypeAnno).empty;
defer ty_annos.deinit(gpa);
try ty_annos.append(AST.TypeAnno{
try ty_annos.append(gpa, AST.TypeAnno{
.apply = .{
.args = AST.TypeAnno.Span{ .span = rand_span() },
.region = rand_region(),
},
});
try ty_annos.append(AST.TypeAnno{
try ty_annos.append(gpa, AST.TypeAnno{
.ty_var = .{
.region = rand_region(),
.tok = rand_token_idx(),
},
});
try ty_annos.append(AST.TypeAnno{
try ty_annos.append(gpa, AST.TypeAnno{
.underscore = .{
.region = rand_region(),
},
});
try ty_annos.append(AST.TypeAnno{
try ty_annos.append(gpa, AST.TypeAnno{
.ty = .{
.qualifiers = AST.Token.Span{ .span = rand_span() },
.region = rand_region(),
.token = rand_token_idx(),
},
});
try ty_annos.append(AST.TypeAnno{
try ty_annos.append(gpa, AST.TypeAnno{
.tag_union = .{
.open_anno = rand_idx(AST.TypeAnno.Idx),
.tags = AST.TypeAnno.Span{ .span = rand_span() },
.region = rand_region(),
},
});
try ty_annos.append(AST.TypeAnno{
try ty_annos.append(gpa, AST.TypeAnno{
.tuple = .{
.annos = AST.TypeAnno.Span{ .span = rand_span() },
.region = rand_region(),
},
});
try ty_annos.append(AST.TypeAnno{
try ty_annos.append(gpa, AST.TypeAnno{
.record = .{
.fields = AST.AnnoRecordField.Span{ .span = rand_span() },
.region = rand_region(),
},
});
try ty_annos.append(AST.TypeAnno{
try ty_annos.append(gpa, AST.TypeAnno{
.@"fn" = .{
.args = AST.TypeAnno.Span{ .span = rand_span() },
.ret = rand_idx(AST.TypeAnno.Idx),
@ -432,7 +432,7 @@ test "NodeStore round trip - TypeAnno" {
.region = rand_region(),
},
});
try ty_annos.append(AST.TypeAnno{
try ty_annos.append(gpa, AST.TypeAnno{
.parens = .{
.anno = rand_idx(AST.TypeAnno.Idx),
.region = rand_region(),
@ -468,74 +468,74 @@ test "NodeStore round trip - Expr" {
var expected_test_count: usize = NodeStore.AST_EXPR_NODE_COUNT;
var expressions = std.array_list.Managed(AST.Expr).init(gpa);
defer expressions.deinit();
var expressions = std.ArrayList(AST.Expr).empty;
defer expressions.deinit(gpa);
try expressions.append(AST.Expr{
try expressions.append(gpa, AST.Expr{
.int = .{
.region = rand_region(),
.token = rand_token_idx(),
},
});
try expressions.append(AST.Expr{
try expressions.append(gpa, AST.Expr{
.frac = .{
.region = rand_region(),
.token = rand_token_idx(),
},
});
try expressions.append(AST.Expr{
try expressions.append(gpa, AST.Expr{
.single_quote = .{
.region = rand_region(),
.token = rand_token_idx(),
},
});
try expressions.append(AST.Expr{
try expressions.append(gpa, AST.Expr{
.string_part = .{
.region = rand_region(),
.token = rand_token_idx(),
},
});
try expressions.append(AST.Expr{
try expressions.append(gpa, AST.Expr{
.string = .{
.parts = AST.Expr.Span{ .span = rand_span() },
.region = rand_region(),
.token = rand_token_idx(),
},
});
try expressions.append(AST.Expr{
try expressions.append(gpa, AST.Expr{
.list = .{
.items = AST.Expr.Span{ .span = rand_span() },
.region = rand_region(),
},
});
try expressions.append(AST.Expr{
try expressions.append(gpa, AST.Expr{
.tuple = .{
.items = AST.Expr.Span{ .span = rand_span() },
.region = rand_region(),
},
});
try expressions.append(AST.Expr{
try expressions.append(gpa, AST.Expr{
.record = .{
.ext = rand_idx(AST.Expr.Idx),
.fields = AST.RecordField.Span{ .span = rand_span() },
.region = rand_region(),
},
});
try expressions.append(AST.Expr{
try expressions.append(gpa, AST.Expr{
.tag = .{
.qualifiers = AST.Token.Span{ .span = rand_span() },
.region = rand_region(),
.token = rand_token_idx(),
},
});
try expressions.append(AST.Expr{
try expressions.append(gpa, AST.Expr{
.lambda = .{
.args = AST.Pattern.Span{ .span = rand_span() },
.region = rand_region(),
.body = rand_idx(AST.Expr.Idx),
},
});
try expressions.append(AST.Expr{
try expressions.append(gpa, AST.Expr{
.apply = .{
.@"fn" = rand_idx(AST.Expr.Idx),
.args = AST.Expr.Span{ .span = rand_span() },
@ -543,7 +543,7 @@ test "NodeStore round trip - Expr" {
},
});
try expressions.append(AST.Expr{
try expressions.append(gpa, AST.Expr{
.field_access = .{
.left = rand_idx(AST.Expr.Idx),
.right = rand_idx(AST.Expr.Idx),
@ -551,7 +551,7 @@ test "NodeStore round trip - Expr" {
.region = rand_region(),
},
});
try expressions.append(AST.Expr{
try expressions.append(gpa, AST.Expr{
.local_dispatch = .{
.left = rand_idx(AST.Expr.Idx),
.right = rand_idx(AST.Expr.Idx),
@ -559,7 +559,7 @@ test "NodeStore round trip - Expr" {
.region = rand_region(),
},
});
try expressions.append(AST.Expr{
try expressions.append(gpa, AST.Expr{
.bin_op = .{
.left = rand_idx(AST.Expr.Idx),
.right = rand_idx(AST.Expr.Idx),
@ -567,21 +567,21 @@ test "NodeStore round trip - Expr" {
.region = rand_region(),
},
});
try expressions.append(AST.Expr{
try expressions.append(gpa, AST.Expr{
.suffix_single_question = .{
.expr = rand_idx(AST.Expr.Idx),
.operator = rand_token_idx(),
.region = rand_region(),
},
});
try expressions.append(AST.Expr{
try expressions.append(gpa, AST.Expr{
.unary_op = .{
.expr = rand_idx(AST.Expr.Idx),
.operator = rand_token_idx(),
.region = rand_region(),
},
});
try expressions.append(AST.Expr{
try expressions.append(gpa, AST.Expr{
.if_then_else = .{
.@"else" = rand_idx(AST.Expr.Idx),
.condition = rand_idx(AST.Expr.Idx),
@ -589,37 +589,37 @@ test "NodeStore round trip - Expr" {
.region = rand_region(),
},
});
try expressions.append(AST.Expr{
try expressions.append(gpa, AST.Expr{
.match = .{
.branches = AST.MatchBranch.Span{ .span = rand_span() },
.expr = rand_idx(AST.Expr.Idx),
.region = rand_region(),
},
});
try expressions.append(AST.Expr{
try expressions.append(gpa, AST.Expr{
.ident = .{
.qualifiers = AST.Token.Span{ .span = rand_span() },
.region = rand_region(),
.token = rand_token_idx(),
},
});
try expressions.append(AST.Expr{
try expressions.append(gpa, AST.Expr{
.dbg = .{
.expr = rand_idx(AST.Expr.Idx),
.region = rand_region(),
},
});
try expressions.append(AST.Expr{
try expressions.append(gpa, AST.Expr{
.record_builder = .{
.fields = rand_idx(AST.RecordField.Idx),
.mapper = rand_idx(AST.Expr.Idx),
.region = rand_region(),
},
});
try expressions.append(AST.Expr{
try expressions.append(gpa, AST.Expr{
.ellipsis = .{ .region = rand_region() },
});
try expressions.append(AST.Expr{
try expressions.append(gpa, AST.Expr{
.block = .{
.region = rand_region(),
.statements = AST.Statement.Span{ .span = rand_span() },

View file

@ -27,12 +27,13 @@ const LoadedModule = struct {
gpa: std.mem.Allocator,
fn deinit(self: *LoadedModule) void {
// IMPORTANT: When a module is deserialized from a buffer, all its internal structures
// (common, types, external_decls, imports, store) contain pointers INTO the buffer,
// not separately allocated memory. Therefore we should NOT call deinit() on any of them.
// The only memory we need to free is:
// 1. The buffer itself (which contains all the deserialized data)
// 2. The env struct itself (which was allocated with create())
// IMPORTANT: When a module is deserialized from a buffer, most of its internal structures
// (common, types, external_decls, store) contain pointers INTO the buffer,
// not separately allocated memory. However, the imports.map is allocated separately
// during deserialization and must be freed.
// Free the imports map (allocated in CIR.Import.Store.Serialized.deserialize)
self.env.imports.map.deinit(self.gpa);
// Free the buffer (all data structures point into this buffer)
self.gpa.free(self.buffer);
@ -216,9 +217,9 @@ pub const Repl = struct {
defer tree.deinit();
try module_env.pushToSExprTree(expr_idx, &tree);
var can_buffer = std.array_list.Managed(u8).init(self.allocator);
defer can_buffer.deinit();
try tree.toStringPretty(can_buffer.writer().any(), .include_linecol);
var can_buffer = std.ArrayList(u8).empty;
defer can_buffer.deinit(self.allocator);
try tree.toStringPretty(can_buffer.writer(self.allocator).any(), .include_linecol);
const can_html = try self.allocator.dupe(u8, can_buffer.items);
try self.debug_can_html.append(can_html);
@ -230,9 +231,9 @@ pub const Repl = struct {
defer tree.deinit();
try module_env.pushTypesToSExprTree(expr_idx, &tree);
var types_buffer = std.array_list.Managed(u8).init(self.allocator);
defer types_buffer.deinit();
try tree.toStringPretty(types_buffer.writer().any(), .include_linecol);
var types_buffer = std.ArrayList(u8).empty;
defer types_buffer.deinit(self.allocator);
try tree.toStringPretty(types_buffer.writer(self.allocator).any(), .include_linecol);
const types_html = try self.allocator.dupe(u8, types_buffer.items);
try self.debug_types_html.append(types_html);
@ -429,29 +430,29 @@ pub const Repl = struct {
return try self.allocator.dupe(u8, current_expr);
}
var buffer = std.array_list.Managed(u8).init(self.allocator);
defer buffer.deinit();
var buffer = std.ArrayList(u8).empty;
errdefer buffer.deinit(self.allocator);
// Start block
try buffer.appendSlice("{\n");
try buffer.appendSlice(self.allocator, "{\n");
// Add all definitions in order
var iterator = self.definitions.iterator();
while (iterator.next()) |kv| {
try buffer.appendSlice(" ");
try buffer.appendSlice(kv.value_ptr.*);
try buffer.append('\n');
try buffer.appendSlice(self.allocator, " ");
try buffer.appendSlice(self.allocator, kv.value_ptr.*);
try buffer.append(self.allocator, '\n');
}
// Add current expression
try buffer.appendSlice(" ");
try buffer.appendSlice(current_expr);
try buffer.append('\n');
try buffer.appendSlice(self.allocator, " ");
try buffer.appendSlice(self.allocator, current_expr);
try buffer.append(self.allocator, '\n');
// End block
try buffer.append('}');
try buffer.append(self.allocator, '}');
return try buffer.toOwnedSlice();
return try buffer.toOwnedSlice(self.allocator);
}
/// Evaluate source code

View file

@ -698,13 +698,13 @@ test "Document string memory safety" {
defer document.deinit();
// Create temporary strings that would be freed in real usage
var temp_text = std.array_list.Managed(u8).init(gpa);
defer temp_text.deinit();
try temp_text.appendSlice("This is test text");
var temp_text = std.ArrayList(u8).empty;
defer temp_text.deinit(gpa);
try temp_text.appendSlice(gpa, "This is test text");
var temp_annotated = std.array_list.Managed(u8).init(gpa);
defer temp_annotated.deinit();
try temp_annotated.appendSlice("This is annotated");
var temp_annotated = std.ArrayList(u8).empty;
defer temp_annotated.deinit(gpa);
try temp_annotated.appendSlice(gpa, "This is annotated");
// Add text and annotated content (should be copied)
try document.addText(temp_text.items);

View file

@ -19,8 +19,8 @@ const ColorPalette = @import("style.zig").ColorPalette;
test "SYNTAX_PROBLEM report along with all four render types" {
const gpa = testing.allocator;
var buffer = std.array_list.Managed(u8).init(gpa);
defer buffer.deinit();
var buffer = std.ArrayList(u8).empty;
defer buffer.deinit(gpa);
// Create a Report
var r = Report.init(gpa, "SYNTAX PROBLEM", .runtime_error);

View file

@ -572,7 +572,7 @@ fn renderReportsToExpectedContent(allocator: std.mem.Allocator, reports: *const
}
// Render all reports to markdown and then parse the problems
var problems_buffer_unmanaged = std.ArrayListUnmanaged(u8).empty;
var problems_buffer_unmanaged = std.ArrayList(u8).empty;
var problems_writer_allocating: std.Io.Writer.Allocating = .fromArrayList(allocator, &problems_buffer_unmanaged);
defer problems_buffer_unmanaged.deinit(allocator);
@ -1478,11 +1478,11 @@ fn processSnapshotContent(
}
// Buffer all output in memory before writing files
var md_buffer_unmanaged = std.ArrayListUnmanaged(u8).empty;
var md_buffer_unmanaged = std.ArrayList(u8).empty;
var md_writer_allocating: std.Io.Writer.Allocating = .fromArrayList(allocator, &md_buffer_unmanaged);
defer md_buffer_unmanaged.deinit(allocator);
var html_buffer_unmanaged: ?std.ArrayListUnmanaged(u8) = if (config.generate_html) std.ArrayListUnmanaged(u8).empty else null;
var html_buffer_unmanaged: ?std.ArrayList(u8) = if (config.generate_html) std.ArrayList(u8).empty else null;
var html_writer_allocating: ?std.Io.Writer.Allocating = if (config.generate_html) .fromArrayList(allocator, &html_buffer_unmanaged.?) else null;
defer {
if (html_buffer_unmanaged) |*buf| buf.deinit(allocator);
@ -2490,7 +2490,7 @@ fn generateTypesSection(output: *DualOutput, can_ir: *ModuleEnv, maybe_expr_idx:
/// Generate TYPES section displaying types store for both markdown and HTML
/// This is used for debugging.
fn generateTypesStoreSection(gpa: std.mem.Allocator, output: *DualOutput, can_ir: *ModuleEnv) !void {
var solved_unmanaged = std.ArrayListUnmanaged(u8).empty;
var solved_unmanaged = std.ArrayList(u8).empty;
var solved_writer: std.Io.Writer.Allocating = .fromArrayList(output.gpa, &solved_unmanaged);
defer solved_unmanaged.deinit(output.gpa);
@ -2607,7 +2607,7 @@ fn generateHtmlClosing(output: *DualOutput) !void {
}
/// Write HTML buffer to file
fn writeHtmlFile(gpa: Allocator, snapshot_path: []const u8, html_buffer: *std.ArrayListUnmanaged(u8)) !void {
fn writeHtmlFile(gpa: Allocator, snapshot_path: []const u8, html_buffer: *std.ArrayList(u8)) !void {
// Convert .md path to .html path
const html_path = blk: {
if (std.mem.endsWith(u8, snapshot_path, ".md")) {
@ -2805,11 +2805,11 @@ fn processReplSnapshot(allocator: Allocator, content: Content, output_path: []co
log("Processing REPL snapshot: {s}", .{output_path});
// Buffer all output in memory before writing files
var md_buffer_unmanaged = std.ArrayListUnmanaged(u8).empty;
var md_buffer_unmanaged = std.ArrayList(u8).empty;
var md_writer_allocating: std.Io.Writer.Allocating = .fromArrayList(allocator, &md_buffer_unmanaged);
defer md_buffer_unmanaged.deinit(allocator);
var html_buffer_unmanaged: ?std.ArrayListUnmanaged(u8) = if (config.generate_html) std.ArrayListUnmanaged(u8).empty else null;
var html_buffer_unmanaged: ?std.ArrayList(u8) = if (config.generate_html) std.ArrayList(u8).empty else null;
var html_writer_allocating: ?std.Io.Writer.Allocating = if (config.generate_html) .fromArrayList(allocator, &html_buffer_unmanaged.?) else null;
defer {
if (html_buffer_unmanaged) |*buf| buf.deinit(allocator);

View file

@ -194,13 +194,13 @@ pub const Instantiator = struct {
}
fn instantiateAlias(self: *Self, alias: Alias) std.mem.Allocator.Error!Content {
var fresh_vars = std.array_list.Managed(Var).init(self.store.gpa);
defer fresh_vars.deinit();
var fresh_vars = std.ArrayList(Var).empty;
defer fresh_vars.deinit(self.store.gpa);
var iter = self.store.iterAliasArgs(alias);
while (iter.next()) |arg_var| {
const fresh_elem = try self.instantiateVar(arg_var);
try fresh_vars.append(fresh_elem);
try fresh_vars.append(self.store.gpa, fresh_elem);
}
const backing_var = self.store.getAliasBackingVar(alias);
@ -233,13 +233,13 @@ pub const Instantiator = struct {
const backing_var = self.store.getNominalBackingVar(nominal);
const fresh_backing_var = try self.instantiateVar(backing_var);
var fresh_vars = std.array_list.Managed(Var).init(self.store.gpa);
defer fresh_vars.deinit();
var fresh_vars = std.ArrayList(Var).empty;
defer fresh_vars.deinit(self.store.gpa);
var iter = self.store.iterNominalArgs(nominal);
while (iter.next()) |arg_var| {
const fresh_elem = try self.instantiateVar(arg_var);
try fresh_vars.append(fresh_elem);
try fresh_vars.append(self.store.gpa, fresh_elem);
}
return (try self.store.mkNominal(nominal.ident, fresh_backing_var, fresh_vars.items, nominal.origin_module)).structure.nominal_type;
@ -247,12 +247,12 @@ pub const Instantiator = struct {
fn instantiateTuple(self: *Self, tuple: Tuple) std.mem.Allocator.Error!Tuple {
const elems_slice = self.store.sliceVars(tuple.elems);
var fresh_elems = std.array_list.Managed(Var).init(self.store.gpa);
defer fresh_elems.deinit();
var fresh_elems = std.ArrayList(Var).empty;
defer fresh_elems.deinit(self.store.gpa);
for (elems_slice) |elem_var| {
const fresh_elem = try self.instantiateVar(elem_var);
try fresh_elems.append(fresh_elem);
try fresh_elems.append(self.store.gpa, fresh_elem);
}
const fresh_elems_range = try self.store.appendVars(fresh_elems.items);
@ -276,12 +276,12 @@ pub const Instantiator = struct {
fn instantiateFunc(self: *Self, func: Func) std.mem.Allocator.Error!Func {
const args_slice = self.store.sliceVars(func.args);
var fresh_args = std.array_list.Managed(Var).init(self.store.gpa);
defer fresh_args.deinit();
var fresh_args = std.ArrayList(Var).empty;
defer fresh_args.deinit(self.store.gpa);
for (args_slice) |arg_var| {
const fresh_arg = try self.instantiateVar(arg_var);
try fresh_args.append(fresh_arg);
try fresh_args.append(self.store.gpa, fresh_arg);
}
const fresh_ret = try self.instantiateVar(func.ret);
@ -296,12 +296,12 @@ pub const Instantiator = struct {
fn instantiateRecordFields(self: *Self, fields: RecordField.SafeMultiList.Range) std.mem.Allocator.Error!RecordField.SafeMultiList.Range {
const fields_slice = self.store.getRecordFieldsSlice(fields);
var fresh_fields = std.array_list.Managed(RecordField).init(self.store.gpa);
defer fresh_fields.deinit();
var fresh_fields = std.ArrayList(RecordField).empty;
defer fresh_fields.deinit(self.store.gpa);
for (fields_slice.items(.name), fields_slice.items(.var_)) |name, type_var| {
const fresh_type = try self.instantiateVar(type_var);
_ = try fresh_fields.append(RecordField{
_ = try fresh_fields.append(self.store.gpa, RecordField{
.name = name,
.var_ = fresh_type,
});
@ -313,12 +313,12 @@ pub const Instantiator = struct {
fn instantiateRecord(self: *Self, record: Record) std.mem.Allocator.Error!Record {
const fields_slice = self.store.getRecordFieldsSlice(record.fields);
var fresh_fields = std.array_list.Managed(RecordField).init(self.store.gpa);
defer fresh_fields.deinit();
var fresh_fields = std.ArrayList(RecordField).empty;
defer fresh_fields.deinit(self.store.gpa);
for (fields_slice.items(.name), fields_slice.items(.var_)) |name, type_var| {
const fresh_type = try self.instantiateVar(type_var);
_ = try fresh_fields.append(RecordField{
_ = try fresh_fields.append(self.store.gpa, RecordField{
.name = name,
.var_ = fresh_type,
});
@ -334,22 +334,22 @@ pub const Instantiator = struct {
fn instantiateTagUnion(self: *Self, tag_union: TagUnion) std.mem.Allocator.Error!TagUnion {
const tags_slice = self.store.getTagsSlice(tag_union.tags);
var fresh_tags = std.array_list.Managed(Tag).init(self.store.gpa);
defer fresh_tags.deinit();
var fresh_tags = std.ArrayList(Tag).empty;
defer fresh_tags.deinit(self.store.gpa);
for (tags_slice.items(.name), tags_slice.items(.args)) |tag_name, tag_args| {
var fresh_args = std.array_list.Managed(Var).init(self.store.gpa);
defer fresh_args.deinit();
var fresh_args = std.ArrayList(Var).empty;
defer fresh_args.deinit(self.store.gpa);
const args_slice = self.store.sliceVars(tag_args);
for (args_slice) |arg_var| {
const fresh_arg = try self.instantiateVar(arg_var);
try fresh_args.append(fresh_arg);
try fresh_args.append(self.store.gpa, fresh_arg);
}
const fresh_args_range = try self.store.appendVars(fresh_args.items);
_ = try fresh_tags.append(Tag{
_ = try fresh_tags.append(self.store.gpa, Tag{
.name = tag_name,
.args = fresh_args_range,
});
@ -371,12 +371,12 @@ pub const Instantiator = struct {
if (constraints_len == 0) {
return StaticDispatchConstraint.SafeList.Range.empty();
} else {
var fresh_constraints = try std.array_list.Managed(StaticDispatchConstraint).initCapacity(self.store.gpa, constraints.len());
defer fresh_constraints.deinit();
var fresh_constraints = try std.ArrayList(StaticDispatchConstraint).initCapacity(self.store.gpa, constraints.len());
defer fresh_constraints.deinit(self.store.gpa);
for (self.store.sliceStaticDispatchConstraints(constraints)) |constraint| {
const fresh_constraint = try self.instantiateStaticDispatchConstraint(constraint);
try fresh_constraints.append(fresh_constraint);
try fresh_constraints.append(self.store.gpa, fresh_constraint);
}
const fresh_constraints_range = try self.store.appendStaticDispatchConstraints(fresh_constraints.items);

View file

@ -1368,18 +1368,17 @@ test "thread safety" {
const global = struct {
var event_count: std.atomic.Value(u32) = std.atomic.Value(u32).init(0);
var mutex: std.Thread.Mutex = .{};
var events: std.array_list.Managed([]const u8) = undefined;
var events: std.ArrayList([]const u8) = .empty;
};
global.events = std.array_list.Managed([]const u8).init(allocator);
defer global.events.deinit();
defer global.events.deinit(allocator);
const callback = struct {
fn cb(event: WatchEvent) void {
_ = global.event_count.fetchAdd(1, .seq_cst);
global.mutex.lock();
defer global.mutex.unlock();
global.events.append(allocator.dupe(u8, event.path) catch return) catch return;
global.events.append(allocator, allocator.dupe(u8, event.path) catch return) catch return;
}
}.cb;
@ -1541,14 +1540,14 @@ test "windows long path handling" {
}
// Create the nested directories
var current_path = std.array_list.Managed(u8).init(allocator);
defer current_path.deinit();
var current_path = std.ArrayList(u8).empty;
defer current_path.deinit(allocator);
for (path_components.items) |component| {
if (current_path.items.len > 0) {
try current_path.append(std.fs.path.sep);
try current_path.append(allocator, std.fs.path.sep);
}
try current_path.appendSlice(component);
try current_path.appendSlice(allocator, component);
try temp_dir.dir.makePath(current_path.items);
}

View file

@ -0,0 +1,7 @@
app [main] {}
# This file intentionally has a parse error for testing stderr reporting
main =
# Missing closing quote to trigger tokenization error
x = "unclosed string
x

View file

@ -841,15 +841,15 @@ fn runTests(arena: std.mem.Allocator, gpa: std.mem.Allocator, test_cases: []cons
.skipped = 0,
};
var failures = std.array_list.Managed(TestFailure).init(arena);
defer failures.deinit();
var failures = std.ArrayList(TestFailure).empty;
defer failures.deinit(arena);
for (test_cases) |case| {
logDebug("\n[INFO] Setting up WASM interface for test case: {s}...\n", .{case.name});
var wasm_interface = setupWasm(gpa, arena, wasm_path) catch |err| {
logDebug("[ERROR] Failed to setup WASM for test case '{s}': {}\n", .{ case.name, err });
stats.failed += 1;
try failures.append(.{
try failures.append(arena, .{
.case_name = case.name,
.step_index = 0,
.message = "WASM setup failed",
@ -865,7 +865,7 @@ fn runTests(arena: std.mem.Allocator, gpa: std.mem.Allocator, test_cases: []cons
.failed => {
stats.failed += 1;
const failure_msg = case_execution_result.failure_message orelse "Test failed";
try failures.append(.{
try failures.append(arena, .{
.case_name = case.name,
.step_index = 0, // Could be enhanced to track specific step
.message = failure_msg,
@ -964,8 +964,8 @@ pub fn main() !void {
const playground_wasm_path = wasm_path orelse "zig-out/bin/playground.wasm";
// Setup our test cases
var test_cases = std.array_list.Managed(TestCase).init(allocator);
defer test_cases.deinit(); // This will free the TestCase structs and their `steps` slices.
var test_cases = std.ArrayList(TestCase).empty;
defer test_cases.deinit(allocator); // This will free the TestCase structs and their `steps` slices.
// Functional Test
var happy_path_steps = try allocator.alloc(MessageStep, 8);
@ -1009,21 +1009,21 @@ pub fn main() !void {
.expected_status = "SUCCESS",
.expected_hover_info_contains = "Str",
};
try test_cases.append(.{
try test_cases.append(allocator, .{
.name = "Happy Path - Simple Roc Program",
.steps = happy_path_steps,
});
// Error Handling Test
const syntax_error_code_val = try TestData.syntaxErrorRocCode(allocator);
try test_cases.append(try createSimpleTest(allocator, "Syntax Error - Mismatched Braces", syntax_error_code_val, .{ .min_errors = 1, .error_messages = &.{"LIST NOT CLOSED"} }, true));
try test_cases.append(allocator, try createSimpleTest(allocator, "Syntax Error - Mismatched Braces", syntax_error_code_val, .{ .min_errors = 1, .error_messages = &.{"LIST NOT CLOSED"} }, true));
const type_error_code_val = try TestData.typeErrorRocCode(allocator);
try test_cases.append(try createSimpleTest(allocator, "Type Error - Adding String and Number", type_error_code_val, .{ .min_errors = 1, .error_messages = &.{"TYPE MISMATCH"} }, true));
try test_cases.append(allocator, try createSimpleTest(allocator, "Type Error - Adding String and Number", type_error_code_val, .{ .min_errors = 1, .error_messages = &.{"TYPE MISMATCH"} }, true));
// Empty Source Test
const empty_source_code = try allocator.dupe(u8, "");
try test_cases.append(try createSimpleTest(allocator, "Empty Source Code", empty_source_code, null, false)); // Disable diagnostic expectations
try test_cases.append(allocator, try createSimpleTest(allocator, "Empty Source Code", empty_source_code, null, false)); // Disable diagnostic expectations
// Code Formatting Test
var formatted_test_steps = try allocator.alloc(MessageStep, 3);
@ -1040,7 +1040,7 @@ pub fn main() !void {
.expected_status = "SUCCESS",
.expected_data_contains = "foo",
};
try test_cases.append(.{
try test_cases.append(allocator, .{
.name = "QUERY_FORMATTED - Code Formatting",
.steps = formatted_test_steps,
});
@ -1053,7 +1053,7 @@ pub fn main() !void {
.expected_status = "INVALID_MESSAGE",
.expected_message_contains = "Unknown message type",
};
try test_cases.append(.{
try test_cases.append(allocator, .{
.name = "Invalid Message Type",
.steps = invalid_msg_type_steps,
});
@ -1077,7 +1077,7 @@ pub fn main() !void {
.owned_source = happy_code_after_reset,
};
reset_test_steps[4] = .{ .message = .{ .type = "QUERY_TYPES" }, .expected_status = "SUCCESS", .expected_data_contains = "inferred-types" };
try test_cases.append(.{
try test_cases.append(allocator, .{
.name = "RESET Message Functionality",
.steps = reset_test_steps,
});
@ -1099,7 +1099,7 @@ pub fn main() !void {
.expected_diagnostics = .{ .min_errors = 1, .error_messages = &.{"LIST NOT CLOSED"} },
.owned_source = code_for_mem_test_2,
};
try test_cases.append(.{
try test_cases.append(allocator, .{
.name = "Memory Corruption on Reset - Load, Reset, Load",
.steps = memory_corruption_steps,
});
@ -1127,7 +1127,7 @@ pub fn main() !void {
.expected_status = "SUCCESS",
.expected_hover_info_contains = "Num(Int(Signed32))",
};
try test_cases.append(.{
try test_cases.append(allocator, .{
.name = "GET_HOVER_INFO - Specific Type Query",
.steps = get_hover_info_steps,
});
@ -1142,7 +1142,7 @@ pub fn main() !void {
repl_lifecycle_steps[3] = .{ .message = .{ .type = "CLEAR_REPL" }, .expected_status = "SUCCESS", .expected_message_contains = "REPL cleared" };
repl_lifecycle_steps[4] = .{ .message = .{ .type = "RESET" }, .expected_status = "SUCCESS" };
try test_cases.append(.{
try test_cases.append(allocator, .{
.name = "REPL Lifecycle - Init, Step, Clear, Reset",
.steps = repl_lifecycle_steps,
});
@ -1156,7 +1156,7 @@ pub fn main() !void {
repl_core_steps[4] = .{ .message = .{ .type = "REPL_STEP", .input = "y" }, .expected_status = "SUCCESS", .expected_result_output_contains = "15" };
repl_core_steps[5] = .{ .message = .{ .type = "RESET" }, .expected_status = "SUCCESS" };
try test_cases.append(.{
try test_cases.append(allocator, .{
.name = "REPL Core - Definitions and Expressions",
.steps = repl_core_steps,
});
@ -1176,7 +1176,7 @@ pub fn main() !void {
};
repl_redefinition_steps[7] = .{ .message = .{ .type = "RESET" }, .expected_status = "SUCCESS" };
try test_cases.append(.{
try test_cases.append(allocator, .{
.name = "REPL Variable Redefinition - Dependency Updates",
.steps = repl_redefinition_steps,
});
@ -1200,7 +1200,7 @@ pub fn main() !void {
};
repl_error_steps[5] = .{ .message = .{ .type = "RESET" }, .expected_status = "SUCCESS" };
try test_cases.append(.{
try test_cases.append(allocator, .{
.name = "REPL Error Handling - Invalid Syntax Recovery",
.steps = repl_error_steps,
});
@ -1217,7 +1217,7 @@ pub fn main() !void {
.expected_data_contains = "can-ir",
};
try test_cases.append(.{
try test_cases.append(allocator, .{
.name = "REPL Compiler Integration - Query After Evaluation",
.steps = repl_compiler_steps,
});
@ -1238,7 +1238,7 @@ pub fn main() !void {
};
repl_isolation_steps[5] = .{ .message = .{ .type = "QUERY_TYPES" }, .expected_status = "SUCCESS" };
try test_cases.append(.{
try test_cases.append(allocator, .{
.name = "REPL State Isolation - Mode Switching",
.steps = repl_isolation_steps,
});

View file

@ -217,9 +217,9 @@ main! = |_| {
(p-assign (ident "main!"))
(e-closure
(captures
(capture (ident "addOne"))
(capture (ident "identity"))
(capture (ident "combine"))
(capture (ident "identity")))
(capture (ident "addOne")))
(e-lambda
(args
(p-underscore))

View file

@ -880,8 +880,8 @@ combineResults = |jsonResult, httpStatus|
(p-assign (ident "combineResults"))
(e-closure
(captures
(capture (ident "error"))
(capture (ident "value")))
(capture (ident "value"))
(capture (ident "error")))
(e-lambda
(args
(p-assign (ident "jsonResult"))

View file

@ -559,10 +559,10 @@ combineResults = |result1, result2|
(p-assign (ident "combineResults"))
(e-closure
(captures
(capture (ident "value1"))
(capture (ident "value2"))
(capture (ident "err"))
(capture (ident "err"))
(capture (ident "value1")))
(capture (ident "err")))
(e-lambda
(args
(p-assign (ident "result1"))

View file

@ -13,9 +13,31 @@ type=expr
}
~~~
# EXPECTED
NIL
UNUSED VALUE - ann_effectful_fn.md:2:35:2:39
UNUSED VALUE - ann_effectful_fn.md:2:40:2:53
# PROBLEMS
NIL
**UNUSED VALUE**
This expression produces a value, but it's not being used:
**ann_effectful_fn.md:2:35:2:39:**
```roc
launchTheNukes : {} => Result Bool LaunchNukeErr
```
^^^^
It has the type:
_[Bool]_others_
**UNUSED VALUE**
This expression produces a value, but it's not being used:
**ann_effectful_fn.md:2:40:2:53:**
```roc
launchTheNukes : {} => Result Bool LaunchNukeErr
```
^^^^^^^^^^^^^
It has the type:
_[LaunchNukeErr]_others_
# TOKENS
~~~zig
OpenCurly,

View file

@ -683,10 +683,10 @@ h = |x, y| {
(p-assign (ident "h"))
(e-closure
(captures
(capture (ident "a"))
(capture (ident "h"))
(capture (ident "a"))
(capture (ident "a"))
(capture (ident "a"))
(capture (ident "a")))
(e-lambda
(args

View file

@ -1763,6 +1763,7 @@ h = |x, y| {
(p-assign (ident "h"))
(e-closure
(captures
(capture (ident "h"))
(capture (ident "a"))
(capture (ident "a"))
(capture (ident "h"))

View file

@ -371,10 +371,10 @@ NO CHANGE
(p-assign (ident "h"))
(e-closure
(captures
(capture (ident "a"))
(capture (ident "h"))
(capture (ident "a"))
(capture (ident "a"))
(capture (ident "a"))
(capture (ident "a")))
(e-lambda
(args

View file

@ -484,10 +484,10 @@ h = |
(p-assign (ident "h"))
(e-closure
(captures
(capture (ident "a"))
(capture (ident "h"))
(capture (ident "a"))
(capture (ident "a"))
(capture (ident "a"))
(capture (ident "a")))
(e-lambda
(args

View file

@ -163,7 +163,9 @@ UNDEFINED VARIABLE - fuzz_crash_019.md:80:3:80:6
CRASH EXPECTS STRING - fuzz_crash_019.md:86:3:86:11
UNDEFINED VARIABLE - fuzz_crash_019.md:87:11:87:12
UNDEFINED VARIABLE - fuzz_crash_019.md:89:3:89:6
NOT IMPLEMENTED - :0:0:0:0
UNDEFINED VARIABLE - fuzz_crash_019.md:92:11:92:15
UNDEFINED VARIABLE - fuzz_crash_019.md:93:2:93:7
UNDEFINED VARIABLE - fuzz_crash_019.md:94:3:94:6
UNDEFINED VARIABLE - fuzz_crash_019.md:96:34:96:37
UNDEFINED VARIABLE - fuzz_crash_019.md:96:47:96:52
UNDEFINED VARIABLE - fuzz_crash_019.md:96:54:96:57
@ -187,8 +189,15 @@ UNDEFINED VARIABLE - fuzz_crash_019.md:120:1:120:2
UNDEFINED VARIABLE - fuzz_crash_019.md:120:6:120:9
EXPOSED BUT NOT DEFINED - fuzz_crash_019.md:2:6:2:11
TOO FEW ARGS - fuzz_crash_019.md:17:3:18:4
UNUSED VALUE - fuzz_crash_019.md:39:2:39:3
INCOMPATIBLE MATCH PATTERNS - fuzz_crash_019.md:52:2:52:2
UNUSED VALUE - fuzz_crash_019.md:1:1:1:1
TYPE MISMATCH - fuzz_crash_019.md:84:2:86:3
UNUSED VALUE - fuzz_crash_019.md:84:2:86:3
UNUSED VALUE - fuzz_crash_019.md:86:11:86:17
UNUSED VALUE - fuzz_crash_019.md:98:4:104:3
UNUSED VALUE - fuzz_crash_019.md:105:2:105:54
UNUSED VALUE - fuzz_crash_019.md:105:55:105:85
# PROBLEMS
**PARSE ERROR**
A parsing error occurred: `match_branch_missing_arrow`
@ -586,10 +595,38 @@ Is there an `import` or `exposing` missing up-top?
^^^
**NOT IMPLEMENTED**
This feature is not yet implemented: statement type in block
**UNDEFINED VARIABLE**
Nothing is named `list` in this scope.
Is there an `import` or `exposing` missing up-top?
**fuzz_crash_019.md:92:11:92:15:**
```roc
for n in list {
```
^^^^
**UNDEFINED VARIABLE**
Nothing is named `line!` in this scope.
Is there an `import` or `exposing` missing up-top?
**fuzz_crash_019.md:93:2:93:7:**
```roc
line!("Ag ${n} to ${er}")
```
^^^^^
**UNDEFINED VARIABLE**
Nothing is named `ber` in this scope.
Is there an `import` or `exposing` missing up-top?
**fuzz_crash_019.md:94:3:94:6:**
```roc
ber + n
```
^^^
This error doesn't have a proper diagnostic report yet. Let us know if you want to help improve Roc's error messages!
**UNDEFINED VARIABLE**
Nothing is named `tag` in this scope.
@ -846,6 +883,17 @@ The type _List_ expects argument, but got instead.
**UNUSED VALUE**
This expression produces a value, but it's not being used:
**fuzz_crash_019.md:39:2:39:3:**
```roc
1
```
^
It has the type:
_Num(_size)_
**INCOMPATIBLE MATCH PATTERNS**
The pattern in the fourth branch of this `match` differs from previous ones:
**fuzz_crash_019.md:52:2:**
@ -881,6 +929,17 @@ All patterns in an `match` must have compatible types.
**UNUSED VALUE**
This expression produces a value, but it's not being used:
**fuzz_crash_019.md:1:1:1:1:**
```roc
# Thnt!
```
^
It has the type:
__f_
**TYPE MISMATCH**
This expression is used in an unexpected way:
**fuzz_crash_019.md:84:2:86:3:**
@ -896,6 +955,67 @@ It has the type:
But I expected it to be:
_[Blue]_others, [Tb]_others2 -> Error_
**UNUSED VALUE**
This expression produces a value, but it's not being used:
**fuzz_crash_019.md:84:2:86:3:**
```roc
me(
..., # r
)crash ke"Unr!" #)
```
It has the type:
__f_
**UNUSED VALUE**
This expression produces a value, but it's not being used:
**fuzz_crash_019.md:86:11:86:17:**
```roc
)crash ke"Unr!" #)
```
^^^^^^
It has the type:
_Str_
**UNUSED VALUE**
This expression produces a value, but it's not being used:
**fuzz_crash_019.md:98:4:104:3:**
```roc
m (
123,
"World",ag1,
O, # nt
(ne, tuple),
[1, 2, 3],
)
```
It has the type:
_(Num(_size), Str, Error, [O]_others, (Error, Error), List(Num(_size2)))_
**UNUSED VALUE**
This expression produces a value, but it's not being used:
**fuzz_crash_019.md:105:2:105:54:**
```roc
b?? 12 > 5 or 13 + 2 < 5 and 10 - 1 >= 16 or 12 <= 3 e_fn(arg1)?.od()?.ned()?.recd?
```
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
It has the type:
_Bool_
**UNUSED VALUE**
This expression produces a value, but it's not being used:
**fuzz_crash_019.md:105:55:105:85:**
```roc
b?? 12 > 5 or 13 + 2 < 5 and 10 - 1 >= 16 or 12 <= 3 e_fn(arg1)?.od()?.ned()?.recd?
```
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
It has the type:
__f_
# TOKENS
~~~zig
KwApp,OpenSquare,LowerIdent,CloseSquare,OpenCurly,LowerIdent,OpColon,KwPlatform,StringStart,StringPart,StringEnd,CloseCurly,
@ -1686,7 +1806,25 @@ expect {
(p-assign (ident "er"))))
(e-num (value "456"))
(e-num (value "9")))))
(s-runtime-error (tag "not_implemented"))
(s-for
(p-assign (ident "n"))
(e-runtime-error (tag "ident_not_in_scope"))
(e-block
(s-expr
(e-call
(e-runtime-error (tag "ident_not_in_scope"))
(e-string
(e-literal (string "Ag "))
(e-lookup-local
(p-assign (ident "n")))
(e-literal (string " to "))
(e-lookup-local
(p-assign (ident "er")))
(e-literal (string "")))))
(e-binop (op "add")
(e-runtime-error (tag "ident_not_in_scope"))
(e-lookup-local
(p-assign (ident "n"))))))
(s-let
(p-assign (ident "rd"))
(e-record

View file

@ -164,7 +164,9 @@ UNDEFINED VARIABLE - fuzz_crash_020.md:80:3:80:6
CRASH EXPECTS STRING - fuzz_crash_020.md:86:3:86:11
UNDEFINED VARIABLE - fuzz_crash_020.md:87:11:87:12
UNDEFINED VARIABLE - fuzz_crash_020.md:89:3:89:6
NOT IMPLEMENTED - :0:0:0:0
UNDEFINED VARIABLE - fuzz_crash_020.md:92:11:92:15
UNDEFINED VARIABLE - fuzz_crash_020.md:93:2:93:7
UNDEFINED VARIABLE - fuzz_crash_020.md:94:3:94:6
UNDEFINED VARIABLE - fuzz_crash_020.md:96:34:96:37
UNDEFINED VARIABLE - fuzz_crash_020.md:96:47:96:52
UNDEFINED VARIABLE - fuzz_crash_020.md:96:54:96:57
@ -188,7 +190,13 @@ UNDEFINED VARIABLE - fuzz_crash_020.md:120:1:120:2
UNDEFINED VARIABLE - fuzz_crash_020.md:120:6:120:9
EXPOSED BUT NOT DEFINED - fuzz_crash_020.md:2:6:2:11
TOO FEW ARGS - fuzz_crash_020.md:17:3:18:4
UNUSED VALUE - fuzz_crash_020.md:39:2:39:3
INCOMPATIBLE MATCH PATTERNS - fuzz_crash_020.md:52:2:52:2
UNUSED VALUE - fuzz_crash_020.md:1:1:1:1
UNUSED VALUE - fuzz_crash_020.md:86:11:86:17
UNUSED VALUE - fuzz_crash_020.md:98:4:104:3
UNUSED VALUE - fuzz_crash_020.md:105:2:105:54
UNUSED VALUE - fuzz_crash_020.md:105:55:105:85
# PROBLEMS
**PARSE ERROR**
A parsing error occurred: `match_branch_missing_arrow`
@ -597,10 +605,38 @@ Is there an `import` or `exposing` missing up-top?
^^^
**NOT IMPLEMENTED**
This feature is not yet implemented: statement type in block
**UNDEFINED VARIABLE**
Nothing is named `list` in this scope.
Is there an `import` or `exposing` missing up-top?
**fuzz_crash_020.md:92:11:92:15:**
```roc
for n in list {
```
^^^^
**UNDEFINED VARIABLE**
Nothing is named `line!` in this scope.
Is there an `import` or `exposing` missing up-top?
**fuzz_crash_020.md:93:2:93:7:**
```roc
line!("Ag ${n} to ${er}")
```
^^^^^
**UNDEFINED VARIABLE**
Nothing is named `ber` in this scope.
Is there an `import` or `exposing` missing up-top?
**fuzz_crash_020.md:94:3:94:6:**
```roc
ber + n
```
^^^
This error doesn't have a proper diagnostic report yet. Let us know if you want to help improve Roc's error messages!
**UNDEFINED VARIABLE**
Nothing is named `tag` in this scope.
@ -857,6 +893,17 @@ The type _List_ expects argument, but got instead.
**UNUSED VALUE**
This expression produces a value, but it's not being used:
**fuzz_crash_020.md:39:2:39:3:**
```roc
1
```
^
It has the type:
_Num(_size)_
**INCOMPATIBLE MATCH PATTERNS**
The pattern in the fourth branch of this `match` differs from previous ones:
**fuzz_crash_020.md:52:2:**
@ -892,6 +939,66 @@ All patterns in an `match` must have compatible types.
**UNUSED VALUE**
This expression produces a value, but it's not being used:
**fuzz_crash_020.md:1:1:1:1:**
```roc
# Thnt!
```
^
It has the type:
__f_
**UNUSED VALUE**
This expression produces a value, but it's not being used:
**fuzz_crash_020.md:86:11:86:17:**
```roc
)crash ke"Unr!" #)
```
^^^^^^
It has the type:
_Str_
**UNUSED VALUE**
This expression produces a value, but it's not being used:
**fuzz_crash_020.md:98:4:104:3:**
```roc
m (
123,
"World",ag1,
O, # nt
(ne, tuple),
[1, 2, 3],
)
```
It has the type:
_(Num(_size), Str, Error, [O]_others, (Error, Error), List(Num(_size2)))_
**UNUSED VALUE**
This expression produces a value, but it's not being used:
**fuzz_crash_020.md:105:2:105:54:**
```roc
b?? 12 > 5 or 13 + 2 < 5 and 10 - 1 >= 16 or 12 <= 3 e_fn(arg1)?.od()?.ned()?.recd?
```
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
It has the type:
_Bool_
**UNUSED VALUE**
This expression produces a value, but it's not being used:
**fuzz_crash_020.md:105:55:105:85:**
```roc
b?? 12 > 5 or 13 + 2 < 5 and 10 - 1 >= 16 or 12 <= 3 e_fn(arg1)?.od()?.ned()?.recd?
```
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
It has the type:
__f_
# TOKENS
~~~zig
KwApp,OpenSquare,LowerIdent,CloseSquare,OpenCurly,LowerIdent,OpColon,KwPlatform,StringStart,StringPart,StringEnd,CloseCurly,
@ -1678,7 +1785,25 @@ expect {
(p-assign (ident "er"))))
(e-num (value "456"))
(e-num (value "9")))))
(s-runtime-error (tag "not_implemented"))
(s-for
(p-assign (ident "n"))
(e-runtime-error (tag "ident_not_in_scope"))
(e-block
(s-expr
(e-call
(e-runtime-error (tag "ident_not_in_scope"))
(e-string
(e-literal (string "Ag "))
(e-lookup-local
(p-assign (ident "n")))
(e-literal (string " to "))
(e-lookup-local
(p-assign (ident "er")))
(e-literal (string "")))))
(e-binop (op "add")
(e-runtime-error (tag "ident_not_in_scope"))
(e-lookup-local
(p-assign (ident "n"))))))
(s-let
(p-assign (ident "rd"))
(e-record

View file

@ -253,7 +253,7 @@ UNDEFINED VARIABLE - fuzz_crash_023.md:141:2:141:6
UNDECLARED TYPE - fuzz_crash_023.md:143:14:143:20
UNDEFINED VARIABLE - fuzz_crash_023.md:147:9:147:13
UNDEFINED VARIABLE - fuzz_crash_023.md:158:2:158:11
NOT IMPLEMENTED - :0:0:0:0
UNDEFINED VARIABLE - fuzz_crash_023.md:175:3:175:15
UNRECOGNIZED SYNTAX - fuzz_crash_023.md:178:38:178:40
UNRECOGNIZED SYNTAX - fuzz_crash_023.md:178:40:178:41
UNRECOGNIZED SYNTAX - fuzz_crash_023.md:178:45:178:46
@ -268,7 +268,6 @@ UNDEFINED VARIABLE - fuzz_crash_023.md:191:2:191:14
UNDEFINED VARIABLE - fuzz_crash_023.md:193:4:193:13
UNUSED VARIABLE - fuzz_crash_023.md:164:2:164:18
UNUSED VARIABLE - fuzz_crash_023.md:165:2:165:14
UNUSED VARIABLE - fuzz_crash_023.md:166:2:166:6
UNUSED VARIABLE - fuzz_crash_023.md:178:2:178:8
UNUSED VARIABLE - fuzz_crash_023.md:180:2:180:17
UNUSED VARIABLE - fuzz_crash_023.md:188:2:188:15
@ -276,7 +275,10 @@ UNUSED VARIABLE - fuzz_crash_023.md:189:2:189:23
UNDECLARED TYPE - fuzz_crash_023.md:201:9:201:14
INVALID IF CONDITION - fuzz_crash_023.md:70:5:70:5
INCOMPATIBLE MATCH PATTERNS - fuzz_crash_023.md:84:2:84:2
UNUSED VALUE - fuzz_crash_023.md:1:1:1:1
TYPE MISMATCH - fuzz_crash_023.md:155:2:157:3
UNUSED VALUE - fuzz_crash_023.md:155:2:157:3
UNUSED VALUE - fuzz_crash_023.md:178:42:178:45
# PROBLEMS
**PARSE ERROR**
A parsing error occurred: `expected_expr_record_field_name`
@ -679,10 +681,16 @@ Is there an `import` or `exposing` missing up-top?
^^^^^^^^^
**NOT IMPLEMENTED**
This feature is not yet implemented: statement type in block
**UNDEFINED VARIABLE**
Nothing is named `line!` in this scope.
Is there an `import` or `exposing` missing up-top?
**fuzz_crash_023.md:175:3:175:15:**
```roc
Stdout.line!("Adding ${n} to ${number}")
```
^^^^^^^^^^^^
This error doesn't have a proper diagnostic report yet. Let us know if you want to help improve Roc's error messages!
**UNRECOGNIZED SYNTAX**
I don't recognize this syntax.
@ -827,18 +835,6 @@ The unused variable is declared here:
^^^^^^^^^^^^
**UNUSED VARIABLE**
Variable `list` is not used anywhere in your code.
If you don't need this variable, prefix it with an underscore like `_list` to suppress this warning.
The unused variable is declared here:
**fuzz_crash_023.md:166:2:166:6:**
```roc
list = [
```
^^^^
**UNUSED VARIABLE**
Variable `record` is not used anywhere in your code.
@ -983,6 +979,17 @@ All patterns in an `match` must have compatible types.
**UNUSED VALUE**
This expression produces a value, but it's not being used:
**fuzz_crash_023.md:1:1:1:1:**
```roc
# This is a module comment!
```
^
It has the type:
__d_
**TYPE MISMATCH**
This expression is used in an unexpected way:
**fuzz_crash_023.md:155:2:157:3:**
@ -998,6 +1005,29 @@ It has the type:
But I expected it to be:
_[Red][Blue, Green]_others, _arg -> Error_
**UNUSED VALUE**
This expression produces a value, but it's not being used:
**fuzz_crash_023.md:155:2:157:3:**
```roc
match_time(
..., # Single args with comment
)
```
It has the type:
__d_
**UNUSED VALUE**
This expression produces a value, but it's not being used:
**fuzz_crash_023.md:178:42:178:45:**
```roc
record = { foo: 123, bar: "Hello", ;az: tag, qux: Ok(world), punned }
```
^^^
It has the type:
_[Blue]_others_
# TOKENS
~~~zig
KwApp,OpenSquare,LowerIdent,CloseSquare,OpenCurly,LowerIdent,OpColon,KwPlatform,StringStart,StringPart,StringEnd,CloseCurly,
@ -1973,8 +2003,8 @@ expect {
(e-closure
(captures
(capture (ident "x"))
(capture (ident "dude"))
(capture (ident "x")))
(capture (ident "x"))
(capture (ident "dude")))
(e-lambda
(args
(p-assign (ident "a"))
@ -2247,7 +2277,30 @@ expect {
(p-assign (ident "number")))))
(e-num (value "456"))
(e-num (value "789")))))
(s-runtime-error (tag "not_implemented"))
(s-for
(p-assign (ident "n"))
(e-lookup-local
(p-assign (ident "list")))
(e-block
(s-expr
(e-call
(e-runtime-error (tag "ident_not_in_scope"))
(e-string
(e-literal (string "Adding "))
(e-lookup-local
(p-assign (ident "n")))
(e-literal (string " to "))
(e-lookup-local
(p-assign (ident "number")))
(e-literal (string "")))))
(s-reassign
(p-assign (ident "number"))
(e-binop (op "add")
(e-lookup-local
(p-assign (ident "number")))
(e-lookup-local
(p-assign (ident "n")))))
(e-empty_record)))
(s-let
(p-assign (ident "record"))
(e-runtime-error (tag "expr_not_canonicalized")))

View file

@ -208,7 +208,7 @@ UNDEFINED VARIABLE - fuzz_crash_027.md:97:2:97:6
UNDECLARED TYPE - fuzz_crash_027.md:99:14:99:20
UNDEFINED VARIABLE - fuzz_crash_027.md:103:9:103:13
UNDEFINED VARIABLE - fuzz_crash_027.md:114:2:114:11
NOT IMPLEMENTED - :0:0:0:0
UNDEFINED VARIABLE - fuzz_crash_027.md:128:2:128:7
UNDEFINED VARIABLE - fuzz_crash_027.md:131:63:131:69
UNDEFINED VARIABLE - fuzz_crash_027.md:132:42:132:48
UNDEFINED VARIABLE - fuzz_crash_027.md:136:3:136:7
@ -218,7 +218,6 @@ NOT IMPLEMENTED - :0:0:0:0
UNDEFINED VARIABLE - fuzz_crash_027.md:145:4:145:13
UNUSED VARIABLE - fuzz_crash_027.md:119:2:119:10
UNUSED VARIABLE - fuzz_crash_027.md:120:2:120:6
UNUSED VARIABLE - fuzz_crash_027.md:121:2:121:6
UNUSED VARIABLE - fuzz_crash_027.md:131:2:131:8
UNUSED VARIABLE - fuzz_crash_027.md:133:2:133:9
UNUSED VARIABLE - fuzz_crash_027.md:141:2:141:7
@ -227,7 +226,9 @@ UNDECLARED TYPE - fuzz_crash_027.md:153:9:153:14
TOO FEW ARGS - fuzz_crash_027.md:21:3:22:4
INVALID IF CONDITION - fuzz_crash_027.md:50:5:50:5
INCOMPATIBLE MATCH PATTERNS - fuzz_crash_027.md:64:2:64:2
UNUSED VALUE - fuzz_crash_027.md:1:1:1:1
TYPE MISMATCH - fuzz_crash_027.md:111:2:113:3
UNUSED VALUE - fuzz_crash_027.md:111:2:113:3
TYPE MISMATCH - fuzz_crash_027.md:143:2:147:3
# PROBLEMS
**LEADING ZERO**
@ -674,10 +675,16 @@ Is there an `import` or `exposing` missing up-top?
^^^^^^^^^
**NOT IMPLEMENTED**
This feature is not yet implemented: statement type in block
**UNDEFINED VARIABLE**
Nothing is named `line!` in this scope.
Is there an `import` or `exposing` missing up-top?
**fuzz_crash_027.md:128:2:128:7:**
```roc
line!("Adding ${n} to ${number}")
```
^^^^^
This error doesn't have a proper diagnostic report yet. Let us know if you want to help improve Roc's error messages!
**UNDEFINED VARIABLE**
Nothing is named `punned` in this scope.
@ -774,18 +781,6 @@ The unused variable is declared here:
^^^^
**UNUSED VARIABLE**
Variable `list` is not used anywhere in your code.
If you don't need this variable, prefix it with an underscore like `_list` to suppress this warning.
The unused variable is declared here:
**fuzz_crash_027.md:121:2:121:6:**
```roc
list = [
```
^^^^
**UNUSED VARIABLE**
Variable `record` is not used anywhere in your code.
@ -916,6 +911,17 @@ All patterns in an `match` must have compatible types.
**UNUSED VALUE**
This expression produces a value, but it's not being used:
**fuzz_crash_027.md:1:1:1:1:**
```roc
# Thnt!
```
^
It has the type:
__d_
**TYPE MISMATCH**
This expression is used in an unexpected way:
**fuzz_crash_027.md:111:2:113:3:**
@ -931,6 +937,18 @@ It has the type:
But I expected it to be:
_[Red, Blue]_others, _arg -> Error_
**UNUSED VALUE**
This expression produces a value, but it's not being used:
**fuzz_crash_027.md:111:2:113:3:**
```roc
match_time(
..., #
)
```
It has the type:
__d_
**TYPE MISMATCH**
This expression is used in an unexpected way:
**fuzz_crash_027.md:143:2:147:3:**
@ -1973,7 +1991,30 @@ expect {
(e-list
(elems
(e-num (value "456")))))
(s-runtime-error (tag "not_implemented"))
(s-for
(p-assign (ident "n"))
(e-lookup-local
(p-assign (ident "list")))
(e-block
(s-expr
(e-call
(e-runtime-error (tag "ident_not_in_scope"))
(e-string
(e-literal (string "Adding "))
(e-lookup-local
(p-assign (ident "n")))
(e-literal (string " to "))
(e-lookup-local
(p-assign (ident "number")))
(e-literal (string "")))))
(s-reassign
(p-assign (ident "number"))
(e-binop (op "add")
(e-lookup-local
(p-assign (ident "number")))
(e-lookup-local
(p-assign (ident "n")))))
(e-empty_record)))
(s-let
(p-assign (ident "record"))
(e-record

View file

@ -26,7 +26,6 @@ UNEXPECTED TOKEN IN PATTERN - fuzz_crash_032.md:7:21:7:22
PARSE ERROR - fuzz_crash_032.md:7:22:7:22
UNDECLARED TYPE VARIABLE - fuzz_crash_032.md:1:14:1:17
UNDECLARED TYPE - fuzz_crash_032.md:1:21:1:24
NOT IMPLEMENTED - :0:0:0:0
UNDECLARED TYPE - fuzz_crash_032.md:4:25:4:30
EXPECTED NOMINAL TYPE - fuzz_crash_032.md:6:26:6:37
INVALID PATTERN - :0:0:0:0
@ -158,11 +157,6 @@ LocalStatus :lue => Loc= [Pending, Complete]
^^^
**NOT IMPLEMENTED**
This feature is not yet implemented: statement type in block
This error doesn't have a proper diagnostic report yet. Let us know if you want to help improve Roc's error messages!
**UNDECLARED TYPE**
The type _Color_ is not declared in this scope.
@ -296,7 +290,6 @@ olor = |color| {
(args
(p-assign (ident "color")))
(e-block
(s-runtime-error (tag "not_implemented"))
(s-expr
(e-runtime-error (tag "undeclared_type")))
(e-match

View file

@ -56,9 +56,9 @@ NO CHANGE
(p-assign (ident "c")))
(e-closure
(captures
(capture (ident "c"))
(capture (ident "a"))
(capture (ident "b"))
(capture (ident "a")))
(capture (ident "c")))
(e-lambda
(args
(p-assign (ident "x")))

View file

@ -76,25 +76,25 @@ NO CHANGE
(p-assign (ident "b")))
(e-closure
(captures
(capture (ident "b"))
(capture (ident "a")))
(capture (ident "a"))
(capture (ident "b")))
(e-lambda
(args
(p-assign (ident "c")))
(e-closure
(captures
(capture (ident "c"))
(capture (ident "a"))
(capture (ident "b"))
(capture (ident "a")))
(capture (ident "c")))
(e-lambda
(args
(p-assign (ident "d")))
(e-closure
(captures
(capture (ident "c"))
(capture (ident "a"))
(capture (ident "b"))
(capture (ident "d"))
(capture (ident "a")))
(capture (ident "c"))
(capture (ident "d")))
(e-lambda
(args
(p-assign (ident "e")))

View file

@ -254,10 +254,10 @@ main! = |_| {
(p-assign (ident "main!"))
(e-closure
(captures
(capture (ident "multiply"))
(capture (ident "add"))
(capture (ident "double"))
(capture (ident "process")))
(capture (ident "multiply"))
(capture (ident "process"))
(capture (ident "double")))
(e-lambda
(args
(p-underscore))

View file

@ -46,6 +46,7 @@ main = |_| {
UNEXPECTED TOKEN IN EXPRESSION - let_polymorphism_records.md:19:50:19:51
UNRECOGNIZED SYNTAX - let_polymorphism_records.md:19:50:19:51
UNUSED VARIABLE - let_polymorphism_records.md:19:27:19:36
UNUSED VALUE - let_polymorphism_records.md:19:40:19:49
# PROBLEMS
**UNEXPECTED TOKEN IN EXPRESSION**
The token **&** is not expected in an expression.
@ -81,6 +82,17 @@ update_data = |container, new_value| { container & data: new_value }
^^^^^^^^^
**UNUSED VALUE**
This expression produces a value, but it's not being used:
**let_polymorphism_records.md:19:40:19:49:**
```roc
update_data = |container, new_value| { container & data: new_value }
```
^^^^^^^^^
It has the type:
__a_
# TOKENS
~~~zig
KwApp,OpenSquare,LowerIdent,CloseSquare,OpenCurly,LowerIdent,OpColon,KwPlatform,StringStart,StringPart,StringEnd,CloseCurly,

View file

@ -22,7 +22,6 @@ processColor = |color| {
~~~
# EXPECTED
IMPORT MUST BE TOP LEVEL - nominal_mixed_scope.md:7:5:7:11
NOT IMPLEMENTED - :0:0:0:0
UNDECLARED TYPE - nominal_mixed_scope.md:7:12:7:17
UNDECLARED TYPE - nominal_mixed_scope.md:10:9:10:12
UNDECLARED TYPE - nominal_mixed_scope.md:11:9:11:12
@ -39,11 +38,6 @@ Move this import to the top of the file, after the module header but before any
^^^^^^
**NOT IMPLEMENTED**
This feature is not yet implemented: statement type in block
This error doesn't have a proper diagnostic report yet. Let us know if you want to help improve Roc's error messages!
**UNDECLARED TYPE**
The type _Color_ is not declared in this scope.
@ -166,7 +160,6 @@ processColor = |color| {
(args
(p-assign (ident "color")))
(e-block
(s-runtime-error (tag "not_implemented"))
(s-expr
(e-runtime-error (tag "undeclared_type")))
(e-match

View file

@ -839,12 +839,12 @@ is_named_color = |str| {
(p-assign (ident "hex"))
(e-closure
(captures
(capture (ident "b"))
(capture (ident "f"))
(capture (ident "d"))
(capture (ident "a"))
(capture (ident "b"))
(capture (ident "c"))
(capture (ident "d"))
(capture (ident "e"))
(capture (ident "f"))
(capture (ident "is_valid")))
(e-lambda
(args
@ -998,14 +998,14 @@ is_named_color = |str| {
(captures
(capture (ident "to_str"))
(capture (ident "r"))
(capture (ident "inner"))
(capture (ident "g"))
(capture (ident "g"))
(capture (ident "b"))
(capture (ident "r"))
(capture (ident "inner"))
(capture (ident "g"))
(capture (ident "b"))
(capture (ident "a"))
(capture (ident "b")))
(capture (ident "inner"))
(capture (ident "inner")))
(e-lambda
(args
(p-assign (ident "color")))

View file

@ -59,6 +59,10 @@ UNDEFINED VARIABLE - record_different_fields_error.md:7:5:7:10
UNRECOGNIZED SYNTAX - record_different_fields_error.md:7:10:7:17
UNRECOGNIZED SYNTAX - record_different_fields_error.md:7:17:7:18
UNRECOGNIZED SYNTAX - record_different_fields_error.md:7:30:7:31
UNUSED VALUE - record_different_fields_error.md:4:5:4:15
UNUSED VALUE - record_different_fields_error.md:4:17:4:25
UNUSED VALUE - record_different_fields_error.md:5:17:5:24
UNUSED VALUE - record_different_fields_error.md:7:19:7:30
# PROBLEMS
**UNEXPECTED TOKEN IN TYPE ANNOTATION**
The token **"** is not expected in a type annotation.
@ -541,6 +545,50 @@ I don't recognize this syntax.
This might be a syntax error, an unsupported language feature, or a typo.
**UNUSED VALUE**
This expression produces a value, but it's not being used:
**record_different_fields_error.md:4:5:4:15:**
```roc
PascalCase: "pascal",
```
^^^^^^^^^^
It has the type:
_[PascalCase]_others_
**UNUSED VALUE**
This expression produces a value, but it's not being used:
**record_different_fields_error.md:4:17:4:25:**
```roc
PascalCase: "pascal",
```
^^^^^^^^
It has the type:
_Str_
**UNUSED VALUE**
This expression produces a value, but it's not being used:
**record_different_fields_error.md:5:17:5:24:**
```roc
kebab-case: "kebab",
```
^^^^^^^
It has the type:
_Str_
**UNUSED VALUE**
This expression produces a value, but it's not being used:
**record_different_fields_error.md:7:19:7:30:**
```roc
field@symbol: "at symbol",
```
^^^^^^^^^^^
It has the type:
_Str_
# TOKENS
~~~zig
OpenCurly,

View file

@ -39,7 +39,6 @@ UNRECOGNIZED SYNTAX - record_different_fields_reserved_error.md:3:25:3:26
UNRECOGNIZED SYNTAX - record_different_fields_reserved_error.md:3:26:3:27
UNRECOGNIZED SYNTAX - record_different_fields_reserved_error.md:4:11:4:12
UNRECOGNIZED SYNTAX - record_different_fields_reserved_error.md:4:29:4:30
NOT IMPLEMENTED - :0:0:0:0
UNRECOGNIZED SYNTAX - record_different_fields_reserved_error.md:5:11:5:12
UNRECOGNIZED SYNTAX - record_different_fields_reserved_error.md:5:26:5:27
UNRECOGNIZED SYNTAX - record_different_fields_reserved_error.md:6:5:6:8
@ -50,6 +49,8 @@ UNRECOGNIZED SYNTAX - record_different_fields_reserved_error.md:7:5:7:7
UNRECOGNIZED SYNTAX - record_different_fields_reserved_error.md:7:7:7:8
DOES NOT EXIST - record_different_fields_reserved_error.md:7:9:7:19
UNRECOGNIZED SYNTAX - record_different_fields_reserved_error.md:7:19:7:20
UNUSED VALUE - record_different_fields_reserved_error.md:4:13:4:29
UNUSED VALUE - record_different_fields_reserved_error.md:5:13:5:26
# PROBLEMS
**UNEXPECTED TOKEN IN EXPRESSION**
The token **:** is not expected in an expression.
@ -315,11 +316,6 @@ I don't recognize this syntax.
This might be a syntax error, an unsupported language feature, or a typo.
**NOT IMPLEMENTED**
This feature is not yet implemented: statement type in block
This error doesn't have a proper diagnostic report yet. Let us know if you want to help improve Roc's error messages!
**UNRECOGNIZED SYNTAX**
I don't recognize this syntax.
@ -428,6 +424,28 @@ I don't recognize this syntax.
This might be a syntax error, an unsupported language feature, or a typo.
**UNUSED VALUE**
This expression produces a value, but it's not being used:
**record_different_fields_reserved_error.md:4:13:4:29:**
```roc
expect: "test assertion",
```
^^^^^^^^^^^^^^^^
It has the type:
_Str_
**UNUSED VALUE**
This expression produces a value, but it's not being used:
**record_different_fields_reserved_error.md:5:13:5:26:**
```roc
import: "module load",
```
^^^^^^^^^^^^^
It has the type:
_Str_
# TOKENS
~~~zig
OpenCurly,
@ -506,7 +524,6 @@ EndOfFile,
(e-literal (string "test assertion"))))
(s-expr
(e-runtime-error (tag "expr_not_canonicalized")))
(s-runtime-error (tag "not_implemented"))
(s-expr
(e-runtime-error (tag "expr_not_canonicalized")))
(s-expr

View file

@ -79,7 +79,7 @@ foo = |num| {
~~~clojure
(inferred-types
(defs
(patt (type "a -> a")))
(patt (type "a -> a where [a.to_str : a -> ba.to_str : a -> b]")))
(expressions
(expr (type "a -> a"))))
(expr (type "a -> a where [a.to_str : a -> ba.to_str : a -> b]"))))
~~~

View file

@ -0,0 +1,109 @@
# META
~~~ini
description=For expression stmt
type=snippet
~~~
# SOURCE
~~~roc
foo : U64
foo = {
var result = 0
for x in [1, 2, 3] {
result = result + x
}
result
}
~~~
# EXPECTED
NIL
# PROBLEMS
NIL
# TOKENS
~~~zig
LowerIdent,OpColon,UpperIdent,
LowerIdent,OpAssign,OpenCurly,
KwVar,LowerIdent,OpAssign,Int,
KwFor,LowerIdent,KwIn,OpenSquare,Int,Comma,Int,Comma,Int,CloseSquare,OpenCurly,
LowerIdent,OpAssign,LowerIdent,OpPlus,LowerIdent,
CloseCurly,
LowerIdent,
CloseCurly,
EndOfFile,
~~~
# PARSE
~~~clojure
(file
(type-module)
(statements
(s-type-anno (name "foo")
(ty (name "U64")))
(s-decl
(p-ident (raw "foo"))
(e-block
(statements
(s-var (name "result")
(e-int (raw "0")))
(s-for
(p-ident (raw "x"))
(e-list
(e-int (raw "1"))
(e-int (raw "2"))
(e-int (raw "3")))
(e-block
(statements
(s-decl
(p-ident (raw "result"))
(e-binop (op "+")
(e-ident (raw "result"))
(e-ident (raw "x")))))))
(e-ident (raw "result")))))))
~~~
# FORMATTED
~~~roc
foo : U64
foo = {
var result = 0
for x in [1, 2, 3] {
result = result + x
}
result
}
~~~
# CANONICALIZE
~~~clojure
(can-ir
(d-let
(p-assign (ident "foo"))
(e-block
(s-var
(p-assign (ident "result"))
(e-num (value "0")))
(s-for
(p-assign (ident "x"))
(e-list
(elems
(e-num (value "1"))
(e-num (value "2"))
(e-num (value "3"))))
(e-block
(s-reassign
(p-assign (ident "result"))
(e-binop (op "add")
(e-lookup-local
(p-assign (ident "result")))
(e-lookup-local
(p-assign (ident "x")))))
(e-empty_record)))
(e-lookup-local
(p-assign (ident "result"))))
(annotation
(ty-lookup (name "U64") (builtin)))))
~~~
# TYPES
~~~clojure
(inferred-types
(defs
(patt (type "Num(Int(Unsigned64))")))
(expressions
(expr (type "Num(Int(Unsigned64))"))))
~~~

View file

@ -248,7 +248,7 @@ UNDEFINED VARIABLE - syntax_grab_bag.md:141:2:141:6
UNDECLARED TYPE - syntax_grab_bag.md:143:14:143:20
UNDEFINED VARIABLE - syntax_grab_bag.md:147:9:147:13
UNDEFINED VARIABLE - syntax_grab_bag.md:158:2:158:11
NOT IMPLEMENTED - :0:0:0:0
UNDEFINED VARIABLE - syntax_grab_bag.md:175:3:175:15
UNDEFINED VARIABLE - syntax_grab_bag.md:178:63:178:69
UNDEFINED VARIABLE - syntax_grab_bag.md:179:42:179:48
UNDEFINED VARIABLE - syntax_grab_bag.md:183:3:183:7
@ -260,7 +260,6 @@ UNDEFINED VARIABLE - syntax_grab_bag.md:191:2:191:14
UNDEFINED VARIABLE - syntax_grab_bag.md:193:4:193:13
UNUSED VARIABLE - syntax_grab_bag.md:164:2:164:18
UNUSED VARIABLE - syntax_grab_bag.md:165:2:165:14
UNUSED VARIABLE - syntax_grab_bag.md:166:2:166:6
UNUSED VARIABLE - syntax_grab_bag.md:178:2:178:8
UNUSED VARIABLE - syntax_grab_bag.md:180:2:180:17
UNUSED VARIABLE - syntax_grab_bag.md:188:2:188:15
@ -268,7 +267,9 @@ UNUSED VARIABLE - syntax_grab_bag.md:189:2:189:23
UNDECLARED TYPE - syntax_grab_bag.md:201:9:201:14
INVALID IF CONDITION - syntax_grab_bag.md:70:5:70:5
INCOMPATIBLE MATCH PATTERNS - syntax_grab_bag.md:84:2:84:2
UNUSED VALUE - syntax_grab_bag.md:1:1:1:1
TYPE MISMATCH - syntax_grab_bag.md:155:2:157:3
UNUSED VALUE - syntax_grab_bag.md:155:2:157:3
# PROBLEMS
**UNDECLARED TYPE**
The type _Bar_ is not declared in this scope.
@ -616,10 +617,16 @@ Is there an `import` or `exposing` missing up-top?
^^^^^^^^^
**NOT IMPLEMENTED**
This feature is not yet implemented: statement type in block
**UNDEFINED VARIABLE**
Nothing is named `line!` in this scope.
Is there an `import` or `exposing` missing up-top?
**syntax_grab_bag.md:175:3:175:15:**
```roc
Stdout.line!("Adding ${n} to ${number}")
```
^^^^^^^^^^^^
This error doesn't have a proper diagnostic report yet. Let us know if you want to help improve Roc's error messages!
**UNDEFINED VARIABLE**
Nothing is named `punned` in this scope.
@ -732,18 +739,6 @@ The unused variable is declared here:
^^^^^^^^^^^^
**UNUSED VARIABLE**
Variable `list` is not used anywhere in your code.
If you don't need this variable, prefix it with an underscore like `_list` to suppress this warning.
The unused variable is declared here:
**syntax_grab_bag.md:166:2:166:6:**
```roc
list = [
```
^^^^
**UNUSED VARIABLE**
Variable `record` is not used anywhere in your code.
@ -888,6 +883,17 @@ All patterns in an `match` must have compatible types.
**UNUSED VALUE**
This expression produces a value, but it's not being used:
**syntax_grab_bag.md:1:1:1:1:**
```roc
# This is a module comment!
```
^
It has the type:
__d_
**TYPE MISMATCH**
This expression is used in an unexpected way:
**syntax_grab_bag.md:155:2:157:3:**
@ -903,6 +909,18 @@ It has the type:
But I expected it to be:
_[Red][Blue, Green]_others, _arg -> Error_
**UNUSED VALUE**
This expression produces a value, but it's not being used:
**syntax_grab_bag.md:155:2:157:3:**
```roc
match_time(
..., # Single args with comment
)
```
It has the type:
__d_
# TOKENS
~~~zig
KwApp,OpenSquare,LowerIdent,CloseSquare,OpenCurly,LowerIdent,OpColon,KwPlatform,StringStart,StringPart,StringEnd,CloseCurly,
@ -1883,8 +1901,8 @@ expect {
(e-closure
(captures
(capture (ident "x"))
(capture (ident "dude"))
(capture (ident "x")))
(capture (ident "x"))
(capture (ident "dude")))
(e-lambda
(args
(p-assign (ident "a"))
@ -2157,7 +2175,30 @@ expect {
(p-assign (ident "number")))))
(e-num (value "456"))
(e-num (value "789")))))
(s-runtime-error (tag "not_implemented"))
(s-for
(p-assign (ident "n"))
(e-lookup-local
(p-assign (ident "list")))
(e-block
(s-expr
(e-call
(e-runtime-error (tag "ident_not_in_scope"))
(e-string
(e-literal (string "Adding "))
(e-lookup-local
(p-assign (ident "n")))
(e-literal (string " to "))
(e-lookup-local
(p-assign (ident "number")))
(e-literal (string "")))))
(s-reassign
(p-assign (ident "number"))
(e-binop (op "add")
(e-lookup-local
(p-assign (ident "number")))
(e-lookup-local
(p-assign (ident "n")))))
(e-empty_record)))
(s-let
(p-assign (ident "record"))
(e-record

View file

@ -230,9 +230,9 @@ main! = |_| {
(p-assign (ident "main!"))
(e-closure
(captures
(capture (ident "addOne"))
(capture (ident "identity"))
(capture (ident "combine")))
(capture (ident "combine"))
(capture (ident "addOne")))
(e-lambda
(args
(p-underscore))

View file

@ -207,9 +207,9 @@ main! = |_| {
(capture (ident "identity"))
(capture (ident "identity2"))
(capture (ident "pair"))
(capture (ident "a"))
(capture (ident "b"))
(capture (ident "c"))
(capture (ident "a")))
(capture (ident "c")))
(e-lambda
(args
(p-underscore))

View file

@ -529,13 +529,13 @@ main! = |_| {
(p-assign (ident "main!"))
(e-closure
(captures
(capture (ident "identity"))
(capture (ident "anotherIdentity"))
(capture (ident "combine"))
(capture (ident "yetAnotherIdentity"))
(capture (ident "finalIdentity"))
(capture (ident "a"))
(capture (ident "anotherIdentity"))
(capture (ident "f"))
(capture (ident "identity"))
(capture (ident "combine"))
(capture (ident "yetAnotherIdentity")))
(capture (ident "f")))
(e-lambda
(args
(p-underscore))

View file

@ -206,9 +206,9 @@ main! = |_| {
(p-assign (ident "main!"))
(e-closure
(captures
(capture (ident "unused_regular"))
(capture (ident "used_underscore"))
(capture (ident "unused_underscore"))
(capture (ident "unused_regular"))
(capture (ident "used_regular")))
(e-lambda
(args