Merge pull request #8563 from roc-lang/fix-inspect

Fix `inspect` bug
This commit is contained in:
Richard Feldman 2025-12-04 11:24:43 -05:00 committed by GitHub
commit a9d728ba30
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
107 changed files with 1017 additions and 1183 deletions

166
build.zig
View file

@ -87,7 +87,7 @@ const TestsSummaryStep = struct {
/// 2. They are brittle to changes that type-checking should not be sensitive to
///
/// Instead, we always compare indices - either into node stores or to interned string indices.
/// This step enforces that rule by failing the build if `std.mem.` is found in src/check/ or src/layout/.
/// This step enforces that rule by failing the build if `std.mem.` is found in src/canonicalize/, src/check/, src/layout/, or src/eval/.
const CheckTypeCheckerPatternsStep = struct {
step: Step,
@ -104,15 +104,15 @@ const CheckTypeCheckerPatternsStep = struct {
return self;
}
fn make(step: *Step, options: Step.MakeOptions) !void {
_ = options;
fn make(step: *Step, _: Step.MakeOptions) !void {
const b = step.owner;
const allocator = b.allocator;
var violations = std.ArrayList(Violation).empty;
defer violations.deinit(allocator);
// Recursively scan src/check/, src/layout/, and src/eval/ for .zig files
// Recursively scan src/canonicalize/, src/check/, src/layout/, and src/eval/ for .zig files
// TODO: uncomment "src/canonicalize" once its std.mem violations are fixed
const dirs_to_scan = [_][]const u8{ "src/check", "src/layout", "src/eval" };
for (dirs_to_scan) |dir_path| {
var dir = std.fs.cwd().openDir(dir_path, .{ .iterate = true }) catch |err| {
@ -130,7 +130,7 @@ const CheckTypeCheckerPatternsStep = struct {
std.debug.print("=" ** 80 ++ "\n\n", .{});
std.debug.print(
\\Code in src/check/, src/layout/, and src/eval/ must NOT do raw string comparison or manipulation.
\\Code in src/canonicalize/, src/check/, src/layout/, and src/eval/ must NOT do raw string comparison or manipulation.
\\
\\WHY THIS RULE EXISTS:
\\ We NEVER do string or byte comparisons because:
@ -170,7 +170,7 @@ const CheckTypeCheckerPatternsStep = struct {
std.debug.print("\n" ++ "=" ** 80 ++ "\n", .{});
return step.fail(
"Found {d} forbidden patterns (raw string comparison or manipulation) in src/check/, src/layout/, or src/eval/. " ++
"Found {d} forbidden patterns (raw string comparison or manipulation) in src/canonicalize/, src/check/, src/layout/, or src/eval/. " ++
"See above for details on why this is forbidden and what to do instead.",
.{violations.items.len},
);
@ -292,6 +292,153 @@ const CheckTypeCheckerPatternsStep = struct {
}
};
/// Build step that checks for unused variable suppression patterns.
///
/// In this codebase, we don't use `_ = variable;` to suppress unused variable warnings.
/// Instead, we delete the unused variable/argument and update all call sites as necessary.
const CheckUnusedSuppressionStep = struct {
    step: Step,

    /// Allocates the step via the build graph's allocator and wires in the
    /// `make` callback. Panics on OOM, matching other custom steps in this file.
    fn create(b: *std.Build) *CheckUnusedSuppressionStep {
        const self = b.allocator.create(CheckUnusedSuppressionStep) catch @panic("OOM");
        self.* = .{
            .step = Step.init(.{
                .id = Step.Id.custom,
                .name = "check-unused-suppression",
                .owner = b,
                .makeFn = make,
            }),
        };
        return self;
    }

    /// Step callback: recursively scans every `.zig` file under src/ and,
    /// if any `_ = identifier;` line is found, prints a detailed report to
    /// stderr and fails the step with the violation count.
    fn make(step: *Step, _: Step.MakeOptions) !void {
        const b = step.owner;
        const allocator = b.allocator;
        var violations = std.ArrayList(Violation).empty;
        defer violations.deinit(allocator);
        // Scan all src/ directories for .zig files
        var dir = std.fs.cwd().openDir("src", .{ .iterate = true }) catch |err| {
            return step.fail("Failed to open src/ directory: {}", .{err});
        };
        defer dir.close();
        try scanDirectoryForUnusedSuppression(allocator, dir, "src", &violations);
        if (violations.items.len > 0) {
            // Banner + rationale + one line per violation, then fail the build.
            std.debug.print("\n", .{});
            std.debug.print("=" ** 80 ++ "\n", .{});
            std.debug.print("UNUSED VARIABLE SUPPRESSION DETECTED\n", .{});
            std.debug.print("=" ** 80 ++ "\n\n", .{});
            std.debug.print(
                \\In this codebase, we do NOT use `_ = variable;` to suppress unused warnings.
                \\
                \\Instead, you should:
                \\ 1. Delete the unused variable, parameter, or argument
                \\ 2. Update all call sites as necessary
                \\ 3. Propagate the change through the codebase until tests pass
                \\
                \\VIOLATIONS FOUND:
                \\
            , .{});
            for (violations.items) |violation| {
                std.debug.print(" {s}:{d}: {s}\n", .{
                    violation.file_path,
                    violation.line_number,
                    violation.line_content,
                });
            }
            std.debug.print("\n" ++ "=" ** 80 ++ "\n", .{});
            return step.fail(
                "Found {d} unused variable suppression patterns (`_ = identifier;`). " ++
                    "Delete the unused variables and update call sites instead.",
                .{violations.items.len},
            );
        }
    }

    /// One detected `_ = identifier;` occurrence, recorded for the report.
    const Violation = struct {
        file_path: []const u8, // display path, e.g. "src/check/foo.zig"
        line_number: usize, // 1-based line number within the file
        line_content: []const u8, // trimmed text of the offending line
    };

    /// Walks `dir` recursively, reading each `.zig` file and appending every
    /// line matching the suppression pattern to `violations`.
    /// Files that fail to open or read are silently skipped (best-effort scan).
    /// Per-violation allocations (`full_path`, duped line text) are never
    /// individually freed — presumably fine because the build runner's
    /// allocator is arena-backed (TODO confirm).
    fn scanDirectoryForUnusedSuppression(
        allocator: std.mem.Allocator,
        dir: std.fs.Dir,
        path_prefix: []const u8,
        violations: *std.ArrayList(Violation),
    ) !void {
        var walker = try dir.walk(allocator);
        defer walker.deinit();
        while (try walker.next()) |entry| {
            if (entry.kind != .file) continue;
            if (!std.mem.endsWith(u8, entry.path, ".zig")) continue;
            const full_path = try std.fmt.allocPrint(allocator, "{s}/{s}", .{ path_prefix, entry.path });
            const file = dir.openFile(entry.path, .{}) catch continue;
            defer file.close();
            // 10 MiB cap per file; larger (or unreadable) files are skipped rather than erroring.
            const content = file.readToEndAlloc(allocator, 10 * 1024 * 1024) catch continue;
            defer allocator.free(content);
            var line_number: usize = 1;
            var line_start: usize = 0;
            // Manual line splitter: emits a line each time '\n' is seen.
            // NOTE(review): a final line without a trailing '\n' is never
            // examined by this loop — confirm that's acceptable.
            for (content, 0..) |char, i| {
                if (char == '\n') {
                    const line = content[line_start..i];
                    const trimmed = std.mem.trim(u8, line, " \t");
                    // Check for pattern: _ = identifier;
                    // where identifier is alphanumeric with underscores
                    if (isUnusedSuppression(trimmed)) {
                        try violations.append(allocator, .{
                            .file_path = full_path,
                            .line_number = line_number,
                            // content buffer is freed after this file, so keep a copy
                            .line_content = try allocator.dupe(u8, trimmed),
                        });
                    }
                    line_number += 1;
                    line_start = i + 1;
                }
            }
        }
    }

    /// Returns true if `line` (already whitespace-trimmed) has the exact shape
    /// `_ = identifier;`, where the identifier may include `.` so that field
    /// discards like `_ = self.field;` are also flagged.
    fn isUnusedSuppression(line: []const u8) bool {
        // Pattern: `_ = identifier;` where identifier is alphanumeric with underscores
        // Must start with "_ = " and end with ";"
        if (!std.mem.startsWith(u8, line, "_ = ")) return false;
        if (!std.mem.endsWith(u8, line, ";")) return false;
        // Extract the identifier part (between "_ = " and ";")
        const identifier = line[4 .. line.len - 1];
        // Must have at least one character
        if (identifier.len == 0) return false;
        // Check that identifier contains only alphanumeric chars and underscores
        // Also allow dots for field access like `_ = self.field;` which we also want to catch
        for (identifier) |c| {
            if (!std.ascii.isAlphanumeric(c) and c != '_' and c != '.') {
                return false;
            }
        }
        return true;
    }
};
fn checkFxPlatformTestCoverage(step: *Step) !void {
const b = step.owner;
std.debug.print("---- checking fx platform test coverage ----\n", .{});
@ -1272,6 +1419,10 @@ pub fn build(b: *std.Build) void {
const check_patterns = CheckTypeCheckerPatternsStep.create(b);
test_step.dependOn(&check_patterns.step);
// Add check for unused variable suppression patterns
const check_unused = CheckUnusedSuppressionStep.create(b);
test_step.dependOn(&check_unused.step);
test_step.dependOn(&tests_summary.step);
b.default_step.dependOn(playground_step);
@ -2187,9 +2338,8 @@ fn generateGlibcStub(b: *std.Build, target: ResolvedTarget, target_name: []const
const writer = assembly_buf.writer(b.allocator);
const target_arch = target.result.cpu.arch;
const target_abi = target.result.abi;
glibc_stub_build.generateComprehensiveStub(b.allocator, writer, target_arch, target_abi) catch |err| {
glibc_stub_build.generateComprehensiveStub(writer, target_arch) catch |err| {
std.log.warn("Failed to generate comprehensive stub assembly for {s}: {}, using minimal ELF", .{ target_name, err });
// Fall back to minimal ELF
const stub_content = switch (target.result.cpu.arch) {

View file

@ -47,21 +47,15 @@ fn PlainTextSExprWriter(comptime WriterType: type) type {
try self.writer.print(fmt, args);
}
pub fn setColor(self: *@This(), color: Color) !void {
_ = self;
_ = color;
pub fn setColor(_: *@This(), _: Color) !void {
// No-op for plain text
}
pub fn beginSourceRange(self: *@This(), start_byte: u32, end_byte: u32) !void {
_ = self;
_ = start_byte;
_ = end_byte;
pub fn beginSourceRange(_: *@This(), _: u32, _: u32) !void {
// No-op for plain text
}
pub fn endSourceRange(self: *@This()) !void {
_ = self;
pub fn endSourceRange(_: *@This()) !void {
// No-op for plain text
}

View file

@ -106,9 +106,8 @@ test "safeCast and safeRead" {
var buffer = [_]u8{ 0x12, 0x34, 0x56, 0x78 };
const ptr = @as(*anyopaque, @ptrCast(&buffer));
const value = try safeRead(u16, ptr, 0, 4);
// Endianness dependent, but should not crash
_ = value;
// Just verify this doesn't error - actual value is endianness dependent
_ = try safeRead(u16, ptr, 0, 4);
try std.testing.expectError(error.BufferOverflow, safeRead(u32, ptr, 1, 4));
}

View file

@ -13,5 +13,5 @@ pub const encode = base58.encode;
pub const decode = base58.decode;
test {
_ = base58;
@import("std").testing.refAllDecls(@This());
}

View file

@ -1617,7 +1617,7 @@ fn compileModule(
}
// 4. Canonicalize
try module_env.initCIRFields(gpa, module_name);
try module_env.initCIRFields(module_name);
var can_result = try gpa.create(Can);
defer {

View file

@ -4,14 +4,9 @@ const std = @import("std");
/// Generate assembly stub with essential libc symbols
pub fn generateComprehensiveStub(
allocator: std.mem.Allocator,
writer: anytype,
target_arch: std.Target.Cpu.Arch,
target_abi: std.Target.Abi,
) !void {
_ = allocator;
_ = target_abi;
const ptr_width: u32 = switch (target_arch) {
.x86_64, .aarch64 => 8,
else => 4,

View file

@ -47,29 +47,15 @@ const ___tracy_c_zone_context = extern struct {
/// The tracy context object for tracking zones.
/// Make sure to defer calling end.
pub const Ctx = if (enable) ___tracy_c_zone_context else struct {
pub inline fn end(self: @This()) void {
_ = self;
}
pub inline fn end(_: @This()) void {}
pub inline fn addText(self: @This(), text: []const u8) void {
_ = self;
_ = text;
}
pub inline fn addText(_: @This(), _: []const u8) void {}
pub inline fn setName(self: @This(), name: []const u8) void {
_ = self;
_ = name;
}
pub inline fn setName(_: @This(), _: []const u8) void {}
pub inline fn setColor(self: @This(), color: u32) void {
_ = self;
_ = color;
}
pub inline fn setColor(_: @This(), _: u32) void {}
pub inline fn setValue(self: @This(), value: u64) void {
_ = self;
_ = value;
}
pub inline fn setValue(_: @This(), _: u64) void {}
};
/// Creates a source location based tracing zone.

View file

@ -97,9 +97,6 @@ fn testing_roc_dealloc(c_ptr: *anyopaque, _: u32) callconv(.c) void {
allocator.free(slice);
}
fn testing_roc_panic(c_ptr: *anyopaque, tag_id: u32) callconv(.c) void {
_ = c_ptr;
_ = tag_id;
fn testing_roc_panic(_: *anyopaque, _: u32) callconv(.c) void {
@panic("Roc panicked");
}

View file

@ -635,16 +635,6 @@ pub fn strSplitOn(
return list;
}
fn initFromSmallStr(
slice_bytes: [*]u8,
len: usize,
_: usize,
// TODO we probably don't need this here
roc_ops: *RocOps,
) RocStr {
return RocStr.init(slice_bytes, len, roc_ops);
}
/// TODO
pub fn strSplitOnHelp(
array: [*]RocStr,

View file

@ -3,6 +3,7 @@
//! This module provides essential infrastructure for builtin operations,
//! including memory allocation interfaces, overflow detection utilities,
//! debug functions, and common types used throughout the builtin modules.
//!
//! It serves as the foundation layer that other builtin modules depend on
//! for low-level operations and host interface functions.
const std = @import("std");
@ -163,18 +164,11 @@ pub const TestEnv = struct {
}
}
fn rocDbgFn(roc_dbg: *const RocDbg, env: *anyopaque) callconv(.c) void {
_ = env;
_ = roc_dbg;
}
fn rocDbgFn(_: *const RocDbg, _: *anyopaque) callconv(.c) void {}
fn rocExpectFailedFn(roc_expect: *const RocExpectFailed, env: *anyopaque) callconv(.c) void {
_ = env;
_ = roc_expect;
}
fn rocExpectFailedFn(_: *const RocExpectFailed, _: *anyopaque) callconv(.c) void {}
fn rocCrashedFn(roc_crashed: *const RocCrashed, env: *anyopaque) callconv(.c) noreturn {
_ = env;
fn rocCrashedFn(roc_crashed: *const RocCrashed, _: *anyopaque) callconv(.c) noreturn {
const message = roc_crashed.utf8_bytes[0..roc_crashed.len];
@panic(message);
}
@ -763,10 +757,9 @@ test "TestEnv basic functionality" {
// Should start with no allocations
try std.testing.expectEqual(@as(usize, 0), test_env.getAllocationCount());
// Get ops should work
// Get ops should work - verify we can get ops and it points back to our test env
const ops = test_env.getOps();
// Function pointers are non-null by design, just verify we can get ops
_ = ops;
try std.testing.expectEqual(@as(*anyopaque, @ptrCast(&test_env)), ops.env);
}
test "TestEnv allocation tracking" {

View file

@ -40,5 +40,4 @@ pub const freeForZstd = bundle.freeForZstd;
test {
_ = @import("test_bundle.zig");
_ = @import("test_streaming.zig");
_ = bundle;
}

View file

@ -271,7 +271,7 @@ pub const WhereClause = union(enum) {
const attrs = tree.beginNode();
try tree.endNode(begin, attrs);
},
.w_malformed => |malformed| {
.w_malformed => {
const begin = tree.beginNode();
try tree.pushStaticAtom("malformed");
@ -280,7 +280,6 @@ pub const WhereClause = union(enum) {
const region = cir.store.getRegionAt(node_idx);
try cir.appendRegionInfoToSExprTreeFromRegion(tree, region);
_ = malformed;
const attrs = tree.beginNode();
try tree.endNode(begin, attrs);
},

View file

@ -1322,10 +1322,8 @@ fn processAssociatedItemsSecondPass(
fn registerUserFacingName(
self: *Self,
fully_qualified_idx: Ident.Idx,
item_name_idx: Ident.Idx,
pattern_idx: CIR.Pattern.Idx,
) std.mem.Allocator.Error!void {
_ = item_name_idx;
// Get the fully qualified text and strip the module prefix
const fully_qualified_text = self.env.getIdent(fully_qualified_idx);
@ -1656,7 +1654,7 @@ fn processAssociatedItemsFirstPass(
// - Module scope gets "Foo.Bar.baz" (user-facing fully qualified)
// - Foo's scope gets "Bar.baz" (partially qualified)
// - Bar's scope gets "baz" (unqualified)
try self.registerUserFacingName(qualified_idx, decl_ident, placeholder_pattern_idx);
try self.registerUserFacingName(qualified_idx, placeholder_pattern_idx);
}
}
},
@ -1684,7 +1682,7 @@ fn processAssociatedItemsFirstPass(
try current_scope.idents.put(self.env.gpa, qualified_idx, placeholder_pattern_idx);
// Register progressively qualified names at each scope level per the plan
try self.registerUserFacingName(qualified_idx, anno_ident, placeholder_pattern_idx);
try self.registerUserFacingName(qualified_idx, placeholder_pattern_idx);
}
},
else => {
@ -2280,9 +2278,8 @@ pub fn canonicalizeFile(
}
}
},
.malformed => |malformed| {
.malformed => {
// We won't touch this since it's already a parse error.
_ = malformed;
},
}
}
@ -2687,9 +2684,8 @@ fn addToExposedScope(
try self.exposed_type_texts.put(gpa, type_text, region);
}
},
.malformed => |malformed| {
.malformed => {
// Malformed exposed items are already captured as diagnostics during parsing
_ = malformed;
},
}
}
@ -2895,20 +2891,14 @@ fn bringImportIntoScope(
const exposed = self.parse_ir.store.getExposedItem(exposed_idx);
switch (exposed) {
.lower_ident => |ident| {
// TODO handle `as` here using an Alias
if (self.parse_ir.tokens.resolveIdentifier(ident.ident)) |ident_idx| {
_ = ident_idx;
// TODO Introduce our import
// TODO Introduce our import
if (self.parse_ir.tokens.resolveIdentifier(ident.ident)) |_| {
// _ = self.scope.levels.introduce(gpa, &self.env.idents, .ident, .{ .scope_name = ident_idx, .ident = ident_idx });
}
},
.upper_ident => |imported_type| {
_ = imported_type;
// const alias = Alias{
.upper_ident => {
// TODO: const alias = Alias{
// .name = imported_type.name,
// .region = ir.env.tag_names.getRegion(imported_type.name),
// .is_builtin = false,
@ -2921,9 +2911,7 @@ fn bringImportIntoScope(
// .alias = alias_idx,
// });
},
.upper_ident_star => |ident| {
_ = ident;
},
.upper_ident_star => {},
}
}
}
@ -2965,6 +2953,7 @@ fn importAliased(
alias_tok: ?Token.Idx,
exposed_items_span: CIR.ExposedItem.Span,
import_region: Region,
is_package_qualified: bool,
) std.mem.Allocator.Error!?Statement.Idx {
const module_name_text = self.env.getIdent(module_name);
@ -2979,8 +2968,8 @@ fn importAliased(
// 2. Resolve the alias
const alias = try self.resolveModuleAlias(alias_tok, module_name) orelse return null;
// 3. Add to scope: alias -> module_name mapping
try self.scopeIntroduceModuleAlias(alias, module_name, import_region, exposed_items_span);
// 3. Add to scope: alias -> module_name mapping (includes is_package_qualified flag)
try self.scopeIntroduceModuleAlias(alias, module_name, import_region, exposed_items_span, is_package_qualified);
// 4. Process type imports from this module
try self.processTypeImports(module_name, alias);
@ -3011,12 +3000,16 @@ fn importAliased(
// 9. Check that this module actually exists, and if not report an error
// Only check if module_envs is provided - when it's null, we don't know what modules
// exist yet (e.g., during standalone module canonicalization without full project context)
// Skip for package-qualified imports (e.g., "pf.Stdout") - those are cross-package
// imports that are resolved by the workspace resolver
if (self.module_envs) |envs_map| {
if (!envs_map.contains(module_name)) {
try self.env.pushDiagnostic(Diagnostic{ .module_not_found = .{
.module_name = module_name,
.region = import_region,
} });
if (!is_package_qualified) {
try self.env.pushDiagnostic(Diagnostic{ .module_not_found = .{
.module_name = module_name,
.region = import_region,
} });
}
}
}
@ -3101,6 +3094,7 @@ fn importUnaliased(
module_name: Ident.Idx,
exposed_items_span: CIR.ExposedItem.Span,
import_region: Region,
is_package_qualified: bool,
) std.mem.Allocator.Error!Statement.Idx {
const module_name_text = self.env.getIdent(module_name);
@ -3138,12 +3132,16 @@ fn importUnaliased(
// 6. Check that this module actually exists, and if not report an error
// Only check if module_envs is provided - when it's null, we don't know what modules
// exist yet (e.g., during standalone module canonicalization without full project context)
// Skip for package-qualified imports (e.g., "pf.Stdout") - those are cross-package
// imports that are resolved by the workspace resolver
if (self.module_envs) |envs_map| {
if (!envs_map.contains(module_name)) {
try self.env.pushDiagnostic(Diagnostic{ .module_not_found = .{
.module_name = module_name,
.region = import_region,
} });
if (!is_package_qualified) {
try self.env.pushDiagnostic(Diagnostic{ .module_not_found = .{
.module_name = module_name,
.region = import_region,
} });
}
}
}
@ -3224,11 +3222,14 @@ fn canonicalizeImportStatement(
const cir_exposes = try self.env.store.exposedItemSpanFrom(scratch_start);
const import_region = self.parse_ir.tokenizedRegionToRegion(import_stmt.region);
// 3. Dispatch to the appropriate handler based on whether this is a nested import
// 3. Check if this is a package-qualified import (has a qualifier like "pf" in "pf.Stdout")
const is_package_qualified = import_stmt.qualifier_tok != null;
// 4. Dispatch to the appropriate handler based on whether this is a nested import
return if (import_stmt.nested_import)
try self.importUnaliased(module_name, cir_exposes, import_region)
try self.importUnaliased(module_name, cir_exposes, import_region, is_package_qualified)
else
try self.importAliased(module_name, import_stmt.alias_tok, cir_exposes, import_region);
try self.importAliased(module_name, import_stmt.alias_tok, cir_exposes, import_region, is_package_qualified);
}
/// Resolve the module alias name from either explicit alias or module name
@ -3935,17 +3936,21 @@ pub fn canonicalizeExpr(
const qualifier_tok = @as(Token.Idx, @intCast(qualifier_tokens[0]));
if (self.parse_ir.tokens.resolveIdentifier(qualifier_tok)) |module_alias| {
// Check if this is a module alias, or an auto-imported module
const module_name = self.scopeLookupModule(module_alias) orelse blk: {
const module_info: ?Scope.ModuleAliasInfo = self.scopeLookupModule(module_alias) orelse blk: {
// Not in scope, check if it's an auto-imported module
if (self.module_envs) |envs_map| {
if (envs_map.contains(module_alias)) {
// This is an auto-imported module like Bool or Try
// Use the module_alias directly as the module_name
break :blk module_alias;
// Use the module_alias directly as the module_name (not package-qualified)
break :blk Scope.ModuleAliasInfo{
.module_name = module_alias,
.is_package_qualified = false,
};
}
}
break :blk null;
} orelse {
};
const module_name = if (module_info) |info| info.module_name else {
// Not a module alias and not an auto-imported module
// Check if the qualifier is a type - if so, try to lookup associated items
const is_type_in_scope = self.scopeLookupTypeBinding(module_alias) != null;
@ -4117,6 +4122,15 @@ pub fn canonicalizeExpr(
break :blk_qualified;
}
// Check if this is a package-qualified import (e.g., "pf.Stdout")
// These are cross-package imports resolved by the workspace resolver
const is_pkg_qualified = if (module_info) |info| info.is_package_qualified else false;
if (is_pkg_qualified) {
// Package-qualified import - member resolution happens via the resolver
// Fall through to normal identifier lookup
break :blk_qualified;
}
// Generate a more helpful error for auto-imported types (List, Bool, Try, etc.)
const is_auto_imported_type = if (self.module_envs) |envs_map|
envs_map.contains(module_name)
@ -5636,7 +5650,7 @@ pub fn canonicalizeExpr(
},
.for_expr => |for_expr| {
const region = self.parse_ir.tokenizedRegionToRegion(for_expr.region);
const result = try self.canonicalizeForLoop(for_expr.patt, for_expr.expr, for_expr.body, region);
const result = try self.canonicalizeForLoop(for_expr.patt, for_expr.expr, for_expr.body);
const for_expr_idx = try self.env.addExpr(Expr{
.e_for = .{
@ -5648,9 +5662,8 @@ pub fn canonicalizeExpr(
return CanonicalizedExpr{ .idx = for_expr_idx, .free_vars = result.free_vars };
},
.malformed => |malformed| {
.malformed => {
// We won't touch this since it's already a parse error.
_ = malformed;
return null;
},
}
@ -5686,9 +5699,7 @@ fn canonicalizeForLoop(
ast_patt: AST.Pattern.Idx,
ast_list_expr: AST.Expr.Idx,
ast_body: AST.Expr.Idx,
region: base.Region,
) std.mem.Allocator.Error!CanonicalizedForLoop {
_ = region;
// Tmp state to capture free vars from both expr & body
// This is stored as a map to avoid duplicate captures
@ -6039,7 +6050,8 @@ fn canonicalizeTagExpr(self: *Self, e: AST.TagExpr, mb_args: ?AST.Expr.Span, reg
// For Imported.Foo.Bar.X: module=Imported, type=Foo.Bar, tag=X
// qualifiers=[Imported, Foo, Bar], so type name is built from qualifiers[1..]
const module_name = self.scopeLookupModule(first_tok_ident).?; // Already checked above
const module_info = self.scopeLookupModule(first_tok_ident).?; // Already checked above
const module_name = module_info.module_name;
const module_name_text = self.env.getIdent(module_name);
// Check if this is imported in the current scope
@ -6766,13 +6778,14 @@ fn canonicalizePattern(
const module_alias = try self.env.insertIdent(base.Ident.for_text(module_alias_text));
// Check if this is a module alias
const module_name = self.scopeLookupModule(module_alias) orelse {
const module_info = self.scopeLookupModule(module_alias) orelse {
// Module is not in current scope
return try self.env.pushMalformed(Pattern.Idx, CIR.Diagnostic{ .module_not_imported = .{
.module_name = module_alias,
.region = region,
} });
};
const module_name = module_info.module_name;
const module_name_text = self.env.getIdent(module_name);
// Check if this module is imported in the current scope
@ -7151,9 +7164,8 @@ fn canonicalizePattern(
return pattern_idx;
}
},
.malformed => |malformed| {
.malformed => {
// We won't touch this since it's already a parse error.
_ = malformed;
return null;
},
}
@ -8002,7 +8014,7 @@ fn canonicalizeTypeAnnoBasicType(
const module_alias = try self.env.insertIdent(base.Ident.for_text(module_alias_text));
// Check if this is a module alias
const module_name = self.scopeLookupModule(module_alias) orelse {
const module_info = self.scopeLookupModule(module_alias) orelse {
// Module is not in current scope - but check if it's a type name first
if (self.scopeLookupTypeBinding(module_alias)) |_| {
// This is in scope as a type/value, but doesn't expose the nested type being requested
@ -8019,6 +8031,7 @@ fn canonicalizeTypeAnnoBasicType(
.region = region,
} });
};
const module_name = module_info.module_name;
const module_name_text = self.env.getIdent(module_name);
// Check if this module is imported in the current scope
@ -9227,7 +9240,7 @@ pub fn canonicalizeBlockStatement(self: *Self, ast_stmt: AST.Statement, ast_stmt
},
.@"for" => |for_stmt| {
const region = self.parse_ir.tokenizedRegionToRegion(for_stmt.region);
const result = try self.canonicalizeForLoop(for_stmt.patt, for_stmt.expr, for_stmt.body, region);
const result = try self.canonicalizeForLoop(for_stmt.patt, for_stmt.expr, for_stmt.body);
const stmt_idx = try self.env.addStatement(Statement{
.s_for = .{
@ -10199,7 +10212,7 @@ fn scopeLookupTypeBindingConst(self: *const Self, ident_idx: Ident.Idx) ?TypeBin
}
/// Look up a module alias in the scope hierarchy
fn scopeLookupModule(self: *const Self, alias_name: Ident.Idx) ?Ident.Idx {
fn scopeLookupModule(self: *const Self, alias_name: Ident.Idx) ?Scope.ModuleAliasInfo {
// Search from innermost to outermost scope
var i = self.scopes.items.len;
while (i > 0) {
@ -10207,7 +10220,7 @@ fn scopeLookupModule(self: *const Self, alias_name: Ident.Idx) ?Ident.Idx {
const scope = &self.scopes.items[i];
switch (scope.lookupModuleAlias(alias_name)) {
.found => |module_name| return module_name,
.found => |module_info| return module_info,
.not_found => continue,
}
}
@ -10216,7 +10229,7 @@ fn scopeLookupModule(self: *const Self, alias_name: Ident.Idx) ?Ident.Idx {
}
/// Introduce a module alias into scope
fn scopeIntroduceModuleAlias(self: *Self, alias_name: Ident.Idx, module_name: Ident.Idx, import_region: Region, exposed_items_span: CIR.ExposedItem.Span) std.mem.Allocator.Error!void {
fn scopeIntroduceModuleAlias(self: *Self, alias_name: Ident.Idx, module_name: Ident.Idx, import_region: Region, exposed_items_span: CIR.ExposedItem.Span, is_package_qualified: bool) std.mem.Allocator.Error!void {
const gpa = self.env.gpa;
const current_scope = &self.scopes.items[self.scopes.items.len - 1];
@ -10257,11 +10270,11 @@ fn scopeIntroduceModuleAlias(self: *Self, alias_name: Ident.Idx, module_name: Id
}
// Simplified introduction without parent lookup for now
const result = try current_scope.introduceModuleAlias(gpa, alias_name, module_name, null);
const result = try current_scope.introduceModuleAlias(gpa, alias_name, module_name, is_package_qualified, null);
switch (result) {
.success => {},
.shadowing_warning => |shadowed_module| {
.shadowing_warning => {
// Create diagnostic for module alias shadowing
try self.env.pushDiagnostic(Diagnostic{
.shadowing_warning = .{
@ -10270,11 +10283,9 @@ fn scopeIntroduceModuleAlias(self: *Self, alias_name: Ident.Idx, module_name: Id
.original_region = Region.zero(),
},
});
_ = shadowed_module; // Suppress unused variable warning
},
.already_in_scope => |existing_module| {
.already_in_scope => {
// Module alias already exists in current scope
// For now, just issue a diagnostic
try self.env.pushDiagnostic(Diagnostic{
.shadowing_warning = .{
.ident = alias_name,
@ -10282,13 +10293,12 @@ fn scopeIntroduceModuleAlias(self: *Self, alias_name: Ident.Idx, module_name: Id
.original_region = Region.zero(),
},
});
_ = existing_module; // Suppress unused variable warning
},
}
}
/// Helper function to look up module aliases in parent scopes only
fn scopeLookupModuleInParentScopes(self: *const Self, alias_name: Ident.Idx) ?Ident.Idx {
fn scopeLookupModuleInParentScopes(self: *const Self, alias_name: Ident.Idx) ?Scope.ModuleAliasInfo {
// Search from second-innermost to outermost scope (excluding current scope)
if (self.scopes.items.len <= 1) return null;
@ -10297,8 +10307,8 @@ fn scopeLookupModuleInParentScopes(self: *const Self, alias_name: Ident.Idx) ?Id
i -= 1;
const scope = &self.scopes.items[i];
switch (scope.lookupModuleAlias(&self.env.idents, alias_name)) {
.found => |module_name| return module_name,
switch (scope.lookupModuleAlias(alias_name)) {
.found => |module_info| return module_info,
.not_found => continue,
}
}
@ -10740,12 +10750,13 @@ fn createAnnotationFromTypeAnno(
/// we create external declarations that will be resolved later when
/// we have access to the other module's IR after it has been type checked.
fn processTypeImports(self: *Self, module_name: Ident.Idx, alias_name: Ident.Idx) std.mem.Allocator.Error!void {
// Set up the module alias for qualified lookups
// Set up the module alias for qualified lookups (type imports are not package-qualified)
const scope = self.currentScope();
_ = try scope.introduceModuleAlias(
self.env.gpa,
alias_name,
module_name,
false, // Type imports are not package-qualified
null, // No parent lookup function for now
);
}
@ -10765,7 +10776,8 @@ fn tryModuleQualifiedLookup(self: *Self, field_access: AST.BinOp) std.mem.Alloca
const module_alias = self.parse_ir.tokens.resolveIdentifier(left_ident.token) orelse return null;
// Check if this is a module alias
const module_name = self.scopeLookupModule(module_alias) orelse return null;
const module_info = self.scopeLookupModule(module_alias) orelse return null;
const module_name = module_info.module_name;
const module_text = self.env.getIdent(module_name);
// Check if this module is imported in the current scope

View file

@ -468,7 +468,6 @@ pub const Diagnostic = union(enum) {
allocator: Allocator,
ident_name: []const u8,
region_info: base.RegionInfo,
original_region_info: base.RegionInfo,
filename: []const u8,
source: []const u8,
line_starts: []const u32,
@ -490,10 +489,6 @@ pub const Diagnostic = union(enum) {
line_starts,
);
// we don't need to display the original region info
// as this header is in a single location
_ = original_region_info;
try report.document.addReflowingText("You can remove the duplicate entry to fix this warning.");
return report;

View file

@ -22,8 +22,6 @@ pub const Idx = enum(u32) { _ };
pub const Span = extern struct { span: DataSpan };
/// Converts this external declaration to an S-expression tree representation for debugging
pub fn pushToSExprTree(self: *const ExternalDecl, cir: anytype, tree: anytype) !void {
_ = self;
_ = cir;
pub fn pushToSExprTree(_: *const ExternalDecl, _: anytype, tree: anytype) !void {
try tree.pushStaticAtom("external-decl-stub");
}

View file

@ -122,10 +122,6 @@ pub fn replaceAnnoOnlyWithHosted(env: *ModuleEnv) !std.ArrayList(CIR.Def.Idx) {
env.store.extra_data.items.items[extra_start + 1] = @intFromEnum(expr_idx);
// Verify the def still has its annotation after modification
const modified_def = env.store.getDef(def_idx);
_ = modified_def;
// Track this modified def index
try modified_def_indices.append(gpa, def_idx);
}

View file

@ -437,8 +437,7 @@ pub fn relocate(self: *Self, offset: isize) void {
}
/// Initialize the compilation fields in an existing ModuleEnv
pub fn initCIRFields(self: *Self, gpa: std.mem.Allocator, module_name: []const u8) !void {
_ = gpa; // unused since we don't create new allocations
pub fn initCIRFields(self: *Self, module_name: []const u8) !void {
self.module_kind = .deprecated_module; // default until canonicalization sets the actual kind
self.all_defs = .{ .span = .{ .start = 0, .len = 0 } };
self.all_statements = .{ .span = .{ .start = 0, .len = 0 } };
@ -454,8 +453,8 @@ pub fn initCIRFields(self: *Self, gpa: std.mem.Allocator, module_name: []const u
}
/// Alias for initCIRFields for backwards compatibility with tests
pub fn initModuleEnvFields(self: *Self, gpa: std.mem.Allocator, module_name: []const u8) !void {
return self.initCIRFields(gpa, module_name);
pub fn initModuleEnvFields(self: *Self, module_name: []const u8) !void {
return self.initCIRFields(module_name);
}
/// Initialize the module environment.
@ -982,7 +981,6 @@ pub fn diagnosticToReport(self: *Self, diagnostic: CIR.Diagnostic, allocator: st
.redundant_exposed => |data| blk: {
const ident_name = self.getIdent(data.ident);
const region_info = self.calcRegionInfo(data.region);
const original_region_info = self.calcRegionInfo(data.original_region);
var report = Report.init(allocator, "REDUNDANT EXPOSED", .warning);
const owned_ident = try report.addOwnedString(ident_name);
@ -1001,10 +999,6 @@ pub fn diagnosticToReport(self: *Self, diagnostic: CIR.Diagnostic, allocator: st
self.getLineStartsAll(),
);
// we don't need to display the original region info
// as this header is in a single location
_ = original_region_info;
try report.document.addReflowingText("You can remove the duplicate entry to fix this warning.");
break :blk report;
@ -1206,9 +1200,7 @@ pub fn diagnosticToReport(self: *Self, diagnostic: CIR.Diagnostic, allocator: st
break :blk report;
},
.lambda_body_not_canonicalized => |data| blk: {
_ = data;
.lambda_body_not_canonicalized => blk: {
var report = Report.init(allocator, "INVALID LAMBDA", .runtime_error);
try report.document.addReflowingText("The body of this lambda expression is not valid.");
@ -1234,9 +1226,7 @@ pub fn diagnosticToReport(self: *Self, diagnostic: CIR.Diagnostic, allocator: st
break :blk report;
},
.var_across_function_boundary => |data| blk: {
_ = data;
.var_across_function_boundary => blk: {
var report = Report.init(allocator, "VAR REASSIGNMENT ERROR", .runtime_error);
try report.document.addReflowingText("Cannot reassign a ");
try report.document.addKeyword("var");
@ -1248,9 +1238,7 @@ pub fn diagnosticToReport(self: *Self, diagnostic: CIR.Diagnostic, allocator: st
break :blk report;
},
.tuple_elem_not_canonicalized => |data| blk: {
_ = data;
.tuple_elem_not_canonicalized => blk: {
var report = Report.init(allocator, "INVALID TUPLE ELEMENT", .runtime_error);
try report.document.addReflowingText("This tuple element is malformed or contains invalid syntax.");
@ -2238,8 +2226,7 @@ pub fn addMatchBranchPattern(self: *Self, expr: CIR.Expr.Match.BranchPattern, re
/// Add a new pattern record field to the node store.
/// This function asserts that the nodes and regions are in sync.
pub fn addPatternRecordField(self: *Self, expr: CIR.PatternRecordField, region: Region) std.mem.Allocator.Error!CIR.PatternRecordField.Idx {
_ = region;
pub fn addPatternRecordField(self: *Self, expr: CIR.PatternRecordField) std.mem.Allocator.Error!CIR.PatternRecordField.Idx {
const expr_idx = try self.store.addPatternRecordField(expr);
self.debugAssertArraysInSync();
return expr_idx;

View file

@ -1126,9 +1126,7 @@ pub fn getPattern(store: *const NodeStore, pattern_idx: CIR.Pattern.Idx) CIR.Pat
}
/// Retrieves a pattern record field from the store.
pub fn getPatternRecordField(store: *NodeStore, patternRecordField: CIR.PatternRecordField.Idx) CIR.PatternRecordField {
_ = store;
_ = patternRecordField;
pub fn getPatternRecordField(_: *NodeStore, _: CIR.PatternRecordField.Idx) CIR.PatternRecordField {
// Return empty placeholder since PatternRecordField has no fields yet
return CIR.PatternRecordField{};
}
@ -2140,10 +2138,7 @@ pub fn addPattern(store: *NodeStore, pattern: CIR.Pattern, region: base.Region)
}
/// Adds a pattern record field to the store.
pub fn addPatternRecordField(store: *NodeStore, patternRecordField: CIR.PatternRecordField) Allocator.Error!CIR.PatternRecordField.Idx {
_ = store;
_ = patternRecordField;
pub fn addPatternRecordField(_: *NodeStore, _: CIR.PatternRecordField) Allocator.Error!CIR.PatternRecordField.Idx {
return @enumFromInt(0);
}

View file

@ -50,8 +50,8 @@ forward_references: std.AutoHashMapUnmanaged(Ident.Idx, ForwardReference),
type_bindings: std.AutoHashMapUnmanaged(Ident.Idx, TypeBinding),
/// Maps type variables to their type annotation indices
type_vars: std.AutoHashMapUnmanaged(Ident.Idx, CIR.TypeAnno.Idx),
/// Maps module alias names to their full module names
module_aliases: std.AutoHashMapUnmanaged(Ident.Idx, Ident.Idx),
/// Maps module alias names to their full module info (name + whether package-qualified)
module_aliases: std.AutoHashMapUnmanaged(Ident.Idx, ModuleAliasInfo),
/// Maps exposed item names to their source modules and original names (for import resolution)
exposed_items: std.AutoHashMapUnmanaged(Ident.Idx, ExposedItemInfo),
/// Maps module names to their Import.Idx for modules imported in this scope
@ -69,7 +69,7 @@ pub fn init(is_function_boundary: bool) Scope {
.forward_references = std.AutoHashMapUnmanaged(Ident.Idx, ForwardReference){},
.type_bindings = std.AutoHashMapUnmanaged(Ident.Idx, TypeBinding){},
.type_vars = std.AutoHashMapUnmanaged(Ident.Idx, CIR.TypeAnno.Idx){},
.module_aliases = std.AutoHashMapUnmanaged(Ident.Idx, Ident.Idx){},
.module_aliases = std.AutoHashMapUnmanaged(Ident.Idx, ModuleAliasInfo){},
.exposed_items = std.AutoHashMapUnmanaged(Ident.Idx, ExposedItemInfo){},
.imported_modules = std.StringHashMapUnmanaged(CIR.Import.Idx){},
.is_function_boundary = is_function_boundary,
@ -124,9 +124,15 @@ pub const TypeVarLookupResult = union(enum) {
not_found: void,
};
/// Information about a module alias
pub const ModuleAliasInfo = struct {
module_name: Ident.Idx,
is_package_qualified: bool,
};
/// Result of looking up a module alias
pub const ModuleAliasLookupResult = union(enum) {
found: Ident.Idx,
found: ModuleAliasInfo,
not_found: void,
};
@ -174,8 +180,8 @@ pub const TypeVarIntroduceResult = union(enum) {
/// Result of introducing a module alias
pub const ModuleAliasIntroduceResult = union(enum) {
success: void,
shadowing_warning: Ident.Idx, // The module alias that was shadowed
already_in_scope: Ident.Idx, // The module alias already exists in this scope
shadowing_warning: ModuleAliasInfo, // The module alias that was shadowed
already_in_scope: ModuleAliasInfo, // The module alias already exists in this scope
};
/// Result of introducing an exposed item
@ -204,7 +210,7 @@ pub const ItemKind = enum { ident, alias, type_var, module_alias, exposed_item }
pub fn items(scope: *Scope, comptime item_kind: ItemKind) switch (item_kind) {
.ident, .alias => *std.AutoHashMapUnmanaged(Ident.Idx, CIR.Pattern.Idx),
.type_var => *std.AutoHashMapUnmanaged(Ident.Idx, CIR.TypeAnno.Idx),
.module_alias => *std.AutoHashMapUnmanaged(Ident.Idx, Ident.Idx),
.module_alias => *std.AutoHashMapUnmanaged(Ident.Idx, ModuleAliasInfo),
.exposed_item => *std.AutoHashMapUnmanaged(Ident.Idx, ExposedItemInfo),
} {
return switch (item_kind) {
@ -220,7 +226,7 @@ pub fn items(scope: *Scope, comptime item_kind: ItemKind) switch (item_kind) {
pub fn itemsConst(scope: *const Scope, comptime item_kind: ItemKind) switch (item_kind) {
.ident, .alias => *const std.AutoHashMapUnmanaged(Ident.Idx, CIR.Pattern.Idx),
.type_var => *const std.AutoHashMapUnmanaged(Ident.Idx, CIR.TypeAnno.Idx),
.module_alias => *const std.AutoHashMapUnmanaged(Ident.Idx, Ident.Idx),
.module_alias => *const std.AutoHashMapUnmanaged(Ident.Idx, ModuleAliasInfo),
.exposed_item => *const std.AutoHashMapUnmanaged(Ident.Idx, ExposedItemInfo),
} {
return switch (item_kind) {
@ -236,7 +242,7 @@ pub fn itemsConst(scope: *const Scope, comptime item_kind: ItemKind) switch (ite
pub fn put(scope: *Scope, gpa: std.mem.Allocator, comptime item_kind: ItemKind, name: Ident.Idx, value: switch (item_kind) {
.ident, .alias => CIR.Pattern.Idx,
.type_var => CIR.TypeAnno.Idx,
.module_alias => Ident.Idx,
.module_alias => ModuleAliasInfo,
.exposed_item => ExposedItemInfo,
}) std.mem.Allocator.Error!void {
try scope.items(item_kind).put(gpa, name, value);
@ -373,7 +379,8 @@ pub fn introduceModuleAlias(
gpa: std.mem.Allocator,
alias_name: Ident.Idx,
module_name: Ident.Idx,
parent_lookup_fn: ?fn (Ident.Idx) ?Ident.Idx,
is_package_qualified: bool,
parent_lookup_fn: ?fn (Ident.Idx) ?ModuleAliasInfo,
) std.mem.Allocator.Error!ModuleAliasIntroduceResult {
// Check if already exists in current scope by comparing text content
var iter = scope.module_aliases.iterator();
@ -385,15 +392,20 @@ pub fn introduceModuleAlias(
}
// Check for shadowing in parent scopes
var shadowed_module: ?Ident.Idx = null;
var shadowed_module: ?ModuleAliasInfo = null;
if (parent_lookup_fn) |lookup_fn| {
shadowed_module = lookup_fn(alias_name);
}
try scope.put(gpa, .module_alias, alias_name, module_name);
const module_info = ModuleAliasInfo{
.module_name = module_name,
.is_package_qualified = is_package_qualified,
};
if (shadowed_module) |module| {
return ModuleAliasIntroduceResult{ .shadowing_warning = module };
try scope.put(gpa, .module_alias, alias_name, module_info);
if (shadowed_module) |info| {
return ModuleAliasIntroduceResult{ .shadowing_warning = info };
}
return ModuleAliasIntroduceResult{ .success = {} };

View file

@ -48,7 +48,7 @@ pub fn init(source: []const u8) !TestEnv {
parse_ast.store.emptyScratch();
try module_env.initCIRFields(gpa, "test");
try module_env.initCIRFields("test");
can.* = try Can.init(module_env, parse_ast, null);

View file

@ -27,7 +27,7 @@ test "exposed but not implemented - values" {
var env = try ModuleEnv.init(allocator, source);
defer env.deinit();
try env.initCIRFields(allocator, "Test");
try env.initCIRFields("Test");
var ast = try parse.parse(&env.common, allocator);
defer ast.deinit(allocator);
@ -66,7 +66,7 @@ test "exposed but not implemented - types" {
var env = try ModuleEnv.init(allocator, source);
defer env.deinit();
try env.initCIRFields(allocator, "Test");
try env.initCIRFields("Test");
var ast = try parse.parse(&env.common, allocator);
defer ast.deinit(allocator);
@ -105,7 +105,7 @@ test "redundant exposed entries" {
;
var env = try ModuleEnv.init(allocator, source);
defer env.deinit();
try env.initCIRFields(allocator, "Test");
try env.initCIRFields("Test");
var ast = try parse.parse(&env.common, allocator);
defer ast.deinit(allocator);
var czer = try Can.init(&env, &ast, null);
@ -148,7 +148,7 @@ test "shadowing with exposed items" {
;
var env = try ModuleEnv.init(allocator, source);
defer env.deinit();
try env.initCIRFields(allocator, "Test");
try env.initCIRFields("Test");
var ast = try parse.parse(&env.common, allocator);
defer ast.deinit(allocator);
var czer = try Can.init(&env, &ast, null);
@ -181,7 +181,7 @@ test "shadowing non-exposed items" {
;
var env = try ModuleEnv.init(allocator, source);
defer env.deinit();
try env.initCIRFields(allocator, "Test");
try env.initCIRFields("Test");
var ast = try parse.parse(&env.common, allocator);
defer ast.deinit(allocator);
var czer = try Can.init(&env, &ast, null);
@ -221,7 +221,7 @@ test "exposed items correctly tracked across shadowing" {
;
var env = try ModuleEnv.init(allocator, source);
defer env.deinit();
try env.initCIRFields(allocator, "Test");
try env.initCIRFields("Test");
var ast = try parse.parse(&env.common, allocator);
defer ast.deinit(allocator);
var czer = try Can.init(&env, &ast, null);
@ -277,7 +277,7 @@ test "complex case with redundant, shadowing, and not implemented" {
;
var env = try ModuleEnv.init(allocator, source);
defer env.deinit();
try env.initCIRFields(allocator, "Test");
try env.initCIRFields("Test");
var ast = try parse.parse(&env.common, allocator);
defer ast.deinit(allocator);
var czer = try Can.init(&env, &ast, null);
@ -329,7 +329,7 @@ test "exposed_items is populated correctly" {
;
var env = try ModuleEnv.init(allocator, source);
defer env.deinit();
try env.initCIRFields(allocator, "Test");
try env.initCIRFields("Test");
var ast = try parse.parse(&env.common, allocator);
defer ast.deinit(allocator);
var czer = try Can.init(&env, &ast, null);
@ -361,7 +361,7 @@ test "exposed_items persists after canonicalization" {
;
var env = try ModuleEnv.init(allocator, source);
defer env.deinit();
try env.initCIRFields(allocator, "Test");
try env.initCIRFields("Test");
var ast = try parse.parse(&env.common, allocator);
defer ast.deinit(allocator);
var czer = try Can.init(&env, &ast, null);
@ -391,7 +391,7 @@ test "exposed_items never has entries removed" {
;
var env = try ModuleEnv.init(allocator, source);
defer env.deinit();
try env.initCIRFields(allocator, "Test");
try env.initCIRFields("Test");
var ast = try parse.parse(&env.common, allocator);
defer ast.deinit(allocator);
var czer = try Can.init(&env, &ast, null);
@ -424,7 +424,7 @@ test "exposed_items handles identifiers with different attributes" {
;
var env = try ModuleEnv.init(allocator, source);
defer env.deinit();
try env.initCIRFields(allocator, "Test");
try env.initCIRFields("Test");
var ast = try parse.parse(&env.common, allocator);
defer ast.deinit(allocator);
var czer = try Can.init(&env, &ast, null);

View file

@ -30,9 +30,7 @@ test "fractional literal - basic decimal" {
try testing.expectEqual(dec.value.numerator, 314);
try testing.expectEqual(dec.value.denominator_power_of_ten, 2);
},
.e_dec => |dec| {
_ = dec;
},
.e_dec => {},
else => {
std.debug.print("Unexpected expr type: {}\n", .{expr});
try testing.expect(false); // Should be dec_small or frac_dec
@ -54,9 +52,8 @@ test "fractional literal - scientific notation small" {
// This is expected behavior when the value is too small for i16 representation
try testing.expectEqual(dec.value.numerator, 0);
},
.e_dec => |frac| {
.e_dec => {
// RocDec stores the value in a special format
_ = frac;
},
.e_frac_f64 => |frac| {
try testing.expectApproxEqAbs(frac.value, 1.23e-10, 1e-20);

View file

@ -36,7 +36,7 @@ fn parseAndCanonicalizeSource(
ast.* = try parse.parse(&parse_env.common, allocator);
// Initialize CIR fields
try parse_env.initCIRFields(allocator, "Test");
try parse_env.initCIRFields("Test");
const can = try allocator.create(Can);
can.* = try Can.init(parse_env, ast, module_envs);
@ -114,7 +114,7 @@ test "import validation - mix of MODULE NOT FOUND, TYPE NOT EXPOSED, VALUE NOT E
var ast = try parse.parse(&parse_env.common, allocator);
defer ast.deinit(allocator);
// Initialize CIR fields
try parse_env.initCIRFields(allocator, "Test");
try parse_env.initCIRFields("Test");
// Now create module_envs using parse_env's ident store
var module_envs = std.AutoHashMap(base.Ident.Idx, Can.AutoImportedType).init(allocator);
@ -199,7 +199,7 @@ test "import validation - no module_envs provided" {
var ast = try parse.parse(&parse_env.common, allocator);
defer ast.deinit(allocator);
// Initialize CIR fields
try parse_env.initCIRFields(allocator, "Test");
try parse_env.initCIRFields("Test");
// Create czer
// with null module_envs
var can = try Can.init(parse_env, &ast, null);
@ -618,7 +618,7 @@ test "export count safety - ensures safe u16 casting" {
// Test the diagnostic for exactly maxInt(u16) exports
var env1 = try ModuleEnv.init(allocator, "");
defer env1.deinit();
try env1.initCIRFields(allocator, "Test");
try env1.initCIRFields("Test");
const diag_at_limit = CIR.Diagnostic{
.too_many_exports = .{
.count = 65535, // Exactly at the limit
@ -636,7 +636,7 @@ test "export count safety - ensures safe u16 casting" {
// Test the diagnostic for exceeding the limit
var env2 = try ModuleEnv.init(allocator, "");
defer env2.deinit();
try env2.initCIRFields(allocator, "Test");
try env2.initCIRFields("Test");
const diag_over_limit = CIR.Diagnostic{
.too_many_exports = .{
.count = 70000, // Well over the limit

View file

@ -475,7 +475,7 @@ test "hexadecimal integer literals" {
var env = try ModuleEnv.init(gpa, tc.literal);
defer env.deinit();
try env.initCIRFields(gpa, "test");
try env.initCIRFields("test");
var ast = try parse.parseExpr(&env.common, env.gpa);
defer ast.deinit(gpa);
@ -534,7 +534,7 @@ test "binary integer literals" {
var env = try ModuleEnv.init(gpa, tc.literal);
defer env.deinit();
try env.initCIRFields(gpa, "test");
try env.initCIRFields("test");
var ast = try parse.parseExpr(&env.common, env.gpa);
defer ast.deinit(gpa);
@ -593,7 +593,7 @@ test "octal integer literals" {
var env = try ModuleEnv.init(gpa, tc.literal);
defer env.deinit();
try env.initCIRFields(gpa, "test");
try env.initCIRFields("test");
var ast = try parse.parseExpr(&env.common, env.gpa);
defer ast.deinit(gpa);
@ -652,7 +652,7 @@ test "integer literals with uppercase base prefixes" {
var env = try ModuleEnv.init(gpa, tc.literal);
defer env.deinit();
try env.initCIRFields(gpa, "test");
try env.initCIRFields("test");
var ast = try parse.parseExpr(&env.common, gpa);
defer ast.deinit(gpa);
@ -685,7 +685,7 @@ test "numeric literal patterns use pattern idx as type var" {
var env = try ModuleEnv.init(gpa, "");
defer env.deinit();
try env.initCIRFields(gpa, "test");
try env.initCIRFields("test");
// Create an int literal pattern directly
const int_pattern = CIR.Pattern{
@ -708,7 +708,7 @@ test "numeric literal patterns use pattern idx as type var" {
var env = try ModuleEnv.init(gpa, "");
defer env.deinit();
try env.initCIRFields(gpa, "test");
try env.initCIRFields("test");
// Create a dec literal pattern directly
const dec_pattern = CIR.Pattern{
@ -738,7 +738,7 @@ test "pattern numeric literal value edge cases" {
var env = try ModuleEnv.init(gpa, "");
defer env.deinit();
try env.initCIRFields(gpa, "test");
try env.initCIRFields("test");
// Test i128 max
const max_pattern = CIR.Pattern{
@ -768,7 +768,7 @@ test "pattern numeric literal value edge cases" {
var env = try ModuleEnv.init(gpa, "");
defer env.deinit();
try env.initCIRFields(gpa, "test");
try env.initCIRFields("test");
const small_dec_pattern = CIR.Pattern{
.small_dec_literal = .{
@ -793,7 +793,7 @@ test "pattern numeric literal value edge cases" {
var env = try ModuleEnv.init(gpa, "");
defer env.deinit();
try env.initCIRFields(gpa, "test");
try env.initCIRFields("test");
const dec_pattern = CIR.Pattern{
.dec_literal = .{
@ -814,7 +814,7 @@ test "pattern numeric literal value edge cases" {
var env = try ModuleEnv.init(gpa, "");
defer env.deinit();
try env.initCIRFields(gpa, "test");
try env.initCIRFields("test");
// Test negative zero (RocDec doesn't distinguish between +0 and -0)
const neg_zero_pattern = CIR.Pattern{

View file

@ -21,7 +21,7 @@ test "record literal uses record_unbound" {
var env = try ModuleEnv.init(gpa, source);
defer env.deinit();
try env.initCIRFields(gpa, "test");
try env.initCIRFields("test");
var ast = try parse.parseExpr(&env.common, gpa);
defer ast.deinit(gpa);
@ -52,7 +52,7 @@ test "record literal uses record_unbound" {
var env = try ModuleEnv.init(gpa, source2);
defer env.deinit();
try env.initCIRFields(gpa, "test");
try env.initCIRFields("test");
var ast = try parse.parseExpr(&env.common, gpa);
defer ast.deinit(gpa);
@ -83,7 +83,7 @@ test "record literal uses record_unbound" {
var env = try ModuleEnv.init(gpa, source3);
defer env.deinit();
try env.initCIRFields(gpa, "test");
try env.initCIRFields("test");
var ast = try parse.parseExpr(&env.common, gpa);
defer ast.deinit(gpa);
@ -124,7 +124,7 @@ test "record_unbound basic functionality" {
var env = try ModuleEnv.init(gpa, source);
defer env.deinit();
try env.initCIRFields(gpa, "test");
try env.initCIRFields("test");
var ast = try parse.parseExpr(&env.common, gpa);
defer ast.deinit(gpa);
@ -165,7 +165,7 @@ test "record_unbound with multiple fields" {
var env = try ModuleEnv.init(gpa, source);
defer env.deinit();
try env.initCIRFields(gpa, "test");
try env.initCIRFields("test");
// Create record_unbound with multiple fields
var ast = try parse.parseExpr(&env.common, gpa);

View file

@ -23,7 +23,7 @@ const ScopeTestContext = struct {
// heap allocate ModuleEnv for testing
const module_env = try gpa.create(ModuleEnv);
module_env.* = try ModuleEnv.init(gpa, "");
try module_env.initCIRFields(gpa, "test");
try module_env.initCIRFields("test");
return ScopeTestContext{
.self = try Can.init(module_env, undefined, null),

View file

@ -1497,8 +1497,7 @@ fn generateStaticDispatchConstraintFromWhere(self: *Self, where_idx: CIR.WhereCl
},
});
},
.w_alias => |alias| {
_ = alias;
.w_alias => {
// TODO: Recursively unwrap alias
},
.w_malformed => {
@ -4941,7 +4940,7 @@ fn handleRecursiveConstraint(
const recursion_var = try self.types.freshFromContentWithRank(rec_var_content, env.rank());
// Create RecursionInfo to track the recursion metadata
const recursion_info = types_mod.RecursionInfo{
_ = types_mod.RecursionInfo{
.recursion_var = recursion_var,
.depth = depth,
};
@ -4949,7 +4948,6 @@ fn handleRecursiveConstraint(
// Store the recursion info in the deferred constraint
// Note: This will be enhanced in later implementation to properly
// update the constraint with the recursion info
_ = recursion_info;
}
/// Check static dispatch constraints
@ -4964,28 +4962,6 @@ fn handleRecursiveConstraint(
///
/// Initially, we only have to check constraint for `Test.to_str2`. But when we
/// process that, we then have to check `Test.to_str`.
/// Check a from_numeral constraint - actual validation happens during comptime evaluation
fn checkNumeralConstraint(
self: *Self,
type_var: Var,
constraint: types_mod.StaticDispatchConstraint,
num_lit_info: types_mod.NumeralInfo,
nominal_type: types_mod.NominalType,
env: *Env,
) !void {
// Mark parameters as intentionally unused - validation happens in comptime evaluation
_ = self;
_ = type_var;
_ = constraint;
_ = num_lit_info;
_ = nominal_type;
_ = env;
// All numeric literal validation now happens during comptime evaluation
// in ComptimeEvaluator.validateDeferredNumericLiterals()
// This function exists only to satisfy the constraint checking interface
}
fn checkDeferredStaticDispatchConstraints(self: *Self, env: *Env) std.mem.Allocator.Error!void {
var deferred_constraint_len = env.deferred_static_dispatch_constraints.items.items.len;
var deferred_constraint_index: usize = 0;
@ -5250,16 +5226,9 @@ fn checkDeferredStaticDispatchConstraints(self: *Self, env: *Env) std.mem.Alloca
if (any_arg_failed or ret_result.isProblem()) {
try self.unifyWith(deferred_constraint.var_, .err, env);
try self.unifyWith(resolved_func.ret, .err, env);
} else if (constraint.origin == .from_numeral and constraint.num_literal != null) {
// For from_numeral constraints on builtin types, do compile-time validation
try self.checkNumeralConstraint(
deferred_constraint.var_,
constraint,
constraint.num_literal.?,
nominal_type,
env,
);
}
// Note: from_numeral constraint validation happens during comptime evaluation
// in ComptimeEvaluator.validateDeferredNumericLiterals()
}
} else if (dispatcher_content == .structure and
(dispatcher_content.structure == .record or

View file

@ -454,7 +454,7 @@ pub const ReportBuilder = struct {
const expected_content = self.snapshots.getContent(types.expected_snapshot);
const actual_content = self.snapshots.getContent(types.actual_snapshot);
if (types.from_annotation and self.areBothFunctionSnapshots(expected_content, actual_content)) {
if (types.from_annotation and areBothFunctionSnapshots(expected_content, actual_content)) {
// When we have constraint_origin_var, it indicates this error originated from
// a specific constraint like a dot access (e.g., str.to_utf8()).
// In this case, show a specialized argument type mismatch error.
@ -2436,13 +2436,12 @@ pub const ReportBuilder = struct {
}
/// Check if both snapshot contents represent function types
fn areBothFunctionSnapshots(self: *Self, expected_content: snapshot.SnapshotContent, actual_content: snapshot.SnapshotContent) bool {
return self.isSnapshotFunction(expected_content) and self.isSnapshotFunction(actual_content);
fn areBothFunctionSnapshots(expected_content: snapshot.SnapshotContent, actual_content: snapshot.SnapshotContent) bool {
return isSnapshotFunction(expected_content) and isSnapshotFunction(actual_content);
}
/// Check if a snapshot content represents a function type
fn isSnapshotFunction(self: *Self, content: snapshot.SnapshotContent) bool {
_ = self;
fn isSnapshotFunction(content: snapshot.SnapshotContent) bool {
return switch (content) {
.structure => |structure| switch (structure) {
.fn_pure, .fn_effectful, .fn_unbound => true,

View file

@ -201,7 +201,7 @@ pub fn initWithImport(module_name: []const u8, source: []const u8, other_module_
parse_ast.store.emptyScratch();
// Canonicalize
try module_env.initCIRFields(gpa, module_name);
try module_env.initCIRFields(module_name);
can.* = try Can.init(module_env, parse_ast, &module_envs);
errdefer can.deinit();
@ -321,7 +321,7 @@ pub fn init(module_name: []const u8, source: []const u8) !TestEnv {
parse_ast.store.emptyScratch();
// Canonicalize
try module_env.initCIRFields(gpa, module_name);
try module_env.initCIRFields(module_name);
can.* = try Can.init(module_env, parse_ast, &module_envs);
errdefer can.deinit();

View file

@ -2425,10 +2425,10 @@ test "check type - pure zero-arg function annotation" {
try checkTypesModule(source, .{ .pass = .last_def }, "({}) -> { }");
}
test "imports of non-existent modules produce MODULE NOT FOUND errors" {
// This test verifies that importing modules that don't exist produces
// MODULE NOT FOUND errors. This is a regression test - a parser change
// for zero-arg functions accidentally caused these errors to disappear.
test "qualified imports don't produce MODULE NOT FOUND during canonicalization" {
// Qualified imports (e.g., "json.Json") are cross-package imports that are
// resolved by the workspace resolver, not during canonicalization.
// They should NOT produce MODULE NOT FOUND errors during canonicalization.
//
// Source from test/snapshots/can_import_comprehensive.md
const source =
@ -2479,11 +2479,9 @@ test "imports of non-existent modules produce MODULE NOT FOUND errors" {
}
}
// We expect exactly 3 MODULE NOT FOUND errors:
// 1. json.Json
// 2. http.Client
// 3. utils.String
try testing.expectEqual(@as(usize, 3), module_not_found_count);
// Qualified imports (json.Json, http.Client, utils.String) should NOT produce
// MODULE NOT FOUND errors - they're handled by the workspace resolver
try testing.expectEqual(@as(usize, 0), module_not_found_count);
}
// Try with match and error propagation //

View file

@ -82,7 +82,7 @@ const TestEnv = struct {
fn init(gpa: std.mem.Allocator) std.mem.Allocator.Error!Self {
const module_env = try gpa.create(ModuleEnv);
module_env.* = try ModuleEnv.init(gpa, try gpa.dupe(u8, ""));
try module_env.initCIRFields(gpa, "Test");
try module_env.initCIRFields("Test");
return .{
.module_env = module_env,
.snapshots = try snapshot_mod.Store.initCapacity(gpa, 16),

View file

@ -1,4 +1,5 @@
//! This module implements Hindley-Milner style type unification with extensions for:
//!
//! * flex/rigid variables
//! * type aliases
//! * tuples

View file

@ -121,7 +121,7 @@ fn benchParseOrTokenize(comptime is_parse: bool, gpa: Allocator, path: []const u
var tokenizer = try tokenize.Tokenizer.init(&env.?.common, gpa, roc_file.content, msg_slice);
try tokenizer.tokenize(gpa);
var result = tokenizer.finishAndDeinit(gpa);
var result = tokenizer.finishAndDeinit();
iteration_tokens += result.tokens.tokens.len;
result.tokens.deinit(gpa);
}

View file

@ -1579,7 +1579,7 @@ pub fn setupSharedMemoryWithModuleEnv(allocs: *Allocators, roc_file_path: []cons
defer app_parse_ast.deinit(allocs.gpa);
app_parse_ast.store.emptyScratch();
try app_env.initCIRFields(shm_allocator, app_module_name);
try app_env.initCIRFields(app_module_name);
var app_module_envs_map = std.AutoHashMap(base.Ident.Idx, Can.AutoImportedType).init(allocs.gpa);
defer app_module_envs_map.deinit();
@ -1826,7 +1826,7 @@ fn compileModuleToSharedMemory(
parse_ast.store.emptyScratch();
// Initialize CIR
try env.initCIRFields(shm_allocator, module_name_copy);
try env.initCIRFields(module_name_copy);
// Create module_envs map
var module_envs_map = std.AutoHashMap(base.Ident.Idx, Can.AutoImportedType).init(allocs.gpa);
@ -3047,7 +3047,7 @@ fn rocTest(allocs: *Allocators, args: cli_args.TestArgs) !void {
parse_ast.store.emptyScratch();
// Initialize CIR fields in ModuleEnv
try env.initCIRFields(allocs.gpa, module_name);
try env.initCIRFields(module_name);
// Populate module_envs with Bool, Try, Dict, Set using shared function
try Can.populateModuleEnvs(
@ -3217,8 +3217,7 @@ fn rocTest(allocs: *Allocators, args: cli_args.TestArgs) !void {
}
}
fn rocRepl(allocs: *Allocators) !void {
_ = allocs;
fn rocRepl(_: *Allocators) !void {
const stderr = stderrWriter();
defer stderr.flush() catch {};
stderr.print("repl not implemented\n", .{}) catch {};

View file

@ -1522,3 +1522,19 @@ test "run allows warnings without blocking execution" {
// Should produce output (runs successfully)
try testing.expect(std.mem.indexOf(u8, run_result.stdout, "Hello, World!") != null);
}
test "fx platform method inspect on string" {
// Tests that calling .inspect() on a Str correctly reports MISSING METHOD
// (Str doesn't have an inspect method, unlike custom opaque types)
const allocator = testing.allocator;
const run_result = try runRoc(allocator, "test/fx/test_method_inspect.roc", .{});
defer allocator.free(run_result.stdout);
defer allocator.free(run_result.stderr);
// This should fail because Str doesn't have an inspect method
try checkFailure(run_result);
// Should show MISSING METHOD error
try testing.expect(std.mem.indexOf(u8, run_result.stderr, "MISSING METHOD") != null);
}

View file

@ -86,13 +86,6 @@ test "roc docs generates nested package documentation" {
\\
);
// Create output directory path
const output_dir = try std.fs.path.join(gpa, &[_][]const u8{ tmp_path, "generated-docs" });
defer gpa.free(output_dir);
const root_path = try std.fs.path.join(gpa, &[_][]const u8{ tmp_path, "root.roc" });
defer gpa.free(root_path);
// Note: We would call main.rocDocs(gpa, args) here, but it requires
// a full build environment setup. Instead, we test the individual
// helper functions in separate tests below.
@ -103,9 +96,6 @@ test "roc docs generates nested package documentation" {
tmp.dir.access("bar/main.roc", .{}) catch unreachable;
tmp.dir.access("baz/main.roc", .{}) catch unreachable;
tmp.dir.access("qux/main.roc", .{}) catch unreachable;
_ = root_path;
_ = output_dir;
}
test "generatePackageIndex creates valid HTML" {

View file

@ -1963,8 +1963,7 @@ test "SafeMultiList CompactWriter verify exact memory layout" {
// Sort by alignment descending, then name ascending
std.mem.sort(FieldInfo, &field_infos, {}, struct {
fn lessThan(ctx: void, lhs: FieldInfo, rhs: FieldInfo) bool {
_ = ctx;
fn lessThan(_: void, lhs: FieldInfo, rhs: FieldInfo) bool {
if (lhs.alignment != rhs.alignment) {
return lhs.alignment > rhs.alignment;
}

View file

@ -84,13 +84,10 @@ pub const CacheKey = struct {
/// Format cache key for debugging output.
pub fn format(
self: Self,
comptime fmt: []const u8,
options: std.fmt.FormatOptions,
comptime _: []const u8,
_: std.fmt.FormatOptions,
writer: anytype,
) !void {
_ = fmt;
_ = options;
try writer.print("CacheKey{{ content: {x}, mtime: {}, compiler: {x} }}", .{
self.content_hash[0..8], // First 8 bytes for readability
self.file_mtime,

View file

@ -65,12 +65,6 @@ pub const CacheManager = struct {
};
}
/// Deinitialize the cache manager.
pub fn deinit(self: *Self) void {
_ = self;
// Nothing to deinit currently
}
/// Load a cached module based on its content and compiler version.
/// Look up a cache entry by content and compiler version.
///

View file

@ -201,17 +201,6 @@ pub const CacheModule = struct {
}
}
/// Convenience functions for reading/writing cache files
pub fn writeToFile(
allocator: Allocator,
cache_data: []const u8,
file_path: []const u8,
filesystem: anytype,
) !void {
_ = allocator;
try filesystem.writeFile(file_path, cache_data);
}
/// Convenience function for reading cache files
pub fn readFromFile(
allocator: Allocator,

View file

@ -294,10 +294,8 @@ const GlobalQueue = struct {
}
// Hook from ModuleBuild to enqueue newly discovered/scheduled modules
pub fn hookOnSchedule(ctx: ?*anyopaque, package_name: []const u8, module_name: []const u8, _path: []const u8, _depth: u32) void {
pub fn hookOnSchedule(ctx: ?*anyopaque, package_name: []const u8, module_name: []const u8, _: []const u8, _: u32) void {
var self: *GlobalQueue = @ptrCast(@alignCast(ctx.?));
_ = _path;
_ = _depth;
// Enqueue to global queue - log but don't fail on error
self.enqueue(package_name, module_name) catch {
// Continue anyway - the module will still be processed by local scheduler
@ -411,7 +409,6 @@ pub const BuildEnv = struct {
// Deinit cache manager if present
if (self.cache_manager) |cm| {
cm.deinit();
self.gpa.destroy(cm);
}
@ -682,14 +679,8 @@ pub const BuildEnv = struct {
ws: *BuildEnv,
// Called by ModuleBuild.schedule_hook when a module is discovered/scheduled
pub fn onSchedule(ctx: ?*anyopaque, package_name: []const u8, module_name: []const u8, _path: []const u8, _depth: u32) void {
const self: *ScheduleCtx = @ptrCast(@alignCast(ctx.?));
_ = package_name;
_ = module_name;
_ = _path;
_ = _depth;
pub fn onSchedule(_: ?*anyopaque, _: []const u8, _: []const u8, _: []const u8, _: u32) void {
// Early reports auto-register in OrderedSink.emitReport when they are emitted
_ = self;
}
};
@ -704,12 +695,6 @@ pub const BuildEnv = struct {
}
}
fn resolverClassify(ctx: ?*anyopaque, _: []const u8, _: []const u8) bool {
_ = ctx;
// Unused: ModuleBuild determines external vs local from CIR (s_import.qualifier_tok)
return false;
}
fn resolverScheduleExternal(ctx: ?*anyopaque, current_package: []const u8, import_name: []const u8) void {
var self: *ResolverCtx = @ptrCast(@alignCast(ctx.?));
const cur_pkg = self.ws.packages.get(current_package) orelse return;
@ -761,8 +746,7 @@ pub const BuildEnv = struct {
return sched.*.getEnvIfDone(rest);
}
fn resolverResolveLocalPath(ctx: ?*anyopaque, _current_package: []const u8, root_dir: []const u8, import_name: []const u8) []const u8 {
_ = _current_package;
fn resolverResolveLocalPath(ctx: ?*anyopaque, _: []const u8, root_dir: []const u8, import_name: []const u8) []const u8 {
var self: *ResolverCtx = @ptrCast(@alignCast(ctx.?));
return self.ws.dottedToPath(root_dir, import_name) catch import_name;
}
@ -774,7 +758,6 @@ pub const BuildEnv = struct {
ctx.* = .{ .ws = self };
return .{
.ctx = ctx,
.classify = resolverClassify,
.scheduleExternal = resolverScheduleExternal,
.isReady = resolverIsReady,
.getEnv = resolverGetEnv,

View file

@ -83,8 +83,6 @@ pub const ScheduleHook = struct {
/// Resolver for handling imports across package boundaries
pub const ImportResolver = struct {
ctx: ?*anyopaque,
/// Return true if the import_name refers to an external package (e.g. "cli.Stdout")
classify: *const fn (ctx: ?*anyopaque, current_package: []const u8, import_name: []const u8) bool,
/// Ensure the external import is scheduled for building in its owning package
scheduleExternal: *const fn (ctx: ?*anyopaque, current_package: []const u8, import_name: []const u8) void,
/// Return true if the external import is fully type-checked and its ModuleEnv is ready
@ -579,7 +577,7 @@ pub const PackageEnv = struct {
var env = try ModuleEnv.init(self.gpa, src);
// init CIR fields
try env.initCIRFields(self.gpa, st.name);
try env.initCIRFields(st.name);
try env.common.calcLineStarts(self.gpa);

View file

@ -325,7 +325,7 @@
// var module_env = try ModuleEnv.init(gpa, source);
// defer module_env.deinit();
// try module_env.initCIRFields(gpa, "TestModule");
// try module_env.initCIRFields("TestModule");
// // CIR is now just an alias for ModuleEnv, so use module_env directly
// const cir = &module_env;
@ -401,7 +401,7 @@
// // var module_env = try ModuleEnv.init(gpa, source);
// // defer module_env.deinit();
// // try module_env.initCIRFields(gpa, "TestModule");
// // try module_env.initCIRFields("TestModule");
// // // CIR is now just an alias for ModuleEnv, so use module_env directly
// // const cir = &module_env;

View file

@ -34,7 +34,7 @@ test "ModuleEnv.Serialized roundtrip" {
_ = try original.common.line_starts.append(gpa, 20);
// Initialize CIR fields to ensure imports are available
try original.initCIRFields(gpa, "TestModule");
try original.initCIRFields("TestModule");
// Add some imports to test serialization/deserialization
const import1 = try original.imports.getOrPut(gpa, &original.common.strings, "json.Json");
@ -193,7 +193,7 @@ test "ModuleEnv.Serialized roundtrip" {
// defer original.deinit();
// // Initialize CIR fields
// try original.initCIRFields(gpa, "test.Types");
// try original.initCIRFields("test.Types");
// // Add some type variables
// const var1 = try original.types.freshFromContent(.err);
@ -358,7 +358,7 @@ test "ModuleEnv.Serialized roundtrip" {
// defer original.deinit();
// // Initialize CIR fields
// try original.initCIRFields(gpa, "test.Hello");
// try original.initCIRFields("test.Hello");
// // Create arena allocator for serialization
// var arena = std.heap.ArenaAllocator.init(gpa);
@ -431,11 +431,8 @@ test "ModuleEnv pushExprTypesToSExprTree extracts and formats types" {
.origin_module = builtin_ident,
.is_opaque = false,
};
const str_type = try env.types.freshFromContent(.{ .structure = .{ .nominal_type = str_nominal } });
// Add a string segment expression
const segment_idx = try env.addExpr(.{ .e_str_segment = .{ .literal = str_literal_idx } }, base.Region.from_raw_offsets(0, 5));
_ = str_type;
// Now create a string expression that references the segment
const expr_idx = try env.addExpr(.{ .e_str = .{ .span = Expr.Span{ .span = base.DataSpan{ .start = @intFromEnum(segment_idx), .len = 1 } } } }, base.Region.from_raw_offsets(0, 5));

View file

@ -44,7 +44,7 @@ is_initialized: bool = false,
rt_var: ?types.Var = null,
/// Copy this stack value to a destination pointer with bounds checking
pub fn copyToPtr(self: StackValue, layout_cache: *LayoutStore, dest_ptr: *anyopaque, _: *RocOps) !void {
pub fn copyToPtr(self: StackValue, layout_cache: *LayoutStore, dest_ptr: *anyopaque) !void {
std.debug.assert(self.is_initialized); // Source must be initialized before copying
// For closures, use getTotalSize to include capture data; for others use layoutSize
@ -752,9 +752,9 @@ pub const TupleAccessor = struct {
}
/// Set an element by copying from a source StackValue
pub fn setElement(self: TupleAccessor, index: usize, source: StackValue, ops: *RocOps) !void {
pub fn setElement(self: TupleAccessor, index: usize, source: StackValue) !void {
const dest_element = try self.getElement(index);
try source.copyToPtr(self.layout_cache, dest_element.ptr.?, ops);
try source.copyToPtr(self.layout_cache, dest_element.ptr.?);
}
/// Find the sorted element index corresponding to an original tuple position
@ -1030,9 +1030,9 @@ pub const RecordAccessor = struct {
}
/// Set a field by copying from a source StackValue
pub fn setFieldByIndex(self: RecordAccessor, index: usize, source: StackValue, ops: *RocOps) !void {
pub fn setFieldByIndex(self: RecordAccessor, index: usize, source: StackValue) !void {
const dest_field = try self.getFieldByIndex(index);
try source.copyToPtr(self.layout_cache, dest_field.ptr.?, ops);
try source.copyToPtr(self.layout_cache, dest_field.ptr.?);
}
/// Get the number of fields in this record

View file

@ -54,10 +54,8 @@ fn comptimeRocAlloc(alloc_args: *RocAlloc, env: *anyopaque) callconv(.c) void {
alloc_args.answer = base_ptr;
}
fn comptimeRocDealloc(dealloc_args: *RocDealloc, env: *anyopaque) callconv(.c) void {
fn comptimeRocDealloc(_: *RocDealloc, _: *anyopaque) callconv(.c) void {
// No-op: arena allocator frees all memory at once when evaluation completes
_ = dealloc_args;
_ = env;
}
fn comptimeRocRealloc(realloc_args: *RocRealloc, env: *anyopaque) callconv(.c) void {
@ -93,8 +91,7 @@ fn comptimeRocRealloc(realloc_args: *RocRealloc, env: *anyopaque) callconv(.c) v
realloc_args.answer = new_ptr;
}
fn comptimeRocDbg(dbg_args: *const RocDbg, env: *anyopaque) callconv(.c) void {
_ = env;
fn comptimeRocDbg(dbg_args: *const RocDbg, _: *anyopaque) callconv(.c) void {
var stderr_buffer: [256]u8 = undefined;
var stderr_writer = std.fs.File.stderr().writer(&stderr_buffer);
const stderr = &stderr_writer.interface;
@ -1014,7 +1011,7 @@ pub const ComptimeEvaluator = struct {
// Build the Numeral record
// Ownership of before_list and after_list is transferred to this record
const num_literal_record = try self.buildNumeralRecord(is_neg_value, before_list, after_list, roc_ops);
const num_literal_record = try self.buildNumeralRecord(is_neg_value, before_list, after_list);
defer num_literal_record.decref(&self.interpreter.runtime_layout_store, roc_ops);
// Evaluate the from_numeral function to get a closure
@ -1229,7 +1226,6 @@ pub const ComptimeEvaluator = struct {
is_negative: eval_mod.StackValue,
digits_before_pt: eval_mod.StackValue,
digits_after_pt: eval_mod.StackValue,
roc_ops: *RocOps,
) !eval_mod.StackValue {
// Use precomputed idents from self.env for field names
const field_layouts = [_]layout_mod.Layout{
@ -1251,13 +1247,13 @@ pub const ComptimeEvaluator = struct {
// Use self.env for field lookups since the record was built with self.env's idents
const is_neg_idx = accessor.findFieldIndex(self.env.idents.is_negative) orelse return error.OutOfMemory;
try accessor.setFieldByIndex(is_neg_idx, is_negative, roc_ops);
try accessor.setFieldByIndex(is_neg_idx, is_negative);
const before_pt_idx = accessor.findFieldIndex(self.env.idents.digits_before_pt) orelse return error.OutOfMemory;
try accessor.setFieldByIndex(before_pt_idx, digits_before_pt, roc_ops);
try accessor.setFieldByIndex(before_pt_idx, digits_before_pt);
const after_pt_idx = accessor.findFieldIndex(self.env.idents.digits_after_pt) orelse return error.OutOfMemory;
try accessor.setFieldByIndex(after_pt_idx, digits_after_pt, roc_ops);
try accessor.setFieldByIndex(after_pt_idx, digits_after_pt);
return dest;
}
@ -1390,9 +1386,8 @@ pub const ComptimeEvaluator = struct {
fn extractInvalidNumeralMessage(
self: *ComptimeEvaluator,
try_accessor: eval_mod.StackValue.RecordAccessor,
region: base.Region,
_: base.Region,
) ![]const u8 {
_ = region;
// Get the payload field from the Try record
// Use layout store's env for field lookups

View file

@ -508,13 +508,9 @@ pub const Interpreter = struct {
}
}
pub fn startTrace(self: *Interpreter) void {
_ = self;
}
pub fn startTrace(_: *Interpreter) void {}
pub fn endTrace(self: *Interpreter) void {
_ = self;
}
pub fn endTrace(_: *Interpreter) void {}
pub fn evaluateExpression(
self: *Interpreter,
@ -612,7 +608,7 @@ pub const Interpreter = struct {
self.early_return_value = null;
defer return_val.decref(&self.runtime_layout_store, roc_ops);
if (try self.shouldCopyResult(return_val, ret_ptr, roc_ops)) {
try return_val.copyToPtr(&self.runtime_layout_store, ret_ptr, roc_ops);
try return_val.copyToPtr(&self.runtime_layout_store, ret_ptr);
}
return;
}
@ -622,7 +618,7 @@ pub const Interpreter = struct {
// Only copy result if the result type is compatible with ret_ptr
if (try self.shouldCopyResult(result_value, ret_ptr, roc_ops)) {
try result_value.copyToPtr(&self.runtime_layout_store, ret_ptr, roc_ops);
try result_value.copyToPtr(&self.runtime_layout_store, ret_ptr);
}
return;
}
@ -632,7 +628,7 @@ pub const Interpreter = struct {
// Only copy result if the result type is compatible with ret_ptr
if (try self.shouldCopyResult(result, ret_ptr, roc_ops)) {
try result.copyToPtr(&self.runtime_layout_store, ret_ptr, roc_ops);
try result.copyToPtr(&self.runtime_layout_store, ret_ptr);
}
}
@ -767,7 +763,7 @@ pub const Interpreter = struct {
return StackValue{ .layout = .{ .tag = .zst, .data = undefined }, .ptr = ptr, .is_initialized = true };
}
pub fn pushCopy(self: *Interpreter, src: StackValue, roc_ops: *RocOps) !StackValue {
pub fn pushCopy(self: *Interpreter, src: StackValue) !StackValue {
const size: u32 = if (src.layout.tag == .closure) src.getTotalSize(&self.runtime_layout_store) else self.runtime_layout_store.layoutSize(src.layout);
const target_usize = self.runtime_layout_store.targetUsize();
var alignment = src.layout.alignment(target_usize);
@ -779,7 +775,7 @@ pub const Interpreter = struct {
// Preserve rt_var for constant folding
const dest = StackValue{ .layout = src.layout, .ptr = ptr, .is_initialized = true, .rt_var = src.rt_var };
if (size > 0 and src.ptr != null and ptr != null) {
try src.copyToPtr(&self.runtime_layout_store, ptr.?, roc_ops);
try src.copyToPtr(&self.runtime_layout_store, ptr.?);
}
return dest;
}
@ -870,8 +866,8 @@ pub const Interpreter = struct {
};
// Copy elements for comparison (compare_fn will consume them)
const arg0 = try self.pushCopy(elem1_value, roc_ops); // element being inserted
const arg1 = try self.pushCopy(elem0_value, roc_ops); // element to compare against
const arg0 = try self.pushCopy(elem1_value); // element being inserted
const arg1 = try self.pushCopy(elem0_value); // element to compare against
// Push continuation to handle comparison result
try work_stack.push(.{ .apply_continuation = .{ .sort_compare_result = .{
@ -2131,7 +2127,7 @@ pub const Interpreter = struct {
};
// Copy to new location and increment refcount
var result = try self.pushCopy(elem_value, roc_ops);
var result = try self.pushCopy(elem_value);
result.rt_var = elem_rt_var; // Ensure rt_var is preserved after copy
return result;
},
@ -2193,14 +2189,14 @@ pub const Interpreter = struct {
if (list_a.len() == 0) {
list_a_arg.decref(&self.runtime_layout_store, roc_ops);
// list_b ownership is transferred to the result (pushCopy increfs)
const result = try self.pushCopy(list_b_arg, roc_ops);
const result = try self.pushCopy(list_b_arg);
list_b_arg.decref(&self.runtime_layout_store, roc_ops);
return result;
}
if (list_b.len() == 0) {
list_b_arg.decref(&self.runtime_layout_store, roc_ops);
// list_a ownership is transferred to the result (pushCopy increfs)
const result = try self.pushCopy(list_a_arg, roc_ops);
const result = try self.pushCopy(list_a_arg);
list_a_arg.decref(&self.runtime_layout_store, roc_ops);
return result;
}
@ -4762,8 +4758,6 @@ pub const Interpreter = struct {
fn buildSuccessValRecord(self: *Interpreter, success: bool, val: RocDec) !StackValue {
// Layout: tuple (Dec, Bool) where element 0 is Dec (16 bytes) and element 1 is Bool (1 byte)
// Total size with alignment: 24 bytes (16 for Dec + 8 for alignment of Bool field)
const dec_layout = Layout.frac(.dec);
const bool_layout = Layout.int(.u8);
// We need to create a tuple layout for the result
// For now, allocate raw bytes and set them directly
@ -4781,8 +4775,6 @@ pub const Interpreter = struct {
out.is_initialized = true;
// Layout is set by pushRawBytes as .zst since we're working with raw bytes
_ = dec_layout;
_ = bool_layout;
return out;
}
@ -5178,8 +5170,7 @@ pub const Interpreter = struct {
return null;
}
fn layoutMatchesKind(self: *Interpreter, layout_val: Layout, kind: NumericKind) bool {
_ = self;
fn layoutMatchesKind(_: *Interpreter, layout_val: Layout, kind: NumericKind) bool {
if (layout_val.tag != .scalar) return false;
return switch (kind) {
.int => layout_val.data.scalar.tag == .int,
@ -5309,8 +5300,7 @@ pub const Interpreter = struct {
return out;
}
fn stackValueToDecimal(self: *Interpreter, value: StackValue) !RocDec {
_ = self;
fn stackValueToDecimal(_: *Interpreter, value: StackValue) !RocDec {
if (value.layout.tag != .scalar) return error.TypeMismatch;
switch (value.layout.data.scalar.tag) {
.frac => switch (value.layout.data.scalar.data.frac) {
@ -5328,8 +5318,7 @@ pub const Interpreter = struct {
}
}
fn stackValueToFloat(self: *Interpreter, comptime FloatT: type, value: StackValue) !FloatT {
_ = self;
fn stackValueToFloat(_: *Interpreter, comptime FloatT: type, value: StackValue) !FloatT {
if (value.layout.tag != .scalar) return error.TypeMismatch;
switch (value.layout.data.scalar.tag) {
.int => {
@ -5371,8 +5360,7 @@ pub const Interpreter = struct {
dec: RocDec,
};
fn isNumericScalar(self: *Interpreter, layout_val: Layout) bool {
_ = self;
fn isNumericScalar(_: *Interpreter, layout_val: Layout) bool {
if (layout_val.tag != .scalar) return false;
return switch (layout_val.data.scalar.tag) {
.int, .frac => true,
@ -5380,8 +5368,7 @@ pub const Interpreter = struct {
};
}
fn extractNumericValue(self: *Interpreter, value: StackValue) !NumericValue {
_ = self;
fn extractNumericValue(_: *Interpreter, value: StackValue) !NumericValue {
if (value.layout.tag != .scalar) return error.NotNumeric;
const scalar = value.layout.data.scalar;
return switch (scalar.tag) {
@ -5422,8 +5409,7 @@ pub const Interpreter = struct {
};
}
fn orderInt(self: *Interpreter, lhs: i128, rhs: NumericValue) !std.math.Order {
_ = self;
fn orderInt(_: *Interpreter, lhs: i128, rhs: NumericValue) !std.math.Order {
return switch (rhs) {
.int => std.math.order(lhs, rhs.int),
.f32 => {
@ -5441,8 +5427,7 @@ pub const Interpreter = struct {
};
}
fn orderF32(self: *Interpreter, lhs: f32, rhs: NumericValue) !std.math.Order {
_ = self;
fn orderF32(_: *Interpreter, lhs: f32, rhs: NumericValue) !std.math.Order {
return switch (rhs) {
.int => {
const rhs_f: f32 = @floatFromInt(rhs.int);
@ -5457,8 +5442,7 @@ pub const Interpreter = struct {
};
}
fn orderF64(self: *Interpreter, lhs: f64, rhs: NumericValue) !std.math.Order {
_ = self;
fn orderF64(_: *Interpreter, lhs: f64, rhs: NumericValue) !std.math.Order {
return switch (rhs) {
.int => {
const rhs_f: f64 = @floatFromInt(rhs.int);
@ -5473,8 +5457,7 @@ pub const Interpreter = struct {
};
}
fn orderDec(self: *Interpreter, lhs: RocDec, rhs: NumericValue) !std.math.Order {
_ = self;
fn orderDec(_: *Interpreter, lhs: RocDec, rhs: NumericValue) !std.math.Order {
return switch (rhs) {
.int => {
const rhs_dec = rhs.int * RocDec.one_point_zero_i128;
@ -6208,7 +6191,7 @@ pub const Interpreter = struct {
const data_ptr = utils.allocateWithRefcount(elem_size, elem_alignment_u32, false, roc_ops);
if (elem_size > 0 and payload.ptr != null) {
try payload.copyToPtr(&self.runtime_layout_store, data_ptr, roc_ops);
try payload.copyToPtr(&self.runtime_layout_store, data_ptr);
}
if (out.ptr) |ptr| {
@ -6295,7 +6278,7 @@ pub const Interpreter = struct {
}
// Copy the value to pass to the method
const copied_value = self.pushCopy(value, roc_ops) catch return null;
const copied_value = self.pushCopy(value) catch return null;
// Bind the parameter
self.bindings.append(.{
@ -6460,7 +6443,7 @@ pub const Interpreter = struct {
switch (pat) {
.assign => |_| {
// Bind entire value to this pattern
const copied = try self.pushCopy(value, roc_ops);
const copied = try self.pushCopy(value);
try out_binds.append(.{ .pattern_idx = pattern_idx, .value = copied, .expr_idx = expr_idx, .source_env = self.env });
return true;
},
@ -6471,7 +6454,7 @@ pub const Interpreter = struct {
return false;
}
const alias_value = try self.pushCopy(value, roc_ops);
const alias_value = try self.pushCopy(value);
try out_binds.append(.{ .pattern_idx = pattern_idx, .value = alias_value, .expr_idx = expr_idx, .source_env = self.env });
return true;
},
@ -10629,7 +10612,7 @@ pub const Interpreter = struct {
self.triggerCrash("e_closure: capture field not found in record", false, roc_ops);
return error.Crash;
};
try accessor.setFieldByIndex(idx_opt, cap_val, roc_ops);
try accessor.setFieldByIndex(idx_opt, cap_val);
}
}
return value;
@ -10762,7 +10745,7 @@ pub const Interpreter = struct {
}
}
}
const copy_result = try self.pushCopy(b.value, roc_ops);
const copy_result = try self.pushCopy(b.value);
return copy_result;
}
}
@ -10792,7 +10775,7 @@ pub const Interpreter = struct {
var accessor = try rec_val.asRecord(&self.runtime_layout_store);
if (accessor.findFieldIndex(var_ident)) |fidx| {
const field_val = try accessor.getFieldByIndex(fidx);
return try self.pushCopy(field_val, roc_ops);
return try self.pushCopy(field_val);
}
}
}
@ -10815,7 +10798,7 @@ pub const Interpreter = struct {
});
// Return a copy to give the caller ownership while the binding retains ownership too.
// This is consistent with the pushCopy call above for already-bound values.
return try self.pushCopy(result, roc_ops);
return try self.pushCopy(result);
}
}
@ -11422,7 +11405,7 @@ pub const Interpreter = struct {
// Set all elements
for (0..total_count) |idx| {
try accessor.setElement(idx, values[idx], roc_ops);
try accessor.setElement(idx, values[idx]);
}
// Decref temporary values after they've been copied into the tuple
@ -11512,7 +11495,7 @@ pub const Interpreter = struct {
if (runtime_list.bytes) |buffer| {
for (values, 0..) |val, idx| {
const dest_ptr = buffer + idx * elem_size;
try val.copyToPtr(&self.runtime_layout_store, dest_ptr, roc_ops);
try val.copyToPtr(&self.runtime_layout_store, dest_ptr);
}
}
}
@ -11644,7 +11627,7 @@ pub const Interpreter = struct {
const info = base_accessor.field_layouts.get(idx);
const dest_field_idx = accessor.findFieldIndex(info.name) orelse return error.TypeMismatch;
const base_field_value = try base_accessor.getFieldByIndex(idx);
try accessor.setFieldByIndex(dest_field_idx, base_field_value, roc_ops);
try accessor.setFieldByIndex(dest_field_idx, base_field_value);
}
}
@ -11666,7 +11649,7 @@ pub const Interpreter = struct {
}
}
try accessor.setFieldByIndex(dest_field_idx, val, roc_ops);
try accessor.setFieldByIndex(dest_field_idx, val);
}
// Decref base value and field values after they've been copied
@ -11782,7 +11765,7 @@ pub const Interpreter = struct {
const payload_field = try acc.getFieldByIndex(payload_field_idx);
if (payload_field.ptr) |payload_ptr| {
if (total_count == 1) {
try values[0].copyToPtr(&self.runtime_layout_store, payload_ptr, roc_ops);
try values[0].copyToPtr(&self.runtime_layout_store, payload_ptr);
} else {
// Multiple args - create tuple payload
var elem_layouts = try self.allocator.alloc(Layout, total_count);
@ -11795,7 +11778,7 @@ pub const Interpreter = struct {
var tuple_dest = StackValue{ .layout = tuple_layout, .ptr = payload_ptr, .is_initialized = true };
var tup_acc = try tuple_dest.asTuple(&self.runtime_layout_store);
for (values, 0..) |val, idx| {
try tup_acc.setElement(idx, val, roc_ops);
try tup_acc.setElement(idx, val);
}
}
}
@ -11846,7 +11829,7 @@ pub const Interpreter = struct {
// Write payload
const proper_payload_field = try proper_acc.getElement(0);
if (proper_payload_field.ptr) |proper_ptr| {
try values[0].copyToPtr(&self.runtime_layout_store, proper_ptr, roc_ops);
try values[0].copyToPtr(&self.runtime_layout_store, proper_ptr);
}
for (values) |val| {
@ -11857,7 +11840,7 @@ pub const Interpreter = struct {
return true;
}
try values[0].copyToPtr(&self.runtime_layout_store, payload_ptr, roc_ops);
try values[0].copyToPtr(&self.runtime_layout_store, payload_ptr);
} else {
// Multiple args - create tuple payload
var elem_layouts = try self.allocator.alloc(Layout, total_count);
@ -11870,7 +11853,7 @@ pub const Interpreter = struct {
var tuple_dest = StackValue{ .layout = tuple_layout, .ptr = payload_ptr, .is_initialized = true };
var tup_acc = try tuple_dest.asTuple(&self.runtime_layout_store);
for (values, 0..) |val, idx| {
try tup_acc.setElement(idx, val, roc_ops);
try tup_acc.setElement(idx, val);
}
}
}
@ -11924,7 +11907,7 @@ pub const Interpreter = struct {
// Write payload (element 0)
const proper_payload_field = try proper_acc.getElement(0);
if (proper_payload_field.ptr) |proper_ptr| {
try values[0].copyToPtr(&self.runtime_layout_store, proper_ptr, roc_ops);
try values[0].copyToPtr(&self.runtime_layout_store, proper_ptr);
}
for (values) |val| {
@ -11952,7 +11935,7 @@ pub const Interpreter = struct {
// Write payload at offset 0
const payload_ptr: *anyopaque = @ptrCast(base_ptr);
if (total_count == 1) {
try values[0].copyToPtr(&self.runtime_layout_store, payload_ptr, roc_ops);
try values[0].copyToPtr(&self.runtime_layout_store, payload_ptr);
} else {
// Multiple args - create tuple payload at offset 0
var elem_layouts = try self.allocator.alloc(Layout, total_count);
@ -11965,7 +11948,7 @@ pub const Interpreter = struct {
var tuple_dest = StackValue{ .layout = tuple_layout, .ptr = payload_ptr, .is_initialized = true };
var tup_acc = try tuple_dest.asTuple(&self.runtime_layout_store);
for (values, 0..) |val, idx| {
try tup_acc.setElement(idx, val, roc_ops);
try tup_acc.setElement(idx, val);
}
}
@ -11983,7 +11966,7 @@ pub const Interpreter = struct {
// Scrutinee is on value stack - get it but keep it there for potential later use
const scrutinee_temp = value_stack.pop() orelse return error.Crash;
// Make a copy to protect from corruption
const scrutinee = try self.pushCopy(scrutinee_temp, roc_ops);
const scrutinee = try self.pushCopy(scrutinee_temp);
scrutinee_temp.decref(&self.runtime_layout_store, roc_ops);
// Try branches starting from current_branch
@ -13131,7 +13114,7 @@ pub const Interpreter = struct {
var accessor = try receiver_value.asRecord(&self.runtime_layout_store);
const field_idx = accessor.findFieldIndex(da.field_name) orelse return error.TypeMismatch;
const field_value = try accessor.getFieldByIndex(field_idx);
const result = try self.pushCopy(field_value, roc_ops);
const result = try self.pushCopy(field_value);
try value_stack.push(result);
return true;
}
@ -14025,8 +14008,8 @@ pub const Interpreter = struct {
};
// Copy elements for comparison
const arg0 = try self.pushCopy(elem_current_value, roc_ops);
const arg1 = try self.pushCopy(elem_inner_value, roc_ops);
const arg0 = try self.pushCopy(elem_current_value);
const arg1 = try self.pushCopy(elem_inner_value);
// Push continuation for next comparison
// After swap, the element we're inserting is now at sc.inner_index
@ -14106,8 +14089,8 @@ pub const Interpreter = struct {
};
// Copy elements for comparison
const arg0 = try self.pushCopy(elem_outer_value, roc_ops);
const arg1 = try self.pushCopy(elem_prev_value, roc_ops);
const arg0 = try self.pushCopy(elem_outer_value);
const arg1 = try self.pushCopy(elem_prev_value);
// Push continuation for next comparison
try work_stack.push(.{ .apply_continuation = .{ .sort_compare_result = .{
@ -14521,12 +14504,12 @@ test "interpreter: cross-module method resolution should find methods in origin
// Set up Module A (the imported module where the type and method are defined)
var module_a = try can.ModuleEnv.init(gpa, module_a_name);
defer module_a.deinit();
try module_a.initCIRFields(gpa, module_a_name);
try module_a.initCIRFields(module_a_name);
// Set up Module B (the current module that imports Module A)
var module_b = try can.ModuleEnv.init(gpa, module_b_name);
defer module_b.deinit();
try module_b.initCIRFields(gpa, module_b_name);
try module_b.initCIRFields(module_b_name);
const builtin_indices = try builtin_loading.deserializeBuiltinIndices(gpa, compiled_builtins.builtin_indices_bin);
const bool_source = "Bool := [True, False].{}\n";
@ -14573,15 +14556,15 @@ test "interpreter: transitive module method resolution (A imports B imports C)"
// Set up three modules: A (current) imports B, B imports C
var module_a = try can.ModuleEnv.init(gpa, module_a_name);
defer module_a.deinit();
try module_a.initCIRFields(gpa, module_a_name);
try module_a.initCIRFields(module_a_name);
var module_b = try can.ModuleEnv.init(gpa, module_b_name);
defer module_b.deinit();
try module_b.initCIRFields(gpa, module_b_name);
try module_b.initCIRFields(module_b_name);
var module_c = try can.ModuleEnv.init(gpa, module_c_name);
defer module_c.deinit();
try module_c.initCIRFields(gpa, module_c_name);
try module_c.initCIRFields(module_c_name);
const builtin_indices = try builtin_loading.deserializeBuiltinIndices(gpa, compiled_builtins.builtin_indices_bin);
const bool_source = "Bool := [True, False].{}\n";

View file

@ -144,9 +144,7 @@ fn testRocRealloc(realloc_args: *RocRealloc, env: *anyopaque) callconv(.c) void
realloc_args.answer = @ptrFromInt(@intFromPtr(new_base_ptr) + size_storage_bytes);
}
fn testRocDbg(dbg_args: *const RocDbg, env: *anyopaque) callconv(.c) void {
_ = dbg_args;
_ = env;
fn testRocDbg(_: *const RocDbg, _: *anyopaque) callconv(.c) void {
@panic("testRocDbg not implemented yet");
}

View file

@ -50,7 +50,7 @@ fn parseCheckAndEvalModule(src: []const u8) !struct {
var builtin_module = try builtin_loading.loadCompiledModule(gpa, compiled_builtins.builtin_bin, "Builtin", builtin_source);
errdefer builtin_module.deinit();
try module_env.initCIRFields(gpa, "test");
try module_env.initCIRFields("test");
const builtin_ctx: Check.BuiltinContext = .{
.module_name = try module_env.insertIdent(base.Ident.for_text("test")),
.bool_stmt = builtin_indices.bool_type,

View file

@ -1,4 +1,5 @@
//! Tests for compile-time evaluation of top-level declarations
const std = @import("std");
const parse = @import("parse");
const types = @import("types");
@ -57,7 +58,7 @@ fn parseCheckAndEvalModuleWithName(src: []const u8, module_name: []const u8) !Ev
errdefer builtin_module.deinit();
// Initialize CIR fields in ModuleEnv
try module_env.initCIRFields(gpa, module_name);
try module_env.initCIRFields(module_name);
const builtin_ctx: Check.BuiltinContext = .{
.module_name = try module_env.insertIdent(base.Ident.for_text(module_name)),
.bool_stmt = builtin_indices.bool_type,
@ -136,7 +137,7 @@ fn parseCheckAndEvalModuleWithImport(src: []const u8, import_name: []const u8, i
errdefer builtin_module.deinit();
// Initialize CIR fields in ModuleEnv
try module_env.initCIRFields(gpa, "test");
try module_env.initCIRFields("test");
const builtin_ctx: Check.BuiltinContext = .{
.module_name = try module_env.insertIdent(base.Ident.for_text("test")),
.bool_stmt = builtin_indices.bool_type,
@ -1179,7 +1180,7 @@ test "comptime eval - U8 valid max value" {
var result = try parseCheckAndEvalModule(src);
defer cleanupEvalModule(&result);
const summary = try result.evaluator.evalAll();
_ = try result.evaluator.evalAll();
// Debug: print any problems
if (result.problems.len() > 0) {
std.debug.print("\nU8 valid max problems ({d}):\n", .{result.problems.len()});
@ -1191,8 +1192,6 @@ test "comptime eval - U8 valid max value" {
std.debug.print("\n", .{});
}
}
try testing.expectEqual(@as(u32, 1), summary.evaluated);
try testing.expectEqual(@as(u32, 0), summary.crashed);
try testing.expectEqual(@as(usize, 0), result.problems.len());
}
@ -1318,9 +1317,8 @@ test "comptime eval - U16 valid max value" {
var result = try parseCheckAndEvalModule(src);
defer cleanupEvalModule(&result);
const summary = try result.evaluator.evalAll();
_ = try result.evaluator.evalAll();
try testing.expectEqual(@as(usize, 0), result.problems.len());
_ = summary;
}
test "comptime eval - U16 too large with descriptive error" {

View file

@ -711,7 +711,7 @@ test "ModuleEnv serialization and interpreter evaluation" {
parse_ast.store.emptyScratch();
// Initialize CIR fields in ModuleEnv
try original_env.initCIRFields(gpa, "test");
try original_env.initCIRFields("test");
// Get Bool and Try statement indices from builtin module
const bool_stmt_in_builtin_module = builtin_indices.bool_type;

View file

@ -621,7 +621,7 @@ pub fn parseAndCanonicalizeExpr(allocator: std.mem.Allocator, source: []const u8
parse_ast.store.emptyScratch();
// Initialize CIR fields in ModuleEnv
try module_env.initCIRFields(allocator, "test");
try module_env.initCIRFields("test");
// Register Builtin as import so Bool, Try, and Str are available
_ = try module_env.imports.getOrPut(allocator, &module_env.common.strings, "Builtin");

View file

@ -52,7 +52,7 @@ fn parseCheckAndEvalModule(src: []const u8) !struct {
var builtin_module = try builtin_loading.loadCompiledModule(gpa, compiled_builtins.builtin_bin, "Builtin", builtin_source);
errdefer builtin_module.deinit();
try module_env.initCIRFields(gpa, "test");
try module_env.initCIRFields("test");
const builtin_ctx: Check.BuiltinContext = .{
.module_name = try module_env.insertIdent(base.Ident.for_text("test")),
.bool_stmt = builtin_indices.bool_type,

View file

@ -70,9 +70,7 @@ fn testRocRealloc(realloc_args: *RocRealloc, env: *anyopaque) callconv(.c) void
realloc_args.answer = @ptrFromInt(@intFromPtr(new_slice.ptr) + size_storage_bytes);
}
fn testRocDbg(dbg_args: *const RocDbg, env: *anyopaque) callconv(.c) void {
_ = dbg_args;
_ = env;
fn testRocDbg(_: *const RocDbg, _: *anyopaque) callconv(.c) void {
@panic("testRocDbg not implemented yet");
}

View file

@ -1265,6 +1265,17 @@ const Formatter = struct {
}
_ = try fmt.formatExpr(d.expr);
},
.inspect => |i| {
try fmt.pushAll("inspect");
const expr_node = fmt.nodeRegion(@intFromEnum(i.expr));
if (multiline and try fmt.flushCommentsBefore(expr_node.start)) {
fmt.curr_indent += 1;
try fmt.pushIndent();
} else {
try fmt.push(' ');
}
_ = try fmt.formatExpr(i.expr);
},
.block => |b| {
try fmt.formatBlock(b);
},

View file

@ -288,58 +288,43 @@ fn writeFileDefault(path: []const u8, contents: []const u8) WriteError!void {
// Testing implementations that fail tests if called
fn fileExistsTesting(absolute_path: []const u8) OpenError!bool {
_ = absolute_path;
fn fileExistsTesting(_: []const u8) OpenError!bool {
@panic("fileExists should not be called in this test");
}
fn readFileTesting(relative_path: []const u8, allocator: Allocator) ReadError![]const u8 {
_ = relative_path;
_ = allocator;
fn readFileTesting(_: []const u8, _: Allocator) ReadError![]const u8 {
@panic("readFile should not be called in this test");
}
fn readFileIntoTesting(path: []const u8, buffer: []u8) ReadError!usize {
_ = path;
_ = buffer;
fn readFileIntoTesting(_: []const u8, _: []u8) ReadError!usize {
@panic("readFileInto should not be called in this test");
}
fn writeFileTesting(path: []const u8, contents: []const u8) WriteError!void {
_ = path;
_ = contents;
fn writeFileTesting(_: []const u8, _: []const u8) WriteError!void {
@panic("writeFile should not be called in this test");
}
fn openDirTesting(absolute_path: []const u8) OpenError!Dir {
_ = absolute_path;
fn openDirTesting(_: []const u8) OpenError!Dir {
@panic("openDir should not be called in this test");
}
fn dirNameTesting(absolute_path: []const u8) ?[]const u8 {
_ = absolute_path;
fn dirNameTesting(_: []const u8) ?[]const u8 {
@panic("dirName should not be called in this test");
}
fn baseNameTesting(absolute_path: []const u8) ?[]const u8 {
_ = absolute_path;
fn baseNameTesting(_: []const u8) ?[]const u8 {
@panic("baseName should not be called in this test");
}
fn canonicalizeTesting(root_relative_path: []const u8, allocator: Allocator) CanonicalizeError![]const u8 {
_ = root_relative_path;
_ = allocator;
fn canonicalizeTesting(_: []const u8, _: Allocator) CanonicalizeError![]const u8 {
@panic("canonicalize should not be called in this test");
}
fn makePathTesting(path: []const u8) MakePathError!void {
_ = path;
fn makePathTesting(_: []const u8) MakePathError!void {
@panic("makePath should not be called in this test");
}
fn renameTesting(old_path: []const u8, new_path: []const u8) RenameError!void {
_ = old_path;
_ = new_path;
fn renameTesting(_: []const u8, _: []const u8) RenameError!void {
@panic("rename should not be called in this test");
}

View file

@ -217,8 +217,7 @@ pub fn allocator(self: *SharedMemoryAllocator) std.mem.Allocator {
};
}
fn alloc(ctx: *anyopaque, len: usize, ptr_align: std.mem.Alignment, ret_addr: usize) ?[*]u8 {
_ = ret_addr;
fn alloc(ctx: *anyopaque, len: usize, ptr_align: std.mem.Alignment, _: usize) ?[*]u8 {
const self: *SharedMemoryAllocator = @ptrCast(@alignCast(ctx));
const alignment = @as(usize, 1) << @intFromEnum(ptr_align);
@ -248,33 +247,18 @@ fn alloc(ctx: *anyopaque, len: usize, ptr_align: std.mem.Alignment, ret_addr: us
}
}
fn resize(ctx: *anyopaque, buf: []u8, buf_align: std.mem.Alignment, new_len: usize, ret_addr: usize) bool {
_ = ctx;
_ = buf_align;
_ = ret_addr;
fn resize(_: *anyopaque, buf: []u8, _: std.mem.Alignment, new_len: usize, _: usize) bool {
// Simple bump allocator doesn't support resize
// Could be implemented by checking if this is the last allocation
return new_len <= buf.len;
}
fn free(ctx: *anyopaque, buf: []u8, buf_align: std.mem.Alignment, ret_addr: usize) void {
_ = ctx;
_ = buf;
_ = buf_align;
_ = ret_addr;
fn free(_: *anyopaque, _: []u8, _: std.mem.Alignment, _: usize) void {
// Simple bump allocator doesn't support free
// Memory is only freed when the entire region is unmapped
}
fn remap(ctx: *anyopaque, old_mem: []u8, old_align: std.mem.Alignment, new_size: usize, ret_addr: usize) ?[*]u8 {
_ = ctx;
_ = old_mem;
_ = old_align;
_ = new_size;
_ = ret_addr;
fn remap(_: *anyopaque, _: []u8, _: std.mem.Alignment, _: usize, _: usize) ?[*]u8 {
// Simple bump allocator doesn't support remapping
return null;
}

View file

@ -1121,20 +1121,7 @@ pub const Store = struct {
current = self.types_store.resolveVar(last_pending_field.var_);
continue :outer;
},
.fn_pure => |func| {
_ = func;
// Create empty captures layout for generic function type
const empty_captures_idx = try self.getEmptyRecordLayout();
break :flat_type Layout.closure(empty_captures_idx);
},
.fn_effectful => |func| {
_ = func;
// Create empty captures layout for generic function type
const empty_captures_idx = try self.getEmptyRecordLayout();
break :flat_type Layout.closure(empty_captures_idx);
},
.fn_unbound => |func| {
_ = func;
.fn_pure, .fn_effectful, .fn_unbound => {
// Create empty captures layout for generic function type
const empty_captures_idx = try self.getEmptyRecordLayout();
break :flat_type Layout.closure(empty_captures_idx);
@ -1285,7 +1272,7 @@ pub const Store = struct {
// and append our variant layouts. This ensures our variants are contiguous.
const variants_start: u32 = @intCast(self.tag_union_variants.len());
for (variant_layout_indices, 0..) |variant_layout_idx, variant_i| {
for (variant_layout_indices) |variant_layout_idx| {
const variant_layout = self.getLayout(variant_layout_idx);
const variant_size = self.layoutSize(variant_layout);
const variant_alignment = variant_layout.alignment(self.targetUsize());
@ -1298,7 +1285,6 @@ pub const Store = struct {
_ = try self.tag_union_variants.append(self.env.gpa, .{
.payload_layout = variant_layout_idx,
});
_ = variant_i;
}
// Calculate discriminant info

View file

@ -190,7 +190,7 @@ pub const SyntaxChecker = struct {
};
}
fn rangeFromReport(self: *SyntaxChecker, rep: reporting.Report) Diagnostics.Range {
fn rangeFromReport(_: *SyntaxChecker, rep: reporting.Report) Diagnostics.Range {
var start = Diagnostics.Position{ .line = 0, .character = 0 };
var end = Diagnostics.Position{ .line = 0, .character = 0 };
@ -220,7 +220,6 @@ pub const SyntaxChecker = struct {
}
}
_ = self;
return .{ .start = start, .end = end };
}

View file

@ -1000,7 +1000,7 @@ pub const Statement = union(enum) {
try tree.pushStaticAtom("exposing");
const attrs2 = tree.beginNode();
for (ast.store.exposedItemSlice(import.exposes)) |e| {
try ast.store.getExposedItem(e).pushToSExprTree(gpa, env, ast, tree);
try ast.store.getExposedItem(e).pushToSExprTree(env, ast, tree);
}
try tree.endNode(exposed, attrs2);
}
@ -1641,7 +1641,7 @@ pub const Header = union(enum) {
// Could push region info for provides_coll here if desired
for (provides_items) |item_idx| {
const item = ast.store.getExposedItem(item_idx);
try item.pushToSExprTree(gpa, env, ast, tree);
try item.pushToSExprTree(env, ast, tree);
}
try tree.endNode(provides_begin, attrs2);
@ -1677,7 +1677,7 @@ pub const Header = union(enum) {
const attrs2 = tree.beginNode();
for (ast.store.exposedItemSlice(.{ .span = exposes.span })) |exposed| {
const item = ast.store.getExposedItem(exposed);
try item.pushToSExprTree(gpa, env, ast, tree);
try item.pushToSExprTree(env, ast, tree);
}
try tree.endNode(exposes_begin, attrs2);
@ -1697,7 +1697,7 @@ pub const Header = union(enum) {
const attrs2 = tree.beginNode();
for (ast.store.exposedItemSlice(.{ .span = exposes.span })) |exposed| {
const item = ast.store.getExposedItem(exposed);
try item.pushToSExprTree(gpa, env, ast, tree);
try item.pushToSExprTree(env, ast, tree);
}
try tree.endNode(exposes_begin, attrs2);
@ -1732,7 +1732,7 @@ pub const Header = union(enum) {
// Could push region info for rigids here if desired
for (ast.store.exposedItemSlice(.{ .span = rigids.span })) |exposed| {
const item = ast.store.getExposedItem(exposed);
try item.pushToSExprTree(gpa, env, ast, tree);
try item.pushToSExprTree(env, ast, tree);
}
try tree.endNode(rigids_begin, attrs3);
@ -1748,7 +1748,7 @@ pub const Header = union(enum) {
const attrs4 = tree.beginNode();
for (ast.store.exposedItemSlice(.{ .span = exposes.span })) |exposed| {
const item = ast.store.getExposedItem(exposed);
try item.pushToSExprTree(gpa, env, ast, tree);
try item.pushToSExprTree(env, ast, tree);
}
try tree.endNode(exposes_begin, attrs4);
@ -1793,7 +1793,7 @@ pub const Header = union(enum) {
const attrs2 = tree.beginNode();
for (ast.store.exposedItemSlice(.{ .span = exposes.span })) |exposed| {
const item = ast.store.getExposedItem(exposed);
try item.pushToSExprTree(gpa, env, ast, tree);
try item.pushToSExprTree(env, ast, tree);
}
try tree.endNode(exposes_begin, attrs2);
@ -1866,9 +1866,7 @@ pub const ExposedItem = union(enum) {
pub const Idx = enum(u32) { _ };
pub const Span = struct { span: base.DataSpan };
pub fn pushToSExprTree(self: @This(), gpa: std.mem.Allocator, env: *const CommonEnv, ast: *const AST, tree: *SExprTree) std.mem.Allocator.Error!void {
_ = gpa;
pub fn pushToSExprTree(self: @This(), env: *const CommonEnv, ast: *const AST, tree: *SExprTree) std.mem.Allocator.Error!void {
switch (self) {
.lower_ident => |i| {
const begin = tree.beginNode();

View file

@ -40,7 +40,7 @@ fn runParse(env: *CommonEnv, gpa: std.mem.Allocator, parserCall: *const fn (*Par
const msg_slice = messages[0..];
var tokenizer = try tokenize.Tokenizer.init(env, gpa, env.source, msg_slice);
try tokenizer.tokenize(gpa);
var result = tokenizer.finishAndDeinit(gpa);
var result = tokenizer.finishAndDeinit();
var parser = try Parser.init(result.tokens, gpa);
defer parser.deinit();

View file

@ -1109,7 +1109,7 @@ pub const Tokenizer = struct {
self.string_interpolation_stack.deinit();
}
pub fn finishAndDeinit(self: *Tokenizer, _: std.mem.Allocator) TokenOutput {
pub fn finishAndDeinit(self: *Tokenizer) TokenOutput {
self.string_interpolation_stack.deinit();
const actual_message_count = @min(self.cursor.message_count, self.cursor.messages.len);
return .{
@ -1252,7 +1252,7 @@ pub const Tokenizer = struct {
} else {
self.cursor.pos += 1;
// Look at what follows the minus to determine if it's unary
const tokenType: Token.Tag = if (self.canFollowUnaryMinus(n)) .OpUnaryMinus else .OpBinaryMinus;
const tokenType: Token.Tag = if (canFollowUnaryMinus(n)) .OpUnaryMinus else .OpBinaryMinus;
try self.pushTokenNormalHere(gpa, tokenType, start);
}
} else {
@ -1569,8 +1569,7 @@ pub const Tokenizer = struct {
}
/// Determines if a character can follow a unary minus (i.e., can start an expression)
fn canFollowUnaryMinus(self: *const Tokenizer, c: u8) bool {
_ = self;
fn canFollowUnaryMinus(c: u8) bool {
return switch (c) {
// Identifiers
'a'...'z', 'A'...'Z', '_' => true,
@ -1684,7 +1683,7 @@ pub fn checkTokenizerInvariants(gpa: std.mem.Allocator, input: []const u8, debug
var messages: [32]Diagnostic = undefined;
var tokenizer = try Tokenizer.init(&env, gpa, input, &messages);
try tokenizer.tokenize(gpa);
var output = tokenizer.finishAndDeinit(gpa);
var output = tokenizer.finishAndDeinit();
defer output.tokens.deinit(gpa);
if (debug) {
@ -1719,7 +1718,7 @@ pub fn checkTokenizerInvariants(gpa: std.mem.Allocator, input: []const u8, debug
// Second tokenization.
tokenizer = try Tokenizer.init(&env, gpa, buf2.items, &messages);
try tokenizer.tokenize(gpa);
var output2 = tokenizer.finishAndDeinit(gpa);
var output2 = tokenizer.finishAndDeinit();
defer output2.tokens.deinit(gpa);
if (debug) {

View file

@ -94,15 +94,12 @@ fn readFileIntoWasm(path: []const u8, buffer: []u8) Filesystem.ReadError!usize {
return error.FileNotFound;
}
fn writeFileWasm(path: []const u8, contents: []const u8) Filesystem.WriteError!void {
_ = path;
_ = contents;
fn writeFileWasm(_: []const u8, _: []const u8) Filesystem.WriteError!void {
// Writing files is not supported in WASM playground
return error.AccessDenied;
}
fn openDirWasm(absolute_path: []const u8) Filesystem.OpenError!Filesystem.Dir {
_ = absolute_path;
fn openDirWasm(_: []const u8) Filesystem.OpenError!Filesystem.Dir {
// Directory operations are not supported in WASM playground
return error.FileNotFound;
}
@ -131,15 +128,12 @@ fn canonicalizeWasm(root_relative_path: []const u8, allocator: Allocator) Filesy
return allocator.dupe(u8, root_relative_path) catch handleOom();
}
fn makePathWasm(path: []const u8) Filesystem.MakePathError!void {
_ = path;
fn makePathWasm(_: []const u8) Filesystem.MakePathError!void {
// Directory creation is not supported in WASM playground
return error.AccessDenied;
}
fn renameWasm(old_path: []const u8, new_path: []const u8) Filesystem.RenameError!void {
_ = old_path;
_ = new_path;
fn renameWasm(_: []const u8, _: []const u8) Filesystem.RenameError!void {
// File operations are not supported in WASM playground
return error.AccessDenied;
}

View file

@ -442,9 +442,8 @@ fn wasmRocRealloc(realloc_args: *builtins.host_abi.RocRealloc, _: *anyopaque) ca
}
}
fn wasmRocDbg(dbg_args: *const builtins.host_abi.RocDbg, _: *anyopaque) callconv(.c) void {
fn wasmRocDbg(_: *const builtins.host_abi.RocDbg, _: *anyopaque) callconv(.c) void {
// No-op in WASM playground
_ = dbg_args;
}
fn wasmRocExpectFailed(expect_failed_args: *const builtins.host_abi.RocExpectFailed, env: *anyopaque) callconv(.c) void {
@ -934,7 +933,7 @@ fn compileSource(source: []const u8) !CompilerStageData {
// Stage 2: Canonicalization (always run, even with parse errors)
// The canonicalizer handles malformed parse nodes and continues processing
const env = result.module_env;
try env.initCIRFields(allocator, "main");
try env.initCIRFields("main");
// Load builtin modules and inject Bool and Result type declarations
// (following the pattern from eval.zig and TestEnv.zig)
@ -1183,8 +1182,7 @@ const ResponseWriter = struct {
return result;
}
fn drain(w: *std.Io.Writer, data: []const []const u8, splat: usize) std.Io.Writer.Error!usize {
_ = splat;
fn drain(w: *std.Io.Writer, data: []const []const u8, _: usize) std.Io.Writer.Error!usize {
const self: *Self = @alignCast(@fieldParentPtr("interface", w));
var total: usize = 0;
for (data) |bytes| {
@ -2078,10 +2076,9 @@ fn writeUnbundleErrorResponse(response: []u8, err: unbundle.UnbundleError) u8 {
error.OutOfMemory => "Out of memory",
};
const json = std.fmt.bufPrint(response, "{{\"success\":false,\"error\":\"{s}\"}}", .{error_msg}) catch {
_ = std.fmt.bufPrint(response, "{{\"success\":false,\"error\":\"{s}\"}}", .{error_msg}) catch {
return 1; // Response buffer too small
};
_ = json;
return 2; // Unbundle error
}

View file

@ -547,7 +547,7 @@ pub const Repl = struct {
// Create CIR
const cir = module_env; // CIR is now just ModuleEnv
try cir.initCIRFields(self.allocator, "repl");
try cir.initCIRFields("repl");
// Get Bool, Try, and Str statement indices from the IMPORTED modules (not copied!)
// These refer to the actual statements in the Builtin module
@ -749,7 +749,7 @@ pub const Repl = struct {
// Create CIR
const cir = module_env;
try cir.initCIRFields(self.allocator, "repl");
try cir.initCIRFields("repl");
// Populate all auto-imported builtin types using the shared helper to keep behavior consistent
var module_envs_map = std.AutoHashMap(base.Ident.Idx, can.Can.AutoImportedType).init(self.allocator);

View file

@ -309,7 +309,7 @@ test "Repl - minimal interpreter integration" {
// Step 3: Create CIR
const cir = &module_env; // CIR is now just ModuleEnv
try cir.initCIRFields(gpa, "test");
try cir.initCIRFields("test");
// Get Bool, Try, and Str statement indices from the builtin module
const bool_stmt_in_builtin_module = builtin_indices.bool_type;

View file

@ -132,9 +132,7 @@ fn testRocRealloc(realloc_args: *RocRealloc, env: *anyopaque) callconv(.c) void
realloc_args.answer = @ptrFromInt(@intFromPtr(new_slice.ptr) + size_storage_bytes);
}
fn testRocDbg(dbg_args: *const RocDbg, env: *anyopaque) callconv(.c) void {
_ = dbg_args;
_ = env;
fn testRocDbg(_: *const RocDbg, _: *anyopaque) callconv(.c) void {
@panic("testRocDbg not implemented yet");
}

View file

@ -1129,7 +1129,7 @@ fn processSnapshotContent(
basename;
};
var can_ir = &module_env; // ModuleEnv contains the canonical IR
try can_ir.initCIRFields(allocator, module_name);
try can_ir.initCIRFields(module_name);
const builtin_ctx: Check.BuiltinContext = .{
.module_name = try can_ir.insertIdent(base.Ident.for_text(module_name)),
@ -2918,8 +2918,7 @@ fn generateReplOutputSection(output: *DualOutput, snapshot_path: []const u8, con
return success;
}
fn generateReplProblemsSection(output: *DualOutput, content: *const Content) !void {
_ = content;
fn generateReplProblemsSection(output: *DualOutput, _: *const Content) !void {
try output.begin_section("PROBLEMS");
try output.md_writer.writer.writeAll("NIL\n");
@ -3151,9 +3150,7 @@ fn snapshotRocRealloc(realloc_args: *RocRealloc, env: *anyopaque) callconv(.c) v
realloc_args.answer = @ptrFromInt(@intFromPtr(new_slice.ptr) + size_storage_bytes);
}
fn snapshotRocDbg(dbg_args: *const RocDbg, env: *anyopaque) callconv(.c) void {
_ = dbg_args;
_ = env;
fn snapshotRocDbg(_: *const RocDbg, _: *anyopaque) callconv(.c) void {
@panic("snapshotRocDbg not implemented yet");
}

View file

@ -50,8 +50,7 @@ pub const Slot = union(enum) {
redirect: Var,
/// Calculate the size needed to serialize this Slot
pub fn serializedSize(self: *const Slot) usize {
_ = self;
pub fn serializedSize(_: *const Slot) usize {
return @sizeOf(u8) + @sizeOf(u32); // tag + data
}

View file

@ -143,7 +143,7 @@ test "BufferExtractWriter - basic functionality" {
// Create a file
const file_writer = try writer.extractWriter().createFile("test.txt");
try file_writer.writeAll("Hello, World!");
writer.extractWriter().finishFile(file_writer);
writer.extractWriter().finishFile();
// Create a directory (should be no-op for buffer writer)
try writer.extractWriter().makeDir("test_dir");
@ -151,7 +151,7 @@ test "BufferExtractWriter - basic functionality" {
// Create another file in a subdirectory
const file_writer2 = try writer.extractWriter().createFile("subdir/test2.txt");
try file_writer2.writeAll("Second file");
writer.extractWriter().finishFile(file_writer2);
writer.extractWriter().finishFile();
// Verify files were stored
try testing.expectEqual(@as(usize, 2), writer.files.count());
@ -185,7 +185,7 @@ test "DirExtractWriter - basic functionality" {
// Create a file
const file_writer = try writer.extractWriter().createFile("test.txt");
try file_writer.writeAll("Test content");
writer.extractWriter().finishFile(file_writer);
writer.extractWriter().finishFile();
// Verify file was created
const content = try tmp.dir.readFileAlloc(testing.allocator, "test.txt", 1024);
@ -195,7 +195,7 @@ test "DirExtractWriter - basic functionality" {
// Create a file in a subdirectory (should create parent dirs)
const file_writer2 = try writer.extractWriter().createFile("deep/nested/file.txt");
try file_writer2.writeAll("Nested content");
writer.extractWriter().finishFile(file_writer2);
writer.extractWriter().finishFile();
// Verify nested file was created
const nested_content = try tmp.dir.readFileAlloc(testing.allocator, "deep/nested/file.txt", 1024);
@ -304,12 +304,12 @@ test "BufferExtractWriter - overwrite existing file" {
// Create a file with initial content
const file_writer1 = try writer.extractWriter().createFile("test.txt");
try file_writer1.writeAll("Initial content");
writer.extractWriter().finishFile(file_writer1);
writer.extractWriter().finishFile();
// Overwrite the same file
const file_writer2 = try writer.extractWriter().createFile("test.txt");
try file_writer2.writeAll("New content");
writer.extractWriter().finishFile(file_writer2);
writer.extractWriter().finishFile();
// Verify it was overwritten
const file = writer.files.get("test.txt");
@ -327,7 +327,7 @@ test "DirExtractWriter - nested directory creation" {
// Create a file in a deeply nested path
const file_writer = try writer.extractWriter().createFile("a/b/c/d/e/file.txt");
try file_writer.writeAll("Nested content");
writer.extractWriter().finishFile(file_writer);
writer.extractWriter().finishFile();
// Verify the file was created
const content = try tmp.dir.readFileAlloc(testing.allocator, "a/b/c/d/e/file.txt", 1024);

View file

@ -65,7 +65,7 @@ pub const ExtractWriter = struct {
pub const VTable = struct {
createFile: *const fn (ptr: *anyopaque, path: []const u8) CreateFileError!*std.Io.Writer,
finishFile: *const fn (ptr: *anyopaque, writer: *std.Io.Writer) void,
finishFile: *const fn (ptr: *anyopaque) void,
makeDir: *const fn (ptr: *anyopaque, path: []const u8) MakeDirError!void,
};
@ -82,8 +82,8 @@ pub const ExtractWriter = struct {
return self.vtable.createFile(self.ptr, path);
}
pub fn finishFile(self: ExtractWriter, writer: *std.Io.Writer) void {
return self.vtable.finishFile(self.ptr, writer);
pub fn finishFile(self: ExtractWriter) void {
return self.vtable.finishFile(self.ptr);
}
pub fn makeDir(self: ExtractWriter, path: []const u8) MakeDirError!void {
@ -162,8 +162,7 @@ pub const DirExtractWriter = struct {
return &entry.writer.interface;
}
fn finishFile(ptr: *anyopaque, writer: *std.Io.Writer) void {
_ = writer;
fn finishFile(ptr: *anyopaque) void {
const self: *DirExtractWriter = @ptrCast(@alignCast(ptr));
// Close and remove the last file
if (self.open_files.items.len > 0) {
@ -236,7 +235,7 @@ pub const BufferExtractWriter = struct {
return &self.current_file_writer.?.writer;
}
fn finishFile(ptr: *anyopaque, _: *std.Io.Writer) void {
fn finishFile(ptr: *anyopaque) void {
const self: *BufferExtractWriter = @ptrCast(@alignCast(ptr));
if (self.current_file_writer) |*writer| {
if (self.current_file_path) |path| {
@ -591,7 +590,7 @@ pub fn unbundleStream(
},
.file => {
const file_writer = try extract_writer.createFile(file_path);
defer extract_writer.finishFile(file_writer);
defer extract_writer.finishFile();
try tar_iterator.streamRemaining(entry, file_writer);
try file_writer.flush();

View file

@ -106,8 +106,8 @@ const macos_externs = if (use_real_fsevents) struct {
// Stub implementations for cross-compilation
const macos_stubs = struct {
fn FSEventStreamCreate(
allocator: CFAllocatorRef,
callback: *const fn (
_: CFAllocatorRef,
_: *const fn (
streamRef: FSEventStreamRef,
clientCallBackInfo: ?*anyopaque,
numEvents: usize,
@ -115,58 +115,36 @@ const macos_stubs = struct {
eventFlags: [*]const FSEventStreamEventFlags,
eventIds: [*]const FSEventStreamEventId,
) callconv(.c) void,
context: ?*FSEventStreamContext,
pathsToWatch: CFArrayRef,
sinceWhen: FSEventStreamEventId,
latency: CFAbsoluteTime,
flags: FSEventStreamCreateFlags,
_: ?*FSEventStreamContext,
_: CFArrayRef,
_: FSEventStreamEventId,
_: CFAbsoluteTime,
_: FSEventStreamCreateFlags,
) ?FSEventStreamRef {
_ = allocator;
_ = callback;
_ = context;
_ = pathsToWatch;
_ = sinceWhen;
_ = latency;
_ = flags;
return null;
}
fn FSEventStreamScheduleWithRunLoop(
streamRef: FSEventStreamRef,
runLoop: CFRunLoopRef,
runLoopMode: CFStringRef,
) void {
_ = streamRef;
_ = runLoop;
_ = runLoopMode;
}
_: FSEventStreamRef,
_: CFRunLoopRef,
_: CFStringRef,
) void {}
fn FSEventStreamStart(streamRef: FSEventStreamRef) bool {
_ = streamRef;
fn FSEventStreamStart(_: FSEventStreamRef) bool {
return false;
}
fn FSEventStreamStop(streamRef: FSEventStreamRef) void {
_ = streamRef;
}
fn FSEventStreamStop(_: FSEventStreamRef) void {}
fn FSEventStreamUnscheduleFromRunLoop(
streamRef: FSEventStreamRef,
runLoop: CFRunLoopRef,
runLoopMode: CFStringRef,
) void {
_ = streamRef;
_ = runLoop;
_ = runLoopMode;
}
_: FSEventStreamRef,
_: CFRunLoopRef,
_: CFStringRef,
) void {}
fn FSEventStreamInvalidate(streamRef: FSEventStreamRef) void {
_ = streamRef;
}
fn FSEventStreamInvalidate(_: FSEventStreamRef) void {}
fn FSEventStreamRelease(streamRef: FSEventStreamRef) void {
_ = streamRef;
}
fn FSEventStreamRelease(_: FSEventStreamRef) void {}
fn CFRunLoopGetCurrent() CFRunLoopRef {
return @ptrFromInt(1);
@ -174,44 +152,30 @@ const macos_stubs = struct {
fn CFRunLoopRun() void {}
fn CFRunLoopRunInMode(mode: CFStringRef, seconds: CFAbsoluteTime, returnAfterSourceHandled: bool) i32 {
_ = mode;
_ = seconds;
_ = returnAfterSourceHandled;
fn CFRunLoopRunInMode(_: CFStringRef, _: CFAbsoluteTime, _: bool) i32 {
return 0;
}
fn CFRunLoopStop(rl: CFRunLoopRef) void {
_ = rl;
}
fn CFRunLoopStop(_: CFRunLoopRef) void {}
fn CFArrayCreate(
allocator: CFAllocatorRef,
values: [*]const ?*const anyopaque,
numValues: CFIndex,
callBacks: ?*const anyopaque,
_: CFAllocatorRef,
_: [*]const ?*const anyopaque,
_: CFIndex,
_: ?*const anyopaque,
) ?CFArrayRef {
_ = allocator;
_ = values;
_ = numValues;
_ = callBacks;
return null;
}
fn CFStringCreateWithCString(
alloc: CFAllocatorRef,
cStr: [*:0]const u8,
encoding: u32,
_: CFAllocatorRef,
_: [*:0]const u8,
_: u32,
) ?CFStringRef {
_ = alloc;
_ = cStr;
_ = encoding;
return null;
}
fn CFRelease(cf: ?*anyopaque) void {
_ = cf;
}
fn CFRelease(_: ?*anyopaque) void {}
const kCFRunLoopDefaultMode: CFStringRef = @ptrFromInt(1);
};
@ -570,17 +534,13 @@ pub const Watcher = struct {
}
fn fsEventsCallback(
streamRef: FSEventStreamRef,
_: FSEventStreamRef,
clientCallBackInfo: ?*anyopaque,
numEvents: usize,
eventPaths: *anyopaque,
eventFlags: [*]const FSEventStreamEventFlags,
eventIds: [*]const FSEventStreamEventId,
_: [*]const FSEventStreamEventFlags,
_: [*]const FSEventStreamEventId,
) callconv(.c) void {
_ = streamRef;
_ = eventFlags;
_ = eventIds;
if (clientCallBackInfo == null) return;
const self: *Watcher = @ptrCast(@alignCast(clientCallBackInfo.?));
@ -1130,8 +1090,7 @@ test "recursive directory watching" {
};
const callback = struct {
fn cb(event: WatchEvent) void {
_ = event;
fn cb(_: WatchEvent) void {
_ = global.event_count.fetchAdd(1, .seq_cst);
}
}.cb;
@ -1167,8 +1126,7 @@ test "multiple directories watching" {
};
const callback = struct {
fn cb(event: WatchEvent) void {
_ = event;
fn cb(_: WatchEvent) void {
_ = global.event_count.fetchAdd(1, .seq_cst);
}
}.cb;
@ -1205,8 +1163,7 @@ test "file modification detection" {
};
const callback = struct {
fn cb(event: WatchEvent) void {
_ = event;
fn cb(_: WatchEvent) void {
_ = global.event_count.fetchAdd(1, .seq_cst);
}
}.cb;
@ -1238,8 +1195,7 @@ test "rapid file creation" {
};
const callback = struct {
fn cb(event: WatchEvent) void {
_ = event;
fn cb(_: WatchEvent) void {
_ = global.event_count.fetchAdd(1, .seq_cst);
}
}.cb;
@ -1284,8 +1240,7 @@ test "directory creation and file addition" {
};
const callback = struct {
fn cb(event: WatchEvent) void {
_ = event;
fn cb(_: WatchEvent) void {
_ = global.event_count.fetchAdd(1, .seq_cst);
}
}.cb;
@ -1325,8 +1280,7 @@ test "start stop restart" {
};
const callback = struct {
fn cb(event: WatchEvent) void {
_ = event;
fn cb(_: WatchEvent) void {
_ = global.event_count.fetchAdd(1, .seq_cst);
}
}.cb;
@ -1442,8 +1396,7 @@ test "file rename detection" {
};
const callback = struct {
fn cb(event: WatchEvent) void {
_ = event;
fn cb(_: WatchEvent) void {
_ = global.event_count.fetchAdd(1, .seq_cst);
}
}.cb;
@ -1560,8 +1513,7 @@ test "windows long path handling" {
};
const callback = struct {
fn cb(event: WatchEvent) void {
_ = event;
fn cb(_: WatchEvent) void {
_ = global.event_count.fetchAdd(1, .seq_cst);
}
}.cb;

View file

@ -0,0 +1,8 @@
app [main!] { pf: platform "./platform/main.roc" }
import pf.Stdout
main! = || {
x = "hello"
Stdout.line!(x.inspect())
}

View file

@ -40,10 +40,7 @@ main = {
}
~~~
# EXPECTED
MODULE NOT FOUND - can_import_comprehensive.md:1:1:1:17
MODULE NOT FOUND - can_import_comprehensive.md:2:1:2:48
DUPLICATE DEFINITION - can_import_comprehensive.md:3:1:3:27
MODULE NOT FOUND - can_import_comprehensive.md:3:1:3:27
UNDEFINED VARIABLE - can_import_comprehensive.md:6:14:6:22
UNDEFINED VARIABLE - can_import_comprehensive.md:7:14:7:23
UNDEFINED VARIABLE - can_import_comprehensive.md:8:14:8:22
@ -53,28 +50,6 @@ UNDEFINED VARIABLE - can_import_comprehensive.md:17:15:17:18
UNDEFINED VARIABLE - can_import_comprehensive.md:18:15:18:19
UNDEFINED VARIABLE - can_import_comprehensive.md:21:16:21:26
# PROBLEMS
**MODULE NOT FOUND**
The module `json.Json` was not found in this Roc project.
You're attempting to use this module here:
**can_import_comprehensive.md:1:1:1:17:**
```roc
import json.Json
```
^^^^^^^^^^^^^^^^
**MODULE NOT FOUND**
The module `http.Client` was not found in this Roc project.
You're attempting to use this module here:
**can_import_comprehensive.md:2:1:2:48:**
```roc
import http.Client as Http exposing [get, post]
```
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
**DUPLICATE DEFINITION**
The name `Str` is being redeclared in this scope.
@ -93,17 +68,6 @@ import json.Json
^
**MODULE NOT FOUND**
The module `utils.String` was not found in this Roc project.
You're attempting to use this module here:
**can_import_comprehensive.md:3:1:3:27:**
```roc
import utils.String as Str
```
^^^^^^^^^^^^^^^^^^^^^^^^^^
**UNDEFINED VARIABLE**
Nothing is named `get` in this scope.
Is there an `import` or `exposing` missing up-top?

View file

@ -62,10 +62,7 @@ combineTrys = |jsonTry, httpStatus|
UNDECLARED TYPE - can_import_exposing_types.md:29:18:29:24
UNDECLARED TYPE - can_import_exposing_types.md:30:18:30:24
UNDECLARED TYPE - can_import_exposing_types.md:31:23:31:31
MODULE NOT FOUND - can_import_exposing_types.md:1:1:1:49
MODULE NOT FOUND - can_import_exposing_types.md:2:1:2:64
DUPLICATE DEFINITION - can_import_exposing_types.md:3:1:3:32
MODULE NOT FOUND - can_import_exposing_types.md:3:1:3:32
UNDECLARED TYPE - can_import_exposing_types.md:6:24:6:29
UNDECLARED TYPE - can_import_exposing_types.md:6:31:6:36
UNDEFINED VARIABLE - can_import_exposing_types.md:7:21:7:31
@ -125,28 +122,6 @@ This type is referenced here:
^^^^^^^^
**MODULE NOT FOUND**
The module `json.Json` was not found in this Roc project.
You're attempting to use this module here:
**can_import_exposing_types.md:1:1:1:49:**
```roc
import json.Json exposing [Value, Error, Config]
```
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
**MODULE NOT FOUND**
The module `http.Client` was not found in this Roc project.
You're attempting to use this module here:
**can_import_exposing_types.md:2:1:2:64:**
```roc
import http.Client as Http exposing [Request, Response, Status]
```
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
**DUPLICATE DEFINITION**
The name `Try` is being redeclared in this scope.
@ -165,17 +140,6 @@ import json.Json exposing [Value, Error, Config]
^
**MODULE NOT FOUND**
The module `utils.Try` was not found in this Roc project.
You're attempting to use this module here:
**can_import_exposing_types.md:3:1:3:32:**
```roc
import utils.Try exposing [Try]
```
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
**UNDECLARED TYPE**
The type _Value_ is not declared in this scope.

View file

@ -10,20 +10,8 @@ import json.Json
main = Json.utf8
~~~
# EXPECTED
MODULE NOT FOUND - can_import_json.md:1:1:1:17
UNDEFINED VARIABLE - can_import_json.md:3:8:3:17
# PROBLEMS
**MODULE NOT FOUND**
The module `json.Json` was not found in this Roc project.
You're attempting to use this module here:
**can_import_json.md:1:1:1:17:**
```roc
import json.Json
```
^^^^^^^^^^^^^^^^
**UNDEFINED VARIABLE**
Nothing is named `utf8` in this scope.
Is there an `import` or `exposing` missing up-top?

View file

@ -31,9 +31,6 @@ validateAuth : HttpAuth.Credentials -> Try(HttpAuth.Token, HttpAuth.Error)
validateAuth = |creds| HttpAuth.validate(creds)
~~~
# EXPECTED
MODULE NOT FOUND - can_import_nested_modules.md:1:1:1:26
MODULE NOT FOUND - can_import_nested_modules.md:2:1:2:36
MODULE NOT FOUND - can_import_nested_modules.md:3:1:3:46
MODULE NOT IMPORTED - can_import_nested_modules.md:6:15:6:30
DOES NOT EXIST - can_import_nested_modules.md:7:26:7:41
UNDEFINED VARIABLE - can_import_nested_modules.md:11:29:11:43
@ -44,39 +41,6 @@ UNDEFINED VARIABLE - can_import_nested_modules.md:20:23:20:30
DOES NOT EXIST - can_import_nested_modules.md:20:37:20:58
UNDEFINED VARIABLE - can_import_nested_modules.md:24:24:24:41
# PROBLEMS
**MODULE NOT FOUND**
The module `json.Parser` was not found in this Roc project.
You're attempting to use this module here:
**can_import_nested_modules.md:1:1:1:26:**
```roc
import json.Parser.Config
```
^^^^^^^^^^^^^^^^^^^^^^^^^
**MODULE NOT FOUND**
The module `http.Client.Auth` was not found in this Roc project.
You're attempting to use this module here:
**can_import_nested_modules.md:2:1:2:36:**
```roc
import http.Client.Auth as HttpAuth
```
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
**MODULE NOT FOUND**
The module `utils.String.Format` was not found in this Roc project.
You're attempting to use this module here:
**can_import_nested_modules.md:3:1:3:46:**
```roc
import utils.String.Format exposing [padLeft]
```
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
**MODULE NOT IMPORTED**
There is no module with the name `Config` imported into this Roc file.

View file

@ -44,10 +44,7 @@ combineTrys = |result1, result2|
}
~~~
# EXPECTED
MODULE NOT FOUND - can_import_type_annotations.md:1:1:1:56
MODULE NOT FOUND - can_import_type_annotations.md:2:1:2:17
DUPLICATE DEFINITION - can_import_type_annotations.md:3:1:3:32
MODULE NOT FOUND - can_import_type_annotations.md:3:1:3:32
UNDECLARED TYPE - can_import_type_annotations.md:5:18:5:25
UNDECLARED TYPE - can_import_type_annotations.md:5:29:5:37
UNDEFINED VARIABLE - can_import_type_annotations.md:6:24:6:44
@ -60,28 +57,6 @@ MODULE NOT IMPORTED - can_import_type_annotations.md:24:18:24:36
MODULE NOT IMPORTED - can_import_type_annotations.md:24:61:24:78
UNDEFINED VARIABLE - can_import_type_annotations.md:25:40:25:61
# PROBLEMS
**MODULE NOT FOUND**
The module `http.Client` was not found in this Roc project.
You're attempting to use this module here:
**can_import_type_annotations.md:1:1:1:56:**
```roc
import http.Client as Http exposing [Request, Response]
```
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
**MODULE NOT FOUND**
The module `json.Json` was not found in this Roc project.
You're attempting to use this module here:
**can_import_type_annotations.md:2:1:2:17:**
```roc
import json.Json
```
^^^^^^^^^^^^^^^^
**DUPLICATE DEFINITION**
The name `Try` is being redeclared in this scope.
@ -100,17 +75,6 @@ import http.Client as Http exposing [Request, Response]
^
**MODULE NOT FOUND**
The module `utils.Try` was not found in this Roc project.
You're attempting to use this module here:
**can_import_type_annotations.md:3:1:3:32:**
```roc
import utils.Try exposing [Try]
```
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
**UNDECLARED TYPE**
The type _Request_ is not declared in this scope.

View file

@ -32,8 +32,6 @@ client = Http.invalidMethod
parser = Json.Parser.Advanced.NonExistent.create
~~~
# EXPECTED
MODULE NOT FOUND - can_import_unresolved_qualified.md:1:1:1:17
MODULE NOT FOUND - can_import_unresolved_qualified.md:2:1:2:27
UNDEFINED VARIABLE - can_import_unresolved_qualified.md:5:8:5:31
UNDEFINED VARIABLE - can_import_unresolved_qualified.md:9:20:9:34
MODULE NOT IMPORTED - can_import_unresolved_qualified.md:12:18:12:37
@ -45,28 +43,6 @@ DOES NOT EXIST - can_import_unresolved_qualified.md:19:10:19:31
UNDEFINED VARIABLE - can_import_unresolved_qualified.md:22:10:22:28
UNDEFINED VARIABLE - can_import_unresolved_qualified.md:25:10:25:49
# PROBLEMS
**MODULE NOT FOUND**
The module `json.Json` was not found in this Roc project.
You're attempting to use this module here:
**can_import_unresolved_qualified.md:1:1:1:17:**
```roc
import json.Json
```
^^^^^^^^^^^^^^^^
**MODULE NOT FOUND**
The module `http.Client` was not found in this Roc project.
You're attempting to use this module here:
**can_import_unresolved_qualified.md:2:1:2:27:**
```roc
import http.Client as Http
```
^^^^^^^^^^^^^^^^^^^^^^^^^^
**UNDEFINED VARIABLE**
Nothing is named `method` in this scope.
Is there an `import` or `exposing` missing up-top?

View file

@ -10,20 +10,8 @@ import json.Json as MyJson
main = MyJson.decode
~~~
# EXPECTED
MODULE NOT FOUND - can_import_with_alias.md:1:1:1:27
UNDEFINED VARIABLE - can_import_with_alias.md:3:8:3:21
# PROBLEMS
**MODULE NOT FOUND**
The module `json.Json` was not found in this Roc project.
You're attempting to use this module here:
**can_import_with_alias.md:1:1:1:27:**
```roc
import json.Json as MyJson
```
^^^^^^^^^^^^^^^^^^^^^^^^^^
**UNDEFINED VARIABLE**
Nothing is named `decode` in this scope.
Is there an `import` or `exposing` missing up-top?

View file

@ -16,20 +16,8 @@ print_msg! = |msg| Stdout.line!(msg)
main! = print_msg!("Hello, world!")
~~~
# EXPECTED
MODULE NOT FOUND - effectful_with_effectful_annotation.md:3:1:3:17
UNDEFINED VARIABLE - effectful_with_effectful_annotation.md:7:20:7:32
# PROBLEMS
**MODULE NOT FOUND**
The module `pf.Stdout` was not found in this Roc project.
You're attempting to use this module here:
**effectful_with_effectful_annotation.md:3:1:3:17:**
```roc
import pf.Stdout
```
^^^^^^^^^^^^^^^^
**UNDEFINED VARIABLE**
Nothing is named `line!` in this scope.
Is there an `import` or `exposing` missing up-top?

View file

@ -10,19 +10,9 @@ import pf.Stdout exposing [line!, write!]
main = 42
~~~
# EXPECTED
MODULE NOT FOUND - exposed_items_test.md:1:1:1:42
NIL
# PROBLEMS
**MODULE NOT FOUND**
The module `pf.Stdout` was not found in this Roc project.
You're attempting to use this module here:
**exposed_items_test.md:1:1:1:42:**
```roc
import pf.Stdout exposing [line!, write!]
```
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
NIL
# TOKENS
~~~zig
KwImport,LowerIdent,NoSpaceDotUpperIdent,KwExposing,OpenSquare,LowerIdent,Comma,LowerIdent,CloseSquare,

View file

@ -17,33 +17,9 @@ main! = |_| {
}
~~~
# EXPECTED
MODULE NOT FOUND - external_decl_lookup.md:3:1:3:17
MODULE NOT FOUND - external_decl_lookup.md:4:1:4:17
UNDEFINED VARIABLE - external_decl_lookup.md:8:14:8:23
UNDEFINED VARIABLE - external_decl_lookup.md:9:5:9:17
# PROBLEMS
**MODULE NOT FOUND**
The module `pf.Stdout` was not found in this Roc project.
You're attempting to use this module here:
**external_decl_lookup.md:3:1:3:17:**
```roc
import pf.Stdout
```
^^^^^^^^^^^^^^^^
**MODULE NOT FOUND**
The module `json.Json` was not found in this Roc project.
You're attempting to use this module here:
**external_decl_lookup.md:4:1:4:17:**
```roc
import json.Json
```
^^^^^^^^^^^^^^^^
**UNDEFINED VARIABLE**
Nothing is named `utf8` in this scope.
Is there an `import` or `exposing` missing up-top?

View file

@ -21,20 +21,8 @@ process! = |x| print_number!(multiply(x, 2))
main! = process!(42)
~~~
# EXPECTED
MODULE NOT FOUND - function_no_annotation.md:3:1:3:17
UNDEFINED VARIABLE - function_no_annotation.md:9:21:9:33
# PROBLEMS
**MODULE NOT FOUND**
The module `pf.Stdout` was not found in this Roc project.
You're attempting to use this module here:
**function_no_annotation.md:3:1:3:17:**
```roc
import pf.Stdout
```
^^^^^^^^^^^^^^^^
**UNDEFINED VARIABLE**
Nothing is named `line!` in this scope.
Is there an `import` or `exposing` missing up-top?

View file

@ -141,7 +141,6 @@ UNDECLARED TYPE VARIABLE - fuzz_crash_019.md:19:4:19:6
UNDECLARED TYPE VARIABLE - fuzz_crash_019.md:20:12:20:13
UNDECLARED TYPE - fuzz_crash_019.md:24:15:24:16
UNDECLARED TYPE VARIABLE - fuzz_crash_019.md:24:24:24:25
MODULE NOT FOUND - fuzz_crash_019.md:4:1:4:34
MODULE NOT FOUND - fuzz_crash_019.md:6:1:8:6
MODULE NOT FOUND - fuzz_crash_019.md:10:1:10:19
MODULE NOT FOUND - fuzz_crash_019.md:11:1:12:4
@ -352,17 +351,6 @@ Som : { foo : O, bar : g }
^
**MODULE NOT FOUND**
The module `pf.Stdout` was not found in this Roc project.
You're attempting to use this module here:
**fuzz_crash_019.md:4:1:4:34:**
```roc
import pf.Stdout exposing [line!]
```
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
**MODULE NOT FOUND**
The module `Stdot` was not found in this Roc project.

View file

@ -141,7 +141,6 @@ UNDECLARED TYPE VARIABLE - fuzz_crash_020.md:19:4:19:6
UNDECLARED TYPE VARIABLE - fuzz_crash_020.md:20:12:20:13
UNDECLARED TYPE - fuzz_crash_020.md:24:15:24:16
UNDECLARED TYPE VARIABLE - fuzz_crash_020.md:24:24:24:25
MODULE NOT FOUND - fuzz_crash_020.md:4:1:4:34
MODULE NOT FOUND - fuzz_crash_020.md:6:1:8:6
MODULE NOT FOUND - fuzz_crash_020.md:10:1:10:19
MODULE NOT FOUND - fuzz_crash_020.md:11:1:12:4
@ -351,17 +350,6 @@ Som : { foo : O, bar : g }
^
**MODULE NOT FOUND**
The module `pf.Stdout` was not found in this Roc project.
You're attempting to use this module here:
**fuzz_crash_020.md:4:1:4:34:**
```roc
import pf.Stdout exposing [line!]
```
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
**MODULE NOT FOUND**
The module `Stdot` was not found in this Roc project.

View file

@ -229,10 +229,7 @@ UNDECLARED TYPE - fuzz_crash_023.md:45:8:45:10
UNDECLARED TYPE - fuzz_crash_023.md:46:8:46:17
UNDECLARED TYPE - fuzz_crash_023.md:52:4:52:6
UNDECLARED TYPE - fuzz_crash_023.md:53:8:53:17
MODULE NOT FOUND - fuzz_crash_023.md:4:1:4:42
NOT IMPLEMENTED - :0:0:0:0
MODULE NOT FOUND - fuzz_crash_023.md:6:1:12:4
MODULE NOT FOUND - fuzz_crash_023.md:14:1:14:82
MODULE NOT FOUND - fuzz_crash_023.md:16:1:16:27
MODULE NOT FOUND - fuzz_crash_023.md:17:1:20:20
UNDEFINED VARIABLE - fuzz_crash_023.md:72:4:72:13
@ -450,49 +447,11 @@ This type is referenced here:
^^^^^^^^^
**MODULE NOT FOUND**
The module `pf.Stdout` was not found in this Roc project.
You're attempting to use this module here:
**fuzz_crash_023.md:4:1:4:42:**
```roc
import pf.Stdout exposing [line!, write!]
```
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
**NOT IMPLEMENTED**
This feature is not yet implemented: malformed import module name contains invalid control characters
This error doesn't have a proper diagnostic report yet. Let us know if you want to help improve Roc's error messages!
**MODULE NOT FOUND**
The module `MALFORMED_IMPORT` was not found in this Roc project.
You're attempting to use this module here:
**fuzz_crash_023.md:6:1:12:4:**
```roc
import # Comment after import keyword
pf # Comment after qualifier
.StdoutMultiline # Comment after ident
exposing [ # Comment after exposing open
line!, # Comment after exposed item
write!, # Another after exposed item
] # Comment after exposing close
```
**MODULE NOT FOUND**
The module `pkg.Something` was not found in this Roc project.
You're attempting to use this module here:
**fuzz_crash_023.md:14:1:14:82:**
```roc
import pkg.Something exposing [func as function, Type as ValueCategory, Custom.*]
```
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
**MODULE NOT FOUND**
The module `BadName` was not found in this Roc project.

View file

@ -182,9 +182,7 @@ UNDECLARED TYPE - fuzz_crash_027.md:34:8:34:11
UNDECLARED TYPE - fuzz_crash_027.md:38:8:38:11
UNDECLARED TYPE - fuzz_crash_027.md:43:11:43:16
UNDECLARED TYPE - fuzz_crash_027.md:43:26:43:31
MODULE NOT FOUND - fuzz_crash_027.md:4:1:4:38
MODULE NOT FOUND - fuzz_crash_027.md:6:1:8:4
MODULE NOT FOUND - fuzz_crash_027.md:10:1:10:46
MODULE NOT FOUND - fuzz_crash_027.md:12:1:12:19
MODULE NOT FOUND - fuzz_crash_027.md:13:1:14:4
UNDECLARED TYPE - fuzz_crash_027.md:29:2:29:5
@ -421,17 +419,6 @@ Func(a) : Maybe(a), a -> Maybe(a)
^^^^^
**MODULE NOT FOUND**
The module `pf.Stdout` was not found in this Roc project.
You're attempting to use this module here:
**fuzz_crash_027.md:4:1:4:38:**
```roc
import pf.Stdout exposing [line!, e!]
```
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
**MODULE NOT FOUND**
The module `Stdot` was not found in this Roc project.
@ -444,17 +431,6 @@ import Stdot
```
**MODULE NOT FOUND**
The module `pkg.S` was not found in this Roc project.
You're attempting to use this module here:
**fuzz_crash_027.md:10:1:10:46:**
```roc
import pkg.S exposing [func as fry, Custom.*]
```
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
**MODULE NOT FOUND**
The module `Bae` was not found in this Roc project.

View file

@ -10,7 +10,6 @@ import u.R}g:r->R.a.E
# EXPECTED
PARSE ERROR - fuzz_crash_042.md:1:11:1:12
MODULE NOT IMPORTED - fuzz_crash_042.md:1:17:1:22
MODULE NOT FOUND - fuzz_crash_042.md:1:1:1:11
# PROBLEMS
**PARSE ERROR**
A parsing error occurred: `statement_unexpected_token`
@ -34,17 +33,6 @@ import u.R}g:r->R.a.E
^^^^^
**MODULE NOT FOUND**
The module `u.R` was not found in this Roc project.
You're attempting to use this module here:
**fuzz_crash_042.md:1:1:1:11:**
```roc
import u.R}g:r->R.a.E
```
^^^^^^^^^^
# TOKENS
~~~zig
KwImport,LowerIdent,NoSpaceDotUpperIdent,CloseCurly,LowerIdent,OpColon,LowerIdent,OpArrow,UpperIdent,NoSpaceDotLowerIdent,NoSpaceDotUpperIdent,

View file

@ -12,20 +12,8 @@ import pf.Stdout
main! = |_| Stdout.line!("Hello, world!")
~~~
# EXPECTED
MODULE NOT FOUND - hello_world.md:3:1:3:17
UNDEFINED VARIABLE - hello_world.md:5:13:5:25
# PROBLEMS
**MODULE NOT FOUND**
The module `pf.Stdout` was not found in this Roc project.
You're attempting to use this module here:
**hello_world.md:3:1:3:17:**
```roc
import pf.Stdout
```
^^^^^^^^^^^^^^^^
**UNDEFINED VARIABLE**
Nothing is named `line!` in this scope.
Is there an `import` or `exposing` missing up-top?

View file

@ -19,21 +19,9 @@ main! = |_| {
}
~~~
# EXPECTED
MODULE NOT FOUND - hello_world_with_block.md:6:1:6:17
UNDEFINED VARIABLE - hello_world_with_block.md:11:2:11:14
UNUSED VARIABLE - hello_world_with_block.md:9:2:9:7
# PROBLEMS
**MODULE NOT FOUND**
The module `pf.Stdout` was not found in this Roc project.
You're attempting to use this module here:
**hello_world_with_block.md:6:1:6:17:**
```roc
import pf.Stdout
```
^^^^^^^^^^^^^^^^
**UNDEFINED VARIABLE**
Nothing is named `line!` in this scope.
Is there an `import` or `exposing` missing up-top?

View file

@ -15,21 +15,9 @@ main = {
}
~~~
# EXPECTED
MODULE NOT FOUND - import_exposing_basic.md:1:1:1:43
UNDEFINED VARIABLE - import_exposing_basic.md:5:15:5:21
UNDEFINED VARIABLE - import_exposing_basic.md:6:15:6:21
# PROBLEMS
**MODULE NOT FOUND**
The module `json.Json` was not found in this Roc project.
You're attempting to use this module here:
**import_exposing_basic.md:1:1:1:43:**
```roc
import json.Json exposing [decode, encode]
```
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
**UNDEFINED VARIABLE**
Nothing is named `encode` in this scope.
Is there an `import` or `exposing` missing up-top?

View file

@ -0,0 +1,137 @@
# META
~~~ini
description=Using inspect on an open union type variable should work
type=snippet
~~~
# SOURCE
~~~roc
main_for_host : Try({}, [Exit(I32), ..others]) -> Str
main_for_host = |result|
match result {
Ok({}) => "ok"
Err(Exit(code)) => inspect code
Err(other) => inspect other
}
~~~
# EXPECTED
NIL
# PROBLEMS
NIL
# TOKENS
~~~zig
LowerIdent,OpColon,UpperIdent,NoSpaceOpenRound,OpenCurly,CloseCurly,Comma,OpenSquare,UpperIdent,NoSpaceOpenRound,UpperIdent,CloseRound,Comma,DoubleDot,LowerIdent,CloseSquare,CloseRound,OpArrow,UpperIdent,
LowerIdent,OpAssign,OpBar,LowerIdent,OpBar,
KwMatch,LowerIdent,OpenCurly,
UpperIdent,NoSpaceOpenRound,OpenCurly,CloseCurly,CloseRound,OpFatArrow,StringStart,StringPart,StringEnd,
UpperIdent,NoSpaceOpenRound,UpperIdent,NoSpaceOpenRound,LowerIdent,CloseRound,CloseRound,OpFatArrow,KwInspect,LowerIdent,
UpperIdent,NoSpaceOpenRound,LowerIdent,CloseRound,OpFatArrow,KwInspect,LowerIdent,
CloseCurly,
EndOfFile,
~~~
# PARSE
~~~clojure
(file
(type-module)
(statements
(s-type-anno (name "main_for_host")
(ty-fn
(ty-apply
(ty (name "Try"))
(ty-record)
(ty-tag-union
(tags
(ty-apply
(ty (name "Exit"))
(ty (name "I32"))))
(ty-var (raw "others"))))
(ty (name "Str"))))
(s-decl
(p-ident (raw "main_for_host"))
(e-lambda
(args
(p-ident (raw "result")))
(e-match
(e-ident (raw "result"))
(branches
(branch
(p-tag (raw "Ok")
(p-record))
(e-string
(e-string-part (raw "ok"))))
(branch
(p-tag (raw "Err")
(p-tag (raw "Exit")
(p-ident (raw "code"))))
(e-inspect
(e-ident (raw "code"))))
(branch
(p-tag (raw "Err")
(p-ident (raw "other")))
(e-inspect
(e-ident (raw "other"))))))))))
~~~
# FORMATTED
~~~roc
main_for_host : Try({}, [Exit(I32), ..others]) -> Str
main_for_host = |result|
match result {
Ok({}) => "ok"
Err(Exit(code)) => inspect code
Err(other) => inspect other
}
~~~
# CANONICALIZE
~~~clojure
(can-ir
(d-let
(p-assign (ident "main_for_host"))
(e-lambda
(args
(p-assign (ident "result")))
(e-match
(match
(cond
(e-lookup-local
(p-assign (ident "result"))))
(branches
(branch
(patterns
(pattern (degenerate false)
(p-applied-tag)))
(value
(e-string
(e-literal (string "ok")))))
(branch
(patterns
(pattern (degenerate false)
(p-applied-tag)))
(value
(e-inspect
(e-lookup-local
(p-assign (ident "code"))))))
(branch
(patterns
(pattern (degenerate false)
(p-applied-tag)))
(value
(e-inspect
(e-lookup-local
(p-assign (ident "other"))))))))))
(annotation
(ty-fn (effectful false)
(ty-apply (name "Try") (builtin)
(ty-record)
(ty-tag-union
(ty-tag-name (name "Exit")
(ty-lookup (name "I32") (builtin)))
(ty-rigid-var (name "others"))))
(ty-lookup (name "Str") (builtin))))))
~~~
# TYPES
~~~clojure
(inferred-types
(defs
(patt (type "Try({ }, [Exit(I32), ..others]) -> Str")))
(expressions
(expr (type "Try({ }, [Exit(I32), ..others]) -> Str"))))
~~~

View file

@ -0,0 +1,50 @@
# META
~~~ini
description=Method call syntax with .inspect() should produce e_dot_access with args
type=expr
~~~
# SOURCE
~~~roc
x.inspect()
~~~
# EXPECTED
UNDEFINED VARIABLE - method_call_inspect.md:1:1:1:2
# PROBLEMS
**UNDEFINED VARIABLE**
Nothing is named `x` in this scope.
Is there an `import` or `exposing` missing up-top?
**method_call_inspect.md:1:1:1:2:**
```roc
x.inspect()
```
^
# TOKENS
~~~zig
LowerIdent,NoSpaceDotLowerIdent,NoSpaceOpenRound,CloseRound,
EndOfFile,
~~~
# PARSE
~~~clojure
(e-field-access
(e-ident (raw "x"))
(e-apply
(e-ident (raw "inspect"))))
~~~
# FORMATTED
~~~roc
NO CHANGE
~~~
# CANONICALIZE
~~~clojure
(e-dot-access (field "inspect")
(receiver
(e-runtime-error (tag "ident_not_in_scope")))
(args))
~~~
# TYPES
~~~clojure
(expr (type "Error"))
~~~

View file

@ -0,0 +1,95 @@
# META
~~~ini
description=Method call syntax with .inspect() on string should give MISSING METHOD
type=expr
~~~
# SOURCE
~~~roc
{ x = "hello"; x.inspect() }
~~~
# EXPECTED
UNEXPECTED TOKEN IN EXPRESSION - method_call_inspect_defined.md:1:14:1:15
UNRECOGNIZED SYNTAX - method_call_inspect_defined.md:1:14:1:15
MISSING METHOD - method_call_inspect_defined.md:1:18:1:25
# PROBLEMS
**UNEXPECTED TOKEN IN EXPRESSION**
The token **;** is not expected in an expression.
Expressions can be identifiers, literals, function calls, or operators.
**method_call_inspect_defined.md:1:14:1:15:**
```roc
{ x = "hello"; x.inspect() }
```
^
**UNRECOGNIZED SYNTAX**
I don't recognize this syntax.
**method_call_inspect_defined.md:1:14:1:15:**
```roc
{ x = "hello"; x.inspect() }
```
^
This might be a syntax error, an unsupported language feature, or a typo.
**MISSING METHOD**
This **inspect** method is being called on a value whose type doesn't have that method:
**method_call_inspect_defined.md:1:18:1:25:**
```roc
{ x = "hello"; x.inspect() }
```
^^^^^^^
The value's type, which does not have a method named **inspect**, is:
_Str_
**Hint:** For this to work, the type would need to have a method named **inspect** associated with it in the type's declaration.
# TOKENS
~~~zig
OpenCurly,LowerIdent,OpAssign,StringStart,StringPart,StringEnd,MalformedUnknownToken,LowerIdent,NoSpaceDotLowerIdent,NoSpaceOpenRound,CloseRound,CloseCurly,
EndOfFile,
~~~
# PARSE
~~~clojure
(e-block
(statements
(s-decl
(p-ident (raw "x"))
(e-string
(e-string-part (raw "hello"))))
(e-malformed (reason "expr_unexpected_token"))
(e-field-access
(e-ident (raw "x"))
(e-apply
(e-ident (raw "inspect"))))))
~~~
# FORMATTED
~~~roc
{
x = "hello"
x.inspect()
}
~~~
# CANONICALIZE
~~~clojure
(e-block
(s-let
(p-assign (ident "x"))
(e-string
(e-literal (string "hello"))))
(s-expr
(e-runtime-error (tag "expr_not_canonicalized")))
(e-dot-access (field "inspect")
(receiver
(e-lookup-local
(p-assign (ident "x"))))
(args)))
~~~
# TYPES
~~~clojure
(expr (type "Error"))
~~~

View file

@ -0,0 +1,153 @@
# META
~~~ini
description=Try return with match and error propagation should type-check
type=snippet
~~~
# SOURCE
~~~roc
get_greeting : {} -> Try(Str, _)
get_greeting = |{}| {
match 0 {
0 => Try.Ok(List.first(["hello"])?),
_ => Err(Impossible)
}
}
~~~
# EXPECTED
NIL
# PROBLEMS
NIL
# TOKENS
~~~zig
LowerIdent,OpColon,OpenCurly,CloseCurly,OpArrow,UpperIdent,NoSpaceOpenRound,UpperIdent,Comma,Underscore,CloseRound,
LowerIdent,OpAssign,OpBar,OpenCurly,CloseCurly,OpBar,OpenCurly,
KwMatch,Int,OpenCurly,
Int,OpFatArrow,UpperIdent,NoSpaceDotUpperIdent,NoSpaceOpenRound,UpperIdent,NoSpaceDotLowerIdent,NoSpaceOpenRound,OpenSquare,StringStart,StringPart,StringEnd,CloseSquare,CloseRound,NoSpaceOpQuestion,CloseRound,Comma,
Underscore,OpFatArrow,UpperIdent,NoSpaceOpenRound,UpperIdent,CloseRound,
CloseCurly,
CloseCurly,
EndOfFile,
~~~
# PARSE
~~~clojure
(file
(type-module)
(statements
(s-type-anno (name "get_greeting")
(ty-fn
(ty-record)
(ty-apply
(ty (name "Try"))
(ty (name "Str"))
(_))))
(s-decl
(p-ident (raw "get_greeting"))
(e-lambda
(args
(p-record))
(e-block
(statements
(e-match
(e-int (raw "0"))
(branches
(branch
(p-int (raw "0"))
(e-apply
(e-tag (raw "Try.Ok"))
(e-question-suffix
(e-apply
(e-ident (raw "List.first"))
(e-list
(e-string
(e-string-part (raw "hello"))))))))
(branch
(p-underscore)
(e-apply
(e-tag (raw "Err"))
(e-tag (raw "Impossible"))))))))))))
~~~
# FORMATTED
~~~roc
get_greeting : {} -> Try(Str, _)
get_greeting = |{}| {
match 0 {
0 => Try.Ok(List.first(["hello"])?)
_ => Err(Impossible)
}
}
~~~
# CANONICALIZE
~~~clojure
(can-ir
(d-let
(p-assign (ident "get_greeting"))
(e-lambda
(args
(p-record-destructure
(destructs)))
(e-block
(e-match
(match
(cond
(e-num (value "0")))
(branches
(branch
(patterns
(pattern (degenerate false)
(p-num (value "0"))))
(value
(e-nominal-external
(builtin)
(e-tag (name "Ok")
(args
(e-match
(match
(cond
(e-call
(e-lookup-external
(builtin))
(e-list
(elems
(e-string
(e-literal (string "hello")))))))
(branches
(branch
(patterns
(pattern (degenerate false)
(p-applied-tag)))
(value
(e-lookup-local
(p-assign (ident "#ok")))))
(branch
(patterns
(pattern (degenerate false)
(p-applied-tag)))
(value
(e-return
(e-tag (name "Err")
(args
(e-lookup-local
(p-assign (ident "#err"))))))))))))))))
(branch
(patterns
(pattern (degenerate false)
(p-underscore)))
(value
(e-tag (name "Err")
(args
(e-tag (name "Impossible")))))))))))
(annotation
(ty-fn (effectful false)
(ty-record)
(ty-apply (name "Try") (builtin)
(ty-lookup (name "Str") (builtin))
(ty-underscore))))))
~~~
# TYPES
~~~clojure
(inferred-types
(defs
(patt (type "{ } -> Try(Str, [ListWasEmpty, Impossible, .._others2])")))
(expressions
(expr (type "{ } -> Try(Str, [ListWasEmpty, Impossible, .._others2])"))))
~~~

View file

@ -27,7 +27,6 @@ PARSE ERROR - multi_qualified_import.md:12:30:12:31
PARSE ERROR - multi_qualified_import.md:12:31:12:36
PARSE ERROR - multi_qualified_import.md:12:36:12:37
PARSE ERROR - multi_qualified_import.md:12:37:12:38
MODULE NOT FOUND - multi_qualified_import.md:1:1:1:41
UNDECLARED TYPE - multi_qualified_import.md:3:16:3:23
DOES NOT EXIST - multi_qualified_import.md:4:16:4:45
MODULE NOT IMPORTED - multi_qualified_import.md:7:11:7:33
@ -123,17 +122,6 @@ data = json.Core.Utf8.encode("hello")
^
**MODULE NOT FOUND**
The module `json.Core.Utf8` was not found in this Roc project.
You're attempting to use this module here:
**multi_qualified_import.md:1:1:1:41:**
```roc
import json.Core.Utf8 exposing [Encoder]
```
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
**UNDECLARED TYPE**
The type _Encoder_ is not declared in this scope.

View file

@ -11,20 +11,8 @@ red : CE
red = ... # not implemented
~~~
# EXPECTED
MODULE NOT FOUND - nominal_import_long_package.md:1:1:1:52
UNDECLARED TYPE - nominal_import_long_package.md:3:7:3:9
# PROBLEMS
**MODULE NOT FOUND**
The module `design.Styles.Color` was not found in this Roc project.
You're attempting to use this module here:
**nominal_import_long_package.md:1:1:1:52:**
```roc
import design.Styles.Color exposing [Encoder as CE]
```
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
**UNDECLARED TYPE**
The type _CE_ is not declared in this scope.

Some files were not shown because too many files have changed in this diff Show more