From 0efe802123a0f74ed0a54b470f1915a4cdc449bb Mon Sep 17 00:00:00 2001 From: Fabian Schmalzried Date: Wed, 8 Oct 2025 11:22:24 +0200 Subject: [PATCH 01/17] remove type handling from Can --- src/canonicalize/Can.zig | 625 ++++++++++----------------- src/canonicalize/ModuleEnv.zig | 271 +++--------- src/canonicalize/test/int_test.zig | 4 +- src/compile/test/module_env_test.zig | 4 +- 4 files changed, 295 insertions(+), 609 deletions(-) diff --git a/src/canonicalize/Can.zig b/src/canonicalize/Can.zig index 2f75225622..406cf35d97 100644 --- a/src/canonicalize/Can.zig +++ b/src/canonicalize/Can.zig @@ -233,39 +233,37 @@ fn addBuiltinTypeBool(self: *Self, ir: *ModuleEnv) std.mem.Allocator.Error!void // Create a type header (lhs) => Bool // - const header_idx = try ir.addTypeHeaderAndTypeVar(.{ + const header_idx = try ir.addTypeHeader(.{ .name = type_ident, .args = .{ .span = DataSpan.empty() }, - }, .err, Region.zero()); + }, Region.zero()); // Create the type body (rhs) => [True, False] // const scratch_top = self.env.store.scratchTypeAnnoTop(); - const true_tag_anno_idx = try ir.addTypeAnnoAndTypeVar( + const true_tag_anno_idx = try ir.addTypeAnno( .{ .tag = .{ .name = true_ident, .args = .{ .span = DataSpan.empty() } } }, - .err, Region.zero(), ); try self.env.store.addScratchTypeAnno(true_tag_anno_idx); - const false_tag_anno_idx = try ir.addTypeAnnoAndTypeVar( + const false_tag_anno_idx = try ir.addTypeAnno( .{ .tag = .{ .name = false_ident, .args = .{ .span = DataSpan.empty() } } }, - .err, Region.zero(), ); try self.env.store.addScratchTypeAnno(false_tag_anno_idx); - const tag_union_anno_idx = try ir.addTypeAnnoAndTypeVar(.{ .tag_union = .{ + const tag_union_anno_idx = try ir.addTypeAnno(.{ .tag_union = .{ .tags = try self.env.store.typeAnnoSpanFrom(scratch_top), .ext = null, - } }, .err, Region.zero()); + } }, Region.zero()); // Create the type declaration statement // - const type_decl_idx = try ir.addStatementAndTypeVar(Statement{ + const type_decl_idx = try 
ir.addStatement(Statement{ .s_nominal_decl = .{ .header = header_idx, .anno = tag_union_anno_idx }, - }, .err, Region.zero()); + }, Region.zero()); // Assert that this is the first stmt in the file std.debug.assert(type_decl_idx == BUILTIN_BOOL); @@ -293,46 +291,44 @@ fn addBuiltinTypeResult(self: *Self, ir: *ModuleEnv) std.mem.Allocator.Error!voi const header_scratch_top = self.env.store.scratchTypeAnnoTop(); - const ok_rigid_var = try ir.addTypeAnnoAndTypeVar(.{ .rigid_var = .{ .name = ok_var_ident } }, .err, Region.zero()); + const ok_rigid_var = try ir.addTypeAnno(.{ .rigid_var = .{ .name = ok_var_ident } }, Region.zero()); try self.env.store.addScratchTypeAnno(ok_rigid_var); - const err_rigid_var = try ir.addTypeAnnoAndTypeVar(.{ .rigid_var = .{ .name = err_var_ident } }, .err, Region.zero()); + const err_rigid_var = try ir.addTypeAnno(.{ .rigid_var = .{ .name = err_var_ident } }, Region.zero()); try self.env.store.addScratchTypeAnno(err_rigid_var); - const header_idx = try ir.addTypeHeaderAndTypeVar(.{ + const header_idx = try ir.addTypeHeader(.{ .name = type_ident, .args = try self.env.store.typeAnnoSpanFrom(header_scratch_top), - }, .err, Region.zero()); + }, Region.zero()); // Create the type body (rhs) => [Ok(ok), Err(err)] // // Create Ok(ok) const ok_tag_scratch_top = self.env.store.scratchTypeAnnoTop(); - const ok_rigid_var_arg = try ir.addTypeAnnoAndTypeVar(.{ .rigid_var_lookup = .{ .ref = ok_rigid_var } }, .err, Region.zero()); + const ok_rigid_var_arg = try ir.addTypeAnno(.{ .rigid_var_lookup = .{ .ref = ok_rigid_var } }, Region.zero()); try self.env.store.addScratchTypeAnno(ok_rigid_var_arg); - const ok_tag_anno_idx = try ir.addTypeAnnoAndTypeVar( + const ok_tag_anno_idx = try ir.addTypeAnno( .{ .tag = .{ .name = ok_tag_ident, .args = try self.env.store.typeAnnoSpanFrom(ok_tag_scratch_top), } }, - .err, Region.zero(), ); // Create Err(err) const err_tag_scratch_top = self.env.store.scratchTypeAnnoTop(); - const err_rigid_var_arg = try 
ir.addTypeAnnoAndTypeVar(.{ .rigid_var_lookup = .{ .ref = err_rigid_var } }, .err, Region.zero()); + const err_rigid_var_arg = try ir.addTypeAnno(.{ .rigid_var_lookup = .{ .ref = err_rigid_var } }, Region.zero()); try self.env.store.addScratchTypeAnno(err_rigid_var_arg); - const err_tag_anno_idx = try ir.addTypeAnnoAndTypeVar( + const err_tag_anno_idx = try ir.addTypeAnno( .{ .tag = .{ .name = err_tag_ident, .args = try self.env.store.typeAnnoSpanFrom(err_tag_scratch_top), } }, - .err, Region.zero(), ); @@ -341,18 +337,17 @@ fn addBuiltinTypeResult(self: *Self, ir: *ModuleEnv) std.mem.Allocator.Error!voi try self.env.store.addScratchTypeAnno(ok_tag_anno_idx); try self.env.store.addScratchTypeAnno(err_tag_anno_idx); - const tag_union_anno_idx = try ir.addTypeAnnoAndTypeVar(.{ .tag_union = .{ + const tag_union_anno_idx = try ir.addTypeAnno(.{ .tag_union = .{ .tags = try self.env.store.typeAnnoSpanFrom(tag_scratch_top), .ext = null, - } }, .err, Region.zero()); + } }, Region.zero()); // Create the type declaration statement // - const type_decl_idx = try ir.addStatementAndTypeVar( + const type_decl_idx = try ir.addStatement( Statement{ .s_nominal_decl = .{ .header = header_idx, .anno = tag_union_anno_idx }, }, - .err, Region.zero(), ); @@ -435,7 +430,7 @@ fn processTypeDeclFirstPass( }, }; - const type_decl_stmt_idx = try self.env.addStatementAndTypeVar(placeholder_cir_type_decl, .err, region); + const type_decl_stmt_idx = try self.env.addStatement(placeholder_cir_type_decl, region); // Introduce the type name into scope early to support recursive references try self.scopeIntroduceTypeDecl(qualified_name_idx, type_decl_stmt_idx, region); @@ -535,7 +530,7 @@ fn processAssociatedItemsFirstPass( .ident = qualified_idx, }, }; - const placeholder_pattern_idx = try self.env.addPatternAndTypeVar(placeholder_pattern, .err, region); + const placeholder_pattern_idx = try self.env.addPattern(placeholder_pattern, region); // Introduce the qualified name to scope switch (try 
self.scopeIntroduceInternal(self.env.gpa, .ident, qualified_idx, placeholder_pattern_idx, false, true)) { @@ -716,7 +711,7 @@ pub fn canonicalizeFile( const expect_stmt = Statement{ .s_expect = .{ .body = malformed, } }; - const expect_stmt_idx = try self.env.addStatementAndTypeVar(expect_stmt, Content{ .flex_var = null }, region); + const expect_stmt_idx = try self.env.addStatement(expect_stmt, region); try self.env.store.addScratchStatement(expect_stmt_idx); continue; }; @@ -725,7 +720,7 @@ pub fn canonicalizeFile( const expect_stmt = Statement{ .s_expect = .{ .body = can_expect.idx, } }; - const expect_stmt_idx = try self.env.addStatementAndTypeVar(expect_stmt, Content{ .flex_var = null }, region); + const expect_stmt_idx = try self.env.addStatement(expect_stmt, region); try self.env.store.addScratchStatement(expect_stmt_idx); }, .@"for" => |for_stmt| { @@ -802,7 +797,7 @@ pub fn canonicalizeFile( .where = where_clauses, }, }; - const type_anno_stmt_idx = try self.env.addStatementAndTypeVar(type_anno_stmt, Content{ .flex_var = null }, region); + const type_anno_stmt_idx = try self.env.addStatement(type_anno_stmt, region); try self.env.store.addScratchStatement(type_anno_stmt_idx); } @@ -1381,7 +1376,7 @@ fn canonicalizeImportStatement( }, }; - const import_idx = try self.env.addStatementAndTypeVar(cir_import, Content{ .flex_var = null }, self.parse_ir.tokenizedRegionToRegion(import_stmt.region)); + const import_idx = try self.env.addStatement(cir_import, self.parse_ir.tokenizedRegionToRegion(import_stmt.region)); try self.env.store.addScratchStatement(import_idx); // 8. 
Add the module to the current scope so it can be used in qualified lookups @@ -1514,7 +1509,7 @@ fn convertASTExposesToCIR( inline else => |payload| payload.region, }; const region = self.parse_ir.tokenizedRegionToRegion(tokenized_region); - const cir_exposed_idx = try self.env.addExposedItemAndTypeVar(cir_exposed, .{ .flex_var = null }, region); + const cir_exposed_idx = try self.env.addExposedItem(cir_exposed, region); try self.env.store.addScratchExposedItem(cir_exposed_idx); } } @@ -1653,12 +1648,12 @@ fn canonicalizeDeclWithAnnotation( // 4. Type errors will be caught during unification if the implementation doesn't // match the annotation const region = self.parse_ir.tokenizedRegionToRegion(decl.region); - const def_idx = self.env.addDefAndTypeVar(.{ + const def_idx = self.env.addDef(.{ .pattern = pattern_idx, .expr = can_expr.idx, .annotation = mb_anno_idx, .kind = .let, - }, Content{ .flex_var = null }, region); + }, region); return def_idx; } @@ -1738,9 +1733,9 @@ fn canonicalizeStringLike( try self.extractStringSegments(parts); const region = self.parse_ir.tokenizedRegionToRegion(e.region); - const expr_idx = try self.env.addExprAndTypeVar(Expr{ .e_str = .{ + const expr_idx = try self.env.addExpr(Expr{ .e_str = .{ .span = can_str_span, - } }, Content{ .structure = .str }, region); + } }, region); const free_vars_slice = self.scratch_free_vars.slice(free_vars_start, self.scratch_free_vars.top()); return CanonicalizedExpr{ .idx = expr_idx, .free_vars = if (free_vars_slice.len > 0) free_vars_slice else null }; @@ -1763,18 +1758,18 @@ fn canonicalizeSingleQuote( .kind = .u128, }; if (comptime Idx == Expr.Idx) { - const expr_idx = try self.env.addExprAndTypeVar(CIR.Expr{ + const expr_idx = try self.env.addExpr(CIR.Expr{ .e_num = .{ .value = value_content, .kind = .int_unbound, }, - }, .err, region); + }, region); return expr_idx; } else if (comptime Idx == Pattern.Idx) { - const pat_idx = try self.env.addPatternAndTypeVar(Pattern{ .num_literal = .{ + const 
pat_idx = try self.env.addPattern(Pattern{ .num_literal = .{ .value = value_content, .kind = .int_unbound, - } }, .err, region); + } }, region); return pat_idx; } else { @compileError("Unsupported Idx type"); @@ -1823,7 +1818,7 @@ fn canonicalizeRecordField( .value = can_value.idx, }; - return try self.env.addRecordFieldAndTypeVar(cir_field, Content{ .flex_var = null }, self.parse_ir.tokenizedRegionToRegion(field.region)); + return try self.env.addRecordField(cir_field, self.parse_ir.tokenizedRegionToRegion(field.region)); } /// Parse an integer with underscores. @@ -1886,13 +1881,13 @@ pub fn canonicalizeExpr( // Create span from scratch expressions const args_span = try self.env.store.exprSpanFrom(scratch_top); - const expr_idx = try self.env.addExprAndTypeVar(CIR.Expr{ + const expr_idx = try self.env.addExpr(CIR.Expr{ .e_call = .{ .func = can_fn_expr.idx, .args = args_span, .called_via = CalledVia.apply, }, - }, .err, region); + }, region); const free_vars_slice = self.scratch_free_vars.slice(free_vars_start, self.scratch_free_vars.top()); return CanonicalizedExpr{ .idx = expr_idx, .free_vars = if (free_vars_slice.len > 0) free_vars_slice else null }; @@ -1919,9 +1914,9 @@ pub fn canonicalizeExpr( try self.used_patterns.put(self.env.gpa, found_pattern_idx, {}); // We found the qualified ident in local scope - const expr_idx = try self.env.addExprAndTypeVar(CIR.Expr{ .e_lookup_local = .{ + const expr_idx = try self.env.addExpr(CIR.Expr{ .e_lookup_local = .{ .pattern_idx = found_pattern_idx, - } }, .err, region); + } }, region); const free_vars_start = self.scratch_free_vars.top(); try self.scratch_free_vars.append(self.env.gpa, found_pattern_idx); @@ -1968,11 +1963,11 @@ pub fn canonicalizeExpr( } else 0; // Create the e_lookup_external expression with Import.Idx - const expr_idx = try self.env.addExprAndTypeVar(CIR.Expr{ .e_lookup_external = .{ + const expr_idx = try self.env.addExpr(CIR.Expr{ .e_lookup_external = .{ .module_idx = import_idx, .target_node_idx = 
target_node_idx, .region = region, - } }, Content{ .flex_var = null }, region); + } }, region); return CanonicalizedExpr{ .idx = expr_idx, .free_vars = null, @@ -1992,9 +1987,9 @@ pub fn canonicalizeExpr( // We found the ident in scope, lookup to reference the pattern // TODO(RANK) - const expr_idx = try self.env.addExprAndTypeVar(CIR.Expr{ .e_lookup_local = .{ + const expr_idx = try self.env.addExpr(CIR.Expr{ .e_lookup_local = .{ .pattern_idx = found_pattern_idx, - } }, .err, region); + } }, region); const free_vars_start = self.scratch_free_vars.top(); try self.scratch_free_vars.append(self.env.gpa, found_pattern_idx); @@ -2033,11 +2028,11 @@ pub fn canonicalizeExpr( } else 0; // Create the e_lookup_external expression with Import.Idx - const expr_idx = try self.env.addExprAndTypeVar(CIR.Expr{ .e_lookup_external = .{ + const expr_idx = try self.env.addExpr(CIR.Expr{ .e_lookup_external = .{ .module_idx = import_idx, .target_node_idx = target_node_idx, .region = region, - } }, .err, region); + } }, region); return CanonicalizedExpr{ .idx = expr_idx, .free_vars = null }; } @@ -2185,9 +2180,8 @@ pub fn canonicalizeExpr( // Note that type-checking will ensure that the actual int value // fits into the provided type - const expr_idx = try self.env.addExprAndTypeVar( + const expr_idx = try self.env.addExpr( .{ .e_num = .{ .value = int_value, .kind = num_suffix } }, - .err, region, ); return CanonicalizedExpr{ .idx = expr_idx, .free_vars = null }; @@ -2198,22 +2192,20 @@ pub fn canonicalizeExpr( const is_not_base10 = int_base != DEFAULT_BASE; if (is_not_base10) { // For non-decimal integers (hex, binary, octal), set as an int - break :blk try self.env.addExprAndTypeVar( + break :blk try self.env.addExpr( CIR.Expr{ .e_num = .{ .value = int_value, .kind = .int_unbound, } }, - .err, region, ); } else { // For decimal (base 10), use a num so it can be either Int or Frac - break :blk try self.env.addExprAndTypeVar( + break :blk try self.env.addExpr( CIR.Expr{ .e_num = .{ 
.value = int_value, .kind = .num_unbound, } }, - .err, region, ); } @@ -2239,22 +2231,20 @@ pub fn canonicalizeExpr( const expr_idx = try self.env.pushMalformed(Expr.Idx, Diagnostic{ .invalid_num_literal = .{ .region = region } }); return CanonicalizedExpr{ .idx = expr_idx, .free_vars = null }; } - const expr_idx = try self.env.addExprAndTypeVar( + const expr_idx = try self.env.addExpr( .{ .e_frac_f32 = .{ .value = @floatCast(f64_val), .has_suffix = true, } }, - .err, region, ); return CanonicalizedExpr{ .idx = expr_idx, .free_vars = null }; } else if (std.mem.eql(u8, suffix, "f64")) { - const expr_idx = try self.env.addExprAndTypeVar( + const expr_idx = try self.env.addExpr( .{ .e_frac_f64 = .{ .value = f64_val, .has_suffix = true, } }, - .err, region, ); return CanonicalizedExpr{ .idx = expr_idx, .free_vars = null }; @@ -2267,12 +2257,11 @@ pub fn canonicalizeExpr( const expr_idx = try self.env.pushMalformed(Expr.Idx, Diagnostic{ .invalid_num_literal = .{ .region = region } }); return CanonicalizedExpr{ .idx = expr_idx, .free_vars = null }; }; - const expr_idx = try self.env.addExprAndTypeVar( + const expr_idx = try self.env.addExpr( .{ .e_dec = .{ .value = dec_val, .has_suffix = true, } }, - .err, region, ); return CanonicalizedExpr{ .idx = expr_idx, .free_vars = null }; @@ -2315,7 +2304,7 @@ pub fn canonicalizeExpr( }, }; - const expr_idx = try self.env.addExprAndTypeVar(cir_expr, .err, region); + const expr_idx = try self.env.addExpr(cir_expr, region); return CanonicalizedExpr{ .idx = expr_idx, .free_vars = null }; }, @@ -2336,9 +2325,9 @@ pub fn canonicalizeExpr( const items_slice = self.parse_ir.store.exprSlice(e.items); if (items_slice.len == 0) { // Empty list - use e_empty_list - const expr_idx = try self.env.addExprAndTypeVar(CIR.Expr{ + const expr_idx = try self.env.addExpr(CIR.Expr{ .e_empty_list = .{}, - }, Content{ .structure = .list_unbound }, region); + }, region); return CanonicalizedExpr{ .idx = expr_idx, .free_vars = null }; } @@ -2361,16 
+2350,16 @@ pub fn canonicalizeExpr( // If all elements failed to canonicalize, treat as empty list if (elems_span.span.len == 0) { // All elements failed to canonicalize - create empty list - const expr_idx = try self.env.addExprAndTypeVar(CIR.Expr{ + const expr_idx = try self.env.addExpr(CIR.Expr{ .e_empty_list = .{}, - }, Content{ .structure = .list_unbound }, region); + }, region); return CanonicalizedExpr{ .idx = expr_idx, .free_vars = null }; } - const expr_idx = try self.env.addExprAndTypeVar(CIR.Expr{ + const expr_idx = try self.env.addExpr(CIR.Expr{ .e_list = .{ .elems = elems_span }, - }, .err, region); + }, region); const free_vars_slice = self.scratch_free_vars.slice(free_vars_start, self.scratch_free_vars.top()); return CanonicalizedExpr{ .idx = expr_idx, .free_vars = if (free_vars_slice.len > 0) free_vars_slice else null }; @@ -2434,25 +2423,15 @@ pub fn canonicalizeExpr( try self.env.store.addScratchExpr(item_expr_idx.get_idx()); } - // Since expr idx map 1-to-1 to variables, we can get cast the slice - // of scratch expr idx and cast them to vars - const elems_var_range = try self.env.types.appendVars( - @ptrCast(@alignCast( - self.env.store.scratch_exprs.slice(scratch_top, self.env.store.scratchExprTop()), - )), - ); - // Create span of the new scratch expressions const elems_span = try self.env.store.exprSpanFrom(scratch_top); // Then insert the tuple expr - const expr_idx = try self.env.addExprAndTypeVar(CIR.Expr{ + const expr_idx = try self.env.addExpr(CIR.Expr{ .e_tuple = .{ .elems = elems_span, }, - }, Content{ .structure = FlatType{ - .tuple = types.Tuple{ .elems = elems_var_range }, - } }, region); + }, region); const free_vars_slice = self.scratch_free_vars.slice(free_vars_start, self.scratch_free_vars.top()); return CanonicalizedExpr{ .idx = expr_idx, .free_vars = if (free_vars_slice.len > 0) free_vars_slice else null }; @@ -2472,9 +2451,9 @@ pub fn canonicalizeExpr( const fields_slice = self.parse_ir.store.recordFieldSlice(e.fields); if 
(fields_slice.len == 0) { - const expr_idx = try self.env.addExprAndTypeVar(CIR.Expr{ + const expr_idx = try self.env.addExpr(CIR.Expr{ .e_empty_record = .{}, - }, Content{ .structure = .empty_record }, region); + }, region); return CanonicalizedExpr{ .idx = expr_idx, .free_vars = null }; } @@ -2554,20 +2533,15 @@ pub fn canonicalizeExpr( }); } - // Create the record type structure - const type_fields_range = try self.env.types.appendRecordFields( - self.scratch_record_fields.sliceFromStart(record_fields_top), - ); - // Shink the scratch array to it's original size self.scratch_record_fields.clearFrom(record_fields_top); - const expr_idx = try self.env.addExprAndTypeVar(CIR.Expr{ + const expr_idx = try self.env.addExpr(CIR.Expr{ .e_record = .{ .fields = fields_span, .ext = ext_expr, }, - }, Content{ .structure = .{ .record_unbound = type_fields_range } }, region); + }, region); const free_vars_slice = self.scratch_free_vars.slice(free_vars_start, self.scratch_free_vars.top()); return CanonicalizedExpr{ .idx = expr_idx, .free_vars = if (free_vars_slice.len > 0) free_vars_slice else null }; @@ -2640,11 +2614,8 @@ pub fn canonicalizeExpr( .body = can_body.idx, }, }; - const lambda_type_content = try self.env.types.mkFuncUnbound( - @ptrCast(self.env.store.slicePatterns(args_span)), - ModuleEnv.varFrom(can_body.idx), - ); - const lambda_idx = try self.env.addExprAndTypeVar(lambda_expr, lambda_type_content, region); + + const lambda_idx = try self.env.addExpr(lambda_expr, region); // If there are no captures, this is a pure lambda. // Otherwise, it's a closure. @@ -2668,7 +2639,7 @@ pub fn canonicalizeExpr( .pattern_idx = pattern_idx, .scope_depth = 0, // This is now unused, but kept for struct compatibility. 
}; - const capture_idx = try self.env.addCaptureAndTypeVar(capture, types.Content{ .flex_var = null }, region); + const capture_idx = try self.env.addCapture(capture, region); try self.env.store.addScratchCapture(capture_idx); } @@ -2684,7 +2655,7 @@ pub fn canonicalizeExpr( }; // The type of the closure is the same as the type of the pure lambda - const expr_idx = try self.env.addExprAndTypeVar(closure_expr, lambda_type_content, region); + const expr_idx = try self.env.addExpr(closure_expr, region); // The free variables of the lambda are its captures. // I need to add them to the global list and return a span. @@ -2764,9 +2735,9 @@ pub fn canonicalizeExpr( }, }; - const expr_idx = try self.env.addExprAndTypeVar(Expr{ + const expr_idx = try self.env.addExpr(Expr{ .e_binop = Expr.Binop.init(op, can_lhs.idx, can_rhs.idx), - }, Content{ .flex_var = null }, region); + }, region); const free_vars_slice = self.scratch_free_vars.slice(free_vars_start, self.scratch_free_vars.top()); return CanonicalizedExpr{ .idx = expr_idx, .free_vars = if (free_vars_slice.len > 0) free_vars_slice else null }; @@ -2789,9 +2760,9 @@ pub fn canonicalizeExpr( const can_operand = (try self.canonicalizeExpr(unary.expr)) orelse return null; // Create unary minus CIR expression - const expr_idx = try self.env.addExprAndTypeVar(Expr{ + const expr_idx = try self.env.addExpr(Expr{ .e_unary_minus = Expr.UnaryMinus.init(can_operand.idx), - }, Content{ .flex_var = null }, region); + }, region); return CanonicalizedExpr{ .idx = expr_idx, .free_vars = can_operand.free_vars }; }, @@ -2800,9 +2771,9 @@ pub fn canonicalizeExpr( const can_operand = (try self.canonicalizeExpr(unary.expr)) orelse return null; // Create unary not CIR expression - const expr_idx = try self.env.addExprAndTypeVar(Expr{ + const expr_idx = try self.env.addExpr(Expr{ .e_unary_not = Expr.UnaryNot.init(can_operand.idx), - }, Content{ .flex_var = null }, region); + }, region); return CanonicalizedExpr{ .idx = expr_idx, .free_vars = 
can_operand.free_vars }; }, @@ -2854,7 +2825,7 @@ pub fn canonicalizeExpr( .cond = can_cond.idx, .body = can_then.idx, }; - const if_branch_idx = try self.env.addIfBranchAndTypeVar(if_branch, Content{ .flex_var = null }, self.parse_ir.tokenizedRegionToRegion(current_if.region)); + const if_branch_idx = try self.env.addIfBranch(if_branch, self.parse_ir.tokenizedRegionToRegion(current_if.region)); try self.env.store.addScratchIfBranch(if_branch_idx); // Check if the else clause is another if-then-else @@ -2882,18 +2853,12 @@ pub fn canonicalizeExpr( std.debug.assert(branches.len > 0); // Create the if expression with flex var initially - const expr_idx = try self.env.addExprAndTypeVar(CIR.Expr{ + const expr_idx = try self.env.addExpr(CIR.Expr{ .e_if = .{ .branches = branches_span, .final_else = final_else, }, - }, Content{ .flex_var = null }, region); - - // Immediately redirect the if expression's type variable to the first branch's body - const first_branch = self.env.store.getIfBranch(branches[0]); - const first_branch_type_var = @as(TypeVar, @enumFromInt(@intFromEnum(first_branch.body))); - const expr_var = @as(TypeVar, @enumFromInt(@intFromEnum(expr_idx))); - try self.env.types.setVarRedirect(expr_var, first_branch_type_var); + }, region); const free_vars_slice = self.scratch_free_vars.slice(free_vars_start, self.scratch_free_vars.top()); return CanonicalizedExpr{ .idx = expr_idx, .free_vars = if (free_vars_slice.len > 0) free_vars_slice else null }; @@ -2945,10 +2910,10 @@ pub fn canonicalizeExpr( } }; - const branch_pattern_idx = try self.env.addMatchBranchPatternAndTypeVar(Expr.Match.BranchPattern{ + const branch_pattern_idx = try self.env.addMatchBranchPattern(Expr.Match.BranchPattern{ .pattern = pattern_idx, .degenerate = false, - }, Content{ .flex_var = null }, alt_pattern_region); + }, alt_pattern_region); try self.env.store.addScratchMatchBranchPattern(branch_pattern_idx); } }, @@ -2965,10 +2930,10 @@ pub fn canonicalizeExpr( break :blk malformed_idx; } 
}; - const branch_pattern_idx = try self.env.addMatchBranchPatternAndTypeVar(Expr.Match.BranchPattern{ + const branch_pattern_idx = try self.env.addMatchBranchPattern(Expr.Match.BranchPattern{ .pattern = pattern_idx, .degenerate = false, - }, Content{ .flex_var = null }, pattern_region); + }, pattern_region); try self.env.store.addScratchMatchBranchPattern(branch_pattern_idx); }, } @@ -2988,19 +2953,14 @@ pub fn canonicalizeExpr( }; const value_idx = can_body.idx; - // Get the body region from the AST node - const body = self.parse_ir.store.getExpr(ast_branch.body); - const body_region = self.parse_ir.tokenizedRegionToRegion(body.to_tokenized_region()); - - const branch_idx = try self.env.addMatchBranchAndTypeVar( + const branch_idx = try self.env.addMatchBranch( Expr.Match.Branch{ .patterns = branch_pat_span, .value = value_idx, .guard = null, .redundant = @enumFromInt(0), // TODO }, - Content{ .flex_var = null }, - body_region, + region, ); // Set the branch var @@ -3020,17 +2980,7 @@ pub fn canonicalizeExpr( .branches = branches_span, .exhaustive = @enumFromInt(0), // Will be set during type checking }; - - // Create initial content for the match expression - const initial_content = if (mb_branch_var) |_| Content{ .flex_var = null } else Content{ .err = {} }; - const expr_idx = try self.env.addExprAndTypeVar(CIR.Expr{ .e_match = match_expr }, initial_content, region); - - // If there is at least 1 branch, then set the root expr to redirect - // to the type of the match branch - const expr_var = @as(TypeVar, @enumFromInt(@intFromEnum(expr_idx))); - if (mb_branch_var) |branch_var| { - try self.env.types.setVarRedirect(expr_var, branch_var); - } + const expr_idx = try self.env.addExpr(CIR.Expr{ .e_match = match_expr }, region); const free_vars_slice = self.scratch_free_vars.slice(free_vars_start, self.scratch_free_vars.top()); return CanonicalizedExpr{ .idx = expr_idx, .free_vars = if (free_vars_slice.len > 0) free_vars_slice else null }; @@ -3041,9 +2991,9 @@ pub 
fn canonicalizeExpr( const can_inner = try self.canonicalizeExpr(d.expr) orelse return null; // Create debug expression - const dbg_expr = try self.env.addExprAndTypeVar(Expr{ .e_dbg = .{ + const dbg_expr = try self.env.addExpr(Expr{ .e_dbg = .{ .expr = can_inner.idx, - } }, Content{ .flex_var = null }, region); + } }, region); return CanonicalizedExpr{ .idx = dbg_expr, .free_vars = can_inner.free_vars }; }, @@ -3057,7 +3007,7 @@ pub fn canonicalizeExpr( }, .ellipsis => |e| { const region = self.parse_ir.tokenizedRegionToRegion(e.region); - const ellipsis_expr = try self.env.addExprAndTypeVar(Expr{ .e_ellipsis = .{} }, Content{ .flex_var = null }, region); + const ellipsis_expr = try self.env.addExpr(Expr{ .e_ellipsis = .{} }, region); return CanonicalizedExpr{ .idx = ellipsis_expr, .free_vars = null }; }, .block => |e| { @@ -3114,30 +3064,25 @@ fn canonicalizeTagExpr(self: *Self, e: AST.TagExpr, mb_args: ?AST.Expr.Span, reg } // Create a single tag, open tag union for this variable - // Use a placeholder ext_var that will be handled during type checking - const ext_var = try self.env.addTypeSlotAndTypeVar(@enumFromInt(0), .{ .flex_var = null }, region, TypeVar); - const tag = try self.env.types.mkTag(tag_name, @ptrCast(self.env.store.sliceExpr(args_span))); - const tag_union = try self.env.types.mkTagUnion(&[_]Tag{tag}, ext_var); - // Create the tag expression with the tag union type - const tag_expr_idx = try self.env.addExprAndTypeVar(CIR.Expr{ + const tag_expr_idx = try self.env.addExpr(CIR.Expr{ .e_tag = .{ .name = tag_name, .args = args_span, }, - }, tag_union, region); + }, region); if (e.qualifiers.span.len == 0) { // Check if this is an unqualified nominal tag (e.g. 
True or False are in scope unqualified by default) if (self.unqualified_nominal_tags.get(tag_name_text)) |nominal_type_decl| { // Get the type variable for the nominal type declaration (e.g., Bool type) - const expr_idx = try self.env.addExprAndTypeVar(CIR.Expr{ + const expr_idx = try self.env.addExpr(CIR.Expr{ .e_nominal = .{ .nominal_type_decl = nominal_type_decl, .backing_expr = tag_expr_idx, .backing_type = .tag, }, - }, .err, region); + }, region); return CanonicalizedExpr{ .idx = expr_idx, .free_vars = null }; } @@ -3165,13 +3110,13 @@ fn canonicalizeTagExpr(self: *Self, e: AST.TagExpr, mb_args: ?AST.Expr.Span, reg }; switch (self.env.store.getStatement(nominal_type_decl_stmt_idx)) { .s_nominal_decl => { - const expr_idx = try self.env.addExprAndTypeVar(CIR.Expr{ + const expr_idx = try self.env.addExpr(CIR.Expr{ .e_nominal = .{ .nominal_type_decl = nominal_type_decl_stmt_idx, .backing_expr = tag_expr_idx, .backing_type = .tag, }, - }, .err, region); + }, region); const free_vars_slice = self.scratch_free_vars.slice(free_vars_start, self.scratch_free_vars.top()); return CanonicalizedExpr{ @@ -3273,14 +3218,14 @@ fn canonicalizeTagExpr(self: *Self, e: AST.TagExpr, mb_args: ?AST.Expr.Span, reg break :blk other_module_node_id; }; - const expr_idx = try self.env.addExprAndTypeVar(CIR.Expr{ + const expr_idx = try self.env.addExpr(CIR.Expr{ .e_nominal_external = .{ .module_idx = import_idx, .target_node_idx = target_node_idx, .backing_expr = tag_expr_idx, .backing_type = .tag, }, - }, .err, region); + }, region); const free_vars_slice = self.scratch_free_vars.slice(free_vars_start, self.scratch_free_vars.top()); return CanonicalizedExpr{ @@ -3296,9 +3241,9 @@ fn addStringLiteralToScratch(self: *Self, text: []const u8, region: AST.Tokenize const string_idx = try self.env.insertString(text); // create a node for the string literal - const str_expr_idx = try self.env.addExprAndTypeVar(CIR.Expr{ .e_str_segment = .{ + const str_expr_idx = try self.env.addExpr(CIR.Expr{ 
.e_str_segment = .{ .literal = string_idx, - } }, Content{ .structure = .str }, self.parse_ir.tokenizedRegionToRegion(region)); + } }, self.parse_ir.tokenizedRegionToRegion(region)); // add the node idx to our scratch expr stack try self.env.store.addScratchExpr(str_expr_idx); @@ -3384,9 +3329,9 @@ fn canonicalizePattern( const region = self.parse_ir.tokenizedRegionToRegion(e.region); if (self.parse_ir.tokens.resolveIdentifier(e.ident_tok)) |ident_idx| { // Create a Pattern node for our identifier - const pattern_idx = try self.env.addPatternAndTypeVar(Pattern{ .assign = .{ + const pattern_idx = try self.env.addPattern(Pattern{ .assign = .{ .ident = ident_idx, - } }, .err, region); + } }, region); // Introduce the identifier into scope mapping to this pattern node switch (try self.scopeIntroduceInternal(self.env.gpa, .ident, ident_idx, pattern_idx, false, true)) { @@ -3431,7 +3376,7 @@ fn canonicalizePattern( .underscore = {}, }; - const pattern_idx = try self.env.addPatternAndTypeVar(underscore_pattern, .err, region); + const pattern_idx = try self.env.addPattern(underscore_pattern, region); return pattern_idx; }, @@ -3558,23 +3503,21 @@ fn canonicalizePattern( return try self.env.pushMalformed(Pattern.Idx, Diagnostic{ .invalid_num_literal = .{ .region = region } }); } }; - const pattern_idx = try self.env.addPatternAndTypeVar( + const pattern_idx = try self.env.addPattern( .{ .num_literal = .{ .value = .{ .bytes = @bitCast(i128_val), .kind = .i128 }, .kind = num_suffix, } }, - .err, region, ); return pattern_idx; } - const pattern_idx = try self.env.addPatternAndTypeVar( + const pattern_idx = try self.env.addPattern( Pattern{ .num_literal = .{ .value = CIR.IntValue{ .bytes = @bitCast(i128_val), .kind = .i128 }, .kind = .num_unbound, } }, - .err, region, ); return pattern_idx; @@ -3597,16 +3540,14 @@ fn canonicalizePattern( const malformed_idx = try self.env.pushMalformed(Pattern.Idx, Diagnostic{ .invalid_num_literal = .{ .region = region } }); return 
malformed_idx; } - const pattern_idx = try self.env.addPatternAndTypeVar( + const pattern_idx = try self.env.addPattern( .{ .frac_f32_literal = .{ .value = @floatCast(f64_val) } }, - .err, region, ); return pattern_idx; } else if (std.mem.eql(u8, suffix, "f64")) { - const pattern_idx = try self.env.addPatternAndTypeVar( + const pattern_idx = try self.env.addPattern( .{ .frac_f64_literal = .{ .value = f64_val } }, - .err, region, ); return pattern_idx; @@ -3619,9 +3560,8 @@ fn canonicalizePattern( const malformed_idx = try self.env.pushMalformed(Pattern.Idx, Diagnostic{ .invalid_num_literal = .{ .region = region } }); return malformed_idx; }; - const pattern_idx = try self.env.addPatternAndTypeVar( + const pattern_idx = try self.env.addPattern( .{ .dec_literal = .{ .value = dec_val, .has_suffix = true } }, - .err, region, ); return pattern_idx; @@ -3664,7 +3604,7 @@ fn canonicalizePattern( .f64 => unreachable, // Already handled above }; - const pattern_idx = try self.env.addPatternAndTypeVar(cir_pattern, .err, region); + const pattern_idx = try self.env.addPattern(cir_pattern, region); return pattern_idx; }, @@ -3683,7 +3623,7 @@ fn canonicalizePattern( .literal = literal, }, }; - const pattern_idx = try self.env.addPatternAndTypeVar(str_pattern, .err, region); + const pattern_idx = try self.env.addPattern(str_pattern, region); return pattern_idx; }, @@ -3712,33 +3652,24 @@ fn canonicalizePattern( } const args = try self.env.store.patternSpanFrom(patterns_start); - // Create the pattern type var first - const arg_vars: []TypeVar = @ptrCast(self.env.store.slicePatterns(args)); - // We need to create a temporary pattern idx to get the type var - const ext_var = try self.env.addTypeSlotAndTypeVar(@enumFromInt(0), .{ .flex_var = null }, region, TypeVar); - const tag = try self.env.types.mkTag(tag_name, arg_vars); - _ = try self.env.types.mkTagUnion(&[_]Tag{tag}, ext_var); - // Create the pattern node with type var - const tag_pattern_idx = try 
self.env.addPatternAndTypeVar(Pattern{ + const tag_pattern_idx = try self.env.addPattern(Pattern{ .applied_tag = .{ .name = tag_name, .args = args, }, - }, .err, region); + }, region); if (e.qualifiers.span.len == 0) { // Check if this is an unqualified nominal tag (e.g. True or False are in scope unqualified by default) if (self.unqualified_nominal_tags.get(tag_name_text)) |nominal_type_decl| { - // Get the type variable for the nominal type declaration (e.g., Bool type) - const nominal_type_var = ModuleEnv.castIdx(Statement.Idx, TypeVar, nominal_type_decl); - const nominal_pattern_idx = try self.env.addPatternAndTypeVarRedirect(CIR.Pattern{ + const nominal_pattern_idx = try self.env.addPattern(CIR.Pattern{ .nominal = .{ .nominal_type_decl = nominal_type_decl, .backing_pattern = tag_pattern_idx, .backing_type = .tag, }, - }, nominal_type_var, region); + }, region); return nominal_pattern_idx; } @@ -3763,14 +3694,13 @@ fn canonicalizePattern( switch (self.env.store.getStatement(nominal_type_decl_stmt_idx)) { .s_nominal_decl => { - const nominal_type_var = ModuleEnv.castIdx(Statement.Idx, TypeVar, nominal_type_decl_stmt_idx); - const pattern_idx = try self.env.addPatternAndTypeVarRedirect(CIR.Pattern{ + const pattern_idx = try self.env.addPattern(CIR.Pattern{ .nominal = .{ .nominal_type_decl = nominal_type_decl_stmt_idx, .backing_pattern = tag_pattern_idx, .backing_type = .tag, }, - }, nominal_type_var, region); + }, region); return pattern_idx; }, @@ -3861,14 +3791,14 @@ fn canonicalizePattern( break :blk .{ other_module_node_id, Content{ .flex_var = null } }; }; - const nominal_pattern_idx = try self.env.addPatternAndTypeVar(CIR.Pattern{ + const nominal_pattern_idx = try self.env.addPattern(CIR.Pattern{ .nominal_external = .{ .module_idx = import_idx, .target_node_idx = target_node_idx, .backing_pattern = tag_pattern_idx, .backing_type = .tag, }, - }, .err, region); + }, region); return nominal_pattern_idx; } @@ -3906,12 +3836,12 @@ fn canonicalizePattern( .kind = 
.{ .SubPattern = canonicalized_sub_pattern }, }; - const destruct_idx = try self.env.addRecordDestructAndTypeVar(record_destruct, .err, field_region); + const destruct_idx = try self.env.addRecordDestruct(record_destruct, field_region); try self.env.store.addScratchRecordDestruct(destruct_idx); } else { // Simple case: Create the RecordDestruct for this field const assign_pattern = Pattern{ .assign = .{ .ident = field_name_ident } }; - const assign_pattern_idx = try self.env.addPatternAndTypeVar(assign_pattern, .err, field_region); + const assign_pattern_idx = try self.env.addPattern(assign_pattern, field_region); const record_destruct = CIR.Pattern.RecordDestruct{ .label = field_name_ident, @@ -3919,7 +3849,7 @@ fn canonicalizePattern( .kind = .{ .Required = assign_pattern_idx }, }; - const destruct_idx = try self.env.addRecordDestructAndTypeVar(record_destruct, .err, field_region); + const destruct_idx = try self.env.addRecordDestruct(record_destruct, field_region); try self.env.store.addScratchRecordDestruct(destruct_idx); // Introduce the identifier into scope @@ -3965,11 +3895,11 @@ fn canonicalizePattern( const destructs_span = try self.env.store.recordDestructSpanFrom(scratch_top); // Create the record destructure pattern - const pattern_idx = try self.env.addPatternAndTypeVar(Pattern{ + const pattern_idx = try self.env.addPattern(Pattern{ .record_destructure = .{ .destructs = destructs_span, }, - }, .err, region); + }, region); return pattern_idx; }, @@ -3992,11 +3922,11 @@ fn canonicalizePattern( // Create span of the new scratch patterns const patterns_span = try self.env.store.patternSpanFrom(scratch_top); - const pattern_idx = try self.env.addPatternAndTypeVar(Pattern{ + const pattern_idx = try self.env.addPattern(Pattern{ .tuple = .{ .patterns = patterns_span, }, - }, .err, region); + }, region); return pattern_idx; }, @@ -4034,9 +3964,9 @@ fn canonicalizePattern( // Create an assign pattern for the rest variable // Use the region of just the 
identifier token, not the full rest pattern const name_region = self.parse_ir.tokenizedRegionToRegion(.{ .start = name_tok, .end = name_tok }); - const assign_idx = try self.env.addPatternAndTypeVar(Pattern{ .assign = .{ + const assign_idx = try self.env.addPattern(Pattern{ .assign = .{ .ident = ident_idx, - } }, .err, name_region); + } }, name_region); // Introduce the identifier into scope switch (try self.scopeIntroduceInternal(self.env.gpa, .ident, ident_idx, assign_idx, false, true)) { @@ -4095,24 +4025,24 @@ fn canonicalizePattern( // Handle empty list patterns specially if (patterns_span.span.len == 0 and rest_index == null) { // Empty list pattern - const pattern_idx = try self.env.addPatternAndTypeVar(Pattern{ + const pattern_idx = try self.env.addPattern(Pattern{ .list = .{ .patterns = patterns_span, .rest_info = null, }, - }, .err, region); + }, region); return pattern_idx; } // Create the list pattern with rest info // Set type variable for the pattern - this should be the list type - const pattern_idx = try self.env.addPatternAndTypeVar(Pattern{ + const pattern_idx = try self.env.addPattern(Pattern{ .list = .{ .patterns = patterns_span, .rest_info = if (rest_index) |idx| .{ .index = idx, .pattern = rest_pattern } else null, }, - }, .err, region); + }, region); return pattern_idx; }, @@ -4158,7 +4088,7 @@ fn canonicalizePattern( }, }; - const pattern_idx = try self.env.addPatternAndTypeVar(as_pattern, .err, region); + const pattern_idx = try self.env.addPattern(as_pattern, region); // Introduce the identifier into scope switch (try self.scopeIntroduceInternal(self.env.gpa, .ident, ident_idx, pattern_idx, false, true)) { @@ -4708,9 +4638,9 @@ fn canonicalizeTypeAnnoHelp(self: *Self, anno_idx: AST.TypeAnno.Idx, type_anno_c // Track this type variable for underscore validation try self.scratch_type_var_validation.append(self.env.gpa, name_ident); - return try self.env.addTypeAnnoAndTypeVarRedirect(.{ .rigid_var_lookup = .{ + return try 
self.env.addTypeAnno(.{ .rigid_var_lookup = .{ .ref = found_anno_idx, - } }, ModuleEnv.varFrom(found_anno_idx), region); + } }, region); }, .not_found => { switch (type_anno_ctx.type) { @@ -4720,10 +4650,9 @@ fn canonicalizeTypeAnnoHelp(self: *Self, anno_idx: AST.TypeAnno.Idx, type_anno_c // Track this type variable for underscore validation try self.scratch_type_var_validation.append(self.env.gpa, name_ident); - const content = types.Content{ .rigid_var = name_ident }; - const new_anno_idx = try self.env.addTypeAnnoAndTypeVar(.{ .rigid_var = .{ + const new_anno_idx = try self.env.addTypeAnno(.{ .rigid_var = .{ .name = name_ident, - } }, content, region); + } }, region); // Add to scope _ = try self.scopeIntroduceTypeVar(name_ident, new_anno_idx); @@ -4766,9 +4695,9 @@ fn canonicalizeTypeAnnoHelp(self: *Self, anno_idx: AST.TypeAnno.Idx, type_anno_c // Track this type variable for underscore validation try self.scratch_type_var_validation.append(self.env.gpa, name_ident); - return try self.env.addTypeAnnoAndTypeVarRedirect(.{ .rigid_var_lookup = .{ + return try self.env.addTypeAnno(.{ .rigid_var_lookup = .{ .ref = found_anno_idx, - } }, ModuleEnv.varFrom(found_anno_idx), region); + } }, region); }, .not_found => { switch (type_anno_ctx.type) { @@ -4778,10 +4707,9 @@ fn canonicalizeTypeAnnoHelp(self: *Self, anno_idx: AST.TypeAnno.Idx, type_anno_c // Track this type variable for underscore validation try self.scratch_type_var_validation.append(self.env.gpa, name_ident); - const content = types.Content{ .rigid_var = name_ident }; - const new_anno_idx = try self.env.addTypeAnnoAndTypeVar(.{ .rigid_var = .{ + const new_anno_idx = try self.env.addTypeAnno(.{ .rigid_var = .{ .name = name_ident, - } }, content, region); + } }, region); // Add to scope _ = try self.scopeIntroduceTypeVar(name_ident, new_anno_idx); @@ -4815,16 +4743,7 @@ fn canonicalizeTypeAnnoHelp(self: *Self, anno_idx: AST.TypeAnno.Idx, type_anno_c } }); } - // Create type variable with error content if 
underscore in type declaration - const content = blk: { - if (type_anno_ctx.isTypeDeclAndHasUnderscore()) { - break :blk types.Content{ .err = {} }; - } else { - break :blk types.Content{ .flex_var = null }; - } - }; - - return try self.env.addTypeAnnoAndTypeVar(.{ .underscore = {} }, content, region); + return try self.env.addTypeAnno(.{ .underscore = {} }, region); }, .tuple => |tuple| { return try self.canonicalizeTypeAnnoTuple(tuple, type_anno_ctx); @@ -4844,13 +4763,13 @@ fn canonicalizeTypeAnnoHelp(self: *Self, anno_idx: AST.TypeAnno.Idx, type_anno_c // Create type variable with error content if underscore in type declaration if (type_anno_ctx.isTypeDeclAndHasUnderscore()) { - return try self.env.addTypeAnnoAndTypeVar(.{ .parens = .{ + return try self.env.addTypeAnno(.{ .parens = .{ .anno = inner_anno, - } }, .err, region); + } }, region); } else { - return try self.env.addTypeAnnoAndTypeVarRedirect(.{ .parens = .{ + return try self.env.addTypeAnno(.{ .parens = .{ .anno = inner_anno, - } }, ModuleEnv.varFrom(inner_anno), region); + } }, region); } }, .malformed => |malformed| { @@ -4886,10 +4805,10 @@ fn canonicalizeTypeAnnoBasicType( // First, check if the type is a builtin type // There are always automatically in-scope if (TypeAnno.Builtin.fromBytes(self.env.getIdentText(type_name_ident))) |builtin_type| { - return try self.env.addTypeAnnoAndTypeVar(CIR.TypeAnno{ .lookup = .{ + return try self.env.addTypeAnno(CIR.TypeAnno{ .lookup = .{ .name = type_name_ident, .base = .{ .builtin = builtin_type }, - } }, .err, region); + } }, region); } else { // If it's not a builtin, look up in scope const type_decl_idx = self.scopeLookupTypeDecl(type_name_ident) orelse { @@ -4898,10 +4817,10 @@ fn canonicalizeTypeAnnoBasicType( .region = type_name_region, } }); }; - return try self.env.addTypeAnnoAndTypeVar(CIR.TypeAnno{ .lookup = .{ + return try self.env.addTypeAnno(CIR.TypeAnno{ .lookup = .{ .name = type_name_ident, .base = .{ .local = .{ .decl_idx = type_decl_idx } 
}, - } }, .err, region); + } }, region); } } else { // First, check if this is a qualified name for an associated type (e.g., Foo.Bar) @@ -4916,10 +4835,10 @@ fn canonicalizeTypeAnnoBasicType( // Try looking up the full qualified name in local scope (for associated types) if (self.scopeLookupTypeDecl(qualified_name_ident)) |type_decl_idx| { - return try self.env.addTypeAnnoAndTypeVar(CIR.TypeAnno{ .lookup = .{ + return try self.env.addTypeAnno(CIR.TypeAnno{ .lookup = .{ .name = qualified_name_ident, .base = .{ .local = .{ .decl_idx = type_decl_idx } }, - } }, .err, region); + } }, region); } // Not a local qualified type, so treat as an external type from a module @@ -4990,10 +4909,10 @@ fn canonicalizeTypeAnnoBasicType( // Create the ty_lookup_external expression with Import.Idx // Type solving will copy this types from the origin type store into the // this module's type store - return try self.env.addTypeAnnoAndTypeVar(CIR.TypeAnno{ .lookup = .{ .name = type_name_ident, .base = .{ .external = .{ + return try self.env.addTypeAnno(CIR.TypeAnno{ .lookup = .{ .name = type_name_ident, .base = .{ .external = .{ .module_idx = import_idx, .target_node_idx = target_node_idx, - } } } }, .err, region); + } } } }, region); } } @@ -5050,11 +4969,11 @@ fn canonicalizeTypeAnnoTypeApplication( } }); } - return try self.env.addTypeAnnoAndTypeVar(.{ .apply = .{ + return try self.env.addTypeAnno(.{ .apply = .{ .name = ty.name, .base = ty.base, .args = args_span, - } }, .err, region); + } }, region); }, else => return base_anno_idx, } @@ -5093,18 +5012,9 @@ fn canonicalizeTypeAnnoTuple( } const annos = try self.env.store.typeAnnoSpanFrom(scratch_top); - const content = blk: { - if (type_anno_ctx.isTypeDeclAndHasUnderscore()) { - break :blk types.Content{ .err = {} }; - } else { - const elems_var_range = try self.env.types.appendVars(self.scratch_vars.sliceFromStart(scratch_vars_top)); - break :blk types.Content{ .structure = FlatType{ .tuple = .{ .elems = elems_var_range } } }; - } 
- }; - - return try self.env.addTypeAnnoAndTypeVar(.{ .tuple = .{ + return try self.env.addTypeAnno(.{ .tuple = .{ .elems = annos, - } }, content, region); + } }, region); } } @@ -5141,9 +5051,9 @@ fn canonicalizeTypeAnnoRecord( .name = malformed_ident, .ty = canonicalized_ty, }; - const field_cir_idx = try self.env.addAnnoRecordFieldAndTypeVarRedirect( + const field_cir_idx = try self.env.addAnnoRecordField( cir_field, - ModuleEnv.varFrom(canonicalized_ty), + self.parse_ir.tokenizedRegionToRegion(ast_field.region), ); try self.env.store.addScratchAnnoRecordField(field_cir_idx); @@ -5164,9 +5074,9 @@ fn canonicalizeTypeAnnoRecord( .name = field_name, .ty = canonicalized_ty, }; - const field_cir_idx = try self.env.addAnnoRecordFieldAndTypeVarRedirect( + const field_cir_idx = try self.env.addAnnoRecordField( cir_field, - ModuleEnv.varFrom(canonicalized_ty), + self.parse_ir.tokenizedRegionToRegion(ast_field.region), ); try self.env.store.addScratchAnnoRecordField(field_cir_idx); @@ -5182,27 +5092,10 @@ fn canonicalizeTypeAnnoRecord( // Should we be sorting here? 
const record_fields_scratch = self.scratch_record_fields.sliceFromStart(scratch_record_fields_top); std.mem.sort(types.RecordField, record_fields_scratch, self.env.common.getIdentStore(), comptime types.RecordField.sortByNameAsc); - const fields_type_range = try self.env.types.appendRecordFields(record_fields_scratch); - const content = blk: { - if (type_anno_ctx.isTypeDeclAndHasUnderscore()) { - break :blk types.Content{ .err = {} }; - } else { - // TODO: Add parser support for extensible variables in - // record then thread that through here - const ext_var = try self.env.addTypeSlotAndTypeVar( - @enumFromInt(0), // TODO - .{ .structure = .empty_record }, - region, - TypeVar, - ); - break :blk Content{ .structure = .{ .record = .{ .fields = fields_type_range, .ext = ext_var } } }; - } - }; - - return try self.env.addTypeAnnoAndTypeVar(.{ .record = .{ + return try self.env.addTypeAnno(.{ .record = .{ .fields = field_anno_idxs, - } }, content, region); + } }, region); } /// Handle tag union types like [Some(a), None] @@ -5220,70 +5113,24 @@ fn canonicalizeTypeAnnoTagUnion( const scratch_annos_top = self.env.store.scratchTypeAnnoTop(); defer self.env.store.clearScratchTypeAnnosFrom(scratch_annos_top); - const scratch_tags_top = self.scratch_tags.top(); - defer self.scratch_tags.clearFrom(scratch_tags_top); - for (self.parse_ir.store.typeAnnoSlice(tag_union.tags)) |tag_idx| { - // First canonicalized the tag variant + // Canonicalized the tag variant // This will always return a `ty` or an `apply` const canonicalized_tag_idx = try self.canonicalizeTypeAnnoTag(tag_idx, type_anno_ctx); try self.env.store.addScratchTypeAnno(canonicalized_tag_idx); - - // Then, create the type system tag and append to scratch tags - const tag_cir_anno = self.env.store.getTypeAnno(canonicalized_tag_idx); - const tag = blk: { - switch (tag_cir_anno) { - .tag => |tag| { - const args_slice: []TypeVar = @ptrCast(self.env.store.sliceTypeAnnos(tag.args)); - break :blk try 
self.env.types.mkTag(tag.name, args_slice); - }, - .malformed => { - continue; - }, - else => unreachable, - } - }; - try self.scratch_tags.append(self.env.gpa, tag); } const tag_anno_idxs = try self.env.store.typeAnnoSpanFrom(scratch_annos_top); - // Should we be sorting here? - const tags_slice = self.scratch_tags.sliceFromStart(scratch_tags_top); - std.mem.sort(types.Tag, tags_slice, self.env.common.getIdentStore(), comptime types.Tag.sortByNameAsc); - // Canonicalize the ext, if it exists const mb_ext_anno = if (tag_union.open_anno) |open_idx| blk: { break :blk try self.canonicalizeTypeAnnoHelp(open_idx, type_anno_ctx); } else null; - const content = blk: { - if (type_anno_ctx.isTypeDeclAndHasUnderscore()) { - break :blk types.Content{ .err = {} }; - } else { - // Make the ext type variable - const ext_var = inner_blk: { - if (mb_ext_anno) |ext_anno| { - break :inner_blk ModuleEnv.varFrom(ext_anno); - } else { - break :inner_blk try self.env.addTypeSlotAndTypeVar( - @enumFromInt(0), - .{ .structure = .empty_tag_union }, - region, - TypeVar, - ); - } - }; - - // Make type system tag union - break :blk try self.env.types.mkTagUnion(tags_slice, ext_var); - } - }; - - return try self.env.addTypeAnnoAndTypeVar(.{ .tag_union = .{ + return try self.env.addTypeAnno(.{ .tag_union = .{ .tags = tag_anno_idxs, .ext = mb_ext_anno, - } }, content, region); + } }, region); } /// Canonicalize a tag variant within a tag union type annotation @@ -5311,10 +5158,10 @@ fn canonicalizeTypeAnnoTag( // Create identifier from text if resolution fails try self.env.insertIdent(base.Ident.for_text(self.parse_ir.resolve(ty.token))); - return try self.env.addTypeAnnoAndTypeVar(.{ .tag = .{ + return try self.env.addTypeAnno(.{ .tag = .{ .name = ident_idx, .args = .{ .span = DataSpan.empty() }, - } }, .err, region); + } }, region); }, .apply => |apply| { // For tags with arguments like `Some(Str)`, validate the arguments but not the tag name @@ -5346,10 +5193,10 @@ fn canonicalizeTypeAnnoTag( 
} const args = try self.env.store.typeAnnoSpanFrom(scratch_top); - return try self.env.addTypeAnnoAndTypeVar(.{ .tag = .{ + return try self.env.addTypeAnno(.{ .tag = .{ .name = type_name, .args = args, - } }, Content{ .flex_var = null }, region); + } }, region); }, else => { return try self.env.pushMalformed(TypeAnno.Idx, Diagnostic{ @@ -5376,30 +5223,14 @@ fn canonicalizeTypeAnnoFunc( const args_span = try self.env.store.typeAnnoSpanFrom(scratch_top); - const args_anno_idxs = self.env.store.sliceTypeAnnos(args_span); - const args_vars: []TypeVar = @ptrCast(@alignCast(args_anno_idxs)); - // Canonicalize return type const ret_anno_idx = try self.canonicalizeTypeAnnoHelp(func.ret, type_anno_ctx); - const ret_var = ModuleEnv.varFrom(ret_anno_idx); - const content = blk: { - if (type_anno_ctx.isTypeDeclAndHasUnderscore()) { - break :blk types.Content{ .err = {} }; - } else { - if (func.effectful) { - break :blk try self.env.types.mkFuncEffectful(args_vars, ret_var); - } else { - break :blk try self.env.types.mkFuncPure(args_vars, ret_var); - } - } - }; - - return try self.env.addTypeAnnoAndTypeVar(.{ .@"fn" = .{ + return try self.env.addTypeAnno(.{ .@"fn" = .{ .args = args_span, .ret = ret_anno_idx, .effectful = func.effectful, - } }, content, region); + } }, region); } //////////////////////////////////////////////////////////////////////////////// @@ -5413,10 +5244,10 @@ fn canonicalizeTypeHeader(self: *Self, header_idx: AST.TypeHeader.Idx) std.mem.A const node_region = self.parse_ir.tokenizedRegionToRegion(node.region); if (node.tag == .malformed) { // Create a malformed type header with an invalid identifier - return try self.env.addTypeHeaderAndTypeVar(.{ + return try self.env.addTypeHeader(.{ .name = base.Ident.Idx{ .attributes = .{ .effectful = false, .ignored = false, .reassignable = false }, .idx = 0 }, // Invalid identifier .args = .{ .span = .{ .start = 0, .len = 0 } }, - }, Content{ .flex_var = null }, node_region); + }, node_region); } const ast_header = 
self.parse_ir.store.getTypeHeader(header_idx) catch unreachable; // Malformed handled above @@ -5425,10 +5256,10 @@ fn canonicalizeTypeHeader(self: *Self, header_idx: AST.TypeHeader.Idx) std.mem.A // Get the type name identifier const name_ident = self.parse_ir.tokens.resolveIdentifier(ast_header.name) orelse { // If we can't resolve the identifier, create a malformed header with invalid identifier - return try self.env.addTypeHeaderAndTypeVar(.{ + return try self.env.addTypeHeader(.{ .name = base.Ident.Idx{ .attributes = .{ .effectful = false, .ignored = false, .reassignable = false }, .idx = 0 }, // Invalid identifier .args = .{ .span = .{ .start = 0, .len = 0 } }, - }, Content{ .flex_var = null }, region); + }, region); }; // Check if this is a builtin type @@ -5468,9 +5299,9 @@ fn canonicalizeTypeHeader(self: *Self, header_idx: AST.TypeHeader.Idx) std.mem.A } }); } - const param_anno = try self.env.addTypeAnnoAndTypeVar(.{ .rigid_var = .{ + const param_anno = try self.env.addTypeAnno(.{ .rigid_var = .{ .name = param_ident, - } }, Content{ .rigid_var = param_ident }, param_region); + } }, param_region); try self.env.store.addScratchTypeAnno(param_anno); }, .underscore => |underscore_param| { @@ -5484,7 +5315,7 @@ fn canonicalizeTypeHeader(self: *Self, header_idx: AST.TypeHeader.Idx) std.mem.A } }); // Create underscore type annotation - const underscore_anno = try self.env.addTypeAnnoAndTypeVar(.{ .underscore = {} }, Content{ .err = {} }, param_region); + const underscore_anno = try self.env.addTypeAnno(.{ .underscore = {} }, param_region); try self.env.store.addScratchTypeAnno(underscore_anno); }, .malformed => |malformed_param| { @@ -5514,10 +5345,10 @@ fn canonicalizeTypeHeader(self: *Self, header_idx: AST.TypeHeader.Idx) std.mem.A const args = try self.env.store.typeAnnoSpanFrom(scratch_top); - return try self.env.addTypeHeaderAndTypeVar(.{ + return try self.env.addTypeHeader(.{ .name = name_ident, .args = args, - }, Content{ .flex_var = null }, region); + 
}, region); } // expr statements // @@ -5567,9 +5398,9 @@ fn canonicalizeBlock(self: *Self, e: AST.Block) std.mem.Allocator.Error!Canonica const inner_expr = try self.canonicalizeExprOrMalformed(dbg_stmt.expr); // Create debug expression - const dbg_expr = try self.env.addExprAndTypeVarRedirect(Expr{ .e_dbg = .{ + const dbg_expr = try self.env.addExpr(Expr{ .e_dbg = .{ .expr = inner_expr.idx, - } }, ModuleEnv.varFrom(inner_expr.idx), debug_region); + } }, debug_region); last_expr = CanonicalizedExpr{ .idx = dbg_expr, .free_vars = inner_expr.free_vars }; }, .@"return" => |return_stmt| { @@ -5591,15 +5422,15 @@ fn canonicalizeBlock(self: *Self, e: AST.Block) std.mem.Allocator.Error!Canonica const first_part = self.parse_ir.store.getExpr(parts[0]); if (first_part == .string_part) { const part_text = self.parse_ir.resolve(first_part.string_part.token); - break :blk try self.env.addExprAndTypeVar(Expr{ .e_crash = .{ + break :blk try self.env.addExpr(Expr{ .e_crash = .{ .msg = try self.env.insertString(part_text), - } }, .{ .flex_var = null }, crash_region); + } }, crash_region); } } // Fall back to default if we can't extract - break :blk try self.env.addExprAndTypeVar(Expr{ .e_crash = .{ + break :blk try self.env.addExpr(Expr{ .e_crash = .{ .msg = try self.env.insertString("crash"), - } }, .{ .flex_var = null }, crash_region); + } }, crash_region); }, else => { // For non-string expressions, create a malformed expression @@ -5675,12 +5506,11 @@ fn canonicalizeBlock(self: *Self, e: AST.Block) std.mem.Allocator.Error!Canonica // Determine the final expression const final_expr = if (last_expr) |can_expr| can_expr else blk: { // Empty block - create empty record - const expr_idx = try self.env.addExprAndTypeVar(CIR.Expr{ + const expr_idx = try self.env.addExpr(CIR.Expr{ .e_empty_record = .{}, - }, Content{ .structure = .empty_record }, block_region); + }, block_region); break :blk CanonicalizedExpr{ .idx = expr_idx, .free_vars = null }; }; - const final_expr_var = 
@as(TypeVar, @enumFromInt(@intFromEnum(final_expr.idx))); // Add free vars from the final expression to the block's scratch space if (final_expr.free_vars) |fvs| { @@ -5709,11 +5539,7 @@ fn canonicalizeBlock(self: *Self, e: AST.Block) std.mem.Allocator.Error!Canonica .final_expr = final_expr.idx, }, }; - const block_idx = try self.env.addExprAndTypeVar(block_expr, Content{ .flex_var = null }, block_region); - const block_var = @as(TypeVar, @enumFromInt(@intFromEnum(block_idx))); - - // Set the root block expr to redirect to the final expr var - try self.env.types.setVarRedirect(block_var, final_expr_var); + const block_idx = try self.env.addExpr(block_expr, block_region); return CanonicalizedExpr{ .idx = block_idx, .free_vars = if (captures_slice.len > 0) captures_slice else null }; } @@ -5762,9 +5588,9 @@ pub fn canonicalizeBlockStatement(self: *Self, ast_stmt: AST.Statement, ast_stmt const expr = try self.canonicalizeExprOrMalformed(v.body); // Create pattern for the var - const pattern_idx = try self.env.addPatternAndTypeVarRedirect( + const pattern_idx = try self.env.addPattern( Pattern{ .assign = .{ .ident = var_name } }, - ModuleEnv.varFrom(expr.idx), + region, ); @@ -5772,10 +5598,10 @@ pub fn canonicalizeBlockStatement(self: *Self, ast_stmt: AST.Statement, ast_stmt _ = try self.scopeIntroduceVar(var_name, pattern_idx, region, true, Pattern.Idx); // Create var statement - const stmt_idx = try self.env.addStatementAndTypeVarRedirect(Statement{ .s_var = .{ + const stmt_idx = try self.env.addStatement(Statement{ .s_var = .{ .pattern_idx = pattern_idx, .expr = expr.idx, - } }, ModuleEnv.varFrom(expr.idx), region); + } }, region); mb_canonicailzed_stmt = CanonicalizedStatement{ .idx = stmt_idx, .free_vars = expr.free_vars }; }, @@ -5786,9 +5612,9 @@ pub fn canonicalizeBlockStatement(self: *Self, ast_stmt: AST.Statement, ast_stmt const expr = try self.canonicalizeExprOrMalformed(e_.expr); // Create expression statement - const stmt_idx = try 
self.env.addStatementAndTypeVarRedirect(Statement{ .s_expr = .{ + const stmt_idx = try self.env.addStatement(Statement{ .s_expr = .{ .expr = expr.idx, - } }, ModuleEnv.varFrom(expr.idx), region); + } }, region); mb_canonicailzed_stmt = CanonicalizedStatement{ .idx = stmt_idx, .free_vars = expr.free_vars }; }, @@ -5821,9 +5647,9 @@ pub fn canonicalizeBlockStatement(self: *Self, ast_stmt: AST.Statement, ast_stmt const stmt_idx = blk: { if (mb_msg_literal) |msg_literal| { // Create crash statement - break :blk try self.env.addStatementAndTypeVar(Statement{ .s_crash = .{ + break :blk try self.env.addStatement(Statement{ .s_crash = .{ .msg = msg_literal, - } }, .err, region); + } }, region); } else { // For non-string expressions, create a malformed expression break :blk try self.env.pushMalformed(Statement.Idx, Diagnostic{ .crash_expects_string = .{ @@ -5842,9 +5668,9 @@ pub fn canonicalizeBlockStatement(self: *Self, ast_stmt: AST.Statement, ast_stmt // Create dbg statement - const stmt_idx = try self.env.addStatementAndTypeVarRedirect(Statement{ .s_dbg = .{ + const stmt_idx = try self.env.addStatement(Statement{ .s_dbg = .{ .expr = expr.idx, - } }, ModuleEnv.varFrom(expr.idx), region); + } }, region); mb_canonicailzed_stmt = CanonicalizedStatement{ .idx = stmt_idx, .free_vars = expr.free_vars }; }, @@ -5855,9 +5681,9 @@ pub fn canonicalizeBlockStatement(self: *Self, ast_stmt: AST.Statement, ast_stmt const expr = try self.canonicalizeExprOrMalformed(e_.body); // Create expect statement - const stmt_idx = try self.env.addStatementAndTypeVar(Statement{ .s_expect = .{ + const stmt_idx = try self.env.addStatement(Statement{ .s_expect = .{ .body = expr.idx, - } }, Content{ .structure = .empty_record }, region); + } }, region); mb_canonicailzed_stmt = CanonicalizedStatement{ .idx = stmt_idx, .free_vars = expr.free_vars }; }, @@ -5868,9 +5694,9 @@ pub fn canonicalizeBlockStatement(self: *Self, ast_stmt: AST.Statement, ast_stmt const expr = try 
self.canonicalizeExprOrMalformed(r.expr); // Create return statement - const stmt_idx = try self.env.addStatementAndTypeVarRedirect(Statement{ .s_return = .{ + const stmt_idx = try self.env.addStatement(Statement{ .s_return = .{ .expr = expr.idx, - } }, ModuleEnv.varFrom(expr.idx), region); + } }, region); mb_canonicailzed_stmt = CanonicalizedStatement{ .idx = stmt_idx, .free_vars = expr.free_vars }; }, @@ -5930,13 +5756,13 @@ pub fn canonicalizeBlockStatement(self: *Self, ast_stmt: AST.Statement, ast_stmt // If we have where clauses, create a separate s_type_anno statement const mb_type_anno_stmt_idx: ?Statement.Idx = inner_blk: { if (where_clauses != null) { - break :inner_blk try self.env.addStatementAndTypeVarRedirect(Statement{ + break :inner_blk try self.env.addStatement(Statement{ .s_type_anno = .{ .name = name_ident, .anno = type_anno_idx, .where = where_clauses, }, - }, ModuleEnv.varFrom(type_anno_idx), region); + }, region); } else { break :inner_blk null; } @@ -6007,10 +5833,10 @@ pub fn canonicalizeBlockDecl(self: *Self, d: AST.Statement.Decl, mb_last_anno: ? } }); // Create a reassign statement with the error expression - const reassign_idx = try self.env.addStatementAndTypeVarRedirect(Statement{ .s_reassign = .{ + const reassign_idx = try self.env.addStatement(Statement{ .s_reassign = .{ .pattern_idx = existing_pattern_idx, .expr = malformed_idx, - } }, ModuleEnv.varFrom(malformed_idx), ident_region); + } }, ident_region); return CanonicalizedStatement{ .idx = reassign_idx, .free_vars = null }; } @@ -6021,10 +5847,10 @@ pub fn canonicalizeBlockDecl(self: *Self, d: AST.Statement.Decl, mb_last_anno: ? 
const expr = try self.canonicalizeExprOrMalformed(d.body); // Create reassign statement - const reassign_idx = try self.env.addStatementAndTypeVarRedirect(Statement{ .s_reassign = .{ + const reassign_idx = try self.env.addStatement(Statement{ .s_reassign = .{ .pattern_idx = existing_pattern_idx, .expr = expr.idx, - } }, ModuleEnv.varFrom(expr.idx), ident_region); + } }, ident_region); return CanonicalizedStatement{ .idx = reassign_idx, .free_vars = expr.free_vars }; } @@ -6069,11 +5895,11 @@ pub fn canonicalizeBlockDecl(self: *Self, d: AST.Statement.Decl, mb_last_anno: ? const expr = try self.canonicalizeExprOrMalformed(d.body); // Create a declaration statement - const stmt_idx = try self.env.addStatementAndTypeVarRedirect(Statement{ .s_decl = .{ + const stmt_idx = try self.env.addStatement(Statement{ .s_decl = .{ .pattern = pattern_idx, .expr = expr.idx, .anno = mb_validated_anno, - } }, ModuleEnv.varFrom(expr.idx), region); + } }, region); return CanonicalizedStatement{ .idx = stmt_idx, .free_vars = expr.free_vars }; } @@ -6190,14 +6016,13 @@ fn currentScopeIdx(self: *Self) usize { /// This will be used later for builtins like Num.nan, Num.infinity, etc. 
pub fn addNonFiniteFloat(self: *Self, value: f64, region: base.Region) !Expr.Idx { // then in the final slot the actual expr is inserted - const expr_idx = try self.env.addExprAndTypeVar( + const expr_idx = try self.env.addExpr( CIR.Expr{ .e_frac_f64 = .{ .value = value, .has_suffix = false, }, }, - .err, region, ); @@ -6926,16 +6751,16 @@ fn canonicalizeWhereClause(self: *Self, ast_where_idx: AST.WhereClause.Idx, type defer self.env.gpa.free(module_text); const module_name = try self.env.insertIdent(Ident.for_text(module_text)); - const external_type_var = try self.env.addTypeSlotAndTypeVar(@enumFromInt(0), .{ .flex_var = null }, region, TypeVar); + const external_type_var = try self.env.addTypeSlot(@enumFromInt(0), region, TypeVar); const external_decl = try self.createExternalDeclaration(qualified_name, module_name, method_ident, .value, external_type_var, region); - return try self.env.addWhereClauseAndTypeVar(WhereClause{ .mod_method = .{ + return try self.env.addWhereClause(WhereClause{ .mod_method = .{ .var_name = var_ident, .method_name = method_ident, .args = args_span, .ret_anno = ret_anno, .external_decl = external_decl, - } }, .{ .flex_var = null }, region); + } }, region); }, .mod_alias => |ma| { const region = self.parse_ir.tokenizedRegionToRegion(ma.region); @@ -6971,23 +6796,23 @@ fn canonicalizeWhereClause(self: *Self, ast_where_idx: AST.WhereClause.Idx, type defer self.env.gpa.free(module_text); const module_name = try self.env.insertIdent(Ident.for_text(module_text)); - const external_type_var = try self.env.addTypeSlotAndTypeVar(@enumFromInt(0), .{ .flex_var = null }, region, TypeVar); + const external_type_var = try self.env.addTypeSlot(@enumFromInt(0), region, TypeVar); const external_decl = try self.createExternalDeclaration(qualified_name, module_name, alias_ident, .type, external_type_var, region); - return try self.env.addWhereClauseAndTypeVar(WhereClause{ .mod_alias = .{ + return try self.env.addWhereClause(WhereClause{ .mod_alias = .{ 
.var_name = var_ident, .alias_name = alias_ident, .external_decl = external_decl, - } }, .{ .flex_var = null }, region); + } }, region); }, .malformed => |m| { const region = self.parse_ir.tokenizedRegionToRegion(m.region); - const diagnostic = try self.env.addDiagnosticAndTypeVar(Diagnostic{ .malformed_where_clause = .{ + const diagnostic = try self.env.addDiagnostic(Diagnostic{ .malformed_where_clause = .{ .region = region, - } }, .err); - return try self.env.addWhereClauseAndTypeVar(WhereClause{ .malformed = .{ + } }); + return try self.env.addWhereClause(WhereClause{ .malformed = .{ .diagnostic = diagnostic, - } }, .{ .flex_var = null }, region); + } }, region); }, } } @@ -7003,11 +6828,11 @@ fn createAnnotationFromTypeAnno(self: *Self, type_anno_idx: TypeAnno.Idx, region // TODO: Capture where clauses const annotation = CIR.Annotation{ .type_anno = type_anno_idx, - .signature = try self.env.addTypeSlotAndTypeVar(@enumFromInt(0), .err, region, TypeVar), + .signature = try self.env.addTypeSlot(@enumFromInt(0), region, TypeVar), }; // Add to NodeStore and return the index - const annotation_idx = try self.env.addAnnotationAndTypeVarRedirect(annotation, ModuleEnv.varFrom(type_anno_idx), region); + const annotation_idx = try self.env.addAnnotation(annotation, region); return annotation_idx; } @@ -7082,11 +6907,11 @@ fn tryModuleQualifiedLookup(self: *Self, field_access: AST.BinOp) std.mem.Alloca } else 0; // Create the e_lookup_external expression with Import.Idx - const expr_idx = try self.env.addExprAndTypeVar(CIR.Expr{ .e_lookup_external = .{ + const expr_idx = try self.env.addExpr(CIR.Expr{ .e_lookup_external = .{ .module_idx = import_idx, .target_node_idx = target_node_idx, .region = region, - } }, Content{ .flex_var = null }, region); + } }, region); return expr_idx; } @@ -7114,7 +6939,7 @@ fn canonicalizeRegularFieldAccess(self: *Self, field_access: AST.BinOp) std.mem. 
}, }; - const expr_idx = try self.env.addExprAndTypeVar(dot_access_expr, Content{ .flex_var = null }, self.parse_ir.tokenizedRegionToRegion(field_access.region)); + const expr_idx = try self.env.addExpr(dot_access_expr, self.parse_ir.tokenizedRegionToRegion(field_access.region)); return expr_idx; } diff --git a/src/canonicalize/ModuleEnv.zig b/src/canonicalize/ModuleEnv.zig index 3571988949..fc58edbc89 100644 --- a/src/canonicalize/ModuleEnv.zig +++ b/src/canonicalize/ModuleEnv.zig @@ -136,15 +136,15 @@ pub fn freezeInterners(self: *Self) void { /// Records a diagnostic error during canonicalization without blocking compilation. pub fn pushDiagnostic(self: *Self, reason: CIR.Diagnostic) std.mem.Allocator.Error!void { - _ = try self.addDiagnosticAndTypeVar(reason, .err); + _ = try self.addDiagnostic(reason); } /// Creates a malformed node that represents a runtime error in the IR. pub fn pushMalformed(self: *Self, comptime RetIdx: type, reason: CIR.Diagnostic) std.mem.Allocator.Error!RetIdx { comptime if (!isCastable(RetIdx)) @compileError("Idx type " ++ @typeName(RetIdx) ++ " is not castable"); - const diag_idx = try self.addDiagnosticAndTypeVar(reason, .err); + const diag_idx = try self.addDiagnostic(reason); const region = getDiagnosticRegion(reason); - const malformed_idx = try self.addMalformedAndTypeVar(diag_idx, .err, region); + const malformed_idx = try self.addMalformed(diag_idx, region); return castIdx(Node.Idx, RetIdx, malformed_idx); } @@ -1519,17 +1519,16 @@ pub fn containsExposedById(self: *const Self, ident_idx: Ident.Idx) bool { return self.common.exposed_items.containsById(self.gpa, @bitCast(ident_idx)); } -/// Assert that nodes, regions and types are all in sync +/// Assert that nodes and regions are in sync pub inline fn debugAssertArraysInSync(self: *const Self) void { if (builtin.mode == .Debug) { const cir_nodes = self.store.nodes.items.len; const region_nodes = self.store.regions.len(); - const type_nodes = self.types.len(); - if (!(cir_nodes 
== region_nodes and region_nodes == type_nodes)) { + if (!(cir_nodes == region_nodes)) { std.debug.panic( - "Arrays out of sync:\n cir_nodes={}\n region_nodes={}\n type_nodes={}\n", - .{ cir_nodes, region_nodes, type_nodes }, + "Arrays out of sync:\n cir_nodes={}\n region_nodes={}\n", + .{ cir_nodes, region_nodes }, ); } } @@ -1550,311 +1549,173 @@ inline fn debugAssertIdxsEql(comptime desc: []const u8, idx1: anytype, idx2: any } } -/// Add a new expression and type variable. -/// This function asserts that the types array and the nodes are in sync. -pub fn addDefAndTypeVar(self: *Self, expr: CIR.Def, content: types_mod.Content, region: Region) std.mem.Allocator.Error!CIR.Def.Idx { +/// Add a new expression to the node store. +/// This function asserts that the nodes and regions are in sync. +pub fn addDef(self: *Self, expr: CIR.Def, region: Region) std.mem.Allocator.Error!CIR.Def.Idx { const expr_idx = try self.store.addDef(expr, region); - const expr_var = try self.types.freshFromContent(content); - debugAssertIdxsEql("self", expr_idx, expr_var); self.debugAssertArraysInSync(); return expr_idx; } -/// Add a new expression and type variable. -/// This function asserts that the types array and the nodes are in sync. -pub fn addTypeHeaderAndTypeVar(self: *Self, expr: CIR.TypeHeader, content: types_mod.Content, region: Region) std.mem.Allocator.Error!CIR.TypeHeader.Idx { +/// Add a new type header to the node store. +/// This function asserts that the nodes and regions are in sync. +pub fn addTypeHeader(self: *Self, expr: CIR.TypeHeader, region: Region) std.mem.Allocator.Error!CIR.TypeHeader.Idx { const expr_idx = try self.store.addTypeHeader(expr, region); - const expr_var = try self.types.freshFromContent(content); - debugAssertIdxsEql("addTypeHeaderAndTypeVar", expr_idx, expr_var); self.debugAssertArraysInSync(); return expr_idx; } -/// Add a new expression and type variable. -/// This function asserts that the types array and the nodes are in sync. 
-pub fn addStatementAndTypeVar(self: *Self, expr: CIR.Statement, content: types_mod.Content, region: Region) std.mem.Allocator.Error!CIR.Statement.Idx { +/// Add a new statement to the node store. +/// This function asserts that the nodes and regions are in sync. +pub fn addStatement(self: *Self, expr: CIR.Statement, region: Region) std.mem.Allocator.Error!CIR.Statement.Idx { const expr_idx = try self.store.addStatement(expr, region); - const expr_var = try self.types.freshFromContent(content); - debugAssertIdxsEql("addStatementAndTypeVar", expr_idx, expr_var); self.debugAssertArraysInSync(); return expr_idx; } -/// Add a new expression and type variable. -/// This function asserts that the types array and the nodes are in sync. -pub fn addStatementAndTypeVarRedirect(self: *Self, expr: CIR.Statement, redirect_to: TypeVar, region: Region) std.mem.Allocator.Error!CIR.Statement.Idx { - const expr_idx = try self.store.addStatement(expr, region); - const expr_var = try self.types.freshRedirect(redirect_to); - debugAssertIdxsEql("addStatementAndTypeVarRedirect", expr_idx, expr_var); - self.debugAssertArraysInSync(); - return expr_idx; -} -/// Add a new expression and type variable. -/// This function asserts that the types array and the nodes are in sync. -pub fn addPatternAndTypeVar(self: *Self, expr: CIR.Pattern, content: types_mod.Content, region: Region) std.mem.Allocator.Error!CIR.Pattern.Idx { +/// Add a new pattern to the node store. +/// This function asserts that the nodes and regions are in sync. +pub fn addPattern(self: *Self, expr: CIR.Pattern, region: Region) std.mem.Allocator.Error!CIR.Pattern.Idx { const expr_idx = try self.store.addPattern(expr, region); - const expr_var = try self.types.freshFromContent(content); - debugAssertIdxsEql("addPatternAndTypeVar", expr_idx, expr_var); self.debugAssertArraysInSync(); return expr_idx; } -/// Add a new expression and type variable. -/// This function asserts that the types array and the nodes are in sync. 
-pub fn addPatternAndTypeVarRedirect(self: *Self, expr: CIR.Pattern, redirect_to: TypeVar, region: Region) std.mem.Allocator.Error!CIR.Pattern.Idx { - const expr_idx = try self.store.addPattern(expr, region); - const expr_var = try self.types.freshRedirect(redirect_to); - debugAssertIdxsEql("addPatternAndTypeVar", expr_idx, expr_var); - self.debugAssertArraysInSync(); - return expr_idx; -} - -/// Add a new expression and type variable. -/// This function asserts that the types array and the nodes are in sync. -pub fn addExprAndTypeVar(self: *Self, expr: CIR.Expr, content: types_mod.Content, region: Region) std.mem.Allocator.Error!CIR.Expr.Idx { +/// Add a new expression to the node store. +/// This function asserts that the nodes and regions are in sync. +pub fn addExpr(self: *Self, expr: CIR.Expr, region: Region) std.mem.Allocator.Error!CIR.Expr.Idx { const expr_idx = try self.store.addExpr(expr, region); - const expr_var = try self.types.freshFromContent(content); - debugAssertIdxsEql("addExprAndTypeVar", expr_idx, expr_var); self.debugAssertArraysInSync(); return expr_idx; } -/// Add a new expression and type variable. -/// This function asserts that the types array and the nodes are in sync. -pub fn addExprAndTypeVarRedirect(self: *Self, expr: CIR.Expr, redirect_to: TypeVar, region: Region) std.mem.Allocator.Error!CIR.Expr.Idx { - const expr_idx = try self.store.addExpr(expr, region); - const expr_var = try self.types.freshRedirect(redirect_to); - debugAssertIdxsEql("addExprAndTypeVarRedirect", expr_idx, expr_var); - self.debugAssertArraysInSync(); - return expr_idx; -} - -/// Add a new capture and type variable. -/// This function asserts that the types array and the nodes are in sync. -pub fn addCaptureAndTypeVar(self: *Self, capture: CIR.Expr.Capture, content: types_mod.Content, region: Region) std.mem.Allocator.Error!CIR.Expr.Capture.Idx { +/// Add a new capture to the node store. +/// This function asserts that the nodes and regions are in sync. 
+pub fn addCapture(self: *Self, capture: CIR.Expr.Capture, region: Region) std.mem.Allocator.Error!CIR.Expr.Capture.Idx { const capture_idx = try self.store.addCapture(capture, region); - const capture_var = try self.types.freshFromContent(content); - debugAssertIdxsEql("addCaptureAndTypeVar", capture_idx, capture_var); self.debugAssertArraysInSync(); return capture_idx; } -/// Add a new expression and type variable. -/// This function asserts that the types array and the nodes are in sync. -pub fn addRecordFieldAndTypeVar(self: *Self, expr: CIR.RecordField, content: types_mod.Content, region: Region) std.mem.Allocator.Error!CIR.RecordField.Idx { +/// Add a new record field to the node store. +/// This function asserts that the nodes and regions are in sync. +pub fn addRecordField(self: *Self, expr: CIR.RecordField, region: Region) std.mem.Allocator.Error!CIR.RecordField.Idx { const expr_idx = try self.store.addRecordField(expr, region); - const expr_var = try self.types.freshFromContent(content); - debugAssertIdxsEql("addRecordFieldAndTypeVar", expr_idx, expr_var); self.debugAssertArraysInSync(); return expr_idx; } -/// Add a new expression and type variable. -/// This function asserts that the types array and the nodes are in sync. -pub fn addRecordDestructAndTypeVar(self: *Self, expr: CIR.Pattern.RecordDestruct, content: types_mod.Content, region: Region) std.mem.Allocator.Error!CIR.Pattern.RecordDestruct.Idx { +/// Add a new record destructuring to the node store. +/// This function asserts that the nodes and regions are in sync. 
+pub fn addRecordDestruct(self: *Self, expr: CIR.Pattern.RecordDestruct, region: Region) std.mem.Allocator.Error!CIR.Pattern.RecordDestruct.Idx { const expr_idx = try self.store.addRecordDestruct(expr, region); - const expr_var = try self.types.freshFromContent(content); - debugAssertIdxsEql("addRecordDestructorAndTypeVar", expr_idx, expr_var); self.debugAssertArraysInSync(); return expr_idx; } -/// Add a new expression and type variable. -/// This function asserts that the types array and the nodes are in sync. -pub fn addIfBranchAndTypeVar(self: *Self, expr: CIR.Expr.IfBranch, content: types_mod.Content, region: Region) std.mem.Allocator.Error!CIR.Expr.IfBranch.Idx { +/// Adds a new if branch to the store. +/// This function asserts that the nodes and regions are in sync. +pub fn addIfBranch(self: *Self, expr: CIR.Expr.IfBranch, region: Region) std.mem.Allocator.Error!CIR.Expr.IfBranch.Idx { const expr_idx = try self.store.addIfBranch(expr, region); - const expr_var = try self.types.freshFromContent(content); - debugAssertIdxsEql("addIfBranchAndTypeVar", expr_idx, expr_var); self.debugAssertArraysInSync(); return expr_idx; } -/// Add a new expression and type variable. -/// This function asserts that the types array and the nodes are in sync. -pub fn addMatchBranchAndTypeVar(self: *Self, expr: CIR.Expr.Match.Branch, content: types_mod.Content, region: Region) std.mem.Allocator.Error!CIR.Expr.Match.Branch.Idx { +/// Add a new match branch to the node store. +/// This function asserts that the nodes and regions are in sync. +pub fn addMatchBranch(self: *Self, expr: CIR.Expr.Match.Branch, region: Region) std.mem.Allocator.Error!CIR.Expr.Match.Branch.Idx { const expr_idx = try self.store.addMatchBranch(expr, region); - const expr_var = try self.types.freshFromContent(content); - debugAssertIdxsEql("addMatchBranchAndTypeVar", expr_idx, expr_var); self.debugAssertArraysInSync(); return expr_idx; } -/// Add a new expression and type variable. 
-/// This function asserts that the types array and the nodes are in sync. -pub fn addWhereClauseAndTypeVar(self: *Self, expr: CIR.WhereClause, content: types_mod.Content, region: Region) std.mem.Allocator.Error!CIR.WhereClause.Idx { +/// Add a new where clause to the node store. +/// This function asserts that the nodes and regions are in sync. +pub fn addWhereClause(self: *Self, expr: CIR.WhereClause, region: Region) std.mem.Allocator.Error!CIR.WhereClause.Idx { const expr_idx = try self.store.addWhereClause(expr, region); - const expr_var = try self.types.freshFromContent(content); - debugAssertIdxsEql("addWhereClauseAndTypeVar", expr_idx, expr_var); self.debugAssertArraysInSync(); return expr_idx; } -/// Add a new expression and type variable. -/// This function asserts that the types array and the nodes are in sync. -pub fn addTypeAnnoAndTypeVar(self: *Self, expr: CIR.TypeAnno, content: types_mod.Content, region: Region) std.mem.Allocator.Error!CIR.TypeAnno.Idx { +/// Add a new type annotation to the node store. +/// This function asserts that the nodes and regions are in sync. +pub fn addTypeAnno(self: *Self, expr: CIR.TypeAnno, region: Region) std.mem.Allocator.Error!CIR.TypeAnno.Idx { const expr_idx = try self.store.addTypeAnno(expr, region); - const expr_var = try self.types.freshFromContent(content); - debugAssertIdxsEql("addTypeAnnoAndTypeVar", expr_idx, expr_var); self.debugAssertArraysInSync(); return expr_idx; } -/// Add a new expression and type variable. -/// This function asserts that the types array and the nodes are in sync. 
-pub fn addTypeAnnoAndTypeVarRedirect(self: *Self, expr: CIR.TypeAnno, redirect_to: TypeVar, region: Region) std.mem.Allocator.Error!CIR.TypeAnno.Idx { - const expr_idx = try self.store.addTypeAnno(expr, region); - const expr_var = try self.types.freshRedirect(redirect_to); - debugAssertIdxsEql("addTypeAnnoAndTypeVarRedirect", expr_idx, expr_var); - self.debugAssertArraysInSync(); - return expr_idx; -} - -/// Add a new expression and type variable. -/// This function asserts that the types array and the nodes are in sync. -pub fn addAnnotationAndTypeVar(self: *Self, expr: CIR.Annotation, content: types_mod.Content, region: Region) std.mem.Allocator.Error!CIR.Annotation.Idx { +/// Add a new annotation to the node store. +/// This function asserts that the nodes and regions are in sync. +pub fn addAnnotation(self: *Self, expr: CIR.Annotation, region: Region) std.mem.Allocator.Error!CIR.Annotation.Idx { const expr_idx = try self.store.addAnnotation(expr, region); - const expr_var = try self.types.freshFromContent(content); - debugAssertIdxsEql("addAnnotationAndTypeVar", expr_idx, expr_var); self.debugAssertArraysInSync(); return expr_idx; } -/// Add a new expression and type variable. -/// This function asserts that the types array and the nodes are in sync. -pub fn addAnnotationAndTypeVarRedirect(self: *Self, expr: CIR.Annotation, redirect_to: TypeVar, region: Region) std.mem.Allocator.Error!CIR.Annotation.Idx { - const expr_idx = try self.store.addAnnotation(expr, region); - const expr_var = try self.types.freshRedirect(redirect_to); - debugAssertIdxsEql("addAnnotationAndTypeVar", expr_idx, expr_var); - self.debugAssertArraysInSync(); - return expr_idx; -} - -/// Add a new expression and type variable. -/// This function asserts that the types array and the nodes are in sync. 
-pub fn addAnnoRecordFieldAndTypeVar(self: *Self, expr: CIR.TypeAnno.RecordField, content: types_mod.Content, region: Region) std.mem.Allocator.Error!CIR.TypeAnno.RecordField.Idx { +/// Add a new record field to the node store. +/// This function asserts that the nodes and regions are in sync. +pub fn addAnnoRecordField(self: *Self, expr: CIR.TypeAnno.RecordField, region: Region) std.mem.Allocator.Error!CIR.TypeAnno.RecordField.Idx { const expr_idx = try self.store.addAnnoRecordField(expr, region); - const expr_var = try self.types.freshFromContent(content); - debugAssertIdxsEql("addAnnoRecordFieldAndTypeVar", expr_idx, expr_var); self.debugAssertArraysInSync(); return expr_idx; } -/// Add a new expression and type variable. -/// This function asserts that the types array and the nodes are in sync. -pub fn addAnnoRecordFieldAndTypeVarRedirect(self: *Self, expr: CIR.TypeAnno.RecordField, redirect_to: TypeVar, region: Region) std.mem.Allocator.Error!CIR.TypeAnno.RecordField.Idx { - const expr_idx = try self.store.addAnnoRecordField(expr, region); - const expr_var = try self.types.freshRedirect(redirect_to); - debugAssertIdxsEql("addAnnoRecordFieldAndTypeVar", expr_idx, expr_var); - self.debugAssertArraysInSync(); - return expr_idx; -} - -/// Add a new expression and type variable. -/// This function asserts that the types array and the nodes are in sync. -pub fn addExposedItemAndTypeVar(self: *Self, expr: CIR.ExposedItem, content: types_mod.Content, region: Region) std.mem.Allocator.Error!CIR.ExposedItem.Idx { +/// Add a new exposed item to the node store. +/// This function asserts that the nodes and regions are in sync. 
+pub fn addExposedItem(self: *Self, expr: CIR.ExposedItem, region: Region) std.mem.Allocator.Error!CIR.ExposedItem.Idx { const expr_idx = try self.store.addExposedItem(expr, region); - const expr_var = try self.types.freshFromContent(content); - debugAssertIdxsEql("addExposedItemAndTypeVar", expr_idx, expr_var); self.debugAssertArraysInSync(); return expr_idx; } -/// Add a diagnostic without creating a corresponding type variable. +/// Add a diagnostic. +/// This function asserts that the nodes and regions are in sync. pub fn addDiagnostic(self: *Self, reason: CIR.Diagnostic) std.mem.Allocator.Error!CIR.Diagnostic.Idx { - return self.store.addDiagnostic(reason); -} - -/// Add a new expression and type variable. -/// This function asserts that the types array and the nodes are in sync. -pub fn addDiagnosticAndTypeVar(self: *Self, reason: CIR.Diagnostic, content: types_mod.Content) std.mem.Allocator.Error!CIR.Diagnostic.Idx { const expr_idx = try self.store.addDiagnostic(reason); - const expr_var = try self.types.freshFromContent(content); - debugAssertIdxsEql("addDiagnosticAndTypeVar", expr_idx, expr_var); self.debugAssertArraysInSync(); return expr_idx; } -/// Add a new expression and type variable. -/// This function asserts that the types array and the nodes are in sync. -pub fn addMalformedAndTypeVar(self: *Self, diagnostic_idx: CIR.Diagnostic.Idx, content: types_mod.Content, region: Region) std.mem.Allocator.Error!CIR.Node.Idx { +/// Add a new malformed node to the node store. +/// This function asserts that the nodes and regions are in sync. 
+pub fn addMalformed(self: *Self, diagnostic_idx: CIR.Diagnostic.Idx, region: Region) std.mem.Allocator.Error!CIR.Node.Idx { const malformed_idx = try self.store.addMalformed(diagnostic_idx, region); - const malformed_var = try self.types.freshFromContent(content); - debugAssertIdxsEql("addMalformedAndTypeVar", malformed_idx, malformed_var); self.debugAssertArraysInSync(); return malformed_idx; } -/// Add a new match branch pattern and type variable. -/// This function asserts that the types array and the nodes are in sync. -pub fn addMatchBranchPatternAndTypeVar(self: *Self, expr: CIR.Expr.Match.BranchPattern, content: types_mod.Content, region: Region) std.mem.Allocator.Error!CIR.Expr.Match.BranchPattern.Idx { +/// Add a new match branch pattern to the node store. +/// This function asserts that the nodes and regions are in sync. +pub fn addMatchBranchPattern(self: *Self, expr: CIR.Expr.Match.BranchPattern, region: Region) std.mem.Allocator.Error!CIR.Expr.Match.BranchPattern.Idx { const expr_idx = try self.store.addMatchBranchPattern(expr, region); - const expr_var = try self.types.freshFromContent(content); - debugAssertIdxsEql("addMatchBranchPatternAndTypeVar", expr_idx, expr_var); self.debugAssertArraysInSync(); return expr_idx; } -/// Add a new pattern record field and type variable. -/// This function asserts that the types array and the nodes are in sync. -pub fn addPatternRecordFieldAndTypeVar(self: *Self, expr: CIR.PatternRecordField, content: types_mod.Content, region: Region) std.mem.Allocator.Error!CIR.PatternRecordField.Idx { +/// Add a new pattern record field to the node store. +/// This function asserts that the nodes and regions are in sync. 
+pub fn addPatternRecordField(self: *Self, expr: CIR.PatternRecordField, region: Region) std.mem.Allocator.Error!CIR.PatternRecordField.Idx { _ = region; const expr_idx = try self.store.addPatternRecordField(expr); - const expr_var = try self.types.freshFromContent(content); - debugAssertIdxsEql("addPatternRecordFieldAndTypeVar", expr_idx, expr_var); self.debugAssertArraysInSync(); return expr_idx; } -/// Add a new expression and type variable. -/// This function asserts that the types array and the nodes are in sync. -pub fn addTypeSlotAndTypeVar( +/// Add a new type variable to the node store. +/// This function asserts that the nodes and regions are in sync. +pub fn addTypeSlot( self: *Self, parent_node: CIR.Node.Idx, - content: types_mod.Content, region: Region, comptime RetIdx: type, ) std.mem.Allocator.Error!RetIdx { comptime if (!isCastable(RetIdx)) @compileError("Idx type " ++ @typeName(RetIdx) ++ " is not castable"); const node_idx = try self.store.addTypeVarSlot(parent_node, region); - const node_var = try self.types.freshFromContent(content); - debugAssertIdxsEql("addTypeSlotAndTypeVar", node_idx, node_var); self.debugAssertArraysInSync(); return @enumFromInt(@intFromEnum(node_idx)); } -/// Add a new expression and type variable. -/// This function asserts that the types array and the nodes are in sync. -pub fn addTypeSlotAndTypeVarRedirect( - self: *Self, - parent_node: CIR.Node.Idx, - redirect_to: TypeVar, - region: Region, - comptime RetIdx: type, -) std.mem.Allocator.Error!RetIdx { - comptime if (!isCastable(RetIdx)) @compileError("Idx type " ++ @typeName(RetIdx) ++ " is not castable"); - const node_idx = try self.store.addTypeVarSlot(parent_node, region); - const node_var = try self.types.freshRedirect(redirect_to); - debugAssertIdxsEql("addTypeSlotAndTypeVarRedirect", node_idx, node_var); - self.debugAssertArraysInSync(); - return @enumFromInt(@intFromEnum(node_idx)); -} - -/// Function that redirects an existing node to the provided var. 
-/// Assert that the requested idx in in bounds -pub fn redirectTypeTo( - self: *Self, - comptime FromIdx: type, - at_idx: FromIdx, - redirect_to: types_mod.Var, -) std.mem.Allocator.Error!void { - comptime if (!isCastable(FromIdx)) @compileError("Idx type " ++ @typeName(FromIdx) ++ " is not castable"); - self.debugAssertArraysInSync(); - std.debug.assert(@intFromEnum(at_idx) < self.types.len()); - - const var_ = varFrom(at_idx); - try self.types.setVarRedirect(var_, redirect_to); -} - /// Adds an external declaration and returns its index pub fn pushExternalDecl(self: *Self, decl: CIR.ExternalDecl) std.mem.Allocator.Error!CIR.ExternalDecl.Idx { const idx = @as(u32, @intCast(self.external_decls.len())); diff --git a/src/canonicalize/test/int_test.zig b/src/canonicalize/test/int_test.zig index 5487b30db1..679ea1f4ba 100644 --- a/src/canonicalize/test/int_test.zig +++ b/src/canonicalize/test/int_test.zig @@ -695,7 +695,7 @@ test "numeric literal patterns use pattern idx as type var" { }, }; - const pattern_idx = try env.addPatternAndTypeVar(int_pattern, .err, base.Region.zero()); + const pattern_idx = try env.addPattern(int_pattern, base.Region.zero()); // Verify the stored pattern const stored_pattern = env.store.getPattern(pattern_idx); @@ -718,7 +718,7 @@ test "numeric literal patterns use pattern idx as type var" { }, }; - const pattern_idx = try env.addPatternAndTypeVar(dec_pattern, .err, base.Region.zero()); + const pattern_idx = try env.addPattern(dec_pattern, base.Region.zero()); // Verify the stored pattern const stored_pattern = env.store.getPattern(pattern_idx); diff --git a/src/compile/test/module_env_test.zig b/src/compile/test/module_env_test.zig index df0be60744..ca896c292e 100644 --- a/src/compile/test/module_env_test.zig +++ b/src/compile/test/module_env_test.zig @@ -389,10 +389,10 @@ test "ModuleEnv pushExprTypesToSExprTree extracts and formats types" { const str_literal_idx = try env.insertString("hello"); // Add a string segment expression - const 
segment_idx = try env.addExprAndTypeVar(.{ .e_str_segment = .{ .literal = str_literal_idx } }, .{ .structure = .str }, base.Region.from_raw_offsets(0, 5)); + const segment_idx = try env.addExpr(.{ .e_str_segment = .{ .literal = str_literal_idx } }, base.Region.from_raw_offsets(0, 5)); // Now create a string expression that references the segment - const expr_idx = try env.addExprAndTypeVar(.{ .e_str = .{ .span = Expr.Span{ .span = base.DataSpan{ .start = @intFromEnum(segment_idx), .len = 1 } } } }, .{ .structure = .str }, base.Region.from_raw_offsets(0, 5)); + const expr_idx = try env.addExpr(.{ .e_str = .{ .span = Expr.Span{ .span = base.DataSpan{ .start = @intFromEnum(segment_idx), .len = 1 } } } }, base.Region.from_raw_offsets(0, 5)); // Create an S-expression tree var tree = base.SExprTree.init(gpa); From d63e9f7a409166628bab67e4f467ec75f3b93436 Mon Sep 17 00:00:00 2001 From: Fabian Schmalzried Date: Thu, 9 Oct 2025 11:26:32 +0000 Subject: [PATCH 02/17] move filling type store from init to check --- src/check/Check.zig | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/src/check/Check.zig b/src/check/Check.zig index 4f1aaa124d..943aeae636 100644 --- a/src/check/Check.zig +++ b/src/check/Check.zig @@ -158,6 +158,13 @@ pub inline fn debugAssertArraysInSync(self: *const Self) void { } } +/// Fills the type store with fresh variables up to the number of regions +inline fn ensureTypeStoreIsFilled(self: *Self) Allocator.Error!void { + for (self.types.len()..self.regions.len()) |_| { + _ = try self.types.fresh(); + } +} + // import caches // /// Key for the import cache: module index + expression index in that module @@ -198,7 +205,7 @@ const ImportCache = std.HashMapUnmanaged(ImportCacheKey, Var, struct { // unify // /// Unify two types -pub fn unify(self: *Self, a: Var, b: Var, rank: Rank) std.mem.Allocator.Error!unifier.Result { +fn unify(self: *Self, a: Var, b: Var, rank: Rank) std.mem.Allocator.Error!unifier.Result { const trace = 
tracy.trace(@src()); defer trace.end(); @@ -267,7 +274,7 @@ fn findConstraintOriginForVars(self: *Self, a: Var, b: Var) ?Var { /// Unify two variables where the second represents an annotation type. /// This sets from_annotation=true to ensure proper error region highlighting. -pub fn unifyFromAnno(self: *Self, a: Var, b: Var, rank: Rank) std.mem.Allocator.Error!unifier.Result { +fn unifyFromAnno(self: *Self, a: Var, b: Var, rank: Rank) std.mem.Allocator.Error!unifier.Result { const trace = tracy.trace(@src()); defer trace.end(); @@ -305,7 +312,7 @@ pub fn unifyFromAnno(self: *Self, a: Var, b: Var, rank: Rank) std.mem.Allocator. /// Unify two variables with a specific constraint origin for better error reporting. /// The constraint_origin_var should point to the expression that created the constraint. -pub fn unifyWithConstraintOrigin(self: *Self, a: Var, b: Var, constraint_origin_var: Var) std.mem.Allocator.Error!unifier.Result { +fn unifyWithConstraintOrigin(self: *Self, a: Var, b: Var, constraint_origin_var: Var) std.mem.Allocator.Error!unifier.Result { const trace = tracy.trace(@src()); defer trace.end(); @@ -512,6 +519,8 @@ pub fn checkFile(self: *Self) std.mem.Allocator.Error!void { const trace = tracy.trace(@src()); defer trace.end(); + try ensureTypeStoreIsFilled(self); + // First, iterate over the statements, generating types for each type declaration const builtin_stmts_slice = self.cir.store.sliceStatements(self.cir.builtin_statements); for (builtin_stmts_slice) |builtin_stmt_idx| { @@ -541,6 +550,7 @@ pub fn checkFile(self: *Self) std.mem.Allocator.Error!void { /// Check an expr for the repl pub fn checkExprRepl(self: *Self, expr_idx: CIR.Expr.Idx) std.mem.Allocator.Error!void { + try ensureTypeStoreIsFilled(self); // First, iterate over the statements, generating types for each type declaration const stms_slice = self.cir.store.sliceStatements(self.cir.builtin_statements); for (stms_slice) |stmt_idx| { From 69dd940f5cad4bc23f28e5b9e4fbefb1c907c8c4 Mon 
Sep 17 00:00:00 2001 From: Fabian Schmalzried Date: Fri, 10 Oct 2025 11:55:48 +0000 Subject: [PATCH 03/17] Add missing unification in match --- src/check/Check.zig | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/check/Check.zig b/src/check/Check.zig index 943aeae636..18d351a8a3 100644 --- a/src/check/Check.zig +++ b/src/check/Check.zig @@ -2916,6 +2916,9 @@ fn checkMatchExpr(self: *Self, expr_idx: CIR.Expr.Idx, rank: Rank, match: CIR.Ex does_fx = try self.checkExpr(first_branch.value, rank, .no_expectation) or does_fx; const branch_var = ModuleEnv.varFrom(first_branch.value); + // Unify the match expr to the first branch. TODO: I don't think this can fail? + _ = try self.unify(ModuleEnv.varFrom(expr_idx), branch_var, rank); + // Then iterate over the rest of the branches for (branch_idxs[1..], 1..) |branch_idx, branch_cur_index| { const branch = self.cir.store.getMatchBranch(branch_idx); From acab21f9de12d23d8f7065f55e83526297c454ba Mon Sep 17 00:00:00 2001 From: Fabian Schmalzried Date: Fri, 10 Oct 2025 12:00:10 +0000 Subject: [PATCH 04/17] add assert to make sure my assumption will hold --- src/check/Check.zig | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/check/Check.zig b/src/check/Check.zig index 18d351a8a3..98d3f04b78 100644 --- a/src/check/Check.zig +++ b/src/check/Check.zig @@ -2916,8 +2916,9 @@ fn checkMatchExpr(self: *Self, expr_idx: CIR.Expr.Idx, rank: Rank, match: CIR.Ex does_fx = try self.checkExpr(first_branch.value, rank, .no_expectation) or does_fx; const branch_var = ModuleEnv.varFrom(first_branch.value); - // Unify the match expr to the first branch. TODO: I don't think this can fail? - _ = try self.unify(ModuleEnv.varFrom(expr_idx), branch_var, rank); + // Unify the match expr to the first branch. + const match_result = try self.unify(ModuleEnv.varFrom(expr_idx), branch_var, rank); + std.debug.assert(match_result.isOk()); // Then iterate over the rest of the branches for (branch_idxs[1..], 1..) 
|branch_idx, branch_cur_index| { From 2f12f8acf45d0169cd042fffb4763568059f7833 Mon Sep 17 00:00:00 2001 From: Fabian Schmalzried Date: Fri, 10 Oct 2025 13:27:19 +0000 Subject: [PATCH 05/17] remove obsolete debugAssertArraysInSync --- src/snapshot_tool/main.zig | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/snapshot_tool/main.zig b/src/snapshot_tool/main.zig index 90249348f5..7ca7148f9c 100644 --- a/src/snapshot_tool/main.zig +++ b/src/snapshot_tool/main.zig @@ -1147,9 +1147,6 @@ fn processSnapshotContent( ); defer solver.deinit(); - // Assert that we have regions for every type variable - solver.debugAssertArraysInSync(); - if (maybe_expr_idx) |expr_idx| { _ = try solver.checkExprRepl(expr_idx.idx); } else { From cc2a993fe5b397690500ad775c38a68e9044802e Mon Sep 17 00:00:00 2001 From: Fabian Schmalzried Date: Fri, 10 Oct 2025 13:39:28 +0000 Subject: [PATCH 06/17] intcast to usize --- src/check/Check.zig | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/check/Check.zig b/src/check/Check.zig index 98d3f04b78..87dce3b629 100644 --- a/src/check/Check.zig +++ b/src/check/Check.zig @@ -160,7 +160,9 @@ pub inline fn debugAssertArraysInSync(self: *const Self) void { /// Fills the type store with fresh variables up to the number of regions inline fn ensureTypeStoreIsFilled(self: *Self) Allocator.Error!void { - for (self.types.len()..self.regions.len()) |_| { + const region_nodes: usize = @intCast(self.regions.len()); + const type_nodes: usize = @intCast(self.types.len()); + for (type_nodes..region_nodes) |_| { _ = try self.types.fresh(); } } From 6997450108893c18e9c05b3f8210428fae969609 Mon Sep 17 00:00:00 2001 From: Fabian Schmalzried Date: Mon, 13 Oct 2025 12:53:49 +0000 Subject: [PATCH 07/17] change Can record_test to not rely on types --- src/canonicalize/test/record_test.zig | 125 +++++++++++--------------- 1 file changed, 54 insertions(+), 71 deletions(-) diff --git a/src/canonicalize/test/record_test.zig 
b/src/canonicalize/test/record_test.zig index 87ee4cd896..cc440bf9cd 100644 --- a/src/canonicalize/test/record_test.zig +++ b/src/canonicalize/test/record_test.zig @@ -34,20 +34,14 @@ test "record literal uses record_unbound" { return error.CanonicalizeError; }; - // Get the type of the expression - const expr_var = @as(TypeVar, @enumFromInt(@intFromEnum(canonical_expr_idx.get_idx()))); - const resolved = env.types.resolveVar(expr_var); - - // Check that it's a record_unbound - switch (resolved.desc.content) { - .structure => |structure| switch (structure) { - .record_unbound => |fields| { - // Success! The record literal created a record_unbound type - try std.testing.expect(fields.len() == 2); - }, - else => return error.ExpectedRecordUnbound, + const canonical_expr = env.store.getExpr(canonical_expr_idx.idx); + // Check that it's a record + switch (canonical_expr) { + .e_record => |record| { + // Success! The record literal created a record + try std.testing.expect(record.fields.span.len == 2); }, - else => return error.ExpectedStructure, + else => return error.ExpectedRecord, } } @@ -71,19 +65,13 @@ test "record literal uses record_unbound" { return error.CanonicalizeError; }; - // Get the type of the expression - const expr_var = @as(TypeVar, @enumFromInt(@intFromEnum(canonical_expr_idx.get_idx()))); - const resolved = env.types.resolveVar(expr_var); - + const canonical_expr = env.store.getExpr(canonical_expr_idx.idx); // Check that it's an empty_record - switch (resolved.desc.content) { - .structure => |structure| switch (structure) { - .empty_record => { - // Success! Empty record literal created empty_record type - }, - else => return error.ExpectedEmptyRecord, + switch (canonical_expr) { + .e_empty_record => { + // Success! 
Empty record literal created empty_record }, - else => return error.ExpectedStructure, + else => return error.ExpectedEmptyRecord, } } @@ -108,25 +96,21 @@ test "record literal uses record_unbound" { return error.CanonicalizeError; }; - // Get the type of the expression - const expr_var = @as(TypeVar, @enumFromInt(@intFromEnum(canonical_expr_idx.get_idx()))); - const resolved = env.types.resolveVar(expr_var); + const canonical_expr = env.store.getExpr(canonical_expr_idx.idx); + // Check that it's a record + switch (canonical_expr) { + .e_record => |record| { + // Success! The record literal created a record + try std.testing.expect(record.fields.span.len == 1); - // Check that it's a record_unbound - switch (resolved.desc.content) { - .structure => |structure| switch (structure) { - .record_unbound => |fields| { - // Success! The record literal created a record_unbound type - try std.testing.expect(fields.len() == 1); + const cir_fields = env.store.sliceRecordFields(record.fields); - // Check the field - const fields_slice = env.types.getRecordFieldsSlice(fields); - const field_name = env.getIdent(fields_slice.get(0).name); - try std.testing.expectEqualStrings("value", field_name); - }, - else => return error.ExpectedRecordUnbound, + const cir_field = env.store.getRecordField(cir_fields[0]); + + const field_name = env.getIdent(cir_field.name); + try std.testing.expectEqualStrings("value", field_name); }, - else => return error.ExpectedStructure, + else => return error.ExpectedRecord, } } } @@ -152,25 +136,23 @@ test "record_unbound basic functionality" { return error.CanonicalizeError; }; - // Get the type of the expression - const expr_var = @as(TypeVar, @enumFromInt(@intFromEnum(canonical_expr_idx.get_idx()))); - const resolved = env.types.resolveVar(expr_var); + const canonical_expr = env.store.getExpr(canonical_expr_idx.idx); + // Check that it's a record + switch (canonical_expr) { + .e_record => |record| { + // Success! 
The record literal created a record + try std.testing.expect(record.fields.span.len == 2); - // Verify it starts as record_unbound - switch (resolved.desc.content) { - .structure => |structure| switch (structure) { - .record_unbound => |fields| { - // Success! Record literal created record_unbound type - try std.testing.expect(fields.len() == 2); + const cir_fields = env.store.sliceRecordFields(record.fields); - // Check field names - const field_slice = env.types.getRecordFieldsSlice(fields); - try std.testing.expectEqualStrings("x", env.getIdent(field_slice.get(0).name)); - try std.testing.expectEqualStrings("y", env.getIdent(field_slice.get(1).name)); - }, - else => return error.ExpectedRecordUnbound, + const cir_field_0 = env.store.getRecordField(cir_fields[0]); + const cir_field_1 = env.store.getRecordField(cir_fields[1]); + + // Check field names + try std.testing.expectEqualStrings("x", env.getIdent(cir_field_0.name)); + try std.testing.expectEqualStrings("y", env.getIdent(cir_field_1.name)); }, - else => return error.ExpectedStructure, + else => return error.ExpectedRecord, } } @@ -195,24 +177,25 @@ test "record_unbound with multiple fields" { return error.CanonicalizeError; }; - const expr_var = @as(TypeVar, @enumFromInt(@intFromEnum(canonical_expr_idx.get_idx()))); - const resolved = env.types.resolveVar(expr_var); + const canonical_expr = env.store.getExpr(canonical_expr_idx.idx); + // Check that it's a record + switch (canonical_expr) { + .e_record => |record| { + // Success! 
The record literal created a record + try std.testing.expect(record.fields.span.len == 3); - // Should be record_unbound - switch (resolved.desc.content) { - .structure => |s| switch (s) { - .record_unbound => |fields| { - try std.testing.expect(fields.len() == 3); + const cir_fields = env.store.sliceRecordFields(record.fields); - // Check field names - const field_slice = env.types.getRecordFieldsSlice(fields); - try std.testing.expectEqualStrings("a", env.getIdent(field_slice.get(0).name)); - try std.testing.expectEqualStrings("b", env.getIdent(field_slice.get(1).name)); - try std.testing.expectEqualStrings("c", env.getIdent(field_slice.get(2).name)); - }, - else => return error.ExpectedRecordUnbound, + const cir_field_0 = env.store.getRecordField(cir_fields[0]); + const cir_field_1 = env.store.getRecordField(cir_fields[1]); + const cir_field_2 = env.store.getRecordField(cir_fields[2]); + + // Check field names + try std.testing.expectEqualStrings("a", env.getIdent(cir_field_0.name)); + try std.testing.expectEqualStrings("b", env.getIdent(cir_field_1.name)); + try std.testing.expectEqualStrings("c", env.getIdent(cir_field_2.name)); }, - else => return error.ExpectedStructure, + else => return error.ExpectedRecord, } } From 52b17e2faf0d77128f46ef7f54260e67833db36d Mon Sep 17 00:00:00 2001 From: Fabian Schmalzried Date: Mon, 13 Oct 2025 13:09:04 +0000 Subject: [PATCH 08/17] remove unused scratch record fields code --- src/canonicalize/Can.zig | 17 ----------------- 1 file changed, 17 deletions(-) diff --git a/src/canonicalize/Can.zig b/src/canonicalize/Can.zig index 406cf35d97..fd00dbf00a 100644 --- a/src/canonicalize/Can.zig +++ b/src/canonicalize/Can.zig @@ -2518,23 +2518,6 @@ pub fn canonicalizeExpr( // Create span of the new scratch record fields const fields_span = try self.env.store.recordFieldSpanFrom(scratch_top); - // Create fresh type variables for each record field - // The type checker will unify these with the field expression types - const cir_fields 
= self.env.store.sliceRecordFields(fields_span); - - // Create fresh type variables for each field - const record_fields_top = self.scratch_record_fields.top(); - - for (cir_fields) |cir_field_idx| { - const cir_field = self.env.store.getRecordField(cir_field_idx); - try self.scratch_record_fields.append(self.env.gpa, types.RecordField{ - .name = cir_field.name, - .var_ = @enumFromInt(@intFromEnum(cir_field.value)), - }); - } - - // Shink the scratch array to it's original size - self.scratch_record_fields.clearFrom(record_fields_top); const expr_idx = try self.env.addExpr(CIR.Expr{ .e_record = .{ From d9be1119e7045b9fffc1c15bddc6aacf64c1cb3a Mon Sep 17 00:00:00 2001 From: Fabian Schmalzried Date: Mon, 13 Oct 2025 20:36:18 +0200 Subject: [PATCH 09/17] fix module env test --- src/compile/test/module_env_test.zig | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/compile/test/module_env_test.zig b/src/compile/test/module_env_test.zig index ca896c292e..79c0915ce4 100644 --- a/src/compile/test/module_env_test.zig +++ b/src/compile/test/module_env_test.zig @@ -2,6 +2,7 @@ const std = @import("std"); const base = @import("base"); const can = @import("can"); +const types = @import("types"); const collections = @import("collections"); const ModuleEnv = can.ModuleEnv; @@ -390,9 +391,11 @@ test "ModuleEnv pushExprTypesToSExprTree extracts and formats types" { // Add a string segment expression const segment_idx = try env.addExpr(.{ .e_str_segment = .{ .literal = str_literal_idx } }, base.Region.from_raw_offsets(0, 5)); + _ = try env.types.freshFromContent(.{ .structure = .str }); // Now create a string expression that references the segment const expr_idx = try env.addExpr(.{ .e_str = .{ .span = Expr.Span{ .span = base.DataSpan{ .start = @intFromEnum(segment_idx), .len = 1 } } } }, base.Region.from_raw_offsets(0, 5)); + _ = try env.types.freshFromContent(.{ .structure = .str }); // Create an S-expression tree var tree = base.SExprTree.init(gpa); From
41dc698d9f0eb41ea1b5f1977353c8f46060760a Mon Sep 17 00:00:00 2001 From: Fabian Schmalzried Date: Tue, 14 Oct 2025 07:28:54 +0000 Subject: [PATCH 10/17] fix wrong types in blocks fix by @jaredramirez --- src/check/Check.zig | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/src/check/Check.zig b/src/check/Check.zig index 87dce3b629..5210e31b3f 100644 --- a/src/check/Check.zig +++ b/src/check/Check.zig @@ -2165,10 +2165,19 @@ fn checkExpr(self: *Self, expr_idx: CIR.Expr.Idx, rank: types_mod.Rank, expected const check_mode = blk: { if (decl_stmt.anno) |anno_idx| { const annotation = self.cir.store.getAnnotation(anno_idx); + + // Generate the annotation type var in-place + self.seen_annos.clearRetainingCapacity(); try self.generateAnnoTypeInPlace(annotation.type_anno, .annotation); + + // Update the outer anno to redirect to the inner anno const anno_var = ModuleEnv.varFrom(anno_idx); + const type_anno_var = ModuleEnv.varFrom(annotation.type_anno); + try self.types.setVarRedirect(anno_var, type_anno_var); + + // Return the expectation break :blk Expected{ - .expected = .{ .var_ = anno_var, .from_annotation = true }, + .expected = .{ .var_ = type_anno_var, .from_annotation = true }, }; } else { break :blk Expected.no_expectation; From 29dc0fefb1a66020310740c73461e46576938c16 Mon Sep 17 00:00:00 2001 From: Fabian Schmalzried Date: Tue, 14 Oct 2025 08:03:40 +0000 Subject: [PATCH 11/17] run snapshot tool --- test/snapshots/can_import_exposing_types.md | 4 +-- test/snapshots/can_import_type_annotations.md | 4 +-- .../formatting/multiline/everything.md | 6 ++--- .../multiline_without_comma/everything.md | 6 ++--- .../formatting/singleline/everything.md | 6 ++--- .../singleline_with_comma/everything.md | 6 ++--- test/snapshots/fuzz_crash/fuzz_crash_023.md | 8 +++--- test/snapshots/fuzz_crash/fuzz_crash_028.md | Bin 71235 -> 71235 bytes test/snapshots/match_expr/pattern_as_basic.md | 2 +- .../snapshots/match_expr/pattern_as_nested.md | 2 +- 
.../nominal/nominal_associated_value_alias.md | 4 +-- test/snapshots/plume_package/Color.md | 24 +++++++++--------- .../function_record_parameter_capture.md | 2 +- test/snapshots/syntax_grab_bag.md | 8 +++--- test/snapshots/type_app_complex_nested.md | 12 ++++----- .../type_var_name_avoids_collision.md | 6 ++--- 16 files changed, 50 insertions(+), 50 deletions(-) diff --git a/test/snapshots/can_import_exposing_types.md b/test/snapshots/can_import_exposing_types.md index 3c4fa354c2..9b1c652448 100644 --- a/test/snapshots/can_import_exposing_types.md +++ b/test/snapshots/can_import_exposing_types.md @@ -744,8 +744,8 @@ combineResults = |jsonResult, httpStatus| (p-assign @40.1-40.15 (ident "handleResponse")) (e-closure @40.18-44.6 (captures - (capture @43.13-43.18 (ident "error")) - (capture @42.12-42.18 (ident "status"))) + (capture @42.12-42.18 (ident "status")) + (capture @43.13-43.18 (ident "error"))) (e-lambda @40.18-44.6 (args (p-assign @40.19-40.27 (ident "response"))) diff --git a/test/snapshots/can_import_type_annotations.md b/test/snapshots/can_import_type_annotations.md index 7f2f58a43e..6da0cb1bb7 100644 --- a/test/snapshots/can_import_type_annotations.md +++ b/test/snapshots/can_import_type_annotations.md @@ -490,9 +490,9 @@ combineResults = |result1, result2| (e-closure @29.18-37.6 (captures (capture @34.21-34.24 (ident "err")) - (capture @33.20-33.26 (ident "value2")) (capture @36.13-36.16 (ident "err")) - (capture @31.12-31.18 (ident "value1"))) + (capture @31.12-31.18 (ident "value1")) + (capture @33.20-33.26 (ident "value2"))) (e-lambda @29.18-37.6 (args (p-assign @29.19-29.26 (ident "result1")) diff --git a/test/snapshots/formatting/multiline/everything.md b/test/snapshots/formatting/multiline/everything.md index 75c5a8fe27..0539e5cc82 100644 --- a/test/snapshots/formatting/multiline/everything.md +++ b/test/snapshots/formatting/multiline/everything.md @@ -657,11 +657,11 @@ h = |x, y| { (p-assign @58.1-58.2 (ident "h")) (e-closure @58.5-108.2 
(captures - (capture @103.5-103.6 (ident "a")) + (capture @58.1-58.2 (ident "h")) + (capture @92.4-92.5 (ident "a")) (capture @97.5-97.6 (ident "a")) (capture @87.5-87.6 (ident "a")) - (capture @58.1-58.2 (ident "h")) - (capture @92.4-92.5 (ident "a"))) + (capture @103.5-103.6 (ident "a"))) (e-lambda @58.5-108.2 (args (p-assign @58.6-58.7 (ident "x")) diff --git a/test/snapshots/formatting/multiline_without_comma/everything.md b/test/snapshots/formatting/multiline_without_comma/everything.md index 88aa49f301..664fecb168 100644 --- a/test/snapshots/formatting/multiline_without_comma/everything.md +++ b/test/snapshots/formatting/multiline_without_comma/everything.md @@ -657,11 +657,11 @@ h = |x, y| { (p-assign @58.1-58.2 (ident "h")) (e-closure @58.5-108.2 (captures - (capture @103.5-103.6 (ident "a")) + (capture @58.1-58.2 (ident "h")) + (capture @92.4-92.5 (ident "a")) (capture @97.5-97.6 (ident "a")) (capture @87.5-87.6 (ident "a")) - (capture @58.1-58.2 (ident "h")) - (capture @92.4-92.5 (ident "a"))) + (capture @103.5-103.6 (ident "a"))) (e-lambda @58.5-108.2 (args (p-assign @58.6-58.7 (ident "x")) diff --git a/test/snapshots/formatting/singleline/everything.md b/test/snapshots/formatting/singleline/everything.md index 0d9933a2da..e560a6b36c 100644 --- a/test/snapshots/formatting/singleline/everything.md +++ b/test/snapshots/formatting/singleline/everything.md @@ -400,11 +400,11 @@ h = |x, y| { (p-assign @16.1-16.2 (ident "h")) (e-closure @16.5-29.2 (captures - (capture @27.7-27.8 (ident "a")) + (capture @16.1-16.2 (ident "h")) + (capture @25.6-25.7 (ident "a")) (capture @26.8-26.9 (ident "a")) (capture @24.7-24.8 (ident "a")) - (capture @16.1-16.2 (ident "h")) - (capture @25.6-25.7 (ident "a"))) + (capture @27.7-27.8 (ident "a"))) (e-lambda @16.5-29.2 (args (p-assign @16.6-16.7 (ident "x")) diff --git a/test/snapshots/formatting/singleline_with_comma/everything.md b/test/snapshots/formatting/singleline_with_comma/everything.md index bda600c575..b88124fcc6 
100644 --- a/test/snapshots/formatting/singleline_with_comma/everything.md +++ b/test/snapshots/formatting/singleline_with_comma/everything.md @@ -482,11 +482,11 @@ h = | (p-assign @16.1-16.2 (ident "h")) (e-closure @16.5-29.2 (captures - (capture @27.7-27.8 (ident "a")) + (capture @16.1-16.2 (ident "h")) + (capture @25.6-25.7 (ident "a")) (capture @26.8-26.9 (ident "a")) (capture @24.7-24.8 (ident "a")) - (capture @16.1-16.2 (ident "h")) - (capture @25.6-25.7 (ident "a"))) + (capture @27.7-27.8 (ident "a"))) (e-lambda @16.5-29.2 (args (p-assign @16.6-16.7 (ident "x")) diff --git a/test/snapshots/fuzz_crash/fuzz_crash_023.md b/test/snapshots/fuzz_crash/fuzz_crash_023.md index 94e07c286f..7f5047b077 100644 --- a/test/snapshots/fuzz_crash/fuzz_crash_023.md +++ b/test/snapshots/fuzz_crash/fuzz_crash_023.md @@ -1961,9 +1961,9 @@ expect { (p-assign @80.1-80.11 (ident "match_time")) (e-closure @80.14-138.3 (captures - (capture @86.4-86.5 (ident "x")) (capture @94.5-94.6 (ident "x")) - (capture @136.11-136.15 (ident "dude"))) + (capture @136.11-136.15 (ident "dude")) + (capture @86.4-86.5 (ident "x"))) (e-lambda @80.14-138.3 (args (p-assign @81.2-81.3 (ident "a")) @@ -2177,8 +2177,8 @@ expect { (p-assign @144.1-144.6 (ident "main!")) (e-closure @144.9-196.2 (captures - (capture @80.1-80.11 (ident "match_time")) - (capture @68.1-68.8 (ident "add_one"))) + (capture @68.1-68.8 (ident "add_one")) + (capture @80.1-80.11 (ident "match_time"))) (e-lambda @144.9-196.2 (args (p-underscore @144.10-144.11)) diff --git a/test/snapshots/fuzz_crash/fuzz_crash_028.md b/test/snapshots/fuzz_crash/fuzz_crash_028.md index e037f9b7c866fbc57b49b882b245161d17434c57..2c7be13497b2d5c4a44b9e271549731922da67e0 100644 GIT binary patch delta 26 kcmV+#0OkL~tOUcX1h9nYld9+-lXd7OlTZ2uv(M;sEyB_cN&o-= delta 22 ecmX@ShUM@YmJKa0CU1TrKDqRT{^nCJDs=&m0Ss~g diff --git a/test/snapshots/match_expr/pattern_as_basic.md b/test/snapshots/match_expr/pattern_as_basic.md index 2b79a2c532..9989048f06 100644 --- 
a/test/snapshots/match_expr/pattern_as_basic.md +++ b/test/snapshots/match_expr/pattern_as_basic.md @@ -92,5 +92,5 @@ match (1, 2) { ~~~ # TYPES ~~~clojure -(expr @1.1-3.2 (type "Error")) +(expr @1.1-3.2 (type "(Num(_size), Num(_size2))")) ~~~ diff --git a/test/snapshots/match_expr/pattern_as_nested.md b/test/snapshots/match_expr/pattern_as_nested.md index 52dec166af..b2310c8ca0 100644 --- a/test/snapshots/match_expr/pattern_as_nested.md +++ b/test/snapshots/match_expr/pattern_as_nested.md @@ -156,5 +156,5 @@ match person { ~~~ # TYPES ~~~clojure -(expr @1.1-4.2 (type "(Error, Error, Str)")) +(expr @1.1-4.2 (type "(Error, _field, Str)")) ~~~ diff --git a/test/snapshots/nominal/nominal_associated_value_alias.md b/test/snapshots/nominal/nominal_associated_value_alias.md index 527b7511f0..f3e02d91d4 100644 --- a/test/snapshots/nominal/nominal_associated_value_alias.md +++ b/test/snapshots/nominal/nominal_associated_value_alias.md @@ -100,13 +100,13 @@ result = myBar (inferred-types (defs (patt @7.1-7.6 (type "Num(_size)")) - (patt @10.1-10.7 (type "Error")) + (patt @10.1-10.7 (type "Num(Int(Unsigned64))")) (patt @2.5-2.13 (type "Num(_size)"))) (type_decls (nominal @1.1-3.2 (type "Foo") (ty-header @1.1-1.4 (name "Foo")))) (expressions (expr @7.9-7.16 (type "Num(_size)")) - (expr @10.10-10.15 (type "Error")) + (expr @10.10-10.15 (type "Num(Int(Unsigned64))")) (expr @2.11-2.13 (type "Num(_size)")))) ~~~ diff --git a/test/snapshots/plume_package/Color.md b/test/snapshots/plume_package/Color.md index bebbf6da71..735a302e64 100644 --- a/test/snapshots/plume_package/Color.md +++ b/test/snapshots/plume_package/Color.md @@ -716,13 +716,13 @@ is_named_color = |str| { (p-assign @27.1-27.4 (ident "hex")) (e-closure @27.7-46.2 (captures - (capture @34.13-34.21 (ident "is_valid")) - (capture @33.24-33.25 (ident "d")) - (capture @33.30-33.31 (ident "f")) (capture @33.15-33.16 (ident "a")) - (capture @33.18-33.19 (ident "b")) + (capture @33.24-33.25 (ident "d")) + (capture 
@33.27-33.28 (ident "e")) (capture @33.21-33.22 (ident "c")) - (capture @33.27-33.28 (ident "e"))) + (capture @33.30-33.31 (ident "f")) + (capture @33.18-33.19 (ident "b")) + (capture @34.13-34.21 (ident "is_valid"))) (e-lambda @27.7-46.2 (args (p-assign @27.8-27.11 (ident "str"))) @@ -877,16 +877,16 @@ is_named_color = |str| { (p-assign @49.1-49.7 (ident "to_str")) (e-closure @49.10-54.2 (captures - (capture @51.22-51.23 (ident "b")) - (capture @51.25-51.26 (ident "a")) - (capture @53.15-53.20 (ident "inner")) - (capture @50.18-50.19 (ident "g")) (capture @50.21-50.22 (ident "b")) - (capture @49.1-49.7 (ident "to_str")) + (capture @51.25-51.26 (ident "a")) (capture @50.15-50.16 (ident "r")) - (capture @51.19-51.20 (ident "g")) + (capture @53.15-53.20 (ident "inner")) + (capture @49.1-49.7 (ident "to_str")) + (capture @51.16-51.17 (ident "r")) (capture @52.17-52.22 (ident "inner")) - (capture @51.16-51.17 (ident "r"))) + (capture @50.18-50.19 (ident "g")) + (capture @51.22-51.23 (ident "b")) + (capture @51.19-51.20 (ident "g"))) (e-lambda @49.10-54.2 (args (p-assign @49.11-49.16 (ident "color"))) diff --git a/test/snapshots/records/function_record_parameter_capture.md b/test/snapshots/records/function_record_parameter_capture.md index eedc6c95a5..41d49aeb0d 100644 --- a/test/snapshots/records/function_record_parameter_capture.md +++ b/test/snapshots/records/function_record_parameter_capture.md @@ -99,5 +99,5 @@ NO CHANGE ~~~ # TYPES ~~~clojure -(expr @1.1-1.103 (type "Error -> { full_record: Error, greeting: Str, is_adult: Bool }")) +(expr @1.1-1.103 (type "b -> { full_record: b, greeting: Str, is_adult: Bool }")) ~~~ diff --git a/test/snapshots/syntax_grab_bag.md b/test/snapshots/syntax_grab_bag.md index be4928c33f..2d7240a70d 100644 --- a/test/snapshots/syntax_grab_bag.md +++ b/test/snapshots/syntax_grab_bag.md @@ -1871,9 +1871,9 @@ expect { (p-assign @80.1-80.11 (ident "match_time")) (e-closure @80.14-138.3 (captures - (capture @86.4-86.5 (ident "x")) (capture 
@94.5-94.6 (ident "x")) - (capture @136.11-136.15 (ident "dude"))) + (capture @136.11-136.15 (ident "dude")) + (capture @86.4-86.5 (ident "x"))) (e-lambda @80.14-138.3 (args (p-assign @81.2-81.3 (ident "a")) @@ -2087,8 +2087,8 @@ expect { (p-assign @144.1-144.6 (ident "main!")) (e-closure @144.9-196.2 (captures - (capture @80.1-80.11 (ident "match_time")) - (capture @68.1-68.8 (ident "add_one"))) + (capture @68.1-68.8 (ident "add_one")) + (capture @80.1-80.11 (ident "match_time"))) (e-lambda @144.9-196.2 (args (p-underscore @144.10-144.11)) diff --git a/test/snapshots/type_app_complex_nested.md b/test/snapshots/type_app_complex_nested.md index 55c39672ec..5b03e0ce8a 100644 --- a/test/snapshots/type_app_complex_nested.md +++ b/test/snapshots/type_app_complex_nested.md @@ -366,9 +366,9 @@ main! = |_| processComplex(Ok([Some(42), None])) ~~~clojure (inferred-types (defs - (patt @5.1-5.15 (type "Result(List(Error), Error) -> List(a)")) - (patt @13.1-13.11 (type "Error -> a")) - (patt @20.1-20.6 (type "_arg -> List(a)"))) + (patt @5.1-5.15 (type "Result(List(Error), Error) -> List(_elem)")) + (patt @13.1-13.11 (type "Error -> _ret")) + (patt @20.1-20.6 (type "_arg -> List(_elem)"))) (type_decls (alias @18.1-18.64 (type "ComplexType(a, b)") (ty-header @18.1-18.18 (name "ComplexType") @@ -376,7 +376,7 @@ main! 
= |_| processComplex(Ok([Some(42), None])) (ty-rigid-var @18.13-18.14 (name "a")) (ty-rigid-var @18.16-18.17 (name "b")))))) (expressions - (expr @5.18-9.6 (type "Result(List(Error), Error) -> List(a)")) - (expr @13.14-15.2 (type "Error -> a")) - (expr @20.9-20.49 (type "_arg -> List(a)")))) + (expr @5.18-9.6 (type "Result(List(Error), Error) -> List(_elem)")) + (expr @13.14-15.2 (type "Error -> _ret")) + (expr @20.9-20.49 (type "_arg -> List(_elem)")))) ~~~ diff --git a/test/snapshots/type_var_name_avoids_collision.md b/test/snapshots/type_var_name_avoids_collision.md index 9e04e18b5a..6a9d09cbe5 100644 --- a/test/snapshots/type_var_name_avoids_collision.md +++ b/test/snapshots/type_var_name_avoids_collision.md @@ -531,13 +531,13 @@ main! = |_| { (p-assign @55.1-55.6 (ident "main!")) (e-closure @55.9-65.2 (captures - (capture @16.1-16.16 (ident "anotherIdentity")) + (capture @46.1-46.19 (ident "yetAnotherIdentity")) (capture @23.1-23.2 (ident "f")) (capture @4.1-4.2 (ident "a")) (capture @53.1-53.14 (ident "finalIdentity")) (capture @7.1-7.9 (ident "identity")) - (capture @20.1-20.8 (ident "combine")) - (capture @46.1-46.19 (ident "yetAnotherIdentity"))) + (capture @16.1-16.16 (ident "anotherIdentity")) + (capture @20.1-20.8 (ident "combine"))) (e-lambda @55.9-65.2 (args (p-underscore @55.10-55.11)) From abbcff84a2a48ff78c5e15fd5d64e1d67f32a551 Mon Sep 17 00:00:00 2001 From: Fabian Schmalzried Date: Wed, 15 Oct 2025 09:41:04 +0000 Subject: [PATCH 12/17] Change default type to err and use ensureTotalCapacity to reduce allocations --- src/check/Check.zig | 3 ++- src/collections/safe_list.zig | 16 ++++++++++++++++ src/types/store.zig | 19 ++++++++++++++++--- 3 files changed, 34 insertions(+), 4 deletions(-) diff --git a/src/check/Check.zig b/src/check/Check.zig index 057fddf800..28a448bab3 100644 --- a/src/check/Check.zig +++ b/src/check/Check.zig @@ -164,8 +164,9 @@ pub inline fn debugAssertArraysInSync(self: *const Self) void { inline fn 
ensureTypeStoreIsFilled(self: *Self) Allocator.Error!void { const region_nodes: usize = @intCast(self.regions.len()); const type_nodes: usize = @intCast(self.types.len()); + try self.types.ensureTotalCapacity(region_nodes); for (type_nodes..region_nodes) |_| { - _ = try self.types.fresh(); + _ = self.types.appendFromContentAssumeCapacity(.err); } } diff --git a/src/collections/safe_list.zig b/src/collections/safe_list.zig index 7bab56e26f..7fe88e0b7a 100644 --- a/src/collections/safe_list.zig +++ b/src/collections/safe_list.zig @@ -200,6 +200,14 @@ pub fn SafeList(comptime T: type) type { return @enumFromInt(@as(u32, @intCast(length))); } + /// Add a new item to the end of this list assuming capacity is sufficient to hold an additional item. + pub fn appendAssumeCapacity(self: *SafeList(T), item: T) Idx { + const length = self.len(); + self.items.appendAssumeCapacity(item); + + return @enumFromInt(@as(u32, @intCast(length))); + } + /// Create a range from the provided idx to the end of the list pub fn rangeToEnd(self: *SafeList(T), start_int: u32) Range { const len_int = self.len(); @@ -432,6 +440,14 @@ pub fn SafeMultiList(comptime T: type) type { return @enumFromInt(@as(u32, @intCast(length))); } + /// Add a new item to the end of this list assuming capacity is sufficient to hold an additional item.
+ pub fn appendAssumeCapacity(self: *SafeMultiList(T), item: T) Idx { + const length = self.len(); + self.items.appendAssumeCapacity(item); + + return @enumFromInt(@as(u32, @intCast(length))); + } + pub fn appendSlice(self: *SafeMultiList(T), gpa: Allocator, elems: []const T) std.mem.Allocator.Error!Range { if (elems.len == 0) { return .{ .start = .zero, .count = 0 }; diff --git a/src/types/store.zig b/src/types/store.zig index aff8253c25..2ffa5a151f 100644 --- a/src/types/store.zig +++ b/src/types/store.zig @@ -184,6 +184,13 @@ pub const Store = struct { } } + /// Create a new variable with the provided content assuming there is capacity + pub fn appendFromContentAssumeCapacity(self: *Self, content: Content) Var { + const desc_idx = self.descs.appendAssumeCapacity(.{ .content = content, .rank = Rank.top_level, .mark = Mark.none }); + const slot_idx = self.slots.appendAssumeCapacity(.{ .root = desc_idx }); + return Self.slotIdxToVar(slot_idx); + } + // setting variables // /// Set a type variable to the provided content @@ -964,9 +971,9 @@ const SlotStore = struct { return @enumFromInt(@intFromEnum(safe_idx)); } - /// Insert a value into the store - fn appendAssumeCapacity(self: *Self, gpa: Allocator, typ: Slot) std.mem.Allocator.Error!Idx { - const safe_idx = try self.backing.append(gpa, typ); + /// Insert a value into the store assuming there is capacity + fn appendAssumeCapacity(self: *Self, typ: Slot) Idx { + const safe_idx = self.backing.appendAssumeCapacity(typ); return @enumFromInt(@intFromEnum(safe_idx)); } @@ -1068,6 +1075,12 @@ const DescStore = struct { return @enumFromInt(@intFromEnum(safe_idx)); } + /// Appends a value to the store assuming there is capacity + fn appendAssumeCapacity(self: *Self, typ: Desc) Idx { + const safe_idx = self.backing.appendAssumeCapacity(typ); + return @enumFromInt(@intFromEnum(safe_idx)); + } + /// Set a value in the store fn set(self: *Self, idx: Idx, val: Desc) void { self.backing.set(@enumFromInt(@intFromEnum(idx)), 
val); From b0f510f82e1f7916cb8d8b5bc2dc01adb9d854d8 Mon Sep 17 00:00:00 2001 From: Fabian Schmalzried Date: Wed, 15 Oct 2025 13:02:20 +0000 Subject: [PATCH 13/17] fix match and unary operation type checking --- src/check/Check.zig | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/src/check/Check.zig b/src/check/Check.zig index 28a448bab3..a2c1ae5460 100644 --- a/src/check/Check.zig +++ b/src/check/Check.zig @@ -2927,9 +2927,8 @@ fn checkMatchExpr(self: *Self, expr_idx: CIR.Expr.Idx, rank: Rank, match: CIR.Ex does_fx = try self.checkExpr(first_branch.value, rank, .no_expectation) or does_fx; const branch_var = ModuleEnv.varFrom(first_branch.value); - // Unify the match expr to the first branch. - const match_result = try self.unify(ModuleEnv.varFrom(expr_idx), branch_var, rank); - std.debug.assert(match_result.isOk()); + // Redirect the match expr to the first branch. + try self.types.setVarRedirect(ModuleEnv.varFrom(expr_idx), branch_var); // Then iterate over the rest of the branches for (branch_idxs[1..], 1..) 
|branch_idx, branch_cur_index| { @@ -3023,9 +3022,11 @@ fn checkUnaryMinusExpr(self: *Self, expr_idx: CIR.Expr.Idx, expr_region: Region, } } }; const num_var = try self.freshFromContent(num_content, rank, expr_region); - // Unify operand and result with the number type + // Redirect the result to the number type + try self.types.setVarRedirect(result_var, num_var); + + // Unify result with the number type _ = try self.unify(num_var, operand_var, rank); - _ = try self.unify(num_var, result_var, rank); return does_fx; } @@ -3046,9 +3047,11 @@ fn checkUnaryNotExpr(self: *Self, expr_idx: CIR.Expr.Idx, expr_region: Region, r // Create a fresh boolean variable for the operation const bool_var = try self.freshBool(rank, expr_region); - // Unify operand and result with the boolean type + // Redirect the result to the boolean type + try self.types.setVarRedirect(result_var, bool_var); + + // Unify result with the boolean type _ = try self.unify(bool_var, operand_var, rank); - _ = try self.unify(bool_var, result_var, rank); return does_fx; } From 8303bdf0a8fdcff95e7cf2e83456f5693c43b02a Mon Sep 17 00:00:00 2001 From: Fabian Schmalzried Date: Wed, 15 Oct 2025 13:03:15 +0000 Subject: [PATCH 14/17] add s_var to type checking --- src/check/Check.zig | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/src/check/Check.zig b/src/check/Check.zig index a2c1ae5460..55f5e8523c 100644 --- a/src/check/Check.zig +++ b/src/check/Check.zig @@ -2235,6 +2235,30 @@ fn checkExpr(self: *Self, expr_idx: CIR.Expr.Idx, rank: types_mod.Rank, expected const bool_var = try self.freshBool(rank, expr_region); _ = try self.unify(bool_var, stmt_expr, rank); }, + .s_var => |var_stmt| { + + // Check the pattern + try self.checkPattern(var_stmt.pattern_idx, rank, .no_expectation); + const var_pattern_var: Var = ModuleEnv.varFrom(var_stmt.pattern_idx); + + { + // Enter a new rank + try self.var_pool.pushRank(); + defer self.var_pool.popRank(); + + const next_rank = rank.next(); + 
std.debug.assert(next_rank == self.var_pool.current_rank); + + does_fx = try self.checkExpr(var_stmt.expr, next_rank, Expected.no_expectation) or does_fx; + + // Now that we are existing the scope, we must generalize then pop this rank + try self.generalizer.generalize(&self.var_pool, next_rank); + } + + // Unify the pattern with the expression + const var_expr_var: Var = ModuleEnv.varFrom(var_stmt.expr); + _ = try self.unify(var_pattern_var, var_expr_var, rank); + }, else => { // TODO }, From 9ef2122d44faf647c1f06a5f381007bda2dacdf2 Mon Sep 17 00:00:00 2001 From: Fabian Schmalzried Date: Wed, 15 Oct 2025 13:24:23 +0000 Subject: [PATCH 15/17] run snapshot tool --- test/snapshots/binops.md | 2 +- test/snapshots/can_dot_access_with_vars.md | 2 +- test/snapshots/expr/double_question_binop.md | 2 +- test/snapshots/match_expr/nested_record_patterns.md | 2 +- test/snapshots/match_expr/pattern_as_basic.md | 2 +- test/snapshots/match_expr/pattern_as_nested.md | 2 +- test/snapshots/match_expr/record_destructure.md | 2 +- .../match_expr/record_pattern_edge_cases.md | 2 +- .../nominal/nominal_associated_value_alias.md | 4 ++-- test/snapshots/plume_package/Color.md | 4 ++-- test/snapshots/records/function_record_parameter.md | 2 +- .../records/function_record_parameter_capture.md | 2 +- test/snapshots/records/pattern_destructure_rename.md | 2 +- test/snapshots/type_app_complex_nested.md | 12 ++++++------ test/snapshots/type_app_single_arg.md | 8 ++++---- test/snapshots/type_app_with_vars.md | 6 +++--- test/snapshots/type_application_basic.md | 8 ++++---- .../where_clause/where_clauses_simple_dispatch.md | 4 ++-- .../where_clause/where_clauses_type_annotation.md | 4 ++-- 19 files changed, 36 insertions(+), 36 deletions(-) diff --git a/test/snapshots/binops.md b/test/snapshots/binops.md index 7cd77793fa..376cb0928c 100644 --- a/test/snapshots/binops.md +++ b/test/snapshots/binops.md @@ -173,5 +173,5 @@ EndOfFile(18:1-18:1), ~~~ # TYPES ~~~clojure -(expr @1.1-17.2 (type 
"(Num(_size), Num(_size2), Num(_size3), Num(_size4), Num(_size5), Bool, Bool, Bool, Bool, Bool, Bool, Num(_size6), Bool, Bool, _field)")) +(expr @1.1-17.2 (type "(Num(_size), Num(_size2), Num(_size3), Num(_size4), Num(_size5), Bool, Bool, Bool, Bool, Bool, Bool, Num(_size6), Bool, Bool, Error)")) ~~~ diff --git a/test/snapshots/can_dot_access_with_vars.md b/test/snapshots/can_dot_access_with_vars.md index 7dc09e8d05..95bd9cb5c9 100644 --- a/test/snapshots/can_dot_access_with_vars.md +++ b/test/snapshots/can_dot_access_with_vars.md @@ -85,5 +85,5 @@ EndOfFile(6:1-6:1), ~~~ # TYPES ~~~clojure -(expr @1.1-5.2 (type "_a")) +(expr @1.1-5.2 (type "Error")) ~~~ diff --git a/test/snapshots/expr/double_question_binop.md b/test/snapshots/expr/double_question_binop.md index d982bc30b6..4985a0a7d5 100644 --- a/test/snapshots/expr/double_question_binop.md +++ b/test/snapshots/expr/double_question_binop.md @@ -50,5 +50,5 @@ NO CHANGE ~~~ # TYPES ~~~clojure -(expr @1.1-1.23 (type "_a")) +(expr @1.1-1.23 (type "Error")) ~~~ diff --git a/test/snapshots/match_expr/nested_record_patterns.md b/test/snapshots/match_expr/nested_record_patterns.md index e92763557d..42185013d5 100644 --- a/test/snapshots/match_expr/nested_record_patterns.md +++ b/test/snapshots/match_expr/nested_record_patterns.md @@ -222,5 +222,5 @@ match ... 
{ ~~~ # TYPES ~~~clojure -(expr @1.1-7.2 (type "Str")) +(expr @1.1-7.2 (type "Error")) ~~~ diff --git a/test/snapshots/match_expr/pattern_as_basic.md b/test/snapshots/match_expr/pattern_as_basic.md index 9989048f06..2b79a2c532 100644 --- a/test/snapshots/match_expr/pattern_as_basic.md +++ b/test/snapshots/match_expr/pattern_as_basic.md @@ -92,5 +92,5 @@ match (1, 2) { ~~~ # TYPES ~~~clojure -(expr @1.1-3.2 (type "(Num(_size), Num(_size2))")) +(expr @1.1-3.2 (type "Error")) ~~~ diff --git a/test/snapshots/match_expr/pattern_as_nested.md b/test/snapshots/match_expr/pattern_as_nested.md index b2310c8ca0..52dec166af 100644 --- a/test/snapshots/match_expr/pattern_as_nested.md +++ b/test/snapshots/match_expr/pattern_as_nested.md @@ -156,5 +156,5 @@ match person { ~~~ # TYPES ~~~clojure -(expr @1.1-4.2 (type "(Error, _field, Str)")) +(expr @1.1-4.2 (type "(Error, Error, Str)")) ~~~ diff --git a/test/snapshots/match_expr/record_destructure.md b/test/snapshots/match_expr/record_destructure.md index c13ec21fe5..3c2a4ddeb7 100644 --- a/test/snapshots/match_expr/record_destructure.md +++ b/test/snapshots/match_expr/record_destructure.md @@ -132,5 +132,5 @@ match ... { ~~~ # TYPES ~~~clojure -(expr @1.1-5.2 (type "Str")) +(expr @1.1-5.2 (type "Error")) ~~~ diff --git a/test/snapshots/match_expr/record_pattern_edge_cases.md b/test/snapshots/match_expr/record_pattern_edge_cases.md index e6ea90c8f5..9550a900f4 100644 --- a/test/snapshots/match_expr/record_pattern_edge_cases.md +++ b/test/snapshots/match_expr/record_pattern_edge_cases.md @@ -276,5 +276,5 @@ match ... 
{ ~~~ # TYPES ~~~clojure -(expr @1.1-9.2 (type "Str")) +(expr @1.1-9.2 (type "Error")) ~~~ diff --git a/test/snapshots/nominal/nominal_associated_value_alias.md b/test/snapshots/nominal/nominal_associated_value_alias.md index f3e02d91d4..527b7511f0 100644 --- a/test/snapshots/nominal/nominal_associated_value_alias.md +++ b/test/snapshots/nominal/nominal_associated_value_alias.md @@ -100,13 +100,13 @@ result = myBar (inferred-types (defs (patt @7.1-7.6 (type "Num(_size)")) - (patt @10.1-10.7 (type "Num(Int(Unsigned64))")) + (patt @10.1-10.7 (type "Error")) (patt @2.5-2.13 (type "Num(_size)"))) (type_decls (nominal @1.1-3.2 (type "Foo") (ty-header @1.1-1.4 (name "Foo")))) (expressions (expr @7.9-7.16 (type "Num(_size)")) - (expr @10.10-10.15 (type "Num(Int(Unsigned64))")) + (expr @10.10-10.15 (type "Error")) (expr @2.11-2.13 (type "Num(_size)")))) ~~~ diff --git a/test/snapshots/plume_package/Color.md b/test/snapshots/plume_package/Color.md index 735a302e64..926d8d4875 100644 --- a/test/snapshots/plume_package/Color.md +++ b/test/snapshots/plume_package/Color.md @@ -1111,7 +1111,7 @@ is_named_color = |str| { (patt @27.1-27.4 (type "Str -> Result(Color, [InvalidHex(Str)])")) (patt @49.1-49.7 (type "Error -> Error")) (patt @61.1-61.6 (type "Str -> Result(Color, [UnknownColor(Str)])")) - (patt @67.1-67.15 (type "_arg -> _ret"))) + (patt @67.1-67.15 (type "_arg -> Error"))) (type_decls (nominal @10.1-15.2 (type "Color") (ty-header @10.1-10.6 (name "Color")))) @@ -1121,5 +1121,5 @@ is_named_color = |str| { (expr @27.7-46.2 (type "Str -> Result(Color, [InvalidHex(Str)])")) (expr @49.10-54.2 (type "Error -> Error")) (expr @61.9-65.50 (type "Str -> Result(Color, [UnknownColor(Str)])")) - (expr @67.18-71.2 (type "_arg -> _ret")))) + (expr @67.18-71.2 (type "_arg -> Error")))) ~~~ diff --git a/test/snapshots/records/function_record_parameter.md b/test/snapshots/records/function_record_parameter.md index ba1f791ff6..a609c0b8d3 100644 --- 
a/test/snapshots/records/function_record_parameter.md +++ b/test/snapshots/records/function_record_parameter.md @@ -63,5 +63,5 @@ NO CHANGE ~~~ # TYPES ~~~clojure -(expr @1.1-1.67 (type "{ age: _field, name: _field2 } -> Str")) +(expr @1.1-1.67 (type "{ age: _field, name: _field2 } -> Error")) ~~~ diff --git a/test/snapshots/records/function_record_parameter_capture.md b/test/snapshots/records/function_record_parameter_capture.md index 41d49aeb0d..eedc6c95a5 100644 --- a/test/snapshots/records/function_record_parameter_capture.md +++ b/test/snapshots/records/function_record_parameter_capture.md @@ -99,5 +99,5 @@ NO CHANGE ~~~ # TYPES ~~~clojure -(expr @1.1-1.103 (type "b -> { full_record: b, greeting: Str, is_adult: Bool }")) +(expr @1.1-1.103 (type "Error -> { full_record: Error, greeting: Str, is_adult: Bool }")) ~~~ diff --git a/test/snapshots/records/pattern_destructure_rename.md b/test/snapshots/records/pattern_destructure_rename.md index 876225f5ac..f54d2f7087 100644 --- a/test/snapshots/records/pattern_destructure_rename.md +++ b/test/snapshots/records/pattern_destructure_rename.md @@ -90,5 +90,5 @@ match person { ~~~ # TYPES ~~~clojure -(expr @1.1-3.2 (type "Str")) +(expr @1.1-3.2 (type "Error")) ~~~ diff --git a/test/snapshots/type_app_complex_nested.md b/test/snapshots/type_app_complex_nested.md index 5b03e0ce8a..334fc6d44f 100644 --- a/test/snapshots/type_app_complex_nested.md +++ b/test/snapshots/type_app_complex_nested.md @@ -366,9 +366,9 @@ main! 
= |_| processComplex(Ok([Some(42), None])) ~~~clojure (inferred-types (defs - (patt @5.1-5.15 (type "Result(List(Error), Error) -> List(_elem)")) - (patt @13.1-13.11 (type "Error -> _ret")) - (patt @20.1-20.6 (type "_arg -> List(_elem)"))) + (patt @5.1-5.15 (type "Result(List(Error), Error) -> List(Error)")) + (patt @13.1-13.11 (type "Error -> Error")) + (patt @20.1-20.6 (type "_arg -> List(Error)"))) (type_decls (alias @18.1-18.64 (type "ComplexType(a, b)") (ty-header @18.1-18.18 (name "ComplexType") @@ -376,7 +376,7 @@ main! = |_| processComplex(Ok([Some(42), None])) (ty-rigid-var @18.13-18.14 (name "a")) (ty-rigid-var @18.16-18.17 (name "b")))))) (expressions - (expr @5.18-9.6 (type "Result(List(Error), Error) -> List(_elem)")) - (expr @13.14-15.2 (type "Error -> _ret")) - (expr @20.9-20.49 (type "_arg -> List(_elem)")))) + (expr @5.18-9.6 (type "Result(List(Error), Error) -> List(Error)")) + (expr @13.14-15.2 (type "Error -> Error")) + (expr @20.9-20.49 (type "_arg -> List(Error)")))) ~~~ diff --git a/test/snapshots/type_app_single_arg.md b/test/snapshots/type_app_single_arg.md index bd5a862bf1..0488b79f7f 100644 --- a/test/snapshots/type_app_single_arg.md +++ b/test/snapshots/type_app_single_arg.md @@ -117,9 +117,9 @@ main! 
= |_| processList(["one", "two"]) ~~~clojure (inferred-types (defs - (patt @4.1-4.12 (type "List(Str) -> Num(Int(Unsigned64))")) - (patt @6.1-6.6 (type "_arg -> Num(Int(Unsigned64))"))) + (patt @4.1-4.12 (type "List(Str) -> Error")) + (patt @6.1-6.6 (type "_arg -> Error"))) (expressions - (expr @4.15-4.32 (type "List(Str) -> Num(Int(Unsigned64))")) - (expr @6.9-6.39 (type "_arg -> Num(Int(Unsigned64))")))) + (expr @4.15-4.32 (type "List(Str) -> Error")) + (expr @6.9-6.39 (type "_arg -> Error")))) ~~~ diff --git a/test/snapshots/type_app_with_vars.md b/test/snapshots/type_app_with_vars.md index 4f905263ac..f0525d0a97 100644 --- a/test/snapshots/type_app_with_vars.md +++ b/test/snapshots/type_app_with_vars.md @@ -27,7 +27,7 @@ It has the type: _List(Num(_size)) -> _ret_ But I expected it to be: - _List(a), a -> b -> List(b)_ + _List(a), a -> b -> Error_ # TOKENS ~~~zig @@ -147,9 +147,9 @@ main! = |_| mapList([1, 2, 3, 4, 5]) ~~~clojure (inferred-types (defs - (patt @4.1-4.8 (type "List(a), a -> b -> List(b)")) + (patt @4.1-4.8 (type "List(a), a -> b -> Error")) (patt @6.1-6.6 (type "_arg -> _ret"))) (expressions - (expr @4.11-4.34 (type "List(a), a -> b -> List(b)")) + (expr @4.11-4.34 (type "List(a), a -> b -> Error")) (expr @6.9-6.33 (type "_arg -> _ret")))) ~~~ diff --git a/test/snapshots/type_application_basic.md b/test/snapshots/type_application_basic.md index 142c6b4c57..aedf2d9dd2 100644 --- a/test/snapshots/type_application_basic.md +++ b/test/snapshots/type_application_basic.md @@ -121,9 +121,9 @@ main! 
= |_| processList(["one", "two", "three"]) ~~~clojure (inferred-types (defs - (patt @4.1-4.12 (type "List(Str) -> Num(Int(Unsigned64))")) - (patt @6.1-6.6 (type "_arg -> Num(Int(Unsigned64))"))) + (patt @4.1-4.12 (type "List(Str) -> Error")) + (patt @6.1-6.6 (type "_arg -> Error"))) (expressions - (expr @4.15-4.32 (type "List(Str) -> Num(Int(Unsigned64))")) - (expr @6.9-6.47 (type "_arg -> Num(Int(Unsigned64))")))) + (expr @4.15-4.32 (type "List(Str) -> Error")) + (expr @6.9-6.47 (type "_arg -> Error")))) ~~~ diff --git a/test/snapshots/where_clause/where_clauses_simple_dispatch.md b/test/snapshots/where_clause/where_clauses_simple_dispatch.md index 027df148c9..592a1e911e 100644 --- a/test/snapshots/where_clause/where_clauses_simple_dispatch.md +++ b/test/snapshots/where_clause/where_clauses_simple_dispatch.md @@ -79,7 +79,7 @@ NO CHANGE ~~~clojure (inferred-types (defs - (patt @2.1-2.10 (type "a -> Str"))) + (patt @2.1-2.10 (type "a -> Error"))) (expressions - (expr @2.13-2.35 (type "a -> Str")))) + (expr @2.13-2.35 (type "a -> Error")))) ~~~ diff --git a/test/snapshots/where_clause/where_clauses_type_annotation.md b/test/snapshots/where_clause/where_clauses_type_annotation.md index d9ec7d411f..bc2068d16e 100644 --- a/test/snapshots/where_clause/where_clauses_type_annotation.md +++ b/test/snapshots/where_clause/where_clauses_type_annotation.md @@ -79,7 +79,7 @@ NO CHANGE ~~~clojure (inferred-types (defs - (patt @2.1-2.8 (type "a -> b"))) + (patt @2.1-2.8 (type "a -> Error"))) (expressions - (expr @2.11-2.23 (type "a -> b")))) + (expr @2.11-2.23 (type "a -> Error")))) ~~~ From b2a8e54931825de49ee7cd52c29117f509beb775 Mon Sep 17 00:00:00 2001 From: Fabian Schmalzried Date: Sat, 18 Oct 2025 15:31:48 +0200 Subject: [PATCH 16/17] add back assert and add missing regions --- src/check/Check.zig | 12 +++++++++++- src/snapshot_tool/main.zig | 3 +++ 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/src/check/Check.zig b/src/check/Check.zig index 
55f5e8523c..361e4645ab 100644 --- a/src/check/Check.zig +++ b/src/check/Check.zig @@ -244,8 +244,11 @@ fn unify(self: *Self, a: Var, b: Var, rank: Rank) std.mem.Allocator.Error!unifie for (self.unify_scratch.fresh_vars.items.items) |fresh_var| { try self.var_pool.addVarToRank(fresh_var, rank); + const region = self.cir.store.getNodeRegion(ModuleEnv.nodeIdxFrom(a)); + try self.fillInRegionsThrough(fresh_var); + self.setRegionAt(fresh_var, region); } - + self.debugAssertArraysInSync(); return result; } @@ -311,6 +314,13 @@ fn unifyFromAnno(self: *Self, a: Var, b: Var, rank: Rank) std.mem.Allocator.Erro try self.var_pool.addVarToRank(fresh_var, rank); } } + for (self.unify_scratch.fresh_vars.items.items) |fresh_var| { + const region = self.cir.store.getNodeRegion(ModuleEnv.nodeIdxFrom(a)); + try self.fillInRegionsThrough(fresh_var); + self.setRegionAt(fresh_var, region); + } + + self.debugAssertArraysInSync(); return result; } diff --git a/src/snapshot_tool/main.zig b/src/snapshot_tool/main.zig index 7f60426e10..873691afa3 100644 --- a/src/snapshot_tool/main.zig +++ b/src/snapshot_tool/main.zig @@ -1263,6 +1263,9 @@ fn processSnapshotContent( try solver.checkFile(); } + // Assert that we have regions for every type variable + solver.debugAssertArraysInSync(); + // Cache round-trip validation - ensure ModuleCache serialization/deserialization works { // Generate original S-expression for comparison From 2e4c0b418233544a0967caef4239c5adac75292d Mon Sep 17 00:00:00 2001 From: Fabian Schmalzried Date: Sun, 19 Oct 2025 21:11:08 +0200 Subject: [PATCH 17/17] Typo in safe_list.zig --- src/collections/safe_list.zig | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/collections/safe_list.zig b/src/collections/safe_list.zig index ff008c4a54..07bb49c9cf 100644 --- a/src/collections/safe_list.zig +++ b/src/collections/safe_list.zig @@ -200,7 +200,7 @@ pub fn SafeList(comptime T: type) type { return @enumFromInt(@as(u32, @intCast(length))); } - /// Add a new 
item to the end of this list assuming cpacity is sufficient to hold an additional item. + /// Add a new item to the end of this list assuming capacity is sufficient to hold an additional item. pub fn appendAssumeCapacity(self: *SafeList(T), item: T) Idx { const length = self.len(); self.items.appendAssumeCapacity(item); @@ -440,7 +440,7 @@ pub fn SafeMultiList(comptime T: type) type { return @enumFromInt(@as(u32, @intCast(length))); } - /// Add a new item to the end of this list assuming cpacity is sufficient to hold an additional item. + /// Add a new item to the end of this list assuming capacity is sufficient to hold an additional item. pub fn appendAssumeCapacity(self: *SafeMultiList(T), item: T) Idx { const length = self.len(); self.items.appendAssumeCapacity(item);