Merge pull request #8274 from FabHof/type_cleanup

Remove Types from Canonicalization
This commit is contained in:
Jared Ramirez 2025-10-19 18:20:37 -04:00 committed by GitHub
commit 0428f06b38
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
33 changed files with 532 additions and 785 deletions

File diff suppressed because it is too large Load diff

View file

@ -154,15 +154,15 @@ pub fn freezeInterners(self: *Self) void {
/// Records a diagnostic error during canonicalization without blocking compilation.
pub fn pushDiagnostic(self: *Self, reason: CIR.Diagnostic) std.mem.Allocator.Error!void {
_ = try self.addDiagnosticAndTypeVar(reason, .err);
_ = try self.addDiagnostic(reason);
}
/// Creates a malformed node that represents a runtime error in the IR.
pub fn pushMalformed(self: *Self, comptime RetIdx: type, reason: CIR.Diagnostic) std.mem.Allocator.Error!RetIdx {
comptime if (!isCastable(RetIdx)) @compileError("Idx type " ++ @typeName(RetIdx) ++ " is not castable");
const diag_idx = try self.addDiagnosticAndTypeVar(reason, .err);
const diag_idx = try self.addDiagnostic(reason);
const region = getDiagnosticRegion(reason);
const malformed_idx = try self.addMalformedAndTypeVar(diag_idx, .err, region);
const malformed_idx = try self.addMalformed(diag_idx, region);
return castIdx(Node.Idx, RetIdx, malformed_idx);
}
@ -1480,17 +1480,16 @@ pub fn containsExposedById(self: *const Self, ident_idx: Ident.Idx) bool {
return self.common.exposed_items.containsById(self.gpa, @bitCast(ident_idx));
}
/// Assert that nodes, regions and types are all in sync
/// Assert that nodes and regions are in sync
pub inline fn debugAssertArraysInSync(self: *const Self) void {
if (builtin.mode == .Debug) {
const cir_nodes = self.store.nodes.items.len;
const region_nodes = self.store.regions.len();
const type_nodes = self.types.len();
if (!(cir_nodes == region_nodes and region_nodes == type_nodes)) {
if (!(cir_nodes == region_nodes)) {
std.debug.panic(
"Arrays out of sync:\n cir_nodes={}\n region_nodes={}\n type_nodes={}\n",
.{ cir_nodes, region_nodes, type_nodes },
"Arrays out of sync:\n cir_nodes={}\n region_nodes={}\n",
.{ cir_nodes, region_nodes },
);
}
}
@ -1511,311 +1510,173 @@ inline fn debugAssertIdxsEql(comptime desc: []const u8, idx1: anytype, idx2: any
}
}
/// Add a new expression and type variable.
/// This function asserts that the types array and the nodes are in sync.
pub fn addDefAndTypeVar(self: *Self, expr: CIR.Def, content: types_mod.Content, region: Region) std.mem.Allocator.Error!CIR.Def.Idx {
/// Add a new expression to the node store.
/// This function asserts that the nodes and regions are in sync.
pub fn addDef(self: *Self, expr: CIR.Def, region: Region) std.mem.Allocator.Error!CIR.Def.Idx {
const expr_idx = try self.store.addDef(expr, region);
const expr_var = try self.types.freshFromContent(content);
debugAssertIdxsEql("self", expr_idx, expr_var);
self.debugAssertArraysInSync();
return expr_idx;
}
/// Add a new expression and type variable.
/// This function asserts that the types array and the nodes are in sync.
pub fn addTypeHeaderAndTypeVar(self: *Self, expr: CIR.TypeHeader, content: types_mod.Content, region: Region) std.mem.Allocator.Error!CIR.TypeHeader.Idx {
/// Add a new type header to the node store.
/// This function asserts that the nodes and regions are in sync.
pub fn addTypeHeader(self: *Self, expr: CIR.TypeHeader, region: Region) std.mem.Allocator.Error!CIR.TypeHeader.Idx {
const expr_idx = try self.store.addTypeHeader(expr, region);
const expr_var = try self.types.freshFromContent(content);
debugAssertIdxsEql("addTypeHeaderAndTypeVar", expr_idx, expr_var);
self.debugAssertArraysInSync();
return expr_idx;
}
/// Add a new expression and type variable.
/// This function asserts that the types array and the nodes are in sync.
pub fn addStatementAndTypeVar(self: *Self, expr: CIR.Statement, content: types_mod.Content, region: Region) std.mem.Allocator.Error!CIR.Statement.Idx {
/// Add a new statement to the node store.
/// This function asserts that the nodes and regions are in sync.
pub fn addStatement(self: *Self, expr: CIR.Statement, region: Region) std.mem.Allocator.Error!CIR.Statement.Idx {
const expr_idx = try self.store.addStatement(expr, region);
const expr_var = try self.types.freshFromContent(content);
debugAssertIdxsEql("addStatementAndTypeVar", expr_idx, expr_var);
self.debugAssertArraysInSync();
return expr_idx;
}
/// Add a new expression and type variable.
/// This function asserts that the types array and the nodes are in sync.
pub fn addStatementAndTypeVarRedirect(self: *Self, expr: CIR.Statement, redirect_to: TypeVar, region: Region) std.mem.Allocator.Error!CIR.Statement.Idx {
const expr_idx = try self.store.addStatement(expr, region);
const expr_var = try self.types.freshRedirect(redirect_to);
debugAssertIdxsEql("addStatementAndTypeVarRedirect", expr_idx, expr_var);
self.debugAssertArraysInSync();
return expr_idx;
}
/// Add a new expression and type variable.
/// This function asserts that the types array and the nodes are in sync.
pub fn addPatternAndTypeVar(self: *Self, expr: CIR.Pattern, content: types_mod.Content, region: Region) std.mem.Allocator.Error!CIR.Pattern.Idx {
/// Add a new pattern to the node store.
/// This function asserts that the nodes and regions are in sync.
pub fn addPattern(self: *Self, expr: CIR.Pattern, region: Region) std.mem.Allocator.Error!CIR.Pattern.Idx {
const expr_idx = try self.store.addPattern(expr, region);
const expr_var = try self.types.freshFromContent(content);
debugAssertIdxsEql("addPatternAndTypeVar", expr_idx, expr_var);
self.debugAssertArraysInSync();
return expr_idx;
}
/// Add a new expression and type variable.
/// This function asserts that the types array and the nodes are in sync.
pub fn addPatternAndTypeVarRedirect(self: *Self, expr: CIR.Pattern, redirect_to: TypeVar, region: Region) std.mem.Allocator.Error!CIR.Pattern.Idx {
const expr_idx = try self.store.addPattern(expr, region);
const expr_var = try self.types.freshRedirect(redirect_to);
debugAssertIdxsEql("addPatternAndTypeVar", expr_idx, expr_var);
self.debugAssertArraysInSync();
return expr_idx;
}
/// Add a new expression and type variable.
/// This function asserts that the types array and the nodes are in sync.
pub fn addExprAndTypeVar(self: *Self, expr: CIR.Expr, content: types_mod.Content, region: Region) std.mem.Allocator.Error!CIR.Expr.Idx {
/// Add a new expression to the node store.
/// This function asserts that the nodes and regions are in sync.
pub fn addExpr(self: *Self, expr: CIR.Expr, region: Region) std.mem.Allocator.Error!CIR.Expr.Idx {
const expr_idx = try self.store.addExpr(expr, region);
const expr_var = try self.types.freshFromContent(content);
debugAssertIdxsEql("addExprAndTypeVar", expr_idx, expr_var);
self.debugAssertArraysInSync();
return expr_idx;
}
/// Add a new expression and type variable.
/// This function asserts that the types array and the nodes are in sync.
pub fn addExprAndTypeVarRedirect(self: *Self, expr: CIR.Expr, redirect_to: TypeVar, region: Region) std.mem.Allocator.Error!CIR.Expr.Idx {
const expr_idx = try self.store.addExpr(expr, region);
const expr_var = try self.types.freshRedirect(redirect_to);
debugAssertIdxsEql("addExprAndTypeVarRedirect", expr_idx, expr_var);
self.debugAssertArraysInSync();
return expr_idx;
}
/// Add a new capture and type variable.
/// This function asserts that the types array and the nodes are in sync.
pub fn addCaptureAndTypeVar(self: *Self, capture: CIR.Expr.Capture, content: types_mod.Content, region: Region) std.mem.Allocator.Error!CIR.Expr.Capture.Idx {
/// Add a new capture to the node store.
/// This function asserts that the nodes and regions are in sync.
pub fn addCapture(self: *Self, capture: CIR.Expr.Capture, region: Region) std.mem.Allocator.Error!CIR.Expr.Capture.Idx {
const capture_idx = try self.store.addCapture(capture, region);
const capture_var = try self.types.freshFromContent(content);
debugAssertIdxsEql("addCaptureAndTypeVar", capture_idx, capture_var);
self.debugAssertArraysInSync();
return capture_idx;
}
/// Add a new expression and type variable.
/// This function asserts that the types array and the nodes are in sync.
pub fn addRecordFieldAndTypeVar(self: *Self, expr: CIR.RecordField, content: types_mod.Content, region: Region) std.mem.Allocator.Error!CIR.RecordField.Idx {
/// Add a new record field to the node store.
/// This function asserts that the nodes and regions are in sync.
pub fn addRecordField(self: *Self, expr: CIR.RecordField, region: Region) std.mem.Allocator.Error!CIR.RecordField.Idx {
const expr_idx = try self.store.addRecordField(expr, region);
const expr_var = try self.types.freshFromContent(content);
debugAssertIdxsEql("addRecordFieldAndTypeVar", expr_idx, expr_var);
self.debugAssertArraysInSync();
return expr_idx;
}
/// Add a new expression and type variable.
/// This function asserts that the types array and the nodes are in sync.
pub fn addRecordDestructAndTypeVar(self: *Self, expr: CIR.Pattern.RecordDestruct, content: types_mod.Content, region: Region) std.mem.Allocator.Error!CIR.Pattern.RecordDestruct.Idx {
/// Add a new record destructuring to the node store.
/// This function asserts that the nodes and regions are in sync.
pub fn addRecordDestruct(self: *Self, expr: CIR.Pattern.RecordDestruct, region: Region) std.mem.Allocator.Error!CIR.Pattern.RecordDestruct.Idx {
const expr_idx = try self.store.addRecordDestruct(expr, region);
const expr_var = try self.types.freshFromContent(content);
debugAssertIdxsEql("addRecordDestructorAndTypeVar", expr_idx, expr_var);
self.debugAssertArraysInSync();
return expr_idx;
}
/// Add a new expression and type variable.
/// This function asserts that the types array and the nodes are in sync.
pub fn addIfBranchAndTypeVar(self: *Self, expr: CIR.Expr.IfBranch, content: types_mod.Content, region: Region) std.mem.Allocator.Error!CIR.Expr.IfBranch.Idx {
/// Adds a new if branch to the store.
/// This function asserts that the nodes and regions are in sync.
pub fn addIfBranch(self: *Self, expr: CIR.Expr.IfBranch, region: Region) std.mem.Allocator.Error!CIR.Expr.IfBranch.Idx {
const expr_idx = try self.store.addIfBranch(expr, region);
const expr_var = try self.types.freshFromContent(content);
debugAssertIdxsEql("addIfBranchAndTypeVar", expr_idx, expr_var);
self.debugAssertArraysInSync();
return expr_idx;
}
/// Add a new expression and type variable.
/// This function asserts that the types array and the nodes are in sync.
pub fn addMatchBranchAndTypeVar(self: *Self, expr: CIR.Expr.Match.Branch, content: types_mod.Content, region: Region) std.mem.Allocator.Error!CIR.Expr.Match.Branch.Idx {
/// Add a new match branch to the node store.
/// This function asserts that the nodes and regions are in sync.
pub fn addMatchBranch(self: *Self, expr: CIR.Expr.Match.Branch, region: Region) std.mem.Allocator.Error!CIR.Expr.Match.Branch.Idx {
const expr_idx = try self.store.addMatchBranch(expr, region);
const expr_var = try self.types.freshFromContent(content);
debugAssertIdxsEql("addMatchBranchAndTypeVar", expr_idx, expr_var);
self.debugAssertArraysInSync();
return expr_idx;
}
/// Add a new expression and type variable.
/// This function asserts that the types array and the nodes are in sync.
pub fn addWhereClauseAndTypeVar(self: *Self, expr: CIR.WhereClause, content: types_mod.Content, region: Region) std.mem.Allocator.Error!CIR.WhereClause.Idx {
/// Add a new where clause to the node store.
/// This function asserts that the nodes and regions are in sync.
pub fn addWhereClause(self: *Self, expr: CIR.WhereClause, region: Region) std.mem.Allocator.Error!CIR.WhereClause.Idx {
const expr_idx = try self.store.addWhereClause(expr, region);
const expr_var = try self.types.freshFromContent(content);
debugAssertIdxsEql("addWhereClauseAndTypeVar", expr_idx, expr_var);
self.debugAssertArraysInSync();
return expr_idx;
}
/// Add a new expression and type variable.
/// This function asserts that the types array and the nodes are in sync.
pub fn addTypeAnnoAndTypeVar(self: *Self, expr: CIR.TypeAnno, content: types_mod.Content, region: Region) std.mem.Allocator.Error!CIR.TypeAnno.Idx {
/// Add a new type annotation to the node store.
/// This function asserts that the nodes and regions are in sync.
pub fn addTypeAnno(self: *Self, expr: CIR.TypeAnno, region: Region) std.mem.Allocator.Error!CIR.TypeAnno.Idx {
const expr_idx = try self.store.addTypeAnno(expr, region);
const expr_var = try self.types.freshFromContent(content);
debugAssertIdxsEql("addTypeAnnoAndTypeVar", expr_idx, expr_var);
self.debugAssertArraysInSync();
return expr_idx;
}
/// Add a new expression and type variable.
/// This function asserts that the types array and the nodes are in sync.
pub fn addTypeAnnoAndTypeVarRedirect(self: *Self, expr: CIR.TypeAnno, redirect_to: TypeVar, region: Region) std.mem.Allocator.Error!CIR.TypeAnno.Idx {
const expr_idx = try self.store.addTypeAnno(expr, region);
const expr_var = try self.types.freshRedirect(redirect_to);
debugAssertIdxsEql("addTypeAnnoAndTypeVarRedirect", expr_idx, expr_var);
self.debugAssertArraysInSync();
return expr_idx;
}
/// Add a new expression and type variable.
/// This function asserts that the types array and the nodes are in sync.
pub fn addAnnotationAndTypeVar(self: *Self, expr: CIR.Annotation, content: types_mod.Content, region: Region) std.mem.Allocator.Error!CIR.Annotation.Idx {
/// Add a new annotation to the node store.
/// This function asserts that the nodes and regions are in sync.
pub fn addAnnotation(self: *Self, expr: CIR.Annotation, region: Region) std.mem.Allocator.Error!CIR.Annotation.Idx {
const expr_idx = try self.store.addAnnotation(expr, region);
const expr_var = try self.types.freshFromContent(content);
debugAssertIdxsEql("addAnnotationAndTypeVar", expr_idx, expr_var);
self.debugAssertArraysInSync();
return expr_idx;
}
/// Add a new expression and type variable.
/// This function asserts that the types array and the nodes are in sync.
pub fn addAnnotationAndTypeVarRedirect(self: *Self, expr: CIR.Annotation, redirect_to: TypeVar, region: Region) std.mem.Allocator.Error!CIR.Annotation.Idx {
const expr_idx = try self.store.addAnnotation(expr, region);
const expr_var = try self.types.freshRedirect(redirect_to);
debugAssertIdxsEql("addAnnotationAndTypeVar", expr_idx, expr_var);
self.debugAssertArraysInSync();
return expr_idx;
}
/// Add a new expression and type variable.
/// This function asserts that the types array and the nodes are in sync.
pub fn addAnnoRecordFieldAndTypeVar(self: *Self, expr: CIR.TypeAnno.RecordField, content: types_mod.Content, region: Region) std.mem.Allocator.Error!CIR.TypeAnno.RecordField.Idx {
/// Add a new record field to the node store.
/// This function asserts that the nodes and regions are in sync.
pub fn addAnnoRecordField(self: *Self, expr: CIR.TypeAnno.RecordField, region: Region) std.mem.Allocator.Error!CIR.TypeAnno.RecordField.Idx {
const expr_idx = try self.store.addAnnoRecordField(expr, region);
const expr_var = try self.types.freshFromContent(content);
debugAssertIdxsEql("addAnnoRecordFieldAndTypeVar", expr_idx, expr_var);
self.debugAssertArraysInSync();
return expr_idx;
}
/// Add a new expression and type variable.
/// This function asserts that the types array and the nodes are in sync.
pub fn addAnnoRecordFieldAndTypeVarRedirect(self: *Self, expr: CIR.TypeAnno.RecordField, redirect_to: TypeVar, region: Region) std.mem.Allocator.Error!CIR.TypeAnno.RecordField.Idx {
const expr_idx = try self.store.addAnnoRecordField(expr, region);
const expr_var = try self.types.freshRedirect(redirect_to);
debugAssertIdxsEql("addAnnoRecordFieldAndTypeVar", expr_idx, expr_var);
self.debugAssertArraysInSync();
return expr_idx;
}
/// Add a new expression and type variable.
/// This function asserts that the types array and the nodes are in sync.
pub fn addExposedItemAndTypeVar(self: *Self, expr: CIR.ExposedItem, content: types_mod.Content, region: Region) std.mem.Allocator.Error!CIR.ExposedItem.Idx {
/// Add a new exposed item to the node store.
/// This function asserts that the nodes and regions are in sync.
pub fn addExposedItem(self: *Self, expr: CIR.ExposedItem, region: Region) std.mem.Allocator.Error!CIR.ExposedItem.Idx {
const expr_idx = try self.store.addExposedItem(expr, region);
const expr_var = try self.types.freshFromContent(content);
debugAssertIdxsEql("addExposedItemAndTypeVar", expr_idx, expr_var);
self.debugAssertArraysInSync();
return expr_idx;
}
/// Add a diagnostic without creating a corresponding type variable.
/// Add a diagnostic.
/// This function asserts that the nodes and regions are in sync.
pub fn addDiagnostic(self: *Self, reason: CIR.Diagnostic) std.mem.Allocator.Error!CIR.Diagnostic.Idx {
return self.store.addDiagnostic(reason);
}
/// Add a new expression and type variable.
/// This function asserts that the types array and the nodes are in sync.
pub fn addDiagnosticAndTypeVar(self: *Self, reason: CIR.Diagnostic, content: types_mod.Content) std.mem.Allocator.Error!CIR.Diagnostic.Idx {
const expr_idx = try self.store.addDiagnostic(reason);
const expr_var = try self.types.freshFromContent(content);
debugAssertIdxsEql("addDiagnosticAndTypeVar", expr_idx, expr_var);
self.debugAssertArraysInSync();
return expr_idx;
}
/// Add a new expression and type variable.
/// This function asserts that the types array and the nodes are in sync.
pub fn addMalformedAndTypeVar(self: *Self, diagnostic_idx: CIR.Diagnostic.Idx, content: types_mod.Content, region: Region) std.mem.Allocator.Error!CIR.Node.Idx {
/// Add a new malformed node to the node store.
/// This function asserts that the nodes and regions are in sync.
pub fn addMalformed(self: *Self, diagnostic_idx: CIR.Diagnostic.Idx, region: Region) std.mem.Allocator.Error!CIR.Node.Idx {
const malformed_idx = try self.store.addMalformed(diagnostic_idx, region);
const malformed_var = try self.types.freshFromContent(content);
debugAssertIdxsEql("addMalformedAndTypeVar", malformed_idx, malformed_var);
self.debugAssertArraysInSync();
return malformed_idx;
}
/// Add a new match branch pattern and type variable.
/// This function asserts that the types array and the nodes are in sync.
pub fn addMatchBranchPatternAndTypeVar(self: *Self, expr: CIR.Expr.Match.BranchPattern, content: types_mod.Content, region: Region) std.mem.Allocator.Error!CIR.Expr.Match.BranchPattern.Idx {
/// Add a new match branch pattern to the node store.
/// This function asserts that the nodes and regions are in sync.
pub fn addMatchBranchPattern(self: *Self, expr: CIR.Expr.Match.BranchPattern, region: Region) std.mem.Allocator.Error!CIR.Expr.Match.BranchPattern.Idx {
const expr_idx = try self.store.addMatchBranchPattern(expr, region);
const expr_var = try self.types.freshFromContent(content);
debugAssertIdxsEql("addMatchBranchPatternAndTypeVar", expr_idx, expr_var);
self.debugAssertArraysInSync();
return expr_idx;
}
/// Add a new pattern record field and type variable.
/// This function asserts that the types array and the nodes are in sync.
pub fn addPatternRecordFieldAndTypeVar(self: *Self, expr: CIR.PatternRecordField, content: types_mod.Content, region: Region) std.mem.Allocator.Error!CIR.PatternRecordField.Idx {
/// Add a new pattern record field to the node store.
/// This function asserts that the nodes and regions are in sync.
pub fn addPatternRecordField(self: *Self, expr: CIR.PatternRecordField, region: Region) std.mem.Allocator.Error!CIR.PatternRecordField.Idx {
_ = region;
const expr_idx = try self.store.addPatternRecordField(expr);
const expr_var = try self.types.freshFromContent(content);
debugAssertIdxsEql("addPatternRecordFieldAndTypeVar", expr_idx, expr_var);
self.debugAssertArraysInSync();
return expr_idx;
}
/// Add a new expression and type variable.
/// This function asserts that the types array and the nodes are in sync.
pub fn addTypeSlotAndTypeVar(
/// Add a new type variable to the node store.
/// This function asserts that the nodes and regions are in sync.
pub fn addTypeSlot(
self: *Self,
parent_node: CIR.Node.Idx,
content: types_mod.Content,
region: Region,
comptime RetIdx: type,
) std.mem.Allocator.Error!RetIdx {
comptime if (!isCastable(RetIdx)) @compileError("Idx type " ++ @typeName(RetIdx) ++ " is not castable");
const node_idx = try self.store.addTypeVarSlot(parent_node, region);
const node_var = try self.types.freshFromContent(content);
debugAssertIdxsEql("addTypeSlotAndTypeVar", node_idx, node_var);
self.debugAssertArraysInSync();
return @enumFromInt(@intFromEnum(node_idx));
}
/// Add a new expression and type variable.
/// This function asserts that the types array and the nodes are in sync.
pub fn addTypeSlotAndTypeVarRedirect(
self: *Self,
parent_node: CIR.Node.Idx,
redirect_to: TypeVar,
region: Region,
comptime RetIdx: type,
) std.mem.Allocator.Error!RetIdx {
comptime if (!isCastable(RetIdx)) @compileError("Idx type " ++ @typeName(RetIdx) ++ " is not castable");
const node_idx = try self.store.addTypeVarSlot(parent_node, region);
const node_var = try self.types.freshRedirect(redirect_to);
debugAssertIdxsEql("addTypeSlotAndTypeVarRedirect", node_idx, node_var);
self.debugAssertArraysInSync();
return @enumFromInt(@intFromEnum(node_idx));
}
/// Function that redirects an existing node to the provided var.
/// Assert that the requested idx is in bounds
pub fn redirectTypeTo(
self: *Self,
comptime FromIdx: type,
at_idx: FromIdx,
redirect_to: types_mod.Var,
) std.mem.Allocator.Error!void {
comptime if (!isCastable(FromIdx)) @compileError("Idx type " ++ @typeName(FromIdx) ++ " is not castable");
self.debugAssertArraysInSync();
std.debug.assert(@intFromEnum(at_idx) < self.types.len());
const var_ = varFrom(at_idx);
try self.types.setVarRedirect(var_, redirect_to);
}
/// Adds an external declaration and returns its index
pub fn pushExternalDecl(self: *Self, decl: CIR.ExternalDecl) std.mem.Allocator.Error!CIR.ExternalDecl.Idx {
const idx = @as(u32, @intCast(self.external_decls.len()));

View file

@ -695,7 +695,7 @@ test "numeric literal patterns use pattern idx as type var" {
},
};
const pattern_idx = try env.addPatternAndTypeVar(int_pattern, .err, base.Region.zero());
const pattern_idx = try env.addPattern(int_pattern, base.Region.zero());
// Verify the stored pattern
const stored_pattern = env.store.getPattern(pattern_idx);
@ -718,7 +718,7 @@ test "numeric literal patterns use pattern idx as type var" {
},
};
const pattern_idx = try env.addPatternAndTypeVar(dec_pattern, .err, base.Region.zero());
const pattern_idx = try env.addPattern(dec_pattern, base.Region.zero());
// Verify the stored pattern
const stored_pattern = env.store.getPattern(pattern_idx);

View file

@ -34,20 +34,14 @@ test "record literal uses record_unbound" {
return error.CanonicalizeError;
};
// Get the type of the expression
const expr_var = @as(TypeVar, @enumFromInt(@intFromEnum(canonical_expr_idx.get_idx())));
const resolved = env.types.resolveVar(expr_var);
// Check that it's a record_unbound
switch (resolved.desc.content) {
.structure => |structure| switch (structure) {
.record_unbound => |fields| {
// Success! The record literal created a record_unbound type
try std.testing.expect(fields.len() == 2);
},
else => return error.ExpectedRecordUnbound,
const canonical_expr = env.store.getExpr(canonical_expr_idx.idx);
// Check that it's a record
switch (canonical_expr) {
.e_record => |record| {
// Success! The record literal created a record
try std.testing.expect(record.fields.span.len == 2);
},
else => return error.ExpectedStructure,
else => return error.ExpectedRecord,
}
}
@ -71,19 +65,13 @@ test "record literal uses record_unbound" {
return error.CanonicalizeError;
};
// Get the type of the expression
const expr_var = @as(TypeVar, @enumFromInt(@intFromEnum(canonical_expr_idx.get_idx())));
const resolved = env.types.resolveVar(expr_var);
const canonical_expr = env.store.getExpr(canonical_expr_idx.idx);
// Check that it's an empty_record
switch (resolved.desc.content) {
.structure => |structure| switch (structure) {
.empty_record => {
// Success! Empty record literal created empty_record type
},
else => return error.ExpectedEmptyRecord,
switch (canonical_expr) {
.e_empty_record => {
// Success! Empty record literal created empty_record
},
else => return error.ExpectedStructure,
else => return error.ExpectedEmptyRecord,
}
}
@ -108,25 +96,21 @@ test "record literal uses record_unbound" {
return error.CanonicalizeError;
};
// Get the type of the expression
const expr_var = @as(TypeVar, @enumFromInt(@intFromEnum(canonical_expr_idx.get_idx())));
const resolved = env.types.resolveVar(expr_var);
const canonical_expr = env.store.getExpr(canonical_expr_idx.idx);
// Check that it's a record
switch (canonical_expr) {
.e_record => |record| {
// Success! The record literal created a record
try std.testing.expect(record.fields.span.len == 1);
// Check that it's a record_unbound
switch (resolved.desc.content) {
.structure => |structure| switch (structure) {
.record_unbound => |fields| {
// Success! The record literal created a record_unbound type
try std.testing.expect(fields.len() == 1);
const cir_fields = env.store.sliceRecordFields(record.fields);
// Check the field
const fields_slice = env.types.getRecordFieldsSlice(fields);
const field_name = env.getIdent(fields_slice.get(0).name);
try std.testing.expectEqualStrings("value", field_name);
},
else => return error.ExpectedRecordUnbound,
const cir_field = env.store.getRecordField(cir_fields[0]);
const field_name = env.getIdent(cir_field.name);
try std.testing.expectEqualStrings("value", field_name);
},
else => return error.ExpectedStructure,
else => return error.ExpectedRecord,
}
}
}
@ -153,25 +137,23 @@ test "record_unbound basic functionality" {
return error.CanonicalizeError;
};
// Get the type of the expression
const expr_var = @as(TypeVar, @enumFromInt(@intFromEnum(canonical_expr_idx.get_idx())));
const resolved = env.types.resolveVar(expr_var);
const canonical_expr = env.store.getExpr(canonical_expr_idx.idx);
// Check that it's a record
switch (canonical_expr) {
.e_record => |record| {
// Success! The record literal created a record
try std.testing.expect(record.fields.span.len == 2);
// Verify it starts as record_unbound
switch (resolved.desc.content) {
.structure => |structure| switch (structure) {
.record_unbound => |fields| {
// Success! Record literal created record_unbound type
try std.testing.expect(fields.len() == 2);
const cir_fields = env.store.sliceRecordFields(record.fields);
// Check field names
const field_slice = env.types.getRecordFieldsSlice(fields);
try std.testing.expectEqualStrings("x", env.getIdent(field_slice.get(0).name));
try std.testing.expectEqualStrings("y", env.getIdent(field_slice.get(1).name));
},
else => return error.ExpectedRecordUnbound,
const cir_field_0 = env.store.getRecordField(cir_fields[0]);
const cir_field_1 = env.store.getRecordField(cir_fields[1]);
// Check field names
try std.testing.expectEqualStrings("x", env.getIdent(cir_field_0.name));
try std.testing.expectEqualStrings("y", env.getIdent(cir_field_1.name));
},
else => return error.ExpectedStructure,
else => return error.ExpectedRecord,
}
}
@ -197,24 +179,25 @@ test "record_unbound with multiple fields" {
return error.CanonicalizeError;
};
const expr_var = @as(TypeVar, @enumFromInt(@intFromEnum(canonical_expr_idx.get_idx())));
const resolved = env.types.resolveVar(expr_var);
const canonical_expr = env.store.getExpr(canonical_expr_idx.idx);
// Check that it's a record
switch (canonical_expr) {
.e_record => |record| {
// Success! The record literal created a record
try std.testing.expect(record.fields.span.len == 3);
// Should be record_unbound
switch (resolved.desc.content) {
.structure => |s| switch (s) {
.record_unbound => |fields| {
try std.testing.expect(fields.len() == 3);
const cir_fields = env.store.sliceRecordFields(record.fields);
// Check field names
const field_slice = env.types.getRecordFieldsSlice(fields);
try std.testing.expectEqualStrings("a", env.getIdent(field_slice.get(0).name));
try std.testing.expectEqualStrings("b", env.getIdent(field_slice.get(1).name));
try std.testing.expectEqualStrings("c", env.getIdent(field_slice.get(2).name));
},
else => return error.ExpectedRecordUnbound,
const cir_field_0 = env.store.getRecordField(cir_fields[0]);
const cir_field_1 = env.store.getRecordField(cir_fields[1]);
const cir_field_2 = env.store.getRecordField(cir_fields[2]);
// Check field names
try std.testing.expectEqualStrings("a", env.getIdent(cir_field_0.name));
try std.testing.expectEqualStrings("b", env.getIdent(cir_field_1.name));
try std.testing.expectEqualStrings("c", env.getIdent(cir_field_2.name));
},
else => return error.ExpectedStructure,
else => return error.ExpectedRecord,
}
}

View file

@ -166,6 +166,16 @@ pub inline fn debugAssertArraysInSync(self: *const Self) void {
}
}
/// Fills the type store with fresh variables up to the number of regions
inline fn ensureTypeStoreIsFilled(self: *Self) Allocator.Error!void {
const region_nodes: usize = @intCast(self.regions.len());
const type_nodes: usize = @intCast(self.types.len());
try self.types.ensureTotalCapacity(region_nodes);
for (type_nodes..region_nodes) |_| {
_ = self.types.appendFromContentAssumeCapacity(.err);
}
}
// import caches //
/// Key for the import cache: module index + expression index in that module
@ -206,7 +216,7 @@ const ImportCache = std.HashMapUnmanaged(ImportCacheKey, Var, struct {
// unify //
/// Unify two types
pub fn unify(self: *Self, a: Var, b: Var, rank: Rank) std.mem.Allocator.Error!unifier.Result {
fn unify(self: *Self, a: Var, b: Var, rank: Rank) std.mem.Allocator.Error!unifier.Result {
const trace = tracy.trace(@src());
defer trace.end();
@ -240,12 +250,15 @@ pub fn unify(self: *Self, a: Var, b: Var, rank: Rank) std.mem.Allocator.Error!un
for (self.unify_scratch.fresh_vars.items.items) |fresh_var| {
try self.var_pool.addVarToRank(fresh_var, rank);
const region = self.cir.store.getNodeRegion(ModuleEnv.nodeIdxFrom(a));
try self.fillInRegionsThrough(fresh_var);
self.setRegionAt(fresh_var, region);
}
for (self.unify_scratch.deferred_constraints.items.items) |deferred_constraint| {
_ = try self.deferred_static_dispatch_constraints.append(self.gpa, deferred_constraint);
}
self.debugAssertArraysInSync();
return result;
}
@ -279,7 +292,7 @@ fn findConstraintOriginForVars(self: *Self, a: Var, b: Var) ?Var {
/// Unify two variables where the second represents an annotation type.
/// This sets from_annotation=true to ensure proper error region highlighting.
pub fn unifyFromAnno(self: *Self, a: Var, b: Var, rank: Rank) std.mem.Allocator.Error!unifier.Result {
fn unifyFromAnno(self: *Self, a: Var, b: Var, rank: Rank) std.mem.Allocator.Error!unifier.Result {
const trace = tracy.trace(@src());
defer trace.end();
@ -311,13 +324,20 @@ pub fn unifyFromAnno(self: *Self, a: Var, b: Var, rank: Rank) std.mem.Allocator.
try self.var_pool.addVarToRank(fresh_var, rank);
}
}
for (self.unify_scratch.fresh_vars.items.items) |fresh_var| {
const region = self.cir.store.getNodeRegion(ModuleEnv.nodeIdxFrom(a));
try self.fillInRegionsThrough(fresh_var);
self.setRegionAt(fresh_var, region);
}
self.debugAssertArraysInSync();
return result;
}
/// Unify two variables with a specific constraint origin for better error reporting.
/// The constraint_origin_var should point to the expression that created the constraint.
pub fn unifyWithConstraintOrigin(self: *Self, a: Var, b: Var, constraint_origin_var: Var) std.mem.Allocator.Error!unifier.Result {
fn unifyWithConstraintOrigin(self: *Self, a: Var, b: Var, constraint_origin_var: Var) std.mem.Allocator.Error!unifier.Result {
const trace = tracy.trace(@src());
defer trace.end();
@ -533,6 +553,8 @@ pub fn checkFile(self: *Self) std.mem.Allocator.Error!void {
const trace = tracy.trace(@src());
defer trace.end();
try ensureTypeStoreIsFilled(self);
// First, iterate over the statements, generating types for each type declaration
const builtin_stmts_slice = self.cir.store.sliceStatements(self.cir.builtin_statements);
for (builtin_stmts_slice) |builtin_stmt_idx| {
@ -562,6 +584,7 @@ pub fn checkFile(self: *Self) std.mem.Allocator.Error!void {
/// Check an expr for the repl
pub fn checkExprRepl(self: *Self, expr_idx: CIR.Expr.Idx) std.mem.Allocator.Error!void {
try ensureTypeStoreIsFilled(self);
// First, iterate over the statements, generating types for each type declaration
const stms_slice = self.cir.store.sliceStatements(self.cir.builtin_statements);
for (stms_slice) |stmt_idx| {
@ -2162,10 +2185,19 @@ fn checkExpr(self: *Self, expr_idx: CIR.Expr.Idx, rank: types_mod.Rank, expected
const check_mode = blk: {
if (decl_stmt.anno) |anno_idx| {
const annotation = self.cir.store.getAnnotation(anno_idx);
// Generate the annotation type var in-place
self.seen_annos.clearRetainingCapacity();
try self.generateAnnoTypeInPlace(annotation.type_anno, .annotation);
// Update the outer anno to redirect to the inner anno
const anno_var = ModuleEnv.varFrom(anno_idx);
const type_anno_var = ModuleEnv.varFrom(annotation.type_anno);
try self.types.setVarRedirect(anno_var, type_anno_var);
// Return the expectation
break :blk Expected{
.expected = .{ .var_ = anno_var, .from_annotation = true },
.expected = .{ .var_ = type_anno_var, .from_annotation = true },
};
} else {
break :blk Expected.no_expectation;
@ -2223,6 +2255,30 @@ fn checkExpr(self: *Self, expr_idx: CIR.Expr.Idx, rank: types_mod.Rank, expected
const bool_var = try self.freshBool(rank, expr_region);
_ = try self.unify(bool_var, stmt_expr, rank);
},
.s_var => |var_stmt| {
// Check the pattern
try self.checkPattern(var_stmt.pattern_idx, rank, .no_expectation);
const var_pattern_var: Var = ModuleEnv.varFrom(var_stmt.pattern_idx);
{
// Enter a new rank
try self.var_pool.pushRank();
defer self.var_pool.popRank();
const next_rank = rank.next();
std.debug.assert(next_rank == self.var_pool.current_rank);
does_fx = try self.checkExpr(var_stmt.expr, next_rank, Expected.no_expectation) or does_fx;
// Now that we are existing the scope, we must generalize then pop this rank
try self.generalizer.generalize(&self.var_pool, next_rank);
}
// Unify the pattern with the expression
const var_expr_var: Var = ModuleEnv.varFrom(var_stmt.expr);
_ = try self.unify(var_pattern_var, var_expr_var, rank);
},
else => {
// TODO
},
@ -2915,6 +2971,9 @@ fn checkMatchExpr(self: *Self, expr_idx: CIR.Expr.Idx, rank: Rank, match: CIR.Ex
does_fx = try self.checkExpr(first_branch.value, rank, .no_expectation) or does_fx;
const branch_var = ModuleEnv.varFrom(first_branch.value);
// Redirect the match expr to the first branch.
try self.types.setVarRedirect(ModuleEnv.varFrom(expr_idx), branch_var);
// Then iterate over the rest of the branches
for (branch_idxs[1..], 1..) |branch_idx, branch_cur_index| {
const branch = self.cir.store.getMatchBranch(branch_idx);
@ -3007,9 +3066,11 @@ fn checkUnaryMinusExpr(self: *Self, expr_idx: CIR.Expr.Idx, expr_region: Region,
} } };
const num_var = try self.freshFromContent(num_content, rank, expr_region);
// Unify operand and result with the number type
// Redirect the result to the number type
try self.types.setVarRedirect(result_var, num_var);
// Unify result with the number type
_ = try self.unify(num_var, operand_var, rank);
_ = try self.unify(num_var, result_var, rank);
return does_fx;
}
@ -3030,9 +3091,11 @@ fn checkUnaryNotExpr(self: *Self, expr_idx: CIR.Expr.Idx, expr_region: Region, r
// Create a fresh boolean variable for the operation
const bool_var = try self.freshBool(rank, expr_region);
// Unify operand and result with the boolean type
// Redirect the result to the boolean type
try self.types.setVarRedirect(result_var, bool_var);
// Unify result with the boolean type
_ = try self.unify(bool_var, operand_var, rank);
_ = try self.unify(bool_var, result_var, rank);
return does_fx;
}

View file

@ -200,6 +200,14 @@ pub fn SafeList(comptime T: type) type {
return @enumFromInt(@as(u32, @intCast(length)));
}
/// Add a new item to the end of this list assuming capacity is sufficient to hold an additional item.
pub fn appendAssumeCapacity(self: *SafeList(T), item: T) Idx {
const length = self.len();
self.items.appendAssumeCapacity(item);
return @enumFromInt(@as(u32, @intCast(length)));
}
/// Create a range from the provided idx to the end of the list
pub fn rangeToEnd(self: *SafeList(T), start_int: u32) Range {
const len_int = self.len();
@ -432,6 +440,14 @@ pub fn SafeMultiList(comptime T: type) type {
return @enumFromInt(@as(u32, @intCast(length)));
}
/// Add a new item to the end of this list assuming capacity is sufficient to hold an additional item.
pub fn appendAssumeCapacity(self: *SafeMultiList(T), item: T) Idx {
const length = self.len();
self.items.appendAssumeCapacity(item);
return @enumFromInt(@as(u32, @intCast(length)));
}
pub fn appendSlice(self: *SafeMultiList(T), gpa: Allocator, elems: []const T) std.mem.Allocator.Error!Range {
if (elems.len == 0) {
return .{ .start = .zero, .count = 0 };

View file

@ -2,6 +2,7 @@
const std = @import("std");
const base = @import("base");
const can = @import("can");
const types = @import("types");
const collections = @import("collections");
const ModuleEnv = can.ModuleEnv;
@ -389,10 +390,12 @@ test "ModuleEnv pushExprTypesToSExprTree extracts and formats types" {
const str_literal_idx = try env.insertString("hello");
// Add a string segment expression
const segment_idx = try env.addExprAndTypeVar(.{ .e_str_segment = .{ .literal = str_literal_idx } }, .{ .structure = .str }, base.Region.from_raw_offsets(0, 5));
const segment_idx = try env.addExpr(.{ .e_str_segment = .{ .literal = str_literal_idx } }, base.Region.from_raw_offsets(0, 5));
_ = try env.types.freshFromContent(.{ .structure = .str });
// Now create a string expression that references the segment
const expr_idx = try env.addExprAndTypeVar(.{ .e_str = .{ .span = Expr.Span{ .span = base.DataSpan{ .start = @intFromEnum(segment_idx), .len = 1 } } } }, .{ .structure = .str }, base.Region.from_raw_offsets(0, 5));
const expr_idx = try env.addExpr(.{ .e_str = .{ .span = Expr.Span{ .span = base.DataSpan{ .start = @intFromEnum(segment_idx), .len = 1 } } } }, base.Region.from_raw_offsets(0, 5));
_ = try env.types.freshFromContent(.{ .structure = .str });
// Create an S-expression tree
var tree = base.SExprTree.init(gpa);

View file

@ -1286,15 +1286,15 @@ fn processSnapshotContent(
);
defer solver.deinit();
// Assert that we have regions for every type variable
solver.debugAssertArraysInSync();
if (maybe_expr_idx) |expr_idx| {
_ = try solver.checkExprRepl(expr_idx.idx);
} else {
try solver.checkFile();
}
// Assert that we have regions for every type variable
solver.debugAssertArraysInSync();
// Cache round-trip validation - ensure ModuleCache serialization/deserialization works
{
// Generate original S-expression for comparison

View file

@ -184,6 +184,13 @@ pub const Store = struct {
}
}
/// Create a new variable with the provided content assuming there is capacity
pub fn appendFromContentAssumeCapacity(self: *Self, content: Content) Var {
const desc_idx = self.descs.appendAssumeCapacity(.{ .content = content, .rank = Rank.top_level, .mark = Mark.none });
const slot_idx = self.slots.appendAssumeCapacity(.{ .root = desc_idx });
return Self.slotIdxToVar(slot_idx);
}
// setting variables //
/// Set a type variable to the provided content
@ -974,9 +981,9 @@ const SlotStore = struct {
return @enumFromInt(@intFromEnum(safe_idx));
}
/// Insert a value into the store
fn appendAssumeCapacity(self: *Self, gpa: Allocator, typ: Slot) std.mem.Allocator.Error!Idx {
const safe_idx = try self.backing.append(gpa, typ);
/// Insert a value into the store assuming there is capacity
fn appendAssumeCapacity(self: *Self, typ: Slot) Idx {
const safe_idx = self.backing.appendAssumeCapacity(typ);
return @enumFromInt(@intFromEnum(safe_idx));
}
@ -1078,6 +1085,12 @@ const DescStore = struct {
return @enumFromInt(@intFromEnum(safe_idx));
}
/// Appends a value to the store assuming there is capacity
fn appendAssumeCapacity(self: *Self, typ: Desc) Idx {
const safe_idx = self.backing.appendAssumeCapacity(typ);
return @enumFromInt(@intFromEnum(safe_idx));
}
/// Set a value in the store
fn set(self: *Self, idx: Idx, val: Desc) void {
self.backing.set(@enumFromInt(@intFromEnum(idx)), val);

View file

@ -173,5 +173,5 @@ EndOfFile,
~~~
# TYPES
~~~clojure
(expr (type "(Num(_size), Num(_size2), Num(_size3), Num(_size4), Num(_size5), Bool, Bool, Bool, Bool, Bool, Bool, Num(_size6), Bool, Bool, _field)"))
(expr (type "(Num(_size), Num(_size2), Num(_size3), Num(_size4), Num(_size5), Bool, Bool, Bool, Bool, Bool, Bool, Num(_size6), Bool, Bool, Error)"))
~~~

View file

@ -85,5 +85,5 @@ EndOfFile,
~~~
# TYPES
~~~clojure
(expr (type "_a"))
(expr (type "Error"))
~~~

View file

@ -744,8 +744,8 @@ combineResults = |jsonResult, httpStatus|
(p-assign (ident "handleResponse"))
(e-closure
(captures
(capture (ident "error"))
(capture (ident "status")))
(capture (ident "status"))
(capture (ident "error")))
(e-lambda
(args
(p-assign (ident "response")))

View file

@ -490,9 +490,9 @@ combineResults = |result1, result2|
(e-closure
(captures
(capture (ident "err"))
(capture (ident "value2"))
(capture (ident "err"))
(capture (ident "value1")))
(capture (ident "value1"))
(capture (ident "value2")))
(e-lambda
(args
(p-assign (ident "result1"))

View file

@ -50,5 +50,5 @@ NO CHANGE
~~~
# TYPES
~~~clojure
(expr (type "_a"))
(expr (type "Error"))
~~~

View file

@ -683,10 +683,10 @@ h = |x, y| {
(p-assign (ident "h"))
(e-closure
(captures
(capture (ident "a"))
(capture (ident "a"))
(capture (ident "a"))
(capture (ident "h"))
(capture (ident "a"))
(capture (ident "a"))
(capture (ident "a"))
(capture (ident "a")))
(e-lambda
(args

View file

@ -371,10 +371,10 @@ NO CHANGE
(p-assign (ident "h"))
(e-closure
(captures
(capture (ident "a"))
(capture (ident "a"))
(capture (ident "a"))
(capture (ident "h"))
(capture (ident "a"))
(capture (ident "a"))
(capture (ident "a"))
(capture (ident "a")))
(e-lambda
(args

View file

@ -484,10 +484,10 @@ h = |
(p-assign (ident "h"))
(e-closure
(captures
(capture (ident "a"))
(capture (ident "a"))
(capture (ident "a"))
(capture (ident "h"))
(capture (ident "a"))
(capture (ident "a"))
(capture (ident "a"))
(capture (ident "a")))
(e-lambda
(args

View file

@ -1962,8 +1962,8 @@ expect {
(e-closure
(captures
(capture (ident "x"))
(capture (ident "x"))
(capture (ident "dude")))
(capture (ident "dude"))
(capture (ident "x")))
(e-lambda
(args
(p-assign (ident "a"))
@ -2177,8 +2177,8 @@ expect {
(p-assign (ident "main!"))
(e-closure
(captures
(capture (ident "match_time"))
(capture (ident "add_one")))
(capture (ident "add_one"))
(capture (ident "match_time")))
(e-lambda
(args
(p-underscore))

View file

@ -222,5 +222,5 @@ match ... {
~~~
# TYPES
~~~clojure
(expr (type "Str"))
(expr (type "Error"))
~~~

View file

@ -132,5 +132,5 @@ match ... {
~~~
# TYPES
~~~clojure
(expr (type "Str"))
(expr (type "Error"))
~~~

View file

@ -276,5 +276,5 @@ match ... {
~~~
# TYPES
~~~clojure
(expr (type "Str"))
(expr (type "Error"))
~~~

View file

@ -716,13 +716,13 @@ is_named_color = |str| {
(p-assign (ident "hex"))
(e-closure
(captures
(capture (ident "is_valid"))
(capture (ident "d"))
(capture (ident "f"))
(capture (ident "a"))
(capture (ident "b"))
(capture (ident "d"))
(capture (ident "e"))
(capture (ident "c"))
(capture (ident "e")))
(capture (ident "f"))
(capture (ident "b"))
(capture (ident "is_valid")))
(e-lambda
(args
(p-assign (ident "str")))
@ -879,14 +879,14 @@ is_named_color = |str| {
(captures
(capture (ident "b"))
(capture (ident "a"))
(capture (ident "r"))
(capture (ident "inner"))
(capture (ident "to_str"))
(capture (ident "r"))
(capture (ident "inner"))
(capture (ident "g"))
(capture (ident "b"))
(capture (ident "to_str"))
(capture (ident "r"))
(capture (ident "g"))
(capture (ident "inner"))
(capture (ident "r")))
(capture (ident "g")))
(e-lambda
(args
(p-assign (ident "color")))
@ -1111,7 +1111,7 @@ is_named_color = |str| {
(patt (type "Str -> Result(Color, [InvalidHex(Str)])"))
(patt (type "Error -> Error"))
(patt (type "Str -> Result(Color, [UnknownColor(Str)])"))
(patt (type "_arg -> _ret")))
(patt (type "_arg -> Error")))
(type_decls
(nominal (type "Color")
(ty-header (name "Color"))))
@ -1121,5 +1121,5 @@ is_named_color = |str| {
(expr (type "Str -> Result(Color, [InvalidHex(Str)])"))
(expr (type "Error -> Error"))
(expr (type "Str -> Result(Color, [UnknownColor(Str)])"))
(expr (type "_arg -> _ret"))))
(expr (type "_arg -> Error"))))
~~~

View file

@ -63,5 +63,5 @@ NO CHANGE
~~~
# TYPES
~~~clojure
(expr (type "{ age: _field, name: _field2 } -> Str"))
(expr (type "{ age: _field, name: _field2 } -> Error"))
~~~

View file

@ -90,5 +90,5 @@ match person {
~~~
# TYPES
~~~clojure
(expr (type "Str"))
(expr (type "Error"))
~~~

View file

@ -1872,8 +1872,8 @@ expect {
(e-closure
(captures
(capture (ident "x"))
(capture (ident "x"))
(capture (ident "dude")))
(capture (ident "dude"))
(capture (ident "x")))
(e-lambda
(args
(p-assign (ident "a"))
@ -2087,8 +2087,8 @@ expect {
(p-assign (ident "main!"))
(e-closure
(captures
(capture (ident "match_time"))
(capture (ident "add_one")))
(capture (ident "add_one"))
(capture (ident "match_time")))
(e-lambda
(args
(p-underscore))

View file

@ -366,9 +366,9 @@ main! = |_| processComplex(Ok([Some(42), None]))
~~~clojure
(inferred-types
(defs
(patt (type "Result(List(Error), Error) -> List(a)"))
(patt (type "Error -> a"))
(patt (type "_arg -> List(a)")))
(patt (type "Result(List(Error), Error) -> List(Error)"))
(patt (type "Error -> Error"))
(patt (type "_arg -> List(Error)")))
(type_decls
(alias (type "ComplexType(a, b)")
(ty-header (name "ComplexType")
@ -376,7 +376,7 @@ main! = |_| processComplex(Ok([Some(42), None]))
(ty-rigid-var (name "a"))
(ty-rigid-var (name "b"))))))
(expressions
(expr (type "Result(List(Error), Error) -> List(a)"))
(expr (type "Error -> a"))
(expr (type "_arg -> List(a)"))))
(expr (type "Result(List(Error), Error) -> List(Error)"))
(expr (type "Error -> Error"))
(expr (type "_arg -> List(Error)"))))
~~~

View file

@ -117,9 +117,9 @@ main! = |_| processList(["one", "two"])
~~~clojure
(inferred-types
(defs
(patt (type "List(Str) -> Num(Int(Unsigned64))"))
(patt (type "_arg -> Num(Int(Unsigned64))")))
(patt (type "List(Str) -> Error"))
(patt (type "_arg -> Error")))
(expressions
(expr (type "List(Str) -> Num(Int(Unsigned64))"))
(expr (type "_arg -> Num(Int(Unsigned64))"))))
(expr (type "List(Str) -> Error"))
(expr (type "_arg -> Error"))))
~~~

View file

@ -27,7 +27,7 @@ It has the type:
_List(Num(_size)) -> _ret_
But I expected it to be:
_List(a), a -> b -> List(b)_
_List(a), a -> b -> Error_
# TOKENS
~~~zig
@ -147,9 +147,9 @@ main! = |_| mapList([1, 2, 3, 4, 5])
~~~clojure
(inferred-types
(defs
(patt (type "List(a), a -> b -> List(b)"))
(patt (type "List(a), a -> b -> Error"))
(patt (type "_arg -> _ret")))
(expressions
(expr (type "List(a), a -> b -> List(b)"))
(expr (type "List(a), a -> b -> Error"))
(expr (type "_arg -> _ret"))))
~~~

View file

@ -121,9 +121,9 @@ main! = |_| processList(["one", "two", "three"])
~~~clojure
(inferred-types
(defs
(patt (type "List(Str) -> Num(Int(Unsigned64))"))
(patt (type "_arg -> Num(Int(Unsigned64))")))
(patt (type "List(Str) -> Error"))
(patt (type "_arg -> Error")))
(expressions
(expr (type "List(Str) -> Num(Int(Unsigned64))"))
(expr (type "_arg -> Num(Int(Unsigned64))"))))
(expr (type "List(Str) -> Error"))
(expr (type "_arg -> Error"))))
~~~

View file

@ -531,13 +531,13 @@ main! = |_| {
(p-assign (ident "main!"))
(e-closure
(captures
(capture (ident "anotherIdentity"))
(capture (ident "yetAnotherIdentity"))
(capture (ident "f"))
(capture (ident "a"))
(capture (ident "finalIdentity"))
(capture (ident "identity"))
(capture (ident "combine"))
(capture (ident "yetAnotherIdentity")))
(capture (ident "anotherIdentity"))
(capture (ident "combine")))
(e-lambda
(args
(p-underscore))

View file

@ -79,7 +79,7 @@ NO CHANGE
~~~clojure
(inferred-types
(defs
(patt (type "a -> Str")))
(patt (type "a -> Error")))
(expressions
(expr (type "a -> Str"))))
(expr (type "a -> Error"))))
~~~

View file

@ -79,7 +79,7 @@ NO CHANGE
~~~clojure
(inferred-types
(defs
(patt (type "a -> b")))
(patt (type "a -> Error")))
(expressions
(expr (type "a -> b"))))
(expr (type "a -> Error"))))
~~~