Mirror of https://github.com/roc-lang/roc.git, synced 2025-08-04 20:28:02 +00:00

Merge pull request #7820 from roc-lang/more-can
Implement more Can things

This commit is contained in: commit 7b8f6d047a

114 changed files with 2949 additions and 2425 deletions
src/base.zig (34 changes)
@@ -11,7 +11,7 @@ pub const Package = @import("base/Package.zig");
 pub const ModuleEnv = @import("base/ModuleEnv.zig");
 pub const ModuleImport = @import("base/ModuleImport.zig");
 pub const StringLiteral = @import("base/StringLiteral.zig");
-pub const DiagnosticPosition = @import("base/DiagnosticPosition.zig");
+pub const RegionInfo = @import("base/RegionInfo.zig");
 pub const Scratch = @import("base/Scratch.zig").Scratch;

 pub const ModuleWork = module_work.ModuleWork;
@@ -27,7 +27,29 @@ pub const Recursive = enum {
 };

 /// The manner in which a function was called, useful for giving better feedback to users.
-pub const CalledVia = enum {};
+pub const CalledVia = enum {
+    /// Normal function application, e.g. `foo(bar)`
+    apply,
+    /// Calling with an operator, e.g. `(1 + 2)`
+    binop,
+    /// Calling with a unary operator, e.g. `!foo` or `-foo`
+    unary_op,
+    /// This call is the result of desugaring string interpolation,
+    /// e.g. `"${first} ${last}"` is transformed into `Str.concat(Str.concat(first, " "), last)`.
+    string_interpolation,
+    /// This call is the result of desugaring a map2-based Record Builder field, e.g.
+    /// ```roc
+    /// { Result.parallel <-
+    ///     foo: get("a"),
+    ///     bar: get("b"),
+    /// }
+    /// ```
+    /// is transformed into
+    /// ```roc
+    /// Result.parallel(get("a"), get("b"), (|foo, bar| { foo, bar }))
+    /// ```
+    record_builder,
+};

 /// Represents a value written as-is in a Roc source file.
 pub const Literal = union(enum) {
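Later in this diff, NodeStore packs a `CalledVia` into one of a node's `u32` data slots (see the `expr_call` handling in NodeStore.zig below). A minimal sketch of that enum-to-u32 round trip; the test is illustrative, not part of the commit:

```zig
const std = @import("std");

const CalledVia = enum { apply, binop, unary_op, string_interpolation, record_builder };

test "CalledVia survives a u32 round trip" {
    // Packing: the enum tag is widened into a u32 node field.
    const data_3: u32 = @intFromEnum(CalledVia.record_builder);
    // Unpacking: the u32 is narrowed back into the enum.
    const called_via: CalledVia = @enumFromInt(data_3);
    try std.testing.expectEqual(CalledVia.record_builder, called_via);
}
```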
@@ -76,4 +98,12 @@ pub const DataSpan = struct {
     pub fn empty() DataSpan {
        return DataSpan{ .start = 0, .len = 0 };
     }
+
+    pub fn init(start: u32, len: u32) DataSpan {
+        return DataSpan{ .start = start, .len = len };
+    }
+
+    pub fn as(self: DataSpan, comptime T: type) T {
+        return @as(T, .{ .span = self });
+    }
 };
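The new `DataSpan.as` helper relies on Zig's anonymous-struct coercion: any wrapper type with a single `span` field can be produced from a raw span. A self-contained sketch of the round trip; the `ExprSpan` wrapper here is hypothetical, standing in for typed spans like `CIR.Expr.Span`:

```zig
const std = @import("std");

pub const DataSpan = struct {
    start: u32,
    len: u32,

    pub fn init(start: u32, len: u32) DataSpan {
        return DataSpan{ .start = start, .len = len };
    }

    pub fn as(self: DataSpan, comptime T: type) T {
        return @as(T, .{ .span = self });
    }
};

// Hypothetical typed wrapper, mirroring spans like CIR.Expr.Span.
const ExprSpan = struct { span: DataSpan };

test "DataSpan round trip" {
    const typed = DataSpan.init(4, 2).as(ExprSpan);
    try std.testing.expectEqual(@as(u32, 4), typed.span.start);
    try std.testing.expectEqual(@as(u32, 2), typed.span.len);
}
```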

src/base/ModuleEnv.zig

@ -11,6 +11,8 @@ const problem = @import("../problem.zig");
|
|||
const collections = @import("../collections.zig");
|
||||
const Ident = @import("Ident.zig");
|
||||
const StringLiteral = @import("StringLiteral.zig");
|
||||
const RegionInfo = @import("RegionInfo.zig");
|
||||
const exitOnOom = collections.utils.exitOnOom;
|
||||
|
||||
const Type = type_mod.Type;
|
||||
const Problem = problem.Problem;
|
||||
|
@ -23,10 +25,24 @@ ident_ids_for_slicing: collections.SafeList(Ident.Idx),
|
|||
strings: StringLiteral.Store,
|
||||
types_store: type_mod.Store,
|
||||
problems: Problem.List,
|
||||
/// Line starts for error reporting. We retain only start and offset positions in the IR
|
||||
/// and then use these line starts to calculate the line number and column number as required.
|
||||
/// this is a more compact representation at the expense of extra computation only when generating error diagnostics.
|
||||
line_starts: std.ArrayList(u32),
|
||||
/// The original source bytes. We use these to generate error diagnostics.
|
||||
/// TODO think about how we will manage this using the cache. Should we only
|
||||
/// read these when we need them to report an error? instead of keeping all of this in memory.
|
||||
/// This implementation here is simple, but let's us progress with working snapshot tests
|
||||
/// and we can validate the error messages and region information there.
|
||||
source: std.ArrayList(u8),
|
||||
|
||||
/// Initialize the module environment.
|
||||
pub fn init(gpa: std.mem.Allocator) Self {
|
||||
pub fn init(gpa: std.mem.Allocator, source_bytes: []const u8) Self {
|
||||
// TODO: maybe wire in smarter default based on the initial input text size.
|
||||
|
||||
var source = std.ArrayList(u8).init(gpa);
|
||||
source.appendSlice(source_bytes) catch |err| exitOnOom(err);
|
||||
|
||||
return Self{
|
||||
.gpa = gpa,
|
||||
.idents = Ident.Store.initCapacity(gpa, 1024),
|
||||
|
@ -34,6 +50,8 @@ pub fn init(gpa: std.mem.Allocator) Self {
|
|||
.strings = StringLiteral.Store.initCapacityBytes(gpa, 4096),
|
||||
.types_store = type_mod.Store.initCapacity(gpa, 2048, 512),
|
||||
.problems = Problem.List.initCapacity(gpa, 64),
|
||||
.line_starts = std.ArrayList(u32).init(gpa),
|
||||
.source = source,
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -44,4 +62,22 @@ pub fn deinit(self: *Self) void {
|
|||
self.strings.deinit(self.gpa);
|
||||
self.types_store.deinit();
|
||||
self.problems.deinit(self.gpa);
|
||||
self.line_starts.deinit();
|
||||
self.source.deinit();
|
||||
}
|
||||
|
||||
/// Helper to push a problem to the ModuleEnv
|
||||
pub fn pushProblem(self: *Self, p: Problem) void {
|
||||
_ = self.problems.append(self.gpa, p);
|
||||
}
|
||||
|
||||
/// Calculate and store line starts from the source text
|
||||
pub fn calcLineStarts(self: *Self, source: []const u8) !void {
|
||||
self.line_starts.clearRetainingCapacity();
|
||||
self.line_starts = try RegionInfo.findLineStarts(self.gpa, source);
|
||||
}
|
||||
|
||||
/// Get diagnostic position information for a given range
|
||||
pub fn calcRegionInfo(self: *Self, source: []const u8, begin: u32, end: u32) !RegionInfo {
|
||||
return RegionInfo.position(source, self.line_starts.items, begin, end);
|
||||
}
|
||||
|
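The line_starts scheme trades storage for a small amount of work at diagnostic time: only byte offsets live in the IR, and line/column pairs are derived on demand. A self-contained sketch of the idea, independent of ModuleEnv (names here are illustrative):

```zig
const std = @import("std");

// Record the byte offset at which each line begins (line 0 starts at offset 0).
fn findLineStarts(gpa: std.mem.Allocator, source: []const u8) !std.ArrayList(u32) {
    var starts = std.ArrayList(u32).init(gpa);
    try starts.append(0);
    for (source, 0..) |byte, i| {
        if (byte == '\n') try starts.append(@intCast(i + 1));
    }
    return starts;
}

test "offset to line/column via line starts" {
    const source = "first\nsecond\n";
    var starts = try findLineStarts(std.testing.allocator, source);
    defer starts.deinit();

    // Offset 8 is the 'c' in "second": line index 1, column index 2.
    const pos: u32 = 8;
    var line: u32 = 0;
    for (starts.items[1..]) |start| {
        if (pos < start) break;
        line += 1;
    }
    const col = pos - starts.items[line];
    try std.testing.expectEqual(@as(u32, 1), line);
    try std.testing.expectEqual(@as(u32, 2), col);
}
```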

src/base/RegionInfo.zig (renamed from src/base/DiagnosticPosition.zig)

@@ -1,5 +1,9 @@
-//! This module provides helpers for calculating position for diagnostics
+//! This module provides helpers for calculating region information for diagnostics,
+//! including the start and end line and column information.
+//!
+//! We only store simple position offsets in the AST and intermediate representations (IRs),
+//! as this is more compact; when we need to, we can calculate the line and column information
+//! using line_starts and the offsets.
 const std = @import("std");
 const Allocator = std.mem.Allocator;

@@ -10,21 +14,21 @@ end_line_idx: u32,
 end_col_idx: u32,
 line_text: []const u8,

-const DiagnosticPosition = @This();
+const RegionInfo = @This();

 /// Finds the line index for a given position in the source
-fn lineIdx(line_starts: std.ArrayList(u32), pos: u32) u32 {
-    for (line_starts.items[1..], 0..) |n, i| {
+fn lineIdx(line_starts: []const u32, pos: u32) u32 {
+    for (line_starts[1..], 0..) |n, i| {
         if (pos < n) {
             return @intCast(i);
         }
     }
-    return @intCast(line_starts.items.len - 1);
+    return @intCast(line_starts.len - 1);
 }

 /// Gets the column index for a position on a given line
-fn columnIdx(line_starts: std.ArrayList(u32), line: u32, pos: u32) !u32 {
-    const line_start: u32 = @intCast(line_starts.items[line]);
+fn columnIdx(line_starts: []const u32, line: u32, pos: u32) !u32 {
+    const line_start: u32 = @intCast(line_starts[line]);
     if (pos < line_start) {
         return error.InvalidPosition;
     }

@@ -32,10 +36,10 @@ fn columnIdx(line_starts: []const u32, line: u32, pos: u32) !u32 {
 }

 /// Returns the source text for a given line index
-fn getLineText(source: []const u8, line_starts: std.ArrayList(u32), line_idx: u32) []const u8 {
-    const line_start_offset = line_starts.items[line_idx];
-    const line_end_offset = if (line_idx + 1 < line_starts.items.len)
-        line_starts.items[line_idx + 1]
+fn getLineText(source: []const u8, line_starts: []const u32, line_idx: u32) []const u8 {
+    const line_start_offset = line_starts[line_idx];
+    const line_end_offset = if (line_idx + 1 < line_starts.len)
+        line_starts[line_idx + 1]
     else
         source.len;

@@ -68,7 +72,7 @@ pub fn findLineStarts(gpa: Allocator, source: []const u8) !std.ArrayList(u32) {
 }

 /// Returns position info for a given start and end index offset
-pub fn position(source: []const u8, line_starts: std.ArrayList(u32), begin: u32, end: u32) !DiagnosticPosition {
+pub fn position(source: []const u8, line_starts: []const u32, begin: u32, end: u32) !RegionInfo {
     if (begin > end) {
         return error.OutOfOrder;
     }

@@ -105,17 +109,17 @@ test "lineIdx" {
     try line_starts.append(20);
     try line_starts.append(30);

-    try std.testing.expectEqual(0, lineIdx(line_starts, 0));
-    try std.testing.expectEqual(0, lineIdx(line_starts, 5));
-    try std.testing.expectEqual(0, lineIdx(line_starts, 9));
-    try std.testing.expectEqual(1, lineIdx(line_starts, 10));
-    try std.testing.expectEqual(1, lineIdx(line_starts, 15));
-    try std.testing.expectEqual(1, lineIdx(line_starts, 19));
-    try std.testing.expectEqual(2, lineIdx(line_starts, 20));
-    try std.testing.expectEqual(2, lineIdx(line_starts, 25));
-    try std.testing.expectEqual(2, lineIdx(line_starts, 29));
-    try std.testing.expectEqual(3, lineIdx(line_starts, 30));
-    try std.testing.expectEqual(3, lineIdx(line_starts, 35));
+    try std.testing.expectEqual(0, lineIdx(line_starts.items, 0));
+    try std.testing.expectEqual(0, lineIdx(line_starts.items, 5));
+    try std.testing.expectEqual(0, lineIdx(line_starts.items, 9));
+    try std.testing.expectEqual(1, lineIdx(line_starts.items, 10));
+    try std.testing.expectEqual(1, lineIdx(line_starts.items, 15));
+    try std.testing.expectEqual(1, lineIdx(line_starts.items, 19));
+    try std.testing.expectEqual(2, lineIdx(line_starts.items, 20));
+    try std.testing.expectEqual(2, lineIdx(line_starts.items, 25));
+    try std.testing.expectEqual(2, lineIdx(line_starts.items, 29));
+    try std.testing.expectEqual(3, lineIdx(line_starts.items, 30));
+    try std.testing.expectEqual(3, lineIdx(line_starts.items, 35));
 }

 test "columnIdx" {

@@ -127,12 +131,12 @@ test "columnIdx" {
     try line_starts.append(10);
     try line_starts.append(20);

-    try std.testing.expectEqual(0, columnIdx(line_starts, 0, 0));
-    try std.testing.expectEqual(5, columnIdx(line_starts, 0, 5));
-    try std.testing.expectEqual(9, columnIdx(line_starts, 0, 9));
+    try std.testing.expectEqual(0, columnIdx(line_starts.items, 0, 0));
+    try std.testing.expectEqual(5, columnIdx(line_starts.items, 0, 5));
+    try std.testing.expectEqual(9, columnIdx(line_starts.items, 0, 9));

-    try std.testing.expectEqual(0, columnIdx(line_starts, 1, 10));
-    try std.testing.expectEqual(5, columnIdx(line_starts, 1, 15));
+    try std.testing.expectEqual(0, columnIdx(line_starts.items, 1, 10));
+    try std.testing.expectEqual(5, columnIdx(line_starts.items, 1, 15));
 }

 test "getLineText" {

@@ -146,9 +150,9 @@ test "getLineText" {
     try line_starts.append(6);
     try line_starts.append(12);

-    try std.testing.expectEqualStrings("line0\n", getLineText(source, line_starts, 0));
-    try std.testing.expectEqualStrings("line1\n", getLineText(source, line_starts, 1));
-    try std.testing.expectEqualStrings("line2", getLineText(source, line_starts, 2));
+    try std.testing.expectEqualStrings("line0\n", getLineText(source, line_starts.items, 0));
+    try std.testing.expectEqualStrings("line1\n", getLineText(source, line_starts.items, 1));
+    try std.testing.expectEqualStrings("line2", getLineText(source, line_starts.items, 2));
 }

 test "get" {

@@ -162,14 +166,14 @@ test "get" {
     try line_starts.append(6);
     try line_starts.append(12);

-    const info1 = try position(source, line_starts, 2, 4);
+    const info1 = try position(source, line_starts.items, 2, 4);
     try std.testing.expectEqual(0, info1.start_line_idx);
     try std.testing.expectEqual(2, info1.start_col_idx);
     try std.testing.expectEqual(0, info1.end_line_idx);
     try std.testing.expectEqual(4, info1.end_col_idx);
     try std.testing.expectEqualStrings("line0\n", info1.line_text);

-    const info2 = try position(source, line_starts, 8, 10);
+    const info2 = try position(source, line_starts.items, 8, 10);
     try std.testing.expectEqual(1, info2.start_line_idx);
     try std.testing.expectEqual(2, info2.start_col_idx);
     try std.testing.expectEqual(1, info2.end_line_idx);

src/base/sexpr.zig

@@ -3,7 +3,7 @@
 const std = @import("std");
 const Allocator = std.mem.Allocator;
 const testing = std.testing;
-const DiagnosticPosition = @import("DiagnosticPosition.zig");
+const RegionInfo = @import("RegionInfo.zig");

 /// How many child nodes before breaking to a newline
 const CHILD_COUNT_BREAKPOINT = 5;

@@ -14,7 +14,7 @@ pub const Expr = union(enum) {
         value: []const u8,
         children: std.ArrayListUnmanaged(Expr),
     },
-    region: DiagnosticPosition,
+    region: RegionInfo,
     string: []const u8,
     signed_int: i128,
     unsigned_int: u128,

@@ -63,27 +63,27 @@ pub const Expr = union(enum) {
     }

     // Helper function to append a child node to a parent node
-    fn appendNodeChildUnsafe(self: *Expr, gpa: Allocator, child: Expr) void {
+    fn appendNodeUnsafe(self: *Expr, gpa: Allocator, child: Expr) void {
         switch (self.*) {
             .node => |*n| n.children.append(gpa, child) catch {
                 @panic("Failed to append child node");
             },
-            else => @panic("called appendNodeChildUnsafe on a Expr that is not a .node"),
+            else => @panic("called appendNodeUnsafe on an Expr that is not a .node"),
         }
     }

-    /// Helper function to append a string child
+    /// Helper function to append a string
     /// The value will be duplicated so that it is owned by the node
     /// and will be freed when the node is destroyed.
-    pub fn appendStringChild(self: *Expr, gpa: Allocator, value: []const u8) void {
+    pub fn appendString(self: *Expr, gpa: Allocator, value: []const u8) void {
         const owned_value = gpa.dupe(u8, value) catch {
             @panic("Failed to duplicate string value");
         };
-        self.appendNodeChildUnsafe(gpa, .{ .string = owned_value });
+        self.appendNodeUnsafe(gpa, .{ .string = owned_value });
     }

-    pub fn appendRegionChild(self: *Expr, gpa: Allocator, region: DiagnosticPosition) void {
-        self.appendNodeChildUnsafe(gpa, .{ .region = DiagnosticPosition{
+    pub fn appendRegionInfo(self: *Expr, gpa: Allocator, region: RegionInfo) void {
+        self.appendNodeUnsafe(gpa, .{ .region = RegionInfo{
             .start_line_idx = region.start_line_idx,
             .start_col_idx = region.start_col_idx,
             .end_line_idx = region.end_line_idx,

@@ -94,24 +94,24 @@ pub const Expr = union(enum) {
         } });
     }

-    /// Helper function to append a signed integer child
-    pub fn appendSignedIntChild(self: *Expr, gpa: Allocator, value: i128) void {
-        self.appendNodeChildUnsafe(gpa, .{ .signed_int = value });
+    /// Helper function to append a signed integer
+    pub fn appendSignedInt(self: *Expr, gpa: Allocator, value: i128) void {
+        self.appendNodeUnsafe(gpa, .{ .signed_int = value });
     }

-    /// Helper function to append an unsigned integer child
-    pub fn appendUnsignedIntChild(self: *Expr, gpa: Allocator, value: u128) void {
-        self.appendNodeChildUnsafe(gpa, .{ .unsigned_int = value });
+    /// Helper function to append an unsigned integer
+    pub fn appendUnsignedInt(self: *Expr, gpa: Allocator, value: u128) void {
+        self.appendNodeUnsafe(gpa, .{ .unsigned_int = value });
     }

-    /// Helper function to append a float child
-    pub fn appendFloatChild(self: *Expr, gpa: Allocator, value: f64) void {
-        self.appendNodeChildUnsafe(gpa, .{ .float = value });
+    /// Helper function to append a float
+    pub fn appendFloat(self: *Expr, gpa: Allocator, value: f64) void {
+        self.appendNodeUnsafe(gpa, .{ .float = value });
     }

-    /// Helper function to append a node child
-    pub fn appendNodeChild(self: *Expr, gpa: Allocator, child_node: *Expr) void {
-        self.appendNodeChildUnsafe(gpa, child_node.*);
+    /// Helper function to append a node
+    pub fn appendNode(self: *Expr, gpa: Allocator, child_node: *Expr) void {
+        self.appendNodeUnsafe(gpa, child_node.*);
     }

     /// Format the node as an S-expression formatted string

@@ -213,13 +213,13 @@ test "s-expression" {
     const gpa = testing.allocator;

     var baz = Expr.init(gpa, "baz");
-    baz.appendUnsignedIntChild(gpa, 456);
-    baz.appendFloatChild(gpa, 789.0);
+    baz.appendUnsignedInt(gpa, 456);
+    baz.appendFloat(gpa, 789.0);

     var foo = Expr.init(gpa, "foo");
-    foo.appendStringChild(gpa, "bar");
-    foo.appendSignedIntChild(gpa, -123);
-    foo.appendNodeChild(gpa, &baz);
+    foo.appendString(gpa, "bar");
+    foo.appendSignedInt(gpa, -123);
+    foo.appendNode(gpa, &baz);

     // Test pretty formatting
     {

(Two file diffs suppressed because they are too large.)

src/check/canonicalize/Node.zig

@@ -25,16 +25,16 @@ pub const Idx = List.Idx;
 /// This is the tag associated with a raw Node in the list
 pub const Tag = enum {
     // Statements
-    statement_expr,
     statement_decl,
     statement_var,
-    statement_for,
-    statement_crash,
+    statement_expr,
+    statement_expect,
+    statement_for,
+    statement_return,
+    statement_import,
+    statement_type_decl,
+    statement_type_anno,
+    statement_crash,
     // Expressions
     expr_var,
     expr_tuple,

@@ -45,7 +45,7 @@ pub const Tag = enum {
     expr_static_dispatch,
     expr_apply,
     expr_string,
-    expr_string_part,
+    expr_string_segment,
     expr_int,
     expr_float,
     expr_tag,

@@ -77,6 +77,14 @@ pub const Tag = enum {
     pattern_identifier,
     pattern_as,
     pattern_applied_tag,
+    pattern_record_destructure,
+    pattern_list,
+    pattern_num_literal,
+    pattern_int_literal,
+    pattern_float_literal,
+    pattern_str_literal,
+    pattern_char_literal,
+    pattern_underscore,
     // Definitions
     def,

src/check/canonicalize/NodeStore.zig

@ -7,6 +7,9 @@ const collections = @import("../../collections.zig");
|
|||
const Node = @import("Node.zig");
|
||||
const CIR = @import("CIR.zig");
|
||||
|
||||
const DataSpan = base.DataSpan;
|
||||
const Region = base.Region;
|
||||
|
||||
const exitOnOom = collections.exitOnOom;
|
||||
|
||||
const NodeStore = @This();
|
||||
|
@@ -76,24 +79,65 @@ pub fn getStatement(store: *NodeStore, statement: CIR.Statement.Idx) CIR.Statement {
     const node = store.nodes.get(node_idx);

     switch (node.tag) {
-        .statement_expr => {
-            return .{ .expr = .{
-                .expr = node.data_1,
-                .region = node.region,
-            } };
-        },
-        // .statement_decl => {},
-        // .statement_var => {},
-        // .statement_for => {},
-        // .statement_expect => {},
-        // .statement_return => {},
-        // .statement_import => {},
-        // .statement_type_decl => {},
-        // .statement_type_anno => {},
-        // .statement_crash => {},
-        else => @panic("TODO: implement other statement variants"),
-        // not a statement node
-        // else => unreachable,
+        .statement_decl => return CIR.Statement{ .decl = .{
+            .region = node.region,
+            .expr = @enumFromInt(node.data_1),
+            .pattern = @enumFromInt(node.data_2),
+        } },
+        .statement_var => return CIR.Statement{ .@"var" = .{
+            .region = node.region,
+            .expr = @enumFromInt(node.data_1),
+            .ident = @bitCast(node.data_2),
+        } },
+        .statement_crash => return CIR.Statement{ .crash = .{
+            .msg = @bitCast(node.data_1),
+            .region = node.region,
+        } },
+        .statement_expr => return .{ .expr = .{
+            .expr = node.data_1,
+            .region = node.region,
+        } },
+        .statement_expect => return CIR.Statement{ .expect = .{
+            .region = node.region,
+            .body = @enumFromInt(node.data_1),
+        } },
+        .statement_for => return CIR.Statement{ .@"for" = .{
+            .region = node.region,
+            .body = @enumFromInt(node.data_1),
+            .expr = @enumFromInt(node.data_2),
+            .patt = @enumFromInt(node.data_3),
+        } },
+        .statement_return => return CIR.Statement{ .@"return" = .{
+            .region = node.region,
+            .expr = @enumFromInt(node.data_1),
+        } },
+        .statement_import => return CIR.Statement{
+            .import = .{
+                .region = node.region,
+                .module_name_tok = @bitCast(node.data_1),
+                .exposes = DataSpan.init(node.data_2, node.data_3).as(CIR.ExposedItem.Span),
+                .alias_tok = null, // TODO save these in extra_data and then insert them here
+                .qualifier_tok = null, // TODO save these in extra_data and then insert them here
+            },
+        },
+        .statement_type_decl => return CIR.Statement{
+            .type_decl = .{
+                .region = node.region,
+                .anno = @enumFromInt(node.data_1),
+                .header = @enumFromInt(0), // TODO save these in extra_data and then insert them here
+                .where = null, // TODO save these in extra_data and then insert them here
+            },
+        },
+        .statement_type_anno => return CIR.Statement{
+            .type_anno = .{
+                .region = node.region,
+                .anno = @enumFromInt(node.data_1),
+                .where = null, // TODO save these in extra_data and then insert them here
+            },
+        },
+        else => {
+            @panic("unreachable, node is not a statement tag");
+        },
     }
 }
@@ -104,12 +148,10 @@ pub fn getExpr(store: *const NodeStore, expr: CIR.Expr.Idx) CIR.Expr {

     switch (node.tag) {
         .expr_var => {
-            const ident_idx: base.Ident.Idx = @bitCast(@as(u32, @bitCast(node.data_1)));
-            return CIR.Expr{
-                .lookup = .{
-                    .ident = ident_idx,
-                },
-            };
+            return CIR.Expr{ .lookup = .{
+                .pattern_idx = @enumFromInt(node.data_1),
+                .region = node.region,
+            } };
         },
         .expr_int => {
             // Retrieve the literal index from data_1

@@ -126,12 +168,10 @@ pub fn getExpr(store: *const NodeStore, expr: CIR.Expr.Idx) CIR.Expr {
                     .num_var = num_var,
                     .precision_var = precision_var,
                     .literal = literal,
-                    .value = CIR.IntValue{ // Placeholder value
-                        .bytes = [16]u8{ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
-                        .kind = .i128,
-                    },
-                    // TODO shouldn't this be a flex_var?
-                    .bound = types.Num.Compact.Int.Precision.i128,
+                    .value = CIR.IntValue.placeholder(),
+                    .bound = types.Num.Compact.placeholder(), // TODO: get from extra_data
                     .region = node.region,
                 },
             };
         },

@@ -140,6 +180,7 @@ pub fn getExpr(store: *const NodeStore, expr: CIR.Expr.Idx) CIR.Expr {
                 .list = .{
                     .elems = .{ .span = .{ .start = node.data_1, .len = node.data_2 } },
                     .elem_var = @enumFromInt(0), // TODO: get from extra_data
+                    .region = node.region,
                 },
             };
         },

@@ -147,6 +188,8 @@ pub fn getExpr(store: *const NodeStore, expr: CIR.Expr.Idx) CIR.Expr {
             return .{
                 .call = .{
                     .args = .{ .span = .{ .start = node.data_1, .len = node.data_2 } },
+                    .region = node.region,
+                    .called_via = @enumFromInt(node.data_3),
                 },
             };
         },

@@ -167,15 +210,19 @@ pub fn getExpr(store: *const NodeStore, expr: CIR.Expr.Idx) CIR.Expr {
                     .literal = literal,
                     .value = 0,
-                    // TODO shouldn't this be a flex_var?
-                    .bound = types.Num.Compact.Frac.Precision.dec,
+                    .bound = types.Num.Compact.placeholder(), // TODO
                     .region = node.region,
                 },
             };
         },
-        .expr_string => {
-            return .{
-                .str = @enumFromInt(node.data_1),
-            };
-        },
+        .expr_string_segment => return CIR.Expr.init_str_segment(
+            @enumFromInt(node.data_1),
+            node.region,
+        ),
+        .expr_string => return CIR.Expr.init_str(
+            DataSpan.init(node.data_1, node.data_2).as(CIR.Expr.Span),
+            node.region,
+        ),
         .expr_tag => {
             return .{
                 .tag = .{

@@ -183,33 +230,25 @@ pub fn getExpr(store: *const NodeStore, expr: CIR.Expr.Idx) CIR.Expr {
                     .ext_var = @enumFromInt(0), // Placeholder
                     .name = @bitCast(@as(base.Ident.Idx, @bitCast(node.data_1))),
                     .args = .{ .span = .{ .start = 0, .len = 0 } }, // Empty args for now
+                    .region = node.region,
                 },
             };
         },
-        .malformed => {
-            return CIR.Expr{
-                .runtime_error = @enumFromInt(node.data_1),
-            };
-        },
+        .expr_bin_op => {
+            return CIR.Expr{ .binop = CIR.Expr.Binop.init(
+                @enumFromInt(node.data_1),
+                @enumFromInt(node.data_2),
+                @enumFromInt(node.data_3),
+                node.region,
+            ) };
+        },
         .statement_expr,
         .statement_decl,
         .statement_var,
         .statement_for,
+        .statement_expect,
+        .statement_return,
+        .statement_import,
+        .statement_type_decl,
+        .statement_type_anno,
         .statement_crash,
         .expr_tuple,
         .expr_record,
         .expr_field_access,
         .expr_static_dispatch,
         .expr_apply,
-        .expr_string_part,
         .expr_lambda,
         .expr_record_update,
-        .expr_bin_op,
         .expr_unary,
         .expr_suffix_single_question,
         .expr_if_then_else,

@@ -217,27 +256,22 @@ pub fn getExpr(store: *const NodeStore, expr: CIR.Expr.Idx) CIR.Expr {
         .expr_dbg,
         .expr_block,
         .expr_ellipsis,
         .expr_record_builder,
-        .type_decl_header,
-        .type_anno_apply,
-        .type_anno_var,
-        .type_anno_ty,
-        .type_anno_underscore,
-        .type_anno_mod_ty,
-        .type_anno_union,
-        .type_anno_tuple,
-        .type_anno_record,
-        .type_anno_fn,
-        .type_anno_parens,
-        .pattern_identifier,
-        .pattern_as,
-        .pattern_applied_tag,
-        .def,
-        .if_branch,
         => {
             std.log.debug("TODO: implement getExpr for node type {?}", .{node.tag});
-            return CIR.Expr{ .runtime_error = @enumFromInt(0) };
+            return CIR.Expr{ .runtime_error = .{
+                .tag = CIR.Diagnostic.Tag.not_implemented,
+                .region = node.region,
+            } };
         },
+        .malformed => {
+            return CIR.Expr{ .runtime_error = .{
+                .tag = @enumFromInt(node.data_1),
+                .region = node.region,
+            } };
+        },
+        else => {
+            @panic("unreachable, node is not an expression tag");
+        },
     }
 }
@@ -262,13 +296,92 @@ pub fn getPattern(store: *NodeStore, pattern_idx: CIR.Pattern.Idx) CIR.Pattern {
     const node = store.nodes.get(node_idx);

     switch (node.tag) {
-        .pattern_identifier => {
-            const ident_idx: base.Ident.Idx = @bitCast(node.data_1);
-            return CIR.Pattern{ .identifier = ident_idx };
-        },
+        .pattern_identifier => return CIR.Pattern{
+            .assign = .{
+                .ident = @bitCast(node.data_1),
+                .region = node.region,
+            },
+        },
+        .pattern_as => return CIR.Pattern{
+            .as = .{
+                .ident = @bitCast(node.data_1),
+                .pattern = @enumFromInt(node.data_2),
+                .region = node.region,
+            },
+        },
+        .pattern_applied_tag => return CIR.Pattern{
+            .applied_tag = .{
+                .region = node.region,
+                .arguments = DataSpan.init(node.data_1, node.data_2).as(CIR.Pattern.Span),
+                .tag_name = @bitCast(node.data_3),
+                .ext_var = @enumFromInt(0), // TODO need to store and retrieve from extra_data
+                .whole_var = @enumFromInt(0), // TODO need to store and retrieve from extra_data
+            },
+        },
+        .pattern_record_destructure => return CIR.Pattern{
+            .record_destructure = .{
+                .region = node.region,
+                .destructs = DataSpan.init(node.data_1, node.data_2).as(CIR.RecordDestruct.Span),
+                .ext_var = @enumFromInt(0), // TODO need to store and retrieve from extra_data
+                .whole_var = @enumFromInt(0), // TODO need to store and retrieve from extra_data
+            },
+        },
+        .pattern_list => return CIR.Pattern{
+            .list = .{
+                .region = node.region,
+                .patterns = DataSpan.init(node.data_1, node.data_2).as(CIR.Pattern.Span),
+                .elem_var = @enumFromInt(0), // TODO need to store and retrieve from extra_data
+                .list_var = @enumFromInt(0), // TODO need to store and retrieve from extra_data
+            },
+        },
+        .pattern_num_literal => return CIR.Pattern{
+            .num_literal = .{
+                .region = node.region,
+                .literal = @enumFromInt(node.data_1),
+                .num_var = @enumFromInt(0), // TODO need to store and retrieve from extra_data
+                .bound = types.Num.Compact.placeholder(), // TODO extra_data
+                .value = CIR.IntValue.placeholder(),
+            },
+        },
+        .pattern_int_literal => return CIR.Pattern{
+            .int_literal = .{
+                .region = node.region,
+                .literal = @enumFromInt(node.data_1),
+                .precision_var = @enumFromInt(0), // TODO need to store and retrieve from extra_data
+                .num_var = @enumFromInt(0), // TODO need to store and retrieve from extra_data
+                .bound = types.Num.Compact.placeholder(), // TODO extra_data
+                .value = CIR.IntValue.placeholder(), // TODO need to store and retrieve from extra_data
+            },
+        },
+        .pattern_float_literal => return CIR.Pattern{
+            .float_literal = .{
+                .region = node.region,
+                .literal = @enumFromInt(node.data_1),
+                .precision_var = @enumFromInt(0), // TODO need to store and retrieve from extra_data
+                .num_var = @enumFromInt(0), // TODO need to store and retrieve from extra_data
+                .bound = types.Num.Compact.placeholder(), // TODO extra_data
+                .value = 42, // TODO need to store and retrieve from extra_data
+            },
+        },
+        .pattern_str_literal => return CIR.Pattern{ .str_literal = .{
+            .region = node.region,
+            .literal = @enumFromInt(node.data_1),
+        } },
+        .pattern_char_literal => return CIR.Pattern{
+            .char_literal = .{
+                .region = node.region,
+                .value = node.data_1,
+                .bound = types.Num.Compact.placeholder(), // TODO
+                .num_var = @enumFromInt(0), // TODO need to store and retrieve from extra_data
+                .precision_var = @enumFromInt(0), // TODO need to store and retrieve from extra_data
+            },
+        },
+        .pattern_underscore => return CIR.Pattern{ .underscore = .{
+            .region = node.region,
+        } },
         else => {
-            std.log.debug("TODO: implement pattern {}", .{node.tag});
-            @panic("unimplemented");
+            @panic("unreachable, node is not a pattern tag");
         },
     }
 }
@@ -306,12 +419,64 @@ pub fn addStatement(store: *NodeStore, statement: CIR.Statement) CIR.Statement.Idx {
     var node = Node{};

     switch (statement) {
-        .expr => |stmt| {
-            node.data_1 = stmt.expr;
-            node.region = stmt.region;
+        .decl => |s| {
+            node.tag = .statement_decl;
+            node.region = s.region;
+            node.data_1 = @intFromEnum(s.expr);
+            node.data_2 = @intFromEnum(s.pattern);
         },
-        else => {
-            std.debug.panic("Statement of type {s} not yet implemented in Can\n", .{@tagName(statement)});
+        .@"var" => |s| {
+            node.tag = .statement_var;
+            node.region = s.region;
+            node.data_1 = @intFromEnum(s.expr);
+            node.data_2 = @bitCast(s.ident);
+        },
+        .crash => |s| {
+            node.tag = .statement_crash;
+            node.region = s.region;
+            node.data_1 = @bitCast(s.msg);
+        },
+        .expr => |s| {
+            node.tag = .statement_expr;
+            node.data_1 = s.expr;
+            node.region = s.region;
+        },
+        .expect => |s| {
+            node.tag = .statement_expect;
+            node.region = s.region;
+            node.data_1 = @intFromEnum(s.body);
+        },
+        .@"for" => |s| {
+            node.tag = .statement_for;
+            node.region = s.region;
+            node.data_1 = @intFromEnum(s.body);
+            node.data_2 = @intFromEnum(s.expr);
+            node.data_3 = @intFromEnum(s.patt);
+        },
+        .@"return" => |s| {
+            node.tag = .statement_return;
+            node.region = s.region;
+            node.data_1 = @intFromEnum(s.expr);
+        },
+        .import => |s| {
+            node.tag = .statement_import;
+            node.region = s.region;
+            node.data_1 = @bitCast(s.module_name_tok);
+            node.data_2 = s.exposes.span.start;
+            node.data_3 = s.exposes.span.len;
+            // TODO store alias_tok and qualifier_tok in extra_data
+        },
+        .type_decl => |s| {
+            node.tag = .statement_type_decl;
+            node.region = s.region;
+            node.data_1 = @intFromEnum(s.anno);
+            // TODO store header and where clause data in extra_data
+        },
+        .type_anno => |s| {
+            node.tag = .statement_type_anno;
+            node.region = s.region;
+            node.data_1 = @intFromEnum(s.anno);
+            // TODO store the optional where clause data in extra_data
+        },
     }
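The scheme above is the same in both directions: `addStatement` flattens a statement's fields into a fixed-size node (`tag`, `region`, `data_1..data_3`), and `getStatement` rebuilds the typed statement from them. A self-contained sketch of that round trip with a hypothetical two-field payload (the real `Node` and `CIR.Statement` types carry more):

```zig
const std = @import("std");

const ExprIdx = enum(u32) { _ };
const PatternIdx = enum(u32) { _ };

const Node = struct { tag: enum { statement_decl }, data_1: u32, data_2: u32 };

const Decl = struct { expr: ExprIdx, pattern: PatternIdx };

// Flatten the typed statement into raw u32 slots, mirroring addStatement.
fn pack(decl: Decl) Node {
    return .{
        .tag = .statement_decl,
        .data_1 = @intFromEnum(decl.expr),
        .data_2 = @intFromEnum(decl.pattern),
    };
}

// Rebuild the typed statement from the raw node, mirroring getStatement.
fn unpack(node: Node) Decl {
    return .{
        .expr = @enumFromInt(node.data_1),
        .pattern = @enumFromInt(node.data_2),
    };
}

test "decl round-trips through a raw node" {
    const decl = Decl{ .expr = @enumFromInt(7), .pattern = @enumFromInt(9) };
    try std.testing.expectEqual(decl, unpack(pack(decl)));
}
```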
@@ -319,7 +484,7 @@ pub fn addStatement(store: *NodeStore, statement: CIR.Statement) CIR.Statement.Idx {
 }

 /// Adds an expression node to the store.
-pub fn addExpr(store: *NodeStore, expr: CIR.ExprAtRegion) CIR.Expr.Idx {
+pub fn addExpr(store: *NodeStore, expr: CIR.Expr) CIR.Expr.Idx {
     var node = Node{
         .data_1 = 0,
         .data_2 = 0,

@@ -327,14 +492,15 @@ pub fn addExpr(store: *NodeStore, expr: CIR.Expr) CIR.Expr.Idx {
         .region = base.Region.zero(),
         .tag = @enumFromInt(0),
     };
-    node.region = expr.region;

-    switch (expr.expr) {
+    switch (expr) {
         .lookup => |e| {
+            node.region = e.region;
             node.tag = .expr_var;
-            node.data_1 = @bitCast(@as(u32, @bitCast(e.ident)));
+            node.data_1 = @intFromEnum(e.pattern_idx);
         },
         .int => |e| {
+            node.region = e.region;
             node.tag = .expr_int;

             // Store the literal index in data_1

@@ -347,12 +513,14 @@ pub fn addExpr(store: *NodeStore, expr: CIR.Expr) CIR.Expr.Idx {
             // TODO for storing the value and bound, use extra_data
         },
         .list => |e| {
+            node.region = e.region;
             node.tag = .expr_list;
             // TODO: Store list data properly. For now, just store placeholder values
             node.data_1 = e.elems.span.start;
             node.data_2 = e.elems.span.len;
         },
         .float => |e| {
+            node.region = e.region;
             node.tag = .expr_float;

             // Store the literal index in data_1

@@ -364,50 +532,75 @@ pub fn addExpr(store: *NodeStore, expr: CIR.Expr) CIR.Expr.Idx {
             // TODO for storing the value and bound, use extra_data
         },
+        .str_segment => |e| {
+            node.region = e.region;
+            node.tag = .expr_string_segment;
+            node.data_1 = @intFromEnum(e.literal);
+        },
         .str => |e| {
+            node.region = e.region;
             node.tag = .expr_string;
-            // TODO: Store string data properly. For now, just store the literal idx
-            node.data_1 = @intCast(@intFromEnum(e));
+            node.data_1 = e.span.span.start;
+            node.data_2 = e.span.span.len;
         },
         .tag => |e| {
+            node.region = e.region;
             node.tag = .expr_tag;
             // Store the full Ident.Idx as a u32
             node.data_1 = @bitCast(@as(u32, @bitCast(e.name)));
         },
-        .runtime_error => |err| {
-            node.data_1 = @intFromEnum(err);
+        .runtime_error => |e| {
+            node.region = e.region;
+            node.data_1 = @intFromEnum(e.tag);
             node.tag = .malformed;
         },
-        .num => {
+        .num => |e| {
+            node.region = e.region;
             @panic("TODO addExpr num");
         },
-        .single_quote => {
+        .single_quote => |e| {
+            node.region = e.region;
             @panic("TODO addExpr single_quote");
         },
-        .when => {
+        .when => |e| {
+            node.region = e.region;
             @panic("TODO addExpr when");
         },
-        .@"if" => {
+        .@"if" => |e| {
+            node.region = e.region;
             @panic("TODO addExpr if");
         },
-        .call => {
+        .call => |e| {
+            node.region = e.region;
             node.tag = .expr_call;
             // Store the args span
-            node.data_1 = expr.expr.call.args.span.start;
-            node.data_2 = expr.expr.call.args.span.len;
+            node.data_1 = e.args.span.start;
+            node.data_2 = e.args.span.len;
+            node.data_3 = @intFromEnum(e.called_via);
         },
-        .record => {
+        .record => |e| {
+            node.region = e.region;
             @panic("TODO addExpr record");
         },
-        .empty_record => {
+        .empty_record => |e| {
+            node.region = e.region;
             @panic("TODO addExpr empty_record");
         },
-        .record_access => {
+        .record_access => |e| {
+            node.region = e.region;
             @panic("TODO addExpr record_access");
         },
-        .zero_argument_tag => {
+        .zero_argument_tag => |e| {
+            node.region = e.region;
             @panic("TODO addExpr zero_argument_tag");
         },
+        .binop => |e| {
+            node.region = e.region;
+            node.tag = .expr_bin_op;
+            node.data_1 = @intFromEnum(e.op);
+            node.data_2 = @intFromEnum(e.lhs);
+            node.data_3 = @intFromEnum(e.rhs);
+        },
     }

     return @enumFromInt(@intFromEnum(store.nodes.append(store.gpa, node)));
@@ -448,22 +641,72 @@ pub fn addPattern(store: *NodeStore, pattern: CIR.Pattern) CIR.Pattern.Idx {
     };

     switch (pattern) {
-        .identifier => |ident_idx| {
-            node.data_1 = @bitCast(ident_idx);
+        .assign => |p| {
+            node.data_1 = @bitCast(p.ident);
             node.tag = .pattern_identifier;
+            node.region = p.region;
         },
-        .as,
-        .applied_tag,
-        .record_destructure,
-        .list,
-        .num_literal,
-        .int_literal,
-        .float_literal,
-        .str_literal,
-        .char_literal,
-        .Underscore,
-        => {
-            std.debug.panic("Pattern of type {s} not yet implemented in Can\n", .{@tagName(pattern)});
+        .as => |p| {
+            node.tag = .pattern_as;
+            node.region = p.region;
+            node.data_1 = @bitCast(p.ident);
+            node.data_2 = @intFromEnum(p.pattern);
+        },
+        .applied_tag => |p| {
+            node.tag = .pattern_applied_tag;
+            node.region = p.region;
+            node.data_1 = p.arguments.span.start;
+            node.data_2 = p.arguments.span.len;
+            node.data_3 = @bitCast(p.tag_name);
+            // TODO store type vars in extra data
+        },
+        .record_destructure => |p| {
+            node.tag = .pattern_record_destructure;
+            node.region = p.region;
+            node.data_1 = p.destructs.span.start;
+            node.data_2 = p.destructs.span.len;
+            // TODO store type vars in extra data
+        },
+        .list => |p| {
+            node.tag = .pattern_list;
+            node.region = p.region;
+            node.data_1 = p.patterns.span.start;
+            node.data_2 = p.patterns.span.len;
+            // TODO store type vars in extra data
+        },
+        .num_literal => |p| {
+            node.tag = .pattern_num_literal;
+            node.region = p.region;
+            node.data_1 = @intFromEnum(p.literal);
+            // TODO store other data in extra_data
+        },
+        .int_literal => |p| {
+            node.tag = .pattern_int_literal;
+            node.region = p.region;
+            node.data_1 = @intFromEnum(p.literal);
+            // TODO store other data
+        },
+        .float_literal => |p| {
+            node.tag = .pattern_float_literal;
+            node.region = p.region;
+            node.data_1 = @intFromEnum(p.literal);
+            // TODO store other data
+        },
+        .str_literal => |p| {
+            node.tag = .pattern_str_literal;
+            node.region = p.region;
+            node.data_1 = @intFromEnum(p.literal);
+            // TODO store other data
+        },
+        .char_literal => |p| {
+            node.tag = .pattern_char_literal;
+            node.region = p.region;
+            node.data_1 = p.value;
+            // TODO store other data
+        },
+        .underscore => |p| {
+            node.tag = .pattern_underscore;
+            node.region = p.region;
         },
     }
@@ -533,15 +776,20 @@ pub fn addDef(store: *NodeStore, def: CIR.Def) CIR.Def.Idx {
     store.extra_data.append(store.gpa, @intFromEnum(def.expr)) catch |err| exitOnOom(err);
     // Store expr_var
     store.extra_data.append(store.gpa, @intFromEnum(def.expr_var)) catch |err| exitOnOom(err);
-    // Store kind tag
-    store.extra_data.append(store.gpa, @intFromEnum(def.kind)) catch |err| exitOnOom(err);
+    // Store kind tag as two u32's
+    const kind_encoded = def.kind.encode();
+    store.extra_data.append(store.gpa, kind_encoded[0]) catch |err| exitOnOom(err);
+    store.extra_data.append(store.gpa, kind_encoded[1]) catch |err| exitOnOom(err);
     // Store annotation idx (0 if null)
     const anno_idx = if (def.annotation) |anno| @intFromEnum(anno) else 0;
     store.extra_data.append(store.gpa, anno_idx) catch |err| exitOnOom(err);
+    // Store expr_region start and end
+    store.extra_data.append(store.gpa, def.expr_region.start.offset) catch |err| exitOnOom(err);
+    store.extra_data.append(store.gpa, def.expr_region.end.offset) catch |err| exitOnOom(err);

     // Store the extra data range in the node
     node.data_1 = extra_start;
-    node.data_2 = 5; // Number of extra data items
+    node.data_2 = 8; // Number of extra data items

     return @enumFromInt(@intFromEnum(store.nodes.append(store.gpa, node)));
 }
@@ -559,13 +807,11 @@ pub fn getDef(store: *NodeStore, def_idx: CIR.Def.Idx) CIR.Def {
     const pattern: CIR.Pattern.Idx = @enumFromInt(extra_data[0]);
     const expr: CIR.Expr.Idx = @enumFromInt(extra_data[1]);
     const expr_var: types.Var = @enumFromInt(extra_data[2]);
-    const kind_tag = extra_data[3];
-    const anno_idx = extra_data[4];
-
-    const kind: CIR.Def.Kind = switch (kind_tag) {
-        @intFromEnum(CIR.Def.Kind.Let) => .Let,
-        else => .{ .Stmt = @enumFromInt(0) }, // TODO: implement proper kind deserialization
-    };
+    const kind_encoded = .{ extra_data[3], extra_data[4] };
+    const kind = CIR.Def.Kind.decode(kind_encoded);
+    const anno_idx = extra_data[5];
+    const expr_region_start = extra_data[6];
+    const expr_region_end = extra_data[7];

     const annotation: ?CIR.Annotation.Idx = if (anno_idx == 0) null else @enumFromInt(anno_idx);
@@ -573,7 +819,10 @@ pub fn getDef(store: *NodeStore, def_idx: CIR.Def.Idx) CIR.Def {
         .pattern = pattern,
         .pattern_region = node.region, // Stored as node region
         .expr = expr,
-        .expr_region = base.Region.zero(), // TODO store and retrieve expr region
+        .expr_region = base.Region{
+            .start = .{ .offset = expr_region_start },
+            .end = .{ .offset = expr_region_end },
+        },
         .expr_var = expr_var,
         .annotation = annotation,
        .kind = kind,
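The `Def.Kind.encode()`/`decode()` pair used above serializes a small tagged value into two `u32`s so it can live in the flat `extra_data` array. The real `CIR.Def.Kind` layout is not shown in this diff; a hedged sketch of one way such a pair can work:

```zig
const std = @import("std");

// Hypothetical stand-in for CIR.Def.Kind: a tag plus an optional payload.
const Kind = union(enum) {
    let,
    stmt: u32,

    // First u32 is the tag, second is the payload (unused for `let`).
    fn encode(self: Kind) [2]u32 {
        return switch (self) {
            .let => .{ 0, 0 },
            .stmt => |idx| .{ 1, idx },
        };
    }

    fn decode(encoded: [2]u32) Kind {
        return switch (encoded[0]) {
            0 => .let,
            1 => .{ .stmt = encoded[1] },
            else => unreachable,
        };
    }
};

test "Kind round-trips through two u32s" {
    try std.testing.expectEqual(Kind.let, Kind.decode(Kind.let.encode()));
    const stmt = Kind{ .stmt = 42 };
    try std.testing.expectEqual(stmt, Kind.decode(stmt.encode()));
}
```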
@@ -659,23 +908,42 @@ pub fn defSpanFrom(store: *NodeStore, start: u32) CIR.Def.Span {
     return .{ .span = .{ .start = ed_start, .len = @as(u32, @intCast(end)) - start } };
 }

+/// Computes the span of a pattern starting from a given index.
+pub fn patternSpanFrom(store: *NodeStore, start: u32) CIR.Pattern.Span {
+    const end = store.scratch_patterns.top();
+    defer store.scratch_patterns.clearFrom(start);
+    var i = @as(usize, @intCast(start));
+    const ed_start = @as(u32, @intCast(store.extra_data.items.len));
+    std.debug.assert(end >= i);
+    while (i < end) {
+        store.extra_data.append(store.gpa, @intFromEnum(store.scratch_patterns.items.items[i])) catch |err| exitOnOom(err);
+        i += 1;
+    }
+    return .{ .span = .{ .start = ed_start, .len = @as(u32, @intCast(end)) - start } };
+}
+
 /// Clears scratch definitions starting from a specified index.
 pub fn clearScratchDefsFrom(store: *NodeStore, start: u32) void {
     store.scratch_defs.clearFrom(start);
 }

 /// Creates a slice corresponding to a span.
-pub fn sliceFromSpan(store: *NodeStore, comptime T: type, span: base.DataSpan) []T {
+pub fn sliceFromSpan(store: *const NodeStore, comptime T: type, span: base.DataSpan) []T {
     return @ptrCast(store.extra_data.items[span.start..][0..span.len]);
 }

 /// Returns a slice of definitions from the store.
-pub fn sliceDefs(store: *NodeStore, span: CIR.Def.Span) []CIR.Def.Idx {
+pub fn sliceDefs(store: *const NodeStore, span: CIR.Def.Span) []CIR.Def.Idx {
     return store.sliceFromSpan(CIR.Def.Idx, span.span);
 }

+/// Returns a slice of expressions from the store.
+pub fn sliceExpr(store: *const NodeStore, span: CIR.Expr.Span) []CIR.Expr.Idx {
+    return store.sliceFromSpan(CIR.Expr.Idx, span.span);
+}
+
 /// Returns a slice of `CanIR.Pattern.Idx`
-pub fn slicePatterns(store: *NodeStore, span: CIR.Pattern.Span) []CIR.Pattern.Idx {
+pub fn slicePatterns(store: *const NodeStore, span: CIR.Pattern.Span) []CIR.Pattern.Idx {
     return store.sliceFromSpan(CIR.Pattern.Idx, span.span);
 }

@@ -683,3 +951,15 @@ pub fn slicePatterns(store: *const NodeStore, span: CIR.Pattern.Span) []CIR.Pattern.Idx {
 pub fn sliceIfBranch(store: *const NodeStore, span: CIR.IfBranch.Span) []CIR.IfBranch.Idx {
     return store.sliceFromSpan(CIR.IfBranch.Idx, span.span);
 }
+
+/// Any node type can be malformed, but must come with a diagnostic reason
+pub fn addMalformed(store: *NodeStore, comptime t: type, reason: CIR.Diagnostic.Tag, region: base.Region) t {
+    const nid = store.nodes.append(store.gpa, .{
+        .tag = .malformed,
+        .data_1 = @intFromEnum(reason),
+        .data_2 = 0, // spare
+        .data_3 = 0, // spare
+        .region = region,
+    });
+    return @enumFromInt(@intFromEnum(nid));
+}
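`sliceFromSpan` works because the span's indices were appended to `extra_data` contiguously (as in `patternSpanFrom` above), so a typed slice is just a pointer cast over that window. A minimal sketch using a hypothetical `DefIdx`:

```zig
const std = @import("std");

const DefIdx = enum(u32) { _ };

test "a span over extra_data reinterprets as typed indices" {
    var extra_data = [_]u32{ 10, 11, 12, 13 };
    // A span of { .start = 1, .len = 2 } selects {11, 12}.
    const window: []u32 = extra_data[1..3];
    // Same-size elements, so the slice can be reinterpreted in place.
    const defs: []DefIdx = @ptrCast(window);
    try std.testing.expectEqual(@as(DefIdx, @enumFromInt(11)), defs[0]);
    try std.testing.expectEqual(@as(DefIdx, @enumFromInt(12)), defs[1]);
}
```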

src/check/canonicalize/Scope.zig

@@ -1,6 +1,19 @@
 //! Scope management for identifier resolution during canonicalization.
+//!
+//! This module provides a hierarchical scope structure for tracking identifiers and aliases
+//! during the canonicalization phase of compilation. It supports:
+//! - Nested scopes with shadowing semantics
+//! - Separate namespaces for identifiers and type aliases
+//! - Lookups that search through nested scopes from innermost to outermost
+//! - Error reporting for duplicate and missing identifiers
+//!
+//! The scope hierarchy works like a stack of levels, where each level represents a lexical
+//! scope (e.g., function body, let-binding, pattern match). When looking up an identifier,
+//! the search proceeds from the innermost scope outward until the identifier is found or
+//! all scopes are exhausted.

 const std = @import("std");
 const base = @import("../../base.zig");
 const problem_mod = @import("../../problem.zig");
 const collections = @import("../../collections.zig");

 const Alias = @import("./Alias.zig");
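The nesting-and-shadowing behavior described in the new module doc is easiest to see in miniature. The sketch below is a self-contained analogue of the `Levels` API that the rest of this diff introduces (enter/exit, introduce, lookup); the names and types are simplified stand-ins, not the commit's actual declarations:

```zig
const std = @import("std");

// A miniature stand-in for Scope.Levels: a stack of maps from name to a
// pattern index. Lookups walk from the innermost level outward.
const MiniScope = struct {
    levels: std.ArrayList(std.StringHashMap(u32)),

    fn init(gpa: std.mem.Allocator) MiniScope {
        return .{ .levels = std.ArrayList(std.StringHashMap(u32)).init(gpa) };
    }

    fn deinit(self: *MiniScope) void {
        for (self.levels.items) |*level| level.deinit();
        self.levels.deinit();
    }

    fn enter(self: *MiniScope, gpa: std.mem.Allocator) !void {
        try self.levels.append(std.StringHashMap(u32).init(gpa));
    }

    fn exit(self: *MiniScope) !void {
        if (self.levels.items.len <= 1) return error.ExitedTopScopeLevel;
        var level = self.levels.pop().?;
        level.deinit();
    }

    // Duplicates are only rejected within the current level, so nested
    // levels may shadow outer bindings, matching `introduce` below.
    fn introduce(self: *MiniScope, name: []const u8, pattern: u32) !void {
        const current = &self.levels.items[self.levels.items.len - 1];
        if (current.contains(name)) return error.AlreadyInScope;
        try current.put(name, pattern);
    }

    fn lookup(self: *MiniScope, name: []const u8) ?u32 {
        var i = self.levels.items.len;
        while (i > 0) {
            i -= 1;
            if (self.levels.items[i].get(name)) |pattern| return pattern;
        }
        return null;
    }
};

test "inner levels shadow outer ones" {
    var scope = MiniScope.init(std.testing.allocator);
    defer scope.deinit();

    try scope.enter(std.testing.allocator); // top level
    try scope.introduce("x", 1);

    try scope.enter(std.testing.allocator); // nested level
    try scope.introduce("x", 2); // shadowing is allowed across levels
    try std.testing.expectEqual(@as(?u32, 2), scope.lookup("x"));

    try scope.exit();
    try std.testing.expectEqual(@as(?u32, 1), scope.lookup("x"));
}
```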
@ -8,391 +21,536 @@ const Alias = @import("./Alias.zig");
|
|||
const Ident = base.Ident;
|
||||
const Region = base.Region;
|
||||
const Module = base.Module;
|
||||
const Problem = problem_mod.Problem;
|
||||
const Pattern = @import("CIR.zig").Pattern;
|
||||
const exitOnOom = collections.utils.exitOnOom;
|
||||
|
||||
const Scope = @This();
|
||||
|
||||
const TagMap = std.AutoHashMapUnmanaged(Ident.Idx, Alias.Idx);
|
||||
|
||||
env: *base.ModuleEnv,
|
||||
/// The custom alias that this file is centered around, if one has been defined.
|
||||
focused_custom_alias: ?Alias.Idx = null,
|
||||
// TODO: handle renaming, e.g. `CustomType := [ExportedName as LocalName]`
|
||||
custom_tags: TagMap = TagMap.empty,
|
||||
/// Identifiers/aliases that are in scope, and defined in the current module.
|
||||
levels: Levels,
|
||||
|
||||
/// Errors that can occur during scope operations
|
||||
pub const Error = error{
|
||||
NotInScope,
|
||||
AlreadyInScope,
|
||||
ExitedTopScopeLevel,
|
||||
};
|
||||
|
||||
/// Initialize a new scope.
|
||||
pub fn init(
|
||||
env: *base.ModuleEnv,
|
||||
builtin_aliases: []const struct { alias: Ident.Idx, name: Ident.Idx },
|
||||
builtin_idents: []const Ident.Idx,
|
||||
) Scope {
|
||||
var scope = Scope{ .env = env, .levels = Levels{ .env = env } };
|
||||
pub fn init(gpa: std.mem.Allocator) Scope {
|
||||
var scope = Scope{ .levels = Levels{} };
|
||||
|
||||
scope.levels.enter();
|
||||
|
||||
for (builtin_idents) |builtin_ident| {
|
||||
_ = scope.levels.introduce(.ident, .{
|
||||
.scope_name = builtin_ident,
|
||||
.ident = builtin_ident,
|
||||
});
|
||||
}
|
||||
|
||||
for (builtin_aliases) |builtin_alias| {
|
||||
_ = scope.levels.introduce(.alias, .{
|
||||
.scope_name = builtin_alias.name,
|
||||
.alias = builtin_alias.alias,
|
||||
});
|
||||
}
|
||||
// ensure we have a top-level scope
|
||||
scope.levels.enter(gpa);
|
||||
|
||||
return scope;
|
||||
}
|
||||
|
||||
/// Deinitialize a scope's memory
|
||||
pub fn deinit(self: *Scope) void {
|
||||
// if (self.custom_tags.size > 0) {
|
||||
// self.custom_tags.deinit(self.env.gpa);
|
||||
// }
|
||||
Levels.deinit(&self.levels);
|
||||
pub fn deinit(self: *Scope, gpa: std.mem.Allocator) void {
|
||||
Levels.deinit(gpa, &self.levels);
|
||||
}
|
||||
|
||||
/// Generates a unique ident like "1" or "5" in the home module.
|
||||
///
|
||||
/// This is used, for example, during canonicalization of an Expr::Closure
|
||||
/// to generate a unique ident to refer to that closure.
|
||||
pub fn genUnique(self: *Scope) Ident.Idx {
|
||||
const unique_idx = self.env.idents.genUnique();
|
||||
|
||||
_ = self.levels.introduce(.ident, .{
|
||||
.scope_name = unique_idx,
|
||||
.ident = unique_idx,
|
||||
});
|
||||
|
||||
return unique_idx;
|
||||
}
|
||||
|
||||
/// todo
|
||||
pub fn Contains(item_kind: Level.ItemKind) type {
|
||||
return union(enum) {
|
||||
InScope: Level.ItemName(item_kind),
|
||||
NotInScope: Level.ItemName(item_kind),
|
||||
NotPresent,
|
||||
};
|
||||
}
|
||||
|
||||
/// todo
|
||||
/// Level in the scope hierarchy
|
||||
pub const Level = struct {
|
||||
idents: std.ArrayListUnmanaged(IdentInScope) = .{},
|
||||
aliases: std.ArrayListUnmanaged(AliasInScope) = .{},
|
||||
/// Maps an Ident to a Pattern in the Can IR
|
||||
idents: std.AutoHashMapUnmanaged(Ident.Idx, Pattern.Idx),
|
||||
aliases: std.AutoHashMapUnmanaged(Ident.Idx, Pattern.Idx),
|
||||
|
||||
/// todo
|
||||
/// Initialize the level
|
||||
pub fn init() Level {
|
||||
return Level{
|
||||
.idents = std.AutoHashMapUnmanaged(Ident.Idx, Pattern.Idx).empty,
|
||||
.aliases = std.AutoHashMapUnmanaged(Ident.Idx, Pattern.Idx).empty,
|
||||
};
|
||||
}
|
||||
|
||||
/// Deinitialize the level
|
||||
pub fn deinit(self: *Level, gpa: std.mem.Allocator) void {
|
||||
if (self.idents.count() > 0) {
|
||||
self.idents.deinit(gpa);
|
||||
}
|
||||
if (self.aliases.count() > 0) {
|
||||
self.aliases.deinit(gpa);
|
||||
}
|
||||
}
|
||||
|
||||
/// Item kinds in a level
|
||||
pub const ItemKind = enum { ident, alias };
|
||||
/// todo
|
||||
pub fn Item(comptime item_kind: ItemKind) type {
|
||||
return switch (item_kind) {
|
||||
.ident => IdentInScope,
|
||||
.alias => AliasInScope,
|
||||
};
|
||||
}
|
||||
|
||||
/// todo
|
||||
pub fn ItemName(comptime item_kind: ItemKind) type {
|
||||
return switch (item_kind) {
|
||||
.ident => Ident.Idx,
|
||||
.alias => Ident.Idx,
|
||||
};
|
||||
}
|
||||
|
||||
/// todo
|
||||
pub fn items(level: *Level, comptime item_kind: ItemKind) *std.ArrayListUnmanaged(Item(item_kind)) {
|
||||
/// Get the appropriate map for the given item kind
|
||||
pub fn items(level: *Level, comptime item_kind: ItemKind) *std.AutoHashMapUnmanaged(Ident.Idx, Pattern.Idx) {
|
||||
return switch (item_kind) {
|
||||
.ident => &level.idents,
|
||||
.alias => &level.aliases,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn append(level: *Level, gpa: std.mem.Allocator, comptime item_kind: ItemKind, item: Item(item_kind)) void {
|
||||
switch (item_kind) {
|
||||
.ident => {
|
||||
level.idents.append(gpa, item) catch |e| exitOnOom(e);
|
||||
},
|
||||
.alias => {
|
||||
level.aliases.append(gpa, item) catch |e| exitOnOom(e);
|
||||
},
|
||||
}
|
||||
/// Get the appropriate map for the given item kind (const version)
|
||||
pub fn itemsConst(level: *const Level, comptime item_kind: ItemKind) *const std.AutoHashMapUnmanaged(Ident.Idx, Pattern.Idx) {
|
||||
return switch (item_kind) {
|
||||
.ident => &level.idents,
|
||||
.alias => &level.aliases,
|
||||
};
|
||||
}
|
||||
/// todo
|
||||
pub const IdentInScope = struct {
|
||||
scope_name: Ident.Idx,
|
||||
ident: Ident.Idx,
|
||||
};
|
||||
/// todo
|
||||
pub const AliasInScope = struct {
|
||||
scope_name: Ident.Idx,
|
||||
alias: Ident.Idx,
|
||||
};
|
||||
/// todo
|
||||
pub fn deinit(self: *Level, gpa: std.mem.Allocator) void {
|
||||
if (self.idents.items.len > 0) {
|
||||
self.idents.deinit(gpa);
|
||||
}
|
||||
if (self.aliases.items.len > 0) {
|
||||
self.aliases.deinit(gpa);
|
||||
}
|
||||
|
||||
/// Put an item in the level, panics on OOM
|
||||
pub fn put(level: *Level, gpa: std.mem.Allocator, comptime item_kind: ItemKind, name: Ident.Idx, pattern: Pattern.Idx) void {
|
||||
level.items(item_kind).put(gpa, name, pattern) catch |err| exitOnOom(err);
|
||||
}
|
||||
};
|
||||
|
||||
/// todo
|
||||
/// Manages multiple levels of scope
|
||||
pub const Levels = struct {
|
||||
env: *base.ModuleEnv,
|
||||
levels: std.ArrayListUnmanaged(Level) = .{},
|
||||
/// todo
|
||||
pub fn deinit(self: *Levels) void {
|
||||
|
||||
/// Deinitialize all levels
|
||||
pub fn deinit(gpa: std.mem.Allocator, self: *Levels) void {
|
||||
for (0..self.levels.items.len) |i| {
|
||||
var level = &self.levels.items[i];
|
||||
level.deinit(self.env.gpa);
|
||||
level.deinit(gpa);
|
||||
}
|
||||
self.levels.deinit(self.env.gpa);
|
||||
}
|
||||
/// todo
|
||||
pub fn enter(self: *Levels) void {
|
||||
self.levels.append(self.env.gpa, .{}) catch |err| exitOnOom(err);
|
||||
}
|
||||
/// todo
|
||||
pub fn exit(self: *Levels) void {
|
||||
if (self.levels.items.len <= 1) {
|
||||
self.env.problems.append(self.env.gpa, Problem.Compiler.make(.{
|
||||
.canonicalize = .exited_top_scope_level,
|
||||
})) catch |err| exitOnOom(err);
|
||||
return;
|
||||
}
|
||||
_ = self.levels.pop();
|
||||
}
|
||||
/// todo
|
||||
pub fn iter(self: *Levels, comptime item_kind: Level.ItemKind) Iterator(item_kind) {
|
||||
return Iterator(item_kind).new(self);
|
||||
self.levels.deinit(gpa);
|
||||
}
|
||||
|
||||
/// Enter a new scope level
|
||||
pub fn enter(self: *Levels, gpa: std.mem.Allocator) void {
|
||||
const level = Level.init();
|
||||
self.levels.append(gpa, level) catch |err| exitOnOom(err);
|
||||
}
|
||||
|
||||
/// Exit the current scope level
|
||||
pub fn exit(self: *Levels, gpa: std.mem.Allocator) error{ExitedTopScopeLevel}!void {
|
||||
if (self.levels.items.len <= 1) {
|
||||
return Error.ExitedTopScopeLevel;
|
||||
}
|
||||
var level: Level = self.levels.pop().?;
|
||||
level.deinit(gpa);
|
||||
}
|
||||
|
||||
/// Check if an identifier is in scope
|
||||
fn contains(
|
||||
self: *Levels,
|
||||
self: *const Levels,
|
||||
ident_store: *const Ident.Store,
|
||||
comptime item_kind: Level.ItemKind,
|
||||
name: Level.ItemName(item_kind),
|
||||
) ?Level.Item(item_kind) {
|
||||
var items_in_scope = Iterator(item_kind).new(self);
|
||||
while (items_in_scope.nextData()) |item_in_scope| {
|
||||
switch (item_kind) {
|
||||
.alias => {
|
||||
if (self.env.idents.identsHaveSameText(name, item_in_scope.alias)) {
|
||||
return item_in_scope;
|
||||
}
|
||||
},
|
||||
.ident => {
|
||||
if (self.env.idents.identsHaveSameText(name, item_in_scope.ident)) {
|
||||
return item_in_scope;
|
||||
}
|
||||
},
|
||||
name: Ident.Idx,
|
||||
) ?Pattern.Idx {
|
||||
var level_idx = self.levels.items.len;
|
||||
while (level_idx > 0) {
|
||||
level_idx -= 1;
|
||||
const level = &self.levels.items[level_idx];
|
||||
const map = level.itemsConst(item_kind);
|
||||
|
||||
var iter = map.iterator();
|
||||
while (iter.next()) |entry| {
|
||||
if (ident_store.identsHaveSameText(name, entry.key_ptr.*)) {
|
||||
return entry.value_ptr.*;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/// todo
|
||||
/// Look up an identifier in the scope
|
||||
pub fn lookup(
|
||||
self: *Levels,
|
||||
self: *const Levels,
|
||||
ident_store: *const Ident.Store,
|
||||
comptime item_kind: Level.ItemKind,
|
||||
name: Level.ItemName(item_kind),
|
||||
) Contains(item_kind) {
|
||||
if (self.contains(item_kind, name)) |_| {
|
||||
return Contains(item_kind){ .InScope = name };
|
||||
name: Ident.Idx,
|
||||
) ?Pattern.Idx {
|
||||
if (self.contains(ident_store, item_kind, name)) |pattern| {
|
||||
return pattern;
|
||||
}
|
||||
|
||||
const problem = switch (item_kind) {
|
||||
.ident => blk: {
|
||||
var all_idents_in_scope = self.iter(.ident);
|
||||
const options = self.env.ident_ids_for_slicing.extendFromIter(self.env.gpa, &all_idents_in_scope);
|
||||
|
||||
break :blk Problem.Canonicalize.make(.{ .IdentNotInScope = .{
|
||||
.ident = name,
|
||||
.suggestions = options,
|
||||
} });
|
||||
},
|
||||
.alias => blk: {
|
||||
var all_aliases_in_scope = self.levels.iter(.alias);
|
||||
const options = self.env.ident_ids_for_slicing.extendFromIter(self.env.gpa, &all_aliases_in_scope);
|
||||
|
||||
break :blk Problem.Canonicalize.make(.{ .AliasNotInScope = .{
|
||||
.name = name,
|
||||
.suggestions = options,
|
||||
} });
|
||||
},
|
||||
};
|
||||
|
||||
_ = self.env.problems.append(self.env.gpa, problem);
|
||||
return Contains(item_kind).NotPresent;
|
||||
return null;
|
||||
}
|
||||
/// todo
|
||||
|
||||
/// Introduce a new identifier to the current scope level
|
||||
pub fn introduce(
|
||||
self: *Levels,
|
||||
gpa: std.mem.Allocator,
|
||||
ident_store: *const Ident.Store,
|
||||
comptime item_kind: Level.ItemKind,
|
||||
scope_item: Level.Item(item_kind),
|
||||
) Level.Item(item_kind) {
|
||||
if (self.contains(item_kind, scope_item.scope_name)) |item_in_scope| {
|
||||
const can_problem: Problem.Canonicalize = switch (item_kind) {
|
||||
.ident => .{ .IdentAlreadyInScope = .{
|
||||
.original_ident = item_in_scope.scope_name,
|
||||
.shadow = scope_item.scope_name,
|
||||
} },
|
||||
.alias => .{ .AliasAlreadyInScope = .{
|
||||
.original_name = item_in_scope.scope_name,
|
||||
.shadow = scope_item.scope_name,
|
||||
} },
|
||||
};
|
||||
ident_idx: Ident.Idx,
|
||||
pattern_idx: Pattern.Idx,
|
||||
) error{AlreadyInScope}!void {
|
||||
// Only check the current level for duplicates to allow shadowing in nested scopes
|
||||
const current_level = &self.levels.items[self.levels.items.len - 1];
|
||||
const map = current_level.itemsConst(item_kind);
|
||||
|
||||
_ = self.env.problems.append(self.env.gpa, Problem.Canonicalize.make(can_problem));
|
||||
// TODO: is this correct for shadows?
|
||||
return scope_item;
|
||||
var iter = map.iterator();
|
||||
while (iter.next()) |entry| {
|
||||
if (ident_store.identsHaveSameText(ident_idx, entry.key_ptr.*)) {
|
||||
return Error.AlreadyInScope;
|
||||
}
|
||||
}
|
||||
|
||||
self.levels.items[self.levels.items.len -| 1].append(self.env.gpa, item_kind, scope_item);
|
||||
|
||||
return scope_item;
|
||||
self.levels.items[self.levels.items.len - 1].put(gpa, item_kind, ident_idx, pattern_idx);
|
||||
}
|
||||
/// todo
|
||||
pub fn Iterator(comptime item_kind: Level.ItemKind) type {
|
||||
return struct {
|
||||
levels: *Levels,
|
||||
level_index: usize,
|
||||
prior_item_index: usize,
|
||||
/// todo
|
||||
pub fn empty(levels: *Levels) Iterator(item_kind) {
|
||||
return Iterator(item_kind){
|
||||
.levels = levels,
|
||||
.level_index = 0,
|
||||
.prior_item_index = 0,
|
||||
};
|
||||
|
||||
/// Get all identifiers in scope
|
||||
pub fn getAllIdentsInScope(self: *const Levels, gpa: std.mem.Allocator, comptime item_kind: Level.ItemKind) []Ident.Idx {
|
||||
var result = std.ArrayList(Ident.Idx).init(gpa);
|
||||
|
||||
for (self.levels.items) |level| {
|
||||
const map = level.itemsConst(item_kind);
|
||||
var iter = map.iterator();
|
||||
while (iter.next()) |entry| {
|
||||
result.append(entry.key_ptr.*) catch |err| exitOnOom(err);
|
||||
}
|
||||
/// todo
|
||||
pub fn new(scope_levels: *Levels) Iterator(item_kind) {
|
||||
if (scope_levels.levels.items.len == 0) {
|
||||
return empty(scope_levels);
|
||||
}
|
||||
}
|
||||
|
||||
const levels = scope_levels.levels.items;
|
||||
|
||||
var level_index = levels.len - 1;
|
||||
while (level_index > 0 and levels[level_index].items(item_kind).items.len == 0) {
|
||||
level_index -= 1;
|
||||
}
|
||||
|
||||
const prior_item_index = levels[level_index].items(item_kind).items.len;
|
||||
|
||||
return Iterator(item_kind){
|
||||
.levels = scope_levels,
|
||||
.level_index = level_index,
|
||||
.prior_item_index = prior_item_index,
|
||||
};
|
||||
}
|
||||
/// todo
|
||||
pub fn next(
|
||||
self: *Iterator(item_kind),
|
||||
) ?Level.ItemName(item_kind) {
|
||||
if (self.prior_item_index == 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const levels = self.levels.levels.items;
|
||||
var level = levels[self.level_index];
|
||||
const next_item = level.items(item_kind).items[self.prior_item_index - 1];
|
||||
|
||||
self.prior_item_index -|= 1;
|
||||
|
||||
if (self.prior_item_index == 0) {
|
||||
self.level_index -|= 1;
|
||||
|
||||
while (self.level_index > 0 and levels[self.level_index].items(item_kind).items.len == 0) {
|
||||
self.level_index -|= 1;
|
||||
}
|
||||
}
|
||||
|
||||
return next_item.scope_name;
|
||||
}
|
||||
/// todo
|
||||
pub fn nextData(
|
||||
self: *Iterator(item_kind),
|
||||
) ?Level.Item(item_kind) {
|
||||
if (self.prior_item_index == 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const levels = self.levels.levels.items;
|
||||
var level = levels[self.level_index];
|
||||
const next_item = level.items(item_kind).items[self.prior_item_index - 1];
|
||||
|
||||
self.prior_item_index -= 1;
|
||||
|
||||
if (self.prior_item_index == 0) {
|
||||
self.level_index -|= 1;
|
||||
|
||||
while (self.level_index > 0 and levels[self.level_index].items(item_kind).items.len == 0) {
|
||||
self.level_index -= 1;
|
||||
}
|
||||
}
|
||||
|
||||
return next_item;
|
||||
}
|
||||
};
|
||||
return result.toOwnedSlice() catch |err| exitOnOom(err);
|
||||
}
|
||||
};
|
||||
|
||||
fn createTestScope(idents: [][]Level.IdentInScope, aliases: [][]Level.AliasInScope) Scope {
|
||||
test "empty scope has no items" {
|
||||
const gpa = std.testing.allocator;
|
||||
var env = base.ModuleEnv.init(gpa);
|
||||
defer env.deinit();
|
||||
var ident_store = Ident.Store.initCapacity(gpa, 100);
|
||||
defer ident_store.deinit(gpa);
|
||||
|
||||
var scope = Scope{
|
||||
.env = &env,
|
||||
.focused_custom_alias = null,
|
||||
.custom_tags = std.AutoHashMap(Ident.Idx, Alias.Idx).init(gpa),
|
||||
.levels = Levels.init(&env, gpa),
|
||||
.gpa = gpa,
|
||||
};
|
||||
scope.deinit();
|
||||
var scope = init(gpa);
|
||||
defer scope.deinit(gpa);
|
||||
|
||||
const max_level = @min(idents.len, aliases.len);
|
||||
for (0..max_level) |_| {
|
||||
scope.levels.enter();
|
||||
}
|
||||
const foo_ident = ident_store.insert(gpa, Ident.for_text("foo"), Region.zero());
|
||||
const result = scope.levels.lookup(&ident_store, .ident, foo_ident);
|
||||
|
||||
if (idents.len > 0) {
|
||||
for (idents, 0..) |ident_level, level_index| {
|
||||
var level = scope.levels.levels.items[level_index];
|
||||
for (ident_level) |ident_in_scope| {
|
||||
level.idents.append(ident_in_scope) catch |err| exitOnOom(err);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (aliases.len > 0) {
|
||||
for (aliases, 0..) |alias_level, level_index| {
|
||||
var level = scope.levels.levels.items[level_index];
|
||||
for (alias_level) |aliases_in_scope| {
|
||||
level.aliases.append(aliases_in_scope) catch |err| exitOnOom(err);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return scope;
|
||||
try std.testing.expectEqual(null, result);
|
||||
}
|
||||
|
||||
// test "empty scope has no items" {
|
||||
// var scope = createTestScope(&.{}, &.{});
|
||||
// defer scope.env.deinit();
|
||||
// defer scope.deinit();
|
||||
test "can add and lookup idents at top level" {
|
||||
const gpa = std.testing.allocator;
|
||||
var ident_store = Ident.Store.initCapacity(gpa, 100);
|
||||
defer ident_store.deinit(gpa);
|
||||
|
||||
// var ident_iter = scope.levels.iter(.ident);
|
||||
var scope = init(gpa);
|
||||
defer scope.deinit(gpa);
|
||||
|
||||
// try std.testing.expectEqual(null, ident_iter.next());
|
||||
// }
|
||||
const foo_ident = ident_store.insert(gpa, Ident.for_text("foo"), Region.zero());
|
||||
const bar_ident = ident_store.insert(gpa, Ident.for_text("bar"), Region.zero());
|
||||
const foo_pattern: Pattern.Idx = @enumFromInt(1);
|
||||
const bar_pattern: Pattern.Idx = @enumFromInt(2);
|
||||
|
||||
// Add identifiers
|
||||
_ = try scope.levels.introduce(gpa, &ident_store, .ident, foo_ident, foo_pattern);
|
||||
_ = try scope.levels.introduce(gpa, &ident_store, .ident, bar_ident, bar_pattern);
|
||||
|
||||
// Lookup should find them
|
||||
const foo_result = scope.levels.lookup(&ident_store, .ident, foo_ident);
|
||||
const bar_result = scope.levels.lookup(&ident_store, .ident, bar_ident);
|
||||
|
||||
try std.testing.expectEqual(foo_pattern, foo_result);
|
||||
try std.testing.expectEqual(bar_pattern, bar_result);
|
||||
}
|
||||
|
||||
test "nested scopes shadow outer scopes" {
|
||||
const gpa = std.testing.allocator;
|
||||
var ident_store = Ident.Store.initCapacity(gpa, 100);
|
||||
defer ident_store.deinit(gpa);
|
||||
|
||||
var scope = init(gpa);
|
||||
defer scope.deinit(gpa);
|
||||
|
||||
const x_ident = ident_store.insert(gpa, Ident.for_text("x"), Region.zero());
|
||||
const outer_pattern: Pattern.Idx = @enumFromInt(1);
|
||||
const inner_pattern: Pattern.Idx = @enumFromInt(2);
|
||||
|
||||
// Add x to outer scope
|
||||
_ = try scope.levels.introduce(gpa, &ident_store, .ident, x_ident, outer_pattern);
|
||||
|
||||
// Enter new scope
|
||||
scope.levels.enter(gpa);
|
||||
|
||||
// x from outer scope should still be visible
|
||||
const outer_result = scope.levels.lookup(&ident_store, .ident, x_ident);
|
||||
try std.testing.expectEqual(outer_pattern, outer_result);
|
||||
|
||||
// Add x to inner scope (shadows outer)
|
||||
_ = try scope.levels.introduce(gpa, &ident_store, .ident, x_ident, inner_pattern);
|
||||
|
||||
// Now x should resolve to inner scope
|
||||
const inner_result = scope.levels.lookup(&ident_store, .ident, x_ident);
|
||||
try std.testing.expectEqual(inner_pattern, inner_result);
|
||||
|
||||
// Exit inner scope
|
||||
try scope.levels.exit(gpa);
|
||||
|
||||
// x should resolve to outer scope again
|
||||
const after_exit_result = scope.levels.lookup(&ident_store, .ident, x_ident);
|
||||
try std.testing.expectEqual(outer_pattern, after_exit_result);
|
||||
}
|
||||
|
||||
test "cannot introduce duplicate identifier in same scope" {
|
||||
const gpa = std.testing.allocator;
|
||||
var ident_store = Ident.Store.initCapacity(gpa, 100);
|
||||
defer ident_store.deinit(gpa);
|
||||
|
||||
var scope = init(gpa);
|
||||
defer scope.deinit(gpa);
|
||||
|
||||
const x_ident = ident_store.insert(gpa, Ident.for_text("x"), Region.zero());
|
||||
const pattern1: Pattern.Idx = @enumFromInt(1);
|
||||
const pattern2: Pattern.Idx = @enumFromInt(2);
|
||||
|
||||
// First introduction should succeed
|
||||
_ = try scope.levels.introduce(gpa, &ident_store, .ident, x_ident, pattern1);
|
||||
|
||||
// Second introduction should fail
|
||||
const result = scope.levels.introduce(gpa, &ident_store, .ident, x_ident, pattern2);
|
||||
try std.testing.expectError(Error.AlreadyInScope, result);
|
||||
}
|
||||
|
||||
test "aliases work separately from idents" {
|
||||
const gpa = std.testing.allocator;
|
||||
var ident_store = Ident.Store.initCapacity(gpa, 100);
|
||||
defer ident_store.deinit(gpa);
|
||||
|
||||
var scope = init(gpa);
|
||||
defer scope.deinit(gpa);
|
||||
|
||||
const foo = ident_store.insert(gpa, Ident.for_text("Foo"), Region.zero());
|
||||
const ident_pattern: Pattern.Idx = @enumFromInt(1);
|
||||
const alias_pattern: Pattern.Idx = @enumFromInt(2);
|
||||
|
||||
// Add as both ident and alias (they're in separate namespaces)
|
||||
_ = try scope.levels.introduce(gpa, &ident_store, .ident, foo, ident_pattern);
|
||||
_ = try scope.levels.introduce(gpa, &ident_store, .alias, foo, alias_pattern);
|
||||
|
||||
// Both should be found in their respective namespaces
|
||||
const ident_result = scope.levels.lookup(&ident_store, .ident, foo);
|
||||
const alias_result = scope.levels.lookup(&ident_store, .alias, foo);
|
||||
|
||||
try std.testing.expectEqual(ident_pattern, ident_result);
|
||||
try std.testing.expectEqual(alias_pattern, alias_result);
|
||||
}
|
||||
|
||||
test "cannot exit top scope level" {
|
||||
const gpa = std.testing.allocator;
|
||||
var ident_store = Ident.Store.initCapacity(gpa, 100);
|
||||
defer ident_store.deinit(gpa);
|
||||
|
||||
var scope = init(gpa);
|
||||
defer scope.deinit(gpa);
|
||||
|
||||
// Should fail to exit the only level
|
||||
const result = scope.levels.exit(gpa);
|
||||
try std.testing.expectError(Error.ExitedTopScopeLevel, result);
|
||||
}
|
||||
|
||||
test "multiple nested scopes work correctly" {
|
||||
const gpa = std.testing.allocator;
|
||||
var ident_store = Ident.Store.initCapacity(gpa, 100);
|
||||
defer ident_store.deinit(gpa);
|
||||
|
||||
var scope = init(gpa);
|
||||
defer scope.deinit(gpa);
|
||||
|
||||
const a = ident_store.insert(gpa, Ident.for_text("a"), Region.zero());
|
||||
const b = ident_store.insert(gpa, Ident.for_text("b"), Region.zero());
|
||||
const c = ident_store.insert(gpa, Ident.for_text("c"), Region.zero());
|
||||
|
||||
const pattern_a: Pattern.Idx = @enumFromInt(1);
|
||||
const pattern_b1: Pattern.Idx = @enumFromInt(2);
|
||||
const pattern_b2: Pattern.Idx = @enumFromInt(3);
|
||||
const pattern_c: Pattern.Idx = @enumFromInt(4);
|
||||
|
||||
// Level 1: add a
|
||||
_ = try scope.levels.introduce(gpa, &ident_store, .ident, a, pattern_a);
|
||||
|
||||
// Enter level 2: add b
|
||||
scope.levels.enter(gpa);
|
||||
_ = try scope.levels.introduce(gpa, &ident_store, .ident, b, pattern_b1);
|
||||
|
||||
// Enter level 3: shadow b, add c
|
||||
scope.levels.enter(gpa);
|
||||
_ = try scope.levels.introduce(gpa, &ident_store, .ident, b, pattern_b2);
|
||||
_ = try scope.levels.introduce(gpa, &ident_store, .ident, c, pattern_c);
|
||||
|
||||
// Check all are visible with correct values
|
||||
try std.testing.expectEqual(pattern_a, scope.levels.lookup(&ident_store, .ident, a));
|
||||
try std.testing.expectEqual(pattern_b2, scope.levels.lookup(&ident_store, .ident, b));
|
||||
try std.testing.expectEqual(pattern_c, scope.levels.lookup(&ident_store, .ident, c));
|
||||
|
||||
// Exit level 3
|
||||
try scope.levels.exit(gpa);
|
||||
|
||||
// c should be gone, b should be from level 2
|
||||
try std.testing.expectEqual(pattern_a, scope.levels.lookup(&ident_store, .ident, a));
|
||||
try std.testing.expectEqual(pattern_b1, scope.levels.lookup(&ident_store, .ident, b));
|
||||
try std.testing.expectEqual(null, scope.levels.lookup(&ident_store, .ident, c));
|
||||
|
||||
// Exit level 2
|
||||
try scope.levels.exit(gpa);
|
||||
|
||||
// Only a should remain
|
||||
try std.testing.expectEqual(pattern_a, scope.levels.lookup(&ident_store, .ident, a));
|
||||
try std.testing.expectEqual(null, scope.levels.lookup(&ident_store, .ident, b));
|
||||
try std.testing.expectEqual(null, scope.levels.lookup(&ident_store, .ident, c));
|
||||
}
|
||||
|
||||
test "getAllIdentsInScope returns all identifiers" {
|
||||
const gpa = std.testing.allocator;
|
||||
var ident_store = Ident.Store.initCapacity(gpa, 100);
|
||||
defer ident_store.deinit(gpa);
|
||||
|
||||
var scope = init(gpa);
|
||||
defer scope.deinit(gpa);
|
||||
|
||||
const a = ident_store.insert(gpa, Ident.for_text("a"), Region.zero());
|
||||
const b = ident_store.insert(gpa, Ident.for_text("b"), Region.zero());
|
||||
const c = ident_store.insert(gpa, Ident.for_text("c"), Region.zero());
|
||||
|
||||
_ = try scope.levels.introduce(gpa, &ident_store, .ident, a, @enumFromInt(1));
|
||||
_ = try scope.levels.introduce(gpa, &ident_store, .ident, b, @enumFromInt(2));
|
||||
|
||||
// Get all idents in scope
|
||||
const all_idents_1 = scope.levels.getAllIdentsInScope(gpa, .ident);
|
||||
defer gpa.free(all_idents_1);
|
||||
|
||||
// Should only have 2 identifiers
|
||||
try std.testing.expectEqual(@as(usize, 2), all_idents_1.len);
|
||||
|
||||
scope.levels.enter(gpa);
|
||||
|
||||
_ = try scope.levels.introduce(gpa, &ident_store, .ident, c, @enumFromInt(3));
|
||||
|
||||
// Get all idents in scope
|
||||
const all_idents_2 = scope.levels.getAllIdentsInScope(gpa, .ident);
|
||||
defer gpa.free(all_idents_2);
|
||||
|
||||
// Should have all 3 identifiers
|
||||
try std.testing.expectEqual(@as(usize, 3), all_idents_2.len);
|
||||
|
||||
// Also test for aliases (should be empty)
|
||||
const all_aliases = scope.levels.getAllIdentsInScope(gpa, .alias);
|
||||
defer gpa.free(all_aliases);
|
||||
|
||||
try std.testing.expectEqual(@as(usize, 0), all_aliases.len);
|
||||
}
|
||||
|
||||
test "identifiers with same text are treated as duplicates" {
|
||||
const gpa = std.testing.allocator;
|
||||
var ident_store = Ident.Store.initCapacity(gpa, 100);
|
||||
defer ident_store.deinit(gpa);
|
||||
|
||||
var scope = init(gpa);
|
||||
defer scope.deinit(gpa);
|
||||
|
||||
// Create two different Ident.Idx with the same text
|
||||
const foo1 = ident_store.insert(gpa, Ident.for_text("foo"), Region.zero());
|
||||
const foo2 = ident_store.insert(gpa, Ident.for_text("foo"), Region.zero());
|
||||
|
||||
// They should have different indices
|
||||
try std.testing.expect(foo1 != foo2);
|
||||
|
||||
const pattern_1_idx: Pattern.Idx = @enumFromInt(1);
|
||||
const pattern_2_idx: Pattern.Idx = @enumFromInt(1);
|
||||
|
||||
// Add the first one
|
||||
_ = try scope.levels.introduce(gpa, &ident_store, .ident, foo1, pattern_1_idx);
|
||||
|
||||
// Adding the second should fail because it has the same text
|
||||
const result = scope.levels.introduce(gpa, &ident_store, .ident, foo2, pattern_2_idx);
|
||||
try std.testing.expectError(Error.AlreadyInScope, result);
|
||||
|
||||
// But looking up either should find the first one
|
||||
const lookup1 = scope.levels.lookup(&ident_store, .ident, foo1);
|
||||
const lookup2 = scope.levels.lookup(&ident_store, .ident, foo2);
|
||||
|
||||
try std.testing.expectEqual(pattern_1_idx, lookup1);
|
||||
try std.testing.expectEqual(pattern_1_idx, lookup2);
|
||||
}
|
||||
|
||||
test "cannot introduce duplicate alias in same scope" {
|
||||
const gpa = std.testing.allocator;
|
||||
var ident_store = Ident.Store.initCapacity(gpa, 100);
|
||||
defer ident_store.deinit(gpa);
|
||||
|
||||
var scope = init(gpa);
|
||||
defer scope.deinit(gpa);
|
||||
|
||||
const list_alias = ident_store.insert(gpa, Ident.for_text("List"), Region.zero());
|
||||
const pattern1: Pattern.Idx = @enumFromInt(1);
|
||||
const pattern2: Pattern.Idx = @enumFromInt(2);
|
||||
|
||||
// First introduction should succeed
|
||||
_ = try scope.levels.introduce(gpa, &ident_store, .alias, list_alias, pattern1);
|
||||
|
||||
// Second introduction should fail
|
||||
const result = scope.levels.introduce(gpa, &ident_store, .alias, list_alias, pattern2);
|
||||
try std.testing.expectError(Error.AlreadyInScope, result);
|
||||
}
|
||||
|
||||
test "shadowing works correctly for aliases" {
|
||||
const gpa = std.testing.allocator;
|
||||
var ident_store = Ident.Store.initCapacity(gpa, 100);
|
||||
defer ident_store.deinit(gpa);
|
||||
|
||||
var scope = init(gpa);
|
||||
defer scope.deinit(gpa);
|
||||
|
||||
const my_type = ident_store.insert(gpa, Ident.for_text("MyType"), Region.zero());
|
||||
const outer_pattern: Pattern.Idx = @enumFromInt(1);
|
||||
const inner_pattern: Pattern.Idx = @enumFromInt(2);
|
||||
|
||||
// Add alias to outer scope
|
||||
_ = try scope.levels.introduce(gpa, &ident_store, .alias, my_type, outer_pattern);
|
||||
|
||||
// Enter new scope and shadow the alias
|
||||
scope.levels.enter(gpa);
|
||||
_ = try scope.levels.introduce(gpa, &ident_store, .alias, my_type, inner_pattern);
|
||||
|
||||
// Should resolve to inner scope
|
||||
const inner_result = scope.levels.lookup(&ident_store, .alias, my_type);
|
||||
try std.testing.expectEqual(inner_pattern, inner_result);
|
||||
|
||||
// Exit inner scope
|
||||
try scope.levels.exit(gpa);
|
||||
|
||||
// Should resolve to outer scope again
|
||||
const outer_result = scope.levels.lookup(&ident_store, .alias, my_type);
|
||||
try std.testing.expectEqual(outer_pattern, outer_result);
|
||||
}
|
||||
|
||||
test "deeply nested scopes maintain proper visibility" {
|
||||
const gpa = std.testing.allocator;
|
||||
var ident_store = Ident.Store.initCapacity(gpa, 100);
|
||||
defer ident_store.deinit(gpa);
|
||||
|
||||
var scope = init(gpa);
|
||||
defer scope.deinit(gpa);
|
||||
|
||||
const x = ident_store.insert(gpa, Ident.for_text("x"), Region.zero());
|
||||
const patterns = [_]Pattern.Idx{
|
||||
@enumFromInt(1),
|
||||
@enumFromInt(2),
|
||||
@enumFromInt(3),
|
||||
@enumFromInt(4),
|
||||
@enumFromInt(5),
|
||||
};
|
||||
|
||||
// Create 5 nested scopes, each shadowing x
|
||||
for (patterns) |pattern| {
|
||||
scope.levels.enter(gpa);
|
||||
_ = try scope.levels.introduce(gpa, &ident_store, .ident, x, pattern);
|
||||
|
||||
// Verify it resolves to the current pattern
|
||||
const result = scope.levels.lookup(&ident_store, .ident, x);
|
||||
try std.testing.expectEqual(pattern, result);
|
||||
}
|
||||
|
||||
// Exit all scopes and verify x resolves correctly at each level
|
||||
var i: usize = patterns.len;
|
||||
while (i > 1) : (i -= 1) {
|
||||
try scope.levels.exit(gpa);
|
||||
const expected = patterns[i - 2];
|
||||
const result = scope.levels.lookup(&ident_store, .ident, x);
|
||||
try std.testing.expectEqual(expected, result);
|
||||
}
|
||||
|
||||
// Exit the last scope - x should not be found
|
||||
try scope.levels.exit(gpa);
|
||||
const final_result = scope.levels.lookup(&ident_store, .ident, x);
|
||||
try std.testing.expectEqual(null, final_result);
|
||||
}
|
||||
|
|
|
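The scope machinery in this diff boils down to a stack of maps: introduce checks only the innermost level (so nested levels may shadow outer ones), while lookup walks from the innermost level outward until it finds a binding. A minimal standalone sketch of that idea, using plain std maps and u32 values in place of the real Ident and Pattern stores (this is an illustration, not the PR's actual code):

const std = @import("std");

const ScopeStack = struct {
    levels: std.ArrayList(std.StringHashMap(u32)),

    fn init(gpa: std.mem.Allocator) ScopeStack {
        return .{ .levels = std.ArrayList(std.StringHashMap(u32)).init(gpa) };
    }

    fn deinit(self: *ScopeStack) void {
        for (self.levels.items) |*level| level.deinit();
        self.levels.deinit();
    }

    fn enter(self: *ScopeStack, gpa: std.mem.Allocator) !void {
        try self.levels.append(std.StringHashMap(u32).init(gpa));
    }

    fn exit(self: *ScopeStack) error{ExitedTopScopeLevel}!void {
        if (self.levels.items.len <= 1) return error.ExitedTopScopeLevel;
        const last = self.levels.items.len - 1;
        self.levels.items[last].deinit();
        self.levels.shrinkRetainingCapacity(last);
    }

    fn introduce(self: *ScopeStack, name: []const u8, value: u32) !void {
        // Only the current level is checked, so inner levels may shadow outer ones.
        var current = &self.levels.items[self.levels.items.len - 1];
        if (current.contains(name)) return error.AlreadyInScope;
        try current.put(name, value);
    }

    fn lookup(self: *const ScopeStack, name: []const u8) ?u32 {
        // Innermost-out search: the most recently introduced binding wins.
        var i = self.levels.items.len;
        while (i > 0) {
            i -= 1;
            if (self.levels.items[i].get(name)) |v| return v;
        }
        return null;
    }
};

test "inner levels shadow outer ones" {
    const gpa = std.testing.allocator;
    var scope = ScopeStack.init(gpa);
    defer scope.deinit();

    try scope.enter(gpa); // top level
    try scope.introduce("x", 1);
    try scope.enter(gpa); // nested level
    try scope.introduce("x", 2); // shadows the outer x
    try std.testing.expectEqual(@as(?u32, 2), scope.lookup("x"));
    try scope.exit();
    try std.testing.expectEqual(@as(?u32, 1), scope.lookup("x"));
}
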
@ -21,6 +21,9 @@ fn runParse(env: *base.ModuleEnv, source: []const u8, parserCall: *const fn (*Pa
const trace = tracy.trace(@src());
defer trace.end();

// Calculate and store line starts for diagnostic position calculation
env.calcLineStarts(source) catch |err| exitOnOom(err);

var messages: [128]tokenize.Diagnostic = undefined;
const msg_slice = messages[0..];
var tokenizer = tokenize.Tokenizer.init(env, source, msg_slice);

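For context, calcLineStarts is what makes the compact region representation workable: the IR stores only byte offsets, and line/column numbers are recomputed from the recorded line-start offsets when a diagnostic is rendered. A rough standalone sketch of the technique, with hypothetical helper names rather than the actual ModuleEnv or RegionInfo API:

const std = @import("std");

// Record the byte offset at which each line begins.
fn findLineStarts(gpa: std.mem.Allocator, source: []const u8) !std.ArrayList(u32) {
    var starts = std.ArrayList(u32).init(gpa);
    try starts.append(0); // the first line starts at offset 0
    for (source, 0..) |byte, i| {
        if (byte == '\n') try starts.append(@intCast(i + 1));
    }
    return starts;
}

// Convert a byte offset into a 0-based (line, column) pair by finding the
// last recorded line start that is <= the offset.
fn lineAndColumn(line_starts: []const u32, offset: u32) struct { line: u32, col: u32 } {
    var line: u32 = 0;
    for (line_starts, 0..) |start, i| {
        if (start > offset) break;
        line = @intCast(i);
    }
    return .{ .line = line, .col = offset - line_starts[line] };
}

test "offset to line and column" {
    const gpa = std.testing.allocator;
    var starts = try findLineStarts(gpa, "abc\ndef\n");
    defer starts.deinit();
    const pos = lineAndColumn(starts.items, 5); // the 'e' in "def"
    try std.testing.expectEqual(@as(u32, 1), pos.line);
    try std.testing.expectEqual(@as(u32, 1), pos.col);
}
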
File diff suppressed because it is too large

@ -14,7 +14,7 @@ const Node = @This();
tag: Tag,
data: Data,
main_token: TokenIdx,
region: AST.Region,
region: AST.TokenizedRegion,

/// A SafeMultiList of Nodes
pub const List = collections.SafeMultiList(Node);

@ -10,7 +10,7 @@ const collections = @import("../../collections.zig");
const AST = @import("AST.zig");
const Node = @import("Node.zig");
const Token = @import("tokenize.zig").Token;
const Region = AST.Region;
const Region = AST.TokenizedRegion;
const Diagnostic = AST.Diagnostic;

const sexpr = base.sexpr;

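The rename from Region to TokenizedRegion makes the unit of measurement explicit: these AST regions span token indices, while the base Region used for diagnostics elsewhere in this diff carries byte offsets. A hypothetical sketch of the token-index flavor (only empty and spanAcross appear in the diff; the field conventions here are assumptions):

const std = @import("std");

// Hypothetical sketch: a region measured in token indices.
const TokenizedRegion = struct {
    start: u32, // index of the first token in the region
    end: u32, // index of the last token covered (exact convention per the real AST)

    fn empty() TokenizedRegion {
        return .{ .start = 0, .end = 0 };
    }

    fn spanAcross(self: TokenizedRegion, other: TokenizedRegion) TokenizedRegion {
        return .{ .start = self.start, .end = other.end };
    }
};

test "spanning two regions" {
    const a = TokenizedRegion{ .start = 1, .end = 3 };
    const b = TokenizedRegion{ .start = 5, .end = 9 };
    try std.testing.expectEqual(@as(u32, 9), a.spanAcross(b).end);
}
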
@ -176,7 +176,7 @@ pub fn addHeader(store: *NodeStore, header: AST.Header) AST.Header.Idx {
.lhs = 0,
.rhs = 0,
},
.region = AST.Region.empty(),
.region = AST.TokenizedRegion.empty(),
};
switch (header) {
.app => |app| {

@ -241,7 +241,7 @@ pub fn addExposedItem(store: *NodeStore, item: AST.ExposedItem) AST.ExposedItem.
.lhs = 0,
.rhs = 0,
},
.region = AST.Region.empty(),
.region = AST.TokenizedRegion.empty(),
};

switch (item) {

@ -285,7 +285,7 @@ pub fn addStatement(store: *NodeStore, statement: AST.Statement) AST.Statement.I
.lhs = 0,
.rhs = 0,
},
.region = AST.Region.empty(),
.region = AST.TokenizedRegion.empty(),
};
switch (statement) {
.decl => |d| {

@ -392,7 +392,7 @@ pub fn addPattern(store: *NodeStore, pattern: AST.Pattern) AST.Pattern.Idx {
.lhs = 0,
.rhs = 0,
},
.region = AST.Region.empty(),
.region = AST.TokenizedRegion.empty(),
};
switch (pattern) {
.ident => |i| {

@ -468,7 +468,7 @@ pub fn addExpr(store: *NodeStore, expr: AST.Expr) AST.Expr.Idx {
.tag = .statement,
.main_token = 0,
.data = .{ .lhs = 0, .rhs = 0 },
.region = AST.Region.empty(),
.region = AST.TokenizedRegion.empty(),
};
switch (expr) {
.int => |e| {

@ -729,7 +729,7 @@ pub fn addWhereClause(store: *NodeStore, clause: AST.WhereClause) AST.WhereClaus
.lhs = 0,
.rhs = 0,
},
.region = AST.Region.empty(),
.region = AST.TokenizedRegion.empty(),
};

switch (clause) {

@ -777,7 +777,7 @@ pub fn addTypeAnno(store: *NodeStore, anno: AST.TypeAnno) AST.TypeAnno.Idx {
.lhs = 0,
.rhs = 0,
},
.region = AST.Region.empty(),
.region = AST.TokenizedRegion.empty(),
};

switch (anno) {

@ -132,7 +132,7 @@ pub fn peekN(self: *Parser, n: u32) Token.Tag {
}

/// add a diagnostic error
pub fn pushDiagnostic(self: *Parser, tag: AST.Diagnostic.Tag, region: AST.Region) void {
pub fn pushDiagnostic(self: *Parser, tag: AST.Diagnostic.Tag, region: AST.TokenizedRegion) void {
self.diagnostics.append(self.gpa, .{
.tag = tag,
.region = region,

@ -144,7 +144,7 @@ pub fn pushMalformed(self: *Parser, comptime t: type, tag: AST.Diagnostic.Tag, s
if (self.peek() != .EndOfFile) {
self.advanceOne(); // TODO: find a better point to advance to
}
const region = AST.Region{ .start = start, .end = pos };
const region = AST.TokenizedRegion{ .start = start, .end = pos };
self.diagnostics.append(self.gpa, .{
.tag = tag,
.region = region,

@ -162,7 +162,7 @@ pub fn parseFile(self: *Parser) void {
_ = self.store.addFile(.{
.header = @as(AST.Header.Idx, @enumFromInt(0)),
.statements = AST.Statement.Span{ .span = base.DataSpan.empty() },
.region = AST.Region.empty(),
.region = AST.TokenizedRegion.empty(),
});

while (self.peek() == .Newline) {

@ -667,7 +667,7 @@ pub fn parseAppHeader(self: *Parser) AST.Header.Idx {
return self.pushMalformed(AST.Header.Idx, .import_exposing_no_close, start);
};
const provides_span = self.store.exposedItemSpanFrom(scratch_top);
const provides_region = AST.Region{ .start = provides_start, .end = provides_end };
const provides_region = AST.TokenizedRegion{ .start = provides_start, .end = provides_end };
const provides = self.store.addCollection(.collection_exposed, AST.Collection{
.span = provides_span.span,
.region = provides_region,

@ -501,11 +501,11 @@ pub const Diagnostic = struct {
};

pub fn toStr(self: Diagnostic, gpa: Allocator, source: []const u8, writer: anytype) !void {
var newlines = try base.DiagnosticPosition.findLineStarts(gpa, source);
var newlines = try base.RegionInfo.findLineStarts(gpa, source);
defer newlines.deinit();

// Get position information
const info = try base.DiagnosticPosition.position(source, newlines, self.begin, self.end);
const info = try base.RegionInfo.position(source, newlines.items, self.begin, self.end);

// Strip trailing newline for display
const display_text = if (info.line_text.len > 0 and

@ -1560,7 +1560,7 @@ pub const Tokenizer = struct {
fn testTokenization(gpa: std.mem.Allocator, input: []const u8, expected: []const Token.Tag) !void {
var messages: [10]Diagnostic = undefined;

var env = base.ModuleEnv.init(gpa);
var env = base.ModuleEnv.init(gpa, input);
defer env.deinit();

var tokenizer = Tokenizer.init(&env, input, &messages);

@ -1578,7 +1578,7 @@ fn testTokenization(gpa: std.mem.Allocator, input: []const u8, expected: []const

/// Assert the invariants of the tokenizer are held.
pub fn checkTokenizerInvariants(gpa: std.mem.Allocator, input: []const u8, debug: bool) void {
var env = base.ModuleEnv.init(gpa);
var env = base.ModuleEnv.init(gpa, input);
defer env.deinit();

// Initial tokenization.

@ -480,7 +480,7 @@ fn parseDependenciesFromPackageRoot(
return .{ .failed_to_read_root_file = err };
defer gpa.free(contents);

var env = base.ModuleEnv.init(gpa);
var env = base.ModuleEnv.init(gpa, contents);
defer env.deinit();

var parse_ast = parse.parse(&env, contents);

@ -107,14 +107,19 @@ fn loadOrCompileCanIr(
const cache_lookup = cache.getCanIrForHashAndRocVersion(&hash_of_contents, current_roc_version, fs, gpa);

return if (cache_lookup) |ir| ir else blk: {
var module_env = base.ModuleEnv.init(gpa);

// TODO we probably shouldn't be saving the contents of the file in the ModuleEnv
// this is temporary so we can generate error reporting and diagnostics/region info.
// We should probably be reading the file on demand or something else. Leaving this
// comment here so we discuss the plan and make the necessary changes.
var module_env = base.ModuleEnv.init(gpa, contents);

var parse_ir = parse.parse(&module_env, contents);
parse_ir.store.emptyScratch();

var can_ir = Can.CIR.init(module_env);
var scope = Scope.init(&can_ir.env, &.{}, &.{});
defer scope.deinit();
var scope = Scope.init(can_ir.env.gpa);
defer scope.deinit(gpa);
var can = Can.init(&can_ir, &parse_ir, &scope);
can.canonicalize_file();

42
src/fmt.zig

@ -145,7 +145,7 @@ pub fn formatFilePath(gpa: std.mem.Allocator, base_dir: std.fs.Dir, path: []cons
};
defer gpa.free(contents);

var module_env = base.ModuleEnv.init(gpa);
var module_env = base.ModuleEnv.init(gpa, contents);
defer module_env.deinit();

var parse_ast = parse.parse(&module_env, contents);

@ -568,7 +568,7 @@ const Formatter = struct {
fmt.curr_indent = curr_indent;
}
if (qualifier) |q| {
const multiline = fmt.ast.regionIsMultiline(AST.Region{ .start = q, .end = ident });
const multiline = fmt.ast.regionIsMultiline(AST.TokenizedRegion{ .start = q, .end = ident });
try fmt.pushTokenText(q);
if (multiline and try fmt.flushCommentsAfter(q)) {
fmt.curr_indent += 1;

@ -607,7 +607,7 @@ const Formatter = struct {
}
};

fn formatCollection(fmt: *Formatter, region: AST.Region, braces: Braces, comptime T: type, items: []T, formatter: fn (*Formatter, T) anyerror!AST.Region) !void {
fn formatCollection(fmt: *Formatter, region: AST.TokenizedRegion, braces: Braces, comptime T: type, items: []T, formatter: fn (*Formatter, T) anyerror!AST.TokenizedRegion) !void {
const multiline = fmt.ast.regionIsMultiline(region);
const curr_indent = fmt.curr_indent;
defer {

@ -653,7 +653,7 @@ const Formatter = struct {
try fmt.push(braces.end());
}

fn formatRecordField(fmt: *Formatter, idx: AST.RecordField.Idx) !AST.Region {
fn formatRecordField(fmt: *Formatter, idx: AST.RecordField.Idx) !AST.TokenizedRegion {
const field = fmt.ast.store.getRecordField(idx);
try fmt.pushTokenText(field.name);
if (field.value) |v| {

@ -669,11 +669,11 @@ const Formatter = struct {
no_indent_on_access,
};

fn formatExpr(fmt: *Formatter, ei: AST.Expr.Idx) anyerror!AST.Region {
fn formatExpr(fmt: *Formatter, ei: AST.Expr.Idx) anyerror!AST.TokenizedRegion {
return formatExprInner(fmt, ei, .normal);
}

fn formatExprInner(fmt: *Formatter, ei: AST.Expr.Idx, format_behavior: ExprFormatBehavior) anyerror!AST.Region {
fn formatExprInner(fmt: *Formatter, ei: AST.Expr.Idx, format_behavior: ExprFormatBehavior) anyerror!AST.TokenizedRegion {
const expr = fmt.ast.store.getExpr(ei);
const region = fmt.nodeRegion(@intFromEnum(ei));
const multiline = fmt.ast.regionIsMultiline(region);

@ -707,7 +707,7 @@ const Formatter = struct {
// So we'll widen the region by one token for calculating multliline.
// Ideally, we'd also check if the expr itself is multiline, and if we will end up flushing, but
// we'll leave it as is for now
const part_is_multiline = fmt.ast.regionIsMultiline(AST.Region{ .start = part_region.start - 1, .end = part_region.end + 1 });
const part_is_multiline = fmt.ast.regionIsMultiline(AST.TokenizedRegion{ .start = part_region.start - 1, .end = part_region.end + 1 });
if (part_is_multiline) {
_ = try fmt.flushCommentsBefore(part_region.start);
try fmt.ensureNewline();

@ -774,7 +774,7 @@ const Formatter = struct {
const args = fmt.ast.store.patternSlice(l.args);
const body_region = fmt.nodeRegion(@intFromEnum(l.body));
const args_region = fmt.regionInSlice(AST.Pattern.Idx, args);
const args_are_multiline = fmt.ast.regionIsMultiline(AST.Region{ .start = l.region.start, .end = args_region.end });
const args_are_multiline = fmt.ast.regionIsMultiline(AST.TokenizedRegion{ .start = l.region.start, .end = args_region.end });
try fmt.push('|');
if (args_are_multiline) {
fmt.curr_indent += 1;

@ -962,7 +962,7 @@ const Formatter = struct {
return region;
}

fn formatPatternRecordField(fmt: *Formatter, idx: AST.PatternRecordField.Idx) !AST.Region {
fn formatPatternRecordField(fmt: *Formatter, idx: AST.PatternRecordField.Idx) !AST.TokenizedRegion {
const field = fmt.ast.store.getPatternRecordField(idx);
const multiline = fmt.ast.regionIsMultiline(field.region);
const curr_indent = fmt.curr_indent;

@ -999,9 +999,9 @@ const Formatter = struct {
return field.region;
}

fn formatPattern(fmt: *Formatter, pi: AST.Pattern.Idx) !AST.Region {
fn formatPattern(fmt: *Formatter, pi: AST.Pattern.Idx) !AST.TokenizedRegion {
const pattern = fmt.ast.store.getPattern(pi);
var region = AST.Region{ .start = 0, .end = 0 };
var region = AST.TokenizedRegion{ .start = 0, .end = 0 };
switch (pattern) {
.ident => |i| {
region = i.region;

@ -1104,9 +1104,9 @@ const Formatter = struct {
return region;
}

fn formatExposedItem(fmt: *Formatter, idx: AST.ExposedItem.Idx) !AST.Region {
fn formatExposedItem(fmt: *Formatter, idx: AST.ExposedItem.Idx) !AST.TokenizedRegion {
const item = fmt.ast.store.getExposedItem(idx);
var region = AST.Region{ .start = 0, .end = 0 };
var region = AST.TokenizedRegion{ .start = 0, .end = 0 };
switch (item) {
.lower_ident => |i| {
region = i.region;

@ -1431,7 +1431,7 @@ const Formatter = struct {
}
}

fn nodeRegion(fmt: *Formatter, idx: u32) AST.Region {
fn nodeRegion(fmt: *Formatter, idx: u32) AST.TokenizedRegion {
return fmt.ast.store.nodes.items.items(.region)[idx];
}

@ -1469,7 +1469,7 @@ const Formatter = struct {
}
}

fn formatAnnoRecordField(fmt: *Formatter, idx: AST.AnnoRecordField.Idx) !AST.Region {
fn formatAnnoRecordField(fmt: *Formatter, idx: AST.AnnoRecordField.Idx) !AST.TokenizedRegion {
const curr_indent = fmt.curr_indent;
defer {
fmt.curr_indent = curr_indent;

@ -1606,9 +1606,9 @@ const Formatter = struct {
}
}

fn formatTypeAnno(fmt: *Formatter, anno: AST.TypeAnno.Idx) !AST.Region {
fn formatTypeAnno(fmt: *Formatter, anno: AST.TypeAnno.Idx) !AST.TokenizedRegion {
const a = fmt.ast.store.getTypeAnno(anno);
var region = AST.Region{ .start = 0, .end = 0 };
var region = AST.TokenizedRegion{ .start = 0, .end = 0 };
switch (a) {
.apply => |app| {
const slice = fmt.ast.store.typeAnnoSlice(app.args);

@ -1818,9 +1818,9 @@ const Formatter = struct {
try fmt.pushAll(text);
}

fn regionInSlice(fmt: *Formatter, comptime T: anytype, slice: []T) AST.Region {
fn regionInSlice(fmt: *Formatter, comptime T: anytype, slice: []T) AST.TokenizedRegion {
if (slice.len == 0) {
return AST.Region.empty();
return AST.TokenizedRegion.empty();
}
const first: usize = @intFromEnum(slice[0]);
const last: usize = @intFromEnum(slice[slice.len - 1]);

@ -1829,7 +1829,7 @@ const Formatter = struct {
return first_region.spanAcross(last_region);
}

fn displayRegion(fmt: *Formatter, region: AST.Region) void {
fn displayRegion(fmt: *Formatter, region: AST.TokenizedRegion) void {
const tags = fmt.ast.tokens.tokens.items(.tag);
return std.debug.print("[{s}@{d}...{s}@{d}]\n", .{ @tagName(tags[region.start]), region.start, @tagName(tags[region.end]), region.end });
}

@ -1941,7 +1941,7 @@ pub fn moduleFmtsStable(gpa: std.mem.Allocator, input: []const u8, debug: bool)
}

fn parseAndFmt(gpa: std.mem.Allocator, input: []const u8, debug: bool) ![]const u8 {
var module_env = base.ModuleEnv.init(gpa);
var module_env = base.ModuleEnv.init(gpa, input);
defer module_env.deinit();

var parse_ast = parse.parse(&module_env, input);

201
src/problem.zig

@ -17,158 +17,12 @@ const Region = base.Region;
pub const Problem = union(enum) {
tokenize: @import("check/parse/tokenize.zig").Diagnostic,
parser: @import("check/parse/AST.zig").Diagnostic,
canonicalize: Canonicalize,
canonicalize: @import("check/canonicalize/CIR.zig").Diagnostic,
compiler: Compiler,

/// User errors preventing a module from being canonicalized correctly,
/// e.g. a variable that was used but not defined.
pub const Canonicalize = union(enum) {
NotYetImplemented,
NotYetImplementedExpr: struct {
expr_type: []const u8,
region: Region,
},
NotYetImplementedPattern: struct {
pattern_type: []const u8,
region: Region,
},
NotYetImplementedTypeDecl: struct {
region: Region,
},
NotYetImplementedTypeAnno: struct {
region: Region,
},
NotYetImplementedExpect: struct {
region: Region,
},
NotYetImplementedImport: struct {
region: Region,
},
DuplicateImport: struct {
duplicate_import_region: Region,
},
DuplicateExposes: struct {
first_exposes: Ident.Idx,
duplicate_exposes: Ident.Idx,
},
AliasNotInScope: struct {
name: Ident.Idx,
suggestions: collections.SafeList(Ident.Idx).Range,
},
IdentNotInScope: struct {
ident: Ident.Idx,
suggestions: collections.SafeList(Ident.Idx).Range,
},
AliasAlreadyInScope: struct {
original_name: Ident.Idx,
shadow: Ident.Idx,
},
IdentAlreadyInScope: struct {
original_ident: Ident.Idx,
shadow: Ident.Idx,
},
InvalidTopLevelStatement: struct {
ty: StatementType,
region: Region,

const StatementType = enum(u8) { @"var", expr, @"for", crash, @"return" };
},
InvalidNumLiteral: struct {
region: Region,
literal: []const u8,
},

/// Make a `Problem` based on a canonicalization problem.
pub fn make(can_problem: @This()) Problem {
return Problem{ .canonicalize = can_problem };
}

pub fn toStr(self: @This(), gpa: Allocator, writer: anytype) !void {
_ = gpa;
// use a stack allocation for printing our tag errors
var buf: [1000]u8 = undefined;

switch (self) {
.NotYetImplemented => {
const err_msg = try std.fmt.bufPrint(&buf, "CAN: Not yet implemented", .{});
try writer.writeAll(err_msg);
},
.NotYetImplementedExpr => |e| {
const err_msg = try std.fmt.bufPrint(&buf, "CAN: Expression type '{s}' not yet implemented", .{e.expr_type});
try writer.writeAll(err_msg);
},
.NotYetImplementedPattern => |e| {
const err_msg = try std.fmt.bufPrint(&buf, "CAN: Pattern type '{s}' not yet implemented", .{e.pattern_type});
try writer.writeAll(err_msg);
},
.NotYetImplementedTypeDecl => |e| {
_ = e;
const err_msg = try std.fmt.bufPrint(&buf, "CAN: Type declarations not yet implemented", .{});
try writer.writeAll(err_msg);
},
.NotYetImplementedTypeAnno => |e| {
_ = e;
const err_msg = try std.fmt.bufPrint(&buf, "CAN: Type annotations not yet implemented", .{});
try writer.writeAll(err_msg);
},
.NotYetImplementedExpect => |e| {
_ = e;
const err_msg = try std.fmt.bufPrint(&buf, "CAN: Expect statements not yet implemented", .{});
try writer.writeAll(err_msg);
},
.NotYetImplementedImport => |e| {
_ = e;
const err_msg = try std.fmt.bufPrint(&buf, "CAN: Import statements not yet fully implemented", .{});
try writer.writeAll(err_msg);
},
.DuplicateImport => |e| {
_ = e; // TODO: Use this capture in a meaningful way (make sure to update Canonicalize tests)
const err_msg = try std.fmt.bufPrint(&buf, "CAN: Duplicate Import", .{});
try writer.writeAll(err_msg);
},
.DuplicateExposes => |e| {
_ = e; // TODO: Use this capture in a meaningful way (make sure to update Canonicalize tests)
const err_msg = try std.fmt.bufPrint(&buf, "CAN: Duplicate Exposes", .{});
try writer.writeAll(err_msg);
},
.AliasNotInScope => |e| {
_ = e; // TODO: Use this capture in a meaningful way (make sure to update Canonicalize tests)
const err_msg = try std.fmt.bufPrint(&buf, "CAN: Alias not in scope", .{});
try writer.writeAll(err_msg);
},
.IdentNotInScope => |e| {
_ = e; // TODO: Use this capture in a meaningful way (make sure to update Canonicalize tests)
const err_msg = try std.fmt.bufPrint(&buf, "CAN: Ident not in scope", .{});
try writer.writeAll(err_msg);
},
.AliasAlreadyInScope => |e| {
_ = e; // TODO: Use this capture in a meaningful way (make sure to update Canonicalize tests)
const err_msg = try std.fmt.bufPrint(&buf, "CAN: Alias already in scope", .{});
try writer.writeAll(err_msg);
},
.IdentAlreadyInScope => |e| {
_ = e; // TODO: Use this capture in a meaningful way (make sure to update Canonicalize tests)
const err_msg = try std.fmt.bufPrint(&buf, "CAN: Ident already in scope", .{});
try writer.writeAll(err_msg);
},
.InvalidTopLevelStatement => |e| {
_ = e; // TODO: Use this capture in a meaningful way (make sure to update Canonicalize tests)
const err_msg = try std.fmt.bufPrint(&buf, "CAN: Invalid top level statement", .{});
try writer.writeAll(err_msg);
},
.InvalidNumLiteral => |e| {
const err_msg = try std.fmt.bufPrint(&buf, "CAN: Invalid number literal {s}", .{e.literal});
try writer.writeAll(err_msg);
},
}
}
};

/// Internal compiler error due to a bug in the compiler implementation.
pub const Compiler = union(enum) {
canonicalize: enum {
exited_top_scope_level,
},
canonicalize: Can,
resolve_imports,
type_check,
specialize_types,

@ -178,10 +32,24 @@ pub const Problem = union(enum) {
lower_statements,
reference_count,

pub const Can = enum {
not_implemented,
exited_top_scope_level,
unable_to_resolve_identifier,
failed_to_canonicalize_decl,
unexpected_token_binop,
concatenate_an_interpolated_string,
};

/// Make a `Problem` based on a compiler error.
pub fn make(compiler_error: @This()) Problem {
pub fn make(compiler_error: Compiler) Problem {
return Problem{ .compiler = compiler_error };
}

/// Make a `Problem` based on a compiler error.
pub fn can(tag: Can) Problem {
return Problem{ .compiler = .{ .canonicalize = tag } };
}
};

/// A list of problems.

@ -196,13 +64,42 @@ pub const Problem = union(enum) {
var buf: [1000]u8 = undefined;

switch (self) {
.tokenize => |a| try a.toStr(gpa, source, writer),
.tokenize => |a| {
try a.toStr(gpa, source, writer);
},
.parser => |a| {
const err_msg = try std.fmt.bufPrint(&buf, "PARSER: {s}", .{@tagName(a.tag)});
try writer.writeAll(err_msg);
},
.canonicalize => |err| {
try err.toStr(gpa, writer);
.canonicalize => |a| {
const MAX_TO_PRINT = 20;
const ELLIPSIS = "...";

const start = a.region.start.offset;
const end = a.region.end.offset;
const src_len = source.len;

var text: []const u8 = "";

if (start < end and start < src_len) {
const safe_end = if (end <= src_len) end else src_len;
const region_len = safe_end - start;
const truncated = region_len > MAX_TO_PRINT;
const slice_len = if (truncated) MAX_TO_PRINT - ELLIPSIS.len else region_len;
text = source[start .. start + slice_len];

if (truncated) {
var b: [MAX_TO_PRINT]u8 = undefined;
std.mem.copyForwards(u8, b[0..slice_len], text);
std.mem.copyForwards(u8, b[slice_len .. slice_len + ELLIPSIS.len], ELLIPSIS);
text = b[0 .. slice_len + ELLIPSIS.len];
}
}

// format the error message
const err_msg = try std.fmt.bufPrint(&buf, "CANONICALIZE: {s} \"{s}\"", .{ @tagName(a.tag), text });

try writer.writeAll(err_msg);
},
.compiler => |err| {
const err_msg = try std.fmt.bufPrint(&buf, "COMPILER: {?}", .{err});

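To make the restructuring concrete: internal canonicalization errors now live in a nested Can enum inside Compiler, and Problem.Compiler.can wraps a tag into a Problem. A standalone miniature of that shape (a sketch mirroring the diff, not the real src/problem.zig):

const std = @import("std");

// Miniature of the union layout above: problems by source, with a nested
// enum for compiler-internal canonicalize errors and a wrapping helper.
const Problem = union(enum) {
    compiler: Compiler,

    const Compiler = union(enum) {
        canonicalize: Can,
        type_check,

        const Can = enum { not_implemented, exited_top_scope_level };

        // Wrap a canonicalize tag directly into a Problem.
        fn can(tag: Can) Problem {
            return Problem{ .compiler = .{ .canonicalize = tag } };
        }
    };
};

test "wrapping a canonicalize tag into a Problem" {
    const p = Problem.Compiler.can(.not_implemented);
    try std.testing.expect(p.compiler.canonicalize == .not_implemented);
}
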
@ -2,7 +2,8 @@ const std = @import("std");
const testing = std.testing;
const Allocator = std.mem.Allocator;
const base = @import("base.zig");
const Can = @import("check/canonicalize.zig");
const canonicalize = @import("check/canonicalize.zig");
const CIR = canonicalize.CIR;
const Scope = @import("check/canonicalize/Scope.zig");
const parse = @import("check/parse.zig");
const fmt = @import("fmt.zig");

@ -434,10 +435,7 @@ fn processSnapshotFile(gpa: Allocator, snapshot_path: []const u8, maybe_fuzz_cor
}
};

var line_starts = try base.DiagnosticPosition.findLineStarts(gpa, content.source);
defer line_starts.deinit();

var module_env = base.ModuleEnv.init(gpa);
var module_env = base.ModuleEnv.init(gpa, file_content);
defer module_env.deinit();

// Parse the source code

@ -454,15 +452,15 @@ fn processSnapshotFile(gpa: Allocator, snapshot_path: []const u8, maybe_fuzz_cor

// Canonicalize the source code
// Can.IR.init takes ownership of the module_env and type_store
var can_ir = Can.CIR.init(module_env);
var can_ir = CIR.init(module_env);
defer can_ir.deinit();

var scope = Scope.init(&can_ir.env, &.{}, &.{});
defer scope.deinit();
var scope = Scope.init(can_ir.env.gpa);
defer scope.deinit(can_ir.env.gpa);

var can = Can.init(&can_ir, &parse_ast, &scope);
var can = canonicalize.init(&can_ir, &parse_ast, &scope);

var maybe_expr_idx: ?Can.CIR.Expr.Idx = null;
var maybe_expr_idx: ?CIR.Expr.Idx = null;

switch (content.meta.node_type) {
.file => can.canonicalize_file(),

@ -473,6 +471,12 @@ fn processSnapshotFile(gpa: Allocator, snapshot_path: []const u8, maybe_fuzz_cor
// For expr snapshots, just canonicalize the root expression directly
const expr_idx: AST.Expr.Idx = @enumFromInt(parse_ast.root_node_idx);
maybe_expr_idx = can.canonicalize_expr(expr_idx);

// Manually copy errors across to ModuleEnv problems
// as `canonicalize_expr` doesn't do this for us.
for (can_ir.diagnostics.items) |msg| {
_ = module_env.problems.append(gpa, .{ .canonicalize = msg });
}
},
.statement => {
// TODO: implement canonicalize_statement when available

@ -505,10 +509,10 @@ fn processSnapshotFile(gpa: Allocator, snapshot_path: []const u8, maybe_fuzz_cor
{
try writer.writeAll(Section.PROBLEMS);
try writer.writeAll("\n");
if (module_env.problems.len() > 0) {
var iter = module_env.problems.iterIndices();
if (can_ir.env.problems.len() > 0) {
var iter = can_ir.env.problems.iterIndices();
while (iter.next()) |problem_idx| {
const problem = module_env.problems.get(problem_idx);
const problem = can_ir.env.problems.get(problem_idx);
try problem.toStr(gpa, content.source, writer);
try writer.writeAll("\n");
}

@ -525,7 +529,7 @@ fn processSnapshotFile(gpa: Allocator, snapshot_path: []const u8, maybe_fuzz_cor
const tokens = tokenizedBuffer.tokens.items(.tag);
for (tokens, 0..) |tok, i| {
const region = tokenizedBuffer.resolve(@intCast(i));
const info = try base.DiagnosticPosition.position(content.source, line_starts, region.start.offset, region.end.offset);
const info = try module_env.calcRegionInfo(content.source, region.start.offset, region.end.offset);
const region_str = try std.fmt.allocPrint(gpa, "{s}({d}:{d}-{d}:{d}),", .{
@tagName(tok),
// add one to display numbers instead of index

@ -554,16 +558,25 @@ fn processSnapshotFile(gpa: Allocator, snapshot_path: []const u8, maybe_fuzz_cor
try parse_ast.toSExprStr(&module_env, parse_buffer.writer().any());
},
.header => {
const node = parse_ast.store.getHeader(@enumFromInt(parse_ast.root_node_idx));
try parse_ast.nodeToSExprStr(node, &module_env, parse_buffer.writer().any());
const header = parse_ast.store.getHeader(@enumFromInt(parse_ast.root_node_idx));
var node = header.toSExpr(&module_env, &parse_ast);
defer node.deinit(gpa);

node.toStringPretty(parse_buffer.writer().any());
},
.expr => {
const node = parse_ast.store.getExpr(@enumFromInt(parse_ast.root_node_idx));
try parse_ast.nodeToSExprStr(node, &module_env, parse_buffer.writer().any());
const expr = parse_ast.store.getExpr(@enumFromInt(parse_ast.root_node_idx));
var node = expr.toSExpr(&module_env, &parse_ast);
defer node.deinit(gpa);

node.toStringPretty(parse_buffer.writer().any());
},
.statement => {
const node = parse_ast.store.getStatement(@enumFromInt(parse_ast.root_node_idx));
try parse_ast.nodeToSExprStr(node, &module_env, parse_buffer.writer().any());
const stmt = parse_ast.store.getStatement(@enumFromInt(parse_ast.root_node_idx));
var node = stmt.toSExpr(&module_env, &parse_ast);
defer node.deinit(gpa);

node.toStringPretty(parse_buffer.writer().any());
},
}
try writer.writeAll(Section.PARSE);

@ -608,7 +621,7 @@ fn processSnapshotFile(gpa: Allocator, snapshot_path: []const u8, maybe_fuzz_cor
var canonicalized = std.ArrayList(u8).init(gpa);
defer canonicalized.deinit();

try can_ir.toSExprStr(canonicalized.writer().any(), maybe_expr_idx, line_starts, content.source);
try can_ir.toSExprStr(canonicalized.writer().any(), maybe_expr_idx);

try writer.writeAll(Section.CANONICALIZE);
try writer.writeAll("\n");

@ -42,8 +42,10 @@ foo =
(can_ir
(top_level_defs
(def
(Let)
(pattern (1:11-1:11) (pattern_ident (ident "foo")))
(expr (1:1-1:1) (str "one"))
"let"
(pattern (6:1-6:4)
(assign (6:1-6:4) (ident "foo")))
(expr (8:5-8:10)
(string (8:5-8:10) (literal (8:6-8:9) "one")))
"#0")))
~~~END

@ -6,7 +6,7 @@ module [add2]

add2 = x + 2
~~~PROBLEMS
NIL
CANONICALIZE: ident_not_in_scope "x"
~~~TOKENS
KwModule(1:1-1:7),OpenSquare(1:8-1:9),LowerIdent(1:9-1:13),CloseSquare(1:13-1:14),Newline(1:1-1:1),
Newline(1:1-1:1),

@ -30,17 +30,18 @@ add2 = x + 2
(can_ir
(top_level_defs
(def
(Let)
(pattern (1:7-1:7) (pattern_ident (ident "add2")))
(expr (1:1-1:1)
(call
(lookup (ident "add"))
(lookup (ident "x"))
(int
"let"
(pattern (3:1-3:5)
(assign (3:1-3:5) (ident "add2")))
(expr (3:8-3:18)
(binop (3:8-3:18)
"add"
(runtime_error (3:8-3:9) "ident_not_in_scope")
(int (3:17-3:18)
(num_var "#0")
(precision_var "#1")
(literal "2")
(value "TODO")
(bound "i128"))))
(bound "int"))))
"#2")))
~~~END

@ -4,7 +4,15 @@ type=expr
~~~SOURCE
Err(foo)??12>5*5 or 13+2<5 and 10-1>=16 or 12<=3/5
~~~PROBLEMS
NIL
CANONICALIZE: ident_not_in_scope "foo"
CANONICALIZE: expr_not_canonicalized "Err(foo)??12>5*5 or"
CANONICALIZE: expr_not_canonicalized "Err(foo)??12>5*5 ..."
CANONICALIZE: expr_not_canonicalized "13+2<5 and 10-1>=..."
CANONICALIZE: expr_not_canonicalized "13+2<5 and 10-1>=..."
CANONICALIZE: expr_not_canonicalized "Err(foo)??12>5*5 ..."
CANONICALIZE: expr_not_canonicalized "Err(foo)??12>5*5 ..."
CANONICALIZE: expr_not_canonicalized "12<=3/5"
CANONICALIZE: expr_not_canonicalized "Err(foo)??12>5*5 ..."
~~~TOKENS
UpperIdent(1:1-1:4),NoSpaceOpenRound(1:4-1:5),LowerIdent(1:5-1:8),CloseRound(1:8-1:9),OpDoubleQuestion(1:9-1:11),Int(1:11-1:13),OpGreaterThan(1:13-1:14),Int(1:14-1:15),OpStar(1:15-1:16),Int(1:16-1:17),OpOr(1:18-1:20),Int(1:21-1:23),OpPlus(1:23-1:24),Int(1:24-1:25),OpLessThan(1:25-1:26),Int(1:26-1:27),OpAnd(1:28-1:31),Int(1:32-1:34),OpBinaryMinus(1:34-1:35),Int(1:35-1:36),OpGreaterThanOrEq(1:36-1:38),Int(1:38-1:40),OpOr(1:41-1:43),Int(1:44-1:46),OpLessThanOrEq(1:46-1:48),Int(1:48-1:49),OpSlash(1:49-1:50),Int(1:50-1:51),EndOfFile(1:51-1:51),
~~~PARSE

@ -4,7 +4,15 @@ type=expr
~~~SOURCE
Err(foo) ?? 12 > 5 * 5 or 13 + 2 < 5 and 10 - 1 >= 16 or 12 <= 3 / 5
~~~PROBLEMS
NIL
CANONICALIZE: ident_not_in_scope "foo"
CANONICALIZE: expr_not_canonicalized "Err(foo) ?? 12 > ..."
CANONICALIZE: expr_not_canonicalized "Err(foo) ?? 12 > ..."
CANONICALIZE: expr_not_canonicalized "13 + 2 < 5 and 10..."
CANONICALIZE: expr_not_canonicalized "13 + 2 < 5 and 10..."
CANONICALIZE: expr_not_canonicalized "Err(foo) ?? 12 > ..."
CANONICALIZE: expr_not_canonicalized "Err(foo) ?? 12 > ..."
CANONICALIZE: expr_not_canonicalized "12 <= 3 / 5"
CANONICALIZE: expr_not_canonicalized "Err(foo) ?? 12 > ..."
~~~TOKENS
UpperIdent(1:1-1:4),NoSpaceOpenRound(1:4-1:5),LowerIdent(1:5-1:8),CloseRound(1:8-1:9),OpDoubleQuestion(1:10-1:12),Int(1:13-1:15),OpGreaterThan(1:16-1:17),Int(1:18-1:19),OpStar(1:20-1:21),Int(1:22-1:23),OpOr(1:24-1:26),Int(1:27-1:29),OpPlus(1:30-1:31),Int(1:32-1:33),OpLessThan(1:34-1:35),Int(1:36-1:37),OpAnd(1:38-1:41),Int(1:42-1:44),OpBinaryMinus(1:45-1:46),Int(1:47-1:48),OpGreaterThanOrEq(1:49-1:51),Int(1:52-1:54),OpOr(1:55-1:57),Int(1:58-1:60),OpLessThanOrEq(1:61-1:63),Int(1:64-1:65),OpSlash(1:66-1:67),Int(1:68-1:69),EndOfFile(1:69-1:69),
~~~PARSE

69
src/snapshots/can_two_decls.txt
Normal file

@ -0,0 +1,69 @@
~~~META
description=Two decls
type=file
~~~SOURCE
app [main!] { pf: platform "../basic-cli/platform.roc" }

a = 5
b = a + 1
~~~PROBLEMS
NIL
~~~TOKENS
KwApp(1:1-1:4),OpenSquare(1:5-1:6),LowerIdent(1:6-1:11),CloseSquare(1:11-1:12),OpenCurly(1:13-1:14),LowerIdent(1:15-1:17),OpColon(1:17-1:18),KwPlatform(1:19-1:27),StringStart(1:28-1:29),StringPart(1:29-1:54),StringEnd(1:54-1:55),CloseCurly(1:56-1:57),Newline(1:1-1:1),
Newline(1:1-1:1),
LowerIdent(3:1-3:2),OpAssign(3:3-3:4),Int(3:5-3:6),Newline(1:1-1:1),
LowerIdent(4:1-4:2),OpAssign(4:3-4:4),LowerIdent(4:5-4:6),OpPlus(4:7-4:8),Int(4:9-4:10),EndOfFile(4:10-4:10),
~~~PARSE
(file (1:1-4:10)
(app (1:1-1:57)
(provides (1:6-1:12) (exposed_item (lower_ident "main!")))
(record_field (1:15-1:57)
"pf"
(string (1:28-1:55) (string_part (1:29-1:54) "../basic-cli/platform.roc")))
(packages (1:13-1:57)
(record_field (1:15-1:57)
"pf"
(string (1:28-1:55) (string_part (1:29-1:54) "../basic-cli/platform.roc")))))
(statements
(decl (3:1-3:6)
(ident (3:1-3:2) "a")
(int (3:5-3:6) "5"))
(decl (4:1-4:10)
(ident (4:1-4:2) "b")
(binop (4:5-4:10)
"+"
(ident (4:5-4:6) "" "a")
(int (4:9-4:10) "1")))))
~~~FORMATTED
NO CHANGE
~~~CANONICALIZE
(can_ir
(top_level_defs
(def
"let"
(pattern (3:1-3:2)
(assign (3:1-3:2) (ident "a")))
(expr (3:5-3:6)
(int (3:5-3:6)
(num_var "#0")
(precision_var "#1")
(literal "5")
(value "TODO")
(bound "int")))
"#2")
(def
"let"
(pattern (4:1-4:2)
(assign (4:1-4:2) (ident "b")))
(expr (4:5-4:10)
(binop (4:5-4:10)
"add"
(lookup (4:5-4:6) (pattern_idx "2"))
(int (4:9-4:10)
(num_var "#3")
(precision_var "#4")
(literal "1")
(value "TODO")
(bound "int"))))
"#5")))
~~~END

@ -15,13 +15,13 @@ LowerIdent(1:1-1:4),NoSpaceOpenRound(1:4-1:5),Int(1:5-1:7),Comma(1:7-1:8),String
|
|||
~~~FORMATTED
|
||||
NO CHANGE
|
||||
~~~CANONICALIZE
|
||||
(call
|
||||
(lookup (ident "foo"))
|
||||
(int
|
||||
(call (1:1-1:17)
|
||||
(runtime_error (1:1-1:4) "ident_not_in_scope")
|
||||
(int (1:5-1:7)
|
||||
(num_var "#0")
|
||||
(precision_var "#1")
|
||||
(literal "42")
|
||||
(value "TODO")
|
||||
(bound "i128"))
|
||||
(str "hello"))
|
||||
(bound "int"))
|
||||
(string (1:9-1:16) (literal (1:10-1:15) "hello")))
|
||||
~~~END
|
|
@ -15,18 +15,18 @@ Int(1:1-1:2),OpPlus(1:3-1:4),Int(1:5-1:6),EndOfFile(1:6-1:6),
|
|||
~~~FORMATTED
|
||||
NO CHANGE
|
||||
~~~CANONICALIZE
|
||||
(call
|
||||
(lookup (ident "add"))
|
||||
(int
|
||||
(binop (1:1-1:6)
|
||||
"add"
|
||||
(int (1:1-1:2)
|
||||
(num_var "#0")
|
||||
(precision_var "#1")
|
||||
(literal "1")
|
||||
(value "TODO")
|
||||
(bound "i128"))
|
||||
(int
|
||||
(bound "int"))
|
||||
(int (1:5-1:6)
|
||||
(num_var "#2")
|
||||
(precision_var "#3")
|
||||
(literal "2")
|
||||
(value "TODO")
|
||||
(bound "i128")))
|
||||
(bound "int")))
|
||||
~~~END
|
|
@ -8,7 +8,7 @@ type=expr
|
|||
y * 2
|
||||
}
|
||||
~~~PROBLEMS
|
||||
NIL
|
||||
COMPILER: problem.Problem.Compiler{ .canonicalize = problem.Problem.Compiler.Can.not_implemented }
|
||||
~~~TOKENS
|
||||
OpenCurly(1:1-1:2),Newline(1:1-1:1),
|
||||
LowerIdent(2:5-2:6),OpAssign(2:7-2:8),Int(2:9-2:11),Newline(1:1-1:1),
|
||||
|
|
|
@ -4,7 +4,7 @@ type=expr
|
|||
~~~SOURCE
|
||||
dbg x
|
||||
~~~PROBLEMS
|
||||
NIL
|
||||
COMPILER: problem.Problem.Compiler{ .canonicalize = problem.Problem.Compiler.Can.not_implemented }
|
||||
~~~TOKENS
|
||||
KwDbg(1:1-1:4),LowerIdent(1:5-1:6),EndOfFile(1:6-1:6),
|
||||
~~~PARSE
|
||||
|
|
|
@ -4,7 +4,7 @@ type=expr
|
|||
~~~SOURCE
|
||||
person.name
|
||||
~~~PROBLEMS
|
||||
NIL
|
||||
COMPILER: problem.Problem.Compiler{ .canonicalize = problem.Problem.Compiler.Can.not_implemented }
|
||||
~~~TOKENS
|
||||
LowerIdent(1:1-1:7),NoSpaceDotLowerIdent(1:7-1:12),EndOfFile(1:12-1:12),
|
||||
~~~PARSE
|
||||
|
|
|
@ -5,6 +5,7 @@ type=expr
|
|||
3.14.15
|
||||
~~~PROBLEMS
|
||||
PARSER: expr_no_space_dot_int
|
||||
COMPILER: problem.Problem.Compiler{ .canonicalize = problem.Problem.Compiler.Can.not_implemented }
|
||||
~~~TOKENS
|
||||
Float(1:1-1:5),NoSpaceDotInt(1:5-1:8),EndOfFile(1:8-1:8),
|
||||
~~~PARSE
|
||||
|
|
|
@ -12,10 +12,10 @@ Float(1:1-1:5),EndOfFile(1:5-1:5),
|
|||
~~~FORMATTED
|
||||
NO CHANGE
|
||||
~~~CANONICALIZE
|
||||
(float
|
||||
(float (1:1-1:5)
|
||||
(num_var "#0")
|
||||
(precision_var "#1")
|
||||
(literal "-2.5")
|
||||
(value "0")
|
||||
(bound "dec"))
|
||||
(bound "int"))
|
||||
~~~END
|
|
@ -12,10 +12,10 @@ Float(1:1-1:8),EndOfFile(1:8-1:8),
|
|||
~~~FORMATTED
|
||||
NO CHANGE
|
||||
~~~CANONICALIZE
|
||||
(float
|
||||
(float (1:1-1:8)
|
||||
(num_var "#0")
|
||||
(precision_var "#1")
|
||||
(literal "1.23e-4")
|
||||
(value "0")
|
||||
(bound "dec"))
|
||||
(bound "int"))
|
||||
~~~END
|
|
@ -12,10 +12,10 @@ Float(1:1-1:5),EndOfFile(1:5-1:5),
|
|||
~~~FORMATTED
|
||||
NO CHANGE
|
||||
~~~CANONICALIZE
|
||||
(float
|
||||
(float (1:1-1:5)
|
||||
(num_var "#0")
|
||||
(precision_var "#1")
|
||||
(literal "3.14")
|
||||
(value "0")
|
||||
(bound "dec"))
|
||||
(bound "int"))
|
||||
~~~END
|
|
@ -12,5 +12,5 @@ Int(1:1-1:5),EndOfFile(1:5-1:5),
|
|||
~~~FORMATTED
|
||||
NO CHANGE
|
||||
~~~CANONICALIZE
|
||||
(runtime_error "CAN: Invalid number literal 0xFF")
|
||||
(runtime_error (1:1-1:5) "invalid_num_literal")
|
||||
~~~END
|
|
@ -12,10 +12,10 @@ Int(1:1-1:31),EndOfFile(1:31-1:31),
|
|||
~~~FORMATTED
|
||||
NO CHANGE
|
||||
~~~CANONICALIZE
|
||||
(int
|
||||
(int (1:1-1:31)
|
||||
(num_var "#0")
|
||||
(precision_var "#1")
|
||||
(literal "999999999999999999999999999999")
|
||||
(value "TODO")
|
||||
(bound "i128"))
|
||||
(bound "int"))
|
||||
~~~END
|
|
@ -12,10 +12,10 @@ Int(1:1-1:10),EndOfFile(1:10-1:10),
|
|||
~~~FORMATTED
|
||||
NO CHANGE
|
||||
~~~CANONICALIZE
|
||||
(int
|
||||
(int (1:1-1:10)
|
||||
(num_var "#0")
|
||||
(precision_var "#1")
|
||||
(literal "1_000_000")
|
||||
(value "TODO")
|
||||
(bound "i128"))
|
||||
(bound "int"))
|
||||
~~~END
|
|
@ -17,5 +17,5 @@ OpBar(1:1-1:2),LowerIdent(1:2-1:3),OpBar(1:3-1:4),LowerIdent(1:5-1:6),OpPlus(1:7
|
|||
~~~FORMATTED
|
||||
NO CHANGE
|
||||
~~~CANONICALIZE
|
||||
(can_ir (top_level_defs "empty"))
|
||||
(runtime_error (1:1-1:10) "can_lambda_not_implemented")
|
||||
~~~END
|
|
@ -12,5 +12,7 @@ OpenSquare(1:1-1:2),CloseSquare(1:2-1:3),EndOfFile(1:3-1:3),
|
|||
~~~FORMATTED
|
||||
NO CHANGE
|
||||
~~~CANONICALIZE
|
||||
(list (elem_var "#0") (elems "TODO each element"))
|
||||
(list (1:1-1:3)
|
||||
(elem_var "#0")
|
||||
(elems "TODO each element"))
|
||||
~~~END
|
|
@ -15,5 +15,7 @@ OpenSquare(1:1-1:2),Int(1:2-1:3),Comma(1:3-1:4),Int(1:5-1:6),Comma(1:6-1:7),Int(
|
|||
~~~FORMATTED
|
||||
NO CHANGE
|
||||
~~~CANONICALIZE
|
||||
(list (elem_var "#0") (elems "TODO each element"))
|
||||
(list (1:1-1:10)
|
||||
(elem_var "#0")
|
||||
(elems "TODO each element"))
|
||||
~~~END
|
|
@ -19,5 +19,7 @@ OpenSquare(1:1-1:2),OpenSquare(1:2-1:3),Int(1:3-1:4),Comma(1:4-1:5),Int(1:6-1:7)
|
|||
~~~FORMATTED
|
||||
NO CHANGE
|
||||
~~~CANONICALIZE
|
||||
(list (elem_var "#0") (elems "TODO each element"))
|
||||
(list (1:1-1:22)
|
||||
(elem_var "#0")
|
||||
(elems "TODO each element"))
|
||||
~~~END
|
|
@ -6,6 +6,7 @@ type=expr
|
|||
~~~PROBLEMS
|
||||
PARSER: expr_unexpected_token
|
||||
PARSER: ty_anno_unexpected_token
|
||||
COMPILER: problem.Problem.Compiler{ .canonicalize = problem.Problem.Compiler.Can.not_implemented }
|
||||
~~~TOKENS
|
||||
OpenCurly(1:1-1:2),LowerIdent(1:3-1:9),OpAmpersand(1:10-1:11),LowerIdent(1:12-1:15),OpColon(1:15-1:16),Int(1:17-1:19),CloseCurly(1:20-1:21),EndOfFile(1:21-1:21),
|
||||
~~~PARSE
|
||||
|
|
|
@ -4,7 +4,7 @@ type=expr
|
|||
~~~SOURCE
|
||||
{ name: "Alice", age: 30 }
|
||||
~~~PROBLEMS
|
||||
NIL
|
||||
COMPILER: problem.Problem.Compiler{ .canonicalize = problem.Problem.Compiler.Can.not_implemented }
|
||||
~~~TOKENS
|
||||
OpenCurly(1:1-1:2),LowerIdent(1:3-1:7),OpColon(1:7-1:8),StringStart(1:9-1:10),StringPart(1:10-1:15),StringEnd(1:15-1:16),Comma(1:16-1:17),LowerIdent(1:18-1:21),OpColon(1:21-1:22),Int(1:23-1:25),CloseCurly(1:26-1:27),EndOfFile(1:27-1:27),
|
||||
~~~PARSE
|
||||
|
|
|
@ -12,5 +12,5 @@ StringStart(1:1-1:2),StringPart(1:2-1:2),StringEnd(1:2-1:3),EndOfFile(1:3-1:3),
|
|||
~~~FORMATTED
|
||||
NO CHANGE
|
||||
~~~CANONICALIZE
|
||||
(str "")
|
||||
(string (1:1-1:3) (literal (1:2-1:2) ""))
|
||||
~~~END
|
|
@ -15,11 +15,8 @@ StringStart(1:1-1:2),StringPart(1:2-1:8),OpenStringInterpolation(1:8-1:10),Lower
|
|||
~~~FORMATTED
|
||||
NO CHANGE
|
||||
~~~CANONICALIZE
|
||||
(call
|
||||
(lookup (ident "Str.concat"))
|
||||
(call
|
||||
(lookup (ident "Str.concat"))
|
||||
(str "Hello ")
|
||||
(lookup (ident "name")))
|
||||
(str "!"))
|
||||
(string (1:1-1:17)
|
||||
(literal (1:2-1:8) "Hello ")
|
||||
(runtime_error (1:10-1:14) "ident_not_in_scope")
|
||||
(literal (1:15-1:16) "!"))
|
||||
~~~END
|
|
@ -12,5 +12,5 @@ StringStart(1:1-1:2),StringPart(1:2-1:13),StringEnd(1:13-1:14),EndOfFile(1:14-1:
|
|||
~~~FORMATTED
|
||||
NO CHANGE
|
||||
~~~CANONICALIZE
|
||||
(str "hello world")
|
||||
(string (1:1-1:14) (literal (1:2-1:13) "hello world"))
|
||||
~~~END
|
|
@ -12,7 +12,7 @@ UpperIdent(1:1-1:3),EndOfFile(1:3-1:3),
|
|||
~~~FORMATTED
|
||||
NO CHANGE
|
||||
~~~CANONICALIZE
|
||||
(tag
|
||||
(tag (1:1-1:3)
|
||||
(tag_union_var "#0")
|
||||
(ext_var "#0")
|
||||
(name "Ok")
|
||||
|
|
|
@ -14,16 +14,16 @@ UpperIdent(1:1-1:5),NoSpaceOpenRound(1:5-1:6),Int(1:6-1:8),CloseRound(1:8-1:9),E
|
|||
~~~FORMATTED
|
||||
NO CHANGE
|
||||
~~~CANONICALIZE
|
||||
(call
|
||||
(tag
|
||||
(call (1:1-1:9)
|
||||
(tag (1:1-1:5)
|
||||
(tag_union_var "#0")
|
||||
(ext_var "#0")
|
||||
(name "Some")
|
||||
(args "TODO"))
|
||||
(int
|
||||
(int (1:6-1:8)
|
||||
(num_var "#2")
|
||||
(precision_var "#3")
|
||||
(literal "42")
|
||||
(value "TODO")
|
||||
(bound "i128")))
|
||||
(bound "int")))
|
||||
~~~END
|
|
@ -4,7 +4,7 @@ type=expr
|
|||
~~~SOURCE
|
||||
(1, "hello", True)
|
||||
~~~PROBLEMS
|
||||
NIL
|
||||
COMPILER: problem.Problem.Compiler{ .canonicalize = problem.Problem.Compiler.Can.not_implemented }
|
||||
~~~TOKENS
|
||||
OpenRound(1:1-1:2),Int(1:2-1:3),Comma(1:3-1:4),StringStart(1:5-1:6),StringPart(1:6-1:11),StringEnd(1:11-1:12),Comma(1:12-1:13),UpperIdent(1:14-1:18),CloseRound(1:18-1:19),EndOfFile(1:19-1:19),
|
||||
~~~PARSE
|
||||
|
|
|
@ -5,6 +5,7 @@ type=expr
|
|||
!isValid
|
||||
~~~PROBLEMS
|
||||
PARSER: expr_unexpected_token
|
||||
COMPILER: problem.Problem.Compiler{ .canonicalize = problem.Problem.Compiler.Can.not_implemented }
|
||||
~~~TOKENS
|
||||
OpBang(1:1-1:2),LowerIdent(1:2-1:9),EndOfFile(1:9-1:9),
|
||||
~~~PARSE
|
||||
|
|
|
@ -16,5 +16,5 @@ UpperIdent(3:5-3:8),NoSpaceOpenRound(3:8-3:9),LowerIdent(3:9-3:12),CloseRound(3:
|
|||
~~~FORMATTED
|
||||
when
|
||||
~~~CANONICALIZE
|
||||
(lookup (ident "when"))
|
||||
(runtime_error (1:1-1:5) "ident_not_in_scope")
|
||||
~~~END
|
|
@ -12,5 +12,5 @@ LowerIdent(1:1-1:4),EndOfFile(1:4-1:4),
|
|||
~~~FORMATTED
|
||||
NO CHANGE
|
||||
~~~CANONICALIZE
|
||||
(lookup (ident "foo"))
|
||||
(runtime_error (1:1-1:4) "ident_not_in_scope")
|
||||
~~~END
|
|
@ -7,6 +7,8 @@ module []
|
|||
foo = if tru then 0
|
||||
~~~PROBLEMS
|
||||
PARSER: no_else
|
||||
COMPILER: problem.Problem.Compiler{ .canonicalize = problem.Problem.Compiler.Can.not_implemented }
|
||||
CANONICALIZE: expr_not_canonicalized "foo = if tru then 0"
|
||||
~~~TOKENS
|
||||
KwModule(1:1-1:7),OpenSquare(1:8-1:9),CloseSquare(1:9-1:10),Newline(1:1-1:1),
|
||||
Newline(1:1-1:1),
|
||||
|
@ -23,5 +25,12 @@ module []
|
|||
|
||||
foo =
|
||||
~~~CANONICALIZE
|
||||
(can_ir (top_level_defs "empty"))
|
||||
(can_ir
|
||||
(top_level_defs
|
||||
(def
|
||||
"let"
|
||||
(pattern (3:1-3:4)
|
||||
(assign (3:1-3:4) (ident "foo")))
|
||||
(expr (3:19-3:20) (runtime_error (3:1-3:20) "expr_not_canonicalized"))
|
||||
"#0")))
|
||||
~~~END
|
|
@ -12,5 +12,5 @@ Int(1:1-1:42),EndOfFile(1:42-1:42),
|
|||
~~~FORMATTED
|
||||
NO CHANGE
|
||||
~~~CANONICALIZE
|
||||
(runtime_error "CAN: Invalid number literal 99999999999999999999999999999999999999999")
|
||||
(runtime_error (1:1-1:42) "invalid_num_literal")
|
||||
~~~END
|
|
@ -12,10 +12,10 @@ Int(1:1-1:5),EndOfFile(1:5-1:5),
|
|||
~~~FORMATTED
|
||||
NO CHANGE
|
||||
~~~CANONICALIZE
|
||||
(int
|
||||
(int (1:1-1:5)
|
||||
(num_var "#0")
|
||||
(precision_var "#1")
|
||||
(literal "-123")
|
||||
(value "TODO")
|
||||
(bound "i128"))
|
||||
(bound "int"))
|
||||
~~~END
|
|
@ -12,10 +12,10 @@ Int(1:1-1:3),EndOfFile(1:3-1:3),
|
|||
~~~FORMATTED
|
||||
NO CHANGE
|
||||
~~~CANONICALIZE
|
||||
(int
|
||||
(int (1:1-1:3)
|
||||
(num_var "#0")
|
||||
(precision_var "#1")
|
||||
(literal "42")
|
||||
(value "TODO")
|
||||
(bound "i128"))
|
||||
(bound "int"))
|
||||
~~~END
|
|
@ -7,6 +7,8 @@ module []
|
|||
foo = asd.0
|
||||
~~~PROBLEMS
|
||||
PARSER: expr_no_space_dot_int
|
||||
COMPILER: problem.Problem.Compiler{ .canonicalize = problem.Problem.Compiler.Can.not_implemented }
|
||||
CANONICALIZE: expr_not_canonicalized "foo = asd.0"
|
||||
~~~TOKENS
|
||||
KwModule(1:1-1:7),OpenSquare(1:8-1:9),CloseSquare(1:9-1:10),Newline(1:1-1:1),
|
||||
Newline(1:1-1:1),
|
||||
|
@ -23,5 +25,12 @@ module []
|
|||
|
||||
foo =
|
||||
~~~CANONICALIZE
|
||||
(can_ir (top_level_defs "empty"))
|
||||
(can_ir
|
||||
(top_level_defs
|
||||
(def
|
||||
"let"
|
||||
(pattern (3:1-3:4)
|
||||
(assign (3:1-3:4) (ident "foo")))
|
||||
(expr (3:10-3:12) (runtime_error (3:1-3:12) "expr_not_canonicalized"))
|
||||
"#0")))
|
||||
~~~END
|
|
@ -7,6 +7,7 @@ mo|%
|
|||
PARSER: missing_header
|
||||
PARSER: pattern_unexpected_token
|
||||
PARSER: expected_expr_bar
|
||||
CANONICALIZE: invalid_top_level_statement "|%"
|
||||
~~~TOKENS
|
||||
LowerIdent(1:1-1:3),OpBar(1:3-1:4),OpPercent(1:4-1:5),EndOfFile(1:5-1:5),
|
||||
~~~PARSE
|
||||
|
|
|
@ -23,6 +23,24 @@ PARSER: expr_unexpected_token
|
|||
PARSER: expr_unexpected_token
|
||||
PARSER: expr_unexpected_token
|
||||
PARSER: expected_expr_close_square_or_comma
|
||||
CANONICALIZE: invalid_top_level_statement ":;"
|
||||
CANONICALIZE: invalid_top_level_statement ";:"
|
||||
CANONICALIZE: invalid_top_level_statement "::"
|
||||
CANONICALIZE: invalid_top_level_statement "::"
|
||||
CANONICALIZE: invalid_top_level_statement "::"
|
||||
CANONICALIZE: invalid_top_level_statement "::"
|
||||
CANONICALIZE: invalid_top_level_statement "::"
|
||||
CANONICALIZE: invalid_top_level_statement "::"
|
||||
CANONICALIZE: invalid_top_level_statement "::"
|
||||
CANONICALIZE: invalid_top_level_statement "::"
|
||||
CANONICALIZE: invalid_top_level_statement "::"
|
||||
CANONICALIZE: invalid_top_level_statement "::"
|
||||
CANONICALIZE: invalid_top_level_statement "::"
|
||||
CANONICALIZE: invalid_top_level_statement "::"
|
||||
CANONICALIZE: invalid_top_level_statement "::"
|
||||
CANONICALIZE: invalid_top_level_statement ":le"
|
||||
CANONICALIZE: invalid_top_level_statement "le["
|
||||
CANONICALIZE: invalid_top_level_statement "[%"
|
||||
~~~TOKENS
|
||||
LowerIdent(1:1-1:5),OpColon(1:5-1:6),MalformedUnknownToken(1:6-1:7),OpColon(1:7-1:8),OpColon(1:8-1:9),OpColon(1:9-1:10),OpColon(1:10-1:11),OpColon(1:11-1:12),OpColon(1:12-1:13),OpColon(1:13-1:14),OpColon(1:14-1:15),OpColon(1:15-1:16),OpColon(1:16-1:17),OpColon(1:17-1:18),OpColon(1:18-1:19),OpColon(1:19-1:20),OpColon(1:20-1:21),LowerIdent(1:21-1:23),OpenSquare(1:23-1:24),OpPercent(1:24-1:25),EndOfFile(1:25-1:25),
|
||||
~~~PARSE
|
||||
|
|
|
@ -8,6 +8,7 @@ TOKENIZE: (1:5-1:7) UnclosedString:
|
|||
= "te
|
||||
^^
|
||||
PARSER: missing_header
|
||||
CANONICALIZE: invalid_top_level_statement ""te"
|
||||
~~~TOKENS
|
||||
OpAssign(1:2-1:3),StringStart(1:4-1:5),StringPart(1:5-1:7),EndOfFile(1:7-1:7),
|
||||
~~~PARSE
|
||||
|
|
Binary file not shown.
|
@ -7,6 +7,8 @@ ff8.8.d
|
|||
PARSER: missing_header
|
||||
PARSER: expr_unexpected_token
|
||||
PARSER: expr_unexpected_token
|
||||
CANONICALIZE: invalid_top_level_statement ".8.d"
|
||||
CANONICALIZE: invalid_top_level_statement ".d"
|
||||
~~~TOKENS
|
||||
LowerIdent(1:1-1:4),NoSpaceDotInt(1:4-1:6),NoSpaceDotLowerIdent(1:6-1:8),EndOfFile(1:8-1:8),
|
||||
~~~PARSE
|
||||
|
|
|
@ -9,6 +9,7 @@ TOKENIZE: (1:2-1:2) AsciiControl:
|
|||
^
|
||||
PARSER: missing_header
|
||||
PARSER: expected_expr_bar
|
||||
CANONICALIZE: invalid_top_level_statement "|1"
|
||||
~~~TOKENS
|
||||
OpBar(1:1-1:2),OpBar(1:3-1:4),Int(1:4-1:5),EndOfFile(1:5-1:5),
|
||||
~~~PARSE
|
||||
|
|
|
@ -16,6 +16,10 @@ TOKENIZE: (6:6-6:12) UnclosedString:
|
|||
"onmo %
|
||||
^^^^^^
|
||||
PARSER: missing_header
|
||||
CANONICALIZE: invalid_top_level_statement "{o,
|
||||
]
|
||||
|
||||
foo"
|
||||
~~~TOKENS
|
||||
LowerIdent(1:2-1:3),OpenCurly(1:3-1:4),LowerIdent(1:4-1:5),Comma(1:5-1:6),Newline(1:1-1:1),
|
||||
CloseCurly(2:6-2:7),Newline(1:1-1:1),
|
||||
|
@ -43,8 +47,10 @@ foo =
|
|||
(can_ir
|
||||
(top_level_defs
|
||||
(def
|
||||
(Let)
|
||||
(pattern (2:3-2:3) (pattern_ident (ident "foo")))
|
||||
(expr (1:1-1:1) (str "onmo %"))
|
||||
"let"
|
||||
(pattern (4:1-4:4)
|
||||
(assign (4:1-4:4) (ident "foo")))
|
||||
(expr (6:5-6:12)
|
||||
(string (6:5-6:12) (literal (6:6-6:12) "onmo %")))
|
||||
"#0")))
|
||||
~~~END
|
|
@ -18,6 +18,9 @@ TOKENIZE: (5:6-5:35) UnclosedString:
|
|||
"on (string 'onmo %')))
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
PARSER: missing_header
|
||||
CANONICALIZE: invalid_top_level_statement "{o,
|
||||
]
|
||||
foo"
|
||||
~~~TOKENS
|
||||
UpperIdent(1:1-1:2),OpenCurly(1:2-1:3),LowerIdent(1:3-1:4),Comma(1:4-1:5),Newline(1:1-1:1),
|
||||
CloseCurly(2:6-2:7),Newline(1:1-1:1),
|
||||
|
@ -43,8 +46,10 @@ foo =
|
|||
(can_ir
|
||||
(top_level_defs
|
||||
(def
|
||||
(Let)
|
||||
(pattern (2:3-2:3) (pattern_ident (ident "foo")))
|
||||
(expr (1:1-1:1) (str "on (string 'onmo %')))"))
|
||||
"let"
|
||||
(pattern (3:1-3:4)
|
||||
(assign (3:1-3:4) (ident "foo")))
|
||||
(expr (5:5-5:35)
|
||||
(string (5:5-5:35) (literal (5:6-5:35) "on (string 'onmo %')))")))
|
||||
"#0")))
|
||||
~~~END
|
|
@ -8,6 +8,7 @@ PARSER: missing_header
|
|||
PARSER: pattern_unexpected_token
|
||||
PARSER: pattern_unexpected_token
|
||||
PARSER: expected_expr_bar
|
||||
CANONICALIZE: invalid_top_level_statement "|(|(l888888888|"
|
||||
~~~TOKENS
|
||||
OpBar(1:2-1:3),OpBar(1:3-1:4),NoSpaceOpenRound(1:4-1:5),OpBar(1:5-1:6),NoSpaceOpenRound(1:6-1:7),LowerIdent(1:7-1:17),OpBar(1:17-1:18),EndOfFile(1:18-1:18),
|
||||
~~~PARSE
|
||||
|
|
|
@ -5,6 +5,7 @@ type=file
|
|||
0{
|
||||
~~~PROBLEMS
|
||||
PARSER: missing_header
|
||||
CANONICALIZE: invalid_top_level_statement "{"
|
||||
~~~TOKENS
|
||||
Int(1:1-1:2),OpenCurly(1:2-1:3),EndOfFile(1:3-1:3),
|
||||
~~~PARSE
|
||||
|
|
|
@ -10,6 +10,9 @@ PARSER: missing_header
|
|||
PARSER: expr_unexpected_token
|
||||
PARSER: expr_unexpected_token
|
||||
PARSER: expr_unexpected_token
|
||||
CANONICALIZE: invalid_top_level_statement ""
|
||||
CANONICALIZE: invalid_top_level_statement ""
|
||||
CANONICALIZE: invalid_top_level_statement "0u22"
|
||||
~~~TOKENS
|
||||
MalformedNumberNoDigits(1:1-1:3),NoSpaceDotInt(1:3-1:5),Newline(1:1-1:1),
|
||||
MalformedNumberNoDigits(2:1-2:6),Newline(1:1-1:1),
|
||||
|
|
|
@ -13,6 +13,11 @@ TOKENIZE: (2:3-2:3) LeadingZero:
|
|||
PARSER: missing_header
|
||||
PARSER: expr_unexpected_token
|
||||
PARSER: expr_no_space_dot_int
|
||||
CANONICALIZE: invalid_top_level_statement ""
|
||||
CANONICALIZE: invalid_top_level_statement "0_0
|
||||
0u8"
|
||||
CANONICALIZE: invalid_top_level_statement ""
|
||||
CANONICALIZE: invalid_top_level_statement "0_"
|
||||
~~~TOKENS
|
||||
Int(1:1-1:4),NoSpaceDotInt(1:4-1:6),Newline(1:1-1:1),
|
||||
Int(2:1-2:4),Newline(1:1-1:1),
|
||||
|
|
|
@ -6,6 +6,7 @@ type=file
|
|||
~~~PROBLEMS
|
||||
PARSER: missing_header
|
||||
PARSER: expected_expr_bar
|
||||
CANONICALIZE: invalid_top_level_statement "|"
|
||||
~~~TOKENS
|
||||
Int(1:1-1:2),OpBar(1:2-1:3),EndOfFile(1:3-1:3),
|
||||
~~~PARSE
|
||||
|
|
|
@ -8,6 +8,10 @@ foo = "hello ${namF
|
|||
PARSER: missing_header
|
||||
PARSER: expr_unexpected_token
|
||||
PARSER: string_expected_close_interpolation
|
||||
COMPILER: problem.Problem.Compiler{ .canonicalize = problem.Problem.Compiler.Can.not_implemented }
|
||||
CANONICALIZE: invalid_top_level_statement "= ""
|
||||
CANONICALIZE: invalid_top_level_statement ""
|
||||
CANONICALIZE: expr_not_canonicalized "foo = "hello ${namF"
|
||||
~~~TOKENS
|
||||
LowerIdent(1:1-1:3),OpAssign(1:4-1:5),StringStart(1:6-1:7),StringPart(1:7-1:10),StringEnd(1:10-1:11),Newline(1:1-1:1),
|
||||
LowerIdent(2:1-2:4),OpAssign(2:5-2:6),StringStart(2:7-2:8),StringPart(2:8-2:14),OpenStringInterpolation(2:14-2:16),LowerIdent(2:16-2:20),EndOfFile(2:20-2:20),
|
||||
|
@ -24,5 +28,12 @@ LowerIdent(2:1-2:4),OpAssign(2:5-2:6),StringStart(2:7-2:8),StringPart(2:8-2:14),
|
|||
"luc"
|
||||
foo =
|
||||
~~~CANONICALIZE
|
||||
(can_ir (top_level_defs "empty"))
|
||||
(can_ir
|
||||
(top_level_defs
|
||||
(def
|
||||
"let"
|
||||
(pattern (2:1-2:4)
|
||||
(assign (2:1-2:4) (ident "foo")))
|
||||
(expr (2:7-2:20) (runtime_error (2:1-2:20) "expr_not_canonicalized"))
|
||||
"#0")))
|
||||
~~~END
|
|
@ -6,6 +6,7 @@ type=file
|
|||
.R
|
||||
~~~PROBLEMS
|
||||
PARSER: missing_header
|
||||
COMPILER: problem.Problem.Compiler{ .canonicalize = problem.Problem.Compiler.Can.not_implemented }
|
||||
~~~TOKENS
|
||||
Int(1:1-1:2),LowerIdent(1:3-1:4),OpColon(1:4-1:5),UpperIdent(1:5-1:6),Newline(1:1-1:1),
|
||||
DotUpperIdent(2:1-2:3),EndOfFile(2:3-2:3),
|
||||
|
|
|
@ -6,6 +6,7 @@ type=file
|
|||
~~~PROBLEMS
|
||||
PARSER: missing_header
|
||||
PARSER: expected_expr_close_round_or_comma
|
||||
CANONICALIZE: invalid_top_level_statement "("
|
||||
~~~TOKENS
|
||||
Int(1:1-1:2),OpenRound(1:3-1:4),EndOfFile(1:4-1:4),
|
||||
~~~PARSE
|
||||
|
|
|
@ -8,7 +8,8 @@ import pf.Stdout
|
|||
|
||||
main! = |_| Stdout.line!("Hello, world!")
|
||||
~~~PROBLEMS
|
||||
NIL
|
||||
CANONICALIZE: ident_not_in_scope "Stdout.line!"
|
||||
CANONICALIZE: can_lambda_not_implemented "|_| Stdout.line!(..."
|
||||
~~~TOKENS
|
||||
KwApp(1:1-1:4),OpenSquare(1:5-1:6),LowerIdent(1:6-1:11),CloseSquare(1:11-1:12),OpenCurly(1:13-1:14),LowerIdent(1:15-1:17),OpColon(1:17-1:18),KwPlatform(1:19-1:27),StringStart(1:28-1:29),StringPart(1:29-1:54),StringEnd(1:54-1:55),CloseCurly(1:56-1:57),Newline(1:1-1:1),
|
||||
Newline(1:1-1:1),
|
||||
|
@ -38,5 +39,12 @@ LowerIdent(5:1-5:6),OpAssign(5:7-5:8),OpBar(5:9-5:10),Underscore(5:10-5:11),OpBa
|
|||
~~~FORMATTED
|
||||
NO CHANGE
|
||||
~~~CANONICALIZE
|
||||
(can_ir (top_level_defs "empty"))
|
||||
(can_ir
|
||||
(top_level_defs
|
||||
(def
|
||||
"let"
|
||||
(pattern (5:1-5:6)
|
||||
(assign (5:1-5:6) (ident "main!")))
|
||||
(expr (5:9-5:42) (runtime_error (5:9-5:42) "can_lambda_not_implemented"))
|
||||
"#0")))
|
||||
~~~END
|
|
@ -15,7 +15,11 @@ main! = |_| {
|
|||
Stdout.line!("Hello, world!")
|
||||
}
|
||||
~~~PROBLEMS
|
||||
NIL
|
||||
COMPILER: problem.Problem.Compiler{ .canonicalize = problem.Problem.Compiler.Can.not_implemented }
|
||||
CANONICALIZE: lambda_body_not_canonicalized "{
|
||||
world = "World..."
|
||||
CANONICALIZE: can_lambda_not_implemented "|_| {
|
||||
world = "W..."
|
||||
~~~TOKENS
|
||||
Newline(1:2-1:15),
|
||||
Newline(1:1-1:1),
|
||||
|
@ -57,5 +61,12 @@ CloseCurly(12:1-12:2),EndOfFile(12:2-12:2),
|
|||
~~~FORMATTED
|
||||
NO CHANGE
|
||||
~~~CANONICALIZE
|
||||
(can_ir (top_level_defs "empty"))
|
||||
(can_ir
|
||||
(top_level_defs
|
||||
(def
|
||||
"let"
|
||||
(pattern (8:1-8:6)
|
||||
(assign (8:1-8:6) (ident "main!")))
|
||||
(expr (8:9-12:2) (runtime_error (8:9-12:2) "can_lambda_not_implemented"))
|
||||
"#0")))
|
||||
~~~END
|
|
@ -10,7 +10,10 @@ foo = if true A
|
|||
B
|
||||
}
|
||||
~~~PROBLEMS
|
||||
NIL
|
||||
COMPILER: problem.Problem.Compiler{ .canonicalize = problem.Problem.Compiler.Can.not_implemented }
|
||||
CANONICALIZE: expr_not_canonicalized "foo = if true A
|
||||
|
||||
..."
|
||||
~~~TOKENS
|
||||
KwModule(1:1-1:7),OpenSquare(1:8-1:9),LowerIdent(1:9-1:12),CloseSquare(1:12-1:13),Newline(1:1-1:1),
|
||||
Newline(1:1-1:1),
|
||||
|
@ -40,5 +43,12 @@ foo = if true A
|
|||
B
|
||||
}
|
||||
~~~CANONICALIZE
|
||||
(can_ir (top_level_defs "empty"))
|
||||
(can_ir
|
||||
(top_level_defs
|
||||
(def
|
||||
"let"
|
||||
(pattern (3:1-3:4)
|
||||
(assign (3:1-3:4) (ident "foo")))
|
||||
(expr (3:7-7:6) (runtime_error (3:1-7:6) "expr_not_canonicalized"))
|
||||
"#0")))
|
||||
~~~END
|
|
@ -4,7 +4,7 @@ type=expr
|
|||
~~~SOURCE
|
||||
if bool 1 else 2
|
||||
~~~PROBLEMS
|
||||
NIL
|
||||
COMPILER: problem.Problem.Compiler{ .canonicalize = problem.Problem.Compiler.Can.not_implemented }
|
||||
~~~TOKENS
|
||||
KwIf(1:1-1:3),LowerIdent(1:4-1:8),Int(1:9-1:10),KwElse(1:11-1:15),Int(1:16-1:17),EndOfFile(1:17-1:17),
|
||||
~~~PARSE
|
||||
|
|
|
@ -10,7 +10,7 @@ if # Comment after if
|
|||
2
|
||||
}
|
||||
~~~PROBLEMS
|
||||
NIL
|
||||
COMPILER: problem.Problem.Compiler{ .canonicalize = problem.Problem.Compiler.Can.not_implemented }
|
||||
~~~TOKENS
|
||||
KwIf(1:1-1:3),Newline(1:5-1:22),
|
||||
LowerIdent(2:2-2:6),Newline(1:1-1:1),
|
||||
|
|
|
@ -10,7 +10,7 @@ if # Comment after if
|
|||
2
|
||||
}
|
||||
~~~PROBLEMS
|
||||
NIL
|
||||
COMPILER: problem.Problem.Compiler{ .canonicalize = problem.Problem.Compiler.Can.not_implemented }
|
||||
~~~TOKENS
|
||||
KwIf(1:1-1:3),Newline(1:5-1:22),
|
||||
LowerIdent(2:2-2:6),Newline(2:8-2:27),
|
||||
|
|
|
@ -12,7 +12,7 @@ if # Comment after if
|
|||
2
|
||||
}
|
||||
~~~PROBLEMS
|
||||
NIL
|
||||
COMPILER: problem.Problem.Compiler{ .canonicalize = problem.Problem.Compiler.Can.not_implemented }
|
||||
~~~TOKENS
|
||||
KwIf(1:1-1:3),Newline(1:5-1:22),
|
||||
LowerIdent(2:2-2:6),Newline(2:8-2:27),
|
||||
|
|
|
@ -6,7 +6,7 @@ if bool {
|
|||
1
|
||||
} else 2
|
||||
~~~PROBLEMS
|
||||
NIL
|
||||
COMPILER: problem.Problem.Compiler{ .canonicalize = problem.Problem.Compiler.Can.not_implemented }
|
||||
~~~TOKENS
|
||||
KwIf(1:1-1:3),LowerIdent(1:4-1:8),OpenCurly(1:9-1:10),Newline(1:1-1:1),
|
||||
Int(2:2-2:3),Newline(1:1-1:1),
|
||||
|
|
|
@ -6,7 +6,7 @@ if bool { # Comment after then open
|
|||
1 # Comment after expr
|
||||
} else 2
|
||||
~~~PROBLEMS
|
||||
NIL
|
||||
COMPILER: problem.Problem.Compiler{ .canonicalize = problem.Problem.Compiler.Can.not_implemented }
|
||||
~~~TOKENS
|
||||
KwIf(1:1-1:3),LowerIdent(1:4-1:8),OpenCurly(1:9-1:10),Newline(1:12-1:36),
|
||||
Int(2:2-2:3),Newline(2:5-2:24),
|
||||
|
|
|
@ -8,7 +8,7 @@ if bool {
|
|||
2
|
||||
}
|
||||
~~~PROBLEMS
|
||||
NIL
|
||||
COMPILER: problem.Problem.Compiler{ .canonicalize = problem.Problem.Compiler.Can.not_implemented }
|
||||
~~~TOKENS
|
||||
KwIf(1:1-1:3),LowerIdent(1:4-1:8),OpenCurly(1:9-1:10),Newline(1:1-1:1),
|
||||
Int(2:2-2:3),Newline(1:1-1:1),
|
||||
|
|
|
@ -8,7 +8,7 @@ if bool {
|
|||
2
|
||||
}
|
||||
~~~PROBLEMS
|
||||
NIL
|
||||
COMPILER: problem.Problem.Compiler{ .canonicalize = problem.Problem.Compiler.Can.not_implemented }
|
||||
~~~TOKENS
|
||||
KwIf(1:1-1:3),LowerIdent(1:4-1:8),OpenCurly(1:9-1:10),Newline(1:1-1:1),
|
||||
Int(2:2-2:3),Newline(1:1-1:1),
|
||||
|
|
|
@ -7,7 +7,7 @@ some_fn(arg1)?
|
|||
->next_static_dispatch_method()?
|
||||
->record_field?
|
||||
~~~PROBLEMS
|
||||
NIL
|
||||
COMPILER: problem.Problem.Compiler{ .canonicalize = problem.Problem.Compiler.Can.not_implemented }
|
||||
~~~TOKENS
|
||||
LowerIdent(1:1-1:8),NoSpaceOpenRound(1:8-1:9),LowerIdent(1:9-1:13),CloseRound(1:13-1:14),NoSpaceOpQuestion(1:14-1:15),Newline(1:1-1:1),
|
||||
OpArrow(2:2-2:4),LowerIdent(2:4-2:26),NoSpaceOpenRound(2:26-2:27),CloseRound(2:27-2:28),NoSpaceOpQuestion(2:28-2:29),Newline(1:1-1:1),
|
||||
|
|
|
@ -7,7 +7,7 @@ some_fn(arg1)? # Comment 1
|
|||
->next_static_dispatch_method()? # Comment 3
|
||||
->record_field?
|
||||
~~~PROBLEMS
|
||||
NIL
|
||||
COMPILER: problem.Problem.Compiler{ .canonicalize = problem.Problem.Compiler.Can.not_implemented }
|
||||
~~~TOKENS
|
||||
LowerIdent(1:1-1:8),NoSpaceOpenRound(1:8-1:9),LowerIdent(1:9-1:13),CloseRound(1:13-1:14),NoSpaceOpQuestion(1:14-1:15),Newline(1:17-1:27),
|
||||
OpArrow(2:2-2:4),LowerIdent(2:4-2:26),NoSpaceOpenRound(2:26-2:27),CloseRound(2:27-2:28),NoSpaceOpQuestion(2:28-2:29),Newline(2:31-2:41),
|
||||
|
|
|
@ -32,26 +32,26 @@ Int(8:3-8:4),EndOfFile(8:4-8:4),
|
|||
~~~FORMATTED
|
||||
NO CHANGE
|
||||
~~~CANONICALIZE
|
||||
(call
|
||||
(lookup (ident "add"))
|
||||
(int
|
||||
(binop (1:1-8:4)
|
||||
"add"
|
||||
(int (1:1-1:2)
|
||||
(num_var "#0")
|
||||
(precision_var "#1")
|
||||
(literal "1")
|
||||
(value "TODO")
|
||||
(bound "i128"))
|
||||
(call
|
||||
(lookup (ident "mul"))
|
||||
(int
|
||||
(bound "int"))
|
||||
(binop (6:2-8:4)
|
||||
"mul"
|
||||
(int (6:2-6:3)
|
||||
(num_var "#2")
|
||||
(precision_var "#3")
|
||||
(literal "2")
|
||||
(value "TODO")
|
||||
(bound "i128"))
|
||||
(int
|
||||
(bound "int"))
|
||||
(int (8:3-8:4)
|
||||
(num_var "#4")
|
||||
(precision_var "#5")
|
||||
(literal "3")
|
||||
(value "TODO")
|
||||
(bound "i128"))))
|
||||
(bound "int"))))
|
||||
~~~END
|
|
@ -34,5 +34,7 @@ CloseSquare(9:1-9:2),EndOfFile(9:2-9:2),
|
|||
~~~FORMATTED
|
||||
NO CHANGE
|
||||
~~~CANONICALIZE
|
||||
(list (elem_var "#0") (elems "TODO each element"))
|
||||
(list (1:1-9:2)
|
||||
(elem_var "#0")
|
||||
(elems "TODO each element"))
|
||||
~~~END
|
|
@ -31,5 +31,7 @@ CloseSquare(9:1-9:2),EndOfFile(9:2-9:2),
|
|||
~~~FORMATTED
|
||||
NO CHANGE
|
||||
~~~CANONICALIZE
|
||||
(list (elem_var "#0") (elems "TODO each element"))
|
||||
(list (1:1-9:2)
|
||||
(elem_var "#0")
|
||||
(elems "TODO each element"))
|
||||
~~~END
|
|
@ -23,5 +23,7 @@ CloseSquare(5:1-5:2),EndOfFile(5:2-5:2),
|
|||
~~~FORMATTED
|
||||
NO CHANGE
|
||||
~~~CANONICALIZE
|
||||
(list (elem_var "#0") (elems "TODO each element"))
|
||||
(list (1:1-5:2)
|
||||
(elem_var "#0")
|
||||
(elems "TODO each element"))
|
||||
~~~END
|
|
@ -21,5 +21,7 @@ Int(2:3-2:4),CloseSquare(2:4-2:5),EndOfFile(2:5-2:5),
|
|||
3,
|
||||
]
|
||||
~~~CANONICALIZE
|
||||
(list (elem_var "#0") (elems "TODO each element"))
|
||||
(list (1:1-2:5)
|
||||
(elem_var "#0")
|
||||
(elems "TODO each element"))
|
||||
~~~END
|
|
@ -23,5 +23,7 @@ CloseSquare(5:1-5:2),EndOfFile(5:2-5:2),
|
|||
~~~FORMATTED
|
||||
NO CHANGE
|
||||
~~~CANONICALIZE
|
||||
(list (elem_var "#0") (elems "TODO each element"))
|
||||
(list (1:1-5:2)
|
||||
(elem_var "#0")
|
||||
(elems "TODO each element"))
|
||||
~~~END
|
|
@ -23,14 +23,15 @@ NO CHANGE
|
|||
(can_ir
|
||||
(top_level_defs
|
||||
(def
|
||||
(Let)
|
||||
(pattern (1:6-1:6) (pattern_ident (ident "foo")))
|
||||
(expr (1:1-1:1)
|
||||
(float
|
||||
"let"
|
||||
(pattern (2:1-2:4)
|
||||
(assign (2:1-2:4) (ident "foo")))
|
||||
(expr (2:7-2:12)
|
||||
(float (2:7-2:12)
|
||||
(num_var "#0")
|
||||
(precision_var "#1")
|
||||
(literal "12.34")
|
||||
(value "0")
|
||||
(bound "dec")))
|
||||
(bound "int")))
|
||||
"#2")))
|
||||
~~~END
|
|
@ -23,14 +23,15 @@ NO CHANGE
|
|||
(can_ir
|
||||
(top_level_defs
|
||||
(def
|
||||
(Let)
|
||||
(pattern (1:6-1:6) (pattern_ident (ident "foo")))
|
||||
(expr (1:1-1:1)
|
||||
(int
|
||||
"let"
|
||||
(pattern (2:1-2:4)
|
||||
(assign (2:1-2:4) (ident "foo")))
|
||||
(expr (2:7-2:9)
|
||||
(int (2:7-2:9)
|
||||
(num_var "#0")
|
||||
(precision_var "#1")
|
||||
(literal "42")
|
||||
(value "TODO")
|
||||
(bound "i128")))
|
||||
(bound "int")))
|
||||
"#2")))
|
||||
~~~END
|
|
@ -31,20 +31,20 @@ NO CHANGE
|
|||
(can_ir
|
||||
(top_level_defs
|
||||
(def
|
||||
(Let)
|
||||
(pattern (1:6-1:6) (pattern_ident (ident "name")))
|
||||
(expr (1:1-1:1) (str "luc"))
|
||||
"let"
|
||||
(pattern (2:1-2:5)
|
||||
(assign (2:1-2:5) (ident "name")))
|
||||
(expr (2:8-2:13)
|
||||
(string (2:8-2:13) (literal (2:9-2:12) "luc")))
|
||||
"#0")
|
||||
(def
|
||||
(Let)
|
||||
(pattern (1:12-1:12) (pattern_ident (ident "foo")))
|
||||
(expr (1:1-1:1)
|
||||
(call
|
||||
(lookup (ident "Str.concat"))
|
||||
(call
|
||||
(lookup (ident "Str.concat"))
|
||||
(str "hello ")
|
||||
(lookup (ident "name")))
|
||||
(str "")))
|
||||
"let"
|
||||
(pattern (3:1-3:4)
|
||||
(assign (3:1-3:4) (ident "foo")))
|
||||
(expr (3:7-3:22)
|
||||
(string (3:7-3:22)
|
||||
(literal (3:8-3:14) "hello ")
|
||||
(lookup (3:16-3:20) (pattern_idx "2"))
|
||||
(literal (3:21-3:21) "")))
|
||||
"#1")))
|
||||
~~~END
|
|
@ -23,10 +23,11 @@ NO CHANGE
|
|||
(can_ir
|
||||
(top_level_defs
|
||||
(def
|
||||
(Let)
|
||||
(pattern (1:6-1:6) (pattern_ident (ident "foo")))
|
||||
(expr (1:1-1:1)
|
||||
(tag
|
||||
"let"
|
||||
(pattern (2:1-2:4)
|
||||
(assign (2:1-2:4) (ident "foo")))
|
||||
(expr (2:7-2:15)
|
||||
(tag (2:7-2:15)
|
||||
(tag_union_var "#0")
|
||||
(ext_var "#0")
|
||||
(name "FortyTwo")
|
||||
|
|
|
@ -6,7 +6,7 @@ module []
|
|||
|
||||
import json.Json [foo, BAR]
|
||||
~~~PROBLEMS
|
||||
NIL
|
||||
CANONICALIZE: invalid_top_level_statement "[foo, BAR]"
|
||||
~~~TOKENS
|
||||
KwModule(1:1-1:7),OpenSquare(1:8-1:9),CloseSquare(1:9-1:10),Newline(1:1-1:1),
|
||||
Newline(1:1-1:1),
|
||||
|
|
|
@ -6,7 +6,7 @@ module [Foo]
|
|||
|
||||
Foo(a,b) : (a,b,Str,U64)
|
||||
~~~PROBLEMS
|
||||
NIL
|
||||
COMPILER: problem.Problem.Compiler{ .canonicalize = problem.Problem.Compiler.Can.not_implemented }
|
||||
~~~TOKENS
|
||||
KwModule(1:1-1:7),OpenSquare(1:8-1:9),UpperIdent(1:9-1:12),CloseSquare(1:12-1:13),Newline(1:1-1:1),
|
||||
Newline(1:1-1:1),
|
||||
|
|
|
@ -7,7 +7,7 @@ some_fn(arg1)?
|
|||
.next_static_dispatch_method()?
|
||||
.record_field?
|
||||
~~~PROBLEMS
|
||||
NIL
|
||||
COMPILER: problem.Problem.Compiler{ .canonicalize = problem.Problem.Compiler.Can.not_implemented }
|
||||
~~~TOKENS
|
||||
LowerIdent(1:1-1:8),NoSpaceOpenRound(1:8-1:9),LowerIdent(1:9-1:13),CloseRound(1:13-1:14),NoSpaceOpQuestion(1:14-1:15),Newline(1:1-1:1),
|
||||
DotLowerIdent(2:2-2:25),NoSpaceOpenRound(2:25-2:26),CloseRound(2:26-2:27),NoSpaceOpQuestion(2:27-2:28),Newline(1:1-1:1),
|
||||
|
|
|
@ -7,7 +7,7 @@ some_fn(arg1)? # Comment 1
|
|||
.next_static_dispatch_method()? # Comment 3
|
||||
.record_field?
|
||||
~~~PROBLEMS
|
||||
NIL
|
||||
COMPILER: problem.Problem.Compiler{ .canonicalize = problem.Problem.Compiler.Can.not_implemented }
|
||||
~~~TOKENS
|
||||
LowerIdent(1:1-1:8),NoSpaceOpenRound(1:8-1:9),LowerIdent(1:9-1:13),CloseRound(1:13-1:14),NoSpaceOpQuestion(1:14-1:15),Newline(1:17-1:27),
|
||||
DotLowerIdent(2:2-2:25),NoSpaceOpenRound(2:25-2:26),CloseRound(2:26-2:27),NoSpaceOpQuestion(2:27-2:28),Newline(2:30-2:40),
|
||||
|
|
Some files were not shown because too many files have changed in this diff