Add for clauses to platform modules

This commit is contained in:
Richard Feldman 2025-12-12 21:21:51 -05:00
parent 76ac7ae646
commit 58cede45b2
No known key found for this signature in database
37 changed files with 1008 additions and 559 deletions

View file

@ -1592,7 +1592,7 @@ test "unbundleStream with BufferExtractWriter (WASM simulation)" {
try src_dir.makePath("platform");
const file = try src_dir.createFile("platform/main.roc", .{});
defer file.close();
try file.writeAll("platform \"test\" requires {} { main : Str }\n");
try file.writeAll("platform \"test\" requires { main : Str }\n");
}
// Bundle to memory
@ -1647,7 +1647,7 @@ test "unbundleStream with BufferExtractWriter (WASM simulation)" {
const platform_content = buffer_writer.files.get("platform/main.roc");
try testing.expect(platform_content != null);
try testing.expectEqualStrings("platform \"test\" requires {} { main : Str }\n", platform_content.?.items);
try testing.expectEqualStrings("platform \"test\" requires { main : Str }\n", platform_content.?.items);
}
// Test large file unbundle - verifies multi-block zstd streaming works correctly

View file

@ -1780,10 +1780,10 @@ pub fn canonicalizeFile(
// These need to be in the exposed scope so they become exports
// Platform provides uses curly braces { main_for_host! } so it's parsed as record fields
try self.addPlatformProvidesItems(h.provides);
// Extract required type signatures for type checking
// Extract required type signatures for type checking using the new for-clause syntax
// This stores the types in env.requires_types without creating local definitions
// Pass requires_rigids so R1, R2, etc. are in scope when processing signatures
try self.processRequiresSignatures(h.requires_rigids, h.requires_signatures);
// Also introduces type aliases (like Model) into the platform's top-level scope
try self.processRequiresEntries(h.requires_entries);
},
.hosted => |h| {
self.env.module_kind = .hosted;
@ -2722,83 +2722,94 @@ fn addPlatformProvidesItems(
}
}
/// Process the requires_signatures from a platform header.
/// Process the requires entries from a platform header using the new for-clause syntax.
///
/// This extracts the required type signatures (like `main! : () => {}`) from the platform
/// header and stores them in `env.requires_types`. These are used during app type checking
/// to ensure the app's provided values match the platform's expected types.
/// This extracts the required type signatures from the platform header and stores them
/// in `env.requires_types`. These are used during app type checking to ensure the app's
/// provided values match the platform's expected types.
///
/// The requires_rigids parameter contains the type variables declared in `requires { R1, R2 }`.
/// These are introduced into scope before processing the signatures so that references to
/// R1, R2, etc. in the signatures are properly resolved as type variables.
/// The new syntax is: requires { [Model : model] for main : () -> { init : ... } }
///
/// Note: Required identifiers (like `main!`) are NOT introduced into scope here. Instead,
/// when an identifier is looked up and not found, we check env.requires_types to see if it's
/// a required identifier from the platform. This avoids conflicts with local definitions.
fn processRequiresSignatures(self: *Self, requires_rigids_idx: AST.Collection.Idx, requires_signatures_idx: AST.TypeAnno.Idx) std.mem.Allocator.Error!void {
// Enter a type var scope for the rigids - they should only be in scope while processing signatures
const type_var_scope = self.scopeEnterTypeVar();
defer self.scopeExitTypeVar(type_var_scope);
/// For each requires entry, this function:
/// 1. Introduces the rigid type variables (e.g., `model`) into a temporary scope
/// 2. Creates aliases (e.g., `Model`) that refer to the SAME type annotation as the rigid
/// 3. Canonicalizes the entrypoint type annotation with rigids in scope
/// 4. Stores the required type for type checking
///
/// IMPORTANT: Both the rigid (`model`) and the alias (`Model`) point to the SAME
/// type annotation. This ensures that when the type is unified, both names resolve
/// to the same concrete type.
fn processRequiresEntries(self: *Self, requires_entries: AST.RequiresEntry.Span) std.mem.Allocator.Error!void {
for (self.parse_ir.store.requiresEntrySlice(requires_entries)) |entry_idx| {
const entry = self.parse_ir.store.getRequiresEntry(entry_idx);
const entry_region = self.parse_ir.tokenizedRegionToRegion(entry.region);
// First, process the requires_rigids to add them to the type variable scope
// This allows R1, R2, etc. to be recognized when processing the signatures
const rigids_collection = self.parse_ir.store.getCollection(requires_rigids_idx);
for (self.parse_ir.store.exposedItemSlice(.{ .span = rigids_collection.span })) |exposed_idx| {
const exposed_item = self.parse_ir.store.getExposedItem(exposed_idx);
switch (exposed_item) {
.upper_ident => |upper| {
// Get the identifier for this rigid type variable (e.g., "R1")
const rigid_name = self.parse_ir.tokens.resolveIdentifier(upper.ident) orelse continue;
const rigid_region = self.parse_ir.tokenizedRegionToRegion(upper.region);
// Enter a type var scope for the rigids in this entry
const type_var_scope = self.scopeEnterTypeVar();
defer self.scopeExitTypeVar(type_var_scope);
// Create a type annotation for this rigid variable
const rigid_anno_idx = try self.env.addTypeAnno(.{ .rigid_var = .{
.name = rigid_name,
} }, rigid_region);
// Record start of type aliases for this entry
const type_aliases_start = self.env.for_clause_aliases.len();
// Introduce it into the type variable scope
_ = try self.scopeIntroduceTypeVar(rigid_name, rigid_anno_idx);
},
else => {
// Skip lower_ident, upper_ident_star, malformed - these aren't valid for requires rigids
},
// Process type aliases: [Model : model, Foo : foo]
// For each alias:
// 1. Create a SINGLE type annotation for the rigid
// 2. Introduce BOTH the rigid name (model) AND the alias name (Model)
// pointing to the SAME type annotation
// 3. Store the alias mapping for later use during type checking
for (self.parse_ir.store.forClauseTypeAliasSlice(entry.type_aliases)) |alias_idx| {
const alias = self.parse_ir.store.getForClauseTypeAlias(alias_idx);
const alias_region = self.parse_ir.tokenizedRegionToRegion(alias.region);
// Get the rigid name (lowercase, e.g., "model")
const rigid_name = self.parse_ir.tokens.resolveIdentifier(alias.rigid_name) orelse continue;
// Get the alias name (uppercase, e.g., "Model")
const alias_name = self.parse_ir.tokens.resolveIdentifier(alias.alias_name) orelse continue;
// Create a SINGLE type annotation for this rigid variable
// IMPORTANT: We use the rigid_name in the annotation, but introduce it
// under BOTH names in the scope
const rigid_anno_idx = try self.env.addTypeAnno(.{ .rigid_var = .{
.name = rigid_name,
} }, alias_region);
// Introduce the rigid (model) into the type variable scope
_ = try self.scopeIntroduceTypeVar(rigid_name, rigid_anno_idx);
// Introduce the alias (Model) pointing to the SAME type annotation
// This means both "model" and "Model" will resolve to the same type!
_ = try self.scopeIntroduceTypeVar(alias_name, rigid_anno_idx);
// Store the alias mapping for use during type checking
_ = try self.env.for_clause_aliases.append(self.env.gpa, .{
.alias_name = alias_name,
.rigid_name = rigid_name,
.rigid_anno_idx = rigid_anno_idx,
});
}
}
// Now process the requires_signatures with the rigids in scope
const requires_signatures = self.parse_ir.store.getTypeAnno(requires_signatures_idx);
// Calculate type aliases range for this entry
const type_aliases_end = self.env.for_clause_aliases.len();
const type_aliases_range = ModuleEnv.ForClauseAlias.SafeList.Range{
.start = @enumFromInt(type_aliases_start),
.count = @intCast(type_aliases_end - type_aliases_start),
};
// The requires_signatures should be a record type like { main! : () => {} }
switch (requires_signatures) {
.record => |record| {
for (self.parse_ir.store.annoRecordFieldSlice(record.fields)) |field_idx| {
const field = self.parse_ir.store.getAnnoRecordField(field_idx) catch |err| switch (err) {
error.MalformedNode => {
// Skip malformed fields
continue;
},
};
// Get the entrypoint name (e.g., "main")
const entrypoint_name = self.parse_ir.tokens.resolveIdentifier(entry.entrypoint_name) orelse continue;
// Get the field name (e.g., "main!")
const field_name = self.parse_ir.tokens.resolveIdentifier(field.name) orelse continue;
const field_region = self.parse_ir.tokenizedRegionToRegion(field.region);
// Canonicalize the type annotation for this entrypoint
var type_anno_ctx = TypeAnnoCtx.init(.inline_anno);
const type_anno_idx = try self.canonicalizeTypeAnnoHelp(entry.type_anno, &type_anno_ctx);
// Canonicalize the type annotation for this required identifier
var type_anno_ctx = TypeAnnoCtx.init(.inline_anno);
const type_anno_idx = try self.canonicalizeTypeAnnoHelp(field.ty, &type_anno_ctx);
// Store the required type in the module env
_ = try self.env.requires_types.append(self.env.gpa, .{
.ident = field_name,
.type_anno = type_anno_idx,
.region = field_region,
});
}
},
else => {
// requires_signatures should always be a record type from parsing
// If it's not, just skip processing (parser would have reported an error)
},
// Store the required type in the module env
_ = try self.env.requires_types.append(self.env.gpa, .{
.ident = entrypoint_name,
.type_anno = type_anno_idx,
.region = entry_region,
.type_aliases = type_aliases_range,
});
}
}

View file

@ -132,7 +132,7 @@ pub const Expr = union(enum) {
/// This represents a value that the app provides to the platform.
/// ```roc
/// platform "..."
/// requires {} { main! : () => {} }
/// requires { main! : () => {} }
/// ...
/// main_for_host! = main! # "main!" here is a required lookup
/// ```

View file

@ -403,10 +403,17 @@ all_defs: CIR.Def.Span,
all_statements: CIR.Statement.Span,
/// Definitions that are exported by this module (populated by canonicalization)
exports: CIR.Def.Span,
/// Required type signatures for platform modules (from `requires {} { main! : () => {} }`)
/// Required type signatures for platform modules (from `requires { main! : () => {} }`)
/// Maps identifier names to their expected type annotations.
/// Empty for non-platform modules.
requires_types: RequiredType.SafeList,
/// Type alias mappings from for-clauses in requires declarations.
/// Stores (alias_name, rigid_name) pairs like (Model, model).
for_clause_aliases: ForClauseAlias.SafeList,
/// Rigid type variable mappings from platform for-clause after unification.
/// Maps rigid names (e.g., "model") to their resolved type variables in the app's type store.
/// Populated during checkPlatformRequirements when the platform has a for-clause.
rigid_vars: std.AutoHashMapUnmanaged(Ident.Idx, TypeVar),
/// All builtin stmts (temporary until module imports are working)
builtin_statements: CIR.Statement.Span,
/// All external declarations referenced in this module
@ -457,6 +464,19 @@ pub const DeferredNumericLiteral = struct {
pub const SafeList = collections.SafeList(@This());
};
/// A type alias mapping from a for-clause: [Model : model]
/// Maps an alias name (Model) to a rigid variable name (model)
pub const ForClauseAlias = struct {
    /// The alias name (e.g., "Model") - to be looked up in the app
    alias_name: Ident.Idx,
    /// The rigid variable name (e.g., "model") - the rigid in the required type
    rigid_name: Ident.Idx,
    /// The type annotation index of the rigid_var for this alias.
    /// Both the rigid name and the alias name resolve to this same annotation,
    /// so they unify to the same concrete type.
    rigid_anno_idx: CIR.TypeAnno.Idx,
    /// Contiguous storage for these mappings; `RequiredType.type_aliases`
    /// holds a Range into a list of this type.
    pub const SafeList = collections.SafeList(@This());
};
/// Required type for platform modules - maps an identifier to its expected type annotation.
/// Used to enforce that apps provide values matching the platform's required types.
pub const RequiredType = struct {
@ -466,6 +486,9 @@ pub const RequiredType = struct {
type_anno: CIR.TypeAnno.Idx,
/// Region of the requirement for error reporting
region: Region,
/// Type alias mappings from the for-clause (e.g., [Model : model])
/// These specify which app type aliases should be substituted for which rigids
type_aliases: ForClauseAlias.SafeList.Range,
pub const SafeList = collections.SafeList(@This());
};
@ -478,6 +501,7 @@ pub fn relocate(self: *Self, offset: isize) void {
self.types.relocate(offset);
self.external_decls.relocate(offset);
self.requires_types.relocate(offset);
self.for_clause_aliases.relocate(offset);
self.imports.relocate(offset);
self.store.relocate(offset);
self.deferred_numeric_literals.relocate(offset);
@ -534,6 +558,8 @@ pub fn init(gpa: std.mem.Allocator, source: []const u8) std.mem.Allocator.Error!
.all_statements = .{ .span = .{ .start = 0, .len = 0 } },
.exports = .{ .span = .{ .start = 0, .len = 0 } },
.requires_types = try RequiredType.SafeList.initCapacity(gpa, 4),
.for_clause_aliases = try ForClauseAlias.SafeList.initCapacity(gpa, 4),
.rigid_vars = std.AutoHashMapUnmanaged(Ident.Idx, TypeVar){},
.builtin_statements = .{ .span = .{ .start = 0, .len = 0 } },
.external_decls = try CIR.ExternalDecl.SafeList.initCapacity(gpa, 16),
.imports = CIR.Import.Store.init(),
@ -555,6 +581,8 @@ pub fn deinit(self: *Self) void {
self.types.deinit();
self.external_decls.deinit(self.gpa);
self.requires_types.deinit(self.gpa);
self.for_clause_aliases.deinit(self.gpa);
self.rigid_vars.deinit(self.gpa);
self.imports.deinit(self.gpa);
self.deferred_numeric_literals.deinit(self.gpa);
self.import_mapping.deinit();
@ -1953,6 +1981,7 @@ pub const Serialized = extern struct {
all_statements: CIR.Statement.Span,
exports: CIR.Def.Span,
requires_types: RequiredType.SafeList.Serialized,
for_clause_aliases: ForClauseAlias.SafeList.Serialized,
builtin_statements: CIR.Statement.Span,
external_decls: CIR.ExternalDecl.SafeList.Serialized,
imports: CIR.Import.Store.Serialized,
@ -1986,6 +2015,7 @@ pub const Serialized = extern struct {
self.builtin_statements = env.builtin_statements;
try self.requires_types.serialize(&env.requires_types, allocator, writer);
try self.for_clause_aliases.serialize(&env.for_clause_aliases, allocator, writer);
try self.external_decls.serialize(&env.external_decls, allocator, writer);
try self.imports.serialize(&env.imports, allocator, writer);
@ -2045,6 +2075,7 @@ pub const Serialized = extern struct {
.all_statements = self.all_statements,
.exports = self.exports,
.requires_types = self.requires_types.deserialize(offset).*,
.for_clause_aliases = self.for_clause_aliases.deserialize(offset).*,
.builtin_statements = self.builtin_statements,
.external_decls = self.external_decls.deserialize(offset).*,
.imports = (try self.imports.deserialize(offset, gpa)).*,
@ -2057,6 +2088,7 @@ pub const Serialized = extern struct {
.deferred_numeric_literals = self.deferred_numeric_literals.deserialize(offset).*,
.import_mapping = types_mod.import_mapping.ImportMapping.init(gpa),
.method_idents = self.method_idents.deserialize(offset).*,
.rigid_vars = std.AutoHashMapUnmanaged(Ident.Idx, TypeVar){},
};
return env;

View file

@ -1077,6 +1077,17 @@ pub fn checkFile(self: *Self) std.mem.Allocator.Error!void {
fn processRequiresTypes(self: *Self, env: *Env) std.mem.Allocator.Error!void {
const requires_types_slice = self.cir.requires_types.items.items;
for (requires_types_slice) |required_type| {
// First, process any for-clause rigids for this required type.
// These are standalone rigid_var annotations introduced by the for-clause
// that are referenced by the main type but not part of its annotation tree.
// We process them first so they're properly initialized before the main type.
const type_aliases_range = required_type.type_aliases;
const all_aliases = self.cir.for_clause_aliases.items.items;
const type_aliases_slice = all_aliases[@intFromEnum(type_aliases_range.start)..][0..type_aliases_range.count];
for (type_aliases_slice) |alias| {
try self.generateAnnoTypeInPlace(alias.rigid_anno_idx, env, .annotation);
}
// Generate the type from the annotation
try self.generateAnnoTypeInPlace(required_type.type_anno, env, .annotation);
}
@ -1135,17 +1146,84 @@ pub fn checkPlatformRequirements(
const copied_required_var = try self.copyVar(required_type_var, platform_env, required_type.region);
// Instantiate the copied variable before unifying (to avoid poisoning the cached copy)
const instantiated_required_var = try self.instantiateVar(copied_required_var, &env, .{ .explicit = required_type.region });
// Use instantiateVarPreserveRigids so that rigid type variables from the for-clause
// remain as rigids and can be looked up by name during interpretation.
const instantiated_required_var = try self.instantiateVarPreserveRigids(copied_required_var, &env, .{ .explicit = required_type.region });
// Extract rigid name -> instantiated var mappings from the var_map.
// At this point, fresh vars still have rigid content with their names.
// After unification, these vars will redirect to concrete types.
// This allows the interpreter to substitute platform rigid type vars with app concrete types.
var var_map_iter = self.var_map.iterator();
while (var_map_iter.next()) |entry| {
const fresh_var = entry.value_ptr.*;
const resolved = self.types.resolveVar(fresh_var);
switch (resolved.desc.content) {
.rigid => |rigid| {
// Store the rigid name -> instantiated var mapping in the app's module env
try self.cir.rigid_vars.put(self.gpa, rigid.name, fresh_var);
},
else => {},
}
}
// For each for-clause type alias (e.g., [Model : model]), look up the app's
// corresponding type alias and unify it with the rigid type variable.
// This substitutes concrete app types for platform rigid type variables.
const type_aliases_range = required_type.type_aliases;
const all_aliases = platform_env.for_clause_aliases.items.items;
const type_aliases_slice = all_aliases[@intFromEnum(type_aliases_range.start)..][0..type_aliases_range.count];
for (type_aliases_slice) |alias| {
// Translate the platform's alias name to the app's namespace
const app_alias_name = platform_to_app_idents.get(alias.alias_name) orelse continue;
// Look up the rigid var we stored earlier.
// rigid_vars is keyed by the APP's ident index (the rigid name was translated when copied),
// so we translate the platform's rigid_name to the app's ident space using the pre-built map.
const app_rigid_name = platform_to_app_idents.get(alias.rigid_name) orelse continue;
const rigid_var = self.cir.rigid_vars.get(app_rigid_name) orelse continue;
// Look up the app's type alias body (the underlying type, not the alias wrapper)
const app_type_var = self.findTypeAliasBodyVar(app_alias_name) orelse continue;
// Redirect the rigid var to the app's type alias body.
// This substitutes the concrete app type for the platform's rigid type variable.
// We use redirect instead of unify because rigids can't be unified with concrete types.
try self.types.setVarRedirect(rigid_var, app_type_var);
}
// Unify the platform's required type with the app's export type.
// This constrains type variables in the export (e.g., closure params)
// to match the platform's expected types.
// to match the platform's expected types. After this, the fresh vars
// stored in rigid_vars will redirect to the concrete app types.
_ = try self.unifyFromAnno(instantiated_required_var, export_var, &env);
}
// Note: If the export is not found, the canonicalizer should have already reported an error
}
}
/// Scan this module's top-level statements for a type alias declaration whose
/// header name matches `name`, and return the type var of the alias's *body*
/// annotation (e.g. for `Model : { value: I64 }`, the var for `{ value: I64 }`),
/// not the var of the declaration statement itself.
/// Returns null when no matching alias declaration is found.
fn findTypeAliasBodyVar(self: *Self, name: Ident.Idx) ?Var {
    for (self.cir.store.sliceStatements(self.cir.all_statements)) |statement_idx| {
        switch (self.cir.store.getStatement(statement_idx)) {
            .s_alias_decl => |decl| {
                // Compare interned identifiers; only alias declarations can match.
                if (self.cir.store.getTypeHeader(decl.header).relative_name == name) {
                    // The body annotation carries the var for the underlying type.
                    return ModuleEnv.varFrom(decl.anno);
                }
            },
            else => {},
        }
    }
    return null;
}
// repl //
/// Check an expr for the repl

View file

@ -78,6 +78,7 @@ fn loadCompiledModule(gpa: std.mem.Allocator, bin_data: []const u8, module_name:
.all_statements = serialized_ptr.all_statements,
.exports = serialized_ptr.exports,
.requires_types = serialized_ptr.requires_types.deserialize(@as(i64, @intCast(base_ptr))).*,
.for_clause_aliases = serialized_ptr.for_clause_aliases.deserialize(@as(i64, @intCast(base_ptr))).*,
.builtin_statements = serialized_ptr.builtin_statements,
.external_decls = serialized_ptr.external_decls.deserialize(@as(i64, @intCast(base_ptr))).*,
.imports = (try serialized_ptr.imports.deserialize(@as(i64, @intCast(base_ptr)), gpa)).*,
@ -90,6 +91,7 @@ fn loadCompiledModule(gpa: std.mem.Allocator, bin_data: []const u8, module_name:
.deferred_numeric_literals = try ModuleEnv.DeferredNumericLiteral.SafeList.initCapacity(gpa, 0),
.import_mapping = types.import_mapping.ImportMapping.init(gpa),
.method_idents = serialized_ptr.method_idents.deserialize(@as(i64, @intCast(base_ptr))).*,
.rigid_vars = std.AutoHashMapUnmanaged(base.Ident.Idx, types.Var){},
};
return LoadedModule{

View file

@ -325,20 +325,28 @@ fn buildLinkArgs(allocs: *Allocators, config: LinkConfig) LinkError!std.array_li
// This ensures host exports (init, handleEvent, update) aren't stripped even when
// not referenced by other code
const is_wasm = config.target_format == .wasm;
const is_macos = target_os == .macos;
if (is_wasm and config.platform_files_pre.len > 0) {
try args.append("--whole-archive");
}
// Add platform-provided files that come before object files
// Use --whole-archive to include all members from static libraries (e.g., libhost.a)
// Use --whole-archive (or -all_load on macOS) to include all members from static libraries
// This ensures host-exported functions like init, handleEvent, update are included
// even though they're not referenced by the Roc app's compiled code
if (config.platform_files_pre.len > 0) {
try args.append("--whole-archive");
if (is_macos) {
// macOS uses -all_load to include all members from static libraries
try args.append("-all_load");
} else {
try args.append("--whole-archive");
}
for (config.platform_files_pre) |platform_file| {
try args.append(platform_file);
}
try args.append("--no-whole-archive");
if (!is_macos) {
try args.append("--no-whole-archive");
}
}
// Add object files (Roc shim libraries - don't need --whole-archive)
@ -349,11 +357,17 @@ fn buildLinkArgs(allocs: *Allocators, config: LinkConfig) LinkError!std.array_li
// Add platform-provided files that come after object files
// Also use --whole-archive in case there are static libs here too
if (config.platform_files_post.len > 0) {
try args.append("--whole-archive");
if (is_macos) {
try args.append("-all_load");
} else {
try args.append("--whole-archive");
}
for (config.platform_files_post) |platform_file| {
try args.append(platform_file);
}
try args.append("--no-whole-archive");
if (!is_macos) {
try args.append("--no-whole-archive");
}
}
// Add any extra arguments

View file

@ -2006,6 +2006,23 @@ pub fn setupSharedMemoryWithModuleEnv(allocs: *Allocators, roc_file_path: []cons
if (app_env.common.findIdent(platform_ident_text)) |app_ident| {
try platform_to_app_idents.put(required_type.ident, app_ident);
}
// Also add for-clause type alias names (Model, model) to the translation map
const all_aliases = penv.for_clause_aliases.items.items;
const type_aliases_slice = all_aliases[@intFromEnum(required_type.type_aliases.start)..][0..required_type.type_aliases.count];
for (type_aliases_slice) |alias| {
// Add alias name (e.g., "Model") - must exist in app since it's required
const alias_name_text = penv.getIdent(alias.alias_name);
if (app_env.common.findIdent(alias_name_text)) |app_ident| {
try platform_to_app_idents.put(alias.alias_name, app_ident);
}
// Add rigid name (e.g., "model") - insert it into app's ident store since
// the rigid name is a platform concept that gets copied during type processing.
// Using insert (not find) ensures the app's ident store has this name for later lookups.
const rigid_name_text = penv.getIdent(alias.rigid_name);
const app_ident = try app_env.common.insertIdent(allocs.gpa, base.Ident.for_text(rigid_name_text));
try platform_to_app_idents.put(alias.rigid_name, app_ident);
}
}
try app_checker.checkPlatformRequirements(penv, &platform_to_app_idents);

View file

@ -728,7 +728,7 @@ test "validatePlatformHasTargets detects missing targets section" {
// Platform without targets section
const source =
\\platform ""
\\ requires {} { main : {} }
\\ requires { main : {} }
\\ exposes []
\\ packages {}
\\ provides { main_for_host: "main" }
@ -764,7 +764,7 @@ test "validatePlatformHasTargets accepts platform with targets section" {
// Platform with targets section
const source =
\\platform ""
\\ requires {} { main : {} }
\\ requires { main : {} }
\\ exposes []
\\ packages {}
\\ provides { main_for_host: "main" }
@ -825,7 +825,7 @@ test "validatePlatformHasTargets accepts platform with multiple target types" {
// Platform with exe and static_lib targets
const source =
\\platform ""
\\ requires {} { main : {} }
\\ requires { main : {} }
\\ exposes []
\\ packages {}
\\ provides { main_for_host: "main" }
@ -863,7 +863,7 @@ test "validatePlatformHasTargets accepts platform with win_gui target" {
// Platform with win_gui special identifier
const source =
\\platform ""
\\ requires {} { main : {} }
\\ requires { main : {} }
\\ exposes []
\\ packages {}
\\ provides { main_for_host: "main" }
@ -896,7 +896,7 @@ test "TargetsConfig.fromAST extracts targets configuration" {
// Platform with various targets
const source =
\\platform ""
\\ requires {} { main : {} }
\\ requires { main : {} }
\\ exposes []
\\ packages {}
\\ provides { main_for_host: "main" }

View file

@ -97,6 +97,7 @@ test "ModuleEnv.Serialized roundtrip" {
.all_statements = deserialized_ptr.all_statements,
.exports = deserialized_ptr.exports,
.requires_types = deserialized_ptr.requires_types.deserialize(@as(i64, @intCast(@intFromPtr(buffer.ptr)))).*,
.for_clause_aliases = deserialized_ptr.for_clause_aliases.deserialize(@as(i64, @intCast(@intFromPtr(buffer.ptr)))).*,
.builtin_statements = deserialized_ptr.builtin_statements,
.external_decls = deserialized_ptr.external_decls.deserialize(@as(i64, @intCast(@intFromPtr(buffer.ptr)))).*,
.imports = (try deserialized_ptr.imports.deserialize(@as(i64, @intCast(@intFromPtr(buffer.ptr))), deser_alloc)).*,
@ -109,6 +110,7 @@ test "ModuleEnv.Serialized roundtrip" {
.deferred_numeric_literals = try ModuleEnv.DeferredNumericLiteral.SafeList.initCapacity(deser_alloc, 0),
.import_mapping = types.import_mapping.ImportMapping.init(deser_alloc),
.method_idents = deserialized_ptr.method_idents.deserialize(@as(i64, @intCast(@intFromPtr(buffer.ptr)))).*,
.rigid_vars = std.AutoHashMapUnmanaged(base.Ident.Idx, types.Var){},
};
// Verify original data before serialization was correct

View file

@ -223,6 +223,11 @@ pub const Interpreter = struct {
// Rigid variable substitution context for generic function instantiation
// Maps rigid type variables to their concrete instantiations
rigid_subst: std.AutoHashMap(types.Var, types.Var),
// Rigid name substitution for platform-app type variable mappings
// Maps rigid ident names (in runtime ident store) to concrete runtime type vars
// Maps rigid variable name string indices to concrete runtime type vars.
// Keyed by the raw string index (u29) to ignore attribute differences.
rigid_name_subst: std.AutoHashMap(u29, types.Var),
// Compile-time rigid substitution for nominal type backing translation
// Maps CT rigid vars in backing type to CT type arg vars
translate_rigid_subst: std.AutoHashMap(types.Var, types.Var),
@ -414,6 +419,7 @@ pub const Interpreter = struct {
.translate_cache = std.AutoHashMap(ModuleVarKey, CacheEntry).init(allocator),
.translation_in_progress = std.AutoHashMap(ModuleVarKey, void).init(allocator),
.rigid_subst = std.AutoHashMap(types.Var, types.Var).init(allocator),
.rigid_name_subst = std.AutoHashMap(u29, types.Var).init(allocator),
.translate_rigid_subst = std.AutoHashMap(types.Var, types.Var).init(allocator),
.flex_type_context = std.AutoHashMap(ModuleVarKey, types.Var).init(allocator),
.poly_context_generation = 0,
@ -535,6 +541,72 @@ pub const Interpreter = struct {
return result;
}
/// Setup for-clause type mappings from the platform's required types.
/// This maps rigid variable names (like "model") to their concrete app types (like { value: I64 }).
///
/// Best-effort: each individual mapping that fails to resolve (ident insertion,
/// alias lookup, or type translation) is skipped via `catch`/`orelse continue`
/// rather than aborting the whole setup.
pub fn setupForClauseTypeMappings(self: *Interpreter, platform_env: *const can.ModuleEnv) Error!void {
    // No app module loaded: nothing to map against.
    const app_env = self.app_env orelse return;

    // Get the platform's for_clause_aliases; bail out early if there are none.
    const all_aliases = platform_env.for_clause_aliases.items.items;
    if (all_aliases.len == 0) return;

    // Iterate through all required types and their for-clause aliases
    const requires_types_slice = platform_env.requires_types.items.items;
    for (requires_types_slice) |required_type| {
        // Get the type aliases for this required type (a Range into all_aliases)
        const type_aliases_slice = all_aliases[@intFromEnum(required_type.type_aliases.start)..][0..required_type.type_aliases.count];

        for (type_aliases_slice) |alias| {
            // Get the alias name (e.g., "Model") - translate to app's ident store
            const alias_name_str = platform_env.getIdent(alias.alias_name);
            // Use insertIdent (not findIdent) to translate the platform ident to app ident
            const app_alias_ident = @constCast(app_env).common.insertIdent(self.allocator, base_pkg.Ident.for_text(alias_name_str)) catch continue;

            // Get the rigid name (e.g., "model") - insert into runtime ident store
            const rigid_name_str = platform_env.getIdent(alias.rigid_name);
            const rt_rigid_name = self.runtime_layout_store.env.insertIdent(base_pkg.Ident.for_text(rigid_name_str)) catch continue;

            // Find the app's type alias definition and get its underlying type var
            const app_type_var = findTypeAliasBodyVar(app_env, app_alias_ident) orelse continue;

            // Translate the app's type variable to a runtime type variable
            const app_rt_var = self.translateTypeVar(@constCast(app_env), app_type_var) catch continue;

            // Add the mapping: rigid_name -> app's concrete type
            // Use just the string index (u29), ignoring attributes
            self.rigid_name_subst.put(rt_rigid_name.idx, app_rt_var) catch continue;
        }
    }

    // CRITICAL: Clear the translate_cache after adding for-clause mappings.
    // During the translations above, the platform's rigid type vars (like `model`)
    // may have been cached before their mappings were established. Clear the cache
    // so that subsequent translations will pick up the for-clause mappings.
    self.translate_cache.clearRetainingCapacity();
    // Also clear the var_to_layout_slot cache (zero = unassigned slot)
    @memset(self.var_to_layout_slot.items, 0);
}
/// Locate a top-level type alias declaration named `name` inside `module` and
/// return the type var of its body annotation — i.e. the aliased type itself,
/// not the declaration statement. Returns null when no such alias exists.
fn findTypeAliasBodyVar(module: *const can.ModuleEnv, name: base_pkg.Ident.Idx) ?types.Var {
    const statements = module.store.sliceStatements(module.all_statements);
    for (statements) |idx| {
        const statement = module.store.getStatement(idx);
        // Only alias declarations are candidates; everything else is ignored.
        if (statement != .s_alias_decl) continue;
        const decl = statement.s_alias_decl;
        const header = module.store.getTypeHeader(decl.header);
        if (header.relative_name == name) {
            // The annotation node's var is the var for the alias body.
            return can.ModuleEnv.varFrom(decl.anno);
        }
    }
    return null;
}
/// Evaluates a Roc expression and returns the result.
pub fn eval(self: *Interpreter, expr_idx: can.CIR.Expr.Idx, roc_ops: *RocOps) Error!StackValue {
// Clear flex_type_context at the start of each top-level evaluation.
@ -654,6 +726,13 @@ pub const Interpreter = struct {
if (!matched) {
@panic("TypeMismatch at patternMatchesBind line 664");
}
// Decref refcounted argument values (lists, strings) after binding.
// patternMatchesBind made copies, so we need to decref the originals.
// EXCEPT: Don't decref Box types because that zeros the slot in host memory.
// The host owns box slots and will manage them.
if (arg_value.layout.tag != .box and arg_value.layout.tag != .box_of_zst) {
arg_value.decref(&self.runtime_layout_store, roc_ops);
}
}
}
@ -666,9 +745,6 @@ pub const Interpreter = struct {
temp_binds.items.len = 0;
}
// Decref args after body evaluation (caller transfers ownership)
defer if (params.len > 0) args_tuple_value.decref(&self.runtime_layout_store, roc_ops);
defer self.trimBindingList(&self.bindings, base_binding_len, roc_ops);
// Evaluate body, handling early returns at function boundary
@ -7599,6 +7675,7 @@ pub const Interpreter = struct {
self.translate_cache.deinit();
self.translation_in_progress.deinit();
self.rigid_subst.deinit();
self.rigid_name_subst.deinit();
self.translate_rigid_subst.deinit();
self.flex_type_context.deinit();
var it = self.poly_cache.iterator();
@ -8150,10 +8227,19 @@ pub const Interpreter = struct {
// In debug builds, use a counter to prevent infinite loops from cyclic substitutions
var count: u32 = 0;
while (resolved.desc.content == .rigid) {
const rigid_name = resolved.desc.content.rigid.name;
// First check rigid_subst (by type variable)
if (self.rigid_subst.get(resolved.var_)) |substituted_var| {
count += 1;
std.debug.assert(count < 1000); // Guard against infinite loops in debug builds
resolved = self.runtime_types.resolveVar(substituted_var);
} else if (self.rigid_name_subst.get(rigid_name.idx)) |substituted_var| {
// Fall back to rigid_name_subst (by string index) - used for for-clause type mappings
// Also add this to rigid_subst for faster future lookups
self.rigid_subst.put(resolved.var_, substituted_var) catch {};
count += 1;
std.debug.assert(count < 1000);
resolved = self.runtime_types.resolveVar(substituted_var);
} else {
break;
}
@ -8898,6 +8984,17 @@ pub const Interpreter = struct {
.rigid => |rigid| {
// Check if this rigid should be substituted (during nominal type backing translation)
if (self.translate_rigid_subst.get(resolved.var_)) |substitute_var| {
// Check if the substitute_var is itself a rigid with a for-clause mapping
const sub_resolved = module.types.resolveVar(substitute_var);
if (sub_resolved.desc.content == .rigid) {
const sub_rigid = sub_resolved.desc.content.rigid;
const sub_name_str = module.getIdent(sub_rigid.name);
const sub_rt_name = try self.runtime_layout_store.env.insertIdent(base_pkg.Ident.for_text(sub_name_str));
if (self.rigid_name_subst.get(sub_rt_name.idx)) |for_clause_var| {
// Use the for-clause mapping instead
break :blk for_clause_var;
}
}
// Translate the substitute type instead of the rigid
break :blk try self.translateTypeVar(module, substitute_var);
}
@ -8936,7 +9033,22 @@ pub const Interpreter = struct {
};
const content: types.Content = .{ .rigid = rt_rigid };
break :blk try self.runtime_types.freshFromContent(content);
const rt_rigid_var = try self.runtime_types.freshFromContent(content);
// If there's a for-clause mapping for this rigid name, add it to empty_scope
// so the layout store can find it during Box/List layout computation
if (self.rigid_name_subst.get(rt_name.idx)) |concrete_rt_var| {
// Ensure we have at least one scope level
if (self.empty_scope.scopes.items.len == 0) {
try self.empty_scope.scopes.append(types.VarMap.init(self.allocator));
}
// Add the mapping to empty_scope
try self.empty_scope.scopes.items[0].put(rt_rigid_var, concrete_rt_var);
// Also add to rigid_subst for consistency
try self.rigid_subst.put(rt_rigid_var, concrete_rt_var);
}
break :blk rt_rigid_var;
},
.err => {
// Handle generic type parameters from compiled builtin modules.
@ -9951,9 +10063,6 @@ pub const Interpreter = struct {
switch (work_item) {
.eval_expr => |eval_item| {
self.scheduleExprEval(&work_stack, &value_stack, eval_item.expr_idx, eval_item.expected_rt_var, roc_ops) catch |err| {
if (err == error.TypeMismatch) {
@panic("TypeMismatch from scheduleExprEval");
}
return err;
};
},
@ -11280,7 +11389,7 @@ pub const Interpreter = struct {
// Check if this is an error expression that shouldn't be called
if (func_expr_check == .e_runtime_error or func_expr_check == .e_anno_only or func_expr_check == .e_crash) {
@panic("TypeMismatch: func is error/anno_only/crash");
return error.TypeMismatch;
}
// Get function type and potentially instantiate
@ -14964,8 +15073,16 @@ pub const Interpreter = struct {
return error.TypeMismatch;
}
// Translate field name from compile-time ident store to runtime ident store.
// The field name in da.field_name is from self.env's ident store, but the
// record layout was built with runtime ident store field names.
const ct_field_name_str = self.env.getIdent(da.field_name);
const rt_field_name = try self.runtime_layout_store.env.insertIdent(base_pkg.Ident.for_text(ct_field_name_str));
var accessor = try receiver_value.asRecord(&self.runtime_layout_store);
const field_idx = accessor.findFieldIndex(da.field_name) orelse return error.TypeMismatch;
const field_idx = accessor.findFieldIndex(rt_field_name) orelse {
return error.TypeMismatch;
};
// Get the field's rt_var from the receiver's record type
const receiver_resolved = self.runtime_types.resolveVar(receiver_value.rt_var);
@ -14981,7 +15098,8 @@ pub const Interpreter = struct {
var i: usize = 0;
while (i < fields.len) : (i += 1) {
const f = fields.get(i);
if (f.name == da.field_name) {
// Use translated field name for comparison (both are in runtime ident store)
if (f.name == rt_field_name) {
break :blk f.var_;
}
}
@ -15112,7 +15230,6 @@ pub const Interpreter = struct {
return error.InvalidMethodReceiver;
}
// Handle Box.box intrinsic - must intercept before resolveMethodFunction
// since Box.box has no implementation body
if (nominal_info.?.ident == self.root_env.idents.box and

View file

@ -1673,31 +1673,49 @@ const Formatter = struct {
fmt.curr_indent = start_indent + 1;
try fmt.pushIndent();
try fmt.pushAll("requires");
const rigids = fmt.ast.store.getCollection(p.requires_rigids);
if (try fmt.flushCommentsBefore(rigids.region.start)) {
fmt.curr_indent += 1;
try fmt.pushIndent();
} else {
try fmt.push(' ');
}
try fmt.formatCollection(
rigids.region,
.curly,
AST.ExposedItem.Idx,
fmt.ast.store.exposedItemSlice(.{ .span = rigids.span }),
Formatter.formatExposedItem,
);
if (try fmt.flushCommentsBefore(rigids.region.end)) {
fmt.curr_indent += 1;
try fmt.pushIndent();
} else {
try fmt.push(' ');
}
_ = try fmt.formatTypeAnno(p.requires_signatures);
try fmt.pushAll("requires {");
// Format requires entries with for-clause syntax
const entries = fmt.ast.store.requiresEntrySlice(p.requires_entries);
if (entries.len > 0) {
try fmt.ensureNewline();
fmt.curr_indent = start_indent + 2;
for (entries, 0..) |entry_idx, entry_i| {
const entry = fmt.ast.store.getRequiresEntry(entry_idx);
try fmt.pushIndent();
const signatures_region = fmt.nodeRegion(@intFromEnum(p.requires_signatures));
_ = try fmt.flushCommentsBefore(signatures_region.end);
// Format type aliases: [Model : model] for ...
// Only output the bracket syntax if there are type aliases
const aliases = fmt.ast.store.forClauseTypeAliasSlice(entry.type_aliases);
if (aliases.len > 0) {
try fmt.push('[');
for (aliases, 0..) |alias_idx, alias_i| {
const alias = fmt.ast.store.getForClauseTypeAlias(alias_idx);
try fmt.pushTokenText(alias.alias_name);
try fmt.pushAll(" : ");
try fmt.pushTokenText(alias.rigid_name);
if (alias_i < aliases.len - 1) {
try fmt.pushAll(", ");
}
}
try fmt.pushAll("] for ");
}
// Format entrypoint name
try fmt.pushTokenText(entry.entrypoint_name);
try fmt.pushAll(" : ");
// Format type annotation
_ = try fmt.formatTypeAnno(entry.type_anno);
if (entry_i < entries.len - 1) {
try fmt.push(',');
}
try fmt.ensureNewline();
}
fmt.curr_indent = start_indent + 1;
try fmt.pushIndent();
}
try fmt.push('}');
try fmt.ensureNewline();
fmt.curr_indent = start_indent + 1;
try fmt.pushIndent();
@ -2431,7 +2449,8 @@ const Formatter = struct {
return fmt.collectionWillBeMultiline(AST.RecordField.Idx, p.packages);
},
.platform => |p| {
if (fmt.collectionWillBeMultiline(AST.ExposedItem.Idx, p.requires_rigids)) {
// Requires entries with for-clause always multiline if present
if (p.requires_entries.span.len > 0) {
return true;
}
if (fmt.collectionWillBeMultiline(AST.ExposedItem.Idx, p.exposes)) {

View file

@ -688,10 +688,17 @@ fn createInterpreter(env_ptr: *ModuleEnv, app_env: ?*ModuleEnv, builtin_modules:
};
}
const interpreter = eval.Interpreter.init(allocator, env_ptr, builtin_types, builtin_module_env, imported_envs, &shim_import_mapping, app_env) catch {
var interpreter = eval.Interpreter.init(allocator, env_ptr, builtin_types, builtin_module_env, imported_envs, &shim_import_mapping, app_env) catch {
roc_ops.crash("INTERPRETER SHIM: Interpreter initialization failed");
return error.InterpreterSetupFailed;
};
// Setup for-clause type mappings from platform to app.
// This maps rigid variable names (like "model") to their concrete app types.
interpreter.setupForClauseTypeMappings(env_ptr) catch {
roc_ops.crash("INTERPRETER SHIM: Failed to setup for-clause type mappings");
return error.InterpreterSetupFailed;
};
return interpreter;
}

View file

@ -666,6 +666,14 @@ pub const Diagnostic = struct {
expected_requires_rigids_open_curly,
expected_requires_signatures_close_curly,
expected_requires_signatures_open_curly,
expected_for_clause_open_square,
expected_for_clause_close_square,
expected_for_clause_alias_name,
expected_for_clause_colon,
expected_for_clause_rigid_name,
expected_for_keyword,
expected_for_clause_entrypoint_name,
expected_for_clause_type_colon,
header_expected_open_square,
header_expected_close_square,
pattern_unexpected_token,
@ -1624,8 +1632,7 @@ pub const Header = union(enum) {
},
platform: struct {
name: Token.Idx,
requires_rigids: Collection.Idx,
requires_signatures: TypeAnno.Idx,
requires_entries: RequiresEntry.Span, // [Model : model] for main : () -> { ... }
exposes: Collection.Idx,
packages: Collection.Idx,
provides: Collection.Idx,
@ -1754,22 +1761,42 @@ pub const Header = union(enum) {
try tree.pushStringPair("name", ast.resolve(a.name));
const attrs = tree.beginNode();
// Requires Rigids
const rigids = ast.store.getCollection(a.requires_rigids);
const rigids_begin = tree.beginNode();
try tree.pushStaticAtom("rigids");
try ast.appendRegionInfoToSexprTree(env, tree, rigids.region);
// Requires Entries (for-clause syntax)
const requires_begin = tree.beginNode();
try tree.pushStaticAtom("requires");
const attrs3 = tree.beginNode();
// Could push region info for rigids here if desired
for (ast.store.exposedItemSlice(.{ .span = rigids.span })) |exposed| {
const item = ast.store.getExposedItem(exposed);
try item.pushToSExprTree(env, ast, tree);
}
try tree.endNode(rigids_begin, attrs3);
for (ast.store.requiresEntrySlice(a.requires_entries)) |entry_idx| {
const entry = ast.store.getRequiresEntry(entry_idx);
const entry_begin = tree.beginNode();
try tree.pushStaticAtom("requires-entry");
try ast.appendRegionInfoToSexprTree(env, tree, entry.region);
const entry_attrs = tree.beginNode();
// Requires Signatures
const signatures = ast.store.getTypeAnno(a.requires_signatures);
try signatures.pushToSExprTree(gpa, env, ast, tree);
// Type aliases
const aliases_begin = tree.beginNode();
try tree.pushStaticAtom("type-aliases");
const aliases_attrs = tree.beginNode();
for (ast.store.forClauseTypeAliasSlice(entry.type_aliases)) |alias_idx| {
const alias = ast.store.getForClauseTypeAlias(alias_idx);
const alias_begin = tree.beginNode();
try tree.pushStaticAtom("alias");
try tree.pushStringPair("name", ast.resolve(alias.alias_name));
try tree.pushStringPair("rigid", ast.resolve(alias.rigid_name));
const alias_attrs = tree.beginNode();
try tree.endNode(alias_begin, alias_attrs);
}
try tree.endNode(aliases_begin, aliases_attrs);
// Entrypoint name
try tree.pushStringPair("entrypoint", ast.resolve(entry.entrypoint_name));
// Type annotation
const type_anno = ast.store.getTypeAnno(entry.type_anno);
try type_anno.pushToSExprTree(gpa, env, ast, tree);
try tree.endNode(entry_begin, entry_attrs);
}
try tree.endNode(requires_begin, attrs3);
// Exposes
const exposes = ast.store.getCollection(a.exposes);
@ -2038,6 +2065,33 @@ pub const TargetFile = union(enum) {
pub const Span = struct { span: base.DataSpan };
};
/// A type alias mapping in a for-clause: Model : model
/// Maps an uppercase alias (Model) to a lowercase rigid variable (model)
pub const ForClauseTypeAlias = struct {
/// The alias name token (e.g., "Model") - UpperIdent
alias_name: Token.Idx,
/// The rigid variable name token (e.g., "model") - LowerIdent
rigid_name: Token.Idx,
/// Source region covering the whole `Alias : rigid` pair
region: TokenizedRegion,
/// Typed index of a ForClauseTypeAlias node in the NodeStore
pub const Idx = enum(u32) { _ };
/// A contiguous run of ForClauseTypeAlias indices (stored in extra_data)
pub const Span = struct { span: base.DataSpan };
};
/// A requires entry with for-clause: [Model : model] for main : () -> { ... }
pub const RequiresEntry = struct {
/// Type aliases: [Model : model, Foo : foo]
/// May be empty for the simple syntax without a for-clause.
type_aliases: ForClauseTypeAlias.Span,
/// The entrypoint name token (e.g., "main") - LowerIdent
entrypoint_name: Token.Idx,
/// The type annotation for this entrypoint
type_anno: TypeAnno.Idx,
/// Source region covering the whole entry (aliases, name, and annotation)
region: TokenizedRegion,
/// Typed index of a RequiresEntry node in the NodeStore
pub const Idx = enum(u32) { _ };
/// A contiguous run of RequiresEntry indices (stored in extra_data)
pub const Span = struct { span: base.DataSpan };
};
/// TODO
pub const TypeHeader = struct {
name: Token.Idx,

View file

@ -506,6 +506,17 @@ pub const Tag = enum {
/// A special identifier in a target list: app, win_gui
/// * main_token - identifier token
target_file_ident,
/// A for-clause type alias: Model : model
/// * main_token - alias name token (UpperIdent)
/// * lhs - rigid name token index
for_clause_type_alias,
/// A requires entry: [Model : model] for main : () -> { ... }
/// * main_token - entrypoint name token
/// * lhs - start of type_aliases span
/// * rhs - packed: type_aliases len (16 bits) + type_anno idx (16 bits)
requires_entry,
};
/// Unstructured information about a Node. These

View file

@ -42,6 +42,8 @@ scratch_exposed_items: base.Scratch(AST.ExposedItem.Idx),
scratch_where_clauses: base.Scratch(AST.WhereClause.Idx),
scratch_target_entries: base.Scratch(AST.TargetEntry.Idx),
scratch_target_files: base.Scratch(AST.TargetFile.Idx),
scratch_for_clause_type_aliases: base.Scratch(AST.ForClauseTypeAlias.Idx),
scratch_requires_entries: base.Scratch(AST.RequiresEntry.Idx),
/// Compile-time constants for union variant counts to ensure we don't miss cases
/// when adding/removing variants from AST unions. Update these when modifying the unions.
@ -78,6 +80,8 @@ pub fn initCapacity(gpa: std.mem.Allocator, capacity: usize) std.mem.Allocator.E
.scratch_where_clauses = try base.Scratch(AST.WhereClause.Idx).init(gpa),
.scratch_target_entries = try base.Scratch(AST.TargetEntry.Idx).init(gpa),
.scratch_target_files = try base.Scratch(AST.TargetFile.Idx).init(gpa),
.scratch_for_clause_type_aliases = try base.Scratch(AST.ForClauseTypeAlias.Idx).init(gpa),
.scratch_requires_entries = try base.Scratch(AST.RequiresEntry.Idx).init(gpa),
};
_ = try store.nodes.append(gpa, .{
@ -114,6 +118,8 @@ pub fn deinit(store: *NodeStore) void {
store.scratch_where_clauses.deinit();
store.scratch_target_entries.deinit();
store.scratch_target_files.deinit();
store.scratch_for_clause_type_aliases.deinit();
store.scratch_requires_entries.deinit();
}
/// Ensures that all scratch buffers in the store
@ -132,6 +138,8 @@ pub fn emptyScratch(store: *NodeStore) void {
store.scratch_where_clauses.clearFrom(0);
store.scratch_target_entries.clearFrom(0);
store.scratch_target_files.clearFrom(0);
store.scratch_for_clause_type_aliases.clearFrom(0);
store.scratch_requires_entries.clearFrom(0);
}
/// Prints debug information about all nodes and scratch buffers in the store.
@ -243,8 +251,9 @@ pub fn addHeader(store: *NodeStore, header: AST.Header) std.mem.Allocator.Error!
node.main_token = platform.name;
const ed_start = store.extra_data.items.len;
try store.extra_data.append(store.gpa, @intFromEnum(platform.requires_rigids));
try store.extra_data.append(store.gpa, @intFromEnum(platform.requires_signatures));
// Store requires_entries span (start and len)
try store.extra_data.append(store.gpa, platform.requires_entries.span.start);
try store.extra_data.append(store.gpa, platform.requires_entries.span.len);
try store.extra_data.append(store.gpa, @intFromEnum(platform.exposes));
try store.extra_data.append(store.gpa, @intFromEnum(platform.packages));
try store.extra_data.append(store.gpa, @intFromEnum(platform.provides));
@ -1104,8 +1113,10 @@ pub fn getHeader(store: *const NodeStore, header_idx: AST.Header.Idx) AST.Header
return .{ .platform = .{
.name = node.main_token,
.requires_rigids = @enumFromInt(store.extra_data.items[ed_start]),
.requires_signatures = @enumFromInt(store.extra_data.items[ed_start + 1]),
.requires_entries = .{ .span = .{
.start = store.extra_data.items[ed_start],
.len = store.extra_data.items[ed_start + 1],
} },
.exposes = @enumFromInt(store.extra_data.items[ed_start + 2]),
.packages = @enumFromInt(store.extra_data.items[ed_start + 3]),
.provides = @enumFromInt(store.extra_data.items[ed_start + 4]),
@ -2614,3 +2625,130 @@ pub fn getTargetFile(store: *const NodeStore, idx: AST.TargetFile.Idx) AST.Targe
},
}
}
/// Adds a ForClauseTypeAlias node and returns its index.
/// The alias name lives in main_token and the rigid name in data.lhs; rhs is unused.
pub fn addForClauseTypeAlias(store: *NodeStore, alias: AST.ForClauseTypeAlias) std.mem.Allocator.Error!AST.ForClauseTypeAlias.Idx {
    const nid = try store.nodes.append(store.gpa, .{
        .tag = .for_clause_type_alias,
        .main_token = alias.alias_name,
        .data = .{ .lhs = alias.rigid_name, .rhs = 0 },
        .region = alias.region,
    });
    return @enumFromInt(@intFromEnum(nid));
}
/// Returns the start position for a new Span of ForClauseTypeAlias.Idxs in scratch.
/// Callers save this value and later pass it to forClauseTypeAliasSpanFrom.
pub fn scratchForClauseTypeAliasTop(store: *NodeStore) u32 {
return store.scratch_for_clause_type_aliases.top();
}
/// Places a new AST.ForClauseTypeAlias.Idx in the scratch.
/// The accumulated idxs are later drained into a span via forClauseTypeAliasSpanFrom.
pub fn addScratchForClauseTypeAlias(store: *NodeStore, idx: AST.ForClauseTypeAlias.Idx) std.mem.Allocator.Error!void {
try store.scratch_for_clause_type_aliases.append(idx);
}
/// Creates a new span starting at start. Moves the items from scratch to extra_data.
/// The returned span's start indexes into extra_data; scratch is cleared back to `start`.
pub fn forClauseTypeAliasSpanFrom(store: *NodeStore, start: u32) std.mem.Allocator.Error!AST.ForClauseTypeAlias.Span {
    const end = store.scratch_for_clause_type_aliases.top();
    defer store.scratch_for_clause_type_aliases.clearFrom(start);
    const ed_start: u32 = @intCast(store.extra_data.items.len);
    // Copy the scratch idxs into extra_data, preserving order.
    for (store.scratch_for_clause_type_aliases.items.items[start..end]) |alias_idx| {
        try store.extra_data.append(store.gpa, @intFromEnum(alias_idx));
    }
    return .{ .span = .{ .start = ed_start, .len = end - start } };
}
/// Clears any ForClauseTypeAlias.Idxs added to scratch from start until the end.
/// Used on parse-error paths to abandon partially-collected aliases.
pub fn clearScratchForClauseTypeAliasesFrom(store: *NodeStore, start: u32) void {
store.scratch_for_clause_type_aliases.clearFrom(start);
}
/// Returns a new ForClauseTypeAlias slice for iteration.
/// The slice views extra_data; it is invalidated if extra_data grows.
pub fn forClauseTypeAliasSlice(store: *const NodeStore, span: AST.ForClauseTypeAlias.Span) []AST.ForClauseTypeAlias.Idx {
return store.sliceFromSpan(AST.ForClauseTypeAlias.Idx, span.span);
}
/// Retrieves a ForClauseTypeAlias from a stored node.
/// Inverse of addForClauseTypeAlias: alias name from main_token, rigid name from data.lhs.
pub fn getForClauseTypeAlias(store: *const NodeStore, idx: AST.ForClauseTypeAlias.Idx) AST.ForClauseTypeAlias {
const node = store.nodes.get(@enumFromInt(@intFromEnum(idx)));
std.debug.assert(node.tag == .for_clause_type_alias);
return .{
.alias_name = node.main_token,
.rigid_name = node.data.lhs,
.region = node.region,
};
}
/// Adds a RequiresEntry node and returns its index.
///
/// The entry is packed into a single node: `main_token` holds the entrypoint
/// name, `lhs` holds the start of the type_aliases span, and `rhs` packs the
/// span length (upper 16 bits) together with the type annotation index
/// (lower 16 bits). Both packed values must fit in 16 bits — getRequiresEntry
/// truncates `rhs` to u16 for the annotation index — so overflow would
/// silently corrupt the stored entry. Assert the invariant in debug builds.
pub fn addRequiresEntry(store: *NodeStore, entry: AST.RequiresEntry) std.mem.Allocator.Error!AST.RequiresEntry.Idx {
    const aliases_len = entry.type_aliases.span.len;
    const anno_raw = @intFromEnum(entry.type_anno);
    // Guard the packing invariants: wider values would bleed into the other
    // half of `rhs` and fail to round-trip through getRequiresEntry.
    std.debug.assert(aliases_len <= std.math.maxInt(u16));
    std.debug.assert(anno_raw <= std.math.maxInt(u16));
    // Pack type_aliases len and type_anno idx into rhs
    const rhs_packed: u32 = (@as(u32, aliases_len) << 16) | @as(u32, anno_raw);
    const node = Node{
        .tag = .requires_entry,
        .main_token = entry.entrypoint_name,
        .data = .{
            .lhs = entry.type_aliases.span.start,
            .rhs = rhs_packed,
        },
        .region = entry.region,
    };
    const nid = try store.nodes.append(store.gpa, node);
    return @enumFromInt(@intFromEnum(nid));
}
/// Returns the start position for a new Span of RequiresEntry.Idxs in scratch.
/// Callers save this value and later pass it to requiresEntrySpanFrom.
pub fn scratchRequiresEntryTop(store: *NodeStore) u32 {
return store.scratch_requires_entries.top();
}
/// Places a new AST.RequiresEntry.Idx in the scratch.
/// The accumulated idxs are later drained into a span via requiresEntrySpanFrom.
pub fn addScratchRequiresEntry(store: *NodeStore, idx: AST.RequiresEntry.Idx) std.mem.Allocator.Error!void {
try store.scratch_requires_entries.append(idx);
}
/// Creates a new span starting at start. Moves the items from scratch to extra_data.
/// The returned span's start indexes into extra_data; scratch is cleared back to `start`.
pub fn requiresEntrySpanFrom(store: *NodeStore, start: u32) std.mem.Allocator.Error!AST.RequiresEntry.Span {
    const end = store.scratch_requires_entries.top();
    defer store.scratch_requires_entries.clearFrom(start);
    const ed_start: u32 = @intCast(store.extra_data.items.len);
    // Copy the scratch idxs into extra_data, preserving order.
    for (store.scratch_requires_entries.items.items[start..end]) |entry_idx| {
        try store.extra_data.append(store.gpa, @intFromEnum(entry_idx));
    }
    return .{ .span = .{ .start = ed_start, .len = end - start } };
}
/// Clears any RequiresEntry.Idxs added to scratch from start until the end.
/// Used on parse-error paths to abandon partially-collected entries.
pub fn clearScratchRequiresEntriesFrom(store: *NodeStore, start: u32) void {
store.scratch_requires_entries.clearFrom(start);
}
/// Returns a new RequiresEntry slice for iteration.
/// The slice views extra_data; it is invalidated if extra_data grows.
pub fn requiresEntrySlice(store: *const NodeStore, span: AST.RequiresEntry.Span) []AST.RequiresEntry.Idx {
return store.sliceFromSpan(AST.RequiresEntry.Idx, span.span);
}
/// Retrieves a RequiresEntry from a stored node.
///
/// Inverse of addRequiresEntry: `lhs` holds the start of the type_aliases
/// span, and `rhs` packs the span length (upper 16 bits) together with the
/// type annotation index (lower 16 bits).
pub fn getRequiresEntry(store: *const NodeStore, idx: AST.RequiresEntry.Idx) AST.RequiresEntry {
const node = store.nodes.get(@enumFromInt(@intFromEnum(idx)));
std.debug.assert(node.tag == .requires_entry);
// Unpack type_aliases len and type_anno idx from rhs
const type_aliases_len: u32 = node.data.rhs >> 16;
const type_anno_idx: u16 = @truncate(node.data.rhs);
return .{
.type_aliases = .{ .span = .{ .start = node.data.lhs, .len = type_aliases_len } },
.entrypoint_name = node.main_token,
.type_anno = @enumFromInt(type_anno_idx),
.region = node.region,
};
}

View file

@ -355,7 +355,7 @@ pub fn parseHeader(self: *Parser) Error!AST.Header.Idx {
/// e.g:
/// ```roc
/// platform
/// requires {} { main! : List(Str) => {} }
/// requires { main! : List(Str) => {} }
/// exposes []
/// packages { foo: "../foo.roc" }
/// imports []
@ -399,8 +399,10 @@ pub fn parsePlatformHeader(self: *Parser) Error!AST.Header.Idx {
self.pos,
);
};
// Get requires rigids
const rigids_start = self.pos;
// Parse requires entries with for-clause syntax:
// requires { [Model : model] for main : () -> { init : ... } }
const requires_start = self.pos;
self.expect(.OpenCurly) catch {
return try self.pushMalformed(
AST.Header.Idx,
@ -408,75 +410,172 @@ pub fn parsePlatformHeader(self: *Parser) Error!AST.Header.Idx {
self.pos,
);
};
const rigids_top = self.store.scratchExposedItemTop();
self.parseCollectionSpan(
AST.ExposedItem.Idx,
.CloseCurly,
NodeStore.addScratchExposedItem,
Parser.parseExposedItem,
) catch |err| {
switch (err) {
error.ExpectedNotFound => {
self.store.clearScratchExposedItemsFrom(rigids_top);
return try self.pushMalformed(
AST.Header.Idx,
.expected_requires_rigids_close_curly,
rigids_start,
);
},
error.OutOfMemory => return error.OutOfMemory,
error.TooNested => return error.TooNested,
}
};
const rigids_span = try self.store.exposedItemSpanFrom(rigids_top);
const rigids = try self.store.addCollection(
.collection_exposed,
.{
.span = rigids_span.span,
.region = .{
.start = rigids_start,
.end = self.pos,
},
},
);
// Get requires signatures
const signatures_start = self.pos;
self.expect(.OpenCurly) catch {
return try self.pushMalformed(
AST.Header.Idx,
.expected_requires_signatures_open_curly,
self.pos,
);
};
const signatures_top = self.store.scratchAnnoRecordFieldTop();
self.parseCollectionSpan(
AST.AnnoRecordField.Idx,
.CloseCurly,
NodeStore.addScratchAnnoRecordField,
Parser.parseAnnoRecordField,
) catch |err| {
switch (err) {
error.ExpectedNotFound => {
const requires_entries_top = self.store.scratchRequiresEntryTop();
// Handle backward compatibility: `requires {} { ... }` (legacy syntax)
// If we see CloseCurly followed by OpenCurly, skip the empty rigids block
// and parse from the second curly block
var already_consumed_close_curly = false;
if (self.peek() == .CloseCurly) {
self.advance(); // consume first '}'
if (self.peek() == .OpenCurly) {
self.advance(); // consume second '{'
// Continue parsing entries from the second curly block
} else {
// Empty requires {} with no second block - we already consumed the close curly
already_consumed_close_curly = true;
}
}
// Parse requires entries (comma-separated)
// Supported syntaxes:
// 1. Simple: requires { main : Type } - no type aliases
// 2. With aliases: requires { [Model : model] for main : Type }
while (!already_consumed_close_curly and self.peek() != .CloseCurly and self.peek() != .EndOfFile) {
const entry_start = self.pos;
const type_aliases_top = self.store.scratchForClauseTypeAliasTop();
// Check if we have type aliases (starts with '[')
if (self.peek() == .OpenSquare) {
self.advance(); // consume '['
// Parse type alias mappings: [Model : model, Foo : foo]
while (self.peek() != .CloseSquare and self.peek() != .EndOfFile) {
const alias_start = self.pos;
// Expect UpperIdent for alias name (e.g., "Model")
if (self.peek() != .UpperIdent) {
self.store.clearScratchForClauseTypeAliasesFrom(type_aliases_top);
self.store.clearScratchRequiresEntriesFrom(requires_entries_top);
return try self.pushMalformed(
AST.Header.Idx,
.expected_for_clause_alias_name,
self.pos,
);
}
const alias_name = self.pos;
self.advance();
// Expect colon
self.expect(.OpColon) catch {
self.store.clearScratchForClauseTypeAliasesFrom(type_aliases_top);
self.store.clearScratchRequiresEntriesFrom(requires_entries_top);
return try self.pushMalformed(
AST.Header.Idx,
.expected_for_clause_colon,
self.pos,
);
};
// Expect LowerIdent for rigid name (e.g., "model")
if (self.peek() != .LowerIdent) {
self.store.clearScratchForClauseTypeAliasesFrom(type_aliases_top);
self.store.clearScratchRequiresEntriesFrom(requires_entries_top);
return try self.pushMalformed(
AST.Header.Idx,
.expected_for_clause_rigid_name,
self.pos,
);
}
const rigid_name = self.pos;
self.advance();
const alias_idx = try self.store.addForClauseTypeAlias(.{
.alias_name = alias_name,
.rigid_name = rigid_name,
.region = .{ .start = alias_start, .end = self.pos },
});
try self.store.addScratchForClauseTypeAlias(alias_idx);
// Check for comma (more aliases) or close square
if (self.peek() == .Comma) {
self.advance();
} else {
break;
}
}
self.expect(.CloseSquare) catch {
self.store.clearScratchForClauseTypeAliasesFrom(type_aliases_top);
self.store.clearScratchRequiresEntriesFrom(requires_entries_top);
return try self.pushMalformed(
AST.Header.Idx,
.expected_requires_signatures_close_curly,
signatures_start,
.expected_for_clause_close_square,
self.pos,
);
},
error.OutOfMemory => return error.OutOfMemory,
error.TooNested => return error.TooNested,
};
// Expect "for" keyword after type aliases
self.expect(.KwFor) catch {
self.store.clearScratchForClauseTypeAliasesFrom(type_aliases_top);
self.store.clearScratchRequiresEntriesFrom(requires_entries_top);
return try self.pushMalformed(
AST.Header.Idx,
.expected_for_keyword,
self.pos,
);
};
}
};
const signatures_span = try self.store.annoRecordFieldSpanFrom(signatures_top);
const signatures = try self.store.addTypeAnno(.{ .record = .{
.fields = signatures_span,
.ext = null,
.region = .{
.start = signatures_start,
.end = self.pos,
},
} });
// No type aliases - just parse entrypoint directly
const type_aliases_span = try self.store.forClauseTypeAliasSpanFrom(type_aliases_top);
// Expect entrypoint name (LowerIdent, e.g., "main")
if (self.peek() != .LowerIdent) {
self.store.clearScratchRequiresEntriesFrom(requires_entries_top);
return try self.pushMalformed(
AST.Header.Idx,
.expected_for_clause_entrypoint_name,
self.pos,
);
}
const entrypoint_name = self.pos;
self.advance();
// Expect colon before type annotation
self.expect(.OpColon) catch {
self.store.clearScratchRequiresEntriesFrom(requires_entries_top);
return try self.pushMalformed(
AST.Header.Idx,
.expected_for_clause_type_colon,
self.pos,
);
};
// Parse the type annotation
// Use .not_looking_for_args to properly handle function types like `I64, I64 -> I64`
const type_anno = try self.parseTypeAnno(.not_looking_for_args);
const entry_idx = try self.store.addRequiresEntry(.{
.type_aliases = type_aliases_span,
.entrypoint_name = entrypoint_name,
.type_anno = type_anno,
.region = .{ .start = entry_start, .end = self.pos },
});
try self.store.addScratchRequiresEntry(entry_idx);
// Check for comma (more entries) or close curly
if (self.peek() == .Comma) {
self.advance();
} else {
break;
}
}
if (!already_consumed_close_curly) {
self.expect(.CloseCurly) catch {
self.store.clearScratchRequiresEntriesFrom(requires_entries_top);
return try self.pushMalformed(
AST.Header.Idx,
.expected_requires_signatures_close_curly,
requires_start,
);
};
}
const requires_entries = try self.store.requiresEntrySpanFrom(requires_entries_top);
// Get exposes
self.expect(.KwExposes) catch {
@ -622,8 +721,7 @@ pub fn parsePlatformHeader(self: *Parser) Error!AST.Header.Idx {
return self.store.addHeader(.{ .platform = .{
.name = name,
.requires_rigids = rigids,
.requires_signatures = signatures,
.requires_entries = requires_entries,
.exposes = exposes,
.packages = packages,
.provides = provides,

View file

@ -87,8 +87,7 @@ test "NodeStore round trip - Headers" {
.name = rand_token_idx(),
.packages = rand_idx(AST.Collection.Idx),
.provides = rand_idx(AST.Collection.Idx),
.requires_rigids = rand_idx(AST.Collection.Idx),
.requires_signatures = rand_idx(AST.TypeAnno.Idx),
.requires_entries = .{ .span = .{ .start = 0, .len = 0 } },
.targets = null,
.region = rand_region(),
},

View file

@ -981,6 +981,7 @@ fn compileSource(source: []const u8) !CompilerStageData {
.all_statements = serialized_ptr.all_statements,
.exports = serialized_ptr.exports,
.requires_types = serialized_ptr.requires_types.deserialize(@as(i64, @intCast(base_ptr))).*,
.for_clause_aliases = serialized_ptr.for_clause_aliases.deserialize(@as(i64, @intCast(base_ptr))).*,
.builtin_statements = serialized_ptr.builtin_statements,
.external_decls = serialized_ptr.external_decls.deserialize(@as(i64, @intCast(base_ptr))).*,
.imports = (try serialized_ptr.imports.deserialize(@as(i64, @intCast(base_ptr)), gpa)).*,
@ -993,6 +994,7 @@ fn compileSource(source: []const u8) !CompilerStageData {
.deferred_numeric_literals = try ModuleEnv.DeferredNumericLiteral.SafeList.initCapacity(gpa, 0),
.import_mapping = types.import_mapping.ImportMapping.init(gpa),
.method_idents = serialized_ptr.method_idents.deserialize(@as(i64, @intCast(base_ptr))).*,
.rigid_vars = std.AutoHashMapUnmanaged(base.Ident.Idx, types.Var){},
};
logDebug("loadCompiledModule: ModuleEnv deserialized successfully\n", .{});

View file

@ -82,6 +82,7 @@ fn loadCompiledModule(gpa: std.mem.Allocator, bin_data: []const u8, module_name:
.all_statements = serialized_ptr.all_statements,
.exports = serialized_ptr.exports,
.requires_types = serialized_ptr.requires_types.deserialize(@as(i64, @intCast(base_ptr))).*,
.for_clause_aliases = serialized_ptr.for_clause_aliases.deserialize(@as(i64, @intCast(base_ptr))).*,
.builtin_statements = serialized_ptr.builtin_statements,
.external_decls = serialized_ptr.external_decls.deserialize(@as(i64, @intCast(base_ptr))).*,
.imports = (try serialized_ptr.imports.deserialize(@as(i64, @intCast(base_ptr)), gpa)).*,
@ -94,6 +95,7 @@ fn loadCompiledModule(gpa: std.mem.Allocator, bin_data: []const u8, module_name:
.deferred_numeric_literals = try ModuleEnv.DeferredNumericLiteral.SafeList.initCapacity(gpa, 0),
.import_mapping = types.import_mapping.ImportMapping.init(gpa),
.method_idents = serialized_ptr.method_idents.deserialize(@as(i64, @intCast(base_ptr))).*,
.rigid_vars = std.AutoHashMapUnmanaged(base.Ident.Idx, types.Var){},
};
return LoadedModule{

View file

@ -1,5 +1,7 @@
platform ""
requires {} { main! : () => {} }
requires {
main! : () => {}
}
exposes [Stdout, Stderr, Stdin]
packages {}
provides { main_for_host!: "main" }

View file

@ -1,12 +1,9 @@
app [init, update, render] { pf: platform "./platform/main.roc" }
app [main] { pf: platform "./platform/main.roc" }
Model : { value: I64 }
init : {} -> Model
init = |{}| { value: 0 }
update : Model, I64 -> Model
update = |m, delta| { value: m.value + delta }
render : Model -> I64
render = |m| m.value
main = |{}| {
init: |{}| { value: 0 },
update: |m, delta| { value: m.value + delta },
render: |m| m.value,
}

View file

@ -128,7 +128,6 @@ fn platform_main() !void {
success_count += 1;
// Test 2: render takes Box(model), returns I64
// This should trigger the TypeMismatch bug if it exists
try stdout.print("\n=== Test 2: render(Box(model)) -> I64 ===\n", .{});
var render_result: i64 = undefined;
roc__render(&roc_ops, @as(*anyopaque, @ptrCast(&render_result)), @as(*anyopaque, @ptrCast(&boxed_model)));
@ -142,7 +141,6 @@ fn platform_main() !void {
}
// Test 3: update takes (Box(model), I64), returns Box(model)
// This should also trigger the TypeMismatch bug if it exists
try stdout.print("\n=== Test 3: update(Box(model), 42) -> Box(model) ===\n", .{});
const UpdateArgs = extern struct { boxed_model: Box, delta: i64 };
var update_args = UpdateArgs{ .boxed_model = boxed_model, .delta = 42 };

View file

@ -1,5 +1,11 @@
platform ""
requires { model } { init : {} -> model, update : model, I64 -> model, render : model -> I64 }
requires {
[Model : model] for main : {} -> {
init : {} -> model,
update : model, I64 -> model,
render : model -> I64
}
}
exposes []
packages {}
provides { init_for_host: "init", update_for_host: "update", render_for_host: "render" }
@ -15,21 +21,27 @@ platform ""
}
}
# Returns Box(model) - this works (return value)
# Explicit type annotations for host-facing functions
init_for_host : {} -> Box(model)
init_for_host = |{}| Box.box(init({}))
init_for_host = |{}| {
callbacks = main({})
init_fn = callbacks.init
record = init_fn({})
Box.box(record)
}
# Takes Box(model) as parameter - this should trigger the bug
# Also takes I64 which host can provide
update_for_host : Box(model), I64 -> Box(model)
update_for_host = |boxed_model, value| {
m = Box.unbox(boxed_model)
Box.box(update(m, value))
callbacks = main({})
update_fn = callbacks.update
Box.box(update_fn(m, value))
}
# Takes Box(model) as parameter, returns I64 for host verification
render_for_host : Box(model) -> I64
render_for_host = |boxed_model| {
m = Box.unbox(boxed_model)
render(m)
callbacks = main({})
render_fn = callbacks.render
render_fn(m)
}

View file

@ -31,7 +31,7 @@ const expected_safelist_u8_size = 24;
const expected_safelist_u32_size = 24;
const expected_safemultilist_teststruct_size = 24;
const expected_safemultilist_node_size = 24;
const expected_moduleenv_size = 1104; // Platform-independent size
const expected_moduleenv_size = 1128; // Platform-independent size
const expected_nodestore_size = 96; // Platform-independent size
// Compile-time assertions - build will fail if sizes don't match expected values

View file

@ -1,17 +1,13 @@
# META
~~~ini
description=Multiline formatting platform
description=Multiline formatting platform with for-clause syntax
type=file
~~~
# SOURCE
~~~roc
platform "pf"
requires {
R1,
R2,
} {
r1 : R1 -> R2,
r2 : R1 -> R2,
[R1 : r1, R2 : r2] for main : R1 -> R2
}
exposes [
E1,
@ -21,22 +17,21 @@ platform "pf"
pa1: "pa1",
pa2: "pa2",
}
# imports [I1.{ I11, I12, }, I2.{ I21, I22, },]
provides {
pr1: "not implemented",
pr2: "not implemented",
}
~~~
# EXPECTED
EXPOSED BUT NOT DEFINED - platform.md:19:3:19:25
EXPOSED BUT NOT DEFINED - platform.md:20:3:20:25
EXPOSED BUT NOT DEFINED - platform.md:10:3:10:5
EXPOSED BUT NOT DEFINED - platform.md:11:3:11:5
EXPOSED BUT NOT DEFINED - platform.md:14:3:14:25
EXPOSED BUT NOT DEFINED - platform.md:15:3:15:25
EXPOSED BUT NOT DEFINED - platform.md:6:3:6:5
EXPOSED BUT NOT DEFINED - platform.md:7:3:7:5
# PROBLEMS
**EXPOSED BUT NOT DEFINED**
The module header says that `pr1` is exposed, but it is not defined anywhere in this module.
**platform.md:19:3:19:25:**
**platform.md:14:3:14:25:**
```roc
pr1: "not implemented",
```
@ -46,7 +41,7 @@ You can fix this by either defining `pr1` in this module, or by removing it from
**EXPOSED BUT NOT DEFINED**
The module header says that `pr2` is exposed, but it is not defined anywhere in this module.
**platform.md:20:3:20:25:**
**platform.md:15:3:15:25:**
```roc
pr2: "not implemented",
```
@ -56,7 +51,7 @@ You can fix this by either defining `pr2` in this module, or by removing it from
**EXPOSED BUT NOT DEFINED**
The module header says that `E1` is exposed, but it is not defined anywhere in this module.
**platform.md:10:3:10:5:**
**platform.md:6:3:6:5:**
```roc
E1,
```
@ -66,7 +61,7 @@ You can fix this by either defining `E1` in this module, or by removing it from
**EXPOSED BUT NOT DEFINED**
The module header says that `E2` is exposed, but it is not defined anywhere in this module.
**platform.md:11:3:11:5:**
**platform.md:7:3:7:5:**
```roc
E2,
```
@ -77,11 +72,7 @@ You can fix this by either defining `E2` in this module, or by removing it from
~~~zig
KwPlatform,StringStart,StringPart,StringEnd,
KwRequires,OpenCurly,
UpperIdent,Comma,
UpperIdent,Comma,
CloseCurly,OpenCurly,
LowerIdent,OpColon,UpperIdent,OpArrow,UpperIdent,Comma,
LowerIdent,OpColon,UpperIdent,OpArrow,UpperIdent,Comma,
OpenSquare,UpperIdent,OpColon,LowerIdent,Comma,UpperIdent,OpColon,LowerIdent,CloseSquare,KwFor,LowerIdent,OpColon,UpperIdent,OpArrow,UpperIdent,
CloseCurly,
KwExposes,OpenSquare,
UpperIdent,Comma,
@ -101,15 +92,12 @@ EndOfFile,
~~~clojure
(file
(platform (name "pf")
(rigids
(exposed-upper-ident (text "R1"))
(exposed-upper-ident (text "R2")))
(ty-record
(anno-record-field (name "r1")
(ty-fn
(ty (name "R1"))
(ty (name "R2"))))
(anno-record-field (name "r2")
(requires
(requires-entry
(type-aliases
(alias (name "R1") (rigid "r1"))
(alias (name "R2") (rigid "r2")))
(entrypoint "main")
(ty-fn
(ty (name "R1"))
(ty (name "R2")))))

View file

@ -1,17 +1,13 @@
# META
~~~ini
description=Multiline without comma formatting platform
description=Multiline without comma formatting platform with for-clause syntax
type=file
~~~
# SOURCE
~~~roc
platform "pf"
requires {
R1,
R2
} {
r1 : R1 -> R2,
r2 : R1 -> R2
[R1 : r1, R2 : r2] for main : R1 -> R2
}
exposes [
E1,
@ -21,22 +17,21 @@ platform "pf"
pa1: "pa1",
pa2: "pa2"
}
# imports [I1.{ I11, I12, }, I2.{ I21, I22, },]
provides {
pr1: "not implemented",
pr2: "not implemented",
}
~~~
# EXPECTED
EXPOSED BUT NOT DEFINED - platform.md:19:3:19:25
EXPOSED BUT NOT DEFINED - platform.md:20:3:20:25
EXPOSED BUT NOT DEFINED - platform.md:10:3:10:5
EXPOSED BUT NOT DEFINED - platform.md:11:3:11:5
EXPOSED BUT NOT DEFINED - platform.md:14:3:14:25
EXPOSED BUT NOT DEFINED - platform.md:15:3:15:25
EXPOSED BUT NOT DEFINED - platform.md:6:3:6:5
EXPOSED BUT NOT DEFINED - platform.md:7:3:7:5
# PROBLEMS
**EXPOSED BUT NOT DEFINED**
The module header says that `pr1` is exposed, but it is not defined anywhere in this module.
**platform.md:19:3:19:25:**
**platform.md:14:3:14:25:**
```roc
pr1: "not implemented",
```
@ -46,7 +41,7 @@ You can fix this by either defining `pr1` in this module, or by removing it from
**EXPOSED BUT NOT DEFINED**
The module header says that `pr2` is exposed, but it is not defined anywhere in this module.
**platform.md:20:3:20:25:**
**platform.md:15:3:15:25:**
```roc
pr2: "not implemented",
```
@ -56,7 +51,7 @@ You can fix this by either defining `pr2` in this module, or by removing it from
**EXPOSED BUT NOT DEFINED**
The module header says that `E1` is exposed, but it is not defined anywhere in this module.
**platform.md:10:3:10:5:**
**platform.md:6:3:6:5:**
```roc
E1,
```
@ -66,7 +61,7 @@ You can fix this by either defining `E1` in this module, or by removing it from
**EXPOSED BUT NOT DEFINED**
The module header says that `E2` is exposed, but it is not defined anywhere in this module.
**platform.md:11:3:11:5:**
**platform.md:7:3:7:5:**
```roc
E2
```
@ -77,11 +72,7 @@ You can fix this by either defining `E2` in this module, or by removing it from
~~~zig
KwPlatform,StringStart,StringPart,StringEnd,
KwRequires,OpenCurly,
UpperIdent,Comma,
UpperIdent,
CloseCurly,OpenCurly,
LowerIdent,OpColon,UpperIdent,OpArrow,UpperIdent,Comma,
LowerIdent,OpColon,UpperIdent,OpArrow,UpperIdent,
OpenSquare,UpperIdent,OpColon,LowerIdent,Comma,UpperIdent,OpColon,LowerIdent,CloseSquare,KwFor,LowerIdent,OpColon,UpperIdent,OpArrow,UpperIdent,
CloseCurly,
KwExposes,OpenSquare,
UpperIdent,Comma,
@ -101,15 +92,12 @@ EndOfFile,
~~~clojure
(file
(platform (name "pf")
(rigids
(exposed-upper-ident (text "R1"))
(exposed-upper-ident (text "R2")))
(ty-record
(anno-record-field (name "r1")
(ty-fn
(ty (name "R1"))
(ty (name "R2"))))
(anno-record-field (name "r2")
(requires
(requires-entry
(type-aliases
(alias (name "R1") (rigid "r1"))
(alias (name "R2") (rigid "r2")))
(entrypoint "main")
(ty-fn
(ty (name "R1"))
(ty (name "R2")))))
@ -136,11 +124,7 @@ EndOfFile,
~~~roc
platform "pf"
requires {
R1,
R2,
} {
r1 : R1 -> R2,
r2 : R1 -> R2,
[R1 : r1, R2 : r2] for main : R1 -> R2
}
exposes [
E1,
@ -150,7 +134,6 @@ platform "pf"
pa1: "pa1",
pa2: "pa2",
}
# imports [I1.{ I11, I12, }, I2.{ I21, I22, },]
provides {
pr1: "not implemented",
pr2: "not implemented",

View file

@ -1,27 +1,26 @@
# META
~~~ini
description=Singleline formatting platform
description=Singleline formatting platform with for-clause syntax
type=file
~~~
# SOURCE
~~~roc
platform "pf"
requires { R1, R2 } { r1 : R1 -> R2, r2 : R1 -> R2 }
requires { [R1 : r1, R2 : r2] for main : R1 -> R2 }
exposes [E1, E2]
packages { pa1: "pa1", pa2: "pa2" }
# imports [I1.{ I11, I12 }, I2.{ I21, I22 }]
provides { pr1: "not implemented", pr2: "not implemented" }
~~~
# EXPECTED
EXPOSED BUT NOT DEFINED - platform.md:6:13:6:35
EXPOSED BUT NOT DEFINED - platform.md:6:37:6:59
EXPOSED BUT NOT DEFINED - platform.md:5:13:5:35
EXPOSED BUT NOT DEFINED - platform.md:5:37:5:59
EXPOSED BUT NOT DEFINED - platform.md:3:11:3:13
EXPOSED BUT NOT DEFINED - platform.md:3:15:3:17
# PROBLEMS
**EXPOSED BUT NOT DEFINED**
The module header says that `pr1` is exposed, but it is not defined anywhere in this module.
**platform.md:6:13:6:35:**
**platform.md:5:13:5:35:**
```roc
provides { pr1: "not implemented", pr2: "not implemented" }
```
@ -31,7 +30,7 @@ You can fix this by either defining `pr1` in this module, or by removing it from
**EXPOSED BUT NOT DEFINED**
The module header says that `pr2` is exposed, but it is not defined anywhere in this module.
**platform.md:6:37:6:59:**
**platform.md:5:37:5:59:**
```roc
provides { pr1: "not implemented", pr2: "not implemented" }
```
@ -61,7 +60,7 @@ You can fix this by either defining `E2` in this module, or by removing it from
# TOKENS
~~~zig
KwPlatform,StringStart,StringPart,StringEnd,
KwRequires,OpenCurly,UpperIdent,Comma,UpperIdent,CloseCurly,OpenCurly,LowerIdent,OpColon,UpperIdent,OpArrow,UpperIdent,Comma,LowerIdent,OpColon,UpperIdent,OpArrow,UpperIdent,CloseCurly,
KwRequires,OpenCurly,OpenSquare,UpperIdent,OpColon,LowerIdent,Comma,UpperIdent,OpColon,LowerIdent,CloseSquare,KwFor,LowerIdent,OpColon,UpperIdent,OpArrow,UpperIdent,CloseCurly,
KwExposes,OpenSquare,UpperIdent,Comma,UpperIdent,CloseSquare,
KwPackages,OpenCurly,LowerIdent,OpColon,StringStart,StringPart,StringEnd,Comma,LowerIdent,OpColon,StringStart,StringPart,StringEnd,CloseCurly,
KwProvides,OpenCurly,LowerIdent,OpColon,StringStart,StringPart,StringEnd,Comma,LowerIdent,OpColon,StringStart,StringPart,StringEnd,CloseCurly,
@ -71,15 +70,12 @@ EndOfFile,
~~~clojure
(file
(platform (name "pf")
(rigids
(exposed-upper-ident (text "R1"))
(exposed-upper-ident (text "R2")))
(ty-record
(anno-record-field (name "r1")
(ty-fn
(ty (name "R1"))
(ty (name "R2"))))
(anno-record-field (name "r2")
(requires
(requires-entry
(type-aliases
(alias (name "R1") (rigid "r1"))
(alias (name "R2") (rigid "r2")))
(entrypoint "main")
(ty-fn
(ty (name "R1"))
(ty (name "R2")))))
@ -104,7 +100,13 @@ EndOfFile,
~~~
# FORMATTED
~~~roc
NO CHANGE
platform "pf"
requires {
[R1 : r1, R2 : r2] for main : R1 -> R2
}
exposes [E1, E2]
packages { pa1: "pa1", pa2: "pa2" }
provides { pr1: "not implemented", pr2: "not implemented" }
~~~
# CANONICALIZE
~~~clojure

View file

@ -1,27 +1,26 @@
# META
~~~ini
description=Singleline with comma formatting platform
description=Singleline with comma formatting platform with for-clause syntax
type=file
~~~
# SOURCE
~~~roc
platform "pf"
requires { R1, R2, } { r1 : R1 -> R2, r2 : R1 -> R2, }
requires { [R1 : r1, R2 : r2,] for main : R1 -> R2 }
exposes [E1, E2,]
packages { pa1: "pa1", pa2: "pa2", }
# imports [I1.{ I11, I12, }, I2.{ I21, I22, },]
provides { pr1: "not implemented", pr2: "not implemented", }
~~~
# EXPECTED
EXPOSED BUT NOT DEFINED - platform.md:6:13:6:35
EXPOSED BUT NOT DEFINED - platform.md:6:37:6:59
EXPOSED BUT NOT DEFINED - platform.md:5:13:5:35
EXPOSED BUT NOT DEFINED - platform.md:5:37:5:59
EXPOSED BUT NOT DEFINED - platform.md:3:11:3:13
EXPOSED BUT NOT DEFINED - platform.md:3:15:3:17
# PROBLEMS
**EXPOSED BUT NOT DEFINED**
The module header says that `pr1` is exposed, but it is not defined anywhere in this module.
**platform.md:6:13:6:35:**
**platform.md:5:13:5:35:**
```roc
provides { pr1: "not implemented", pr2: "not implemented", }
```
@ -31,7 +30,7 @@ You can fix this by either defining `pr1` in this module, or by removing it from
**EXPOSED BUT NOT DEFINED**
The module header says that `pr2` is exposed, but it is not defined anywhere in this module.
**platform.md:6:37:6:59:**
**platform.md:5:37:5:59:**
```roc
provides { pr1: "not implemented", pr2: "not implemented", }
```
@ -61,7 +60,7 @@ You can fix this by either defining `E2` in this module, or by removing it from
# TOKENS
~~~zig
KwPlatform,StringStart,StringPart,StringEnd,
KwRequires,OpenCurly,UpperIdent,Comma,UpperIdent,Comma,CloseCurly,OpenCurly,LowerIdent,OpColon,UpperIdent,OpArrow,UpperIdent,Comma,LowerIdent,OpColon,UpperIdent,OpArrow,UpperIdent,Comma,CloseCurly,
KwRequires,OpenCurly,OpenSquare,UpperIdent,OpColon,LowerIdent,Comma,UpperIdent,OpColon,LowerIdent,Comma,CloseSquare,KwFor,LowerIdent,OpColon,UpperIdent,OpArrow,UpperIdent,CloseCurly,
KwExposes,OpenSquare,UpperIdent,Comma,UpperIdent,Comma,CloseSquare,
KwPackages,OpenCurly,LowerIdent,OpColon,StringStart,StringPart,StringEnd,Comma,LowerIdent,OpColon,StringStart,StringPart,StringEnd,Comma,CloseCurly,
KwProvides,OpenCurly,LowerIdent,OpColon,StringStart,StringPart,StringEnd,Comma,LowerIdent,OpColon,StringStart,StringPart,StringEnd,Comma,CloseCurly,
@ -71,15 +70,12 @@ EndOfFile,
~~~clojure
(file
(platform (name "pf")
(rigids
(exposed-upper-ident (text "R1"))
(exposed-upper-ident (text "R2")))
(ty-record
(anno-record-field (name "r1")
(ty-fn
(ty (name "R1"))
(ty (name "R2"))))
(anno-record-field (name "r2")
(requires
(requires-entry
(type-aliases
(alias (name "R1") (rigid "r1"))
(alias (name "R2") (rigid "r2")))
(entrypoint "main")
(ty-fn
(ty (name "R1"))
(ty (name "R2")))))
@ -106,11 +102,7 @@ EndOfFile,
~~~roc
platform "pf"
requires {
R1,
R2,
} {
r1 : R1 -> R2,
r2 : R1 -> R2,
[R1 : r1, R2 : r2] for main : R1 -> R2
}
exposes [
E1,
@ -120,7 +112,6 @@ platform "pf"
pa1: "pa1",
pa2: "pa2",
}
# imports [I1.{ I11, I12, }, I2.{ I21, I22, },]
provides {
pr1: "not implemented",
pr2: "not implemented",

View file

@ -24,18 +24,7 @@ ar,
]
~~~
# EXPECTED
PARSE ERROR - fuzz_crash_029.md:4:4:4:5
PARSE ERROR - fuzz_crash_029.md:5:14:5:17
PARSE ERROR - fuzz_crash_029.md:5:19:5:21
PARSE ERROR - fuzz_crash_029.md:5:22:5:23
PARSE ERROR - fuzz_crash_029.md:5:23:5:24
PARSE ERROR - fuzz_crash_029.md:5:24:5:25
PARSE ERROR - fuzz_crash_029.md:6:4:6:5
PARSE ERROR - fuzz_crash_029.md:7:2:7:9
PARSE ERROR - fuzz_crash_029.md:8:3:8:4
PARSE ERROR - fuzz_crash_029.md:9:3:9:4
PARSE ERROR - fuzz_crash_029.md:10:2:10:10
PARSE ERROR - fuzz_crash_029.md:11:3:11:8
EXPECTED OPENING BRACE - fuzz_crash_029.md:11:3:11:8
PARSE ERROR - fuzz_crash_029.md:12:3:12:4
UNEXPECTED TOKEN IN TYPE ANNOTATION - fuzz_crash_029.md:13:6:13:7
PARSE ERROR - fuzz_crash_029.md:13:7:13:10
@ -53,142 +42,9 @@ PARSE ERROR - fuzz_crash_029.md:16:3:16:4
PARSE ERROR - fuzz_crash_029.md:17:3:17:4
MALFORMED TYPE - fuzz_crash_029.md:13:6:13:7
# PROBLEMS
**PARSE ERROR**
A parsing error occurred: `expected_requires_rigids_close_curly`
This is an unexpected parsing error. Please check your syntax.
**fuzz_crash_029.md:4:4:4:5:**
```roc
{ # d
```
^
**PARSE ERROR**
A parsing error occurred: `invalid_type_arg`
This is an unexpected parsing error. Please check your syntax.
**fuzz_crash_029.md:5:14:5:17:**
```roc
n! : List(Str) => {}, # ure
```
^^^
**PARSE ERROR**
Type applications require parentheses around their type arguments.
I found a type followed by what looks like a type argument, but they need to be connected with parentheses.
Instead of:
**List U8**
Use:
**List(U8)**
Other valid examples:
`Dict(Str, Num)`
`Try(a, Str)`
`Maybe(List(U64))`
**fuzz_crash_029.md:5:19:5:21:**
```roc
n! : List(Str) => {}, # ure
```
^^
**PARSE ERROR**
A parsing error occurred: `statement_unexpected_token`
This is an unexpected parsing error. Please check your syntax.
**fuzz_crash_029.md:5:22:5:23:**
```roc
n! : List(Str) => {}, # ure
```
^
**PARSE ERROR**
A parsing error occurred: `statement_unexpected_token`
This is an unexpected parsing error. Please check your syntax.
**fuzz_crash_029.md:5:23:5:24:**
```roc
n! : List(Str) => {}, # ure
```
^
**PARSE ERROR**
A parsing error occurred: `statement_unexpected_token`
This is an unexpected parsing error. Please check your syntax.
**fuzz_crash_029.md:5:24:5:25:**
```roc
n! : List(Str) => {}, # ure
```
^
**PARSE ERROR**
A parsing error occurred: `statement_unexpected_token`
This is an unexpected parsing error. Please check your syntax.
**fuzz_crash_029.md:6:4:6:5:**
```roc
} #Ce
```
^
**PARSE ERROR**
A parsing error occurred: `statement_unexpected_token`
This is an unexpected parsing error. Please check your syntax.
**fuzz_crash_029.md:7:2:7:9:**
```roc
exposes #rd
```
^^^^^^^
**PARSE ERROR**
A parsing error occurred: `statement_unexpected_token`
This is an unexpected parsing error. Please check your syntax.
**fuzz_crash_029.md:8:3:8:4:**
```roc
[ #
```
^
**PARSE ERROR**
A parsing error occurred: `statement_unexpected_token`
This is an unexpected parsing error. Please check your syntax.
**fuzz_crash_029.md:9:3:9:4:**
```roc
] # Cse
```
^
**PARSE ERROR**
A parsing error occurred: `statement_unexpected_token`
This is an unexpected parsing error. Please check your syntax.
**fuzz_crash_029.md:10:2:10:10:**
```roc
packages # Cd
```
^^^^^^^^
**PARSE ERROR**
A parsing error occurred: `statement_unexpected_token`
This is an unexpected parsing error. Please check your syntax.
**EXPECTED OPENING BRACE**
Platform headers must have a `packages` section that lists package dependencies.
For example: packages { base: "../base/main.roc" }
**fuzz_crash_029.md:11:3:11:8:**
```roc
@ -408,18 +264,8 @@ EndOfFile,
# PARSE
~~~clojure
(file
(malformed-header (tag "expected_requires_rigids_close_curly"))
(malformed-header (tag "expected_packages_open_curly"))
(statements
(s-malformed (tag "expected_colon_after_type_annotation"))
(s-malformed (tag "statement_unexpected_token"))
(s-malformed (tag "statement_unexpected_token"))
(s-malformed (tag "statement_unexpected_token"))
(s-malformed (tag "statement_unexpected_token"))
(s-malformed (tag "statement_unexpected_token"))
(s-malformed (tag "statement_unexpected_token"))
(s-malformed (tag "statement_unexpected_token"))
(s-malformed (tag "statement_unexpected_token"))
(s-malformed (tag "statement_unexpected_token"))
(s-malformed (tag "statement_unexpected_token"))
(s-type-anno (name "pkg")
(ty-malformed (tag "ty_anno_unexpected_token")))
@ -439,12 +285,6 @@ EndOfFile,
~~~
# FORMATTED
~~~roc
# ure
# Ce
# rd
#
# Cse
# Cd
# Cd
# pen
pkg :

View file

@ -11,7 +11,7 @@ requires{}{n:0[import S exposing[
# EXPECTED
UNCLOSED STRING - fuzz_crash_061.md:1:9:1:10
UNEXPECTED TOKEN IN TYPE ANNOTATION - fuzz_crash_061.md:2:14:2:15
PARSE ERROR - fuzz_crash_061.md:2:11:2:12
PARSE ERROR - fuzz_crash_061.md:2:9:2:10
PARSE ERROR - fuzz_crash_061.md:2:16:2:22
# PROBLEMS
**UNCLOSED STRING**
@ -39,11 +39,11 @@ requires{}{n:0[import S exposing[
A parsing error occurred: `expected_requires_signatures_close_curly`
This is an unexpected parsing error. Please check your syntax.
**fuzz_crash_061.md:2:11:2:12:**
**fuzz_crash_061.md:2:9:2:10:**
```roc
requires{}{n:0[import S exposing[
```
^
^
**PARSE ERROR**

View file

@ -1,15 +1,15 @@
# META
~~~ini
description=platform_header_empty (1)
description=platform_header_empty (1) with for-clause syntax
type=file
~~~
# SOURCE
~~~roc
platform "foo"
requires {} {}
exposes []
packages {}
provides {}
requires {}
exposes []
packages {}
provides {}
~~~
# EXPECTED
NIL
@ -18,7 +18,7 @@ NIL
# TOKENS
~~~zig
KwPlatform,StringStart,StringPart,StringEnd,
KwRequires,OpenCurly,CloseCurly,OpenCurly,CloseCurly,
KwRequires,OpenCurly,CloseCurly,
KwExposes,OpenSquare,CloseSquare,
KwPackages,OpenCurly,CloseCurly,
KwProvides,OpenCurly,CloseCurly,
@ -28,8 +28,7 @@ EndOfFile,
~~~clojure
(file
(platform (name "foo")
(rigids)
(ty-record)
(requires)
(exposes)
(packages)
(provides))
@ -37,7 +36,11 @@ EndOfFile,
~~~
# FORMATTED
~~~roc
NO CHANGE
platform "foo"
requires {}
exposes []
packages {}
provides {}
~~~
# CANONICALIZE
~~~clojure

View file

@ -1,11 +1,11 @@
# META
~~~ini
description=platform_header_empty (4)
description=platform_header_empty (4) with for-clause syntax
type=file
~~~
# SOURCE
~~~roc
platform "foo" requires {} {} exposes [] packages {} provides {}
platform "foo" requires {} exposes [] packages {} provides {}
~~~
# EXPECTED
NIL
@ -13,15 +13,14 @@ NIL
NIL
# TOKENS
~~~zig
KwPlatform,StringStart,StringPart,StringEnd,KwRequires,OpenCurly,CloseCurly,OpenCurly,CloseCurly,KwExposes,OpenSquare,CloseSquare,KwPackages,OpenCurly,CloseCurly,KwProvides,OpenCurly,CloseCurly,
KwPlatform,StringStart,StringPart,StringEnd,KwRequires,OpenCurly,CloseCurly,KwExposes,OpenSquare,CloseSquare,KwPackages,OpenCurly,CloseCurly,KwProvides,OpenCurly,CloseCurly,
EndOfFile,
~~~
# PARSE
~~~clojure
(file
(platform (name "foo")
(rigids)
(ty-record)
(requires)
(exposes)
(packages)
(provides))
@ -30,7 +29,7 @@ EndOfFile,
# FORMATTED
~~~roc
platform "foo"
requires {} {}
requires {}
exposes []
packages {}
provides {}

View file

@ -1,15 +1,15 @@
# META
~~~ini
description=platform_header_empty (5)
description=platform_header_empty (5) with for-clause syntax
type=file
~~~
# SOURCE
~~~roc
platform "foo"
requires {} {}
exposes []
packages {}
provides {}
requires {}
exposes []
packages {}
provides {}
~~~
# EXPECTED
NIL
@ -18,7 +18,7 @@ NIL
# TOKENS
~~~zig
KwPlatform,StringStart,StringPart,StringEnd,
KwRequires,OpenCurly,CloseCurly,OpenCurly,CloseCurly,
KwRequires,OpenCurly,CloseCurly,
KwExposes,OpenSquare,CloseSquare,
KwPackages,OpenCurly,CloseCurly,
KwProvides,OpenCurly,CloseCurly,
@ -28,8 +28,7 @@ EndOfFile,
~~~clojure
(file
(platform (name "foo")
(rigids)
(ty-record)
(requires)
(exposes)
(packages)
(provides))
@ -37,7 +36,11 @@ EndOfFile,
~~~
# FORMATTED
~~~roc
NO CHANGE
platform "foo"
requires {}
exposes []
packages {}
provides {}
~~~
# CANONICALIZE
~~~clojure

View file

@ -1,15 +1,17 @@
# META
~~~ini
description=Simple plaform module
description=Simple platform module with for-clause syntax
type=file
~~~
# SOURCE
~~~roc
platform ""
requires {} { main : Str -> Str }
exposes []
packages {}
provides { entrypoint: "roc__entrypoint" }
requires {
main : Str -> Str
}
exposes []
packages {}
provides { entrypoint: "roc__entrypoint" }
entrypoint : Str -> Str
entrypoint = main
@ -21,7 +23,9 @@ NIL
# TOKENS
~~~zig
KwPlatform,StringStart,StringPart,StringEnd,
KwRequires,OpenCurly,CloseCurly,OpenCurly,LowerIdent,OpColon,UpperIdent,OpArrow,UpperIdent,CloseCurly,
KwRequires,OpenCurly,
LowerIdent,OpColon,UpperIdent,OpArrow,UpperIdent,
CloseCurly,
KwExposes,OpenSquare,CloseSquare,
KwPackages,OpenCurly,CloseCurly,
KwProvides,OpenCurly,LowerIdent,OpColon,StringStart,StringPart,StringEnd,CloseCurly,
@ -33,9 +37,10 @@ EndOfFile,
~~~clojure
(file
(platform (name "")
(rigids)
(ty-record
(anno-record-field (name "main")
(requires
(requires-entry
(type-aliases)
(entrypoint "main")
(ty-fn
(ty (name "Str"))
(ty (name "Str")))))
@ -56,7 +61,16 @@ EndOfFile,
~~~
# FORMATTED
~~~roc
NO CHANGE
platform ""
requires {
main : Str -> Str
}
exposes []
packages {}
provides { entrypoint: "roc__entrypoint" }
entrypoint : Str -> Str
entrypoint = main
~~~
# CANONICALIZE
~~~clojure

View file

@ -1,12 +1,14 @@
# META
~~~ini
description=the int test platform
description=the int test platform with for-clause syntax
type=file
~~~
# SOURCE
~~~roc
platform ""
requires {} { multiplyInts : I64, I64 -> I64 }
requires {
multiplyInts : I64, I64 -> I64
}
exposes []
packages {}
provides { multiplyInts: "multiplyInts" }
@ -14,12 +16,12 @@ platform ""
multiplyInts : I64, I64 -> I64
~~~
# EXPECTED
EXPOSED BUT NOT DEFINED - platform_int.md:5:16:5:44
EXPOSED BUT NOT DEFINED - platform_int.md:7:16:7:44
# PROBLEMS
**EXPOSED BUT NOT DEFINED**
The module header says that `multiplyInts` is exposed, but it is not defined anywhere in this module.
**platform_int.md:5:16:5:44:**
**platform_int.md:7:16:7:44:**
```roc
provides { multiplyInts: "multiplyInts" }
```
@ -29,7 +31,9 @@ You can fix this by either defining `multiplyInts` in this module, or by removin
# TOKENS
~~~zig
KwPlatform,StringStart,StringPart,StringEnd,
KwRequires,OpenCurly,CloseCurly,OpenCurly,LowerIdent,OpColon,UpperIdent,Comma,UpperIdent,OpArrow,UpperIdent,CloseCurly,
KwRequires,OpenCurly,
LowerIdent,OpColon,UpperIdent,Comma,UpperIdent,OpArrow,UpperIdent,
CloseCurly,
KwExposes,OpenSquare,CloseSquare,
KwPackages,OpenCurly,CloseCurly,
KwProvides,OpenCurly,LowerIdent,OpColon,StringStart,StringPart,StringEnd,CloseCurly,
@ -40,9 +44,10 @@ EndOfFile,
~~~clojure
(file
(platform (name "")
(rigids)
(ty-record
(anno-record-field (name "multiplyInts")
(requires
(requires-entry
(type-aliases)
(entrypoint "multiplyInts")
(ty-fn
(ty (name "I64"))
(ty (name "I64"))
@ -63,7 +68,9 @@ EndOfFile,
# FORMATTED
~~~roc
platform ""
requires {} { multiplyInts : I64, I64 -> I64 }
requires {
multiplyInts : I64, I64 -> I64
}
exposes []
packages {}
provides { multiplyInts: "multiplyInts" }

View file

@ -1,12 +1,14 @@
# META
~~~ini
description=the str test platform
description=the str test platform with for-clause syntax
type=file
~~~
# SOURCE
~~~roc
platform ""
requires {} { processString : Str -> Str }
requires {
processString : Str -> Str
}
exposes []
packages {}
provides { processString: "processString" }
@ -14,12 +16,12 @@ platform ""
processString : Str -> Str
~~~
# EXPECTED
EXPOSED BUT NOT DEFINED - platform_str.md:5:16:5:46
EXPOSED BUT NOT DEFINED - platform_str.md:7:16:7:46
# PROBLEMS
**EXPOSED BUT NOT DEFINED**
The module header says that `processString` is exposed, but it is not defined anywhere in this module.
**platform_str.md:5:16:5:46:**
**platform_str.md:7:16:7:46:**
```roc
provides { processString: "processString" }
```
@ -29,7 +31,9 @@ You can fix this by either defining `processString` in this module, or by removi
# TOKENS
~~~zig
KwPlatform,StringStart,StringPart,StringEnd,
KwRequires,OpenCurly,CloseCurly,OpenCurly,LowerIdent,OpColon,UpperIdent,OpArrow,UpperIdent,CloseCurly,
KwRequires,OpenCurly,
LowerIdent,OpColon,UpperIdent,OpArrow,UpperIdent,
CloseCurly,
KwExposes,OpenSquare,CloseSquare,
KwPackages,OpenCurly,CloseCurly,
KwProvides,OpenCurly,LowerIdent,OpColon,StringStart,StringPart,StringEnd,CloseCurly,
@ -40,9 +44,10 @@ EndOfFile,
~~~clojure
(file
(platform (name "")
(rigids)
(ty-record
(anno-record-field (name "processString")
(requires
(requires-entry
(type-aliases)
(entrypoint "processString")
(ty-fn
(ty (name "Str"))
(ty (name "Str")))))
@ -61,7 +66,9 @@ EndOfFile,
# FORMATTED
~~~roc
platform ""
requires {} { processString : Str -> Str }
requires {
processString : Str -> Str
}
exposes []
packages {}
provides { processString: "processString" }