Fix some more violations

This commit is contained in:
Richard Feldman 2025-12-04 02:02:38 -05:00
parent 799dada6a0
commit 36d68c9774
No known key found for this signature in database
10 changed files with 21 additions and 46 deletions

View file

@ -1,7 +1,6 @@
//! Base58 encoding and decoding for BLAKE3 hashes
//!
// zig-lint: required-param
//!
//! This module provides base58 encoding/decoding specifically optimized for 256-bit BLAKE3 hashes.
//! The base58 alphabet excludes visually similar characters (0, O, I, l) to prevent confusion.
//!

View file

@ -3,8 +3,6 @@
//! This module provides the core implementation of Roc's Str type, including
//! operations for string manipulation, Unicode handling, formatting, and
//! memory management. It defines the RocStr structure and associated functions
//!
// zig-lint: required-param
//! that are called from compiled Roc code to handle string operations efficiently.
//!
//! ## Ownership Semantics
@ -637,16 +635,6 @@ pub fn strSplitOn(
return list;
}
/// Thin wrapper that builds a `RocStr` from a raw byte pointer and length
/// by delegating to `RocStr.init`.
/// The third `usize` parameter is ignored (`_: usize`) — its original purpose
/// is not visible from this diff; see the TODO below about removing it.
fn initFromSmallStr(
slice_bytes: [*]u8,
len: usize,
_: usize,
// TODO we probably don't need this here
roc_ops: *RocOps,
) RocStr {
return RocStr.init(slice_bytes, len, roc_ops);
}
/// TODO
pub fn strSplitOnHelp(
array: [*]RocStr,

View file

@ -121,7 +121,7 @@ fn benchParseOrTokenize(comptime is_parse: bool, gpa: Allocator, path: []const u
var tokenizer = try tokenize.Tokenizer.init(&env.?.common, gpa, roc_file.content, msg_slice);
try tokenizer.tokenize(gpa);
var result = tokenizer.finishAndDeinit(gpa);
var result = tokenizer.finishAndDeinit();
iteration_tokens += result.tokens.tokens.len;
result.tokens.deinit(gpa);
}

View file

@ -1,6 +1,5 @@
//! Modern cache manager that uses BLAKE3-based keys and subdirectory splitting.
//!
// zig-lint: required-param
const std = @import("std");
const base = @import("base");
@ -67,11 +66,6 @@ pub const CacheManager = struct {
};
}
/// Deinitialize the cache manager.
/// Intentionally a no-op: the receiver is discarded (`_: *Self`) because the
/// manager currently owns no resources that need releasing. The method is kept
/// so the `init`/`deinit` API stays stable for callers.
pub fn deinit(_: *Self) void {
// Nothing to deinit currently
}
/// Load a cached module based on its content and compiler version.
/// Look up a cache entry by content and compiler version.
///

View file

@ -411,7 +411,6 @@ pub const BuildEnv = struct {
// Deinit cache manager if present
if (self.cache_manager) |cm| {
cm.deinit();
self.gpa.destroy(cm);
}

View file

@ -1,7 +1,5 @@
//! Represents a "value" on the Interpreter's stack.
//!
// zig-lint: required-param
//!
//! This is the public facing interface for interacting with stack values.
//!
//! It provides methods for working with the value safely using the layout.
@ -46,7 +44,7 @@ is_initialized: bool = false,
rt_var: ?types.Var = null,
/// Copy this stack value to a destination pointer with bounds checking
pub fn copyToPtr(self: StackValue, layout_cache: *LayoutStore, dest_ptr: *anyopaque, _: *RocOps) !void {
pub fn copyToPtr(self: StackValue, layout_cache: *LayoutStore, dest_ptr: *anyopaque) !void {
std.debug.assert(self.is_initialized); // Source must be initialized before copying
// For closures, use getTotalSize to include capture data; for others use layoutSize
@ -756,7 +754,7 @@ pub const TupleAccessor = struct {
/// Set an element by copying from a source StackValue
pub fn setElement(self: TupleAccessor, index: usize, source: StackValue, ops: *RocOps) !void {
const dest_element = try self.getElement(index);
try source.copyToPtr(self.layout_cache, dest_element.ptr.?, ops);
try source.copyToPtr(self.layout_cache, dest_element.ptr.?);
}
/// Find the sorted element index corresponding to an original tuple position
@ -1034,7 +1032,7 @@ pub const RecordAccessor = struct {
/// Set a field by copying from a source StackValue
pub fn setFieldByIndex(self: RecordAccessor, index: usize, source: StackValue, ops: *RocOps) !void {
const dest_field = try self.getFieldByIndex(index);
try source.copyToPtr(self.layout_cache, dest_field.ptr.?, ops);
try source.copyToPtr(self.layout_cache, dest_field.ptr.?);
}
/// Get the number of fields in this record

View file

@ -614,7 +614,7 @@ pub const Interpreter = struct {
self.early_return_value = null;
defer return_val.decref(&self.runtime_layout_store, roc_ops);
if (try self.shouldCopyResult(return_val, ret_ptr, roc_ops)) {
try return_val.copyToPtr(&self.runtime_layout_store, ret_ptr, roc_ops);
try return_val.copyToPtr(&self.runtime_layout_store, ret_ptr);
}
return;
}
@ -624,7 +624,7 @@ pub const Interpreter = struct {
// Only copy result if the result type is compatible with ret_ptr
if (try self.shouldCopyResult(result_value, ret_ptr, roc_ops)) {
try result_value.copyToPtr(&self.runtime_layout_store, ret_ptr, roc_ops);
try result_value.copyToPtr(&self.runtime_layout_store, ret_ptr);
}
return;
}
@ -634,7 +634,7 @@ pub const Interpreter = struct {
// Only copy result if the result type is compatible with ret_ptr
if (try self.shouldCopyResult(result, ret_ptr, roc_ops)) {
try result.copyToPtr(&self.runtime_layout_store, ret_ptr, roc_ops);
try result.copyToPtr(&self.runtime_layout_store, ret_ptr);
}
}
@ -781,7 +781,7 @@ pub const Interpreter = struct {
// Preserve rt_var for constant folding
const dest = StackValue{ .layout = src.layout, .ptr = ptr, .is_initialized = true, .rt_var = src.rt_var };
if (size > 0 and src.ptr != null and ptr != null) {
try src.copyToPtr(&self.runtime_layout_store, ptr.?, roc_ops);
try src.copyToPtr(&self.runtime_layout_store, ptr.?);
}
return dest;
}
@ -6210,7 +6210,7 @@ pub const Interpreter = struct {
const data_ptr = utils.allocateWithRefcount(elem_size, elem_alignment_u32, false, roc_ops);
if (elem_size > 0 and payload.ptr != null) {
try payload.copyToPtr(&self.runtime_layout_store, data_ptr, roc_ops);
try payload.copyToPtr(&self.runtime_layout_store, data_ptr);
}
if (out.ptr) |ptr| {
@ -11514,7 +11514,7 @@ pub const Interpreter = struct {
if (runtime_list.bytes) |buffer| {
for (values, 0..) |val, idx| {
const dest_ptr = buffer + idx * elem_size;
try val.copyToPtr(&self.runtime_layout_store, dest_ptr, roc_ops);
try val.copyToPtr(&self.runtime_layout_store, dest_ptr);
}
}
}
@ -11784,7 +11784,7 @@ pub const Interpreter = struct {
const payload_field = try acc.getFieldByIndex(payload_field_idx);
if (payload_field.ptr) |payload_ptr| {
if (total_count == 1) {
try values[0].copyToPtr(&self.runtime_layout_store, payload_ptr, roc_ops);
try values[0].copyToPtr(&self.runtime_layout_store, payload_ptr);
} else {
// Multiple args - create tuple payload
var elem_layouts = try self.allocator.alloc(Layout, total_count);
@ -11848,7 +11848,7 @@ pub const Interpreter = struct {
// Write payload
const proper_payload_field = try proper_acc.getElement(0);
if (proper_payload_field.ptr) |proper_ptr| {
try values[0].copyToPtr(&self.runtime_layout_store, proper_ptr, roc_ops);
try values[0].copyToPtr(&self.runtime_layout_store, proper_ptr);
}
for (values) |val| {
@ -11859,7 +11859,7 @@ pub const Interpreter = struct {
return true;
}
try values[0].copyToPtr(&self.runtime_layout_store, payload_ptr, roc_ops);
try values[0].copyToPtr(&self.runtime_layout_store, payload_ptr);
} else {
// Multiple args - create tuple payload
var elem_layouts = try self.allocator.alloc(Layout, total_count);
@ -11926,7 +11926,7 @@ pub const Interpreter = struct {
// Write payload (element 0)
const proper_payload_field = try proper_acc.getElement(0);
if (proper_payload_field.ptr) |proper_ptr| {
try values[0].copyToPtr(&self.runtime_layout_store, proper_ptr, roc_ops);
try values[0].copyToPtr(&self.runtime_layout_store, proper_ptr);
}
for (values) |val| {
@ -11954,7 +11954,7 @@ pub const Interpreter = struct {
// Write payload at offset 0
const payload_ptr: *anyopaque = @ptrCast(base_ptr);
if (total_count == 1) {
try values[0].copyToPtr(&self.runtime_layout_store, payload_ptr, roc_ops);
try values[0].copyToPtr(&self.runtime_layout_store, payload_ptr);
} else {
// Multiple args - create tuple payload at offset 0
var elem_layouts = try self.allocator.alloc(Layout, total_count);

View file

@ -9,7 +9,6 @@
//! child process would be complex.
//!
// zig-lint: required-param
//!
//! ## Cross-platform coordination
//!
//! The allocator uses platform-specific coordination mechanisms:

View file

@ -40,7 +40,7 @@ fn runParse(env: *CommonEnv, gpa: std.mem.Allocator, parserCall: *const fn (*Par
const msg_slice = messages[0..];
var tokenizer = try tokenize.Tokenizer.init(env, gpa, env.source, msg_slice);
try tokenizer.tokenize(gpa);
var result = tokenizer.finishAndDeinit(gpa);
var result = tokenizer.finishAndDeinit();
var parser = try Parser.init(result.tokens, gpa);
defer parser.deinit();

View file

@ -1,7 +1,5 @@
//! Tokenization functionality for the Roc parser.
//!
// zig-lint: required-param
//!
//! This module provides the tokenizer that converts Roc source code into
//! a stream of tokens for parsing. It handles all Roc language tokens including
//! keywords, identifiers, literals, operators, and punctuation, representing
@ -1111,7 +1109,7 @@ pub const Tokenizer = struct {
self.string_interpolation_stack.deinit();
}
pub fn finishAndDeinit(self: *Tokenizer, _: std.mem.Allocator) TokenOutput {
pub fn finishAndDeinit(self: *Tokenizer) TokenOutput {
self.string_interpolation_stack.deinit();
const actual_message_count = @min(self.cursor.message_count, self.cursor.messages.len);
return .{
@ -1254,7 +1252,7 @@ pub const Tokenizer = struct {
} else {
self.cursor.pos += 1;
// Look at what follows the minus to determine if it's unary
const tokenType: Token.Tag = if (self.canFollowUnaryMinus(n)) .OpUnaryMinus else .OpBinaryMinus;
const tokenType: Token.Tag = if (canFollowUnaryMinus(n)) .OpUnaryMinus else .OpBinaryMinus;
try self.pushTokenNormalHere(gpa, tokenType, start);
}
} else {
@ -1571,7 +1569,7 @@ pub const Tokenizer = struct {
}
/// Determines if a character can follow a unary minus (i.e., can start an expression)
fn canFollowUnaryMinus(_: *const Tokenizer, c: u8) bool {
fn canFollowUnaryMinus(c: u8) bool {
return switch (c) {
// Identifiers
'a'...'z', 'A'...'Z', '_' => true,
@ -1685,7 +1683,7 @@ pub fn checkTokenizerInvariants(gpa: std.mem.Allocator, input: []const u8, debug
var messages: [32]Diagnostic = undefined;
var tokenizer = try Tokenizer.init(&env, gpa, input, &messages);
try tokenizer.tokenize(gpa);
var output = tokenizer.finishAndDeinit(gpa);
var output = tokenizer.finishAndDeinit();
defer output.tokens.deinit(gpa);
if (debug) {
@ -1720,7 +1718,7 @@ pub fn checkTokenizerInvariants(gpa: std.mem.Allocator, input: []const u8, debug
// Second tokenization.
tokenizer = try Tokenizer.init(&env, gpa, buf2.items, &messages);
try tokenizer.tokenize(gpa);
var output2 = tokenizer.finishAndDeinit(gpa);
var output2 = tokenizer.finishAndDeinit();
defer output2.tokens.deinit(gpa);
if (debug) {