mirror of
https://github.com/roc-lang/roc.git
synced 2025-12-23 08:48:03 +00:00
Fix rank panic when variable redirects to higher-rank variable

When a variable added to var_pool at rank 1 was later redirected (via
setVarRedirect) to a variable at rank 2, generalization would try to add
the resolved variable at rank 2 to tmp_var_pool, which only goes up to
rank 1, causing a panic.

The fix caps the effective rank at rank_to_generalize when copying
variables to tmp_var_pool. This allows the rank adjustment phase to
properly handle these variables.

Fixes #8656

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
parent
6fabcdeb0e
commit
930fd0865e
3 changed files with 92 additions and 1 deletions
|
|
@@ -238,6 +238,11 @@ pub const io_spec_tests = [_]TestSpec{
         .io_spec = "0<short|1>short|0<|1>",
         .description = "Regression test: Stdin.line! in while loop with short input (small string optimization)",
     },
+    .{
+        .roc_file = "test/fx/issue8656.roc",
+        .io_spec = "1>ok",
+        .description = "Regression test: rank panic when variable redirects to higher-rank variable during generalization",
+    },
 };
 
 /// Get the total number of IO spec tests
@@ -171,7 +171,13 @@ pub const Generalizer = struct {
         // Copy all variables at this rank into the temporary pool, resolving redirects
         for (vars_to_generalize) |var_| {
             const resolved = self.store.resolveVar(var_);
-            try self.tmp_var_pool.addVarToRank(resolved.var_, resolved.desc.rank);
+            // Cap the rank at rank_to_generalize. If the resolved variable has a higher
+            // rank than what we're generalizing, this can happen when a variable is
+            // redirected (via setVarRedirect) to a higher-rank variable after being
+            // added to the var_pool. We handle this by treating it as if it's at the
+            // current rank - it will be properly handled during rank adjustment.
+            const effective_rank = resolved.desc.rank.min(rank_to_generalize);
+            try self.tmp_var_pool.addVarToRank(resolved.var_, effective_rank);
             try self.vars_to_generalized.put(resolved.var_, {});
         }
 
test/fx/issue8656.roc — new file, 80 lines

@@ -0,0 +1,80 @@
app [main!] { pf: platform "./platform/main.roc" }

import pf.Stdout

# Regression test for issue #8656
# Tests that generalization handles variables that redirect to higher-rank
# variables without panicking.
#
# The original bug was: when a variable added to the var_pool at rank 1
# was later redirected (via setVarRedirect) to a variable at rank 2,
# generalization would try to add the resolved variable at rank 2 to the
# tmp_var_pool which only goes up to rank 1, causing a panic.

main! = || {
    Stdout.line!("ok")
}

# The following code structure triggered the bug in the original report.
# It involves complex type definitions with nested tuples and recursive functions.

Maybe(t) : [
    Some(t),
    None,
]

TokenContents : [
    NewlineToken,
    SymbolsToken(Str),
    SnakeCaseIdentToken(Str),
    EndOfFileToken,
]

TokenizerResult : (
    Try(TokenContents, Str),
    U64,
    U64,
)

get_next_token : List(U8), U64 -> TokenizerResult
get_next_token = |file, index| {
    match List.get(file, index) {
        Ok('\n') => (Ok(NewlineToken), index, index + 1)
        Err(_) => (Ok(EndOfFileToken), index, index)
    }
}

# This function has a nested lambda (ret) that creates rank-2 variables
tokenize_identifier = |file, index, acc, start_index| {
    char = List.get(file, index)
    ret = || {
        match Str.from_utf8(acc) {
            Ok(str) => (Ok(SnakeCaseIdentToken(str)), start_index, index)
            Err(_) => (Err("Invalid UTF8"), start_index, index)
        }
    }
    match char {
        Ok(c) => {
            if ('a' <= c and c <= 'z') or ('A' <= c and c <= 'Z') or (c == '_') {
                tokenize_identifier(file, index + 1, List.append(acc, c), start_index)
            } else {
                ret()
            }
        }
        _ => ret()
    }
}

# This function with pattern matching on tuples exercises the type checker
parse_pattern = |file, tokenizer_result| {
    (token, _, index) = tokenizer_result
    match token {
        Ok(SnakeCaseIdentToken(ident)) => {
            match get_next_token(file, index) {
                (Ok(SymbolsToken(":")), _, index2) => Ok((ident, Some("type"), index2))
                _ => Ok((ident, None, index))
            }
        }
        _ => Err("expected pattern")
    }
}
Loading…
Add table
Add a link
Reference in a new issue