Reject more syntactically invalid Python programs (#8524)

## Summary

This commit adds some additional error checking to the parser such that
assignments that are invalid syntax are rejected. This covers the
obvious cases like `5 = 3` and some not so obvious cases like `x + y =
42`.
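
For example (a minimal sketch, not part of the diff, using the crate's
`parse_suite` entry point that the new unit tests below also call, and
assuming it is publicly exported; `<embedded>` is just a placeholder path):

```rust
use ruff_python_parser::parse_suite;

fn main() {
    // The obvious case: a literal on the left-hand side of `=`.
    assert!(parse_suite("5 = 3", "<embedded>").is_err());
    // A less obvious case: a binary expression as the assignment target.
    assert!(parse_suite("x + y = 42", "<embedded>").is_err());
    // Ordinary assignments are, of course, unaffected.
    assert!(parse_suite("x = y + 42", "<embedded>").is_ok());
}
```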

This does add an additional recursive call to the parser for the cases
handling assignments. I had initially been concerned about doing this,
but `set_context` is already doing recursion during assignments, so I
didn't feel as though this was changing any fundamental performance
characteristics of the parser. (Also, in practice, I would expect any
such recursion here to be quite shallow since the recursion is done on
the target of an assignment. Such things are rarely nested much in
practice.)
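
To make the recursion claim concrete, here's a sketch along the same lines
(the second input mirrors the `err_list_nested` test below): the checker only
descends through starred, list, and tuple targets, and the reported offset
points at the offending nested expression rather than at the statement.

```rust
use ruff_python_parser::parse_suite;

fn main() {
    // Nested list targets are still fine; the recursion bottoms out at names.
    assert!(parse_suite("[[a, b], [c]] = [[1, 2], [3]]", "<embedded>").is_ok());
    // An invalid expression buried inside a nested target is rejected, and
    // the error offset (11 here) points at the nested `42`.
    let err = parse_suite("[[a, b], [[42]], d] = [[1, 2], [[3]], 4]", "<embedded>").unwrap_err();
    assert_eq!(u32::from(err.offset), 11);
}
```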

Fixes #6895

## Test Plan

I've added unit tests covering every case that is detected as invalid on
an `Expr`.
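The new tests live alongside the checking code in `invalid.rs` and can be run
in isolation with something like `cargo test -p ruff_python_parser invalid`
(the package name here is inferred from the snapshot paths below).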

Andrew Gallant, 2023-11-07 07:16:06 -05:00 (committed by GitHub)
commit 6a1fa4778f, parent c3d6d5d006
20 changed files with 1432 additions and 148 deletions

@ -0,0 +1,733 @@
/*!
Defines some helper routines for rejecting invalid Python programs.
These routines are named in a way that supports qualified use. For example,
`invalid::assignment_targets`.
*/
use {ruff_python_ast::Expr, ruff_text_size::TextSize};
use crate::lexer::{LexicalError, LexicalErrorType};
/// Returns an error for invalid assignment targets.
///
/// # Errors
///
/// This returns an error when any of the given expressions are themselves
/// or contain an expression that is invalid on the left hand side of an
/// assignment. For example, all literal expressions are invalid assignment
/// targets.
pub(crate) fn assignment_targets(targets: &[Expr]) -> Result<(), LexicalError> {
for t in targets {
assignment_target(t)?;
}
Ok(())
}
/// Returns an error if the given target is invalid for the left hand side of
/// an assignment.
///
/// # Errors
///
/// This returns an error when the given expression is itself or contains an
/// expression that is invalid on the left hand side of an assignment. For
/// example, all literal expressions are invalid assignment targets.
pub(crate) fn assignment_target(target: &Expr) -> Result<(), LexicalError> {
// Allowing a glob import here because of its limited scope.
#[allow(clippy::enum_glob_use)]
use self::Expr::*;
let err = |location: TextSize| -> LexicalError {
let error = LexicalErrorType::AssignmentError;
LexicalError { error, location }
};
match *target {
BoolOp(ref e) => Err(err(e.range.start())),
NamedExpr(ref e) => Err(err(e.range.start())),
BinOp(ref e) => Err(err(e.range.start())),
UnaryOp(ref e) => Err(err(e.range.start())),
Lambda(ref e) => Err(err(e.range.start())),
IfExp(ref e) => Err(err(e.range.start())),
Dict(ref e) => Err(err(e.range.start())),
Set(ref e) => Err(err(e.range.start())),
ListComp(ref e) => Err(err(e.range.start())),
SetComp(ref e) => Err(err(e.range.start())),
DictComp(ref e) => Err(err(e.range.start())),
GeneratorExp(ref e) => Err(err(e.range.start())),
Await(ref e) => Err(err(e.range.start())),
Yield(ref e) => Err(err(e.range.start())),
YieldFrom(ref e) => Err(err(e.range.start())),
Compare(ref e) => Err(err(e.range.start())),
Call(ref e) => Err(err(e.range.start())),
FormattedValue(ref e) => Err(err(e.range.start())),
// FString is recursive, but all its forms are invalid as an
// assignment target, so we can reject it without exploring it.
FString(ref e) => Err(err(e.range.start())),
StringLiteral(ref e) => Err(err(e.range.start())),
BytesLiteral(ref e) => Err(err(e.range.start())),
NumberLiteral(ref e) => Err(err(e.range.start())),
BooleanLiteral(ref e) => Err(err(e.range.start())),
NoneLiteral(ref e) => Err(err(e.range.start())),
EllipsisLiteral(ref e) => Err(err(e.range.start())),
// This isn't in the Python grammar but is Jupyter notebook specific.
// It seems like this should be an error. It does also seem like the
// parser prevents this from ever appearing as an assignment target
// anyway. ---AG
IpyEscapeCommand(ref e) => Err(err(e.range.start())),
// The only nested expressions allowed as an assignment target
// are star exprs, lists and tuples.
Starred(ref e) => assignment_target(&e.value),
List(ref e) => assignment_targets(&e.elts),
Tuple(ref e) => assignment_targets(&e.elts),
// Subscript is recursive and can be invalid, but such cases aren't syntax errors.
// For example, `5[1] = 42` is a type error.
Subscript(_) => Ok(()),
// Similar to Subscript, e.g., `5[1:2] = [42]` is a type error.
Slice(_) => Ok(()),
// Similar to Subscript, e.g., `"foo".y = 42` is an attribute error.
Attribute(_) => Ok(()),
// These are always valid as assignment targets.
Name(_) => Ok(()),
}
}
#[cfg(test)]
mod tests {
use crate::parse_suite;
// First we test, broadly, that various kinds of assignments are now
// rejected by the parser, e.g., `5 = 3`, `5 += 3`, `(5): int = 3`.
// Regression test: https://github.com/astral-sh/ruff/issues/6895
#[test]
fn err_literal_assignment() {
let ast = parse_suite(r"5 = 3", "<test>");
insta::assert_debug_snapshot!(ast, @r#"
Err(
ParseError {
error: Lexical(
AssignmentError,
),
offset: 0,
source_path: "<test>",
},
)
"#);
}
// This test passed even before the assignment-target checking added above,
// but we include it here for good measure.
#[test]
fn err_assignment_expr() {
let ast = parse_suite(r"(5 := 3)", "<test>");
insta::assert_debug_snapshot!(ast, @r#"
Err(
ParseError {
error: UnrecognizedToken(
ColonEqual,
None,
),
offset: 3,
source_path: "<test>",
},
)
"#);
}
#[test]
fn err_literal_augment_assignment() {
let ast = parse_suite(r"5 += 3", "<test>");
insta::assert_debug_snapshot!(ast, @r#"
Err(
ParseError {
error: Lexical(
AssignmentError,
),
offset: 0,
source_path: "<test>",
},
)
"#);
}
#[test]
fn err_literal_annotation_assignment() {
let ast = parse_suite(r"(5): int = 3", "<test>");
insta::assert_debug_snapshot!(ast, @r#"
Err(
ParseError {
error: Lexical(
AssignmentError,
),
offset: 1,
source_path: "<test>",
},
)
"#);
}
// Now we exhaustively test all possible cases where assignment can fail.
#[test]
fn err_bool_op() {
let ast = parse_suite(r"x or y = 42", "<test>");
insta::assert_debug_snapshot!(ast, @r#"
Err(
ParseError {
error: Lexical(
AssignmentError,
),
offset: 0,
source_path: "<test>",
},
)
"#);
}
#[test]
fn err_named_expr() {
let ast = parse_suite(r"(x := 5) = 42", "<test>");
insta::assert_debug_snapshot!(ast, @r#"
Err(
ParseError {
error: Lexical(
AssignmentError,
),
offset: 1,
source_path: "<test>",
},
)
"#);
}
#[test]
fn err_bin_op() {
let ast = parse_suite(r"x + y = 42", "<test>");
insta::assert_debug_snapshot!(ast, @r#"
Err(
ParseError {
error: Lexical(
AssignmentError,
),
offset: 0,
source_path: "<test>",
},
)
"#);
}
#[test]
fn err_unary_op() {
let ast = parse_suite(r"-x = 42", "<test>");
insta::assert_debug_snapshot!(ast, @r#"
Err(
ParseError {
error: Lexical(
AssignmentError,
),
offset: 0,
source_path: "<test>",
},
)
"#);
}
#[test]
fn err_lambda() {
let ast = parse_suite(r"(lambda _: 1) = 42", "<test>");
insta::assert_debug_snapshot!(ast, @r#"
Err(
ParseError {
error: Lexical(
AssignmentError,
),
offset: 1,
source_path: "<test>",
},
)
"#);
}
#[test]
fn err_if_exp() {
let ast = parse_suite(r"a if b else c = 42", "<test>");
insta::assert_debug_snapshot!(ast, @r#"
Err(
ParseError {
error: Lexical(
AssignmentError,
),
offset: 0,
source_path: "<test>",
},
)
"#);
}
#[test]
fn err_dict() {
let ast = parse_suite(r"{'a':5} = 42", "<test>");
insta::assert_debug_snapshot!(ast, @r#"
Err(
ParseError {
error: Lexical(
AssignmentError,
),
offset: 0,
source_path: "<test>",
},
)
"#);
}
#[test]
fn err_set() {
let ast = parse_suite(r"{a} = 42", "<test>");
insta::assert_debug_snapshot!(ast, @r#"
Err(
ParseError {
error: Lexical(
AssignmentError,
),
offset: 0,
source_path: "<test>",
},
)
"#);
}
#[test]
fn err_list_comp() {
let ast = parse_suite(r"[x for x in xs] = 42", "<test>");
insta::assert_debug_snapshot!(ast, @r#"
Err(
ParseError {
error: Lexical(
AssignmentError,
),
offset: 0,
source_path: "<test>",
},
)
"#);
}
#[test]
fn err_set_comp() {
let ast = parse_suite(r"{x for x in xs} = 42", "<test>");
insta::assert_debug_snapshot!(ast, @r#"
Err(
ParseError {
error: Lexical(
AssignmentError,
),
offset: 0,
source_path: "<test>",
},
)
"#);
}
#[test]
fn err_dict_comp() {
let ast = parse_suite(r"{x: x*2 for x in xs} = 42", "<test>");
insta::assert_debug_snapshot!(ast, @r#"
Err(
ParseError {
error: Lexical(
AssignmentError,
),
offset: 0,
source_path: "<test>",
},
)
"#);
}
#[test]
fn err_generator_exp() {
let ast = parse_suite(r"(x for x in xs) = 42", "<test>");
insta::assert_debug_snapshot!(ast, @r#"
Err(
ParseError {
error: Lexical(
AssignmentError,
),
offset: 0,
source_path: "<test>",
},
)
"#);
}
#[test]
fn err_await() {
let ast = parse_suite(r"await x = 42", "<test>");
insta::assert_debug_snapshot!(ast, @r#"
Err(
ParseError {
error: Lexical(
AssignmentError,
),
offset: 0,
source_path: "<test>",
},
)
"#);
}
#[test]
fn err_yield() {
let ast = parse_suite(r"(yield x) = 42", "<test>");
insta::assert_debug_snapshot!(ast, @r#"
Err(
ParseError {
error: Lexical(
AssignmentError,
),
offset: 1,
source_path: "<test>",
},
)
"#);
}
#[test]
fn err_yield_from() {
let ast = parse_suite(r"(yield from xs) = 42", "<test>");
insta::assert_debug_snapshot!(ast, @r#"
Err(
ParseError {
error: Lexical(
AssignmentError,
),
offset: 1,
source_path: "<test>",
},
)
"#);
}
#[test]
fn err_compare() {
let ast = parse_suite(r"a < b < c = 42", "<test>");
insta::assert_debug_snapshot!(ast, @r#"
Err(
ParseError {
error: Lexical(
AssignmentError,
),
offset: 0,
source_path: "<test>",
},
)
"#);
}
#[test]
fn err_call() {
let ast = parse_suite(r"foo() = 42", "<test>");
insta::assert_debug_snapshot!(ast, @r#"
Err(
ParseError {
error: Lexical(
AssignmentError,
),
offset: 0,
source_path: "<test>",
},
)
"#);
}
#[test]
fn err_formatted_value() {
// N.B. It looks like the parser can't generate a top-level
// FormattedValue, whereas the official Python AST permits
// representing a single f-string containing just a variable as a
// FormattedValue directly.
//
// Bottom line is that because of this, this test is (at present)
// duplicative with the `fstring` test. That is, in theory these tests
// could fail independently, but in practice their failure or success
// is coupled.
//
// See: https://docs.python.org/3/library/ast.html#ast.FormattedValue
let ast = parse_suite(r#"f"{quux}" = 42"#, "<test>");
insta::assert_debug_snapshot!(ast, @r#"
Err(
ParseError {
error: Lexical(
AssignmentError,
),
offset: 0,
source_path: "<test>",
},
)
"#);
}
#[test]
fn err_fstring() {
let ast = parse_suite(r#"f"{foo} and {bar}" = 42"#, "<test>");
insta::assert_debug_snapshot!(ast, @r#"
Err(
ParseError {
error: Lexical(
AssignmentError,
),
offset: 0,
source_path: "<test>",
},
)
"#);
}
#[test]
fn err_string_literal() {
let ast = parse_suite(r#""foo" = 42"#, "<test>");
insta::assert_debug_snapshot!(ast, @r#"
Err(
ParseError {
error: Lexical(
AssignmentError,
),
offset: 0,
source_path: "<test>",
},
)
"#);
}
#[test]
fn err_bytes_literal() {
let ast = parse_suite(r#"b"foo" = 42"#, "<test>");
insta::assert_debug_snapshot!(ast, @r#"
Err(
ParseError {
error: Lexical(
AssignmentError,
),
offset: 0,
source_path: "<test>",
},
)
"#);
}
#[test]
fn err_number_literal() {
let ast = parse_suite(r"123 = 42", "<test>");
insta::assert_debug_snapshot!(ast, @r#"
Err(
ParseError {
error: Lexical(
AssignmentError,
),
offset: 0,
source_path: "<test>",
},
)
"#);
}
#[test]
fn err_boolean_literal() {
let ast = parse_suite(r"True = 42", "<test>");
insta::assert_debug_snapshot!(ast, @r#"
Err(
ParseError {
error: Lexical(
AssignmentError,
),
offset: 0,
source_path: "<test>",
},
)
"#);
}
#[test]
fn err_none_literal() {
let ast = parse_suite(r"None = 42", "<test>");
insta::assert_debug_snapshot!(ast, @r#"
Err(
ParseError {
error: Lexical(
AssignmentError,
),
offset: 0,
source_path: "<test>",
},
)
"#);
}
#[test]
fn err_ellipsis_literal() {
let ast = parse_suite(r"... = 42", "<test>");
insta::assert_debug_snapshot!(ast, @r#"
Err(
ParseError {
error: Lexical(
AssignmentError,
),
offset: 0,
source_path: "<test>",
},
)
"#);
}
#[test]
fn err_starred() {
let ast = parse_suite(r"*foo() = 42", "<test>");
insta::assert_debug_snapshot!(ast, @r#"
Err(
ParseError {
error: Lexical(
AssignmentError,
),
offset: 1,
source_path: "<test>",
},
)
"#);
}
#[test]
fn err_list() {
let ast = parse_suite(r"[x, foo(), y] = [42, 42, 42]", "<test>");
insta::assert_debug_snapshot!(ast, @r#"
Err(
ParseError {
error: Lexical(
AssignmentError,
),
offset: 4,
source_path: "<test>",
},
)
"#);
}
#[test]
fn err_list_nested() {
let ast = parse_suite(r"[[a, b], [[42]], d] = [[1, 2], [[3]], 4]", "<test>");
insta::assert_debug_snapshot!(ast, @r#"
Err(
ParseError {
error: Lexical(
AssignmentError,
),
offset: 11,
source_path: "<test>",
},
)
"#);
}
#[test]
fn err_tuple() {
let ast = parse_suite(r"(x, foo(), y) = (42, 42, 42)", "<test>");
insta::assert_debug_snapshot!(ast, @r#"
Err(
ParseError {
error: Lexical(
AssignmentError,
),
offset: 4,
source_path: "<test>",
},
)
"#);
}
// This last group of tests checks that assignments we expect to be parsed
// (including some interesting ones) continue to be parsed successfully.
#[test]
fn ok_starred() {
let ast = parse_suite(r"*foo = 42", "<test>");
insta::assert_debug_snapshot!(ast);
}
#[test]
fn ok_list() {
let ast = parse_suite(r"[x, y, z] = [1, 2, 3]", "<test>");
insta::assert_debug_snapshot!(ast);
}
#[test]
fn ok_tuple() {
let ast = parse_suite(r"(x, y, z) = (1, 2, 3)", "<test>");
insta::assert_debug_snapshot!(ast);
}
#[test]
fn ok_subscript_normal() {
let ast = parse_suite(r"x[0] = 42", "<test>");
insta::assert_debug_snapshot!(ast);
}
// This is actually a type error, not a syntax error. So check that it
// doesn't fail parsing.
#[test]
fn ok_subscript_weird() {
let ast = parse_suite(r"5[0] = 42", "<test>");
insta::assert_debug_snapshot!(ast);
}
#[test]
fn ok_slice_normal() {
let ast = parse_suite(r"x[1:2] = [42]", "<test>");
insta::assert_debug_snapshot!(ast);
}
// This is actually a type error, not a syntax error. So check that it
// doesn't fail parsing.
#[test]
fn ok_slice_weird() {
let ast = parse_suite(r"5[1:2] = [42]", "<test>");
insta::assert_debug_snapshot!(ast);
}
#[test]
fn ok_attribute_normal() {
let ast = parse_suite(r"foo.bar = 42", "<test>");
insta::assert_debug_snapshot!(ast);
}
// This is actually an attribute error, not a syntax error. So check that
// it doesn't fail parsing.
#[test]
fn ok_attribute_weird() {
let ast = parse_suite(r#""foo".y = 42"#, "<test>");
insta::assert_debug_snapshot!(ast);
}
#[test]
fn ok_name() {
let ast = parse_suite(r"foo = 42", "<test>");
insta::assert_debug_snapshot!(ast);
}
// This is a sanity test for what looks like an ipython directive being
// assigned to. This doesn't actually parse as an assignment statement, but
// rather as a directive whose value is `foo = 42`.
#[test]
fn ok_ipy_escape_command() {
use crate::Mode;
let src = r"!foo = 42";
let tokens = crate::lexer::lex(src, Mode::Ipython);
let ast = crate::parse_tokens(tokens, src, Mode::Ipython, "<test>");
insta::assert_debug_snapshot!(ast);
}
#[test]
fn ok_assignment_expr() {
let ast = parse_suite(r"(x := 5)", "<test>");
insta::assert_debug_snapshot!(ast);
}
}

@ -1344,6 +1344,8 @@ pub enum LexicalErrorType {
LineContinuationError,
/// An unexpected end of file was encountered.
Eof,
/// Occurs when a syntactically invalid assignment was encountered.
AssignmentError,
/// An unexpected error occurred.
OtherError(String),
}
@ -1389,6 +1391,7 @@ impl std::fmt::Display for LexicalErrorType {
write!(f, "unexpected character after line continuation character")
}
LexicalErrorType::Eof => write!(f, "unexpected EOF while parsing"),
LexicalErrorType::AssignmentError => write!(f, "invalid assignment target"),
LexicalErrorType::OtherError(msg) => write!(f, "{msg}"),
}
}
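
As a quick sketch of how the new variant surfaces (not part of the diff; it
relies on `LexicalErrorType` being reachable through the `pub mod lexer`
declaration shown below):

```rust
use ruff_python_parser::lexer::LexicalErrorType;

fn main() {
    // The `Display` impl above renders the new variant with this message,
    // which is what callers ultimately see when e.g. `x + y = 42` fails to parse.
    assert_eq!(
        LexicalErrorType::AssignmentError.to_string(),
        "invalid assignment target"
    );
}
```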

@ -123,6 +123,7 @@ use crate::lexer::LexResult;
mod function;
// Skip flattening lexer to distinguish from full ruff_python_parser
mod context;
mod invalid;
pub mod lexer;
mod parser;
mod soft_keywords;

@ -13,6 +13,7 @@ use crate::{
context::set_context,
string::{StringType, concatenate_strings, parse_fstring_middle, parse_string_literal},
token::{self, StringKind},
invalid,
};
use lalrpop_util::ParseError;
@ -108,12 +109,12 @@ DelStatement: ast::Stmt = {
};
ExpressionStatement: ast::Stmt = {
<location:@L> <expression:TestOrStarExprList> <suffix:AssignSuffix*> <end_location:@R> => {
<location:@L> <expression:TestOrStarExprList> <suffix:AssignSuffix*> <end_location:@R> =>? {
// Just an expression, no assignment:
if suffix.is_empty() {
ast::Stmt::Expr(
Ok(ast::Stmt::Expr(
ast::StmtExpr { value: Box::new(expression.into()), range: (location..end_location).into() }
)
))
} else {
let mut targets = vec![set_context(expression.into(), ast::ExprContext::Store)];
let mut values = suffix;
@ -123,25 +124,27 @@ ExpressionStatement: ast::Stmt = {
for target in values {
targets.push(set_context(target.into(), ast::ExprContext::Store));
}
ast::Stmt::Assign(
invalid::assignment_targets(&targets)?;
Ok(ast::Stmt::Assign(
ast::StmtAssign { targets, value, range: (location..end_location).into() }
)
))
}
},
<location:@L> <target:TestOrStarExprList> <op:AugAssign> <rhs:TestListOrYieldExpr> <end_location:@R> => {
ast::Stmt::AugAssign(
<location:@L> <target:TestOrStarExprList> <op:AugAssign> <rhs:TestListOrYieldExpr> <end_location:@R> =>? {
invalid::assignment_target(&target.expr)?;
Ok(ast::Stmt::AugAssign(
ast::StmtAugAssign {
target: Box::new(set_context(target.into(), ast::ExprContext::Store)),
op,
value: Box::new(rhs.into()),
range: (location..end_location).into()
},
)
))
},
<location:@L> <target:Test<"all">> ":" <annotation:Test<"all">> <rhs:AssignSuffix?> <end_location:@R> => {
<location:@L> <target:Test<"all">> ":" <annotation:Test<"all">> <rhs:AssignSuffix?> <end_location:@R> =>? {
let simple = target.expr.is_name_expr();
ast::Stmt::AnnAssign(
invalid::assignment_target(&target.expr)?;
Ok(ast::Stmt::AnnAssign(
ast::StmtAnnAssign {
target: Box::new(set_context(target.into(), ast::ExprContext::Store)),
annotation: Box::new(annotation.into()),
@ -149,7 +152,7 @@ ExpressionStatement: ast::Stmt = {
simple,
range: (location..end_location).into()
},
)
))
},
};

@ -1,5 +1,5 @@
// auto-generated: "lalrpop 0.20.0"
// sha3: c798bc6e7bd9950e88dd5d950470865a75b5ff0352f4fc7fb51f13147de6ba6c
// sha3: b8ac4a859b69d580e50733d39c96a3fe018f568e71e532ebb3153a19902e64e5
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
use ruff_python_ast::{self as ast, Int, IpyEscapeKind};
use crate::{
@ -10,6 +10,7 @@ use crate::{
context::set_context,
string::{StringType, concatenate_strings, parse_fstring_middle, parse_string_literal},
token::{self, StringKind},
invalid,
};
use lalrpop_util::ParseError;
#[allow(unused_extern_crates)]
@ -33,6 +34,7 @@ mod __parse__Top {
context::set_context,
string::{StringType, concatenate_strings, parse_fstring_middle, parse_string_literal},
token::{self, StringKind},
invalid,
};
use lalrpop_util::ParseError;
#[allow(unused_extern_crates)]
@ -13725,19 +13727,76 @@ mod __parse__Top {
__reduce356(source_code, mode, __lookahead_start, __symbols, core::marker::PhantomData::<()>)
}
357 => {
__reduce357(source_code, mode, __lookahead_start, __symbols, core::marker::PhantomData::<()>)
// ExpressionStatement = GenericList<TestOrStarExpr> => ActionFn(1752);
let __sym0 = __pop_Variant15(__symbols);
let __start = __sym0.0;
let __end = __sym0.2;
let __nt = match super::__action1752::<>(source_code, mode, __sym0) {
Ok(v) => v,
Err(e) => return Some(Err(e)),
};
__symbols.push((__start, __Symbol::Variant37(__nt), __end));
(1, 137)
}
358 => {
__reduce358(source_code, mode, __lookahead_start, __symbols, core::marker::PhantomData::<()>)
// ExpressionStatement = GenericList<TestOrStarExpr>, AssignSuffix+ => ActionFn(1753);
assert!(__symbols.len() >= 2);
let __sym1 = __pop_Variant17(__symbols);
let __sym0 = __pop_Variant15(__symbols);
let __start = __sym0.0;
let __end = __sym1.2;
let __nt = match super::__action1753::<>(source_code, mode, __sym0, __sym1) {
Ok(v) => v,
Err(e) => return Some(Err(e)),
};
__symbols.push((__start, __Symbol::Variant37(__nt), __end));
(2, 137)
}
359 => {
__reduce359(source_code, mode, __lookahead_start, __symbols, core::marker::PhantomData::<()>)
// ExpressionStatement = GenericList<TestOrStarExpr>, AugAssign, TestListOrYieldExpr => ActionFn(1754);
assert!(__symbols.len() >= 3);
let __sym2 = __pop_Variant15(__symbols);
let __sym1 = __pop_Variant49(__symbols);
let __sym0 = __pop_Variant15(__symbols);
let __start = __sym0.0;
let __end = __sym2.2;
let __nt = match super::__action1754::<>(source_code, mode, __sym0, __sym1, __sym2) {
Ok(v) => v,
Err(e) => return Some(Err(e)),
};
__symbols.push((__start, __Symbol::Variant37(__nt), __end));
(3, 137)
}
360 => {
__reduce360(source_code, mode, __lookahead_start, __symbols, core::marker::PhantomData::<()>)
// ExpressionStatement = Test<"all">, ":", Test<"all">, AssignSuffix => ActionFn(1531);
assert!(__symbols.len() >= 4);
let __sym3 = __pop_Variant15(__symbols);
let __sym2 = __pop_Variant15(__symbols);
let __sym1 = __pop_Variant0(__symbols);
let __sym0 = __pop_Variant15(__symbols);
let __start = __sym0.0;
let __end = __sym3.2;
let __nt = match super::__action1531::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3) {
Ok(v) => v,
Err(e) => return Some(Err(e)),
};
__symbols.push((__start, __Symbol::Variant37(__nt), __end));
(4, 137)
}
361 => {
__reduce361(source_code, mode, __lookahead_start, __symbols, core::marker::PhantomData::<()>)
// ExpressionStatement = Test<"all">, ":", Test<"all"> => ActionFn(1532);
assert!(__symbols.len() >= 3);
let __sym2 = __pop_Variant15(__symbols);
let __sym1 = __pop_Variant0(__symbols);
let __sym0 = __pop_Variant15(__symbols);
let __start = __sym0.0;
let __end = __sym2.2;
let __nt = match super::__action1532::<>(source_code, mode, __sym0, __sym1, __sym2) {
Ok(v) => v,
Err(e) => return Some(Err(e)),
};
__symbols.push((__start, __Symbol::Variant37(__nt), __end));
(3, 137)
}
362 => {
// FStringConversion = "!", name => ActionFn(800);
@ -24718,103 +24777,6 @@ mod __parse__Top {
__symbols.push((__start, __Symbol::Variant15(__nt), __end));
(1, 136)
}
pub(crate) fn __reduce357<
>(
source_code: &str,
mode: Mode,
__lookahead_start: Option<&TextSize>,
__symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)>,
_: core::marker::PhantomData<()>,
) -> (usize, usize)
{
// ExpressionStatement = GenericList<TestOrStarExpr> => ActionFn(1752);
let __sym0 = __pop_Variant15(__symbols);
let __start = __sym0.0;
let __end = __sym0.2;
let __nt = super::__action1752::<>(source_code, mode, __sym0);
__symbols.push((__start, __Symbol::Variant37(__nt), __end));
(1, 137)
}
pub(crate) fn __reduce358<
>(
source_code: &str,
mode: Mode,
__lookahead_start: Option<&TextSize>,
__symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)>,
_: core::marker::PhantomData<()>,
) -> (usize, usize)
{
// ExpressionStatement = GenericList<TestOrStarExpr>, AssignSuffix+ => ActionFn(1753);
assert!(__symbols.len() >= 2);
let __sym1 = __pop_Variant17(__symbols);
let __sym0 = __pop_Variant15(__symbols);
let __start = __sym0.0;
let __end = __sym1.2;
let __nt = super::__action1753::<>(source_code, mode, __sym0, __sym1);
__symbols.push((__start, __Symbol::Variant37(__nt), __end));
(2, 137)
}
pub(crate) fn __reduce359<
>(
source_code: &str,
mode: Mode,
__lookahead_start: Option<&TextSize>,
__symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)>,
_: core::marker::PhantomData<()>,
) -> (usize, usize)
{
// ExpressionStatement = GenericList<TestOrStarExpr>, AugAssign, TestListOrYieldExpr => ActionFn(1754);
assert!(__symbols.len() >= 3);
let __sym2 = __pop_Variant15(__symbols);
let __sym1 = __pop_Variant49(__symbols);
let __sym0 = __pop_Variant15(__symbols);
let __start = __sym0.0;
let __end = __sym2.2;
let __nt = super::__action1754::<>(source_code, mode, __sym0, __sym1, __sym2);
__symbols.push((__start, __Symbol::Variant37(__nt), __end));
(3, 137)
}
pub(crate) fn __reduce360<
>(
source_code: &str,
mode: Mode,
__lookahead_start: Option<&TextSize>,
__symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)>,
_: core::marker::PhantomData<()>,
) -> (usize, usize)
{
// ExpressionStatement = Test<"all">, ":", Test<"all">, AssignSuffix => ActionFn(1531);
assert!(__symbols.len() >= 4);
let __sym3 = __pop_Variant15(__symbols);
let __sym2 = __pop_Variant15(__symbols);
let __sym1 = __pop_Variant0(__symbols);
let __sym0 = __pop_Variant15(__symbols);
let __start = __sym0.0;
let __end = __sym3.2;
let __nt = super::__action1531::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3);
__symbols.push((__start, __Symbol::Variant37(__nt), __end));
(4, 137)
}
pub(crate) fn __reduce361<
>(
source_code: &str,
mode: Mode,
__lookahead_start: Option<&TextSize>,
__symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)>,
_: core::marker::PhantomData<()>,
) -> (usize, usize)
{
// ExpressionStatement = Test<"all">, ":", Test<"all"> => ActionFn(1532);
assert!(__symbols.len() >= 3);
let __sym2 = __pop_Variant15(__symbols);
let __sym1 = __pop_Variant0(__symbols);
let __sym0 = __pop_Variant15(__symbols);
let __start = __sym0.0;
let __end = __sym2.2;
let __nt = super::__action1532::<>(source_code, mode, __sym0, __sym1, __sym2);
__symbols.push((__start, __Symbol::Variant37(__nt), __end));
(3, 137)
}
pub(crate) fn __reduce363<
>(
source_code: &str,
@ -32789,14 +32751,14 @@ fn __action26<
(_, expression, _): (TextSize, ast::ParenthesizedExpr, TextSize),
(_, suffix, _): (TextSize, alloc::vec::Vec<ast::ParenthesizedExpr>, TextSize),
(_, end_location, _): (TextSize, TextSize, TextSize),
) -> ast::Stmt
) -> Result<ast::Stmt,__lalrpop_util::ParseError<TextSize,token::Tok,LexicalError>>
{
{
// Just an expression, no assignment:
if suffix.is_empty() {
ast::Stmt::Expr(
Ok(ast::Stmt::Expr(
ast::StmtExpr { value: Box::new(expression.into()), range: (location..end_location).into() }
)
))
} else {
let mut targets = vec![set_context(expression.into(), ast::ExprContext::Store)];
let mut values = suffix;
@ -32806,10 +32768,10 @@ fn __action26<
for target in values {
targets.push(set_context(target.into(), ast::ExprContext::Store));
}
ast::Stmt::Assign(
invalid::assignment_targets(&targets)?;
Ok(ast::Stmt::Assign(
ast::StmtAssign { targets, value, range: (location..end_location).into() }
)
))
}
}
}
@ -32825,17 +32787,18 @@ fn __action27<
(_, op, _): (TextSize, ast::Operator, TextSize),
(_, rhs, _): (TextSize, ast::ParenthesizedExpr, TextSize),
(_, end_location, _): (TextSize, TextSize, TextSize),
) -> ast::Stmt
) -> Result<ast::Stmt,__lalrpop_util::ParseError<TextSize,token::Tok,LexicalError>>
{
{
ast::Stmt::AugAssign(
invalid::assignment_target(&target.expr)?;
Ok(ast::Stmt::AugAssign(
ast::StmtAugAssign {
target: Box::new(set_context(target.into(), ast::ExprContext::Store)),
op,
value: Box::new(rhs.into()),
range: (location..end_location).into()
},
)
))
}
}
@ -32851,11 +32814,12 @@ fn __action28<
(_, annotation, _): (TextSize, ast::ParenthesizedExpr, TextSize),
(_, rhs, _): (TextSize, core::option::Option<ast::ParenthesizedExpr>, TextSize),
(_, end_location, _): (TextSize, TextSize, TextSize),
) -> ast::Stmt
) -> Result<ast::Stmt,__lalrpop_util::ParseError<TextSize,token::Tok,LexicalError>>
{
{
let simple = target.expr.is_name_expr();
ast::Stmt::AnnAssign(
invalid::assignment_target(&target.expr)?;
Ok(ast::Stmt::AnnAssign(
ast::StmtAnnAssign {
target: Box::new(set_context(target.into(), ast::ExprContext::Store)),
annotation: Box::new(annotation.into()),
@ -32863,7 +32827,7 @@ fn __action28<
simple,
range: (location..end_location).into()
},
)
))
}
}
@ -48215,7 +48179,7 @@ fn __action797<
__0: (TextSize, ast::ParenthesizedExpr, TextSize),
__1: (TextSize, alloc::vec::Vec<ast::ParenthesizedExpr>, TextSize),
__2: (TextSize, TextSize, TextSize),
) -> ast::Stmt
) -> Result<ast::Stmt,__lalrpop_util::ParseError<TextSize,token::Tok,LexicalError>>
{
let __start0 = __0.0;
let __end0 = __0.0;
@ -48246,7 +48210,7 @@ fn __action798<
__1: (TextSize, ast::Operator, TextSize),
__2: (TextSize, ast::ParenthesizedExpr, TextSize),
__3: (TextSize, TextSize, TextSize),
) -> ast::Stmt
) -> Result<ast::Stmt,__lalrpop_util::ParseError<TextSize,token::Tok,LexicalError>>
{
let __start0 = __0.0;
let __end0 = __0.0;
@ -48279,7 +48243,7 @@ fn __action799<
__2: (TextSize, ast::ParenthesizedExpr, TextSize),
__3: (TextSize, core::option::Option<ast::ParenthesizedExpr>, TextSize),
__4: (TextSize, TextSize, TextSize),
) -> ast::Stmt
) -> Result<ast::Stmt,__lalrpop_util::ParseError<TextSize,token::Tok,LexicalError>>
{
let __start0 = __0.0;
let __end0 = __0.0;
@ -64278,7 +64242,7 @@ fn __action1309<
mode: Mode,
__0: (TextSize, ast::ParenthesizedExpr, TextSize),
__1: (TextSize, alloc::vec::Vec<ast::ParenthesizedExpr>, TextSize),
) -> ast::Stmt
) -> Result<ast::Stmt,__lalrpop_util::ParseError<TextSize,token::Tok,LexicalError>>
{
let __start0 = __1.2;
let __end0 = __1.2;
@ -64307,7 +64271,7 @@ fn __action1310<
__0: (TextSize, ast::ParenthesizedExpr, TextSize),
__1: (TextSize, ast::Operator, TextSize),
__2: (TextSize, ast::ParenthesizedExpr, TextSize),
) -> ast::Stmt
) -> Result<ast::Stmt,__lalrpop_util::ParseError<TextSize,token::Tok,LexicalError>>
{
let __start0 = __2.2;
let __end0 = __2.2;
@ -64338,7 +64302,7 @@ fn __action1311<
__1: (TextSize, token::Tok, TextSize),
__2: (TextSize, ast::ParenthesizedExpr, TextSize),
__3: (TextSize, core::option::Option<ast::ParenthesizedExpr>, TextSize),
) -> ast::Stmt
) -> Result<ast::Stmt,__lalrpop_util::ParseError<TextSize,token::Tok,LexicalError>>
{
let __start0 = __3.2;
let __end0 = __3.2;
@ -71035,7 +70999,7 @@ fn __action1529<
source_code: &str,
mode: Mode,
__0: (TextSize, ast::ParenthesizedExpr, TextSize),
) -> ast::Stmt
) -> Result<ast::Stmt,__lalrpop_util::ParseError<TextSize,token::Tok,LexicalError>>
{
let __start0 = __0.2;
let __end0 = __0.2;
@ -71062,7 +71026,7 @@ fn __action1530<
mode: Mode,
__0: (TextSize, ast::ParenthesizedExpr, TextSize),
__1: (TextSize, alloc::vec::Vec<ast::ParenthesizedExpr>, TextSize),
) -> ast::Stmt
) -> Result<ast::Stmt,__lalrpop_util::ParseError<TextSize,token::Tok,LexicalError>>
{
let __start0 = __1.0;
let __end0 = __1.2;
@ -71090,7 +71054,7 @@ fn __action1531<
__1: (TextSize, token::Tok, TextSize),
__2: (TextSize, ast::ParenthesizedExpr, TextSize),
__3: (TextSize, ast::ParenthesizedExpr, TextSize),
) -> ast::Stmt
) -> Result<ast::Stmt,__lalrpop_util::ParseError<TextSize,token::Tok,LexicalError>>
{
let __start0 = __3.0;
let __end0 = __3.2;
@ -71119,7 +71083,7 @@ fn __action1532<
__0: (TextSize, ast::ParenthesizedExpr, TextSize),
__1: (TextSize, token::Tok, TextSize),
__2: (TextSize, ast::ParenthesizedExpr, TextSize),
) -> ast::Stmt
) -> Result<ast::Stmt,__lalrpop_util::ParseError<TextSize,token::Tok,LexicalError>>
{
let __start0 = __2.2;
let __end0 = __2.2;
@ -78391,7 +78355,7 @@ fn __action1752<
source_code: &str,
mode: Mode,
__0: (TextSize, ast::ParenthesizedExpr, TextSize),
) -> ast::Stmt
) -> Result<ast::Stmt,__lalrpop_util::ParseError<TextSize,token::Tok,LexicalError>>
{
let __start0 = __0.0;
let __end0 = __0.2;
@ -78416,7 +78380,7 @@ fn __action1753<
mode: Mode,
__0: (TextSize, ast::ParenthesizedExpr, TextSize),
__1: (TextSize, alloc::vec::Vec<ast::ParenthesizedExpr>, TextSize),
) -> ast::Stmt
) -> Result<ast::Stmt,__lalrpop_util::ParseError<TextSize,token::Tok,LexicalError>>
{
let __start0 = __0.0;
let __end0 = __0.2;
@ -78443,7 +78407,7 @@ fn __action1754<
__0: (TextSize, ast::ParenthesizedExpr, TextSize),
__1: (TextSize, ast::Operator, TextSize),
__2: (TextSize, ast::ParenthesizedExpr, TextSize),
) -> ast::Stmt
) -> Result<ast::Stmt,__lalrpop_util::ParseError<TextSize,token::Tok,LexicalError>>
{
let __start0 = __0.0;
let __end0 = __0.2;

@ -0,0 +1,33 @@
---
source: crates/ruff_python_parser/src/invalid.rs
expression: ast
---
Ok(
[
Expr(
StmtExpr {
range: 0..8,
value: NamedExpr(
ExprNamedExpr {
range: 1..7,
target: Name(
ExprName {
range: 1..2,
id: "x",
ctx: Store,
},
),
value: NumberLiteral(
ExprNumberLiteral {
range: 6..7,
value: Int(
5,
),
},
),
},
),
},
),
],
)

@ -0,0 +1,40 @@
---
source: crates/ruff_python_parser/src/invalid.rs
expression: ast
---
Ok(
[
Assign(
StmtAssign {
range: 0..12,
targets: [
Attribute(
ExprAttribute {
range: 0..7,
value: Name(
ExprName {
range: 0..3,
id: "foo",
ctx: Load,
},
),
attr: Identifier {
id: "bar",
range: 4..7,
},
ctx: Store,
},
),
],
value: NumberLiteral(
ExprNumberLiteral {
range: 10..12,
value: Int(
42,
),
},
),
},
),
],
)

@ -0,0 +1,41 @@
---
source: crates/ruff_python_parser/src/invalid.rs
expression: ast
---
Ok(
[
Assign(
StmtAssign {
range: 0..12,
targets: [
Attribute(
ExprAttribute {
range: 0..7,
value: StringLiteral(
ExprStringLiteral {
range: 0..5,
value: "foo",
unicode: false,
implicit_concatenated: false,
},
),
attr: Identifier {
id: "y",
range: 6..7,
},
ctx: Store,
},
),
],
value: NumberLiteral(
ExprNumberLiteral {
range: 10..12,
value: Int(
42,
),
},
),
},
),
],
)

@ -0,0 +1,20 @@
---
source: crates/ruff_python_parser/src/invalid.rs
expression: ast
---
Ok(
Module(
ModModule {
range: 0..9,
body: [
IpyEscapeCommand(
StmtIpyEscapeCommand {
range: 0..9,
kind: Shell,
value: "foo = 42",
},
),
],
},
),
)

@ -0,0 +1,76 @@
---
source: crates/ruff_python_parser/src/invalid.rs
expression: ast
---
Ok(
[
Assign(
StmtAssign {
range: 0..21,
targets: [
List(
ExprList {
range: 0..9,
elts: [
Name(
ExprName {
range: 1..2,
id: "x",
ctx: Store,
},
),
Name(
ExprName {
range: 4..5,
id: "y",
ctx: Store,
},
),
Name(
ExprName {
range: 7..8,
id: "z",
ctx: Store,
},
),
],
ctx: Store,
},
),
],
value: List(
ExprList {
range: 12..21,
elts: [
NumberLiteral(
ExprNumberLiteral {
range: 13..14,
value: Int(
1,
),
},
),
NumberLiteral(
ExprNumberLiteral {
range: 16..17,
value: Int(
2,
),
},
),
NumberLiteral(
ExprNumberLiteral {
range: 19..20,
value: Int(
3,
),
},
),
],
ctx: Load,
},
),
},
),
],
)

@ -0,0 +1,30 @@
---
source: crates/ruff_python_parser/src/invalid.rs
expression: ast
---
Ok(
[
Assign(
StmtAssign {
range: 0..8,
targets: [
Name(
ExprName {
range: 0..3,
id: "foo",
ctx: Store,
},
),
],
value: NumberLiteral(
ExprNumberLiteral {
range: 6..8,
value: Int(
42,
),
},
),
},
),
],
)

@ -0,0 +1,70 @@
---
source: crates/ruff_python_parser/src/invalid.rs
expression: ast
---
Ok(
[
Assign(
StmtAssign {
range: 0..13,
targets: [
Subscript(
ExprSubscript {
range: 0..6,
value: Name(
ExprName {
range: 0..1,
id: "x",
ctx: Load,
},
),
slice: Slice(
ExprSlice {
range: 2..5,
lower: Some(
NumberLiteral(
ExprNumberLiteral {
range: 2..3,
value: Int(
1,
),
},
),
),
upper: Some(
NumberLiteral(
ExprNumberLiteral {
range: 4..5,
value: Int(
2,
),
},
),
),
step: None,
},
),
ctx: Store,
},
),
],
value: List(
ExprList {
range: 9..13,
elts: [
NumberLiteral(
ExprNumberLiteral {
range: 10..12,
value: Int(
42,
),
},
),
],
ctx: Load,
},
),
},
),
],
)

@ -0,0 +1,71 @@
---
source: crates/ruff_python_parser/src/invalid.rs
expression: ast
---
Ok(
[
Assign(
StmtAssign {
range: 0..13,
targets: [
Subscript(
ExprSubscript {
range: 0..6,
value: NumberLiteral(
ExprNumberLiteral {
range: 0..1,
value: Int(
5,
),
},
),
slice: Slice(
ExprSlice {
range: 2..5,
lower: Some(
NumberLiteral(
ExprNumberLiteral {
range: 2..3,
value: Int(
1,
),
},
),
),
upper: Some(
NumberLiteral(
ExprNumberLiteral {
range: 4..5,
value: Int(
2,
),
},
),
),
step: None,
},
),
ctx: Store,
},
),
],
value: List(
ExprList {
range: 9..13,
elts: [
NumberLiteral(
ExprNumberLiteral {
range: 10..12,
value: Int(
42,
),
},
),
],
ctx: Load,
},
),
},
),
],
)

@ -0,0 +1,36 @@
---
source: crates/ruff_python_parser/src/invalid.rs
expression: ast
---
Ok(
[
Assign(
StmtAssign {
range: 0..9,
targets: [
Starred(
ExprStarred {
range: 0..4,
value: Name(
ExprName {
range: 1..4,
id: "foo",
ctx: Store,
},
),
ctx: Store,
},
),
],
value: NumberLiteral(
ExprNumberLiteral {
range: 7..9,
value: Int(
42,
),
},
),
},
),
],
)

@ -0,0 +1,44 @@
---
source: crates/ruff_python_parser/src/invalid.rs
expression: ast
---
Ok(
[
Assign(
StmtAssign {
range: 0..9,
targets: [
Subscript(
ExprSubscript {
range: 0..4,
value: Name(
ExprName {
range: 0..1,
id: "x",
ctx: Load,
},
),
slice: NumberLiteral(
ExprNumberLiteral {
range: 2..3,
value: Int(
0,
),
},
),
ctx: Store,
},
),
],
value: NumberLiteral(
ExprNumberLiteral {
range: 7..9,
value: Int(
42,
),
},
),
},
),
],
)

@ -0,0 +1,45 @@
---
source: crates/ruff_python_parser/src/invalid.rs
expression: ast
---
Ok(
[
Assign(
StmtAssign {
range: 0..9,
targets: [
Subscript(
ExprSubscript {
range: 0..4,
value: NumberLiteral(
ExprNumberLiteral {
range: 0..1,
value: Int(
5,
),
},
),
slice: NumberLiteral(
ExprNumberLiteral {
range: 2..3,
value: Int(
0,
),
},
),
ctx: Store,
},
),
],
value: NumberLiteral(
ExprNumberLiteral {
range: 7..9,
value: Int(
42,
),
},
),
},
),
],
)

@ -0,0 +1,76 @@
---
source: crates/ruff_python_parser/src/invalid.rs
expression: ast
---
Ok(
[
Assign(
StmtAssign {
range: 0..21,
targets: [
Tuple(
ExprTuple {
range: 0..9,
elts: [
Name(
ExprName {
range: 1..2,
id: "x",
ctx: Store,
},
),
Name(
ExprName {
range: 4..5,
id: "y",
ctx: Store,
},
),
Name(
ExprName {
range: 7..8,
id: "z",
ctx: Store,
},
),
],
ctx: Store,
},
),
],
value: Tuple(
ExprTuple {
range: 12..21,
elts: [
NumberLiteral(
ExprNumberLiteral {
range: 13..14,
value: Int(
1,
),
},
),
NumberLiteral(
ExprNumberLiteral {
range: 16..17,
value: Int(
2,
),
},
),
NumberLiteral(
ExprNumberLiteral {
range: 19..20,
value: Int(
3,
),
},
),
],
ctx: Load,
},
),
},
),
],
)