# Add support for PEP 701 (#7376)
## Summary

This PR adds support for PEP 701 in Ruff. It is a rollup of all the individual PRs, which were split out for logical separation and easier code review. Refer to each pull request for a detailed description of its change, and to this PR's description for the list of pull requests it rolls up.

## Test Plan

### Formatter ecosystem checks

Explanation for the change in the ecosystem check: https://github.com/astral-sh/ruff/pull/7597#issue-1908878183

#### `main`

```
| project      | similarity index | total files | changed files |
|--------------|-----------------:|------------:|--------------:|
| cpython      |          0.76083 |        1789 |          1631 |
| django       |          0.99983 |        2760 |            36 |
| transformers |          0.99963 |        2587 |           319 |
| twine        |          1.00000 |          33 |             0 |
| typeshed     |          0.99983 |        3496 |            18 |
| warehouse    |          0.99967 |         648 |            15 |
| zulip        |          0.99972 |        1437 |            21 |
```

#### `dhruv/pep-701`

```
| project      | similarity index | total files | changed files |
|--------------|-----------------:|------------:|--------------:|
| cpython      |          0.76051 |        1789 |          1632 |
| django       |          0.99983 |        2760 |            36 |
| transformers |          0.99963 |        2587 |           319 |
| twine        |          1.00000 |          33 |             0 |
| typeshed     |          0.99983 |        3496 |            18 |
| warehouse    |          0.99967 |         648 |            15 |
| zulip        |          0.99972 |        1437 |            21 |
```

The only project that differs between the two runs is `cpython` (similarity 0.76083 → 0.76051; changed files 1631 → 1632).
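To make the behavior change concrete, here is a minimal sketch (not from the PR itself) of the kind of source that PEP 701 legalizes and that the parser accepts after this change. It assumes `parse_suite` is callable from the crate root like the other entry points in this diff; the `names` lookup is purely illustrative:

```rust
use ruff_python_parser::parse_suite;

fn main() {
    // PEP 701 lifts the old restriction that a replacement field may not
    // reuse the enclosing f-string's quote character, and allows f-strings
    // to nest to arbitrary depth.
    let source = r#"f"alice = {names["alice"]}""#;
    // Before this PR, the inner double quotes were a syntax error.
    assert!(parse_suite(source, "<embedded>").is_ok());
}
```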
Commit: e62e245c61 (parent: 78b8741352)
115 changed files with 44780 additions and 31370 deletions
```diff
@@ -50,7 +50,7 @@ use ruff_python_ast::{Mod, ModModule, Suite};
 /// ```
 pub fn parse_program(source: &str, source_path: &str) -> Result<ModModule, ParseError> {
     let lexer = lex(source, Mode::Module);
-    match parse_tokens(lexer, Mode::Module, source_path)? {
+    match parse_tokens(lexer, source, Mode::Module, source_path)? {
         Mod::Module(m) => Ok(m),
         Mod::Expression(_) => unreachable!("Mode::Module doesn't return other variant"),
     }
@@ -78,7 +78,7 @@ pub fn parse_suite(source: &str, source_path: &str) -> Result<Suite, ParseError>
 /// ```
 pub fn parse_expression(source: &str, source_path: &str) -> Result<ast::Expr, ParseError> {
     let lexer = lex(source, Mode::Expression);
-    match parse_tokens(lexer, Mode::Expression, source_path)? {
+    match parse_tokens(lexer, source, Mode::Expression, source_path)? {
         Mod::Expression(expression) => Ok(*expression.body),
         Mod::Module(_m) => unreachable!("Mode::Expression doesn't return other variant"),
     }
@@ -107,7 +107,7 @@ pub fn parse_expression_starts_at(
     offset: TextSize,
 ) -> Result<ast::Expr, ParseError> {
     let lexer = lex_starts_at(source, Mode::Module, offset);
-    match parse_tokens(lexer, Mode::Expression, source_path)? {
+    match parse_tokens(lexer, source, Mode::Expression, source_path)? {
         Mod::Expression(expression) => Ok(*expression.body),
         Mod::Module(_m) => unreachable!("Mode::Expression doesn't return other variant"),
     }
@@ -193,7 +193,7 @@ pub fn parse_starts_at(
     offset: TextSize,
 ) -> Result<Mod, ParseError> {
     let lxr = lexer::lex_starts_at(source, mode, offset);
-    parse_tokens(lxr, mode, source_path)
+    parse_tokens(lxr, source, mode, source_path)
 }
 
 /// Parse an iterator of [`LexResult`]s using the specified [`Mode`].
@@ -208,11 +208,13 @@ pub fn parse_starts_at(
 /// ```
 /// use ruff_python_parser::{lexer::lex, Mode, parse_tokens};
 ///
-/// let expr = parse_tokens(lex("1 + 2", Mode::Expression), Mode::Expression, "<embedded>");
+/// let source = "1 + 2";
+/// let expr = parse_tokens(lex(source, Mode::Expression), source, Mode::Expression, "<embedded>");
 /// assert!(expr.is_ok());
 /// ```
 pub fn parse_tokens(
     lxr: impl IntoIterator<Item = LexResult>,
+    source: &str,
     mode: Mode,
     source_path: &str,
 ) -> Result<Mod, ParseError> {
@@ -220,6 +222,7 @@ pub fn parse_tokens(
 
     parse_filtered_tokens(
         lxr.filter_ok(|(tok, _)| !matches!(tok, Tok::Comment { .. } | Tok::NonLogicalNewline)),
+        source,
         mode,
         source_path,
     )
@@ -228,6 +231,7 @@ pub fn parse_tokens(
 /// Parse tokens into an AST like [`parse_tokens`], but we already know all tokens are valid.
 pub fn parse_ok_tokens(
     lxr: impl IntoIterator<Item = Spanned>,
+    source: &str,
     mode: Mode,
     source_path: &str,
 ) -> Result<Mod, ParseError> {
@@ -239,12 +243,13 @@ pub fn parse_ok_tokens(
         .chain(lxr)
         .map(|(t, range)| (range.start(), t, range.end()));
     python::TopParser::new()
-        .parse(mode, lexer)
+        .parse(source, mode, lexer)
         .map_err(|e| parse_error_from_lalrpop(e, source_path))
 }
 
 fn parse_filtered_tokens(
     lxr: impl IntoIterator<Item = LexResult>,
+    source: &str,
     mode: Mode,
     source_path: &str,
 ) -> Result<Mod, ParseError> {
@@ -252,6 +257,7 @@ fn parse_filtered_tokens(
     let lexer = iter::once(Ok(marker_token)).chain(lxr);
     python::TopParser::new()
         .parse(
+            source,
             mode,
             lexer.map_ok(|(t, range)| (range.start(), t, range.end())),
         )
@@ -1253,11 +1259,58 @@ a = 1
 "#
         .trim();
         let lxr = lexer::lex_starts_at(source, Mode::Ipython, TextSize::default());
-        let parse_err = parse_tokens(lxr, Mode::Module, "<test>").unwrap_err();
+        let parse_err = parse_tokens(lxr, source, Mode::Module, "<test>").unwrap_err();
         assert_eq!(
             parse_err.to_string(),
             "IPython escape commands are only allowed in `Mode::Ipython` at byte offset 6"
                 .to_string()
         );
     }
+
+    #[test]
+    fn test_fstrings() {
+        let parse_ast = parse_suite(
+            r#"
+f"{" f"}"
+f"{foo!s}"
+f"{3,}"
+f"{3!=4:}"
+f'{3:{"}"}>10}'
+f'{3:{"{"}>10}'
+f"{ foo = }"
+f"{ foo = :.3f }"
+f"{ foo = !s }"
+f"{ 1, 2 = }"
+f'{f"{3.1415=:.1f}":*^20}'
+
+{"foo " f"bar {x + y} " "baz": 10}
+match foo:
+    case "foo " f"bar {x + y} " "baz":
+        pass
+
+f"\{foo}\{bar:\}"
+f"\\{{foo\\}}"
+"#
+            .trim(),
+            "<test>",
+        )
+        .unwrap();
+        insta::assert_debug_snapshot!(parse_ast);
+    }
+
+    #[test]
+    fn test_fstrings_with_unicode() {
+        let parse_ast = parse_suite(
+            r#"
+u"foo" f"{bar}" "baz" " some"
+"foo" f"{bar}" u"baz" " some"
+"foo" f"{bar}" "baz" u" some"
+u"foo" f"bar {baz} really" u"bar" "no"
+"#
+            .trim(),
+            "<test>",
+        )
+        .unwrap();
+        insta::assert_debug_snapshot!(parse_ast);
+    }
 }
```
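For downstream callers, the migration implied by the new signatures is mechanical: thread the original source string through alongside the token stream. A hedged sketch mirroring the updated doc example above (the f-string input is illustrative, not from the PR):

```rust
use ruff_python_parser::{lexer::lex, parse_tokens, Mode};

fn main() {
    // The parser now receives the source text in addition to the tokens so it
    // can reprocess f-string contents, which PEP 701 lets nest arbitrarily.
    let source = r#"print(f"value: {d[f"{key}"]}")"#;
    let tokens = lex(source, Mode::Module);
    assert!(parse_tokens(tokens, source, Mode::Module, "<embedded>").is_ok());
}
```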