mirror of
https://github.com/astral-sh/ruff.git
synced 2025-09-29 05:15:12 +00:00
Use Jupyter
mode while parsing Notebook files (#5552)
## Summary

Enable using the new `Mode::Jupyter` for the tokenizer/parser to parse Jupyter line magic tokens. The individual calls to the lexer (i.e., `lex_starts_at`) made by various rules should consider the context of the source code (is this content from a Jupyter Notebook?). Thus, a new field `source_type` (of type `PySourceType`) is added to `Checker`, which is passed around as an argument to the relevant functions. This is then used to determine the `Mode` for the lexer.

## Test Plan

Add new test cases to make sure that the magic statement is considered while generating the diagnostic and autofix:

* For `I001`, if there's a magic statement in between two import blocks, they should be sorted independently.

fixes: #6090
This commit is contained in:
parent
d788957ec4
commit
32fa05765a
52 changed files with 652 additions and 196 deletions
|
@@ -36,6 +36,7 @@ use regex::Regex;
|
|||
use ruff_python_ast::statement_visitor::{walk_body, walk_stmt, StatementVisitor};
|
||||
use ruff_python_ast::visitor::{walk_expr, Visitor};
|
||||
use ruff_python_ast::{Expr, Ranged, Stmt, Suite};
|
||||
use ruff_python_parser::Mode;
|
||||
use ruff_text_size::TextRange;
|
||||
use std::collections::HashMap;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
@@ -275,7 +276,7 @@ impl Strategy for StrategyRemoveToken {
|
|||
input: &'a str,
|
||||
_ast: &'a Suite,
|
||||
) -> Result<Box<dyn ExactSizeStringIter + 'a>> {
|
||||
let token_ranges: Vec<_> = ruff_python_parser::tokenize(input)
|
||||
let token_ranges: Vec<_> = ruff_python_parser::tokenize(input, Mode::Module)
|
||||
.into_iter()
|
||||
// At this point we know we have valid python code
|
||||
.map(Result::unwrap)
|
||||
|
@@ -320,9 +321,9 @@ fn minimization_step(
|
|||
pattern: &Regex,
|
||||
last_strategy_and_idx: Option<(&'static dyn Strategy, usize)>,
|
||||
) -> Result<Option<(&'static dyn Strategy, usize, String)>> {
|
||||
let tokens = ruff_python_parser::tokenize(input);
|
||||
let ast =
|
||||
ruff_python_parser::parse_program_tokens(tokens, "input.py").context("not valid python")?;
|
||||
let tokens = ruff_python_parser::tokenize(input, Mode::Module);
|
||||
let ast = ruff_python_parser::parse_program_tokens(tokens, "input.py", false)
|
||||
.context("not valid python")?;
|
||||
|
||||
// Try the last succeeding strategy first, skipping all that failed last time
|
||||
if let Some((last_strategy, last_idx)) = last_strategy_and_idx {
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue