Mirror of https://github.com/astral-sh/ruff.git (synced 2025-08-03 18:28:24 +00:00)

Use Jupyter mode while parsing Notebook files (#5552)
## Summary

Enable the new `Mode::Jupyter` for the tokenizer/parser so that Jupyter line-magic tokens are parsed. Each call to the lexer (i.e., `lex_starts_at`) made by the various rules should take the context of the source code into account: is this content from a Jupyter Notebook? To that end, a new field `source_type` (of type `PySourceType`) is added to `Checker` and passed as an argument to the relevant functions, where it is used to determine the `Mode` for the lexer; the recurring call-site pattern is sketched below.

## Test Plan

Add new test cases to make sure that magic statements are taken into account when generating diagnostics and autofixes:

* For `I001`, if there is a magic statement between two import blocks, the blocks should be sorted independently.

Fixes #6090
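For reference, the recurring call-site change in this diff looks roughly like the following. This is an illustrative sketch assembled from the calls visible in the diff, not a verbatim excerpt; the exact signatures live in the `ruff_python_parser`, `ruff_python_ast`, and `ruff_text_size` crates.

```rust
use ruff_python_ast::PySourceType;
use ruff_python_parser::{lexer, AsMode};
use ruff_text_size::TextSize;

// Sketch: lex a slice of source starting at `start`, deriving the lexer mode
// from the file's source type instead of hard-coding `Mode::Module`. For
// `.ipynb` files this selects Jupyter mode, so line magics such as
// `%matplotlib inline` are tokenized rather than rejected as syntax errors.
fn lex_with_source_type(contents: &str, start: TextSize, source_type: PySourceType) {
    for (tok, range) in lexer::lex_starts_at(contents, source_type.as_mode(), start).flatten() {
        // Rules inspect `tok`/`range` here exactly as before; only the mode changed.
        let _ = (tok, range);
    }
}
```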
Commit 32fa05765a (parent d788957ec4) · 52 changed files with 652 additions and 196 deletions
crates/ruff/resources/test/fixtures/jupyter/cell/cell_magic.json (vendored, new file, 8 lines)

@@ -0,0 +1,8 @@
{
 "execution_count": null,
 "cell_type": "code",
 "id": "1",
 "metadata": {},
 "outputs": [],
 "source": ["%%timeit\n", "print('hello world')"]
}
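The fixture above contains a cell magic (`%%timeit`). Later in this diff, `Cell::is_valid_code_cell` is narrowed so that only cells containing a *cell* magic are skipped, while *line* magics are left for the Jupyter-mode parser to handle. A simplified sketch of that check follows; it uses a stand-in `SourceValue` enum rather than the crate's actual type.

```rust
// Stand-in for the notebook module's SourceValue (assumption for illustration);
// the real type lives under crates/ruff/src/jupyter.
enum SourceValue {
    String(String),
    StringArray(Vec<String>),
}

/// A code cell is skipped only when some line starts with `%%` (a cell magic
/// such as `%%timeit`). Line magics like `%matplotlib inline` no longer cause
/// the cell to be ignored; they are tokenized by the Jupyter-mode lexer.
fn contains_cell_magic(source: &SourceValue) -> bool {
    match source {
        SourceValue::String(string) => string
            .lines()
            .any(|line| line.trim_start().starts_with("%%")),
        SourceValue::StringArray(lines) => lines
            .iter()
            .any(|line| line.trim_start().starts_with("%%")),
    }
}
```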
@@ -25,6 +25,23 @@
"def foo():\n",
" pass"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "16214f6f-bb32-4594-81be-79fb27c6ec92",
"metadata": {},
"outputs": [],
"source": [
"from pathlib import Path\n",
"import sys\n",
"\n",
"%matplotlib \\\n",
" --inline\n",
"\n",
"import math\n",
"import abc"
]
}
],
"metadata": {
@@ -27,6 +27,23 @@
"def foo():\n",
" pass"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "6d6c55c6-4a34-4662-914b-4ee11c9c24a5",
"metadata": {},
"outputs": [],
"source": [
"import sys\n",
"from pathlib import Path\n",
"\n",
"%matplotlib \\\n",
" --inline\n",
"\n",
"import abc\n",
"import math"
]
}
],
"metadata": {
crates/ruff/resources/test/fixtures/jupyter/line_magics.ipynb (vendored, new file, 52 lines)

@@ -0,0 +1,52 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "eab4754a-d6df-4b41-8ee8-7e23aef440f9",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import math\n",
|
||||
"\n",
|
||||
"%matplotlib inline\n",
|
||||
"\n",
|
||||
"import os\n",
|
||||
"\n",
|
||||
"_ = math.pi"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "2b0e2986-1b87-4bb6-9b1d-c11ca1decd87",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"%%timeit\n",
|
||||
"import sys"
|
||||
]
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python (ruff)",
|
||||
"language": "python",
|
||||
"name": "ruff"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.11.3"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
|
crates/ruff/resources/test/fixtures/jupyter/line_magics_expected.ipynb (vendored, new file, 51 lines)

@@ -0,0 +1,51 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "cad32845-44f9-4a53-8b8c-a6b1bb3f3378",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import math\n",
|
||||
"\n",
|
||||
"%matplotlib inline\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"_ = math.pi"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "d7b8e967-8b4a-493b-b6f7-d5cecfb3a5c3",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"%%timeit\n",
|
||||
"import sys"
|
||||
]
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python (ruff)",
|
||||
"language": "python",
|
||||
"name": "ruff"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.11.3"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
|
|
@ -3,10 +3,12 @@
|
|||
use anyhow::{bail, Result};
|
||||
|
||||
use ruff_diagnostics::Edit;
|
||||
use ruff_python_ast::{self as ast, Arguments, ExceptHandler, Expr, Keyword, Ranged, Stmt};
|
||||
use ruff_python_ast::{
|
||||
self as ast, Arguments, ExceptHandler, Expr, Keyword, PySourceType, Ranged, Stmt,
|
||||
};
|
||||
use ruff_python_codegen::Stylist;
|
||||
use ruff_python_index::Indexer;
|
||||
use ruff_python_parser::{lexer, Mode};
|
||||
use ruff_python_parser::{lexer, AsMode};
|
||||
use ruff_python_trivia::{has_leading_content, is_python_whitespace, PythonWhitespace};
|
||||
use ruff_source_file::{Locator, NewlineWithTrailingNewline};
|
||||
use ruff_text_size::{TextLen, TextRange, TextSize};
|
||||
|
@ -88,6 +90,7 @@ pub(crate) fn remove_argument<T: Ranged>(
|
|||
arguments: &Arguments,
|
||||
parentheses: Parentheses,
|
||||
locator: &Locator,
|
||||
source_type: PySourceType,
|
||||
) -> Result<Edit> {
|
||||
// TODO(sbrugman): Preserve trailing comments.
|
||||
if arguments.keywords.len() + arguments.args.len() > 1 {
|
||||
|
@ -106,7 +109,7 @@ pub(crate) fn remove_argument<T: Ranged>(
|
|||
let mut seen_comma = false;
|
||||
for (tok, range) in lexer::lex_starts_at(
|
||||
locator.slice(arguments.range()),
|
||||
Mode::Module,
|
||||
source_type.as_mode(),
|
||||
arguments.start(),
|
||||
)
|
||||
.flatten()
|
||||
|
@ -135,7 +138,7 @@ pub(crate) fn remove_argument<T: Ranged>(
|
|||
// previous comma to the end of the argument.
|
||||
for (tok, range) in lexer::lex_starts_at(
|
||||
locator.slice(arguments.range()),
|
||||
Mode::Module,
|
||||
source_type.as_mode(),
|
||||
arguments.start(),
|
||||
)
|
||||
.flatten()
|
||||
|
|
|
@ -37,7 +37,7 @@ pub(crate) fn deferred_scopes(checker: &mut Checker) {
|
|||
// Identify any valid runtime imports. If a module is imported at runtime, and
|
||||
// used at runtime, then by default, we avoid flagging any other
|
||||
// imports from that model as typing-only.
|
||||
let enforce_typing_imports = !checker.is_stub
|
||||
let enforce_typing_imports = !checker.source_type.is_stub()
|
||||
&& checker.any_enabled(&[
|
||||
Rule::RuntimeImportInTypeCheckingBlock,
|
||||
Rule::TypingOnlyFirstPartyImport,
|
||||
|
@ -243,7 +243,7 @@ pub(crate) fn deferred_scopes(checker: &mut Checker) {
|
|||
pyflakes::rules::unused_annotation(checker, scope, &mut diagnostics);
|
||||
}
|
||||
|
||||
if !checker.is_stub {
|
||||
if !checker.source_type.is_stub() {
|
||||
if checker.any_enabled(&[
|
||||
Rule::UnusedClassMethodArgument,
|
||||
Rule::UnusedFunctionArgument,
|
||||
|
|
|
@ -30,7 +30,7 @@ pub(crate) fn definitions(checker: &mut Checker) {
|
|||
Rule::MissingTypeKwargs,
|
||||
Rule::MissingTypeSelf,
|
||||
]);
|
||||
let enforce_stubs = checker.is_stub && checker.enabled(Rule::DocstringInStub);
|
||||
let enforce_stubs = checker.source_type.is_stub() && checker.enabled(Rule::DocstringInStub);
|
||||
let enforce_stubs_and_runtime = checker.enabled(Rule::IterMethodReturnIterable);
|
||||
let enforce_docstrings = checker.any_enabled(&[
|
||||
Rule::BlankLineAfterLastSection,
|
||||
|
|
|
@ -31,7 +31,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
|||
if let Some(operator) = typing::to_pep604_operator(value, slice, &checker.semantic)
|
||||
{
|
||||
if checker.enabled(Rule::FutureRewritableTypeAnnotation) {
|
||||
if !checker.is_stub
|
||||
if !checker.source_type.is_stub()
|
||||
&& checker.settings.target_version < PythonVersion::Py310
|
||||
&& checker.settings.target_version >= PythonVersion::Py37
|
||||
&& !checker.semantic.future_annotations()
|
||||
|
@ -44,7 +44,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
|||
}
|
||||
}
|
||||
if checker.enabled(Rule::NonPEP604Annotation) {
|
||||
if checker.is_stub
|
||||
if checker.source_type.is_stub()
|
||||
|| checker.settings.target_version >= PythonVersion::Py310
|
||||
|| (checker.settings.target_version >= PythonVersion::Py37
|
||||
&& checker.semantic.future_annotations()
|
||||
|
@ -59,7 +59,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
|||
|
||||
// Ex) list[...]
|
||||
if checker.enabled(Rule::FutureRequiredTypeAnnotation) {
|
||||
if !checker.is_stub
|
||||
if !checker.source_type.is_stub()
|
||||
&& checker.settings.target_version < PythonVersion::Py39
|
||||
&& !checker.semantic.future_annotations()
|
||||
&& checker.semantic.in_annotation()
|
||||
|
@ -176,7 +176,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
|||
typing::to_pep585_generic(expr, &checker.semantic)
|
||||
{
|
||||
if checker.enabled(Rule::FutureRewritableTypeAnnotation) {
|
||||
if !checker.is_stub
|
||||
if !checker.source_type.is_stub()
|
||||
&& checker.settings.target_version < PythonVersion::Py39
|
||||
&& checker.settings.target_version >= PythonVersion::Py37
|
||||
&& !checker.semantic.future_annotations()
|
||||
|
@ -187,7 +187,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
|||
}
|
||||
}
|
||||
if checker.enabled(Rule::NonPEP585Annotation) {
|
||||
if checker.is_stub
|
||||
if checker.source_type.is_stub()
|
||||
|| checker.settings.target_version >= PythonVersion::Py39
|
||||
|| (checker.settings.target_version >= PythonVersion::Py37
|
||||
&& checker.semantic.future_annotations()
|
||||
|
@ -272,7 +272,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
|||
]) {
|
||||
if let Some(replacement) = typing::to_pep585_generic(expr, &checker.semantic) {
|
||||
if checker.enabled(Rule::FutureRewritableTypeAnnotation) {
|
||||
if !checker.is_stub
|
||||
if !checker.source_type.is_stub()
|
||||
&& checker.settings.target_version < PythonVersion::Py39
|
||||
&& checker.settings.target_version >= PythonVersion::Py37
|
||||
&& !checker.semantic.future_annotations()
|
||||
|
@ -285,7 +285,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
|||
}
|
||||
}
|
||||
if checker.enabled(Rule::NonPEP585Annotation) {
|
||||
if checker.is_stub
|
||||
if checker.source_type.is_stub()
|
||||
|| checker.settings.target_version >= PythonVersion::Py39
|
||||
|| (checker.settings.target_version >= PythonVersion::Py37
|
||||
&& checker.semantic.future_annotations()
|
||||
|
@ -1066,7 +1066,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
|||
}) => {
|
||||
// Ex) `str | None`
|
||||
if checker.enabled(Rule::FutureRequiredTypeAnnotation) {
|
||||
if !checker.is_stub
|
||||
if !checker.source_type.is_stub()
|
||||
&& checker.settings.target_version < PythonVersion::Py310
|
||||
&& !checker.semantic.future_annotations()
|
||||
&& checker.semantic.in_annotation()
|
||||
|
@ -1212,7 +1212,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
|||
kind: _,
|
||||
range: _,
|
||||
}) => {
|
||||
if checker.is_stub && checker.enabled(Rule::NumericLiteralTooLong) {
|
||||
if checker.source_type.is_stub() && checker.enabled(Rule::NumericLiteralTooLong) {
|
||||
flake8_pyi::rules::numeric_literal_too_long(checker, expr);
|
||||
}
|
||||
}
|
||||
|
@ -1221,7 +1221,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
|||
kind: _,
|
||||
range: _,
|
||||
}) => {
|
||||
if checker.is_stub && checker.enabled(Rule::StringOrBytesTooLong) {
|
||||
if checker.source_type.is_stub() && checker.enabled(Rule::StringOrBytesTooLong) {
|
||||
flake8_pyi::rules::string_or_bytes_too_long(checker, expr);
|
||||
}
|
||||
}
|
||||
|
@ -1249,7 +1249,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
|||
if checker.enabled(Rule::UnicodeKindPrefix) {
|
||||
pyupgrade::rules::unicode_kind_prefix(checker, expr, kind.as_deref());
|
||||
}
|
||||
if checker.is_stub {
|
||||
if checker.source_type.is_stub() {
|
||||
if checker.enabled(Rule::StringOrBytesTooLong) {
|
||||
flake8_pyi::rules::string_or_bytes_too_long(checker, expr);
|
||||
}
|
||||
|
|
|
@ -15,7 +15,7 @@ pub(crate) fn parameters(parameters: &Parameters, checker: &mut Checker) {
|
|||
if checker.settings.rules.enabled(Rule::ImplicitOptional) {
|
||||
ruff::rules::implicit_optional(checker, parameters);
|
||||
}
|
||||
if checker.is_stub {
|
||||
if checker.source_type.is_stub() {
|
||||
if checker.enabled(Rule::TypedArgumentDefaultInStub) {
|
||||
flake8_pyi::rules::typed_argument_simple_defaults(checker, parameters);
|
||||
}
|
||||
|
|
|
@ -133,7 +133,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
|
|||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
if checker.is_stub {
|
||||
if checker.source_type.is_stub() {
|
||||
if checker.enabled(Rule::PassStatementStubBody) {
|
||||
flake8_pyi::rules::pass_statement_stub_body(checker, body);
|
||||
}
|
||||
|
@ -168,12 +168,14 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
|
|||
type_params.as_ref(),
|
||||
);
|
||||
}
|
||||
if checker.is_stub {
|
||||
if checker.source_type.is_stub() {
|
||||
if checker.enabled(Rule::StrOrReprDefinedInStub) {
|
||||
flake8_pyi::rules::str_or_repr_defined_in_stub(checker, stmt);
|
||||
}
|
||||
}
|
||||
if checker.is_stub || checker.settings.target_version >= PythonVersion::Py311 {
|
||||
if checker.source_type.is_stub()
|
||||
|| checker.settings.target_version >= PythonVersion::Py311
|
||||
{
|
||||
if checker.enabled(Rule::NoReturnArgumentAnnotationInStub) {
|
||||
flake8_pyi::rules::no_return_argument_annotation(checker, parameters);
|
||||
}
|
||||
|
@ -412,7 +414,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
|
|||
body,
|
||||
);
|
||||
}
|
||||
if !checker.is_stub {
|
||||
if !checker.source_type.is_stub() {
|
||||
if checker.enabled(Rule::DjangoModelWithoutDunderStr) {
|
||||
flake8_django::rules::model_without_dunder_str(checker, class_def);
|
||||
}
|
||||
|
@ -453,7 +455,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
|
|||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
if !checker.is_stub {
|
||||
if !checker.source_type.is_stub() {
|
||||
if checker.any_enabled(&[
|
||||
Rule::AbstractBaseClassWithoutAbstractMethod,
|
||||
Rule::EmptyMethodWithoutAbstractDecorator,
|
||||
|
@ -467,7 +469,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
|
|||
);
|
||||
}
|
||||
}
|
||||
if checker.is_stub {
|
||||
if checker.source_type.is_stub() {
|
||||
if checker.enabled(Rule::PassStatementStubBody) {
|
||||
flake8_pyi::rules::pass_statement_stub_body(checker, body);
|
||||
}
|
||||
|
@ -569,7 +571,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
|
|||
alias,
|
||||
);
|
||||
}
|
||||
if !checker.is_stub {
|
||||
if !checker.source_type.is_stub() {
|
||||
if checker.enabled(Rule::UselessImportAlias) {
|
||||
pylint::rules::useless_import_alias(checker, alias);
|
||||
}
|
||||
|
@ -744,7 +746,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
|
|||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
if checker.is_stub {
|
||||
if checker.source_type.is_stub() {
|
||||
if checker.enabled(Rule::FutureAnnotationsInStub) {
|
||||
flake8_pyi::rules::from_future_import(checker, import_from);
|
||||
}
|
||||
|
@ -889,7 +891,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
|
|||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
if !checker.is_stub {
|
||||
if !checker.source_type.is_stub() {
|
||||
if checker.enabled(Rule::UselessImportAlias) {
|
||||
pylint::rules::useless_import_alias(checker, alias);
|
||||
}
|
||||
|
@ -1013,7 +1015,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
|
|||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
if checker.is_stub {
|
||||
if checker.source_type.is_stub() {
|
||||
if checker.any_enabled(&[
|
||||
Rule::UnrecognizedVersionInfoCheck,
|
||||
Rule::PatchVersionComparison,
|
||||
|
@ -1325,7 +1327,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
|
|||
if checker.settings.rules.enabled(Rule::TypeBivariance) {
|
||||
pylint::rules::type_bivariance(checker, value);
|
||||
}
|
||||
if checker.is_stub {
|
||||
if checker.source_type.is_stub() {
|
||||
if checker.any_enabled(&[
|
||||
Rule::UnprefixedTypeParam,
|
||||
Rule::AssignmentDefaultInStub,
|
||||
|
@ -1395,7 +1397,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
|
|||
if checker.enabled(Rule::NonPEP695TypeAlias) {
|
||||
pyupgrade::rules::non_pep695_type_alias(checker, assign_stmt);
|
||||
}
|
||||
if checker.is_stub {
|
||||
if checker.source_type.is_stub() {
|
||||
if let Some(value) = value {
|
||||
if checker.enabled(Rule::AssignmentDefaultInStub) {
|
||||
// Ignore assignments in function bodies; those are covered by other rules.
|
||||
|
|
|
@ -43,7 +43,7 @@ use ruff_python_ast::helpers::{extract_handled_exceptions, to_module_path};
|
|||
use ruff_python_ast::identifier::Identifier;
|
||||
use ruff_python_ast::str::trailing_quote;
|
||||
use ruff_python_ast::visitor::{walk_except_handler, walk_pattern, Visitor};
|
||||
use ruff_python_ast::{helpers, str, visitor};
|
||||
use ruff_python_ast::{helpers, str, visitor, PySourceType};
|
||||
use ruff_python_codegen::{Generator, Quote, Stylist};
|
||||
use ruff_python_index::Indexer;
|
||||
use ruff_python_parser::typing::{parse_type_annotation, AnnotationKind};
|
||||
|
@ -53,7 +53,6 @@ use ruff_python_semantic::{
|
|||
ModuleKind, ScopeId, ScopeKind, SemanticModel, SemanticModelFlags, StarImport, SubmoduleImport,
|
||||
};
|
||||
use ruff_python_stdlib::builtins::{BUILTINS, MAGIC_GLOBALS};
|
||||
use ruff_python_stdlib::path::is_python_stub_file;
|
||||
use ruff_source_file::Locator;
|
||||
|
||||
use crate::checkers::ast::deferred::Deferred;
|
||||
|
@ -75,8 +74,8 @@ pub(crate) struct Checker<'a> {
|
|||
package: Option<&'a Path>,
|
||||
/// The module representation of the current file (e.g., `foo.bar`).
|
||||
module_path: Option<&'a [String]>,
|
||||
/// Whether the current file is a stub (`.pyi`) file.
|
||||
is_stub: bool,
|
||||
/// The [`PySourceType`] of the current file.
|
||||
pub(crate) source_type: PySourceType,
|
||||
/// The [`flags::Noqa`] for the current analysis (i.e., whether to respect suppression
|
||||
/// comments).
|
||||
noqa: flags::Noqa,
|
||||
|
@ -118,6 +117,7 @@ impl<'a> Checker<'a> {
|
|||
stylist: &'a Stylist,
|
||||
indexer: &'a Indexer,
|
||||
importer: Importer<'a>,
|
||||
source_type: PySourceType,
|
||||
) -> Checker<'a> {
|
||||
Checker {
|
||||
settings,
|
||||
|
@ -126,7 +126,7 @@ impl<'a> Checker<'a> {
|
|||
path,
|
||||
package,
|
||||
module_path: module.path(),
|
||||
is_stub: is_python_stub_file(path),
|
||||
source_type,
|
||||
locator,
|
||||
stylist,
|
||||
indexer,
|
||||
|
@ -233,11 +233,6 @@ impl<'a> Checker<'a> {
|
|||
&self.semantic
|
||||
}
|
||||
|
||||
/// Return `true` if the current file is a stub file (`.pyi`).
|
||||
pub(crate) const fn is_stub(&self) -> bool {
|
||||
self.is_stub
|
||||
}
|
||||
|
||||
/// The [`Path`] to the file under analysis.
|
||||
pub(crate) const fn path(&self) -> &'a Path {
|
||||
self.path
|
||||
|
@ -1786,7 +1781,7 @@ impl<'a> Checker<'a> {
|
|||
pyupgrade::rules::quoted_annotation(self, value, range);
|
||||
}
|
||||
}
|
||||
if self.is_stub {
|
||||
if self.source_type.is_stub() {
|
||||
if self.enabled(Rule::QuotedAnnotationInStub) {
|
||||
flake8_pyi::rules::quoted_annotation_in_stub(self, value, range);
|
||||
}
|
||||
|
@ -1928,6 +1923,7 @@ pub(crate) fn check_ast(
|
|||
noqa: flags::Noqa,
|
||||
path: &Path,
|
||||
package: Option<&Path>,
|
||||
source_type: PySourceType,
|
||||
) -> Vec<Diagnostic> {
|
||||
let module_path = package.and_then(|package| to_module_path(package, path));
|
||||
let module = Module {
|
||||
|
@ -1955,6 +1951,7 @@ pub(crate) fn check_ast(
|
|||
stylist,
|
||||
indexer,
|
||||
Importer::new(python_ast, locator, stylist),
|
||||
source_type,
|
||||
);
|
||||
checker.bind_builtins();
|
||||
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
use std::borrow::Cow;
|
||||
use std::path::Path;
|
||||
|
||||
use ruff_python_ast::{self as ast, Ranged, Stmt, Suite};
|
||||
use ruff_python_ast::{self as ast, PySourceType, Ranged, Stmt, Suite};
|
||||
|
||||
use ruff_diagnostics::Diagnostic;
|
||||
use ruff_python_ast::helpers::to_module_path;
|
||||
|
@ -10,7 +10,7 @@ use ruff_python_ast::imports::{ImportMap, ModuleImport};
|
|||
use ruff_python_ast::statement_visitor::StatementVisitor;
|
||||
use ruff_python_codegen::Stylist;
|
||||
use ruff_python_index::Indexer;
|
||||
use ruff_python_stdlib::path::is_python_stub_file;
|
||||
|
||||
use ruff_source_file::Locator;
|
||||
|
||||
use crate::directives::IsortDirectives;
|
||||
|
@ -87,12 +87,12 @@ pub(crate) fn check_imports(
|
|||
path: &Path,
|
||||
package: Option<&Path>,
|
||||
source_kind: Option<&SourceKind>,
|
||||
source_type: PySourceType,
|
||||
) -> (Vec<Diagnostic>, Option<ImportMap>) {
|
||||
let is_stub = is_python_stub_file(path);
|
||||
|
||||
// Extract all import blocks from the AST.
|
||||
let tracker = {
|
||||
let mut tracker = BlockBuilder::new(locator, directives, is_stub, source_kind);
|
||||
let mut tracker =
|
||||
BlockBuilder::new(locator, directives, source_type.is_stub(), source_kind);
|
||||
tracker.visit_body(python_ast);
|
||||
tracker
|
||||
};
|
||||
|
@ -104,7 +104,13 @@ pub(crate) fn check_imports(
|
|||
for block in &blocks {
|
||||
if !block.imports.is_empty() {
|
||||
if let Some(diagnostic) = isort::rules::organize_imports(
|
||||
block, locator, stylist, indexer, settings, package,
|
||||
block,
|
||||
locator,
|
||||
stylist,
|
||||
indexer,
|
||||
settings,
|
||||
package,
|
||||
source_type,
|
||||
) {
|
||||
diagnostics.push(diagnostic);
|
||||
}
|
||||
|
@ -113,7 +119,11 @@ pub(crate) fn check_imports(
|
|||
}
|
||||
if settings.rules.enabled(Rule::MissingRequiredImport) {
|
||||
diagnostics.extend(isort::rules::add_required_imports(
|
||||
python_ast, locator, stylist, settings, is_stub,
|
||||
python_ast,
|
||||
locator,
|
||||
stylist,
|
||||
settings,
|
||||
source_type,
|
||||
));
|
||||
}
|
||||
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
//! Insert statements into Python code.
|
||||
use std::ops::Add;
|
||||
|
||||
use ruff_python_ast::{Ranged, Stmt};
|
||||
use ruff_python_parser::{lexer, Mode, Tok};
|
||||
use ruff_python_ast::{PySourceType, Ranged, Stmt};
|
||||
use ruff_python_parser::{lexer, AsMode, Tok};
|
||||
use ruff_text_size::TextSize;
|
||||
|
||||
use ruff_diagnostics::Edit;
|
||||
|
@ -137,6 +137,7 @@ impl<'a> Insertion<'a> {
|
|||
mut location: TextSize,
|
||||
locator: &Locator<'a>,
|
||||
stylist: &Stylist,
|
||||
source_type: PySourceType,
|
||||
) -> Insertion<'a> {
|
||||
enum Awaiting {
|
||||
Colon(u32),
|
||||
|
@ -146,7 +147,7 @@ impl<'a> Insertion<'a> {
|
|||
|
||||
let mut state = Awaiting::Colon(0);
|
||||
for (tok, range) in
|
||||
lexer::lex_starts_at(locator.after(location), Mode::Module, location).flatten()
|
||||
lexer::lex_starts_at(locator.after(location), source_type.as_mode(), location).flatten()
|
||||
{
|
||||
match state {
|
||||
// Iterate until we find the colon indicating the start of the block body.
|
||||
|
@ -300,12 +301,12 @@ fn match_leading_semicolon(s: &str) -> Option<TextSize> {
|
|||
mod tests {
|
||||
use anyhow::Result;
|
||||
|
||||
use ruff_python_parser::lexer::LexResult;
|
||||
use ruff_text_size::TextSize;
|
||||
|
||||
use ruff_python_ast::PySourceType;
|
||||
use ruff_python_codegen::Stylist;
|
||||
use ruff_python_parser::parse_suite;
|
||||
use ruff_python_parser::lexer::LexResult;
|
||||
use ruff_python_parser::{parse_suite, Mode};
|
||||
use ruff_source_file::{LineEnding, Locator};
|
||||
use ruff_text_size::TextSize;
|
||||
|
||||
use super::Insertion;
|
||||
|
||||
|
@ -313,7 +314,7 @@ mod tests {
|
|||
fn start_of_file() -> Result<()> {
|
||||
fn insert(contents: &str) -> Result<Insertion> {
|
||||
let program = parse_suite(contents, "<filename>")?;
|
||||
let tokens: Vec<LexResult> = ruff_python_parser::tokenize(contents);
|
||||
let tokens: Vec<LexResult> = ruff_python_parser::tokenize(contents, Mode::Module);
|
||||
let locator = Locator::new(contents);
|
||||
let stylist = Stylist::from_tokens(&tokens, &locator);
|
||||
Ok(Insertion::start_of_file(&program, &locator, &stylist))
|
||||
|
@ -424,10 +425,10 @@ x = 1
|
|||
#[test]
|
||||
fn start_of_block() {
|
||||
fn insert(contents: &str, offset: TextSize) -> Insertion {
|
||||
let tokens: Vec<LexResult> = ruff_python_parser::tokenize(contents);
|
||||
let tokens: Vec<LexResult> = ruff_python_parser::tokenize(contents, Mode::Module);
|
||||
let locator = Locator::new(contents);
|
||||
let stylist = Stylist::from_tokens(&tokens, &locator);
|
||||
Insertion::start_of_block(offset, &locator, &stylist)
|
||||
Insertion::start_of_block(offset, &locator, &stylist, PySourceType::default())
|
||||
}
|
||||
|
||||
let contents = "if True: pass";
|
||||
|
|
|
@ -7,7 +7,7 @@ use std::error::Error;
|
|||
|
||||
use anyhow::Result;
|
||||
use libcst_native::{ImportAlias, Name, NameOrAttribute};
|
||||
use ruff_python_ast::{self as ast, Ranged, Stmt, Suite};
|
||||
use ruff_python_ast::{self as ast, PySourceType, Ranged, Stmt, Suite};
|
||||
use ruff_text_size::TextSize;
|
||||
|
||||
use ruff_diagnostics::Edit;
|
||||
|
@ -121,6 +121,7 @@ impl<'a> Importer<'a> {
|
|||
import: &StmtImports,
|
||||
at: TextSize,
|
||||
semantic: &SemanticModel,
|
||||
source_type: PySourceType,
|
||||
) -> Result<TypingImportEdit> {
|
||||
// Generate the modified import statement.
|
||||
let content = autofix::codemods::retain_imports(
|
||||
|
@ -140,7 +141,7 @@ impl<'a> Importer<'a> {
|
|||
// Add the import to a `TYPE_CHECKING` block.
|
||||
let add_import_edit = if let Some(block) = self.preceding_type_checking_block(at) {
|
||||
// Add the import to the `TYPE_CHECKING` block.
|
||||
self.add_to_type_checking_block(&content, block.start())
|
||||
self.add_to_type_checking_block(&content, block.start(), source_type)
|
||||
} else {
|
||||
// Add the import to a new `TYPE_CHECKING` block.
|
||||
self.add_type_checking_block(
|
||||
|
@ -353,8 +354,13 @@ impl<'a> Importer<'a> {
|
|||
}
|
||||
|
||||
/// Add an import statement to an existing `TYPE_CHECKING` block.
|
||||
fn add_to_type_checking_block(&self, content: &str, at: TextSize) -> Edit {
|
||||
Insertion::start_of_block(at, self.locator, self.stylist).into_edit(content)
|
||||
fn add_to_type_checking_block(
|
||||
&self,
|
||||
content: &str,
|
||||
at: TextSize,
|
||||
source_type: PySourceType,
|
||||
) -> Edit {
|
||||
Insertion::start_of_block(at, self.locator, self.stylist, source_type).into_edit(content)
|
||||
}
|
||||
|
||||
/// Return the import statement that precedes the given position, if any.
|
||||
|
|
|
@ -24,8 +24,6 @@ use crate::IOError;
|
|||
|
||||
pub const JUPYTER_NOTEBOOK_EXT: &str = "ipynb";
|
||||
|
||||
const MAGIC_PREFIX: [&str; 3] = ["%", "!", "?"];
|
||||
|
||||
/// Run round-trip source code generation on a given Jupyter notebook file path.
|
||||
pub fn round_trip(path: &Path) -> anyhow::Result<String> {
|
||||
let mut notebook = Notebook::read(path).map_err(|err| {
|
||||
|
@ -78,26 +76,21 @@ impl Cell {
|
|||
/// Return `true` if it's a valid code cell.
|
||||
///
|
||||
/// A valid code cell is a cell where the cell type is [`Cell::Code`] and the
|
||||
/// source doesn't contain a magic, shell or help command.
|
||||
/// source doesn't contain a cell magic.
|
||||
fn is_valid_code_cell(&self) -> bool {
|
||||
let source = match self {
|
||||
Cell::Code(cell) => &cell.source,
|
||||
_ => return false,
|
||||
};
|
||||
// Ignore a cell if it contains a magic command. There could be valid
|
||||
// Python code as well, but we'll ignore that for now.
|
||||
// TODO(dhruvmanila): https://github.com/psf/black/blob/main/src/black/handle_ipynb_magics.py
|
||||
// Ignore cells containing cell magic. This is different from line magic
|
||||
// which is allowed and ignored by the parser.
|
||||
!match source {
|
||||
SourceValue::String(string) => string.lines().any(|line| {
|
||||
MAGIC_PREFIX
|
||||
.iter()
|
||||
.any(|prefix| line.trim_start().starts_with(prefix))
|
||||
}),
|
||||
SourceValue::StringArray(string_array) => string_array.iter().any(|line| {
|
||||
MAGIC_PREFIX
|
||||
.iter()
|
||||
.any(|prefix| line.trim_start().starts_with(prefix))
|
||||
}),
|
||||
SourceValue::String(string) => string
|
||||
.lines()
|
||||
.any(|line| line.trim_start().starts_with("%%")),
|
||||
SourceValue::StringArray(string_array) => string_array
|
||||
.iter()
|
||||
.any(|line| line.trim_start().starts_with("%%")),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -513,9 +506,10 @@ mod tests {
|
|||
}
|
||||
|
||||
#[test_case(Path::new("markdown.json"), false; "markdown")]
|
||||
#[test_case(Path::new("only_magic.json"), false; "only_magic")]
|
||||
#[test_case(Path::new("code_and_magic.json"), false; "code_and_magic")]
|
||||
#[test_case(Path::new("only_magic.json"), true; "only_magic")]
|
||||
#[test_case(Path::new("code_and_magic.json"), true; "code_and_magic")]
|
||||
#[test_case(Path::new("only_code.json"), true; "only_code")]
|
||||
#[test_case(Path::new("cell_magic.json"), false; "cell_magic")]
|
||||
fn test_is_valid_code_cell(path: &Path, expected: bool) -> Result<()> {
|
||||
assert_eq!(read_jupyter_cell(path)?.is_valid_code_cell(), expected);
|
||||
Ok(())
|
||||
|
@ -567,7 +561,7 @@ print("after empty cells")
|
|||
#[test]
|
||||
fn test_import_sorting() -> Result<()> {
|
||||
let path = "isort.ipynb".to_string();
|
||||
let (diagnostics, source_kind) = test_notebook_path(
|
||||
let (diagnostics, source_kind, _) = test_notebook_path(
|
||||
&path,
|
||||
Path::new("isort_expected.ipynb"),
|
||||
&settings::Settings::for_rule(Rule::UnsortedImports),
|
||||
|
@ -576,10 +570,22 @@ print("after empty cells")
|
|||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_line_magics() -> Result<()> {
|
||||
let path = "line_magics.ipynb".to_string();
|
||||
let (diagnostics, source_kind, _) = test_notebook_path(
|
||||
&path,
|
||||
Path::new("line_magics_expected.ipynb"),
|
||||
&settings::Settings::for_rule(Rule::UnusedImport),
|
||||
)?;
|
||||
assert_messages!(diagnostics, path, source_kind);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_json_consistency() -> Result<()> {
|
||||
let path = "before_fix.ipynb".to_string();
|
||||
let (_, source_kind) = test_notebook_path(
|
||||
let (_, _, source_kind) = test_notebook_path(
|
||||
path,
|
||||
Path::new("after_fix.ipynb"),
|
||||
&settings::Settings::for_rule(Rule::UnusedImport),
|
||||
|
|
|
@ -47,4 +47,43 @@ isort.ipynb:cell 2:1:1: I001 [*] Import block is un-sorted or un-formatted
|
|||
7 9 | def foo():
|
||||
8 10 | pass
|
||||
|
||||
isort.ipynb:cell 3:1:1: I001 [*] Import block is un-sorted or un-formatted
|
||||
|
|
||||
1 | / from pathlib import Path
|
||||
2 | | import sys
|
||||
3 | |
|
||||
4 | | %matplotlib \
|
||||
| |_^ I001
|
||||
5 | --inline
|
||||
|
|
||||
= help: Organize imports
|
||||
|
||||
ℹ Fix
|
||||
6 6 | # Newline should be added here
|
||||
7 7 | def foo():
|
||||
8 8 | pass
|
||||
9 |+import sys
|
||||
9 10 | from pathlib import Path
|
||||
10 |-import sys
|
||||
11 11 |
|
||||
12 12 | %matplotlib \
|
||||
13 13 | --inline
|
||||
|
||||
isort.ipynb:cell 3:7:1: I001 [*] Import block is un-sorted or un-formatted
|
||||
|
|
||||
5 | --inline
|
||||
6 |
|
||||
7 | / import math
|
||||
8 | | import abc
|
||||
|
|
||||
= help: Organize imports
|
||||
|
||||
ℹ Fix
|
||||
12 12 | %matplotlib \
|
||||
13 13 | --inline
|
||||
14 14 |
|
||||
15 |+import abc
|
||||
15 16 | import math
|
||||
16 |-import abc
|
||||
|
||||
|
||||
|
|
|
@ -0,0 +1,23 @@
|
|||
---
|
||||
source: crates/ruff/src/jupyter/notebook.rs
|
||||
---
|
||||
line_magics.ipynb:cell 1:5:8: F401 [*] `os` imported but unused
|
||||
|
|
||||
3 | %matplotlib inline
|
||||
4 |
|
||||
5 | import os
|
||||
| ^^ F401
|
||||
6 |
|
||||
7 | _ = math.pi
|
||||
|
|
||||
= help: Remove unused import: `os`
|
||||
|
||||
ℹ Fix
|
||||
2 2 |
|
||||
3 3 | %matplotlib inline
|
||||
4 4 |
|
||||
5 |-import os
|
||||
6 5 |
|
||||
7 6 | _ = math.pi
|
||||
|
||||
|
|
@ -7,14 +7,15 @@ use colored::Colorize;
|
|||
use itertools::Itertools;
|
||||
use log::error;
|
||||
use ruff_python_parser::lexer::LexResult;
|
||||
use ruff_python_parser::ParseError;
|
||||
use ruff_python_parser::{AsMode, ParseError};
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use ruff_diagnostics::Diagnostic;
|
||||
use ruff_python_ast::imports::ImportMap;
|
||||
use ruff_python_ast::PySourceType;
|
||||
use ruff_python_codegen::Stylist;
|
||||
use ruff_python_index::Indexer;
|
||||
use ruff_python_stdlib::path::is_python_stub_file;
|
||||
|
||||
use ruff_source_file::{Locator, SourceFileBuilder};
|
||||
|
||||
use crate::autofix::{fix_file, FixResult};
|
||||
|
@ -81,6 +82,7 @@ pub fn check_path(
|
|||
settings: &Settings,
|
||||
noqa: flags::Noqa,
|
||||
source_kind: Option<&SourceKind>,
|
||||
source_type: PySourceType,
|
||||
) -> LinterResult<(Vec<Diagnostic>, Option<ImportMap>)> {
|
||||
// Aggregate all diagnostics.
|
||||
let mut diagnostics = vec![];
|
||||
|
@ -101,9 +103,13 @@ pub fn check_path(
|
|||
.iter_enabled()
|
||||
.any(|rule_code| rule_code.lint_source().is_tokens())
|
||||
{
|
||||
let is_stub = is_python_stub_file(path);
|
||||
diagnostics.extend(check_tokens(
|
||||
&tokens, path, locator, indexer, settings, is_stub,
|
||||
&tokens,
|
||||
path,
|
||||
locator,
|
||||
indexer,
|
||||
settings,
|
||||
source_type.is_stub(),
|
||||
));
|
||||
}
|
||||
|
||||
|
@ -138,7 +144,11 @@ pub fn check_path(
|
|||
.iter_enabled()
|
||||
.any(|rule_code| rule_code.lint_source().is_imports());
|
||||
if use_ast || use_imports || use_doc_lines {
|
||||
match ruff_python_parser::parse_program_tokens(tokens, &path.to_string_lossy()) {
|
||||
match ruff_python_parser::parse_program_tokens(
|
||||
tokens,
|
||||
&path.to_string_lossy(),
|
||||
source_type.is_jupyter(),
|
||||
) {
|
||||
Ok(python_ast) => {
|
||||
if use_ast {
|
||||
diagnostics.extend(check_ast(
|
||||
|
@ -151,6 +161,7 @@ pub fn check_path(
|
|||
noqa,
|
||||
path,
|
||||
package,
|
||||
source_type,
|
||||
));
|
||||
}
|
||||
if use_imports {
|
||||
|
@ -164,6 +175,7 @@ pub fn check_path(
|
|||
path,
|
||||
package,
|
||||
source_kind,
|
||||
source_type,
|
||||
);
|
||||
imports = module_imports;
|
||||
diagnostics.extend(import_diagnostics);
|
||||
|
@ -256,11 +268,13 @@ const MAX_ITERATIONS: usize = 100;
|
|||
|
||||
/// Add any missing `# noqa` pragmas to the source code at the given `Path`.
|
||||
pub fn add_noqa_to_path(path: &Path, package: Option<&Path>, settings: &Settings) -> Result<usize> {
|
||||
let source_type = PySourceType::from(path);
|
||||
|
||||
// Read the file from disk.
|
||||
let contents = std::fs::read_to_string(path)?;
|
||||
|
||||
// Tokenize once.
|
||||
let tokens: Vec<LexResult> = ruff_python_parser::tokenize(&contents);
|
||||
let tokens: Vec<LexResult> = ruff_python_parser::tokenize(&contents, source_type.as_mode());
|
||||
|
||||
// Map row and column locations to byte slices (lazily).
|
||||
let locator = Locator::new(&contents);
|
||||
|
@ -294,6 +308,7 @@ pub fn add_noqa_to_path(path: &Path, package: Option<&Path>, settings: &Settings
|
|||
settings,
|
||||
flags::Noqa::Disabled,
|
||||
None,
|
||||
source_type,
|
||||
);
|
||||
|
||||
// Log any parse errors.
|
||||
|
@ -326,9 +341,10 @@ pub fn lint_only(
|
|||
settings: &Settings,
|
||||
noqa: flags::Noqa,
|
||||
source_kind: Option<&SourceKind>,
|
||||
source_type: PySourceType,
|
||||
) -> LinterResult<(Vec<Message>, Option<ImportMap>)> {
|
||||
// Tokenize once.
|
||||
let tokens: Vec<LexResult> = ruff_python_parser::tokenize(contents);
|
||||
let tokens: Vec<LexResult> = ruff_python_parser::tokenize(contents, source_type.as_mode());
|
||||
|
||||
// Map row and column locations to byte slices (lazily).
|
||||
let locator = Locator::new(contents);
|
||||
|
@ -359,6 +375,7 @@ pub fn lint_only(
|
|||
settings,
|
||||
noqa,
|
||||
source_kind,
|
||||
source_type,
|
||||
);
|
||||
|
||||
result.map(|(diagnostics, imports)| {
|
||||
|
@ -405,6 +422,7 @@ pub fn lint_fix<'a>(
|
|||
noqa: flags::Noqa,
|
||||
settings: &Settings,
|
||||
source_kind: &mut SourceKind,
|
||||
source_type: PySourceType,
|
||||
) -> Result<FixerResult<'a>> {
|
||||
let mut transformed = Cow::Borrowed(contents);
|
||||
|
||||
|
@ -420,7 +438,8 @@ pub fn lint_fix<'a>(
|
|||
// Continuously autofix until the source code stabilizes.
|
||||
loop {
|
||||
// Tokenize once.
|
||||
let tokens: Vec<LexResult> = ruff_python_parser::tokenize(&transformed);
|
||||
let tokens: Vec<LexResult> =
|
||||
ruff_python_parser::tokenize(&transformed, source_type.as_mode());
|
||||
|
||||
// Map row and column locations to byte slices (lazily).
|
||||
let locator = Locator::new(&transformed);
|
||||
|
@ -451,6 +470,7 @@ pub fn lint_fix<'a>(
|
|||
settings,
|
||||
noqa,
|
||||
Some(source_kind),
|
||||
source_type,
|
||||
);
|
||||
|
||||
if iterations == 0 {
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use anyhow::{bail, Result};
|
||||
use ruff_python_ast::{Ranged, Stmt};
|
||||
use ruff_python_parser::{lexer, Mode, Tok};
|
||||
use ruff_python_ast::{PySourceType, Ranged, Stmt};
|
||||
use ruff_python_parser::{lexer, AsMode, Tok};
|
||||
|
||||
use ruff_diagnostics::Edit;
|
||||
use ruff_source_file::Locator;
|
||||
|
@ -10,6 +10,7 @@ pub(crate) fn add_return_annotation(
|
|||
locator: &Locator,
|
||||
stmt: &Stmt,
|
||||
annotation: &str,
|
||||
source_type: PySourceType,
|
||||
) -> Result<Edit> {
|
||||
let contents = &locator.contents()[stmt.range()];
|
||||
|
||||
|
@ -17,7 +18,9 @@ pub(crate) fn add_return_annotation(
|
|||
let mut seen_lpar = false;
|
||||
let mut seen_rpar = false;
|
||||
let mut count = 0u32;
|
||||
for (tok, range) in lexer::lex_starts_at(contents, Mode::Module, stmt.start()).flatten() {
|
||||
for (tok, range) in
|
||||
lexer::lex_starts_at(contents, source_type.as_mode(), stmt.start()).flatten()
|
||||
{
|
||||
if seen_lpar && seen_rpar {
|
||||
if matches!(tok, Tok::Colon) {
|
||||
return Ok(Edit::insertion(format!(" -> {annotation}"), range.start()));
|
||||
|
|
|
@ -709,8 +709,13 @@ pub(crate) fn definition(
|
|||
);
|
||||
if checker.patch(diagnostic.kind.rule()) {
|
||||
diagnostic.try_set_fix(|| {
|
||||
fixes::add_return_annotation(checker.locator(), stmt, "None")
|
||||
.map(Fix::suggested)
|
||||
fixes::add_return_annotation(
|
||||
checker.locator(),
|
||||
stmt,
|
||||
"None",
|
||||
checker.source_type,
|
||||
)
|
||||
.map(Fix::suggested)
|
||||
});
|
||||
}
|
||||
diagnostics.push(diagnostic);
|
||||
|
@ -727,8 +732,13 @@ pub(crate) fn definition(
|
|||
if checker.patch(diagnostic.kind.rule()) {
|
||||
if let Some(return_type) = simple_magic_return_type(name) {
|
||||
diagnostic.try_set_fix(|| {
|
||||
fixes::add_return_annotation(checker.locator(), stmt, return_type)
|
||||
.map(Fix::suggested)
|
||||
fixes::add_return_annotation(
|
||||
checker.locator(),
|
||||
stmt,
|
||||
return_type,
|
||||
checker.source_type,
|
||||
)
|
||||
.map(Fix::suggested)
|
||||
});
|
||||
}
|
||||
}
|
||||
|
|
|
@ -550,6 +550,7 @@ fn check_fixture_decorator(checker: &mut Checker, func_name: &str, decorator: &D
|
|||
arguments,
|
||||
edits::Parentheses::Preserve,
|
||||
checker.locator(),
|
||||
checker.source_type,
|
||||
)
|
||||
.map(Fix::suggested)
|
||||
});
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
use ruff_python_ast::{self as ast, Arguments, Constant, Decorator, Expr, ExprContext, Ranged};
|
||||
use ruff_python_parser::{lexer, Mode, Tok};
|
||||
use ruff_python_ast::{
|
||||
self as ast, Arguments, Constant, Decorator, Expr, ExprContext, PySourceType, Ranged,
|
||||
};
|
||||
use ruff_python_parser::{lexer, AsMode, Tok};
|
||||
use ruff_text_size::TextRange;
|
||||
|
||||
use ruff_diagnostics::{AutofixKind, Diagnostic, Edit, Fix, Violation};
|
||||
|
@ -95,7 +97,12 @@ fn elts_to_csv(elts: &[Expr], generator: Generator) -> Option<String> {
|
|||
/// ```
|
||||
///
|
||||
/// This method assumes that the first argument is a string.
|
||||
fn get_parametrize_name_range(decorator: &Decorator, expr: &Expr, locator: &Locator) -> TextRange {
|
||||
fn get_parametrize_name_range(
|
||||
decorator: &Decorator,
|
||||
expr: &Expr,
|
||||
locator: &Locator,
|
||||
source_type: PySourceType,
|
||||
) -> TextRange {
|
||||
let mut locations = Vec::new();
|
||||
let mut implicit_concat = None;
|
||||
|
||||
|
@ -103,7 +110,7 @@ fn get_parametrize_name_range(decorator: &Decorator, expr: &Expr, locator: &Loca
|
|||
// decorator to find them.
|
||||
for (tok, range) in lexer::lex_starts_at(
|
||||
locator.slice(decorator.range()),
|
||||
Mode::Module,
|
||||
source_type.as_mode(),
|
||||
decorator.start(),
|
||||
)
|
||||
.flatten()
|
||||
|
@ -141,8 +148,12 @@ fn check_names(checker: &mut Checker, decorator: &Decorator, expr: &Expr) {
|
|||
if names.len() > 1 {
|
||||
match names_type {
|
||||
types::ParametrizeNameType::Tuple => {
|
||||
let name_range =
|
||||
get_parametrize_name_range(decorator, expr, checker.locator());
|
||||
let name_range = get_parametrize_name_range(
|
||||
decorator,
|
||||
expr,
|
||||
checker.locator(),
|
||||
checker.source_type,
|
||||
);
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
PytestParametrizeNamesWrongType {
|
||||
expected: names_type,
|
||||
|
@ -172,8 +183,12 @@ fn check_names(checker: &mut Checker, decorator: &Decorator, expr: &Expr) {
|
|||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
types::ParametrizeNameType::List => {
|
||||
let name_range =
|
||||
get_parametrize_name_range(decorator, expr, checker.locator());
|
||||
let name_range = get_parametrize_name_range(
|
||||
decorator,
|
||||
expr,
|
||||
checker.locator(),
|
||||
checker.source_type,
|
||||
);
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
PytestParametrizeNamesWrongType {
|
||||
expected: names_type,
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
use ruff_python_ast::{self as ast, Arguments, Expr, Ranged};
|
||||
use ruff_python_parser::{lexer, Mode, Tok};
|
||||
use ruff_python_ast::{self as ast, Arguments, Expr, PySourceType, Ranged};
|
||||
use ruff_python_parser::{lexer, AsMode, Tok};
|
||||
use ruff_text_size::{TextRange, TextSize};
|
||||
|
||||
use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix};
|
||||
|
@ -71,7 +71,7 @@ pub(crate) fn unnecessary_paren_on_raise_exception(checker: &mut Checker, expr:
|
|||
return;
|
||||
}
|
||||
|
||||
let range = match_parens(func.end(), checker.locator())
|
||||
let range = match_parens(func.end(), checker.locator(), checker.source_type)
|
||||
.expect("Expected call to include parentheses");
|
||||
let mut diagnostic = Diagnostic::new(UnnecessaryParenOnRaiseException, range);
|
||||
if checker.patch(diagnostic.kind.rule()) {
|
||||
|
@ -82,14 +82,18 @@ pub(crate) fn unnecessary_paren_on_raise_exception(checker: &mut Checker, expr:
|
|||
}
|
||||
|
||||
/// Return the range of the first parenthesis pair after a given [`TextSize`].
|
||||
fn match_parens(start: TextSize, locator: &Locator) -> Option<TextRange> {
|
||||
fn match_parens(
|
||||
start: TextSize,
|
||||
locator: &Locator,
|
||||
source_type: PySourceType,
|
||||
) -> Option<TextRange> {
|
||||
let contents = &locator.contents()[usize::from(start)..];
|
||||
|
||||
let mut fix_start = None;
|
||||
let mut fix_end = None;
|
||||
let mut count = 0u32;
|
||||
|
||||
for (tok, range) in lexer::lex_starts_at(contents, Mode::Module, start).flatten() {
|
||||
for (tok, range) in lexer::lex_starts_at(contents, source_type.as_mode(), start).flatten() {
|
||||
match tok {
|
||||
Tok::Lpar => {
|
||||
if count == 0 {
|
||||
|
|
|
@ -378,6 +378,7 @@ pub(crate) fn nested_if_statements(
|
|||
let colon = first_colon_range(
|
||||
TextRange::new(test.end(), first_stmt.start()),
|
||||
checker.locator().contents(),
|
||||
checker.source_type.is_jupyter(),
|
||||
);
|
||||
|
||||
// Check if the parent is already emitting a larger diagnostic including this if statement
|
||||
|
|
|
@ -119,6 +119,7 @@ pub(crate) fn multiple_with_statements(
|
|||
body.first().expect("Expected body to be non-empty").start(),
|
||||
),
|
||||
checker.locator().contents(),
|
||||
checker.source_type.is_jupyter(),
|
||||
);
|
||||
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
|
|
|
@ -447,6 +447,7 @@ fn fix_imports(checker: &Checker, stmt_id: NodeId, imports: &[Import]) -> Result
|
|||
},
|
||||
at,
|
||||
checker.semantic(),
|
||||
checker.source_type,
|
||||
)?;
|
||||
|
||||
Ok(
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use ruff_python_ast::{self as ast, Ranged, Stmt};
|
||||
use ruff_python_ast::{self as ast, PySourceType, Ranged, Stmt};
|
||||
use ruff_text_size::TextRange;
|
||||
|
||||
use ruff_source_file::Locator;
|
||||
|
@ -13,6 +13,7 @@ pub(crate) fn annotate_imports<'a>(
|
|||
comments: Vec<Comment<'a>>,
|
||||
locator: &Locator,
|
||||
split_on_trailing_comma: bool,
|
||||
source_type: PySourceType,
|
||||
) -> Vec<AnnotatedImport<'a>> {
|
||||
let mut comments_iter = comments.into_iter().peekable();
|
||||
|
||||
|
@ -119,7 +120,7 @@ pub(crate) fn annotate_imports<'a>(
|
|||
names: aliases,
|
||||
level: level.map(|level| level.to_u32()),
|
||||
trailing_comma: if split_on_trailing_comma {
|
||||
trailing_comma(import, locator)
|
||||
trailing_comma(import, locator, source_type)
|
||||
} else {
|
||||
TrailingComma::default()
|
||||
},
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
use std::borrow::Cow;
|
||||
|
||||
use ruff_python_parser::{lexer, Mode, Tok};
|
||||
use ruff_python_ast::PySourceType;
|
||||
use ruff_python_parser::{lexer, AsMode, Tok};
|
||||
use ruff_text_size::{TextRange, TextSize};
|
||||
|
||||
use ruff_source_file::Locator;
|
||||
|
@ -22,9 +23,13 @@ impl Comment<'_> {
|
|||
}
|
||||
|
||||
/// Collect all comments in an import block.
|
||||
pub(crate) fn collect_comments<'a>(range: TextRange, locator: &'a Locator) -> Vec<Comment<'a>> {
|
||||
pub(crate) fn collect_comments<'a>(
|
||||
range: TextRange,
|
||||
locator: &'a Locator,
|
||||
source_type: PySourceType,
|
||||
) -> Vec<Comment<'a>> {
|
||||
let contents = locator.slice(range);
|
||||
lexer::lex_starts_at(contents, Mode::Module, range.start())
|
||||
lexer::lex_starts_at(contents, source_type.as_mode(), range.start())
|
||||
.flatten()
|
||||
.filter_map(|(tok, range)| {
|
||||
if let Tok::Comment(value) = tok {
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
use ruff_python_ast::{Ranged, Stmt};
|
||||
use ruff_python_parser::{lexer, Mode, Tok};
|
||||
use ruff_python_ast::{PySourceType, Ranged, Stmt};
|
||||
use ruff_python_parser::{lexer, AsMode, Tok};
|
||||
|
||||
use ruff_python_trivia::PythonWhitespace;
|
||||
use ruff_source_file::{Locator, UniversalNewlines};
|
||||
|
@ -8,11 +8,15 @@ use crate::rules::isort::types::TrailingComma;
|
|||
|
||||
/// Return `true` if a `Stmt::ImportFrom` statement ends with a magic
|
||||
/// trailing comma.
|
||||
pub(super) fn trailing_comma(stmt: &Stmt, locator: &Locator) -> TrailingComma {
|
||||
pub(super) fn trailing_comma(
|
||||
stmt: &Stmt,
|
||||
locator: &Locator,
|
||||
source_type: PySourceType,
|
||||
) -> TrailingComma {
|
||||
let contents = locator.slice(stmt.range());
|
||||
let mut count = 0u32;
|
||||
let mut trailing_comma = TrailingComma::Absent;
|
||||
for (tok, _) in lexer::lex_starts_at(contents, Mode::Module, stmt.start()).flatten() {
|
||||
for (tok, _) in lexer::lex_starts_at(contents, source_type.as_mode(), stmt.start()).flatten() {
|
||||
if matches!(tok, Tok::Lpar) {
|
||||
count = count.saturating_add(1);
|
||||
}
|
||||
|
|
|
@ -11,6 +11,7 @@ pub use categorize::{ImportSection, ImportType};
|
|||
use comments::Comment;
|
||||
use normalize::normalize_imports;
|
||||
use order::order_imports;
|
||||
use ruff_python_ast::PySourceType;
|
||||
use ruff_python_codegen::Stylist;
|
||||
use ruff_source_file::Locator;
|
||||
use settings::RelativeImportsOrder;
|
||||
|
@ -72,6 +73,7 @@ pub(crate) fn format_imports(
|
|||
stylist: &Stylist,
|
||||
src: &[PathBuf],
|
||||
package: Option<&Path>,
|
||||
source_type: PySourceType,
|
||||
combine_as_imports: bool,
|
||||
force_single_line: bool,
|
||||
force_sort_within_sections: bool,
|
||||
|
@ -94,7 +96,13 @@ pub(crate) fn format_imports(
|
|||
section_order: &[ImportSection],
|
||||
) -> String {
|
||||
let trailer = &block.trailer;
|
||||
let block = annotate_imports(&block.imports, comments, locator, split_on_trailing_comma);
|
||||
let block = annotate_imports(
|
||||
&block.imports,
|
||||
comments,
|
||||
locator,
|
||||
split_on_trailing_comma,
|
||||
source_type,
|
||||
);
|
||||
|
||||
// Normalize imports (i.e., deduplicate, aggregate `from` imports).
|
||||
let block = normalize_imports(
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
use log::error;
|
||||
use ruff_python_ast::{self as ast, Stmt, Suite};
|
||||
use ruff_python_ast::{self as ast, PySourceType, Stmt, Suite};
|
||||
use ruff_text_size::{TextRange, TextSize};
|
||||
|
||||
use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Fix};
|
||||
|
@ -91,7 +91,7 @@ fn add_required_import(
|
|||
locator: &Locator,
|
||||
stylist: &Stylist,
|
||||
settings: &Settings,
|
||||
is_stub: bool,
|
||||
source_type: PySourceType,
|
||||
) -> Option<Diagnostic> {
|
||||
// Don't add imports to semantically-empty files.
|
||||
if python_ast.iter().all(is_docstring_stmt) {
|
||||
|
@ -99,7 +99,7 @@ fn add_required_import(
|
|||
}
|
||||
|
||||
// We don't need to add `__future__` imports to stubs.
|
||||
if is_stub && required_import.is_future_import() {
|
||||
if source_type.is_stub() && required_import.is_future_import() {
|
||||
return None;
|
||||
}
|
||||
|
||||
|
@ -131,7 +131,7 @@ pub(crate) fn add_required_imports(
|
|||
locator: &Locator,
|
||||
stylist: &Stylist,
|
||||
settings: &Settings,
|
||||
is_stub: bool,
|
||||
source_type: PySourceType,
|
||||
) -> Vec<Diagnostic> {
|
||||
settings
|
||||
.isort
|
||||
|
@ -172,7 +172,7 @@ pub(crate) fn add_required_imports(
|
|||
locator,
|
||||
stylist,
|
||||
settings,
|
||||
is_stub,
|
||||
source_type,
|
||||
)
|
||||
})
|
||||
.collect(),
|
||||
|
@ -190,7 +190,7 @@ pub(crate) fn add_required_imports(
|
|||
locator,
|
||||
stylist,
|
||||
settings,
|
||||
is_stub,
|
||||
source_type,
|
||||
)
|
||||
})
|
||||
.collect(),
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
use std::path::Path;
|
||||
|
||||
use itertools::{EitherOrBoth, Itertools};
|
||||
use ruff_python_ast::{Ranged, Stmt};
|
||||
use ruff_python_ast::{PySourceType, Ranged, Stmt};
|
||||
use ruff_text_size::TextRange;
|
||||
|
||||
use ruff_diagnostics::{AutofixKind, Diagnostic, Edit, Fix, Violation};
|
||||
|
@ -87,6 +87,7 @@ pub(crate) fn organize_imports(
|
|||
indexer: &Indexer,
|
||||
settings: &Settings,
|
||||
package: Option<&Path>,
|
||||
source_type: PySourceType,
|
||||
) -> Option<Diagnostic> {
|
||||
let indentation = locator.slice(extract_indentation_range(&block.imports, locator));
|
||||
let indentation = leading_indentation(indentation);
|
||||
|
@ -105,6 +106,7 @@ pub(crate) fn organize_imports(
|
|||
let comments = comments::collect_comments(
|
||||
TextRange::new(range.start(), locator.full_line_end(range.end())),
|
||||
locator,
|
||||
source_type,
|
||||
);
|
||||
|
||||
let trailing_line_end = if block.trailer.is_none() {
|
||||
|
@ -123,6 +125,7 @@ pub(crate) fn organize_imports(
|
|||
stylist,
|
||||
&settings.src,
|
||||
package,
|
||||
source_type,
|
||||
settings.isort.combine_as_imports,
|
||||
settings.isort.force_single_line,
|
||||
settings.isort.force_sort_within_sections,
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
use ruff_diagnostics::{AutofixKind, Diagnostic, Edit, Fix, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::helpers::is_const_true;
|
||||
use ruff_python_ast::{self as ast, Keyword, Ranged};
|
||||
use ruff_python_ast::{self as ast, Keyword, PySourceType, Ranged};
|
||||
use ruff_python_semantic::{BindingKind, Import};
|
||||
use ruff_source_file::Locator;
|
||||
|
||||
|
@ -93,9 +93,12 @@ pub(crate) fn inplace_argument(checker: &mut Checker, call: &ast::ExprCall) {
|
|||
&& checker.semantic().expr_parent().is_none()
|
||||
&& !checker.semantic().scope().kind.is_lambda()
|
||||
{
|
||||
if let Some(fix) =
|
||||
convert_inplace_argument_to_assignment(checker.locator(), call, keyword)
|
||||
{
|
||||
if let Some(fix) = convert_inplace_argument_to_assignment(
|
||||
checker.locator(),
|
||||
call,
|
||||
keyword,
|
||||
checker.source_type,
|
||||
) {
|
||||
diagnostic.set_fix(fix);
|
||||
}
|
||||
}
|
||||
|
@ -116,6 +119,7 @@ fn convert_inplace_argument_to_assignment(
|
|||
locator: &Locator,
|
||||
call: &ast::ExprCall,
|
||||
keyword: &Keyword,
|
||||
source_type: PySourceType,
|
||||
) -> Option<Fix> {
|
||||
// Add the assignment.
|
||||
let attr = call.func.as_attribute_expr()?;
|
||||
|
@ -125,8 +129,14 @@ fn convert_inplace_argument_to_assignment(
|
|||
);
|
||||
|
||||
// Remove the `inplace` argument.
|
||||
let remove_argument =
|
||||
remove_argument(keyword, &call.arguments, Parentheses::Preserve, locator).ok()?;
|
||||
let remove_argument = remove_argument(
|
||||
keyword,
|
||||
&call.arguments,
|
||||
Parentheses::Preserve,
|
||||
locator,
|
||||
source_type,
|
||||
)
|
||||
.ok()?;
|
||||
|
||||
Some(Fix::suggested_edits(insert_assignment, [remove_argument]))
|
||||
}
|
||||
|
|
|
@ -523,7 +523,7 @@ pub(crate) fn not_missing(
|
|||
definition: &Definition,
|
||||
visibility: Visibility,
|
||||
) -> bool {
|
||||
if checker.is_stub() {
|
||||
if checker.source_type.is_stub() {
|
||||
return true;
|
||||
}
|
||||
|
||||
|
|
|
@ -12,11 +12,14 @@ mod tests {
|
|||
use anyhow::Result;
|
||||
use regex::Regex;
|
||||
use ruff_python_parser::lexer::LexResult;
|
||||
|
||||
use test_case::test_case;
|
||||
|
||||
use ruff_diagnostics::Diagnostic;
|
||||
use ruff_python_ast::PySourceType;
|
||||
use ruff_python_codegen::Stylist;
|
||||
use ruff_python_index::Indexer;
|
||||
use ruff_python_parser::AsMode;
|
||||
use ruff_python_trivia::textwrap::dedent;
|
||||
use ruff_source_file::Locator;
|
||||
|
||||
|
@ -504,8 +507,9 @@ mod tests {
|
|||
/// Note that all tests marked with `#[ignore]` should be considered TODOs.
|
||||
fn flakes(contents: &str, expected: &[Rule]) {
|
||||
let contents = dedent(contents);
|
||||
let source_type = PySourceType::default();
|
||||
let settings = Settings::for_rules(Linter::Pyflakes.rules());
|
||||
let tokens: Vec<LexResult> = ruff_python_parser::tokenize(&contents);
|
||||
let tokens: Vec<LexResult> = ruff_python_parser::tokenize(&contents, source_type.as_mode());
|
||||
let locator = Locator::new(&contents);
|
||||
let stylist = Stylist::from_tokens(&tokens, &locator);
|
||||
let indexer = Indexer::from_tokens(&tokens, &locator);
|
||||
|
@ -529,6 +533,7 @@ mod tests {
|
|||
&settings,
|
||||
flags::Noqa::Enabled,
|
||||
None,
|
||||
source_type,
|
||||
);
|
||||
diagnostics.sort_by_key(Diagnostic::start);
|
||||
let actual = diagnostics
|
||||
|
|
|
@ -1,5 +1,5 @@
use ruff_python_ast::{Expr, Ranged};
use ruff_python_parser::{lexer, Mode, StringKind, Tok};
use ruff_python_ast::{Expr, PySourceType, Ranged};
use ruff_python_parser::{lexer, AsMode, StringKind, Tok};
use ruff_text_size::{TextRange, TextSize};

use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix};

@ -52,9 +52,10 @@ impl AlwaysAutofixableViolation for FStringMissingPlaceholders {
fn find_useless_f_strings<'a>(
expr: &'a Expr,
locator: &'a Locator,
source_type: PySourceType,
) -> impl Iterator<Item = (TextRange, TextRange)> + 'a {
let contents = locator.slice(expr.range());
lexer::lex_starts_at(contents, Mode::Module, expr.start())
lexer::lex_starts_at(contents, source_type.as_mode(), expr.start())
.flatten()
.filter_map(|(tok, range)| match tok {
Tok::String {

@ -85,7 +86,9 @@ pub(crate) fn f_string_missing_placeholders(expr: &Expr, values: &[Expr], checke
.iter()
.any(|value| matches!(value, Expr::FormattedValue(_)))
{
for (prefix_range, tok_range) in find_useless_f_strings(expr, checker.locator()) {
for (prefix_range, tok_range) in
    find_useless_f_strings(expr, checker.locator(), checker.source_type)
{
let mut diagnostic = Diagnostic::new(FStringMissingPlaceholders, tok_range);
if checker.patch(diagnostic.kind.rule()) {
diagnostic.set_fix(convert_f_string_to_regular_string(

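The hunks above and below all follow the same pattern: any rule that re-lexes a slice of source now threads the file's `PySourceType` through and derives the lexer mode from it via `as_mode()` instead of hard-coding `Mode::Module`. A minimal sketch of that pattern, assuming the workspace crates named in this diff (the helper and its name are illustrative only):

    use ruff_python_ast::PySourceType;
    use ruff_python_parser::{lexer, AsMode};
    use ruff_text_size::TextSize;

    /// Count string tokens in a snippet, honoring the notebook-aware lexer mode.
    fn count_string_tokens(contents: &str, source_type: PySourceType) -> usize {
        lexer::lex_starts_at(contents, source_type.as_mode(), TextSize::from(0))
            .flatten()
            .filter(|(tok, _)| tok.is_string())
            .count()
    }

The `Checker` exposes the file's type as `checker.source_type`, which is why each of these fix helpers now takes an extra `source_type: PySourceType` parameter.
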
@ -1,6 +1,6 @@
use itertools::Itertools;
use ruff_python_ast::{self as ast, Ranged, Stmt};
use ruff_python_parser::{lexer, Mode, Tok};
use ruff_python_ast::{self as ast, PySourceType, Ranged, Stmt};
use ruff_python_parser::{lexer, AsMode, Tok};
use ruff_text_size::{TextRange, TextSize};

use ruff_diagnostics::{AutofixKind, Diagnostic, Edit, Fix, Violation};

@ -62,12 +62,17 @@ impl Violation for UnusedVariable {
}

/// Return the [`TextRange`] of the token before the next match of the predicate
fn match_token_before<F>(location: TextSize, locator: &Locator, f: F) -> Option<TextRange>
fn match_token_before<F>(
    location: TextSize,
    locator: &Locator,
    source_type: PySourceType,
    f: F,
) -> Option<TextRange>
where
F: Fn(Tok) -> bool,
{
let contents = locator.after(location);
for ((_, range), (tok, _)) in lexer::lex_starts_at(contents, Mode::Module, location)
for ((_, range), (tok, _)) in lexer::lex_starts_at(contents, source_type.as_mode(), location)
.flatten()
.tuple_windows()
{

@ -80,7 +85,12 @@ where

/// Return the [`TextRange`] of the token after the next match of the predicate, skipping over
/// any bracketed expressions.
fn match_token_after<F>(location: TextSize, locator: &Locator, f: F) -> Option<TextRange>
fn match_token_after<F>(
    location: TextSize,
    locator: &Locator,
    source_type: PySourceType,
    f: F,
) -> Option<TextRange>
where
F: Fn(Tok) -> bool,
{

@ -91,7 +101,7 @@ where
let mut sqb_count = 0u32;
let mut brace_count = 0u32;

for ((tok, _), (_, range)) in lexer::lex_starts_at(contents, Mode::Module, location)
for ((tok, _), (_, range)) in lexer::lex_starts_at(contents, source_type.as_mode(), location)
.flatten()
.tuple_windows()
{

@ -131,7 +141,12 @@ where

/// Return the [`TextRange`] of the token matching the predicate or the first mismatched
/// bracket, skipping over any bracketed expressions.
fn match_token_or_closing_brace<F>(location: TextSize, locator: &Locator, f: F) -> Option<TextRange>
fn match_token_or_closing_brace<F>(
    location: TextSize,
    locator: &Locator,
    source_type: PySourceType,
    f: F,
) -> Option<TextRange>
where
F: Fn(Tok) -> bool,
{

@ -142,7 +157,7 @@ where
let mut sqb_count = 0u32;
let mut brace_count = 0u32;

for (tok, range) in lexer::lex_starts_at(contents, Mode::Module, location).flatten() {
for (tok, range) in lexer::lex_starts_at(contents, source_type.as_mode(), location).flatten() {
match tok {
Tok::Lpar => {
par_count = par_count.saturating_add(1);

@ -204,7 +219,10 @@ fn remove_unused_variable(
// If the expression is complex (`x = foo()`), remove the assignment,
// but preserve the right-hand side.
let start = target.start();
let end = match_token_after(start, checker.locator(), |tok| tok == Tok::Equal)?
let end =
    match_token_after(start, checker.locator(), checker.source_type, |tok| {
        tok == Tok::Equal
    })?
    .start();
let edit = Edit::deletion(start, end);
Some(Fix::suggested(edit))

@ -230,7 +248,10 @@ fn remove_unused_variable(
// but preserve the right-hand side.
let start = stmt.start();
let end =
    match_token_after(start, checker.locator(), |tok| tok == Tok::Equal)?.start();
    match_token_after(start, checker.locator(), checker.source_type, |tok| {
        tok == Tok::Equal
    })?
    .start();
let edit = Edit::deletion(start, end);
Some(Fix::suggested(edit))
} else {

@ -249,16 +270,21 @@ fn remove_unused_variable(
if let Some(optional_vars) = &item.optional_vars {
if optional_vars.range() == range {
// Find the first token before the `as` keyword.
let start =
    match_token_before(item.context_expr.start(), checker.locator(), |tok| {
        tok == Tok::As
    })?
    .end();
let start = match_token_before(
    item.context_expr.start(),
    checker.locator(),
    checker.source_type,
    |tok| tok == Tok::As,
)?
.end();

// Find the first colon, comma, or closing bracket after the `as` keyword.
let end = match_token_or_closing_brace(start, checker.locator(), |tok| {
    tok == Tok::Colon || tok == Tok::Comma
})?
let end = match_token_or_closing_brace(
    start,
    checker.locator(),
    checker.source_type,
    |tok| tok == Tok::Colon || tok == Tok::Comma,
)?
.start();

let edit = Edit::deletion(start, end);

@ -2,7 +2,7 @@ use std::str::FromStr;

use ruff_python_ast::{self as ast, Constant, Expr, Ranged};
use ruff_python_literal::cformat::{CFormatPart, CFormatSpec, CFormatStrOrBytes, CFormatString};
use ruff_python_parser::{lexer, Mode};
use ruff_python_parser::{lexer, AsMode};
use ruff_text_size::TextRange;
use rustc_hash::FxHashMap;

@ -203,7 +203,9 @@ pub(crate) fn bad_string_format_type(checker: &mut Checker, expr: &Expr, right:
// Grab each string segment (in case there's an implicit concatenation).
let content = checker.locator().slice(expr.range());
let mut strings: Vec<TextRange> = vec![];
for (tok, range) in lexer::lex_starts_at(content, Mode::Module, expr.start()).flatten() {
for (tok, range) in
    lexer::lex_starts_at(content, checker.source_type.as_mode(), expr.start()).flatten()
{
if tok.is_string() {
strings.push(range);
} else if tok.is_percent() {

@ -4,7 +4,7 @@ use ruff_python_ast::{self as ast, Constant, Expr, Ranged};
use ruff_python_literal::cformat::{
CConversionFlags, CFormatPart, CFormatPrecision, CFormatQuantity, CFormatString,
};
use ruff_python_parser::{lexer, Mode, Tok};
use ruff_python_parser::{lexer, AsMode, Tok};
use ruff_text_size::TextRange;

use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix};

@ -339,7 +339,7 @@ pub(crate) fn printf_string_formatting(
let mut extension = None;
for (tok, range) in lexer::lex_starts_at(
checker.locator().slice(expr.range()),
Mode::Module,
checker.source_type.as_mode(),
expr.start(),
)
.flatten()

@ -4,8 +4,8 @@ use anyhow::{anyhow, Result};

use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::{self as ast, Constant, Expr, Ranged};
use ruff_python_parser::{lexer, Mode};
use ruff_python_ast::{self as ast, Constant, Expr, PySourceType, Ranged};
use ruff_python_parser::{lexer, AsMode};
use ruff_python_semantic::SemanticModel;
use ruff_source_file::Locator;
use ruff_text_size::TextSize;

@ -84,6 +84,7 @@ pub(crate) fn redundant_open_modes(checker: &mut Checker, call: &ast::ExprCall)
mode.replacement_value(),
checker.locator(),
checker.patch(Rule::RedundantOpenModes),
checker.source_type,
));
}
}

@ -103,6 +104,7 @@ pub(crate) fn redundant_open_modes(checker: &mut Checker, call: &ast::ExprCall)
mode.replacement_value(),
checker.locator(),
checker.patch(Rule::RedundantOpenModes),
checker.source_type,
));
}
}

@ -169,6 +171,7 @@ fn create_check<T: Ranged>(
replacement_value: Option<&str>,
locator: &Locator,
patch: bool,
source_type: PySourceType,
) -> Diagnostic {
let mut diagnostic = Diagnostic::new(
RedundantOpenModes {

@ -184,7 +187,7 @@ fn create_check<T: Ranged>(
)));
} else {
diagnostic.try_set_fix(|| {
create_remove_param_fix(locator, expr, mode_param).map(Fix::automatic)
create_remove_param_fix(locator, expr, mode_param, source_type).map(Fix::automatic)
});
}
}

@ -195,6 +198,7 @@ fn create_remove_param_fix<T: Ranged>(
locator: &Locator,
expr: &T,
mode_param: &Expr,
source_type: PySourceType,
) -> Result<Edit> {
let content = locator.slice(expr.range());
// Find the last comma before mode_param and create a deletion fix

@ -203,7 +207,8 @@ fn create_remove_param_fix<T: Ranged>(
let mut fix_end: Option<TextSize> = None;
let mut is_first_arg: bool = false;
let mut delete_first_arg: bool = false;
for (tok, range) in lexer::lex_starts_at(content, Mode::Module, expr.start()).flatten() {
for (tok, range) in lexer::lex_starts_at(content, source_type.as_mode(), expr.start()).flatten()
{
if range.start() == mode_param.start() {
if is_first_arg {
delete_first_arg = true;

@ -2,7 +2,7 @@ use anyhow::Result;

use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::{self as ast, Keyword, Ranged};
use ruff_python_ast::{self as ast, Keyword, PySourceType, Ranged};
use ruff_source_file::Locator;

use crate::autofix::edits::{remove_argument, Parentheses};

@ -56,6 +56,7 @@ fn generate_fix(
stderr: &Keyword,
call: &ast::ExprCall,
locator: &Locator,
source_type: PySourceType,
) -> Result<Fix> {
let (first, second) = if stdout.start() < stderr.start() {
(stdout, stderr)

@ -69,6 +70,7 @@ fn generate_fix(
&call.arguments,
Parentheses::Preserve,
locator,
source_type,
)?],
))
}

@ -103,7 +105,9 @@ pub(crate) fn replace_stdout_stderr(checker: &mut Checker, call: &ast::ExprCall)

let mut diagnostic = Diagnostic::new(ReplaceStdoutStderr, call.range());
if checker.patch(diagnostic.kind.rule()) {
diagnostic.try_set_fix(|| generate_fix(stdout, stderr, call, checker.locator()));
diagnostic.try_set_fix(|| {
    generate_fix(stdout, stderr, call, checker.locator(), checker.source_type)
});
}
checker.diagnostics.push(diagnostic);
}

@ -1,7 +1,7 @@
use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::{self as ast, Arguments, Constant, Expr, Keyword, Ranged};
use ruff_python_parser::{lexer, Mode, Tok};
use ruff_python_ast::{self as ast, Arguments, Constant, Expr, Keyword, PySourceType, Ranged};
use ruff_python_parser::{lexer, AsMode, Tok};
use ruff_source_file::Locator;
use ruff_text_size::TextRange;

@ -119,12 +119,18 @@ fn match_encoding_arg(arguments: &Arguments) -> Option<EncodingArg> {
}

/// Return a [`Fix`] replacing the call to encode with a byte string.
fn replace_with_bytes_literal<T: Ranged>(locator: &Locator, expr: &T) -> Fix {
fn replace_with_bytes_literal<T: Ranged>(
    locator: &Locator,
    expr: &T,
    source_type: PySourceType,
) -> Fix {
// Build up a replacement string by prefixing all string tokens with `b`.
let contents = locator.slice(expr.range());
let mut replacement = String::with_capacity(contents.len() + 1);
let mut prev = expr.start();
for (tok, range) in lexer::lex_starts_at(contents, Mode::Module, expr.start()).flatten() {
for (tok, range) in
    lexer::lex_starts_at(contents, source_type.as_mode(), expr.start()).flatten()
{
match tok {
Tok::Dot => break,
Tok::String { .. } => {

@ -166,7 +172,11 @@ pub(crate) fn unnecessary_encode_utf8(checker: &mut Checker, call: &ast::ExprCal
call.range(),
);
if checker.patch(Rule::UnnecessaryEncodeUTF8) {
diagnostic.set_fix(replace_with_bytes_literal(checker.locator(), call));
diagnostic.set_fix(replace_with_bytes_literal(
    checker.locator(),
    call,
    checker.source_type,
));
}
checker.diagnostics.push(diagnostic);
} else if let EncodingArg::Keyword(kwarg) = encoding_arg {

@ -185,6 +195,7 @@ pub(crate) fn unnecessary_encode_utf8(checker: &mut Checker, call: &ast::ExprCal
&call.arguments,
Parentheses::Preserve,
checker.locator(),
checker.source_type,
)
.map(Fix::automatic)
});

@ -205,6 +216,7 @@ pub(crate) fn unnecessary_encode_utf8(checker: &mut Checker, call: &ast::ExprCal
&call.arguments,
Parentheses::Preserve,
checker.locator(),
checker.source_type,
)
.map(Fix::automatic)
});

@ -232,6 +244,7 @@ pub(crate) fn unnecessary_encode_utf8(checker: &mut Checker, call: &ast::ExprCal
&call.arguments,
Parentheses::Preserve,
checker.locator(),
checker.source_type,
)
.map(Fix::automatic)
});

@ -252,6 +265,7 @@ pub(crate) fn unnecessary_encode_utf8(checker: &mut Checker, call: &ast::ExprCal
&call.arguments,
Parentheses::Preserve,
checker.locator(),
checker.source_type,
)
.map(Fix::automatic)
});

@ -69,8 +69,14 @@ pub(crate) fn useless_object_inheritance(checker: &mut Checker, class_def: &ast:
);
if checker.patch(diagnostic.kind.rule()) {
diagnostic.try_set_fix(|| {
remove_argument(base, arguments, Parentheses::Remove, checker.locator())
    .map(Fix::automatic)
remove_argument(
    base,
    arguments,
    Parentheses::Remove,
    checker.locator(),
    checker.source_type,
)
.map(Fix::automatic)
});
}
checker.diagnostics.push(diagnostic);

@ -7,11 +7,14 @@ use std::path::Path;
use anyhow::Result;
use itertools::Itertools;
use ruff_python_parser::lexer::LexResult;

use rustc_hash::FxHashMap;

use ruff_diagnostics::{AutofixKind, Diagnostic};
use ruff_python_ast::PySourceType;
use ruff_python_codegen::Stylist;
use ruff_python_index::Indexer;
use ruff_python_parser::AsMode;
use ruff_python_trivia::textwrap::dedent;
use ruff_source_file::{Locator, SourceFileBuilder};

@ -61,8 +64,9 @@ pub(crate) fn test_notebook_path(
path: impl AsRef<Path>,
expected: impl AsRef<Path>,
settings: &Settings,
) -> Result<(Vec<Message>, SourceKind)> {
) -> Result<(Vec<Message>, SourceKind, SourceKind)> {
let mut source_kind = SourceKind::Jupyter(read_jupyter_notebook(path.as_ref())?);
let original_source_kind = source_kind.clone();
let messages = test_contents(&mut source_kind, path.as_ref(), settings);
let expected_notebook = read_jupyter_notebook(expected.as_ref())?;
if let SourceKind::Jupyter(notebook) = &source_kind {

@ -70,7 +74,7 @@ pub(crate) fn test_notebook_path(
assert_eq!(notebook.index(), expected_notebook.index());
assert_eq!(notebook.content(), expected_notebook.content());
};
Ok((messages, source_kind))
Ok((messages, original_source_kind, source_kind))
}

/// Run [`check_path`] on a snippet of Python code.

@ -100,7 +104,8 @@ pub(crate) fn max_iterations() -> usize {
/// asserts that autofixes converge after a fixed number of iterations.
fn test_contents(source_kind: &mut SourceKind, path: &Path, settings: &Settings) -> Vec<Message> {
let contents = source_kind.content().to_string();
let tokens: Vec<LexResult> = ruff_python_parser::tokenize(&contents);
let source_type = PySourceType::from(path);
let tokens: Vec<LexResult> = ruff_python_parser::tokenize(&contents, source_type.as_mode());
let locator = Locator::new(&contents);
let stylist = Stylist::from_tokens(&tokens, &locator);
let indexer = Indexer::from_tokens(&tokens, &locator);

@ -125,6 +130,7 @@ fn test_contents(source_kind: &mut SourceKind, path: &Path, settings: &Settings)
settings,
flags::Noqa::Enabled,
Some(source_kind),
source_type,
);

let source_has_errors = error.is_some();

@ -162,7 +168,8 @@ fn test_contents(source_kind: &mut SourceKind, path: &Path, settings: &Settings)
notebook.update(&source_map, &fixed_contents);
};

let tokens: Vec<LexResult> = ruff_python_parser::tokenize(&fixed_contents);
let tokens: Vec<LexResult> =
    ruff_python_parser::tokenize(&fixed_contents, source_type.as_mode());
let locator = Locator::new(&fixed_contents);
let stylist = Stylist::from_tokens(&tokens, &locator);
let indexer = Indexer::from_tokens(&tokens, &locator);

@ -187,6 +194,7 @@ fn test_contents(source_kind: &mut SourceKind, path: &Path, settings: &Settings)
settings,
flags::Noqa::Enabled,
Some(source_kind),
source_type,
);

if let Some(fixed_error) = fixed_error {

@ -9,6 +9,7 @@ use ruff::linter::lint_only;
use ruff::settings::{flags, Settings};
use ruff::RuleSelector;
use ruff_benchmark::{TestCase, TestCaseSpeed, TestFile, TestFileDownloadError};
use ruff_python_ast::PySourceType;

#[cfg(target_os = "windows")]
#[global_allocator]

@ -57,13 +58,15 @@ fn benchmark_linter(mut group: BenchmarkGroup<WallTime>, settings: &Settings) {
&case,
|b, case| {
b.iter(|| {
let path = case.path();
let result = lint_only(
case.code(),
&case.path(),
&path,
None,
settings,
flags::Noqa::Enabled,
None,
PySourceType::from(path.as_path()),
);

// Assert that file contains no parse errors

@ -29,7 +29,8 @@ use ruff::{fs, IOError};
use ruff_diagnostics::Diagnostic;
use ruff_macros::CacheKey;
use ruff_python_ast::imports::ImportMap;
use ruff_python_stdlib::path::{is_jupyter_notebook, is_project_toml};
use ruff_python_ast::PySourceType;
use ruff_python_stdlib::path::is_project_toml;
use ruff_source_file::{LineIndex, SourceCode, SourceFileBuilder};

#[derive(CacheKey)]

@ -211,8 +212,10 @@ pub(crate) fn lint_path(
});
}

let source_type = PySourceType::from(path);

// Read the file from disk
let mut source_kind = if is_jupyter_notebook(path) {
let mut source_kind = if source_type.is_jupyter() {
match load_jupyter_notebook(path) {
Ok(notebook) => SourceKind::Jupyter(notebook),
Err(diagnostic) => return Ok(*diagnostic),

@ -249,6 +252,7 @@ pub(crate) fn lint_path(
noqa,
&settings.lib,
&mut source_kind,
source_type,
) {
if !fixed.is_empty() {
match autofix {

@ -335,6 +339,7 @@ pub(crate) fn lint_path(
&settings.lib,
noqa,
Some(&source_kind),
source_type,
);
let fixed = FxHashMap::default();
(result, fixed)

@ -347,6 +352,7 @@ pub(crate) fn lint_path(
&settings.lib,
noqa,
Some(&source_kind),
source_type,
);
let fixed = FxHashMap::default();
(result, fixed)

@ -396,6 +402,8 @@ pub(crate) fn lint_stdin(
autofix: flags::FixMode,
) -> Result<Diagnostics> {
let mut source_kind = SourceKind::Python(contents.to_string());
let source_type = PySourceType::default();

// Lint the inputs.
let (
LinterResult {

@ -415,6 +423,7 @@ pub(crate) fn lint_stdin(
noqa,
settings,
&mut source_kind,
source_type,
) {
match autofix {
flags::FixMode::Apply => {

@ -450,6 +459,7 @@ pub(crate) fn lint_stdin(
settings,
noqa,
Some(&source_kind),
source_type,
);
let fixed = FxHashMap::default();

@ -468,6 +478,7 @@ pub(crate) fn lint_stdin(
settings,
noqa,
Some(&source_kind),
source_type,
);
let fixed = FxHashMap::default();
(result, fixed)

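The entry points above derive a `PySourceType` from the file path once, then reuse it for both the notebook check (`is_jupyter()`) and the lexer mode. A rough sketch of that flow, assuming `PySourceType::from` maps `.ipynb` paths to the Jupyter variant (the helper name here is illustrative):

    use std::path::Path;

    use ruff_python_ast::PySourceType;
    use ruff_python_parser::{AsMode, Mode};

    /// Derive the lexer mode for a file from its path.
    fn lex_mode_for(path: &Path) -> Mode {
        // Assumed: `PySourceType::from` classifies `.ipynb` files as `PySourceType::Jupyter`;
        // the same value also gates notebook loading via `source_type.is_jupyter()`.
        let source_type = PySourceType::from(path);
        source_type.as_mode()
    }

Centralizing the conversion in the `AsMode` trait (see the parser changes below) keeps the mapping in one place instead of repeating an `if is_jupyter { Mode::Jupyter } else { Mode::Module }` branch in every caller.
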
@ -5,18 +5,26 @@ use std::fs;
use std::path::PathBuf;

use anyhow::Result;
use ruff_python_parser::parse_suite;
use ruff_python_parser::{parse, Mode};

#[derive(clap::Args)]
pub(crate) struct Args {
/// Python file for which to generate the AST.
#[arg(required = true)]
file: PathBuf,
/// Run in Jupyter mode i.e., allow line magics.
#[arg(long)]
jupyter: bool,
}

pub(crate) fn main(args: &Args) -> Result<()> {
let contents = fs::read_to_string(&args.file)?;
let python_ast = parse_suite(&contents, &args.file.to_string_lossy())?;
let mode = if args.jupyter {
    Mode::Jupyter
} else {
    Mode::Module
};
let python_ast = parse(&contents, mode, &args.file.to_string_lossy())?;
println!("{python_ast:#?}");
Ok(())
}

@ -12,11 +12,19 @@ pub(crate) struct Args {
/// Python file for which to generate the AST.
#[arg(required = true)]
file: PathBuf,
/// Run in Jupyter mode i.e., allow line magics (`%`, `!`, `?`, `/`, `,`, `;`).
#[arg(long)]
jupyter: bool,
}

pub(crate) fn main(args: &Args) -> Result<()> {
let contents = fs::read_to_string(&args.file)?;
for (tok, range) in lexer::lex(&contents, Mode::Module).flatten() {
let mode = if args.jupyter {
    Mode::Jupyter
} else {
    Mode::Module
};
for (tok, range) in lexer::lex(&contents, mode).flatten() {
println!(
"{start:#?} {tok:#?} {end:#?}",
start = range.start(),

@ -114,7 +114,7 @@ pub use parser::{
parse, parse_expression, parse_expression_starts_at, parse_program, parse_starts_at,
parse_suite, parse_tokens, ParseError, ParseErrorType,
};
use ruff_python_ast::{CmpOp, Expr, Mod, Ranged, Suite};
use ruff_python_ast::{CmpOp, Expr, Mod, PySourceType, Ranged, Suite};
use ruff_text_size::{TextRange, TextSize};
pub use string::FStringErrorType;
pub use token::{StringKind, Tok, TokenKind};

@ -130,9 +130,9 @@ mod token;
pub mod typing;

/// Collect tokens up to and including the first error.
pub fn tokenize(contents: &str) -> Vec<LexResult> {
pub fn tokenize(contents: &str, mode: Mode) -> Vec<LexResult> {
let mut tokens: Vec<LexResult> = vec![];
for tok in lexer::lex(contents, Mode::Module) {
for tok in lexer::lex(contents, mode) {
let is_err = tok.is_err();
tokens.push(tok);
if is_err {

@ -146,17 +146,32 @@ pub fn tokenize(contents: &str) -> Vec<LexResult> {
pub fn parse_program_tokens(
lxr: Vec<LexResult>,
source_path: &str,
is_jupyter_notebook: bool,
) -> anyhow::Result<Suite, ParseError> {
match parse_tokens(lxr, Mode::Module, source_path)? {
let mode = if is_jupyter_notebook {
    Mode::Jupyter
} else {
    Mode::Module
};
match parse_tokens(lxr, mode, source_path)? {
Mod::Module(m) => Ok(m.body),
Mod::Expression(_) => unreachable!("Mode::Module doesn't return other variant"),
}
}

/// Return the `Range` of the first `Tok::Colon` token in a `Range`.
pub fn first_colon_range(range: TextRange, source: &str) -> Option<TextRange> {
pub fn first_colon_range(
    range: TextRange,
    source: &str,
    is_jupyter_notebook: bool,
) -> Option<TextRange> {
let contents = &source[range];
let range = lexer::lex_starts_at(contents, Mode::Module, range.start())
let mode = if is_jupyter_notebook {
    Mode::Jupyter
} else {
    Mode::Module
};
let range = lexer::lex_starts_at(contents, mode, range.start())
.flatten()
.find(|(tok, _)| tok.is_colon())
.map(|(_, range)| range);

@ -308,6 +323,19 @@ impl std::str::FromStr for Mode {
}
}

pub trait AsMode {
    fn as_mode(&self) -> Mode;
}

impl AsMode for PySourceType {
    fn as_mode(&self) -> Mode {
        match self {
            PySourceType::Python | PySourceType::Stub => Mode::Module,
            PySourceType::Jupyter => Mode::Jupyter,
        }
    }
}

/// Returned when a given mode is not valid.
#[derive(Debug)]
pub struct ModeParseError;

@ -357,6 +385,7 @@ mod tests {
let range = first_colon_range(
TextRange::new(TextSize::from(0), contents.text_len()),
contents,
false,
)
.unwrap();
assert_eq!(&contents[range], ":");

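With `tokenize` now taking the mode explicitly, a caller that holds a `PySourceType` converts it through the new `AsMode` trait. A small usage sketch under the same workspace-crate assumptions (whether a given magic lexes cleanly is up to the Jupyter-mode lexer, so the assertion is illustrative):

    use ruff_python_ast::PySourceType;
    use ruff_python_parser::{tokenize, AsMode};

    fn main() {
        // A notebook cell with a line magic; only `Mode::Jupyter` understands `%` magics.
        let contents = "%matplotlib inline\nimport math\n";
        let tokens = tokenize(contents, PySourceType::Jupyter.as_mode());
        // `tokenize` collects tokens up to and including the first error, so an
        // all-Ok vector means the magic was lexed successfully.
        assert!(tokens.iter().all(Result::is_ok));
    }

Call sites without a notebook context, such as the shrinking tool in the next file, simply pass `Mode::Module` explicitly.
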
@ -36,6 +36,7 @@ use regex::Regex;
use ruff_python_ast::statement_visitor::{walk_body, walk_stmt, StatementVisitor};
use ruff_python_ast::visitor::{walk_expr, Visitor};
use ruff_python_ast::{Expr, Ranged, Stmt, Suite};
use ruff_python_parser::Mode;
use ruff_text_size::TextRange;
use std::collections::HashMap;
use std::path::{Path, PathBuf};

@ -275,7 +276,7 @@ impl Strategy for StrategyRemoveToken {
input: &'a str,
_ast: &'a Suite,
) -> Result<Box<dyn ExactSizeStringIter + 'a>> {
let token_ranges: Vec<_> = ruff_python_parser::tokenize(input)
let token_ranges: Vec<_> = ruff_python_parser::tokenize(input, Mode::Module)
.into_iter()
// At this point we know we have valid python code
.map(Result::unwrap)

@ -320,9 +321,9 @@ fn minimization_step(
pattern: &Regex,
last_strategy_and_idx: Option<(&'static dyn Strategy, usize)>,
) -> Result<Option<(&'static dyn Strategy, usize, String)>> {
let tokens = ruff_python_parser::tokenize(input);
let ast =
    ruff_python_parser::parse_program_tokens(tokens, "input.py").context("not valid python")?;
let tokens = ruff_python_parser::tokenize(input, Mode::Module);
let ast = ruff_python_parser::parse_program_tokens(tokens, "input.py", false)
    .context("not valid python")?;

// Try the last succeeding strategy first, skipping all that failed last time
if let Some((last_strategy, last_idx)) = last_strategy_and_idx {

@ -25,6 +25,7 @@ use ruff_python_ast::PySourceType;
use ruff_python_codegen::Stylist;
use ruff_python_formatter::{format_module, format_node, PyFormatOptions};
use ruff_python_index::{CommentRangesBuilder, Indexer};
use ruff_python_parser::AsMode;
use ruff_source_file::{Locator, SourceLocation};

#[wasm_bindgen(typescript_custom_section)]

@ -197,8 +198,10 @@ impl Workspace {
}

pub fn check(&self, contents: &str) -> Result<JsValue, Error> {
let source_type = PySourceType::default();

// Tokenize once.
let tokens: Vec<LexResult> = ruff_python_parser::tokenize(contents);
let tokens: Vec<LexResult> = ruff_python_parser::tokenize(contents, source_type.as_mode());

// Map row and column locations to byte slices (lazily).
let locator = Locator::new(contents);

@ -228,6 +231,7 @@ impl Workspace {
&self.settings,
flags::Noqa::Enabled,
None,
source_type,
);

let source_code = locator.to_source_code();