Make SourceKind a required parameter (#7013)
parent 93ca8ebbc0
commit 1067261a55
10 changed files with 62 additions and 60 deletions
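For orientation before the hunks, here is a minimal sketch of the `SourceKind` API this diff leans on, reconstructed only from what appears below (`SourceKind::Python(String)`, `SourceKind::IpyNotebook(Notebook)`, and the `source_code()` accessor now used by `add_noqa_to_path`). The `Notebook` stub and the method bodies are illustrative assumptions, not the actual crate code.

// Sketch only: `Notebook` stands in for ruff's real notebook type.
#[derive(Debug)]
pub struct Notebook;

/// Minimal reconstruction of `SourceKind` as used in this diff.
#[derive(Debug)]
pub enum SourceKind {
    /// Plain Python source text.
    Python(String),
    /// A parsed Jupyter notebook.
    IpyNotebook(Notebook),
}

impl SourceKind {
    /// Return the source text regardless of kind.
    /// (Assumed signature; the notebook arm is stubbed here.)
    pub fn source_code(&self) -> &str {
        match self {
            Self::Python(code) => code,
            // The real type would return the notebook's concatenated source.
            Self::IpyNotebook(_notebook) => "",
        }
    }
}

fn main() {
    let kind = SourceKind::Python("import os\n".to_string());
    assert_eq!(kind.source_code(), "import os\n");
}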
@@ -85,7 +85,7 @@ pub(crate) fn check_imports(
     stylist: &Stylist,
     path: &Path,
     package: Option<&Path>,
-    source_kind: Option<&SourceKind>,
+    source_kind: &SourceKind,
     source_type: PySourceType,
 ) -> (Vec<Diagnostic>, Option<ImportMap>) {
     // Extract all import blocks from the AST.
@@ -81,7 +81,7 @@ pub fn check_path(
     directives: &Directives,
     settings: &Settings,
     noqa: flags::Noqa,
-    source_kind: Option<&SourceKind>,
+    source_kind: &SourceKind,
     source_type: PySourceType,
 ) -> LinterResult<(Vec<Diagnostic>, Option<ImportMap>)> {
     // Aggregate all diagnostics.
@@ -270,17 +270,17 @@ const MAX_ITERATIONS: usize = 100;
 pub fn add_noqa_to_path(
     path: &Path,
     package: Option<&Path>,
+    source_kind: &SourceKind,
     source_type: PySourceType,
     settings: &Settings,
 ) -> Result<usize> {
-    // Read the file from disk.
-    let contents = std::fs::read_to_string(path)?;
+    let contents = source_kind.source_code();
 
     // Tokenize once.
-    let tokens: Vec<LexResult> = ruff_python_parser::tokenize(&contents, source_type.as_mode());
+    let tokens: Vec<LexResult> = ruff_python_parser::tokenize(contents, source_type.as_mode());
 
     // Map row and column locations to byte slices (lazily).
-    let locator = Locator::new(&contents);
+    let locator = Locator::new(contents);
 
     // Detect the current code style (lazily).
     let stylist = Stylist::from_tokens(&tokens, &locator);
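A small knock-on effect in the hunk above: `tokenize(&contents, ...)` and `Locator::new(&contents)` lose their `&` because `source_kind.source_code()` already yields a `&str`, whereas `std::fs::read_to_string` returned an owned `String` that had to be borrowed at each call site. A minimal illustration (not ruff code):

fn takes_str(_source: &str) {}

fn main() {
    // Before: an owned `String` from `std::fs::read_to_string(path)?`.
    let owned: String = String::from("x = 1\n");
    takes_str(&owned); // needs an explicit borrow

    // After: `source_kind.source_code()` already hands back a `&str`.
    let borrowed: &str = owned.as_str();
    takes_str(borrowed); // no extra `&` required
}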
@@ -310,21 +310,20 @@ pub fn add_noqa_to_path(
         &directives,
         settings,
         flags::Noqa::Disabled,
-        None,
+        source_kind,
         source_type,
     );
 
     // Log any parse errors.
     if let Some(err) = error {
-        // TODO(dhruvmanila): This should use `SourceKind`, update when
-        // `--add-noqa` is supported for Jupyter notebooks.
         error!(
             "{}",
-            DisplayParseError::new(err, locator.to_source_code(), None)
+            DisplayParseError::new(err, locator.to_source_code(), source_kind)
         );
     }
 
     // Add any missing `# noqa` pragmas.
+    // TODO(dhruvmanila): Add support for Jupyter Notebooks
     add_noqa(
         path,
         &diagnostics.0,
@@ -377,7 +376,7 @@ pub fn lint_only(
         &directives,
         settings,
         noqa,
-        Some(source_kind),
+        source_kind,
         source_type,
     );
 
@@ -471,7 +470,7 @@ pub fn lint_fix<'a>(
         &directives,
         settings,
         noqa,
-        Some(source_kind),
+        source_kind,
         source_type,
     );
 
@@ -139,14 +139,14 @@ pub fn set_up_logging(level: &LogLevel) -> Result<()> {
 pub struct DisplayParseError<'a> {
     error: ParseError,
     source_code: SourceCode<'a, 'a>,
-    source_kind: Option<&'a SourceKind>,
+    source_kind: &'a SourceKind,
 }
 
 impl<'a> DisplayParseError<'a> {
     pub fn new(
         error: ParseError,
         source_code: SourceCode<'a, 'a>,
-        source_kind: Option<&'a SourceKind>,
+        source_kind: &'a SourceKind,
     ) -> Self {
         Self {
             error,
@@ -171,11 +171,8 @@ impl Display for DisplayParseError<'_> {
         // If we're working on a Jupyter notebook, translate the positions
         // with respect to the cell and row in the cell. This is the same
         // format as the `TextEmitter`.
-        let error_location = if let Some(jupyter_index) = self
-            .source_kind
-            .and_then(SourceKind::notebook)
-            .map(Notebook::index)
-        {
+        let error_location =
+            if let Some(jupyter_index) = self.source_kind.as_ipy_notebook().map(Notebook::index) {
                 write!(
                     f,
                     "cell {cell}{colon}",
@@ -43,7 +43,7 @@ impl<'a> BlockBuilder<'a> {
         locator: &'a Locator<'a>,
         directives: &'a IsortDirectives,
         is_stub: bool,
-        source_kind: Option<&'a SourceKind>,
+        source_kind: &'a SourceKind,
     ) -> Self {
         Self {
             locator,
@@ -53,7 +53,7 @@ impl<'a> BlockBuilder<'a> {
             exclusions: &directives.exclusions,
             nested: false,
             cell_offsets: source_kind
-                .and_then(SourceKind::notebook)
+                .as_ipy_notebook()
                 .map(Notebook::cell_offsets)
                 .map(|offsets| offsets.iter().peekable()),
         }
@@ -27,6 +27,7 @@ mod tests {
     use crate::registry::{AsRule, Linter, Rule};
     use crate::rules::pyflakes;
     use crate::settings::{flags, Settings};
+    use crate::source_kind::SourceKind;
     use crate::test::{test_path, test_snippet};
     use crate::{assert_messages, directives};
 
@@ -508,6 +509,7 @@ mod tests {
     fn flakes(contents: &str, expected: &[Rule]) {
         let contents = dedent(contents);
         let source_type = PySourceType::default();
+        let source_kind = SourceKind::Python(contents.to_string());
         let settings = Settings::for_rules(Linter::Pyflakes.rules());
         let tokens: Vec<LexResult> = ruff_python_parser::tokenize(&contents, source_type.as_mode());
         let locator = Locator::new(&contents);
@@ -532,7 +534,7 @@ mod tests {
             &directives,
             &settings,
             flags::Noqa::Enabled,
-            None,
+            &source_kind,
             source_type,
         );
         diagnostics.sort_by_key(Ranged::start);
@@ -10,15 +10,6 @@ pub enum SourceKind {
 }
 
 impl SourceKind {
-    /// Return the [`Notebook`] if the source kind is [`SourceKind::IpyNotebook`].
-    pub fn notebook(&self) -> Option<&Notebook> {
-        if let Self::IpyNotebook(notebook) = self {
-            Some(notebook)
-        } else {
-            None
-        }
-    }
-
     #[must_use]
     pub(crate) fn updated(&self, new_source: String, source_map: &SourceMap) -> Self {
         match self {
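The hand-written `notebook()` helper removed above is replaced at call sites by `as_ipy_notebook()`, whose definition is not part of this diff; it may be generated by a derive macro (something like `is_macro::Is`) or written by hand elsewhere, and that origin is an assumption here. A hand-written equivalent, with stand-in types, would look roughly like this:

// Sketch only; mirrors the accessor shape implied by the call sites in this diff.
pub struct Notebook;

pub enum SourceKind {
    Python(String),
    IpyNotebook(Notebook),
}

impl SourceKind {
    /// Return the [`Notebook`] if this is `SourceKind::IpyNotebook`, like the
    /// removed `notebook()` helper but following the `as_*` naming convention.
    pub fn as_ipy_notebook(&self) -> Option<&Notebook> {
        match self {
            Self::IpyNotebook(notebook) => Some(notebook),
            Self::Python(_) => None,
        }
    }
}

fn main() {
    let kind = SourceKind::Python(String::new());
    assert!(kind.as_ipy_notebook().is_none());
}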
@@ -132,7 +132,7 @@ pub(crate) fn test_contents<'a>(
         &directives,
         settings,
         flags::Noqa::Enabled,
-        Some(source_kind),
+        source_kind,
         source_type,
     );
 
@@ -195,7 +195,7 @@ pub(crate) fn test_contents<'a>(
         &directives,
         settings,
         flags::Noqa::Enabled,
-        Some(source_kind),
+        source_kind,
         source_type,
     );
 
@@ -274,7 +274,7 @@ fn print_diagnostics(diagnostics: Vec<Diagnostic>, path: &Path, source: &SourceK
         })
         .collect();
 
-    if let Some(notebook) = source.notebook() {
+    if let Some(notebook) = source.as_ipy_notebook() {
         print_jupyter_messages(&messages, path, notebook)
     } else {
         print_messages(&messages)
@@ -12,6 +12,7 @@ use ruff_python_ast::{PySourceType, SourceType};
 use ruff_workspace::resolver::{python_files_in_path, PyprojectConfig};
 
 use crate::args::Overrides;
+use crate::diagnostics::LintSource;
 
 /// Add `noqa` directives to a collection of files.
 pub(crate) fn add_noqa(
@@ -56,7 +57,15 @@ pub(crate) fn add_noqa(
                 .and_then(|parent| package_roots.get(parent))
                 .and_then(|package| *package);
             let settings = resolver.resolve(path, pyproject_config);
-            match add_noqa_to_path(path, package, source_type, settings) {
+            let LintSource(source_kind) = match LintSource::try_from_path(path, source_type) {
+                Ok(Some(source)) => source,
+                Ok(None) => return None,
+                Err(e) => {
+                    error!("Failed to extract source from {}: {e}", path.display());
+                    return None;
+                }
+            };
+            match add_noqa_to_path(path, package, &source_kind, source_type, settings) {
                 Ok(count) => Some(count),
                 Err(e) => {
                     error!("Failed to add noqa to {}: {e}", path.display());
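The CLI-side change above extracts the source before calling `add_noqa_to_path`, handling three outcomes: a usable source, a file that yields no lintable source, and an extraction error. The sketch below mirrors that control flow with stand-in types; `LintSource`, `SourceKind`, and the error type are ruff internals, so only the shape of the match is taken from the diff.

// Stand-ins for the ruff-internal types named in the diff.
struct SourceKind(String);
struct LintSource(SourceKind);

#[derive(Debug)]
struct SourceExtractionError;

impl LintSource {
    // Assumed behavior: read the file and classify it; stubbed here.
    fn try_from_path(path: &str) -> Result<Option<LintSource>, SourceExtractionError> {
        Ok(Some(LintSource(SourceKind(format!("# contents of {path}\n")))))
    }
}

// Mirrors the three-way handling added in this commit.
fn add_noqa_for(path: &str) -> Option<usize> {
    let LintSource(source_kind) = match LintSource::try_from_path(path) {
        Ok(Some(source)) => source,
        Ok(None) => return None,
        Err(e) => {
            eprintln!("Failed to extract source from {path}: {e:?}");
            return None;
        }
    };
    // The real code would now call `add_noqa_to_path(path, package, &source_kind, ...)`.
    Some(source_kind.0.len())
}

fn main() {
    assert!(add_noqa_for("example.py").is_some());
}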
@@ -354,7 +354,7 @@ pub(crate) fn lint_path(
                     source_kind.source_code(),
                     &LineIndex::from_source_text(source_kind.source_code())
                 ),
-                Some(&source_kind),
+                &source_kind,
             )
         );
     }
@@ -503,11 +503,11 @@ pub(crate) fn lint_stdin(
 }
 
 #[derive(Debug)]
-struct LintSource(SourceKind);
+pub(crate) struct LintSource(pub(crate) SourceKind);
 
 impl LintSource {
     /// Extract the lint [`LintSource`] from the given file path.
-    fn try_from_path(
+    pub(crate) fn try_from_path(
         path: &Path,
         source_type: PySourceType,
     ) -> Result<Option<LintSource>, SourceExtractionError> {
@@ -526,7 +526,7 @@ impl LintSource {
     /// Extract the lint [`LintSource`] from the raw string contents, optionally accompanied by a
     /// file path indicating the path to the file from which the contents were read. If provided,
     /// the file path should be used for diagnostics, but not for reading the file from disk.
-    fn try_from_source_code(
+    pub(crate) fn try_from_source_code(
         source_code: String,
         source_type: PySourceType,
     ) -> Result<Option<LintSource>, SourceExtractionError> {
@@ -10,6 +10,7 @@ use ruff::linter::{check_path, LinterResult};
 use ruff::registry::AsRule;
 use ruff::settings::types::PythonVersion;
 use ruff::settings::{defaults, flags, Settings};
+use ruff::source_kind::SourceKind;
 use ruff_formatter::{FormatResult, Formatted};
 use ruff_python_ast::{Mod, PySourceType};
 use ruff_python_codegen::Stylist;
@@ -165,6 +166,9 @@ impl Workspace {
     pub fn check(&self, contents: &str) -> Result<JsValue, Error> {
         let source_type = PySourceType::default();
 
+        // TODO(dhruvmanila): Support Jupyter Notebooks
+        let source_kind = SourceKind::Python(contents.to_string());
+
         // Tokenize once.
         let tokens: Vec<LexResult> = ruff_python_parser::tokenize(contents, source_type.as_mode());
 
@@ -195,7 +199,7 @@ impl Workspace {
             &directives,
             &self.settings,
             flags::Noqa::Enabled,
-            None,
+            &source_kind,
             source_type,
         );
 