Mirror of https://github.com/astral-sh/ruff.git (synced 2025-09-26 20:10:09 +00:00)
Start detecting version-related syntax errors in the parser (#16090)
## Summary

This PR builds on the changes in #16220 to pass a target Python version to the parser. It also adds the `Parser::unsupported_syntax_errors` field, which collects version-related syntax errors while parsing. These syntax errors are then turned into `Message`s in ruff (in preview mode).

This PR only detects one syntax error (`match` statement before Python 3.10), but it has been pretty quick to extend to several other simple errors (see #16308 for example).

## Test Plan

The current tests are CLI tests in the linter crate, but these could be supplemented with inline parser tests after #16357.

I also tested the display of these syntax errors in VS Code.

---------

Co-authored-by: Alex Waygood <alex.waygood@gmail.com>
Parent: b39a4ad01d
Commit: 78806361fd
14 changed files with 356 additions and 37 deletions
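Not part of the diff below: a rough sketch of how the new parser API described in the summary fits together, using only names introduced in this change. The `PY39` constant is an assumption (only `PY310` appears in the diff), and the snippet is an illustration rather than an excerpt.

```rust
use ruff_python_ast::{PySourceType, PythonVersion};
use ruff_python_parser::{parse_unchecked, ParseOptions};

fn main() {
    let source = "match command:\n    case 1:\n        pass\n";

    // Thread the target version into the parser via ParseOptions.
    let options =
        ParseOptions::from(PySourceType::Python).with_target_version(PythonVersion::PY39);
    let parsed = parse_unchecked(source, options);

    // Version-related errors are collected separately from hard parse errors.
    for error in parsed.unsupported_syntax_errors() {
        println!("SyntaxError: {error}");
    }
}
```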
@@ -586,6 +586,7 @@ mod tests {
     use anyhow::Result;
     use filetime::{set_file_mtime, FileTime};
     use itertools::Itertools;
+    use ruff_linter::settings::LinterSettings;
     use test_case::test_case;

     use ruff_cache::CACHE_DIR_NAME;
@@ -593,7 +594,7 @@ mod tests {
     use ruff_linter::package::PackageRoot;
     use ruff_linter::settings::flags;
     use ruff_linter::settings::types::UnsafeFixes;
-    use ruff_python_ast::PySourceType;
+    use ruff_python_ast::{PySourceType, PythonVersion};
     use ruff_workspace::Settings;

     use crate::cache::{self, FileCache, FileCacheData, FileCacheKey};
@@ -611,6 +612,10 @@ mod tests {

         let settings = Settings {
             cache_dir,
+            linter: LinterSettings {
+                unresolved_target_version: PythonVersion::PY310,
+                ..Default::default()
+            },
             ..Settings::default()
         };

@@ -2627,3 +2627,77 @@ class A(Generic[T]):
     "
     );
 }
+
+#[test]
+fn match_before_py310() {
+    // ok on 3.10
+    assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
+        .args(STDIN_BASE_OPTIONS)
+        .args(["--stdin-filename", "test.py"])
+        .arg("--target-version=py310")
+        .arg("-")
+        .pass_stdin(
+            r#"
+match 2:
+    case 1:
+        print("it's one")
+"#
+        ),
+        @r"
+    success: true
+    exit_code: 0
+    ----- stdout -----
+    All checks passed!
+
+    ----- stderr -----
+    "
+    );
+
+    // ok on 3.9 without preview
+    assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
+        .args(STDIN_BASE_OPTIONS)
+        .args(["--stdin-filename", "test.py"])
+        .arg("--target-version=py39")
+        .arg("-")
+        .pass_stdin(
+            r#"
+match 2:
+    case 1:
+        print("it's one")
+"#
+        ),
+        @r"
+    success: true
+    exit_code: 0
+    ----- stdout -----
+    All checks passed!
+
+    ----- stderr -----
+    "
+    );
+
+    // syntax error on 3.9 with preview
+    assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
+        .args(STDIN_BASE_OPTIONS)
+        .args(["--stdin-filename", "test.py"])
+        .arg("--target-version=py39")
+        .arg("--preview")
+        .arg("-")
+        .pass_stdin(
+            r#"
+match 2:
+    case 1:
+        print("it's one")
+"#
+        ),
+        @r"
+    success: false
+    exit_code: 1
+    ----- stdout -----
+    test.py:2:1: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
+    Found 1 error.
+
+    ----- stderr -----
+    "
+    );
+}
@@ -10,10 +10,10 @@ use rustc_hash::FxHashMap;

 use ruff_diagnostics::Diagnostic;
 use ruff_notebook::Notebook;
-use ruff_python_ast::{ModModule, PySourceType};
+use ruff_python_ast::{ModModule, PySourceType, PythonVersion};
 use ruff_python_codegen::Stylist;
 use ruff_python_index::Indexer;
-use ruff_python_parser::{ParseError, Parsed};
+use ruff_python_parser::{ParseError, ParseOptions, Parsed, UnsupportedSyntaxError};
 use ruff_source_file::SourceFileBuilder;
 use ruff_text_size::Ranged;

@@ -71,6 +71,7 @@ pub fn check_path(
     source_kind: &SourceKind,
     source_type: PySourceType,
     parsed: &Parsed<ModModule>,
+    target_version: PythonVersion,
 ) -> Vec<Diagnostic> {
     // Aggregate all diagnostics.
     let mut diagnostics = vec![];
@@ -104,8 +105,6 @@ pub fn check_path(
         ));
     }

-    let target_version = settings.resolve_target_version(path);
-
     // Run the filesystem-based rules.
     if settings
         .rules
@@ -335,7 +334,8 @@ pub fn add_noqa_to_path(
     settings: &LinterSettings,
 ) -> Result<usize> {
     // Parse once.
-    let parsed = ruff_python_parser::parse_unchecked_source(source_kind.source_code(), source_type);
+    let target_version = settings.resolve_target_version(path);
+    let parsed = parse_unchecked_source(source_kind, source_type, target_version);

     // Map row and column locations to byte slices (lazily).
     let locator = Locator::new(source_kind.source_code());
@@ -367,6 +367,7 @@ pub fn add_noqa_to_path(
         source_kind,
         source_type,
         &parsed,
+        target_version,
     );

     // Add any missing `# noqa` pragmas.
@@ -393,7 +394,8 @@ pub fn lint_only(
     source_type: PySourceType,
     source: ParseSource,
 ) -> LinterResult {
-    let parsed = source.into_parsed(source_kind, source_type);
+    let target_version = settings.resolve_target_version(path);
+    let parsed = source.into_parsed(source_kind, source_type, target_version);

     // Map row and column locations to byte slices (lazily).
     let locator = Locator::new(source_kind.source_code());
@@ -425,12 +427,20 @@ pub fn lint_only(
         source_kind,
         source_type,
         &parsed,
+        target_version,
     );

+    let syntax_errors = if settings.preview.is_enabled() {
+        parsed.unsupported_syntax_errors()
+    } else {
+        &[]
+    };
+
     LinterResult {
         messages: diagnostics_to_messages(
             diagnostics,
             parsed.errors(),
+            syntax_errors,
             path,
             &locator,
             &directives,
@@ -443,6 +453,7 @@ pub fn lint_only(
 fn diagnostics_to_messages(
     diagnostics: Vec<Diagnostic>,
     parse_errors: &[ParseError],
+    unsupported_syntax_errors: &[UnsupportedSyntaxError],
     path: &Path,
     locator: &Locator,
     directives: &Directives,
@@ -461,6 +472,9 @@ fn diagnostics_to_messages(
     parse_errors
         .iter()
         .map(|parse_error| Message::from_parse_error(parse_error, locator, file.deref().clone()))
+        .chain(unsupported_syntax_errors.iter().map(|syntax_error| {
+            Message::from_unsupported_syntax_error(syntax_error, file.deref().clone())
+        }))
         .chain(diagnostics.into_iter().map(|diagnostic| {
             let noqa_offset = directives.noqa_line_for.resolve(diagnostic.start());
             Message::from_diagnostic(diagnostic, file.deref().clone(), noqa_offset)
@@ -491,11 +505,12 @@ pub fn lint_fix<'a>(
     // Track whether the _initial_ source code is valid syntax.
     let mut is_valid_syntax = false;

+    let target_version = settings.resolve_target_version(path);
+
     // Continuously fix until the source code stabilizes.
     loop {
         // Parse once.
-        let parsed =
-            ruff_python_parser::parse_unchecked_source(transformed.source_code(), source_type);
+        let parsed = parse_unchecked_source(&transformed, source_type, target_version);

         // Map row and column locations to byte slices (lazily).
         let locator = Locator::new(transformed.source_code());
@@ -527,6 +542,7 @@ pub fn lint_fix<'a>(
             &transformed,
             source_type,
             &parsed,
+            target_version,
         );

         if iterations == 0 {
@@ -573,11 +589,18 @@ pub fn lint_fix<'a>(
                 report_failed_to_converge_error(path, transformed.source_code(), &diagnostics);
             }

+            let syntax_errors = if settings.preview.is_enabled() {
+                parsed.unsupported_syntax_errors()
+            } else {
+                &[]
+            };
+
             return Ok(FixerResult {
                 result: LinterResult {
                     messages: diagnostics_to_messages(
                         diagnostics,
                         parsed.errors(),
+                        syntax_errors,
                         path,
                         &locator,
                         &directives,
@@ -680,16 +703,35 @@ pub enum ParseSource {
 impl ParseSource {
     /// Consumes the [`ParseSource`] and returns the parsed [`Parsed`], parsing the source code if
     /// necessary.
-    fn into_parsed(self, source_kind: &SourceKind, source_type: PySourceType) -> Parsed<ModModule> {
+    fn into_parsed(
+        self,
+        source_kind: &SourceKind,
+        source_type: PySourceType,
+        target_version: PythonVersion,
+    ) -> Parsed<ModModule> {
         match self {
-            ParseSource::None => {
-                ruff_python_parser::parse_unchecked_source(source_kind.source_code(), source_type)
-            }
+            ParseSource::None => parse_unchecked_source(source_kind, source_type, target_version),
             ParseSource::Precomputed(parsed) => parsed,
         }
     }
 }

+/// Like [`ruff_python_parser::parse_unchecked_source`] but with an additional [`PythonVersion`]
+/// argument.
+fn parse_unchecked_source(
+    source_kind: &SourceKind,
+    source_type: PySourceType,
+    target_version: PythonVersion,
+) -> Parsed<ModModule> {
+    let options = ParseOptions::from(source_type).with_target_version(target_version);
+    // SAFETY: Safe because `PySourceType` always parses to a `ModModule`. See
+    // `ruff_python_parser::parse_unchecked_source`. We use `parse_unchecked` (and thus
+    // have to unwrap) in order to pass the `PythonVersion` via `ParseOptions`.
+    ruff_python_parser::parse_unchecked(source_kind.source_code(), options)
+        .try_into_module()
+        .expect("PySourceType always parses into a module")
+}
+
 #[cfg(test)]
 mod tests {
     use std::path::Path;
@@ -16,7 +16,7 @@ pub use pylint::PylintEmitter;
 pub use rdjson::RdjsonEmitter;
 use ruff_diagnostics::{Diagnostic, DiagnosticKind, Fix};
 use ruff_notebook::NotebookIndex;
-use ruff_python_parser::ParseError;
+use ruff_python_parser::{ParseError, UnsupportedSyntaxError};
 use ruff_source_file::{SourceFile, SourceLocation};
 use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
 pub use sarif::SarifEmitter;
@@ -121,6 +121,18 @@ impl Message {
         })
     }

+    /// Create a [`Message`] from the given [`UnsupportedSyntaxError`].
+    pub fn from_unsupported_syntax_error(
+        unsupported_syntax_error: &UnsupportedSyntaxError,
+        file: SourceFile,
+    ) -> Message {
+        Message::SyntaxError(SyntaxErrorMessage {
+            message: format!("SyntaxError: {unsupported_syntax_error}"),
+            range: unsupported_syntax_error.range,
+            file,
+        })
+    }
+
     pub const fn as_diagnostic_message(&self) -> Option<&DiagnosticMessage> {
         match self {
             Message::Diagnostic(m) => Some(m),
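Again outside the diff: a rough sketch of how a caller might turn the parser's new errors into `Message`s, mirroring `diagnostics_to_messages` above. The `ruff_linter::message::Message` import path, the `SourceFileBuilder::new(name, source).finish()` call, and the `PY39` constant are assumptions; the rest is taken from this change.

```rust
use ruff_linter::message::Message; // assumed public path
use ruff_python_ast::{PySourceType, PythonVersion};
use ruff_python_parser::{parse_unchecked, ParseOptions};
use ruff_source_file::SourceFileBuilder;

// Hypothetical helper: collect version-related syntax errors as `Message`s.
fn unsupported_syntax_messages(name: &str, source: &str) -> Vec<Message> {
    let options =
        ParseOptions::from(PySourceType::Python).with_target_version(PythonVersion::PY39);
    let parsed = parse_unchecked(source, options);
    let file = SourceFileBuilder::new(name, source).finish(); // assumed builder signature
    parsed
        .unsupported_syntax_errors()
        .iter()
        .map(|error| Message::from_unsupported_syntax_error(error, file.clone()))
        .collect()
}
```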
@@ -11,6 +11,7 @@ mod tests {

     use anyhow::Result;
     use regex::Regex;
+    use ruff_python_parser::ParseOptions;
     use rustc_hash::FxHashMap;
     use test_case::test_case;

@@ -744,8 +745,11 @@ mod tests {
         let source_type = PySourceType::default();
         let source_kind = SourceKind::Python(contents.to_string());
         let settings = LinterSettings::for_rules(Linter::Pyflakes.rules());
-        let parsed =
-            ruff_python_parser::parse_unchecked_source(source_kind.source_code(), source_type);
+        let options =
+            ParseOptions::from(source_type).with_target_version(settings.unresolved_target_version);
+        let parsed = ruff_python_parser::parse_unchecked(source_kind.source_code(), options)
+            .try_into_module()
+            .expect("PySourceType always parses into a module");
         let locator = Locator::new(&contents);
         let stylist = Stylist::from_tokens(parsed.tokens(), locator.contents());
         let indexer = Indexer::from_tokens(parsed.tokens(), locator.contents());
@@ -767,6 +771,7 @@ mod tests {
             &source_kind,
             source_type,
             &parsed,
+            settings.unresolved_target_version,
         );
         diagnostics.sort_by_key(Ranged::start);
         let actual = diagnostics
@@ -16,7 +16,7 @@ use ruff_notebook::NotebookError;
 use ruff_python_ast::PySourceType;
 use ruff_python_codegen::Stylist;
 use ruff_python_index::Indexer;
-use ruff_python_parser::ParseError;
+use ruff_python_parser::{ParseError, ParseOptions};
 use ruff_python_trivia::textwrap::dedent;
 use ruff_source_file::SourceFileBuilder;
 use ruff_text_size::Ranged;
@@ -110,7 +110,11 @@ pub(crate) fn test_contents<'a>(
     settings: &LinterSettings,
 ) -> (Vec<Message>, Cow<'a, SourceKind>) {
     let source_type = PySourceType::from(path);
-    let parsed = ruff_python_parser::parse_unchecked_source(source_kind.source_code(), source_type);
+    let target_version = settings.resolve_target_version(path);
+    let options = ParseOptions::from(source_type).with_target_version(target_version);
+    let parsed = ruff_python_parser::parse_unchecked(source_kind.source_code(), options.clone())
+        .try_into_module()
+        .expect("PySourceType always parses into a module");
     let locator = Locator::new(source_kind.source_code());
     let stylist = Stylist::from_tokens(parsed.tokens(), locator.contents());
     let indexer = Indexer::from_tokens(parsed.tokens(), locator.contents());
@@ -134,6 +138,7 @@ pub(crate) fn test_contents<'a>(
         source_kind,
         source_type,
         &parsed,
+        target_version,
     );

     let source_has_errors = !parsed.is_valid();
@@ -174,7 +179,9 @@ pub(crate) fn test_contents<'a>(
         transformed = Cow::Owned(transformed.updated(fixed_contents, &source_map));

         let parsed =
-            ruff_python_parser::parse_unchecked_source(transformed.source_code(), source_type);
+            ruff_python_parser::parse_unchecked(transformed.source_code(), options.clone())
+                .try_into_module()
+                .expect("PySourceType always parses into a module");
         let locator = Locator::new(transformed.source_code());
         let stylist = Stylist::from_tokens(parsed.tokens(), locator.contents());
         let indexer = Indexer::from_tokens(parsed.tokens(), locator.contents());
@@ -197,6 +204,7 @@ pub(crate) fn test_contents<'a>(
             &transformed,
             source_type,
             &parsed,
+            target_version,
         );

         if !parsed.is_valid() && !source_has_errors {
@@ -1,5 +1,6 @@
-use std::fmt;
+use std::fmt::{self, Display};

+use ruff_python_ast::PythonVersion;
 use ruff_text_size::TextRange;

 use crate::TokenKind;
@@ -426,6 +427,50 @@ impl std::fmt::Display for LexicalErrorType {
     }
 }

+/// Represents a version-related syntax error detected during parsing.
+///
+/// An example of a version-related error is the use of a `match` statement before Python 3.10, when
+/// it was first introduced. See [`UnsupportedSyntaxErrorKind`] for other kinds of errors.
+#[derive(Debug, PartialEq, Clone)]
+pub struct UnsupportedSyntaxError {
+    pub kind: UnsupportedSyntaxErrorKind,
+    pub range: TextRange,
+    /// The target [`PythonVersion`] for which this error was detected.
+    ///
+    /// This is different from the version reported by the
+    /// [`minimum_version`](UnsupportedSyntaxError::minimum_version) method, which is the earliest
+    /// allowed version for this piece of syntax. The `target_version` is primarily used for
+    /// user-facing error messages.
+    pub target_version: PythonVersion,
+}
+
+impl UnsupportedSyntaxError {
+    /// The earliest allowed version for the syntax associated with this error.
+    pub const fn minimum_version(&self) -> PythonVersion {
+        match self.kind {
+            UnsupportedSyntaxErrorKind::MatchBeforePy310 => PythonVersion::PY310,
+        }
+    }
+}
+
+impl Display for UnsupportedSyntaxError {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self.kind {
+            UnsupportedSyntaxErrorKind::MatchBeforePy310 => write!(
+                f,
+                "Cannot use `match` statement on Python {} (syntax was added in Python {})",
+                self.target_version,
+                self.minimum_version(),
+            ),
+        }
+    }
+}
+
+#[derive(Debug, PartialEq, Clone, Copy)]
+pub enum UnsupportedSyntaxErrorKind {
+    MatchBeforePy310,
+}
+
 #[cfg(target_pointer_width = "64")]
 mod sizes {
     use crate::error::{LexicalError, LexicalErrorType};
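A small illustration of the new error type's behaviour, again as a sketch rather than part of the diff (the `PY39` constant is assumed to exist alongside `PY310`; the range values are arbitrary):

```rust
use ruff_python_ast::PythonVersion;
use ruff_python_parser::{UnsupportedSyntaxError, UnsupportedSyntaxErrorKind};
use ruff_text_size::{TextRange, TextSize};

fn main() {
    // Construct an error the way the parser would when it sees `match` under a 3.9 target.
    let error = UnsupportedSyntaxError {
        kind: UnsupportedSyntaxErrorKind::MatchBeforePy310,
        range: TextRange::new(TextSize::new(0), TextSize::new(5)),
        target_version: PythonVersion::PY39, // assumed constant, analogous to PY310
    };

    // `minimum_version` reports when the syntax became legal...
    assert_eq!(error.minimum_version(), PythonVersion::PY310);
    // ...and `Display` combines both versions into the user-facing message.
    println!("{error}");
    // -> Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
}
```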
@@ -67,7 +67,10 @@
 use std::iter::FusedIterator;
 use std::ops::Deref;

-pub use crate::error::{FStringErrorType, LexicalErrorType, ParseError, ParseErrorType};
+pub use crate::error::{
+    FStringErrorType, LexicalErrorType, ParseError, ParseErrorType, UnsupportedSyntaxError,
+    UnsupportedSyntaxErrorKind,
+};
 pub use crate::parser::ParseOptions;
 pub use crate::token::{Token, TokenKind};

@@ -305,6 +308,7 @@ pub struct Parsed<T> {
     syntax: T,
     tokens: Tokens,
     errors: Vec<ParseError>,
+    unsupported_syntax_errors: Vec<UnsupportedSyntaxError>,
 }

 impl<T> Parsed<T> {
@@ -323,6 +327,11 @@ impl<T> Parsed<T> {
         &self.errors
     }

+    /// Returns a list of version-related syntax errors found during parsing.
+    pub fn unsupported_syntax_errors(&self) -> &[UnsupportedSyntaxError] {
+        &self.unsupported_syntax_errors
+    }
+
     /// Consumes the [`Parsed`] output and returns the contained syntax node.
     pub fn into_syntax(self) -> T {
         self.syntax
@@ -334,12 +343,18 @@ impl<T> Parsed<T> {
     }

     /// Returns `true` if the parsed source code is valid i.e., it has no syntax errors.
+    ///
+    /// Note that this does not include version-related
+    /// [`unsupported_syntax_errors`](Parsed::unsupported_syntax_errors).
     pub fn is_valid(&self) -> bool {
         self.errors.is_empty()
     }

     /// Returns the [`Parsed`] output as a [`Result`], returning [`Ok`] if it has no syntax errors,
     /// or [`Err`] containing the first [`ParseError`] encountered.
+    ///
+    /// Note that any [`unsupported_syntax_errors`](Parsed::unsupported_syntax_errors) will not
+    /// cause [`Err`] to be returned.
     pub fn as_result(&self) -> Result<&Parsed<T>, &[ParseError]> {
         if self.is_valid() {
             Ok(self)
@@ -350,6 +365,9 @@ impl<T> Parsed<T> {

     /// Consumes the [`Parsed`] output and returns a [`Result`] which is [`Ok`] if it has no syntax
     /// errors, or [`Err`] containing the first [`ParseError`] encountered.
+    ///
+    /// Note that any [`unsupported_syntax_errors`](Parsed::unsupported_syntax_errors) will not
+    /// cause [`Err`] to be returned.
     pub(crate) fn into_result(self) -> Result<Parsed<T>, ParseError> {
         if self.is_valid() {
             Ok(self)
@@ -373,6 +391,7 @@ impl Parsed<Mod> {
             syntax: module,
             tokens: self.tokens,
             errors: self.errors,
+            unsupported_syntax_errors: self.unsupported_syntax_errors,
         }),
         Mod::Expression(_) => None,
     }
@@ -392,6 +411,7 @@ impl Parsed<Mod> {
             syntax: expression,
             tokens: self.tokens,
             errors: self.errors,
+            unsupported_syntax_errors: self.unsupported_syntax_errors,
         }),
     }
 }
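As the new doc comments above note, `is_valid` and `as_result` only consider hard parse errors, so a file whose only problem is version-related still parses cleanly. A minimal sketch of that distinction, assuming a `PY39` constant on `PythonVersion` (not an excerpt from the diff):

```rust
use ruff_python_ast::{PySourceType, PythonVersion};
use ruff_python_parser::{parse_unchecked, ParseOptions};

fn main() {
    let source = "match command:\n    case 1:\n        pass\n";
    let options =
        ParseOptions::from(PySourceType::Python).with_target_version(PythonVersion::PY39);
    let parsed = parse_unchecked(source, options);

    // No hard syntax errors: the `match` statement itself parses fine.
    assert!(parsed.is_valid());
    // ...but the version-related error is recorded separately.
    assert_eq!(parsed.unsupported_syntax_errors().len(), 1);
}
```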
@@ -5,6 +5,7 @@ use bitflags::bitflags;
 use ruff_python_ast::{Mod, ModExpression, ModModule};
 use ruff_text_size::{Ranged, TextRange, TextSize};

+use crate::error::UnsupportedSyntaxError;
 use crate::parser::expression::ExpressionContext;
 use crate::parser::progress::{ParserProgress, TokenId};
 use crate::token::TokenValue;
@@ -35,6 +36,9 @@ pub(crate) struct Parser<'src> {
     /// Stores all the syntax errors found during the parsing.
     errors: Vec<ParseError>,

+    /// Stores non-fatal syntax errors found during parsing, such as version-related errors.
+    unsupported_syntax_errors: Vec<UnsupportedSyntaxError>,
+
     /// Options for how the code will be parsed.
     options: ParseOptions,

@@ -70,6 +74,7 @@ impl<'src> Parser<'src> {
             options,
             source,
             errors: Vec::new(),
+            unsupported_syntax_errors: Vec::new(),
             tokens,
             recovery_context: RecoveryContext::empty(),
             prev_token_end: TextSize::new(0),
@@ -166,6 +171,7 @@ impl<'src> Parser<'src> {
             syntax,
             tokens: Tokens::new(tokens),
             errors: parse_errors,
+            unsupported_syntax_errors: self.unsupported_syntax_errors,
         };
     }

@@ -197,6 +203,7 @@ impl<'src> Parser<'src> {
             syntax,
             tokens: Tokens::new(tokens),
             errors: merged,
+            unsupported_syntax_errors: self.unsupported_syntax_errors,
         }
     }

@@ -658,6 +665,7 @@ impl<'src> Parser<'src> {
         ParserCheckpoint {
             tokens: self.tokens.checkpoint(),
             errors_position: self.errors.len(),
+            unsupported_syntax_errors_position: self.unsupported_syntax_errors.len(),
             current_token_id: self.current_token_id,
             prev_token_end: self.prev_token_end,
             recovery_context: self.recovery_context,
@@ -669,6 +677,7 @@ impl<'src> Parser<'src> {
         let ParserCheckpoint {
             tokens,
             errors_position,
+            unsupported_syntax_errors_position,
             current_token_id,
             prev_token_end,
             recovery_context,
@@ -676,6 +685,8 @@ impl<'src> Parser<'src> {

         self.tokens.rewind(tokens);
         self.errors.truncate(errors_position);
+        self.unsupported_syntax_errors
+            .truncate(unsupported_syntax_errors_position);
         self.current_token_id = current_token_id;
         self.prev_token_end = prev_token_end;
         self.recovery_context = recovery_context;
@@ -685,6 +696,7 @@ impl<'src> Parser<'src> {
 struct ParserCheckpoint {
     tokens: TokenSourceCheckpoint,
     errors_position: usize,
+    unsupported_syntax_errors_position: usize,
     current_token_id: TokenId,
     prev_token_end: TextSize,
     recovery_context: RecoveryContext,
@@ -1,4 +1,4 @@
-use ruff_python_ast::PySourceType;
+use ruff_python_ast::{PySourceType, PythonVersion};

 use crate::{AsMode, Mode};

@@ -20,15 +20,28 @@ use crate::{AsMode, Mode};
 ///
 /// let options = ParseOptions::from(PySourceType::Python);
 /// ```
-#[derive(Debug)]
+#[derive(Clone, Debug)]
 pub struct ParseOptions {
     /// Specify the mode in which the code will be parsed.
     pub(crate) mode: Mode,
+    /// Target version for detecting version-related syntax errors.
+    pub(crate) target_version: PythonVersion,
+}
+
+impl ParseOptions {
+    #[must_use]
+    pub fn with_target_version(mut self, target_version: PythonVersion) -> Self {
+        self.target_version = target_version;
+        self
+    }
 }

 impl From<Mode> for ParseOptions {
     fn from(mode: Mode) -> Self {
-        Self { mode }
+        Self {
+            mode,
+            target_version: PythonVersion::default(),
+        }
     }
 }

@@ -36,6 +49,7 @@ impl From<PySourceType> for ParseOptions {
     fn from(source_type: PySourceType) -> Self {
         Self {
             mode: source_type.as_mode(),
+            target_version: PythonVersion::default(),
         }
     }
 }
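In short, both `From` impls above fall back to `PythonVersion::default()`, and `with_target_version` overrides it builder-style. A minimal sketch of the two construction paths, separate from the diff:

```rust
use ruff_python_ast::{PySourceType, PythonVersion};
use ruff_python_parser::{Mode, ParseOptions};

fn main() {
    // Built from a Mode: the target version stays at PythonVersion::default().
    let _from_mode = ParseOptions::from(Mode::Module);

    // Built from a PySourceType, then overridden so version-related errors are detected.
    let _from_source_type =
        ParseOptions::from(PySourceType::Python).with_target_version(PythonVersion::PY310);
}
```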
@@ -5,7 +5,8 @@ use rustc_hash::{FxBuildHasher, FxHashSet};

 use ruff_python_ast::name::Name;
 use ruff_python_ast::{
-    self as ast, ExceptHandler, Expr, ExprContext, IpyEscapeKind, Operator, Stmt, WithItem,
+    self as ast, ExceptHandler, Expr, ExprContext, IpyEscapeKind, Operator, PythonVersion, Stmt,
+    WithItem,
 };
 use ruff_text_size::{Ranged, TextSize};

@@ -16,7 +17,7 @@ use crate::parser::{
 };
 use crate::token::{TokenKind, TokenValue};
 use crate::token_set::TokenSet;
-use crate::{Mode, ParseErrorType};
+use crate::{Mode, ParseErrorType, UnsupportedSyntaxError, UnsupportedSyntaxErrorKind};

 use super::expression::ExpressionContext;
 use super::Parenthesized;
@@ -2257,11 +2258,21 @@ impl<'src> Parser<'src> {
         let start = self.node_start();
         self.bump(TokenKind::Match);

+        let match_range = self.node_range(start);
+
         let subject = self.parse_match_subject_expression();
         self.expect(TokenKind::Colon);

         let cases = self.parse_match_body();

+        if self.options.target_version < PythonVersion::PY310 {
+            self.unsupported_syntax_errors.push(UnsupportedSyntaxError {
+                kind: UnsupportedSyntaxErrorKind::MatchBeforePy310,
+                range: match_range,
+                target_version: self.options.target_version,
+            });
+        }
+
         ast::StmtMatch {
             subject: Box::new(subject),
             cases,
@@ -24,7 +24,7 @@ use ruff_linter::{
 use ruff_notebook::Notebook;
 use ruff_python_codegen::Stylist;
 use ruff_python_index::Indexer;
-use ruff_python_parser::ParseError;
+use ruff_python_parser::{ParseError, ParseOptions, UnsupportedSyntaxError};
 use ruff_source_file::LineIndex;
 use ruff_text_size::{Ranged, TextRange};

@@ -94,8 +94,18 @@ pub(crate) fn check(

     let source_type = query.source_type();

+    let target_version = if let Some(path) = &document_path {
+        settings.linter.resolve_target_version(path)
+    } else {
+        settings.linter.unresolved_target_version
+    };
+
+    let parse_options = ParseOptions::from(source_type).with_target_version(target_version);
+
     // Parse once.
-    let parsed = ruff_python_parser::parse_unchecked_source(source_kind.source_code(), source_type);
+    let parsed = ruff_python_parser::parse_unchecked(source_kind.source_code(), parse_options)
+        .try_into_module()
+        .expect("PySourceType always parses to a ModModule");

     // Map row and column locations to byte slices (lazily).
     let locator = Locator::new(source_kind.source_code());
@@ -122,6 +132,7 @@ pub(crate) fn check(
         &source_kind,
         source_type,
         &parsed,
+        target_version,
     );

     let noqa_edits = generate_noqa_edits(
@@ -164,7 +175,10 @@ pub(crate) fn check(
     let lsp_diagnostics = lsp_diagnostics.chain(
         show_syntax_errors
             .then(|| {
-                parsed.errors().iter().map(|parse_error| {
+                parsed
+                    .errors()
+                    .iter()
+                    .map(|parse_error| {
                         parse_error_to_lsp_diagnostic(
                             parse_error,
                             &source_kind,
@@ -172,6 +186,14 @@ pub(crate) fn check(
                             encoding,
                         )
                     })
+                    .chain(parsed.unsupported_syntax_errors().iter().map(|error| {
+                        unsupported_syntax_error_to_lsp_diagnostic(
+                            error,
+                            &source_kind,
+                            locator.to_index(),
+                            encoding,
+                        )
+                    }))
             })
             .into_iter()
             .flatten(),
@@ -350,6 +372,45 @@ fn parse_error_to_lsp_diagnostic(
     )
 }

+fn unsupported_syntax_error_to_lsp_diagnostic(
+    unsupported_syntax_error: &UnsupportedSyntaxError,
+    source_kind: &SourceKind,
+    index: &LineIndex,
+    encoding: PositionEncoding,
+) -> (usize, lsp_types::Diagnostic) {
+    let range: lsp_types::Range;
+    let cell: usize;
+
+    if let Some(notebook_index) = source_kind.as_ipy_notebook().map(Notebook::index) {
+        NotebookRange { cell, range } = unsupported_syntax_error.range.to_notebook_range(
+            source_kind.source_code(),
+            index,
+            notebook_index,
+            encoding,
+        );
+    } else {
+        cell = usize::default();
+        range = unsupported_syntax_error
+            .range
+            .to_range(source_kind.source_code(), index, encoding);
+    }
+
+    (
+        cell,
+        lsp_types::Diagnostic {
+            range,
+            severity: Some(lsp_types::DiagnosticSeverity::ERROR),
+            tags: None,
+            code: None,
+            code_description: None,
+            source: Some(DIAGNOSTIC_NAME.into()),
+            message: format!("SyntaxError: {unsupported_syntax_error}"),
+            related_information: None,
+            data: None,
+        },
+    )
+}
+
 fn diagnostic_edit_range(
     range: TextRange,
     source_kind: &SourceKind,
@@ -196,6 +196,7 @@ impl Workspace {
             &source_kind,
             source_type,
             &parsed,
+            self.settings.linter.unresolved_target_version,
         );

         let source_code = locator.to_source_code();
@@ -58,7 +58,16 @@ def redknot_contains_bug(code: str, *, red_knot_executable: Path) -> bool:
 def ruff_contains_bug(code: str, *, ruff_executable: Path) -> bool:
     """Return `True` if the code triggers a parser error."""
     completed_process = subprocess.run(
-        [ruff_executable, "check", "--config", "lint.select=[]", "--no-cache", "-"],
+        [
+            ruff_executable,
+            "check",
+            "--config",
+            "lint.select=[]",
+            "--no-cache",
+            "--target-version",
+            "py313",
+            "-",
+        ],
         capture_output=True,
         text=True,
         input=code,