mirror of
https://github.com/astral-sh/ruff.git
synced 2025-09-29 21:34:57 +00:00
Sort edits prior to deduplicating in quotation fix (#11452)
## Summary

We already have handling for "references that get quoted within our quoted references", but we were assuming a specific ordering in the way the edits were generated: the filter only dropped an edit if it was contained by an edit we'd already kept, so a contained edit that happened to come first survived alongside the edit that encloses it. We now sort the edits (by start offset, breaking ties with the larger edit first) prior to deduplicating, so enclosing edits are always seen first and contained edits are filtered out regardless of generation order.

Closes https://github.com/astral-sh/ruff/issues/11449.
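A minimal sketch of the ordering the fix relies on, using plain `(start, end)` tuples in place of ruff's `Edit` type (the ranges here are made up): sorting by ascending start with `Reverse(end)` as the tie-breaker puts an enclosing range ahead of every range it contains.

```rust
use std::cmp::Reverse;

fn main() {
    // Hypothetical edit ranges as (start, end) byte offsets.
    let mut ranges = vec![(10u32, 14u32), (10, 30), (5, 8)];

    // Same key as in the fix: ascending start; ties broken so that the
    // edit with the *larger* end (the enclosing one) comes first.
    ranges.sort_unstable_by_key(|&(start, end)| (start, Reverse(end)));

    // (10, 30) now precedes (10, 14), which it contains.
    assert_eq!(ranges, vec![(5, 8), (10, 30), (10, 14)]);
}
```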
This commit is contained in:
parent 42b655b24f
commit 43e8147eaf

4 changed files with 75 additions and 3 deletions
@@ -90,3 +90,10 @@ def f():
 
     def func() -> DataFrame[[DataFrame[_P, _R]], DataFrame[_P, _R]]:
         ...
+
+
+def f():
+    from pandas import DataFrame, Series
+
+    def func(self) -> DataFrame | list[Series]:
+        pass
@@ -1,4 +1,5 @@
 use anyhow::Result;
+use std::cmp::Reverse;
 
 use ruff_diagnostics::Edit;
 use ruff_python_ast::helpers::{map_callable, map_subscript};
@@ -286,11 +287,17 @@ pub(crate) fn quote_annotation(
 
 /// Filter out any [`Edit`]s that are completely contained by any other [`Edit`].
 pub(crate) fn filter_contained(edits: Vec<Edit>) -> Vec<Edit> {
+    let mut edits = edits;
+
+    // Sort such that the largest edits are prioritized.
+    edits.sort_unstable_by_key(|edit| (edit.start(), Reverse(edit.end())));
+
+    // Remove any edits that are completely contained by another edit.
     let mut filtered: Vec<Edit> = Vec::with_capacity(edits.len());
     for edit in edits {
-        if filtered
+        if !filtered
             .iter()
-            .all(|filtered_edit| !filtered_edit.range().contains_range(edit.range()))
+            .any(|filtered_edit| filtered_edit.range().contains_range(edit.range()))
         {
             filtered.push(edit);
         }
     }
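To illustrate the new behavior in isolation, here is a self-contained sketch of the sort-then-filter pass over plain `(start, end)` tuples rather than `Edit`/`TextRange` (the helper and the ranges below are invented for the example); it shows that the surviving edits no longer depend on the order in which the edits were generated, which is the order-dependence described in the summary.

```rust
use std::cmp::Reverse;

/// Rough stand-in for `TextRange::contains_range`: `outer` fully covers `inner`.
fn contains(outer: (u32, u32), inner: (u32, u32)) -> bool {
    outer.0 <= inner.0 && inner.1 <= outer.1
}

/// Sketch of the sorted containment filter over plain ranges.
fn filter_contained(mut edits: Vec<(u32, u32)>) -> Vec<(u32, u32)> {
    // Largest edits first, as in the change above.
    edits.sort_unstable_by_key(|&(start, end)| (start, Reverse(end)));

    let mut filtered: Vec<(u32, u32)> = Vec::with_capacity(edits.len());
    for edit in edits {
        // Keep an edit only if no already-kept edit contains it.
        if !filtered.iter().any(|kept| contains(*kept, edit)) {
            filtered.push(edit);
        }
    }
    filtered
}

fn main() {
    // The inner edit arriving first used to survive alongside the outer one;
    // after sorting, both input orders produce the same result.
    assert_eq!(filter_contained(vec![(24, 33), (24, 64)]), vec![(24, 64)]);
    assert_eq!(filter_contained(vec![(24, 64), (24, 33)]), vec![(24, 64)]);
}
```

The unstable sort is sufficient for this purpose: two edits with the same key cover the same range, so their relative order does not change which ranges survive the filter.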
@@ -296,6 +296,8 @@ quote.py:78:24: TCH002 [*] Move third-party import `pandas.DataFrame` into a type-checking block
 91    |-    def func() -> DataFrame[[DataFrame[_P, _R]], DataFrame[_P, _R]]:
     91 |+    def func() -> "DataFrame[[DataFrame[_P, _R]], DataFrame[_P, _R]]":
 92 92 |         ...
+93 93 | 
+94 94 | 
 
 quote.py:78:35: TCH002 [*] Move third-party import `pandas.Series` into a type-checking block
    |
@@ -337,5 +339,61 @@ quote.py:78:35: TCH002 [*] Move third-party import `pandas.Series` into a type-checking block
 91    |-    def func() -> DataFrame[[DataFrame[_P, _R]], DataFrame[_P, _R]]:
     91 |+    def func() -> "DataFrame[[DataFrame[_P, _R]], DataFrame[_P, _R]]":
 92 92 |         ...
+93 93 | 
+94 94 | 
+
+quote.py:96:24: TCH002 [*] Move third-party import `pandas.DataFrame` into a type-checking block
+   |
+95 | def f():
+96 |     from pandas import DataFrame, Series
+   |                        ^^^^^^^^^ TCH002
+97 |
+98 |     def func(self) -> DataFrame | list[Series]:
+   |
+   = help: Move into type-checking block
+
+ℹ Unsafe fix
+1  |+from typing import TYPE_CHECKING
+2  |+
+3  |+if TYPE_CHECKING:
+4  |+    from pandas import DataFrame, Series
+1  5 | def f():
+2  6 |     from pandas import DataFrame
+3  7 | 
+--------------------------------------------------------------------------------
+93 97 | 
+94 98 | 
+95 99 | def f():
+96    |-    from pandas import DataFrame, Series
+97 100 | 
+98    |-    def func(self) -> DataFrame | list[Series]:
+   101 |+    def func(self) -> "DataFrame | list[Series]":
+99 102 |         pass
+
+quote.py:96:35: TCH002 [*] Move third-party import `pandas.Series` into a type-checking block
+   |
+95 | def f():
+96 |     from pandas import DataFrame, Series
+   |                                   ^^^^^^ TCH002
+97 |
+98 |     def func(self) -> DataFrame | list[Series]:
+   |
+   = help: Move into type-checking block
+
+ℹ Unsafe fix
+1  |+from typing import TYPE_CHECKING
+2  |+
+3  |+if TYPE_CHECKING:
+4  |+    from pandas import DataFrame, Series
+1  5 | def f():
+2  6 |     from pandas import DataFrame
+3  7 | 
+--------------------------------------------------------------------------------
+93 97 | 
+94 98 | 
+95 99 | def f():
+96    |-    from pandas import DataFrame, Series
+97 100 | 
+98    |-    def func(self) -> DataFrame | list[Series]:
+   101 |+    def func(self) -> "DataFrame | list[Series]":
+99 102 |         pass
@@ -91,7 +91,7 @@ pub fn test_snippet(contents: &str, settings: &LinterSettings) -> Vec<Message> {
 }
 
 thread_local! {
-    static MAX_ITERATIONS: std::cell::Cell<usize> = const { std::cell::Cell::new(8) };
+    static MAX_ITERATIONS: std::cell::Cell<usize> = const { std::cell::Cell::new(10) };
 }
 
 pub fn set_max_iterations(max: usize) {