Merge remote-tracking branch 'origin/main' into fix-21364

This commit is contained in:
Dan 2025-11-13 18:49:04 -05:00
commit 409ff8404a
83 changed files with 3946 additions and 868 deletions

View file

@ -491,6 +491,7 @@ Ruff is used by a number of major open-source projects and companies, including:
- [PyTorch](https://github.com/pytorch/pytorch)
- [Pydantic](https://github.com/pydantic/pydantic)
- [Pylint](https://github.com/PyCQA/pylint)
- [PyScripter](https://github.com/pyscripter/pyscripter)
- [PyVista](https://github.com/pyvista/pyvista)
- [Reflex](https://github.com/reflex-dev/reflex)
- [River](https://github.com/online-ml/river)

View file

@ -415,8 +415,13 @@ pub struct CheckCommand {
)]
pub statistics: bool,
/// Enable automatic additions of `noqa` directives to failing lines.
/// Optionally provide a reason to append after the codes.
#[arg(
long,
value_name = "REASON",
default_missing_value = "",
num_args = 0..=1,
require_equals = true,
// conflicts_with = "add_noqa",
conflicts_with = "show_files",
conflicts_with = "show_settings",
@ -428,7 +433,7 @@ pub struct CheckCommand {
conflicts_with = "fix",
conflicts_with = "diff",
)]
pub add_noqa: bool,
pub add_noqa: Option<String>,
/// See the files Ruff will be run against with the current settings.
#[arg(
long,
@ -1057,7 +1062,7 @@ Possible choices:
/// etc.).
#[expect(clippy::struct_excessive_bools)]
pub struct CheckArguments {
pub add_noqa: bool,
pub add_noqa: Option<String>,
pub diff: bool,
pub exit_non_zero_on_fix: bool,
pub exit_zero: bool,

View file

@ -21,6 +21,7 @@ pub(crate) fn add_noqa(
files: &[PathBuf],
pyproject_config: &PyprojectConfig,
config_arguments: &ConfigArguments,
reason: Option<&str>,
) -> Result<usize> {
// Collect all the files to check.
let start = Instant::now();
@ -76,7 +77,14 @@ pub(crate) fn add_noqa(
return None;
}
};
match add_noqa_to_path(path, package, &source_kind, source_type, &settings.linter) {
match add_noqa_to_path(
path,
package,
&source_kind,
source_type,
&settings.linter,
reason,
) {
Ok(count) => Some(count),
Err(e) => {
error!("Failed to add noqa to {}: {e}", path.display());

View file

@ -319,12 +319,20 @@ pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<Exi
warn_user!("Detected debug build without --no-cache.");
}
if cli.add_noqa {
if let Some(reason) = &cli.add_noqa {
if !fix_mode.is_generate() {
warn_user!("--fix is incompatible with --add-noqa.");
}
if reason.contains(['\n', '\r']) {
return Err(anyhow::anyhow!(
"--add-noqa <reason> cannot contain newline characters"
));
}
let reason_opt = (!reason.is_empty()).then_some(reason.as_str());
let modifications =
commands::add_noqa::add_noqa(&files, &pyproject_config, &config_arguments)?;
commands::add_noqa::add_noqa(&files, &pyproject_config, &config_arguments, reason_opt)?;
if modifications > 0 && config_arguments.log_level >= LogLevel::Default {
let s = if modifications == 1 { "" } else { "s" };
#[expect(clippy::print_stderr)]

View file

@ -1760,6 +1760,64 @@ from foo import ( # noqa: F401
Ok(())
}
#[test]
fn add_noqa_with_reason() -> Result<()> {
let fixture = CliTest::new()?;
fixture.write_file(
"test.py",
r#"import os
def foo():
x = 1
"#,
)?;
assert_cmd_snapshot!(fixture
.check_command()
.arg("--add-noqa=TODO: fix")
.arg("--select=F401,F841")
.arg("test.py"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Added 2 noqa directives.
");
let content = fs::read_to_string(fixture.root().join("test.py"))?;
insta::assert_snapshot!(content, @r"
import os # noqa: F401 TODO: fix
def foo():
x = 1 # noqa: F841 TODO: fix
");
Ok(())
}
#[test]
fn add_noqa_with_newline_in_reason() -> Result<()> {
let fixture = CliTest::new()?;
fixture.write_file("test.py", "import os\n")?;
assert_cmd_snapshot!(fixture
.check_command()
.arg("--add-noqa=line1\nline2")
.arg("--select=F401")
.arg("test.py"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
ruff failed
Cause: --add-noqa <reason> cannot contain newline characters
"###);
Ok(())
}
/// Infer `3.11` from `requires-python` in `pyproject.toml`.
#[test]
fn requires_python() -> Result<()> {

View file

@ -71,16 +71,13 @@ impl Display for Benchmark<'_> {
}
}
fn check_project(db: &ProjectDatabase, max_diagnostics: usize) {
fn check_project(db: &ProjectDatabase, project_name: &str, max_diagnostics: usize) {
let result = db.check();
let diagnostics = result.len();
assert!(
diagnostics > 1 && diagnostics <= max_diagnostics,
"Expected between {} and {} diagnostics but got {}",
1,
max_diagnostics,
diagnostics
"Expected between 1 and {max_diagnostics} diagnostics on project '{project_name}' but got {diagnostics}",
);
}
@ -184,7 +181,7 @@ static PYDANTIC: Benchmark = Benchmark::new(
max_dep_date: "2025-06-17",
python_version: PythonVersion::PY39,
},
1000,
5000,
);
static SYMPY: Benchmark = Benchmark::new(
@ -226,7 +223,7 @@ static STATIC_FRAME: Benchmark = Benchmark::new(
max_dep_date: "2025-08-09",
python_version: PythonVersion::PY311,
},
800,
900,
);
#[track_caller]
@ -234,11 +231,11 @@ fn run_single_threaded(bencher: Bencher, benchmark: &Benchmark) {
bencher
.with_inputs(|| benchmark.setup_iteration())
.bench_local_refs(|db| {
check_project(db, benchmark.max_diagnostics);
check_project(db, benchmark.project.name, benchmark.max_diagnostics);
});
}
#[bench(args=[&ALTAIR, &FREQTRADE, &PYDANTIC, &TANJUN], sample_size=2, sample_count=3)]
#[bench(args=[&ALTAIR, &FREQTRADE, &TANJUN], sample_size=2, sample_count=3)]
fn small(bencher: Bencher, benchmark: &Benchmark) {
run_single_threaded(bencher, benchmark);
}
@ -248,12 +245,12 @@ fn medium(bencher: Bencher, benchmark: &Benchmark) {
run_single_threaded(bencher, benchmark);
}
#[bench(args=[&SYMPY], sample_size=1, sample_count=2)]
#[bench(args=[&SYMPY, &PYDANTIC], sample_size=1, sample_count=2)]
fn large(bencher: Bencher, benchmark: &Benchmark) {
run_single_threaded(bencher, benchmark);
}
#[bench(args=[&PYDANTIC], sample_size=3, sample_count=8)]
#[bench(args=[&ALTAIR], sample_size=3, sample_count=8)]
fn multithreaded(bencher: Bencher, benchmark: &Benchmark) {
let thread_pool = ThreadPoolBuilder::new().build().unwrap();
@ -261,7 +258,7 @@ fn multithreaded(bencher: Bencher, benchmark: &Benchmark) {
.with_inputs(|| benchmark.setup_iteration())
.bench_local_values(|db| {
thread_pool.install(|| {
check_project(&db, benchmark.max_diagnostics);
check_project(&db, benchmark.project.name, benchmark.max_diagnostics);
db
})
});
@ -285,7 +282,7 @@ fn main() {
// branch when looking up the ingredient index.
{
let db = TANJUN.setup_iteration();
check_project(&db, TANJUN.max_diagnostics);
check_project(&db, TANJUN.project.name, TANJUN.max_diagnostics);
}
divan::main();

View file

@ -112,16 +112,16 @@ impl std::fmt::Display for Diff<'_> {
// `None`, indicating a regular script file, all the lines will be in one "cell" under the
// `None` key.
let cells = if let Some(notebook_index) = &self.notebook_index {
let mut last_cell = OneIndexed::MIN;
let mut last_cell_index = OneIndexed::MIN;
let mut cells: Vec<(Option<OneIndexed>, TextSize)> = Vec::new();
for (row, cell) in notebook_index.iter() {
if cell != last_cell {
let offset = source_code.line_start(row);
cells.push((Some(last_cell), offset));
last_cell = cell;
for cell in notebook_index.iter() {
if cell.cell_index() != last_cell_index {
let offset = source_code.line_start(cell.start_row());
cells.push((Some(last_cell_index), offset));
last_cell_index = cell.cell_index();
}
}
cells.push((Some(last_cell), source_text.text_len()));
cells.push((Some(last_cell_index), source_text.text_len()));
cells
} else {
vec![(None, source_text.text_len())]

View file

@ -46,7 +46,8 @@ def func():
def func():
# OK (index doesn't start at 0)
# SIM113
# https://github.com/astral-sh/ruff/pull/21395
idx = 10
for x in range(5):
g(x, idx)

View file

@ -371,6 +371,61 @@ class Foo:
"""
return
# DOC102 - Test case from issue #20959: comma-separated parameters
def leq(x: object, y: object) -> bool:
"""Compare two objects for loose equality.
Parameters
----------
x1, x2 : object
Objects.
Returns
-------
bool
Whether the objects are identical or equal.
"""
return x is y or x == y
# OK - comma-separated parameters that match function signature
def compare_values(x1: int, x2: int) -> bool:
"""Compare two integer values.
Parameters
----------
x1, x2 : int
Values to compare.
Returns
-------
bool
True if values are equal.
"""
return x1 == x2
# DOC102 - mixed comma-separated and regular parameters
def process_data(data, x1: str, x2: str) -> str:
"""Process data with multiple string parameters.
Parameters
----------
data : list
Input data to process.
x1, x2 : str
String parameters for processing.
extra_param : str
Extra parameter not in signature.
Returns
-------
str
Processed result.
"""
return f"{x1}{x2}{len(data)}"
# OK
def baz(x: int) -> int:
"""
@ -389,3 +444,21 @@ def baz(x: int) -> int:
int
"""
return x
# OK - comma-separated parameters without type annotations
def add_numbers(a, b):
"""
Adds two numbers and returns the result.
Parameters
----------
a, b
The numbers to add.
Returns
-------
int
The sum of the two numbers.
"""
return a + b

View file

@ -83,6 +83,37 @@ def calculate_speed(distance: float, time: float) -> float:
raise
# DOC502 regression for Sphinx directive after Raises (issue #18959)
def foo():
"""First line.
Raises:
ValueError:
some text
.. versionadded:: 0.7.0
The ``init_kwargs`` argument.
"""
raise ValueError
# DOC502 regression for following section with colons
def example_with_following_section():
"""Summary.
Returns:
str: The resulting expression.
Raises:
ValueError: If the unit is not valid.
Relation to `time_range_lookup`:
- Handles the "start of" modifier.
- Example: "start of month" `DATETRUNC()`.
"""
raise ValueError
# This should NOT trigger DOC502 because OSError is explicitly re-raised
def f():
"""Do nothing.

View file

@ -117,3 +117,33 @@ def calculate_speed(distance: float, time: float) -> float:
except TypeError:
print("Not a number? Shame on you!")
raise
# DOC502 regression for Sphinx directive after Raises (issue #18959)
def foo():
"""First line.
Raises
------
ValueError
some text
.. versionadded:: 0.7.0
The ``init_kwargs`` argument.
"""
raise ValueError
# Make sure we don't bail out on a Sphinx directive in the description of one
# of the exceptions
def foo():
"""First line.
Raises
------
ValueError
some text
.. math:: e^{xception}
ZeroDivisionError
Will not be raised, DOC502
"""
raise ValueError

View file

@ -377,6 +377,7 @@ pub fn add_noqa_to_path(
source_kind: &SourceKind,
source_type: PySourceType,
settings: &LinterSettings,
reason: Option<&str>,
) -> Result<usize> {
// Parse once.
let target_version = settings.resolve_target_version(path);
@ -425,6 +426,7 @@ pub fn add_noqa_to_path(
&settings.external,
&directives.noqa_line_for,
stylist.line_ending(),
reason,
)
}

View file

@ -39,7 +39,7 @@ pub fn generate_noqa_edits(
let exemption = FileExemption::from(&file_directives);
let directives = NoqaDirectives::from_commented_ranges(comment_ranges, external, path, locator);
let comments = find_noqa_comments(diagnostics, locator, &exemption, &directives, noqa_line_for);
build_noqa_edits_by_diagnostic(comments, locator, line_ending)
build_noqa_edits_by_diagnostic(comments, locator, line_ending, None)
}
/// A directive to ignore a set of rules either for a given line of Python source code or an entire file (e.g.,
@ -715,6 +715,7 @@ impl Display for LexicalError {
impl Error for LexicalError {}
/// Adds noqa comments to suppress all messages of a file.
#[expect(clippy::too_many_arguments)]
pub(crate) fn add_noqa(
path: &Path,
diagnostics: &[Diagnostic],
@ -723,6 +724,7 @@ pub(crate) fn add_noqa(
external: &[String],
noqa_line_for: &NoqaMapping,
line_ending: LineEnding,
reason: Option<&str>,
) -> Result<usize> {
let (count, output) = add_noqa_inner(
path,
@ -732,12 +734,14 @@ pub(crate) fn add_noqa(
external,
noqa_line_for,
line_ending,
reason,
);
fs::write(path, output)?;
Ok(count)
}
#[expect(clippy::too_many_arguments)]
fn add_noqa_inner(
path: &Path,
diagnostics: &[Diagnostic],
@ -746,6 +750,7 @@ fn add_noqa_inner(
external: &[String],
noqa_line_for: &NoqaMapping,
line_ending: LineEnding,
reason: Option<&str>,
) -> (usize, String) {
let mut count = 0;
@ -757,7 +762,7 @@ fn add_noqa_inner(
let comments = find_noqa_comments(diagnostics, locator, &exemption, &directives, noqa_line_for);
let edits = build_noqa_edits_by_line(comments, locator, line_ending);
let edits = build_noqa_edits_by_line(comments, locator, line_ending, reason);
let contents = locator.contents();
@ -783,6 +788,7 @@ fn build_noqa_edits_by_diagnostic(
comments: Vec<Option<NoqaComment>>,
locator: &Locator,
line_ending: LineEnding,
reason: Option<&str>,
) -> Vec<Option<Edit>> {
let mut edits = Vec::default();
for comment in comments {
@ -794,6 +800,7 @@ fn build_noqa_edits_by_diagnostic(
FxHashSet::from_iter([comment.code]),
locator,
line_ending,
reason,
) {
edits.push(Some(noqa_edit.into_edit()));
}
@ -808,6 +815,7 @@ fn build_noqa_edits_by_line<'a>(
comments: Vec<Option<NoqaComment<'a>>>,
locator: &Locator,
line_ending: LineEnding,
reason: Option<&'a str>,
) -> BTreeMap<TextSize, NoqaEdit<'a>> {
let mut comments_by_line = BTreeMap::default();
for comment in comments.into_iter().flatten() {
@ -831,6 +839,7 @@ fn build_noqa_edits_by_line<'a>(
.collect(),
locator,
line_ending,
reason,
) {
edits.insert(offset, edit);
}
@ -927,6 +936,7 @@ struct NoqaEdit<'a> {
noqa_codes: FxHashSet<&'a SecondaryCode>,
codes: Option<&'a Codes<'a>>,
line_ending: LineEnding,
reason: Option<&'a str>,
}
impl NoqaEdit<'_> {
@ -954,6 +964,9 @@ impl NoqaEdit<'_> {
push_codes(writer, self.noqa_codes.iter().sorted_unstable());
}
}
if let Some(reason) = self.reason {
write!(writer, " {reason}").unwrap();
}
write!(writer, "{}", self.line_ending.as_str()).unwrap();
}
}
@ -970,6 +983,7 @@ fn generate_noqa_edit<'a>(
noqa_codes: FxHashSet<&'a SecondaryCode>,
locator: &Locator,
line_ending: LineEnding,
reason: Option<&'a str>,
) -> Option<NoqaEdit<'a>> {
let line_range = locator.full_line_range(offset);
@ -999,6 +1013,7 @@ fn generate_noqa_edit<'a>(
noqa_codes,
codes,
line_ending,
reason,
})
}
@ -2832,6 +2847,7 @@ mod tests {
&[],
&noqa_line_for,
LineEnding::Lf,
None,
);
assert_eq!(count, 0);
assert_eq!(output, format!("{contents}"));
@ -2855,6 +2871,7 @@ mod tests {
&[],
&noqa_line_for,
LineEnding::Lf,
None,
);
assert_eq!(count, 1);
assert_eq!(output, "x = 1 # noqa: F841\n");
@ -2885,6 +2902,7 @@ mod tests {
&[],
&noqa_line_for,
LineEnding::Lf,
None,
);
assert_eq!(count, 1);
assert_eq!(output, "x = 1 # noqa: E741, F841\n");
@ -2915,6 +2933,7 @@ mod tests {
&[],
&noqa_line_for,
LineEnding::Lf,
None,
);
assert_eq!(count, 0);
assert_eq!(output, "x = 1 # noqa");

View file

@ -274,3 +274,8 @@ pub(crate) const fn is_extended_i18n_function_matching_enabled(settings: &Linter
pub(crate) const fn is_extended_snmp_api_path_detection_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
}
// https://github.com/astral-sh/ruff/pull/21395
pub(crate) const fn is_enumerate_for_loop_int_index_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
}

View file

@ -61,6 +61,7 @@ mod tests {
#[test_case(Rule::SplitStaticString, Path::new("SIM905.py"))]
#[test_case(Rule::DictGetWithNoneDefault, Path::new("SIM910.py"))]
#[test_case(Rule::EnumerateForLoop, Path::new("SIM113.py"))]
fn preview_rules(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!(
"preview__{}_{}",

View file

@ -1,6 +1,8 @@
use crate::preview::is_enumerate_for_loop_int_index_enabled;
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::statement_visitor::{StatementVisitor, walk_stmt};
use ruff_python_ast::{self as ast, Expr, Int, Number, Operator, Stmt};
use ruff_python_semantic::analyze::type_inference::{NumberLike, PythonType, ResolvedPythonType};
use ruff_python_semantic::analyze::typing;
use ruff_text_size::Ranged;
@ -11,6 +13,9 @@ use crate::checkers::ast::Checker;
/// Checks for `for` loops with explicit loop-index variables that can be replaced
/// with `enumerate()`.
///
/// In [preview], this rule checks for index variables initialized with any integer rather than only
/// a literal zero.
///
/// ## Why is this bad?
/// When iterating over a sequence, it's often desirable to keep track of the
/// index of each element alongside the element itself. Prefer the `enumerate`
@ -35,6 +40,8 @@ use crate::checkers::ast::Checker;
///
/// ## References
/// - [Python documentation: `enumerate`](https://docs.python.org/3/library/functions.html#enumerate)
///
/// [preview]: https://docs.astral.sh/ruff/preview/
#[derive(ViolationMetadata)]
#[violation_metadata(stable_since = "v0.2.0")]
pub(crate) struct EnumerateForLoop {
@ -82,17 +89,21 @@ pub(crate) fn enumerate_for_loop(checker: &Checker, for_stmt: &ast::StmtFor) {
continue;
}
// Ensure that the index variable was initialized to 0.
// Ensure that the index variable was initialized to 0 (or instance of `int` if preview is enabled).
let Some(value) = typing::find_binding_value(binding, checker.semantic()) else {
continue;
};
if !matches!(
if !(matches!(
value,
Expr::NumberLiteral(ast::ExprNumberLiteral {
value: Number::Int(Int::ZERO),
..
})
) {
) || matches!(
ResolvedPythonType::from(value),
ResolvedPythonType::Atom(PythonType::Number(NumberLike::Integer))
) && is_enumerate_for_loop_int_index_enabled(checker.settings()))
{
continue;
}

View file

@ -0,0 +1,60 @@
---
source: crates/ruff_linter/src/rules/flake8_simplify/mod.rs
---
SIM113 Use `enumerate()` for index variable `idx` in `for` loop
--> SIM113.py:6:9
|
4 | for x in range(5):
5 | g(x, idx)
6 | idx += 1
| ^^^^^^^^
7 | h(x)
|
SIM113 Use `enumerate()` for index variable `idx` in `for` loop
--> SIM113.py:17:9
|
15 | if g(x):
16 | break
17 | idx += 1
| ^^^^^^^^
18 | sum += h(x, idx)
|
SIM113 Use `enumerate()` for index variable `idx` in `for` loop
--> SIM113.py:27:9
|
25 | g(x)
26 | h(x, y)
27 | idx += 1
| ^^^^^^^^
|
SIM113 Use `enumerate()` for index variable `idx` in `for` loop
--> SIM113.py:36:9
|
34 | for x in range(5):
35 | sum += h(x, idx)
36 | idx += 1
| ^^^^^^^^
|
SIM113 Use `enumerate()` for index variable `idx` in `for` loop
--> SIM113.py:44:9
|
42 | for x in range(5):
43 | g(x, idx)
44 | idx += 1
| ^^^^^^^^
45 | h(x)
|
SIM113 Use `enumerate()` for index variable `idx` in `for` loop
--> SIM113.py:54:9
|
52 | for x in range(5):
53 | g(x, idx)
54 | idx += 1
| ^^^^^^^^
55 | h(x)
|

View file

@ -661,19 +661,31 @@ fn parse_parameters_numpy(content: &str, content_start: TextSize) -> Vec<Paramet
.is_some_and(|first_char| !first_char.is_whitespace())
{
if let Some(before_colon) = entry.split(':').next() {
let param = before_colon.trim_end();
let param_name = param.trim_start_matches('*');
if is_identifier(param_name) {
let param_start = line_start + indentation.text_len();
let param_end = param_start + param.text_len();
let param_line = before_colon.trim_end();
entries.push(ParameterEntry {
name: param_name,
range: TextRange::new(
content_start + param_start,
content_start + param_end,
),
});
// Split on commas to handle comma-separated parameters
let mut current_offset = TextSize::from(0);
for param_part in param_line.split(',') {
let param_part_trimmed = param_part.trim();
let param_name = param_part_trimmed.trim_start_matches('*');
if is_identifier(param_name) {
// Calculate the position of this specific parameter part within the line
// Account for leading whitespace that gets trimmed
let param_start_in_line = current_offset
+ (param_part.text_len() - param_part_trimmed.text_len());
let param_start =
line_start + indentation.text_len() + param_start_in_line;
entries.push(ParameterEntry {
name: param_name,
range: TextRange::at(
content_start + param_start,
param_part_trimmed.text_len(),
),
});
}
// Update offset for next iteration: add the part length plus comma length
current_offset = current_offset + param_part.text_len() + ','.text_len();
}
}
}
@ -710,12 +722,30 @@ fn parse_raises(content: &str, style: Option<SectionStyle>) -> Vec<QualifiedName
/// ```
fn parse_raises_google(content: &str) -> Vec<QualifiedName<'_>> {
let mut entries: Vec<QualifiedName> = Vec::new();
for potential in content.lines() {
let Some(colon_idx) = potential.find(':') else {
continue;
};
let entry = potential[..colon_idx].trim();
entries.push(QualifiedName::user_defined(entry));
let mut lines = content.lines().peekable();
let Some(first) = lines.peek() else {
return entries;
};
let indentation = &first[..first.len() - first.trim_start().len()];
for potential in lines {
if let Some(entry) = potential.strip_prefix(indentation) {
if let Some(first_char) = entry.chars().next() {
if !first_char.is_whitespace() {
if let Some(colon_idx) = entry.find(':') {
let entry = entry[..colon_idx].trim();
if !entry.is_empty() {
entries.push(QualifiedName::user_defined(entry));
}
}
}
}
} else {
// If we can't strip the expected indentation, check if this is a dedented line
// (not blank) - if so, break early as we've reached the end of this section
if !potential.trim().is_empty() {
break;
}
}
}
entries
}
@ -739,6 +769,12 @@ fn parse_raises_numpy(content: &str) -> Vec<QualifiedName<'_>> {
let indentation = &dashes[..dashes.len() - dashes.trim_start().len()];
for potential in lines {
if let Some(entry) = potential.strip_prefix(indentation) {
// Check for Sphinx directives (lines starting with ..) - these indicate the end of the
// section. In numpy-style, exceptions are dedented to the same level as sphinx
// directives.
if entry.starts_with("..") {
break;
}
if let Some(first_char) = entry.chars().next() {
if !first_char.is_whitespace() {
entries.push(QualifiedName::user_defined(entry.trim_end()));

View file

@ -95,3 +95,23 @@ DOC502 Raised exception is not explicitly raised: `DivisionByZero`
82 | return distance / time
|
help: Remove `DivisionByZero` from the docstring
DOC502 Raised exception is not explicitly raised: `ZeroDivisionError`
--> DOC502_numpy.py:139:5
|
137 | # of the exceptions
138 | def foo():
139 | / """First line.
140 | |
141 | | Raises
142 | | ------
143 | | ValueError
144 | | some text
145 | | .. math:: e^{xception}
146 | | ZeroDivisionError
147 | | Will not be raised, DOC502
148 | | """
| |_______^
149 | raise ValueError
|
help: Remove `ZeroDivisionError` from the docstring

View file

@ -187,3 +187,36 @@ DOC102 Documented parameter `a` is not in the function's signature
302 | b
|
help: Remove the extraneous parameter from the docstring
DOC102 Documented parameter `x1` is not in the function's signature
--> DOC102_numpy.py:380:5
|
378 | Parameters
379 | ----------
380 | x1, x2 : object
| ^^
381 | Objects.
|
help: Remove the extraneous parameter from the docstring
DOC102 Documented parameter `x2` is not in the function's signature
--> DOC102_numpy.py:380:9
|
378 | Parameters
379 | ----------
380 | x1, x2 : object
| ^^
381 | Objects.
|
help: Remove the extraneous parameter from the docstring
DOC102 Documented parameter `extra_param` is not in the function's signature
--> DOC102_numpy.py:418:5
|
416 | x1, x2 : str
417 | String parameters for processing.
418 | extra_param : str
| ^^^^^^^^^^^
419 | Extra parameter not in signature.
|
help: Remove the extraneous parameter from the docstring

View file

@ -766,11 +766,12 @@ pub(crate) fn deprecated_import(checker: &Checker, import_from_stmt: &StmtImport
}
for operation in fixer.with_renames() {
checker.report_diagnostic(
let mut diagnostic = checker.report_diagnostic(
DeprecatedImport {
deprecation: Deprecation::WithRename(operation),
},
import_from_stmt.range(),
);
diagnostic.add_primary_tag(ruff_db::diagnostic::DiagnosticTag::Deprecated);
}
}

View file

@ -8,37 +8,40 @@ use ruff_source_file::{LineColumn, OneIndexed, SourceLocation};
/// [`ruff_text_size::TextSize`] to jupyter notebook cell/row/column.
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
pub struct NotebookIndex {
/// Enter a row (1-based), get back the cell (1-based)
pub(super) row_to_cell: Vec<OneIndexed>,
/// Enter a row (1-based), get back the row in cell (1-based)
pub(super) row_to_row_in_cell: Vec<OneIndexed>,
/// Stores the starting row and the absolute cell index for every Python (valid) cell.
///
/// The index in this vector corresponds to the Python cell index (valid cell index).
pub(super) cell_starts: Vec<CellStart>,
}
impl NotebookIndex {
pub fn new(row_to_cell: Vec<OneIndexed>, row_to_row_in_cell: Vec<OneIndexed>) -> Self {
Self {
row_to_cell,
row_to_row_in_cell,
fn find_cell(&self, row: OneIndexed) -> Option<CellStart> {
match self
.cell_starts
.binary_search_by_key(&row, |start| start.start_row)
{
Ok(cell_index) => Some(self.cell_starts[cell_index]),
Err(insertion_point) => Some(self.cell_starts[insertion_point.checked_sub(1)?]),
}
}
/// Returns the cell number (1-based) for the given row (1-based).
/// Returns the (raw) cell number (1-based) for the given row (1-based).
pub fn cell(&self, row: OneIndexed) -> Option<OneIndexed> {
self.row_to_cell.get(row.to_zero_indexed()).copied()
self.find_cell(row).map(|start| start.raw_cell_index)
}
/// Returns the row number (1-based) in the cell (1-based) for the
/// given row (1-based).
pub fn cell_row(&self, row: OneIndexed) -> Option<OneIndexed> {
self.row_to_row_in_cell.get(row.to_zero_indexed()).copied()
self.find_cell(row)
.map(|start| OneIndexed::from_zero_indexed(row.get() - start.start_row.get()))
}
/// Returns an iterator over the row:cell-number pairs (both 1-based).
pub fn iter(&self) -> impl Iterator<Item = (OneIndexed, OneIndexed)> {
self.row_to_cell
.iter()
.enumerate()
.map(|(row, cell)| (OneIndexed::from_zero_indexed(row), *cell))
/// Returns an iterator over the starting rows of each cell (1-based).
///
/// This yields one entry per Python cell (skipping over Markdown cells).
pub fn iter(&self) -> impl Iterator<Item = CellStart> + '_ {
self.cell_starts.iter().copied()
}
/// Translates the given [`LineColumn`] based on the indexing table.
@ -67,3 +70,23 @@ impl NotebookIndex {
}
}
}
#[derive(Debug, Copy, Clone, Eq, PartialEq, Serialize, Deserialize)]
pub struct CellStart {
/// The row in the concatenated notebook source code at which
/// this cell starts.
pub(super) start_row: OneIndexed,
/// The absolute index of this cell in the notebook.
pub(super) raw_cell_index: OneIndexed,
}
impl CellStart {
pub fn start_row(&self) -> OneIndexed {
self.start_row
}
pub fn cell_index(&self) -> OneIndexed {
self.raw_cell_index
}
}

View file

@ -18,7 +18,7 @@ use ruff_text_size::TextSize;
use crate::cell::CellOffsets;
use crate::index::NotebookIndex;
use crate::schema::{Cell, RawNotebook, SortAlphabetically, SourceValue};
use crate::{CellMetadata, RawNotebookMetadata, schema};
use crate::{CellMetadata, CellStart, RawNotebookMetadata, schema};
/// Run round-trip source code generation on a given Jupyter notebook file path.
pub fn round_trip(path: &Path) -> anyhow::Result<String> {
@ -320,11 +320,19 @@ impl Notebook {
/// The index building is expensive as it needs to go through the content of
/// every valid code cell.
fn build_index(&self) -> NotebookIndex {
let mut row_to_cell = Vec::new();
let mut row_to_row_in_cell = Vec::new();
let mut cell_starts = Vec::with_capacity(self.valid_code_cells.len());
let mut current_row = OneIndexed::MIN;
for &cell_index in &self.valid_code_cells {
let line_count = match &self.raw.cells[cell_index as usize].source() {
let raw_cell_index = cell_index as usize;
// Record the starting row of this cell
cell_starts.push(CellStart {
start_row: current_row,
raw_cell_index: OneIndexed::from_zero_indexed(raw_cell_index),
});
let line_count = match &self.raw.cells[raw_cell_index].source() {
SourceValue::String(string) => {
if string.is_empty() {
1
@ -342,17 +350,11 @@ impl Notebook {
}
}
};
row_to_cell.extend(std::iter::repeat_n(
OneIndexed::from_zero_indexed(cell_index as usize),
line_count,
));
row_to_row_in_cell.extend((0..line_count).map(OneIndexed::from_zero_indexed));
current_row = current_row.saturating_add(line_count);
}
NotebookIndex {
row_to_cell,
row_to_row_in_cell,
}
NotebookIndex { cell_starts }
}
/// Return the notebook content.
@ -456,7 +458,7 @@ mod tests {
use ruff_source_file::OneIndexed;
use crate::{Cell, Notebook, NotebookError, NotebookIndex};
use crate::{Cell, CellStart, Notebook, NotebookError, NotebookIndex};
/// Construct a path to a Jupyter notebook in the `resources/test/fixtures/jupyter` directory.
fn notebook_path(path: impl AsRef<Path>) -> std::path::PathBuf {
@ -548,39 +550,27 @@ print("after empty cells")
assert_eq!(
notebook.index(),
&NotebookIndex {
row_to_cell: vec![
OneIndexed::from_zero_indexed(0),
OneIndexed::from_zero_indexed(0),
OneIndexed::from_zero_indexed(0),
OneIndexed::from_zero_indexed(0),
OneIndexed::from_zero_indexed(0),
OneIndexed::from_zero_indexed(0),
OneIndexed::from_zero_indexed(2),
OneIndexed::from_zero_indexed(2),
OneIndexed::from_zero_indexed(2),
OneIndexed::from_zero_indexed(2),
OneIndexed::from_zero_indexed(2),
OneIndexed::from_zero_indexed(4),
OneIndexed::from_zero_indexed(6),
OneIndexed::from_zero_indexed(6),
OneIndexed::from_zero_indexed(7)
],
row_to_row_in_cell: vec![
OneIndexed::from_zero_indexed(0),
OneIndexed::from_zero_indexed(1),
OneIndexed::from_zero_indexed(2),
OneIndexed::from_zero_indexed(3),
OneIndexed::from_zero_indexed(4),
OneIndexed::from_zero_indexed(5),
OneIndexed::from_zero_indexed(0),
OneIndexed::from_zero_indexed(1),
OneIndexed::from_zero_indexed(2),
OneIndexed::from_zero_indexed(3),
OneIndexed::from_zero_indexed(4),
OneIndexed::from_zero_indexed(0),
OneIndexed::from_zero_indexed(0),
OneIndexed::from_zero_indexed(1),
OneIndexed::from_zero_indexed(0)
cell_starts: vec![
CellStart {
start_row: OneIndexed::MIN,
raw_cell_index: OneIndexed::MIN
},
CellStart {
start_row: OneIndexed::from_zero_indexed(6),
raw_cell_index: OneIndexed::from_zero_indexed(2)
},
CellStart {
start_row: OneIndexed::from_zero_indexed(11),
raw_cell_index: OneIndexed::from_zero_indexed(4)
},
CellStart {
start_row: OneIndexed::from_zero_indexed(12),
raw_cell_index: OneIndexed::from_zero_indexed(6)
},
CellStart {
start_row: OneIndexed::from_zero_indexed(14),
raw_cell_index: OneIndexed::from_zero_indexed(7)
}
],
}
);

View file

@ -323,6 +323,231 @@ fn python_version_inferred_from_system_installation() -> anyhow::Result<()> {
Ok(())
}
/// This attempts to simulate the tangled web of symlinks that a homebrew install has
/// which can easily confuse us if we're ever told to use it.
///
/// The main thing this is regression-testing is a panic in one *extremely* specific case
/// that you have to try really hard to hit (but vscode, hilariously, did hit).
#[cfg(unix)]
#[test]
fn python_argument_trapped_in_a_symlink_factory() -> anyhow::Result<()> {
let case = CliTest::with_files([
// This is the real python binary.
(
"opt/homebrew/Cellar/python@3.13/3.13.5/Frameworks/Python.framework/Versions/3.13/bin/python3.13",
"",
),
// There's a real site-packages here (although it's basically empty).
(
"opt/homebrew/Cellar/python@3.13/3.13.5/lib/python3.13/site-packages/foo.py",
"",
),
// There's also a real site-packages here (although it's basically empty).
("opt/homebrew/lib/python3.13/site-packages/bar.py", ""),
// This has the real stdlib, but the site-packages in this dir is a symlink.
(
"opt/homebrew/Cellar/python@3.13/3.13.5/Frameworks/Python.framework/Versions/3.13/lib/python3.13/abc.py",
"",
),
// It's important that our faux-homebrew not be in the same dir as our working directory
// to reproduce the crash, don't ask me why.
(
"project/test.py",
"\
import foo
import bar
import colorama
",
),
])?;
// many python symlinks pointing to a single real python (the longest path)
case.write_symlink(
"opt/homebrew/Cellar/python@3.13/3.13.5/Frameworks/Python.framework/Versions/3.13/bin/python3.13",
"opt/homebrew/Cellar/python@3.13/3.13.5/Frameworks/Python.framework/Versions/3.13/bin/python3",
)?;
case.write_symlink(
"opt/homebrew/Cellar/python@3.13/3.13.5/Frameworks/Python.framework/Versions/3.13/bin/python3",
"opt/homebrew/Cellar/python@3.13/3.13.5/bin/python3",
)?;
case.write_symlink(
"opt/homebrew/Cellar/python@3.13/3.13.5/bin/python3",
"opt/homebrew/bin/python3",
)?;
// the "real" python's site-packages is a symlink to a different dir
case.write_symlink(
"opt/homebrew/Cellar/python@3.13/3.13.5/lib/python3.13/site-packages",
"opt/homebrew/Cellar/python@3.13/3.13.5/Frameworks/Python.framework/Versions/3.13/lib/python3.13/site-packages",
)?;
// Try all 4 pythons with absolute paths to our fauxbrew install
assert_cmd_snapshot!(case.command()
.current_dir(case.root().join("project"))
.arg("--python").arg(case.root().join("opt/homebrew/bin/python3")), @r"
success: false
exit_code: 1
----- stdout -----
error[unresolved-import]: Cannot resolve imported module `foo`
--> test.py:1:8
|
1 | import foo
| ^^^
2 | import bar
3 | import colorama
|
info: Searched in the following paths during module resolution:
info: 1. <temp_dir>/project (first-party code)
info: 2. vendored://stdlib (stdlib typeshed stubs vendored by ty)
info: 3. <temp_dir>/opt/homebrew/lib/python3.13/site-packages (site-packages)
info: make sure your Python environment is properly configured: https://docs.astral.sh/ty/modules/#python-environment
info: rule `unresolved-import` is enabled by default
error[unresolved-import]: Cannot resolve imported module `colorama`
--> test.py:3:8
|
1 | import foo
2 | import bar
3 | import colorama
| ^^^^^^^^
|
info: Searched in the following paths during module resolution:
info: 1. <temp_dir>/project (first-party code)
info: 2. vendored://stdlib (stdlib typeshed stubs vendored by ty)
info: 3. <temp_dir>/opt/homebrew/lib/python3.13/site-packages (site-packages)
info: make sure your Python environment is properly configured: https://docs.astral.sh/ty/modules/#python-environment
info: rule `unresolved-import` is enabled by default
Found 2 diagnostics
----- stderr -----
");
assert_cmd_snapshot!(case.command()
.current_dir(case.root().join("project"))
.arg("--python").arg(case.root().join("opt/homebrew/Cellar/python@3.13/3.13.5/bin/python3")), @r"
success: false
exit_code: 1
----- stdout -----
error[unresolved-import]: Cannot resolve imported module `bar`
--> test.py:2:8
|
1 | import foo
2 | import bar
| ^^^
3 | import colorama
|
info: Searched in the following paths during module resolution:
info: 1. <temp_dir>/project (first-party code)
info: 2. vendored://stdlib (stdlib typeshed stubs vendored by ty)
info: 3. <temp_dir>/opt/homebrew/Cellar/python@3.13/3.13.5/lib/python3.13/site-packages (site-packages)
info: make sure your Python environment is properly configured: https://docs.astral.sh/ty/modules/#python-environment
info: rule `unresolved-import` is enabled by default
error[unresolved-import]: Cannot resolve imported module `colorama`
--> test.py:3:8
|
1 | import foo
2 | import bar
3 | import colorama
| ^^^^^^^^
|
info: Searched in the following paths during module resolution:
info: 1. <temp_dir>/project (first-party code)
info: 2. vendored://stdlib (stdlib typeshed stubs vendored by ty)
info: 3. <temp_dir>/opt/homebrew/Cellar/python@3.13/3.13.5/lib/python3.13/site-packages (site-packages)
info: make sure your Python environment is properly configured: https://docs.astral.sh/ty/modules/#python-environment
info: rule `unresolved-import` is enabled by default
Found 2 diagnostics
----- stderr -----
");
assert_cmd_snapshot!(case.command()
.current_dir(case.root().join("project"))
.arg("--python").arg(case.root().join("opt/homebrew/Cellar/python@3.13/3.13.5/Frameworks/Python.framework/Versions/3.13/bin/python3")), @r"
success: false
exit_code: 1
----- stdout -----
error[unresolved-import]: Cannot resolve imported module `bar`
--> test.py:2:8
|
1 | import foo
2 | import bar
| ^^^
3 | import colorama
|
info: Searched in the following paths during module resolution:
info: 1. <temp_dir>/project (first-party code)
info: 2. vendored://stdlib (stdlib typeshed stubs vendored by ty)
info: 3. <temp_dir>/opt/homebrew/Cellar/python@3.13/3.13.5/Frameworks/Python.framework/Versions/3.13/lib/python3.13/site-packages (site-packages)
info: make sure your Python environment is properly configured: https://docs.astral.sh/ty/modules/#python-environment
info: rule `unresolved-import` is enabled by default
error[unresolved-import]: Cannot resolve imported module `colorama`
--> test.py:3:8
|
1 | import foo
2 | import bar
3 | import colorama
| ^^^^^^^^
|
info: Searched in the following paths during module resolution:
info: 1. <temp_dir>/project (first-party code)
info: 2. vendored://stdlib (stdlib typeshed stubs vendored by ty)
info: 3. <temp_dir>/opt/homebrew/Cellar/python@3.13/3.13.5/Frameworks/Python.framework/Versions/3.13/lib/python3.13/site-packages (site-packages)
info: make sure your Python environment is properly configured: https://docs.astral.sh/ty/modules/#python-environment
info: rule `unresolved-import` is enabled by default
Found 2 diagnostics
----- stderr -----
");
assert_cmd_snapshot!(case.command()
.current_dir(case.root().join("project"))
.arg("--python").arg(case.root().join("opt/homebrew/Cellar/python@3.13/3.13.5/Frameworks/Python.framework/Versions/3.13/bin/python3.13")), @r"
success: false
exit_code: 1
----- stdout -----
error[unresolved-import]: Cannot resolve imported module `bar`
--> test.py:2:8
|
1 | import foo
2 | import bar
| ^^^
3 | import colorama
|
info: Searched in the following paths during module resolution:
info: 1. <temp_dir>/project (first-party code)
info: 2. vendored://stdlib (stdlib typeshed stubs vendored by ty)
info: 3. <temp_dir>/opt/homebrew/Cellar/python@3.13/3.13.5/Frameworks/Python.framework/Versions/3.13/lib/python3.13/site-packages (site-packages)
info: make sure your Python environment is properly configured: https://docs.astral.sh/ty/modules/#python-environment
info: rule `unresolved-import` is enabled by default
error[unresolved-import]: Cannot resolve imported module `colorama`
--> test.py:3:8
|
1 | import foo
2 | import bar
3 | import colorama
| ^^^^^^^^
|
info: Searched in the following paths during module resolution:
info: 1. <temp_dir>/project (first-party code)
info: 2. vendored://stdlib (stdlib typeshed stubs vendored by ty)
info: 3. <temp_dir>/opt/homebrew/Cellar/python@3.13/3.13.5/Frameworks/Python.framework/Versions/3.13/lib/python3.13/site-packages (site-packages)
info: make sure your Python environment is properly configured: https://docs.astral.sh/ty/modules/#python-environment
info: rule `unresolved-import` is enabled by default
Found 2 diagnostics
----- stderr -----
");
Ok(())
}
/// On Unix systems, it's common for a Python installation at `.venv/bin/python` to only be a symlink
/// to a system Python installation. We must be careful not to resolve the symlink too soon!
/// If we do, we will incorrectly add the system installation's `site-packages` as a search path,

View file

@ -10,12 +10,14 @@ import-deprioritizes-type_check_only,main.py,1,1
import-deprioritizes-type_check_only,main.py,2,1
import-deprioritizes-type_check_only,main.py,3,2
import-deprioritizes-type_check_only,main.py,4,3
import-keyword-completion,main.py,0,1
internal-typeshed-hidden,main.py,0,5
none-completion,main.py,0,11
none-completion,main.py,0,2
numpy-array,main.py,0,
numpy-array,main.py,1,1
object-attr-instance-methods,main.py,0,1
object-attr-instance-methods,main.py,1,1
pass-keyword-completion,main.py,0,1
raise-uses-base-exception,main.py,0,2
scope-existing-over-new-import,main.py,0,1
scope-prioritize-closer,main.py,0,2
@ -23,4 +25,4 @@ scope-simple-long-identifier,main.py,0,1
tstring-completions,main.py,0,1
ty-extensions-lower-stdlib,main.py,0,8
type-var-typing-over-ast,main.py,0,3
type-var-typing-over-ast,main.py,1,277
type-var-typing-over-ast,main.py,1,279

1 name file index rank
10 import-deprioritizes-type_check_only main.py 2 1
11 import-deprioritizes-type_check_only main.py 3 2
12 import-deprioritizes-type_check_only main.py 4 3
13 import-keyword-completion main.py 0 1
14 internal-typeshed-hidden main.py 0 5
15 none-completion main.py 0 11 2
16 numpy-array main.py 0
17 numpy-array main.py 1 1
18 object-attr-instance-methods main.py 0 1
19 object-attr-instance-methods main.py 1 1
20 pass-keyword-completion main.py 0 1
21 raise-uses-base-exception main.py 0 2
22 scope-existing-over-new-import main.py 0 1
23 scope-prioritize-closer main.py 0 2
25 tstring-completions main.py 0 1
26 ty-extensions-lower-stdlib main.py 0 8
27 type-var-typing-over-ast main.py 0 3
28 type-var-typing-over-ast main.py 1 277 279

View file

@ -0,0 +1,2 @@
[settings]
auto-import = false

View file

@ -0,0 +1 @@
from collections im<CURSOR: import>

View file

@ -0,0 +1,5 @@
[project]
name = "test"
version = "0.1.0"
requires-python = ">=3.13"
dependencies = []

View file

@ -0,0 +1,8 @@
version = 1
revision = 3
requires-python = ">=3.13"
[[package]]
name = "test"
version = "0.1.0"
source = { virtual = "." }

View file

@ -0,0 +1,2 @@
[settings]
auto-import = false

View file

@ -0,0 +1,3 @@
match x:
case int():
pa<CURSOR: pass>

View file

@ -0,0 +1,5 @@
[project]
name = "test"
version = "0.1.0"
requires-python = ">=3.13"
dependencies = []

View file

@ -0,0 +1,8 @@
version = 1
revision = 3
requires-python = ">=3.13"
[[package]]
name = "test"
version = "0.1.0"
source = { virtual = "." }

File diff suppressed because it is too large Load diff

View file

@ -1592,6 +1592,111 @@ a = Test()
");
}
/// Goto-definition on the `float` in an annotation `a: float`.
///
/// The snapshot shows *two* definitions — `class int` and `class float` in
/// `builtins.pyi` — consistent with an annotation of `float` being widened to
/// `int | float` (the typing spec's implicit numeric promotion).
#[test]
fn float_annotation() {
let test = CursorTest::builder()
.source(
"main.py",
"
a: float<CURSOR> = 3.14
",
)
.build();
assert_snapshot!(test.goto_definition(), @r#"
info[goto-definition]: Definition
--> stdlib/builtins.pyi:346:7
|
345 | @disjoint_base
346 | class int:
| ^^^
347 | """int([x]) -> integer
348 | int(x, base=10) -> integer
|
info: Source
--> main.py:2:4
|
2 | a: float = 3.14
| ^^^^^
|
info[goto-definition]: Definition
--> stdlib/builtins.pyi:659:7
|
658 | @disjoint_base
659 | class float:
| ^^^^^
660 | """Convert a string or number to a floating-point number, if possible."""
|
info: Source
--> main.py:2:4
|
2 | a: float = 3.14
| ^^^^^
|
"#);
}
/// Goto-definition on the `complex` in an annotation `a: complex`.
///
/// The snapshot shows *three* definitions — `class int`, `class float`, and
/// `class complex` in `builtins.pyi` — consistent with an annotation of
/// `complex` being widened to `int | float | complex` (the typing spec's
/// implicit numeric promotion).
#[test]
fn complex_annotation() {
let test = CursorTest::builder()
.source(
"main.py",
"
a: complex<CURSOR> = 3.14
",
)
.build();
assert_snapshot!(test.goto_definition(), @r#"
info[goto-definition]: Definition
--> stdlib/builtins.pyi:346:7
|
345 | @disjoint_base
346 | class int:
| ^^^
347 | """int([x]) -> integer
348 | int(x, base=10) -> integer
|
info: Source
--> main.py:2:4
|
2 | a: complex = 3.14
| ^^^^^^^
|
info[goto-definition]: Definition
--> stdlib/builtins.pyi:659:7
|
658 | @disjoint_base
659 | class float:
| ^^^^^
660 | """Convert a string or number to a floating-point number, if possible."""
|
info: Source
--> main.py:2:4
|
2 | a: complex = 3.14
| ^^^^^^^
|
info[goto-definition]: Definition
--> stdlib/builtins.pyi:820:7
|
819 | @disjoint_base
820 | class complex:
| ^^^^^^^
821 | """Create a complex number from a string or numbers.
|
info: Source
--> main.py:2:4
|
2 | a: complex = 3.14
| ^^^^^^^
|
"#);
}
/// Regression test for <https://github.com/astral-sh/ty/issues/1451>.
/// We must ensure we respect re-import convention for stub files for
/// imports in builtins.pyi.

View file

@ -2634,6 +2634,40 @@ def ab(a: int, *, c: int):
");
}
/// Hover over the `float` in an annotation `a: float`.
///
/// The hovered type is shown as `int | float` (the widened form of a `float`
/// annotation), while the docstring displayed is `float`'s own — see the
/// snapshot below.
#[test]
fn hover_float_annotation() {
let test = cursor_test(
r#"
a: float<CURSOR> = 3.14
"#,
);
assert_snapshot!(test.hover(), @r"
int | float
---------------------------------------------
Convert a string or number to a floating-point number, if possible.
---------------------------------------------
```python
int | float
```
---
```text
Convert a string or number to a floating-point number, if possible.
```
---------------------------------------------
info[hover]: Hovered content is
--> main.py:2:4
|
2 | a: float = 3.14
| ^^^^^- Cursor offset
| |
| source
|
");
}
impl CursorTest {
fn hover(&self) -> String {
use std::fmt::Write;

File diff suppressed because it is too large Load diff

View file

@ -1,13 +1,7 @@
# Documentation of two fuzzer panics involving comprehensions
# Regression test for https://github.com/astral-sh/ruff/pull/20962
# error message:
# `place_by_id: execute: too many cycle iterations`
Type inference for comprehensions was added in <https://github.com/astral-sh/ruff/pull/20962>. It
added two new fuzzer panics that are documented here for regression testing.
## Too many cycle iterations in `place_by_id`
<!-- expect-panic: too many cycle iterations -->
```py
name_5(name_3)
[0 for unique_name_0 in unique_name_1 for unique_name_2 in name_3]
@ -34,4 +28,3 @@ else:
@name_3
async def name_5():
pass
```

View file

@ -87,9 +87,23 @@ class Foo:
class Baz[T: Foo]:
pass
# error: [unresolved-reference] "Name `Foo` used when not defined"
# error: [unresolved-reference] "Name `Bar` used when not defined"
class Qux(Foo, Bar, Baz):
pass
# error: [unresolved-reference] "Name `Foo` used when not defined"
# error: [unresolved-reference] "Name `Bar` used when not defined"
class Quux[_T](Foo, Bar, Baz):
pass
# error: [unresolved-reference]
type S = a
type T = b
type U = Foo
# error: [unresolved-reference]
type V = Bar
type W = Baz
def h[T: Bar]():
# error: [unresolved-reference]
@ -141,9 +155,23 @@ class Foo:
class Baz[T: Foo]:
pass
# error: [unresolved-reference] "Name `Foo` used when not defined"
# error: [unresolved-reference] "Name `Bar` used when not defined"
class Qux(Foo, Bar, Baz):
pass
# error: [unresolved-reference] "Name `Foo` used when not defined"
# error: [unresolved-reference] "Name `Bar` used when not defined"
class Quux[_T](Foo, Bar, Baz):
pass
# error: [unresolved-reference]
type S = a
type T = b
type U = Foo
# error: [unresolved-reference]
type V = Bar
type W = Baz
def h[T: Bar]():
# error: [unresolved-reference]

View file

@ -369,6 +369,11 @@ reveal_type(c_instance.y) # revealed: Unknown | int
#### Attributes defined in comprehensions
```toml
[environment]
python-version = "3.12"
```
```py
class TupleIterator:
def __next__(self) -> tuple[int, str]:
@ -380,19 +385,9 @@ class TupleIterable:
class C:
def __init__(self) -> None:
# TODO: Should not emit this diagnostic
# error: [unresolved-attribute]
[... for self.a in range(3)]
# TODO: Should not emit this diagnostic
# error: [unresolved-attribute]
# error: [unresolved-attribute]
[... for (self.b, self.c) in TupleIterable()]
# TODO: Should not emit this diagnostic
# error: [unresolved-attribute]
# error: [unresolved-attribute]
[... for self.d in range(3) for self.e in range(3)]
# TODO: Should not emit this diagnostic
# error: [unresolved-attribute]
[[... for self.f in range(3)] for _ in range(3)]
[[... for self.g in range(3)] for self in [D()]]
@ -401,35 +396,74 @@ class D:
c_instance = C()
# TODO: no error, reveal Unknown | int
# error: [unresolved-attribute]
reveal_type(c_instance.a) # revealed: Unknown
reveal_type(c_instance.a) # revealed: Unknown | int
# TODO: no error, reveal Unknown | int
# error: [unresolved-attribute]
reveal_type(c_instance.b) # revealed: Unknown
reveal_type(c_instance.b) # revealed: Unknown | int
# TODO: no error, reveal Unknown | str
# error: [unresolved-attribute]
reveal_type(c_instance.c) # revealed: Unknown
reveal_type(c_instance.c) # revealed: Unknown | str
# TODO: no error, reveal Unknown | int
# error: [unresolved-attribute]
reveal_type(c_instance.d) # revealed: Unknown
reveal_type(c_instance.d) # revealed: Unknown | int
# TODO: no error, reveal Unknown | int
# error: [unresolved-attribute]
reveal_type(c_instance.e) # revealed: Unknown
reveal_type(c_instance.e) # revealed: Unknown | int
# TODO: no error, reveal Unknown | int
# error: [unresolved-attribute]
reveal_type(c_instance.f) # revealed: Unknown
reveal_type(c_instance.f) # revealed: Unknown | int
# This one is correctly not resolved as an attribute:
# error: [unresolved-attribute]
reveal_type(c_instance.g) # revealed: Unknown
```
It does not matter how deeply the comprehension is nested.
Similarly, attributes defined by a comprehension in a generic method are recognized.
```py
class C:
def f[T](self):
[... for self.a in [1]]
[[... for self.b in [1]] for _ in [1]]
c_instance = C()
reveal_type(c_instance.a) # revealed: Unknown | int
reveal_type(c_instance.b) # revealed: Unknown | int
```
If the comprehension is inside another scope, such as a nested function, then that attribute is not inferred.
```py
class C:
def __init__(self):
def f():
# error: [unresolved-attribute]
[... for self.a in [1]]
def g():
# error: [unresolved-attribute]
[... for self.b in [1]]
g()
c_instance = C()
# This attribute is in the function f and is not reachable
# error: [unresolved-attribute]
reveal_type(c_instance.a) # revealed: Unknown
# error: [unresolved-attribute]
reveal_type(c_instance.b) # revealed: Unknown
```
If the comprehension is nested in any other eager scope, it can still assign attributes.
```py
class C:
def __init__(self):
class D:
[[... for self.a in [1]] for _ in [1]]
reveal_type(C().a) # revealed: Unknown | int
```
#### Conditionally declared / bound attributes
We currently treat implicit instance attributes to be bound, even if they are only conditionally

View file

@ -162,3 +162,38 @@ def _(x: A | B, y: list[int]):
reveal_type(x) # revealed: B & ~A
reveal_type(isinstance(x, B)) # revealed: Literal[True]
```
Certain special forms in the typing module are not instances of `type`, so, strictly speaking, they are
disallowed as the second argument to `isinstance()` according to typeshed's annotations. However, at
runtime they work fine as the second argument, and we implement that special case in ty:
```py
import typing as t
# no errors emitted for any of these:
isinstance("", t.Dict)
isinstance("", t.List)
isinstance("", t.Set)
isinstance("", t.FrozenSet)
isinstance("", t.Tuple)
isinstance("", t.ChainMap)
isinstance("", t.Counter)
isinstance("", t.Deque)
isinstance("", t.OrderedDict)
isinstance("", t.Callable)
isinstance("", t.Type)
isinstance("", t.Callable | t.Deque)
# `Any` is valid in `issubclass()` calls but not `isinstance()` calls
issubclass(list, t.Any)
issubclass(list, t.Any | t.Dict)
```
But for other special forms that are not permitted as the second argument, we still emit an error:
```py
isinstance("", t.TypeGuard) # error: [invalid-argument-type]
isinstance("", t.ClassVar) # error: [invalid-argument-type]
isinstance("", t.Final) # error: [invalid-argument-type]
isinstance("", t.Any) # error: [invalid-argument-type]
```

View file

@ -58,6 +58,24 @@ Iterating over an unbound iterable yields `Unknown`:
# error: [not-iterable] "Object of type `int` is not iterable"
# revealed: tuple[int, Unknown]
[reveal_type((x, z)) for x in range(3) for z in x]
# error: [unresolved-reference] "Name `foo` used when not defined"
foo
foo = [
# revealed: tuple[int, Unknown]
reveal_type((x, z))
for x in range(3)
# error: [unresolved-reference] "Name `foo` used when not defined"
for z in [foo]
]
baz = [
# revealed: tuple[int, Unknown]
reveal_type((x, z))
for x in range(3)
# error: [unresolved-reference] "Name `baz` used when not defined"
for z in [baz]
]
```
## Starred expressions

View file

@ -288,6 +288,43 @@ class C[T]:
class Bad2(Iterable[T]): ...
```
## Class bases are evaluated within the type parameter scope
```py
class C[_T](
# error: [unresolved-reference] "Name `C` used when not defined"
C
): ...
# `D` in `list[D]` is resolved to be a type variable of class `D`.
class D[D](list[D]): ...
# error: [unresolved-reference] "Name `E` used when not defined"
if E:
class E[_T](
# error: [unresolved-reference] "Name `E` used when not defined"
E
): ...
# error: [unresolved-reference] "Name `F` used when not defined"
F
# error: [unresolved-reference] "Name `F` used when not defined"
class F[_T](F): ...
def foo():
class G[_T](
# error: [unresolved-reference] "Name `G` used when not defined"
G
): ...
# error: [unresolved-reference] "Name `H` used when not defined"
if H:
class H[_T](
# error: [unresolved-reference] "Name `H` used when not defined"
H
): ...
```
## Class scopes do not cover inner scopes
Just like regular symbols, the typevars of a generic class are only available in that class's scope,

View file

@ -33,7 +33,7 @@ g(None)
We also support unions in type aliases:
```py
from typing_extensions import Any, Never, Literal, LiteralString, Tuple, Annotated, Optional
from typing_extensions import Any, Never, Literal, LiteralString, Tuple, Annotated, Optional, Union
from ty_extensions import Unknown
IntOrStr = int | str
@ -41,6 +41,8 @@ IntOrStrOrBytes1 = int | str | bytes
IntOrStrOrBytes2 = (int | str) | bytes
IntOrStrOrBytes3 = int | (str | bytes)
IntOrStrOrBytes4 = IntOrStr | bytes
IntOrStrOrBytes5 = int | Union[str, bytes]
IntOrStrOrBytes6 = Union[int, str] | bytes
BytesOrIntOrStr = bytes | IntOrStr
IntOrNone = int | None
NoneOrInt = None | int
@ -70,6 +72,8 @@ reveal_type(IntOrStrOrBytes1) # revealed: types.UnionType
reveal_type(IntOrStrOrBytes2) # revealed: types.UnionType
reveal_type(IntOrStrOrBytes3) # revealed: types.UnionType
reveal_type(IntOrStrOrBytes4) # revealed: types.UnionType
reveal_type(IntOrStrOrBytes5) # revealed: types.UnionType
reveal_type(IntOrStrOrBytes6) # revealed: types.UnionType
reveal_type(BytesOrIntOrStr) # revealed: types.UnionType
reveal_type(IntOrNone) # revealed: types.UnionType
reveal_type(NoneOrInt) # revealed: types.UnionType
@ -100,6 +104,8 @@ def _(
int_or_str_or_bytes2: IntOrStrOrBytes2,
int_or_str_or_bytes3: IntOrStrOrBytes3,
int_or_str_or_bytes4: IntOrStrOrBytes4,
int_or_str_or_bytes5: IntOrStrOrBytes5,
int_or_str_or_bytes6: IntOrStrOrBytes6,
bytes_or_int_or_str: BytesOrIntOrStr,
int_or_none: IntOrNone,
none_or_int: NoneOrInt,
@ -129,6 +135,8 @@ def _(
reveal_type(int_or_str_or_bytes2) # revealed: int | str | bytes
reveal_type(int_or_str_or_bytes3) # revealed: int | str | bytes
reveal_type(int_or_str_or_bytes4) # revealed: int | str | bytes
reveal_type(int_or_str_or_bytes5) # revealed: int | str | bytes
reveal_type(int_or_str_or_bytes6) # revealed: int | str | bytes
reveal_type(bytes_or_int_or_str) # revealed: bytes | int | str
reveal_type(int_or_none) # revealed: int | None
reveal_type(none_or_int) # revealed: None | int
@ -272,6 +280,54 @@ def g(
): ...
```
## `|` unions in stubs and `TYPE_CHECKING` blocks
In runtime contexts, `|` unions are only permitted on Python 3.10+. But in suites of code that are
never executed at runtime (stub files, `if TYPE_CHECKING` blocks, and stringified annotations), they
are permitted even if the target version is set to Python 3.9 or earlier.
```toml
[environment]
python-version = "3.9"
```
`bar.pyi`:
```pyi
Z = int | str
GLOBAL_CONSTANT: Z
```
`foo.py`:
```py
from typing import TYPE_CHECKING
from bar import GLOBAL_CONSTANT
reveal_type(GLOBAL_CONSTANT) # revealed: int | str
if TYPE_CHECKING:
class ItsQuiteCloudyInManchester:
X = int | str
def f(obj: X):
reveal_type(obj) # revealed: int | str
# TODO: we currently only understand code as being inside a `TYPE_CHECKING` block
# if a whole *scope* is inside the `if TYPE_CHECKING` block
# (like the `ItsQuiteCloudyInManchester` class above); this is a false-positive
Y = int | str # error: [unsupported-operator]
def g(obj: Y):
# TODO: should be `int | str`
reveal_type(obj) # revealed: Unknown
Y = list["int | str"]
def g(obj: Y):
reveal_type(obj) # revealed: list[int | str]
```
## Generic types
Implicit type aliases can also refer to generic types:
@ -457,13 +513,90 @@ def _(
## `Tuple`
We support implicit type aliases using `typing.Tuple`:
```py
from typing import Tuple
IntAndStr = Tuple[int, str]
SingleInt = Tuple[int]
Ints = Tuple[int, ...]
EmptyTuple = Tuple[()]
def _(int_and_str: IntAndStr):
def _(int_and_str: IntAndStr, single_int: SingleInt, ints: Ints, empty_tuple: EmptyTuple):
reveal_type(int_and_str) # revealed: tuple[int, str]
reveal_type(single_int) # revealed: tuple[int]
reveal_type(ints) # revealed: tuple[int, ...]
reveal_type(empty_tuple) # revealed: tuple[()]
```
Invalid uses cause diagnostics:
```py
from typing import Tuple
# error: [invalid-type-form] "Int literals are not allowed in this context in a type expression"
Invalid = Tuple[int, 1]
def _(invalid: Invalid):
reveal_type(invalid) # revealed: tuple[int, Unknown]
```
## `Union`
We support implicit type aliases using `typing.Union`:
```py
from typing import Union
IntOrStr = Union[int, str]
IntOrStrOrBytes = Union[int, Union[str, bytes]]
reveal_type(IntOrStr) # revealed: types.UnionType
reveal_type(IntOrStrOrBytes) # revealed: types.UnionType
def _(
int_or_str: IntOrStr,
int_or_str_or_bytes: IntOrStrOrBytes,
):
reveal_type(int_or_str) # revealed: int | str
reveal_type(int_or_str_or_bytes) # revealed: int | str | bytes
```
If a single type is given, no `types.UnionType` instance is created:
```py
JustInt = Union[int]
reveal_type(JustInt) # revealed: <class 'int'>
def _(just_int: JustInt):
reveal_type(just_int) # revealed: int
```
An empty `typing.Union` leads to a `TypeError` at runtime, so we emit an error. We still infer
`Never` when used as a type expression, which seems reasonable for an empty union:
```py
# error: [invalid-type-form] "`typing.Union` requires at least one type argument"
EmptyUnion = Union[()]
reveal_type(EmptyUnion) # revealed: types.UnionType
def _(empty: EmptyUnion):
reveal_type(empty) # revealed: Never
```
Other invalid uses are also caught:
```py
# error: [invalid-type-form] "Int literals are not allowed in this context in a type expression"
Invalid = Union[str, 1]
def _(
invalid: Invalid,
):
reveal_type(invalid) # revealed: str | Unknown
```
## Stringified annotations?
@ -496,10 +629,19 @@ We *do* support stringified annotations if they appear in a position where a typ
syntactically expected:
```py
ListOfInts = list["int"]
from typing import Union
def _(list_of_ints: ListOfInts):
ListOfInts = list["int"]
StrOrStyle = Union[str, "Style"]
class Style: ...
def _(
list_of_ints: ListOfInts,
str_or_style: StrOrStyle,
):
reveal_type(list_of_ints) # revealed: list[int]
reveal_type(str_or_style) # revealed: str | Style
```
## Recursive

View file

@ -22,7 +22,10 @@ This file currently covers the following details:
- **dot re-exports**: `from . import a` in an `__init__.pyi` is considered a re-export of `a`
(equivalent to `from . import a as a`). This is required to properly handle many stubs in the
wild. Currently it must be *exactly* `from . import ...`.
wild. Equivalent imports like `from whatever.thispackage import a` also introduce a re-export
(this has essentially zero ecosystem impact, we just felt it was more consistent). The only way
to opt out of this is to rename the import to something else (`from . import a as b`).
`from .a import b` and equivalent does *not* introduce a re-export.
Note: almost all tests in here have a stub and non-stub version, because we're interested in both
defining symbols *at all* and re-exporting them.
@ -94,8 +97,7 @@ reveal_type(mypackage.fails.Y) # revealed: Unknown
## Absolute `from` Import of Direct Submodule in `__init__`
If an absolute `from...import` happens to import a submodule (i.e. it's equivalent to
`from . import y`) we do not treat it as a re-export. We could, but we don't. (This is an arbitrary
decision and can be changed!)
`from . import y`) we also treat it as a re-export.
### In Stub
@ -122,9 +124,7 @@ Y: int = 47
```py
import mypackage
# TODO: this could work and would be nice to have?
# error: "has no member `imported`"
reveal_type(mypackage.imported.X) # revealed: Unknown
reveal_type(mypackage.imported.X) # revealed: int
# error: "has no member `fails`"
reveal_type(mypackage.fails.Y) # revealed: Unknown
```
@ -333,7 +333,7 @@ reveal_type(mypackage.nested.X) # revealed: Unknown
### In Non-Stub
`from mypackage.submodule import nested` in an `__init__.py` only creates `nested`.
`from mypackage.submodule import nested` in an `__init__.py` creates both `submodule` and `nested`.
`mypackage/__init__.py`:
@ -357,12 +357,11 @@ X: int = 42
```py
import mypackage
reveal_type(mypackage.submodule) # revealed: <module 'mypackage.submodule'>
# TODO: this would be nice to support
# error: "has no member `submodule`"
reveal_type(mypackage.submodule) # revealed: Unknown
# error: "has no member `submodule`"
# error: "has no member `nested`"
reveal_type(mypackage.submodule.nested) # revealed: Unknown
# error: "has no member `submodule`"
# error: "has no member `nested`"
reveal_type(mypackage.submodule.nested.X) # revealed: Unknown
reveal_type(mypackage.nested) # revealed: <module 'mypackage.submodule.nested'>
reveal_type(mypackage.nested.X) # revealed: int

View file

@ -104,3 +104,27 @@ from typing import Callable
def _(c: Callable[]):
reveal_type(c) # revealed: (...) -> Unknown
```
### `typing.Tuple`
```py
from typing import Tuple
# error: [invalid-syntax] "Expected index or slice expression"
InvalidEmptyTuple = Tuple[]
def _(t: InvalidEmptyTuple):
reveal_type(t) # revealed: tuple[Unknown]
```
### `typing.Union`
```py
from typing import Union
# error: [invalid-syntax] "Expected index or slice expression"
InvalidEmptyUnion = Union[]
def _(u: InvalidEmptyUnion):
reveal_type(u) # revealed: Unknown
```

View file

@ -58,6 +58,15 @@ d.x = 1
reveal_type(d.x) # revealed: Literal[1]
d.x = unknown()
reveal_type(d.x) # revealed: Unknown
class E:
x: int | None = None
e = E()
if e.x is not None:
class _:
reveal_type(e.x) # revealed: int
```
Narrowing can be "reset" by assigning to the attribute:

View file

@ -147,6 +147,25 @@ def _(x: int | str | bytes):
reveal_type(x) # revealed: (int & Unknown) | (str & Unknown) | (bytes & Unknown)
```
## `classinfo` is a `typing.py` special form
Certain special forms in `typing.py` are aliases to classes elsewhere in the standard library; these
can be used in `isinstance()` and `issubclass()` checks. We support narrowing using them:
```py
import typing as t
def f(x: dict[str, int] | list[str], y: object):
if isinstance(x, t.Dict):
reveal_type(x) # revealed: dict[str, int]
else:
reveal_type(x) # revealed: list[str]
if isinstance(y, t.Callable):
# TODO: a better top-materialization for `Callable`s (https://github.com/astral-sh/ty/issues/1426)
reveal_type(y) # revealed: () -> object
```
## Class types
```py

View file

@ -0,0 +1,31 @@
---
source: crates/ty_test/src/lib.rs
expression: snapshot
---
---
mdtest name: assignment_diagnostics.md - Subscript assignment diagnostics - Invalid key type
mdtest path: crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md
---
# Python source files
## mdtest_snippet.py
```
1 | config: dict[str, int] = {}
2 | config[0] = 3 # error: [invalid-assignment]
```
# Diagnostics
```
error[invalid-assignment]: Method `__setitem__` of type `bound method dict[str, int].__setitem__(key: str, value: int, /) -> None` cannot be called with a key of type `Literal[0]` and a value of type `Literal[3]` on object of type `dict[str, int]`
--> src/mdtest_snippet.py:2:1
|
1 | config: dict[str, int] = {}
2 | config[0] = 3 # error: [invalid-assignment]
| ^^^^^^
|
info: rule `invalid-assignment` is enabled by default
```

View file

@ -0,0 +1,36 @@
---
source: crates/ty_test/src/lib.rs
expression: snapshot
---
---
mdtest name: assignment_diagnostics.md - Subscript assignment diagnostics - Invalid key type for `TypedDict`
mdtest path: crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md
---
# Python source files
## mdtest_snippet.py
```
1 | from typing import TypedDict
2 |
3 | class Config(TypedDict):
4 | retries: int
5 |
6 | def _(config: Config) -> None:
7 | config[0] = 3 # error: [invalid-key]
```
# Diagnostics
```
error[invalid-key]: Cannot access `Config` with a key of type `Literal[0]`. Only string literals are allowed as keys on TypedDicts.
--> src/mdtest_snippet.py:7:12
|
6 | def _(config: Config) -> None:
7 | config[0] = 3 # error: [invalid-key]
| ^
|
info: rule `invalid-key` is enabled by default
```

View file

@ -0,0 +1,31 @@
---
source: crates/ty_test/src/lib.rs
expression: snapshot
---
---
mdtest name: assignment_diagnostics.md - Subscript assignment diagnostics - Invalid value type
mdtest path: crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md
---
# Python source files
## mdtest_snippet.py
```
1 | config: dict[str, int] = {}
2 | config["retries"] = "three" # error: [invalid-assignment]
```
# Diagnostics
```
error[invalid-assignment]: Method `__setitem__` of type `bound method dict[str, int].__setitem__(key: str, value: int, /) -> None` cannot be called with a key of type `Literal["retries"]` and a value of type `Literal["three"]` on object of type `dict[str, int]`
--> src/mdtest_snippet.py:2:1
|
1 | config: dict[str, int] = {}
2 | config["retries"] = "three" # error: [invalid-assignment]
| ^^^^^^
|
info: rule `invalid-assignment` is enabled by default
```

View file

@ -0,0 +1,48 @@
---
source: crates/ty_test/src/lib.rs
expression: snapshot
---
---
mdtest name: assignment_diagnostics.md - Subscript assignment diagnostics - Invalid value type for `TypedDict`
mdtest path: crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md
---
# Python source files
## mdtest_snippet.py
```
1 | from typing import TypedDict
2 |
3 | class Config(TypedDict):
4 | retries: int
5 |
6 | def _(config: Config) -> None:
7 | config["retries"] = "three" # error: [invalid-assignment]
```
# Diagnostics
```
error[invalid-assignment]: Invalid assignment to key "retries" with declared type `int` on TypedDict `Config`
--> src/mdtest_snippet.py:7:5
|
6 | def _(config: Config) -> None:
7 | config["retries"] = "three" # error: [invalid-assignment]
| ------ --------- ^^^^^^^ value of type `Literal["three"]`
| | |
| | key has declared type `int`
| TypedDict `Config`
|
info: Item declaration
--> src/mdtest_snippet.py:4:5
|
3 | class Config(TypedDict):
4 | retries: int
| ------------ Item declared here
5 |
6 | def _(config: Config) -> None:
|
info: rule `invalid-assignment` is enabled by default
```

View file

@ -0,0 +1,38 @@
---
source: crates/ty_test/src/lib.rs
expression: snapshot
---
---
mdtest name: assignment_diagnostics.md - Subscript assignment diagnostics - Misspelled key for `TypedDict`
mdtest path: crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md
---
# Python source files
## mdtest_snippet.py
```
1 | from typing import TypedDict
2 |
3 | class Config(TypedDict):
4 | retries: int
5 |
6 | def _(config: Config) -> None:
7 | config["Retries"] = 30.0 # error: [invalid-key]
```
# Diagnostics
```
error[invalid-key]: Invalid key for TypedDict `Config`
--> src/mdtest_snippet.py:7:5
|
6 | def _(config: Config) -> None:
7 | config["Retries"] = 30.0 # error: [invalid-key]
| ------ ^^^^^^^^^ Unknown key "Retries" - did you mean "retries"?
| |
| TypedDict `Config`
|
info: rule `invalid-key` is enabled by default
```

View file

@ -0,0 +1,35 @@
---
source: crates/ty_test/src/lib.rs
expression: snapshot
---
---
mdtest name: assignment_diagnostics.md - Subscript assignment diagnostics - No `__setitem__` method
mdtest path: crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md
---
# Python source files
## mdtest_snippet.py
```
1 | class ReadOnlyDict:
2 | def __getitem__(self, key: str) -> int:
3 | return 42
4 |
5 | config = ReadOnlyDict()
6 | config["retries"] = 3 # error: [invalid-assignment]
```
# Diagnostics
```
error[invalid-assignment]: Cannot assign to a subscript on an object of type `ReadOnlyDict` with no `__setitem__` method
--> src/mdtest_snippet.py:6:1
|
5 | config = ReadOnlyDict()
6 | config["retries"] = 3 # error: [invalid-assignment]
| ^^^^^^
|
info: rule `invalid-assignment` is enabled by default
```

View file

@ -0,0 +1,32 @@
---
source: crates/ty_test/src/lib.rs
expression: snapshot
---
---
mdtest name: assignment_diagnostics.md - Subscript assignment diagnostics - Possibly missing `__setitem__` method
mdtest path: crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md
---
# Python source files
## mdtest_snippet.py
```
1 | def _(config: dict[str, int] | None) -> None:
2 | config["retries"] = 3 # error: [invalid-assignment]
```
# Diagnostics
```
error[invalid-assignment]: Cannot assign to a subscript on an object of type `None` with no `__setitem__` method
--> src/mdtest_snippet.py:2:5
|
1 | def _(config: dict[str, int] | None) -> None:
2 | config["retries"] = 3 # error: [invalid-assignment]
| ^^^^^^
|
info: The full type of the subscripted object is `dict[str, int] | None`
info: rule `invalid-assignment` is enabled by default
```

View file

@ -0,0 +1,60 @@
---
source: crates/ty_test/src/lib.rs
expression: snapshot
---
---
mdtest name: assignment_diagnostics.md - Subscript assignment diagnostics - Unknown key for all elements of a union
mdtest path: crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md
---
# Python source files
## mdtest_snippet.py
```
1 | from typing import TypedDict
2 |
3 | class Person(TypedDict):
4 | name: str
5 |
6 | class Animal(TypedDict):
7 | name: str
8 | legs: int
9 |
10 | def _(being: Person | Animal) -> None:
11 | # error: [invalid-key]
12 | # error: [invalid-key]
13 | being["surname"] = "unknown"
```
# Diagnostics
```
error[invalid-key]: Invalid key for TypedDict `Person`
--> src/mdtest_snippet.py:13:5
|
11 | # error: [invalid-key]
12 | # error: [invalid-key]
13 | being["surname"] = "unknown"
| ----- ^^^^^^^^^ Unknown key "surname" - did you mean "name"?
| |
| TypedDict `Person` in union type `Person | Animal`
|
info: rule `invalid-key` is enabled by default
```
```
error[invalid-key]: Invalid key for TypedDict `Animal`
--> src/mdtest_snippet.py:13:5
|
11 | # error: [invalid-key]
12 | # error: [invalid-key]
13 | being["surname"] = "unknown"
| ----- ^^^^^^^^^ Unknown key "surname" - did you mean "name"?
| |
| TypedDict `Animal` in union type `Person | Animal`
|
info: rule `invalid-key` is enabled by default
```

View file

@ -0,0 +1,42 @@
---
source: crates/ty_test/src/lib.rs
expression: snapshot
---
---
mdtest name: assignment_diagnostics.md - Subscript assignment diagnostics - Unknown key for one element of a union
mdtest path: crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md
---
# Python source files
## mdtest_snippet.py
```
1 | from typing import TypedDict
2 |
3 | class Person(TypedDict):
4 | name: str
5 |
6 | class Animal(TypedDict):
7 | name: str
8 | legs: int
9 |
10 | def _(being: Person | Animal) -> None:
11 | being["legs"] = 4 # error: [invalid-key]
```
# Diagnostics
```
error[invalid-key]: Invalid key for TypedDict `Person`
--> src/mdtest_snippet.py:11:5
|
10 | def _(being: Person | Animal) -> None:
11 | being["legs"] = 4 # error: [invalid-key]
| ----- ^^^^^^ Unknown key "legs"
| |
| TypedDict `Person` in union type `Person | Animal`
|
info: rule `invalid-key` is enabled by default
```

View file

@ -0,0 +1,32 @@
---
source: crates/ty_test/src/lib.rs
expression: snapshot
---
---
mdtest name: assignment_diagnostics.md - Subscript assignment diagnostics - Wrong value type for one element of a union
mdtest path: crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md
---
# Python source files
## mdtest_snippet.py
```
1 | def _(config: dict[str, int] | dict[str, str]) -> None:
2 | config["retries"] = 3 # error: [invalid-assignment]
```
# Diagnostics
```
error[invalid-assignment]: Method `__setitem__` of type `bound method dict[str, str].__setitem__(key: str, value: str, /) -> None` cannot be called with a key of type `Literal["retries"]` and a value of type `Literal[3]` on object of type `dict[str, str]`
--> src/mdtest_snippet.py:2:5
|
1 | def _(config: dict[str, int] | dict[str, str]) -> None:
2 | config["retries"] = 3 # error: [invalid-assignment]
| ^^^^^^
|
info: The full type of the subscripted object is `dict[str, int] | dict[str, str]`
info: rule `invalid-assignment` is enabled by default
```

View file

@ -0,0 +1,49 @@
---
source: crates/ty_test/src/lib.rs
expression: snapshot
---
---
mdtest name: assignment_diagnostics.md - Subscript assignment diagnostics - Wrong value type for all elements of a union
mdtest path: crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md
---
# Python source files
## mdtest_snippet.py
```
1 | def _(config: dict[str, int] | dict[str, str]) -> None:
2 | # error: [invalid-assignment]
3 | # error: [invalid-assignment]
4 | config["retries"] = 3.0
```
# Diagnostics
```
error[invalid-assignment]: Method `__setitem__` of type `bound method dict[str, int].__setitem__(key: str, value: int, /) -> None` cannot be called with a key of type `Literal["retries"]` and a value of type `float` on object of type `dict[str, int]`
--> src/mdtest_snippet.py:4:5
|
2 | # error: [invalid-assignment]
3 | # error: [invalid-assignment]
4 | config["retries"] = 3.0
| ^^^^^^
|
info: The full type of the subscripted object is `dict[str, int] | dict[str, str]`
info: rule `invalid-assignment` is enabled by default
```
```
error[invalid-assignment]: Method `__setitem__` of type `bound method dict[str, str].__setitem__(key: str, value: str, /) -> None` cannot be called with a key of type `Literal["retries"]` and a value of type `float` on object of type `dict[str, str]`
--> src/mdtest_snippet.py:4:5
|
2 | # error: [invalid-assignment]
3 | # error: [invalid-assignment]
4 | config["retries"] = 3.0
| ^^^^^^
|
info: The full type of the subscripted object is `dict[str, int] | dict[str, str]`
info: rule `invalid-assignment` is enabled by default
```

View file

@ -89,7 +89,7 @@ info: rule `invalid-key` is enabled by default
```
```
error[invalid-key]: Invalid key for TypedDict `Person` of type `str`
error[invalid-key]: Invalid key of type `str` for TypedDict `Person`
--> src/mdtest_snippet.py:16:12
|
15 | def access_with_str_key(person: Person, str_key: str):

View file

@ -0,0 +1,121 @@
# Subscript assignment diagnostics
<!-- snapshot-diagnostics -->
## Invalid value type
```py
config: dict[str, int] = {}
config["retries"] = "three" # error: [invalid-assignment]
```
## Invalid key type
```py
config: dict[str, int] = {}
config[0] = 3 # error: [invalid-assignment]
```
## Invalid value type for `TypedDict`
```py
from typing import TypedDict
class Config(TypedDict):
retries: int
def _(config: Config) -> None:
config["retries"] = "three" # error: [invalid-assignment]
```
## Invalid key type for `TypedDict`
```py
from typing import TypedDict
class Config(TypedDict):
retries: int
def _(config: Config) -> None:
config[0] = 3 # error: [invalid-key]
```
## Misspelled key for `TypedDict`
```py
from typing import TypedDict
class Config(TypedDict):
retries: int
def _(config: Config) -> None:
config["Retries"] = 30.0 # error: [invalid-key]
```
## No `__setitem__` method
```py
class ReadOnlyDict:
def __getitem__(self, key: str) -> int:
return 42
config = ReadOnlyDict()
config["retries"] = 3 # error: [invalid-assignment]
```
## Possibly missing `__setitem__` method
```py
def _(config: dict[str, int] | None) -> None:
config["retries"] = 3 # error: [invalid-assignment]
```
## Unknown key for one element of a union
```py
from typing import TypedDict
class Person(TypedDict):
name: str
class Animal(TypedDict):
name: str
legs: int
def _(being: Person | Animal) -> None:
being["legs"] = 4 # error: [invalid-key]
```
## Unknown key for all elements of a union
```py
from typing import TypedDict
class Person(TypedDict):
name: str
class Animal(TypedDict):
name: str
legs: int
def _(being: Person | Animal) -> None:
# error: [invalid-key]
# error: [invalid-key]
being["surname"] = "unknown"
```
## Wrong value type for one element of a union
```py
def _(config: dict[str, int] | dict[str, str]) -> None:
config["retries"] = 3 # error: [invalid-assignment]
```
## Wrong value type for all elements of a union
```py
def _(config: dict[str, int] | dict[str, str]) -> None:
# error: [invalid-assignment]
# error: [invalid-assignment]
config["retries"] = 3.0
```

View file

@ -76,7 +76,7 @@ a[0] = 0
class NoSetitem: ...
a = NoSetitem()
a[0] = 0 # error: "Cannot assign to object of type `NoSetitem` with no `__setitem__` method"
a[0] = 0 # error: "Cannot assign to a subscript on an object of type `NoSetitem` with no `__setitem__` method"
```
## `__setitem__` not callable

View file

@ -88,8 +88,6 @@ class C:
self.FINAL_C: Final[int] = 1
self.FINAL_D: Final = 1
self.FINAL_E: Final
# TODO: Should not be an error
# error: [invalid-assignment] "Cannot assign to final attribute `FINAL_E` on type `Self@__init__`"
self.FINAL_E = 1
reveal_type(C.FINAL_A) # revealed: int
@ -186,7 +184,6 @@ class C(metaclass=Meta):
self.INSTANCE_FINAL_A: Final[int] = 1
self.INSTANCE_FINAL_B: Final = 1
self.INSTANCE_FINAL_C: Final[int]
# error: [invalid-assignment] "Cannot assign to final attribute `INSTANCE_FINAL_C` on type `Self@__init__`"
self.INSTANCE_FINAL_C = 1
# error: [invalid-assignment] "Cannot assign to final attribute `META_FINAL_A` on type `<class 'C'>`"
@ -282,8 +279,6 @@ class C:
def __init__(self):
self.LEGAL_H: Final[int] = 1
self.LEGAL_I: Final[int]
# TODO: Should not be an error
# error: [invalid-assignment]
self.LEGAL_I = 1
# error: [invalid-type-form] "`Final` is not allowed in function parameter annotations"
@ -392,15 +387,142 @@ class C:
# TODO: This should be an error
NO_ASSIGNMENT_B: Final[int]
# This is okay. `DEFINED_IN_INIT` is defined in `__init__`.
DEFINED_IN_INIT: Final[int]
def __init__(self):
# TODO: should not be an error
# error: [invalid-assignment]
self.DEFINED_IN_INIT = 1
```
## Final attributes with Self annotation in `__init__`
Issue #1409: Final instance attributes should be assignable in `__init__` even when using `Self`
type annotation.
```toml
[environment]
python-version = "3.11"
```
```py
from typing import Final, Self
class ClassA:
ID4: Final[int] # OK because initialized in __init__
def __init__(self: Self):
self.ID4 = 1 # Should be OK
def other_method(self: Self):
# error: [invalid-assignment] "Cannot assign to final attribute `ID4` on type `Self@other_method`"
self.ID4 = 2 # Should still error outside __init__
class ClassB:
ID5: Final[int]
def __init__(self): # Without Self annotation
self.ID5 = 1 # Should also be OK
reveal_type(ClassA().ID4) # revealed: int
reveal_type(ClassB().ID5) # revealed: int
```
## Reassignment to Final in `__init__`
Per PEP 591 and the typing conformance suite, Final attributes can be assigned in `__init__`.
Multiple assignments within `__init__` are allowed (matching mypy and pyright behavior). However,
assignment in `__init__` is not allowed if the attribute already has a value at class level.
```py
from typing import Final
# Case 1: Declared in class, assigned once in __init__ - ALLOWED
class DeclaredAssignedInInit:
attr1: Final[int]
def __init__(self):
self.attr1 = 1 # OK: First and only assignment
# Case 2: Declared and assigned in class body - ALLOWED (no __init__ assignment)
class DeclaredAndAssignedInClass:
attr2: Final[int] = 10
# Case 3: Reassignment when already assigned in class body
class ReassignmentFromClass:
attr3: Final[int] = 10
def __init__(self):
# error: [invalid-assignment]
self.attr3 = 20 # Error: already assigned in class body
# Case 4: Multiple assignments within __init__ itself
# Per conformance suite and PEP 591, all assignments in __init__ are allowed
class MultipleAssignmentsInInit:
attr4: Final[int]
def __init__(self):
self.attr4 = 1 # OK: Assignment in __init__
self.attr4 = 2 # OK: Multiple assignments in __init__ are allowed
class ConditionalAssignment:
X: Final[int]
def __init__(self, cond: bool):
if cond:
self.X = 42 # OK: Assignment in __init__
else:
self.X = 56 # OK: Multiple assignments in __init__ are allowed
# Case 5: Declaration and assignment in __init__ - ALLOWED
class DeclareAndAssignInInit:
def __init__(self):
self.attr5: Final[int] = 1 # OK: Declare and assign in __init__
# Case 6: Assignment outside __init__ should still fail
class AssignmentOutsideInit:
attr6: Final[int]
def other_method(self):
# error: [invalid-assignment] "Cannot assign to final attribute `attr6`"
self.attr6 = 1 # Error: Not in __init__
```
## Final assignment restrictions in `__init__`
`__init__` can only assign Final attributes on the class it's defining, and only to the first
parameter (`self`).
```py
from typing import Final
class C:
x: Final[int] = 100
# Assignment from standalone function (even named __init__)
def _(c: C):
# error: [invalid-assignment] "Cannot assign to final attribute `x`"
c.x = 1 # Error: Not in C.__init__
def __init__(c: C):
# error: [invalid-assignment] "Cannot assign to final attribute `x`"
c.x = 1 # Error: Not a method
# Assignment from another class's __init__
class A:
def __init__(self, c: C):
# error: [invalid-assignment] "Cannot assign to final attribute `x`"
c.x = 1 # Error: Not C's __init__
# Assignment to non-self parameter in __init__
class D:
y: Final[int]
def __init__(self, other: "D"):
self.y = 1 # OK: Assigning to self
# TODO: Should error - assigning to non-self parameter
# Requires tracking which parameter the base expression refers to
other.y = 2
```
## Full diagnostics
<!-- snapshot-diagnostics -->

View file

@ -69,7 +69,7 @@ def name_or_age() -> Literal["name", "age"]:
carol: Person = {NAME: "Carol", AGE: 20}
reveal_type(carol[NAME]) # revealed: str
# error: [invalid-key] "Invalid key for TypedDict `Person` of type `str`"
# error: [invalid-key] "Invalid key of type `str` for TypedDict `Person`"
reveal_type(carol[non_literal()]) # revealed: Unknown
reveal_type(carol[name_or_age()]) # revealed: str | int | None
@ -526,10 +526,20 @@ class Person(TypedDict):
name: str
age: int | None
class Animal(TypedDict):
name: str
NAME_FINAL: Final = "name"
AGE_FINAL: Final[Literal["age"]] = "age"
def _(person: Person, literal_key: Literal["age"], union_of_keys: Literal["age", "name"], str_key: str, unknown_key: Any) -> None:
def _(
person: Person,
being: Person | Animal,
literal_key: Literal["age"],
union_of_keys: Literal["age", "name"],
str_key: str,
unknown_key: Any,
) -> None:
reveal_type(person["name"]) # revealed: str
reveal_type(person["age"]) # revealed: int | None
@ -543,23 +553,35 @@ def _(person: Person, literal_key: Literal["age"], union_of_keys: Literal["age",
# error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "non_existing""
reveal_type(person["non_existing"]) # revealed: Unknown
# error: [invalid-key] "Invalid key for TypedDict `Person` of type `str`"
# error: [invalid-key] "Invalid key of type `str` for TypedDict `Person`"
reveal_type(person[str_key]) # revealed: Unknown
# No error here:
reveal_type(person[unknown_key]) # revealed: Unknown
reveal_type(being["name"]) # revealed: str
# TODO: A type of `int | None | Unknown` might be better here. The `str` is mixed in
# because `Animal.__getitem__` can only return `str`.
# error: [invalid-key] "Invalid key for TypedDict `Animal`"
reveal_type(being["age"]) # revealed: int | None | str
```
### Writing
```py
from typing_extensions import TypedDict, Final, Literal, LiteralString, Any
from ty_extensions import Intersection
class Person(TypedDict):
name: str
surname: str
age: int | None
class Animal(TypedDict):
name: str
legs: int
NAME_FINAL: Final = "name"
AGE_FINAL: Final[Literal["age"]] = "age"
@ -580,13 +602,32 @@ def _(person: Person, literal_key: Literal["age"]):
def _(person: Person, union_of_keys: Literal["name", "surname"]):
person[union_of_keys] = "unknown"
# error: [invalid-assignment] "Cannot assign value of type `Literal[1]` to key of type `Literal["name", "surname"]` on TypedDict `Person`"
# error: [invalid-assignment] "Invalid assignment to key "name" with declared type `str` on TypedDict `Person`: value of type `Literal[1]`"
# error: [invalid-assignment] "Invalid assignment to key "surname" with declared type `str` on TypedDict `Person`: value of type `Literal[1]`"
person[union_of_keys] = 1
def _(being: Person | Animal):
being["name"] = "Being"
# error: [invalid-assignment] "Invalid assignment to key "name" with declared type `str` on TypedDict `Person`: value of type `Literal[1]`"
# error: [invalid-assignment] "Invalid assignment to key "name" with declared type `str` on TypedDict `Animal`: value of type `Literal[1]`"
being["name"] = 1
# error: [invalid-key] "Invalid key for TypedDict `Animal`: Unknown key "surname" - did you mean "name"?"
being["surname"] = "unknown"
def _(centaur: Intersection[Person, Animal]):
centaur["name"] = "Chiron"
centaur["age"] = 100
centaur["legs"] = 4
# error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "unknown""
centaur["unknown"] = "value"
def _(person: Person, union_of_keys: Literal["name", "age"], unknown_value: Any):
person[union_of_keys] = unknown_value
# error: [invalid-assignment] "Cannot assign value of type `None` to key of type `Literal["name", "age"]` on TypedDict `Person`"
# error: [invalid-assignment] "Invalid assignment to key "name" with declared type `str` on TypedDict `Person`: value of type `None`"
person[union_of_keys] = None
def _(person: Person, str_key: str, literalstr_key: LiteralString):

View file

@ -85,6 +85,7 @@ where
///
/// This method may panic or produce unspecified results if the provided module is from a
/// different file or Salsa revision than the module to which the node belongs.
#[track_caller]
pub fn node<'ast>(&self, module_ref: &'ast ParsedModuleRef) -> &'ast T {
#[cfg(debug_assertions)]
assert_eq!(module_ref.module().addr(), self.module_addr);

View file

@ -295,6 +295,7 @@ impl ModuleName {
Self::from_identifier_parts(db, importing_file, module.as_deref(), *level)
}
/// Computes the absolute module name from the LHS components of `from LHS import RHS`
pub(crate) fn from_identifier_parts(
db: &dyn Db,
importing_file: File,
@ -309,6 +310,16 @@ impl ModuleName {
.ok_or(ModuleNameResolutionError::InvalidSyntax)
}
}
/// Computes the absolute module name for the package this file belongs to.
///
/// i.e. this resolves `.` — it delegates to [`Self::from_identifier_parts`]
/// with no module component and `level == 1`, which is exactly the target of
/// a bare relative import (`from . import ...`) in `importing_file`.
///
/// Returns a [`ModuleNameResolutionError`] when the package name cannot be
/// resolved for `importing_file` (propagated unchanged from
/// `from_identifier_parts`).
pub(crate) fn package_for_file(
    db: &dyn Db,
    importing_file: File,
) -> Result<Self, ModuleNameResolutionError> {
    Self::from_identifier_parts(db, importing_file, None, 1)
}
}
impl Deref for ModuleName {

View file

@ -452,15 +452,12 @@ pub(crate) fn dynamic_resolution_paths<'db>(
let site_packages_dir = site_packages_search_path
.as_system_path()
.expect("Expected site package path to be a system path");
let site_packages_dir = system
.canonicalize_path(site_packages_dir)
.unwrap_or_else(|_| site_packages_dir.to_path_buf());
if !existing_paths.insert(Cow::Owned(site_packages_dir.clone())) {
if !existing_paths.insert(Cow::Borrowed(site_packages_dir)) {
continue;
}
let site_packages_root = files.expect_root(db, &site_packages_dir);
let site_packages_root = files.expect_root(db, site_packages_dir);
// This query needs to be re-executed each time a `.pth` file
// is added, modified or removed from the `site-packages` directory.
@ -477,7 +474,7 @@ pub(crate) fn dynamic_resolution_paths<'db>(
// containing a (relative or absolute) path.
// Each of these paths may point to an editable install of a package,
// so should be considered an additional search path.
let pth_file_iterator = match PthFileIterator::new(db, &site_packages_dir) {
let pth_file_iterator = match PthFileIterator::new(db, site_packages_dir) {
Ok(iterator) => iterator,
Err(error) => {
tracing::warn!(

View file

@ -1,4 +1,4 @@
use std::iter::FusedIterator;
use std::iter::{FusedIterator, once};
use std::sync::Arc;
use ruff_db::files::File;
@ -148,29 +148,56 @@ pub(crate) fn attribute_declarations<'db, 's>(
///
/// Only call this when doing type inference on the same file as `class_body_scope`, otherwise it
/// introduces a direct dependency on that file's AST.
pub(crate) fn attribute_scopes<'db, 's>(
pub(crate) fn attribute_scopes<'db>(
db: &'db dyn Db,
class_body_scope: ScopeId<'db>,
) -> impl Iterator<Item = FileScopeId> + use<'s, 'db> {
) -> impl Iterator<Item = FileScopeId> + 'db {
let file = class_body_scope.file(db);
let index = semantic_index(db, file);
let class_scope_id = class_body_scope.file_scope_id(db);
ChildrenIter::new(&index.scopes, class_scope_id)
.filter_map(move |(child_scope_id, scope)| {
let (function_scope_id, function_scope) =
if scope.node().scope_kind() == ScopeKind::TypeParams {
// This could be a generic method with a type-params scope.
// Go one level deeper to find the function scope. The first
// descendant is the (potential) function scope.
let function_scope_id = scope.descendants().start;
(function_scope_id, index.scope(function_scope_id))
} else {
(child_scope_id, scope)
};
function_scope.node().as_function()?;
Some(function_scope_id)
})
.flat_map(move |func_id| {
// Add any descendent scope that is eager and have eager scopes between the scope
// and the method scope. Since attributes can be defined in this scope.
let nested = index.descendent_scopes(func_id).filter_map(move |(id, s)| {
let is_eager = s.kind().is_eager();
let parents_are_eager = {
let mut all_parents_eager = true;
let mut current = Some(id);
ChildrenIter::new(&index.scopes, class_scope_id).filter_map(move |(child_scope_id, scope)| {
let (function_scope_id, function_scope) =
if scope.node().scope_kind() == ScopeKind::TypeParams {
// This could be a generic method with a type-params scope.
// Go one level deeper to find the function scope. The first
// descendant is the (potential) function scope.
let function_scope_id = scope.descendants().start;
(function_scope_id, index.scope(function_scope_id))
} else {
(child_scope_id, scope)
};
while let Some(scope_id) = current {
if scope_id == func_id {
break;
}
let scope = index.scope(scope_id);
if !scope.is_eager() {
all_parents_eager = false;
break;
}
current = scope.parent();
}
function_scope.node().as_function()?;
Some(function_scope_id)
})
all_parents_eager
};
(parents_are_eager && is_eager).then_some(id)
});
once(func_id).chain(nested)
})
}
/// Returns the module global scope of `file`.

View file

@ -186,29 +186,34 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> {
self.current_scope_info().file_scope_id
}
/// Returns the scope ID of the surrounding class body scope if the current scope
/// is a method inside a class body. Returns `None` otherwise, e.g. if the current
/// scope is a function body outside of a class, or if the current scope is not a
/// Returns the scope ID of the current scope if the current scope
/// is a method inside a class body or an eagerly executed scope inside a method.
/// Returns `None` otherwise, e.g. if the current scope is a function body outside of a class, or if the current scope is not a
/// function body.
fn is_method_of_class(&self) -> Option<FileScopeId> {
let mut scopes_rev = self.scope_stack.iter().rev();
fn is_method_or_eagerly_executed_in_method(&self) -> Option<FileScopeId> {
let mut scopes_rev = self
.scope_stack
.iter()
.rev()
.skip_while(|scope| self.scopes[scope.file_scope_id].is_eager());
let current = scopes_rev.next()?;
if self.scopes[current.file_scope_id].kind() != ScopeKind::Function {
return None;
}
let maybe_method = current.file_scope_id;
let parent = scopes_rev.next()?;
match self.scopes[parent.file_scope_id].kind() {
ScopeKind::Class => Some(parent.file_scope_id),
ScopeKind::Class => Some(maybe_method),
ScopeKind::TypeParams => {
// If the function is generic, the parent scope is an annotation scope.
// In this case, we need to go up one level higher to find the class scope.
let grandparent = scopes_rev.next()?;
if self.scopes[grandparent.file_scope_id].kind() == ScopeKind::Class {
Some(grandparent.file_scope_id)
Some(maybe_method)
} else {
None
}
@ -217,6 +222,32 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> {
}
}
/// Checks if a symbol name is bound in any intermediate eager scopes
/// between the current scope and the specified method scope.
///
/// Walks the scope stack from the innermost scope outwards and stops as soon
/// as `method_scope_id` is reached, so the method scope itself is *not*
/// consulted. Returns `true` if any scope in between has a binding for
/// `symbol_name`.
///
/// This is used to detect when a method's first parameter (e.g. `self`) has
/// been shadowed by an intermediate scope, such as a comprehension variable
/// (`for self in [...]`).
///
/// NOTE(review): despite the name, this loop does not itself filter on scope
/// eagerness — presumably the caller only invokes it when the intermediate
/// scopes are known to be eagerly executed; confirm at the call site.
fn is_symbol_bound_in_intermediate_eager_scopes(
    &self,
    symbol_name: &str,
    method_scope_id: FileScopeId,
) -> bool {
    // Innermost scope first; break before inspecting the method scope itself.
    for scope_info in self.scope_stack.iter().rev() {
        let scope_id = scope_info.file_scope_id;
        if scope_id == method_scope_id {
            break;
        }
        // A mere declaration is not enough — only an actual binding shadows
        // the parameter.
        if let Some(symbol_id) = self.place_tables[scope_id].symbol_id(symbol_name) {
            let symbol = self.place_tables[scope_id].symbol(symbol_id);
            if symbol.is_bound() {
                return true;
            }
        }
    }
    false
}
/// Push a new loop, returning the outer loop, if any.
fn push_loop(&mut self) -> Option<Loop> {
self.current_scope_info_mut()
@ -283,6 +314,9 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> {
// Records snapshots of the place states visible from the current eager scope.
fn record_eager_snapshots(&mut self, popped_scope_id: FileScopeId) {
let popped_scope = &self.scopes[popped_scope_id];
let popped_scope_is_annotation_scope = popped_scope.kind().is_annotation();
// If the scope that we just popped off is an eager scope, we need to "lock" our view of
// which bindings reach each of the uses in the scope. Loop through each enclosing scope,
// looking for any that bind each place.
@ -297,6 +331,7 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> {
// ```
for enclosing_scope_info in self.scope_stack.iter().rev() {
let enclosing_scope_id = enclosing_scope_info.file_scope_id;
let is_immediately_enclosing_scope = popped_scope.parent() == Some(enclosing_scope_id);
let enclosing_scope_kind = self.scopes[enclosing_scope_id].kind();
let enclosing_place_table = &self.place_tables[enclosing_scope_id];
@ -324,6 +359,7 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> {
enclosing_place_id,
enclosing_scope_kind,
enclosing_place,
popped_scope_is_annotation_scope && is_immediately_enclosing_scope,
);
self.enclosing_snapshots.insert(key, eager_snapshot);
}
@ -398,6 +434,7 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> {
enclosed_symbol_id.into(),
enclosing_scope_kind,
enclosing_place.into(),
false,
);
self.enclosing_snapshots.insert(key, lazy_snapshot);
}
@ -1451,7 +1488,7 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
// If we see:
//
// * `from .x.y import z` (must be relative!)
// * `from .x.y import z` (or `from whatever.thispackage.x.y`)
// * And we are in an `__init__.py(i)` (hereafter `thispackage`)
// * And this is the first time we've seen `from .x` in this module
// * And we're in the global scope
@ -1465,25 +1502,35 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
// reasons but it works well for most practical purposes. In particular it's nice
// that `x` can be freely overwritten, and that we don't assume that an import
// in one function is visible in another function.
//
// TODO: Also support `from thispackage.x.y import z`?
if self.current_scope() == FileScopeId::global()
&& node.level == 1
&& let Some(submodule) = &node.module
&& let Some(parsed_submodule) = ModuleName::new(submodule.as_str())
&& let Some(direct_submodule) = parsed_submodule.components().next()
&& self.file.is_package(self.db)
&& !self.seen_submodule_imports.contains(direct_submodule)
let mut is_self_import = false;
if self.file.is_package(self.db)
&& let Ok(module_name) = ModuleName::from_identifier_parts(
self.db,
self.file,
node.module.as_deref(),
node.level,
)
&& let Ok(thispackage) = ModuleName::package_for_file(self.db, self.file)
{
self.seen_submodule_imports
.insert(direct_submodule.to_owned());
// Record whether this is equivalent to `from . import ...`
is_self_import = module_name == thispackage;
let direct_submodule_name = Name::new(direct_submodule);
let symbol = self.add_symbol(direct_submodule_name);
self.add_definition(
symbol.into(),
ImportFromSubmoduleDefinitionNodeRef { node, submodule },
);
if node.module.is_some()
&& let Some(relative_submodule) = module_name.relative_to(&thispackage)
&& let Some(direct_submodule) = relative_submodule.components().next()
&& !self.seen_submodule_imports.contains(direct_submodule)
&& self.current_scope().is_global()
{
self.seen_submodule_imports
.insert(direct_submodule.to_owned());
let direct_submodule_name = Name::new(direct_submodule);
let symbol = self.add_symbol(direct_submodule_name);
self.add_definition(
symbol.into(),
ImportFromSubmoduleDefinitionNodeRef { node },
);
}
}
let mut found_star = false;
@ -1595,13 +1642,10 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
// It's re-exported if it's `from ... import x as x`
(&asname.id, asname.id == alias.name.id)
} else {
// It's re-exported if it's `from . import x` in an `__init__.pyi`
(
&alias.name.id,
node.level == 1
&& node.module.is_none()
&& self.file.is_package(self.db),
)
// As a non-standard rule to handle stubs in the wild, we consider
// `from . import x` and `from whatever.thispackage import x` in an
// `__init__.pyi` to re-export `x` (as long as it wasn't renamed)
(&alias.name.id, is_self_import)
};
// Look for imports `from __future__ import annotations`, ignore `as ...`
@ -1693,7 +1737,7 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
self.visit_expr(&node.annotation);
if let Some(value) = &node.value {
self.visit_expr(value);
if self.is_method_of_class().is_some() {
if self.is_method_or_eagerly_executed_in_method().is_some() {
// Record the right-hand side of the assignment as a standalone expression
// if we're inside a method. This allows type inference to infer the type
// of the value for annotated assignments like `self.CONSTANT: Final = 1`,
@ -2365,14 +2409,21 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
| ast::Expr::Attribute(ast::ExprAttribute { ctx, .. })
| ast::Expr::Subscript(ast::ExprSubscript { ctx, .. }) => {
if let Some(mut place_expr) = PlaceExpr::try_from_expr(expr) {
if self.is_method_of_class().is_some() {
if let Some(method_scope_id) = self.is_method_or_eagerly_executed_in_method() {
if let PlaceExpr::Member(member) = &mut place_expr {
if member.is_instance_attribute_candidate() {
// We specifically mark attribute assignments to the first parameter of a method,
// i.e. typically `self` or `cls`.
let accessed_object_refers_to_first_parameter = self
.current_first_parameter_name
.is_some_and(|first| member.symbol_name() == first);
// However, we must check that the symbol hasn't been shadowed by an intermediate
// scope (e.g., a comprehension variable: `for self in [...]`).
let accessed_object_refers_to_first_parameter =
self.current_first_parameter_name.is_some_and(|first| {
member.symbol_name() == first
&& !self.is_symbol_bound_in_intermediate_eager_scopes(
first,
method_scope_id,
)
});
if accessed_object_refers_to_first_parameter {
member.mark_instance_attribute();

View file

@ -3,7 +3,6 @@ use std::ops::Deref;
use ruff_db::files::{File, FileRange};
use ruff_db::parsed::{ParsedModuleRef, parsed_module};
use ruff_python_ast as ast;
use ruff_python_ast::name::Name;
use ruff_text_size::{Ranged, TextRange};
use crate::Db;
@ -368,7 +367,6 @@ pub(crate) struct ImportFromDefinitionNodeRef<'ast> {
#[derive(Copy, Clone, Debug)]
pub(crate) struct ImportFromSubmoduleDefinitionNodeRef<'ast> {
pub(crate) node: &'ast ast::StmtImportFrom,
pub(crate) submodule: &'ast ast::Identifier,
}
#[derive(Copy, Clone, Debug)]
pub(crate) struct AssignmentDefinitionNodeRef<'ast, 'db> {
@ -450,10 +448,8 @@ impl<'db> DefinitionNodeRef<'_, 'db> {
}),
DefinitionNodeRef::ImportFromSubmodule(ImportFromSubmoduleDefinitionNodeRef {
node,
submodule,
}) => DefinitionKind::ImportFromSubmodule(ImportFromSubmoduleDefinitionKind {
node: AstNodeRef::new(parsed, node),
submodule: submodule.as_str().into(),
}),
DefinitionNodeRef::ImportStar(star_import) => {
let StarImportDefinitionNodeRef { node, symbol_id } = star_import;
@ -580,10 +576,7 @@ impl<'db> DefinitionNodeRef<'_, 'db> {
alias_index,
is_reexported: _,
}) => (&node.names[alias_index]).into(),
Self::ImportFromSubmodule(ImportFromSubmoduleDefinitionNodeRef {
node,
submodule: _,
}) => node.into(),
Self::ImportFromSubmodule(ImportFromSubmoduleDefinitionNodeRef { node }) => node.into(),
// INVARIANT: for an invalid-syntax statement such as `from foo import *, bar, *`,
// we only create a `StarImportDefinitionKind` for the *first* `*` alias in the names list.
Self::ImportStar(StarImportDefinitionNodeRef { node, symbol_id: _ }) => node
@ -1021,17 +1014,12 @@ impl ImportFromDefinitionKind {
#[derive(Clone, Debug, get_size2::GetSize)]
pub struct ImportFromSubmoduleDefinitionKind {
node: AstNodeRef<ast::StmtImportFrom>,
submodule: Name,
}
impl ImportFromSubmoduleDefinitionKind {
pub fn import<'ast>(&self, module: &'ast ParsedModuleRef) -> &'ast ast::StmtImportFrom {
self.node.node(module)
}
pub(crate) fn submodule(&self) -> &Name {
&self.submodule
}
}
#[derive(Clone, Debug, get_size2::GetSize)]
@ -1046,7 +1034,7 @@ impl<'db> AssignmentDefinitionKind<'db> {
self.target_kind
}
pub(crate) fn value<'ast>(&self, module: &'ast ParsedModuleRef) -> &'ast ast::Expr {
pub fn value<'ast>(&self, module: &'ast ParsedModuleRef) -> &'ast ast::Expr {
self.value.node(module)
}

View file

@ -761,6 +761,7 @@ pub(crate) struct DeclarationsIterator<'map, 'db> {
inner: LiveDeclarationsIterator<'map>,
}
#[derive(Debug)]
pub(crate) struct DeclarationWithConstraint<'db> {
pub(crate) declaration: DefinitionState<'db>,
pub(crate) reachability_constraint: ScopedReachabilityConstraintId,
@ -1186,17 +1187,21 @@ impl<'db> UseDefMapBuilder<'db> {
pub(super) fn snapshot_enclosing_state(
&mut self,
enclosing_place: ScopedPlaceId,
scope: ScopeKind,
enclosing_scope: ScopeKind,
enclosing_place_expr: PlaceExprRef,
is_parent_of_annotation_scope: bool,
) -> ScopedEnclosingSnapshotId {
let bindings = match enclosing_place {
ScopedPlaceId::Symbol(symbol) => self.symbol_states[symbol].bindings(),
ScopedPlaceId::Member(member) => self.member_states[member].bindings(),
};
// Names bound in class scopes are never visible to nested scopes (but attributes/subscripts are visible),
// so we never need to save eager scope bindings in a class scope.
if (scope.is_class() && enclosing_place.is_symbol()) || !enclosing_place_expr.is_bound() {
let is_class_symbol = enclosing_scope.is_class() && enclosing_place.is_symbol();
// Names bound in class scopes are never visible to nested scopes (but
// attributes/subscripts are visible), so we never need to save eager scope bindings in a
// class scope. There is one exception to this rule: annotation scopes can see names
// defined in an immediately-enclosing class scope.
if (is_class_symbol && !is_parent_of_annotation_scope) || !enclosing_place_expr.is_bound() {
self.enclosing_snapshots.push(EnclosingSnapshot::Constraint(
bindings.unbound_narrowing_constraint(),
))

View file

@ -869,7 +869,7 @@ impl<'db> Type<'db> {
matches!(self, Type::Dynamic(DynamicType::Todo(_)))
}
pub(crate) const fn is_generic_alias(&self) -> bool {
pub const fn is_generic_alias(&self) -> bool {
matches!(self, Type::GenericAlias(_))
}
@ -1028,6 +1028,13 @@ impl<'db> Type<'db> {
any_over_type(db, self, &|ty| matches!(ty, Type::TypeVar(_)), false)
}
pub(crate) const fn as_special_form(self) -> Option<SpecialFormType> {
match self {
Type::SpecialForm(special_form) => Some(special_form),
_ => None,
}
}
pub(crate) const fn as_class_literal(self) -> Option<ClassLiteral<'db>> {
match self {
Type::ClassLiteral(class_type) => Some(class_type),
@ -1073,12 +1080,11 @@ impl<'db> Type<'db> {
.expect("Expected a Type::ClassLiteral variant")
}
pub(crate) const fn is_subclass_of(&self) -> bool {
pub const fn is_subclass_of(&self) -> bool {
matches!(self, Type::SubclassOf(..))
}
#[cfg(test)]
pub(crate) const fn is_class_literal(&self) -> bool {
pub const fn is_class_literal(&self) -> bool {
matches!(self, Type::ClassLiteral(..))
}
@ -1157,6 +1163,10 @@ impl<'db> Type<'db> {
}
}
pub(crate) const fn is_union(&self) -> bool {
matches!(self, Type::Union(_))
}
pub(crate) const fn as_union(self) -> Option<UnionType<'db>> {
match self {
Type::Union(union_type) => Some(union_type),
@ -1164,7 +1174,6 @@ impl<'db> Type<'db> {
}
}
#[cfg(test)]
#[track_caller]
pub(crate) const fn expect_union(self) -> UnionType<'db> {
self.as_union().expect("Expected a Type::Union variant")
@ -6580,12 +6589,13 @@ impl<'db> Type<'db> {
}),
KnownInstanceType::UnionType(list) => {
let mut builder = UnionBuilder::new(db);
let inferred_as = list.inferred_as(db);
for element in list.elements(db) {
builder = builder.add(element.in_type_expression(
db,
scope_id,
typevar_binding_context,
)?);
builder = builder.add(if inferred_as.type_expression() {
*element
} else {
element.in_type_expression(db, scope_id, typevar_binding_context)?
});
}
Ok(builder.build())
}
@ -8578,7 +8588,7 @@ impl<'db> TypeVarInstance<'db> {
self.identity(db).definition(db)
}
pub(crate) fn kind(self, db: &'db dyn Db) -> TypeVarKind {
pub fn kind(self, db: &'db dyn Db) -> TypeVarKind {
self.identity(db).kind(db)
}
@ -9158,6 +9168,21 @@ impl<'db> TypeVarBoundOrConstraints<'db> {
}
}
/// Whether a given type originates from value expression inference or type expression inference.
/// For example, the symbol `int` would be inferred as `<class 'int'>` in value expression context,
/// and as `int` (i.e. an instance of the class `int`) in type expression context.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, get_size2::GetSize, salsa::Update)]
pub enum InferredAs {
ValueExpression,
TypeExpression,
}
impl InferredAs {
pub const fn type_expression(self) -> bool {
matches!(self, InferredAs::TypeExpression)
}
}
/// A salsa-interned list of types.
///
/// # Ordering
@ -9168,6 +9193,7 @@ impl<'db> TypeVarBoundOrConstraints<'db> {
pub struct InternedTypes<'db> {
#[returns(deref)]
elements: Box<[Type<'db>]>,
inferred_as: InferredAs,
}
impl get_size2::GetSize for InternedTypes<'_> {}
@ -9176,8 +9202,9 @@ impl<'db> InternedTypes<'db> {
pub(crate) fn from_elements(
db: &'db dyn Db,
elements: impl IntoIterator<Item = Type<'db>>,
inferred_as: InferredAs,
) -> InternedTypes<'db> {
InternedTypes::new(db, elements.into_iter().collect::<Box<[_]>>())
InternedTypes::new(db, elements.into_iter().collect::<Box<[_]>>(), inferred_as)
}
pub(crate) fn normalized_impl(self, db: &'db dyn Db, visitor: &NormalizedVisitor<'db>) -> Self {
@ -9187,6 +9214,7 @@ impl<'db> InternedTypes<'db> {
.iter()
.map(|ty| ty.normalized_impl(db, visitor))
.collect::<Box<[_]>>(),
self.inferred_as(db),
)
}
}

View file

@ -3647,6 +3647,31 @@ impl<'db> BindingError<'db> {
expected_ty,
provided_ty,
} => {
// Certain special forms in the typing module are aliases for classes
// elsewhere in the standard library. These special forms are not instances of `type`,
// and you cannot use them in place of their aliased classes in *all* situations:
// for example, `dict()` succeeds at runtime, but `typing.Dict()` fails. However,
// they *can* all be used as the second argument to `isinstance` and `issubclass`.
// We model that specific aspect of their behaviour here.
//
// This is implemented as a special case in call-binding machinery because overriding
// typeshed's signatures for `isinstance()` and `issubclass()` would be complex and
// error-prone, due to the fact that they are annotated with recursive type aliases.
if parameter.index == 1
&& *argument_index == Some(1)
&& matches!(
callable_ty
.as_function_literal()
.and_then(|function| function.known(context.db())),
Some(KnownFunction::IsInstance | KnownFunction::IsSubclass)
)
&& provided_ty
.as_special_form()
.is_some_and(SpecialFormType::is_valid_isinstance_target)
{
return;
}
// TODO: Ideally we would not emit diagnostics for `TypedDict` literal arguments
// here (see `diagnostic::is_invalid_typed_dict_literal`). However, we may have
// silenced diagnostics during overload evaluation, and rely on the assignability

View file

@ -3119,30 +3119,47 @@ impl<'db> ClassLiteral<'db> {
union_of_inferred_types = union_of_inferred_types.add(Type::unknown());
}
for (attribute_assignments, method_scope_id) in
for (attribute_assignments, attribute_binding_scope_id) in
attribute_assignments(db, class_body_scope, &name)
{
let method_scope = index.scope(method_scope_id);
if !is_valid_scope(method_scope) {
let binding_scope = index.scope(attribute_binding_scope_id);
if !is_valid_scope(binding_scope) {
continue;
}
// The attribute assignment inherits the reachability of the method which contains it
let is_method_reachable = if let Some(method_def) = method_scope.node().as_function() {
let method = index.expect_single_definition(method_def);
let method_place = class_table
.symbol_id(&method_def.node(&module).name)
.unwrap();
class_map
.all_reachable_symbol_bindings(method_place)
.find_map(|bind| {
(bind.binding.is_defined_and(|def| def == method))
.then(|| class_map.binding_reachability(db, &bind))
})
.unwrap_or(Truthiness::AlwaysFalse)
} else {
Truthiness::AlwaysFalse
let scope_for_reachability_analysis = {
if binding_scope.node().as_function().is_some() {
binding_scope
} else if binding_scope.is_eager() {
let mut eager_scope_parent = binding_scope;
while eager_scope_parent.is_eager()
&& let Some(parent) = eager_scope_parent.parent()
{
eager_scope_parent = index.scope(parent);
}
eager_scope_parent
} else {
binding_scope
}
};
// The attribute assignment inherits the reachability of the method which contains it
let is_method_reachable =
if let Some(method_def) = scope_for_reachability_analysis.node().as_function() {
let method = index.expect_single_definition(method_def);
let method_place = class_table
.symbol_id(&method_def.node(&module).name)
.unwrap();
class_map
.all_reachable_symbol_bindings(method_place)
.find_map(|bind| {
(bind.binding.is_defined_and(|def| def == method))
.then(|| class_map.binding_reachability(db, &bind))
})
.unwrap_or(Truthiness::AlwaysFalse)
} else {
Truthiness::AlwaysFalse
};
if is_method_reachable.is_always_false() {
continue;
}

View file

@ -3063,6 +3063,7 @@ pub(crate) fn report_invalid_key_on_typed_dict<'db>(
typed_dict_node: AnyNodeRef,
key_node: AnyNodeRef,
typed_dict_ty: Type<'db>,
full_object_ty: Option<Type<'db>>,
key_ty: Type<'db>,
items: &FxOrderMap<Name, Field<'db>>,
) {
@ -3077,11 +3078,21 @@ pub(crate) fn report_invalid_key_on_typed_dict<'db>(
"Invalid key for TypedDict `{typed_dict_name}`",
));
diagnostic.annotate(
diagnostic.annotate(if let Some(full_object_ty) = full_object_ty {
context.secondary(typed_dict_node).message(format_args!(
"TypedDict `{typed_dict_name}` in {kind} type `{full_object_ty}`",
kind = if full_object_ty.is_union() {
"union"
} else {
"intersection"
},
full_object_ty = full_object_ty.display(db)
))
} else {
context
.secondary(typed_dict_node)
.message(format_args!("TypedDict `{typed_dict_name}`")),
);
.message(format_args!("TypedDict `{typed_dict_name}`"))
});
let existing_keys = items.iter().map(|(name, _)| name.as_str());
@ -3093,15 +3104,22 @@ pub(crate) fn report_invalid_key_on_typed_dict<'db>(
String::new()
}
));
diagnostic
}
_ => builder.into_diagnostic(format_args!(
"Invalid key for TypedDict `{}` of type `{}`",
typed_dict_ty.display(db),
key_ty.display(db),
)),
};
_ => {
let mut diagnostic = builder.into_diagnostic(format_args!(
"Invalid key of type `{}` for TypedDict `{}`",
key_ty.display(db),
typed_dict_ty.display(db),
));
if let Some(full_object_ty) = full_object_ty {
diagnostic.info(format_args!(
"The full type of the subscripted object is `{}`",
full_object_ty.display(db)
));
}
}
}
}
}

View file

@ -1764,6 +1764,7 @@ impl KnownFunction {
Type::KnownInstance(KnownInstanceType::UnionType(_)) => {
fn find_invalid_elements<'db>(
db: &'db dyn Db,
function: KnownFunction,
ty: Type<'db>,
invalid_elements: &mut Vec<Type<'db>>,
) {
@ -1771,9 +1772,19 @@ impl KnownFunction {
Type::ClassLiteral(_) => {}
Type::NominalInstance(instance)
if instance.has_known_class(db, KnownClass::NoneType) => {}
Type::SpecialForm(special_form)
if special_form.is_valid_isinstance_target() => {}
// `Any` can be used in `issubclass()` calls but not `isinstance()` calls
Type::SpecialForm(SpecialFormType::Any)
if function == KnownFunction::IsSubclass => {}
Type::KnownInstance(KnownInstanceType::UnionType(union)) => {
for element in union.elements(db) {
find_invalid_elements(db, *element, invalid_elements);
find_invalid_elements(
db,
function,
*element,
invalid_elements,
);
}
}
_ => invalid_elements.push(ty),
@ -1781,7 +1792,7 @@ impl KnownFunction {
}
let mut invalid_elements = vec![];
find_invalid_elements(db, *second_argument, &mut invalid_elements);
find_invalid_elements(db, self, *second_argument, &mut invalid_elements);
let Some((first_invalid_element, other_invalid_elements)) =
invalid_elements.split_first()

View file

@ -10,9 +10,9 @@ use crate::semantic_index::scope::ScopeId;
use crate::semantic_index::{
attribute_scopes, global_scope, place_table, semantic_index, use_def_map,
};
use crate::types::CallDunderError;
use crate::types::call::{CallArguments, MatchedArgument};
use crate::types::signatures::Signature;
use crate::types::{CallDunderError, UnionType};
use crate::types::{
ClassBase, ClassLiteral, DynamicType, KnownClass, KnownInstanceType, Type, TypeContext,
TypeVarBoundOrConstraints, class::CodeGeneratorKind,
@ -477,32 +477,17 @@ pub fn all_members<'db>(db: &'db dyn Db, ty: Type<'db>) -> FxHashSet<Member<'db>
/// Get the primary definition kind for a name expression within a specific file.
/// Returns the first definition kind that is reachable for this name in its scope.
/// This is useful for IDE features like semantic tokens.
pub fn definition_kind_for_name<'db>(
pub fn definition_for_name<'db>(
db: &'db dyn Db,
file: File,
name: &ast::ExprName,
) -> Option<DefinitionKind<'db>> {
let index = semantic_index(db, file);
let name_str = name.id.as_str();
// Get the scope for this name expression
let file_scope = index.expression_scope_id(&ast::ExprRef::from(name));
// Get the place table for this scope
let place_table = index.place_table(file_scope);
// Look up the place by name
let symbol_id = place_table.symbol_id(name_str)?;
// Get the use-def map and look up definitions for this place
let declarations = index
.use_def_map(file_scope)
.all_reachable_symbol_declarations(symbol_id);
) -> Option<Definition<'db>> {
let definitions = definitions_for_name(db, file, name);
// Find the first valid definition and return its kind
for declaration in declarations {
if let Some(def) = declaration.declaration.definition() {
return Some(def.kind(db).clone());
for declaration in definitions {
if let Some(def) = declaration.definition() {
return Some(def);
}
}
@ -617,8 +602,34 @@ pub fn definitions_for_name<'db>(
// If we didn't find any definitions in scopes, fallback to builtins
if resolved_definitions.is_empty() {
let Some(builtins_scope) = builtins_module_scope(db) else {
return Vec::new();
return resolved_definitions;
};
// Special cases for `float` and `complex` in type annotation positions.
// We don't know whether we're in a type annotation position, so we'll just ask `Name`'s type,
// which resolves to `int | float` or `int | float | complex` if `float` or `complex` is used in
// a type annotation position and `float` or `complex` otherwise.
//
// https://typing.python.org/en/latest/spec/special-types.html#special-cases-for-float-and-complex
if matches!(name_str, "float" | "complex")
&& let Some(union) = name.inferred_type(&SemanticModel::new(db, file)).as_union()
&& is_float_or_complex_annotation(db, union, name_str)
{
return union
.elements(db)
.iter()
// Use `rev` so that `complex` and `float` come first.
// This is required for hover to pick up the docstring of `complex` and `float`
// instead of `int` (hover only shows the docstring of the first definition).
.rev()
.filter_map(|ty| ty.as_nominal_instance())
.map(|instance| {
let definition = instance.class_literal(db).definition(db);
ResolvedDefinition::Definition(definition)
})
.collect();
}
find_symbol_in_scope(db, builtins_scope, name_str)
.into_iter()
.filter(|def| def.is_reexported(db))
@ -636,6 +647,30 @@ pub fn definitions_for_name<'db>(
}
}
fn is_float_or_complex_annotation(db: &dyn Db, ty: UnionType, name: &str) -> bool {
let float_or_complex_ty = match name {
"float" => UnionType::from_elements(
db,
[
KnownClass::Int.to_instance(db),
KnownClass::Float.to_instance(db),
],
),
"complex" => UnionType::from_elements(
db,
[
KnownClass::Int.to_instance(db),
KnownClass::Float.to_instance(db),
KnownClass::Complex.to_instance(db),
],
),
_ => return false,
}
.expect_union();
ty == float_or_complex_ty
}
/// Returns all resolved definitions for an attribute expression `x.y`.
/// This function duplicates much of the functionality in the semantic
/// analyzer, but it has somewhat different behavior so we've decided
@ -1196,6 +1231,14 @@ mod resolve_definition {
}
impl<'db> ResolvedDefinition<'db> {
pub(crate) fn definition(&self) -> Option<Definition<'db>> {
match self {
ResolvedDefinition::Definition(definition) => Some(*definition),
ResolvedDefinition::Module(_) => None,
ResolvedDefinition::FileWithRange(_) => None,
}
}
fn file(&self, db: &'db dyn Db) -> File {
match self {
ResolvedDefinition::Definition(definition) => definition.file(db),

View file

@ -4,7 +4,6 @@ use itertools::{Either, Itertools};
use ruff_db::diagnostic::{Annotation, DiagnosticId, Severity};
use ruff_db::files::File;
use ruff_db::parsed::ParsedModuleRef;
use ruff_python_ast::name::Name;
use ruff_python_ast::visitor::{Visitor, walk_expr};
use ruff_python_ast::{
self as ast, AnyNodeRef, ExprContext, HasNodeIndex, NodeIndex, PythonVersion,
@ -102,11 +101,11 @@ use crate::types::typed_dict::{
use crate::types::visitor::any_over_type;
use crate::types::{
CallDunderError, CallableBinding, CallableType, ClassLiteral, ClassType, DataclassParams,
DynamicType, InternedType, InternedTypes, IntersectionBuilder, IntersectionType, KnownClass,
KnownInstanceType, MemberLookupPolicy, MetaclassCandidate, PEP695TypeAliasType, Parameter,
ParameterForm, Parameters, SpecialFormType, SubclassOfType, TrackedConstraintSet, Truthiness,
Type, TypeAliasType, TypeAndQualifiers, TypeContext, TypeQualifiers,
TypeVarBoundOrConstraintsEvaluation, TypeVarDefaultEvaluation, TypeVarIdentity,
DynamicType, InferredAs, InternedType, InternedTypes, IntersectionBuilder, IntersectionType,
KnownClass, KnownInstanceType, LintDiagnosticGuard, MemberLookupPolicy, MetaclassCandidate,
PEP695TypeAliasType, Parameter, ParameterForm, Parameters, SpecialFormType, SubclassOfType,
TrackedConstraintSet, Truthiness, Type, TypeAliasType, TypeAndQualifiers, TypeContext,
TypeQualifiers, TypeVarBoundOrConstraintsEvaluation, TypeVarDefaultEvaluation, TypeVarIdentity,
TypeVarInstance, TypeVarKind, TypeVarVariance, TypedDictType, UnionBuilder, UnionType,
binding_type, todo_type,
};
@ -1218,7 +1217,6 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
DefinitionKind::ImportFromSubmodule(import_from) => {
self.infer_import_from_submodule_definition(
import_from.import(self.module()),
import_from.submodule(),
definition,
);
}
@ -3540,142 +3538,305 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
}
}
/// Make sure that the subscript assignment `obj[slice] = value` is valid.
/// Validate a subscript assignment of the form `object[key] = rhs_value`.
fn validate_subscript_assignment(
&mut self,
target: &ast::ExprSubscript,
rhs: &ast::Expr,
assigned_ty: Type<'db>,
rhs_value: &ast::Expr,
rhs_value_ty: Type<'db>,
) -> bool {
let ast::ExprSubscript {
range: _,
node_index: _,
value,
value: object,
slice,
ctx: _,
} = target;
let value_ty = self.infer_expression(value, TypeContext::default());
let object_ty = self.infer_expression(object, TypeContext::default());
let slice_ty = self.infer_expression(slice, TypeContext::default());
self.validate_subscript_assignment_impl(
object.as_ref(),
None,
object_ty,
slice.as_ref(),
slice_ty,
rhs_value,
rhs_value_ty,
true,
)
}
#[expect(clippy::too_many_arguments)]
fn validate_subscript_assignment_impl(
&self,
object_node: &'ast ast::Expr,
full_object_ty: Option<Type<'db>>,
object_ty: Type<'db>,
slice_node: &'ast ast::Expr,
slice_ty: Type<'db>,
rhs_value_node: &'ast ast::Expr,
rhs_value_ty: Type<'db>,
emit_diagnostic: bool,
) -> bool {
/// Given a string literal or a union of string literals, return an iterator over the contained
/// strings, or `None`, if the type is neither.
fn key_literals<'db>(
db: &'db dyn Db,
slice_ty: Type<'db>,
) -> Option<impl Iterator<Item = &'db str> + 'db> {
if let Some(literal) = slice_ty.as_string_literal() {
Some(Either::Left(std::iter::once(literal.value(db))))
} else {
slice_ty.as_union().map(|union| {
Either::Right(
union
.elements(db)
.iter()
.filter_map(|ty| ty.as_string_literal().map(|lit| lit.value(db))),
)
})
}
}
let db = self.db();
let context = &self.context;
match value_ty.try_call_dunder(
db,
"__setitem__",
CallArguments::positional([slice_ty, assigned_ty]),
TypeContext::default(),
) {
Ok(_) => true,
Err(err) => match err {
CallDunderError::PossiblyUnbound { .. } => {
if let Some(builder) =
context.report_lint(&POSSIBLY_MISSING_IMPLICIT_CALL, &**value)
{
builder.into_diagnostic(format_args!(
"Method `__setitem__` of type `{}` may be missing",
value_ty.display(db),
));
}
false
let attach_original_type_info = |mut diagnostic: LintDiagnosticGuard| {
if let Some(full_object_ty) = full_object_ty {
diagnostic.info(format_args!(
"The full type of the subscripted object is `{}`",
full_object_ty.display(db)
));
}
};
match object_ty {
Type::Union(union) => {
// Note that we use a loop here instead of .all(…) to avoid short-circuiting.
// We need to keep iterating to emit all diagnostics.
let mut valid = true;
for element_ty in union.elements(db) {
valid &= self.validate_subscript_assignment_impl(
object_node,
full_object_ty.or(Some(object_ty)),
*element_ty,
slice_node,
slice_ty,
rhs_value_node,
rhs_value_ty,
emit_diagnostic,
);
}
CallDunderError::CallError(call_error_kind, bindings) => {
match call_error_kind {
CallErrorKind::NotCallable => {
if let Some(builder) = context.report_lint(&CALL_NON_CALLABLE, &**value)
{
builder.into_diagnostic(format_args!(
"Method `__setitem__` of type `{}` is not callable \
on object of type `{}`",
bindings.callable_type().display(db),
value_ty.display(db),
));
}
}
CallErrorKind::BindingError => {
let assigned_d = assigned_ty.display(db);
let value_d = value_ty.display(db);
valid
}
if let Some(typed_dict) = value_ty.as_typed_dict() {
if let Some(key) = slice_ty.as_string_literal() {
let key = key.value(self.db());
validate_typed_dict_key_assignment(
&self.context,
typed_dict,
key,
assigned_ty,
value.as_ref(),
slice.as_ref(),
rhs,
TypedDictAssignmentKind::Subscript,
);
} else {
// Check if the key has a valid type. We only allow string literals, a union of string literals,
// or a dynamic type like `Any`. We can do this by checking assignability to `LiteralString`,
// but we need to exclude `LiteralString` itself. This check would technically allow weird key
// types like `LiteralString & Any` to pass, but it does not need to be perfect. We would just
// fail to provide the "Only string literals are allowed" hint in that case.
if slice_ty.is_assignable_to(db, Type::LiteralString)
&& !slice_ty.is_equivalent_to(db, Type::LiteralString)
Type::Intersection(intersection) => {
let check_positive_elements = |emit_diagnostic_and_short_circuit| {
let mut valid = false;
for element_ty in intersection.positive(db) {
valid |= self.validate_subscript_assignment_impl(
object_node,
full_object_ty.or(Some(object_ty)),
*element_ty,
slice_node,
slice_ty,
rhs_value_node,
rhs_value_ty,
emit_diagnostic_and_short_circuit,
);
if !valid && emit_diagnostic_and_short_circuit {
break;
}
}
valid
};
// Perform an initial check of all elements. If the assignment is valid
// for at least one element, we do not emit any diagnostics. Otherwise,
// we re-run the check and emit a diagnostic on the first failing element.
let valid = check_positive_elements(false);
if !valid {
check_positive_elements(true);
}
valid
}
Type::TypedDict(typed_dict) => {
// As an optimization, prevent calling `__setitem__` on (unions of) large `TypedDict`s, and
// validate the assignment ourselves. This also allows us to emit better diagnostics.
let mut valid = true;
let Some(keys) = key_literals(db, slice_ty) else {
// Check if the key has a valid type. We only allow string literals, a union of string literals,
// or a dynamic type like `Any`. We can do this by checking assignability to `LiteralString`,
// but we need to exclude `LiteralString` itself. This check would technically allow weird key
// types like `LiteralString & Any` to pass, but it does not need to be perfect. We would just
// fail to provide the "Only string literals are allowed" hint in that case.
if slice_ty.is_dynamic() {
return true;
}
let assigned_d = rhs_value_ty.display(db);
let value_d = object_ty.display(db);
if slice_ty.is_assignable_to(db, Type::LiteralString)
&& !slice_ty.is_equivalent_to(db, Type::LiteralString)
{
if let Some(builder) =
self.context.report_lint(&INVALID_ASSIGNMENT, slice_node)
{
let diagnostic = builder.into_diagnostic(format_args!(
"Cannot assign value of type `{assigned_d}` to key of type `{}` on TypedDict `{value_d}`",
slice_ty.display(db)
));
attach_original_type_info(diagnostic);
}
} else {
if let Some(builder) = self.context.report_lint(&INVALID_KEY, slice_node) {
let diagnostic = builder.into_diagnostic(format_args!(
"Cannot access `{value_d}` with a key of type `{}`. Only string literals are allowed as keys on TypedDicts.",
slice_ty.display(db)
));
attach_original_type_info(diagnostic);
}
}
return false;
};
for key in keys {
valid &= validate_typed_dict_key_assignment(
&self.context,
typed_dict,
full_object_ty,
key,
rhs_value_ty,
object_node,
slice_node,
rhs_value_node,
TypedDictAssignmentKind::Subscript,
emit_diagnostic,
);
}
valid
}
_ => {
match object_ty.try_call_dunder(
db,
"__setitem__",
CallArguments::positional([slice_ty, rhs_value_ty]),
TypeContext::default(),
) {
Ok(_) => true,
Err(err) => match err {
CallDunderError::PossiblyUnbound { .. } => {
if emit_diagnostic
&& let Some(builder) = self
.context
.report_lint(&POSSIBLY_MISSING_IMPLICIT_CALL, rhs_value_node)
{
let diagnostic = builder.into_diagnostic(format_args!(
"Method `__setitem__` of type `{}` may be missing",
object_ty.display(db),
));
attach_original_type_info(diagnostic);
}
false
}
CallDunderError::CallError(call_error_kind, bindings) => {
match call_error_kind {
CallErrorKind::NotCallable => {
if emit_diagnostic
&& let Some(builder) = self
.context
.report_lint(&CALL_NON_CALLABLE, object_node)
{
if let Some(builder) =
context.report_lint(&INVALID_ASSIGNMENT, &**slice)
{
builder.into_diagnostic(format_args!(
"Cannot assign value of type `{assigned_d}` to key of type `{}` on TypedDict `{value_d}`",
slice_ty.display(db)
));
let diagnostic = builder.into_diagnostic(format_args!(
"Method `__setitem__` of type `{}` is not callable \
on object of type `{}`",
bindings.callable_type().display(db),
object_ty.display(db),
));
attach_original_type_info(diagnostic);
}
}
CallErrorKind::BindingError => {
if let Some(typed_dict) = object_ty.as_typed_dict() {
if let Some(key) = slice_ty.as_string_literal() {
let key = key.value(db);
validate_typed_dict_key_assignment(
&self.context,
typed_dict,
full_object_ty,
key,
rhs_value_ty,
object_node,
slice_node,
rhs_value_node,
TypedDictAssignmentKind::Subscript,
true,
);
}
} else {
if let Some(builder) =
context.report_lint(&INVALID_KEY, &**slice)
if emit_diagnostic
&& let Some(builder) = self
.context
.report_lint(&INVALID_ASSIGNMENT, object_node)
{
builder.into_diagnostic(format_args!(
"Cannot access `{value_d}` with a key of type `{}`. Only string literals are allowed as keys on TypedDicts.",
slice_ty.display(db)
let assigned_d = rhs_value_ty.display(db);
let value_d = object_ty.display(db);
let diagnostic = builder.into_diagnostic(format_args!(
"Method `__setitem__` of type `{}` cannot be called with \
a key of type `{}` and a value of type `{assigned_d}` on object of type `{value_d}`",
bindings.callable_type().display(db),
slice_ty.display(db),
));
attach_original_type_info(diagnostic);
}
}
}
} else {
if let Some(builder) =
context.report_lint(&INVALID_ASSIGNMENT, &**value)
{
builder.into_diagnostic(format_args!(
"Method `__setitem__` of type `{}` cannot be called with \
a key of type `{}` and a value of type `{assigned_d}` on object of type `{value_d}`",
bindings.callable_type().display(db),
slice_ty.display(db),
));
CallErrorKind::PossiblyNotCallable => {
if emit_diagnostic
&& let Some(builder) = self
.context
.report_lint(&CALL_NON_CALLABLE, object_node)
{
let diagnostic = builder.into_diagnostic(format_args!(
"Method `__setitem__` of type `{}` may not be callable on object of type `{}`",
bindings.callable_type().display(db),
object_ty.display(db),
));
attach_original_type_info(diagnostic);
}
}
}
false
}
CallErrorKind::PossiblyNotCallable => {
if let Some(builder) = context.report_lint(&CALL_NON_CALLABLE, &**value)
CallDunderError::MethodNotAvailable => {
if emit_diagnostic
&& let Some(builder) =
self.context.report_lint(&INVALID_ASSIGNMENT, object_node)
{
builder.into_diagnostic(format_args!(
"Method `__setitem__` of type `{}` may not be \
callable on object of type `{}`",
bindings.callable_type().display(db),
value_ty.display(db),
let diagnostic = builder.into_diagnostic(format_args!(
"Cannot assign to a subscript on an object of type `{}` with no `__setitem__` method",
object_ty.display(db),
));
attach_original_type_info(diagnostic);
}
false
}
}
false
},
}
CallDunderError::MethodNotAvailable => {
if let Some(builder) = context.report_lint(&INVALID_ASSIGNMENT, &**value) {
builder.into_diagnostic(format_args!(
"Cannot assign to object of type `{}` with no `__setitem__` method",
value_ty.display(db),
));
}
false
}
},
}
}
}
@ -3743,23 +3904,77 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
assignable
};
let emit_invalid_final = |builder: &Self| {
if emit_diagnostics {
if let Some(builder) = builder.context.report_lint(&INVALID_ASSIGNMENT, target) {
builder.into_diagnostic(format_args!(
"Cannot assign to final attribute `{attribute}` on type `{}`",
object_ty.display(db)
));
}
}
};
// Return true (and emit a diagnostic) if this is an invalid assignment to a `Final` attribute.
// Per PEP 591 and the typing conformance suite, Final instance attributes can be assigned
// in __init__ methods. Multiple assignments within __init__ are allowed (matching mypy
// and pyright behavior), as long as the attribute doesn't have a class-level value.
let invalid_assignment_to_final = |builder: &Self, qualifiers: TypeQualifiers| -> bool {
if qualifiers.contains(TypeQualifiers::FINAL) {
if emit_diagnostics {
if let Some(builder) = builder.context.report_lint(&INVALID_ASSIGNMENT, target)
{
builder.into_diagnostic(format_args!(
"Cannot assign to final attribute `{attribute}` \
on type `{}`",
object_ty.display(db)
));
// Check if it's a Final attribute
if !qualifiers.contains(TypeQualifiers::FINAL) {
return false;
}
// Check if we're in an __init__ method (where Final attributes can be initialized).
let is_in_init = builder
.current_function_definition()
.is_some_and(|func| func.name.id == "__init__");
// Not in __init__ - always disallow
if !is_in_init {
emit_invalid_final(builder);
return true;
}
// We're in __init__ - verify we're in a method of the class being mutated
let Some(class_ty) = builder.class_context_of_current_method() else {
// Not a method (standalone function named __init__)
emit_invalid_final(builder);
return true;
};
// Check that object_ty is an instance of the class we're in
if !object_ty.is_subtype_of(builder.db(), Type::instance(builder.db(), class_ty)) {
// Assigning to a different class's Final attribute
emit_invalid_final(builder);
return true;
}
// Check if class-level attribute already has a value
{
let class_definition = class_ty.class_literal(db).0;
let class_scope_id = class_definition.body_scope(db).file_scope_id(db);
let place_table = builder.index.place_table(class_scope_id);
if let Some(symbol) = place_table.symbol_by_name(attribute) {
if symbol.is_bound() {
if emit_diagnostics {
if let Some(diag_builder) =
builder.context.report_lint(&INVALID_ASSIGNMENT, target)
{
diag_builder.into_diagnostic(format_args!(
"Cannot assign to final attribute `{attribute}` in `__init__` \
because it already has a value at class level"
));
}
}
return true;
}
}
true
} else {
false
}
// In __init__ and no class-level value - allow
false
};
match object_ty {
@ -5901,51 +6116,64 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
}
}
/// Infer the implicit local definition `x = <module 'thispackage.x'>` that
/// `from .x.y import z` can introduce in an `__init__.py(i)`.
/// Infer the implicit local definition `x = <module 'whatever.thispackage.x'>` that
/// `from .x.y import z` or `from whatever.thispackage.x.y` can introduce in `__init__.py(i)`.
///
/// For the definition `z`, see [`TypeInferenceBuilder::infer_import_from_definition`].
///
/// The runtime semantic of this kind of statement is to introduce a variable in the global
/// scope of this module *the first time it's imported in the entire program*. This
/// implementation just blindly introduces a local variable wherever the `from..import` is
/// (if the imports actually resolve).
///
/// That gap between the semantics and implementation are currently the responsibility of the
/// code that actually creates these kinds of Definitions (so blindly introducing a local
/// is all we need to be doing here).
fn infer_import_from_submodule_definition(
&mut self,
import_from: &ast::StmtImportFrom,
submodule: &Name,
definition: Definition<'db>,
) {
// The runtime semantic of this kind of statement is to introduce a variable in the global
// scope of this module, so we do just that. (Actually we introduce a local variable, but
// this type of Definition is only created when a `from..import` is in global scope.)
// Get this package's module by resolving `.`
let Ok(module_name) = ModuleName::from_identifier_parts(self.db(), self.file(), None, 1)
else {
// Get this package's absolute module name by resolving `.`, and make sure it exists
let Ok(thispackage_name) = ModuleName::package_for_file(self.db(), self.file()) else {
self.add_binding(import_from.into(), definition, |_, _| Type::unknown());
return;
};
let Some(module) = resolve_module(self.db(), &thispackage_name) else {
self.add_binding(import_from.into(), definition, |_, _| Type::unknown());
return;
};
let Some(module) = resolve_module(self.db(), &module_name) else {
// We have `from whatever.thispackage.x.y ...` or `from .x.y ...`
// and we want to extract `x` (to ultimately construct `whatever.thispackage.x`):
// First we normalize to `whatever.thispackage.x.y`
let Some(final_part) = ModuleName::from_identifier_parts(
self.db(),
self.file(),
import_from.module.as_deref(),
import_from.level,
)
.ok()
// `whatever.thispackage.x.y` => `x.y`
.and_then(|submodule_name| submodule_name.relative_to(&thispackage_name))
// `x.y` => `x`
.and_then(|relative_submodule_name| {
relative_submodule_name
.components()
.next()
.and_then(ModuleName::new)
}) else {
self.add_binding(import_from.into(), definition, |_, _| Type::unknown());
return;
};
// Now construct the submodule `.x`
assert!(
!submodule.is_empty(),
"ImportFromSubmoduleDefinitionKind constructed with empty module"
);
let name = submodule
.split_once('.')
.map(|(first, _)| first)
.unwrap_or(submodule.as_str());
let full_submodule_name = ModuleName::new(name).map(|final_part| {
let mut ret = module_name.clone();
ret.extend(&final_part);
ret
});
// And try to import it
if let Some(submodule_type) = full_submodule_name
.as_ref()
.and_then(|submodule_name| self.module_type_from_name(submodule_name))
{
// `x` => `whatever.thispackage.x`
let mut full_submodule_name = thispackage_name.clone();
full_submodule_name.extend(&final_part);
// Try to actually resolve the import `whatever.thispackage.x`
if let Some(submodule_type) = self.module_type_from_name(&full_submodule_name) {
// Success, introduce a binding!
//
// We explicitly don't introduce a *declaration* because it's actually ok
@ -5970,17 +6198,15 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
};
let diagnostic = builder.into_diagnostic(format_args!(
"Module `{module_name}` has no submodule `{name}`"
"Module `{thispackage_name}` has no submodule `{final_part}`"
));
if let Some(full_submodule_name) = full_submodule_name {
hint_if_stdlib_submodule_exists_on_other_versions(
self.db(),
diagnostic,
&full_submodule_name,
module,
);
}
hint_if_stdlib_submodule_exists_on_other_versions(
self.db(),
diagnostic,
&full_submodule_name,
module,
);
}
fn infer_return_statement(&mut self, ret: &ast::StmtReturn) {
@ -7619,6 +7845,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
first_arg.into(),
first_arg.into(),
Type::TypedDict(typed_dict_ty),
None,
key_ty,
&items,
);
@ -8256,6 +8483,12 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
let mut nonlocal_union_builder = UnionBuilder::new(db);
let mut found_some_definition = false;
for (enclosing_scope_file_id, _) in self.index.ancestor_scopes(file_scope_id).skip(1) {
// If the current enclosing scope is global, no place lookup is performed here,
// instead falling back to the module's explicit global lookup below.
if enclosing_scope_file_id.is_global() {
break;
}
// Class scopes are not visible to nested scopes, and we need to handle global
// scope differently (because an unbound name there falls back to builtins), so
// check only function-like scopes.
@ -8286,6 +8519,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
// registering eager bindings for nested scopes that are actually eager, and for
// enclosing scopes that actually contain bindings that we should use when
// resolving the reference.)
let mut eagerly_resolved_place = None;
if !self.is_deferred() {
match self.index.enclosing_snapshot(
enclosing_scope_file_id,
@ -8297,6 +8531,11 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
enclosing_scope_file_id,
ConstraintKey::NarrowingConstraint(constraint),
));
// If the current scope is eager, it is certain that the place is undefined in the current scope.
// Do not call the `place` query below as a fallback.
if scope.scope(db).is_eager() {
eagerly_resolved_place = Some(Place::Undefined.into());
}
}
EnclosingSnapshotResult::FoundBindings(bindings) => {
let place = place_from_bindings(db, bindings).map_type(|ty| {
@ -8358,18 +8597,20 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
// `nonlocal` variable, but we don't enforce that here. See the
// `ast::Stmt::AnnAssign` handling in `SemanticIndexBuilder::visit_stmt`.)
if enclosing_place.is_bound() || enclosing_place.is_declared() {
let local_place_and_qualifiers = place(
db,
enclosing_scope_id,
place_expr,
ConsideredDefinitions::AllReachable,
)
.map_type(|ty| {
self.narrow_place_with_applicable_constraints(
let local_place_and_qualifiers = eagerly_resolved_place.unwrap_or_else(|| {
place(
db,
enclosing_scope_id,
place_expr,
ty,
&constraint_keys,
ConsideredDefinitions::AllReachable,
)
.map_type(|ty| {
self.narrow_place_with_applicable_constraints(
place_expr,
ty,
&constraint_keys,
)
})
});
// We could have `Place::Undefined` here, despite the checks above, for example if
// this scope contains a `del` statement but no binding or declaration.
@ -8412,6 +8653,9 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
FileScopeId::global(),
ConstraintKey::NarrowingConstraint(constraint),
));
// Reaching here means that no bindings are found in any scope.
// Since `explicit_global_symbol` may return a cycle initial value, we return `Place::Undefined` here.
return Place::Undefined.into();
}
EnclosingSnapshotResult::FoundBindings(bindings) => {
let place = place_from_bindings(db, bindings).map_type(|ty| {
@ -8899,6 +9143,12 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
emitted_division_by_zero_diagnostic = self.check_division_by_zero(node, op, left_ty);
}
let pep_604_unions_allowed = || {
Program::get(self.db()).python_version(self.db()) >= PythonVersion::PY310
|| self.file().is_stub(self.db())
|| self.scope().scope(self.db()).in_type_checking_block()
};
match (left_ty, right_ty, op) {
(Type::Union(lhs_union), rhs, _) => lhs_union.try_map(self.db(), |lhs_element| {
self.infer_binary_expression_type(
@ -9160,12 +9410,16 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
| KnownInstanceType::Annotated(_),
),
ast::Operator::BitOr,
) if Program::get(self.db()).python_version(self.db()) >= PythonVersion::PY310 => {
) if pep_604_unions_allowed() => {
if left_ty.is_equivalent_to(self.db(), right_ty) {
Some(left_ty)
} else {
Some(Type::KnownInstance(KnownInstanceType::UnionType(
InternedTypes::from_elements(self.db(), [left_ty, right_ty]),
InternedTypes::from_elements(
self.db(),
[left_ty, right_ty],
InferredAs::ValueExpression,
),
)))
}
}
@ -9186,11 +9440,15 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
| Type::KnownInstance(..)
| Type::SpecialForm(..),
ast::Operator::BitOr,
) if Program::get(self.db()).python_version(self.db()) >= PythonVersion::PY310
) if pep_604_unions_allowed()
&& instance.has_known_class(self.db(), KnownClass::NoneType) =>
{
Some(Type::KnownInstance(KnownInstanceType::UnionType(
InternedTypes::from_elements(self.db(), [left_ty, right_ty]),
InternedTypes::from_elements(
self.db(),
[left_ty, right_ty],
InferredAs::ValueExpression,
),
)))
}
@ -9210,17 +9468,15 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
_,
Type::ClassLiteral(..) | Type::GenericAlias(..) | Type::SubclassOf(..),
ast::Operator::BitOr,
) if Program::get(self.db()).python_version(self.db()) >= PythonVersion::PY310 => {
Type::try_call_bin_op_with_policy(
self.db(),
left_ty,
ast::Operator::BitOr,
right_ty,
MemberLookupPolicy::META_CLASS_NO_TYPE_FALLBACK,
)
.ok()
.map(|binding| binding.return_type(self.db()))
}
) if pep_604_unions_allowed() => Type::try_call_bin_op_with_policy(
self.db(),
left_ty,
ast::Operator::BitOr,
right_ty,
MemberLookupPolicy::META_CLASS_NO_TYPE_FALLBACK,
)
.ok()
.map(|binding| binding.return_type(self.db())),
// We've handled all of the special cases that we support for literals, so we need to
// fall back on looking for dunder methods on one of the operand types.
@ -10409,9 +10665,46 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
}
return Type::KnownInstance(KnownInstanceType::UnionType(
InternedTypes::from_elements(self.db(), [ty, Type::none(self.db())]),
InternedTypes::from_elements(
self.db(),
[ty, Type::none(self.db())],
InferredAs::ValueExpression,
),
));
}
Type::SpecialForm(SpecialFormType::Union) => {
let db = self.db();
match **slice {
ast::Expr::Tuple(ref tuple) => {
let mut elements = tuple
.elts
.iter()
.map(|elt| self.infer_type_expression(elt))
.peekable();
let is_empty = elements.peek().is_none();
let union_type = Type::KnownInstance(KnownInstanceType::UnionType(
InternedTypes::from_elements(db, elements, InferredAs::TypeExpression),
));
if is_empty {
if let Some(builder) =
self.context.report_lint(&INVALID_TYPE_FORM, subscript)
{
builder.into_diagnostic(
"`typing.Union` requires at least one type argument",
);
}
}
return union_type;
}
_ => {
return self.infer_expression(slice, TypeContext::default());
}
}
}
_ => {}
}
@ -10779,6 +11072,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
value_node.into(),
slice_node.into(),
value_ty,
None,
slice_ty,
&typed_dict.items(db),
);

View file

@ -11,9 +11,9 @@ use crate::types::enums::{enum_member_literals, enum_metadata};
use crate::types::function::KnownFunction;
use crate::types::infer::infer_same_file_expression_type;
use crate::types::{
ClassLiteral, ClassType, IntersectionBuilder, KnownClass, KnownInstanceType, SpecialFormType,
SubclassOfInner, SubclassOfType, Truthiness, Type, TypeContext, TypeVarBoundOrConstraints,
UnionBuilder, infer_expression_types,
CallableType, ClassLiteral, ClassType, IntersectionBuilder, KnownClass, KnownInstanceType,
SpecialFormType, SubclassOfInner, SubclassOfType, Truthiness, Type, TypeContext,
TypeVarBoundOrConstraints, UnionBuilder, infer_expression_types,
};
use ruff_db::parsed::{ParsedModuleRef, parsed_module};
@ -229,6 +229,18 @@ impl ClassInfoConstraintFunction {
)
}
// We don't have a good meta-type for `Callable`s right now,
// so only apply `isinstance()` narrowing, not `issubclass()`
Type::SpecialForm(SpecialFormType::Callable)
if self == ClassInfoConstraintFunction::IsInstance =>
{
Some(CallableType::unknown(db).top_materialization(db))
}
Type::SpecialForm(special_form) => special_form
.aliased_stdlib_class()
.and_then(|class| self.generate_constraint(db, class.to_class_literal(db))),
Type::AlwaysFalsy
| Type::AlwaysTruthy
| Type::BooleanLiteral(_)
@ -244,7 +256,6 @@ impl ClassInfoConstraintFunction {
| Type::FunctionLiteral(_)
| Type::ProtocolInstance(_)
| Type::PropertyInstance(_)
| Type::SpecialForm(_)
| Type::LiteralString
| Type::StringLiteral(_)
| Type::IntLiteral(_)

View file

@ -328,6 +328,113 @@ impl SpecialFormType {
}
}
/// If this special form is merely an alias for a class in the standard
/// library, return the corresponding [`KnownClass`]; otherwise return `None`.
pub(super) const fn aliased_stdlib_class(self) -> Option<KnownClass> {
    // Deliberately exhaustive (no `_` arm) so that adding a new special
    // form forces an explicit decision here.
    match self {
        Self::AlwaysFalsy
        | Self::AlwaysTruthy
        | Self::Annotated
        | Self::Any
        | Self::Bottom
        // `typing.Callable` is an alias to `collections.abc.Callable`,
        // but they're both the same `SpecialFormType` in our model,
        // and neither is a class in typeshed (even though the
        // `collections.abc` one is at runtime)
        | Self::Callable
        | Self::CallableTypeOf
        | Self::ClassVar
        | Self::Concatenate
        | Self::Final
        | Self::Generic
        | Self::Intersection
        | Self::Literal
        | Self::LiteralString
        | Self::NamedTuple
        | Self::Never
        | Self::NoReturn
        | Self::Not
        | Self::NotRequired
        | Self::Optional
        | Self::Protocol
        | Self::ReadOnly
        | Self::Required
        | Self::Top
        | Self::TypeAlias
        | Self::TypeGuard
        | Self::TypeIs
        | Self::TypeOf
        | Self::TypedDict
        | Self::TypingSelf
        | Self::Union
        | Self::Unknown
        | Self::Unpack => None,
        Self::ChainMap => Some(KnownClass::ChainMap),
        Self::Counter => Some(KnownClass::Counter),
        Self::DefaultDict => Some(KnownClass::DefaultDict),
        Self::Deque => Some(KnownClass::Deque),
        Self::Dict => Some(KnownClass::Dict),
        Self::FrozenSet => Some(KnownClass::FrozenSet),
        Self::List => Some(KnownClass::List),
        Self::OrderedDict => Some(KnownClass::OrderedDict),
        Self::Set => Some(KnownClass::Set),
        Self::Tuple => Some(KnownClass::Tuple),
        Self::Type => Some(KnownClass::Type),
    }
}
/// Return `true` if this special form is valid as the second argument
/// to `issubclass()` and `isinstance()` calls.
pub(super) const fn is_valid_isinstance_target(self) -> bool {
match self {
Self::Callable
| Self::ChainMap
| Self::Counter
| Self::DefaultDict
| Self::Deque
| Self::FrozenSet
| Self::Dict
| Self::List
| Self::OrderedDict
| Self::Set
| Self::Tuple
| Self::Type
| Self::Protocol
| Self::Generic => true,
Self::AlwaysFalsy
| Self::AlwaysTruthy
| Self::Annotated
| Self::Bottom
| Self::CallableTypeOf
| Self::ClassVar
| Self::Concatenate
| Self::Final
| Self::Intersection
| Self::Literal
| Self::LiteralString
| Self::Never
| Self::NoReturn
| Self::Not
| Self::ReadOnly
| Self::Required
| Self::TypeAlias
| Self::TypeGuard
| Self::NamedTuple
| Self::NotRequired
| Self::Optional
| Self::Top
| Self::TypeIs
| Self::TypedDict
| Self::TypingSelf
| Self::Union
| Self::Unknown
| Self::TypeOf
| Self::Any // can be used in `issubclass()` but not `isinstance()`.
| Self::Unpack => false,
}
}
/// Return the repr of the symbol at runtime
pub(super) const fn repr(self) -> &'static str {
match self {

View file

@ -143,30 +143,57 @@ impl TypedDictAssignmentKind {
pub(super) fn validate_typed_dict_key_assignment<'db, 'ast>(
context: &InferContext<'db, 'ast>,
typed_dict: TypedDictType<'db>,
full_object_ty: Option<Type<'db>>,
key: &str,
value_ty: Type<'db>,
typed_dict_node: impl Into<AnyNodeRef<'ast>>,
typed_dict_node: impl Into<AnyNodeRef<'ast>> + Copy,
key_node: impl Into<AnyNodeRef<'ast>>,
value_node: impl Into<AnyNodeRef<'ast>>,
assignment_kind: TypedDictAssignmentKind,
emit_diagnostic: bool,
) -> bool {
let db = context.db();
let items = typed_dict.items(db);
// Check if key exists in `TypedDict`
let Some((_, item)) = items.iter().find(|(name, _)| *name == key) else {
report_invalid_key_on_typed_dict(
context,
typed_dict_node.into(),
key_node.into(),
Type::TypedDict(typed_dict),
Type::string_literal(db, key),
&items,
);
if emit_diagnostic {
report_invalid_key_on_typed_dict(
context,
typed_dict_node.into(),
key_node.into(),
Type::TypedDict(typed_dict),
full_object_ty,
Type::string_literal(db, key),
&items,
);
}
return false;
};
let add_object_type_annotation =
|diagnostic: &mut Diagnostic| {
if let Some(full_object_ty) = full_object_ty {
diagnostic.annotate(context.secondary(typed_dict_node.into()).message(
format_args!(
"TypedDict `{}` in {kind} type `{}`",
Type::TypedDict(typed_dict).display(db),
full_object_ty.display(db),
kind = if full_object_ty.is_union() {
"union"
} else {
"intersection"
},
),
));
} else {
diagnostic.annotate(context.secondary(typed_dict_node.into()).message(
format_args!("TypedDict `{}`", Type::TypedDict(typed_dict).display(db)),
));
}
};
let add_item_definition_subdiagnostic = |diagnostic: &mut Diagnostic, message| {
if let Some(declaration) = item.single_declaration {
let file = declaration.file(db);
@ -184,8 +211,9 @@ pub(super) fn validate_typed_dict_key_assignment<'db, 'ast>(
};
if assignment_kind.is_subscript() && item.is_read_only() {
if let Some(builder) =
context.report_lint(assignment_kind.diagnostic_type(), key_node.into())
if emit_diagnostic
&& let Some(builder) =
context.report_lint(assignment_kind.diagnostic_type(), key_node.into())
{
let typed_dict_ty = Type::TypedDict(typed_dict);
let typed_dict_d = typed_dict_ty.display(db);
@ -195,13 +223,7 @@ pub(super) fn validate_typed_dict_key_assignment<'db, 'ast>(
));
diagnostic.set_primary_message(format_args!("key is marked read-only"));
diagnostic.annotate(
context
.secondary(typed_dict_node.into())
.message(format_args!("TypedDict `{typed_dict_d}`")),
);
add_object_type_annotation(&mut diagnostic);
add_item_definition_subdiagnostic(&mut diagnostic, "Read-only item declared here");
}
@ -219,7 +241,9 @@ pub(super) fn validate_typed_dict_key_assignment<'db, 'ast>(
}
// Invalid assignment - emit diagnostic
if let Some(builder) = context.report_lint(assignment_kind.diagnostic_type(), value_node) {
if emit_diagnostic
&& let Some(builder) = context.report_lint(assignment_kind.diagnostic_type(), value_node)
{
let typed_dict_ty = Type::TypedDict(typed_dict);
let typed_dict_d = typed_dict_ty.display(db);
let value_d = value_ty.display(db);
@ -232,12 +256,6 @@ pub(super) fn validate_typed_dict_key_assignment<'db, 'ast>(
diagnostic.set_primary_message(format_args!("value of type `{value_d}`"));
diagnostic.annotate(
context
.secondary(typed_dict_node.into())
.message(format_args!("TypedDict `{typed_dict_d}`")),
);
diagnostic.annotate(
context
.secondary(key_node.into())
@ -245,6 +263,7 @@ pub(super) fn validate_typed_dict_key_assignment<'db, 'ast>(
);
add_item_definition_subdiagnostic(&mut diagnostic, "Item declared here");
add_object_type_annotation(&mut diagnostic);
}
false
@ -343,12 +362,14 @@ fn validate_from_dict_literal<'db, 'ast>(
validate_typed_dict_key_assignment(
context,
typed_dict,
None,
key_str,
value_type,
error_node,
key_expr,
&dict_item.value,
TypedDictAssignmentKind::Constructor,
true,
);
}
}
@ -380,12 +401,14 @@ fn validate_from_keywords<'db, 'ast>(
validate_typed_dict_key_assignment(
context,
typed_dict,
None,
arg_name.as_str(),
arg_type,
error_node,
keyword,
&keyword.value,
TypedDictAssignmentKind::Constructor,
true,
);
}
}
@ -418,12 +441,14 @@ pub(super) fn validate_typed_dict_dict_literal<'db>(
valid &= validate_typed_dict_key_assignment(
context,
typed_dict,
None,
key_str,
value_type,
error_node,
key_expr,
&item.value,
TypedDictAssignmentKind::Constructor,
true,
);
}
}

View file

@ -618,8 +618,9 @@ Options:
notebooks, use `--extension ipy:ipynb`
--statistics
Show counts for every rule with at least one violation
--add-noqa
Enable automatic additions of `noqa` directives to failing lines
--add-noqa[=<REASON>]
Enable automatic additions of `noqa` directives to failing lines.
Optionally provide a reason to append after the codes
--show-files
See the files Ruff will be run against with the current settings
--show-settings

View file

@ -70,13 +70,16 @@ export default function Editor({
const serverRef = useRef<PlaygroundServer | null>(null);
if (serverRef.current != null) {
serverRef.current.update({
files,
workspace,
onOpenFile,
onVendoredFileChange,
onBackToUserFile,
});
serverRef.current.update(
{
files,
workspace,
onOpenFile,
onVendoredFileChange,
onBackToUserFile,
},
isViewingVendoredFile,
);
}
// Update the diagnostics in the editor.
@ -200,6 +203,7 @@ class PlaygroundServer
private rangeSemanticTokensDisposable: IDisposable;
private signatureHelpDisposable: IDisposable;
private documentHighlightDisposable: IDisposable;
private inVendoredFileCondition: editor.IContextKey<boolean>;
// Cache for vendored file handles
private vendoredFileHandles = new Map<string, FileHandle>();
@ -249,8 +253,16 @@ class PlaygroundServer
this.documentHighlightDisposable =
monaco.languages.registerDocumentHighlightProvider("python", this);
this.inVendoredFileCondition = editor.createContextKey<boolean>(
"inVendoredFile",
false,
);
// Register Esc key command
editor.addCommand(monaco.KeyCode.Escape, this.props.onBackToUserFile);
editor.addCommand(
monaco.KeyCode.Escape,
() => this.props.onBackToUserFile(),
"inVendoredFile",
);
}
triggerCharacters: string[] = ["."];
@ -452,8 +464,9 @@ class PlaygroundServer
return undefined;
}
update(props: PlaygroundServerProps) {
update(props: PlaygroundServerProps, isViewingVendoredFile: boolean) {
this.props = props;
this.inVendoredFileCondition.set(isViewingVendoredFile);
}
private getOrCreateVendoredFileHandle(vendoredPath: string): FileHandle {