Mirror of https://github.com/astral-sh/ruff.git (synced 2025-09-30 22:01:18 +00:00)

Merge branch 'main' into dcreager/real-constraint-sets

* main: (21 commits)
  [ty] Literal promotion refactor (#20646)
  [ty] Add tests for nested generic functions (#20631)
  [`cli`] Add conflict between `--add-noqa` and `--diff` options (#20642)
  [ty] Ensure first-party search paths always appear in a sensible order (#20629)
  [ty] Use `typing.Self` for the first parameter of instance methods (#20517)
  [ty] Remove unnecessary `parsed_module()` calls (#20630)
  Remove `TextEmitter` (#20595)
  [ty] Use fully qualified names to distinguish ambiguous protocols in diagnostics (#20627)
  [ty] Ecosystem analyzer: relax timeout thresholds (#20626)
  [ty] Apply type mappings to functions eagerly (#20596)
  [ty] Improve disambiguation of class names in diagnostics (#20603)
  Add the *The Basics* title back to CONTRIBUTING.md (#20624)
  [`playground`] Fix quick fixes for empty ranges in playground (#20599)
  Update dependency ruff to v0.13.2 (#20622)
  [`ruff`] Fix minor typos in doc comments (#20623)
  Update dependency PyYAML to v6.0.3 (#20621)
  Update cargo-bins/cargo-binstall action to v1.15.6 (#20620)
  Fixed documentation for try_consider_else (#20587)
  [ty] Use `Top` materializations for `TypeIs` special form (#20591)
  [ty] Simplify `Any | (Any & T)` to `Any` (#20593)
  ...

Commit 06aed22426
113 changed files with 2440 additions and 1129 deletions
.github/workflows/ci.yaml (4 changed lines, vendored)

@@ -452,7 +452,7 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install cargo-binstall"
uses: cargo-bins/cargo-binstall@20aa316bab4942180bbbabe93237858e8d77f1ed # v1.15.5
uses: cargo-bins/cargo-binstall@38e8f5e4c386b611d51e8aa997b9a06a3c8eb67a # v1.15.6
- name: "Install cargo-fuzz"
# Download the latest version from quick install and not the github releases because github releases only has MUSL targets.
run: cargo binstall cargo-fuzz --force --disable-strategies crate-meta-data --no-confirm

@@ -703,7 +703,7 @@ jobs:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
- uses: cargo-bins/cargo-binstall@20aa316bab4942180bbbabe93237858e8d77f1ed # v1.15.5
- uses: cargo-bins/cargo-binstall@38e8f5e4c386b611d51e8aa997b9a06a3c8eb67a # v1.15.6
- run: cargo binstall --no-confirm cargo-shear
- run: cargo shear
.github/workflows/ty-ecosystem-analyzer.yaml (2 changed lines, vendored)

@@ -64,7 +64,7 @@ jobs:
cd ..

uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@fc0f612798710b0dd69bb7528bc9b361dc60bd43"
uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@6ce3a609575bc84eaf5d247739529c60b6c2ae5b"

ecosystem-analyzer \
--repository ruff \
.github/workflows/ty-ecosystem-report.yaml (2 changed lines, vendored)

@@ -49,7 +49,7 @@ jobs:
cd ..

uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@fc0f612798710b0dd69bb7528bc9b361dc60bd43"
uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@6ce3a609575bc84eaf5d247739529c60b6c2ae5b"

ecosystem-analyzer \
--verbose \
@@ -37,6 +37,8 @@ exploration of new features, we will often close these pull requests immediately
new feature to ruff creates a long-term maintenance burden and requires strong consensus from the ruff
team before it is appropriate to begin work on an implementation.

## The Basics

### Prerequisites

Ruff is written in Rust. You'll need to install the
@@ -416,6 +416,7 @@ pub struct CheckCommand {
conflicts_with = "stdin_filename",
conflicts_with = "watch",
conflicts_with = "fix",
conflicts_with = "diff",
)]
pub add_noqa: bool,
/// See the files Ruff will be run against with the current settings.
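The hunk above adds `conflicts_with = "diff"` to the `--add-noqa` flag (#20642), so the two options are rejected together at parse time. A minimal, hedged sketch of that mechanism using clap's derive API; the struct and its fields below are illustrative stand-ins, not Ruff's actual `CheckCommand`:

```rust
// Illustrative sketch only: shows how clap's `conflicts_with` rejects two flags
// passed together. `Cli` and its fields are stand-ins for Ruff's real CLI types.
use clap::Parser;

#[derive(Parser)]
struct Cli {
    /// Stand-in for `--add-noqa`.
    #[arg(long, conflicts_with = "diff")]
    add_noqa: bool,

    /// Stand-in for `--diff`.
    #[arg(long)]
    diff: bool,
}

fn main() {
    // Running with `--add-noqa --diff` now exits with a usage error instead of
    // silently accepting both flags.
    let cli = Cli::parse();
    println!("add_noqa: {}, diff: {}", cli.add_noqa, cli.diff);
}
```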
@@ -227,7 +227,8 @@ mod test {
use rustc_hash::FxHashMap;
use tempfile::TempDir;

use ruff_linter::message::{Emitter, EmitterContext, TextEmitter};
use ruff_db::diagnostic::{DiagnosticFormat, DisplayDiagnosticConfig, DisplayDiagnostics};
use ruff_linter::message::EmitterContext;
use ruff_linter::registry::Rule;
use ruff_linter::settings::types::UnsafeFixes;
use ruff_linter::settings::{LinterSettings, flags};

@@ -280,19 +281,16 @@ mod test {
UnsafeFixes::Enabled,
)
.unwrap();
let mut output = Vec::new();

TextEmitter::default()
.with_show_fix_status(true)
.with_color(false)
.emit(
&mut output,
&diagnostics.inner,
&EmitterContext::new(&FxHashMap::default()),
)
.unwrap();

let messages = String::from_utf8(output).unwrap();
let config = DisplayDiagnosticConfig::default()
.format(DiagnosticFormat::Concise)
.hide_severity(true);
let messages = DisplayDiagnostics::new(
&EmitterContext::new(&FxHashMap::default()),
&config,
&diagnostics.inner,
)
.to_string();

insta::with_settings!({
omit_expression => true,
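This test hunk is representative of the whole commit: `TextEmitter` is dropped and callers render through `DisplayDiagnostics` configured by a `DisplayDiagnosticConfig`. A self-contained sketch of that pattern; the types and methods are the ones visible in this diff, while the helper function and its inputs are illustrative:

```rust
// Sketch of the new rendering path (types/methods as used in the hunks above;
// `render_concise` itself is a made-up helper for illustration).
use rustc_hash::FxHashMap;

use ruff_db::diagnostic::{Diagnostic, DiagnosticFormat, DisplayDiagnosticConfig, DisplayDiagnostics};
use ruff_linter::message::EmitterContext;

fn render_concise(diagnostics: &[Diagnostic]) -> String {
    // No notebooks in this sketch, so the notebook index map stays empty.
    let notebook_indexes = FxHashMap::default();
    let context = EmitterContext::new(&notebook_indexes);

    // Concise, severity-free output: the same configuration the updated test uses
    // in place of the old `TextEmitter` defaults.
    let config = DisplayDiagnosticConfig::default()
        .format(DiagnosticFormat::Concise)
        .hide_severity(true);

    DisplayDiagnostics::new(&context, &config, diagnostics).to_string()
}
```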
@@ -10,12 +10,11 @@ use ruff_linter::linter::FixTable;
use serde::Serialize;

use ruff_db::diagnostic::{
Diagnostic, DiagnosticFormat, DisplayDiagnosticConfig, DisplayDiagnostics,
DisplayGithubDiagnostics, GithubRenderer, SecondaryCode,
Diagnostic, DiagnosticFormat, DisplayDiagnosticConfig, DisplayDiagnostics, SecondaryCode,
};
use ruff_linter::fs::relativize_path;
use ruff_linter::logging::LogLevel;
use ruff_linter::message::{Emitter, EmitterContext, GroupedEmitter, SarifEmitter, TextEmitter};
use ruff_linter::message::{EmitterContext, render_diagnostics};
use ruff_linter::notify_user;
use ruff_linter::settings::flags::{self};
use ruff_linter::settings::types::{OutputFormat, UnsafeFixes};
@@ -225,86 +224,28 @@ impl Printer {
|
|||
let context = EmitterContext::new(&diagnostics.notebook_indexes);
|
||||
let fixables = FixableStatistics::try_from(diagnostics, self.unsafe_fixes);
|
||||
|
||||
let config = DisplayDiagnosticConfig::default().preview(preview);
|
||||
let config = DisplayDiagnosticConfig::default()
|
||||
.preview(preview)
|
||||
.hide_severity(true)
|
||||
.color(!cfg!(test) && colored::control::SHOULD_COLORIZE.should_colorize())
|
||||
.with_show_fix_status(show_fix_status(self.fix_mode, fixables.as_ref()))
|
||||
.with_fix_applicability(self.unsafe_fixes.required_applicability())
|
||||
.show_fix_diff(preview);
|
||||
|
||||
match self.format {
|
||||
OutputFormat::Json => {
|
||||
let config = config.format(DiagnosticFormat::Json);
|
||||
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
|
||||
write!(writer, "{value}")?;
|
||||
}
|
||||
OutputFormat::Rdjson => {
|
||||
let config = config.format(DiagnosticFormat::Rdjson);
|
||||
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
|
||||
write!(writer, "{value}")?;
|
||||
}
|
||||
OutputFormat::JsonLines => {
|
||||
let config = config.format(DiagnosticFormat::JsonLines);
|
||||
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
|
||||
write!(writer, "{value}")?;
|
||||
}
|
||||
OutputFormat::Junit => {
|
||||
let config = config.format(DiagnosticFormat::Junit);
|
||||
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
|
||||
write!(writer, "{value}")?;
|
||||
}
|
||||
OutputFormat::Concise | OutputFormat::Full => {
|
||||
TextEmitter::default()
|
||||
.with_show_fix_status(show_fix_status(self.fix_mode, fixables.as_ref()))
|
||||
.with_show_fix_diff(self.format == OutputFormat::Full && preview)
|
||||
.with_show_source(self.format == OutputFormat::Full)
|
||||
.with_fix_applicability(self.unsafe_fixes.required_applicability())
|
||||
.with_preview(preview)
|
||||
.emit(writer, &diagnostics.inner, &context)?;
|
||||
render_diagnostics(writer, self.format, config, &context, &diagnostics.inner)?;
|
||||
|
||||
if self.flags.intersects(Flags::SHOW_FIX_SUMMARY) {
|
||||
if !diagnostics.fixed.is_empty() {
|
||||
writeln!(writer)?;
|
||||
print_fix_summary(writer, &diagnostics.fixed)?;
|
||||
writeln!(writer)?;
|
||||
}
|
||||
if matches!(
|
||||
self.format,
|
||||
OutputFormat::Full | OutputFormat::Concise | OutputFormat::Grouped
|
||||
) {
|
||||
if self.flags.intersects(Flags::SHOW_FIX_SUMMARY) {
|
||||
if !diagnostics.fixed.is_empty() {
|
||||
writeln!(writer)?;
|
||||
print_fix_summary(writer, &diagnostics.fixed)?;
|
||||
writeln!(writer)?;
|
||||
}
|
||||
|
||||
self.write_summary_text(writer, diagnostics)?;
|
||||
}
|
||||
OutputFormat::Grouped => {
|
||||
GroupedEmitter::default()
|
||||
.with_show_fix_status(show_fix_status(self.fix_mode, fixables.as_ref()))
|
||||
.with_unsafe_fixes(self.unsafe_fixes)
|
||||
.emit(writer, &diagnostics.inner, &context)?;
|
||||
|
||||
if self.flags.intersects(Flags::SHOW_FIX_SUMMARY) {
|
||||
if !diagnostics.fixed.is_empty() {
|
||||
writeln!(writer)?;
|
||||
print_fix_summary(writer, &diagnostics.fixed)?;
|
||||
writeln!(writer)?;
|
||||
}
|
||||
}
|
||||
self.write_summary_text(writer, diagnostics)?;
|
||||
}
|
||||
OutputFormat::Github => {
|
||||
let renderer = GithubRenderer::new(&context, "Ruff");
|
||||
let value = DisplayGithubDiagnostics::new(&renderer, &diagnostics.inner);
|
||||
write!(writer, "{value}")?;
|
||||
}
|
||||
OutputFormat::Gitlab => {
|
||||
let config = config.format(DiagnosticFormat::Gitlab);
|
||||
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
|
||||
write!(writer, "{value}")?;
|
||||
}
|
||||
OutputFormat::Pylint => {
|
||||
let config = config.format(DiagnosticFormat::Pylint);
|
||||
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
|
||||
write!(writer, "{value}")?;
|
||||
}
|
||||
OutputFormat::Azure => {
|
||||
let config = config.format(DiagnosticFormat::Azure);
|
||||
let value = DisplayDiagnostics::new(&context, &config, &diagnostics.inner);
|
||||
write!(writer, "{value}")?;
|
||||
}
|
||||
OutputFormat::Sarif => {
|
||||
SarifEmitter.emit(writer, &diagnostics.inner, &context)?;
|
||||
}
|
||||
self.write_summary_text(writer, diagnostics)?;
|
||||
}
|
||||
|
||||
writer.flush()?;
|
||||
|
@@ -448,11 +389,22 @@ impl Printer {
}

let context = EmitterContext::new(&diagnostics.notebook_indexes);
TextEmitter::default()
let format = if preview {
DiagnosticFormat::Full
} else {
DiagnosticFormat::Concise
};
let config = DisplayDiagnosticConfig::default()
.hide_severity(true)
.color(!cfg!(test) && colored::control::SHOULD_COLORIZE.should_colorize())
.with_show_fix_status(show_fix_status(self.fix_mode, fixables.as_ref()))
.with_show_source(preview)
.with_fix_applicability(self.unsafe_fixes.required_applicability())
.emit(writer, &diagnostics.inner, &context)?;
.format(format)
.with_fix_applicability(self.unsafe_fixes.required_applicability());
write!(
writer,
"{}",
DisplayDiagnostics::new(&context, &config, &diagnostics.inner)
)?;
}
writer.flush()?;
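In the `impl Printer` hunks above, the per-format `match` arms collapse into a single call to the new `render_diagnostics` helper (its definition appears later in this diff, in `ruff_linter::message`). A hedged sketch of what such a call site looks like; the surrounding function and its arguments are placeholders, only the `render_diagnostics` signature comes from this diff:

```rust
// Placeholder call site for the shared helper introduced by this commit.
use std::io::Write;

use ruff_db::diagnostic::{Diagnostic, DisplayDiagnosticConfig};
use ruff_linter::message::{EmitterContext, render_diagnostics};
use ruff_linter::settings::types::OutputFormat;

fn print_diagnostics(
    writer: &mut dyn Write,
    format: OutputFormat,
    context: &EmitterContext<'_>,
    diagnostics: &[Diagnostic],
) -> std::io::Result<()> {
    // Severity is hidden to match Ruff's CLI output; other knobs keep their defaults.
    let config = DisplayDiagnosticConfig::default().hide_severity(true);

    // The helper converts `format` into a `DiagnosticFormat` when possible and
    // otherwise falls back to the GitHub, grouped, or SARIF emitters.
    render_diagnostics(writer, format, config, context, diagnostics)
}
```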
@@ -6199,6 +6199,36 @@ match 42: # invalid-syntax
Ok(())
}

#[test_case::test_case("concise"; "concise_show_fixes")]
#[test_case::test_case("full"; "full_show_fixes")]
#[test_case::test_case("grouped"; "grouped_show_fixes")]
fn output_format_show_fixes(output_format: &str) -> Result<()> {
let tempdir = TempDir::new()?;
let input = tempdir.path().join("input.py");
fs::write(&input, "import os # F401")?;

let snapshot = format!("output_format_show_fixes_{output_format}");

assert_cmd_snapshot!(
snapshot,
Command::new(get_cargo_bin(BIN_NAME))
.args([
"check",
"--no-cache",
"--output-format",
output_format,
"--select",
"F401",
"--fix",
"--show-fixes",
"input.py",
])
.current_dir(&tempdir),
);

Ok(())
}

#[test]
fn up045_nested_optional_flatten_all() {
let contents = "\
@@ -44,6 +44,43 @@ import some_module
__all__ = ["some_module"]
```

## Preview
When [preview] is enabled (and certain simplifying assumptions
are met), we analyze all import statements for a given module
when determining whether an import is used, rather than simply
the last of these statements. This can result in both different and
more import statements being marked as unused.

For example, if a module consists of

```python
import a
import a.b
```

then both statements are marked as unused under [preview], whereas
only the second is marked as unused under stable behavior.

As another example, if a module consists of

```python
import a.b
import a

a.b.foo()
```

then a diagnostic will only be emitted for the first line under [preview],
whereas a diagnostic would only be emitted for the second line under
stable behavior.

Note that this behavior is somewhat subjective and is designed
to conform to the developer's intuition rather than Python's actual
execution. To wit, the statement `import a.b` automatically executes
`import a`, so in some sense `import a` is _always_ redundant
in the presence of `import a.b`.


## Fix safety

Fixes to remove unused imports are safe, except in `__init__.py` files.

@@ -96,4 +133,6 @@ else:
- [Python documentation: `importlib.util.find_spec`](https://docs.python.org/3/library/importlib.html#importlib.util.find_spec)
- [Typing documentation: interface conventions](https://typing.python.org/en/latest/spec/distributing.html#library-interface-public-and-private-symbols)

[preview]: https://docs.astral.sh/ruff/preview/

----- stderr -----
@@ -119,7 +119,7 @@ exit_code: 1
"rules": [
{
"fullDescription": {
"text": "## What it does\nChecks for unused imports.\n\n## Why is this bad?\nUnused imports add a performance overhead at runtime, and risk creating\nimport cycles. They also increase the cognitive load of reading the code.\n\nIf an import statement is used to check for the availability or existence\nof a module, consider using `importlib.util.find_spec` instead.\n\nIf an import statement is used to re-export a symbol as part of a module's\npublic interface, consider using a \"redundant\" import alias, which\ninstructs Ruff (and other tools) to respect the re-export, and avoid\nmarking it as unused, as in:\n\n```python\nfrom module import member as member\n```\n\nAlternatively, you can use `__all__` to declare a symbol as part of the module's\ninterface, as in:\n\n```python\n# __init__.py\nimport some_module\n\n__all__ = [\"some_module\"]\n```\n\n## Fix safety\n\nFixes to remove unused imports are safe, except in `__init__.py` files.\n\nApplying fixes to `__init__.py` files is currently in preview. The fix offered depends on the\ntype of the unused import. Ruff will suggest a safe fix to export first-party imports with\neither a redundant alias or, if already present in the file, an `__all__` entry. If multiple\n`__all__` declarations are present, Ruff will not offer a fix. Ruff will suggest an unsafe fix\nto remove third-party and standard library imports -- the fix is unsafe because the module's\ninterface changes.\n\nSee [this FAQ section](https://docs.astral.sh/ruff/faq/#how-does-ruff-determine-which-of-my-imports-are-first-party-third-party-etc)\nfor more details on how Ruff\ndetermines whether an import is first or third-party.\n\n## Example\n\n```python\nimport numpy as np # unused import\n\n\ndef area(radius):\n return 3.14 * radius**2\n```\n\nUse instead:\n\n```python\ndef area(radius):\n return 3.14 * radius**2\n```\n\nTo check the availability of a module, use `importlib.util.find_spec`:\n\n```python\nfrom importlib.util import find_spec\n\nif find_spec(\"numpy\") is not None:\n print(\"numpy is installed\")\nelse:\n print(\"numpy is not installed\")\n```\n\n## Options\n- `lint.ignore-init-module-imports`\n- `lint.pyflakes.allowed-unused-imports`\n\n## References\n- [Python documentation: `import`](https://docs.python.org/3/reference/simple_stmts.html#the-import-statement)\n- [Python documentation: `importlib.util.find_spec`](https://docs.python.org/3/library/importlib.html#importlib.util.find_spec)\n- [Typing documentation: interface conventions](https://typing.python.org/en/latest/spec/distributing.html#library-interface-public-and-private-symbols)\n"
"text": "## What it does\nChecks for unused imports.\n\n## Why is this bad?\nUnused imports add a performance overhead at runtime, and risk creating\nimport cycles. They also increase the cognitive load of reading the code.\n\nIf an import statement is used to check for the availability or existence\nof a module, consider using `importlib.util.find_spec` instead.\n\nIf an import statement is used to re-export a symbol as part of a module's\npublic interface, consider using a \"redundant\" import alias, which\ninstructs Ruff (and other tools) to respect the re-export, and avoid\nmarking it as unused, as in:\n\n```python\nfrom module import member as member\n```\n\nAlternatively, you can use `__all__` to declare a symbol as part of the module's\ninterface, as in:\n\n```python\n# __init__.py\nimport some_module\n\n__all__ = [\"some_module\"]\n```\n\n## Preview\nWhen [preview] is enabled (and certain simplifying assumptions\nare met), we analyze all import statements for a given module\nwhen determining whether an import is used, rather than simply\nthe last of these statements. This can result in both different and\nmore import statements being marked as unused.\n\nFor example, if a module consists of\n\n```python\nimport a\nimport a.b\n```\n\nthen both statements are marked as unused under [preview], whereas\nonly the second is marked as unused under stable behavior.\n\nAs another example, if a module consists of\n\n```python\nimport a.b\nimport a\n\na.b.foo()\n```\n\nthen a diagnostic will only be emitted for the first line under [preview],\nwhereas a diagnostic would only be emitted for the second line under\nstable behavior.\n\nNote that this behavior is somewhat subjective and is designed\nto conform to the developer's intuition rather than Python's actual\nexecution. To wit, the statement `import a.b` automatically executes\n`import a`, so in some sense `import a` is _always_ redundant\nin the presence of `import a.b`.\n\n\n## Fix safety\n\nFixes to remove unused imports are safe, except in `__init__.py` files.\n\nApplying fixes to `__init__.py` files is currently in preview. The fix offered depends on the\ntype of the unused import. Ruff will suggest a safe fix to export first-party imports with\neither a redundant alias or, if already present in the file, an `__all__` entry. If multiple\n`__all__` declarations are present, Ruff will not offer a fix. 
Ruff will suggest an unsafe fix\nto remove third-party and standard library imports -- the fix is unsafe because the module's\ninterface changes.\n\nSee [this FAQ section](https://docs.astral.sh/ruff/faq/#how-does-ruff-determine-which-of-my-imports-are-first-party-third-party-etc)\nfor more details on how Ruff\ndetermines whether an import is first or third-party.\n\n## Example\n\n```python\nimport numpy as np # unused import\n\n\ndef area(radius):\n return 3.14 * radius**2\n```\n\nUse instead:\n\n```python\ndef area(radius):\n return 3.14 * radius**2\n```\n\nTo check the availability of a module, use `importlib.util.find_spec`:\n\n```python\nfrom importlib.util import find_spec\n\nif find_spec(\"numpy\") is not None:\n print(\"numpy is installed\")\nelse:\n print(\"numpy is not installed\")\n```\n\n## Options\n- `lint.ignore-init-module-imports`\n- `lint.pyflakes.allowed-unused-imports`\n\n## References\n- [Python documentation: `import`](https://docs.python.org/3/reference/simple_stmts.html#the-import-statement)\n- [Python documentation: `importlib.util.find_spec`](https://docs.python.org/3/library/importlib.html#importlib.util.find_spec)\n- [Typing documentation: interface conventions](https://typing.python.org/en/latest/spec/distributing.html#library-interface-public-and-private-symbols)\n\n[preview]: https://docs.astral.sh/ruff/preview/\n"
},
"help": {
"text": "`{name}` imported but unused; consider using `importlib.util.find_spec` to test for availability"
|
@@ -0,0 +1,26 @@
|
|||
---
|
||||
source: crates/ruff/tests/lint.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
- check
|
||||
- "--no-cache"
|
||||
- "--output-format"
|
||||
- concise
|
||||
- "--select"
|
||||
- F401
|
||||
- "--fix"
|
||||
- "--show-fixes"
|
||||
- input.py
|
||||
---
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
Fixed 1 error:
|
||||
- input.py:
|
||||
1 × F401 (unused-import)
|
||||
|
||||
Found 1 error (1 fixed, 0 remaining).
|
||||
|
||||
----- stderr -----
|
|
@@ -0,0 +1,26 @@
|
|||
---
|
||||
source: crates/ruff/tests/lint.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
- check
|
||||
- "--no-cache"
|
||||
- "--output-format"
|
||||
- full
|
||||
- "--select"
|
||||
- F401
|
||||
- "--fix"
|
||||
- "--show-fixes"
|
||||
- input.py
|
||||
---
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
Fixed 1 error:
|
||||
- input.py:
|
||||
1 × F401 (unused-import)
|
||||
|
||||
Found 1 error (1 fixed, 0 remaining).
|
||||
|
||||
----- stderr -----
|
|
@@ -0,0 +1,26 @@
|
|||
---
|
||||
source: crates/ruff/tests/lint.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
- check
|
||||
- "--no-cache"
|
||||
- "--output-format"
|
||||
- grouped
|
||||
- "--select"
|
||||
- F401
|
||||
- "--fix"
|
||||
- "--show-fixes"
|
||||
- input.py
|
||||
---
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
Fixed 1 error:
|
||||
- input.py:
|
||||
1 × F401 (unused-import)
|
||||
|
||||
Found 1 error (1 fixed, 0 remaining).
|
||||
|
||||
----- stderr -----
|
@@ -444,7 +444,7 @@ fn benchmark_complex_constrained_attributes_2(criterion: &mut Criterion) {
criterion.bench_function("ty_micro[complex_constrained_attributes_2]", |b| {
b.iter_batched_ref(
|| {
// This is is similar to the case above, but now the attributes are actually defined.
// This is similar to the case above, but now the attributes are actually defined.
// https://github.com/astral-sh/ty/issues/711
setup_micro_case(
r#"
@@ -117,7 +117,7 @@ static COLOUR_SCIENCE: std::sync::LazyLock<Benchmark<'static>> = std::sync::Lazy
max_dep_date: "2025-06-17",
python_version: PythonVersion::PY310,
},
477,
500,
)
});

@@ -1353,7 +1353,7 @@ impl DisplayDiagnosticConfig {
}

/// Whether to show a fix's availability or not.
pub fn show_fix_status(self, yes: bool) -> DisplayDiagnosticConfig {
pub fn with_show_fix_status(self, yes: bool) -> DisplayDiagnosticConfig {
DisplayDiagnosticConfig {
show_fix_status: yes,
..self
@@ -1374,12 +1374,20 @@ impl DisplayDiagnosticConfig {
/// availability for unsafe or display-only fixes.
///
/// Note that this option is currently ignored when `hide_severity` is false.
pub fn fix_applicability(self, applicability: Applicability) -> DisplayDiagnosticConfig {
pub fn with_fix_applicability(self, applicability: Applicability) -> DisplayDiagnosticConfig {
DisplayDiagnosticConfig {
fix_applicability: applicability,
..self
}
}

pub fn show_fix_status(&self) -> bool {
self.show_fix_status
}

pub fn fix_applicability(&self) -> Applicability {
self.fix_applicability
}
}

impl Default for DisplayDiagnosticConfig {
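The two hunks above rename the builder setters with a `with_` prefix and add read-back getters. A short sketch of the resulting API surface; the import paths are the ones used elsewhere in this diff, and the values passed are arbitrary examples:

```rust
// Builder setters renamed to `with_*`, plus the new getters.
use ruff_db::diagnostic::DisplayDiagnosticConfig;
use ruff_diagnostics::Applicability;

fn configure() -> DisplayDiagnosticConfig {
    let config = DisplayDiagnosticConfig::default()
        .with_show_fix_status(true)
        .with_fix_applicability(Applicability::Unsafe);

    // Downstream renderers (e.g. the grouped emitter later in this diff) can read
    // the settings back instead of carrying their own copies.
    debug_assert!(config.show_fix_status());
    debug_assert!(matches!(config.fix_applicability(), Applicability::Unsafe));

    config
}
```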
@@ -2618,7 +2618,7 @@ watermelon
/// Show fix availability when rendering.
pub(super) fn show_fix_status(&mut self, yes: bool) {
let mut config = std::mem::take(&mut self.config);
config = config.show_fix_status(yes);
config = config.with_show_fix_status(yes);
self.config = config;
}

@ -2632,7 +2632,7 @@ watermelon
|
|||
/// The lowest fix applicability to show when rendering.
|
||||
pub(super) fn fix_applicability(&mut self, applicability: Applicability) {
|
||||
let mut config = std::mem::take(&mut self.config);
|
||||
config = config.fix_applicability(applicability);
|
||||
config = config.with_fix_applicability(applicability);
|
||||
self.config = config;
|
||||
}
|
||||
|
||||
|
|
@@ -2360,7 +2360,7 @@ impl<'a> Checker<'a> {
}
}

/// Visit an body of [`Stmt`] nodes within a type-checking block.
/// Visit a body of [`Stmt`] nodes within a type-checking block.
fn visit_type_checking_block(&mut self, body: &'a [Stmt]) {
let snapshot = self.semantic.flags;
self.semantic.flags |= SemanticModelFlags::TYPE_CHECKING_BLOCK;
|
@@ -6,17 +6,25 @@ use std::num::NonZeroUsize;
|
|||
use colored::Colorize;
|
||||
|
||||
use ruff_db::diagnostic::Diagnostic;
|
||||
use ruff_diagnostics::Applicability;
|
||||
use ruff_notebook::NotebookIndex;
|
||||
use ruff_source_file::{LineColumn, OneIndexed};
|
||||
|
||||
use crate::fs::relativize_path;
|
||||
use crate::message::{Emitter, EmitterContext};
|
||||
use crate::settings::types::UnsafeFixes;
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct GroupedEmitter {
|
||||
show_fix_status: bool,
|
||||
unsafe_fixes: UnsafeFixes,
|
||||
applicability: Applicability,
|
||||
}
|
||||
|
||||
impl Default for GroupedEmitter {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
show_fix_status: false,
|
||||
applicability: Applicability::Safe,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl GroupedEmitter {
|
||||
|
@@ -27,8 +35,8 @@ impl GroupedEmitter {
|
|||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn with_unsafe_fixes(mut self, unsafe_fixes: UnsafeFixes) -> Self {
|
||||
self.unsafe_fixes = unsafe_fixes;
|
||||
pub fn with_applicability(mut self, applicability: Applicability) -> Self {
|
||||
self.applicability = applicability;
|
||||
self
|
||||
}
|
||||
}
|
||||
|
@@ -67,7 +75,7 @@ impl Emitter for GroupedEmitter {
|
|||
notebook_index: context.notebook_index(&message.expect_ruff_filename()),
|
||||
message,
|
||||
show_fix_status: self.show_fix_status,
|
||||
unsafe_fixes: self.unsafe_fixes,
|
||||
applicability: self.applicability,
|
||||
row_length,
|
||||
column_length,
|
||||
}
|
||||
|
@@ -114,7 +122,7 @@ fn group_diagnostics_by_filename(
|
|||
struct DisplayGroupedMessage<'a> {
|
||||
message: MessageWithLocation<'a>,
|
||||
show_fix_status: bool,
|
||||
unsafe_fixes: UnsafeFixes,
|
||||
applicability: Applicability,
|
||||
row_length: NonZeroUsize,
|
||||
column_length: NonZeroUsize,
|
||||
notebook_index: Option<&'a NotebookIndex>,
|
||||
|
@@ -162,7 +170,7 @@ impl Display for DisplayGroupedMessage<'_> {
|
|||
code_and_body = RuleCodeAndBody {
|
||||
message,
|
||||
show_fix_status: self.show_fix_status,
|
||||
unsafe_fixes: self.unsafe_fixes
|
||||
applicability: self.applicability
|
||||
},
|
||||
)?;
|
||||
|
||||
|
@@ -173,7 +181,7 @@ impl Display for DisplayGroupedMessage<'_> {
|
|||
pub(super) struct RuleCodeAndBody<'a> {
|
||||
pub(crate) message: &'a Diagnostic,
|
||||
pub(crate) show_fix_status: bool,
|
||||
pub(crate) unsafe_fixes: UnsafeFixes,
|
||||
pub(crate) applicability: Applicability,
|
||||
}
|
||||
|
||||
impl Display for RuleCodeAndBody<'_> {
|
||||
|
@@ -181,7 +189,7 @@ impl Display for RuleCodeAndBody<'_> {
|
|||
if self.show_fix_status {
|
||||
if let Some(fix) = self.message.fix() {
|
||||
// Do not display an indicator for inapplicable fixes
|
||||
if fix.applies(self.unsafe_fixes.required_applicability()) {
|
||||
if fix.applies(self.applicability) {
|
||||
if let Some(code) = self.message.secondary_code() {
|
||||
write!(f, "{} ", code.red().bold())?;
|
||||
}
|
||||
|
@@ -217,11 +225,12 @@ impl Display for RuleCodeAndBody<'_> {
|
|||
mod tests {
|
||||
use insta::assert_snapshot;
|
||||
|
||||
use ruff_diagnostics::Applicability;
|
||||
|
||||
use crate::message::GroupedEmitter;
|
||||
use crate::message::tests::{
|
||||
capture_emitter_output, create_diagnostics, create_syntax_error_diagnostics,
|
||||
};
|
||||
use crate::settings::types::UnsafeFixes;
|
||||
|
||||
#[test]
|
||||
fn default() {
|
||||
|
@@ -251,7 +260,7 @@ mod tests {
|
|||
fn fix_status_unsafe() {
|
||||
let mut emitter = GroupedEmitter::default()
|
||||
.with_show_fix_status(true)
|
||||
.with_unsafe_fixes(UnsafeFixes::Enabled);
|
||||
.with_applicability(Applicability::Unsafe);
|
||||
let content = capture_emitter_output(&mut emitter, &create_diagnostics());
|
||||
|
||||
assert_snapshot!(content);
|
||||
|
|
|
@@ -4,8 +4,9 @@ use std::io::Write;
|
|||
use rustc_hash::FxHashMap;
|
||||
|
||||
use ruff_db::diagnostic::{
|
||||
Annotation, Diagnostic, DiagnosticId, FileResolver, Input, LintName, SecondaryCode, Severity,
|
||||
Span, UnifiedFile,
|
||||
Annotation, Diagnostic, DiagnosticFormat, DiagnosticId, DisplayDiagnosticConfig,
|
||||
DisplayDiagnostics, DisplayGithubDiagnostics, FileResolver, GithubRenderer, Input, LintName,
|
||||
SecondaryCode, Severity, Span, UnifiedFile,
|
||||
};
|
||||
use ruff_db::files::File;
|
||||
|
||||
|
@@ -14,14 +15,13 @@ use ruff_notebook::NotebookIndex;
|
|||
use ruff_source_file::SourceFile;
|
||||
use ruff_text_size::{Ranged, TextRange, TextSize};
|
||||
pub use sarif::SarifEmitter;
|
||||
pub use text::TextEmitter;
|
||||
|
||||
use crate::Fix;
|
||||
use crate::registry::Rule;
|
||||
use crate::settings::types::{OutputFormat, RuffOutputFormat};
|
||||
|
||||
mod grouped;
|
||||
mod sarif;
|
||||
mod text;
|
||||
|
||||
/// Creates a `Diagnostic` from a syntax error, with the format expected by Ruff.
|
||||
///
|
||||
|
@@ -160,14 +160,48 @@ impl<'a> EmitterContext<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn render_diagnostics(
|
||||
writer: &mut dyn Write,
|
||||
format: OutputFormat,
|
||||
config: DisplayDiagnosticConfig,
|
||||
context: &EmitterContext<'_>,
|
||||
diagnostics: &[Diagnostic],
|
||||
) -> std::io::Result<()> {
|
||||
match DiagnosticFormat::try_from(format) {
|
||||
Ok(format) => {
|
||||
let config = config.format(format);
|
||||
let value = DisplayDiagnostics::new(context, &config, diagnostics);
|
||||
write!(writer, "{value}")?;
|
||||
}
|
||||
Err(RuffOutputFormat::Github) => {
|
||||
let renderer = GithubRenderer::new(context, "Ruff");
|
||||
let value = DisplayGithubDiagnostics::new(&renderer, diagnostics);
|
||||
write!(writer, "{value}")?;
|
||||
}
|
||||
Err(RuffOutputFormat::Grouped) => {
|
||||
GroupedEmitter::default()
|
||||
.with_show_fix_status(config.show_fix_status())
|
||||
.with_applicability(config.fix_applicability())
|
||||
.emit(writer, diagnostics, context)
|
||||
.map_err(std::io::Error::other)?;
|
||||
}
|
||||
Err(RuffOutputFormat::Sarif) => {
|
||||
SarifEmitter
|
||||
.emit(writer, diagnostics, context)
|
||||
.map_err(std::io::Error::other)?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use ruff_db::diagnostic::Diagnostic;
|
||||
use ruff_notebook::NotebookIndex;
|
||||
use ruff_python_parser::{Mode, ParseOptions, parse_unchecked};
|
||||
use ruff_source_file::{OneIndexed, SourceFileBuilder};
|
||||
use ruff_source_file::SourceFileBuilder;
|
||||
use ruff_text_size::{TextRange, TextSize};
|
||||
|
||||
use crate::codes::Rule;
|
||||
|
@@ -257,104 +291,6 @@ def fibonacci(n):
|
|||
vec![unused_import, unused_variable, undefined_name]
|
||||
}
|
||||
|
||||
pub(super) fn create_notebook_diagnostics()
|
||||
-> (Vec<Diagnostic>, FxHashMap<String, NotebookIndex>) {
|
||||
let notebook = r"# cell 1
|
||||
import os
|
||||
# cell 2
|
||||
import math
|
||||
|
||||
print('hello world')
|
||||
# cell 3
|
||||
def foo():
|
||||
print()
|
||||
x = 1
|
||||
";
|
||||
|
||||
let notebook_source = SourceFileBuilder::new("notebook.ipynb", notebook).finish();
|
||||
|
||||
let unused_import_os_start = TextSize::from(16);
|
||||
let unused_import_os = create_lint_diagnostic(
|
||||
"`os` imported but unused",
|
||||
Some("Remove unused import: `os`"),
|
||||
TextRange::new(unused_import_os_start, TextSize::from(18)),
|
||||
Some(Fix::safe_edit(Edit::range_deletion(TextRange::new(
|
||||
TextSize::from(9),
|
||||
TextSize::from(19),
|
||||
)))),
|
||||
None,
|
||||
notebook_source.clone(),
|
||||
Some(unused_import_os_start),
|
||||
Rule::UnusedImport,
|
||||
);
|
||||
|
||||
let unused_import_math_start = TextSize::from(35);
|
||||
let unused_import_math = create_lint_diagnostic(
|
||||
"`math` imported but unused",
|
||||
Some("Remove unused import: `math`"),
|
||||
TextRange::new(unused_import_math_start, TextSize::from(39)),
|
||||
Some(Fix::safe_edit(Edit::range_deletion(TextRange::new(
|
||||
TextSize::from(28),
|
||||
TextSize::from(40),
|
||||
)))),
|
||||
None,
|
||||
notebook_source.clone(),
|
||||
Some(unused_import_math_start),
|
||||
Rule::UnusedImport,
|
||||
);
|
||||
|
||||
let unused_variable_start = TextSize::from(98);
|
||||
let unused_variable = create_lint_diagnostic(
|
||||
"Local variable `x` is assigned to but never used",
|
||||
Some("Remove assignment to unused variable `x`"),
|
||||
TextRange::new(unused_variable_start, TextSize::from(99)),
|
||||
Some(Fix::unsafe_edit(Edit::deletion(
|
||||
TextSize::from(94),
|
||||
TextSize::from(104),
|
||||
))),
|
||||
None,
|
||||
notebook_source,
|
||||
Some(unused_variable_start),
|
||||
Rule::UnusedVariable,
|
||||
);
|
||||
|
||||
let mut notebook_indexes = FxHashMap::default();
|
||||
notebook_indexes.insert(
|
||||
"notebook.ipynb".to_string(),
|
||||
NotebookIndex::new(
|
||||
vec![
|
||||
OneIndexed::from_zero_indexed(0),
|
||||
OneIndexed::from_zero_indexed(0),
|
||||
OneIndexed::from_zero_indexed(1),
|
||||
OneIndexed::from_zero_indexed(1),
|
||||
OneIndexed::from_zero_indexed(1),
|
||||
OneIndexed::from_zero_indexed(1),
|
||||
OneIndexed::from_zero_indexed(2),
|
||||
OneIndexed::from_zero_indexed(2),
|
||||
OneIndexed::from_zero_indexed(2),
|
||||
OneIndexed::from_zero_indexed(2),
|
||||
],
|
||||
vec![
|
||||
OneIndexed::from_zero_indexed(0),
|
||||
OneIndexed::from_zero_indexed(1),
|
||||
OneIndexed::from_zero_indexed(0),
|
||||
OneIndexed::from_zero_indexed(1),
|
||||
OneIndexed::from_zero_indexed(2),
|
||||
OneIndexed::from_zero_indexed(3),
|
||||
OneIndexed::from_zero_indexed(0),
|
||||
OneIndexed::from_zero_indexed(1),
|
||||
OneIndexed::from_zero_indexed(2),
|
||||
OneIndexed::from_zero_indexed(3),
|
||||
],
|
||||
),
|
||||
);
|
||||
|
||||
(
|
||||
vec![unused_import_os, unused_import_math, unused_variable],
|
||||
notebook_indexes,
|
||||
)
|
||||
}
|
||||
|
||||
pub(super) fn capture_emitter_output(
|
||||
emitter: &mut dyn Emitter,
|
||||
diagnostics: &[Diagnostic],
|
||||
|
@@ -366,16 +302,4 @@ def foo():
|
|||
|
||||
String::from_utf8(output).expect("Output to be valid UTF-8")
|
||||
}
|
||||
|
||||
pub(super) fn capture_emitter_notebook_output(
|
||||
emitter: &mut dyn Emitter,
|
||||
diagnostics: &[Diagnostic],
|
||||
notebook_indexes: &FxHashMap<String, NotebookIndex>,
|
||||
) -> String {
|
||||
let context = EmitterContext::new(notebook_indexes);
|
||||
let mut output: Vec<u8> = Vec::new();
|
||||
emitter.emit(&mut output, diagnostics, &context).unwrap();
|
||||
|
||||
String::from_utf8(output).expect("Output to be valid UTF-8")
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -129,7 +129,7 @@ expression: value
"rules": [
{
"fullDescription": {
"text": "## What it does\nChecks for unused imports.\n\n## Why is this bad?\nUnused imports add a performance overhead at runtime, and risk creating\nimport cycles. They also increase the cognitive load of reading the code.\n\nIf an import statement is used to check for the availability or existence\nof a module, consider using `importlib.util.find_spec` instead.\n\nIf an import statement is used to re-export a symbol as part of a module's\npublic interface, consider using a \"redundant\" import alias, which\ninstructs Ruff (and other tools) to respect the re-export, and avoid\nmarking it as unused, as in:\n\n```python\nfrom module import member as member\n```\n\nAlternatively, you can use `__all__` to declare a symbol as part of the module's\ninterface, as in:\n\n```python\n# __init__.py\nimport some_module\n\n__all__ = [\"some_module\"]\n```\n\n## Fix safety\n\nFixes to remove unused imports are safe, except in `__init__.py` files.\n\nApplying fixes to `__init__.py` files is currently in preview. The fix offered depends on the\ntype of the unused import. Ruff will suggest a safe fix to export first-party imports with\neither a redundant alias or, if already present in the file, an `__all__` entry. If multiple\n`__all__` declarations are present, Ruff will not offer a fix. Ruff will suggest an unsafe fix\nto remove third-party and standard library imports -- the fix is unsafe because the module's\ninterface changes.\n\nSee [this FAQ section](https://docs.astral.sh/ruff/faq/#how-does-ruff-determine-which-of-my-imports-are-first-party-third-party-etc)\nfor more details on how Ruff\ndetermines whether an import is first or third-party.\n\n## Example\n\n```python\nimport numpy as np # unused import\n\n\ndef area(radius):\n return 3.14 * radius**2\n```\n\nUse instead:\n\n```python\ndef area(radius):\n return 3.14 * radius**2\n```\n\nTo check the availability of a module, use `importlib.util.find_spec`:\n\n```python\nfrom importlib.util import find_spec\n\nif find_spec(\"numpy\") is not None:\n print(\"numpy is installed\")\nelse:\n print(\"numpy is not installed\")\n```\n\n## Options\n- `lint.ignore-init-module-imports`\n- `lint.pyflakes.allowed-unused-imports`\n\n## References\n- [Python documentation: `import`](https://docs.python.org/3/reference/simple_stmts.html#the-import-statement)\n- [Python documentation: `importlib.util.find_spec`](https://docs.python.org/3/library/importlib.html#importlib.util.find_spec)\n- [Typing documentation: interface conventions](https://typing.python.org/en/latest/spec/distributing.html#library-interface-public-and-private-symbols)\n"
"text": "## What it does\nChecks for unused imports.\n\n## Why is this bad?\nUnused imports add a performance overhead at runtime, and risk creating\nimport cycles. They also increase the cognitive load of reading the code.\n\nIf an import statement is used to check for the availability or existence\nof a module, consider using `importlib.util.find_spec` instead.\n\nIf an import statement is used to re-export a symbol as part of a module's\npublic interface, consider using a \"redundant\" import alias, which\ninstructs Ruff (and other tools) to respect the re-export, and avoid\nmarking it as unused, as in:\n\n```python\nfrom module import member as member\n```\n\nAlternatively, you can use `__all__` to declare a symbol as part of the module's\ninterface, as in:\n\n```python\n# __init__.py\nimport some_module\n\n__all__ = [\"some_module\"]\n```\n\n## Preview\nWhen [preview] is enabled (and certain simplifying assumptions\nare met), we analyze all import statements for a given module\nwhen determining whether an import is used, rather than simply\nthe last of these statements. This can result in both different and\nmore import statements being marked as unused.\n\nFor example, if a module consists of\n\n```python\nimport a\nimport a.b\n```\n\nthen both statements are marked as unused under [preview], whereas\nonly the second is marked as unused under stable behavior.\n\nAs another example, if a module consists of\n\n```python\nimport a.b\nimport a\n\na.b.foo()\n```\n\nthen a diagnostic will only be emitted for the first line under [preview],\nwhereas a diagnostic would only be emitted for the second line under\nstable behavior.\n\nNote that this behavior is somewhat subjective and is designed\nto conform to the developer's intuition rather than Python's actual\nexecution. To wit, the statement `import a.b` automatically executes\n`import a`, so in some sense `import a` is _always_ redundant\nin the presence of `import a.b`.\n\n\n## Fix safety\n\nFixes to remove unused imports are safe, except in `__init__.py` files.\n\nApplying fixes to `__init__.py` files is currently in preview. The fix offered depends on the\ntype of the unused import. Ruff will suggest a safe fix to export first-party imports with\neither a redundant alias or, if already present in the file, an `__all__` entry. If multiple\n`__all__` declarations are present, Ruff will not offer a fix. 
Ruff will suggest an unsafe fix\nto remove third-party and standard library imports -- the fix is unsafe because the module's\ninterface changes.\n\nSee [this FAQ section](https://docs.astral.sh/ruff/faq/#how-does-ruff-determine-which-of-my-imports-are-first-party-third-party-etc)\nfor more details on how Ruff\ndetermines whether an import is first or third-party.\n\n## Example\n\n```python\nimport numpy as np # unused import\n\n\ndef area(radius):\n return 3.14 * radius**2\n```\n\nUse instead:\n\n```python\ndef area(radius):\n return 3.14 * radius**2\n```\n\nTo check the availability of a module, use `importlib.util.find_spec`:\n\n```python\nfrom importlib.util import find_spec\n\nif find_spec(\"numpy\") is not None:\n print(\"numpy is installed\")\nelse:\n print(\"numpy is not installed\")\n```\n\n## Options\n- `lint.ignore-init-module-imports`\n- `lint.pyflakes.allowed-unused-imports`\n\n## References\n- [Python documentation: `import`](https://docs.python.org/3/reference/simple_stmts.html#the-import-statement)\n- [Python documentation: `importlib.util.find_spec`](https://docs.python.org/3/library/importlib.html#importlib.util.find_spec)\n- [Typing documentation: interface conventions](https://typing.python.org/en/latest/spec/distributing.html#library-interface-public-and-private-symbols)\n\n[preview]: https://docs.astral.sh/ruff/preview/\n"
},
"help": {
"text": "`{name}` imported but unused; consider using `importlib.util.find_spec` to test for availability"
|
@@ -1,30 +0,0 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/message/text.rs
|
||||
expression: content
|
||||
---
|
||||
F401 `os` imported but unused
|
||||
--> fib.py:1:8
|
||||
|
|
||||
1 | import os
|
||||
| ^^
|
||||
|
|
||||
help: Remove unused import: `os`
|
||||
|
||||
F841 Local variable `x` is assigned to but never used
|
||||
--> fib.py:6:5
|
||||
|
|
||||
4 | def fibonacci(n):
|
||||
5 | """Compute the nth number in the Fibonacci sequence."""
|
||||
6 | x = 1
|
||||
| ^
|
||||
7 | if n == 0:
|
||||
8 | return 0
|
||||
|
|
||||
help: Remove assignment to unused variable `x`
|
||||
|
||||
F821 Undefined name `a`
|
||||
--> undef.py:1:4
|
||||
|
|
||||
1 | if a == 1: pass
|
||||
| ^
|
||||
|
|
|
@@ -1,30 +0,0 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/message/text.rs
|
||||
expression: content
|
||||
---
|
||||
F401 `os` imported but unused
|
||||
--> fib.py:1:8
|
||||
|
|
||||
1 | import os
|
||||
| ^^
|
||||
|
|
||||
help: Remove unused import: `os`
|
||||
|
||||
F841 Local variable `x` is assigned to but never used
|
||||
--> fib.py:6:5
|
||||
|
|
||||
4 | def fibonacci(n):
|
||||
5 | """Compute the nth number in the Fibonacci sequence."""
|
||||
6 | x = 1
|
||||
| ^
|
||||
7 | if n == 0:
|
||||
8 | return 0
|
||||
|
|
||||
help: Remove assignment to unused variable `x`
|
||||
|
||||
F821 Undefined name `a`
|
||||
--> undef.py:1:4
|
||||
|
|
||||
1 | if a == 1: pass
|
||||
| ^
|
||||
|
|
|
@@ -1,30 +0,0 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/message/text.rs
|
||||
expression: content
|
||||
---
|
||||
F401 [*] `os` imported but unused
|
||||
--> fib.py:1:8
|
||||
|
|
||||
1 | import os
|
||||
| ^^
|
||||
|
|
||||
help: Remove unused import: `os`
|
||||
|
||||
F841 [*] Local variable `x` is assigned to but never used
|
||||
--> fib.py:6:5
|
||||
|
|
||||
4 | def fibonacci(n):
|
||||
5 | """Compute the nth number in the Fibonacci sequence."""
|
||||
6 | x = 1
|
||||
| ^
|
||||
7 | if n == 0:
|
||||
8 | return 0
|
||||
|
|
||||
help: Remove assignment to unused variable `x`
|
||||
|
||||
F821 Undefined name `a`
|
||||
--> undef.py:1:4
|
||||
|
|
||||
1 | if a == 1: pass
|
||||
| ^
|
||||
|
|
|
@@ -1,33 +0,0 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/message/text.rs
|
||||
expression: content
|
||||
---
|
||||
F401 [*] `os` imported but unused
|
||||
--> notebook.ipynb:cell 1:2:8
|
||||
|
|
||||
1 | # cell 1
|
||||
2 | import os
|
||||
| ^^
|
||||
|
|
||||
help: Remove unused import: `os`
|
||||
|
||||
F401 [*] `math` imported but unused
|
||||
--> notebook.ipynb:cell 2:2:8
|
||||
|
|
||||
1 | # cell 2
|
||||
2 | import math
|
||||
| ^^^^
|
||||
3 |
|
||||
4 | print('hello world')
|
||||
|
|
||||
help: Remove unused import: `math`
|
||||
|
||||
F841 [*] Local variable `x` is assigned to but never used
|
||||
--> notebook.ipynb:cell 3:4:5
|
||||
|
|
||||
2 | def foo():
|
||||
3 | print()
|
||||
4 | x = 1
|
||||
| ^
|
||||
|
|
||||
help: Remove assignment to unused variable `x`
|
|
@@ -1,23 +0,0 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/message/text.rs
|
||||
expression: content
|
||||
---
|
||||
invalid-syntax: Expected one or more symbol names after import
|
||||
--> syntax_errors.py:1:15
|
||||
|
|
||||
1 | from os import
|
||||
| ^
|
||||
2 |
|
||||
3 | if call(foo
|
||||
|
|
||||
|
||||
invalid-syntax: Expected ')', found newline
|
||||
--> syntax_errors.py:3:12
|
||||
|
|
||||
1 | from os import
|
||||
2 |
|
||||
3 | if call(foo
|
||||
| ^
|
||||
4 | def bar():
|
||||
5 | pass
|
||||
|
|
|
@@ -1,143 +0,0 @@
|
|||
use std::io::Write;
|
||||
|
||||
use ruff_db::diagnostic::{
|
||||
Diagnostic, DiagnosticFormat, DisplayDiagnosticConfig, DisplayDiagnostics,
|
||||
};
|
||||
use ruff_diagnostics::Applicability;
|
||||
|
||||
use crate::message::{Emitter, EmitterContext};
|
||||
|
||||
pub struct TextEmitter {
|
||||
config: DisplayDiagnosticConfig,
|
||||
}
|
||||
|
||||
impl Default for TextEmitter {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
config: DisplayDiagnosticConfig::default()
|
||||
.format(DiagnosticFormat::Concise)
|
||||
.hide_severity(true)
|
||||
.color(!cfg!(test) && colored::control::SHOULD_COLORIZE.should_colorize()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TextEmitter {
|
||||
#[must_use]
|
||||
pub fn with_show_fix_status(mut self, show_fix_status: bool) -> Self {
|
||||
self.config = self.config.show_fix_status(show_fix_status);
|
||||
self
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn with_show_fix_diff(mut self, show_fix_diff: bool) -> Self {
|
||||
self.config = self.config.show_fix_diff(show_fix_diff);
|
||||
self
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn with_show_source(mut self, show_source: bool) -> Self {
|
||||
self.config = self.config.format(if show_source {
|
||||
DiagnosticFormat::Full
|
||||
} else {
|
||||
DiagnosticFormat::Concise
|
||||
});
|
||||
self
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn with_fix_applicability(mut self, applicability: Applicability) -> Self {
|
||||
self.config = self.config.fix_applicability(applicability);
|
||||
self
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn with_preview(mut self, preview: bool) -> Self {
|
||||
self.config = self.config.preview(preview);
|
||||
self
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn with_color(mut self, color: bool) -> Self {
|
||||
self.config = self.config.color(color);
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl Emitter for TextEmitter {
|
||||
fn emit(
|
||||
&mut self,
|
||||
writer: &mut dyn Write,
|
||||
diagnostics: &[Diagnostic],
|
||||
context: &EmitterContext,
|
||||
) -> anyhow::Result<()> {
|
||||
write!(
|
||||
writer,
|
||||
"{}",
|
||||
DisplayDiagnostics::new(context, &self.config, diagnostics)
|
||||
)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use insta::assert_snapshot;
|
||||
use ruff_diagnostics::Applicability;
|
||||
|
||||
use crate::message::TextEmitter;
|
||||
use crate::message::tests::{
|
||||
capture_emitter_notebook_output, capture_emitter_output, create_diagnostics,
|
||||
create_notebook_diagnostics, create_syntax_error_diagnostics,
|
||||
};
|
||||
|
||||
#[test]
|
||||
fn default() {
|
||||
let mut emitter = TextEmitter::default().with_show_source(true);
|
||||
let content = capture_emitter_output(&mut emitter, &create_diagnostics());
|
||||
|
||||
assert_snapshot!(content);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn fix_status() {
|
||||
let mut emitter = TextEmitter::default()
|
||||
.with_show_fix_status(true)
|
||||
.with_show_source(true);
|
||||
let content = capture_emitter_output(&mut emitter, &create_diagnostics());
|
||||
|
||||
assert_snapshot!(content);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn fix_status_unsafe() {
|
||||
let mut emitter = TextEmitter::default()
|
||||
.with_show_fix_status(true)
|
||||
.with_show_source(true)
|
||||
.with_fix_applicability(Applicability::Unsafe);
|
||||
let content = capture_emitter_output(&mut emitter, &create_diagnostics());
|
||||
|
||||
assert_snapshot!(content);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn notebook_output() {
|
||||
let mut emitter = TextEmitter::default()
|
||||
.with_show_fix_status(true)
|
||||
.with_show_source(true)
|
||||
.with_fix_applicability(Applicability::Unsafe);
|
||||
let (messages, notebook_indexes) = create_notebook_diagnostics();
|
||||
let content = capture_emitter_notebook_output(&mut emitter, &messages, ¬ebook_indexes);
|
||||
|
||||
assert_snapshot!(content);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn syntax_errors() {
|
||||
let mut emitter = TextEmitter::default().with_show_source(true);
|
||||
let content = capture_emitter_output(&mut emitter, &create_syntax_error_diagnostics());
|
||||
|
||||
assert_snapshot!(content);
|
||||
}
|
||||
}
|
@@ -235,3 +235,8 @@ pub(crate) const fn is_a003_class_scope_shadowing_expansion_enabled(
) -> bool {
settings.preview.is_enabled()
}

// https://github.com/astral-sh/ruff/pull/20200
pub(crate) const fn is_refined_submodule_import_match_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
}
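The new gate follows the same shape as the existing preview gates: a named `const fn` over `LinterSettings` that rules branch on instead of reading `settings.preview` directly. A self-contained sketch of that pattern with stand-in types; everything below is illustrative, only the gate's shape mirrors the hunk above:

```rust
// Stand-in types: the real `LinterSettings`/`PreviewMode` live in ruff_linter's
// `crate::settings`; this sketch only mirrors the shape of the preview gate.
#[derive(Clone, Copy)]
pub enum PreviewMode {
    Disabled,
    Enabled,
}

impl PreviewMode {
    pub const fn is_enabled(self) -> bool {
        matches!(self, PreviewMode::Enabled)
    }
}

pub struct LinterSettings {
    pub preview: PreviewMode,
}

// https://github.com/astral-sh/ruff/pull/20200
pub const fn is_refined_submodule_import_match_enabled(settings: &LinterSettings) -> bool {
    settings.preview.is_enabled()
}

fn main() {
    let settings = LinterSettings { preview: PreviewMode::Enabled };
    // Rules consult the named gate, so preview-only behavior stays easy to find.
    assert!(is_refined_submodule_import_match_enabled(&settings));
}
```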
@@ -29,7 +29,7 @@ mod tests {
use crate::settings::{LinterSettings, flags};
use crate::source_kind::SourceKind;
use crate::test::{test_contents, test_path, test_snippet};
use crate::{Locator, assert_diagnostics, directives};
use crate::{Locator, assert_diagnostics, assert_diagnostics_diff, directives};

#[test_case(Rule::UnusedImport, Path::new("F401_0.py"))]
#[test_case(Rule::UnusedImport, Path::new("F401_1.py"))]
@@ -392,6 +392,154 @@ mod tests {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_0.py"))]
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_1.py"))]
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_2.py"))]
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_3.py"))]
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_4.py"))]
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_5.py"))]
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_6.py"))]
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_7.py"))]
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_8.py"))]
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_9.py"))]
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_10.py"))]
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_11.py"))]
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_12.py"))]
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_13.py"))]
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_14.py"))]
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_15.py"))]
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_16.py"))]
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_17.py"))]
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_18.py"))]
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_19.py"))]
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_20.py"))]
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_21.py"))]
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_22.py"))]
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_23.py"))]
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_32.py"))]
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_34.py"))]
|
||||
#[test_case(Rule::UnusedImport, Path::new("F401_35.py"))]
|
||||
fn f401_preview_refined_submodule_handling_diffs(rule_code: Rule, path: &Path) -> Result<()> {
|
||||
let snapshot = format!("preview_diff__{}", path.to_string_lossy());
|
||||
assert_diagnostics_diff!(
|
||||
snapshot,
|
||||
Path::new("pyflakes").join(path).as_path(),
|
||||
&LinterSettings::for_rule(rule_code),
|
||||
&LinterSettings {
|
||||
preview: PreviewMode::Enabled,
|
||||
..LinterSettings::for_rule(rule_code)
|
||||
}
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test_case(
|
||||
r"
|
||||
import a
|
||||
import a.b
|
||||
import a.c",
|
||||
"f401_multiple_unused_submodules"
|
||||
)]
|
||||
#[test_case(
|
||||
r"
|
||||
import a
|
||||
import a.b
|
||||
a.foo()",
|
||||
"f401_use_top_member"
|
||||
)]
|
||||
#[test_case(
|
||||
r"
|
||||
import a
|
||||
import a.b
|
||||
a.foo()
|
||||
a.bar()",
|
||||
"f401_use_top_member_twice"
|
||||
)]
|
||||
#[test_case(
|
||||
r"
|
||||
# reverts to stable behavior - used between imports
|
||||
import a
|
||||
a.foo()
|
||||
import a.b",
|
||||
"f401_use_top_member_before_second_import"
|
||||
)]
|
||||
#[test_case(
|
||||
r"
|
||||
# reverts to stable behavior - used between imports
|
||||
import a
|
||||
a.foo()
|
||||
a = 1
|
||||
import a.b",
|
||||
"f401_use_top_member_and_redefine_before_second_import"
|
||||
)]
|
||||
#[test_case(
|
||||
r"
|
||||
# reverts to stable behavior - used between imports
|
||||
import a
|
||||
a.foo()
|
||||
import a.b
|
||||
a = 1",
|
||||
"f401_use_top_member_then_import_then_redefine"
|
||||
)]
|
||||
#[test_case(
|
||||
r#"
|
||||
import a
|
||||
import a.b
|
||||
__all__ = ["a"]"#,
|
||||
"f401_use_in_dunder_all"
|
||||
)]
|
||||
#[test_case(
|
||||
r"
|
||||
import a.c
|
||||
import a.b
|
||||
a.foo()",
|
||||
"f401_import_submodules_but_use_top_level"
|
||||
)]
|
||||
#[test_case(
|
||||
r"
|
||||
import a.c
|
||||
import a.b.d
|
||||
a.foo()",
|
||||
"f401_import_submodules_different_lengths_but_use_top_level"
|
||||
)]
|
||||
#[test_case(
|
||||
r"
|
||||
# refined logic only applied _within_ scope
|
||||
import a
|
||||
def foo():
|
||||
import a.b
|
||||
a.foo()",
|
||||
"f401_import_submodules_in_function_scope"
|
||||
)]
|
||||
#[test_case(
|
||||
r"
|
||||
# reverts to stable behavior - used between bindings
|
||||
import a
|
||||
a.b
|
||||
import a.b",
|
||||
"f401_use_in_between_imports"
|
||||
)]
|
||||
#[test_case(
|
||||
r"
|
||||
# reverts to stable behavior - used between bindings
|
||||
import a.b
|
||||
a
|
||||
import a",
|
||||
"f401_use_in_between_imports"
|
||||
)]
|
||||
fn f401_preview_refined_submodule_handling(contents: &str, snapshot: &str) {
|
||||
let diagnostics = test_contents(
|
||||
&SourceKind::Python(dedent(contents).to_string()),
|
||||
Path::new("f401_preview_submodule.py"),
|
||||
&LinterSettings {
|
||||
preview: PreviewMode::Enabled,
|
||||
..LinterSettings::for_rule(Rule::UnusedImport)
|
||||
},
|
||||
)
|
||||
.0;
|
||||
assert_diagnostics!(snapshot, diagnostics);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn f841_dummy_variable_rgx() -> Result<()> {
|
||||
let diagnostics = test_path(
|
||||
|
|
@@ -5,19 +5,22 @@ use anyhow::{Result, anyhow, bail};
use std::collections::BTreeMap;

use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::name::QualifiedName;
use ruff_python_ast::name::{QualifiedName, QualifiedNameBuilder};
use ruff_python_ast::{self as ast, Stmt};
use ruff_python_semantic::{
AnyImport, BindingKind, Exceptions, Imported, NodeId, Scope, ScopeId, SemanticModel,
SubmoduleImport,
AnyImport, Binding, BindingFlags, BindingId, BindingKind, Exceptions, Imported, NodeId, Scope,
ScopeId, SemanticModel, SubmoduleImport,
};
use ruff_text_size::{Ranged, TextRange};

use crate::checkers::ast::Checker;
use crate::fix;
use crate::preview::is_dunder_init_fix_unused_import_enabled;
use crate::preview::{
is_dunder_init_fix_unused_import_enabled, is_refined_submodule_import_match_enabled,
};
use crate::registry::Rule;
use crate::rules::{isort, isort::ImportSection, isort::ImportType};
use crate::settings::LinterSettings;
use crate::{Applicability, Fix, FixAvailability, Violation};

/// ## What it does
@ -49,6 +52,43 @@ use crate::{Applicability, Fix, FixAvailability, Violation};
|
|||
/// __all__ = ["some_module"]
|
||||
/// ```
|
||||
///
|
||||
/// ## Preview
|
||||
/// When [preview] is enabled (and certain simplifying assumptions
|
||||
/// are met), we analyze all import statements that bind a given
/// module name when determining whether an import is used, rather than
/// only the last such statement. This can result in both different and
/// more import statements being marked as unused.
|
||||
///
|
||||
/// For example, if a module consists of
|
||||
///
|
||||
/// ```python
|
||||
/// import a
|
||||
/// import a.b
|
||||
/// ```
|
||||
///
|
||||
/// then both statements are marked as unused under [preview], whereas
|
||||
/// only the second is marked as unused under stable behavior.
|
||||
///
|
||||
/// As another example, if a module consists of
|
||||
///
|
||||
/// ```python
|
||||
/// import a.b
|
||||
/// import a
|
||||
///
|
||||
/// a.b.foo()
|
||||
/// ```
|
||||
///
|
||||
/// then a diagnostic is only emitted for the first line under [preview],
/// whereas a diagnostic is only emitted for the second line under
|
||||
/// stable behavior.
|
||||
///
|
||||
/// Note that this behavior is somewhat subjective and is designed
|
||||
/// to conform to the developer's intuition rather than Python's actual
|
||||
/// execution. To wit, the statement `import a.b` automatically executes
|
||||
/// `import a`, so in some sense `import a` is _always_ redundant
|
||||
/// in the presence of `import a.b`.
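///
/// A minimal illustration of that point (assuming a package `a` with a
/// submodule `b`):
///
/// ```python
/// import a.b
///
/// a    # the name `a` is bound even though `import a` never appears
/// a.b  # the submodule is reachable as an attribute of `a`
/// ```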
|
||||
///
|
||||
///
|
||||
/// ## Fix safety
|
||||
///
|
||||
/// Fixes to remove unused imports are safe, except in `__init__.py` files.
|
||||
|
@ -100,6 +140,8 @@ use crate::{Applicability, Fix, FixAvailability, Violation};
|
|||
/// - [Python documentation: `import`](https://docs.python.org/3/reference/simple_stmts.html#the-import-statement)
|
||||
/// - [Python documentation: `importlib.util.find_spec`](https://docs.python.org/3/library/importlib.html#importlib.util.find_spec)
|
||||
/// - [Typing documentation: interface conventions](https://typing.python.org/en/latest/spec/distributing.html#library-interface-public-and-private-symbols)
|
||||
///
|
||||
/// [preview]: https://docs.astral.sh/ruff/preview/
|
||||
#[derive(ViolationMetadata)]
|
||||
pub(crate) struct UnusedImport {
|
||||
/// Qualified name of the import
|
||||
|
@ -284,17 +326,7 @@ pub(crate) fn unused_import(checker: &Checker, scope: &Scope) {
|
|||
let mut unused: BTreeMap<(NodeId, Exceptions), Vec<ImportBinding>> = BTreeMap::default();
|
||||
let mut ignored: BTreeMap<(NodeId, Exceptions), Vec<ImportBinding>> = BTreeMap::default();
|
||||
|
||||
for binding_id in scope.binding_ids() {
|
||||
let binding = checker.semantic().binding(binding_id);
|
||||
|
||||
if binding.is_used()
|
||||
|| binding.is_explicit_export()
|
||||
|| binding.is_nonlocal()
|
||||
|| binding.is_global()
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
for binding in unused_imports_in_scope(checker.semantic(), scope, checker.settings()) {
|
||||
let Some(import) = binding.as_any_import() else {
|
||||
continue;
|
||||
};
|
||||
|
@ -586,3 +618,302 @@ fn fix_by_reexporting<'a>(
|
|||
let isolation = Checker::isolation(checker.semantic().parent_statement_id(node_id));
|
||||
Ok(Fix::safe_edits(head, tail).isolate(isolation))
|
||||
}
|
||||
|
||||
/// Returns an iterator over bindings to import statements that appear unused.
|
||||
///
|
||||
/// The stable behavior is to return those bindings to imports
|
||||
/// satisfying the following properties:
|
||||
///
|
||||
/// - they are not shadowed
|
||||
/// - they are not `global`, not `nonlocal`, and not explicit exports (i.e. `import foo as foo`)
|
||||
/// - they have no references, according to the semantic model
|
||||
///
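/// A small illustration of these properties (hypothetical module):
///
/// ```python
/// import os            # referenced below, so not returned
/// import sys as sys    # explicit export, so not returned
/// import json          # unreferenced, so returned
///
/// os.getcwd()
/// ```
///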
|
||||
/// Under preview, there is a more refined analysis performed
|
||||
/// in the case where all bindings shadowed by a given import
|
||||
/// binding (including the binding itself) are of a simple form:
|
||||
/// they are required to be un-aliased imports or submodule imports.
|
||||
///
|
||||
/// This alternative analysis is described in the documentation for
|
||||
/// [`unused_imports_from_binding`].
|
||||
fn unused_imports_in_scope<'a, 'b>(
|
||||
semantic: &'a SemanticModel<'b>,
|
||||
scope: &'a Scope,
|
||||
settings: &'a LinterSettings,
|
||||
) -> impl Iterator<Item = &'a Binding<'b>> {
|
||||
scope
|
||||
.binding_ids()
|
||||
.map(|id| (id, semantic.binding(id)))
|
||||
.filter(|(_, bdg)| {
|
||||
matches!(
|
||||
bdg.kind,
|
||||
BindingKind::Import(_)
|
||||
| BindingKind::FromImport(_)
|
||||
| BindingKind::SubmoduleImport(_)
|
||||
)
|
||||
})
|
||||
.filter(|(_, bdg)| !bdg.is_global() && !bdg.is_nonlocal() && !bdg.is_explicit_export())
|
||||
.flat_map(|(id, bdg)| {
|
||||
if is_refined_submodule_import_match_enabled(settings)
|
||||
// No need to apply refined logic if there is only a single binding
|
||||
&& scope.shadowed_bindings(id).nth(1).is_some()
|
||||
// Only apply the new logic in certain situations to avoid
|
||||
// complexity, false positives, and intersection with
|
||||
// `redefined-while-unused` (`F811`).
|
||||
&& has_simple_shadowed_bindings(scope, id, semantic)
|
||||
{
|
||||
unused_imports_from_binding(semantic, id, scope)
|
||||
} else if bdg.is_used() {
|
||||
vec![]
|
||||
} else {
|
||||
vec![bdg]
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/// Returns a `Vec` of bindings to unused import statements that
|
||||
/// are shadowed by a given binding.
|
||||
///
|
||||
/// This is best explained by example. So suppose we have:
|
||||
///
|
||||
/// ```python
|
||||
/// import a
|
||||
/// import a.b
|
||||
/// import a.b.c
|
||||
///
|
||||
/// __all__ = ["a"]
|
||||
///
|
||||
/// a.b.foo()
|
||||
/// ```
|
||||
///
|
||||
/// As of 2025-09-25, Ruff's semantic model, upon visiting
|
||||
/// the whole module, will have a single live binding for
|
||||
/// the symbol `a` that points to the line `import a.b.c`,
|
||||
/// and the remaining two import bindings are considered shadowed
|
||||
/// by the last.
|
||||
///
|
||||
/// This function expects to receive the `id`
|
||||
/// for the live binding and will begin by collecting
|
||||
/// all bindings shadowed by the given one - i.e. all
|
||||
/// the different import statements binding the symbol `a`.
|
||||
/// We iterate over references to this
|
||||
/// module and decide (somewhat subjectively) which
|
||||
/// import statement the user "intends" to reference. To that end,
|
||||
/// to each reference we attempt to build a [`QualifiedName`]
|
||||
/// corresponding to an iterated attribute access (e.g. `a.b.foo`).
|
||||
/// We then determine the closest matching import statement to that
|
||||
/// qualified name, and mark it as used.
|
||||
///
|
||||
/// In the present example, the qualified name associated to the
|
||||
/// reference from the dunder all export is `"a"` and the qualified
|
||||
/// name associated to the reference in the last line is `"a.b.foo"`.
|
||||
/// The closest matches are `import a` and `import a.b`, respectively,
|
||||
/// leaving `import a.b.c` unused.
|
||||
///
|
||||
/// For a precise definition of "closest match" see [`best_match`]
|
||||
/// and [`rank_matches`].
|
||||
///
|
||||
/// Note: if any reference comes from something other than
|
||||
/// a `Name` or a dunder all expression, then we return just
|
||||
/// the original binding, thus reverting the stable behavior.
|
||||
fn unused_imports_from_binding<'a, 'b>(
|
||||
semantic: &'a SemanticModel<'b>,
|
||||
id: BindingId,
|
||||
scope: &'a Scope,
|
||||
) -> Vec<&'a Binding<'b>> {
|
||||
let mut marked = MarkedBindings::from_binding_id(semantic, id, scope);
|
||||
|
||||
let binding = semantic.binding(id);
|
||||
|
||||
// ensure we only do this once
|
||||
let mut marked_dunder_all = false;
|
||||
|
||||
for ref_id in binding.references() {
|
||||
let resolved_reference = semantic.reference(ref_id);
|
||||
if !marked_dunder_all && resolved_reference.in_dunder_all_definition() {
|
||||
let first = *binding
|
||||
.as_any_import()
|
||||
.expect("binding to be import binding since current function called after restricting to these in `unused_imports_in_scope`")
|
||||
.qualified_name()
|
||||
.segments().first().expect("import binding to have nonempty qualified name");
|
||||
mark_uses_of_qualified_name(&mut marked, &QualifiedName::user_defined(first));
|
||||
marked_dunder_all = true;
|
||||
continue;
|
||||
}
|
||||
let Some(expr_id) = resolved_reference.expression_id() else {
|
||||
// If there is some other kind of reference, abandon
|
||||
// the refined approach for the usual one
|
||||
return vec![binding];
|
||||
};
|
||||
let Some(prototype) = expand_to_qualified_name_attribute(semantic, expr_id) else {
|
||||
return vec![binding];
|
||||
};
|
||||
|
||||
mark_uses_of_qualified_name(&mut marked, &prototype);
|
||||
}
|
||||
|
||||
marked.into_unused()
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct MarkedBindings<'a, 'b> {
|
||||
bindings: Vec<&'a Binding<'b>>,
|
||||
used: Vec<bool>,
|
||||
}
|
||||
|
||||
impl<'a, 'b> MarkedBindings<'a, 'b> {
|
||||
fn from_binding_id(semantic: &'a SemanticModel<'b>, id: BindingId, scope: &'a Scope) -> Self {
|
||||
let bindings: Vec<_> = scope
|
||||
.shadowed_bindings(id)
|
||||
.map(|id| semantic.binding(id))
|
||||
.collect();
|
||||
|
||||
Self {
|
||||
used: vec![false; bindings.len()],
|
||||
bindings,
|
||||
}
|
||||
}
|
||||
|
||||
fn into_unused(self) -> Vec<&'a Binding<'b>> {
|
||||
self.bindings
|
||||
.into_iter()
|
||||
.zip(self.used)
|
||||
.filter_map(|(bdg, is_used)| (!is_used).then_some(bdg))
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn iter_mut(&mut self) -> impl Iterator<Item = (&'a Binding<'b>, &mut bool)> {
|
||||
self.bindings.iter().copied().zip(self.used.iter_mut())
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `Some` [`QualifiedName`] delineating the path for the
|
||||
/// maximal [`ExprName`] or [`ExprAttribute`] containing the expression
|
||||
/// associated to the given [`NodeId`], or `None` otherwise.
|
||||
///
|
||||
/// For example, if the `expr_id` points to `a` in `a.b.c.foo()`
|
||||
/// then the qualified name would have segments [`a`, `b`, `c`, `foo`].
|
||||
fn expand_to_qualified_name_attribute<'b>(
|
||||
semantic: &SemanticModel<'b>,
|
||||
expr_id: NodeId,
|
||||
) -> Option<QualifiedName<'b>> {
|
||||
let mut builder = QualifiedNameBuilder::with_capacity(16);
|
||||
|
||||
let mut expr_id = expr_id;
|
||||
|
||||
let expr = semantic.expression(expr_id)?;
|
||||
|
||||
let name = expr.as_name_expr()?;
|
||||
|
||||
builder.push(&name.id);
|
||||
|
||||
while let Some(node_id) = semantic.parent_expression_id(expr_id) {
|
||||
let Some(expr) = semantic.expression(node_id) else {
|
||||
break;
|
||||
};
|
||||
let Some(expr_attr) = expr.as_attribute_expr() else {
|
||||
break;
|
||||
};
|
||||
builder.push(expr_attr.attr.as_str());
|
||||
expr_id = node_id;
|
||||
}
|
||||
Some(builder.build())
|
||||
}
|
||||
|
||||
fn mark_uses_of_qualified_name(marked: &mut MarkedBindings, prototype: &QualifiedName) {
|
||||
let Some(best) = best_match(&marked.bindings, prototype) else {
|
||||
return;
|
||||
};
|
||||
|
||||
let Some(best_import) = best.as_any_import() else {
|
||||
return;
|
||||
};
|
||||
|
||||
let best_name = best_import.qualified_name();
|
||||
|
||||
// We loop through all bindings in case there are repeated instances
|
||||
// of the `best_name`. For example, if we have
|
||||
//
|
||||
// ```python
|
||||
// import a
|
||||
// import a
|
||||
//
|
||||
// a.foo()
|
||||
// ```
|
||||
//
|
||||
// then we want to mark both import statements as used. It
|
||||
// is the job of `redefined-while-unused` (`F811`) to catch
|
||||
// the repeated binding in this case.
|
||||
for (binding, is_used) in marked.iter_mut() {
|
||||
if *is_used {
|
||||
continue;
|
||||
}
|
||||
|
||||
if binding
|
||||
.as_any_import()
|
||||
.is_some_and(|imp| imp.qualified_name() == best_name)
|
||||
{
|
||||
*is_used = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns a pair whose first component is the length of the longest
/// prefix shared between the qualified name of the import binding and
/// the `prototype`, and whose second component is the length of that
/// qualified name (i.e. its number of path segments), regarded as
/// ordered in reverse.
|
||||
///
|
||||
/// For example, if the binding corresponds to `import a.b.c`
|
||||
/// and the prototype to `a.b.foo()`, then the function returns
|
||||
/// `(2, std::cmp::Reverse(3))`.
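///
/// For comparison, under the same prototype the binding for `import a.b`
/// would rank as `(2, std::cmp::Reverse(2))` and the binding for
/// `import a` as `(1, std::cmp::Reverse(1))`; since the pair is compared
/// lexicographically, `import a.b` ranks highest of the three.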
|
||||
fn rank_matches(binding: &Binding, prototype: &QualifiedName) -> (usize, std::cmp::Reverse<usize>) {
|
||||
let Some(import) = binding.as_any_import() else {
|
||||
unreachable!()
|
||||
};
|
||||
let qname = import.qualified_name();
|
||||
let left = qname
|
||||
.segments()
|
||||
.iter()
|
||||
.zip(prototype.segments())
|
||||
.take_while(|(x, y)| x == y)
|
||||
.count();
|
||||
(left, std::cmp::Reverse(qname.segments().len()))
|
||||
}
|
||||
|
||||
/// Returns the import binding that shares the longest prefix
|
||||
/// with the `prototype` and is of minimal length amongst these.
|
||||
///
|
||||
/// See also [`rank_matches`].
|
||||
fn best_match<'a, 'b>(
|
||||
bindings: &Vec<&'a Binding<'b>>,
|
||||
prototype: &QualifiedName,
|
||||
) -> Option<&'a Binding<'b>> {
|
||||
bindings
|
||||
.iter()
|
||||
.copied()
|
||||
.max_by_key(|binding| rank_matches(binding, prototype))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn has_simple_shadowed_bindings(scope: &Scope, id: BindingId, semantic: &SemanticModel) -> bool {
|
||||
scope.shadowed_bindings(id).enumerate().all(|(i, shadow)| {
|
||||
let shadowed_binding = semantic.binding(shadow);
|
||||
// Bail if one of the shadowed bindings is
|
||||
// used before the last live binding. This is
|
||||
// to avoid situations like this:
|
||||
//
|
||||
// ```
|
||||
// import a
|
||||
// a.b
|
||||
// import a.b
|
||||
// ```
|
||||
if i > 0 && shadowed_binding.is_used() {
|
||||
return false;
|
||||
}
|
||||
matches!(
|
||||
shadowed_binding.kind,
|
||||
BindingKind::Import(_) | BindingKind::SubmoduleImport(_)
|
||||
) && !shadowed_binding.flags.contains(BindingFlags::ALIAS)
|
||||
})
|
||||
}
|
||||
|
|
|
@ -0,0 +1,16 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
F401 [*] `a.b` imported but unused
|
||||
--> f401_preview_submodule.py:3:8
|
||||
|
|
||||
2 | import a.c
|
||||
3 | import a.b
|
||||
| ^^^
|
||||
4 | a.foo()
|
||||
|
|
||||
help: Remove unused import: `a.b`
|
||||
1 |
|
||||
2 | import a.c
|
||||
- import a.b
|
||||
3 | a.foo()
|
|
@ -0,0 +1,16 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
F401 [*] `a.b.d` imported but unused
|
||||
--> f401_preview_submodule.py:3:8
|
||||
|
|
||||
2 | import a.c
|
||||
3 | import a.b.d
|
||||
| ^^^^^
|
||||
4 | a.foo()
|
||||
|
|
||||
help: Remove unused import: `a.b.d`
|
||||
1 |
|
||||
2 | import a.c
|
||||
- import a.b.d
|
||||
3 | a.foo()
|
|
@ -0,0 +1,19 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
F401 [*] `a` imported but unused
|
||||
--> f401_preview_submodule.py:3:8
|
||||
|
|
||||
2 | # refined logic only applied _within_ scope
|
||||
3 | import a
|
||||
| ^
|
||||
4 | def foo():
|
||||
5 | import a.b
|
||||
|
|
||||
help: Remove unused import: `a`
|
||||
1 |
|
||||
2 | # refined logic only applied _within_ scope
|
||||
- import a
|
||||
3 | def foo():
|
||||
4 | import a.b
|
||||
5 | a.foo()
|
|
@ -0,0 +1,44 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
F401 [*] `a` imported but unused
|
||||
--> f401_preview_submodule.py:2:8
|
||||
|
|
||||
2 | import a
|
||||
| ^
|
||||
3 | import a.b
|
||||
4 | import a.c
|
||||
|
|
||||
help: Remove unused import: `a`
|
||||
1 |
|
||||
- import a
|
||||
2 | import a.b
|
||||
3 | import a.c
|
||||
|
||||
F401 [*] `a.b` imported but unused
|
||||
--> f401_preview_submodule.py:3:8
|
||||
|
|
||||
2 | import a
|
||||
3 | import a.b
|
||||
| ^^^
|
||||
4 | import a.c
|
||||
|
|
||||
help: Remove unused import: `a.b`
|
||||
1 |
|
||||
2 | import a
|
||||
- import a.b
|
||||
3 | import a.c
|
||||
|
||||
F401 [*] `a.c` imported but unused
|
||||
--> f401_preview_submodule.py:4:8
|
||||
|
|
||||
2 | import a
|
||||
3 | import a.b
|
||||
4 | import a.c
|
||||
| ^^^
|
||||
|
|
||||
help: Remove unused import: `a.c`
|
||||
1 |
|
||||
2 | import a
|
||||
3 | import a.b
|
||||
- import a.c
|
|
@ -0,0 +1,4 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
F401 [*] `a.b` imported but unused
|
||||
--> f401_preview_submodule.py:3:8
|
||||
|
|
||||
2 | import a
|
||||
3 | import a.b
|
||||
| ^^^
|
||||
4 | __all__ = ["a"]
|
||||
|
|
||||
help: Remove unused import: `a.b`
|
||||
1 |
|
||||
2 | import a
|
||||
- import a.b
|
||||
3 | __all__ = ["a"]
|
|
@ -0,0 +1,16 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
F401 [*] `a.b` imported but unused
|
||||
--> f401_preview_submodule.py:3:8
|
||||
|
|
||||
2 | import a
|
||||
3 | import a.b
|
||||
| ^^^
|
||||
4 | a.foo()
|
||||
|
|
||||
help: Remove unused import: `a.b`
|
||||
1 |
|
||||
2 | import a
|
||||
- import a.b
|
||||
3 | a.foo()
|
|
@ -0,0 +1,4 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
|
|
@ -0,0 +1,4 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
|
|
@ -0,0 +1,4 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
|
|
@ -0,0 +1,18 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
F401 [*] `a.b` imported but unused
|
||||
--> f401_preview_submodule.py:3:8
|
||||
|
|
||||
2 | import a
|
||||
3 | import a.b
|
||||
| ^^^
|
||||
4 | a.foo()
|
||||
5 | a.bar()
|
||||
|
|
||||
help: Remove unused import: `a.b`
|
||||
1 |
|
||||
2 | import a
|
||||
- import a.b
|
||||
3 | a.foo()
|
||||
4 | a.bar()
|
|
@ -0,0 +1,50 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
--- Linter settings ---
|
||||
-linter.preview = disabled
|
||||
+linter.preview = enabled
|
||||
|
||||
--- Summary ---
|
||||
Removed: 0
|
||||
Added: 2
|
||||
|
||||
--- Added ---
|
||||
F401 [*] `multiprocessing.process` imported but unused
|
||||
--> F401_0.py:10:8
|
||||
|
|
||||
8 | )
|
||||
9 | import multiprocessing.pool
|
||||
10 | import multiprocessing.process
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^
|
||||
11 | import logging.config
|
||||
12 | import logging.handlers
|
||||
|
|
||||
help: Remove unused import: `multiprocessing.process`
|
||||
7 | namedtuple,
|
||||
8 | )
|
||||
9 | import multiprocessing.pool
|
||||
- import multiprocessing.process
|
||||
10 | import logging.config
|
||||
11 | import logging.handlers
|
||||
12 | from typing import (
|
||||
|
||||
|
||||
F401 [*] `logging.config` imported but unused
|
||||
--> F401_0.py:11:8
|
||||
|
|
||||
9 | import multiprocessing.pool
|
||||
10 | import multiprocessing.process
|
||||
11 | import logging.config
|
||||
| ^^^^^^^^^^^^^^
|
||||
12 | import logging.handlers
|
||||
13 | from typing import (
|
||||
|
|
||||
help: Remove unused import: `logging.config`
|
||||
8 | )
|
||||
9 | import multiprocessing.pool
|
||||
10 | import multiprocessing.process
|
||||
- import logging.config
|
||||
11 | import logging.handlers
|
||||
12 | from typing import (
|
||||
13 | TYPE_CHECKING,
|
|
@ -0,0 +1,10 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
--- Linter settings ---
|
||||
-linter.preview = disabled
|
||||
+linter.preview = enabled
|
||||
|
||||
--- Summary ---
|
||||
Removed: 0
|
||||
Added: 0
|
|
@ -0,0 +1,10 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
--- Linter settings ---
|
||||
-linter.preview = disabled
|
||||
+linter.preview = enabled
|
||||
|
||||
--- Summary ---
|
||||
Removed: 0
|
||||
Added: 0
|
|
@ -0,0 +1,10 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
--- Linter settings ---
|
||||
-linter.preview = disabled
|
||||
+linter.preview = enabled
|
||||
|
||||
--- Summary ---
|
||||
Removed: 0
|
||||
Added: 0
|
|
@ -0,0 +1,10 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
--- Linter settings ---
|
||||
-linter.preview = disabled
|
||||
+linter.preview = enabled
|
||||
|
||||
--- Summary ---
|
||||
Removed: 0
|
||||
Added: 0
|
|
@ -0,0 +1,10 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
--- Linter settings ---
|
||||
-linter.preview = disabled
|
||||
+linter.preview = enabled
|
||||
|
||||
--- Summary ---
|
||||
Removed: 0
|
||||
Added: 0
|
|
@ -0,0 +1,10 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
--- Linter settings ---
|
||||
-linter.preview = disabled
|
||||
+linter.preview = enabled
|
||||
|
||||
--- Summary ---
|
||||
Removed: 0
|
||||
Added: 0
|
|
@ -0,0 +1,10 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
--- Linter settings ---
|
||||
-linter.preview = disabled
|
||||
+linter.preview = enabled
|
||||
|
||||
--- Summary ---
|
||||
Removed: 0
|
||||
Added: 0
|
|
@ -0,0 +1,10 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
--- Linter settings ---
|
||||
-linter.preview = disabled
|
||||
+linter.preview = enabled
|
||||
|
||||
--- Summary ---
|
||||
Removed: 0
|
||||
Added: 0
|
|
@ -0,0 +1,10 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
--- Linter settings ---
|
||||
-linter.preview = disabled
|
||||
+linter.preview = enabled
|
||||
|
||||
--- Summary ---
|
||||
Removed: 0
|
||||
Added: 0
|
|
@ -0,0 +1,10 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
--- Linter settings ---
|
||||
-linter.preview = disabled
|
||||
+linter.preview = enabled
|
||||
|
||||
--- Summary ---
|
||||
Removed: 0
|
||||
Added: 0
|
|
@ -0,0 +1,10 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
--- Linter settings ---
|
||||
-linter.preview = disabled
|
||||
+linter.preview = enabled
|
||||
|
||||
--- Summary ---
|
||||
Removed: 0
|
||||
Added: 0
|
|
@ -0,0 +1,10 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
--- Linter settings ---
|
||||
-linter.preview = disabled
|
||||
+linter.preview = enabled
|
||||
|
||||
--- Summary ---
|
||||
Removed: 0
|
||||
Added: 0
|
|
@ -0,0 +1,10 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
--- Linter settings ---
|
||||
-linter.preview = disabled
|
||||
+linter.preview = enabled
|
||||
|
||||
--- Summary ---
|
||||
Removed: 0
|
||||
Added: 0
|
|
@ -0,0 +1,10 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
--- Linter settings ---
|
||||
-linter.preview = disabled
|
||||
+linter.preview = enabled
|
||||
|
||||
--- Summary ---
|
||||
Removed: 0
|
||||
Added: 0
|
|
@ -0,0 +1,10 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
--- Linter settings ---
|
||||
-linter.preview = disabled
|
||||
+linter.preview = enabled
|
||||
|
||||
--- Summary ---
|
||||
Removed: 0
|
||||
Added: 0
|
|
@ -0,0 +1,10 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
--- Linter settings ---
|
||||
-linter.preview = disabled
|
||||
+linter.preview = enabled
|
||||
|
||||
--- Summary ---
|
||||
Removed: 0
|
||||
Added: 0
|
|
@ -0,0 +1,10 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
--- Linter settings ---
|
||||
-linter.preview = disabled
|
||||
+linter.preview = enabled
|
||||
|
||||
--- Summary ---
|
||||
Removed: 0
|
||||
Added: 0
|
|
@ -0,0 +1,10 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
--- Linter settings ---
|
||||
-linter.preview = disabled
|
||||
+linter.preview = enabled
|
||||
|
||||
--- Summary ---
|
||||
Removed: 0
|
||||
Added: 0
|
|
@ -0,0 +1,10 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
--- Linter settings ---
|
||||
-linter.preview = disabled
|
||||
+linter.preview = enabled
|
||||
|
||||
--- Summary ---
|
||||
Removed: 0
|
||||
Added: 0
|
|
@ -0,0 +1,10 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
--- Linter settings ---
|
||||
-linter.preview = disabled
|
||||
+linter.preview = enabled
|
||||
|
||||
--- Summary ---
|
||||
Removed: 0
|
||||
Added: 0
|
|
@ -0,0 +1,10 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
--- Linter settings ---
|
||||
-linter.preview = disabled
|
||||
+linter.preview = enabled
|
||||
|
||||
--- Summary ---
|
||||
Removed: 0
|
||||
Added: 0
|
|
@ -0,0 +1,10 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
--- Linter settings ---
|
||||
-linter.preview = disabled
|
||||
+linter.preview = enabled
|
||||
|
||||
--- Summary ---
|
||||
Removed: 0
|
||||
Added: 0
|
|
@ -0,0 +1,10 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
--- Linter settings ---
|
||||
-linter.preview = disabled
|
||||
+linter.preview = enabled
|
||||
|
||||
--- Summary ---
|
||||
Removed: 0
|
||||
Added: 0
|
|
@ -0,0 +1,10 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
--- Linter settings ---
|
||||
-linter.preview = disabled
|
||||
+linter.preview = enabled
|
||||
|
||||
--- Summary ---
|
||||
Removed: 0
|
||||
Added: 0
|
|
@ -0,0 +1,10 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
--- Linter settings ---
|
||||
-linter.preview = disabled
|
||||
+linter.preview = enabled
|
||||
|
||||
--- Summary ---
|
||||
Removed: 0
|
||||
Added: 0
|
|
@ -0,0 +1,10 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
--- Linter settings ---
|
||||
-linter.preview = disabled
|
||||
+linter.preview = enabled
|
||||
|
||||
--- Summary ---
|
||||
Removed: 0
|
||||
Added: 0
|
|
@ -29,6 +29,7 @@ use crate::checkers::ast::Checker;
|
|||
/// return rec
|
||||
/// except ZeroDivisionError:
|
||||
/// logging.exception("Exception occurred")
|
||||
/// raise
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
|
@ -41,6 +42,7 @@ use crate::checkers::ast::Checker;
|
|||
/// rec = 1 / n
|
||||
/// except ZeroDivisionError:
|
||||
/// logging.exception("Exception occurred")
|
||||
/// raise
|
||||
/// else:
|
||||
/// print(f"reciprocal of {n} is {rec}")
|
||||
/// return rec
|
||||
|
|
|
@ -9,6 +9,7 @@ use anyhow::{Context, Result, bail};
|
|||
use globset::{Glob, GlobMatcher, GlobSet, GlobSetBuilder};
|
||||
use log::debug;
|
||||
use pep440_rs::{VersionSpecifier, VersionSpecifiers};
|
||||
use ruff_db::diagnostic::DiagnosticFormat;
|
||||
use rustc_hash::FxHashMap;
|
||||
use serde::{Deserialize, Deserializer, Serialize, de};
|
||||
use strum_macros::EnumIter;
|
||||
|
@ -553,6 +554,34 @@ impl Display for OutputFormat {
|
|||
}
|
||||
}
|
||||
|
||||
/// The subset of output formats only implemented in Ruff, not in `ruff_db` via `DisplayDiagnostics`.
|
||||
pub enum RuffOutputFormat {
|
||||
Github,
|
||||
Grouped,
|
||||
Sarif,
|
||||
}
|
||||
|
||||
impl TryFrom<OutputFormat> for DiagnosticFormat {
|
||||
type Error = RuffOutputFormat;
|
||||
|
||||
fn try_from(format: OutputFormat) -> std::result::Result<Self, Self::Error> {
|
||||
match format {
|
||||
OutputFormat::Concise => Ok(DiagnosticFormat::Concise),
|
||||
OutputFormat::Full => Ok(DiagnosticFormat::Full),
|
||||
OutputFormat::Json => Ok(DiagnosticFormat::Json),
|
||||
OutputFormat::JsonLines => Ok(DiagnosticFormat::JsonLines),
|
||||
OutputFormat::Junit => Ok(DiagnosticFormat::Junit),
|
||||
OutputFormat::Gitlab => Ok(DiagnosticFormat::Gitlab),
|
||||
OutputFormat::Pylint => Ok(DiagnosticFormat::Pylint),
|
||||
OutputFormat::Rdjson => Ok(DiagnosticFormat::Rdjson),
|
||||
OutputFormat::Azure => Ok(DiagnosticFormat::Azure),
|
||||
OutputFormat::Github => Err(RuffOutputFormat::Github),
|
||||
OutputFormat::Grouped => Err(RuffOutputFormat::Grouped),
|
||||
OutputFormat::Sarif => Err(RuffOutputFormat::Sarif),
|
||||
}
|
||||
}
|
||||
}
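A hypothetical caller sketch (not part of this change; the helper name `render_path` and the returned strings are illustrative only) showing how the fallible conversion can route a format either through `ruff_db`'s `DisplayDiagnostics` or to one of Ruff's own emitters:

```rust
// Sketch only: assumes `OutputFormat`, `DiagnosticFormat`, and
// `RuffOutputFormat` are in scope as defined above.
fn render_path(format: OutputFormat) -> &'static str {
    match DiagnosticFormat::try_from(format) {
        // Formats implemented in `ruff_db` can be rendered via `DisplayDiagnostics`.
        Ok(_) => "render via DisplayDiagnostics",
        // The remaining formats fall back to Ruff's dedicated emitters.
        Err(RuffOutputFormat::Github) => "use Ruff's GitHub emitter",
        Err(RuffOutputFormat::Grouped) => "use Ruff's grouped emitter",
        Err(RuffOutputFormat::Sarif) => "use Ruff's SARIF emitter",
    }
}
```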
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Hash)]
|
||||
#[serde(try_from = "String")]
|
||||
pub struct RequiredVersion(VersionSpecifiers);
|
||||
|
|
|
@ -10,7 +10,9 @@ use anyhow::Result;
|
|||
use itertools::Itertools;
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use ruff_db::diagnostic::{Diagnostic, Span};
|
||||
use ruff_db::diagnostic::{
|
||||
Diagnostic, DiagnosticFormat, DisplayDiagnosticConfig, DisplayDiagnostics, Span,
|
||||
};
|
||||
use ruff_notebook::Notebook;
|
||||
#[cfg(not(fuzzing))]
|
||||
use ruff_notebook::NotebookError;
|
||||
|
@ -24,7 +26,7 @@ use ruff_source_file::SourceFileBuilder;
|
|||
use crate::codes::Rule;
|
||||
use crate::fix::{FixResult, fix_file};
|
||||
use crate::linter::check_path;
|
||||
use crate::message::{Emitter, EmitterContext, TextEmitter, create_syntax_error_diagnostic};
|
||||
use crate::message::{EmitterContext, create_syntax_error_diagnostic};
|
||||
use crate::package::PackageRoot;
|
||||
use crate::packaging::detect_package_root;
|
||||
use crate::settings::types::UnsafeFixes;
|
||||
|
@ -444,42 +446,38 @@ pub(crate) fn print_jupyter_messages(
|
|||
path: &Path,
|
||||
notebook: &Notebook,
|
||||
) -> String {
|
||||
let mut output = Vec::new();
|
||||
|
||||
TextEmitter::default()
|
||||
let config = DisplayDiagnosticConfig::default()
|
||||
.format(DiagnosticFormat::Full)
|
||||
.hide_severity(true)
|
||||
.with_show_fix_status(true)
|
||||
.with_show_fix_diff(true)
|
||||
.with_show_source(true)
|
||||
.with_fix_applicability(Applicability::DisplayOnly)
|
||||
.emit(
|
||||
&mut output,
|
||||
diagnostics,
|
||||
&EmitterContext::new(&FxHashMap::from_iter([(
|
||||
path.file_name().unwrap().to_string_lossy().to_string(),
|
||||
notebook.index().clone(),
|
||||
)])),
|
||||
)
|
||||
.unwrap();
|
||||
.show_fix_diff(true)
|
||||
.with_fix_applicability(Applicability::DisplayOnly);
|
||||
|
||||
String::from_utf8(output).unwrap()
|
||||
DisplayDiagnostics::new(
|
||||
&EmitterContext::new(&FxHashMap::from_iter([(
|
||||
path.file_name().unwrap().to_string_lossy().to_string(),
|
||||
notebook.index().clone(),
|
||||
)])),
|
||||
&config,
|
||||
diagnostics,
|
||||
)
|
||||
.to_string()
|
||||
}
|
||||
|
||||
pub(crate) fn print_messages(diagnostics: &[Diagnostic]) -> String {
|
||||
let mut output = Vec::new();
|
||||
|
||||
TextEmitter::default()
|
||||
let config = DisplayDiagnosticConfig::default()
|
||||
.format(DiagnosticFormat::Full)
|
||||
.hide_severity(true)
|
||||
.with_show_fix_status(true)
|
||||
.with_show_fix_diff(true)
|
||||
.with_show_source(true)
|
||||
.with_fix_applicability(Applicability::DisplayOnly)
|
||||
.emit(
|
||||
&mut output,
|
||||
diagnostics,
|
||||
&EmitterContext::new(&FxHashMap::default()),
|
||||
)
|
||||
.unwrap();
|
||||
.show_fix_diff(true)
|
||||
.with_fix_applicability(Applicability::DisplayOnly);
|
||||
|
||||
String::from_utf8(output).unwrap()
|
||||
DisplayDiagnostics::new(
|
||||
&EmitterContext::new(&FxHashMap::default()),
|
||||
&config,
|
||||
diagnostics,
|
||||
)
|
||||
.to_string()
|
||||
}
|
||||
|
||||
#[macro_export]
|
||||
|
|
|
@ -3030,6 +3030,12 @@ impl Parameters {
|
|||
.find(|arg| arg.parameter.name.as_str() == name)
|
||||
}
|
||||
|
||||
/// Returns the index of the parameter with the given name.
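///
/// A sketch of the expected result (assuming non-variadic parameters are
/// walked in positional-only, regular, keyword-only order): for
/// `def f(a, b, /, c, *, d)`, `index("c")` would return `Some(2)` and
/// `index("e")` would return `None`.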
|
||||
pub fn index(&self, name: &str) -> Option<usize> {
|
||||
self.iter_non_variadic_params()
|
||||
.position(|arg| arg.parameter.name.as_str() == name)
|
||||
}
|
||||
|
||||
/// Returns an iterator over all parameters included in this [`Parameters`] node.
|
||||
pub fn iter(&self) -> ParametersIterator<'_> {
|
||||
ParametersIterator::new(self)
|
||||
|
|
|
@ -2101,7 +2101,7 @@ impl<'a> SemanticModel<'a> {
|
|||
/// Finds and returns the [`Scope`] corresponding to a given [`ast::StmtFunctionDef`].
|
||||
///
|
||||
/// This method searches all scopes created by a function definition, comparing the
|
||||
/// [`TextRange`] of the provided `function_def` with the the range of the function
|
||||
/// [`TextRange`] of the provided `function_def` with the range of the function
|
||||
/// associated with the scope.
|
||||
pub fn function_scope(&self, function_def: &ast::StmtFunctionDef) -> Option<&Scope<'_>> {
|
||||
self.scopes.iter().find(|scope| {
|
||||
|
|
|
@ -188,6 +188,40 @@ fn config_file_annotation_showing_where_python_version_set_typing_error() -> any
|
|||
Ok(())
|
||||
}
|
||||
|
||||
/// If `.` and `./src` are both registered as first-party search paths,
|
||||
/// the `./src` directory should take precedence for module resolution,
|
||||
/// because it is a subdirectory of `.` and is therefore the more specific path.
|
||||
#[test]
|
||||
fn src_subdirectory_takes_precedence_over_repo_root() -> anyhow::Result<()> {
|
||||
let case = CliTest::with_files([(
|
||||
"src/package/__init__.py",
|
||||
"from . import nonexistent_submodule",
|
||||
)])?;
|
||||
|
||||
// If `./src` didn't take priority over `.` here, we would report
|
||||
// "Module `src.package` has no member `nonexistent_submodule`"
|
||||
// instead of "Module `package` has no member `nonexistent_submodule`".
|
||||
assert_cmd_snapshot!(case.command(), @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
error[unresolved-import]: Module `package` has no member `nonexistent_submodule`
|
||||
--> src/package/__init__.py:1:15
|
||||
|
|
||||
1 | from . import nonexistent_submodule
|
||||
| ^^^^^^^^^^^^^^^^^^^^^
|
||||
|
|
||||
info: rule `unresolved-import` is enabled by default
|
||||
|
||||
Found 1 diagnostic
|
||||
|
||||
----- stderr -----
|
||||
WARN ty is pre-release software and not ready for production use. Expect to encounter bugs, missing features, and fatal errors.
|
||||
");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// This tests that, even if no Python *version* has been specified on the CLI or in a config file,
|
||||
/// ty is still able to infer the Python version from a `--python` argument on the CLI,
|
||||
/// *even if* the `--python` argument points to a system installation.
|
||||
|
@ -1738,8 +1772,8 @@ fn default_root_tests_package() -> anyhow::Result<()> {
|
|||
5 | print(f"{foo} {bar}")
|
||||
|
|
||||
info: Searched in the following paths during module resolution:
|
||||
info: 1. <temp_dir>/ (first-party code)
|
||||
info: 2. <temp_dir>/src (first-party code)
|
||||
info: 1. <temp_dir>/src (first-party code)
|
||||
info: 2. <temp_dir>/ (first-party code)
|
||||
info: 3. vendored://stdlib (stdlib typeshed stubs vendored by ty)
|
||||
info: make sure your Python environment is properly configured: https://docs.astral.sh/ty/modules/#python-environment
|
||||
info: rule `unresolved-import` is enabled by default
|
||||
|
@ -1814,8 +1848,8 @@ fn default_root_python_package() -> anyhow::Result<()> {
|
|||
5 | print(f"{foo} {bar}")
|
||||
|
|
||||
info: Searched in the following paths during module resolution:
|
||||
info: 1. <temp_dir>/ (first-party code)
|
||||
info: 2. <temp_dir>/src (first-party code)
|
||||
info: 1. <temp_dir>/src (first-party code)
|
||||
info: 2. <temp_dir>/ (first-party code)
|
||||
info: 3. vendored://stdlib (stdlib typeshed stubs vendored by ty)
|
||||
info: make sure your Python environment is properly configured: https://docs.astral.sh/ty/modules/#python-environment
|
||||
info: rule `unresolved-import` is enabled by default
|
||||
|
@ -1861,8 +1895,8 @@ fn default_root_python_package_pyi() -> anyhow::Result<()> {
|
|||
5 | print(f"{foo} {bar}")
|
||||
|
|
||||
info: Searched in the following paths during module resolution:
|
||||
info: 1. <temp_dir>/ (first-party code)
|
||||
info: 2. <temp_dir>/src (first-party code)
|
||||
info: 1. <temp_dir>/src (first-party code)
|
||||
info: 2. <temp_dir>/ (first-party code)
|
||||
info: 3. vendored://stdlib (stdlib typeshed stubs vendored by ty)
|
||||
info: make sure your Python environment is properly configured: https://docs.astral.sh/ty/modules/#python-environment
|
||||
info: rule `unresolved-import` is enabled by default
|
||||
|
@ -1902,8 +1936,8 @@ fn pythonpath_is_respected() -> anyhow::Result<()> {
|
|||
3 | print(f"{baz.it}")
|
||||
|
|
||||
info: Searched in the following paths during module resolution:
|
||||
info: 1. <temp_dir>/ (first-party code)
|
||||
info: 2. <temp_dir>/src (first-party code)
|
||||
info: 1. <temp_dir>/src (first-party code)
|
||||
info: 2. <temp_dir>/ (first-party code)
|
||||
info: 3. vendored://stdlib (stdlib typeshed stubs vendored by ty)
|
||||
info: make sure your Python environment is properly configured: https://docs.astral.sh/ty/modules/#python-environment
|
||||
info: rule `unresolved-import` is enabled by default
|
||||
|
@ -1959,8 +1993,8 @@ fn pythonpath_multiple_dirs_is_respected() -> anyhow::Result<()> {
|
|||
3 | import foo
|
||||
|
|
||||
info: Searched in the following paths during module resolution:
|
||||
info: 1. <temp_dir>/ (first-party code)
|
||||
info: 2. <temp_dir>/src (first-party code)
|
||||
info: 1. <temp_dir>/src (first-party code)
|
||||
info: 2. <temp_dir>/ (first-party code)
|
||||
info: 3. vendored://stdlib (stdlib typeshed stubs vendored by ty)
|
||||
info: make sure your Python environment is properly configured: https://docs.astral.sh/ty/modules/#python-environment
|
||||
info: rule `unresolved-import` is enabled by default
|
||||
|
@ -1975,8 +2009,8 @@ fn pythonpath_multiple_dirs_is_respected() -> anyhow::Result<()> {
|
|||
5 | print(f"{baz.it}")
|
||||
|
|
||||
info: Searched in the following paths during module resolution:
|
||||
info: 1. <temp_dir>/ (first-party code)
|
||||
info: 2. <temp_dir>/src (first-party code)
|
||||
info: 1. <temp_dir>/src (first-party code)
|
||||
info: 2. <temp_dir>/ (first-party code)
|
||||
info: 3. vendored://stdlib (stdlib typeshed stubs vendored by ty)
|
||||
info: make sure your Python environment is properly configured: https://docs.astral.sh/ty/modules/#python-environment
|
||||
info: rule `unresolved-import` is enabled by default
|
||||
|
|
|
@ -688,7 +688,7 @@ fn import_from_tokens(tokens: &[Token]) -> Option<&Token> {
|
|||
/// This also handles cases like `import foo, c<CURSOR>, bar`.
|
||||
///
|
||||
/// If found, a token corresponding to the `import` or `from` keyword
|
||||
/// and the the closest point of the `<CURSOR>` is returned.
|
||||
/// and the closest point of the `<CURSOR>` is returned.
|
||||
///
|
||||
/// It is assumed that callers will call `from_import_tokens` first to
|
||||
/// try and recognize a `from ... import ...` statement before using
|
||||
|
|
|
@ -123,7 +123,7 @@ impl<'a> Importer<'a> {
|
|||
/// then the existing style is always respected instead.
|
||||
///
|
||||
/// `members` should be a map of symbols in scope at the position
|
||||
/// where the the imported symbol should be available. This is used
|
||||
/// where the imported symbol should be available. This is used
|
||||
/// to craft import statements in a way that doesn't conflict with
|
||||
/// symbols in scope. If it's not feasible to provide this map, then
|
||||
/// providing an empty map is generally fine. But it does mean that
|
||||
|
|
|
@ -10,7 +10,7 @@ mod exclude;
|
|||
mod include;
|
||||
mod portable;
|
||||
|
||||
/// Path filtering based on an an exclude and include glob pattern set.
|
||||
/// Path filtering based on an exclude and include glob pattern set.
|
||||
///
|
||||
/// Exclude patterns take precedence over includes.
|
||||
#[derive(Clone, Debug, Eq, PartialEq, get_size2::GetSize)]
|
||||
|
|
|
@ -237,15 +237,16 @@ impl Options {
|
|||
.map(|root| root.absolute(project_root, system))
|
||||
.collect()
|
||||
} else {
|
||||
let mut roots = vec![];
|
||||
let src = project_root.join("src");
|
||||
|
||||
let mut roots = if system.is_directory(&src) {
|
||||
if system.is_directory(&src) {
|
||||
// Default to `src` and the project root if `src` exists and the root hasn't been specified.
|
||||
// This corresponds to the `src-layout`
|
||||
tracing::debug!(
|
||||
"Including `.` and `./src` in `environment.root` because a `./src` directory exists"
|
||||
);
|
||||
vec![project_root.to_path_buf(), src]
|
||||
roots.push(src);
|
||||
} else if system.is_directory(&project_root.join(project_name).join(project_name)) {
|
||||
// `src-layout` but when the folder isn't called `src` but has the same name as the project.
|
||||
// For example, the "src" folder for `psycopg` is called `psycopg` and the python files are in `psycopg/psycopg/_adapters_map.py`
|
||||
|
@ -253,12 +254,11 @@ impl Options {
|
|||
"Including `.` and `/{project_name}` in `environment.root` because a `./{project_name}/{project_name}` directory exists"
|
||||
);
|
||||
|
||||
vec![project_root.to_path_buf(), project_root.join(project_name)]
|
||||
roots.push(project_root.join(project_name));
|
||||
} else {
|
||||
// Default to a [flat project structure](https://packaging.python.org/en/latest/discussions/src-layout-vs-flat-layout/).
|
||||
tracing::debug!("Including `.` in `environment.root`");
|
||||
vec![project_root.to_path_buf()]
|
||||
};
|
||||
}
|
||||
|
||||
let python = project_root.join("python");
|
||||
if system.is_directory(&python)
|
||||
|
@ -293,6 +293,10 @@ impl Options {
|
|||
roots.push(tests_dir);
|
||||
}
|
||||
|
||||
// The project root should always be included, and should always come
|
||||
// after any subdirectories such as `./src`, `./tests` and/or `./python`.
|
||||
roots.push(project_root.to_path_buf());
|
||||
|
||||
roots
|
||||
};
|
||||
|
||||
|
|
|
@ -33,11 +33,6 @@ class Shape:
|
|||
reveal_type(x) # revealed: Self@nested_func_without_enclosing_binding
|
||||
inner(self)
|
||||
|
||||
def implicit_self(self) -> Self:
|
||||
# TODO: first argument in a method should be considered as "typing.Self"
|
||||
reveal_type(self) # revealed: Unknown
|
||||
return self
|
||||
|
||||
reveal_type(Shape().nested_type()) # revealed: list[Shape]
|
||||
reveal_type(Shape().nested_func()) # revealed: Shape
|
||||
|
||||
|
@ -53,6 +48,150 @@ class Outer:
|
|||
return self
|
||||
```
|
||||
|
||||
## Type of (unannotated) `self` parameters
|
||||
|
||||
In instance methods, the first parameter (regardless of its name) is assumed to have the type
|
||||
`typing.Self`, unless it has an explicit annotation. This does not apply to `@classmethod` and
|
||||
`@staticmethod`s.
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.11"
|
||||
```
|
||||
|
||||
```py
|
||||
from typing import Self
|
||||
|
||||
class A:
|
||||
def implicit_self(self) -> Self:
|
||||
# TODO: This should be Self@implicit_self
|
||||
reveal_type(self) # revealed: Unknown
|
||||
|
||||
return self
|
||||
|
||||
def a_method(self) -> int:
|
||||
def first_arg_is_not_self(a: int) -> int:
|
||||
reveal_type(a) # revealed: int
|
||||
return a
|
||||
return first_arg_is_not_self(1)
|
||||
|
||||
@classmethod
|
||||
def a_classmethod(cls) -> Self:
|
||||
# TODO: This should be type[Self@a_classmethod]
|
||||
reveal_type(cls) # revealed: Unknown
|
||||
return cls()
|
||||
|
||||
@staticmethod
|
||||
def a_staticmethod(x: int): ...
|
||||
|
||||
a = A()
|
||||
|
||||
reveal_type(a.implicit_self()) # revealed: A
|
||||
reveal_type(a.implicit_self) # revealed: bound method A.implicit_self() -> A
|
||||
```
|
||||
|
||||
Calling an instance method explicitly verifies the first argument:
|
||||
|
||||
```py
|
||||
A.implicit_self(a)
|
||||
|
||||
# error: [invalid-argument-type] "Argument to function `implicit_self` is incorrect: Argument type `Literal[1]` does not satisfy upper bound `A` of type variable `Self`"
|
||||
A.implicit_self(1)
|
||||
```
|
||||
|
||||
Passing `self` implicitly also verifies the type:
|
||||
|
||||
```py
|
||||
from typing import Never
|
||||
|
||||
class Strange:
|
||||
def can_not_be_called(self: Never) -> None: ...
|
||||
|
||||
# error: [invalid-argument-type] "Argument to bound method `can_not_be_called` is incorrect: Expected `Never`, found `Strange`"
|
||||
Strange().can_not_be_called()
|
||||
```
|
||||
|
||||
If the method is a class or static method, then the first argument is not inferred as `Self`:
|
||||
|
||||
```py
|
||||
A.a_classmethod()
|
||||
A.a_classmethod(a) # error: [too-many-positional-arguments]
|
||||
A.a_staticmethod(1)
|
||||
a.a_staticmethod(1)
|
||||
A.a_staticmethod(a) # error: [invalid-argument-type]
|
||||
```
|
||||
|
||||
The first parameter of instance methods always has type `Self`, if it is not explicitly annotated.
|
||||
The name `self` is not special in any way.
|
||||
|
||||
```py
|
||||
class B:
|
||||
def name_does_not_matter(this) -> Self:
|
||||
# TODO: Should reveal Self@name_does_not_matter
|
||||
reveal_type(this) # revealed: Unknown
|
||||
|
||||
return this
|
||||
|
||||
def positional_only(self, /, x: int) -> Self:
|
||||
# TODO: Should reveal Self@positional_only
|
||||
reveal_type(self) # revealed: Unknown
|
||||
return self
|
||||
|
||||
def keyword_only(self, *, x: int) -> Self:
|
||||
# TODO: Should reveal Self@keyword_only
|
||||
reveal_type(self) # revealed: Unknown
|
||||
return self
|
||||
|
||||
@property
|
||||
def a_property(self) -> Self:
|
||||
# TODO: Should reveal Self@a_property
|
||||
reveal_type(self) # revealed: Unknown
|
||||
return self
|
||||
|
||||
reveal_type(B().name_does_not_matter()) # revealed: B
|
||||
reveal_type(B().positional_only(1)) # revealed: B
|
||||
reveal_type(B().keyword_only(x=1)) # revealed: B
|
||||
|
||||
# TODO: this should be B
|
||||
reveal_type(B().a_property) # revealed: Unknown
|
||||
```
|
||||
|
||||
This also works for generic classes:
|
||||
|
||||
```py
|
||||
from typing import Self, Generic, TypeVar
|
||||
|
||||
T = TypeVar("T")
|
||||
|
||||
class G(Generic[T]):
|
||||
def id(self) -> Self:
|
||||
# TODO: Should reveal Self@id
|
||||
reveal_type(self) # revealed: Unknown
|
||||
|
||||
return self
|
||||
|
||||
reveal_type(G[int]().id()) # revealed: G[int]
|
||||
reveal_type(G[str]().id()) # revealed: G[str]
|
||||
```
|
||||
|
||||
Free functions and nested functions do not use implicit `Self`:
|
||||
|
||||
```py
|
||||
def not_a_method(self):
|
||||
reveal_type(self) # revealed: Unknown
|
||||
|
||||
# error: [invalid-type-form]
|
||||
def does_not_return_self(self) -> Self:
|
||||
return self
|
||||
|
||||
class C:
|
||||
def outer(self) -> None:
|
||||
def inner(self):
|
||||
reveal_type(self) # revealed: Unknown
|
||||
|
||||
reveal_type(not_a_method) # revealed: def not_a_method(self) -> Unknown
|
||||
```
|
||||
|
||||
## typing_extensions
|
||||
|
||||
```toml
|
||||
|
@ -208,6 +347,47 @@ class MyMetaclass(type):
|
|||
return super().__new__(cls)
|
||||
```
|
||||
|
||||
## Explicit annotations override implicit `Self`
|
||||
|
||||
If the first parameter is explicitly annotated, that annotation takes precedence over the implicit
|
||||
`Self` type.
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
python-version = "3.12"
|
||||
```
|
||||
|
||||
```py
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import final
|
||||
|
||||
@final
|
||||
class Disjoint: ...
|
||||
|
||||
class Explicit:
|
||||
# TODO: We could emit a warning if the annotated type of `self` is disjoint from `Explicit`
|
||||
def bad(self: Disjoint) -> None:
|
||||
reveal_type(self) # revealed: Disjoint
|
||||
|
||||
def forward(self: Explicit) -> None:
|
||||
reveal_type(self) # revealed: Explicit
|
||||
|
||||
# error: [invalid-argument-type] "Argument to bound method `bad` is incorrect: Expected `Disjoint`, found `Explicit`"
|
||||
Explicit().bad()
|
||||
|
||||
Explicit().forward()
|
||||
|
||||
class ExplicitGeneric[T]:
|
||||
def special(self: ExplicitGeneric[int]) -> None:
|
||||
reveal_type(self) # revealed: ExplicitGeneric[int]
|
||||
|
||||
ExplicitGeneric[int]().special()
|
||||
|
||||
# TODO: this should be an `invalid-argument-type` error
|
||||
ExplicitGeneric[str]().special()
|
||||
```
|
||||
|
||||
## Binding a method fixes `Self`
|
||||
|
||||
When a method is bound, any instances of `Self` in its signature are "fixed", since we now know the
|
||||
|
|
|
@ -69,7 +69,9 @@ reveal_type(bound_method(1)) # revealed: str
|
|||
When we call the function object itself, we need to pass the `instance` explicitly:
|
||||
|
||||
```py
|
||||
C.f(1) # error: [missing-argument]
|
||||
# error: [invalid-argument-type] "Argument to function `f` is incorrect: Expected `C`, found `Literal[1]`"
|
||||
# error: [missing-argument]
|
||||
C.f(1)
|
||||
|
||||
reveal_type(C.f(C(), 1)) # revealed: str
|
||||
```
|
||||
|
|
|
@ -431,6 +431,8 @@ def _(flag: bool):
|
|||
reveal_type(C7.union_of_class_data_descriptor_and_attribute) # revealed: Literal["data", 2]
|
||||
|
||||
C7.union_of_metaclass_attributes = 2 if flag else 1
|
||||
# TODO: https://github.com/astral-sh/ty/issues/1163
|
||||
# error: [invalid-assignment]
|
||||
C7.union_of_metaclass_data_descriptor_and_attribute = 2 if flag else 100
|
||||
C7.union_of_class_attributes = 2 if flag else 1
|
||||
C7.union_of_class_data_descriptor_and_attribute = 2 if flag else DataDescriptor()
|
||||
|
|
|
@ -43,8 +43,7 @@ import b
|
|||
df: a.DataFrame = b.DataFrame() # error: [invalid-assignment] "Object of type `b.DataFrame` is not assignable to `a.DataFrame`"
|
||||
|
||||
def _(dfs: list[b.DataFrame]):
|
||||
# TODO should be"Object of type `list[b.DataFrame]` is not assignable to `list[a.DataFrame]`
|
||||
# error: [invalid-assignment] "Object of type `list[DataFrame]` is not assignable to `list[DataFrame]`"
|
||||
# error: [invalid-assignment] "Object of type `list[b.DataFrame]` is not assignable to `list[a.DataFrame]`"
|
||||
dataframes: list[a.DataFrame] = dfs
|
||||
```
|
||||
|
||||
|
@ -171,6 +170,36 @@ class Container(Generic[T]):
|
|||
|
||||
## Protocols
|
||||
|
||||
### Differing members
|
||||
|
||||
`bad.py`:
|
||||
|
||||
```py
|
||||
from typing import Protocol, TypeVar
|
||||
|
||||
T_co = TypeVar("T_co", covariant=True)
|
||||
|
||||
class Iterator(Protocol[T_co]):
|
||||
def __nexxt__(self) -> T_co: ...
|
||||
|
||||
def bad() -> Iterator[str]:
|
||||
raise NotImplementedError
|
||||
```
|
||||
|
||||
`main.py`:
|
||||
|
||||
```py
|
||||
from typing import Iterator
|
||||
|
||||
def f() -> Iterator[str]:
|
||||
import bad
|
||||
|
||||
# error: [invalid-return-type] "Return type does not match returned value: expected `typing.Iterator[str]`, found `bad.Iterator[str]`"
|
||||
return bad.bad()
|
||||
```
|
||||
|
||||
### Same members but with different types
|
||||
|
||||
```py
|
||||
from typing import Protocol
|
||||
import proto_a
|
||||
|
@ -228,3 +257,21 @@ from typing import TypedDict
|
|||
class Person(TypedDict):
|
||||
name: bytes
|
||||
```
|
||||
|
||||
## Tuple specializations
|
||||
|
||||
`module.py`:
|
||||
|
||||
```py
|
||||
class Model: ...
|
||||
```
|
||||
|
||||
```py
|
||||
class Model: ...
|
||||
|
||||
def get_models_tuple() -> tuple[Model]:
|
||||
from module import Model
|
||||
|
||||
# error: [invalid-return-type] "Return type does not match returned value: expected `tuple[mdtest_snippet.Model]`, found `tuple[module.Model]`"
|
||||
return (Model(),)
|
||||
```
|
||||
|
|
|
@ -562,17 +562,17 @@ class C(Generic[T]):
|
|||
return u
|
||||
|
||||
reveal_type(generic_context(C)) # revealed: tuple[T@C]
|
||||
reveal_type(generic_context(C.method)) # revealed: None
|
||||
reveal_type(generic_context(C.generic_method)) # revealed: tuple[U@generic_method]
|
||||
reveal_type(generic_context(C.method)) # revealed: tuple[Self@method]
|
||||
reveal_type(generic_context(C.generic_method)) # revealed: tuple[Self@generic_method, U@generic_method]
|
||||
reveal_type(generic_context(C[int])) # revealed: None
|
||||
reveal_type(generic_context(C[int].method)) # revealed: None
|
||||
reveal_type(generic_context(C[int].generic_method)) # revealed: tuple[U@generic_method]
|
||||
reveal_type(generic_context(C[int].method)) # revealed: tuple[Self@method]
|
||||
reveal_type(generic_context(C[int].generic_method)) # revealed: tuple[Self@generic_method, U@generic_method]
|
||||
|
||||
c: C[int] = C[int]()
|
||||
reveal_type(c.generic_method(1, "string")) # revealed: Literal["string"]
|
||||
reveal_type(generic_context(c)) # revealed: None
|
||||
reveal_type(generic_context(c.method)) # revealed: None
|
||||
reveal_type(generic_context(c.generic_method)) # revealed: tuple[U@generic_method]
|
||||
reveal_type(generic_context(c.method)) # revealed: tuple[Self@method]
|
||||
reveal_type(generic_context(c.generic_method)) # revealed: tuple[Self@generic_method, U@generic_method]
|
||||
```
|
||||
|
||||
## Specializations propagate
|
||||
|
|
|
@ -464,6 +464,7 @@ def f(x: str):
|
|||
from typing import TypeVar, overload
|
||||
|
||||
T = TypeVar("T")
|
||||
S = TypeVar("S")
|
||||
|
||||
def outer(t: T) -> None:
|
||||
def inner(t: T) -> None: ...
|
||||
|
@ -479,6 +480,13 @@ def overloaded_outer(t: T | None = None) -> None:
|
|||
|
||||
if t is not None:
|
||||
inner(t)
|
||||
|
||||
def outer(t: T) -> None:
|
||||
def inner(inner_t: T, s: S) -> tuple[T, S]:
|
||||
return inner_t, s
|
||||
reveal_type(inner(t, 1)) # revealed: tuple[T@outer, Literal[1]]
|
||||
|
||||
inner("wrong", 1) # error: [invalid-argument-type]
|
||||
```
|
||||
|
||||
## Unpacking a TypeVar
|
||||
|
|
|
@ -504,17 +504,17 @@ class C[T]:
|
|||
def cannot_shadow_class_typevar[T](self, t: T): ...
|
||||
|
||||
reveal_type(generic_context(C)) # revealed: tuple[T@C]
|
||||
reveal_type(generic_context(C.method)) # revealed: None
|
||||
reveal_type(generic_context(C.generic_method)) # revealed: tuple[U@generic_method]
|
||||
reveal_type(generic_context(C.method)) # revealed: tuple[Self@method]
|
||||
reveal_type(generic_context(C.generic_method)) # revealed: tuple[Self@generic_method, U@generic_method]
|
||||
reveal_type(generic_context(C[int])) # revealed: None
|
||||
reveal_type(generic_context(C[int].method)) # revealed: None
|
||||
reveal_type(generic_context(C[int].generic_method)) # revealed: tuple[U@generic_method]
|
||||
reveal_type(generic_context(C[int].method)) # revealed: tuple[Self@method]
|
||||
reveal_type(generic_context(C[int].generic_method)) # revealed: tuple[Self@generic_method, U@generic_method]
|
||||
|
||||
c: C[int] = C[int]()
|
||||
reveal_type(c.generic_method(1, "string")) # revealed: Literal["string"]
|
||||
reveal_type(generic_context(c)) # revealed: None
|
||||
reveal_type(generic_context(c.method)) # revealed: None
|
||||
reveal_type(generic_context(c.generic_method)) # revealed: tuple[U@generic_method]
|
||||
reveal_type(generic_context(c.method)) # revealed: tuple[Self@method]
|
||||
reveal_type(generic_context(c.generic_method)) # revealed: tuple[Self@generic_method, U@generic_method]
|
||||
```
|
||||
|
||||
## Specializations propagate
|
||||
|
|
|
@ -474,6 +474,13 @@ def overloaded_outer[T](t: T | None = None) -> None:
|
|||
|
||||
if t is not None:
|
||||
inner(t)
|
||||
|
||||
def outer[T](t: T) -> None:
|
||||
def inner[S](inner_t: T, s: S) -> tuple[T, S]:
|
||||
return inner_t, s
|
||||
reveal_type(inner(t, 1)) # revealed: tuple[T@outer, Literal[1]]
|
||||
|
||||
inner("wrong", 1) # error: [invalid-argument-type]
|
||||
```
|
||||
|
||||
## Unpacking a TypeVar
|
||||
|
@@ -534,6 +541,5 @@ class C:
def _(x: int):
    reveal_type(C().explicit_self(x)) # revealed: tuple[C, int]

    # TODO: this should be `tuple[C, int]` as well, once we support implicit `self`
    reveal_type(C().implicit_self(x)) # revealed: tuple[Unknown, int]
    reveal_type(C().implicit_self(x)) # revealed: tuple[C, int]
```

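The definition of `C` sits above this hunk. A minimal sketch of the assumed shape (hypothetical, reconstructed from the revealed types): `explicit_self` annotates `self` with `Self` by hand, while `implicit_self` relies on the implicit `Self` binding that this change starts honoring.

```py
from typing import Self

class C:
    def explicit_self(self: Self, x: int) -> tuple[Self, int]:
        return self, x

    def implicit_self(self, x: int) -> tuple[Self, int]:
        return self, x
```
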
@@ -117,6 +117,7 @@ reveal_type(bound_method.__func__) # revealed: def f(self, x: int) -> str
reveal_type(C[int]().f(1)) # revealed: str
reveal_type(bound_method(1)) # revealed: str

# error: [invalid-argument-type] "Argument to function `f` is incorrect: Argument type `Literal[1]` does not satisfy upper bound `C[T@C]` of type variable `Self`"
C[int].f(1) # error: [missing-argument]
reveal_type(C[int].f(C[int](), 1)) # revealed: str

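The new diagnostic above follows from `Self` now being modeled as a typevar whose upper bound is the enclosing class specialized with its own typevars (`C[T@C]`). A minimal sketch of the assumed class under test (hypothetical, inferred from `def f(self, x: int) -> str` in the hunk header):

```py
class C[T]:
    def f(self, x: int) -> str:
        # `self` is implicitly typed as `Self`, bounded by `C[T@C]`, which is why
        # passing `1` as the first argument of the unbound `C[int].f` is rejected.
        return str(x)
```
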
@@ -154,7 +155,7 @@ from ty_extensions import generic_context
legacy.m("string", None) # error: [invalid-argument-type]
reveal_type(legacy.m) # revealed: bound method Legacy[int].m[S](x: int, y: S@m) -> S@m
reveal_type(generic_context(Legacy)) # revealed: tuple[T@Legacy]
reveal_type(generic_context(legacy.m)) # revealed: tuple[S@m]
reveal_type(generic_context(legacy.m)) # revealed: tuple[Self@m, S@m]
```

With PEP 695 syntax, it is clearer that the method uses a separate typevar:

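The PEP 695 example that sentence refers to lies outside the hunk; a sketch of the assumed spelling (hypothetical class name), where the method's typevar `S` is declared on the method itself, separate from the class-level `T`:

```py
class Pep695[T]:
    def m[S](self, x: int, y: S) -> S:
        return y
```
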
@@ -278,8 +278,7 @@ reveal_type(Person._make(("Alice", 42))) # revealed: Unknown
person = Person("Alice", 42)

reveal_type(person._asdict()) # revealed: dict[str, Any]
# TODO: should be `Person` once we support implicit type of `self`
reveal_type(person._replace(name="Bob")) # revealed: Unknown
reveal_type(person._replace(name="Bob")) # revealed: Person
```

When accessing them on child classes of generic `NamedTuple`s, the return type is specialized

@@ -296,8 +295,7 @@ class Box(NamedTuple, Generic[T]):
class IntBox(Box[int]):
    pass

# TODO: should be `IntBox` once we support the implicit type of `self`
reveal_type(IntBox(1)._replace(content=42)) # revealed: Unknown
reveal_type(IntBox(1)._replace(content=42)) # revealed: IntBox
```

## `collections.namedtuple`

@@ -324,8 +324,7 @@ a covariant generic, this is equivalent to using the upper bound of the type par
from typing import Self

class Covariant[T]:
    # TODO: remove the explicit `Self` annotation, once we support the implicit type of `self`
    def get(self: Self) -> T:
    def get(self) -> T:
        raise NotImplementedError

def _(x: object):

@@ -338,8 +337,7 @@ Similarly, contravariant type parameters use their lower bound of `Never`:

```py
class Contravariant[T]:
    # TODO: remove the explicit `Self` annotation, once we support the implicit type of `self`
    def push(self: Self, x: T) -> None: ...
    def push(self, x: T) -> None: ...

def _(x: object):
    if isinstance(x, Contravariant):

@@ -354,10 +352,8 @@ the type system, so we represent it with the internal `Top[]` special form.

```py
class Invariant[T]:
    # TODO: remove the explicit `Self` annotation, once we support the implicit type of `self`
    def push(self: Self, x: T) -> None: ...
    # TODO: remove the explicit `Self` annotation, once we support the implicit type of `self`
    def get(self: Self) -> T:
    def push(self, x: T) -> None: ...
    def get(self) -> T:
        raise NotImplementedError

def _(x: object):

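The narrowing checks that exercise these classes sit just past the hunks above. A rough sketch of the behavior the surrounding prose describes, reusing the `Covariant`, `Contravariant`, and `Invariant` classes defined above (the revealed types here are assumptions: a covariant parameter materializes to its upper bound, a contravariant one to `Never`, and an invariant one to the internal `Top[]` form):

```py
def _(x: object):
    if isinstance(x, Covariant):
        reveal_type(x)  # assumed: Covariant[object]
    if isinstance(x, Contravariant):
        reveal_type(x)  # assumed: Contravariant[Never]
    if isinstance(x, Invariant):
        reveal_type(x)  # assumed: an Invariant specialization spelled with the internal `Top[]` form
```
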
@@ -173,6 +173,11 @@ def _(d: Any):

## Narrowing

```toml
[environment]
python-version = "3.12"
```

```py
from typing import Any
from typing_extensions import TypeGuard, TypeIs

@@ -295,6 +300,38 @@ def _(a: Foo):
    reveal_type(a) # revealed: Foo & Bar
```

For generics, we transform the argument passed into `TypeIs[]` from `X` to `Top[X]`. This helps
especially when using various functions from typeshed that are annotated as returning
`TypeIs[SomeCovariantGeneric[Any]]` to avoid false positives in other type checkers. For ty's
purposes, it would usually lead to more intuitive results if `object` was used as the specialization
for a covariant generic inside the `TypeIs` special form, but this is mitigated by our implicit
transformation from `TypeIs[SomeCovariantGeneric[Any]]` to `TypeIs[Top[SomeCovariantGeneric[Any]]]`
(which just simplifies to `TypeIs[SomeCovariantGeneric[object]]`).

```py
class Unrelated: ...

class Covariant[T]:
    def get(self) -> T:
        raise NotImplementedError

def is_instance_of_covariant(arg: object) -> TypeIs[Covariant[Any]]:
    return isinstance(arg, Covariant)

def needs_instance_of_unrelated(arg: Unrelated):
    pass

def _(x: Unrelated | Covariant[int]):
    if is_instance_of_covariant(x):
        raise RuntimeError("oh no")

    reveal_type(x) # revealed: Unrelated & ~Covariant[object]

    # We would emit a false-positive diagnostic here if we didn't implicitly transform
    # `TypeIs[Covariant[Any]]` to `TypeIs[Covariant[object]]`
    needs_instance_of_unrelated(x)
```

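An equivalent, more explicit spelling of the narrowing function above (a hedged sketch; the function name is hypothetical), which is what the implicit `Top[]` transformation effectively rewrites the annotation into:

```py
def is_instance_of_covariant_explicit(arg: object) -> TypeIs[Covariant[object]]:
    # Same runtime check; only the declared `TypeIs` specialization differs.
    return isinstance(arg, Covariant)
```
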
## `TypeGuard` special cases

```py

@@ -325,7 +325,7 @@ type A = list[Union["A", str]]
def f(x: A):
    reveal_type(x) # revealed: list[A | str]
    for item in x:
        reveal_type(item) # revealed: list[A | str] | str
        reveal_type(item) # revealed: list[Any | str] | str
```

#### With new-style union

@@ -336,7 +336,7 @@ type A = list["A" | str]
def f(x: A):
    reveal_type(x) # revealed: list[A | str]
    for item in x:
        reveal_type(item) # revealed: list[A | str] | str
        reveal_type(item) # revealed: list[Any | str] | str
```

#### With Optional

@@ -349,7 +349,7 @@ type A = list[Optional[Union["A", str]]]
def f(x: A):
    reveal_type(x) # revealed: list[A | str | None]
    for item in x:
        reveal_type(item) # revealed: list[A | str | None] | str | None
        reveal_type(item) # revealed: list[Any | str | None] | str | None
```

### Tuple comparison

@@ -893,8 +893,10 @@ class LotsOfBindings(Protocol):
    match object():
        case l: # error: [ambiguous-protocol-member]
            ...
    # error: [ambiguous-protocol-member] "Consider adding an annotation, e.g. `m: int | str = ...`"
    m = 1 if 1.2 > 3.4 else "a"

# revealed: frozenset[Literal["Nested", "NestedProtocol", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l"]]
# revealed: frozenset[Literal["Nested", "NestedProtocol", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m"]]
reveal_type(get_protocol_members(LotsOfBindings))

class Foo(Protocol):

@@ -1977,12 +1979,12 @@ from typing_extensions import TypeVar, Self, Protocol
from ty_extensions import is_equivalent_to, static_assert, is_assignable_to, is_subtype_of

class NewStyleClassScoped[T](Protocol):
    def method(self: Self, input: T) -> None: ...
    def method(self, input: T) -> None: ...

S = TypeVar("S")

class LegacyClassScoped(Protocol[S]):
    def method(self: Self, input: S) -> None: ...
    def method(self, input: S) -> None: ...

# TODO: these should pass
static_assert(is_equivalent_to(NewStyleClassScoped, LegacyClassScoped)) # error: [static-assert-error]

@@ -339,7 +339,7 @@ class A: ...

def f(x: A):
    # TODO: no error
    # error: [invalid-assignment] "Object of type `A | A` is not assignable to `A`"
    # error: [invalid-assignment] "Object of type `mdtest_snippet.A | mdtest_snippet.A` is not assignable to `mdtest_snippet.A`"
    x = A()
```

@@ -133,6 +133,11 @@ class Single(Enum):
    VALUE = 1

static_assert(is_equivalent_to(P | Q | Single, Literal[Single.VALUE] | Q | P))

static_assert(is_equivalent_to(Any, Any | Intersection[Any, str]))
static_assert(is_equivalent_to(Any, Intersection[str, Any] | Any))
static_assert(is_equivalent_to(Any, Any | Intersection[Any, Not[None]]))
static_assert(is_equivalent_to(Any, Intersection[Not[None], Any] | Any))
```

## Tuples

@@ -1948,8 +1948,6 @@ static_assert(is_subtype_of(TypeOf[A.g], Callable[[int], int]))
static_assert(not is_subtype_of(TypeOf[a.f], Callable[[float], int]))
static_assert(not is_subtype_of(TypeOf[A.g], Callable[[], int]))

# TODO: This assertion should be true
# error: [static-assert-error] "Static assertion error: argument of type `ty_extensions.ConstraintSet[never]` is statically known to be falsy"
static_assert(is_subtype_of(TypeOf[A.f], Callable[[A, int], int]))
```

@@ -657,16 +657,14 @@ alice: Employee = {"name": "Alice", "employee_id": 1}
eve: Employee = {"name": "Eve"}

def combine(p: Person, e: Employee):
    # TODO: Should be `Person` once we support the implicit type of self
    reveal_type(p.copy()) # revealed: Unknown
    # TODO: Should be `Employee` once we support the implicit type of self
    reveal_type(e.copy()) # revealed: Unknown
    reveal_type(p.copy()) # revealed: Person
    reveal_type(e.copy()) # revealed: Employee

    reveal_type(p | p) # revealed: Person
    reveal_type(e | e) # revealed: Employee

    # TODO: Should be `Person` once we support the implicit type of self and subtyping for TypedDicts
    reveal_type(p | e) # revealed: Employee
    # TODO: Should be `Person` once we support subtyping for TypedDicts
    reveal_type(p | e) # revealed: Person | Employee
```

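The `copy()` reveals above change because typeshed annotates `TypedDict.copy()` as returning `Self` (stated here as an assumption for context), so binding `Self` to the receiver now yields the concrete `TypedDict` class. A minimal sketch of the same pattern with a hypothetical class:

```py
from typing import TypedDict

class Point(TypedDict):
    x: int

def _(p: Point):
    reveal_type(p.copy())  # assumed: Point, because `copy()` is declared to return `Self`
```
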
When inheriting from a `TypedDict` with a different `total` setting, inherited fields maintain their

@@ -254,8 +254,7 @@ async def long_running_task():

async def main():
    async with asyncio.TaskGroup() as tg:
        # TODO: should be `TaskGroup`
        reveal_type(tg) # revealed: Unknown
        reveal_type(tg) # revealed: TaskGroup

        tg.create_task(long_running_task())
```

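The `tg` reveal changes because `TaskGroup.__aenter__` is declared as returning `Self` in typeshed (an assumption stated here for context), and `Self` is now bound to the instance the context manager is entered on. A stripped-down sketch of the same pattern with a hypothetical class:

```py
from typing import Self

class Ctx:
    async def __aenter__(self) -> Self:
        return self

    async def __aexit__(self, *args: object) -> None: ...

async def main():
    async with Ctx() as c:
        reveal_type(c)  # assumed: Ctx
```
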
@@ -2272,7 +2272,7 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
        // like `sys.exit()`, and not within sub-expression like `3 + sys.exit()` etc.
        //
        // We also only add these inside function scopes, since considering module-level
        // constraints can affect the the type of imported symbols, leading to a lot more
        // constraints can affect the type of imported symbols, leading to a lot more
        // work in third-party code.
        if let ast::Expr::Call(ast::ExprCall { func, .. }) = value.as_ref() {
            if !self.source_type.is_stub() && self.in_function_scope() {

@@ -52,8 +52,8 @@ use crate::types::function::{
    DataclassTransformerParams, FunctionSpans, FunctionType, KnownFunction,
};
use crate::types::generics::{
    GenericContext, PartialSpecialization, Specialization, bind_typevar, walk_generic_context,
    walk_partial_specialization, walk_specialization,
    GenericContext, PartialSpecialization, Specialization, bind_typevar, typing_self,
    walk_generic_context,
};
pub use crate::types::ide_support::{
    CallSignatureDetails, Member, MemberWithDefinition, all_members, call_signature_details,

@@ -1050,6 +1050,13 @@ impl<'db> Type<'db> {
        }
    }

    pub(crate) const fn into_intersection(self) -> Option<IntersectionType<'db>> {
        match self {
            Type::Intersection(intersection_type) => Some(intersection_type),
            _ => None,
        }
    }

    #[cfg(test)]
    #[track_caller]
    pub(crate) fn expect_union(self) -> UnionType<'db> {

@@ -1159,21 +1166,26 @@ impl<'db> Type<'db> {
        }
    }

    /// If this type is a literal, promote it to a type that this literal is an instance of.
    /// Promote (possibly nested) literals to types that these literals are instances of.
    ///
    /// Note that this function tries to promote literals to a more user-friendly form than their
    /// fallback instance type. For example, `def _() -> int` is promoted to `Callable[[], int]`,
    /// as opposed to `FunctionType`.
    pub(crate) fn literal_promotion_type(self, db: &'db dyn Db) -> Option<Type<'db>> {
    pub(crate) fn promote_literals(self, db: &'db dyn Db) -> Type<'db> {
        self.apply_type_mapping(db, &TypeMapping::PromoteLiterals)
    }

    /// Like [`Type::promote_literals`], but does not recurse into nested types.
    fn promote_literals_impl(self, db: &'db dyn Db) -> Type<'db> {
        match self {
            Type::StringLiteral(_) | Type::LiteralString => Some(KnownClass::Str.to_instance(db)),
            Type::BooleanLiteral(_) => Some(KnownClass::Bool.to_instance(db)),
            Type::IntLiteral(_) => Some(KnownClass::Int.to_instance(db)),
            Type::BytesLiteral(_) => Some(KnownClass::Bytes.to_instance(db)),
            Type::ModuleLiteral(_) => Some(KnownClass::ModuleType.to_instance(db)),
            Type::EnumLiteral(literal) => Some(literal.enum_class_instance(db)),
            Type::FunctionLiteral(literal) => Some(Type::Callable(literal.into_callable_type(db))),
            _ => None,
            Type::StringLiteral(_) | Type::LiteralString => KnownClass::Str.to_instance(db),
            Type::BooleanLiteral(_) => KnownClass::Bool.to_instance(db),
            Type::IntLiteral(_) => KnownClass::Int.to_instance(db),
            Type::BytesLiteral(_) => KnownClass::Bytes.to_instance(db),
            Type::ModuleLiteral(_) => KnownClass::ModuleType.to_instance(db),
            Type::EnumLiteral(literal) => literal.enum_class_instance(db),
            Type::FunctionLiteral(literal) => Type::Callable(literal.into_callable_type(db)),
            _ => self,
        }
    }

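A rough Python-level picture of what these match arms do (a sketch; the promoted spellings are assumptions based on the doc comment and the `KnownClass` targets above): each literal type is replaced by an instance type it belongs to, with function literals promoted to the friendlier `Callable` form.

```py
from typing import Callable

def f() -> int:
    return 0

# Assumed promoted forms, mirroring the match arms above:
#   Literal["a"] / LiteralString -> str
#   Literal[True]                -> bool
#   Literal[1]                   -> int
#   Literal[b"x"]                -> bytes
#   a module object              -> types.ModuleType
#   Literal[SomeEnum.MEMBER]     -> SomeEnum
x: Callable[[], int] = f  # function literals promote to `Callable[[], int]`, not `FunctionType`
```
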
@@ -3153,7 +3165,7 @@ impl<'db> Type<'db> {
        );
        match self {
            Type::Callable(callable) if callable.is_function_like(db) => {
                // For "function-like" callables, model the the behavior of `FunctionType.__get__`.
                // For "function-like" callables, model the behavior of `FunctionType.__get__`.
                //
                // It is a shortcut to model this in `try_call_dunder_get`. If we want to be really precise,
                // we should instead return a new method-wrapper type variant for the synthesized `__get__`

@@ -5356,12 +5368,10 @@ impl<'db> Type<'db> {
            Some(generic_context) => (
                Some(class),
                Some(generic_context),
                Type::from(class.apply_specialization(db, |_| {
                    // It is important that identity_specialization specializes the class with
                    // _inferable_ typevars, so that our specialization inference logic will
                    // try to find a specialization for them.
                    generic_context.identity_specialization(db)
                })),
                // It is important that identity_specialization specializes the class with
                // _inferable_ typevars, so that our specialization inference logic will
                // try to find a specialization for them.
                Type::from(class.identity_specialization(db, &Type::TypeVar)),
            ),
            _ => (None, None, self),
        },

@@ -5633,11 +5643,9 @@ impl<'db> Type<'db> {
            Type::KnownInstance(known_instance) => match known_instance {
                KnownInstanceType::TypeAliasType(alias) => Ok(Type::TypeAlias(*alias)),
                KnownInstanceType::TypeVar(typevar) => {
                    let module = parsed_module(db, scope_id.file(db)).load(db);
                    let index = semantic_index(db, scope_id.file(db));
                    Ok(bind_typevar(
                        db,
                        &module,
                        index,
                        scope_id.file_scope_id(db),
                        typevar_binding_context,

@@ -5710,7 +5718,6 @@ impl<'db> Type<'db> {
                    .build()),

                SpecialFormType::TypingSelf => {
                    let module = parsed_module(db, scope_id.file(db)).load(db);
                    let index = semantic_index(db, scope_id.file(db));
                    let Some(class) = nearest_enclosing_class(db, index, scope_id) else {
                        return Err(InvalidTypeExpressionError {

@@ -5721,40 +5728,13 @@ impl<'db> Type<'db> {
                        });
                    };

                    let upper_bound = Type::instance(
                    Ok(typing_self(
                        db,
                        class.apply_specialization(db, |generic_context| {
                            let types = generic_context
                                .variables(db)
                                .iter()
                                .map(|typevar| Type::NonInferableTypeVar(*typevar));

                            generic_context.specialize(db, types.collect())
                        }),
                    );

                    let class_definition = class.definition(db);
                    let typevar = TypeVarInstance::new(
                        db,
                        ast::name::Name::new_static("Self"),
                        Some(class_definition),
                        Some(TypeVarBoundOrConstraints::UpperBound(upper_bound).into()),
                        // According to the [spec], we can consider `Self`
                        // equivalent to an invariant type variable
                        // [spec]: https://typing.python.org/en/latest/spec/generics.html#self
                        Some(TypeVarVariance::Invariant),
                        None,
                        TypeVarKind::TypingSelf,
                    );
                    Ok(bind_typevar(
                        db,
                        &module,
                        index,
                        scope_id.file_scope_id(db),
                        scope_id,
                        typevar_binding_context,
                        typevar,
                        class,
                        &Type::NonInferableTypeVar,
                    )
                    .map(Type::NonInferableTypeVar)
                    .unwrap_or(*self))
                }
                SpecialFormType::TypeAlias => Ok(Type::Dynamic(DynamicType::TodoTypeAlias)),

@@ -6109,19 +6089,18 @@ impl<'db> Type<'db> {
            }

            Type::FunctionLiteral(function) => {
                let function = Type::FunctionLiteral(function.with_type_mapping(db, type_mapping));
                let function = Type::FunctionLiteral(function.apply_type_mapping_impl(db, type_mapping, visitor));

                match type_mapping {
                    TypeMapping::PromoteLiterals => function.literal_promotion_type(db)
                        .expect("function literal should have a promotion type"),
                    TypeMapping::PromoteLiterals => function.promote_literals_impl(db),
                    _ => function
                }
            }

            Type::BoundMethod(method) => Type::BoundMethod(BoundMethodType::new(
                db,
                method.function(db).with_type_mapping(db, type_mapping),
                method.self_instance(db).apply_type_mapping(db, type_mapping),
                method.function(db).apply_type_mapping_impl(db, type_mapping, visitor),
                method.self_instance(db).apply_type_mapping_impl(db, type_mapping, visitor),
            )),

            Type::NominalInstance(instance) =>

@@ -6140,13 +6119,13 @@ impl<'db> Type<'db> {

            Type::KnownBoundMethod(KnownBoundMethodType::FunctionTypeDunderGet(function)) => {
                Type::KnownBoundMethod(KnownBoundMethodType::FunctionTypeDunderGet(
                    function.with_type_mapping(db, type_mapping),
                    function.apply_type_mapping_impl(db, type_mapping, visitor),
                ))
            }

            Type::KnownBoundMethod(KnownBoundMethodType::FunctionTypeDunderCall(function)) => {
                Type::KnownBoundMethod(KnownBoundMethodType::FunctionTypeDunderCall(
                    function.with_type_mapping(db, type_mapping),
                    function.apply_type_mapping_impl(db, type_mapping, visitor),
                ))
            }

@@ -6221,8 +6200,7 @@ impl<'db> Type<'db> {
                TypeMapping::ReplaceSelf { .. } |
                TypeMapping::MarkTypeVarsInferable(_) |
                TypeMapping::Materialize(_) => self,
                TypeMapping::PromoteLiterals => self.literal_promotion_type(db)
                    .expect("literal type should have a promotion type"),
                TypeMapping::PromoteLiterals => self.promote_literals_impl(db)
            }

            Type::Dynamic(_) => match type_mapping {

@@ -6782,84 +6760,7 @@ pub enum TypeMapping<'a, 'db> {
    Materialize(MaterializationKind),
}

fn walk_type_mapping<'db, V: visitor::TypeVisitor<'db> + ?Sized>(
    db: &'db dyn Db,
    mapping: &TypeMapping<'_, 'db>,
    visitor: &V,
) {
    match mapping {
        TypeMapping::Specialization(specialization) => {
            walk_specialization(db, *specialization, visitor);
        }
        TypeMapping::PartialSpecialization(specialization) => {
            walk_partial_specialization(db, specialization, visitor);
        }
        TypeMapping::BindSelf(self_type) => {
            visitor.visit_type(db, *self_type);
        }
        TypeMapping::ReplaceSelf { new_upper_bound } => {
            visitor.visit_type(db, *new_upper_bound);
        }
        TypeMapping::PromoteLiterals
        | TypeMapping::BindLegacyTypevars(_)
        | TypeMapping::MarkTypeVarsInferable(_)
        | TypeMapping::Materialize(_) => {}
    }
}

impl<'db> TypeMapping<'_, 'db> {
    fn to_owned(&self) -> TypeMapping<'db, 'db> {
        match self {
            TypeMapping::Specialization(specialization) => {
                TypeMapping::Specialization(*specialization)
            }
            TypeMapping::PartialSpecialization(partial) => {
                TypeMapping::PartialSpecialization(partial.to_owned())
            }
            TypeMapping::PromoteLiterals => TypeMapping::PromoteLiterals,
            TypeMapping::BindLegacyTypevars(binding_context) => {
                TypeMapping::BindLegacyTypevars(*binding_context)
            }
            TypeMapping::BindSelf(self_type) => TypeMapping::BindSelf(*self_type),
            TypeMapping::ReplaceSelf { new_upper_bound } => TypeMapping::ReplaceSelf {
                new_upper_bound: *new_upper_bound,
            },
            TypeMapping::MarkTypeVarsInferable(binding_context) => {
                TypeMapping::MarkTypeVarsInferable(*binding_context)
            }
            TypeMapping::Materialize(materialization_kind) => {
                TypeMapping::Materialize(*materialization_kind)
            }
        }
    }

    fn normalized_impl(&self, db: &'db dyn Db, visitor: &NormalizedVisitor<'db>) -> Self {
        match self {
            TypeMapping::Specialization(specialization) => {
                TypeMapping::Specialization(specialization.normalized_impl(db, visitor))
            }
            TypeMapping::PartialSpecialization(partial) => {
                TypeMapping::PartialSpecialization(partial.normalized_impl(db, visitor))
            }
            TypeMapping::PromoteLiterals => TypeMapping::PromoteLiterals,
            TypeMapping::BindLegacyTypevars(binding_context) => {
                TypeMapping::BindLegacyTypevars(*binding_context)
            }
            TypeMapping::BindSelf(self_type) => {
                TypeMapping::BindSelf(self_type.normalized_impl(db, visitor))
            }
            TypeMapping::ReplaceSelf { new_upper_bound } => TypeMapping::ReplaceSelf {
                new_upper_bound: new_upper_bound.normalized_impl(db, visitor),
            },
            TypeMapping::MarkTypeVarsInferable(binding_context) => {
                TypeMapping::MarkTypeVarsInferable(*binding_context)
            }
            TypeMapping::Materialize(materialization_kind) => {
                TypeMapping::Materialize(*materialization_kind)
            }
        }
    }

    /// Update the generic context of a [`Signature`] according to the current type mapping
    pub(crate) fn update_signature_generic_context(
        &self,

@@ -7085,7 +6986,11 @@ impl<'db> KnownInstanceType<'db> {
                if let Some(specialization) = alias.specialization(self.db) {
                    f.write_str(alias.name(self.db))?;
                    specialization
                        .display_short(self.db, TupleSpecialization::No)
                        .display_short(
                            self.db,
                            TupleSpecialization::No,
                            DisplaySettings::default(),
                        )
                        .fmt(f)
                } else {
                    f.write_str("typing.TypeAliasType")

@@ -504,9 +504,16 @@ impl<'db> UnionBuilder<'db> {
            if should_simplify_full && !matches!(element_type, Type::TypeAlias(_)) {
                if ty.is_equivalent_to(self.db, element_type)
                    || ty.is_subtype_of(self.db, element_type)
                    || ty.into_intersection().is_some_and(|intersection| {
                        intersection.positive(self.db).contains(&element_type)
                    })
                {
                    return;
                } else if element_type.is_subtype_of(self.db, ty) {
                } else if element_type.is_subtype_of(self.db, ty)
                    || element_type
                        .into_intersection()
                        .is_some_and(|intersection| intersection.positive(self.db).contains(&ty))
                {
                    to_remove.push(index);
                } else if ty_negated.is_subtype_of(self.db, element_type) {
                    // We add `ty` to the union. We just checked that `~ty` is a subtype of an

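A Python-level picture of the simplification these new `into_intersection()` checks enable (a sketch reusing the `ty_extensions` assertions that appear earlier in this diff; the import line is assumed): when one side of a union is an intersection whose positive parts already contain the other side, the intersection is absorbed.

```py
from typing import Any
from ty_extensions import Intersection, Not, is_equivalent_to, static_assert

static_assert(is_equivalent_to(Any, Any | Intersection[Any, str]))
static_assert(is_equivalent_to(Any, Intersection[Not[None], Any] | Any))
```
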