[red-knot] Add initial support for `*` imports (#16923)
## Summary

This PR adds initial support for `*` imports to red-knot. The approach is to implement a standalone query, called from semantic indexing, that visits the module referenced by the `*` import and collects all global-scope public names that will be imported by the `*` import. The `SemanticIndexBuilder` then adds separate definitions for each of these names, all keyed to the same `ast::Alias` node that represents the `*` import.

Many pieces of `*`-import semantics are still to be done, even with this PR:

- This PR does not attempt to implement any of the semantics to do with `__all__`. (If a module defines `__all__`, then only the symbols included in `__all__` are imported, _not_ all public global-scope symbols.)
- With the logic implemented in this PR as it currently stands, we sometimes incorrectly consider a symbol bound even though it is defined in a branch that is statically known to be dead code, e.g. (assuming the target Python version is set to 3.11):

  ```py
  # a.py
  import sys

  if sys.version_info < (3, 10):
      class Foo: ...
  ```

  ```py
  # b.py
  from a import *

  print(Foo)  # this is unbound at runtime on 3.11,
              # but we currently consider it bound with the logic in this PR
  ```

Implementing these features is important, but is for now deferred to followup PRs.

Many thanks to @ntBre, who contributed to this PR in a pairing session on Friday!

## Test Plan

Assertions in existing mdtests are adjusted, and several new ones are added.
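As context for the approach described above, here is a minimal sketch (plain Python, hypothetical module names, no `__all__` defined) of which global-scope names a `*` import binds by default: only public names, i.e. names without a leading underscore.

```py
# lib.py (hypothetical example module)
CONSTANT = 1           # public: bound by `from lib import *`
_helper = 2            # leading underscore: not bound

class Widget: ...      # public: bound

def _internal(): ...   # leading underscore: not bound
```

```py
# app.py
from lib import *

print(CONSTANT, Widget)  # both names were bound by the `*` import
print(_helper)  # NameError at runtime: private names are not re-exported
```

If `lib` defined `__all__`, only the names listed there would be bound instead; as noted above, that filtering is deferred to a follow-up PR.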
This commit is contained in:
parent cba197e3c5
commit e87fee4b3b

17 changed files with 927 additions and 357 deletions
@ -4,8 +4,10 @@ References:
- <https://typing.readthedocs.io/en/latest/spec/callables.html#callable>

TODO: Use `collections.abc` as importing from `typing` is deprecated but this requires support for
`*` imports. See: <https://docs.python.org/3/library/typing.html#deprecated-aliases>.

Note that `typing.Callable` is deprecated at runtime, in favour of `collections.abc.Callable` (see:
<https://docs.python.org/3/library/typing.html#deprecated-aliases>). However, removal of
`typing.Callable` is not currently planned, and the canonical location of the stub for the symbol in
typeshed is still `typing.pyi`.

## Invalid forms
@ -81,8 +81,7 @@ reveal_type(DictSubclass.__mro__)
|
|||
|
||||
class SetSubclass(typing.Set): ...
|
||||
|
||||
# TODO: should have `Generic`, should not have `Unknown`
|
||||
# revealed: tuple[Literal[SetSubclass], Literal[set], Unknown, Literal[object]]
|
||||
# revealed: tuple[Literal[SetSubclass], Literal[set], Literal[MutableSet], Literal[AbstractSet], Literal[Collection], Literal[Iterable], Literal[Container], @Todo(protocol), Literal[object]]
|
||||
reveal_type(SetSubclass.__mro__)
|
||||
|
||||
class FrozenSetSubclass(typing.FrozenSet): ...
|
||||
|
@ -114,8 +113,7 @@ reveal_type(DefaultDictSubclass.__mro__)
|
|||
|
||||
class DequeSubclass(typing.Deque): ...
|
||||
|
||||
# TODO: Should be (DequeSubclass, deque, MutableSequence, Sequence, Reversible, Collection, Sized, Iterable, Container, Generic, object)
|
||||
# revealed: tuple[Literal[DequeSubclass], Literal[deque], Unknown, Literal[object]]
|
||||
# revealed: tuple[Literal[DequeSubclass], Literal[deque], Literal[MutableSequence], Literal[Sequence], Literal[Reversible], Literal[Collection], Literal[Iterable], Literal[Container], @Todo(protocol), Literal[object]]
|
||||
reveal_type(DequeSubclass.__mro__)
|
||||
|
||||
class OrderedDictSubclass(typing.OrderedDict): ...
|
||||
|
|
|
@ -551,6 +551,7 @@ reveal_type(C().x) # revealed: str
|
|||
class C:
|
||||
def __init__(self) -> None:
|
||||
# error: [too-many-positional-arguments]
|
||||
# error: [invalid-argument-type]
|
||||
self.x: int = len(1, 2, 3)
|
||||
```
|
||||
|
||||
|
|
|
@ -229,6 +229,6 @@ reveal_type(len(SecondRequiredArgument())) # revealed: Literal[1]
|
|||
```py
|
||||
class NoDunderLen: ...
|
||||
|
||||
# TODO: Emit a diagnostic
|
||||
# error: [invalid-argument-type]
|
||||
reveal_type(len(NoDunderLen())) # revealed: int
|
||||
```
|
||||
|
|
|
@ -17,10 +17,7 @@ X: bool = True
|
|||
```py
|
||||
from a import *
|
||||
|
||||
# TODO: should not error, should be `bool`
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(X) # revealed: Unknown
|
||||
|
||||
reveal_type(X) # revealed: bool
|
||||
print(Y) # error: [unresolved-reference]
|
||||
```
|
||||
|
||||
|
@ -40,8 +37,7 @@ reveal_type(X) # revealed: Literal[42]
|
|||
|
||||
from a import *
|
||||
|
||||
# TODO: should reveal `bool`
|
||||
reveal_type(X) # revealed: Literal[42]
|
||||
reveal_type(X) # revealed: bool
|
||||
```
|
||||
|
||||
### Overridden by later definition
|
||||
|
@ -57,12 +53,9 @@ X: bool = True
|
|||
```py
|
||||
from a import *
|
||||
|
||||
# TODO: should not error, should reveal `bool`
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(X) # revealed: Unknown
|
||||
|
||||
X = 42
|
||||
reveal_type(X) # revealed: Literal[42]
|
||||
reveal_type(X) # revealed: bool
|
||||
X = False
|
||||
reveal_type(X) # revealed: Literal[False]
|
||||
```
|
||||
|
||||
### Reaching across many modules
|
||||
|
@ -90,9 +83,7 @@ from b import *
|
|||
```py
|
||||
from c import *
|
||||
|
||||
# TODO: should not error, should reveal `bool`
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(X) # revealed: Unknown
|
||||
reveal_type(X) # revealed: bool
|
||||
```
|
||||
|
||||
### A wildcard import constitutes a re-export
|
||||
|
@ -120,8 +111,7 @@ from b import Y
|
|||
|
||||
```py
|
||||
# `X` is accessible because the `*` import in `c` re-exports it from `c`
|
||||
# TODO: should not error
|
||||
from c import X # error: [unresolved-import]
|
||||
from c import X
|
||||
|
||||
# but `Y` is not because the `from b import Y` import does *not* constitute a re-export
|
||||
from c import Y # error: [unresolved-import]
|
||||
|
@ -140,12 +130,8 @@ X = (Y := 3) + 4
|
|||
```py
|
||||
from a import *
|
||||
|
||||
# TODO should not error, should reveal `Literal[7] | Unknown`
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(X) # revealed: Unknown
|
||||
# TODO should not error, should reveal `Literal[3] | Unknown`
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(Y) # revealed: Unknown
|
||||
reveal_type(X) # revealed: Unknown | Literal[7]
|
||||
reveal_type(Y) # revealed: Unknown | Literal[3]
|
||||
```
|
||||
|
||||
### Global-scope symbols defined in many other ways
|
||||
|
@ -200,52 +186,29 @@ from a import *
|
|||
# fmt: off
|
||||
|
||||
print((
|
||||
# TODO: false positive
|
||||
A, # error: [unresolved-reference]
|
||||
# TODO: false positive
|
||||
B, # error: [unresolved-reference]
|
||||
# TODO: false positive
|
||||
C, # error: [unresolved-reference]
|
||||
# TODO: false positive
|
||||
D, # error: [unresolved-reference]
|
||||
# TODO: false positive
|
||||
E, # error: [unresolved-reference]
|
||||
# TODO: false positive
|
||||
F, # error: [unresolved-reference]
|
||||
# TODO: could emit diagnostic about being possibly unbound, but this is a false positive
|
||||
G, # error: [unresolved-reference] "Name `G` used when not defined"
|
||||
# TODO: false positive
|
||||
H, # error: [unresolved-reference]
|
||||
# TODO: false positive
|
||||
I, # error: [unresolved-reference]
|
||||
# TODO: false positive
|
||||
J, # error: [unresolved-reference]
|
||||
# TODO: false positive
|
||||
K, # error: [unresolved-reference]
|
||||
# TODO: false positive
|
||||
L, # error: [unresolved-reference]
|
||||
# TODO: could emit diagnostic about being possibly unbound, but this is a false positive
|
||||
M, # error: [unresolved-reference] "Name `M` used when not defined"
|
||||
# TODO: could emit diagnostic about being possibly unbound, but this is a false positive
|
||||
N, # error: [unresolved-reference] "Name `N` used when not defined"
|
||||
# TODO: could emit diagnostic about being possibly unbound, but this is a false positive
|
||||
O, # error: [unresolved-reference] "Name `O` used when not defined"
|
||||
# TODO: could emit diagnostic about being possibly unbound, but this is a false positive
|
||||
P, # error: [unresolved-reference] "Name `P` used when not defined"
|
||||
# TODO: could emit diagnostic about being possibly unbound, but this is a false positive
|
||||
Q, # error: [unresolved-reference] "Name `Q` used when not defined"
|
||||
# TODO: could emit diagnostic about being possibly unbound, but this is a false positive
|
||||
R, # error: [unresolved-reference] "Name `R` used when not defined"
|
||||
# TODO: could emit diagnostic about being possibly unbound, but this is a false positive
|
||||
S, # error: [unresolved-reference] "Name `S` used when not defined"
|
||||
# TODO: could emit diagnostic about being possibly unbound, but this is a false positive
|
||||
T, # error: [unresolved-reference] "Name `T` used when not defined"
|
||||
# TODO: false positive
|
||||
typing, # error: [unresolved-reference]
|
||||
# TODO: false positive
|
||||
OrderedDict, # error: [unresolved-reference]
|
||||
# TODO: false positive
|
||||
Foo, # error: [unresolved-reference]
|
||||
A,
|
||||
B,
|
||||
C,
|
||||
D,
|
||||
E,
|
||||
F,
|
||||
G, # TODO: could emit diagnostic about being possibly unbound
|
||||
H,
|
||||
I,
|
||||
J,
|
||||
K,
|
||||
L,
|
||||
M, # TODO: could emit diagnostic about being possibly unbound
|
||||
N, # TODO: could emit diagnostic about being possibly unbound
|
||||
O, # TODO: could emit diagnostic about being possibly unbound
|
||||
P, # TODO: could emit diagnostic about being possibly unbound
|
||||
Q, # TODO: could emit diagnostic about being possibly unbound
|
||||
R, # TODO: could emit diagnostic about being possibly unbound
|
||||
S, # TODO: could emit diagnostic about being possibly unbound
|
||||
T, # TODO: could emit diagnostic about being possibly unbound
|
||||
typing,
|
||||
OrderedDict,
|
||||
Foo,
|
||||
))
|
||||
```
|
||||
|
||||
|
@ -275,6 +238,14 @@ lambda e: (f := 42)
|
|||
[(g := h * 2) for h in Iterable()]
|
||||
[i for j in Iterable() if (i := j - 10) > 0]
|
||||
{(k := l * 2): (m := l * 3) for l in Iterable()}
|
||||
list(((o := p * 2) for p in Iterable()))
|
||||
|
||||
# A walrus expression nested inside several scopes *still* leaks out
|
||||
# to the global scope:
|
||||
[[[[(q := r) for r in Iterable()]] for _ in range(42)] for _ in range(42)]
|
||||
|
||||
# A walrus inside a lambda inside a comprehension does not leak out
|
||||
[(lambda s=s: (t := 42))() for s in Iterable()]
|
||||
```
|
||||
|
||||
`b.py`:
|
||||
|
@ -298,20 +269,25 @@ reveal_type(f) # revealed: Unknown
|
|||
reveal_type(h) # revealed: Unknown
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(j) # revealed: Unknown
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(p) # revealed: Unknown
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(r) # revealed: Unknown
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(s) # revealed: Unknown
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(t) # revealed: Unknown
|
||||
|
||||
# TODO: these should all reveal `Unknown | int` and should not have diagnostics.
|
||||
# TODO: these should all reveal `Unknown | int`.
|
||||
# (We don't generally model elsewhere in red-knot that bindings from walruses
|
||||
# "leak" from comprehension scopes into outer scopes, but we should.)
|
||||
# See https://github.com/astral-sh/ruff/issues/16954
|
||||
#
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(g) # revealed: Unknown
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(i) # revealed: Unknown
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(k) # revealed: Unknown
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(m) # revealed: Unknown
|
||||
reveal_type(o) # revealed: Unknown
|
||||
reveal_type(q) # revealed: Unknown
|
||||
```
|
||||
|
||||
### An annotation without a value is a definition in a stub but not a `.py` file
|
||||
|
@ -334,11 +310,7 @@ Y: bool
|
|||
from a import *
|
||||
from b import *
|
||||
|
||||
# TODO: this is a false positive, should reveal `bool`
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(X) # revealed: Unknown
|
||||
|
||||
# but this diagnostic is accurate!
|
||||
reveal_type(X) # revealed: bool
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(Y) # revealed: Unknown
|
||||
```
|
||||
|
@ -364,8 +336,6 @@ Y: bool = True
|
|||
```py
|
||||
from a import *
|
||||
|
||||
# These errors are correct:
|
||||
#
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(_private) # revealed: Unknown
|
||||
# error: [unresolved-reference]
|
||||
|
@ -375,10 +345,7 @@ reveal_type(__dunder__) # revealed: Unknown
|
|||
# error: [unresolved-reference]
|
||||
reveal_type(___thunder___) # revealed: Unknown
|
||||
|
||||
# TODO: this error is incorrect (should reveal `bool`):
|
||||
#
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(Y) # revealed: Unknown
|
||||
reveal_type(Y) # revealed: bool
|
||||
```
|
||||
|
||||
### All public symbols are considered re-exported from `.py` files
|
||||
|
@ -407,11 +374,8 @@ from a import X
|
|||
```py
|
||||
from b import *
|
||||
|
||||
# TODO: this is a false positive, but we could consider a different opt-in diagnostic
|
||||
# (see prose commentary above)
|
||||
#
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(X) # revealed: Unknown
|
||||
# TODO: we could consider an opt-in diagnostic (see prose commentary above)
|
||||
reveal_type(X) # revealed: bool
|
||||
```
|
||||
|
||||
### Only explicit re-exports are considered re-exported from `.pyi` files
|
||||
|
@ -423,12 +387,13 @@ For `.pyi` files, we should consider all imports private to the stub unless they
|
|||
|
||||
```pyi
|
||||
X: bool = True
|
||||
Y: bool = True
|
||||
```
|
||||
|
||||
`b.pyi`:
|
||||
|
||||
```pyi
|
||||
from a import X
|
||||
from a import X, Y as Y
|
||||
```
|
||||
|
||||
`c.py`:
|
||||
|
@ -440,6 +405,8 @@ from b import *
|
|||
#
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(X) # revealed: Unknown
|
||||
|
||||
reveal_type(Y) # revealed: bool
|
||||
```
|
||||
|
||||
### Symbols in statically known branches
|
||||
|
@ -468,18 +435,17 @@ Z: bool = True
|
|||
|
||||
from a import *
|
||||
|
||||
# TODO should not error, should reveal `bool`
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(X) # revealed: Unknown
|
||||
reveal_type(X) # revealed: bool
|
||||
|
||||
# error: [unresolved-reference]
|
||||
# TODO: should emit error: [unresolved-reference]
|
||||
reveal_type(Y) # revealed: Unknown
|
||||
|
||||
# The `*` import should not be considered a redefinition
|
||||
# TODO: The `*` import should not be considered a redefinition
|
||||
# of the global variable in this module, as the symbol in
|
||||
# the `a` module is in a branch that is statically known
|
||||
# to be dead code given the `python-version` configuration.
|
||||
reveal_type(Z) # revealed: Literal[True]
|
||||
# Thus this should reveal `Literal[True]`.
|
||||
reveal_type(Z) # revealed: Unknown
|
||||
```
|
||||
|
||||
### Relative `*` imports
|
||||
|
@ -502,9 +468,7 @@ X: bool = True
|
|||
```py
|
||||
from .foo import *
|
||||
|
||||
# TODO should not error, should reveal `bool`
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(X) # revealed: Unknown
|
||||
reveal_type(X) # revealed: bool
|
||||
```
|
||||
|
||||
## Star imports with `__all__`
|
||||
|
@ -533,10 +497,10 @@ Y: bool = False
|
|||
```py
|
||||
from a import *
|
||||
|
||||
reveal_type(X) # revealed: bool
|
||||
|
||||
# TODO none of these should error, should all reveal `bool`
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(X) # revealed: Unknown
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(_private) # revealed: Unknown
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(__protected) # revealed: Unknown
|
||||
|
@ -545,10 +509,8 @@ reveal_type(__dunder__) # revealed: Unknown
|
|||
# error: [unresolved-reference]
|
||||
reveal_type(___thunder___) # revealed: Unknown
|
||||
|
||||
# but this diagnostic is accurate!
|
||||
#
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(Y) # revealed: Unknown
|
||||
# TODO: should emit [unresolved-reference] diagnostic & reveal `Unknown`
|
||||
reveal_type(Y) # revealed: bool
|
||||
```
|
||||
|
||||
### Simple list `__all__`
|
||||
|
@ -567,12 +529,10 @@ Y: bool = False
|
|||
```py
|
||||
from a import *
|
||||
|
||||
# TODO should not error, should reveal `bool`
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(X) # revealed: Unknown
|
||||
reveal_type(X) # revealed: bool
|
||||
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(Y) # revealed: Unknown
|
||||
# TODO: should emit [unresolved-reference] diagnostic & reveal `Unknown`
|
||||
reveal_type(Y) # revealed: bool
|
||||
```
|
||||
|
||||
### `__all__` with additions later on in the global scope
|
||||
|
@ -613,22 +573,15 @@ F: bool = False
|
|||
```py
|
||||
from b import *
|
||||
|
||||
# TODO none of these should error, they should all reveal `bool`
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(A) # revealed: Unknown
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(B) # revealed: Unknown
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(C) # revealed: Unknown
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(D) # revealed: Unknown
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(E) # revealed: Unknown
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(FOO) # revealed: Unknown
|
||||
reveal_type(A) # revealed: bool
|
||||
reveal_type(B) # revealed: bool
|
||||
reveal_type(C) # revealed: bool
|
||||
reveal_type(D) # revealed: bool
|
||||
reveal_type(E) # revealed: bool
|
||||
reveal_type(FOO) # revealed: bool
|
||||
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(F) # revealed: Unknown
|
||||
# TODO should error with [unresolved-reference] & reveal `Unknown`
|
||||
reveal_type(F) # revealed: bool
|
||||
```
|
||||
|
||||
### `__all__` with subtractions later on in the global scope
|
||||
|
@ -651,12 +604,10 @@ B: bool = True
|
|||
```py
|
||||
from a import *
|
||||
|
||||
# TODO should not error, should reveal `bool`
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(A) # revealed: Unknown
|
||||
reveal_type(A) # revealed: bool
|
||||
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(B) # revealed: Unknown
|
||||
# TODO should emit an [unresolved-reference] diagnostic & reveal `Unknown`
|
||||
reveal_type(B) # revealed: bool
|
||||
```
|
||||
|
||||
### Invalid `__all__`
|
||||
|
@ -712,16 +663,11 @@ __all__ = [f()]
|
|||
```py
|
||||
from a import *
|
||||
|
||||
# TODO: we should avoid both errors here.
|
||||
#
|
||||
# At runtime, `f` is imported but `g` is not; to avoid false positives, however,
|
||||
# we should treat `a` as though it does not have `__all__` at all,
|
||||
# we treat `a` as though it does not have `__all__` at all,
|
||||
# which would imply that both symbols would be present.
|
||||
#
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(f) # revealed: Unknown
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(g) # revealed: Unknown
|
||||
reveal_type(f) # revealed: Literal[f]
|
||||
reveal_type(g) # revealed: Literal[g]
|
||||
```
|
||||
|
||||
### `__all__` conditionally defined in a statically known branch
|
||||
|
@ -751,13 +697,10 @@ else:
|
|||
```py
|
||||
from a import *
|
||||
|
||||
# TODO neither should error, both should be `bool`
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(X) # revealed: Unknown
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(Y) # revealed: Unknown
|
||||
reveal_type(X) # revealed: bool
|
||||
reveal_type(Y) # revealed: bool
|
||||
|
||||
# error: [unresolved-reference]
|
||||
# TODO: should error with [unresolved-reference]
|
||||
reveal_type(Z) # revealed: Unknown
|
||||
```
|
||||
|
||||
|
@ -789,13 +732,10 @@ else:
|
|||
```py
|
||||
from a import *
|
||||
|
||||
# TODO neither should error, both should be `bool`
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(X) # revealed: Unknown
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(Y) # revealed: Unknown
|
||||
reveal_type(X) # revealed: bool
|
||||
reveal_type(Y) # revealed: bool
|
||||
|
||||
# error: [unresolved-reference]
|
||||
# TODO should have an [unresolved-reference] diagnostic
|
||||
reveal_type(Z) # revealed: Unknown
|
||||
```
|
||||
|
||||
|
@ -826,10 +766,9 @@ __all__ = []
|
|||
from a import *
|
||||
from b import *
|
||||
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(X) # revealed: Unknown
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(Y) # revealed: Unknown
|
||||
# TODO: both of these should have [unresolved-reference] diagnostics and reveal `Unknown`
|
||||
reveal_type(X) # revealed: bool
|
||||
reveal_type(Y) # revealed: bool
|
||||
```
|
||||
|
||||
### `__all__` in a stub file
|
||||
|
@ -857,13 +796,10 @@ __all__ = ["X"]
|
|||
```py
|
||||
from b import *
|
||||
|
||||
# TODO: should not error, should reveal `bool`
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(X) # revealed: Unknown
|
||||
reveal_type(X) # revealed: bool
|
||||
|
||||
# this error is correct:
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(Y) # revealed: Unknown
|
||||
# TODO this should have an [unresolved-reference] diagnostic and reveal `Unknown`
|
||||
reveal_type(Y) # revealed: bool
|
||||
```
|
||||
|
||||
## `global` statements in non-global scopes
|
||||
|
@ -887,9 +823,7 @@ f()
|
|||
```py
|
||||
from a import *
|
||||
|
||||
# TODO: false positive, should be `Literal[f]` with no diagnostic
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(f) # revealed: Unknown
|
||||
reveal_type(f) # revealed: Literal[f]
|
||||
|
||||
# TODO: false positive, should be `bool` with no diagnostic
|
||||
# error: [unresolved-reference]
|
||||
|
@ -909,11 +843,8 @@ are present due to `*` imports.
|
|||
import typing
|
||||
import collections.abc
|
||||
|
||||
# TODO these should not error, should not reveal `Unknown`
|
||||
# error: [unresolved-attribute]
|
||||
reveal_type(collections.abc.Sequence) # revealed: Unknown
|
||||
# error: [unresolved-attribute]
|
||||
reveal_type(collections.abc.Callable) # revealed: Unknown
|
||||
reveal_type(collections.abc.Sequence) # revealed: Literal[Sequence]
|
||||
reveal_type(collections.abc.Callable) # revealed: typing.Callable
|
||||
```
|
||||
|
||||
## Invalid `*` imports
|
||||
|
@ -968,11 +899,8 @@ from a import *, _Y # error: [invalid-syntax]
|
|||
|
||||
# The import statement above is invalid syntax,
|
||||
# but it's pretty obvious that the user wanted to do a `*` import,
|
||||
# so we should import all public names from `a` anyway, to minimize cascading errors
|
||||
#
|
||||
# TODO: get rid of this error, reveal `bool`
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(X) # revealed: Unknown
|
||||
# so we import all public names from `a` anyway, to minimize cascading errors
|
||||
reveal_type(X) # revealed: bool
|
||||
reveal_type(_Y) # revealed: bool
|
||||
```
|
||||
|
||||
|
|
|
@ -12,10 +12,7 @@ reveal_type(__file__) # revealed: str | None
|
|||
reveal_type(__loader__) # revealed: LoaderProtocol | None
|
||||
reveal_type(__package__) # revealed: str | None
|
||||
reveal_type(__doc__) # revealed: str | None
|
||||
|
||||
# TODO: Should be `ModuleSpec | None`
|
||||
# (needs support for `*` imports)
|
||||
reveal_type(__spec__) # revealed: Unknown | None
|
||||
reveal_type(__spec__) # revealed: ModuleSpec | None
|
||||
|
||||
reveal_type(__path__) # revealed: @Todo(generics)
|
||||
|
||||
|
|
|
@ -1,10 +1,15 @@
|
|||
use std::fmt;
|
||||
use std::num::NonZeroU32;
|
||||
use std::ops::Deref;
|
||||
|
||||
use compact_str::{CompactString, ToCompactString};
|
||||
|
||||
use ruff_db::files::File;
|
||||
use ruff_python_ast as ast;
|
||||
use ruff_python_stdlib::identifiers::is_identifier;
|
||||
|
||||
use crate::{db::Db, module_resolver::file_to_module};
|
||||
|
||||
/// A module name, e.g. `foo.bar`.
|
||||
///
|
||||
/// Always normalized to the absolute form (never a relative module name, i.e., never `.foo`).
|
||||
|
@ -206,6 +211,29 @@ impl ModuleName {
|
|||
pub fn ancestors(&self) -> impl Iterator<Item = Self> {
|
||||
std::iter::successors(Some(self.clone()), Self::parent)
|
||||
}
|
||||
|
||||
pub(crate) fn from_import_statement<'db>(
|
||||
db: &'db dyn Db,
|
||||
importing_file: File,
|
||||
node: &'db ast::StmtImportFrom,
|
||||
) -> Result<Self, ModuleNameResolutionError> {
|
||||
let ast::StmtImportFrom {
|
||||
module,
|
||||
level,
|
||||
names: _,
|
||||
range: _,
|
||||
} = node;
|
||||
|
||||
let module = module.as_deref();
|
||||
|
||||
if let Some(level) = NonZeroU32::new(*level) {
|
||||
relative_module_name(db, importing_file, module, level)
|
||||
} else {
|
||||
module
|
||||
.and_then(Self::new)
|
||||
.ok_or(ModuleNameResolutionError::InvalidSyntax)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for ModuleName {
|
||||
|
@ -234,3 +262,58 @@ impl std::fmt::Display for ModuleName {
|
|||
f.write_str(&self.0)
|
||||
}
|
||||
}
|
||||
|
||||
/// Given a `from .foo import bar` relative import, resolve the relative module
|
||||
/// we're importing `bar` from into an absolute [`ModuleName`]
|
||||
/// using the name of the module we're currently analyzing.
|
||||
///
|
||||
/// - `level` is the number of dots at the beginning of the relative module name:
|
||||
/// - `from .foo.bar import baz` => `level == 1`
|
||||
/// - `from ...foo.bar import baz` => `level == 3`
|
||||
/// - `tail` is the relative module name stripped of all leading dots:
|
||||
/// - `from .foo import bar` => `tail == "foo"`
|
||||
/// - `from ..foo.bar import baz` => `tail == "foo.bar"`
|
||||
fn relative_module_name(
|
||||
db: &dyn Db,
|
||||
importing_file: File,
|
||||
tail: Option<&str>,
|
||||
level: NonZeroU32,
|
||||
) -> Result<ModuleName, ModuleNameResolutionError> {
|
||||
let module = file_to_module(db, importing_file)
|
||||
.ok_or(ModuleNameResolutionError::UnknownCurrentModule)?;
|
||||
let mut level = level.get();
|
||||
|
||||
if module.kind().is_package() {
|
||||
level = level.saturating_sub(1);
|
||||
}
|
||||
|
||||
let mut module_name = module
|
||||
.name()
|
||||
.ancestors()
|
||||
.nth(level as usize)
|
||||
.ok_or(ModuleNameResolutionError::TooManyDots)?;
|
||||
|
||||
if let Some(tail) = tail {
|
||||
let tail = ModuleName::new(tail).ok_or(ModuleNameResolutionError::InvalidSyntax)?;
|
||||
module_name.extend(&tail);
|
||||
}
|
||||
|
||||
Ok(module_name)
|
||||
}
|
||||
|
||||
/// Various ways in which resolving a [`ModuleName`]
|
||||
/// from an [`ast::StmtImport`] or [`ast::StmtImportFrom`] node might fail
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
|
||||
pub(crate) enum ModuleNameResolutionError {
|
||||
/// The import statement has invalid syntax
|
||||
InvalidSyntax,
|
||||
|
||||
/// We couldn't resolve the file we're currently analyzing back to a module
|
||||
/// (Only necessary for relative import statements)
|
||||
UnknownCurrentModule,
|
||||
|
||||
/// The relative import statement seems to take us outside of the module search path
|
||||
/// (e.g. our current module is `foo.bar`, and the relative import statement in `foo.bar`
|
||||
/// is `from ....baz import spam`)
|
||||
TooManyDots,
|
||||
}
|
||||
|
|
|
@ -14,7 +14,7 @@ use crate::semantic_index::ast_ids::node_key::ExpressionNodeKey;
|
|||
use crate::semantic_index::ast_ids::AstIds;
|
||||
use crate::semantic_index::attribute_assignment::AttributeAssignments;
|
||||
use crate::semantic_index::builder::SemanticIndexBuilder;
|
||||
use crate::semantic_index::definition::{Definition, DefinitionNodeKey};
|
||||
use crate::semantic_index::definition::{Definition, DefinitionNodeKey, Definitions};
|
||||
use crate::semantic_index::expression::Expression;
|
||||
use crate::semantic_index::symbol::{
|
||||
FileScopeId, NodeWithScopeKey, NodeWithScopeRef, Scope, ScopeId, ScopedSymbolId, SymbolTable,
|
||||
|
@ -29,6 +29,7 @@ pub mod definition;
|
|||
pub mod expression;
|
||||
mod narrowing_constraints;
|
||||
pub(crate) mod predicate;
|
||||
mod re_exports;
|
||||
pub mod symbol;
|
||||
mod use_def;
|
||||
mod visibility_constraints;
|
||||
|
@ -136,7 +137,7 @@ pub(crate) struct SemanticIndex<'db> {
|
|||
scopes_by_expression: FxHashMap<ExpressionNodeKey, FileScopeId>,
|
||||
|
||||
/// Map from a node creating a definition to its definition.
|
||||
definitions_by_node: FxHashMap<DefinitionNodeKey, Definition<'db>>,
|
||||
definitions_by_node: FxHashMap<DefinitionNodeKey, Definitions<'db>>,
|
||||
|
||||
/// Map from a standalone expression to its [`Expression`] ingredient.
|
||||
expressions_by_node: FxHashMap<ExpressionNodeKey, Expression<'db>>,
|
||||
|
@ -250,13 +251,37 @@ impl<'db> SemanticIndex<'db> {
|
|||
AncestorsIter::new(self, scope)
|
||||
}
|
||||
|
||||
/// Returns the [`Definition`] salsa ingredient for `definition_key`.
|
||||
/// Returns the [`definition::Definition`] salsa ingredient(s) for `definition_key`.
|
||||
///
|
||||
/// There will only ever be >1 `Definition` associated with a `definition_key`
|
||||
/// if the definition is created by a wildcard (`*`) import.
|
||||
#[track_caller]
|
||||
pub(crate) fn definition(
|
||||
pub(crate) fn definitions(
|
||||
&self,
|
||||
definition_key: impl Into<DefinitionNodeKey>,
|
||||
) -> &Definitions<'db> {
|
||||
&self.definitions_by_node[&definition_key.into()]
|
||||
}
|
||||
|
||||
/// Returns the [`definition::Definition`] salsa ingredient for `definition_key`.
|
||||
///
|
||||
/// ## Panics
|
||||
///
|
||||
/// If the number of definitions associated with the key is not exactly 1 and
|
||||
/// the `debug_assertions` feature is enabled, this method will panic.
|
||||
#[track_caller]
|
||||
pub(crate) fn expect_single_definition(
|
||||
&self,
|
||||
definition_key: impl Into<DefinitionNodeKey> + std::fmt::Debug + Copy,
|
||||
) -> Definition<'db> {
|
||||
self.definitions_by_node[&definition_key.into()]
|
||||
let definitions = self.definitions(definition_key);
|
||||
debug_assert_eq!(
|
||||
definitions.len(),
|
||||
1,
|
||||
"Expected exactly one definition to be associated with AST node {definition_key:?} but found {}",
|
||||
definitions.len()
|
||||
);
|
||||
definitions[0]
|
||||
}
|
||||
|
||||
/// Returns the [`Expression`] ingredient for an expression node.
|
||||
|
@ -280,7 +305,8 @@ impl<'db> SemanticIndex<'db> {
|
|||
.copied()
|
||||
}
|
||||
|
||||
/// Returns the id of the scope that `node` creates. This is different from [`Definition::scope`] which
|
||||
/// Returns the id of the scope that `node` creates.
|
||||
/// This is different from [`definition::Definition::scope`] which
|
||||
/// returns the scope in which that definition is defined.
|
||||
#[track_caller]
|
||||
pub(crate) fn node_scope(&self, node: NodeWithScopeRef) -> FileScopeId {
|
||||
|
|
|
@ -12,19 +12,21 @@ use ruff_python_ast::{self as ast, ExprContext};
|
|||
|
||||
use crate::ast_node_ref::AstNodeRef;
|
||||
use crate::module_name::ModuleName;
|
||||
use crate::module_resolver::resolve_module;
|
||||
use crate::semantic_index::ast_ids::node_key::ExpressionNodeKey;
|
||||
use crate::semantic_index::ast_ids::AstIdsBuilder;
|
||||
use crate::semantic_index::attribute_assignment::{AttributeAssignment, AttributeAssignments};
|
||||
use crate::semantic_index::definition::{
|
||||
AssignmentDefinitionNodeRef, ComprehensionDefinitionNodeRef, Definition, DefinitionCategory,
|
||||
DefinitionNodeKey, DefinitionNodeRef, ExceptHandlerDefinitionNodeRef, ForStmtDefinitionNodeRef,
|
||||
ImportDefinitionNodeRef, ImportFromDefinitionNodeRef, MatchPatternDefinitionNodeRef,
|
||||
WithItemDefinitionNodeRef,
|
||||
DefinitionNodeKey, DefinitionNodeRef, Definitions, ExceptHandlerDefinitionNodeRef,
|
||||
ForStmtDefinitionNodeRef, ImportDefinitionNodeRef, ImportFromDefinitionNodeRef,
|
||||
MatchPatternDefinitionNodeRef, StarImportDefinitionNodeRef, WithItemDefinitionNodeRef,
|
||||
};
|
||||
use crate::semantic_index::expression::{Expression, ExpressionKind};
|
||||
use crate::semantic_index::predicate::{
|
||||
PatternPredicate, PatternPredicateKind, Predicate, PredicateNode, ScopedPredicateId,
|
||||
};
|
||||
use crate::semantic_index::re_exports::exported_names;
|
||||
use crate::semantic_index::symbol::{
|
||||
FileScopeId, NodeWithScopeKey, NodeWithScopeRef, Scope, ScopeId, ScopeKind, ScopedSymbolId,
|
||||
SymbolTableBuilder,
|
||||
|
@ -87,7 +89,7 @@ pub(super) struct SemanticIndexBuilder<'db> {
|
|||
use_def_maps: IndexVec<FileScopeId, UseDefMapBuilder<'db>>,
|
||||
scopes_by_node: FxHashMap<NodeWithScopeKey, FileScopeId>,
|
||||
scopes_by_expression: FxHashMap<ExpressionNodeKey, FileScopeId>,
|
||||
definitions_by_node: FxHashMap<DefinitionNodeKey, Definition<'db>>,
|
||||
definitions_by_node: FxHashMap<DefinitionNodeKey, Definitions<'db>>,
|
||||
expressions_by_node: FxHashMap<ExpressionNodeKey, Expression<'db>>,
|
||||
imported_modules: FxHashSet<ModuleName>,
|
||||
attribute_assignments: FxHashMap<FileScopeId, AttributeAssignments<'db>>,
|
||||
|
@ -147,6 +149,10 @@ impl<'db> SemanticIndexBuilder<'db> {
|
|||
self.current_scope_info().file_scope_id
|
||||
}
|
||||
|
||||
fn current_scope_is_global_scope(&self) -> bool {
|
||||
self.scope_stack.len() == 1
|
||||
}
|
||||
|
||||
/// Returns the scope ID of the surrounding class body scope if the current scope
|
||||
/// is a method inside a class body. Returns `None` otherwise, e.g. if the current
|
||||
/// scope is a function body outside of a class, or if the current scope is not a
|
||||
|
@ -344,17 +350,55 @@ impl<'db> SemanticIndexBuilder<'db> {
|
|||
self.current_symbol_table().mark_symbol_used(id);
|
||||
}
|
||||
|
||||
fn add_entry_for_definition_key(&mut self, key: DefinitionNodeKey) -> &mut Definitions<'db> {
|
||||
self.definitions_by_node.entry(key).or_default()
|
||||
}
|
||||
|
||||
/// Add a [`Definition`] associated with the `definition_node` AST node.
|
||||
///
|
||||
/// ## Panics
|
||||
///
|
||||
/// This method panics if `debug_assertions` are enabled and the `definition_node` AST node
|
||||
/// already has a [`Definition`] associated with it. This is an important invariant to maintain
|
||||
/// for all nodes *except* [`ast::Alias`] nodes representing `*` imports.
|
||||
fn add_definition(
|
||||
&mut self,
|
||||
symbol: ScopedSymbolId,
|
||||
definition_node: impl Into<DefinitionNodeRef<'db>>,
|
||||
definition_node: impl Into<DefinitionNodeRef<'db>> + std::fmt::Debug + Copy,
|
||||
) -> Definition<'db> {
|
||||
let (definition, num_definitions) =
|
||||
self.push_additional_definition(symbol, definition_node);
|
||||
debug_assert_eq!(
|
||||
num_definitions,
|
||||
1,
|
||||
"Attempted to create multiple `Definition`s associated with AST node {definition_node:?}"
|
||||
);
|
||||
definition
|
||||
}
|
||||
|
||||
/// Push a new [`Definition`] onto the list of definitions
|
||||
/// associated with the `definition_node` AST node.
|
||||
///
|
||||
/// Returns a 2-element tuple, where the first element is the newly created [`Definition`]
|
||||
/// and the second element is the number of definitions that are now associated with
|
||||
/// `definition_node`.
|
||||
///
|
||||
/// This method should only be used when adding a definition associated with a `*` import.
|
||||
/// All other nodes can only ever be associated with exactly 1 or 0 [`Definition`]s.
|
||||
/// For any node other than an [`ast::Alias`] representing a `*` import,
|
||||
/// prefer to use `self.add_definition()`, which ensures that this invariant is maintained.
|
||||
fn push_additional_definition(
|
||||
&mut self,
|
||||
symbol: ScopedSymbolId,
|
||||
definition_node: impl Into<DefinitionNodeRef<'db>>,
|
||||
) -> (Definition<'db>, usize) {
|
||||
let definition_node: DefinitionNodeRef<'_> = definition_node.into();
|
||||
#[allow(unsafe_code)]
|
||||
// SAFETY: `definition_node` is guaranteed to be a child of `self.module`
|
||||
let kind = unsafe { definition_node.into_owned(self.module.clone()) };
|
||||
let category = kind.category(self.file.is_stub(self.db.upcast()));
|
||||
let is_reexported = kind.is_reexported();
|
||||
|
||||
let definition = Definition::new(
|
||||
self.db,
|
||||
self.file,
|
||||
|
@ -365,10 +409,11 @@ impl<'db> SemanticIndexBuilder<'db> {
|
|||
countme::Count::default(),
|
||||
);
|
||||
|
||||
let existing_definition = self
|
||||
.definitions_by_node
|
||||
.insert(definition_node.key(), definition);
|
||||
debug_assert_eq!(existing_definition, None);
|
||||
let num_definitions = {
|
||||
let definitions = self.add_entry_for_definition_key(definition_node.key());
|
||||
definitions.push(definition);
|
||||
definitions.len()
|
||||
};
|
||||
|
||||
if category.is_binding() {
|
||||
self.mark_symbol_bound(symbol);
|
||||
|
@ -390,7 +435,7 @@ impl<'db> SemanticIndexBuilder<'db> {
|
|||
try_node_stack_manager.record_definition(self);
|
||||
self.try_node_context_stack_manager = try_node_stack_manager;
|
||||
|
||||
definition
|
||||
(definition, num_definitions)
|
||||
}
|
||||
|
||||
fn record_expression_narrowing_constraint(
|
||||
|
@ -767,9 +812,10 @@ impl<'db> SemanticIndexBuilder<'db> {
|
|||
// Insert a mapping from the inner Parameter node to the same definition. This
|
||||
// ensures that calling `HasType::inferred_type` on the inner parameter returns
|
||||
// a valid type (and doesn't panic)
|
||||
let existing_definition = self
|
||||
.definitions_by_node
|
||||
.insert((¶meter.parameter).into(), definition);
|
||||
let existing_definition = self.definitions_by_node.insert(
|
||||
(¶meter.parameter).into(),
|
||||
Definitions::single(definition),
|
||||
);
|
||||
debug_assert_eq!(existing_definition, None);
|
||||
}
|
||||
|
||||
|
@ -991,7 +1037,54 @@ where
|
|||
}
|
||||
}
|
||||
ast::Stmt::ImportFrom(node) => {
|
||||
let mut found_star = false;
|
||||
for (alias_index, alias) in node.names.iter().enumerate() {
|
||||
if &alias.name == "*" {
|
||||
// The following line maintains the invariant that every AST node that
|
||||
// implements `Into<DefinitionNodeKey>` must have an entry in the
|
||||
// `definitions_by_node` map. Maintaining this invariant ensures that
|
||||
// `SemanticIndex::definitions` can always look up the definitions for a
|
||||
// given AST node without panicking.
|
||||
//
|
||||
// The reason why maintaining this invariant requires special handling here
|
||||
// is that some `Alias` nodes may be associated with 0 definitions:
|
||||
// - If the import statement has invalid syntax: multiple `*` names in the `names` list
|
||||
// (e.g. `from foo import *, bar, *`)
|
||||
// - If the `*` import refers to a module that has 0 exported names.
|
||||
// - If the module being imported from cannot be resolved.
|
||||
self.add_entry_for_definition_key(alias.into());
|
||||
|
||||
if found_star {
|
||||
continue;
|
||||
}
|
||||
|
||||
found_star = true;
|
||||
|
||||
// Wildcard imports are invalid syntax everywhere except the top-level scope,
|
||||
// and thus do not bind any definitions anywhere else
|
||||
if !self.current_scope_is_global_scope() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let Ok(module_name) =
|
||||
ModuleName::from_import_statement(self.db, self.file, node)
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
|
||||
let Some(module) = resolve_module(self.db, &module_name) else {
|
||||
continue;
|
||||
};
|
||||
|
||||
for export in exported_names(self.db, module.file()) {
|
||||
let symbol_id = self.add_symbol(export.clone());
|
||||
let node_ref = StarImportDefinitionNodeRef { node, symbol_id };
|
||||
self.push_additional_definition(symbol_id, node_ref);
|
||||
}
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
let (symbol_name, is_reexported) = if let Some(asname) = &alias.asname {
|
||||
(&asname.id, asname.id == alias.name.id)
|
||||
} else {
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
use std::ops::Deref;
|
||||
|
||||
use ruff_db::files::File;
|
||||
use ruff_db::parsed::ParsedModule;
|
||||
use ruff_python_ast as ast;
|
||||
|
@ -52,10 +54,42 @@ impl<'db> Definition<'db> {
|
|||
}
|
||||
}
|
||||
|
||||
/// One or more [`Definition`]s.
|
||||
#[derive(Debug, Default, PartialEq, Eq, salsa::Update)]
|
||||
pub struct Definitions<'db>(smallvec::SmallVec<[Definition<'db>; 1]>);
|
||||
|
||||
impl<'db> Definitions<'db> {
|
||||
pub(crate) fn single(definition: Definition<'db>) -> Self {
|
||||
Self(smallvec::smallvec![definition])
|
||||
}
|
||||
|
||||
pub(crate) fn push(&mut self, definition: Definition<'db>) {
|
||||
self.0.push(definition);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'db> Deref for Definitions<'db> {
|
||||
type Target = [Definition<'db>];
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'db> IntoIterator for &'a Definitions<'db> {
|
||||
type Item = &'a Definition<'db>;
|
||||
type IntoIter = std::slice::Iter<'a, Definition<'db>>;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
self.0.iter()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub(crate) enum DefinitionNodeRef<'a> {
|
||||
Import(ImportDefinitionNodeRef<'a>),
|
||||
ImportFrom(ImportFromDefinitionNodeRef<'a>),
|
||||
ImportStar(StarImportDefinitionNodeRef<'a>),
|
||||
For(ForStmtDefinitionNodeRef<'a>),
|
||||
Function(&'a ast::StmtFunctionDef),
|
||||
Class(&'a ast::StmtClassDef),
|
||||
|
@ -178,12 +212,24 @@ impl<'a> From<MatchPatternDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<'a> From<StarImportDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
|
||||
fn from(node: StarImportDefinitionNodeRef<'a>) -> Self {
|
||||
Self::ImportStar(node)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub(crate) struct ImportDefinitionNodeRef<'a> {
|
||||
pub(crate) alias: &'a ast::Alias,
|
||||
pub(crate) is_reexported: bool,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub(crate) struct StarImportDefinitionNodeRef<'a> {
|
||||
pub(crate) node: &'a ast::StmtImportFrom,
|
||||
pub(crate) symbol_id: ScopedSymbolId,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub(crate) struct ImportFromDefinitionNodeRef<'a> {
|
||||
pub(crate) node: &'a ast::StmtImportFrom,
|
||||
|
@ -253,6 +299,7 @@ impl<'db> DefinitionNodeRef<'db> {
|
|||
alias: AstNodeRef::new(parsed, alias),
|
||||
is_reexported,
|
||||
}),
|
||||
|
||||
DefinitionNodeRef::ImportFrom(ImportFromDefinitionNodeRef {
|
||||
node,
|
||||
alias_index,
|
||||
|
@ -262,6 +309,13 @@ impl<'db> DefinitionNodeRef<'db> {
|
|||
alias_index,
|
||||
is_reexported,
|
||||
}),
|
||||
DefinitionNodeRef::ImportStar(star_import) => {
|
||||
let StarImportDefinitionNodeRef { node, symbol_id } = star_import;
|
||||
DefinitionKind::StarImport(StarImportDefinitionKind {
|
||||
node: AstNodeRef::new(parsed, node),
|
||||
symbol_id,
|
||||
})
|
||||
}
|
||||
DefinitionNodeRef::Function(function) => {
|
||||
DefinitionKind::Function(AstNodeRef::new(parsed, function))
|
||||
}
|
||||
|
@ -376,6 +430,19 @@ impl<'db> DefinitionNodeRef<'db> {
|
|||
alias_index,
|
||||
is_reexported: _,
|
||||
}) => (&node.names[alias_index]).into(),
|
||||
|
||||
// INVARIANT: for an invalid-syntax statement such as `from foo import *, bar, *`,
|
||||
// we only create a `StarImportDefinitionKind` for the *first* `*` alias in the names list.
|
||||
Self::ImportStar(StarImportDefinitionNodeRef { node, symbol_id: _ }) => node
|
||||
.names
|
||||
.iter()
|
||||
.find(|alias| &alias.name == "*")
|
||||
.expect(
|
||||
"The `StmtImportFrom` node of a `StarImportDefinitionKind` instance \
|
||||
should always have at least one `alias` with the name `*`.",
|
||||
)
|
||||
.into(),
|
||||
|
||||
Self::Function(node) => node.into(),
|
||||
Self::Class(node) => node.into(),
|
||||
Self::TypeAlias(node) => node.into(),
|
||||
|
@ -463,6 +530,7 @@ impl DefinitionCategory {
|
|||
pub enum DefinitionKind<'db> {
|
||||
Import(ImportDefinitionKind),
|
||||
ImportFrom(ImportFromDefinitionKind),
|
||||
StarImport(StarImportDefinitionKind),
|
||||
Function(AstNodeRef<ast::StmtFunctionDef>),
|
||||
Class(AstNodeRef<ast::StmtClassDef>),
|
||||
TypeAlias(AstNodeRef<ast::StmtTypeAlias>),
|
||||
|
@ -492,6 +560,13 @@ impl DefinitionKind<'_> {
|
|||
}
|
||||
}
|
||||
|
||||
pub(crate) const fn as_star_import(&self) -> Option<&StarImportDefinitionKind> {
|
||||
match self {
|
||||
DefinitionKind::StarImport(import) => Some(import),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the [`TextRange`] of the definition target.
|
||||
///
|
||||
/// A definition target would mainly be the node representing the symbol being defined i.e.,
|
||||
|
@ -502,6 +577,7 @@ impl DefinitionKind<'_> {
|
|||
match self {
|
||||
DefinitionKind::Import(import) => import.alias().range(),
|
||||
DefinitionKind::ImportFrom(import) => import.alias().range(),
|
||||
DefinitionKind::StarImport(import) => import.alias().range(),
|
||||
DefinitionKind::Function(function) => function.name.range(),
|
||||
DefinitionKind::Class(class) => class.name.range(),
|
||||
DefinitionKind::TypeAlias(type_alias) => type_alias.name.range(),
|
||||
|
@ -531,6 +607,7 @@ impl DefinitionKind<'_> {
|
|||
| DefinitionKind::TypeAlias(_)
|
||||
| DefinitionKind::Import(_)
|
||||
| DefinitionKind::ImportFrom(_)
|
||||
| DefinitionKind::StarImport(_)
|
||||
| DefinitionKind::TypeVar(_)
|
||||
| DefinitionKind::ParamSpec(_)
|
||||
| DefinitionKind::TypeVarTuple(_) => DefinitionCategory::DeclarationAndBinding,
|
||||
|
@ -589,7 +666,36 @@ impl<'db> From<Option<Unpack<'db>>> for TargetKind<'db> {
|
|||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
#[allow(dead_code)]
|
||||
pub struct StarImportDefinitionKind {
|
||||
node: AstNodeRef<ast::StmtImportFrom>,
|
||||
symbol_id: ScopedSymbolId,
|
||||
}
|
||||
|
||||
impl StarImportDefinitionKind {
|
||||
pub(crate) fn import(&self) -> &ast::StmtImportFrom {
|
||||
self.node.node()
|
||||
}
|
||||
|
||||
pub(crate) fn alias(&self) -> &ast::Alias {
|
||||
// INVARIANT: for an invalid-syntax statement such as `from foo import *, bar, *`,
|
||||
// we only create a `StarImportDefinitionKind` for the *first* `*` alias in the names list.
|
||||
self.node
|
||||
.node()
|
||||
.names
|
||||
.iter()
|
||||
.find(|alias| &alias.name == "*")
|
||||
.expect(
|
||||
"The `StmtImportFrom` node of a `StarImportDefinitionKind` instance \
|
||||
should always have at least one `alias` with the name `*`.",
|
||||
)
|
||||
}
|
||||
|
||||
pub(crate) fn symbol_id(&self) -> ScopedSymbolId {
|
||||
self.symbol_id
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct MatchPatternDefinitionKind {
|
||||
pattern: AstNodeRef<ast::Pattern>,
|
||||
identifier: AstNodeRef<ast::Identifier>,
|
||||
|
|
crates/red_knot_python_semantic/src/semantic_index/re_exports.rs (new file, 343 lines)
@ -0,0 +1,343 @@
//! A visitor and query to find all global-scope symbols that are exported from a module
//! when a wildcard import is used.
//!
//! For example, if a module `foo` contains `from bar import *`, which symbols from the global
//! scope of `bar` are imported into the global namespace of `foo`?
//!
//! ## Why is this a separate query rather than a part of semantic indexing?
//!
//! This query is called by the [`super::SemanticIndexBuilder`] in order to add the correct
//! [`super::Definition`]s to the semantic index of a module `foo` if `foo` has a
//! `from bar import *` statement in its global namespace. Adding the correct `Definition`s to
//! `foo`'s [`super::SemanticIndex`] requires knowing which symbols are exported from `bar`.
//!
//! If we determined the set of exported names during semantic indexing rather than as a
//! separate query, we would need to complete semantic indexing on `bar` in order to
//! complete analysis of the global namespace of `foo`. Since semantic indexing is somewhat
//! expensive, this would be undesirable. A separate query allows us to avoid this issue.
use ruff_db::{files::File, parsed::parsed_module};
|
||||
use ruff_python_ast::{
|
||||
self as ast,
|
||||
name::Name,
|
||||
visitor::{walk_expr, walk_pattern, walk_stmt, Visitor},
|
||||
};
|
||||
use rustc_hash::FxHashSet;
|
||||
|
||||
use crate::{module_name::ModuleName, resolve_module, Db};
|
||||
|
||||
#[salsa::tracked(return_ref)]
|
||||
pub(super) fn exported_names(db: &dyn Db, file: File) -> FxHashSet<Name> {
|
||||
let module = parsed_module(db.upcast(), file);
|
||||
let mut finder = ExportFinder::new(db, file);
|
||||
finder.visit_body(module.suite());
|
||||
finder.exports
|
||||
}
|
||||
|
||||
struct ExportFinder<'db> {
|
||||
db: &'db dyn Db,
|
||||
file: File,
|
||||
visiting_stub_file: bool,
|
||||
exports: FxHashSet<Name>,
|
||||
}
|
||||
|
||||
impl<'db> ExportFinder<'db> {
|
||||
fn new(db: &'db dyn Db, file: File) -> Self {
|
||||
Self {
|
||||
db,
|
||||
file,
|
||||
visiting_stub_file: file.is_stub(db.upcast()),
|
||||
exports: FxHashSet::default(),
|
||||
}
|
||||
}
|
||||
|
||||
fn possibly_add_export(&mut self, name: &Name) {
|
||||
if name.starts_with('_') {
|
||||
return;
|
||||
}
|
||||
self.exports.insert(name.clone());
|
||||
}
|
||||
}
|
||||
|
||||
impl<'db> Visitor<'db> for ExportFinder<'db> {
|
||||
fn visit_alias(&mut self, alias: &'db ast::Alias) {
|
||||
let ast::Alias { name, asname, .. } = alias;
|
||||
if self.visiting_stub_file {
|
||||
// If the source is a stub, names defined by imports are only exported
|
||||
// if they use the explicit `foo as foo` syntax:
|
||||
if asname.as_ref().is_some_and(|asname| asname.id == name.id) {
|
||||
self.possibly_add_export(&name.id);
|
||||
}
|
||||
} else {
|
||||
self.possibly_add_export(&asname.as_ref().unwrap_or(name).id);
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_pattern(&mut self, pattern: &'db ast::Pattern) {
|
||||
match pattern {
|
||||
ast::Pattern::MatchAs(ast::PatternMatchAs {
|
||||
pattern,
|
||||
name,
|
||||
range: _,
|
||||
}) => {
|
||||
if let Some(pattern) = pattern {
|
||||
self.visit_pattern(pattern);
|
||||
}
|
||||
if let Some(name) = name {
|
||||
// Wildcard patterns (`case _:`) do not bind names.
|
||||
// Currently `self.possibly_add_export()` just ignores
|
||||
// all names with leading underscores, but this will not always be the case
|
||||
// (in the future we will want to support modules with `__all__ = ['_']`).
|
||||
if name != "_" {
|
||||
self.possibly_add_export(&name.id);
|
||||
}
|
||||
}
|
||||
}
|
||||
ast::Pattern::MatchMapping(ast::PatternMatchMapping {
|
||||
patterns,
|
||||
rest,
|
||||
keys: _,
|
||||
range: _,
|
||||
}) => {
|
||||
for pattern in patterns {
|
||||
self.visit_pattern(pattern);
|
||||
}
|
||||
if let Some(rest) = rest {
|
||||
self.possibly_add_export(&rest.id);
|
||||
}
|
||||
}
|
||||
ast::Pattern::MatchStar(ast::PatternMatchStar { name, range: _ }) => {
|
||||
if let Some(name) = name {
|
||||
self.possibly_add_export(&name.id);
|
||||
}
|
||||
}
|
||||
ast::Pattern::MatchSequence(_)
|
||||
| ast::Pattern::MatchOr(_)
|
||||
| ast::Pattern::MatchClass(_) => {
|
||||
walk_pattern(self, pattern);
|
||||
}
|
||||
ast::Pattern::MatchSingleton(_) | ast::Pattern::MatchValue(_) => {}
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_stmt(&mut self, stmt: &'db ruff_python_ast::Stmt) {
|
||||
match stmt {
|
||||
ast::Stmt::ClassDef(ast::StmtClassDef {
|
||||
name,
|
||||
decorator_list,
|
||||
arguments,
|
||||
type_params: _, // We don't want to visit the type params of the class
|
||||
body: _, // We don't want to visit the body of the class
|
||||
range: _,
|
||||
}) => {
|
||||
self.possibly_add_export(&name.id);
|
||||
for decorator in decorator_list {
|
||||
self.visit_decorator(decorator);
|
||||
}
|
||||
if let Some(arguments) = arguments {
|
||||
self.visit_arguments(arguments);
|
||||
}
|
||||
}
|
||||
ast::Stmt::FunctionDef(ast::StmtFunctionDef {
|
||||
name,
|
||||
decorator_list,
|
||||
parameters,
|
||||
returns,
|
||||
type_params: _, // We don't want to visit the type params of the function
|
||||
body: _, // We don't want to visit the body of the function
|
||||
range: _,
|
||||
is_async: _,
|
||||
}) => {
|
||||
self.possibly_add_export(&name.id);
|
||||
for decorator in decorator_list {
|
||||
self.visit_decorator(decorator);
|
||||
}
|
||||
self.visit_parameters(parameters);
|
||||
if let Some(returns) = returns {
|
||||
self.visit_expr(returns);
|
||||
}
|
||||
}
|
||||
ast::Stmt::AnnAssign(ast::StmtAnnAssign {
|
||||
target,
|
||||
value,
|
||||
annotation,
|
||||
simple: _,
|
||||
range: _,
|
||||
}) => {
|
||||
if value.is_some() || self.visiting_stub_file {
|
||||
self.visit_expr(target);
|
||||
}
|
||||
self.visit_expr(annotation);
|
||||
if let Some(value) = value {
|
||||
self.visit_expr(value);
|
||||
}
|
||||
}
|
||||
ast::Stmt::TypeAlias(ast::StmtTypeAlias {
|
||||
name,
|
||||
type_params: _,
|
||||
value: _,
|
||||
range: _,
|
||||
}) => {
|
||||
self.visit_expr(name);
|
||||
// Neither walrus expressions nor statements can appear in type aliases;
|
||||
// no need to recursively visit the `value` or `type_params`
|
||||
            }
            ast::Stmt::ImportFrom(node) => {
                let mut found_star = false;
                for name in &node.names {
                    if &name.name.id == "*" {
                        if !found_star {
                            found_star = true;
                            self.exports.extend(
                                ModuleName::from_import_statement(self.db, self.file, node)
                                    .ok()
                                    .and_then(|module_name| resolve_module(self.db, &module_name))
                                    .iter()
                                    .flat_map(|module| exported_names(self.db, module.file()))
                                    .cloned(),
                            );
                        }
                    } else {
                        self.visit_alias(name);
                    }
                }
            }

            ast::Stmt::Import(_)
            | ast::Stmt::AugAssign(_)
            | ast::Stmt::While(_)
            | ast::Stmt::If(_)
            | ast::Stmt::With(_)
            | ast::Stmt::Assert(_)
            | ast::Stmt::Try(_)
            | ast::Stmt::Expr(_)
            | ast::Stmt::For(_)
            | ast::Stmt::Assign(_)
            | ast::Stmt::Match(_) => walk_stmt(self, stmt),

            ast::Stmt::Global(_)
            | ast::Stmt::Raise(_)
            | ast::Stmt::Return(_)
            | ast::Stmt::Break(_)
            | ast::Stmt::Continue(_)
            | ast::Stmt::IpyEscapeCommand(_)
            | ast::Stmt::Delete(_)
            | ast::Stmt::Nonlocal(_)
            | ast::Stmt::Pass(_) => {}
        }
    }

    fn visit_expr(&mut self, expr: &'db ast::Expr) {
        match expr {
            ast::Expr::Name(ast::ExprName { id, ctx, range: _ }) => {
                if ctx.is_store() {
                    self.possibly_add_export(id);
                }
            }

            ast::Expr::Lambda(_)
            | ast::Expr::BooleanLiteral(_)
            | ast::Expr::NoneLiteral(_)
            | ast::Expr::NumberLiteral(_)
            | ast::Expr::BytesLiteral(_)
            | ast::Expr::EllipsisLiteral(_)
            | ast::Expr::StringLiteral(_) => {}

            // Walrus definitions "leak" from comprehension scopes into the comprehension's
            // enclosing scope; they thus need special handling
            ast::Expr::SetComp(_)
            | ast::Expr::ListComp(_)
            | ast::Expr::Generator(_)
            | ast::Expr::DictComp(_) => {
                let mut walrus_finder = WalrusFinder {
                    export_finder: self,
                };
                walk_expr(&mut walrus_finder, expr);
            }

            ast::Expr::BoolOp(_)
            | ast::Expr::Named(_)
            | ast::Expr::BinOp(_)
            | ast::Expr::UnaryOp(_)
            | ast::Expr::If(_)
            | ast::Expr::Attribute(_)
            | ast::Expr::Subscript(_)
            | ast::Expr::Starred(_)
            | ast::Expr::Call(_)
            | ast::Expr::Compare(_)
            | ast::Expr::Yield(_)
            | ast::Expr::YieldFrom(_)
            | ast::Expr::FString(_)
            | ast::Expr::Tuple(_)
            | ast::Expr::List(_)
            | ast::Expr::Slice(_)
            | ast::Expr::IpyEscapeCommand(_)
            | ast::Expr::Dict(_)
            | ast::Expr::Set(_)
            | ast::Expr::Await(_) => walk_expr(self, expr),
        }
    }
}

struct WalrusFinder<'a, 'db> {
    export_finder: &'a mut ExportFinder<'db>,
}

impl<'db> Visitor<'db> for WalrusFinder<'_, 'db> {
    fn visit_expr(&mut self, expr: &'db ast::Expr) {
        match expr {
            // It's important for us to short-circuit here for lambdas specifically,
            // as walruses cannot leak out of the body of a lambda function.
            ast::Expr::Lambda(_)
            | ast::Expr::BooleanLiteral(_)
            | ast::Expr::NoneLiteral(_)
            | ast::Expr::NumberLiteral(_)
            | ast::Expr::BytesLiteral(_)
            | ast::Expr::EllipsisLiteral(_)
            | ast::Expr::StringLiteral(_)
            | ast::Expr::Name(_) => {}

            ast::Expr::Named(ast::ExprNamed {
                target,
                value: _,
                range: _,
            }) => {
                if let ast::Expr::Name(ast::ExprName {
                    id,
                    ctx: ast::ExprContext::Store,
                    range: _,
                }) = &**target
                {
                    self.export_finder.possibly_add_export(id);
                }
            }

            // We must recurse inside nested comprehensions,
            // as even a walrus inside a comprehension inside a comprehension in the global scope
            // will leak out into the global scope
            ast::Expr::DictComp(_)
            | ast::Expr::SetComp(_)
            | ast::Expr::ListComp(_)
            | ast::Expr::Generator(_)
            | ast::Expr::BoolOp(_)
            | ast::Expr::BinOp(_)
            | ast::Expr::UnaryOp(_)
            | ast::Expr::If(_)
            | ast::Expr::Attribute(_)
            | ast::Expr::Subscript(_)
            | ast::Expr::Starred(_)
            | ast::Expr::Call(_)
            | ast::Expr::Compare(_)
            | ast::Expr::Yield(_)
            | ast::Expr::YieldFrom(_)
            | ast::Expr::FString(_)
            | ast::Expr::Tuple(_)
            | ast::Expr::List(_)
            | ast::Expr::Slice(_)
            | ast::Expr::IpyEscapeCommand(_)
            | ast::Expr::Dict(_)
            | ast::Expr::Set(_)
            | ast::Expr::Await(_) => walk_expr(self, expr),
        }
    }
}
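The comprehension and lambda handling in this visitor mirrors standard Python scoping: a walrus target inside a comprehension binds in the comprehension's enclosing scope, while a walrus inside a lambda body stays local to the lambda. A minimal illustration of what that means for `*`-import exports, assuming a hypothetical module with no `__all__`:

```py
# module.py
values = [(x := i) for i in range(3)]  # `x` leaks into the module's global scope; x == 2 afterwards
f = lambda: (y := 1)                   # `y` stays local to the lambda and never becomes a global

# consumer.py
from module import *

print(x)  # bound by the `*` import
print(y)  # NameError: `y` was never a global of `module`
```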
@@ -149,7 +149,7 @@ macro_rules! impl_binding_has_ty {
            #[inline]
            fn inferred_type<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
                let index = semantic_index(model.db, model.file);
                let binding = index.definition(self);
                let binding = index.expect_single_definition(self);
                binding_type(model.db, binding)
            }
        }

@@ -158,10 +158,19 @@ macro_rules! impl_binding_has_ty {

impl_binding_has_ty!(ast::StmtFunctionDef);
impl_binding_has_ty!(ast::StmtClassDef);
impl_binding_has_ty!(ast::Alias);
impl_binding_has_ty!(ast::Parameter);
impl_binding_has_ty!(ast::ParameterWithDefault);

impl HasType for ast::Alias {
    fn inferred_type<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
        if &self.name == "*" {
            return Type::Never;
        }
        let index = semantic_index(model.db, model.file);
        binding_type(model.db, index.expect_single_definition(self))
    }
}

#[cfg(test)]
mod tests {
    use ruff_db::files::system_path_to_file;
@@ -887,6 +887,19 @@ impl<'db> Type<'db> {
                }
            }

            // TODO: ditto for avoiding false positives when checking function calls with `Sized` parameters.
            (lhs, Type::Instance(InstanceType { class }))
                if class.is_known(db, KnownClass::Sized) =>
            {
                matches!(
                    lhs.to_meta_type(db).member(db, "__len__"),
                    SymbolAndQualifiers {
                        symbol: Symbol::Type(..),
                        ..
                    }
                )
            }

            (
                Type::Callable(CallableType::General(self_callable)),
                Type::Callable(CallableType::General(target_callable)),
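The `Sized` special case above checks for the presence of a `__len__` member rather than nominal inheritance, which is what avoids the false positives mentioned in the TODO. A rough Python illustration of the behaviour being targeted (the names here are hypothetical):

```py
from typing import Sized

class Box:
    def __len__(self) -> int:
        return 1

def total(s: Sized) -> int:
    return len(s)

total(Box())     # fine: `Box` provides `__len__`, so it is assignable to `Sized`
total(object())  # error: `object` has no `__len__`
```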
@@ -4161,7 +4174,8 @@ impl<'db> FunctionType<'db> {
    fn internal_signature(self, db: &'db dyn Db) -> Signature<'db> {
        let scope = self.body_scope(db);
        let function_stmt_node = scope.node(db).expect_function();
        let definition = semantic_index(db, scope.file(db)).definition(function_stmt_node);
        let definition =
            semantic_index(db, scope.file(db)).expect_single_definition(function_stmt_node);
        Signature::from_function(db, definition, function_stmt_node)
    }

@@ -4921,10 +4935,9 @@ impl<'db> TypeAliasType<'db> {
    #[salsa::tracked]
    pub fn value_type(self, db: &'db dyn Db) -> Type<'db> {
        let scope = self.rhs_scope(db);

        let type_alias_stmt_node = scope.node(db).expect_type_alias();
        let definition = semantic_index(db, scope.file(db)).definition(type_alias_stmt_node);

        let definition =
            semantic_index(db, scope.file(db)).expect_single_definition(type_alias_stmt_node);
        definition_expression_type(db, definition, &type_alias_stmt_node.value)
    }
}

@@ -5688,8 +5701,8 @@ pub(crate) mod tests {

        let function_node = function_body_scope.node(&db).expect_function();

        let function_definition =
            semantic_index(&db, function_body_scope.file(&db)).definition(function_node);
        let function_definition = semantic_index(&db, function_body_scope.file(&db))
            .expect_single_definition(function_node);

        assert_eq!(
            KnownFunction::try_from_definition_and_name(&db, function_definition, function_name),
@@ -128,9 +128,10 @@ impl<'db> Class<'db> {
    #[salsa::tracked(return_ref, cycle_fn=explicit_bases_cycle_recover, cycle_initial=explicit_bases_cycle_initial)]
    fn explicit_bases_query(self, db: &'db dyn Db) -> Box<[Type<'db>]> {
        tracing::trace!("Class::explicit_bases_query: {}", self.name(db));
        let class_stmt = self.node(db);

        let class_definition = semantic_index(db, self.file(db)).definition(class_stmt);
        let class_stmt = self.node(db);
        let class_definition =
            semantic_index(db, self.file(db)).expect_single_definition(class_stmt);

        class_stmt
            .bases()

@@ -156,11 +157,15 @@ impl<'db> Class<'db> {
    #[salsa::tracked(return_ref)]
    fn decorators(self, db: &'db dyn Db) -> Box<[Type<'db>]> {
        tracing::trace!("Class::decorators: {}", self.name(db));

        let class_stmt = self.node(db);
        if class_stmt.decorator_list.is_empty() {
            return Box::new([]);
        }
        let class_definition = semantic_index(db, self.file(db)).definition(class_stmt);

        let class_definition =
            semantic_index(db, self.file(db)).expect_single_definition(class_stmt);

        class_stmt
            .decorator_list
            .iter()

@@ -224,9 +229,15 @@ impl<'db> Class<'db> {
            .as_ref()?
            .find_keyword("metaclass")?
            .value;
        let class_definition = semantic_index(db, self.file(db)).definition(class_stmt);
        let metaclass_ty = definition_expression_type(db, class_definition, metaclass_node);
        Some(metaclass_ty)

        let class_definition =
            semantic_index(db, self.file(db)).expect_single_definition(class_stmt);

        Some(definition_expression_type(
            db,
            class_definition,
            metaclass_node,
        ))
    }

    /// Return the metaclass of this class, or `type[Unknown]` if the metaclass cannot be inferred.

@@ -834,6 +845,7 @@ pub enum KnownClass {
    TypeAliasType,
    NoDefaultType,
    NewType,
    Sized,
    // TODO: This can probably be removed when we have support for protocols
    SupportsIndex,
    // Collections

@@ -911,6 +923,7 @@ impl<'db> KnownClass {
            | Self::DefaultDict
            | Self::Deque
            | Self::Float
            | Self::Sized
            | Self::Classmethod => Truthiness::Ambiguous,
        }
    }

@@ -955,6 +968,7 @@ impl<'db> KnownClass {
            Self::Counter => "Counter",
            Self::DefaultDict => "defaultdict",
            Self::Deque => "deque",
            Self::Sized => "Sized",
            Self::OrderedDict => "OrderedDict",
            // For example, `typing.List` is defined as `List = _Alias()` in typeshed
            Self::StdlibAlias => "_Alias",

@@ -1115,9 +1129,11 @@ impl<'db> KnownClass {
            | Self::MethodWrapperType
            | Self::WrapperDescriptorType => KnownModule::Types,
            Self::NoneType => KnownModule::Typeshed,
            Self::SpecialForm | Self::TypeVar | Self::StdlibAlias | Self::SupportsIndex => {
                KnownModule::Typing
            }
            Self::SpecialForm
            | Self::TypeVar
            | Self::StdlibAlias
            | Self::SupportsIndex
            | Self::Sized => KnownModule::Typing,
            Self::TypeAliasType | Self::TypeVarTuple | Self::ParamSpec | Self::NewType => {
                KnownModule::TypingExtensions
            }

@@ -1195,6 +1211,7 @@ impl<'db> KnownClass {
            | Self::TypeVar
            | Self::ParamSpec
            | Self::TypeVarTuple
            | Self::Sized
            | Self::NewType => false,
        }
    }

@@ -1247,6 +1264,7 @@ impl<'db> KnownClass {
            | Self::TypeVar
            | Self::ParamSpec
            | Self::TypeVarTuple
            | Self::Sized
            | Self::NewType => false,
        }
    }

@@ -1299,6 +1317,7 @@ impl<'db> KnownClass {
            "_SpecialForm" => Self::SpecialForm,
            "_NoDefaultType" => Self::NoDefaultType,
            "SupportsIndex" => Self::SupportsIndex,
            "Sized" => Self::Sized,
            "_version_info" => Self::VersionInfo,
            "ellipsis" if Program::get(db).python_version(db) <= PythonVersion::PY39 => {
                Self::EllipsisType

@@ -1358,6 +1377,7 @@ impl<'db> KnownClass {
            | Self::SupportsIndex
            | Self::ParamSpec
            | Self::TypeVarTuple
            | Self::Sized
            | Self::NewType => matches!(module, KnownModule::Typing | KnownModule::TypingExtensions),
        }
    }
@@ -7,7 +7,7 @@ use ruff_db::{
};
use ruff_text_size::{Ranged, TextRange};

use super::{binding_type, KnownFunction, TypeCheckDiagnostic, TypeCheckDiagnostics};
use super::{binding_type, KnownFunction, Type, TypeCheckDiagnostic, TypeCheckDiagnostics};

use crate::semantic_index::semantic_index;
use crate::semantic_index::symbol::ScopeId;

@@ -177,9 +177,8 @@ impl<'db> InferContext<'db> {
        let mut function_scope_tys = index
            .ancestor_scopes(scope_id)
            .filter_map(|(_, scope)| scope.node().as_function())
            .filter_map(|function| {
                binding_type(self.db, index.definition(function)).into_function_literal()
            });
            .map(|node| binding_type(self.db, index.expect_single_definition(node)))
            .filter_map(Type::into_function_literal);

        // Iterate over all functions and test if any is decorated with `@no_type_check`.
        function_scope_tys.any(|function_ty| {
@@ -33,8 +33,6 @@
//! the query cycle until a fixed-point is reached. Salsa has a built-in fixed limit on the number
//! of iterations, so if we fail to converge, Salsa will eventually panic. (This should of course
//! be considered a bug.)
use std::num::NonZeroU32;

use itertools::{Either, Itertools};
use ruff_db::diagnostic::{DiagnosticId, Severity};
use ruff_db::files::File;

@@ -45,8 +43,8 @@ use rustc_hash::{FxHashMap, FxHashSet};
use salsa;
use salsa::plumbing::AsId;

use crate::module_name::ModuleName;
use crate::module_resolver::{file_to_module, resolve_module};
use crate::module_name::{ModuleName, ModuleNameResolutionError};
use crate::module_resolver::resolve_module;
use crate::semantic_index::ast_ids::{HasScopedExpressionId, HasScopedUseId, ScopedExpressionId};
use crate::semantic_index::definition::{
    AssignmentDefinitionKind, Definition, DefinitionKind, DefinitionNodeKey,

@@ -889,6 +887,9 @@ impl<'db> TypeInferenceBuilder<'db> {
                    definition,
                );
            }
            DefinitionKind::StarImport(import) => {
                self.infer_import_from_definition(import.import(), import.alias(), definition);
            }
            DefinitionKind::Assignment(assignment) => {
                self.infer_assignment_definition(assignment, definition);
            }

@@ -1336,8 +1337,8 @@ impl<'db> TypeInferenceBuilder<'db> {
        }
    }

    fn infer_definition(&mut self, node: impl Into<DefinitionNodeKey>) {
        let definition = self.index.definition(node);
    fn infer_definition(&mut self, node: impl Into<DefinitionNodeKey> + std::fmt::Debug + Copy) {
        let definition = self.index.expect_single_definition(node);
        let result = infer_definition_types(self.db(), definition);
        self.extend(result);
    }
@@ -3027,7 +3028,18 @@ impl<'db> TypeInferenceBuilder<'db> {
        } = import;

        for alias in names {
            self.infer_definition(alias);
            let definitions = self.index.definitions(alias);
            if definitions.is_empty() {
                // If the module couldn't be resolved while constructing the semantic index,
                // this node won't have any definitions associated with it -- but we need to
                // make sure that we still emit the diagnostic for the unresolvable module,
                // since this will cause the import to fail at runtime.
                self.resolve_import_from_module(import, alias);
            } else {
                for definition in definitions {
                    self.extend(infer_definition_types(self.db(), *definition));
                }
            }
        }
    }

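As the comment in that branch notes, an alias has no definitions in the semantic index when its module could not be resolved; calling `resolve_import_from_module` there keeps the diagnostic for the statement itself. A hedged sketch of the observable effect (the module name below is hypothetical):

```py
# The module cannot be resolved, so the aliases below get no definitions in the
# semantic index, but an `unresolved-import` diagnostic is still reported here.
from does_not_exist import something  # error: unresolved-import
from does_not_exist import *          # error: unresolved-import
```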
@@ -3079,52 +3091,13 @@ impl<'db> TypeInferenceBuilder<'db> {
        }
    }

    /// Given a `from .foo import bar` relative import, resolve the relative module
    /// we're importing `bar` from into an absolute [`ModuleName`]
    /// using the name of the module we're currently analyzing.
    ///
    /// - `level` is the number of dots at the beginning of the relative module name:
    ///   - `from .foo.bar import baz` => `level == 1`
    ///   - `from ...foo.bar import baz` => `level == 3`
    /// - `tail` is the relative module name stripped of all leading dots:
    ///   - `from .foo import bar` => `tail == "foo"`
    ///   - `from ..foo.bar import baz` => `tail == "foo.bar"`
    fn relative_module_name(
        &self,
        tail: Option<&str>,
        level: NonZeroU32,
    ) -> Result<ModuleName, ModuleNameResolutionError> {
        let module = file_to_module(self.db(), self.file())
            .ok_or(ModuleNameResolutionError::UnknownCurrentModule)?;
        let mut level = level.get();

        if module.kind().is_package() {
            level = level.saturating_sub(1);
        }

        let mut module_name = module
            .name()
            .ancestors()
            .nth(level as usize)
            .ok_or(ModuleNameResolutionError::TooManyDots)?;

        if let Some(tail) = tail {
            let tail = ModuleName::new(tail).ok_or(ModuleNameResolutionError::InvalidSyntax)?;
            module_name.extend(&tail);
        }

        Ok(module_name)
    }

    fn infer_import_from_definition(
    /// Resolve the [`ModuleName`], and the type of the module, being referred to by an
    /// [`ast::StmtImportFrom`] node. Emit a diagnostic if the module cannot be resolved.
    fn resolve_import_from_module(
        &mut self,
        import_from: &'db ast::StmtImportFrom,
        import_from: &ast::StmtImportFrom,
        alias: &ast::Alias,
        definition: Definition<'db>,
    ) {
        // TODO:
        // - Absolute `*` imports (`from collections import *`)
        // - Relative `*` imports (`from ...foo import *`)
    ) -> Option<(ModuleName, Type<'db>)> {
        let ast::StmtImportFrom { module, level, .. } = import_from;
        // For diagnostics, we want to highlight the unresolvable
        // module and not the entire `from ... import ...` statement.
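The `level`/`tail` doc comment removed here (its logic now lives in `ModuleName::from_import_statement`) describes ordinary relative-import resolution. A short illustration, using a hypothetical package layout:

```py
# Inside module `a.b.c` (a plain module, so the current package is `a.b`):
from .foo import bar       # level == 1, tail == "foo"      -> resolves to `a.b.foo`
from ..foo.bar import baz  # level == 2, tail == "foo.bar"  -> resolves to `a.foo.bar`
```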
@@ -3134,32 +3107,20 @@ impl<'db> TypeInferenceBuilder<'db> {
            .unwrap_or_else(|| AnyNodeRef::from(import_from));
        let module = module.as_deref();

        let module_name = if let Some(level) = NonZeroU32::new(*level) {
            tracing::trace!(
                "Resolving imported object `{}` from module `{}` relative to file `{}`",
                alias.name,
                format_import_from_module(level.get(), module),
                self.file().path(self.db()),
            );
            self.relative_module_name(module, level)
        } else {
            tracing::trace!(
                "Resolving imported object `{}` from module `{}`",
                alias.name,
                format_import_from_module(*level, module),
            );
            module
                .and_then(ModuleName::new)
                .ok_or(ModuleNameResolutionError::InvalidSyntax)
        };
        tracing::trace!(
            "Resolving imported object `{}` from module `{}` into file `{}`",
            alias.name,
            format_import_from_module(*level, module),
            self.file().path(self.db()),
        );
        let module_name = ModuleName::from_import_statement(self.db(), self.file(), import_from);

        let module_name = match module_name {
            Ok(module_name) => module_name,
            Err(ModuleNameResolutionError::InvalidSyntax) => {
                tracing::debug!("Failed to resolve import due to invalid syntax");
                // Invalid syntax diagnostics are emitted elsewhere.
                self.add_unknown_declaration_with_binding(alias.into(), definition);
                return;
                return None;
            }
            Err(ModuleNameResolutionError::TooManyDots) => {
                tracing::debug!(

@@ -3167,8 +3128,7 @@ impl<'db> TypeInferenceBuilder<'db> {
                    format_import_from_module(*level, module),
                );
                report_unresolved_module(&self.context, module_ref, *level, module);
                self.add_unknown_declaration_with_binding(alias.into(), definition);
                return;
                return None;
            }
            Err(ModuleNameResolutionError::UnknownCurrentModule) => {
                tracing::debug!(
@@ -3177,35 +3137,51 @@ impl<'db> TypeInferenceBuilder<'db> {
                    self.file().path(self.db())
                );
                report_unresolved_module(&self.context, module_ref, *level, module);
                self.add_unknown_declaration_with_binding(alias.into(), definition);
                return;
                return None;
            }
        };

        let Some(module_ty) = self.module_type_from_name(&module_name) else {
            report_unresolved_module(&self.context, module_ref, *level, module);
            return None;
        };

        Some((module_name, module_ty))
    }

    fn infer_import_from_definition(
        &mut self,
        import_from: &'db ast::StmtImportFrom,
        alias: &ast::Alias,
        definition: Definition<'db>,
    ) {
        let Some((module_name, module_ty)) = self.resolve_import_from_module(import_from, alias)
        else {
            self.add_unknown_declaration_with_binding(alias.into(), definition);
            return;
        };

        let ast::Alias {
            range: _,
            name,
            asname: _,
        } = alias;
        // The indirection of having `star_import_info` as a separate variable
        // is required in order to make the borrow checker happy.
        let star_import_info = definition
            .kind(self.db())
            .as_star_import()
            .map(|star_import| {
                let symbol_table = self
                    .index
                    .symbol_table(self.scope().file_scope_id(self.db()));
                (star_import, symbol_table)
            });

        if name == "*" {
            self.add_declaration_with_binding(
                alias.into(),
                definition,
                &DeclaredAndInferredType::AreTheSame(Type::Never),
            );
            return;
        }
        let name = if let Some((star_import, symbol_table)) = star_import_info.as_ref() {
            symbol_table.symbol(star_import.symbol_id()).name()
        } else {
            &alias.name.id
        };

        // First try loading the requested attribute from the module.
        if let Symbol::Type(ty, boundness) = module_ty.member(self.db(), &name.id).symbol {
            if boundness == Boundness::PossiblyUnbound {
        if let Symbol::Type(ty, boundness) = module_ty.member(self.db(), name).symbol {
            if &alias.name != "*" && boundness == Boundness::PossiblyUnbound {
                // TODO: Consider loading _both_ the attribute and any submodule and unioning them
                // together if the attribute exists but is possibly-unbound.
                self.context.report_lint(
@@ -3250,11 +3226,14 @@ impl<'db> TypeInferenceBuilder<'db> {
            }
        }

        self.context.report_lint(
            &UNRESOLVED_IMPORT,
            AnyNodeRef::Alias(alias),
            format_args!("Module `{module_name}` has no member `{name}`",),
        );
        if &alias.name != "*" {
            self.context.report_lint(
                &UNRESOLVED_IMPORT,
                AnyNodeRef::Alias(alias),
                format_args!("Module `{module_name}` has no member `{name}`",),
            );
        }

        self.add_unknown_declaration_with_binding(alias.into(), definition);
    }

@@ -3775,7 +3754,7 @@ impl<'db> TypeInferenceBuilder<'db> {
    fn infer_named_expression(&mut self, named: &ast::ExprNamed) -> Type<'db> {
        // See https://peps.python.org/pep-0572/#differences-between-assignment-expressions-and-assignment-statements
        if named.target.is_name_expr() {
            let definition = self.index.definition(named);
            let definition = self.index.expect_single_definition(named);
            let result = infer_definition_types(self.db(), definition);
            self.extend(result);
            result.binding_type(definition)
@@ -7202,23 +7181,6 @@ fn format_import_from_module(level: u32, module: Option<&str>) -> String {
    )
}

/// Various ways in which resolving a [`ModuleName`]
/// from an [`ast::StmtImport`] or [`ast::StmtImportFrom`] node might fail
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
enum ModuleNameResolutionError {
    /// The import statement has invalid syntax
    InvalidSyntax,

    /// We couldn't resolve the file we're currently analyzing back to a module
    /// (Only necessary for relative import statements)
    UnknownCurrentModule,

    /// The relative import statement seems to take us outside of the module search path
    /// (e.g. our current module is `foo.bar`, and the relative import statement in `foo.bar`
    /// is `from ....baz import spam`)
    TooManyDots,
}

/// Struct collecting string parts when inferring a formatted string. Infers a string literal if the
/// concatenated string is small enough, otherwise infers a literal string.
///
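The `ModuleNameResolutionError` enum removed here now comes in via `crate::module_name` (see the import change earlier in this file), and its variants describe the same failure modes as before. The `TooManyDots` case, for instance, corresponds to a relative import that climbs past the top-level package (the layout below is hypothetical):

```py
# foo/bar.py -- the top-level package is `foo`
from ....baz import spam  # four leading dots climb past `foo`: too many dots
```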
@@ -60,23 +60,13 @@ type KeyDiagnosticFields = (
    Severity,
);

static EXPECTED_DIAGNOSTICS: &[KeyDiagnosticFields] = &[
    // We don't support `*` imports yet:
    (
        DiagnosticId::lint("unresolved-import"),
        Some("/src/tomllib/_parser.py"),
        Some(192..200),
        Cow::Borrowed("Module `collections.abc` has no member `Iterable`"),
        Severity::Error,
    ),
    (
        DiagnosticId::lint("unused-ignore-comment"),
        Some("/src/tomllib/_parser.py"),
        Some(22299..22333),
        Cow::Borrowed("Unused blanket `type: ignore` directive"),
        Severity::Warning,
    ),
];
static EXPECTED_DIAGNOSTICS: &[KeyDiagnosticFields] = &[(
    DiagnosticId::lint("unused-ignore-comment"),
    Some("/src/tomllib/_parser.py"),
    Some(22299..22333),
    Cow::Borrowed("Unused blanket `type: ignore` directive"),
    Severity::Warning,
)];

fn tomllib_path(file: &TestFile) -> SystemPathBuf {
    SystemPathBuf::from("src").join(file.name())
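The expected diagnostic dropped here was a false positive that goes away with `*`-import support: typeshed populates `collections.abc` by re-exporting `_collections_abc` via a `*` import (roughly `from _collections_abc import *` in `collections/abc.pyi`), so members such as `Iterable` only become visible once `*` imports are understood. The corpus line that used to be flagged is of this shape:

```py
# Previously reported as "Module `collections.abc` has no member `Iterable`";
# with `*`-import support the member resolves through typeshed's re-export.
from collections.abc import Iterable
```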