[ty] AST garbage collection (#18482)

## Summary

Garbage collect ASTs once we are done checking a given file. Queries
with a cross-file dependency on the AST will reparse the file on demand.
This reduces ty's peak memory usage by ~20-30%.

The primary change of this PR is adding a `node_index` field to every
AST node, which is assigned by the parser. `ParsedModule` can use this to
create a flat index of AST nodes any time the file is parsed (or
reparsed). This allows `AstNodeRef` to simply index into the current
instance of the `ParsedModule`, instead of storing a pointer directly.

The indices are somewhat hackily (using an atomic integer) assigned by
the `parsed_module` query instead of by the parser directly. Assigning
the indices in source-order in the (recursive) parser turns out to be
difficult, and collecting the nodes during semantic indexing is
impossible as `SemanticIndex` does not hold onto a specific
`ParsedModuleRef`, which the pointers in the flat AST are tied to. This
means that we have to do an extra AST traversal to assign and collect
the nodes into a flat index, but the small performance impact (~3% on
cold runs) seems worth it for the memory savings.

Part of https://github.com/astral-sh/ty/issues/214.
Commit: c9dff5c7d5 (parent 76d9009a6e)
Author: Ibraheem Ahmed, 2025-06-13 08:40:11 -04:00 (committed via GitHub)
Signature: no known key found in database (GPG key ID: B5690EEEBB952194)
824 changed files with 25243 additions and 804 deletions

View file

@ -1,17 +1,17 @@
use std::sync::Arc;
use std::fmt::Debug;
use std::marker::PhantomData;
use ruff_db::parsed::ParsedModuleRef;
use ruff_python_ast::{AnyNodeRef, NodeIndex};
use ruff_python_ast::{AnyRootNodeRef, HasNodeIndex};
use ruff_text_size::Ranged;
/// Ref-counted owned reference to an AST node.
/// Reference to an AST node.
///
/// The type holds an owned reference to the node's ref-counted [`ParsedModuleRef`].
/// Holding on to the node's [`ParsedModuleRef`] guarantees that the reference to the
/// node must still be valid.
///
/// Holding on to any [`AstNodeRef`] prevents the [`ParsedModuleRef`] from being released.
///
/// ## Equality
/// Two `AstNodeRef` are considered equal if their pointer addresses are equal.
/// This type acts as a reference to an AST node within a given module that remains
/// stable regardless of whether the AST is garbage collected. As such, accessing a
/// node through the [`AstNodeRef`] requires a reference to the current [`ParsedModuleRef`]
/// for the module containing the node.
///
/// ## Usage in salsa tracked structs
/// It's important that [`AstNodeRef`] fields in salsa tracked structs are tracked fields
@ -32,54 +32,83 @@ use ruff_db::parsed::ParsedModuleRef;
/// run on every AST change. All other queries only run when the expression's identity changes.
#[derive(Clone)]
pub struct AstNodeRef<T> {
/// Owned reference to the node's [`ParsedModuleRef`].
///
/// The node's reference is guaranteed to remain valid as long as its enclosing
/// [`ParsedModuleRef`] is alive.
parsed: ParsedModuleRef,
/// A pointer to the [`ruff_db::parsed::ParsedModule`] that this node was created from.
module_ptr: *const (),
/// Pointer to the referenced node.
node: std::ptr::NonNull<T>,
/// Debug information.
#[cfg(debug_assertions)]
kind: ruff_python_ast::NodeKind,
#[cfg(debug_assertions)]
range: ruff_text_size::TextRange,
/// The index of the node in the AST.
index: NodeIndex,
_node: PhantomData<T>,
}
#[expect(unsafe_code)]
impl<T> AstNodeRef<T> {
/// Creates a new `AstNodeRef` that references `node`. The `parsed` is the [`ParsedModuleRef`] to
/// which the `AstNodeRef` belongs.
impl<T> AstNodeRef<T>
where
T: HasNodeIndex + Ranged + PartialEq + Debug,
for<'ast> AnyNodeRef<'ast>: From<&'ast T>,
for<'ast> &'ast T: TryFrom<AnyRootNodeRef<'ast>>,
{
/// Creates a new `AstNodeRef` that references `node`.
///
/// ## Safety
///
/// Dereferencing the `node` can result in undefined behavior if `parsed` isn't the
/// [`ParsedModuleRef`] to which `node` belongs. It's the caller's responsibility to ensure that
/// the invariant `node belongs to parsed` is upheld.
pub(super) unsafe fn new(parsed: ParsedModuleRef, node: &T) -> Self {
/// This method may panic or produce unspecified results if the provided module is from a
/// different file or Salsa revision than the module to which the node belongs.
pub(super) fn new(module_ref: &ParsedModuleRef, node: &T) -> Self {
let index = node.node_index().load();
debug_assert_eq!(module_ref.get_by_index(index).try_into().ok(), Some(node));
Self {
parsed,
node: std::ptr::NonNull::from(node),
index,
module_ptr: module_ref.module().as_ptr(),
#[cfg(debug_assertions)]
kind: AnyNodeRef::from(node).kind(),
#[cfg(debug_assertions)]
range: node.range(),
_node: PhantomData,
}
}
/// Returns a reference to the wrapped node.
///
/// Note that this method will panic if the provided module is from a different file or Salsa revision
/// than the module this node was created with.
pub fn node<'ast>(&self, parsed: &'ast ParsedModuleRef) -> &'ast T {
debug_assert!(Arc::ptr_eq(self.parsed.as_arc(), parsed.as_arc()));
/// This method may panic or produce unspecified results if the provided module is from a
/// different file or Salsa revision than the module to which the node belongs.
pub fn node<'ast>(&self, module_ref: &'ast ParsedModuleRef) -> &'ast T {
debug_assert_eq!(module_ref.module().as_ptr(), self.module_ptr);
// SAFETY: Holding on to `parsed` ensures that the AST to which `node` belongs is still
// alive and not moved.
unsafe { self.node.as_ref() }
// Note that the module pointer is guaranteed to be stable within the Salsa
// revision, so the assertion above ensures the file contents have not changed.
module_ref
.get_by_index(self.index)
.try_into()
.ok()
.expect("AST indices should never change within the same revision")
}
}
impl<T> std::fmt::Debug for AstNodeRef<T>
#[allow(clippy::missing_fields_in_debug)]
impl<T> Debug for AstNodeRef<T>
where
T: std::fmt::Debug,
T: Debug,
for<'ast> &'ast T: TryFrom<AnyRootNodeRef<'ast>>,
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_tuple("AstNodeRef")
.field(self.node(&self.parsed))
.finish()
#[cfg(debug_assertions)]
{
f.debug_struct("AstNodeRef")
.field("kind", &self.kind)
.field("range", &self.range)
.finish()
}
#[cfg(not(debug_assertions))]
{
// Unfortunately we have no access to the AST here.
f.debug_tuple("AstNodeRef").finish_non_exhaustive()
}
}
}
@ -88,9 +117,10 @@ unsafe impl<T> salsa::Update for AstNodeRef<T> {
unsafe fn maybe_update(old_pointer: *mut Self, new_value: Self) -> bool {
let old_ref = unsafe { &mut (*old_pointer) };
if Arc::ptr_eq(old_ref.parsed.as_arc(), new_value.parsed.as_arc())
&& old_ref.node.eq(&new_value.node)
{
// Two nodes are guaranteed to be equal as long as they refer to the same node index
// within the same module. Note that the module pointer is guaranteed to be stable
// within the Salsa revision, so the file contents cannot have changed.
if old_ref.module_ptr == new_value.module_ptr && old_ref.index == new_value.index {
false
} else {
*old_ref = new_value;
@ -99,6 +129,7 @@ unsafe impl<T> salsa::Update for AstNodeRef<T> {
}
}
// SAFETY: The `module_ptr` is only used for pointer equality and never accessed directly.
#[expect(unsafe_code)]
unsafe impl<T> Send for AstNodeRef<T> where T: Send {}
#[expect(unsafe_code)]

View file

@ -222,6 +222,7 @@ impl ModuleName {
level,
names: _,
range: _,
node_index: _,
} = node;
let module = module.as_deref();

View file

@ -1,21 +1,14 @@
use ruff_python_ast::AnyNodeRef;
use ruff_python_ast::{HasNodeIndex, NodeIndex};
/// Compact key for a node for use in a hash map.
///
/// Stores the memory address of the node, because using the range and the kind
/// of the node is not enough to uniquely identify them in ASTs resulting from
/// invalid syntax. For example, parsing the input `for` results in a `StmtFor`
/// AST node where both the `target` and the `iter` field are `ExprName` nodes
/// with the same (empty) range `3..3`.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub(super) struct NodeKey(usize);
pub(super) struct NodeKey(NodeIndex);
impl NodeKey {
pub(super) fn from_node<'a, N>(node: N) -> Self
pub(super) fn from_node<N>(node: N) -> Self
where
N: Into<AnyNodeRef<'a>>,
N: HasNodeIndex,
{
let node = node.into();
NodeKey(node.as_ptr().as_ptr() as usize)
NodeKey(node.node_index().load())
}
}

View file

@ -241,9 +241,8 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> {
) {
let children_start = self.scopes.next_index() + 1;
// SAFETY: `node` is guaranteed to be a child of `self.module`
#[expect(unsafe_code)]
let node_with_kind = unsafe { node.to_kind(self.module.clone()) };
// Note `node` is guaranteed to be a child of `self.module`
let node_with_kind = node.to_kind(self.module);
let scope = Scope::new(
parent,
@ -473,9 +472,8 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> {
) -> (Definition<'db>, usize) {
let definition_node: DefinitionNodeRef<'ast, 'db> = definition_node.into();
#[expect(unsafe_code)]
// SAFETY: `definition_node` is guaranteed to be a child of `self.module`
let kind = unsafe { definition_node.into_owned(self.module.clone()) };
// Note `definition_node` is guaranteed to be a child of `self.module`
let kind = definition_node.into_owned(self.module);
let category = kind.category(self.source_type.is_stub(), self.module);
let is_reexported = kind.is_reexported();
@ -782,13 +780,8 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> {
self.db,
self.file,
self.current_scope(),
#[expect(unsafe_code)]
unsafe {
AstNodeRef::new(self.module.clone(), expression_node)
},
#[expect(unsafe_code)]
assigned_to
.map(|assigned_to| unsafe { AstNodeRef::new(self.module.clone(), assigned_to) }),
AstNodeRef::new(self.module, expression_node),
assigned_to.map(|assigned_to| AstNodeRef::new(self.module, assigned_to)),
expression_kind,
countme::Count::default(),
);
@ -810,6 +803,7 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> {
let (name, bound, default) = match type_param {
ast::TypeParam::TypeVar(ast::TypeParamTypeVar {
range: _,
node_index: _,
name,
bound,
default,
@ -989,11 +983,8 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> {
self.file,
value_file_scope,
self.current_scope(),
// SAFETY: `target` belongs to the `self.module` tree
#[expect(unsafe_code)]
unsafe {
AstNodeRef::new(self.module.clone(), target)
},
// Note `target` belongs to the `self.module` tree
AstNodeRef::new(self.module, target),
UnpackValue::new(unpackable.kind(), value),
countme::Count::default(),
));
@ -1103,6 +1094,7 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
body,
is_async: _,
range: _,
node_index: _,
} = function_def;
for decorator in decorator_list {
self.visit_decorator(decorator);
@ -1377,6 +1369,7 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
test,
msg,
range: _,
node_index: _,
}) => {
// We model an `assert test, msg` statement here. Conceptually, we can think of
// this as being equivalent to the following:
@ -1447,6 +1440,7 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
ast::Stmt::AugAssign(
aug_assign @ ast::StmtAugAssign {
range: _,
node_index: _,
target,
op,
value,
@ -1553,6 +1547,7 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
body,
orelse,
range: _,
node_index: _,
}) => {
self.visit_expr(test);
@ -1620,6 +1615,7 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
}) => {
for item @ ast::WithItem {
range: _,
node_index: _,
context_expr,
optional_vars,
} in items
@ -1643,6 +1639,7 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
ast::Stmt::For(
for_stmt @ ast::StmtFor {
range: _,
node_index: _,
is_async: _,
target,
iter,
@ -1680,6 +1677,7 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
subject,
cases,
range: _,
node_index: _,
}) => {
debug_assert_eq!(self.current_match_case, None);
@ -1767,6 +1765,7 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
finalbody,
is_star,
range: _,
node_index: _,
}) => {
self.record_ambiguous_visibility();
@ -1814,6 +1813,7 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
type_: handled_exceptions,
body: handler_body,
range: _,
node_index: _,
} = except_handler;
if let Some(handled_exceptions) = handled_exceptions {
@ -1892,7 +1892,11 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
// Everything in the current block after a terminal statement is unreachable.
self.mark_unreachable();
}
ast::Stmt::Global(ast::StmtGlobal { range: _, names }) => {
ast::Stmt::Global(ast::StmtGlobal {
range: _,
node_index: _,
names,
}) => {
for name in names {
let symbol_id = self.add_symbol(name.id.clone());
let symbol_table = self.current_place_table();
@ -1915,7 +1919,11 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
}
walk_stmt(self, stmt);
}
ast::Stmt::Delete(ast::StmtDelete { targets, range: _ }) => {
ast::Stmt::Delete(ast::StmtDelete {
targets,
range: _,
node_index: _,
}) => {
// We will check the target expressions and then delete them.
walk_stmt(self, stmt);
for target in targets {
@ -1926,7 +1934,11 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
}
}
}
ast::Stmt::Expr(ast::StmtExpr { value, range: _ }) if self.in_module_scope() => {
ast::Stmt::Expr(ast::StmtExpr {
value,
range: _,
node_index: _,
}) if self.in_module_scope() => {
if let Some(expr) = dunder_all_extend_argument(value) {
self.add_standalone_expression(expr);
}
@ -2186,6 +2198,7 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
ast::Expr::BoolOp(ast::ExprBoolOp {
values,
range: _,
node_index: _,
op,
}) => {
let pre_op = self.flow_snapshot();
@ -2273,6 +2286,7 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
if let ast::Pattern::MatchStar(ast::PatternMatchStar {
name: Some(name),
range: _,
node_index: _,
}) = pattern
{
let symbol = self.add_symbol(name.id().clone());
@ -2556,6 +2570,7 @@ fn dunder_all_extend_argument(value: &ast::Expr) -> Option<&ast::Expr> {
args,
keywords,
range: _,
node_index: _,
},
..
} = value.as_call_expr()?;

View file

@ -333,15 +333,14 @@ pub(crate) struct MatchPatternDefinitionNodeRef<'ast> {
}
impl<'db> DefinitionNodeRef<'_, 'db> {
#[expect(unsafe_code)]
pub(super) unsafe fn into_owned(self, parsed: ParsedModuleRef) -> DefinitionKind<'db> {
pub(super) fn into_owned(self, parsed: &ParsedModuleRef) -> DefinitionKind<'db> {
match self {
DefinitionNodeRef::Import(ImportDefinitionNodeRef {
node,
alias_index,
is_reexported,
}) => DefinitionKind::Import(ImportDefinitionKind {
node: unsafe { AstNodeRef::new(parsed, node) },
node: AstNodeRef::new(parsed, node),
alias_index,
is_reexported,
}),
@ -351,28 +350,28 @@ impl<'db> DefinitionNodeRef<'_, 'db> {
alias_index,
is_reexported,
}) => DefinitionKind::ImportFrom(ImportFromDefinitionKind {
node: unsafe { AstNodeRef::new(parsed, node) },
node: AstNodeRef::new(parsed, node),
alias_index,
is_reexported,
}),
DefinitionNodeRef::ImportStar(star_import) => {
let StarImportDefinitionNodeRef { node, place_id } = star_import;
DefinitionKind::StarImport(StarImportDefinitionKind {
node: unsafe { AstNodeRef::new(parsed, node) },
node: AstNodeRef::new(parsed, node),
place_id,
})
}
DefinitionNodeRef::Function(function) => {
DefinitionKind::Function(unsafe { AstNodeRef::new(parsed, function) })
DefinitionKind::Function(AstNodeRef::new(parsed, function))
}
DefinitionNodeRef::Class(class) => {
DefinitionKind::Class(unsafe { AstNodeRef::new(parsed, class) })
DefinitionKind::Class(AstNodeRef::new(parsed, class))
}
DefinitionNodeRef::TypeAlias(type_alias) => {
DefinitionKind::TypeAlias(unsafe { AstNodeRef::new(parsed, type_alias) })
DefinitionKind::TypeAlias(AstNodeRef::new(parsed, type_alias))
}
DefinitionNodeRef::NamedExpression(named) => {
DefinitionKind::NamedExpression(unsafe { AstNodeRef::new(parsed, named) })
DefinitionKind::NamedExpression(AstNodeRef::new(parsed, named))
}
DefinitionNodeRef::Assignment(AssignmentDefinitionNodeRef {
unpack,
@ -380,8 +379,8 @@ impl<'db> DefinitionNodeRef<'_, 'db> {
target,
}) => DefinitionKind::Assignment(AssignmentDefinitionKind {
target_kind: TargetKind::from(unpack),
value: unsafe { AstNodeRef::new(parsed.clone(), value) },
target: unsafe { AstNodeRef::new(parsed, target) },
value: AstNodeRef::new(parsed, value),
target: AstNodeRef::new(parsed, target),
}),
DefinitionNodeRef::AnnotatedAssignment(AnnotatedAssignmentDefinitionNodeRef {
node: _,
@ -389,14 +388,12 @@ impl<'db> DefinitionNodeRef<'_, 'db> {
value,
target,
}) => DefinitionKind::AnnotatedAssignment(AnnotatedAssignmentDefinitionKind {
target: unsafe { AstNodeRef::new(parsed.clone(), target) },
annotation: unsafe { AstNodeRef::new(parsed.clone(), annotation) },
value: value.map(|v| unsafe { AstNodeRef::new(parsed, v) }),
target: AstNodeRef::new(parsed, target),
annotation: AstNodeRef::new(parsed, annotation),
value: value.map(|v| AstNodeRef::new(parsed, v)),
}),
DefinitionNodeRef::AugmentedAssignment(augmented_assignment) => {
DefinitionKind::AugmentedAssignment(unsafe {
AstNodeRef::new(parsed, augmented_assignment)
})
DefinitionKind::AugmentedAssignment(AstNodeRef::new(parsed, augmented_assignment))
}
DefinitionNodeRef::For(ForStmtDefinitionNodeRef {
unpack,
@ -405,8 +402,8 @@ impl<'db> DefinitionNodeRef<'_, 'db> {
is_async,
}) => DefinitionKind::For(ForStmtDefinitionKind {
target_kind: TargetKind::from(unpack),
iterable: unsafe { AstNodeRef::new(parsed.clone(), iterable) },
target: unsafe { AstNodeRef::new(parsed, target) },
iterable: AstNodeRef::new(parsed, iterable),
target: AstNodeRef::new(parsed, target),
is_async,
}),
DefinitionNodeRef::Comprehension(ComprehensionDefinitionNodeRef {
@ -417,23 +414,19 @@ impl<'db> DefinitionNodeRef<'_, 'db> {
is_async,
}) => DefinitionKind::Comprehension(ComprehensionDefinitionKind {
target_kind: TargetKind::from(unpack),
iterable: unsafe { AstNodeRef::new(parsed.clone(), iterable) },
target: unsafe { AstNodeRef::new(parsed, target) },
iterable: AstNodeRef::new(parsed, iterable),
target: AstNodeRef::new(parsed, target),
first,
is_async,
}),
DefinitionNodeRef::VariadicPositionalParameter(parameter) => {
DefinitionKind::VariadicPositionalParameter(unsafe {
AstNodeRef::new(parsed, parameter)
})
DefinitionKind::VariadicPositionalParameter(AstNodeRef::new(parsed, parameter))
}
DefinitionNodeRef::VariadicKeywordParameter(parameter) => {
DefinitionKind::VariadicKeywordParameter(unsafe {
AstNodeRef::new(parsed, parameter)
})
DefinitionKind::VariadicKeywordParameter(AstNodeRef::new(parsed, parameter))
}
DefinitionNodeRef::Parameter(parameter) => {
DefinitionKind::Parameter(unsafe { AstNodeRef::new(parsed, parameter) })
DefinitionKind::Parameter(AstNodeRef::new(parsed, parameter))
}
DefinitionNodeRef::WithItem(WithItemDefinitionNodeRef {
unpack,
@ -442,8 +435,8 @@ impl<'db> DefinitionNodeRef<'_, 'db> {
is_async,
}) => DefinitionKind::WithItem(WithItemDefinitionKind {
target_kind: TargetKind::from(unpack),
context_expr: unsafe { AstNodeRef::new(parsed.clone(), context_expr) },
target: unsafe { AstNodeRef::new(parsed, target) },
context_expr: AstNodeRef::new(parsed, context_expr),
target: AstNodeRef::new(parsed, target),
is_async,
}),
DefinitionNodeRef::MatchPattern(MatchPatternDefinitionNodeRef {
@ -451,25 +444,25 @@ impl<'db> DefinitionNodeRef<'_, 'db> {
identifier,
index,
}) => DefinitionKind::MatchPattern(MatchPatternDefinitionKind {
pattern: unsafe { AstNodeRef::new(parsed.clone(), pattern) },
identifier: unsafe { AstNodeRef::new(parsed, identifier) },
pattern: AstNodeRef::new(parsed, pattern),
identifier: AstNodeRef::new(parsed, identifier),
index,
}),
DefinitionNodeRef::ExceptHandler(ExceptHandlerDefinitionNodeRef {
handler,
is_star,
}) => DefinitionKind::ExceptHandler(ExceptHandlerDefinitionKind {
handler: unsafe { AstNodeRef::new(parsed, handler) },
handler: AstNodeRef::new(parsed, handler),
is_star,
}),
DefinitionNodeRef::TypeVar(node) => {
DefinitionKind::TypeVar(unsafe { AstNodeRef::new(parsed, node) })
DefinitionKind::TypeVar(AstNodeRef::new(parsed, node))
}
DefinitionNodeRef::ParamSpec(node) => {
DefinitionKind::ParamSpec(unsafe { AstNodeRef::new(parsed, node) })
DefinitionKind::ParamSpec(AstNodeRef::new(parsed, node))
}
DefinitionNodeRef::TypeVarTuple(node) => {
DefinitionKind::TypeVarTuple(unsafe { AstNodeRef::new(parsed, node) })
DefinitionKind::TypeVarTuple(AstNodeRef::new(parsed, node))
}
}
}

View file

@ -778,46 +778,42 @@ pub(crate) enum NodeWithScopeRef<'a> {
impl NodeWithScopeRef<'_> {
/// Converts the unowned reference to an owned [`NodeWithScopeKind`].
///
/// # Safety
/// The node wrapped by `self` must be a child of `module`.
#[expect(unsafe_code)]
pub(super) unsafe fn to_kind(self, module: ParsedModuleRef) -> NodeWithScopeKind {
unsafe {
match self {
NodeWithScopeRef::Module => NodeWithScopeKind::Module,
NodeWithScopeRef::Class(class) => {
NodeWithScopeKind::Class(AstNodeRef::new(module, class))
}
NodeWithScopeRef::Function(function) => {
NodeWithScopeKind::Function(AstNodeRef::new(module, function))
}
NodeWithScopeRef::TypeAlias(type_alias) => {
NodeWithScopeKind::TypeAlias(AstNodeRef::new(module, type_alias))
}
NodeWithScopeRef::TypeAliasTypeParameters(type_alias) => {
NodeWithScopeKind::TypeAliasTypeParameters(AstNodeRef::new(module, type_alias))
}
NodeWithScopeRef::Lambda(lambda) => {
NodeWithScopeKind::Lambda(AstNodeRef::new(module, lambda))
}
NodeWithScopeRef::FunctionTypeParameters(function) => {
NodeWithScopeKind::FunctionTypeParameters(AstNodeRef::new(module, function))
}
NodeWithScopeRef::ClassTypeParameters(class) => {
NodeWithScopeKind::ClassTypeParameters(AstNodeRef::new(module, class))
}
NodeWithScopeRef::ListComprehension(comprehension) => {
NodeWithScopeKind::ListComprehension(AstNodeRef::new(module, comprehension))
}
NodeWithScopeRef::SetComprehension(comprehension) => {
NodeWithScopeKind::SetComprehension(AstNodeRef::new(module, comprehension))
}
NodeWithScopeRef::DictComprehension(comprehension) => {
NodeWithScopeKind::DictComprehension(AstNodeRef::new(module, comprehension))
}
NodeWithScopeRef::GeneratorExpression(generator) => {
NodeWithScopeKind::GeneratorExpression(AstNodeRef::new(module, generator))
}
/// Note that the node wrapped by `self` must be a child of `module`.
pub(super) fn to_kind(self, module: &ParsedModuleRef) -> NodeWithScopeKind {
match self {
NodeWithScopeRef::Module => NodeWithScopeKind::Module,
NodeWithScopeRef::Class(class) => {
NodeWithScopeKind::Class(AstNodeRef::new(module, class))
}
NodeWithScopeRef::Function(function) => {
NodeWithScopeKind::Function(AstNodeRef::new(module, function))
}
NodeWithScopeRef::TypeAlias(type_alias) => {
NodeWithScopeKind::TypeAlias(AstNodeRef::new(module, type_alias))
}
NodeWithScopeRef::TypeAliasTypeParameters(type_alias) => {
NodeWithScopeKind::TypeAliasTypeParameters(AstNodeRef::new(module, type_alias))
}
NodeWithScopeRef::Lambda(lambda) => {
NodeWithScopeKind::Lambda(AstNodeRef::new(module, lambda))
}
NodeWithScopeRef::FunctionTypeParameters(function) => {
NodeWithScopeKind::FunctionTypeParameters(AstNodeRef::new(module, function))
}
NodeWithScopeRef::ClassTypeParameters(class) => {
NodeWithScopeKind::ClassTypeParameters(AstNodeRef::new(module, class))
}
NodeWithScopeRef::ListComprehension(comprehension) => {
NodeWithScopeKind::ListComprehension(AstNodeRef::new(module, comprehension))
}
NodeWithScopeRef::SetComprehension(comprehension) => {
NodeWithScopeKind::SetComprehension(AstNodeRef::new(module, comprehension))
}
NodeWithScopeRef::DictComprehension(comprehension) => {
NodeWithScopeKind::DictComprehension(AstNodeRef::new(module, comprehension))
}
NodeWithScopeRef::GeneratorExpression(generator) => {
NodeWithScopeKind::GeneratorExpression(AstNodeRef::new(module, generator))
}
}
}

View file

@ -104,6 +104,7 @@ impl<'db> Visitor<'db> for ExportFinder<'db> {
name,
asname,
range: _,
node_index: _,
} = alias;
let name = &name.id;
@ -126,6 +127,7 @@ impl<'db> Visitor<'db> for ExportFinder<'db> {
pattern,
name,
range: _,
node_index: _,
}) => {
if let Some(pattern) = pattern {
self.visit_pattern(pattern);
@ -145,6 +147,7 @@ impl<'db> Visitor<'db> for ExportFinder<'db> {
rest,
keys: _,
range: _,
node_index: _,
}) => {
for pattern in patterns {
self.visit_pattern(pattern);
@ -153,7 +156,11 @@ impl<'db> Visitor<'db> for ExportFinder<'db> {
self.possibly_add_export(&rest.id, PossibleExportKind::Normal);
}
}
ast::Pattern::MatchStar(ast::PatternMatchStar { name, range: _ }) => {
ast::Pattern::MatchStar(ast::PatternMatchStar {
name,
range: _,
node_index: _,
}) => {
if let Some(name) = name {
self.possibly_add_export(&name.id, PossibleExportKind::Normal);
}
@ -176,6 +183,7 @@ impl<'db> Visitor<'db> for ExportFinder<'db> {
type_params: _, // We don't want to visit the type params of the class
body: _, // We don't want to visit the body of the class
range: _,
node_index: _,
}) => {
self.possibly_add_export(&name.id, PossibleExportKind::Normal);
for decorator in decorator_list {
@ -194,6 +202,7 @@ impl<'db> Visitor<'db> for ExportFinder<'db> {
type_params: _, // We don't want to visit the type params of the function
body: _, // We don't want to visit the body of the function
range: _,
node_index: _,
is_async: _,
}) => {
self.possibly_add_export(&name.id, PossibleExportKind::Normal);
@ -212,6 +221,7 @@ impl<'db> Visitor<'db> for ExportFinder<'db> {
annotation,
simple: _,
range: _,
node_index: _,
}) => {
if value.is_some() || self.visiting_stub_file {
self.visit_expr(target);
@ -227,6 +237,7 @@ impl<'db> Visitor<'db> for ExportFinder<'db> {
type_params: _,
value: _,
range: _,
node_index: _,
}) => {
self.visit_expr(name);
// Neither walrus expressions nor statements can appear in type aliases;
@ -286,7 +297,12 @@ impl<'db> Visitor<'db> for ExportFinder<'db> {
fn visit_expr(&mut self, expr: &'db ast::Expr) {
match expr {
ast::Expr::Name(ast::ExprName { id, ctx, range: _ }) => {
ast::Expr::Name(ast::ExprName {
id,
ctx,
range: _,
node_index: _,
}) => {
if ctx.is_store() {
self.possibly_add_export(id, PossibleExportKind::Normal);
}
@ -359,11 +375,13 @@ impl<'db> Visitor<'db> for WalrusFinder<'_, 'db> {
target,
value: _,
range: _,
node_index: _,
}) => {
if let ast::Expr::Name(ast::ExprName {
id,
ctx: ast::ExprContext::Store,
range: _,
node_index: _,
}) = &**target
{
self.export_finder

View file

@ -1984,7 +1984,11 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
match statement {
ast::Stmt::FunctionDef(function) => self.infer_function_definition_statement(function),
ast::Stmt::ClassDef(class) => self.infer_class_definition_statement(class),
ast::Stmt::Expr(ast::StmtExpr { range: _, value }) => {
ast::Stmt::Expr(ast::StmtExpr {
range: _,
node_index: _,
value,
}) => {
self.infer_expression(value);
}
ast::Stmt::If(if_statement) => self.infer_if_statement(if_statement),
@ -2033,6 +2037,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
) {
let ast::StmtFunctionDef {
range: _,
node_index: _,
is_async: _,
name,
type_params,
@ -2165,6 +2170,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
fn infer_parameters(&mut self, parameters: &ast::Parameters) {
let ast::Parameters {
range: _,
node_index: _,
posonlyargs: _,
args: _,
vararg,
@ -2186,6 +2192,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
fn infer_parameter_with_default(&mut self, parameter_with_default: &ast::ParameterWithDefault) {
let ast::ParameterWithDefault {
range: _,
node_index: _,
parameter,
default: _,
} = parameter_with_default;
@ -2199,6 +2206,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
fn infer_parameter(&mut self, parameter: &ast::Parameter) {
let ast::Parameter {
range: _,
node_index: _,
name: _,
annotation,
} = parameter;
@ -2244,6 +2252,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
parameter,
default,
range: _,
node_index: _,
} = parameter_with_default;
let default_ty = default
.as_ref()
@ -2378,6 +2387,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
) {
let ast::StmtClassDef {
range: _,
node_index: _,
name,
type_params,
decorator_list,
@ -2509,6 +2519,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
fn infer_if_statement(&mut self, if_statement: &ast::StmtIf) {
let ast::StmtIf {
range: _,
node_index: _,
test,
body,
elif_else_clauses,
@ -2525,6 +2536,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
for clause in elif_else_clauses {
let ast::ElifElseClause {
range: _,
node_index: _,
test,
body,
} = clause;
@ -2544,6 +2556,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
fn infer_try_statement(&mut self, try_statement: &ast::StmtTry) {
let ast::StmtTry {
range: _,
node_index: _,
body,
handlers,
orelse,
@ -2560,6 +2573,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
name: symbol_name,
body,
range: _,
node_index: _,
} = handler;
// If `symbol_name` is `Some()` and `handled_exceptions` is `None`,
@ -2582,6 +2596,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
fn infer_with_statement(&mut self, with_statement: &ast::StmtWith) {
let ast::StmtWith {
range: _,
node_index: _,
is_async,
items,
body,
@ -2781,6 +2796,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
) {
let ast::TypeParamTypeVar {
range: _,
node_index: _,
name,
bound,
default,
@ -2848,6 +2864,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
) {
let ast::TypeParamParamSpec {
range: _,
node_index: _,
name: _,
default,
} = node;
@ -2867,6 +2884,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
) {
let ast::TypeParamTypeVarTuple {
range: _,
node_index: _,
name: _,
default,
} = node;
@ -2882,6 +2900,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
fn infer_match_statement(&mut self, match_statement: &ast::StmtMatch) {
let ast::StmtMatch {
range: _,
node_index: _,
subject,
cases,
} = match_statement;
@ -2891,6 +2910,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
for case in cases {
let ast::MatchCase {
range: _,
node_index: _,
body,
pattern,
guard,
@ -2958,6 +2978,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
ast::Pattern::MatchClass(match_class) => {
let ast::PatternMatchClass {
range: _,
node_index: _,
cls,
arguments,
} = match_class;
@ -2993,6 +3014,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
ast::Pattern::MatchMapping(match_mapping) => {
let ast::PatternMatchMapping {
range: _,
node_index: _,
keys,
patterns,
rest: _,
@ -3007,6 +3029,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
ast::Pattern::MatchClass(match_class) => {
let ast::PatternMatchClass {
range: _,
node_index: _,
cls,
arguments,
} = match_class;
@ -3035,6 +3058,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
fn infer_assignment_statement(&mut self, assignment: &ast::StmtAssign) {
let ast::StmtAssign {
range: _,
node_index: _,
targets,
value,
} = assignment;
@ -3652,6 +3676,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
// Non-name assignment targets are inferred as ordinary expressions, not definitions.
let ast::StmtAnnAssign {
range: _,
node_index: _,
annotation,
value,
target,
@ -3853,6 +3878,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
fn infer_augment_assignment(&mut self, assignment: &ast::StmtAugAssign) -> Type<'db> {
let ast::StmtAugAssign {
range: _,
node_index: _,
target,
op: _,
value,
@ -3889,6 +3915,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
fn infer_for_statement(&mut self, for_statement: &ast::StmtFor) {
let ast::StmtFor {
range: _,
node_index: _,
target,
iter,
body,
@ -3945,6 +3972,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
fn infer_while_statement(&mut self, while_statement: &ast::StmtWhile) {
let ast::StmtWhile {
range: _,
node_index: _,
test,
body,
orelse,
@ -3961,7 +3989,11 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
}
fn infer_import_statement(&mut self, import: &ast::StmtImport) {
let ast::StmtImport { range: _, names } = import;
let ast::StmtImport {
range: _,
node_index: _,
names,
} = import;
for alias in names {
self.infer_definition(alias);
@ -4028,6 +4060,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
) {
let ast::Alias {
range: _,
node_index: _,
name,
asname,
} = alias;
@ -4077,6 +4110,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
fn infer_import_from_statement(&mut self, import: &ast::StmtImportFrom) {
let ast::StmtImportFrom {
range: _,
node_index: _,
module: _,
names,
level: _,
@ -4094,6 +4128,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
fn infer_assert_statement(&mut self, assert: &ast::StmtAssert) {
let ast::StmtAssert {
range: _,
node_index: _,
test,
msg,
} = assert;
@ -4110,6 +4145,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
fn infer_raise_statement(&mut self, raise: &ast::StmtRaise) {
let ast::StmtRaise {
range: _,
node_index: _,
exc,
cause,
} = raise;
@ -4350,7 +4386,11 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
}
fn infer_delete_statement(&mut self, delete: &ast::StmtDelete) {
let ast::StmtDelete { range: _, targets } = delete;
let ast::StmtDelete {
range: _,
node_index: _,
targets,
} = delete;
for target in targets {
self.infer_expression(target);
}
@ -4364,6 +4404,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
fn infer_decorator(&mut self, decorator: &ast::Decorator) -> Type<'db> {
let ast::Decorator {
range: _,
node_index: _,
expression,
} = decorator;
@ -4467,7 +4508,10 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
fn infer_expression_impl(&mut self, expression: &ast::Expr) -> Type<'db> {
let ty = match expression {
ast::Expr::NoneLiteral(ast::ExprNoneLiteral { range: _ }) => Type::none(self.db()),
ast::Expr::NoneLiteral(ast::ExprNoneLiteral {
range: _,
node_index: _,
}) => Type::none(self.db()),
ast::Expr::NumberLiteral(literal) => self.infer_number_literal_expression(literal),
ast::Expr::BooleanLiteral(literal) => self.infer_boolean_literal_expression(literal),
ast::Expr::StringLiteral(literal) => self.infer_string_literal_expression(literal),
@ -4526,7 +4570,11 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
}
fn infer_number_literal_expression(&mut self, literal: &ast::ExprNumberLiteral) -> Type<'db> {
let ast::ExprNumberLiteral { range: _, value } = literal;
let ast::ExprNumberLiteral {
range: _,
node_index: _,
value,
} = literal;
let db = self.db();
match value {
@ -4541,7 +4589,11 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
#[expect(clippy::unused_self)]
fn infer_boolean_literal_expression(&mut self, literal: &ast::ExprBooleanLiteral) -> Type<'db> {
let ast::ExprBooleanLiteral { range: _, value } = literal;
let ast::ExprBooleanLiteral {
range: _,
node_index: _,
value,
} = literal;
Type::BooleanLiteral(*value)
}
@ -4561,7 +4613,11 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
}
fn infer_fstring_expression(&mut self, fstring: &ast::ExprFString) -> Type<'db> {
let ast::ExprFString { range: _, value } = fstring;
let ast::ExprFString {
range: _,
node_index: _,
value,
} = fstring;
let mut collector = StringPartsCollector::new();
for part in value {
@ -4577,6 +4633,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
ast::InterpolatedStringElement::Interpolation(expression) => {
let ast::InterpolatedElement {
range: _,
node_index: _,
expression,
debug_text: _,
conversion,
@ -4678,6 +4735,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
fn infer_tuple_expression(&mut self, tuple: &ast::ExprTuple) -> Type<'db> {
let ast::ExprTuple {
range: _,
node_index: _,
elts,
ctx: _,
parenthesized: _,
@ -4694,6 +4752,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
fn infer_list_expression(&mut self, list: &ast::ExprList) -> Type<'db> {
let ast::ExprList {
range: _,
node_index: _,
elts,
ctx: _,
} = list;
@ -4707,7 +4766,11 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
}
fn infer_set_expression(&mut self, set: &ast::ExprSet) -> Type<'db> {
let ast::ExprSet { range: _, elts } = set;
let ast::ExprSet {
range: _,
node_index: _,
elts,
} = set;
for elt in elts {
self.infer_expression(elt);
@ -4718,7 +4781,11 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
}
fn infer_dict_expression(&mut self, dict: &ast::ExprDict) -> Type<'db> {
let ast::ExprDict { range: _, items } = dict;
let ast::ExprDict {
range: _,
node_index: _,
items,
} = dict;
for item in items {
self.infer_optional_expression(item.key.as_ref());
@ -4741,6 +4808,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
fn infer_generator_expression(&mut self, generator: &ast::ExprGenerator) -> Type<'db> {
let ast::ExprGenerator {
range: _,
node_index: _,
elt: _,
generators,
parenthesized: _,
@ -4754,6 +4822,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
fn infer_list_comprehension_expression(&mut self, listcomp: &ast::ExprListComp) -> Type<'db> {
let ast::ExprListComp {
range: _,
node_index: _,
elt: _,
generators,
} = listcomp;
@ -4766,6 +4835,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
fn infer_dict_comprehension_expression(&mut self, dictcomp: &ast::ExprDictComp) -> Type<'db> {
let ast::ExprDictComp {
range: _,
node_index: _,
key: _,
value: _,
generators,
@ -4779,6 +4849,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
fn infer_set_comprehension_expression(&mut self, setcomp: &ast::ExprSetComp) -> Type<'db> {
let ast::ExprSetComp {
range: _,
node_index: _,
elt: _,
generators,
} = setcomp;
@ -4791,6 +4862,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
fn infer_generator_expression_scope(&mut self, generator: &ast::ExprGenerator) {
let ast::ExprGenerator {
range: _,
node_index: _,
elt,
generators,
parenthesized: _,
@ -4803,6 +4875,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
fn infer_list_comprehension_expression_scope(&mut self, listcomp: &ast::ExprListComp) {
let ast::ExprListComp {
range: _,
node_index: _,
elt,
generators,
} = listcomp;
@ -4814,6 +4887,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
fn infer_dict_comprehension_expression_scope(&mut self, dictcomp: &ast::ExprDictComp) {
let ast::ExprDictComp {
range: _,
node_index: _,
key,
value,
generators,
@ -4827,6 +4901,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
fn infer_set_comprehension_expression_scope(&mut self, setcomp: &ast::ExprSetComp) {
let ast::ExprSetComp {
range: _,
node_index: _,
elt,
generators,
} = setcomp;
@ -4849,6 +4924,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
fn infer_comprehension(&mut self, comprehension: &ast::Comprehension, is_first: bool) {
let ast::Comprehension {
range: _,
node_index: _,
target,
iter,
ifs,
@ -4959,6 +5035,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
) -> Type<'db> {
let ast::ExprNamed {
range: _,
node_index: _,
target,
value,
} = named;
@ -4974,6 +5051,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
fn infer_if_expression(&mut self, if_expression: &ast::ExprIf) -> Type<'db> {
let ast::ExprIf {
range: _,
node_index: _,
test,
body,
orelse,
@ -5000,6 +5078,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
fn infer_lambda_expression(&mut self, lambda_expression: &ast::ExprLambda) -> Type<'db> {
let ast::ExprLambda {
range: _,
node_index: _,
parameters,
body: _,
} = lambda_expression;
@ -5088,6 +5167,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
) -> Type<'db> {
let ast::ExprCall {
range: _,
node_index: _,
func,
arguments,
} = call_expression;
@ -5733,6 +5813,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
fn infer_starred_expression(&mut self, starred: &ast::ExprStarred) -> Type<'db> {
let ast::ExprStarred {
range: _,
node_index: _,
value,
ctx: _,
} = starred;
@ -5748,13 +5829,21 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
}
fn infer_yield_expression(&mut self, yield_expression: &ast::ExprYield) -> Type<'db> {
let ast::ExprYield { range: _, value } = yield_expression;
let ast::ExprYield {
range: _,
node_index: _,
value,
} = yield_expression;
self.infer_optional_expression(value.as_deref());
todo_type!("yield expressions")
}
fn infer_yield_from_expression(&mut self, yield_from: &ast::ExprYieldFrom) -> Type<'db> {
let ast::ExprYieldFrom { range: _, value } = yield_from;
let ast::ExprYieldFrom {
range: _,
node_index: _,
value,
} = yield_from;
let iterable_type = self.infer_expression(value);
iterable_type.try_iterate(self.db()).unwrap_or_else(|err| {
@ -5767,7 +5856,11 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
}
fn infer_await_expression(&mut self, await_expression: &ast::ExprAwait) -> Type<'db> {
let ast::ExprAwait { range: _, value } = await_expression;
let ast::ExprAwait {
range: _,
node_index: _,
value,
} = await_expression;
self.infer_expression(value);
todo_type!("generic `typing.Awaitable` type")
}
@ -5794,6 +5887,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
fn infer_name_load(&mut self, name_node: &ast::ExprName) -> Type<'db> {
let ast::ExprName {
range: _,
node_index: _,
id: symbol_name,
ctx: _,
} = name_node;
@ -6160,6 +6254,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
value,
attr,
range: _,
node_index: _,
ctx: _,
} = attribute;
@ -6253,6 +6348,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
value,
attr: _,
range: _,
node_index: _,
ctx,
} = attribute;
@ -6276,6 +6372,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
fn infer_unary_expression(&mut self, unary: &ast::ExprUnaryOp) -> Type<'db> {
let ast::ExprUnaryOp {
range: _,
node_index: _,
op,
operand,
} = unary;
@ -6370,6 +6467,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
op,
right,
range: _,
node_index: _,
} = binary;
let left_ty = self.infer_expression(left);
@ -6765,6 +6863,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
fn infer_boolean_expression(&mut self, bool_op: &ast::ExprBoolOp) -> Type<'db> {
let ast::ExprBoolOp {
range: _,
node_index: _,
op,
values,
} = bool_op;
@ -6850,6 +6949,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
fn infer_compare_expression(&mut self, compare: &ast::ExprCompare) -> Type<'db> {
let ast::ExprCompare {
range: _,
node_index: _,
left,
ops,
comparators,
@ -7649,6 +7749,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
value,
slice,
range: _,
node_index: _,
ctx,
} = subscript;
@ -7676,6 +7777,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
fn infer_subscript_load(&mut self, subscript: &ast::ExprSubscript) -> Type<'db> {
let ast::ExprSubscript {
range: _,
node_index: _,
value,
slice,
ctx: _,
@ -8153,6 +8255,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
let ast::ExprSlice {
range: _,
node_index: _,
lower,
upper,
step,
@ -8188,6 +8291,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
fn infer_type_parameters(&mut self, type_parameters: &ast::TypeParams) {
let ast::TypeParams {
range: _,
node_index: _,
type_params,
} = type_parameters;
for type_param in type_params {
@ -8494,6 +8598,7 @@ impl<'db> TypeInferenceBuilder<'db, '_> {
slice,
ctx: _,
range: _,
node_index: _,
} = subscript;
let value_ty = self.infer_expression(value);
@ -9037,6 +9142,7 @@ impl<'db> TypeInferenceBuilder<'db, '_> {
) -> Type<'db> {
let ast::ExprSubscript {
range: _,
node_index: _,
value: _,
slice,
ctx: _,

View file

@ -605,6 +605,7 @@ impl<'db, 'ast> NarrowingConstraintsBuilder<'db, 'ast> {
let ast::ExprCompare {
range: _,
node_index: _,
left,
ops,
comparators,
@ -656,12 +657,14 @@ impl<'db, 'ast> NarrowingConstraintsBuilder<'db, 'ast> {
}
ast::Expr::Call(ast::ExprCall {
range: _,
node_index: _,
func: callable,
arguments:
ast::Arguments {
args,
keywords,
range: _,
node_index: _,
},
}) if keywords.is_empty() => {
let rhs_class = match rhs_ty {

View file

@ -1097,6 +1097,7 @@ impl<'db> Parameters<'db> {
kwonlyargs,
kwarg,
range: _,
node_index: _,
} = parameters;
let default_type = |param: &ast::ParameterWithDefault| {
param