Mirror of https://github.com/astral-sh/ruff.git (synced 2025-09-29 13:24:57 +00:00)
[ty] More precise type inference for dictionary literals (#20523)
## Summary

Extends https://github.com/astral-sh/ruff/pull/20360 to dictionary literals. This also improves our `TypedDict` support by passing the type context through to nested expressions.
This commit is contained in:
parent f2cc2f604f
commit bea92c8229

8 changed files with 265 additions and 120 deletions
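As a quick illustration of the behavior this change enables, here is a minimal sketch drawn from the new mdtests in this diff (it only uses constructs that appear in the added tests; the `# revealed:` comments reflect what ty reports, and `typing.reveal_type` requires Python 3.11+ at runtime):

```py
from typing import TypedDict, reveal_type

class Inner(TypedDict):
    name: str
    age: int | None

class Person(TypedDict):
    inner: Inner

# With an annotation, the declared key/value types flow into the literal,
# so the annotated key type `int | str` is preserved rather than being
# re-inferred from the elements alone.
q: dict[int | str, int] = {1: 1, 2: 2, 3: 3}
reveal_type(q)  # revealed: dict[int | str, int]

# The type context is also passed through to nested `TypedDict` fields.
alice: Person = {"inner": {"name": "Alice", "age": 30}}
reveal_type(alice["inner"]["age"])  # revealed: int | None
```

Without an annotation, dictionary literals now infer their key and value types from their elements (for example `dict[Unknown | int, Unknown | int]`) instead of falling back to the previous `@Todo` placeholders.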
@@ -139,6 +139,15 @@ reveal_type(n)  # revealed: list[Literal[1, 2, 3]]
 # error: [invalid-assignment] "Object of type `list[Unknown | str]` is not assignable to `list[LiteralString]`"
 o: list[typing.LiteralString] = ["a", "b", "c"]
 reveal_type(o)  # revealed: list[LiteralString]
+
+p: dict[int, int] = {}
+reveal_type(p)  # revealed: dict[int, int]
+
+q: dict[int | str, int] = {1: 1, 2: 2, 3: 3}
+reveal_type(q)  # revealed: dict[int | str, int]
+
+r: dict[int | str, int | str] = {1: 1, 2: 2, 3: 3}
+reveal_type(r)  # revealed: dict[int | str, int | str]
 ```
 
 ## Incorrect collection literal assignments are complained about
@@ -57,7 +57,7 @@ type("Foo", Base, {})
 # error: [invalid-argument-type] "Argument to class `type` is incorrect: Expected `tuple[type, ...]`, found `tuple[Literal[1], Literal[2]]`"
 type("Foo", (1, 2), {})
 
-# TODO: this should be an error
+# error: [invalid-argument-type] "Argument to class `type` is incorrect: Expected `dict[str, Any]`, found `dict[Unknown | bytes, Unknown | int]`"
 type("Foo", (Base,), {b"attr": 1})
 ```
@@ -3,7 +3,49 @@
 ## Empty dictionary
 
 ```py
-reveal_type({})  # revealed: dict[@Todo(dict literal key type), @Todo(dict literal value type)]
+reveal_type({})  # revealed: dict[Unknown, Unknown]
+```
+
+## Basic dict
+
+```py
+reveal_type({1: 1, 2: 1})  # revealed: dict[Unknown | int, Unknown | int]
+```
+
+## Dict of tuples
+
+```py
+reveal_type({1: (1, 2), 2: (3, 4)})  # revealed: dict[Unknown | int, Unknown | tuple[int, int]]
+```
+
+## Unpacked dict
+
+```py
+a = {"a": 1, "b": 2}
+b = {"c": 3, "d": 4}
+
+d = {**a, **b}
+reveal_type(d)  # revealed: dict[Unknown | str, Unknown | int]
+```
+
+## Dict of functions
+
+```py
+def a(_: int) -> int:
+    return 0
+
+def b(_: int) -> int:
+    return 1
+
+x = {1: a, 2: b}
+reveal_type(x)  # revealed: dict[Unknown | int, Unknown | ((_: int) -> int)]
+```
+
+## Mixed dict
+
+```py
+# revealed: dict[Unknown | str, Unknown | int | tuple[int, int] | tuple[int, int, int]]
+reveal_type({"a": 1, "b": (1, 2), "c": (1, 2, 3)})
 ```
 
 ## Dict comprehensions
@@ -206,8 +206,7 @@ dd: defaultdict[int, int] = defaultdict(int)
 dd[0] = 0
 cm: ChainMap[int, int] = ChainMap({1: 1}, {0: 0})
 cm[0] = 0
-# TODO: should be ChainMap[int, int]
-reveal_type(cm)  # revealed: ChainMap[@Todo(dict literal key type), @Todo(dict literal value type)]
+reveal_type(cm)  # revealed: ChainMap[Unknown | int, Unknown | int]
 
 reveal_type(l[0])  # revealed: Literal[0]
 reveal_type(d[0])  # revealed: Literal[0]
@@ -85,6 +85,34 @@ alice["extra"] = True
 bob["extra"] = True
 ```
 
+## Nested `TypedDict`
+
+Nested `TypedDict` fields are also supported.
+
+```py
+from typing import TypedDict
+
+class Inner(TypedDict):
+    name: str
+    age: int | None
+
+class Person(TypedDict):
+    inner: Inner
+```
+
+```py
+alice: Person = {"inner": {"name": "Alice", "age": 30}}
+
+reveal_type(alice["inner"]["name"])  # revealed: str
+reveal_type(alice["inner"]["age"])  # revealed: int | None
+
+# error: [invalid-key] "Invalid key access on TypedDict `Inner`: Unknown key "non_existing""
+reveal_type(alice["inner"]["non_existing"])  # revealed: Unknown
+
+# error: [invalid-key] "Invalid key access on TypedDict `Inner`: Unknown key "extra""
+alice: Person = {"inner": {"name": "Alice", "age": 30, "extra": 1}}
+```
+
 ## Validation of `TypedDict` construction
 
 ```py
@@ -849,6 +849,28 @@ impl<'db> Type<'db> {
         matches!(self, Type::Dynamic(_))
     }
 
+    // If the type is a specialized instance of the given `KnownClass`, returns the specialization.
+    pub(crate) fn known_specialization(
+        self,
+        known_class: KnownClass,
+        db: &'db dyn Db,
+    ) -> Option<Specialization<'db>> {
+        let class_type = match self {
+            Type::NominalInstance(instance) => instance,
+            Type::TypeAlias(alias) => alias.value_type(db).into_nominal_instance()?,
+            _ => return None,
+        }
+        .class(db);
+
+        if !class_type.is_known(db, known_class) {
+            return None;
+        }
+
+        class_type
+            .into_generic_alias()
+            .map(|generic_alias| generic_alias.specialization(db))
+    }
+
     /// Returns the top materialization (or upper bound materialization) of this type, which is the
     /// most general form of the type that is fully static.
     #[must_use]
@@ -386,20 +386,8 @@ impl<'db> TypeContext<'db> {
         known_class: KnownClass,
         db: &'db dyn Db,
     ) -> Option<Specialization<'db>> {
-        let class_type = match self.annotation? {
-            Type::NominalInstance(instance) => instance,
-            Type::TypeAlias(alias) => alias.value_type(db).into_nominal_instance()?,
-            _ => return None,
-        }
-        .class(db);
-
-        if !class_type.is_known(db, known_class) {
-            return None;
-        }
-
-        class_type
-            .into_generic_alias()
-            .map(|generic_alias| generic_alias.specialization(db))
+        self.annotation
+            .and_then(|ty| ty.known_specialization(known_class, db))
     }
 }
 
@@ -1,4 +1,6 @@
-use itertools::Itertools;
+use std::iter;
+
+use itertools::{Either, Itertools};
 use ruff_db::diagnostic::{Annotation, DiagnosticId, Severity};
 use ruff_db::files::File;
 use ruff_db::parsed::ParsedModuleRef;
@@ -86,13 +88,13 @@ use crate::types::typed_dict::{
 };
 use crate::types::visitor::any_over_type;
 use crate::types::{
-    CallDunderError, CallableType, ClassLiteral, ClassType, DataclassParams, DynamicType,
-    IntersectionBuilder, IntersectionType, KnownClass, KnownInstanceType, MemberLookupPolicy,
-    MetaclassCandidate, PEP695TypeAliasType, Parameter, ParameterForm, Parameters, SpecialFormType,
-    SubclassOfType, TrackedConstraintSet, Truthiness, Type, TypeAliasType, TypeAndQualifiers,
-    TypeContext, TypeMapping, TypeQualifiers, TypeVarBoundOrConstraintsEvaluation,
-    TypeVarDefaultEvaluation, TypeVarInstance, TypeVarKind, UnionBuilder, UnionType, binding_type,
-    todo_type,
+    BoundTypeVarInstance, CallDunderError, CallableType, ClassLiteral, ClassType, DataclassParams,
+    DynamicType, IntersectionBuilder, IntersectionType, KnownClass, KnownInstanceType,
+    MemberLookupPolicy, MetaclassCandidate, PEP695TypeAliasType, Parameter, ParameterForm,
+    Parameters, SpecialFormType, SubclassOfType, TrackedConstraintSet, Truthiness, Type,
+    TypeAliasType, TypeAndQualifiers, TypeContext, TypeMapping, TypeQualifiers,
+    TypeVarBoundOrConstraintsEvaluation, TypeVarDefaultEvaluation, TypeVarInstance, TypeVarKind,
+    UnionBuilder, UnionType, binding_type, todo_type,
 };
 use crate::types::{ClassBase, add_inferred_python_version_hint_to_diagnostic};
 use crate::unpack::{EvaluationMode, UnpackPosition};
@@ -4110,7 +4112,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
             value,
             TypeContext::new(Some(declared.inner_type())),
         );
-        let mut inferred_ty = if target
+        let inferred_ty = if target
             .as_name_expr()
             .is_some_and(|name| &name.id == "TYPE_CHECKING")
         {
@@ -4121,24 +4123,6 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
             inferred_ty
         };
 
-        // Validate `TypedDict` dictionary literal assignments
-        if let Some(typed_dict) = declared.inner_type().into_typed_dict() {
-            if let Some(dict_expr) = value.as_dict_expr() {
-                validate_typed_dict_dict_literal(
-                    &self.context,
-                    typed_dict,
-                    dict_expr,
-                    target.into(),
-                    |expr| self.expression_type(expr),
-                );
-
-                // Override the inferred type of the dict literal to be the `TypedDict` type
-                // This ensures that the dict literal gets the correct type for key access
-                let typed_dict_type = Type::TypedDict(typed_dict);
-                inferred_ty = typed_dict_type;
-            }
-        }
-
         self.add_declaration_with_binding(
             target.into(),
             definition,
@@ -5290,6 +5274,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
             ctx: _,
         } = list;
 
+        let elts = elts.iter().map(|elt| [Some(elt)]);
         self.infer_collection_literal(elts, tcx, KnownClass::List)
             .unwrap_or_else(|| {
                 KnownClass::List.to_specialized_instance(self.db(), [Type::unknown()])
@@ -5303,95 +5288,167 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
             elts,
         } = set;
 
+        let elts = elts.iter().map(|elt| [Some(elt)]);
         self.infer_collection_literal(elts, tcx, KnownClass::Set)
             .unwrap_or_else(|| {
                 KnownClass::Set.to_specialized_instance(self.db(), [Type::unknown()])
             })
     }
 
-    // Infer the type of a collection literal expression.
-    fn infer_collection_literal(
-        &mut self,
-        elts: &[ast::Expr],
-        tcx: TypeContext<'db>,
-        collection_class: KnownClass,
-    ) -> Option<Type<'db>> {
-        // Extract the type variable `T` from `list[T]` in typeshed.
-        fn elts_ty(
-            collection_class: KnownClass,
-            db: &dyn Db,
-        ) -> Option<(ClassLiteral<'_>, Type<'_>)> {
-            let class_literal = collection_class.try_to_class_literal(db)?;
-            let generic_context = class_literal.generic_context(db)?;
-            let variables = generic_context.variables(db);
-            let elts_ty = variables.iter().exactly_one().ok()?;
-            Some((class_literal, Type::TypeVar(*elts_ty)))
-        }
-
-        let annotated_elts_ty = tcx
-            .known_specialization(collection_class, self.db())
-            .and_then(|specialization| specialization.types(self.db()).iter().exactly_one().ok())
-            .copied();
-
-        let (class_literal, elts_ty) = elts_ty(collection_class, self.db()).unwrap_or_else(|| {
-            let name = collection_class.name(self.db());
-            panic!("Typeshed should always have a `{name}` class in `builtins.pyi` with a single type variable")
-        });
-
-        // Create a set of constraints to infer a precise type for `T`.
-        let mut builder = SpecializationBuilder::new(self.db());
-
-        match annotated_elts_ty {
-            // The annotated type acts as a constraint for `T`.
-            //
-            // Note that we infer the annotated type _before_ the elements, to closer match the order
-            // of any unions written in the type annotation.
-            Some(annotated_elts_ty) => {
-                builder.infer(elts_ty, annotated_elts_ty).ok()?;
-            }
-
-            // If a valid type annotation was not provided, avoid restricting the type of the collection
-            // by unioning the inferred type with `Unknown`.
-            None => builder.infer(elts_ty, Type::unknown()).ok()?,
-        }
-
-        // The inferred type of each element acts as an additional constraint on `T`.
-        for elt in elts {
-            let inferred_elt_ty = self.infer_expression(elt, TypeContext::new(annotated_elts_ty));
-
-            // Convert any element literals to their promoted type form to avoid excessively large
-            // unions for large nested list literals, which the constraint solver struggles with.
-            let inferred_elt_ty =
-                inferred_elt_ty.apply_type_mapping(self.db(), &TypeMapping::PromoteLiterals);
-            builder.infer(elts_ty, inferred_elt_ty).ok()?;
-        }
-
-        let class_type = class_literal
-            .apply_specialization(self.db(), |generic_context| builder.build(generic_context));
-
-        Type::from(class_type).to_instance(self.db())
-    }
-
-    fn infer_dict_expression(&mut self, dict: &ast::ExprDict, _tcx: TypeContext<'db>) -> Type<'db> {
+    fn infer_dict_expression(&mut self, dict: &ast::ExprDict, tcx: TypeContext<'db>) -> Type<'db> {
         let ast::ExprDict {
             range: _,
             node_index: _,
             items,
         } = dict;
 
-        // TODO: Use the type context for more precise inference.
-        for item in items {
-            self.infer_optional_expression(item.key.as_ref(), TypeContext::default());
-            self.infer_expression(&item.value, TypeContext::default());
+        // Validate `TypedDict` dictionary literal assignments.
+        if let Some(typed_dict) = tcx.annotation.and_then(Type::into_typed_dict) {
+            let typed_dict_items = typed_dict.items(self.db());
+
+            for item in items {
+                self.infer_optional_expression(item.key.as_ref(), TypeContext::default());
+
+                if let Some(ast::Expr::StringLiteral(ref key)) = item.key
+                    && let Some(key) = key.as_single_part_string()
+                    && let Some(field) = typed_dict_items.get(key.as_str())
+                {
+                    self.infer_expression(&item.value, TypeContext::new(Some(field.declared_ty)));
+                } else {
+                    self.infer_expression(&item.value, TypeContext::default());
+                }
+            }
+
+            validate_typed_dict_dict_literal(
+                &self.context,
+                typed_dict,
+                dict,
+                dict.into(),
+                |expr| self.expression_type(expr),
+            );
+
+            return Type::TypedDict(typed_dict);
         }
 
-        KnownClass::Dict.to_specialized_instance(
-            self.db(),
-            [
-                todo_type!("dict literal key type"),
-                todo_type!("dict literal value type"),
-            ],
-        )
+        // Avoid false positives for the functional `TypedDict` form, which is currently
+        // unsupported.
+        if let Some(Type::Dynamic(DynamicType::Todo(_))) = tcx.annotation {
+            return KnownClass::Dict
+                .to_specialized_instance(self.db(), [Type::unknown(), Type::unknown()]);
+        }
+
+        let items = items
+            .iter()
+            .map(|item| [item.key.as_ref(), Some(&item.value)]);
+
+        self.infer_collection_literal(items, tcx, KnownClass::Dict)
+            .unwrap_or_else(|| {
+                KnownClass::Dict
+                    .to_specialized_instance(self.db(), [Type::unknown(), Type::unknown()])
+            })
+    }
+
+    // Infer the type of a collection literal expression.
+    fn infer_collection_literal<'expr, const N: usize>(
+        &mut self,
+        elts: impl Iterator<Item = [Option<&'expr ast::Expr>; N]>,
+        tcx: TypeContext<'db>,
+        collection_class: KnownClass,
+    ) -> Option<Type<'db>> {
+        // Extract the type variable `T` from `list[T]` in typeshed.
+        fn elt_tys(
+            collection_class: KnownClass,
+            db: &dyn Db,
+        ) -> Option<(ClassLiteral<'_>, &FxOrderSet<BoundTypeVarInstance<'_>>)> {
+            let class_literal = collection_class.try_to_class_literal(db)?;
+            let generic_context = class_literal.generic_context(db)?;
+            Some((class_literal, generic_context.variables(db)))
+        }
+
+        let (class_literal, elt_tys) = elt_tys(collection_class, self.db()).unwrap_or_else(|| {
+            let name = collection_class.name(self.db());
+            panic!("Typeshed should always have a `{name}` class in `builtins.pyi`")
+        });
+
+        // Extract the annotated type of `T`, if provided.
+        let annotated_elt_tys = tcx
+            .known_specialization(collection_class, self.db())
+            .map(|specialization| specialization.types(self.db()));
+
+        // Create a set of constraints to infer a precise type for `T`.
+        let mut builder = SpecializationBuilder::new(self.db());
+
+        match annotated_elt_tys {
+            // The annotated type acts as a constraint for `T`.
+            //
+            // Note that we infer the annotated type _before_ the elements, to more closely match the
+            // order of any unions as written in the type annotation.
+            Some(annotated_elt_tys) => {
+                for (elt_ty, annotated_elt_ty) in iter::zip(elt_tys, annotated_elt_tys) {
+                    builder
+                        .infer(Type::TypeVar(*elt_ty), *annotated_elt_ty)
+                        .ok()?;
+                }
+            }
+
+            // If a valid type annotation was not provided, avoid restricting the type of the collection
+            // by unioning the inferred type with `Unknown`.
+            None => {
+                for elt_ty in elt_tys {
+                    builder
+                        .infer(Type::TypeVar(*elt_ty), Type::unknown())
+                        .ok()?;
+                }
+            }
+        }
+
+        let elt_tcxs = match annotated_elt_tys {
+            None => Either::Left(iter::repeat(TypeContext::default())),
+            Some(tys) => Either::Right(tys.iter().map(|ty| TypeContext::new(Some(*ty)))),
+        };
+
+        for elts in elts {
+            // An unpacking expression for a dictionary.
+            if let &[None, Some(value)] = elts.as_slice() {
+                let inferred_value_ty = self.infer_expression(value, TypeContext::default());
+
+                // Merge the inferred type of the nested dictionary.
+                if let Some(specialization) =
+                    inferred_value_ty.known_specialization(KnownClass::Dict, self.db())
+                {
+                    for (elt_ty, inferred_elt_ty) in
+                        iter::zip(elt_tys, specialization.types(self.db()))
+                    {
+                        builder
+                            .infer(Type::TypeVar(*elt_ty), *inferred_elt_ty)
+                            .ok()?;
+                    }
+                }
+
+                continue;
+            }
+
+            // The inferred type of each element acts as an additional constraint on `T`.
+            for (elt, elt_ty, elt_tcx) in itertools::izip!(elts, elt_tys, elt_tcxs.clone()) {
+                let Some(inferred_elt_ty) = self.infer_optional_expression(elt, elt_tcx) else {
+                    continue;
+                };
+
+                // Convert any element literals to their promoted type form to avoid excessively large
+                // unions for large nested list literals, which the constraint solver struggles with.
+                let inferred_elt_ty =
+                    inferred_elt_ty.apply_type_mapping(self.db(), &TypeMapping::PromoteLiterals);
+
+                builder
+                    .infer(Type::TypeVar(*elt_ty), inferred_elt_ty)
+                    .ok()?;
+            }
+        }
+
+        let class_type = class_literal
+            .apply_specialization(self.db(), |generic_context| builder.build(generic_context));
+
+        Type::from(class_type).to_instance(self.db())
     }
 
     /// Infer the type of the `iter` expression of the first comprehension.