@no_type_check support (#15122)

Co-authored-by: Carl Meyer <carl@astral.sh>
Micha Reiser 2024-12-30 10:42:18 +01:00 committed by GitHub
parent d4ee6abf4a
commit 0caab81d3d
8 changed files with 253 additions and 47 deletions

View file

@ -43,7 +43,7 @@ impl<T> AstNodeRef<T> {
}
/// Returns a reference to the wrapped node.
pub fn node(&self) -> &T {
pub const fn node(&self) -> &T {
// SAFETY: Holding on to `parsed` ensures that the AST to which `node` belongs is still
// alive and not moved.
unsafe { self.node.as_ref() }
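
To make the `SAFETY` argument above concrete, here is a minimal, self-contained sketch of the pattern, using hypothetical stand-in types rather than red-knot's real `AstNodeRef`/`ParsedModule`: the wrapper keeps an owning handle to the parsed AST next to a raw pointer into it, which is what keeps the pointer valid and lets `node()` be a `const fn`.

```rust
use std::ptr::NonNull;
use std::sync::Arc;

// Hypothetical stand-in for red-knot's `AstNodeRef<T>`: `_parsed` keeps the
// AST storage alive (and it is never moved out of the `Arc`), so the raw
// pointer in `node` stays valid for as long as this struct exists.
struct NodeRef<T> {
    _parsed: Arc<Vec<T>>,
    node: NonNull<T>,
}

impl<T> NodeRef<T> {
    fn new(parsed: Arc<Vec<T>>, index: usize) -> Self {
        let node = NonNull::from(&parsed[index]);
        Self { _parsed: parsed, node }
    }

    /// Returns a reference to the wrapped node (mirrors the `const fn` above).
    const fn node(&self) -> &T {
        // SAFETY: `_parsed` keeps the storage alive and un-moved, so the
        // pointer remains dereferenceable.
        unsafe { self.node.as_ref() }
    }
}

fn main() {
    let parsed = Arc::new(vec![1, 2, 3]);
    let node_ref = NodeRef::new(parsed, 1);
    assert_eq!(*node_ref.node(), 2);
}
```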

View file

@ -6,10 +6,9 @@ use rustc_hash::{FxHashMap, FxHashSet};
use ruff_db::files::File;
use ruff_db::parsed::ParsedModule;
use ruff_index::IndexVec;
use ruff_python_ast as ast;
use ruff_python_ast::name::Name;
use ruff_python_ast::visitor::{walk_expr, walk_pattern, walk_stmt, Visitor};
use ruff_python_ast::{self as ast, Pattern};
use ruff_python_ast::{BoolOp, Expr};
use crate::ast_node_ref::AstNodeRef;
use crate::module_name::ModuleName;
@ -289,7 +288,7 @@ impl<'db> SemanticIndexBuilder<'db> {
constraint
}
fn build_constraint(&mut self, constraint_node: &Expr) -> Constraint<'db> {
fn build_constraint(&mut self, constraint_node: &ast::Expr) -> Constraint<'db> {
let expression = self.add_standalone_expression(constraint_node);
Constraint {
node: ConstraintNode::Expression(expression),
@ -408,11 +407,11 @@ impl<'db> SemanticIndexBuilder<'db> {
let guard = guard.map(|guard| self.add_standalone_expression(guard));
let kind = match pattern {
Pattern::MatchValue(pattern) => {
ast::Pattern::MatchValue(pattern) => {
let value = self.add_standalone_expression(&pattern.value);
PatternConstraintKind::Value(value, guard)
}
Pattern::MatchSingleton(singleton) => {
ast::Pattern::MatchSingleton(singleton) => {
PatternConstraintKind::Singleton(singleton.value, guard)
}
_ => PatternConstraintKind::Unsupported,
@ -1492,8 +1491,8 @@ where
if index < values.len() - 1 {
let constraint = self.build_constraint(value);
let (constraint, constraint_id) = match op {
BoolOp::And => (constraint, self.add_constraint(constraint)),
BoolOp::Or => self.add_negated_constraint(constraint),
ast::BoolOp::And => (constraint, self.add_constraint(constraint)),
ast::BoolOp::Or => self.add_negated_constraint(constraint),
};
let visibility_constraint = self
.add_visibility_constraint(VisibilityConstraint::VisibleIf(constraint));

View file

@ -463,10 +463,7 @@ impl NodeWithScopeKind {
}
pub fn expect_function(&self) -> &ast::StmtFunctionDef {
match self {
Self::Function(function) => function.node(),
_ => panic!("expected function"),
}
self.as_function().expect("expected function")
}
pub fn expect_type_alias(&self) -> &ast::StmtTypeAlias {
@ -475,6 +472,13 @@ impl NodeWithScopeKind {
_ => panic!("expected type alias"),
}
}
pub const fn as_function(&self) -> Option<&ast::StmtFunctionDef> {
match self {
Self::Function(function) => Some(function.node()),
_ => None,
}
}
}
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
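
The `expect_function`/`as_function` pair introduced above follows a common accessor convention: the fallible `as_*` method does the matching and the panicking `expect_*` method simply delegates to it. A small sketch of the same shape, using a hypothetical enum rather than the real `NodeWithScopeKind`:

```rust
// Hypothetical enum standing in for `NodeWithScopeKind`.
enum Item {
    Function(String),
    Class(String),
}

impl Item {
    /// Fallible accessor: returns `None` for non-function items.
    const fn as_function(&self) -> Option<&String> {
        match self {
            Self::Function(name) => Some(name),
            _ => None,
        }
    }

    /// Panicking accessor that delegates to the fallible one.
    fn expect_function(&self) -> &String {
        self.as_function().expect("expected function")
    }
}

fn main() {
    let item = Item::Function("f".to_string());
    assert_eq!(item.expect_function(), "f");
    assert!(Item::Class("C".to_string()).as_function().is_none());
}
```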

View file

@ -3086,13 +3086,16 @@ pub enum KnownFunction {
Len,
/// `typing(_extensions).final`
Final,
/// [`typing(_extensions).no_type_check`](https://typing.readthedocs.io/en/latest/spec/directives.html#no-type-check)
NoTypeCheck,
}
impl KnownFunction {
pub fn constraint_function(self) -> Option<KnownConstraintFunction> {
match self {
Self::ConstraintFunction(f) => Some(f),
Self::RevealType | Self::Len | Self::Final => None,
Self::RevealType | Self::Len | Self::Final | Self::NoTypeCheck => None,
}
}
@ -3111,6 +3114,9 @@ impl KnownFunction {
),
"len" if definition.is_builtin_definition(db) => Some(KnownFunction::Len),
"final" if definition.is_typing_definition(db) => Some(KnownFunction::Final),
"no_type_check" if definition.is_typing_definition(db) => {
Some(KnownFunction::NoTypeCheck)
}
_ => None,
}
}
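
As a rough illustration of the classification above, here is a hedged sketch with simplified stand-ins (the real `KnownFunction` also carries a `ConstraintFunction` variant and resolves definitions through the semantic index rather than by module-name strings): a function definition is only treated as special when its name matches and it is defined in the expected module.

```rust
// Simplified stand-in for the `KnownFunction` classification: the real code
// checks the *definition* via the semantic index; here we just take the
// defining module's name as a string.
#[derive(Debug, PartialEq)]
enum KnownFunction {
    Len,
    Final,
    /// `typing(_extensions).no_type_check`
    NoTypeCheck,
}

fn try_from_definition(name: &str, module: &str) -> Option<KnownFunction> {
    let is_typing = matches!(module, "typing" | "typing_extensions");
    match name {
        "len" if module == "builtins" => Some(KnownFunction::Len),
        "final" if is_typing => Some(KnownFunction::Final),
        // The arm added in this commit.
        "no_type_check" if is_typing => Some(KnownFunction::NoTypeCheck),
        _ => None,
    }
}

fn main() {
    assert_eq!(
        try_from_definition("no_type_check", "typing"),
        Some(KnownFunction::NoTypeCheck)
    );
    // A user-defined function that merely shares the name is not special-cased.
    assert_eq!(try_from_definition("no_type_check", "my_module"), None);
}
```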

View file

@ -8,14 +8,16 @@ use ruff_db::{
use ruff_python_ast::AnyNodeRef;
use ruff_text_size::Ranged;
use super::{binding_ty, KnownFunction, TypeCheckDiagnostic, TypeCheckDiagnostics};
use crate::semantic_index::semantic_index;
use crate::semantic_index::symbol::ScopeId;
use crate::{
lint::{LintId, LintMetadata},
suppression::suppressions,
Db,
};
use super::{TypeCheckDiagnostic, TypeCheckDiagnostics};
/// Context for inferring the types of a single file.
///
/// One context exists for at least every inferred region but it's
@ -30,17 +32,21 @@ use super::{TypeCheckDiagnostic, TypeCheckDiagnostics};
/// on the current [`TypeInference`](super::infer::TypeInference) result.
pub(crate) struct InferContext<'db> {
db: &'db dyn Db,
scope: ScopeId<'db>,
file: File,
diagnostics: std::cell::RefCell<TypeCheckDiagnostics>,
no_type_check: InNoTypeCheck,
bomb: DebugDropBomb,
}
impl<'db> InferContext<'db> {
pub(crate) fn new(db: &'db dyn Db, file: File) -> Self {
pub(crate) fn new(db: &'db dyn Db, scope: ScopeId<'db>) -> Self {
Self {
db,
file,
scope,
file: scope.file(db),
diagnostics: std::cell::RefCell::new(TypeCheckDiagnostics::default()),
no_type_check: InNoTypeCheck::default(),
bomb: DebugDropBomb::new("`InferContext` needs to be explicitly consumed by calling `::finish` to prevent accidental loss of diagnostics."),
}
}
@ -68,11 +74,19 @@ impl<'db> InferContext<'db> {
node: AnyNodeRef,
message: fmt::Arguments,
) {
if !self.db.is_file_open(self.file) {
return;
}
// Skip over diagnostics if the rule is disabled.
let Some(severity) = self.db.rule_selection().severity(LintId::of(lint)) else {
return;
};
if self.is_in_no_type_check() {
return;
}
let suppressions = suppressions(self.db, self.file);
if let Some(suppression) = suppressions.find_suppression(node.range(), LintId::of(lint)) {
@ -112,6 +126,42 @@ impl<'db> InferContext<'db> {
});
}
pub(super) fn set_in_no_type_check(&mut self, no_type_check: InNoTypeCheck) {
self.no_type_check = no_type_check;
}
fn is_in_no_type_check(&self) -> bool {
match self.no_type_check {
InNoTypeCheck::Possibly => {
// Accessing the semantic index here is fine because
// the index belongs to the same file as the diagnostic being emitted.
let index = semantic_index(self.db, self.file);
let scope_id = self.scope.file_scope_id(self.db);
// Inspect all ancestor function scopes by walking bottom-up and infer each function's type.
let mut function_scope_tys = index
.ancestor_scopes(scope_id)
.filter_map(|(_, scope)| scope.node().as_function())
.filter_map(|function| {
binding_ty(self.db, index.definition(function)).into_function_literal()
});
// Iterate over all functions and test if any is decorated with `@no_type_check`.
function_scope_tys.any(|function_ty| {
function_ty
.decorators(self.db)
.iter()
.filter_map(|decorator| decorator.into_function_literal())
.any(|decorator_ty| {
decorator_ty.is_known(self.db, KnownFunction::NoTypeCheck)
})
})
}
InNoTypeCheck::Yes => true,
}
}
#[must_use]
pub(crate) fn finish(mut self) -> TypeCheckDiagnostics {
self.bomb.defuse();
@ -131,6 +181,17 @@ impl fmt::Debug for InferContext<'_> {
}
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Default)]
pub(crate) enum InNoTypeCheck {
/// The inference might be in a `no_type_check` block but only if any
/// ancestor function is decorated with `@no_type_check`.
#[default]
Possibly,
/// The inference is known to be in an `@no_type_check` decorated function.
Yes,
}
pub(crate) trait WithDiagnostics {
fn diagnostics(&self) -> &TypeCheckDiagnostics;
}
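
The core of the feature is the lazy check sketched in `is_in_no_type_check` above: by default the context is only *possibly* under `@no_type_check` (the `Possibly` variant), and the answer is computed by walking the ancestor scopes and looking for an enclosing function decorated with `no_type_check`. A minimal, self-contained sketch of that walk, with hypothetical stand-in types instead of the semantic index and with decorators reduced to plain strings:

```rust
// Hypothetical stand-ins: real red-knot walks `ancestor_scopes` from the
// semantic index and compares decorator *types* against
// `KnownFunction::NoTypeCheck`; here scopes are a flat slice and decorators
// are plain strings.
#[derive(Clone, Copy, Default)]
enum InNoTypeCheck {
    /// Possibly suppressed: decide by inspecting the ancestor function scopes.
    #[default]
    Possibly,
    /// Known to be inside an `@no_type_check`-decorated function.
    Yes,
}

struct Scope {
    /// Index of the enclosing scope, `None` for the module scope.
    parent: Option<usize>,
    /// `Some(decorators)` if this is a function scope.
    function_decorators: Option<Vec<&'static str>>,
}

fn is_in_no_type_check(scopes: &[Scope], start: usize, state: InNoTypeCheck) -> bool {
    match state {
        InNoTypeCheck::Yes => true,
        InNoTypeCheck::Possibly => {
            // Walk bottom-up: the current scope, then every ancestor.
            let mut current = Some(start);
            while let Some(index) = current {
                let scope = &scopes[index];
                if let Some(decorators) = &scope.function_decorators {
                    if decorators.contains(&"no_type_check") {
                        return true;
                    }
                }
                current = scope.parent;
            }
            false
        }
    }
}

fn main() {
    // module -> `@no_type_check` function -> nested function
    let scopes = vec![
        Scope { parent: None, function_decorators: None },
        Scope { parent: Some(0), function_decorators: Some(vec!["no_type_check"]) },
        Scope { parent: Some(1), function_decorators: Some(vec![]) },
    ];
    // Diagnostics in the nested function are suppressed too, because an
    // ancestor function scope carries the decorator.
    assert!(is_in_no_type_check(&scopes, 2, InNoTypeCheck::Possibly));
    assert!(!is_in_no_type_check(&scopes, 0, InNoTypeCheck::Possibly));
    // Once the context is known to be in `no_type_check`, no walk is needed.
    assert!(is_in_no_type_check(&scopes, 0, InNoTypeCheck::Yes));
}
```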

View file

@ -31,7 +31,7 @@ use std::num::NonZeroU32;
use itertools::Itertools;
use ruff_db::files::File;
use ruff_db::parsed::parsed_module;
use ruff_python_ast::{self as ast, AnyNodeRef, ExprContext, UnaryOp};
use ruff_python_ast::{self as ast, AnyNodeRef, ExprContext};
use ruff_text_size::Ranged;
use rustc_hash::{FxHashMap, FxHashSet};
use salsa;
@ -72,7 +72,7 @@ use crate::unpack::Unpack;
use crate::util::subscript::{PyIndex, PySlice};
use crate::Db;
use super::context::{InferContext, WithDiagnostics};
use super::context::{InNoTypeCheck, InferContext, WithDiagnostics};
use super::diagnostic::{
report_index_out_of_bounds, report_invalid_exception_caught, report_invalid_exception_cause,
report_invalid_exception_raised, report_non_subscriptable,
@ -169,7 +169,6 @@ pub(crate) fn infer_deferred_types<'db>(
/// Use rarely; only for cases where we'd otherwise risk double-inferring an expression: RHS of an
/// assignment, which might be unpacking/multi-target and thus part of multiple definitions, or a
/// type narrowing guard expression (e.g. if statement test node).
#[allow(unused)]
#[salsa::tracked(return_ref)]
pub(crate) fn infer_expression_types<'db>(
db: &'db dyn Db,
@ -208,6 +207,7 @@ fn infer_unpack_types<'db>(db: &'db dyn Db, unpack: Unpack<'db>) -> UnpackResult
}
/// A region within which we can infer types.
#[derive(Copy, Clone, Debug)]
pub(crate) enum InferenceRegion<'db> {
/// infer types for a standalone [`Expression`]
Expression(Expression<'db>),
@ -219,6 +219,18 @@ pub(crate) enum InferenceRegion<'db> {
Scope(ScopeId<'db>),
}
impl<'db> InferenceRegion<'db> {
fn scope(self, db: &'db dyn Db) -> ScopeId<'db> {
match self {
InferenceRegion::Expression(expression) => expression.scope(db),
InferenceRegion::Definition(definition) | InferenceRegion::Deferred(definition) => {
definition.scope(db)
}
InferenceRegion::Scope(scope) => scope,
}
}
}
/// The inferred types for a single region.
#[derive(Debug, Eq, PartialEq)]
pub(crate) struct TypeInference<'db> {
@ -377,16 +389,10 @@ impl<'db> TypeInferenceBuilder<'db> {
region: InferenceRegion<'db>,
index: &'db SemanticIndex<'db>,
) -> Self {
let (file, scope) = match region {
InferenceRegion::Expression(expression) => (expression.file(db), expression.scope(db)),
InferenceRegion::Definition(definition) | InferenceRegion::Deferred(definition) => {
(definition.file(db), definition.scope(db))
}
InferenceRegion::Scope(scope) => (scope.file(db), scope),
};
let scope = region.scope(db);
Self {
context: InferContext::new(db, file),
context: InferContext::new(db, scope),
index,
region,
deferred_state: DeferredExpressionState::None,
@ -1022,10 +1028,20 @@ impl<'db> TypeInferenceBuilder<'db> {
decorator_list,
} = function;
let decorator_tys: Box<[Type]> = decorator_list
.iter()
.map(|decorator| self.infer_decorator(decorator))
.collect();
// Check if the function is decorated with `no_type_check` and, if so,
// suppress any errors that come after the decorators.
let mut decorator_tys = Vec::with_capacity(decorator_list.len());
for decorator in decorator_list {
let ty = self.infer_decorator(decorator);
decorator_tys.push(ty);
if let Type::FunctionLiteral(function) = ty {
if function.is_known(self.db(), KnownFunction::NoTypeCheck) {
self.context.set_in_no_type_check(InNoTypeCheck::Yes);
}
}
}
for default in parameters
.iter_non_variadic_params()
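
The decorator hunk above trades the `map`/`collect` pipeline for an explicit loop so the context can be flipped into `no_type_check` mode as soon as the matching decorator type is seen. A hedged sketch of that shape, with hypothetical simplified types in place of red-knot's `Type` and `InferContext`:

```rust
// Hypothetical simplified types: the real code inspects inferred decorator
// `Type`s and calls `self.context.set_in_no_type_check(InNoTypeCheck::Yes)`.
#[derive(Clone, Copy, PartialEq)]
enum Ty {
    NoTypeCheck,
    Other,
}

#[derive(Default)]
struct InferContext {
    in_no_type_check: bool,
}

fn infer_decorators(context: &mut InferContext, decorators: &[Ty]) -> Box<[Ty]> {
    // An explicit loop instead of `iter().map(..).collect()` so the flag can
    // be flipped while the decorator types are being collected.
    let mut decorator_tys = Vec::with_capacity(decorators.len());
    for &ty in decorators {
        decorator_tys.push(ty);
        if ty == Ty::NoTypeCheck {
            context.in_no_type_check = true;
        }
    }
    decorator_tys.into_boxed_slice()
}

fn main() {
    let mut context = InferContext::default();
    let tys = infer_decorators(&mut context, &[Ty::Other, Ty::NoTypeCheck]);
    assert_eq!(tys.len(), 2);
    assert!(context.in_no_type_check);
}
```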
@ -1061,7 +1077,7 @@ impl<'db> TypeInferenceBuilder<'db> {
&name.id,
function_kind,
body_scope,
decorator_tys,
decorator_tys.into_boxed_slice(),
));
self.add_declaration_with_binding(function.into(), definition, function_ty, function_ty);
@ -3239,17 +3255,19 @@ impl<'db> TypeInferenceBuilder<'db> {
(_, Type::Never) => Type::Never,
(_, Type::Unknown) => Type::Unknown,
(UnaryOp::UAdd, Type::IntLiteral(value)) => Type::IntLiteral(value),
(UnaryOp::USub, Type::IntLiteral(value)) => Type::IntLiteral(-value),
(UnaryOp::Invert, Type::IntLiteral(value)) => Type::IntLiteral(!value),
(ast::UnaryOp::UAdd, Type::IntLiteral(value)) => Type::IntLiteral(value),
(ast::UnaryOp::USub, Type::IntLiteral(value)) => Type::IntLiteral(-value),
(ast::UnaryOp::Invert, Type::IntLiteral(value)) => Type::IntLiteral(!value),
(UnaryOp::UAdd, Type::BooleanLiteral(bool)) => Type::IntLiteral(i64::from(bool)),
(UnaryOp::USub, Type::BooleanLiteral(bool)) => Type::IntLiteral(-i64::from(bool)),
(UnaryOp::Invert, Type::BooleanLiteral(bool)) => Type::IntLiteral(!i64::from(bool)),
(ast::UnaryOp::UAdd, Type::BooleanLiteral(bool)) => Type::IntLiteral(i64::from(bool)),
(ast::UnaryOp::USub, Type::BooleanLiteral(bool)) => Type::IntLiteral(-i64::from(bool)),
(ast::UnaryOp::Invert, Type::BooleanLiteral(bool)) => {
Type::IntLiteral(!i64::from(bool))
}
(UnaryOp::Not, ty) => ty.bool(self.db()).negate().into_type(self.db()),
(ast::UnaryOp::Not, ty) => ty.bool(self.db()).negate().into_type(self.db()),
(
op @ (UnaryOp::UAdd | UnaryOp::USub | UnaryOp::Invert),
op @ (ast::UnaryOp::UAdd | ast::UnaryOp::USub | ast::UnaryOp::Invert),
Type::FunctionLiteral(_)
| Type::ModuleLiteral(_)
| Type::ClassLiteral(_)
@ -3267,10 +3285,10 @@ impl<'db> TypeInferenceBuilder<'db> {
| Type::Tuple(_),
) => {
let unary_dunder_method = match op {
UnaryOp::Invert => "__invert__",
UnaryOp::UAdd => "__pos__",
UnaryOp::USub => "__neg__",
UnaryOp::Not => {
ast::UnaryOp::Invert => "__invert__",
ast::UnaryOp::UAdd => "__pos__",
ast::UnaryOp::USub => "__neg__",
ast::UnaryOp::Not => {
unreachable!("Not operator is handled in its own case");
}
};
@ -5215,7 +5233,7 @@ impl<'db> TypeInferenceBuilder<'db> {
}
// for negative and positive numbers
ast::Expr::UnaryOp(ref u)
if matches!(u.op, UnaryOp::USub | UnaryOp::UAdd)
if matches!(u.op, ast::UnaryOp::USub | ast::UnaryOp::UAdd)
&& u.operand.is_number_literal_expr() =>
{
self.infer_unary_expression(u)

View file

@ -25,7 +25,7 @@ pub(crate) struct Unpacker<'db> {
impl<'db> Unpacker<'db> {
pub(crate) fn new(db: &'db dyn Db, scope: ScopeId<'db>) -> Self {
Self {
context: InferContext::new(db, scope.file(db)),
context: InferContext::new(db, scope),
targets: FxHashMap::default(),
scope,
}