@no_type_check support (#15122)

Co-authored-by: Carl Meyer <carl@astral.sh>
Micha Reiser 2024-12-30 10:42:18 +01:00 committed by GitHub
parent d4ee6abf4a
commit 0caab81d3d
8 changed files with 253 additions and 47 deletions

@@ -0,0 +1,118 @@
# `@no_type_check`

> If a type checker supports the `no_type_check` decorator for functions, it should suppress all
> type errors for the def statement and its body including any nested functions or classes. It
> should also ignore all parameter and return type annotations and treat the function as if it were
> unannotated. [source](https://typing.readthedocs.io/en/latest/spec/directives.html#no-type-check)

## Error in the function body

```py
from typing import no_type_check

@no_type_check
def test() -> int:
    return a + 5
```

## Error in nested function

```py
from typing import no_type_check

@no_type_check
def test() -> int:
    def nested():
        return a + 5
```

## Error in nested class

```py
from typing import no_type_check

@no_type_check
def test() -> int:
    class Nested:
        def inner(self):
            return a + 5
```

## Error in preceding decorator

Don't suppress diagnostics for decorators appearing before the `no_type_check` decorator.

```py
from typing import no_type_check

@unknown_decorator  # error: [unresolved-reference]
@no_type_check
def test() -> int:
    return a + 5
```
## Error in following decorator

Unlike Pyright and mypy, suppress diagnostics for decorators appearing after the `no_type_check`
decorator. We do this because it more closely matches Python's runtime semantics of decorators (see
the sketch after the example below). For more details, see the discussion on the
[PR adding `@no_type_check` support](https://github.com/astral-sh/ruff/pull/15122#discussion_r1896869411).

```py
from typing import no_type_check

@no_type_check
@unknown_decorator
def test() -> int:
    return a + 5
```
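
A minimal, self-contained sketch of the runtime semantics referenced above: decorator expressions
are evaluated top to bottom and then applied bottom up. The `trace` decorator here is a hypothetical
stand-in used only to make that order visible; it is not one of the suppression test cases.

```py
def trace(name):
    # Runs when the decorator expression is evaluated (top to bottom).
    print("evaluated", name)

    def wrap(f):
        # Runs when the decorator is applied to the function (bottom up).
        print("applied", name)
        return f

    return wrap

@trace("outer")  # evaluated first, applied last
@trace("inner")  # evaluated second, applied first
def example(): ...

# Output: evaluated outer, evaluated inner, applied inner, applied outer
```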
## Error in default value

```py
from typing import no_type_check

@no_type_check
def test(a: int = "test"):
    return x + 5
```

## Error in return value position

```py
from typing import no_type_check

@no_type_check
def test() -> Undefined:
    return x + 5
```
## `no_type_check` on classes isn't supported

Red Knot does not support decorating classes with `no_type_check`. The behaviour of `no_type_check`
when applied to classes is
[not specified currently](https://typing.readthedocs.io/en/latest/spec/directives.html#no-type-check),
and is not supported by Pyright or mypy.

A future improvement might be to emit a diagnostic if the `no_type_check` decorator is applied to a
class.

```py
from typing import no_type_check

@no_type_check
class Test:
    def test(self):
        return a + 5  # error: [unresolved-reference]
```
## `type: ignore` comments in `@no_type_check` blocks

```py
from typing import no_type_check

@no_type_check
def test():
    # error: [unused-ignore-comment] "Unused `knot: ignore` directive: 'unresolved-reference'"
    return x + 5  # knot: ignore[unresolved-reference]
```
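
As background, a minimal sketch of what `typing.no_type_check` itself does at runtime for a plain
function (the class case is more involved and, as noted above, unspecified for type checkers):

```py
from typing import no_type_check

@no_type_check
def runtime_example(x: int) -> int:
    return x

# At runtime, `typing.no_type_check` sets `__no_type_check__ = True` on the function
# object and returns it unchanged; runtime tools can check that flag, while static
# checkers (as in this PR) detect the decorator itself.
```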

@@ -43,7 +43,7 @@ impl<T> AstNodeRef<T> {
     }

     /// Returns a reference to the wrapped node.
-    pub fn node(&self) -> &T {
+    pub const fn node(&self) -> &T {
         // SAFETY: Holding on to `parsed` ensures that the AST to which `node` belongs is still
         // alive and not moved.
         unsafe { self.node.as_ref() }

@@ -6,10 +6,9 @@ use rustc_hash::{FxHashMap, FxHashSet};
 use ruff_db::files::File;
 use ruff_db::parsed::ParsedModule;
 use ruff_index::IndexVec;
+use ruff_python_ast as ast;
 use ruff_python_ast::name::Name;
 use ruff_python_ast::visitor::{walk_expr, walk_pattern, walk_stmt, Visitor};
-use ruff_python_ast::{self as ast, Pattern};
-use ruff_python_ast::{BoolOp, Expr};

 use crate::ast_node_ref::AstNodeRef;
 use crate::module_name::ModuleName;
@@ -289,7 +288,7 @@ impl<'db> SemanticIndexBuilder<'db> {
         constraint
     }

-    fn build_constraint(&mut self, constraint_node: &Expr) -> Constraint<'db> {
+    fn build_constraint(&mut self, constraint_node: &ast::Expr) -> Constraint<'db> {
         let expression = self.add_standalone_expression(constraint_node);
         Constraint {
             node: ConstraintNode::Expression(expression),
@@ -408,11 +407,11 @@ impl<'db> SemanticIndexBuilder<'db> {
         let guard = guard.map(|guard| self.add_standalone_expression(guard));

         let kind = match pattern {
-            Pattern::MatchValue(pattern) => {
+            ast::Pattern::MatchValue(pattern) => {
                 let value = self.add_standalone_expression(&pattern.value);
                 PatternConstraintKind::Value(value, guard)
             }
-            Pattern::MatchSingleton(singleton) => {
+            ast::Pattern::MatchSingleton(singleton) => {
                 PatternConstraintKind::Singleton(singleton.value, guard)
             }
             _ => PatternConstraintKind::Unsupported,
@@ -1492,8 +1491,8 @@ where
                 if index < values.len() - 1 {
                     let constraint = self.build_constraint(value);
                     let (constraint, constraint_id) = match op {
-                        BoolOp::And => (constraint, self.add_constraint(constraint)),
-                        BoolOp::Or => self.add_negated_constraint(constraint),
+                        ast::BoolOp::And => (constraint, self.add_constraint(constraint)),
+                        ast::BoolOp::Or => self.add_negated_constraint(constraint),
                     };
                     let visibility_constraint = self
                         .add_visibility_constraint(VisibilityConstraint::VisibleIf(constraint));

@@ -463,10 +463,7 @@ impl NodeWithScopeKind {
     }

     pub fn expect_function(&self) -> &ast::StmtFunctionDef {
-        match self {
-            Self::Function(function) => function.node(),
-            _ => panic!("expected function"),
-        }
+        self.as_function().expect("expected function")
     }

     pub fn expect_type_alias(&self) -> &ast::StmtTypeAlias {
@@ -475,6 +472,13 @@ impl NodeWithScopeKind {
             _ => panic!("expected type alias"),
         }
     }
+
+    pub const fn as_function(&self) -> Option<&ast::StmtFunctionDef> {
+        match self {
+            Self::Function(function) => Some(function.node()),
+            _ => None,
+        }
+    }
 }

 #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]

@@ -3086,13 +3086,16 @@ pub enum KnownFunction {
     Len,
     /// `typing(_extensions).final`
     Final,
+    /// [`typing(_extensions).no_type_check`](https://typing.readthedocs.io/en/latest/spec/directives.html#no-type-check)
+    NoTypeCheck,
 }

 impl KnownFunction {
     pub fn constraint_function(self) -> Option<KnownConstraintFunction> {
         match self {
             Self::ConstraintFunction(f) => Some(f),
-            Self::RevealType | Self::Len | Self::Final => None,
+            Self::RevealType | Self::Len | Self::Final | Self::NoTypeCheck => None,
         }
     }
@@ -3111,6 +3114,9 @@ impl KnownFunction {
             ),
             "len" if definition.is_builtin_definition(db) => Some(KnownFunction::Len),
             "final" if definition.is_typing_definition(db) => Some(KnownFunction::Final),
+            "no_type_check" if definition.is_typing_definition(db) => {
+                Some(KnownFunction::NoTypeCheck)
+            }
             _ => None,
         }
     }

@@ -8,14 +8,16 @@ use ruff_db::{
 use ruff_python_ast::AnyNodeRef;
 use ruff_text_size::Ranged;

+use super::{binding_ty, KnownFunction, TypeCheckDiagnostic, TypeCheckDiagnostics};
+use crate::semantic_index::semantic_index;
+use crate::semantic_index::symbol::ScopeId;
 use crate::{
     lint::{LintId, LintMetadata},
     suppression::suppressions,
     Db,
 };

-use super::{TypeCheckDiagnostic, TypeCheckDiagnostics};
-
 /// Context for inferring the types of a single file.
 ///
 /// One context exists for at least for every inferred region but it's
@@ -30,17 +32,21 @@ use super::{TypeCheckDiagnostic, TypeCheckDiagnostics};
 /// on the current [`TypeInference`](super::infer::TypeInference) result.
 pub(crate) struct InferContext<'db> {
     db: &'db dyn Db,
+    scope: ScopeId<'db>,
     file: File,
     diagnostics: std::cell::RefCell<TypeCheckDiagnostics>,
+    no_type_check: InNoTypeCheck,
     bomb: DebugDropBomb,
 }

 impl<'db> InferContext<'db> {
-    pub(crate) fn new(db: &'db dyn Db, file: File) -> Self {
+    pub(crate) fn new(db: &'db dyn Db, scope: ScopeId<'db>) -> Self {
         Self {
             db,
-            file,
+            scope,
+            file: scope.file(db),
             diagnostics: std::cell::RefCell::new(TypeCheckDiagnostics::default()),
+            no_type_check: InNoTypeCheck::default(),
             bomb: DebugDropBomb::new("`InferContext` needs to be explicitly consumed by calling `::finish` to prevent accidental loss of diagnostics."),
         }
     }
@@ -68,11 +74,19 @@ impl<'db> InferContext<'db> {
         node: AnyNodeRef,
         message: fmt::Arguments,
     ) {
+        if !self.db.is_file_open(self.file) {
+            return;
+        }
+
         // Skip over diagnostics if the rule is disabled.
         let Some(severity) = self.db.rule_selection().severity(LintId::of(lint)) else {
             return;
         };

+        if self.is_in_no_type_check() {
+            return;
+        }
+
         let suppressions = suppressions(self.db, self.file);

         if let Some(suppression) = suppressions.find_suppression(node.range(), LintId::of(lint)) {
@@ -112,6 +126,42 @@ impl<'db> InferContext<'db> {
         });
     }

+    pub(super) fn set_in_no_type_check(&mut self, no_type_check: InNoTypeCheck) {
+        self.no_type_check = no_type_check;
+    }
+
+    fn is_in_no_type_check(&self) -> bool {
+        match self.no_type_check {
+            InNoTypeCheck::Possibly => {
+                // Accessing the semantic index here is fine because
+                // the index belongs to the same file as for which we emit the diagnostic.
+                let index = semantic_index(self.db, self.file);
+
+                let scope_id = self.scope.file_scope_id(self.db);
+
+                // Inspect all ancestor function scopes by walking bottom up and infer the function's type.
+                let mut function_scope_tys = index
+                    .ancestor_scopes(scope_id)
+                    .filter_map(|(_, scope)| scope.node().as_function())
+                    .filter_map(|function| {
+                        binding_ty(self.db, index.definition(function)).into_function_literal()
+                    });
+
+                // Iterate over all functions and test if any is decorated with `@no_type_check`.
+                function_scope_tys.any(|function_ty| {
+                    function_ty
+                        .decorators(self.db)
+                        .iter()
+                        .filter_map(|decorator| decorator.into_function_literal())
+                        .any(|decorator_ty| {
+                            decorator_ty.is_known(self.db, KnownFunction::NoTypeCheck)
+                        })
+                })
+            }
+            InNoTypeCheck::Yes => true,
+        }
+    }
+
     #[must_use]
     pub(crate) fn finish(mut self) -> TypeCheckDiagnostics {
         self.bomb.defuse();
@@ -131,6 +181,17 @@ impl fmt::Debug for InferContext<'_> {
     }
 }

+#[derive(Copy, Clone, Debug, PartialEq, Eq, Default)]
+pub(crate) enum InNoTypeCheck {
+    /// The inference might be in a `no_type_check` block but only if any
+    /// ancestor function is decorated with `@no_type_check`.
+    #[default]
+    Possibly,
+
+    /// The inference is known to be in an `@no_type_check` decorated function.
+    Yes,
+}
+
 pub(crate) trait WithDiagnostics {
     fn diagnostics(&self) -> &TypeCheckDiagnostics;
 }

@@ -31,7 +31,7 @@ use std::num::NonZeroU32;
 use itertools::Itertools;
 use ruff_db::files::File;
 use ruff_db::parsed::parsed_module;
-use ruff_python_ast::{self as ast, AnyNodeRef, ExprContext, UnaryOp};
+use ruff_python_ast::{self as ast, AnyNodeRef, ExprContext};
 use ruff_text_size::Ranged;
 use rustc_hash::{FxHashMap, FxHashSet};
 use salsa;
@@ -72,7 +72,7 @@ use crate::unpack::Unpack;
 use crate::util::subscript::{PyIndex, PySlice};
 use crate::Db;

-use super::context::{InferContext, WithDiagnostics};
+use super::context::{InNoTypeCheck, InferContext, WithDiagnostics};
 use super::diagnostic::{
     report_index_out_of_bounds, report_invalid_exception_caught, report_invalid_exception_cause,
     report_invalid_exception_raised, report_non_subscriptable,
@@ -169,7 +169,6 @@ pub(crate) fn infer_deferred_types<'db>(
 /// Use rarely; only for cases where we'd otherwise risk double-inferring an expression: RHS of an
 /// assignment, which might be unpacking/multi-target and thus part of multiple definitions, or a
 /// type narrowing guard expression (e.g. if statement test node).
-#[allow(unused)]
 #[salsa::tracked(return_ref)]
 pub(crate) fn infer_expression_types<'db>(
     db: &'db dyn Db,
@@ -208,6 +207,7 @@ fn infer_unpack_types<'db>(db: &'db dyn Db, unpack: Unpack<'db>) -> UnpackResult
 }

 /// A region within which we can infer types.
+#[derive(Copy, Clone, Debug)]
 pub(crate) enum InferenceRegion<'db> {
     /// infer types for a standalone [`Expression`]
     Expression(Expression<'db>),
@@ -219,6 +219,18 @@ pub(crate) enum InferenceRegion<'db> {
     Scope(ScopeId<'db>),
 }

+impl<'db> InferenceRegion<'db> {
+    fn scope(self, db: &'db dyn Db) -> ScopeId<'db> {
+        match self {
+            InferenceRegion::Expression(expression) => expression.scope(db),
+            InferenceRegion::Definition(definition) | InferenceRegion::Deferred(definition) => {
+                definition.scope(db)
+            }
+            InferenceRegion::Scope(scope) => scope,
+        }
+    }
+}
+
 /// The inferred types for a single region.
 #[derive(Debug, Eq, PartialEq)]
 pub(crate) struct TypeInference<'db> {
@@ -377,16 +389,10 @@ impl<'db> TypeInferenceBuilder<'db> {
         region: InferenceRegion<'db>,
         index: &'db SemanticIndex<'db>,
     ) -> Self {
-        let (file, scope) = match region {
-            InferenceRegion::Expression(expression) => (expression.file(db), expression.scope(db)),
-            InferenceRegion::Definition(definition) | InferenceRegion::Deferred(definition) => {
-                (definition.file(db), definition.scope(db))
-            }
-            InferenceRegion::Scope(scope) => (scope.file(db), scope),
-        };
+        let scope = region.scope(db);

         Self {
-            context: InferContext::new(db, file),
+            context: InferContext::new(db, scope),
             index,
             region,
             deferred_state: DeferredExpressionState::None,
@@ -1022,10 +1028,20 @@ impl<'db> TypeInferenceBuilder<'db> {
             decorator_list,
         } = function;

-        let decorator_tys: Box<[Type]> = decorator_list
-            .iter()
-            .map(|decorator| self.infer_decorator(decorator))
-            .collect();
+        // Check if the function is decorated with the `no_type_check` decorator
+        // and, if so, suppress any errors that come after the decorators.
+        let mut decorator_tys = Vec::with_capacity(decorator_list.len());
+
+        for decorator in decorator_list {
+            let ty = self.infer_decorator(decorator);
+            decorator_tys.push(ty);
+
+            if let Type::FunctionLiteral(function) = ty {
+                if function.is_known(self.db(), KnownFunction::NoTypeCheck) {
+                    self.context.set_in_no_type_check(InNoTypeCheck::Yes);
+                }
+            }
+        }

         for default in parameters
             .iter_non_variadic_params()
@@ -1061,7 +1077,7 @@ impl<'db> TypeInferenceBuilder<'db> {
             &name.id,
             function_kind,
             body_scope,
-            decorator_tys,
+            decorator_tys.into_boxed_slice(),
         ));

         self.add_declaration_with_binding(function.into(), definition, function_ty, function_ty);
@@ -3239,17 +3255,19 @@ impl<'db> TypeInferenceBuilder<'db> {
             (_, Type::Never) => Type::Never,
             (_, Type::Unknown) => Type::Unknown,

-            (UnaryOp::UAdd, Type::IntLiteral(value)) => Type::IntLiteral(value),
-            (UnaryOp::USub, Type::IntLiteral(value)) => Type::IntLiteral(-value),
-            (UnaryOp::Invert, Type::IntLiteral(value)) => Type::IntLiteral(!value),
+            (ast::UnaryOp::UAdd, Type::IntLiteral(value)) => Type::IntLiteral(value),
+            (ast::UnaryOp::USub, Type::IntLiteral(value)) => Type::IntLiteral(-value),
+            (ast::UnaryOp::Invert, Type::IntLiteral(value)) => Type::IntLiteral(!value),

-            (UnaryOp::UAdd, Type::BooleanLiteral(bool)) => Type::IntLiteral(i64::from(bool)),
-            (UnaryOp::USub, Type::BooleanLiteral(bool)) => Type::IntLiteral(-i64::from(bool)),
-            (UnaryOp::Invert, Type::BooleanLiteral(bool)) => Type::IntLiteral(!i64::from(bool)),
+            (ast::UnaryOp::UAdd, Type::BooleanLiteral(bool)) => Type::IntLiteral(i64::from(bool)),
+            (ast::UnaryOp::USub, Type::BooleanLiteral(bool)) => Type::IntLiteral(-i64::from(bool)),
+            (ast::UnaryOp::Invert, Type::BooleanLiteral(bool)) => {
+                Type::IntLiteral(!i64::from(bool))
+            }

-            (UnaryOp::Not, ty) => ty.bool(self.db()).negate().into_type(self.db()),
+            (ast::UnaryOp::Not, ty) => ty.bool(self.db()).negate().into_type(self.db()),

             (
-                op @ (UnaryOp::UAdd | UnaryOp::USub | UnaryOp::Invert),
+                op @ (ast::UnaryOp::UAdd | ast::UnaryOp::USub | ast::UnaryOp::Invert),
                 Type::FunctionLiteral(_)
                 | Type::ModuleLiteral(_)
                 | Type::ClassLiteral(_)
@@ -3267,10 +3285,10 @@ impl<'db> TypeInferenceBuilder<'db> {
                 | Type::Tuple(_),
             ) => {
                 let unary_dunder_method = match op {
-                    UnaryOp::Invert => "__invert__",
-                    UnaryOp::UAdd => "__pos__",
-                    UnaryOp::USub => "__neg__",
-                    UnaryOp::Not => {
+                    ast::UnaryOp::Invert => "__invert__",
+                    ast::UnaryOp::UAdd => "__pos__",
+                    ast::UnaryOp::USub => "__neg__",
+                    ast::UnaryOp::Not => {
                         unreachable!("Not operator is handled in its own case");
                     }
                 };
@@ -5215,7 +5233,7 @@ impl<'db> TypeInferenceBuilder<'db> {
             }
             // for negative and positive numbers
             ast::Expr::UnaryOp(ref u)
-                if matches!(u.op, UnaryOp::USub | UnaryOp::UAdd)
+                if matches!(u.op, ast::UnaryOp::USub | ast::UnaryOp::UAdd)
                     && u.operand.is_number_literal_expr() =>
             {
                 self.infer_unary_expression(u)

@@ -25,7 +25,7 @@ pub(crate) struct Unpacker<'db> {
 impl<'db> Unpacker<'db> {
     pub(crate) fn new(db: &'db dyn Db, scope: ScopeId<'db>) -> Self {
         Self {
-            context: InferContext::new(db, scope.file(db)),
+            context: InferContext::new(db, scope),
             targets: FxHashMap::default(),
             scope,
         }