[ty] Update salsa (#17964)
Some checks are pending
CI / cargo clippy (push) Blocked by required conditions
CI / cargo build (msrv) (push) Blocked by required conditions
CI / Fuzz for new ty panics (push) Blocked by required conditions
CI / Determine changes (push) Waiting to run
CI / cargo fmt (push) Waiting to run
CI / cargo test (linux) (push) Blocked by required conditions
CI / cargo test (linux, release) (push) Blocked by required conditions
CI / cargo test (windows) (push) Blocked by required conditions
CI / cargo test (wasm) (push) Blocked by required conditions
CI / cargo build (release) (push) Waiting to run
CI / cargo fuzz build (push) Blocked by required conditions
CI / fuzz parser (push) Blocked by required conditions
CI / test scripts (push) Blocked by required conditions
CI / ecosystem (push) Blocked by required conditions
CI / cargo shear (push) Blocked by required conditions
CI / python package (push) Waiting to run
CI / pre-commit (push) Waiting to run
CI / mkdocs (push) Waiting to run
CI / formatter instabilities and black similarity (push) Blocked by required conditions
CI / test ruff-lsp (push) Blocked by required conditions
CI / check playground (push) Blocked by required conditions
CI / benchmarks (push) Blocked by required conditions
[ty Playground] Release / publish (push) Waiting to run

This commit is contained in:
Micha Reiser 2025-05-09 11:54:07 +02:00 committed by GitHub
parent 12ce445ff7
commit 6cd8a49638
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
35 changed files with 187 additions and 234 deletions

6
Cargo.lock generated
View file

@ -3249,7 +3249,7 @@ checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
[[package]] [[package]]
name = "salsa" name = "salsa"
version = "0.21.1" version = "0.21.1"
source = "git+https://github.com/salsa-rs/salsa.git?rev=2c869364a9592d06fdf45c422e1e4a7265a8fe8a#2c869364a9592d06fdf45c422e1e4a7265a8fe8a" source = "git+https://github.com/salsa-rs/salsa.git?rev=7edce6e248f35c8114b4b021cdb474a3fb2813b3#7edce6e248f35c8114b4b021cdb474a3fb2813b3"
dependencies = [ dependencies = [
"boxcar", "boxcar",
"compact_str", "compact_str",
@ -3272,12 +3272,12 @@ dependencies = [
[[package]] [[package]]
name = "salsa-macro-rules" name = "salsa-macro-rules"
version = "0.21.1" version = "0.21.1"
source = "git+https://github.com/salsa-rs/salsa.git?rev=2c869364a9592d06fdf45c422e1e4a7265a8fe8a#2c869364a9592d06fdf45c422e1e4a7265a8fe8a" source = "git+https://github.com/salsa-rs/salsa.git?rev=7edce6e248f35c8114b4b021cdb474a3fb2813b3#7edce6e248f35c8114b4b021cdb474a3fb2813b3"
[[package]] [[package]]
name = "salsa-macros" name = "salsa-macros"
version = "0.21.1" version = "0.21.1"
source = "git+https://github.com/salsa-rs/salsa.git?rev=2c869364a9592d06fdf45c422e1e4a7265a8fe8a#2c869364a9592d06fdf45c422e1e4a7265a8fe8a" source = "git+https://github.com/salsa-rs/salsa.git?rev=7edce6e248f35c8114b4b021cdb474a3fb2813b3#7edce6e248f35c8114b4b021cdb474a3fb2813b3"
dependencies = [ dependencies = [
"heck", "heck",
"proc-macro2", "proc-macro2",

View file

@ -125,7 +125,7 @@ rayon = { version = "1.10.0" }
regex = { version = "1.10.2" } regex = { version = "1.10.2" }
rustc-hash = { version = "2.0.0" } rustc-hash = { version = "2.0.0" }
# When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml` # When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "2c869364a9592d06fdf45c422e1e4a7265a8fe8a" } salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "7edce6e248f35c8114b4b021cdb474a3fb2813b3" }
schemars = { version = "0.8.16" } schemars = { version = "0.8.16" }
seahash = { version = "4.1.0" } seahash = { version = "4.1.0" }
serde = { version = "1.0.197", features = ["derive"] } serde = { version = "1.0.197", features = ["derive"] }

View file

@ -277,7 +277,7 @@ impl std::panic::RefUnwindSafe for Files {}
#[salsa::input] #[salsa::input]
pub struct File { pub struct File {
/// The path of the file (immutable). /// The path of the file (immutable).
#[return_ref] #[returns(ref)]
pub path: FilePath, pub path: FilePath,
/// The unix permissions of the file. Only supported on unix systems. Always `None` on Windows /// The unix permissions of the file. Only supported on unix systems. Always `None` on Windows

View file

@ -19,8 +19,8 @@ use crate::Db;
#[salsa::input(debug)] #[salsa::input(debug)]
pub struct FileRoot { pub struct FileRoot {
/// The path of a root is guaranteed to never change. /// The path of a root is guaranteed to never change.
#[return_ref] #[returns(deref)]
path_buf: SystemPathBuf, pub path: SystemPathBuf,
/// The kind of the root at the time of its creation. /// The kind of the root at the time of its creation.
kind_at_time_of_creation: FileRootKind, kind_at_time_of_creation: FileRootKind,
@ -32,10 +32,6 @@ pub struct FileRoot {
} }
impl FileRoot { impl FileRoot {
pub fn path(self, db: &dyn Db) -> &SystemPath {
self.path_buf(db)
}
pub fn durability(self, db: &dyn Db) -> salsa::Durability { pub fn durability(self, db: &dyn Db) -> salsa::Durability {
self.kind_at_time_of_creation(db).durability() self.kind_at_time_of_creation(db).durability()
} }

View file

@ -20,7 +20,7 @@ use crate::Db;
/// reflected in the changed AST offsets. /// reflected in the changed AST offsets.
/// The other reason is that Ruff's AST doesn't implement `Eq` which Salsa requires /// The other reason is that Ruff's AST doesn't implement `Eq` which Salsa requires
/// for determining if a query result is unchanged. /// for determining if a query result is unchanged.
#[salsa::tracked(return_ref, no_eq)] #[salsa::tracked(returns(ref), no_eq)]
pub fn parsed_module(db: &dyn Db, file: File) -> ParsedModule { pub fn parsed_module(db: &dyn Db, file: File) -> ParsedModule {
let _span = tracing::trace_span!("parsed_module", ?file).entered(); let _span = tracing::trace_span!("parsed_module", ?file).entered();

View file

@ -88,8 +88,8 @@ impl Db for ModuleDb {
!file.path(self).is_vendored_path() !file.path(self).is_vendored_path()
} }
fn rule_selection(&self) -> Arc<RuleSelection> { fn rule_selection(&self) -> &RuleSelection {
self.rule_selection.clone() &self.rule_selection
} }
fn lint_registry(&self) -> &LintRegistry { fn lint_registry(&self) -> &LintRegistry {

View file

@ -120,8 +120,8 @@ pub(crate) mod tests {
!file.path(self).is_vendored_path() !file.path(self).is_vendored_path()
} }
fn rule_selection(&self) -> Arc<RuleSelection> { fn rule_selection(&self) -> &RuleSelection {
self.rule_selection.clone() &self.rule_selection
} }
fn lint_registry(&self) -> &LintRegistry { fn lint_registry(&self) -> &LintRegistry {

View file

@ -149,7 +149,7 @@ impl SemanticDb for ProjectDatabase {
project.is_file_open(self, file) project.is_file_open(self, file)
} }
fn rule_selection(&self) -> Arc<RuleSelection> { fn rule_selection(&self) -> &RuleSelection {
self.project().rules(self) self.project().rules(self)
} }
@ -327,7 +327,7 @@ pub(crate) mod tests {
!file.path(self).is_vendored_path() !file.path(self).is_vendored_path()
} }
fn rule_selection(&self) -> Arc<RuleSelection> { fn rule_selection(&self) -> &RuleSelection {
self.project().rules(self) self.project().rules(self)
} }

View file

@ -60,21 +60,21 @@ pub struct Project {
/// ///
/// Setting the open files to a non-`None` value changes `check` to only check the /// Setting the open files to a non-`None` value changes `check` to only check the
/// open files rather than all files in the project. /// open files rather than all files in the project.
#[return_ref] #[returns(as_deref)]
#[default] #[default]
open_fileset: Option<Arc<FxHashSet<File>>>, open_fileset: Option<Arc<FxHashSet<File>>>,
/// The first-party files of this project. /// The first-party files of this project.
#[default] #[default]
#[return_ref] #[returns(ref)]
file_set: IndexedFiles, file_set: IndexedFiles,
/// The metadata describing the project, including the unresolved options. /// The metadata describing the project, including the unresolved options.
#[return_ref] #[returns(ref)]
pub metadata: ProjectMetadata, pub metadata: ProjectMetadata,
/// The resolved project settings. /// The resolved project settings.
#[return_ref] #[returns(ref)]
pub settings: Settings, pub settings: Settings,
/// The paths that should be included when checking this project. /// The paths that should be included when checking this project.
@ -98,11 +98,11 @@ pub struct Project {
/// in an IDE when the user only wants to check the open tabs. This could be modeled /// in an IDE when the user only wants to check the open tabs. This could be modeled
/// with `included_paths` too but it would require an explicit walk dir step that's simply unnecessary. /// with `included_paths` too but it would require an explicit walk dir step that's simply unnecessary.
#[default] #[default]
#[return_ref] #[returns(deref)]
included_paths_list: Vec<SystemPathBuf>, included_paths_list: Vec<SystemPathBuf>,
/// Diagnostics that were generated when resolving the project settings. /// Diagnostics that were generated when resolving the project settings.
#[return_ref] #[returns(deref)]
settings_diagnostics: Vec<OptionDiagnostic>, settings_diagnostics: Vec<OptionDiagnostic>,
} }
@ -131,7 +131,7 @@ impl Project {
/// This is a salsa query to prevent re-computing queries if other, unrelated /// This is a salsa query to prevent re-computing queries if other, unrelated
/// settings change. For example, we don't want that changing the terminal settings /// settings change. For example, we don't want that changing the terminal settings
/// invalidates any type checking queries. /// invalidates any type checking queries.
#[salsa::tracked] #[salsa::tracked(returns(deref))]
pub fn rules(self, db: &dyn Db) -> Arc<RuleSelection> { pub fn rules(self, db: &dyn Db) -> Arc<RuleSelection> {
self.settings(db).to_rules() self.settings(db).to_rules()
} }
@ -157,7 +157,7 @@ impl Project {
self.set_settings(db).to(settings); self.set_settings(db).to(settings);
} }
if self.settings_diagnostics(db) != &settings_diagnostics { if self.settings_diagnostics(db) != settings_diagnostics {
self.set_settings_diagnostics(db).to(settings_diagnostics); self.set_settings_diagnostics(db).to(settings_diagnostics);
} }
@ -284,7 +284,7 @@ impl Project {
/// This can be useful to check arbitrary files, but it isn't something we recommend. /// This can be useful to check arbitrary files, but it isn't something we recommend.
/// We should try to support this use case but it's okay if there are some limitations around it. /// We should try to support this use case but it's okay if there are some limitations around it.
fn included_paths_or_root(self, db: &dyn Db) -> &[SystemPathBuf] { fn included_paths_or_root(self, db: &dyn Db) -> &[SystemPathBuf] {
match &**self.included_paths_list(db) { match self.included_paths_list(db) {
[] => std::slice::from_ref(&self.metadata(db).root), [] => std::slice::from_ref(&self.metadata(db).root),
paths => paths, paths => paths,
} }
@ -292,7 +292,7 @@ impl Project {
/// Returns the open files in the project or `None` if the entire project should be checked. /// Returns the open files in the project or `None` if the entire project should be checked.
pub fn open_files(self, db: &dyn Db) -> Option<&FxHashSet<File>> { pub fn open_files(self, db: &dyn Db) -> Option<&FxHashSet<File>> {
self.open_fileset(db).as_deref() self.open_fileset(db)
} }
/// Sets the open files in the project. /// Sets the open files in the project.

View file

@ -1,5 +1,3 @@
use std::sync::Arc;
use crate::lint::{LintRegistry, RuleSelection}; use crate::lint::{LintRegistry, RuleSelection};
use ruff_db::files::File; use ruff_db::files::File;
use ruff_db::{Db as SourceDb, Upcast}; use ruff_db::{Db as SourceDb, Upcast};
@ -9,7 +7,7 @@ use ruff_db::{Db as SourceDb, Upcast};
pub trait Db: SourceDb + Upcast<dyn SourceDb> { pub trait Db: SourceDb + Upcast<dyn SourceDb> {
fn is_file_open(&self, file: File) -> bool; fn is_file_open(&self, file: File) -> bool;
fn rule_selection(&self) -> Arc<RuleSelection>; fn rule_selection(&self) -> &RuleSelection;
fn lint_registry(&self) -> &LintRegistry; fn lint_registry(&self) -> &LintRegistry;
} }
@ -125,8 +123,8 @@ pub(crate) mod tests {
!file.path(self).is_vendored_path() !file.path(self).is_vendored_path()
} }
fn rule_selection(&self) -> Arc<RuleSelection> { fn rule_selection(&self) -> &RuleSelection {
self.rule_selection.clone() &self.rule_selection
} }
fn lint_registry(&self) -> &LintRegistry { fn lint_registry(&self) -> &LintRegistry {

View file

@ -28,20 +28,15 @@ fn dunder_all_names_cycle_initial(_db: &dyn Db, _file: File) -> Option<FxHashSet
/// Returns a set of names in the `__all__` variable for `file`, [`None`] if it is not defined or /// Returns a set of names in the `__all__` variable for `file`, [`None`] if it is not defined or
/// if it contains invalid elements. /// if it contains invalid elements.
pub(crate) fn dunder_all_names(db: &dyn Db, file: File) -> Option<&FxHashSet<Name>> { #[salsa::tracked(returns(as_ref), cycle_fn=dunder_all_names_cycle_recover, cycle_initial=dunder_all_names_cycle_initial)]
#[allow(clippy::ref_option)] pub(crate) fn dunder_all_names(db: &dyn Db, file: File) -> Option<FxHashSet<Name>> {
#[salsa::tracked(return_ref, cycle_fn=dunder_all_names_cycle_recover, cycle_initial=dunder_all_names_cycle_initial)] let _span = tracing::trace_span!("dunder_all_names", file=?file.path(db)).entered();
fn dunder_all_names_impl(db: &dyn Db, file: File) -> Option<FxHashSet<Name>> {
let _span = tracing::trace_span!("dunder_all_names", file=?file.path(db)).entered();
let module = parsed_module(db.upcast(), file); let module = parsed_module(db.upcast(), file);
let index = semantic_index(db, file); let index = semantic_index(db, file);
let mut collector = DunderAllNamesCollector::new(db, file, index); let mut collector = DunderAllNamesCollector::new(db, file, index);
collector.visit_body(module.suite()); collector.visit_body(module.suite());
collector.into_names() collector.into_names()
}
dunder_all_names_impl(db, file).as_ref()
} }
/// A visitor that collects the names in the `__all__` variable of a module. /// A visitor that collects the names in the `__all__` variable of a module.

View file

@ -349,7 +349,7 @@ impl SearchPaths {
/// The editable-install search paths for the first `site-packages` directory /// The editable-install search paths for the first `site-packages` directory
/// should come between the two `site-packages` directories when it comes to /// should come between the two `site-packages` directories when it comes to
/// module-resolution priority. /// module-resolution priority.
#[salsa::tracked(return_ref)] #[salsa::tracked(returns(deref))]
pub(crate) fn dynamic_resolution_paths(db: &dyn Db) -> Vec<SearchPath> { pub(crate) fn dynamic_resolution_paths(db: &dyn Db) -> Vec<SearchPath> {
tracing::debug!("Resolving dynamic module resolution paths"); tracing::debug!("Resolving dynamic module resolution paths");
@ -583,7 +583,7 @@ impl<'db> Iterator for PthFileIterator<'db> {
/// This is needed because Salsa requires that all query arguments are salsa ingredients. /// This is needed because Salsa requires that all query arguments are salsa ingredients.
#[salsa::interned(debug)] #[salsa::interned(debug)]
struct ModuleNameIngredient<'db> { struct ModuleNameIngredient<'db> {
#[return_ref] #[returns(ref)]
pub(super) name: ModuleName, pub(super) name: ModuleName,
} }

View file

@ -13,10 +13,10 @@ use salsa::Setter;
pub struct Program { pub struct Program {
pub python_version: PythonVersion, pub python_version: PythonVersion,
#[return_ref] #[returns(ref)]
pub python_platform: PythonPlatform, pub python_platform: PythonPlatform,
#[return_ref] #[returns(ref)]
pub(crate) search_paths: SearchPaths, pub(crate) search_paths: SearchPaths,
} }

View file

@ -46,7 +46,7 @@ type SymbolMap = hashbrown::HashMap<ScopedSymbolId, (), FxBuildHasher>;
/// Returns the semantic index for `file`. /// Returns the semantic index for `file`.
/// ///
/// Prefer using [`symbol_table`] when working with symbols from a single scope. /// Prefer using [`symbol_table`] when working with symbols from a single scope.
#[salsa::tracked(return_ref, no_eq)] #[salsa::tracked(returns(ref), no_eq)]
pub(crate) fn semantic_index(db: &dyn Db, file: File) -> SemanticIndex<'_> { pub(crate) fn semantic_index(db: &dyn Db, file: File) -> SemanticIndex<'_> {
let _span = tracing::trace_span!("semantic_index", ?file).entered(); let _span = tracing::trace_span!("semantic_index", ?file).entered();
@ -60,7 +60,7 @@ pub(crate) fn semantic_index(db: &dyn Db, file: File) -> SemanticIndex<'_> {
/// Using [`symbol_table`] over [`semantic_index`] has the advantage that /// Using [`symbol_table`] over [`semantic_index`] has the advantage that
/// Salsa can avoid invalidating dependent queries if this scope's symbol table /// Salsa can avoid invalidating dependent queries if this scope's symbol table
/// is unchanged. /// is unchanged.
#[salsa::tracked] #[salsa::tracked(returns(deref))]
pub(crate) fn symbol_table<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> Arc<SymbolTable> { pub(crate) fn symbol_table<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> Arc<SymbolTable> {
let file = scope.file(db); let file = scope.file(db);
let _span = tracing::trace_span!("symbol_table", scope=?scope.as_id(), ?file).entered(); let _span = tracing::trace_span!("symbol_table", scope=?scope.as_id(), ?file).entered();
@ -80,7 +80,7 @@ pub(crate) fn symbol_table<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> Arc<Sym
/// ///
/// - We cannot resolve relative imports (which aren't allowed in `import` statements) without /// - We cannot resolve relative imports (which aren't allowed in `import` statements) without
/// knowing the name of the current module, and whether it's a package. /// knowing the name of the current module, and whether it's a package.
#[salsa::tracked] #[salsa::tracked(returns(deref))]
pub(crate) fn imported_modules<'db>(db: &'db dyn Db, file: File) -> Arc<FxHashSet<ModuleName>> { pub(crate) fn imported_modules<'db>(db: &'db dyn Db, file: File) -> Arc<FxHashSet<ModuleName>> {
semantic_index(db, file).imported_modules.clone() semantic_index(db, file).imported_modules.clone()
} }
@ -90,7 +90,7 @@ pub(crate) fn imported_modules<'db>(db: &'db dyn Db, file: File) -> Arc<FxHashSe
/// Using [`use_def_map`] over [`semantic_index`] has the advantage that /// Using [`use_def_map`] over [`semantic_index`] has the advantage that
/// Salsa can avoid invalidating dependent queries if this scope's use-def map /// Salsa can avoid invalidating dependent queries if this scope's use-def map
/// is unchanged. /// is unchanged.
#[salsa::tracked] #[salsa::tracked(returns(deref))]
pub(crate) fn use_def_map<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> Arc<UseDefMap<'db>> { pub(crate) fn use_def_map<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> Arc<UseDefMap<'db>> {
let file = scope.file(db); let file = scope.file(db);
let _span = tracing::trace_span!("use_def_map", scope=?scope.as_id(), ?file).entered(); let _span = tracing::trace_span!("use_def_map", scope=?scope.as_id(), ?file).entered();
@ -599,7 +599,7 @@ mod tests {
let TestCase { db, file } = test_case(""); let TestCase { db, file } = test_case("");
let global_table = symbol_table(&db, global_scope(&db, file)); let global_table = symbol_table(&db, global_scope(&db, file));
let global_names = names(&global_table); let global_names = names(global_table);
assert_eq!(global_names, Vec::<&str>::new()); assert_eq!(global_names, Vec::<&str>::new());
} }
@ -609,7 +609,7 @@ mod tests {
let TestCase { db, file } = test_case("x"); let TestCase { db, file } = test_case("x");
let global_table = symbol_table(&db, global_scope(&db, file)); let global_table = symbol_table(&db, global_scope(&db, file));
assert_eq!(names(&global_table), vec!["x"]); assert_eq!(names(global_table), vec!["x"]);
} }
#[test] #[test]
@ -617,7 +617,7 @@ mod tests {
let TestCase { db, file } = test_case("x: int"); let TestCase { db, file } = test_case("x: int");
let global_table = symbol_table(&db, global_scope(&db, file)); let global_table = symbol_table(&db, global_scope(&db, file));
assert_eq!(names(&global_table), vec!["int", "x"]); assert_eq!(names(global_table), vec!["int", "x"]);
// TODO record definition // TODO record definition
} }
@ -627,7 +627,7 @@ mod tests {
let scope = global_scope(&db, file); let scope = global_scope(&db, file);
let global_table = symbol_table(&db, scope); let global_table = symbol_table(&db, scope);
assert_eq!(names(&global_table), vec!["foo"]); assert_eq!(names(global_table), vec!["foo"]);
let foo = global_table.symbol_id_by_name("foo").unwrap(); let foo = global_table.symbol_id_by_name("foo").unwrap();
let use_def = use_def_map(&db, scope); let use_def = use_def_map(&db, scope);
@ -640,7 +640,7 @@ mod tests {
let TestCase { db, file } = test_case("import foo.bar"); let TestCase { db, file } = test_case("import foo.bar");
let global_table = symbol_table(&db, global_scope(&db, file)); let global_table = symbol_table(&db, global_scope(&db, file));
assert_eq!(names(&global_table), vec!["foo"]); assert_eq!(names(global_table), vec!["foo"]);
} }
#[test] #[test]
@ -648,7 +648,7 @@ mod tests {
let TestCase { db, file } = test_case("import foo.bar as baz"); let TestCase { db, file } = test_case("import foo.bar as baz");
let global_table = symbol_table(&db, global_scope(&db, file)); let global_table = symbol_table(&db, global_scope(&db, file));
assert_eq!(names(&global_table), vec!["baz"]); assert_eq!(names(global_table), vec!["baz"]);
} }
#[test] #[test]
@ -657,7 +657,7 @@ mod tests {
let scope = global_scope(&db, file); let scope = global_scope(&db, file);
let global_table = symbol_table(&db, scope); let global_table = symbol_table(&db, scope);
assert_eq!(names(&global_table), vec!["foo"]); assert_eq!(names(global_table), vec!["foo"]);
assert!( assert!(
global_table global_table
.symbol_by_name("foo") .symbol_by_name("foo")
@ -682,7 +682,7 @@ mod tests {
let scope = global_scope(&db, file); let scope = global_scope(&db, file);
let global_table = symbol_table(&db, scope); let global_table = symbol_table(&db, scope);
assert_eq!(names(&global_table), vec!["foo", "x"]); assert_eq!(names(global_table), vec!["foo", "x"]);
assert!( assert!(
global_table global_table
.symbol_by_name("foo") .symbol_by_name("foo")
@ -702,7 +702,7 @@ mod tests {
let scope = global_scope(&db, file); let scope = global_scope(&db, file);
let global_table = symbol_table(&db, scope); let global_table = symbol_table(&db, scope);
assert_eq!(names(&global_table), vec!["x"]); assert_eq!(names(global_table), vec!["x"]);
let use_def = use_def_map(&db, scope); let use_def = use_def_map(&db, scope);
let binding = use_def let binding = use_def
@ -726,7 +726,7 @@ y = 2
); );
let global_table = symbol_table(&db, global_scope(&db, file)); let global_table = symbol_table(&db, global_scope(&db, file));
assert_eq!(names(&global_table), vec!["C", "y"]); assert_eq!(names(global_table), vec!["C", "y"]);
let index = semantic_index(&db, file); let index = semantic_index(&db, file);
@ -798,7 +798,7 @@ def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):
let index = semantic_index(&db, file); let index = semantic_index(&db, file);
let global_table = symbol_table(&db, global_scope(&db, file)); let global_table = symbol_table(&db, global_scope(&db, file));
assert_eq!(names(&global_table), vec!["str", "int", "f"]); assert_eq!(names(global_table), vec!["str", "int", "f"]);
let [(function_scope_id, _function_scope)] = index let [(function_scope_id, _function_scope)] = index
.child_scopes(FileScopeId::global()) .child_scopes(FileScopeId::global())
@ -855,7 +855,7 @@ def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):
let index = semantic_index(&db, file); let index = semantic_index(&db, file);
let global_table = symbol_table(&db, global_scope(&db, file)); let global_table = symbol_table(&db, global_scope(&db, file));
assert!(names(&global_table).is_empty()); assert!(names(global_table).is_empty());
let [(lambda_scope_id, _lambda_scope)] = index let [(lambda_scope_id, _lambda_scope)] = index
.child_scopes(FileScopeId::global()) .child_scopes(FileScopeId::global())
@ -1344,7 +1344,7 @@ match subject:
assert!(global_table.symbol_by_name("Foo").unwrap().is_used()); assert!(global_table.symbol_by_name("Foo").unwrap().is_used());
assert_eq!( assert_eq!(
names(&global_table), names(global_table),
vec!["subject", "a", "b", "c", "d", "e", "f", "g", "h", "Foo", "i", "j", "k", "l"] vec!["subject", "a", "b", "c", "d", "e", "f", "g", "h", "Foo", "i", "j", "k", "l"]
); );
@ -1389,7 +1389,7 @@ match 1:
let global_scope_id = global_scope(&db, file); let global_scope_id = global_scope(&db, file);
let global_table = symbol_table(&db, global_scope_id); let global_table = symbol_table(&db, global_scope_id);
assert_eq!(names(&global_table), vec!["first", "second"]); assert_eq!(names(global_table), vec!["first", "second"]);
let use_def = use_def_map(&db, global_scope_id); let use_def = use_def_map(&db, global_scope_id);
for (name, expected_index) in [("first", 0), ("second", 0)] { for (name, expected_index) in [("first", 0), ("second", 0)] {
@ -1410,7 +1410,7 @@ match 1:
let scope = global_scope(&db, file); let scope = global_scope(&db, file);
let global_table = symbol_table(&db, scope); let global_table = symbol_table(&db, scope);
assert_eq!(&names(&global_table), &["a", "x"]); assert_eq!(&names(global_table), &["a", "x"]);
let use_def = use_def_map(&db, scope); let use_def = use_def_map(&db, scope);
let binding = use_def let binding = use_def
@ -1426,7 +1426,7 @@ match 1:
let scope = global_scope(&db, file); let scope = global_scope(&db, file);
let global_table = symbol_table(&db, scope); let global_table = symbol_table(&db, scope);
assert_eq!(&names(&global_table), &["a", "x", "y"]); assert_eq!(&names(global_table), &["a", "x", "y"]);
let use_def = use_def_map(&db, scope); let use_def = use_def_map(&db, scope);
let x_binding = use_def let x_binding = use_def
@ -1446,7 +1446,7 @@ match 1:
let scope = global_scope(&db, file); let scope = global_scope(&db, file);
let global_table = symbol_table(&db, scope); let global_table = symbol_table(&db, scope);
assert_eq!(&names(&global_table), &["e", "a", "b", "c", "d"]); assert_eq!(&names(global_table), &["e", "a", "b", "c", "d"]);
let use_def = use_def_map(&db, scope); let use_def = use_def_map(&db, scope);
let binding = use_def let binding = use_def

View file

@ -34,7 +34,7 @@ pub struct Definition<'db> {
/// WARNING: Only access this field when doing type inference for the same /// WARNING: Only access this field when doing type inference for the same
/// file as where `Definition` is defined to avoid cross-file query dependencies. /// file as where `Definition` is defined to avoid cross-file query dependencies.
#[no_eq] #[no_eq]
#[return_ref] #[returns(ref)]
#[tracked] #[tracked]
pub(crate) kind: DefinitionKind<'db>, pub(crate) kind: DefinitionKind<'db>,

View file

@ -41,7 +41,7 @@ pub(crate) struct Expression<'db> {
/// The expression node. /// The expression node.
#[no_eq] #[no_eq]
#[tracked] #[tracked]
#[return_ref] #[returns(deref)]
pub(crate) node_ref: AstNodeRef<ast::Expr>, pub(crate) node_ref: AstNodeRef<ast::Expr>,
/// An assignment statement, if this expression is immediately used as the rhs of that /// An assignment statement, if this expression is immediately used as the rhs of that

View file

@ -83,7 +83,7 @@ pub(crate) struct PatternPredicate<'db> {
pub(crate) subject: Expression<'db>, pub(crate) subject: Expression<'db>,
#[return_ref] #[returns(ref)]
pub(crate) kind: PatternPredicateKind<'db>, pub(crate) kind: PatternPredicateKind<'db>,
pub(crate) guard: Option<Expression<'db>>, pub(crate) guard: Option<Expression<'db>>,

View file

@ -43,7 +43,7 @@ fn exports_cycle_initial(_db: &dyn Db, _file: File) -> Box<[Name]> {
Box::default() Box::default()
} }
#[salsa::tracked(return_ref, cycle_fn=exports_cycle_recover, cycle_initial=exports_cycle_initial)] #[salsa::tracked(returns(deref), cycle_fn=exports_cycle_recover, cycle_initial=exports_cycle_initial)]
pub(super) fn exported_names(db: &dyn Db, file: File) -> Box<[Name]> { pub(super) fn exported_names(db: &dyn Db, file: File) -> Box<[Name]> {
let module = parsed_module(db.upcast(), file); let module = parsed_module(db.upcast(), file);
let mut finder = ExportFinder::new(db, file); let mut finder = ExportFinder::new(db, file);

View file

@ -86,7 +86,7 @@ declare_lint! {
} }
} }
#[salsa::tracked(return_ref)] #[salsa::tracked(returns(ref))]
pub(crate) fn suppressions(db: &dyn Db, file: File) -> Suppressions { pub(crate) fn suppressions(db: &dyn Db, file: File) -> Suppressions {
let parsed = parsed_module(db.upcast(), file); let parsed = parsed_module(db.upcast(), file);
let source = source_text(db.upcast(), file); let source = source_text(db.upcast(), file);

View file

@ -1014,7 +1014,7 @@ mod implicit_globals {
/// Conceptually this function could be a `Set` rather than a list, /// Conceptually this function could be a `Set` rather than a list,
/// but the number of symbols declared in this scope is likely to be very small, /// but the number of symbols declared in this scope is likely to be very small,
/// so the cost of hashing the names is likely to be more expensive than it's worth. /// so the cost of hashing the names is likely to be more expensive than it's worth.
#[salsa::tracked(return_ref)] #[salsa::tracked(returns(deref))]
fn module_type_symbols<'db>(db: &'db dyn Db) -> smallvec::SmallVec<[ast::name::Name; 8]> { fn module_type_symbols<'db>(db: &'db dyn Db) -> smallvec::SmallVec<[ast::name::Name; 8]> {
let Some(module_type) = KnownClass::ModuleType let Some(module_type) = KnownClass::ModuleType
.to_class_literal(db) .to_class_literal(db)

View file

@ -81,7 +81,7 @@ mod definition;
#[cfg(test)] #[cfg(test)]
mod property_tests; mod property_tests;
#[salsa::tracked(return_ref)] #[salsa::tracked(returns(ref))]
pub fn check_types(db: &dyn Db, file: File) -> TypeCheckDiagnostics { pub fn check_types(db: &dyn Db, file: File) -> TypeCheckDiagnostics {
let _span = tracing::trace_span!("check_types", ?file).entered(); let _span = tracing::trace_span!("check_types", ?file).entered();
@ -3376,14 +3376,10 @@ impl<'db> Type<'db> {
/// [`CallErrorKind::NotCallable`]. /// [`CallErrorKind::NotCallable`].
fn signatures(self, db: &'db dyn Db) -> Signatures<'db> { fn signatures(self, db: &'db dyn Db) -> Signatures<'db> {
match self { match self {
Type::Callable(callable) => { Type::Callable(callable) => Signatures::single(match callable.signatures(db) {
Signatures::single(match callable.signatures(db).as_ref() { [signature] => CallableSignature::single(self, signature.clone()),
[signature] => CallableSignature::single(self, signature.clone()), signatures => CallableSignature::from_overloads(self, signatures.iter().cloned()),
signatures => { }),
CallableSignature::from_overloads(self, signatures.iter().cloned())
}
})
}
Type::BoundMethod(bound_method) => { Type::BoundMethod(bound_method) => {
let signature = bound_method.function(db).signature(db); let signature = bound_method.function(db).signature(db);
@ -5639,7 +5635,7 @@ pub enum TypeVarKind {
#[salsa::interned(debug)] #[salsa::interned(debug)]
pub struct TypeVarInstance<'db> { pub struct TypeVarInstance<'db> {
/// The name of this TypeVar (e.g. `T`) /// The name of this TypeVar (e.g. `T`)
#[return_ref] #[returns(ref)]
name: ast::name::Name, name: ast::name::Name,
/// The type var's definition /// The type var's definition
@ -6587,7 +6583,7 @@ impl<'db> OverloadedFunction<'db> {
#[salsa::interned(debug)] #[salsa::interned(debug)]
pub struct FunctionType<'db> { pub struct FunctionType<'db> {
/// Name of the function at definition. /// Name of the function at definition.
#[return_ref] #[returns(ref)]
pub name: ast::name::Name, pub name: ast::name::Name,
/// Is this a function that we special-case somehow? If so, which one? /// Is this a function that we special-case somehow? If so, which one?
@ -6697,7 +6693,7 @@ impl<'db> FunctionType<'db> {
/// ///
/// Were this not a salsa query, then the calling query /// Were this not a salsa query, then the calling query
/// would depend on the function's AST and rerun for every change in that file. /// would depend on the function's AST and rerun for every change in that file.
#[salsa::tracked(return_ref, cycle_fn=signature_cycle_recover, cycle_initial=signature_cycle_initial)] #[salsa::tracked(returns(ref), cycle_fn=signature_cycle_recover, cycle_initial=signature_cycle_initial)]
pub(crate) fn signature(self, db: &'db dyn Db) -> FunctionSignature<'db> { pub(crate) fn signature(self, db: &'db dyn Db) -> FunctionSignature<'db> {
if let Some(overloaded) = self.to_overloaded(db) { if let Some(overloaded) = self.to_overloaded(db) {
FunctionSignature::Overloaded( FunctionSignature::Overloaded(
@ -6846,68 +6842,56 @@ impl<'db> FunctionType<'db> {
/// 2. second `foo` definition, it would contain both overloads and still no implementation /// 2. second `foo` definition, it would contain both overloads and still no implementation
/// 3. third `foo` definition, it would contain both overloads and the implementation which is /// 3. third `foo` definition, it would contain both overloads and the implementation which is
/// itself /// itself
fn to_overloaded(self, db: &'db dyn Db) -> Option<&'db OverloadedFunction<'db>> { #[salsa::tracked(returns(as_ref))]
#[allow(clippy::ref_option)] fn to_overloaded(self, db: &'db dyn Db) -> Option<OverloadedFunction<'db>> {
#[salsa::tracked(return_ref)] let mut current = self;
fn to_overloaded_impl<'db>( let mut overloads = vec![];
db: &'db dyn Db,
function: FunctionType<'db>,
) -> Option<OverloadedFunction<'db>> {
let mut current = function;
let mut overloads = vec![];
loop { loop {
// The semantic model records a use for each function on the name node. This is used // The semantic model records a use for each function on the name node. This is used
// here to get the previous function definition with the same name. // here to get the previous function definition with the same name.
let scope = current.definition(db).scope(db); let scope = current.definition(db).scope(db);
let use_def = let use_def = semantic_index(db, scope.file(db)).use_def_map(scope.file_scope_id(db));
semantic_index(db, scope.file(db)).use_def_map(scope.file_scope_id(db)); let use_id = current
let use_id = current .body_scope(db)
.body_scope(db) .node(db)
.node(db) .expect_function()
.expect_function() .name
.name .scoped_use_id(db, scope);
.scoped_use_id(db, scope);
let Symbol::Type(Type::FunctionLiteral(previous), Boundness::Bound) = let Symbol::Type(Type::FunctionLiteral(previous), Boundness::Bound) =
symbol_from_bindings(db, use_def.bindings_at_use(use_id)) symbol_from_bindings(db, use_def.bindings_at_use(use_id))
else { else {
break; break;
};
if previous.has_known_decorator(db, FunctionDecorators::OVERLOAD) {
overloads.push(previous);
} else {
break;
}
current = previous;
}
// Overloads are inserted in reverse order, from bottom to top.
overloads.reverse();
let implementation = if function.has_known_decorator(db, FunctionDecorators::OVERLOAD) {
overloads.push(function);
None
} else {
Some(function)
}; };
if overloads.is_empty() { if previous.has_known_decorator(db, FunctionDecorators::OVERLOAD) {
None overloads.push(previous);
} else { } else {
Some(OverloadedFunction { break;
overloads,
implementation,
})
} }
current = previous;
} }
// HACK: This is required because salsa doesn't support returning `Option<&T>` from tracked // Overloads are inserted in reverse order, from bottom to top.
// functions yet. Refer to https://github.com/salsa-rs/salsa/pull/772. Remove the inner overloads.reverse();
// function once it's supported.
to_overloaded_impl(db, self).as_ref() let implementation = if self.has_known_decorator(db, FunctionDecorators::OVERLOAD) {
overloads.push(self);
None
} else {
Some(self)
};
if overloads.is_empty() {
None
} else {
Some(OverloadedFunction {
overloads,
implementation,
})
}
} }
} }
@ -7100,7 +7084,7 @@ impl<'db> BoundMethodType<'db> {
/// `CallableType`. /// `CallableType`.
#[salsa::interned(debug)] #[salsa::interned(debug)]
pub struct CallableType<'db> { pub struct CallableType<'db> {
#[return_ref] #[returns(deref)]
signatures: Box<[Signature<'db>]>, signatures: Box<[Signature<'db>]>,
} }
@ -7210,7 +7194,7 @@ impl<'db> CallableType<'db> {
where where
F: Fn(&Signature<'db>, &Signature<'db>) -> bool, F: Fn(&Signature<'db>, &Signature<'db>) -> bool,
{ {
match (&**self.signatures(db), &**other.signatures(db)) { match (self.signatures(db), other.signatures(db)) {
([self_signature], [other_signature]) => { ([self_signature], [other_signature]) => {
// Base case: both callable types contain a single signature. // Base case: both callable types contain a single signature.
check_signature(self_signature, other_signature) check_signature(self_signature, other_signature)
@ -7252,7 +7236,7 @@ impl<'db> CallableType<'db> {
/// ///
/// See [`Type::is_equivalent_to`] for more details. /// See [`Type::is_equivalent_to`] for more details.
fn is_equivalent_to(self, db: &'db dyn Db, other: Self) -> bool { fn is_equivalent_to(self, db: &'db dyn Db, other: Self) -> bool {
match (&**self.signatures(db), &**other.signatures(db)) { match (self.signatures(db), other.signatures(db)) {
([self_signature], [other_signature]) => { ([self_signature], [other_signature]) => {
// Common case: both callable types contain a single signature, use the custom // Common case: both callable types contain a single signature, use the custom
// equivalence check instead of delegating it to the subtype check. // equivalence check instead of delegating it to the subtype check.
@ -7278,7 +7262,7 @@ impl<'db> CallableType<'db> {
/// ///
/// See [`Type::is_gradual_equivalent_to`] for more details. /// See [`Type::is_gradual_equivalent_to`] for more details.
fn is_gradual_equivalent_to(self, db: &'db dyn Db, other: Self) -> bool { fn is_gradual_equivalent_to(self, db: &'db dyn Db, other: Self) -> bool {
match (&**self.signatures(db), &**other.signatures(db)) { match (self.signatures(db), other.signatures(db)) {
([self_signature], [other_signature]) => { ([self_signature], [other_signature]) => {
self_signature.is_gradual_equivalent_to(db, other_signature) self_signature.is_gradual_equivalent_to(db, other_signature)
} }
@ -7371,7 +7355,7 @@ impl<'db> ModuleLiteralType<'db> {
#[salsa::interned(debug)] #[salsa::interned(debug)]
pub struct TypeAliasType<'db> { pub struct TypeAliasType<'db> {
#[return_ref] #[returns(ref)]
pub name: ast::name::Name, pub name: ast::name::Name,
rhs_scope: ScopeId<'db>, rhs_scope: ScopeId<'db>,
@ -7405,15 +7389,11 @@ pub(super) struct MetaclassCandidate<'db> {
#[salsa::interned(debug)] #[salsa::interned(debug)]
pub struct UnionType<'db> { pub struct UnionType<'db> {
/// The union type includes values in any of these types. /// The union type includes values in any of these types.
#[return_ref] #[returns(deref)]
elements_boxed: Box<[Type<'db>]>, pub elements: Box<[Type<'db>]>,
} }
impl<'db> UnionType<'db> { impl<'db> UnionType<'db> {
fn elements(self, db: &'db dyn Db) -> &'db [Type<'db>] {
self.elements_boxed(db)
}
/// Create a union from a list of elements /// Create a union from a list of elements
/// (which may be eagerly simplified into a different variant of [`Type`] altogether). /// (which may be eagerly simplified into a different variant of [`Type`] altogether).
pub fn from_elements<I, T>(db: &'db dyn Db, elements: I) -> Type<'db> pub fn from_elements<I, T>(db: &'db dyn Db, elements: I) -> Type<'db>
@ -7630,7 +7610,7 @@ impl<'db> UnionType<'db> {
#[salsa::interned(debug)] #[salsa::interned(debug)]
pub struct IntersectionType<'db> { pub struct IntersectionType<'db> {
/// The intersection type includes only values in all of these types. /// The intersection type includes only values in all of these types.
#[return_ref] #[returns(ref)]
positive: FxOrderSet<Type<'db>>, positive: FxOrderSet<Type<'db>>,
/// The intersection type does not include any value in any of these types. /// The intersection type does not include any value in any of these types.
@ -7638,7 +7618,7 @@ pub struct IntersectionType<'db> {
/// Negation types aren't expressible in annotations, and are most likely to arise from type /// Negation types aren't expressible in annotations, and are most likely to arise from type
/// narrowing along with intersections (e.g. `if not isinstance(...)`), so we represent them /// narrowing along with intersections (e.g. `if not isinstance(...)`), so we represent them
/// directly in intersections rather than as a separate type. /// directly in intersections rather than as a separate type.
#[return_ref] #[returns(ref)]
negative: FxOrderSet<Type<'db>>, negative: FxOrderSet<Type<'db>>,
} }
@ -7868,7 +7848,7 @@ impl<'db> IntersectionType<'db> {
#[salsa::interned(debug)] #[salsa::interned(debug)]
pub struct StringLiteralType<'db> { pub struct StringLiteralType<'db> {
#[return_ref] #[returns(deref)]
value: Box<str>, value: Box<str>,
} }
@ -7889,7 +7869,7 @@ impl<'db> StringLiteralType<'db> {
#[salsa::interned(debug)] #[salsa::interned(debug)]
pub struct BytesLiteralType<'db> { pub struct BytesLiteralType<'db> {
#[return_ref] #[returns(deref)]
value: Box<[u8]>, value: Box<[u8]>,
} }
@ -7901,7 +7881,7 @@ impl<'db> BytesLiteralType<'db> {
#[salsa::interned(debug)] #[salsa::interned(debug)]
pub struct TupleType<'db> { pub struct TupleType<'db> {
#[return_ref] #[returns(deref)]
elements: Box<[Type<'db>]>, elements: Box<[Type<'db>]>,
} }
@ -8082,7 +8062,6 @@ impl<'db> SuperOwnerKind<'db> {
/// Represent a bound super object like `super(PivotClass, owner)` /// Represent a bound super object like `super(PivotClass, owner)`
#[salsa::interned(debug)] #[salsa::interned(debug)]
pub struct BoundSuperType<'db> { pub struct BoundSuperType<'db> {
#[return_ref]
pub pivot_class: ClassBase<'db>, pub pivot_class: ClassBase<'db>,
#[return_ref] #[return_ref]
pub owner: SuperOwnerKind<'db>, pub owner: SuperOwnerKind<'db>,
@ -8223,7 +8202,7 @@ impl<'db> BoundSuperType<'db> {
.find_name_in_mro_with_policy(db, name, policy) .find_name_in_mro_with_policy(db, name, policy)
.expect("Calling `find_name_in_mro` on dynamic type should return `Some`") .expect("Calling `find_name_in_mro` on dynamic type should return `Some`")
} }
SuperOwnerKind::Class(class) => *class, SuperOwnerKind::Class(class) => class,
SuperOwnerKind::Instance(instance) => instance.class(), SuperOwnerKind::Instance(instance) => instance.class(),
}; };

View file

@ -447,7 +447,7 @@ impl<'db> Bindings<'db> {
overload.parameter_types() overload.parameter_types()
{ {
overload.set_return_type(Type::BooleanLiteral( overload.set_return_type(Type::BooleanLiteral(
literal.value(db).starts_with(&**prefix.value(db)), literal.value(db).starts_with(prefix.value(db)),
)); ));
} }
} }

View file

@ -471,7 +471,7 @@ impl<'db> From<ClassType<'db>> for Type<'db> {
#[salsa::interned(debug)] #[salsa::interned(debug)]
pub struct ClassLiteral<'db> { pub struct ClassLiteral<'db> {
/// Name of the class at definition /// Name of the class at definition
#[return_ref] #[returns(ref)]
pub(crate) name: ast::name::Name, pub(crate) name: ast::name::Name,
pub(crate) body_scope: ScopeId<'db>, pub(crate) body_scope: ScopeId<'db>,
@ -634,21 +634,8 @@ impl<'db> ClassLiteral<'db> {
/// ///
/// Were this not a salsa query, then the calling query /// Were this not a salsa query, then the calling query
/// would depend on the class's AST and rerun for every change in that file. /// would depend on the class's AST and rerun for every change in that file.
pub(super) fn explicit_bases(self, db: &'db dyn Db) -> &'db [Type<'db>] { #[salsa::tracked(returns(deref), cycle_fn=explicit_bases_cycle_recover, cycle_initial=explicit_bases_cycle_initial)]
self.explicit_bases_query(db) pub(super) fn explicit_bases(self, db: &'db dyn Db) -> Box<[Type<'db>]> {
}
/// Iterate over this class's explicit bases, filtering out any bases that are not class
/// objects, and applying default specialization to any unspecialized generic class literals.
fn fully_static_explicit_bases(self, db: &'db dyn Db) -> impl Iterator<Item = ClassType<'db>> {
self.explicit_bases(db)
.iter()
.copied()
.filter_map(|ty| ty.to_class_type(db))
}
#[salsa::tracked(return_ref, cycle_fn=explicit_bases_cycle_recover, cycle_initial=explicit_bases_cycle_initial)]
fn explicit_bases_query(self, db: &'db dyn Db) -> Box<[Type<'db>]> {
tracing::trace!("ClassLiteral::explicit_bases_query: {}", self.name(db)); tracing::trace!("ClassLiteral::explicit_bases_query: {}", self.name(db));
let class_stmt = self.node(db); let class_stmt = self.node(db);
@ -662,6 +649,15 @@ impl<'db> ClassLiteral<'db> {
.collect() .collect()
} }
/// Iterate over this class's explicit bases, filtering out any bases that are not class
/// objects, and applying default specialization to any unspecialized generic class literals.
fn fully_static_explicit_bases(self, db: &'db dyn Db) -> impl Iterator<Item = ClassType<'db>> {
self.explicit_bases(db)
.iter()
.copied()
.filter_map(|ty| ty.to_class_type(db))
}
/// Determine if this class is a protocol. /// Determine if this class is a protocol.
/// ///
/// This method relies on the accuracy of the [`KnownClass::is_protocol`] method, /// This method relies on the accuracy of the [`KnownClass::is_protocol`] method,
@ -700,7 +696,7 @@ impl<'db> ClassLiteral<'db> {
} }
/// Return the types of the decorators on this class /// Return the types of the decorators on this class
#[salsa::tracked(return_ref)] #[salsa::tracked(returns(deref))]
fn decorators(self, db: &'db dyn Db) -> Box<[Type<'db>]> { fn decorators(self, db: &'db dyn Db) -> Box<[Type<'db>]> {
tracing::trace!("ClassLiteral::decorators: {}", self.name(db)); tracing::trace!("ClassLiteral::decorators: {}", self.name(db));
@ -746,7 +742,7 @@ impl<'db> ClassLiteral<'db> {
/// attribute on a class at runtime. /// attribute on a class at runtime.
/// ///
/// [method resolution order]: https://docs.python.org/3/glossary.html#term-method-resolution-order /// [method resolution order]: https://docs.python.org/3/glossary.html#term-method-resolution-order
#[salsa::tracked(return_ref, cycle_fn=try_mro_cycle_recover, cycle_initial=try_mro_cycle_initial)] #[salsa::tracked(returns(as_ref), cycle_fn=try_mro_cycle_recover, cycle_initial=try_mro_cycle_initial)]
pub(super) fn try_mro( pub(super) fn try_mro(
self, self,
db: &'db dyn Db, db: &'db dyn Db,
@ -842,11 +838,7 @@ impl<'db> ClassLiteral<'db> {
return Ok((SubclassOfType::subclass_of_unknown(), None)); return Ok((SubclassOfType::subclass_of_unknown(), None));
} }
if self if self.try_mro(db, None).is_err_and(MroError::is_cycle) {
.try_mro(db, None)
.as_ref()
.is_err_and(MroError::is_cycle)
{
return Ok((SubclassOfType::subclass_of_unknown(), None)); return Ok((SubclassOfType::subclass_of_unknown(), None));
} }
@ -2728,7 +2720,7 @@ impl<'db> Type<'db> {
if !alias.origin(db).is_known(db, KnownClass::Slice) { if !alias.origin(db).is_known(db, KnownClass::Slice) {
return None; return None;
} }
let [start, stop, step] = alias.specialization(db).types(db).as_ref() else { let [start, stop, step] = alias.specialization(db).types(db) else {
return None; return None;
}; };

View file

@ -239,7 +239,6 @@ impl<'db> ClassBase<'db> {
let (class_literal, specialization) = class.class_literal(db); let (class_literal, specialization) = class.class_literal(db);
class_literal class_literal
.try_mro(db, specialization) .try_mro(db, specialization)
.as_ref()
.is_err_and(MroError::is_cycle) .is_err_and(MroError::is_cycle)
} }
ClassBase::Dynamic(_) | ClassBase::Generic(_) | ClassBase::Protocol => false, ClassBase::Dynamic(_) | ClassBase::Generic(_) | ClassBase::Protocol => false,

View file

@ -223,8 +223,7 @@ impl Display for DisplayRepresentation<'_> {
Type::StringLiteral(string) => string.display(self.db).fmt(f), Type::StringLiteral(string) => string.display(self.db).fmt(f),
Type::LiteralString => f.write_str("LiteralString"), Type::LiteralString => f.write_str("LiteralString"),
Type::BytesLiteral(bytes) => { Type::BytesLiteral(bytes) => {
let escape = let escape = AsciiEscape::with_preferred_quote(bytes.value(self.db), Quote::Double);
AsciiEscape::with_preferred_quote(bytes.value(self.db).as_ref(), Quote::Double);
escape.bytes_repr(TripleQuotes::No).write(f) escape.bytes_repr(TripleQuotes::No).write(f)
} }

View file

@ -15,7 +15,7 @@ use crate::{Db, FxOrderSet};
/// containing context. /// containing context.
#[salsa::interned(debug)] #[salsa::interned(debug)]
pub struct GenericContext<'db> { pub struct GenericContext<'db> {
#[return_ref] #[returns(ref)]
pub(crate) variables: FxOrderSet<TypeVarInstance<'db>>, pub(crate) variables: FxOrderSet<TypeVarInstance<'db>>,
} }
@ -216,7 +216,7 @@ impl<'db> GenericContext<'db> {
#[salsa::interned(debug)] #[salsa::interned(debug)]
pub struct Specialization<'db> { pub struct Specialization<'db> {
pub(crate) generic_context: GenericContext<'db>, pub(crate) generic_context: GenericContext<'db>,
#[return_ref] #[returns(deref)]
pub(crate) types: Box<[Type<'db>]>, pub(crate) types: Box<[Type<'db>]>,
} }
@ -249,7 +249,7 @@ impl<'db> Specialization<'db> {
) -> Self { ) -> Self {
let types: Box<[_]> = self let types: Box<[_]> = self
.types(db) .types(db)
.into_iter() .iter()
.map(|ty| ty.apply_type_mapping(db, type_mapping)) .map(|ty| ty.apply_type_mapping(db, type_mapping))
.collect(); .collect();
Specialization::new(db, self.generic_context(db), types) Specialization::new(db, self.generic_context(db), types)
@ -282,7 +282,7 @@ impl<'db> Specialization<'db> {
// explicitly tells us which typevars are mapped. // explicitly tells us which typevars are mapped.
let types: Box<[_]> = self let types: Box<[_]> = self
.types(db) .types(db)
.into_iter() .iter()
.zip(other.types(db)) .zip(other.types(db))
.map(|(self_type, other_type)| match (self_type, other_type) { .map(|(self_type, other_type)| match (self_type, other_type) {
(unknown, known) | (known, unknown) if unknown.is_unknown() => *known, (unknown, known) | (known, unknown) if unknown.is_unknown() => *known,

View file

@ -117,7 +117,7 @@ use super::{BoundSuperError, BoundSuperType, ClassBase};
/// Infer all types for a [`ScopeId`], including all definitions and expressions in that scope. /// Infer all types for a [`ScopeId`], including all definitions and expressions in that scope.
/// Use when checking a scope, or needing to provide a type for an arbitrary expression in the /// Use when checking a scope, or needing to provide a type for an arbitrary expression in the
/// scope. /// scope.
#[salsa::tracked(return_ref, cycle_fn=scope_cycle_recover, cycle_initial=scope_cycle_initial)] #[salsa::tracked(returns(ref), cycle_fn=scope_cycle_recover, cycle_initial=scope_cycle_initial)]
pub(crate) fn infer_scope_types<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> TypeInference<'db> { pub(crate) fn infer_scope_types<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> TypeInference<'db> {
let file = scope.file(db); let file = scope.file(db);
let _span = tracing::trace_span!("infer_scope_types", scope=?scope.as_id(), ?file).entered(); let _span = tracing::trace_span!("infer_scope_types", scope=?scope.as_id(), ?file).entered();
@ -144,7 +144,7 @@ fn scope_cycle_initial<'db>(_db: &'db dyn Db, scope: ScopeId<'db>) -> TypeInfere
/// Infer all types for a [`Definition`] (including sub-expressions). /// Infer all types for a [`Definition`] (including sub-expressions).
/// Use when resolving a symbol name use or public type of a symbol. /// Use when resolving a symbol name use or public type of a symbol.
#[salsa::tracked(return_ref, cycle_fn=definition_cycle_recover, cycle_initial=definition_cycle_initial)] #[salsa::tracked(returns(ref), cycle_fn=definition_cycle_recover, cycle_initial=definition_cycle_initial)]
pub(crate) fn infer_definition_types<'db>( pub(crate) fn infer_definition_types<'db>(
db: &'db dyn Db, db: &'db dyn Db,
definition: Definition<'db>, definition: Definition<'db>,
@ -182,7 +182,7 @@ fn definition_cycle_initial<'db>(
/// ///
/// Deferred expressions are type expressions (annotations, base classes, aliases...) in a stub /// Deferred expressions are type expressions (annotations, base classes, aliases...) in a stub
/// file, or in a file with `from __future__ import annotations`, or stringified annotations. /// file, or in a file with `from __future__ import annotations`, or stringified annotations.
#[salsa::tracked(return_ref, cycle_fn=deferred_cycle_recover, cycle_initial=deferred_cycle_initial)] #[salsa::tracked(returns(ref), cycle_fn=deferred_cycle_recover, cycle_initial=deferred_cycle_initial)]
pub(crate) fn infer_deferred_types<'db>( pub(crate) fn infer_deferred_types<'db>(
db: &'db dyn Db, db: &'db dyn Db,
definition: Definition<'db>, definition: Definition<'db>,
@ -218,7 +218,7 @@ fn deferred_cycle_initial<'db>(db: &'db dyn Db, definition: Definition<'db>) ->
/// Use rarely; only for cases where we'd otherwise risk double-inferring an expression: RHS of an /// Use rarely; only for cases where we'd otherwise risk double-inferring an expression: RHS of an
/// assignment, which might be unpacking/multi-target and thus part of multiple definitions, or a /// assignment, which might be unpacking/multi-target and thus part of multiple definitions, or a
/// type narrowing guard expression (e.g. if statement test node). /// type narrowing guard expression (e.g. if statement test node).
#[salsa::tracked(return_ref, cycle_fn=expression_cycle_recover, cycle_initial=expression_cycle_initial)] #[salsa::tracked(returns(ref), cycle_fn=expression_cycle_recover, cycle_initial=expression_cycle_initial)]
pub(crate) fn infer_expression_types<'db>( pub(crate) fn infer_expression_types<'db>(
db: &'db dyn Db, db: &'db dyn Db,
expression: Expression<'db>, expression: Expression<'db>,
@ -305,7 +305,7 @@ fn single_expression_cycle_initial<'db>(
/// involved in an unpacking operation. It returns a result-like object that can be used to get the /// involved in an unpacking operation. It returns a result-like object that can be used to get the
/// type of the variables involved in this unpacking along with any violations that are detected /// type of the variables involved in this unpacking along with any violations that are detected
/// during this unpacking. /// during this unpacking.
#[salsa::tracked(return_ref)] #[salsa::tracked(returns(ref))]
pub(super) fn infer_unpack_types<'db>(db: &'db dyn Db, unpack: Unpack<'db>) -> UnpackResult<'db> { pub(super) fn infer_unpack_types<'db>(db: &'db dyn Db, unpack: Unpack<'db>) -> UnpackResult<'db> {
let file = unpack.file(db); let file = unpack.file(db);
let _span = let _span =
@ -876,7 +876,7 @@ impl<'db> TypeInferenceBuilder<'db> {
} }
// (3) Check that the class's MRO is resolvable // (3) Check that the class's MRO is resolvable
match class.try_mro(self.db(), None).as_ref() { match class.try_mro(self.db(), None) {
Err(mro_error) => { Err(mro_error) => {
match mro_error.reason() { match mro_error.reason() {
MroErrorKind::DuplicateBases(duplicates) => { MroErrorKind::DuplicateBases(duplicates) => {
@ -3265,7 +3265,7 @@ impl<'db> TypeInferenceBuilder<'db> {
| ast::Expr::Tuple(ast::ExprTuple { elts, .. }) => { | ast::Expr::Tuple(ast::ExprTuple { elts, .. }) => {
let mut assigned_tys = match assigned_ty { let mut assigned_tys = match assigned_ty {
Some(Type::Tuple(tuple)) => { Some(Type::Tuple(tuple)) => {
Either::Left(tuple.elements(self.db()).into_iter().copied()) Either::Left(tuple.elements(self.db()).iter().copied())
} }
Some(_) | None => Either::Right(std::iter::empty()), Some(_) | None => Either::Right(std::iter::empty()),
}; };
@ -4864,7 +4864,7 @@ impl<'db> TypeInferenceBuilder<'db> {
if !truthiness.is_always_true() { if !truthiness.is_always_true() {
if let Some(message) = message if let Some(message) = message
.and_then(Type::into_string_literal) .and_then(Type::into_string_literal)
.map(|s| &**s.value(self.db())) .map(|s| s.value(self.db()))
{ {
builder.into_diagnostic(format_args!( builder.into_diagnostic(format_args!(
"Static assertion error: {message}" "Static assertion error: {message}"
@ -5122,7 +5122,7 @@ impl<'db> TypeInferenceBuilder<'db> {
let name_param = name_param let name_param = name_param
.into_string_literal() .into_string_literal()
.map(|name| name.value(self.db()).as_ref()); .map(|name| name.value(self.db()));
if name_param if name_param
.is_none_or(|name_param| name_param != target.id) .is_none_or(|name_param| name_param != target.id)
{ {
@ -5856,13 +5856,13 @@ impl<'db> TypeInferenceBuilder<'db> {
} }
(Type::BytesLiteral(lhs), Type::BytesLiteral(rhs), ast::Operator::Add) => { (Type::BytesLiteral(lhs), Type::BytesLiteral(rhs), ast::Operator::Add) => {
let bytes = [&**lhs.value(self.db()), &**rhs.value(self.db())].concat(); let bytes = [lhs.value(self.db()), rhs.value(self.db())].concat();
Some(Type::bytes_literal(self.db(), &bytes)) Some(Type::bytes_literal(self.db(), &bytes))
} }
(Type::StringLiteral(lhs), Type::StringLiteral(rhs), ast::Operator::Add) => { (Type::StringLiteral(lhs), Type::StringLiteral(rhs), ast::Operator::Add) => {
let lhs_value = lhs.value(self.db()).to_string(); let lhs_value = lhs.value(self.db()).to_string();
let rhs_value = rhs.value(self.db()).as_ref(); let rhs_value = rhs.value(self.db());
let ty = if lhs_value.len() + rhs_value.len() <= Self::MAX_STRING_LITERAL_SIZE { let ty = if lhs_value.len() + rhs_value.len() <= Self::MAX_STRING_LITERAL_SIZE {
Type::string_literal(self.db(), &(lhs_value + rhs_value)) Type::string_literal(self.db(), &(lhs_value + rhs_value))
} else { } else {
@ -6463,8 +6463,8 @@ impl<'db> TypeInferenceBuilder<'db> {
ast::CmpOp::LtE => Ok(Type::BooleanLiteral(s1 <= s2)), ast::CmpOp::LtE => Ok(Type::BooleanLiteral(s1 <= s2)),
ast::CmpOp::Gt => Ok(Type::BooleanLiteral(s1 > s2)), ast::CmpOp::Gt => Ok(Type::BooleanLiteral(s1 > s2)),
ast::CmpOp::GtE => Ok(Type::BooleanLiteral(s1 >= s2)), ast::CmpOp::GtE => Ok(Type::BooleanLiteral(s1 >= s2)),
ast::CmpOp::In => Ok(Type::BooleanLiteral(s2.contains(s1.as_ref()))), ast::CmpOp::In => Ok(Type::BooleanLiteral(s2.contains(s1))),
ast::CmpOp::NotIn => Ok(Type::BooleanLiteral(!s2.contains(s1.as_ref()))), ast::CmpOp::NotIn => Ok(Type::BooleanLiteral(!s2.contains(s1))),
ast::CmpOp::Is => { ast::CmpOp::Is => {
if s1 == s2 { if s1 == s2 {
Ok(KnownClass::Bool.to_instance(self.db())) Ok(KnownClass::Bool.to_instance(self.db()))
@ -6508,8 +6508,8 @@ impl<'db> TypeInferenceBuilder<'db> {
), ),
(Type::BytesLiteral(salsa_b1), Type::BytesLiteral(salsa_b2)) => { (Type::BytesLiteral(salsa_b1), Type::BytesLiteral(salsa_b2)) => {
let b1 = &**salsa_b1.value(self.db()); let b1 = salsa_b1.value(self.db());
let b2 = &**salsa_b2.value(self.db()); let b2 = salsa_b2.value(self.db());
match op { match op {
ast::CmpOp::Eq => Ok(Type::BooleanLiteral(b1 == b2)), ast::CmpOp::Eq => Ok(Type::BooleanLiteral(b1 == b2)),
ast::CmpOp::NotEq => Ok(Type::BooleanLiteral(b1 != b2)), ast::CmpOp::NotEq => Ok(Type::BooleanLiteral(b1 != b2)),

View file

@ -16,7 +16,6 @@ use ruff_python_ast as ast;
use ruff_python_ast::{BoolOp, ExprBoolOp}; use ruff_python_ast::{BoolOp, ExprBoolOp};
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use std::collections::hash_map::Entry; use std::collections::hash_map::Entry;
use std::sync::Arc;
use super::UnionType; use super::UnionType;
@ -65,8 +64,7 @@ pub(crate) fn infer_narrowing_constraint<'db>(
} }
} }
#[allow(clippy::ref_option)] #[salsa::tracked(returns(as_ref))]
#[salsa::tracked(return_ref)]
fn all_narrowing_constraints_for_pattern<'db>( fn all_narrowing_constraints_for_pattern<'db>(
db: &'db dyn Db, db: &'db dyn Db,
pattern: PatternPredicate<'db>, pattern: PatternPredicate<'db>,
@ -74,9 +72,8 @@ fn all_narrowing_constraints_for_pattern<'db>(
NarrowingConstraintsBuilder::new(db, PredicateNode::Pattern(pattern), true).finish() NarrowingConstraintsBuilder::new(db, PredicateNode::Pattern(pattern), true).finish()
} }
#[allow(clippy::ref_option)]
#[salsa::tracked( #[salsa::tracked(
return_ref, returns(as_ref),
cycle_fn=constraints_for_expression_cycle_recover, cycle_fn=constraints_for_expression_cycle_recover,
cycle_initial=constraints_for_expression_cycle_initial, cycle_initial=constraints_for_expression_cycle_initial,
)] )]
@ -87,9 +84,8 @@ fn all_narrowing_constraints_for_expression<'db>(
NarrowingConstraintsBuilder::new(db, PredicateNode::Expression(expression), true).finish() NarrowingConstraintsBuilder::new(db, PredicateNode::Expression(expression), true).finish()
} }
#[allow(clippy::ref_option)]
#[salsa::tracked( #[salsa::tracked(
return_ref, returns(as_ref),
cycle_fn=negative_constraints_for_expression_cycle_recover, cycle_fn=negative_constraints_for_expression_cycle_recover,
cycle_initial=negative_constraints_for_expression_cycle_initial, cycle_initial=negative_constraints_for_expression_cycle_initial,
)] )]
@ -100,8 +96,7 @@ fn all_negative_narrowing_constraints_for_expression<'db>(
NarrowingConstraintsBuilder::new(db, PredicateNode::Expression(expression), false).finish() NarrowingConstraintsBuilder::new(db, PredicateNode::Expression(expression), false).finish()
} }
#[allow(clippy::ref_option)] #[salsa::tracked(returns(as_ref))]
#[salsa::tracked(return_ref)]
fn all_negative_narrowing_constraints_for_pattern<'db>( fn all_negative_narrowing_constraints_for_pattern<'db>(
db: &'db dyn Db, db: &'db dyn Db,
pattern: PatternPredicate<'db>, pattern: PatternPredicate<'db>,
@ -109,7 +104,7 @@ fn all_negative_narrowing_constraints_for_pattern<'db>(
NarrowingConstraintsBuilder::new(db, PredicateNode::Pattern(pattern), false).finish() NarrowingConstraintsBuilder::new(db, PredicateNode::Pattern(pattern), false).finish()
} }
#[allow(clippy::ref_option)] #[expect(clippy::ref_option)]
fn constraints_for_expression_cycle_recover<'db>( fn constraints_for_expression_cycle_recover<'db>(
_db: &'db dyn Db, _db: &'db dyn Db,
_value: &Option<NarrowingConstraints<'db>>, _value: &Option<NarrowingConstraints<'db>>,
@ -286,7 +281,7 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
expression: Expression<'db>, expression: Expression<'db>,
is_positive: bool, is_positive: bool,
) -> Option<NarrowingConstraints<'db>> { ) -> Option<NarrowingConstraints<'db>> {
let expression_node = expression.node_ref(self.db).node(); let expression_node = expression.node_ref(self.db);
self.evaluate_expression_node_predicate(expression_node, expression, is_positive) self.evaluate_expression_node_predicate(expression_node, expression, is_positive)
} }
@ -344,7 +339,7 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
}) })
} }
fn symbols(&self) -> Arc<SymbolTable> { fn symbols(&self) -> &'db SymbolTable {
symbol_table(self.db, self.scope()) symbol_table(self.db, self.scope())
} }

View file

@ -59,7 +59,7 @@ impl<'db> Deref for ProtocolClassLiteral<'db> {
/// The interface of a protocol: the members of that protocol, and the types of those members. /// The interface of a protocol: the members of that protocol, and the types of those members.
#[salsa::interned(debug)] #[salsa::interned(debug)]
pub(super) struct ProtocolInterface<'db> { pub(super) struct ProtocolInterface<'db> {
#[return_ref] #[returns(ref)]
_members: BTreeMap<Name, ProtocolMemberData<'db>>, _members: BTreeMap<Name, ProtocolMemberData<'db>>,
} }

View file

@ -150,20 +150,20 @@ pub(super) fn union_or_intersection_elements_ordering<'db>(
(Type::BoundSuper(left), Type::BoundSuper(right)) => { (Type::BoundSuper(left), Type::BoundSuper(right)) => {
(match (left.pivot_class(db), right.pivot_class(db)) { (match (left.pivot_class(db), right.pivot_class(db)) {
(ClassBase::Class(left), ClassBase::Class(right)) => left.cmp(right), (ClassBase::Class(left), ClassBase::Class(right)) => left.cmp(&right),
(ClassBase::Class(_), _) => Ordering::Less, (ClassBase::Class(_), _) => Ordering::Less,
(_, ClassBase::Class(_)) => Ordering::Greater, (_, ClassBase::Class(_)) => Ordering::Greater,
(ClassBase::Protocol, _) => Ordering::Less, (ClassBase::Protocol, _) => Ordering::Less,
(_, ClassBase::Protocol) => Ordering::Greater, (_, ClassBase::Protocol) => Ordering::Greater,
(ClassBase::Generic(left), ClassBase::Generic(right)) => left.cmp(right), (ClassBase::Generic(left), ClassBase::Generic(right)) => left.cmp(&right),
(ClassBase::Generic(_), _) => Ordering::Less, (ClassBase::Generic(_), _) => Ordering::Less,
(_, ClassBase::Generic(_)) => Ordering::Greater, (_, ClassBase::Generic(_)) => Ordering::Greater,
(ClassBase::Dynamic(left), ClassBase::Dynamic(right)) => { (ClassBase::Dynamic(left), ClassBase::Dynamic(right)) => {
dynamic_elements_ordering(*left, *right) dynamic_elements_ordering(left, right)
} }
}) })
.then_with(|| match (left.owner(db), right.owner(db)) { .then_with(|| match (left.owner(db), right.owner(db)) {
(SuperOwnerKind::Class(left), SuperOwnerKind::Class(right)) => left.cmp(right), (SuperOwnerKind::Class(left), SuperOwnerKind::Class(right)) => left.cmp(&right),
(SuperOwnerKind::Class(_), _) => Ordering::Less, (SuperOwnerKind::Class(_), _) => Ordering::Less,
(_, SuperOwnerKind::Class(_)) => Ordering::Greater, (_, SuperOwnerKind::Class(_)) => Ordering::Greater,
(SuperOwnerKind::Instance(left), SuperOwnerKind::Instance(right)) => { (SuperOwnerKind::Instance(left), SuperOwnerKind::Instance(right)) => {
@ -172,7 +172,7 @@ pub(super) fn union_or_intersection_elements_ordering<'db>(
(SuperOwnerKind::Instance(_), _) => Ordering::Less, (SuperOwnerKind::Instance(_), _) => Ordering::Less,
(_, SuperOwnerKind::Instance(_)) => Ordering::Greater, (_, SuperOwnerKind::Instance(_)) => Ordering::Greater,
(SuperOwnerKind::Dynamic(left), SuperOwnerKind::Dynamic(right)) => { (SuperOwnerKind::Dynamic(left), SuperOwnerKind::Dynamic(right)) => {
dynamic_elements_ordering(*left, *right) dynamic_elements_ordering(left, right)
} }
}) })
} }

View file

@ -226,7 +226,7 @@ impl<'db> Unpacker<'db> {
// If there is a starred expression, it will consume all of the types at that location. // If there is a starred expression, it will consume all of the types at that location.
let Some(starred_index) = targets.iter().position(ast::Expr::is_starred_expr) else { let Some(starred_index) = targets.iter().position(ast::Expr::is_starred_expr) else {
// Otherwise, the types will be unpacked 1-1 to the targets. // Otherwise, the types will be unpacked 1-1 to the targets.
return Cow::Borrowed(tuple_ty.elements(self.db()).as_ref()); return Cow::Borrowed(tuple_ty.elements(self.db()));
}; };
if tuple_ty.len(self.db()) >= targets.len() - 1 { if tuple_ty.len(self.db()) >= targets.len() - 1 {

View file

@ -37,7 +37,7 @@ pub(crate) struct Unpack<'db> {
/// The target expression that is being unpacked. For example, in `(a, b) = (1, 2)`, the target /// The target expression that is being unpacked. For example, in `(a, b) = (1, 2)`, the target
/// expression is `(a, b)`. /// expression is `(a, b)`.
#[no_eq] #[no_eq]
#[return_ref] #[returns(deref)]
#[tracked] #[tracked]
pub(crate) target: AstNodeRef<ast::Expr>, pub(crate) target: AstNodeRef<ast::Expr>,
@ -102,7 +102,7 @@ impl<'db> UnpackValue<'db> {
/// Returns the expression as an [`AnyNodeRef`]. /// Returns the expression as an [`AnyNodeRef`].
pub(crate) fn as_any_node_ref(self, db: &'db dyn Db) -> AnyNodeRef<'db> { pub(crate) fn as_any_node_ref(self, db: &'db dyn Db) -> AnyNodeRef<'db> {
self.expression().node_ref(db).node().into() self.expression().node_ref(db).into()
} }
pub(crate) const fn kind(self) -> UnpackKind { pub(crate) const fn kind(self) -> UnpackKind {

View file

@ -90,8 +90,8 @@ impl SemanticDb for Db {
!file.path(self).is_vendored_path() !file.path(self).is_vendored_path()
} }
fn rule_selection(&self) -> Arc<RuleSelection> { fn rule_selection(&self) -> &RuleSelection {
self.rule_selection.clone() &self.rule_selection
} }
fn lint_registry(&self) -> &LintRegistry { fn lint_registry(&self) -> &LintRegistry {

View file

@ -30,7 +30,7 @@ ty_python_semantic = { path = "../crates/ty_python_semantic" }
ty_vendored = { path = "../crates/ty_vendored" } ty_vendored = { path = "../crates/ty_vendored" }
libfuzzer-sys = { git = "https://github.com/rust-fuzz/libfuzzer", default-features = false } libfuzzer-sys = { git = "https://github.com/rust-fuzz/libfuzzer", default-features = false }
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "2c869364a9592d06fdf45c422e1e4a7265a8fe8a" } salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "7edce6e248f35c8114b4b021cdb474a3fb2813b3" }
similar = { version = "2.5.0" } similar = { version = "2.5.0" }
tracing = { version = "0.1.40" } tracing = { version = "0.1.40" }

View file

@ -95,8 +95,8 @@ impl SemanticDb for TestDb {
!file.path(self).is_vendored_path() !file.path(self).is_vendored_path()
} }
fn rule_selection(&self) -> Arc<RuleSelection> { fn rule_selection(&self) -> &RuleSelection {
self.rule_selection.clone() &self.rule_selection
} }
fn lint_registry(&self) -> &LintRegistry { fn lint_registry(&self) -> &LintRegistry {