mirror of
https://github.com/astral-sh/ruff.git
synced 2025-08-04 18:58:26 +00:00
[ty] Update salsa (#17964)
This commit is contained in:
parent 12ce445ff7
commit 6cd8a49638

35 changed files with 187 additions and 234 deletions
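Most of this diff is mechanical. The pinned salsa git revision moves from 2c869364 to 7edce6e2 in the workspace Cargo.toml, in fuzz/Cargo.toml, and in Cargo.lock; the old #[return_ref] attribute on salsa inputs, interned structs, and tracked queries is migrated to the new returns(ref), returns(deref), returns(as_ref), and returns(as_deref) spellings; and the Db::rule_selection method now hands out &RuleSelection instead of a cloned Arc<RuleSelection>. The sketch below is a hand-written analogue (not salsa-generated code, and the FileData type is made up) of what the borrow-returning accessor style amounts to:

// Hand-written analogue of a borrow-returning accessor; `FileData` and `path`
// are illustrative names, not types from this repository.
struct FileData {
    path: String,
}

impl FileData {
    // A plain value-returning accessor has to clone the field on every call.
    fn path_cloned(&self) -> String {
        self.path.clone()
    }

    // A `returns(ref)`-style accessor hands out a borrow instead.
    fn path(&self) -> &str {
        &self.path
    }
}

fn main() {
    let file = FileData { path: "src/lib.rs".to_string() };
    let owned: String = file.path_cloned(); // allocates a copy
    let borrowed: &str = file.path();       // no allocation, just a borrow
    println!("{owned} {borrowed}");
}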
Cargo.lock (generated): 6 changed lines
@@ -3249,7 +3249,7 @@ checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
 [[package]]
 name = "salsa"
 version = "0.21.1"
-source = "git+https://github.com/salsa-rs/salsa.git?rev=2c869364a9592d06fdf45c422e1e4a7265a8fe8a#2c869364a9592d06fdf45c422e1e4a7265a8fe8a"
+source = "git+https://github.com/salsa-rs/salsa.git?rev=7edce6e248f35c8114b4b021cdb474a3fb2813b3#7edce6e248f35c8114b4b021cdb474a3fb2813b3"
 dependencies = [
  "boxcar",
  "compact_str",

@@ -3272,12 +3272,12 @@ dependencies = [
 [[package]]
 name = "salsa-macro-rules"
 version = "0.21.1"
-source = "git+https://github.com/salsa-rs/salsa.git?rev=2c869364a9592d06fdf45c422e1e4a7265a8fe8a#2c869364a9592d06fdf45c422e1e4a7265a8fe8a"
+source = "git+https://github.com/salsa-rs/salsa.git?rev=7edce6e248f35c8114b4b021cdb474a3fb2813b3#7edce6e248f35c8114b4b021cdb474a3fb2813b3"

 [[package]]
 name = "salsa-macros"
 version = "0.21.1"
-source = "git+https://github.com/salsa-rs/salsa.git?rev=2c869364a9592d06fdf45c422e1e4a7265a8fe8a#2c869364a9592d06fdf45c422e1e4a7265a8fe8a"
+source = "git+https://github.com/salsa-rs/salsa.git?rev=7edce6e248f35c8114b4b021cdb474a3fb2813b3#7edce6e248f35c8114b4b021cdb474a3fb2813b3"
 dependencies = [
  "heck",
  "proc-macro2",
@@ -125,7 +125,7 @@ rayon = { version = "1.10.0" }
 regex = { version = "1.10.2" }
 rustc-hash = { version = "2.0.0" }
 # When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
-salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "2c869364a9592d06fdf45c422e1e4a7265a8fe8a" }
+salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "7edce6e248f35c8114b4b021cdb474a3fb2813b3" }
 schemars = { version = "0.8.16" }
 seahash = { version = "4.1.0" }
 serde = { version = "1.0.197", features = ["derive"] }
@@ -277,7 +277,7 @@ impl std::panic::RefUnwindSafe for Files {}
 #[salsa::input]
 pub struct File {
     /// The path of the file (immutable).
-    #[return_ref]
+    #[returns(ref)]
     pub path: FilePath,

     /// The unix permissions of the file. Only supported on unix systems. Always `None` on Windows
@@ -19,8 +19,8 @@ use crate::Db;
 #[salsa::input(debug)]
 pub struct FileRoot {
     /// The path of a root is guaranteed to never change.
-    #[return_ref]
-    path_buf: SystemPathBuf,
+    #[returns(deref)]
+    pub path: SystemPathBuf,

     /// The kind of the root at the time of its creation.
     kind_at_time_of_creation: FileRootKind,

@@ -32,10 +32,6 @@ pub struct FileRoot {
 }

 impl FileRoot {
-    pub fn path(self, db: &dyn Db) -> &SystemPath {
-        self.path_buf(db)
-    }
-
     pub fn durability(self, db: &dyn Db) -> salsa::Durability {
         self.kind_at_time_of_creation(db).durability()
     }
@@ -20,7 +20,7 @@ use crate::Db;
 /// reflected in the changed AST offsets.
 /// The other reason is that Ruff's AST doesn't implement `Eq` which Sala requires
 /// for determining if a query result is unchanged.
-#[salsa::tracked(return_ref, no_eq)]
+#[salsa::tracked(returns(ref), no_eq)]
 pub fn parsed_module(db: &dyn Db, file: File) -> ParsedModule {
     let _span = tracing::trace_span!("parsed_module", ?file).entered();
@@ -88,8 +88,8 @@ impl Db for ModuleDb {
         !file.path(self).is_vendored_path()
     }

-    fn rule_selection(&self) -> Arc<RuleSelection> {
-        self.rule_selection.clone()
+    fn rule_selection(&self) -> &RuleSelection {
+        &self.rule_selection
     }

     fn lint_registry(&self) -> &LintRegistry {

@@ -120,8 +120,8 @@ pub(crate) mod tests {
         !file.path(self).is_vendored_path()
     }

-    fn rule_selection(&self) -> Arc<RuleSelection> {
-        self.rule_selection.clone()
+    fn rule_selection(&self) -> &RuleSelection {
+        &self.rule_selection
     }

     fn lint_registry(&self) -> &LintRegistry {
@@ -149,7 +149,7 @@ impl SemanticDb for ProjectDatabase {
         project.is_file_open(self, file)
     }

-    fn rule_selection(&self) -> Arc<RuleSelection> {
+    fn rule_selection(&self) -> &RuleSelection {
         self.project().rules(self)
     }

@@ -327,7 +327,7 @@ pub(crate) mod tests {
         !file.path(self).is_vendored_path()
     }

-    fn rule_selection(&self) -> Arc<RuleSelection> {
+    fn rule_selection(&self) -> &RuleSelection {
         self.project().rules(self)
     }
@@ -60,21 +60,21 @@ pub struct Project {
     ///
     /// Setting the open files to a non-`None` value changes `check` to only check the
     /// open files rather than all files in the project.
-    #[return_ref]
+    #[returns(as_deref)]
     #[default]
     open_fileset: Option<Arc<FxHashSet<File>>>,

     /// The first-party files of this project.
     #[default]
-    #[return_ref]
+    #[returns(ref)]
     file_set: IndexedFiles,

     /// The metadata describing the project, including the unresolved options.
-    #[return_ref]
+    #[returns(ref)]
     pub metadata: ProjectMetadata,

     /// The resolved project settings.
-    #[return_ref]
+    #[returns(ref)]
     pub settings: Settings,

     /// The paths that should be included when checking this project.

@@ -98,11 +98,11 @@ pub struct Project {
     /// in an IDE when the user only wants to check the open tabs. This could be modeled
     /// with `included_paths` too but it would require an explicit walk dir step that's simply unnecessary.
     #[default]
-    #[return_ref]
+    #[returns(deref)]
     included_paths_list: Vec<SystemPathBuf>,

     /// Diagnostics that were generated when resolving the project settings.
-    #[return_ref]
+    #[returns(deref)]
     settings_diagnostics: Vec<OptionDiagnostic>,
 }
@@ -131,7 +131,7 @@ impl Project {
     /// This is a salsa query to prevent re-computing queries if other, unrelated
     /// settings change. For example, we don't want that changing the terminal settings
     /// invalidates any type checking queries.
-    #[salsa::tracked]
+    #[salsa::tracked(returns(deref))]
     pub fn rules(self, db: &dyn Db) -> Arc<RuleSelection> {
         self.settings(db).to_rules()
     }

@@ -157,7 +157,7 @@ impl Project {
            self.set_settings(db).to(settings);
        }

-       if self.settings_diagnostics(db) != &settings_diagnostics {
+       if self.settings_diagnostics(db) != settings_diagnostics {
            self.set_settings_diagnostics(db).to(settings_diagnostics);
        }

@@ -284,7 +284,7 @@ impl Project {
     /// This can be useful to check arbitrary files, but it isn't something we recommend.
     /// We should try to support this use case but it's okay if there are some limitations around it.
     fn included_paths_or_root(self, db: &dyn Db) -> &[SystemPathBuf] {
-        match &**self.included_paths_list(db) {
+        match self.included_paths_list(db) {
             [] => std::slice::from_ref(&self.metadata(db).root),
             paths => paths,
         }

@@ -292,7 +292,7 @@ impl Project {
     /// Returns the open files in the project or `None` if the entire project should be checked.
     pub fn open_files(self, db: &dyn Db) -> Option<&FxHashSet<File>> {
-        self.open_fileset(db).as_deref()
+        self.open_fileset(db)
     }

     /// Sets the open files in the project.
@@ -1,5 +1,3 @@
-use std::sync::Arc;
-
 use crate::lint::{LintRegistry, RuleSelection};
 use ruff_db::files::File;
 use ruff_db::{Db as SourceDb, Upcast};
@@ -9,7 +7,7 @@ use ruff_db::{Db as SourceDb, Upcast};
 pub trait Db: SourceDb + Upcast<dyn SourceDb> {
     fn is_file_open(&self, file: File) -> bool;

-    fn rule_selection(&self) -> Arc<RuleSelection>;
+    fn rule_selection(&self) -> &RuleSelection;

     fn lint_registry(&self) -> &LintRegistry;
 }
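The trait change above is the least mechanical part of the commit: implementers now return a borrow of their stored RuleSelection instead of cloning an Arc on every call. A minimal stand-alone sketch of the new shape, using a stand-in RuleSelection struct rather than the real type:

// Stand-in for the real `RuleSelection`; only the signature change matters here.
struct RuleSelection;

trait Db {
    // Old signature: fn rule_selection(&self) -> Arc<RuleSelection>;
    // New signature: callers borrow from the database instead of cloning an Arc.
    fn rule_selection(&self) -> &RuleSelection;
}

struct TestDb {
    rule_selection: RuleSelection,
}

impl Db for TestDb {
    fn rule_selection(&self) -> &RuleSelection {
        // Previously this was `self.rule_selection.clone()` on an Arc field.
        &self.rule_selection
    }
}

fn main() {
    let db = TestDb { rule_selection: RuleSelection };
    let _selection: &RuleSelection = db.rule_selection();
}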
@@ -125,8 +123,8 @@ pub(crate) mod tests {
         !file.path(self).is_vendored_path()
     }

-    fn rule_selection(&self) -> Arc<RuleSelection> {
-        self.rule_selection.clone()
+    fn rule_selection(&self) -> &RuleSelection {
+        &self.rule_selection
     }

     fn lint_registry(&self) -> &LintRegistry {
@@ -28,20 +28,15 @@ fn dunder_all_names_cycle_initial(_db: &dyn Db, _file: File) -> Option<FxHashSet
 /// Returns a set of names in the `__all__` variable for `file`, [`None`] if it is not defined or
 /// if it contains invalid elements.
-pub(crate) fn dunder_all_names(db: &dyn Db, file: File) -> Option<&FxHashSet<Name>> {
-    #[allow(clippy::ref_option)]
-    #[salsa::tracked(return_ref, cycle_fn=dunder_all_names_cycle_recover, cycle_initial=dunder_all_names_cycle_initial)]
-    fn dunder_all_names_impl(db: &dyn Db, file: File) -> Option<FxHashSet<Name>> {
-        let _span = tracing::trace_span!("dunder_all_names", file=?file.path(db)).entered();
+#[salsa::tracked(returns(as_ref), cycle_fn=dunder_all_names_cycle_recover, cycle_initial=dunder_all_names_cycle_initial)]
+pub(crate) fn dunder_all_names(db: &dyn Db, file: File) -> Option<FxHashSet<Name>> {
+    let _span = tracing::trace_span!("dunder_all_names", file=?file.path(db)).entered();

-        let module = parsed_module(db.upcast(), file);
-        let index = semantic_index(db, file);
-        let mut collector = DunderAllNamesCollector::new(db, file, index);
-        collector.visit_body(module.suite());
-        collector.into_names()
-    }
-
-    dunder_all_names_impl(db, file).as_ref()
+    let module = parsed_module(db.upcast(), file);
+    let index = semantic_index(db, file);
+    let mut collector = DunderAllNamesCollector::new(db, file, index);
+    collector.visit_body(module.suite());
+    collector.into_names()
 }

 /// A visitor that collects the names in the `__all__` variable of a module.
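The wrapper-function workaround above existed because the previous salsa revision could not turn a stored Option<T> into Option<&T> for callers; the new returns(as_ref) mode performs that conversion in the generated accessor, so the query body can be written directly. A plain-Rust sketch of the conversion it performs (no salsa involved, names invented):

use std::collections::HashSet;

// What `returns(as_ref)` amounts to at the type level: the value is stored as
// `Option<HashSet<String>>`, and the accessor hands back `Option<&HashSet<String>>`
// by calling `.as_ref()` on the stored value instead of cloning it.
struct CachedNames {
    dunder_all: Option<HashSet<String>>,
}

impl CachedNames {
    fn dunder_all(&self) -> Option<&HashSet<String>> {
        self.dunder_all.as_ref()
    }
}

fn main() {
    let cache = CachedNames {
        dunder_all: Some(HashSet::from(["foo".to_string(), "bar".to_string()])),
    };
    if let Some(names) = cache.dunder_all() {
        println!("{} exported names", names.len());
    }
}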
@@ -349,7 +349,7 @@ impl SearchPaths {
     /// The editable-install search paths for the first `site-packages` directory
     /// should come between the two `site-packages` directories when it comes to
     /// module-resolution priority.
-    #[salsa::tracked(return_ref)]
+    #[salsa::tracked(returns(deref))]
     pub(crate) fn dynamic_resolution_paths(db: &dyn Db) -> Vec<SearchPath> {
         tracing::debug!("Resolving dynamic module resolution paths");

@@ -583,7 +583,7 @@ impl<'db> Iterator for PthFileIterator<'db> {
 /// This is needed because Salsa requires that all query arguments are salsa ingredients.
 #[salsa::interned(debug)]
 struct ModuleNameIngredient<'db> {
-    #[return_ref]
+    #[returns(ref)]
     pub(super) name: ModuleName,
 }
@@ -13,10 +13,10 @@ use salsa::Setter;
 pub struct Program {
     pub python_version: PythonVersion,

-    #[return_ref]
+    #[returns(ref)]
     pub python_platform: PythonPlatform,

-    #[return_ref]
+    #[returns(ref)]
     pub(crate) search_paths: SearchPaths,
 }
@@ -46,7 +46,7 @@ type SymbolMap = hashbrown::HashMap<ScopedSymbolId, (), FxBuildHasher>;
 /// Returns the semantic index for `file`.
 ///
 /// Prefer using [`symbol_table`] when working with symbols from a single scope.
-#[salsa::tracked(return_ref, no_eq)]
+#[salsa::tracked(returns(ref), no_eq)]
 pub(crate) fn semantic_index(db: &dyn Db, file: File) -> SemanticIndex<'_> {
     let _span = tracing::trace_span!("semantic_index", ?file).entered();
@@ -60,7 +60,7 @@ pub(crate) fn semantic_index(db: &dyn Db, file: File) -> SemanticIndex<'_> {
 /// Using [`symbol_table`] over [`semantic_index`] has the advantage that
 /// Salsa can avoid invalidating dependent queries if this scope's symbol table
 /// is unchanged.
-#[salsa::tracked]
+#[salsa::tracked(returns(deref))]
 pub(crate) fn symbol_table<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> Arc<SymbolTable> {
     let file = scope.file(db);
     let _span = tracing::trace_span!("symbol_table", scope=?scope.as_id(), ?file).entered();
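returns(deref) is the variant used for queries such as symbol_table that still store an Arc internally: the stored type is unchanged, but the generated accessor goes through Deref, so callers receive &SymbolTable (which is why the tests further down drop the extra &). A plain-Rust sketch of that deref step, with invented names:

use std::sync::Arc;

// Stand-in for the real SymbolTable.
struct SymbolTable {
    names: Vec<String>,
}

struct Cache {
    // The query result is still stored as an Arc internally...
    table: Arc<SymbolTable>,
}

impl Cache {
    // ...but a `returns(deref)`-style accessor exposes it through Deref,
    // handing out `&SymbolTable` rather than cloning the Arc.
    fn symbol_table(&self) -> &SymbolTable {
        &self.table
    }
}

fn main() {
    let cache = Cache {
        table: Arc::new(SymbolTable { names: vec!["x".to_string()] }),
    };
    let table: &SymbolTable = cache.symbol_table();
    println!("{} symbols", table.names.len());
}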
@@ -80,7 +80,7 @@ pub(crate) fn symbol_table<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> Arc<Sym
 ///
 /// - We cannot resolve relative imports (which aren't allowed in `import` statements) without
 ///   knowing the name of the current module, and whether it's a package.
-#[salsa::tracked]
+#[salsa::tracked(returns(deref))]
 pub(crate) fn imported_modules<'db>(db: &'db dyn Db, file: File) -> Arc<FxHashSet<ModuleName>> {
     semantic_index(db, file).imported_modules.clone()
 }

@@ -90,7 +90,7 @@ pub(crate) fn imported_modules<'db>(db: &'db dyn Db, file: File) -> Arc<FxHashSe
 /// Using [`use_def_map`] over [`semantic_index`] has the advantage that
 /// Salsa can avoid invalidating dependent queries if this scope's use-def map
 /// is unchanged.
-#[salsa::tracked]
+#[salsa::tracked(returns(deref))]
 pub(crate) fn use_def_map<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> Arc<UseDefMap<'db>> {
     let file = scope.file(db);
     let _span = tracing::trace_span!("use_def_map", scope=?scope.as_id(), ?file).entered();
@@ -599,7 +599,7 @@ mod tests {
         let TestCase { db, file } = test_case("");
         let global_table = symbol_table(&db, global_scope(&db, file));

-        let global_names = names(&global_table);
+        let global_names = names(global_table);

         assert_eq!(global_names, Vec::<&str>::new());
     }

@@ -609,7 +609,7 @@ mod tests {
         let TestCase { db, file } = test_case("x");
         let global_table = symbol_table(&db, global_scope(&db, file));

-        assert_eq!(names(&global_table), vec!["x"]);
+        assert_eq!(names(global_table), vec!["x"]);
     }

     #[test]

@@ -617,7 +617,7 @@ mod tests {
         let TestCase { db, file } = test_case("x: int");
         let global_table = symbol_table(&db, global_scope(&db, file));

-        assert_eq!(names(&global_table), vec!["int", "x"]);
+        assert_eq!(names(global_table), vec!["int", "x"]);
         // TODO record definition
     }

@@ -627,7 +627,7 @@ mod tests {
         let scope = global_scope(&db, file);
         let global_table = symbol_table(&db, scope);

-        assert_eq!(names(&global_table), vec!["foo"]);
+        assert_eq!(names(global_table), vec!["foo"]);
         let foo = global_table.symbol_id_by_name("foo").unwrap();

         let use_def = use_def_map(&db, scope);
@@ -640,7 +640,7 @@ mod tests {
         let TestCase { db, file } = test_case("import foo.bar");
         let global_table = symbol_table(&db, global_scope(&db, file));

-        assert_eq!(names(&global_table), vec!["foo"]);
+        assert_eq!(names(global_table), vec!["foo"]);
     }

     #[test]

@@ -648,7 +648,7 @@ mod tests {
         let TestCase { db, file } = test_case("import foo.bar as baz");
         let global_table = symbol_table(&db, global_scope(&db, file));

-        assert_eq!(names(&global_table), vec!["baz"]);
+        assert_eq!(names(global_table), vec!["baz"]);
     }

     #[test]

@@ -657,7 +657,7 @@ mod tests {
         let scope = global_scope(&db, file);
         let global_table = symbol_table(&db, scope);

-        assert_eq!(names(&global_table), vec!["foo"]);
+        assert_eq!(names(global_table), vec!["foo"]);
         assert!(
             global_table
                 .symbol_by_name("foo")

@@ -682,7 +682,7 @@ mod tests {
         let scope = global_scope(&db, file);
         let global_table = symbol_table(&db, scope);

-        assert_eq!(names(&global_table), vec!["foo", "x"]);
+        assert_eq!(names(global_table), vec!["foo", "x"]);
         assert!(
             global_table
                 .symbol_by_name("foo")
@@ -702,7 +702,7 @@ mod tests {
         let scope = global_scope(&db, file);
         let global_table = symbol_table(&db, scope);

-        assert_eq!(names(&global_table), vec!["x"]);
+        assert_eq!(names(global_table), vec!["x"]);

         let use_def = use_def_map(&db, scope);
         let binding = use_def

@@ -726,7 +726,7 @@ y = 2
         );
         let global_table = symbol_table(&db, global_scope(&db, file));

-        assert_eq!(names(&global_table), vec!["C", "y"]);
+        assert_eq!(names(global_table), vec!["C", "y"]);

         let index = semantic_index(&db, file);

@@ -798,7 +798,7 @@ def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):
         let index = semantic_index(&db, file);
         let global_table = symbol_table(&db, global_scope(&db, file));

-        assert_eq!(names(&global_table), vec!["str", "int", "f"]);
+        assert_eq!(names(global_table), vec!["str", "int", "f"]);

         let [(function_scope_id, _function_scope)] = index
             .child_scopes(FileScopeId::global())

@@ -855,7 +855,7 @@ def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):
         let index = semantic_index(&db, file);
         let global_table = symbol_table(&db, global_scope(&db, file));

-        assert!(names(&global_table).is_empty());
+        assert!(names(global_table).is_empty());

         let [(lambda_scope_id, _lambda_scope)] = index
             .child_scopes(FileScopeId::global())
@@ -1344,7 +1344,7 @@ match subject:
         assert!(global_table.symbol_by_name("Foo").unwrap().is_used());
         assert_eq!(
-            names(&global_table),
+            names(global_table),
             vec!["subject", "a", "b", "c", "d", "e", "f", "g", "h", "Foo", "i", "j", "k", "l"]
         );

@@ -1389,7 +1389,7 @@ match 1:
         let global_scope_id = global_scope(&db, file);
         let global_table = symbol_table(&db, global_scope_id);

-        assert_eq!(names(&global_table), vec!["first", "second"]);
+        assert_eq!(names(global_table), vec!["first", "second"]);

         let use_def = use_def_map(&db, global_scope_id);
         for (name, expected_index) in [("first", 0), ("second", 0)] {

@@ -1410,7 +1410,7 @@ match 1:
         let scope = global_scope(&db, file);
         let global_table = symbol_table(&db, scope);

-        assert_eq!(&names(&global_table), &["a", "x"]);
+        assert_eq!(&names(global_table), &["a", "x"]);

         let use_def = use_def_map(&db, scope);
         let binding = use_def

@@ -1426,7 +1426,7 @@ match 1:
         let scope = global_scope(&db, file);
         let global_table = symbol_table(&db, scope);

-        assert_eq!(&names(&global_table), &["a", "x", "y"]);
+        assert_eq!(&names(global_table), &["a", "x", "y"]);

         let use_def = use_def_map(&db, scope);
         let x_binding = use_def

@@ -1446,7 +1446,7 @@ match 1:
         let scope = global_scope(&db, file);
         let global_table = symbol_table(&db, scope);

-        assert_eq!(&names(&global_table), &["e", "a", "b", "c", "d"]);
+        assert_eq!(&names(global_table), &["e", "a", "b", "c", "d"]);

         let use_def = use_def_map(&db, scope);
         let binding = use_def
@@ -34,7 +34,7 @@ pub struct Definition<'db> {
     /// WARNING: Only access this field when doing type inference for the same
     /// file as where `Definition` is defined to avoid cross-file query dependencies.
     #[no_eq]
-    #[return_ref]
+    #[returns(ref)]
     #[tracked]
     pub(crate) kind: DefinitionKind<'db>,

@@ -41,7 +41,7 @@ pub(crate) struct Expression<'db> {
     /// The expression node.
     #[no_eq]
     #[tracked]
-    #[return_ref]
+    #[returns(deref)]
     pub(crate) node_ref: AstNodeRef<ast::Expr>,

     /// An assignment statement, if this expression is immediately used as the rhs of that

@@ -83,7 +83,7 @@ pub(crate) struct PatternPredicate<'db> {
     pub(crate) subject: Expression<'db>,

-    #[return_ref]
+    #[returns(ref)]
     pub(crate) kind: PatternPredicateKind<'db>,

     pub(crate) guard: Option<Expression<'db>>,
@@ -43,7 +43,7 @@ fn exports_cycle_initial(_db: &dyn Db, _file: File) -> Box<[Name]> {
     Box::default()
 }

-#[salsa::tracked(return_ref, cycle_fn=exports_cycle_recover, cycle_initial=exports_cycle_initial)]
+#[salsa::tracked(returns(deref), cycle_fn=exports_cycle_recover, cycle_initial=exports_cycle_initial)]
 pub(super) fn exported_names(db: &dyn Db, file: File) -> Box<[Name]> {
     let module = parsed_module(db.upcast(), file);
     let mut finder = ExportFinder::new(db, file);

@@ -86,7 +86,7 @@ declare_lint! {
     }
 }

-#[salsa::tracked(return_ref)]
+#[salsa::tracked(returns(ref))]
 pub(crate) fn suppressions(db: &dyn Db, file: File) -> Suppressions {
     let parsed = parsed_module(db.upcast(), file);
     let source = source_text(db.upcast(), file);
@@ -1014,7 +1014,7 @@ mod implicit_globals {
     /// Conceptually this function could be a `Set` rather than a list,
     /// but the number of symbols declared in this scope is likely to be very small,
     /// so the cost of hashing the names is likely to be more expensive than it's worth.
-    #[salsa::tracked(return_ref)]
+    #[salsa::tracked(returns(deref))]
     fn module_type_symbols<'db>(db: &'db dyn Db) -> smallvec::SmallVec<[ast::name::Name; 8]> {
         let Some(module_type) = KnownClass::ModuleType
             .to_class_literal(db)

@@ -81,7 +81,7 @@ mod definition;
 #[cfg(test)]
 mod property_tests;

-#[salsa::tracked(return_ref)]
+#[salsa::tracked(returns(ref))]
 pub fn check_types(db: &dyn Db, file: File) -> TypeCheckDiagnostics {
     let _span = tracing::trace_span!("check_types", ?file).entered();
|
|||
/// [`CallErrorKind::NotCallable`].
|
||||
fn signatures(self, db: &'db dyn Db) -> Signatures<'db> {
|
||||
match self {
|
||||
Type::Callable(callable) => {
|
||||
Signatures::single(match callable.signatures(db).as_ref() {
|
||||
[signature] => CallableSignature::single(self, signature.clone()),
|
||||
signatures => {
|
||||
CallableSignature::from_overloads(self, signatures.iter().cloned())
|
||||
}
|
||||
})
|
||||
}
|
||||
Type::Callable(callable) => Signatures::single(match callable.signatures(db) {
|
||||
[signature] => CallableSignature::single(self, signature.clone()),
|
||||
signatures => CallableSignature::from_overloads(self, signatures.iter().cloned()),
|
||||
}),
|
||||
|
||||
Type::BoundMethod(bound_method) => {
|
||||
let signature = bound_method.function(db).signature(db);
|
||||
|
@ -5639,7 +5635,7 @@ pub enum TypeVarKind {
|
|||
#[salsa::interned(debug)]
|
||||
pub struct TypeVarInstance<'db> {
|
||||
/// The name of this TypeVar (e.g. `T`)
|
||||
#[return_ref]
|
||||
#[returns(ref)]
|
||||
name: ast::name::Name,
|
||||
|
||||
/// The type var's definition
|
||||
|
@ -6587,7 +6583,7 @@ impl<'db> OverloadedFunction<'db> {
|
|||
#[salsa::interned(debug)]
|
||||
pub struct FunctionType<'db> {
|
||||
/// Name of the function at definition.
|
||||
#[return_ref]
|
||||
#[returns(ref)]
|
||||
pub name: ast::name::Name,
|
||||
|
||||
/// Is this a function that we special-case somehow? If so, which one?
|
||||
|
@ -6697,7 +6693,7 @@ impl<'db> FunctionType<'db> {
|
|||
///
|
||||
/// Were this not a salsa query, then the calling query
|
||||
/// would depend on the function's AST and rerun for every change in that file.
|
||||
#[salsa::tracked(return_ref, cycle_fn=signature_cycle_recover, cycle_initial=signature_cycle_initial)]
|
||||
#[salsa::tracked(returns(ref), cycle_fn=signature_cycle_recover, cycle_initial=signature_cycle_initial)]
|
||||
pub(crate) fn signature(self, db: &'db dyn Db) -> FunctionSignature<'db> {
|
||||
if let Some(overloaded) = self.to_overloaded(db) {
|
||||
FunctionSignature::Overloaded(
|
||||
|
@@ -6846,68 +6842,56 @@ impl<'db> FunctionType<'db> {
     /// 2. second `foo` definition, it would contain both overloads and still no implementation
     /// 3. third `foo` definition, it would contain both overloads and the implementation which is
     ///    itself
-    fn to_overloaded(self, db: &'db dyn Db) -> Option<&'db OverloadedFunction<'db>> {
-        #[allow(clippy::ref_option)]
-        #[salsa::tracked(return_ref)]
-        fn to_overloaded_impl<'db>(
-            db: &'db dyn Db,
-            function: FunctionType<'db>,
-        ) -> Option<OverloadedFunction<'db>> {
-            let mut current = function;
-            let mut overloads = vec![];
+    #[salsa::tracked(returns(as_ref))]
+    fn to_overloaded(self, db: &'db dyn Db) -> Option<OverloadedFunction<'db>> {
+        let mut current = self;
+        let mut overloads = vec![];

-            loop {
-                // The semantic model records a use for each function on the name node. This is used
-                // here to get the previous function definition with the same name.
-                let scope = current.definition(db).scope(db);
-                let use_def =
-                    semantic_index(db, scope.file(db)).use_def_map(scope.file_scope_id(db));
-                let use_id = current
-                    .body_scope(db)
-                    .node(db)
-                    .expect_function()
-                    .name
-                    .scoped_use_id(db, scope);
-
-                let Symbol::Type(Type::FunctionLiteral(previous), Boundness::Bound) =
-                    symbol_from_bindings(db, use_def.bindings_at_use(use_id))
-                else {
-                    break;
-                };
-
-                if previous.has_known_decorator(db, FunctionDecorators::OVERLOAD) {
-                    overloads.push(previous);
-                } else {
-                    break;
-                }
-
-                current = previous;
-            }
-
-            // Overloads are inserted in reverse order, from bottom to top.
-            overloads.reverse();
-
-            let implementation = if function.has_known_decorator(db, FunctionDecorators::OVERLOAD) {
-                overloads.push(function);
-                None
-            } else {
-                Some(function)
-            };
-
-            if overloads.is_empty() {
-                None
-            } else {
-                Some(OverloadedFunction {
-                    overloads,
-                    implementation,
-                })
-            }
-        }
-
-        // HACK: This is required because salsa doesn't support returning `Option<&T>` from tracked
-        // functions yet. Refer to https://github.com/salsa-rs/salsa/pull/772. Remove the inner
-        // function once it's supported.
-        to_overloaded_impl(db, self).as_ref()
+        loop {
+            // The semantic model records a use for each function on the name node. This is used
+            // here to get the previous function definition with the same name.
+            let scope = current.definition(db).scope(db);
+            let use_def = semantic_index(db, scope.file(db)).use_def_map(scope.file_scope_id(db));
+            let use_id = current
+                .body_scope(db)
+                .node(db)
+                .expect_function()
+                .name
+                .scoped_use_id(db, scope);
+
+            let Symbol::Type(Type::FunctionLiteral(previous), Boundness::Bound) =
+                symbol_from_bindings(db, use_def.bindings_at_use(use_id))
+            else {
+                break;
+            };
+
+            if previous.has_known_decorator(db, FunctionDecorators::OVERLOAD) {
+                overloads.push(previous);
+            } else {
+                break;
+            }
+
+            current = previous;
+        }
+
+        // Overloads are inserted in reverse order, from bottom to top.
+        overloads.reverse();
+
+        let implementation = if self.has_known_decorator(db, FunctionDecorators::OVERLOAD) {
+            overloads.push(self);
+            None
+        } else {
+            Some(self)
+        };
+
+        if overloads.is_empty() {
+            None
+        } else {
+            Some(OverloadedFunction {
+                overloads,
+                implementation,
+            })
+        }
     }
 }
@@ -7100,7 +7084,7 @@ impl<'db> BoundMethodType<'db> {
 /// `CallableType`.
 #[salsa::interned(debug)]
 pub struct CallableType<'db> {
-    #[return_ref]
+    #[returns(deref)]
     signatures: Box<[Signature<'db>]>,
 }

@@ -7210,7 +7194,7 @@ impl<'db> CallableType<'db> {
     where
         F: Fn(&Signature<'db>, &Signature<'db>) -> bool,
     {
-        match (&**self.signatures(db), &**other.signatures(db)) {
+        match (self.signatures(db), other.signatures(db)) {
             ([self_signature], [other_signature]) => {
                 // Base case: both callable types contain a single signature.
                 check_signature(self_signature, other_signature)

@@ -7252,7 +7236,7 @@ impl<'db> CallableType<'db> {
     ///
     /// See [`Type::is_equivalent_to`] for more details.
     fn is_equivalent_to(self, db: &'db dyn Db, other: Self) -> bool {
-        match (&**self.signatures(db), &**other.signatures(db)) {
+        match (self.signatures(db), other.signatures(db)) {
             ([self_signature], [other_signature]) => {
                 // Common case: both callable types contain a single signature, use the custom
                 // equivalence check instead of delegating it to the subtype check.

@@ -7278,7 +7262,7 @@ impl<'db> CallableType<'db> {
     ///
     /// See [`Type::is_gradual_equivalent_to`] for more details.
     fn is_gradual_equivalent_to(self, db: &'db dyn Db, other: Self) -> bool {
-        match (&**self.signatures(db), &**other.signatures(db)) {
+        match (self.signatures(db), other.signatures(db)) {
             ([self_signature], [other_signature]) => {
                 self_signature.is_gradual_equivalent_to(db, other_signature)
             }

@@ -7371,7 +7355,7 @@ impl<'db> ModuleLiteralType<'db> {
 #[salsa::interned(debug)]
 pub struct TypeAliasType<'db> {
-    #[return_ref]
+    #[returns(ref)]
     pub name: ast::name::Name,

     rhs_scope: ScopeId<'db>,
@@ -7405,15 +7389,11 @@ pub(super) struct MetaclassCandidate<'db> {
 #[salsa::interned(debug)]
 pub struct UnionType<'db> {
     /// The union type includes values in any of these types.
-    #[return_ref]
-    elements_boxed: Box<[Type<'db>]>,
+    #[returns(deref)]
+    pub elements: Box<[Type<'db>]>,
 }

 impl<'db> UnionType<'db> {
-    fn elements(self, db: &'db dyn Db) -> &'db [Type<'db>] {
-        self.elements_boxed(db)
-    }
-
     /// Create a union from a list of elements
     /// (which may be eagerly simplified into a different variant of [`Type`] altogether).
     pub fn from_elements<I, T>(db: &'db dyn Db, elements: I) -> Type<'db>
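Here returns(deref) also makes the hand-written elements() wrapper above unnecessary: the field still owns a Box<[Type]>, and the accessor derefs it straight to a &[Type] slice. A plain-Rust sketch of the same idea with invented names:

// The field keeps owning its elements as a boxed slice...
struct Union {
    elements: Box<[u32]>,
}

impl Union {
    // ...while a `returns(deref)`-style accessor exposes them as a borrowed slice,
    // which is what the removed `elements()` wrapper used to do by hand.
    fn elements(&self) -> &[u32] {
        &self.elements
    }
}

fn main() {
    let union = Union { elements: vec![1, 2, 3].into_boxed_slice() };
    assert_eq!(union.elements(), &[1, 2, 3]);
}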
@@ -7630,7 +7610,7 @@ impl<'db> UnionType<'db> {
 #[salsa::interned(debug)]
 pub struct IntersectionType<'db> {
     /// The intersection type includes only values in all of these types.
-    #[return_ref]
+    #[returns(ref)]
     positive: FxOrderSet<Type<'db>>,

     /// The intersection type does not include any value in any of these types.

@@ -7638,7 +7618,7 @@ pub struct IntersectionType<'db> {
     /// Negation types aren't expressible in annotations, and are most likely to arise from type
     /// narrowing along with intersections (e.g. `if not isinstance(...)`), so we represent them
     /// directly in intersections rather than as a separate type.
-    #[return_ref]
+    #[returns(ref)]
     negative: FxOrderSet<Type<'db>>,
 }

@@ -7868,7 +7848,7 @@ impl<'db> IntersectionType<'db> {
 #[salsa::interned(debug)]
 pub struct StringLiteralType<'db> {
-    #[return_ref]
+    #[returns(deref)]
     value: Box<str>,
 }

@@ -7889,7 +7869,7 @@ impl<'db> StringLiteralType<'db> {
 #[salsa::interned(debug)]
 pub struct BytesLiteralType<'db> {
-    #[return_ref]
+    #[returns(deref)]
     value: Box<[u8]>,
 }

@@ -7901,7 +7881,7 @@ impl<'db> BytesLiteralType<'db> {
 #[salsa::interned(debug)]
 pub struct TupleType<'db> {
-    #[return_ref]
+    #[returns(deref)]
     elements: Box<[Type<'db>]>,
 }
@@ -8082,7 +8062,6 @@ impl<'db> SuperOwnerKind<'db> {
 /// Represent a bound super object like `super(PivotClass, owner)`
 #[salsa::interned(debug)]
 pub struct BoundSuperType<'db> {
-    #[return_ref]
     pub pivot_class: ClassBase<'db>,
-    #[return_ref]
     pub owner: SuperOwnerKind<'db>,

@@ -8223,7 +8202,7 @@ impl<'db> BoundSuperType<'db> {
                     .find_name_in_mro_with_policy(db, name, policy)
                     .expect("Calling `find_name_in_mro` on dynamic type should return `Some`")
             }
-            SuperOwnerKind::Class(class) => *class,
+            SuperOwnerKind::Class(class) => class,
             SuperOwnerKind::Instance(instance) => instance.class(),
         };
@@ -447,7 +447,7 @@ impl<'db> Bindings<'db> {
                         overload.parameter_types()
                     {
                         overload.set_return_type(Type::BooleanLiteral(
-                            literal.value(db).starts_with(&**prefix.value(db)),
+                            literal.value(db).starts_with(prefix.value(db)),
                         ));
                     }
                 }

@@ -471,7 +471,7 @@ impl<'db> From<ClassType<'db>> for Type<'db> {
 #[salsa::interned(debug)]
 pub struct ClassLiteral<'db> {
     /// Name of the class at definition
-    #[return_ref]
+    #[returns(ref)]
     pub(crate) name: ast::name::Name,

     pub(crate) body_scope: ScopeId<'db>,
@@ -634,21 +634,8 @@ impl<'db> ClassLiteral<'db> {
     ///
     /// Were this not a salsa query, then the calling query
     /// would depend on the class's AST and rerun for every change in that file.
-    pub(super) fn explicit_bases(self, db: &'db dyn Db) -> &'db [Type<'db>] {
-        self.explicit_bases_query(db)
-    }
-
-    /// Iterate over this class's explicit bases, filtering out any bases that are not class
-    /// objects, and applying default specialization to any unspecialized generic class literals.
-    fn fully_static_explicit_bases(self, db: &'db dyn Db) -> impl Iterator<Item = ClassType<'db>> {
-        self.explicit_bases(db)
-            .iter()
-            .copied()
-            .filter_map(|ty| ty.to_class_type(db))
-    }
-
-    #[salsa::tracked(return_ref, cycle_fn=explicit_bases_cycle_recover, cycle_initial=explicit_bases_cycle_initial)]
-    fn explicit_bases_query(self, db: &'db dyn Db) -> Box<[Type<'db>]> {
+    #[salsa::tracked(returns(deref), cycle_fn=explicit_bases_cycle_recover, cycle_initial=explicit_bases_cycle_initial)]
+    pub(super) fn explicit_bases(self, db: &'db dyn Db) -> Box<[Type<'db>]> {
         tracing::trace!("ClassLiteral::explicit_bases_query: {}", self.name(db));

         let class_stmt = self.node(db);

@@ -662,6 +649,15 @@ impl<'db> ClassLiteral<'db> {
             .collect()
     }

+    /// Iterate over this class's explicit bases, filtering out any bases that are not class
+    /// objects, and applying default specialization to any unspecialized generic class literals.
+    fn fully_static_explicit_bases(self, db: &'db dyn Db) -> impl Iterator<Item = ClassType<'db>> {
+        self.explicit_bases(db)
+            .iter()
+            .copied()
+            .filter_map(|ty| ty.to_class_type(db))
+    }
+
     /// Determine if this class is a protocol.
     ///
     /// This method relies on the accuracy of the [`KnownClass::is_protocol`] method,
@@ -700,7 +696,7 @@ impl<'db> ClassLiteral<'db> {
     }

     /// Return the types of the decorators on this class
-    #[salsa::tracked(return_ref)]
+    #[salsa::tracked(returns(deref))]
     fn decorators(self, db: &'db dyn Db) -> Box<[Type<'db>]> {
         tracing::trace!("ClassLiteral::decorators: {}", self.name(db));

@@ -746,7 +742,7 @@ impl<'db> ClassLiteral<'db> {
     /// attribute on a class at runtime.
     ///
     /// [method resolution order]: https://docs.python.org/3/glossary.html#term-method-resolution-order
-    #[salsa::tracked(return_ref, cycle_fn=try_mro_cycle_recover, cycle_initial=try_mro_cycle_initial)]
+    #[salsa::tracked(returns(as_ref), cycle_fn=try_mro_cycle_recover, cycle_initial=try_mro_cycle_initial)]
     pub(super) fn try_mro(
         self,
         db: &'db dyn Db,

@@ -842,11 +838,7 @@ impl<'db> ClassLiteral<'db> {
             return Ok((SubclassOfType::subclass_of_unknown(), None));
         }

-        if self
-            .try_mro(db, None)
-            .as_ref()
-            .is_err_and(MroError::is_cycle)
-        {
+        if self.try_mro(db, None).is_err_and(MroError::is_cycle) {
             return Ok((SubclassOfType::subclass_of_unknown(), None));
         }
@@ -2728,7 +2720,7 @@ impl<'db> Type<'db> {
         if !alias.origin(db).is_known(db, KnownClass::Slice) {
             return None;
         }
-        let [start, stop, step] = alias.specialization(db).types(db).as_ref() else {
+        let [start, stop, step] = alias.specialization(db).types(db) else {
            return None;
        };

@@ -239,7 +239,6 @@ impl<'db> ClassBase<'db> {
                 let (class_literal, specialization) = class.class_literal(db);
                 class_literal
                     .try_mro(db, specialization)
-                    .as_ref()
                     .is_err_and(MroError::is_cycle)
             }
             ClassBase::Dynamic(_) | ClassBase::Generic(_) | ClassBase::Protocol => false,

@@ -223,8 +223,7 @@ impl Display for DisplayRepresentation<'_> {
             Type::StringLiteral(string) => string.display(self.db).fmt(f),
             Type::LiteralString => f.write_str("LiteralString"),
             Type::BytesLiteral(bytes) => {
-                let escape =
-                    AsciiEscape::with_preferred_quote(bytes.value(self.db).as_ref(), Quote::Double);
+                let escape = AsciiEscape::with_preferred_quote(bytes.value(self.db), Quote::Double);

                 escape.bytes_repr(TripleQuotes::No).write(f)
             }
@@ -15,7 +15,7 @@ use crate::{Db, FxOrderSet};
 /// containing context.
 #[salsa::interned(debug)]
 pub struct GenericContext<'db> {
-    #[return_ref]
+    #[returns(ref)]
     pub(crate) variables: FxOrderSet<TypeVarInstance<'db>>,
 }

@@ -216,7 +216,7 @@ impl<'db> GenericContext<'db> {
 #[salsa::interned(debug)]
 pub struct Specialization<'db> {
     pub(crate) generic_context: GenericContext<'db>,
-    #[return_ref]
+    #[returns(deref)]
     pub(crate) types: Box<[Type<'db>]>,
 }

@@ -249,7 +249,7 @@ impl<'db> Specialization<'db> {
     ) -> Self {
         let types: Box<[_]> = self
             .types(db)
-            .into_iter()
+            .iter()
             .map(|ty| ty.apply_type_mapping(db, type_mapping))
             .collect();
         Specialization::new(db, self.generic_context(db), types)

@@ -282,7 +282,7 @@ impl<'db> Specialization<'db> {
         // explicitly tells us which typevars are mapped.
         let types: Box<[_]> = self
             .types(db)
-            .into_iter()
+            .iter()
             .zip(other.types(db))
             .map(|(self_type, other_type)| match (self_type, other_type) {
                 (unknown, known) | (known, unknown) if unknown.is_unknown() => *known,
@@ -117,7 +117,7 @@ use super::{BoundSuperError, BoundSuperType, ClassBase};
 /// Infer all types for a [`ScopeId`], including all definitions and expressions in that scope.
 /// Use when checking a scope, or needing to provide a type for an arbitrary expression in the
 /// scope.
-#[salsa::tracked(return_ref, cycle_fn=scope_cycle_recover, cycle_initial=scope_cycle_initial)]
+#[salsa::tracked(returns(ref), cycle_fn=scope_cycle_recover, cycle_initial=scope_cycle_initial)]
 pub(crate) fn infer_scope_types<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> TypeInference<'db> {
     let file = scope.file(db);
     let _span = tracing::trace_span!("infer_scope_types", scope=?scope.as_id(), ?file).entered();

@@ -144,7 +144,7 @@ fn scope_cycle_initial<'db>(_db: &'db dyn Db, scope: ScopeId<'db>) -> TypeInfere
 /// Infer all types for a [`Definition`] (including sub-expressions).
 /// Use when resolving a symbol name use or public type of a symbol.
-#[salsa::tracked(return_ref, cycle_fn=definition_cycle_recover, cycle_initial=definition_cycle_initial)]
+#[salsa::tracked(returns(ref), cycle_fn=definition_cycle_recover, cycle_initial=definition_cycle_initial)]
 pub(crate) fn infer_definition_types<'db>(
     db: &'db dyn Db,
     definition: Definition<'db>,

@@ -182,7 +182,7 @@ fn definition_cycle_initial<'db>(
 ///
 /// Deferred expressions are type expressions (annotations, base classes, aliases...) in a stub
 /// file, or in a file with `from __future__ import annotations`, or stringified annotations.
-#[salsa::tracked(return_ref, cycle_fn=deferred_cycle_recover, cycle_initial=deferred_cycle_initial)]
+#[salsa::tracked(returns(ref), cycle_fn=deferred_cycle_recover, cycle_initial=deferred_cycle_initial)]
 pub(crate) fn infer_deferred_types<'db>(
     db: &'db dyn Db,
     definition: Definition<'db>,

@@ -218,7 +218,7 @@ fn deferred_cycle_initial<'db>(db: &'db dyn Db, definition: Definition<'db>) ->
 /// Use rarely; only for cases where we'd otherwise risk double-inferring an expression: RHS of an
 /// assignment, which might be unpacking/multi-target and thus part of multiple definitions, or a
 /// type narrowing guard expression (e.g. if statement test node).
-#[salsa::tracked(return_ref, cycle_fn=expression_cycle_recover, cycle_initial=expression_cycle_initial)]
+#[salsa::tracked(returns(ref), cycle_fn=expression_cycle_recover, cycle_initial=expression_cycle_initial)]
 pub(crate) fn infer_expression_types<'db>(
     db: &'db dyn Db,
     expression: Expression<'db>,

@@ -305,7 +305,7 @@ fn single_expression_cycle_initial<'db>(
 /// involved in an unpacking operation. It returns a result-like object that can be used to get the
 /// type of the variables involved in this unpacking along with any violations that are detected
 /// during this unpacking.
-#[salsa::tracked(return_ref)]
+#[salsa::tracked(returns(ref))]
 pub(super) fn infer_unpack_types<'db>(db: &'db dyn Db, unpack: Unpack<'db>) -> UnpackResult<'db> {
     let file = unpack.file(db);
     let _span =
@@ -876,7 +876,7 @@ impl<'db> TypeInferenceBuilder<'db> {
         }

         // (3) Check that the class's MRO is resolvable
-        match class.try_mro(self.db(), None).as_ref() {
+        match class.try_mro(self.db(), None) {
             Err(mro_error) => {
                 match mro_error.reason() {
                     MroErrorKind::DuplicateBases(duplicates) => {

@@ -3265,7 +3265,7 @@ impl<'db> TypeInferenceBuilder<'db> {
             | ast::Expr::Tuple(ast::ExprTuple { elts, .. }) => {
                 let mut assigned_tys = match assigned_ty {
                     Some(Type::Tuple(tuple)) => {
-                        Either::Left(tuple.elements(self.db()).into_iter().copied())
+                        Either::Left(tuple.elements(self.db()).iter().copied())
                     }
                     Some(_) | None => Either::Right(std::iter::empty()),
                 };

@@ -4864,7 +4864,7 @@ impl<'db> TypeInferenceBuilder<'db> {
         if !truthiness.is_always_true() {
             if let Some(message) = message
                 .and_then(Type::into_string_literal)
-                .map(|s| &**s.value(self.db()))
+                .map(|s| s.value(self.db()))
             {
                 builder.into_diagnostic(format_args!(
                     "Static assertion error: {message}"

@@ -5122,7 +5122,7 @@ impl<'db> TypeInferenceBuilder<'db> {
         let name_param = name_param
             .into_string_literal()
-            .map(|name| name.value(self.db()).as_ref());
+            .map(|name| name.value(self.db()));
         if name_param
             .is_none_or(|name_param| name_param != target.id)
         {
@@ -5856,13 +5856,13 @@ impl<'db> TypeInferenceBuilder<'db> {
             }

             (Type::BytesLiteral(lhs), Type::BytesLiteral(rhs), ast::Operator::Add) => {
-                let bytes = [&**lhs.value(self.db()), &**rhs.value(self.db())].concat();
+                let bytes = [lhs.value(self.db()), rhs.value(self.db())].concat();
                 Some(Type::bytes_literal(self.db(), &bytes))
             }

             (Type::StringLiteral(lhs), Type::StringLiteral(rhs), ast::Operator::Add) => {
                 let lhs_value = lhs.value(self.db()).to_string();
-                let rhs_value = rhs.value(self.db()).as_ref();
+                let rhs_value = rhs.value(self.db());
                 let ty = if lhs_value.len() + rhs_value.len() <= Self::MAX_STRING_LITERAL_SIZE {
                     Type::string_literal(self.db(), &(lhs_value + rhs_value))
                 } else {

@@ -6463,8 +6463,8 @@ impl<'db> TypeInferenceBuilder<'db> {
                     ast::CmpOp::LtE => Ok(Type::BooleanLiteral(s1 <= s2)),
                     ast::CmpOp::Gt => Ok(Type::BooleanLiteral(s1 > s2)),
                     ast::CmpOp::GtE => Ok(Type::BooleanLiteral(s1 >= s2)),
-                    ast::CmpOp::In => Ok(Type::BooleanLiteral(s2.contains(s1.as_ref()))),
-                    ast::CmpOp::NotIn => Ok(Type::BooleanLiteral(!s2.contains(s1.as_ref()))),
+                    ast::CmpOp::In => Ok(Type::BooleanLiteral(s2.contains(s1))),
+                    ast::CmpOp::NotIn => Ok(Type::BooleanLiteral(!s2.contains(s1))),
                     ast::CmpOp::Is => {
                         if s1 == s2 {
                             Ok(KnownClass::Bool.to_instance(self.db()))

@@ -6508,8 +6508,8 @@ impl<'db> TypeInferenceBuilder<'db> {
             ),

             (Type::BytesLiteral(salsa_b1), Type::BytesLiteral(salsa_b2)) => {
-                let b1 = &**salsa_b1.value(self.db());
-                let b2 = &**salsa_b2.value(self.db());
+                let b1 = salsa_b1.value(self.db());
+                let b2 = salsa_b2.value(self.db());
                 match op {
                     ast::CmpOp::Eq => Ok(Type::BooleanLiteral(b1 == b2)),
                     ast::CmpOp::NotEq => Ok(Type::BooleanLiteral(b1 != b2)),
@@ -16,7 +16,6 @@ use ruff_python_ast as ast;
 use ruff_python_ast::{BoolOp, ExprBoolOp};
 use rustc_hash::FxHashMap;
 use std::collections::hash_map::Entry;
-use std::sync::Arc;

 use super::UnionType;

@@ -65,8 +64,7 @@ pub(crate) fn infer_narrowing_constraint<'db>(
     }
 }

-#[allow(clippy::ref_option)]
-#[salsa::tracked(return_ref)]
+#[salsa::tracked(returns(as_ref))]
 fn all_narrowing_constraints_for_pattern<'db>(
     db: &'db dyn Db,
     pattern: PatternPredicate<'db>,

@@ -74,9 +72,8 @@ fn all_narrowing_constraints_for_pattern<'db>(
     NarrowingConstraintsBuilder::new(db, PredicateNode::Pattern(pattern), true).finish()
 }

-#[allow(clippy::ref_option)]
 #[salsa::tracked(
-    return_ref,
+    returns(as_ref),
     cycle_fn=constraints_for_expression_cycle_recover,
     cycle_initial=constraints_for_expression_cycle_initial,
 )]

@@ -87,9 +84,8 @@ fn all_narrowing_constraints_for_expression<'db>(
     NarrowingConstraintsBuilder::new(db, PredicateNode::Expression(expression), true).finish()
 }

-#[allow(clippy::ref_option)]
 #[salsa::tracked(
-    return_ref,
+    returns(as_ref),
     cycle_fn=negative_constraints_for_expression_cycle_recover,
     cycle_initial=negative_constraints_for_expression_cycle_initial,
 )]

@@ -100,8 +96,7 @@ fn all_negative_narrowing_constraints_for_expression<'db>(
     NarrowingConstraintsBuilder::new(db, PredicateNode::Expression(expression), false).finish()
 }

-#[allow(clippy::ref_option)]
-#[salsa::tracked(return_ref)]
+#[salsa::tracked(returns(as_ref))]
 fn all_negative_narrowing_constraints_for_pattern<'db>(
     db: &'db dyn Db,
     pattern: PatternPredicate<'db>,

@@ -109,7 +104,7 @@ fn all_negative_narrowing_constraints_for_pattern<'db>(
     NarrowingConstraintsBuilder::new(db, PredicateNode::Pattern(pattern), false).finish()
 }

-#[allow(clippy::ref_option)]
+#[expect(clippy::ref_option)]
 fn constraints_for_expression_cycle_recover<'db>(
     _db: &'db dyn Db,
     _value: &Option<NarrowingConstraints<'db>>,
@@ -286,7 +281,7 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
         expression: Expression<'db>,
         is_positive: bool,
     ) -> Option<NarrowingConstraints<'db>> {
-        let expression_node = expression.node_ref(self.db).node();
+        let expression_node = expression.node_ref(self.db);
         self.evaluate_expression_node_predicate(expression_node, expression, is_positive)
     }

@@ -344,7 +339,7 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
         })
     }

-    fn symbols(&self) -> Arc<SymbolTable> {
+    fn symbols(&self) -> &'db SymbolTable {
         symbol_table(self.db, self.scope())
     }

@@ -59,7 +59,7 @@ impl<'db> Deref for ProtocolClassLiteral<'db> {
 /// The interface of a protocol: the members of that protocol, and the types of those members.
 #[salsa::interned(debug)]
 pub(super) struct ProtocolInterface<'db> {
-    #[return_ref]
+    #[returns(ref)]
     _members: BTreeMap<Name, ProtocolMemberData<'db>>,
 }
@@ -150,20 +150,20 @@ pub(super) fn union_or_intersection_elements_ordering<'db>(
         (Type::BoundSuper(left), Type::BoundSuper(right)) => {
             (match (left.pivot_class(db), right.pivot_class(db)) {
-                (ClassBase::Class(left), ClassBase::Class(right)) => left.cmp(right),
+                (ClassBase::Class(left), ClassBase::Class(right)) => left.cmp(&right),
                 (ClassBase::Class(_), _) => Ordering::Less,
                 (_, ClassBase::Class(_)) => Ordering::Greater,
                 (ClassBase::Protocol, _) => Ordering::Less,
                 (_, ClassBase::Protocol) => Ordering::Greater,
-                (ClassBase::Generic(left), ClassBase::Generic(right)) => left.cmp(right),
+                (ClassBase::Generic(left), ClassBase::Generic(right)) => left.cmp(&right),
                 (ClassBase::Generic(_), _) => Ordering::Less,
                 (_, ClassBase::Generic(_)) => Ordering::Greater,
                 (ClassBase::Dynamic(left), ClassBase::Dynamic(right)) => {
-                    dynamic_elements_ordering(*left, *right)
+                    dynamic_elements_ordering(left, right)
                 }
             })
             .then_with(|| match (left.owner(db), right.owner(db)) {
-                (SuperOwnerKind::Class(left), SuperOwnerKind::Class(right)) => left.cmp(right),
+                (SuperOwnerKind::Class(left), SuperOwnerKind::Class(right)) => left.cmp(&right),
                 (SuperOwnerKind::Class(_), _) => Ordering::Less,
                 (_, SuperOwnerKind::Class(_)) => Ordering::Greater,
                 (SuperOwnerKind::Instance(left), SuperOwnerKind::Instance(right)) => {

@@ -172,7 +172,7 @@ pub(super) fn union_or_intersection_elements_ordering<'db>(
                 (SuperOwnerKind::Instance(_), _) => Ordering::Less,
                 (_, SuperOwnerKind::Instance(_)) => Ordering::Greater,
                 (SuperOwnerKind::Dynamic(left), SuperOwnerKind::Dynamic(right)) => {
-                    dynamic_elements_ordering(*left, *right)
+                    dynamic_elements_ordering(left, right)
                 }
             })
     }

@@ -226,7 +226,7 @@ impl<'db> Unpacker<'db> {
         // If there is a starred expression, it will consume all of the types at that location.
         let Some(starred_index) = targets.iter().position(ast::Expr::is_starred_expr) else {
             // Otherwise, the types will be unpacked 1-1 to the targets.
-            return Cow::Borrowed(tuple_ty.elements(self.db()).as_ref());
+            return Cow::Borrowed(tuple_ty.elements(self.db()));
         };

         if tuple_ty.len(self.db()) >= targets.len() - 1 {
|
@ -37,7 +37,7 @@ pub(crate) struct Unpack<'db> {
|
|||
/// The target expression that is being unpacked. For example, in `(a, b) = (1, 2)`, the target
|
||||
/// expression is `(a, b)`.
|
||||
#[no_eq]
|
||||
#[return_ref]
|
||||
#[returns(deref)]
|
||||
#[tracked]
|
||||
pub(crate) target: AstNodeRef<ast::Expr>,
|
||||
|
||||
|
@ -102,7 +102,7 @@ impl<'db> UnpackValue<'db> {
|
|||
|
||||
/// Returns the expression as an [`AnyNodeRef`].
|
||||
pub(crate) fn as_any_node_ref(self, db: &'db dyn Db) -> AnyNodeRef<'db> {
|
||||
self.expression().node_ref(db).node().into()
|
||||
self.expression().node_ref(db).into()
|
||||
}
|
||||
|
||||
pub(crate) const fn kind(self) -> UnpackKind {
|
||||
|
|
|
@ -90,8 +90,8 @@ impl SemanticDb for Db {
|
|||
!file.path(self).is_vendored_path()
|
||||
}
|
||||
|
||||
fn rule_selection(&self) -> Arc<RuleSelection> {
|
||||
self.rule_selection.clone()
|
||||
fn rule_selection(&self) -> &RuleSelection {
|
||||
&self.rule_selection
|
||||
}
|
||||
|
||||
fn lint_registry(&self) -> &LintRegistry {
|
||||
|
|
|
@@ -30,7 +30,7 @@ ty_python_semantic = { path = "../crates/ty_python_semantic" }
 ty_vendored = { path = "../crates/ty_vendored" }

 libfuzzer-sys = { git = "https://github.com/rust-fuzz/libfuzzer", default-features = false }
-salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "2c869364a9592d06fdf45c422e1e4a7265a8fe8a" }
+salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "7edce6e248f35c8114b4b021cdb474a3fb2813b3" }
 similar = { version = "2.5.0" }
 tracing = { version = "0.1.40" }

@@ -95,8 +95,8 @@ impl SemanticDb for TestDb {
         !file.path(self).is_vendored_path()
     }

-    fn rule_selection(&self) -> Arc<RuleSelection> {
-        self.rule_selection.clone()
+    fn rule_selection(&self) -> &RuleSelection {
+        &self.rule_selection
     }

     fn lint_registry(&self) -> &LintRegistry {