Update Rust toolchain to 1.88 and MSRV to 1.86 (#19011)

This commit is contained in:
Micha Reiser 2025-06-28 20:24:00 +02:00 committed by GitHub
parent c5995c40d3
commit 29927f2b59
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
59 changed files with 210 additions and 425 deletions

1
Cargo.lock generated
View file

@ -2703,6 +2703,7 @@ dependencies = [
"clap_complete_command",
"clearscreen",
"colored 3.0.0",
"dunce",
"filetime",
"globwalk",
"ignore",

View file

@ -5,7 +5,7 @@ resolver = "2"
[workspace.package]
# Please update rustfmt.toml when bumping the Rust edition
edition = "2024"
rust-version = "1.85"
rust-version = "1.86"
homepage = "https://docs.astral.sh/ruff"
documentation = "https://docs.astral.sh/ruff"
repository = "https://github.com/astral-sh/ruff"
@ -227,6 +227,7 @@ unnecessary_debug_formatting = "allow" # too many instances, the display also d
# Without the hashes we run into a `rustfmt` bug in some snapshot tests, see #13250
needless_raw_string_hashes = "allow"
# Disallowed restriction lints
ignore_without_reason = "allow" # Too many existing instances, and there's no auto-fix.
print_stdout = "warn"
print_stderr = "warn"
dbg_macro = "warn"

View file

@ -68,6 +68,7 @@ ruff_linter = { workspace = true, features = ["clap", "test-rules"] }
assert_fs = { workspace = true }
# Avoid writing colored snapshots when running tests from the terminal
colored = { workspace = true, features = ["no-color"] }
dunce = { workspace = true }
indoc = { workspace = true }
insta = { workspace = true, features = ["filters", "json"] }
insta-cmd = { workspace = true }

View file

@ -612,7 +612,7 @@ fn extend_passed_via_config_argument() {
#[test]
fn nonexistent_extend_file() -> Result<()> {
let tempdir = TempDir::new()?;
let project_dir = tempdir.path().canonicalize()?;
let project_dir = dunce::canonicalize(tempdir.path())?;
fs::write(
project_dir.join("ruff.toml"),
r#"
@ -653,7 +653,7 @@ extend = "ruff3.toml"
#[test]
fn circular_extend() -> Result<()> {
let tempdir = TempDir::new()?;
let project_path = tempdir.path().canonicalize()?;
let project_path = dunce::canonicalize(tempdir.path())?;
fs::write(
project_path.join("ruff.toml"),
@ -698,7 +698,7 @@ extend = "ruff.toml"
#[test]
fn parse_error_extends() -> Result<()> {
let tempdir = TempDir::new()?;
let project_path = tempdir.path().canonicalize()?;
let project_path = dunce::canonicalize(tempdir.path())?;
fs::write(
project_path.join("ruff.toml"),
@ -2130,7 +2130,7 @@ select = ["UP006"]
#[test]
fn requires_python_no_tool() -> Result<()> {
let tempdir = TempDir::new()?;
let project_dir = tempdir.path().canonicalize()?;
let project_dir = dunce::canonicalize(tempdir.path())?;
let ruff_toml = tempdir.path().join("pyproject.toml");
fs::write(
&ruff_toml,
@ -2441,7 +2441,7 @@ requires-python = ">= 3.11"
#[test]
fn requires_python_no_tool_target_version_override() -> Result<()> {
let tempdir = TempDir::new()?;
let project_dir = tempdir.path().canonicalize()?;
let project_dir = dunce::canonicalize(tempdir.path())?;
let ruff_toml = tempdir.path().join("pyproject.toml");
fs::write(
&ruff_toml,
@ -2752,7 +2752,7 @@ requires-python = ">= 3.11"
#[test]
fn requires_python_no_tool_with_check() -> Result<()> {
let tempdir = TempDir::new()?;
let project_dir = tempdir.path().canonicalize()?;
let project_dir = dunce::canonicalize(tempdir.path())?;
let ruff_toml = tempdir.path().join("pyproject.toml");
fs::write(
&ruff_toml,
@ -2797,7 +2797,7 @@ requires-python = ">= 3.11"
#[test]
fn requires_python_ruff_toml_no_target_fallback() -> Result<()> {
let tempdir = TempDir::new()?;
let project_dir = tempdir.path().canonicalize()?;
let project_dir = dunce::canonicalize(tempdir.path())?;
let ruff_toml = tempdir.path().join("ruff.toml");
fs::write(
&ruff_toml,
@ -3118,7 +3118,7 @@ from typing import Union;foo: Union[int, str] = 1
#[test]
fn requires_python_ruff_toml_no_target_fallback_check() -> Result<()> {
let tempdir = TempDir::new()?;
let project_dir = tempdir.path().canonicalize()?;
let project_dir = dunce::canonicalize(tempdir.path())?;
let ruff_toml = tempdir.path().join("ruff.toml");
fs::write(
&ruff_toml,
@ -3173,7 +3173,7 @@ from typing import Union;foo: Union[int, str] = 1
#[test]
fn requires_python_pyproject_toml_above() -> Result<()> {
let tempdir = TempDir::new()?;
let project_dir = tempdir.path().canonicalize()?;
let project_dir = dunce::canonicalize(tempdir.path())?;
let outer_pyproject = tempdir.path().join("pyproject.toml");
fs::write(
&outer_pyproject,
@ -3200,7 +3200,7 @@ from typing import Union;foo: Union[int, str] = 1
"#,
)?;
let testpy_canon = testpy.canonicalize()?;
let testpy_canon = dunce::canonicalize(testpy)?;
insta::with_settings!({
filters => vec![(tempdir_filter(&testpy_canon).as_str(), "[TMP]/foo/test.py"),(tempdir_filter(&project_dir).as_str(), "[TMP]/"),(r"(?m)^foo\\test","foo/test")]
@ -3499,7 +3499,7 @@ from typing import Union;foo: Union[int, str] = 1
#[test]
fn requires_python_pyproject_toml_above_with_tool() -> Result<()> {
let tempdir = TempDir::new()?;
let project_dir = tempdir.path().canonicalize()?;
let project_dir = dunce::canonicalize(tempdir.path())?;
let outer_pyproject = tempdir.path().join("pyproject.toml");
fs::write(
&outer_pyproject,
@ -3528,7 +3528,7 @@ from typing import Union;foo: Union[int, str] = 1
"#,
)?;
let testpy_canon = testpy.canonicalize()?;
let testpy_canon = dunce::canonicalize(testpy)?;
insta::with_settings!({
filters => vec![(tempdir_filter(&testpy_canon).as_str(), "[TMP]/foo/test.py"),(tempdir_filter(&project_dir).as_str(), "[TMP]/"),(r"foo\\","foo/")]
@ -3827,7 +3827,7 @@ from typing import Union;foo: Union[int, str] = 1
#[test]
fn requires_python_ruff_toml_above() -> Result<()> {
let tempdir = TempDir::new()?;
let project_dir = tempdir.path().canonicalize()?;
let project_dir = dunce::canonicalize(tempdir.path())?;
let ruff_toml = tempdir.path().join("ruff.toml");
fs::write(
&ruff_toml,
@ -3856,7 +3856,7 @@ from typing import Union;foo: Union[int, str] = 1
"#,
)?;
let testpy_canon = testpy.canonicalize()?;
let testpy_canon = dunce::canonicalize(testpy)?;
insta::with_settings!({
filters => vec![(tempdir_filter(&testpy_canon).as_str(), "[TMP]/foo/test.py"),(tempdir_filter(&project_dir).as_str(), "[TMP]/")]
@ -4441,7 +4441,7 @@ from typing import Union;foo: Union[int, str] = 1
#[test]
fn requires_python_extend_from_shared_config() -> Result<()> {
let tempdir = TempDir::new()?;
let project_dir = tempdir.path().canonicalize()?;
let project_dir = dunce::canonicalize(tempdir.path())?;
let ruff_toml = tempdir.path().join("ruff.toml");
fs::write(
&ruff_toml,
@ -4479,7 +4479,7 @@ from typing import Union;foo: Union[int, str] = 1
"#,
)?;
let testpy_canon = testpy.canonicalize()?;
let testpy_canon = dunce::canonicalize(testpy)?;
insta::with_settings!({
filters => vec![(tempdir_filter(&testpy_canon).as_str(), "[TMP]/test.py"),(tempdir_filter(&project_dir).as_str(), "[TMP]/")]

View file

@ -12,10 +12,8 @@ fn display_default_settings() -> anyhow::Result<()> {
// Tempdir path's on macos are symlinks, which doesn't play nicely with
// our snapshot filtering.
let project_dir = tempdir
.path()
.canonicalize()
.context("Failed to canonical tempdir path.")?;
let project_dir =
dunce::canonicalize(tempdir.path()).context("Failed to canonical tempdir path.")?;
std::fs::write(
project_dir.join("pyproject.toml"),

View file

@ -821,11 +821,7 @@ impl DisplaySourceAnnotation<'_> {
// Length of this annotation as displayed in the stderr output
fn len(&self) -> usize {
// Account for usize underflows
if self.range.1 > self.range.0 {
self.range.1 - self.range.0
} else {
self.range.0 - self.range.1
}
self.range.1.abs_diff(self.range.0)
}
fn takes_space(&self) -> bool {

View file

@ -637,6 +637,22 @@ pub trait FileResolver {
fn input(&self, file: File) -> Input;
}
impl<T> FileResolver for T
where
T: Db,
{
fn path(&self, file: File) -> &str {
relativize_path(self.system().current_directory(), file.path(self).as_str())
}
fn input(&self, file: File) -> Input {
Input {
text: source_text(self, file),
line_index: line_index(self, file),
}
}
}
impl FileResolver for &dyn Db {
fn path(&self, file: File) -> &str {
relativize_path(self.system().current_directory(), file.path(*self).as_str())
@ -708,7 +724,6 @@ fn relativize_path<'p>(cwd: &SystemPath, path: &'p str) -> &'p str {
#[cfg(test)]
mod tests {
use crate::Upcast;
use crate::diagnostic::{Annotation, DiagnosticId, Severity, Span};
use crate::files::system_path_to_file;
use crate::system::{DbWithWritableSystem, SystemPath};
@ -2221,7 +2236,7 @@ watermelon
///
/// (This will set the "printed" flag on `Diagnostic`.)
fn render(&self, diag: &Diagnostic) -> String {
diag.display(&self.db.upcast(), &self.config).to_string()
diag.display(&self.db, &self.config).to_string()
}
}

View file

@ -36,12 +36,6 @@ pub trait Db: salsa::Database {
fn python_version(&self) -> PythonVersion;
}
/// Trait for upcasting a reference to a base trait object.
pub trait Upcast<T: ?Sized> {
fn upcast(&self) -> &T;
fn upcast_mut(&mut self) -> &mut T;
}
/// Returns the maximum number of tasks that ty is allowed
/// to process in parallel.
///
@ -76,11 +70,11 @@ pub trait RustDoc {
mod tests {
use std::sync::{Arc, Mutex};
use crate::Db;
use crate::files::Files;
use crate::system::TestSystem;
use crate::system::{DbWithTestSystem, System};
use crate::vendored::VendoredFileSystem;
use crate::{Db, Upcast};
type Events = Arc<Mutex<Vec<salsa::Event>>>;
@ -153,15 +147,6 @@ mod tests {
}
}
impl Upcast<dyn Db> for TestDb {
fn upcast(&self) -> &(dyn Db + 'static) {
self
}
fn upcast_mut(&mut self) -> &mut (dyn Db + 'static) {
self
}
}
impl DbWithTestSystem for TestDb {
fn test_system(&self) -> &TestSystem {
&self.system

View file

@ -212,7 +212,7 @@ impl Display for Error {
path: Some(path),
err,
} => {
write!(f, "IO error for operation on {}: {}", path, err)
write!(f, "IO error for operation on {path}: {err}")
}
ErrorKind::Io { path: None, err } => err.fmt(f),
ErrorKind::NonUtf8Path { path } => {

View file

@ -2,10 +2,10 @@ use anyhow::{Context, Result};
use std::sync::Arc;
use zip::CompressionMethod;
use ruff_db::Db as SourceDb;
use ruff_db::files::{File, Files};
use ruff_db::system::{OsSystem, System, SystemPathBuf};
use ruff_db::vendored::{VendoredFileSystem, VendoredFileSystemBuilder};
use ruff_db::{Db as SourceDb, Upcast};
use ruff_python_ast::PythonVersion;
use ty_python_semantic::lint::{LintRegistry, RuleSelection};
use ty_python_semantic::{
@ -66,15 +66,6 @@ impl ModuleDb {
}
}
impl Upcast<dyn SourceDb> for ModuleDb {
fn upcast(&self) -> &(dyn SourceDb + 'static) {
self
}
fn upcast_mut(&mut self) -> &mut (dyn SourceDb + 'static) {
self
}
}
#[salsa::db]
impl SourceDb for ModuleDb {
fn vendored(&self) -> &VendoredFileSystem {

View file

@ -191,6 +191,6 @@ where
#[expect(unsafe_code)]
unsafe fn maybe_update(old_pointer: *mut Self, new_value: Self) -> bool {
let old_vec: &mut IndexVec<I, T> = unsafe { &mut *old_pointer };
unsafe { salsa::Update::maybe_update(&mut old_vec.raw, new_value.raw) }
unsafe { salsa::Update::maybe_update(&raw mut old_vec.raw, new_value.raw) }
}
}

View file

@ -1,9 +1,9 @@
use ruff_db::{Db as SourceDb, Upcast, files::File};
use ruff_db::{Db as SourceDb, files::File};
use crate::PyFormatOptions;
#[salsa::db]
pub trait Db: SourceDb + Upcast<dyn SourceDb> {
pub trait Db: SourceDb {
/// Returns the formatting options
fn format_options(&self, file: File) -> PyFormatOptions;
}

View file

@ -165,14 +165,14 @@ where
pub fn formatted_file(db: &dyn Db, file: File) -> Result<Option<String>, FormatModuleError> {
let options = db.format_options(file);
let parsed = parsed_module(db.upcast(), file).load(db.upcast());
let parsed = parsed_module(db, file).load(db);
if let Some(first) = parsed.errors().first() {
return Err(FormatModuleError::ParseError(first.clone()));
}
let comment_ranges = CommentRanges::from(parsed.tokens());
let source = source_text(db.upcast(), file);
let source = source_text(db, file);
let formatted = format_node(&parsed, &comment_ranges, &source, options)?;
let printed = formatted.print()?;

View file

@ -228,7 +228,11 @@ mod imp {
let mut qos_class_raw = libc::qos_class_t::QOS_CLASS_UNSPECIFIED;
#[expect(unsafe_code)]
let code = unsafe {
libc::pthread_get_qos_class_np(current_thread, &mut qos_class_raw, std::ptr::null_mut())
libc::pthread_get_qos_class_np(
current_thread,
&raw mut qos_class_raw,
std::ptr::null_mut(),
)
};
if code != 0 {

View file

@ -19,8 +19,8 @@ use colored::Colorize;
use crossbeam::channel as crossbeam_channel;
use rayon::ThreadPoolBuilder;
use ruff_db::diagnostic::{Diagnostic, DisplayDiagnosticConfig, Severity};
use ruff_db::max_parallelism;
use ruff_db::system::{OsSystem, SystemPath, SystemPathBuf};
use ruff_db::{Upcast, max_parallelism};
use salsa::plumbing::ZalsaDatabase;
use ty_project::metadata::options::ProjectOptionsOverrides;
use ty_project::watch::ProjectWatcher;
@ -308,11 +308,7 @@ impl MainLoop {
let diagnostics_count = result.len();
for diagnostic in result {
write!(
stdout,
"{}",
diagnostic.display(&db.upcast(), &display_config)
)?;
write!(stdout, "{}", diagnostic.display(db, &display_config))?;
max_severity = max_severity.max(diagnostic.severity());
}

View file

@ -3,12 +3,12 @@ use std::io::Write;
use std::time::{Duration, Instant};
use anyhow::{Context, anyhow};
use ruff_db::Db as _;
use ruff_db::files::{File, FileError, system_path_to_file};
use ruff_db::source::source_text;
use ruff_db::system::{
OsSystem, System, SystemPath, SystemPathBuf, UserConfigDirectoryOverrideGuard, file_time_now,
};
use ruff_db::{Db as _, Upcast};
use ruff_python_ast::PythonVersion;
use ty_project::metadata::options::{EnvironmentOptions, Options, ProjectOptionsOverrides};
use ty_project::metadata::pyproject::{PyProject, Tool};
@ -788,10 +788,7 @@ fn directory_moved_to_project() -> anyhow::Result<()> {
.with_context(|| "Failed to create __init__.py")?;
std::fs::write(a_original_path.as_std_path(), "").with_context(|| "Failed to create a.py")?;
let sub_a_module = resolve_module(
case.db().upcast(),
&ModuleName::new_static("sub.a").unwrap(),
);
let sub_a_module = resolve_module(case.db(), &ModuleName::new_static("sub.a").unwrap());
assert_eq!(sub_a_module, None);
case.assert_indexed_project_files([bar]);
@ -812,13 +809,7 @@ fn directory_moved_to_project() -> anyhow::Result<()> {
.expect("a.py to exist");
// `import sub.a` should now resolve
assert!(
resolve_module(
case.db().upcast(),
&ModuleName::new_static("sub.a").unwrap()
)
.is_some()
);
assert!(resolve_module(case.db(), &ModuleName::new_static("sub.a").unwrap()).is_some());
case.assert_indexed_project_files([bar, init_file, a_file]);
@ -834,13 +825,7 @@ fn directory_moved_to_trash() -> anyhow::Result<()> {
])?;
let bar = case.system_file(case.project_path("bar.py")).unwrap();
assert!(
resolve_module(
case.db().upcast(),
&ModuleName::new_static("sub.a").unwrap()
)
.is_some()
);
assert!(resolve_module(case.db(), &ModuleName::new_static("sub.a").unwrap()).is_some());
let sub_path = case.project_path("sub");
let init_file = case
@ -862,13 +847,7 @@ fn directory_moved_to_trash() -> anyhow::Result<()> {
case.apply_changes(changes, None);
// `import sub.a` should no longer resolve
assert!(
resolve_module(
case.db().upcast(),
&ModuleName::new_static("sub.a").unwrap()
)
.is_none()
);
assert!(resolve_module(case.db(), &ModuleName::new_static("sub.a").unwrap()).is_none());
assert!(!init_file.exists(case.db()));
assert!(!a_file.exists(case.db()));
@ -888,20 +867,8 @@ fn directory_renamed() -> anyhow::Result<()> {
let bar = case.system_file(case.project_path("bar.py")).unwrap();
assert!(
resolve_module(
case.db().upcast(),
&ModuleName::new_static("sub.a").unwrap()
)
.is_some()
);
assert!(
resolve_module(
case.db().upcast(),
&ModuleName::new_static("foo.baz").unwrap()
)
.is_none()
);
assert!(resolve_module(case.db(), &ModuleName::new_static("sub.a").unwrap()).is_some());
assert!(resolve_module(case.db(), &ModuleName::new_static("foo.baz").unwrap()).is_none());
let sub_path = case.project_path("sub");
let sub_init = case
@ -925,21 +892,9 @@ fn directory_renamed() -> anyhow::Result<()> {
case.apply_changes(changes, None);
// `import sub.a` should no longer resolve
assert!(
resolve_module(
case.db().upcast(),
&ModuleName::new_static("sub.a").unwrap()
)
.is_none()
);
assert!(resolve_module(case.db(), &ModuleName::new_static("sub.a").unwrap()).is_none());
// `import foo.baz` should now resolve
assert!(
resolve_module(
case.db().upcast(),
&ModuleName::new_static("foo.baz").unwrap()
)
.is_some()
);
assert!(resolve_module(case.db(), &ModuleName::new_static("foo.baz").unwrap()).is_some());
// The old paths are no longer tracked
assert!(!sub_init.exists(case.db()));
@ -972,13 +927,7 @@ fn directory_deleted() -> anyhow::Result<()> {
let bar = case.system_file(case.project_path("bar.py")).unwrap();
assert!(
resolve_module(
case.db().upcast(),
&ModuleName::new_static("sub.a").unwrap()
)
.is_some()
);
assert!(resolve_module(case.db(), &ModuleName::new_static("sub.a").unwrap()).is_some());
let sub_path = case.project_path("sub");
@ -998,13 +947,7 @@ fn directory_deleted() -> anyhow::Result<()> {
case.apply_changes(changes, None);
// `import sub.a` should no longer resolve
assert!(
resolve_module(
case.db().upcast(),
&ModuleName::new_static("sub.a").unwrap()
)
.is_none()
);
assert!(resolve_module(case.db(), &ModuleName::new_static("sub.a").unwrap()).is_none());
assert!(!init_file.exists(case.db()));
assert!(!a_file.exists(case.db()));
@ -1043,7 +986,7 @@ fn search_path() -> anyhow::Result<()> {
case.apply_changes(changes, None);
assert!(resolve_module(case.db().upcast(), &ModuleName::new_static("a").unwrap()).is_some());
assert!(resolve_module(case.db(), &ModuleName::new_static("a").unwrap()).is_some());
case.assert_indexed_project_files([case.system_file(case.project_path("bar.py")).unwrap()]);
Ok(())
@ -1056,7 +999,7 @@ fn add_search_path() -> anyhow::Result<()> {
let site_packages = case.project_path("site_packages");
std::fs::create_dir_all(site_packages.as_std_path())?;
assert!(resolve_module(case.db().upcast(), &ModuleName::new_static("a").unwrap()).is_none());
assert!(resolve_module(case.db(), &ModuleName::new_static("a").unwrap()).is_none());
// Register site-packages as a search path.
case.update_options(Options {
@ -1074,7 +1017,7 @@ fn add_search_path() -> anyhow::Result<()> {
case.apply_changes(changes, None);
assert!(resolve_module(case.db().upcast(), &ModuleName::new_static("a").unwrap()).is_some());
assert!(resolve_module(case.db(), &ModuleName::new_static("a").unwrap()).is_some());
Ok(())
}
@ -1444,11 +1387,8 @@ mod unix {
Ok(())
})?;
let baz = resolve_module(
case.db().upcast(),
&ModuleName::new_static("bar.baz").unwrap(),
)
.expect("Expected bar.baz to exist in site-packages.");
let baz = resolve_module(case.db(), &ModuleName::new_static("bar.baz").unwrap())
.expect("Expected bar.baz to exist in site-packages.");
let baz_project = case.project_path("bar/baz.py");
let baz_file = baz.file().unwrap();
@ -1523,11 +1463,8 @@ mod unix {
Ok(())
})?;
let baz = resolve_module(
case.db().upcast(),
&ModuleName::new_static("bar.baz").unwrap(),
)
.expect("Expected bar.baz to exist in site-packages.");
let baz = resolve_module(case.db(), &ModuleName::new_static("bar.baz").unwrap())
.expect("Expected bar.baz to exist in site-packages.");
let baz_file = baz.file().unwrap();
let bar_baz = case.project_path("bar/baz.py");
@ -1631,11 +1568,8 @@ mod unix {
Ok(())
})?;
let baz = resolve_module(
case.db().upcast(),
&ModuleName::new_static("bar.baz").unwrap(),
)
.expect("Expected bar.baz to exist in site-packages.");
let baz = resolve_module(case.db(), &ModuleName::new_static("bar.baz").unwrap())
.expect("Expected bar.baz to exist in site-packages.");
let baz_site_packages_path =
case.project_path(".venv/lib/python3.12/site-packages/bar/baz.py");
let baz_site_packages = case.system_file(&baz_site_packages_path).unwrap();

View file

@ -11,7 +11,7 @@ use crate::Db;
use crate::find_node::covering_node;
pub fn completion(db: &dyn Db, file: File, offset: TextSize) -> Vec<Completion> {
let parsed = parsed_module(db.upcast(), file).load(db.upcast());
let parsed = parsed_module(db, file).load(db);
let Some(target_token) = CompletionTargetTokens::find(&parsed, offset) else {
return vec![];
@ -20,7 +20,7 @@ pub fn completion(db: &dyn Db, file: File, offset: TextSize) -> Vec<Completion>
return vec![];
};
let model = SemanticModel::new(db.upcast(), file);
let model = SemanticModel::new(db, file);
let mut completions = match target {
CompletionTargetAst::ObjectDot { expr } => model.attribute_completions(expr),
CompletionTargetAst::ImportFrom { import, name } => model.import_completions(import, name),

View file

@ -1,18 +1,17 @@
use ruff_db::{Db as SourceDb, Upcast};
use ty_python_semantic::Db as SemanticDb;
#[salsa::db]
pub trait Db: SemanticDb + Upcast<dyn SemanticDb> + Upcast<dyn SourceDb> {}
pub trait Db: SemanticDb {}
#[cfg(test)]
pub(crate) mod tests {
use std::sync::{Arc, Mutex};
use super::Db;
use ruff_db::Db as SourceDb;
use ruff_db::files::{File, Files};
use ruff_db::system::{DbWithTestSystem, System, TestSystem};
use ruff_db::vendored::VendoredFileSystem;
use ruff_db::{Db as SourceDb, Upcast};
use ty_python_semantic::lint::{LintRegistry, RuleSelection};
use ty_python_semantic::{Db as SemanticDb, Program, default_lint_registry};
@ -95,25 +94,6 @@ pub(crate) mod tests {
}
}
impl Upcast<dyn SourceDb> for TestDb {
fn upcast(&self) -> &(dyn SourceDb + 'static) {
self
}
fn upcast_mut(&mut self) -> &mut (dyn SourceDb + 'static) {
self
}
}
impl Upcast<dyn SemanticDb> for TestDb {
fn upcast(&self) -> &(dyn SemanticDb + 'static) {
self
}
fn upcast_mut(&mut self) -> &mut dyn SemanticDb {
self
}
}
#[salsa::db]
impl SemanticDb for TestDb {
fn is_file_open(&self, file: File) -> bool {

View file

@ -13,16 +13,13 @@ pub fn goto_type_definition(
file: File,
offset: TextSize,
) -> Option<RangedValue<NavigationTargets>> {
let module = parsed_module(db.upcast(), file).load(db.upcast());
let module = parsed_module(db, file).load(db);
let goto_target = find_goto_target(&module, offset)?;
let model = SemanticModel::new(db.upcast(), file);
let model = SemanticModel::new(db, file);
let ty = goto_target.inferred_type(&model)?;
tracing::debug!(
"Inferred type of covering node is {}",
ty.display(db.upcast())
);
tracing::debug!("Inferred type of covering node is {}", ty.display(db));
let navigation_targets = ty.navigation_targets(db);

View file

@ -9,7 +9,7 @@ use ty_python_semantic::SemanticModel;
use ty_python_semantic::types::Type;
pub fn hover(db: &dyn Db, file: File, offset: TextSize) -> Option<RangedValue<Hover<'_>>> {
let parsed = parsed_module(db.upcast(), file).load(db.upcast());
let parsed = parsed_module(db, file).load(db);
let goto_target = find_goto_target(&parsed, offset)?;
if let GotoTarget::Expression(expr) = goto_target {
@ -18,13 +18,10 @@ pub fn hover(db: &dyn Db, file: File, offset: TextSize) -> Option<RangedValue<Ho
}
}
let model = SemanticModel::new(db.upcast(), file);
let model = SemanticModel::new(db, file);
let ty = goto_target.inferred_type(&model)?;
tracing::debug!(
"Inferred type of covering node is {}",
ty.display(db.upcast())
);
tracing::debug!("Inferred type of covering node is {}", ty.display(db));
// TODO: Add documentation of the symbol (not the type's definition).
// TODO: Render the symbol's signature instead of just its type.
@ -121,7 +118,7 @@ impl fmt::Display for DisplayHoverContent<'_, '_> {
match self.content {
HoverContent::Type(ty) => self
.kind
.fenced_code_block(ty.display(self.db.upcast()), "text")
.fenced_code_block(ty.display(self.db), "text")
.fmt(f),
}
}
@ -132,7 +129,6 @@ mod tests {
use crate::tests::{CursorTest, cursor_test};
use crate::{MarkupKind, hover};
use insta::assert_snapshot;
use ruff_db::Upcast;
use ruff_db::diagnostic::{
Annotation, Diagnostic, DiagnosticFormat, DiagnosticId, DisplayDiagnosticConfig, LintName,
Severity, Span,
@ -774,7 +770,7 @@ mod tests {
.message("Cursor offset"),
);
write!(buf, "{}", diagnostic.display(&self.db.upcast(), &config)).unwrap();
write!(buf, "{}", diagnostic.display(&self.db, &config)).unwrap();
buf
}

View file

@ -42,10 +42,10 @@ impl fmt::Display for DisplayInlayHint<'_, '_> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match self.hint {
InlayHintContent::Type(ty) => {
write!(f, ": {}", ty.display(self.db.upcast()))
write!(f, ": {}", ty.display(self.db))
}
InlayHintContent::ReturnType(ty) => {
write!(f, " -> {}", ty.display(self.db.upcast()))
write!(f, " -> {}", ty.display(self.db))
}
}
}
@ -54,7 +54,7 @@ impl fmt::Display for DisplayInlayHint<'_, '_> {
pub fn inlay_hints(db: &dyn Db, file: File, range: TextRange) -> Vec<InlayHint<'_>> {
let mut visitor = InlayHintVisitor::new(db, file, range);
let ast = parsed_module(db.upcast(), file).load(db.upcast());
let ast = parsed_module(db, file).load(db);
visitor.visit_body(ast.suite());
@ -71,7 +71,7 @@ struct InlayHintVisitor<'db> {
impl<'db> InlayHintVisitor<'db> {
fn new(db: &'db dyn Db, file: File, range: TextRange) -> Self {
Self {
model: SemanticModel::new(db.upcast(), file),
model: SemanticModel::new(db, file),
hints: Vec::new(),
in_assignment: false,
range,

View file

@ -153,15 +153,13 @@ impl HasNavigationTargets for Type<'_> {
fn navigation_targets(&self, db: &dyn Db) -> NavigationTargets {
match self {
Type::Union(union) => union
.iter(db.upcast())
.iter(db)
.flat_map(|target| target.navigation_targets(db))
.collect(),
Type::Intersection(intersection) => {
// Only consider the positive elements because the negative elements are mainly from narrowing constraints.
let mut targets = intersection
.iter_positive(db.upcast())
.filter(|ty| !ty.is_unknown());
let mut targets = intersection.iter_positive(db).filter(|ty| !ty.is_unknown());
let Some(first) = targets.next() else {
return NavigationTargets::empty();
@ -178,7 +176,7 @@ impl HasNavigationTargets for Type<'_> {
}
ty => ty
.definition(db.upcast())
.definition(db)
.map(|definition| definition.navigation_targets(db))
.unwrap_or_else(NavigationTargets::empty),
}
@ -187,13 +185,13 @@ impl HasNavigationTargets for Type<'_> {
impl HasNavigationTargets for TypeDefinition<'_> {
fn navigation_targets(&self, db: &dyn Db) -> NavigationTargets {
let Some(full_range) = self.full_range(db.upcast()) else {
let Some(full_range) = self.full_range(db) else {
return NavigationTargets::empty();
};
NavigationTargets::single(NavigationTarget {
file: full_range.file(),
focus_range: self.focus_range(db.upcast()).unwrap_or(full_range).range(),
focus_range: self.focus_range(db).unwrap_or(full_range).range(),
full_range: full_range.range(),
})
}
@ -203,10 +201,10 @@ impl HasNavigationTargets for TypeDefinition<'_> {
mod tests {
use crate::db::tests::TestDb;
use insta::internals::SettingsBindDropGuard;
use ruff_db::Db;
use ruff_db::diagnostic::{Diagnostic, DiagnosticFormat, DisplayDiagnosticConfig};
use ruff_db::files::{File, system_path_to_file};
use ruff_db::system::{DbWithWritableSystem, SystemPath, SystemPathBuf};
use ruff_db::{Db, Upcast};
use ruff_text_size::TextSize;
use ty_python_semantic::{
Program, ProgramSettings, PythonPlatform, PythonVersionWithSource, SearchPathSettings,
@ -253,7 +251,7 @@ mod tests {
.format(DiagnosticFormat::Full);
for diagnostic in diagnostics {
let diag = diagnostic.into_diagnostic();
write!(buf, "{}", diag.display(&self.db.upcast(), &config)).unwrap();
write!(buf, "{}", diag.display(&self.db, &config)).unwrap();
}
buf

View file

@ -5,11 +5,11 @@ use std::{cmp, fmt};
use crate::metadata::settings::file_settings;
use crate::{DEFAULT_LINT_REGISTRY, DummyReporter};
use crate::{Project, ProjectMetadata, Reporter};
use ruff_db::Db as SourceDb;
use ruff_db::diagnostic::Diagnostic;
use ruff_db::files::{File, Files};
use ruff_db::system::System;
use ruff_db::vendored::VendoredFileSystem;
use ruff_db::{Db as SourceDb, Upcast};
use salsa::Event;
use salsa::plumbing::ZalsaDatabase;
use ty_ide::Db as IdeDb;
@ -19,7 +19,7 @@ use ty_python_semantic::{Db as SemanticDb, Program};
mod changes;
#[salsa::db]
pub trait Db: SemanticDb + Upcast<dyn SemanticDb> {
pub trait Db: SemanticDb {
fn project(&self) -> Project;
}
@ -276,46 +276,6 @@ impl SalsaMemoryDump {
}
}
impl Upcast<dyn SemanticDb> for ProjectDatabase {
fn upcast(&self) -> &(dyn SemanticDb + 'static) {
self
}
fn upcast_mut(&mut self) -> &mut (dyn SemanticDb + 'static) {
self
}
}
impl Upcast<dyn SourceDb> for ProjectDatabase {
fn upcast(&self) -> &(dyn SourceDb + 'static) {
self
}
fn upcast_mut(&mut self) -> &mut (dyn SourceDb + 'static) {
self
}
}
impl Upcast<dyn IdeDb> for ProjectDatabase {
fn upcast(&self) -> &(dyn IdeDb + 'static) {
self
}
fn upcast_mut(&mut self) -> &mut (dyn IdeDb + 'static) {
self
}
}
impl Upcast<dyn Db> for ProjectDatabase {
fn upcast(&self) -> &(dyn Db + 'static) {
self
}
fn upcast_mut(&mut self) -> &mut (dyn Db + 'static) {
self
}
}
#[salsa::db]
impl IdeDb for ProjectDatabase {}
@ -371,7 +331,6 @@ impl Db for ProjectDatabase {
#[cfg(feature = "format")]
mod format {
use crate::ProjectDatabase;
use ruff_db::Upcast;
use ruff_db::files::File;
use ruff_python_formatter::{Db as FormatDb, PyFormatOptions};
@ -382,28 +341,18 @@ mod format {
PyFormatOptions::from_source_type(source_ty)
}
}
impl Upcast<dyn FormatDb> for ProjectDatabase {
fn upcast(&self) -> &(dyn FormatDb + 'static) {
self
}
fn upcast_mut(&mut self) -> &mut (dyn FormatDb + 'static) {
self
}
}
}
#[cfg(test)]
pub(crate) mod tests {
use std::sync::{Arc, Mutex};
use ruff_db::Db as SourceDb;
use ruff_db::files::Files;
use ruff_db::system::{DbWithTestSystem, System, TestSystem};
use ruff_db::vendored::VendoredFileSystem;
use ruff_db::{Db as SourceDb, Upcast};
use ty_python_semantic::Program;
use ty_python_semantic::lint::{LintRegistry, RuleSelection};
use ty_python_semantic::{Db as SemanticDb, Program};
use crate::DEFAULT_LINT_REGISTRY;
use crate::db::Db;
@ -484,24 +433,6 @@ pub(crate) mod tests {
}
}
impl Upcast<dyn SemanticDb> for TestDb {
fn upcast(&self) -> &(dyn SemanticDb + 'static) {
self
}
fn upcast_mut(&mut self) -> &mut (dyn SemanticDb + 'static) {
self
}
}
impl Upcast<dyn SourceDb> for TestDb {
fn upcast(&self) -> &(dyn SourceDb + 'static) {
self
}
fn upcast_mut(&mut self) -> &mut (dyn SourceDb + 'static) {
self
}
}
#[salsa::db]
impl ty_python_semantic::Db for TestDb {
fn is_file_open(&self, file: ruff_db::files::File) -> bool {

View file

@ -477,7 +477,7 @@ impl Project {
let mut diagnostics: Vec<Diagnostic> = Vec::new();
// Abort checking if there are IO errors.
let source = source_text(db.upcast(), file);
let source = source_text(db, file);
if let Some(read_error) = source.read_error() {
diagnostics.push(
@ -490,9 +490,9 @@ impl Project {
return diagnostics;
}
let parsed = parsed_module(db.upcast(), file);
let parsed = parsed_module(db, file);
let parsed_ref = parsed.load(db.upcast());
let parsed_ref = parsed.load(db);
diagnostics.extend(
parsed_ref
.errors()
@ -502,17 +502,13 @@ impl Project {
diagnostics.extend(parsed_ref.unsupported_syntax_errors().iter().map(|error| {
let mut error = create_unsupported_syntax_diagnostic(file, error);
add_inferred_python_version_hint_to_diagnostic(
db.upcast(),
&mut error,
"parsing syntax",
);
add_inferred_python_version_hint_to_diagnostic(db, &mut error, "parsing syntax");
error
}));
{
let db = AssertUnwindSafe(db);
match catch(&**db, file, || check_types(db.upcast(), file)) {
match catch(&**db, file, || check_types(*db, file)) {
Ok(Some(type_check_diagnostics)) => {
diagnostics.extend(type_check_diagnostics.into_iter().cloned());
}

View file

@ -305,7 +305,7 @@ impl Options {
if let Some(file) = src_root
.source()
.file()
.and_then(|path| system_path_to_file(db.upcast(), path).ok())
.and_then(|path| system_path_to_file(db, path).ok())
{
diagnostic = diagnostic.with_annotation(Some(Annotation::primary(
Span::from(file).with_optional_range(src_root.range()),
@ -714,7 +714,7 @@ impl Rules {
// file in that case.
let file = source
.file()
.and_then(|path| system_path_to_file(db.upcast(), path).ok());
.and_then(|path| system_path_to_file(db, path).ok());
// TODO: Add a note if the value was configured on the CLI
let diagnostic = match error {
@ -808,7 +808,7 @@ fn build_include_filter(
// Add source annotation if we have source information
if let Some(source_file) = include_patterns.source().file() {
if let Ok(file) = system_path_to_file(db.upcast(), source_file) {
if let Ok(file) = system_path_to_file(db, source_file) {
let annotation = Annotation::primary(
Span::from(file).with_optional_range(include_patterns.range()),
)
@ -832,7 +832,7 @@ fn build_include_filter(
match pattern.source() {
ValueSource::File(file_path) => {
if let Ok(file) = system_path_to_file(db.upcast(), &**file_path) {
if let Ok(file) = system_path_to_file(db, &**file_path) {
diagnostic
.with_message("Invalid include pattern")
.with_annotation(Some(
@ -914,7 +914,7 @@ fn build_exclude_filter(
match exclude.source() {
ValueSource::File(file_path) => {
if let Ok(file) = system_path_to_file(db.upcast(), &**file_path) {
if let Ok(file) = system_path_to_file(db, &**file_path) {
diagnostic
.with_message("Invalid exclude pattern")
.with_annotation(Some(
@ -1176,7 +1176,7 @@ impl RangedValue<OverrideOptions> {
// Add source annotation if we have source information
if let Some(source_file) = self.source().file() {
if let Ok(file) = system_path_to_file(db.upcast(), source_file) {
if let Ok(file) = system_path_to_file(db, source_file) {
let annotation =
Annotation::primary(Span::from(file).with_optional_range(self.range()))
.message("This overrides section configures no rules");
@ -1227,7 +1227,7 @@ impl RangedValue<OverrideOptions> {
// Add source annotation if we have source information
if let Some(source_file) = self.source().file() {
if let Ok(file) = system_path_to_file(db.upcast(), source_file) {
if let Ok(file) = system_path_to_file(db, source_file) {
let annotation =
Annotation::primary(Span::from(file).with_optional_range(self.range()))
.message("This overrides section applies to all files");
@ -1301,7 +1301,7 @@ pub struct ToSettingsError {
impl ToSettingsError {
pub fn pretty<'a>(&'a self, db: &'a dyn Db) -> impl fmt::Display + use<'a> {
struct DisplayPretty<'a> {
db: &'a dyn Db,
db: &'a dyn ruff_db::Db,
error: &'a ToSettingsError,
}
@ -1317,7 +1317,7 @@ impl ToSettingsError {
self.error
.diagnostic
.to_diagnostic()
.display(&self.db.upcast(), &display_config)
.display(&self.db, &display_config)
)
}
}

View file

@ -270,7 +270,7 @@ impl<'a> ProjectFilesWalker<'a> {
.filter_map(move |path| {
// If this returns `None`, then the file was deleted between the `walk_directory` call and now.
// We can ignore this.
system_path_to_file(db.upcast(), &path).ok()
system_path_to_file(db, &path).ok()
})
.collect(),
diagnostics,
@ -283,7 +283,7 @@ impl<'a> ProjectFilesWalker<'a> {
let mut files = FxHashSet::with_capacity_and_hasher(paths.len(), FxBuildHasher);
for path in paths {
if let Ok(file) = system_path_to_file(db.upcast(), &path) {
if let Ok(file) = system_path_to_file(db, &path) {
files.insert(file);
}
}

View file

@ -4,7 +4,6 @@ use std::hash::Hasher;
use tracing::info;
use ruff_cache::{CacheKey, CacheKeyHasher};
use ruff_db::Upcast;
use ruff_db::system::{SystemPath, SystemPathBuf};
use ty_python_semantic::system_module_search_paths;
@ -41,7 +40,7 @@ impl ProjectWatcher {
}
pub fn update(&mut self, db: &ProjectDatabase) {
let search_paths: Vec<_> = system_module_search_paths(db.upcast()).collect();
let search_paths: Vec<_> = system_module_search_paths(db).collect();
let project_path = db.project().root(db);
let new_cache_key = Self::compute_cache_key(project_path, &search_paths);

View file

@ -1,10 +1,10 @@
use crate::lint::{LintRegistry, RuleSelection};
use ruff_db::Db as SourceDb;
use ruff_db::files::File;
use ruff_db::{Db as SourceDb, Upcast};
/// Database giving access to semantic information about a Python program.
#[salsa::db]
pub trait Db: SourceDb + Upcast<dyn SourceDb> {
pub trait Db: SourceDb {
fn is_file_open(&self, file: File) -> bool;
/// Resolves the rule selection for a given file.
@ -26,12 +26,12 @@ pub(crate) mod tests {
use super::Db;
use crate::lint::{LintRegistry, RuleSelection};
use anyhow::Context;
use ruff_db::Db as SourceDb;
use ruff_db::files::{File, Files};
use ruff_db::system::{
DbWithTestSystem, DbWithWritableSystem as _, System, SystemPath, SystemPathBuf, TestSystem,
};
use ruff_db::vendored::VendoredFileSystem;
use ruff_db::{Db as SourceDb, Upcast};
use ruff_python_ast::PythonVersion;
type Events = Arc<Mutex<Vec<salsa::Event>>>;
@ -112,15 +112,6 @@ pub(crate) mod tests {
}
}
impl Upcast<dyn SourceDb> for TestDb {
fn upcast(&self) -> &(dyn SourceDb + 'static) {
self
}
fn upcast_mut(&mut self) -> &mut (dyn SourceDb + 'static) {
self
}
}
#[salsa::db]
impl Db for TestDb {
fn is_file_open(&self, file: File) -> bool {

View file

@ -32,7 +32,7 @@ fn dunder_all_names_cycle_initial(_db: &dyn Db, _file: File) -> Option<FxHashSet
pub(crate) fn dunder_all_names(db: &dyn Db, file: File) -> Option<FxHashSet<Name>> {
let _span = tracing::trace_span!("dunder_all_names", file=?file.path(db)).entered();
let module = parsed_module(db.upcast(), file).load(db.upcast());
let module = parsed_module(db, file).load(db);
let index = semantic_index(db, file);
let mut collector = DunderAllNamesCollector::new(db, file, index);
collector.visit_body(module.suite());

View file

@ -76,7 +76,7 @@ impl ModulePath {
| SearchPathInner::FirstParty(search_path)
| SearchPathInner::SitePackages(search_path)
| SearchPathInner::Editable(search_path) => {
system_path_to_file(resolver.db.upcast(), search_path.join(relative_path))
system_path_to_file(resolver.db, search_path.join(relative_path))
== Err(FileError::IsADirectory)
}
SearchPathInner::StandardLibraryCustom(stdlib_root) => {
@ -84,7 +84,7 @@ impl ModulePath {
TypeshedVersionsQueryResult::DoesNotExist => false,
TypeshedVersionsQueryResult::Exists
| TypeshedVersionsQueryResult::MaybeExists => {
system_path_to_file(resolver.db.upcast(), stdlib_root.join(relative_path))
system_path_to_file(resolver.db, stdlib_root.join(relative_path))
== Err(FileError::IsADirectory)
}
}
@ -115,16 +115,15 @@ impl ModulePath {
| SearchPathInner::Editable(search_path) => {
let absolute_path = search_path.join(relative_path);
system_path_to_file(resolver.db.upcast(), absolute_path.join("__init__.py")).is_ok()
|| system_path_to_file(resolver.db.upcast(), absolute_path.join("__init__.pyi"))
.is_ok()
system_path_to_file(resolver.db, absolute_path.join("__init__.py")).is_ok()
|| system_path_to_file(resolver.db, absolute_path.join("__init__.pyi")).is_ok()
}
SearchPathInner::StandardLibraryCustom(search_path) => {
match query_stdlib_version(relative_path, resolver) {
TypeshedVersionsQueryResult::DoesNotExist => false,
TypeshedVersionsQueryResult::Exists
| TypeshedVersionsQueryResult::MaybeExists => system_path_to_file(
resolver.db.upcast(),
resolver.db,
search_path.join(relative_path).join("__init__.pyi"),
)
.is_ok(),
@ -161,7 +160,7 @@ impl ModulePath {
#[must_use]
pub(super) fn to_file(&self, resolver: &ResolverContext) -> Option<File> {
let db = resolver.db.upcast();
let db = resolver.db;
let ModulePath {
search_path,
relative_path,

View file

@ -73,7 +73,7 @@ pub(crate) fn path_to_module(db: &dyn Db, path: &FilePath) -> Option<Module> {
// all arguments are Salsa ingredients (something stored in Salsa). `Path`s aren't salsa ingredients but
// `VfsFile` is. So what we do here is to retrieve the `path`'s `VfsFile` so that we can make
// use of Salsa's caching and invalidation.
let file = path.to_file(db.upcast())?;
let file = path.to_file(db)?;
file_to_module(db, file)
}
@ -99,7 +99,7 @@ impl std::fmt::Display for SystemOrVendoredPathRef<'_> {
pub(crate) fn file_to_module(db: &dyn Db, file: File) -> Option<Module> {
let _span = tracing::trace_span!("file_to_module", ?file).entered();
let path = match file.path(db.upcast()) {
let path = match file.path(db) {
FilePath::System(system) => SystemOrVendoredPathRef::System(system),
FilePath::Vendored(vendored) => SystemOrVendoredPathRef::Vendored(vendored),
FilePath::SystemVirtual(_) => return None,
@ -260,7 +260,7 @@ impl SearchPaths {
for path in self.static_paths.iter().chain(self.site_packages.iter()) {
if let Some(system_path) = path.as_system_path() {
if !path.is_first_party() {
files.try_add_root(db.upcast(), system_path, FileRootKind::LibrarySearchPath);
files.try_add_root(db, system_path, FileRootKind::LibrarySearchPath);
}
}
}
@ -332,7 +332,7 @@ pub(crate) fn dynamic_resolution_paths(db: &dyn Db) -> Vec<SearchPath> {
}
let site_packages_root = files
.root(db.upcast(), site_packages_dir)
.root(db, site_packages_dir)
.expect("Site-package root to have been created");
// This query needs to be re-executed each time a `.pth` file
@ -340,7 +340,7 @@ pub(crate) fn dynamic_resolution_paths(db: &dyn Db) -> Vec<SearchPath> {
// However, we don't use Salsa queries to read the source text of `.pth` files;
// we use the APIs on the `System` trait directly. As such, add a dependency on the
// site-package directory's revision.
site_packages_root.revision(db.upcast());
site_packages_root.revision(db);
dynamic_paths.push(site_packages_search_path.clone());

View file

@ -329,7 +329,7 @@ pub(crate) fn imported_symbol<'db>(
requires_explicit_reexport: Option<RequiresExplicitReExport>,
) -> PlaceAndQualifiers<'db> {
let requires_explicit_reexport = requires_explicit_reexport.unwrap_or_else(|| {
if file.is_stub(db.upcast()) {
if file.is_stub(db) {
RequiresExplicitReExport::Yes
} else {
RequiresExplicitReExport::No
@ -717,7 +717,7 @@ fn place_by_id<'db>(
.expr
.is_name_and(|name| matches!(name, "__slots__" | "TYPE_CHECKING"));
if scope.file(db).is_stub(db.upcast()) {
if scope.file(db).is_stub(db) {
// We generally trust module-level undeclared places in stubs and do not union
// with `Unknown`. If we don't do this, simple aliases like `IOError = OSError` in
// stubs would result in `IOError` being a union of `OSError` and `Unknown`, which

View file

@ -137,7 +137,7 @@ impl PythonVersionFileSource {
/// Useful for subdiagnostics when informing the user
/// what the inferred Python version of their project is.
pub(crate) fn span(&self, db: &dyn Db) -> Option<Span> {
let file = system_path_to_file(db.upcast(), &*self.path).ok()?;
let file = system_path_to_file(db, &*self.path).ok()?;
Some(Span::from(file).with_optional_range(self.range))
}
}

View file

@ -12,7 +12,7 @@ use ruff_python_ast::{
pub fn pull_types(db: &dyn Db, file: File) {
let mut visitor = PullTypesVisitor::new(db, file);
let ast = parsed_module(db.upcast(), file).load(db.upcast());
let ast = parsed_module(db, file).load(db);
visitor.visit_body(ast.suite());
}

View file

@ -51,7 +51,7 @@ type PlaceSet = hashbrown::HashTable<ScopedPlaceId>;
pub(crate) fn semantic_index(db: &dyn Db, file: File) -> SemanticIndex<'_> {
let _span = tracing::trace_span!("semantic_index", ?file).entered();
let module = parsed_module(db.upcast(), file).load(db.upcast());
let module = parsed_module(db, file).load(db);
SemanticIndexBuilder::new(db, file, &module).build()
}
@ -133,7 +133,7 @@ pub(crate) fn attribute_scopes<'db, 's>(
class_body_scope: ScopeId<'db>,
) -> impl Iterator<Item = FileScopeId> + use<'s, 'db> {
let file = class_body_scope.file(db);
let module = parsed_module(db.upcast(), file).load(db.upcast());
let module = parsed_module(db, file).load(db);
let index = semantic_index(db, file);
let class_scope_id = class_body_scope.file_scope_id(db);

View file

@ -121,7 +121,7 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> {
let mut builder = Self {
db,
file,
source_type: file.source_type(db.upcast()),
source_type: file.source_type(db),
module: module_ref,
scope_stack: Vec::new(),
current_assignments: vec![],
@ -1047,7 +1047,7 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> {
fn source_text(&self) -> &SourceText {
self.source_text
.get_or_init(|| source_text(self.db.upcast(), self.file))
.get_or_init(|| source_text(self.db, self.file))
}
}

View file

@ -45,7 +45,7 @@ fn exports_cycle_initial(_db: &dyn Db, _file: File) -> Box<[Name]> {
#[salsa::tracked(returns(deref), cycle_fn=exports_cycle_recover, cycle_initial=exports_cycle_initial, heap_size=get_size2::GetSize::get_heap_size)]
pub(super) fn exported_names(db: &dyn Db, file: File) -> Box<[Name]> {
let module = parsed_module(db.upcast(), file).load(db.upcast());
let module = parsed_module(db, file).load(db);
let mut finder = ExportFinder::new(db, file);
finder.visit_body(module.suite());
finder.resolve_exports()
@ -64,7 +64,7 @@ impl<'db> ExportFinder<'db> {
Self {
db,
file,
visiting_stub_file: file.is_stub(db.upcast()),
visiting_stub_file: file.is_stub(db),
exports: FxHashMap::default(),
dunder_all: DunderAll::NotPresent,
}

View file

@ -34,7 +34,7 @@ impl<'db> SemanticModel<'db> {
}
pub fn line_index(&self) -> LineIndex {
line_index(self.db.upcast(), self.file)
line_index(self.db, self.file)
}
pub fn resolve_module(&self, module_name: &ModuleName) -> Option<Module> {

View file

@ -88,8 +88,8 @@ declare_lint! {
#[salsa::tracked(returns(ref), heap_size=get_size2::GetSize::get_heap_size)]
pub(crate) fn suppressions(db: &dyn Db, file: File) -> Suppressions {
let parsed = parsed_module(db.upcast(), file).load(db.upcast());
let source = source_text(db.upcast(), file);
let parsed = parsed_module(db, file).load(db);
let source = source_text(db, file);
let mut builder = SuppressionsBuilder::new(&source, db.lint_registry());
let mut line_start = TextSize::default();

View file

@ -5001,7 +5001,7 @@ impl<'db> Type<'db> {
SpecialFormType::Callable => Ok(CallableType::unknown(db)),
SpecialFormType::TypingSelf => {
let module = parsed_module(db.upcast(), scope_id.file(db)).load(db.upcast());
let module = parsed_module(db, scope_id.file(db)).load(db);
let index = semantic_index(db, scope_id.file(db));
let Some(class) = nearest_enclosing_class(db, index, scope_id, &module) else {
return Err(InvalidTypeExpressionError {
@ -7500,7 +7500,7 @@ impl get_size2::GetSize for PEP695TypeAliasType<'_> {}
impl<'db> PEP695TypeAliasType<'db> {
pub(crate) fn definition(self, db: &'db dyn Db) -> Definition<'db> {
let scope = self.rhs_scope(db);
let module = parsed_module(db.upcast(), scope.file(db)).load(db.upcast());
let module = parsed_module(db, scope.file(db)).load(db);
let type_alias_stmt_node = scope.node(db).expect_type_alias(&module);
semantic_index(db, scope.file(db)).expect_single_definition(type_alias_stmt_node)
@ -7509,7 +7509,7 @@ impl<'db> PEP695TypeAliasType<'db> {
#[salsa::tracked(heap_size=get_size2::GetSize::get_heap_size)]
pub(crate) fn value_type(self, db: &'db dyn Db) -> Type<'db> {
let scope = self.rhs_scope(db);
let module = parsed_module(db.upcast(), scope.file(db)).load(db.upcast());
let module = parsed_module(db, scope.file(db)).load(db);
let type_alias_stmt_node = scope.node(db).expect_type_alias(&module);
let definition = self.definition(db);
definition_expression_type(db, definition, &type_alias_stmt_node.value)

View file

@ -2751,9 +2751,8 @@ impl<'db> BindingError<'db> {
));
if let Some(typevar_definition) = typevar.definition(context.db()) {
let module =
parsed_module(context.db().upcast(), typevar_definition.file(context.db()))
.load(context.db().upcast());
let module = parsed_module(context.db(), typevar_definition.file(context.db()))
.load(context.db());
let typevar_range = typevar_definition.full_range(context.db(), &module);
let mut sub = SubDiagnostic::new(Severity::Info, "Type variable defined here");
sub.annotate(Annotation::primary(typevar_range.into()));

View file

@ -814,7 +814,7 @@ impl<'db> ClassLiteral<'db> {
#[salsa::tracked(cycle_fn=pep695_generic_context_cycle_recover, cycle_initial=pep695_generic_context_cycle_initial, heap_size=get_size2::GetSize::get_heap_size)]
pub(crate) fn pep695_generic_context(self, db: &'db dyn Db) -> Option<GenericContext<'db>> {
let scope = self.body_scope(db);
let parsed = parsed_module(db.upcast(), scope.file(db)).load(db.upcast());
let parsed = parsed_module(db, scope.file(db)).load(db);
let class_def_node = scope.node(db).expect_class(&parsed);
class_def_node.type_params.as_ref().map(|type_params| {
let index = semantic_index(db, scope.file(db));
@ -861,7 +861,7 @@ impl<'db> ClassLiteral<'db> {
pub(crate) fn definition(self, db: &'db dyn Db) -> Definition<'db> {
let body_scope = self.body_scope(db);
let module = parsed_module(db.upcast(), body_scope.file(db)).load(db.upcast());
let module = parsed_module(db, body_scope.file(db)).load(db);
let index = semantic_index(db, body_scope.file(db));
index.expect_single_definition(body_scope.node(db).expect_class(&module))
}
@ -925,7 +925,7 @@ impl<'db> ClassLiteral<'db> {
pub(super) fn explicit_bases(self, db: &'db dyn Db) -> Box<[Type<'db>]> {
tracing::trace!("ClassLiteral::explicit_bases_query: {}", self.name(db));
let module = parsed_module(db.upcast(), self.file(db)).load(db.upcast());
let module = parsed_module(db, self.file(db)).load(db);
let class_stmt = self.node(db, &module);
let class_definition =
semantic_index(db, self.file(db)).expect_single_definition(class_stmt);
@ -1001,7 +1001,7 @@ impl<'db> ClassLiteral<'db> {
fn decorators(self, db: &'db dyn Db) -> Box<[Type<'db>]> {
tracing::trace!("ClassLiteral::decorators: {}", self.name(db));
let module = parsed_module(db.upcast(), self.file(db)).load(db.upcast());
let module = parsed_module(db, self.file(db)).load(db);
let class_stmt = self.node(db, &module);
if class_stmt.decorator_list.is_empty() {
@ -1146,7 +1146,7 @@ impl<'db> ClassLiteral<'db> {
return Ok((SubclassOfType::subclass_of_unknown(), None));
}
let module = parsed_module(db.upcast(), self.file(db)).load(db.upcast());
let module = parsed_module(db, self.file(db)).load(db);
let explicit_metaclass = self.explicit_metaclass(db, &module);
let (metaclass, class_metaclass_was_from) = if let Some(metaclass) = explicit_metaclass {
@ -1739,7 +1739,7 @@ impl<'db> ClassLiteral<'db> {
let mut is_attribute_bound = Truthiness::AlwaysFalse;
let file = class_body_scope.file(db);
let module = parsed_module(db.upcast(), file).load(db.upcast());
let module = parsed_module(db, file).load(db);
let index = semantic_index(db, file);
let class_map = use_def_map(db, class_body_scope);
let class_table = place_table(db, class_body_scope);
@ -2178,7 +2178,7 @@ impl<'db> ClassLiteral<'db> {
/// ```
pub(super) fn header_range(self, db: &'db dyn Db) -> TextRange {
let class_scope = self.body_scope(db);
let module = parsed_module(db.upcast(), class_scope.file(db)).load(db.upcast());
let module = parsed_module(db, class_scope.file(db)).load(db);
let class_node = class_scope.node(db).expect_class(&module);
let class_name = &class_node.name;
TextRange::new(

View file

@ -187,7 +187,7 @@ impl<'db, 'ast> InferContext<'db, 'ast> {
/// Are we currently inferring types in a stub file?
pub(crate) fn in_stub(&self) -> bool {
self.file.is_stub(self.db().upcast())
self.file.is_stub(self.db())
}
#[must_use]

View file

@ -22,7 +22,7 @@ impl TypeDefinition<'_> {
| Self::Function(definition)
| Self::TypeVar(definition)
| Self::TypeAlias(definition) => {
let module = parsed_module(db.upcast(), definition.file(db)).load(db.upcast());
let module = parsed_module(db, definition.file(db)).load(db);
Some(definition.focus_range(db, &module))
}
}
@ -32,14 +32,14 @@ impl TypeDefinition<'_> {
match self {
Self::Module(module) => {
let file = module.file()?;
let source = source_text(db.upcast(), file);
let source = source_text(db, file);
Some(FileRange::new(file, TextRange::up_to(source.text_len())))
}
Self::Class(definition)
| Self::Function(definition)
| Self::TypeVar(definition)
| Self::TypeAlias(definition) => {
let module = parsed_module(db.upcast(), definition.file(db)).load(db.upcast());
let module = parsed_module(db, definition.file(db)).load(db);
Some(definition.full_range(db, &module))
}
}

View file

@ -274,7 +274,7 @@ impl<'db> OverloadLiteral<'db> {
/// over-invalidation.
fn definition(self, db: &'db dyn Db) -> Definition<'db> {
let body_scope = self.body_scope(db);
let module = parsed_module(db.upcast(), self.file(db)).load(db.upcast());
let module = parsed_module(db, self.file(db)).load(db);
let index = semantic_index(db, body_scope.file(db));
index.expect_single_definition(body_scope.node(db).expect_function(&module))
}
@ -285,7 +285,7 @@ impl<'db> OverloadLiteral<'db> {
// The semantic model records a use for each function on the name node. This is used
// here to get the previous function definition with the same name.
let scope = self.definition(db).scope(db);
let module = parsed_module(db.upcast(), self.file(db)).load(db.upcast());
let module = parsed_module(db, self.file(db)).load(db);
let use_def = semantic_index(db, scope.file(db)).use_def_map(scope.file_scope_id(db));
let use_id = self
.body_scope(db)
@ -326,7 +326,7 @@ impl<'db> OverloadLiteral<'db> {
inherited_generic_context: Option<GenericContext<'db>>,
) -> Signature<'db> {
let scope = self.body_scope(db);
let module = parsed_module(db.upcast(), self.file(db)).load(db.upcast());
let module = parsed_module(db, self.file(db)).load(db);
let function_stmt_node = scope.node(db).expect_function(&module);
let definition = self.definition(db);
let generic_context = function_stmt_node.type_params.as_ref().map(|type_params| {
@ -350,7 +350,7 @@ impl<'db> OverloadLiteral<'db> {
let function_scope = self.body_scope(db);
let span = Span::from(function_scope.file(db));
let node = function_scope.node(db);
let module = parsed_module(db.upcast(), self.file(db)).load(db.upcast());
let module = parsed_module(db, self.file(db)).load(db);
let func_def = node.as_function(&module)?;
let range = parameter_index
.and_then(|parameter_index| {
@ -370,7 +370,7 @@ impl<'db> OverloadLiteral<'db> {
let function_scope = self.body_scope(db);
let span = Span::from(function_scope.file(db));
let node = function_scope.node(db);
let module = parsed_module(db.upcast(), self.file(db)).load(db.upcast());
let module = parsed_module(db, self.file(db)).load(db);
let func_def = node.as_function(&module)?;
let return_type_range = func_def.returns.as_ref().map(|returns| returns.range());
let mut signature = func_def.name.range.cover(func_def.parameters.range);

View file

@ -134,7 +134,7 @@ pub(crate) fn infer_scope_types<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> Ty
let file = scope.file(db);
let _span = tracing::trace_span!("infer_scope_types", scope=?scope.as_id(), ?file).entered();
let module = parsed_module(db.upcast(), file).load(db.upcast());
let module = parsed_module(db, file).load(db);
// Using the index here is fine because the code below depends on the AST anyway.
// The isolation of the query is by the return inferred types.
@ -164,7 +164,7 @@ pub(crate) fn infer_definition_types<'db>(
definition: Definition<'db>,
) -> TypeInference<'db> {
let file = definition.file(db);
let module = parsed_module(db.upcast(), file).load(db.upcast());
let module = parsed_module(db, file).load(db);
let _span = tracing::trace_span!(
"infer_definition_types",
range = ?definition.kind(db).target_range(&module),
@ -203,7 +203,7 @@ pub(crate) fn infer_deferred_types<'db>(
definition: Definition<'db>,
) -> TypeInference<'db> {
let file = definition.file(db);
let module = parsed_module(db.upcast(), file).load(db.upcast());
let module = parsed_module(db, file).load(db);
let _span = tracing::trace_span!(
"infer_deferred_types",
definition = ?definition.as_id(),
@ -240,7 +240,7 @@ pub(crate) fn infer_expression_types<'db>(
expression: Expression<'db>,
) -> TypeInference<'db> {
let file = expression.file(db);
let module = parsed_module(db.upcast(), file).load(db.upcast());
let module = parsed_module(db, file).load(db);
let _span = tracing::trace_span!(
"infer_expression_types",
expression = ?expression.as_id(),
@ -302,7 +302,7 @@ pub(crate) fn infer_expression_type<'db>(
expression: Expression<'db>,
) -> Type<'db> {
let file = expression.file(db);
let module = parsed_module(db.upcast(), file).load(db.upcast());
let module = parsed_module(db, file).load(db);
// It's okay to call the "same file" version here because we're inside a salsa query.
infer_same_file_expression_type(db, expression, &module)
@ -333,7 +333,7 @@ fn single_expression_cycle_initial<'db>(
#[salsa::tracked(returns(ref), cycle_fn=unpack_cycle_recover, cycle_initial=unpack_cycle_initial, heap_size=get_size2::GetSize::get_heap_size)]
pub(super) fn infer_unpack_types<'db>(db: &'db dyn Db, unpack: Unpack<'db>) -> UnpackResult<'db> {
let file = unpack.file(db);
let module = parsed_module(db.upcast(), file).load(db.upcast());
let module = parsed_module(db, file).load(db);
let _span = tracing::trace_span!("infer_unpack_types", range=?unpack.range(db, &module), ?file)
.entered();

View file

@ -74,7 +74,7 @@ fn all_narrowing_constraints_for_pattern<'db>(
db: &'db dyn Db,
pattern: PatternPredicate<'db>,
) -> Option<NarrowingConstraints<'db>> {
let module = parsed_module(db.upcast(), pattern.file(db)).load(db.upcast());
let module = parsed_module(db, pattern.file(db)).load(db);
NarrowingConstraintsBuilder::new(db, &module, PredicateNode::Pattern(pattern), true).finish()
}
@ -88,7 +88,7 @@ fn all_narrowing_constraints_for_expression<'db>(
db: &'db dyn Db,
expression: Expression<'db>,
) -> Option<NarrowingConstraints<'db>> {
let module = parsed_module(db.upcast(), expression.file(db)).load(db.upcast());
let module = parsed_module(db, expression.file(db)).load(db);
NarrowingConstraintsBuilder::new(db, &module, PredicateNode::Expression(expression), true)
.finish()
}
@ -103,7 +103,7 @@ fn all_negative_narrowing_constraints_for_expression<'db>(
db: &'db dyn Db,
expression: Expression<'db>,
) -> Option<NarrowingConstraints<'db>> {
let module = parsed_module(db.upcast(), expression.file(db)).load(db.upcast());
let module = parsed_module(db, expression.file(db)).load(db);
NarrowingConstraintsBuilder::new(db, &module, PredicateNode::Expression(expression), false)
.finish()
}
@ -113,7 +113,7 @@ fn all_negative_narrowing_constraints_for_pattern<'db>(
db: &'db dyn Db,
pattern: PatternPredicate<'db>,
) -> Option<NarrowingConstraints<'db>> {
let module = parsed_module(db.upcast(), pattern.file(db)).load(db.upcast());
let module = parsed_module(db, pattern.file(db)).load(db);
NarrowingConstraintsBuilder::new(db, &module, PredicateNode::Pattern(pattern), false).finish()
}

View file

@ -137,7 +137,7 @@ pub(crate) fn parse_string_annotation(
let _span = tracing::trace_span!("parse_string_annotation", string=?string_expr.range(), ?file)
.entered();
let source = source_text(db.upcast(), file);
let source = source_text(db, file);
if let Some(string_literal) = string_expr.as_single_part_string() {
let prefix = string_literal.flags.prefix();

View file

@ -415,7 +415,7 @@ where
unsafe fn maybe_update(old_pointer: *mut Self, new_value: Self) -> bool {
unsafe {
let old_value = &mut *old_pointer;
Vec::maybe_update(&mut old_value.0, new_value.0)
Vec::maybe_update(&raw mut old_value.0, new_value.0)
}
}
}
@ -829,9 +829,9 @@ where
unsafe fn maybe_update(old_pointer: *mut Self, new_value: Self) -> bool {
let old_value = unsafe { &mut *old_pointer };
let mut changed = false;
changed |= unsafe { Vec::maybe_update(&mut old_value.prefix, new_value.prefix) };
changed |= unsafe { T::maybe_update(&mut old_value.variable, new_value.variable) };
changed |= unsafe { Vec::maybe_update(&mut old_value.suffix, new_value.suffix) };
changed |= unsafe { Vec::maybe_update(&raw mut old_value.prefix, new_value.prefix) };
changed |= unsafe { T::maybe_update(&raw mut old_value.variable, new_value.variable) };
changed |= unsafe { Vec::maybe_update(&raw mut old_value.suffix, new_value.suffix) };
changed
}
}

View file

@ -1,8 +1,8 @@
use anyhow::{Context, anyhow};
use ruff_db::Db;
use ruff_db::files::{File, Files, system_path_to_file};
use ruff_db::system::{DbWithTestSystem, System, SystemPath, SystemPathBuf, TestSystem};
use ruff_db::vendored::VendoredFileSystem;
use ruff_db::{Db, Upcast};
use ruff_python_ast::PythonVersion;
use ty_python_semantic::lint::{LintRegistry, RuleSelection};
@ -249,15 +249,6 @@ impl ruff_db::Db for CorpusDb {
}
}
impl Upcast<dyn ruff_db::Db> for CorpusDb {
fn upcast(&self) -> &(dyn ruff_db::Db + 'static) {
self
}
fn upcast_mut(&mut self) -> &mut (dyn ruff_db::Db + 'static) {
self
}
}
#[salsa::db]
impl ty_python_semantic::Db for CorpusDb {
fn is_file_open(&self, file: File) -> bool {

View file

@ -20,7 +20,7 @@ pub(crate) trait ToLink {
impl ToLink for NavigationTarget {
fn to_location(&self, db: &dyn Db, encoding: PositionEncoding) -> Option<Location> {
FileRange::new(self.file(), self.focus_range()).to_location(db.upcast(), encoding)
FileRange::new(self.file(), self.focus_range()).to_location(db, encoding)
}
fn to_link(
@ -30,16 +30,16 @@ impl ToLink for NavigationTarget {
encoding: PositionEncoding,
) -> Option<lsp_types::LocationLink> {
let file = self.file();
let uri = file_to_url(db.upcast(), file)?;
let source = source_text(db.upcast(), file);
let index = line_index(db.upcast(), file);
let uri = file_to_url(db, file)?;
let source = source_text(db, file);
let index = line_index(db, file);
let target_range = self.full_range().to_lsp_range(&source, &index, encoding);
let selection_range = self.focus_range().to_lsp_range(&source, &index, encoding);
let src = src.map(|src| {
let source = source_text(db.upcast(), src.file());
let index = line_index(db.upcast(), src.file());
let source = source_text(db, src.file());
let index = line_index(db, src.file());
src.range().to_lsp_range(&source, &index, encoding)
});

View file

@ -163,8 +163,8 @@ impl FileRangeExt for FileRange {
fn to_location(&self, db: &dyn Db, encoding: PositionEncoding) -> Option<Location> {
let file = self.file();
let uri = file_to_url(db, file)?;
let source = source_text(db.upcast(), file);
let line_index = line_index(db.upcast(), file);
let source = source_text(db, file);
let line_index = line_index(db, file);
let range = self.range().to_lsp_range(&source, &line_index, encoding);
Some(Location { uri, range })

View file

@ -179,8 +179,8 @@ fn to_lsp_diagnostic(
) -> Diagnostic {
let range = if let Some(span) = diagnostic.primary_span() {
let file = span.expect_ty_file();
let index = line_index(db.upcast(), file);
let source = source_text(db.upcast(), file);
let index = line_index(db, file);
let source = source_text(db, file);
span.range()
.map(|range| range.to_lsp_range(&source, &index, encoding))
@ -265,7 +265,7 @@ fn annotation_to_related_information(
let annotation_message = annotation.get_message()?;
let range = FileRange::try_from(span).ok()?;
let location = range.to_location(db.upcast(), encoding)?;
let location = range.to_location(db, encoding)?;
Some(DiagnosticRelatedInformation {
location,
@ -283,7 +283,7 @@ fn sub_diagnostic_to_related_information(
let span = primary_annotation.get_span();
let range = FileRange::try_from(span).ok()?;
let location = range.to_location(db.upcast(), encoding)?;
let location = range.to_location(db, encoding)?;
Some(DiagnosticRelatedInformation {
location,

View file

@ -228,7 +228,11 @@ mod imp {
let mut qos_class_raw = libc::qos_class_t::QOS_CLASS_UNSPECIFIED;
#[expect(unsafe_code)]
let code = unsafe {
libc::pthread_get_qos_class_np(current_thread, &mut qos_class_raw, std::ptr::null_mut())
libc::pthread_get_qos_class_np(
current_thread,
&raw mut qos_class_raw,
std::ptr::null_mut(),
)
};
if code != 0 {

View file

@ -1,4 +1,5 @@
use camino::{Utf8Component, Utf8PathBuf};
use ruff_db::Db as SourceDb;
use ruff_db::diagnostic::Severity;
use ruff_db::files::{File, Files};
use ruff_db::system::{
@ -6,7 +7,6 @@ use ruff_db::system::{
SystemPathBuf, WritableSystem,
};
use ruff_db::vendored::VendoredFileSystem;
use ruff_db::{Db as SourceDb, Upcast};
use ruff_notebook::{Notebook, NotebookError};
use std::borrow::Cow;
use std::sync::Arc;
@ -75,15 +75,6 @@ impl SourceDb for Db {
}
}
impl Upcast<dyn SourceDb> for Db {
fn upcast(&self) -> &(dyn SourceDb + 'static) {
self
}
fn upcast_mut(&mut self) -> &mut (dyn SourceDb + 'static) {
self
}
}
#[salsa::db]
impl SemanticDb for Db {
fn is_file_open(&self, file: File) -> bool {

View file

@ -5,6 +5,7 @@ use camino::Utf8Path;
use colored::Colorize;
use config::SystemKind;
use parser as test_parser;
use ruff_db::Db as _;
use ruff_db::diagnostic::{
Diagnostic, DisplayDiagnosticConfig, create_parse_diagnostic,
create_unsupported_syntax_diagnostic,
@ -14,7 +15,6 @@ use ruff_db::panic::catch_unwind;
use ruff_db::parsed::parsed_module;
use ruff_db::system::{DbWithWritableSystem as _, SystemPath, SystemPathBuf};
use ruff_db::testing::{setup_logging, setup_logging_with_filter};
use ruff_db::{Db as _, Upcast};
use ruff_source_file::{LineIndex, OneIndexed};
use std::backtrace::BacktraceStatus;
use std::fmt::Write;
@ -469,7 +469,7 @@ fn create_diagnostic_snapshot(
writeln!(snapshot).unwrap();
}
writeln!(snapshot, "```").unwrap();
write!(snapshot, "{}", diag.display(&db.upcast(), &display_config)).unwrap();
write!(snapshot, "{}", diag.display(db, &display_config)).unwrap();
writeln!(snapshot, "```").unwrap();
}
snapshot

View file

@ -1,6 +1,7 @@
use std::any::Any;
use js_sys::{Error, JsString};
use ruff_db::Db as _;
use ruff_db::diagnostic::{self, DisplayDiagnosticConfig};
use ruff_db::files::{File, FileRange, system_path_to_file};
use ruff_db::source::{line_index, source_text};
@ -9,7 +10,6 @@ use ruff_db::system::{
CaseSensitivity, DirectoryEntry, GlobError, MemoryFileSystem, Metadata, PatternError, System,
SystemPath, SystemPathBuf, SystemVirtualPath,
};
use ruff_db::{Db as _, Upcast};
use ruff_notebook::Notebook;
use ruff_python_formatter::formatted_file;
use ruff_source_file::{LineIndex, OneIndexed, SourceLocation};
@ -412,7 +412,7 @@ impl Diagnostic {
pub fn display(&self, workspace: &Workspace) -> JsString {
let config = DisplayDiagnosticConfig::default().color(false);
self.inner
.display(&workspace.db.upcast(), &config)
.display(&workspace.db, &config)
.to_string()
.into()
}
@ -439,8 +439,8 @@ impl Range {
file_range: FileRange,
position_encoding: PositionEncoding,
) -> Self {
let index = line_index(db.upcast(), file_range.file());
let source = source_text(db.upcast(), file_range.file());
let index = line_index(db, file_range.file());
let source = source_text(db, file_range.file());
Self::from_text_range(file_range.range(), &index, &source, position_encoding)
}

View file

@ -7,12 +7,12 @@ use std::sync::{Arc, Mutex, OnceLock};
use libfuzzer_sys::{Corpus, fuzz_target};
use ruff_db::Db as SourceDb;
use ruff_db::files::{File, Files, system_path_to_file};
use ruff_db::system::{
DbWithTestSystem, DbWithWritableSystem as _, System, SystemPathBuf, TestSystem,
};
use ruff_db::vendored::VendoredFileSystem;
use ruff_db::{Db as SourceDb, Upcast};
use ruff_python_ast::PythonVersion;
use ruff_python_parser::{Mode, ParseOptions, parse_unchecked};
use ty_python_semantic::lint::LintRegistry;
@ -80,15 +80,6 @@ impl DbWithTestSystem for TestDb {
}
}
impl Upcast<dyn SourceDb> for TestDb {
fn upcast(&self) -> &(dyn SourceDb + 'static) {
self
}
fn upcast_mut(&mut self) -> &mut (dyn SourceDb + 'static) {
self
}
}
#[salsa::db]
impl SemanticDb for TestDb {
fn is_file_open(&self, file: File) -> bool {

View file

@ -1,2 +1,2 @@
[toolchain]
channel = "1.87"
channel = "1.88"