[red-knot] Add basic workspace support (#12318)
Commit 91338ae902 (parent 0c72577b5d)
29 changed files with 1018 additions and 664 deletions
Cargo.lock (generated, 2 lines changed)
@@ -2051,7 +2051,6 @@ dependencies = [
 "mimalloc",
 "once_cell",
 "red_knot",
 "red_knot_module_resolver",
 "ruff_db",
 "ruff_linter",
 "ruff_python_ast",
@@ -2088,6 +2087,7 @@ dependencies = [
 "filetime",
 "ignore",
 "insta",
 "ruff_cache",
 "ruff_notebook",
 "ruff_python_ast",
 "ruff_python_parser",
crates/red_knot/Cargo.toml
@@ -15,7 +15,7 @@ license.workspace = true
 red_knot_module_resolver = { workspace = true }
 red_knot_python_semantic = { workspace = true }

-ruff_db = { workspace = true, features = ["os"] }
+ruff_db = { workspace = true, features = ["os", "cache"] }
 ruff_python_ast = { workspace = true }

 anyhow = { workspace = true }
crates/red_knot/src/db.rs
@@ -1,10 +1,200 @@
-use red_knot_python_semantic::Db as SemanticDb;
-use ruff_db::Upcast;
-use salsa::DbWithJar;
+use std::panic::{AssertUnwindSafe, RefUnwindSafe};
+use std::sync::Arc;

-use crate::lint::{lint_semantic, lint_syntax, unwind_if_cancelled};
+use salsa::{Cancelled, Database, DbWithJar};
+
+use red_knot_module_resolver::{vendored_typeshed_stubs, Db as ResolverDb, Jar as ResolverJar};
+use red_knot_python_semantic::{Db as SemanticDb, Jar as SemanticJar};
+use ruff_db::files::{system_path_to_file, File, Files};
+use ruff_db::program::{Program, ProgramSettings};
+use ruff_db::system::System;
+use ruff_db::vendored::VendoredFileSystem;
+use ruff_db::{Db as SourceDb, Jar as SourceJar, Upcast};
+
+use crate::lint::{lint_semantic, lint_syntax, unwind_if_cancelled, Diagnostics};
+use crate::watch::{FileChangeKind, FileWatcherChange};
+use crate::workspace::{check_file, Package, Workspace, WorkspaceMetadata};

 pub trait Db: DbWithJar<Jar> + SemanticDb + Upcast<dyn SemanticDb> {}

 #[salsa::jar(db=Db)]
-pub struct Jar(lint_syntax, lint_semantic, unwind_if_cancelled);
+pub struct Jar(
+    Workspace,
+    Package,
+    lint_syntax,
+    lint_semantic,
+    unwind_if_cancelled,
+);
+
+#[salsa::db(SourceJar, ResolverJar, SemanticJar, Jar)]
+pub struct RootDatabase {
+    workspace: Option<Workspace>,
+    storage: salsa::Storage<RootDatabase>,
+    files: Files,
+    system: Arc<dyn System + Send + Sync + RefUnwindSafe>,
+}
+
+impl RootDatabase {
+    pub fn new<S>(workspace: WorkspaceMetadata, settings: ProgramSettings, system: S) -> Self
+    where
+        S: System + 'static + Send + Sync + RefUnwindSafe,
+    {
+        let mut db = Self {
+            workspace: None,
+            storage: salsa::Storage::default(),
+            files: Files::default(),
+            system: Arc::new(system),
+        };
+
+        let workspace = Workspace::from_metadata(&db, workspace);
+        // Initialize the `Program` singleton
+        Program::from_settings(&db, settings);
+
+        db.workspace = Some(workspace);
+        db
+    }
+
+    pub fn workspace(&self) -> Workspace {
+        // SAFETY: The workspace is always initialized in `new`.
+        self.workspace.unwrap()
+    }
+
+    #[tracing::instrument(level = "debug", skip(self, changes))]
+    pub fn apply_changes(&mut self, changes: Vec<FileWatcherChange>) {
+        let workspace = self.workspace();
+        let workspace_path = workspace.root(self).to_path_buf();
+
+        // TODO: Optimize change tracking by only reloading a package if a file that is part of the package was changed.
+        let mut structural_change = false;
+        for change in changes {
+            if matches!(
+                change.path.file_name(),
+                Some(".gitignore" | ".ignore" | "ruff.toml" | ".ruff.toml" | "pyproject.toml")
+            ) {
+                // Changes to ignore files or settings can change the workspace structure or add/remove files
+                // from packages.
+                structural_change = true;
+            } else {
+                match change.kind {
+                    FileChangeKind::Created => {
+                        // Reload the package when a new file was added. This is necessary because the file might be excluded
+                        // by a gitignore.
+                        if workspace.package(self, &change.path).is_some() {
+                            structural_change = true;
+                        }
+                    }
+                    FileChangeKind::Modified => {}
+                    FileChangeKind::Deleted => {
+                        if let Some(package) = workspace.package(self, &change.path) {
+                            if let Some(file) = system_path_to_file(self, &change.path) {
+                                package.remove_file(self, file);
+                            }
+                        }
+                    }
+                }
+            }
+
+            File::touch_path(self, &change.path);
+        }
+
+        if structural_change {
+            match WorkspaceMetadata::from_path(&workspace_path, self.system()) {
+                Ok(metadata) => {
+                    tracing::debug!("Reload workspace after structural change.");
+                    // TODO: Handle changes in the program settings.
+                    workspace.reload(self, metadata);
+                }
+                Err(error) => {
+                    tracing::error!("Failed to load workspace, keeping the old workspace: {error}");
+                }
+            }
+        }
+    }
+
+    /// Checks all open files in the workspace and its dependencies.
+    pub fn check(&self) -> Result<Vec<String>, Cancelled> {
+        self.with_db(|db| db.workspace().check(db))
+    }
+
+    pub fn check_file(&self, file: File) -> Result<Diagnostics, Cancelled> {
+        self.with_db(|db| check_file(db, file))
+    }
+
+    pub(crate) fn with_db<F, T>(&self, f: F) -> Result<T, Cancelled>
+    where
+        F: FnOnce(&RootDatabase) -> T + std::panic::UnwindSafe,
+    {
+        // The `AssertUnwindSafe` here looks scary, but is a consequence of Salsa's design.
+        // Salsa uses panics to implement cancellation and to recover from cycles. However, the Salsa
+        // storage isn't `UnwindSafe` or `RefUnwindSafe` because its dependencies `DashMap` and `parking_lot::*` aren't
+        // unwind safe.
+        //
+        // Having to use `AssertUnwindSafe` isn't as big a deal as it might seem because
+        // the `UnwindSafe` and `RefUnwindSafe` traits are designed to catch logical bugs.
+        // They don't protect against [UB](https://internals.rust-lang.org/t/pre-rfc-deprecating-unwindsafe/15974).
+        // On top of that, `Cancelled` only catches specific Salsa panics and propagates all other panics.
+        //
+        // That still leaves us with possible logical bugs in two sources:
+        // * In Salsa itself: This must be considered a bug in Salsa and needs fixing upstream.
+        //   Reviewing Salsa code specifically around unwind safety seems doable.
+        // * Our code: This is the main concern. Luckily, it only involves code that uses internal mutability
+        //   and calls into Salsa queries when mutating the internal state. Using `AssertUnwindSafe`
+        //   certainly makes it harder to catch these issues in our user code.
+        //
+        // For now, this is the only solution at hand unless Salsa decides to change its design.
+        // [Zulip support thread](https://salsa.zulipchat.com/#narrow/stream/145099-general/topic/How.20to.20use.20.60Cancelled.3A.3Acatch.60)
+        let db = &AssertUnwindSafe(self);
+        Cancelled::catch(|| f(db))
+    }
+}
+
+impl Upcast<dyn SemanticDb> for RootDatabase {
+    fn upcast(&self) -> &(dyn SemanticDb + 'static) {
+        self
+    }
+}
+
+impl Upcast<dyn SourceDb> for RootDatabase {
+    fn upcast(&self) -> &(dyn SourceDb + 'static) {
+        self
+    }
+}
+
+impl Upcast<dyn ResolverDb> for RootDatabase {
+    fn upcast(&self) -> &(dyn ResolverDb + 'static) {
+        self
+    }
+}
+
+impl ResolverDb for RootDatabase {}
+
+impl SemanticDb for RootDatabase {}
+
+impl SourceDb for RootDatabase {
+    fn vendored(&self) -> &VendoredFileSystem {
+        vendored_typeshed_stubs()
+    }
+
+    fn system(&self) -> &dyn System {
+        &*self.system
+    }
+
+    fn files(&self) -> &Files {
+        &self.files
+    }
+}
+
+impl Database for RootDatabase {}
+
+impl Db for RootDatabase {}
+
+impl salsa::ParallelDatabase for RootDatabase {
+    fn snapshot(&self) -> salsa::Snapshot<Self> {
+        salsa::Snapshot::new(Self {
+            workspace: self.workspace,
+            storage: self.storage.snapshot(),
+            files: self.files.snapshot(),
+            system: self.system.clone(),
+        })
+    }
+}
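The `with_db` comment above describes how Salsa implements cancellation by unwinding. A minimal, self-contained sketch of that idea using only the standard library; `Cancelled` and `catch_cancellation` here are illustrative stand-ins for `salsa::Cancelled` and `Cancelled::catch`, not the actual Salsa API:

    use std::panic::{catch_unwind, panic_any, resume_unwind, AssertUnwindSafe};

    // Illustrative stand-in for `salsa::Cancelled`.
    struct Cancelled;

    // Runs `f`, converting only our own cancellation payload into `Err`;
    // any other panic keeps propagating, mirroring `Cancelled::catch`.
    fn catch_cancellation<T>(f: impl FnOnce() -> T) -> Result<T, Cancelled> {
        match catch_unwind(AssertUnwindSafe(f)) {
            Ok(value) => Ok(value),
            Err(payload) if payload.is::<Cancelled>() => Err(Cancelled),
            Err(payload) => resume_unwind(payload),
        }
    }

    fn main() {
        // Silence the default hook; the unwind below is control flow, not a bug.
        std::panic::set_hook(Box::new(|_| {}));

        let result: Result<i32, Cancelled> = catch_cancellation(|| {
            // A real query would poll a flag at safe points and unwind when a
            // pending write needs exclusive access to the database.
            panic_any(Cancelled)
        });
        assert!(result.is_err());
    }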
crates/red_knot/src/lib.rs
@@ -1,53 +1,6 @@
-use rustc_hash::FxHashSet;
-
-use ruff_db::files::File;
-use ruff_db::system::{SystemPath, SystemPathBuf};
-
 use crate::db::Jar;

 pub mod db;
 pub mod lint;
-pub mod program;
-pub mod target_version;
 pub mod watch;
-
-#[derive(Debug, Clone)]
-pub struct Workspace {
-    root: SystemPathBuf,
-    /// The files that are open in the workspace.
-    ///
-    /// * Editor: The files that are actively being edited in the editor (the user has a tab open with the file).
-    /// * CLI: The resolved files passed as arguments to the CLI.
-    open_files: FxHashSet<File>,
-}
-
-impl Workspace {
-    pub fn new(root: SystemPathBuf) -> Self {
-        Self {
-            root,
-            open_files: FxHashSet::default(),
-        }
-    }
-
-    pub fn root(&self) -> &SystemPath {
-        self.root.as_path()
-    }
-
-    // TODO having the content in workspace feels wrong.
-    pub fn open_file(&mut self, file_id: File) {
-        self.open_files.insert(file_id);
-    }
-
-    pub fn close_file(&mut self, file_id: File) {
-        self.open_files.remove(&file_id);
-    }
-
-    // TODO introduce an `OpenFile` type instead of using an anonymous tuple.
-    pub fn open_files(&self) -> impl Iterator<Item = File> + '_ {
-        self.open_files.iter().copied()
-    }
-
-    pub fn is_file_open(&self, file_id: File) -> bool {
-        self.open_files.contains(&file_id)
-    }
-}
+pub mod workspace;
crates/red_knot/src/main.rs
@@ -10,13 +10,16 @@ use tracing_subscriber::layer::{Context, Filter, SubscriberExt};
 use tracing_subscriber::{Layer, Registry};
 use tracing_tree::time::Uptime;

-use red_knot::program::{FileWatcherChange, Program};
-use red_knot::target_version::TargetVersion;
+use red_knot::db::RootDatabase;
 use red_knot::watch::FileWatcher;
-use red_knot::Workspace;
-use red_knot_module_resolver::{set_module_resolution_settings, RawModuleResolutionSettings};
-use ruff_db::files::system_path_to_file;
-use ruff_db::system::{OsSystem, System, SystemPath, SystemPathBuf};
+use red_knot::watch::FileWatcherChange;
+use red_knot::workspace::WorkspaceMetadata;
+use ruff_db::program::{ProgramSettings, SearchPathSettings};
+use ruff_db::system::{OsSystem, System, SystemPathBuf};
+
+use self::target_version::TargetVersion;
+
+mod target_version;

 #[derive(Debug, Parser)]
 #[command(
@@ -26,8 +29,14 @@ use ruff_db::system::{OsSystem, System, SystemPath, SystemPathBuf};
 )]
 #[command(version)]
 struct Args {
     #[clap(help = "File to check", required = true, value_name = "FILE")]
     entry_point: SystemPathBuf,
+    #[arg(
+        long,
+        help = "Changes the current working directory.",
+        long_help = "Changes the current working directory before any specified operations. This affects the workspace and configuration discovery.",
+        value_name = "PATH"
+    )]
+    current_directory: Option<SystemPathBuf>,

     #[arg(
         long,
         value_name = "DIRECTORY",
@@ -55,54 +64,38 @@ pub fn main() -> anyhow::Result<()> {
     setup_tracing();

     let Args {
         entry_point,
+        current_directory,
         custom_typeshed_dir,
-        extra_search_path: extra_search_paths,
+        extra_search_path: extra_paths,
         target_version,
     } = Args::parse_from(std::env::args().collect::<Vec<_>>());

-    tracing::trace!("Target version: {target_version}");
-    if let Some(custom_typeshed) = custom_typeshed_dir.as_ref() {
-        tracing::trace!("Custom typeshed directory: {custom_typeshed}");
-    }
-    if !extra_search_paths.is_empty() {
-        tracing::trace!("extra search paths: {extra_search_paths:?}");
-    }
+    let cwd = if let Some(cwd) = current_directory {
+        let canonicalized = cwd.as_utf8_path().canonicalize_utf8().unwrap();
+        SystemPathBuf::from_utf8_path_buf(canonicalized)
+    } else {
+        let cwd = std::env::current_dir().unwrap();
+        SystemPathBuf::from_path_buf(cwd).unwrap()
+    };

-    let cwd = std::env::current_dir().unwrap();
-    let cwd = SystemPath::from_std_path(&cwd).unwrap();
-    let system = OsSystem::new(cwd);
+    let system = OsSystem::new(cwd.clone());
+    let workspace_metadata =
+        WorkspaceMetadata::from_path(system.current_directory(), &system).unwrap();

     if !system.path_exists(&entry_point) {
         eprintln!("The entry point does not exist.");
         return Err(anyhow::anyhow!("Invalid arguments"));
     }

     if !system.is_file(&entry_point) {
         eprintln!("The entry point is not a file.");
         return Err(anyhow::anyhow!("Invalid arguments"));
     }

-    let workspace_folder = entry_point.parent().unwrap();
-    let workspace = Workspace::new(workspace_folder.to_path_buf());
-
-    let workspace_search_path = workspace.root().to_path_buf();
-
-    let mut program = Program::new(workspace, system);
-
-    set_module_resolution_settings(
-        &mut program,
-        RawModuleResolutionSettings {
-            extra_paths: extra_search_paths,
-            workspace_root: workspace_search_path,
-            site_packages: None,
-            custom_typeshed: custom_typeshed_dir,
-            target_version: red_knot_module_resolver::TargetVersion::from(target_version),
-        },
-    );
-
-    let entry_id = system_path_to_file(&program, entry_point.clone()).unwrap();
-    program.workspace_mut().open_file(entry_id);
+    // TODO: Respect the settings from the workspace metadata when resolving the program settings.
+    let program_settings = ProgramSettings {
+        target_version: target_version.into(),
+        search_paths: SearchPathSettings {
+            extra_paths,
+            workspace_root: workspace_metadata.root().to_path_buf(),
+            custom_typeshed: custom_typeshed_dir,
+            site_packages: None,
+        },
+    };
+
+    // TODO: Use the `program_settings` to compute the key for the database's persistent
+    // cache and load the cache if it exists.
+    let mut db = RootDatabase::new(workspace_metadata, program_settings, system);

     let (main_loop, main_loop_cancellation_token) = MainLoop::new();

@@ -123,9 +116,9 @@ pub fn main() -> anyhow::Result<()> {
         file_changes_notifier.notify(changes);
     })?;

-    file_watcher.watch_folder(workspace_folder.as_std_path())?;
+    file_watcher.watch_folder(db.workspace().root(&db).as_std_path())?;

-    main_loop.run(&mut program);
+    main_loop.run(&mut db);

     println!("{}", countme::get_all());

@@ -170,7 +163,7 @@ impl MainLoop {
     }

     #[allow(clippy::print_stderr)]
-    fn run(self, program: &mut Program) {
+    fn run(self, db: &mut RootDatabase) {
         self.orchestrator_sender
             .send(OrchestratorMessage::Run)
             .unwrap();

@@ -179,16 +172,16 @@ impl MainLoop {
             tracing::trace!("Main Loop: Tick");

             match message {
-                MainLoopMessage::CheckProgram { revision } => {
-                    let program = program.snapshot();
+                MainLoopMessage::CheckWorkspace { revision } => {
+                    let db = db.snapshot();
                     let sender = self.orchestrator_sender.clone();

-                    // Spawn a new task that checks the program. This needs to be done in a separate thread
+                    // Spawn a new task that checks the workspace. This needs to be done in a separate thread
                     // to prevent blocking the main loop here.
                     rayon::spawn(move || {
-                        if let Ok(result) = program.check() {
+                        if let Ok(result) = db.check() {
                             sender
-                                .send(OrchestratorMessage::CheckProgramCompleted {
+                                .send(OrchestratorMessage::CheckCompleted {
                                     diagnostics: result,
                                     revision,
                                 })

@@ -198,7 +191,7 @@ impl MainLoop {
                 }
                 MainLoopMessage::ApplyChanges(changes) => {
                     // Automatically cancels any pending queries and waits for them to complete.
-                    program.apply_changes(changes);
+                    db.apply_changes(changes);
                 }
                 MainLoopMessage::CheckCompleted(diagnostics) => {
                     eprintln!("{}", diagnostics.join("\n"));

@@ -260,13 +253,13 @@ impl Orchestrator {
         match message {
             OrchestratorMessage::Run => {
                 self.sender
-                    .send(MainLoopMessage::CheckProgram {
+                    .send(MainLoopMessage::CheckWorkspace {
                         revision: self.revision,
                     })
                     .unwrap();
             }

-            OrchestratorMessage::CheckProgramCompleted {
+            OrchestratorMessage::CheckCompleted {
                 diagnostics,
                 revision,
             } => {

@@ -307,7 +300,7 @@ impl Orchestrator {
                     changes.extend(file_changes);
                 }

-                Ok(OrchestratorMessage::CheckProgramCompleted { .. }) => {
+                Ok(OrchestratorMessage::CheckCompleted { .. }) => {
                     // Disregard any outdated completion message.
                 }
                 Ok(OrchestratorMessage::Run) => unreachable!("The orchestrator is already running."),

@@ -321,7 +314,7 @@ impl Orchestrator {
                 default(std::time::Duration::from_millis(10)) => {
                     // No more file changes after 10 ms, send the changes and schedule a new analysis
                     self.sender.send(MainLoopMessage::ApplyChanges(changes)).unwrap();
-                    self.sender.send(MainLoopMessage::CheckProgram { revision: self.revision }).unwrap();
+                    self.sender.send(MainLoopMessage::CheckWorkspace { revision: self.revision }).unwrap();
                     return;
                 }
             }

@@ -337,7 +330,7 @@ impl Orchestrator {
 /// Message sent from the orchestrator to the main loop.
 #[derive(Debug)]
 enum MainLoopMessage {
-    CheckProgram { revision: usize },
+    CheckWorkspace { revision: usize },
     CheckCompleted(Vec<String>),
     ApplyChanges(Vec<FileWatcherChange>),
     Exit,

@@ -348,7 +341,7 @@ enum OrchestratorMessage {
     Run,
     Shutdown,

-    CheckProgramCompleted {
+    CheckCompleted {
         diagnostics: Vec<String>,
         revision: usize,
     },
crates/red_knot/src/program/check.rs (deleted)
@@ -1,32 +0,0 @@
-use ruff_db::files::File;
-use salsa::Cancelled;
-
-use crate::lint::{lint_semantic, lint_syntax, Diagnostics};
-use crate::program::Program;
-
-impl Program {
-    /// Checks all open files in the workspace and its dependencies.
-    #[tracing::instrument(level = "debug", skip_all)]
-    pub fn check(&self) -> Result<Vec<String>, Cancelled> {
-        self.with_db(|db| {
-            let mut result = Vec::new();
-            for open_file in db.workspace.open_files() {
-                result.extend_from_slice(&db.check_file_impl(open_file));
-            }
-
-            result
-        })
-    }
-
-    #[tracing::instrument(level = "debug", skip(self))]
-    pub fn check_file(&self, file: File) -> Result<Diagnostics, Cancelled> {
-        self.with_db(|db| db.check_file_impl(file))
-    }
-
-    fn check_file_impl(&self, file: File) -> Diagnostics {
-        let mut diagnostics = Vec::new();
-        diagnostics.extend_from_slice(lint_syntax(self, file));
-        diagnostics.extend_from_slice(lint_semantic(self, file));
-        Diagnostics::from(diagnostics)
-    }
-}
crates/red_knot/src/program/mod.rs (deleted)
@@ -1,153 +0,0 @@
-use std::panic::{AssertUnwindSafe, RefUnwindSafe};
-use std::sync::Arc;
-
-use salsa::{Cancelled, Database};
-
-use red_knot_module_resolver::{vendored_typeshed_stubs, Db as ResolverDb, Jar as ResolverJar};
-use red_knot_python_semantic::{Db as SemanticDb, Jar as SemanticJar};
-use ruff_db::files::{File, Files};
-use ruff_db::system::{System, SystemPathBuf};
-use ruff_db::vendored::VendoredFileSystem;
-use ruff_db::{Db as SourceDb, Jar as SourceJar, Upcast};
-
-use crate::db::{Db, Jar};
-use crate::Workspace;
-
-mod check;
-
-#[salsa::db(SourceJar, ResolverJar, SemanticJar, Jar)]
-pub struct Program {
-    storage: salsa::Storage<Program>,
-    files: Files,
-    system: Arc<dyn System + Send + Sync + RefUnwindSafe>,
-    workspace: Workspace,
-}
-
-impl Program {
-    pub fn new<S>(workspace: Workspace, system: S) -> Self
-    where
-        S: System + 'static + Send + Sync + RefUnwindSafe,
-    {
-        Self {
-            storage: salsa::Storage::default(),
-            files: Files::default(),
-            system: Arc::new(system),
-            workspace,
-        }
-    }
-
-    pub fn apply_changes<I>(&mut self, changes: I)
-    where
-        I: IntoIterator<Item = FileWatcherChange>,
-    {
-        for change in changes {
-            File::touch_path(self, &change.path);
-        }
-    }
-
-    pub fn workspace(&self) -> &Workspace {
-        &self.workspace
-    }
-
-    pub fn workspace_mut(&mut self) -> &mut Workspace {
-        &mut self.workspace
-    }
-
-    fn with_db<F, T>(&self, f: F) -> Result<T, Cancelled>
-    where
-        F: FnOnce(&Program) -> T + std::panic::UnwindSafe,
-    {
-        // The `AssertUnwindSafe` here looks scary, but is a consequence of Salsa's design.
-        // Salsa uses panics to implement cancellation and to recover from cycles. However, the Salsa
-        // storage isn't `UnwindSafe` or `RefUnwindSafe` because its dependencies `DashMap` and `parking_lot::*` aren't
-        // unwind safe.
-        //
-        // Having to use `AssertUnwindSafe` isn't as big as a deal as it might seem because
-        // the `UnwindSafe` and `RefUnwindSafe` traits are designed to catch logical bugs.
-        // They don't protect against [UB](https://internals.rust-lang.org/t/pre-rfc-deprecating-unwindsafe/15974).
-        // On top of that, `Cancelled` only catches specific Salsa-panics and propagates all other panics.
-        //
-        // That still leaves us with possible logical bugs in two sources:
-        // * In Salsa itself: This must be considered a bug in Salsa and needs fixing upstream.
-        //   Reviewing Salsa code specifically around unwind safety seems doable.
-        // * Our code: This is the main concern. Luckily, it only involves code that uses internal mutability
-        //   and calls into Salsa queries when mutating the internal state. Using `AssertUnwindSafe`
-        //   certainly makes it harder to catch these issues in our user code.
-        //
-        // For now, this is the only solution at hand unless Salsa decides to change its design.
-        // [Zulip support thread](https://salsa.zulipchat.com/#narrow/stream/145099-general/topic/How.20to.20use.20.60Cancelled.3A.3Acatch.60)
-        let db = &AssertUnwindSafe(self);
-        Cancelled::catch(|| f(db))
-    }
-}
-
-impl Upcast<dyn SemanticDb> for Program {
-    fn upcast(&self) -> &(dyn SemanticDb + 'static) {
-        self
-    }
-}
-
-impl Upcast<dyn SourceDb> for Program {
-    fn upcast(&self) -> &(dyn SourceDb + 'static) {
-        self
-    }
-}
-
-impl Upcast<dyn ResolverDb> for Program {
-    fn upcast(&self) -> &(dyn ResolverDb + 'static) {
-        self
-    }
-}
-
-impl ResolverDb for Program {}
-
-impl SemanticDb for Program {}
-
-impl SourceDb for Program {
-    fn vendored(&self) -> &VendoredFileSystem {
-        vendored_typeshed_stubs()
-    }
-
-    fn system(&self) -> &dyn System {
-        &*self.system
-    }
-
-    fn files(&self) -> &Files {
-        &self.files
-    }
-}
-
-impl Database for Program {}
-
-impl Db for Program {}
-
-impl salsa::ParallelDatabase for Program {
-    fn snapshot(&self) -> salsa::Snapshot<Self> {
-        salsa::Snapshot::new(Self {
-            storage: self.storage.snapshot(),
-            files: self.files.snapshot(),
-            system: self.system.clone(),
-            workspace: self.workspace.clone(),
-        })
-    }
-}
-
-#[derive(Clone, Debug)]
-pub struct FileWatcherChange {
-    path: SystemPathBuf,
-    #[allow(unused)]
-    kind: FileChangeKind,
-}
-
-impl FileWatcherChange {
-    pub fn new(path: SystemPathBuf, kind: FileChangeKind) -> Self {
-        Self { path, kind }
-    }
-}
-
-#[derive(Copy, Clone, Debug, Eq, PartialEq)]
-pub enum FileChangeKind {
-    Created,
-    Modified,
-    Deleted,
-}
crates/red_knot/src/target_version.rs
@@ -1,5 +1,3 @@
-use std::fmt;
-
 /// Enumeration of all supported Python versions
 ///
 /// TODO: unify with the `PythonVersion` enum in the linter/formatter crates?
@@ -15,36 +13,22 @@ pub enum TargetVersion {
     Py313,
 }

-impl TargetVersion {
-    const fn as_str(self) -> &'static str {
-        match self {
-            Self::Py37 => "py37",
-            Self::Py38 => "py38",
-            Self::Py39 => "py39",
-            Self::Py310 => "py310",
-            Self::Py311 => "py311",
-            Self::Py312 => "py312",
-            Self::Py313 => "py313",
-        }
-    }
-}
-
-impl fmt::Display for TargetVersion {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.write_str(self.as_str())
+impl std::fmt::Display for TargetVersion {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        ruff_db::program::TargetVersion::from(*self).fmt(f)
     }
 }

-impl From<TargetVersion> for red_knot_module_resolver::TargetVersion {
+impl From<TargetVersion> for ruff_db::program::TargetVersion {
     fn from(value: TargetVersion) -> Self {
         match value {
-            TargetVersion::Py37 => red_knot_module_resolver::TargetVersion::Py37,
-            TargetVersion::Py38 => red_knot_module_resolver::TargetVersion::Py38,
-            TargetVersion::Py39 => red_knot_module_resolver::TargetVersion::Py39,
-            TargetVersion::Py310 => red_knot_module_resolver::TargetVersion::Py310,
-            TargetVersion::Py311 => red_knot_module_resolver::TargetVersion::Py311,
-            TargetVersion::Py312 => red_knot_module_resolver::TargetVersion::Py312,
-            TargetVersion::Py313 => red_knot_module_resolver::TargetVersion::Py313,
+            TargetVersion::Py37 => Self::Py37,
+            TargetVersion::Py38 => Self::Py38,
+            TargetVersion::Py39 => Self::Py39,
+            TargetVersion::Py310 => Self::Py310,
+            TargetVersion::Py311 => Self::Py311,
+            TargetVersion::Py312 => Self::Py312,
+            TargetVersion::Py313 => Self::Py313,
         }
     }
 }
crates/red_knot/src/watch.rs
@@ -1,12 +1,10 @@
 use std::path::Path;

 use anyhow::Context;
-use notify::event::{CreateKind, RemoveKind};
+use notify::event::{CreateKind, ModifyKind, RemoveKind};
 use notify::{recommended_watcher, Event, EventKind, RecommendedWatcher, RecursiveMode, Watcher};

-use ruff_db::system::SystemPath;
-
-use crate::program::{FileChangeKind, FileWatcherChange};
+use ruff_db::system::{SystemPath, SystemPathBuf};

 pub struct FileWatcher {
     watcher: RecommendedWatcher,
@@ -35,12 +33,25 @@ impl FileWatcher {
     }

     fn from_handler(handler: Box<dyn EventHandler>) -> anyhow::Result<Self> {
-        let watcher = recommended_watcher(move |changes: notify::Result<Event>| {
-            match changes {
+        let watcher = recommended_watcher(move |event: notify::Result<Event>| {
+            match event {
                 Ok(event) => {
                     // TODO verify that this handles all events correctly
                     let change_kind = match event.kind {
                         EventKind::Create(CreateKind::File) => FileChangeKind::Created,
+                        EventKind::Modify(ModifyKind::Name(notify::event::RenameMode::From)) => {
+                            FileChangeKind::Deleted
+                        }
+                        EventKind::Modify(ModifyKind::Name(notify::event::RenameMode::To)) => {
+                            FileChangeKind::Created
+                        }
+                        EventKind::Modify(ModifyKind::Name(notify::event::RenameMode::Any)) => {
+                            // TODO Introduce a better catch-all event for cases that we don't understand.
+                            FileChangeKind::Created
+                        }
+                        EventKind::Modify(ModifyKind::Name(notify::event::RenameMode::Both)) => {
+                            todo!("Handle both create and delete events.");
+                        }
                         EventKind::Modify(_) => FileChangeKind::Modified,
                         EventKind::Remove(RemoveKind::File) => FileChangeKind::Deleted,
                         _ => {
@@ -51,13 +62,9 @@ impl FileWatcher {
                     let mut changes = Vec::new();

                     for path in event.paths {
-                        if path.is_file() {
-                            if let Some(fs_path) = SystemPath::from_std_path(&path) {
-                                changes.push(FileWatcherChange::new(
-                                    fs_path.to_path_buf(),
-                                    change_kind,
-                                ));
-                            }
+                        if let Some(fs_path) = SystemPath::from_std_path(&path) {
+                            changes
+                                .push(FileWatcherChange::new(fs_path.to_path_buf(), change_kind));
                         }
                     }

@@ -82,3 +89,23 @@ impl FileWatcher {
         Ok(())
     }
 }
+
+#[derive(Clone, Debug)]
+pub struct FileWatcherChange {
+    pub path: SystemPathBuf,
+    #[allow(unused)]
+    pub kind: FileChangeKind,
+}
+
+impl FileWatcherChange {
+    pub fn new(path: SystemPathBuf, kind: FileChangeKind) -> Self {
+        Self { path, kind }
+    }
+}
+
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+pub enum FileChangeKind {
+    Created,
+    Modified,
+    Deleted,
+}
crates/red_knot/src/workspace.rs (new file, 344 lines)

@@ -0,0 +1,344 @@
+// TODO: Fix clippy warnings created by salsa macros
+#![allow(clippy::used_underscore_binding)]
+
+use std::{collections::BTreeMap, sync::Arc};
+
+use rustc_hash::{FxBuildHasher, FxHashSet};
+
+pub use metadata::{PackageMetadata, WorkspaceMetadata};
+use ruff_db::{
+    files::{system_path_to_file, File},
+    system::{walk_directory::WalkState, SystemPath, SystemPathBuf},
+};
+use ruff_python_ast::{name::Name, PySourceType};
+
+use crate::{
+    db::Db,
+    lint::{lint_semantic, lint_syntax, Diagnostics},
+};
+
+mod metadata;
+
+/// The project workspace as a Salsa ingredient.
+///
+/// A workspace consists of one or multiple packages. Packages can be nested. A file in a workspace
+/// belongs to no or exactly one package (files can't belong to multiple packages).
+///
+/// How workspaces and packages are discovered is TBD. For now, a workspace can be any directory,
+/// and it always contains a single package which has the same root as the workspace.
+///
+/// ## Examples
+///
+/// ```text
+/// app-1/
+///     pyproject.toml
+///     src/
+///         ... python files
+///
+/// app-2/
+///     pyproject.toml
+///     src/
+///         ... python files
+///
+/// shared/
+///     pyproject.toml
+///     src/
+///         ... python files
+///
+/// pyproject.toml
+/// ```
+///
+/// The above project structure has three packages: `app-1`, `app-2`, and `shared`.
+/// Each of the packages can define their own settings in their `pyproject.toml` file, but
+/// they must be compatible. For example, each package can define a different `requires-python` range,
+/// but the ranges must overlap.
+///
+/// ## How is a workspace different from a program?
+/// There are two (related) motivations:
+///
+/// 1. `Program` is defined in `ruff_db` and it can't reference the settings types for the linter and formatter
+///    without introducing a cyclic dependency. The workspace is defined in a higher-level crate
+///    where it can reference these setting types.
+/// 2. Running `ruff check` with different target versions results in different programs (settings) but
+///    it remains the same workspace. That's why the program is a narrowed view of the workspace that only
+///    holds on to the most fundamental settings required for checking.
+#[salsa::input]
+pub struct Workspace {
+    #[id]
+    #[return_ref]
+    root_buf: SystemPathBuf,
+
+    /// The files that are open in the workspace.
+    ///
+    /// Setting the open files to a non-`None` value changes `check` to only check the
+    /// open files rather than all files in the workspace.
+    #[return_ref]
+    open_file_set: Option<Arc<FxHashSet<File>>>,
+
+    /// The (first-party) packages in this workspace.
+    #[return_ref]
+    package_tree: BTreeMap<SystemPathBuf, Package>,
+}
+
+/// A first-party package in a workspace.
+#[salsa::input]
+pub struct Package {
+    #[return_ref]
+    pub name: Name,
+
+    /// The path to the root directory of the package.
+    #[id]
+    #[return_ref]
+    root_buf: SystemPathBuf,
+
+    /// The files that are part of this package.
+    #[return_ref]
+    file_set: Arc<FxHashSet<File>>,
+    // TODO: Add the loaded settings.
+}
+
+impl Workspace {
+    /// Discovers the closest workspace at `path` and returns its metadata.
+    pub fn from_metadata(db: &dyn Db, metadata: WorkspaceMetadata) -> Self {
+        let mut packages = BTreeMap::new();
+
+        for package in metadata.packages {
+            packages.insert(package.root.clone(), Package::from_metadata(db, package));
+        }
+
+        Workspace::new(db, metadata.root, None, packages)
+    }
+
+    pub fn root(self, db: &dyn Db) -> &SystemPath {
+        self.root_buf(db)
+    }
+
+    pub fn packages(self, db: &dyn Db) -> impl Iterator<Item = Package> + '_ {
+        self.package_tree(db).values().copied()
+    }
+
+    pub fn reload(self, db: &mut dyn Db, metadata: WorkspaceMetadata) {
+        assert_eq!(self.root(db), metadata.root());
+
+        let mut old_packages = self.package_tree(db).clone();
+        let mut new_packages = BTreeMap::new();
+
+        for package_metadata in metadata.packages {
+            let path = package_metadata.root().to_path_buf();
+
+            let package = if let Some(old_package) = old_packages.remove(&path) {
+                old_package.update(db, package_metadata);
+                old_package
+            } else {
+                Package::from_metadata(db, package_metadata)
+            };
+
+            new_packages.insert(path, package);
+        }
+
+        self.set_package_tree(db).to(new_packages);
+    }
+
+    pub fn update_package(self, db: &mut dyn Db, metadata: PackageMetadata) -> anyhow::Result<()> {
+        let path = metadata.root().to_path_buf();
+
+        if let Some(package) = self.package_tree(db).get(&path).copied() {
+            package.update(db, metadata);
+            Ok(())
+        } else {
+            Err(anyhow::anyhow!("Package {path} not found"))
+        }
+    }
+
+    /// Returns the closest package to which the first-party `path` belongs.
+    ///
+    /// Returns `None` if the `path` is outside of any package or if `path` isn't a first-party file
+    /// (e.g. third-party dependencies or `excluded`).
+    pub fn package(self, db: &dyn Db, path: &SystemPath) -> Option<Package> {
+        let packages = self.package_tree(db);
+
+        let (package_path, package) = packages.range(..path.to_path_buf()).next_back()?;
+
+        if path.starts_with(package_path) {
+            Some(*package)
+        } else {
+            None
+        }
+    }
+
+    /// Checks all open files in the workspace and its dependencies.
+    #[tracing::instrument(level = "debug", skip_all)]
+    pub fn check(self, db: &dyn Db) -> Vec<String> {
+        let mut result = Vec::new();
+
+        if let Some(open_files) = self.open_files(db) {
+            for file in open_files {
+                result.extend_from_slice(&check_file(db, *file));
+            }
+        } else {
+            for package in self.packages(db) {
+                result.extend(package.check(db));
+            }
+        }
+
+        result
+    }
+
+    /// Opens a file in the workspace.
+    ///
+    /// This changes the behavior of `check` to only check the open files rather than all files in the workspace.
+    #[tracing::instrument(level = "debug", skip(self, db))]
+    pub fn open_file(self, db: &mut dyn Db, file: File) {
+        let mut open_files = self.take_open_files(db);
+        open_files.insert(file);
+        self.set_open_files(db, open_files);
+    }
+
+    /// Closes a file in the workspace.
+    #[tracing::instrument(level = "debug", skip(self, db))]
+    pub fn close_file(self, db: &mut dyn Db, file: File) -> bool {
+        let mut open_files = self.take_open_files(db);
+        let removed = open_files.remove(&file);
+
+        if removed {
+            self.set_open_files(db, open_files);
+        }
+
+        removed
+    }
+
+    /// Returns the open files in the workspace or `None` if the entire workspace should be checked.
+    pub fn open_files(self, db: &dyn Db) -> Option<&FxHashSet<File>> {
+        self.open_file_set(db).as_deref()
+    }
+
+    /// Sets the open files in the workspace.
+    ///
+    /// This changes the behavior of `check` to only check the open files rather than all files in the workspace.
+    #[tracing::instrument(level = "debug", skip(self, db))]
+    pub fn set_open_files(self, db: &mut dyn Db, open_files: FxHashSet<File>) {
+        self.set_open_file_set(db).to(Some(Arc::new(open_files)));
+    }
+
+    /// Takes the open files from the workspace and returns them.
+    ///
+    /// This changes the behavior of `check` to check all files in the workspace instead of just the open files.
+    pub fn take_open_files(self, db: &mut dyn Db) -> FxHashSet<File> {
+        let open_files = self.open_file_set(db).clone();
+
+        if let Some(open_files) = open_files {
+            // Salsa will cancel any pending queries and remove its own reference to `open_files`
+            // so that the reference counter to `open_files` now drops to 1.
+            self.set_open_file_set(db).to(None);
+
+            Arc::try_unwrap(open_files).unwrap()
+        } else {
+            FxHashSet::default()
+        }
+    }
+}
+
+impl Package {
+    pub fn root(self, db: &dyn Db) -> &SystemPath {
+        self.root_buf(db)
+    }
+
+    /// Returns `true` if `file` is a first-party file that is part of this package.
+    pub fn contains_file(self, db: &dyn Db, file: File) -> bool {
+        self.files(db).contains(&file)
+    }
+
+    pub fn files(self, db: &dyn Db) -> &FxHashSet<File> {
+        self.file_set(db)
+    }
+
+    pub fn remove_file(self, db: &mut dyn Db, file: File) -> bool {
+        let mut files_arc = self.file_set(db).clone();
+
+        // Set a dummy value. Salsa will cancel any pending queries and remove its own reference to `files`
+        // so that the reference counter to `files` now drops to 1.
+        self.set_file_set(db).to(Arc::new(FxHashSet::default()));
+
+        let files = Arc::get_mut(&mut files_arc).unwrap();
+        let removed = files.remove(&file);
+        self.set_file_set(db).to(files_arc);
+
+        removed
+    }
+
+    pub(crate) fn check(self, db: &dyn Db) -> Vec<String> {
+        let mut result = Vec::new();
+        for file in self.files(db) {
+            let diagnostics = check_file(db, *file);
+            result.extend_from_slice(&diagnostics);
+        }
+
+        result
+    }
+
+    fn from_metadata(db: &dyn Db, metadata: PackageMetadata) -> Self {
+        let files = discover_package_files(db, metadata.root());
+
+        Self::new(db, metadata.name, metadata.root, Arc::new(files))
+    }
+
+    fn update(self, db: &mut dyn Db, metadata: PackageMetadata) {
+        let root = self.root(db);
+        assert_eq!(root, metadata.root());
+
+        let files = discover_package_files(db, root);
+
+        self.set_name(db).to(metadata.name);
+        self.set_file_set(db).to(Arc::new(files));
+    }
+}
+
+pub(super) fn check_file(db: &dyn Db, file: File) -> Diagnostics {
+    let mut diagnostics = Vec::new();
+    diagnostics.extend_from_slice(lint_syntax(db, file));
+    diagnostics.extend_from_slice(lint_semantic(db, file));
+    Diagnostics::from(diagnostics)
+}
+
+fn discover_package_files(db: &dyn Db, path: &SystemPath) -> FxHashSet<File> {
+    let paths = std::sync::Mutex::new(Vec::new());
+
+    db.system().walk_directory(path).run(|| {
+        Box::new(|entry| {
+            match entry {
+                Ok(entry) => {
+                    // Skip over any non-Python files to avoid creating too many entries in `Files`.
+                    if entry.file_type().is_file()
+                        && entry
+                            .path()
+                            .extension()
+                            .and_then(PySourceType::try_from_extension)
+                            .is_some()
+                    {
+                        let mut paths = paths.lock().unwrap();
+                        paths.push(entry.into_path());
+                    }
+                }
+                Err(error) => {
+                    // TODO Handle error
+                    tracing::error!("Failed to walk path: {error}");
+                }
+            }
+
+            WalkState::Continue
+        })
+    });
+
+    let paths = paths.into_inner().unwrap();
+    let mut files = FxHashSet::with_capacity_and_hasher(paths.len(), FxBuildHasher);
+
+    for path in paths {
+        // If this returns `None`, then the file was deleted between the `walk_directory` call and now.
+        // We can ignore this.
+        if let Some(file) = system_path_to_file(db.upcast(), &path) {
+            files.insert(file);
+        }
+    }
+
+    files
+}
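`Workspace::package` above resolves a path to its owning package with a `BTreeMap` range query: take the greatest key that sorts before the path, then verify that it is actually an ancestor. A toy sketch of the same lookup over plain strings; the trailing slashes stand in for the proper path-component comparison that `SystemPath` provides in the real code:

    use std::collections::BTreeMap;

    /// Returns the value of the closest registered ancestor directory of `path`,
    /// using the same `range(..).next_back()` trick as `Workspace::package`.
    /// Note that `range(..path)` is an exclusive bound, so an exact key match
    /// is not returned; keys here are directory prefixes, so that is fine.
    fn closest_package<'a>(packages: &'a BTreeMap<String, &'a str>, path: &str) -> Option<&'a str> {
        let (root, name) = packages.range(..path.to_string()).next_back()?;
        // `next_back` only guarantees `root <= path` lexicographically;
        // the prefix check ensures `path` really lives under `root`.
        path.starts_with(root).then_some(*name)
    }

    fn main() {
        let mut packages = BTreeMap::new();
        packages.insert("/ws/app-1/".to_string(), "app-1");
        packages.insert("/ws/shared/".to_string(), "shared");

        assert_eq!(closest_package(&packages, "/ws/app-1/src/main.py"), Some("app-1"));
        assert_eq!(closest_package(&packages, "/ws/docs/readme.md"), None);
    }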
crates/red_knot/src/workspace/metadata.rs (new file, 68 lines)

@@ -0,0 +1,68 @@
+use ruff_db::system::{System, SystemPath, SystemPathBuf};
+use ruff_python_ast::name::Name;
+
+#[derive(Debug)]
+pub struct WorkspaceMetadata {
+    pub(super) root: SystemPathBuf,
+
+    /// The (first-party) packages in this workspace.
+    pub(super) packages: Vec<PackageMetadata>,
+}
+
+/// A first-party package in a workspace.
+#[derive(Debug)]
+pub struct PackageMetadata {
+    pub(super) name: Name,
+
+    /// The path to the root directory of the package.
+    pub(super) root: SystemPathBuf,
+    // TODO: Add the loaded package configuration (not the nested ruff settings)
+}
+
+impl WorkspaceMetadata {
+    /// Discovers the closest workspace at `path` and returns its metadata.
+    pub fn from_path(path: &SystemPath, system: &dyn System) -> anyhow::Result<WorkspaceMetadata> {
+        let root = if system.is_file(path) {
+            path.parent().unwrap().to_path_buf()
+        } else {
+            path.to_path_buf()
+        };
+
+        if !system.is_directory(&root) {
+            anyhow::bail!("no workspace found at {:?}", root);
+        }
+
+        // TODO: Discover package name from `pyproject.toml`.
+        let package_name: Name = path.file_name().unwrap_or("<root>").into();
+
+        let package = PackageMetadata {
+            name: package_name,
+            root: root.clone(),
+        };
+
+        let workspace = WorkspaceMetadata {
+            root,
+            packages: vec![package],
+        };
+
+        Ok(workspace)
+    }
+
+    pub fn root(&self) -> &SystemPath {
+        &self.root
+    }
+
+    pub fn packages(&self) -> &[PackageMetadata] {
+        &self.packages
+    }
+}
+
+impl PackageMetadata {
+    pub fn name(&self) -> &Name {
+        &self.name
+    }
+
+    pub fn root(&self) -> &SystemPath {
+        &self.root
+    }
+}
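For a sense of how this metadata is consumed, here is a small usage sketch assembled from the hunks above (`main.rs` in this commit performs the same sequence); treat the exact signatures as assumptions read off this diff rather than a documented API:

    use red_knot::workspace::WorkspaceMetadata;
    use ruff_db::system::{OsSystem, System, SystemPathBuf};

    fn main() -> anyhow::Result<()> {
        // Mirror main.rs: resolve the current directory into a `SystemPathBuf`.
        let cwd = SystemPathBuf::from_path_buf(std::env::current_dir()?)
            .map_err(|path| anyhow::anyhow!("current directory {path:?} is not valid UTF-8"))?;
        let system = OsSystem::new(cwd);

        // Discover the closest workspace; today this yields a single package
        // rooted at the workspace root (see `WorkspaceMetadata::from_path`).
        let metadata = WorkspaceMetadata::from_path(system.current_directory(), &system)?;

        for package in metadata.packages() {
            println!("{package:?}");
        }
        Ok(())
    }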
crates/red_knot_module_resolver/src/db.rs
@@ -1,16 +1,15 @@
 use ruff_db::Upcast;

 use crate::resolver::{
-    editable_install_resolution_paths, file_to_module,
-    internal::{ModuleNameIngredient, ModuleResolverSettings},
-    resolve_module_query,
+    editable_install_resolution_paths, file_to_module, internal::ModuleNameIngredient,
+    module_resolution_settings, resolve_module_query,
 };
 use crate::typeshed::parse_typeshed_versions;

 #[salsa::jar(db=Db)]
 pub struct Jar(
     ModuleNameIngredient<'_>,
-    ModuleResolverSettings,
+    module_resolution_settings,
     editable_install_resolution_paths,
     resolve_module_query,
     file_to_module,
crates/red_knot_module_resolver/src/lib.rs
@@ -4,7 +4,6 @@ mod module_name;
 mod path;
 mod resolver;
 mod state;
-mod supported_py_version;
 mod typeshed;

 #[cfg(test)]
@@ -13,8 +12,7 @@ mod testing;
 pub use db::{Db, Jar};
 pub use module::{Module, ModuleKind};
 pub use module_name::ModuleName;
-pub use resolver::{resolve_module, set_module_resolution_settings, RawModuleResolutionSettings};
-pub use supported_py_version::TargetVersion;
+pub use resolver::resolve_module;
 pub use typeshed::{
     vendored_typeshed_stubs, TypeshedVersionsParseError, TypeshedVersionsParseErrorKind,
 };
crates/red_knot_module_resolver/src/path.rs
@@ -233,6 +233,10 @@ impl ModuleResolutionPathBuf {
         ModuleResolutionPathRef::from(self).is_directory(search_path, resolver)
     }

+    pub(crate) fn is_site_packages(&self) -> bool {
+        matches!(self.0, ModuleResolutionPathBufInner::SitePackages(_))
+    }
+
     #[must_use]
     pub(crate) fn with_pyi_extension(&self) -> Self {
         ModuleResolutionPathRef::from(self).with_pyi_extension()
@@ -724,9 +728,9 @@ impl PartialEq<ModuleResolutionPathRef<'_>> for VendoredPathBuf {
 #[cfg(test)]
 mod tests {
     use insta::assert_debug_snapshot;
+    use ruff_db::program::TargetVersion;

     use crate::db::tests::TestDb;
-    use crate::supported_py_version::TargetVersion;
     use crate::testing::{FileSpec, MockedTypeshed, TestCase, TestCaseBuilder};

     use super::*;
@ -1,109 +1,21 @@
|
|||
use std::collections;
|
||||
use std::hash::BuildHasherDefault;
|
||||
use std::borrow::Cow;
|
||||
use std::iter::FusedIterator;
|
||||
use std::sync::Arc;
|
||||
|
||||
use rustc_hash::FxHasher;
|
||||
use rustc_hash::{FxBuildHasher, FxHashSet};
|
||||
|
||||
use ruff_db::files::{File, FilePath};
|
||||
use ruff_db::program::{Program, SearchPathSettings, TargetVersion};
|
||||
use ruff_db::system::{DirectoryEntry, System, SystemPath, SystemPathBuf};
|
||||
|
||||
use crate::db::Db;
|
||||
use crate::module::{Module, ModuleKind};
|
||||
use crate::module_name::ModuleName;
|
||||
use crate::path::ModuleResolutionPathBuf;
|
||||
use crate::resolver::internal::ModuleResolverSettings;
|
||||
use crate::state::ResolverState;
|
||||
use crate::supported_py_version::TargetVersion;
|
||||
|
||||
type SearchPathRoot = Arc<ModuleResolutionPathBuf>;
|
||||
|
||||
/// An ordered sequence of search paths.
|
||||
///
|
||||
/// The sequence respects the invariant maintained by [`sys.path` at runtime]
|
||||
/// where no two module-resolution paths ever point to the same directory on disk.
|
||||
/// (Paths may, however, *overlap* -- e.g. you could have both `src/` and `src/foo`
|
||||
/// as module resolution paths simultaneously.)
|
||||
///
|
||||
/// [`sys.path` at runtime]: https://docs.python.org/3/library/site.html#module-site
|
||||
#[derive(Debug, PartialEq, Eq, Default, Clone)]
|
||||
pub(crate) struct SearchPathSequence {
|
||||
raw_paths: collections::HashSet<SystemPathBuf, BuildHasherDefault<FxHasher>>,
|
||||
search_paths: Vec<SearchPathRoot>,
|
||||
}
|
||||
|
||||
impl SearchPathSequence {
|
||||
fn insert(&mut self, path: SearchPathRoot) -> bool {
|
||||
// Just assume that all search paths that aren't SystemPaths are unique
|
||||
if let Some(fs_path) = path.as_system_path() {
|
||||
if self.raw_paths.contains(fs_path) {
|
||||
false
|
||||
} else {
|
||||
let raw_path = fs_path.to_owned();
|
||||
self.search_paths.push(path);
|
||||
self.raw_paths.insert(raw_path)
|
||||
}
|
||||
} else {
|
||||
self.search_paths.push(path);
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
fn contains(&self, path: &SearchPathRoot) -> bool {
|
||||
if let Some(fs_path) = path.as_system_path() {
|
||||
self.raw_paths.contains(fs_path)
|
||||
} else {
|
||||
self.search_paths.contains(path)
|
||||
}
|
||||
}
|
||||
|
||||
fn iter(&self) -> std::slice::Iter<SearchPathRoot> {
|
||||
self.search_paths.iter()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> IntoIterator for &'a SearchPathSequence {
|
||||
type IntoIter = std::slice::Iter<'a, SearchPathRoot>;
|
||||
type Item = &'a SearchPathRoot;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
self.iter()
|
||||
}
|
||||
}
|
||||
|
||||
impl FromIterator<SearchPathRoot> for SearchPathSequence {
|
||||
fn from_iter<T: IntoIterator<Item = SearchPathRoot>>(iter: T) -> Self {
|
||||
let mut sequence = Self::default();
|
||||
for item in iter {
|
||||
sequence.insert(item);
|
||||
}
|
||||
sequence
|
||||
}
|
||||
}
|
||||
|
||||
impl Extend<SearchPathRoot> for SearchPathSequence {
|
||||
fn extend<T: IntoIterator<Item = SearchPathRoot>>(&mut self, iter: T) {
|
||||
for item in iter {
|
||||
self.insert(item);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Configures the module resolver settings.
|
||||
///
|
||||
/// Must be called before calling any other module resolution functions.
|
||||
pub fn set_module_resolution_settings(db: &mut dyn Db, config: RawModuleResolutionSettings) {
|
||||
// There's no concurrency issue here because we hold a `&mut dyn Db` reference. No other
|
||||
// thread can mutate the `Db` while we're in this call, so using `try_get` to test if
|
||||
// the settings have already been set is safe.
|
||||
let resolved_settings = config.into_configuration_settings(db.system().current_directory());
|
||||
if let Some(existing) = ModuleResolverSettings::try_get(db) {
|
||||
existing.set_settings(db).to(resolved_settings);
|
||||
} else {
|
||||
ModuleResolverSettings::new(db, resolved_settings);
|
||||
}
|
||||
}
|
||||
|
||||
/// Resolves a module name to a module.
|
||||
pub fn resolve_module(db: &dyn Db, module_name: ModuleName) -> Option<Module> {
|
||||
let interned_name = internal::ModuleNameIngredient::new(db, module_name);
|
||||
|
@ -157,9 +69,9 @@ pub(crate) fn file_to_module(db: &dyn Db, file: File) -> Option<Module> {
|
|||
|
||||
let path = file.path(db.upcast());
|
||||
|
||||
let resolver_settings = module_resolver_settings(db);
|
||||
let settings = module_resolution_settings(db);
|
||||
|
||||
let mut search_paths = resolver_settings.search_paths(db);
|
||||
let mut search_paths = settings.search_paths(db);
|
||||
|
||||
let module_name = loop {
|
||||
let candidate = search_paths.next()?;
|
||||
|
@ -188,129 +100,110 @@ pub(crate) fn file_to_module(db: &dyn Db, file: File) -> Option<Module> {
|
|||
}
|
||||
}
|
||||
|
||||
/// "Raw" configuration settings for module resolution: unvalidated, unnormalized
|
||||
#[derive(Eq, PartialEq, Debug)]
|
||||
pub struct RawModuleResolutionSettings {
|
||||
/// The target Python version the user has specified
|
||||
pub target_version: TargetVersion,
|
||||
/// Validate and normalize the raw settings given by the user
|
||||
/// into settings we can use for module resolution
|
||||
///
|
||||
/// This method also implements the typing spec's [module resolution order].
|
||||
///
|
||||
/// TODO(Alex): this method does multiple `.unwrap()` calls when it should really return an error.
|
||||
/// Each `.unwrap()` call is a point where we're validating a setting that the user would pass
|
||||
/// and transforming it into an internal representation for a validated path.
|
||||
/// Rather than panicking if a path fails to validate, we should display an error message to the user
|
||||
/// and exit the process with a nonzero exit code.
|
||||
/// This validation should probably be done outside of Salsa?
|
||||
///
|
||||
/// [module resolution order]: https://typing.readthedocs.io/en/latest/spec/distributing.html#import-resolution-ordering
|
||||
#[salsa::tracked(return_ref)]
|
||||
pub(crate) fn module_resolution_settings(db: &dyn Db) -> ModuleResolutionSettings {
|
||||
let program = Program::get(db.upcast());
|
||||
|
||||
/// List of user-provided paths that should take first priority in the module resolution.
|
||||
/// Examples in other type checkers are mypy's MYPYPATH environment variable,
|
||||
/// or pyright's stubPath configuration setting.
|
||||
pub extra_paths: Vec<SystemPathBuf>,
|
||||
let SearchPathSettings {
|
||||
extra_paths,
|
||||
workspace_root,
|
||||
        custom_typeshed,
        site_packages,
    } = program.search_paths(db.upcast());

    /// The root of the workspace, used for finding first-party modules.
    pub workspace_root: SystemPathBuf,
    if let Some(custom_typeshed) = custom_typeshed {
        tracing::debug!("Custom typeshed directory: {custom_typeshed}");
    }

    /// Optional (already validated) path to standard-library typeshed stubs.
    /// If this is not provided, we will fallback to our vendored typeshed stubs
    /// bundled as a zip file in the binary
    pub custom_typeshed: Option<SystemPathBuf>,
    if !extra_paths.is_empty() {
        tracing::debug!("extra search paths: {extra_paths:?}");
    }

    /// The path to the user's `site-packages` directory, where third-party packages from ``PyPI`` are installed.
    pub site_packages: Option<SystemPathBuf>,
}
    let current_directory = db.system().current_directory();

impl RawModuleResolutionSettings {
    /// Validate and normalize the raw settings given by the user
    /// into settings we can use for module resolution
    ///
    /// This method also implements the typing spec's [module resolution order].
    ///
    /// TODO(Alex): this method does multiple `.unwrap()` calls when it should really return an error.
    /// Each `.unwrap()` call is a point where we're validating a setting that the user would pass
    /// and transforming it into an internal representation for a validated path.
    /// Rather than panicking if a path fails to validate, we should display an error message to the user
    /// and exit the process with a nonzero exit code.
    /// This validation should probably be done outside of Salsa?
    ///
    /// [module resolution order]: https://typing.readthedocs.io/en/latest/spec/distributing.html#import-resolution-ordering
    fn into_configuration_settings(
        self,
        current_directory: &SystemPath,
    ) -> ModuleResolutionSettings {
        let RawModuleResolutionSettings {
            target_version,
            extra_paths,
            workspace_root,
            site_packages: site_packages_setting,
            custom_typeshed,
        } = self;

        let mut static_search_paths: SearchPathSequence = extra_paths
            .into_iter()
            .map(|fs_path| {
                Arc::new(
                    ModuleResolutionPathBuf::extra(SystemPath::absolute(
                        fs_path,
                        current_directory,
                    ))
    let mut static_search_paths: Vec<_> = extra_paths
        .iter()
        .map(|fs_path| {
            Arc::new(
                ModuleResolutionPathBuf::extra(SystemPath::absolute(fs_path, current_directory))
                    .unwrap(),
            )
        })
        .collect();
                )
            })
            .collect();

        static_search_paths.insert(Arc::new(
            ModuleResolutionPathBuf::first_party(SystemPath::absolute(
                workspace_root,
    static_search_paths.push(Arc::new(
        ModuleResolutionPathBuf::first_party(SystemPath::absolute(
            workspace_root,
            current_directory,
        ))
        .unwrap(),
    ));

    static_search_paths.push(Arc::new(custom_typeshed.as_ref().map_or_else(
        ModuleResolutionPathBuf::vendored_stdlib,
        |custom| {
            ModuleResolutionPathBuf::stdlib_from_custom_typeshed_root(&SystemPath::absolute(
                custom,
                current_directory,
            ))
            .unwrap(),
        ));
            .unwrap()
        },
    )));

        static_search_paths.insert(Arc::new(custom_typeshed.map_or_else(
            ModuleResolutionPathBuf::vendored_stdlib,
            |custom| {
                ModuleResolutionPathBuf::stdlib_from_custom_typeshed_root(&SystemPath::absolute(
                    custom,
                    current_directory,
                ))
                .unwrap()
            },
        )));

    let mut site_packages = None;

        if let Some(path) = site_packages_setting {
            let site_packages_root = Arc::new(
                ModuleResolutionPathBuf::site_packages(SystemPath::absolute(
                    path,
                    current_directory,
                ))
    if let Some(path) = site_packages {
        let site_packages_root = Arc::new(
            ModuleResolutionPathBuf::site_packages(SystemPath::absolute(path, current_directory))
                .unwrap(),
        );
        site_packages = Some(site_packages_root.clone());
        static_search_paths.insert(site_packages_root);
    }

    // TODO vendor typeshed's third-party stubs as well as the stdlib and fallback to them as a final step

        ModuleResolutionSettings {
            target_version,
            search_path_settings: ValidatedSearchPathSettings {
                static_search_paths,
                site_packages,
            },
        }
        );
        static_search_paths.push(site_packages_root);
    }
}

#[derive(Debug, PartialEq, Eq, Clone)]
struct ValidatedSearchPathSettings {
    /// Search paths that have been statically determined purely from reading Ruff's configuration settings.
    /// These shouldn't ever change unless the config settings themselves change.
    ///
    /// Note that `site-packages` *is included* as a search path in this sequence,
    /// but it is also stored separately so that we're able to find editable installs later.
    static_search_paths: SearchPathSequence,
    site_packages: Option<SearchPathRoot>,
    // TODO vendor typeshed's third-party stubs as well as the stdlib and fallback to them as a final step

    let target_version = program.target_version(db.upcast());
    tracing::debug!("Target version: {target_version}");

    // Filter out module resolution paths that point to the same directory on disk (the same invariant maintained by [`sys.path` at runtime]).
    // (Paths may, however, *overlap* -- e.g. you could have both `src/` and `src/foo`
    // as module resolution paths simultaneously.)
    //
    // [`sys.path` at runtime]: https://docs.python.org/3/library/site.html#module-site
    // This code doesn't use an `IndexSet` because the key is the system path and not the search root.
    let mut seen_paths =
        FxHashSet::with_capacity_and_hasher(static_search_paths.len(), FxBuildHasher);

    static_search_paths.retain(|path| {
        if let Some(path) = path.as_system_path() {
            seen_paths.insert(path.to_path_buf())
        } else {
            true
        }
    });

    ModuleResolutionSettings {
        target_version,
        static_search_paths,
    }
}
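
The duplicate filtering above relies on `HashSet::insert` returning `false` when a value was already present, so `retain` keeps only the first search path for any given directory. A minimal standalone sketch of that invariant, using `std::collections::HashSet` in place of `FxHashSet` and plain strings in place of search roots (illustrative only, not part of this change):

    use std::collections::HashSet;

    fn main() {
        // Search roots, possibly naming the same directory more than once.
        let mut search_paths = vec!["/src", "/stdlib", "/src", "/site-packages"];

        let mut seen = HashSet::with_capacity(search_paths.len());
        // Keeps the first occurrence of each path: `insert` returns
        // `false` for a path that has already been seen.
        search_paths.retain(|path| seen.insert(*path));

        assert_eq!(search_paths, ["/src", "/stdlib", "/site-packages"]);
    }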

/// Collect all dynamic search paths:
/// search paths listed in `.pth` files in the `site-packages` directory
/// due to editable installations of third-party packages.
#[salsa::tracked(return_ref)]
pub(crate) fn editable_install_resolution_paths(db: &dyn Db) -> SearchPathSequence {
pub(crate) fn editable_install_resolution_paths(db: &dyn Db) -> Vec<Arc<ModuleResolutionPathBuf>> {
    // This query needs to be re-executed each time a `.pth` file
    // is added, modified or removed from the `site-packages` directory.
    // However, we don't use Salsa queries to read the source text of `.pth` files;

@@ -324,12 +217,12 @@ pub(crate) fn editable_install_resolution_paths(db: &dyn Db) -> SearchPathSequen
    // more principled!
    db.report_untracked_read();

    let ValidatedSearchPathSettings {
        static_search_paths,
        site_packages,
    } = &module_resolver_settings(db).search_path_settings;
    let static_search_paths = &module_resolution_settings(db).static_search_paths;
    let site_packages = static_search_paths
        .iter()
        .find(|path| path.is_site_packages());

    let mut dynamic_paths = SearchPathSequence::default();
    let mut dynamic_paths = Vec::default();

    if let Some(site_packages) = site_packages {
        let site_packages = site_packages

@@ -352,18 +245,25 @@ pub(crate) fn editable_install_resolution_paths(db: &dyn Db) -> SearchPathSequen
        let mut all_pth_files: Vec<PthFile> = pth_file_iterator.collect();
        all_pth_files.sort_by(|a, b| a.path.cmp(&b.path));

        let mut existing_paths: FxHashSet<_> = static_search_paths
            .iter()
            .filter_map(|path| path.as_system_path())
            .map(Cow::Borrowed)
            .collect();

        dynamic_paths.reserve(all_pth_files.len());

        for pth_file in &all_pth_files {
            dynamic_paths.extend(
                pth_file
                    .editable_installations()
                    .filter_map(|editable_path| {
                        let possible_search_path = Arc::new(editable_path);
                        (!static_search_paths.contains(&possible_search_path))
                            .then_some(possible_search_path)
                    }),
            );
            for installation in pth_file.editable_installations() {
                if existing_paths.insert(Cow::Owned(
                    installation.as_system_path().unwrap().to_path_buf(),
                )) {
                    dynamic_paths.push(Arc::new(installation));
                }
            }
        }
    }

    dynamic_paths
}
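
For background on what `editable_installations()` yields: following the rules of CPython's `site` module, every non-empty line of a `.pth` file that is neither a comment nor an `import` statement is treated as a path relative to `site-packages`. A hedged sketch of that filtering (the names here are illustrative, not this crate's API):

    use std::path::{Path, PathBuf};

    fn editable_installs(pth_contents: &str, site_packages: &Path) -> Vec<PathBuf> {
        pth_contents
            .lines()
            .filter(|line| !line.trim().is_empty() && !line.starts_with('#'))
            // Lines starting with `import ` (or `import\t`) are executed by
            // CPython at startup rather than added to `sys.path`; skip them.
            .filter(|line| !line.starts_with("import ") && !line.starts_with("import\t"))
            .map(|line| site_packages.join(line))
            .collect()
    }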

@@ -392,7 +292,7 @@ impl<'db> Iterator for SearchPathIterator<'db> {

        static_paths.next().or_else(|| {
            dynamic_paths
                .get_or_insert_with(|| editable_install_resolution_paths(*db).into_iter())
                .get_or_insert_with(|| editable_install_resolution_paths(*db).iter())
                .next()
        })
    }
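
The iterator above chains the cheap static paths with the Salsa-computed dynamic ones, deferring the `editable_install_resolution_paths` query until the static paths are exhausted. A reduced sketch of the same pattern with plain integers (assumptions: no Salsa; a `Vec` stands in for the query result):

    struct Chained<'a> {
        static_items: std::slice::Iter<'a, u32>,
        dynamic: Option<std::vec::IntoIter<u32>>,
    }

    impl Iterator for Chained<'_> {
        type Item = u32;

        fn next(&mut self) -> Option<u32> {
            self.static_items.next().copied().or_else(|| {
                self.dynamic
                    // Computed at most once, on first use.
                    .get_or_insert_with(|| vec![4, 5].into_iter())
                    .next()
            })
        }
    }

    fn main() {
        let static_items = [1_u32, 2, 3];
        let it = Chained { static_items: static_items.iter(), dynamic: None };
        assert_eq!(it.collect::<Vec<_>>(), [1, 2, 3, 4, 5]);
    }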

@@ -501,8 +401,13 @@ impl<'db> Iterator for PthFileIterator<'db> {
/// Validated and normalized module-resolution settings.
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) struct ModuleResolutionSettings {
    search_path_settings: ValidatedSearchPathSettings,
    target_version: TargetVersion,
    /// Search paths that have been statically determined purely from reading Ruff's configuration settings.
    /// These shouldn't ever change unless the config settings themselves change.
    ///
    /// Note that `site-packages` *is included* as a search path in this sequence,
    /// but it is also stored separately so that we're able to find editable installs later.
    static_search_paths: Vec<SearchPathRoot>,
}

impl ModuleResolutionSettings {

@@ -513,7 +418,7 @@ impl ModuleResolutionSettings {
    fn search_paths<'db>(&'db self, db: &'db dyn Db) -> SearchPathIterator<'db> {
        SearchPathIterator {
            db,
            static_paths: self.search_path_settings.static_search_paths.iter(),
            static_paths: self.static_search_paths.iter(),
            dynamic_paths: None,
        }
    }

@@ -526,13 +431,6 @@ impl ModuleResolutionSettings {
#[allow(unreachable_pub, clippy::used_underscore_binding)]
pub(crate) mod internal {
    use crate::module_name::ModuleName;
    use crate::resolver::ModuleResolutionSettings;

    #[salsa::input(singleton)]
    pub(crate) struct ModuleResolverSettings {
        #[return_ref]
        pub(super) settings: ModuleResolutionSettings,
    }

    /// A thin wrapper around `ModuleName` to make it a Salsa ingredient.
    ///

@@ -544,17 +442,13 @@ pub(crate) mod internal {
    }
}

fn module_resolver_settings(db: &dyn Db) -> &ModuleResolutionSettings {
    ModuleResolverSettings::get(db).settings(db)
}

/// Given a module name and a list of search paths in which to lookup modules,
/// attempt to resolve the module name
fn resolve_name(
    db: &dyn Db,
    name: &ModuleName,
) -> Option<(Arc<ModuleResolutionPathBuf>, File, ModuleKind)> {
    let resolver_settings = module_resolver_settings(db);
    let resolver_settings = module_resolution_settings(db);
    let resolver_state = ResolverState::new(db, resolver_settings.target_version());

    for search_path in resolver_settings.search_paths(db) {

@@ -1190,6 +1084,8 @@ mod tests {
    #[test]
    #[cfg(target_family = "unix")]
    fn symlink() -> anyhow::Result<()> {
        use ruff_db::program::Program;

        let mut db = TestDb::new();

        let temp_dir = tempfile::tempdir()?;

@@ -1210,15 +1106,14 @@
        std::fs::write(foo.as_std_path(), "")?;
        std::os::unix::fs::symlink(foo.as_std_path(), bar.as_std_path())?;

        let settings = RawModuleResolutionSettings {
            target_version: TargetVersion::Py38,
        let search_paths = SearchPathSettings {
            extra_paths: vec![],
            workspace_root: src.clone(),
            site_packages: Some(site_packages.clone()),
            custom_typeshed: Some(custom_typeshed.clone()),
            site_packages: Some(site_packages.clone()),
        };

        set_module_resolution_settings(&mut db, settings);
        Program::new(&db, TargetVersion::Py38, search_paths);

        let foo_module = resolve_module(&db, ModuleName::new_static("foo").unwrap()).unwrap();
        let bar_module = resolve_module(&db, ModuleName::new_static("bar").unwrap()).unwrap();

@@ -1698,7 +1593,7 @@ not_a_directory
        .build();

    let search_paths: Vec<&SearchPathRoot> =
        module_resolver_settings(&db).search_paths(&db).collect();
        module_resolution_settings(&db).search_paths(&db).collect();

    assert!(search_paths.contains(&&Arc::new(
        ModuleResolutionPathBuf::first_party("/src").unwrap()
@@ -1,8 +1,8 @@
use ruff_db::program::TargetVersion;
use ruff_db::system::System;
use ruff_db::vendored::VendoredFileSystem;

use crate::db::Db;
use crate::supported_py_version::TargetVersion;
use crate::typeshed::LazyTypeshedVersions;

pub(crate) struct ResolverState<'db> {
@ -1,14 +0,0 @@
|
|||
/// Enumeration of all supported Python versions
|
||||
///
|
||||
/// TODO: unify with the `PythonVersion` enum in the linter/formatter crates?
|
||||
#[derive(Copy, Clone, Hash, Debug, PartialEq, Eq, PartialOrd, Ord, Default)]
|
||||
pub enum TargetVersion {
|
||||
Py37,
|
||||
#[default]
|
||||
Py38,
|
||||
Py39,
|
||||
Py310,
|
||||
Py311,
|
||||
Py312,
|
||||
Py313,
|
||||
}
|
|
@@ -1,9 +1,8 @@
use ruff_db::program::{Program, SearchPathSettings, TargetVersion};
use ruff_db::system::{DbWithTestSystem, SystemPath, SystemPathBuf};
use ruff_db::vendored::VendoredPathBuf;

use crate::db::tests::TestDb;
use crate::resolver::{set_module_resolution_settings, RawModuleResolutionSettings};
use crate::supported_py_version::TargetVersion;

/// A test case for the module resolver.
///

@@ -215,10 +214,10 @@ impl TestCaseBuilder<MockedTypeshed> {
        let src = Self::write_mock_directory(&mut db, "/src", first_party_files);
        let typeshed = Self::build_typeshed_mock(&mut db, &typeshed_option);

        set_module_resolution_settings(
            &mut db,
            RawModuleResolutionSettings {
                target_version,
        Program::new(
            &db,
            target_version,
            SearchPathSettings {
                extra_paths: vec![],
                workspace_root: src.clone(),
                custom_typeshed: Some(typeshed.clone()),

@@ -268,10 +267,10 @@ impl TestCaseBuilder<VendoredTypeshed> {
        Self::write_mock_directory(&mut db, "/site-packages", site_packages_files);
        let src = Self::write_mock_directory(&mut db, "/src", first_party_files);

        set_module_resolution_settings(
            &mut db,
            RawModuleResolutionSettings {
                target_version,
        Program::new(
            &db,
            target_version,
            SearchPathSettings {
                extra_paths: vec![],
                workspace_root: src.clone(),
                custom_typeshed: None,
@@ -6,6 +6,7 @@ use std::ops::{RangeFrom, RangeInclusive};
use std::str::FromStr;

use once_cell::sync::Lazy;
use ruff_db::program::TargetVersion;
use ruff_db::system::SystemPath;
use rustc_hash::FxHashMap;

@@ -13,7 +14,6 @@ use ruff_db::files::{system_path_to_file, File};

use crate::db::Db;
use crate::module_name::ModuleName;
use crate::supported_py_version::TargetVersion;

use super::vendored::vendored_typeshed_stubs;

@@ -440,6 +440,7 @@ mod tests {
    use std::path::Path;

    use insta::assert_snapshot;
    use ruff_db::program::TargetVersion;

    use super::*;
@@ -162,11 +162,9 @@ impl HasTy for ast::Alias {

#[cfg(test)]
mod tests {
    use red_knot_module_resolver::{
        set_module_resolution_settings, RawModuleResolutionSettings, TargetVersion,
    };
    use ruff_db::files::system_path_to_file;
    use ruff_db::parsed::parsed_module;
    use ruff_db::program::{Program, SearchPathSettings, TargetVersion};
    use ruff_db::system::{DbWithTestSystem, SystemPathBuf};

    use crate::db::tests::TestDb;

@@ -174,15 +172,15 @@ mod tests {
    use crate::{HasTy, SemanticModel};

    fn setup_db() -> TestDb {
        let mut db = TestDb::new();
        set_module_resolution_settings(
            &mut db,
            RawModuleResolutionSettings {
        let db = TestDb::new();
        Program::new(
            &db,
            TargetVersion::Py38,
            SearchPathSettings {
                extra_paths: vec![],
                workspace_root: SystemPathBuf::from("/src"),
                site_packages: None,
                custom_typeshed: None,
                target_version: TargetVersion::Py38,
            },
        );
@@ -3,20 +3,19 @@ use salsa;

use red_knot_module_resolver::{resolve_module, ModuleName};
use ruff_db::files::File;
use ruff_db::parsed::parsed_module;
use ruff_python_ast as ast;
use ruff_python_ast::{ExprContext, TypeParams};

use crate::semantic_index::ast_ids::{HasScopedAstId, HasScopedUseId, ScopedExpressionId};
use crate::semantic_index::definition::{Definition, DefinitionKind, DefinitionNodeKey};
use crate::semantic_index::expression::Expression;
use crate::semantic_index::semantic_index;
use crate::semantic_index::symbol::NodeWithScopeKind;
use crate::semantic_index::symbol::{NodeWithScopeRef, ScopeId};
use crate::semantic_index::SemanticIndex;
use crate::types::{definitions_ty, ClassType, FunctionType, Name, Type, UnionTypeBuilder};
use crate::Db;
use ruff_db::parsed::parsed_module;

use crate::semantic_index::semantic_index;
use crate::semantic_index::symbol::NodeWithScopeKind;

/// Infer all types for a [`Definition`] (including sub-expressions).
/// Use when resolving a symbol name use or public type of a symbol.

@@ -703,11 +702,9 @@ impl<'db> TypeInferenceBuilder<'db> {

#[cfg(test)]
mod tests {
    use red_knot_module_resolver::{
        set_module_resolution_settings, RawModuleResolutionSettings, TargetVersion,
    };
    use ruff_db::files::{system_path_to_file, File};
    use ruff_db::parsed::parsed_module;
    use ruff_db::program::{Program, SearchPathSettings, TargetVersion};
    use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
    use ruff_db::testing::assert_function_query_was_not_run;
    use ruff_python_ast::name::Name;

@@ -721,12 +718,12 @@ mod tests {
    use crate::{HasTy, SemanticModel};

    fn setup_db() -> TestDb {
        let mut db = TestDb::new();
        let db = TestDb::new();

        set_module_resolution_settings(
            &mut db,
            RawModuleResolutionSettings {
                target_version: TargetVersion::Py38,
        Program::new(
            &db,
            TargetVersion::Py38,
            SearchPathSettings {
                extra_paths: Vec::new(),
                workspace_root: SystemPathBuf::from("/src"),
                site_packages: None,
@@ -52,7 +52,6 @@ ruff_python_formatter = { workspace = true }
ruff_python_parser = { workspace = true }
ruff_python_trivia = { workspace = true }
red_knot = { workspace = true }
red_knot_module_resolver = { workspace = true }

[lints]
workspace = true
@@ -1,15 +1,13 @@
#![allow(clippy::disallowed_names)]

use red_knot::program::Program;
use red_knot::Workspace;
use red_knot_module_resolver::{
    set_module_resolution_settings, RawModuleResolutionSettings, TargetVersion,
};
use red_knot::db::RootDatabase;
use red_knot::workspace::WorkspaceMetadata;
use ruff_benchmark::criterion::{
    criterion_group, criterion_main, BatchSize, Criterion, Throughput,
};
use ruff_db::files::{system_path_to_file, File};
use ruff_db::parsed::parsed_module;
use ruff_db::program::{ProgramSettings, SearchPathSettings, TargetVersion};
use ruff_db::system::{MemoryFileSystem, SystemPath, TestSystem};
use ruff_db::Upcast;

@@ -45,7 +43,7 @@ def override(): ...
"#;

struct Case {
    program: Program,
    db: RootDatabase,
    fs: MemoryFileSystem,
    foo: File,
    bar: File,

@@ -66,29 +64,27 @@ fn setup_case() -> Case {
        .unwrap();

    let workspace_root = SystemPath::new("/src");
    let workspace = Workspace::new(workspace_root.to_path_buf());

    let mut program = Program::new(workspace, system);
    let foo = system_path_to_file(&program, foo_path).unwrap();

    set_module_resolution_settings(
        &mut program,
        RawModuleResolutionSettings {
    let metadata = WorkspaceMetadata::from_path(workspace_root, &system).unwrap();
    let settings = ProgramSettings {
        target_version: TargetVersion::default(),
        search_paths: SearchPathSettings {
            extra_paths: vec![],
            workspace_root: workspace_root.to_path_buf(),
            site_packages: None,
            custom_typeshed: None,
            target_version: TargetVersion::Py38,
        },
    );
    };

    program.workspace_mut().open_file(foo);
    let mut db = RootDatabase::new(metadata, settings, system);
    let foo = system_path_to_file(&db, foo_path).unwrap();

    let bar = system_path_to_file(&program, bar_path).unwrap();
    let typing = system_path_to_file(&program, typing_path).unwrap();
    db.workspace().open_file(&mut db, foo);

    let bar = system_path_to_file(&db, bar_path).unwrap();
    let typing = system_path_to_file(&db, typing_path).unwrap();

    Case {
        program,
        db,
        fs,
        foo,
        bar,

@@ -105,14 +101,14 @@ fn benchmark_without_parse(criterion: &mut Criterion) {
        || {
            let case = setup_case();
            // Pre-parse the module to only measure the semantic time.
            parsed_module(case.program.upcast(), case.foo);
            parsed_module(case.program.upcast(), case.bar);
            parsed_module(case.program.upcast(), case.typing);
            parsed_module(case.db.upcast(), case.foo);
            parsed_module(case.db.upcast(), case.bar);
            parsed_module(case.db.upcast(), case.typing);
            case
        },
        |case| {
            let Case { program, foo, .. } = case;
            let result = program.check_file(*foo).unwrap();
            let Case { db, foo, .. } = case;
            let result = db.check_file(*foo).unwrap();

            assert_eq!(result.as_slice(), [] as [String; 0]);
        },

@@ -131,7 +127,7 @@ fn benchmark_incremental(criterion: &mut Criterion) {
    b.iter_batched_ref(
        || {
            let mut case = setup_case();
            case.program.check_file(case.foo).unwrap();
            case.db.check_file(case.foo).unwrap();

            case.fs
                .write_file(

@@ -140,12 +136,12 @@ fn benchmark_incremental(criterion: &mut Criterion) {
                )
                .unwrap();

            case.bar.touch(&mut case.program);
            case.bar.touch(&mut case.db);
            case
        },
        |case| {
            let Case { program, foo, .. } = case;
            let result = program.check_file(*foo).unwrap();
            let Case { db, foo, .. } = case;
            let result = db.check_file(*foo).unwrap();

            assert_eq!(result.as_slice(), [] as [String; 0]);
        },

@@ -164,8 +160,8 @@ fn benchmark_cold(criterion: &mut Criterion) {
    b.iter_batched_ref(
        setup_case,
        |case| {
            let Case { program, foo, .. } = case;
            let result = program.check_file(*foo).unwrap();
            let Case { db, foo, .. } = case;
            let result = db.check_file(*foo).unwrap();

            assert_eq!(result.as_slice(), [] as [String; 0]);
        },
@@ -11,6 +11,7 @@ repository = { workspace = true }
license = { workspace = true }

[dependencies]
ruff_cache = { workspace = true, optional = true }
ruff_notebook = { workspace = true }
ruff_python_ast = { workspace = true }
ruff_python_parser = { workspace = true }

@@ -32,4 +33,5 @@ insta = { workspace = true }
tempfile = { workspace = true }

[features]
cache = ["ruff_cache"]
os = ["ignore"]
@@ -58,7 +58,7 @@ impl Files {
    ///
    /// The operation always succeeds even if the path doesn't exist on disk, isn't accessible or if the path points to a directory.
    /// In these cases, a file with status [`FileStatus::Deleted`] is returned.
    #[tracing::instrument(level = "debug", skip(self, db))]
    #[tracing::instrument(level = "debug", skip(self, db), ret)]
    fn system(&self, db: &dyn Db, path: &SystemPath) -> File {
        let absolute = SystemPath::absolute(path, db.system().current_directory());
        let absolute = FilePath::System(absolute);

@@ -102,7 +102,7 @@ impl Files {

    /// Looks up a vendored file by its path. Returns `Some` if a vendored file for the given path
    /// exists and `None` otherwise.
    #[tracing::instrument(level = "debug", skip(self, db))]
    #[tracing::instrument(level = "debug", skip(self, db), ret)]
    fn vendored(&self, db: &dyn Db, path: &VendoredPath) -> Option<File> {
        let file = match self
            .inner
@@ -1,5 +1,6 @@
use std::hash::BuildHasherDefault;

use program::Program;
use rustc_hash::FxHasher;
use salsa::DbWithJar;

@@ -12,6 +13,7 @@ use crate::vendored::VendoredFileSystem;
pub mod file_revision;
pub mod files;
pub mod parsed;
pub mod program;
pub mod source;
pub mod system;
pub mod testing;

@@ -20,7 +22,7 @@ pub mod vendored;
pub(crate) type FxDashMap<K, V> = dashmap::DashMap<K, V, BuildHasherDefault<FxHasher>>;

#[salsa::jar(db=Db)]
pub struct Jar(File, source_text, line_index, parsed_module);
pub struct Jar(File, Program, source_text, line_index, parsed_module);

/// Most basic database that gives access to files, the host system, source code, and parsed AST.
pub trait Db: DbWithJar<Jar> {
crates/ruff_db/src/program.rs (new file, 85 lines)

@@ -0,0 +1,85 @@
// TODO: Fix clippy warnings in Salsa macros
#![allow(clippy::needless_lifetimes, clippy::clone_on_copy)]

use crate::{system::SystemPathBuf, Db};

#[salsa::input(singleton)]
pub struct Program {
    pub target_version: TargetVersion,

    #[return_ref]
    pub search_paths: SearchPathSettings,
}

impl Program {
    pub fn from_settings(db: &dyn Db, settings: ProgramSettings) -> Self {
        Program::new(db, settings.target_version, settings.search_paths)
    }
}

#[derive(Debug, Eq, PartialEq)]
pub struct ProgramSettings {
    pub target_version: TargetVersion,
    pub search_paths: SearchPathSettings,
}

/// Enumeration of all supported Python versions
///
/// TODO: unify with the `PythonVersion` enum in the linter/formatter crates?
#[derive(Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord, Default)]
pub enum TargetVersion {
    Py37,
    #[default]
    Py38,
    Py39,
    Py310,
    Py311,
    Py312,
    Py313,
}

impl TargetVersion {
    const fn as_str(self) -> &'static str {
        match self {
            Self::Py37 => "py37",
            Self::Py38 => "py38",
            Self::Py39 => "py39",
            Self::Py310 => "py310",
            Self::Py311 => "py311",
            Self::Py312 => "py312",
            Self::Py313 => "py313",
        }
    }
}

impl std::fmt::Display for TargetVersion {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(self.as_str())
    }
}

impl std::fmt::Debug for TargetVersion {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::Display::fmt(self, f)
    }
}

/// Configures the search paths for module resolution.
#[derive(Eq, PartialEq, Debug)]
pub struct SearchPathSettings {
    /// List of user-provided paths that should take first priority in the module resolution.
    /// Examples in other type checkers are mypy's MYPYPATH environment variable,
    /// or pyright's stubPath configuration setting.
    pub extra_paths: Vec<SystemPathBuf>,

    /// The root of the workspace, used for finding first-party modules.
    pub workspace_root: SystemPathBuf,

    /// Optional (already validated) path to standard-library typeshed stubs.
    /// If this is not provided, we will fallback to our vendored typeshed stubs
    /// bundled as a zip file in the binary
    pub custom_typeshed: Option<SystemPathBuf>,

    /// The path to the user's `site-packages` directory, where third-party packages from ``PyPI`` are installed.
    pub site_packages: Option<SystemPathBuf>,
}

@@ -27,9 +27,12 @@ struct OsSystemInner {

impl OsSystem {
    pub fn new(cwd: impl AsRef<SystemPath>) -> Self {
        let cwd = cwd.as_ref();
        assert!(cwd.as_utf8_path().is_absolute());

        Self {
            inner: Arc::new(OsSystemInner {
                cwd: cwd.as_ref().to_path_buf(),
                cwd: cwd.to_path_buf(),
            }),
        }
    }

@@ -311,7 +314,9 @@ mod tests {

    #[test]
    fn read_directory_nonexistent() {
        let fs = OsSystem::new("");
        let tempdir = TempDir::new().unwrap();

        let fs = OsSystem::new(SystemPath::from_std_path(tempdir.path()).unwrap());
        let result = fs.read_directory(SystemPath::new("doesnt_exist"));
        assert!(result.is_err_and(|error| error.kind() == std::io::ErrorKind::NotFound));
    }
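
The new assertion means callers must hand `OsSystem::new` an absolute directory, as the updated test does. A minimal sketch of a conforming call site (assuming only std; `from_std_path` returns `None` for non-UTF-8 paths, hence the `unwrap`):

    use ruff_db::system::{OsSystem, SystemPath};

    fn os_system_for_cwd() -> OsSystem {
        // `std::env::current_dir` returns an absolute path, satisfying
        // the `is_absolute` assertion in `OsSystem::new`.
        let cwd = std::env::current_dir().unwrap();
        OsSystem::new(SystemPath::from_std_path(&cwd).unwrap())
    }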

@@ -549,3 +549,17 @@ impl std::fmt::Display for SystemPathBuf {
        self.0.fmt(f)
    }
}

#[cfg(feature = "cache")]
impl ruff_cache::CacheKey for SystemPath {
    fn cache_key(&self, hasher: &mut ruff_cache::CacheKeyHasher) {
        self.0.as_str().cache_key(hasher);
    }
}

#[cfg(feature = "cache")]
impl ruff_cache::CacheKey for SystemPathBuf {
    fn cache_key(&self, hasher: &mut ruff_cache::CacheKeyHasher) {
        self.as_path().cache_key(hasher);
    }
}